Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 22 additions & 3 deletions src/commands/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@ function getEnvVarForProvider(provider: string): string {
return 'CLAUDE_CODE_USE_FOUNDRY'
case 'gemini':
return 'CLAUDE_CODE_USE_GEMINI'
case 'grok':
return 'CLAUDE_CODE_USE_GROK'
default:
throw new Error(`Unknown provider: ${provider}`)
}
Expand Down Expand Up @@ -48,6 +50,7 @@ const call: LocalCommandCall = async (args, context) => {
delete process.env.CLAUDE_CODE_USE_FOUNDRY
delete process.env.CLAUDE_CODE_USE_OPENAI
delete process.env.CLAUDE_CODE_USE_GEMINI
delete process.env.CLAUDE_CODE_USE_GROK
return {
type: 'text',
value: 'API provider cleared (will use environment variables).',
Expand All @@ -59,6 +62,7 @@ const call: LocalCommandCall = async (args, context) => {
'anthropic',
'openai',
'gemini',
'grok',
'bedrock',
'vertex',
'foundry',
Expand Down Expand Up @@ -87,6 +91,19 @@ const call: LocalCommandCall = async (args, context) => {
}
}

// Check env vars when switching to grok (including settings.env)
if (arg === 'grok') {
const mergedEnv = getMergedEnv()
const hasKey = !!(mergedEnv.GROK_API_KEY || mergedEnv.XAI_API_KEY)
if (!hasKey) {
updateSettingsForSource('userSettings', { modelType: 'grok' })
return {
type: 'text',
value: `Switched to Grok provider.\nWarning: Missing env var: GROK_API_KEY (or XAI_API_KEY)\nConfigure it via settings.json env or set manually.`,
}
}
}

// Check env vars when switching to gemini (including settings.env)
if (arg === 'gemini') {
const mergedEnv = getMergedEnv()
Expand All @@ -104,13 +121,14 @@ const call: LocalCommandCall = async (args, context) => {
// Handle different provider types
// - 'anthropic', 'openai', 'gemini' are stored in settings.json (persistent)
// - 'bedrock', 'vertex', 'foundry' are env-only (do NOT touch settings.json)
if (arg === 'anthropic' || arg === 'openai' || arg === 'gemini') {
if (arg === 'anthropic' || arg === 'openai' || arg === 'gemini' || arg === 'grok') {
// Clear any cloud provider env vars to avoid conflicts
delete process.env.CLAUDE_CODE_USE_BEDROCK
delete process.env.CLAUDE_CODE_USE_VERTEX
delete process.env.CLAUDE_CODE_USE_FOUNDRY
delete process.env.CLAUDE_CODE_USE_OPENAI
delete process.env.CLAUDE_CODE_USE_GEMINI
delete process.env.CLAUDE_CODE_USE_GROK
// Update settings.json
updateSettingsForSource('userSettings', { modelType: arg })
// Ensure settings.env gets applied to process.env
Expand All @@ -122,6 +140,7 @@ const call: LocalCommandCall = async (args, context) => {
delete process.env.OPENAI_API_KEY
delete process.env.OPENAI_BASE_URL
delete process.env.CLAUDE_CODE_USE_GEMINI
delete process.env.CLAUDE_CODE_USE_GROK
process.env[getEnvVarForProvider(arg)] = '1'
// Do not modify settings.json - cloud providers controlled solely by env vars
applyConfigEnvironmentVariables()
Expand All @@ -136,9 +155,9 @@ const provider = {
type: 'local',
name: 'provider',
description:
'Switch API provider (anthropic/openai/gemini/bedrock/vertex/foundry)',
'Switch API provider (anthropic/openai/gemini/grok/bedrock/vertex/foundry)',
aliases: ['api'],
argumentHint: '[anthropic|openai|gemini|bedrock|vertex|foundry|unset]',
argumentHint: '[anthropic|openai|gemini|grok|bedrock|vertex|foundry|unset]',
supportsNonInteractive: true,
load: () => Promise.resolve({ call }),
} satisfies Command
Expand Down
6 changes: 6 additions & 0 deletions src/services/api/claude.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1350,6 +1350,12 @@ async function* queryModel(
return
}

if (getAPIProvider() === 'grok') {
const { queryModelGrok } = await import('./grok/index.js')
yield* queryModelGrok(messagesForAPI, systemPrompt, filteredTools, signal, options)
return
}

// Instrumentation: Track message count after normalization
logEvent('tengu_api_after_normalize', {
postNormalizedMessageCount: messagesForAPI.length,
Expand Down
44 changes: 44 additions & 0 deletions src/services/api/grok/__tests__/client.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
import { describe, expect, test, beforeEach, afterEach } from 'bun:test'
import { getGrokClient, clearGrokClientCache } from '../client.js'

describe('getGrokClient', () => {
  // Snapshot the environment so every test starts from a known state.
  const savedEnv = { ...process.env }

  beforeEach(() => {
    clearGrokClientCache()
    process.env.GROK_API_KEY = 'test-key'
    delete process.env.GROK_BASE_URL
  })

  afterEach(() => {
    clearGrokClientCache()
    process.env = { ...savedEnv }
  })

  test('creates client with default base URL', () => {
    const grok = getGrokClient()
    expect(grok).toBeDefined()
    expect(grok.baseURL).toBe('https://api.x.ai/v1')
  })

  test('uses GROK_BASE_URL when set', () => {
    process.env.GROK_BASE_URL = 'https://custom.grok.api/v1'
    clearGrokClientCache()
    expect(getGrokClient().baseURL).toBe('https://custom.grok.api/v1')
  })

  test('returns cached client on second call', () => {
    const first = getGrokClient()
    const second = getGrokClient()
    expect(first).toBe(second)
  })

  test('clearGrokClientCache resets cache', () => {
    const before = getGrokClient()
    clearGrokClientCache()
    process.env.GROK_BASE_URL = 'https://other.api/v1'
    const after = getGrokClient()
    expect(before).not.toBe(after)
  })
})
67 changes: 67 additions & 0 deletions src/services/api/grok/__tests__/modelMapping.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
import { describe, expect, test, beforeEach, afterEach } from 'bun:test'
import { resolveGrokModel } from '../modelMapping.js'

describe('resolveGrokModel', () => {
  // Snapshot the environment so every test starts from a known state.
  const savedEnv = { ...process.env }

  // All env vars that can influence model resolution; cleared before each test.
  const overrideVars = [
    'GROK_MODEL',
    'GROK_MODEL_MAP',
    'GROK_DEFAULT_SONNET_MODEL',
    'GROK_DEFAULT_OPUS_MODEL',
    'GROK_DEFAULT_HAIKU_MODEL',
    'ANTHROPIC_DEFAULT_SONNET_MODEL',
    'ANTHROPIC_DEFAULT_OPUS_MODEL',
    'ANTHROPIC_DEFAULT_HAIKU_MODEL',
  ]

  beforeEach(() => {
    for (const name of overrideVars) {
      delete process.env[name]
    }
  })

  afterEach(() => {
    process.env = { ...savedEnv }
  })

  test('GROK_MODEL env var takes highest priority', () => {
    process.env.GROK_MODEL = 'grok-custom'
    expect(resolveGrokModel('claude-sonnet-4-6')).toBe('grok-custom')
  })

  test('maps opus models to grok-4.20-reasoning', () => {
    expect(resolveGrokModel('claude-opus-4-6')).toBe('grok-4.20-reasoning')
  })

  test('maps sonnet models to grok-3-mini-fast', () => {
    expect(resolveGrokModel('claude-sonnet-4-6')).toBe('grok-3-mini-fast')
  })

  test('maps haiku models to grok-3-mini-fast', () => {
    expect(resolveGrokModel('claude-haiku-4-5-20251001')).toBe('grok-3-mini-fast')
  })

  test('GROK_MODEL_MAP overrides family mapping', () => {
    process.env.GROK_MODEL_MAP = '{"opus":"grok-4","sonnet":"grok-3","haiku":"grok-mini"}'
    const expectations: Array<[string, string]> = [
      ['claude-opus-4-6', 'grok-4'],
      ['claude-sonnet-4-6', 'grok-3'],
      ['claude-haiku-4-5-20251001', 'grok-mini'],
    ]
    for (const [input, expected] of expectations) {
      expect(resolveGrokModel(input)).toBe(expected)
    }
  })

  test('GROK_MODEL_MAP ignores invalid JSON', () => {
    process.env.GROK_MODEL_MAP = 'not-json'
    expect(resolveGrokModel('claude-opus-4-6')).toBe('grok-4.20-reasoning')
  })

  test('GROK_DEFAULT_{FAMILY}_MODEL overrides default map', () => {
    process.env.GROK_DEFAULT_OPUS_MODEL = 'grok-2-latest'
    expect(resolveGrokModel('claude-opus-4-6')).toBe('grok-2-latest')
  })

  test('passes through unknown model names', () => {
    expect(resolveGrokModel('some-unknown-model')).toBe('some-unknown-model')
  })

  test('strips [1m] suffix before lookup', () => {
    expect(resolveGrokModel('claude-sonnet-4-6[1m]')).toBe('grok-3-mini-fast')
  })

  test('falls back to family default for unlisted model', () => {
    expect(resolveGrokModel('claude-opus-99-20300101')).toBe('grok-4.20-reasoning')
  })
})
44 changes: 44 additions & 0 deletions src/services/api/grok/client.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
import OpenAI from 'openai'
import { getProxyFetchOptions } from 'src/utils/proxy.js'

/**
* Environment variables:
*
* GROK_API_KEY (or XAI_API_KEY): Required. API key for the xAI Grok endpoint.
* GROK_BASE_URL: Optional. Defaults to https://api.x.ai/v1.
*/

const DEFAULT_BASE_URL = 'https://api.x.ai/v1'

let cachedClient: OpenAI | null = null

export function getGrokClient(options?: {
maxRetries?: number
fetchOverride?: typeof fetch
source?: string
}): OpenAI {
if (cachedClient) return cachedClient

Comment on lines +20 to +21
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

fetchOverride is ignored when a cached client already exists.

The early cache return prevents override-based callers from injecting custom transport once the singleton is initialized.

Proposed fix
 export function getGrokClient(options?: {
   maxRetries?: number
   fetchOverride?: typeof fetch
   source?: string
 }): OpenAI {
-  if (cachedClient) return cachedClient
+  if (cachedClient && !options?.fetchOverride) return cachedClient
@@
-  if (!options?.fetchOverride) {
+  if (!options?.fetchOverride) {
     cachedClient = client
   }

Also applies to: 35-37

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@src/services/api/grok/client.ts` around lines 20 - 21, The early "if
(cachedClient) return cachedClient" ignores a provided fetchOverride; update the
logic in the getGrokClient (or the function that references cachedClient and
fetchOverride) so that if fetchOverride is passed you either (a) inject/replace
the transport on the existing cachedClient with the provided fetchOverride or
(b) recreate the client using the fetchOverride before returning; apply the same
change to the other occurrence around the cachedClient check (lines ~35-37) so
callers can override the transport even after the singleton exists.

const apiKey = process.env.GROK_API_KEY || process.env.XAI_API_KEY || ''
const baseURL = process.env.GROK_BASE_URL || DEFAULT_BASE_URL

const client = new OpenAI({
apiKey,
baseURL,
maxRetries: options?.maxRetries ?? 0,
timeout: parseInt(process.env.API_TIMEOUT_MS || String(600 * 1000), 10),
dangerouslyAllowBrowser: true,
fetchOptions: getProxyFetchOptions({ forAnthropicAPI: false }) as RequestInit,
...(options?.fetchOverride && { fetch: options.fetchOverride }),
})

if (!options?.fetchOverride) {
cachedClient = client
}

return client
}

/**
 * Drops the cached singleton so the next getGrokClient() call rebuilds it.
 * Needed after env-based configuration changes and between test cases.
 */
export function clearGrokClientCache(): void {
  cachedClient = null
}
Loading
Loading