diff --git a/build.ts b/build.ts index 857aefe8e..6fe75cc4c 100644 --- a/build.ts +++ b/build.ts @@ -59,6 +59,7 @@ const result = await Bun.build({ splitting: true, define: getMacroDefines(), features, + external: ['openai'], }) if (!result.success) { diff --git a/src/commands/login/login.tsx b/src/commands/login/login.tsx index b4329fe62..ea01527ba 100644 --- a/src/commands/login/login.tsx +++ b/src/commands/login/login.tsx @@ -22,6 +22,9 @@ import { resetAutoModeGateCheck, resetBypassPermissionsCheck, } from '../../utils/permissions/bypassPermissionsKillswitch.js' +import { applyConfigEnvironmentVariables } from '../../utils/managedEnv.js' +import { resetModelStrings } from '../../utils/model/modelStrings.js' +import { resetSettingsCache } from '../../utils/settings/settingsCache.js' import { resetUserCache } from '../../utils/user.js' export async function call( @@ -40,6 +43,9 @@ export async function call( // Reset cost state when switching accounts resetCostState() // Refresh remotely managed settings after login (non-blocking) + resetSettingsCache() + applyConfigEnvironmentVariables() + resetModelStrings() void refreshRemoteManagedSettings() // Refresh policy limits after login (non-blocking) void refreshPolicyLimits() diff --git a/src/services/api/grok/client.ts b/src/services/api/grok/client.ts index 060d12636..87bb4f4f2 100644 --- a/src/services/api/grok/client.ts +++ b/src/services/api/grok/client.ts @@ -1,4 +1,4 @@ -import OpenAI from 'openai' +import type OpenAI from 'openai' import { getProxyFetchOptions } from 'src/utils/proxy.js' /** @@ -12,17 +12,18 @@ const DEFAULT_BASE_URL = 'https://api.x.ai/v1' let cachedClient: OpenAI | null = null -export function getGrokClient(options?: { +export async function getGrokClient(options?: { maxRetries?: number fetchOverride?: typeof fetch source?: string -}): OpenAI { +}): Promise<OpenAI> { if (cachedClient) return cachedClient + const { default: OpenAIClass } = await import('openai') const apiKey =
process.env.GROK_API_KEY || process.env.XAI_API_KEY || '' const baseURL = process.env.GROK_BASE_URL || DEFAULT_BASE_URL - const client = new OpenAI({ + const client = new OpenAIClass({ apiKey, baseURL, maxRetries: options?.maxRetries ?? 0, diff --git a/src/services/api/grok/index.ts b/src/services/api/grok/index.ts index 3198e85f6..536ec7b0b 100644 --- a/src/services/api/grok/index.ts +++ b/src/services/api/grok/index.ts @@ -65,7 +65,7 @@ export async function* queryModelGrok( const openaiTools = anthropicToolsToOpenAI(standardTools) const openaiToolChoice = anthropicToolChoiceToOpenAI(options.toolChoice) - const client = getGrokClient({ + const client = await getGrokClient({ maxRetries: 0, fetchOverride: options.fetchOverride as typeof fetch | undefined, source: options.querySource, @@ -187,7 +187,8 @@ export async function* queryModelGrok( } } catch (error) { const errorMessage = error instanceof Error ? error.message : String(error) - logForDebugging(`[Grok] Error: ${errorMessage}`, { level: 'error' }) + const stack = error instanceof Error ? 
`\n${error.stack}` : '' + logForDebugging(`[Grok] Error: ${errorMessage}${stack}`, { level: 'error' }) yield createAssistantAPIErrorMessage({ content: `API Error: ${errorMessage}`, apiError: 'api_error', diff --git a/src/services/api/grok/modelMapping.ts b/src/services/api/grok/modelMapping.ts index f3e40edbc..2a62f5ef6 100644 --- a/src/services/api/grok/modelMapping.ts +++ b/src/services/api/grok/modelMapping.ts @@ -5,27 +5,31 @@ * or override the entire mapping via GROK_MODEL_MAP env var (JSON string): * GROK_MODEL_MAP='{"opus":"grok-4","sonnet":"grok-3","haiku":"grok-3-mini-fast"}' */ -const DEFAULT_MODEL_MAP: Record<string, string> = { - 'claude-sonnet-4-20250514': 'grok-3-mini-fast', - 'claude-sonnet-4-5-20250929': 'grok-3-mini-fast', - 'claude-sonnet-4-6': 'grok-3-mini-fast', - 'claude-opus-4-20250514': 'grok-4.20-reasoning', - 'claude-opus-4-1-20250805': 'grok-4.20-reasoning', - 'claude-opus-4-5-20251101': 'grok-4.20-reasoning', - 'claude-opus-4-6': 'grok-4.20-reasoning', - 'claude-haiku-4-5-20251001': 'grok-3-mini-fast', - 'claude-3-5-haiku-20241022': 'grok-3-mini-fast', - 'claude-3-7-sonnet-20250219': 'grok-3-mini-fast', - 'claude-3-5-sonnet-20241022': 'grok-3-mini-fast', +function getDefaultModelMap(): Record<string, string> { + return { + 'claude-sonnet-4-20250514': 'grok-3-mini-fast', + 'claude-sonnet-4-5-20250929': 'grok-3-mini-fast', + 'claude-sonnet-4-6': 'grok-3-mini-fast', + 'claude-opus-4-20250514': 'grok-4.20-reasoning', + 'claude-opus-4-1-20250805': 'grok-4.20-reasoning', + 'claude-opus-4-5-20251101': 'grok-4.20-reasoning', + 'claude-opus-4-6': 'grok-4.20-reasoning', + 'claude-haiku-4-5-20251001': 'grok-3-mini-fast', + 'claude-3-5-haiku-20241022': 'grok-3-mini-fast', + 'claude-3-7-sonnet-20250219': 'grok-3-mini-fast', + 'claude-3-5-sonnet-20241022': 'grok-3-mini-fast', + } } /** * Family-level mapping defaults (used by GROK_MODEL_MAP).
*/ -const DEFAULT_FAMILY_MAP: Record<string, string> = { - opus: 'grok-4.20-reasoning', - sonnet: 'grok-3-mini-fast', - haiku: 'grok-3-mini-fast', +function getDefaultFamilyMap(): Record<string, string> { + return { + opus: 'grok-4.20-reasoning', + sonnet: 'grok-3-mini-fast', + haiku: 'grok-3-mini-fast', + } } function getModelFamily(model: string): 'haiku' | 'sonnet' | 'opus' | null { @@ -93,13 +97,13 @@ export function resolveGrokModel(anthropicModel: string): string { } // 5. Exact model name lookup - if (DEFAULT_MODEL_MAP[cleanModel]) { - return DEFAULT_MODEL_MAP[cleanModel] + if (getDefaultModelMap()[cleanModel]) { + return getDefaultModelMap()[cleanModel] } // 6. Family-level default - if (family && DEFAULT_FAMILY_MAP[family]) { - return DEFAULT_FAMILY_MAP[family] + if (family && getDefaultFamilyMap()[family]) { + return getDefaultFamilyMap()[family] } // 7. Pass through diff --git a/src/services/api/openai/client.ts b/src/services/api/openai/client.ts index 62a37dfbc..adaae3116 100644 --- a/src/services/api/openai/client.ts +++ b/src/services/api/openai/client.ts @@ -1,4 +1,4 @@ -import OpenAI from 'openai' +import type OpenAI from 'openai' import { getProxyFetchOptions } from 'src/utils/proxy.js' import { isEnvTruthy } from 'src/utils/envUtils.js' @@ -13,17 +13,18 @@ import { isEnvTruthy } from 'src/utils/envUtils.js' let cachedClient: OpenAI | null = null -export function getOpenAIClient(options?: { +export async function getOpenAIClient(options?: { maxRetries?: number fetchOverride?: typeof fetch source?: string -}): OpenAI { +}): Promise<OpenAI> { if (cachedClient) return cachedClient + const { default: OpenAIClass } = await import('openai') const apiKey = process.env.OPENAI_API_KEY || '' const baseURL = process.env.OPENAI_BASE_URL - const client = new OpenAI({ + const client = new OpenAIClass({ apiKey, ...(baseURL && { baseURL }), maxRetries: options?.maxRetries ??
0, diff --git a/src/services/api/openai/index.ts b/src/services/api/openai/index.ts index 040907006..7cc69ba23 100644 --- a/src/services/api/openai/index.ts +++ b/src/services/api/openai/index.ts @@ -290,7 +290,7 @@ export async function* queryModelOpenAI( const maxTokens = options.maxOutputTokensOverride ?? upperLimit // 11. Get client - const client = getOpenAIClient({ + const client = await getOpenAIClient({ maxRetries: 0, fetchOverride: options.fetchOverride as unknown as typeof fetch, source: options.querySource, @@ -432,7 +432,8 @@ export async function* queryModelOpenAI( } } catch (error) { const errorMessage = error instanceof Error ? error.message : String(error) - logForDebugging(`[OpenAI] Error: ${errorMessage}`, { level: 'error' }) + const stack = error instanceof Error ? `\n${error.stack}` : '' + logForDebugging(`[OpenAI] Error: ${errorMessage}${stack}`, { level: 'error' }) yield createAssistantAPIErrorMessage({ content: `API Error: ${errorMessage}`, apiError: 'api_error', diff --git a/src/services/api/openai/modelMapping.ts b/src/services/api/openai/modelMapping.ts index 7cb49c7f9..2b88774fa 100644 --- a/src/services/api/openai/modelMapping.ts +++ b/src/services/api/openai/modelMapping.ts @@ -2,18 +2,20 @@ * Default mapping from Anthropic model names to OpenAI model names. * Used only when ANTHROPIC_DEFAULT_*_MODEL env vars are not set. 
*/ -const DEFAULT_MODEL_MAP: Record<string, string> = { - 'claude-sonnet-4-20250514': 'gpt-4o', - 'claude-sonnet-4-5-20250929': 'gpt-4o', - 'claude-sonnet-4-6': 'gpt-4o', - 'claude-opus-4-20250514': 'o3', - 'claude-opus-4-1-20250805': 'o3', - 'claude-opus-4-5-20251101': 'o3', - 'claude-opus-4-6': 'o3', - 'claude-haiku-4-5-20251001': 'gpt-4o-mini', - 'claude-3-5-haiku-20241022': 'gpt-4o-mini', - 'claude-3-7-sonnet-20250219': 'gpt-4o', - 'claude-3-5-sonnet-20241022': 'gpt-4o', +function getDefaultModelMap(): Record<string, string> { + return { + 'claude-sonnet-4-20250514': 'gpt-4o', + 'claude-sonnet-4-5-20250929': 'gpt-4o', + 'claude-sonnet-4-6': 'gpt-4o', + 'claude-opus-4-20250514': 'o3', + 'claude-opus-4-1-20250805': 'o3', + 'claude-opus-4-5-20251101': 'o3', + 'claude-opus-4-6': 'o3', + 'claude-haiku-4-5-20251001': 'gpt-4o-mini', + 'claude-3-5-haiku-20241022': 'gpt-4o-mini', + 'claude-3-7-sonnet-20250219': 'gpt-4o', + 'claude-3-5-sonnet-20241022': 'gpt-4o', + } } /** @@ -59,5 +61,5 @@ export function resolveOpenAIModel(anthropicModel: string): string { if (anthropicOverride) return anthropicOverride } - return DEFAULT_MODEL_MAP[cleanModel] ?? cleanModel + return getDefaultModelMap()?.[cleanModel] ?? cleanModel } diff --git a/src/utils/model/modelStrings.ts b/src/utils/model/modelStrings.ts index 5b7be104f..4b1d97330 100644 --- a/src/utils/model/modelStrings.ts +++ b/src/utils/model/modelStrings.ts @@ -143,7 +143,13 @@ export function getModelStrings(): ModelStrings { } return applyModelOverrides(ms) } - +/** + * Reset the modelStrings cache so it re-initializes with the current provider on next access. + * Call this after switching providers (e.g. after /login). + */ +export function resetModelStrings(): void { + setModelStringsState(null as unknown as ModelStrings) +} /** * Ensure model strings are fully initialized. * For Bedrock users, this waits for the profile fetch to complete.