From 4e8edda93450c9192cd36b6738a21e9acfd11e82 Mon Sep 17 00:00:00 2001
From: Andric Tham
Date: Mon, 7 Jul 2025 20:24:36 +0800
Subject: [PATCH] feat(models): support reasoning for gemini

---
 packages/backend/convex/chat.ts   | 9 +++++----
 packages/backend/convex/models.ts | 2 ++
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/packages/backend/convex/chat.ts b/packages/backend/convex/chat.ts
index dd7f9f1..6dae381 100644
--- a/packages/backend/convex/chat.ts
+++ b/packages/backend/convex/chat.ts
@@ -80,6 +80,7 @@ export const streamMessage = internalAction({
   handler: async (ctx, { promptMessageId, threadId, model, useWebSearch }) => {
     const { thread } = await agent.continueThread(ctx, { threadId });
     const modelId = model && isAllowedModel(model) ? model : defaultModel;
+    const modelInfo = allowedModels.find((m) => m.id === modelId);
     try {
       const result = await thread.streamText(
         {
@@ -87,15 +88,15 @@
           model: openrouter.chat(modelId),
           tools: useWebSearch ? { webSearch: webSearchTool } : undefined,
           maxSteps: 5,
+          providerOptions: modelInfo?.reasoning
+            ? { openrouter: { reasoning: { enabled: true } } }
+            : undefined,
         },
         { saveStreamDeltas: true },
       );
       await result.consumeStream();
     } catch (unknownError) {
-      const error =
-        unknownError instanceof Error
-          ? unknownError
-          : new Error(String(unknownError));
+      const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError));
       if (error.name === "AI_TypeValidationError") {
         console.error("LLM returned invalid response", error);
         throw new ConvexError("The AI service returned an invalid response.");
diff --git a/packages/backend/convex/models.ts b/packages/backend/convex/models.ts
index c22b864..b1bff35 100644
--- a/packages/backend/convex/models.ts
+++ b/packages/backend/convex/models.ts
@@ -16,6 +16,7 @@ export const allowedModels = [
     gateway: "openrouter",
     supportsImageUploads: false,
     supportsTools: false,
+    reasoning: true,
   },
   {
     id: "qwen/qwen3-32b-04-28:free",
@@ -122,6 +123,7 @@ export const listModels = query({
        gateway: v.string(),
        supportsTools: v.optional(v.boolean()),
        supportsImageUploads: v.optional(v.boolean()),
+       reasoning: v.optional(v.boolean()),
      }),
    ),
  }),