Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
98 changes: 98 additions & 0 deletions backend/src/adapters/upstream/openai.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
import { describe, expect, test } from "bun:test";
import { openaiUpstreamAdapter } from "./openai";

describe("openaiUpstreamAdapter reasoning compatibility", () => {
  // Helper: wrap an assistant message in a complete non-streaming
  // chat.completion payload so each test only spells out what varies.
  const completionResponse = (
    id: string,
    created: number,
    message: Record<string, unknown>,
  ): Response =>
    new Response(
      JSON.stringify({
        id,
        object: "chat.completion",
        created,
        model: "test-model",
        choices: [{ index: 0, message, finish_reason: "stop" }],
        usage: { prompt_tokens: 1, completion_tokens: 2, total_tokens: 3 },
      }),
    );

  test("parses non-stream reasoning field into thinking block", async () => {
    const response = completionResponse("chatcmpl-1", 1700000000, {
      role: "assistant",
      content: "final answer",
      reasoning: "chain of thought summary",
    });

    const parsed = await openaiUpstreamAdapter.parseResponse(response);
    expect(parsed.content).toEqual([
      { type: "thinking", thinking: "chain of thought summary" },
      { type: "text", text: "final answer" },
    ]);
  });

  test("prefers reasoning_content over reasoning when both exist", async () => {
    const response = completionResponse("chatcmpl-2", 1700000001, {
      role: "assistant",
      content: "final answer",
      reasoning_content: "preferred reasoning content",
      reasoning: "fallback reasoning",
    });

    const parsed = await openaiUpstreamAdapter.parseResponse(response);
    expect(parsed.content).toEqual([
      { type: "thinking", thinking: "preferred reasoning content" },
      { type: "text", text: "final answer" },
    ]);
  });

  test("parses stream delta reasoning field into thinking_delta", async () => {
    // Two SSE chunks followed by the [DONE] terminator, joined exactly as an
    // upstream would emit them on the wire.
    const sseLines = [
      'data: {"id":"chatcmpl-3","object":"chat.completion.chunk","created":1700000002,"model":"test-model","choices":[{"index":0,"delta":{"role":"assistant","reasoning":"stream reasoning"},"finish_reason":null}]}',
      'data: {"id":"chatcmpl-3","object":"chat.completion.chunk","created":1700000002,"model":"test-model","choices":[{"index":0,"delta":{"content":"stream text"},"finish_reason":"stop"}]}',
      "data: [DONE]",
    ];

    const collected: Array<unknown> = [];
    for await (const event of openaiUpstreamAdapter.parseStreamResponse(
      new Response(sseLines.join("\n")),
    )) {
      collected.push(event);
    }

    expect(collected).toContainEqual({
      type: "content_block_delta",
      index: 0,
      delta: { type: "thinking_delta", thinking: "stream reasoning" },
    });
    expect(collected).toContainEqual({
      type: "content_block_delta",
      index: 0,
      delta: { type: "text_delta", text: "stream text" },
    });
  });
});
32 changes: 27 additions & 5 deletions backend/src/adapters/upstream/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@ interface OpenAIChoice {
content: string | null;
tool_calls?: OpenAIToolCall[];
reasoning_content?: string;
reasoning?: string;
};
finish_reason: string | null;
}
Expand All @@ -117,6 +118,7 @@ interface OpenAIStreamChoice {
content?: string | null;
tool_calls?: OpenAIToolCallDelta[];
reasoning_content?: string;
reasoning?: string;
};
finish_reason: string | null;
}
Expand Down Expand Up @@ -291,18 +293,37 @@ function convertFinishReason(finishReason: string | null): StopReason {
}
}

function extractReasoningText(
payload?: {
reasoning_content?: string;
reasoning?: string;
},
): string | undefined {
if (!payload) {
return undefined;
}
if (payload.reasoning_content && payload.reasoning_content.length > 0) {
return payload.reasoning_content;
}
if (payload.reasoning && payload.reasoning.length > 0) {
return payload.reasoning;
}
return undefined;
Comment on lines +302 to +311
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

To improve clarity and ensure helper functions return values that are ready for use, consider making this function body more concise. In JavaScript/TypeScript, non-empty strings are 'truthy' values, while empty strings are 'falsy' values. You can leverage the || operator and optional chaining (?.) to achieve the same logic, making the code more concise and readable, and directly returning the desired value without requiring further processing by the caller.

  return payload?.reasoning_content || payload?.reasoning || undefined;
References
  1. To improve clarity, helper functions should return objects that are ready for use, without requiring the caller to immediately override properties. If a property should be null, omit it from the returned object rather than setting it and having the caller nullify it.

}

/**
* Convert OpenAI response to internal format
*/
function convertResponse(resp: OpenAIChatResponse): InternalResponse {
const choice = resp.choices[0];
const content: InternalContentBlock[] = [];

// Handle reasoning content (for o1/deepseek models)
if (choice?.message.reasoning_content) {
// Handle reasoning content (reasoning_content or reasoning)
const reasoningText = extractReasoningText(choice?.message);
if (reasoningText) {
content.push({
type: "thinking",
thinking: choice.message.reasoning_content,
thinking: reasoningText,
} as ThinkingContentBlock);
}

Expand Down Expand Up @@ -524,13 +545,14 @@ export const openaiUpstreamAdapter: UpstreamAdapter = {
}

// Handle reasoning content (thinking)
if (choice.delta.reasoning_content) {
const reasoningDelta = extractReasoningText(choice.delta);
if (reasoningDelta) {
yield {
type: "content_block_delta",
index: blockIndex,
delta: {
type: "thinking_delta",
thinking: choice.delta.reasoning_content,
thinking: reasoningDelta,
},
};
}
Expand Down