diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e8d0df2..6456f23 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -38,3 +38,5 @@ jobs: ANTHROPIC_API_KEY: ${{ secrets.UNIFIED_API_KEY }} OPENROUTER_BASE_URL: ${{ secrets.OPENROUTER_BASE_URL }} OPENROUTER_API_KEY: ${{ secrets.UNIFIED_API_KEY }} + OPENAI_BASE_URL: ${{ secrets.OPENAI_BASE_URL }} + OPENAI_API_KEY: ${{ secrets.UNIFIED_API_KEY }} diff --git a/cmd/claude-code-adapter-cli/serve.go b/cmd/claude-code-adapter-cli/serve.go index 29e95b1..a3a5c53 100644 --- a/cmd/claude-code-adapter-cli/serve.go +++ b/cmd/claude-code-adapter-cli/serve.go @@ -29,6 +29,7 @@ import ( "github.com/x5iu/claude-code-adapter/pkg/adapter" "github.com/x5iu/claude-code-adapter/pkg/datatypes/anthropic" + "github.com/x5iu/claude-code-adapter/pkg/datatypes/openai" "github.com/x5iu/claude-code-adapter/pkg/datatypes/openrouter" "github.com/x5iu/claude-code-adapter/pkg/profile" "github.com/x5iu/claude-code-adapter/pkg/provider" @@ -41,6 +42,7 @@ import ( const ( ProviderAnthropic = "anthropic" ProviderOpenRouter = "openrouter" + ProviderOpenAI = "openai" ) func newServeCommand() *cobra.Command { @@ -480,6 +482,41 @@ func onMessages(cmd *cobra.Command, prov provider.Provider, rec snapshot.Recorde } } else { switch ccProvider { + case ProviderOpenAI: + sn.Provider = ProviderOpenAI + slog.Info(fmt.Sprintf("[%d] using provider %q", requestID, ProviderOpenAI)) + w.Header().Set("X-Provider", ProviderOpenAI) + openaiRequest := adapter.ConvertAnthropicRequestToOpenAIRequest(ctx, req) + sn.OpenAIRequest = openaiRequest + oaiStream, header, err := prov.CreateOpenAIModelResponse(ctx, openaiRequest) + responseBuilder := openai.NewResponseBuilder() + defer func() { + sn.ResponseHeader = snapshot.Header(header) + sn.OpenAIResponse = responseBuilder.Build() + }() + if err != nil { + slog.Error(fmt.Sprintf("[%d] error making OpenAI Responses request: %s", requestID, err.Error())) + if providerError, isProviderError := 
provider.ParseError(err); isProviderError { + respondError(w, providerError.StatusCode(), providerError.Message()) + sn.Error = &snapshot.Error{ + Message: providerError.Message(), + Type: providerError.Type(), + Source: providerError.Source(), + } + sn.StatusCode = providerError.StatusCode() + } else { + respondError(w, http.StatusInternalServerError, err.Error()) + sn.Error = &snapshot.Error{Message: err.Error()} + sn.StatusCode = http.StatusInternalServerError + } + return + } + stream = adapter.ConvertOpenAIStreamToAnthropicStream( + ctx, + oaiStream, + adapter.WithInputTokens(inputTokens), + adapter.ExtractOpenAIResponseBuilder(responseBuilder), + ) case ProviderOpenRouter: fallthrough default: @@ -717,6 +754,11 @@ func profileToSnapshotConfig(p *profile.Profile) *snapshot.Config { AllowedProviders: p.OpenRouter.AllowedProviders, } } + if p.OpenAI != nil { + cfg.OpenAI = &snapshot.OpenAIConfig{ + BaseURL: p.OpenAI.BaseURL, + } + } return cfg } diff --git a/pkg/adapter/convert_request.go b/pkg/adapter/convert_request.go index 9d1ed91..8e4b51e 100644 --- a/pkg/adapter/convert_request.go +++ b/pkg/adapter/convert_request.go @@ -8,8 +8,10 @@ import ( "github.com/samber/lo" "github.com/x5iu/claude-code-adapter/pkg/datatypes/anthropic" + "github.com/x5iu/claude-code-adapter/pkg/datatypes/openai" "github.com/x5iu/claude-code-adapter/pkg/datatypes/openrouter" "github.com/x5iu/claude-code-adapter/pkg/profile" + "github.com/x5iu/claude-code-adapter/pkg/utils" ) type ConvertRequestOptions struct { @@ -548,3 +550,341 @@ func getOpenRouterModelReasoningFormat( } return openrouter.ChatCompletionMessageReasoningDetailFormat(prof.Options.GetReasoningFormat()) } + +func ConvertAnthropicRequestToOpenAIRequest( + ctx context.Context, + src *anthropic.GenerateMessageRequest, + options ...ConvertRequestOption, +) (dst *openai.CreateModelResponseRequest) { + prof, _ := profile.FromContext(ctx) + convertOptions := &ConvertRequestOptions{} + for _, applyOption := range options { + 
applyOption(convertOptions) + } + dst = &openai.CreateModelResponseRequest{ + Model: src.Model, + MaxOutputTokens: lo.ToPtr(src.MaxTokens), + Temperature: lo.ToPtr(src.Temperature), + TopP: src.TopP, + } + // Model name mapping from configuration + if modelMapper := prof.Options.GetModels(); modelMapper != nil { + if targetModel, ok := modelMapper[dst.Model]; ok { + dst.Model = targetModel + } + } + // Convert system messages: if all text, use instructions; otherwise, use input with system role + systemInput := convertAnthropicSystemContentToOpenAI(src.System) + if systemInput != nil { + // Contains non-text content, prepend to input + dst.Input = append(openai.ResponseInputParam{systemInput}, convertAnthropicMessagesToOpenAIInput(src.Messages)...) + } else { + // All text content, use instructions + dst.Instructions = convertAnthropicSystemContentToOpenAIInstruction(src.System) + dst.Input = convertAnthropicMessagesToOpenAIInput(src.Messages) + } + // Convert tools + if len(src.Tools) > 0 { + dst.Tools = make([]*openai.ResponseToolParam, 0, len(src.Tools)) + for _, srcTool := range src.Tools { + if dstTool := convertAnthropicToolToOpenAITool(prof, srcTool); dstTool != nil { + dst.Tools = append(dst.Tools, dstTool) + } + } + } + // Convert tool choice + if srcToolChoice := src.ToolChoice; srcToolChoice != nil { + dst.ToolChoice = convertAnthropicToolChoiceToOpenAIToolChoice(srcToolChoice) + dst.ParallelToolCalls = lo.ToPtr(!srcToolChoice.DisableParallelToolUse) + } + // Convert thinking/reasoning + if thinking := src.Thinking; thinking != nil && thinking.Type == anthropic.ThinkingTypeEnabled { + dst.Reasoning = &openai.ResponseReasoning{ + Effort: openai.ResponseReasoningEffort(prof.Options.GetReasoningEffort()), + } + } + return dst +} + +// convertAnthropicSystemContentToOpenAI checks if system content contains non-text content (like images). +// If it does, returns an input item with system role containing all content. 
+// If all content is text, returns nil (caller should use convertAnthropicSystemContentToOpenAIInstruction instead). +func convertAnthropicSystemContentToOpenAI( + src anthropic.MessageContents, +) *openai.ResponseInputItemParam { + if len(src) == 0 { + return nil + } + // Check if there's any non-text content + hasNonTextContent := false + for _, srcContent := range src { + if srcContent.Type != anthropic.MessageContentTypeText { + hasNonTextContent = true + break + } + } + if !hasNonTextContent { + return nil + } + // Convert to system message with all content types + messageContents := make(openai.ResponseMessageContents, 0, len(src)) + for _, srcContent := range src { + switch srcContent.Type { + case anthropic.MessageContentTypeText: + messageContents = append(messageContents, &openai.ResponseMessageContent{ + Text: &openai.ResponseMessageContentText{ + Type: openai.ResponseMessageContentTypeInputText, + Text: srcContent.Text, + }, + }) + case anthropic.MessageContentTypeImage: + if srcImage := srcContent.Source; srcImage != nil { + messageContents = append(messageContents, &openai.ResponseMessageContent{ + Image: &openai.ResponseMessageContentImage{ + Type: openai.ResponseMessageContentTypeInputImage, + ImageUrl: fmt.Sprintf("data:%s;%s,%s", srcImage.MediaType, srcImage.Type, srcImage.Data), + Detail: openai.ResponseMessageContentImageDetailAuto, + }, + }) + } + } + } + if len(messageContents) == 0 { + return nil + } + return &openai.ResponseInputItemParam{ + Message: &openai.ResponseMessage{ + Type: openai.ResponseInputItemTypeMessage, + Role: openai.ResponseMessageRoleSystem, + Content: messageContents, + }, + } +} + +func convertAnthropicSystemContentToOpenAIInstruction( + src anthropic.MessageContents, +) (dst string) { + if len(src) == 0 { + return "" + } + var instruction strings.Builder + for _, srcContent := range src { + if srcContent.Type != anthropic.MessageContentTypeText { + // OpenAI instructions only support text, skip non-text content + 
continue + } + instruction.WriteString(srcContent.Text) + instruction.WriteString("\n\n") + } + return strings.TrimSpace(instruction.String()) +} + +func convertAnthropicMessagesToOpenAIInput( + srcMessages []*anthropic.Message, +) (dst openai.ResponseInputParam) { + dst = make(openai.ResponseInputParam, 0, len(srcMessages)*2) // estimate capacity + for _, srcMessage := range srcMessages { + items := convertAnthropicMessageToOpenAIInputItems(srcMessage) + dst = append(dst, items...) + } + return dst +} + +func convertAnthropicMessageToOpenAIInputItems( + src *anthropic.Message, +) (dst []*openai.ResponseInputItemParam) { + dst = make([]*openai.ResponseInputItemParam, 0, len(src.Content)) + var role openai.ResponseMessageRole + switch src.Role { + case anthropic.MessageRoleUser: + role = openai.ResponseMessageRoleUser + case anthropic.MessageRoleAssistant: + role = openai.ResponseMessageRoleAssistant + } + // Collect text and image content into a single message + var messageContents openai.ResponseMessageContents + for _, srcContent := range src.Content { + switch srcContent.Type { + case anthropic.MessageContentTypeText: + var textType openai.ResponseMessageContentType + if role == openai.ResponseMessageRoleUser { + textType = openai.ResponseMessageContentTypeInputText + } else { + textType = openai.ResponseMessageContentTypeOutputText + } + messageContents = append(messageContents, &openai.ResponseMessageContent{ + Text: &openai.ResponseMessageContentText{ + Type: textType, + Text: srcContent.Text, + }, + }) + case anthropic.MessageContentTypeImage: + if srcImage := srcContent.Source; srcImage != nil { + messageContents = append(messageContents, &openai.ResponseMessageContent{ + Image: &openai.ResponseMessageContentImage{ + Type: openai.ResponseMessageContentTypeInputImage, + ImageUrl: fmt.Sprintf("data:%s;%s,%s", srcImage.MediaType, srcImage.Type, srcImage.Data), + Detail: openai.ResponseMessageContentImageDetailAuto, + }, + }) + } + case 
anthropic.MessageContentTypeToolUse: + // Flush pending message content first + if len(messageContents) > 0 { + dst = append(dst, &openai.ResponseInputItemParam{ + Message: &openai.ResponseMessage{ + Type: openai.ResponseInputItemTypeMessage, + Role: role, + Content: messageContents, + }, + }) + messageContents = nil + } + // Add function call + dst = append(dst, &openai.ResponseInputItemParam{ + FunctionCall: &openai.ResponseFunctionToolCallParam{ + Type: openai.ResponseInputItemTypeFunctionCall, + ID: srcContent.ID, + CallID: srcContent.ID, + Name: srcContent.Name, + Arguments: string(srcContent.Input), + Status: openai.ResponseStatusCompleted, + }, + }) + case anthropic.MessageContentTypeToolResult: + // Flush pending message content first + if len(messageContents) > 0 { + dst = append(dst, &openai.ResponseInputItemParam{ + Message: &openai.ResponseMessage{ + Type: openai.ResponseInputItemTypeMessage, + Role: role, + Content: messageContents, + }, + }) + messageContents = nil + } + // Add function call output + output := convertAnthropicToolResultToOpenAIFunctionOutput(srcContent) + output.CallID = srcContent.ToolUseID + dst = append(dst, &openai.ResponseInputItemParam{ + FunctionCallOutput: output, + }) + case anthropic.MessageContentTypeThinking: + // Flush pending message content first + if len(messageContents) > 0 { + dst = append(dst, &openai.ResponseInputItemParam{ + Message: &openai.ResponseMessage{ + Type: openai.ResponseInputItemTypeMessage, + Role: role, + Content: messageContents, + }, + }) + messageContents = nil + } + // Add reasoning item + dst = append(dst, &openai.ResponseInputItemParam{ + Reasoning: &openai.ResponseReasoningItem{ + Type: openai.ResponseInputItemTypeReasoningItem, + ID: utils.GenerateID("rs"), + Status: openai.ResponseStatusCompleted, + Summary: []*openai.ResponseReasoningContent{ + { + Type: openai.ResponseReasoningTypeSummaryText, + Text: srcContent.Thinking, + }, + }, + }, + }) + case 
anthropic.MessageContentTypeRedactedThinking: + // Skip redacted thinking + } + } + // Flush any remaining message content + if len(messageContents) > 0 { + dst = append(dst, &openai.ResponseInputItemParam{ + Message: &openai.ResponseMessage{ + Type: openai.ResponseInputItemTypeMessage, + Role: role, + Content: messageContents, + }, + }) + } + return dst +} + +func convertAnthropicToolResultToOpenAIFunctionOutput( + src *anthropic.MessageContent, +) *openai.ResponseFunctionCallOutput { + output := &openai.ResponseFunctionCallOutput{ + Type: openai.ResponseInputItemTypeFunctionCallOutput, + Status: openai.ResponseStatusCompleted, + } + if src.Content != nil { + var sb strings.Builder + for _, content := range src.Content { + if content.Type == anthropic.MessageContentTypeText { + if sb.Len() > 0 { + sb.WriteString("\n") + } + sb.WriteString(content.Text) + } + } + output.Output = sb.String() + } + return output +} + +func convertAnthropicToolToOpenAITool( + prof *profile.Profile, + src *anthropic.Tool, +) *openai.ResponseToolParam { + var srcToolType anthropic.ToolType + if src.Type == nil { + srcToolType = anthropic.ToolTypeCustom + } else { + srcToolType = *src.Type + } + switch srcToolType { + case anthropic.ToolTypeCustom: + return &openai.ResponseToolParam{ + Function: &openai.ResponseFunctionToolParam{ + Type: openai.ResponseToolCallTypeFunction, + Name: src.Name, + Description: src.Description, + Strict: prof.Options.GetStrict(), + Parameters: openai.ResponseJSONSchemaObject(src.InputSchema), + }, + } + default: + // Skip non-custom tools (server tools, etc.) 
+ return nil + } +} + +func convertAnthropicToolChoiceToOpenAIToolChoice( + src *anthropic.ToolChoice, +) *openai.ResponseToolChoice { + switch src.Type { + case anthropic.ToolChoiceTypeAuto: + return &openai.ResponseToolChoice{ + Option: openai.ChatCompletionToolChoiceOptionAuto, + } + case anthropic.ToolChoiceTypeNone: + return &openai.ResponseToolChoice{ + Option: openai.ChatCompletionToolChoiceOptionNone, + } + case anthropic.ToolChoiceTypeAny: + return &openai.ResponseToolChoice{ + Option: openai.ChatCompletionToolChoiceOptionRequired, + } + case anthropic.ToolChoiceTypeTool: + return &openai.ResponseToolChoice{ + Function: &openai.ResponseToolChoiceFunctionParam{ + Type: openai.ResponseToolChoiceTypeFunction, + Name: src.Name, + }, + } + default: + return nil + } +} diff --git a/pkg/adapter/convert_request_test.go b/pkg/adapter/convert_request_test.go index cde0988..4ee834a 100644 --- a/pkg/adapter/convert_request_test.go +++ b/pkg/adapter/convert_request_test.go @@ -8,6 +8,7 @@ import ( "github.com/samber/lo" "github.com/x5iu/claude-code-adapter/pkg/datatypes/anthropic" + "github.com/x5iu/claude-code-adapter/pkg/datatypes/openai" "github.com/x5iu/claude-code-adapter/pkg/datatypes/openrouter" "github.com/x5iu/claude-code-adapter/pkg/profile" ) @@ -2575,3 +2576,198 @@ func TestCanonicalOpenRouterMessages_UnknownFormat_WithSignatureWithoutDelimiter t.Errorf("Expected Data to contain whole signature, got %q", encrypted.Data) } } + +// testOpenAIProfile creates a test profile for OpenAI provider +func testOpenAIProfile() *profile.Profile { + return &profile.Profile{ + Name: "test-openai", + Provider: "openai", + Options: &profile.OptionsConfig{ + Strict: false, + ContextWindowResizeFactor: 1.0, + }, + OpenAI: &profile.OpenAIConfig{ + BaseURL: "https://api.openai.com", + }, + } +} + +// testOpenAICtx creates a context with an OpenAI test profile +func testOpenAICtx() context.Context { + return profile.WithProfile(context.Background(), testOpenAIProfile()) +} + 
+func TestConvertAnthropicRequestToOpenAIRequest_SystemTextOnly(t *testing.T) { + // When system messages contain only text, they should be converted to instructions + src := &anthropic.GenerateMessageRequest{ + Model: "gpt-4o", + MaxTokens: 500, + System: anthropic.MessageContents{ + {Type: anthropic.MessageContentTypeText, Text: "You are a helpful assistant."}, + {Type: anthropic.MessageContentTypeText, Text: "Always be polite."}, + }, + Messages: []*anthropic.Message{ + { + Role: anthropic.MessageRoleUser, + Content: anthropic.MessageContents{ + {Type: anthropic.MessageContentTypeText, Text: "Hello"}, + }, + }, + }, + } + + got := ConvertAnthropicRequestToOpenAIRequest(testOpenAICtx(), src) + + // System should be converted to Instructions (text-only) + expectedInstructions := "You are a helpful assistant.\n\nAlways be polite." + if got.Instructions != expectedInstructions { + t.Errorf("Expected Instructions %q, got %q", expectedInstructions, got.Instructions) + } + + // Input should only have user message, no system message + if len(got.Input) != 1 { + t.Errorf("Expected 1 input item (user message only), got %d", len(got.Input)) + } + + // Verify the input is a user message, not system message + if got.Input[0].Message == nil { + t.Fatal("Expected message in input") + } + if got.Input[0].Message.Role != openai.ResponseMessageRoleUser { + t.Errorf("Expected user role, got %s", got.Input[0].Message.Role) + } +} + +func TestConvertAnthropicRequestToOpenAIRequest_SystemWithImage(t *testing.T) { + // When system messages contain images, they should be converted to input with system role + src := &anthropic.GenerateMessageRequest{ + Model: "gpt-4o", + MaxTokens: 500, + System: anthropic.MessageContents{ + {Type: anthropic.MessageContentTypeText, Text: "You are a helpful assistant."}, + { + Type: anthropic.MessageContentTypeImage, + Source: &anthropic.MessageContentSource{ + Type: "base64", + MediaType: "image/png", + Data: 
"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==", + }, + }, + }, + Messages: []*anthropic.Message{ + { + Role: anthropic.MessageRoleUser, + Content: anthropic.MessageContents{ + {Type: anthropic.MessageContentTypeText, Text: "Hello"}, + }, + }, + }, + } + + got := ConvertAnthropicRequestToOpenAIRequest(testOpenAICtx(), src) + + // Instructions should be empty when system contains non-text content + if got.Instructions != "" { + t.Errorf("Expected empty Instructions, got %q", got.Instructions) + } + + // Input should have 2 items: system message, then user message + if len(got.Input) != 2 { + t.Fatalf("Expected 2 input items, got %d", len(got.Input)) + } + + // First input should be system message + systemInput := got.Input[0] + if systemInput.Message == nil { + t.Fatal("Expected system message in first input") + } + if systemInput.Message.Role != openai.ResponseMessageRoleSystem { + t.Errorf("Expected system role for first input, got %s", systemInput.Message.Role) + } + + // Verify system message content includes both text and image + if len(systemInput.Message.Content) != 2 { + t.Fatalf("Expected 2 content items in system message, got %d", len(systemInput.Message.Content)) + } + + // First content should be text + if systemInput.Message.Content[0].Text == nil { + t.Error("Expected text content as first item") + } else if systemInput.Message.Content[0].Text.Text != "You are a helpful assistant." 
{ + t.Errorf("Expected text 'You are a helpful assistant.', got %q", systemInput.Message.Content[0].Text.Text) + } + + // Second content should be image + if systemInput.Message.Content[1].Image == nil { + t.Error("Expected image content as second item") + } else if systemInput.Message.Content[1].Image.ImageUrl == "" { + t.Error("Expected non-empty image URL") + } + + // Second input should be user message + userInput := got.Input[1] + if userInput.Message == nil { + t.Fatal("Expected user message in second input") + } + if userInput.Message.Role != openai.ResponseMessageRoleUser { + t.Errorf("Expected user role for second input, got %s", userInput.Message.Role) + } +} + +func TestConvertAnthropicRequestToOpenAIRequest_EmptySystem(t *testing.T) { + // Empty system should result in empty instructions and no system input + src := &anthropic.GenerateMessageRequest{ + Model: "gpt-4o", + MaxTokens: 500, + System: anthropic.MessageContents{}, + Messages: []*anthropic.Message{ + { + Role: anthropic.MessageRoleUser, + Content: anthropic.MessageContents{ + {Type: anthropic.MessageContentTypeText, Text: "Hello"}, + }, + }, + }, + } + + got := ConvertAnthropicRequestToOpenAIRequest(testOpenAICtx(), src) + + // Instructions should be empty + if got.Instructions != "" { + t.Errorf("Expected empty Instructions, got %q", got.Instructions) + } + + // Input should only have user message + if len(got.Input) != 1 { + t.Errorf("Expected 1 input item (user message only), got %d", len(got.Input)) + } +} + +func TestConvertAnthropicRequestToOpenAIRequest_NilSystem(t *testing.T) { + // Nil system should result in empty instructions and no system input + src := &anthropic.GenerateMessageRequest{ + Model: "gpt-4o", + MaxTokens: 500, + System: nil, + Messages: []*anthropic.Message{ + { + Role: anthropic.MessageRoleUser, + Content: anthropic.MessageContents{ + {Type: anthropic.MessageContentTypeText, Text: "Hello"}, + }, + }, + }, + } + + got := 
ConvertAnthropicRequestToOpenAIRequest(testOpenAICtx(), src) + + // Instructions should be empty + if got.Instructions != "" { + t.Errorf("Expected empty Instructions, got %q", got.Instructions) + } + + // Input should only have user message + if len(got.Input) != 1 { + t.Errorf("Expected 1 input item (user message only), got %d", len(got.Input)) + } +} diff --git a/pkg/adapter/convert_stream.go b/pkg/adapter/convert_stream.go index 408deae..fc3f616 100644 --- a/pkg/adapter/convert_stream.go +++ b/pkg/adapter/convert_stream.go @@ -8,6 +8,7 @@ import ( "github.com/samber/lo" "github.com/x5iu/claude-code-adapter/pkg/datatypes/anthropic" + "github.com/x5iu/claude-code-adapter/pkg/datatypes/openai" "github.com/x5iu/claude-code-adapter/pkg/datatypes/openrouter" "github.com/x5iu/claude-code-adapter/pkg/profile" ) @@ -16,6 +17,7 @@ type ConvertStreamOptions struct { InputTokens int64 OpenRouterProvider *string OpenRouterChatCompletionBuilder *openrouter.ChatCompletionBuilder + OpenAIResponseBuilder *openai.ResponseBuilder } type ConvertStreamOption func(*ConvertStreamOptions) @@ -38,6 +40,12 @@ func ExtractOpenRouterChatCompletionBuilder(builder *openrouter.ChatCompletionBu } } +func ExtractOpenAIResponseBuilder(builder *openai.ResponseBuilder) ConvertStreamOption { + return func(o *ConvertStreamOptions) { + o.OpenAIResponseBuilder = builder + } +} + func ConvertOpenRouterStreamToAnthropicStream( ctx context.Context, stream openrouter.ChatCompletionStream, @@ -378,3 +386,416 @@ func reasoningDetailsContainsReasoningTypes( } return false } + +func ConvertOpenAIStreamToAnthropicStream( + ctx context.Context, + stream openai.ResponseStream, + options ...ConvertStreamOption, +) anthropic.MessageStream { + prof, _ := profile.FromContext(ctx) + convertOptions := &ConvertStreamOptions{} + for _, applyOption := range options { + applyOption(convertOptions) + } + contextWindowResizeFactor := prof.Options.GetContextWindowResizeFactor() + return func(yield func(anthropic.Event, 
error) bool) { + var ( + startOnce sync.Once + responseID string + model string + blockIndex int + deltaType anthropic.MessageContentDeltaType + toolCallID string + stopReason anthropic.StopReason + usage *anthropic.Usage + ) + for event, err := range stream { + if err != nil { + yield(nil, err) + return + } + if convertOptions.OpenAIResponseBuilder != nil { + convertOptions.OpenAIResponseBuilder.Add(event) + } + switch e := event.(type) { + case *openai.ResponseCreatedEvent: + responseID = e.Response.ID + model = e.Response.Model + startOnce.Do(func() { + yield(&anthropic.EventMessageStart{ + Type: anthropic.EventTypeMessageStart, + Message: &anthropic.Message{ + ID: responseID, + Type: anthropic.MessageTypeMessage, + Role: anthropic.MessageRoleAssistant, + Model: model, + Usage: &anthropic.Usage{ + InputTokens: int64(float64(convertOptions.InputTokens) * contextWindowResizeFactor), + OutputTokens: 1, + }, + }, + }, nil) + }) + case *openai.ResponseInProgressEvent: + // No action needed + case *openai.ResponseOutputItemAddedEvent: + // Handle output item based on its type + if e.Item != nil { + if msg := e.Item.Message; msg != nil { + // Message item - we'll handle content deltas separately + } else if fc := e.Item.FunctionCall; fc != nil { + // Function call started + if deltaType != "" { + if !yield(&anthropic.EventContentBlockStop{ + Type: anthropic.EventTypeContentBlockStop, + Index: blockIndex, + }, nil) { + return + } + blockIndex++ + } + deltaType = anthropic.MessageContentDeltaTypeInputJSONDelta + toolCallID = fc.CallID + if !yield(&anthropic.EventContentBlockStart{ + Type: anthropic.EventTypeContentBlockStart, + Index: blockIndex, + ContentBlock: &anthropic.MessageContent{ + Type: anthropic.MessageContentTypeToolUse, + ID: fc.CallID, + Name: fc.Name, + Input: json.RawMessage("{}"), + }, + }, nil) { + return + } + } else if r := e.Item.Reasoning; r != nil { + // Reasoning item started + if deltaType != "" { + if !yield(&anthropic.EventContentBlockStop{ + 
Type: anthropic.EventTypeContentBlockStop, + Index: blockIndex, + }, nil) { + return + } + blockIndex++ + } + deltaType = anthropic.MessageContentDeltaTypeThinkingDelta + if !yield(&anthropic.EventContentBlockStart{ + Type: anthropic.EventTypeContentBlockStart, + Index: blockIndex, + ContentBlock: &anthropic.MessageContent{ + Type: anthropic.MessageContentTypeThinking, + }, + }, nil) { + return + } + } + } + case *openai.ResponseOutputItemDoneEvent: + // Output item completed - no additional action needed + case *openai.ResponseContentPartAddedEvent: + // Content part added - we'll handle deltas + if e.Part != nil { + // Part is ResponseMessageContentText, check its type + if e.Part.Type == openai.ResponseMessageContentTypeInputText || + e.Part.Type == openai.ResponseMessageContentTypeOutputText { + // Text content started + if deltaType != anthropic.MessageContentDeltaTypeTextDelta { + if deltaType != "" { + if !yield(&anthropic.EventContentBlockStop{ + Type: anthropic.EventTypeContentBlockStop, + Index: blockIndex, + }, nil) { + return + } + blockIndex++ + } + deltaType = anthropic.MessageContentDeltaTypeTextDelta + if !yield(&anthropic.EventContentBlockStart{ + Type: anthropic.EventTypeContentBlockStart, + Index: blockIndex, + ContentBlock: &anthropic.MessageContent{ + Type: anthropic.MessageContentTypeText, + }, + }, nil) { + return + } + } + } + } + case *openai.ResponseContentPartDoneEvent: + // Content part done - no additional action needed + case *openai.ResponseTextDeltaEvent: + // Text delta + if e.Delta != "" { + if deltaType != anthropic.MessageContentDeltaTypeTextDelta { + if deltaType != "" { + if !yield(&anthropic.EventContentBlockStop{ + Type: anthropic.EventTypeContentBlockStop, + Index: blockIndex, + }, nil) { + return + } + blockIndex++ + } + deltaType = anthropic.MessageContentDeltaTypeTextDelta + if !yield(&anthropic.EventContentBlockStart{ + Type: anthropic.EventTypeContentBlockStart, + Index: blockIndex, + ContentBlock: 
&anthropic.MessageContent{ + Type: anthropic.MessageContentTypeText, + }, + }, nil) { + return + } + } + if !yield(&anthropic.EventContentBlockDelta{ + Type: anthropic.EventTypeContentBlockDelta, + Index: blockIndex, + Delta: &anthropic.MessageContentDelta{ + Type: anthropic.MessageContentDeltaTypeTextDelta, + Text: e.Delta, + }, + }, nil) { + return + } + } + case *openai.ResponseTextDoneEvent: + // Text done - no additional action needed + case *openai.ResponseReasoningTextDeltaEvent: + // Reasoning text delta + if e.Delta != "" { + if deltaType != anthropic.MessageContentDeltaTypeThinkingDelta { + if deltaType != "" { + if !yield(&anthropic.EventContentBlockStop{ + Type: anthropic.EventTypeContentBlockStop, + Index: blockIndex, + }, nil) { + return + } + blockIndex++ + } + deltaType = anthropic.MessageContentDeltaTypeThinkingDelta + if !yield(&anthropic.EventContentBlockStart{ + Type: anthropic.EventTypeContentBlockStart, + Index: blockIndex, + ContentBlock: &anthropic.MessageContent{ + Type: anthropic.MessageContentTypeThinking, + }, + }, nil) { + return + } + } + if !yield(&anthropic.EventContentBlockDelta{ + Type: anthropic.EventTypeContentBlockDelta, + Index: blockIndex, + Delta: &anthropic.MessageContentDelta{ + Type: anthropic.MessageContentDeltaTypeThinkingDelta, + Thinking: e.Delta, + }, + }, nil) { + return + } + } + case *openai.ResponseReasoningTextDoneEvent: + // Reasoning text done - no additional action needed + case *openai.ResponseReasoningSummaryTextDeltaEvent: + // Reasoning summary text delta (treat as thinking) + if e.Delta != "" { + if deltaType != anthropic.MessageContentDeltaTypeThinkingDelta { + if deltaType != "" { + if !yield(&anthropic.EventContentBlockStop{ + Type: anthropic.EventTypeContentBlockStop, + Index: blockIndex, + }, nil) { + return + } + blockIndex++ + } + deltaType = anthropic.MessageContentDeltaTypeThinkingDelta + if !yield(&anthropic.EventContentBlockStart{ + Type: anthropic.EventTypeContentBlockStart, + Index: 
blockIndex, + ContentBlock: &anthropic.MessageContent{ + Type: anthropic.MessageContentTypeThinking, + }, + }, nil) { + return + } + } + if !yield(&anthropic.EventContentBlockDelta{ + Type: anthropic.EventTypeContentBlockDelta, + Index: blockIndex, + Delta: &anthropic.MessageContentDelta{ + Type: anthropic.MessageContentDeltaTypeThinkingDelta, + Thinking: e.Delta, + }, + }, nil) { + return + } + } + case *openai.ResponseReasoningSummaryTextDoneEvent: + // Reasoning summary done - no additional action needed + case *openai.ResponseFunctionCallArgumentsDeltaEvent: + // Function call arguments delta + if e.Delta != "" { + if deltaType != anthropic.MessageContentDeltaTypeInputJSONDelta || (e.ItemID != "" && e.ItemID != toolCallID) { + if deltaType != "" { + if !yield(&anthropic.EventContentBlockStop{ + Type: anthropic.EventTypeContentBlockStop, + Index: blockIndex, + }, nil) { + return + } + blockIndex++ + } + deltaType = anthropic.MessageContentDeltaTypeInputJSONDelta + toolCallID = e.ItemID + // Note: We don't have the function name here, it should have been set in OutputItemAdded + } + if !yield(&anthropic.EventContentBlockDelta{ + Type: anthropic.EventTypeContentBlockDelta, + Index: blockIndex, + Delta: &anthropic.MessageContentDelta{ + Type: anthropic.MessageContentDeltaTypeInputJSONDelta, + PartialJSON: e.Delta, + }, + }, nil) { + return + } + } + case *openai.ResponseFunctionCallArgumentsDoneEvent: + // Function call arguments done - no additional action needed + case *openai.ResponseCompletedEvent: + // Response completed + stopReason = convertOpenAIStatusToAnthropicStopReason(e.Response.Status, e.Response.IncompleteDetails) + if e.Response.Usage != nil { + usage = &anthropic.Usage{ + InputTokens: int64(float64(e.Response.Usage.InputTokens) * contextWindowResizeFactor), + OutputTokens: int64(float64(e.Response.Usage.OutputTokens) * contextWindowResizeFactor), + } + if details := e.Response.Usage.InputTokensDetails; details != nil { + usage.CacheReadInputTokens 
= int64(float64(details.CachedTokens) * contextWindowResizeFactor) + } + } + case *openai.ResponseFailedEvent: + // Response failed + stopReason = anthropic.StopReasonRefusal + case *openai.ResponseIncompleteEvent: + // Response incomplete + if e.Response.IncompleteDetails != nil { + switch e.Response.IncompleteDetails.Reason { + case openai.ResponseIncompleteReasonMaxOutputTokens: + stopReason = anthropic.StopReasonMaxTokens + case openai.ResponseIncompleteReasonContentFilter: + stopReason = anthropic.StopReasonRefusal + default: + stopReason = anthropic.StopReasonPauseTurn + } + } else { + stopReason = anthropic.StopReasonPauseTurn + } + case *openai.ResponseRefusalDeltaEvent: + // Refusal delta - treat as text + if e.Delta != "" { + if deltaType != anthropic.MessageContentDeltaTypeTextDelta { + if deltaType != "" { + if !yield(&anthropic.EventContentBlockStop{ + Type: anthropic.EventTypeContentBlockStop, + Index: blockIndex, + }, nil) { + return + } + blockIndex++ + } + deltaType = anthropic.MessageContentDeltaTypeTextDelta + if !yield(&anthropic.EventContentBlockStart{ + Type: anthropic.EventTypeContentBlockStart, + Index: blockIndex, + ContentBlock: &anthropic.MessageContent{ + Type: anthropic.MessageContentTypeText, + }, + }, nil) { + return + } + } + if !yield(&anthropic.EventContentBlockDelta{ + Type: anthropic.EventTypeContentBlockDelta, + Index: blockIndex, + Delta: &anthropic.MessageContentDelta{ + Type: anthropic.MessageContentDeltaTypeTextDelta, + Text: e.Delta, + }, + }, nil) { + return + } + } + case *openai.ResponseErrorEvent: + // Error event + yield(nil, &openai.Error{ + Inner: struct { + Message string `json:"message"` + Type string `json:"type"` + Param any `json:"param,omitempty"` + Code string `json:"code"` + }{ + Message: e.Message, + Code: string(e.Code), + }, + }) + return + } + } + // Close any open content block + if deltaType != "" { + if !yield(&anthropic.EventContentBlockStop{ + Type: anthropic.EventTypeContentBlockStop, + Index: 
blockIndex, + }, nil) { + return + } + } + // Send message delta and stop + delta := &anthropic.Message{} + if stopReason != "" { + delta.StopReason = lo.ToPtr(stopReason) + } else { + delta.StopReason = lo.ToPtr(anthropic.StopReasonEndTurn) + } + if usage != nil { + delta.Usage = usage + } + if !yield(&anthropic.EventMessageDelta{ + Type: anthropic.EventTypeMessageDelta, + Delta: delta, + Usage: usage, + }, nil) { + return + } + yield(&anthropic.EventMessageStop{Type: anthropic.EventTypeMessageStop}, nil) + } +} + +func convertOpenAIStatusToAnthropicStopReason( + status openai.ResponseStatus, + incompleteDetails *openai.ResponseIncompleteDetails, +) anthropic.StopReason { + switch status { + case openai.ResponseStatusCompleted: + return anthropic.StopReasonEndTurn + case openai.ResponseStatusIncomplete: + if incompleteDetails != nil { + switch incompleteDetails.Reason { + case openai.ResponseIncompleteReasonMaxOutputTokens: + return anthropic.StopReasonMaxTokens + case openai.ResponseIncompleteReasonContentFilter: + return anthropic.StopReasonRefusal + } + } + return anthropic.StopReasonPauseTurn + case openai.ResponseStatusFailed: + return anthropic.StopReasonRefusal + default: + return anthropic.StopReasonEndTurn + } +} diff --git a/pkg/datatypes/openai/openai.go b/pkg/datatypes/openai/openai.go new file mode 100644 index 0000000..b17939a --- /dev/null +++ b/pkg/datatypes/openai/openai.go @@ -0,0 +1,2342 @@ +package openai + +import ( + "encoding/json" + "errors" + "fmt" + "iter" + + "github.com/x5iu/claude-code-adapter/pkg/utils" +) + +type Error struct { + Inner struct { + Message string `json:"message"` + Type string `json:"type"` + Param any `json:"param,omitempty"` + Code string `json:"code"` + } `json:"error"` + + statusCode int +} + +func (e *Error) Error() string { return fmt.Sprintf("%s: %s", e.Type(), e.Message()) } +func (e *Error) Type() string { return e.Inner.Type } +func (e *Error) Message() string { return e.Inner.Message } +func (e *Error) 
Source() string { return "openai" } +func (e *Error) StatusCode() int { return e.statusCode } +func (e *Error) SetStatusCode(statusCode int) { e.statusCode = statusCode } + +// CreateModelResponseRequest follows OpenAI response API request format +// reference: https://platform.openai.com/docs/api-reference/responses/create +type CreateModelResponseRequest struct { + Background *bool `json:"background,omitempty"` + Conversation *ResponseConversation `json:"conversation,omitempty"` + Include []ResponseIncludable `json:"include,omitempty"` + Input ResponseInputParam `json:"input,omitempty"` + Instructions string `json:"instructions,omitempty"` + MaxOutputTokens *int `json:"max_output_tokens,omitempty"` + MaxToolCalls *int `json:"max_tool_calls,omitempty"` + Metadata ResponseMetadata `json:"metadata,omitempty"` + Model string `json:"model,omitempty"` + ParallelToolCalls *bool `json:"parallel_tool_calls,omitempty"` + PreviousResponseID string `json:"previous_response_id,omitempty"` + Prompt *ResponsePromptParam `json:"prompt,omitempty"` + PromptCacheKey string `json:"prompt_cache_key,omitempty"` + Reasoning *ResponseReasoning `json:"reasoning,omitempty"` + SafetyIdentifier string `json:"safety_identifier,omitempty"` + ServiceTier ResponseServiceTier `json:"service_tier,omitempty"` + Store *bool `json:"store,omitempty"` + Stream utils.True `json:"stream"` + StreamOptions *ResponseStreamOptions `json:"stream_options,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + Text *ResponseTextConfigParam `json:"text,omitempty"` + ToolChoice *ResponseToolChoice `json:"tool_choice,omitempty"` + Tools []*ResponseToolParam `json:"tools,omitempty"` + TopLogprobs *int `json:"top_logprobs,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + Truncation ResponseTruncationStrategy `json:"truncation,omitempty"` + User string `json:"user,omitempty"` +} + +type ResponseConversation struct { + ID string `json:"id"` +} + +func (rc *ResponseConversation) UnmarshalJSON(data 
[]byte) error { + for _, b := range data { + switch b { + case ' ', '\r', '\n', '\t': + case '"': + var id string + if err := json.Unmarshal(data, &id); err != nil { + return err + } + rc.ID = id + return nil + case '{': + var ir struct { + ID string `json:"id"` + } + if err := json.Unmarshal(data, &ir); err != nil { + return err + } + rc.ID = ir.ID + return nil + default: + return errors.New("conversation should be a string or an object") + } + } + return errors.New("empty conversation") +} + +type ResponseIncludable string + +const ( + ResponseIncludableCodeInterpreterCallOutputs ResponseIncludable = "code_interpreter_call.outputs" + ResponseIncludableComputerCallOutputImageUrl ResponseIncludable = "computer_call_output.output.image_url" + ResponseIncludableFileSearchCallResults ResponseIncludable = "file_search_call.results" + ResponseIncludableMessageInputImageImageUrl ResponseIncludable = "message.input_image.image_url" + ResponseIncludableMessageOutputTextLogprobs ResponseIncludable = "message.output_text.logprobs" + ResponseIncludableReasoningEncryptedContent ResponseIncludable = "reasoning.encrypted_content" +) + +func TextInput(text string) ResponseInputParam { + return ResponseInputParam{ + newResponseMessageInput(text), + } +} + +type ResponseInputParam []*ResponseInputItemParam + +func (input *ResponseInputParam) UnmarshalJSON(data []byte) error { + for _, b := range data { + switch b { + case ' ', '\r', '\n', '\t': + case 'n': + return nil + case '"': + var text string + if err := json.Unmarshal(data, &text); err != nil { + return err + } + *input = append(*input, newResponseMessageInput(text)) + return nil + case '[': + return json.Unmarshal(data, &input) + default: + return errors.New("input should be a string or an array") + } + } + return errors.New("empty input") +} + +func newResponseMessageInput(text string) *ResponseInputItemParam { + return &ResponseInputItemParam{ + Message: newResponseMessage(text), + } +} + +func newResponseMessage(text 
string) *ResponseMessage { + return &ResponseMessage{ + Type: ResponseInputItemTypeMessage, + Role: ResponseMessageRoleUser, + Content: []*ResponseMessageContent{ + { + Text: &ResponseMessageContentText{ + Type: ResponseMessageContentTypeInputText, + Text: text, + }, + }, + }, + } +} + +// ResponseInputItemParam +// Text, image, or file inputs to the model, used to generate a response. +type ResponseInputItemParam struct { + Message *ResponseMessage + FileSearchCall *ResponseFileSearchToolCallParam + ComputerCall *ResponseComputerToolCallParam + ComputerCallOutput *ResponseComputerCallOutput + WebSearchCall *ResponseFunctionWebSearchParam + FunctionCall *ResponseFunctionToolCallParam + FunctionCallOutput *ResponseFunctionCallOutput + Reasoning *ResponseReasoningItem + ImageGenerationCall *ResponseImageGenerationCall + CodeInterpreterCall *ResponseCodeInterpreterToolCallParam + LocalShellCall *ResponseLocalShellCall + LocalShellCallOutput *ResponseLocalShellCallOutput + MCPListTools *ResponseMCPListTools + MCPApprovalRequest *ResponseMCPApprovalRequest + MCPApprovalResponse *ResponseMCPApprovalResponse + MCPCall *ResponseMCPCall + CustomToolCallOutput *ResponseCustomToolCallOutputParam + CustomToolCall *ResponseCustomToolCallParam + ItemReference *ResponseItemReference +} + +func (input *ResponseInputItemParam) MarshalJSON() ([]byte, error) { + if input.Message != nil { + return json.Marshal(input.Message) + } + if input.FileSearchCall != nil { + return json.Marshal(input.FileSearchCall) + } + if input.ComputerCall != nil { + return json.Marshal(input.ComputerCall) + } + if input.ComputerCallOutput != nil { + return json.Marshal(input.ComputerCallOutput) + } + if input.WebSearchCall != nil { + return json.Marshal(input.WebSearchCall) + } + if input.FunctionCall != nil { + return json.Marshal(input.FunctionCall) + } + if input.FunctionCallOutput != nil { + return json.Marshal(input.FunctionCallOutput) + } + if input.Reasoning != nil { + return 
json.Marshal(input.Reasoning) + } + if input.ImageGenerationCall != nil { + return json.Marshal(input.ImageGenerationCall) + } + if input.CodeInterpreterCall != nil { + return json.Marshal(input.CodeInterpreterCall) + } + if input.LocalShellCall != nil { + return json.Marshal(input.LocalShellCall) + } + if input.LocalShellCallOutput != nil { + return json.Marshal(input.LocalShellCallOutput) + } + if input.MCPListTools != nil { + return json.Marshal(input.MCPListTools) + } + if input.MCPApprovalRequest != nil { + return json.Marshal(input.MCPApprovalRequest) + } + if input.MCPApprovalResponse != nil { + return json.Marshal(input.MCPApprovalResponse) + } + if input.MCPCall != nil { + return json.Marshal(input.MCPCall) + } + if input.CustomToolCallOutput != nil { + return json.Marshal(input.CustomToolCallOutput) + } + if input.CustomToolCall != nil { + return json.Marshal(input.CustomToolCall) + } + if input.ItemReference != nil { + return json.Marshal(input.ItemReference) + } + return json.Marshal(nil) +} + +func (input *ResponseInputItemParam) UnmarshalJSON(data []byte) error { + for _, b := range data { + switch b { + case ' ', '\r', '\n', '\t': + case '"': + var text string + if err := json.Unmarshal(data, &text); err != nil { + return err + } + input.Message = newResponseMessage(text) + return nil + case '{': + var ir struct { + Type ResponseInputItemType `json:"type"` + } + if err := json.Unmarshal(data, &ir); err != nil { + return err + } + switch ir.Type { + case ResponseInputItemTypeMessage: + return json.Unmarshal(data, &input.Message) + case ResponseInputItemTypeFileSearchToolCall: + return json.Unmarshal(data, &input.FileSearchCall) + case ResponseInputItemTypeComputerToolCall: + return json.Unmarshal(data, &input.ComputerCall) + case ResponseInputItemTypeComputerCallOutput: + return json.Unmarshal(data, &input.ComputerCallOutput) + case ResponseInputItemTypeWebSearchCall: + return json.Unmarshal(data, &input.WebSearchCall) + case 
ResponseInputItemTypeFunctionCall: + return json.Unmarshal(data, &input.FunctionCall) + case ResponseInputItemTypeFunctionCallOutput: + return json.Unmarshal(data, &input.FunctionCallOutput) + case ResponseInputItemTypeReasoningItem: + return json.Unmarshal(data, &input.Reasoning) + case ResponseInputItemTypeImageGenerationCall: + return json.Unmarshal(data, &input.ImageGenerationCall) + case ResponseInputItemTypeCodeInterpreterCall: + return json.Unmarshal(data, &input.CodeInterpreterCall) + case ResponseInputItemTypeLocalShellCall: + return json.Unmarshal(data, &input.LocalShellCall) + case ResponseInputItemTypeLocalShellCallOutput: + return json.Unmarshal(data, &input.LocalShellCallOutput) + case ResponseInputItemTypeMCPListTools: + return json.Unmarshal(data, &input.MCPListTools) + case ResponseInputItemTypeMCPApprovalRequest: + return json.Unmarshal(data, &input.MCPApprovalRequest) + case ResponseInputItemTypeMCPApprovalResponse: + return json.Unmarshal(data, &input.MCPApprovalResponse) + case ResponseInputItemTypeMCPCall: + return json.Unmarshal(data, &input.MCPCall) + case ResponseInputItemTypeCustomToolCallOutput: + return json.Unmarshal(data, &input.CustomToolCallOutput) + case ResponseInputItemTypeCustomToolCall: + return json.Unmarshal(data, &input.CustomToolCall) + case ResponseInputItemTypeItemReference: + return json.Unmarshal(data, &input.ItemReference) + } + return fmt.Errorf("unknown input item type %q", ir.Type) + default: + return errors.New("input item should be a string or an object") + } + } + return errors.New("empty input item") +} + +type ResponseInputItemType string + +const ( + ResponseInputItemTypeMessage ResponseInputItemType = "message" + ResponseInputItemTypeFileSearchToolCall ResponseInputItemType = "file_search_call" + ResponseInputItemTypeComputerToolCall ResponseInputItemType = "computer_call" + ResponseInputItemTypeComputerCallOutput ResponseInputItemType = "computer_call_output" + ResponseInputItemTypeWebSearchCall 
ResponseInputItemType = "web_search_call" + ResponseInputItemTypeFunctionCall ResponseInputItemType = "function_call" + ResponseInputItemTypeFunctionCallOutput ResponseInputItemType = "function_call_output" + ResponseInputItemTypeReasoningItem ResponseInputItemType = "reasoning" + ResponseInputItemTypeImageGenerationCall ResponseInputItemType = "image_generation_call" + ResponseInputItemTypeCodeInterpreterCall ResponseInputItemType = "code_interpreter_call" + ResponseInputItemTypeLocalShellCall ResponseInputItemType = "local_shell_call" + ResponseInputItemTypeLocalShellCallOutput ResponseInputItemType = "local_shell_call_output" + ResponseInputItemTypeMCPListTools ResponseInputItemType = "mcp_list_tools" + ResponseInputItemTypeMCPApprovalRequest ResponseInputItemType = "mcp_approval_request" + ResponseInputItemTypeMCPApprovalResponse ResponseInputItemType = "mcp_approval_response" + ResponseInputItemTypeMCPCall ResponseInputItemType = "mcp_call" + ResponseInputItemTypeCustomToolCallOutput ResponseInputItemType = "custom_tool_call_output" + ResponseInputItemTypeCustomToolCall ResponseInputItemType = "custom_tool_call" + ResponseInputItemTypeItemReference ResponseInputItemType = "item_reference" +) + +type ResponseMessage struct { + Type ResponseInputItemType `json:"type"` + Role ResponseMessageRole `json:"role"` + Content ResponseMessageContents `json:"content"` + Status ResponseMessageStatus `json:"status"` +} + +func NewTextContent(text string) ResponseMessageContents { + return ResponseMessageContents{ + &ResponseMessageContent{ + Text: &ResponseMessageContentText{ + Type: ResponseMessageContentTypeInputText, + Text: text, + }, + }, + } +} + +type ResponseMessageContents []*ResponseMessageContent + +func (c *ResponseMessageContents) UnmarshalJSON(data []byte) error { + for _, b := range data { + switch b { + case ' ', '\r', '\n', '\t': + case '"': + var text string + if err := json.Unmarshal(data, &text); err != nil { + return err + } + *c = NewTextContent(text) 
+ return nil + case '[': + // Use a type alias to avoid infinite recursion + type rawContents []*ResponseMessageContent + var contents rawContents + if err := json.Unmarshal(data, &contents); err != nil { + return err + } + *c = ResponseMessageContents(contents) + return nil + default: + return errors.New("message content should be a string or an array") + } + } + return errors.New("empty message content") +} + +type ResponseMessageContent struct { + Text *ResponseMessageContentText + Image *ResponseMessageContentImage + File *ResponseMessageContentFile +} + +func (content *ResponseMessageContent) MarshalJSON() ([]byte, error) { + if content.Text != nil { + return json.Marshal(content.Text) + } + if content.Image != nil { + return json.Marshal(content.Image) + } + if content.File != nil { + return json.Marshal(content.File) + } + return json.Marshal(nil) +} + +func (content *ResponseMessageContent) UnmarshalJSON(data []byte) error { + for _, b := range data { + switch b { + case ' ', '\r', '\n', '\t': + case '"': + var text string + if err := json.Unmarshal(data, &text); err != nil { + return err + } + content.Text = &ResponseMessageContentText{ + Type: ResponseMessageContentTypeInputText, + Text: text, + } + return nil + case '{': + var ir struct { + Type ResponseMessageContentType `json:"type"` + } + if err := json.Unmarshal(data, &ir); err != nil { + return err + } + switch ir.Type { + case ResponseMessageContentTypeInputText, + ResponseMessageContentTypeOutputText, ResponseMessageContentTypeRefusal: + return json.Unmarshal(data, &content.Text) + case ResponseMessageContentTypeInputImage: + return json.Unmarshal(data, &content.Image) + case ResponseMessageContentTypeInputFile: + return json.Unmarshal(data, &content.File) + } + return fmt.Errorf("unknown message content type %q", ir.Type) + default: + return errors.New("message content should be a string or an object") + } + } + return errors.New("empty message content") +} + +type ResponseMessageContentType 
string + +const ( + ResponseMessageContentTypeInputText ResponseMessageContentType = "input_text" + ResponseMessageContentTypeInputImage ResponseMessageContentType = "input_image" + ResponseMessageContentTypeInputFile ResponseMessageContentType = "input_file" + ResponseMessageContentTypeOutputText ResponseMessageContentType = "output_text" + ResponseMessageContentTypeRefusal ResponseMessageContentType = "refusal" +) + +type ResponseMessageContentText struct { + Type ResponseMessageContentType `json:"type"` + Text string `json:"text"` + Logprobs []*ResponseLogprob `json:"logprobs,omitempty"` + Annotations []*ResponseAnnotation `json:"annotations,omitempty"` + Refusal string `json:"refusal,omitempty"` +} + +type ResponseLogprob struct { + Token string `json:"token"` + Bytes []int `json:"bytes"` + Logprob float64 `json:"logprob"` + TopLogprobs []*ResponseLogprobTopLogprob `json:"top_logprobs"` +} + +type ResponseLogprobTopLogprob struct { + Token string `json:"token"` + Bytes []int `json:"bytes"` + Logprob float64 `json:"logprob"` +} + +type ResponseAnnotation struct { + FileCitation *ResponseAnnotationFileCitation + URLCitation *ResponseAnnotationURLCitation + ContainerFileCitation *ResponseAnnotationContainerFileCitation + FilePath *ResponseAnnotationFilePath +} + +func (annotation *ResponseAnnotation) MarshalJSON() ([]byte, error) { + if annotation.FileCitation != nil { + return json.Marshal(annotation.FileCitation) + } + if annotation.URLCitation != nil { + return json.Marshal(annotation.URLCitation) + } + if annotation.ContainerFileCitation != nil { + return json.Marshal(annotation.ContainerFileCitation) + } + if annotation.FilePath != nil { + return json.Marshal(annotation.FilePath) + } + return json.Marshal(nil) +} + +func (annotation *ResponseAnnotation) UnmarshalJSON(data []byte) error { + for _, b := range data { + switch b { + case ' ', '\r', '\n', '\t': + case '{': + var ir struct { + Type ResponseAnnotationType `json:"type"` + } + if err := 
json.Unmarshal(data, &ir); err != nil { + return err + } + switch ir.Type { + case ResponseAnnotationTypeFileCitation: + return json.Unmarshal(data, &annotation.FileCitation) + case ResponseAnnotationTypeURLCitation: + return json.Unmarshal(data, &annotation.URLCitation) + case ResponseAnnotationTypeContainerFileCitation: + return json.Unmarshal(data, &annotation.ContainerFileCitation) + case ResponseAnnotationTypeFilePath: + return json.Unmarshal(data, &annotation.FilePath) + } + return fmt.Errorf("unknown annotation type %q", ir.Type) + default: + return errors.New("annotation should be an object") + } + } + return errors.New("empty annotation") +} + +type ResponseAnnotationType string + +const ( + ResponseAnnotationTypeFileCitation ResponseAnnotationType = "file_citation" + ResponseAnnotationTypeURLCitation ResponseAnnotationType = "url_citation" + ResponseAnnotationTypeContainerFileCitation ResponseAnnotationType = "container_file_citation" + ResponseAnnotationTypeFilePath ResponseAnnotationType = "file_path" +) + +type ResponseAnnotationFileCitation struct { + FileID string `json:"file_id"` + Filename string `json:"filename"` + Index int `json:"index"` + Type ResponseAnnotationType `json:"type"` +} + +type ResponseAnnotationURLCitation struct { + EndIndex int `json:"end_index"` + StartIndex int `json:"start_index"` + Title string `json:"title"` + Type ResponseAnnotationType `json:"type"` + URL string `json:"url"` +} + +type ResponseAnnotationContainerFileCitation struct { + ContainerID string `json:"container_id"` + EndIndex int `json:"end_index"` + FileID string `json:"file_id"` + Filename string `json:"filename"` + StartIndex int `json:"start_index"` + Type ResponseAnnotationType `json:"type"` +} + +type ResponseAnnotationFilePath struct { + FileID string `json:"file_id"` + Index int `json:"index"` + Type ResponseAnnotationType `json:"type"` +} + +type ResponseMessageContentImage struct { + Type ResponseMessageContentType `json:"type"` + ImageUrl string 
`json:"image_url"` + FileID string `json:"file_id,omitempty"` + Detail ResponseMessageContentImageDetail `json:"detail,omitempty"` +} + +type ResponseMessageContentImageDetail string + +const ( + ResponseMessageContentImageDetailLow ResponseMessageContentImageDetail = "low" + ResponseMessageContentImageDetailHigh ResponseMessageContentImageDetail = "high" + ResponseMessageContentImageDetailAuto ResponseMessageContentImageDetail = "auto" +) + +type ResponseMessageContentFile struct { + Type ResponseMessageContentType `json:"type"` + FileData string `json:"file_data"` + FileID string `json:"file_id,omitempty"` + FileURL string `json:"file_url"` + Filename string `json:"filename"` +} + +type ResponseMessageRole string + +const ( + ResponseMessageRoleUser ResponseMessageRole = "user" + ResponseMessageRoleAssistant ResponseMessageRole = "assistant" + ResponseMessageRoleSystem ResponseMessageRole = "system" + ResponseMessageRoleDeveloper ResponseMessageRole = "developer" +) + +type ResponseMessageStatus string + +const ( + ResponseMessageStatusInProgress ResponseMessageStatus = "in_progress" + ResponseMessageStatusCompleted ResponseMessageStatus = "completed" + ResponseMessageStatusIncomplete ResponseMessageStatus = "incomplete" +) + +type ResponseFileSearchToolCallParam struct { + ID string `json:"id"` + Type ResponseInputItemType `json:"type"` + Queries []string `json:"queries"` + Status ResponseStatus `json:"status"` + Results []*ResponseFileSearchResult `json:"results"` +} + +type ResponseStatus string + +const ( + ResponseStatusInProgress ResponseStatus = "in_progress" + ResponseStatusSearching ResponseStatus = "searching" + ResponseStatusGenerating ResponseStatus = "generating" + ResponseStatusInterpreting ResponseStatus = "interpreting" + ResponseStatusCompleted ResponseStatus = "completed" + ResponseStatusIncomplete ResponseStatus = "incomplete" + ResponseStatusFailed ResponseStatus = "failed" +) + +type ResponseFileSearchResult struct { + Attributes 
map[string]any `json:"attributes,omitempty"` + FileID string `json:"file_id"` + Filename string `json:"filename"` + Score float64 `json:"score"` + Text string `json:"text"` +} + +type ResponseComputerToolCallParam struct { + ID string `json:"id"` + Type ResponseInputItemType `json:"type"` + Action *ResponseComputerAction `json:"action"` + CallID string `json:"call_id"` + PendingSafetyChecks []*ResponseComputerActionSafetyCheck `json:"pending_safety_checks"` + Status ResponseStatus `json:"status"` +} + +type ResponseComputerAction struct { + Type ResponseComputerActionType `json:"type"` + Button string `json:"button,omitempty"` + X int `json:"x,omitempty"` + Y int `json:"y,omitempty"` + Path []*ResponseComputerActionDragPath `json:"path,omitempty"` + Keys []string `json:"keys,omitempty"` + ScrollX int `json:"scroll_x,omitempty"` + ScrollY int `json:"scroll_y,omitempty"` + Text string `json:"text,omitempty"` +} + +type ResponseComputerActionType string + +const ( + ResponseComputerActionTypeClick ResponseComputerActionType = "click" + ResponseComputerActionTypeDoubleClick ResponseComputerActionType = "double_click" + ResponseComputerActionTypeDrag ResponseComputerActionType = "drag" + ResponseComputerActionTypeKeypress ResponseComputerActionType = "keypress" + ResponseComputerActionTypeMove ResponseComputerActionType = "move" + ResponseComputerActionTypeScreenshot ResponseComputerActionType = "screenshot" + ResponseComputerActionTypeScroll ResponseComputerActionType = "scroll" + ResponseComputerActionTypeType ResponseComputerActionType = "type" + ResponseComputerActionTypeWait ResponseComputerActionType = "wait" +) + +type ResponseComputerActionDragPath struct { + X int `json:"x"` + Y int `json:"y"` +} + +type ResponseComputerActionSafetyCheck struct { + ID string `json:"id"` + Code string `json:"code"` + Message string `json:"message"` +} + +type ResponseComputerCallOutput struct { + CallID string `json:"call_id"` + Output *ResponseComputerActionScreenshot 
`json:"output"` + Type ResponseInputItemType `json:"type"` + ID string `json:"id"` + AcknowledgedSafetyChecks []*ResponseComputerActionSafetyCheck `json:"acknowledged_safety_checks"` + Status ResponseStatus `json:"status"` +} + +type ResponseComputerActionScreenshot struct { + Type ResponseComputerActionScreenshotType `json:"type"` + FileID string `json:"file_id"` + ImageURL string `json:"image_url"` +} + +type ResponseComputerActionScreenshotType string + +const ( + ResponseComputerActionScreenshotTypeComputerScreenshot ResponseComputerActionScreenshotType = "computer_screenshot" +) + +type ResponseFunctionWebSearchParam struct { + ID string `json:"id"` + Type ResponseInputItemType `json:"type"` + Action *ResponseFunctionWebSearchAction `json:"action"` + Status ResponseStatus `json:"status"` +} + +type ResponseFunctionWebSearchAction struct { + Type ResponseFunctionWebSearchActionType `json:"type"` + Query string `json:"query,omitempty"` + URL string `json:"url,omitempty"` + Pattern string `json:"pattern,omitempty"` +} + +type ResponseFunctionWebSearchActionType string + +const ( + ResponseFunctionWebSearchActionTypeSearch ResponseFunctionWebSearchActionType = "search" + ResponseFunctionWebSearchActionTypeOpenPage ResponseFunctionWebSearchActionType = "open_page" + ResponseFunctionWebSearchActionTypeFind ResponseFunctionWebSearchActionType = "find" +) + +type ResponseFunctionToolCallParam struct { + ID string `json:"id"` + Type ResponseInputItemType `json:"type"` + Arguments string `json:"arguments"` + CallID string `json:"call_id"` + Name string `json:"name"` + Status ResponseStatus `json:"status"` +} + +type ResponseFunctionCallOutput struct { + CallID string `json:"call_id"` + Output string `json:"output"` + Type ResponseInputItemType `json:"type"` + ID string `json:"id"` + Status ResponseStatus `json:"status"` +} + +type ResponseReasoningItem struct { + ID string `json:"id"` + Type ResponseInputItemType `json:"type"` + Summary []*ResponseReasoningContent 
`json:"summary"` + Content []*ResponseReasoningContent `json:"content,omitempty"` + Status ResponseStatus `json:"status"` + EncryptedContent string `json:"encrypted_content,omitempty"` +} + +type ResponseReasoningContent struct { + Text string `json:"text"` + Type ResponseReasoningType `json:"type"` +} + +type ResponseReasoningType string + +const ( + ResponseReasoningTypeSummaryText ResponseReasoningType = "summary_text" + ResponseReasoningTypeReasoningText ResponseReasoningType = "reasoning_text" +) + +type ResponseImageGenerationCall struct { + ID string `json:"id"` + Type ResponseInputItemType `json:"type"` + Result string `json:"result"` + Status ResponseStatus `json:"status"` +} + +type ResponseCodeInterpreterToolCallParam struct { + ID string `json:"id"` + Type ResponseInputItemType `json:"type"` + Code string `json:"code"` + Status ResponseStatus `json:"status"` + ContainerID string `json:"container_id"` + Outputs []*ResponseCodeInterpreterToolCallOutput `json:"outputs"` +} + +type ResponseCodeInterpreterToolCallOutput struct { + Type ResponseCodeInterpreterToolCallOutputType `json:"type"` + Logs string `json:"logs,omitempty"` + URL string `json:"url,omitempty"` +} + +type ResponseCodeInterpreterToolCallOutputType string + +const ( + ResponseCodeInterpreterToolCallOutputTypeLogs ResponseCodeInterpreterToolCallOutputType = "logs" + ResponseCodeInterpreterToolCallOutputTypeImage ResponseCodeInterpreterToolCallOutputType = "image" +) + +type ResponseLocalShellCall struct { + ID string `json:"id"` + Type ResponseInputItemType `json:"type"` + Action *ResponseLocalShellAction `json:"action"` + CallID string `json:"call_id"` + Status ResponseStatus `json:"status,omitempty"` +} + +type ResponseLocalShellAction struct { + Type ResponseLocalShellActionType `json:"type"` + Command []string `json:"command"` + Env map[string]string `json:"env"` + TimeoutMS int `json:"timeout_ms,omitempty"` + User string `json:"user,omitempty"` + WorkingDirectory string 
`json:"working_directory,omitempty"` +} + +type ResponseLocalShellActionType string + +const ( + ResponseLocalShellActionTypeExec ResponseLocalShellActionType = "exec" +) + +type ResponseLocalShellCallOutput struct { + ID string `json:"id"` + Type ResponseInputItemType `json:"type"` + Output string `json:"output"` + Status ResponseStatus `json:"status,omitempty"` +} + +type ResponseMCPListTools struct { + ID string `json:"id"` + ServerLabel string `json:"server_label"` + Tools []*ResponseMCPListToolsTool `json:"tools"` + Type ResponseInputItemType `json:"type"` + Error string `json:"error,omitempty"` +} + +type ResponseMCPListToolsTool struct { + InputSchema ResponseJSONSchemaObject `json:"input_schema"` + Name string `json:"name"` + Annotations ResponseJSONSchemaObject `json:"annotations,omitempty"` + Description string `json:"description,omitempty"` +} + +type ResponseMCPApprovalRequest struct { + ID string `json:"id"` + Arguments string `json:"arguments"` + Name string `json:"name"` + ServerLabel string `json:"server_label"` + Type ResponseInputItemType `json:"type"` +} + +type ResponseMCPApprovalResponse struct { + ApprovalRequestID string `json:"approval_request_id"` + Approve bool `json:"approve"` + Type ResponseInputItemType `json:"type"` + ID string `json:"id,omitempty"` + Reason string `json:"reason,omitempty"` +} + +type ResponseMCPCall struct { + ID string `json:"id"` + Arguments string `json:"arguments"` + Name string `json:"name"` + ServerLabel string `json:"server_label"` + Type ResponseInputItemType `json:"type"` + Error string `json:"error,omitempty"` + Output string `json:"output,omitempty"` +} + +type ResponseCustomToolCallOutputParam struct { + CallID string `json:"call_id"` + Output string `json:"output"` + Type ResponseInputItemType `json:"type"` + ID string `json:"id"` +} + +type ResponseCustomToolCallParam struct { + CallID string `json:"call_id"` + Input string `json:"input"` + Name string `json:"name"` + Type ResponseInputItemType 
`json:"type"` + ID string `json:"id"` +} + +type ResponseItemReference struct { + ID string `json:"id"` + Type ResponseInputItemType `json:"type,omitempty"` +} + +type ResponseMetadata map[string]string + +// ResponsePromptParam +// reference: https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts +type ResponsePromptParam struct { + ID string `json:"id"` + Variables map[string]*ResponseInputItemParam `json:"variables,omitempty"` + Version string `json:"version,omitempty"` +} + +type ResponseReasoning struct { + Effort ResponseReasoningEffort `json:"effort,omitempty"` + GenerateSummary ResponseReasoningSummary `json:"generate_summary,omitempty"` + Summary ResponseReasoningSummary `json:"summary,omitempty"` +} + +type ResponseReasoningEffort string + +const ( + ResponseReasoningEffortMinimal ResponseReasoningEffort = "minimal" + ResponseReasoningEffortLow ResponseReasoningEffort = "low" + ResponseReasoningEffortMedium ResponseReasoningEffort = "medium" + ResponseReasoningEffortHigh ResponseReasoningEffort = "high" +) + +type ResponseReasoningSummary string + +const ( + ResponseReasoningSummaryAuto ResponseReasoningSummary = "auto" + ResponseReasoningSummaryConcise ResponseReasoningSummary = "concise" + ResponseReasoningSummaryDetailed ResponseReasoningSummary = "detailed" +) + +type ResponseServiceTier string + +const ( + ServiceTierAuto ResponseServiceTier = "auto" + ServiceTierDefault ResponseServiceTier = "default" + ServiceTierFlex ResponseServiceTier = "flex" + ServiceTierScale ResponseServiceTier = "scale" + ServiceTierPriority ResponseServiceTier = "priority" +) + +type ResponseStreamOptions struct { + IncludeObfuscation bool `json:"include_obfuscation"` +} + +type ResponseJSONSchemaObject []byte + +func (param ResponseJSONSchemaObject) MarshalJSON() ([]byte, error) { + return []byte(param), nil +} + +func (param *ResponseJSONSchemaObject) UnmarshalJSON(b []byte) error { + *param = append((*param)[:0], b...) 
+ return nil +} + +type ResponseFormat struct { + Text *ResponseFormatText + JSONObject *ResponseFormatJSONObject + JSONSchema *ResponseFormatJSONSchema +} + +type ResponseFormatType string + +const ( + ResponseFormatTypeText ResponseFormatType = "text" + ResponseFormatTypeJSONObject ResponseFormatType = "json_object" + ResponseFormatTypeJSONSchema ResponseFormatType = "json_schema" +) + +func (format ResponseFormat) MarshalJSON() ([]byte, error) { + if format.Text != nil { + return json.Marshal(format.Text) + } + if format.JSONObject != nil { + return json.Marshal(format.JSONObject) + } + if format.JSONSchema != nil { + return json.Marshal(format.JSONSchema) + } + return json.Marshal(nil) +} + +func (format *ResponseFormat) UnmarshalJSON(data []byte) error { + for _, b := range data { + switch b { + case ' ', '\r', '\n', '\t': + case '{': + var ir struct { + Type ResponseFormatType `json:"type"` + } + if err := json.Unmarshal(data, &ir); err != nil { + return err + } + switch ir.Type { + case ResponseFormatTypeText: + return json.Unmarshal(data, &format.Text) + case ResponseFormatTypeJSONObject: + return json.Unmarshal(data, &format.JSONObject) + case ResponseFormatTypeJSONSchema: + return json.Unmarshal(data, &format.JSONSchema) + default: + return errors.New("invalid response_format, available types are 'text', 'json_object', 'json_schema'") + } + default: + return errors.New("response_format should be an object") + } + } + return errors.New("empty response_format") +} + +type ResponseFormatText struct { + Type ResponseFormatType `json:"type"` +} + +type ResponseFormatJSONObject struct { + Type ResponseFormatType `json:"type"` +} + +type ResponseFormatJSONSchema struct { + Type ResponseFormatType `json:"type"` + Name string `json:"name"` + Description string `json:"description,omitempty"` + Strict bool `json:"strict,omitempty"` + Schema ResponseJSONSchemaObject `json:"schema,omitempty"` +} + +type ResponseTextConfigParam struct { + Format *ResponseFormat 
`json:"format"` + Verbosity ResponseVerbosity `json:"verbosity,omitempty"` +} + +type ResponseVerbosity string + +const ( + ResponseVerbosityLow ResponseVerbosity = "low" + ResponseVerbosityMedium ResponseVerbosity = "medium" + ResponseVerbosityHigh ResponseVerbosity = "high" +) + +type ResponseToolParam struct { + Function *ResponseFunctionToolParam + FileSearch *ResponseFileSearchToolParam + WebSearch *ResponseWebSearchToolParam + ComputerUse *ResponseComputerToolParam + MCP *ResponseMCPToolParam + CodeInterpreter *ResponseCodeInterpreterToolParam + ImageGeneration *ResponseImageGenerationToolParam + LocalShell *ResponseLocalShellToolParam + Custom *ResponseCustomToolParam +} + +func (tool ResponseToolParam) MarshalJSON() ([]byte, error) { + if tool.Function != nil { + return json.Marshal(tool.Function) + } + if tool.FileSearch != nil { + return json.Marshal(tool.FileSearch) + } + if tool.WebSearch != nil { + return json.Marshal(tool.WebSearch) + } + if tool.ComputerUse != nil { + return json.Marshal(tool.ComputerUse) + } + if tool.MCP != nil { + return json.Marshal(tool.MCP) + } + if tool.CodeInterpreter != nil { + return json.Marshal(tool.CodeInterpreter) + } + if tool.ImageGeneration != nil { + return json.Marshal(tool.ImageGeneration) + } + if tool.LocalShell != nil { + return json.Marshal(tool.LocalShell) + } + if tool.Custom != nil { + return json.Marshal(tool.Custom) + } + return json.Marshal(nil) +} + +func (tool *ResponseToolParam) UnmarshalJSON(data []byte) error { + for _, b := range data { + switch b { + case ' ', '\r', '\n', '\t': + case '{': + var ir struct { + Type ResponseToolCallType `json:"type"` + } + if err := json.Unmarshal(data, &ir); err != nil { + return err + } + switch ir.Type { + case ResponseToolCallTypeFunction: + return json.Unmarshal(data, &tool.Function) + case ResponseToolCallTypeFileSearch: + return json.Unmarshal(data, &tool.FileSearch) + case ResponseToolCallTypeWebSearch: + return json.Unmarshal(data, &tool.WebSearch) + case 
ResponseToolCallTypeComputerUse: + return json.Unmarshal(data, &tool.ComputerUse) + case ResponseToolCallTypeMCP: + return json.Unmarshal(data, &tool.MCP) + case ResponseToolCallTypeCodeInterpreter: + return json.Unmarshal(data, &tool.CodeInterpreter) + case ResponseToolCallTypeImageGeneration: + return json.Unmarshal(data, &tool.ImageGeneration) + case ResponseToolCallTypeLocalShell: + return json.Unmarshal(data, &tool.LocalShell) + case ResponseToolCallTypeCustom: + return json.Unmarshal(data, &tool.Custom) + } + return fmt.Errorf("unknown tool type %q", ir.Type) + default: + return errors.New("tool should be an object") + } + } + return errors.New("empty tool") +} + +type ResponseToolCallType string + +const ( + ResponseToolCallTypeFunction ResponseToolCallType = "function" + ResponseToolCallTypeFileSearch ResponseToolCallType = "file_search" + ResponseToolCallTypeWebSearch ResponseToolCallType = "web_search_preview" + ResponseToolCallTypeWebSearch20250311 ResponseToolCallType = "web_search_preview_2025_03_11" + ResponseToolCallTypeComputerUse ResponseToolCallType = "computer_use_preview" + ResponseToolCallTypeMCP ResponseToolCallType = "mcp" + ResponseToolCallTypeCodeInterpreter ResponseToolCallType = "code_interpreter" + ResponseToolCallTypeImageGeneration ResponseToolCallType = "image_generation" + ResponseToolCallTypeLocalShell ResponseToolCallType = "local_shell" + ResponseToolCallTypeCustom ResponseToolCallType = "custom" +) + +type ResponseFunctionToolParam struct { + Type ResponseToolCallType `json:"type"` + Name string `json:"name"` + Description string `json:"description,omitempty"` + Strict bool `json:"strict,omitempty"` + Parameters ResponseJSONSchemaObject `json:"parameters,omitempty"` +} + +type ResponseFileSearchToolParam struct { + Type ResponseToolCallType `json:"type"` + VectorStoreIDs []string `json:"vector_store_ids"` + Filters *ResponseFileSearchToolParamFilters `json:"filters,omitempty"` + MaxNumResults int `json:"max_num_results"` + 
RankingOptions *ResponseFileSearchToolParamRankingOptions `json:"ranking_options,omitempty"` +} + +type ResponseFileSearchToolParamFilters struct { + Type ResponseFileSearchToolParamFiltersType `json:"type"` + Key string `json:"key"` + Value any `json:"value"` + Filters []*ResponseFileSearchToolParamFilters `json:"filters,omitempty"` +} + +type ResponseFileSearchToolParamFiltersType string + +// Comparison filters +const ( + ResponseFileSearchToolParamFiltersTypeEq ResponseFileSearchToolParamFiltersType = "eq" + ResponseFileSearchToolParamFiltersTypeNe ResponseFileSearchToolParamFiltersType = "ne" + ResponseFileSearchToolParamFiltersTypeGt ResponseFileSearchToolParamFiltersType = "gt" + ResponseFileSearchToolParamFiltersTypeGte ResponseFileSearchToolParamFiltersType = "gte" + ResponseFileSearchToolParamFiltersTypeLt ResponseFileSearchToolParamFiltersType = "lt" + ResponseFileSearchToolParamFiltersTypeLte ResponseFileSearchToolParamFiltersType = "lte" +) + +// Compound filters +const ( + ResponseFileSearchToolParamFiltersTypeAnd ResponseFileSearchToolParamFiltersType = "and" + ResponseFileSearchToolParamFiltersTypeOr ResponseFileSearchToolParamFiltersType = "or" +) + +type ResponseFileSearchToolParamRankingOptions struct { + Ranker ResponseFileSearchToolParamRanker `json:"ranker"` + ScoreThreshold float64 `json:"score_threshold"` +} + +type ResponseFileSearchToolParamRanker string + +const ( + ResponseFileSearchToolParamRankerAuto ResponseFileSearchToolParamRanker = "auto" + ResponseFileSearchToolParamRankerDefault20241115 ResponseFileSearchToolParamRanker = "default-2024-11-15" +) + +type ResponseWebSearchToolParam struct { + Type ResponseToolCallType `json:"type"` + SearchContextSize ResponseWebSearchToolParamSearchContextSize `json:"search_context_size"` + UserLocation *ResponseWebSearchToolParamUserLocation `json:"user_location,omitempty"` +} + +type ResponseWebSearchToolParamSearchContextSize string + +const ( + ResponseWebSearchToolParamSearchContextSizeLow 
ResponseWebSearchToolParamSearchContextSize = "low" + ResponseWebSearchToolParamSearchContextSizeMedium ResponseWebSearchToolParamSearchContextSize = "medium" + ResponseWebSearchToolParamSearchContextSizeHigh ResponseWebSearchToolParamSearchContextSize = "high" +) + +type ResponseWebSearchToolParamUserLocation struct { + Type ResponseWebSearchToolParamUserLocationType `json:"type"` + City string `json:"city,omitempty"` + Country string `json:"country,omitempty"` + Region string `json:"region,omitempty"` + Timezone string `json:"timezone,omitempty"` +} + +type ResponseWebSearchToolParamUserLocationType string + +const ( + ResponseWebSearchToolParamUserLocationTypeApproximate ResponseWebSearchToolParamUserLocationType = "approximate" +) + +type ResponseComputerToolParam struct { + Type ResponseToolCallType `json:"type"` + DisplayHeight int `json:"display_height"` + DisplayWidth int `json:"display_width"` + Environment ResponseComputerToolParamEnvironment `json:"environment"` +} + +type ResponseComputerToolParamEnvironment string + +const ( + ResponseComputerToolParamEnvironmentWindows ResponseComputerToolParamEnvironment = "windows" + ResponseComputerToolParamEnvironmentMac ResponseComputerToolParamEnvironment = "mac" + ResponseComputerToolParamEnvironmentLinux ResponseComputerToolParamEnvironment = "linux" + ResponseComputerToolParamEnvironmentUbuntu ResponseComputerToolParamEnvironment = "ubuntu" + ResponseComputerToolParamEnvironmentBrowser ResponseComputerToolParamEnvironment = "browser" +) + +type ResponseMCPToolParam struct { + Type ResponseToolCallType `json:"type"` + ServerLabel string `json:"server_label"` + AllowedTools *ResponseMCPToolParamAllowedTools `json:"allowed_tools,omitempty"` + Authorization string `json:"authorization"` + ConnectorID ResponseMCPToolParamConnectorID `json:"connector_id,omitempty"` + Headers map[string]string `json:"headers,omitempty"` + RequireApproval *ResponseMCPToolParamRequireApproval `json:"require_approval,omitempty"` + 
ServerDescription string `json:"server_description,omitempty"` + ServerUrl string `json:"server_url,omitempty"` +} + +type ResponseMCPToolParamAllowedTools struct { + List []string + Filter *ResponseMCPToolFilter +} + +func (param *ResponseMCPToolParamAllowedTools) MarshalJSON() ([]byte, error) { + if param.List != nil { + return json.Marshal(param.List) + } + if param.Filter != nil { + return json.Marshal(param.Filter) + } + return json.Marshal(nil) +} + +func (param *ResponseMCPToolParamAllowedTools) UnmarshalJSON(data []byte) error { + for _, b := range data { + switch b { + case ' ', '\r', '\n', '\t': + case '[': + return json.Unmarshal(data, ¶m.List) + case '{': + return json.Unmarshal(data, ¶m.Filter) + default: + return errors.New("allowed_tools should be an array or an object") + } + } + return errors.New("empty allowed_tools") +} + +type ResponseMCPToolParamConnectorID string + +const ( + ResponseMCPToolParamConnectorIDDropbox ResponseMCPToolParamConnectorID = "connector_dropbox" + ResponseMCPToolParamConnectorIDGmail ResponseMCPToolParamConnectorID = "connector_gmail" + ResponseMCPToolParamConnectorIDGoogleCalendar ResponseMCPToolParamConnectorID = "connector_googlecalendar" + ResponseMCPToolParamConnectorIDGoogleDrive ResponseMCPToolParamConnectorID = "connector_googledrive" + ResponseMCPToolParamConnectorIDMicrosoftTeams ResponseMCPToolParamConnectorID = "connector_microsoftteams" + ResponseMCPToolParamConnectorIDOutlookCalendar ResponseMCPToolParamConnectorID = "connector_outlookcalendar" + ResponseMCPToolParamConnectorIDOutlookEmail ResponseMCPToolParamConnectorID = "connector_outlookemail" + ResponseMCPToolParamConnectorIDSharepoint ResponseMCPToolParamConnectorID = "connector_sharepoint" +) + +type ResponseMCPToolParamRequireApproval struct { + Type ResponseMCPToolParamRequireApprovalType + Filter *ResponseMCPToolParamRequireApprovalFilter +} + +func (param *ResponseMCPToolParamRequireApproval) MarshalJSON() ([]byte, error) { + if param.Type != "" { 
+ return json.Marshal(param.Type) + } + if param.Filter != nil { + return json.Marshal(param.Filter) + } + return json.Marshal(nil) +} + +func (param *ResponseMCPToolParamRequireApproval) UnmarshalJSON(data []byte) error { + for _, b := range data { + switch b { + case ' ', '\r', '\n', '\t': + case '"': + return json.Unmarshal(data, ¶m.Type) + case '{': + return json.Unmarshal(data, ¶m.Filter) + default: + return errors.New("require_approval should be a string or an object") + } + } + return errors.New("empty require_approval") +} + +type ResponseMCPToolParamRequireApprovalType string + +const ( + ResponseMCPToolParamRequireApprovalTypeAlways ResponseMCPToolParamRequireApprovalType = "always" + ResponseMCPToolParamRequireApprovalTypeNever ResponseMCPToolParamRequireApprovalType = "never" +) + +type ResponseMCPToolParamRequireApprovalFilter struct { + Always *ResponseMCPToolFilter `json:"always,omitempty"` + Never *ResponseMCPToolFilter `json:"never,omitempty"` +} + +type ResponseMCPToolFilter struct { + ReadOnly bool `json:"read_only,omitempty"` + ToolNames []string `json:"tool_names,omitempty"` +} + +type ResponseCodeInterpreterToolParam struct { + Type ResponseToolCallType `json:"type"` + Container *ResponseCodeInterpreterToolParamContainer `json:"container"` +} + +type ResponseCodeInterpreterToolParamContainer struct { + Text string + Options *ResponseCodeInterpreterContainerOptions +} + +func (param *ResponseCodeInterpreterToolParam) MarshalJSON() ([]byte, error) { + if param.Type != "" { + return json.Marshal(param.Type) + } + if param.Container != nil { + return json.Marshal(param.Container) + } + return json.Marshal(nil) +} + +func (param *ResponseCodeInterpreterToolParam) UnmarshalJSON(data []byte) error { + for _, b := range data { + switch b { + case ' ', '\r', '\n', '\t': + case '"': + return json.Unmarshal(data, ¶m.Type) + case '{': + return json.Unmarshal(data, ¶m.Container) + default: + return errors.New("code_interpreter should be a string or an 
object") + } + } + return errors.New("empty code_interpreter") +} + +type ResponseCodeInterpreterContainerOptions struct { + Type ResponseCodeInterpreterContainerOptionsType `json:"type"` + FileIDs []string `json:"file_ids,omitempty"` +} + +type ResponseCodeInterpreterContainerOptionsType string + +const ( + ResponseCodeInterpreterContainerOptionsTypeAuto ResponseCodeInterpreterContainerOptionsType = "auto" +) + +type ResponseImageGenerationToolParam struct { + Type ResponseToolCallType `json:"type"` + Background ResponseImageGenerationBackground `json:"background,omitempty"` + InputFidelity ResponseImageGenerationInputFidelity `json:"input_fidelity,omitempty"` + InputImageMask *ResponseImageGenerationInputImageMask `json:"input_image_mask,omitempty"` + Model ResponseImageGenerationModel `json:"model,omitempty"` + Moderation ResponseImageGenerationModeration `json:"moderation,omitempty"` + OutputCompression *int `json:"output_compression,omitempty"` + OutputFormat ResponseImageGenerationOutputFormat `json:"output_format,omitempty"` + PartialImages *int `json:"partial_images,omitempty"` + Quality ResponseImageGenerationQuality `json:"quality,omitempty"` + Size ResponseImageGenerationSize `json:"size,omitempty"` +} + +type ResponseImageGenerationBackground string + +const ( + ResponseImageGenerationBackgroundTransparent ResponseImageGenerationBackground = "transparent" + ResponseImageGenerationBackgroundOpaque ResponseImageGenerationBackground = "opaque" + ResponseImageGenerationBackgroundAuto ResponseImageGenerationBackground = "auto" +) + +type ResponseImageGenerationInputFidelity string + +const ( + ResponseImageGenerationInputFidelityHigh ResponseImageGenerationInputFidelity = "high" + ResponseImageGenerationInputFidelityLow ResponseImageGenerationInputFidelity = "low" +) + +type ResponseImageGenerationInputImageMask struct { + FileID string `json:"file_id,omitempty"` + ImageURL string `json:"image_url,omitempty"` +} + +type ResponseImageGenerationModel string + 
+const ( + ResponseImageGenerationModelGPTImage1 ResponseImageGenerationModel = "gpt-image-1" +) + +type ResponseImageGenerationModeration string + +const ( + ResponseImageGenerationModerationAuto ResponseImageGenerationModeration = "auto" + ResponseImageGenerationModerationLow ResponseImageGenerationModeration = "low" +) + +type ResponseImageGenerationOutputFormat string + +const ( + ResponseImageGenerationOutputFormatPNG ResponseImageGenerationOutputFormat = "png" + ResponseImageGenerationOutputFormatWebP ResponseImageGenerationOutputFormat = "webp" + ResponseImageGenerationOutputFormatJPEG ResponseImageGenerationOutputFormat = "jpeg" +) + +type ResponseImageGenerationQuality string + +const ( + ResponseImageGenerationQualityLow ResponseImageGenerationQuality = "low" + ResponseImageGenerationQualityMedium ResponseImageGenerationQuality = "medium" + ResponseImageGenerationQualityHigh ResponseImageGenerationQuality = "high" + ResponseImageGenerationQualityAuto ResponseImageGenerationQuality = "auto" +) + +type ResponseImageGenerationSize string + +const ( + ResponseImageGenerationSize1024x1024 ResponseImageGenerationSize = "1024x1024" + ResponseImageGenerationSize1024x1536 ResponseImageGenerationSize = "1024x1536" + ResponseImageGenerationSize1536x1024 ResponseImageGenerationSize = "1536x1024" + ResponseImageGenerationSizeAuto ResponseImageGenerationSize = "auto" +) + +type ResponseLocalShellToolParam struct { + Type ResponseToolCallType `json:"type"` +} + +type ResponseCustomToolParam struct { + Type ResponseToolCallType `json:"type"` + Name string `json:"name"` + Description string `json:"description,omitempty"` + Format *ResponseCustomToolParamFormat `json:"format,omitempty"` +} + +type ResponseCustomToolParamFormat struct { + Type ResponseCustomToolFormatType `json:"type"` + Definition string `json:"definition,omitempty"` + Syntax ResponseCustomToolFormatSyntax `json:"syntax,omitempty"` +} + +type ResponseCustomToolFormatType string + +const ( + 
ResponseCustomToolFormatTypeText ResponseCustomToolFormatType = "text" + ResponseCustomToolFormatTypeGrammar ResponseCustomToolFormatType = "grammar" +) + +type ResponseCustomToolFormatSyntax string + +const ( + ResponseCustomToolFormatSyntaxLark ResponseCustomToolFormatSyntax = "lark" + ResponseCustomToolFormatSyntaxRegex ResponseCustomToolFormatSyntax = "regex" +) + +type ResponseToolChoice struct { + Option ResponseToolChoiceOption + Types *ResponseToolChoiceTypesParam + Allowed *ResponseToolChoiceAllowedParam + Function *ResponseToolChoiceFunctionParam + MCP *ResponseToolChoiceMCPParam + Custom *ResponseToolChoiceCustomParam +} + +func (toolChoice ResponseToolChoice) MarshalJSON() ([]byte, error) { + if toolChoice.Option != "" { + return json.Marshal(toolChoice.Option) + } + if toolChoice.Types != nil { + return json.Marshal(toolChoice.Types) + } + if toolChoice.Allowed != nil { + return json.Marshal(toolChoice.Allowed) + } + if toolChoice.Function != nil { + return json.Marshal(toolChoice.Function) + } + if toolChoice.MCP != nil { + return json.Marshal(toolChoice.MCP) + } + if toolChoice.Custom != nil { + return json.Marshal(toolChoice.Custom) + } + return json.Marshal(nil) +} + +func (toolChoice *ResponseToolChoice) UnmarshalJSON(data []byte) error { + for _, b := range data { + switch b { + case ' ', '\r', '\n', '\t': + case 'n': + return nil + case '"': + return json.Unmarshal(data, &toolChoice.Option) + case '{': + var ir struct { + Type ResponseToolChoiceType `json:"type"` + } + if err := json.Unmarshal(data, &ir); err != nil { + return err + } + switch ir.Type { + case ResponseToolChoiceTypeAllowedTools: + return json.Unmarshal(data, &toolChoice.Allowed) + case ResponseToolChoiceTypeFileSearch, ResponseToolChoiceTypeWebSearch, ResponseToolChoiceTypeComputerUse, ResponseToolChoiceTypeWebSearch20250311, ResponseToolChoiceTypeImageGeneration, ResponseToolChoiceTypeCodeInterpreter: + return json.Unmarshal(data, &toolChoice.Types) + case 
ResponseToolChoiceTypeFunction: + return json.Unmarshal(data, &toolChoice.Function) + case ResponseToolChoiceTypeMCP: + return json.Unmarshal(data, &toolChoice.MCP) + case ResponseToolChoiceTypeCustom: + return json.Unmarshal(data, &toolChoice.Custom) + } + return fmt.Errorf("unknown tool_choice type %q", ir.Type) + default: + return errors.New("tool_choice should be a string, an object or null") + } + } + return errors.New("empty tool_choice") +} + +type ResponseToolChoiceOption string + +const ( + ChatCompletionToolChoiceOptionAuto ResponseToolChoiceOption = "auto" + ChatCompletionToolChoiceOptionNone ResponseToolChoiceOption = "none" + ChatCompletionToolChoiceOptionRequired ResponseToolChoiceOption = "required" +) + +type ResponseToolChoiceType string + +const ( + ResponseToolChoiceTypeFunction ResponseToolChoiceType = "function" + ResponseToolChoiceTypeAllowedTools ResponseToolChoiceType = "allowed_tools" + ResponseToolChoiceTypeFileSearch ResponseToolChoiceType = "file_search" + ResponseToolChoiceTypeWebSearch ResponseToolChoiceType = "web_search_preview" + ResponseToolChoiceTypeComputerUse ResponseToolChoiceType = "computer_use_preview" + ResponseToolChoiceTypeWebSearch20250311 ResponseToolChoiceType = "web_search_preview_2025_03_11" + ResponseToolChoiceTypeImageGeneration ResponseToolChoiceType = "image_generation" + ResponseToolChoiceTypeCodeInterpreter ResponseToolChoiceType = "code_interpreter" + ResponseToolChoiceTypeMCP ResponseToolChoiceType = "mcp" + ResponseToolChoiceTypeCustom ResponseToolChoiceType = "custom" +) + +type ResponseToolChoiceAllowedParam struct { + Mode ResponseToolChoiceOption `json:"mode"` + Tools []*ResponseToolChoiceAllowedTool `json:"tools"` + Type ResponseToolChoiceType `json:"type"` +} + +type ResponseToolChoiceAllowedTool struct { + Name string `json:"name"` +} + +type ResponseToolChoiceTypesParam struct { + Type ResponseToolChoiceType `json:"type"` +} + +type ResponseToolChoiceFunctionParam struct { + Name string `json:"name"` 
+ Type ResponseToolChoiceType `json:"type"` +} + +type ResponseToolChoiceMCPParam struct { + ServerLabel string `json:"server_label"` + Type ResponseToolChoiceType `json:"type"` + Name string `json:"name,omitempty"` +} + +type ResponseToolChoiceCustomParam struct { + Name string `json:"name"` + Type ResponseToolChoiceType `json:"type"` +} + +type ResponseTruncationStrategy string + +const ( + ResponseTruncationStrategyAuto ResponseTruncationStrategy = "auto" + ResponseTruncationStrategyDisabled ResponseTruncationStrategy = "disabled" +) + +type ResponseStream = iter.Seq2[Event, error] + +type Response struct { + ID string `json:"id"` + CreatedAt float64 `json:"created_at"` + Error *ResponseError `json:"error"` + IncompleteDetails *ResponseIncompleteDetails `json:"incomplete_details"` + Instructions ResponseInputParam `json:"instructions"` + Metadata ResponseMetadata `json:"metadata"` + Model string `json:"model"` + Object string `json:"object"` + Output []*ResponseOutputItem `json:"output"` + ParallelToolCalls bool `json:"parallel_tool_calls"` + Temperature *float64 `json:"temperature"` + ToolChoice *ResponseToolChoice `json:"tool_choice"` + Tools []*ResponseToolParam `json:"tools"` + TopP *float64 `json:"top_p"` + Background *bool `json:"background"` + Conversation *ResponseConversation `json:"conversation"` + MaxOutputTokens *int `json:"max_output_tokens"` + MaxToolCalls *int `json:"max_tool_calls"` + PreviousResponseID string `json:"previous_response_id"` + Prompt *ResponsePromptParam `json:"prompt"` + PromptCacheKey string `json:"prompt_cache_key"` + Reasoning *ResponseReasoning `json:"reasoning"` + SafetyIdentifier string `json:"safety_identifier"` + ServiceTier ResponseServiceTier `json:"service_tier"` + Status ResponseStatus `json:"status"` + Text *ResponseTextConfigParam `json:"text"` + TopLogprobs *int `json:"top_logprobs"` + Truncation ResponseTruncationStrategy `json:"truncation"` + Usage *ResponseUsage `json:"usage"` + User string `json:"user"` +} + 
+type ResponseOutputItem = ResponseInputItemParam + +type ResponseErrorCode string + +const ( + ResponseErrorCodeServerError ResponseErrorCode = "server_error" + ResponseErrorCodeRateLimitExceeded ResponseErrorCode = "rate_limit_exceeded" + ResponseErrorCodeInvalidPrompt ResponseErrorCode = "invalid_prompt" + ResponseErrorCodeVectorStoreTimeout ResponseErrorCode = "vector_store_timeout" + ResponseErrorCodeInvalidImage ResponseErrorCode = "invalid_image" + ResponseErrorCodeInvalidImageFormat ResponseErrorCode = "invalid_image_format" + ResponseErrorCodeInvalidBase64Image ResponseErrorCode = "invalid_base64_image" + ResponseErrorCodeInvalidImageURL ResponseErrorCode = "invalid_image_url" + ResponseErrorCodeImageTooLarge ResponseErrorCode = "image_too_large" + ResponseErrorCodeImageTooSmall ResponseErrorCode = "image_too_small" + ResponseErrorCodeImageParseError ResponseErrorCode = "image_parse_error" + ResponseErrorCodeImageContentPolicyViolation ResponseErrorCode = "image_content_policy_violation" + ResponseErrorCodeInvalidImageMode ResponseErrorCode = "invalid_image_mode" + ResponseErrorCodeImageFileTooLarge ResponseErrorCode = "image_file_too_large" + ResponseErrorCodeUnsupportedImageMediaType ResponseErrorCode = "unsupported_image_media_type" + ResponseErrorCodeEmptyImageFile ResponseErrorCode = "empty_image_file" + ResponseErrorCodeFailedToDownloadImage ResponseErrorCode = "failed_to_download_image" + ResponseErrorCodeImageFileNotFound ResponseErrorCode = "image_file_not_found" +) + +type ResponseError struct { + Code ResponseErrorCode `json:"code"` + Message string `json:"message"` +} + +type ResponseIncompleteReason string + +const ( + ResponseIncompleteReasonMaxOutputTokens ResponseIncompleteReason = "max_output_tokens" + ResponseIncompleteReasonContentFilter ResponseIncompleteReason = "content_filter" +) + +type ResponseIncompleteDetails struct { + Reason ResponseIncompleteReason `json:"reason"` +} + +type ResponseInputTokensDetails struct { + CachedTokens 
int `json:"cached_tokens"` +} + +type ResponseOutputTokensDetails struct { + ReasoningTokens int `json:"reasoning_tokens"` +} + +type ResponseUsage struct { + InputTokens int `json:"input_tokens"` + InputTokensDetails *ResponseInputTokensDetails `json:"input_tokens_details"` + OutputTokens int `json:"output_tokens"` + OutputTokensDetails *ResponseOutputTokensDetails `json:"output_tokens_details"` + TotalTokens int `json:"total_tokens"` +} + +type Event interface { + EventType() EventType +} + +type EventType string + +const ( + EventTypeResponseAudioDelta EventType = "response.audio.delta" + EventTypeResponseAudioDone EventType = "response.audio.done" + EventTypeResponseAudioTranscriptDelta EventType = "response.audio.transcript.delta" + EventTypeResponseAudioTranscriptDone EventType = "response.audio.transcript.done" + EventTypeResponseCodeInterpreterCallCodeDelta EventType = "response.code_interpreter_call_code.delta" + EventTypeResponseCodeInterpreterCallCodeDone EventType = "response.code_interpreter_call_code.done" + EventTypeResponseCodeInterpreterCallCompleted EventType = "response.code_interpreter_call.completed" + EventTypeResponseCodeInterpreterCallInProgress EventType = "response.code_interpreter_call.in_progress" + EventTypeResponseCodeInterpreterCallInterpreting EventType = "response.code_interpreter_call.interpreting" + EventTypeResponseCompleted EventType = "response.completed" + EventTypeResponseContentPartAdded EventType = "response.content_part.added" + EventTypeResponseContentPartDone EventType = "response.content_part.done" + EventTypeResponseCreated EventType = "response.created" + EventTypeError EventType = "error" + EventTypeResponseFileSearchCallCompleted EventType = "response.file_search_call.completed" + EventTypeResponseFileSearchCallInProgress EventType = "response.file_search_call.in_progress" + EventTypeResponseFileSearchCallSearching EventType = "response.file_search_call.searching" + EventTypeResponseFunctionCallArgumentsDelta 
EventType = "response.function_call_arguments.delta" + EventTypeResponseFunctionCallArgumentsDone EventType = "response.function_call_arguments.done" + EventTypeResponseInProgress EventType = "response.in_progress" + EventTypeResponseFailed EventType = "response.failed" + EventTypeResponseIncomplete EventType = "response.incomplete" + EventTypeResponseOutputItemAdded EventType = "response.output_item.added" + EventTypeResponseOutputItemDone EventType = "response.output_item.done" + EventTypeResponseReasoningSummaryPartAdded EventType = "response.reasoning_summary_part.added" + EventTypeResponseReasoningSummaryPartDone EventType = "response.reasoning_summary_part.done" + EventTypeResponseReasoningSummaryTextDelta EventType = "response.reasoning_summary_text.delta" + EventTypeResponseReasoningSummaryTextDone EventType = "response.reasoning_summary_text.done" + EventTypeResponseReasoningTextDelta EventType = "response.reasoning_text.delta" + EventTypeResponseReasoningTextDone EventType = "response.reasoning_text.done" + EventTypeResponseRefusalDelta EventType = "response.refusal.delta" + EventTypeResponseRefusalDone EventType = "response.refusal.done" + EventTypeResponseOutputTextDelta EventType = "response.output_text.delta" + EventTypeResponseOutputTextDone EventType = "response.output_text.done" + EventTypeResponseWebSearchCallCompleted EventType = "response.web_search_call.completed" + EventTypeResponseWebSearchCallInProgress EventType = "response.web_search_call.in_progress" + EventTypeResponseWebSearchCallSearching EventType = "response.web_search_call.searching" + EventTypeResponseImageGenCallCompleted EventType = "response.image_generation_call.completed" + EventTypeResponseImageGenCallGenerating EventType = "response.image_generation_call.generating" + EventTypeResponseImageGenCallInProgress EventType = "response.image_generation_call.in_progress" + EventTypeResponseImageGenCallPartialImage EventType = "response.image_generation_call.partial_image" + 
EventTypeResponseMcpCallArgumentsDelta EventType = "response.mcp_call_arguments.delta" + EventTypeResponseMcpCallArgumentsDone EventType = "response.mcp_call_arguments.done" + EventTypeResponseMcpCallCompleted EventType = "response.mcp_call.completed" + EventTypeResponseMcpCallFailed EventType = "response.mcp_call.failed" + EventTypeResponseMcpCallInProgress EventType = "response.mcp_call.in_progress" + EventTypeResponseMcpListToolsCompleted EventType = "response.mcp_list_tools.completed" + EventTypeResponseMcpListToolsFailed EventType = "response.mcp_list_tools.failed" + EventTypeResponseMcpListToolsInProgress EventType = "response.mcp_list_tools.in_progress" + EventTypeResponseOutputTextAnnotationAdded EventType = "response.output_text.annotation.added" + EventTypeResponseQueued EventType = "response.queued" + EventTypeResponseCustomToolCallInputDelta EventType = "response.custom_tool_call_input.delta" + EventTypeResponseCustomToolCallInputDone EventType = "response.custom_tool_call_input.done" +) + +type ResponseCreatedEvent struct { + Response Response `json:"response"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseCreatedEvent) EventType() EventType { return EventTypeResponseCreated } + +type ResponseCompletedEvent struct { + Response *Response `json:"response"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseCompletedEvent) EventType() EventType { return EventTypeResponseCompleted } + +type ResponseInProgressEvent struct { + Response *Response `json:"response"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseInProgressEvent) EventType() EventType { return EventTypeResponseInProgress } + +type ResponseFailedEvent struct { + Response *Response `json:"response"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseFailedEvent) EventType() EventType { return 
EventTypeResponseFailed } + +type ResponseIncompleteEvent struct { + Response *Response `json:"response"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseIncompleteEvent) EventType() EventType { return EventTypeResponseIncomplete } + +type ResponseQueuedEvent struct { + Response *Response `json:"response"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseQueuedEvent) EventType() EventType { return EventTypeResponseQueued } + +type ResponseErrorEvent struct { + Code string `json:"code,omitempty"` + Message string `json:"message"` + Param string `json:"param,omitempty"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseErrorEvent) EventType() EventType { return EventTypeError } + +type ResponseTextDeltaEvent struct { + ContentIndex int `json:"content_index"` + Delta string `json:"delta"` + ItemID string `json:"item_id"` + Logprobs []*ResponseLogprob `json:"logprobs"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseTextDeltaEvent) EventType() EventType { return EventTypeResponseOutputTextDelta } + +type ResponseTextDoneEvent struct { + ContentIndex int `json:"content_index"` + ItemID string `json:"item_id"` + Logprobs []*ResponseLogprob `json:"logprobs"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Text string `json:"text"` + Type EventType `json:"type"` +} + +func (e ResponseTextDoneEvent) EventType() EventType { return EventTypeResponseOutputTextDone } + +type ResponseRefusalDeltaEvent struct { + ContentIndex int `json:"content_index"` + Delta string `json:"delta"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseRefusalDeltaEvent) EventType() EventType { return 
EventTypeResponseRefusalDelta } + +type ResponseRefusalDoneEvent struct { + ContentIndex int `json:"content_index"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + Refusal string `json:"refusal"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseRefusalDoneEvent) EventType() EventType { return EventTypeResponseRefusalDone } + +type ResponseReasoningTextDeltaEvent struct { + ContentIndex int `json:"content_index"` + Delta string `json:"delta"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseReasoningTextDeltaEvent) EventType() EventType { + return EventTypeResponseReasoningTextDelta +} + +type ResponseReasoningTextDoneEvent struct { + ContentIndex int `json:"content_index"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Text string `json:"text"` + Type EventType `json:"type"` +} + +func (e ResponseReasoningTextDoneEvent) EventType() EventType { + return EventTypeResponseReasoningTextDone +} + +type ResponseReasoningSummaryPartAddedEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + Part *ResponseReasoningContent `json:"part"` + SequenceNumber int `json:"sequence_number"` + SummaryIndex int `json:"summary_index"` + Type EventType `json:"type"` +} + +func (e ResponseReasoningSummaryPartAddedEvent) EventType() EventType { + return EventTypeResponseReasoningSummaryPartAdded +} + +type ResponseReasoningSummaryPartDoneEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + Part *ResponseReasoningContent `json:"part"` + SequenceNumber int `json:"sequence_number"` + SummaryIndex int `json:"summary_index"` + Type EventType `json:"type"` +} + +func (e ResponseReasoningSummaryPartDoneEvent) EventType() EventType { + return 
EventTypeResponseReasoningSummaryPartDone +} + +type ResponseReasoningSummaryTextDeltaEvent struct { + Delta string `json:"delta"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + SummaryIndex int `json:"summary_index"` + Type EventType `json:"type"` +} + +func (e ResponseReasoningSummaryTextDeltaEvent) EventType() EventType { + return EventTypeResponseReasoningSummaryTextDelta +} + +type ResponseReasoningSummaryTextDoneEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + SummaryIndex int `json:"summary_index"` + Text string `json:"text"` + Type EventType `json:"type"` +} + +func (e ResponseReasoningSummaryTextDoneEvent) EventType() EventType { + return EventTypeResponseReasoningSummaryTextDone +} + +type ResponseContentPartAddedEvent struct { + ContentIndex int `json:"content_index"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + Part *ResponseMessageContentText `json:"part"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseContentPartAddedEvent) EventType() EventType { + return EventTypeResponseContentPartAdded +} + +type ResponseContentPartDoneEvent struct { + ContentIndex int `json:"content_index"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + Part *ResponseMessageContentText `json:"part"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseContentPartDoneEvent) EventType() EventType { return EventTypeResponseContentPartDone } + +type ResponseOutputItemAddedEvent struct { + Item *ResponseInputItemParam `json:"item"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseOutputItemAddedEvent) EventType() EventType { return EventTypeResponseOutputItemAdded } + +type 
ResponseOutputItemDoneEvent struct { + Item *ResponseInputItemParam `json:"item"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseOutputItemDoneEvent) EventType() EventType { return EventTypeResponseOutputItemDone } + +type ResponseOutputTextAnnotationAddedEvent struct { + Annotation *ResponseAnnotation `json:"annotation"` + AnnotationIndex int `json:"annotation_index"` + ContentIndex int `json:"content_index"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseOutputTextAnnotationAddedEvent) EventType() EventType { + return EventTypeResponseOutputTextAnnotationAdded +} + +type ResponseFunctionCallArgumentsDeltaEvent struct { + Delta string `json:"delta"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseFunctionCallArgumentsDeltaEvent) EventType() EventType { + return EventTypeResponseFunctionCallArgumentsDelta +} + +type ResponseFunctionCallArgumentsDoneEvent struct { + Arguments string `json:"arguments"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseFunctionCallArgumentsDoneEvent) EventType() EventType { + return EventTypeResponseFunctionCallArgumentsDone +} + +type ResponseCodeInterpreterCallCodeDeltaEvent struct { + Delta string `json:"delta"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseCodeInterpreterCallCodeDeltaEvent) EventType() EventType { + return EventTypeResponseCodeInterpreterCallCodeDelta +} + +type ResponseCodeInterpreterCallCodeDoneEvent struct { + Code string 
`json:"code"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseCodeInterpreterCallCodeDoneEvent) EventType() EventType { + return EventTypeResponseCodeInterpreterCallCodeDone +} + +type ResponseCodeInterpreterCallInProgressEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseCodeInterpreterCallInProgressEvent) EventType() EventType { + return EventTypeResponseCodeInterpreterCallInProgress +} + +type ResponseCodeInterpreterCallCompletedEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseCodeInterpreterCallCompletedEvent) EventType() EventType { + return EventTypeResponseCodeInterpreterCallCompleted +} + +type ResponseCodeInterpreterCallInterpretingEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseCodeInterpreterCallInterpretingEvent) EventType() EventType { + return EventTypeResponseCodeInterpreterCallInterpreting +} + +type ResponseFileSearchCallInProgressEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseFileSearchCallInProgressEvent) EventType() EventType { + return EventTypeResponseFileSearchCallInProgress +} + +type ResponseFileSearchCallCompletedEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseFileSearchCallCompletedEvent) EventType() EventType { + return 
EventTypeResponseFileSearchCallCompleted +} + +type ResponseFileSearchCallSearchingEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseFileSearchCallSearchingEvent) EventType() EventType { + return EventTypeResponseFileSearchCallSearching +} + +type ResponseWebSearchCallInProgressEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseWebSearchCallInProgressEvent) EventType() EventType { + return EventTypeResponseWebSearchCallInProgress +} + +type ResponseWebSearchCallCompletedEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseWebSearchCallCompletedEvent) EventType() EventType { + return EventTypeResponseWebSearchCallCompleted +} + +type ResponseWebSearchCallSearchingEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseWebSearchCallSearchingEvent) EventType() EventType { + return EventTypeResponseWebSearchCallSearching +} + +type ResponseImageGenCallInProgressEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseImageGenCallInProgressEvent) EventType() EventType { + return EventTypeResponseImageGenCallInProgress +} + +type ResponseImageGenCallGeneratingEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseImageGenCallGeneratingEvent) EventType() EventType { + return 
EventTypeResponseImageGenCallGenerating +} + +type ResponseImageGenCallCompletedEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseImageGenCallCompletedEvent) EventType() EventType { + return EventTypeResponseImageGenCallCompleted +} + +type ResponseImageGenCallPartialImageEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + PartialImageB64 string `json:"partial_image_b64"` + PartialImageIndex int `json:"partial_image_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseImageGenCallPartialImageEvent) EventType() EventType { + return EventTypeResponseImageGenCallPartialImage +} + +type ResponseMcpCallArgumentsDeltaEvent struct { + Delta string `json:"delta"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseMcpCallArgumentsDeltaEvent) EventType() EventType { + return EventTypeResponseMcpCallArgumentsDelta +} + +type ResponseMcpCallArgumentsDoneEvent struct { + Arguments string `json:"arguments"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseMcpCallArgumentsDoneEvent) EventType() EventType { + return EventTypeResponseMcpCallArgumentsDone +} + +type ResponseMcpCallInProgressEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseMcpCallInProgressEvent) EventType() EventType { + return EventTypeResponseMcpCallInProgress +} + +type ResponseMcpCallCompletedEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int 
`json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseMcpCallCompletedEvent) EventType() EventType { + return EventTypeResponseMcpCallCompleted +} + +type ResponseMcpCallFailedEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseMcpCallFailedEvent) EventType() EventType { + return EventTypeResponseMcpCallFailed +} + +type ResponseMcpListToolsInProgressEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseMcpListToolsInProgressEvent) EventType() EventType { + return EventTypeResponseMcpListToolsInProgress +} + +type ResponseMcpListToolsCompletedEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseMcpListToolsCompletedEvent) EventType() EventType { + return EventTypeResponseMcpListToolsCompleted +} + +type ResponseMcpListToolsFailedEvent struct { + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseMcpListToolsFailedEvent) EventType() EventType { + return EventTypeResponseMcpListToolsFailed +} + +type ResponseCustomToolCallInputDeltaEvent struct { + Delta string `json:"delta"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseCustomToolCallInputDeltaEvent) EventType() EventType { + return EventTypeResponseCustomToolCallInputDelta +} + +type ResponseCustomToolCallInputDoneEvent struct { + Input string `json:"input"` + ItemID string `json:"item_id"` + OutputIndex int `json:"output_index"` + SequenceNumber int 
`json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseCustomToolCallInputDoneEvent) EventType() EventType { + return EventTypeResponseCustomToolCallInputDone +} + +type ResponseAudioDeltaEvent struct { + Delta string `json:"delta"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseAudioDeltaEvent) EventType() EventType { return EventTypeResponseAudioDelta } + +type ResponseAudioDoneEvent struct { + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseAudioDoneEvent) EventType() EventType { return EventTypeResponseAudioDone } + +type ResponseAudioTranscriptDeltaEvent struct { + Delta string `json:"delta"` + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseAudioTranscriptDeltaEvent) EventType() EventType { + return EventTypeResponseAudioTranscriptDelta +} + +type ResponseAudioTranscriptDoneEvent struct { + SequenceNumber int `json:"sequence_number"` + Type EventType `json:"type"` +} + +func (e ResponseAudioTranscriptDoneEvent) EventType() EventType { + return EventTypeResponseAudioTranscriptDone +} + +func NewResponseBuilder() *ResponseBuilder { + return &ResponseBuilder{ + Response: &Response{}, + } +} + +type ResponseBuilder struct { + Response *Response +} + +func (builder *ResponseBuilder) Build() *Response { + return builder.Response +} + +func (builder *ResponseBuilder) Add(event Event) { + if event.EventType() == EventTypeError { + panic("error event") + } + if event.EventType() == EventTypeResponseCompleted { + if completedResponse, ok := event.(*ResponseCompletedEvent); ok { + builder.Response = completedResponse.Response + } + } +} diff --git a/pkg/datatypes/openai/openai_test.go b/pkg/datatypes/openai/openai_test.go new file mode 100644 index 0000000..5ed3623 --- /dev/null +++ b/pkg/datatypes/openai/openai_test.go @@ -0,0 +1,382 @@ +package openai + +import ( + "encoding/json" + "reflect" + 
"testing" +) + +func assertJSONEqual(t *testing.T, got []byte, want string) { + t.Helper() + var gotAny any + if err := json.Unmarshal(got, &gotAny); err != nil { + t.Fatalf("unexpected marshal error: %v", err) + } + var wantAny any + if err := json.Unmarshal([]byte(want), &wantAny); err != nil { + t.Fatalf("unexpected want json error: %v", err) + } + if !reflect.DeepEqual(gotAny, wantAny) { + t.Fatalf("json mismatch: %s vs %s", string(got), want) + } +} + +func TestResponseConversationUnmarshal(t *testing.T) { + var conv ResponseConversation + if err := json.Unmarshal([]byte(`"abc"`), &conv); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if conv.ID != "abc" { + t.Fatalf("unexpected id: %s", conv.ID) + } + conv = ResponseConversation{} + if err := json.Unmarshal([]byte(`{"id":"xyz"}`), &conv); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if conv.ID != "xyz" { + t.Fatalf("unexpected id: %s", conv.ID) + } + if err := json.Unmarshal([]byte(`123`), &conv); err == nil { + t.Fatalf("expected error") + } +} + +func TestResponseInputParamUnmarshal(t *testing.T) { + var param ResponseInputParam + if err := json.Unmarshal([]byte(`"hello"`), &param); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(param) != 1 || param[0].Message == nil { + t.Fatalf("unexpected param: %#v", param) + } + if param[0].Message.Content[0].Text.Text != "hello" { + t.Fatalf("unexpected text: %s", param[0].Message.Content[0].Text.Text) + } + if err := json.Unmarshal([]byte(`123`), &param); err == nil { + t.Fatalf("expected error") + } +} + +func TestResponseInputItemParamMarshal(t *testing.T) { + msg := newResponseMessage("hi") + input := ResponseInputItemParam{Message: msg} + got, err := json.Marshal(&input) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + expected, err := json.Marshal(msg) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !reflect.DeepEqual(got, expected) { + t.Fatalf("unexpected marshal result: %s", string(got)) + 
} + empty := ResponseInputItemParam{} + nullJSON, err := json.Marshal(&empty) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if string(nullJSON) != "null" { + t.Fatalf("expected null, got %s", string(nullJSON)) + } +} + +func TestResponseInputItemParamUnmarshal(t *testing.T) { + var s ResponseInputItemParam + if err := json.Unmarshal([]byte(`"hello"`), &s); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if s.Message == nil || s.Message.Content[0].Text.Text != "hello" { + t.Fatalf("unexpected message: %#v", s.Message) + } + var call ResponseInputItemParam + callJSON := `{"type":"function_call","id":"1","call_id":"c","name":"f","arguments":"{}","status":"completed"}` + if err := json.Unmarshal([]byte(callJSON), &call); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if call.FunctionCall == nil || call.FunctionCall.Name != "f" { + t.Fatalf("unexpected function call: %#v", call.FunctionCall) + } + if err := json.Unmarshal([]byte(`{"type":"unknown"}`), &call); err == nil { + t.Fatalf("expected error") + } +} + +func TestResponseMessageContentJSON(t *testing.T) { + var content ResponseMessageContent + if err := json.Unmarshal([]byte(`"hello"`), &content); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if content.Text == nil || content.Text.Type != ResponseMessageContentTypeInputText { + t.Fatalf("unexpected content: %#v", content) + } + imageJSON := `{"type":"input_image","image_url":"u","detail":"high"}` + if err := json.Unmarshal([]byte(imageJSON), &content); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if content.Image == nil || content.Image.ImageUrl != "u" { + t.Fatalf("unexpected image: %#v", content.Image) + } + textContent := ResponseMessageContent{Text: &ResponseMessageContentText{Type: ResponseMessageContentTypeOutputText, Text: "x"}} + got, err := json.Marshal(&textContent) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + assertJSONEqual(t, got, 
`{"type":"output_text","text":"x"}`) + if err := json.Unmarshal([]byte(`{"type":"unknown"}`), &content); err == nil { + t.Fatalf("expected error") + } +} + +func TestResponseAnnotationJSON(t *testing.T) { + var annotation ResponseAnnotation + jsonData := `{"type":"file_citation","file_id":"f","filename":"name","index":2}` + if err := json.Unmarshal([]byte(jsonData), &annotation); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if annotation.FileCitation == nil || annotation.FileCitation.Filename != "name" { + t.Fatalf("unexpected citation: %#v", annotation.FileCitation) + } + got, err := json.Marshal(&annotation) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + assertJSONEqual(t, got, jsonData) + if err := json.Unmarshal([]byte(`123`), &annotation); err == nil { + t.Fatalf("expected error") + } +} + +func TestResponseFormatJSON(t *testing.T) { + var format ResponseFormat + textJSON := `{"type":"text"}` + if err := json.Unmarshal([]byte(textJSON), &format); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if format.Text == nil || format.Text.Type != ResponseFormatTypeText { + t.Fatalf("unexpected text format: %#v", format.Text) + } + format = ResponseFormat{} + schemaJSON := `{"type":"json_schema","name":"s","description":"d","strict":true,"schema":{"a":1}}` + if err := json.Unmarshal([]byte(schemaJSON), &format); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if format.JSONSchema == nil || format.JSONSchema.Name != "s" { + t.Fatalf("unexpected schema: %#v", format.JSONSchema) + } + got, err := json.Marshal(&format) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + assertJSONEqual(t, got, schemaJSON) + if err := json.Unmarshal([]byte(`123`), &format); err == nil { + t.Fatalf("expected error") + } +} + +func TestResponseToolJSON(t *testing.T) { + var tool ResponseToolParam + functionJSON := `{"type":"function","name":"foo"}` + if err := json.Unmarshal([]byte(functionJSON), &tool); err != nil { + 
t.Fatalf("unexpected error: %v", err) + } + if tool.Function == nil || tool.Function.Name != "foo" { + t.Fatalf("unexpected function: %#v", tool.Function) + } + got, err := json.Marshal(&tool) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + assertJSONEqual(t, got, functionJSON) + if err := json.Unmarshal([]byte(`{"type":"unknown"}`), &tool); err == nil { + t.Fatalf("expected error") + } +} + +func TestResponseToolChoiceJSON(t *testing.T) { + var choice ResponseToolChoice + if err := json.Unmarshal([]byte(`"auto"`), &choice); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if choice.Option != ChatCompletionToolChoiceOptionAuto { + t.Fatalf("unexpected option: %s", choice.Option) + } + choice = ResponseToolChoice{} + allowedJSON := `{"type":"allowed_tools","mode":"required","tools":[{"name":"x"}]}` + if err := json.Unmarshal([]byte(allowedJSON), &choice); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if choice.Allowed == nil || choice.Allowed.Mode != ChatCompletionToolChoiceOptionRequired { + t.Fatalf("unexpected allowed: %#v", choice.Allowed) + } + got, err := json.Marshal(&choice) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + assertJSONEqual(t, got, allowedJSON) + if err := json.Unmarshal([]byte(`{"type":"unknown"}`), &choice); err == nil { + t.Fatalf("expected error") + } +} + +func TestResponseMCPToolParamAllowedToolsUnmarshal(t *testing.T) { + var allowed ResponseMCPToolParamAllowedTools + if err := json.Unmarshal([]byte(`["a","b"]`), &allowed); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !reflect.DeepEqual(allowed.List, []string{"a", "b"}) { + t.Fatalf("unexpected list: %#v", allowed.List) + } + objectJSON := `{"read_only":true,"tool_names":["x"]}` + if err := json.Unmarshal([]byte(objectJSON), &allowed); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if allowed.Filter == nil || !allowed.Filter.ReadOnly { + t.Fatalf("unexpected filter: %#v", allowed.Filter) + } + if err := 
json.Unmarshal([]byte(`123`), &allowed); err == nil { + t.Fatalf("expected error") + } +} + +func TestResponseMCPToolParamRequireApprovalUnmarshal(t *testing.T) { + var require ResponseMCPToolParamRequireApproval + if err := json.Unmarshal([]byte(`"always"`), &require); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if require.Type != ResponseMCPToolParamRequireApprovalTypeAlways { + t.Fatalf("unexpected type: %s", require.Type) + } + objectJSON := `{"always":{"read_only":true}}` + if err := json.Unmarshal([]byte(objectJSON), &require); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if require.Filter == nil || require.Filter.Always == nil || !require.Filter.Always.ReadOnly { + t.Fatalf("unexpected filter: %#v", require.Filter) + } + if err := json.Unmarshal([]byte(`123`), &require); err == nil { + t.Fatalf("expected error") + } +} + +func TestResponseCodeInterpreterToolParamUnmarshal(t *testing.T) { + var param ResponseCodeInterpreterToolParam + if err := json.Unmarshal([]byte(`"code_interpreter"`), &param); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if param.Type != ResponseToolCallTypeCodeInterpreter { + t.Fatalf("unexpected type: %s", param.Type) + } + param = ResponseCodeInterpreterToolParam{} + objectJSON := `{"Text":"run","Options":{"type":"auto","file_ids":["1"]}}` + if err := json.Unmarshal([]byte(objectJSON), &param); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if param.Container == nil || param.Container.Text != "run" { + t.Fatalf("unexpected container: %#v", param.Container) + } + if err := json.Unmarshal([]byte(`123`), &param); err == nil { + t.Fatalf("expected error") + } +} + +func TestResponseJSONSchemaObjectJSON(t *testing.T) { + original := []byte(`{"a":1}`) + var obj ResponseJSONSchemaObject + if err := json.Unmarshal(original, &obj); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !reflect.DeepEqual([]byte(obj), original) { + t.Fatalf("unexpected data: %s", string(obj)) + } + marshal, err 
:= json.Marshal(obj) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + assertJSONEqual(t, marshal, `{"a":1}`) +} + +func TestEvent_Unmarshal_WebSearchSearching(t *testing.T) { + var e ResponseWebSearchCallSearchingEvent + jsonData := `{"type":"response.web_search_call.searching","item_id":"i","output_index":1,"sequence_number":2}` + if err := json.Unmarshal([]byte(jsonData), &e); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if e.ItemID != "i" || e.OutputIndex != 1 || e.SequenceNumber != 2 || e.EventType() != EventTypeResponseWebSearchCallSearching { + t.Fatalf("unexpected event: %#v", e) + } +} + +func TestEvent_Unmarshal_ContentPartAdded(t *testing.T) { + var e ResponseContentPartAddedEvent + jsonData := `{"type":"response.content_part.added","content_index":0,"item_id":"it","output_index":0,"part":{"type":"output_text","text":"x"},"sequence_number":1}` + if err := json.Unmarshal([]byte(jsonData), &e); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if e.Part == nil || e.Part.Type != ResponseMessageContentTypeOutputText || e.Part.Text != "x" { + t.Fatalf("unexpected part: %#v", e.Part) + } +} + +func TestEvent_Unmarshal_TextDelta(t *testing.T) { + var e ResponseTextDeltaEvent + jsonData := `{"type":"response.output_text.delta","content_index":0,"delta":"he","item_id":"it","logprobs":[],"output_index":0,"sequence_number":1}` + if err := json.Unmarshal([]byte(jsonData), &e); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if e.Delta != "he" || e.ContentIndex != 0 || e.EventType() != EventTypeResponseOutputTextDelta { + t.Fatalf("unexpected event: %#v", e) + } +} + +func TestEvent_Unmarshal_OutputItemAdded_Message(t *testing.T) { + var e ResponseOutputItemAddedEvent + jsonData := `{"type":"response.output_item.added","item":{"type":"message","id":"m","role":"assistant","content":[{"type":"output_text","text":"hello"}]},"output_index":0,"sequence_number":1}` + if err := json.Unmarshal([]byte(jsonData), &e); err != nil 
{ + t.Fatalf("unexpected error: %v", err) + } + if e.Item == nil || e.Item.Message == nil || e.Item.Message.Role != ResponseMessageRoleAssistant || e.Item.Message.Content[0].Text.Text != "hello" { + t.Fatalf("unexpected item: %#v", e.Item) + } +} + +func TestResponseBuilder_BuildInitial(t *testing.T) { + b := NewResponseBuilder() + if b == nil || b.Build() == nil { + t.Fatalf("nil builder or response") + } + if b.Build() != b.Response { + t.Fatalf("build not returning internal response") + } +} + +func TestResponseBuilder_AddCompletedSetsResponse(t *testing.T) { + b := NewResponseBuilder() + initial := b.Build() + r := &Response{ID: "r1"} + b.Add(&ResponseCompletedEvent{Response: r, SequenceNumber: 1, Type: EventTypeResponseCompleted}) + got := b.Build() + if got != r || got == initial { + t.Fatalf("response not set to completed response") + } +} + +func TestResponseBuilder_AddErrorPanics(t *testing.T) { + b := NewResponseBuilder() + defer func() { + if recover() == nil { + t.Fatalf("expected panic") + } + }() + b.Add(ResponseErrorEvent{Message: "x", SequenceNumber: 1, Type: EventTypeError}) +} + +func TestResponseBuilder_AddNonCompletedDoesNotReplace(t *testing.T) { + b := NewResponseBuilder() + orig := b.Build() + r := &Response{ID: "x"} + b.Add(&ResponseInProgressEvent{Response: r, SequenceNumber: 1, Type: EventTypeResponseInProgress}) + if b.Build() != orig { + t.Fatalf("non-completed event should not replace response") + } +} diff --git a/pkg/profile/config.go b/pkg/profile/config.go index 3e1ec96..7da3d05 100644 --- a/pkg/profile/config.go +++ b/pkg/profile/config.go @@ -60,6 +60,7 @@ func LoadFromViper(v *viper.Viper) (*ProfileManager, error) { Options: loadOptionsConfig(v, delimiter.ViperKey(key, "options")), Anthropic: loadAnthropicConfig(v, delimiter.ViperKey(key, "anthropic")), OpenRouter: loadOpenRouterConfig(v, delimiter.ViperKey(key, "openrouter")), + OpenAI: loadOpenAIConfig(v, delimiter.ViperKey(key, "openai")), } // Expand environment variables in 
API keys and URLs if p.Anthropic != nil { @@ -71,6 +72,10 @@ func LoadFromViper(v *viper.Viper) (*ProfileManager, error) { p.OpenRouter.APIKey = ExpandEnv(p.OpenRouter.APIKey) p.OpenRouter.BaseURL = ExpandEnv(p.OpenRouter.BaseURL) } + if p.OpenAI != nil { + p.OpenAI.APIKey = ExpandEnv(p.OpenAI.APIKey) + p.OpenAI.BaseURL = ExpandEnv(p.OpenAI.BaseURL) + } pm.AddProfile(p) } return pm, nil @@ -176,6 +181,16 @@ func loadOpenRouterConfig(v *viper.Viper, key string) *OpenRouterConfig { } } +func loadOpenAIConfig(v *viper.Viper, key string) *OpenAIConfig { + if !v.IsSet(key) { + return nil + } + return &OpenAIConfig{ + BaseURL: v.GetString(delimiter.ViperKey(key, "base_url")), + APIKey: v.GetString(delimiter.ViperKey(key, "api_key")), + } +} + // GetHTTPConfig returns the HTTP configuration from viper. func GetHTTPConfig(v *viper.Viper) *HTTPConfig { return &HTTPConfig{ @@ -379,3 +394,19 @@ func (o *OpenRouterConfig) GetAllowedProviders() []string { } return o.AllowedProviders } + +// GetBaseURL safely gets the OpenAI base URL with a default. +func (o *OpenAIConfig) GetBaseURL() string { + if o == nil || o.BaseURL == "" { + return "https://api.openai.com" + } + return strings.TrimSuffix(o.BaseURL, "/") +} + +// GetAPIKey safely gets the OpenAI API key. +func (o *OpenAIConfig) GetAPIKey() string { + if o == nil { + return "" + } + return o.APIKey +} diff --git a/pkg/profile/profile.go b/pkg/profile/profile.go index 93b802f..be1865e 100644 --- a/pkg/profile/profile.go +++ b/pkg/profile/profile.go @@ -20,6 +20,7 @@ type Profile struct { Options *OptionsConfig `yaml:"options" json:"options" mapstructure:"options"` Anthropic *AnthropicConfig `yaml:"anthropic" json:"anthropic" mapstructure:"anthropic"` OpenRouter *OpenRouterConfig `yaml:"openrouter" json:"openrouter" mapstructure:"openrouter"` + OpenAI *OpenAIConfig `yaml:"openai" json:"openai" mapstructure:"openai"` } // OptionsConfig contains general options for request processing. 
@@ -62,6 +63,12 @@ type OpenRouterConfig struct { AllowedProviders []string `yaml:"allowed_providers" json:"allowed_providers" mapstructure:"allowed_providers"` } +// OpenAIConfig contains OpenAI-specific configuration. +type OpenAIConfig struct { + BaseURL string `yaml:"base_url" json:"base_url" mapstructure:"base_url"` + APIKey string `yaml:"api_key" json:"api_key" mapstructure:"api_key"` +} + // ProfileManager manages a collection of profiles and provides model-to-profile matching. type ProfileManager struct { profiles []*Profile // profiles in order of priority diff --git a/pkg/provider/options.go b/pkg/provider/options.go index cb90f94..0a36fe7 100644 --- a/pkg/provider/options.go +++ b/pkg/provider/options.go @@ -44,6 +44,16 @@ func getConfigFromContext(ctx context.Context, keys ...string) string { case "base_url": return prof.OpenRouter.GetBaseURL() } + case "openai": + if prof.OpenAI == nil { + return "" + } + switch key { + case "api_key": + return prof.OpenAI.GetAPIKey() + case "base_url": + return prof.OpenAI.GetBaseURL() + } } return "" } diff --git a/pkg/provider/provider.go b/pkg/provider/provider.go index 6d486df..ff20936 100644 --- a/pkg/provider/provider.go +++ b/pkg/provider/provider.go @@ -6,6 +6,7 @@ import ( "net/http" "github.com/x5iu/claude-code-adapter/pkg/datatypes/anthropic" + "github.com/x5iu/claude-code-adapter/pkg/datatypes/openai" "github.com/x5iu/claude-code-adapter/pkg/datatypes/openrouter" ) @@ -47,6 +48,17 @@ type Provider interface { opts ...RequestOption, ) (*anthropic.Usage, error) + // CreateOpenAIModelResponse POST retry=2 options(opts) {{ get_config .ctx "openai" "base_url" }}/v1/responses + // Content-Type: application/json + // Authorization: Bearer {{ get_config .ctx "openai" "api_key" }} + // + // {{ json_encode .req }} + CreateOpenAIModelResponse( + ctx context.Context, + req *openai.CreateModelResponseRequest, + opts ...RequestOption, + ) (openai.ResponseStream, http.Header, error) + // CreateOpenRouterChatCompletion 
POST retry=2 options(opts) {{ get_config .ctx "openrouter" "base_url" }}/v1/chat/completions // Content-Type: application/json // Authorization: Bearer {{ get_config .ctx "openrouter" "api_key" }} diff --git a/pkg/provider/provider_impl.go b/pkg/provider/provider_impl.go index afd9ddd..e8c19e4 100644 --- a/pkg/provider/provider_impl.go +++ b/pkg/provider/provider_impl.go @@ -14,6 +14,7 @@ import ( "text/template" "github.com/x5iu/claude-code-adapter/pkg/datatypes/anthropic" + "github.com/x5iu/claude-code-adapter/pkg/datatypes/openai" "github.com/x5iu/claude-code-adapter/pkg/datatypes/openrouter" "github.com/x5iu/claude-code-adapter/pkg/utils" __rt "github.com/x5iu/defc/runtime" @@ -23,6 +24,7 @@ const ( ProviderMethodMakeAnthropicMessagesRequest = "MakeAnthropicMessagesRequest" ProviderMethodGenerateAnthropicMessage = "GenerateAnthropicMessage" ProviderMethodCountAnthropicTokens = "CountAnthropicTokens" + ProviderMethodCreateOpenAIModelResponse = "CreateOpenAIModelResponse" ProviderMethodCreateOpenRouterChatCompletion = "CreateOpenRouterChatCompletion" ) @@ -39,6 +41,8 @@ var ( headerProviderTmplGenerateAnthropicMessage = template.Must(template.New("HeaderGenerateAnthropicMessage").Funcs(template.FuncMap{"get_config": getConfigFromContext, "json_encode": utils.JSONEncode}).Parse("Content-Type: application/json\r\nX-API-Key: {{ get_config .ctx \"anthropic\" \"api_key\" }}\r\nAnthropic-Version: {{ get_config .ctx \"anthropic\" \"version\" }}\r\n\r\n{{ json_encode .req }}")) addrProviderTmplCountAnthropicTokens = template.Must(template.New("AddressCountAnthropicTokens").Funcs(template.FuncMap{"get_config": getConfigFromContext, "json_encode": utils.JSONEncode}).Parse("{{ get_config .ctx \"anthropic\" \"base_url\" }}/v1/messages/count_tokens")) headerProviderTmplCountAnthropicTokens = template.Must(template.New("HeaderCountAnthropicTokens").Funcs(template.FuncMap{"get_config": getConfigFromContext, "json_encode": utils.JSONEncode}).Parse("Content-Type: 
application/json\r\nX-API-Key: {{ get_config .ctx \"anthropic\" \"api_key\" }}\r\nAnthropic-Version: {{ get_config .ctx \"anthropic\" \"version\" }}\r\n\r\n{{ json_encode .req }}")) + addrProviderTmplCreateOpenAIModelResponse = template.Must(template.New("AddressCreateOpenAIModelResponse").Funcs(template.FuncMap{"get_config": getConfigFromContext, "json_encode": utils.JSONEncode}).Parse("{{ get_config .ctx \"openai\" \"base_url\" }}/v1/responses")) + headerProviderTmplCreateOpenAIModelResponse = template.Must(template.New("HeaderCreateOpenAIModelResponse").Funcs(template.FuncMap{"get_config": getConfigFromContext, "json_encode": utils.JSONEncode}).Parse("Content-Type: application/json\r\nAuthorization: Bearer {{ get_config .ctx \"openai\" \"api_key\" }}\r\n\r\n{{ json_encode .req }}")) addrProviderTmplCreateOpenRouterChatCompletion = template.Must(template.New("AddressCreateOpenRouterChatCompletion").Funcs(template.FuncMap{"get_config": getConfigFromContext, "json_encode": utils.JSONEncode}).Parse("{{ get_config .ctx \"openrouter\" \"base_url\" }}/v1/chat/completions")) headerProviderTmplCreateOpenRouterChatCompletion = template.Must(template.New("HeaderCreateOpenRouterChatCompletion").Funcs(template.FuncMap{"get_config": getConfigFromContext, "json_encode": utils.JSONEncode}).Parse("Content-Type: application/json\r\nAuthorization: Bearer {{ get_config .ctx \"openrouter\" \"api_key\" }}\r\n\r\n{{ json_encode .req }}")) ) @@ -435,6 +439,132 @@ func (__imp *implProvider) __CountAnthropicTokens(ctx context.Context, req *anth return v0CountAnthropicTokens, nil } +func (__imp *implProvider) CreateOpenAIModelResponse(ctx context.Context, req *openai.CreateModelResponseRequest, opts ...RequestOption) (openai.ResponseStream, http.Header, error) { + __maxRetry := 2 + + __retryCount := 0 +__RETRY: + var ( + v0CreateOpenAIModelResponse openai.ResponseStream + v1CreateOpenAIModelResponse http.Header + errCreateOpenAIModelResponse error + ) + + v0CreateOpenAIModelResponse, 
v1CreateOpenAIModelResponse, errCreateOpenAIModelResponse = __imp.__CreateOpenAIModelResponse(ctx, req, opts...) + if errCreateOpenAIModelResponse != nil { + if __retryCount < __maxRetry { + if __getResponse, ok := errCreateOpenAIModelResponse.(__rt.FutureResponseError); ok { + __getResponse.Response().Body.Close() + } + __retryCount++ + goto __RETRY + } + } + return v0CreateOpenAIModelResponse, v1CreateOpenAIModelResponse, errCreateOpenAIModelResponse +} + +func (__imp *implProvider) __CreateOpenAIModelResponse(ctx context.Context, req *openai.CreateModelResponseRequest, opts ...RequestOption) (openai.ResponseStream, http.Header, error) { + + addrCreateOpenAIModelResponse := __rt.GetBuffer() + defer __rt.PutBuffer(addrCreateOpenAIModelResponse) + defer addrCreateOpenAIModelResponse.Reset() + + headerCreateOpenAIModelResponse := __rt.GetBuffer() + defer __rt.PutBuffer(headerCreateOpenAIModelResponse) + defer headerCreateOpenAIModelResponse.Reset() + + var ( + v0CreateOpenAIModelResponse = __rt.New[openai.ResponseStream]() + v1CreateOpenAIModelResponse = __rt.New[http.Header]() + ) + + var ( + errCreateOpenAIModelResponse error + httpResponseCreateOpenAIModelResponse *http.Response + responseCreateOpenAIModelResponse __rt.FutureResponse = __imp.responseHandler() + ) + + if errCreateOpenAIModelResponse = addrProviderTmplCreateOpenAIModelResponse.Execute(addrCreateOpenAIModelResponse, map[string]any{ + "ctx": ctx, + "req": req, + "opts": opts, + }); errCreateOpenAIModelResponse != nil { + return v0CreateOpenAIModelResponse, v1CreateOpenAIModelResponse, fmt.Errorf("error building 'CreateOpenAIModelResponse' url: %w", errCreateOpenAIModelResponse) + } + + if errCreateOpenAIModelResponse = headerProviderTmplCreateOpenAIModelResponse.Execute(headerCreateOpenAIModelResponse, map[string]any{ + "ctx": ctx, + "req": req, + "opts": opts, + }); errCreateOpenAIModelResponse != nil { + return v0CreateOpenAIModelResponse, v1CreateOpenAIModelResponse, fmt.Errorf("error building 
'CreateOpenAIModelResponse' header: %w", errCreateOpenAIModelResponse) + } + bufReaderCreateOpenAIModelResponse := bufio.NewReader(headerCreateOpenAIModelResponse) + mimeHeaderCreateOpenAIModelResponse, errCreateOpenAIModelResponse := textproto.NewReader(bufReaderCreateOpenAIModelResponse).ReadMIMEHeader() + if errCreateOpenAIModelResponse != nil { + return v0CreateOpenAIModelResponse, v1CreateOpenAIModelResponse, fmt.Errorf("error reading 'CreateOpenAIModelResponse' header: %w", errCreateOpenAIModelResponse) + } + + urlCreateOpenAIModelResponse := addrCreateOpenAIModelResponse.String() + requestBodyCreateOpenAIModelResponse, errCreateOpenAIModelResponse := io.ReadAll(bufReaderCreateOpenAIModelResponse) + if errCreateOpenAIModelResponse != nil { + return v0CreateOpenAIModelResponse, v1CreateOpenAIModelResponse, fmt.Errorf("error reading 'CreateOpenAIModelResponse' request body: %w", errCreateOpenAIModelResponse) + } + requestCreateOpenAIModelResponse, errCreateOpenAIModelResponse := http.NewRequestWithContext(ctx, "POST", urlCreateOpenAIModelResponse, bytes.NewReader(requestBodyCreateOpenAIModelResponse)) + if errCreateOpenAIModelResponse != nil { + return v0CreateOpenAIModelResponse, v1CreateOpenAIModelResponse, fmt.Errorf("error building 'CreateOpenAIModelResponse' request: %w", errCreateOpenAIModelResponse) + } + + for kCreateOpenAIModelResponse, vvCreateOpenAIModelResponse := range mimeHeaderCreateOpenAIModelResponse { + for _, vCreateOpenAIModelResponse := range vvCreateOpenAIModelResponse { + requestCreateOpenAIModelResponse.Header.Add(kCreateOpenAIModelResponse, vCreateOpenAIModelResponse) + } + } + + requestCreateOpenAIModelResponse.Header.Add("Accept-Encoding", "gzip") + + for _, opt := range opts { + if opt != nil { + opt(requestCreateOpenAIModelResponse) + } + } + + httpResponseCreateOpenAIModelResponse, errCreateOpenAIModelResponse = http.DefaultClient.Do(requestCreateOpenAIModelResponse) + + if errCreateOpenAIModelResponse != nil { + return 
v0CreateOpenAIModelResponse, v1CreateOpenAIModelResponse, fmt.Errorf("error sending 'CreateOpenAIModelResponse' request: %w", errCreateOpenAIModelResponse) + } + + func() { + for _, contentEncoding := range httpResponseCreateOpenAIModelResponse.Header.Values("Content-Encoding") { + if commaIndex := strings.IndexByte(contentEncoding, ','); commaIndex >= 0 { + contentEncoding = contentEncoding[:commaIndex] + } + if strings.TrimSpace(contentEncoding) == "gzip" { + httpResponseCreateOpenAIModelResponse.Body = &__rt.GzipReadCloser{R: httpResponseCreateOpenAIModelResponse.Body} + return + } + } + }() + + if errCreateOpenAIModelResponse = responseCreateOpenAIModelResponse.FromResponse("CreateOpenAIModelResponse", httpResponseCreateOpenAIModelResponse); errCreateOpenAIModelResponse != nil { + return v0CreateOpenAIModelResponse, v1CreateOpenAIModelResponse, fmt.Errorf("error converting 'CreateOpenAIModelResponse' response: %w", errCreateOpenAIModelResponse) + } + + addrCreateOpenAIModelResponse.Reset() + headerCreateOpenAIModelResponse.Reset() + + if errCreateOpenAIModelResponse = responseCreateOpenAIModelResponse.Err(); errCreateOpenAIModelResponse != nil { + return v0CreateOpenAIModelResponse, v1CreateOpenAIModelResponse, fmt.Errorf("error returned from 'CreateOpenAIModelResponse' response: %w", errCreateOpenAIModelResponse) + } + + if errCreateOpenAIModelResponse = responseCreateOpenAIModelResponse.ScanValues(&v0CreateOpenAIModelResponse, &v1CreateOpenAIModelResponse); errCreateOpenAIModelResponse != nil { + return v0CreateOpenAIModelResponse, v1CreateOpenAIModelResponse, fmt.Errorf("error scanning value from 'CreateOpenAIModelResponse' response: %w", errCreateOpenAIModelResponse) + } + + return v0CreateOpenAIModelResponse, v1CreateOpenAIModelResponse, nil +} + func (__imp *implProvider) CreateOpenRouterChatCompletion(ctx context.Context, req *openrouter.CreateChatCompletionRequest, opts ...RequestOption) (openrouter.ChatCompletionStream, http.Header, error) { __maxRetry 
:= 2 diff --git a/pkg/provider/provider_test.go b/pkg/provider/provider_test.go index 700aa27..17019c1 100644 --- a/pkg/provider/provider_test.go +++ b/pkg/provider/provider_test.go @@ -10,6 +10,7 @@ import ( "github.com/samber/lo" "github.com/x5iu/claude-code-adapter/pkg/datatypes/anthropic" + "github.com/x5iu/claude-code-adapter/pkg/datatypes/openai" "github.com/x5iu/claude-code-adapter/pkg/datatypes/openrouter" "github.com/x5iu/claude-code-adapter/pkg/profile" ) @@ -25,6 +26,10 @@ func testCtxFromEnv(t *testing.T) context.Context { if openrouterBase == "" { openrouterBase = "https://openrouter.ai/api" } + openaiBase := os.Getenv("OPENAI_BASE_URL") + if openaiBase == "" { + openaiBase = "https://api.openai.com" + } p := &profile.Profile{ Name: "test", Models: []string{"*"}, @@ -38,6 +43,10 @@ func testCtxFromEnv(t *testing.T) context.Context { BaseURL: openrouterBase, APIKey: os.Getenv("OPENROUTER_API_KEY"), }, + OpenAI: &profile.OpenAIConfig{ + BaseURL: openaiBase, + APIKey: os.Getenv("OPENAI_API_KEY"), + }, } return profile.WithProfile(context.Background(), p) } @@ -982,3 +991,352 @@ func truncateString(s string, maxLength int) string { } return s[:maxLength] + "..." 
} + +func TestCreateOpenAIModelResponse_Basic(t *testing.T) { + provider := NewProvider() + ctx := testCtxFromEnv(t) + + req := &openai.CreateModelResponseRequest{ + Model: "gpt-4o", + Input: openai.TextInput("Hello, how are you?"), + } + + // Call the OpenAI API + stream, header, err := provider.CreateOpenAIModelResponse(ctx, req) + if err != nil { + t.Fatalf("CreateOpenAIModelResponse failed: %v", err) + } + + // Verify header is not nil + if header == nil { + t.Fatal("HTTP header is nil") + } + + // Process the stream and verify event format + var events []openai.Event + var hasContent bool + var hasResponseCreated bool + var hasResponseCompleted bool + + for event, streamErr := range stream { + if streamErr != nil { + t.Fatalf("Stream error: %v", streamErr) + } + + if event == nil { + t.Fatal("Received nil event") + } + + // Validate event structure + validateOpenAIEvent(t, event) + events = append(events, event) + + // Check event types + switch event.EventType() { + case openai.EventTypeResponseCreated: + hasResponseCreated = true + case openai.EventTypeResponseCompleted: + hasResponseCompleted = true + case openai.EventTypeResponseOutputTextDelta: + hasContent = true + } + } + + // Verify we received events + if len(events) == 0 { + t.Fatal("No events received from stream") + } + + // Verify we received expected event types + if !hasResponseCreated { + t.Error("Did not receive response.created event") + } + + if !hasResponseCompleted { + t.Error("Did not receive response.completed event") + } + + if !hasContent { + t.Error("Did not receive text content events") + } + + t.Logf("Successfully received %d events from OpenAI API", len(events)) +} + +func TestCreateOpenAIModelResponse_DataFormat(t *testing.T) { + provider := NewProvider() + ctx := testCtxFromEnv(t) + + req := &openai.CreateModelResponseRequest{ + Model: "gpt-4o", + Input: openai.TextInput("What is 2+2?"), + } + + stream, header, err := provider.CreateOpenAIModelResponse(ctx, req) + if err != nil { 
+ t.Fatalf("API call failed: %v", err) + } + + if header == nil { + t.Fatal("Header should not be nil") + } + + // Use ResponseBuilder to validate the complete response structure + builder := openai.NewResponseBuilder() + + for event, streamErr := range stream { + if streamErr != nil { + t.Fatalf("Stream error: %v", streamErr) + } + + // Validate event format + validateOpenAIEvent(t, event) + + // Add to builder + builder.Add(event) + } + + // Build final response and validate + response := builder.Build() + validateOpenAIResponse(t, response) + + t.Logf("Response ID: %s, Model: %s, Status: %s", response.ID, response.Model, response.Status) +} + +func TestCreateOpenAIModelResponse_WithReasoning(t *testing.T) { + provider := NewProvider() + ctx := testCtxFromEnv(t) + + // Use a question that should trigger reasoning with o1/o3 models + req := &openai.CreateModelResponseRequest{ + Model: "o3-mini", + Input: openai.TextInput("Think step by step: If I have 15 apples and I give away 7, then buy 3 more, how many apples do I have?"), + Reasoning: &openai.ResponseReasoning{ + Effort: openai.ResponseReasoningEffortMedium, + Summary: openai.ResponseReasoningSummaryAuto, + }, + } + + stream, header, err := provider.CreateOpenAIModelResponse(ctx, req) + if err != nil { + t.Fatalf("CreateOpenAIModelResponse with reasoning failed: %v", err) + } + + if header == nil { + t.Fatal("Header should not be nil") + } + + var events []openai.Event + var hasReasoningContent bool + var hasTextContent bool + var reasoningDeltas []string + var textDeltas []string + + for event, streamErr := range stream { + if streamErr != nil { + t.Fatalf("Stream error: %v", streamErr) + } + + validateOpenAIEvent(t, event) + events = append(events, event) + + // Check for reasoning and text content + switch e := event.(type) { + case *openai.ResponseReasoningTextDeltaEvent: + if e.Delta != "" { + hasReasoningContent = true + reasoningDeltas = append(reasoningDeltas, e.Delta) + t.Logf("Received reasoning 
delta: %q", truncateString(e.Delta, 50)) + } + case *openai.ResponseReasoningSummaryTextDeltaEvent: + if e.Delta != "" { + hasReasoningContent = true + reasoningDeltas = append(reasoningDeltas, e.Delta) + t.Logf("Received reasoning summary delta: %q", truncateString(e.Delta, 50)) + } + case *openai.ResponseTextDeltaEvent: + if e.Delta != "" { + hasTextContent = true + textDeltas = append(textDeltas, e.Delta) + } + } + } + + // Verify we received events + if len(events) == 0 { + t.Fatal("No events received from stream") + } + + // Verify we received text content (reasoning might not always be exposed) + if !hasTextContent { + t.Error("Should receive text content") + } + + t.Logf("Reasoning validation results:") + t.Logf(" - Has reasoning: %v (%d deltas)", hasReasoningContent, len(reasoningDeltas)) + t.Logf(" - Has text content: %v (%d deltas)", hasTextContent, len(textDeltas)) + + if hasReasoningContent && len(reasoningDeltas) > 0 { + totalReasoningLength := 0 + for _, delta := range reasoningDeltas { + totalReasoningLength += len(delta) + } + t.Logf(" - Total reasoning length: %d characters", totalReasoningLength) + } +} + +func TestCreateOpenAIModelResponse_FunctionCalls(t *testing.T) { + provider := NewProvider() + ctx := testCtxFromEnv(t) + + req := &openai.CreateModelResponseRequest{ + Model: "gpt-4o", + Input: openai.TextInput("What's the weather like in San Francisco?"), + Tools: []*openai.ResponseToolParam{ + { + Function: &openai.ResponseFunctionToolParam{ + Type: openai.ResponseToolCallTypeFunction, + Name: "get_weather", + Description: "Get the current weather in a given location", + Parameters: openai.ResponseJSONSchemaObject(`{"type":"object","properties":{"location":{"type":"string","description":"The city and state, e.g. 
San Francisco, CA"}},"required":["location"]}`), + }, + }, + }, + ToolChoice: &openai.ResponseToolChoice{ + Option: openai.ChatCompletionToolChoiceOptionRequired, + }, + } + + stream, header, err := provider.CreateOpenAIModelResponse(ctx, req) + if err != nil { + t.Fatalf("CreateOpenAIModelResponse with tools failed: %v", err) + } + + if header == nil { + t.Fatal("Header should not be nil") + } + + var events []openai.Event + var hasFunctionCall bool + var functionArguments []string + + for event, streamErr := range stream { + if streamErr != nil { + t.Fatalf("Stream error: %v", streamErr) + } + + validateOpenAIEvent(t, event) + events = append(events, event) + + // Check for function call events + switch e := event.(type) { + case *openai.ResponseFunctionCallArgumentsDeltaEvent: + if e.Delta != "" { + hasFunctionCall = true + functionArguments = append(functionArguments, e.Delta) + } + case *openai.ResponseOutputItemAddedEvent: + if e.Item != nil && e.Item.FunctionCall != nil { + hasFunctionCall = true + t.Logf("Function call added: %s", e.Item.FunctionCall.Name) + } + } + } + + // Verify we received events + if len(events) == 0 { + t.Fatal("No events received from stream") + } + + // Verify we got a function call + if !hasFunctionCall { + t.Error("Expected function call but none was received") + } + + if len(functionArguments) > 0 { + fullArgs := strings.Join(functionArguments, "") + t.Logf("Function arguments: %s", fullArgs) + } + + t.Logf("Successfully received %d events with function call", len(events)) +} + +// validateOpenAIEvent validates the structure of an OpenAI Event +func validateOpenAIEvent(t *testing.T, event openai.Event) { + t.Helper() + + if event == nil { + t.Error("Event should not be nil") + return + } + + // Check that event type is valid + eventType := event.EventType() + switch eventType { + case openai.EventTypeResponseCreated, + openai.EventTypeResponseInProgress, + openai.EventTypeResponseCompleted, + openai.EventTypeResponseFailed, + 
openai.EventTypeResponseIncomplete, + openai.EventTypeResponseQueued, + openai.EventTypeError, + openai.EventTypeResponseOutputItemAdded, + openai.EventTypeResponseOutputItemDone, + openai.EventTypeResponseContentPartAdded, + openai.EventTypeResponseContentPartDone, + openai.EventTypeResponseOutputTextDelta, + openai.EventTypeResponseOutputTextDone, + openai.EventTypeResponseReasoningTextDelta, + openai.EventTypeResponseReasoningTextDone, + openai.EventTypeResponseReasoningSummaryTextDelta, + openai.EventTypeResponseReasoningSummaryTextDone, + openai.EventTypeResponseReasoningSummaryPartAdded, + openai.EventTypeResponseReasoningSummaryPartDone, + openai.EventTypeResponseFunctionCallArgumentsDelta, + openai.EventTypeResponseFunctionCallArgumentsDone, + openai.EventTypeResponseRefusalDelta, + openai.EventTypeResponseRefusalDone, + openai.EventTypeResponseOutputTextAnnotationAdded: + // Valid event type + default: + t.Logf("Unknown or unhandled event type: %s", eventType) + } + + // Validate specific event types + switch e := event.(type) { + case *openai.ResponseCreatedEvent: + if e.Response.ID == "" { + t.Error("ResponseCreated event should have a response ID") + } + case *openai.ResponseCompletedEvent: + if e.Response == nil { + t.Error("ResponseCompleted event should have a response") + } + case *openai.ResponseErrorEvent: + if e.Message == "" { + t.Error("Error event should have a message") + } + } +} + +// validateOpenAIResponse validates the structure of a complete OpenAI Response +func validateOpenAIResponse(t *testing.T, response *openai.Response) { + t.Helper() + + if response == nil { + t.Error("Response should not be nil") + return + } + + if response.ID == "" { + t.Error("Response ID should not be empty") + } + + if response.Model == "" { + t.Error("Response Model should not be empty") + } + + if response.Status == "" { + t.Error("Response Status should not be empty") + } +} diff --git a/pkg/provider/response_handler.go b/pkg/provider/response_handler.go 
index 84c4feb..ad48fda 100644 --- a/pkg/provider/response_handler.go +++ b/pkg/provider/response_handler.go @@ -15,6 +15,7 @@ import ( "time" "github.com/x5iu/claude-code-adapter/pkg/datatypes/anthropic" + "github.com/x5iu/claude-code-adapter/pkg/datatypes/openai" "github.com/x5iu/claude-code-adapter/pkg/datatypes/openrouter" "github.com/x5iu/claude-code-adapter/pkg/profile" "github.com/x5iu/claude-code-adapter/pkg/utils" @@ -30,6 +31,7 @@ var providerErrorParser = map[string]func(*http.Response) error{ ProviderMethodGenerateAnthropicMessage: parseError[*anthropic.Error], ProviderMethodCountAnthropicTokens: parseError[*anthropic.Error], ProviderMethodCreateOpenRouterChatCompletion: parseError[*openrouter.Error], + ProviderMethodCreateOpenAIModelResponse: parseError[*openai.Error], } func (r *ResponseHandler) ScanValues(values ...any) error { @@ -60,6 +62,12 @@ func (r *ResponseHandler) ScanValues(values ...any) error { } stream := values[0].(*openrouter.ChatCompletionStream) *stream = makeOpenRouterStream(profile.MustFromContext(ctx), r.Response.Body) + case ProviderMethodCreateOpenAIModelResponse: + if !utils.IsContentType(responseHeader, "text/event-stream") { + return fmt.Errorf("unexpected Content-Type: %s", responseHeader.Get("Content-Type")) + } + stream := values[0].(*openai.ResponseStream) + *stream = makeOpenAIStream(r.Response.Body) default: defer r.Response.Body.Close() switch { @@ -103,6 +111,70 @@ var anthropicEventBuilder = map[anthropic.EventType]func([]byte) (anthropic.Even anthropic.EventTypeContentBlockStop: unmarshalAnthropicEvent[*anthropic.EventContentBlockStop], } +var openaiEventBuilder = map[openai.EventType]func([]byte) (openai.Event, error){ + openai.EventTypeResponseAudioDelta: unmarshalOpenAIEvent[*openai.ResponseAudioDeltaEvent], + openai.EventTypeResponseAudioDone: unmarshalOpenAIEvent[*openai.ResponseAudioDoneEvent], + openai.EventTypeResponseAudioTranscriptDelta: unmarshalOpenAIEvent[*openai.ResponseAudioTranscriptDeltaEvent], + 
openai.EventTypeResponseAudioTranscriptDone: unmarshalOpenAIEvent[*openai.ResponseAudioTranscriptDoneEvent], + openai.EventTypeResponseCodeInterpreterCallCodeDelta: unmarshalOpenAIEvent[*openai.ResponseCodeInterpreterCallCodeDeltaEvent], + openai.EventTypeResponseCodeInterpreterCallCodeDone: unmarshalOpenAIEvent[*openai.ResponseCodeInterpreterCallCodeDoneEvent], + openai.EventTypeResponseCodeInterpreterCallCompleted: unmarshalOpenAIEvent[*openai.ResponseCodeInterpreterCallCompletedEvent], + openai.EventTypeResponseCodeInterpreterCallInProgress: unmarshalOpenAIEvent[*openai.ResponseCodeInterpreterCallInProgressEvent], + openai.EventTypeResponseCodeInterpreterCallInterpreting: unmarshalOpenAIEvent[*openai.ResponseCodeInterpreterCallInterpretingEvent], + openai.EventTypeResponseCompleted: unmarshalOpenAIEvent[*openai.ResponseCompletedEvent], + openai.EventTypeResponseContentPartAdded: unmarshalOpenAIEvent[*openai.ResponseContentPartAddedEvent], + openai.EventTypeResponseContentPartDone: unmarshalOpenAIEvent[*openai.ResponseContentPartDoneEvent], + openai.EventTypeResponseCreated: unmarshalOpenAIEvent[*openai.ResponseCreatedEvent], + openai.EventTypeError: unmarshalOpenAIEvent[*openai.ResponseErrorEvent], + openai.EventTypeResponseFileSearchCallCompleted: unmarshalOpenAIEvent[*openai.ResponseFileSearchCallCompletedEvent], + openai.EventTypeResponseFileSearchCallInProgress: unmarshalOpenAIEvent[*openai.ResponseFileSearchCallInProgressEvent], + openai.EventTypeResponseFileSearchCallSearching: unmarshalOpenAIEvent[*openai.ResponseFileSearchCallSearchingEvent], + openai.EventTypeResponseFunctionCallArgumentsDelta: unmarshalOpenAIEvent[*openai.ResponseFunctionCallArgumentsDeltaEvent], + openai.EventTypeResponseFunctionCallArgumentsDone: unmarshalOpenAIEvent[*openai.ResponseFunctionCallArgumentsDoneEvent], + openai.EventTypeResponseInProgress: unmarshalOpenAIEvent[*openai.ResponseInProgressEvent], + openai.EventTypeResponseFailed: 
unmarshalOpenAIEvent[*openai.ResponseFailedEvent], + openai.EventTypeResponseIncomplete: unmarshalOpenAIEvent[*openai.ResponseIncompleteEvent], + openai.EventTypeResponseOutputItemAdded: unmarshalOpenAIEvent[*openai.ResponseOutputItemAddedEvent], + openai.EventTypeResponseOutputItemDone: unmarshalOpenAIEvent[*openai.ResponseOutputItemDoneEvent], + openai.EventTypeResponseReasoningSummaryPartAdded: unmarshalOpenAIEvent[*openai.ResponseReasoningSummaryPartAddedEvent], + openai.EventTypeResponseReasoningSummaryPartDone: unmarshalOpenAIEvent[*openai.ResponseReasoningSummaryPartDoneEvent], + openai.EventTypeResponseReasoningSummaryTextDelta: unmarshalOpenAIEvent[*openai.ResponseReasoningSummaryTextDeltaEvent], + openai.EventTypeResponseReasoningSummaryTextDone: unmarshalOpenAIEvent[*openai.ResponseReasoningSummaryTextDoneEvent], + openai.EventTypeResponseReasoningTextDelta: unmarshalOpenAIEvent[*openai.ResponseReasoningTextDeltaEvent], + openai.EventTypeResponseReasoningTextDone: unmarshalOpenAIEvent[*openai.ResponseReasoningTextDoneEvent], + openai.EventTypeResponseRefusalDelta: unmarshalOpenAIEvent[*openai.ResponseRefusalDeltaEvent], + openai.EventTypeResponseRefusalDone: unmarshalOpenAIEvent[*openai.ResponseRefusalDoneEvent], + openai.EventTypeResponseOutputTextDelta: unmarshalOpenAIEvent[*openai.ResponseTextDeltaEvent], + openai.EventTypeResponseOutputTextDone: unmarshalOpenAIEvent[*openai.ResponseTextDoneEvent], + openai.EventTypeResponseWebSearchCallCompleted: unmarshalOpenAIEvent[*openai.ResponseWebSearchCallCompletedEvent], + openai.EventTypeResponseWebSearchCallInProgress: unmarshalOpenAIEvent[*openai.ResponseWebSearchCallInProgressEvent], + openai.EventTypeResponseWebSearchCallSearching: unmarshalOpenAIEvent[*openai.ResponseWebSearchCallSearchingEvent], + openai.EventTypeResponseImageGenCallCompleted: unmarshalOpenAIEvent[*openai.ResponseImageGenCallCompletedEvent], + openai.EventTypeResponseImageGenCallGenerating: 
unmarshalOpenAIEvent[*openai.ResponseImageGenCallGeneratingEvent], + openai.EventTypeResponseImageGenCallInProgress: unmarshalOpenAIEvent[*openai.ResponseImageGenCallInProgressEvent], + openai.EventTypeResponseImageGenCallPartialImage: unmarshalOpenAIEvent[*openai.ResponseImageGenCallPartialImageEvent], + openai.EventTypeResponseMcpCallArgumentsDelta: unmarshalOpenAIEvent[*openai.ResponseMcpCallArgumentsDeltaEvent], + openai.EventTypeResponseMcpCallArgumentsDone: unmarshalOpenAIEvent[*openai.ResponseMcpCallArgumentsDoneEvent], + openai.EventTypeResponseMcpCallCompleted: unmarshalOpenAIEvent[*openai.ResponseMcpCallCompletedEvent], + openai.EventTypeResponseMcpCallFailed: unmarshalOpenAIEvent[*openai.ResponseMcpCallFailedEvent], + openai.EventTypeResponseMcpCallInProgress: unmarshalOpenAIEvent[*openai.ResponseMcpCallInProgressEvent], + openai.EventTypeResponseMcpListToolsInProgress: unmarshalOpenAIEvent[*openai.ResponseMcpListToolsInProgressEvent], + openai.EventTypeResponseMcpListToolsCompleted: unmarshalOpenAIEvent[*openai.ResponseMcpListToolsCompletedEvent], + openai.EventTypeResponseMcpListToolsFailed: unmarshalOpenAIEvent[*openai.ResponseMcpListToolsFailedEvent], + openai.EventTypeResponseOutputTextAnnotationAdded: unmarshalOpenAIEvent[*openai.ResponseOutputTextAnnotationAddedEvent], + openai.EventTypeResponseQueued: unmarshalOpenAIEvent[*openai.ResponseQueuedEvent], + openai.EventTypeResponseCustomToolCallInputDelta: unmarshalOpenAIEvent[*openai.ResponseCustomToolCallInputDeltaEvent], + openai.EventTypeResponseCustomToolCallInputDone: unmarshalOpenAIEvent[*openai.ResponseCustomToolCallInputDoneEvent], +} + +func unmarshalOpenAIEvent[E openai.Event](data []byte) (openai.Event, error) { + var event E + if err := json.Unmarshal(data, &event); err != nil { + return nil, err + } + return event, nil +} + func unmarshalAnthropicEvent[E anthropic.Event](data []byte) (anthropic.Event, error) { var event E if err := json.Unmarshal(data, &event); err != nil { @@ -184,6 
+256,41 @@ func MakeAnthropicStream(prof *profile.Profile, r io.ReadCloser) anthropic.Messa } } +func makeOpenAIStream(r io.ReadCloser) openai.ResponseStream { + return func(yield func(openai.Event, error) bool) { + defer r.Close() + scanner := bufio.NewScanner(r) + for scanner.Scan() { + line := bytes.TrimSpace(scanner.Bytes()) + if len(line) == 0 { + continue + } + eventType, isEvent := bytes.CutPrefix(line, []byte("event:")) + eventType = bytes.Clone(eventType) // next Scan overwrites bytes under eventType, a Clone keeps it unchanged + if isEvent && scanner.Scan() { + data, isData := bytes.CutPrefix(bytes.TrimSpace(scanner.Bytes()), []byte("data:")) + if !isData { + yield(nil, fmt.Errorf("missing openai %q data chunk", string(eventType))) + return + } + if unmarshalEvent, ok := openaiEventBuilder[openai.EventType(bytes.TrimSpace(eventType))]; ok { + event, err := unmarshalEvent(data) + if err != nil { + yield(nil, err) + return + } + if !yield(event, nil) { + return + } + } + } + } + if err := scanner.Err(); err != nil { + yield(nil, err) + } + } +} + func makeDataIterator(prof *profile.Profile, r io.ReadCloser) iter.Seq2[json.RawMessage, error] { buffer := make([]byte, prof.Options.GetStreamDataBufferSize()) return func(yield func(json.RawMessage, error) bool) { diff --git a/pkg/snapshot/snapshot.go b/pkg/snapshot/snapshot.go index 29feb42..64405c7 100644 --- a/pkg/snapshot/snapshot.go +++ b/pkg/snapshot/snapshot.go @@ -7,6 +7,7 @@ import ( "time" "github.com/x5iu/claude-code-adapter/pkg/datatypes/anthropic" + "github.com/x5iu/claude-code-adapter/pkg/datatypes/openai" "github.com/x5iu/claude-code-adapter/pkg/datatypes/openrouter" ) @@ -38,6 +39,8 @@ type Snapshot struct { AnthropicResponse *anthropic.Message `json:"anthropic_response,omitempty"` OpenRouterRequest *openrouter.CreateChatCompletionRequest `json:"openrouter_request,omitempty"` OpenRouterResponse *openrouter.ChatCompletion `json:"openrouter_response,omitempty"` + OpenAIRequest 
*openai.CreateModelResponseRequest `json:"openai_request,omitempty"` + OpenAIResponse *openai.Response `json:"openai_response,omitempty"` RequestHeader Header `json:"request_header,omitempty"` ResponseHeader Header `json:"response_header,omitempty"` } @@ -53,6 +56,11 @@ type Config struct { Options *OptionsConfig `yaml:"options" json:"options" mapstructure:"options"` Anthropic *AnthropicConfig `yaml:"anthropic" json:"anthropic" mapstructure:"anthropic"` OpenRouter *OpenRouterConfig `yaml:"openrouter" json:"openrouter" mapstructure:"openrouter"` + OpenAI *OpenAIConfig `yaml:"openai" json:"openai" mapstructure:"openai"` +} + +type OpenAIConfig struct { + BaseURL string `yaml:"base_url" json:"base_url" mapstructure:"base_url"` } type OptionsConfig struct { diff --git a/pkg/utils/utils.go b/pkg/utils/utils.go index db72304..5adedc6 100644 --- a/pkg/utils/utils.go +++ b/pkg/utils/utils.go @@ -2,6 +2,8 @@ package utils import ( "bytes" + "crypto/rand" + "encoding/hex" "encoding/json" "fmt" "net/http" @@ -36,6 +38,14 @@ func IsContentType(header http.Header, contentType string) bool { return headerContentType == contentType } +// GenerateID generates a random ID with the given prefix. +// Format: prefix_<12 random hex characters> +func GenerateID(prefix string) string { + b := make([]byte, 6) + rand.Read(b) + return prefix + "_" + hex.EncodeToString(b) +} + // The following variables and functions are copied from github.com/gin-gonic/gin@v1.10.1/recovery.go // reference: https://github.com/gin-gonic/gin