From d72748eccf0b654bac65626c5082c36d718c0ea0 Mon Sep 17 00:00:00 2001
From: Jan Svabenik
Date: Fri, 27 Mar 2026 09:26:42 +0100
Subject: [PATCH] fix: parse newer openai response shapes

---
 internal/llmruntime/runtime.go      | 175 ++++++++++++++++++++++++++--
 internal/llmruntime/runtime_test.go |  90 ++++++++++++++
 2 files changed, 255 insertions(+), 10 deletions(-)

diff --git a/internal/llmruntime/runtime.go b/internal/llmruntime/runtime.go
index f687684..8fe0ce8 100644
--- a/internal/llmruntime/runtime.go
+++ b/internal/llmruntime/runtime.go
@@ -8,6 +8,7 @@ import (
 	"io"
 	"net/http"
 	"net/url"
+	"sort"
 	"strings"
 	"time"
 )
@@ -86,20 +87,15 @@ func (c *openAICompatibleClient) Generate(ctx context.Context, req Request) (str
 		return "", err
 	}
 
-	var response struct {
-		Choices []struct {
-			Message struct {
-				Content string `json:"content"`
-			} `json:"message"`
-		} `json:"choices"`
-	}
+	var response map[string]any
 	if err := json.Unmarshal(body, &response); err != nil {
 		return "", fmt.Errorf("decode openai-compatible response: %w", err)
 	}
-	if len(response.Choices) == 0 {
-		return "", fmt.Errorf("empty openai-compatible response")
+	content := extractOpenAICompatibleContent(response)
+	if content == "" {
+		return "", fmt.Errorf("empty openai-compatible response content (%s)", describeOpenAICompatibleShape(response))
 	}
-	return strings.TrimSpace(response.Choices[0].Message.Content), nil
+	return content, nil
 }
 
 type anthropicClient struct {
@@ -327,3 +323,162 @@ func nestedString(values map[string]any, path ...string) string {
 		return ""
 	}
 }
+
+func extractOpenAICompatibleContent(response map[string]any) string {
+	if response == nil {
+		return ""
+	}
+	if text := strings.TrimSpace(extractOpenAICompatibleChoicesContent(response["choices"])); text != "" {
+		return text
+	}
+	if text := strings.TrimSpace(extractTextFromContentValue(response["output_text"])); text != "" {
+		return text
+	}
+	return strings.TrimSpace(extractOpenAICompatibleOutputContent(response["output"]))
+}
+
+func extractOpenAICompatibleChoicesContent(raw any) string {
+	choices, ok := raw.([]any)
+	if !ok {
+		return ""
+	}
+	for _, rawChoice := range choices {
+		choice, ok := rawChoice.(map[string]any)
+		if !ok {
+			continue
+		}
+		if text := strings.TrimSpace(extractTextFromContentValue(choice["message"])); text != "" {
+			return text
+		}
+		if text := strings.TrimSpace(extractTextFromContentValue(choice["delta"])); text != "" {
+			return text
+		}
+		if text := strings.TrimSpace(extractTextFromContentValue(choice["text"])); text != "" {
+			return text
+		}
+	}
+	return ""
+}
+
+func extractOpenAICompatibleOutputContent(raw any) string {
+	output, ok := raw.([]any)
+	if !ok {
+		return ""
+	}
+	for _, rawItem := range output {
+		item, ok := rawItem.(map[string]any)
+		if !ok {
+			continue
+		}
+		if text := strings.TrimSpace(extractTextFromContentValue(item["content"])); text != "" {
+			return text
+		}
+		if text := strings.TrimSpace(extractTextFromContentValue(item["text"])); text != "" {
+			return text
+		}
+	}
+	return ""
+}
+
+func extractTextFromContentValue(raw any) string {
+	switch value := raw.(type) {
+	case string:
+		return strings.TrimSpace(value)
+	case []any:
+		parts := make([]string, 0, len(value))
+		for _, item := range value {
+			if text := strings.TrimSpace(extractTextFromContentValue(item)); text != "" {
+				parts = append(parts, text)
+			}
+		}
+		return strings.TrimSpace(strings.Join(parts, "\n"))
+	case map[string]any:
+		if text := strings.TrimSpace(extractTextFromContentValue(value["content"])); text != "" {
+			return text
+		}
+		if text := strings.TrimSpace(extractTextFromContentValue(value["text"])); text != "" {
+			return text
+		}
+		if text := strings.TrimSpace(extractTextFromContentValue(value["value"])); text != "" {
+			return text
+		}
+		if text := strings.TrimSpace(extractTextFromContentValue(value["output_text"])); text != "" {
+			return text
+		}
+		return ""
+	default:
+		return ""
+	}
+}
+
+func describeOpenAICompatibleShape(response map[string]any) string {
+	parts := make([]string, 0, 8)
+	parts = append(parts, "top="+describeMapKeys(response))
+
+	if choices, ok := response["choices"].([]any); ok {
+		parts = append(parts, fmt.Sprintf("choices_len=%d", len(choices)))
+		if len(choices) > 0 {
+			if choice, ok := choices[0].(map[string]any); ok {
+				parts = append(parts, "choices0="+describeMapKeys(choice))
+				if message, ok := choice["message"].(map[string]any); ok {
+					parts = append(parts, "message="+describeMapKeys(message))
+					parts = append(parts, "message_content_type="+valueType(message["content"]))
+				}
+			}
+		}
+	} else if _, exists := response["choices"]; exists {
+		parts = append(parts, "choices_type="+valueType(response["choices"]))
+	}
+
+	if _, exists := response["output_text"]; exists {
+		parts = append(parts, "output_text_type="+valueType(response["output_text"]))
+	}
+	if output, ok := response["output"].([]any); ok {
+		parts = append(parts, fmt.Sprintf("output_len=%d", len(output)))
+		if len(output) > 0 {
+			if first, ok := output[0].(map[string]any); ok {
+				parts = append(parts, "output0="+describeMapKeys(first))
+				parts = append(parts, "output0_content_type="+valueType(first["content"]))
+			}
+		}
+	} else if _, exists := response["output"]; exists {
+		parts = append(parts, "output_type="+valueType(response["output"]))
+	}
+
+	return strings.Join(parts, "; ")
+}
+
+func describeMapKeys(raw map[string]any) string {
+	if len(raw) == 0 {
+		return "{}"
+	}
+	keys := make([]string, 0, len(raw))
+	for key := range raw {
+		keys = append(keys, key)
+	}
+	sort.Strings(keys)
+	described := make([]string, 0, len(keys))
+	for _, key := range keys {
+		described = append(described, fmt.Sprintf("%s:%s", key, valueType(raw[key])))
+	}
+	return "{" + strings.Join(described, ",") + "}"
+}
+
+func valueType(raw any) string {
+	switch raw.(type) {
+	case nil:
+		return "null"
+	case string:
+		return "string"
+	case bool:
+		return "bool"
+	case float64:
+		return "number"
+	case []any:
+		return "array"
+	case map[string]any:
+		return "object"
+	default:
+		return fmt.Sprintf("%T", raw)
+	}
+}
diff --git a/internal/llmruntime/runtime_test.go b/internal/llmruntime/runtime_test.go
index 1d570f0..985fa75 100644
--- a/internal/llmruntime/runtime_test.go
+++ b/internal/llmruntime/runtime_test.go
@@ -90,6 +90,96 @@ func TestOpenAICompatibleClient_UsesMaxTokensForOlderOpenAIModels(t *testing.T)
 	}
 }
 
+func TestOpenAICompatibleClient_ExtractsMessageContentParts(t *testing.T) {
+	t.Parallel()
+
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		_, _ = w.Write([]byte(`{"choices":[{"message":{"content":[{"type":"text","text":"{\"suggestions\":["},{"type":"output_text","text":"{\"fieldPath\":\"hero.title\",\"value\":\"Hello\"}"}]}}]}`))
+	}))
+	defer server.Close()
+
+	factory := NewFactory(2 * time.Second)
+	client, err := factory.ClientFor("openai")
+	if err != nil {
+		t.Fatalf("client creation failed: %v", err)
+	}
+	got, err := client.Generate(context.Background(), Request{
+		Provider:     "openai",
+		BaseURL:      server.URL,
+		Model:        "gpt-5.4-mini",
+		APIKey:       "key",
+		SystemPrompt: "system",
+		UserPrompt:   "user",
+	})
+	if err != nil {
+		t.Fatalf("generate failed: %v", err)
+	}
+	if !strings.Contains(got, "hero.title") {
+		t.Fatalf("unexpected extracted content: %q", got)
+	}
+}
+
+func TestOpenAICompatibleClient_ExtractsResponsesOutputShape(t *testing.T) {
+	t.Parallel()
+
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		_, _ = w.Write([]byte(`{"id":"resp_123","object":"response","output":[{"type":"message","content":[{"type":"output_text","text":"{\"suggestions\":[{\"fieldPath\":\"hero.subtitle\",\"value\":\"World\"}]}"}]}]}`))
+	}))
+	defer server.Close()
+
+	factory := NewFactory(2 * time.Second)
+	client, err := factory.ClientFor("openai")
+	if err != nil {
+		t.Fatalf("client creation failed: %v", err)
+	}
+	got, err := client.Generate(context.Background(), Request{
+		Provider:     "openai",
+		BaseURL:      server.URL,
+		Model:        "gpt-5.4-mini",
+		APIKey:       "key",
+		SystemPrompt: "system",
+		UserPrompt:   "user",
+	})
+	if err != nil {
+		t.Fatalf("generate failed: %v", err)
+	}
+	if !strings.Contains(got, "hero.subtitle") {
+		t.Fatalf("unexpected extracted content: %q", got)
+	}
+}
+
+func TestOpenAICompatibleClient_EmptyContentIncludesShapeDiagnostics(t *testing.T) {
+	t.Parallel()
+
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		_, _ = w.Write([]byte(`{"choices":[{"message":{"content":[]}}]}`))
+	}))
+	defer server.Close()
+
+	factory := NewFactory(2 * time.Second)
+	client, err := factory.ClientFor("openai")
+	if err != nil {
+		t.Fatalf("client creation failed: %v", err)
+	}
+	_, err = client.Generate(context.Background(), Request{
+		Provider:     "openai",
+		BaseURL:      server.URL,
+		Model:        "gpt-5.4-mini",
+		APIKey:       "key",
+		SystemPrompt: "system",
+		UserPrompt:   "user",
+	})
+	if err == nil {
+		t.Fatalf("expected generate error")
+	}
+	if !strings.Contains(err.Error(), "empty openai-compatible response content") {
+		t.Fatalf("unexpected error: %v", err)
+	}
+	if !strings.Contains(err.Error(), "message_content_type=array") {
+		t.Fatalf("expected shape diagnostics in error: %v", err)
+	}
+}
+
 func TestExtractProviderErrorMessage(t *testing.T) {
 	t.Parallel()
 