diff --git a/internal/translator/codex/claude/codex_claude_response.go b/internal/translator/codex/claude/codex_claude_response.go
index 238d3e24..b39494b7 100644
--- a/internal/translator/codex/claude/codex_claude_response.go
+++ b/internal/translator/codex/claude/codex_claude_response.go
@@ -113,10 +113,10 @@ func ConvertCodexResponseToClaude(_ context.Context, _ string, originalRequestRa
 	template = `{"type":"message_delta","delta":{"stop_reason":"tool_use","stop_sequence":null},"usage":{"input_tokens":0,"output_tokens":0}}`
 	p := (*param).(*ConvertCodexResponseToClaudeParams).HasToolCall
 	stopReason := rootResult.Get("response.stop_reason").String()
-	if stopReason != "" {
-		template, _ = sjson.Set(template, "delta.stop_reason", stopReason)
-	} else if p {
+	if p {
 		template, _ = sjson.Set(template, "delta.stop_reason", "tool_use")
+	} else if stopReason == "max_tokens" || stopReason == "stop" {
+		template, _ = sjson.Set(template, "delta.stop_reason", stopReason)
 	} else {
 		template, _ = sjson.Set(template, "delta.stop_reason", "end_turn")
 	}
diff --git a/internal/translator/gemini-cli/openai/chat-completions/gemini-cli_openai_response.go b/internal/translator/gemini-cli/openai/chat-completions/gemini-cli_openai_response.go
index 97c18c1e..4867085e 100644
--- a/internal/translator/gemini-cli/openai/chat-completions/gemini-cli_openai_response.go
+++ b/internal/translator/gemini-cli/openai/chat-completions/gemini-cli_openai_response.go
@@ -78,11 +78,16 @@ func ConvertCliResponseToOpenAI(_ context.Context, _ string, originalRequestRawJ
 		template, _ = sjson.Set(template, "id", responseIDResult.String())
 	}
 
-	// Extract and set the finish reason.
-	if finishReasonResult := gjson.GetBytes(rawJSON, "response.candidates.0.finishReason"); finishReasonResult.Exists() {
-		template, _ = sjson.Set(template, "choices.0.finish_reason", strings.ToLower(finishReasonResult.String()))
-		template, _ = sjson.Set(template, "choices.0.native_finish_reason", strings.ToLower(finishReasonResult.String()))
+	finishReason := ""
+	if stopReasonResult := gjson.GetBytes(rawJSON, "response.stop_reason"); stopReasonResult.Exists() {
+		finishReason = stopReasonResult.String()
 	}
+	if finishReason == "" {
+		if finishReasonResult := gjson.GetBytes(rawJSON, "response.candidates.0.finishReason"); finishReasonResult.Exists() {
+			finishReason = finishReasonResult.String()
+		}
+	}
+	finishReason = strings.ToLower(finishReason)
 
 	// Extract and set usage metadata (token counts).
 	if usageResult := gjson.GetBytes(rawJSON, "response.usageMetadata"); usageResult.Exists() {
@@ -197,6 +202,12 @@ func ConvertCliResponseToOpenAI(_ context.Context, _ string, originalRequestRawJ
 	if hasFunctionCall {
 		template, _ = sjson.Set(template, "choices.0.finish_reason", "tool_calls")
 		template, _ = sjson.Set(template, "choices.0.native_finish_reason", "tool_calls")
+	} else if finishReason != "" && (*param).(*convertCliResponseToOpenAIChatParams).FunctionIndex == 0 {
+		// Only pass through specific finish reasons
+		if finishReason == "max_tokens" || finishReason == "stop" {
+			template, _ = sjson.Set(template, "choices.0.finish_reason", finishReason)
+			template, _ = sjson.Set(template, "choices.0.native_finish_reason", finishReason)
+		}
 	}
 
 	return []string{template}
diff --git a/internal/translator/gemini/openai/chat-completions/gemini_openai_response.go b/internal/translator/gemini/openai/chat-completions/gemini_openai_response.go
index 9cce35f9..ee581c46 100644
--- a/internal/translator/gemini/openai/chat-completions/gemini_openai_response.go
+++ b/internal/translator/gemini/openai/chat-completions/gemini_openai_response.go
@@ -129,11 +129,16 @@ func ConvertGeminiResponseToOpenAI(_ context.Context, _ string, originalRequestR
 		candidateIndex := int(candidate.Get("index").Int())
 		template, _ = sjson.Set(template, "choices.0.index", candidateIndex)
 
-		// Extract and set the finish reason.
-		if finishReasonResult := candidate.Get("finishReason"); finishReasonResult.Exists() {
-			template, _ = sjson.Set(template, "choices.0.finish_reason", strings.ToLower(finishReasonResult.String()))
-			template, _ = sjson.Set(template, "choices.0.native_finish_reason", strings.ToLower(finishReasonResult.String()))
+		finishReason := ""
+		if stopReasonResult := gjson.GetBytes(rawJSON, "stop_reason"); stopReasonResult.Exists() {
+			finishReason = stopReasonResult.String()
 		}
+		if finishReason == "" {
+			if finishReasonResult := gjson.GetBytes(rawJSON, "candidates.0.finishReason"); finishReasonResult.Exists() {
+				finishReason = finishReasonResult.String()
+			}
+		}
+		finishReason = strings.ToLower(finishReason)
 
 		partsResult := candidate.Get("content.parts")
 		hasFunctionCall := false
@@ -225,6 +230,12 @@ func ConvertGeminiResponseToOpenAI(_ context.Context, _ string, originalRequestR
 		if hasFunctionCall {
 			template, _ = sjson.Set(template, "choices.0.finish_reason", "tool_calls")
 			template, _ = sjson.Set(template, "choices.0.native_finish_reason", "tool_calls")
+		} else if finishReason != "" {
+			// Only pass through specific finish reasons
+			if finishReason == "max_tokens" || finishReason == "stop" {
+				template, _ = sjson.Set(template, "choices.0.finish_reason", finishReason)
+				template, _ = sjson.Set(template, "choices.0.native_finish_reason", finishReason)
+			}
 		}
 
 		responseStrings = append(responseStrings, template)
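For context only (not part of the patch): a minimal standalone sketch of the finish-reason policy the three hunks converge on. resolveFinishReason and its parameters are illustrative names, not identifiers from this repository; the real translators write the value into sjson templates rather than returning a string.

package main

import "fmt"

// resolveFinishReason sketches the shared policy: an emitted tool call always
// wins, only "stop" and "max_tokens" pass through from upstream, and anything
// else (including an empty value) falls back to the translator's default
// ("end_turn" for the Claude path; the OpenAI paths leave the template as-is).
func resolveFinishReason(hasToolCall bool, toolCallValue, upstream, fallback string) string {
	switch {
	case hasToolCall:
		return toolCallValue
	case upstream == "stop" || upstream == "max_tokens":
		return upstream
	default:
		return fallback
	}
}

func main() {
	fmt.Println(resolveFinishReason(true, "tool_use", "stop", "end_turn"))        // tool_use
	fmt.Println(resolveFinishReason(false, "tool_use", "max_tokens", "end_turn")) // max_tokens
	fmt.Println(resolveFinishReason(false, "tool_use", "refusal", "end_turn"))    // end_turn
}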