fix(translator): avoid default thinkingConfig in Gemini requests

Author: hkfires
Date:   2025-10-29 10:46:25 +08:00
Parent: 9d42e4b239
Commit: 680b3f5010

4 changed files with 24 additions and 26 deletions
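
Illustration of the change (not part of the commit itself): the old base envelopes shipped generationConfig.thinkingConfig with include_thoughts:true on every translated request, so even clients that never asked for reasoning got a thinking config. With the new envelopes the key is only created when the incoming request sets a reasoning effort. A minimal sketch against the new Claude-to-CLI envelope, using the same gjson/sjson libraries; the payloads and the translate helper are made up for the example.

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

// translate mirrors the new behaviour in miniature: start from an envelope
// without thinkingConfig and only create it when reasoning_effort is present.
func translate(body []byte) string {
	out := `{"model":"","request":{"contents":[]}}`
	if v := gjson.GetBytes(body, "reasoning_effort"); v.Exists() {
		// "low" maps to a 1024-token budget in the diff; other levels omitted here.
		if v.String() == "low" {
			out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", 1024)
		}
	}
	return out
}

func main() {
	// Without reasoning_effort the output carries no thinkingConfig at all.
	fmt.Println(translate([]byte(`{"messages":[]}`)))
	// -> {"model":"","request":{"contents":[]}}

	// With reasoning_effort the config is created on demand by sjson.
	fmt.Println(translate([]byte(`{"reasoning_effort":"low"}`)))
	// -> {"model":"","request":{"contents":[],"generationConfig":{"thinkingConfig":{"thinkingBudget":1024}}}}
}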


@@ -135,8 +135,8 @@ func ConvertClaudeRequestToCLI(modelName string, inputRawJSON []byte, _ bool) []
 		tools = make([]client.ToolDeclaration, 0)
 	}
-	// Build output Gemini CLI request JSON
-	out := `{"model":"","request":{"contents":[],"generationConfig":{"thinkingConfig":{"include_thoughts":true}}}}`
+	// Build output Gemini CLI request JSON (no default thinkingConfig)
+	out := `{"model":"","request":{"contents":[]}}`
 	out, _ = sjson.Set(out, "model", modelName)
 	if systemInstruction != nil {
 		b, _ := json.Marshal(systemInstruction)
@@ -151,21 +151,23 @@ func ConvertClaudeRequestToCLI(modelName string, inputRawJSON []byte, _ bool) []
 		out, _ = sjson.SetRaw(out, "request.tools", string(b))
 	}
-	// Map reasoning and sampling configs
+	// Map reasoning and sampling configs: only set thinkingConfig when explicitly requested
 	reasoningEffortResult := gjson.GetBytes(rawJSON, "reasoning_effort")
-	if reasoningEffortResult.String() == "none" {
-		out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.include_thoughts", false)
-		out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", 0)
-	} else if reasoningEffortResult.String() == "auto" {
-		out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
-	} else if reasoningEffortResult.String() == "low" {
-		out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", 1024)
-	} else if reasoningEffortResult.String() == "medium" {
-		out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", 8192)
-	} else if reasoningEffortResult.String() == "high" {
-		out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", 24576)
-	} else {
-		out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
+	if reasoningEffortResult.Exists() {
+		if reasoningEffortResult.String() == "none" {
+			out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.include_thoughts", false)
+			out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", 0)
+		} else if reasoningEffortResult.String() == "auto" {
+			out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
+		} else if reasoningEffortResult.String() == "low" {
+			out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", 1024)
+		} else if reasoningEffortResult.String() == "medium" {
+			out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", 8192)
+		} else if reasoningEffortResult.String() == "high" {
+			out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", 24576)
+		} else {
+			out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
+		}
 	}
 	if v := gjson.GetBytes(rawJSON, "temperature"); v.Exists() && v.Type == gjson.Number {
 		out, _ = sjson.Set(out, "request.generationConfig.temperature", v.Num)
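
For reference, the effort-to-budget mapping kept inside the new Exists() guard, condensed into a switch. This is an illustrative rewrite only; the file keeps the if/else chain shown above, and the helper name is hypothetical.

// thinkingBudgetFor returns the thinkingBudget used for each reasoning_effort
// value; the numbers are taken directly from the diff above.
func thinkingBudgetFor(effort string) int {
	switch effort {
	case "none":
		return 0 // the diff also sets include_thoughts to false for "none"
	case "low":
		return 1024
	case "medium":
		return 8192
	case "high":
		return 24576
	default: // "auto" or any unrecognized value: dynamic budget
		return -1
	}
}

With the surrounding Exists() check, none of these values (and hence no thinkingConfig at all) is written unless the Claude request actually carries a reasoning_effort field.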


@@ -26,8 +26,8 @@ import (
 // - []byte: The transformed request data in Gemini CLI API format
 func ConvertOpenAIRequestToGeminiCLI(modelName string, inputRawJSON []byte, _ bool) []byte {
 	rawJSON := bytes.Clone(inputRawJSON)
-	// Base envelope
-	out := []byte(`{"project":"","request":{"contents":[],"generationConfig":{"thinkingConfig":{"include_thoughts":true}}},"model":"gemini-2.5-pro"}`)
+	// Base envelope (no default thinkingConfig)
+	out := []byte(`{"project":"","request":{"contents":[]},"model":"gemini-2.5-pro"}`)
 	// Model
 	out, _ = sjson.SetBytes(out, "model", modelName)
@@ -50,8 +50,6 @@ func ConvertOpenAIRequestToGeminiCLI(modelName string, inputRawJSON []byte, _ bo
 		default:
 			out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
 		}
-	} else {
-		out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
 	}
 	// Temperature/top_p/top_k


@@ -26,8 +26,8 @@ import (
 // - []byte: The transformed request data in Gemini API format
 func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool) []byte {
 	rawJSON := bytes.Clone(inputRawJSON)
-	// Base envelope
-	out := []byte(`{"contents":[],"generationConfig":{"thinkingConfig":{"include_thoughts":true}}}`)
+	// Base envelope (no default thinkingConfig)
+	out := []byte(`{"contents":[]}`)
 	// Model
 	out, _ = sjson.SetBytes(out, "model", modelName)
@@ -50,8 +50,6 @@ func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool)
 		default:
 			out, _ = sjson.SetBytes(out, "generationConfig.thinkingConfig.thinkingBudget", -1)
 		}
-	} else {
-		out, _ = sjson.SetBytes(out, "generationConfig.thinkingConfig.thinkingBudget", -1)
 	}
 	// Temperature/top_p/top_k
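
The two OpenAI-facing translators above apply the same change; they differ only in where thinkingConfig lives, since the CLI format wraps the body in a request object. A hypothetical helper (not in this commit, and assuming the sjson import already used by these files) that captures just that path difference:

// setThinkingBudget writes the budget under the path used by either format:
// the Gemini CLI envelope nests generationConfig under "request", the plain
// Gemini API body does not.
func setThinkingBudget(out []byte, cliFormat bool, budget int) []byte {
	path := "generationConfig.thinkingConfig.thinkingBudget"
	if cliFormat {
		path = "request." + path
	}
	out, _ = sjson.SetBytes(out, path, budget)
	return out
}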


@@ -15,8 +15,8 @@ func ConvertOpenAIResponsesRequestToGemini(modelName string, inputRawJSON []byte
 	_ = modelName // Unused but required by interface
 	_ = stream // Unused but required by interface
-	// Base Gemini API template
-	out := `{"contents":[],"generationConfig":{"thinkingConfig":{"include_thoughts":true}}}`
+	// Base Gemini API template (do not include thinkingConfig by default)
+	out := `{"contents":[]}`
 	root := gjson.ParseBytes(rawJSON)
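
A hypothetical regression check for the Responses path (not part of this commit). It assumes the converter lives in a package named translator, returns []byte like the other converters shown above, and that a minimal {"input":[]} body is an acceptable request; all three are assumptions for the sketch.

package translator // assumed package name

import (
	"testing"

	"github.com/tidwall/gjson"
)

// A request with no reasoning settings should no longer produce a default
// generationConfig.thinkingConfig in the converted Gemini body.
func TestNoDefaultThinkingConfig(t *testing.T) {
	out := ConvertOpenAIResponsesRequestToGemini("gemini-2.5-pro", []byte(`{"input":[]}`), false)
	if gjson.GetBytes(out, "generationConfig.thinkingConfig").Exists() {
		t.Fatalf("unexpected default thinkingConfig: %s", out)
	}
}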