Merge pull request #1081 from router-for-me/thinking

Refine thinking validation and cross‑provider payload conversion
Luis Pater
2026-01-18 13:34:28 +08:00
committed by GitHub
26 changed files with 3065 additions and 1181 deletions

View File

@@ -30,7 +30,7 @@ var (
type LogFormatter struct{}
// logFieldOrder defines the display order for common log fields.
var logFieldOrder = []string{"provider", "model", "mode", "budget", "level", "original_value", "min", "max", "clamped_to", "error"}
var logFieldOrder = []string{"provider", "model", "mode", "budget", "level", "original_value", "original_level", "min", "max", "clamped_to", "error"}
// Format renders a single log entry with custom formatting.
func (m *LogFormatter) Format(entry *log.Entry) ([]byte, error) {

View File

@@ -393,7 +393,7 @@ func (e *AIStudioExecutor) translateRequest(req cliproxyexecutor.Request, opts c
}
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, stream)
payload := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), stream)
payload, err := thinking.ApplyThinking(payload, req.Model, "gemini")
payload, err := thinking.ApplyThinking(payload, req.Model, from.String(), to.String())
if err != nil {
return nil, translatedPayload{}, err
}
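Note: every executor now passes both ends of the translation instead of a single provider hint. A minimal sketch of the new call pattern, reusing the helpers the executors already import (the format names are illustrative):

    from := sdktranslator.FromString("openai")  // format the client sent
    to := sdktranslator.FromString("gemini")    // format sent upstream
    payload := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), stream)
    payload, err := thinking.ApplyThinking(payload, req.Model, from.String(), to.String())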

View File

@@ -137,7 +137,7 @@ func (e *AntigravityExecutor) Execute(ctx context.Context, auth *cliproxyauth.Au
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
translated := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
translated, err = thinking.ApplyThinking(translated, req.Model, "antigravity")
translated, err = thinking.ApplyThinking(translated, req.Model, from.String(), to.String())
if err != nil {
return resp, err
}
@@ -256,7 +256,7 @@ func (e *AntigravityExecutor) executeClaudeNonStream(ctx context.Context, auth *
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
translated := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
translated, err = thinking.ApplyThinking(translated, req.Model, "antigravity")
translated, err = thinking.ApplyThinking(translated, req.Model, from.String(), to.String())
if err != nil {
return resp, err
}
@@ -622,7 +622,7 @@ func (e *AntigravityExecutor) ExecuteStream(ctx context.Context, auth *cliproxya
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
translated := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
translated, err = thinking.ApplyThinking(translated, req.Model, "antigravity")
translated, err = thinking.ApplyThinking(translated, req.Model, from.String(), to.String())
if err != nil {
return nil, err
}
@@ -802,7 +802,7 @@ func (e *AntigravityExecutor) CountTokens(ctx context.Context, auth *cliproxyaut
// Prepare payload once (doesn't depend on baseURL)
payload := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
payload, err := thinking.ApplyThinking(payload, req.Model, "antigravity")
payload, err := thinking.ApplyThinking(payload, req.Model, from.String(), to.String())
if err != nil {
return cliproxyexecutor.Response{}, err
}

View File

@@ -106,7 +106,7 @@ func (e *ClaudeExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, r
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), stream)
body, _ = sjson.SetBytes(body, "model", baseModel)
body, err = thinking.ApplyThinking(body, req.Model, "claude")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return resp, err
}
@@ -239,7 +239,7 @@ func (e *ClaudeExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.A
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body, _ = sjson.SetBytes(body, "model", baseModel)
body, err = thinking.ApplyThinking(body, req.Model, "claude")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return nil, err
}

View File

@@ -96,7 +96,7 @@ func (e *CodexExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, re
body = sdktranslator.TranslateRequest(from, to, baseModel, body, false)
body = misc.StripCodexUserAgent(body)
body, err = thinking.ApplyThinking(body, req.Model, "codex")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return resp, err
}
@@ -208,7 +208,7 @@ func (e *CodexExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Au
body = sdktranslator.TranslateRequest(from, to, baseModel, body, true)
body = misc.StripCodexUserAgent(body)
body, err = thinking.ApplyThinking(body, req.Model, "codex")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return nil, err
}
@@ -316,7 +316,7 @@ func (e *CodexExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Auth
body = sdktranslator.TranslateRequest(from, to, baseModel, body, false)
body = misc.StripCodexUserAgent(body)
body, err := thinking.ApplyThinking(body, req.Model, "codex")
body, err := thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return cliproxyexecutor.Response{}, err
}

View File

@@ -123,7 +123,7 @@ func (e *GeminiCLIExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
basePayload := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
basePayload, err = thinking.ApplyThinking(basePayload, req.Model, "gemini-cli")
basePayload, err = thinking.ApplyThinking(basePayload, req.Model, from.String(), to.String())
if err != nil {
return resp, err
}
@@ -272,7 +272,7 @@ func (e *GeminiCLIExecutor) ExecuteStream(ctx context.Context, auth *cliproxyaut
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
basePayload := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
basePayload, err = thinking.ApplyThinking(basePayload, req.Model, "gemini-cli")
basePayload, err = thinking.ApplyThinking(basePayload, req.Model, from.String(), to.String())
if err != nil {
return nil, err
}
@@ -479,7 +479,7 @@ func (e *GeminiCLIExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.
for range models {
payload := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
payload, err = thinking.ApplyThinking(payload, req.Model, "gemini-cli")
payload, err = thinking.ApplyThinking(payload, req.Model, from.String(), to.String())
if err != nil {
return cliproxyexecutor.Response{}, err
}

View File

@@ -120,7 +120,7 @@ func (e *GeminiExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, r
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body, err = thinking.ApplyThinking(body, req.Model, "gemini")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return resp, err
}
@@ -222,7 +222,7 @@ func (e *GeminiExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.A
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body, err = thinking.ApplyThinking(body, req.Model, "gemini")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return nil, err
}
@@ -338,7 +338,7 @@ func (e *GeminiExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Aut
to := sdktranslator.FromString("gemini")
translatedReq := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
translatedReq, err := thinking.ApplyThinking(translatedReq, req.Model, "gemini")
translatedReq, err := thinking.ApplyThinking(translatedReq, req.Model, from.String(), to.String())
if err != nil {
return cliproxyexecutor.Response{}, err
}

View File

@@ -170,7 +170,7 @@ func (e *GeminiVertexExecutor) executeWithServiceAccount(ctx context.Context, au
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body, err = thinking.ApplyThinking(body, req.Model, "gemini")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return resp, err
}
@@ -272,7 +272,7 @@ func (e *GeminiVertexExecutor) executeWithAPIKey(ctx context.Context, auth *clip
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body, err = thinking.ApplyThinking(body, req.Model, "gemini")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return resp, err
}
@@ -375,7 +375,7 @@ func (e *GeminiVertexExecutor) executeStreamWithServiceAccount(ctx context.Conte
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body, err = thinking.ApplyThinking(body, req.Model, "gemini")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return nil, err
}
@@ -494,7 +494,7 @@ func (e *GeminiVertexExecutor) executeStreamWithAPIKey(ctx context.Context, auth
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body, err = thinking.ApplyThinking(body, req.Model, "gemini")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return nil, err
}
@@ -605,7 +605,7 @@ func (e *GeminiVertexExecutor) countTokensWithServiceAccount(ctx context.Context
translatedReq := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
translatedReq, err := thinking.ApplyThinking(translatedReq, req.Model, "gemini")
translatedReq, err := thinking.ApplyThinking(translatedReq, req.Model, from.String(), to.String())
if err != nil {
return cliproxyexecutor.Response{}, err
}
@@ -689,7 +689,7 @@ func (e *GeminiVertexExecutor) countTokensWithAPIKey(ctx context.Context, auth *
translatedReq := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
translatedReq, err := thinking.ApplyThinking(translatedReq, req.Model, "gemini")
translatedReq, err := thinking.ApplyThinking(translatedReq, req.Model, from.String(), to.String())
if err != nil {
return cliproxyexecutor.Response{}, err
}

View File

@@ -92,7 +92,7 @@ func (e *IFlowExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, re
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body, _ = sjson.SetBytes(body, "model", baseModel)
body, err = thinking.ApplyThinking(body, req.Model, "iflow")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), "iflow")
if err != nil {
return resp, err
}
@@ -190,7 +190,7 @@ func (e *IFlowExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Au
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body, _ = sjson.SetBytes(body, "model", baseModel)
body, err = thinking.ApplyThinking(body, req.Model, "iflow")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), "iflow")
if err != nil {
return nil, err
}

View File

@@ -92,7 +92,7 @@ func (e *OpenAICompatExecutor) Execute(ctx context.Context, auth *cliproxyauth.A
translated := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), opts.Stream)
translated = applyPayloadConfigWithRoot(e.cfg, baseModel, to.String(), "", translated, originalTranslated)
translated, err = thinking.ApplyThinking(translated, req.Model, "openai")
translated, err = thinking.ApplyThinking(translated, req.Model, from.String(), to.String())
if err != nil {
return resp, err
}
@@ -187,7 +187,7 @@ func (e *OpenAICompatExecutor) ExecuteStream(ctx context.Context, auth *cliproxy
translated := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
translated = applyPayloadConfigWithRoot(e.cfg, baseModel, to.String(), "", translated, originalTranslated)
translated, err = thinking.ApplyThinking(translated, req.Model, "openai")
translated, err = thinking.ApplyThinking(translated, req.Model, from.String(), to.String())
if err != nil {
return nil, err
}
@@ -297,7 +297,7 @@ func (e *OpenAICompatExecutor) CountTokens(ctx context.Context, auth *cliproxyau
modelForCounting := baseModel
translated, err := thinking.ApplyThinking(translated, req.Model, "openai")
translated, err := thinking.ApplyThinking(translated, req.Model, from.String(), to.String())
if err != nil {
return cliproxyexecutor.Response{}, err
}

View File

@@ -86,7 +86,7 @@ func (e *QwenExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, req
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body, _ = sjson.SetBytes(body, "model", baseModel)
body, err = thinking.ApplyThinking(body, req.Model, "openai")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return resp, err
}
@@ -172,7 +172,7 @@ func (e *QwenExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Aut
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body, _ = sjson.SetBytes(body, "model", baseModel)
body, err = thinking.ApplyThinking(body, req.Model, "openai")
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String())
if err != nil {
return nil, err
}

View File

@@ -2,6 +2,8 @@
package thinking
import (
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
log "github.com/sirupsen/logrus"
"github.com/tidwall/gjson"
@@ -59,7 +61,8 @@ func IsUserDefinedModel(modelInfo *registry.ModelInfo) bool {
// Parameters:
// - body: Original request body JSON
// - model: Model name, optionally with thinking suffix (e.g., "claude-sonnet-4-5(16384)")
// - provider: Provider name (gemini, gemini-cli, antigravity, claude, openai, codex, iflow)
// - fromFormat: Source request format (e.g., openai, codex, gemini)
// - toFormat: Target provider format for the request body (gemini, gemini-cli, antigravity, claude, openai, codex, iflow)
//
// Returns:
// - Modified request body JSON with thinking configuration applied
@@ -76,16 +79,21 @@ func IsUserDefinedModel(modelInfo *registry.ModelInfo) bool {
// Example:
//
// // With suffix - suffix config takes priority
// result, err := thinking.ApplyThinking(body, "gemini-2.5-pro(8192)", "gemini")
// result, err := thinking.ApplyThinking(body, "gemini-2.5-pro(8192)", "gemini", "gemini")
//
// // Without suffix - uses body config
// result, err := thinking.ApplyThinking(body, "gemini-2.5-pro", "gemini")
func ApplyThinking(body []byte, model string, provider string) ([]byte, error) {
// result, err := thinking.ApplyThinking(body, "gemini-2.5-pro", "gemini", "gemini")
func ApplyThinking(body []byte, model string, fromFormat string, toFormat string) ([]byte, error) {
providerFormat := strings.ToLower(strings.TrimSpace(toFormat))
fromFormat = strings.ToLower(strings.TrimSpace(fromFormat))
if fromFormat == "" {
fromFormat = providerFormat
}
// 1. Route check: Get provider applier
applier := GetProviderApplier(provider)
applier := GetProviderApplier(providerFormat)
if applier == nil {
log.WithFields(log.Fields{
"provider": provider,
"provider": providerFormat,
"model": model,
}).Debug("thinking: unknown provider, passthrough |")
return body, nil
@@ -100,19 +108,19 @@ func ApplyThinking(body []byte, model string, provider string) ([]byte, error) {
// Unknown models are treated as user-defined so thinking config can still be applied.
// The upstream service is responsible for validating the configuration.
if IsUserDefinedModel(modelInfo) {
return applyUserDefinedModel(body, modelInfo, provider, suffixResult)
return applyUserDefinedModel(body, modelInfo, fromFormat, providerFormat, suffixResult)
}
if modelInfo.Thinking == nil {
config := extractThinkingConfig(body, provider)
config := extractThinkingConfig(body, providerFormat)
if hasThinkingConfig(config) {
log.WithFields(log.Fields{
"model": baseModel,
"provider": provider,
"provider": providerFormat,
}).Debug("thinking: model does not support thinking, stripping config |")
return StripThinkingConfig(body, provider), nil
return StripThinkingConfig(body, providerFormat), nil
}
log.WithFields(log.Fields{
"provider": provider,
"provider": providerFormat,
"model": baseModel,
}).Debug("thinking: model does not support thinking, passthrough |")
return body, nil
@@ -121,19 +129,19 @@ func ApplyThinking(body []byte, model string, provider string) ([]byte, error) {
// 4. Get config: suffix priority over body
var config ThinkingConfig
if suffixResult.HasSuffix {
config = parseSuffixToConfig(suffixResult.RawSuffix, provider, model)
config = parseSuffixToConfig(suffixResult.RawSuffix, providerFormat, model)
log.WithFields(log.Fields{
"provider": provider,
"provider": providerFormat,
"model": model,
"mode": config.Mode,
"budget": config.Budget,
"level": config.Level,
}).Debug("thinking: config from model suffix |")
} else {
config = extractThinkingConfig(body, provider)
config = extractThinkingConfig(body, providerFormat)
if hasThinkingConfig(config) {
log.WithFields(log.Fields{
"provider": provider,
"provider": providerFormat,
"model": modelInfo.ID,
"mode": config.Mode,
"budget": config.Budget,
@@ -144,17 +152,17 @@ func ApplyThinking(body []byte, model string, provider string) ([]byte, error) {
if !hasThinkingConfig(config) {
log.WithFields(log.Fields{
"provider": provider,
"provider": providerFormat,
"model": modelInfo.ID,
}).Debug("thinking: no config found, passthrough |")
return body, nil
}
// 5. Validate and normalize configuration
validated, err := ValidateConfig(config, modelInfo, provider)
validated, err := ValidateConfig(config, modelInfo, fromFormat, providerFormat)
if err != nil {
log.WithFields(log.Fields{
"provider": provider,
"provider": providerFormat,
"model": modelInfo.ID,
"error": err.Error(),
}).Warn("thinking: validation failed |")
@@ -167,14 +175,14 @@ func ApplyThinking(body []byte, model string, provider string) ([]byte, error) {
// Defensive check: ValidateConfig should never return (nil, nil)
if validated == nil {
log.WithFields(log.Fields{
"provider": provider,
"provider": providerFormat,
"model": modelInfo.ID,
}).Warn("thinking: ValidateConfig returned nil config without error, passthrough |")
return body, nil
}
log.WithFields(log.Fields{
"provider": provider,
"provider": providerFormat,
"model": modelInfo.ID,
"mode": validated.Mode,
"budget": validated.Budget,
@@ -228,7 +236,7 @@ func parseSuffixToConfig(rawSuffix, provider, model string) ThinkingConfig {
// applyUserDefinedModel applies thinking configuration for user-defined models
// without ThinkingSupport validation.
func applyUserDefinedModel(body []byte, modelInfo *registry.ModelInfo, provider string, suffixResult SuffixResult) ([]byte, error) {
func applyUserDefinedModel(body []byte, modelInfo *registry.ModelInfo, fromFormat, toFormat string, suffixResult SuffixResult) ([]byte, error) {
// Get model ID for logging
modelID := ""
if modelInfo != nil {
@@ -240,39 +248,57 @@ func applyUserDefinedModel(body []byte, modelInfo *registry.ModelInfo, provider
// Get config: suffix priority over body
var config ThinkingConfig
if suffixResult.HasSuffix {
config = parseSuffixToConfig(suffixResult.RawSuffix, provider, modelID)
config = parseSuffixToConfig(suffixResult.RawSuffix, toFormat, modelID)
} else {
config = extractThinkingConfig(body, provider)
config = extractThinkingConfig(body, toFormat)
}
if !hasThinkingConfig(config) {
log.WithFields(log.Fields{
"model": modelID,
"provider": provider,
"provider": toFormat,
}).Debug("thinking: user-defined model, passthrough (no config) |")
return body, nil
}
applier := GetProviderApplier(provider)
applier := GetProviderApplier(toFormat)
if applier == nil {
log.WithFields(log.Fields{
"model": modelID,
"provider": provider,
"provider": toFormat,
}).Debug("thinking: user-defined model, passthrough (unknown provider) |")
return body, nil
}
log.WithFields(log.Fields{
"provider": provider,
"provider": toFormat,
"model": modelID,
"mode": config.Mode,
"budget": config.Budget,
"level": config.Level,
}).Debug("thinking: applying config for user-defined model (skip validation)")
config = normalizeUserDefinedConfig(config, fromFormat, toFormat)
return applier.Apply(body, config, modelInfo)
}
func normalizeUserDefinedConfig(config ThinkingConfig, fromFormat, toFormat string) ThinkingConfig {
if config.Mode != ModeLevel {
return config
}
if !isBudgetBasedProvider(toFormat) || !isLevelBasedProvider(fromFormat) {
return config
}
budget, ok := ConvertLevelToBudget(string(config.Level))
if !ok {
return config
}
config.Mode = ModeBudget
config.Budget = budget
config.Level = ""
return config
}
// extractThinkingConfig extracts provider-specific thinking config from request body.
func extractThinkingConfig(body []byte, provider string) ThinkingConfig {
if len(body) == 0 || !gjson.ValidBytes(body) {
@@ -289,7 +315,11 @@ func extractThinkingConfig(body []byte, provider string) ThinkingConfig {
case "codex":
return extractCodexConfig(body)
case "iflow":
return extractIFlowConfig(body)
config := extractIFlowConfig(body)
if hasThinkingConfig(config) {
return config
}
return extractOpenAIConfig(body)
default:
return ThinkingConfig{}
}
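For user-defined models, a hedged sketch of the new normalization step, written as if inside the thinking package: a level-style config coming from a level-based source format (openai, openai-response, codex) is converted to a budget before being applied to a budget-based target (gemini, gemini-cli, antigravity, claude); the exact budget comes from ConvertLevelToBudget's mapping.

    cfg := ThinkingConfig{Mode: ModeLevel, Level: LevelHigh}
    cfg = normalizeUserDefinedConfig(cfg, "openai", "claude")
    // cfg.Mode == ModeBudget, cfg.Level == "", cfg.Budget holds whatever ConvertLevelToBudget("high") returned

The iflow branch of extractThinkingConfig also gains a fallback to the OpenAI fields when no iFlow-specific config is present, so reasoning_effort-style requests keep working against iFlow.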

View File

@@ -24,6 +24,10 @@ const (
// Example: using level with a budget-only model
ErrLevelNotSupported ErrorCode = "LEVEL_NOT_SUPPORTED"
// ErrBudgetOutOfRange indicates the budget value is outside model range.
// Example: budget 64000 exceeds max 20000
ErrBudgetOutOfRange ErrorCode = "BUDGET_OUT_OF_RANGE"
// ErrProviderMismatch indicates the provider does not match the model.
// Example: applying Claude format to a Gemini model
ErrProviderMismatch ErrorCode = "PROVIDER_MISMATCH"
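A hedged example of where the new code surfaces (the message text is illustrative):

    // Returned by ValidateConfig when a same-family request carries an explicit
    // budget outside the model's Min/Max range.
    err := NewThinkingError(ErrBudgetOutOfRange, "budget 64000 out of range [1024,20000]")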

View File

@@ -27,28 +27,32 @@ func StripThinkingConfig(body []byte, provider string) []byte {
return body
}
var paths []string
switch provider {
case "claude":
result, _ := sjson.DeleteBytes(body, "thinking")
return result
paths = []string{"thinking"}
case "gemini":
result, _ := sjson.DeleteBytes(body, "generationConfig.thinkingConfig")
return result
paths = []string{"generationConfig.thinkingConfig"}
case "gemini-cli", "antigravity":
result, _ := sjson.DeleteBytes(body, "request.generationConfig.thinkingConfig")
return result
paths = []string{"request.generationConfig.thinkingConfig"}
case "openai":
result, _ := sjson.DeleteBytes(body, "reasoning_effort")
return result
paths = []string{"reasoning_effort"}
case "codex":
result, _ := sjson.DeleteBytes(body, "reasoning.effort")
return result
paths = []string{"reasoning.effort"}
case "iflow":
result, _ := sjson.DeleteBytes(body, "chat_template_kwargs.enable_thinking")
result, _ = sjson.DeleteBytes(result, "chat_template_kwargs.clear_thinking")
result, _ = sjson.DeleteBytes(result, "reasoning_split")
return result
paths = []string{
"chat_template_kwargs.enable_thinking",
"chat_template_kwargs.clear_thinking",
"reasoning_split",
"reasoning_effort",
}
default:
return body
}
result := body
for _, path := range paths {
result, _ = sjson.DeleteBytes(result, path)
}
return result
}
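Behaviour is unchanged for the existing providers; the iflow case now also drops reasoning_effort, matching the new OpenAI fallback in extractThinkingConfig. A small sketch with an assumed payload:

    body := []byte(`{"messages":[],"chat_template_kwargs":{"enable_thinking":true},"reasoning_effort":"high"}`)
    body = StripThinkingConfig(body, "iflow")
    // enable_thinking and reasoning_effort are deleted; unrelated fields stay untouched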

View File

@@ -9,64 +9,6 @@ import (
log "github.com/sirupsen/logrus"
)
// ClampBudget clamps a budget value to the model's supported range.
//
// Logging:
// - Warn when value=0 but ZeroAllowed=false
// - Debug when value is clamped to min/max
//
// Fields: provider, model, original_value, clamped_to, min, max
func ClampBudget(value int, modelInfo *registry.ModelInfo, provider string) int {
model := "unknown"
support := (*registry.ThinkingSupport)(nil)
if modelInfo != nil {
if modelInfo.ID != "" {
model = modelInfo.ID
}
support = modelInfo.Thinking
}
if support == nil {
return value
}
// Auto value (-1) passes through without clamping.
if value == -1 {
return value
}
min := support.Min
max := support.Max
if value == 0 && !support.ZeroAllowed {
log.WithFields(log.Fields{
"provider": provider,
"model": model,
"original_value": value,
"clamped_to": min,
"min": min,
"max": max,
}).Warn("thinking: budget zero not allowed |")
return min
}
// Some models are level-only and do not define numeric budget ranges.
if min == 0 && max == 0 {
return value
}
if value < min {
if value == 0 && support.ZeroAllowed {
return 0
}
logClamp(provider, model, value, min, min, max)
return min
}
if value > max {
logClamp(provider, model, value, max, min, max)
return max
}
return value
}
// ValidateConfig validates a thinking configuration against model capabilities.
//
// This function performs comprehensive validation:
@@ -74,10 +16,14 @@ func ClampBudget(value int, modelInfo *registry.ModelInfo, provider string) int
// - Auto-converts between Budget and Level formats based on model capability
// - Validates that requested level is in the model's supported levels list
// - Clamps budget values to model's allowed range
// - When converting Budget -> Level for level-only models, clamps the derived standard level to the nearest supported level
// (special values none/auto are preserved)
//
// Parameters:
// - config: The thinking configuration to validate
// - support: Model's ThinkingSupport properties (nil means no thinking support)
// - fromFormat: Source provider format (used to determine strict validation rules)
// - toFormat: Target provider format
//
// Returns:
// - Normalized ThinkingConfig with clamped values
@@ -87,9 +33,8 @@ func ClampBudget(value int, modelInfo *registry.ModelInfo, provider string) int
// - Budget-only model + Level config → Level converted to Budget
// - Level-only model + Budget config → Budget converted to Level
// - Hybrid model → preserve original format
func ValidateConfig(config ThinkingConfig, modelInfo *registry.ModelInfo, provider string) (*ThinkingConfig, error) {
normalized := config
func ValidateConfig(config ThinkingConfig, modelInfo *registry.ModelInfo, fromFormat, toFormat string) (*ThinkingConfig, error) {
fromFormat, toFormat = strings.ToLower(strings.TrimSpace(fromFormat)), strings.ToLower(strings.TrimSpace(toFormat))
model := "unknown"
support := (*registry.ThinkingSupport)(nil)
if modelInfo != nil {
@@ -103,101 +48,108 @@ func ValidateConfig(config ThinkingConfig, modelInfo *registry.ModelInfo, provid
if config.Mode != ModeNone {
return nil, NewThinkingErrorWithModel(ErrThinkingNotSupported, "thinking not supported for this model", model)
}
return &normalized, nil
return &config, nil
}
allowClampUnsupported := isBudgetBasedProvider(fromFormat) && isLevelBasedProvider(toFormat)
strictBudget := fromFormat != "" && isSameProviderFamily(fromFormat, toFormat)
budgetDerivedFromLevel := false
capability := detectModelCapability(modelInfo)
switch capability {
case CapabilityBudgetOnly:
if normalized.Mode == ModeLevel {
if normalized.Level == LevelAuto {
if config.Mode == ModeLevel {
if config.Level == LevelAuto {
break
}
budget, ok := ConvertLevelToBudget(string(normalized.Level))
budget, ok := ConvertLevelToBudget(string(config.Level))
if !ok {
return nil, NewThinkingError(ErrUnknownLevel, fmt.Sprintf("unknown level: %s", normalized.Level))
return nil, NewThinkingError(ErrUnknownLevel, fmt.Sprintf("unknown level: %s", config.Level))
}
normalized.Mode = ModeBudget
normalized.Budget = budget
normalized.Level = ""
config.Mode = ModeBudget
config.Budget = budget
config.Level = ""
budgetDerivedFromLevel = true
}
case CapabilityLevelOnly:
if normalized.Mode == ModeBudget {
level, ok := ConvertBudgetToLevel(normalized.Budget)
if config.Mode == ModeBudget {
level, ok := ConvertBudgetToLevel(config.Budget)
if !ok {
return nil, NewThinkingError(ErrUnknownLevel, fmt.Sprintf("budget %d cannot be converted to a valid level", normalized.Budget))
return nil, NewThinkingError(ErrUnknownLevel, fmt.Sprintf("budget %d cannot be converted to a valid level", config.Budget))
}
normalized.Mode = ModeLevel
normalized.Level = ThinkingLevel(level)
normalized.Budget = 0
// When converting Budget -> Level for level-only models, clamp the derived standard level
// to the nearest supported level. Special values (none/auto) are preserved.
config.Mode = ModeLevel
config.Level = clampLevel(ThinkingLevel(level), modelInfo, toFormat)
config.Budget = 0
}
case CapabilityHybrid:
}
if normalized.Mode == ModeLevel && normalized.Level == LevelNone {
normalized.Mode = ModeNone
normalized.Budget = 0
normalized.Level = ""
if config.Mode == ModeLevel && config.Level == LevelNone {
config.Mode = ModeNone
config.Budget = 0
config.Level = ""
}
if normalized.Mode == ModeLevel && normalized.Level == LevelAuto {
normalized.Mode = ModeAuto
normalized.Budget = -1
normalized.Level = ""
if config.Mode == ModeLevel && config.Level == LevelAuto {
config.Mode = ModeAuto
config.Budget = -1
config.Level = ""
}
if normalized.Mode == ModeBudget && normalized.Budget == 0 {
normalized.Mode = ModeNone
normalized.Level = ""
if config.Mode == ModeBudget && config.Budget == 0 {
config.Mode = ModeNone
config.Level = ""
}
if len(support.Levels) > 0 && normalized.Mode == ModeLevel {
if !isLevelSupported(string(normalized.Level), support.Levels) {
if len(support.Levels) > 0 && config.Mode == ModeLevel {
if !isLevelSupported(string(config.Level), support.Levels) {
if allowClampUnsupported {
config.Level = clampLevel(config.Level, modelInfo, toFormat)
}
if !isLevelSupported(string(config.Level), support.Levels) {
// User explicitly specified an unsupported level - return error
// (budget-derived levels may be clamped based on source format)
validLevels := normalizeLevels(support.Levels)
message := fmt.Sprintf("level %q not supported, valid levels: %s", strings.ToLower(string(normalized.Level)), strings.Join(validLevels, ", "))
message := fmt.Sprintf("level %q not supported, valid levels: %s", strings.ToLower(string(config.Level)), strings.Join(validLevels, ", "))
return nil, NewThinkingError(ErrLevelNotSupported, message)
}
}
}
if strictBudget && config.Mode == ModeBudget && !budgetDerivedFromLevel {
min, max := support.Min, support.Max
if min != 0 || max != 0 {
if config.Budget < min || config.Budget > max || (config.Budget == 0 && !support.ZeroAllowed) {
message := fmt.Sprintf("budget %d out of range [%d,%d]", config.Budget, min, max)
return nil, NewThinkingError(ErrBudgetOutOfRange, message)
}
}
}
// Convert ModeAuto to mid-range if dynamic not allowed
if normalized.Mode == ModeAuto && !support.DynamicAllowed {
normalized = convertAutoToMidRange(normalized, support, provider, model)
if config.Mode == ModeAuto && !support.DynamicAllowed {
config = convertAutoToMidRange(config, support, toFormat, model)
}
if normalized.Mode == ModeNone && provider == "claude" {
if config.Mode == ModeNone && toFormat == "claude" {
// Claude supports explicit disable via thinking.type="disabled".
// Keep Budget=0 so applier can omit budget_tokens.
normalized.Budget = 0
normalized.Level = ""
config.Budget = 0
config.Level = ""
} else {
switch normalized.Mode {
switch config.Mode {
case ModeBudget, ModeAuto, ModeNone:
normalized.Budget = ClampBudget(normalized.Budget, modelInfo, provider)
config.Budget = clampBudget(config.Budget, modelInfo, toFormat)
}
// ModeNone with clamped Budget > 0: set Level to lowest for Level-only/Hybrid models
// This ensures Apply layer doesn't need to access support.Levels
if normalized.Mode == ModeNone && normalized.Budget > 0 && len(support.Levels) > 0 {
normalized.Level = ThinkingLevel(support.Levels[0])
if config.Mode == ModeNone && config.Budget > 0 && len(support.Levels) > 0 {
config.Level = ThinkingLevel(support.Levels[0])
}
}
return &normalized, nil
}
func isLevelSupported(level string, supported []string) bool {
for _, candidate := range supported {
if strings.EqualFold(level, strings.TrimSpace(candidate)) {
return true
}
}
return false
}
func normalizeLevels(levels []string) []string {
normalized := make([]string, 0, len(levels))
for _, level := range levels {
normalized = append(normalized, strings.ToLower(strings.TrimSpace(level)))
}
return normalized
return &config, nil
}
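Two behaviours worth noting, sketched as if inside the thinking package with assumed registry data (the values are illustrative, and the first case assumes the registry reports the model as budget-only):

    info := &registry.ModelInfo{ID: "example-model", Thinking: &registry.ThinkingSupport{Min: 1024, Max: 20000}}

    // Cross-format: an OpenAI-style level aimed at a budget-based model is converted,
    // then clamped into the model's range.
    cfg, err := ValidateConfig(ThinkingConfig{Mode: ModeLevel, Level: LevelHigh}, info, "openai", "claude")
    // err == nil; cfg.Mode == ModeBudget with the budget derived from "high"

    // Same family: an explicit out-of-range budget is now rejected instead of clamped.
    _, err = ValidateConfig(ThinkingConfig{Mode: ModeBudget, Budget: 64000}, info, "claude", "claude")
    // err carries ErrBudgetOutOfRange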
// convertAutoToMidRange converts ModeAuto to a mid-range value when dynamic is not allowed.
@@ -246,7 +198,172 @@ func convertAutoToMidRange(config ThinkingConfig, support *registry.ThinkingSupp
return config
}
// logClamp logs a debug message when budget clamping occurs.
// standardLevelOrder defines the canonical ordering of thinking levels from lowest to highest.
var standardLevelOrder = []ThinkingLevel{LevelMinimal, LevelLow, LevelMedium, LevelHigh, LevelXHigh}
// clampLevel clamps the given level to the nearest supported level.
// On tie, prefers the lower level.
func clampLevel(level ThinkingLevel, modelInfo *registry.ModelInfo, provider string) ThinkingLevel {
model := "unknown"
var supported []string
if modelInfo != nil {
if modelInfo.ID != "" {
model = modelInfo.ID
}
if modelInfo.Thinking != nil {
supported = modelInfo.Thinking.Levels
}
}
if len(supported) == 0 || isLevelSupported(string(level), supported) {
return level
}
pos := levelIndex(string(level))
if pos == -1 {
return level
}
bestIdx, bestDist := -1, len(standardLevelOrder)+1
for _, s := range supported {
if idx := levelIndex(strings.TrimSpace(s)); idx != -1 {
if dist := abs(pos - idx); dist < bestDist || (dist == bestDist && idx < bestIdx) {
bestIdx, bestDist = idx, dist
}
}
}
if bestIdx >= 0 {
clamped := standardLevelOrder[bestIdx]
log.WithFields(log.Fields{
"provider": provider,
"model": model,
"original_level": string(level),
"clamped_to": string(clamped),
}).Debug("thinking: level clamped |")
return clamped
}
return level
}
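A quick sketch of the tie-breaking rule with assumed registry data: a derived "medium" against a model that only lists "low" and "high" resolves to the lower neighbour.

    info := &registry.ModelInfo{ID: "example-model", Thinking: &registry.ThinkingSupport{Levels: []string{"low", "high"}}}
    level := clampLevel(LevelMedium, info, "codex")
    // level == LevelLow, since equidistant candidates prefer the lower level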
// clampBudget clamps a budget value to the model's supported range.
func clampBudget(value int, modelInfo *registry.ModelInfo, provider string) int {
model := "unknown"
support := (*registry.ThinkingSupport)(nil)
if modelInfo != nil {
if modelInfo.ID != "" {
model = modelInfo.ID
}
support = modelInfo.Thinking
}
if support == nil {
return value
}
// Auto value (-1) passes through without clamping.
if value == -1 {
return value
}
min, max := support.Min, support.Max
if value == 0 && !support.ZeroAllowed {
log.WithFields(log.Fields{
"provider": provider,
"model": model,
"original_value": value,
"clamped_to": min,
"min": min,
"max": max,
}).Warn("thinking: budget zero not allowed |")
return min
}
// Some models are level-only and do not define numeric budget ranges.
if min == 0 && max == 0 {
return value
}
if value < min {
if value == 0 && support.ZeroAllowed {
return 0
}
logClamp(provider, model, value, min, min, max)
return min
}
if value > max {
logClamp(provider, model, value, max, min, max)
return max
}
return value
}
func isLevelSupported(level string, supported []string) bool {
for _, s := range supported {
if strings.EqualFold(level, strings.TrimSpace(s)) {
return true
}
}
return false
}
func levelIndex(level string) int {
for i, l := range standardLevelOrder {
if strings.EqualFold(level, string(l)) {
return i
}
}
return -1
}
func normalizeLevels(levels []string) []string {
out := make([]string, len(levels))
for i, l := range levels {
out[i] = strings.ToLower(strings.TrimSpace(l))
}
return out
}
func isBudgetBasedProvider(provider string) bool {
switch provider {
case "gemini", "gemini-cli", "antigravity", "claude":
return true
default:
return false
}
}
func isLevelBasedProvider(provider string) bool {
switch provider {
case "openai", "openai-response", "codex":
return true
default:
return false
}
}
func isGeminiFamily(provider string) bool {
switch provider {
case "gemini", "gemini-cli", "antigravity":
return true
default:
return false
}
}
func isSameProviderFamily(from, to string) bool {
if from == to {
return true
}
return isGeminiFamily(from) && isGeminiFamily(to)
}
func abs(x int) int {
if x < 0 {
return -x
}
return x
}
func logClamp(provider, model string, original, clampedTo, min, max int) {
log.WithFields(log.Fields{
"provider": provider,

View File

@@ -12,7 +12,6 @@ import (
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/cache"
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
"github.com/router-for-me/CLIProxyAPI/v6/internal/thinking"
"github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/common"
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
@@ -388,14 +387,11 @@ func ConvertClaudeRequestToAntigravity(modelName string, inputRawJSON []byte, _
// Map Anthropic thinking -> Gemini thinkingBudget/include_thoughts when type==enabled
if t := gjson.GetBytes(rawJSON, "thinking"); t.Exists() && t.IsObject() {
modelInfo := registry.LookupModelInfo(modelName)
if modelInfo != nil && modelInfo.Thinking != nil {
if t.Get("type").String() == "enabled" {
if b := t.Get("budget_tokens"); b.Exists() && b.Type == gjson.Number {
budget := int(b.Int())
out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", budget)
out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
}
out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.includeThoughts", true)
}
}
}

View File

@@ -343,8 +343,8 @@ func TestConvertClaudeRequestToAntigravity_ThinkingConfig(t *testing.T) {
if thinkingConfig.Get("thinkingBudget").Int() != 8000 {
t.Errorf("Expected thinkingBudget 8000, got %d", thinkingConfig.Get("thinkingBudget").Int())
}
if !thinkingConfig.Get("include_thoughts").Bool() {
t.Error("include_thoughts should be true")
if !thinkingConfig.Get("includeThoughts").Bool() {
t.Error("includeThoughts should be true")
}
} else {
t.Log("thinkingConfig not present - model may not be registered in test registry")

View File

@@ -15,7 +15,7 @@ import (
"strings"
"github.com/google/uuid"
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
"github.com/router-for-me/CLIProxyAPI/v6/internal/thinking"
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@@ -115,19 +115,42 @@ func ConvertGeminiRequestToClaude(modelName string, inputRawJSON []byte, stream
}
}
// Include thoughts configuration for reasoning process visibility
// Only apply for models that support thinking and use numeric budgets, not discrete levels.
// Translator only does format conversion, ApplyThinking handles model capability validation.
if thinkingConfig := genConfig.Get("thinkingConfig"); thinkingConfig.Exists() && thinkingConfig.IsObject() {
modelInfo := registry.LookupModelInfo(modelName)
if modelInfo != nil && modelInfo.Thinking != nil && len(modelInfo.Thinking.Levels) == 0 {
// Check for thinkingBudget first - if present, enable thinking with budget
if thinkingBudget := thinkingConfig.Get("thinkingBudget"); thinkingBudget.Exists() && thinkingBudget.Int() > 0 {
if thinkingLevel := thinkingConfig.Get("thinkingLevel"); thinkingLevel.Exists() {
level := strings.ToLower(strings.TrimSpace(thinkingLevel.String()))
switch level {
case "":
case "none":
out, _ = sjson.Set(out, "thinking.type", "disabled")
out, _ = sjson.Delete(out, "thinking.budget_tokens")
case "auto":
out, _ = sjson.Set(out, "thinking.type", "enabled")
out, _ = sjson.Set(out, "thinking.budget_tokens", thinkingBudget.Int())
} else if includeThoughts := thinkingConfig.Get("include_thoughts"); includeThoughts.Exists() && includeThoughts.Type == gjson.True {
// Fallback to include_thoughts if no budget specified
out, _ = sjson.Delete(out, "thinking.budget_tokens")
default:
if budget, ok := thinking.ConvertLevelToBudget(level); ok {
out, _ = sjson.Set(out, "thinking.type", "enabled")
out, _ = sjson.Set(out, "thinking.budget_tokens", budget)
}
}
} else if thinkingBudget := thinkingConfig.Get("thinkingBudget"); thinkingBudget.Exists() {
budget := int(thinkingBudget.Int())
switch budget {
case 0:
out, _ = sjson.Set(out, "thinking.type", "disabled")
out, _ = sjson.Delete(out, "thinking.budget_tokens")
case -1:
out, _ = sjson.Set(out, "thinking.type", "enabled")
out, _ = sjson.Delete(out, "thinking.budget_tokens")
default:
out, _ = sjson.Set(out, "thinking.type", "enabled")
out, _ = sjson.Set(out, "thinking.budget_tokens", budget)
}
} else if includeThoughts := thinkingConfig.Get("includeThoughts"); includeThoughts.Exists() && includeThoughts.Type == gjson.True {
out, _ = sjson.Set(out, "thinking.type", "enabled")
} else if includeThoughts := thinkingConfig.Get("include_thoughts"); includeThoughts.Exists() && includeThoughts.Type == gjson.True {
out, _ = sjson.Set(out, "thinking.type", "enabled")
}
}
}
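With the registry gate removed, the translator maps whatever thinking config the Gemini request carries and leaves capability checks to ApplyThinking. A summary of the resulting Claude-side values (the budget for a named level comes from ConvertLevelToBudget, so the exact number is registry-defined):

    thinkingLevel "none"   -> thinking.type = "disabled", budget_tokens removed
    thinkingLevel "auto"   -> thinking.type = "enabled",  budget_tokens removed
    thinkingLevel "high"   -> thinking.type = "enabled",  budget_tokens from ConvertLevelToBudget("high")
    thinkingBudget 0       -> thinking.type = "disabled", budget_tokens removed
    thinkingBudget -1      -> thinking.type = "enabled",  budget_tokens removed (dynamic)
    thinkingBudget 8192    -> thinking.type = "enabled",  budget_tokens = 8192
    includeThoughts true   -> thinking.type = "enabled"   (no budget set)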

View File

@@ -15,7 +15,6 @@ import (
"strings"
"github.com/google/uuid"
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
"github.com/router-for-me/CLIProxyAPI/v6/internal/thinking"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@@ -66,9 +65,8 @@ func ConvertOpenAIRequestToClaude(modelName string, inputRawJSON []byte, stream
root := gjson.ParseBytes(rawJSON)
// Convert OpenAI reasoning_effort to Claude thinking config.
if v := root.Get("reasoning_effort"); v.Exists() {
modelInfo := registry.LookupModelInfo(modelName)
if modelInfo != nil && modelInfo.Thinking != nil && len(modelInfo.Thinking.Levels) == 0 {
effort := strings.ToLower(strings.TrimSpace(v.String()))
if effort != "" {
budget, ok := thinking.ConvertLevelToBudget(effort)
@@ -87,7 +85,6 @@ func ConvertOpenAIRequestToClaude(modelName string, inputRawJSON []byte, stream
}
}
}
}
// Helper for generating tool call IDs in the form: toolu_<alphanum>
// This ensures unique identifiers for tool calls in the Claude Code format

View File

@@ -10,7 +10,6 @@ import (
"strings"
"github.com/google/uuid"
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
"github.com/router-for-me/CLIProxyAPI/v6/internal/thinking"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@@ -54,9 +53,8 @@ func ConvertOpenAIResponsesRequestToClaude(modelName string, inputRawJSON []byte
root := gjson.ParseBytes(rawJSON)
// Convert OpenAI Responses reasoning.effort to Claude thinking config.
if v := root.Get("reasoning.effort"); v.Exists() {
modelInfo := registry.LookupModelInfo(modelName)
if modelInfo != nil && modelInfo.Thinking != nil && len(modelInfo.Thinking.Levels) == 0 {
effort := strings.ToLower(strings.TrimSpace(v.String()))
if effort != "" {
budget, ok := thinking.ConvertLevelToBudget(effort)
@@ -75,7 +73,6 @@ func ConvertOpenAIResponsesRequestToClaude(modelName string, inputRawJSON []byte
}
}
}
}
// Helper for generating tool call IDs when missing
genToolCallID := func() string {

View File

@@ -12,7 +12,6 @@ import (
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/misc"
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
"github.com/router-for-me/CLIProxyAPI/v6/internal/thinking"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@@ -218,20 +217,17 @@ func ConvertClaudeRequestToCodex(modelName string, inputRawJSON []byte, _ bool)
// Add additional configuration parameters for the Codex API.
template, _ = sjson.Set(template, "parallel_tool_calls", true)
// Convert thinking.budget_tokens to reasoning.effort for level-based models
reasoningEffort := "medium" // default
// Convert thinking.budget_tokens to reasoning.effort.
reasoningEffort := "medium"
if thinkingConfig := rootResult.Get("thinking"); thinkingConfig.Exists() && thinkingConfig.IsObject() {
modelInfo := registry.LookupModelInfo(modelName)
switch thinkingConfig.Get("type").String() {
case "enabled":
if modelInfo != nil && modelInfo.Thinking != nil && len(modelInfo.Thinking.Levels) > 0 {
if budgetTokens := thinkingConfig.Get("budget_tokens"); budgetTokens.Exists() {
budget := int(budgetTokens.Int())
if effort, ok := thinking.ConvertBudgetToLevel(budget); ok && effort != "" {
reasoningEffort = effort
}
}
}
case "disabled":
if effort, ok := thinking.ConvertBudgetToLevel(0); ok && effort != "" {
reasoningEffort = effort

View File

@@ -14,7 +14,6 @@ import (
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/misc"
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
"github.com/router-for-me/CLIProxyAPI/v6/internal/thinking"
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
"github.com/tidwall/gjson"
@@ -249,22 +248,28 @@ func ConvertGeminiRequestToCodex(modelName string, inputRawJSON []byte, _ bool)
// Fixed flags aligning with Codex expectations
out, _ = sjson.Set(out, "parallel_tool_calls", true)
// Convert thinkingBudget to reasoning.effort for level-based models
reasoningEffort := "medium" // default
// Convert Gemini thinkingConfig to Codex reasoning.effort.
effortSet := false
if genConfig := root.Get("generationConfig"); genConfig.Exists() {
if thinkingConfig := genConfig.Get("thinkingConfig"); thinkingConfig.Exists() && thinkingConfig.IsObject() {
modelInfo := registry.LookupModelInfo(modelName)
if modelInfo != nil && modelInfo.Thinking != nil && len(modelInfo.Thinking.Levels) > 0 {
if thinkingBudget := thinkingConfig.Get("thinkingBudget"); thinkingBudget.Exists() {
budget := int(thinkingBudget.Int())
if effort, ok := thinking.ConvertBudgetToLevel(budget); ok && effort != "" {
reasoningEffort = effort
if thinkingLevel := thinkingConfig.Get("thinkingLevel"); thinkingLevel.Exists() {
effort := strings.ToLower(strings.TrimSpace(thinkingLevel.String()))
if effort != "" {
out, _ = sjson.Set(out, "reasoning.effort", effort)
effortSet = true
}
} else if thinkingBudget := thinkingConfig.Get("thinkingBudget"); thinkingBudget.Exists() {
if effort, ok := thinking.ConvertBudgetToLevel(int(thinkingBudget.Int())); ok {
out, _ = sjson.Set(out, "reasoning.effort", effort)
effortSet = true
}
}
}
}
if !effortSet {
// No thinking config, set default effort
out, _ = sjson.Set(out, "reasoning.effort", "medium")
}
out, _ = sjson.Set(out, "reasoning.effort", reasoningEffort)
out, _ = sjson.Set(out, "reasoning.summary", "auto")
out, _ = sjson.Set(out, "stream", true)
out, _ = sjson.Set(out, "store", false)

View File

@@ -9,7 +9,6 @@ import (
"bytes"
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
"github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/common"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@@ -161,14 +160,11 @@ func ConvertClaudeRequestToCLI(modelName string, inputRawJSON []byte, _ bool) []
// Map Anthropic thinking -> Gemini thinkingBudget/include_thoughts when type==enabled
if t := gjson.GetBytes(rawJSON, "thinking"); t.Exists() && t.IsObject() {
modelInfo := registry.LookupModelInfo(modelName)
if modelInfo != nil && modelInfo.Thinking != nil {
if t.Get("type").String() == "enabled" {
if b := t.Get("budget_tokens"); b.Exists() && b.Type == gjson.Number {
budget := int(b.Int())
out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", budget)
out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
}
out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.includeThoughts", true)
}
}
}

View File

@@ -9,7 +9,6 @@ import (
"bytes"
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
"github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/common"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@@ -153,16 +152,13 @@ func ConvertClaudeRequestToGemini(modelName string, inputRawJSON []byte, _ bool)
}
// Map Anthropic thinking -> Gemini thinkingBudget/include_thoughts when enabled
// Only apply for models that use numeric budgets, not discrete levels.
// Translator only does format conversion, ApplyThinking handles model capability validation.
if t := gjson.GetBytes(rawJSON, "thinking"); t.Exists() && t.IsObject() {
modelInfo := registry.LookupModelInfo(modelName)
if modelInfo != nil && modelInfo.Thinking != nil && len(modelInfo.Thinking.Levels) == 0 {
if t.Get("type").String() == "enabled" {
if b := t.Get("budget_tokens"); b.Exists() && b.Type == gjson.Number {
budget := int(b.Int())
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", budget)
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.include_thoughts", true)
}
out, _ = sjson.Set(out, "generationConfig.thinkingConfig.includeThoughts", true)
}
}
}

View File

@@ -77,12 +77,15 @@ func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream
}
}
// Convert thinkingBudget to reasoning_effort
// Always perform conversion to support allowCompat models that may not be in registry
// Map Gemini thinkingConfig to OpenAI reasoning_effort.
if thinkingConfig := genConfig.Get("thinkingConfig"); thinkingConfig.Exists() && thinkingConfig.IsObject() {
if thinkingBudget := thinkingConfig.Get("thinkingBudget"); thinkingBudget.Exists() {
budget := int(thinkingBudget.Int())
if effort, ok := thinking.ConvertBudgetToLevel(budget); ok && effort != "" {
if thinkingLevel := thinkingConfig.Get("thinkingLevel"); thinkingLevel.Exists() {
effort := strings.ToLower(strings.TrimSpace(thinkingLevel.String()))
if effort != "" {
out, _ = sjson.Set(out, "reasoning_effort", effort)
}
} else if thinkingBudget := thinkingConfig.Get("thinkingBudget"); thinkingBudget.Exists() {
if effort, ok := thinking.ConvertBudgetToLevel(int(thinkingBudget.Int())); ok {
out, _ = sjson.Set(out, "reasoning_effort", effort)
}
}
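The same precedence now applies in the Codex and Gemini-to-OpenAI translators: an explicit thinkingLevel passes through as the effort, a numeric thinkingBudget is mapped via ConvertBudgetToLevel, and only the Codex path still falls back to "medium" when neither is present. A compact sketch of that precedence inside the translator body, assuming its existing gjson/sjson/thinking imports:

    if lvl := thinkingConfig.Get("thinkingLevel"); lvl.Exists() {
        if effort := strings.ToLower(strings.TrimSpace(lvl.String())); effort != "" {
            out, _ = sjson.Set(out, "reasoning_effort", effort)
        }
    } else if b := thinkingConfig.Get("thinkingBudget"); b.Exists() {
        if effort, ok := thinking.ConvertBudgetToLevel(int(b.Int())); ok {
            out, _ = sjson.Set(out, "reasoning_effort", effort)
        }
    }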

File diff suppressed because it is too large.