feat(runtime): add thinking config normalization

Author: hkfires
Date: 2025-12-11 11:51:33 +08:00
parent a03d514095
commit 3ffd120ae9
4 changed files with 111 additions and 2 deletions

View File

@@ -55,6 +55,7 @@ func (e *CodexExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, re
	to := sdktranslator.FromString("codex")
	body := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), false)
	body = applyReasoningEffortMetadata(body, req.Metadata, req.Model)
	body = normalizeThinkingConfig(body, upstreamModel)
	body = applyPayloadConfig(e.cfg, req.Model, body)
	body, _ = sjson.SetBytes(body, "model", upstreamModel)
	body, _ = sjson.SetBytes(body, "stream", true)
@@ -149,6 +150,7 @@ func (e *CodexExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Au
	body := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), true)
	body = applyReasoningEffortMetadata(body, req.Metadata, req.Model)
	body = normalizeThinkingConfig(body, upstreamModel)
	body = applyPayloadConfig(e.cfg, req.Model, body)
	body, _ = sjson.DeleteBytes(body, "previous_response_id")
	body, _ = sjson.SetBytes(body, "model", upstreamModel)

View File

@@ -59,9 +59,11 @@ func (e *OpenAICompatExecutor) Execute(ctx context.Context, auth *cliproxyauth.A
	}
	translated = applyPayloadConfigWithRoot(e.cfg, req.Model, to.String(), "", translated)
	translated = applyReasoningEffortMetadataChatCompletions(translated, req.Metadata, req.Model)
-	if upstreamModel := util.ResolveOriginalModel(req.Model, req.Metadata); upstreamModel != "" {
+	upstreamModel := util.ResolveOriginalModel(req.Model, req.Metadata)
+	if upstreamModel != "" {
		translated, _ = sjson.SetBytes(translated, "model", upstreamModel)
	}
+	translated = normalizeThinkingConfig(translated, upstreamModel)
	url := strings.TrimSuffix(baseURL, "/") + "/chat/completions"
	httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(translated))
@@ -148,9 +150,11 @@ func (e *OpenAICompatExecutor) ExecuteStream(ctx context.Context, auth *cliproxy
	}
	translated = applyPayloadConfigWithRoot(e.cfg, req.Model, to.String(), "", translated)
	translated = applyReasoningEffortMetadataChatCompletions(translated, req.Metadata, req.Model)
-	if upstreamModel := util.ResolveOriginalModel(req.Model, req.Metadata); upstreamModel != "" {
+	upstreamModel := util.ResolveOriginalModel(req.Model, req.Metadata)
+	if upstreamModel != "" {
		translated, _ = sjson.SetBytes(translated, "model", upstreamModel)
	}
+	translated = normalizeThinkingConfig(translated, upstreamModel)
	url := strings.TrimSuffix(baseURL, "/") + "/chat/completions"
	httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(translated))
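
The only non-additive change in this file is hoisting upstreamModel out of the if statement: with the if x := ...; cond form the variable is scoped to the if block, so it would not be visible to the new normalizeThinkingConfig call that follows. A small sketch of that Go scoping rule (helper names are placeholders):

package main

import "fmt"

func resolve() string { return "upstream-model" } // stand-in for util.ResolveOriginalModel

func main() {
	// Scoped form: m exists only inside the if statement.
	if m := resolve(); m != "" {
		fmt.Println("inside:", m)
	}
	// fmt.Println(m) // would not compile: undefined: m

	// Hoisted form: m stays in scope for later calls, as in the diff.
	m := resolve()
	if m != "" {
		fmt.Println("inside:", m)
	}
	fmt.Println("after:", m) // still usable here
}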

View File

@@ -232,3 +232,60 @@ func matchModelPattern(pattern, model string) bool {
	}
	return pi == len(pattern)
}

// normalizeThinkingConfig normalizes thinking-related fields in the payload
// based on model capabilities. For models without thinking support, it strips
// reasoning fields. For models with level-based thinking, it validates and
// normalizes the reasoning effort level.
func normalizeThinkingConfig(payload []byte, model string) []byte {
	if len(payload) == 0 || model == "" {
		return payload
	}
	if !util.ModelSupportsThinking(model) {
		return stripThinkingFields(payload)
	}
	if util.ModelUsesThinkingLevels(model) {
		return normalizeReasoningEffortLevel(payload, model)
	}
	return payload
}

// stripThinkingFields removes thinking-related fields from the payload for
// models that do not support thinking.
func stripThinkingFields(payload []byte) []byte {
	fieldsToRemove := []string{
		"reasoning",
		"reasoning_effort",
		"reasoning.effort",
	}
	out := payload
	for _, field := range fieldsToRemove {
		if gjson.GetBytes(out, field).Exists() {
			out, _ = sjson.DeleteBytes(out, field)
		}
	}
	return out
}

// normalizeReasoningEffortLevel validates and normalizes the reasoning_effort
// or reasoning.effort field for level-based thinking models.
func normalizeReasoningEffortLevel(payload []byte, model string) []byte {
	out := payload
	if effort := gjson.GetBytes(out, "reasoning_effort"); effort.Exists() {
		if normalized, ok := util.NormalizeReasoningEffortLevel(model, effort.String()); ok {
			out, _ = sjson.SetBytes(out, "reasoning_effort", normalized)
		}
	}
	if effort := gjson.GetBytes(out, "reasoning.effort"); effort.Exists() {
		if normalized, ok := util.NormalizeReasoningEffortLevel(model, effort.String()); ok {
			out, _ = sjson.SetBytes(out, "reasoning.effort", normalized)
		}
	}
	return out
}
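
To make the strip path concrete, a self-contained sketch of what stripThinkingFields does to a payload for a model without thinking support; the model name is made up, and the capability check that the real code performs via util.ModelSupportsThinking is omitted here:

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

// stripReasoning mirrors stripThinkingFields: drop reasoning fields from a
// Chat Completions style payload.
func stripReasoning(payload []byte) []byte {
	for _, field := range []string{"reasoning", "reasoning_effort", "reasoning.effort"} {
		if gjson.GetBytes(payload, field).Exists() {
			payload, _ = sjson.DeleteBytes(payload, field)
		}
	}
	return payload
}

func main() {
	body := []byte(`{"model":"example-chat","reasoning_effort":"high","reasoning":{"effort":"high"}}`)
	fmt.Println(string(stripReasoning(body)))
	// {"model":"example-chat"}
}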

View File

@@ -1,6 +1,8 @@
package util

import (
	"strings"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
)
@@ -67,3 +69,47 @@ func thinkingRangeFromRegistry(model string) (found bool, min int, max int, zero
	}
	return true, info.Thinking.Min, info.Thinking.Max, info.Thinking.ZeroAllowed, info.Thinking.DynamicAllowed
}

// GetModelThinkingLevels returns the discrete reasoning effort levels for the model.
// Returns nil if the model has no thinking support or no levels defined.
func GetModelThinkingLevels(model string) []string {
	if model == "" {
		return nil
	}
	info := registry.GetGlobalRegistry().GetModelInfo(model)
	if info == nil || info.Thinking == nil {
		return nil
	}
	return info.Thinking.Levels
}

// ModelUsesThinkingLevels reports whether the model uses discrete reasoning
// effort levels instead of numeric budgets.
func ModelUsesThinkingLevels(model string) bool {
	levels := GetModelThinkingLevels(model)
	return len(levels) > 0
}

// NormalizeReasoningEffortLevel validates and normalizes a reasoning effort
// level for the given model. If the level is not supported, it returns the
// first (lowest) level from the model's supported levels.
func NormalizeReasoningEffortLevel(model, effort string) (string, bool) {
	levels := GetModelThinkingLevels(model)
	if len(levels) == 0 {
		return "", false
	}
	loweredEffort := strings.ToLower(strings.TrimSpace(effort))
	for _, lvl := range levels {
		if strings.ToLower(lvl) == loweredEffort {
			return lvl, true
		}
	}
	return defaultReasoningLevel(levels), true
}

func defaultReasoningLevel(levels []string) string {
	if len(levels) > 0 {
		return levels[0]
	}
	return ""
}
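
And a sketch of how NormalizeReasoningEffortLevel behaves once a model's registry entry defines discrete levels; the level list below is an assumption for illustration, since the real values come from the model registry:

package main

import (
	"fmt"
	"strings"
)

// normalize mirrors NormalizeReasoningEffortLevel against a fixed level list.
func normalize(levels []string, effort string) (string, bool) {
	if len(levels) == 0 {
		return "", false
	}
	want := strings.ToLower(strings.TrimSpace(effort))
	for _, lvl := range levels {
		if strings.ToLower(lvl) == want {
			return lvl, true
		}
	}
	// Unsupported values fall back to the first (lowest) level.
	return levels[0], true
}

func main() {
	levels := []string{"low", "medium", "high"} // assumed registry levels

	fmt.Println(normalize(levels, " High ")) // "high", true: case/whitespace-insensitive match
	fmt.Println(normalize(levels, "max"))    // "low", true: unsupported value falls back to the lowest level
	fmt.Println(normalize(nil, "high"))      // "", false: the model defines no levels
}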