feat(util): implement dynamic thinking suffix normalization and refactor budget resolution logic

- Added support for parsing and normalizing dynamic thinking model suffixes.
- Centralized budget resolution across executors and payload helpers.
- Retired legacy Gemini-specific thinking handlers in favor of unified logic.
- Updated executors to use metadata-based thinking configuration.
- Added `ResolveOriginalModel` utility for resolving normalized upstream models using request metadata.
- Updated executors (Gemini, Codex, iFlow, OpenAI, Qwen) to incorporate upstream model resolution and substitute model values in payloads and request URLs.
- Ensured fallbacks still derive a usable model when metadata is missing or malformed.
- Refactored upstream model resolution to draw on request metadata when selecting and normalizing models.
- Improved handling of thinking configurations and model overrides in executors.
- Removed hardcoded thinking model entries and migrated logic to metadata-based resolution.
- Updated payload mutations to always include the resolved model.
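
A minimal usage sketch of the new helpers (not part of this commit; the import path and the model name below are placeholders, so substitute the repository's real util package path):

package main

import (
	"fmt"

	// Placeholder import path; replace with this repository's util package path.
	util "example.local/cliproxy/util"
)

func main() {
	// "-thinking-2048" is stripped and recorded as a numeric budget in metadata.
	base, metadata := util.NormalizeThinkingModel("gemini-2.5-flash-thinking-2048")
	fmt.Println(base)     // gemini-2.5-flash
	fmt.Println(metadata) // map[thinking_budget:2048 thinking_original_model:gemini-2.5-flash-thinking-2048]

	// Executors resolve the effective thinking configuration from that metadata.
	if budget, include, ok := util.ResolveThinkingConfigFromMetadata(base, metadata); ok {
		fmt.Println(*budget, include == nil) // 2048 true
	}

	// ResolveOriginalModel recovers the normalized upstream model for payloads and URLs.
	fmt.Println(util.ResolveOriginalModel(base, metadata)) // gemini-2.5-flash
}
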
Author: Luis Pater
Date: 2025-12-11 01:16:56 +08:00
parent e717939edb
commit 423ce97665
13 changed files with 579 additions and 647 deletions


@@ -1,8 +1,6 @@
package util
import (
"encoding/json"
"strconv"
"strings"
"github.com/tidwall/gjson"
@@ -15,80 +13,6 @@ const (
GeminiOriginalModelMetadataKey = "gemini_original_model"
)
func ParseGeminiThinkingSuffix(model string) (string, *int, *bool, bool) {
if model == "" {
return model, nil, nil, false
}
lower := strings.ToLower(model)
if !strings.HasPrefix(lower, "gemini-") {
return model, nil, nil, false
}
if strings.HasSuffix(lower, "-nothinking") {
base := model[:len(model)-len("-nothinking")]
budgetValue := 0
if strings.HasPrefix(lower, "gemini-2.5-pro") {
budgetValue = 128
}
include := false
return base, &budgetValue, &include, true
}
// Handle "-reasoning" suffix: enables thinking with dynamic budget (-1)
// Maps: gemini-2.5-flash-reasoning -> gemini-2.5-flash with thinkingBudget=-1
if strings.HasSuffix(lower, "-reasoning") {
base := model[:len(model)-len("-reasoning")]
budgetValue := -1 // Dynamic budget
include := true
return base, &budgetValue, &include, true
}
idx := strings.LastIndex(lower, "-thinking-")
if idx == -1 {
return model, nil, nil, false
}
digits := model[idx+len("-thinking-"):]
if digits == "" {
return model, nil, nil, false
}
end := len(digits)
for i := 0; i < len(digits); i++ {
if digits[i] < '0' || digits[i] > '9' {
end = i
break
}
}
if end == 0 {
return model, nil, nil, false
}
valueStr := digits[:end]
value, err := strconv.Atoi(valueStr)
if err != nil {
return model, nil, nil, false
}
base := model[:idx]
budgetValue := value
return base, &budgetValue, nil, true
}
func NormalizeGeminiThinkingModel(modelName string) (string, map[string]any) {
baseModel, budget, include, matched := ParseGeminiThinkingSuffix(modelName)
if !matched {
return baseModel, nil
}
metadata := map[string]any{
GeminiOriginalModelMetadataKey: modelName,
}
if budget != nil {
metadata[GeminiThinkingBudgetMetadataKey] = *budget
}
if include != nil {
metadata[GeminiIncludeThoughtsMetadataKey] = *include
}
return baseModel, metadata
}
func ApplyGeminiThinkingConfig(body []byte, budget *int, includeThoughts *bool) []byte {
if budget == nil && includeThoughts == nil {
return body
@@ -133,80 +57,6 @@ func ApplyGeminiCLIThinkingConfig(body []byte, budget *int, includeThoughts *boo
return updated
}
func GeminiThinkingFromMetadata(metadata map[string]any) (*int, *bool, bool) {
if len(metadata) == 0 {
return nil, nil, false
}
var (
budgetPtr *int
includePtr *bool
matched bool
)
if rawBudget, ok := metadata[GeminiThinkingBudgetMetadataKey]; ok {
switch v := rawBudget.(type) {
case int:
budget := v
budgetPtr = &budget
matched = true
case int32:
budget := int(v)
budgetPtr = &budget
matched = true
case int64:
budget := int(v)
budgetPtr = &budget
matched = true
case float64:
budget := int(v)
budgetPtr = &budget
matched = true
case json.Number:
if val, err := v.Int64(); err == nil {
budget := int(val)
budgetPtr = &budget
matched = true
}
}
}
if rawInclude, ok := metadata[GeminiIncludeThoughtsMetadataKey]; ok {
switch v := rawInclude.(type) {
case bool:
include := v
includePtr = &include
matched = true
case string:
if parsed, err := strconv.ParseBool(v); err == nil {
include := parsed
includePtr = &include
matched = true
}
case json.Number:
if val, err := v.Int64(); err == nil {
include := val != 0
includePtr = &include
matched = true
}
case int:
include := v != 0
includePtr = &include
matched = true
case int32:
include := v != 0
includePtr = &include
matched = true
case int64:
include := v != 0
includePtr = &include
matched = true
case float64:
include := v != 0
includePtr = &include
matched = true
}
}
return budgetPtr, includePtr, matched
}
// modelsWithDefaultThinking lists models that should have thinking enabled by default
// when no explicit thinkingConfig is provided.
var modelsWithDefaultThinking = map[string]bool{


@@ -0,0 +1,327 @@
package util
import (
"encoding/json"
"strconv"
"strings"
)
const (
ThinkingBudgetMetadataKey = "thinking_budget"
ThinkingIncludeThoughtsMetadataKey = "thinking_include_thoughts"
ReasoningEffortMetadataKey = "reasoning_effort"
ThinkingOriginalModelMetadataKey = "thinking_original_model"
)
// NormalizeThinkingModel parses dynamic thinking suffixes on model names and returns
// the normalized base model with extracted metadata. Supported patterns:
// - "-thinking-<number>" extracts a numeric budget
// - "-thinking-<level>" extracts a reasoning effort level (minimal/low/medium/high/xhigh/auto/none)
// - "-thinking" maps to a default reasoning effort of "medium"
// - "-reasoning" maps to dynamic budget (-1) and include_thoughts=true
// - "-nothinking" maps to budget=0 and include_thoughts=false
func NormalizeThinkingModel(modelName string) (string, map[string]any) {
if modelName == "" {
return modelName, nil
}
lower := strings.ToLower(modelName)
baseModel := modelName
var (
budgetOverride *int
includeThoughts *bool
reasoningEffort *string
matched bool
)
switch {
case strings.HasSuffix(lower, "-nothinking"):
baseModel = modelName[:len(modelName)-len("-nothinking")]
budget := 0
include := false
budgetOverride = &budget
includeThoughts = &include
matched = true
case strings.HasSuffix(lower, "-reasoning"):
baseModel = modelName[:len(modelName)-len("-reasoning")]
budget := -1
include := true
budgetOverride = &budget
includeThoughts = &include
matched = true
default:
if idx := strings.LastIndex(lower, "-thinking-"); idx != -1 {
value := modelName[idx+len("-thinking-"):]
if value != "" {
if parsed, ok := parseIntPrefix(value); ok {
baseModel = modelName[:idx]
budgetOverride = &parsed
matched = true
} else if effort, okEffort := normalizeReasoningEffort(value); okEffort {
baseModel = modelName[:idx]
reasoningEffort = &effort
matched = true
}
}
} else if strings.HasSuffix(lower, "-thinking") {
baseModel = modelName[:len(modelName)-len("-thinking")]
effort := "medium"
reasoningEffort = &effort
matched = true
}
}
if !matched {
return baseModel, nil
}
metadata := map[string]any{
ThinkingOriginalModelMetadataKey: modelName,
}
if budgetOverride != nil {
metadata[ThinkingBudgetMetadataKey] = *budgetOverride
}
if includeThoughts != nil {
metadata[ThinkingIncludeThoughtsMetadataKey] = *includeThoughts
}
if reasoningEffort != nil {
metadata[ReasoningEffortMetadataKey] = *reasoningEffort
}
return baseModel, metadata
}
// ThinkingFromMetadata extracts thinking overrides from metadata produced by NormalizeThinkingModel.
// It accepts both the new generic keys and legacy Gemini-specific keys.
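// Generic keys take precedence; legacy keys are consulted only when the generic keys
// do not yield a value.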
func ThinkingFromMetadata(metadata map[string]any) (*int, *bool, *string, bool) {
if len(metadata) == 0 {
return nil, nil, nil, false
}
var (
budgetPtr *int
includePtr *bool
effortPtr *string
matched bool
)
readBudget := func(key string) {
if budgetPtr != nil {
return
}
if raw, ok := metadata[key]; ok {
if v, okNumber := parseNumberToInt(raw); okNumber {
budget := v
budgetPtr = &budget
matched = true
}
}
}
readInclude := func(key string) {
if includePtr != nil {
return
}
if raw, ok := metadata[key]; ok {
switch v := raw.(type) {
case bool:
val := v
includePtr = &val
matched = true
case *bool:
if v != nil {
val := *v
includePtr = &val
matched = true
}
}
}
}
readEffort := func(key string) {
if effortPtr != nil {
return
}
if raw, ok := metadata[key]; ok {
if val, okStr := raw.(string); okStr && strings.TrimSpace(val) != "" {
normalized := strings.ToLower(strings.TrimSpace(val))
effortPtr = &normalized
matched = true
}
}
}
readBudget(ThinkingBudgetMetadataKey)
readBudget(GeminiThinkingBudgetMetadataKey)
readInclude(ThinkingIncludeThoughtsMetadataKey)
readInclude(GeminiIncludeThoughtsMetadataKey)
readEffort(ReasoningEffortMetadataKey)
readEffort("reasoning.effort")
return budgetPtr, includePtr, effortPtr, matched
}
// ResolveThinkingConfigFromMetadata derives thinking budget/include overrides,
// converting reasoning effort strings into budgets when possible.
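// For example, metadata carrying only a reasoning effort of "low" yields the budget
// returned by ThinkingEffortToBudget(model, "low"), with no include-thoughts override.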
func ResolveThinkingConfigFromMetadata(model string, metadata map[string]any) (*int, *bool, bool) {
budget, include, effort, matched := ThinkingFromMetadata(metadata)
if !matched {
return nil, nil, false
}
if budget == nil && effort != nil {
if derived, ok := ThinkingEffortToBudget(model, *effort); ok {
budget = &derived
}
}
return budget, include, budget != nil || include != nil || effort != nil
}
// ReasoningEffortFromMetadata resolves a reasoning effort string from metadata,
// inferring "auto" and "none" when budgets request dynamic or disabled thinking.
func ReasoningEffortFromMetadata(metadata map[string]any) (string, bool) {
budget, include, effort, matched := ThinkingFromMetadata(metadata)
if !matched {
return "", false
}
if effort != nil && *effort != "" {
return *effort, true
}
if budget != nil {
switch *budget {
case -1:
return "auto", true
case 0:
return "none", true
}
}
if include != nil && !*include {
return "none", true
}
return "", true
}
// ThinkingEffortToBudget maps reasoning effort levels to approximate budgets,
// clamping the result to the model's supported range.
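// For example, "low" maps to NormalizeThinkingBudget(model, 1024) and "none" maps to 0;
// unrecognized levels report no match.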
func ThinkingEffortToBudget(model, effort string) (int, bool) {
if effort == "" {
return 0, false
}
switch strings.ToLower(effort) {
case "none":
return 0, true
case "auto":
return NormalizeThinkingBudget(model, -1), true
case "minimal":
return NormalizeThinkingBudget(model, 512), true
case "low":
return NormalizeThinkingBudget(model, 1024), true
case "medium":
return NormalizeThinkingBudget(model, 8192), true
case "high":
return NormalizeThinkingBudget(model, 24576), true
case "xhigh":
return NormalizeThinkingBudget(model, 32768), true
default:
return 0, false
}
}
// ResolveOriginalModel returns the normalized base of the original model name stored in
// metadata (if present), otherwise falls back to normalizing the provided model.
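// For example (illustrative name), metadata storing
// thinking_original_model = "gemini-2.5-flash-thinking-1024" resolves to "gemini-2.5-flash".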
func ResolveOriginalModel(model string, metadata map[string]any) string {
normalize := func(name string) string {
if name == "" {
return ""
}
if base, _ := NormalizeThinkingModel(name); base != "" {
return base
}
return strings.TrimSpace(name)
}
if metadata != nil {
if v, ok := metadata[ThinkingOriginalModelMetadataKey]; ok {
if s, okStr := v.(string); okStr && strings.TrimSpace(s) != "" {
if base := normalize(s); base != "" {
return base
}
}
}
if v, ok := metadata[GeminiOriginalModelMetadataKey]; ok {
if s, okStr := v.(string); okStr && strings.TrimSpace(s) != "" {
if base := normalize(s); base != "" {
return base
}
}
}
}
// Fallback: try to re-normalize the model name when metadata was dropped.
if base := normalize(model); base != "" {
return base
}
return model
}
func parseIntPrefix(value string) (int, bool) {
if value == "" {
return 0, false
}
digits := strings.TrimLeft(value, "-")
if digits == "" {
return 0, false
}
end := len(digits)
for i := 0; i < len(digits); i++ {
if digits[i] < '0' || digits[i] > '9' {
end = i
break
}
}
if end == 0 {
return 0, false
}
val, err := strconv.Atoi(digits[:end])
if err != nil {
return 0, false
}
return val, true
}
func parseNumberToInt(raw any) (int, bool) {
switch v := raw.(type) {
case int:
return v, true
case int32:
return int(v), true
case int64:
return int(v), true
case float64:
return int(v), true
case json.Number:
if val, err := v.Int64(); err == nil {
return int(val), true
}
case string:
if strings.TrimSpace(v) == "" {
return 0, false
}
if parsed, err := strconv.Atoi(strings.TrimSpace(v)); err == nil {
return parsed, true
}
}
return 0, false
}
func normalizeReasoningEffort(value string) (string, bool) {
if value == "" {
return "", false
}
effort := strings.ToLower(strings.TrimSpace(value))
switch effort {
case "minimal", "low", "medium", "high", "xhigh", "auto", "none":
return effort, true
default:
return "", false
}
}