Merge branch 'dev' into think

Authored by Luis Pater on 2025-12-12 22:16:44 +08:00; committed by GitHub
9 changed files with 199 additions and 30 deletions

@@ -54,13 +54,14 @@ func (e *OpenAICompatExecutor) Execute(ctx context.Context, auth *cliproxyauth.A
     from := opts.SourceFormat
     to := sdktranslator.FromString("openai")
     translated := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), opts.Stream)
-    if modelOverride := e.resolveUpstreamModel(req.Model, auth); modelOverride != "" {
+    modelOverride := e.resolveUpstreamModel(req.Model, auth)
+    if modelOverride != "" {
         translated = e.overrideModel(translated, modelOverride)
     }
     translated = applyPayloadConfigWithRoot(e.cfg, req.Model, to.String(), "", translated)
     translated = applyReasoningEffortMetadata(translated, req.Metadata, req.Model, "reasoning_effort")
     upstreamModel := util.ResolveOriginalModel(req.Model, req.Metadata)
-    if upstreamModel != "" {
+    if upstreamModel != "" && modelOverride == "" {
         translated, _ = sjson.SetBytes(translated, "model", upstreamModel)
     }
     translated = normalizeThinkingConfig(translated, upstreamModel)
@@ -148,13 +149,14 @@ func (e *OpenAICompatExecutor) ExecuteStream(ctx context.Context, auth *cliproxy
     from := opts.SourceFormat
     to := sdktranslator.FromString("openai")
     translated := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), true)
-    if modelOverride := e.resolveUpstreamModel(req.Model, auth); modelOverride != "" {
+    modelOverride := e.resolveUpstreamModel(req.Model, auth)
+    if modelOverride != "" {
         translated = e.overrideModel(translated, modelOverride)
     }
     translated = applyPayloadConfigWithRoot(e.cfg, req.Model, to.String(), "", translated)
     translated = applyReasoningEffortMetadata(translated, req.Metadata, req.Model, "reasoning_effort")
     upstreamModel := util.ResolveOriginalModel(req.Model, req.Metadata)
-    if upstreamModel != "" {
+    if upstreamModel != "" && modelOverride == "" {
         translated, _ = sjson.SetBytes(translated, "model", upstreamModel)
     }
     translated = normalizeThinkingConfig(translated, upstreamModel)
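
Both hunks above apply the same change to Execute and ExecuteStream: the auth-level model override is hoisted into a named modelOverride variable so that the later step restoring the original upstream model is skipped whenever an override is in effect. A minimal, self-contained sketch of that precedence follows; the function and model names are illustrative stand-ins, not code from the repository.

package main

import "fmt"

// resolvePayloadModel mirrors the precedence introduced above: an auth-level
// override wins; otherwise the original upstream model (if any) is written
// back; otherwise the requested model is left as-is. Illustrative only.
func resolvePayloadModel(requested, override, upstream string) string {
    if override != "" {
        return override
    }
    if upstream != "" {
        return upstream
    }
    return requested
}

func main() {
    // Hypothetical model names, chosen only to show the three cases.
    fmt.Println(resolvePayloadModel("alias-model", "override-model", "upstream-model")) // override-model
    fmt.Println(resolvePayloadModel("alias-model", "", "upstream-model"))               // upstream-model
    fmt.Println(resolvePayloadModel("alias-model", "", ""))                             // alias-model
}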

@@ -52,10 +52,14 @@ func applyReasoningEffortMetadata(payload []byte, metadata map[string]any, model
     if len(metadata) == 0 {
         return payload
     }
-    if !util.ModelSupportsThinking(model) {
+    if field == "" {
         return payload
     }
-    if field == "" {
+    baseModel := util.ResolveOriginalModel(model, metadata)
+    if baseModel == "" {
+        baseModel = model
+    }
+    if !util.ModelSupportsThinking(baseModel) && !util.IsOpenAICompatibilityModel(baseModel) {
         return payload
     }
     if effort, ok := util.ReasoningEffortFromMetadata(metadata); ok && effort != "" {
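
This hunk reorders the guards in applyReasoningEffortMetadata: the field check comes first, and the thinking-support check now runs against the base model resolved from metadata (falling back to the request model), accepting OpenAI-compatibility models as well. A rough sketch of the resulting decision, with stand-in predicates; shouldApplyEffort and the helpers below are illustrative, not code from the repository.

package main

import "fmt"

// Stand-ins for util.ModelSupportsThinking and util.IsOpenAICompatibilityModel;
// the real predicates live in the repository's util package.
func supportsThinking(model string) bool    { return model == "thinking-model" }
func isOpenAICompatModel(model string) bool { return model == "compat-model" }

// shouldApplyEffort sketches the new guard order: the reasoning-effort field
// must be set, and the base model (resolved from metadata, falling back to
// the request model) must either support thinking or be an
// OpenAI-compatibility model. Illustrative only.
func shouldApplyEffort(field, model, baseFromMetadata string) bool {
    if field == "" {
        return false
    }
    baseModel := baseFromMetadata
    if baseModel == "" {
        baseModel = model
    }
    return supportsThinking(baseModel) || isOpenAICompatModel(baseModel)
}

func main() {
    fmt.Println(shouldApplyEffort("reasoning_effort", "alias-model", "thinking-model")) // true
    fmt.Println(shouldApplyEffort("reasoning_effort", "compat-model", ""))              // true
    fmt.Println(shouldApplyEffort("reasoning_effort", "plain-model", ""))               // false
    fmt.Println(shouldApplyEffort("", "thinking-model", ""))                            // false
}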
@@ -239,6 +243,9 @@ func normalizeThinkingConfig(payload []byte, model string) []byte {
     }
     if !util.ModelSupportsThinking(model) {
+        if util.IsOpenAICompatibilityModel(model) {
+            return payload
+        }
         return stripThinkingFields(payload, false)
     }
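
In normalizeThinkingConfig, payloads for OpenAI-compatibility models are now returned unchanged rather than having their thinking fields stripped. A small illustrative sketch of the three branches; the names below are stand-ins for the repository helpers, not the actual implementation.

package main

import "fmt"

// Stand-ins for the repository's util predicates; illustrative only.
func supportsThinking(model string) bool    { return model == "thinking-model" }
func isOpenAICompatModel(model string) bool { return model == "compat-model" }

// normalizeSketch shows the three outcomes after the change: thinking-capable
// models keep their config, OpenAI-compatibility models pass through
// untouched, and everything else has its thinking fields stripped.
func normalizeSketch(model string) string {
    if supportsThinking(model) {
        return "keep thinking config"
    }
    if isOpenAICompatModel(model) {
        return "pass payload through unchanged"
    }
    return "strip thinking fields"
}

func main() {
    fmt.Println(normalizeSketch("thinking-model")) // keep thinking config
    fmt.Println(normalizeSketch("compat-model"))   // pass payload through unchanged
    fmt.Println(normalizeSketch("plain-model"))    // strip thinking fields
}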