diff --git a/internal/registry/model_definitions.go b/internal/registry/model_definitions.go
index d66adeb5..d31df9c9 100644
--- a/internal/registry/model_definitions.go
+++ b/internal/registry/model_definitions.go
@@ -430,19 +430,6 @@ func GetOpenAIModels() []*ModelInfo {
 			MaxCompletionTokens: 128000,
 			SupportedParameters: []string{"tools"},
 		},
-		{
-			ID:                  "gpt-5.1-minimal",
-			Object:              "model",
-			Created:             time.Now().Unix(),
-			OwnedBy:             "openai",
-			Type:                "openai",
-			Version:             "gpt-5.1-2025-11-12",
-			DisplayName:         "GPT 5 Minimal",
-			Description:         "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
-			ContextLength:       400000,
-			MaxCompletionTokens: 128000,
-			SupportedParameters: []string{"tools"},
-		},
 		{
 			ID:                  "gpt-5.1-low",
 			Object:              "model",
diff --git a/internal/runtime/executor/codex_executor.go b/internal/runtime/executor/codex_executor.go
index 36e8359a..5ac06bef 100644
--- a/internal/runtime/executor/codex_executor.go
+++ b/internal/runtime/executor/codex_executor.go
@@ -85,14 +85,12 @@ func (e *CodexExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, re
 		default:
 			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
 		}
-	} else if util.InArray([]string{"gpt-5.1", "gpt-5.1-minimal", "gpt-5.1-low", "gpt-5.1-medium", "gpt-5.1-high"}, req.Model) {
+	} else if util.InArray([]string{"gpt-5.1", "gpt-5.1-low", "gpt-5.1-medium", "gpt-5.1-high"}, req.Model) {
 		body, _ = sjson.SetBytes(body, "model", "gpt-5.1")
 		switch req.Model {
-		case "gpt-5.1-minimal":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "minimal")
 		case "gpt-5.1-low":
 			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		case "gpt-5.1-medium":
+		case "gpt-5.1-medium", "gpt-5.1":
 			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
 		case "gpt-5.1-high":
 			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
@@ -239,14 +237,12 @@ func (e *CodexExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Au
 		case "gpt-5-codex-mini-high":
 			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
 		}
-	} else if util.InArray([]string{"gpt-5.1", "gpt-5.1-minimal", "gpt-5.1-low", "gpt-5.1-medium", "gpt-5.1-high"}, req.Model) {
+	} else if util.InArray([]string{"gpt-5.1", "gpt-5.1-low", "gpt-5.1-medium", "gpt-5.1-high"}, req.Model) {
 		body, _ = sjson.SetBytes(body, "model", "gpt-5.1")
 		switch req.Model {
-		case "gpt-5.1-minimal":
-			body, _ = sjson.SetBytes(body, "reasoning.effort", "minimal")
 		case "gpt-5.1-low":
 			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
-		case "gpt-5.1-medium":
+		case "gpt-5.1-medium", "gpt-5.1":
 			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
 		case "gpt-5.1-high":
 			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
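
For reference, a minimal standalone sketch of the model-to-effort mapping the patched switch now implements: the remaining gpt-5.1 aliases all resolve to the upstream "gpt-5.1" model, the bare "gpt-5.1" id shares the "medium" reasoning effort, and "gpt-5.1-minimal" is no longer recognized. The `resolveEffort` helper and `main` driver below are illustrative only, not code from the repository.

```go
package main

import "fmt"

// resolveEffort mirrors the post-patch switch in codex_executor.go:
// every accepted gpt-5.1 alias maps to upstream model "gpt-5.1" plus a
// reasoning.effort value, and the bare "gpt-5.1" id defaults to "medium".
func resolveEffort(model string) (upstream, effort string, ok bool) {
	switch model {
	case "gpt-5.1-low":
		return "gpt-5.1", "low", true
	case "gpt-5.1-medium", "gpt-5.1":
		return "gpt-5.1", "medium", true
	case "gpt-5.1-high":
		return "gpt-5.1", "high", true
	default:
		// "gpt-5.1-minimal" lands here after this change.
		return "", "", false
	}
}

func main() {
	for _, m := range []string{"gpt-5.1", "gpt-5.1-minimal", "gpt-5.1-high"} {
		if upstream, effort, ok := resolveEffort(m); ok {
			fmt.Printf("%s -> model=%s reasoning.effort=%s\n", m, upstream, effort)
		} else {
			fmt.Printf("%s -> not a recognized gpt-5.1 alias\n", m)
		}
	}
}
```

With this mapping, a request for "gpt-5.1" produces the same upstream body as "gpt-5.1-medium", while requests for the removed "gpt-5.1-minimal" fall through to whatever handling the executor applies to unrecognized models.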