From 9c9ea993802e0cae1c9d4bf6a86e8196923fe581 Mon Sep 17 00:00:00 2001
From: Luis Pater
Date: Sat, 30 Aug 2025 22:00:37 +0800
Subject: [PATCH] Add support for new GPT-5 model variants
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Renamed the effort-based GPT-5 variants for consistency (`gpt-5-nano` →
  `gpt-5-minimal`, `gpt-5-mini` → `gpt-5-low`, and the explicit `gpt-5`
  reasoning-effort case → `gpt-5-medium`).
- Added metadata definitions for `gpt-5-minimal`, `gpt-5-low`,
  `gpt-5-medium`, and `gpt-5-high`, and updated the Codex request logic to
  set each variant's reasoning effort.
---
 internal/client/codex_client.go        | 13 ++++---
 internal/registry/model_definitions.go | 52 ++++++++++++++++++++++++++
 2 files changed, 59 insertions(+), 6 deletions(-)

diff --git a/internal/client/codex_client.go b/internal/client/codex_client.go
index 3f6b6be2..2c3430b5 100644
--- a/internal/client/codex_client.go
+++ b/internal/client/codex_client.go
@@ -93,8 +93,9 @@ func (c *CodexClient) Provider() string {
 func (c *CodexClient) CanProvideModel(modelName string) bool {
     models := []string{
         "gpt-5",
-        "gpt-5-mini",
-        "gpt-5-nano",
+        "gpt-5-minimal",
+        "gpt-5-low",
+        "gpt-5-medium",
         "gpt-5-high",
         "codex-mini-latest",
     }
@@ -344,14 +345,14 @@ func (c *CodexClient) APIRequest(ctx context.Context, modelName, endpoint string
     // Stream must be set to true
     jsonBody, _ = sjson.SetBytes(jsonBody, "stream", true)
 
-    if util.InArray([]string{"gpt-5-nano", "gpt-5-mini", "gpt-5", "gpt-5-high"}, modelName) {
+    if util.InArray([]string{"gpt-5-minimal", "gpt-5-low", "gpt-5-medium", "gpt-5-high"}, modelName) {
         jsonBody, _ = sjson.SetBytes(jsonBody, "model", "gpt-5")
         switch modelName {
-        case "gpt-5-nano":
+        case "gpt-5-minimal":
             jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "minimal")
-        case "gpt-5-mini":
+        case "gpt-5-low":
             jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "low")
-        case "gpt-5":
+        case "gpt-5-medium":
             jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "medium")
         case "gpt-5-high":
             jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "high")
diff --git a/internal/registry/model_definitions.go b/internal/registry/model_definitions.go
index 2a4c2880..941be357 100644
--- a/internal/registry/model_definitions.go
+++ b/internal/registry/model_definitions.go
@@ -149,6 +149,58 @@ func GetOpenAIModels() []*ModelInfo {
             MaxCompletionTokens: 128000,
             SupportedParameters: []string{"tools"},
         },
+        {
+            ID:                  "gpt-5-minimal",
+            Object:              "model",
+            Created:             time.Now().Unix(),
+            OwnedBy:             "openai",
+            Type:                "openai",
+            Version:             "gpt-5-2025-08-07",
+            DisplayName:         "GPT 5 Minimal",
+            Description:         "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
+            ContextLength:       400000,
+            MaxCompletionTokens: 128000,
+            SupportedParameters: []string{"tools"},
+        },
+        {
+            ID:                  "gpt-5-low",
+            Object:              "model",
+            Created:             time.Now().Unix(),
+            OwnedBy:             "openai",
+            Type:                "openai",
+            Version:             "gpt-5-2025-08-07",
+            DisplayName:         "GPT 5 Low",
+            Description:         "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
+            ContextLength:       400000,
+            MaxCompletionTokens: 128000,
+            SupportedParameters: []string{"tools"},
+        },
+        {
+            ID:                  "gpt-5-medium",
+            Object:              "model",
+            Created:             time.Now().Unix(),
+            OwnedBy:             "openai",
+            Type:                "openai",
+            Version:             "gpt-5-2025-08-07",
+            DisplayName:         "GPT 5 Medium",
+            Description:         "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
+            ContextLength:       400000,
+            MaxCompletionTokens: 128000,
+            SupportedParameters: []string{"tools"},
+        },
+        {
+            ID:                  "gpt-5-high",
+            Object:              "model",
+            Created:             time.Now().Unix(),
+            OwnedBy:             "openai",
+            Type:                "openai",
+            Version:             "gpt-5-2025-08-07",
+            DisplayName:         "GPT 5 High",
+            Description:         "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
+            ContextLength:       400000,
+            MaxCompletionTokens: 128000,
+            SupportedParameters: []string{"tools"},
+        },
         {
             ID:     "codex-mini-latest",
             Object: "model",