diff --git a/internal/client/codex_client.go b/internal/client/codex_client.go
index 3f6b6be2..2c3430b5 100644
--- a/internal/client/codex_client.go
+++ b/internal/client/codex_client.go
@@ -93,8 +93,9 @@ func (c *CodexClient) Provider() string {
 func (c *CodexClient) CanProvideModel(modelName string) bool {
     models := []string{
         "gpt-5",
-        "gpt-5-mini",
-        "gpt-5-nano",
+        "gpt-5-minimal",
+        "gpt-5-low",
+        "gpt-5-medium",
         "gpt-5-high",
         "codex-mini-latest",
     }
@@ -344,14 +345,14 @@ func (c *CodexClient) APIRequest(ctx context.Context, modelName, endpoint string
     // Stream must be set to true
     jsonBody, _ = sjson.SetBytes(jsonBody, "stream", true)

-    if util.InArray([]string{"gpt-5-nano", "gpt-5-mini", "gpt-5", "gpt-5-high"}, modelName) {
+    if util.InArray([]string{"gpt-5-minimal", "gpt-5-low", "gpt-5-medium", "gpt-5-high"}, modelName) {
         jsonBody, _ = sjson.SetBytes(jsonBody, "model", "gpt-5")
         switch modelName {
-        case "gpt-5-nano":
+        case "gpt-5-minimal":
             jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "minimal")
-        case "gpt-5-mini":
+        case "gpt-5-low":
             jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "low")
-        case "gpt-5":
+        case "gpt-5-medium":
             jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "medium")
         case "gpt-5-high":
             jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "high")
diff --git a/internal/registry/model_definitions.go b/internal/registry/model_definitions.go
index 2a4c2880..941be357 100644
--- a/internal/registry/model_definitions.go
+++ b/internal/registry/model_definitions.go
@@ -149,6 +149,58 @@ func GetOpenAIModels() []*ModelInfo {
             MaxCompletionTokens: 128000,
             SupportedParameters: []string{"tools"},
         },
+        {
+            ID:                  "gpt-5-minimal",
+            Object:              "model",
+            Created:             time.Now().Unix(),
+            OwnedBy:             "openai",
+            Type:                "openai",
+            Version:             "gpt-5-2025-08-07",
+            DisplayName:         "GPT 5 Minimal",
+            Description:         "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
+            ContextLength:       400000,
+            MaxCompletionTokens: 128000,
+            SupportedParameters: []string{"tools"},
+        },
+        {
+            ID:                  "gpt-5-low",
+            Object:              "model",
+            Created:             time.Now().Unix(),
+            OwnedBy:             "openai",
+            Type:                "openai",
+            Version:             "gpt-5-2025-08-07",
+            DisplayName:         "GPT 5 Low",
+            Description:         "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
+            ContextLength:       400000,
+            MaxCompletionTokens: 128000,
+            SupportedParameters: []string{"tools"},
+        },
+        {
+            ID:                  "gpt-5-medium",
+            Object:              "model",
+            Created:             time.Now().Unix(),
+            OwnedBy:             "openai",
+            Type:                "openai",
+            Version:             "gpt-5-2025-08-07",
+            DisplayName:         "GPT 5 Medium",
+            Description:         "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
+            ContextLength:       400000,
+            MaxCompletionTokens: 128000,
+            SupportedParameters: []string{"tools"},
+        },
+        {
+            ID:                  "gpt-5-high",
+            Object:              "model",
+            Created:             time.Now().Unix(),
+            OwnedBy:             "openai",
+            Type:                "openai",
+            Version:             "gpt-5-2025-08-07",
+            DisplayName:         "GPT 5 High",
+            Description:         "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
+            ContextLength:       400000,
+            MaxCompletionTokens: 128000,
+            SupportedParameters: []string{"tools"},
+        },
         {
             ID:                  "codex-mini-latest",
             Object:              "model",