From e5312fb5a25f23183305c592602b668a81b992d0 Mon Sep 17 00:00:00 2001
From: hkfires <10558748+hkfires@users.noreply.github.com>
Date: Tue, 9 Dec 2025 16:54:13 +0800
Subject: [PATCH 1/4] feat(antigravity): support canonical names for antigravity models

---
 internal/registry/model_definitions.go            | 13 ++++++++-----
 internal/runtime/executor/antigravity_executor.go |  9 +++++++--
 2 files changed, 15 insertions(+), 7 deletions(-)

diff --git a/internal/registry/model_definitions.go b/internal/registry/model_definitions.go
index c82c2b67..77015d14 100644
--- a/internal/registry/model_definitions.go
+++ b/internal/registry/model_definitions.go
@@ -991,16 +991,19 @@ func GetIFlowModels() []*ModelInfo {
 type AntigravityModelConfig struct {
 	Thinking            *ThinkingSupport
 	MaxCompletionTokens int
+	Name                string
 }
 
 // GetAntigravityModelConfig returns static configuration for antigravity models.
 // Keys use the ALIASED model names (after modelName2Alias conversion) for direct lookup.
 func GetAntigravityModelConfig() map[string]*AntigravityModelConfig {
 	return map[string]*AntigravityModelConfig{
-		"gemini-2.5-flash":                  {Thinking: &ThinkingSupport{Min: 0, Max: 24576, ZeroAllowed: true, DynamicAllowed: true}},
-		"gemini-2.5-flash-lite":             {Thinking: &ThinkingSupport{Min: 0, Max: 24576, ZeroAllowed: true, DynamicAllowed: true}},
-		"gemini-3-pro-preview":              {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true}},
-		"gemini-claude-sonnet-4-5-thinking": {Thinking: &ThinkingSupport{Min: 1024, Max: 200000, ZeroAllowed: false, DynamicAllowed: true}, MaxCompletionTokens: 64000},
-		"gemini-claude-opus-4-5-thinking":   {Thinking: &ThinkingSupport{Min: 1024, Max: 200000, ZeroAllowed: false, DynamicAllowed: true}, MaxCompletionTokens: 64000},
+		"gemini-2.5-flash":                        {Thinking: &ThinkingSupport{Min: 0, Max: 24576, ZeroAllowed: true, DynamicAllowed: true}, Name: "models/gemini-2.5-flash"},
+		"gemini-2.5-flash-lite":                   {Thinking: &ThinkingSupport{Min: 0, Max: 24576, ZeroAllowed: true, DynamicAllowed: true}, Name: "models/gemini-2.5-flash-lite"},
+		"gemini-2.5-computer-use-preview-10-2025": {Name: "models/gemini-2.5-computer-use-preview-10-2025"},
+		"gemini-3-pro-preview":                    {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true}, Name: "models/gemini-3-pro-preview"},
+		"gemini-3-pro-image-preview":              {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true}, Name: "models/gemini-3-pro-image-preview"},
+		"gemini-claude-sonnet-4-5-thinking":       {Thinking: &ThinkingSupport{Min: 1024, Max: 200000, ZeroAllowed: false, DynamicAllowed: true}, MaxCompletionTokens: 64000},
+		"gemini-claude-opus-4-5-thinking":         {Thinking: &ThinkingSupport{Min: 1024, Max: 200000, ZeroAllowed: false, DynamicAllowed: true}, MaxCompletionTokens: 64000},
 	}
 }
diff --git a/internal/runtime/executor/antigravity_executor.go b/internal/runtime/executor/antigravity_executor.go
index 730a32fb..052d4faf 100644
--- a/internal/runtime/executor/antigravity_executor.go
+++ b/internal/runtime/executor/antigravity_executor.go
@@ -373,9 +373,14 @@ func FetchAntigravityModels(ctx context.Context, auth *cliproxyauth.Auth, cfg *c
 	for originalName := range result.Map() {
 		aliasName := modelName2Alias(originalName)
 		if aliasName != "" {
+			cfg := modelConfig[aliasName]
+			modelName := aliasName
+			if cfg != nil && cfg.Name != "" {
+				modelName = cfg.Name
+			}
 			modelInfo := &registry.ModelInfo{
 				ID:          aliasName,
-				Name:        aliasName,
+				Name:        modelName,
 				Description: aliasName,
 				DisplayName: aliasName,
 				Version:     aliasName,
@@ -385,7 +390,7 @@ func FetchAntigravityModels(ctx context.Context, auth *cliproxyauth.Auth, cfg *c
 				Type:        antigravityAuthType,
 			}
 			// Look up Thinking support from static config using alias name
-			if cfg, ok := modelConfig[aliasName]; ok {
+			if cfg != nil {
 				if cfg.Thinking != nil {
 					modelInfo.Thinking = cfg.Thinking
 				}
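
The patch above resolves a canonical "models/..." name from the static config and falls back to the alias when no canonical name is registered. The standalone sketch below reproduces only that lookup-and-fallback step; the trimmed-down AntigravityModelConfig (Thinking omitted) and the resolveName helper are illustrative and not part of the real registry package.

package main

import "fmt"

// AntigravityModelConfig here mirrors only the fields this patch touches.
type AntigravityModelConfig struct {
	MaxCompletionTokens int
	Name                string // optional canonical "models/..." identifier
}

// resolveName returns the canonical name when the static config provides one,
// otherwise it falls back to the alias, matching the fallback in the executor hunk.
func resolveName(alias string, configs map[string]*AntigravityModelConfig) string {
	if cfg, ok := configs[alias]; ok && cfg.Name != "" {
		return cfg.Name
	}
	return alias
}

func main() {
	configs := map[string]*AntigravityModelConfig{
		"gemini-2.5-flash":                {Name: "models/gemini-2.5-flash"},
		"gemini-claude-opus-4-5-thinking": {MaxCompletionTokens: 64000}, // no canonical name
	}
	fmt.Println(resolveName("gemini-2.5-flash", configs))                // models/gemini-2.5-flash
	fmt.Println(resolveName("gemini-claude-opus-4-5-thinking", configs)) // gemini-claude-opus-4-5-thinking
	fmt.Println(resolveName("unknown-alias", configs))                   // unknown-alias
}

Running the sketch prints the canonical name for gemini-2.5-flash and the bare alias for the Claude-backed entry, which is the value the executor change feeds into ModelInfo.Name.
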
From da23ddb061a029bd51e321058183ada19f98cda9 Mon Sep 17 00:00:00 2001
From: hkfires <10558748+hkfires@users.noreply.github.com>
Date: Tue, 9 Dec 2025 17:34:15 +0800
Subject: [PATCH 2/4] fix(gemini): normalize model listing output

---
 sdk/api/handlers/gemini/gemini_handlers.go | 18 +++++++++++++++++-
 1 file changed, 17 insertions(+), 1 deletion(-)

diff --git a/sdk/api/handlers/gemini/gemini_handlers.go b/sdk/api/handlers/gemini/gemini_handlers.go
index 7ba72a93..6cd9ee62 100644
--- a/sdk/api/handlers/gemini/gemini_handlers.go
+++ b/sdk/api/handlers/gemini/gemini_handlers.go
@@ -48,8 +48,24 @@ func (h *GeminiAPIHandler) Models() []map[string]any {
 // GeminiModels handles the Gemini models listing endpoint.
 // It returns a JSON response containing available Gemini models and their specifications.
 func (h *GeminiAPIHandler) GeminiModels(c *gin.Context) {
+	rawModels := h.Models()
+	normalizedModels := make([]map[string]any, 0, len(rawModels))
+	defaultMethods := []string{"generateContent"}
+	for _, model := range rawModels {
+		normalizedModel := make(map[string]any, len(model))
+		for k, v := range model {
+			normalizedModel[k] = v
+		}
+		if name, ok := normalizedModel["name"].(string); ok && name != "" && !strings.HasPrefix(name, "models/") {
+			normalizedModel["name"] = "models/" + name
+		}
+		if _, ok := normalizedModel["supportedGenerationMethods"]; !ok {
+			normalizedModel["supportedGenerationMethods"] = defaultMethods
+		}
+		normalizedModels = append(normalizedModels, normalizedModel)
+	}
 	c.JSON(http.StatusOK, gin.H{
-		"models": h.Models(),
+		"models": normalizedModels,
 	})
 }
 
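
The handler change above normalizes each entry from h.Models() before responding: it copies the map, prefixes the name with "models/" when the prefix is missing, and injects a default supportedGenerationMethods list. Below is a self-contained sketch of that per-model step, assuming the same map[string]any shape; normalizeModel and the sample input are illustrative, not the handler's actual code.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// normalizeModel copies the model map, adds the "models/" prefix to the name
// when it is missing, and fills in a default supportedGenerationMethods entry.
func normalizeModel(model map[string]any) map[string]any {
	out := make(map[string]any, len(model))
	for k, v := range model {
		out[k] = v
	}
	if name, ok := out["name"].(string); ok && name != "" && !strings.HasPrefix(name, "models/") {
		out["name"] = "models/" + name
	}
	if _, ok := out["supportedGenerationMethods"]; !ok {
		out["supportedGenerationMethods"] = []string{"generateContent"}
	}
	return out
}

func main() {
	in := map[string]any{"name": "gemini-2.5-flash"}
	b, _ := json.Marshal(normalizeModel(in))
	fmt.Println(string(b)) // {"name":"models/gemini-2.5-flash","supportedGenerationMethods":["generateContent"]}
}
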
"openai", Version: "gpt-5.1-2025-11-12", - DisplayName: "GPT 5 High", - Description: "Stable version of GPT 5, The best model for coding and agentic tasks across domains.", + DisplayName: "GPT 5.1 High", + Description: "Stable version of GPT 5.1, The best model for coding and agentic tasks across domains.", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, @@ -745,8 +745,8 @@ func GetOpenAIModels() []*ModelInfo { OwnedBy: "openai", Type: "openai", Version: "gpt-5.1-2025-11-12", - DisplayName: "GPT 5 Codex", - Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.", + DisplayName: "GPT 5.1 Codex", + Description: "Stable version of GPT 5.1 Codex, The best model for coding and agentic tasks across domains.", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, @@ -758,8 +758,8 @@ func GetOpenAIModels() []*ModelInfo { OwnedBy: "openai", Type: "openai", Version: "gpt-5.1-2025-11-12", - DisplayName: "GPT 5 Codex Low", - Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.", + DisplayName: "GPT 5.1 Codex Low", + Description: "Stable version of GPT 5.1 Codex, The best model for coding and agentic tasks across domains.", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, @@ -771,8 +771,8 @@ func GetOpenAIModels() []*ModelInfo { OwnedBy: "openai", Type: "openai", Version: "gpt-5.1-2025-11-12", - DisplayName: "GPT 5 Codex Medium", - Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.", + DisplayName: "GPT 5.1 Codex Medium", + Description: "Stable version of GPT 5.1 Codex, The best model for coding and agentic tasks across domains.", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, @@ -784,8 +784,8 @@ func GetOpenAIModels() []*ModelInfo { OwnedBy: "openai", Type: "openai", Version: "gpt-5.1-2025-11-12", - DisplayName: "GPT 5 Codex High", - Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.", + DisplayName: "GPT 5.1 Codex High", + Description: "Stable version of GPT 5.1 Codex, The best model for coding and agentic tasks across domains.", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, @@ -797,8 +797,8 @@ func GetOpenAIModels() []*ModelInfo { OwnedBy: "openai", Type: "openai", Version: "gpt-5.1-2025-11-12", - DisplayName: "GPT 5 Codex Mini", - Description: "Stable version of GPT 5 Codex Mini: cheaper, faster, but less capable version of GPT 5 Codex.", + DisplayName: "GPT 5.1 Codex Mini", + Description: "Stable version of GPT 5.1 Codex Mini: cheaper, faster, but less capable version of GPT 5.1 Codex.", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, @@ -810,8 +810,8 @@ func GetOpenAIModels() []*ModelInfo { OwnedBy: "openai", Type: "openai", Version: "gpt-5.1-2025-11-12", - DisplayName: "GPT 5 Codex Mini Medium", - Description: "Stable version of GPT 5 Codex Mini: cheaper, faster, but less capable version of GPT 5 Codex.", + DisplayName: "GPT 5.1 Codex Mini Medium", + Description: "Stable version of GPT 5.1 Codex Mini: cheaper, faster, but less capable version of GPT 5.1 Codex.", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, @@ -823,8 +823,8 @@ func GetOpenAIModels() []*ModelInfo { OwnedBy: "openai", Type: 
"openai", Version: "gpt-5.1-2025-11-12", - DisplayName: "GPT 5 Codex Mini High", - Description: "Stable version of GPT 5 Codex Mini: cheaper, faster, but less capable version of GPT 5 Codex.", + DisplayName: "GPT 5.1 Codex Mini High", + Description: "Stable version of GPT 5.1 Codex Mini: cheaper, faster, but less capable version of GPT 5.1 Codex.", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, @@ -837,8 +837,8 @@ func GetOpenAIModels() []*ModelInfo { OwnedBy: "openai", Type: "openai", Version: "gpt-5.1-max", - DisplayName: "GPT 5 Codex Max", - Description: "Stable version of GPT 5 Codex Max", + DisplayName: "GPT 5.1 Codex Max", + Description: "Stable version of GPT 5.1 Codex Max", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, @@ -850,8 +850,8 @@ func GetOpenAIModels() []*ModelInfo { OwnedBy: "openai", Type: "openai", Version: "gpt-5.1-max", - DisplayName: "GPT 5 Codex Max Low", - Description: "Stable version of GPT 5 Codex Max Low", + DisplayName: "GPT 5.1 Codex Max Low", + Description: "Stable version of GPT 5.1 Codex Max Low", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, @@ -863,8 +863,8 @@ func GetOpenAIModels() []*ModelInfo { OwnedBy: "openai", Type: "openai", Version: "gpt-5.1-max", - DisplayName: "GPT 5 Codex Max Medium", - Description: "Stable version of GPT 5 Codex Max Medium", + DisplayName: "GPT 5.1 Codex Max Medium", + Description: "Stable version of GPT 5.1 Codex Max Medium", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, @@ -876,8 +876,8 @@ func GetOpenAIModels() []*ModelInfo { OwnedBy: "openai", Type: "openai", Version: "gpt-5.1-max", - DisplayName: "GPT 5 Codex Max High", - Description: "Stable version of GPT 5 Codex Max High", + DisplayName: "GPT 5.1 Codex Max High", + Description: "Stable version of GPT 5.1 Codex Max High", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, @@ -889,8 +889,8 @@ func GetOpenAIModels() []*ModelInfo { OwnedBy: "openai", Type: "openai", Version: "gpt-5.1-max", - DisplayName: "GPT 5 Codex Max XHigh", - Description: "Stable version of GPT 5 Codex Max XHigh", + DisplayName: "GPT 5.1 Codex Max XHigh", + Description: "Stable version of GPT 5.1 Codex Max XHigh", ContextLength: 400000, MaxCompletionTokens: 128000, SupportedParameters: []string{"tools"}, From 347769b3e30ff5a1562675e4f09789906bec6a10 Mon Sep 17 00:00:00 2001 From: hkfires <10558748+hkfires@users.noreply.github.com> Date: Tue, 9 Dec 2025 18:09:14 +0800 Subject: [PATCH 4/4] fix(openai-compat): use model id for auth model display --- sdk/cliproxy/service.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/cliproxy/service.go b/sdk/cliproxy/service.go index 13d647dd..1ef829d1 100644 --- a/sdk/cliproxy/service.go +++ b/sdk/cliproxy/service.go @@ -779,7 +779,7 @@ func (s *Service) registerModelsForAuth(a *coreauth.Auth) { Created: time.Now().Unix(), OwnedBy: compat.Name, Type: "openai-compatibility", - DisplayName: m.Name, + DisplayName: modelID, }) } // Register and return