Mirror of https://github.com/router-for-me/CLIProxyAPI.git (synced 2026-02-03 04:50:52 +08:00)
Merge pull request #470 from router-for-me/agry
fix(gemini): normalize model listing output
@@ -693,8 +693,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-2025-11-12",
-		DisplayName: "GPT 5 Low",
-		Description: "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
+		DisplayName: "GPT 5.1 Nothink",
+		Description: "Stable version of GPT 5.1, The best model for coding and agentic tasks across domains.",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -719,8 +719,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-2025-11-12",
-		DisplayName: "GPT 5 Medium",
-		Description: "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
+		DisplayName: "GPT 5.1 Medium",
+		Description: "Stable version of GPT 5.1, The best model for coding and agentic tasks across domains.",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -732,8 +732,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-2025-11-12",
-		DisplayName: "GPT 5 High",
-		Description: "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
+		DisplayName: "GPT 5.1 High",
+		Description: "Stable version of GPT 5.1, The best model for coding and agentic tasks across domains.",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -745,8 +745,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-2025-11-12",
-		DisplayName: "GPT 5 Codex",
-		Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.",
+		DisplayName: "GPT 5.1 Codex",
+		Description: "Stable version of GPT 5.1 Codex, The best model for coding and agentic tasks across domains.",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -758,8 +758,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-2025-11-12",
-		DisplayName: "GPT 5 Codex Low",
-		Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.",
+		DisplayName: "GPT 5.1 Codex Low",
+		Description: "Stable version of GPT 5.1 Codex, The best model for coding and agentic tasks across domains.",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -771,8 +771,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-2025-11-12",
-		DisplayName: "GPT 5 Codex Medium",
-		Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.",
+		DisplayName: "GPT 5.1 Codex Medium",
+		Description: "Stable version of GPT 5.1 Codex, The best model for coding and agentic tasks across domains.",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -784,8 +784,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-2025-11-12",
-		DisplayName: "GPT 5 Codex High",
-		Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.",
+		DisplayName: "GPT 5.1 Codex High",
+		Description: "Stable version of GPT 5.1 Codex, The best model for coding and agentic tasks across domains.",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -797,8 +797,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-2025-11-12",
-		DisplayName: "GPT 5 Codex Mini",
-		Description: "Stable version of GPT 5 Codex Mini: cheaper, faster, but less capable version of GPT 5 Codex.",
+		DisplayName: "GPT 5.1 Codex Mini",
+		Description: "Stable version of GPT 5.1 Codex Mini: cheaper, faster, but less capable version of GPT 5.1 Codex.",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -810,8 +810,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-2025-11-12",
-		DisplayName: "GPT 5 Codex Mini Medium",
-		Description: "Stable version of GPT 5 Codex Mini: cheaper, faster, but less capable version of GPT 5 Codex.",
+		DisplayName: "GPT 5.1 Codex Mini Medium",
+		Description: "Stable version of GPT 5.1 Codex Mini: cheaper, faster, but less capable version of GPT 5.1 Codex.",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -823,8 +823,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-2025-11-12",
-		DisplayName: "GPT 5 Codex Mini High",
-		Description: "Stable version of GPT 5 Codex Mini: cheaper, faster, but less capable version of GPT 5 Codex.",
+		DisplayName: "GPT 5.1 Codex Mini High",
+		Description: "Stable version of GPT 5.1 Codex Mini: cheaper, faster, but less capable version of GPT 5.1 Codex.",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -837,8 +837,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-max",
-		DisplayName: "GPT 5 Codex Max",
-		Description: "Stable version of GPT 5 Codex Max",
+		DisplayName: "GPT 5.1 Codex Max",
+		Description: "Stable version of GPT 5.1 Codex Max",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -850,8 +850,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-max",
-		DisplayName: "GPT 5 Codex Max Low",
-		Description: "Stable version of GPT 5 Codex Max Low",
+		DisplayName: "GPT 5.1 Codex Max Low",
+		Description: "Stable version of GPT 5.1 Codex Max Low",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -863,8 +863,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-max",
-		DisplayName: "GPT 5 Codex Max Medium",
-		Description: "Stable version of GPT 5 Codex Max Medium",
+		DisplayName: "GPT 5.1 Codex Max Medium",
+		Description: "Stable version of GPT 5.1 Codex Max Medium",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -876,8 +876,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-max",
-		DisplayName: "GPT 5 Codex Max High",
-		Description: "Stable version of GPT 5 Codex Max High",
+		DisplayName: "GPT 5.1 Codex Max High",
+		Description: "Stable version of GPT 5.1 Codex Max High",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -889,8 +889,8 @@ func GetOpenAIModels() []*ModelInfo {
 		OwnedBy: "openai",
 		Type: "openai",
 		Version: "gpt-5.1-max",
-		DisplayName: "GPT 5 Codex Max XHigh",
-		Description: "Stable version of GPT 5 Codex Max XHigh",
+		DisplayName: "GPT 5.1 Codex Max XHigh",
+		Description: "Stable version of GPT 5.1 Codex Max XHigh",
 		ContextLength: 400000,
 		MaxCompletionTokens: 128000,
 		SupportedParameters: []string{"tools"},
@@ -991,16 +991,19 @@ func GetIFlowModels() []*ModelInfo {
 type AntigravityModelConfig struct {
 	Thinking            *ThinkingSupport
 	MaxCompletionTokens int
+	Name                string
 }
 
 // GetAntigravityModelConfig returns static configuration for antigravity models.
 // Keys use the ALIASED model names (after modelName2Alias conversion) for direct lookup.
 func GetAntigravityModelConfig() map[string]*AntigravityModelConfig {
 	return map[string]*AntigravityModelConfig{
-		"gemini-2.5-flash":                  {Thinking: &ThinkingSupport{Min: 0, Max: 24576, ZeroAllowed: true, DynamicAllowed: true}},
-		"gemini-2.5-flash-lite":             {Thinking: &ThinkingSupport{Min: 0, Max: 24576, ZeroAllowed: true, DynamicAllowed: true}},
-		"gemini-3-pro-preview":              {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true}},
-		"gemini-claude-sonnet-4-5-thinking": {Thinking: &ThinkingSupport{Min: 1024, Max: 200000, ZeroAllowed: false, DynamicAllowed: true}, MaxCompletionTokens: 64000},
-		"gemini-claude-opus-4-5-thinking":   {Thinking: &ThinkingSupport{Min: 1024, Max: 200000, ZeroAllowed: false, DynamicAllowed: true}, MaxCompletionTokens: 64000},
+		"gemini-2.5-flash":                        {Thinking: &ThinkingSupport{Min: 0, Max: 24576, ZeroAllowed: true, DynamicAllowed: true}, Name: "models/gemini-2.5-flash"},
+		"gemini-2.5-flash-lite":                   {Thinking: &ThinkingSupport{Min: 0, Max: 24576, ZeroAllowed: true, DynamicAllowed: true}, Name: "models/gemini-2.5-flash-lite"},
+		"gemini-2.5-computer-use-preview-10-2025": {Name: "models/gemini-2.5-computer-use-preview-10-2025"},
+		"gemini-3-pro-preview":                    {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true}, Name: "models/gemini-3-pro-preview"},
+		"gemini-3-pro-image-preview":              {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true}, Name: "models/gemini-3-pro-image-preview"},
+		"gemini-claude-sonnet-4-5-thinking":       {Thinking: &ThinkingSupport{Min: 1024, Max: 200000, ZeroAllowed: false, DynamicAllowed: true}, MaxCompletionTokens: 64000},
+		"gemini-claude-opus-4-5-thinking":         {Thinking: &ThinkingSupport{Min: 1024, Max: 200000, ZeroAllowed: false, DynamicAllowed: true}, MaxCompletionTokens: 64000},
 	}
 }
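
A minimal, runnable sketch (separate from the diff itself) of how the new Name field can be consumed: the aliased key resolves to its upstream "models/..." path when a Name is configured, and falls back to the alias otherwise. The struct and the two map entries are re-declared locally just for this example; the real definitions are the ones in the hunk above, and the Thinking field is omitted because it is not needed here.

package main

import "fmt"

// Local stand-in for AntigravityModelConfig above, trimmed to the fields used in the sketch.
type antigravityModelConfig struct {
	MaxCompletionTokens int
	Name                string
}

func main() {
	// Two entries copied from the map in the hunk above: one with a Name, one without.
	cfg := map[string]*antigravityModelConfig{
		"gemini-2.5-flash":                {Name: "models/gemini-2.5-flash"},
		"gemini-claude-opus-4-5-thinking": {MaxCompletionTokens: 64000},
	}
	for _, alias := range []string{"gemini-2.5-flash", "gemini-claude-opus-4-5-thinking"} {
		name := alias
		if c := cfg[alias]; c != nil && c.Name != "" {
			name = c.Name // prefer the upstream "models/..." path when configured
		}
		fmt.Printf("alias=%s -> name=%s\n", alias, name)
		// alias=gemini-2.5-flash -> name=models/gemini-2.5-flash
		// alias=gemini-claude-opus-4-5-thinking -> name=gemini-claude-opus-4-5-thinking
	}
}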
@@ -373,9 +373,14 @@ func FetchAntigravityModels(ctx context.Context, auth *cliproxyauth.Auth, cfg *c
 	for originalName := range result.Map() {
 		aliasName := modelName2Alias(originalName)
 		if aliasName != "" {
+			cfg := modelConfig[aliasName]
+			modelName := aliasName
+			if cfg != nil && cfg.Name != "" {
+				modelName = cfg.Name
+			}
 			modelInfo := &registry.ModelInfo{
 				ID:          aliasName,
-				Name:        aliasName,
+				Name:        modelName,
 				Description: aliasName,
 				DisplayName: aliasName,
 				Version:     aliasName,
@@ -385,7 +390,7 @@ func FetchAntigravityModels(ctx context.Context, auth *cliproxyauth.Auth, cfg *c
 				Type:        antigravityAuthType,
 			}
 			// Look up Thinking support from static config using alias name
-			if cfg, ok := modelConfig[aliasName]; ok {
+			if cfg != nil {
 				if cfg.Thinking != nil {
 					modelInfo.Thinking = cfg.Thinking
 				}
@@ -48,8 +48,24 @@ func (h *GeminiAPIHandler) Models() []map[string]any {
 // GeminiModels handles the Gemini models listing endpoint.
 // It returns a JSON response containing available Gemini models and their specifications.
 func (h *GeminiAPIHandler) GeminiModels(c *gin.Context) {
+	rawModels := h.Models()
+	normalizedModels := make([]map[string]any, 0, len(rawModels))
+	defaultMethods := []string{"generateContent"}
+	for _, model := range rawModels {
+		normalizedModel := make(map[string]any, len(model))
+		for k, v := range model {
+			normalizedModel[k] = v
+		}
+		if name, ok := normalizedModel["name"].(string); ok && name != "" && !strings.HasPrefix(name, "models/") {
+			normalizedModel["name"] = "models/" + name
+		}
+		if _, ok := normalizedModel["supportedGenerationMethods"]; !ok {
+			normalizedModel["supportedGenerationMethods"] = defaultMethods
+		}
+		normalizedModels = append(normalizedModels, normalizedModel)
+	}
 	c.JSON(http.StatusOK, gin.H{
-		"models": h.Models(),
+		"models": normalizedModels,
 	})
 }
 
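
As a minimal, runnable sketch (separate from the diff itself), the per-model normalization performed by GeminiModels above can be pulled out into a standalone helper so its effect on a single entry is easy to see: a bare model name gains the "models/" prefix and a default supportedGenerationMethods list, while an already-normalized entry passes through unchanged. The helper name and the sample maps are invented for this example only.

package main

import (
	"fmt"
	"strings"
)

// normalizeModel mirrors the per-model loop body in the handler above.
func normalizeModel(model map[string]any) map[string]any {
	out := make(map[string]any, len(model))
	for k, v := range model {
		out[k] = v
	}
	if name, ok := out["name"].(string); ok && name != "" && !strings.HasPrefix(name, "models/") {
		out["name"] = "models/" + name
	}
	if _, ok := out["supportedGenerationMethods"]; !ok {
		out["supportedGenerationMethods"] = []string{"generateContent"}
	}
	return out
}

func main() {
	// Bare name: gets the prefix and the default methods.
	fmt.Println(normalizeModel(map[string]any{"name": "gemini-3-pro-preview"}))
	// map[name:models/gemini-3-pro-preview supportedGenerationMethods:[generateContent]]

	// Already normalized: left untouched.
	fmt.Println(normalizeModel(map[string]any{"name": "models/gemini-2.5-flash", "supportedGenerationMethods": []string{"generateContent", "countTokens"}}))
	// map[name:models/gemini-2.5-flash supportedGenerationMethods:[generateContent countTokens]]
}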
@@ -779,7 +779,7 @@ func (s *Service) registerModelsForAuth(a *coreauth.Auth) {
 			Created:     time.Now().Unix(),
 			OwnedBy:     compat.Name,
 			Type:        "openai-compatibility",
-			DisplayName: m.Name,
+			DisplayName: modelID,
 		})
 	}
 	// Register and return