Mirror of https://github.com/router-for-me/CLIProxyAPI.git, synced 2026-02-03 04:50:52 +08:00
Fixed: #246
feat(runtime): add support for GPT-5.1 models and variants

Introduce the GPT-5.1 model family, including minimal, low, medium, high, Codex, and Codex Mini variants. Update tokenization and reasoning-effort handling in the executor and registry to accommodate the new models.
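Purely as an illustration (not code from this commit), the new variant IDs can be read as an upstream base model plus a reasoning-effort suffix, which the executor change further down translates into an explicit reasoning.effort value:

package main

import "fmt"

// Illustration only: variant ID -> {upstream model, reasoning effort}, as implemented
// by the executor change in this commit. Bare "gpt-5.1" and "gpt-5.1-codex" get no
// explicit effort; bare "gpt-5.1-codex-mini" defaults to medium.
var gpt51Variants = map[string][2]string{
	"gpt-5.1-minimal":           {"gpt-5.1", "minimal"},
	"gpt-5.1-low":               {"gpt-5.1", "low"},
	"gpt-5.1-medium":            {"gpt-5.1", "medium"},
	"gpt-5.1-high":              {"gpt-5.1", "high"},
	"gpt-5.1-codex-low":         {"gpt-5.1-codex", "low"},
	"gpt-5.1-codex-medium":      {"gpt-5.1-codex", "medium"},
	"gpt-5.1-codex-high":        {"gpt-5.1-codex", "high"},
	"gpt-5.1-codex-mini-medium": {"gpt-5.1-codex-mini", "medium"},
	"gpt-5.1-codex-mini-high":   {"gpt-5.1-codex-mini", "high"},
}

func main() {
	v := gpt51Variants["gpt-5.1-codex-high"]
	fmt.Printf("upstream model %q, reasoning effort %q\n", v[0], v[1])
}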
@@ -417,6 +417,162 @@ func GetOpenAIModels() []*ModelInfo {
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID: "gpt-5.1",
			Object: "model",
			Created: time.Now().Unix(),
			OwnedBy: "openai",
			Type: "openai",
			Version: "gpt-5.1-2025-11-12",
			DisplayName: "GPT 5",
			Description: "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
			ContextLength: 400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID: "gpt-5.1-minimal",
			Object: "model",
			Created: time.Now().Unix(),
			OwnedBy: "openai",
			Type: "openai",
			Version: "gpt-5.1-2025-11-12",
			DisplayName: "GPT 5 Minimal",
			Description: "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
			ContextLength: 400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID: "gpt-5.1-low",
			Object: "model",
			Created: time.Now().Unix(),
			OwnedBy: "openai",
			Type: "openai",
			Version: "gpt-5.1-2025-11-12",
			DisplayName: "GPT 5 Low",
			Description: "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
			ContextLength: 400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID: "gpt-5.1-medium",
			Object: "model",
			Created: time.Now().Unix(),
			OwnedBy: "openai",
			Type: "openai",
			Version: "gpt-5.1-2025-11-12",
			DisplayName: "GPT 5 Medium",
			Description: "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
			ContextLength: 400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID: "gpt-5.1-high",
			Object: "model",
			Created: time.Now().Unix(),
			OwnedBy: "openai",
			Type: "openai",
			Version: "gpt-5.1-2025-11-12",
			DisplayName: "GPT 5 High",
			Description: "Stable version of GPT 5, The best model for coding and agentic tasks across domains.",
			ContextLength: 400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID: "gpt-5.1-codex",
			Object: "model",
			Created: time.Now().Unix(),
			OwnedBy: "openai",
			Type: "openai",
			Version: "gpt-5.1-2025-11-12",
			DisplayName: "GPT 5 Codex",
			Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.",
			ContextLength: 400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID: "gpt-5.1-codex-low",
			Object: "model",
			Created: time.Now().Unix(),
			OwnedBy: "openai",
			Type: "openai",
			Version: "gpt-5.1-2025-11-12",
			DisplayName: "GPT 5 Codex Low",
			Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.",
			ContextLength: 400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID: "gpt-5.1-codex-medium",
			Object: "model",
			Created: time.Now().Unix(),
			OwnedBy: "openai",
			Type: "openai",
			Version: "gpt-5.1-2025-11-12",
			DisplayName: "GPT 5 Codex Medium",
			Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.",
			ContextLength: 400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID: "gpt-5.1-codex-high",
			Object: "model",
			Created: time.Now().Unix(),
			OwnedBy: "openai",
			Type: "openai",
			Version: "gpt-5.1-2025-11-12",
			DisplayName: "GPT 5 Codex High",
			Description: "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.",
			ContextLength: 400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID: "gpt-5.1-codex-mini",
			Object: "model",
			Created: time.Now().Unix(),
			OwnedBy: "openai",
			Type: "openai",
			Version: "gpt-5.1-2025-11-12",
			DisplayName: "GPT 5 Codex Mini",
			Description: "Stable version of GPT 5 Codex Mini: cheaper, faster, but less capable version of GPT 5 Codex.",
			ContextLength: 400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID: "gpt-5.1-codex-mini-medium",
			Object: "model",
			Created: time.Now().Unix(),
			OwnedBy: "openai",
			Type: "openai",
			Version: "gpt-5.1-2025-11-12",
			DisplayName: "GPT 5 Codex Mini Medium",
			Description: "Stable version of GPT 5 Codex Mini: cheaper, faster, but less capable version of GPT 5 Codex.",
			ContextLength: 400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID: "gpt-5.1-codex-mini-high",
			Object: "model",
			Created: time.Now().Unix(),
			OwnedBy: "openai",
			Type: "openai",
			Version: "gpt-5.1-2025-11-12",
			DisplayName: "GPT 5 Codex Mini High",
			Description: "Stable version of GPT 5 Codex Mini: cheaper, faster, but less capable version of GPT 5 Codex.",
			ContextLength: 400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
	}
}
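One way to check that the new registry entries are actually served is to list models through the proxy's OpenAI-compatible API. The sketch below is not from the commit; the base URL, the default port (8317), and the exact response shape are assumptions:

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"strings"
)

// Assumed shape of the /v1/models response; only the field used here.
type modelList struct {
	Data []struct {
		ID string `json:"id"`
	} `json:"data"`
}

func main() {
	// Assumes a local CLIProxyAPI instance on its default port.
	resp, err := http.Get("http://localhost:8317/v1/models")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var list modelList
	if err := json.NewDecoder(resp.Body).Decode(&list); err != nil {
		log.Fatal(err)
	}
	for _, m := range list.Data {
		if strings.HasPrefix(m.ID, "gpt-5.1") {
			fmt.Println(m.ID) // gpt-5.1, gpt-5.1-high, gpt-5.1-codex, ...
		}
	}
}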
@@ -85,6 +85,38 @@ func (e *CodexExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, re
		default:
			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
		}
	} else if util.InArray([]string{"gpt-5.1", "gpt-5.1-minimal", "gpt-5.1-low", "gpt-5.1-medium", "gpt-5.1-high"}, req.Model) {
		body, _ = sjson.SetBytes(body, "model", "gpt-5.1")
		switch req.Model {
		case "gpt-5.1-minimal":
			body, _ = sjson.SetBytes(body, "reasoning.effort", "minimal")
		case "gpt-5.1-low":
			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
		case "gpt-5.1-medium":
			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
		case "gpt-5.1-high":
			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
		}
	} else if util.InArray([]string{"gpt-5.1-codex", "gpt-5.1-codex-low", "gpt-5.1-codex-medium", "gpt-5.1-codex-high"}, req.Model) {
		body, _ = sjson.SetBytes(body, "model", "gpt-5.1-codex")
		switch req.Model {
		case "gpt-5.1-codex-low":
			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
		case "gpt-5.1-codex-medium":
			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
		case "gpt-5.1-codex-high":
			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
		}
	} else if util.InArray([]string{"gpt-5.1-codex-mini", "gpt-5.1-codex-mini-medium", "gpt-5.1-codex-mini-high"}, req.Model) {
		body, _ = sjson.SetBytes(body, "model", "gpt-5.1-codex-mini")
		switch req.Model {
		case "gpt-5.1-codex-mini-medium":
			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
		case "gpt-5.1-codex-mini-high":
			body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
		default:
			body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
		}
	}

	body, _ = sjson.SetBytes(body, "stream", true)
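A minimal sketch (not part of the diff) of what the rewrite above does to a request body, using only the tidwall/sjson calls the executor already relies on; the sample body and its "input" field are illustrative assumptions:

package main

import (
	"fmt"

	"github.com/tidwall/sjson"
)

func main() {
	// Incoming client request addressed to a reasoning-effort variant (illustrative body).
	body := []byte(`{"model":"gpt-5.1-codex-high","input":"hello"}`)

	// The executor rewrites the variant ID to its upstream base model...
	body, _ = sjson.SetBytes(body, "model", "gpt-5.1-codex")
	// ...encodes the variant suffix as an explicit reasoning effort...
	body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
	// ...and forces streaming on, as in the hunk above.
	body, _ = sjson.SetBytes(body, "stream", true)

	fmt.Println(string(body))
	// Roughly: {"model":"gpt-5.1-codex","input":"hello","reasoning":{"effort":"high"},"stream":true}
}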
@@ -16,6 +16,8 @@ func tokenizerForModel(model string) (tokenizer.Codec, error) {
		return tokenizer.Get(tokenizer.Cl100kBase)
	case strings.HasPrefix(sanitized, "gpt-5"):
		return tokenizer.ForModel(tokenizer.GPT5)
	case strings.HasPrefix(sanitized, "gpt-5.1"):
		return tokenizer.ForModel(tokenizer.GPT5)
	case strings.HasPrefix(sanitized, "gpt-4.1"):
		return tokenizer.ForModel(tokenizer.GPT41)
	case strings.HasPrefix(sanitized, "gpt-4o"):
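Note that strings.HasPrefix(sanitized, "gpt-5") already matches "gpt-5.1" IDs, so both branches resolve to the same codec. A small counting sketch follows; it assumes the tokenizer package is github.com/tiktoken-go/tokenizer (or a wrapper with the same API) and that the GPT5 model constant referenced above exists there — neither is confirmed by this diff:

package main

import (
	"fmt"
	"log"

	"github.com/tiktoken-go/tokenizer" // assumed import path, see note above
)

func main() {
	// gpt-5.1 prompts are counted with the same codec as gpt-5.
	codec, err := tokenizer.ForModel(tokenizer.GPT5)
	if err != nil {
		log.Fatal(err)
	}
	ids, _, err := codec.Encode("Count the tokens in this gpt-5.1 prompt.")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("token count:", len(ids))
}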