Mirror of https://github.com/router-for-me/CLIProxyAPI.git (synced 2026-02-03 04:50:52 +08:00)
Add support for forcing GPT-5 Codex model configuration
- Introduced a new `ForceGPT5Codex` configuration option in settings.
- Added API endpoints for managing `ForceGPT5Codex`.
- Enhanced the Codex client to handle GPT-5 Codex-specific logic and model mapping.
- Updated the example configuration file to include the new option.
- Documented GPT-5 Codex model support and the new configuration options.
@@ -12,6 +12,14 @@ func (h *Handler) GetConfig(c *gin.Context) {
func (h *Handler) GetDebug(c *gin.Context) { c.JSON(200, gin.H{"debug": h.cfg.Debug}) }
func (h *Handler) PutDebug(c *gin.Context) { h.updateBoolField(c, func(v bool) { h.cfg.Debug = v }) }

// ForceGPT5Codex
func (h *Handler) GetForceGPT5Codex(c *gin.Context) {
	c.JSON(200, gin.H{"gpt-5-codex": h.cfg.ForceGPT5Codex})
}
func (h *Handler) PutForceGPT5Codex(c *gin.Context) {
	h.updateBoolField(c, func(v bool) { h.cfg.ForceGPT5Codex = v })
}

// Request log
func (h *Handler) GetRequestLog(c *gin.Context) { c.JSON(200, gin.H{"request-log": h.cfg.RequestLog}) }
func (h *Handler) PutRequestLog(c *gin.Context) {
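The handlers above delegate to an updateBoolField helper that is not part of this hunk. A minimal sketch of what such a helper could look like, assuming it binds a JSON body of the shape {"value": <bool>} and uses the package's Handler type and gin context; the body shape and response are assumptions, not details taken from this diff:

// Hypothetical sketch only; the repository's real updateBoolField may differ.
func (h *Handler) updateBoolField(c *gin.Context, apply func(bool)) {
	var body struct {
		Value bool `json:"value"` // assumed request shape: {"value": true}
	}
	if err := c.ShouldBindJSON(&body); err != nil {
		c.JSON(400, gin.H{"error": err.Error()})
		return
	}
	apply(body.Value) // write the new value into the in-memory config
	c.JSON(200, gin.H{"ok": true})
}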
@@ -200,6 +200,10 @@ func (s *Server) setupRoutes() {
	mgmt.PUT("/debug", s.mgmt.PutDebug)
	mgmt.PATCH("/debug", s.mgmt.PutDebug)

	mgmt.GET("/force-gpt-5-codex", s.mgmt.GetForceGPT5Codex)
	mgmt.PUT("/force-gpt-5-codex", s.mgmt.PutForceGPT5Codex)
	mgmt.PATCH("/force-gpt-5-codex", s.mgmt.PutForceGPT5Codex)

	mgmt.GET("/proxy-url", s.mgmt.GetProxyURL)
	mgmt.PUT("/proxy-url", s.mgmt.PutProxyURL)
	mgmt.PATCH("/proxy-url", s.mgmt.PutProxyURL)
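With these routes registered, the flag can be inspected and toggled at runtime. A rough usage sketch, assuming the management API is reachable at http://localhost:8317/v0/management and that the PUT body uses the {"value": <bool>} shape assumed earlier; the address, path prefix, and body shape are guesses, and any required authentication header is omitted:

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	base := "http://localhost:8317/v0/management" // assumed address and prefix

	// Read the current value; the GET handler above responds with {"gpt-5-codex": <bool>}.
	resp, err := http.Get(base + "/force-gpt-5-codex")
	if err != nil {
		panic(err)
	}
	current, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println("current:", string(current))

	// Enable the flag; the request body shape mirrors the assumed updateBoolField sketch.
	req, _ := http.NewRequest(http.MethodPut, base+"/force-gpt-5-codex", bytes.NewBufferString(`{"value": true}`))
	req.Header.Set("Content-Type", "application/json")
	resp, err = http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	resp.Body.Close()
	fmt.Println("status:", resp.Status)
}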
@@ -136,6 +136,10 @@ func (c *CodexClient) CanProvideModel(modelName string) bool {
		"gpt-5-low",
		"gpt-5-medium",
		"gpt-5-high",
		"gpt-5-codex",
		"gpt-5-codex-low",
		"gpt-5-codex-medium",
		"gpt-5-codex-high",
		"codex-mini-latest",
	}
	return util.InArray(models, modelName)
@@ -415,6 +419,25 @@ func (c *CodexClient) APIRequest(ctx context.Context, modelName, endpoint string
		case "gpt-5-high":
			jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "high")
		}
	} else if util.InArray([]string{"gpt-5-codex", "gpt-5-codex-low", "gpt-5-codex-medium", "gpt-5-codex-high"}, modelName) {
		jsonBody, _ = sjson.SetBytes(jsonBody, "model", "gpt-5-codex")
		switch modelName {
		case "gpt-5-codex":
			jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "medium")
		case "gpt-5-codex-low":
			jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "low")
		case "gpt-5-codex-medium":
			jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "medium")
		case "gpt-5-codex-high":
			jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "high")
		}
	} else if c.cfg.ForceGPT5Codex {
		if gjson.GetBytes(jsonBody, "model").String() == "gpt-5" {
			if gjson.GetBytes(jsonBody, "reasoning.effort").String() == "minimal" {
				jsonBody, _ = sjson.SetBytes(jsonBody, "reasoning.effort", "low")
			}
			jsonBody, _ = sjson.SetBytes(jsonBody, "model", "gpt-5-codex")
		}
	}

	url := fmt.Sprintf("%s%s", chatGPTEndpoint, endpoint)
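To make the new mapping concrete, a small standalone illustration of the same gjson/sjson rewrite that the ForceGPT5Codex branch performs, applied to a made-up request body:

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

func main() {
	// Made-up upstream request body for illustration only.
	body := []byte(`{"model":"gpt-5","reasoning":{"effort":"minimal"},"input":"hello"}`)

	// Mirrors the ForceGPT5Codex branch above: gpt-5 requests are switched to
	// gpt-5-codex, and a "minimal" reasoning effort is bumped to "low" first.
	if gjson.GetBytes(body, "model").String() == "gpt-5" {
		if gjson.GetBytes(body, "reasoning.effort").String() == "minimal" {
			body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
		}
		body, _ = sjson.SetBytes(body, "model", "gpt-5-codex")
	}

	fmt.Println(string(body))
	// {"model":"gpt-5-codex","reasoning":{"effort":"low"},"input":"hello"}
}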
@@ -44,6 +44,9 @@ type Config struct {
	// ClaudeKey defines a list of Claude API key configurations as specified in the YAML configuration file.
	ClaudeKey []ClaudeKey `yaml:"claude-api-key" json:"claude-api-key"`

	// ForceGPT5Codex forces the use of GPT-5 Codex model.
	ForceGPT5Codex bool `yaml:"force-gpt-5-codex" json:"force-gpt-5-codex"`

	// Codex defines a list of Codex API key configurations as specified in the YAML configuration file.
	CodexKey []CodexKey `yaml:"codex-api-key" json:"codex-api-key"`
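The yaml tag above means the option is enabled with a force-gpt-5-codex: true entry in the configuration file. A trimmed-down sketch of how that key unmarshals, keeping only the new field (the real Config has many more, and the YAML snippet is illustrative rather than copied from the example file):

package main

import (
	"fmt"
	"log"

	"gopkg.in/yaml.v3"
)

// Cut-down stand-in for the real Config struct; only the new field is kept.
type Config struct {
	ForceGPT5Codex bool `yaml:"force-gpt-5-codex" json:"force-gpt-5-codex"`
}

func main() {
	raw := []byte("force-gpt-5-codex: true\n") // illustrative config line

	var cfg Config
	if err := yaml.Unmarshal(raw, &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Println(cfg.ForceGPT5Codex) // prints: true
}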
@@ -215,6 +215,58 @@ func GetOpenAIModels() []*ModelInfo {
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID:                  "gpt-5-codex",
			Object:              "model",
			Created:             time.Now().Unix(),
			OwnedBy:             "openai",
			Type:                "openai",
			Version:             "gpt-5-2025-09-15",
			DisplayName:         "GPT 5 Codex",
			Description:         "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.",
			ContextLength:       400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID:                  "gpt-5-codex-low",
			Object:              "model",
			Created:             time.Now().Unix(),
			OwnedBy:             "openai",
			Type:                "openai",
			Version:             "gpt-5-2025-09-15",
			DisplayName:         "GPT 5 Codex Low",
			Description:         "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.",
			ContextLength:       400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID:                  "gpt-5-codex-medium",
			Object:              "model",
			Created:             time.Now().Unix(),
			OwnedBy:             "openai",
			Type:                "openai",
			Version:             "gpt-5-2025-09-15",
			DisplayName:         "GPT 5 Codex Medium",
			Description:         "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.",
			ContextLength:       400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID:                  "gpt-5-codex-high",
			Object:              "model",
			Created:             time.Now().Unix(),
			OwnedBy:             "openai",
			Type:                "openai",
			Version:             "gpt-5-2025-09-15",
			DisplayName:         "GPT 5 Codex High",
			Description:         "Stable version of GPT 5 Codex, The best model for coding and agentic tasks across domains.",
			ContextLength:       400000,
			MaxCompletionTokens: 128000,
			SupportedParameters: []string{"tools"},
		},
		{
			ID:     "codex-mini-latest",
			Object: "model",
@@ -240,6 +240,9 @@ func (w *Watcher) reloadConfig() bool {
		if oldConfig.RemoteManagement.AllowRemote != newConfig.RemoteManagement.AllowRemote {
			log.Debugf(" remote-management.allow-remote: %t -> %t", oldConfig.RemoteManagement.AllowRemote, newConfig.RemoteManagement.AllowRemote)
		}
		if oldConfig.ForceGPT5Codex != newConfig.ForceGPT5Codex {
			log.Debugf(" gpt-5-codex: %t -> %t", oldConfig.ForceGPT5Codex, newConfig.ForceGPT5Codex)
		}
	}

	log.Infof("config successfully reloaded, triggering client reload")