Compare commits

..

13 Commits

Author SHA1 Message Date
Luis Pater
30d448e73c fix(executor): update model name from codex-mini-latest to gpt-5-codex-mini 2025-11-08 11:17:40 +08:00
Luis Pater
d4064e3df4 Merge pull request #225 from jeffnash/feat/codex-mini-variants
feat(registry): add GPT-5 Codex Mini model variants
2025-11-08 11:11:04 +08:00
jeffnash
ec354f7a1a add default medium reasoning case for gpt-5-codex-mini
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-07 17:12:10 -08:00
jeffnash
240e782606 add default medium reasoning case for gpt-5-codex-mini
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-07 17:11:40 -08:00
Jeff Nash
fcb0293c0d feat(registry): add GPT-5 Codex Mini model variants
Adds three new Codex Mini model variants (mini, mini-medium, mini-high)
that map to codex-mini-latest. Codex Mini supports medium and high
reasoning effort levels only (no low/minimal). Base model defaults to
medium reasoning effort.
2025-11-07 17:07:39 -08:00
Luis Pater
682c4598ee fix(translator): handle gjson strings in OpenAI response formatting 2025-11-08 00:41:56 +08:00
Luis Pater
a7d105bd69 Fixed: #223
fix(registry): add `MiniMax-M2` model to registry definitions
2025-11-08 00:10:51 +08:00
Luis Pater
b9eef45305 Merge pull request #222 from router-for-me/api
Return auth info from memory
2025-11-07 22:41:12 +08:00
Luis Pater
c8f20a66a8 fix(executor): add logging and prompt cache key handling for OpenAI responses 2025-11-07 22:40:45 +08:00
hkfires
1f6a384c9a fix(api): omit auth file entries lacking path unless runtime-only 2025-11-07 19:15:54 +08:00
hkfires
c9fc033cf5 feat(management): support in-memory auth listing with disk fallback 2025-11-07 19:04:54 +08:00
Luis Pater
32c964d310 Merge pull request #221 from router-for-me/gemini
fix(translator): accept camelCase thinking config in OpenAI→Gemini
2025-11-07 17:00:07 +08:00
hkfires
d60040b222 fix(translator): accept camelCase thinking config in OpenAI→Gemini 2025-11-07 16:45:31 +08:00
6 changed files with 224 additions and 21 deletions

View File

@@ -12,6 +12,7 @@ import (
"net/url"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"sync"
@@ -229,8 +230,32 @@ func (h *Handler) managementCallbackURL(path string) (string, error) {
    return fmt.Sprintf("http://127.0.0.1:%d%s", h.cfg.Port, path), nil
}

// List auth files
func (h *Handler) ListAuthFiles(c *gin.Context) {
+   if h == nil {
+       c.JSON(500, gin.H{"error": "handler not initialized"})
+       return
+   }
+   if h.authManager == nil {
+       h.listAuthFilesFromDisk(c)
+       return
+   }
+   auths := h.authManager.List()
+   files := make([]gin.H, 0, len(auths))
+   for _, auth := range auths {
+       if entry := h.buildAuthFileEntry(auth); entry != nil {
+           files = append(files, entry)
+       }
+   }
+   sort.Slice(files, func(i, j int) bool {
+       nameI, _ := files[i]["name"].(string)
+       nameJ, _ := files[j]["name"].(string)
+       return strings.ToLower(nameI) < strings.ToLower(nameJ)
+   })
+   c.JSON(200, gin.H{"files": files})
+}
+
+// List auth files from disk when the auth manager is unavailable.
+func (h *Handler) listAuthFilesFromDisk(c *gin.Context) {
    entries, err := os.ReadDir(h.cfg.AuthDir)
    if err != nil {
        c.JSON(500, gin.H{"error": fmt.Sprintf("failed to read auth dir: %v", err)})
@@ -263,6 +288,103 @@ func (h *Handler) ListAuthFiles(c *gin.Context) {
    c.JSON(200, gin.H{"files": files})
}
+
+func (h *Handler) buildAuthFileEntry(auth *coreauth.Auth) gin.H {
+   if auth == nil {
+       return nil
+   }
+   runtimeOnly := isRuntimeOnlyAuth(auth)
+   path := strings.TrimSpace(authAttribute(auth, "path"))
+   if path == "" && !runtimeOnly {
+       return nil
+   }
+   name := strings.TrimSpace(auth.FileName)
+   if name == "" {
+       name = auth.ID
+   }
+   entry := gin.H{
+       "id":             auth.ID,
+       "name":           name,
+       "type":           strings.TrimSpace(auth.Provider),
+       "provider":       strings.TrimSpace(auth.Provider),
+       "label":          auth.Label,
+       "status":         auth.Status,
+       "status_message": auth.StatusMessage,
+       "disabled":       auth.Disabled,
+       "unavailable":    auth.Unavailable,
+       "runtime_only":   runtimeOnly,
+       "source":         "memory",
+       "size":           int64(0),
+   }
+   if email := authEmail(auth); email != "" {
+       entry["email"] = email
+   }
+   if accountType, account := auth.AccountInfo(); accountType != "" || account != "" {
+       if accountType != "" {
+           entry["account_type"] = accountType
+       }
+       if account != "" {
+           entry["account"] = account
+       }
+   }
+   if !auth.CreatedAt.IsZero() {
+       entry["created_at"] = auth.CreatedAt
+   }
+   if !auth.UpdatedAt.IsZero() {
+       entry["modtime"] = auth.UpdatedAt
+       entry["updated_at"] = auth.UpdatedAt
+   }
+   if !auth.LastRefreshedAt.IsZero() {
+       entry["last_refresh"] = auth.LastRefreshedAt
+   }
+   if path != "" {
+       entry["path"] = path
+       entry["source"] = "file"
+       if info, err := os.Stat(path); err == nil {
+           entry["size"] = info.Size()
+           entry["modtime"] = info.ModTime()
+       } else if os.IsNotExist(err) {
+           entry["source"] = "memory"
+       } else {
+           log.WithError(err).Warnf("failed to stat auth file %s", path)
+       }
+   }
+   return entry
+}
+
+func authEmail(auth *coreauth.Auth) string {
+   if auth == nil {
+       return ""
+   }
+   if auth.Metadata != nil {
+       if v, ok := auth.Metadata["email"].(string); ok {
+           return strings.TrimSpace(v)
+       }
+   }
+   if auth.Attributes != nil {
+       if v := strings.TrimSpace(auth.Attributes["email"]); v != "" {
+           return v
+       }
+       if v := strings.TrimSpace(auth.Attributes["account_email"]); v != "" {
+           return v
+       }
+   }
+   return ""
+}
+
+func authAttribute(auth *coreauth.Auth, key string) string {
+   if auth == nil || len(auth.Attributes) == 0 {
+       return ""
+   }
+   return auth.Attributes[key]
+}
+
+func isRuntimeOnlyAuth(auth *coreauth.Auth) bool {
+   if auth == nil || len(auth.Attributes) == 0 {
+       return false
+   }
+   return strings.EqualFold(strings.TrimSpace(auth.Attributes["runtime_only"]), "true")
+}

// Download single auth file by name
func (h *Handler) DownloadAuthFile(c *gin.Context) {
    name := c.Query("name")

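Note: the listing endpoint now prefers the in-memory auth store and reports source "file" only while the recorded path still exists on disk; a vanished file demotes the entry back to "memory". A minimal standalone sketch of that resolution rule (resolveSource is an illustrative helper, not code from this repository):

package main

import (
    "fmt"
    "os"
)

// resolveSource mirrors buildAuthFileEntry's precedence: no recorded path
// means the auth lives only in memory; a recorded path counts as a file
// only while os.Stat can still see it.
func resolveSource(path string) string {
    if path == "" {
        return "memory"
    }
    if _, err := os.Stat(path); err == nil {
        return "file"
    } else if os.IsNotExist(err) {
        return "memory" // path recorded, but the file has been deleted
    }
    return "file" // other stat errors keep the file claim; the handler logs them
}

func main() {
    fmt.Println(resolveSource(""))                    // memory
    fmt.Println(resolveSource("/tmp/does-not-exist")) // memory
}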
View File

@@ -352,17 +352,43 @@ func GetOpenAIModels() []*ModelInfo {
            SupportedParameters: []string{"tools"},
        },
        {
-           ID:                  "codex-mini-latest",
+           ID:                  "gpt-5-codex-mini",
            Object:              "model",
            Created:             time.Now().Unix(),
            OwnedBy:             "openai",
            Type:                "openai",
-           Version:             "1.0",
-           DisplayName:         "Codex Mini",
-           Description:         "Lightweight code generation model",
-           ContextLength:       4096,
-           MaxCompletionTokens: 2048,
-           SupportedParameters: []string{"temperature", "max_tokens", "stream", "stop"},
+           Version:             "gpt-5-2025-11-07",
+           DisplayName:         "GPT 5 Codex Mini",
+           Description:         "Stable version of GPT 5 Codex Mini: cheaper, faster, but less capable version of GPT 5 Codex.",
+           ContextLength:       400000,
+           MaxCompletionTokens: 128000,
+           SupportedParameters: []string{"tools"},
        },
+       {
+           ID:                  "gpt-5-codex-mini-medium",
+           Object:              "model",
+           Created:             time.Now().Unix(),
+           OwnedBy:             "openai",
+           Type:                "openai",
+           Version:             "gpt-5-2025-11-07",
+           DisplayName:         "GPT 5 Codex Mini Medium",
+           Description:         "Stable version of GPT 5 Codex Mini: cheaper, faster, but less capable version of GPT 5 Codex.",
+           ContextLength:       400000,
+           MaxCompletionTokens: 128000,
+           SupportedParameters: []string{"tools"},
+       },
+       {
+           ID:                  "gpt-5-codex-mini-high",
+           Object:              "model",
+           Created:             time.Now().Unix(),
+           OwnedBy:             "openai",
+           Type:                "openai",
+           Version:             "gpt-5-2025-11-07",
+           DisplayName:         "GPT 5 Codex Mini High",
+           Description:         "Stable version of GPT 5 Codex Mini: cheaper, faster, but less capable version of GPT 5 Codex.",
+           ContextLength:       400000,
+           MaxCompletionTokens: 128000,
+           SupportedParameters: []string{"tools"},
+       },
    }
}
@@ -438,6 +464,7 @@ func GetIFlowModels() []*ModelInfo {
{ID: "qwen3-235b-a22b-thinking-2507", DisplayName: "Qwen3-235B-A22B-Thinking", Description: "Qwen3 235B A22B Thinking (2507)"},
{ID: "qwen3-235b-a22b-instruct", DisplayName: "Qwen3-235B-A22B-Instruct", Description: "Qwen3 235B A22B Instruct"},
{ID: "qwen3-235b", DisplayName: "Qwen3-235B-A22B", Description: "Qwen3 235B A22B"},
{ID: "minimax-m2", DisplayName: "MiniMax-M2", Description: "MiniMax M2"},
}
models := make([]*ModelInfo, 0, len(entries))
for _, entry := range entries {

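Note: the three Codex Mini registry entries differ only in ID and DisplayName; the context window, completion limit, version string, and supported parameters are shared. A sketch that derives them from one template, purely to illustrate the relationship (the repository spells the entries out literally):

package main

import "fmt"

// ModelInfo is a trimmed stand-in for the registry's struct.
type ModelInfo struct {
    ID          string
    DisplayName string
}

func codexMiniVariants() []*ModelInfo {
    variants := []struct{ idSuffix, nameSuffix string }{
        {"", ""}, // base model; the executor defaults it to medium effort
        {"-medium", " Medium"},
        {"-high", " High"},
    }
    models := make([]*ModelInfo, 0, len(variants))
    for _, v := range variants {
        models = append(models, &ModelInfo{
            ID:          "gpt-5-codex-mini" + v.idSuffix,
            DisplayName: "GPT 5 Codex Mini" + v.nameSuffix,
        })
    }
    return models
}

func main() {
    for _, m := range codexMiniVariants() {
        fmt.Printf("%s => %s\n", m.ID, m.DisplayName)
    }
}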
View File

@@ -75,6 +75,16 @@ func (e *CodexExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, re
case "gpt-5-codex-high":
body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
}
} else if util.InArray([]string{"gpt-5-codex-mini", "gpt-5-codex-mini-medium", "gpt-5-codex-mini-high"}, req.Model) {
body, _ = sjson.SetBytes(body, "model", "gpt-5-codex-mini")
switch req.Model {
case "gpt-5-codex-mini-medium":
body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
case "gpt-5-codex-mini-high":
body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
default:
body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
}
}
body, _ = sjson.SetBytes(body, "stream", true)
@@ -188,6 +198,17 @@ func (e *CodexExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Au
case "gpt-5-codex-high":
body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
}
} else if util.InArray([]string{"gpt-5-codex-mini", "gpt-5-codex-mini-medium", "gpt-5-codex-mini-high"}, req.Model) {
body, _ = sjson.SetBytes(body, "model", "gpt-5-codex-mini")
switch req.Model {
case "gpt-5-codex-mini-medium":
body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
case "gpt-5-codex-mini-high":
body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
default:
// For base "gpt-5-codex-mini", set reasoning.effort to "medium" by default
body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
}
}
body, _ = sjson.DeleteBytes(body, "previous_response_id")
@@ -312,6 +333,17 @@ func (e *CodexExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Auth
        default:
            body, _ = sjson.SetBytes(body, "reasoning.effort", "low")
        }
+   } else if util.InArray([]string{"gpt-5-codex-mini", "gpt-5-codex-mini-medium", "gpt-5-codex-mini-high"}, req.Model) {
+       modelForCounting = "gpt-5"
+       body, _ = sjson.SetBytes(body, "model", "codex-mini-latest")
+       switch req.Model {
+       case "gpt-5-codex-mini-medium":
+           body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
+       case "gpt-5-codex-mini-high":
+           body, _ = sjson.SetBytes(body, "reasoning.effort", "high")
+       default:
+           body, _ = sjson.SetBytes(body, "reasoning.effort", "medium")
+       }
    }

    body, _ = sjson.DeleteBytes(body, "previous_response_id")
@@ -508,6 +540,11 @@ func (e *CodexExecutor) cacheHelper(ctx context.Context, from sdktranslator.Form
                codexCacheMap[key] = cache
            }
        }
+   } else if from == "openai-response" {
+       promptCacheKey := gjson.GetBytes(req.Payload, "prompt_cache_key")
+       if promptCacheKey.Exists() {
+           cache.ID = promptCacheKey.String()
+       }
    }

    rawJSON, _ = sjson.SetBytes(rawJSON, "prompt_cache_key", cache.ID)

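Note: every executor path applies the same rewrite: the variant ID collapses to the base upstream model and the suffix picks reasoning.effort, with the bare ID defaulting to medium (per the commit message, Codex Mini supports only medium and high). A standalone sketch of the Execute/ExecuteStream rewrite using sjson; applyCodexMiniVariant is illustrative, and note that the CountTokens path above still sends codex-mini-latest upstream:

package main

import (
    "fmt"

    "github.com/tidwall/sjson"
)

// applyCodexMiniVariant collapses a variant ID to the base model and derives
// reasoning.effort from the suffix, defaulting the bare ID to "medium".
func applyCodexMiniVariant(body []byte, model string) []byte {
    body, _ = sjson.SetBytes(body, "model", "gpt-5-codex-mini")
    effort := "medium"
    if model == "gpt-5-codex-mini-high" {
        effort = "high"
    }
    body, _ = sjson.SetBytes(body, "reasoning.effort", effort)
    return body
}

func main() {
    body := []byte(`{"model":"gpt-5-codex-mini-high","input":"hi"}`)
    fmt.Println(string(applyCodexMiniVariant(body, "gpt-5-codex-mini-high")))
    // {"model":"gpt-5-codex-mini","input":"hi","reasoning":{"effort":"high"}}
}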
View File

@@ -65,17 +65,23 @@ func ConvertOpenAIRequestToGeminiCLI(modelName string, inputRawJSON []byte, _ bo
    if tc := gjson.GetBytes(rawJSON, "extra_body.google.thinking_config"); tc.Exists() && tc.IsObject() {
        var setBudget bool
        var normalized int
-       if v := tc.Get("thinking_budget"); v.Exists() {
+       if v := tc.Get("thinkingBudget"); v.Exists() {
+           normalized = util.NormalizeThinkingBudget(modelName, int(v.Int()))
+           out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", normalized)
+           setBudget = true
+       } else if v := tc.Get("thinking_budget"); v.Exists() {
            normalized = util.NormalizeThinkingBudget(modelName, int(v.Int()))
            out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", normalized)
            setBudget = true
        }
-       if v := tc.Get("include_thoughts"); v.Exists() {
+       if v := tc.Get("includeThoughts"); v.Exists() {
            out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", v.Bool())
-       } else if setBudget {
-           if normalized != 0 {
-               out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
-           }
+       } else if v := tc.Get("include_thoughts"); v.Exists() {
+           out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", v.Bool())
+       } else if setBudget && normalized != 0 {
+           out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.include_thoughts", true)
        }
    }
}

View File

@@ -65,18 +65,23 @@ func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool)
    if tc := gjson.GetBytes(rawJSON, "extra_body.google.thinking_config"); tc.Exists() && tc.IsObject() {
        var setBudget bool
        var normalized int
-       if v := tc.Get("thinking_budget"); v.Exists() {
-           // Normalize budget to model range
+       if v := tc.Get("thinkingBudget"); v.Exists() {
+           normalized = util.NormalizeThinkingBudget(modelName, int(v.Int()))
+           out, _ = sjson.SetBytes(out, "generationConfig.thinkingConfig.thinkingBudget", normalized)
+           setBudget = true
+       } else if v := tc.Get("thinking_budget"); v.Exists() {
            normalized = util.NormalizeThinkingBudget(modelName, int(v.Int()))
            out, _ = sjson.SetBytes(out, "generationConfig.thinkingConfig.thinkingBudget", normalized)
            setBudget = true
        }
-       if v := tc.Get("include_thoughts"); v.Exists() {
+       if v := tc.Get("includeThoughts"); v.Exists() {
            out, _ = sjson.SetBytes(out, "generationConfig.thinkingConfig.include_thoughts", v.Bool())
-       } else if setBudget {
-           if normalized != 0 {
-               out, _ = sjson.SetBytes(out, "generationConfig.thinkingConfig.include_thoughts", true)
-           }
+       } else if v := tc.Get("include_thoughts"); v.Exists() {
+           out, _ = sjson.SetBytes(out, "generationConfig.thinkingConfig.include_thoughts", v.Bool())
+       } else if setBudget && normalized != 0 {
+           out, _ = sjson.SetBytes(out, "generationConfig.thinkingConfig.include_thoughts", true)
        }
    }
}

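Note: both translator hunks above apply the same lookup pattern: try the camelCase key first, then fall back to snake_case, so either spelling of the thinking config is honored. A self-contained sketch of that fallback with gjson (firstExisting is a hypothetical helper; the diffs inline the two lookups):

package main

import (
    "fmt"

    "github.com/tidwall/gjson"
)

// firstExisting returns the first key present on the object, checking keys
// in the order given (camelCase before snake_case in the translators).
func firstExisting(obj gjson.Result, keys ...string) (gjson.Result, bool) {
    for _, k := range keys {
        if v := obj.Get(k); v.Exists() {
            return v, true
        }
    }
    return gjson.Result{}, false
}

func main() {
    tc := gjson.Parse(`{"thinkingBudget": 2048, "include_thoughts": true}`)
    if v, ok := firstExisting(tc, "thinkingBudget", "thinking_budget"); ok {
        fmt.Println("budget:", v.Int()) // budget: 2048
    }
    if v, ok := firstExisting(tc, "includeThoughts", "include_thoughts"); ok {
        fmt.Println("include thoughts:", v.Bool()) // include thoughts: true
    }
}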
View File

@@ -2,6 +2,7 @@ package responses
import (
    "bytes"

    "github.com/tidwall/gjson"
+   "github.com/tidwall/sjson"
)
@@ -147,6 +148,11 @@ func ConvertOpenAIResponsesRequestToOpenAIChatCompletions(modelName string, inpu
            return true
        })
+   } else if input.Type == gjson.String {
+       msg := "{}"
+       msg, _ = sjson.Set(msg, "role", "user")
+       msg, _ = sjson.Set(msg, "content", input.String())
+       out, _ = sjson.SetRaw(out, "messages.-1", msg)
    }

    // Convert tools from responses format to chat completions format
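Note: the Responses API "input" field may be an array of items or a bare string; the new branch wraps the string form as a single user message. A runnable sketch of just that branch (wrapStringInput is an illustrative wrapper around the same gjson/sjson calls):

package main

import (
    "fmt"

    "github.com/tidwall/gjson"
    "github.com/tidwall/sjson"
)

// wrapStringInput appends a bare string "input" as one user message on the
// chat-completions payload, mirroring the gjson.String branch above.
func wrapStringInput(rawJSON []byte, out string) string {
    if input := gjson.GetBytes(rawJSON, "input"); input.Type == gjson.String {
        msg := "{}"
        msg, _ = sjson.Set(msg, "role", "user")
        msg, _ = sjson.Set(msg, "content", input.String())
        out, _ = sjson.SetRaw(out, "messages.-1", msg)
    }
    return out
}

func main() {
    req := []byte(`{"model":"gpt-5-codex-mini","input":"hello there"}`)
    fmt.Println(wrapStringInput(req, `{"messages":[]}`))
    // {"messages":[{"role":"user","content":"hello there"}]}
}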