Mirror of https://github.com/router-for-me/CLIProxyAPI.git, synced 2026-02-02 20:40:52 +08:00
**refactor(middleware): extract request logging logic and optimize condition checks**

- Added `shouldLogRequest` helper to simplify path-based request logging logic.
- Updated middleware to skip management endpoints for improved security.
- Introduced an explicit `nil` logger check for minimal overhead.
- Updated dependencies in `go.mod`.

**feat(auth): add handling for 404 responses with retry logic**

- Introduced support for a 404 `not_found` status with a 12-hour backoff period.
- Updated `manager.go` to align state and status messages for 404 scenarios.

**refactor(translator): comment out debug logging in Gemini responses request**
**go.mod**

@@ -3,6 +3,7 @@ module github.com/router-for-me/CLIProxyAPI/v6
 go 1.24.0
 
 require (
+	github.com/andybalholm/brotli v1.0.6
 	github.com/fsnotify/fsnotify v1.9.0
 	github.com/gin-gonic/gin v1.10.1
 	github.com/go-git/go-git/v6 v6.0.0-20251009132922-75a182125145
@@ -28,7 +29,6 @@ require (
 	cloud.google.com/go/compute/metadata v0.3.0 // indirect
 	github.com/Microsoft/go-winio v0.6.2 // indirect
 	github.com/ProtonMail/go-crypto v1.3.0 // indirect
-	github.com/andybalholm/brotli v1.0.6 // indirect
 	github.com/bytedance/sonic v1.11.6 // indirect
 	github.com/bytedance/sonic/loader v0.1.1 // indirect
 	github.com/cloudflare/circl v1.6.1 // indirect
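For context on the dependency move, `github.com/andybalholm/brotli` is promoted from an indirect to a direct requirement. Below is a minimal round-trip sketch of that library's API (a standalone illustration, not code from this repository; how the proxy actually uses brotli is an assumption):

```go
package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/andybalholm/brotli"
)

func main() {
	var buf bytes.Buffer

	// Compress a small payload with brotli.
	w := brotli.NewWriter(&buf)
	if _, err := w.Write([]byte("hello brotli")); err != nil {
		panic(err)
	}
	if err := w.Close(); err != nil {
		panic(err)
	}

	// Decompress it again.
	r := brotli.NewReader(&buf)
	plain, err := io.ReadAll(r)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(plain)) // hello brotli
}
```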
**Request logging middleware**

@@ -19,13 +19,13 @@ import (
 // logger, the middleware has minimal overhead.
 func RequestLoggingMiddleware(logger logging.RequestLogger) gin.HandlerFunc {
 	return func(c *gin.Context) {
-		path := c.Request.URL.Path
-		shouldLog := false
-		if strings.HasPrefix(path, "/v1") {
-			shouldLog = true
+		if logger == nil {
+			c.Next()
+			return
 		}
 
-		if !shouldLog {
+		path := c.Request.URL.Path
+		if !shouldLogRequest(path) {
 			c.Next()
 			return
 		}
@@ -101,3 +101,13 @@ func captureRequestInfo(c *gin.Context) (*RequestInfo, error) {
 		Body: body,
 	}, nil
 }
+
+// shouldLogRequest determines whether the request should be logged.
+// It skips management endpoints to avoid leaking secrets but allows
+// all other routes, including module-provided ones, to honor request-log.
+func shouldLogRequest(path string) bool {
+	if strings.HasPrefix(path, "/v0/management") || strings.HasPrefix(path, "/management") {
+		return false
+	}
+	return true
+}
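To illustrate the new filtering behavior, here is a standalone, table-driven sketch. The helper is re-declared locally so the file compiles on its own (the real function lives in the middleware package), and the non-management paths are hypothetical examples:

```go
package scratch

import (
	"strings"
	"testing"
)

// shouldLogRequestLike re-states the helper added above so this test is
// self-contained.
func shouldLogRequestLike(path string) bool {
	if strings.HasPrefix(path, "/v0/management") || strings.HasPrefix(path, "/management") {
		return false
	}
	return true
}

func TestShouldLogRequestLike(t *testing.T) {
	cases := map[string]bool{
		"/v1/chat/completions":  true,  // provider traffic still honors request-log
		"/v0/management/config": false, // management endpoints are never logged
		"/management/keys":      false,
		"/healthz":              true, // any other route is allowed through
	}
	for path, want := range cases {
		if got := shouldLogRequestLike(path); got != want {
			t.Errorf("shouldLogRequestLike(%q) = %v, want %v", path, got, want)
		}
	}
}
```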
**Model registry (package registry)**

@@ -3,8 +3,6 @@
 // when registering their supported models.
 package registry
 
-import "time"
-
 // GetClaudeModels returns the standard Claude model definitions
 func GetClaudeModels() []*ModelInfo {
 	return []*ModelInfo{
@@ -426,7 +424,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1754524800,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5-2025-08-07",
@@ -439,7 +437,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5-minimal",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1754524800,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5-2025-08-07",
@@ -452,7 +450,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5-low",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1754524800,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5-2025-08-07",
@@ -465,7 +463,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5-medium",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1754524800,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5-2025-08-07",
@@ -478,7 +476,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5-high",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1754524800,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5-2025-08-07",
@@ -491,7 +489,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5-codex",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1757894400,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5-2025-09-15",
@@ -504,7 +502,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5-codex-low",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1757894400,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5-2025-09-15",
@@ -517,7 +515,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5-codex-medium",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1757894400,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5-2025-09-15",
@@ -530,7 +528,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5-codex-high",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1757894400,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5-2025-09-15",
@@ -543,7 +541,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5-codex-mini",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762473600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5-2025-11-07",
@@ -556,7 +554,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5-codex-mini-medium",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762473600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5-2025-11-07",
@@ -569,7 +567,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5-codex-mini-high",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762473600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5-2025-11-07",
@@ -582,7 +580,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762905600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-2025-11-12",
@@ -595,7 +593,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-none",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762905600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-2025-11-12",
@@ -608,7 +606,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-low",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762905600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-2025-11-12",
@@ -621,7 +619,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-medium",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762905600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-2025-11-12",
@@ -634,7 +632,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-high",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762905600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-2025-11-12",
@@ -647,7 +645,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-codex",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762905600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-2025-11-12",
@@ -660,7 +658,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-codex-low",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762905600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-2025-11-12",
@@ -673,7 +671,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-codex-medium",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762905600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-2025-11-12",
@@ -686,7 +684,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-codex-high",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762905600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-2025-11-12",
@@ -699,7 +697,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-codex-mini",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762905600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-2025-11-12",
@@ -712,7 +710,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-codex-mini-medium",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762905600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-2025-11-12",
@@ -725,7 +723,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-codex-mini-high",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1762905600,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-2025-11-12",
@@ -739,7 +737,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-codex-max",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1763424000,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-max",
@@ -752,7 +750,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-codex-max-low",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1763424000,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-max",
@@ -765,7 +763,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-codex-max-medium",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1763424000,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-max",
@@ -778,7 +776,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-codex-max-high",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1763424000,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-max",
@@ -791,7 +789,7 @@ func GetOpenAIModels() []*ModelInfo {
 		{
 			ID: "gpt-5.1-codex-max-xhigh",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1763424000,
 			OwnedBy: "openai",
 			Type: "openai",
 			Version: "gpt-5.1-max",
@@ -810,7 +808,7 @@ func GetQwenModels() []*ModelInfo {
 		{
 			ID: "qwen3-coder-plus",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1753228800,
 			OwnedBy: "qwen",
 			Type: "qwen",
 			Version: "3.0",
@@ -823,7 +821,7 @@ func GetQwenModels() []*ModelInfo {
 		{
 			ID: "qwen3-coder-flash",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1753228800,
 			OwnedBy: "qwen",
 			Type: "qwen",
 			Version: "3.0",
@@ -836,7 +834,7 @@ func GetQwenModels() []*ModelInfo {
 		{
 			ID: "vision-model",
 			Object: "model",
-			Created: time.Now().Unix(),
+			Created: 1758672000,
 			OwnedBy: "qwen",
 			Type: "qwen",
 			Version: "3.0",
@@ -852,38 +850,38 @@ func GetQwenModels() []*ModelInfo {
 // GetIFlowModels returns supported models for iFlow OAuth accounts.
 
 func GetIFlowModels() []*ModelInfo {
-	created := time.Now().Unix()
 	entries := []struct {
 		ID string
 		DisplayName string
 		Description string
+		Created int64
 	}{
-		{ID: "tstars2.0", DisplayName: "TStars-2.0", Description: "iFlow TStars-2.0 multimodal assistant"},
-		{ID: "qwen3-coder-plus", DisplayName: "Qwen3-Coder-Plus", Description: "Qwen3 Coder Plus code generation"},
-		{ID: "qwen3-coder", DisplayName: "Qwen3-Coder-480B-A35B", Description: "Qwen3 Coder 480B A35B"},
-		{ID: "qwen3-max", DisplayName: "Qwen3-Max", Description: "Qwen3 flagship model"},
-		{ID: "qwen3-vl-plus", DisplayName: "Qwen3-VL-Plus", Description: "Qwen3 multimodal vision-language"},
-		{ID: "qwen3-max-preview", DisplayName: "Qwen3-Max-Preview", Description: "Qwen3 Max preview build"},
-		{ID: "kimi-k2-0905", DisplayName: "Kimi-K2-Instruct-0905", Description: "Moonshot Kimi K2 instruct 0905"},
-		{ID: "glm-4.6", DisplayName: "GLM-4.6", Description: "Zhipu GLM 4.6 general model"},
-		{ID: "kimi-k2", DisplayName: "Kimi-K2", Description: "Moonshot Kimi K2 general model"},
-		{ID: "kimi-k2-thinking", DisplayName: "Kimi-K2-Thinking", Description: "Moonshot Kimi K2 general model"},
-		{ID: "deepseek-v3.2", DisplayName: "DeepSeek-V3.2-Exp", Description: "DeepSeek V3.2 experimental"},
-		{ID: "deepseek-v3.1", DisplayName: "DeepSeek-V3.1-Terminus", Description: "DeepSeek V3.1 Terminus"},
-		{ID: "deepseek-r1", DisplayName: "DeepSeek-R1", Description: "DeepSeek reasoning model R1"},
-		{ID: "deepseek-v3", DisplayName: "DeepSeek-V3-671B", Description: "DeepSeek V3 671B"},
-		{ID: "qwen3-32b", DisplayName: "Qwen3-32B", Description: "Qwen3 32B"},
-		{ID: "qwen3-235b-a22b-thinking-2507", DisplayName: "Qwen3-235B-A22B-Thinking", Description: "Qwen3 235B A22B Thinking (2507)"},
-		{ID: "qwen3-235b-a22b-instruct", DisplayName: "Qwen3-235B-A22B-Instruct", Description: "Qwen3 235B A22B Instruct"},
-		{ID: "qwen3-235b", DisplayName: "Qwen3-235B-A22B", Description: "Qwen3 235B A22B"},
-		{ID: "minimax-m2", DisplayName: "MiniMax-M2", Description: "MiniMax M2"},
+		{ID: "tstars2.0", DisplayName: "TStars-2.0", Description: "iFlow TStars-2.0 multimodal assistant", Created: 1746489600},
+		{ID: "qwen3-coder-plus", DisplayName: "Qwen3-Coder-Plus", Description: "Qwen3 Coder Plus code generation", Created: 1753228800},
+		{ID: "qwen3-coder", DisplayName: "Qwen3-Coder-480B-A35B", Description: "Qwen3 Coder 480B A35B", Created: 1753228800},
+		{ID: "qwen3-max", DisplayName: "Qwen3-Max", Description: "Qwen3 flagship model", Created: 1758672000},
+		{ID: "qwen3-vl-plus", DisplayName: "Qwen3-VL-Plus", Description: "Qwen3 multimodal vision-language", Created: 1758672000},
+		{ID: "qwen3-max-preview", DisplayName: "Qwen3-Max-Preview", Description: "Qwen3 Max preview build", Created: 1757030400},
+		{ID: "kimi-k2-0905", DisplayName: "Kimi-K2-Instruct-0905", Description: "Moonshot Kimi K2 instruct 0905", Created: 1757030400},
+		{ID: "glm-4.6", DisplayName: "GLM-4.6", Description: "Zhipu GLM 4.6 general model", Created: 1759190400},
+		{ID: "kimi-k2", DisplayName: "Kimi-K2", Description: "Moonshot Kimi K2 general model", Created: 1752192000},
+		{ID: "kimi-k2-thinking", DisplayName: "Kimi-K2-Thinking", Description: "Moonshot Kimi K2 general model", Created: 1762387200},
+		{ID: "deepseek-v3.2", DisplayName: "DeepSeek-V3.2-Exp", Description: "DeepSeek V3.2 experimental", Created: 1759104000},
+		{ID: "deepseek-v3.1", DisplayName: "DeepSeek-V3.1-Terminus", Description: "DeepSeek V3.1 Terminus", Created: 1756339200},
+		{ID: "deepseek-r1", DisplayName: "DeepSeek-R1", Description: "DeepSeek reasoning model R1", Created: 1737331200},
+		{ID: "deepseek-v3", DisplayName: "DeepSeek-V3-671B", Description: "DeepSeek V3 671B", Created: 1734307200},
+		{ID: "qwen3-32b", DisplayName: "Qwen3-32B", Description: "Qwen3 32B", Created: 1747094400},
+		{ID: "qwen3-235b-a22b-thinking-2507", DisplayName: "Qwen3-235B-A22B-Thinking", Description: "Qwen3 235B A22B Thinking (2507)", Created: 1753401600},
+		{ID: "qwen3-235b-a22b-instruct", DisplayName: "Qwen3-235B-A22B-Instruct", Description: "Qwen3 235B A22B Instruct", Created: 1753401600},
+		{ID: "qwen3-235b", DisplayName: "Qwen3-235B-A22B", Description: "Qwen3 235B A22B", Created: 1753401600},
+		{ID: "minimax-m2", DisplayName: "MiniMax-M2", Description: "MiniMax M2", Created: 1758672000},
 	}
 	models := make([]*ModelInfo, 0, len(entries))
 	for _, entry := range entries {
 		models = append(models, &ModelInfo{
 			ID: entry.ID,
 			Object: "model",
-			Created: created,
+			Created: entry.Created,
 			OwnedBy: "iflow",
 			Type: "iflow",
 			DisplayName: entry.DisplayName,
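The registry hunks above replace `Created: time.Now().Unix()` with fixed Unix timestamps, so model creation dates no longer change on every process start and the `time` import can be dropped. A quick standalone check (not part of the repository) that a few of the constants decode to the dates implied by the version strings:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Constants taken from the diff above; the comments show the UTC date
	// that time.Unix reports for each of them.
	for _, ts := range []int64{
		1754524800, // 2025-08-07, matches version gpt-5-2025-08-07
		1757894400, // 2025-09-15, matches version gpt-5-2025-09-15
		1762905600, // 2025-11-12, matches version gpt-5.1-2025-11-12
	} {
		fmt.Println(ts, "->", time.Unix(ts, 0).UTC().Format("2006-01-02"))
	}
}
```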
**Gemini responses request translator**

@@ -6,7 +6,6 @@ import (
 
 	"github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/common"
 	"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
-	log "github.com/sirupsen/logrus"
 	"github.com/tidwall/gjson"
 	"github.com/tidwall/sjson"
 )
@@ -303,7 +302,7 @@ func ConvertOpenAIResponsesRequestToGemini(modelName string, inputRawJSON []byte
 	if !gjson.Get(out, "generationConfig.thinkingConfig").Exists() && modelName == "gemini-3-pro-preview" {
 		out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", -1)
 		out, _ = sjson.Set(out, "generationConfig.thinkingConfig.include_thoughts", true)
-		log.Debugf("Applied default thinkingConfig for gemini-3-pro-preview (matches Gemini CLI): thinkingBudget=-1, include_thoughts=true")
+		// log.Debugf("Applied default thinkingConfig for gemini-3-pro-preview (matches Gemini CLI): thinkingBudget=-1, include_thoughts=true")
 	}
 
 	result := []byte(out)
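For readers unfamiliar with the gjson/sjson pattern used in the translator, here is a minimal standalone sketch of the same defaulting step. The request payload is a hypothetical example; the real function builds `out` from the incoming OpenAI Responses request:

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

func main() {
	// Hypothetical Gemini request body with no thinkingConfig set yet.
	out := `{"generationConfig":{"temperature":0.7}}`

	// Only apply the defaults when the caller did not supply thinkingConfig.
	if !gjson.Get(out, "generationConfig.thinkingConfig").Exists() {
		out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", -1)
		out, _ = sjson.Set(out, "generationConfig.thinkingConfig.include_thoughts", true)
	}

	// Prints the payload with the thinkingConfig defaults added.
	fmt.Println(out)
}
```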
**manager.go**

@@ -564,6 +564,11 @@ func (m *Manager) MarkResult(ctx context.Context, result Result) {
 		state.NextRetryAfter = next
 		suspendReason = "payment_required"
 		shouldSuspendModel = true
+	case 404:
+		next := now.Add(12 * time.Hour)
+		state.NextRetryAfter = next
+		suspendReason = "not_found"
+		shouldSuspendModel = true
 	case 429:
 		var next time.Time
 		backoffLevel := state.Quota.BackoffLevel
@@ -804,6 +809,9 @@ func applyAuthFailureState(auth *Auth, resultErr *Error, retryAfter *time.Durati
 	case 402, 403:
 		auth.StatusMessage = "payment_required"
 		auth.NextRetryAfter = now.Add(30 * time.Minute)
+	case 404:
+		auth.StatusMessage = "not_found"
+		auth.NextRetryAfter = now.Add(12 * time.Hour)
 	case 429:
 		auth.StatusMessage = "quota exhausted"
 		auth.Quota.Exceeded = true
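As a summary of the retry behavior after this change, here is a simplified standalone mirror of the status-code handling (names and structure are illustrative; the 429 branch in the real `manager.go` computes an escalating backoff from the quota state rather than a fixed delay):

```go
package main

import (
	"fmt"
	"time"
)

// backoffFor maps an upstream status code to a status message and a fixed
// retry delay, mirroring the switch cases shown in the diff above.
func backoffFor(statusCode int) (reason string, wait time.Duration) {
	switch statusCode {
	case 402, 403:
		return "payment_required", 30 * time.Minute
	case 404:
		return "not_found", 12 * time.Hour // new in this commit
	case 429:
		return "quota exhausted", 0 // real code escalates the backoff level
	default:
		return "", 0
	}
}

func main() {
	for _, code := range []int{402, 404, 429} {
		reason, wait := backoffFor(code)
		fmt.Printf("%d -> %s (retry after %s)\n", code, reason, wait)
	}
}
```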