Compare commits


15 Commits

Author SHA1 Message Date
Luis Pater
a5b3ff11fd Merge pull request #569 from router-for-me/watcher
Watcher Module Progressive Refactoring - Phase 1
2025-12-17 02:43:34 +08:00
Luis Pater
084558f200 test(config): add unit tests for model prefix changes in config diff 2025-12-17 02:31:16 +08:00
Luis Pater
d02bf9c243 feat(diff): add support for model prefix changes in config diff logic
Enhance the configuration diff logic to include detection and reporting of `prefix` changes for all model types. Update related struct naming for consistency across the watcher module.
2025-12-17 02:05:03 +08:00
Luis Pater
26a5f67df2 Merge branch 'dev' into watcher 2025-12-17 01:48:11 +08:00
Luis Pater
600fd42a83 Merge pull request #564 from router-for-me/think
feat(thinking): unify budget/effort conversion logic and add iFlow thinking support
2025-12-17 01:21:24 +08:00
Luis Pater
670685139a fix(api): update route patterns to support wildcards for Gemini actions
Normalize action handling by accommodating wildcard patterns in route definitions for Gemini endpoints. Adjust `request.Action` parsing logic to correctly process routes with prefixed actions.
2025-12-17 01:17:02 +08:00
Luis Pater
52b6306388 feat(config): add support for model prefixes and prefix normalization
Refactor model management to include an optional `prefix` field for model credentials, enabling better namespace handling. Update affected configuration files, APIs, and handlers to support prefix normalization and routing. Remove unused OpenAI compatibility provider logic to simplify processing.
2025-12-17 01:07:26 +08:00
hkfires
521ec6f1b8 fix(watcher): simplify vertex apikey idKind to exclude base suffix 2025-12-16 22:55:38 +08:00
hkfires
b0c5d9640a refactor(diff): improve security and stability of config change detection
Introduce formatProxyURL helper to sanitize proxy addresses before
logging, stripping credentials and path components while preserving
host information. Rework model hash computation to sort and deduplicate
name/alias pairs with case normalization, ensuring consistent output
regardless of input ordering. Add signature-based identification for
anonymous OpenAI-compatible provider entries to maintain stable keys
across configuration reloads. Replace direct stdout prints with
structured logger calls for file change notifications.
2025-12-16 22:39:19 +08:00
hkfires
ef8e94e992 refactor(watcher): extract config diff helpers
Break out config diffing, hashing, and OpenAI compatibility utilities into a dedicated diff package, update watcher to consume them, and add comprehensive tests for diff logic and watcher behavior.
2025-12-16 21:45:33 +08:00
hkfires
9df96a4bb4 test(thinking): add effort to budget coverage 2025-12-16 18:34:43 +08:00
hkfires
28a428ae2f fix(thinking): align budget effort mapping across translators
Unify thinking budget-to-effort conversion in a shared helper, handle disabled/default thinking cases in translators, adjust zero-budget mapping, and drop the old OpenAI-specific helper with updated tests.
2025-12-16 18:34:43 +08:00
hkfires
b326ec3641 feat(iflow): add thinking support for iFlow models 2025-12-16 18:34:43 +08:00
Luis Pater
fcecbc7d46 Merge pull request #562 from thomasvan/fix/openai-claude-message-start-order
fix(translator): emit message_start on first chunk regardless of role field
2025-12-16 16:54:58 +08:00
Thong Van
f4007f53ba fix(translator): emit message_start on first chunk regardless of role field
Some OpenAI-compatible providers (like GitHub Copilot) may send tool_calls
in the first streaming chunk without including the role field. The previous
implementation only emitted message_start when the first chunk contained
role="assistant", causing Anthropic protocol violations when tool calls
arrived first.

This fix ensures message_start is always emitted on the very first chunk,
preventing 'content_block_start before message_start' errors in clients
that strictly validate Anthropic SSE event ordering.
2025-12-16 13:01:09 +07:00
35 changed files with 2762 additions and 717 deletions
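For the message_start fix in f4007f53ba above, a minimal sketch (illustrative data, not a captured Copilot response) of the kind of first streaming chunk that used to skip message_start because it carries tool_calls but no role field:

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	// First OpenAI-style SSE chunk: tool_calls present, no "role" in the delta.
	firstChunk := `{"choices":[{"delta":{"tool_calls":[{"index":0,"function":{"name":"get_weather","arguments":""}}]}}]}`
	role := gjson.Get(firstChunk, "choices.0.delta.role")
	// Old behavior: message_start was emitted only when role == "assistant"; this chunk
	// has no role, so content_block_start came first and strict Anthropic SSE clients failed.
	// New behavior: message_start is emitted for the very first chunk unconditionally.
	fmt.Println("first chunk has role:", role.Exists())
}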

View File

@@ -48,6 +48,9 @@ usage-statistics-enabled: false
# Proxy URL. Supports socks5/http/https protocols. Example: socks5://user:pass@192.168.1.1:1080/
proxy-url: ""
# When true, unprefixed model requests only use credentials without a prefix (except when prefix == model name).
force-model-prefix: false
# Number of times to retry a request. Retries will occur if the HTTP response code is 403, 408, 500, 502, 503, or 504.
request-retry: 3
@@ -65,6 +68,7 @@ ws-auth: false
# Gemini API keys
# gemini-api-key:
# - api-key: "AIzaSy...01"
# prefix: "test" # optional: require calls like "test/gemini-3-pro-preview" to target this credential
# base-url: "https://generativelanguage.googleapis.com"
# headers:
# X-Custom-Header: "custom-value"
@@ -79,6 +83,7 @@ ws-auth: false
# Codex API keys
# codex-api-key:
# - api-key: "sk-atSM..."
# prefix: "test" # optional: require calls like "test/gpt-5-codex" to target this credential
# base-url: "https://www.example.com" # use the custom codex API endpoint
# headers:
# X-Custom-Header: "custom-value"
@@ -93,6 +98,7 @@ ws-auth: false
# claude-api-key:
# - api-key: "sk-atSM..." # use the official claude API key, no need to set the base url
# - api-key: "sk-atSM..."
# prefix: "test" # optional: require calls like "test/claude-sonnet-latest" to target this credential
# base-url: "https://www.example.com" # use the custom claude API endpoint
# headers:
# X-Custom-Header: "custom-value"
@@ -109,6 +115,7 @@ ws-auth: false
# OpenAI compatibility providers
# openai-compatibility:
# - name: "openrouter" # The name of the provider; it will be used in the user agent and other places.
# prefix: "test" # optional: require calls like "test/kimi-k2" to target this provider's credentials
# base-url: "https://openrouter.ai/api/v1" # The base URL of the provider.
# headers:
# X-Custom-Header: "custom-value"
@@ -123,6 +130,7 @@ ws-auth: false
# Vertex API keys (Vertex-compatible endpoints, use API key + base URL)
# vertex-api-key:
# - api-key: "vk-123..." # x-goog-api-key header
# prefix: "test" # optional: require calls like "test/vertex-pro" to target this credential
# base-url: "https://example.com/api" # e.g. https://zenmux.ai/api
# proxy-url: "socks5://proxy.example.com:1080" # optional per-key proxy override
# headers:
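A rough sketch of how a prefixed request such as "test/gpt-5-codex" is intended to map onto a credential whose prefix is "test"; the field and function names below are illustrative, not the project's actual routing code, and force-model-prefix then controls whether unprefixed requests may also use prefixed credentials:

package main

import (
	"fmt"
	"strings"
)

type codexKey struct {
	APIKey string
	Prefix string
}

// resolve splits an optional "prefix/" off the requested model and keeps only the
// credentials registered under that prefix. Unprefixed requests fall through to all keys.
func resolve(model string, keys []codexKey) (string, []codexKey) {
	if i := strings.Index(model, "/"); i > 0 {
		prefix, base := model[:i], model[i+1:]
		matched := make([]codexKey, 0, len(keys))
		for _, k := range keys {
			if k.Prefix == prefix {
				matched = append(matched, k)
			}
		}
		return base, matched
	}
	return model, keys
}

func main() {
	keys := []codexKey{{APIKey: "sk-a", Prefix: "test"}, {APIKey: "sk-b"}}
	base, eligible := resolve("test/gpt-5-codex", keys)
	fmt.Println(base, len(eligible)) // gpt-5-codex 1
}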

View File

@@ -146,6 +146,9 @@ func TestAmpModule_OnConfigUpdated_CacheInvalidation(t *testing.T) {
m := &AmpModule{enabled: true}
ms := NewMultiSourceSecretWithPath("", p, time.Minute)
m.secretSource = ms
m.lastConfig = &config.AmpCode{
UpstreamAPIKey: "old-key",
}
// Warm the cache
if _, err := ms.Get(context.Background()); err != nil {
@@ -157,7 +160,7 @@ func TestAmpModule_OnConfigUpdated_CacheInvalidation(t *testing.T) {
}
// Update config - should invalidate cache
if err := m.OnConfigUpdated(&config.Config{AmpCode: config.AmpCode{UpstreamURL: "http://x"}}); err != nil {
if err := m.OnConfigUpdated(&config.Config{AmpCode: config.AmpCode{UpstreamURL: "http://x", UpstreamAPIKey: "new-key"}}); err != nil {
t.Fatal(err)
}

View File

@@ -267,7 +267,7 @@ func (m *AmpModule) registerProviderAliases(engine *gin.Engine, baseHandler *han
v1betaAmp := provider.Group("/v1beta")
{
v1betaAmp.GET("/models", geminiHandlers.GeminiModels)
v1betaAmp.POST("/models/:action", fallbackHandler.WrapHandler(geminiHandlers.GeminiHandler))
v1betaAmp.GET("/models/:action", geminiHandlers.GeminiGetHandler)
v1betaAmp.POST("/models/*action", fallbackHandler.WrapHandler(geminiHandlers.GeminiHandler))
v1betaAmp.GET("/models/*action", geminiHandlers.GeminiGetHandler)
}
}
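The switch from ":action" to "*action" above changes how Gin binds the path remainder: a catch-all parameter spans further "/" segments (so prefixed models like "test/gemini-3-pro-preview:generateContent" still match) and its value arrives with a leading slash, which the request.Action parsing has to strip. A minimal standalone illustration, not the project's handler code:

package main

import (
	"fmt"
	"net/http/httptest"
	"strings"

	"github.com/gin-gonic/gin"
)

func main() {
	gin.SetMode(gin.TestMode)
	r := gin.New()
	// "*action" also matches nested segments such as "test/gemini-3-pro-preview:generateContent",
	// which the previous ":action" pattern rejected (single segment only).
	r.POST("/v1beta/models/*action", func(c *gin.Context) {
		action := strings.TrimPrefix(c.Param("action"), "/") // catch-all params keep the leading "/"
		fmt.Println("action =", action)
		c.Status(200)
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/v1beta/models/test/gemini-3-pro-preview:generateContent", nil)
	r.ServeHTTP(w, req)
	// Prints: action = test/gemini-3-pro-preview:generateContent
}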

View File

@@ -32,7 +32,9 @@ func TestRegisterManagementRoutes(t *testing.T) {
m.setProxy(proxy)
base := &handlers.BaseAPIHandler{}
m.registerManagementRoutes(r, base)
m.registerManagementRoutes(r, base, nil)
srv := httptest.NewServer(r)
defer srv.Close()
managementPaths := []struct {
path string
@@ -63,11 +65,17 @@ func TestRegisterManagementRoutes(t *testing.T) {
for _, path := range managementPaths {
t.Run(path.path, func(t *testing.T) {
proxyCalled = false
req := httptest.NewRequest(path.method, path.path, nil)
w := httptest.NewRecorder()
r.ServeHTTP(w, req)
req, err := http.NewRequest(path.method, srv.URL+path.path, nil)
if err != nil {
t.Fatalf("failed to build request: %v", err)
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatalf("request failed: %v", err)
}
defer resp.Body.Close()
if w.Code == http.StatusNotFound {
if resp.StatusCode == http.StatusNotFound {
t.Fatalf("route %s not registered", path.path)
}
if !proxyCalled {

View File

@@ -230,13 +230,9 @@ func NewServer(cfg *config.Config, authManager *auth.Manager, accessManager *sdk
envManagementSecret := envAdminPasswordSet && envAdminPassword != ""
// Create server instance
providerNames := make([]string, 0, len(cfg.OpenAICompatibility))
for _, p := range cfg.OpenAICompatibility {
providerNames = append(providerNames, p.Name)
}
s := &Server{
engine: engine,
handlers: handlers.NewBaseAPIHandlers(&cfg.SDKConfig, authManager, providerNames),
handlers: handlers.NewBaseAPIHandlers(&cfg.SDKConfig, authManager),
cfg: cfg,
accessManager: accessManager,
requestLogger: requestLogger,
@@ -334,8 +330,8 @@ func (s *Server) setupRoutes() {
v1beta.Use(AuthMiddleware(s.accessManager))
{
v1beta.GET("/models", geminiHandlers.GeminiModels)
v1beta.POST("/models/:action", geminiHandlers.GeminiHandler)
v1beta.GET("/models/:action", geminiHandlers.GeminiGetHandler)
v1beta.POST("/models/*action", geminiHandlers.GeminiHandler)
v1beta.GET("/models/*action", geminiHandlers.GeminiGetHandler)
}
// Root endpoint
@@ -919,12 +915,6 @@ func (s *Server) UpdateClients(cfg *config.Config) {
// Save YAML snapshot for next comparison
s.oldConfigYaml, _ = yaml.Marshal(cfg)
providerNames := make([]string, 0, len(cfg.OpenAICompatibility))
for _, p := range cfg.OpenAICompatibility {
providerNames = append(providerNames, p.Name)
}
s.handlers.OpenAICompatProviders = providerNames
s.handlers.UpdateClients(&cfg.SDKConfig)
if !cfg.RemoteManagement.DisableControlPanel {

View File

@@ -187,6 +187,9 @@ type ClaudeKey struct {
// APIKey is the authentication key for accessing Claude API services.
APIKey string `yaml:"api-key" json:"api-key"`
// Prefix optionally namespaces models for this credential (e.g., "teamA/claude-sonnet-4").
Prefix string `yaml:"prefix,omitempty" json:"prefix,omitempty"`
// BaseURL is the base URL for the Claude API endpoint.
// If empty, the default Claude API URL will be used.
BaseURL string `yaml:"base-url" json:"base-url"`
@@ -219,6 +222,9 @@ type CodexKey struct {
// APIKey is the authentication key for accessing Codex API services.
APIKey string `yaml:"api-key" json:"api-key"`
// Prefix optionally namespaces models for this credential (e.g., "teamA/gpt-5-codex").
Prefix string `yaml:"prefix,omitempty" json:"prefix,omitempty"`
// BaseURL is the base URL for the Codex API endpoint.
// If empty, the default Codex API URL will be used.
BaseURL string `yaml:"base-url" json:"base-url"`
@@ -239,6 +245,9 @@ type GeminiKey struct {
// APIKey is the authentication key for accessing Gemini API services.
APIKey string `yaml:"api-key" json:"api-key"`
// Prefix optionally namespaces models for this credential (e.g., "teamA/gemini-3-pro-preview").
Prefix string `yaml:"prefix,omitempty" json:"prefix,omitempty"`
// BaseURL optionally overrides the Gemini API endpoint.
BaseURL string `yaml:"base-url,omitempty" json:"base-url,omitempty"`
@@ -258,6 +267,9 @@ type OpenAICompatibility struct {
// Name is the identifier for this OpenAI compatibility configuration.
Name string `yaml:"name" json:"name"`
// Prefix optionally namespaces model aliases for this provider (e.g., "teamA/kimi-k2").
Prefix string `yaml:"prefix,omitempty" json:"prefix,omitempty"`
// BaseURL is the base URL for the external OpenAI-compatible API endpoint.
BaseURL string `yaml:"base-url" json:"base-url"`
@@ -422,6 +434,7 @@ func (cfg *Config) SanitizeOpenAICompatibility() {
for i := range cfg.OpenAICompatibility {
e := cfg.OpenAICompatibility[i]
e.Name = strings.TrimSpace(e.Name)
e.Prefix = normalizeModelPrefix(e.Prefix)
e.BaseURL = strings.TrimSpace(e.BaseURL)
e.Headers = NormalizeHeaders(e.Headers)
if e.BaseURL == "" {
@@ -442,6 +455,7 @@ func (cfg *Config) SanitizeCodexKeys() {
out := make([]CodexKey, 0, len(cfg.CodexKey))
for i := range cfg.CodexKey {
e := cfg.CodexKey[i]
e.Prefix = normalizeModelPrefix(e.Prefix)
e.BaseURL = strings.TrimSpace(e.BaseURL)
e.Headers = NormalizeHeaders(e.Headers)
e.ExcludedModels = NormalizeExcludedModels(e.ExcludedModels)
@@ -460,6 +474,7 @@ func (cfg *Config) SanitizeClaudeKeys() {
}
for i := range cfg.ClaudeKey {
entry := &cfg.ClaudeKey[i]
entry.Prefix = normalizeModelPrefix(entry.Prefix)
entry.Headers = NormalizeHeaders(entry.Headers)
entry.ExcludedModels = NormalizeExcludedModels(entry.ExcludedModels)
}
@@ -479,6 +494,7 @@ func (cfg *Config) SanitizeGeminiKeys() {
if entry.APIKey == "" {
continue
}
entry.Prefix = normalizeModelPrefix(entry.Prefix)
entry.BaseURL = strings.TrimSpace(entry.BaseURL)
entry.ProxyURL = strings.TrimSpace(entry.ProxyURL)
entry.Headers = NormalizeHeaders(entry.Headers)
@@ -492,6 +508,18 @@ func (cfg *Config) SanitizeGeminiKeys() {
cfg.GeminiKey = out
}
func normalizeModelPrefix(prefix string) string {
trimmed := strings.TrimSpace(prefix)
trimmed = strings.Trim(trimmed, "/")
if trimmed == "" {
return ""
}
if strings.Contains(trimmed, "/") {
return ""
}
return trimmed
}
func syncInlineAccessProvider(cfg *Config) {
if cfg == nil {
return
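normalizeModelPrefix above is small but easy to misread; a standalone sketch of its effective behavior with illustrative inputs:

package main

import (
	"fmt"
	"strings"
)

// Same trimming rules as normalizeModelPrefix in the diff above: strip whitespace and
// surrounding slashes, then reject any prefix that still contains a "/".
func normalizeModelPrefix(prefix string) string {
	trimmed := strings.Trim(strings.TrimSpace(prefix), "/")
	if trimmed == "" || strings.Contains(trimmed, "/") {
		return ""
	}
	return trimmed
}

func main() {
	for _, in := range []string{" test ", "/teamA/", "team/a", ""} {
		fmt.Printf("%q -> %q\n", in, normalizeModelPrefix(in))
	}
	// " test " -> "test", "/teamA/" -> "teamA", "team/a" -> "" (multi-segment rejected), "" -> ""
}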

View File

@@ -13,6 +13,9 @@ type VertexCompatKey struct {
// Maps to the x-goog-api-key header.
APIKey string `yaml:"api-key" json:"api-key"`
// Prefix optionally namespaces model aliases for this credential (e.g., "teamA/vertex-pro").
Prefix string `yaml:"prefix,omitempty" json:"prefix,omitempty"`
// BaseURL is the base URL for the Vertex-compatible API endpoint.
// The executor will append "/v1/publishers/google/models/{model}:action" to this.
// Example: "https://zenmux.ai/api" becomes "https://zenmux.ai/api/v1/publishers/google/models/..."
@@ -53,6 +56,7 @@ func (cfg *Config) SanitizeVertexCompatKeys() {
if entry.APIKey == "" {
continue
}
entry.Prefix = normalizeModelPrefix(entry.Prefix)
entry.BaseURL = strings.TrimSpace(entry.BaseURL)
if entry.BaseURL == "" {
// BaseURL is required for Vertex API key entries

View File

@@ -630,6 +630,13 @@ func GetQwenModels() []*ModelInfo {
}
}
// iFlowThinkingSupport is a shared ThinkingSupport configuration for iFlow models
// that support thinking mode via chat_template_kwargs.enable_thinking (boolean toggle).
// Uses level-based configuration so standard normalization flows apply before conversion.
var iFlowThinkingSupport = &ThinkingSupport{
Levels: []string{"none", "auto", "minimal", "low", "medium", "high", "xhigh"},
}
// GetIFlowModels returns supported models for iFlow OAuth accounts.
func GetIFlowModels() []*ModelInfo {
entries := []struct {
@@ -645,9 +652,9 @@ func GetIFlowModels() []*ModelInfo {
{ID: "qwen3-vl-plus", DisplayName: "Qwen3-VL-Plus", Description: "Qwen3 multimodal vision-language", Created: 1758672000},
{ID: "qwen3-max-preview", DisplayName: "Qwen3-Max-Preview", Description: "Qwen3 Max preview build", Created: 1757030400},
{ID: "kimi-k2-0905", DisplayName: "Kimi-K2-Instruct-0905", Description: "Moonshot Kimi K2 instruct 0905", Created: 1757030400},
{ID: "glm-4.6", DisplayName: "GLM-4.6", Description: "Zhipu GLM 4.6 general model", Created: 1759190400},
{ID: "glm-4.6", DisplayName: "GLM-4.6", Description: "Zhipu GLM 4.6 general model", Created: 1759190400, Thinking: iFlowThinkingSupport},
{ID: "kimi-k2", DisplayName: "Kimi-K2", Description: "Moonshot Kimi K2 general model", Created: 1752192000},
{ID: "kimi-k2-thinking", DisplayName: "Kimi-K2-Thinking", Description: "Moonshot Kimi K2 thinking model", Created: 1762387200, Thinking: &ThinkingSupport{Levels: []string{"low", "medium", "high"}}},
{ID: "kimi-k2-thinking", DisplayName: "Kimi-K2-Thinking", Description: "Moonshot Kimi K2 thinking model", Created: 1762387200},
{ID: "deepseek-v3.2-chat", DisplayName: "DeepSeek-V3.2", Description: "DeepSeek V3.2 Chat", Created: 1764576000},
{ID: "deepseek-v3.2-reasoner", DisplayName: "DeepSeek-V3.2", Description: "DeepSeek V3.2 Reasoner", Created: 1764576000},
{ID: "deepseek-v3.2", DisplayName: "DeepSeek-V3.2-Exp", Description: "DeepSeek V3.2 experimental", Created: 1759104000},
@@ -655,10 +662,10 @@ func GetIFlowModels() []*ModelInfo {
{ID: "deepseek-r1", DisplayName: "DeepSeek-R1", Description: "DeepSeek reasoning model R1", Created: 1737331200},
{ID: "deepseek-v3", DisplayName: "DeepSeek-V3-671B", Description: "DeepSeek V3 671B", Created: 1734307200},
{ID: "qwen3-32b", DisplayName: "Qwen3-32B", Description: "Qwen3 32B", Created: 1747094400},
{ID: "qwen3-235b-a22b-thinking-2507", DisplayName: "Qwen3-235B-A22B-Thinking", Description: "Qwen3 235B A22B Thinking (2507)", Created: 1753401600, Thinking: &ThinkingSupport{Levels: []string{"low", "medium", "high"}}},
{ID: "qwen3-235b-a22b-thinking-2507", DisplayName: "Qwen3-235B-A22B-Thinking", Description: "Qwen3 235B A22B Thinking (2507)", Created: 1753401600},
{ID: "qwen3-235b-a22b-instruct", DisplayName: "Qwen3-235B-A22B-Instruct", Description: "Qwen3 235B A22B Instruct", Created: 1753401600},
{ID: "qwen3-235b", DisplayName: "Qwen3-235B-A22B", Description: "Qwen3 235B A22B", Created: 1753401600},
{ID: "minimax-m2", DisplayName: "MiniMax-M2", Description: "MiniMax M2", Created: 1758672000, Thinking: &ThinkingSupport{Levels: []string{"low", "medium", "high"}}},
{ID: "minimax-m2", DisplayName: "MiniMax-M2", Description: "MiniMax M2", Created: 1758672000},
}
models := make([]*ModelInfo, 0, len(entries))
for _, entry := range entries {

View File

@@ -66,6 +66,7 @@ func (e *IFlowExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, re
if errValidate := ValidateThinkingConfig(body, upstreamModel); errValidate != nil {
return resp, errValidate
}
body = applyIFlowThinkingConfig(body)
body = applyPayloadConfig(e.cfg, req.Model, body)
endpoint := strings.TrimSuffix(baseURL, "/") + iflowDefaultEndpoint
@@ -157,6 +158,7 @@ func (e *IFlowExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Au
if errValidate := ValidateThinkingConfig(body, upstreamModel); errValidate != nil {
return nil, errValidate
}
body = applyIFlowThinkingConfig(body)
// Ensure tools array exists to avoid provider quirks similar to Qwen's behaviour.
toolsResult := gjson.GetBytes(body, "tools")
if toolsResult.Exists() && toolsResult.IsArray() && len(toolsResult.Array()) == 0 {
@@ -442,3 +444,21 @@ func ensureToolsArray(body []byte) []byte {
}
return updated
}
// applyIFlowThinkingConfig converts normalized reasoning_effort to iFlow chat_template_kwargs.enable_thinking.
// This should be called after NormalizeThinkingConfig has processed the payload.
// iFlow only supports boolean enable_thinking, so any non-"none" effort enables thinking.
func applyIFlowThinkingConfig(body []byte) []byte {
effort := gjson.GetBytes(body, "reasoning_effort")
if !effort.Exists() {
return body
}
val := strings.ToLower(strings.TrimSpace(effort.String()))
enableThinking := val != "none" && val != ""
body, _ = sjson.DeleteBytes(body, "reasoning_effort")
body, _ = sjson.SetBytes(body, "chat_template_kwargs.enable_thinking", enableThinking)
return body
}
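A self-contained sketch of the conversion applyIFlowThinkingConfig performs, mirroring the helper above with the same gjson/sjson calls; the request body is illustrative:

package main

import (
	"fmt"
	"strings"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

// Any non-empty, non-"none" reasoning_effort turns into chat_template_kwargs.enable_thinking=true;
// the reasoning_effort field itself is removed because iFlow only understands the boolean toggle.
func applyIFlowThinkingConfig(body []byte) []byte {
	effort := gjson.GetBytes(body, "reasoning_effort")
	if !effort.Exists() {
		return body
	}
	val := strings.ToLower(strings.TrimSpace(effort.String()))
	enable := val != "none" && val != ""
	body, _ = sjson.DeleteBytes(body, "reasoning_effort")
	body, _ = sjson.SetBytes(body, "chat_template_kwargs.enable_thinking", enable)
	return body
}

func main() {
	in := []byte(`{"model":"glm-4.6","reasoning_effort":"medium"}`)
	fmt.Println(string(applyIFlowThinkingConfig(in)))
	// {"model":"glm-4.6","chat_template_kwargs":{"enable_thinking":true}}
}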

View File

@@ -72,13 +72,7 @@ func ApplyReasoningEffortMetadata(payload []byte, metadata map[string]any, model
// Fallback: numeric thinking_budget suffix for level-based (OpenAI-style) models.
if util.ModelUsesThinkingLevels(baseModel) || allowCompat {
if budget, _, _, matched := util.ThinkingFromMetadata(metadata); matched && budget != nil {
if effort, ok := util.OpenAIThinkingBudgetToEffort(baseModel, *budget); ok && effort != "" {
if *budget == 0 && effort == "none" && util.ModelUsesThinkingLevels(baseModel) {
if _, supported := util.NormalizeReasoningEffortLevel(baseModel, effort); !supported {
return StripThinkingFields(payload, false)
}
}
if effort, ok := util.ThinkingBudgetToEffort(baseModel, *budget); ok && effort != "" {
if updated, err := sjson.SetBytes(payload, field, effort); err == nil {
return updated
}
@@ -273,7 +267,7 @@ func StripThinkingFields(payload []byte, effortOnly bool) []byte {
"reasoning.effort",
}
if !effortOnly {
fieldsToRemove = append([]string{"reasoning"}, fieldsToRemove...)
fieldsToRemove = append([]string{"reasoning", "thinking"}, fieldsToRemove...)
}
out := payload
for _, field := range fieldsToRemove {

View File

@@ -219,15 +219,20 @@ func ConvertClaudeRequestToCodex(modelName string, inputRawJSON []byte, _ bool)
// Convert thinking.budget_tokens to reasoning.effort for level-based models
reasoningEffort := "medium" // default
if thinking := rootResult.Get("thinking"); thinking.Exists() && thinking.IsObject() {
if thinking.Get("type").String() == "enabled" {
switch thinking.Get("type").String() {
case "enabled":
if util.ModelUsesThinkingLevels(modelName) {
if budgetTokens := thinking.Get("budget_tokens"); budgetTokens.Exists() {
budget := int(budgetTokens.Int())
if effort, ok := util.OpenAIThinkingBudgetToEffort(modelName, budget); ok && effort != "" {
if effort, ok := util.ThinkingBudgetToEffort(modelName, budget); ok && effort != "" {
reasoningEffort = effort
}
}
}
case "disabled":
if effort, ok := util.ThinkingBudgetToEffort(modelName, 0); ok && effort != "" {
reasoningEffort = effort
}
}
}
template, _ = sjson.Set(template, "reasoning.effort", reasoningEffort)

View File

@@ -253,7 +253,7 @@ func ConvertGeminiRequestToCodex(modelName string, inputRawJSON []byte, _ bool)
if util.ModelUsesThinkingLevels(modelName) {
if thinkingBudget := thinkingConfig.Get("thinkingBudget"); thinkingBudget.Exists() {
budget := int(thinkingBudget.Int())
if effort, ok := util.OpenAIThinkingBudgetToEffort(modelName, budget); ok && effort != "" {
if effort, ok := util.ThinkingBudgetToEffort(modelName, budget); ok && effort != "" {
reasoningEffort = effort
}
}

View File

@@ -63,10 +63,22 @@ func ConvertClaudeRequestToOpenAI(modelName string, inputRawJSON []byte, stream
// Thinking: Convert Claude thinking.budget_tokens to OpenAI reasoning_effort
if thinking := root.Get("thinking"); thinking.Exists() && thinking.IsObject() {
if thinkingType := thinking.Get("type"); thinkingType.Exists() && thinkingType.String() == "enabled" {
if budgetTokens := thinking.Get("budget_tokens"); budgetTokens.Exists() {
budget := int(budgetTokens.Int())
if effort, ok := util.OpenAIThinkingBudgetToEffort(modelName, budget); ok && effort != "" {
if thinkingType := thinking.Get("type"); thinkingType.Exists() {
switch thinkingType.String() {
case "enabled":
if budgetTokens := thinking.Get("budget_tokens"); budgetTokens.Exists() {
budget := int(budgetTokens.Int())
if effort, ok := util.ThinkingBudgetToEffort(modelName, budget); ok && effort != "" {
out, _ = sjson.Set(out, "reasoning_effort", effort)
}
} else {
// No budget_tokens specified, default to "auto" for enabled thinking
if effort, ok := util.ThinkingBudgetToEffort(modelName, -1); ok && effort != "" {
out, _ = sjson.Set(out, "reasoning_effort", effort)
}
}
case "disabled":
if effort, ok := util.ThinkingBudgetToEffort(modelName, 0); ok && effort != "" {
out, _ = sjson.Set(out, "reasoning_effort", effort)
}
}
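The rework above means the Claude thinking block now influences reasoning_effort in three ways; an illustrative summary only, since the actual level comes from ThinkingBudgetToEffort and the model's supported levels:

package main

import "fmt"

func main() {
	cases := []struct{ thinking, outcome string }{
		{`{"type":"enabled","budget_tokens":8192}`, "budget mapped by range, e.g. medium"},
		{`{"type":"enabled"}`, "no budget_tokens: treated as budget -1, i.e. auto"},
		{`{"type":"disabled"}`, "treated as budget 0: none, or the model's lowest supported level"},
	}
	for _, c := range cases {
		fmt.Printf("%-44s => %s\n", c.thinking, c.outcome)
	}
}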

View File

@@ -128,9 +128,10 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
param.CreatedAt = root.Get("created").Int()
}
// Check if this is the first chunk (has role)
// Emit message_start on the very first chunk, regardless of whether it has a role field.
// Some providers (like Copilot) may send tool_calls in the first chunk without a role field.
if delta := root.Get("choices.0.delta"); delta.Exists() {
if role := delta.Get("role"); role.Exists() && role.String() == "assistant" && !param.MessageStarted {
if !param.MessageStarted {
// Send message_start event
messageStart := map[string]interface{}{
"type": "message_start",

View File

@@ -83,7 +83,7 @@ func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream
if thinkingConfig := genConfig.Get("thinkingConfig"); thinkingConfig.Exists() && thinkingConfig.IsObject() {
if thinkingBudget := thinkingConfig.Get("thinkingBudget"); thinkingBudget.Exists() {
budget := int(thinkingBudget.Int())
if effort, ok := util.OpenAIThinkingBudgetToEffort(modelName, budget); ok && effort != "" {
if effort, ok := util.ThinkingBudgetToEffort(modelName, budget); ok && effort != "" {
out, _ = sjson.Set(out, "reasoning_effort", effort)
}
}

View File

@@ -1,37 +0,0 @@
package util
// OpenAIThinkingBudgetToEffort maps a numeric thinking budget (tokens)
// into an OpenAI-style reasoning effort level for level-based models.
//
// Ranges:
// - 0 -> "none"
// - -1 -> "auto"
// - 1..1024 -> "low"
// - 1025..8192 -> "medium"
// - 8193..24576 -> "high"
// - 24577.. -> highest supported level for the model (defaults to "xhigh")
//
// Negative values other than -1 are treated as unsupported.
func OpenAIThinkingBudgetToEffort(model string, budget int) (string, bool) {
switch {
case budget == -1:
return "auto", true
case budget < -1:
return "", false
case budget == 0:
return "none", true
case budget > 0 && budget <= 1024:
return "low", true
case budget <= 8192:
return "medium", true
case budget <= 24576:
return "high", true
case budget > 24576:
if levels := GetModelThinkingLevels(model); len(levels) > 0 {
return levels[len(levels)-1], true
}
return "xhigh", true
default:
return "", false
}
}

View File

@@ -118,3 +118,83 @@ func IsOpenAICompatibilityModel(model string) bool {
}
return strings.EqualFold(strings.TrimSpace(info.Type), "openai-compatibility")
}
// ThinkingEffortToBudget maps a reasoning effort level to a numeric thinking budget (tokens),
// clamping the result to the model's supported range.
//
// Mappings (values are normalized to model's supported range):
// - "none" -> 0
// - "auto" -> -1
// - "minimal" -> 512
// - "low" -> 1024
// - "medium" -> 8192
// - "high" -> 24576
// - "xhigh" -> 32768
//
// Returns false when the effort level is empty or unsupported.
func ThinkingEffortToBudget(model, effort string) (int, bool) {
if effort == "" {
return 0, false
}
normalized, ok := NormalizeReasoningEffortLevel(model, effort)
if !ok {
normalized = strings.ToLower(strings.TrimSpace(effort))
}
switch normalized {
case "none":
return 0, true
case "auto":
return NormalizeThinkingBudget(model, -1), true
case "minimal":
return NormalizeThinkingBudget(model, 512), true
case "low":
return NormalizeThinkingBudget(model, 1024), true
case "medium":
return NormalizeThinkingBudget(model, 8192), true
case "high":
return NormalizeThinkingBudget(model, 24576), true
case "xhigh":
return NormalizeThinkingBudget(model, 32768), true
default:
return 0, false
}
}
// ThinkingBudgetToEffort maps a numeric thinking budget (tokens)
// to a reasoning effort level for level-based models.
//
// Mappings:
// - 0 -> "none" (or lowest supported level if model doesn't support "none")
// - -1 -> "auto"
// - 1..1024 -> "low"
// - 1025..8192 -> "medium"
// - 8193..24576 -> "high"
// - 24577.. -> highest supported level for the model (defaults to "xhigh")
//
// Returns false when the budget is unsupported (negative values other than -1).
func ThinkingBudgetToEffort(model string, budget int) (string, bool) {
switch {
case budget == -1:
return "auto", true
case budget < -1:
return "", false
case budget == 0:
if levels := GetModelThinkingLevels(model); len(levels) > 0 {
return levels[0], true
}
return "none", true
case budget > 0 && budget <= 1024:
return "low", true
case budget <= 8192:
return "medium", true
case budget <= 24576:
return "high", true
case budget > 24576:
if levels := GetModelThinkingLevels(model); len(levels) > 0 {
return levels[len(levels)-1], true
}
return "xhigh", true
default:
return "", false
}
}
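A usage sketch of the two new helpers; the import path and model name are assumptions for illustration, and the exact clamping depends on the model's registered thinking levels and budget range:

package main

import (
	"fmt"

	// Assumed import path; the helpers live in the util package shown above.
	"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
)

func main() {
	model := "gpt-5-codex" // assumed level-based model for illustration
	for _, budget := range []int{-1, 0, 512, 4096, 20000, 50000} {
		effort, ok := util.ThinkingBudgetToEffort(model, budget)
		if !ok {
			continue
		}
		back, _ := util.ThinkingEffortToBudget(model, effort)
		fmt.Printf("budget %6d -> effort %-7s -> budget %d\n", budget, effort, back)
	}
}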

View File

@@ -201,36 +201,6 @@ func ReasoningEffortFromMetadata(metadata map[string]any) (string, bool) {
return "", true
}
// ThinkingEffortToBudget maps reasoning effort levels to approximate budgets,
// clamping the result to the model's supported range.
func ThinkingEffortToBudget(model, effort string) (int, bool) {
if effort == "" {
return 0, false
}
normalized, ok := NormalizeReasoningEffortLevel(model, effort)
if !ok {
normalized = strings.ToLower(strings.TrimSpace(effort))
}
switch normalized {
case "none":
return 0, true
case "auto":
return NormalizeThinkingBudget(model, -1), true
case "minimal":
return NormalizeThinkingBudget(model, 512), true
case "low":
return NormalizeThinkingBudget(model, 1024), true
case "medium":
return NormalizeThinkingBudget(model, 8192), true
case "high":
return NormalizeThinkingBudget(model, 24576), true
case "xhigh":
return NormalizeThinkingBudget(model, 32768), true
default:
return 0, false
}
}
// ResolveOriginalModel returns the original model name stored in metadata (if present),
// otherwise falls back to the provided model.
func ResolveOriginalModel(model string, metadata map[string]any) string {

View File

@@ -0,0 +1,303 @@
package diff
import (
"fmt"
"net/url"
"reflect"
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)
// BuildConfigChangeDetails computes a redacted, human-readable list of config changes.
// Secrets are never printed; only structural or non-sensitive fields are surfaced.
func BuildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
changes := make([]string, 0, 16)
if oldCfg == nil || newCfg == nil {
return changes
}
// Simple scalars
if oldCfg.Port != newCfg.Port {
changes = append(changes, fmt.Sprintf("port: %d -> %d", oldCfg.Port, newCfg.Port))
}
if oldCfg.AuthDir != newCfg.AuthDir {
changes = append(changes, fmt.Sprintf("auth-dir: %s -> %s", oldCfg.AuthDir, newCfg.AuthDir))
}
if oldCfg.Debug != newCfg.Debug {
changes = append(changes, fmt.Sprintf("debug: %t -> %t", oldCfg.Debug, newCfg.Debug))
}
if oldCfg.LoggingToFile != newCfg.LoggingToFile {
changes = append(changes, fmt.Sprintf("logging-to-file: %t -> %t", oldCfg.LoggingToFile, newCfg.LoggingToFile))
}
if oldCfg.UsageStatisticsEnabled != newCfg.UsageStatisticsEnabled {
changes = append(changes, fmt.Sprintf("usage-statistics-enabled: %t -> %t", oldCfg.UsageStatisticsEnabled, newCfg.UsageStatisticsEnabled))
}
if oldCfg.DisableCooling != newCfg.DisableCooling {
changes = append(changes, fmt.Sprintf("disable-cooling: %t -> %t", oldCfg.DisableCooling, newCfg.DisableCooling))
}
if oldCfg.RequestLog != newCfg.RequestLog {
changes = append(changes, fmt.Sprintf("request-log: %t -> %t", oldCfg.RequestLog, newCfg.RequestLog))
}
if oldCfg.RequestRetry != newCfg.RequestRetry {
changes = append(changes, fmt.Sprintf("request-retry: %d -> %d", oldCfg.RequestRetry, newCfg.RequestRetry))
}
if oldCfg.MaxRetryInterval != newCfg.MaxRetryInterval {
changes = append(changes, fmt.Sprintf("max-retry-interval: %d -> %d", oldCfg.MaxRetryInterval, newCfg.MaxRetryInterval))
}
if oldCfg.ProxyURL != newCfg.ProxyURL {
changes = append(changes, fmt.Sprintf("proxy-url: %s -> %s", formatProxyURL(oldCfg.ProxyURL), formatProxyURL(newCfg.ProxyURL)))
}
if oldCfg.WebsocketAuth != newCfg.WebsocketAuth {
changes = append(changes, fmt.Sprintf("ws-auth: %t -> %t", oldCfg.WebsocketAuth, newCfg.WebsocketAuth))
}
if oldCfg.ForceModelPrefix != newCfg.ForceModelPrefix {
changes = append(changes, fmt.Sprintf("force-model-prefix: %t -> %t", oldCfg.ForceModelPrefix, newCfg.ForceModelPrefix))
}
// Quota-exceeded behavior
if oldCfg.QuotaExceeded.SwitchProject != newCfg.QuotaExceeded.SwitchProject {
changes = append(changes, fmt.Sprintf("quota-exceeded.switch-project: %t -> %t", oldCfg.QuotaExceeded.SwitchProject, newCfg.QuotaExceeded.SwitchProject))
}
if oldCfg.QuotaExceeded.SwitchPreviewModel != newCfg.QuotaExceeded.SwitchPreviewModel {
changes = append(changes, fmt.Sprintf("quota-exceeded.switch-preview-model: %t -> %t", oldCfg.QuotaExceeded.SwitchPreviewModel, newCfg.QuotaExceeded.SwitchPreviewModel))
}
// API keys (redacted) and counts
if len(oldCfg.APIKeys) != len(newCfg.APIKeys) {
changes = append(changes, fmt.Sprintf("api-keys count: %d -> %d", len(oldCfg.APIKeys), len(newCfg.APIKeys)))
} else if !reflect.DeepEqual(trimStrings(oldCfg.APIKeys), trimStrings(newCfg.APIKeys)) {
changes = append(changes, "api-keys: values updated (count unchanged, redacted)")
}
if len(oldCfg.GeminiKey) != len(newCfg.GeminiKey) {
changes = append(changes, fmt.Sprintf("gemini-api-key count: %d -> %d", len(oldCfg.GeminiKey), len(newCfg.GeminiKey)))
} else {
for i := range oldCfg.GeminiKey {
o := oldCfg.GeminiKey[i]
n := newCfg.GeminiKey[i]
if strings.TrimSpace(o.BaseURL) != strings.TrimSpace(n.BaseURL) {
changes = append(changes, fmt.Sprintf("gemini[%d].base-url: %s -> %s", i, strings.TrimSpace(o.BaseURL), strings.TrimSpace(n.BaseURL)))
}
if strings.TrimSpace(o.ProxyURL) != strings.TrimSpace(n.ProxyURL) {
changes = append(changes, fmt.Sprintf("gemini[%d].proxy-url: %s -> %s", i, formatProxyURL(o.ProxyURL), formatProxyURL(n.ProxyURL)))
}
if strings.TrimSpace(o.Prefix) != strings.TrimSpace(n.Prefix) {
changes = append(changes, fmt.Sprintf("gemini[%d].prefix: %s -> %s", i, formatProxyURL(o.Prefix), formatProxyURL(n.Prefix)))
}
if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
changes = append(changes, fmt.Sprintf("gemini[%d].api-key: updated", i))
}
if !equalStringMap(o.Headers, n.Headers) {
changes = append(changes, fmt.Sprintf("gemini[%d].headers: updated", i))
}
oldExcluded := SummarizeExcludedModels(o.ExcludedModels)
newExcluded := SummarizeExcludedModels(n.ExcludedModels)
if oldExcluded.hash != newExcluded.hash {
changes = append(changes, fmt.Sprintf("gemini[%d].excluded-models: updated (%d -> %d entries)", i, oldExcluded.count, newExcluded.count))
}
}
}
// Claude keys (do not print key material)
if len(oldCfg.ClaudeKey) != len(newCfg.ClaudeKey) {
changes = append(changes, fmt.Sprintf("claude-api-key count: %d -> %d", len(oldCfg.ClaudeKey), len(newCfg.ClaudeKey)))
} else {
for i := range oldCfg.ClaudeKey {
o := oldCfg.ClaudeKey[i]
n := newCfg.ClaudeKey[i]
if strings.TrimSpace(o.BaseURL) != strings.TrimSpace(n.BaseURL) {
changes = append(changes, fmt.Sprintf("claude[%d].base-url: %s -> %s", i, strings.TrimSpace(o.BaseURL), strings.TrimSpace(n.BaseURL)))
}
if strings.TrimSpace(o.ProxyURL) != strings.TrimSpace(n.ProxyURL) {
changes = append(changes, fmt.Sprintf("claude[%d].proxy-url: %s -> %s", i, formatProxyURL(o.ProxyURL), formatProxyURL(n.ProxyURL)))
}
if strings.TrimSpace(o.Prefix) != strings.TrimSpace(n.Prefix) {
changes = append(changes, fmt.Sprintf("claude[%d].prefix: %s -> %s", i, formatProxyURL(o.Prefix), formatProxyURL(n.Prefix)))
}
if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
changes = append(changes, fmt.Sprintf("claude[%d].api-key: updated", i))
}
if !equalStringMap(o.Headers, n.Headers) {
changes = append(changes, fmt.Sprintf("claude[%d].headers: updated", i))
}
oldExcluded := SummarizeExcludedModels(o.ExcludedModels)
newExcluded := SummarizeExcludedModels(n.ExcludedModels)
if oldExcluded.hash != newExcluded.hash {
changes = append(changes, fmt.Sprintf("claude[%d].excluded-models: updated (%d -> %d entries)", i, oldExcluded.count, newExcluded.count))
}
}
}
// Codex keys (do not print key material)
if len(oldCfg.CodexKey) != len(newCfg.CodexKey) {
changes = append(changes, fmt.Sprintf("codex-api-key count: %d -> %d", len(oldCfg.CodexKey), len(newCfg.CodexKey)))
} else {
for i := range oldCfg.CodexKey {
o := oldCfg.CodexKey[i]
n := newCfg.CodexKey[i]
if strings.TrimSpace(o.BaseURL) != strings.TrimSpace(n.BaseURL) {
changes = append(changes, fmt.Sprintf("codex[%d].base-url: %s -> %s", i, strings.TrimSpace(o.BaseURL), strings.TrimSpace(n.BaseURL)))
}
if strings.TrimSpace(o.ProxyURL) != strings.TrimSpace(n.ProxyURL) {
changes = append(changes, fmt.Sprintf("codex[%d].proxy-url: %s -> %s", i, formatProxyURL(o.ProxyURL), formatProxyURL(n.ProxyURL)))
}
if strings.TrimSpace(o.Prefix) != strings.TrimSpace(n.Prefix) {
changes = append(changes, fmt.Sprintf("codex[%d].prefix: %s -> %s", i, formatProxyURL(o.Prefix), formatProxyURL(n.Prefix)))
}
if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
changes = append(changes, fmt.Sprintf("codex[%d].api-key: updated", i))
}
if !equalStringMap(o.Headers, n.Headers) {
changes = append(changes, fmt.Sprintf("codex[%d].headers: updated", i))
}
oldExcluded := SummarizeExcludedModels(o.ExcludedModels)
newExcluded := SummarizeExcludedModels(n.ExcludedModels)
if oldExcluded.hash != newExcluded.hash {
changes = append(changes, fmt.Sprintf("codex[%d].excluded-models: updated (%d -> %d entries)", i, oldExcluded.count, newExcluded.count))
}
}
}
// AmpCode settings (redacted where needed)
oldAmpURL := strings.TrimSpace(oldCfg.AmpCode.UpstreamURL)
newAmpURL := strings.TrimSpace(newCfg.AmpCode.UpstreamURL)
if oldAmpURL != newAmpURL {
changes = append(changes, fmt.Sprintf("ampcode.upstream-url: %s -> %s", oldAmpURL, newAmpURL))
}
oldAmpKey := strings.TrimSpace(oldCfg.AmpCode.UpstreamAPIKey)
newAmpKey := strings.TrimSpace(newCfg.AmpCode.UpstreamAPIKey)
switch {
case oldAmpKey == "" && newAmpKey != "":
changes = append(changes, "ampcode.upstream-api-key: added")
case oldAmpKey != "" && newAmpKey == "":
changes = append(changes, "ampcode.upstream-api-key: removed")
case oldAmpKey != newAmpKey:
changes = append(changes, "ampcode.upstream-api-key: updated")
}
if oldCfg.AmpCode.RestrictManagementToLocalhost != newCfg.AmpCode.RestrictManagementToLocalhost {
changes = append(changes, fmt.Sprintf("ampcode.restrict-management-to-localhost: %t -> %t", oldCfg.AmpCode.RestrictManagementToLocalhost, newCfg.AmpCode.RestrictManagementToLocalhost))
}
oldMappings := SummarizeAmpModelMappings(oldCfg.AmpCode.ModelMappings)
newMappings := SummarizeAmpModelMappings(newCfg.AmpCode.ModelMappings)
if oldMappings.hash != newMappings.hash {
changes = append(changes, fmt.Sprintf("ampcode.model-mappings: updated (%d -> %d entries)", oldMappings.count, newMappings.count))
}
if oldCfg.AmpCode.ForceModelMappings != newCfg.AmpCode.ForceModelMappings {
changes = append(changes, fmt.Sprintf("ampcode.force-model-mappings: %t -> %t", oldCfg.AmpCode.ForceModelMappings, newCfg.AmpCode.ForceModelMappings))
}
if entries, _ := DiffOAuthExcludedModelChanges(oldCfg.OAuthExcludedModels, newCfg.OAuthExcludedModels); len(entries) > 0 {
changes = append(changes, entries...)
}
// Remote management (never print the key)
if oldCfg.RemoteManagement.AllowRemote != newCfg.RemoteManagement.AllowRemote {
changes = append(changes, fmt.Sprintf("remote-management.allow-remote: %t -> %t", oldCfg.RemoteManagement.AllowRemote, newCfg.RemoteManagement.AllowRemote))
}
if oldCfg.RemoteManagement.DisableControlPanel != newCfg.RemoteManagement.DisableControlPanel {
changes = append(changes, fmt.Sprintf("remote-management.disable-control-panel: %t -> %t", oldCfg.RemoteManagement.DisableControlPanel, newCfg.RemoteManagement.DisableControlPanel))
}
oldPanelRepo := strings.TrimSpace(oldCfg.RemoteManagement.PanelGitHubRepository)
newPanelRepo := strings.TrimSpace(newCfg.RemoteManagement.PanelGitHubRepository)
if oldPanelRepo != newPanelRepo {
changes = append(changes, fmt.Sprintf("remote-management.panel-github-repository: %s -> %s", oldPanelRepo, newPanelRepo))
}
if oldCfg.RemoteManagement.SecretKey != newCfg.RemoteManagement.SecretKey {
switch {
case oldCfg.RemoteManagement.SecretKey == "" && newCfg.RemoteManagement.SecretKey != "":
changes = append(changes, "remote-management.secret-key: created")
case oldCfg.RemoteManagement.SecretKey != "" && newCfg.RemoteManagement.SecretKey == "":
changes = append(changes, "remote-management.secret-key: deleted")
default:
changes = append(changes, "remote-management.secret-key: updated")
}
}
// OpenAI compatibility providers (summarized)
if compat := DiffOpenAICompatibility(oldCfg.OpenAICompatibility, newCfg.OpenAICompatibility); len(compat) > 0 {
changes = append(changes, "openai-compatibility:")
for _, c := range compat {
changes = append(changes, " "+c)
}
}
// Vertex-compatible API keys
if len(oldCfg.VertexCompatAPIKey) != len(newCfg.VertexCompatAPIKey) {
changes = append(changes, fmt.Sprintf("vertex-api-key count: %d -> %d", len(oldCfg.VertexCompatAPIKey), len(newCfg.VertexCompatAPIKey)))
} else {
for i := range oldCfg.VertexCompatAPIKey {
o := oldCfg.VertexCompatAPIKey[i]
n := newCfg.VertexCompatAPIKey[i]
if strings.TrimSpace(o.BaseURL) != strings.TrimSpace(n.BaseURL) {
changes = append(changes, fmt.Sprintf("vertex[%d].base-url: %s -> %s", i, strings.TrimSpace(o.BaseURL), strings.TrimSpace(n.BaseURL)))
}
if strings.TrimSpace(o.ProxyURL) != strings.TrimSpace(n.ProxyURL) {
changes = append(changes, fmt.Sprintf("vertex[%d].proxy-url: %s -> %s", i, formatProxyURL(o.ProxyURL), formatProxyURL(n.ProxyURL)))
}
if strings.TrimSpace(o.Prefix) != strings.TrimSpace(n.Prefix) {
changes = append(changes, fmt.Sprintf("vertex[%d].prefix: %s -> %s", i, formatProxyURL(o.Prefix), formatProxyURL(n.Prefix)))
}
if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
changes = append(changes, fmt.Sprintf("vertex[%d].api-key: updated", i))
}
oldModels := SummarizeVertexModels(o.Models)
newModels := SummarizeVertexModels(n.Models)
if oldModels.hash != newModels.hash {
changes = append(changes, fmt.Sprintf("vertex[%d].models: updated (%d -> %d entries)", i, oldModels.count, newModels.count))
}
if !equalStringMap(o.Headers, n.Headers) {
changes = append(changes, fmt.Sprintf("vertex[%d].headers: updated", i))
}
}
}
return changes
}
func trimStrings(in []string) []string {
out := make([]string, len(in))
for i := range in {
out[i] = strings.TrimSpace(in[i])
}
return out
}
func equalStringMap(a, b map[string]string) bool {
if len(a) != len(b) {
return false
}
for k, v := range a {
if b[k] != v {
return false
}
}
return true
}
func formatProxyURL(raw string) string {
trimmed := strings.TrimSpace(raw)
if trimmed == "" {
return "<none>"
}
parsed, err := url.Parse(trimmed)
if err != nil {
return "<redacted>"
}
host := strings.TrimSpace(parsed.Host)
scheme := strings.TrimSpace(parsed.Scheme)
if host == "" {
// Allow host:port style without scheme.
parsed2, err2 := url.Parse("http://" + trimmed)
if err2 == nil {
host = strings.TrimSpace(parsed2.Host)
}
scheme = ""
}
if host == "" {
return "<redacted>"
}
if scheme == "" {
return host
}
return scheme + "://" + host
}

View File

@@ -0,0 +1,526 @@
package diff
import (
"testing"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
sdkconfig "github.com/router-for-me/CLIProxyAPI/v6/sdk/config"
)
func TestBuildConfigChangeDetails(t *testing.T) {
oldCfg := &config.Config{
Port: 8080,
AuthDir: "/tmp/auth-old",
GeminiKey: []config.GeminiKey{
{APIKey: "old", BaseURL: "http://old", ExcludedModels: []string{"old-model"}},
},
AmpCode: config.AmpCode{
UpstreamURL: "http://old-upstream",
ModelMappings: []config.AmpModelMapping{{From: "from-old", To: "to-old"}},
RestrictManagementToLocalhost: false,
},
RemoteManagement: config.RemoteManagement{
AllowRemote: false,
SecretKey: "old",
DisableControlPanel: false,
PanelGitHubRepository: "repo-old",
},
OAuthExcludedModels: map[string][]string{
"providerA": {"m1"},
},
OpenAICompatibility: []config.OpenAICompatibility{
{
Name: "compat-a",
APIKeyEntries: []config.OpenAICompatibilityAPIKey{
{APIKey: "k1"},
},
Models: []config.OpenAICompatibilityModel{{Name: "m1"}},
},
},
}
newCfg := &config.Config{
Port: 9090,
AuthDir: "/tmp/auth-new",
GeminiKey: []config.GeminiKey{
{APIKey: "old", BaseURL: "http://old", ExcludedModels: []string{"old-model", "extra"}},
},
AmpCode: config.AmpCode{
UpstreamURL: "http://new-upstream",
RestrictManagementToLocalhost: true,
ModelMappings: []config.AmpModelMapping{
{From: "from-old", To: "to-old"},
{From: "from-new", To: "to-new"},
},
},
RemoteManagement: config.RemoteManagement{
AllowRemote: true,
SecretKey: "new",
DisableControlPanel: true,
PanelGitHubRepository: "repo-new",
},
OAuthExcludedModels: map[string][]string{
"providerA": {"m1", "m2"},
"providerB": {"x"},
},
OpenAICompatibility: []config.OpenAICompatibility{
{
Name: "compat-a",
APIKeyEntries: []config.OpenAICompatibilityAPIKey{
{APIKey: "k1"},
},
Models: []config.OpenAICompatibilityModel{{Name: "m1"}, {Name: "m2"}},
},
{
Name: "compat-b",
APIKeyEntries: []config.OpenAICompatibilityAPIKey{
{APIKey: "k2"},
},
},
},
}
details := BuildConfigChangeDetails(oldCfg, newCfg)
expectContains(t, details, "port: 8080 -> 9090")
expectContains(t, details, "auth-dir: /tmp/auth-old -> /tmp/auth-new")
expectContains(t, details, "gemini[0].excluded-models: updated (1 -> 2 entries)")
expectContains(t, details, "ampcode.upstream-url: http://old-upstream -> http://new-upstream")
expectContains(t, details, "ampcode.model-mappings: updated (1 -> 2 entries)")
expectContains(t, details, "remote-management.allow-remote: false -> true")
expectContains(t, details, "remote-management.secret-key: updated")
expectContains(t, details, "oauth-excluded-models[providera]: updated (1 -> 2 entries)")
expectContains(t, details, "oauth-excluded-models[providerb]: added (1 entries)")
expectContains(t, details, "openai-compatibility:")
expectContains(t, details, " provider added: compat-b (api-keys=1, models=0)")
expectContains(t, details, " provider updated: compat-a (models 1 -> 2)")
}
func TestBuildConfigChangeDetails_NoChanges(t *testing.T) {
cfg := &config.Config{
Port: 8080,
}
if details := BuildConfigChangeDetails(cfg, cfg); len(details) != 0 {
t.Fatalf("expected no change entries, got %v", details)
}
}
func TestBuildConfigChangeDetails_GeminiVertexHeadersAndForceMappings(t *testing.T) {
oldCfg := &config.Config{
GeminiKey: []config.GeminiKey{
{APIKey: "g1", Headers: map[string]string{"H": "1"}, ExcludedModels: []string{"a"}},
},
VertexCompatAPIKey: []config.VertexCompatKey{
{APIKey: "v1", BaseURL: "http://v-old", Models: []config.VertexCompatModel{{Name: "m1"}}},
},
AmpCode: config.AmpCode{
ModelMappings: []config.AmpModelMapping{{From: "a", To: "b"}},
ForceModelMappings: false,
},
}
newCfg := &config.Config{
GeminiKey: []config.GeminiKey{
{APIKey: "g1", Headers: map[string]string{"H": "2"}, ExcludedModels: []string{"a", "b"}},
},
VertexCompatAPIKey: []config.VertexCompatKey{
{APIKey: "v1", BaseURL: "http://v-new", Models: []config.VertexCompatModel{{Name: "m1"}, {Name: "m2"}}},
},
AmpCode: config.AmpCode{
ModelMappings: []config.AmpModelMapping{{From: "a", To: "c"}},
ForceModelMappings: true,
},
}
details := BuildConfigChangeDetails(oldCfg, newCfg)
expectContains(t, details, "gemini[0].headers: updated")
expectContains(t, details, "gemini[0].excluded-models: updated (1 -> 2 entries)")
expectContains(t, details, "ampcode.model-mappings: updated (1 -> 1 entries)")
expectContains(t, details, "ampcode.force-model-mappings: false -> true")
}
func TestBuildConfigChangeDetails_ModelPrefixes(t *testing.T) {
oldCfg := &config.Config{
GeminiKey: []config.GeminiKey{
{APIKey: "g1", Prefix: "old-g", BaseURL: "http://g", ProxyURL: "http://gp"},
},
ClaudeKey: []config.ClaudeKey{
{APIKey: "c1", Prefix: "old-c", BaseURL: "http://c", ProxyURL: "http://cp"},
},
CodexKey: []config.CodexKey{
{APIKey: "x1", Prefix: "old-x", BaseURL: "http://x", ProxyURL: "http://xp"},
},
VertexCompatAPIKey: []config.VertexCompatKey{
{APIKey: "v1", Prefix: "old-v", BaseURL: "http://v", ProxyURL: "http://vp"},
},
}
newCfg := &config.Config{
GeminiKey: []config.GeminiKey{
{APIKey: "g1", Prefix: "new-g", BaseURL: "http://g", ProxyURL: "http://gp"},
},
ClaudeKey: []config.ClaudeKey{
{APIKey: "c1", Prefix: "new-c", BaseURL: "http://c", ProxyURL: "http://cp"},
},
CodexKey: []config.CodexKey{
{APIKey: "x1", Prefix: "new-x", BaseURL: "http://x", ProxyURL: "http://xp"},
},
VertexCompatAPIKey: []config.VertexCompatKey{
{APIKey: "v1", Prefix: "new-v", BaseURL: "http://v", ProxyURL: "http://vp"},
},
}
changes := BuildConfigChangeDetails(oldCfg, newCfg)
expectContains(t, changes, "gemini[0].prefix: old-g -> new-g")
expectContains(t, changes, "claude[0].prefix: old-c -> new-c")
expectContains(t, changes, "codex[0].prefix: old-x -> new-x")
expectContains(t, changes, "vertex[0].prefix: old-v -> new-v")
}
func TestBuildConfigChangeDetails_NilSafe(t *testing.T) {
if details := BuildConfigChangeDetails(nil, &config.Config{}); len(details) != 0 {
t.Fatalf("expected empty change list when old nil, got %v", details)
}
if details := BuildConfigChangeDetails(&config.Config{}, nil); len(details) != 0 {
t.Fatalf("expected empty change list when new nil, got %v", details)
}
}
func TestBuildConfigChangeDetails_SecretsAndCounts(t *testing.T) {
oldCfg := &config.Config{
SDKConfig: sdkconfig.SDKConfig{
APIKeys: []string{"a"},
},
AmpCode: config.AmpCode{
UpstreamAPIKey: "",
},
RemoteManagement: config.RemoteManagement{
SecretKey: "",
},
}
newCfg := &config.Config{
SDKConfig: sdkconfig.SDKConfig{
APIKeys: []string{"a", "b", "c"},
},
AmpCode: config.AmpCode{
UpstreamAPIKey: "new-key",
},
RemoteManagement: config.RemoteManagement{
SecretKey: "new-secret",
},
}
details := BuildConfigChangeDetails(oldCfg, newCfg)
expectContains(t, details, "api-keys count: 1 -> 3")
expectContains(t, details, "ampcode.upstream-api-key: added")
expectContains(t, details, "remote-management.secret-key: created")
}
func TestBuildConfigChangeDetails_FlagsAndKeys(t *testing.T) {
oldCfg := &config.Config{
Port: 1000,
AuthDir: "/old",
Debug: false,
LoggingToFile: false,
UsageStatisticsEnabled: false,
DisableCooling: false,
RequestRetry: 1,
MaxRetryInterval: 1,
WebsocketAuth: false,
QuotaExceeded: config.QuotaExceeded{SwitchProject: false, SwitchPreviewModel: false},
ClaudeKey: []config.ClaudeKey{{APIKey: "c1"}},
CodexKey: []config.CodexKey{{APIKey: "x1"}},
AmpCode: config.AmpCode{UpstreamAPIKey: "keep", RestrictManagementToLocalhost: false},
RemoteManagement: config.RemoteManagement{DisableControlPanel: false, PanelGitHubRepository: "old/repo", SecretKey: "keep"},
SDKConfig: sdkconfig.SDKConfig{
RequestLog: false,
ProxyURL: "http://old-proxy",
APIKeys: []string{"key-1"},
},
}
newCfg := &config.Config{
Port: 2000,
AuthDir: "/new",
Debug: true,
LoggingToFile: true,
UsageStatisticsEnabled: true,
DisableCooling: true,
RequestRetry: 2,
MaxRetryInterval: 3,
WebsocketAuth: true,
QuotaExceeded: config.QuotaExceeded{SwitchProject: true, SwitchPreviewModel: true},
ClaudeKey: []config.ClaudeKey{
{APIKey: "c1", BaseURL: "http://new", ProxyURL: "http://p", Headers: map[string]string{"H": "1"}, ExcludedModels: []string{"a"}},
{APIKey: "c2"},
},
CodexKey: []config.CodexKey{
{APIKey: "x1", BaseURL: "http://x", ProxyURL: "http://px", Headers: map[string]string{"H": "2"}, ExcludedModels: []string{"b"}},
{APIKey: "x2"},
},
AmpCode: config.AmpCode{
UpstreamAPIKey: "",
RestrictManagementToLocalhost: true,
ModelMappings: []config.AmpModelMapping{{From: "a", To: "b"}},
},
RemoteManagement: config.RemoteManagement{
DisableControlPanel: true,
PanelGitHubRepository: "new/repo",
SecretKey: "",
},
SDKConfig: sdkconfig.SDKConfig{
RequestLog: true,
ProxyURL: "http://new-proxy",
APIKeys: []string{" key-1 ", "key-2"},
},
}
details := BuildConfigChangeDetails(oldCfg, newCfg)
expectContains(t, details, "debug: false -> true")
expectContains(t, details, "logging-to-file: false -> true")
expectContains(t, details, "usage-statistics-enabled: false -> true")
expectContains(t, details, "disable-cooling: false -> true")
expectContains(t, details, "request-log: false -> true")
expectContains(t, details, "request-retry: 1 -> 2")
expectContains(t, details, "max-retry-interval: 1 -> 3")
expectContains(t, details, "proxy-url: http://old-proxy -> http://new-proxy")
expectContains(t, details, "ws-auth: false -> true")
expectContains(t, details, "quota-exceeded.switch-project: false -> true")
expectContains(t, details, "quota-exceeded.switch-preview-model: false -> true")
expectContains(t, details, "api-keys count: 1 -> 2")
expectContains(t, details, "claude-api-key count: 1 -> 2")
expectContains(t, details, "codex-api-key count: 1 -> 2")
expectContains(t, details, "ampcode.restrict-management-to-localhost: false -> true")
expectContains(t, details, "ampcode.upstream-api-key: removed")
expectContains(t, details, "remote-management.disable-control-panel: false -> true")
expectContains(t, details, "remote-management.panel-github-repository: old/repo -> new/repo")
expectContains(t, details, "remote-management.secret-key: deleted")
}
func TestBuildConfigChangeDetails_AllBranches(t *testing.T) {
oldCfg := &config.Config{
Port: 1,
AuthDir: "/a",
Debug: false,
LoggingToFile: false,
UsageStatisticsEnabled: false,
DisableCooling: false,
RequestRetry: 1,
MaxRetryInterval: 1,
WebsocketAuth: false,
QuotaExceeded: config.QuotaExceeded{SwitchProject: false, SwitchPreviewModel: false},
GeminiKey: []config.GeminiKey{
{APIKey: "g-old", BaseURL: "http://g-old", ProxyURL: "http://gp-old", Headers: map[string]string{"A": "1"}},
},
ClaudeKey: []config.ClaudeKey{
{APIKey: "c-old", BaseURL: "http://c-old", ProxyURL: "http://cp-old", Headers: map[string]string{"H": "1"}, ExcludedModels: []string{"x"}},
},
CodexKey: []config.CodexKey{
{APIKey: "x-old", BaseURL: "http://x-old", ProxyURL: "http://xp-old", Headers: map[string]string{"H": "1"}, ExcludedModels: []string{"x"}},
},
VertexCompatAPIKey: []config.VertexCompatKey{
{APIKey: "v-old", BaseURL: "http://v-old", ProxyURL: "http://vp-old", Headers: map[string]string{"H": "1"}, Models: []config.VertexCompatModel{{Name: "m1"}}},
},
AmpCode: config.AmpCode{
UpstreamURL: "http://amp-old",
UpstreamAPIKey: "old-key",
RestrictManagementToLocalhost: false,
ModelMappings: []config.AmpModelMapping{{From: "a", To: "b"}},
ForceModelMappings: false,
},
RemoteManagement: config.RemoteManagement{
AllowRemote: false,
DisableControlPanel: false,
PanelGitHubRepository: "old/repo",
SecretKey: "old",
},
SDKConfig: sdkconfig.SDKConfig{
RequestLog: false,
ProxyURL: "http://old-proxy",
APIKeys: []string{" keyA "},
},
OAuthExcludedModels: map[string][]string{"p1": {"a"}},
OpenAICompatibility: []config.OpenAICompatibility{
{
Name: "prov-old",
APIKeyEntries: []config.OpenAICompatibilityAPIKey{
{APIKey: "k1"},
},
Models: []config.OpenAICompatibilityModel{{Name: "m1"}},
},
},
}
newCfg := &config.Config{
Port: 2,
AuthDir: "/b",
Debug: true,
LoggingToFile: true,
UsageStatisticsEnabled: true,
DisableCooling: true,
RequestRetry: 2,
MaxRetryInterval: 3,
WebsocketAuth: true,
QuotaExceeded: config.QuotaExceeded{SwitchProject: true, SwitchPreviewModel: true},
GeminiKey: []config.GeminiKey{
{APIKey: "g-new", BaseURL: "http://g-new", ProxyURL: "http://gp-new", Headers: map[string]string{"A": "2"}, ExcludedModels: []string{"x", "y"}},
},
ClaudeKey: []config.ClaudeKey{
{APIKey: "c-new", BaseURL: "http://c-new", ProxyURL: "http://cp-new", Headers: map[string]string{"H": "2"}, ExcludedModels: []string{"x", "y"}},
},
CodexKey: []config.CodexKey{
{APIKey: "x-new", BaseURL: "http://x-new", ProxyURL: "http://xp-new", Headers: map[string]string{"H": "2"}, ExcludedModels: []string{"x", "y"}},
},
VertexCompatAPIKey: []config.VertexCompatKey{
{APIKey: "v-new", BaseURL: "http://v-new", ProxyURL: "http://vp-new", Headers: map[string]string{"H": "2"}, Models: []config.VertexCompatModel{{Name: "m1"}, {Name: "m2"}}},
},
AmpCode: config.AmpCode{
UpstreamURL: "http://amp-new",
UpstreamAPIKey: "",
RestrictManagementToLocalhost: true,
ModelMappings: []config.AmpModelMapping{{From: "a", To: "c"}},
ForceModelMappings: true,
},
RemoteManagement: config.RemoteManagement{
AllowRemote: true,
DisableControlPanel: true,
PanelGitHubRepository: "new/repo",
SecretKey: "",
},
SDKConfig: sdkconfig.SDKConfig{
RequestLog: true,
ProxyURL: "http://new-proxy",
APIKeys: []string{"keyB"},
},
OAuthExcludedModels: map[string][]string{"p1": {"b", "c"}, "p2": {"d"}},
OpenAICompatibility: []config.OpenAICompatibility{
{
Name: "prov-old",
APIKeyEntries: []config.OpenAICompatibilityAPIKey{
{APIKey: "k1"},
{APIKey: "k2"},
},
Models: []config.OpenAICompatibilityModel{{Name: "m1"}, {Name: "m2"}},
},
{
Name: "prov-new",
APIKeyEntries: []config.OpenAICompatibilityAPIKey{{APIKey: "k3"}},
},
},
}
changes := BuildConfigChangeDetails(oldCfg, newCfg)
expectContains(t, changes, "port: 1 -> 2")
expectContains(t, changes, "auth-dir: /a -> /b")
expectContains(t, changes, "debug: false -> true")
expectContains(t, changes, "logging-to-file: false -> true")
expectContains(t, changes, "usage-statistics-enabled: false -> true")
expectContains(t, changes, "disable-cooling: false -> true")
expectContains(t, changes, "request-retry: 1 -> 2")
expectContains(t, changes, "max-retry-interval: 1 -> 3")
expectContains(t, changes, "proxy-url: http://old-proxy -> http://new-proxy")
expectContains(t, changes, "ws-auth: false -> true")
expectContains(t, changes, "quota-exceeded.switch-project: false -> true")
expectContains(t, changes, "quota-exceeded.switch-preview-model: false -> true")
expectContains(t, changes, "api-keys: values updated (count unchanged, redacted)")
expectContains(t, changes, "gemini[0].base-url: http://g-old -> http://g-new")
expectContains(t, changes, "gemini[0].proxy-url: http://gp-old -> http://gp-new")
expectContains(t, changes, "gemini[0].api-key: updated")
expectContains(t, changes, "gemini[0].headers: updated")
expectContains(t, changes, "gemini[0].excluded-models: updated (0 -> 2 entries)")
expectContains(t, changes, "claude[0].base-url: http://c-old -> http://c-new")
expectContains(t, changes, "claude[0].proxy-url: http://cp-old -> http://cp-new")
expectContains(t, changes, "claude[0].api-key: updated")
expectContains(t, changes, "claude[0].headers: updated")
expectContains(t, changes, "claude[0].excluded-models: updated (1 -> 2 entries)")
expectContains(t, changes, "codex[0].base-url: http://x-old -> http://x-new")
expectContains(t, changes, "codex[0].proxy-url: http://xp-old -> http://xp-new")
expectContains(t, changes, "codex[0].api-key: updated")
expectContains(t, changes, "codex[0].headers: updated")
expectContains(t, changes, "codex[0].excluded-models: updated (1 -> 2 entries)")
expectContains(t, changes, "vertex[0].base-url: http://v-old -> http://v-new")
expectContains(t, changes, "vertex[0].proxy-url: http://vp-old -> http://vp-new")
expectContains(t, changes, "vertex[0].api-key: updated")
expectContains(t, changes, "vertex[0].models: updated (1 -> 2 entries)")
expectContains(t, changes, "vertex[0].headers: updated")
expectContains(t, changes, "ampcode.upstream-url: http://amp-old -> http://amp-new")
expectContains(t, changes, "ampcode.upstream-api-key: removed")
expectContains(t, changes, "ampcode.restrict-management-to-localhost: false -> true")
expectContains(t, changes, "ampcode.model-mappings: updated (1 -> 1 entries)")
expectContains(t, changes, "ampcode.force-model-mappings: false -> true")
expectContains(t, changes, "oauth-excluded-models[p1]: updated (1 -> 2 entries)")
expectContains(t, changes, "oauth-excluded-models[p2]: added (1 entries)")
expectContains(t, changes, "remote-management.allow-remote: false -> true")
expectContains(t, changes, "remote-management.disable-control-panel: false -> true")
expectContains(t, changes, "remote-management.panel-github-repository: old/repo -> new/repo")
expectContains(t, changes, "remote-management.secret-key: deleted")
expectContains(t, changes, "openai-compatibility:")
}
func TestFormatProxyURL(t *testing.T) {
tests := []struct {
name string
in string
want string
}{
{name: "empty", in: "", want: "<none>"},
{name: "invalid", in: "http://[::1", want: "<redacted>"},
{name: "fullURLRedactsUserinfoAndPath", in: "http://user:pass@example.com:8080/path?x=1#frag", want: "http://example.com:8080"},
{name: "socks5RedactsUserinfoAndPath", in: "socks5://user:pass@192.168.1.1:1080/path?x=1", want: "socks5://192.168.1.1:1080"},
{name: "socks5HostPort", in: "socks5://proxy.example.com:1080/", want: "socks5://proxy.example.com:1080"},
{name: "hostPortNoScheme", in: "example.com:1234/path?x=1", want: "example.com:1234"},
{name: "relativePathRedacted", in: "/just/path", want: "<redacted>"},
{name: "schemeAndHost", in: "https://example.com", want: "https://example.com"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := formatProxyURL(tt.in); got != tt.want {
t.Fatalf("expected %q, got %q", tt.want, got)
}
})
}
}
func TestBuildConfigChangeDetails_SecretAndUpstreamUpdates(t *testing.T) {
oldCfg := &config.Config{
AmpCode: config.AmpCode{
UpstreamAPIKey: "old",
},
RemoteManagement: config.RemoteManagement{
SecretKey: "old",
},
}
newCfg := &config.Config{
AmpCode: config.AmpCode{
UpstreamAPIKey: "new",
},
RemoteManagement: config.RemoteManagement{
SecretKey: "new",
},
}
changes := BuildConfigChangeDetails(oldCfg, newCfg)
expectContains(t, changes, "ampcode.upstream-api-key: updated")
expectContains(t, changes, "remote-management.secret-key: updated")
}
func TestBuildConfigChangeDetails_CountBranches(t *testing.T) {
oldCfg := &config.Config{}
newCfg := &config.Config{
GeminiKey: []config.GeminiKey{{APIKey: "g"}},
ClaudeKey: []config.ClaudeKey{{APIKey: "c"}},
CodexKey: []config.CodexKey{{APIKey: "x"}},
VertexCompatAPIKey: []config.VertexCompatKey{
{APIKey: "v", BaseURL: "http://v"},
},
}
changes := BuildConfigChangeDetails(oldCfg, newCfg)
expectContains(t, changes, "gemini-api-key count: 0 -> 1")
expectContains(t, changes, "claude-api-key count: 0 -> 1")
expectContains(t, changes, "codex-api-key count: 0 -> 1")
expectContains(t, changes, "vertex-api-key count: 0 -> 1")
}
func TestTrimStrings(t *testing.T) {
out := trimStrings([]string{" a ", "b", " c"})
if len(out) != 3 || out[0] != "a" || out[1] != "b" || out[2] != "c" {
t.Fatalf("unexpected trimmed strings: %v", out)
}
}

View File

@@ -0,0 +1,102 @@
package diff
import (
"crypto/sha256"
"encoding/hex"
"encoding/json"
"sort"
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)
// ComputeOpenAICompatModelsHash returns a stable hash for OpenAI-compat models.
// Used to detect model list changes during hot reload.
func ComputeOpenAICompatModelsHash(models []config.OpenAICompatibilityModel) string {
keys := normalizeModelPairs(func(out func(key string)) {
for _, model := range models {
name := strings.TrimSpace(model.Name)
alias := strings.TrimSpace(model.Alias)
if name == "" && alias == "" {
continue
}
out(strings.ToLower(name) + "|" + strings.ToLower(alias))
}
})
return hashJoined(keys)
}
// ComputeVertexCompatModelsHash returns a stable hash for Vertex-compatible models.
func ComputeVertexCompatModelsHash(models []config.VertexCompatModel) string {
keys := normalizeModelPairs(func(out func(key string)) {
for _, model := range models {
name := strings.TrimSpace(model.Name)
alias := strings.TrimSpace(model.Alias)
if name == "" && alias == "" {
continue
}
out(strings.ToLower(name) + "|" + strings.ToLower(alias))
}
})
return hashJoined(keys)
}
// ComputeClaudeModelsHash returns a stable hash for Claude model aliases.
func ComputeClaudeModelsHash(models []config.ClaudeModel) string {
keys := normalizeModelPairs(func(out func(key string)) {
for _, model := range models {
name := strings.TrimSpace(model.Name)
alias := strings.TrimSpace(model.Alias)
if name == "" && alias == "" {
continue
}
out(strings.ToLower(name) + "|" + strings.ToLower(alias))
}
})
return hashJoined(keys)
}
// ComputeExcludedModelsHash returns a normalized hash for excluded model lists.
func ComputeExcludedModelsHash(excluded []string) string {
if len(excluded) == 0 {
return ""
}
normalized := make([]string, 0, len(excluded))
for _, entry := range excluded {
if trimmed := strings.TrimSpace(entry); trimmed != "" {
normalized = append(normalized, strings.ToLower(trimmed))
}
}
if len(normalized) == 0 {
return ""
}
sort.Strings(normalized)
data, _ := json.Marshal(normalized)
sum := sha256.Sum256(data)
return hex.EncodeToString(sum[:])
}
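// normalizeModelPairs gathers keys emitted by the collect callback, drops
// duplicates, and returns them sorted; it returns nil when no keys remain.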
func normalizeModelPairs(collect func(out func(key string))) []string {
seen := make(map[string]struct{})
keys := make([]string, 0)
collect(func(key string) {
if _, exists := seen[key]; exists {
return
}
seen[key] = struct{}{}
keys = append(keys, key)
})
if len(keys) == 0 {
return nil
}
sort.Strings(keys)
return keys
}
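// hashJoined returns the hex-encoded SHA-256 of the newline-joined keys,
// or an empty string when the key set is empty.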
func hashJoined(keys []string) string {
if len(keys) == 0 {
return ""
}
sum := sha256.Sum256([]byte(strings.Join(keys, "\n")))
return hex.EncodeToString(sum[:])
}
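Editor's note: a minimal usage sketch (not part of this diff) illustrating the normalization guarantees of the hash helpers above. It assumes placement as an external test file next to the package; the packages live under internal/ and are only importable from inside this module, and the Example name is illustrative.
package diff_test

import (
    "fmt"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/config"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
)

// Example_hashNormalization shows that case, blank entries, and duplicates
// do not affect the computed hash.
func Example_hashNormalization() {
    a := diff.ComputeOpenAICompatModelsHash([]config.OpenAICompatibilityModel{
        {Name: "GPT-4", Alias: "GPT4"},
        {Name: " "},                    // blank entries are skipped
        {Name: "gpt-4", Alias: "gpt4"}, // duplicate after lowercasing
    })
    b := diff.ComputeOpenAICompatModelsHash([]config.OpenAICompatibilityModel{
        {Name: "gpt-4", Alias: "gpt4"},
    })
    fmt.Println(a == b)
    // Output: true
}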

View File

@@ -0,0 +1,159 @@
package diff
import (
"testing"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)
func TestComputeOpenAICompatModelsHash_Deterministic(t *testing.T) {
models := []config.OpenAICompatibilityModel{
{Name: "gpt-4", Alias: "gpt4"},
{Name: "gpt-3.5-turbo"},
}
hash1 := ComputeOpenAICompatModelsHash(models)
hash2 := ComputeOpenAICompatModelsHash(models)
if hash1 == "" {
t.Fatal("hash should not be empty")
}
if hash1 != hash2 {
t.Fatalf("hash should be deterministic, got %s vs %s", hash1, hash2)
}
changed := ComputeOpenAICompatModelsHash([]config.OpenAICompatibilityModel{{Name: "gpt-4"}, {Name: "gpt-4.1"}})
if hash1 == changed {
t.Fatal("hash should change when model list changes")
}
}
func TestComputeOpenAICompatModelsHash_NormalizesAndDedups(t *testing.T) {
a := []config.OpenAICompatibilityModel{
{Name: "gpt-4", Alias: "gpt4"},
{Name: " "},
{Name: "GPT-4", Alias: "GPT4"},
{Alias: "a1"},
}
b := []config.OpenAICompatibilityModel{
{Alias: "A1"},
{Name: "gpt-4", Alias: "gpt4"},
}
h1 := ComputeOpenAICompatModelsHash(a)
h2 := ComputeOpenAICompatModelsHash(b)
if h1 == "" || h2 == "" {
t.Fatal("expected non-empty hashes for non-empty model sets")
}
if h1 != h2 {
t.Fatalf("expected normalized hashes to match, got %s / %s", h1, h2)
}
}
func TestComputeVertexCompatModelsHash_DifferentInputs(t *testing.T) {
models := []config.VertexCompatModel{{Name: "gemini-pro", Alias: "pro"}}
hash1 := ComputeVertexCompatModelsHash(models)
hash2 := ComputeVertexCompatModelsHash([]config.VertexCompatModel{{Name: "gemini-1.5-pro", Alias: "pro"}})
if hash1 == "" || hash2 == "" {
t.Fatal("hashes should not be empty for non-empty models")
}
if hash1 == hash2 {
t.Fatal("hash should differ when model content differs")
}
}
func TestComputeVertexCompatModelsHash_IgnoresBlankAndOrder(t *testing.T) {
a := []config.VertexCompatModel{
{Name: "m1", Alias: "a1"},
{Name: " "},
{Name: "M1", Alias: "A1"},
}
b := []config.VertexCompatModel{
{Name: "m1", Alias: "a1"},
}
if h1, h2 := ComputeVertexCompatModelsHash(a), ComputeVertexCompatModelsHash(b); h1 == "" || h1 != h2 {
t.Fatalf("expected same hash ignoring blanks/dupes, got %q / %q", h1, h2)
}
}
func TestComputeClaudeModelsHash_Empty(t *testing.T) {
if got := ComputeClaudeModelsHash(nil); got != "" {
t.Fatalf("expected empty hash for nil models, got %q", got)
}
if got := ComputeClaudeModelsHash([]config.ClaudeModel{}); got != "" {
t.Fatalf("expected empty hash for empty slice, got %q", got)
}
}
func TestComputeClaudeModelsHash_IgnoresBlankAndDedup(t *testing.T) {
a := []config.ClaudeModel{
{Name: "m1", Alias: "a1"},
{Name: " "},
{Name: "M1", Alias: "A1"},
}
b := []config.ClaudeModel{
{Name: "m1", Alias: "a1"},
}
if h1, h2 := ComputeClaudeModelsHash(a), ComputeClaudeModelsHash(b); h1 == "" || h1 != h2 {
t.Fatalf("expected same hash ignoring blanks/dupes, got %q / %q", h1, h2)
}
}
func TestComputeExcludedModelsHash_Normalizes(t *testing.T) {
hash1 := ComputeExcludedModelsHash([]string{" A ", "b", "a"})
hash2 := ComputeExcludedModelsHash([]string{"a", " b", "A"})
if hash1 == "" || hash2 == "" {
t.Fatal("hash should not be empty for non-empty input")
}
if hash1 != hash2 {
t.Fatalf("hash should be order/space insensitive for same multiset, got %s vs %s", hash1, hash2)
}
hash3 := ComputeExcludedModelsHash([]string{"c"})
if hash1 == hash3 {
t.Fatal("hash should differ for different normalized sets")
}
}
func TestComputeOpenAICompatModelsHash_Empty(t *testing.T) {
if got := ComputeOpenAICompatModelsHash(nil); got != "" {
t.Fatalf("expected empty hash for nil input, got %q", got)
}
if got := ComputeOpenAICompatModelsHash([]config.OpenAICompatibilityModel{}); got != "" {
t.Fatalf("expected empty hash for empty slice, got %q", got)
}
if got := ComputeOpenAICompatModelsHash([]config.OpenAICompatibilityModel{{Name: " "}, {Alias: ""}}); got != "" {
t.Fatalf("expected empty hash for blank models, got %q", got)
}
}
func TestComputeVertexCompatModelsHash_Empty(t *testing.T) {
if got := ComputeVertexCompatModelsHash(nil); got != "" {
t.Fatalf("expected empty hash for nil input, got %q", got)
}
if got := ComputeVertexCompatModelsHash([]config.VertexCompatModel{}); got != "" {
t.Fatalf("expected empty hash for empty slice, got %q", got)
}
if got := ComputeVertexCompatModelsHash([]config.VertexCompatModel{{Name: " "}}); got != "" {
t.Fatalf("expected empty hash for blank models, got %q", got)
}
}
func TestComputeExcludedModelsHash_Empty(t *testing.T) {
if got := ComputeExcludedModelsHash(nil); got != "" {
t.Fatalf("expected empty hash for nil input, got %q", got)
}
if got := ComputeExcludedModelsHash([]string{}); got != "" {
t.Fatalf("expected empty hash for empty slice, got %q", got)
}
if got := ComputeExcludedModelsHash([]string{" ", ""}); got != "" {
t.Fatalf("expected empty hash for whitespace-only entries, got %q", got)
}
}
func TestComputeClaudeModelsHash_Deterministic(t *testing.T) {
models := []config.ClaudeModel{{Name: "a", Alias: "A"}, {Name: "b"}}
h1 := ComputeClaudeModelsHash(models)
h2 := ComputeClaudeModelsHash(models)
if h1 == "" || h1 != h2 {
t.Fatalf("expected deterministic hash, got %s / %s", h1, h2)
}
if h3 := ComputeClaudeModelsHash([]config.ClaudeModel{{Name: "a"}}); h3 == h1 {
t.Fatalf("expected different hash when models change, got %s", h3)
}
}

View File

@@ -0,0 +1,151 @@
package diff
import (
"crypto/sha256"
"encoding/hex"
"fmt"
"sort"
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)
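// ExcludedModelsSummary records the normalized hash and entry count of an excluded-model list.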
type ExcludedModelsSummary struct {
hash string
count int
}
// SummarizeExcludedModels normalizes and hashes an excluded-model list.
func SummarizeExcludedModels(list []string) ExcludedModelsSummary {
if len(list) == 0 {
return ExcludedModelsSummary{}
}
seen := make(map[string]struct{}, len(list))
normalized := make([]string, 0, len(list))
for _, entry := range list {
if trimmed := strings.ToLower(strings.TrimSpace(entry)); trimmed != "" {
if _, exists := seen[trimmed]; exists {
continue
}
seen[trimmed] = struct{}{}
normalized = append(normalized, trimmed)
}
}
sort.Strings(normalized)
return ExcludedModelsSummary{
hash: ComputeExcludedModelsHash(normalized),
count: len(normalized),
}
}
// SummarizeOAuthExcludedModels summarizes OAuth excluded models per provider.
func SummarizeOAuthExcludedModels(entries map[string][]string) map[string]ExcludedModelsSummary {
if len(entries) == 0 {
return nil
}
out := make(map[string]ExcludedModelsSummary, len(entries))
for k, v := range entries {
key := strings.ToLower(strings.TrimSpace(k))
if key == "" {
continue
}
out[key] = SummarizeExcludedModels(v)
}
return out
}
// DiffOAuthExcludedModelChanges compares per-provider OAuth excluded-model maps
// and returns the change descriptions along with the affected provider keys.
func DiffOAuthExcludedModelChanges(oldMap, newMap map[string][]string) ([]string, []string) {
oldSummary := SummarizeOAuthExcludedModels(oldMap)
newSummary := SummarizeOAuthExcludedModels(newMap)
keys := make(map[string]struct{}, len(oldSummary)+len(newSummary))
for k := range oldSummary {
keys[k] = struct{}{}
}
for k := range newSummary {
keys[k] = struct{}{}
}
changes := make([]string, 0, len(keys))
affected := make([]string, 0, len(keys))
for key := range keys {
oldInfo, okOld := oldSummary[key]
newInfo, okNew := newSummary[key]
switch {
case okOld && !okNew:
changes = append(changes, fmt.Sprintf("oauth-excluded-models[%s]: removed", key))
affected = append(affected, key)
case !okOld && okNew:
changes = append(changes, fmt.Sprintf("oauth-excluded-models[%s]: added (%d entries)", key, newInfo.count))
affected = append(affected, key)
case okOld && okNew && oldInfo.hash != newInfo.hash:
changes = append(changes, fmt.Sprintf("oauth-excluded-models[%s]: updated (%d -> %d entries)", key, oldInfo.count, newInfo.count))
affected = append(affected, key)
}
}
sort.Strings(changes)
sort.Strings(affected)
return changes, affected
}
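// AmpModelMappingsSummary records the hash and entry count of normalized Amp model mappings.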
type AmpModelMappingsSummary struct {
hash string
count int
}
// SummarizeAmpModelMappings hashes Amp model mappings for change detection.
func SummarizeAmpModelMappings(mappings []config.AmpModelMapping) AmpModelMappingsSummary {
if len(mappings) == 0 {
return AmpModelMappingsSummary{}
}
entries := make([]string, 0, len(mappings))
for _, mapping := range mappings {
from := strings.TrimSpace(mapping.From)
to := strings.TrimSpace(mapping.To)
if from == "" && to == "" {
continue
}
entries = append(entries, from+"->"+to)
}
if len(entries) == 0 {
return AmpModelMappingsSummary{}
}
sort.Strings(entries)
sum := sha256.Sum256([]byte(strings.Join(entries, "|")))
return AmpModelMappingsSummary{
hash: hex.EncodeToString(sum[:]),
count: len(entries),
}
}
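// VertexModelsSummary records the hash and entry count of normalized Vertex-compatible models.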
type VertexModelsSummary struct {
hash string
count int
}
// SummarizeVertexModels hashes vertex-compatible models for change detection.
func SummarizeVertexModels(models []config.VertexCompatModel) VertexModelsSummary {
if len(models) == 0 {
return VertexModelsSummary{}
}
names := make([]string, 0, len(models))
for _, m := range models {
name := strings.TrimSpace(m.Name)
alias := strings.TrimSpace(m.Alias)
if name == "" && alias == "" {
continue
}
if alias != "" {
name = alias
}
names = append(names, name)
}
if len(names) == 0 {
return VertexModelsSummary{}
}
sort.Strings(names)
sum := sha256.Sum256([]byte(strings.Join(names, "|")))
return VertexModelsSummary{
hash: hex.EncodeToString(sum[:]),
count: len(names),
}
}
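Editor's note: a small sketch (not part of this diff) showing the shape of the strings DiffOAuthExcludedModelChanges returns; the provider names and model lists are made up for illustration.
package diff_test

import (
    "fmt"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
)

// Example_oauthExcludedDiff demonstrates key normalization and the
// added/updated wording used in change descriptions.
func Example_oauthExcludedDiff() {
    oldMap := map[string][]string{"ProviderA": {"m1"}}
    newMap := map[string][]string{"providera": {"m1", "m2"}, "providerb": {"x"}}
    changes, affected := diff.DiffOAuthExcludedModelChanges(oldMap, newMap)
    for _, c := range changes {
        fmt.Println(c)
    }
    fmt.Println(affected)
    // Output:
    // oauth-excluded-models[providera]: updated (1 -> 2 entries)
    // oauth-excluded-models[providerb]: added (1 entries)
    // [providera providerb]
}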

View File

@@ -0,0 +1,109 @@
package diff
import (
"testing"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)
func TestSummarizeExcludedModels_NormalizesAndDedupes(t *testing.T) {
summary := SummarizeExcludedModels([]string{"A", " a ", "B", "b"})
if summary.count != 2 {
t.Fatalf("expected 2 unique entries, got %d", summary.count)
}
if summary.hash == "" {
t.Fatal("expected non-empty hash")
}
if empty := SummarizeExcludedModels(nil); empty.count != 0 || empty.hash != "" {
t.Fatalf("expected empty summary for nil input, got %+v", empty)
}
}
func TestDiffOAuthExcludedModelChanges(t *testing.T) {
oldMap := map[string][]string{
"ProviderA": {"model-1", "model-2"},
"providerB": {"x"},
}
newMap := map[string][]string{
"providerA": {"model-1", "model-3"},
"providerC": {"y"},
}
changes, affected := DiffOAuthExcludedModelChanges(oldMap, newMap)
expectContains(t, changes, "oauth-excluded-models[providera]: updated (2 -> 2 entries)")
expectContains(t, changes, "oauth-excluded-models[providerb]: removed")
expectContains(t, changes, "oauth-excluded-models[providerc]: added (1 entries)")
if len(affected) != 3 {
t.Fatalf("expected 3 affected providers, got %d", len(affected))
}
}
func TestSummarizeAmpModelMappings(t *testing.T) {
summary := SummarizeAmpModelMappings([]config.AmpModelMapping{
{From: "a", To: "A"},
{From: "b", To: "B"},
{From: " ", To: " "}, // ignored
})
if summary.count != 2 {
t.Fatalf("expected 2 entries, got %d", summary.count)
}
if summary.hash == "" {
t.Fatal("expected non-empty hash")
}
if empty := SummarizeAmpModelMappings(nil); empty.count != 0 || empty.hash != "" {
t.Fatalf("expected empty summary for nil input, got %+v", empty)
}
if blank := SummarizeAmpModelMappings([]config.AmpModelMapping{{From: " ", To: " "}}); blank.count != 0 || blank.hash != "" {
t.Fatalf("expected blank mappings ignored, got %+v", blank)
}
}
func TestSummarizeOAuthExcludedModels_NormalizesKeys(t *testing.T) {
out := SummarizeOAuthExcludedModels(map[string][]string{
"ProvA": {"X"},
"": {"ignored"},
})
if len(out) != 1 {
t.Fatalf("expected only non-empty key summary, got %d", len(out))
}
if _, ok := out["prova"]; !ok {
t.Fatalf("expected normalized key 'prova', got keys %v", out)
}
if out["prova"].count != 1 || out["prova"].hash == "" {
t.Fatalf("unexpected summary %+v", out["prova"])
}
if outEmpty := SummarizeOAuthExcludedModels(nil); outEmpty != nil {
t.Fatalf("expected nil map for nil input, got %v", outEmpty)
}
}
func TestSummarizeVertexModels(t *testing.T) {
summary := SummarizeVertexModels([]config.VertexCompatModel{
{Name: "m1"},
{Name: " ", Alias: "alias"},
{}, // ignored
})
if summary.count != 2 {
t.Fatalf("expected 2 vertex models, got %d", summary.count)
}
if summary.hash == "" {
t.Fatal("expected non-empty hash")
}
if empty := SummarizeVertexModels(nil); empty.count != 0 || empty.hash != "" {
t.Fatalf("expected empty summary for nil input, got %+v", empty)
}
if blank := SummarizeVertexModels([]config.VertexCompatModel{{Name: " "}}); blank.count != 0 || blank.hash != "" {
t.Fatalf("expected blank model ignored, got %+v", blank)
}
}
func expectContains(t *testing.T, list []string, target string) {
t.Helper()
for _, entry := range list {
if entry == target {
return
}
}
t.Fatalf("expected list to contain %q, got %#v", target, list)
}

View File

@@ -0,0 +1,183 @@
package diff
import (
"crypto/sha256"
"encoding/hex"
"fmt"
"sort"
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)
// DiffOpenAICompatibility compares the old and new provider lists and produces
// human-readable change descriptions.
func DiffOpenAICompatibility(oldList, newList []config.OpenAICompatibility) []string {
changes := make([]string, 0)
oldMap := make(map[string]config.OpenAICompatibility, len(oldList))
oldLabels := make(map[string]string, len(oldList))
for idx, entry := range oldList {
key, label := openAICompatKey(entry, idx)
oldMap[key] = entry
oldLabels[key] = label
}
newMap := make(map[string]config.OpenAICompatibility, len(newList))
newLabels := make(map[string]string, len(newList))
for idx, entry := range newList {
key, label := openAICompatKey(entry, idx)
newMap[key] = entry
newLabels[key] = label
}
keySet := make(map[string]struct{}, len(oldMap)+len(newMap))
for key := range oldMap {
keySet[key] = struct{}{}
}
for key := range newMap {
keySet[key] = struct{}{}
}
orderedKeys := make([]string, 0, len(keySet))
for key := range keySet {
orderedKeys = append(orderedKeys, key)
}
sort.Strings(orderedKeys)
for _, key := range orderedKeys {
oldEntry, oldOk := oldMap[key]
newEntry, newOk := newMap[key]
label := oldLabels[key]
if label == "" {
label = newLabels[key]
}
switch {
case !oldOk:
changes = append(changes, fmt.Sprintf("provider added: %s (api-keys=%d, models=%d)", label, countAPIKeys(newEntry), countOpenAIModels(newEntry.Models)))
case !newOk:
changes = append(changes, fmt.Sprintf("provider removed: %s (api-keys=%d, models=%d)", label, countAPIKeys(oldEntry), countOpenAIModels(oldEntry.Models)))
default:
if detail := describeOpenAICompatibilityUpdate(oldEntry, newEntry); detail != "" {
changes = append(changes, fmt.Sprintf("provider updated: %s %s", label, detail))
}
}
}
return changes
}
func describeOpenAICompatibilityUpdate(oldEntry, newEntry config.OpenAICompatibility) string {
oldKeyCount := countAPIKeys(oldEntry)
newKeyCount := countAPIKeys(newEntry)
oldModelCount := countOpenAIModels(oldEntry.Models)
newModelCount := countOpenAIModels(newEntry.Models)
details := make([]string, 0, 3)
if oldKeyCount != newKeyCount {
details = append(details, fmt.Sprintf("api-keys %d -> %d", oldKeyCount, newKeyCount))
}
if oldModelCount != newModelCount {
details = append(details, fmt.Sprintf("models %d -> %d", oldModelCount, newModelCount))
}
if !equalStringMap(oldEntry.Headers, newEntry.Headers) {
details = append(details, "headers updated")
}
if len(details) == 0 {
return ""
}
return "(" + strings.Join(details, ", ") + ")"
}
func countAPIKeys(entry config.OpenAICompatibility) int {
count := 0
for _, keyEntry := range entry.APIKeyEntries {
if strings.TrimSpace(keyEntry.APIKey) != "" {
count++
}
}
return count
}
func countOpenAIModels(models []config.OpenAICompatibilityModel) int {
count := 0
for _, model := range models {
name := strings.TrimSpace(model.Name)
alias := strings.TrimSpace(model.Alias)
if name == "" && alias == "" {
continue
}
count++
}
return count
}
func openAICompatKey(entry config.OpenAICompatibility, index int) (string, string) {
name := strings.TrimSpace(entry.Name)
if name != "" {
return "name:" + name, name
}
base := strings.TrimSpace(entry.BaseURL)
if base != "" {
return "base:" + base, base
}
for _, model := range entry.Models {
alias := strings.TrimSpace(model.Alias)
if alias == "" {
alias = strings.TrimSpace(model.Name)
}
if alias != "" {
return "alias:" + alias, alias
}
}
sig := openAICompatSignature(entry)
if sig == "" {
return fmt.Sprintf("index:%d", index), fmt.Sprintf("entry-%d", index+1)
}
short := sig
if len(short) > 8 {
short = short[:8]
}
return "sig:" + sig, "compat-" + short
}
func openAICompatSignature(entry config.OpenAICompatibility) string {
var parts []string
if v := strings.TrimSpace(entry.Name); v != "" {
parts = append(parts, "name="+strings.ToLower(v))
}
if v := strings.TrimSpace(entry.BaseURL); v != "" {
parts = append(parts, "base="+v)
}
models := make([]string, 0, len(entry.Models))
for _, model := range entry.Models {
name := strings.TrimSpace(model.Name)
alias := strings.TrimSpace(model.Alias)
if name == "" && alias == "" {
continue
}
models = append(models, strings.ToLower(name)+"|"+strings.ToLower(alias))
}
if len(models) > 0 {
sort.Strings(models)
parts = append(parts, "models="+strings.Join(models, ","))
}
if len(entry.Headers) > 0 {
keys := make([]string, 0, len(entry.Headers))
for k := range entry.Headers {
if trimmed := strings.TrimSpace(k); trimmed != "" {
keys = append(keys, strings.ToLower(trimmed))
}
}
if len(keys) > 0 {
sort.Strings(keys)
parts = append(parts, "headers="+strings.Join(keys, ","))
}
}
// Intentionally exclude API key material; only count non-empty entries.
if count := countAPIKeys(entry); count > 0 {
parts = append(parts, fmt.Sprintf("api_keys=%d", count))
}
if len(parts) == 0 {
return ""
}
sum := sha256.Sum256([]byte(strings.Join(parts, "|")))
return hex.EncodeToString(sum[:])
}
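Editor's note: an illustrative sketch (not part of this diff) of how DiffOpenAICompatibility summarizes a provider whose key list grew; the provider and key names are placeholders.
package diff_test

import (
    "fmt"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/config"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
)

// Example_openAICompatDiff shows the "provider updated" wording when only the
// API key count changes.
func Example_openAICompatDiff() {
    oldList := []config.OpenAICompatibility{{
        Name:          "prov",
        APIKeyEntries: []config.OpenAICompatibilityAPIKey{{APIKey: "k1"}},
        Models:        []config.OpenAICompatibilityModel{{Name: "m1"}},
    }}
    newList := []config.OpenAICompatibility{{
        Name:          "prov",
        APIKeyEntries: []config.OpenAICompatibilityAPIKey{{APIKey: "k1"}, {APIKey: "k2"}},
        Models:        []config.OpenAICompatibilityModel{{Name: "m1"}},
    }}
    for _, c := range diff.DiffOpenAICompatibility(oldList, newList) {
        fmt.Println(c)
    }
    // Output:
    // provider updated: prov (api-keys 1 -> 2)
}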

View File

@@ -0,0 +1,187 @@
package diff
import (
"strings"
"testing"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)
func TestDiffOpenAICompatibility(t *testing.T) {
oldList := []config.OpenAICompatibility{
{
Name: "provider-a",
APIKeyEntries: []config.OpenAICompatibilityAPIKey{
{APIKey: "key-a"},
},
Models: []config.OpenAICompatibilityModel{
{Name: "m1"},
},
},
}
newList := []config.OpenAICompatibility{
{
Name: "provider-a",
APIKeyEntries: []config.OpenAICompatibilityAPIKey{
{APIKey: "key-a"},
{APIKey: "key-b"},
},
Models: []config.OpenAICompatibilityModel{
{Name: "m1"},
{Name: "m2"},
},
Headers: map[string]string{"X-Test": "1"},
},
{
Name: "provider-b",
APIKeyEntries: []config.OpenAICompatibilityAPIKey{{APIKey: "key-b"}},
},
}
changes := DiffOpenAICompatibility(oldList, newList)
expectContains(t, changes, "provider added: provider-b (api-keys=1, models=0)")
expectContains(t, changes, "provider updated: provider-a (api-keys 1 -> 2, models 1 -> 2, headers updated)")
}
func TestDiffOpenAICompatibility_RemovedAndUnchanged(t *testing.T) {
oldList := []config.OpenAICompatibility{
{
Name: "provider-a",
APIKeyEntries: []config.OpenAICompatibilityAPIKey{{APIKey: "key-a"}},
Models: []config.OpenAICompatibilityModel{{Name: "m1"}},
},
}
newList := []config.OpenAICompatibility{
{
Name: "provider-a",
APIKeyEntries: []config.OpenAICompatibilityAPIKey{{APIKey: "key-a"}},
Models: []config.OpenAICompatibilityModel{{Name: "m1"}},
},
}
if changes := DiffOpenAICompatibility(oldList, newList); len(changes) != 0 {
t.Fatalf("expected no changes, got %v", changes)
}
newList = nil
changes := DiffOpenAICompatibility(oldList, newList)
expectContains(t, changes, "provider removed: provider-a (api-keys=1, models=1)")
}
func TestOpenAICompatKeyFallbacks(t *testing.T) {
entry := config.OpenAICompatibility{
BaseURL: "http://base",
Models: []config.OpenAICompatibilityModel{{Alias: "alias-only"}},
}
key, label := openAICompatKey(entry, 0)
if key != "base:http://base" || label != "http://base" {
t.Fatalf("expected base key, got %s/%s", key, label)
}
entry.BaseURL = ""
key, label = openAICompatKey(entry, 1)
if key != "alias:alias-only" || label != "alias-only" {
t.Fatalf("expected alias fallback, got %s/%s", key, label)
}
entry.Models = nil
key, label = openAICompatKey(entry, 2)
if key != "index:2" || label != "entry-3" {
t.Fatalf("expected index fallback, got %s/%s", key, label)
}
}
func TestOpenAICompatKey_UsesName(t *testing.T) {
entry := config.OpenAICompatibility{Name: "My-Provider"}
key, label := openAICompatKey(entry, 0)
if key != "name:My-Provider" || label != "My-Provider" {
t.Fatalf("expected name key, got %s/%s", key, label)
}
}
func TestOpenAICompatKey_SignatureFallbackWhenOnlyAPIKeys(t *testing.T) {
entry := config.OpenAICompatibility{
APIKeyEntries: []config.OpenAICompatibilityAPIKey{{APIKey: "k1"}, {APIKey: "k2"}},
}
key, label := openAICompatKey(entry, 0)
if !strings.HasPrefix(key, "sig:") || !strings.HasPrefix(label, "compat-") {
t.Fatalf("expected signature key, got %s/%s", key, label)
}
}
func TestOpenAICompatSignature_EmptyReturnsEmpty(t *testing.T) {
if got := openAICompatSignature(config.OpenAICompatibility{}); got != "" {
t.Fatalf("expected empty signature, got %q", got)
}
}
func TestOpenAICompatSignature_StableAndNormalized(t *testing.T) {
a := config.OpenAICompatibility{
Name: " Provider ",
BaseURL: "http://base",
Models: []config.OpenAICompatibilityModel{
{Name: "m1"},
{Name: " "},
{Alias: "A1"},
},
Headers: map[string]string{
"X-Test": "1",
" ": "ignored",
},
APIKeyEntries: []config.OpenAICompatibilityAPIKey{
{APIKey: "k1"},
{APIKey: " "},
},
}
b := config.OpenAICompatibility{
Name: "provider",
BaseURL: "http://base",
Models: []config.OpenAICompatibilityModel{
{Alias: "a1"},
{Name: "m1"},
},
Headers: map[string]string{
"x-test": "2",
},
APIKeyEntries: []config.OpenAICompatibilityAPIKey{
{APIKey: "k2"},
},
}
sigA := openAICompatSignature(a)
sigB := openAICompatSignature(b)
if sigA == "" || sigB == "" {
t.Fatalf("expected non-empty signatures, got %q / %q", sigA, sigB)
}
if sigA != sigB {
t.Fatalf("expected normalized signatures to match, got %s / %s", sigA, sigB)
}
c := b
c.Models = append(c.Models, config.OpenAICompatibilityModel{Name: "m2"})
if sigC := openAICompatSignature(c); sigC == sigB {
t.Fatalf("expected signature to change when models change, got %s", sigC)
}
}
func TestCountOpenAIModelsSkipsBlanks(t *testing.T) {
models := []config.OpenAICompatibilityModel{
{Name: "m1"},
{Name: ""},
{Alias: ""},
{Name: " "},
{Alias: "a1"},
}
if got := countOpenAIModels(models); got != 2 {
t.Fatalf("expected 2 counted models, got %d", got)
}
}
func TestOpenAICompatKeyUsesModelNameWhenAliasEmpty(t *testing.T) {
entry := config.OpenAICompatibility{
Models: []config.OpenAICompatibilityModel{{Name: "model-name"}},
}
key, label := openAICompatKey(entry, 5)
if key != "alias:model-name" || label != "model-name" {
t.Fatalf("expected model-name fallback, got %s/%s", key, label)
}
}

View File

@@ -23,6 +23,7 @@ import (
"github.com/fsnotify/fsnotify"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
"github.com/router-for-me/CLIProxyAPI/v6/internal/runtime/geminicli"
"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
"gopkg.in/yaml.v3"
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
@@ -183,7 +184,7 @@ func (w *Watcher) Start(ctx context.Context) error {
go w.processEvents(ctx)
// Perform an initial full reload based on current config and auth dir
w.reloadClients(true, nil)
w.reloadClients(true, nil, false)
return nil
}
@@ -276,7 +277,7 @@ func (w *Watcher) DispatchRuntimeAuthUpdate(update AuthUpdate) bool {
return true
}
func (w *Watcher) refreshAuthState() {
func (w *Watcher) refreshAuthState(force bool) {
auths := w.SnapshotCoreAuths()
w.clientsMutex.Lock()
if len(w.runtimeAuths) > 0 {
@@ -286,12 +287,12 @@ func (w *Watcher) refreshAuthState() {
}
}
}
updates := w.prepareAuthUpdatesLocked(auths)
updates := w.prepareAuthUpdatesLocked(auths, force)
w.clientsMutex.Unlock()
w.dispatchAuthUpdates(updates)
}
func (w *Watcher) prepareAuthUpdatesLocked(auths []*coreauth.Auth) []AuthUpdate {
func (w *Watcher) prepareAuthUpdatesLocked(auths []*coreauth.Auth, force bool) []AuthUpdate {
newState := make(map[string]*coreauth.Auth, len(auths))
for _, auth := range auths {
if auth == nil || auth.ID == "" {
@@ -318,7 +319,7 @@ func (w *Watcher) prepareAuthUpdatesLocked(auths []*coreauth.Auth) []AuthUpdate
for id, auth := range newState {
if existing, ok := w.currentAuths[id]; !ok {
updates = append(updates, AuthUpdate{Action: AuthUpdateActionAdd, ID: id, Auth: auth.Clone()})
} else if !authEqual(existing, auth) {
} else if force || !authEqual(existing, auth) {
updates = append(updates, AuthUpdate{Action: AuthUpdateActionModify, ID: id, Auth: auth.Clone()})
}
}
@@ -485,170 +486,6 @@ func normalizeAuth(a *coreauth.Auth) *coreauth.Auth {
return clone
}
// computeOpenAICompatModelsHash returns a stable hash for the compatibility models so that
// changes to the model list trigger auth updates during hot reload.
func computeOpenAICompatModelsHash(models []config.OpenAICompatibilityModel) string {
if len(models) == 0 {
return ""
}
data, err := json.Marshal(models)
if err != nil || len(data) == 0 {
return ""
}
sum := sha256.Sum256(data)
return hex.EncodeToString(sum[:])
}
func computeVertexCompatModelsHash(models []config.VertexCompatModel) string {
if len(models) == 0 {
return ""
}
data, err := json.Marshal(models)
if err != nil || len(data) == 0 {
return ""
}
sum := sha256.Sum256(data)
return hex.EncodeToString(sum[:])
}
// computeClaudeModelsHash returns a stable hash for Claude model aliases.
func computeClaudeModelsHash(models []config.ClaudeModel) string {
if len(models) == 0 {
return ""
}
data, err := json.Marshal(models)
if err != nil || len(data) == 0 {
return ""
}
sum := sha256.Sum256(data)
return hex.EncodeToString(sum[:])
}
func computeExcludedModelsHash(excluded []string) string {
if len(excluded) == 0 {
return ""
}
normalized := make([]string, 0, len(excluded))
for _, entry := range excluded {
if trimmed := strings.TrimSpace(entry); trimmed != "" {
normalized = append(normalized, strings.ToLower(trimmed))
}
}
if len(normalized) == 0 {
return ""
}
sort.Strings(normalized)
data, err := json.Marshal(normalized)
if err != nil || len(data) == 0 {
return ""
}
sum := sha256.Sum256(data)
return hex.EncodeToString(sum[:])
}
type excludedModelsSummary struct {
hash string
count int
}
func summarizeExcludedModels(list []string) excludedModelsSummary {
if len(list) == 0 {
return excludedModelsSummary{}
}
seen := make(map[string]struct{}, len(list))
normalized := make([]string, 0, len(list))
for _, entry := range list {
if trimmed := strings.ToLower(strings.TrimSpace(entry)); trimmed != "" {
if _, exists := seen[trimmed]; exists {
continue
}
seen[trimmed] = struct{}{}
normalized = append(normalized, trimmed)
}
}
sort.Strings(normalized)
return excludedModelsSummary{
hash: computeExcludedModelsHash(normalized),
count: len(normalized),
}
}
type ampModelMappingsSummary struct {
hash string
count int
}
func summarizeAmpModelMappings(mappings []config.AmpModelMapping) ampModelMappingsSummary {
if len(mappings) == 0 {
return ampModelMappingsSummary{}
}
entries := make([]string, 0, len(mappings))
for _, mapping := range mappings {
from := strings.TrimSpace(mapping.From)
to := strings.TrimSpace(mapping.To)
if from == "" && to == "" {
continue
}
entries = append(entries, from+"->"+to)
}
if len(entries) == 0 {
return ampModelMappingsSummary{}
}
sort.Strings(entries)
sum := sha256.Sum256([]byte(strings.Join(entries, "|")))
return ampModelMappingsSummary{
hash: hex.EncodeToString(sum[:]),
count: len(entries),
}
}
func summarizeOAuthExcludedModels(entries map[string][]string) map[string]excludedModelsSummary {
if len(entries) == 0 {
return nil
}
out := make(map[string]excludedModelsSummary, len(entries))
for k, v := range entries {
key := strings.ToLower(strings.TrimSpace(k))
if key == "" {
continue
}
out[key] = summarizeExcludedModels(v)
}
return out
}
func diffOAuthExcludedModelChanges(oldMap, newMap map[string][]string) ([]string, []string) {
oldSummary := summarizeOAuthExcludedModels(oldMap)
newSummary := summarizeOAuthExcludedModels(newMap)
keys := make(map[string]struct{}, len(oldSummary)+len(newSummary))
for k := range oldSummary {
keys[k] = struct{}{}
}
for k := range newSummary {
keys[k] = struct{}{}
}
changes := make([]string, 0, len(keys))
affected := make([]string, 0, len(keys))
for key := range keys {
oldInfo, okOld := oldSummary[key]
newInfo, okNew := newSummary[key]
switch {
case okOld && !okNew:
changes = append(changes, fmt.Sprintf("oauth-excluded-models[%s]: removed", key))
affected = append(affected, key)
case !okOld && okNew:
changes = append(changes, fmt.Sprintf("oauth-excluded-models[%s]: added (%d entries)", key, newInfo.count))
affected = append(affected, key)
case okOld && okNew && oldInfo.hash != newInfo.hash:
changes = append(changes, fmt.Sprintf("oauth-excluded-models[%s]: updated (%d -> %d entries)", key, oldInfo.count, newInfo.count))
affected = append(affected, key)
}
}
sort.Strings(changes)
sort.Strings(affected)
return changes, affected
}
func applyAuthExcludedModelsMeta(auth *coreauth.Auth, cfg *config.Config, perKey []string, authKind string) {
if auth == nil || cfg == nil {
return
@@ -677,7 +514,7 @@ func applyAuthExcludedModelsMeta(auth *coreauth.Auth, cfg *config.Config, perKey
combined = append(combined, k)
}
sort.Strings(combined)
hash := computeExcludedModelsHash(combined)
hash := diff.ComputeExcludedModelsHash(combined)
if auth.Attributes == nil {
auth.Attributes = make(map[string]string)
}
@@ -821,7 +658,7 @@ func (w *Watcher) handleEvent(event fsnotify.Event) {
log.Debugf("auth file unchanged (hash match), skipping reload: %s", filepath.Base(event.Name))
return
}
fmt.Printf("auth file changed (%s): %s, processing incrementally\n", event.Op.String(), filepath.Base(event.Name))
log.Infof("auth file changed (%s): %s, processing incrementally", event.Op.String(), filepath.Base(event.Name))
w.addOrUpdateClient(event.Name)
return
}
@@ -829,7 +666,7 @@ func (w *Watcher) handleEvent(event fsnotify.Event) {
log.Debugf("ignoring remove for unknown auth file: %s", filepath.Base(event.Name))
return
}
fmt.Printf("auth file changed (%s): %s, processing incrementally\n", event.Op.String(), filepath.Base(event.Name))
log.Infof("auth file changed (%s): %s, processing incrementally", event.Op.String(), filepath.Base(event.Name))
w.removeClient(event.Name)
return
}
@@ -838,7 +675,7 @@ func (w *Watcher) handleEvent(event fsnotify.Event) {
log.Debugf("auth file unchanged (hash match), skipping reload: %s", filepath.Base(event.Name))
return
}
fmt.Printf("auth file changed (%s): %s, processing incrementally\n", event.Op.String(), filepath.Base(event.Name))
log.Infof("auth file changed (%s): %s, processing incrementally", event.Op.String(), filepath.Base(event.Name))
w.addOrUpdateClient(event.Name)
}
}
@@ -878,7 +715,7 @@ func (w *Watcher) reloadConfigIfChanged() {
log.Debugf("config file content unchanged (hash match), skipping reload")
return
}
fmt.Printf("config file changed, reloading: %s\n", w.configPath)
log.Infof("config file changed, reloading: %s", w.configPath)
if w.reloadConfig() {
finalHash := newHash
if updatedData, errRead := os.ReadFile(w.configPath); errRead == nil && len(updatedData) > 0 {
@@ -924,7 +761,7 @@ func (w *Watcher) reloadConfig() bool {
var affectedOAuthProviders []string
if oldConfig != nil {
_, affectedOAuthProviders = diffOAuthExcludedModelChanges(oldConfig.OAuthExcludedModels, newConfig.OAuthExcludedModels)
_, affectedOAuthProviders = diff.DiffOAuthExcludedModelChanges(oldConfig.OAuthExcludedModels, newConfig.OAuthExcludedModels)
}
// Always apply the current log level based on the latest config.
@@ -937,7 +774,7 @@ func (w *Watcher) reloadConfig() bool {
// Log configuration changes in debug mode, only when there are material diffs
if oldConfig != nil {
details := buildConfigChangeDetails(oldConfig, newConfig)
details := diff.BuildConfigChangeDetails(oldConfig, newConfig)
if len(details) > 0 {
log.Debugf("config changes detected:")
for _, d := range details {
@@ -949,15 +786,16 @@ func (w *Watcher) reloadConfig() bool {
}
authDirChanged := oldConfig == nil || oldConfig.AuthDir != newConfig.AuthDir
forceAuthRefresh := oldConfig != nil && oldConfig.ForceModelPrefix != newConfig.ForceModelPrefix
log.Infof("config successfully reloaded, triggering client reload")
// Reload clients with new config
w.reloadClients(authDirChanged, affectedOAuthProviders)
w.reloadClients(authDirChanged, affectedOAuthProviders, forceAuthRefresh)
return true
}
// reloadClients performs a full scan and reload of all clients.
func (w *Watcher) reloadClients(rescanAuth bool, affectedOAuthProviders []string) {
func (w *Watcher) reloadClients(rescanAuth bool, affectedOAuthProviders []string, forceAuthRefresh bool) {
log.Debugf("starting full client load process")
w.clientsMutex.RLock()
@@ -1048,7 +886,7 @@ func (w *Watcher) reloadClients(rescanAuth bool, affectedOAuthProviders []string
w.reloadCallback(cfg)
}
w.refreshAuthState()
w.refreshAuthState(forceAuthRefresh)
log.Infof("full client load complete - %d clients (%d auth files + %d Gemini API keys + %d Vertex API keys + %d Claude API keys + %d Codex keys + %d OpenAI-compat)",
totalNewClients,
@@ -1099,7 +937,7 @@ func (w *Watcher) addOrUpdateClient(path string) {
w.clientsMutex.Unlock() // Unlock before the callback
w.refreshAuthState()
w.refreshAuthState(false)
if w.reloadCallback != nil {
log.Debugf("triggering server update callback after add/update")
@@ -1118,7 +956,7 @@ func (w *Watcher) removeClient(path string) {
w.clientsMutex.Unlock() // Release the lock before the callback
w.refreshAuthState()
w.refreshAuthState(false)
if w.reloadCallback != nil {
log.Debugf("triggering server update callback after removal")
@@ -1147,6 +985,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
if key == "" {
continue
}
prefix := strings.TrimSpace(entry.Prefix)
base := strings.TrimSpace(entry.BaseURL)
proxyURL := strings.TrimSpace(entry.ProxyURL)
id, token := idGen.next("gemini:apikey", key, base)
@@ -1162,6 +1001,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
ID: id,
Provider: "gemini",
Label: "gemini-apikey",
Prefix: prefix,
Status: coreauth.StatusActive,
ProxyURL: proxyURL,
Attributes: attrs,
@@ -1179,6 +1019,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
if key == "" {
continue
}
prefix := strings.TrimSpace(ck.Prefix)
base := strings.TrimSpace(ck.BaseURL)
id, token := idGen.next("claude:apikey", key, base)
attrs := map[string]string{
@@ -1188,7 +1029,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
if base != "" {
attrs["base_url"] = base
}
if hash := computeClaudeModelsHash(ck.Models); hash != "" {
if hash := diff.ComputeClaudeModelsHash(ck.Models); hash != "" {
attrs["models_hash"] = hash
}
addConfigHeadersToAttrs(ck.Headers, attrs)
@@ -1197,6 +1038,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
ID: id,
Provider: "claude",
Label: "claude-apikey",
Prefix: prefix,
Status: coreauth.StatusActive,
ProxyURL: proxyURL,
Attributes: attrs,
@@ -1213,6 +1055,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
if key == "" {
continue
}
prefix := strings.TrimSpace(ck.Prefix)
id, token := idGen.next("codex:apikey", key, ck.BaseURL)
attrs := map[string]string{
"source": fmt.Sprintf("config:codex[%s]", token),
@@ -1227,6 +1070,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
ID: id,
Provider: "codex",
Label: "codex-apikey",
Prefix: prefix,
Status: coreauth.StatusActive,
ProxyURL: proxyURL,
Attributes: attrs,
@@ -1238,6 +1082,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
}
for i := range cfg.OpenAICompatibility {
compat := &cfg.OpenAICompatibility[i]
prefix := strings.TrimSpace(compat.Prefix)
providerName := strings.ToLower(strings.TrimSpace(compat.Name))
if providerName == "" {
providerName = "openai-compatibility"
@@ -1261,7 +1106,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
if key != "" {
attrs["api_key"] = key
}
if hash := computeOpenAICompatModelsHash(compat.Models); hash != "" {
if hash := diff.ComputeOpenAICompatModelsHash(compat.Models); hash != "" {
attrs["models_hash"] = hash
}
addConfigHeadersToAttrs(compat.Headers, attrs)
@@ -1269,6 +1114,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
ID: id,
Provider: providerName,
Label: compat.Name,
Prefix: prefix,
Status: coreauth.StatusActive,
ProxyURL: proxyURL,
Attributes: attrs,
@@ -1287,7 +1133,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
"compat_name": compat.Name,
"provider_key": providerName,
}
if hash := computeOpenAICompatModelsHash(compat.Models); hash != "" {
if hash := diff.ComputeOpenAICompatModelsHash(compat.Models); hash != "" {
attrs["models_hash"] = hash
}
addConfigHeadersToAttrs(compat.Headers, attrs)
@@ -1295,6 +1141,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
ID: id,
Provider: providerName,
Label: compat.Name,
Prefix: prefix,
Status: coreauth.StatusActive,
Attributes: attrs,
CreatedAt: now,
@@ -1312,8 +1159,9 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
base := strings.TrimSpace(compat.BaseURL)
key := strings.TrimSpace(compat.APIKey)
prefix := strings.TrimSpace(compat.Prefix)
proxyURL := strings.TrimSpace(compat.ProxyURL)
idKind := fmt.Sprintf("vertex:apikey:%s", base)
idKind := "vertex:apikey"
id, token := idGen.next(idKind, key, base, proxyURL)
attrs := map[string]string{
"source": fmt.Sprintf("config:vertex-apikey[%s]", token),
@@ -1323,7 +1171,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
if key != "" {
attrs["api_key"] = key
}
if hash := computeVertexCompatModelsHash(compat.Models); hash != "" {
if hash := diff.ComputeVertexCompatModelsHash(compat.Models); hash != "" {
attrs["models_hash"] = hash
}
addConfigHeadersToAttrs(compat.Headers, attrs)
@@ -1331,6 +1179,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
ID: id,
Provider: providerName,
Label: "vertex-apikey",
Prefix: prefix,
Status: coreauth.StatusActive,
ProxyURL: proxyURL,
Attributes: attrs,
@@ -1383,10 +1232,20 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
proxyURL = p
}
prefix := ""
if rawPrefix, ok := metadata["prefix"].(string); ok {
trimmed := strings.TrimSpace(rawPrefix)
trimmed = strings.Trim(trimmed, "/")
if trimmed != "" && !strings.Contains(trimmed, "/") {
prefix = trimmed
}
}
a := &coreauth.Auth{
ID: id,
Provider: provider,
Label: label,
Prefix: prefix,
Status: coreauth.StatusActive,
Attributes: map[string]string{
"source": full,
@@ -1473,6 +1332,7 @@ func synthesizeGeminiVirtualAuths(primary *coreauth.Auth, metadata map[string]an
Attributes: attrs,
Metadata: metadataCopy,
ProxyURL: primary.ProxyURL,
Prefix: primary.Prefix,
CreatedAt: now,
UpdatedAt: now,
Runtime: geminicli.NewVirtualCredential(projectID, shared),
@@ -1586,329 +1446,6 @@ func BuildAPIKeyClients(cfg *config.Config) (int, int, int, int, int) {
return geminiAPIKeyCount, vertexCompatAPIKeyCount, claudeAPIKeyCount, codexAPIKeyCount, openAICompatCount
}
func diffOpenAICompatibility(oldList, newList []config.OpenAICompatibility) []string {
changes := make([]string, 0)
oldMap := make(map[string]config.OpenAICompatibility, len(oldList))
oldLabels := make(map[string]string, len(oldList))
for idx, entry := range oldList {
key, label := openAICompatKey(entry, idx)
oldMap[key] = entry
oldLabels[key] = label
}
newMap := make(map[string]config.OpenAICompatibility, len(newList))
newLabels := make(map[string]string, len(newList))
for idx, entry := range newList {
key, label := openAICompatKey(entry, idx)
newMap[key] = entry
newLabels[key] = label
}
keySet := make(map[string]struct{}, len(oldMap)+len(newMap))
for key := range oldMap {
keySet[key] = struct{}{}
}
for key := range newMap {
keySet[key] = struct{}{}
}
orderedKeys := make([]string, 0, len(keySet))
for key := range keySet {
orderedKeys = append(orderedKeys, key)
}
sort.Strings(orderedKeys)
for _, key := range orderedKeys {
oldEntry, oldOk := oldMap[key]
newEntry, newOk := newMap[key]
label := oldLabels[key]
if label == "" {
label = newLabels[key]
}
switch {
case !oldOk:
changes = append(changes, fmt.Sprintf("provider added: %s (api-keys=%d, models=%d)", label, countAPIKeys(newEntry), countOpenAIModels(newEntry.Models)))
case !newOk:
changes = append(changes, fmt.Sprintf("provider removed: %s (api-keys=%d, models=%d)", label, countAPIKeys(oldEntry), countOpenAIModels(oldEntry.Models)))
default:
if detail := describeOpenAICompatibilityUpdate(oldEntry, newEntry); detail != "" {
changes = append(changes, fmt.Sprintf("provider updated: %s %s", label, detail))
}
}
}
return changes
}
func describeOpenAICompatibilityUpdate(oldEntry, newEntry config.OpenAICompatibility) string {
oldKeyCount := countAPIKeys(oldEntry)
newKeyCount := countAPIKeys(newEntry)
oldModelCount := countOpenAIModels(oldEntry.Models)
newModelCount := countOpenAIModels(newEntry.Models)
details := make([]string, 0, 3)
if oldKeyCount != newKeyCount {
details = append(details, fmt.Sprintf("api-keys %d -> %d", oldKeyCount, newKeyCount))
}
if oldModelCount != newModelCount {
details = append(details, fmt.Sprintf("models %d -> %d", oldModelCount, newModelCount))
}
if !equalStringMap(oldEntry.Headers, newEntry.Headers) {
details = append(details, "headers updated")
}
if len(details) == 0 {
return ""
}
return "(" + strings.Join(details, ", ") + ")"
}
func countAPIKeys(entry config.OpenAICompatibility) int {
count := 0
for _, keyEntry := range entry.APIKeyEntries {
if strings.TrimSpace(keyEntry.APIKey) != "" {
count++
}
}
return count
}
func countOpenAIModels(models []config.OpenAICompatibilityModel) int {
count := 0
for _, model := range models {
name := strings.TrimSpace(model.Name)
alias := strings.TrimSpace(model.Alias)
if name == "" && alias == "" {
continue
}
count++
}
return count
}
func openAICompatKey(entry config.OpenAICompatibility, index int) (string, string) {
name := strings.TrimSpace(entry.Name)
if name != "" {
return "name:" + name, name
}
base := strings.TrimSpace(entry.BaseURL)
if base != "" {
return "base:" + base, base
}
for _, model := range entry.Models {
alias := strings.TrimSpace(model.Alias)
if alias == "" {
alias = strings.TrimSpace(model.Name)
}
if alias != "" {
return "alias:" + alias, alias
}
}
return fmt.Sprintf("index:%d", index), fmt.Sprintf("entry-%d", index+1)
}
// buildConfigChangeDetails computes a redacted, human-readable list of config changes.
// It avoids printing secrets (like API keys) and focuses on structural or non-sensitive fields.
func buildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
changes := make([]string, 0, 16)
if oldCfg == nil || newCfg == nil {
return changes
}
// Simple scalars
if oldCfg.Port != newCfg.Port {
changes = append(changes, fmt.Sprintf("port: %d -> %d", oldCfg.Port, newCfg.Port))
}
if oldCfg.AuthDir != newCfg.AuthDir {
changes = append(changes, fmt.Sprintf("auth-dir: %s -> %s", oldCfg.AuthDir, newCfg.AuthDir))
}
if oldCfg.Debug != newCfg.Debug {
changes = append(changes, fmt.Sprintf("debug: %t -> %t", oldCfg.Debug, newCfg.Debug))
}
if oldCfg.LoggingToFile != newCfg.LoggingToFile {
changes = append(changes, fmt.Sprintf("logging-to-file: %t -> %t", oldCfg.LoggingToFile, newCfg.LoggingToFile))
}
if oldCfg.UsageStatisticsEnabled != newCfg.UsageStatisticsEnabled {
changes = append(changes, fmt.Sprintf("usage-statistics-enabled: %t -> %t", oldCfg.UsageStatisticsEnabled, newCfg.UsageStatisticsEnabled))
}
if oldCfg.DisableCooling != newCfg.DisableCooling {
changes = append(changes, fmt.Sprintf("disable-cooling: %t -> %t", oldCfg.DisableCooling, newCfg.DisableCooling))
}
if oldCfg.RequestLog != newCfg.RequestLog {
changes = append(changes, fmt.Sprintf("request-log: %t -> %t", oldCfg.RequestLog, newCfg.RequestLog))
}
if oldCfg.RequestRetry != newCfg.RequestRetry {
changes = append(changes, fmt.Sprintf("request-retry: %d -> %d", oldCfg.RequestRetry, newCfg.RequestRetry))
}
if oldCfg.MaxRetryInterval != newCfg.MaxRetryInterval {
changes = append(changes, fmt.Sprintf("max-retry-interval: %d -> %d", oldCfg.MaxRetryInterval, newCfg.MaxRetryInterval))
}
if oldCfg.ProxyURL != newCfg.ProxyURL {
changes = append(changes, fmt.Sprintf("proxy-url: %s -> %s", oldCfg.ProxyURL, newCfg.ProxyURL))
}
if oldCfg.WebsocketAuth != newCfg.WebsocketAuth {
changes = append(changes, fmt.Sprintf("ws-auth: %t -> %t", oldCfg.WebsocketAuth, newCfg.WebsocketAuth))
}
// Quota-exceeded behavior
if oldCfg.QuotaExceeded.SwitchProject != newCfg.QuotaExceeded.SwitchProject {
changes = append(changes, fmt.Sprintf("quota-exceeded.switch-project: %t -> %t", oldCfg.QuotaExceeded.SwitchProject, newCfg.QuotaExceeded.SwitchProject))
}
if oldCfg.QuotaExceeded.SwitchPreviewModel != newCfg.QuotaExceeded.SwitchPreviewModel {
changes = append(changes, fmt.Sprintf("quota-exceeded.switch-preview-model: %t -> %t", oldCfg.QuotaExceeded.SwitchPreviewModel, newCfg.QuotaExceeded.SwitchPreviewModel))
}
// API keys (redacted) and counts
if len(oldCfg.APIKeys) != len(newCfg.APIKeys) {
changes = append(changes, fmt.Sprintf("api-keys count: %d -> %d", len(oldCfg.APIKeys), len(newCfg.APIKeys)))
} else if !reflect.DeepEqual(trimStrings(oldCfg.APIKeys), trimStrings(newCfg.APIKeys)) {
changes = append(changes, "api-keys: values updated (count unchanged, redacted)")
}
if len(oldCfg.GeminiKey) != len(newCfg.GeminiKey) {
changes = append(changes, fmt.Sprintf("gemini-api-key count: %d -> %d", len(oldCfg.GeminiKey), len(newCfg.GeminiKey)))
} else {
for i := range oldCfg.GeminiKey {
if i >= len(newCfg.GeminiKey) {
break
}
o := oldCfg.GeminiKey[i]
n := newCfg.GeminiKey[i]
if strings.TrimSpace(o.BaseURL) != strings.TrimSpace(n.BaseURL) {
changes = append(changes, fmt.Sprintf("gemini[%d].base-url: %s -> %s", i, strings.TrimSpace(o.BaseURL), strings.TrimSpace(n.BaseURL)))
}
if strings.TrimSpace(o.ProxyURL) != strings.TrimSpace(n.ProxyURL) {
changes = append(changes, fmt.Sprintf("gemini[%d].proxy-url: %s -> %s", i, strings.TrimSpace(o.ProxyURL), strings.TrimSpace(n.ProxyURL)))
}
if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
changes = append(changes, fmt.Sprintf("gemini[%d].api-key: updated", i))
}
if !equalStringMap(o.Headers, n.Headers) {
changes = append(changes, fmt.Sprintf("gemini[%d].headers: updated", i))
}
oldExcluded := summarizeExcludedModels(o.ExcludedModels)
newExcluded := summarizeExcludedModels(n.ExcludedModels)
if oldExcluded.hash != newExcluded.hash {
changes = append(changes, fmt.Sprintf("gemini[%d].excluded-models: updated (%d -> %d entries)", i, oldExcluded.count, newExcluded.count))
}
}
}
// Claude keys (do not print key material)
if len(oldCfg.ClaudeKey) != len(newCfg.ClaudeKey) {
changes = append(changes, fmt.Sprintf("claude-api-key count: %d -> %d", len(oldCfg.ClaudeKey), len(newCfg.ClaudeKey)))
} else {
for i := range oldCfg.ClaudeKey {
if i >= len(newCfg.ClaudeKey) {
break
}
o := oldCfg.ClaudeKey[i]
n := newCfg.ClaudeKey[i]
if strings.TrimSpace(o.BaseURL) != strings.TrimSpace(n.BaseURL) {
changes = append(changes, fmt.Sprintf("claude[%d].base-url: %s -> %s", i, strings.TrimSpace(o.BaseURL), strings.TrimSpace(n.BaseURL)))
}
if strings.TrimSpace(o.ProxyURL) != strings.TrimSpace(n.ProxyURL) {
changes = append(changes, fmt.Sprintf("claude[%d].proxy-url: %s -> %s", i, strings.TrimSpace(o.ProxyURL), strings.TrimSpace(n.ProxyURL)))
}
if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
changes = append(changes, fmt.Sprintf("claude[%d].api-key: updated", i))
}
if !equalStringMap(o.Headers, n.Headers) {
changes = append(changes, fmt.Sprintf("claude[%d].headers: updated", i))
}
oldExcluded := summarizeExcludedModels(o.ExcludedModels)
newExcluded := summarizeExcludedModels(n.ExcludedModels)
if oldExcluded.hash != newExcluded.hash {
changes = append(changes, fmt.Sprintf("claude[%d].excluded-models: updated (%d -> %d entries)", i, oldExcluded.count, newExcluded.count))
}
}
}
// Codex keys (do not print key material)
if len(oldCfg.CodexKey) != len(newCfg.CodexKey) {
changes = append(changes, fmt.Sprintf("codex-api-key count: %d -> %d", len(oldCfg.CodexKey), len(newCfg.CodexKey)))
} else {
for i := range oldCfg.CodexKey {
if i >= len(newCfg.CodexKey) {
break
}
o := oldCfg.CodexKey[i]
n := newCfg.CodexKey[i]
if strings.TrimSpace(o.BaseURL) != strings.TrimSpace(n.BaseURL) {
changes = append(changes, fmt.Sprintf("codex[%d].base-url: %s -> %s", i, strings.TrimSpace(o.BaseURL), strings.TrimSpace(n.BaseURL)))
}
if strings.TrimSpace(o.ProxyURL) != strings.TrimSpace(n.ProxyURL) {
changes = append(changes, fmt.Sprintf("codex[%d].proxy-url: %s -> %s", i, strings.TrimSpace(o.ProxyURL), strings.TrimSpace(n.ProxyURL)))
}
if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
changes = append(changes, fmt.Sprintf("codex[%d].api-key: updated", i))
}
if !equalStringMap(o.Headers, n.Headers) {
changes = append(changes, fmt.Sprintf("codex[%d].headers: updated", i))
}
oldExcluded := summarizeExcludedModels(o.ExcludedModels)
newExcluded := summarizeExcludedModels(n.ExcludedModels)
if oldExcluded.hash != newExcluded.hash {
changes = append(changes, fmt.Sprintf("codex[%d].excluded-models: updated (%d -> %d entries)", i, oldExcluded.count, newExcluded.count))
}
}
}
// AmpCode settings (redacted where needed)
oldAmpURL := strings.TrimSpace(oldCfg.AmpCode.UpstreamURL)
newAmpURL := strings.TrimSpace(newCfg.AmpCode.UpstreamURL)
if oldAmpURL != newAmpURL {
changes = append(changes, fmt.Sprintf("ampcode.upstream-url: %s -> %s", oldAmpURL, newAmpURL))
}
oldAmpKey := strings.TrimSpace(oldCfg.AmpCode.UpstreamAPIKey)
newAmpKey := strings.TrimSpace(newCfg.AmpCode.UpstreamAPIKey)
switch {
case oldAmpKey == "" && newAmpKey != "":
changes = append(changes, "ampcode.upstream-api-key: added")
case oldAmpKey != "" && newAmpKey == "":
changes = append(changes, "ampcode.upstream-api-key: removed")
case oldAmpKey != newAmpKey:
changes = append(changes, "ampcode.upstream-api-key: updated")
}
if oldCfg.AmpCode.RestrictManagementToLocalhost != newCfg.AmpCode.RestrictManagementToLocalhost {
changes = append(changes, fmt.Sprintf("ampcode.restrict-management-to-localhost: %t -> %t", oldCfg.AmpCode.RestrictManagementToLocalhost, newCfg.AmpCode.RestrictManagementToLocalhost))
}
oldMappings := summarizeAmpModelMappings(oldCfg.AmpCode.ModelMappings)
newMappings := summarizeAmpModelMappings(newCfg.AmpCode.ModelMappings)
if oldMappings.hash != newMappings.hash {
changes = append(changes, fmt.Sprintf("ampcode.model-mappings: updated (%d -> %d entries)", oldMappings.count, newMappings.count))
}
if entries, _ := diffOAuthExcludedModelChanges(oldCfg.OAuthExcludedModels, newCfg.OAuthExcludedModels); len(entries) > 0 {
changes = append(changes, entries...)
}
// Remote management (never print the key)
if oldCfg.RemoteManagement.AllowRemote != newCfg.RemoteManagement.AllowRemote {
changes = append(changes, fmt.Sprintf("remote-management.allow-remote: %t -> %t", oldCfg.RemoteManagement.AllowRemote, newCfg.RemoteManagement.AllowRemote))
}
if oldCfg.RemoteManagement.DisableControlPanel != newCfg.RemoteManagement.DisableControlPanel {
changes = append(changes, fmt.Sprintf("remote-management.disable-control-panel: %t -> %t", oldCfg.RemoteManagement.DisableControlPanel, newCfg.RemoteManagement.DisableControlPanel))
}
oldPanelRepo := strings.TrimSpace(oldCfg.RemoteManagement.PanelGitHubRepository)
newPanelRepo := strings.TrimSpace(newCfg.RemoteManagement.PanelGitHubRepository)
if oldPanelRepo != newPanelRepo {
changes = append(changes, fmt.Sprintf("remote-management.panel-github-repository: %s -> %s", oldPanelRepo, newPanelRepo))
}
if oldCfg.RemoteManagement.SecretKey != newCfg.RemoteManagement.SecretKey {
switch {
case oldCfg.RemoteManagement.SecretKey == "" && newCfg.RemoteManagement.SecretKey != "":
changes = append(changes, "remote-management.secret-key: created")
case oldCfg.RemoteManagement.SecretKey != "" && newCfg.RemoteManagement.SecretKey == "":
changes = append(changes, "remote-management.secret-key: deleted")
default:
changes = append(changes, "remote-management.secret-key: updated")
}
}
// OpenAI compatibility providers (summarized)
if compat := diffOpenAICompatibility(oldCfg.OpenAICompatibility, newCfg.OpenAICompatibility); len(compat) > 0 {
changes = append(changes, "openai-compatibility:")
for _, c := range compat {
changes = append(changes, " "+c)
}
}
return changes
}
func addConfigHeadersToAttrs(headers map[string]string, attrs map[string]string) {
if len(headers) == 0 || attrs == nil {
return
@@ -1922,23 +1459,3 @@ func addConfigHeadersToAttrs(headers map[string]string, attrs map[string]string)
attrs["header:"+key] = val
}
}
func trimStrings(in []string) []string {
out := make([]string, len(in))
for i := range in {
out[i] = strings.TrimSpace(in[i])
}
return out
}
func equalStringMap(a, b map[string]string) bool {
if len(a) != len(b) {
return false
}
for k, v := range a {
if b[k] != v {
return false
}
}
return true
}
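The diff entries built above follow a consistent redaction convention: URLs, counts, and booleans are printed verbatim, while API keys and other secrets are only ever reported as added, removed, or updated. A minimal, self-contained sketch of that convention; the helper name describeSecretChange is hypothetical and not part of the repository:

package main

import (
	"fmt"
	"strings"
)

// describeSecretChange reports how a secret changed without ever printing its value,
// mirroring the pattern used in the config diff logic above.
func describeSecretChange(field, oldVal, newVal string) string {
	oldVal, newVal = strings.TrimSpace(oldVal), strings.TrimSpace(newVal)
	switch {
	case oldVal == newVal:
		return ""
	case oldVal == "":
		return field + ": added"
	case newVal == "":
		return field + ": removed"
	default:
		return field + ": updated"
	}
}

func main() {
	fmt.Println(describeSecretChange("ampcode.upstream-api-key", "", "sk-new"))     // ampcode.upstream-api-key: added
	fmt.Println(describeSecretChange("remote-management.secret-key", "old", "new")) // remote-management.secret-key: updated
}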

View File

@@ -0,0 +1,609 @@
package watcher
import (
"context"
"crypto/sha256"
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
"sync/atomic"
"testing"
"time"
"github.com/fsnotify/fsnotify"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
"gopkg.in/yaml.v3"
)
func TestApplyAuthExcludedModelsMeta_APIKey(t *testing.T) {
auth := &coreauth.Auth{Attributes: map[string]string{}}
cfg := &config.Config{}
perKey := []string{" Model-1 ", "model-2"}
applyAuthExcludedModelsMeta(auth, cfg, perKey, "apikey")
expected := diff.ComputeExcludedModelsHash([]string{"model-1", "model-2"})
if got := auth.Attributes["excluded_models_hash"]; got != expected {
t.Fatalf("expected hash %s, got %s", expected, got)
}
if got := auth.Attributes["auth_kind"]; got != "apikey" {
t.Fatalf("expected auth_kind=apikey, got %s", got)
}
}
func TestApplyAuthExcludedModelsMeta_OAuthProvider(t *testing.T) {
auth := &coreauth.Auth{
Provider: "TestProv",
Attributes: map[string]string{},
}
cfg := &config.Config{
OAuthExcludedModels: map[string][]string{
"testprov": {"A", "b"},
},
}
applyAuthExcludedModelsMeta(auth, cfg, nil, "oauth")
expected := diff.ComputeExcludedModelsHash([]string{"a", "b"})
if got := auth.Attributes["excluded_models_hash"]; got != expected {
t.Fatalf("expected hash %s, got %s", expected, got)
}
if got := auth.Attributes["auth_kind"]; got != "oauth" {
t.Fatalf("expected auth_kind=oauth, got %s", got)
}
}
func TestBuildAPIKeyClientsCounts(t *testing.T) {
cfg := &config.Config{
GeminiKey: []config.GeminiKey{{APIKey: "g1"}, {APIKey: "g2"}},
VertexCompatAPIKey: []config.VertexCompatKey{
{APIKey: "v1"},
},
ClaudeKey: []config.ClaudeKey{{APIKey: "c1"}},
CodexKey: []config.CodexKey{{APIKey: "x1"}, {APIKey: "x2"}},
OpenAICompatibility: []config.OpenAICompatibility{
{APIKeyEntries: []config.OpenAICompatibilityAPIKey{{APIKey: "o1"}, {APIKey: "o2"}}},
},
}
gemini, vertex, claude, codex, compat := BuildAPIKeyClients(cfg)
if gemini != 2 || vertex != 1 || claude != 1 || codex != 2 || compat != 2 {
t.Fatalf("unexpected counts: %d %d %d %d %d", gemini, vertex, claude, codex, compat)
}
}
func TestNormalizeAuthStripsTemporalFields(t *testing.T) {
now := time.Now()
auth := &coreauth.Auth{
CreatedAt: now,
UpdatedAt: now,
LastRefreshedAt: now,
NextRefreshAfter: now,
Quota: coreauth.QuotaState{
NextRecoverAt: now,
},
Runtime: map[string]any{"k": "v"},
}
normalized := normalizeAuth(auth)
if !normalized.CreatedAt.IsZero() || !normalized.UpdatedAt.IsZero() || !normalized.LastRefreshedAt.IsZero() || !normalized.NextRefreshAfter.IsZero() {
t.Fatal("expected time fields to be zeroed")
}
if normalized.Runtime != nil {
t.Fatal("expected runtime to be nil")
}
if !normalized.Quota.NextRecoverAt.IsZero() {
t.Fatal("expected quota.NextRecoverAt to be zeroed")
}
}
func TestMatchProvider(t *testing.T) {
if _, ok := matchProvider("OpenAI", []string{"openai", "claude"}); !ok {
t.Fatal("expected match to succeed ignoring case")
}
if _, ok := matchProvider("missing", []string{"openai"}); ok {
t.Fatal("expected match to fail for unknown provider")
}
}
func TestSnapshotCoreAuths_ConfigAndAuthFiles(t *testing.T) {
authDir := t.TempDir()
metadata := map[string]any{
"type": "gemini",
"email": "user@example.com",
"project_id": "proj-a, proj-b",
"proxy_url": "https://proxy",
}
authFile := filepath.Join(authDir, "gemini.json")
data, err := json.Marshal(metadata)
if err != nil {
t.Fatalf("failed to marshal metadata: %v", err)
}
if err = os.WriteFile(authFile, data, 0o644); err != nil {
t.Fatalf("failed to write auth file: %v", err)
}
cfg := &config.Config{
AuthDir: authDir,
GeminiKey: []config.GeminiKey{
{
APIKey: "g-key",
BaseURL: "https://gemini",
ExcludedModels: []string{"Model-A", "model-b"},
Headers: map[string]string{"X-Req": "1"},
},
},
OAuthExcludedModels: map[string][]string{
"gemini-cli": {"Foo", "bar"},
},
}
w := &Watcher{authDir: authDir}
w.SetConfig(cfg)
auths := w.SnapshotCoreAuths()
if len(auths) != 4 {
t.Fatalf("expected 4 auth entries (1 config + 1 primary + 2 virtual), got %d", len(auths))
}
var geminiAPIKeyAuth *coreauth.Auth
var geminiPrimary *coreauth.Auth
virtuals := make([]*coreauth.Auth, 0)
for _, a := range auths {
switch {
case a.Provider == "gemini" && a.Attributes["api_key"] == "g-key":
geminiAPIKeyAuth = a
case a.Attributes["gemini_virtual_primary"] == "true":
geminiPrimary = a
case strings.TrimSpace(a.Attributes["gemini_virtual_parent"]) != "":
virtuals = append(virtuals, a)
}
}
if geminiAPIKeyAuth == nil {
t.Fatal("expected synthesized Gemini API key auth")
}
expectedAPIKeyHash := diff.ComputeExcludedModelsHash([]string{"Model-A", "model-b"})
if geminiAPIKeyAuth.Attributes["excluded_models_hash"] != expectedAPIKeyHash {
t.Fatalf("expected API key excluded hash %s, got %s", expectedAPIKeyHash, geminiAPIKeyAuth.Attributes["excluded_models_hash"])
}
if geminiAPIKeyAuth.Attributes["auth_kind"] != "apikey" {
t.Fatalf("expected auth_kind=apikey, got %s", geminiAPIKeyAuth.Attributes["auth_kind"])
}
if geminiPrimary == nil {
t.Fatal("expected primary gemini-cli auth from file")
}
if !geminiPrimary.Disabled || geminiPrimary.Status != coreauth.StatusDisabled {
t.Fatal("expected primary gemini-cli auth to be disabled when virtual auths are synthesized")
}
expectedOAuthHash := diff.ComputeExcludedModelsHash([]string{"Foo", "bar"})
if geminiPrimary.Attributes["excluded_models_hash"] != expectedOAuthHash {
t.Fatalf("expected OAuth excluded hash %s, got %s", expectedOAuthHash, geminiPrimary.Attributes["excluded_models_hash"])
}
if geminiPrimary.Attributes["auth_kind"] != "oauth" {
t.Fatalf("expected auth_kind=oauth, got %s", geminiPrimary.Attributes["auth_kind"])
}
if len(virtuals) != 2 {
t.Fatalf("expected 2 virtual auths, got %d", len(virtuals))
}
for _, v := range virtuals {
if v.Attributes["gemini_virtual_parent"] != geminiPrimary.ID {
t.Fatalf("virtual auth missing parent link to %s", geminiPrimary.ID)
}
if v.Attributes["excluded_models_hash"] != expectedOAuthHash {
t.Fatalf("expected virtual excluded hash %s, got %s", expectedOAuthHash, v.Attributes["excluded_models_hash"])
}
if v.Status != coreauth.StatusActive {
t.Fatalf("expected virtual auth to be active, got %s", v.Status)
}
}
}
func TestReloadConfigIfChanged_TriggersOnChangeAndSkipsUnchanged(t *testing.T) {
tmpDir := t.TempDir()
authDir := filepath.Join(tmpDir, "auth")
if err := os.MkdirAll(authDir, 0o755); err != nil {
t.Fatalf("failed to create auth dir: %v", err)
}
configPath := filepath.Join(tmpDir, "config.yaml")
writeConfig := func(port int, allowRemote bool) {
cfg := &config.Config{
Port: port,
AuthDir: authDir,
RemoteManagement: config.RemoteManagement{
AllowRemote: allowRemote,
},
}
data, err := yaml.Marshal(cfg)
if err != nil {
t.Fatalf("failed to marshal config: %v", err)
}
if err = os.WriteFile(configPath, data, 0o644); err != nil {
t.Fatalf("failed to write config: %v", err)
}
}
writeConfig(8080, false)
reloads := 0
w := &Watcher{
configPath: configPath,
authDir: authDir,
reloadCallback: func(*config.Config) { reloads++ },
}
w.reloadConfigIfChanged()
if reloads != 1 {
t.Fatalf("expected first reload to trigger callback once, got %d", reloads)
}
// Same content should be skipped by hash check.
w.reloadConfigIfChanged()
if reloads != 1 {
t.Fatalf("expected unchanged config to be skipped, callback count %d", reloads)
}
writeConfig(9090, true)
w.reloadConfigIfChanged()
if reloads != 2 {
t.Fatalf("expected changed config to trigger reload, callback count %d", reloads)
}
w.clientsMutex.RLock()
defer w.clientsMutex.RUnlock()
if w.config == nil || w.config.Port != 9090 || !w.config.RemoteManagement.AllowRemote {
t.Fatalf("expected config to be updated after reload, got %+v", w.config)
}
}
func TestStartAndStopSuccess(t *testing.T) {
tmpDir := t.TempDir()
authDir := filepath.Join(tmpDir, "auth")
if err := os.MkdirAll(authDir, 0o755); err != nil {
t.Fatalf("failed to create auth dir: %v", err)
}
configPath := filepath.Join(tmpDir, "config.yaml")
if err := os.WriteFile(configPath, []byte("auth_dir: "+authDir), 0o644); err != nil {
t.Fatalf("failed to create config file: %v", err)
}
var reloads int32
w, err := NewWatcher(configPath, authDir, func(*config.Config) {
atomic.AddInt32(&reloads, 1)
})
if err != nil {
t.Fatalf("failed to create watcher: %v", err)
}
w.SetConfig(&config.Config{AuthDir: authDir})
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
if err := w.Start(ctx); err != nil {
t.Fatalf("expected Start to succeed: %v", err)
}
cancel()
if err := w.Stop(); err != nil {
t.Fatalf("expected Stop to succeed: %v", err)
}
if got := atomic.LoadInt32(&reloads); got != 1 {
t.Fatalf("expected one reload callback, got %d", got)
}
}
func TestStartFailsWhenConfigMissing(t *testing.T) {
tmpDir := t.TempDir()
authDir := filepath.Join(tmpDir, "auth")
if err := os.MkdirAll(authDir, 0o755); err != nil {
t.Fatalf("failed to create auth dir: %v", err)
}
configPath := filepath.Join(tmpDir, "missing-config.yaml")
w, err := NewWatcher(configPath, authDir, nil)
if err != nil {
t.Fatalf("failed to create watcher: %v", err)
}
defer w.Stop()
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
if err := w.Start(ctx); err == nil {
t.Fatal("expected Start to fail for missing config file")
}
}
func TestDispatchRuntimeAuthUpdateEnqueuesAndUpdatesState(t *testing.T) {
queue := make(chan AuthUpdate, 4)
w := &Watcher{}
w.SetAuthUpdateQueue(queue)
defer w.stopDispatch()
auth := &coreauth.Auth{ID: "auth-1", Provider: "test"}
if ok := w.DispatchRuntimeAuthUpdate(AuthUpdate{Action: AuthUpdateActionAdd, Auth: auth}); !ok {
t.Fatal("expected DispatchRuntimeAuthUpdate to enqueue")
}
select {
case update := <-queue:
if update.Action != AuthUpdateActionAdd || update.Auth.ID != "auth-1" {
t.Fatalf("unexpected update: %+v", update)
}
case <-time.After(2 * time.Second):
t.Fatal("timed out waiting for auth update")
}
if ok := w.DispatchRuntimeAuthUpdate(AuthUpdate{Action: AuthUpdateActionDelete, ID: "auth-1"}); !ok {
t.Fatal("expected delete update to enqueue")
}
select {
case update := <-queue:
if update.Action != AuthUpdateActionDelete || update.ID != "auth-1" {
t.Fatalf("unexpected delete update: %+v", update)
}
case <-time.After(2 * time.Second):
t.Fatal("timed out waiting for delete update")
}
w.clientsMutex.RLock()
if _, exists := w.runtimeAuths["auth-1"]; exists {
w.clientsMutex.RUnlock()
t.Fatal("expected runtime auth to be cleared after delete")
}
w.clientsMutex.RUnlock()
}
func TestAddOrUpdateClientSkipsUnchanged(t *testing.T) {
tmpDir := t.TempDir()
authFile := filepath.Join(tmpDir, "sample.json")
if err := os.WriteFile(authFile, []byte(`{"type":"demo"}`), 0o644); err != nil {
t.Fatalf("failed to create auth file: %v", err)
}
data, _ := os.ReadFile(authFile)
sum := sha256.Sum256(data)
var reloads int32
w := &Watcher{
authDir: tmpDir,
lastAuthHashes: map[string]string{
filepath.Clean(authFile): hexString(sum[:]),
},
reloadCallback: func(*config.Config) {
atomic.AddInt32(&reloads, 1)
},
}
w.SetConfig(&config.Config{AuthDir: tmpDir})
w.addOrUpdateClient(authFile)
if got := atomic.LoadInt32(&reloads); got != 0 {
t.Fatalf("expected no reload for unchanged file, got %d", got)
}
}
func TestAddOrUpdateClientTriggersReloadAndHash(t *testing.T) {
tmpDir := t.TempDir()
authFile := filepath.Join(tmpDir, "sample.json")
if err := os.WriteFile(authFile, []byte(`{"type":"demo","api_key":"k"}`), 0o644); err != nil {
t.Fatalf("failed to create auth file: %v", err)
}
var reloads int32
w := &Watcher{
authDir: tmpDir,
lastAuthHashes: make(map[string]string),
reloadCallback: func(*config.Config) {
atomic.AddInt32(&reloads, 1)
},
}
w.SetConfig(&config.Config{AuthDir: tmpDir})
w.addOrUpdateClient(authFile)
if got := atomic.LoadInt32(&reloads); got != 1 {
t.Fatalf("expected reload callback once, got %d", got)
}
normalized := filepath.Clean(authFile)
if _, ok := w.lastAuthHashes[normalized]; !ok {
t.Fatalf("expected hash to be stored for %s", normalized)
}
}
func TestRemoveClientRemovesHash(t *testing.T) {
tmpDir := t.TempDir()
authFile := filepath.Join(tmpDir, "sample.json")
var reloads int32
w := &Watcher{
authDir: tmpDir,
lastAuthHashes: map[string]string{
filepath.Clean(authFile): "hash",
},
reloadCallback: func(*config.Config) {
atomic.AddInt32(&reloads, 1)
},
}
w.SetConfig(&config.Config{AuthDir: tmpDir})
w.removeClient(authFile)
if _, ok := w.lastAuthHashes[filepath.Clean(authFile)]; ok {
t.Fatal("expected hash to be removed after deletion")
}
if got := atomic.LoadInt32(&reloads); got != 1 {
t.Fatalf("expected reload callback once, got %d", got)
}
}
func TestShouldDebounceRemove(t *testing.T) {
w := &Watcher{}
path := filepath.Clean("test.json")
if w.shouldDebounceRemove(path, time.Now()) {
t.Fatal("first call should not debounce")
}
if !w.shouldDebounceRemove(path, time.Now()) {
t.Fatal("second call within window should debounce")
}
w.clientsMutex.Lock()
w.lastRemoveTimes = map[string]time.Time{path: time.Now().Add(-2 * authRemoveDebounceWindow)}
w.clientsMutex.Unlock()
if w.shouldDebounceRemove(path, time.Now()) {
t.Fatal("call after window should not debounce")
}
}
func TestAuthFileUnchangedUsesHash(t *testing.T) {
tmpDir := t.TempDir()
authFile := filepath.Join(tmpDir, "sample.json")
content := []byte(`{"type":"demo"}`)
if err := os.WriteFile(authFile, content, 0o644); err != nil {
t.Fatalf("failed to write auth file: %v", err)
}
w := &Watcher{lastAuthHashes: make(map[string]string)}
unchanged, err := w.authFileUnchanged(authFile)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if unchanged {
t.Fatal("expected first check to report changed")
}
sum := sha256.Sum256(content)
w.lastAuthHashes[filepath.Clean(authFile)] = hexString(sum[:])
unchanged, err = w.authFileUnchanged(authFile)
if err != nil {
t.Fatalf("unexpected error: %v", err)
}
if !unchanged {
t.Fatal("expected hash match to report unchanged")
}
}
func TestReloadClientsCachesAuthHashes(t *testing.T) {
tmpDir := t.TempDir()
authFile := filepath.Join(tmpDir, "one.json")
if err := os.WriteFile(authFile, []byte(`{"type":"demo"}`), 0o644); err != nil {
t.Fatalf("failed to write auth file: %v", err)
}
w := &Watcher{
authDir: tmpDir,
config: &config.Config{AuthDir: tmpDir},
}
w.reloadClients(true, nil, false)
w.clientsMutex.RLock()
defer w.clientsMutex.RUnlock()
if len(w.lastAuthHashes) != 1 {
t.Fatalf("expected hash cache for one auth file, got %d", len(w.lastAuthHashes))
}
}
func TestReloadClientsLogsConfigDiffs(t *testing.T) {
tmpDir := t.TempDir()
oldCfg := &config.Config{AuthDir: tmpDir, Port: 1, Debug: false}
newCfg := &config.Config{AuthDir: tmpDir, Port: 2, Debug: true}
w := &Watcher{
authDir: tmpDir,
config: oldCfg,
}
w.SetConfig(oldCfg)
w.oldConfigYaml, _ = yaml.Marshal(oldCfg)
w.clientsMutex.Lock()
w.config = newCfg
w.clientsMutex.Unlock()
w.reloadClients(false, nil, false)
}
func TestSetAuthUpdateQueueNilResetsDispatch(t *testing.T) {
w := &Watcher{}
queue := make(chan AuthUpdate, 1)
w.SetAuthUpdateQueue(queue)
if w.dispatchCond == nil || w.dispatchCancel == nil {
t.Fatal("expected dispatch to be initialized")
}
w.SetAuthUpdateQueue(nil)
if w.dispatchCancel != nil {
t.Fatal("expected dispatch cancel to be cleared when queue nil")
}
}
func TestStopConfigReloadTimerSafeWhenNil(t *testing.T) {
w := &Watcher{}
w.stopConfigReloadTimer()
w.configReloadMu.Lock()
w.configReloadTimer = time.AfterFunc(10*time.Millisecond, func() {})
w.configReloadMu.Unlock()
time.Sleep(1 * time.Millisecond)
w.stopConfigReloadTimer()
}
func TestHandleEventRemovesAuthFile(t *testing.T) {
tmpDir := t.TempDir()
authFile := filepath.Join(tmpDir, "remove.json")
if err := os.WriteFile(authFile, []byte(`{"type":"demo"}`), 0o644); err != nil {
t.Fatalf("failed to write auth file: %v", err)
}
if err := os.Remove(authFile); err != nil {
t.Fatalf("failed to remove auth file pre-check: %v", err)
}
var reloads int32
w := &Watcher{
authDir: tmpDir,
config: &config.Config{AuthDir: tmpDir},
lastAuthHashes: map[string]string{
filepath.Clean(authFile): "hash",
},
reloadCallback: func(*config.Config) {
atomic.AddInt32(&reloads, 1)
},
}
w.handleEvent(fsnotify.Event{Name: authFile, Op: fsnotify.Remove})
if got := atomic.LoadInt32(&reloads); got != 1 {
t.Fatalf("expected reload callback once, got %d", got)
}
if _, ok := w.lastAuthHashes[filepath.Clean(authFile)]; ok {
t.Fatal("expected hash entry to be removed")
}
}
func TestDispatchAuthUpdatesFlushesQueue(t *testing.T) {
queue := make(chan AuthUpdate, 4)
w := &Watcher{}
w.SetAuthUpdateQueue(queue)
defer w.stopDispatch()
w.dispatchAuthUpdates([]AuthUpdate{
{Action: AuthUpdateActionAdd, ID: "a"},
{Action: AuthUpdateActionModify, ID: "b"},
})
got := make([]AuthUpdate, 0, 2)
for i := 0; i < 2; i++ {
select {
case u := <-queue:
got = append(got, u)
case <-time.After(2 * time.Second):
t.Fatalf("timed out waiting for update %d", i)
}
}
if len(got) != 2 || got[0].ID != "a" || got[1].ID != "b" {
t.Fatalf("unexpected updates order/content: %+v", got)
}
}
func hexString(data []byte) string {
return strings.ToLower(fmt.Sprintf("%x", data))
}
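These tests rely on diff.ComputeExcludedModelsHash ignoring surrounding whitespace and letter case (compare the " Model-1 " input with the "model-1" expectation) and being insensitive to entry order. Below is a minimal sketch of that kind of normalization, assuming trim, lowercase, dedupe, sort, and SHA-256; the actual implementation in the diff package may differ in detail:

package main

import (
	"crypto/sha256"
	"fmt"
	"sort"
	"strings"
)

// computeExcludedModelsHash illustrates the order- and case-insensitive hashing the
// tests above depend on; it is a sketch, not the repository's implementation.
func computeExcludedModelsHash(models []string) string {
	normalized := make([]string, 0, len(models))
	seen := make(map[string]struct{}, len(models))
	for _, m := range models {
		m = strings.ToLower(strings.TrimSpace(m))
		if m == "" {
			continue
		}
		if _, ok := seen[m]; ok {
			continue
		}
		seen[m] = struct{}{}
		normalized = append(normalized, m)
	}
	sort.Strings(normalized)
	sum := sha256.Sum256([]byte(strings.Join(normalized, "\n")))
	return fmt.Sprintf("%x", sum[:])
}

func main() {
	a := computeExcludedModelsHash([]string{" Model-1 ", "model-2"})
	b := computeExcludedModelsHash([]string{"model-2", "MODEL-1"})
	fmt.Println(a == b) // true: ordering, case, and whitespace do not affect the hash
}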

View File

@@ -84,7 +84,8 @@ func (h *GeminiAPIHandler) GeminiGetHandler(c *gin.Context) {
})
return
}
switch request.Action {
action := strings.TrimPrefix(request.Action, "/")
switch action {
case "gemini-3-pro-preview":
c.JSON(http.StatusOK, gin.H{
"name": "models/gemini-3-pro-preview",
@@ -189,7 +190,7 @@ func (h *GeminiAPIHandler) GeminiHandler(c *gin.Context) {
})
return
}
action := strings.Split(request.Action, ":")
action := strings.Split(strings.TrimPrefix(request.Action, "/"), ":")
if len(action) != 2 {
c.JSON(http.StatusNotFound, handlers.ErrorResponse{
Error: handlers.ErrorDetail{

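For context on the TrimPrefix calls above: with a wildcard route such as /v1beta/models/*action, gin's captured parameter includes the leading slash, so the handler must strip it before matching the action. A small standalone illustration; the route and handler below are hypothetical and not taken from the repository:

package main

import (
	"net/http"
	"strings"

	"github.com/gin-gonic/gin"
)

func main() {
	r := gin.New()
	r.GET("/v1beta/models/*action", func(c *gin.Context) {
		// For GET /v1beta/models/gemini-3-pro-preview:countTokens the raw
		// parameter is "/gemini-3-pro-preview:countTokens"; trim the slash first.
		action := strings.TrimPrefix(c.Param("action"), "/")
		c.String(http.StatusOK, action)
	})
	_ = r.Run(":8080") // sketch only; error handling omitted
}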
View File

@@ -49,9 +49,6 @@ type BaseAPIHandler struct {
// Cfg holds the current application configuration.
Cfg *config.SDKConfig
// OpenAICompatProviders is a list of provider names for OpenAI compatibility.
OpenAICompatProviders []string
}
// NewBaseAPIHandlers creates a new API handlers instance.
@@ -63,11 +60,10 @@ type BaseAPIHandler struct {
//
// Returns:
// - *BaseAPIHandler: A new API handlers instance
func NewBaseAPIHandlers(cfg *config.SDKConfig, authManager *coreauth.Manager, openAICompatProviders []string) *BaseAPIHandler {
func NewBaseAPIHandlers(cfg *config.SDKConfig, authManager *coreauth.Manager) *BaseAPIHandler {
return &BaseAPIHandler{
Cfg: cfg,
AuthManager: authManager,
OpenAICompatProviders: openAICompatProviders,
Cfg: cfg,
AuthManager: authManager,
}
}
@@ -342,30 +338,19 @@ func (h *BaseAPIHandler) getRequestDetails(modelName string) (providers []string
// Resolve "auto" model to an actual available model first
resolvedModelName := util.ResolveAutoModel(modelName)
providerName, extractedModelName, isDynamic := h.parseDynamicModel(resolvedModelName)
targetModelName := resolvedModelName
if isDynamic {
targetModelName = extractedModelName
}
// Normalize the model name to handle dynamic thinking suffixes before determining the provider.
normalizedModel, metadata = normalizeModelMetadata(targetModelName)
normalizedModel, metadata = normalizeModelMetadata(resolvedModelName)
if isDynamic {
providers = []string{providerName}
} else {
// For non-dynamic models, use the normalizedModel to get the provider name.
providers = util.GetProviderName(normalizedModel)
if len(providers) == 0 && metadata != nil {
if originalRaw, ok := metadata[util.ThinkingOriginalModelMetadataKey]; ok {
if originalModel, okStr := originalRaw.(string); okStr {
originalModel = strings.TrimSpace(originalModel)
if originalModel != "" && !strings.EqualFold(originalModel, normalizedModel) {
if altProviders := util.GetProviderName(originalModel); len(altProviders) > 0 {
providers = altProviders
normalizedModel = originalModel
}
// Use the normalizedModel to get the provider name.
providers = util.GetProviderName(normalizedModel)
if len(providers) == 0 && metadata != nil {
if originalRaw, ok := metadata[util.ThinkingOriginalModelMetadataKey]; ok {
if originalModel, okStr := originalRaw.(string); okStr {
originalModel = strings.TrimSpace(originalModel)
if originalModel != "" && !strings.EqualFold(originalModel, normalizedModel) {
if altProviders := util.GetProviderName(originalModel); len(altProviders) > 0 {
providers = altProviders
normalizedModel = originalModel
}
}
}
@@ -383,30 +368,6 @@ func (h *BaseAPIHandler) getRequestDetails(modelName string) (providers []string
return providers, normalizedModel, metadata, nil
}
func (h *BaseAPIHandler) parseDynamicModel(modelName string) (providerName, model string, isDynamic bool) {
var providerPart, modelPart string
for _, sep := range []string{"://"} {
if parts := strings.SplitN(modelName, sep, 2); len(parts) == 2 {
providerPart = parts[0]
modelPart = parts[1]
break
}
}
if providerPart == "" {
return "", modelName, false
}
// Check if the provider is a configured openai-compatibility provider
for _, pName := range h.OpenAICompatProviders {
if pName == providerPart {
return providerPart, modelPart, true
}
}
return "", modelName, false
}
func cloneBytes(src []byte) []byte {
if len(src) == 0 {
return nil

View File

@@ -363,10 +363,11 @@ func (m *Manager) executeWithProvider(ctx context.Context, provider string, req
if provider == "" {
return cliproxyexecutor.Response{}, &Error{Code: "provider_not_found", Message: "provider identifier is empty"}
}
routeModel := req.Model
tried := make(map[string]struct{})
var lastErr error
for {
auth, executor, errPick := m.pickNext(ctx, provider, req.Model, opts, tried)
auth, executor, errPick := m.pickNext(ctx, provider, routeModel, opts, tried)
if errPick != nil {
if lastErr != nil {
return cliproxyexecutor.Response{}, lastErr
@@ -396,8 +397,10 @@ func (m *Manager) executeWithProvider(ctx context.Context, provider string, req
execCtx = context.WithValue(execCtx, roundTripperContextKey{}, rt)
execCtx = context.WithValue(execCtx, "cliproxy.roundtripper", rt)
}
resp, errExec := executor.Execute(execCtx, auth, req, opts)
result := Result{AuthID: auth.ID, Provider: provider, Model: req.Model, Success: errExec == nil}
execReq := req
execReq.Model, execReq.Metadata = rewriteModelForAuth(routeModel, req.Metadata, auth)
resp, errExec := executor.Execute(execCtx, auth, execReq, opts)
result := Result{AuthID: auth.ID, Provider: provider, Model: routeModel, Success: errExec == nil}
if errExec != nil {
result.Error = &Error{Message: errExec.Error()}
var se cliproxyexecutor.StatusError
@@ -420,10 +423,11 @@ func (m *Manager) executeCountWithProvider(ctx context.Context, provider string,
if provider == "" {
return cliproxyexecutor.Response{}, &Error{Code: "provider_not_found", Message: "provider identifier is empty"}
}
routeModel := req.Model
tried := make(map[string]struct{})
var lastErr error
for {
auth, executor, errPick := m.pickNext(ctx, provider, req.Model, opts, tried)
auth, executor, errPick := m.pickNext(ctx, provider, routeModel, opts, tried)
if errPick != nil {
if lastErr != nil {
return cliproxyexecutor.Response{}, lastErr
@@ -453,8 +457,10 @@ func (m *Manager) executeCountWithProvider(ctx context.Context, provider string,
execCtx = context.WithValue(execCtx, roundTripperContextKey{}, rt)
execCtx = context.WithValue(execCtx, "cliproxy.roundtripper", rt)
}
resp, errExec := executor.CountTokens(execCtx, auth, req, opts)
result := Result{AuthID: auth.ID, Provider: provider, Model: req.Model, Success: errExec == nil}
execReq := req
execReq.Model, execReq.Metadata = rewriteModelForAuth(routeModel, req.Metadata, auth)
resp, errExec := executor.CountTokens(execCtx, auth, execReq, opts)
result := Result{AuthID: auth.ID, Provider: provider, Model: routeModel, Success: errExec == nil}
if errExec != nil {
result.Error = &Error{Message: errExec.Error()}
var se cliproxyexecutor.StatusError
@@ -477,10 +483,11 @@ func (m *Manager) executeStreamWithProvider(ctx context.Context, provider string
if provider == "" {
return nil, &Error{Code: "provider_not_found", Message: "provider identifier is empty"}
}
routeModel := req.Model
tried := make(map[string]struct{})
var lastErr error
for {
auth, executor, errPick := m.pickNext(ctx, provider, req.Model, opts, tried)
auth, executor, errPick := m.pickNext(ctx, provider, routeModel, opts, tried)
if errPick != nil {
if lastErr != nil {
return nil, lastErr
@@ -510,14 +517,16 @@ func (m *Manager) executeStreamWithProvider(ctx context.Context, provider string
execCtx = context.WithValue(execCtx, roundTripperContextKey{}, rt)
execCtx = context.WithValue(execCtx, "cliproxy.roundtripper", rt)
}
chunks, errStream := executor.ExecuteStream(execCtx, auth, req, opts)
execReq := req
execReq.Model, execReq.Metadata = rewriteModelForAuth(routeModel, req.Metadata, auth)
chunks, errStream := executor.ExecuteStream(execCtx, auth, execReq, opts)
if errStream != nil {
rerr := &Error{Message: errStream.Error()}
var se cliproxyexecutor.StatusError
if errors.As(errStream, &se) && se != nil {
rerr.HTTPStatus = se.StatusCode()
}
result := Result{AuthID: auth.ID, Provider: provider, Model: req.Model, Success: false, Error: rerr}
result := Result{AuthID: auth.ID, Provider: provider, Model: routeModel, Success: false, Error: rerr}
result.RetryAfter = retryAfterFromError(errStream)
m.MarkResult(execCtx, result)
lastErr = errStream
@@ -535,18 +544,66 @@ func (m *Manager) executeStreamWithProvider(ctx context.Context, provider string
if errors.As(chunk.Err, &se) && se != nil {
rerr.HTTPStatus = se.StatusCode()
}
m.MarkResult(streamCtx, Result{AuthID: streamAuth.ID, Provider: streamProvider, Model: req.Model, Success: false, Error: rerr})
m.MarkResult(streamCtx, Result{AuthID: streamAuth.ID, Provider: streamProvider, Model: routeModel, Success: false, Error: rerr})
}
out <- chunk
}
if !failed {
m.MarkResult(streamCtx, Result{AuthID: streamAuth.ID, Provider: streamProvider, Model: req.Model, Success: true})
m.MarkResult(streamCtx, Result{AuthID: streamAuth.ID, Provider: streamProvider, Model: routeModel, Success: true})
}
}(execCtx, auth.Clone(), provider, chunks)
return out, nil
}
}
// rewriteModelForAuth strips the credential's "<prefix>/" namespace from the requested
// model (and related metadata) so the upstream executor sees the unprefixed model name.
func rewriteModelForAuth(model string, metadata map[string]any, auth *Auth) (string, map[string]any) {
if auth == nil || model == "" {
return model, metadata
}
prefix := strings.TrimSpace(auth.Prefix)
if prefix == "" {
return model, metadata
}
needle := prefix + "/"
if !strings.HasPrefix(model, needle) {
return model, metadata
}
rewritten := strings.TrimPrefix(model, needle)
return rewritten, stripPrefixFromMetadata(metadata, needle)
}
// stripPrefixFromMetadata removes the same "<prefix>/" namespace from model names recorded
// in request metadata, copying the map only when a value actually needs rewriting.
func stripPrefixFromMetadata(metadata map[string]any, needle string) map[string]any {
if len(metadata) == 0 || needle == "" {
return metadata
}
keys := []string{
util.ThinkingOriginalModelMetadataKey,
util.GeminiOriginalModelMetadataKey,
}
var out map[string]any
for _, key := range keys {
raw, ok := metadata[key]
if !ok {
continue
}
value, okStr := raw.(string)
if !okStr || !strings.HasPrefix(value, needle) {
continue
}
if out == nil {
out = make(map[string]any, len(metadata))
for k, v := range metadata {
out[k] = v
}
}
out[key] = strings.TrimPrefix(value, needle)
}
if out == nil {
return metadata
}
return out
}
func (m *Manager) normalizeProviders(providers []string) []string {
if len(providers) == 0 {
return nil

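To make the routing/execution split concrete: auth selection and result reporting use the prefixed name (routeModel), while the executor receives the model with the credential's prefix stripped. A self-contained restatement of just that rewrite, for illustration; the type and function names below are local to the sketch:

package main

import (
	"fmt"
	"strings"
)

// auth mirrors only the field the rewrite cares about; hypothetical, for illustration.
type auth struct{ Prefix string }

// rewriteModel is a trimmed-down restatement of rewriteModelForAuth from the diff:
// it strips "<prefix>/" only when the requested model actually carries that prefix.
func rewriteModel(model string, a auth) string {
	prefix := strings.TrimSpace(a.Prefix)
	if prefix == "" {
		return model
	}
	return strings.TrimPrefix(model, prefix+"/")
}

func main() {
	a := auth{Prefix: "teamA"}
	fmt.Println(rewriteModel("teamA/gemini-3-pro-preview", a)) // gemini-3-pro-preview
	fmt.Println(rewriteModel("gemini-3-pro-preview", a))       // unchanged: unprefixed requests pass through
}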
View File

@@ -19,6 +19,8 @@ type Auth struct {
Index uint64 `json:"-"`
// Provider is the upstream provider key (e.g. "gemini", "claude").
Provider string `json:"provider"`
// Prefix optionally namespaces models for routing (e.g., "teamA/gemini-3-pro-preview").
Prefix string `json:"prefix,omitempty"`
// FileName stores the relative or absolute path of the backing auth file.
FileName string `json:"-"`
// Storage holds the token persistence implementation used during login flows.

View File

@@ -787,7 +787,7 @@ func (s *Service) registerModelsForAuth(a *coreauth.Auth) {
if providerKey == "" {
providerKey = "openai-compatibility"
}
GlobalModelRegistry().RegisterClient(a.ID, providerKey, ms)
GlobalModelRegistry().RegisterClient(a.ID, providerKey, applyModelPrefixes(ms, a.Prefix, s.cfg.ForceModelPrefix))
} else {
// Ensure stale registrations are cleared when model list becomes empty.
GlobalModelRegistry().UnregisterClient(a.ID)
@@ -807,7 +807,7 @@ func (s *Service) registerModelsForAuth(a *coreauth.Auth) {
if key == "" {
key = strings.ToLower(strings.TrimSpace(a.Provider))
}
GlobalModelRegistry().RegisterClient(a.ID, key, models)
GlobalModelRegistry().RegisterClient(a.ID, key, applyModelPrefixes(models, a.Prefix, s.cfg != nil && s.cfg.ForceModelPrefix))
return
}
@@ -987,6 +987,48 @@ func applyExcludedModels(models []*ModelInfo, excluded []string) []*ModelInfo {
return filtered
}
// applyModelPrefixes extends the model list with "<prefix>/<id>" aliases; the bare IDs are
// kept as well unless forceModelPrefix requires clients to use the prefixed form.
func applyModelPrefixes(models []*ModelInfo, prefix string, forceModelPrefix bool) []*ModelInfo {
trimmedPrefix := strings.TrimSpace(prefix)
if trimmedPrefix == "" || len(models) == 0 {
return models
}
out := make([]*ModelInfo, 0, len(models)*2)
seen := make(map[string]struct{}, len(models)*2)
addModel := func(model *ModelInfo) {
if model == nil {
return
}
id := strings.TrimSpace(model.ID)
if id == "" {
return
}
if _, exists := seen[id]; exists {
return
}
seen[id] = struct{}{}
out = append(out, model)
}
for _, model := range models {
if model == nil {
continue
}
baseID := strings.TrimSpace(model.ID)
if baseID == "" {
continue
}
if !forceModelPrefix || trimmedPrefix == baseID {
addModel(model)
}
clone := *model
clone.ID = trimmedPrefix + "/" + baseID
addModel(&clone)
}
return out
}
// matchWildcard performs case-insensitive wildcard matching where '*' matches any substring.
func matchWildcard(pattern, value string) bool {
if pattern == "" {

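The effect on the registry is easiest to see on bare IDs. The sketch below mirrors applyModelPrefixes for string IDs only (the real function clones *ModelInfo values and deduplicates); it is an illustration, not repository code:

package main

import (
	"fmt"
	"strings"
)

// prefixIDs shows how a credential prefix changes the set of registered model IDs:
// every ID gains a "<prefix>/" alias, and the bare ID survives unless the prefix is forced.
func prefixIDs(ids []string, prefix string, force bool) []string {
	prefix = strings.TrimSpace(prefix)
	if prefix == "" {
		return ids
	}
	out := make([]string, 0, len(ids)*2)
	for _, id := range ids {
		if !force {
			out = append(out, id)
		}
		out = append(out, prefix+"/"+id)
	}
	return out
}

func main() {
	fmt.Println(prefixIDs([]string{"gemini-3-pro-preview"}, "teamA", false))
	// [gemini-3-pro-preview teamA/gemini-3-pro-preview]
	fmt.Println(prefixIDs([]string{"gemini-3-pro-preview"}, "teamA", true))
	// [teamA/gemini-3-pro-preview] (force-model-prefix hides the bare ID)
}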
View File

@@ -9,6 +9,11 @@ type SDKConfig struct {
// ProxyURL is the URL of an optional proxy server to use for outbound requests.
ProxyURL string `yaml:"proxy-url" json:"proxy-url"`
// ForceModelPrefix requires explicit model prefixes (e.g., "teamA/gemini-3-pro-preview")
// to target prefixed credentials. When false, unprefixed model requests may use prefixed
// credentials as well.
ForceModelPrefix bool `yaml:"force-model-prefix" json:"force-model-prefix"`
// RequestLog enables or disables detailed request logging functionality.
RequestLog bool `yaml:"request-log" json:"request-log"`

View File

@@ -295,7 +295,7 @@ func TestThinkingConversionsAcrossProtocolsAndModels(t *testing.T) {
}
// Check numeric budget fallback for allowCompat
if budget, _, _, matched := util.ThinkingFromMetadata(metadata); matched && budget != nil {
if mapped, okMap := util.OpenAIThinkingBudgetToEffort(normalizedModel, *budget); okMap && mapped != "" {
if mapped, okMap := util.ThinkingBudgetToEffort(normalizedModel, *budget); okMap && mapped != "" {
return true, mapped, false
}
}
@@ -308,7 +308,7 @@ func TestThinkingConversionsAcrossProtocolsAndModels(t *testing.T) {
effort, ok := util.ReasoningEffortFromMetadata(metadata)
if !ok || strings.TrimSpace(effort) == "" {
if budget, _, _, matched := util.ThinkingFromMetadata(metadata); matched && budget != nil {
if mapped, okMap := util.OpenAIThinkingBudgetToEffort(normalizedModel, *budget); okMap {
if mapped, okMap := util.ThinkingBudgetToEffort(normalizedModel, *budget); okMap {
effort = mapped
ok = true
}
@@ -336,7 +336,7 @@ func TestThinkingConversionsAcrossProtocolsAndModels(t *testing.T) {
return false, "", true
}
if budget, _, _, matched := util.ThinkingFromMetadata(metadata); matched && budget != nil {
if mapped, okMap := util.OpenAIThinkingBudgetToEffort(normalizedModel, *budget); okMap && mapped != "" {
if mapped, okMap := util.ThinkingBudgetToEffort(normalizedModel, *budget); okMap && mapped != "" {
mapped = strings.ToLower(strings.TrimSpace(mapped))
if normalized, okLevel := util.NormalizeReasoningEffortLevel(normalizedModel, mapped); okLevel {
return true, normalized, false
@@ -609,7 +609,7 @@ func TestRawPayloadThinkingConversions(t *testing.T) {
return true, normalized, false
}
if budget, ok := cs.thinkingParam.(int); ok {
if mapped, okM := util.OpenAIThinkingBudgetToEffort(model, budget); okM && mapped != "" {
if mapped, okM := util.ThinkingBudgetToEffort(model, budget); okM && mapped != "" {
return true, mapped, false
}
}
@@ -625,7 +625,7 @@ func TestRawPayloadThinkingConversions(t *testing.T) {
return false, "", true // invalid level
}
if budget, ok := cs.thinkingParam.(int); ok {
if mapped, okM := util.OpenAIThinkingBudgetToEffort(model, budget); okM && mapped != "" {
if mapped, okM := util.ThinkingBudgetToEffort(model, budget); okM && mapped != "" {
// Check if the mapped effort is valid for this model
if _, validLevel := util.NormalizeReasoningEffortLevel(model, mapped); !validLevel {
return true, mapped, true // expect validation error
@@ -646,7 +646,7 @@ func TestRawPayloadThinkingConversions(t *testing.T) {
return false, "", true
}
if budget, ok := cs.thinkingParam.(int); ok {
if mapped, okM := util.OpenAIThinkingBudgetToEffort(model, budget); okM && mapped != "" {
if mapped, okM := util.ThinkingBudgetToEffort(model, budget); okM && mapped != "" {
// Check if the mapped effort is valid for this model
if _, validLevel := util.NormalizeReasoningEffortLevel(model, mapped); !validLevel {
return true, mapped, true // expect validation error
@@ -721,7 +721,7 @@ func TestRawPayloadThinkingConversions(t *testing.T) {
}
}
func TestOpenAIThinkingBudgetToEffortRanges(t *testing.T) {
func TestThinkingBudgetToEffort(t *testing.T) {
cleanup := registerCoreModels(t)
defer cleanup()
@@ -733,7 +733,7 @@ func TestOpenAIThinkingBudgetToEffortRanges(t *testing.T) {
ok bool
}{
{name: "dynamic-auto", model: "gpt-5", budget: -1, want: "auto", ok: true},
{name: "zero-none", model: "gpt-5", budget: 0, want: "none", ok: true},
{name: "zero-none", model: "gpt-5", budget: 0, want: "minimal", ok: true},
{name: "low-min", model: "gpt-5", budget: 1, want: "low", ok: true},
{name: "low-max", model: "gpt-5", budget: 1024, want: "low", ok: true},
{name: "medium-min", model: "gpt-5", budget: 1025, want: "medium", ok: true},
@@ -741,14 +741,14 @@ func TestOpenAIThinkingBudgetToEffortRanges(t *testing.T) {
{name: "high-min", model: "gpt-5", budget: 8193, want: "high", ok: true},
{name: "high-max", model: "gpt-5", budget: 24576, want: "high", ok: true},
{name: "over-max-clamps-to-highest", model: "gpt-5", budget: 64000, want: "high", ok: true},
{name: "over-max-xhigh-model", model: "gpt-5.2", budget: 50000, want: "xhigh", ok: true},
{name: "over-max-xhigh-model", model: "gpt-5.2", budget: 64000, want: "xhigh", ok: true},
{name: "negative-unsupported", model: "gpt-5", budget: -5, want: "", ok: false},
}
for _, cs := range cases {
cs := cs
t.Run(cs.name, func(t *testing.T) {
got, ok := util.OpenAIThinkingBudgetToEffort(cs.model, cs.budget)
got, ok := util.ThinkingBudgetToEffort(cs.model, cs.budget)
if ok != cs.ok {
t.Fatalf("ok mismatch for model=%s budget=%d: expect %v got %v", cs.model, cs.budget, cs.ok, ok)
}
@@ -758,3 +758,41 @@ func TestOpenAIThinkingBudgetToEffortRanges(t *testing.T) {
})
}
}
func TestThinkingEffortToBudget(t *testing.T) {
cleanup := registerCoreModels(t)
defer cleanup()
cases := []struct {
name string
model string
effort string
want int
ok bool
}{
{name: "none", model: "gemini-2.5-pro", effort: "none", want: 0, ok: true},
{name: "auto", model: "gemini-2.5-pro", effort: "auto", want: -1, ok: true},
{name: "minimal", model: "gemini-2.5-pro", effort: "minimal", want: 512, ok: true},
{name: "low", model: "gemini-2.5-pro", effort: "low", want: 1024, ok: true},
{name: "medium", model: "gemini-2.5-pro", effort: "medium", want: 8192, ok: true},
{name: "high", model: "gemini-2.5-pro", effort: "high", want: 24576, ok: true},
{name: "xhigh", model: "gemini-2.5-pro", effort: "xhigh", want: 32768, ok: true},
{name: "empty-unsupported", model: "gemini-2.5-pro", effort: "", want: 0, ok: false},
{name: "invalid-unsupported", model: "gemini-2.5-pro", effort: "ultra", want: 0, ok: false},
{name: "case-insensitive", model: "gemini-2.5-pro", effort: "LOW", want: 1024, ok: true},
{name: "case-insensitive-medium", model: "gemini-2.5-pro", effort: "MEDIUM", want: 8192, ok: true},
}
for _, cs := range cases {
cs := cs
t.Run(cs.name, func(t *testing.T) {
got, ok := util.ThinkingEffortToBudget(cs.model, cs.effort)
if ok != cs.ok {
t.Fatalf("ok mismatch for model=%s effort=%s: expect %v got %v", cs.model, cs.effort, cs.ok, ok)
}
if got != cs.want {
t.Fatalf("value mismatch for model=%s effort=%s: expect %d got %d", cs.model, cs.effort, cs.want, got)
}
})
}
}
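The two test tables above pin down the conversion in both directions for the models they cover. A compact sketch of the mapping those cases imply; the ranges are hard-coded here for illustration, whereas the real util helpers also consult the model registry for the effort levels each model supports:

package main

import (
	"fmt"
	"strings"
)

// budgetToEffort reproduces the ranges exercised by TestThinkingBudgetToEffort for a
// model whose highest supported level is "high". Illustration only.
func budgetToEffort(budget int) (string, bool) {
	switch {
	case budget == -1:
		return "auto", true
	case budget < -1:
		return "", false
	case budget == 0:
		return "minimal", true
	case budget <= 1024:
		return "low", true
	case budget <= 8192:
		return "medium", true
	default:
		return "high", true // budgets above the top range clamp to the highest supported level
	}
}

// effortToBudget reproduces the representative budgets from TestThinkingEffortToBudget.
func effortToBudget(effort string) (int, bool) {
	switch strings.ToLower(strings.TrimSpace(effort)) {
	case "none":
		return 0, true
	case "auto":
		return -1, true
	case "minimal":
		return 512, true
	case "low":
		return 1024, true
	case "medium":
		return 8192, true
	case "high":
		return 24576, true
	case "xhigh":
		return 32768, true
	default:
		return 0, false
	}
}

func main() {
	fmt.Println(budgetToEffort(64000)) // high true (clamped)
	fmt.Println(effortToBudget("LOW")) // 1024 true (case-insensitive)
}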