Mirror of https://github.com/router-for-me/CLIProxyAPI.git (synced 2026-02-03 04:50:52 +08:00)

Merge pull request #572 from router-for-me/watcher

refactor(watcher): extract auth synthesizer to synthesizer package
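The extracted package exposes two synthesizers with the same Synthesize(ctx) shape (see the new files listed below). As a minimal sketch of how a caller in the watcher might drive both against a single SynthesisContext — the helper name and wiring here are assumptions for illustration, not part of this commit:

package watcher

import (
    "time"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/config"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/synthesizer"
    coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

// synthesizeAll is a hypothetical helper: it collects config-derived and
// file-derived auths using the extracted synthesizer package.
func synthesizeAll(cfg *config.Config, authDir string) ([]*coreauth.Auth, error) {
    ctx := &synthesizer.SynthesisContext{
        Config:      cfg,
        AuthDir:     authDir,
        Now:         time.Now(),
        IDGenerator: synthesizer.NewStableIDGenerator(),
    }

    out := make([]*coreauth.Auth, 0, 32)

    // API keys declared directly in the configuration file.
    fromConfig, err := synthesizer.NewConfigSynthesizer().Synthesize(ctx)
    if err != nil {
        return nil, err
    }
    out = append(out, fromConfig...)

    // OAuth credential files under the auth directory.
    fromFiles, err := synthesizer.NewFileSynthesizer().Synthesize(ctx)
    if err != nil {
        return nil, err
    }
    out = append(out, fromFiles...)
    return out, nil
}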
@@ -82,7 +82,7 @@ func BuildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
             changes = append(changes, fmt.Sprintf("gemini[%d].proxy-url: %s -> %s", i, formatProxyURL(o.ProxyURL), formatProxyURL(n.ProxyURL)))
         }
         if strings.TrimSpace(o.Prefix) != strings.TrimSpace(n.Prefix) {
-            changes = append(changes, fmt.Sprintf("gemini[%d].prefix: %s -> %s", i, formatProxyURL(o.Prefix), formatProxyURL(n.Prefix)))
+            changes = append(changes, fmt.Sprintf("gemini[%d].prefix: %s -> %s", i, strings.TrimSpace(o.Prefix), strings.TrimSpace(n.Prefix)))
         }
         if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
             changes = append(changes, fmt.Sprintf("gemini[%d].api-key: updated", i))
@@ -112,7 +112,7 @@ func BuildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
             changes = append(changes, fmt.Sprintf("claude[%d].proxy-url: %s -> %s", i, formatProxyURL(o.ProxyURL), formatProxyURL(n.ProxyURL)))
         }
         if strings.TrimSpace(o.Prefix) != strings.TrimSpace(n.Prefix) {
-            changes = append(changes, fmt.Sprintf("claude[%d].prefix: %s -> %s", i, formatProxyURL(o.Prefix), formatProxyURL(n.Prefix)))
+            changes = append(changes, fmt.Sprintf("claude[%d].prefix: %s -> %s", i, strings.TrimSpace(o.Prefix), strings.TrimSpace(n.Prefix)))
         }
         if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
             changes = append(changes, fmt.Sprintf("claude[%d].api-key: updated", i))
@@ -142,7 +142,7 @@ func BuildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
             changes = append(changes, fmt.Sprintf("codex[%d].proxy-url: %s -> %s", i, formatProxyURL(o.ProxyURL), formatProxyURL(n.ProxyURL)))
         }
         if strings.TrimSpace(o.Prefix) != strings.TrimSpace(n.Prefix) {
-            changes = append(changes, fmt.Sprintf("codex[%d].prefix: %s -> %s", i, formatProxyURL(o.Prefix), formatProxyURL(n.Prefix)))
+            changes = append(changes, fmt.Sprintf("codex[%d].prefix: %s -> %s", i, strings.TrimSpace(o.Prefix), strings.TrimSpace(n.Prefix)))
         }
         if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
             changes = append(changes, fmt.Sprintf("codex[%d].api-key: updated", i))
@@ -235,7 +235,7 @@ func BuildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
             changes = append(changes, fmt.Sprintf("vertex[%d].proxy-url: %s -> %s", i, formatProxyURL(o.ProxyURL), formatProxyURL(n.ProxyURL)))
         }
         if strings.TrimSpace(o.Prefix) != strings.TrimSpace(n.Prefix) {
-            changes = append(changes, fmt.Sprintf("vertex[%d].prefix: %s -> %s", i, formatProxyURL(o.Prefix), formatProxyURL(n.Prefix)))
+            changes = append(changes, fmt.Sprintf("vertex[%d].prefix: %s -> %s", i, strings.TrimSpace(o.Prefix), strings.TrimSpace(n.Prefix)))
         }
         if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
             changes = append(changes, fmt.Sprintf("vertex[%d].api-key: updated", i))

@@ -234,6 +234,7 @@ func TestBuildConfigChangeDetails_FlagsAndKeys(t *testing.T) {
             RequestLog: false,
             ProxyURL:   "http://old-proxy",
             APIKeys:    []string{"key-1"},
+            ForceModelPrefix: false,
         },
     }
     newCfg := &config.Config{
@@ -269,6 +270,7 @@ func TestBuildConfigChangeDetails_FlagsAndKeys(t *testing.T) {
             RequestLog: true,
             ProxyURL:   "http://new-proxy",
             APIKeys:    []string{" key-1 ", "key-2"},
+            ForceModelPrefix: true,
         },
     }

@@ -282,6 +284,7 @@ func TestBuildConfigChangeDetails_FlagsAndKeys(t *testing.T) {
     expectContains(t, details, "max-retry-interval: 1 -> 3")
     expectContains(t, details, "proxy-url: http://old-proxy -> http://new-proxy")
     expectContains(t, details, "ws-auth: false -> true")
+    expectContains(t, details, "force-model-prefix: false -> true")
     expectContains(t, details, "quota-exceeded.switch-project: false -> true")
     expectContains(t, details, "quota-exceeded.switch-preview-model: false -> true")
     expectContains(t, details, "api-keys count: 1 -> 2")
internal/watcher/synthesizer/config.go (new file, 294 lines)
@@ -0,0 +1,294 @@
package synthesizer

import (
    "fmt"
    "strings"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
    coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

// ConfigSynthesizer generates Auth entries from configuration API keys.
// It handles Gemini, Claude, Codex, OpenAI-compat, and Vertex-compat providers.
type ConfigSynthesizer struct{}

// NewConfigSynthesizer creates a new ConfigSynthesizer instance.
func NewConfigSynthesizer() *ConfigSynthesizer {
    return &ConfigSynthesizer{}
}

// Synthesize generates Auth entries from config API keys.
func (s *ConfigSynthesizer) Synthesize(ctx *SynthesisContext) ([]*coreauth.Auth, error) {
    out := make([]*coreauth.Auth, 0, 32)
    if ctx == nil || ctx.Config == nil {
        return out, nil
    }

    // Gemini API Keys
    out = append(out, s.synthesizeGeminiKeys(ctx)...)
    // Claude API Keys
    out = append(out, s.synthesizeClaudeKeys(ctx)...)
    // Codex API Keys
    out = append(out, s.synthesizeCodexKeys(ctx)...)
    // OpenAI-compat
    out = append(out, s.synthesizeOpenAICompat(ctx)...)
    // Vertex-compat
    out = append(out, s.synthesizeVertexCompat(ctx)...)

    return out, nil
}

// synthesizeGeminiKeys creates Auth entries for Gemini API keys.
func (s *ConfigSynthesizer) synthesizeGeminiKeys(ctx *SynthesisContext) []*coreauth.Auth {
    cfg := ctx.Config
    now := ctx.Now
    idGen := ctx.IDGenerator

    out := make([]*coreauth.Auth, 0, len(cfg.GeminiKey))
    for i := range cfg.GeminiKey {
        entry := cfg.GeminiKey[i]
        key := strings.TrimSpace(entry.APIKey)
        if key == "" {
            continue
        }
        prefix := strings.TrimSpace(entry.Prefix)
        base := strings.TrimSpace(entry.BaseURL)
        proxyURL := strings.TrimSpace(entry.ProxyURL)
        id, token := idGen.Next("gemini:apikey", key, base)
        attrs := map[string]string{
            "source":  fmt.Sprintf("config:gemini[%s]", token),
            "api_key": key,
        }
        if base != "" {
            attrs["base_url"] = base
        }
        addConfigHeadersToAttrs(entry.Headers, attrs)
        a := &coreauth.Auth{
            ID:         id,
            Provider:   "gemini",
            Label:      "gemini-apikey",
            Prefix:     prefix,
            Status:     coreauth.StatusActive,
            ProxyURL:   proxyURL,
            Attributes: attrs,
            CreatedAt:  now,
            UpdatedAt:  now,
        }
        ApplyAuthExcludedModelsMeta(a, cfg, entry.ExcludedModels, "apikey")
        out = append(out, a)
    }
    return out
}

// synthesizeClaudeKeys creates Auth entries for Claude API keys.
func (s *ConfigSynthesizer) synthesizeClaudeKeys(ctx *SynthesisContext) []*coreauth.Auth {
    cfg := ctx.Config
    now := ctx.Now
    idGen := ctx.IDGenerator

    out := make([]*coreauth.Auth, 0, len(cfg.ClaudeKey))
    for i := range cfg.ClaudeKey {
        ck := cfg.ClaudeKey[i]
        key := strings.TrimSpace(ck.APIKey)
        if key == "" {
            continue
        }
        prefix := strings.TrimSpace(ck.Prefix)
        base := strings.TrimSpace(ck.BaseURL)
        id, token := idGen.Next("claude:apikey", key, base)
        attrs := map[string]string{
            "source":  fmt.Sprintf("config:claude[%s]", token),
            "api_key": key,
        }
        if base != "" {
            attrs["base_url"] = base
        }
        if hash := diff.ComputeClaudeModelsHash(ck.Models); hash != "" {
            attrs["models_hash"] = hash
        }
        addConfigHeadersToAttrs(ck.Headers, attrs)
        proxyURL := strings.TrimSpace(ck.ProxyURL)
        a := &coreauth.Auth{
            ID:         id,
            Provider:   "claude",
            Label:      "claude-apikey",
            Prefix:     prefix,
            Status:     coreauth.StatusActive,
            ProxyURL:   proxyURL,
            Attributes: attrs,
            CreatedAt:  now,
            UpdatedAt:  now,
        }
        ApplyAuthExcludedModelsMeta(a, cfg, ck.ExcludedModels, "apikey")
        out = append(out, a)
    }
    return out
}

// synthesizeCodexKeys creates Auth entries for Codex API keys.
func (s *ConfigSynthesizer) synthesizeCodexKeys(ctx *SynthesisContext) []*coreauth.Auth {
    cfg := ctx.Config
    now := ctx.Now
    idGen := ctx.IDGenerator

    out := make([]*coreauth.Auth, 0, len(cfg.CodexKey))
    for i := range cfg.CodexKey {
        ck := cfg.CodexKey[i]
        key := strings.TrimSpace(ck.APIKey)
        if key == "" {
            continue
        }
        prefix := strings.TrimSpace(ck.Prefix)
        id, token := idGen.Next("codex:apikey", key, ck.BaseURL)
        attrs := map[string]string{
            "source":  fmt.Sprintf("config:codex[%s]", token),
            "api_key": key,
        }
        if ck.BaseURL != "" {
            attrs["base_url"] = ck.BaseURL
        }
        addConfigHeadersToAttrs(ck.Headers, attrs)
        proxyURL := strings.TrimSpace(ck.ProxyURL)
        a := &coreauth.Auth{
            ID:         id,
            Provider:   "codex",
            Label:      "codex-apikey",
            Prefix:     prefix,
            Status:     coreauth.StatusActive,
            ProxyURL:   proxyURL,
            Attributes: attrs,
            CreatedAt:  now,
            UpdatedAt:  now,
        }
        ApplyAuthExcludedModelsMeta(a, cfg, ck.ExcludedModels, "apikey")
        out = append(out, a)
    }
    return out
}

// synthesizeOpenAICompat creates Auth entries for OpenAI-compatible providers.
func (s *ConfigSynthesizer) synthesizeOpenAICompat(ctx *SynthesisContext) []*coreauth.Auth {
    cfg := ctx.Config
    now := ctx.Now
    idGen := ctx.IDGenerator

    out := make([]*coreauth.Auth, 0)
    for i := range cfg.OpenAICompatibility {
        compat := &cfg.OpenAICompatibility[i]
        prefix := strings.TrimSpace(compat.Prefix)
        providerName := strings.ToLower(strings.TrimSpace(compat.Name))
        if providerName == "" {
            providerName = "openai-compatibility"
        }
        base := strings.TrimSpace(compat.BaseURL)

        // Handle new APIKeyEntries format (preferred)
        createdEntries := 0
        for j := range compat.APIKeyEntries {
            entry := &compat.APIKeyEntries[j]
            key := strings.TrimSpace(entry.APIKey)
            proxyURL := strings.TrimSpace(entry.ProxyURL)
            idKind := fmt.Sprintf("openai-compatibility:%s", providerName)
            id, token := idGen.Next(idKind, key, base, proxyURL)
            attrs := map[string]string{
                "source":       fmt.Sprintf("config:%s[%s]", providerName, token),
                "base_url":     base,
                "compat_name":  compat.Name,
                "provider_key": providerName,
            }
            if key != "" {
                attrs["api_key"] = key
            }
            if hash := diff.ComputeOpenAICompatModelsHash(compat.Models); hash != "" {
                attrs["models_hash"] = hash
            }
            addConfigHeadersToAttrs(compat.Headers, attrs)
            a := &coreauth.Auth{
                ID:         id,
                Provider:   providerName,
                Label:      compat.Name,
                Prefix:     prefix,
                Status:     coreauth.StatusActive,
                ProxyURL:   proxyURL,
                Attributes: attrs,
                CreatedAt:  now,
                UpdatedAt:  now,
            }
            out = append(out, a)
            createdEntries++
        }
        // Fallback: create entry without API key if no APIKeyEntries
        if createdEntries == 0 {
            idKind := fmt.Sprintf("openai-compatibility:%s", providerName)
            id, token := idGen.Next(idKind, base)
            attrs := map[string]string{
                "source":       fmt.Sprintf("config:%s[%s]", providerName, token),
                "base_url":     base,
                "compat_name":  compat.Name,
                "provider_key": providerName,
            }
            if hash := diff.ComputeOpenAICompatModelsHash(compat.Models); hash != "" {
                attrs["models_hash"] = hash
            }
            addConfigHeadersToAttrs(compat.Headers, attrs)
            a := &coreauth.Auth{
                ID:         id,
                Provider:   providerName,
                Label:      compat.Name,
                Prefix:     prefix,
                Status:     coreauth.StatusActive,
                Attributes: attrs,
                CreatedAt:  now,
                UpdatedAt:  now,
            }
            out = append(out, a)
        }
    }
    return out
}

// synthesizeVertexCompat creates Auth entries for Vertex-compatible providers.
func (s *ConfigSynthesizer) synthesizeVertexCompat(ctx *SynthesisContext) []*coreauth.Auth {
    cfg := ctx.Config
    now := ctx.Now
    idGen := ctx.IDGenerator

    out := make([]*coreauth.Auth, 0, len(cfg.VertexCompatAPIKey))
    for i := range cfg.VertexCompatAPIKey {
        compat := &cfg.VertexCompatAPIKey[i]
        providerName := "vertex"
        base := strings.TrimSpace(compat.BaseURL)

        key := strings.TrimSpace(compat.APIKey)
        prefix := strings.TrimSpace(compat.Prefix)
        proxyURL := strings.TrimSpace(compat.ProxyURL)
        idKind := "vertex:apikey"
        id, token := idGen.Next(idKind, key, base, proxyURL)
        attrs := map[string]string{
            "source":       fmt.Sprintf("config:vertex-apikey[%s]", token),
            "base_url":     base,
            "provider_key": providerName,
        }
        if key != "" {
            attrs["api_key"] = key
        }
        if hash := diff.ComputeVertexCompatModelsHash(compat.Models); hash != "" {
            attrs["models_hash"] = hash
        }
        addConfigHeadersToAttrs(compat.Headers, attrs)
        a := &coreauth.Auth{
            ID:         id,
            Provider:   providerName,
            Label:      "vertex-apikey",
            Prefix:     prefix,
            Status:     coreauth.StatusActive,
            ProxyURL:   proxyURL,
            Attributes: attrs,
            CreatedAt:  now,
            UpdatedAt:  now,
        }
        ApplyAuthExcludedModelsMeta(a, cfg, nil, "apikey")
        out = append(out, a)
    }
    return out
}
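For reference, a small sketch (illustrative only, not part of the commit; the key and prefix values are placeholders) of what a single gemini API-key config entry turns into, per synthesizeGeminiKeys above:

package synthesizer

import (
    "fmt"
    "time"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)

// demoGeminiKeySynthesis shows the Auth produced for one configured Gemini key.
func demoGeminiKeySynthesis() {
    ctx := &SynthesisContext{
        Config: &config.Config{
            GeminiKey: []config.GeminiKey{
                {APIKey: "example-key", Prefix: "team-a"},
            },
        },
        Now:         time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC),
        IDGenerator: NewStableIDGenerator(),
    }
    auths, _ := NewConfigSynthesizer().Synthesize(ctx)
    // Expected per the code above: provider "gemini", label "gemini-apikey",
    // prefix "team-a", and attributes "api_key" plus a "source" of the form
    // config:gemini[<token>].
    fmt.Println(auths[0].Provider, auths[0].Label, auths[0].Prefix)
}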
internal/watcher/synthesizer/config_test.go (new file, 613 lines)
@@ -0,0 +1,613 @@
package synthesizer

import (
    "testing"
    "time"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/config"
    coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

func TestNewConfigSynthesizer(t *testing.T) {
    synth := NewConfigSynthesizer()
    if synth == nil {
        t.Fatal("expected non-nil synthesizer")
    }
}

func TestConfigSynthesizer_Synthesize_NilContext(t *testing.T) {
    synth := NewConfigSynthesizer()
    auths, err := synth.Synthesize(nil)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 0 {
        t.Fatalf("expected empty auths, got %d", len(auths))
    }
}

func TestConfigSynthesizer_Synthesize_NilConfig(t *testing.T) {
    synth := NewConfigSynthesizer()
    ctx := &SynthesisContext{
        Config:      nil,
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }
    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 0 {
        t.Fatalf("expected empty auths, got %d", len(auths))
    }
}

func TestConfigSynthesizer_GeminiKeys(t *testing.T) {
    tests := []struct {
        name       string
        geminiKeys []config.GeminiKey
        wantLen    int
        validate   func(*testing.T, []*coreauth.Auth)
    }{
        {
            name: "single gemini key",
            geminiKeys: []config.GeminiKey{
                {APIKey: "test-key-123", Prefix: "team-a"},
            },
            wantLen: 1,
            validate: func(t *testing.T, auths []*coreauth.Auth) {
                if auths[0].Provider != "gemini" {
                    t.Errorf("expected provider gemini, got %s", auths[0].Provider)
                }
                if auths[0].Prefix != "team-a" {
                    t.Errorf("expected prefix team-a, got %s", auths[0].Prefix)
                }
                if auths[0].Label != "gemini-apikey" {
                    t.Errorf("expected label gemini-apikey, got %s", auths[0].Label)
                }
                if auths[0].Attributes["api_key"] != "test-key-123" {
                    t.Errorf("expected api_key test-key-123, got %s", auths[0].Attributes["api_key"])
                }
                if auths[0].Status != coreauth.StatusActive {
                    t.Errorf("expected status active, got %s", auths[0].Status)
                }
            },
        },
        {
            name: "gemini key with base url and proxy",
            geminiKeys: []config.GeminiKey{
                {
                    APIKey:   "api-key",
                    BaseURL:  "https://custom.api.com",
                    ProxyURL: "http://proxy.local:8080",
                    Prefix:   "custom",
                },
            },
            wantLen: 1,
            validate: func(t *testing.T, auths []*coreauth.Auth) {
                if auths[0].Attributes["base_url"] != "https://custom.api.com" {
                    t.Errorf("expected base_url https://custom.api.com, got %s", auths[0].Attributes["base_url"])
                }
                if auths[0].ProxyURL != "http://proxy.local:8080" {
                    t.Errorf("expected proxy_url http://proxy.local:8080, got %s", auths[0].ProxyURL)
                }
            },
        },
        {
            name: "gemini key with headers",
            geminiKeys: []config.GeminiKey{
                {
                    APIKey:  "api-key",
                    Headers: map[string]string{"X-Custom": "value"},
                },
            },
            wantLen: 1,
            validate: func(t *testing.T, auths []*coreauth.Auth) {
                if auths[0].Attributes["header:X-Custom"] != "value" {
                    t.Errorf("expected header:X-Custom=value, got %s", auths[0].Attributes["header:X-Custom"])
                }
            },
        },
        {
            name: "empty api key skipped",
            geminiKeys: []config.GeminiKey{
                {APIKey: ""},
                {APIKey: " "},
                {APIKey: "valid-key"},
            },
            wantLen: 1,
        },
        {
            name: "multiple gemini keys",
            geminiKeys: []config.GeminiKey{
                {APIKey: "key-1", Prefix: "a"},
                {APIKey: "key-2", Prefix: "b"},
                {APIKey: "key-3", Prefix: "c"},
            },
            wantLen: 3,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            synth := NewConfigSynthesizer()
            ctx := &SynthesisContext{
                Config: &config.Config{
                    GeminiKey: tt.geminiKeys,
                },
                Now:         time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC),
                IDGenerator: NewStableIDGenerator(),
            }

            auths, err := synth.Synthesize(ctx)
            if err != nil {
                t.Fatalf("unexpected error: %v", err)
            }
            if len(auths) != tt.wantLen {
                t.Fatalf("expected %d auths, got %d", tt.wantLen, len(auths))
            }

            if tt.validate != nil && len(auths) > 0 {
                tt.validate(t, auths)
            }
        })
    }
}

func TestConfigSynthesizer_ClaudeKeys(t *testing.T) {
    synth := NewConfigSynthesizer()
    ctx := &SynthesisContext{
        Config: &config.Config{
            ClaudeKey: []config.ClaudeKey{
                {
                    APIKey:  "sk-ant-api-xxx",
                    Prefix:  "main",
                    BaseURL: "https://api.anthropic.com",
                    Models: []config.ClaudeModel{
                        {Name: "claude-3-opus"},
                        {Name: "claude-3-sonnet"},
                    },
                },
            },
        },
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth, got %d", len(auths))
    }

    if auths[0].Provider != "claude" {
        t.Errorf("expected provider claude, got %s", auths[0].Provider)
    }
    if auths[0].Label != "claude-apikey" {
        t.Errorf("expected label claude-apikey, got %s", auths[0].Label)
    }
    if auths[0].Prefix != "main" {
        t.Errorf("expected prefix main, got %s", auths[0].Prefix)
    }
    if auths[0].Attributes["api_key"] != "sk-ant-api-xxx" {
        t.Errorf("expected api_key sk-ant-api-xxx, got %s", auths[0].Attributes["api_key"])
    }
    if _, ok := auths[0].Attributes["models_hash"]; !ok {
        t.Error("expected models_hash in attributes")
    }
}

func TestConfigSynthesizer_ClaudeKeys_SkipsEmptyAndHeaders(t *testing.T) {
    synth := NewConfigSynthesizer()
    ctx := &SynthesisContext{
        Config: &config.Config{
            ClaudeKey: []config.ClaudeKey{
                {APIKey: ""},  // empty, should be skipped
                {APIKey: " "}, // whitespace, should be skipped
                {APIKey: "valid-key", Headers: map[string]string{"X-Custom": "value"}},
            },
        },
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth (empty keys skipped), got %d", len(auths))
    }
    if auths[0].Attributes["header:X-Custom"] != "value" {
        t.Errorf("expected header:X-Custom=value, got %s", auths[0].Attributes["header:X-Custom"])
    }
}

func TestConfigSynthesizer_CodexKeys(t *testing.T) {
    synth := NewConfigSynthesizer()
    ctx := &SynthesisContext{
        Config: &config.Config{
            CodexKey: []config.CodexKey{
                {
                    APIKey:   "codex-key-123",
                    Prefix:   "dev",
                    BaseURL:  "https://api.openai.com",
                    ProxyURL: "http://proxy.local",
                },
            },
        },
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth, got %d", len(auths))
    }

    if auths[0].Provider != "codex" {
        t.Errorf("expected provider codex, got %s", auths[0].Provider)
    }
    if auths[0].Label != "codex-apikey" {
        t.Errorf("expected label codex-apikey, got %s", auths[0].Label)
    }
    if auths[0].ProxyURL != "http://proxy.local" {
        t.Errorf("expected proxy_url http://proxy.local, got %s", auths[0].ProxyURL)
    }
}

func TestConfigSynthesizer_CodexKeys_SkipsEmptyAndHeaders(t *testing.T) {
    synth := NewConfigSynthesizer()
    ctx := &SynthesisContext{
        Config: &config.Config{
            CodexKey: []config.CodexKey{
                {APIKey: ""},  // empty, should be skipped
                {APIKey: " "}, // whitespace, should be skipped
                {APIKey: "valid-key", Headers: map[string]string{"Authorization": "Bearer xyz"}},
            },
        },
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth (empty keys skipped), got %d", len(auths))
    }
    if auths[0].Attributes["header:Authorization"] != "Bearer xyz" {
        t.Errorf("expected header:Authorization=Bearer xyz, got %s", auths[0].Attributes["header:Authorization"])
    }
}

func TestConfigSynthesizer_OpenAICompat(t *testing.T) {
    tests := []struct {
        name    string
        compat  []config.OpenAICompatibility
        wantLen int
    }{
        {
            name: "with APIKeyEntries",
            compat: []config.OpenAICompatibility{
                {
                    Name:    "CustomProvider",
                    BaseURL: "https://custom.api.com",
                    APIKeyEntries: []config.OpenAICompatibilityAPIKey{
                        {APIKey: "key-1"},
                        {APIKey: "key-2"},
                    },
                },
            },
            wantLen: 2,
        },
        {
            name: "empty APIKeyEntries included (legacy)",
            compat: []config.OpenAICompatibility{
                {
                    Name:    "EmptyKeys",
                    BaseURL: "https://empty.api.com",
                    APIKeyEntries: []config.OpenAICompatibilityAPIKey{
                        {APIKey: ""},
                        {APIKey: " "},
                    },
                },
            },
            wantLen: 2,
        },
        {
            name: "without APIKeyEntries (fallback)",
            compat: []config.OpenAICompatibility{
                {
                    Name:    "NoKeyProvider",
                    BaseURL: "https://no-key.api.com",
                },
            },
            wantLen: 1,
        },
        {
            name: "empty name defaults",
            compat: []config.OpenAICompatibility{
                {
                    Name:    "",
                    BaseURL: "https://default.api.com",
                },
            },
            wantLen: 1,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            synth := NewConfigSynthesizer()
            ctx := &SynthesisContext{
                Config: &config.Config{
                    OpenAICompatibility: tt.compat,
                },
                Now:         time.Now(),
                IDGenerator: NewStableIDGenerator(),
            }

            auths, err := synth.Synthesize(ctx)
            if err != nil {
                t.Fatalf("unexpected error: %v", err)
            }
            if len(auths) != tt.wantLen {
                t.Fatalf("expected %d auths, got %d", tt.wantLen, len(auths))
            }
        })
    }
}

func TestConfigSynthesizer_VertexCompat(t *testing.T) {
    synth := NewConfigSynthesizer()
    ctx := &SynthesisContext{
        Config: &config.Config{
            VertexCompatAPIKey: []config.VertexCompatKey{
                {
                    APIKey:  "vertex-key-123",
                    BaseURL: "https://vertex.googleapis.com",
                    Prefix:  "vertex-prod",
                },
            },
        },
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth, got %d", len(auths))
    }

    if auths[0].Provider != "vertex" {
        t.Errorf("expected provider vertex, got %s", auths[0].Provider)
    }
    if auths[0].Label != "vertex-apikey" {
        t.Errorf("expected label vertex-apikey, got %s", auths[0].Label)
    }
    if auths[0].Prefix != "vertex-prod" {
        t.Errorf("expected prefix vertex-prod, got %s", auths[0].Prefix)
    }
}

func TestConfigSynthesizer_VertexCompat_SkipsEmptyAndHeaders(t *testing.T) {
    synth := NewConfigSynthesizer()
    ctx := &SynthesisContext{
        Config: &config.Config{
            VertexCompatAPIKey: []config.VertexCompatKey{
                {APIKey: "", BaseURL: "https://vertex.api"},  // empty key creates auth without api_key attr
                {APIKey: " ", BaseURL: "https://vertex.api"}, // whitespace key creates auth without api_key attr
                {APIKey: "valid-key", BaseURL: "https://vertex.api", Headers: map[string]string{"X-Vertex": "test"}},
            },
        },
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    // Vertex compat doesn't skip empty keys - it creates auths without api_key attribute
    if len(auths) != 3 {
        t.Fatalf("expected 3 auths, got %d", len(auths))
    }
    // First two should not have api_key attribute
    if _, ok := auths[0].Attributes["api_key"]; ok {
        t.Error("expected first auth to not have api_key attribute")
    }
    if _, ok := auths[1].Attributes["api_key"]; ok {
        t.Error("expected second auth to not have api_key attribute")
    }
    // Third should have headers
    if auths[2].Attributes["header:X-Vertex"] != "test" {
        t.Errorf("expected header:X-Vertex=test, got %s", auths[2].Attributes["header:X-Vertex"])
    }
}

func TestConfigSynthesizer_OpenAICompat_WithModelsHash(t *testing.T) {
    synth := NewConfigSynthesizer()
    ctx := &SynthesisContext{
        Config: &config.Config{
            OpenAICompatibility: []config.OpenAICompatibility{
                {
                    Name:    "TestProvider",
                    BaseURL: "https://test.api.com",
                    Models: []config.OpenAICompatibilityModel{
                        {Name: "model-a"},
                        {Name: "model-b"},
                    },
                    APIKeyEntries: []config.OpenAICompatibilityAPIKey{
                        {APIKey: "key-with-models"},
                    },
                },
            },
        },
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth, got %d", len(auths))
    }
    if _, ok := auths[0].Attributes["models_hash"]; !ok {
        t.Error("expected models_hash in attributes")
    }
    if auths[0].Attributes["api_key"] != "key-with-models" {
        t.Errorf("expected api_key key-with-models, got %s", auths[0].Attributes["api_key"])
    }
}

func TestConfigSynthesizer_OpenAICompat_FallbackWithModels(t *testing.T) {
    synth := NewConfigSynthesizer()
    ctx := &SynthesisContext{
        Config: &config.Config{
            OpenAICompatibility: []config.OpenAICompatibility{
                {
                    Name:    "NoKeyWithModels",
                    BaseURL: "https://nokey.api.com",
                    Models: []config.OpenAICompatibilityModel{
                        {Name: "model-x"},
                    },
                    Headers: map[string]string{"X-API": "header-value"},
                    // No APIKeyEntries - should use fallback path
                },
            },
        },
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth, got %d", len(auths))
    }
    if _, ok := auths[0].Attributes["models_hash"]; !ok {
        t.Error("expected models_hash in fallback path")
    }
    if auths[0].Attributes["header:X-API"] != "header-value" {
        t.Errorf("expected header:X-API=header-value, got %s", auths[0].Attributes["header:X-API"])
    }
}

func TestConfigSynthesizer_VertexCompat_WithModels(t *testing.T) {
    synth := NewConfigSynthesizer()
    ctx := &SynthesisContext{
        Config: &config.Config{
            VertexCompatAPIKey: []config.VertexCompatKey{
                {
                    APIKey:  "vertex-key",
                    BaseURL: "https://vertex.api",
                    Models: []config.VertexCompatModel{
                        {Name: "gemini-pro", Alias: "pro"},
                        {Name: "gemini-ultra", Alias: "ultra"},
                    },
                },
            },
        },
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth, got %d", len(auths))
    }
    if _, ok := auths[0].Attributes["models_hash"]; !ok {
        t.Error("expected models_hash in vertex auth with models")
    }
}

func TestConfigSynthesizer_IDStability(t *testing.T) {
    cfg := &config.Config{
        GeminiKey: []config.GeminiKey{
            {APIKey: "stable-key", Prefix: "test"},
        },
    }

    // Generate IDs twice with fresh generators
    synth1 := NewConfigSynthesizer()
    ctx1 := &SynthesisContext{
        Config:      cfg,
        Now:         time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC),
        IDGenerator: NewStableIDGenerator(),
    }
    auths1, _ := synth1.Synthesize(ctx1)

    synth2 := NewConfigSynthesizer()
    ctx2 := &SynthesisContext{
        Config:      cfg,
        Now:         time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC),
        IDGenerator: NewStableIDGenerator(),
    }
    auths2, _ := synth2.Synthesize(ctx2)

    if auths1[0].ID != auths2[0].ID {
        t.Errorf("same config should produce same ID: got %q and %q", auths1[0].ID, auths2[0].ID)
    }
}

func TestConfigSynthesizer_AllProviders(t *testing.T) {
    synth := NewConfigSynthesizer()
    ctx := &SynthesisContext{
        Config: &config.Config{
            GeminiKey: []config.GeminiKey{
                {APIKey: "gemini-key"},
            },
            ClaudeKey: []config.ClaudeKey{
                {APIKey: "claude-key"},
            },
            CodexKey: []config.CodexKey{
                {APIKey: "codex-key"},
            },
            OpenAICompatibility: []config.OpenAICompatibility{
                {Name: "compat", BaseURL: "https://compat.api"},
            },
            VertexCompatAPIKey: []config.VertexCompatKey{
                {APIKey: "vertex-key", BaseURL: "https://vertex.api"},
            },
        },
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 5 {
        t.Fatalf("expected 5 auths, got %d", len(auths))
    }

    providers := make(map[string]bool)
    for _, a := range auths {
        providers[a.Provider] = true
    }

    expected := []string{"gemini", "claude", "codex", "compat", "vertex"}
    for _, p := range expected {
        if !providers[p] {
            t.Errorf("expected provider %s not found", p)
        }
    }
}
internal/watcher/synthesizer/context.go (new file, 19 lines)
@@ -0,0 +1,19 @@
package synthesizer

import (
    "time"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)

// SynthesisContext provides the context needed for auth synthesis.
type SynthesisContext struct {
    // Config is the current configuration
    Config *config.Config
    // AuthDir is the directory containing auth files
    AuthDir string
    // Now is the current time for timestamps
    Now time.Time
    // IDGenerator generates stable IDs for auth entries
    IDGenerator *StableIDGenerator
}
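Because both the timestamp and the ID generator are injected through SynthesisContext, synthesis is deterministic for a given configuration (this is what TestConfigSynthesizer_IDStability above exercises). A minimal sketch of that property, illustrative only:

package synthesizer

import (
    "time"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)

// demoStableIDs shows that two independent runs over the same config and the
// same injected time produce the same auth ID.
func demoStableIDs() bool {
    cfg := &config.Config{GeminiKey: []config.GeminiKey{{APIKey: "stable-key"}}}
    fixed := time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC)

    a1, _ := NewConfigSynthesizer().Synthesize(&SynthesisContext{Config: cfg, Now: fixed, IDGenerator: NewStableIDGenerator()})
    a2, _ := NewConfigSynthesizer().Synthesize(&SynthesisContext{Config: cfg, Now: fixed, IDGenerator: NewStableIDGenerator()})
    return a1[0].ID == a2[0].ID // expected: true
}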
internal/watcher/synthesizer/file.go (new file, 224 lines)
@@ -0,0 +1,224 @@
package synthesizer

import (
    "encoding/json"
    "fmt"
    "os"
    "path/filepath"
    "strings"
    "time"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/runtime/geminicli"
    coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

// FileSynthesizer generates Auth entries from OAuth JSON files.
// It handles file-based authentication and Gemini virtual auth generation.
type FileSynthesizer struct{}

// NewFileSynthesizer creates a new FileSynthesizer instance.
func NewFileSynthesizer() *FileSynthesizer {
    return &FileSynthesizer{}
}

// Synthesize generates Auth entries from auth files in the auth directory.
func (s *FileSynthesizer) Synthesize(ctx *SynthesisContext) ([]*coreauth.Auth, error) {
    out := make([]*coreauth.Auth, 0, 16)
    if ctx == nil || ctx.AuthDir == "" {
        return out, nil
    }

    entries, err := os.ReadDir(ctx.AuthDir)
    if err != nil {
        // Not an error if directory doesn't exist
        return out, nil
    }

    now := ctx.Now
    cfg := ctx.Config

    for _, e := range entries {
        if e.IsDir() {
            continue
        }
        name := e.Name()
        if !strings.HasSuffix(strings.ToLower(name), ".json") {
            continue
        }
        full := filepath.Join(ctx.AuthDir, name)
        data, errRead := os.ReadFile(full)
        if errRead != nil || len(data) == 0 {
            continue
        }
        var metadata map[string]any
        if errUnmarshal := json.Unmarshal(data, &metadata); errUnmarshal != nil {
            continue
        }
        t, _ := metadata["type"].(string)
        if t == "" {
            continue
        }
        provider := strings.ToLower(t)
        if provider == "gemini" {
            provider = "gemini-cli"
        }
        label := provider
        if email, _ := metadata["email"].(string); email != "" {
            label = email
        }
        // Use relative path under authDir as ID to stay consistent with the file-based token store
        id := full
        if rel, errRel := filepath.Rel(ctx.AuthDir, full); errRel == nil && rel != "" {
            id = rel
        }

        proxyURL := ""
        if p, ok := metadata["proxy_url"].(string); ok {
            proxyURL = p
        }

        prefix := ""
        if rawPrefix, ok := metadata["prefix"].(string); ok {
            trimmed := strings.TrimSpace(rawPrefix)
            trimmed = strings.Trim(trimmed, "/")
            if trimmed != "" && !strings.Contains(trimmed, "/") {
                prefix = trimmed
            }
        }

        a := &coreauth.Auth{
            ID:       id,
            Provider: provider,
            Label:    label,
            Prefix:   prefix,
            Status:   coreauth.StatusActive,
            Attributes: map[string]string{
                "source": full,
                "path":   full,
            },
            ProxyURL:  proxyURL,
            Metadata:  metadata,
            CreatedAt: now,
            UpdatedAt: now,
        }
        ApplyAuthExcludedModelsMeta(a, cfg, nil, "oauth")
        if provider == "gemini-cli" {
            if virtuals := SynthesizeGeminiVirtualAuths(a, metadata, now); len(virtuals) > 0 {
                for _, v := range virtuals {
                    ApplyAuthExcludedModelsMeta(v, cfg, nil, "oauth")
                }
                out = append(out, a)
                out = append(out, virtuals...)
                continue
            }
        }
        out = append(out, a)
    }
    return out, nil
}

// SynthesizeGeminiVirtualAuths creates virtual Auth entries for multi-project Gemini credentials.
// It disables the primary auth and creates one virtual auth per project.
func SynthesizeGeminiVirtualAuths(primary *coreauth.Auth, metadata map[string]any, now time.Time) []*coreauth.Auth {
    if primary == nil || metadata == nil {
        return nil
    }
    projects := splitGeminiProjectIDs(metadata)
    if len(projects) <= 1 {
        return nil
    }
    email, _ := metadata["email"].(string)
    shared := geminicli.NewSharedCredential(primary.ID, email, metadata, projects)
    primary.Disabled = true
    primary.Status = coreauth.StatusDisabled
    primary.Runtime = shared
    if primary.Attributes == nil {
        primary.Attributes = make(map[string]string)
    }
    primary.Attributes["gemini_virtual_primary"] = "true"
    primary.Attributes["virtual_children"] = strings.Join(projects, ",")
    source := primary.Attributes["source"]
    authPath := primary.Attributes["path"]
    originalProvider := primary.Provider
    if originalProvider == "" {
        originalProvider = "gemini-cli"
    }
    label := primary.Label
    if label == "" {
        label = originalProvider
    }
    virtuals := make([]*coreauth.Auth, 0, len(projects))
    for _, projectID := range projects {
        attrs := map[string]string{
            "runtime_only":           "true",
            "gemini_virtual_parent":  primary.ID,
            "gemini_virtual_project": projectID,
        }
        if source != "" {
            attrs["source"] = source
        }
        if authPath != "" {
            attrs["path"] = authPath
        }
        metadataCopy := map[string]any{
            "email":             email,
            "project_id":        projectID,
            "virtual":           true,
            "virtual_parent_id": primary.ID,
            "type":              metadata["type"],
        }
        proxy := strings.TrimSpace(primary.ProxyURL)
        if proxy != "" {
            metadataCopy["proxy_url"] = proxy
        }
        virtual := &coreauth.Auth{
            ID:         buildGeminiVirtualID(primary.ID, projectID),
            Provider:   originalProvider,
            Label:      fmt.Sprintf("%s [%s]", label, projectID),
            Status:     coreauth.StatusActive,
            Attributes: attrs,
            Metadata:   metadataCopy,
            ProxyURL:   primary.ProxyURL,
            Prefix:     primary.Prefix,
            CreatedAt:  primary.CreatedAt,
            UpdatedAt:  primary.UpdatedAt,
            Runtime:    geminicli.NewVirtualCredential(projectID, shared),
        }
        virtuals = append(virtuals, virtual)
    }
    return virtuals
}

// splitGeminiProjectIDs extracts and deduplicates project IDs from metadata.
func splitGeminiProjectIDs(metadata map[string]any) []string {
    raw, _ := metadata["project_id"].(string)
    trimmed := strings.TrimSpace(raw)
    if trimmed == "" {
        return nil
    }
    parts := strings.Split(trimmed, ",")
    result := make([]string, 0, len(parts))
    seen := make(map[string]struct{}, len(parts))
    for _, part := range parts {
        id := strings.TrimSpace(part)
        if id == "" {
            continue
        }
        if _, ok := seen[id]; ok {
            continue
        }
        seen[id] = struct{}{}
        result = append(result, id)
    }
    return result
}

// buildGeminiVirtualID constructs a virtual auth ID from base ID and project ID.
func buildGeminiVirtualID(baseID, projectID string) string {
    project := strings.TrimSpace(projectID)
    if project == "" {
        project = "project"
    }
    replacer := strings.NewReplacer("/", "_", "\\", "_", " ", "_")
    return fmt.Sprintf("%s::%s", baseID, replacer.Replace(project))
}
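When a gemini-cli credential file carries a comma-separated project_id, SynthesizeGeminiVirtualAuths above disables the primary entry and fans it out into one virtual auth per project. A small sketch of that behavior; the file name, email, and project IDs are made up for illustration:

package synthesizer

import (
    "fmt"
    "time"

    coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

// demoVirtualAuths is illustrative only: a primary gemini-cli auth whose
// metadata lists two projects becomes two virtual auths, and the primary
// is disabled.
func demoVirtualAuths() {
    primary := &coreauth.Auth{
        ID:       "gemini-user.json",
        Provider: "gemini-cli",
        Label:    "user@example.com",
    }
    metadata := map[string]any{
        "type":       "gemini",
        "email":      "user@example.com",
        "project_id": "project-a, project-b",
    }

    virtuals := SynthesizeGeminiVirtualAuths(primary, metadata, time.Now())
    for _, v := range virtuals {
        // e.g. "gemini-user.json::project-a" with label "user@example.com [project-a]"
        fmt.Println(v.ID, v.Label)
    }
    fmt.Println(primary.Disabled) // true: requests are served via the virtual entries
}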
internal/watcher/synthesizer/file_test.go (new file, 612 lines)
@@ -0,0 +1,612 @@
package synthesizer

import (
    "encoding/json"
    "os"
    "path/filepath"
    "strings"
    "testing"
    "time"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/config"
    coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

func TestNewFileSynthesizer(t *testing.T) {
    synth := NewFileSynthesizer()
    if synth == nil {
        t.Fatal("expected non-nil synthesizer")
    }
}

func TestFileSynthesizer_Synthesize_NilContext(t *testing.T) {
    synth := NewFileSynthesizer()
    auths, err := synth.Synthesize(nil)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 0 {
        t.Fatalf("expected empty auths, got %d", len(auths))
    }
}

func TestFileSynthesizer_Synthesize_EmptyAuthDir(t *testing.T) {
    synth := NewFileSynthesizer()
    ctx := &SynthesisContext{
        Config:      &config.Config{},
        AuthDir:     "",
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }
    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 0 {
        t.Fatalf("expected empty auths, got %d", len(auths))
    }
}

func TestFileSynthesizer_Synthesize_NonExistentDir(t *testing.T) {
    synth := NewFileSynthesizer()
    ctx := &SynthesisContext{
        Config:      &config.Config{},
        AuthDir:     "/non/existent/path",
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }
    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 0 {
        t.Fatalf("expected empty auths, got %d", len(auths))
    }
}

func TestFileSynthesizer_Synthesize_ValidAuthFile(t *testing.T) {
    tempDir := t.TempDir()

    // Create a valid auth file
    authData := map[string]any{
        "type":      "claude",
        "email":     "test@example.com",
        "proxy_url": "http://proxy.local",
        "prefix":    "test-prefix",
    }
    data, _ := json.Marshal(authData)
    err := os.WriteFile(filepath.Join(tempDir, "claude-auth.json"), data, 0644)
    if err != nil {
        t.Fatalf("failed to write auth file: %v", err)
    }

    synth := NewFileSynthesizer()
    ctx := &SynthesisContext{
        Config:      &config.Config{},
        AuthDir:     tempDir,
        Now:         time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth, got %d", len(auths))
    }

    if auths[0].Provider != "claude" {
        t.Errorf("expected provider claude, got %s", auths[0].Provider)
    }
    if auths[0].Label != "test@example.com" {
        t.Errorf("expected label test@example.com, got %s", auths[0].Label)
    }
    if auths[0].Prefix != "test-prefix" {
        t.Errorf("expected prefix test-prefix, got %s", auths[0].Prefix)
    }
    if auths[0].ProxyURL != "http://proxy.local" {
        t.Errorf("expected proxy_url http://proxy.local, got %s", auths[0].ProxyURL)
    }
    if auths[0].Status != coreauth.StatusActive {
        t.Errorf("expected status active, got %s", auths[0].Status)
    }
}

func TestFileSynthesizer_Synthesize_GeminiProviderMapping(t *testing.T) {
    tempDir := t.TempDir()

    // Gemini type should be mapped to gemini-cli
    authData := map[string]any{
        "type":  "gemini",
        "email": "gemini@example.com",
    }
    data, _ := json.Marshal(authData)
    err := os.WriteFile(filepath.Join(tempDir, "gemini-auth.json"), data, 0644)
    if err != nil {
        t.Fatalf("failed to write auth file: %v", err)
    }

    synth := NewFileSynthesizer()
    ctx := &SynthesisContext{
        Config:      &config.Config{},
        AuthDir:     tempDir,
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth, got %d", len(auths))
    }

    if auths[0].Provider != "gemini-cli" {
        t.Errorf("gemini should be mapped to gemini-cli, got %s", auths[0].Provider)
    }
}

func TestFileSynthesizer_Synthesize_SkipsInvalidFiles(t *testing.T) {
    tempDir := t.TempDir()

    // Create various invalid files
    _ = os.WriteFile(filepath.Join(tempDir, "not-json.txt"), []byte("text content"), 0644)
    _ = os.WriteFile(filepath.Join(tempDir, "invalid.json"), []byte("not valid json"), 0644)
    _ = os.WriteFile(filepath.Join(tempDir, "empty.json"), []byte(""), 0644)
    _ = os.WriteFile(filepath.Join(tempDir, "no-type.json"), []byte(`{"email": "test@example.com"}`), 0644)

    // Create one valid file
    validData, _ := json.Marshal(map[string]any{"type": "claude", "email": "valid@example.com"})
    _ = os.WriteFile(filepath.Join(tempDir, "valid.json"), validData, 0644)

    synth := NewFileSynthesizer()
    ctx := &SynthesisContext{
        Config:      &config.Config{},
        AuthDir:     tempDir,
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("only valid auth file should be processed, got %d", len(auths))
    }
    if auths[0].Label != "valid@example.com" {
        t.Errorf("expected label valid@example.com, got %s", auths[0].Label)
    }
}

func TestFileSynthesizer_Synthesize_SkipsDirectories(t *testing.T) {
    tempDir := t.TempDir()

    // Create a subdirectory with a json file inside
    subDir := filepath.Join(tempDir, "subdir.json")
    err := os.Mkdir(subDir, 0755)
    if err != nil {
        t.Fatalf("failed to create subdir: %v", err)
    }

    // Create a valid file in root
    validData, _ := json.Marshal(map[string]any{"type": "claude"})
    _ = os.WriteFile(filepath.Join(tempDir, "valid.json"), validData, 0644)

    synth := NewFileSynthesizer()
    ctx := &SynthesisContext{
        Config:      &config.Config{},
        AuthDir:     tempDir,
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth, got %d", len(auths))
    }
}

func TestFileSynthesizer_Synthesize_RelativeID(t *testing.T) {
    tempDir := t.TempDir()

    authData := map[string]any{"type": "claude"}
    data, _ := json.Marshal(authData)
    err := os.WriteFile(filepath.Join(tempDir, "my-auth.json"), data, 0644)
    if err != nil {
        t.Fatalf("failed to write auth file: %v", err)
    }

    synth := NewFileSynthesizer()
    ctx := &SynthesisContext{
        Config:      &config.Config{},
        AuthDir:     tempDir,
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, err := synth.Synthesize(ctx)
    if err != nil {
        t.Fatalf("unexpected error: %v", err)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth, got %d", len(auths))
    }

    // ID should be relative path
    if auths[0].ID != "my-auth.json" {
        t.Errorf("expected ID my-auth.json, got %s", auths[0].ID)
    }
}

func TestFileSynthesizer_Synthesize_PrefixValidation(t *testing.T) {
    tests := []struct {
        name       string
        prefix     string
        wantPrefix string
    }{
        {"valid prefix", "myprefix", "myprefix"},
        {"prefix with slashes trimmed", "/myprefix/", "myprefix"},
        {"prefix with spaces trimmed", " myprefix ", "myprefix"},
        {"prefix with internal slash rejected", "my/prefix", ""},
        {"empty prefix", "", ""},
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            tempDir := t.TempDir()
            authData := map[string]any{
                "type":   "claude",
                "prefix": tt.prefix,
            }
            data, _ := json.Marshal(authData)
            _ = os.WriteFile(filepath.Join(tempDir, "auth.json"), data, 0644)

            synth := NewFileSynthesizer()
            ctx := &SynthesisContext{
                Config:      &config.Config{},
                AuthDir:     tempDir,
                Now:         time.Now(),
                IDGenerator: NewStableIDGenerator(),
            }

            auths, err := synth.Synthesize(ctx)
            if err != nil {
                t.Fatalf("unexpected error: %v", err)
            }
            if len(auths) != 1 {
                t.Fatalf("expected 1 auth, got %d", len(auths))
            }
            if auths[0].Prefix != tt.wantPrefix {
                t.Errorf("expected prefix %q, got %q", tt.wantPrefix, auths[0].Prefix)
            }
        })
    }
}

func TestSynthesizeGeminiVirtualAuths_NilInputs(t *testing.T) {
    now := time.Now()

    if SynthesizeGeminiVirtualAuths(nil, nil, now) != nil {
        t.Error("expected nil for nil primary")
    }
    if SynthesizeGeminiVirtualAuths(&coreauth.Auth{}, nil, now) != nil {
        t.Error("expected nil for nil metadata")
    }
    if SynthesizeGeminiVirtualAuths(nil, map[string]any{}, now) != nil {
        t.Error("expected nil for nil primary with metadata")
    }
}

func TestSynthesizeGeminiVirtualAuths_SingleProject(t *testing.T) {
    now := time.Now()
    primary := &coreauth.Auth{
        ID:       "test-id",
        Provider: "gemini-cli",
        Label:    "test@example.com",
    }
    metadata := map[string]any{
        "project_id": "single-project",
        "email":      "test@example.com",
        "type":       "gemini",
    }

    virtuals := SynthesizeGeminiVirtualAuths(primary, metadata, now)
    if virtuals != nil {
        t.Error("single project should not create virtuals")
    }
}

func TestSynthesizeGeminiVirtualAuths_MultiProject(t *testing.T) {
    now := time.Now()
    primary := &coreauth.Auth{
        ID:       "primary-id",
        Provider: "gemini-cli",
        Label:    "test@example.com",
|
||||||
|
Prefix: "test-prefix",
|
||||||
|
ProxyURL: "http://proxy.local",
|
||||||
|
Attributes: map[string]string{
|
||||||
|
"source": "test-source",
|
||||||
|
"path": "/path/to/auth",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
metadata := map[string]any{
|
||||||
|
"project_id": "project-a, project-b, project-c",
|
||||||
|
"email": "test@example.com",
|
||||||
|
"type": "gemini",
|
||||||
|
}
|
||||||
|
|
||||||
|
virtuals := SynthesizeGeminiVirtualAuths(primary, metadata, now)
|
||||||
|
|
||||||
|
if len(virtuals) != 3 {
|
||||||
|
t.Fatalf("expected 3 virtuals, got %d", len(virtuals))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check primary is disabled
|
||||||
|
if !primary.Disabled {
|
||||||
|
t.Error("expected primary to be disabled")
|
||||||
|
}
|
||||||
|
if primary.Status != coreauth.StatusDisabled {
|
||||||
|
t.Errorf("expected primary status disabled, got %s", primary.Status)
|
||||||
|
}
|
||||||
|
if primary.Attributes["gemini_virtual_primary"] != "true" {
|
||||||
|
t.Error("expected gemini_virtual_primary=true")
|
||||||
|
}
|
||||||
|
if !strings.Contains(primary.Attributes["virtual_children"], "project-a") {
|
||||||
|
t.Error("expected virtual_children to contain project-a")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check virtuals
|
||||||
|
projectIDs := []string{"project-a", "project-b", "project-c"}
|
||||||
|
for i, v := range virtuals {
|
||||||
|
if v.Provider != "gemini-cli" {
|
||||||
|
t.Errorf("expected provider gemini-cli, got %s", v.Provider)
|
||||||
|
}
|
||||||
|
if v.Status != coreauth.StatusActive {
|
||||||
|
t.Errorf("expected status active, got %s", v.Status)
|
||||||
|
}
|
||||||
|
if v.Prefix != "test-prefix" {
|
||||||
|
t.Errorf("expected prefix test-prefix, got %s", v.Prefix)
|
||||||
|
}
|
||||||
|
if v.ProxyURL != "http://proxy.local" {
|
||||||
|
t.Errorf("expected proxy_url http://proxy.local, got %s", v.ProxyURL)
|
||||||
|
}
|
||||||
|
if v.Attributes["runtime_only"] != "true" {
|
||||||
|
t.Error("expected runtime_only=true")
|
||||||
|
}
|
||||||
|
if v.Attributes["gemini_virtual_parent"] != "primary-id" {
|
||||||
|
t.Errorf("expected gemini_virtual_parent=primary-id, got %s", v.Attributes["gemini_virtual_parent"])
|
||||||
|
}
|
||||||
|
if v.Attributes["gemini_virtual_project"] != projectIDs[i] {
|
||||||
|
t.Errorf("expected gemini_virtual_project=%s, got %s", projectIDs[i], v.Attributes["gemini_virtual_project"])
|
||||||
|
}
|
||||||
|
if !strings.Contains(v.Label, "["+projectIDs[i]+"]") {
|
||||||
|
t.Errorf("expected label to contain [%s], got %s", projectIDs[i], v.Label)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSynthesizeGeminiVirtualAuths_EmptyProviderAndLabel(t *testing.T) {
|
||||||
|
now := time.Now()
|
||||||
|
// Test with empty Provider and Label to cover fallback branches
|
||||||
|
primary := &coreauth.Auth{
|
||||||
|
ID: "primary-id",
|
||||||
|
Provider: "", // empty provider - should default to gemini-cli
|
||||||
|
Label: "", // empty label - should default to provider
|
||||||
|
Attributes: map[string]string{},
|
||||||
|
}
|
||||||
|
metadata := map[string]any{
|
||||||
|
"project_id": "proj-a, proj-b",
|
||||||
|
"email": "user@example.com",
|
||||||
|
"type": "gemini",
|
||||||
|
}
|
||||||
|
|
||||||
|
virtuals := SynthesizeGeminiVirtualAuths(primary, metadata, now)
|
||||||
|
|
||||||
|
if len(virtuals) != 2 {
|
||||||
|
t.Fatalf("expected 2 virtuals, got %d", len(virtuals))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check that empty provider defaults to gemini-cli
|
||||||
|
if virtuals[0].Provider != "gemini-cli" {
|
||||||
|
t.Errorf("expected provider gemini-cli (default), got %s", virtuals[0].Provider)
|
||||||
|
}
|
||||||
|
// Check that empty label defaults to provider
|
||||||
|
if !strings.Contains(virtuals[0].Label, "gemini-cli") {
|
||||||
|
t.Errorf("expected label to contain gemini-cli, got %s", virtuals[0].Label)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSynthesizeGeminiVirtualAuths_NilPrimaryAttributes(t *testing.T) {
|
||||||
|
now := time.Now()
|
||||||
|
primary := &coreauth.Auth{
|
||||||
|
ID: "primary-id",
|
||||||
|
Provider: "gemini-cli",
|
||||||
|
Label: "test@example.com",
|
||||||
|
Attributes: nil, // nil attributes
|
||||||
|
}
|
||||||
|
metadata := map[string]any{
|
||||||
|
"project_id": "proj-a, proj-b",
|
||||||
|
"email": "test@example.com",
|
||||||
|
"type": "gemini",
|
||||||
|
}
|
||||||
|
|
||||||
|
virtuals := SynthesizeGeminiVirtualAuths(primary, metadata, now)
|
||||||
|
|
||||||
|
if len(virtuals) != 2 {
|
||||||
|
t.Fatalf("expected 2 virtuals, got %d", len(virtuals))
|
||||||
|
}
|
||||||
|
// Nil attributes should be initialized
|
||||||
|
if primary.Attributes == nil {
|
||||||
|
t.Error("expected primary.Attributes to be initialized")
|
||||||
|
}
|
||||||
|
if primary.Attributes["gemini_virtual_primary"] != "true" {
|
||||||
|
t.Error("expected gemini_virtual_primary=true")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSplitGeminiProjectIDs(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
metadata map[string]any
|
||||||
|
want []string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "single project",
|
||||||
|
metadata: map[string]any{"project_id": "proj-a"},
|
||||||
|
want: []string{"proj-a"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "multiple projects",
|
||||||
|
metadata: map[string]any{"project_id": "proj-a, proj-b, proj-c"},
|
||||||
|
want: []string{"proj-a", "proj-b", "proj-c"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "with duplicates",
|
||||||
|
metadata: map[string]any{"project_id": "proj-a, proj-b, proj-a"},
|
||||||
|
want: []string{"proj-a", "proj-b"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "with empty parts",
|
||||||
|
metadata: map[string]any{"project_id": "proj-a, , proj-b, "},
|
||||||
|
want: []string{"proj-a", "proj-b"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty project_id",
|
||||||
|
metadata: map[string]any{"project_id": ""},
|
||||||
|
want: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "no project_id",
|
||||||
|
metadata: map[string]any{},
|
||||||
|
want: nil,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "whitespace only",
|
||||||
|
metadata: map[string]any{"project_id": " "},
|
||||||
|
want: nil,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := splitGeminiProjectIDs(tt.metadata)
|
||||||
|
if len(got) != len(tt.want) {
|
||||||
|
t.Fatalf("expected %v, got %v", tt.want, got)
|
||||||
|
}
|
||||||
|
for i := range got {
|
||||||
|
if got[i] != tt.want[i] {
|
||||||
|
t.Errorf("expected %v, got %v", tt.want, got)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestFileSynthesizer_Synthesize_MultiProjectGemini(t *testing.T) {
|
||||||
|
tempDir := t.TempDir()
|
||||||
|
|
||||||
|
// Create a gemini auth file with multiple projects
|
||||||
|
authData := map[string]any{
|
||||||
|
"type": "gemini",
|
||||||
|
"email": "multi@example.com",
|
||||||
|
"project_id": "project-a, project-b, project-c",
|
||||||
|
}
|
||||||
|
data, _ := json.Marshal(authData)
|
||||||
|
err := os.WriteFile(filepath.Join(tempDir, "gemini-multi.json"), data, 0644)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("failed to write auth file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
synth := NewFileSynthesizer()
|
||||||
|
ctx := &SynthesisContext{
|
||||||
|
Config: &config.Config{},
|
||||||
|
AuthDir: tempDir,
|
||||||
|
Now: time.Now(),
|
||||||
|
IDGenerator: NewStableIDGenerator(),
|
||||||
|
}
|
||||||
|
|
||||||
|
auths, err := synth.Synthesize(ctx)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("unexpected error: %v", err)
|
||||||
|
}
|
||||||
|
// Should have 4 auths: 1 primary (disabled) + 3 virtuals
|
||||||
|
if len(auths) != 4 {
|
||||||
|
t.Fatalf("expected 4 auths (1 primary + 3 virtuals), got %d", len(auths))
|
||||||
|
}
|
||||||
|
|
||||||
|
// First auth should be the primary (disabled)
|
||||||
|
primary := auths[0]
|
||||||
|
if !primary.Disabled {
|
||||||
|
t.Error("expected primary to be disabled")
|
||||||
|
}
|
||||||
|
if primary.Status != coreauth.StatusDisabled {
|
||||||
|
t.Errorf("expected primary status disabled, got %s", primary.Status)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remaining auths should be virtuals
|
||||||
|
for i := 1; i < 4; i++ {
|
||||||
|
v := auths[i]
|
||||||
|
if v.Status != coreauth.StatusActive {
|
||||||
|
t.Errorf("expected virtual %d to be active, got %s", i, v.Status)
|
||||||
|
}
|
||||||
|
if v.Attributes["gemini_virtual_parent"] != primary.ID {
|
||||||
|
t.Errorf("expected virtual %d parent to be %s, got %s", i, primary.ID, v.Attributes["gemini_virtual_parent"])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestBuildGeminiVirtualID(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
baseID string
|
||||||
|
projectID string
|
||||||
|
want string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "basic",
|
||||||
|
baseID: "auth.json",
|
||||||
|
projectID: "my-project",
|
||||||
|
want: "auth.json::my-project",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "with slashes",
|
||||||
|
baseID: "path/to/auth.json",
|
||||||
|
projectID: "project/with/slashes",
|
||||||
|
want: "path/to/auth.json::project_with_slashes",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "with spaces",
|
||||||
|
baseID: "auth.json",
|
||||||
|
projectID: "my project",
|
||||||
|
want: "auth.json::my_project",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty project",
|
||||||
|
baseID: "auth.json",
|
||||||
|
projectID: "",
|
||||||
|
want: "auth.json::project",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "whitespace project",
|
||||||
|
baseID: "auth.json",
|
||||||
|
projectID: " ",
|
||||||
|
want: "auth.json::project",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
got := buildGeminiVirtualID(tt.baseID, tt.projectID)
|
||||||
|
if got != tt.want {
|
||||||
|
t.Errorf("expected %q, got %q", tt.want, got)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
internal/watcher/synthesizer/helpers.go (new file, 110 lines)
@@ -0,0 +1,110 @@
package synthesizer

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"sort"
	"strings"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
	coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

// StableIDGenerator generates stable, deterministic IDs for auth entries.
// It uses SHA256 hashing with collision handling via counters.
// It is not safe for concurrent use.
type StableIDGenerator struct {
	counters map[string]int
}

// NewStableIDGenerator creates a new StableIDGenerator instance.
func NewStableIDGenerator() *StableIDGenerator {
	return &StableIDGenerator{counters: make(map[string]int)}
}

// Next generates a stable ID based on the kind and parts.
// Returns the full ID (kind:hash) and the short hash portion.
func (g *StableIDGenerator) Next(kind string, parts ...string) (string, string) {
	if g == nil {
		return kind + ":000000000000", "000000000000"
	}
	hasher := sha256.New()
	hasher.Write([]byte(kind))
	for _, part := range parts {
		trimmed := strings.TrimSpace(part)
		hasher.Write([]byte{0})
		hasher.Write([]byte(trimmed))
	}
	digest := hex.EncodeToString(hasher.Sum(nil))
	if len(digest) < 12 {
		digest = fmt.Sprintf("%012s", digest)
	}
	short := digest[:12]
	key := kind + ":" + short
	index := g.counters[key]
	g.counters[key] = index + 1
	if index > 0 {
		short = fmt.Sprintf("%s-%d", short, index)
	}
	return fmt.Sprintf("%s:%s", kind, short), short
}

// ApplyAuthExcludedModelsMeta applies excluded models metadata to an auth entry.
// It computes a hash of excluded models and sets the auth_kind attribute.
func ApplyAuthExcludedModelsMeta(auth *coreauth.Auth, cfg *config.Config, perKey []string, authKind string) {
	if auth == nil || cfg == nil {
		return
	}
	authKindKey := strings.ToLower(strings.TrimSpace(authKind))
	seen := make(map[string]struct{})
	add := func(list []string) {
		for _, entry := range list {
			if trimmed := strings.TrimSpace(entry); trimmed != "" {
				key := strings.ToLower(trimmed)
				if _, exists := seen[key]; exists {
					continue
				}
				seen[key] = struct{}{}
			}
		}
	}
	if authKindKey == "apikey" {
		add(perKey)
	} else if cfg.OAuthExcludedModels != nil {
		providerKey := strings.ToLower(strings.TrimSpace(auth.Provider))
		add(cfg.OAuthExcludedModels[providerKey])
	}
	combined := make([]string, 0, len(seen))
	for k := range seen {
		combined = append(combined, k)
	}
	sort.Strings(combined)
	hash := diff.ComputeExcludedModelsHash(combined)
	if auth.Attributes == nil {
		auth.Attributes = make(map[string]string)
	}
	if hash != "" {
		auth.Attributes["excluded_models_hash"] = hash
	}
	if authKind != "" {
		auth.Attributes["auth_kind"] = authKind
	}
}

// addConfigHeadersToAttrs adds header configuration to auth attributes.
// Headers are prefixed with "header:" in the attributes map.
func addConfigHeadersToAttrs(headers map[string]string, attrs map[string]string) {
	if len(headers) == 0 || attrs == nil {
		return
	}
	for hk, hv := range headers {
		key := strings.TrimSpace(hk)
		val := strings.TrimSpace(hv)
		if key == "" || val == "" {
			continue
		}
		attrs["header:"+key] = val
	}
}
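A short usage note on the helpers above; this is a minimal sketch of the intended call pattern, where apiKey and baseURL are hypothetical placeholder values rather than names from this change:

	gen := NewStableIDGenerator()
	// Identical kind/parts always hash to the same ID; a repeated identical call gets a "-1" suffix.
	id, token := gen.Next("gemini:apikey", apiKey, baseURL)
	attrs := map[string]string{"source": fmt.Sprintf("config:gemini[%s]", token), "api_key": apiKey}
	// Config-level headers are copied into attributes under a "header:" prefix.
	addConfigHeadersToAttrs(map[string]string{"X-Custom": "value"}, attrs)
	_ = id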
internal/watcher/synthesizer/helpers_test.go (new file, 264 lines)
@@ -0,0 +1,264 @@
package synthesizer

import (
	"reflect"
	"strings"
	"testing"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
	coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

func TestNewStableIDGenerator(t *testing.T) {
	gen := NewStableIDGenerator()
	if gen == nil {
		t.Fatal("expected non-nil generator")
	}
	if gen.counters == nil {
		t.Fatal("expected non-nil counters map")
	}
}

func TestStableIDGenerator_Next(t *testing.T) {
	tests := []struct {
		name       string
		kind       string
		parts      []string
		wantPrefix string
	}{
		{
			name:       "basic gemini apikey",
			kind:       "gemini:apikey",
			parts:      []string{"test-key", ""},
			wantPrefix: "gemini:apikey:",
		},
		{
			name:       "claude with base url",
			kind:       "claude:apikey",
			parts:      []string{"sk-ant-xxx", "https://api.anthropic.com"},
			wantPrefix: "claude:apikey:",
		},
		{
			name:       "empty parts",
			kind:       "codex:apikey",
			parts:      []string{},
			wantPrefix: "codex:apikey:",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gen := NewStableIDGenerator()
			id, short := gen.Next(tt.kind, tt.parts...)

			if !strings.Contains(id, tt.wantPrefix) {
				t.Errorf("expected id to contain %q, got %q", tt.wantPrefix, id)
			}
			if short == "" {
				t.Error("expected non-empty short id")
			}
			if len(short) != 12 {
				t.Errorf("expected short id length 12, got %d", len(short))
			}
		})
	}
}

func TestStableIDGenerator_Stability(t *testing.T) {
	gen1 := NewStableIDGenerator()
	gen2 := NewStableIDGenerator()

	id1, _ := gen1.Next("gemini:apikey", "test-key", "https://api.example.com")
	id2, _ := gen2.Next("gemini:apikey", "test-key", "https://api.example.com")

	if id1 != id2 {
		t.Errorf("same inputs should produce same ID: got %q and %q", id1, id2)
	}
}

func TestStableIDGenerator_CollisionHandling(t *testing.T) {
	gen := NewStableIDGenerator()

	id1, short1 := gen.Next("gemini:apikey", "same-key")
	id2, short2 := gen.Next("gemini:apikey", "same-key")

	if id1 == id2 {
		t.Error("collision should be handled with suffix")
	}
	if short1 == short2 {
		t.Error("short ids should differ")
	}
	if !strings.Contains(short2, "-1") {
		t.Errorf("second short id should contain -1 suffix, got %q", short2)
	}
}

func TestStableIDGenerator_NilReceiver(t *testing.T) {
	var gen *StableIDGenerator = nil
	id, short := gen.Next("test:kind", "part")

	if id != "test:kind:000000000000" {
		t.Errorf("expected test:kind:000000000000, got %q", id)
	}
	if short != "000000000000" {
		t.Errorf("expected 000000000000, got %q", short)
	}
}

func TestApplyAuthExcludedModelsMeta(t *testing.T) {
	tests := []struct {
		name     string
		auth     *coreauth.Auth
		cfg      *config.Config
		perKey   []string
		authKind string
		wantHash bool
		wantKind string
	}{
		{
			name: "apikey with excluded models",
			auth: &coreauth.Auth{
				Provider:   "gemini",
				Attributes: make(map[string]string),
			},
			cfg:      &config.Config{},
			perKey:   []string{"model-a", "model-b"},
			authKind: "apikey",
			wantHash: true,
			wantKind: "apikey",
		},
		{
			name: "oauth with provider excluded models",
			auth: &coreauth.Auth{
				Provider:   "claude",
				Attributes: make(map[string]string),
			},
			cfg: &config.Config{
				OAuthExcludedModels: map[string][]string{
					"claude": {"claude-2.0"},
				},
			},
			perKey:   nil,
			authKind: "oauth",
			wantHash: true,
			wantKind: "oauth",
		},
		{
			name: "nil auth",
			auth: nil,
			cfg:  &config.Config{},
		},
		{
			name:     "nil config",
			auth:     &coreauth.Auth{Provider: "test"},
			cfg:      nil,
			authKind: "apikey",
		},
		{
			name: "nil attributes initialized",
			auth: &coreauth.Auth{
				Provider:   "gemini",
				Attributes: nil,
			},
			cfg:      &config.Config{},
			perKey:   []string{"model-x"},
			authKind: "apikey",
			wantHash: true,
			wantKind: "apikey",
		},
		{
			name: "apikey with duplicate excluded models",
			auth: &coreauth.Auth{
				Provider:   "gemini",
				Attributes: make(map[string]string),
			},
			cfg:      &config.Config{},
			perKey:   []string{"model-a", "MODEL-A", "model-b", "model-a"},
			authKind: "apikey",
			wantHash: true,
			wantKind: "apikey",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ApplyAuthExcludedModelsMeta(tt.auth, tt.cfg, tt.perKey, tt.authKind)

			if tt.auth != nil && tt.cfg != nil {
				if tt.wantHash {
					if _, ok := tt.auth.Attributes["excluded_models_hash"]; !ok {
						t.Error("expected excluded_models_hash in attributes")
					}
				}
				if tt.wantKind != "" {
					if got := tt.auth.Attributes["auth_kind"]; got != tt.wantKind {
						t.Errorf("expected auth_kind=%s, got %s", tt.wantKind, got)
					}
				}
			}
		})
	}
}

func TestAddConfigHeadersToAttrs(t *testing.T) {
	tests := []struct {
		name    string
		headers map[string]string
		attrs   map[string]string
		want    map[string]string
	}{
		{
			name: "basic headers",
			headers: map[string]string{
				"Authorization": "Bearer token",
				"X-Custom":      "value",
			},
			attrs: map[string]string{"existing": "key"},
			want: map[string]string{
				"existing":             "key",
				"header:Authorization": "Bearer token",
				"header:X-Custom":      "value",
			},
		},
		{
			name:    "empty headers",
			headers: map[string]string{},
			attrs:   map[string]string{"existing": "key"},
			want:    map[string]string{"existing": "key"},
		},
		{
			name:    "nil headers",
			headers: nil,
			attrs:   map[string]string{"existing": "key"},
			want:    map[string]string{"existing": "key"},
		},
		{
			name:    "nil attrs",
			headers: map[string]string{"key": "value"},
			attrs:   nil,
			want:    nil,
		},
		{
			name: "skip empty keys and values",
			headers: map[string]string{
				"":      "value",
				"key":   "",
				" ":     "value",
				"valid": "valid-value",
			},
			attrs: make(map[string]string),
			want: map[string]string{
				"header:valid": "valid-value",
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			addConfigHeadersToAttrs(tt.headers, tt.attrs)
			if !reflect.DeepEqual(tt.attrs, tt.want) {
				t.Errorf("expected %v, got %v", tt.want, tt.attrs)
			}
		})
	}
}
internal/watcher/synthesizer/interface.go (new file, 16 lines)
@@ -0,0 +1,16 @@
// Package synthesizer provides auth synthesis strategies for the watcher package.
// It implements the Strategy pattern to support multiple auth sources:
// - ConfigSynthesizer: generates Auth entries from config API keys
// - FileSynthesizer: generates Auth entries from OAuth JSON files
package synthesizer

import (
	coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

// AuthSynthesizer defines the interface for generating Auth entries from various sources.
type AuthSynthesizer interface {
	// Synthesize generates Auth entries from the given context.
	// Returns a slice of Auth pointers and any error encountered.
	Synthesize(ctx *SynthesisContext) ([]*coreauth.Auth, error)
}
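Both implementations are driven through a shared SynthesisContext; a minimal sketch of the call pattern, mirroring the SnapshotCoreAuths change further down (cfg and authDir are placeholder values here):

	ctx := &synthesizer.SynthesisContext{
		Config:      cfg,
		AuthDir:     authDir,
		Now:         time.Now(),
		IDGenerator: synthesizer.NewStableIDGenerator(),
	}
	var out []*coreauth.Auth
	// Run each strategy and collect whatever it synthesizes; errors skip that source.
	for _, s := range []synthesizer.AuthSynthesizer{synthesizer.NewConfigSynthesizer(), synthesizer.NewFileSynthesizer()} {
		if auths, err := s.Synthesize(ctx); err == nil {
			out = append(out, auths...)
		}
	}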
@@ -8,22 +8,20 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"crypto/sha256"
|
"crypto/sha256"
|
||||||
"encoding/hex"
|
"encoding/hex"
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"io/fs"
|
"io/fs"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"reflect"
|
"reflect"
|
||||||
"runtime"
|
"runtime"
|
||||||
"sort"
|
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/fsnotify/fsnotify"
|
"github.com/fsnotify/fsnotify"
|
||||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
|
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
|
||||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/runtime/geminicli"
|
|
||||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
|
"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
|
||||||
|
"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/synthesizer"
|
||||||
"gopkg.in/yaml.v3"
|
"gopkg.in/yaml.v3"
|
||||||
|
|
||||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
|
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
|
||||||
@@ -78,39 +76,6 @@ type Watcher struct {
|
|||||||
oldConfigYaml []byte
|
oldConfigYaml []byte
|
||||||
}
|
}
|
||||||
|
|
||||||
type stableIDGenerator struct {
|
|
||||||
counters map[string]int
|
|
||||||
}
|
|
||||||
|
|
||||||
func newStableIDGenerator() *stableIDGenerator {
|
|
||||||
return &stableIDGenerator{counters: make(map[string]int)}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *stableIDGenerator) next(kind string, parts ...string) (string, string) {
|
|
||||||
if g == nil {
|
|
||||||
return kind + ":000000000000", "000000000000"
|
|
||||||
}
|
|
||||||
hasher := sha256.New()
|
|
||||||
hasher.Write([]byte(kind))
|
|
||||||
for _, part := range parts {
|
|
||||||
trimmed := strings.TrimSpace(part)
|
|
||||||
hasher.Write([]byte{0})
|
|
||||||
hasher.Write([]byte(trimmed))
|
|
||||||
}
|
|
||||||
digest := hex.EncodeToString(hasher.Sum(nil))
|
|
||||||
if len(digest) < 12 {
|
|
||||||
digest = fmt.Sprintf("%012s", digest)
|
|
||||||
}
|
|
||||||
short := digest[:12]
|
|
||||||
key := kind + ":" + short
|
|
||||||
index := g.counters[key]
|
|
||||||
g.counters[key] = index + 1
|
|
||||||
if index > 0 {
|
|
||||||
short = fmt.Sprintf("%s-%d", short, index)
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("%s:%s", kind, short), short
|
|
||||||
}
|
|
||||||
|
|
||||||
// AuthUpdateAction represents the type of change detected in auth sources.
|
// AuthUpdateAction represents the type of change detected in auth sources.
|
||||||
type AuthUpdateAction string
|
type AuthUpdateAction string
|
||||||
|
|
||||||
@@ -486,46 +451,6 @@ func normalizeAuth(a *coreauth.Auth) *coreauth.Auth {
|
|||||||
return clone
|
return clone
|
||||||
}
|
}
|
||||||
|
|
||||||
func applyAuthExcludedModelsMeta(auth *coreauth.Auth, cfg *config.Config, perKey []string, authKind string) {
|
|
||||||
if auth == nil || cfg == nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
authKindKey := strings.ToLower(strings.TrimSpace(authKind))
|
|
||||||
seen := make(map[string]struct{})
|
|
||||||
add := func(list []string) {
|
|
||||||
for _, entry := range list {
|
|
||||||
if trimmed := strings.TrimSpace(entry); trimmed != "" {
|
|
||||||
key := strings.ToLower(trimmed)
|
|
||||||
if _, exists := seen[key]; exists {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
seen[key] = struct{}{}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if authKindKey == "apikey" {
|
|
||||||
add(perKey)
|
|
||||||
} else if cfg.OAuthExcludedModels != nil {
|
|
||||||
providerKey := strings.ToLower(strings.TrimSpace(auth.Provider))
|
|
||||||
add(cfg.OAuthExcludedModels[providerKey])
|
|
||||||
}
|
|
||||||
combined := make([]string, 0, len(seen))
|
|
||||||
for k := range seen {
|
|
||||||
combined = append(combined, k)
|
|
||||||
}
|
|
||||||
sort.Strings(combined)
|
|
||||||
hash := diff.ComputeExcludedModelsHash(combined)
|
|
||||||
if auth.Attributes == nil {
|
|
||||||
auth.Attributes = make(map[string]string)
|
|
||||||
}
|
|
||||||
if hash != "" {
|
|
||||||
auth.Attributes["excluded_models_hash"] = hash
|
|
||||||
}
|
|
||||||
if authKind != "" {
|
|
||||||
auth.Attributes["auth_kind"] = authKind
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// SetClients sets the file-based clients.
|
// SetClients sets the file-based clients.
|
||||||
// SetClients removed
|
// SetClients removed
|
||||||
// SetAPIKeyClients removed
|
// SetAPIKeyClients removed
|
||||||
@@ -970,410 +895,34 @@ func (w *Watcher) removeClient(path string) {
|
|||||||
|
|
||||||
// SnapshotCoreAuths converts current clients snapshot into core auth entries.
|
// SnapshotCoreAuths converts current clients snapshot into core auth entries.
|
||||||
func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
|
func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
|
||||||
out := make([]*coreauth.Auth, 0, 32)
|
|
||||||
now := time.Now()
|
|
||||||
idGen := newStableIDGenerator()
|
|
||||||
// Also synthesize auth entries for OpenAI-compatibility providers directly from config
|
|
||||||
w.clientsMutex.RLock()
|
w.clientsMutex.RLock()
|
||||||
cfg := w.config
|
cfg := w.config
|
||||||
w.clientsMutex.RUnlock()
|
w.clientsMutex.RUnlock()
|
||||||
if cfg != nil {
|
|
||||||
// Gemini official API keys -> synthesize auths
|
ctx := &synthesizer.SynthesisContext{
|
||||||
for i := range cfg.GeminiKey {
|
Config: cfg,
|
||||||
entry := cfg.GeminiKey[i]
|
AuthDir: w.authDir,
|
||||||
key := strings.TrimSpace(entry.APIKey)
|
Now: time.Now(),
|
||||||
if key == "" {
|
IDGenerator: synthesizer.NewStableIDGenerator(),
|
||||||
continue
|
|
||||||
}
|
|
||||||
prefix := strings.TrimSpace(entry.Prefix)
|
|
||||||
base := strings.TrimSpace(entry.BaseURL)
|
|
||||||
proxyURL := strings.TrimSpace(entry.ProxyURL)
|
|
||||||
id, token := idGen.next("gemini:apikey", key, base)
|
|
||||||
attrs := map[string]string{
|
|
||||||
"source": fmt.Sprintf("config:gemini[%s]", token),
|
|
||||||
"api_key": key,
|
|
||||||
}
|
|
||||||
if base != "" {
|
|
||||||
attrs["base_url"] = base
|
|
||||||
}
|
|
||||||
addConfigHeadersToAttrs(entry.Headers, attrs)
|
|
||||||
a := &coreauth.Auth{
|
|
||||||
ID: id,
|
|
||||||
Provider: "gemini",
|
|
||||||
Label: "gemini-apikey",
|
|
||||||
Prefix: prefix,
|
|
||||||
Status: coreauth.StatusActive,
|
|
||||||
ProxyURL: proxyURL,
|
|
||||||
Attributes: attrs,
|
|
||||||
CreatedAt: now,
|
|
||||||
UpdatedAt: now,
|
|
||||||
}
|
|
||||||
applyAuthExcludedModelsMeta(a, cfg, entry.ExcludedModels, "apikey")
|
|
||||||
out = append(out, a)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Claude API keys -> synthesize auths
|
var out []*coreauth.Auth
|
||||||
for i := range cfg.ClaudeKey {
|
|
||||||
ck := cfg.ClaudeKey[i]
|
|
||||||
key := strings.TrimSpace(ck.APIKey)
|
|
||||||
if key == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
prefix := strings.TrimSpace(ck.Prefix)
|
|
||||||
base := strings.TrimSpace(ck.BaseURL)
|
|
||||||
id, token := idGen.next("claude:apikey", key, base)
|
|
||||||
attrs := map[string]string{
|
|
||||||
"source": fmt.Sprintf("config:claude[%s]", token),
|
|
||||||
"api_key": key,
|
|
||||||
}
|
|
||||||
if base != "" {
|
|
||||||
attrs["base_url"] = base
|
|
||||||
}
|
|
||||||
if hash := diff.ComputeClaudeModelsHash(ck.Models); hash != "" {
|
|
||||||
attrs["models_hash"] = hash
|
|
||||||
}
|
|
||||||
addConfigHeadersToAttrs(ck.Headers, attrs)
|
|
||||||
proxyURL := strings.TrimSpace(ck.ProxyURL)
|
|
||||||
a := &coreauth.Auth{
|
|
||||||
ID: id,
|
|
||||||
Provider: "claude",
|
|
||||||
Label: "claude-apikey",
|
|
||||||
Prefix: prefix,
|
|
||||||
Status: coreauth.StatusActive,
|
|
||||||
ProxyURL: proxyURL,
|
|
||||||
Attributes: attrs,
|
|
||||||
CreatedAt: now,
|
|
||||||
UpdatedAt: now,
|
|
||||||
}
|
|
||||||
applyAuthExcludedModelsMeta(a, cfg, ck.ExcludedModels, "apikey")
|
|
||||||
out = append(out, a)
|
|
||||||
}
|
|
||||||
// Codex API keys -> synthesize auths
|
|
||||||
for i := range cfg.CodexKey {
|
|
||||||
ck := cfg.CodexKey[i]
|
|
||||||
key := strings.TrimSpace(ck.APIKey)
|
|
||||||
if key == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
prefix := strings.TrimSpace(ck.Prefix)
|
|
||||||
id, token := idGen.next("codex:apikey", key, ck.BaseURL)
|
|
||||||
attrs := map[string]string{
|
|
||||||
"source": fmt.Sprintf("config:codex[%s]", token),
|
|
||||||
"api_key": key,
|
|
||||||
}
|
|
||||||
if ck.BaseURL != "" {
|
|
||||||
attrs["base_url"] = ck.BaseURL
|
|
||||||
}
|
|
||||||
addConfigHeadersToAttrs(ck.Headers, attrs)
|
|
||||||
proxyURL := strings.TrimSpace(ck.ProxyURL)
|
|
||||||
a := &coreauth.Auth{
|
|
||||||
ID: id,
|
|
||||||
Provider: "codex",
|
|
||||||
Label: "codex-apikey",
|
|
||||||
Prefix: prefix,
|
|
||||||
Status: coreauth.StatusActive,
|
|
||||||
ProxyURL: proxyURL,
|
|
||||||
Attributes: attrs,
|
|
||||||
CreatedAt: now,
|
|
||||||
UpdatedAt: now,
|
|
||||||
}
|
|
||||||
applyAuthExcludedModelsMeta(a, cfg, ck.ExcludedModels, "apikey")
|
|
||||||
out = append(out, a)
|
|
||||||
}
|
|
||||||
for i := range cfg.OpenAICompatibility {
|
|
||||||
compat := &cfg.OpenAICompatibility[i]
|
|
||||||
prefix := strings.TrimSpace(compat.Prefix)
|
|
||||||
providerName := strings.ToLower(strings.TrimSpace(compat.Name))
|
|
||||||
if providerName == "" {
|
|
||||||
providerName = "openai-compatibility"
|
|
||||||
}
|
|
||||||
base := strings.TrimSpace(compat.BaseURL)
|
|
||||||
|
|
||||||
// Handle new APIKeyEntries format (preferred)
|
// Use ConfigSynthesizer for API key auth entries
|
||||||
createdEntries := 0
|
configSynth := synthesizer.NewConfigSynthesizer()
|
||||||
for j := range compat.APIKeyEntries {
|
if auths, err := configSynth.Synthesize(ctx); err == nil {
|
||||||
entry := &compat.APIKeyEntries[j]
|
out = append(out, auths...)
|
||||||
key := strings.TrimSpace(entry.APIKey)
|
|
||||||
proxyURL := strings.TrimSpace(entry.ProxyURL)
|
|
||||||
idKind := fmt.Sprintf("openai-compatibility:%s", providerName)
|
|
||||||
id, token := idGen.next(idKind, key, base, proxyURL)
|
|
||||||
attrs := map[string]string{
|
|
||||||
"source": fmt.Sprintf("config:%s[%s]", providerName, token),
|
|
||||||
"base_url": base,
|
|
||||||
"compat_name": compat.Name,
|
|
||||||
"provider_key": providerName,
|
|
||||||
}
|
|
||||||
if key != "" {
|
|
||||||
attrs["api_key"] = key
|
|
||||||
}
|
|
||||||
if hash := diff.ComputeOpenAICompatModelsHash(compat.Models); hash != "" {
|
|
||||||
attrs["models_hash"] = hash
|
|
||||||
}
|
|
||||||
addConfigHeadersToAttrs(compat.Headers, attrs)
|
|
||||||
a := &coreauth.Auth{
|
|
||||||
ID: id,
|
|
||||||
Provider: providerName,
|
|
||||||
Label: compat.Name,
|
|
||||||
Prefix: prefix,
|
|
||||||
Status: coreauth.StatusActive,
|
|
||||||
ProxyURL: proxyURL,
|
|
||||||
Attributes: attrs,
|
|
||||||
CreatedAt: now,
|
|
||||||
UpdatedAt: now,
|
|
||||||
}
|
|
||||||
out = append(out, a)
|
|
||||||
createdEntries++
|
|
||||||
}
|
|
||||||
if createdEntries == 0 {
|
|
||||||
idKind := fmt.Sprintf("openai-compatibility:%s", providerName)
|
|
||||||
id, token := idGen.next(idKind, base)
|
|
||||||
attrs := map[string]string{
|
|
||||||
"source": fmt.Sprintf("config:%s[%s]", providerName, token),
|
|
||||||
"base_url": base,
|
|
||||||
"compat_name": compat.Name,
|
|
||||||
"provider_key": providerName,
|
|
||||||
}
|
|
||||||
if hash := diff.ComputeOpenAICompatModelsHash(compat.Models); hash != "" {
|
|
||||||
attrs["models_hash"] = hash
|
|
||||||
}
|
|
||||||
addConfigHeadersToAttrs(compat.Headers, attrs)
|
|
||||||
a := &coreauth.Auth{
|
|
||||||
ID: id,
|
|
||||||
Provider: providerName,
|
|
||||||
Label: compat.Name,
|
|
||||||
Prefix: prefix,
|
|
||||||
Status: coreauth.StatusActive,
|
|
||||||
Attributes: attrs,
|
|
||||||
CreatedAt: now,
|
|
||||||
UpdatedAt: now,
|
|
||||||
}
|
|
||||||
out = append(out, a)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Process Vertex API key providers (Vertex-compatible endpoints)
|
// Use FileSynthesizer for file-based OAuth auth entries
|
||||||
for i := range cfg.VertexCompatAPIKey {
|
fileSynth := synthesizer.NewFileSynthesizer()
|
||||||
compat := &cfg.VertexCompatAPIKey[i]
|
if auths, err := fileSynth.Synthesize(ctx); err == nil {
|
||||||
providerName := "vertex"
|
out = append(out, auths...)
|
||||||
base := strings.TrimSpace(compat.BaseURL)
|
|
||||||
|
|
||||||
key := strings.TrimSpace(compat.APIKey)
|
|
||||||
prefix := strings.TrimSpace(compat.Prefix)
|
|
||||||
proxyURL := strings.TrimSpace(compat.ProxyURL)
|
|
||||||
idKind := "vertex:apikey"
|
|
||||||
id, token := idGen.next(idKind, key, base, proxyURL)
|
|
||||||
attrs := map[string]string{
|
|
||||||
"source": fmt.Sprintf("config:vertex-apikey[%s]", token),
|
|
||||||
"base_url": base,
|
|
||||||
"provider_key": providerName,
|
|
||||||
}
|
|
||||||
if key != "" {
|
|
||||||
attrs["api_key"] = key
|
|
||||||
}
|
|
||||||
if hash := diff.ComputeVertexCompatModelsHash(compat.Models); hash != "" {
|
|
||||||
attrs["models_hash"] = hash
|
|
||||||
}
|
|
||||||
addConfigHeadersToAttrs(compat.Headers, attrs)
|
|
||||||
a := &coreauth.Auth{
|
|
||||||
ID: id,
|
|
||||||
Provider: providerName,
|
|
||||||
Label: "vertex-apikey",
|
|
||||||
Prefix: prefix,
|
|
||||||
Status: coreauth.StatusActive,
|
|
||||||
ProxyURL: proxyURL,
|
|
||||||
Attributes: attrs,
|
|
||||||
CreatedAt: now,
|
|
||||||
UpdatedAt: now,
|
|
||||||
}
|
|
||||||
applyAuthExcludedModelsMeta(a, cfg, nil, "apikey")
|
|
||||||
out = append(out, a)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Also synthesize auth entries directly from auth files (for OAuth/file-backed providers)
|
|
||||||
entries, _ := os.ReadDir(w.authDir)
|
|
||||||
for _, e := range entries {
|
|
||||||
if e.IsDir() {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
name := e.Name()
|
|
||||||
if !strings.HasSuffix(strings.ToLower(name), ".json") {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
full := filepath.Join(w.authDir, name)
|
|
||||||
data, err := os.ReadFile(full)
|
|
||||||
if err != nil || len(data) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
var metadata map[string]any
|
|
||||||
if err = json.Unmarshal(data, &metadata); err != nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
t, _ := metadata["type"].(string)
|
|
||||||
if t == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
provider := strings.ToLower(t)
|
|
||||||
if provider == "gemini" {
|
|
||||||
provider = "gemini-cli"
|
|
||||||
}
|
|
||||||
label := provider
|
|
||||||
if email, _ := metadata["email"].(string); email != "" {
|
|
||||||
label = email
|
|
||||||
}
|
|
||||||
// Use relative path under authDir as ID to stay consistent with the file-based token store
|
|
||||||
id := full
|
|
||||||
if rel, errRel := filepath.Rel(w.authDir, full); errRel == nil && rel != "" {
|
|
||||||
id = rel
|
|
||||||
}
|
|
||||||
|
|
||||||
proxyURL := ""
|
|
||||||
if p, ok := metadata["proxy_url"].(string); ok {
|
|
||||||
proxyURL = p
|
|
||||||
}
|
|
||||||
|
|
||||||
prefix := ""
|
|
||||||
if rawPrefix, ok := metadata["prefix"].(string); ok {
|
|
||||||
trimmed := strings.TrimSpace(rawPrefix)
|
|
||||||
trimmed = strings.Trim(trimmed, "/")
|
|
||||||
if trimmed != "" && !strings.Contains(trimmed, "/") {
|
|
||||||
prefix = trimmed
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
a := &coreauth.Auth{
|
|
||||||
ID: id,
|
|
||||||
Provider: provider,
|
|
||||||
Label: label,
|
|
||||||
Prefix: prefix,
|
|
||||||
Status: coreauth.StatusActive,
|
|
||||||
Attributes: map[string]string{
|
|
||||||
"source": full,
|
|
||||||
"path": full,
|
|
||||||
},
|
|
||||||
ProxyURL: proxyURL,
|
|
||||||
Metadata: metadata,
|
|
||||||
CreatedAt: now,
|
|
||||||
UpdatedAt: now,
|
|
||||||
}
|
|
||||||
applyAuthExcludedModelsMeta(a, cfg, nil, "oauth")
|
|
||||||
if provider == "gemini-cli" {
|
|
||||||
if virtuals := synthesizeGeminiVirtualAuths(a, metadata, now); len(virtuals) > 0 {
|
|
||||||
for _, v := range virtuals {
|
|
||||||
applyAuthExcludedModelsMeta(v, cfg, nil, "oauth")
|
|
||||||
}
|
|
||||||
out = append(out, a)
|
|
||||||
out = append(out, virtuals...)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
out = append(out, a)
|
|
||||||
}
|
|
||||||
return out
|
return out
|
||||||
}
|
}
|
||||||
|
|
||||||
func synthesizeGeminiVirtualAuths(primary *coreauth.Auth, metadata map[string]any, now time.Time) []*coreauth.Auth {
|
|
||||||
if primary == nil || metadata == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
projects := splitGeminiProjectIDs(metadata)
|
|
||||||
if len(projects) <= 1 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
email, _ := metadata["email"].(string)
|
|
||||||
shared := geminicli.NewSharedCredential(primary.ID, email, metadata, projects)
|
|
||||||
primary.Disabled = true
|
|
||||||
primary.Status = coreauth.StatusDisabled
|
|
||||||
primary.Runtime = shared
|
|
||||||
if primary.Attributes == nil {
|
|
||||||
primary.Attributes = make(map[string]string)
|
|
||||||
}
|
|
||||||
primary.Attributes["gemini_virtual_primary"] = "true"
|
|
||||||
primary.Attributes["virtual_children"] = strings.Join(projects, ",")
|
|
||||||
source := primary.Attributes["source"]
|
|
||||||
authPath := primary.Attributes["path"]
|
|
||||||
originalProvider := primary.Provider
|
|
||||||
if originalProvider == "" {
|
|
||||||
originalProvider = "gemini-cli"
|
|
||||||
}
|
|
||||||
label := primary.Label
|
|
||||||
if label == "" {
|
|
||||||
label = originalProvider
|
|
||||||
}
|
|
||||||
virtuals := make([]*coreauth.Auth, 0, len(projects))
|
|
||||||
for _, projectID := range projects {
|
|
||||||
attrs := map[string]string{
|
|
||||||
"runtime_only": "true",
|
|
||||||
"gemini_virtual_parent": primary.ID,
|
|
||||||
"gemini_virtual_project": projectID,
|
|
||||||
}
|
|
||||||
if source != "" {
|
|
||||||
attrs["source"] = source
|
|
||||||
}
|
|
||||||
if authPath != "" {
|
|
||||||
attrs["path"] = authPath
|
|
||||||
}
|
|
||||||
metadataCopy := map[string]any{
|
|
||||||
"email": email,
|
|
||||||
"project_id": projectID,
|
|
||||||
"virtual": true,
|
|
||||||
"virtual_parent_id": primary.ID,
|
|
||||||
"type": metadata["type"],
|
|
||||||
}
|
|
||||||
proxy := strings.TrimSpace(primary.ProxyURL)
|
|
||||||
if proxy != "" {
|
|
||||||
metadataCopy["proxy_url"] = proxy
|
|
||||||
}
|
|
||||||
virtual := &coreauth.Auth{
|
|
||||||
ID: buildGeminiVirtualID(primary.ID, projectID),
|
|
||||||
Provider: originalProvider,
|
|
||||||
Label: fmt.Sprintf("%s [%s]", label, projectID),
|
|
||||||
Status: coreauth.StatusActive,
|
|
||||||
Attributes: attrs,
|
|
||||||
Metadata: metadataCopy,
|
|
||||||
ProxyURL: primary.ProxyURL,
|
|
||||||
Prefix: primary.Prefix,
|
|
||||||
CreatedAt: now,
|
|
||||||
UpdatedAt: now,
|
|
||||||
Runtime: geminicli.NewVirtualCredential(projectID, shared),
|
|
||||||
}
|
|
||||||
virtuals = append(virtuals, virtual)
|
|
||||||
}
|
|
||||||
return virtuals
|
|
||||||
}
|
|
||||||
|
|
||||||
func splitGeminiProjectIDs(metadata map[string]any) []string {
|
|
||||||
raw, _ := metadata["project_id"].(string)
|
|
||||||
trimmed := strings.TrimSpace(raw)
|
|
||||||
if trimmed == "" {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
parts := strings.Split(trimmed, ",")
|
|
||||||
result := make([]string, 0, len(parts))
|
|
||||||
seen := make(map[string]struct{}, len(parts))
|
|
||||||
for _, part := range parts {
|
|
||||||
id := strings.TrimSpace(part)
|
|
||||||
if id == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if _, ok := seen[id]; ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
seen[id] = struct{}{}
|
|
||||||
result = append(result, id)
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
func buildGeminiVirtualID(baseID, projectID string) string {
|
|
||||||
project := strings.TrimSpace(projectID)
|
|
||||||
if project == "" {
|
|
||||||
project = "project"
|
|
||||||
}
|
|
||||||
replacer := strings.NewReplacer("/", "_", "\\", "_", " ", "_")
|
|
||||||
return fmt.Sprintf("%s::%s", baseID, replacer.Replace(project))
|
|
||||||
}
|
|
||||||
|
|
||||||
// buildCombinedClientMap merges file-based clients with API key clients from the cache.
|
// buildCombinedClientMap merges file-based clients with API key clients from the cache.
|
||||||
// buildCombinedClientMap removed
|
// buildCombinedClientMap removed
|
||||||
|
|
||||||
@@ -1445,17 +994,3 @@ func BuildAPIKeyClients(cfg *config.Config) (int, int, int, int, int) {
|
|||||||
}
|
}
|
||||||
return geminiAPIKeyCount, vertexCompatAPIKeyCount, claudeAPIKeyCount, codexAPIKeyCount, openAICompatCount
|
return geminiAPIKeyCount, vertexCompatAPIKeyCount, claudeAPIKeyCount, codexAPIKeyCount, openAICompatCount
|
||||||
}
|
}
|
||||||
|
|
||||||
func addConfigHeadersToAttrs(headers map[string]string, attrs map[string]string) {
|
|
||||||
if len(headers) == 0 || attrs == nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
for hk, hv := range headers {
|
|
||||||
key := strings.TrimSpace(hk)
|
|
||||||
val := strings.TrimSpace(hv)
|
|
||||||
if key == "" || val == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
attrs["header:"+key] = val
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ import (
|
|||||||
"github.com/fsnotify/fsnotify"
|
"github.com/fsnotify/fsnotify"
|
||||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
|
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
|
||||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
|
"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
|
||||||
|
"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/synthesizer"
|
||||||
coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
|
coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
|
||||||
"gopkg.in/yaml.v3"
|
"gopkg.in/yaml.v3"
|
||||||
)
|
)
|
||||||
@@ -24,7 +25,7 @@ func TestApplyAuthExcludedModelsMeta_APIKey(t *testing.T) {
|
|||||||
cfg := &config.Config{}
|
cfg := &config.Config{}
|
||||||
perKey := []string{" Model-1 ", "model-2"}
|
perKey := []string{" Model-1 ", "model-2"}
|
||||||
|
|
||||||
applyAuthExcludedModelsMeta(auth, cfg, perKey, "apikey")
|
synthesizer.ApplyAuthExcludedModelsMeta(auth, cfg, perKey, "apikey")
|
||||||
|
|
||||||
expected := diff.ComputeExcludedModelsHash([]string{"model-1", "model-2"})
|
expected := diff.ComputeExcludedModelsHash([]string{"model-1", "model-2"})
|
||||||
if got := auth.Attributes["excluded_models_hash"]; got != expected {
|
if got := auth.Attributes["excluded_models_hash"]; got != expected {
|
||||||
@@ -46,7 +47,7 @@ func TestApplyAuthExcludedModelsMeta_OAuthProvider(t *testing.T) {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
applyAuthExcludedModelsMeta(auth, cfg, nil, "oauth")
|
synthesizer.ApplyAuthExcludedModelsMeta(auth, cfg, nil, "oauth")
|
||||||
|
|
||||||
expected := diff.ComputeExcludedModelsHash([]string{"a", "b"})
|
expected := diff.ComputeExcludedModelsHash([]string{"a", "b"})
|
||||||
if got := auth.Attributes["excluded_models_hash"]; got != expected {
|
if got := auth.Attributes["excluded_models_hash"]; got != expected {
|
||||||
@@ -369,14 +370,14 @@ func TestAddOrUpdateClientSkipsUnchanged(t *testing.T) {
|
|||||||
var reloads int32
|
var reloads int32
|
||||||
w := &Watcher{
|
w := &Watcher{
|
||||||
authDir: tmpDir,
|
authDir: tmpDir,
|
||||||
lastAuthHashes: map[string]string{
|
lastAuthHashes: make(map[string]string),
|
||||||
filepath.Clean(authFile): hexString(sum[:]),
|
|
||||||
},
|
|
||||||
reloadCallback: func(*config.Config) {
|
reloadCallback: func(*config.Config) {
|
||||||
atomic.AddInt32(&reloads, 1)
|
atomic.AddInt32(&reloads, 1)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
w.SetConfig(&config.Config{AuthDir: tmpDir})
|
w.SetConfig(&config.Config{AuthDir: tmpDir})
|
||||||
|
// Use normalizeAuthPath to match how addOrUpdateClient stores the key
|
||||||
|
w.lastAuthHashes[w.normalizeAuthPath(authFile)] = hexString(sum[:])
|
||||||
|
|
||||||
w.addOrUpdateClient(authFile)
|
w.addOrUpdateClient(authFile)
|
||||||
if got := atomic.LoadInt32(&reloads); got != 0 {
|
if got := atomic.LoadInt32(&reloads); got != 0 {
|
||||||
@@ -406,7 +407,8 @@ func TestAddOrUpdateClientTriggersReloadAndHash(t *testing.T) {
|
|||||||
if got := atomic.LoadInt32(&reloads); got != 1 {
|
if got := atomic.LoadInt32(&reloads); got != 1 {
|
||||||
t.Fatalf("expected reload callback once, got %d", got)
|
t.Fatalf("expected reload callback once, got %d", got)
|
||||||
}
|
}
|
||||||
normalized := filepath.Clean(authFile)
|
// Use normalizeAuthPath to match how addOrUpdateClient stores the key
|
||||||
|
normalized := w.normalizeAuthPath(authFile)
|
||||||
if _, ok := w.lastAuthHashes[normalized]; !ok {
|
if _, ok := w.lastAuthHashes[normalized]; !ok {
|
||||||
t.Fatalf("expected hash to be stored for %s", normalized)
|
t.Fatalf("expected hash to be stored for %s", normalized)
|
||||||
}
|
}
|
||||||
@@ -419,17 +421,17 @@ func TestRemoveClientRemovesHash(t *testing.T) {
|
|||||||
|
|
||||||
w := &Watcher{
|
w := &Watcher{
|
||||||
authDir: tmpDir,
|
authDir: tmpDir,
|
||||||
lastAuthHashes: map[string]string{
|
lastAuthHashes: make(map[string]string),
|
||||||
filepath.Clean(authFile): "hash",
|
|
||||||
},
|
|
||||||
reloadCallback: func(*config.Config) {
|
reloadCallback: func(*config.Config) {
|
||||||
atomic.AddInt32(&reloads, 1)
|
atomic.AddInt32(&reloads, 1)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
w.SetConfig(&config.Config{AuthDir: tmpDir})
|
w.SetConfig(&config.Config{AuthDir: tmpDir})
|
||||||
|
// Use normalizeAuthPath to set up the hash with the correct key format
|
||||||
|
w.lastAuthHashes[w.normalizeAuthPath(authFile)] = "hash"
|
||||||
|
|
||||||
w.removeClient(authFile)
|
w.removeClient(authFile)
|
||||||
if _, ok := w.lastAuthHashes[filepath.Clean(authFile)]; ok {
|
if _, ok := w.lastAuthHashes[w.normalizeAuthPath(authFile)]; ok {
|
||||||
t.Fatal("expected hash to be removed after deletion")
|
t.Fatal("expected hash to be removed after deletion")
|
||||||
}
|
}
|
||||||
if got := atomic.LoadInt32(&reloads); got != 1 {
|
if got := atomic.LoadInt32(&reloads); got != 1 {
|
||||||
@@ -475,7 +477,8 @@ func TestAuthFileUnchangedUsesHash(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
sum := sha256.Sum256(content)
|
sum := sha256.Sum256(content)
|
||||||
w.lastAuthHashes[filepath.Clean(authFile)] = hexString(sum[:])
|
// Use normalizeAuthPath to match how authFileUnchanged looks up the key
|
||||||
|
w.lastAuthHashes[w.normalizeAuthPath(authFile)] = hexString(sum[:])
|
||||||
|
|
||||||
unchanged, err = w.authFileUnchanged(authFile)
|
unchanged, err = w.authFileUnchanged(authFile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -562,19 +565,20 @@ func TestHandleEventRemovesAuthFile(t *testing.T) {
|
|||||||
w := &Watcher{
|
w := &Watcher{
|
||||||
authDir: tmpDir,
|
authDir: tmpDir,
|
||||||
config: &config.Config{AuthDir: tmpDir},
|
config: &config.Config{AuthDir: tmpDir},
|
||||||
lastAuthHashes: map[string]string{
|
lastAuthHashes: make(map[string]string),
|
||||||
filepath.Clean(authFile): "hash",
|
|
||||||
},
|
|
||||||
reloadCallback: func(*config.Config) {
|
reloadCallback: func(*config.Config) {
|
||||||
atomic.AddInt32(&reloads, 1)
|
atomic.AddInt32(&reloads, 1)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
// Use normalizeAuthPath to set up the hash with the correct key format
|
||||||
|
w.lastAuthHashes[w.normalizeAuthPath(authFile)] = "hash"
|
||||||
|
|
||||||
w.handleEvent(fsnotify.Event{Name: authFile, Op: fsnotify.Remove})
|
w.handleEvent(fsnotify.Event{Name: authFile, Op: fsnotify.Remove})
|
||||||
|
|
||||||
if atomic.LoadInt32(&reloads) != 1 {
|
if atomic.LoadInt32(&reloads) != 1 {
|
||||||
t.Fatalf("expected reload callback once, got %d", reloads)
|
t.Fatalf("expected reload callback once, got %d", reloads)
|
||||||
}
|
}
|
||||||
if _, ok := w.lastAuthHashes[filepath.Clean(authFile)]; ok {
|
if _, ok := w.lastAuthHashes[w.normalizeAuthPath(authFile)]; ok {
|
||||||
t.Fatal("expected hash entry to be removed")
|
t.Fatal("expected hash entry to be removed")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||