mirror of
https://github.com/router-for-me/CLIProxyAPI.git
synced 2026-02-07 23:10:51 +08:00
Compare commits
29 Commits
| SHA1 |
|---|
| d1220de02d |
| 13eb5268de |
| 88798816f2 |
| 598f0af19b |
| a33f5d31fc |
| 506699fba1 |
| 68a27772b3 |
| de87fb622b |
| f27672f6cf |
| 28420c14e4 |
| 0bd221ff41 |
| 5fda6f8ef3 |
| 9b956f6338 |
| 09923f654c |
| ae7b972649 |
| 47885e3710 |
| 4b9a260b37 |
| 2c743c8f0b |
| 9f2c278ee6 |
| aea337cfe2 |
| 811f8f8b4f |
| 27734a23b1 |
| 1b8e538a77 |
| 41c2385aca |
| d605985f45 |
| d52b28b147 |
| 4afe1f42ca |
| 7481c0eaa0 |
| ffdfad8482 |
```diff
@@ -28,4 +28,6 @@ bin/*
 .claude/*
 .vscode/*
 .serena/*
-.bmad/*
+.agent/*
+.bmad/*
+_bmad/*
```
.github/workflows/pr-test-build.yml (vendored, new file, 23 lines)

```diff
@@ -0,0 +1,23 @@
+name: pr-test-build
+
+on:
+  pull_request:
+
+permissions:
+  contents: read
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Set up Go
+        uses: actions/setup-go@v5
+        with:
+          go-version-file: go.mod
+          cache: true
+      - name: Build
+        run: |
+          go build -o test-output ./cmd/server
+          rm -f test-output
```
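Note: the Build step compiles `./cmd/server` to a throwaway `test-output` binary and deletes it immediately, so the job serves purely as a compile check on pull requests; `go-version-file: go.mod` pins the toolchain to the module's declared Go version, and `cache: true` enables module and build caching in setup-go.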
.gitignore (vendored, 2 lines changed; the scrape lost the add/remove markers for this hunk, so the lines are shown unmarked)

```diff
@@ -31,7 +31,9 @@ GEMINI.md
 .vscode/*
 .claude/*
 .serena/*
 .agent/*
 .bmad/*
 _bmad/*

 # macOS
 .DS_Store
```
```diff
@@ -160,7 +160,7 @@ func GetGeminiModels() []*ModelInfo {
 			InputTokenLimit:            1048576,
 			OutputTokenLimit:           65536,
 			SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
-			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true},
+			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"low", "high"}},
 		},
 		{
 			ID: "gemini-3-pro-image-preview",
@@ -175,7 +175,7 @@ func GetGeminiModels() []*ModelInfo {
 			InputTokenLimit:            1048576,
 			OutputTokenLimit:           65536,
 			SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
-			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true},
+			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"low", "high"}},
 		},
 	}
 }
@@ -240,7 +240,22 @@ func GetGeminiVertexModels() []*ModelInfo {
 			InputTokenLimit:            1048576,
 			OutputTokenLimit:           65536,
 			SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
-			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true},
+			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"low", "high"}},
 		},
+		{
+			ID:                         "gemini-3-flash-preview",
+			Object:                     "model",
+			Created:                    1765929600,
+			OwnedBy:                    "google",
+			Type:                       "gemini",
+			Name:                       "models/gemini-3-flash-preview",
+			Version:                    "3.0",
+			DisplayName:                "Gemini 3 Flash Preview",
+			Description:                "Our most intelligent model built for speed, combining frontier intelligence with superior search and grounding.",
+			InputTokenLimit:            1048576,
+			OutputTokenLimit:           65536,
+			SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
+			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"minimal", "low", "medium", "high"}},
+		},
 		{
 			ID: "gemini-3-pro-image-preview",
@@ -255,7 +270,7 @@ func GetGeminiVertexModels() []*ModelInfo {
 			InputTokenLimit:            1048576,
 			OutputTokenLimit:           65536,
 			SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
-			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true},
+			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"low", "high"}},
 		},
 	}
 }
@@ -317,11 +332,26 @@ func GetGeminiCLIModels() []*ModelInfo {
 			Name:        "models/gemini-3-pro-preview",
 			Version:     "3.0",
 			DisplayName: "Gemini 3 Pro Preview",
-			Description: "Gemini 3 Pro Preview",
+			Description: "Our most intelligent model with SOTA reasoning and multimodal understanding, and powerful agentic and vibe coding capabilities",
 			InputTokenLimit:            1048576,
 			OutputTokenLimit:           65536,
 			SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
-			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true},
+			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"low", "high"}},
 		},
+		{
+			ID:                         "gemini-3-flash-preview",
+			Object:                     "model",
+			Created:                    1765929600,
+			OwnedBy:                    "google",
+			Type:                       "gemini",
+			Name:                       "models/gemini-3-flash-preview",
+			Version:                    "3.0",
+			DisplayName:                "Gemini 3 Flash Preview",
+			Description:                "Our most intelligent model built for speed, combining frontier intelligence with superior search and grounding.",
+			InputTokenLimit:            1048576,
+			OutputTokenLimit:           65536,
+			SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
+			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"minimal", "low", "medium", "high"}},
+		},
 	}
 }
@@ -387,7 +417,22 @@ func GetAIStudioModels() []*ModelInfo {
 			InputTokenLimit:            1048576,
 			OutputTokenLimit:           65536,
 			SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
-			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true},
+			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"low", "high"}},
 		},
+		{
+			ID:                         "gemini-3-flash-preview",
+			Object:                     "model",
+			Created:                    1765929600,
+			OwnedBy:                    "google",
+			Type:                       "gemini",
+			Name:                       "models/gemini-3-flash-preview",
+			Version:                    "3.0",
+			DisplayName:                "Gemini 3 Flash Preview",
+			Description:                "Our most intelligent model built for speed, combining frontier intelligence with superior search and grounding.",
+			InputTokenLimit:            1048576,
+			OutputTokenLimit:           65536,
+			SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
+			Thinking:                   &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"minimal", "low", "medium", "high"}},
+		},
 		{
 			ID: "gemini-pro-latest",
@@ -698,8 +743,9 @@ func GetAntigravityModelConfig() map[string]*AntigravityModelConfig {
 		"gemini-2.5-flash":                        {Thinking: &ThinkingSupport{Min: 0, Max: 24576, ZeroAllowed: true, DynamicAllowed: true}, Name: "models/gemini-2.5-flash"},
 		"gemini-2.5-flash-lite":                   {Thinking: &ThinkingSupport{Min: 0, Max: 24576, ZeroAllowed: true, DynamicAllowed: true}, Name: "models/gemini-2.5-flash-lite"},
 		"gemini-2.5-computer-use-preview-10-2025": {Name: "models/gemini-2.5-computer-use-preview-10-2025"},
-		"gemini-3-pro-preview":       {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true}, Name: "models/gemini-3-pro-preview"},
-		"gemini-3-pro-image-preview": {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true}, Name: "models/gemini-3-pro-image-preview"},
+		"gemini-3-pro-preview":       {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"low", "high"}}, Name: "models/gemini-3-pro-preview"},
+		"gemini-3-pro-image-preview": {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"low", "high"}}, Name: "models/gemini-3-pro-image-preview"},
+		"gemini-3-flash-preview":     {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"minimal", "low", "medium", "high"}}, Name: "models/gemini-3-flash-preview"},
 		"gemini-claude-sonnet-4-5-thinking": {Thinking: &ThinkingSupport{Min: 1024, Max: 200000, ZeroAllowed: false, DynamicAllowed: true}, MaxCompletionTokens: 64000},
 		"gemini-claude-opus-4-5-thinking":   {Thinking: &ThinkingSupport{Min: 1024, Max: 200000, ZeroAllowed: false, DynamicAllowed: true}, MaxCompletionTokens: 64000},
 	}
```
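The recurring change across these registry hunks is the new `Levels` field on `ThinkingSupport`: the Gemini 3 Pro models advertise discrete `low`/`high` reasoning levels, while the newly registered Gemini 3 Flash adds `minimal` and `medium`. A minimal sketch of how a caller might validate a requested level against such metadata — `resolveThinkingLevel` and its fallback policy are assumptions for illustration, not code from this repository:

```go
package main

import (
	"fmt"
	"strings"
)

// ThinkingSupport mirrors the fields visible in the diff above.
type ThinkingSupport struct {
	Min, Max       int
	ZeroAllowed    bool
	DynamicAllowed bool
	Levels         []string // e.g. {"low", "high"} or {"minimal", "low", "medium", "high"}
}

// resolveThinkingLevel is a hypothetical helper: it accepts a client-requested
// level if the model advertises it, otherwise falls back to the last (highest)
// advertised level. Models without Levels keep using numeric budgets.
func resolveThinkingLevel(ts *ThinkingSupport, requested string) string {
	if ts == nil || len(ts.Levels) == 0 {
		return ""
	}
	for _, l := range ts.Levels {
		if strings.EqualFold(l, requested) {
			return l
		}
	}
	return ts.Levels[len(ts.Levels)-1] // assumption: default to the highest level
}

func main() {
	pro := &ThinkingSupport{Min: 128, Max: 32768, DynamicAllowed: true, Levels: []string{"low", "high"}}
	// "medium" is not offered by gemini-3-pro-preview, so this prints "high".
	fmt.Println(resolveThinkingLevel(pro, "medium"))
}
```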
```diff
@@ -323,8 +323,9 @@ func (e *AIStudioExecutor) translateRequest(req cliproxyexecutor.Request, opts c
 	to := sdktranslator.FromString("gemini")
 	payload := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), stream)
 	payload = ApplyThinkingMetadata(payload, req.Metadata, req.Model)
+	payload = util.ApplyGemini3ThinkingLevelFromMetadata(req.Model, req.Metadata, payload)
 	payload = util.ApplyDefaultThinkingIfNeeded(req.Model, payload)
-	payload = util.ConvertThinkingLevelToBudget(payload)
+	payload = util.ConvertThinkingLevelToBudget(payload, req.Model)
 	payload = util.NormalizeGeminiThinkingBudget(req.Model, payload)
 	payload = util.StripThinkingConfigIfUnsupported(req.Model, payload)
 	payload = fixGeminiImageAspectRatio(req.Model, payload)
```
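Each step of this translate pipeline takes the request JSON as a `[]byte` and returns a rewritten copy, so ordering is significant: level metadata is applied before levels are converted to numeric budgets, and budgets are normalized before unsupported thinking config is stripped. `ConvertThinkingLevelToBudget` now also receives `req.Model`, presumably because the level-to-budget mapping became model-dependent (Pro and Flash advertise different level sets). A generic sketch of the chaining pattern, with no-op stand-ins for the real helpers:

```go
package main

import "fmt"

// transform mirrors the shape shared by the payload-rewriting helpers above:
// take the model name and JSON payload, return a rewritten copy.
type transform func(model string, payload []byte) []byte

// chain applies stages in order; later stages normalize or strip what
// earlier stages introduced, so the order is part of the contract.
func chain(model string, payload []byte, stages ...transform) []byte {
	for _, stage := range stages {
		payload = stage(model, payload)
	}
	return payload
}

func main() {
	// Hypothetical no-op stages standing in for ApplyGemini3ThinkingLevel...,
	// ConvertThinkingLevelToBudget, NormalizeGeminiThinkingBudget, etc.
	applyLevel := func(_ string, p []byte) []byte { return p }
	levelToBudget := func(_ string, p []byte) []byte { return p }
	normalize := func(_ string, p []byte) []byte { return p }
	out := chain("gemini-3-pro-preview", []byte(`{"request":{}}`), applyLevel, levelToBudget, normalize)
	fmt.Println(string(out))
}
```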
```diff
@@ -32,15 +32,16 @@ import (
 const (
 	antigravityBaseURLDaily = "https://daily-cloudcode-pa.sandbox.googleapis.com"
 	// antigravityBaseURLAutopush = "https://autopush-cloudcode-pa.sandbox.googleapis.com"
-	antigravityBaseURLProd  = "https://cloudcode-pa.googleapis.com"
-	antigravityStreamPath   = "/v1internal:streamGenerateContent"
-	antigravityGeneratePath = "/v1internal:generateContent"
-	antigravityModelsPath   = "/v1internal:fetchAvailableModels"
-	antigravityClientID     = "1071006060591-tmhssin2h21lcre235vtolojh4g403ep.apps.googleusercontent.com"
-	antigravityClientSecret = "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"
-	defaultAntigravityAgent = "antigravity/1.11.5 windows/amd64"
-	antigravityAuthType     = "antigravity"
-	refreshSkew             = 3000 * time.Second
+	antigravityBaseURLProd     = "https://cloudcode-pa.googleapis.com"
+	antigravityCountTokensPath = "/v1internal:countTokens"
+	antigravityStreamPath      = "/v1internal:streamGenerateContent"
+	antigravityGeneratePath    = "/v1internal:generateContent"
+	antigravityModelsPath      = "/v1internal:fetchAvailableModels"
+	antigravityClientID        = "1071006060591-tmhssin2h21lcre235vtolojh4g403ep.apps.googleusercontent.com"
+	antigravityClientSecret    = "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"
+	defaultAntigravityAgent    = "antigravity/1.11.5 windows/amd64"
+	antigravityAuthType        = "antigravity"
+	refreshSkew                = 3000 * time.Second
 )

 var randSource = rand.New(rand.NewSource(time.Now().UnixNano()))
```
```diff
@@ -69,6 +70,10 @@ func (e *AntigravityExecutor) PrepareRequest(_ *http.Request, _ *cliproxyauth.Au

 // Execute performs a non-streaming request to the Antigravity API.
 func (e *AntigravityExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (resp cliproxyexecutor.Response, err error) {
+	if strings.Contains(req.Model, "claude") {
+		return e.executeClaudeNonStream(ctx, auth, req, opts)
+	}
+
 	token, updatedAuth, errToken := e.ensureAccessToken(ctx, auth)
 	if errToken != nil {
 		return resp, errToken
```
```diff
@@ -85,6 +90,7 @@ func (e *AntigravityExecutor) Execute(ctx context.Context, auth *cliproxyauth.Au
 	translated := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), false)

 	translated = applyThinkingMetadataCLI(translated, req.Metadata, req.Model)
+	translated = util.ApplyGemini3ThinkingLevelFromMetadataCLI(req.Model, req.Metadata, translated)
 	translated = util.ApplyDefaultThinkingIfNeededCLI(req.Model, translated)
 	translated = normalizeAntigravityThinking(req.Model, translated)
```
```diff
@@ -160,6 +166,337 @@ func (e *AntigravityExecutor) Execute(ctx context.Context, auth *cliproxyauth.Au
 	return resp, err
 }

+// executeClaudeNonStream performs a claude non-streaming request to the Antigravity API.
+func (e *AntigravityExecutor) executeClaudeNonStream(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (resp cliproxyexecutor.Response, err error) {
+	token, updatedAuth, errToken := e.ensureAccessToken(ctx, auth)
+	if errToken != nil {
+		return resp, errToken
+	}
+	if updatedAuth != nil {
+		auth = updatedAuth
+	}
+
+	reporter := newUsageReporter(ctx, e.Identifier(), req.Model, auth)
+	defer reporter.trackFailure(ctx, &err)
+
+	from := opts.SourceFormat
+	to := sdktranslator.FromString("antigravity")
+	translated := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), true)
+
+	translated = applyThinkingMetadataCLI(translated, req.Metadata, req.Model)
+	translated = util.ApplyGemini3ThinkingLevelFromMetadataCLI(req.Model, req.Metadata, translated)
+	translated = util.ApplyDefaultThinkingIfNeededCLI(req.Model, translated)
+	translated = normalizeAntigravityThinking(req.Model, translated)
+
+	baseURLs := antigravityBaseURLFallbackOrder(auth)
+	httpClient := newProxyAwareHTTPClient(ctx, e.cfg, auth, 0)
+
+	var lastStatus int
+	var lastBody []byte
+	var lastErr error
+
+	for idx, baseURL := range baseURLs {
+		httpReq, errReq := e.buildRequest(ctx, auth, token, req.Model, translated, true, opts.Alt, baseURL)
+		if errReq != nil {
+			err = errReq
+			return resp, err
+		}
+
+		httpResp, errDo := httpClient.Do(httpReq)
+		if errDo != nil {
+			recordAPIResponseError(ctx, e.cfg, errDo)
+			lastStatus = 0
+			lastBody = nil
+			lastErr = errDo
+			if idx+1 < len(baseURLs) {
+				log.Debugf("antigravity executor: request error on base url %s, retrying with fallback base url: %s", baseURL, baseURLs[idx+1])
+				continue
+			}
+			err = errDo
+			return resp, err
+		}
+		recordAPIResponseMetadata(ctx, e.cfg, httpResp.StatusCode, httpResp.Header.Clone())
+		if httpResp.StatusCode < http.StatusOK || httpResp.StatusCode >= http.StatusMultipleChoices {
+			bodyBytes, errRead := io.ReadAll(httpResp.Body)
+			if errClose := httpResp.Body.Close(); errClose != nil {
+				log.Errorf("antigravity executor: close response body error: %v", errClose)
+			}
+			if errRead != nil {
+				recordAPIResponseError(ctx, e.cfg, errRead)
+				lastStatus = 0
+				lastBody = nil
+				lastErr = errRead
+				if idx+1 < len(baseURLs) {
+					log.Debugf("antigravity executor: read error on base url %s, retrying with fallback base url: %s", baseURL, baseURLs[idx+1])
+					continue
+				}
+				err = errRead
+				return resp, err
+			}
+			appendAPIResponseChunk(ctx, e.cfg, bodyBytes)
+			lastStatus = httpResp.StatusCode
+			lastBody = append([]byte(nil), bodyBytes...)
+			lastErr = nil
+			if httpResp.StatusCode == http.StatusTooManyRequests && idx+1 < len(baseURLs) {
+				log.Debugf("antigravity executor: rate limited on base url %s, retrying with fallback base url: %s", baseURL, baseURLs[idx+1])
+				continue
+			}
+			err = statusErr{code: httpResp.StatusCode, msg: string(bodyBytes)}
+			return resp, err
+		}
+
+		out := make(chan cliproxyexecutor.StreamChunk)
+		go func(resp *http.Response) {
+			defer close(out)
+			defer func() {
+				if errClose := resp.Body.Close(); errClose != nil {
+					log.Errorf("antigravity executor: close response body error: %v", errClose)
+				}
+			}()
+			scanner := bufio.NewScanner(resp.Body)
+			scanner.Buffer(nil, streamScannerBuffer)
+			for scanner.Scan() {
+				line := scanner.Bytes()
+				appendAPIResponseChunk(ctx, e.cfg, line)
+
+				// Filter usage metadata for all models
+				// Only retain usage statistics in the terminal chunk
+				line = FilterSSEUsageMetadata(line)
+
+				payload := jsonPayload(line)
+				if payload == nil {
+					continue
+				}
+
+				if detail, ok := parseAntigravityStreamUsage(payload); ok {
+					reporter.publish(ctx, detail)
+				}
+
+				out <- cliproxyexecutor.StreamChunk{Payload: payload}
+			}
+			if errScan := scanner.Err(); errScan != nil {
+				recordAPIResponseError(ctx, e.cfg, errScan)
+				reporter.publishFailure(ctx)
+				out <- cliproxyexecutor.StreamChunk{Err: errScan}
+			} else {
+				reporter.ensurePublished(ctx)
+			}
+		}(httpResp)
+
+		var buffer bytes.Buffer
+		for chunk := range out {
+			if chunk.Err != nil {
+				return resp, chunk.Err
+			}
+			if len(chunk.Payload) > 0 {
+				_, _ = buffer.Write(chunk.Payload)
+				_, _ = buffer.Write([]byte("\n"))
+			}
+		}
+		resp = cliproxyexecutor.Response{Payload: e.convertStreamToNonStream(buffer.Bytes())}
+
+		reporter.publish(ctx, parseAntigravityUsage(resp.Payload))
+		var param any
+		converted := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), translated, resp.Payload, &param)
+		resp = cliproxyexecutor.Response{Payload: []byte(converted)}
+		reporter.ensurePublished(ctx)
+
+		return resp, nil
+	}
+
+	switch {
+	case lastStatus != 0:
+		err = statusErr{code: lastStatus, msg: string(lastBody)}
+	case lastErr != nil:
+		err = lastErr
+	default:
+		err = statusErr{code: http.StatusServiceUnavailable, msg: "antigravity executor: no base url available"}
+	}
+	return resp, err
+}
+
+func (e *AntigravityExecutor) convertStreamToNonStream(stream []byte) []byte {
+	responseTemplate := ""
+	var traceID string
+	var finishReason string
+	var modelVersion string
+	var responseID string
+	var role string
+	var usageRaw string
+	parts := make([]map[string]interface{}, 0)
+	var pendingKind string
+	var pendingText strings.Builder
+	var pendingThoughtSig string
+
+	flushPending := func() {
+		if pendingKind == "" {
+			return
+		}
+		text := pendingText.String()
+		switch pendingKind {
+		case "text":
+			if strings.TrimSpace(text) == "" {
+				pendingKind = ""
+				pendingText.Reset()
+				pendingThoughtSig = ""
+				return
+			}
+			parts = append(parts, map[string]interface{}{"text": text})
+		case "thought":
+			if strings.TrimSpace(text) == "" && pendingThoughtSig == "" {
+				pendingKind = ""
+				pendingText.Reset()
+				pendingThoughtSig = ""
+				return
+			}
+			part := map[string]interface{}{"thought": true}
+			part["text"] = text
+			if pendingThoughtSig != "" {
+				part["thoughtSignature"] = pendingThoughtSig
+			}
+			parts = append(parts, part)
+		}
+		pendingKind = ""
+		pendingText.Reset()
+		pendingThoughtSig = ""
+	}
+
+	normalizePart := func(partResult gjson.Result) map[string]interface{} {
+		var m map[string]interface{}
+		_ = json.Unmarshal([]byte(partResult.Raw), &m)
+		if m == nil {
+			m = map[string]interface{}{}
+		}
+		sig := partResult.Get("thoughtSignature").String()
+		if sig == "" {
+			sig = partResult.Get("thought_signature").String()
+		}
+		if sig != "" {
+			m["thoughtSignature"] = sig
+			delete(m, "thought_signature")
+		}
+		if inlineData, ok := m["inline_data"]; ok {
+			m["inlineData"] = inlineData
+			delete(m, "inline_data")
+		}
+		return m
+	}
+
+	for _, line := range bytes.Split(stream, []byte("\n")) {
+		trimmed := bytes.TrimSpace(line)
+		if len(trimmed) == 0 || !gjson.ValidBytes(trimmed) {
+			continue
+		}
+
+		root := gjson.ParseBytes(trimmed)
+		responseNode := root.Get("response")
+		if !responseNode.Exists() {
+			if root.Get("candidates").Exists() {
+				responseNode = root
+			} else {
+				continue
+			}
+		}
+		responseTemplate = responseNode.Raw
+
+		if traceResult := root.Get("traceId"); traceResult.Exists() && traceResult.String() != "" {
+			traceID = traceResult.String()
+		}
+
+		if roleResult := responseNode.Get("candidates.0.content.role"); roleResult.Exists() {
+			role = roleResult.String()
+		}
+
+		if finishResult := responseNode.Get("candidates.0.finishReason"); finishResult.Exists() && finishResult.String() != "" {
+			finishReason = finishResult.String()
+		}
+
+		if modelResult := responseNode.Get("modelVersion"); modelResult.Exists() && modelResult.String() != "" {
+			modelVersion = modelResult.String()
+		}
+		if responseIDResult := responseNode.Get("responseId"); responseIDResult.Exists() && responseIDResult.String() != "" {
+			responseID = responseIDResult.String()
+		}
+		if usageResult := responseNode.Get("usageMetadata"); usageResult.Exists() {
+			usageRaw = usageResult.Raw
+		} else if usageResult := root.Get("usageMetadata"); usageResult.Exists() {
+			usageRaw = usageResult.Raw
+		}
+
+		if partsResult := responseNode.Get("candidates.0.content.parts"); partsResult.IsArray() {
+			for _, part := range partsResult.Array() {
+				hasFunctionCall := part.Get("functionCall").Exists()
+				hasInlineData := part.Get("inlineData").Exists() || part.Get("inline_data").Exists()
+				sig := part.Get("thoughtSignature").String()
+				if sig == "" {
+					sig = part.Get("thought_signature").String()
+				}
+				text := part.Get("text").String()
+				thought := part.Get("thought").Bool()
+
+				if hasFunctionCall || hasInlineData {
+					flushPending()
+					parts = append(parts, normalizePart(part))
+					continue
+				}
+
+				if thought || part.Get("text").Exists() {
+					kind := "text"
+					if thought {
+						kind = "thought"
+					}
+					if pendingKind != "" && pendingKind != kind {
+						flushPending()
+					}
+					pendingKind = kind
+					pendingText.WriteString(text)
+					if kind == "thought" && sig != "" {
+						pendingThoughtSig = sig
+					}
+					continue
+				}
+
+				flushPending()
+				parts = append(parts, normalizePart(part))
+			}
+		}
+	}
+	flushPending()
+
+	if responseTemplate == "" {
+		responseTemplate = `{"candidates":[{"content":{"role":"model","parts":[]}}]}`
+	}
+
+	partsJSON, _ := json.Marshal(parts)
+	responseTemplate, _ = sjson.SetRaw(responseTemplate, "candidates.0.content.parts", string(partsJSON))
+	if role != "" {
+		responseTemplate, _ = sjson.Set(responseTemplate, "candidates.0.content.role", role)
+	}
+	if finishReason != "" {
+		responseTemplate, _ = sjson.Set(responseTemplate, "candidates.0.finishReason", finishReason)
+	}
+	if modelVersion != "" {
+		responseTemplate, _ = sjson.Set(responseTemplate, "modelVersion", modelVersion)
+	}
+	if responseID != "" {
+		responseTemplate, _ = sjson.Set(responseTemplate, "responseId", responseID)
+	}
+	if usageRaw != "" {
+		responseTemplate, _ = sjson.SetRaw(responseTemplate, "usageMetadata", usageRaw)
+	} else if !gjson.Get(responseTemplate, "usageMetadata").Exists() {
+		responseTemplate, _ = sjson.Set(responseTemplate, "usageMetadata.promptTokenCount", 0)
+		responseTemplate, _ = sjson.Set(responseTemplate, "usageMetadata.candidatesTokenCount", 0)
+		responseTemplate, _ = sjson.Set(responseTemplate, "usageMetadata.totalTokenCount", 0)
+	}
+
+	output := `{"response":{},"traceId":""}`
+	output, _ = sjson.SetRaw(output, "response", responseTemplate)
+	if traceID != "" {
+		output, _ = sjson.Set(output, "traceId", traceID)
+	}
+	return []byte(output)
+}
+
 // ExecuteStream performs a streaming request to the Antigravity API.
 func (e *AntigravityExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (stream <-chan cliproxyexecutor.StreamChunk, err error) {
 	ctx = context.WithValue(ctx, "alt", "")
```
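`convertStreamToNonStream` replays the buffered SSE lines and coalesces consecutive parts of the same kind (`text` vs `thought`) into single parts, flushing whenever the kind changes or a `functionCall`/`inlineData` part arrives. A standalone sketch of that coalescing idea on hypothetical input:

```go
package main

import "fmt"

type part struct {
	thought bool
	text    string
}

// coalesce merges adjacent parts of the same kind, mirroring the
// flushPending logic above in miniature.
func coalesce(in []part) []part {
	var out []part
	for _, p := range in {
		if n := len(out); n > 0 && out[n-1].thought == p.thought {
			out[n-1].text += p.text
			continue
		}
		out = append(out, p)
	}
	return out
}

func main() {
	chunks := []part{{true, "plan "}, {true, "steps"}, {false, "Hel"}, {false, "lo"}}
	fmt.Println(coalesce(chunks)) // [{true plan steps} {false Hello}]
}
```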
```diff
@@ -180,6 +517,7 @@ func (e *AntigravityExecutor) ExecuteStream(ctx context.Context, auth *cliproxya
 	translated := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), true)

 	translated = applyThinkingMetadataCLI(translated, req.Metadata, req.Model)
+	translated = util.ApplyGemini3ThinkingLevelFromMetadataCLI(req.Model, req.Metadata, translated)
 	translated = util.ApplyDefaultThinkingIfNeededCLI(req.Model, translated)
 	translated = normalizeAntigravityThinking(req.Model, translated)
```
```diff
@@ -312,9 +650,131 @@ func (e *AntigravityExecutor) Refresh(ctx context.Context, auth *cliproxyauth.Au
 	return updated, nil
 }

-// CountTokens counts tokens for the given request (not supported for Antigravity).
-func (e *AntigravityExecutor) CountTokens(context.Context, *cliproxyauth.Auth, cliproxyexecutor.Request, cliproxyexecutor.Options) (cliproxyexecutor.Response, error) {
-	return cliproxyexecutor.Response{}, statusErr{code: http.StatusNotImplemented, msg: "count tokens not supported"}
+// CountTokens counts tokens for the given request using the Antigravity API.
+func (e *AntigravityExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (cliproxyexecutor.Response, error) {
+	token, updatedAuth, errToken := e.ensureAccessToken(ctx, auth)
+	if errToken != nil {
+		return cliproxyexecutor.Response{}, errToken
+	}
+	if updatedAuth != nil {
+		auth = updatedAuth
+	}
+	if strings.TrimSpace(token) == "" {
+		return cliproxyexecutor.Response{}, statusErr{code: http.StatusUnauthorized, msg: "missing access token"}
+	}
+
+	from := opts.SourceFormat
+	to := sdktranslator.FromString("antigravity")
+	respCtx := context.WithValue(ctx, "alt", opts.Alt)
+
+	baseURLs := antigravityBaseURLFallbackOrder(auth)
+	httpClient := newProxyAwareHTTPClient(ctx, e.cfg, auth, 0)
+
+	var authID, authLabel, authType, authValue string
+	if auth != nil {
+		authID = auth.ID
+		authLabel = auth.Label
+		authType, authValue = auth.AccountInfo()
+	}
+
+	var lastStatus int
+	var lastBody []byte
+	var lastErr error
+
+	for idx, baseURL := range baseURLs {
+		payload := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), false)
+		payload = applyThinkingMetadataCLI(payload, req.Metadata, req.Model)
+		payload = util.ApplyDefaultThinkingIfNeededCLI(req.Model, payload)
+		payload = normalizeAntigravityThinking(req.Model, payload)
+		payload = deleteJSONField(payload, "project")
+		payload = deleteJSONField(payload, "model")
+		payload = deleteJSONField(payload, "request.safetySettings")
+
+		base := strings.TrimSuffix(baseURL, "/")
+		if base == "" {
+			base = buildBaseURL(auth)
+		}
+
+		var requestURL strings.Builder
+		requestURL.WriteString(base)
+		requestURL.WriteString(antigravityCountTokensPath)
+		if opts.Alt != "" {
+			requestURL.WriteString("?$alt=")
+			requestURL.WriteString(url.QueryEscape(opts.Alt))
+		}
+
+		httpReq, errReq := http.NewRequestWithContext(ctx, http.MethodPost, requestURL.String(), bytes.NewReader(payload))
+		if errReq != nil {
+			return cliproxyexecutor.Response{}, errReq
+		}
+		httpReq.Header.Set("Content-Type", "application/json")
+		httpReq.Header.Set("Authorization", "Bearer "+token)
+		httpReq.Header.Set("User-Agent", resolveUserAgent(auth))
+		httpReq.Header.Set("Accept", "application/json")
+		if host := resolveHost(base); host != "" {
+			httpReq.Host = host
+		}
+
+		recordAPIRequest(ctx, e.cfg, upstreamRequestLog{
+			URL:       requestURL.String(),
+			Method:    http.MethodPost,
+			Headers:   httpReq.Header.Clone(),
+			Body:      payload,
+			Provider:  e.Identifier(),
+			AuthID:    authID,
+			AuthLabel: authLabel,
+			AuthType:  authType,
+			AuthValue: authValue,
+		})
+
+		httpResp, errDo := httpClient.Do(httpReq)
+		if errDo != nil {
+			recordAPIResponseError(ctx, e.cfg, errDo)
+			lastStatus = 0
+			lastBody = nil
+			lastErr = errDo
+			if idx+1 < len(baseURLs) {
+				log.Debugf("antigravity executor: request error on base url %s, retrying with fallback base url: %s", baseURL, baseURLs[idx+1])
+				continue
+			}
+			return cliproxyexecutor.Response{}, errDo
+		}
+
+		recordAPIResponseMetadata(ctx, e.cfg, httpResp.StatusCode, httpResp.Header.Clone())
+		bodyBytes, errRead := io.ReadAll(httpResp.Body)
+		if errClose := httpResp.Body.Close(); errClose != nil {
+			log.Errorf("antigravity executor: close response body error: %v", errClose)
+		}
+		if errRead != nil {
+			recordAPIResponseError(ctx, e.cfg, errRead)
+			return cliproxyexecutor.Response{}, errRead
+		}
+		appendAPIResponseChunk(ctx, e.cfg, bodyBytes)
+
+		if httpResp.StatusCode >= http.StatusOK && httpResp.StatusCode < http.StatusMultipleChoices {
+			count := gjson.GetBytes(bodyBytes, "totalTokens").Int()
+			translated := sdktranslator.TranslateTokenCount(respCtx, to, from, count, bodyBytes)
+			return cliproxyexecutor.Response{Payload: []byte(translated)}, nil
+		}
+
+		lastStatus = httpResp.StatusCode
+		lastBody = append([]byte(nil), bodyBytes...)
+		lastErr = nil
+		if httpResp.StatusCode == http.StatusTooManyRequests && idx+1 < len(baseURLs) {
+			log.Debugf("antigravity executor: rate limited on base url %s, retrying with fallback base url: %s", baseURL, baseURLs[idx+1])
+			continue
+		}
+		return cliproxyexecutor.Response{}, statusErr{code: httpResp.StatusCode, msg: string(bodyBytes)}
+	}
+
+	switch {
+	case lastStatus != 0:
+		return cliproxyexecutor.Response{}, statusErr{code: lastStatus, msg: string(lastBody)}
+	case lastErr != nil:
+		return cliproxyexecutor.Response{}, lastErr
+	default:
+		return cliproxyexecutor.Response{}, statusErr{code: http.StatusServiceUnavailable, msg: "antigravity executor: no base url available"}
+	}
 }

 // FetchAntigravityModels retrieves available models using the supplied auth.
```
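Both `executeClaudeNonStream` and the new `CountTokens` repeat the same base-URL fallback loop: try each candidate in order, fall through to the next on transport errors or HTTP 429, and surface the last status otherwise. The pattern in isolation, with a hypothetical `doOnce` closure standing in for the request-building logic:

```go
// tryBaseURLs is a sketch of the fallback loop used above; doOnce is a
// hypothetical closure performing one attempt against a single base URL.
func tryBaseURLs(baseURLs []string, doOnce func(baseURL string) (status int, body []byte, err error)) (int, []byte, error) {
	var lastStatus int
	var lastBody []byte
	var lastErr error
	for i, u := range baseURLs {
		status, body, err := doOnce(u)
		// Transport error or rate limit: remember the outcome and try the next URL.
		if (err != nil || status == 429) && i+1 < len(baseURLs) {
			lastStatus, lastBody, lastErr = status, body, err
			continue
		}
		return status, body, err
	}
	return lastStatus, lastBody, lastErr
}
```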
```diff
@@ -545,27 +1005,9 @@ func (e *AntigravityExecutor) buildRequest(ctx context.Context, auth *cliproxyau
 			strJSON, _ = util.RenameKey(strJSON, p, p[:len(p)-len("parametersJsonSchema")]+"parameters")
 		}

-		strJSON = util.DeleteKey(strJSON, "$schema")
-		strJSON = util.DeleteKey(strJSON, "maxItems")
-		strJSON = util.DeleteKey(strJSON, "minItems")
-		strJSON = util.DeleteKey(strJSON, "minLength")
-		strJSON = util.DeleteKey(strJSON, "maxLength")
-		strJSON = util.DeleteKey(strJSON, "exclusiveMinimum")
-		strJSON = util.DeleteKey(strJSON, "exclusiveMaximum")
-		strJSON = util.DeleteKey(strJSON, "$ref")
-		strJSON = util.DeleteKey(strJSON, "$defs")
-
-		paths = make([]string, 0)
-		util.Walk(gjson.Parse(strJSON), "", "anyOf", &paths)
-		for _, p := range paths {
-			anyOf := gjson.Get(strJSON, p)
-			if anyOf.IsArray() {
-				anyOfItems := anyOf.Array()
-				if len(anyOfItems) > 0 {
-					strJSON, _ = sjson.SetRaw(strJSON, p[:len(p)-len(".anyOf")], anyOfItems[0].Raw)
-				}
-			}
-		}
+		// Use the centralized schema cleaner to handle unsupported keywords,
+		// const->enum conversion, and flattening of types/anyOf.
+		strJSON = util.CleanJSONSchemaForGemini(strJSON)

 		payload = []byte(strJSON)
 	}
```
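The hand-rolled keyword deletion and `anyOf` flattening above is replaced by `util.CleanJSONSchemaForGemini`. The general idea, as a self-contained sketch over plain maps (this illustrates the technique the removed code performed; the actual helper's behavior, such as its const-to-enum conversion, may differ):

```go
// cleanSchema recursively drops JSON Schema keywords Gemini rejects and
// flattens anyOf to its first variant, as the removed inline code did.
func cleanSchema(v any) any {
	m, ok := v.(map[string]any)
	if !ok {
		if arr, ok := v.([]any); ok {
			for i := range arr {
				arr[i] = cleanSchema(arr[i])
			}
		}
		return v
	}
	for _, k := range []string{"$schema", "$ref", "$defs", "maxItems", "minItems",
		"minLength", "maxLength", "exclusiveMinimum", "exclusiveMaximum"} {
		delete(m, k)
	}
	if anyOf, ok := m["anyOf"].([]any); ok && len(anyOf) > 0 {
		// Replace the node with its first variant, matching the old flattening.
		return cleanSchema(anyOf[0])
	}
	for k, val := range m {
		m[k] = cleanSchema(val)
	}
	return m
}
```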
```diff
@@ -798,6 +1240,8 @@ func modelName2Alias(modelName string) string {
 		return "gemini-3-pro-image-preview"
 	case "gemini-3-pro-high":
 		return "gemini-3-pro-preview"
+	case "gemini-3-flash":
+		return "gemini-3-flash-preview"
 	case "claude-sonnet-4-5":
 		return "gemini-claude-sonnet-4-5"
 	case "claude-sonnet-4-5-thinking":
@@ -819,6 +1263,8 @@ func alias2ModelName(modelName string) string {
 		return "gemini-3-pro-image"
 	case "gemini-3-pro-preview":
 		return "gemini-3-pro-high"
+	case "gemini-3-flash-preview":
+		return "gemini-3-flash"
 	case "gemini-claude-sonnet-4-5":
 		return "claude-sonnet-4-5"
 	case "gemini-claude-sonnet-4-5-thinking":
```
```diff
@@ -79,6 +79,7 @@ func (e *GeminiCLIExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth
 	to := sdktranslator.FromString("gemini-cli")
 	basePayload := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), false)
 	basePayload = applyThinkingMetadataCLI(basePayload, req.Metadata, req.Model)
+	basePayload = util.ApplyGemini3ThinkingLevelFromMetadataCLI(req.Model, req.Metadata, basePayload)
 	basePayload = util.ApplyDefaultThinkingIfNeededCLI(req.Model, basePayload)
 	basePayload = util.NormalizeGeminiCLIThinkingBudget(req.Model, basePayload)
 	basePayload = util.StripThinkingConfigIfUnsupported(req.Model, basePayload)
@@ -217,6 +218,7 @@ func (e *GeminiCLIExecutor) ExecuteStream(ctx context.Context, auth *cliproxyaut
 	to := sdktranslator.FromString("gemini-cli")
 	basePayload := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), true)
 	basePayload = applyThinkingMetadataCLI(basePayload, req.Metadata, req.Model)
+	basePayload = util.ApplyGemini3ThinkingLevelFromMetadataCLI(req.Model, req.Metadata, basePayload)
 	basePayload = util.ApplyDefaultThinkingIfNeededCLI(req.Model, basePayload)
 	basePayload = util.NormalizeGeminiCLIThinkingBudget(req.Model, basePayload)
 	basePayload = util.StripThinkingConfigIfUnsupported(req.Model, basePayload)
@@ -418,6 +420,7 @@ func (e *GeminiCLIExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.
 	for _, attemptModel := range models {
 		payload := sdktranslator.TranslateRequest(from, to, attemptModel, bytes.Clone(req.Payload), false)
 		payload = applyThinkingMetadataCLI(payload, req.Metadata, req.Model)
+		payload = util.ApplyGemini3ThinkingLevelFromMetadataCLI(req.Model, req.Metadata, payload)
 		payload = deleteJSONField(payload, "project")
 		payload = deleteJSONField(payload, "model")
 		payload = deleteJSONField(payload, "request.safetySettings")
```
```diff
@@ -7,10 +7,8 @@ package claude

 import (
 	"bytes"
-	"encoding/json"
 	"strings"

-	client "github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
 	"github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/common"
 	"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
 	"github.com/tidwall/gjson"
```
```diff
@@ -42,27 +40,30 @@ func ConvertClaudeRequestToAntigravity(modelName string, inputRawJSON []byte, _
 	rawJSON = bytes.Replace(rawJSON, []byte(`"url":{"type":"string","format":"uri",`), []byte(`"url":{"type":"string",`), -1)

 	// system instruction
-	var systemInstruction *client.Content
+	systemInstructionJSON := ""
+	hasSystemInstruction := false
 	systemResult := gjson.GetBytes(rawJSON, "system")
 	if systemResult.IsArray() {
 		systemResults := systemResult.Array()
-		systemInstruction = &client.Content{Role: "user", Parts: []client.Part{}}
+		systemInstructionJSON = `{"role":"user","parts":[]}`
 		for i := 0; i < len(systemResults); i++ {
 			systemPromptResult := systemResults[i]
 			systemTypePromptResult := systemPromptResult.Get("type")
 			if systemTypePromptResult.Type == gjson.String && systemTypePromptResult.String() == "text" {
 				systemPrompt := systemPromptResult.Get("text").String()
-				systemPart := client.Part{Text: systemPrompt}
-				systemInstruction.Parts = append(systemInstruction.Parts, systemPart)
+				partJSON := `{}`
+				if systemPrompt != "" {
+					partJSON, _ = sjson.Set(partJSON, "text", systemPrompt)
+				}
+				systemInstructionJSON, _ = sjson.SetRaw(systemInstructionJSON, "parts.-1", partJSON)
+				hasSystemInstruction = true
 			}
 		}
-		if len(systemInstruction.Parts) == 0 {
-			systemInstruction = nil
-		}
 	}

 	// contents
-	contents := make([]client.Content, 0)
+	contentsJSON := "[]"
+	hasContents := false
 	messagesResult := gjson.GetBytes(rawJSON, "messages")
 	if messagesResult.IsArray() {
 		messageResults := messagesResult.Array()
```
```diff
@@ -76,7 +77,8 @@ func ConvertClaudeRequestToAntigravity(modelName string, inputRawJSON []byte, _
 			if role == "assistant" {
 				role = "model"
 			}
-			clientContent := client.Content{Role: role, Parts: []client.Part{}}
+			clientContentJSON := `{"role":"","parts":[]}`
+			clientContentJSON, _ = sjson.Set(clientContentJSON, "role", role)
 			contentsResult := messageResult.Get("content")
 			if contentsResult.IsArray() {
 				contentResults := contentsResult.Array()
```
```diff
@@ -84,36 +86,45 @@ func ConvertClaudeRequestToAntigravity(modelName string, inputRawJSON []byte, _
 					contentResult := contentResults[j]
 					contentTypeResult := contentResult.Get("type")
 					if contentTypeResult.Type == gjson.String && contentTypeResult.String() == "thinking" {
-						// Claude "thinking" blocks are internal-only. They also require a valid provider signature
-						// when replayed as conversation history. Since we cannot mint signatures, only forward
-						// thinking blocks when the client provides a non-empty signature; otherwise, drop them.
+						prompt := contentResult.Get("thinking").String()
 						signatureResult := contentResult.Get("signature")
-						if signatureResult.Type == gjson.String && signatureResult.String() != "" {
-							prompt := contentResult.Get("thinking").String()
-							clientContent.Parts = append(clientContent.Parts, client.Part{
-								Text:             prompt,
-								Thought:          true,
-								ThoughtSignature: signatureResult.String(),
-							})
+						signature := geminiCLIClaudeThoughtSignature
+						if signatureResult.Exists() {
+							signature = signatureResult.String()
 						}
+						partJSON := `{}`
+						partJSON, _ = sjson.Set(partJSON, "thought", true)
+						if prompt != "" {
+							partJSON, _ = sjson.Set(partJSON, "text", prompt)
+						}
+						if signature != "" {
+							partJSON, _ = sjson.Set(partJSON, "thoughtSignature", signature)
+						}
+						clientContentJSON, _ = sjson.SetRaw(clientContentJSON, "parts.-1", partJSON)
 					} else if contentTypeResult.Type == gjson.String && contentTypeResult.String() == "text" {
 						prompt := contentResult.Get("text").String()
-						clientContent.Parts = append(clientContent.Parts, client.Part{Text: prompt})
+						partJSON := `{}`
+						if prompt != "" {
+							partJSON, _ = sjson.Set(partJSON, "text", prompt)
+						}
+						clientContentJSON, _ = sjson.SetRaw(clientContentJSON, "parts.-1", partJSON)
 					} else if contentTypeResult.Type == gjson.String && contentTypeResult.String() == "tool_use" {
 						functionName := contentResult.Get("name").String()
 						functionArgs := contentResult.Get("input").String()
 						functionID := contentResult.Get("id").String()
-						var args map[string]any
-						if err := json.Unmarshal([]byte(functionArgs), &args); err == nil {
-							if strings.Contains(modelName, "claude") {
-								clientContent.Parts = append(clientContent.Parts, client.Part{
-									FunctionCall: &client.FunctionCall{ID: functionID, Name: functionName, Args: args},
-								})
-							} else {
-								clientContent.Parts = append(clientContent.Parts, client.Part{
-									FunctionCall:     &client.FunctionCall{ID: functionID, Name: functionName, Args: args},
-									ThoughtSignature: geminiCLIClaudeThoughtSignature,
-								})
-							}
-						}
+						if gjson.Valid(functionArgs) {
+							argsResult := gjson.Parse(functionArgs)
+							if argsResult.IsObject() {
+								partJSON := `{}`
+								if !strings.Contains(modelName, "claude") {
+									partJSON, _ = sjson.Set(partJSON, "thoughtSignature", geminiCLIClaudeThoughtSignature)
+								}
+								if functionID != "" {
+									partJSON, _ = sjson.Set(partJSON, "functionCall.id", functionID)
+								}
+								partJSON, _ = sjson.Set(partJSON, "functionCall.name", functionName)
+								partJSON, _ = sjson.SetRaw(partJSON, "functionCall.args", argsResult.Raw)
+								clientContentJSON, _ = sjson.SetRaw(clientContentJSON, "parts.-1", partJSON)
+							}
+						}
 					} else if contentTypeResult.Type == gjson.String && contentTypeResult.String() == "tool_result" {
```
```diff
@@ -126,43 +137,70 @@ func ConvertClaudeRequestToAntigravity(modelName string, inputRawJSON []byte, _
 						}
 						functionResponseResult := contentResult.Get("content")

+						functionResponseJSON := `{}`
+						functionResponseJSON, _ = sjson.Set(functionResponseJSON, "id", toolCallID)
+						functionResponseJSON, _ = sjson.Set(functionResponseJSON, "name", funcName)
+
 						responseData := ""
 						if functionResponseResult.Type == gjson.String {
 							responseData = functionResponseResult.String()
+							functionResponseJSON, _ = sjson.Set(functionResponseJSON, "response.result", responseData)
+						} else if functionResponseResult.IsArray() {
+							frResults := functionResponseResult.Array()
+							if len(frResults) == 1 {
+								functionResponseJSON, _ = sjson.SetRaw(functionResponseJSON, "response.result", frResults[0].Raw)
+							} else {
+								functionResponseJSON, _ = sjson.SetRaw(functionResponseJSON, "response.result", functionResponseResult.Raw)
+							}
+						} else if functionResponseResult.IsObject() {
+							functionResponseJSON, _ = sjson.SetRaw(functionResponseJSON, "response.result", functionResponseResult.Raw)
 						} else {
 							responseData = contentResult.Get("content").Raw
+							functionResponseJSON, _ = sjson.SetRaw(functionResponseJSON, "response.result", functionResponseResult.Raw)
 						}

-						functionResponse := client.FunctionResponse{ID: toolCallID, Name: funcName, Response: map[string]interface{}{"result": responseData}}
-						clientContent.Parts = append(clientContent.Parts, client.Part{FunctionResponse: &functionResponse})
+						partJSON := `{}`
+						partJSON, _ = sjson.SetRaw(partJSON, "functionResponse", functionResponseJSON)
+						clientContentJSON, _ = sjson.SetRaw(clientContentJSON, "parts.-1", partJSON)
 					}
 				} else if contentTypeResult.Type == gjson.String && contentTypeResult.String() == "image" {
 					sourceResult := contentResult.Get("source")
 					if sourceResult.Get("type").String() == "base64" {
-						inlineData := &client.InlineData{
-							MimeType: sourceResult.Get("media_type").String(),
-							Data:     sourceResult.Get("data").String(),
-						}
-						clientContent.Parts = append(clientContent.Parts, client.Part{InlineData: inlineData})
+						inlineDataJSON := `{}`
+						if mimeType := sourceResult.Get("media_type").String(); mimeType != "" {
+							inlineDataJSON, _ = sjson.Set(inlineDataJSON, "mime_type", mimeType)
+						}
+						if data := sourceResult.Get("data").String(); data != "" {
+							inlineDataJSON, _ = sjson.Set(inlineDataJSON, "data", data)
+						}
+
+						partJSON := `{}`
+						partJSON, _ = sjson.SetRaw(partJSON, "inlineData", inlineDataJSON)
+						clientContentJSON, _ = sjson.SetRaw(clientContentJSON, "parts.-1", partJSON)
 					}
 				}
 			}
-			if len(clientContent.Parts) > 0 {
-				contents = append(contents, clientContent)
-			}
+			contentsJSON, _ = sjson.SetRaw(contentsJSON, "-1", clientContentJSON)
+			hasContents = true
 		} else if contentsResult.Type == gjson.String {
 			prompt := contentsResult.String()
-			contents = append(contents, client.Content{Role: role, Parts: []client.Part{{Text: prompt}}})
+			partJSON := `{}`
+			if prompt != "" {
+				partJSON, _ = sjson.Set(partJSON, "text", prompt)
+			}
+			clientContentJSON, _ = sjson.SetRaw(clientContentJSON, "parts.-1", partJSON)
+			contentsJSON, _ = sjson.SetRaw(contentsJSON, "-1", clientContentJSON)
+			hasContents = true
 		}
 	}

 	// tools
-	var tools []client.ToolDeclaration
+	toolsJSON := ""
+	toolDeclCount := 0
 	toolsResult := gjson.GetBytes(rawJSON, "tools")
 	if toolsResult.IsArray() {
-		tools = make([]client.ToolDeclaration, 1)
-		tools[0].FunctionDeclarations = make([]any, 0)
+		toolsJSON = `[{"functionDeclarations":[]}]`
 		toolsResults := toolsResult.Array()
 		for i := 0; i < len(toolsResults); i++ {
 			toolResult := toolsResults[i]
```
```diff
@@ -173,30 +211,23 @@ func ConvertClaudeRequestToAntigravity(modelName string, inputRawJSON []byte, _
 			tool, _ = sjson.SetRaw(tool, "parametersJsonSchema", inputSchema)
 			tool, _ = sjson.Delete(tool, "strict")
 			tool, _ = sjson.Delete(tool, "input_examples")
-			var toolDeclaration any
-			if err := json.Unmarshal([]byte(tool), &toolDeclaration); err == nil {
-				tools[0].FunctionDeclarations = append(tools[0].FunctionDeclarations, toolDeclaration)
-			}
+			toolsJSON, _ = sjson.SetRaw(toolsJSON, "0.functionDeclarations.-1", tool)
+			toolDeclCount++
 		}
 	}
-	} else {
-		tools = make([]client.ToolDeclaration, 0)
-	}

 	// Build output Gemini CLI request JSON
 	out := `{"model":"","request":{"contents":[]}}`
 	out, _ = sjson.Set(out, "model", modelName)
-	if systemInstruction != nil {
-		b, _ := json.Marshal(systemInstruction)
-		out, _ = sjson.SetRaw(out, "request.systemInstruction", string(b))
+	if hasSystemInstruction {
+		out, _ = sjson.SetRaw(out, "request.systemInstruction", systemInstructionJSON)
 	}
-	if len(contents) > 0 {
-		b, _ := json.Marshal(contents)
-		out, _ = sjson.SetRaw(out, "request.contents", string(b))
+	if hasContents {
+		out, _ = sjson.SetRaw(out, "request.contents", contentsJSON)
 	}
-	if len(tools) > 0 && len(tools[0].FunctionDeclarations) > 0 {
-		b, _ := json.Marshal(tools)
-		out, _ = sjson.SetRaw(out, "request.tools", string(b))
+	if toolDeclCount > 0 {
+		out, _ = sjson.SetRaw(out, "request.tools", toolsJSON)
 	}

 	// Map Anthropic thinking -> Gemini thinkingBudget/include_thoughts when type==enabled
```
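That trailing comment describes converting an Anthropic thinking block into a Gemini `thinkingConfig`. A hedged sketch of the shape involved — the field names follow the comment, but the exact key paths in the real converter may differ:

```go
import "github.com/tidwall/sjson"

// mapThinking is a sketch, not this repository's code: when the Anthropic
// request carries thinking with type == "enabled", rewrite it as a Gemini
// thinkingConfig with a budget and include_thoughts.
func mapThinking(out []byte, thinkingType string, budgetTokens int64) []byte {
	if thinkingType != "enabled" {
		return out
	}
	s := string(out)
	s, _ = sjson.Set(s, "request.generationConfig.thinkingConfig.thinkingBudget", budgetTokens)
	s, _ = sjson.Set(s, "request.generationConfig.thinkingConfig.include_thoughts", true)
	return []byte(s)
}
```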
```diff
@@ -9,7 +9,6 @@ package claude

 import (
 	"bytes"
 	"context"
-	"encoding/json"
 	"fmt"
 	"strings"
 	"sync/atomic"
```
```diff
@@ -114,54 +113,44 @@ func ConvertAntigravityResponseToClaude(_ context.Context, _ string, originalReq
 				// Extract the different types of content from each part
 				partTextResult := partResult.Get("text")
 				functionCallResult := partResult.Get("functionCall")
 				thoughtSignatureResult := partResult.Get("thoughtSignature")
 				if !thoughtSignatureResult.Exists() {
 					thoughtSignatureResult = partResult.Get("thought_signature")
 				}
 				hasThoughtSignature := thoughtSignatureResult.Exists() && thoughtSignatureResult.String() != ""
 				isThought := partResult.Get("thought").Bool()

 				// Some Antigravity/Vertex Claude streams emit the thought signature as a standalone part
 				// (no text payload). Claude requires this signature to be replayed verbatim on subsequent turns.
 				if isThought && hasThoughtSignature && !partTextResult.Exists() && !functionCallResult.Exists() {
 					if params.ResponseType == 2 {
 						output = output + "event: content_block_delta\n"
 						data, _ := sjson.Set(fmt.Sprintf(`{"type":"content_block_delta","index":%d,"delta":{"type":"signature_delta","signature":""}}`, params.ResponseIndex), "delta.signature", thoughtSignatureResult.String())
 						output = output + fmt.Sprintf("data: %s\n\n\n", data)
 						params.HasContent = true
 					}
 					continue
 				}

 				// Handle text content (both regular content and thinking)
 				if partTextResult.Exists() {
 					// Process thinking content (internal reasoning)
 					if isThought {
 						// Ensure we have an open thinking block to attach thinking/signature deltas to.
 						if params.ResponseType != 2 {
 				if partResult.Get("thought").Bool() {
 					if thoughtSignature := partResult.Get("thoughtSignature"); thoughtSignature.Exists() && thoughtSignature.String() != "" {
 						output = output + "event: content_block_delta\n"
 						data, _ := sjson.Set(fmt.Sprintf(`{"type":"content_block_delta","index":%d,"delta":{"type":"signature_delta","signature":""}}`, params.ResponseIndex), "delta.signature", thoughtSignature.String())
 						output = output + fmt.Sprintf("data: %s\n\n\n", data)
 						params.HasContent = true
 					} else if params.ResponseType == 2 { // Continue existing thinking block if already in thinking state
 						output = output + "event: content_block_delta\n"
 						data, _ := sjson.Set(fmt.Sprintf(`{"type":"content_block_delta","index":%d,"delta":{"type":"thinking_delta","thinking":""}}`, params.ResponseIndex), "delta.thinking", partTextResult.String())
 						output = output + fmt.Sprintf("data: %s\n\n\n", data)
 						params.HasContent = true
 					} else {
 						// Transition from another state to thinking
 						// First, close any existing content block
 						if params.ResponseType != 0 {
 							if params.ResponseType == 2 {
 								// output = output + "event: content_block_delta\n"
 								// output = output + fmt.Sprintf(`data: {"type":"content_block_delta","index":%d,"delta":{"type":"signature_delta","signature":null}}`, params.ResponseIndex)
 								// output = output + "\n\n\n"
 							}
 							output = output + "event: content_block_stop\n"
 							output = output + fmt.Sprintf(`data: {"type":"content_block_stop","index":%d}`, params.ResponseIndex)
 							output = output + "\n\n\n"
 							params.ResponseIndex++
 						}

 						// Start a new thinking content block
 						output = output + "event: content_block_start\n"
 						output = output + fmt.Sprintf(`data: {"type":"content_block_start","index":%d,"content_block":{"type":"thinking","thinking":""}}`, params.ResponseIndex)
 						output = output + "\n\n\n"
 						params.ResponseType = 2
 					}

 					if partTextResult.String() != "" {
 						output = output + "event: content_block_delta\n"
 						data, _ := sjson.Set(fmt.Sprintf(`{"type":"content_block_delta","index":%d,"delta":{"type":"thinking_delta","thinking":""}}`, params.ResponseIndex), "delta.thinking", partTextResult.String())
 						output = output + fmt.Sprintf("data: %s\n\n\n", data)
 						params.HasContent = true
 					}

 					if hasThoughtSignature {
 						output = output + "event: content_block_delta\n"
 						data, _ := sjson.Set(fmt.Sprintf(`{"type":"content_block_delta","index":%d,"delta":{"type":"signature_delta","signature":""}}`, params.ResponseIndex), "delta.signature", thoughtSignatureResult.String())
 						output = output + fmt.Sprintf("data: %s\n\n\n", data)
 						params.ResponseType = 2 // Set state to thinking
 						params.HasContent = true
 					}
 				} else {
```
(The scrape lost the add/remove markers for this hunk's interleaved old and new thought-handling branches, so its lines are shown unmarked in their original order.)
```diff
@@ -360,22 +349,22 @@ func ConvertAntigravityResponseToClaudeNonStream(_ context.Context, _ string, or
 		}
 	}

-	response := map[string]interface{}{
-		"id":            root.Get("response.responseId").String(),
-		"type":          "message",
-		"role":          "assistant",
-		"model":         root.Get("response.modelVersion").String(),
-		"content":       []interface{}{},
-		"stop_reason":   nil,
-		"stop_sequence": nil,
-		"usage": map[string]interface{}{
-			"input_tokens":  promptTokens,
-			"output_tokens": outputTokens,
-		},
-	}
+	responseJSON := `{"id":"","type":"message","role":"assistant","model":"","content":null,"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}`
+	responseJSON, _ = sjson.Set(responseJSON, "id", root.Get("response.responseId").String())
+	responseJSON, _ = sjson.Set(responseJSON, "model", root.Get("response.modelVersion").String())
+	responseJSON, _ = sjson.Set(responseJSON, "usage.input_tokens", promptTokens)
+	responseJSON, _ = sjson.Set(responseJSON, "usage.output_tokens", outputTokens)
+
+	contentArrayInitialized := false
+	ensureContentArray := func() {
+		if contentArrayInitialized {
+			return
+		}
+		responseJSON, _ = sjson.SetRaw(responseJSON, "content", "[]")
+		contentArrayInitialized = true
+	}

 	parts := root.Get("response.candidates.0.content.parts")
-	var contentBlocks []interface{}
 	textBuilder := strings.Builder{}
 	thinkingBuilder := strings.Builder{}
 	thinkingSignature := ""
```
```diff
@@ -386,48 +375,45 @@ func ConvertAntigravityResponseToClaudeNonStream(_ context.Context, _ string, or
 		if textBuilder.Len() == 0 {
 			return
 		}
-		contentBlocks = append(contentBlocks, map[string]interface{}{
-			"type": "text",
-			"text": textBuilder.String(),
-		})
+		ensureContentArray()
+		block := `{"type":"text","text":""}`
+		block, _ = sjson.Set(block, "text", textBuilder.String())
+		responseJSON, _ = sjson.SetRaw(responseJSON, "content.-1", block)
 		textBuilder.Reset()
 	}

 	flushThinking := func() {
-		if thinkingBuilder.Len() == 0 {
+		if thinkingBuilder.Len() == 0 && thinkingSignature == "" {
 			return
 		}
-		block := map[string]interface{}{
-			"type":     "thinking",
-			"thinking": thinkingBuilder.String(),
-		}
+		ensureContentArray()
+		block := `{"type":"thinking","thinking":""}`
+		block, _ = sjson.Set(block, "thinking", thinkingBuilder.String())
 		if thinkingSignature != "" {
-			block["signature"] = thinkingSignature
+			block, _ = sjson.Set(block, "signature", thinkingSignature)
 		}
-		contentBlocks = append(contentBlocks, block)
+		responseJSON, _ = sjson.SetRaw(responseJSON, "content.-1", block)
 		thinkingBuilder.Reset()
 		thinkingSignature = ""
 	}

 	if parts.IsArray() {
 		for _, part := range parts.Array() {
-			thoughtSignatureResult := part.Get("thoughtSignature")
-			if !thoughtSignatureResult.Exists() {
-				thoughtSignatureResult = part.Get("thought_signature")
-			}
-			if part.Get("thought").Bool() && thoughtSignatureResult.Exists() && thoughtSignatureResult.String() != "" && (!part.Get("text").Exists() || part.Get("text").String() == "") {
-				// Signature-only thought part (no text payload).
-				thinkingSignature = thoughtSignatureResult.String()
-				continue
+			isThought := part.Get("thought").Bool()
+			if isThought {
+				sig := part.Get("thoughtSignature")
+				if !sig.Exists() {
+					sig = part.Get("thought_signature")
+				}
+				if sig.Exists() && sig.String() != "" {
+					thinkingSignature = sig.String()
+				}
 			}

 			if text := part.Get("text"); text.Exists() && text.String() != "" {
-				if part.Get("thought").Bool() {
+				if isThought {
 					flushText()
 					thinkingBuilder.WriteString(text.String())
-					if thoughtSignatureResult.Exists() && thoughtSignatureResult.String() != "" {
-						thinkingSignature = thoughtSignatureResult.String()
-					}
 					continue
 				}
 				flushThinking()
```
@@ -442,21 +428,16 @@ func ConvertAntigravityResponseToClaudeNonStream(_ context.Context, _ string, or

				name := functionCall.Get("name").String()
				toolIDCounter++
				toolBlock := map[string]interface{}{
					"type": "tool_use",
					"id": fmt.Sprintf("tool_%d", toolIDCounter),
					"name": name,
					"input": map[string]interface{}{},
				toolBlock := `{"type":"tool_use","id":"","name":"","input":{}}`
				toolBlock, _ = sjson.Set(toolBlock, "id", fmt.Sprintf("tool_%d", toolIDCounter))
				toolBlock, _ = sjson.Set(toolBlock, "name", name)

				if args := functionCall.Get("args"); args.Exists() && args.Raw != "" && gjson.Valid(args.Raw) {
					toolBlock, _ = sjson.SetRaw(toolBlock, "input", args.Raw)
				}

				if args := functionCall.Get("args"); args.Exists() {
					var parsed interface{}
					if err := json.Unmarshal([]byte(args.Raw), &parsed); err == nil {
						toolBlock["input"] = parsed
					}
				}

				contentBlocks = append(contentBlocks, toolBlock)
				ensureContentArray()
				responseJSON, _ = sjson.SetRaw(responseJSON, "content.-1", toolBlock)
				continue
			}
		}
@@ -465,8 +446,6 @@ func ConvertAntigravityResponseToClaudeNonStream(_ context.Context, _ string, or
	flushThinking()
	flushText()

	response["content"] = contentBlocks

	stopReason := "end_turn"
	if hasToolCall {
		stopReason = "tool_use"
@@ -482,19 +461,15 @@ func ConvertAntigravityResponseToClaudeNonStream(_ context.Context, _ string, or
			}
		}
	}
	response["stop_reason"] = stopReason
	responseJSON, _ = sjson.Set(responseJSON, "stop_reason", stopReason)

	if usage := response["usage"].(map[string]interface{}); usage["input_tokens"] == int64(0) && usage["output_tokens"] == int64(0) {
	if promptTokens == 0 && outputTokens == 0 {
		if usageMeta := root.Get("response.usageMetadata"); !usageMeta.Exists() {
			delete(response, "usage")
			responseJSON, _ = sjson.Delete(responseJSON, "usage")
		}
	}

	encoded, err := json.Marshal(response)
	if err != nil {
		return ""
	}
	return string(encoded)
	return responseJSON
}

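Note: the change above swaps the intermediate map[string]interface{} plus json.Marshal pipeline for direct sjson string patching with a lazily initialized content array. A minimal, runnable sketch of that append pattern (the id and text values here are made up for illustration):

// Minimal sketch of the sjson append pattern used above (hypothetical values).
package main

import (
	"fmt"

	"github.com/tidwall/sjson"
)

func main() {
	msg := `{"id":"","content":null}`
	msg, _ = sjson.Set(msg, "id", "msg_123")
	// Lazily turn "content" into an array, then append blocks via the "-1" path.
	msg, _ = sjson.SetRaw(msg, "content", "[]")
	block, _ := sjson.Set(`{"type":"text","text":""}`, "text", "hello")
	msg, _ = sjson.SetRaw(msg, "content.-1", block)
	fmt.Println(msg) // {"id":"msg_123","content":[{"type":"text","text":"hello"}]}
}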
func ClaudeTokenCount(ctx context.Context, count int64) string {

@@ -222,62 +222,61 @@ func ConvertOpenAIRequestToAntigravity(modelName string, inputRawJSON []byte, _
			}
			out, _ = sjson.SetRawBytes(out, "request.contents.-1", node)
		} else if role == "assistant" {
			node := []byte(`{"role":"model","parts":[]}`)
			p := 0
			if content.Type == gjson.String {
				// Assistant text -> single model content
				node := []byte(`{"role":"model","parts":[{"text":""}]}`)
				node, _ = sjson.SetBytes(node, "parts.0.text", content.String())
				node, _ = sjson.SetBytes(node, "parts.-1.text", content.String())
				out, _ = sjson.SetRawBytes(out, "request.contents.-1", node)
			} else if !content.Exists() || content.Type == gjson.Null {
				// Tool calls -> single model content with functionCall parts
				tcs := m.Get("tool_calls")
				if tcs.IsArray() {
					node := []byte(`{"role":"model","parts":[]}`)
					p := 0
					fIDs := make([]string, 0)
					for _, tc := range tcs.Array() {
						if tc.Get("type").String() != "function" {
							continue
						}
						fid := tc.Get("id").String()
						fname := tc.Get("function.name").String()
						fargs := tc.Get("function.arguments").String()
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".functionCall.id", fid)
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".functionCall.name", fname)
						node, _ = sjson.SetRawBytes(node, "parts."+itoa(p)+".functionCall.args", []byte(fargs))
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".thoughtSignature", geminiCLIFunctionThoughtSignature)
						p++
						if fid != "" {
							fIDs = append(fIDs, fid)
						}
					}
					out, _ = sjson.SetRawBytes(out, "request.contents.-1", node)
					p++
				}

				// Append a single tool content combining name + response per function
				toolNode := []byte(`{"role":"user","parts":[]}`)
				pp := 0
				for _, fid := range fIDs {
					if name, ok := tcID2Name[fid]; ok {
						toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.id", fid)
						toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.name", name)
						resp := toolResponses[fid]
						if resp == "" {
							resp = "{}"
						}
						// Handle non-JSON output gracefully (matches dev branch approach)
						if resp != "null" {
							parsed := gjson.Parse(resp)
							if parsed.Type == gjson.JSON {
								toolNode, _ = sjson.SetRawBytes(toolNode, "parts."+itoa(pp)+".functionResponse.response.result", []byte(parsed.Raw))
							} else {
								toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.response.result", resp)
							}
						}
						pp++
				// Tool calls -> single model content with functionCall parts
				tcs := m.Get("tool_calls")
				if tcs.IsArray() {
					fIDs := make([]string, 0)
					for _, tc := range tcs.Array() {
						if tc.Get("type").String() != "function" {
							continue
						}
						fid := tc.Get("id").String()
						fname := tc.Get("function.name").String()
						fargs := tc.Get("function.arguments").String()
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".functionCall.id", fid)
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".functionCall.name", fname)
						node, _ = sjson.SetRawBytes(node, "parts."+itoa(p)+".functionCall.args", []byte(fargs))
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".thoughtSignature", geminiCLIFunctionThoughtSignature)
						p++
						if fid != "" {
							fIDs = append(fIDs, fid)
						}
					}
					out, _ = sjson.SetRawBytes(out, "request.contents.-1", node)

					// Append a single tool content combining name + response per function
					toolNode := []byte(`{"role":"user","parts":[]}`)
					pp := 0
					for _, fid := range fIDs {
						if name, ok := tcID2Name[fid]; ok {
							toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.id", fid)
							toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.name", name)
							resp := toolResponses[fid]
							if resp == "" {
								resp = "{}"
							}
							// Handle non-JSON output gracefully (matches dev branch approach)
							if resp != "null" {
								parsed := gjson.Parse(resp)
								if parsed.Type == gjson.JSON {
									toolNode, _ = sjson.SetRawBytes(toolNode, "parts."+itoa(pp)+".functionResponse.response.result", []byte(parsed.Raw))
								} else {
									toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.response.result", resp)
								}
							}
							pp++
						}
					if pp > 0 {
						out, _ = sjson.SetRawBytes(out, "request.contents.-1", toolNode)
					}
				}
				if pp > 0 {
					out, _ = sjson.SetRawBytes(out, "request.contents.-1", toolNode)
				}
			}
		}
@@ -361,18 +360,3 @@ func ConvertOpenAIRequestToAntigravity(modelName string, inputRawJSON []byte, _

// itoa converts int to string without a strconv import for its few usages.
func itoa(i int) string { return fmt.Sprintf("%d", i) }

// quoteIfNeeded ensures a string is a valid JSON value (quotes plain text); JSON objects/arrays pass through.
func quoteIfNeeded(s string) string {
	s = strings.TrimSpace(s)
	if s == "" {
		return "\"\""
	}
	if len(s) > 0 && (s[0] == '{' || s[0] == '[') {
		return s
	}
	// escape quotes minimally
	s = strings.ReplaceAll(s, "\\", "\\\\")
	s = strings.ReplaceAll(s, "\"", "\\\"")
	return "\"" + s + "\""
}

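For reference, the quoteIfNeeded helper removed above wrapped plain text in JSON quotes while letting objects and arrays pass through untouched. A behavior sketch (inputs hypothetical; assumes fmt and the same package as the helper):

func exampleQuoteIfNeeded() {
	fmt.Println(quoteIfNeeded(`{"ok":true}`))  // {"ok":true} — JSON objects pass through as-is
	fmt.Println(quoteIfNeeded(`plain "text"`)) // "plain \"text\"" — plain text is quoted, quotes escaped
	fmt.Println(quoteIfNeeded(""))             // "" — empty input becomes an empty JSON string
}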
@@ -205,52 +205,52 @@ func ConvertOpenAIRequestToGeminiCLI(modelName string, inputRawJSON []byte, _ bo
			}
			out, _ = sjson.SetRawBytes(out, "request.contents.-1", node)
		} else if role == "assistant" {
			p := 0
			node := []byte(`{"role":"model","parts":[]}`)
			if content.Type == gjson.String {
				// Assistant text -> single model content
				node := []byte(`{"role":"model","parts":[{"text":""}]}`)
				node, _ = sjson.SetBytes(node, "parts.0.text", content.String())
				node, _ = sjson.SetBytes(node, "parts.-1.text", content.String())
				out, _ = sjson.SetRawBytes(out, "request.contents.-1", node)
			} else if !content.Exists() || content.Type == gjson.Null {
				// Tool calls -> single model content with functionCall parts
				tcs := m.Get("tool_calls")
				if tcs.IsArray() {
					node := []byte(`{"role":"model","parts":[]}`)
					p := 0
					fIDs := make([]string, 0)
					for _, tc := range tcs.Array() {
						if tc.Get("type").String() != "function" {
							continue
						}
						fid := tc.Get("id").String()
						fname := tc.Get("function.name").String()
						fargs := tc.Get("function.arguments").String()
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".functionCall.name", fname)
						node, _ = sjson.SetRawBytes(node, "parts."+itoa(p)+".functionCall.args", []byte(fargs))
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".thoughtSignature", geminiCLIFunctionThoughtSignature)
						p++
						if fid != "" {
							fIDs = append(fIDs, fid)
						}
					}
					out, _ = sjson.SetRawBytes(out, "request.contents.-1", node)
					p++
				}

				// Append a single tool content combining name + response per function
				toolNode := []byte(`{"role":"tool","parts":[]}`)
				pp := 0
				for _, fid := range fIDs {
					if name, ok := tcID2Name[fid]; ok {
						toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.name", name)
						resp := toolResponses[fid]
						if resp == "" {
							resp = "{}"
						}
						toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.response.result", []byte(resp))
						pp++
				// Tool calls -> single model content with functionCall parts
				tcs := m.Get("tool_calls")
				if tcs.IsArray() {
					fIDs := make([]string, 0)
					for _, tc := range tcs.Array() {
						if tc.Get("type").String() != "function" {
							continue
						}
						fid := tc.Get("id").String()
						fname := tc.Get("function.name").String()
						fargs := tc.Get("function.arguments").String()
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".functionCall.name", fname)
						node, _ = sjson.SetRawBytes(node, "parts."+itoa(p)+".functionCall.args", []byte(fargs))
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".thoughtSignature", geminiCLIFunctionThoughtSignature)
						p++
						if fid != "" {
							fIDs = append(fIDs, fid)
						}
					}
					out, _ = sjson.SetRawBytes(out, "request.contents.-1", node)

					// Append a single tool content combining name + response per function
					toolNode := []byte(`{"role":"tool","parts":[]}`)
					pp := 0
					for _, fid := range fIDs {
						if name, ok := tcID2Name[fid]; ok {
							toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.name", name)
							resp := toolResponses[fid]
							if resp == "" {
								resp = "{}"
							}
							toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.response.result", []byte(resp))
							pp++
						}
					if pp > 0 {
						out, _ = sjson.SetRawBytes(out, "request.contents.-1", toolNode)
					}
				}
				if pp > 0 {
					out, _ = sjson.SetRawBytes(out, "request.contents.-1", toolNode)
				}
			}
		}
@@ -334,18 +334,3 @@ func ConvertOpenAIRequestToGeminiCLI(modelName string, inputRawJSON []byte, _ bo

// itoa converts int to string without a strconv import for its few usages.
func itoa(i int) string { return fmt.Sprintf("%d", i) }

// quoteIfNeeded ensures a string is a valid JSON value (quotes plain text); JSON objects/arrays pass through.
func quoteIfNeeded(s string) string {
	s = strings.TrimSpace(s)
	if s == "" {
		return "\"\""
	}
	if len(s) > 0 && (s[0] == '{' || s[0] == '[') {
		return s
	}
	// escape quotes minimally
	s = strings.ReplaceAll(s, "\\", "\\\\")
	s = strings.ReplaceAll(s, "\"", "\\\"")
	return "\"" + s + "\""
}

@@ -25,7 +25,7 @@ type Params struct {
	HasFirstResponse bool
	ResponseType     int
	ResponseIndex    int
	HasContent       bool // Tracks whether any content (text, thinking, or tool use) has been output
}

// toolUseIDCounter provides a process-wide unique counter for tool use identifiers.
@@ -179,6 +179,18 @@ func ConvertGeminiResponseToClaude(_ context.Context, _ string, originalRequestR
			usedTool = true
			fcName := functionCallResult.Get("name").String()

			// FIX: Handle streaming split/delta where name might be empty in subsequent chunks.
			// If we are already in tool use mode and name is empty, treat as continuation (delta).
			if (*param).(*Params).ResponseType == 3 && fcName == "" {
				if fcArgsResult := functionCallResult.Get("args"); fcArgsResult.Exists() {
					output = output + "event: content_block_delta\n"
					data, _ := sjson.Set(fmt.Sprintf(`{"type":"content_block_delta","index":%d,"delta":{"type":"input_json_delta","partial_json":""}}`, (*param).(*Params).ResponseIndex), "delta.partial_json", fcArgsResult.Raw)
					output = output + fmt.Sprintf("data: %s\n\n\n", data)
				}
				// Continue to next part without closing/opening logic
				continue
			}

			// Handle state transitions when switching to function calls
			// Close any existing function call block first
			if (*param).(*Params).ResponseType == 3 {

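The continuation branch above keeps streaming tool-call argument fragments flowing as Anthropic-style input_json_delta events instead of opening a new block. On the wire, each such fragment would look roughly like this (the index and arguments are hypothetical):

event: content_block_delta
data: {"type":"content_block_delta","index":1,"delta":{"type":"input_json_delta","partial_json":"{\"city\":\"Paris\"}"}}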
@@ -207,15 +207,16 @@ func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool)
			}
			out, _ = sjson.SetRawBytes(out, "contents.-1", node)
		} else if role == "assistant" {
			node := []byte(`{"role":"model","parts":[]}`)
			p := 0

			if content.Type == gjson.String {
				// Assistant text -> single model content
				node := []byte(`{"role":"model","parts":[{"text":""}]}`)
				node, _ = sjson.SetBytes(node, "parts.0.text", content.String())
				node, _ = sjson.SetBytes(node, "parts.-1.text", content.String())
				out, _ = sjson.SetRawBytes(out, "contents.-1", node)
				p++
			} else if content.IsArray() {
				// Assistant multimodal content (e.g. text + image) -> single model content with parts
				node := []byte(`{"role":"model","parts":[]}`)
				p := 0
				for _, item := range content.Array() {
					switch item.Get("type").String() {
					case "text":
@@ -237,47 +238,45 @@ func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool)
					}
				}
				out, _ = sjson.SetRawBytes(out, "contents.-1", node)
			} else if !content.Exists() || content.Type == gjson.Null {
				// Tool calls -> single model content with functionCall parts
				tcs := m.Get("tool_calls")
				if tcs.IsArray() {
					node := []byte(`{"role":"model","parts":[]}`)
					p := 0
					fIDs := make([]string, 0)
					for _, tc := range tcs.Array() {
						if tc.Get("type").String() != "function" {
							continue
						}
						fid := tc.Get("id").String()
						fname := tc.Get("function.name").String()
						fargs := tc.Get("function.arguments").String()
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".functionCall.name", fname)
						node, _ = sjson.SetRawBytes(node, "parts."+itoa(p)+".functionCall.args", []byte(fargs))
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".thoughtSignature", geminiFunctionThoughtSignature)
						p++
						if fid != "" {
							fIDs = append(fIDs, fid)
						}
					}
					out, _ = sjson.SetRawBytes(out, "contents.-1", node)
				}

				// Append a single tool content combining name + response per function
				toolNode := []byte(`{"role":"tool","parts":[]}`)
				pp := 0
				for _, fid := range fIDs {
					if name, ok := tcID2Name[fid]; ok {
						toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.name", name)
						resp := toolResponses[fid]
						if resp == "" {
							resp = "{}"
						}
						toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.response.result", []byte(resp))
						pp++
				// Tool calls -> single model content with functionCall parts
				tcs := m.Get("tool_calls")
				if tcs.IsArray() {
					fIDs := make([]string, 0)
					for _, tc := range tcs.Array() {
						if tc.Get("type").String() != "function" {
							continue
						}
						fid := tc.Get("id").String()
						fname := tc.Get("function.name").String()
						fargs := tc.Get("function.arguments").String()
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".functionCall.name", fname)
						node, _ = sjson.SetRawBytes(node, "parts."+itoa(p)+".functionCall.args", []byte(fargs))
						node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".thoughtSignature", geminiFunctionThoughtSignature)
						p++
						if fid != "" {
							fIDs = append(fIDs, fid)
						}
					}
					out, _ = sjson.SetRawBytes(out, "contents.-1", node)

					// Append a single tool content combining name + response per function
					toolNode := []byte(`{"role":"tool","parts":[]}`)
					pp := 0
					for _, fid := range fIDs {
						if name, ok := tcID2Name[fid]; ok {
							toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.name", name)
							resp := toolResponses[fid]
							if resp == "" {
								resp = "{}"
							}
							toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.response.result", []byte(resp))
							pp++
						}
					if pp > 0 {
						out, _ = sjson.SetRawBytes(out, "contents.-1", toolNode)
					}
				}
				if pp > 0 {
					out, _ = sjson.SetRawBytes(out, "contents.-1", toolNode)
				}
			}
		}
@@ -363,18 +362,3 @@ func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool)

// itoa converts int to string without a strconv import for its few usages.
func itoa(i int) string { return fmt.Sprintf("%d", i) }

// quoteIfNeeded ensures a string is a valid JSON value (quotes plain text); JSON objects/arrays pass through.
func quoteIfNeeded(s string) string {
	s = strings.TrimSpace(s)
	if s == "" {
		return "\"\""
	}
	if len(s) > 0 && (s[0] == '{' || s[0] == '[') {
		return s
	}
	// escape quotes minimally
	s = strings.ReplaceAll(s, "\\", "\\\\")
	s = strings.ReplaceAll(s, "\"", "\\\"")
	return "\"" + s + "\""
}

496
internal/util/gemini_schema.go
Normal file
@@ -0,0 +1,496 @@
// Package util provides utility functions for the CLI Proxy API server.
package util

import (
	"fmt"
	"sort"
	"strings"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

var gjsonPathKeyReplacer = strings.NewReplacer(".", "\\.", "*", "\\*", "?", "\\?")

// CleanJSONSchemaForGemini transforms a JSON schema to be compatible with the Gemini/Antigravity API.
// It handles unsupported keywords, type flattening, and schema simplification while preserving
// semantic information as description hints.
func CleanJSONSchemaForGemini(jsonStr string) string {
	// Phase 1: Convert and add hints
	jsonStr = convertRefsToHints(jsonStr)
	jsonStr = convertConstToEnum(jsonStr)
	jsonStr = addEnumHints(jsonStr)
	jsonStr = addAdditionalPropertiesHints(jsonStr)
	jsonStr = moveConstraintsToDescription(jsonStr)

	// Phase 2: Flatten complex structures
	jsonStr = mergeAllOf(jsonStr)
	jsonStr = flattenAnyOfOneOf(jsonStr)
	jsonStr = flattenTypeArrays(jsonStr)

	// Phase 3: Cleanup
	jsonStr = removeUnsupportedKeywords(jsonStr)
	jsonStr = cleanupRequiredFields(jsonStr)

	return jsonStr
}

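A quick before/after sketch of the pipeline on a tiny schema (input is hypothetical): the const becomes an enum in Phase 1, the unsupported minLength constraint is preserved as a description hint, and the raw keywords are stripped in Phase 3.

in := `{"type":"object","properties":{"status":{"type":"string","const":"ok","minLength":2}}}`
out := CleanJSONSchemaForGemini(in)
// out ≈ {"type":"object","properties":{"status":{"type":"string","enum":["ok"],"description":"minLength: 2"}}}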
// convertRefsToHints converts $ref to description hints (Lazy Hint strategy).
func convertRefsToHints(jsonStr string) string {
	paths := findPaths(jsonStr, "$ref")
	sortByDepth(paths)

	for _, p := range paths {
		refVal := gjson.Get(jsonStr, p).String()
		defName := refVal
		if idx := strings.LastIndex(refVal, "/"); idx >= 0 {
			defName = refVal[idx+1:]
		}

		parentPath := trimSuffix(p, ".$ref")
		hint := fmt.Sprintf("See: %s", defName)
		if existing := gjson.Get(jsonStr, descriptionPath(parentPath)).String(); existing != "" {
			hint = fmt.Sprintf("%s (%s)", existing, hint)
		}

		replacement := `{"type":"object","description":""}`
		replacement, _ = sjson.Set(replacement, "description", hint)
		jsonStr = setRawAt(jsonStr, parentPath, replacement)
	}
	return jsonStr
}

func convertConstToEnum(jsonStr string) string {
	for _, p := range findPaths(jsonStr, "const") {
		val := gjson.Get(jsonStr, p)
		if !val.Exists() {
			continue
		}
		enumPath := trimSuffix(p, ".const") + ".enum"
		if !gjson.Get(jsonStr, enumPath).Exists() {
			jsonStr, _ = sjson.Set(jsonStr, enumPath, []interface{}{val.Value()})
		}
	}
	return jsonStr
}

func addEnumHints(jsonStr string) string {
	for _, p := range findPaths(jsonStr, "enum") {
		arr := gjson.Get(jsonStr, p)
		if !arr.IsArray() {
			continue
		}
		items := arr.Array()
		if len(items) <= 1 || len(items) > 10 {
			continue
		}

		var vals []string
		for _, item := range items {
			vals = append(vals, item.String())
		}
		jsonStr = appendHint(jsonStr, trimSuffix(p, ".enum"), "Allowed: "+strings.Join(vals, ", "))
	}
	return jsonStr
}

func addAdditionalPropertiesHints(jsonStr string) string {
	for _, p := range findPaths(jsonStr, "additionalProperties") {
		if gjson.Get(jsonStr, p).Type == gjson.False {
			jsonStr = appendHint(jsonStr, trimSuffix(p, ".additionalProperties"), "No extra properties allowed")
		}
	}
	return jsonStr
}

var unsupportedConstraints = []string{
	"minLength", "maxLength", "exclusiveMinimum", "exclusiveMaximum",
	"pattern", "minItems", "maxItems",
}

func moveConstraintsToDescription(jsonStr string) string {
	for _, key := range unsupportedConstraints {
		for _, p := range findPaths(jsonStr, key) {
			val := gjson.Get(jsonStr, p)
			if !val.Exists() || val.IsObject() || val.IsArray() {
				continue
			}
			parentPath := trimSuffix(p, "."+key)
			if isPropertyDefinition(parentPath) {
				continue
			}
			jsonStr = appendHint(jsonStr, parentPath, fmt.Sprintf("%s: %s", key, val.String()))
		}
	}
	return jsonStr
}

func mergeAllOf(jsonStr string) string {
	paths := findPaths(jsonStr, "allOf")
	sortByDepth(paths)

	for _, p := range paths {
		allOf := gjson.Get(jsonStr, p)
		if !allOf.IsArray() {
			continue
		}
		parentPath := trimSuffix(p, ".allOf")

		for _, item := range allOf.Array() {
			if props := item.Get("properties"); props.IsObject() {
				props.ForEach(func(key, value gjson.Result) bool {
					destPath := joinPath(parentPath, "properties."+escapeGJSONPathKey(key.String()))
					jsonStr, _ = sjson.SetRaw(jsonStr, destPath, value.Raw)
					return true
				})
			}
			if req := item.Get("required"); req.IsArray() {
				reqPath := joinPath(parentPath, "required")
				current := getStrings(jsonStr, reqPath)
				for _, r := range req.Array() {
					if s := r.String(); !contains(current, s) {
						current = append(current, s)
					}
				}
				jsonStr, _ = sjson.Set(jsonStr, reqPath, current)
			}
		}
		jsonStr, _ = sjson.Delete(jsonStr, p)
	}
	return jsonStr
}

func flattenAnyOfOneOf(jsonStr string) string {
	for _, key := range []string{"anyOf", "oneOf"} {
		paths := findPaths(jsonStr, key)
		sortByDepth(paths)

		for _, p := range paths {
			arr := gjson.Get(jsonStr, p)
			if !arr.IsArray() || len(arr.Array()) == 0 {
				continue
			}

			parentPath := trimSuffix(p, "."+key)
			parentDesc := gjson.Get(jsonStr, descriptionPath(parentPath)).String()

			items := arr.Array()
			bestIdx, allTypes := selectBest(items)
			selected := items[bestIdx].Raw

			if parentDesc != "" {
				selected = mergeDescriptionRaw(selected, parentDesc)
			}

			if len(allTypes) > 1 {
				hint := "Accepts: " + strings.Join(allTypes, " | ")
				selected = appendHintRaw(selected, hint)
			}

			jsonStr = setRawAt(jsonStr, parentPath, selected)
		}
	}
	return jsonStr
}

func selectBest(items []gjson.Result) (bestIdx int, types []string) {
	bestScore := -1
	for i, item := range items {
		t := item.Get("type").String()
		score := 0

		switch {
		case t == "object" || item.Get("properties").Exists():
			score, t = 3, orDefault(t, "object")
		case t == "array" || item.Get("items").Exists():
			score, t = 2, orDefault(t, "array")
		case t != "" && t != "null":
			score = 1
		default:
			t = orDefault(t, "null")
		}

		if t != "" {
			types = append(types, t)
		}
		if score > bestScore {
			bestScore, bestIdx = score, i
		}
	}
	return
}

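selectBest drives the anyOf/oneOf flattening above: object schemas (score 3) beat arrays (score 2), which beat non-null scalars (score 1), with null as the fallback. A test-style sketch inside the same package (values hypothetical):

items := gjson.Parse(`[{"type":"null"},{"type":"string"},{"type":"object"}]`).Array()
idx, types := selectBest(items)
// idx == 2 — the object alternative wins the score comparison
// types == []string{"null", "string", "object"} — every alternative is recorded for the "Accepts:" hint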
func flattenTypeArrays(jsonStr string) string {
	paths := findPaths(jsonStr, "type")
	sortByDepth(paths)

	nullableFields := make(map[string][]string)

	for _, p := range paths {
		res := gjson.Get(jsonStr, p)
		if !res.IsArray() || len(res.Array()) == 0 {
			continue
		}

		hasNull := false
		var nonNullTypes []string
		for _, item := range res.Array() {
			s := item.String()
			if s == "null" {
				hasNull = true
			} else if s != "" {
				nonNullTypes = append(nonNullTypes, s)
			}
		}

		firstType := "string"
		if len(nonNullTypes) > 0 {
			firstType = nonNullTypes[0]
		}

		jsonStr, _ = sjson.Set(jsonStr, p, firstType)

		parentPath := trimSuffix(p, ".type")
		if len(nonNullTypes) > 1 {
			hint := "Accepts: " + strings.Join(nonNullTypes, " | ")
			jsonStr = appendHint(jsonStr, parentPath, hint)
		}

		if hasNull {
			parts := splitGJSONPath(p)
			if len(parts) >= 3 && parts[len(parts)-3] == "properties" {
				fieldNameEscaped := parts[len(parts)-2]
				fieldName := unescapeGJSONPathKey(fieldNameEscaped)
				objectPath := strings.Join(parts[:len(parts)-3], ".")
				nullableFields[objectPath] = append(nullableFields[objectPath], fieldName)

				propPath := joinPath(objectPath, "properties."+fieldNameEscaped)
				jsonStr = appendHint(jsonStr, propPath, "(nullable)")
			}
		}
	}

	for objectPath, fields := range nullableFields {
		reqPath := joinPath(objectPath, "required")
		req := gjson.Get(jsonStr, reqPath)
		if !req.IsArray() {
			continue
		}

		var filtered []string
		for _, r := range req.Array() {
			if !contains(fields, r.String()) {
				filtered = append(filtered, r.String())
			}
		}

		if len(filtered) == 0 {
			jsonStr, _ = sjson.Delete(jsonStr, reqPath)
		} else {
			jsonStr, _ = sjson.Set(jsonStr, reqPath, filtered)
		}
	}
	return jsonStr
}

func removeUnsupportedKeywords(jsonStr string) string {
	keywords := append(unsupportedConstraints,
		"$schema", "$defs", "definitions", "const", "$ref", "additionalProperties",
	)
	for _, key := range keywords {
		for _, p := range findPaths(jsonStr, key) {
			if isPropertyDefinition(trimSuffix(p, "."+key)) {
				continue
			}
			jsonStr, _ = sjson.Delete(jsonStr, p)
		}
	}
	return jsonStr
}

func cleanupRequiredFields(jsonStr string) string {
	for _, p := range findPaths(jsonStr, "required") {
		parentPath := trimSuffix(p, ".required")
		propsPath := joinPath(parentPath, "properties")

		req := gjson.Get(jsonStr, p)
		props := gjson.Get(jsonStr, propsPath)
		if !req.IsArray() || !props.IsObject() {
			continue
		}

		var valid []string
		for _, r := range req.Array() {
			key := r.String()
			if props.Get(escapeGJSONPathKey(key)).Exists() {
				valid = append(valid, key)
			}
		}

		if len(valid) != len(req.Array()) {
			if len(valid) == 0 {
				jsonStr, _ = sjson.Delete(jsonStr, p)
			} else {
				jsonStr, _ = sjson.Set(jsonStr, p, valid)
			}
		}
	}
	return jsonStr
}

// --- Helpers ---

func findPaths(jsonStr, field string) []string {
	var paths []string
	Walk(gjson.Parse(jsonStr), "", field, &paths)
	return paths
}

func sortByDepth(paths []string) {
	sort.Slice(paths, func(i, j int) bool { return len(paths[i]) > len(paths[j]) })
}

func trimSuffix(path, suffix string) string {
	if path == strings.TrimPrefix(suffix, ".") {
		return ""
	}
	return strings.TrimSuffix(path, suffix)
}

func joinPath(base, suffix string) string {
	if base == "" {
		return suffix
	}
	return base + "." + suffix
}

func setRawAt(jsonStr, path, value string) string {
	if path == "" {
		return value
	}
	result, _ := sjson.SetRaw(jsonStr, path, value)
	return result
}

func isPropertyDefinition(path string) bool {
	return path == "properties" || strings.HasSuffix(path, ".properties")
}

func descriptionPath(parentPath string) string {
	if parentPath == "" || parentPath == "@this" {
		return "description"
	}
	return parentPath + ".description"
}

func appendHint(jsonStr, parentPath, hint string) string {
	descPath := parentPath + ".description"
	if parentPath == "" || parentPath == "@this" {
		descPath = "description"
	}
	existing := gjson.Get(jsonStr, descPath).String()
	if existing != "" {
		hint = fmt.Sprintf("%s (%s)", existing, hint)
	}
	jsonStr, _ = sjson.Set(jsonStr, descPath, hint)
	return jsonStr
}

func appendHintRaw(jsonRaw, hint string) string {
	existing := gjson.Get(jsonRaw, "description").String()
	if existing != "" {
		hint = fmt.Sprintf("%s (%s)", existing, hint)
	}
	jsonRaw, _ = sjson.Set(jsonRaw, "description", hint)
	return jsonRaw
}

func getStrings(jsonStr, path string) []string {
	var result []string
	if arr := gjson.Get(jsonStr, path); arr.IsArray() {
		for _, r := range arr.Array() {
			result = append(result, r.String())
		}
	}
	return result
}

func contains(slice []string, item string) bool {
	for _, s := range slice {
		if s == item {
			return true
		}
	}
	return false
}

func orDefault(val, def string) string {
	if val == "" {
		return def
	}
	return val
}

func escapeGJSONPathKey(key string) string {
	return gjsonPathKeyReplacer.Replace(key)
}

func unescapeGJSONPathKey(key string) string {
	if !strings.Contains(key, "\\") {
		return key
	}
	var b strings.Builder
	b.Grow(len(key))
	for i := 0; i < len(key); i++ {
		if key[i] == '\\' && i+1 < len(key) {
			i++
			b.WriteByte(key[i])
			continue
		}
		b.WriteByte(key[i])
	}
	return b.String()
}

func splitGJSONPath(path string) []string {
	if path == "" {
		return nil
	}

	parts := make([]string, 0, strings.Count(path, ".")+1)
	var b strings.Builder
	b.Grow(len(path))

	for i := 0; i < len(path); i++ {
		c := path[i]
		if c == '\\' && i+1 < len(path) {
			b.WriteByte('\\')
			i++
			b.WriteByte(path[i])
			continue
		}
		if c == '.' {
			parts = append(parts, b.String())
			b.Reset()
			continue
		}
		b.WriteByte(c)
	}
	parts = append(parts, b.String())
	return parts
}

func mergeDescriptionRaw(schemaRaw, parentDesc string) string {
	childDesc := gjson.Get(schemaRaw, "description").String()
	switch {
	case childDesc == "":
		schemaRaw, _ = sjson.Set(schemaRaw, "description", parentDesc)
		return schemaRaw
	case childDesc == parentDesc:
		return schemaRaw
	default:
		combined := fmt.Sprintf("%s (%s)", parentDesc, childDesc)
		schemaRaw, _ = sjson.Set(schemaRaw, "description", combined)
		return schemaRaw
	}
}
613
internal/util/gemini_schema_test.go
Normal file
@@ -0,0 +1,613 @@
package util

import (
	"encoding/json"
	"reflect"
	"strings"
	"testing"
)

func TestCleanJSONSchemaForGemini_ConstToEnum(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"kind": {
				"type": "string",
				"const": "InsightVizNode"
			}
		}
	}`

	expected := `{
		"type": "object",
		"properties": {
			"kind": {
				"type": "string",
				"enum": ["InsightVizNode"]
			}
		}
	}`

	result := CleanJSONSchemaForGemini(input)
	compareJSON(t, expected, result)
}

func TestCleanJSONSchemaForGemini_TypeFlattening_Nullable(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"name": {
				"type": ["string", "null"]
			},
			"other": {
				"type": "string"
			}
		},
		"required": ["name", "other"]
	}`

	expected := `{
		"type": "object",
		"properties": {
			"name": {
				"type": "string",
				"description": "(nullable)"
			},
			"other": {
				"type": "string"
			}
		},
		"required": ["other"]
	}`

	result := CleanJSONSchemaForGemini(input)
	compareJSON(t, expected, result)
}

func TestCleanJSONSchemaForGemini_ConstraintsToDescription(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"tags": {
				"type": "array",
				"description": "List of tags",
				"minItems": 1
			},
			"name": {
				"type": "string",
				"description": "User name",
				"minLength": 3
			}
		}
	}`

	result := CleanJSONSchemaForGemini(input)

	// minItems should be REMOVED and moved to description
	if strings.Contains(result, `"minItems"`) {
		t.Errorf("minItems keyword should be removed")
	}
	if !strings.Contains(result, "minItems: 1") {
		t.Errorf("minItems hint missing in description")
	}

	// minLength should be moved to description
	if !strings.Contains(result, "minLength: 3") {
		t.Errorf("minLength hint missing in description")
	}
	if strings.Contains(result, `"minLength":`) || strings.Contains(result, `"minLength" :`) {
		t.Errorf("minLength keyword should be removed")
	}
}

func TestCleanJSONSchemaForGemini_AnyOfFlattening_SmartSelection(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"query": {
				"anyOf": [
					{ "type": "null" },
					{
						"type": "object",
						"properties": {
							"kind": { "type": "string" }
						}
					}
				]
			}
		}
	}`

	expected := `{
		"type": "object",
		"properties": {
			"query": {
				"type": "object",
				"description": "Accepts: null | object",
				"properties": {
					"kind": { "type": "string" }
				}
			}
		}
	}`

	result := CleanJSONSchemaForGemini(input)
	compareJSON(t, expected, result)
}

func TestCleanJSONSchemaForGemini_OneOfFlattening(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"config": {
				"oneOf": [
					{ "type": "string" },
					{ "type": "integer" }
				]
			}
		}
	}`

	expected := `{
		"type": "object",
		"properties": {
			"config": {
				"type": "string",
				"description": "Accepts: string | integer"
			}
		}
	}`

	result := CleanJSONSchemaForGemini(input)
	compareJSON(t, expected, result)
}

func TestCleanJSONSchemaForGemini_AllOfMerging(t *testing.T) {
	input := `{
		"type": "object",
		"allOf": [
			{
				"properties": {
					"a": { "type": "string" }
				},
				"required": ["a"]
			},
			{
				"properties": {
					"b": { "type": "integer" }
				},
				"required": ["b"]
			}
		]
	}`

	expected := `{
		"type": "object",
		"properties": {
			"a": { "type": "string" },
			"b": { "type": "integer" }
		},
		"required": ["a", "b"]
	}`

	result := CleanJSONSchemaForGemini(input)
	compareJSON(t, expected, result)
}

func TestCleanJSONSchemaForGemini_RefHandling(t *testing.T) {
	input := `{
		"definitions": {
			"User": {
				"type": "object",
				"properties": {
					"name": { "type": "string" }
				}
			}
		},
		"type": "object",
		"properties": {
			"customer": { "$ref": "#/definitions/User" }
		}
	}`

	expected := `{
		"type": "object",
		"properties": {
			"customer": {
				"type": "object",
				"description": "See: User"
			}
		}
	}`

	result := CleanJSONSchemaForGemini(input)
	compareJSON(t, expected, result)
}

func TestCleanJSONSchemaForGemini_RefHandling_DescriptionEscaping(t *testing.T) {
	input := `{
		"definitions": {
			"User": {
				"type": "object",
				"properties": {
					"name": { "type": "string" }
				}
			}
		},
		"type": "object",
		"properties": {
			"customer": {
				"description": "He said \"hi\"\\nsecond line",
				"$ref": "#/definitions/User"
			}
		}
	}`

	expected := `{
		"type": "object",
		"properties": {
			"customer": {
				"type": "object",
				"description": "He said \"hi\"\\nsecond line (See: User)"
			}
		}
	}`

	result := CleanJSONSchemaForGemini(input)
	compareJSON(t, expected, result)
}

func TestCleanJSONSchemaForGemini_CyclicRefDefaults(t *testing.T) {
	input := `{
		"definitions": {
			"Node": {
				"type": "object",
				"properties": {
					"child": { "$ref": "#/definitions/Node" }
				}
			}
		},
		"$ref": "#/definitions/Node"
	}`

	result := CleanJSONSchemaForGemini(input)

	var resMap map[string]interface{}
	json.Unmarshal([]byte(result), &resMap)

	if resMap["type"] != "object" {
		t.Errorf("Expected type: object, got: %v", resMap["type"])
	}

	desc, ok := resMap["description"].(string)
	if !ok || !strings.Contains(desc, "Node") {
		t.Errorf("Expected description hint containing 'Node', got: %v", resMap["description"])
	}
}

func TestCleanJSONSchemaForGemini_RequiredCleanup(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"a": {"type": "string"},
			"b": {"type": "string"}
		},
		"required": ["a", "b", "c"]
	}`

	expected := `{
		"type": "object",
		"properties": {
			"a": {"type": "string"},
			"b": {"type": "string"}
		},
		"required": ["a", "b"]
	}`

	result := CleanJSONSchemaForGemini(input)
	compareJSON(t, expected, result)
}

func TestCleanJSONSchemaForGemini_AllOfMerging_DotKeys(t *testing.T) {
	input := `{
		"type": "object",
		"allOf": [
			{
				"properties": {
					"my.param": { "type": "string" }
				},
				"required": ["my.param"]
			},
			{
				"properties": {
					"b": { "type": "integer" }
				},
				"required": ["b"]
			}
		]
	}`

	expected := `{
		"type": "object",
		"properties": {
			"my.param": { "type": "string" },
			"b": { "type": "integer" }
		},
		"required": ["my.param", "b"]
	}`

	result := CleanJSONSchemaForGemini(input)
	compareJSON(t, expected, result)
}

func TestCleanJSONSchemaForGemini_PropertyNameCollision(t *testing.T) {
	// A tool has an argument named "pattern" - should NOT be treated as a constraint
	input := `{
		"type": "object",
		"properties": {
			"pattern": {
				"type": "string",
				"description": "The regex pattern"
			}
		},
		"required": ["pattern"]
	}`

	expected := `{
		"type": "object",
		"properties": {
			"pattern": {
				"type": "string",
				"description": "The regex pattern"
			}
		},
		"required": ["pattern"]
	}`

	result := CleanJSONSchemaForGemini(input)
	compareJSON(t, expected, result)

	var resMap map[string]interface{}
	json.Unmarshal([]byte(result), &resMap)
	props, _ := resMap["properties"].(map[string]interface{})
	if _, ok := props["description"]; ok {
		t.Errorf("Invalid 'description' property injected into properties map")
	}
}

func TestCleanJSONSchemaForGemini_DotKeys(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"my.param": {
				"type": "string",
				"$ref": "#/definitions/MyType"
			}
		},
		"definitions": {
			"MyType": { "type": "string" }
		}
	}`

	result := CleanJSONSchemaForGemini(input)

	var resMap map[string]interface{}
	if err := json.Unmarshal([]byte(result), &resMap); err != nil {
		t.Fatalf("Failed to unmarshal result: %v", err)
	}

	props, ok := resMap["properties"].(map[string]interface{})
	if !ok {
		t.Fatalf("properties missing")
	}

	if val, ok := props["my.param"]; !ok {
		t.Fatalf("Key 'my.param' is missing. Result: %s", result)
	} else {
		valMap, _ := val.(map[string]interface{})
		if _, hasRef := valMap["$ref"]; hasRef {
			t.Errorf("Key 'my.param' still contains $ref")
		}
		if _, ok := props["my"]; ok {
			t.Errorf("Artifact key 'my' created by sjson splitting")
		}
	}
}

func TestCleanJSONSchemaForGemini_AnyOfAlternativeHints(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"value": {
				"anyOf": [
					{ "type": "string" },
					{ "type": "integer" },
					{ "type": "null" }
				]
			}
		}
	}`

	result := CleanJSONSchemaForGemini(input)

	if !strings.Contains(result, "Accepts:") {
		t.Errorf("Expected alternative types hint, got: %s", result)
	}
	if !strings.Contains(result, "string") || !strings.Contains(result, "integer") {
		t.Errorf("Expected all alternative types in hint, got: %s", result)
	}
}

func TestCleanJSONSchemaForGemini_NullableHint(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"name": {
				"type": ["string", "null"],
				"description": "User name"
			}
		},
		"required": ["name"]
	}`

	result := CleanJSONSchemaForGemini(input)

	if !strings.Contains(result, "(nullable)") {
		t.Errorf("Expected nullable hint, got: %s", result)
	}
	if !strings.Contains(result, "User name") {
		t.Errorf("Expected original description to be preserved, got: %s", result)
	}
}

func TestCleanJSONSchemaForGemini_TypeFlattening_Nullable_DotKey(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"my.param": {
				"type": ["string", "null"]
			},
			"other": {
				"type": "string"
			}
		},
		"required": ["my.param", "other"]
	}`

	expected := `{
		"type": "object",
		"properties": {
			"my.param": {
				"type": "string",
				"description": "(nullable)"
			},
			"other": {
				"type": "string"
			}
		},
		"required": ["other"]
	}`

	result := CleanJSONSchemaForGemini(input)
	compareJSON(t, expected, result)
}

func TestCleanJSONSchemaForGemini_EnumHint(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"status": {
				"type": "string",
				"enum": ["active", "inactive", "pending"],
				"description": "Current status"
			}
		}
	}`

	result := CleanJSONSchemaForGemini(input)

	if !strings.Contains(result, "Allowed:") {
		t.Errorf("Expected enum values hint, got: %s", result)
	}
	if !strings.Contains(result, "active") || !strings.Contains(result, "inactive") {
		t.Errorf("Expected enum values in hint, got: %s", result)
	}
}

func TestCleanJSONSchemaForGemini_AdditionalPropertiesHint(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"name": { "type": "string" }
		},
		"additionalProperties": false
	}`

	result := CleanJSONSchemaForGemini(input)

	if !strings.Contains(result, "No extra properties allowed") {
		t.Errorf("Expected additionalProperties hint, got: %s", result)
	}
}

func TestCleanJSONSchemaForGemini_AnyOfFlattening_PreservesDescription(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"config": {
				"description": "Parent desc",
				"anyOf": [
					{ "type": "string", "description": "Child desc" },
					{ "type": "integer" }
				]
			}
		}
	}`

	expected := `{
		"type": "object",
		"properties": {
			"config": {
				"type": "string",
				"description": "Parent desc (Child desc) (Accepts: string | integer)"
			}
		}
	}`

	result := CleanJSONSchemaForGemini(input)
	compareJSON(t, expected, result)
}

func TestCleanJSONSchemaForGemini_SingleEnumNoHint(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"kind": {
				"type": "string",
				"enum": ["fixed"]
			}
		}
	}`

	result := CleanJSONSchemaForGemini(input)

	if strings.Contains(result, "Allowed:") {
		t.Errorf("Single value enum should not add Allowed hint, got: %s", result)
	}
}

func TestCleanJSONSchemaForGemini_MultipleNonNullTypes(t *testing.T) {
	input := `{
		"type": "object",
		"properties": {
			"value": {
				"type": ["string", "integer", "boolean"]
			}
		}
	}`

	result := CleanJSONSchemaForGemini(input)

	if !strings.Contains(result, "Accepts:") {
		t.Errorf("Expected multiple types hint, got: %s", result)
	}
	if !strings.Contains(result, "string") || !strings.Contains(result, "integer") || !strings.Contains(result, "boolean") {
		t.Errorf("Expected all types in hint, got: %s", result)
	}
}

func compareJSON(t *testing.T, expectedJSON, actualJSON string) {
	var expMap, actMap map[string]interface{}
	errExp := json.Unmarshal([]byte(expectedJSON), &expMap)
	errAct := json.Unmarshal([]byte(actualJSON), &actMap)

	if errExp != nil || errAct != nil {
		t.Fatalf("JSON Unmarshal error. Exp: %v, Act: %v", errExp, errAct)
	}

	if !reflect.DeepEqual(expMap, actMap) {
		expBytes, _ := json.MarshalIndent(expMap, "", " ")
		actBytes, _ := json.MarshalIndent(actMap, "", " ")
		t.Errorf("JSON mismatch:\nExpected:\n%s\n\nActual:\n%s", string(expBytes), string(actBytes))
	}
}
@@ -1,6 +1,7 @@
package util

import (
	"regexp"
	"strings"

	"github.com/tidwall/gjson"
@@ -13,6 +14,44 @@ const (
	GeminiOriginalModelMetadataKey = "gemini_original_model"
)

// Gemini model family detection patterns
var (
	gemini3Pattern      = regexp.MustCompile(`(?i)^gemini[_-]?3[_-]`)
	gemini3ProPattern   = regexp.MustCompile(`(?i)^gemini[_-]?3[_-]pro`)
	gemini3FlashPattern = regexp.MustCompile(`(?i)^gemini[_-]?3[_-]flash`)
	gemini25Pattern     = regexp.MustCompile(`(?i)^gemini[_-]?2\.5[_-]`)
)

// IsGemini3Model returns true if the model is a Gemini 3 family model.
// Gemini 3 models should use thinkingLevel (string) instead of thinkingBudget (number).
func IsGemini3Model(model string) bool {
	return gemini3Pattern.MatchString(model)
}

// IsGemini3ProModel returns true if the model is a Gemini 3 Pro variant.
// Gemini 3 Pro supports thinkingLevel: "low", "high" (default: "high")
func IsGemini3ProModel(model string) bool {
	return gemini3ProPattern.MatchString(model)
}

// IsGemini3FlashModel returns true if the model is a Gemini 3 Flash variant.
// Gemini 3 Flash supports thinkingLevel: "minimal", "low", "medium", "high" (default: "high")
func IsGemini3FlashModel(model string) bool {
	return gemini3FlashPattern.MatchString(model)
}

// IsGemini25Model returns true if the model is a Gemini 2.5 family model.
// Gemini 2.5 models should use thinkingBudget (number).
func IsGemini25Model(model string) bool {
	return gemini25Pattern.MatchString(model)
}

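The patterns are case-insensitive, anchored at the start of the model name, and accept either "-" or "_" as a separator. Some illustrative results (model names hypothetical):

IsGemini3Model("gemini-3-pro-preview")        // true
IsGemini3Model("gemini_3_flash")              // true — underscore separators also match
IsGemini3Model("gemini-2.5-pro")              // false — handled by IsGemini25Model instead
IsGemini3FlashModel("GEMINI-3-FLASH-preview") // true — matching is case-insensitive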
// Gemini3ProThinkingLevels are the valid thinkingLevel values for Gemini 3 Pro models.
var Gemini3ProThinkingLevels = []string{"low", "high"}

// Gemini3FlashThinkingLevels are the valid thinkingLevel values for Gemini 3 Flash models.
var Gemini3FlashThinkingLevels = []string{"minimal", "low", "medium", "high"}

func ApplyGeminiThinkingConfig(body []byte, budget *int, includeThoughts *bool) []byte {
	if budget == nil && includeThoughts == nil {
		return body
@@ -69,10 +108,141 @@ func ApplyGeminiCLIThinkingConfig(body []byte, budget *int, includeThoughts *boo
	return updated
}

// ApplyGeminiThinkingLevel applies thinkingLevel config for Gemini 3 models.
// For standard Gemini API format (generationConfig.thinkingConfig path).
// Per Google's documentation, Gemini 3 models should use thinkingLevel instead of thinkingBudget.
func ApplyGeminiThinkingLevel(body []byte, level string, includeThoughts *bool) []byte {
	if level == "" && includeThoughts == nil {
		return body
	}
	updated := body
	if level != "" {
		valuePath := "generationConfig.thinkingConfig.thinkingLevel"
		rewritten, err := sjson.SetBytes(updated, valuePath, level)
		if err == nil {
			updated = rewritten
		}
	}
	// Default to including thoughts when a level is set but no explicit include flag is provided.
	incl := includeThoughts
	if incl == nil && level != "" {
		defaultInclude := true
		incl = &defaultInclude
	}
	if incl != nil {
		valuePath := "generationConfig.thinkingConfig.includeThoughts"
		rewritten, err := sjson.SetBytes(updated, valuePath, *incl)
		if err == nil {
			updated = rewritten
		}
	}
	return updated
}

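A sketch of the resulting body rewrite (the request body here is hypothetical); note that includeThoughts defaults to true whenever a level is set without an explicit flag:

body := []byte(`{"contents":[]}`)
body = ApplyGeminiThinkingLevel(body, "low", nil)
// {"contents":[],"generationConfig":{"thinkingConfig":{"thinkingLevel":"low","includeThoughts":true}}}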
// ApplyGeminiCLIThinkingLevel applies thinkingLevel config for Gemini 3 models.
// For Gemini CLI API format (request.generationConfig.thinkingConfig path).
// Per Google's documentation, Gemini 3 models should use thinkingLevel instead of thinkingBudget.
func ApplyGeminiCLIThinkingLevel(body []byte, level string, includeThoughts *bool) []byte {
	if level == "" && includeThoughts == nil {
		return body
	}
	updated := body
	if level != "" {
		valuePath := "request.generationConfig.thinkingConfig.thinkingLevel"
		rewritten, err := sjson.SetBytes(updated, valuePath, level)
		if err == nil {
			updated = rewritten
		}
	}
	// Default to including thoughts when a level is set but no explicit include flag is provided.
	incl := includeThoughts
	if incl == nil && level != "" {
		defaultInclude := true
		incl = &defaultInclude
	}
	if incl != nil {
		valuePath := "request.generationConfig.thinkingConfig.includeThoughts"
		rewritten, err := sjson.SetBytes(updated, valuePath, *incl)
		if err == nil {
			updated = rewritten
		}
	}
	return updated
}

// ValidateGemini3ThinkingLevel validates that the thinkingLevel is valid for the Gemini 3 model variant.
// Returns the validated level (normalized to lowercase) and true if valid, or empty string and false if invalid.
func ValidateGemini3ThinkingLevel(model, level string) (string, bool) {
	if level == "" {
		return "", false
	}
	normalized := strings.ToLower(strings.TrimSpace(level))

	var validLevels []string
	if IsGemini3ProModel(model) {
		validLevels = Gemini3ProThinkingLevels
	} else if IsGemini3FlashModel(model) {
		validLevels = Gemini3FlashThinkingLevels
	} else if IsGemini3Model(model) {
		// Unknown Gemini 3 variant - allow all levels as fallback
		validLevels = Gemini3FlashThinkingLevels
	} else {
		return "", false
	}

	for _, valid := range validLevels {
		if normalized == valid {
			return normalized, true
		}
	}
	return "", false
}

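Illustrative calls (model names hypothetical): the input is trimmed and lowercased before being checked against the per-variant level lists.

ValidateGemini3ThinkingLevel("gemini-3-pro-preview", " High ")    // "high", true
ValidateGemini3ThinkingLevel("gemini-3-pro-preview", "medium")    // "", false — Pro has no medium level
ValidateGemini3ThinkingLevel("gemini-3-flash-preview", "minimal") // "minimal", true
ValidateGemini3ThinkingLevel("gemini-2.5-pro", "high")            // "", false — not a Gemini 3 model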
// ThinkingBudgetToGemini3Level converts a thinkingBudget to a thinkingLevel for Gemini 3 models.
// This provides backward compatibility when thinkingBudget is provided for Gemini 3 models.
// Returns the appropriate thinkingLevel and true if conversion is possible.
func ThinkingBudgetToGemini3Level(model string, budget int) (string, bool) {
	if !IsGemini3Model(model) {
		return "", false
	}

	// Map budget to level based on Google's documentation
	// Gemini 3 Pro: "low", "high" (default: "high")
	// Gemini 3 Flash: "minimal", "low", "medium", "high" (default: "high")
	switch {
	case budget == -1:
		// Dynamic budget maps to "high" (API default)
		return "high", true
	case budget == 0:
		// Zero budget - Gemini 3 doesn't support disabling thinking
		// Map to lowest available level
		if IsGemini3FlashModel(model) {
			return "minimal", true
		}
		return "low", true
	case budget > 0 && budget <= 512:
		if IsGemini3FlashModel(model) {
			return "minimal", true
		}
		return "low", true
	case budget <= 1024:
		return "low", true
	case budget <= 8192:
		if IsGemini3FlashModel(model) {
			return "medium", true
		}
		return "low", true // Pro doesn't have medium, use low
	default:
		return "high", true
	}
}

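The budget-to-level mapping is easiest to read as a table; a sketch exercising it with an illustrative Flash-variant model ID:

	// budget -> level for a Flash variant:
	//   -1   -> "high"    (dynamic budget; API default)
	//    0   -> "minimal" (thinking cannot be disabled on Gemini 3)
	//  512   -> "minimal"
	// 1024   -> "low"
	// 8192   -> "medium"  (a Pro variant would yield "low" here)
	// 8193+  -> "high"
	level, ok := ThinkingBudgetToGemini3Level("gemini-3-flash-preview", 8192)
	_, _ = level, ok // "medium", true
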
// modelsWithDefaultThinking lists models that should have thinking enabled by default
// when no explicit thinkingConfig is provided.
var modelsWithDefaultThinking = map[string]bool{
	"gemini-3-pro-preview":       true,
	"gemini-3-pro-image-preview": true,
	"gemini-3-flash-preview":     true,
}

// ModelHasDefaultThinking returns true if the model should have thinking enabled by default.
@@ -83,6 +253,7 @@ func ModelHasDefaultThinking(model string) bool {

// ApplyDefaultThinkingIfNeeded injects default thinkingConfig for models that require it.
// For standard Gemini API format (generationConfig.thinkingConfig path).
// Returns the modified body if thinkingConfig was added, otherwise returns the original.
// For Gemini 3 models, uses thinkingLevel instead of thinkingBudget per Google's documentation.
func ApplyDefaultThinkingIfNeeded(model string, body []byte) []byte {
	if !ModelHasDefaultThinking(model) {
		return body
@@ -90,14 +261,59 @@ func ApplyDefaultThinkingIfNeeded(model string, body []byte) []byte {
	if gjson.GetBytes(body, "generationConfig.thinkingConfig").Exists() {
		return body
	}
	// Gemini 3 models use thinkingLevel instead of thinkingBudget
	if IsGemini3Model(model) {
		// Don't set a default - let the API use its dynamic default ("high")
		// Only set includeThoughts
		updated, _ := sjson.SetBytes(body, "generationConfig.thinkingConfig.includeThoughts", true)
		return updated
	}
	// Gemini 2.5 and other models use thinkingBudget
	updated, _ := sjson.SetBytes(body, "generationConfig.thinkingConfig.thinkingBudget", -1)
	updated, _ = sjson.SetBytes(updated, "generationConfig.thinkingConfig.include_thoughts", true)
	return updated
}

// ApplyGemini3ThinkingLevelFromMetadata applies thinkingLevel from metadata for Gemini 3 models.
// For standard Gemini API format (generationConfig.thinkingConfig path).
// This handles the case where reasoning_effort is specified via model name suffix (e.g., model(minimal)).
func ApplyGemini3ThinkingLevelFromMetadata(model string, metadata map[string]any, body []byte) []byte {
	if !IsGemini3Model(model) {
		return body
	}
	effort, ok := ReasoningEffortFromMetadata(metadata)
	if !ok || effort == "" {
		return body
	}
	// Validate and apply the thinkingLevel
	if level, valid := ValidateGemini3ThinkingLevel(model, effort); valid {
		return ApplyGeminiThinkingLevel(body, level, nil)
	}
	return body
}

// ApplyGemini3ThinkingLevelFromMetadataCLI applies thinkingLevel from metadata for Gemini 3 models.
// For Gemini CLI API format (request.generationConfig.thinkingConfig path).
// This handles the case where reasoning_effort is specified via model name suffix (e.g., model(minimal)).
func ApplyGemini3ThinkingLevelFromMetadataCLI(model string, metadata map[string]any, body []byte) []byte {
	if !IsGemini3Model(model) {
		return body
	}
	effort, ok := ReasoningEffortFromMetadata(metadata)
	if !ok || effort == "" {
		return body
	}
	// Validate and apply the thinkingLevel
	if level, valid := ValidateGemini3ThinkingLevel(model, effort); valid {
		return ApplyGeminiCLIThinkingLevel(body, level, nil)
	}
	return body
}

// ApplyDefaultThinkingIfNeededCLI injects default thinkingConfig for models that require it.
// For Gemini CLI API format (request.generationConfig.thinkingConfig path).
// Returns the modified body if thinkingConfig was added, otherwise returns the original.
// For Gemini 3 models, uses thinkingLevel instead of thinkingBudget per Google's documentation.
func ApplyDefaultThinkingIfNeededCLI(model string, body []byte) []byte {
	if !ModelHasDefaultThinking(model) {
		return body
@@ -105,6 +321,14 @@ func ApplyDefaultThinkingIfNeededCLI(model string, body []byte) []byte {
	if gjson.GetBytes(body, "request.generationConfig.thinkingConfig").Exists() {
		return body
	}
	// Gemini 3 models use thinkingLevel instead of thinkingBudget
	if IsGemini3Model(model) {
		// Don't set a default - let the API use its dynamic default ("high")
		// Only set includeThoughts
		updated, _ := sjson.SetBytes(body, "request.generationConfig.thinkingConfig.includeThoughts", true)
		return updated
	}
	// Gemini 2.5 and other models use thinkingBudget
	updated, _ := sjson.SetBytes(body, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
	updated, _ = sjson.SetBytes(updated, "request.generationConfig.thinkingConfig.include_thoughts", true)
	return updated
@@ -128,12 +352,29 @@ func StripThinkingConfigIfUnsupported(model string, body []byte) []byte {

// NormalizeGeminiThinkingBudget normalizes the thinkingBudget value in a standard Gemini
// request body (generationConfig.thinkingConfig.thinkingBudget path).
// For Gemini 3 models, converts thinkingBudget to thinkingLevel per Google's documentation.
func NormalizeGeminiThinkingBudget(model string, body []byte) []byte {
	const budgetPath = "generationConfig.thinkingConfig.thinkingBudget"
	const levelPath = "generationConfig.thinkingConfig.thinkingLevel"

	budget := gjson.GetBytes(body, budgetPath)
	if !budget.Exists() {
		return body
	}

	// For Gemini 3 models, convert thinkingBudget to thinkingLevel
	if IsGemini3Model(model) {
		if level, ok := ThinkingBudgetToGemini3Level(model, int(budget.Int())); ok {
			updated, _ := sjson.SetBytes(body, levelPath, level)
			updated, _ = sjson.DeleteBytes(updated, budgetPath)
			return updated
		}
		// If conversion fails, just remove the budget (let API use default)
		updated, _ := sjson.DeleteBytes(body, budgetPath)
		return updated
	}

	// For Gemini 2.5 and other models, normalize the budget value
	normalized := NormalizeThinkingBudget(model, int(budget.Int()))
	updated, _ := sjson.SetBytes(body, budgetPath, normalized)
	return updated
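In effect the normalizer rewrites the request body in place. A sketch of the transformation for a Gemini 3 model (body abbreviated; model ID illustrative):

	// Input body:
	//   {"generationConfig":{"thinkingConfig":{"thinkingBudget":1024}}}
	// After NormalizeGeminiThinkingBudget on a Gemini 3 model:
	//   {"generationConfig":{"thinkingConfig":{"thinkingLevel":"low"}}}
	// On a Gemini 2.5 model the budget is clamped by NormalizeThinkingBudget
	// instead, and the key stays thinkingBudget.
	out := NormalizeGeminiThinkingBudget("gemini-3-pro-preview",
		[]byte(`{"generationConfig":{"thinkingConfig":{"thinkingBudget":1024}}}`))
	_ = out
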
@@ -141,12 +382,29 @@ func NormalizeGeminiThinkingBudget(model string, body []byte) []byte {

// NormalizeGeminiCLIThinkingBudget normalizes the thinkingBudget value in a Gemini CLI
// request body (request.generationConfig.thinkingConfig.thinkingBudget path).
// For Gemini 3 models, converts thinkingBudget to thinkingLevel per Google's documentation.
func NormalizeGeminiCLIThinkingBudget(model string, body []byte) []byte {
	const budgetPath = "request.generationConfig.thinkingConfig.thinkingBudget"
	const levelPath = "request.generationConfig.thinkingConfig.thinkingLevel"

	budget := gjson.GetBytes(body, budgetPath)
	if !budget.Exists() {
		return body
	}

	// For Gemini 3 models, convert thinkingBudget to thinkingLevel
	if IsGemini3Model(model) {
		if level, ok := ThinkingBudgetToGemini3Level(model, int(budget.Int())); ok {
			updated, _ := sjson.SetBytes(body, levelPath, level)
			updated, _ = sjson.DeleteBytes(updated, budgetPath)
			return updated
		}
		// If conversion fails, just remove the budget (let API use default)
		updated, _ := sjson.DeleteBytes(body, budgetPath)
		return updated
	}

	// For Gemini 2.5 and other models, normalize the budget value
	normalized := NormalizeThinkingBudget(model, int(budget.Int()))
	updated, _ := sjson.SetBytes(body, budgetPath, normalized)
	return updated
@@ -218,34 +476,42 @@ func ApplyReasoningEffortToGeminiCLI(body []byte, effort string) []byte {
 }
 
 // ConvertThinkingLevelToBudget checks for "generationConfig.thinkingConfig.thinkingLevel"
-// and converts it to "thinkingBudget".
-// "high" -> 32768
-// "low" -> 128
-// It removes "thinkingLevel" after conversion.
-func ConvertThinkingLevelToBudget(body []byte) []byte {
+// and converts it to "thinkingBudget" for Gemini 2.5 models.
+// For Gemini 3 models, preserves thinkingLevel as-is (does not convert).
+// Mappings for Gemini 2.5:
+//   - "high" -> 32768
+//   - "medium" -> 8192
+//   - "low" -> 1024
+//   - "minimal" -> 512
+//
+// It removes "thinkingLevel" after conversion (for Gemini 2.5 only).
+func ConvertThinkingLevelToBudget(body []byte, model string) []byte {
 	levelPath := "generationConfig.thinkingConfig.thinkingLevel"
 	res := gjson.GetBytes(body, levelPath)
 	if !res.Exists() {
 		return body
 	}
 
+	// For Gemini 3 models, preserve thinkingLevel - don't convert to budget
+	if IsGemini3Model(model) {
+		return body
+	}
+
 	level := strings.ToLower(res.String())
 	var budget int
 	switch level {
 	case "high":
 		budget = 32768
+	case "medium":
+		budget = 8192
 	case "low":
-		budget = 128
+		budget = 1024
+	case "minimal":
+		budget = 512
 	default:
-		// If unknown level, we might just leave it or default.
-		// User only specified high and low. We'll assume we shouldn't touch it if it's something else,
-		// or maybe we should just remove the invalid level?
-		// For safety adhering to strict instructions: "If high... if low...".
-		// If it's something else, the upstream might fail anyway if we leave it,
-		// but let's just delete the level if we processed it.
-		// Actually, let's check if we need to do anything for other values.
-		// For now, only handle high/low.
-		return body
+		// Unknown level - remove it and let the API use defaults
+		updated, _ := sjson.DeleteBytes(body, levelPath)
+		return updated
 	}
 
 	// Set budget
@@ -262,3 +528,50 @@ func ConvertThinkingLevelToBudget(body []byte) []byte {
	}
	return updated
}

// ConvertThinkingLevelToBudgetCLI checks for "request.generationConfig.thinkingConfig.thinkingLevel"
// and converts it to "thinkingBudget" for Gemini 2.5 models.
// For Gemini 3 models, preserves thinkingLevel as-is (does not convert).
func ConvertThinkingLevelToBudgetCLI(body []byte, model string) []byte {
	levelPath := "request.generationConfig.thinkingConfig.thinkingLevel"
	res := gjson.GetBytes(body, levelPath)
	if !res.Exists() {
		return body
	}

	// For Gemini 3 models, preserve thinkingLevel - don't convert to budget
	if IsGemini3Model(model) {
		return body
	}

	level := strings.ToLower(res.String())
	var budget int
	switch level {
	case "high":
		budget = 32768
	case "medium":
		budget = 8192
	case "low":
		budget = 1024
	case "minimal":
		budget = 512
	default:
		// Unknown level - remove it and let the API use defaults
		updated, _ := sjson.DeleteBytes(body, levelPath)
		return updated
	}

	// Set budget
	budgetPath := "request.generationConfig.thinkingConfig.thinkingBudget"
	updated, err := sjson.SetBytes(body, budgetPath, budget)
	if err != nil {
		return body
	}

	// Remove level
	updated, err = sjson.DeleteBytes(updated, levelPath)
	if err != nil {
		return body
	}
	return updated
}

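Round-tripping shows the asymmetry between the two model families; a sketch with illustrative model IDs:

	// A Gemini 2.5 request with thinkingLevel gets a concrete budget...
	body := []byte(`{"request":{"generationConfig":{"thinkingConfig":{"thinkingLevel":"medium"}}}}`)
	out := ConvertThinkingLevelToBudgetCLI(body, "gemini-2.5-pro")
	// out: {"request":{"generationConfig":{"thinkingConfig":{"thinkingBudget":8192}}}}
	// ...while the same body for a Gemini 3 model is returned unchanged, preserving thinkingLevel.
	same := ConvertThinkingLevelToBudgetCLI(body, "gemini-3-pro-preview")
	_, _ = out, same
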
@@ -6,6 +6,7 @@ package util
 import (
 	"bytes"
 	"fmt"
+	"strings"
 
 	"github.com/tidwall/gjson"
 	"github.com/tidwall/sjson"
@@ -28,10 +29,17 @@ func Walk(value gjson.Result, path, field string, paths *[]string) {
 	// For JSON objects and arrays, iterate through each child
 	value.ForEach(func(key, val gjson.Result) bool {
 		var childPath string
+		// Escape special characters for gjson/sjson path syntax
+		// . -> \.
+		// * -> \*
+		// ? -> \?
+		var keyReplacer = strings.NewReplacer(".", "\\.", "*", "\\*", "?", "\\?")
+		safeKey := keyReplacer.Replace(key.String())
+
 		if path == "" {
-			childPath = key.String()
+			childPath = safeKey
 		} else {
-			childPath = path + "." + key.String()
+			childPath = path + "." + safeKey
 		}
 		if key.String() == field {
 			*paths = append(*paths, childPath)
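The escaping matters because gjson/sjson treat ".", "*", and "?" as path syntax, so a literal key such as "config.yaml" would otherwise be read as two nested keys. A small sketch of the difference:

	// Without escaping, the path "files.config.yaml" means files -> config -> yaml.
	// With the replacer above, the literal key "config.yaml" becomes "config\.yaml",
	// so "files.config\.yaml" addresses files -> "config.yaml" as one key.
	safe := strings.NewReplacer(".", "\\.", "*", "\\*", "?", "\\?").Replace("config.yaml")
	_ = safe // `config\.yaml`
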
internal/watcher/clients.go (new file, 270 lines)
@@ -0,0 +1,270 @@
// clients.go implements watcher client lifecycle logic and persistence helpers.
// It reloads clients, handles incremental auth file changes, and persists updates when supported.
package watcher

import (
	"context"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"strings"
	"time"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
	coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
	log "github.com/sirupsen/logrus"
)

func (w *Watcher) reloadClients(rescanAuth bool, affectedOAuthProviders []string, forceAuthRefresh bool) {
	log.Debugf("starting full client load process")

	w.clientsMutex.RLock()
	cfg := w.config
	w.clientsMutex.RUnlock()

	if cfg == nil {
		log.Error("config is nil, cannot reload clients")
		return
	}

	if len(affectedOAuthProviders) > 0 {
		w.clientsMutex.Lock()
		if w.currentAuths != nil {
			filtered := make(map[string]*coreauth.Auth, len(w.currentAuths))
			for id, auth := range w.currentAuths {
				if auth == nil {
					continue
				}
				provider := strings.ToLower(strings.TrimSpace(auth.Provider))
				if _, match := matchProvider(provider, affectedOAuthProviders); match {
					continue
				}
				filtered[id] = auth
			}
			w.currentAuths = filtered
			log.Debugf("applying oauth-excluded-models to providers %v", affectedOAuthProviders)
		} else {
			w.currentAuths = nil
		}
		w.clientsMutex.Unlock()
	}

	geminiAPIKeyCount, vertexCompatAPIKeyCount, claudeAPIKeyCount, codexAPIKeyCount, openAICompatCount := BuildAPIKeyClients(cfg)
	totalAPIKeyClients := geminiAPIKeyCount + vertexCompatAPIKeyCount + claudeAPIKeyCount + codexAPIKeyCount + openAICompatCount
	log.Debugf("loaded %d API key clients", totalAPIKeyClients)

	var authFileCount int
	if rescanAuth {
		authFileCount = w.loadFileClients(cfg)
		log.Debugf("loaded %d file-based clients", authFileCount)
	} else {
		w.clientsMutex.RLock()
		authFileCount = len(w.lastAuthHashes)
		w.clientsMutex.RUnlock()
		log.Debugf("skipping auth directory rescan; retaining %d existing auth files", authFileCount)
	}

	if rescanAuth {
		w.clientsMutex.Lock()

		w.lastAuthHashes = make(map[string]string)
		if resolvedAuthDir, errResolveAuthDir := util.ResolveAuthDir(cfg.AuthDir); errResolveAuthDir != nil {
			log.Errorf("failed to resolve auth directory for hash cache: %v", errResolveAuthDir)
		} else if resolvedAuthDir != "" {
			_ = filepath.Walk(resolvedAuthDir, func(path string, info fs.FileInfo, err error) error {
				if err != nil {
					return nil
				}
				if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".json") {
					if data, errReadFile := os.ReadFile(path); errReadFile == nil && len(data) > 0 {
						sum := sha256.Sum256(data)
						normalizedPath := w.normalizeAuthPath(path)
						w.lastAuthHashes[normalizedPath] = hex.EncodeToString(sum[:])
					}
				}
				return nil
			})
		}
		w.clientsMutex.Unlock()
	}

	totalNewClients := authFileCount + geminiAPIKeyCount + vertexCompatAPIKeyCount + claudeAPIKeyCount + codexAPIKeyCount + openAICompatCount

	if w.reloadCallback != nil {
		log.Debugf("triggering server update callback before auth refresh")
		w.reloadCallback(cfg)
	}

	w.refreshAuthState(forceAuthRefresh)

	log.Infof("full client load complete - %d clients (%d auth files + %d Gemini API keys + %d Vertex API keys + %d Claude API keys + %d Codex keys + %d OpenAI-compat)",
		totalNewClients,
		authFileCount,
		geminiAPIKeyCount,
		vertexCompatAPIKeyCount,
		claudeAPIKeyCount,
		codexAPIKeyCount,
		openAICompatCount,
	)
}

func (w *Watcher) addOrUpdateClient(path string) {
	data, errRead := os.ReadFile(path)
	if errRead != nil {
		log.Errorf("failed to read auth file %s: %v", filepath.Base(path), errRead)
		return
	}
	if len(data) == 0 {
		log.Debugf("ignoring empty auth file: %s", filepath.Base(path))
		return
	}

	sum := sha256.Sum256(data)
	curHash := hex.EncodeToString(sum[:])
	normalized := w.normalizeAuthPath(path)

	w.clientsMutex.Lock()

	cfg := w.config
	if cfg == nil {
		log.Error("config is nil, cannot add or update client")
		w.clientsMutex.Unlock()
		return
	}
	if prev, ok := w.lastAuthHashes[normalized]; ok && prev == curHash {
		log.Debugf("auth file unchanged (hash match), skipping reload: %s", filepath.Base(path))
		w.clientsMutex.Unlock()
		return
	}

	w.lastAuthHashes[normalized] = curHash

	w.clientsMutex.Unlock() // Unlock before the callback

	w.refreshAuthState(false)

	if w.reloadCallback != nil {
		log.Debugf("triggering server update callback after add/update")
		w.reloadCallback(cfg)
	}
	w.persistAuthAsync(fmt.Sprintf("Sync auth %s", filepath.Base(path)), path)
}

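The content-hash check is what keeps editor save loops and atomic-replace double events cheap: only a byte-level change triggers a reload. A standalone sketch of the same dedup idea (assumes the sha256/hex imports above):

	seen := map[string]string{}
	changed := func(path string, data []byte) bool {
		sum := sha256.Sum256(data)
		h := hex.EncodeToString(sum[:])
		if seen[path] == h {
			return false // identical bytes: skip the reload
		}
		seen[path] = h
		return true
	}
	_ = changed
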
func (w *Watcher) removeClient(path string) {
	normalized := w.normalizeAuthPath(path)
	w.clientsMutex.Lock()

	cfg := w.config
	delete(w.lastAuthHashes, normalized)

	w.clientsMutex.Unlock() // Release the lock before the callback

	w.refreshAuthState(false)

	if w.reloadCallback != nil {
		log.Debugf("triggering server update callback after removal")
		w.reloadCallback(cfg)
	}
	w.persistAuthAsync(fmt.Sprintf("Remove auth %s", filepath.Base(path)), path)
}

func (w *Watcher) loadFileClients(cfg *config.Config) int {
	authFileCount := 0
	successfulAuthCount := 0

	authDir, errResolveAuthDir := util.ResolveAuthDir(cfg.AuthDir)
	if errResolveAuthDir != nil {
		log.Errorf("failed to resolve auth directory: %v", errResolveAuthDir)
		return 0
	}
	if authDir == "" {
		return 0
	}

	errWalk := filepath.Walk(authDir, func(path string, info fs.FileInfo, err error) error {
		if err != nil {
			log.Debugf("error accessing path %s: %v", path, err)
			return err
		}
		if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".json") {
			authFileCount++
			log.Debugf("processing auth file %d: %s", authFileCount, filepath.Base(path))
			if data, errCreate := os.ReadFile(path); errCreate == nil && len(data) > 0 {
				successfulAuthCount++
			}
		}
		return nil
	})

	if errWalk != nil {
		log.Errorf("error walking auth directory: %v", errWalk)
	}
	log.Debugf("auth directory scan complete - found %d .json files, %d readable", authFileCount, successfulAuthCount)
	return authFileCount
}

func BuildAPIKeyClients(cfg *config.Config) (int, int, int, int, int) {
	geminiAPIKeyCount := 0
	vertexCompatAPIKeyCount := 0
	claudeAPIKeyCount := 0
	codexAPIKeyCount := 0
	openAICompatCount := 0

	if len(cfg.GeminiKey) > 0 {
		geminiAPIKeyCount += len(cfg.GeminiKey)
	}
	if len(cfg.VertexCompatAPIKey) > 0 {
		vertexCompatAPIKeyCount += len(cfg.VertexCompatAPIKey)
	}
	if len(cfg.ClaudeKey) > 0 {
		claudeAPIKeyCount += len(cfg.ClaudeKey)
	}
	if len(cfg.CodexKey) > 0 {
		codexAPIKeyCount += len(cfg.CodexKey)
	}
	if len(cfg.OpenAICompatibility) > 0 {
		for _, compatConfig := range cfg.OpenAICompatibility {
			openAICompatCount += len(compatConfig.APIKeyEntries)
		}
	}
	return geminiAPIKeyCount, vertexCompatAPIKeyCount, claudeAPIKeyCount, codexAPIKeyCount, openAICompatCount
}

func (w *Watcher) persistConfigAsync() {
	if w == nil || w.storePersister == nil {
		return
	}
	go func() {
		ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
		defer cancel()
		if err := w.storePersister.PersistConfig(ctx); err != nil {
			log.Errorf("failed to persist config change: %v", err)
		}
	}()
}

func (w *Watcher) persistAuthAsync(message string, paths ...string) {
	if w == nil || w.storePersister == nil {
		return
	}
	filtered := make([]string, 0, len(paths))
	for _, p := range paths {
		if trimmed := strings.TrimSpace(p); trimmed != "" {
			filtered = append(filtered, trimmed)
		}
	}
	if len(filtered) == 0 {
		return
	}
	go func() {
		ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
		defer cancel()
		if err := w.storePersister.PersistAuthFiles(ctx, message, filtered...); err != nil {
			log.Errorf("failed to persist auth changes: %v", err)
		}
	}()
}
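Both persist helpers share the same shape: a fire-and-forget goroutine bounded by a 30-second context, so a slow store can never block the watcher's event loop. A reduced sketch of that pattern:

	// Generic async-persist shape used by both helpers above.
	persist := func(do func(context.Context) error) {
		go func() {
			ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
			defer cancel()
			if err := do(ctx); err != nil {
				log.Errorf("persist failed: %v", err)
			}
		}()
	}
	_ = persist
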
internal/watcher/config_reload.go (new file, 134 lines)
@@ -0,0 +1,134 @@
// config_reload.go implements debounced configuration hot reload.
// It detects material changes and reloads clients when the config changes.
package watcher

import (
	"crypto/sha256"
	"encoding/hex"
	"os"
	"time"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
	"gopkg.in/yaml.v3"

	log "github.com/sirupsen/logrus"
)

func (w *Watcher) stopConfigReloadTimer() {
	w.configReloadMu.Lock()
	if w.configReloadTimer != nil {
		w.configReloadTimer.Stop()
		w.configReloadTimer = nil
	}
	w.configReloadMu.Unlock()
}

func (w *Watcher) scheduleConfigReload() {
	w.configReloadMu.Lock()
	defer w.configReloadMu.Unlock()
	if w.configReloadTimer != nil {
		w.configReloadTimer.Stop()
	}
	w.configReloadTimer = time.AfterFunc(configReloadDebounce, func() {
		w.configReloadMu.Lock()
		w.configReloadTimer = nil
		w.configReloadMu.Unlock()
		w.reloadConfigIfChanged()
	})
}

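scheduleConfigReload is a classic trailing-edge debounce: each event resets the timer, so a burst of writes yields a single reload once the burst goes quiet for configReloadDebounce. A self-contained sketch of the same idea (configReloadDebounce is defined elsewhere in the package; the sketch would also need a sync import):

	var (
		mu    sync.Mutex
		timer *time.Timer
	)
	schedule := func(d time.Duration, fn func()) {
		mu.Lock()
		defer mu.Unlock()
		if timer != nil {
			timer.Stop() // a newer event supersedes the pending one
		}
		timer = time.AfterFunc(d, fn)
	}
	_ = schedule
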
func (w *Watcher) reloadConfigIfChanged() {
	data, err := os.ReadFile(w.configPath)
	if err != nil {
		log.Errorf("failed to read config file for hash check: %v", err)
		return
	}
	if len(data) == 0 {
		log.Debugf("ignoring empty config file write event")
		return
	}
	sum := sha256.Sum256(data)
	newHash := hex.EncodeToString(sum[:])

	w.clientsMutex.RLock()
	currentHash := w.lastConfigHash
	w.clientsMutex.RUnlock()

	if currentHash != "" && currentHash == newHash {
		log.Debugf("config file content unchanged (hash match), skipping reload")
		return
	}
	log.Infof("config file changed, reloading: %s", w.configPath)
	if w.reloadConfig() {
		finalHash := newHash
		if updatedData, errRead := os.ReadFile(w.configPath); errRead == nil && len(updatedData) > 0 {
			sumUpdated := sha256.Sum256(updatedData)
			finalHash = hex.EncodeToString(sumUpdated[:])
		} else if errRead != nil {
			log.WithError(errRead).Debug("failed to compute updated config hash after reload")
		}
		w.clientsMutex.Lock()
		w.lastConfigHash = finalHash
		w.clientsMutex.Unlock()
		w.persistConfigAsync()
	}
}

func (w *Watcher) reloadConfig() bool {
	log.Debug("=========================== CONFIG RELOAD ============================")
	log.Debugf("starting config reload from: %s", w.configPath)

	newConfig, errLoadConfig := config.LoadConfig(w.configPath)
	if errLoadConfig != nil {
		log.Errorf("failed to reload config: %v", errLoadConfig)
		return false
	}

	if w.mirroredAuthDir != "" {
		newConfig.AuthDir = w.mirroredAuthDir
	} else {
		if resolvedAuthDir, errResolveAuthDir := util.ResolveAuthDir(newConfig.AuthDir); errResolveAuthDir != nil {
			log.Errorf("failed to resolve auth directory from config: %v", errResolveAuthDir)
		} else {
			newConfig.AuthDir = resolvedAuthDir
		}
	}

	w.clientsMutex.Lock()
	var oldConfig *config.Config
	_ = yaml.Unmarshal(w.oldConfigYaml, &oldConfig)
	w.oldConfigYaml, _ = yaml.Marshal(newConfig)
	w.config = newConfig
	w.clientsMutex.Unlock()

	var affectedOAuthProviders []string
	if oldConfig != nil {
		_, affectedOAuthProviders = diff.DiffOAuthExcludedModelChanges(oldConfig.OAuthExcludedModels, newConfig.OAuthExcludedModels)
	}

	util.SetLogLevel(newConfig)
	if oldConfig != nil && oldConfig.Debug != newConfig.Debug {
		log.Debugf("log level updated - debug mode changed from %t to %t", oldConfig.Debug, newConfig.Debug)
	}

	if oldConfig != nil {
		details := diff.BuildConfigChangeDetails(oldConfig, newConfig)
		if len(details) > 0 {
			log.Debugf("config changes detected:")
			for _, d := range details {
				log.Debugf(" %s", d)
			}
		} else {
			log.Debugf("no material config field changes detected")
		}
	}

	authDirChanged := oldConfig == nil || oldConfig.AuthDir != newConfig.AuthDir
	forceAuthRefresh := oldConfig != nil && oldConfig.ForceModelPrefix != newConfig.ForceModelPrefix

	log.Infof("config successfully reloaded, triggering client reload")
	w.reloadClients(authDirChanged, affectedOAuthProviders, forceAuthRefresh)
	return true
}
@@ -82,7 +82,7 @@ func BuildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
 			changes = append(changes, fmt.Sprintf("gemini[%d].proxy-url: %s -> %s", i, formatProxyURL(o.ProxyURL), formatProxyURL(n.ProxyURL)))
 		}
 		if strings.TrimSpace(o.Prefix) != strings.TrimSpace(n.Prefix) {
-			changes = append(changes, fmt.Sprintf("gemini[%d].prefix: %s -> %s", i, formatProxyURL(o.Prefix), formatProxyURL(n.Prefix)))
+			changes = append(changes, fmt.Sprintf("gemini[%d].prefix: %s -> %s", i, strings.TrimSpace(o.Prefix), strings.TrimSpace(n.Prefix)))
 		}
 		if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
 			changes = append(changes, fmt.Sprintf("gemini[%d].api-key: updated", i))
@@ -112,7 +112,7 @@ func BuildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
 			changes = append(changes, fmt.Sprintf("claude[%d].proxy-url: %s -> %s", i, formatProxyURL(o.ProxyURL), formatProxyURL(n.ProxyURL)))
 		}
 		if strings.TrimSpace(o.Prefix) != strings.TrimSpace(n.Prefix) {
-			changes = append(changes, fmt.Sprintf("claude[%d].prefix: %s -> %s", i, formatProxyURL(o.Prefix), formatProxyURL(n.Prefix)))
+			changes = append(changes, fmt.Sprintf("claude[%d].prefix: %s -> %s", i, strings.TrimSpace(o.Prefix), strings.TrimSpace(n.Prefix)))
 		}
 		if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
 			changes = append(changes, fmt.Sprintf("claude[%d].api-key: updated", i))
@@ -142,7 +142,7 @@ func BuildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
 			changes = append(changes, fmt.Sprintf("codex[%d].proxy-url: %s -> %s", i, formatProxyURL(o.ProxyURL), formatProxyURL(n.ProxyURL)))
 		}
 		if strings.TrimSpace(o.Prefix) != strings.TrimSpace(n.Prefix) {
-			changes = append(changes, fmt.Sprintf("codex[%d].prefix: %s -> %s", i, formatProxyURL(o.Prefix), formatProxyURL(n.Prefix)))
+			changes = append(changes, fmt.Sprintf("codex[%d].prefix: %s -> %s", i, strings.TrimSpace(o.Prefix), strings.TrimSpace(n.Prefix)))
 		}
 		if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
 			changes = append(changes, fmt.Sprintf("codex[%d].api-key: updated", i))
@@ -235,7 +235,7 @@ func BuildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
 			changes = append(changes, fmt.Sprintf("vertex[%d].proxy-url: %s -> %s", i, formatProxyURL(o.ProxyURL), formatProxyURL(n.ProxyURL)))
 		}
 		if strings.TrimSpace(o.Prefix) != strings.TrimSpace(n.Prefix) {
-			changes = append(changes, fmt.Sprintf("vertex[%d].prefix: %s -> %s", i, formatProxyURL(o.Prefix), formatProxyURL(n.Prefix)))
+			changes = append(changes, fmt.Sprintf("vertex[%d].prefix: %s -> %s", i, strings.TrimSpace(o.Prefix), strings.TrimSpace(n.Prefix)))
 		}
 		if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
 			changes = append(changes, fmt.Sprintf("vertex[%d].api-key: updated", i))
@@ -231,9 +231,10 @@ func TestBuildConfigChangeDetails_FlagsAndKeys(t *testing.T) {
 		AmpCode:          config.AmpCode{UpstreamAPIKey: "keep", RestrictManagementToLocalhost: false},
 		RemoteManagement: config.RemoteManagement{DisableControlPanel: false, PanelGitHubRepository: "old/repo", SecretKey: "keep"},
 		SDKConfig: sdkconfig.SDKConfig{
-			RequestLog: false,
-			ProxyURL:   "http://old-proxy",
-			APIKeys:    []string{"key-1"},
+			RequestLog:       false,
+			ProxyURL:         "http://old-proxy",
+			APIKeys:          []string{"key-1"},
+			ForceModelPrefix: false,
 		},
 	}
 	newCfg := &config.Config{
@@ -266,9 +267,10 @@ func TestBuildConfigChangeDetails_FlagsAndKeys(t *testing.T) {
 			SecretKey:             "",
 		},
 		SDKConfig: sdkconfig.SDKConfig{
-			RequestLog: true,
-			ProxyURL:   "http://new-proxy",
-			APIKeys:    []string{" key-1 ", "key-2"},
+			RequestLog:       true,
+			ProxyURL:         "http://new-proxy",
+			APIKeys:          []string{" key-1 ", "key-2"},
+			ForceModelPrefix: true,
 		},
 	}
@@ -282,6 +284,7 @@ func TestBuildConfigChangeDetails_FlagsAndKeys(t *testing.T) {
 	expectContains(t, details, "max-retry-interval: 1 -> 3")
 	expectContains(t, details, "proxy-url: http://old-proxy -> http://new-proxy")
 	expectContains(t, details, "ws-auth: false -> true")
+	expectContains(t, details, "force-model-prefix: false -> true")
 	expectContains(t, details, "quota-exceeded.switch-project: false -> true")
 	expectContains(t, details, "quota-exceeded.switch-preview-model: false -> true")
 	expectContains(t, details, "api-keys count: 1 -> 2")
internal/watcher/dispatcher.go (new file, 273 lines)
@@ -0,0 +1,273 @@
// dispatcher.go implements auth update dispatching and queue management.
// It batches, deduplicates, and delivers auth updates to registered consumers.
package watcher

import (
	"context"
	"fmt"
	"reflect"
	"sync"
	"time"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/synthesizer"
	coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

func (w *Watcher) setAuthUpdateQueue(queue chan<- AuthUpdate) {
	w.clientsMutex.Lock()
	defer w.clientsMutex.Unlock()
	w.authQueue = queue
	if w.dispatchCond == nil {
		w.dispatchCond = sync.NewCond(&w.dispatchMu)
	}
	if w.dispatchCancel != nil {
		w.dispatchCancel()
		if w.dispatchCond != nil {
			w.dispatchMu.Lock()
			w.dispatchCond.Broadcast()
			w.dispatchMu.Unlock()
		}
		w.dispatchCancel = nil
	}
	if queue != nil {
		ctx, cancel := context.WithCancel(context.Background())
		w.dispatchCancel = cancel
		go w.dispatchLoop(ctx)
	}
}

func (w *Watcher) dispatchRuntimeAuthUpdate(update AuthUpdate) bool {
	if w == nil {
		return false
	}
	w.clientsMutex.Lock()
	if w.runtimeAuths == nil {
		w.runtimeAuths = make(map[string]*coreauth.Auth)
	}
	switch update.Action {
	case AuthUpdateActionAdd, AuthUpdateActionModify:
		if update.Auth != nil && update.Auth.ID != "" {
			clone := update.Auth.Clone()
			w.runtimeAuths[clone.ID] = clone
			if w.currentAuths == nil {
				w.currentAuths = make(map[string]*coreauth.Auth)
			}
			w.currentAuths[clone.ID] = clone.Clone()
		}
	case AuthUpdateActionDelete:
		id := update.ID
		if id == "" && update.Auth != nil {
			id = update.Auth.ID
		}
		if id != "" {
			delete(w.runtimeAuths, id)
			if w.currentAuths != nil {
				delete(w.currentAuths, id)
			}
		}
	}
	w.clientsMutex.Unlock()
	if w.getAuthQueue() == nil {
		return false
	}
	w.dispatchAuthUpdates([]AuthUpdate{update})
	return true
}

func (w *Watcher) refreshAuthState(force bool) {
	auths := w.SnapshotCoreAuths()
	w.clientsMutex.Lock()
	if len(w.runtimeAuths) > 0 {
		for _, a := range w.runtimeAuths {
			if a != nil {
				auths = append(auths, a.Clone())
			}
		}
	}
	updates := w.prepareAuthUpdatesLocked(auths, force)
	w.clientsMutex.Unlock()
	w.dispatchAuthUpdates(updates)
}

func (w *Watcher) prepareAuthUpdatesLocked(auths []*coreauth.Auth, force bool) []AuthUpdate {
	newState := make(map[string]*coreauth.Auth, len(auths))
	for _, auth := range auths {
		if auth == nil || auth.ID == "" {
			continue
		}
		newState[auth.ID] = auth.Clone()
	}
	if w.currentAuths == nil {
		w.currentAuths = newState
		if w.authQueue == nil {
			return nil
		}
		updates := make([]AuthUpdate, 0, len(newState))
		for id, auth := range newState {
			updates = append(updates, AuthUpdate{Action: AuthUpdateActionAdd, ID: id, Auth: auth.Clone()})
		}
		return updates
	}
	if w.authQueue == nil {
		w.currentAuths = newState
		return nil
	}
	updates := make([]AuthUpdate, 0, len(newState)+len(w.currentAuths))
	for id, auth := range newState {
		if existing, ok := w.currentAuths[id]; !ok {
			updates = append(updates, AuthUpdate{Action: AuthUpdateActionAdd, ID: id, Auth: auth.Clone()})
		} else if force || !authEqual(existing, auth) {
			updates = append(updates, AuthUpdate{Action: AuthUpdateActionModify, ID: id, Auth: auth.Clone()})
		}
	}
	for id := range w.currentAuths {
		if _, ok := newState[id]; !ok {
			updates = append(updates, AuthUpdate{Action: AuthUpdateActionDelete, ID: id})
		}
	}
	w.currentAuths = newState
	return updates
}

func (w *Watcher) dispatchAuthUpdates(updates []AuthUpdate) {
	if len(updates) == 0 {
		return
	}
	queue := w.getAuthQueue()
	if queue == nil {
		return
	}
	baseTS := time.Now().UnixNano()
	w.dispatchMu.Lock()
	if w.pendingUpdates == nil {
		w.pendingUpdates = make(map[string]AuthUpdate)
	}
	for idx, update := range updates {
		key := w.authUpdateKey(update, baseTS+int64(idx))
		if _, exists := w.pendingUpdates[key]; !exists {
			w.pendingOrder = append(w.pendingOrder, key)
		}
		w.pendingUpdates[key] = update
	}
	if w.dispatchCond != nil {
		w.dispatchCond.Signal()
	}
	w.dispatchMu.Unlock()
}

func (w *Watcher) authUpdateKey(update AuthUpdate, ts int64) string {
	if update.ID != "" {
		return update.ID
	}
	return fmt.Sprintf("%s:%d", update.Action, ts)
}

func (w *Watcher) dispatchLoop(ctx context.Context) {
	for {
		batch, ok := w.nextPendingBatch(ctx)
		if !ok {
			return
		}
		queue := w.getAuthQueue()
		if queue == nil {
			if ctx.Err() != nil {
				return
			}
			time.Sleep(10 * time.Millisecond)
			continue
		}
		for _, update := range batch {
			select {
			case queue <- update:
			case <-ctx.Done():
				return
			}
		}
	}
}

func (w *Watcher) nextPendingBatch(ctx context.Context) ([]AuthUpdate, bool) {
	w.dispatchMu.Lock()
	defer w.dispatchMu.Unlock()
	for len(w.pendingOrder) == 0 {
		if ctx.Err() != nil {
			return nil, false
		}
		w.dispatchCond.Wait()
		if ctx.Err() != nil {
			return nil, false
		}
	}
	batch := make([]AuthUpdate, 0, len(w.pendingOrder))
	for _, key := range w.pendingOrder {
		batch = append(batch, w.pendingUpdates[key])
		delete(w.pendingUpdates, key)
	}
	w.pendingOrder = w.pendingOrder[:0]
	return batch, true
}

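Taken together this is a condition-variable queue with last-write-wins dedup: updates for the same auth ID overwrite the pending entry while pendingOrder preserves first-seen ordering, and dispatchLoop drains whole batches. A sketch of the consumer side, using the types defined in this package (the buffer size is an assumption):

	// Hypothetical consumer wired up via w.setAuthUpdateQueue(queue).
	queue := make(chan AuthUpdate, 64)
	go func() {
		for update := range queue {
			switch update.Action {
			case AuthUpdateActionAdd, AuthUpdateActionModify:
				// register or refresh update.Auth
			case AuthUpdateActionDelete:
				// drop the auth identified by update.ID
			}
		}
	}()
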
func (w *Watcher) getAuthQueue() chan<- AuthUpdate {
	w.clientsMutex.RLock()
	defer w.clientsMutex.RUnlock()
	return w.authQueue
}

func (w *Watcher) stopDispatch() {
	if w.dispatchCancel != nil {
		w.dispatchCancel()
		w.dispatchCancel = nil
	}
	w.dispatchMu.Lock()
	w.pendingOrder = nil
	w.pendingUpdates = nil
	if w.dispatchCond != nil {
		w.dispatchCond.Broadcast()
	}
	w.dispatchMu.Unlock()
	w.clientsMutex.Lock()
	w.authQueue = nil
	w.clientsMutex.Unlock()
}

func authEqual(a, b *coreauth.Auth) bool {
	return reflect.DeepEqual(normalizeAuth(a), normalizeAuth(b))
}

func normalizeAuth(a *coreauth.Auth) *coreauth.Auth {
	if a == nil {
		return nil
	}
	clone := a.Clone()
	clone.CreatedAt = time.Time{}
	clone.UpdatedAt = time.Time{}
	clone.LastRefreshedAt = time.Time{}
	clone.NextRefreshAfter = time.Time{}
	clone.Runtime = nil
	clone.Quota.NextRecoverAt = time.Time{}
	return clone
}

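authEqual deliberately compares a normalized copy: volatile fields (timestamps, runtime handle, quota recovery time) are zeroed first, so a token refresh that only bumps UpdatedAt does not count as a modification. A sketch (field values illustrative):

	// Two snapshots that differ only in UpdatedAt compare equal.
	a := &coreauth.Auth{ID: "x", Provider: "gemini", UpdatedAt: time.Now()}
	b := a.Clone()
	b.UpdatedAt = b.UpdatedAt.Add(time.Hour)
	_ = authEqual(a, b) // true: timestamps are stripped before DeepEqual
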
func snapshotCoreAuths(cfg *config.Config, authDir string) []*coreauth.Auth {
	ctx := &synthesizer.SynthesisContext{
		Config:      cfg,
		AuthDir:     authDir,
		Now:         time.Now(),
		IDGenerator: synthesizer.NewStableIDGenerator(),
	}

	var out []*coreauth.Auth

	configSynth := synthesizer.NewConfigSynthesizer()
	if auths, err := configSynth.Synthesize(ctx); err == nil {
		out = append(out, auths...)
	}

	fileSynth := synthesizer.NewFileSynthesizer()
	if auths, err := fileSynth.Synthesize(ctx); err == nil {
		out = append(out, auths...)
	}

	return out
}
internal/watcher/events.go (new file, 194 lines)
@@ -0,0 +1,194 @@
// events.go implements fsnotify event handling for config and auth file changes.
// It normalizes paths, debounces noisy events, and triggers reload/update logic.
package watcher

import (
	"context"
	"crypto/sha256"
	"encoding/hex"
	"os"
	"path/filepath"
	"runtime"
	"strings"
	"time"

	"github.com/fsnotify/fsnotify"
	log "github.com/sirupsen/logrus"
)

func matchProvider(provider string, targets []string) (string, bool) {
	p := strings.ToLower(strings.TrimSpace(provider))
	for _, t := range targets {
		if strings.EqualFold(p, strings.TrimSpace(t)) {
			return p, true
		}
	}
	return p, false
}

func (w *Watcher) start(ctx context.Context) error {
	if errAddConfig := w.watcher.Add(w.configPath); errAddConfig != nil {
		log.Errorf("failed to watch config file %s: %v", w.configPath, errAddConfig)
		return errAddConfig
	}
	log.Debugf("watching config file: %s", w.configPath)

	if errAddAuthDir := w.watcher.Add(w.authDir); errAddAuthDir != nil {
		log.Errorf("failed to watch auth directory %s: %v", w.authDir, errAddAuthDir)
		return errAddAuthDir
	}
	log.Debugf("watching auth directory: %s", w.authDir)

	go w.processEvents(ctx)

	w.reloadClients(true, nil, false)
	return nil
}

func (w *Watcher) processEvents(ctx context.Context) {
	for {
		select {
		case <-ctx.Done():
			return
		case event, ok := <-w.watcher.Events:
			if !ok {
				return
			}
			w.handleEvent(event)
		case errWatch, ok := <-w.watcher.Errors:
			if !ok {
				return
			}
			log.Errorf("file watcher error: %v", errWatch)
		}
	}
}

func (w *Watcher) handleEvent(event fsnotify.Event) {
	// Filter only relevant events: config file or auth-dir JSON files.
	configOps := fsnotify.Write | fsnotify.Create | fsnotify.Rename
	normalizedName := w.normalizeAuthPath(event.Name)
	normalizedConfigPath := w.normalizeAuthPath(w.configPath)
	normalizedAuthDir := w.normalizeAuthPath(w.authDir)
	isConfigEvent := normalizedName == normalizedConfigPath && event.Op&configOps != 0
	authOps := fsnotify.Create | fsnotify.Write | fsnotify.Remove | fsnotify.Rename
	isAuthJSON := strings.HasPrefix(normalizedName, normalizedAuthDir) && strings.HasSuffix(normalizedName, ".json") && event.Op&authOps != 0
	if !isConfigEvent && !isAuthJSON {
		// Ignore unrelated files (e.g., cookie snapshots *.cookie) and other noise.
		return
	}

	now := time.Now()
	log.Debugf("file system event detected: %s %s", event.Op.String(), event.Name)

	// Handle config file changes
	if isConfigEvent {
		log.Debugf("config file change details - operation: %s, timestamp: %s", event.Op.String(), now.Format("2006-01-02 15:04:05.000"))
		w.scheduleConfigReload()
		return
	}

	// Handle auth directory changes incrementally (.json only)
	if event.Op&(fsnotify.Remove|fsnotify.Rename) != 0 {
		if w.shouldDebounceRemove(normalizedName, now) {
			log.Debugf("debouncing remove event for %s", filepath.Base(event.Name))
			return
		}
		// Atomic replace on some platforms may surface as Rename (or Remove) before the new file is ready.
		// Wait briefly; if the path exists again, treat as an update instead of removal.
		time.Sleep(replaceCheckDelay)
		if _, statErr := os.Stat(event.Name); statErr == nil {
			if unchanged, errSame := w.authFileUnchanged(event.Name); errSame == nil && unchanged {
				log.Debugf("auth file unchanged (hash match), skipping reload: %s", filepath.Base(event.Name))
				return
			}
			log.Infof("auth file changed (%s): %s, processing incrementally", event.Op.String(), filepath.Base(event.Name))
			w.addOrUpdateClient(event.Name)
			return
		}
		if !w.isKnownAuthFile(event.Name) {
			log.Debugf("ignoring remove for unknown auth file: %s", filepath.Base(event.Name))
			return
		}
		log.Infof("auth file changed (%s): %s, processing incrementally", event.Op.String(), filepath.Base(event.Name))
		w.removeClient(event.Name)
		return
	}
	if event.Op&(fsnotify.Create|fsnotify.Write) != 0 {
		if unchanged, errSame := w.authFileUnchanged(event.Name); errSame == nil && unchanged {
			log.Debugf("auth file unchanged (hash match), skipping reload: %s", filepath.Base(event.Name))
			return
		}
		log.Infof("auth file changed (%s): %s, processing incrementally", event.Op.String(), filepath.Base(event.Name))
		w.addOrUpdateClient(event.Name)
	}
}

func (w *Watcher) authFileUnchanged(path string) (bool, error) {
	data, errRead := os.ReadFile(path)
	if errRead != nil {
		return false, errRead
	}
	if len(data) == 0 {
		return false, nil
	}
	sum := sha256.Sum256(data)
	curHash := hex.EncodeToString(sum[:])

	normalized := w.normalizeAuthPath(path)
	w.clientsMutex.RLock()
	prevHash, ok := w.lastAuthHashes[normalized]
	w.clientsMutex.RUnlock()
	if ok && prevHash == curHash {
		return true, nil
	}
	return false, nil
}

func (w *Watcher) isKnownAuthFile(path string) bool {
	normalized := w.normalizeAuthPath(path)
	w.clientsMutex.RLock()
	defer w.clientsMutex.RUnlock()
	_, ok := w.lastAuthHashes[normalized]
	return ok
}

func (w *Watcher) normalizeAuthPath(path string) string {
	trimmed := strings.TrimSpace(path)
	if trimmed == "" {
		return ""
	}
	cleaned := filepath.Clean(trimmed)
	if runtime.GOOS == "windows" {
		cleaned = strings.TrimPrefix(cleaned, `\\?\`)
		cleaned = strings.ToLower(cleaned)
	}
	return cleaned
}

func (w *Watcher) shouldDebounceRemove(normalizedPath string, now time.Time) bool {
	if normalizedPath == "" {
		return false
	}
	w.clientsMutex.Lock()
	if w.lastRemoveTimes == nil {
		w.lastRemoveTimes = make(map[string]time.Time)
	}
	if last, ok := w.lastRemoveTimes[normalizedPath]; ok {
		if now.Sub(last) < authRemoveDebounceWindow {
			w.clientsMutex.Unlock()
			return true
		}
	}
	w.lastRemoveTimes[normalizedPath] = now
	if len(w.lastRemoveTimes) > 128 {
		cutoff := now.Add(-2 * authRemoveDebounceWindow)
		for p, t := range w.lastRemoveTimes {
			if t.Before(cutoff) {
				delete(w.lastRemoveTimes, p)
			}
		}
	}
	w.clientsMutex.Unlock()
	return false
}
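normalizeAuthPath is what keeps the hash-map keys stable across different spellings of the same file; on Windows it additionally strips the long-path prefix and folds case, as the code above shows. A sketch of inputs that collapse to one key (paths illustrative):

	// On Windows, these normalize to the same key, `c:\auths\a.json`:
	//   C:\auths\a.json
	//   \\?\C:\auths\a.json
	//   C:\auths\.\A.JSON
	// On Unix, only filepath.Clean applies: "/auths//a.json" -> "/auths/a.json".
	key := w.normalizeAuthPath("/auths//a.json")
	_ = key
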
internal/watcher/synthesizer/config.go (new file, 294 lines)
@@ -0,0 +1,294 @@
package synthesizer

import (
	"fmt"
	"strings"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
	coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

// ConfigSynthesizer generates Auth entries from configuration API keys.
// It handles Gemini, Claude, Codex, OpenAI-compat, and Vertex-compat providers.
type ConfigSynthesizer struct{}

// NewConfigSynthesizer creates a new ConfigSynthesizer instance.
func NewConfigSynthesizer() *ConfigSynthesizer {
	return &ConfigSynthesizer{}
}

// Synthesize generates Auth entries from config API keys.
func (s *ConfigSynthesizer) Synthesize(ctx *SynthesisContext) ([]*coreauth.Auth, error) {
	out := make([]*coreauth.Auth, 0, 32)
	if ctx == nil || ctx.Config == nil {
		return out, nil
	}

	// Gemini API Keys
	out = append(out, s.synthesizeGeminiKeys(ctx)...)
	// Claude API Keys
	out = append(out, s.synthesizeClaudeKeys(ctx)...)
	// Codex API Keys
	out = append(out, s.synthesizeCodexKeys(ctx)...)
	// OpenAI-compat
	out = append(out, s.synthesizeOpenAICompat(ctx)...)
	// Vertex-compat
	out = append(out, s.synthesizeVertexCompat(ctx)...)

	return out, nil
}

// synthesizeGeminiKeys creates Auth entries for Gemini API keys.
func (s *ConfigSynthesizer) synthesizeGeminiKeys(ctx *SynthesisContext) []*coreauth.Auth {
	cfg := ctx.Config
	now := ctx.Now
	idGen := ctx.IDGenerator

	out := make([]*coreauth.Auth, 0, len(cfg.GeminiKey))
	for i := range cfg.GeminiKey {
		entry := cfg.GeminiKey[i]
		key := strings.TrimSpace(entry.APIKey)
		if key == "" {
			continue
		}
		prefix := strings.TrimSpace(entry.Prefix)
		base := strings.TrimSpace(entry.BaseURL)
		proxyURL := strings.TrimSpace(entry.ProxyURL)
		id, token := idGen.Next("gemini:apikey", key, base)
		attrs := map[string]string{
			"source":  fmt.Sprintf("config:gemini[%s]", token),
			"api_key": key,
		}
		if base != "" {
			attrs["base_url"] = base
		}
		addConfigHeadersToAttrs(entry.Headers, attrs)
		a := &coreauth.Auth{
			ID:         id,
			Provider:   "gemini",
			Label:      "gemini-apikey",
			Prefix:     prefix,
			Status:     coreauth.StatusActive,
			ProxyURL:   proxyURL,
			Attributes: attrs,
			CreatedAt:  now,
			UpdatedAt:  now,
		}
		ApplyAuthExcludedModelsMeta(a, cfg, entry.ExcludedModels, "apikey")
		out = append(out, a)
	}
	return out
}

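Each synthesized Auth is deterministic: the StableIDGenerator derives the ID from the provider kind, key, and base URL, so reloading an unchanged config yields identical IDs and the dispatcher sees no spurious modifications. A sketch of the output for one config entry (field values illustrative):

	// config:  gemini-key: [{api-key: "k-123", prefix: "team-a"}]
	// yields roughly:
	//   &coreauth.Auth{
	//       Provider:   "gemini",
	//       Label:      "gemini-apikey",
	//       Prefix:     "team-a",
	//       Status:     coreauth.StatusActive,
	//       Attributes: map[string]string{"source": "config:gemini[...]", "api_key": "k-123"},
	//   }
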
// synthesizeClaudeKeys creates Auth entries for Claude API keys.
|
||||
func (s *ConfigSynthesizer) synthesizeClaudeKeys(ctx *SynthesisContext) []*coreauth.Auth {
|
||||
cfg := ctx.Config
|
||||
now := ctx.Now
|
||||
idGen := ctx.IDGenerator
|
||||
|
||||
out := make([]*coreauth.Auth, 0, len(cfg.ClaudeKey))
|
||||
for i := range cfg.ClaudeKey {
|
||||
ck := cfg.ClaudeKey[i]
|
||||
key := strings.TrimSpace(ck.APIKey)
|
||||
if key == "" {
|
||||
continue
|
||||
}
|
||||
prefix := strings.TrimSpace(ck.Prefix)
|
||||
base := strings.TrimSpace(ck.BaseURL)
|
||||
id, token := idGen.Next("claude:apikey", key, base)
|
||||
attrs := map[string]string{
|
||||
"source": fmt.Sprintf("config:claude[%s]", token),
|
||||
"api_key": key,
|
||||
}
|
||||
if base != "" {
|
||||
attrs["base_url"] = base
|
||||
}
|
||||
if hash := diff.ComputeClaudeModelsHash(ck.Models); hash != "" {
|
||||
attrs["models_hash"] = hash
|
||||
}
|
||||
addConfigHeadersToAttrs(ck.Headers, attrs)
|
||||
proxyURL := strings.TrimSpace(ck.ProxyURL)
|
||||
a := &coreauth.Auth{
|
||||
ID: id,
|
||||
Provider: "claude",
|
||||
Label: "claude-apikey",
|
||||
Prefix: prefix,
|
||||
Status: coreauth.StatusActive,
|
||||
ProxyURL: proxyURL,
|
||||
Attributes: attrs,
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
}
|
||||
ApplyAuthExcludedModelsMeta(a, cfg, ck.ExcludedModels, "apikey")
|
||||
out = append(out, a)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// synthesizeCodexKeys creates Auth entries for Codex API keys.
|
||||
func (s *ConfigSynthesizer) synthesizeCodexKeys(ctx *SynthesisContext) []*coreauth.Auth {
|
||||
cfg := ctx.Config
|
||||
now := ctx.Now
|
||||
idGen := ctx.IDGenerator
|
||||
|
||||
out := make([]*coreauth.Auth, 0, len(cfg.CodexKey))
|
||||
for i := range cfg.CodexKey {
|
||||
ck := cfg.CodexKey[i]
|
||||
key := strings.TrimSpace(ck.APIKey)
|
||||
if key == "" {
|
||||
continue
|
||||
}
|
||||
prefix := strings.TrimSpace(ck.Prefix)
|
||||
id, token := idGen.Next("codex:apikey", key, ck.BaseURL)
|
||||
attrs := map[string]string{
|
||||
"source": fmt.Sprintf("config:codex[%s]", token),
|
||||
"api_key": key,
|
||||
}
|
||||
if ck.BaseURL != "" {
|
||||
attrs["base_url"] = ck.BaseURL
|
||||
}
|
||||
addConfigHeadersToAttrs(ck.Headers, attrs)
|
||||
proxyURL := strings.TrimSpace(ck.ProxyURL)
|
||||
a := &coreauth.Auth{
|
||||
ID: id,
|
||||
Provider: "codex",
|
||||
Label: "codex-apikey",
|
||||
Prefix: prefix,
|
||||
Status: coreauth.StatusActive,
|
||||
ProxyURL: proxyURL,
|
||||
Attributes: attrs,
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
}
|
||||
ApplyAuthExcludedModelsMeta(a, cfg, ck.ExcludedModels, "apikey")
|
||||
out = append(out, a)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// synthesizeOpenAICompat creates Auth entries for OpenAI-compatible providers.
|
||||
func (s *ConfigSynthesizer) synthesizeOpenAICompat(ctx *SynthesisContext) []*coreauth.Auth {
	cfg := ctx.Config
	now := ctx.Now
	idGen := ctx.IDGenerator

	out := make([]*coreauth.Auth, 0)
	for i := range cfg.OpenAICompatibility {
		compat := &cfg.OpenAICompatibility[i]
		prefix := strings.TrimSpace(compat.Prefix)
		providerName := strings.ToLower(strings.TrimSpace(compat.Name))
		if providerName == "" {
			providerName = "openai-compatibility"
		}
		base := strings.TrimSpace(compat.BaseURL)

		// Handle new APIKeyEntries format (preferred)
		createdEntries := 0
		for j := range compat.APIKeyEntries {
			entry := &compat.APIKeyEntries[j]
			key := strings.TrimSpace(entry.APIKey)
			proxyURL := strings.TrimSpace(entry.ProxyURL)
			idKind := fmt.Sprintf("openai-compatibility:%s", providerName)
			id, token := idGen.Next(idKind, key, base, proxyURL)
			attrs := map[string]string{
				"source":       fmt.Sprintf("config:%s[%s]", providerName, token),
				"base_url":     base,
				"compat_name":  compat.Name,
				"provider_key": providerName,
			}
			if key != "" {
				attrs["api_key"] = key
			}
			if hash := diff.ComputeOpenAICompatModelsHash(compat.Models); hash != "" {
				attrs["models_hash"] = hash
			}
			addConfigHeadersToAttrs(compat.Headers, attrs)
			a := &coreauth.Auth{
				ID:         id,
				Provider:   providerName,
				Label:      compat.Name,
				Prefix:     prefix,
				Status:     coreauth.StatusActive,
				ProxyURL:   proxyURL,
				Attributes: attrs,
				CreatedAt:  now,
				UpdatedAt:  now,
			}
			out = append(out, a)
			createdEntries++
		}
		// Fallback: create entry without API key if no APIKeyEntries
		if createdEntries == 0 {
			idKind := fmt.Sprintf("openai-compatibility:%s", providerName)
			id, token := idGen.Next(idKind, base)
			attrs := map[string]string{
				"source":       fmt.Sprintf("config:%s[%s]", providerName, token),
				"base_url":     base,
				"compat_name":  compat.Name,
				"provider_key": providerName,
			}
			if hash := diff.ComputeOpenAICompatModelsHash(compat.Models); hash != "" {
				attrs["models_hash"] = hash
			}
			addConfigHeadersToAttrs(compat.Headers, attrs)
			a := &coreauth.Auth{
				ID:         id,
				Provider:   providerName,
				Label:      compat.Name,
				Prefix:     prefix,
				Status:     coreauth.StatusActive,
				Attributes: attrs,
				CreatedAt:  now,
				UpdatedAt:  now,
			}
			out = append(out, a)
		}
	}
	return out
}

// synthesizeVertexCompat creates Auth entries for Vertex-compatible providers.
func (s *ConfigSynthesizer) synthesizeVertexCompat(ctx *SynthesisContext) []*coreauth.Auth {
	cfg := ctx.Config
	now := ctx.Now
	idGen := ctx.IDGenerator

	out := make([]*coreauth.Auth, 0, len(cfg.VertexCompatAPIKey))
	for i := range cfg.VertexCompatAPIKey {
		compat := &cfg.VertexCompatAPIKey[i]
		providerName := "vertex"
		base := strings.TrimSpace(compat.BaseURL)

		key := strings.TrimSpace(compat.APIKey)
		prefix := strings.TrimSpace(compat.Prefix)
		proxyURL := strings.TrimSpace(compat.ProxyURL)
		idKind := "vertex:apikey"
		id, token := idGen.Next(idKind, key, base, proxyURL)
		attrs := map[string]string{
			"source":       fmt.Sprintf("config:vertex-apikey[%s]", token),
			"base_url":     base,
			"provider_key": providerName,
		}
		if key != "" {
			attrs["api_key"] = key
		}
		if hash := diff.ComputeVertexCompatModelsHash(compat.Models); hash != "" {
			attrs["models_hash"] = hash
		}
		addConfigHeadersToAttrs(compat.Headers, attrs)
		a := &coreauth.Auth{
			ID:         id,
			Provider:   providerName,
			Label:      "vertex-apikey",
			Prefix:     prefix,
			Status:     coreauth.StatusActive,
			ProxyURL:   proxyURL,
			Attributes: attrs,
			CreatedAt:  now,
			UpdatedAt:  now,
		}
		ApplyAuthExcludedModelsMeta(a, cfg, nil, "apikey")
		out = append(out, a)
	}
	return out
}
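The test file that follows exercises both branches; as a compact, illustrative sketch of the contract above (the provider names, URLs, and keys here are invented for the example, not taken from the repository):

	// Hypothetical reading of synthesizeOpenAICompat: one provider with two API key
	// entries takes the preferred path, one with none takes the keyless fallback.
	cfg := &config.Config{
		OpenAICompatibility: []config.OpenAICompatibility{
			{
				Name:    "provider-a", // invented name
				BaseURL: "https://a.example.com",
				APIKeyEntries: []config.OpenAICompatibilityAPIKey{
					{APIKey: "key-1"},
					{APIKey: "key-2"},
				},
			},
			{Name: "provider-b", BaseURL: "https://b.example.com"}, // no entries: fallback
		},
	}
	ctx := &SynthesisContext{Config: cfg, Now: time.Now(), IDGenerator: NewStableIDGenerator()}
	auths, _ := NewConfigSynthesizer().Synthesize(ctx)
	// len(auths) == 3: two keyed auths for provider-a, one keyless auth for provider-b.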
613
internal/watcher/synthesizer/config_test.go
Normal file
@@ -0,0 +1,613 @@
package synthesizer

import (
	"testing"
	"time"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
	coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

func TestNewConfigSynthesizer(t *testing.T) {
	synth := NewConfigSynthesizer()
	if synth == nil {
		t.Fatal("expected non-nil synthesizer")
	}
}

func TestConfigSynthesizer_Synthesize_NilContext(t *testing.T) {
	synth := NewConfigSynthesizer()
	auths, err := synth.Synthesize(nil)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 0 {
		t.Fatalf("expected empty auths, got %d", len(auths))
	}
}

func TestConfigSynthesizer_Synthesize_NilConfig(t *testing.T) {
	synth := NewConfigSynthesizer()
	ctx := &SynthesisContext{
		Config:      nil,
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}
	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 0 {
		t.Fatalf("expected empty auths, got %d", len(auths))
	}
}

func TestConfigSynthesizer_GeminiKeys(t *testing.T) {
	tests := []struct {
		name       string
		geminiKeys []config.GeminiKey
		wantLen    int
		validate   func(*testing.T, []*coreauth.Auth)
	}{
		{
			name: "single gemini key",
			geminiKeys: []config.GeminiKey{
				{APIKey: "test-key-123", Prefix: "team-a"},
			},
			wantLen: 1,
			validate: func(t *testing.T, auths []*coreauth.Auth) {
				if auths[0].Provider != "gemini" {
					t.Errorf("expected provider gemini, got %s", auths[0].Provider)
				}
				if auths[0].Prefix != "team-a" {
					t.Errorf("expected prefix team-a, got %s", auths[0].Prefix)
				}
				if auths[0].Label != "gemini-apikey" {
					t.Errorf("expected label gemini-apikey, got %s", auths[0].Label)
				}
				if auths[0].Attributes["api_key"] != "test-key-123" {
					t.Errorf("expected api_key test-key-123, got %s", auths[0].Attributes["api_key"])
				}
				if auths[0].Status != coreauth.StatusActive {
					t.Errorf("expected status active, got %s", auths[0].Status)
				}
			},
		},
		{
			name: "gemini key with base url and proxy",
			geminiKeys: []config.GeminiKey{
				{
					APIKey:   "api-key",
					BaseURL:  "https://custom.api.com",
					ProxyURL: "http://proxy.local:8080",
					Prefix:   "custom",
				},
			},
			wantLen: 1,
			validate: func(t *testing.T, auths []*coreauth.Auth) {
				if auths[0].Attributes["base_url"] != "https://custom.api.com" {
					t.Errorf("expected base_url https://custom.api.com, got %s", auths[0].Attributes["base_url"])
				}
				if auths[0].ProxyURL != "http://proxy.local:8080" {
					t.Errorf("expected proxy_url http://proxy.local:8080, got %s", auths[0].ProxyURL)
				}
			},
		},
		{
			name: "gemini key with headers",
			geminiKeys: []config.GeminiKey{
				{
					APIKey:  "api-key",
					Headers: map[string]string{"X-Custom": "value"},
				},
			},
			wantLen: 1,
			validate: func(t *testing.T, auths []*coreauth.Auth) {
				if auths[0].Attributes["header:X-Custom"] != "value" {
					t.Errorf("expected header:X-Custom=value, got %s", auths[0].Attributes["header:X-Custom"])
				}
			},
		},
		{
			name: "empty api key skipped",
			geminiKeys: []config.GeminiKey{
				{APIKey: ""},
				{APIKey: "   "},
				{APIKey: "valid-key"},
			},
			wantLen: 1,
		},
		{
			name: "multiple gemini keys",
			geminiKeys: []config.GeminiKey{
				{APIKey: "key-1", Prefix: "a"},
				{APIKey: "key-2", Prefix: "b"},
				{APIKey: "key-3", Prefix: "c"},
			},
			wantLen: 3,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			synth := NewConfigSynthesizer()
			ctx := &SynthesisContext{
				Config: &config.Config{
					GeminiKey: tt.geminiKeys,
				},
				Now:         time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC),
				IDGenerator: NewStableIDGenerator(),
			}

			auths, err := synth.Synthesize(ctx)
			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}
			if len(auths) != tt.wantLen {
				t.Fatalf("expected %d auths, got %d", tt.wantLen, len(auths))
			}

			if tt.validate != nil && len(auths) > 0 {
				tt.validate(t, auths)
			}
		})
	}
}

func TestConfigSynthesizer_ClaudeKeys(t *testing.T) {
	synth := NewConfigSynthesizer()
	ctx := &SynthesisContext{
		Config: &config.Config{
			ClaudeKey: []config.ClaudeKey{
				{
					APIKey:  "sk-ant-api-xxx",
					Prefix:  "main",
					BaseURL: "https://api.anthropic.com",
					Models: []config.ClaudeModel{
						{Name: "claude-3-opus"},
						{Name: "claude-3-sonnet"},
					},
				},
			},
		},
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("expected 1 auth, got %d", len(auths))
	}

	if auths[0].Provider != "claude" {
		t.Errorf("expected provider claude, got %s", auths[0].Provider)
	}
	if auths[0].Label != "claude-apikey" {
		t.Errorf("expected label claude-apikey, got %s", auths[0].Label)
	}
	if auths[0].Prefix != "main" {
		t.Errorf("expected prefix main, got %s", auths[0].Prefix)
	}
	if auths[0].Attributes["api_key"] != "sk-ant-api-xxx" {
		t.Errorf("expected api_key sk-ant-api-xxx, got %s", auths[0].Attributes["api_key"])
	}
	if _, ok := auths[0].Attributes["models_hash"]; !ok {
		t.Error("expected models_hash in attributes")
	}
}

func TestConfigSynthesizer_ClaudeKeys_SkipsEmptyAndHeaders(t *testing.T) {
	synth := NewConfigSynthesizer()
	ctx := &SynthesisContext{
		Config: &config.Config{
			ClaudeKey: []config.ClaudeKey{
				{APIKey: ""},    // empty, should be skipped
				{APIKey: "   "}, // whitespace, should be skipped
				{APIKey: "valid-key", Headers: map[string]string{"X-Custom": "value"}},
			},
		},
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("expected 1 auth (empty keys skipped), got %d", len(auths))
	}
	if auths[0].Attributes["header:X-Custom"] != "value" {
		t.Errorf("expected header:X-Custom=value, got %s", auths[0].Attributes["header:X-Custom"])
	}
}

func TestConfigSynthesizer_CodexKeys(t *testing.T) {
	synth := NewConfigSynthesizer()
	ctx := &SynthesisContext{
		Config: &config.Config{
			CodexKey: []config.CodexKey{
				{
					APIKey:   "codex-key-123",
					Prefix:   "dev",
					BaseURL:  "https://api.openai.com",
					ProxyURL: "http://proxy.local",
				},
			},
		},
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("expected 1 auth, got %d", len(auths))
	}

	if auths[0].Provider != "codex" {
		t.Errorf("expected provider codex, got %s", auths[0].Provider)
	}
	if auths[0].Label != "codex-apikey" {
		t.Errorf("expected label codex-apikey, got %s", auths[0].Label)
	}
	if auths[0].ProxyURL != "http://proxy.local" {
		t.Errorf("expected proxy_url http://proxy.local, got %s", auths[0].ProxyURL)
	}
}

func TestConfigSynthesizer_CodexKeys_SkipsEmptyAndHeaders(t *testing.T) {
	synth := NewConfigSynthesizer()
	ctx := &SynthesisContext{
		Config: &config.Config{
			CodexKey: []config.CodexKey{
				{APIKey: ""},    // empty, should be skipped
				{APIKey: "   "}, // whitespace, should be skipped
				{APIKey: "valid-key", Headers: map[string]string{"Authorization": "Bearer xyz"}},
			},
		},
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("expected 1 auth (empty keys skipped), got %d", len(auths))
	}
	if auths[0].Attributes["header:Authorization"] != "Bearer xyz" {
		t.Errorf("expected header:Authorization=Bearer xyz, got %s", auths[0].Attributes["header:Authorization"])
	}
}

func TestConfigSynthesizer_OpenAICompat(t *testing.T) {
	tests := []struct {
		name    string
		compat  []config.OpenAICompatibility
		wantLen int
	}{
		{
			name: "with APIKeyEntries",
			compat: []config.OpenAICompatibility{
				{
					Name:    "CustomProvider",
					BaseURL: "https://custom.api.com",
					APIKeyEntries: []config.OpenAICompatibilityAPIKey{
						{APIKey: "key-1"},
						{APIKey: "key-2"},
					},
				},
			},
			wantLen: 2,
		},
		{
			name: "empty APIKeyEntries included (legacy)",
			compat: []config.OpenAICompatibility{
				{
					Name:    "EmptyKeys",
					BaseURL: "https://empty.api.com",
					APIKeyEntries: []config.OpenAICompatibilityAPIKey{
						{APIKey: ""},
						{APIKey: "   "},
					},
				},
			},
			wantLen: 2,
		},
		{
			name: "without APIKeyEntries (fallback)",
			compat: []config.OpenAICompatibility{
				{
					Name:    "NoKeyProvider",
					BaseURL: "https://no-key.api.com",
				},
			},
			wantLen: 1,
		},
		{
			name: "empty name defaults",
			compat: []config.OpenAICompatibility{
				{
					Name:    "",
					BaseURL: "https://default.api.com",
				},
			},
			wantLen: 1,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			synth := NewConfigSynthesizer()
			ctx := &SynthesisContext{
				Config: &config.Config{
					OpenAICompatibility: tt.compat,
				},
				Now:         time.Now(),
				IDGenerator: NewStableIDGenerator(),
			}

			auths, err := synth.Synthesize(ctx)
			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}
			if len(auths) != tt.wantLen {
				t.Fatalf("expected %d auths, got %d", tt.wantLen, len(auths))
			}
		})
	}
}

func TestConfigSynthesizer_VertexCompat(t *testing.T) {
	synth := NewConfigSynthesizer()
	ctx := &SynthesisContext{
		Config: &config.Config{
			VertexCompatAPIKey: []config.VertexCompatKey{
				{
					APIKey:  "vertex-key-123",
					BaseURL: "https://vertex.googleapis.com",
					Prefix:  "vertex-prod",
				},
			},
		},
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("expected 1 auth, got %d", len(auths))
	}

	if auths[0].Provider != "vertex" {
		t.Errorf("expected provider vertex, got %s", auths[0].Provider)
	}
	if auths[0].Label != "vertex-apikey" {
		t.Errorf("expected label vertex-apikey, got %s", auths[0].Label)
	}
	if auths[0].Prefix != "vertex-prod" {
		t.Errorf("expected prefix vertex-prod, got %s", auths[0].Prefix)
	}
}

func TestConfigSynthesizer_VertexCompat_SkipsEmptyAndHeaders(t *testing.T) {
	synth := NewConfigSynthesizer()
	ctx := &SynthesisContext{
		Config: &config.Config{
			VertexCompatAPIKey: []config.VertexCompatKey{
				{APIKey: "", BaseURL: "https://vertex.api"},    // empty key creates auth without api_key attr
				{APIKey: "   ", BaseURL: "https://vertex.api"}, // whitespace key creates auth without api_key attr
				{APIKey: "valid-key", BaseURL: "https://vertex.api", Headers: map[string]string{"X-Vertex": "test"}},
			},
		},
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	// Vertex compat doesn't skip empty keys - it creates auths without api_key attribute
	if len(auths) != 3 {
		t.Fatalf("expected 3 auths, got %d", len(auths))
	}
	// First two should not have api_key attribute
	if _, ok := auths[0].Attributes["api_key"]; ok {
		t.Error("expected first auth to not have api_key attribute")
	}
	if _, ok := auths[1].Attributes["api_key"]; ok {
		t.Error("expected second auth to not have api_key attribute")
	}
	// Third should have headers
	if auths[2].Attributes["header:X-Vertex"] != "test" {
		t.Errorf("expected header:X-Vertex=test, got %s", auths[2].Attributes["header:X-Vertex"])
	}
}

func TestConfigSynthesizer_OpenAICompat_WithModelsHash(t *testing.T) {
	synth := NewConfigSynthesizer()
	ctx := &SynthesisContext{
		Config: &config.Config{
			OpenAICompatibility: []config.OpenAICompatibility{
				{
					Name:    "TestProvider",
					BaseURL: "https://test.api.com",
					Models: []config.OpenAICompatibilityModel{
						{Name: "model-a"},
						{Name: "model-b"},
					},
					APIKeyEntries: []config.OpenAICompatibilityAPIKey{
						{APIKey: "key-with-models"},
					},
				},
			},
		},
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("expected 1 auth, got %d", len(auths))
	}
	if _, ok := auths[0].Attributes["models_hash"]; !ok {
		t.Error("expected models_hash in attributes")
	}
	if auths[0].Attributes["api_key"] != "key-with-models" {
		t.Errorf("expected api_key key-with-models, got %s", auths[0].Attributes["api_key"])
	}
}

func TestConfigSynthesizer_OpenAICompat_FallbackWithModels(t *testing.T) {
	synth := NewConfigSynthesizer()
	ctx := &SynthesisContext{
		Config: &config.Config{
			OpenAICompatibility: []config.OpenAICompatibility{
				{
					Name:    "NoKeyWithModels",
					BaseURL: "https://nokey.api.com",
					Models: []config.OpenAICompatibilityModel{
						{Name: "model-x"},
					},
					Headers: map[string]string{"X-API": "header-value"},
					// No APIKeyEntries - should use fallback path
				},
			},
		},
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("expected 1 auth, got %d", len(auths))
	}
	if _, ok := auths[0].Attributes["models_hash"]; !ok {
		t.Error("expected models_hash in fallback path")
	}
	if auths[0].Attributes["header:X-API"] != "header-value" {
		t.Errorf("expected header:X-API=header-value, got %s", auths[0].Attributes["header:X-API"])
	}
}

func TestConfigSynthesizer_VertexCompat_WithModels(t *testing.T) {
	synth := NewConfigSynthesizer()
	ctx := &SynthesisContext{
		Config: &config.Config{
			VertexCompatAPIKey: []config.VertexCompatKey{
				{
					APIKey:  "vertex-key",
					BaseURL: "https://vertex.api",
					Models: []config.VertexCompatModel{
						{Name: "gemini-pro", Alias: "pro"},
						{Name: "gemini-ultra", Alias: "ultra"},
					},
				},
			},
		},
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("expected 1 auth, got %d", len(auths))
	}
	if _, ok := auths[0].Attributes["models_hash"]; !ok {
		t.Error("expected models_hash in vertex auth with models")
	}
}

func TestConfigSynthesizer_IDStability(t *testing.T) {
	cfg := &config.Config{
		GeminiKey: []config.GeminiKey{
			{APIKey: "stable-key", Prefix: "test"},
		},
	}

	// Generate IDs twice with fresh generators
	synth1 := NewConfigSynthesizer()
	ctx1 := &SynthesisContext{
		Config:      cfg,
		Now:         time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC),
		IDGenerator: NewStableIDGenerator(),
	}
	auths1, _ := synth1.Synthesize(ctx1)

	synth2 := NewConfigSynthesizer()
	ctx2 := &SynthesisContext{
		Config:      cfg,
		Now:         time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC),
		IDGenerator: NewStableIDGenerator(),
	}
	auths2, _ := synth2.Synthesize(ctx2)

	if auths1[0].ID != auths2[0].ID {
		t.Errorf("same config should produce same ID: got %q and %q", auths1[0].ID, auths2[0].ID)
	}
}

func TestConfigSynthesizer_AllProviders(t *testing.T) {
	synth := NewConfigSynthesizer()
	ctx := &SynthesisContext{
		Config: &config.Config{
			GeminiKey: []config.GeminiKey{
				{APIKey: "gemini-key"},
			},
			ClaudeKey: []config.ClaudeKey{
				{APIKey: "claude-key"},
			},
			CodexKey: []config.CodexKey{
				{APIKey: "codex-key"},
			},
			OpenAICompatibility: []config.OpenAICompatibility{
				{Name: "compat", BaseURL: "https://compat.api"},
			},
			VertexCompatAPIKey: []config.VertexCompatKey{
				{APIKey: "vertex-key", BaseURL: "https://vertex.api"},
			},
		},
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 5 {
		t.Fatalf("expected 5 auths, got %d", len(auths))
	}

	providers := make(map[string]bool)
	for _, a := range auths {
		providers[a.Provider] = true
	}

	expected := []string{"gemini", "claude", "codex", "compat", "vertex"}
	for _, p := range expected {
		if !providers[p] {
			t.Errorf("expected provider %s not found", p)
		}
	}
}
19
internal/watcher/synthesizer/context.go
Normal file
@@ -0,0 +1,19 @@
package synthesizer

import (
	"time"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)

// SynthesisContext provides the context needed for auth synthesis.
type SynthesisContext struct {
	// Config is the current configuration
	Config *config.Config
	// AuthDir is the directory containing auth files
	AuthDir string
	// Now is the current time for timestamps
	Now time.Time
	// IDGenerator generates stable IDs for auth entries
	IDGenerator *StableIDGenerator
}
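Taken together with the two synthesizers, the intended usage appears to be one context per synthesis pass, shared across strategies; a minimal sketch (the auth directory path is a made-up placeholder):

	ctx := &SynthesisContext{
		Config:      cfg,              // the currently loaded *config.Config
		AuthDir:     "/path/to/auths", // hypothetical; only FileSynthesizer reads it
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}
	fromConfig, _ := NewConfigSynthesizer().Synthesize(ctx)
	fromFiles, _ := NewFileSynthesizer().Synthesize(ctx)
	all := append(fromConfig, fromFiles...)
	_ = all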
224
internal/watcher/synthesizer/file.go
Normal file
@@ -0,0 +1,224 @@
package synthesizer

import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
	"strings"
	"time"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/runtime/geminicli"
	coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

// FileSynthesizer generates Auth entries from OAuth JSON files.
// It handles file-based authentication and Gemini virtual auth generation.
type FileSynthesizer struct{}

// NewFileSynthesizer creates a new FileSynthesizer instance.
func NewFileSynthesizer() *FileSynthesizer {
	return &FileSynthesizer{}
}

// Synthesize generates Auth entries from auth files in the auth directory.
func (s *FileSynthesizer) Synthesize(ctx *SynthesisContext) ([]*coreauth.Auth, error) {
	out := make([]*coreauth.Auth, 0, 16)
	if ctx == nil || ctx.AuthDir == "" {
		return out, nil
	}

	entries, err := os.ReadDir(ctx.AuthDir)
	if err != nil {
		// Not an error if directory doesn't exist
		return out, nil
	}

	now := ctx.Now
	cfg := ctx.Config

	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		name := e.Name()
		if !strings.HasSuffix(strings.ToLower(name), ".json") {
			continue
		}
		full := filepath.Join(ctx.AuthDir, name)
		data, errRead := os.ReadFile(full)
		if errRead != nil || len(data) == 0 {
			continue
		}
		var metadata map[string]any
		if errUnmarshal := json.Unmarshal(data, &metadata); errUnmarshal != nil {
			continue
		}
		t, _ := metadata["type"].(string)
		if t == "" {
			continue
		}
		provider := strings.ToLower(t)
		if provider == "gemini" {
			provider = "gemini-cli"
		}
		label := provider
		if email, _ := metadata["email"].(string); email != "" {
			label = email
		}
		// Use relative path under authDir as ID to stay consistent with the file-based token store
		id := full
		if rel, errRel := filepath.Rel(ctx.AuthDir, full); errRel == nil && rel != "" {
			id = rel
		}

		proxyURL := ""
		if p, ok := metadata["proxy_url"].(string); ok {
			proxyURL = p
		}

		prefix := ""
		if rawPrefix, ok := metadata["prefix"].(string); ok {
			trimmed := strings.TrimSpace(rawPrefix)
			trimmed = strings.Trim(trimmed, "/")
			if trimmed != "" && !strings.Contains(trimmed, "/") {
				prefix = trimmed
			}
		}

		a := &coreauth.Auth{
			ID:       id,
			Provider: provider,
			Label:    label,
			Prefix:   prefix,
			Status:   coreauth.StatusActive,
			Attributes: map[string]string{
				"source": full,
				"path":   full,
			},
			ProxyURL:  proxyURL,
			Metadata:  metadata,
			CreatedAt: now,
			UpdatedAt: now,
		}
		ApplyAuthExcludedModelsMeta(a, cfg, nil, "oauth")
		if provider == "gemini-cli" {
			if virtuals := SynthesizeGeminiVirtualAuths(a, metadata, now); len(virtuals) > 0 {
				for _, v := range virtuals {
					ApplyAuthExcludedModelsMeta(v, cfg, nil, "oauth")
				}
				out = append(out, a)
				out = append(out, virtuals...)
				continue
			}
		}
		out = append(out, a)
	}
	return out, nil
}

// SynthesizeGeminiVirtualAuths creates virtual Auth entries for multi-project Gemini credentials.
// It disables the primary auth and creates one virtual auth per project.
func SynthesizeGeminiVirtualAuths(primary *coreauth.Auth, metadata map[string]any, now time.Time) []*coreauth.Auth {
	if primary == nil || metadata == nil {
		return nil
	}
	projects := splitGeminiProjectIDs(metadata)
	if len(projects) <= 1 {
		return nil
	}
	email, _ := metadata["email"].(string)
	shared := geminicli.NewSharedCredential(primary.ID, email, metadata, projects)
	primary.Disabled = true
	primary.Status = coreauth.StatusDisabled
	primary.Runtime = shared
	if primary.Attributes == nil {
		primary.Attributes = make(map[string]string)
	}
	primary.Attributes["gemini_virtual_primary"] = "true"
	primary.Attributes["virtual_children"] = strings.Join(projects, ",")
	source := primary.Attributes["source"]
	authPath := primary.Attributes["path"]
	originalProvider := primary.Provider
	if originalProvider == "" {
		originalProvider = "gemini-cli"
	}
	label := primary.Label
	if label == "" {
		label = originalProvider
	}
	virtuals := make([]*coreauth.Auth, 0, len(projects))
	for _, projectID := range projects {
		attrs := map[string]string{
			"runtime_only":           "true",
			"gemini_virtual_parent":  primary.ID,
			"gemini_virtual_project": projectID,
		}
		if source != "" {
			attrs["source"] = source
		}
		if authPath != "" {
			attrs["path"] = authPath
		}
		metadataCopy := map[string]any{
			"email":             email,
			"project_id":        projectID,
			"virtual":           true,
			"virtual_parent_id": primary.ID,
			"type":              metadata["type"],
		}
		proxy := strings.TrimSpace(primary.ProxyURL)
		if proxy != "" {
			metadataCopy["proxy_url"] = proxy
		}
		virtual := &coreauth.Auth{
			ID:         buildGeminiVirtualID(primary.ID, projectID),
			Provider:   originalProvider,
			Label:      fmt.Sprintf("%s [%s]", label, projectID),
			Status:     coreauth.StatusActive,
			Attributes: attrs,
			Metadata:   metadataCopy,
			ProxyURL:   primary.ProxyURL,
			Prefix:     primary.Prefix,
			CreatedAt:  primary.CreatedAt,
			UpdatedAt:  primary.UpdatedAt,
			Runtime:    geminicli.NewVirtualCredential(projectID, shared),
		}
		virtuals = append(virtuals, virtual)
	}
	return virtuals
}

// splitGeminiProjectIDs extracts and deduplicates project IDs from metadata.
func splitGeminiProjectIDs(metadata map[string]any) []string {
	raw, _ := metadata["project_id"].(string)
	trimmed := strings.TrimSpace(raw)
	if trimmed == "" {
		return nil
	}
	parts := strings.Split(trimmed, ",")
	result := make([]string, 0, len(parts))
	seen := make(map[string]struct{}, len(parts))
	for _, part := range parts {
		id := strings.TrimSpace(part)
		if id == "" {
			continue
		}
		if _, ok := seen[id]; ok {
			continue
		}
		seen[id] = struct{}{}
		result = append(result, id)
	}
	return result
}

// buildGeminiVirtualID constructs a virtual auth ID from base ID and project ID.
func buildGeminiVirtualID(baseID, projectID string) string {
	project := strings.TrimSpace(projectID)
	if project == "" {
		project = "project"
	}
	replacer := strings.NewReplacer("/", "_", "\\", "_", " ", "_")
	return fmt.Sprintf("%s::%s", baseID, replacer.Replace(project))
}
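Reading Synthesize and SynthesizeGeminiVirtualAuths together fixes the minimal on-disk shape the file path expects; a hedged example (the field names are exactly the keys read above, the values and filename are invented):

	// Contents of a hypothetical auth file "gemini-multi.json" in the auth directory:
	const exampleAuthFile = `{
		"type":       "gemini",
		"email":      "user@example.com",
		"prefix":     "team",
		"proxy_url":  "http://proxy.local",
		"project_id": "proj-a, proj-b"
	}`
	// Synthesize maps type "gemini" to provider "gemini-cli"; because project_id holds
	// two IDs, the primary auth is disabled and two virtuals are emitted with IDs
	// "gemini-multi.json::proj-a" and "gemini-multi.json::proj-b" (see buildGeminiVirtualID).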
612
internal/watcher/synthesizer/file_test.go
Normal file
@@ -0,0 +1,612 @@
package synthesizer

import (
	"encoding/json"
	"os"
	"path/filepath"
	"strings"
	"testing"
	"time"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
	coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

func TestNewFileSynthesizer(t *testing.T) {
	synth := NewFileSynthesizer()
	if synth == nil {
		t.Fatal("expected non-nil synthesizer")
	}
}

func TestFileSynthesizer_Synthesize_NilContext(t *testing.T) {
	synth := NewFileSynthesizer()
	auths, err := synth.Synthesize(nil)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 0 {
		t.Fatalf("expected empty auths, got %d", len(auths))
	}
}

func TestFileSynthesizer_Synthesize_EmptyAuthDir(t *testing.T) {
	synth := NewFileSynthesizer()
	ctx := &SynthesisContext{
		Config:      &config.Config{},
		AuthDir:     "",
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}
	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 0 {
		t.Fatalf("expected empty auths, got %d", len(auths))
	}
}

func TestFileSynthesizer_Synthesize_NonExistentDir(t *testing.T) {
	synth := NewFileSynthesizer()
	ctx := &SynthesisContext{
		Config:      &config.Config{},
		AuthDir:     "/non/existent/path",
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}
	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 0 {
		t.Fatalf("expected empty auths, got %d", len(auths))
	}
}

func TestFileSynthesizer_Synthesize_ValidAuthFile(t *testing.T) {
	tempDir := t.TempDir()

	// Create a valid auth file
	authData := map[string]any{
		"type":      "claude",
		"email":     "test@example.com",
		"proxy_url": "http://proxy.local",
		"prefix":    "test-prefix",
	}
	data, _ := json.Marshal(authData)
	err := os.WriteFile(filepath.Join(tempDir, "claude-auth.json"), data, 0644)
	if err != nil {
		t.Fatalf("failed to write auth file: %v", err)
	}

	synth := NewFileSynthesizer()
	ctx := &SynthesisContext{
		Config:      &config.Config{},
		AuthDir:     tempDir,
		Now:         time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("expected 1 auth, got %d", len(auths))
	}

	if auths[0].Provider != "claude" {
		t.Errorf("expected provider claude, got %s", auths[0].Provider)
	}
	if auths[0].Label != "test@example.com" {
		t.Errorf("expected label test@example.com, got %s", auths[0].Label)
	}
	if auths[0].Prefix != "test-prefix" {
		t.Errorf("expected prefix test-prefix, got %s", auths[0].Prefix)
	}
	if auths[0].ProxyURL != "http://proxy.local" {
		t.Errorf("expected proxy_url http://proxy.local, got %s", auths[0].ProxyURL)
	}
	if auths[0].Status != coreauth.StatusActive {
		t.Errorf("expected status active, got %s", auths[0].Status)
	}
}

func TestFileSynthesizer_Synthesize_GeminiProviderMapping(t *testing.T) {
	tempDir := t.TempDir()

	// Gemini type should be mapped to gemini-cli
	authData := map[string]any{
		"type":  "gemini",
		"email": "gemini@example.com",
	}
	data, _ := json.Marshal(authData)
	err := os.WriteFile(filepath.Join(tempDir, "gemini-auth.json"), data, 0644)
	if err != nil {
		t.Fatalf("failed to write auth file: %v", err)
	}

	synth := NewFileSynthesizer()
	ctx := &SynthesisContext{
		Config:      &config.Config{},
		AuthDir:     tempDir,
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("expected 1 auth, got %d", len(auths))
	}

	if auths[0].Provider != "gemini-cli" {
		t.Errorf("gemini should be mapped to gemini-cli, got %s", auths[0].Provider)
	}
}

func TestFileSynthesizer_Synthesize_SkipsInvalidFiles(t *testing.T) {
	tempDir := t.TempDir()

	// Create various invalid files
	_ = os.WriteFile(filepath.Join(tempDir, "not-json.txt"), []byte("text content"), 0644)
	_ = os.WriteFile(filepath.Join(tempDir, "invalid.json"), []byte("not valid json"), 0644)
	_ = os.WriteFile(filepath.Join(tempDir, "empty.json"), []byte(""), 0644)
	_ = os.WriteFile(filepath.Join(tempDir, "no-type.json"), []byte(`{"email": "test@example.com"}`), 0644)

	// Create one valid file
	validData, _ := json.Marshal(map[string]any{"type": "claude", "email": "valid@example.com"})
	_ = os.WriteFile(filepath.Join(tempDir, "valid.json"), validData, 0644)

	synth := NewFileSynthesizer()
	ctx := &SynthesisContext{
		Config:      &config.Config{},
		AuthDir:     tempDir,
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("only valid auth file should be processed, got %d", len(auths))
	}
	if auths[0].Label != "valid@example.com" {
		t.Errorf("expected label valid@example.com, got %s", auths[0].Label)
	}
}

func TestFileSynthesizer_Synthesize_SkipsDirectories(t *testing.T) {
	tempDir := t.TempDir()

	// Create a subdirectory with a json file inside
	subDir := filepath.Join(tempDir, "subdir.json")
	err := os.Mkdir(subDir, 0755)
	if err != nil {
		t.Fatalf("failed to create subdir: %v", err)
	}

	// Create a valid file in root
	validData, _ := json.Marshal(map[string]any{"type": "claude"})
	_ = os.WriteFile(filepath.Join(tempDir, "valid.json"), validData, 0644)

	synth := NewFileSynthesizer()
	ctx := &SynthesisContext{
		Config:      &config.Config{},
		AuthDir:     tempDir,
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("expected 1 auth, got %d", len(auths))
	}
}

func TestFileSynthesizer_Synthesize_RelativeID(t *testing.T) {
	tempDir := t.TempDir()

	authData := map[string]any{"type": "claude"}
	data, _ := json.Marshal(authData)
	err := os.WriteFile(filepath.Join(tempDir, "my-auth.json"), data, 0644)
	if err != nil {
		t.Fatalf("failed to write auth file: %v", err)
	}

	synth := NewFileSynthesizer()
	ctx := &SynthesisContext{
		Config:      &config.Config{},
		AuthDir:     tempDir,
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(auths) != 1 {
		t.Fatalf("expected 1 auth, got %d", len(auths))
	}

	// ID should be relative path
	if auths[0].ID != "my-auth.json" {
		t.Errorf("expected ID my-auth.json, got %s", auths[0].ID)
	}
}

func TestFileSynthesizer_Synthesize_PrefixValidation(t *testing.T) {
	tests := []struct {
		name       string
		prefix     string
		wantPrefix string
	}{
		{"valid prefix", "myprefix", "myprefix"},
		{"prefix with slashes trimmed", "/myprefix/", "myprefix"},
		{"prefix with spaces trimmed", " myprefix ", "myprefix"},
		{"prefix with internal slash rejected", "my/prefix", ""},
		{"empty prefix", "", ""},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tempDir := t.TempDir()
			authData := map[string]any{
				"type":   "claude",
				"prefix": tt.prefix,
			}
			data, _ := json.Marshal(authData)
			_ = os.WriteFile(filepath.Join(tempDir, "auth.json"), data, 0644)

			synth := NewFileSynthesizer()
			ctx := &SynthesisContext{
				Config:      &config.Config{},
				AuthDir:     tempDir,
				Now:         time.Now(),
				IDGenerator: NewStableIDGenerator(),
			}

			auths, err := synth.Synthesize(ctx)
			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}
			if len(auths) != 1 {
				t.Fatalf("expected 1 auth, got %d", len(auths))
			}
			if auths[0].Prefix != tt.wantPrefix {
				t.Errorf("expected prefix %q, got %q", tt.wantPrefix, auths[0].Prefix)
			}
		})
	}
}

func TestSynthesizeGeminiVirtualAuths_NilInputs(t *testing.T) {
	now := time.Now()

	if SynthesizeGeminiVirtualAuths(nil, nil, now) != nil {
		t.Error("expected nil for nil primary")
	}
	if SynthesizeGeminiVirtualAuths(&coreauth.Auth{}, nil, now) != nil {
		t.Error("expected nil for nil metadata")
	}
	if SynthesizeGeminiVirtualAuths(nil, map[string]any{}, now) != nil {
		t.Error("expected nil for nil primary with metadata")
	}
}

func TestSynthesizeGeminiVirtualAuths_SingleProject(t *testing.T) {
	now := time.Now()
	primary := &coreauth.Auth{
		ID:       "test-id",
		Provider: "gemini-cli",
		Label:    "test@example.com",
	}
	metadata := map[string]any{
		"project_id": "single-project",
		"email":      "test@example.com",
		"type":       "gemini",
	}

	virtuals := SynthesizeGeminiVirtualAuths(primary, metadata, now)
	if virtuals != nil {
		t.Error("single project should not create virtuals")
	}
}

func TestSynthesizeGeminiVirtualAuths_MultiProject(t *testing.T) {
	now := time.Now()
	primary := &coreauth.Auth{
		ID:       "primary-id",
		Provider: "gemini-cli",
		Label:    "test@example.com",
		Prefix:   "test-prefix",
		ProxyURL: "http://proxy.local",
		Attributes: map[string]string{
			"source": "test-source",
			"path":   "/path/to/auth",
		},
	}
	metadata := map[string]any{
		"project_id": "project-a, project-b, project-c",
		"email":      "test@example.com",
		"type":       "gemini",
	}

	virtuals := SynthesizeGeminiVirtualAuths(primary, metadata, now)

	if len(virtuals) != 3 {
		t.Fatalf("expected 3 virtuals, got %d", len(virtuals))
	}

	// Check primary is disabled
	if !primary.Disabled {
		t.Error("expected primary to be disabled")
	}
	if primary.Status != coreauth.StatusDisabled {
		t.Errorf("expected primary status disabled, got %s", primary.Status)
	}
	if primary.Attributes["gemini_virtual_primary"] != "true" {
		t.Error("expected gemini_virtual_primary=true")
	}
	if !strings.Contains(primary.Attributes["virtual_children"], "project-a") {
		t.Error("expected virtual_children to contain project-a")
	}

	// Check virtuals
	projectIDs := []string{"project-a", "project-b", "project-c"}
	for i, v := range virtuals {
		if v.Provider != "gemini-cli" {
			t.Errorf("expected provider gemini-cli, got %s", v.Provider)
		}
		if v.Status != coreauth.StatusActive {
			t.Errorf("expected status active, got %s", v.Status)
		}
		if v.Prefix != "test-prefix" {
			t.Errorf("expected prefix test-prefix, got %s", v.Prefix)
		}
		if v.ProxyURL != "http://proxy.local" {
			t.Errorf("expected proxy_url http://proxy.local, got %s", v.ProxyURL)
		}
		if v.Attributes["runtime_only"] != "true" {
			t.Error("expected runtime_only=true")
		}
		if v.Attributes["gemini_virtual_parent"] != "primary-id" {
			t.Errorf("expected gemini_virtual_parent=primary-id, got %s", v.Attributes["gemini_virtual_parent"])
		}
		if v.Attributes["gemini_virtual_project"] != projectIDs[i] {
			t.Errorf("expected gemini_virtual_project=%s, got %s", projectIDs[i], v.Attributes["gemini_virtual_project"])
		}
		if !strings.Contains(v.Label, "["+projectIDs[i]+"]") {
			t.Errorf("expected label to contain [%s], got %s", projectIDs[i], v.Label)
		}
	}
}

func TestSynthesizeGeminiVirtualAuths_EmptyProviderAndLabel(t *testing.T) {
	now := time.Now()
	// Test with empty Provider and Label to cover fallback branches
	primary := &coreauth.Auth{
		ID:         "primary-id",
		Provider:   "", // empty provider - should default to gemini-cli
		Label:      "", // empty label - should default to provider
		Attributes: map[string]string{},
	}
	metadata := map[string]any{
		"project_id": "proj-a, proj-b",
		"email":      "user@example.com",
		"type":       "gemini",
	}

	virtuals := SynthesizeGeminiVirtualAuths(primary, metadata, now)

	if len(virtuals) != 2 {
		t.Fatalf("expected 2 virtuals, got %d", len(virtuals))
	}

	// Check that empty provider defaults to gemini-cli
	if virtuals[0].Provider != "gemini-cli" {
		t.Errorf("expected provider gemini-cli (default), got %s", virtuals[0].Provider)
	}
	// Check that empty label defaults to provider
	if !strings.Contains(virtuals[0].Label, "gemini-cli") {
		t.Errorf("expected label to contain gemini-cli, got %s", virtuals[0].Label)
	}
}

func TestSynthesizeGeminiVirtualAuths_NilPrimaryAttributes(t *testing.T) {
	now := time.Now()
	primary := &coreauth.Auth{
		ID:         "primary-id",
		Provider:   "gemini-cli",
		Label:      "test@example.com",
		Attributes: nil, // nil attributes
	}
	metadata := map[string]any{
		"project_id": "proj-a, proj-b",
		"email":      "test@example.com",
		"type":       "gemini",
	}

	virtuals := SynthesizeGeminiVirtualAuths(primary, metadata, now)

	if len(virtuals) != 2 {
		t.Fatalf("expected 2 virtuals, got %d", len(virtuals))
	}
	// Nil attributes should be initialized
	if primary.Attributes == nil {
		t.Error("expected primary.Attributes to be initialized")
	}
	if primary.Attributes["gemini_virtual_primary"] != "true" {
		t.Error("expected gemini_virtual_primary=true")
	}
}

func TestSplitGeminiProjectIDs(t *testing.T) {
	tests := []struct {
		name     string
		metadata map[string]any
		want     []string
	}{
		{
			name:     "single project",
			metadata: map[string]any{"project_id": "proj-a"},
			want:     []string{"proj-a"},
		},
		{
			name:     "multiple projects",
			metadata: map[string]any{"project_id": "proj-a, proj-b, proj-c"},
			want:     []string{"proj-a", "proj-b", "proj-c"},
		},
		{
			name:     "with duplicates",
			metadata: map[string]any{"project_id": "proj-a, proj-b, proj-a"},
			want:     []string{"proj-a", "proj-b"},
		},
		{
			name:     "with empty parts",
			metadata: map[string]any{"project_id": "proj-a, , proj-b, "},
			want:     []string{"proj-a", "proj-b"},
		},
		{
			name:     "empty project_id",
			metadata: map[string]any{"project_id": ""},
			want:     nil,
		},
		{
			name:     "no project_id",
			metadata: map[string]any{},
			want:     nil,
		},
		{
			name:     "whitespace only",
			metadata: map[string]any{"project_id": "   "},
			want:     nil,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := splitGeminiProjectIDs(tt.metadata)
			if len(got) != len(tt.want) {
				t.Fatalf("expected %v, got %v", tt.want, got)
			}
			for i := range got {
				if got[i] != tt.want[i] {
					t.Errorf("expected %v, got %v", tt.want, got)
					break
				}
			}
		})
	}
}

func TestFileSynthesizer_Synthesize_MultiProjectGemini(t *testing.T) {
	tempDir := t.TempDir()

	// Create a gemini auth file with multiple projects
	authData := map[string]any{
		"type":       "gemini",
		"email":      "multi@example.com",
		"project_id": "project-a, project-b, project-c",
	}
	data, _ := json.Marshal(authData)
	err := os.WriteFile(filepath.Join(tempDir, "gemini-multi.json"), data, 0644)
	if err != nil {
		t.Fatalf("failed to write auth file: %v", err)
	}

	synth := NewFileSynthesizer()
	ctx := &SynthesisContext{
		Config:      &config.Config{},
		AuthDir:     tempDir,
		Now:         time.Now(),
		IDGenerator: NewStableIDGenerator(),
	}

	auths, err := synth.Synthesize(ctx)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	// Should have 4 auths: 1 primary (disabled) + 3 virtuals
	if len(auths) != 4 {
		t.Fatalf("expected 4 auths (1 primary + 3 virtuals), got %d", len(auths))
	}

	// First auth should be the primary (disabled)
	primary := auths[0]
	if !primary.Disabled {
		t.Error("expected primary to be disabled")
	}
	if primary.Status != coreauth.StatusDisabled {
		t.Errorf("expected primary status disabled, got %s", primary.Status)
	}

	// Remaining auths should be virtuals
	for i := 1; i < 4; i++ {
		v := auths[i]
		if v.Status != coreauth.StatusActive {
			t.Errorf("expected virtual %d to be active, got %s", i, v.Status)
		}
		if v.Attributes["gemini_virtual_parent"] != primary.ID {
			t.Errorf("expected virtual %d parent to be %s, got %s", i, primary.ID, v.Attributes["gemini_virtual_parent"])
		}
	}
}

func TestBuildGeminiVirtualID(t *testing.T) {
	tests := []struct {
		name      string
		baseID    string
		projectID string
		want      string
	}{
		{
			name:      "basic",
			baseID:    "auth.json",
			projectID: "my-project",
			want:      "auth.json::my-project",
		},
		{
			name:      "with slashes",
			baseID:    "path/to/auth.json",
			projectID: "project/with/slashes",
			want:      "path/to/auth.json::project_with_slashes",
		},
		{
			name:      "with spaces",
			baseID:    "auth.json",
			projectID: "my project",
			want:      "auth.json::my_project",
		},
		{
			name:      "empty project",
			baseID:    "auth.json",
			projectID: "",
			want:      "auth.json::project",
		},
		{
			name:      "whitespace project",
			baseID:    "auth.json",
			projectID: "   ",
			want:      "auth.json::project",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := buildGeminiVirtualID(tt.baseID, tt.projectID)
			if got != tt.want {
				t.Errorf("expected %q, got %q", tt.want, got)
			}
		})
	}
}
110
internal/watcher/synthesizer/helpers.go
Normal file
@@ -0,0 +1,110 @@
package synthesizer

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"sort"
	"strings"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
	coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

// StableIDGenerator generates stable, deterministic IDs for auth entries.
// It uses SHA256 hashing with collision handling via counters.
// It is not safe for concurrent use.
type StableIDGenerator struct {
	counters map[string]int
}

// NewStableIDGenerator creates a new StableIDGenerator instance.
func NewStableIDGenerator() *StableIDGenerator {
	return &StableIDGenerator{counters: make(map[string]int)}
}

// Next generates a stable ID based on the kind and parts.
// Returns the full ID (kind:hash) and the short hash portion.
func (g *StableIDGenerator) Next(kind string, parts ...string) (string, string) {
	if g == nil {
		return kind + ":000000000000", "000000000000"
	}
	hasher := sha256.New()
	hasher.Write([]byte(kind))
	for _, part := range parts {
		trimmed := strings.TrimSpace(part)
		hasher.Write([]byte{0})
		hasher.Write([]byte(trimmed))
	}
	digest := hex.EncodeToString(hasher.Sum(nil))
	if len(digest) < 12 {
		digest = fmt.Sprintf("%012s", digest)
	}
	short := digest[:12]
	key := kind + ":" + short
	index := g.counters[key]
	g.counters[key] = index + 1
	if index > 0 {
		short = fmt.Sprintf("%s-%d", short, index)
	}
	return fmt.Sprintf("%s:%s", kind, short), short
}

// ApplyAuthExcludedModelsMeta applies excluded models metadata to an auth entry.
// It computes a hash of excluded models and sets the auth_kind attribute.
func ApplyAuthExcludedModelsMeta(auth *coreauth.Auth, cfg *config.Config, perKey []string, authKind string) {
	if auth == nil || cfg == nil {
		return
	}
	authKindKey := strings.ToLower(strings.TrimSpace(authKind))
	seen := make(map[string]struct{})
	add := func(list []string) {
		for _, entry := range list {
			if trimmed := strings.TrimSpace(entry); trimmed != "" {
				key := strings.ToLower(trimmed)
				if _, exists := seen[key]; exists {
					continue
				}
				seen[key] = struct{}{}
			}
		}
	}
	if authKindKey == "apikey" {
		add(perKey)
	} else if cfg.OAuthExcludedModels != nil {
		providerKey := strings.ToLower(strings.TrimSpace(auth.Provider))
		add(cfg.OAuthExcludedModels[providerKey])
	}
	combined := make([]string, 0, len(seen))
	for k := range seen {
		combined = append(combined, k)
	}
	sort.Strings(combined)
	hash := diff.ComputeExcludedModelsHash(combined)
	if auth.Attributes == nil {
		auth.Attributes = make(map[string]string)
	}
	if hash != "" {
		auth.Attributes["excluded_models_hash"] = hash
	}
	if authKind != "" {
		auth.Attributes["auth_kind"] = authKind
	}
}

// addConfigHeadersToAttrs adds header configuration to auth attributes.
// Headers are prefixed with "header:" in the attributes map.
func addConfigHeadersToAttrs(headers map[string]string, attrs map[string]string) {
	if len(headers) == 0 || attrs == nil {
		return
	}
	for hk, hv := range headers {
		key := strings.TrimSpace(hk)
		val := strings.TrimSpace(hv)
		if key == "" || val == "" {
			continue
		}
		attrs["header:"+key] = val
	}
}
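The ID format this yields is worth spelling out; a small sketch based only on the code above (the base URL is an invented example):

	gen := NewStableIDGenerator()
	id1, short1 := gen.Next("gemini:apikey", "key", "https://base.example.com", "")
	// id1 == "gemini:apikey:" + short1; short1 is the first 12 hex chars of the
	// SHA-256 digest over the kind plus the NUL-separated, trimmed parts.
	id2, short2 := gen.Next("gemini:apikey", "key", "https://base.example.com", "")
	// Repeating the same tuple within one generator bumps the per-key counter,
	// so short2 == short1 + "-1" and id2 differs from id1.
	_, _, _, _ = id1, short1, id2, short2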
264
internal/watcher/synthesizer/helpers_test.go
Normal file
@@ -0,0 +1,264 @@
|
||||
package synthesizer
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
|
||||
coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
|
||||
)
|
||||
|
||||
func TestNewStableIDGenerator(t *testing.T) {
|
||||
gen := NewStableIDGenerator()
|
||||
if gen == nil {
|
||||
t.Fatal("expected non-nil generator")
|
||||
}
|
||||
if gen.counters == nil {
|
||||
t.Fatal("expected non-nil counters map")
|
||||
}
|
||||
}
|
||||
|
||||
func TestStableIDGenerator_Next(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
kind string
|
||||
parts []string
|
||||
wantPrefix string
|
||||
}{
|
||||
{
|
||||
name: "basic gemini apikey",
|
||||
kind: "gemini:apikey",
|
||||
parts: []string{"test-key", ""},
|
||||
wantPrefix: "gemini:apikey:",
|
||||
},
|
||||
{
|
||||
name: "claude with base url",
|
||||
kind: "claude:apikey",
|
||||
parts: []string{"sk-ant-xxx", "https://api.anthropic.com"},
|
||||
wantPrefix: "claude:apikey:",
|
||||
},
|
||||
{
|
||||
name: "empty parts",
|
||||
kind: "codex:apikey",
|
||||
parts: []string{},
|
||||
wantPrefix: "codex:apikey:",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
gen := NewStableIDGenerator()
|
||||
id, short := gen.Next(tt.kind, tt.parts...)
|
||||
|
||||
if !strings.Contains(id, tt.wantPrefix) {
|
||||
t.Errorf("expected id to contain %q, got %q", tt.wantPrefix, id)
|
||||
}
|
||||
if short == "" {
|
||||
t.Error("expected non-empty short id")
|
||||
}
|
||||
if len(short) != 12 {
|
||||
t.Errorf("expected short id length 12, got %d", len(short))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestStableIDGenerator_Stability(t *testing.T) {
|
||||
gen1 := NewStableIDGenerator()
|
||||
gen2 := NewStableIDGenerator()
|
||||
|
||||
id1, _ := gen1.Next("gemini:apikey", "test-key", "https://api.example.com")
|
||||
id2, _ := gen2.Next("gemini:apikey", "test-key", "https://api.example.com")
|
||||
|
||||
if id1 != id2 {
|
||||
t.Errorf("same inputs should produce same ID: got %q and %q", id1, id2)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStableIDGenerator_CollisionHandling(t *testing.T) {
|
||||
gen := NewStableIDGenerator()
|
||||
|
||||
id1, short1 := gen.Next("gemini:apikey", "same-key")
|
||||
id2, short2 := gen.Next("gemini:apikey", "same-key")
|
||||
|
||||
if id1 == id2 {
|
||||
t.Error("collision should be handled with suffix")
|
||||
}
|
||||
if short1 == short2 {
|
||||
t.Error("short ids should differ")
|
||||
}
|
||||
if !strings.Contains(short2, "-1") {
|
||||
t.Errorf("second short id should contain -1 suffix, got %q", short2)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStableIDGenerator_NilReceiver(t *testing.T) {
|
||||
var gen *StableIDGenerator = nil
|
||||
id, short := gen.Next("test:kind", "part")
|
||||
|
||||
if id != "test:kind:000000000000" {
|
||||
t.Errorf("expected test:kind:000000000000, got %q", id)
|
||||
}
|
||||
if short != "000000000000" {
|
||||
t.Errorf("expected 000000000000, got %q", short)
|
||||
}
|
||||
}
|
||||
|
||||

func TestApplyAuthExcludedModelsMeta(t *testing.T) {
	tests := []struct {
		name     string
		auth     *coreauth.Auth
		cfg      *config.Config
		perKey   []string
		authKind string
		wantHash bool
		wantKind string
	}{
		{
			name: "apikey with excluded models",
			auth: &coreauth.Auth{
				Provider:   "gemini",
				Attributes: make(map[string]string),
			},
			cfg:      &config.Config{},
			perKey:   []string{"model-a", "model-b"},
			authKind: "apikey",
			wantHash: true,
			wantKind: "apikey",
		},
		{
			name: "oauth with provider excluded models",
			auth: &coreauth.Auth{
				Provider:   "claude",
				Attributes: make(map[string]string),
			},
			cfg: &config.Config{
				OAuthExcludedModels: map[string][]string{
					"claude": {"claude-2.0"},
				},
			},
			perKey:   nil,
			authKind: "oauth",
			wantHash: true,
			wantKind: "oauth",
		},
		{
			name: "nil auth",
			auth: nil,
			cfg:  &config.Config{},
		},
		{
			name:     "nil config",
			auth:     &coreauth.Auth{Provider: "test"},
			cfg:      nil,
			authKind: "apikey",
		},
		{
			name: "nil attributes initialized",
			auth: &coreauth.Auth{
				Provider:   "gemini",
				Attributes: nil,
			},
			cfg:      &config.Config{},
			perKey:   []string{"model-x"},
			authKind: "apikey",
			wantHash: true,
			wantKind: "apikey",
		},
		{
			name: "apikey with duplicate excluded models",
			auth: &coreauth.Auth{
				Provider:   "gemini",
				Attributes: make(map[string]string),
			},
			cfg:      &config.Config{},
			perKey:   []string{"model-a", "MODEL-A", "model-b", "model-a"},
			authKind: "apikey",
			wantHash: true,
			wantKind: "apikey",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ApplyAuthExcludedModelsMeta(tt.auth, tt.cfg, tt.perKey, tt.authKind)

			if tt.auth != nil && tt.cfg != nil {
				if tt.wantHash {
					if _, ok := tt.auth.Attributes["excluded_models_hash"]; !ok {
						t.Error("expected excluded_models_hash in attributes")
					}
				}
				if tt.wantKind != "" {
					if got := tt.auth.Attributes["auth_kind"]; got != tt.wantKind {
						t.Errorf("expected auth_kind=%s, got %s", tt.wantKind, got)
					}
				}
			}
		})
	}
}
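
// sketchApplyExcludedMeta restates the contract the table above exercises as
// a sketch; it is not the production ApplyAuthExcludedModelsMeta, whose hash
// format is not shown here. The fmt-based digest below is an assumption for
// illustration, and "fmt" plus "strings" are assumed imported: nil auth or
// config is a no-op, a nil attribute map is initialized, the excluded-model
// list is deduplicated case-insensitively, and the auth kind and a hash are
// recorded as attributes.
func sketchApplyExcludedMeta(auth *coreauth.Auth, cfg *config.Config, perKey []string, authKind string) {
	if auth == nil || cfg == nil {
		return
	}
	if auth.Attributes == nil {
		auth.Attributes = make(map[string]string)
	}
	excluded := perKey
	if len(excluded) == 0 {
		excluded = cfg.OAuthExcludedModels[auth.Provider]
	}
	uniq := make(map[string]struct{}, len(excluded))
	for _, m := range excluded {
		uniq[strings.ToLower(m)] = struct{}{} // dedupe case-insensitively
	}
	auth.Attributes["auth_kind"] = authKind
	auth.Attributes["excluded_models_hash"] = fmt.Sprintf("%d-models", len(uniq)) // placeholder digest
}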

func TestAddConfigHeadersToAttrs(t *testing.T) {
	tests := []struct {
		name    string
		headers map[string]string
		attrs   map[string]string
		want    map[string]string
	}{
		{
			name: "basic headers",
			headers: map[string]string{
				"Authorization": "Bearer token",
				"X-Custom":      "value",
			},
			attrs: map[string]string{"existing": "key"},
			want: map[string]string{
				"existing":             "key",
				"header:Authorization": "Bearer token",
				"header:X-Custom":      "value",
			},
		},
		{
			name:    "empty headers",
			headers: map[string]string{},
			attrs:   map[string]string{"existing": "key"},
			want:    map[string]string{"existing": "key"},
		},
		{
			name:    "nil headers",
			headers: nil,
			attrs:   map[string]string{"existing": "key"},
			want:    map[string]string{"existing": "key"},
		},
		{
			name:    "nil attrs",
			headers: map[string]string{"key": "value"},
			attrs:   nil,
			want:    nil,
		},
		{
			name: "skip empty keys and values",
			headers: map[string]string{
				"":      "value",
				"key":   "",
				" ":     "value",
				"valid": "valid-value",
			},
			attrs: make(map[string]string),
			want: map[string]string{
				"header:valid": "valid-value",
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			addConfigHeadersToAttrs(tt.headers, tt.attrs)
			if !reflect.DeepEqual(tt.attrs, tt.want) {
				t.Errorf("expected %v, got %v", tt.want, tt.attrs)
			}
		})
	}
}
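
// sketchAddConfigHeaders mirrors the contract the table above pins down for
// addConfigHeadersToAttrs (a sketch, not the production function): non-empty
// headers are copied into attrs under a "header:" prefix, blank or
// whitespace-only keys and empty values are skipped, and a nil attrs map is
// left untouched.
func sketchAddConfigHeaders(headers, attrs map[string]string) {
	if attrs == nil {
		return
	}
	for k, v := range headers {
		if strings.TrimSpace(k) == "" || v == "" {
			continue
		}
		attrs["header:"+k] = v
	}
}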
16
internal/watcher/synthesizer/interface.go
Normal file
@@ -0,0 +1,16 @@
// Package synthesizer provides auth synthesis strategies for the watcher package.
// It implements the Strategy pattern to support multiple auth sources:
// - ConfigSynthesizer: generates Auth entries from config API keys
// - FileSynthesizer: generates Auth entries from OAuth JSON files
package synthesizer

import (
	coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

// AuthSynthesizer defines the interface for generating Auth entries from various sources.
type AuthSynthesizer interface {
	// Synthesize generates Auth entries from the given context.
	// Returns a slice of Auth pointers and any error encountered.
	Synthesize(ctx *SynthesisContext) ([]*coreauth.Auth, error)
}
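
For orientation, a minimal hypothetical implementation of this interface could look like the sketch below. staticSynthesizer is invented for illustration; the real ConfigSynthesizer and FileSynthesizer live elsewhere in this package.

// staticSynthesizer returns a fixed set of Auth entries regardless of the
// synthesis context. Hypothetical; for illustration only.
type staticSynthesizer struct {
	auths []*coreauth.Auth
}

// Synthesize satisfies AuthSynthesizer by handing back a copy of the
// preconfigured entries.
func (s *staticSynthesizer) Synthesize(ctx *SynthesisContext) ([]*coreauth.Auth, error) {
	out := make([]*coreauth.Auth, 0, len(s.auths))
	out = append(out, s.auths...)
	return out, nil
}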
File diff suppressed because it is too large
@@ -1,109 +0,0 @@
package test

import (
	"context"
	"strings"
	"testing"

	agclaude "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/antigravity/claude"
	"github.com/tidwall/gjson"
)

func TestAntigravityClaudeRequest_DropsUnsignedThinkingBlocks(t *testing.T) {
	model := "gemini-claude-sonnet-4-5-thinking"
	input := []byte(`{
		"model":"` + model + `",
		"messages":[
			{"role":"assistant","content":[{"type":"thinking","thinking":"secret without signature"}]},
			{"role":"user","content":[{"type":"text","text":"hi"}]}
		]
	}`)

	out := agclaude.ConvertClaudeRequestToAntigravity(model, input, false)
	contents := gjson.GetBytes(out, "request.contents")
	if !contents.Exists() || !contents.IsArray() {
		t.Fatalf("expected request.contents array, got: %s", string(out))
	}
	if got := len(contents.Array()); got != 1 {
		t.Fatalf("expected 1 content message after dropping unsigned thinking-only assistant message, got %d: %s", got, contents.Raw)
	}
	if role := contents.Array()[0].Get("role").String(); role != "user" {
		t.Fatalf("expected remaining message role=user, got %q", role)
	}
}

func TestAntigravityClaudeStreamResponse_EmitsSignatureDeltaForStandaloneSignaturePart(t *testing.T) {
	raw := []byte(`{
		"response":{
			"responseId":"resp_1",
			"modelVersion":"claude-sonnet-4-5-thinking",
			"candidates":[{
				"content":{"parts":[
					{"text":"THOUGHT","thought":true},
					{"thought":true,"thoughtSignature":"sig123"},
					{"text":"ANSWER","thought":false}
				]},
				"finishReason":"STOP"
			}],
			"usageMetadata":{"promptTokenCount":1,"candidatesTokenCount":1,"thoughtsTokenCount":1,"totalTokenCount":3}
		}
	}`)

	var param any
	chunks := agclaude.ConvertAntigravityResponseToClaude(context.Background(), "", nil, nil, raw, &param)
	joined := strings.Join(chunks, "")
	if !strings.Contains(joined, `"type":"signature_delta"`) {
		t.Fatalf("expected signature_delta in stream output, got: %s", joined)
	}
	if !strings.Contains(joined, `"signature":"sig123"`) {
		t.Fatalf("expected signature sig123 in stream output, got: %s", joined)
	}
	// Signature delta must be attached to the thinking content block (index 0 in this minimal stream).
	if !strings.Contains(joined, `{"type":"content_block_delta","index":0,"delta":{"type":"signature_delta","signature":"sig123"}}`) {
		t.Fatalf("expected signature_delta to target thinking block index 0, got: %s", joined)
	}
}

func TestAntigravityClaudeNonStreamResponse_IncludesThinkingSignature(t *testing.T) {
	raw := []byte(`{
		"response":{
			"responseId":"resp_1",
			"modelVersion":"claude-sonnet-4-5-thinking",
			"candidates":[{
				"content":{"parts":[
					{"text":"THOUGHT","thought":true},
					{"thought":true,"thoughtSignature":"sig123"},
					{"text":"ANSWER","thought":false}
				]},
				"finishReason":"STOP"
			}],
			"usageMetadata":{"promptTokenCount":1,"candidatesTokenCount":1,"thoughtsTokenCount":1,"totalTokenCount":3}
		}
	}`)

	out := agclaude.ConvertAntigravityResponseToClaudeNonStream(context.Background(), "", nil, nil, raw, nil)
	if !gjson.Valid(out) {
		t.Fatalf("expected valid JSON output, got: %s", out)
	}
	content := gjson.Get(out, "content")
	if !content.Exists() || !content.IsArray() {
		t.Fatalf("expected content array in output, got: %s", out)
	}

	found := false
	for _, block := range content.Array() {
		if block.Get("type").String() != "thinking" {
			continue
		}
		found = true
		if got := block.Get("signature").String(); got != "sig123" {
			t.Fatalf("expected thinking.signature=sig123, got %q (block=%s)", got, block.Raw)
		}
		if got := block.Get("thinking").String(); got != "THOUGHT" {
			t.Fatalf("expected thinking.thinking=THOUGHT, got %q (block=%s)", got, block.Raw)
		}
	}
	if !found {
		t.Fatalf("expected a thinking block in output, got: %s", out)
	}
}
423
test/gemini3_thinking_level_test.go
Normal file
@@ -0,0 +1,423 @@
package test

import (
	"fmt"
	"testing"
	"time"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
	"github.com/tidwall/gjson"
)

// registerGemini3Models loads Gemini 3 models into the registry for testing.
func registerGemini3Models(t *testing.T) func() {
	t.Helper()
	reg := registry.GetGlobalRegistry()
	uid := fmt.Sprintf("gemini3-test-%d", time.Now().UnixNano())
	reg.RegisterClient(uid+"-gemini", "gemini", registry.GetGeminiModels())
	reg.RegisterClient(uid+"-aistudio", "aistudio", registry.GetAIStudioModels())
	return func() {
		reg.UnregisterClient(uid + "-gemini")
		reg.UnregisterClient(uid + "-aistudio")
	}
}

func TestIsGemini3Model(t *testing.T) {
	cases := []struct {
		model    string
		expected bool
	}{
		{"gemini-3-pro-preview", true},
		{"gemini-3-flash-preview", true},
		{"gemini_3_pro_preview", true},
		{"gemini-3-pro", true},
		{"gemini-3-flash", true},
		{"GEMINI-3-PRO-PREVIEW", true},
		{"gemini-2.5-pro", false},
		{"gemini-2.5-flash", false},
		{"gpt-5", false},
		{"claude-sonnet-4-5", false},
		{"", false},
	}

	for _, cs := range cases {
		t.Run(cs.model, func(t *testing.T) {
			got := util.IsGemini3Model(cs.model)
			if got != cs.expected {
				t.Fatalf("IsGemini3Model(%q) = %v, want %v", cs.model, got, cs.expected)
			}
		})
	}
}

func TestIsGemini3ProModel(t *testing.T) {
	cases := []struct {
		model    string
		expected bool
	}{
		{"gemini-3-pro-preview", true},
		{"gemini_3_pro_preview", true},
		{"gemini-3-pro", true},
		{"GEMINI-3-PRO-PREVIEW", true},
		{"gemini-3-flash-preview", false},
		{"gemini-3-flash", false},
		{"gemini-2.5-pro", false},
		{"", false},
	}

	for _, cs := range cases {
		t.Run(cs.model, func(t *testing.T) {
			got := util.IsGemini3ProModel(cs.model)
			if got != cs.expected {
				t.Fatalf("IsGemini3ProModel(%q) = %v, want %v", cs.model, got, cs.expected)
			}
		})
	}
}

func TestIsGemini3FlashModel(t *testing.T) {
	cases := []struct {
		model    string
		expected bool
	}{
		{"gemini-3-flash-preview", true},
		{"gemini_3_flash_preview", true},
		{"gemini-3-flash", true},
		{"GEMINI-3-FLASH-PREVIEW", true},
		{"gemini-3-pro-preview", false},
		{"gemini-3-pro", false},
		{"gemini-2.5-flash", false},
		{"", false},
	}

	for _, cs := range cases {
		t.Run(cs.model, func(t *testing.T) {
			got := util.IsGemini3FlashModel(cs.model)
			if got != cs.expected {
				t.Fatalf("IsGemini3FlashModel(%q) = %v, want %v", cs.model, got, cs.expected)
			}
		})
	}
}

func TestValidateGemini3ThinkingLevel(t *testing.T) {
	cases := []struct {
		name    string
		model   string
		level   string
		wantOK  bool
		wantVal string
	}{
		// Gemini 3 Pro: supports "low", "high"
		{"pro-low", "gemini-3-pro-preview", "low", true, "low"},
		{"pro-high", "gemini-3-pro-preview", "high", true, "high"},
		{"pro-minimal-invalid", "gemini-3-pro-preview", "minimal", false, ""},
		{"pro-medium-invalid", "gemini-3-pro-preview", "medium", false, ""},

		// Gemini 3 Flash: supports "minimal", "low", "medium", "high"
		{"flash-minimal", "gemini-3-flash-preview", "minimal", true, "minimal"},
		{"flash-low", "gemini-3-flash-preview", "low", true, "low"},
		{"flash-medium", "gemini-3-flash-preview", "medium", true, "medium"},
		{"flash-high", "gemini-3-flash-preview", "high", true, "high"},

		// Case insensitivity
		{"flash-LOW-case", "gemini-3-flash-preview", "LOW", true, "low"},
		{"flash-High-case", "gemini-3-flash-preview", "High", true, "high"},
		{"pro-HIGH-case", "gemini-3-pro-preview", "HIGH", true, "high"},

		// Invalid levels
		{"flash-invalid", "gemini-3-flash-preview", "xhigh", false, ""},
		{"flash-invalid-auto", "gemini-3-flash-preview", "auto", false, ""},
		{"flash-empty", "gemini-3-flash-preview", "", false, ""},

		// Non-Gemini 3 models
		{"non-gemini3", "gemini-2.5-pro", "high", false, ""},
		{"gpt5", "gpt-5", "high", false, ""},
	}

	for _, cs := range cases {
		t.Run(cs.name, func(t *testing.T) {
			got, ok := util.ValidateGemini3ThinkingLevel(cs.model, cs.level)
			if ok != cs.wantOK {
				t.Fatalf("ValidateGemini3ThinkingLevel(%q, %q) ok = %v, want %v", cs.model, cs.level, ok, cs.wantOK)
			}
			if got != cs.wantVal {
				t.Fatalf("ValidateGemini3ThinkingLevel(%q, %q) = %q, want %q", cs.model, cs.level, got, cs.wantVal)
			}
		})
	}
}
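
// sketchValidateLevel restates the rules the table above exercises (a sketch
// of util.ValidateGemini3ThinkingLevel, not its source; assumes "strings" in
// the import block): levels are matched case-insensitively, Pro accepts only
// "low" and "high", Flash also accepts "minimal" and "medium", and any other
// model or level is rejected.
func sketchValidateLevel(model, level string) (string, bool) {
	l := strings.ToLower(level)
	if util.IsGemini3ProModel(model) && (l == "low" || l == "high") {
		return l, true
	}
	if util.IsGemini3FlashModel(model) && (l == "minimal" || l == "low" || l == "medium" || l == "high") {
		return l, true
	}
	return "", false
}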

func TestThinkingBudgetToGemini3Level(t *testing.T) {
	cases := []struct {
		name    string
		model   string
		budget  int
		wantOK  bool
		wantVal string
	}{
		// Gemini 3 Pro: maps to "low" or "high"
		{"pro-dynamic", "gemini-3-pro-preview", -1, true, "high"},
		{"pro-zero", "gemini-3-pro-preview", 0, true, "low"},
		{"pro-small", "gemini-3-pro-preview", 1000, true, "low"},
		{"pro-medium", "gemini-3-pro-preview", 8000, true, "low"},
		{"pro-large", "gemini-3-pro-preview", 20000, true, "high"},
		{"pro-huge", "gemini-3-pro-preview", 50000, true, "high"},

		// Gemini 3 Flash: maps to "minimal", "low", "medium", "high"
		{"flash-dynamic", "gemini-3-flash-preview", -1, true, "high"},
		{"flash-zero", "gemini-3-flash-preview", 0, true, "minimal"},
		{"flash-tiny", "gemini-3-flash-preview", 500, true, "minimal"},
		{"flash-small", "gemini-3-flash-preview", 1000, true, "low"},
		{"flash-medium-val", "gemini-3-flash-preview", 8000, true, "medium"},
		{"flash-large", "gemini-3-flash-preview", 20000, true, "high"},
		{"flash-huge", "gemini-3-flash-preview", 50000, true, "high"},

		// Non-Gemini 3 models should return false
		{"gemini25-budget", "gemini-2.5-pro", 8000, false, ""},
		{"gpt5-budget", "gpt-5", 8000, false, ""},
	}

	for _, cs := range cases {
		t.Run(cs.name, func(t *testing.T) {
			got, ok := util.ThinkingBudgetToGemini3Level(cs.model, cs.budget)
			if ok != cs.wantOK {
				t.Fatalf("ThinkingBudgetToGemini3Level(%q, %d) ok = %v, want %v", cs.model, cs.budget, ok, cs.wantOK)
			}
			if got != cs.wantVal {
				t.Fatalf("ThinkingBudgetToGemini3Level(%q, %d) = %q, want %q", cs.model, cs.budget, got, cs.wantVal)
			}
		})
	}
}
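
// sketchBudgetToLevel shows one threshold scheme consistent with the cases
// above; the exact cutoffs inside util.ThinkingBudgetToGemini3Level are an
// assumption here. Negative budgets request dynamic thinking and map to
// "high"; otherwise larger budgets map to higher levels, with Flash exposing
// the finer-grained "minimal" and "medium" steps.
func sketchBudgetToLevel(model string, budget int) (string, bool) {
	if !util.IsGemini3Model(model) {
		return "", false
	}
	if budget < 0 {
		return "high", true // dynamic thinking
	}
	if util.IsGemini3ProModel(model) {
		if budget < 16000 { // assumed cutoff
			return "low", true
		}
		return "high", true
	}
	switch {
	case budget < 1000: // assumed cutoffs below
		return "minimal", true
	case budget < 8000:
		return "low", true
	case budget < 16000:
		return "medium", true
	default:
		return "high", true
	}
}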

func TestApplyGemini3ThinkingLevelFromMetadata(t *testing.T) {
	cleanup := registerGemini3Models(t)
	defer cleanup()

	cases := []struct {
		name         string
		model        string
		metadata     map[string]any
		inputBody    string
		wantLevel    string
		wantInclude  bool
		wantNoChange bool
	}{
		{
			name:        "flash-minimal-from-suffix",
			model:       "gemini-3-flash-preview",
			metadata:    map[string]any{"reasoning_effort": "minimal"},
			inputBody:   `{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}`,
			wantLevel:   "minimal",
			wantInclude: true,
		},
		{
			name:        "flash-medium-from-suffix",
			model:       "gemini-3-flash-preview",
			metadata:    map[string]any{"reasoning_effort": "medium"},
			inputBody:   `{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}`,
			wantLevel:   "medium",
			wantInclude: true,
		},
		{
			name:        "pro-high-from-suffix",
			model:       "gemini-3-pro-preview",
			metadata:    map[string]any{"reasoning_effort": "high"},
			inputBody:   `{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}`,
			wantLevel:   "high",
			wantInclude: true,
		},
		{
			name:         "no-metadata-no-change",
			model:        "gemini-3-flash-preview",
			metadata:     nil,
			inputBody:    `{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}`,
			wantNoChange: true,
		},
		{
			name:         "non-gemini3-no-change",
			model:        "gemini-2.5-pro",
			metadata:     map[string]any{"reasoning_effort": "high"},
			inputBody:    `{"generationConfig":{"thinkingConfig":{"thinkingBudget":-1}}}`,
			wantNoChange: true,
		},
		{
			name:         "invalid-level-no-change",
			model:        "gemini-3-flash-preview",
			metadata:     map[string]any{"reasoning_effort": "xhigh"},
			inputBody:    `{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}`,
			wantNoChange: true,
		},
	}

	for _, cs := range cases {
		t.Run(cs.name, func(t *testing.T) {
			input := []byte(cs.inputBody)
			result := util.ApplyGemini3ThinkingLevelFromMetadata(cs.model, cs.metadata, input)

			if cs.wantNoChange {
				if string(result) != cs.inputBody {
					t.Fatalf("expected no change, but got: %s", string(result))
				}
				return
			}

			level := gjson.GetBytes(result, "generationConfig.thinkingConfig.thinkingLevel")
			if !level.Exists() {
				t.Fatalf("thinkingLevel not set in result: %s", string(result))
			}
			if level.String() != cs.wantLevel {
				t.Fatalf("thinkingLevel = %q, want %q", level.String(), cs.wantLevel)
			}

			include := gjson.GetBytes(result, "generationConfig.thinkingConfig.includeThoughts")
			if cs.wantInclude && (!include.Exists() || !include.Bool()) {
				t.Fatalf("includeThoughts should be true, got: %s", string(result))
			}
		})
	}
}

func TestApplyGemini3ThinkingLevelFromMetadataCLI(t *testing.T) {
	cleanup := registerGemini3Models(t)
	defer cleanup()

	cases := []struct {
		name         string
		model        string
		metadata     map[string]any
		inputBody    string
		wantLevel    string
		wantInclude  bool
		wantNoChange bool
	}{
		{
			name:        "flash-minimal-from-suffix-cli",
			model:       "gemini-3-flash-preview",
			metadata:    map[string]any{"reasoning_effort": "minimal"},
			inputBody:   `{"request":{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}}`,
			wantLevel:   "minimal",
			wantInclude: true,
		},
		{
			name:        "flash-low-from-suffix-cli",
			model:       "gemini-3-flash-preview",
			metadata:    map[string]any{"reasoning_effort": "low"},
			inputBody:   `{"request":{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}}`,
			wantLevel:   "low",
			wantInclude: true,
		},
		{
			name:        "pro-low-from-suffix-cli",
			model:       "gemini-3-pro-preview",
			metadata:    map[string]any{"reasoning_effort": "low"},
			inputBody:   `{"request":{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}}`,
			wantLevel:   "low",
			wantInclude: true,
		},
		{
			name:         "no-metadata-no-change-cli",
			model:        "gemini-3-flash-preview",
			metadata:     nil,
			inputBody:    `{"request":{"generationConfig":{"thinkingConfig":{"includeThoughts":true}}}}`,
			wantNoChange: true,
		},
		{
			name:         "non-gemini3-no-change-cli",
			model:        "gemini-2.5-pro",
			metadata:     map[string]any{"reasoning_effort": "high"},
			inputBody:    `{"request":{"generationConfig":{"thinkingConfig":{"thinkingBudget":-1}}}}`,
			wantNoChange: true,
		},
	}

	for _, cs := range cases {
		t.Run(cs.name, func(t *testing.T) {
			input := []byte(cs.inputBody)
			result := util.ApplyGemini3ThinkingLevelFromMetadataCLI(cs.model, cs.metadata, input)

			if cs.wantNoChange {
				if string(result) != cs.inputBody {
					t.Fatalf("expected no change, but got: %s", string(result))
				}
				return
			}

			level := gjson.GetBytes(result, "request.generationConfig.thinkingConfig.thinkingLevel")
			if !level.Exists() {
				t.Fatalf("thinkingLevel not set in result: %s", string(result))
			}
			if level.String() != cs.wantLevel {
				t.Fatalf("thinkingLevel = %q, want %q", level.String(), cs.wantLevel)
			}

			include := gjson.GetBytes(result, "request.generationConfig.thinkingConfig.includeThoughts")
			if cs.wantInclude && (!include.Exists() || !include.Bool()) {
				t.Fatalf("includeThoughts should be true, got: %s", string(result))
			}
		})
	}
}

func TestNormalizeGeminiThinkingBudget_Gemini3Conversion(t *testing.T) {
	cleanup := registerGemini3Models(t)
	defer cleanup()

	cases := []struct {
		name       string
		model      string
		inputBody  string
		wantLevel  string
		wantBudget bool // if true, expect thinkingBudget instead of thinkingLevel
	}{
		{
			name:      "gemini3-flash-budget-to-level",
			model:     "gemini-3-flash-preview",
			inputBody: `{"generationConfig":{"thinkingConfig":{"thinkingBudget":8000}}}`,
			wantLevel: "medium",
		},
		{
			name:      "gemini3-pro-budget-to-level",
			model:     "gemini-3-pro-preview",
			inputBody: `{"generationConfig":{"thinkingConfig":{"thinkingBudget":20000}}}`,
			wantLevel: "high",
		},
		{
			name:       "gemini25-keeps-budget",
			model:      "gemini-2.5-pro",
			inputBody:  `{"generationConfig":{"thinkingConfig":{"thinkingBudget":8000}}}`,
			wantBudget: true,
		},
	}

	for _, cs := range cases {
		t.Run(cs.name, func(t *testing.T) {
			result := util.NormalizeGeminiThinkingBudget(cs.model, []byte(cs.inputBody))

			if cs.wantBudget {
				budget := gjson.GetBytes(result, "generationConfig.thinkingConfig.thinkingBudget")
				if !budget.Exists() {
					t.Fatalf("thinkingBudget should exist for non-Gemini3 model: %s", string(result))
				}
				level := gjson.GetBytes(result, "generationConfig.thinkingConfig.thinkingLevel")
				if level.Exists() {
					t.Fatalf("thinkingLevel should not exist for non-Gemini3 model: %s", string(result))
				}
			} else {
				level := gjson.GetBytes(result, "generationConfig.thinkingConfig.thinkingLevel")
				if !level.Exists() {
					t.Fatalf("thinkingLevel should exist for Gemini3 model: %s", string(result))
				}
				if level.String() != cs.wantLevel {
					t.Fatalf("thinkingLevel = %q, want %q", level.String(), cs.wantLevel)
				}
				budget := gjson.GetBytes(result, "generationConfig.thinkingConfig.thinkingBudget")
				if budget.Exists() {
					t.Fatalf("thinkingBudget should be removed for Gemini3 model: %s", string(result))
				}
			}
		})
	}
}
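
// sketchNormalize shows the shape of the conversion exercised above,
// assuming the companion "github.com/tidwall/sjson" package for mutation;
// it is an illustration, not the body of util.NormalizeGeminiThinkingBudget.
// For Gemini 3 models the numeric thinkingBudget is replaced by a discrete
// thinkingLevel; for every other model the body passes through unchanged.
func sketchNormalize(model string, body []byte) []byte {
	budget := gjson.GetBytes(body, "generationConfig.thinkingConfig.thinkingBudget")
	if !budget.Exists() {
		return body
	}
	level, ok := util.ThinkingBudgetToGemini3Level(model, int(budget.Int()))
	if !ok {
		return body // not a Gemini 3 model: keep the numeric budget
	}
	body, _ = sjson.SetBytes(body, "generationConfig.thinkingConfig.thinkingLevel", level)
	body, _ = sjson.DeleteBytes(body, "generationConfig.thinkingConfig.thinkingBudget")
	return body
}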