Compare commits


15 Commits

Author SHA1 Message Date
Luis Pater
8c947cafbe Merge branch 'vertex' into dev 2025-11-10 12:24:07 +08:00
Luis Pater
717eadf128 feat(vertex): add support for Vertex AI Gemini authentication and execution
Introduce Vertex AI Gemini integration with support for service account-based authentication, credential storage, and import functionality. Added new executor for Vertex AI requests, including execution and streaming paths, and integrated it into the core manager. Enhanced CLI with `--vertex-import` flag for importing service account keys.
2025-11-10 12:23:51 +08:00
Luis Pater
9e105738fd fix(server): add PATCH method to CORS allowed methods 2025-11-10 12:12:05 +08:00
Luis Pater
5d806fcefc fix(translator): support system instructions with parts and inline data in OpenAI Gemini requests
Handle both `systemInstruction` and `system_instruction` keys, processing text and inline data parts (e.g., images) for system messages in Gemini.
2025-11-10 10:31:32 +08:00
Luis Pater
6ae1dd78ed Merge pull request #230 from router-for-me/api
fix(management): exclude disabled runtime-only auths from file entries
2025-11-10 08:34:47 +08:00
hkfires
43095de162 fix(management): exclude disabled runtime-only auths from file entries 2025-11-10 08:32:42 +08:00
Luis Pater
ef7e8206d3 fix(executor): ensure usage reporting for upstream responses lacking usage data
Add `ensurePublished` to guarantee request counting even when usage fields (e.g., tokens) are absent in OpenAI-compatible executor responses, particularly for streaming paths.
2025-11-09 17:24:47 +08:00
Luis Pater
87291c0d75 Merge pull request #227 from router-for-me/api
add headers support for api
2025-11-09 14:00:37 +08:00
hkfires
51d2766d5c fix(management): sanitize keys and normalize headers 2025-11-09 12:13:02 +08:00
hkfires
a00ba77604 refactor(config): rename SyncGeminiKeys; use Sanitize* methods 2025-11-09 08:29:47 +08:00
Luis Pater
3264605c2d Merge pull request #226 from router-for-me/headers
feat(config): support HTTP headers across providers
2025-11-08 21:41:31 +08:00
hkfires
cfb9cb8951 feat(config): support HTTP headers across providers 2025-11-08 20:52:05 +08:00
Luis Pater
bb00436509 fix(service): skip disabled auth entries during executor binding
Prevent disabled auth entries from overriding active provider executors, addressing lingering configs during reloads (e.g., removed OpenAI-compat entries).
2025-11-08 18:19:34 +08:00
Luis Pater
1afbc4dd96 fix(translator): separate tool calls from content in OpenAI Claude requests 2025-11-08 17:57:46 +08:00
Luis Pater
d745f07044 fix(registry): replace Gemini model list with updated stable and preview versions 2025-11-08 15:51:57 +08:00
21 changed files with 1188 additions and 126 deletions

View File

@@ -57,6 +57,7 @@ func main() {
var iflowLogin bool
var noBrowser bool
var projectID string
var vertexImport string
var configPath string
var password string
@@ -69,6 +70,7 @@ func main() {
flag.BoolVar(&noBrowser, "no-browser", false, "Don't open browser automatically for OAuth")
flag.StringVar(&projectID, "project_id", "", "Project ID (Gemini only, not required)")
flag.StringVar(&configPath, "config", DefaultConfigPath, "Configure File Path")
flag.StringVar(&vertexImport, "vertex-import", "", "Import Vertex service account key JSON file")
flag.StringVar(&password, "password", "", "")
flag.CommandLine.Usage = func() {
@@ -417,7 +419,10 @@ func main() {
// Handle different command modes based on the provided flags.
if login {
if vertexImport != "" {
// Handle Vertex service account import
cmd.DoVertexImport(cfg, vertexImport)
} else if login {
// Handle Google/Gemini login
cmd.DoLogin(cfg, projectID, options)
} else if codexLogin {
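As a usage sketch (not part of the diff; the binary name and key path are placeholders), the new flag would be invoked along these lines:

./cli-proxy-api --vertex-import /path/to/service-account.json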

View File

@@ -49,10 +49,10 @@ ws-auth: false
# Gemini API keys (preferred)
#gemini-api-key:
# - api-key: "AIzaSy...01"
# # base-url: "https://generativelanguage.googleapis.com"
# # headers:
# # X-Custom-Header: "custom-value"
# # proxy-url: "socks5://proxy.example.com:1080"
# base-url: "https://generativelanguage.googleapis.com"
# headers:
# X-Custom-Header: "custom-value"
# proxy-url: "socks5://proxy.example.com:1080"
# - api-key: "AIzaSy...02"
# API keys for official Generative Language API (legacy compatibility)
@@ -64,6 +64,8 @@ ws-auth: false
#codex-api-key:
# - api-key: "sk-atSM..."
# base-url: "https://www.example.com" # use the custom codex API endpoint
# headers:
# X-Custom-Header: "custom-value"
# proxy-url: "socks5://proxy.example.com:1080" # optional: per-key proxy override
# Claude API keys
@@ -71,6 +73,8 @@ ws-auth: false
# - api-key: "sk-atSM..." # use the official claude API key, no need to set the base url
# - api-key: "sk-atSM..."
# base-url: "https://www.example.com" # use the custom claude API endpoint
# headers:
# X-Custom-Header: "custom-value"
# proxy-url: "socks5://proxy.example.com:1080" # optional: per-key proxy override
# models:
# - name: "claude-3-5-sonnet-20241022" # upstream model name
@@ -80,6 +84,8 @@ ws-auth: false
#openai-compatibility:
# - name: "openrouter" # The name of the provider; it will be used in the user agent and other places.
# base-url: "https://openrouter.ai/api/v1" # The base URL of the provider.
# headers:
# X-Custom-Header: "custom-value"
# # New format with per-key proxy support (recommended):
# api-key-entries:
# - api-key: "sk-or-v1-...b780"
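For reference, a minimal uncommented sketch of a Gemini key entry using the new headers option (values are placeholders mirroring the commented examples above):

gemini-api-key:
  - api-key: "AIzaSy...01"
    base-url: "https://generativelanguage.googleapis.com"
    headers:
      X-Custom-Header: "custom-value"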

View File

@@ -293,6 +293,9 @@ func (h *Handler) buildAuthFileEntry(auth *coreauth.Auth) gin.H {
return nil
}
runtimeOnly := isRuntimeOnlyAuth(auth)
if runtimeOnly && (auth.Disabled || auth.Status == coreauth.StatusDisabled) {
return nil
}
path := strings.TrimSpace(authAttribute(auth, "path"))
if path == "" && !runtimeOnly {
return nil

View File

@@ -148,7 +148,7 @@ func (h *Handler) applyLegacyKeys(keys []string) {
}
h.cfg.GeminiKey = newList
h.cfg.GlAPIKey = sanitized
h.cfg.SyncGeminiKeys()
h.cfg.SanitizeGeminiKeys()
}
// api-keys
@@ -206,7 +206,7 @@ func (h *Handler) PutGeminiKeys(c *gin.Context) {
arr = obj.Items
}
h.cfg.GeminiKey = append([]config.GeminiKey(nil), arr...)
h.cfg.SyncGeminiKeys()
h.cfg.SanitizeGeminiKeys()
h.persist(c)
}
func (h *Handler) PatchGeminiKey(c *gin.Context) {
@@ -227,7 +227,7 @@ func (h *Handler) PatchGeminiKey(c *gin.Context) {
// Treat empty API key as delete.
if body.Index != nil && *body.Index >= 0 && *body.Index < len(h.cfg.GeminiKey) {
h.cfg.GeminiKey = append(h.cfg.GeminiKey[:*body.Index], h.cfg.GeminiKey[*body.Index+1:]...)
h.cfg.SyncGeminiKeys()
h.cfg.SanitizeGeminiKeys()
h.persist(c)
return
}
@@ -245,7 +245,7 @@ func (h *Handler) PatchGeminiKey(c *gin.Context) {
}
if removed {
h.cfg.GeminiKey = out
h.cfg.SyncGeminiKeys()
h.cfg.SanitizeGeminiKeys()
h.persist(c)
return
}
@@ -257,7 +257,7 @@ func (h *Handler) PatchGeminiKey(c *gin.Context) {
if body.Index != nil && *body.Index >= 0 && *body.Index < len(h.cfg.GeminiKey) {
h.cfg.GeminiKey[*body.Index] = value
h.cfg.SyncGeminiKeys()
h.cfg.SanitizeGeminiKeys()
h.persist(c)
return
}
@@ -266,7 +266,7 @@ func (h *Handler) PatchGeminiKey(c *gin.Context) {
for i := range h.cfg.GeminiKey {
if h.cfg.GeminiKey[i].APIKey == match {
h.cfg.GeminiKey[i] = value
h.cfg.SyncGeminiKeys()
h.cfg.SanitizeGeminiKeys()
h.persist(c)
return
}
@@ -284,7 +284,7 @@ func (h *Handler) DeleteGeminiKey(c *gin.Context) {
}
if len(out) != len(h.cfg.GeminiKey) {
h.cfg.GeminiKey = out
h.cfg.SyncGeminiKeys()
h.cfg.SanitizeGeminiKeys()
h.persist(c)
} else {
c.JSON(404, gin.H{"error": "item not found"})
@@ -295,7 +295,7 @@ func (h *Handler) DeleteGeminiKey(c *gin.Context) {
var idx int
if _, err := fmt.Sscanf(idxStr, "%d", &idx); err == nil && idx >= 0 && idx < len(h.cfg.GeminiKey) {
h.cfg.GeminiKey = append(h.cfg.GeminiKey[:idx], h.cfg.GeminiKey[idx+1:]...)
h.cfg.SyncGeminiKeys()
h.cfg.SanitizeGeminiKeys()
h.persist(c)
return
}
@@ -328,6 +328,7 @@ func (h *Handler) PutClaudeKeys(c *gin.Context) {
normalizeClaudeKey(&arr[i])
}
h.cfg.ClaudeKey = arr
h.cfg.SanitizeClaudeKeys()
h.persist(c)
}
func (h *Handler) PatchClaudeKey(c *gin.Context) {
@@ -340,16 +341,19 @@ func (h *Handler) PatchClaudeKey(c *gin.Context) {
c.JSON(400, gin.H{"error": "invalid body"})
return
}
normalizeClaudeKey(body.Value)
value := *body.Value
normalizeClaudeKey(&value)
if body.Index != nil && *body.Index >= 0 && *body.Index < len(h.cfg.ClaudeKey) {
h.cfg.ClaudeKey[*body.Index] = *body.Value
h.cfg.ClaudeKey[*body.Index] = value
h.cfg.SanitizeClaudeKeys()
h.persist(c)
return
}
if body.Match != nil {
for i := range h.cfg.ClaudeKey {
if h.cfg.ClaudeKey[i].APIKey == *body.Match {
h.cfg.ClaudeKey[i] = *body.Value
h.cfg.ClaudeKey[i] = value
h.cfg.SanitizeClaudeKeys()
h.persist(c)
return
}
@@ -366,6 +370,7 @@ func (h *Handler) DeleteClaudeKey(c *gin.Context) {
}
}
h.cfg.ClaudeKey = out
h.cfg.SanitizeClaudeKeys()
h.persist(c)
return
}
@@ -374,6 +379,7 @@ func (h *Handler) DeleteClaudeKey(c *gin.Context) {
_, err := fmt.Sscanf(idxStr, "%d", &idx)
if err == nil && idx >= 0 && idx < len(h.cfg.ClaudeKey) {
h.cfg.ClaudeKey = append(h.cfg.ClaudeKey[:idx], h.cfg.ClaudeKey[idx+1:]...)
h.cfg.SanitizeClaudeKeys()
h.persist(c)
return
}
@@ -413,6 +419,7 @@ func (h *Handler) PutOpenAICompat(c *gin.Context) {
}
}
h.cfg.OpenAICompatibility = filtered
h.cfg.SanitizeOpenAICompatibility()
h.persist(c)
}
func (h *Handler) PatchOpenAICompat(c *gin.Context) {
@@ -430,6 +437,7 @@ func (h *Handler) PatchOpenAICompat(c *gin.Context) {
if strings.TrimSpace(body.Value.BaseURL) == "" {
if body.Index != nil && *body.Index >= 0 && *body.Index < len(h.cfg.OpenAICompatibility) {
h.cfg.OpenAICompatibility = append(h.cfg.OpenAICompatibility[:*body.Index], h.cfg.OpenAICompatibility[*body.Index+1:]...)
h.cfg.SanitizeOpenAICompatibility()
h.persist(c)
return
}
@@ -445,6 +453,7 @@ func (h *Handler) PatchOpenAICompat(c *gin.Context) {
}
if removed {
h.cfg.OpenAICompatibility = out
h.cfg.SanitizeOpenAICompatibility()
h.persist(c)
return
}
@@ -454,6 +463,7 @@ func (h *Handler) PatchOpenAICompat(c *gin.Context) {
}
if body.Index != nil && *body.Index >= 0 && *body.Index < len(h.cfg.OpenAICompatibility) {
h.cfg.OpenAICompatibility[*body.Index] = *body.Value
h.cfg.SanitizeOpenAICompatibility()
h.persist(c)
return
}
@@ -461,6 +471,7 @@ func (h *Handler) PatchOpenAICompat(c *gin.Context) {
for i := range h.cfg.OpenAICompatibility {
if h.cfg.OpenAICompatibility[i].Name == *body.Name {
h.cfg.OpenAICompatibility[i] = *body.Value
h.cfg.SanitizeOpenAICompatibility()
h.persist(c)
return
}
@@ -477,6 +488,7 @@ func (h *Handler) DeleteOpenAICompat(c *gin.Context) {
}
}
h.cfg.OpenAICompatibility = out
h.cfg.SanitizeOpenAICompatibility()
h.persist(c)
return
}
@@ -485,6 +497,7 @@ func (h *Handler) DeleteOpenAICompat(c *gin.Context) {
_, err := fmt.Sscanf(idxStr, "%d", &idx)
if err == nil && idx >= 0 && idx < len(h.cfg.OpenAICompatibility) {
h.cfg.OpenAICompatibility = append(h.cfg.OpenAICompatibility[:idx], h.cfg.OpenAICompatibility[idx+1:]...)
h.cfg.SanitizeOpenAICompatibility()
h.persist(c)
return
}
@@ -517,13 +530,17 @@ func (h *Handler) PutCodexKeys(c *gin.Context) {
filtered := make([]config.CodexKey, 0, len(arr))
for i := range arr {
entry := arr[i]
entry.APIKey = strings.TrimSpace(entry.APIKey)
entry.BaseURL = strings.TrimSpace(entry.BaseURL)
entry.ProxyURL = strings.TrimSpace(entry.ProxyURL)
entry.Headers = config.NormalizeHeaders(entry.Headers)
if entry.BaseURL == "" {
continue
}
filtered = append(filtered, entry)
}
h.cfg.CodexKey = filtered
h.cfg.SanitizeCodexKeys()
h.persist(c)
}
func (h *Handler) PatchCodexKey(c *gin.Context) {
@@ -536,10 +553,16 @@ func (h *Handler) PatchCodexKey(c *gin.Context) {
c.JSON(400, gin.H{"error": "invalid body"})
return
}
value := *body.Value
value.APIKey = strings.TrimSpace(value.APIKey)
value.BaseURL = strings.TrimSpace(value.BaseURL)
value.ProxyURL = strings.TrimSpace(value.ProxyURL)
value.Headers = config.NormalizeHeaders(value.Headers)
// If base-url becomes empty, delete instead of update
if strings.TrimSpace(body.Value.BaseURL) == "" {
if value.BaseURL == "" {
if body.Index != nil && *body.Index >= 0 && *body.Index < len(h.cfg.CodexKey) {
h.cfg.CodexKey = append(h.cfg.CodexKey[:*body.Index], h.cfg.CodexKey[*body.Index+1:]...)
h.cfg.SanitizeCodexKeys()
h.persist(c)
return
}
@@ -555,20 +578,23 @@ func (h *Handler) PatchCodexKey(c *gin.Context) {
}
if removed {
h.cfg.CodexKey = out
h.cfg.SanitizeCodexKeys()
h.persist(c)
return
}
}
} else {
if body.Index != nil && *body.Index >= 0 && *body.Index < len(h.cfg.CodexKey) {
h.cfg.CodexKey[*body.Index] = *body.Value
h.cfg.CodexKey[*body.Index] = value
h.cfg.SanitizeCodexKeys()
h.persist(c)
return
}
if body.Match != nil {
for i := range h.cfg.CodexKey {
if h.cfg.CodexKey[i].APIKey == *body.Match {
h.cfg.CodexKey[i] = *body.Value
h.cfg.CodexKey[i] = value
h.cfg.SanitizeCodexKeys()
h.persist(c)
return
}
@@ -586,6 +612,7 @@ func (h *Handler) DeleteCodexKey(c *gin.Context) {
}
}
h.cfg.CodexKey = out
h.cfg.SanitizeCodexKeys()
h.persist(c)
return
}
@@ -594,6 +621,7 @@ func (h *Handler) DeleteCodexKey(c *gin.Context) {
_, err := fmt.Sscanf(idxStr, "%d", &idx)
if err == nil && idx >= 0 && idx < len(h.cfg.CodexKey) {
h.cfg.CodexKey = append(h.cfg.CodexKey[:idx], h.cfg.CodexKey[idx+1:]...)
h.cfg.SanitizeCodexKeys()
h.persist(c)
return
}
@@ -607,6 +635,7 @@ func normalizeOpenAICompatibilityEntry(entry *config.OpenAICompatibility) {
}
// Trim base-url; empty base-url indicates provider should be removed by sanitization
entry.BaseURL = strings.TrimSpace(entry.BaseURL)
entry.Headers = config.NormalizeHeaders(entry.Headers)
existing := make(map[string]struct{}, len(entry.APIKeyEntries))
for i := range entry.APIKeyEntries {
trimmed := strings.TrimSpace(entry.APIKeyEntries[i].APIKey)
@@ -658,6 +687,7 @@ func normalizeClaudeKey(entry *config.ClaudeKey) {
entry.APIKey = strings.TrimSpace(entry.APIKey)
entry.BaseURL = strings.TrimSpace(entry.BaseURL)
entry.ProxyURL = strings.TrimSpace(entry.ProxyURL)
entry.Headers = config.NormalizeHeaders(entry.Headers)
if len(entry.Models) == 0 {
return
}

View File

@@ -703,7 +703,7 @@ func (s *Server) Stop(ctx context.Context) error {
func corsMiddleware() gin.HandlerFunc {
return func(c *gin.Context) {
c.Header("Access-Control-Allow-Origin", "*")
c.Header("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS")
c.Header("Access-Control-Allow-Methods", "GET, POST, PUT, PATCH, DELETE, OPTIONS")
c.Header("Access-Control-Allow-Headers", "*")
if c.Request.Method == "OPTIONS" {
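With PATCH added, browser preflights for the management PATCH handlers should now pass. A hedged check (host, port, and endpoint path are assumptions, not taken from this diff):

curl -i -X OPTIONS http://localhost:8317/v0/management/gemini-api-key \
  -H "Origin: https://example.com" \
  -H "Access-Control-Request-Method: PATCH"
# response is expected to include:
# Access-Control-Allow-Methods: GET, POST, PUT, PATCH, DELETE, OPTIONS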

View File

@@ -0,0 +1,208 @@
package vertex
import (
"crypto/rsa"
"crypto/x509"
"encoding/base64"
"encoding/json"
"encoding/pem"
"fmt"
"strings"
)
// NormalizeServiceAccountJSON normalizes the given JSON-encoded service account payload.
// It returns the normalized JSON (with sanitized private_key) or, if normalization fails,
// the original bytes and the encountered error.
func NormalizeServiceAccountJSON(raw []byte) ([]byte, error) {
if len(raw) == 0 {
return raw, nil
}
var payload map[string]any
if err := json.Unmarshal(raw, &payload); err != nil {
return raw, err
}
normalized, err := NormalizeServiceAccountMap(payload)
if err != nil {
return raw, err
}
out, err := json.Marshal(normalized)
if err != nil {
return raw, err
}
return out, nil
}
// NormalizeServiceAccountMap returns a copy of the given service account map with
// a sanitized private_key field that is guaranteed to contain a valid RSA PRIVATE KEY PEM block.
func NormalizeServiceAccountMap(sa map[string]any) (map[string]any, error) {
if sa == nil {
return nil, fmt.Errorf("service account payload is empty")
}
pk, _ := sa["private_key"].(string)
if strings.TrimSpace(pk) == "" {
return nil, fmt.Errorf("service account missing private_key")
}
normalized, err := sanitizePrivateKey(pk)
if err != nil {
return nil, err
}
clone := make(map[string]any, len(sa))
for k, v := range sa {
clone[k] = v
}
clone["private_key"] = normalized
return clone, nil
}
func sanitizePrivateKey(raw string) (string, error) {
pk := strings.ReplaceAll(raw, "\r\n", "\n")
pk = strings.ReplaceAll(pk, "\r", "\n")
pk = stripANSIEscape(pk)
pk = strings.ToValidUTF8(pk, "")
pk = strings.TrimSpace(pk)
normalized := pk
if block, _ := pem.Decode([]byte(pk)); block == nil {
// Attempt to reconstruct from the textual payload.
if reconstructed, err := rebuildPEM(pk); err == nil {
normalized = reconstructed
} else {
return "", fmt.Errorf("private_key is not valid pem: %w", err)
}
}
block, _ := pem.Decode([]byte(normalized))
if block == nil {
return "", fmt.Errorf("private_key pem decode failed")
}
rsaBlock, err := ensureRSAPrivateKey(block)
if err != nil {
return "", err
}
return string(pem.EncodeToMemory(rsaBlock)), nil
}
func ensureRSAPrivateKey(block *pem.Block) (*pem.Block, error) {
if block == nil {
return nil, fmt.Errorf("pem block is nil")
}
if block.Type == "RSA PRIVATE KEY" {
if _, err := x509.ParsePKCS1PrivateKey(block.Bytes); err != nil {
return nil, fmt.Errorf("private_key invalid rsa: %w", err)
}
return block, nil
}
if block.Type == "PRIVATE KEY" {
key, err := x509.ParsePKCS8PrivateKey(block.Bytes)
if err != nil {
return nil, fmt.Errorf("private_key invalid pkcs8: %w", err)
}
rsaKey, ok := key.(*rsa.PrivateKey)
if !ok {
return nil, fmt.Errorf("private_key is not an RSA key")
}
der := x509.MarshalPKCS1PrivateKey(rsaKey)
return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: der}, nil
}
// Attempt auto-detection: try PKCS#1 first, then PKCS#8.
if rsaKey, err := x509.ParsePKCS1PrivateKey(block.Bytes); err == nil {
der := x509.MarshalPKCS1PrivateKey(rsaKey)
return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: der}, nil
}
if key, err := x509.ParsePKCS8PrivateKey(block.Bytes); err == nil {
if rsaKey, ok := key.(*rsa.PrivateKey); ok {
der := x509.MarshalPKCS1PrivateKey(rsaKey)
return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: der}, nil
}
}
return nil, fmt.Errorf("private_key uses unsupported format")
}
func rebuildPEM(raw string) (string, error) {
kind := "PRIVATE KEY"
if strings.Contains(raw, "RSA PRIVATE KEY") {
kind = "RSA PRIVATE KEY"
}
header := "-----BEGIN " + kind + "-----"
footer := "-----END " + kind + "-----"
start := strings.Index(raw, header)
end := strings.Index(raw, footer)
if start < 0 || end <= start {
return "", fmt.Errorf("missing pem markers")
}
body := raw[start+len(header) : end]
payload := filterBase64(body)
if payload == "" {
return "", fmt.Errorf("private_key base64 payload empty")
}
der, err := base64.StdEncoding.DecodeString(payload)
if err != nil {
return "", fmt.Errorf("private_key base64 decode failed: %w", err)
}
block := &pem.Block{Type: kind, Bytes: der}
return string(pem.EncodeToMemory(block)), nil
}
func filterBase64(s string) string {
var b strings.Builder
for _, r := range s {
switch {
case r >= 'A' && r <= 'Z':
b.WriteRune(r)
case r >= 'a' && r <= 'z':
b.WriteRune(r)
case r >= '0' && r <= '9':
b.WriteRune(r)
case r == '+' || r == '/' || r == '=':
b.WriteRune(r)
default:
// skip
}
}
return b.String()
}
func stripANSIEscape(s string) string {
in := []rune(s)
var out []rune
for i := 0; i < len(in); i++ {
r := in[i]
if r != 0x1b {
out = append(out, r)
continue
}
if i+1 >= len(in) {
continue
}
next := in[i+1]
switch next {
case ']':
i += 2
for i < len(in) {
if in[i] == 0x07 {
break
}
if in[i] == 0x1b && i+1 < len(in) && in[i+1] == '\\' {
i++
break
}
i++
}
case '[':
i += 2
for i < len(in) {
if (in[i] >= 'A' && in[i] <= 'Z') || (in[i] >= 'a' && in[i] <= 'z') {
break
}
i++
}
default:
// skip single ESC
}
}
return string(out)
}
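A brief usage sketch of the normalizer. The package lives under internal/, so this is written as if called from elsewhere in the module; the file paths are placeholders:

package main

import (
	"log"
	"os"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/auth/vertex"
)

func main() {
	raw, err := os.ReadFile("service-account.json") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	// Re-encodes private_key as a clean "RSA PRIVATE KEY" PEM block, stripping
	// carriage returns, ANSI escapes, and invalid UTF-8 along the way.
	normalized, err := vertex.NormalizeServiceAccountJSON(raw)
	if err != nil {
		log.Fatalf("invalid service account: %v", err)
	}
	if err := os.WriteFile("service-account.normalized.json", normalized, 0o600); err != nil {
		log.Fatal(err)
	}
}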

View File

@@ -0,0 +1,66 @@
// Package vertex provides token storage for Google Vertex AI Gemini via service account credentials.
// It serialises service account JSON into an auth file that is consumed by the runtime executor.
package vertex
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"github.com/router-for-me/CLIProxyAPI/v6/internal/misc"
log "github.com/sirupsen/logrus"
)
// VertexCredentialStorage stores the service account JSON for Vertex AI access.
// The content is persisted verbatim under the "service_account" key, together with
// helper fields for project, location and email to improve logging and discovery.
type VertexCredentialStorage struct {
// ServiceAccount holds the parsed service account JSON content.
ServiceAccount map[string]any `json:"service_account"`
// ProjectID is derived from the service account JSON (project_id).
ProjectID string `json:"project_id"`
// Email is the client_email from the service account JSON.
Email string `json:"email"`
// Location optionally sets a default region (e.g., us-central1) for Vertex endpoints.
Location string `json:"location,omitempty"`
// Type is the provider identifier stored alongside credentials. Always "vertex".
Type string `json:"type"`
}
// SaveTokenToFile writes the credential payload to the given file path in JSON format.
// It ensures the parent directory exists and logs the operation for transparency.
func (s *VertexCredentialStorage) SaveTokenToFile(authFilePath string) error {
misc.LogSavingCredentials(authFilePath)
if s == nil {
return fmt.Errorf("vertex credential: storage is nil")
}
if s.ServiceAccount == nil {
return fmt.Errorf("vertex credential: service account content is empty")
}
// Ensure we tag the file with the provider type.
s.Type = "vertex"
if err := os.MkdirAll(filepath.Dir(authFilePath), 0o700); err != nil {
return fmt.Errorf("vertex credential: create directory failed: %w", err)
}
f, err := os.Create(authFilePath)
if err != nil {
return fmt.Errorf("vertex credential: create file failed: %w", err)
}
defer func() {
if errClose := f.Close(); errClose != nil {
log.Errorf("vertex credential: failed to close file: %v", errClose)
}
}()
enc := json.NewEncoder(f)
enc.SetIndent("", " ")
if err = enc.Encode(s); err != nil {
return fmt.Errorf("vertex credential: encode failed: %w", err)
}
return nil
}
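A sketch of how this storage type might be written directly (the import command further below goes through the token store instead; all values here are placeholders):

sa := map[string]any{"project_id": "my-project"} // service-account JSON parsed into map[string]any
storage := &vertex.VertexCredentialStorage{
	ServiceAccount: sa,
	ProjectID:      "my-project",
	Email:          "sa@my-project.iam.gserviceaccount.com",
	Location:       "us-central1",
}
if err := storage.SaveTokenToFile("/path/to/auths/vertex-my-project.json"); err != nil {
	log.Fatalf("vertex credential: %v", err)
}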

View File

@@ -0,0 +1,123 @@
// Package cmd contains CLI helpers. This file implements importing a Vertex AI
// service account JSON into the auth store as a dedicated "vertex" credential.
package cmd
import (
"context"
"encoding/json"
"fmt"
"os"
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/auth/vertex"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
sdkAuth "github.com/router-for-me/CLIProxyAPI/v6/sdk/auth"
coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
log "github.com/sirupsen/logrus"
)
// DoVertexImport imports a Google Cloud service account key JSON and persists
// it as a "vertex" provider credential. The file content is embedded in the auth
// file to allow portable deployment across stores.
func DoVertexImport(cfg *config.Config, keyPath string) {
if cfg == nil {
cfg = &config.Config{}
}
if resolved, errResolve := util.ResolveAuthDir(cfg.AuthDir); errResolve == nil {
cfg.AuthDir = resolved
}
rawPath := strings.TrimSpace(keyPath)
if rawPath == "" {
log.Fatalf("vertex-import: missing service account key path")
return
}
data, errRead := os.ReadFile(rawPath)
if errRead != nil {
log.Fatalf("vertex-import: read file failed: %v", errRead)
return
}
var sa map[string]any
if errUnmarshal := json.Unmarshal(data, &sa); errUnmarshal != nil {
log.Fatalf("vertex-import: invalid service account json: %v", errUnmarshal)
return
}
// Validate and normalize private_key before saving
normalizedSA, errFix := vertex.NormalizeServiceAccountMap(sa)
if errFix != nil {
log.Fatalf("vertex-import: %v", errFix)
return
}
sa = normalizedSA
email, _ := sa["client_email"].(string)
projectID, _ := sa["project_id"].(string)
if strings.TrimSpace(projectID) == "" {
log.Fatalf("vertex-import: project_id missing in service account json")
return
}
if strings.TrimSpace(email) == "" {
// Keep empty email but warn
log.Warn("vertex-import: client_email missing in service account json")
}
// Default location if not provided by user. Can be edited in the saved file later.
location := "us-central1"
fileName := fmt.Sprintf("vertex-%s.json", sanitizeFilePart(projectID))
// Build auth record
storage := &vertex.VertexCredentialStorage{
ServiceAccount: sa,
ProjectID: projectID,
Email: email,
Location: location,
}
metadata := map[string]any{
"service_account": sa,
"project_id": projectID,
"email": email,
"location": location,
"type": "vertex",
"label": labelForVertex(projectID, email),
}
record := &coreauth.Auth{
ID: fileName,
Provider: "vertex",
FileName: fileName,
Storage: storage,
Metadata: metadata,
}
store := sdkAuth.GetTokenStore()
if setter, ok := store.(interface{ SetBaseDir(string) }); ok {
setter.SetBaseDir(cfg.AuthDir)
}
path, errSave := store.Save(context.Background(), record)
if errSave != nil {
log.Fatalf("vertex-import: save credential failed: %v", errSave)
return
}
fmt.Printf("Vertex credentials imported: %s\n", path)
}
func sanitizeFilePart(s string) string {
out := strings.TrimSpace(s)
replacers := []string{"/", "_", "\\", "_", ":", "_", " ", "-"}
for i := 0; i < len(replacers); i += 2 {
out = strings.ReplaceAll(out, replacers[i], replacers[i+1])
}
return out
}
func labelForVertex(projectID, email string) string {
p := strings.TrimSpace(projectID)
e := strings.TrimSpace(email)
if p != "" && e != "" {
return fmt.Sprintf("%s (%s)", p, e)
}
if p != "" {
return p
}
if e != "" {
return e
}
return "vertex"
}

View File

@@ -100,6 +100,9 @@ type ClaudeKey struct {
// Models defines upstream model names and aliases for request routing.
Models []ClaudeModel `yaml:"models" json:"models"`
// Headers optionally adds extra HTTP headers for requests sent with this key.
Headers map[string]string `yaml:"headers,omitempty" json:"headers,omitempty"`
}
// ClaudeModel describes a mapping between an alias and the actual upstream model name.
@@ -123,6 +126,9 @@ type CodexKey struct {
// ProxyURL overrides the global proxy setting for this API key if provided.
ProxyURL string `yaml:"proxy-url" json:"proxy-url"`
// Headers optionally adds extra HTTP headers for requests sent with this key.
Headers map[string]string `yaml:"headers,omitempty" json:"headers,omitempty"`
}
// GeminiKey represents the configuration for a Gemini API key,
@@ -159,6 +165,9 @@ type OpenAICompatibility struct {
// Models defines the model configurations including aliases for routing.
Models []OpenAICompatibilityModel `yaml:"models" json:"models"`
// Headers optionally adds extra HTTP headers for requests sent to this provider.
Headers map[string]string `yaml:"headers,omitempty" json:"headers,omitempty"`
}
// OpenAICompatibilityAPIKey represents an API key configuration with optional proxy setting.
@@ -246,23 +255,26 @@ func LoadConfigOptional(configFile string, optional bool) (*Config, error) {
// Sync request authentication providers with inline API keys for backwards compatibility.
syncInlineAccessProvider(&cfg)
// Normalize Gemini API key configuration and migrate legacy entries.
cfg.SyncGeminiKeys()
// Sanitize OpenAI compatibility providers: drop entries without base-url
sanitizeOpenAICompatibility(&cfg)
// Sanitize Gemini API key configuration and migrate legacy entries.
cfg.SanitizeGeminiKeys()
// Sanitize Codex keys: drop entries without base-url
sanitizeCodexKeys(&cfg)
cfg.SanitizeCodexKeys()
// Sanitize Claude key headers
cfg.SanitizeClaudeKeys()
// Sanitize OpenAI compatibility providers: drop entries without base-url
cfg.SanitizeOpenAICompatibility()
// Return the populated configuration struct.
return &cfg, nil
}
// sanitizeOpenAICompatibility removes OpenAI-compatibility provider entries that are
// SanitizeOpenAICompatibility removes OpenAI-compatibility provider entries that are
// not actionable, specifically those missing a BaseURL. It trims whitespace before
// evaluation and preserves the relative order of remaining entries.
func sanitizeOpenAICompatibility(cfg *Config) {
func (cfg *Config) SanitizeOpenAICompatibility() {
if cfg == nil || len(cfg.OpenAICompatibility) == 0 {
return
}
@@ -271,6 +283,7 @@ func sanitizeOpenAICompatibility(cfg *Config) {
e := cfg.OpenAICompatibility[i]
e.Name = strings.TrimSpace(e.Name)
e.BaseURL = strings.TrimSpace(e.BaseURL)
e.Headers = NormalizeHeaders(e.Headers)
if e.BaseURL == "" {
// Skip providers with no base-url; treated as removed
continue
@@ -280,9 +293,9 @@ func sanitizeOpenAICompatibility(cfg *Config) {
cfg.OpenAICompatibility = out
}
// sanitizeCodexKeys removes Codex API key entries missing a BaseURL.
// SanitizeCodexKeys removes Codex API key entries missing a BaseURL.
// It trims whitespace and preserves order for remaining entries.
func sanitizeCodexKeys(cfg *Config) {
func (cfg *Config) SanitizeCodexKeys() {
if cfg == nil || len(cfg.CodexKey) == 0 {
return
}
@@ -290,6 +303,7 @@ func sanitizeCodexKeys(cfg *Config) {
for i := range cfg.CodexKey {
e := cfg.CodexKey[i]
e.BaseURL = strings.TrimSpace(e.BaseURL)
e.Headers = NormalizeHeaders(e.Headers)
if e.BaseURL == "" {
continue
}
@@ -298,7 +312,19 @@ func sanitizeCodexKeys(cfg *Config) {
cfg.CodexKey = out
}
func (cfg *Config) SyncGeminiKeys() {
// SanitizeClaudeKeys normalizes headers for Claude credentials.
func (cfg *Config) SanitizeClaudeKeys() {
if cfg == nil || len(cfg.ClaudeKey) == 0 {
return
}
for i := range cfg.ClaudeKey {
entry := &cfg.ClaudeKey[i]
entry.Headers = NormalizeHeaders(entry.Headers)
}
}
// SanitizeGeminiKeys deduplicates and normalizes Gemini credentials.
func (cfg *Config) SanitizeGeminiKeys() {
if cfg == nil {
return
}
@@ -313,7 +339,7 @@ func (cfg *Config) SyncGeminiKeys() {
}
entry.BaseURL = strings.TrimSpace(entry.BaseURL)
entry.ProxyURL = strings.TrimSpace(entry.ProxyURL)
entry.Headers = normalizeGeminiHeaders(entry.Headers)
entry.Headers = NormalizeHeaders(entry.Headers)
if _, exists := seen[entry.APIKey]; exists {
continue
}
@@ -356,7 +382,8 @@ func looksLikeBcrypt(s string) bool {
return len(s) > 4 && (s[:4] == "$2a$" || s[:4] == "$2b$" || s[:4] == "$2y$")
}
func normalizeGeminiHeaders(headers map[string]string) map[string]string {
// NormalizeHeaders trims header keys and values and removes empty pairs.
func NormalizeHeaders(headers map[string]string) map[string]string {
if len(headers) == 0 {
return nil
}

View File

@@ -124,26 +124,53 @@ func GetGeminiModels() []*ModelInfo { return GeminiModels() }
// GetGeminiCLIModels returns the standard Gemini model definitions
func GetGeminiCLIModels() []*ModelInfo {
base := GeminiModels()
return append(base,
[]*ModelInfo{
{
ID: "gemini-3-pro-preview-11-2025",
Object: "model",
Created: time.Now().Unix(),
OwnedBy: "google",
Type: "gemini",
Name: "models/gemini-3-pro-preview-11-2025",
Version: "3",
DisplayName: "Gemini 3 Pro Preview 11-2025",
Description: "Latest preview of Gemini Pro",
InputTokenLimit: 1048576,
OutputTokenLimit: 65536,
SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true},
},
}...,
)
return []*ModelInfo{
{
ID: "gemini-2.5-flash",
Object: "model",
Created: time.Now().Unix(),
OwnedBy: "google",
Type: "gemini",
Name: "models/gemini-2.5-flash",
Version: "001",
DisplayName: "Gemini 2.5 Flash",
Description: "Stable version of Gemini 2.5 Flash, our mid-size multimodal model that supports up to 1 million tokens, released in June of 2025.",
InputTokenLimit: 1048576,
OutputTokenLimit: 65536,
SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
Thinking: &ThinkingSupport{Min: 0, Max: 24576, ZeroAllowed: true, DynamicAllowed: true},
},
{
ID: "gemini-2.5-pro",
Object: "model",
Created: time.Now().Unix(),
OwnedBy: "google",
Type: "gemini",
Name: "models/gemini-2.5-pro",
Version: "2.5",
DisplayName: "Gemini 2.5 Pro",
Description: "Stable release (June 17th, 2025) of Gemini 2.5 Pro",
InputTokenLimit: 1048576,
OutputTokenLimit: 65536,
SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true},
},
{
ID: "gemini-3-pro-preview-11-2025",
Object: "model",
Created: time.Now().Unix(),
OwnedBy: "google",
Type: "gemini",
Name: "models/gemini-3-pro-preview-11-2025",
Version: "3",
DisplayName: "Gemini 3 Pro Preview 11-2025",
Description: "Latest preview of Gemini Pro",
InputTokenLimit: 1048576,
OutputTokenLimit: 65536,
SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true},
},
}
}
// GetAIStudioModels returns the Gemini model definitions for AI Studio integrations

View File

@@ -17,6 +17,7 @@ import (
claudeauth "github.com/router-for-me/CLIProxyAPI/v6/internal/auth/claude"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
"github.com/router-for-me/CLIProxyAPI/v6/internal/misc"
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
cliproxyexecutor "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/executor"
sdktranslator "github.com/router-for-me/CLIProxyAPI/v6/sdk/translator"
@@ -67,7 +68,7 @@ func (e *ClaudeExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, r
if err != nil {
return resp, err
}
applyClaudeHeaders(httpReq, apiKey, false)
applyClaudeHeaders(httpReq, auth, apiKey, false)
var authID, authLabel, authType, authValue string
if auth != nil {
authID = auth.ID
@@ -159,7 +160,7 @@ func (e *ClaudeExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.A
if err != nil {
return nil, err
}
applyClaudeHeaders(httpReq, apiKey, true)
applyClaudeHeaders(httpReq, auth, apiKey, true)
var authID, authLabel, authType, authValue string
if auth != nil {
authID = auth.ID
@@ -290,7 +291,7 @@ func (e *ClaudeExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Aut
if err != nil {
return cliproxyexecutor.Response{}, err
}
applyClaudeHeaders(httpReq, apiKey, false)
applyClaudeHeaders(httpReq, auth, apiKey, false)
var authID, authLabel, authType, authValue string
if auth != nil {
authID = auth.ID
@@ -529,7 +530,7 @@ func decodeResponseBody(body io.ReadCloser, contentEncoding string) (io.ReadClos
return body, nil
}
func applyClaudeHeaders(r *http.Request, apiKey string, stream bool) {
func applyClaudeHeaders(r *http.Request, auth *cliproxyauth.Auth, apiKey string, stream bool) {
r.Header.Set("Authorization", "Bearer "+apiKey)
r.Header.Set("Content-Type", "application/json")
@@ -564,9 +565,14 @@ func applyClaudeHeaders(r *http.Request, apiKey string, stream bool) {
r.Header.Set("Accept-Encoding", "gzip, deflate, br, zstd")
if stream {
r.Header.Set("Accept", "text/event-stream")
return
} else {
r.Header.Set("Accept", "application/json")
}
r.Header.Set("Accept", "application/json")
var attrs map[string]string
if auth != nil {
attrs = auth.Attributes
}
util.ApplyCustomHeadersFromAttrs(r, attrs)
}
func claudeCreds(a *cliproxyauth.Auth) (apiKey, baseURL string) {

View File

@@ -585,6 +585,11 @@ func applyCodexHeaders(r *http.Request, auth *cliproxyauth.Auth, token string) {
}
}
}
var attrs map[string]string
if auth != nil {
attrs = auth.Attributes
}
util.ApplyCustomHeadersFromAttrs(r, attrs)
}
func codexCreds(a *cliproxyauth.Auth) (apiKey, baseURL string) {

View File

@@ -495,44 +495,11 @@ func resolveGeminiBaseURL(auth *cliproxyauth.Auth) string {
}
func applyGeminiHeaders(req *http.Request, auth *cliproxyauth.Auth) {
if req == nil {
return
var attrs map[string]string
if auth != nil {
attrs = auth.Attributes
}
headers := geminiCustomHeaders(auth)
if len(headers) == 0 {
return
}
for k, v := range headers {
if k == "" || v == "" {
continue
}
req.Header.Set(k, v)
}
}
func geminiCustomHeaders(auth *cliproxyauth.Auth) map[string]string {
if auth == nil || auth.Attributes == nil {
return nil
}
headers := make(map[string]string, len(auth.Attributes))
for k, v := range auth.Attributes {
if !strings.HasPrefix(k, "header:") {
continue
}
name := strings.TrimSpace(strings.TrimPrefix(k, "header:"))
if name == "" {
continue
}
val := strings.TrimSpace(v)
if val == "" {
continue
}
headers[name] = val
}
if len(headers) == 0 {
return nil
}
return headers
util.ApplyCustomHeadersFromAttrs(req, attrs)
}
func fixGeminiImageAspectRatio(modelName string, rawJSON []byte) []byte {

View File

@@ -0,0 +1,421 @@
// Package executor contains provider executors. This file implements the Vertex AI
// Gemini executor that talks to Google Vertex AI endpoints using service account
// credentials imported by the CLI.
package executor
import (
"bufio"
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
vertexauth "github.com/router-for-me/CLIProxyAPI/v6/internal/auth/vertex"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
cliproxyexecutor "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/executor"
sdktranslator "github.com/router-for-me/CLIProxyAPI/v6/sdk/translator"
log "github.com/sirupsen/logrus"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
"golang.org/x/oauth2/google"
)
const (
// vertexAPIVersion aligns with current public Vertex Generative AI API.
vertexAPIVersion = "v1"
)
// GeminiVertexExecutor sends requests to Vertex AI Gemini endpoints using service account credentials.
type GeminiVertexExecutor struct {
cfg *config.Config
}
// NewGeminiVertexExecutor constructs the Vertex executor.
func NewGeminiVertexExecutor(cfg *config.Config) *GeminiVertexExecutor {
return &GeminiVertexExecutor{cfg: cfg}
}
// Identifier returns provider key for manager routing.
func (e *GeminiVertexExecutor) Identifier() string { return "vertex" }
// PrepareRequest is a no-op for Vertex.
func (e *GeminiVertexExecutor) PrepareRequest(_ *http.Request, _ *cliproxyauth.Auth) error {
return nil
}
// Execute handles non-streaming requests.
func (e *GeminiVertexExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (resp cliproxyexecutor.Response, err error) {
projectID, location, saJSON, errCreds := vertexCreds(auth)
if errCreds != nil {
return resp, errCreds
}
reporter := newUsageReporter(ctx, e.Identifier(), req.Model, auth)
defer reporter.trackFailure(ctx, &err)
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
body := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), false)
if budgetOverride, includeOverride, ok := util.GeminiThinkingFromMetadata(req.Metadata); ok && util.ModelSupportsThinking(req.Model) {
if budgetOverride != nil {
norm := util.NormalizeThinkingBudget(req.Model, *budgetOverride)
budgetOverride = &norm
}
body = util.ApplyGeminiThinkingConfig(body, budgetOverride, includeOverride)
}
body = util.StripThinkingConfigIfUnsupported(req.Model, body)
body = fixGeminiImageAspectRatio(req.Model, body)
action := "generateContent"
if req.Metadata != nil {
if a, _ := req.Metadata["action"].(string); a == "countTokens" {
action = "countTokens"
}
}
baseURL := vertexBaseURL(location)
url := fmt.Sprintf("%s/%s/projects/%s/locations/%s/publishers/google/models/%s:%s", baseURL, vertexAPIVersion, projectID, location, req.Model, action)
if opts.Alt != "" && action != "countTokens" {
url = url + fmt.Sprintf("?$alt=%s", opts.Alt)
}
body, _ = sjson.DeleteBytes(body, "session_id")
httpReq, errNewReq := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body))
if errNewReq != nil {
return resp, errNewReq
}
httpReq.Header.Set("Content-Type", "application/json")
if token, errTok := vertexAccessToken(ctx, saJSON); errTok == nil && token != "" {
httpReq.Header.Set("Authorization", "Bearer "+token)
} else if errTok != nil {
log.Errorf("vertex executor: access token error: %v", errTok)
return resp, statusErr{code: 500, msg: "internal server error"}
}
applyGeminiHeaders(httpReq, auth)
var authID, authLabel, authType, authValue string
if auth != nil {
authID = auth.ID
authLabel = auth.Label
authType, authValue = auth.AccountInfo()
}
recordAPIRequest(ctx, e.cfg, upstreamRequestLog{
URL: url,
Method: http.MethodPost,
Headers: httpReq.Header.Clone(),
Body: body,
Provider: e.Identifier(),
AuthID: authID,
AuthLabel: authLabel,
AuthType: authType,
AuthValue: authValue,
})
httpClient := newProxyAwareHTTPClient(ctx, e.cfg, auth, 0)
httpResp, errDo := httpClient.Do(httpReq)
if errDo != nil {
recordAPIResponseError(ctx, e.cfg, errDo)
return resp, errDo
}
defer func() {
if errClose := httpResp.Body.Close(); errClose != nil {
log.Errorf("vertex executor: close response body error: %v", errClose)
}
}()
recordAPIResponseMetadata(ctx, e.cfg, httpResp.StatusCode, httpResp.Header.Clone())
if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 {
b, _ := io.ReadAll(httpResp.Body)
appendAPIResponseChunk(ctx, e.cfg, b)
log.Debugf("request error, error status: %d, error body: %s", httpResp.StatusCode, summarizeErrorBody(httpResp.Header.Get("Content-Type"), b))
err = statusErr{code: httpResp.StatusCode, msg: string(b)}
return resp, err
}
data, errRead := io.ReadAll(httpResp.Body)
if errRead != nil {
recordAPIResponseError(ctx, e.cfg, errRead)
return resp, errRead
}
appendAPIResponseChunk(ctx, e.cfg, data)
reporter.publish(ctx, parseGeminiUsage(data))
var param any
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, data, &param)
resp = cliproxyexecutor.Response{Payload: []byte(out)}
return resp, nil
}
// ExecuteStream handles SSE streaming for Vertex.
func (e *GeminiVertexExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (stream <-chan cliproxyexecutor.StreamChunk, err error) {
projectID, location, saJSON, errCreds := vertexCreds(auth)
if errCreds != nil {
return nil, errCreds
}
reporter := newUsageReporter(ctx, e.Identifier(), req.Model, auth)
defer reporter.trackFailure(ctx, &err)
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
body := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), true)
if budgetOverride, includeOverride, ok := util.GeminiThinkingFromMetadata(req.Metadata); ok && util.ModelSupportsThinking(req.Model) {
if budgetOverride != nil {
norm := util.NormalizeThinkingBudget(req.Model, *budgetOverride)
budgetOverride = &norm
}
body = util.ApplyGeminiThinkingConfig(body, budgetOverride, includeOverride)
}
body = util.StripThinkingConfigIfUnsupported(req.Model, body)
body = fixGeminiImageAspectRatio(req.Model, body)
baseURL := vertexBaseURL(location)
url := fmt.Sprintf("%s/%s/projects/%s/locations/%s/publishers/google/models/%s:%s", baseURL, vertexAPIVersion, projectID, location, req.Model, "streamGenerateContent")
if opts.Alt == "" {
url = url + "?alt=sse"
} else {
url = url + fmt.Sprintf("?$alt=%s", opts.Alt)
}
body, _ = sjson.DeleteBytes(body, "session_id")
httpReq, errNewReq := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body))
if errNewReq != nil {
return nil, errNewReq
}
httpReq.Header.Set("Content-Type", "application/json")
if token, errTok := vertexAccessToken(ctx, saJSON); errTok == nil && token != "" {
httpReq.Header.Set("Authorization", "Bearer "+token)
} else if errTok != nil {
log.Errorf("vertex executor: access token error: %v", errTok)
return nil, statusErr{code: 500, msg: "internal server error"}
}
applyGeminiHeaders(httpReq, auth)
var authID, authLabel, authType, authValue string
if auth != nil {
authID = auth.ID
authLabel = auth.Label
authType, authValue = auth.AccountInfo()
}
recordAPIRequest(ctx, e.cfg, upstreamRequestLog{
URL: url,
Method: http.MethodPost,
Headers: httpReq.Header.Clone(),
Body: body,
Provider: e.Identifier(),
AuthID: authID,
AuthLabel: authLabel,
AuthType: authType,
AuthValue: authValue,
})
httpClient := newProxyAwareHTTPClient(ctx, e.cfg, auth, 0)
httpResp, errDo := httpClient.Do(httpReq)
if errDo != nil {
recordAPIResponseError(ctx, e.cfg, errDo)
return nil, errDo
}
recordAPIResponseMetadata(ctx, e.cfg, httpResp.StatusCode, httpResp.Header.Clone())
if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 {
b, _ := io.ReadAll(httpResp.Body)
appendAPIResponseChunk(ctx, e.cfg, b)
log.Debugf("request error, error status: %d, error body: %s", httpResp.StatusCode, summarizeErrorBody(httpResp.Header.Get("Content-Type"), b))
if errClose := httpResp.Body.Close(); errClose != nil {
log.Errorf("vertex executor: close response body error: %v", errClose)
}
return nil, statusErr{code: httpResp.StatusCode, msg: string(b)}
}
out := make(chan cliproxyexecutor.StreamChunk)
stream = out
go func() {
defer close(out)
defer func() {
if errClose := httpResp.Body.Close(); errClose != nil {
log.Errorf("vertex executor: close response body error: %v", errClose)
}
}()
scanner := bufio.NewScanner(httpResp.Body)
buf := make([]byte, 20_971_520)
scanner.Buffer(buf, 20_971_520)
var param any
for scanner.Scan() {
line := scanner.Bytes()
appendAPIResponseChunk(ctx, e.cfg, line)
if detail, ok := parseGeminiStreamUsage(line); ok {
reporter.publish(ctx, detail)
}
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, bytes.Clone(line), &param)
for i := range lines {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(lines[i])}
}
}
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, []byte("[DONE]"), &param)
for i := range lines {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(lines[i])}
}
if errScan := scanner.Err(); errScan != nil {
recordAPIResponseError(ctx, e.cfg, errScan)
reporter.publishFailure(ctx)
out <- cliproxyexecutor.StreamChunk{Err: errScan}
}
}()
return stream, nil
}
// CountTokens calls Vertex countTokens endpoint.
func (e *GeminiVertexExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (cliproxyexecutor.Response, error) {
projectID, location, saJSON, errCreds := vertexCreds(auth)
if errCreds != nil {
return cliproxyexecutor.Response{}, errCreds
}
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
translatedReq := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), false)
if budgetOverride, includeOverride, ok := util.GeminiThinkingFromMetadata(req.Metadata); ok && util.ModelSupportsThinking(req.Model) {
if budgetOverride != nil {
norm := util.NormalizeThinkingBudget(req.Model, *budgetOverride)
budgetOverride = &norm
}
translatedReq = util.ApplyGeminiThinkingConfig(translatedReq, budgetOverride, includeOverride)
}
translatedReq = util.StripThinkingConfigIfUnsupported(req.Model, translatedReq)
translatedReq = fixGeminiImageAspectRatio(req.Model, translatedReq)
respCtx := context.WithValue(ctx, "alt", opts.Alt)
translatedReq, _ = sjson.DeleteBytes(translatedReq, "tools")
translatedReq, _ = sjson.DeleteBytes(translatedReq, "generationConfig")
translatedReq, _ = sjson.DeleteBytes(translatedReq, "safetySettings")
baseURL := vertexBaseURL(location)
url := fmt.Sprintf("%s/%s/projects/%s/locations/%s/publishers/google/models/%s:%s", baseURL, vertexAPIVersion, projectID, location, req.Model, "countTokens")
httpReq, errNewReq := http.NewRequestWithContext(respCtx, http.MethodPost, url, bytes.NewReader(translatedReq))
if errNewReq != nil {
return cliproxyexecutor.Response{}, errNewReq
}
httpReq.Header.Set("Content-Type", "application/json")
if token, errTok := vertexAccessToken(ctx, saJSON); errTok == nil && token != "" {
httpReq.Header.Set("Authorization", "Bearer "+token)
} else if errTok != nil {
log.Errorf("vertex executor: access token error: %v", errTok)
return cliproxyexecutor.Response{}, statusErr{code: 500, msg: "internal server error"}
}
applyGeminiHeaders(httpReq, auth)
var authID, authLabel, authType, authValue string
if auth != nil {
authID = auth.ID
authLabel = auth.Label
authType, authValue = auth.AccountInfo()
}
recordAPIRequest(ctx, e.cfg, upstreamRequestLog{
URL: url,
Method: http.MethodPost,
Headers: httpReq.Header.Clone(),
Body: translatedReq,
Provider: e.Identifier(),
AuthID: authID,
AuthLabel: authLabel,
AuthType: authType,
AuthValue: authValue,
})
httpClient := newProxyAwareHTTPClient(ctx, e.cfg, auth, 0)
httpResp, errDo := httpClient.Do(httpReq)
if errDo != nil {
recordAPIResponseError(ctx, e.cfg, errDo)
return cliproxyexecutor.Response{}, errDo
}
defer func() {
if errClose := httpResp.Body.Close(); errClose != nil {
log.Errorf("vertex executor: close response body error: %v", errClose)
}
}()
recordAPIResponseMetadata(ctx, e.cfg, httpResp.StatusCode, httpResp.Header.Clone())
if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 {
b, _ := io.ReadAll(httpResp.Body)
appendAPIResponseChunk(ctx, e.cfg, b)
log.Debugf("request error, error status: %d, error body: %s", httpResp.StatusCode, summarizeErrorBody(httpResp.Header.Get("Content-Type"), b))
return cliproxyexecutor.Response{}, statusErr{code: httpResp.StatusCode, msg: string(b)}
}
data, errRead := io.ReadAll(httpResp.Body)
if errRead != nil {
recordAPIResponseError(ctx, e.cfg, errRead)
return cliproxyexecutor.Response{}, errRead
}
appendAPIResponseChunk(ctx, e.cfg, data)
if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 {
log.Debugf("request error, error status: %d, error body: %s", httpResp.StatusCode, summarizeErrorBody(httpResp.Header.Get("Content-Type"), data))
return cliproxyexecutor.Response{}, statusErr{code: httpResp.StatusCode, msg: string(data)}
}
count := gjson.GetBytes(data, "totalTokens").Int()
out := sdktranslator.TranslateTokenCount(ctx, to, from, count, data)
return cliproxyexecutor.Response{Payload: []byte(out)}, nil
}
// Refresh is a no-op for service account based credentials.
func (e *GeminiVertexExecutor) Refresh(_ context.Context, auth *cliproxyauth.Auth) (*cliproxyauth.Auth, error) {
return auth, nil
}
// vertexCreds extracts project, location and raw service account JSON from auth metadata.
func vertexCreds(a *cliproxyauth.Auth) (projectID, location string, serviceAccountJSON []byte, err error) {
if a == nil || a.Metadata == nil {
return "", "", nil, fmt.Errorf("vertex executor: missing auth metadata")
}
if v, ok := a.Metadata["project_id"].(string); ok {
projectID = strings.TrimSpace(v)
}
if projectID == "" {
// Some service accounts may use "project"; still prefer standard field
if v, ok := a.Metadata["project"].(string); ok {
projectID = strings.TrimSpace(v)
}
}
if projectID == "" {
return "", "", nil, fmt.Errorf("vertex executor: missing project_id in credentials")
}
if v, ok := a.Metadata["location"].(string); ok && strings.TrimSpace(v) != "" {
location = strings.TrimSpace(v)
} else {
location = "us-central1"
}
var sa map[string]any
if raw, ok := a.Metadata["service_account"].(map[string]any); ok {
sa = raw
}
if sa == nil {
return "", "", nil, fmt.Errorf("vertex executor: missing service_account in credentials")
}
normalized, errNorm := vertexauth.NormalizeServiceAccountMap(sa)
if errNorm != nil {
return "", "", nil, fmt.Errorf("vertex executor: %w", errNorm)
}
saJSON, errMarshal := json.Marshal(normalized)
if errMarshal != nil {
return "", "", nil, fmt.Errorf("vertex executor: marshal service_account failed: %w", errMarshal)
}
return projectID, location, saJSON, nil
}
func vertexBaseURL(location string) string {
loc := strings.TrimSpace(location)
if loc == "" {
loc = "us-central1"
}
return fmt.Sprintf("https://%s-aiplatform.googleapis.com", loc)
}
func vertexAccessToken(ctx context.Context, saJSON []byte) (string, error) {
// Use cloud-platform scope for Vertex AI.
creds, errCreds := google.CredentialsFromJSON(ctx, saJSON, "https://www.googleapis.com/auth/cloud-platform")
if errCreds != nil {
return "", fmt.Errorf("vertex executor: parse service account json failed: %w", errCreds)
}
tok, errTok := creds.TokenSource.Token()
if errTok != nil {
return "", fmt.Errorf("vertex executor: get access token failed: %w", errTok)
}
return tok.AccessToken, nil
}
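For orientation, the request URL assembled above for a non-streaming call takes the following shape (project, location, and model are placeholders):

https://us-central1-aiplatform.googleapis.com/v1/projects/my-project/locations/us-central1/publishers/google/models/gemini-2.5-pro:generateContent

Streaming swaps the action for streamGenerateContent and appends ?alt=sse when no alt override is supplied.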

View File

@@ -10,6 +10,7 @@ import (
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
cliproxyexecutor "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/executor"
sdktranslator "github.com/router-for-me/CLIProxyAPI/v6/sdk/translator"
@@ -66,6 +67,11 @@ func (e *OpenAICompatExecutor) Execute(ctx context.Context, auth *cliproxyauth.A
httpReq.Header.Set("Authorization", "Bearer "+apiKey)
}
httpReq.Header.Set("User-Agent", "cli-proxy-openai-compat")
var attrs map[string]string
if auth != nil {
attrs = auth.Attributes
}
util.ApplyCustomHeadersFromAttrs(httpReq, attrs)
var authID, authLabel, authType, authValue string
if auth != nil {
authID = auth.ID
@@ -110,6 +116,8 @@ func (e *OpenAICompatExecutor) Execute(ctx context.Context, auth *cliproxyauth.A
}
appendAPIResponseChunk(ctx, e.cfg, body)
reporter.publish(ctx, parseOpenAIUsage(body))
// Ensure we at least record the request even if upstream doesn't return usage
reporter.ensurePublished(ctx)
// Translate response back to source format when needed
var param any
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), translated, body, &param)
@@ -143,6 +151,11 @@ func (e *OpenAICompatExecutor) ExecuteStream(ctx context.Context, auth *cliproxy
httpReq.Header.Set("Authorization", "Bearer "+apiKey)
}
httpReq.Header.Set("User-Agent", "cli-proxy-openai-compat")
var attrs map[string]string
if auth != nil {
attrs = auth.Attributes
}
util.ApplyCustomHeadersFromAttrs(httpReq, attrs)
httpReq.Header.Set("Accept", "text/event-stream")
httpReq.Header.Set("Cache-Control", "no-cache")
var authID, authLabel, authType, authValue string
@@ -214,6 +227,8 @@ func (e *OpenAICompatExecutor) ExecuteStream(ctx context.Context, auth *cliproxy
reporter.publishFailure(ctx)
out <- cliproxyexecutor.StreamChunk{Err: errScan}
}
// Ensure we record the request if no usage chunk was ever seen
reporter.ensurePublished(ctx)
}()
return stream, nil
}

View File

@@ -84,6 +84,28 @@ func (r *usageReporter) publishWithOutcome(ctx context.Context, detail usage.Det
})
}
// ensurePublished guarantees that a usage record is emitted exactly once.
// It is safe to call multiple times; only the first call wins due to once.Do.
// This is used to ensure request counting even when upstream responses do not
// include any usage fields (tokens), especially for streaming paths.
func (r *usageReporter) ensurePublished(ctx context.Context) {
if r == nil {
return
}
r.once.Do(func() {
usage.PublishRecord(ctx, usage.Record{
Provider: r.provider,
Model: r.model,
Source: r.source,
APIKey: r.apiKey,
AuthID: r.authID,
RequestedAt: r.requestedAt,
Failed: false,
Detail: usage.Detail{},
})
})
}
func apiKeyFromContext(ctx context.Context) string {
if ctx == nil {
return ""

View File

@@ -133,27 +133,16 @@ func ConvertClaudeRequestToOpenAI(modelName string, inputRawJSON []byte, stream
return true
})
// Create main message if there's text content or tool calls
if len(contentItems) > 0 || len(toolCalls) > 0 {
// Emit text/image content as one message
if len(contentItems) > 0 {
msgJSON := `{"role":"","content":""}`
msgJSON, _ = sjson.Set(msgJSON, "role", role)
// Set content
if len(contentItems) > 0 {
contentArrayJSON := "[]"
for _, contentItem := range contentItems {
contentArrayJSON, _ = sjson.SetRaw(contentArrayJSON, "-1", contentItem)
}
msgJSON, _ = sjson.SetRaw(msgJSON, "content", contentArrayJSON)
} else {
msgJSON, _ = sjson.Set(msgJSON, "content", "")
}
// Set tool calls for assistant messages
if role == "assistant" && len(toolCalls) > 0 {
toolCallsJSON, _ := json.Marshal(toolCalls)
msgJSON, _ = sjson.SetRaw(msgJSON, "tool_calls", string(toolCallsJSON))
contentArrayJSON := "[]"
for _, contentItem := range contentItems {
contentArrayJSON, _ = sjson.SetRaw(contentArrayJSON, "-1", contentItem)
}
msgJSON, _ = sjson.SetRaw(msgJSON, "content", contentArrayJSON)
contentValue := gjson.Get(msgJSON, "content")
hasContent := false
@@ -168,11 +157,19 @@ func ConvertClaudeRequestToOpenAI(modelName string, inputRawJSON []byte, stream
hasContent = contentValue.Raw != "" && contentValue.Raw != "null"
}
if hasContent || len(toolCalls) != 0 {
if hasContent {
messagesJSON, _ = sjson.Set(messagesJSON, "-1", gjson.Parse(msgJSON).Value())
}
}
// Emit tool calls in a separate assistant message
if role == "assistant" && len(toolCalls) > 0 {
toolCallMsgJSON := `{"role":"assistant","tool_calls":[]}`
toolCallsJSON, _ := json.Marshal(toolCalls)
toolCallMsgJSON, _ = sjson.SetRaw(toolCallMsgJSON, "tool_calls", string(toolCallsJSON))
messagesJSON, _ = sjson.Set(messagesJSON, "-1", gjson.Parse(toolCallMsgJSON).Value())
}
} else if contentResult.Exists() && contentResult.Type == gjson.String {
// Simple string content
msgJSON := `{"role":"","content":""}`
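The effect of this change, sketched on an invented Claude assistant turn: text and image blocks still land in one assistant message with a content array, while tool calls are now emitted as a separate assistant message carrying only tool_calls. The field layout below follows the OpenAI Chat Completions convention and is illustrative, not copied from the translator:

{"role":"assistant","content":[{"type":"text","text":"Checking the weather."}]}
{"role":"assistant","tool_calls":[{"id":"call_1","type":"function","function":{"name":"get_weather","arguments":"{\"city\":\"Paris\"}"}}]}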

View File

@@ -85,6 +85,58 @@ func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream
var openAIMessages []interface{}
var toolCallIDs []string // Track tool call IDs for matching with tool results
// System instruction -> OpenAI system message
// Gemini may provide `systemInstruction` or `system_instruction`; support both keys.
systemInstruction := root.Get("systemInstruction")
if !systemInstruction.Exists() {
systemInstruction = root.Get("system_instruction")
}
if systemInstruction.Exists() {
parts := systemInstruction.Get("parts")
msg := map[string]interface{}{
"role": "system",
"content": []interface{}{},
}
var aggregatedParts []interface{}
if parts.Exists() && parts.IsArray() {
parts.ForEach(func(_, part gjson.Result) bool {
// Handle text parts
if text := part.Get("text"); text.Exists() {
formattedText := text.String()
aggregatedParts = append(aggregatedParts, map[string]interface{}{
"type": "text",
"text": formattedText,
})
}
// Handle inline data (e.g., images)
if inlineData := part.Get("inlineData"); inlineData.Exists() {
mimeType := inlineData.Get("mimeType").String()
if mimeType == "" {
mimeType = "application/octet-stream"
}
data := inlineData.Get("data").String()
imageURL := fmt.Sprintf("data:%s;base64,%s", mimeType, data)
aggregatedParts = append(aggregatedParts, map[string]interface{}{
"type": "image_url",
"image_url": map[string]interface{}{
"url": imageURL,
},
})
}
return true
})
}
if len(aggregatedParts) > 0 {
msg["content"] = aggregatedParts
openAIMessages = append(openAIMessages, msg)
}
}
if contents := root.Get("contents"); contents.Exists() && contents.IsArray() {
contents.ForEach(func(_, content gjson.Result) bool {
role := content.Get("role").String()

View File

@@ -0,0 +1,52 @@
package util
import (
"net/http"
"strings"
)
// ApplyCustomHeadersFromAttrs applies user-defined headers stored under the "header:" prefix in the provided attributes map.
// Custom headers override built-in defaults when conflicts occur.
func ApplyCustomHeadersFromAttrs(r *http.Request, attrs map[string]string) {
if r == nil {
return
}
applyCustomHeaders(r, extractCustomHeaders(attrs))
}
func extractCustomHeaders(attrs map[string]string) map[string]string {
if len(attrs) == 0 {
return nil
}
headers := make(map[string]string)
for k, v := range attrs {
if !strings.HasPrefix(k, "header:") {
continue
}
name := strings.TrimSpace(strings.TrimPrefix(k, "header:"))
if name == "" {
continue
}
val := strings.TrimSpace(v)
if val == "" {
continue
}
headers[name] = val
}
if len(headers) == 0 {
return nil
}
return headers
}
func applyCustomHeaders(r *http.Request, headers map[string]string) {
if r == nil || len(headers) == 0 {
return
}
for k, v := range headers {
if k == "" || v == "" {
continue
}
r.Header.Set(k, v)
}
}
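
A usage sketch for the new helper (the import path is a placeholder, not the repository's real one): only attributes carrying the header: prefix become request headers, names and values are trimmed, blank values are skipped, and because Header.Set is used a custom header replaces any default that was already present.

package main

import (
	"fmt"
	"net/http"

	// Placeholder import path; adjust to the repository's actual module layout.
	"example.com/cliproxy/internal/util"
)

func main() {
	req, _ := http.NewRequest(http.MethodPost, "https://api.example.com/v1/chat/completions", nil)
	req.Header.Set("Accept", "text/event-stream") // built-in default

	attrs := map[string]string{
		"api_key":         "example-key",      // no "header:" prefix, so it is ignored
		"header:Accept":   "application/json", // replaces the default via Header.Set
		"header:X-Org-Id": "  org-123  ",      // name and value are trimmed
		"header:X-Blank":  "   ",              // blank after trimming, so it is skipped
	}

	util.ApplyCustomHeadersFromAttrs(req, attrs)

	fmt.Println(req.Header.Get("Accept"))   // "application/json"
	fmt.Println(req.Header.Get("X-Org-Id")) // "org-123"
	fmt.Println(req.Header.Get("X-Blank"))  // "" (never set)
}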

View File

@@ -762,16 +762,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
if base != "" {
attrs["base_url"] = base
}
if len(entry.Headers) > 0 {
for hk, hv := range entry.Headers {
key := strings.TrimSpace(hk)
val := strings.TrimSpace(hv)
if key == "" || val == "" {
continue
}
attrs["header:"+key] = val
}
}
addConfigHeadersToAttrs(entry.Headers, attrs)
a := &coreauth.Auth{
ID: id,
Provider: "gemini",
@@ -803,6 +794,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
if hash := computeClaudeModelsHash(ck.Models); hash != "" {
attrs["models_hash"] = hash
}
addConfigHeadersToAttrs(ck.Headers, attrs)
proxyURL := strings.TrimSpace(ck.ProxyURL)
a := &coreauth.Auth{
ID: id,
@@ -831,6 +823,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
if ck.BaseURL != "" {
attrs["base_url"] = ck.BaseURL
}
addConfigHeadersToAttrs(ck.Headers, attrs)
proxyURL := strings.TrimSpace(ck.ProxyURL)
a := &coreauth.Auth{
ID: id,
@@ -873,6 +866,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
if hash := computeOpenAICompatModelsHash(compat.Models); hash != "" {
attrs["models_hash"] = hash
}
addConfigHeadersToAttrs(compat.Headers, attrs)
a := &coreauth.Auth{
ID: id,
Provider: providerName,
@@ -905,6 +899,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
if hash := computeOpenAICompatModelsHash(compat.Models); hash != "" {
attrs["models_hash"] = hash
}
addConfigHeadersToAttrs(compat.Headers, attrs)
a := &coreauth.Auth{
ID: id,
Provider: providerName,
@@ -930,6 +925,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
if hash := computeOpenAICompatModelsHash(compat.Models); hash != "" {
attrs["models_hash"] = hash
}
addConfigHeadersToAttrs(compat.Headers, attrs)
a := &coreauth.Auth{
ID: id,
Provider: providerName,
@@ -1131,13 +1127,16 @@ func describeOpenAICompatibilityUpdate(oldEntry, newEntry config.OpenAICompatibi
newKeyCount := countAPIKeys(newEntry)
oldModelCount := countOpenAIModels(oldEntry.Models)
newModelCount := countOpenAIModels(newEntry.Models)
details := make([]string, 0, 2)
details := make([]string, 0, 3)
if oldKeyCount != newKeyCount {
details = append(details, fmt.Sprintf("api-keys %d -> %d", oldKeyCount, newKeyCount))
}
if oldModelCount != newModelCount {
details = append(details, fmt.Sprintf("models %d -> %d", oldModelCount, newModelCount))
}
if !equalStringMap(oldEntry.Headers, newEntry.Headers) {
details = append(details, "headers updated")
}
if len(details) == 0 {
return ""
}
@@ -1303,6 +1302,9 @@ func buildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
changes = append(changes, fmt.Sprintf("claude[%d].api-key: updated", i))
}
if !equalStringMap(o.Headers, n.Headers) {
changes = append(changes, fmt.Sprintf("claude[%d].headers: updated", i))
}
}
}
@@ -1325,6 +1327,9 @@ func buildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
if strings.TrimSpace(o.APIKey) != strings.TrimSpace(n.APIKey) {
changes = append(changes, fmt.Sprintf("codex[%d].api-key: updated", i))
}
if !equalStringMap(o.Headers, n.Headers) {
changes = append(changes, fmt.Sprintf("codex[%d].headers: updated", i))
}
}
}
@@ -1357,6 +1362,20 @@ func buildConfigChangeDetails(oldCfg, newCfg *config.Config) []string {
return changes
}
func addConfigHeadersToAttrs(headers map[string]string, attrs map[string]string) {
if len(headers) == 0 || attrs == nil {
return
}
for hk, hv := range headers {
key := strings.TrimSpace(hk)
val := strings.TrimSpace(hv)
if key == "" || val == "" {
continue
}
attrs["header:"+key] = val
}
}
func trimStrings(in []string) []string {
out := make([]string, len(in))
for i := range in {
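
A small sketch of the attribute convention the helper above establishes (surrounding config types are simplified; only the helper's logic is copied so the sketch runs standalone): per-entry Headers from the config are flattened into the auth attributes under a header: prefix, which is exactly the shape util.ApplyCustomHeadersFromAttrs consumes when the outgoing request is built.

package main

import (
	"fmt"
	"strings"
)

// Copy of the package-private helper above, repeated here for a runnable example.
func addConfigHeadersToAttrs(headers map[string]string, attrs map[string]string) {
	if len(headers) == 0 || attrs == nil {
		return
	}
	for hk, hv := range headers {
		key := strings.TrimSpace(hk)
		val := strings.TrimSpace(hv)
		if key == "" || val == "" {
			continue
		}
		attrs["header:"+key] = val
	}
}

func main() {
	attrs := map[string]string{"api_key": "example-key", "base_url": "https://api.example.com"}
	headers := map[string]string{" X-Org-Id ": " org-123 ", "X-Blank": "  "}

	addConfigHeadersToAttrs(headers, attrs)
	fmt.Println(attrs["header:X-Org-Id"]) // "org-123"; X-Blank was dropped
}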

View File

@@ -305,6 +305,12 @@ func (s *Service) ensureExecutorsForAuth(a *coreauth.Auth) {
if s == nil || a == nil {
return
}
// Skip disabled auth entries when (re)binding executors.
// Disabled auths can linger during config reloads (e.g., removed OpenAI-compat entries)
// and must not override active provider executors (such as iFlow OAuth accounts).
if a.Disabled {
return
}
if compatProviderKey, _, isCompat := openAICompatInfoFromAuth(a); isCompat {
if compatProviderKey == "" {
compatProviderKey = strings.ToLower(strings.TrimSpace(a.Provider))
@@ -318,6 +324,8 @@ func (s *Service) ensureExecutorsForAuth(a *coreauth.Auth) {
switch strings.ToLower(a.Provider) {
case "gemini":
s.coreManager.RegisterExecutor(executor.NewGeminiExecutor(s.cfg))
case "vertex":
s.coreManager.RegisterExecutor(executor.NewGeminiVertexExecutor(s.cfg))
case "gemini-cli":
s.coreManager.RegisterExecutor(executor.NewGeminiCLIExecutor(s.cfg))
case "aistudio":
@@ -613,6 +621,9 @@ func (s *Service) registerModelsForAuth(a *coreauth.Auth) {
switch provider {
case "gemini":
models = registry.GetGeminiModels()
case "vertex":
// Vertex AI Gemini supports the same model identifiers as Gemini.
models = registry.GetGeminiModels()
case "gemini-cli":
models = registry.GetGeminiCLIModels()
case "aistudio":
@@ -738,7 +749,7 @@ func (s *Service) resolveConfigClaudeKey(auth *coreauth.Auth) *config.ClaudeKey
continue
}
if attrKey != "" && strings.EqualFold(cfgKey, attrKey) {
if attrBase == "" || cfgBase == "" || strings.EqualFold(cfgBase, attrBase) {
if cfgBase == "" || strings.EqualFold(cfgBase, attrBase) {
return entry
}
}
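
A brief sketch of the tightened matching rule at the end of this hunk (the helper below simply restates the new condition; the surrounding lookup code is omitted): an empty base URL on the config entry still acts as a wildcard, but a config entry pinned to a specific base URL no longer matches an auth whose base-url attribute is empty, which the old attrBase == "" clause allowed.

package main

import (
	"fmt"
	"strings"
)

// matches restates the updated condition: cfgBase == "" || strings.EqualFold(cfgBase, attrBase).
func matches(cfgBase, attrBase string) bool {
	return cfgBase == "" || strings.EqualFold(cfgBase, attrBase)
}

func main() {
	fmt.Println(matches("", "https://claude.example.com"))                           // true: empty config base is a wildcard
	fmt.Println(matches("https://claude.example.com", ""))                           // false: matched before this change
	fmt.Println(matches("https://claude.example.com", "https://CLAUDE.example.com")) // true: comparison is case-insensitive
}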