Mirror of https://github.com/router-for-me/CLIProxyAPI.git (synced 2026-02-02 12:30:50 +08:00)

Compare commits (27 commits)
| SHA1 |
|---|
| f6e713ab6b |
| 1834c65116 |
| fc6aa8ef77 |
| c3f88126e6 |
| b895018ff5 |
| 9c6832cc22 |
| 1ada33ab1d |
| 78738ca3f0 |
| ac01c74c02 |
| 02e28bbbe9 |
| b9c7b9eea5 |
| 57195fa0f5 |
| 11f090c223 |
| 829dd06b42 |
| 20787cd107 |
| 1aa568ce45 |
| b2cdbbdd47 |
| 8056af42a3 |
| 01be94a0de |
| d1933075c3 |
| a602ae859b |
| c5d7137d66 |
| d45ebff66b |
| d6f671250e |
| 6d822cf309 |
| d03a75dba5 |
| 9ff21b67a8 |
.github/workflows/homebrew.yml (vendored, 16 changed lines)

@@ -1,16 +0,0 @@
name: Bump Homebrew formula

on:
  push:
    tags: '*'

jobs:
  homebrew:
    name: Bump Homebrew formula
    runs-on: ubuntu-latest
    steps:
      - name: Update Homebrew Formula
        uses: dawidd6/action-homebrew-bump-formula@v5
        with:
          token: ${{ secrets.HOMEBREW_TOKEN }}
          formula: cliproxyapi
LICENSE (3 changed lines)

@@ -1,6 +1,7 @@
MIT License

Copyright (c) 2025 Luis Pater
Copyright (c) 2025-2025.9 Luis Pater
Copyright (c) 2025.9-present Router-For.ME

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -639,19 +639,6 @@ These endpoints initiate provider login flows and return a URL to open in a browser
    { "status": "ok", "url": "https://..." }
    ```

- POST `/gemini-web-token` — Save Gemini Web cookies directly
  - Request:
    ```bash
    curl -H 'Authorization: Bearer <MANAGEMENT_KEY>' \
      -H 'Content-Type: application/json' \
      -d '{"secure_1psid": "<__Secure-1PSID>", "secure_1psidts": "<__Secure-1PSIDTS>", "label": "<LABEL>"}' \
      http://localhost:8317/v0/management/gemini-web-token
    ```
  - Response:
    ```json
    { "status": "ok", "file": "gemini-web-<hash>.json" }
    ```

- GET `/qwen-auth-url` — Start Qwen login (device flow)
  - Request:
    ```bash
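The same request can be issued from Go instead of curl. This is a minimal sketch, not code from the repository; the URL, header, and JSON fields follow the documented endpoint above, and the management key and cookie values are placeholders.

```go
package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
)

func main() {
    // Body fields mirror the documented request payload.
    body, _ := json.Marshal(map[string]string{
        "secure_1psid":   "<__Secure-1PSID>",
        "secure_1psidts": "<__Secure-1PSIDTS>",
        "label":          "<LABEL>",
    })

    req, err := http.NewRequest(http.MethodPost,
        "http://localhost:8317/v0/management/gemini-web-token", bytes.NewReader(body))
    if err != nil {
        panic(err)
    }
    req.Header.Set("Authorization", "Bearer <MANAGEMENT_KEY>")
    req.Header.Set("Content-Type", "application/json")

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    // Expected response shape: { "status": "ok", "file": "gemini-web-<hash>.json" }
    var out map[string]string
    if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
        panic(err)
    }
    fmt.Println(out["status"], out["file"])
}
```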
@@ -639,19 +639,6 @@
    { "status": "ok", "url": "https://..." }
    ```

- POST `/gemini-web-token` — Save Gemini Web cookies directly
  - Request:
    ```bash
    curl -H 'Authorization: Bearer <MANAGEMENT_KEY>' \
      -H 'Content-Type: application/json' \
      -d '{"secure_1psid": "<__Secure-1PSID>", "secure_1psidts": "<__Secure-1PSIDTS>", "label": "<LABEL>"}' \
      http://localhost:8317/v0/management/gemini-web-token
    ```
  - Response:
    ```json
    { "status": "ok", "file": "gemini-web-<hash>.json" }
    ```

- GET `/qwen-auth-url` — Start Qwen login (device authorization flow)
  - Request:
    ```bash
README.md (42 changed lines)

@@ -17,7 +17,6 @@ Chinese providers have now been added: [Qwen Code](https://github.com/QwenLM/qwe
- Claude Code support via OAuth login
- Qwen Code support via OAuth login
- iFlow support via OAuth login
- Gemini Web support via cookie-based login
- Streaming and non-streaming responses
- Function calling/tools support
- Multimodal input support (text and images)

@@ -62,6 +61,12 @@ Chinese providers have now been added: [Qwen Code](https://github.com/QwenLM/qwe
go build -o cli-proxy-api.exe ./cmd/server
```

### Installation via Homebrew

```bash
brew install cliproxyapi
brew services start cliproxyapi
```

## Usage

@@ -93,13 +98,6 @@ You can authenticate for Gemini, OpenAI, Claude, Qwen, and/or iFlow. All can coe

  Options: add `--no-browser` to print the login URL instead of opening a browser. The local OAuth callback uses port `8085`.

- Gemini Web (via Cookies):
  This method authenticates by simulating a browser, using cookies obtained from the Gemini website.
  ```bash
  ./cli-proxy-api --gemini-web-auth
  ```
  You will be prompted to enter your `__Secure-1PSID` and `__Secure-1PSIDTS` values. Please retrieve these cookies from your browser's developer tools.

- OpenAI (Codex/GPT via OAuth):
  ```bash
  ./cli-proxy-api --codex-login
@@ -258,6 +256,7 @@ console.log(await claudeResponse.json());
- gemini-2.5-pro
- gemini-2.5-flash
- gemini-2.5-flash-lite
- gemini-2.5-flash-image
- gemini-2.5-flash-image-preview
- gpt-5
- gpt-5-codex

@@ -277,6 +276,7 @@ console.log(await claudeResponse.json());
- deepseek-v3
- kimi-k2
- glm-4.5
- glm-4.6
- tstars2.0
- And other iFlow-supported models
- Gemini models auto-switch to preview variants when needed
@@ -326,11 +326,6 @@ The server uses a YAML configuration file (`config.yaml`) located in the project
| `openai-compatibility.*.models` | object[] | [] | The models supported by the provider. |
| `openai-compatibility.*.models.*.name` | string | "" | The actual model name. |
| `openai-compatibility.*.models.*.alias` | string | "" | The alias used in the API. |
| `gemini-web` | object | {} | Configuration specific to the Gemini Web client. |
| `gemini-web.context` | boolean | true | Enables conversation context reuse for continuous dialogue. |
| `gemini-web.gem-mode` | string | "" | Selects a predefined Gem to attach for Gemini Web requests; allowed values: `coding-partner`, `writing-editor`. When empty, no Gem is attached. |
| `gemini-web.max-chars-per-request` | integer | 1,000,000 | The maximum number of characters to send to Gemini Web in a single request. |
| `gemini-web.disable-continuation-hint` | boolean | false | Disables the continuation hint for split prompts. |
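To make `gemini-web.max-chars-per-request` and `gemini-web.disable-continuation-hint` concrete, here is an illustrative Go sketch of how an oversized prompt could be split into several requests, with a hint appended to every chunk except the last. This is not the project's implementation; the function name and hint text are invented for the example.

```go
package main

import "fmt"

// splitPrompt splits prompt into chunks of at most maxChars runes.
// Unless disableHint is set, each non-final chunk gets a short continuation hint.
func splitPrompt(prompt string, maxChars int, disableHint bool) []string {
    const hint = "\n[to be continued]" // hypothetical hint text
    if maxChars <= 0 {
        return []string{prompt}
    }
    runes := []rune(prompt)
    var chunks []string
    for len(runes) > 0 {
        n := maxChars
        if n > len(runes) {
            n = len(runes)
        }
        chunk := string(runes[:n])
        runes = runes[n:]
        if len(runes) > 0 && !disableHint {
            // A real implementation would reserve room for the hint inside the budget.
            chunk += hint
        }
        chunks = append(chunks, chunk)
    }
    return chunks
}

func main() {
    chunks := splitPrompt("a long prompt that exceeds the per-request limit", 24, false)
    for i, c := range chunks {
        fmt.Printf("request %d: %q\n", i+1, c)
    }
}
```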

### Example Configuration File

@@ -355,6 +350,11 @@ remote-management:
# Authentication directory (supports ~ for home directory). If you use Windows, please set the directory like this: `C:/cli-proxy-api/`
auth-dir: "~/.cli-proxy-api"

# API keys for authentication
api-keys:
  - "your-api-key-1"
  - "your-api-key-2"

# Enable debug logging
debug: false

@@ -375,12 +375,6 @@ quota-exceeded:
  switch-project: true # Whether to automatically switch to another project when a quota is exceeded
  switch-preview-model: true # Whether to automatically switch to a preview model when a quota is exceeded

# Gemini Web client configuration
gemini-web:
  context: true # Enable conversation context reuse
  gem-mode: "" # Select Gem: "coding-partner" or "writing-editor"; empty means no Gem
  max-chars-per-request: 1000000 # Max characters per request

# API keys for official Generative Language API
generative-language-api-key:
  - "AIzaSy...01"
@@ -591,12 +585,6 @@ Run the following command to login (Gemini OAuth on port 8085):
docker run --rm -p 8085:8085 -v /path/to/your/config.yaml:/CLIProxyAPI/config.yaml -v /path/to/your/auth-dir:/root/.cli-proxy-api eceasy/cli-proxy-api:latest /CLIProxyAPI/CLIProxyAPI --login
```

Run the following command to login (Gemini Web Cookies):

```bash
docker run -it --rm -v /path/to/your/config.yaml:/CLIProxyAPI/config.yaml -v /path/to/your/auth-dir:/root/.cli-proxy-api eceasy/cli-proxy-api:latest /CLIProxyAPI/CLIProxyAPI --gemini-web-auth
```

Run the following command to login (OpenAI OAuth on port 1455):

```bash

@@ -667,10 +655,6 @@ docker run --rm -p 8317:8317 -v /path/to/your/config.yaml:/CLIProxyAPI/config.ya
  ```bash
  docker compose exec cli-proxy-api /CLIProxyAPI/CLIProxyAPI -no-browser --login
  ```
- **Gemini Web**:
  ```bash
  docker compose exec cli-proxy-api /CLIProxyAPI/CLIProxyAPI --gemini-web-auth
  ```
- **OpenAI (Codex)**:
  ```bash
  docker compose exec cli-proxy-api /CLIProxyAPI/CLIProxyAPI -no-browser --codex-login
README_CN.md (43 changed lines)

@@ -37,7 +37,6 @@
- Added Claude Code support (OAuth login)
- Added Qwen Code support (OAuth login)
- Added iFlow support (OAuth login)
- Added Gemini Web support (cookie-based login)
- Streaming and non-streaming responses
- Function calling/tools support
- Multimodal input (text and images)

@@ -76,6 +75,13 @@
go build -o cli-proxy-api ./cmd/server
```

### Installation via Homebrew

```bash
brew install cliproxyapi
brew services start cliproxyapi
```

## Usage

### GUI Clients and the Official WebUI
@@ -106,13 +112,6 @@ The web-based management center for CLIProxyAPI.

  Options: add `--no-browser` to print the login URL instead of opening a browser automatically. The local OAuth callback uses port `8085`.

- Gemini Web (via Cookies):
  This method authenticates by simulating a browser, using cookies obtained from the Gemini website.
  ```bash
  ./cli-proxy-api --gemini-web-auth
  ```
  You will be prompted to enter your `__Secure-1PSID` and `__Secure-1PSIDTS` values. Please retrieve these cookies from your browser's developer tools.

- OpenAI (Codex/GPT, OAuth):
  ```bash
  ./cli-proxy-api --codex-login
@@ -270,6 +269,7 @@ console.log(await claudeResponse.json());
- gemini-2.5-pro
- gemini-2.5-flash
- gemini-2.5-flash-lite
- gemini-2.5-flash-image
- gemini-2.5-flash-image-preview
- gpt-5
- gpt-5-codex

@@ -289,6 +289,7 @@ console.log(await claudeResponse.json());
- deepseek-v3
- kimi-k2
- glm-4.5
- glm-4.6
- tstars2.0
- And other iFlow-supported models
- Gemini models auto-switch to the corresponding preview variants when needed
@@ -338,11 +339,6 @@ console.log(await claudeResponse.json());
| `openai-compatibility.*.models` | object[] | [] | The models supported by the provider. |
| `openai-compatibility.*.models.*.name` | string | "" | The actual model name. |
| `openai-compatibility.*.models.*.alias` | string | "" | The alias used in the API. |
| `gemini-web` | object | {} | Configuration specific to the Gemini Web client. |
| `gemini-web.context` | boolean | true | Whether to enable conversation context reuse for continuous dialogue. |
| `gemini-web.gem-mode` | string | "" | Selects a predefined Gem to attach (`coding-partner` or `writing-editor`); empty means no Gem is attached. |
| `gemini-web.max-chars-per-request` | integer | 1,000,000 | The maximum number of characters sent to Gemini Web in a single request. |
| `gemini-web.disable-continuation-hint` | boolean | false | Whether to disable the continuation hint when a prompt is split. |

### Example Configuration File

@@ -366,6 +362,11 @@ remote-management:
# Authentication directory (supports ~ for the home directory). On Windows, it is recommended to set it like `C:/cli-proxy-api/`.
auth-dir: "~/.cli-proxy-api"

# API keys used for request authentication
api-keys:
  - "your-api-key-1"
  - "your-api-key-2"

# Enable debug logging
debug: false

@@ -387,12 +388,6 @@ quota-exceeded:
  switch-project: true # Whether to automatically switch to another project when the quota is exceeded
  switch-preview-model: true # Whether to automatically switch to a preview model when the quota is exceeded

# Gemini Web client configuration
gemini-web:
  context: true # Enable conversation context reuse
  gem-mode: "" # Select Gem: "coding-partner" or "writing-editor"; empty means no Gem is attached
  max-chars-per-request: 1000000 # Max characters per request

# API keys for the AI Studio Gemini API
generative-language-api-key:
  - "AIzaSy...01"
@@ -599,12 +594,6 @@ auth.json:
docker run --rm -p 8085:8085 -v /path/to/your/config.yaml:/CLIProxyAPI/config.yaml -v /path/to/your/auth-dir:/root/.cli-proxy-api eceasy/cli-proxy-api:latest /CLIProxyAPI/CLIProxyAPI --login
```

Run the following command to log in (Gemini Web cookies):

```bash
docker run -it --rm -v /path/to/your/config.yaml:/CLIProxyAPI/config.yaml -v /path/to/your/auth-dir:/root/.cli-proxy-api eceasy/cli-proxy-api:latest /CLIProxyAPI/CLIProxyAPI --gemini-web-auth
```

Run the following command to log in (OpenAI OAuth on port 1455):

```bash

@@ -676,10 +665,6 @@ docker run --rm -p 8317:8317 -v /path/to/your/config.yaml:/CLIProxyAPI/config.ya
  ```bash
  docker compose exec cli-proxy-api /CLIProxyAPI/CLIProxyAPI -no-browser --login
  ```
- **Gemini Web**:
  ```bash
  docker compose exec cli-proxy-api /CLIProxyAPI/CLIProxyAPI --gemini-web-auth
  ```
- **OpenAI (Codex)**:
  ```bash
  docker compose exec cli-proxy-api /CLIProxyAPI/CLIProxyAPI -no-browser --codex-login
@@ -44,7 +44,6 @@ func main() {
    var claudeLogin bool
    var qwenLogin bool
    var iflowLogin bool
    var geminiWebAuth bool
    var noBrowser bool
    var projectID string
    var configPath string

@@ -56,7 +55,6 @@ func main() {
    flag.BoolVar(&claudeLogin, "claude-login", false, "Login to Claude using OAuth")
    flag.BoolVar(&qwenLogin, "qwen-login", false, "Login to Qwen using OAuth")
    flag.BoolVar(&iflowLogin, "iflow-login", false, "Login to iFlow using OAuth")
    flag.BoolVar(&geminiWebAuth, "gemini-web-auth", false, "Auth Gemini Web using cookies")
    flag.BoolVar(&noBrowser, "no-browser", false, "Don't open browser automatically for OAuth")
    flag.StringVar(&projectID, "project_id", "", "Project ID (Gemini only, not required)")
    flag.StringVar(&configPath, "config", DefaultConfigPath, "Configure File Path")

@@ -96,6 +94,14 @@ func main() {
    var err error
    var cfg *config.Config
    var wd string
    var isCloudDeploy bool

    // Check for cloud deploy mode only on first execution
    // Read env var name in uppercase: DEPLOY
    deployEnv := os.Getenv("DEPLOY")
    if deployEnv == "cloud" {
        isCloudDeploy = true
    }

    // Determine and load the configuration file.
    // If a config path is provided via flags, it is used directly.

@@ -103,18 +109,34 @@ func main() {
    var configFilePath string
    if configPath != "" {
        configFilePath = configPath
        cfg, err = config.LoadConfig(configPath)
        cfg, err = config.LoadConfigOptional(configPath, isCloudDeploy)
    } else {
        wd, err = os.Getwd()
        if err != nil {
            log.Fatalf("failed to get working directory: %v", err)
        }
        configFilePath = filepath.Join(wd, "config.yaml")
        cfg, err = config.LoadConfig(configFilePath)
        cfg, err = config.LoadConfigOptional(configFilePath, isCloudDeploy)
    }
    if err != nil {
        log.Fatalf("failed to load config: %v", err)
    }

    // Log if we're running without a config file in cloud deploy mode
    var configFileExists bool
    if isCloudDeploy {
        if info, errStat := os.Stat(configFilePath); errStat != nil {
            // Don't mislead: API server will not start until configuration is provided.
            log.Info("Cloud deploy mode: No configuration file detected; standing by for configuration (API server not started)")
            configFileExists = false
        } else if info.IsDir() {
            log.Info("Cloud deploy mode: Config path is a directory; standing by for configuration (API server not started)")
            configFileExists = false
        } else {
            log.Info("Cloud deploy mode: Configuration file detected; starting service")
            configFileExists = true
        }
    }
    usage.SetStatisticsEnabled(cfg.UsageStatisticsEnabled)

    if err = logging.ConfigureLogOutput(cfg.LoggingToFile); err != nil {

@@ -158,9 +180,13 @@ func main() {
        cmd.DoQwenLogin(cfg, options)
    } else if iflowLogin {
        cmd.DoIFlowLogin(cfg, options)
    } else if geminiWebAuth {
        cmd.DoGeminiWebAuth(cfg)
    } else {
        // In cloud deploy mode without config file, just wait for shutdown signals
        if isCloudDeploy && !configFileExists {
            // No config file available, just wait for shutdown
            cmd.WaitForCloudDeploy()
            return
        }
        // Start the main proxy service
        cmd.StartService(cfg, configFilePath, password)
    }
@@ -79,19 +79,3 @@ quota-exceeded:
#    models: # The models supported by the provider.
#      - name: "moonshotai/kimi-k2:free" # The actual model name.
#        alias: "kimi-k2" # The alias used in the API.

# Gemini Web settings
#gemini-web:
#  # Conversation reuse: set to true to enable (default), false to disable.
#  context: true
#  # Maximum characters per single request to Gemini Web. Requests exceeding this
#  # size are split into chunks. Only the last chunk carries files and yields the final answer.
#  max-chars-per-request: 1000000
#  # Disable the short continuation hint appended to intermediate chunks
#  # when splitting long prompts. Default is false (the hint is enabled by default).
#  disable-continuation-hint: false
#  # Gem selection (Gem Mode):
#  # - "coding-partner": attach the predefined Coding partner Gem
#  # - "writing-editor": attach the predefined Writing editor Gem
#  # - empty: do not attach any Gem
#  gem-mode: ""
@@ -10,6 +10,8 @@ services:
        COMMIT: ${COMMIT:-none}
        BUILD_DATE: ${BUILD_DATE:-unknown}
    container_name: cli-proxy-api
    environment:
      DEPLOY: ${DEPLOY:-}
    ports:
      - "8317:8317"
      - "8085:8085"
@@ -51,9 +51,10 @@ func ReconcileProviders(oldCfg, newCfg *config.Config, existing []sdkaccess.Prov
            continue
        }

        forceRebuild := strings.EqualFold(strings.TrimSpace(providerCfg.Type), sdkConfig.AccessProviderTypeConfigAPIKey)
        if oldCfgProvider, ok := oldCfgMap[key]; ok {
            isAliased := oldCfgProvider == providerCfg
            if !isAliased && providerConfigEqual(oldCfgProvider, providerCfg) {
            if !forceRebuild && !isAliased && providerConfigEqual(oldCfgProvider, providerCfg) {
                if existingProvider, okExisting := existingMap[key]; okExisting {
                    result = append(result, existingProvider)
                    finalIDs[key] = struct{}{}
@@ -3,8 +3,6 @@ package management
import (
    "bytes"
    "context"
    "crypto/sha256"
    "encoding/hex"
    "encoding/json"
    "errors"
    "fmt"

@@ -25,7 +23,6 @@ import (
    geminiAuth "github.com/router-for-me/CLIProxyAPI/v6/internal/auth/gemini"
    iflowauth "github.com/router-for-me/CLIProxyAPI/v6/internal/auth/iflow"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/auth/qwen"
    // legacy client removed
    "github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/misc"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/util"
@@ -903,65 +900,6 @@ func (h *Handler) RequestGeminiCLIToken(c *gin.Context) {
    c.JSON(200, gin.H{"status": "ok", "url": authURL, "state": state})
}

func (h *Handler) CreateGeminiWebToken(c *gin.Context) {
    ctx := c.Request.Context()

    var payload struct {
        Secure1PSID   string `json:"secure_1psid"`
        Secure1PSIDTS string `json:"secure_1psidts"`
        Label         string `json:"label"`
    }
    if err := c.ShouldBindJSON(&payload); err != nil {
        c.JSON(http.StatusBadRequest, gin.H{"error": "invalid body"})
        return
    }
    payload.Secure1PSID = strings.TrimSpace(payload.Secure1PSID)
    payload.Secure1PSIDTS = strings.TrimSpace(payload.Secure1PSIDTS)
    payload.Label = strings.TrimSpace(payload.Label)
    if payload.Secure1PSID == "" {
        c.JSON(http.StatusBadRequest, gin.H{"error": "secure_1psid is required"})
        return
    }
    if payload.Secure1PSIDTS == "" {
        c.JSON(http.StatusBadRequest, gin.H{"error": "secure_1psidts is required"})
        return
    }
    if payload.Label == "" {
        c.JSON(http.StatusBadRequest, gin.H{"error": "label is required"})
        return
    }

    sha := sha256.New()
    sha.Write([]byte(payload.Secure1PSID))
    hash := hex.EncodeToString(sha.Sum(nil))
    fileName := fmt.Sprintf("gemini-web-%s.json", hash[:16])

    tokenStorage := &geminiAuth.GeminiWebTokenStorage{
        Secure1PSID:   payload.Secure1PSID,
        Secure1PSIDTS: payload.Secure1PSIDTS,
        Label:         payload.Label,
    }
    // Provide a stable label (gemini-web-<hash>) for logging and identification
    tokenStorage.Label = strings.TrimSuffix(fileName, ".json")

    record := &coreauth.Auth{
        ID:       fileName,
        Provider: "gemini-web",
        FileName: fileName,
        Storage:  tokenStorage,
    }

    savedPath, errSave := h.saveTokenRecord(ctx, record)
    if errSave != nil {
        log.Errorf("Failed to save Gemini Web token: %v", errSave)
        c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to save token"})
        return
    }

    fmt.Printf("Successfully saved Gemini Web token to: %s\n", savedPath)
    c.JSON(http.StatusOK, gin.H{"status": "ok", "file": filepath.Base(savedPath)})
}

func (h *Handler) RequestCodexToken(c *gin.Context) {
    ctx := context.Background()

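For reference, the removed handler above derives the auth file name from the `__Secure-1PSID` value: the SHA-256 of the cookie, truncated to the first 16 hex characters. The short sketch below (not repository code) reproduces just that derivation so the `gemini-web-<hash>.json` name returned by the management API is easy to verify; the cookie value is a placeholder.

```go
package main

import (
    "crypto/sha256"
    "encoding/hex"
    "fmt"
)

// geminiWebFileName mirrors the naming scheme used by CreateGeminiWebToken above.
func geminiWebFileName(secure1psid string) string {
    sum := sha256.Sum256([]byte(secure1psid))
    return fmt.Sprintf("gemini-web-%s.json", hex.EncodeToString(sum[:])[:16])
}

func main() {
    fmt.Println(geminiWebFileName("example-psid-value"))
}
```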
@@ -6,6 +6,7 @@ package middleware
import (
    "bytes"
    "io"
    "strings"

    "github.com/gin-gonic/gin"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/logging"

@@ -17,6 +18,12 @@ import (
// logger, the middleware has minimal overhead.
func RequestLoggingMiddleware(logger logging.RequestLogger) gin.HandlerFunc {
    return func(c *gin.Context) {
        path := c.Request.URL.Path
        if strings.HasPrefix(path, "/v0/management") || path == "/keep-alive" {
            c.Next()
            return
        }

        // Early return if logging is disabled (zero overhead)
        if !logger.IsEnabled() {
            c.Next()
@@ -419,7 +419,6 @@ func (s *Server) registerManagementRoutes() {
    mgmt.GET("/anthropic-auth-url", s.mgmt.RequestAnthropicToken)
    mgmt.GET("/codex-auth-url", s.mgmt.RequestCodexToken)
    mgmt.GET("/gemini-cli-auth-url", s.mgmt.RequestGeminiCLIToken)
    mgmt.POST("/gemini-web-token", s.mgmt.CreateGeminiWebToken)
    mgmt.GET("/qwen-auth-url", s.mgmt.RequestQwenToken)
    mgmt.GET("/iflow-auth-url", s.mgmt.RequestIFlowToken)
    mgmt.GET("/get-auth-status", s.mgmt.GetAuthStatus)
@@ -1,64 +0,0 @@
// Package gemini provides authentication and token management functionality
// for Google's Gemini AI services. It handles OAuth2 token storage, serialization,
// and retrieval for maintaining authenticated sessions with the Gemini API.
package gemini

import (
    "encoding/json"
    "fmt"
    "os"
    "path/filepath"
    "strings"
    "time"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/misc"
    log "github.com/sirupsen/logrus"
)

// GeminiWebTokenStorage stores cookie information for Google Gemini Web authentication.
type GeminiWebTokenStorage struct {
    Secure1PSID   string `json:"secure_1psid"`
    Secure1PSIDTS string `json:"secure_1psidts"`
    Type          string `json:"type"`
    LastRefresh   string `json:"last_refresh,omitempty"`
    // Label is a stable account identifier used for logging, e.g. "gemini-web-<hash>".
    // It is derived from the auth file name when not explicitly set.
    Label string `json:"label,omitempty"`
}

// SaveTokenToFile serializes the Gemini Web token storage to a JSON file.
func (ts *GeminiWebTokenStorage) SaveTokenToFile(authFilePath string) error {
    misc.LogSavingCredentials(authFilePath)
    ts.Type = "gemini-web"
    // Auto-derive a stable label from the file name if missing.
    if ts.Label == "" {
        base := filepath.Base(authFilePath)
        if strings.HasSuffix(strings.ToLower(base), ".json") {
            base = strings.TrimSuffix(base, filepath.Ext(base))
        }
        if base != "" {
            ts.Label = base
        }
    }
    if ts.LastRefresh == "" {
        ts.LastRefresh = time.Now().Format(time.RFC3339)
    }
    if err := os.MkdirAll(filepath.Dir(authFilePath), 0700); err != nil {
        return fmt.Errorf("failed to create directory: %v", err)
    }

    f, err := os.Create(authFilePath)
    if err != nil {
        return fmt.Errorf("failed to create token file: %w", err)
    }
    defer func() {
        if errClose := f.Close(); errClose != nil {
            log.Errorf("failed to close file: %v", errClose)
        }
    }()

    if err = json.NewEncoder(f).Encode(ts); err != nil {
        return fmt.Errorf("failed to write token to file: %w", err)
    }
    return nil
}
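For reference, the auth file written by `SaveTokenToFile` is plain JSON following the struct tags above. The sketch below redefines the struct locally (so it runs outside the repository) and prints the shape of the saved file; the cookie values are placeholders.

```go
package main

import (
    "encoding/json"
    "fmt"
    "time"
)

// Same field names and JSON tags as GeminiWebTokenStorage above.
type geminiWebToken struct {
    Secure1PSID   string `json:"secure_1psid"`
    Secure1PSIDTS string `json:"secure_1psidts"`
    Type          string `json:"type"`
    LastRefresh   string `json:"last_refresh,omitempty"`
    Label         string `json:"label,omitempty"`
}

func main() {
    t := geminiWebToken{
        Secure1PSID:   "<__Secure-1PSID>",
        Secure1PSIDTS: "<__Secure-1PSIDTS>",
        Type:          "gemini-web",
        LastRefresh:   time.Now().Format(time.RFC3339),
        Label:         "gemini-web-<hash>",
    }
    out, _ := json.MarshalIndent(t, "", "  ")
    fmt.Println(string(out))
}
```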
@@ -1,197 +0,0 @@
// Package cmd provides command-line interface functionality for the CLI Proxy API.
package cmd

import (
    "bufio"
    "context"
    "crypto/sha256"
    "encoding/hex"
    "encoding/json"
    "fmt"
    "net/http"
    "os"
    "runtime"
    "strings"
    "time"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/auth/gemini"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/config"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/util"
    sdkAuth "github.com/router-for-me/CLIProxyAPI/v6/sdk/auth"
    coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

// banner prints a simple ASCII banner for clarity without ANSI colors.
func banner(title string) {
    line := strings.Repeat("=", len(title)+8)
    fmt.Println(line)
    fmt.Println("=== " + title + " ===")
    fmt.Println(line)
}

// DoGeminiWebAuth handles the process of creating a Gemini Web token file.
// New flow:
// 1. Prompt user to paste the full cookie string.
// 2. Extract __Secure-1PSID and __Secure-1PSIDTS from the cookie string.
// 3. Call https://accounts.google.com/ListAccounts with the cookie to obtain email.
// 4. Save auth file with the same structure, and set Label to the email.
func DoGeminiWebAuth(cfg *config.Config) {
    var secure1psid, secure1psidts, email string

    reader := bufio.NewReader(os.Stdin)
    isMacOS := strings.HasPrefix(runtime.GOOS, "darwin")
    cookieProvided := false
    banner("Gemini Web Cookie Sign-in")
    if !isMacOS {
        // NOTE: Provide extra guidance for macOS users or anyone unsure about retrieving cookies.
        fmt.Println("--- Cookie Input ---")
        fmt.Println(">> Paste your full Google Cookie and press Enter")
        fmt.Println("Tip: If you are on macOS, or don't know how to get the cookie, just press Enter and follow the prompts.")
        fmt.Print("Cookie: ")
        rawCookie, _ := reader.ReadString('\n')
        rawCookie = strings.TrimSpace(rawCookie)
        if rawCookie == "" {
            // Skip cookie-based parsing; fall back to manual field prompts.
            fmt.Println("==> No cookie provided. Proceeding with manual input.")
        } else {
            cookieProvided = true
            // Parse K=V cookie pairs separated by ';'
            cookieMap := make(map[string]string)
            parts := strings.Split(rawCookie, ";")
            for _, p := range parts {
                p = strings.TrimSpace(p)
                if p == "" {
                    continue
                }
                if eq := strings.Index(p, "="); eq > 0 {
                    k := strings.TrimSpace(p[:eq])
                    v := strings.TrimSpace(p[eq+1:])
                    if k != "" {
                        cookieMap[k] = v
                    }
                }
            }
            secure1psid = strings.TrimSpace(cookieMap["__Secure-1PSID"])
            secure1psidts = strings.TrimSpace(cookieMap["__Secure-1PSIDTS"])

            // Build HTTP client with proxy settings respected.
            httpClient := &http.Client{Timeout: 15 * time.Second}
            httpClient = util.SetProxy(&cfg.SDKConfig, httpClient)

            // Request ListAccounts to extract email as label (use POST per upstream behavior).
            req, err := http.NewRequest(http.MethodPost, "https://accounts.google.com/ListAccounts", nil)
            if err != nil {
                fmt.Println("!! Failed to create request:", err)
            } else {
                req.Header.Set("Cookie", rawCookie)
                req.Header.Set("Accept", "application/json, text/plain, */*")
                req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36")
                req.Header.Set("Origin", "https://accounts.google.com")
                req.Header.Set("Content-Type", "application/x-www-form-urlencoded;charset=UTF-8")

                resp, errDo := httpClient.Do(req)
                if errDo != nil {
                    fmt.Println("!! Request to ListAccounts failed:", errDo)
                } else {
                    defer func() { _ = resp.Body.Close() }()
                    if resp.StatusCode != http.StatusOK {
                        fmt.Printf("!! ListAccounts returned status code: %d\n", resp.StatusCode)
                    } else {
                        var payload []any
                        if err = json.NewDecoder(resp.Body).Decode(&payload); err != nil {
                            fmt.Println("!! Failed to parse ListAccounts response:", err)
                        } else {
                            // Expected structure like: ["gaia.l.a.r", [["gaia.l.a",1,"Name","email@example.com", ... ]]]
                            if len(payload) >= 2 {
                                if accounts, ok := payload[1].([]any); ok && len(accounts) >= 1 {
                                    if first, ok1 := accounts[0].([]any); ok1 && len(first) >= 4 {
                                        if em, ok2 := first[3].(string); ok2 {
                                            email = strings.TrimSpace(em)
                                        }
                                    }
                                }
                            }
                            if email == "" {
                                fmt.Println("!! Failed to parse email from ListAccounts response")
                            }
                        }
                    }
                }
            }
        }
    }

    // Fallback: prompt user to input missing values
    if secure1psid == "" {
        if cookieProvided && !isMacOS {
            fmt.Println("!! Cookie missing __Secure-1PSID.")
        }
        fmt.Print("Enter __Secure-1PSID: ")
        v, _ := reader.ReadString('\n')
        secure1psid = strings.TrimSpace(v)
    }
    if secure1psidts == "" {
        if cookieProvided && !isMacOS {
            fmt.Println("!! Cookie missing __Secure-1PSIDTS.")
        }
        fmt.Print("Enter __Secure-1PSIDTS: ")
        v, _ := reader.ReadString('\n')
        secure1psidts = strings.TrimSpace(v)
    }
    if secure1psid == "" || secure1psidts == "" {
        // Use print instead of logger to avoid log redirection.
        fmt.Println("!! __Secure-1PSID and __Secure-1PSIDTS cannot be empty")
        return
    }
    if isMacOS {
        fmt.Print("Enter your account email: ")
        v, _ := reader.ReadString('\n')
        email = strings.TrimSpace(v)
    }

    // Generate a filename based on the SHA256 hash of the PSID
    hasher := sha256.New()
    hasher.Write([]byte(secure1psid))
    hash := hex.EncodeToString(hasher.Sum(nil))
    fileName := fmt.Sprintf("gemini-web-%s.json", hash[:16])

    // Decide label: prefer email; fallback prompt then file name without .json
    defaultLabel := strings.TrimSuffix(fileName, ".json")
    label := email
    if label == "" {
        fmt.Print(fmt.Sprintf("Enter label for this auth (default: %s): ", defaultLabel))
        v, _ := reader.ReadString('\n')
        v = strings.TrimSpace(v)
        if v != "" {
            label = v
        } else {
            label = defaultLabel
        }
    }

    tokenStorage := &gemini.GeminiWebTokenStorage{
        Secure1PSID:   secure1psid,
        Secure1PSIDTS: secure1psidts,
        Label:         label,
    }
    record := &coreauth.Auth{
        ID:       fileName,
        Provider: "gemini-web",
        FileName: fileName,
        Storage:  tokenStorage,
    }
    store := sdkAuth.GetTokenStore()
    if cfg != nil {
        if dirSetter, ok := store.(interface{ SetBaseDir(string) }); ok {
            dirSetter.SetBaseDir(cfg.AuthDir)
        }
    }
    savedPath, err := store.Save(context.Background(), record)
    if err != nil {
        fmt.Println("!! Failed to save Gemini Web token to file:", err)
        return
    }

    fmt.Println("==> Successfully saved Gemini Web token!")
    fmt.Println("==> Saved to:", savedPath)
}
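The removed `DoGeminiWebAuth` extracts the account email from the ListAccounts response by walking a nested JSON array. The following self-contained sketch (not part of the repository) runs the same index walk against the example payload quoted in the code comment above.

```go
package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    // Shape taken from the comment in DoGeminiWebAuth: ["gaia.l.a.r", [["gaia.l.a", 1, "Name", "email@example.com", ...]]]
    raw := `["gaia.l.a.r", [["gaia.l.a", 1, "Name", "email@example.com"]]]`

    var payload []any
    if err := json.Unmarshal([]byte(raw), &payload); err != nil {
        panic(err)
    }

    email := ""
    if len(payload) >= 2 {
        if accounts, ok := payload[1].([]any); ok && len(accounts) >= 1 {
            if first, ok := accounts[0].([]any); ok && len(first) >= 4 {
                email, _ = first[3].(string)
            }
        }
    }
    fmt.Println(email) // prints: email@example.com
}
```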
@@ -53,3 +53,17 @@ func StartService(cfg *config.Config, configPath string, localPassword string) {
        log.Fatalf("proxy service exited with error: %v", err)
    }
}

// WaitForCloudDeploy waits indefinitely for shutdown signals in cloud deploy mode
// when no configuration file is available.
func WaitForCloudDeploy() {
    // Clarify that we are intentionally idle for configuration and not running the API server.
    log.Info("Cloud deploy mode: No config found; standing by for configuration. API server is not started. Press Ctrl+C to exit.")

    ctxSignal, cancel := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)
    defer cancel()

    // Block until shutdown signal is received
    <-ctxSignal.Done()
    log.Info("Cloud deploy mode: Shutdown signal received; exiting")
}
@@ -5,8 +5,10 @@
package config

import (
    "errors"
    "fmt"
    "os"
    "syscall"

    "github.com/router-for-me/CLIProxyAPI/v6/sdk/config"
    "golang.org/x/crypto/bcrypt"

@@ -51,42 +53,6 @@ type Config struct {

    // RemoteManagement nests management-related options under 'remote-management'.
    RemoteManagement RemoteManagement `yaml:"remote-management" json:"-"`

    // GeminiWeb groups configuration for Gemini Web client
    GeminiWeb GeminiWebConfig `yaml:"gemini-web" json:"gemini-web"`
}

// GeminiWebConfig nests Gemini Web related options under 'gemini-web'.
type GeminiWebConfig struct {
    // Context enables JSON-based conversation reuse.
    // Defaults to true if not set in YAML (see LoadConfig).
    Context bool `yaml:"context" json:"context"`

    // GemMode selects a predefined Gem to attach for Gemini Web requests.
    // Allowed values:
    //   - "coding-partner"
    //   - "writing-editor"
    // When empty, no Gem is attached by configuration.
    // This is independent from CodeMode below, which is kept for backwards compatibility.
    GemMode string `yaml:"gem-mode" json:"gem-mode"`

    // CodeMode enables legacy coding-mode behaviors for Gemini Web.
    // Backwards compatibility: when true, the service behaves as before by
    // attaching the predefined "Coding partner" Gem and enabling extra
    // conveniences (e.g., XML wrapping hints). Prefer GemMode for selecting
    // a Gem going forward.
    CodeMode bool `yaml:"code-mode" json:"code-mode"`

    // MaxCharsPerRequest caps the number of characters (runes) sent to
    // Gemini Web in a single request. Long prompts will be split into
    // multiple requests with a continuation hint, and only the final
    // request will carry any files. When unset or <=0, a conservative
    // default of 1,000,000 will be used.
    MaxCharsPerRequest int `yaml:"max-chars-per-request" json:"max-chars-per-request"`

    // DisableContinuationHint, when true, disables the continuation hint for split prompts.
    // The hint is enabled by default.
    DisableContinuationHint bool `yaml:"disable-continuation-hint,omitempty" json:"disable-continuation-hint,omitempty"`
}

// RemoteManagement holds management API configuration under 'remote-management'.

@@ -187,19 +153,40 @@ type OpenAICompatibilityModel struct {
//   - *Config: The loaded configuration
//   - error: An error if the configuration could not be loaded
func LoadConfig(configFile string) (*Config, error) {
    return LoadConfigOptional(configFile, false)
}

// LoadConfigOptional reads YAML from configFile.
// If optional is true and the file is missing, it returns an empty Config.
// If optional is true and the file is empty or invalid, it returns an empty Config.
func LoadConfigOptional(configFile string, optional bool) (*Config, error) {
    // Read the entire configuration file into memory.
    data, err := os.ReadFile(configFile)
    if err != nil {
        if optional {
            if os.IsNotExist(err) || errors.Is(err, syscall.EISDIR) {
                // Missing and optional: return empty config (cloud deploy standby).
                return &Config{}, nil
            }
        }
        return nil, fmt.Errorf("failed to read config file: %w", err)
    }

    // In cloud deploy mode (optional=true), if file is empty or contains only whitespace, return empty config.
    if optional && len(data) == 0 {
        return &Config{}, nil
    }

    // Unmarshal the YAML data into the Config struct.
    var cfg Config
    // Set defaults before unmarshal so that absent keys keep defaults.
    cfg.LoggingToFile = true
    cfg.UsageStatisticsEnabled = true
    cfg.GeminiWeb.Context = true
    if err = yaml.Unmarshal(data, &cfg); err != nil {
        if optional {
            // In cloud deploy mode, if YAML parsing fails, return empty config instead of error.
            return &Config{}, nil
        }
        return nil, fmt.Errorf("failed to parse config file: %w", err)
    }

@@ -10,9 +10,6 @@ const (
    // GeminiCLI represents the Google Gemini CLI provider identifier.
    GeminiCLI = "gemini-cli"

    // GeminiWeb represents the Google Gemini Web provider identifier.
    GeminiWeb = "gemini-web"

    // Codex represents the OpenAI Codex provider identifier.
    Codex = "codex"

@@ -12,6 +12,7 @@ import (
    "os"
    "path/filepath"
    "strings"
    "sync"
    "time"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/util"

@@ -23,11 +24,17 @@ const (
    managementReleaseURL = "https://api.github.com/repos/router-for-me/Cli-Proxy-API-Management-Center/releases/latest"
    managementAssetName  = "management.html"
    httpUserAgent        = "CLIProxyAPI-management-updater"
    updateCheckInterval  = 3 * time.Hour
)

// ManagementFileName exposes the control panel asset filename.
const ManagementFileName = managementAssetName

var (
    lastUpdateCheckMu   sync.Mutex
    lastUpdateCheckTime time.Time
)

func newHTTPClient(proxyURL string) *http.Client {
    client := &http.Client{Timeout: 15 * time.Second}

@@ -68,6 +75,7 @@ func FilePath(configFilePath string) string {

// EnsureLatestManagementHTML checks the latest management.html asset and updates the local copy when needed.
// The function is designed to run in a background goroutine and will never panic.
// It enforces a 3-hour rate limit to avoid frequent checks on config/auth file changes.
func EnsureLatestManagementHTML(ctx context.Context, staticDir string, proxyURL string) {
    if ctx == nil {
        ctx = context.Background()

@@ -79,6 +87,18 @@ func EnsureLatestManagementHTML(ctx context.Context, staticDir string, proxyURL
        return
    }

    // Rate limiting: check only once every 3 hours
    lastUpdateCheckMu.Lock()
    now := time.Now()
    timeSinceLastCheck := now.Sub(lastUpdateCheckTime)
    if timeSinceLastCheck < updateCheckInterval {
        lastUpdateCheckMu.Unlock()
        log.Debugf("management asset update check skipped: last check was %v ago (interval: %v)", timeSinceLastCheck.Round(time.Second), updateCheckInterval)
        return
    }
    lastUpdateCheckTime = now
    lastUpdateCheckMu.Unlock()

    if err := os.MkdirAll(staticDir, 0o755); err != nil {
        log.WithError(err).Warn("failed to prepare static directory for management asset")
        return
@@ -1,884 +0,0 @@
package geminiwebapi

import (
    "crypto/tls"
    "encoding/json"
    "errors"
    "fmt"
    "io"
    "net/http"
    "net/http/cookiejar"
    "net/url"
    "regexp"
    "strings"
    "time"

    log "github.com/sirupsen/logrus"
)

// GeminiClient is the async http client interface (Go port)
type GeminiClient struct {
    Cookies     map[string]string
    Proxy       string
    Running     bool
    httpClient  *http.Client
    AccessToken string
    Timeout     time.Duration
    insecure    bool
}

// HTTP bootstrap utilities -------------------------------------------------
type httpOptions struct {
    ProxyURL        string
    Insecure        bool
    FollowRedirects bool
}

func newHTTPClient(opts httpOptions) *http.Client {
    transport := &http.Transport{}
    if opts.ProxyURL != "" {
        if pu, err := url.Parse(opts.ProxyURL); err == nil {
            transport.Proxy = http.ProxyURL(pu)
        }
    }
    if opts.Insecure {
        transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
    }
    jar, _ := cookiejar.New(nil)
    client := &http.Client{Transport: transport, Timeout: 60 * time.Second, Jar: jar}
    if !opts.FollowRedirects {
        client.CheckRedirect = func(req *http.Request, via []*http.Request) error {
            return http.ErrUseLastResponse
        }
    }
    return client
}

func applyHeaders(req *http.Request, headers http.Header) {
    for k, v := range headers {
        for _, vv := range v {
            req.Header.Add(k, vv)
        }
    }
}

func applyCookies(req *http.Request, cookies map[string]string) {
    for k, v := range cookies {
        req.AddCookie(&http.Cookie{Name: k, Value: v})
    }
}

func sendInitRequest(cookies map[string]string, proxy string, insecure bool) (*http.Response, map[string]string, error) {
    client := newHTTPClient(httpOptions{ProxyURL: proxy, Insecure: insecure, FollowRedirects: true})
    req, _ := http.NewRequest(http.MethodGet, EndpointInit, nil)
    applyHeaders(req, HeadersGemini)
    applyCookies(req, cookies)
    resp, err := client.Do(req)
    if err != nil {
        return nil, nil, err
    }
    if resp.StatusCode < 200 || resp.StatusCode >= 300 {
        return resp, nil, &AuthError{Msg: resp.Status}
    }
    outCookies := map[string]string{}
    for _, c := range resp.Cookies() {
        outCookies[c.Name] = c.Value
    }
    for k, v := range cookies {
        outCookies[k] = v
    }
    return resp, outCookies, nil
}

func getAccessToken(baseCookies map[string]string, proxy string, verbose bool, insecure bool) (string, map[string]string, error) {
    extraCookies := map[string]string{}
    {
        client := newHTTPClient(httpOptions{ProxyURL: proxy, Insecure: insecure, FollowRedirects: true})
        req, _ := http.NewRequest(http.MethodGet, EndpointGoogle, nil)
        resp, err := client.Do(req)
        if err != nil {
            if verbose {
                log.Debugf("priming google cookies failed: %v", err)
            }
        } else if resp != nil {
            if u, err := url.Parse(EndpointGoogle); err == nil {
                for _, c := range client.Jar.Cookies(u) {
                    extraCookies[c.Name] = c.Value
                }
            }
            _ = resp.Body.Close()
        }
    }

    trySets := make([]map[string]string, 0, 8)

    if v1, ok1 := baseCookies["__Secure-1PSID"]; ok1 {
        if v2, ok2 := baseCookies["__Secure-1PSIDTS"]; ok2 {
            merged := map[string]string{"__Secure-1PSID": v1, "__Secure-1PSIDTS": v2}
            if nid, ok := baseCookies["NID"]; ok {
                merged["NID"] = nid
            }
            trySets = append(trySets, merged)
        } else if verbose {
            log.Debug("Skipping base cookies: __Secure-1PSIDTS missing")
        }
    }

    if len(extraCookies) > 0 {
        trySets = append(trySets, extraCookies)
    }

    reToken := regexp.MustCompile(`"SNlM0e":"([^"]+)"`)

    for _, cookies := range trySets {
        resp, mergedCookies, err := sendInitRequest(cookies, proxy, insecure)
        if err != nil {
            if verbose {
                log.Warnf("Failed init request: %v", err)
            }
            continue
        }
        body, err := io.ReadAll(resp.Body)
        _ = resp.Body.Close()
        if err != nil {
            return "", nil, err
        }
        matches := reToken.FindStringSubmatch(string(body))
        if len(matches) >= 2 {
            token := matches[1]
            if verbose {
                fmt.Println("Gemini access token acquired.")
            }
            return token, mergedCookies, nil
        }
    }
    return "", nil, &AuthError{Msg: "Failed to retrieve token."}
}

func rotate1PSIDTS(cookies map[string]string, proxy string, insecure bool) (string, error) {
    _, ok := cookies["__Secure-1PSID"]
    if !ok {
        return "", &AuthError{Msg: "__Secure-1PSID missing"}
    }

    // Reuse shared HTTP client helper for consistency.
    client := newHTTPClient(httpOptions{ProxyURL: proxy, Insecure: insecure, FollowRedirects: true})

    req, _ := http.NewRequest(http.MethodPost, EndpointRotateCookies, strings.NewReader("[000,\"-0000000000000000000\"]"))
    applyHeaders(req, HeadersRotateCookies)
    applyCookies(req, cookies)

    resp, err := client.Do(req)
    if err != nil {
        return "", err
    }
    defer func() {
        _ = resp.Body.Close()
    }()

    if resp.StatusCode == http.StatusUnauthorized {
        return "", &AuthError{Msg: "unauthorized"}
    }
    if resp.StatusCode < 200 || resp.StatusCode >= 300 {
        return "", errors.New(resp.Status)
    }

    for _, c := range resp.Cookies() {
        if c.Name == "__Secure-1PSIDTS" {
            return c.Value, nil
        }
    }
    // Fallback: check cookie jar in case the Set-Cookie was on a redirect hop
    if u, err := url.Parse(EndpointRotateCookies); err == nil && client.Jar != nil {
        for _, c := range client.Jar.Cookies(u) {
            if c.Name == "__Secure-1PSIDTS" && c.Value != "" {
                return c.Value, nil
            }
        }
    }
    return "", nil
}

// MaskToken28 masks a sensitive token for safe logging. Keep middle partially visible.
func MaskToken28(s string) string {
    n := len(s)
    if n == 0 {
        return ""
    }
    if n < 20 {
        return strings.Repeat("*", n)
    }
    midStart := n/2 - 2
    if midStart < 8 {
        midStart = 8
    }
    if midStart+4 > n-8 {
        midStart = n - 8 - 4
        if midStart < 8 {
            midStart = 8
        }
    }
    prefixByte := s[:8]
    middle := s[midStart : midStart+4]
    suffix := s[n-8:]
    return prefixByte + strings.Repeat("*", 4) + middle + strings.Repeat("*", 4) + suffix
}

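A quick usage sketch for MaskToken28 (not part of the original file): for a 28-character ASCII token the function keeps the first 8 and last 8 characters plus 4 characters around the middle, replacing the rest with asterisks. The function body is copied from the logic above so the example runs standalone.

```go
package main

import (
    "fmt"
    "strings"
)

// maskToken28 mirrors the MaskToken28 logic shown above.
func maskToken28(s string) string {
    n := len(s)
    if n == 0 {
        return ""
    }
    if n < 20 {
        return strings.Repeat("*", n)
    }
    midStart := n/2 - 2
    if midStart < 8 {
        midStart = 8
    }
    if midStart+4 > n-8 {
        midStart = n - 8 - 4
        if midStart < 8 {
            midStart = 8
        }
    }
    return s[:8] + strings.Repeat("*", 4) + s[midStart:midStart+4] + strings.Repeat("*", 4) + s[n-8:]
}

func main() {
    fmt.Println(maskToken28("abcdefghijklmnopqrstuvwxyz12"))
    // Output: abcdefgh****mnop****uvwxyz12
}
```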
var NanoBananaModel = map[string]struct{}{
    "gemini-2.5-flash-image-web": {},
}

// NewGeminiClient creates a client. Pass empty strings to auto-detect via browser cookies (not implemented in Go port).
func NewGeminiClient(secure1psid string, secure1psidts string, proxy string, opts ...func(*GeminiClient)) *GeminiClient {
    c := &GeminiClient{
        Cookies:  map[string]string{},
        Proxy:    proxy,
        Running:  false,
        Timeout:  300 * time.Second,
        insecure: false,
    }
    if secure1psid != "" {
        c.Cookies["__Secure-1PSID"] = secure1psid
        if secure1psidts != "" {
            c.Cookies["__Secure-1PSIDTS"] = secure1psidts
        }
    }
    for _, f := range opts {
        f(c)
    }
    return c
}

// WithInsecureTLS sets skipping TLS verification (to mirror httpx verify=False)
func WithInsecureTLS(insecure bool) func(*GeminiClient) {
    return func(c *GeminiClient) { c.insecure = insecure }
}

// Init initializes the access token and http client.
func (c *GeminiClient) Init(timeoutSec float64, verbose bool) error {
    // get access token
    token, validCookies, err := getAccessToken(c.Cookies, c.Proxy, verbose, c.insecure)
    if err != nil {
        c.Close(0)
        return err
    }
    c.AccessToken = token
    c.Cookies = validCookies

    tr := &http.Transport{}
    if c.Proxy != "" {
        if pu, errParse := url.Parse(c.Proxy); errParse == nil {
            tr.Proxy = http.ProxyURL(pu)
        }
    }
    if c.insecure {
        // set via roundtripper in utils_get_access_token for token; here we reuse via default Transport
        // intentionally not adding here, as requests rely on endpoints with normal TLS
    }
    c.httpClient = &http.Client{Transport: tr, Timeout: time.Duration(timeoutSec * float64(time.Second))}
    c.Running = true

    c.Timeout = time.Duration(timeoutSec * float64(time.Second))
    if verbose {
        fmt.Println("Gemini client initialized successfully.")
    }
    return nil
}

func (c *GeminiClient) Close(delaySec float64) {
    if delaySec > 0 {
        time.Sleep(time.Duration(delaySec * float64(time.Second)))
    }
    c.Running = false
}

// ensureRunning mirrors the decorator behavior and retries on APIError.
func (c *GeminiClient) ensureRunning() error {
    if c.Running {
        return nil
    }
    return c.Init(float64(c.Timeout/time.Second), false)
}

// RotateTS performs a RotateCookies request and returns the new __Secure-1PSIDTS value (if any).
func (c *GeminiClient) RotateTS() (string, error) {
    if c == nil {
        return "", fmt.Errorf("gemini web client is nil")
    }
    return rotate1PSIDTS(c.Cookies, c.Proxy, c.insecure)
}

// GenerateContent sends a prompt (with optional files) and parses the response into ModelOutput.
func (c *GeminiClient) GenerateContent(prompt string, files []string, model Model, gem *Gem, chat *ChatSession) (ModelOutput, error) {
    var empty ModelOutput
    if prompt == "" {
        return empty, &ValueError{Msg: "Prompt cannot be empty."}
    }
    if err := c.ensureRunning(); err != nil {
        return empty, err
    }

    // Retry wrapper similar to decorator (retry=2)
    retries := 2
    for {
        out, err := c.generateOnce(prompt, files, model, gem, chat)
        if err == nil {
            return out, nil
        }
        var apiErr *APIError
        var imgErr *ImageGenerationError
        shouldRetry := false
        if errors.As(err, &imgErr) {
            if retries > 1 {
                retries = 1
            } // only once for image generation
            shouldRetry = true
        } else if errors.As(err, &apiErr) {
            shouldRetry = true
        }
        if shouldRetry && retries > 0 {
            time.Sleep(time.Second)
            retries--
            continue
        }
        return empty, err
    }
}

func ensureAnyLen(slice []any, index int) []any {
    if index < len(slice) {
        return slice
    }
    gap := index + 1 - len(slice)
    return append(slice, make([]any, gap)...)
}

func (c *GeminiClient) generateOnce(prompt string, files []string, model Model, gem *Gem, chat *ChatSession) (ModelOutput, error) {
    var empty ModelOutput
    // Build f.req
    var uploaded [][]any
    for _, fp := range files {
        id, err := uploadFile(fp, c.Proxy, c.insecure)
        if err != nil {
            return empty, err
        }
        name, err := parseFileName(fp)
        if err != nil {
            return empty, err
        }
        uploaded = append(uploaded, []any{[]any{id}, name})
    }
    var item0 any
    if len(uploaded) > 0 {
        item0 = []any{prompt, 0, nil, uploaded}
    } else {
        item0 = []any{prompt}
    }
    var item2 any = nil
    if chat != nil {
        item2 = chat.Metadata()
    }

    inner := []any{item0, nil, item2}
    // Attach Gem first to keep index alignment with reference implementation
    // so the Gemini Web UI can recognize the selected Gem.
    if gem != nil {
        // pad with 16 nils then gem ID
        for i := 0; i < 16; i++ {
            inner = append(inner, nil)
        }
        inner = append(inner, gem.ID)
    }
    requestedModel := strings.ToLower(model.Name)
    if chat != nil && chat.RequestedModel() != "" {
        requestedModel = chat.RequestedModel()
    }
    if _, ok := NanoBananaModel[requestedModel]; ok {
        inner = ensureAnyLen(inner, 49)
        inner[49] = 14
    }
    innerJSON, _ := json.Marshal(inner)
    outer := []any{nil, string(innerJSON)}
    outerJSON, _ := json.Marshal(outer)

    // form
    form := url.Values{}
    form.Set("at", c.AccessToken)
    form.Set("f.req", string(outerJSON))

    req, _ := http.NewRequest(http.MethodPost, EndpointGenerate, strings.NewReader(form.Encode()))
    applyHeaders(req, HeadersGemini)
    applyHeaders(req, model.ModelHeader)
    req.Header.Set("Content-Type", "application/x-www-form-urlencoded;charset=utf-8")
    applyCookies(req, c.Cookies)

    resp, err := c.httpClient.Do(req)
    if err != nil {
        return empty, &TimeoutError{GeminiError{Msg: "Generate content request timed out."}}
    }
    defer func() {
        _ = resp.Body.Close()
    }()

    if resp.StatusCode == 429 {
        // Surface 429 as TemporarilyBlocked to match reference behavior
        c.Close(0)
        return empty, &TemporarilyBlocked{GeminiError{Msg: "Too many requests. IP temporarily blocked."}}
    }
    if resp.StatusCode != 200 {
        c.Close(0)
        return empty, &APIError{Msg: fmt.Sprintf("Failed to generate contents. Status %d", resp.StatusCode)}
    }

    // Read body and split lines; take the 3rd line (index 2)
    b, _ := io.ReadAll(resp.Body)
    parts := strings.Split(string(b), "\n")
    if len(parts) < 3 {
        c.Close(0)
        return empty, &APIError{Msg: "Invalid response data received."}
    }
    var responseJSON []any
    if err = json.Unmarshal([]byte(parts[2]), &responseJSON); err != nil {
        c.Close(0)
        return empty, &APIError{Msg: "Invalid response data received."}
    }

    // find body where main_part[4] exists
    var (
        body      any
        bodyIndex int
    )
    for i, p := range responseJSON {
        arr, ok := p.([]any)
        if !ok || len(arr) < 3 {
            continue
        }
        s, ok := arr[2].(string)
        if !ok {
            continue
        }
        var mainPart []any
        if err = json.Unmarshal([]byte(s), &mainPart); err != nil {
            continue
        }
        if len(mainPart) > 4 && mainPart[4] != nil {
            body = mainPart
            bodyIndex = i
            break
        }
    }
    if body == nil {
        // Fallback: scan subsequent lines to locate a data frame with a non-empty body (mainPart[4]).
        var lastTop []any
        for li := 3; li < len(parts) && body == nil; li++ {
            line := strings.TrimSpace(parts[li])
            if line == "" {
                continue
            }
            var top []any
            if err = json.Unmarshal([]byte(line), &top); err != nil {
                continue
            }
            lastTop = top
            for i, p := range top {
                arr, ok := p.([]any)
                if !ok || len(arr) < 3 {
                    continue
                }
                s, ok := arr[2].(string)
                if !ok {
                    continue
                }
                var mainPart []any
                if err = json.Unmarshal([]byte(s), &mainPart); err != nil {
                    continue
                }
                if len(mainPart) > 4 && mainPart[4] != nil {
                    body = mainPart
                    bodyIndex = i
                    responseJSON = top
                    break
                }
            }
        }
        // Parse nested error code to align with error mapping
        var top []any
        // Prefer lastTop from fallback scan; otherwise try parts[2]
        if len(lastTop) > 0 {
            top = lastTop
        } else {
            _ = json.Unmarshal([]byte(parts[2]), &top)
        }
        if len(top) > 0 {
            if code, ok := extractErrorCode(top); ok {
                switch code {
                case ErrorUsageLimitExceeded:
                    return empty, &UsageLimitExceeded{GeminiError{Msg: fmt.Sprintf("Failed to generate contents. Usage limit of %s has exceeded. Please try switching to another model.", model.Name)}}
                case ErrorModelInconsistent:
                    return empty, &ModelInvalid{GeminiError{Msg: "Selected model is inconsistent or unavailable."}}
                case ErrorModelHeaderInvalid:
                    return empty, &APIError{Msg: "Invalid model header string. Please update the selected model header."}
                case ErrorIPTemporarilyBlocked:
                    return empty, &TemporarilyBlocked{GeminiError{Msg: "Too many requests. IP temporarily blocked."}}
                }
            }
        }
        // Debug("Invalid response: control frames only; no body found")
        // Close the client to force re-initialization on next request (parity with reference client behavior)
        c.Close(0)
        return empty, &APIError{Msg: "Failed to generate contents. Invalid response data received."}
    }

    bodyArr := body.([]any)
    // metadata
    var metadata []string
    if len(bodyArr) > 1 {
        if metaArr, ok := bodyArr[1].([]any); ok {
            for _, v := range metaArr {
                if s, isOk := v.(string); isOk {
                    metadata = append(metadata, s)
                }
            }
        }
    }

    // candidates parsing
    candContainer, ok := bodyArr[4].([]any)
    if !ok {
        return empty, &APIError{Msg: "Failed to parse response body."}
    }
    candidates := make([]Candidate, 0, len(candContainer))
    reCard := regexp.MustCompile(`^http://googleusercontent\.com/card_content/\d+`)
    reGen := regexp.MustCompile(`http://googleusercontent\.com/image_generation_content/\d+`)

    for ci, candAny := range candContainer {
        cArr, isOk := candAny.([]any)
        if !isOk {
            continue
        }
        // text: cArr[1][0]
        var text string
        if len(cArr) > 1 {
            if sArr, isOk1 := cArr[1].([]any); isOk1 && len(sArr) > 0 {
                text, _ = sArr[0].(string)
            }
        }
        if reCard.MatchString(text) {
            // candidate[22] and candidate[22][0] or text
            if len(cArr) > 22 {
                if arr, isOk1 := cArr[22].([]any); isOk1 && len(arr) > 0 {
                    if s, isOk2 := arr[0].(string); isOk2 {
                        text = s
                    }
                }
            }
        }

        // thoughts: candidate[37][0][0]
        var thoughts *string
        if len(cArr) > 37 {
            if a, ok1 := cArr[37].([]any); ok1 && len(a) > 0 {
                if b1, ok2 := a[0].([]any); ok2 && len(b1) > 0 {
                    if s, ok3 := b1[0].(string); ok3 {
                        ss := decodeHTML(s)
                        thoughts = &ss
                    }
                }
            }
        }

        // web images: candidate[12][1]
        var webImages []WebImage
        var imgSection any
        if len(cArr) > 12 {
            imgSection = cArr[12]
        }
        if arr, ok1 := imgSection.([]any); ok1 && len(arr) > 1 {
            if imagesArr, ok2 := arr[1].([]any); ok2 {
                for _, wiAny := range imagesArr {
                    wiArr, ok3 := wiAny.([]any)
                    if !ok3 {
                        continue
                    }
                    // url: wiArr[0][0][0], title: wiArr[7][0], alt: wiArr[0][4]
                    var urlStr, title, alt string
                    if len(wiArr) > 0 {
                        if a, ok5 := wiArr[0].([]any); ok5 && len(a) > 0 {
                            if b1, ok6 := a[0].([]any); ok6 && len(b1) > 0 {
                                urlStr, _ = b1[0].(string)
                            }
                            if len(a) > 4 {
                                if s, ok6 := a[4].(string); ok6 {
                                    alt = s
                                }
                            }
                        }
                    }
                    if len(wiArr) > 7 {
                        if a, ok4 := wiArr[7].([]any); ok4 && len(a) > 0 {
                            title, _ = a[0].(string)
                        }
                    }
                    webImages = append(webImages, WebImage{Image: Image{URL: urlStr, Title: title, Alt: alt, Proxy: c.Proxy}})
                }
            }
        }

        // generated images
        var genImages []GeneratedImage
        hasGen := false
        if arr, ok1 := imgSection.([]any); ok1 && len(arr) > 7 {
            if a, ok2 := arr[7].([]any); ok2 && len(a) > 0 && a[0] != nil {
|
||||
hasGen = true
|
||||
}
|
||||
}
|
||||
if hasGen {
|
||||
// find img part
|
||||
var imgBody []any
|
||||
for pi := bodyIndex; pi < len(responseJSON); pi++ {
|
||||
part := responseJSON[pi]
|
||||
arr, ok1 := part.([]any)
|
||||
if !ok1 || len(arr) < 3 {
|
||||
continue
|
||||
}
|
||||
s, ok1 := arr[2].(string)
|
||||
if !ok1 {
|
||||
continue
|
||||
}
|
||||
var mp []any
|
||||
if err = json.Unmarshal([]byte(s), &mp); err != nil {
|
||||
continue
|
||||
}
|
||||
if len(mp) > 4 {
|
||||
if tt, ok2 := mp[4].([]any); ok2 && len(tt) > ci {
|
||||
if sec, ok3 := tt[ci].([]any); ok3 && len(sec) > 12 {
|
||||
if ss, ok4 := sec[12].([]any); ok4 && len(ss) > 7 {
|
||||
if first, ok5 := ss[7].([]any); ok5 && len(first) > 0 && first[0] != nil {
|
||||
imgBody = mp
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if imgBody == nil {
|
||||
return empty, &ImageGenerationError{APIError{Msg: "Failed to parse generated images."}}
|
||||
}
|
||||
imgCand := imgBody[4].([]any)[ci].([]any)
|
||||
if len(imgCand) > 1 {
|
||||
if a, ok1 := imgCand[1].([]any); ok1 && len(a) > 0 {
|
||||
if s, ok2 := a[0].(string); ok2 {
|
||||
text = strings.TrimSpace(reGen.ReplaceAllString(s, ""))
|
||||
}
|
||||
}
|
||||
}
|
||||
// images list at imgCand[12][7][0]
|
||||
if len(imgCand) > 12 {
|
||||
if s1, ok1 := imgCand[12].([]any); ok1 && len(s1) > 7 {
|
||||
if s2, ok2 := s1[7].([]any); ok2 && len(s2) > 0 {
|
||||
if s3, ok3 := s2[0].([]any); ok3 {
|
||||
for ii, giAny := range s3 {
|
||||
ga, ok4 := giAny.([]any)
|
||||
if !ok4 || len(ga) < 4 {
|
||||
continue
|
||||
}
|
||||
// url: ga[0][3][3]
|
||||
var urlStr, title, alt string
|
||||
if a, ok5 := ga[0].([]any); ok5 && len(a) > 3 {
|
||||
if b1, ok6 := a[3].([]any); ok6 && len(b1) > 3 {
|
||||
urlStr, _ = b1[3].(string)
|
||||
}
|
||||
}
|
||||
// title from ga[3][6]
|
||||
if len(ga) > 3 {
|
||||
if a, ok5 := ga[3].([]any); ok5 {
|
||||
if len(a) > 6 {
|
||||
if v, ok6 := a[6].(float64); ok6 && v != 0 {
|
||||
title = fmt.Sprintf("[Generated Image %.0f]", v)
|
||||
} else {
|
||||
title = "[Generated Image]"
|
||||
}
|
||||
} else {
|
||||
title = "[Generated Image]"
|
||||
}
|
||||
// alt from ga[3][5][ii] fallback
|
||||
if len(a) > 5 {
|
||||
if tt, ok6 := a[5].([]any); ok6 {
|
||||
if ii < len(tt) {
|
||||
if s, ok7 := tt[ii].(string); ok7 {
|
||||
alt = s
|
||||
}
|
||||
} else if len(tt) > 0 {
|
||||
if s, ok7 := tt[0].(string); ok7 {
|
||||
alt = s
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
genImages = append(genImages, GeneratedImage{Image: Image{URL: urlStr, Title: title, Alt: alt, Proxy: c.Proxy}, Cookies: c.Cookies})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cand := Candidate{
|
||||
RCID: fmt.Sprintf("%v", cArr[0]),
|
||||
Text: decodeHTML(text),
|
||||
Thoughts: thoughts,
|
||||
WebImages: webImages,
|
||||
GeneratedImages: genImages,
|
||||
}
|
||||
candidates = append(candidates, cand)
|
||||
}
|
||||
|
||||
if len(candidates) == 0 {
|
||||
return empty, &GeminiError{Msg: "Failed to generate contents. No output data found in response."}
|
||||
}
|
||||
output := ModelOutput{Metadata: metadata, Candidates: candidates, Chosen: 0}
|
||||
if chat != nil {
|
||||
chat.lastOutput = &output
|
||||
}
|
||||
return output, nil
|
||||
}
|
||||
|
||||
// extractErrorCode attempts to navigate the known nested error structure and fetch the integer code.
|
||||
// Mirrors reference path: response_json[0][5][2][0][1][0]
|
||||
func extractErrorCode(top []any) (int, bool) {
|
||||
if len(top) == 0 {
|
||||
return 0, false
|
||||
}
|
||||
a, ok := top[0].([]any)
|
||||
if !ok || len(a) <= 5 {
|
||||
return 0, false
|
||||
}
|
||||
b, ok := a[5].([]any)
|
||||
if !ok || len(b) <= 2 {
|
||||
return 0, false
|
||||
}
|
||||
c, ok := b[2].([]any)
|
||||
if !ok || len(c) == 0 {
|
||||
return 0, false
|
||||
}
|
||||
d, ok := c[0].([]any)
|
||||
if !ok || len(d) <= 1 {
|
||||
return 0, false
|
||||
}
|
||||
e, ok := d[1].([]any)
|
||||
if !ok || len(e) == 0 {
|
||||
return 0, false
|
||||
}
|
||||
f, ok := e[0].(float64)
|
||||
if !ok {
|
||||
return 0, false
|
||||
}
|
||||
return int(f), true
|
||||
}
|
||||
|
||||
// StartChat returns a ChatSession attached to the client
|
||||
func (c *GeminiClient) StartChat(model Model, gem *Gem, metadata []string) *ChatSession {
|
||||
return &ChatSession{client: c, metadata: normalizeMeta(metadata), model: model, gem: gem, requestedModel: strings.ToLower(model.Name)}
|
||||
}
|
||||
|
||||
// ChatSession holds conversation metadata
|
||||
type ChatSession struct {
|
||||
client *GeminiClient
|
||||
metadata []string // cid, rid, rcid
|
||||
lastOutput *ModelOutput
|
||||
model Model
|
||||
gem *Gem
|
||||
requestedModel string
|
||||
}
|
||||
|
||||
func (cs *ChatSession) String() string {
|
||||
var cid, rid, rcid string
|
||||
if len(cs.metadata) > 0 {
|
||||
cid = cs.metadata[0]
|
||||
}
|
||||
if len(cs.metadata) > 1 {
|
||||
rid = cs.metadata[1]
|
||||
}
|
||||
if len(cs.metadata) > 2 {
|
||||
rcid = cs.metadata[2]
|
||||
}
|
||||
return fmt.Sprintf("ChatSession(cid='%s', rid='%s', rcid='%s')", cid, rid, rcid)
|
||||
}
|
||||
|
||||
func normalizeMeta(v []string) []string {
|
||||
out := []string{"", "", ""}
|
||||
for i := 0; i < len(v) && i < 3; i++ {
|
||||
out[i] = v[i]
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func (cs *ChatSession) Metadata() []string { return cs.metadata }
|
||||
func (cs *ChatSession) SetMetadata(v []string) { cs.metadata = normalizeMeta(v) }
|
||||
func (cs *ChatSession) RequestedModel() string { return cs.requestedModel }
|
||||
func (cs *ChatSession) SetRequestedModel(name string) {
|
||||
cs.requestedModel = strings.ToLower(name)
|
||||
}
|
||||
func (cs *ChatSession) CID() string {
|
||||
if len(cs.metadata) > 0 {
|
||||
return cs.metadata[0]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
func (cs *ChatSession) RID() string {
|
||||
if len(cs.metadata) > 1 {
|
||||
return cs.metadata[1]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
func (cs *ChatSession) RCID() string {
|
||||
if len(cs.metadata) > 2 {
|
||||
return cs.metadata[2]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
func (cs *ChatSession) setCID(v string) {
|
||||
if len(cs.metadata) < 1 {
|
||||
cs.metadata = normalizeMeta(cs.metadata)
|
||||
}
|
||||
cs.metadata[0] = v
|
||||
}
|
||||
func (cs *ChatSession) setRID(v string) {
|
||||
if len(cs.metadata) < 2 {
|
||||
cs.metadata = normalizeMeta(cs.metadata)
|
||||
}
|
||||
cs.metadata[1] = v
|
||||
}
|
||||
func (cs *ChatSession) setRCID(v string) {
|
||||
if len(cs.metadata) < 3 {
|
||||
cs.metadata = normalizeMeta(cs.metadata)
|
||||
}
|
||||
cs.metadata[2] = v
|
||||
}
|
||||
|
||||
// SendMessage shortcut to client's GenerateContent
|
||||
func (cs *ChatSession) SendMessage(prompt string, files []string) (ModelOutput, error) {
|
||||
out, err := cs.client.GenerateContent(prompt, files, cs.model, cs.gem, cs)
|
||||
if err == nil {
|
||||
cs.lastOutput = &out
|
||||
cs.SetMetadata(out.Metadata)
|
||||
cs.setRCID(out.RCID())
|
||||
}
|
||||
return out, err
|
||||
}
|
||||
|
||||
// ChooseCandidate selects a candidate from last output and updates rcid
|
||||
func (cs *ChatSession) ChooseCandidate(index int) (ModelOutput, error) {
|
||||
if cs.lastOutput == nil {
|
||||
return ModelOutput{}, &ValueError{Msg: "No previous output data found in this chat session."}
|
||||
}
|
||||
if index >= len(cs.lastOutput.Candidates) {
|
||||
return ModelOutput{}, &ValueError{Msg: fmt.Sprintf("Index %d exceeds candidates", index)}
|
||||
}
|
||||
cs.lastOutput.Chosen = index
|
||||
cs.setRCID(cs.lastOutput.RCID())
|
||||
return *cs.lastOutput, nil
|
||||
}
|
||||
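For orientation, a minimal usage sketch of the chat-session API above. This is a hypothetical helper (not part of the diff) that assumes an already-initialized *GeminiClient; the cookie/login setup lives elsewhere in this package and is not shown.

```go
package geminiwebapi

import "fmt"

// Sketch only: drives a chat session against an already-initialized client.
func exampleChatFlow(client *GeminiClient) error {
	chat := client.StartChat(ModelG25Flash, nil, nil) // no gem, empty metadata

	// First turn: on success SendMessage stores cid/rid/rcid on the session.
	out, err := chat.SendMessage("Hello, Gemini!", nil)
	if err != nil {
		return err
	}
	fmt.Println(out.Text())

	// Optionally pin a different candidate; the session rcid follows it.
	if len(out.Candidates) > 1 {
		if _, err = chat.ChooseCandidate(1); err != nil {
			return err
		}
	}

	// Follow-up turn reuses the stored conversation metadata.
	out, err = chat.SendMessage("And a follow-up question.", nil)
	if err != nil {
		return err
	}
	fmt.Println(out.Text())
	return nil
}
```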
@@ -1,80 +0,0 @@
|
||||
package conversation
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
|
||||
)
|
||||
|
||||
var (
|
||||
aliasOnce sync.Once
|
||||
aliasMap map[string]string
|
||||
)
|
||||
|
||||
// EnsureGeminiWebAliasMap populates the alias map once.
|
||||
func EnsureGeminiWebAliasMap() {
|
||||
aliasOnce.Do(func() {
|
||||
aliasMap = make(map[string]string)
|
||||
for _, m := range registry.GetGeminiModels() {
|
||||
if m.ID == "gemini-2.5-flash-lite" {
|
||||
continue
|
||||
}
|
||||
if m.ID == "gemini-2.5-flash" {
|
||||
aliasMap["gemini-2.5-flash-image-web"] = "gemini-2.5-flash"
|
||||
}
|
||||
alias := AliasFromModelID(m.ID)
|
||||
aliasMap[strings.ToLower(alias)] = strings.ToLower(m.ID)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// MapAliasToUnderlying normalizes a model alias to its underlying identifier.
|
||||
func MapAliasToUnderlying(name string) string {
|
||||
EnsureGeminiWebAliasMap()
|
||||
n := strings.ToLower(strings.TrimSpace(name))
|
||||
if n == "" {
|
||||
return n
|
||||
}
|
||||
if u, ok := aliasMap[n]; ok {
|
||||
return u
|
||||
}
|
||||
const suffix = "-web"
|
||||
if strings.HasSuffix(n, suffix) {
|
||||
return strings.TrimSuffix(n, suffix)
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
// AliasFromModelID mirrors the original helper for deriving alias IDs.
|
||||
func AliasFromModelID(modelID string) string {
|
||||
return modelID + "-web"
|
||||
}
|
||||
|
||||
// NormalizeModel returns the canonical identifier used for hashing.
|
||||
func NormalizeModel(model string) string {
|
||||
return MapAliasToUnderlying(model)
|
||||
}
|
||||
|
||||
// GetGeminiWebAliasedModels returns alias metadata for registry exposure.
|
||||
func GetGeminiWebAliasedModels() []*registry.ModelInfo {
|
||||
EnsureGeminiWebAliasMap()
|
||||
aliased := make([]*registry.ModelInfo, 0)
|
||||
for _, m := range registry.GetGeminiModels() {
|
||||
if m.ID == "gemini-2.5-flash-lite" {
|
||||
continue
|
||||
} else if m.ID == "gemini-2.5-flash" {
|
||||
cpy := *m
|
||||
cpy.ID = "gemini-2.5-flash-image-web"
|
||||
cpy.Name = "gemini-2.5-flash-image-web"
|
||||
cpy.DisplayName = "Nano Banana"
|
||||
cpy.Description = "Gemini 2.5 Flash Preview Image"
|
||||
aliased = append(aliased, &cpy)
|
||||
}
|
||||
cpy := *m
|
||||
cpy.ID = AliasFromModelID(m.ID)
|
||||
cpy.Name = cpy.ID
|
||||
aliased = append(aliased, &cpy)
|
||||
}
|
||||
return aliased
|
||||
}
|
||||
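A small sketch of the alias mapping in practice. It assumes the snippet lives inside this module (the conversation package is internal) and that the usual Gemini models are present in the registry.

```go
package main

import (
	"fmt"

	conversation "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web/conversation"
)

func main() {
	// "-web" aliases map back to the underlying Gemini model ID.
	fmt.Println(conversation.MapAliasToUnderlying("gemini-2.5-pro-web")) // gemini-2.5-pro

	// The image alias is special-cased onto gemini-2.5-flash when that model is registered.
	fmt.Println(conversation.MapAliasToUnderlying("gemini-2.5-flash-image-web"))

	// Names without a registered alias or "-web" suffix pass through unchanged.
	fmt.Println(conversation.MapAliasToUnderlying("gemini-2.5-flash")) // gemini-2.5-flash
}
```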
@@ -1,74 +0,0 @@
|
||||
package conversation
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Message represents a minimal role-text pair used for hashing and comparison.
|
||||
type Message struct {
|
||||
Role string `json:"role"`
|
||||
Text string `json:"text"`
|
||||
}
|
||||
|
||||
// StoredMessage mirrors the persisted conversation message structure.
|
||||
type StoredMessage struct {
|
||||
Role string `json:"role"`
|
||||
Content string `json:"content"`
|
||||
Name string `json:"name,omitempty"`
|
||||
}
|
||||
|
||||
// Sha256Hex computes SHA-256 hex digest for the specified string.
|
||||
func Sha256Hex(s string) string {
|
||||
sum := sha256.Sum256([]byte(s))
|
||||
return hex.EncodeToString(sum[:])
|
||||
}
|
||||
|
||||
// ToStoredMessages converts in-memory messages into the persisted representation.
|
||||
func ToStoredMessages(msgs []Message) []StoredMessage {
|
||||
out := make([]StoredMessage, 0, len(msgs))
|
||||
for _, m := range msgs {
|
||||
out = append(out, StoredMessage{Role: m.Role, Content: m.Text})
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// StoredToMessages converts stored messages back into the in-memory representation.
|
||||
func StoredToMessages(msgs []StoredMessage) []Message {
|
||||
out := make([]Message, 0, len(msgs))
|
||||
for _, m := range msgs {
|
||||
out = append(out, Message{Role: m.Role, Text: m.Content})
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// hashMessage normalizes message data and returns a stable digest.
|
||||
func hashMessage(m StoredMessage) string {
|
||||
s := fmt.Sprintf(`{"content":%q,"role":%q}`, m.Content, strings.ToLower(m.Role))
|
||||
return Sha256Hex(s)
|
||||
}
|
||||
|
||||
// HashConversationWithPrefix computes a conversation hash using the provided prefix (client identifier) and model.
|
||||
func HashConversationWithPrefix(prefix, model string, msgs []StoredMessage) string {
|
||||
var b strings.Builder
|
||||
b.WriteString(strings.ToLower(strings.TrimSpace(prefix)))
|
||||
b.WriteString("|")
|
||||
b.WriteString(strings.ToLower(strings.TrimSpace(model)))
|
||||
for _, m := range msgs {
|
||||
b.WriteString("|")
|
||||
b.WriteString(hashMessage(m))
|
||||
}
|
||||
return Sha256Hex(b.String())
|
||||
}
|
||||
|
||||
// HashConversationForAccount keeps compatibility with the per-account hash previously used.
|
||||
func HashConversationForAccount(clientID, model string, msgs []StoredMessage) string {
|
||||
return HashConversationWithPrefix(clientID, model, msgs)
|
||||
}
|
||||
|
||||
// HashConversationGlobal produces a hash suitable for cross-account lookups.
|
||||
func HashConversationGlobal(model string, msgs []StoredMessage) string {
|
||||
return HashConversationWithPrefix("global", model, msgs)
|
||||
}
|
||||
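A minimal sketch of the hashing helpers above; the message contents are placeholders and the printed digests are ordinary SHA-256 hex strings.

```go
package main

import (
	"fmt"

	conversation "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web/conversation"
)

func main() {
	msgs := []conversation.StoredMessage{
		{Role: "user", Content: "What is the capital of France?"},
		{Role: "assistant", Content: "Paris."},
	}

	// Global hash: prefix "global" + model + one digest per message.
	h := conversation.HashConversationGlobal("gemini-2.5-pro", msgs)
	fmt.Println(h) // 64 hex characters (SHA-256)

	// Per-account variant simply swaps the prefix for the client identifier.
	h2 := conversation.HashConversationForAccount("acct-label", "gemini-2.5-pro", msgs)
	fmt.Println(h != h2) // true: different prefixes yield different digests
}
```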
@@ -1,280 +0,0 @@
|
||||
package conversation
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
bolt "go.etcd.io/bbolt"
|
||||
)
|
||||
|
||||
const (
|
||||
bucketMatches = "matches"
|
||||
defaultIndexFile = "gemini-web-index.bolt"
|
||||
)
|
||||
|
||||
// MatchRecord stores persisted mapping metadata for a conversation prefix.
|
||||
type MatchRecord struct {
|
||||
AccountLabel string `json:"account_label"`
|
||||
Metadata []string `json:"metadata,omitempty"`
|
||||
PrefixLen int `json:"prefix_len"`
|
||||
UpdatedAt int64 `json:"updated_at"`
|
||||
}
|
||||
|
||||
// MatchResult combines a persisted record with the hash that produced it.
|
||||
type MatchResult struct {
|
||||
Hash string
|
||||
Record MatchRecord
|
||||
Model string
|
||||
}
|
||||
|
||||
var (
|
||||
indexOnce sync.Once
|
||||
indexDB *bolt.DB
|
||||
indexErr error
|
||||
)
|
||||
|
||||
func openIndex() (*bolt.DB, error) {
|
||||
indexOnce.Do(func() {
|
||||
path := indexPath()
|
||||
if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
|
||||
indexErr = err
|
||||
return
|
||||
}
|
||||
db, err := bolt.Open(path, 0o600, &bolt.Options{Timeout: 2 * time.Second})
|
||||
if err != nil {
|
||||
indexErr = err
|
||||
return
|
||||
}
|
||||
indexDB = db
|
||||
})
|
||||
return indexDB, indexErr
|
||||
}
|
||||
|
||||
func indexPath() string {
|
||||
wd, err := os.Getwd()
|
||||
if err != nil || wd == "" {
|
||||
wd = "."
|
||||
}
|
||||
return filepath.Join(wd, "conv", defaultIndexFile)
|
||||
}
|
||||
|
||||
// StoreMatch persists or updates a conversation hash mapping.
|
||||
func StoreMatch(hash string, record MatchRecord) error {
|
||||
if strings.TrimSpace(hash) == "" {
|
||||
return errors.New("gemini-web conversation: empty hash")
|
||||
}
|
||||
db, err := openIndex()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
record.UpdatedAt = time.Now().UTC().Unix()
|
||||
payload, err := json.Marshal(record)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return db.Update(func(tx *bolt.Tx) error {
|
||||
bucket, err := tx.CreateBucketIfNotExists([]byte(bucketMatches))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Namespace by account label to avoid cross-account collisions.
|
||||
label := strings.ToLower(strings.TrimSpace(record.AccountLabel))
|
||||
if label == "" {
|
||||
return errors.New("gemini-web conversation: empty account label")
|
||||
}
|
||||
key := []byte(hash + ":" + label)
|
||||
if err := bucket.Put(key, payload); err != nil {
|
||||
return err
|
||||
}
|
||||
// Best-effort cleanup of legacy single-key format (hash -> MatchRecord).
|
||||
// We do not know its label; leave it for lookup fallback/cleanup elsewhere.
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// LookupMatch retrieves a stored mapping.
|
||||
// It prefers namespaced entries (hash:label). If multiple labels exist for the same
|
||||
// hash, it returns not found to avoid redirecting to the wrong credential.
|
||||
// Falls back to legacy single-key entries if present.
|
||||
func LookupMatch(hash string) (MatchRecord, bool, error) {
|
||||
db, err := openIndex()
|
||||
if err != nil {
|
||||
return MatchRecord{}, false, err
|
||||
}
|
||||
var foundOne bool
|
||||
var ambiguous bool
|
||||
var firstLabel string
|
||||
var single MatchRecord
|
||||
err = db.View(func(tx *bolt.Tx) error {
|
||||
bucket := tx.Bucket([]byte(bucketMatches))
|
||||
if bucket == nil {
|
||||
return nil
|
||||
}
|
||||
// Scan namespaced keys with prefix "hash:"
|
||||
prefix := []byte(hash + ":")
|
||||
c := bucket.Cursor()
|
||||
for k, v := c.Seek(prefix); k != nil && bytes.HasPrefix(k, prefix); k, v = c.Next() {
|
||||
if len(v) == 0 {
|
||||
continue
|
||||
}
|
||||
var rec MatchRecord
|
||||
if err := json.Unmarshal(v, &rec); err != nil {
|
||||
// Ignore malformed; removal is handled elsewhere.
|
||||
continue
|
||||
}
|
||||
if strings.TrimSpace(rec.AccountLabel) == "" || rec.PrefixLen <= 0 {
|
||||
continue
|
||||
}
|
||||
label := strings.ToLower(strings.TrimSpace(rec.AccountLabel))
|
||||
if !foundOne {
|
||||
firstLabel = label
|
||||
single = rec
|
||||
foundOne = true
|
||||
continue
|
||||
}
|
||||
if label != firstLabel {
|
||||
ambiguous = true
|
||||
// Early exit scan; ambiguity detected.
|
||||
return nil
|
||||
}
|
||||
}
|
||||
if foundOne {
|
||||
return nil
|
||||
}
|
||||
// Fallback to legacy single-key format
|
||||
raw := bucket.Get([]byte(hash))
|
||||
if len(raw) == 0 {
|
||||
return nil
|
||||
}
|
||||
return json.Unmarshal(raw, &single)
|
||||
})
|
||||
if err != nil {
|
||||
return MatchRecord{}, false, err
|
||||
}
|
||||
if ambiguous {
|
||||
return MatchRecord{}, false, nil
|
||||
}
|
||||
if strings.TrimSpace(single.AccountLabel) == "" || single.PrefixLen <= 0 {
|
||||
return MatchRecord{}, false, nil
|
||||
}
|
||||
return single, true, nil
|
||||
}
|
||||
|
||||
// RemoveMatch deletes all mappings for the given hash (all labels and legacy key).
|
||||
func RemoveMatch(hash string) error {
|
||||
db, err := openIndex()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return db.Update(func(tx *bolt.Tx) error {
|
||||
bucket := tx.Bucket([]byte(bucketMatches))
|
||||
if bucket == nil {
|
||||
return nil
|
||||
}
|
||||
// Delete namespaced entries
|
||||
prefix := []byte(hash + ":")
|
||||
c := bucket.Cursor()
|
||||
for k, _ := c.Seek(prefix); k != nil && bytes.HasPrefix(k, prefix); k, _ = c.Next() {
|
||||
if err := bucket.Delete(k); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
// Delete legacy entry
|
||||
_ = bucket.Delete([]byte(hash))
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// RemoveMatchForLabel deletes the mapping for the given hash and label only.
|
||||
func RemoveMatchForLabel(hash, label string) error {
|
||||
label = strings.ToLower(strings.TrimSpace(label))
|
||||
if strings.TrimSpace(hash) == "" || label == "" {
|
||||
return nil
|
||||
}
|
||||
db, err := openIndex()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return db.Update(func(tx *bolt.Tx) error {
|
||||
bucket := tx.Bucket([]byte(bucketMatches))
|
||||
if bucket == nil {
|
||||
return nil
|
||||
}
|
||||
// Remove namespaced key
|
||||
_ = bucket.Delete([]byte(hash + ":" + label))
|
||||
// If legacy single-key exists and matches label, remove it as well.
|
||||
if raw := bucket.Get([]byte(hash)); len(raw) > 0 {
|
||||
var rec MatchRecord
|
||||
if err := json.Unmarshal(raw, &rec); err == nil {
|
||||
if strings.EqualFold(strings.TrimSpace(rec.AccountLabel), label) {
|
||||
_ = bucket.Delete([]byte(hash))
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// RemoveMatchesByLabel removes all entries associated with the specified label.
|
||||
func RemoveMatchesByLabel(label string) error {
|
||||
label = strings.TrimSpace(label)
|
||||
if label == "" {
|
||||
return nil
|
||||
}
|
||||
db, err := openIndex()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return db.Update(func(tx *bolt.Tx) error {
|
||||
bucket := tx.Bucket([]byte(bucketMatches))
|
||||
if bucket == nil {
|
||||
return nil
|
||||
}
|
||||
cursor := bucket.Cursor()
|
||||
for k, v := cursor.First(); k != nil; k, v = cursor.Next() {
|
||||
if len(v) == 0 {
|
||||
continue
|
||||
}
|
||||
var record MatchRecord
|
||||
if err := json.Unmarshal(v, &record); err != nil {
|
||||
_ = bucket.Delete(k)
|
||||
continue
|
||||
}
|
||||
if strings.EqualFold(strings.TrimSpace(record.AccountLabel), label) {
|
||||
if err := bucket.Delete(k); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// StoreConversation updates all hashes representing the provided conversation snapshot.
|
||||
func StoreConversation(label, model string, msgs []Message, metadata []string) error {
|
||||
label = strings.TrimSpace(label)
|
||||
if label == "" || len(msgs) == 0 {
|
||||
return nil
|
||||
}
|
||||
hashes := BuildStorageHashes(model, msgs)
|
||||
if len(hashes) == 0 {
|
||||
return nil
|
||||
}
|
||||
for _, h := range hashes {
|
||||
rec := MatchRecord{
|
||||
AccountLabel: label,
|
||||
Metadata: append([]string(nil), metadata...),
|
||||
PrefixLen: h.PrefixLen,
|
||||
}
|
||||
if err := StoreMatch(h.Hash, rec); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
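A hedged end-to-end sketch of the index above: persist one conversation snapshot and resolve it later. The account label and metadata values are placeholders, and the calls open (or create) conv/gemini-web-index.bolt beneath the current working directory.

```go
package main

import (
	"fmt"
	"log"

	conversation "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web/conversation"
)

func main() {
	msgs := []conversation.Message{
		{Role: "user", Text: "Hello"},
		{Role: "assistant", Text: "Hi there!"},
	}

	// Persist every storable suffix hash for this snapshot under an account label.
	// The metadata slice (cid/rid/rcid) uses placeholder values here.
	if err := conversation.StoreConversation("my-account", "gemini-2.5-pro", msgs, []string{"cid", "rid", "rcid"}); err != nil {
		log.Fatal(err)
	}

	// Later, rebuild the lookup hashes from the inbound history and resolve the match.
	for _, ph := range conversation.BuildLookupHashes("gemini-2.5-pro", msgs) {
		if rec, ok, err := conversation.LookupMatch(ph.Hash); err == nil && ok {
			fmt.Printf("prefix=%d account=%s metadata=%v\n", ph.PrefixLen, rec.AccountLabel, rec.Metadata)
			break
		}
	}
}
```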
@@ -1,64 +0,0 @@
|
||||
package conversation
|
||||
|
||||
import "strings"
|
||||
|
||||
// PrefixHash represents a hash candidate for a specific prefix length.
|
||||
type PrefixHash struct {
|
||||
Hash string
|
||||
PrefixLen int
|
||||
}
|
||||
|
||||
// BuildLookupHashes generates hash candidates ordered from longest to shortest prefix.
|
||||
func BuildLookupHashes(model string, msgs []Message) []PrefixHash {
|
||||
if len(msgs) < 2 {
|
||||
return nil
|
||||
}
|
||||
model = NormalizeModel(model)
|
||||
sanitized := SanitizeAssistantMessages(msgs)
|
||||
result := make([]PrefixHash, 0, len(sanitized))
|
||||
for end := len(sanitized); end >= 2; end-- {
|
||||
tailRole := strings.ToLower(strings.TrimSpace(sanitized[end-1].Role))
|
||||
if tailRole != "assistant" && tailRole != "system" {
|
||||
continue
|
||||
}
|
||||
prefix := sanitized[:end]
|
||||
hash := HashConversationGlobal(model, ToStoredMessages(prefix))
|
||||
result = append(result, PrefixHash{Hash: hash, PrefixLen: end})
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// BuildStorageHashes returns hashes representing the full conversation snapshot.
|
||||
func BuildStorageHashes(model string, msgs []Message) []PrefixHash {
|
||||
if len(msgs) == 0 {
|
||||
return nil
|
||||
}
|
||||
model = NormalizeModel(model)
|
||||
sanitized := SanitizeAssistantMessages(msgs)
|
||||
if len(sanitized) == 0 {
|
||||
return nil
|
||||
}
|
||||
result := make([]PrefixHash, 0, len(sanitized))
|
||||
seen := make(map[string]struct{}, len(sanitized))
|
||||
for start := 0; start < len(sanitized); start++ {
|
||||
segment := sanitized[start:]
|
||||
if len(segment) < 2 {
|
||||
continue
|
||||
}
|
||||
tailRole := strings.ToLower(strings.TrimSpace(segment[len(segment)-1].Role))
|
||||
if tailRole != "assistant" && tailRole != "system" {
|
||||
continue
|
||||
}
|
||||
hash := HashConversationGlobal(model, ToStoredMessages(segment))
|
||||
if _, exists := seen[hash]; exists {
|
||||
continue
|
||||
}
|
||||
seen[hash] = struct{}{}
|
||||
result = append(result, PrefixHash{Hash: hash, PrefixLen: len(segment)})
|
||||
}
|
||||
if len(result) == 0 {
|
||||
hash := HashConversationGlobal(model, ToStoredMessages(sanitized))
|
||||
return []PrefixHash{{Hash: hash, PrefixLen: len(sanitized)}}
|
||||
}
|
||||
return result
|
||||
}
|
||||
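To make the tail-role rule concrete, a small sketch (placeholder texts) contrasting the lookup and storage enumerations for a four-message exchange.

```go
package main

import (
	"fmt"

	conversation "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web/conversation"
)

func main() {
	msgs := []conversation.Message{
		{Role: "user", Text: "q1"},
		{Role: "assistant", Text: "a1"},
		{Role: "user", Text: "q2"},
		{Role: "assistant", Text: "a2"},
	}

	// Lookup candidates walk prefixes from longest to shortest and keep only
	// those ending on an assistant/system turn: here PrefixLen 4, then 2.
	for _, ph := range conversation.BuildLookupHashes("gemini-2.5-pro", msgs) {
		fmt.Println("lookup prefix:", ph.PrefixLen)
	}

	// Storage candidates walk suffixes of the full snapshot with the same
	// tail-role rule, deduplicated by hash: here lengths 4, 3, and 2.
	for _, ph := range conversation.BuildStorageHashes("gemini-2.5-pro", msgs) {
		fmt.Println("storage suffix length:", ph.PrefixLen)
	}
}
```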
@@ -1,6 +0,0 @@
package conversation

const (
    MetadataMessagesKey = "gemini_web_messages"
    MetadataMatchKey    = "gemini_web_match"
)
@@ -1,110 +0,0 @@
|
||||
package conversation
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
|
||||
// ExtractMessages attempts to build a message list from the inbound request payload.
|
||||
func ExtractMessages(handlerType string, raw []byte) []Message {
|
||||
if len(raw) == 0 {
|
||||
return nil
|
||||
}
|
||||
if msgs := extractOpenAIStyle(raw); len(msgs) > 0 {
|
||||
return msgs
|
||||
}
|
||||
if msgs := extractGeminiContents(raw); len(msgs) > 0 {
|
||||
return msgs
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func extractOpenAIStyle(raw []byte) []Message {
|
||||
root := gjson.ParseBytes(raw)
|
||||
messages := root.Get("messages")
|
||||
if !messages.Exists() {
|
||||
return nil
|
||||
}
|
||||
out := make([]Message, 0, 8)
|
||||
messages.ForEach(func(_, entry gjson.Result) bool {
|
||||
role := strings.ToLower(strings.TrimSpace(entry.Get("role").String()))
|
||||
if role == "" {
|
||||
return true
|
||||
}
|
||||
if role == "system" {
|
||||
return true
|
||||
}
|
||||
// Ignore OpenAI tool messages to keep hashing aligned with
|
||||
// persistence (which only keeps text/inlineData for Gemini contents).
|
||||
// This avoids mismatches when a tool response is present: the
|
||||
// storage path drops tool payloads while the lookup path would
|
||||
// otherwise include them, causing sticky selection to fail.
|
||||
if role == "tool" {
|
||||
return true
|
||||
}
|
||||
var contentBuilder strings.Builder
|
||||
content := entry.Get("content")
|
||||
if !content.Exists() {
|
||||
out = append(out, Message{Role: role, Text: ""})
|
||||
return true
|
||||
}
|
||||
switch content.Type {
|
||||
case gjson.String:
|
||||
contentBuilder.WriteString(content.String())
|
||||
case gjson.JSON:
|
||||
if content.IsArray() {
|
||||
content.ForEach(func(_, part gjson.Result) bool {
|
||||
if text := part.Get("text"); text.Exists() {
|
||||
if contentBuilder.Len() > 0 {
|
||||
contentBuilder.WriteString("\n")
|
||||
}
|
||||
contentBuilder.WriteString(text.String())
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
}
|
||||
out = append(out, Message{Role: role, Text: contentBuilder.String()})
|
||||
return true
|
||||
})
|
||||
if len(out) == 0 {
|
||||
return nil
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func extractGeminiContents(raw []byte) []Message {
|
||||
contents := gjson.GetBytes(raw, "contents")
|
||||
if !contents.Exists() {
|
||||
return nil
|
||||
}
|
||||
out := make([]Message, 0, 8)
|
||||
contents.ForEach(func(_, entry gjson.Result) bool {
|
||||
role := strings.TrimSpace(entry.Get("role").String())
|
||||
if role == "" {
|
||||
role = "user"
|
||||
} else {
|
||||
role = strings.ToLower(role)
|
||||
if role == "model" {
|
||||
role = "assistant"
|
||||
}
|
||||
}
|
||||
var builder strings.Builder
|
||||
entry.Get("parts").ForEach(func(_, part gjson.Result) bool {
|
||||
if text := part.Get("text"); text.Exists() {
|
||||
if builder.Len() > 0 {
|
||||
builder.WriteString("\n")
|
||||
}
|
||||
builder.WriteString(text.String())
|
||||
}
|
||||
return true
|
||||
})
|
||||
out = append(out, Message{Role: role, Text: builder.String()})
|
||||
return true
|
||||
})
|
||||
if len(out) == 0 {
|
||||
return nil
|
||||
}
|
||||
return out
|
||||
}
|
||||
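A short sketch of ExtractMessages on an OpenAI-style payload, assuming in-module placement; note that system and tool messages are deliberately skipped and array content parts are joined with newlines.

```go
package main

import (
	"fmt"

	conversation "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web/conversation"
)

func main() {
	raw := []byte(`{
		"messages": [
			{"role": "system", "content": "You are terse."},
			{"role": "user", "content": [{"type": "text", "text": "Hi"}, {"type": "text", "text": "there"}]},
			{"role": "assistant", "content": "Hello!"}
		]
	}`)

	// System and tool messages are skipped; array content parts are joined with "\n".
	for _, m := range conversation.ExtractMessages("openai", raw) {
		fmt.Printf("%s: %q\n", m.Role, m.Text)
	}
	// user: "Hi\nthere"
	// assistant: "Hello!"
}
```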
@@ -1,39 +0,0 @@
package conversation

import (
    "regexp"
    "strings"
)

var reThink = regexp.MustCompile(`(?is)<think>.*?</think>`)

// RemoveThinkTags strips <think>...</think> blocks and trims whitespace.
func RemoveThinkTags(s string) string {
    return strings.TrimSpace(reThink.ReplaceAllString(s, ""))
}

// SanitizeAssistantMessages removes think tags from assistant messages while leaving others untouched.
func SanitizeAssistantMessages(msgs []Message) []Message {
    out := make([]Message, 0, len(msgs))
    for _, m := range msgs {
        if strings.EqualFold(strings.TrimSpace(m.Role), "assistant") {
            out = append(out, Message{Role: m.Role, Text: RemoveThinkTags(m.Text)})
            continue
        }
        out = append(out, m)
    }
    return out
}

// EqualMessages compares two message slices for equality.
func EqualMessages(a, b []Message) bool {
    if len(a) != len(b) {
        return false
    }
    for i := range a {
        if a[i].Role != b[i].Role || a[i].Text != b[i].Text {
            return false
        }
    }
    return true
}
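A quick sketch of the think-tag stripping above (in-module placement assumed); the texts are placeholders.

```go
package main

import (
	"fmt"

	conversation "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web/conversation"
)

func main() {
	// The (?is) pattern is case-insensitive and spans newlines.
	fmt.Println(conversation.RemoveThinkTags("<think>\nscratch work\n</think>\nFinal answer.")) // "Final answer."

	msgs := []conversation.Message{
		{Role: "user", Text: "<think>kept: not an assistant turn</think> question"},
		{Role: "assistant", Text: "<THINK>dropped</THINK> answer"},
	}
	out := conversation.SanitizeAssistantMessages(msgs)
	fmt.Printf("%q\n", out[0].Text) // user text untouched
	fmt.Printf("%q\n", out[1].Text) // "answer"
}
```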
@@ -1,542 +0,0 @@
|
||||
package geminiwebapi
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/misc"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
|
||||
// Image helpers ------------------------------------------------------------
|
||||
|
||||
type Image struct {
|
||||
URL string
|
||||
Title string
|
||||
Alt string
|
||||
Proxy string
|
||||
}
|
||||
|
||||
func (i Image) String() string {
|
||||
short := i.URL
|
||||
if len(short) > 20 {
|
||||
short = short[:8] + "..." + short[len(short)-12:]
|
||||
}
|
||||
return fmt.Sprintf("Image(title='%s', alt='%s', url='%s')", i.Title, i.Alt, short)
|
||||
}
|
||||
|
||||
func (i Image) Save(path string, filename string, cookies map[string]string, verbose bool, skipInvalidFilename bool, insecure bool) (string, error) {
|
||||
if filename == "" {
|
||||
// Try to parse filename from URL.
|
||||
u := i.URL
|
||||
if p := strings.Split(u, "/"); len(p) > 0 {
|
||||
filename = p[len(p)-1]
|
||||
}
|
||||
if q := strings.Split(filename, "?"); len(q) > 0 {
|
||||
filename = q[0]
|
||||
}
|
||||
}
|
||||
// Regex validation (pattern: ^(.*\.\w+)) to extract name with extension.
|
||||
if filename != "" {
|
||||
re := regexp.MustCompile(`^(.*\.\w+)`)
|
||||
if m := re.FindStringSubmatch(filename); len(m) >= 2 {
|
||||
filename = m[1]
|
||||
} else {
|
||||
if verbose {
|
||||
log.Warnf("Invalid filename: %s", filename)
|
||||
}
|
||||
if skipInvalidFilename {
|
||||
return "", nil
|
||||
}
|
||||
}
|
||||
}
|
||||
// Build client using shared helper to keep proxy/TLS behavior consistent.
|
||||
client := newHTTPClient(httpOptions{ProxyURL: i.Proxy, Insecure: insecure, FollowRedirects: true})
|
||||
client.Timeout = 120 * time.Second
|
||||
|
||||
// Helper to set raw Cookie header using provided cookies (parity with the reference client behavior).
|
||||
buildCookieHeader := func(m map[string]string) string {
|
||||
if len(m) == 0 {
|
||||
return ""
|
||||
}
|
||||
keys := make([]string, 0, len(m))
|
||||
for k := range m {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
parts := make([]string, 0, len(keys))
|
||||
for _, k := range keys {
|
||||
parts = append(parts, fmt.Sprintf("%s=%s", k, m[k]))
|
||||
}
|
||||
return strings.Join(parts, "; ")
|
||||
}
|
||||
rawCookie := buildCookieHeader(cookies)
|
||||
|
||||
client.CheckRedirect = func(req *http.Request, via []*http.Request) error {
|
||||
// Ensure provided cookies are always sent across redirects (domain-agnostic).
|
||||
if rawCookie != "" {
|
||||
req.Header.Set("Cookie", rawCookie)
|
||||
}
|
||||
if len(via) >= 10 {
|
||||
return errors.New("stopped after 10 redirects")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
req, _ := http.NewRequest(http.MethodGet, i.URL, nil)
|
||||
if rawCookie != "" {
|
||||
req.Header.Set("Cookie", rawCookie)
|
||||
}
|
||||
// Add browser-like headers to improve compatibility.
|
||||
req.Header.Set("Accept", "image/avif,image/webp,image/apng,image/*,*/*;q=0.8")
|
||||
req.Header.Set("Connection", "keep-alive")
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer func() {
|
||||
_ = resp.Body.Close()
|
||||
}()
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return "", fmt.Errorf("error downloading image: %d %s", resp.StatusCode, resp.Status)
|
||||
}
|
||||
if ct := resp.Header.Get("Content-Type"); ct != "" && !strings.Contains(strings.ToLower(ct), "image") {
|
||||
log.Warnf("Content type of %s is not image, but %s.", filename, ct)
|
||||
}
|
||||
if path == "" {
|
||||
path = "temp"
|
||||
}
|
||||
if err = os.MkdirAll(path, 0o755); err != nil {
|
||||
return "", err
|
||||
}
|
||||
dest := filepath.Join(path, filename)
|
||||
f, err := os.Create(dest)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
_, err = io.Copy(f, resp.Body)
|
||||
_ = f.Close()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if verbose {
|
||||
fmt.Printf("Image saved as %s\n", dest)
|
||||
}
|
||||
abspath, _ := filepath.Abs(dest)
|
||||
return abspath, nil
|
||||
}
|
||||
|
||||
type WebImage struct{ Image }
|
||||
|
||||
type GeneratedImage struct {
|
||||
Image
|
||||
Cookies map[string]string
|
||||
}
|
||||
|
||||
func (g GeneratedImage) Save(path string, filename string, fullSize bool, verbose bool, skipInvalidFilename bool, insecure bool) (string, error) {
|
||||
if len(g.Cookies) == 0 {
|
||||
return "", &ValueError{Msg: "GeneratedImage requires cookies."}
|
||||
}
|
||||
strURL := g.URL
|
||||
if fullSize {
|
||||
strURL = strURL + "=s2048"
|
||||
}
|
||||
if filename == "" {
|
||||
name := time.Now().Format("20060102150405")
|
||||
if len(strURL) >= 10 {
|
||||
name = fmt.Sprintf("%s_%s.png", name, strURL[len(strURL)-10:])
|
||||
} else {
|
||||
name += ".png"
|
||||
}
|
||||
filename = name
|
||||
}
|
||||
tmp := g.Image
|
||||
tmp.URL = strURL
|
||||
return tmp.Save(path, filename, g.Cookies, verbose, skipInvalidFilename, insecure)
|
||||
}
|
||||
|
||||
// Request parsing & file helpers -------------------------------------------
|
||||
|
||||
func ParseMessagesAndFiles(rawJSON []byte) ([]RoleText, [][]byte, []string, [][]int, error) {
|
||||
var messages []RoleText
|
||||
var files [][]byte
|
||||
var mimes []string
|
||||
var perMsgFileIdx [][]int
|
||||
|
||||
contents := gjson.GetBytes(rawJSON, "contents")
|
||||
if contents.Exists() {
|
||||
contents.ForEach(func(_, content gjson.Result) bool {
|
||||
role := NormalizeRole(content.Get("role").String())
|
||||
var b strings.Builder
|
||||
startFile := len(files)
|
||||
content.Get("parts").ForEach(func(_, part gjson.Result) bool {
|
||||
if text := part.Get("text"); text.Exists() {
|
||||
if b.Len() > 0 {
|
||||
b.WriteString("\n")
|
||||
}
|
||||
b.WriteString(text.String())
|
||||
}
|
||||
if inlineData := part.Get("inlineData"); inlineData.Exists() {
|
||||
data := inlineData.Get("data").String()
|
||||
if data != "" {
|
||||
if dec, err := base64.StdEncoding.DecodeString(data); err == nil {
|
||||
files = append(files, dec)
|
||||
m := inlineData.Get("mimeType").String()
|
||||
if m == "" {
|
||||
m = inlineData.Get("mime_type").String()
|
||||
}
|
||||
mimes = append(mimes, m)
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
messages = append(messages, RoleText{Role: role, Text: b.String()})
|
||||
endFile := len(files)
|
||||
if endFile > startFile {
|
||||
idxs := make([]int, 0, endFile-startFile)
|
||||
for i := startFile; i < endFile; i++ {
|
||||
idxs = append(idxs, i)
|
||||
}
|
||||
perMsgFileIdx = append(perMsgFileIdx, idxs)
|
||||
} else {
|
||||
perMsgFileIdx = append(perMsgFileIdx, nil)
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
return messages, files, mimes, perMsgFileIdx, nil
|
||||
}
|
||||
|
||||
func MaterializeInlineFiles(files [][]byte, mimes []string) ([]string, *interfaces.ErrorMessage) {
|
||||
if len(files) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
paths := make([]string, 0, len(files))
|
||||
for i, data := range files {
|
||||
ext := MimeToExt(mimes, i)
|
||||
f, err := os.CreateTemp("", "gemini-upload-*"+ext)
|
||||
if err != nil {
|
||||
return nil, &interfaces.ErrorMessage{StatusCode: http.StatusInternalServerError, Error: fmt.Errorf("failed to create temp file: %w", err)}
|
||||
}
|
||||
if _, err = f.Write(data); err != nil {
|
||||
_ = f.Close()
|
||||
_ = os.Remove(f.Name())
|
||||
return nil, &interfaces.ErrorMessage{StatusCode: http.StatusInternalServerError, Error: fmt.Errorf("failed to write temp file: %w", err)}
|
||||
}
|
||||
if err = f.Close(); err != nil {
|
||||
_ = os.Remove(f.Name())
|
||||
return nil, &interfaces.ErrorMessage{StatusCode: http.StatusInternalServerError, Error: fmt.Errorf("failed to close temp file: %w", err)}
|
||||
}
|
||||
paths = append(paths, f.Name())
|
||||
}
|
||||
return paths, nil
|
||||
}
|
||||
|
||||
func CleanupFiles(paths []string) {
|
||||
for _, p := range paths {
|
||||
if p != "" {
|
||||
_ = os.Remove(p)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func FetchGeneratedImageData(gi GeneratedImage) (string, string, error) {
|
||||
path, err := gi.Save("", "", true, false, true, false)
|
||||
if err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
defer func() { _ = os.Remove(path) }()
|
||||
b, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
return "", "", err
|
||||
}
|
||||
mime := http.DetectContentType(b)
|
||||
if !strings.HasPrefix(mime, "image/") {
|
||||
if guessed := mimeFromExtension(filepath.Ext(path)); guessed != "" {
|
||||
mime = guessed
|
||||
} else {
|
||||
mime = "image/png"
|
||||
}
|
||||
}
|
||||
return mime, base64.StdEncoding.EncodeToString(b), nil
|
||||
}
|
||||
|
||||
func MimeToExt(mimes []string, i int) string {
|
||||
if i < len(mimes) {
|
||||
return MimeToPreferredExt(strings.ToLower(mimes[i]))
|
||||
}
|
||||
return ".png"
|
||||
}
|
||||
|
||||
var preferredExtByMIME = map[string]string{
|
||||
"image/png": ".png",
|
||||
"image/jpeg": ".jpg",
|
||||
"image/jpg": ".jpg",
|
||||
"image/webp": ".webp",
|
||||
"image/gif": ".gif",
|
||||
"image/bmp": ".bmp",
|
||||
"image/heic": ".heic",
|
||||
"application/pdf": ".pdf",
|
||||
}
|
||||
|
||||
func MimeToPreferredExt(mime string) string {
|
||||
normalized := strings.ToLower(strings.TrimSpace(mime))
|
||||
if normalized == "" {
|
||||
return ".png"
|
||||
}
|
||||
if ext, ok := preferredExtByMIME[normalized]; ok {
|
||||
return ext
|
||||
}
|
||||
return ".png"
|
||||
}
|
||||
|
||||
func mimeFromExtension(ext string) string {
|
||||
cleaned := strings.TrimPrefix(strings.ToLower(ext), ".")
|
||||
if cleaned == "" {
|
||||
return ""
|
||||
}
|
||||
if mt, ok := misc.MimeTypes[cleaned]; ok && mt != "" {
|
||||
return mt
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// File upload helpers ------------------------------------------------------
|
||||
|
||||
func uploadFile(path string, proxy string, insecure bool) (string, error) {
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer func() {
|
||||
_ = f.Close()
|
||||
}()
|
||||
|
||||
var buf bytes.Buffer
|
||||
mw := multipart.NewWriter(&buf)
|
||||
fw, err := mw.CreateFormFile("file", filepath.Base(path))
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if _, err = io.Copy(fw, f); err != nil {
|
||||
return "", err
|
||||
}
|
||||
_ = mw.Close()
|
||||
|
||||
client := newHTTPClient(httpOptions{ProxyURL: proxy, Insecure: insecure, FollowRedirects: true})
|
||||
client.Timeout = 300 * time.Second
|
||||
|
||||
req, _ := http.NewRequest(http.MethodPost, EndpointUpload, &buf)
|
||||
applyHeaders(req, HeadersUpload)
|
||||
req.Header.Set("Content-Type", mw.FormDataContentType())
|
||||
req.Header.Set("Accept", "*/*")
|
||||
req.Header.Set("Connection", "keep-alive")
|
||||
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer func() {
|
||||
_ = resp.Body.Close()
|
||||
}()
|
||||
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
|
||||
return "", &APIError{Msg: resp.Status}
|
||||
}
|
||||
b, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return string(b), nil
|
||||
}
|
||||
|
||||
func parseFileName(path string) (string, error) {
|
||||
if st, err := os.Stat(path); err != nil || st.IsDir() {
|
||||
return "", &ValueError{Msg: path + " is not a valid file."}
|
||||
}
|
||||
return filepath.Base(path), nil
|
||||
}
|
||||
|
||||
// Response formatting helpers ----------------------------------------------
|
||||
|
||||
var (
|
||||
reGoogle = regexp.MustCompile("(\\()?\\[`([^`]+?)`\\]\\(https://www\\.google\\.com/search\\?q=[^)]*\\)(\\))?")
|
||||
reColonNum = regexp.MustCompile(`([^:]+:\d+)`)
|
||||
reInline = regexp.MustCompile("`(\\[[^\\]]+\\]\\([^\\)]+\\))`")
|
||||
)
|
||||
|
||||
func unescapeGeminiText(s string) string {
    if s == "" {
        return s
    }
    s = strings.ReplaceAll(s, "&lt;", "<")
    s = strings.ReplaceAll(s, "\\<", "<")
    s = strings.ReplaceAll(s, "\\_", "_")
    s = strings.ReplaceAll(s, "\\>", ">")
    return s
}
|
||||
|
||||
func postProcessModelText(text string) string {
|
||||
text = reGoogle.ReplaceAllStringFunc(text, func(m string) string {
|
||||
subs := reGoogle.FindStringSubmatch(m)
|
||||
if len(subs) < 4 {
|
||||
return m
|
||||
}
|
||||
outerOpen := subs[1]
|
||||
display := subs[2]
|
||||
target := display
|
||||
if loc := reColonNum.FindString(display); loc != "" {
|
||||
target = loc
|
||||
}
|
||||
newSeg := "[`" + display + "`](" + target + ")"
|
||||
if outerOpen != "" {
|
||||
return "(" + newSeg + ")"
|
||||
}
|
||||
return newSeg
|
||||
})
|
||||
text = reInline.ReplaceAllString(text, "$1")
|
||||
return text
|
||||
}
|
||||
|
||||
func estimateTokens(s string) int {
|
||||
if s == "" {
|
||||
return 0
|
||||
}
|
||||
rc := float64(utf8.RuneCountInString(s))
|
||||
if rc <= 0 {
|
||||
return 0
|
||||
}
|
||||
est := int(math.Ceil(rc / 4.0))
|
||||
if est < 0 {
|
||||
return 0
|
||||
}
|
||||
return est
|
||||
}
|
||||
|
||||
// ConvertOutputToGemini converts simplified ModelOutput to Gemini API-like JSON.
|
||||
// promptText is used only to estimate usage tokens to populate usage fields.
|
||||
func ConvertOutputToGemini(output *ModelOutput, modelName string, promptText string) ([]byte, error) {
|
||||
if output == nil || len(output.Candidates) == 0 {
|
||||
return nil, fmt.Errorf("empty output")
|
||||
}
|
||||
|
||||
parts := make([]map[string]any, 0, 2)
|
||||
|
||||
var thoughtsText string
|
||||
if output.Candidates[0].Thoughts != nil {
|
||||
if t := strings.TrimSpace(*output.Candidates[0].Thoughts); t != "" {
|
||||
thoughtsText = unescapeGeminiText(t)
|
||||
parts = append(parts, map[string]any{
|
||||
"text": thoughtsText,
|
||||
"thought": true,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
visible := unescapeGeminiText(output.Candidates[0].Text)
|
||||
finalText := postProcessModelText(visible)
|
||||
if finalText != "" {
|
||||
parts = append(parts, map[string]any{"text": finalText})
|
||||
}
|
||||
|
||||
if imgs := output.Candidates[0].GeneratedImages; len(imgs) > 0 {
|
||||
for _, gi := range imgs {
|
||||
if mime, data, err := FetchGeneratedImageData(gi); err == nil && data != "" {
|
||||
parts = append(parts, map[string]any{
|
||||
"inlineData": map[string]any{
|
||||
"mimeType": mime,
|
||||
"data": data,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
promptTokens := estimateTokens(promptText)
|
||||
completionTokens := estimateTokens(finalText)
|
||||
thoughtsTokens := 0
|
||||
if thoughtsText != "" {
|
||||
thoughtsTokens = estimateTokens(thoughtsText)
|
||||
}
|
||||
totalTokens := promptTokens + completionTokens
|
||||
|
||||
now := time.Now()
|
||||
resp := map[string]any{
|
||||
"candidates": []any{
|
||||
map[string]any{
|
||||
"content": map[string]any{
|
||||
"parts": parts,
|
||||
"role": "model",
|
||||
},
|
||||
"finishReason": "stop",
|
||||
"index": 0,
|
||||
},
|
||||
},
|
||||
"createTime": now.Format(time.RFC3339Nano),
|
||||
"responseId": fmt.Sprintf("gemini-web-%d", now.UnixNano()),
|
||||
"modelVersion": modelName,
|
||||
"usageMetadata": map[string]any{
|
||||
"promptTokenCount": promptTokens,
|
||||
"candidatesTokenCount": completionTokens,
|
||||
"thoughtsTokenCount": thoughtsTokens,
|
||||
"totalTokenCount": totalTokens,
|
||||
},
|
||||
}
|
||||
b, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal gemini response: %w", err)
|
||||
}
|
||||
return ensureColonSpacing(b), nil
|
||||
}
|
||||
|
||||
// ensureColonSpacing inserts a single space after JSON key-value colons while
|
||||
// leaving string content untouched. This matches the relaxed formatting used by
|
||||
// Gemini responses and keeps downstream text-processing tools compatible with
|
||||
// the proxy output.
|
||||
func ensureColonSpacing(b []byte) []byte {
|
||||
if len(b) == 0 {
|
||||
return b
|
||||
}
|
||||
var out bytes.Buffer
|
||||
out.Grow(len(b) + len(b)/8)
|
||||
inString := false
|
||||
escaped := false
|
||||
for i := 0; i < len(b); i++ {
|
||||
ch := b[i]
|
||||
out.WriteByte(ch)
|
||||
if escaped {
|
||||
escaped = false
|
||||
continue
|
||||
}
|
||||
switch ch {
|
||||
case '\\':
|
||||
escaped = true
|
||||
case '"':
|
||||
inString = !inString
|
||||
case ':':
|
||||
if !inString && i+1 < len(b) {
|
||||
next := b[i+1]
|
||||
if next != ' ' && next != '\n' && next != '\r' && next != '\t' {
|
||||
out.WriteByte(' ')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return out.Bytes()
|
||||
}
|
||||
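A minimal same-package sketch of ConvertOutputToGemini for a text-only candidate; the RCID, texts, and model name are placeholders.

```go
package geminiwebapi

import "fmt"

// Sketch only: converts a text-only candidate into Gemini-style JSON.
func exampleConvert() {
	thoughts := "Consider the question first." // placeholder reasoning text
	out := ModelOutput{
		Candidates: []Candidate{{
			RCID:     "rc_123", // placeholder
			Text:     "Paris is the capital of France.",
			Thoughts: &thoughts,
		}},
	}
	b, err := ConvertOutputToGemini(&out, "gemini-2.5-pro-web", "What is the capital of France?")
	if err != nil {
		panic(err)
	}
	// The payload carries a thought part, the visible text part, and usage
	// counts estimated at roughly four characters per token.
	fmt.Println(string(b))
}
```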
@@ -1,253 +0,0 @@
|
||||
package geminiwebapi
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"html"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
conversation "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web/conversation"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
|
||||
)
|
||||
|
||||
// Gemini web endpoints and default headers ----------------------------------
|
||||
const (
|
||||
EndpointGoogle = "https://www.google.com"
|
||||
EndpointInit = "https://gemini.google.com/app"
|
||||
EndpointGenerate = "https://gemini.google.com/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate"
|
||||
EndpointRotateCookies = "https://accounts.google.com/RotateCookies"
|
||||
EndpointUpload = "https://content-push.googleapis.com/upload"
|
||||
)
|
||||
|
||||
var (
|
||||
HeadersGemini = http.Header{
|
||||
"Content-Type": []string{"application/x-www-form-urlencoded;charset=utf-8"},
|
||||
"Host": []string{"gemini.google.com"},
|
||||
"Origin": []string{"https://gemini.google.com"},
|
||||
"Referer": []string{"https://gemini.google.com/"},
|
||||
"User-Agent": []string{"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"},
|
||||
"X-Same-Domain": []string{"1"},
|
||||
}
|
||||
HeadersRotateCookies = http.Header{
|
||||
"Content-Type": []string{"application/json"},
|
||||
}
|
||||
HeadersUpload = http.Header{
|
||||
"Push-ID": []string{"feeds/mcudyrk2a4khkz"},
|
||||
}
|
||||
)
|
||||
|
||||
// Model metadata -------------------------------------------------------------
|
||||
type Model struct {
|
||||
Name string
|
||||
ModelHeader http.Header
|
||||
AdvancedOnly bool
|
||||
}
|
||||
|
||||
var (
|
||||
ModelUnspecified = Model{
|
||||
Name: "unspecified",
|
||||
ModelHeader: http.Header{},
|
||||
AdvancedOnly: false,
|
||||
}
|
||||
ModelG25Flash = Model{
|
||||
Name: "gemini-2.5-flash",
|
||||
ModelHeader: http.Header{
|
||||
"x-goog-ext-525001261-jspb": []string{"[1,null,null,null,\"71c2d248d3b102ff\",null,null,0,[4]]"},
|
||||
},
|
||||
AdvancedOnly: false,
|
||||
}
|
||||
ModelG25Pro = Model{
|
||||
Name: "gemini-2.5-pro",
|
||||
ModelHeader: http.Header{
|
||||
"x-goog-ext-525001261-jspb": []string{"[1,null,null,null,\"4af6c7f5da75d65d\",null,null,0,[4]]"},
|
||||
},
|
||||
AdvancedOnly: false,
|
||||
}
|
||||
ModelG20Flash = Model{
|
||||
Name: "gemini-2.0-flash",
|
||||
ModelHeader: http.Header{
|
||||
"x-goog-ext-525001261-jspb": []string{"[1,null,null,null,\"f299729663a2343f\"]"},
|
||||
},
|
||||
AdvancedOnly: false,
|
||||
}
|
||||
ModelG20FlashThinking = Model{
|
||||
Name: "gemini-2.0-flash-thinking",
|
||||
ModelHeader: http.Header{
|
||||
"x-goog-ext-525001261-jspb": []string{"[null,null,null,null,\"7ca48d02d802f20a\"]"},
|
||||
},
|
||||
AdvancedOnly: false,
|
||||
}
|
||||
)
|
||||
|
||||
func ModelFromName(name string) (Model, error) {
|
||||
switch name {
|
||||
case ModelUnspecified.Name:
|
||||
return ModelUnspecified, nil
|
||||
case ModelG25Flash.Name:
|
||||
return ModelG25Flash, nil
|
||||
case ModelG25Pro.Name:
|
||||
return ModelG25Pro, nil
|
||||
case ModelG20Flash.Name:
|
||||
return ModelG20Flash, nil
|
||||
case ModelG20FlashThinking.Name:
|
||||
return ModelG20FlashThinking, nil
|
||||
default:
|
||||
return Model{}, &ValueError{Msg: "Unknown model name: " + name}
|
||||
}
|
||||
}
|
||||
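A small same-package sketch of model resolution; note the per-model header key is stored in non-canonical form, so it is read by map index rather than Header.Get.

```go
package geminiwebapi

import "fmt"

// Sketch only: resolves a model name and reads its routing header.
func exampleModelLookup() {
	m, err := ModelFromName("gemini-2.5-pro")
	if err != nil {
		panic(err)
	}
	// The header key is stored verbatim, so index the map directly.
	fmt.Println(m.Name, m.ModelHeader["x-goog-ext-525001261-jspb"])

	// Unknown names return a *ValueError.
	if _, err = ModelFromName("gemini-1.0-ultra"); err != nil {
		fmt.Println(err) // Unknown model name: gemini-1.0-ultra
	}
}
```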
|
||||
// Known error codes returned from the server.
const (
    ErrorUsageLimitExceeded   = 1037
    ErrorModelInconsistent    = 1050
    ErrorModelHeaderInvalid   = 1052
    ErrorIPTemporarilyBlocked = 1060
)
|
||||
|
||||
func EnsureGeminiWebAliasMap() { conversation.EnsureGeminiWebAliasMap() }
|
||||
|
||||
func GetGeminiWebAliasedModels() []*registry.ModelInfo {
|
||||
return conversation.GetGeminiWebAliasedModels()
|
||||
}
|
||||
|
||||
func MapAliasToUnderlying(name string) string { return conversation.MapAliasToUnderlying(name) }
|
||||
|
||||
func AliasFromModelID(modelID string) string { return conversation.AliasFromModelID(modelID) }
|
||||
|
||||
// Conversation domain structures -------------------------------------------
|
||||
type RoleText = conversation.Message
|
||||
|
||||
type StoredMessage = conversation.StoredMessage
|
||||
|
||||
type ConversationRecord struct {
|
||||
Model string `json:"model"`
|
||||
ClientID string `json:"client_id"`
|
||||
Metadata []string `json:"metadata,omitempty"`
|
||||
Messages []StoredMessage `json:"messages"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
|
||||
type Candidate struct {
|
||||
RCID string
|
||||
Text string
|
||||
Thoughts *string
|
||||
WebImages []WebImage
|
||||
GeneratedImages []GeneratedImage
|
||||
}
|
||||
|
||||
func (c Candidate) String() string {
|
||||
t := c.Text
|
||||
if len(t) > 20 {
|
||||
t = t[:20] + "..."
|
||||
}
|
||||
return fmt.Sprintf("Candidate(rcid='%s', text='%s', images=%d)", c.RCID, t, len(c.WebImages)+len(c.GeneratedImages))
|
||||
}
|
||||
|
||||
func (c Candidate) Images() []Image {
|
||||
images := make([]Image, 0, len(c.WebImages)+len(c.GeneratedImages))
|
||||
for _, wi := range c.WebImages {
|
||||
images = append(images, wi.Image)
|
||||
}
|
||||
for _, gi := range c.GeneratedImages {
|
||||
images = append(images, gi.Image)
|
||||
}
|
||||
return images
|
||||
}
|
||||
|
||||
type ModelOutput struct {
|
||||
Metadata []string
|
||||
Candidates []Candidate
|
||||
Chosen int
|
||||
}
|
||||
|
||||
func (m ModelOutput) String() string { return m.Text() }
|
||||
|
||||
func (m ModelOutput) Text() string {
|
||||
if len(m.Candidates) == 0 {
|
||||
return ""
|
||||
}
|
||||
return m.Candidates[m.Chosen].Text
|
||||
}
|
||||
|
||||
func (m ModelOutput) Thoughts() *string {
|
||||
if len(m.Candidates) == 0 {
|
||||
return nil
|
||||
}
|
||||
return m.Candidates[m.Chosen].Thoughts
|
||||
}
|
||||
|
||||
func (m ModelOutput) Images() []Image {
|
||||
if len(m.Candidates) == 0 {
|
||||
return nil
|
||||
}
|
||||
return m.Candidates[m.Chosen].Images()
|
||||
}
|
||||
|
||||
func (m ModelOutput) RCID() string {
|
||||
if len(m.Candidates) == 0 {
|
||||
return ""
|
||||
}
|
||||
return m.Candidates[m.Chosen].RCID
|
||||
}
|
||||
|
||||
type Gem struct {
|
||||
ID string
|
||||
Name string
|
||||
Description *string
|
||||
Prompt *string
|
||||
Predefined bool
|
||||
}
|
||||
|
||||
func (g Gem) String() string {
|
||||
return fmt.Sprintf("Gem(id='%s', name='%s', description='%v', prompt='%v', predefined=%v)", g.ID, g.Name, g.Description, g.Prompt, g.Predefined)
|
||||
}
|
||||
|
||||
func decodeHTML(s string) string { return html.UnescapeString(s) }
|
||||
|
||||
// Error hierarchy -----------------------------------------------------------
|
||||
type AuthError struct{ Msg string }
|
||||
|
||||
func (e *AuthError) Error() string {
|
||||
if e.Msg == "" {
|
||||
return "authentication error"
|
||||
}
|
||||
return e.Msg
|
||||
}
|
||||
|
||||
type APIError struct{ Msg string }
|
||||
|
||||
func (e *APIError) Error() string {
|
||||
if e.Msg == "" {
|
||||
return "api error"
|
||||
}
|
||||
return e.Msg
|
||||
}
|
||||
|
||||
type ImageGenerationError struct{ APIError }
|
||||
|
||||
type GeminiError struct{ Msg string }
|
||||
|
||||
func (e *GeminiError) Error() string {
|
||||
if e.Msg == "" {
|
||||
return "gemini error"
|
||||
}
|
||||
return e.Msg
|
||||
}
|
||||
|
||||
type TimeoutError struct{ GeminiError }
|
||||
|
||||
type UsageLimitExceeded struct{ GeminiError }
|
||||
|
||||
type ModelInvalid struct{ GeminiError }
|
||||
|
||||
type TemporarilyBlocked struct{ GeminiError }
|
||||
|
||||
type ValueError struct{ Msg string }
|
||||
|
||||
func (e *ValueError) Error() string {
|
||||
if e.Msg == "" {
|
||||
return "value error"
|
||||
}
|
||||
return e.Msg
|
||||
}
|
||||
@@ -1,220 +0,0 @@
|
||||
package geminiwebapi
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"regexp"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
|
||||
conversation "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web/conversation"
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
|
||||
var (
|
||||
reXMLAnyTag = regexp.MustCompile(`(?s)<\s*[^>]+>`)
|
||||
)
|
||||
|
||||
// NormalizeRole converts a role to a standard format (lowercase, 'model' -> 'assistant').
|
||||
func NormalizeRole(role string) string {
|
||||
r := strings.ToLower(role)
|
||||
if r == "model" {
|
||||
return "assistant"
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// NeedRoleTags checks if a list of messages requires role tags.
|
||||
func NeedRoleTags(msgs []RoleText) bool {
|
||||
for _, m := range msgs {
|
||||
if strings.ToLower(m.Role) != "user" {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// AddRoleTag wraps content with a role tag.
|
||||
func AddRoleTag(role, content string, unclose bool) string {
|
||||
if role == "" {
|
||||
role = "user"
|
||||
}
|
||||
if unclose {
|
||||
return "<|im_start|>" + role + "\n" + content
|
||||
}
|
||||
return "<|im_start|>" + role + "\n" + content + "\n<|im_end|>"
|
||||
}
|
||||
|
||||
// BuildPrompt constructs the final prompt from a list of messages.
|
||||
func BuildPrompt(msgs []RoleText, tagged bool, appendAssistant bool) string {
|
||||
if len(msgs) == 0 {
|
||||
if tagged && appendAssistant {
|
||||
return AddRoleTag("assistant", "", true)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
if !tagged {
|
||||
var sb strings.Builder
|
||||
for i, m := range msgs {
|
||||
if i > 0 {
|
||||
sb.WriteString("\n")
|
||||
}
|
||||
sb.WriteString(m.Text)
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
var sb strings.Builder
|
||||
for _, m := range msgs {
|
||||
sb.WriteString(AddRoleTag(m.Role, m.Text, false))
|
||||
sb.WriteString("\n")
|
||||
}
|
||||
if appendAssistant {
|
||||
sb.WriteString(AddRoleTag("assistant", "", true))
|
||||
}
|
||||
return strings.TrimSpace(sb.String())
|
||||
}
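As a quick illustration of the tagging scheme above (a sketch only, assuming a `_test.go` file in the same package): any non-user role forces tagging, each message is wrapped in `<|im_start|>`/`<|im_end|>` markers, and `appendAssistant` leaves a trailing, unclosed assistant tag so the model continues from there.

```go
package geminiwebapi

import "fmt"

// Sketch with made-up messages: a system role triggers NeedRoleTags, and the
// prompt ends with an unclosed assistant tag.
func ExampleBuildPrompt() {
	msgs := []RoleText{
		{Role: "system", Text: "You are terse."},
		{Role: "user", Text: "Say hi."},
	}
	fmt.Println(BuildPrompt(msgs, NeedRoleTags(msgs), true))
	// Output:
	// <|im_start|>system
	// You are terse.
	// <|im_end|>
	// <|im_start|>user
	// Say hi.
	// <|im_end|>
	// <|im_start|>assistant
}
```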
|
||||
|
||||
// RemoveThinkTags strips <think>...</think> blocks from a string.
|
||||
func RemoveThinkTags(s string) string {
|
||||
return conversation.RemoveThinkTags(s)
|
||||
}
|
||||
|
||||
// SanitizeAssistantMessages removes think tags from assistant messages.
|
||||
func SanitizeAssistantMessages(msgs []RoleText) []RoleText {
|
||||
cleaned := conversation.SanitizeAssistantMessages(msgs)
|
||||
return cleaned
|
||||
}
|
||||
|
||||
// AppendXMLWrapHintIfNeeded appends an XML wrap hint to messages containing XML-like blocks.
|
||||
func AppendXMLWrapHintIfNeeded(msgs []RoleText, disable bool) []RoleText {
|
||||
if disable {
|
||||
return msgs
|
||||
}
|
||||
const xmlWrapHint = "\nFor any xml block, e.g. tool call, always wrap it with: \n`````xml\n...\n`````\n"
|
||||
out := make([]RoleText, 0, len(msgs))
|
||||
for _, m := range msgs {
|
||||
t := m.Text
|
||||
if reXMLAnyTag.MatchString(t) {
|
||||
t = t + xmlWrapHint
|
||||
}
|
||||
out = append(out, RoleText{Role: m.Role, Text: t})
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// EstimateTotalTokensFromRawJSON estimates token count by summing text parts.
|
||||
func EstimateTotalTokensFromRawJSON(rawJSON []byte) int {
|
||||
totalChars := 0
|
||||
contents := gjson.GetBytes(rawJSON, "contents")
|
||||
if contents.Exists() {
|
||||
contents.ForEach(func(_, content gjson.Result) bool {
|
||||
content.Get("parts").ForEach(func(_, part gjson.Result) bool {
|
||||
if t := part.Get("text"); t.Exists() {
|
||||
totalChars += utf8.RuneCountInString(t.String())
|
||||
}
|
||||
return true
|
||||
})
|
||||
return true
|
||||
})
|
||||
}
|
||||
if totalChars <= 0 {
|
||||
return 0
|
||||
}
|
||||
return int(math.Ceil(float64(totalChars) / 4.0))
|
||||
}
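The estimate is the usual "about four characters per token" approximation, not a real tokenizer. A small sketch with a made-up request body (for a `_test.go` file in the same package):

```go
package geminiwebapi

import "fmt"

// "Hello Gemini" is 12 runes, so the estimate is ceil(12/4) = 3 tokens.
func ExampleEstimateTotalTokensFromRawJSON() {
	raw := []byte(`{"contents":[{"parts":[{"text":"Hello Gemini"}]}]}`)
	fmt.Println(EstimateTotalTokensFromRawJSON(raw))
	// Output: 3
}
```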
|
||||
|
||||
// Request chunking helpers ------------------------------------------------
|
||||
|
||||
const continuationHint = "\n(More messages to come, please reply with just 'ok.')"
|
||||
|
||||
func ChunkByRunes(s string, size int) []string {
|
||||
if size <= 0 {
|
||||
return []string{s}
|
||||
}
|
||||
chunks := make([]string, 0, (len(s)/size)+1)
|
||||
var buf strings.Builder
|
||||
count := 0
|
||||
for _, r := range s {
|
||||
buf.WriteRune(r)
|
||||
count++
|
||||
if count >= size {
|
||||
chunks = append(chunks, buf.String())
|
||||
buf.Reset()
|
||||
count = 0
|
||||
}
|
||||
}
|
||||
if buf.Len() > 0 {
|
||||
chunks = append(chunks, buf.String())
|
||||
}
|
||||
if len(chunks) == 0 {
|
||||
return []string{""}
|
||||
}
|
||||
return chunks
|
||||
}
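Because the chunker counts runes rather than bytes, multi-byte characters are never split in half. A short sketch (same-package `_test.go` file, illustrative input):

```go
package geminiwebapi

import "fmt"

// Chunk boundaries fall on rune boundaries, so "é" and "ö" stay intact.
func ExampleChunkByRunes() {
	for _, c := range ChunkByRunes("héllo wörld", 4) {
		fmt.Println(c)
	}
	// Output:
	// héll
	// o wö
	// rld
}
```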
|
||||
|
||||
func MaxCharsPerRequest(cfg *config.Config) int {
|
||||
// Read max characters per request from config with a conservative default.
|
||||
if cfg != nil {
|
||||
if v := cfg.GeminiWeb.MaxCharsPerRequest; v > 0 {
|
||||
return v
|
||||
}
|
||||
}
|
||||
return 1_000_000
|
||||
}
|
||||
|
||||
func SendWithSplit(chat *ChatSession, text string, files []string, cfg *config.Config) (ModelOutput, error) {
|
||||
// Validate chat session
|
||||
if chat == nil {
|
||||
return ModelOutput{}, fmt.Errorf("nil chat session")
|
||||
}
|
||||
|
||||
// Resolve the maximum characters allowed per request
|
||||
maxChars := MaxCharsPerRequest(cfg)
|
||||
if maxChars <= 0 {
|
||||
maxChars = 1_000_000
|
||||
}
|
||||
|
||||
// If within limit, send directly
|
||||
if utf8.RuneCountInString(text) <= maxChars {
|
||||
return chat.SendMessage(text, files)
|
||||
}
|
||||
|
||||
// Decide whether to use continuation hint (enabled by default)
|
||||
useHint := true
|
||||
if cfg != nil && cfg.GeminiWeb.DisableContinuationHint {
|
||||
useHint = false
|
||||
}
|
||||
|
||||
// Compute chunk size in runes. If the hint does not fit, disable it for this request.
|
||||
hintLen := 0
|
||||
if useHint {
|
||||
hintLen = utf8.RuneCountInString(continuationHint)
|
||||
}
|
||||
chunkSize := maxChars - hintLen
|
||||
if chunkSize <= 0 {
|
||||
// maxChars is too small to accommodate the hint; fall back to no-hint splitting
|
||||
useHint = false
|
||||
chunkSize = maxChars
|
||||
}
|
||||
|
||||
// Split into rune-safe chunks
|
||||
chunks := ChunkByRunes(text, chunkSize)
|
||||
if len(chunks) == 0 {
|
||||
chunks = []string{""}
|
||||
}
|
||||
|
||||
// Send all but the last chunk without files, optionally appending hint
|
||||
for i := 0; i < len(chunks)-1; i++ {
|
||||
part := chunks[i]
|
||||
if useHint {
|
||||
part += continuationHint
|
||||
}
|
||||
if _, err := chat.SendMessage(part, nil); err != nil {
|
||||
return ModelOutput{}, err
|
||||
}
|
||||
}
|
||||
|
||||
// Send final chunk with files and return the actual output
|
||||
return chat.SendMessage(chunks[len(chunks)-1], files)
|
||||
}
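A back-of-the-envelope sketch of the splitting math above (same-package `_test.go` file, hypothetical prompt size): with the default 1,000,000-character budget and the continuation hint enabled, a 2,500,000-rune prompt goes out as three `SendMessage` calls — two hinted preludes plus the final chunk that carries any files.

```go
package geminiwebapi

import (
	"fmt"
	"unicode/utf8"
)

// Hint-adjusted chunk size and the resulting number of requests for a
// hypothetical 2.5M-rune prompt under the default budget.
func ExampleMaxCharsPerRequest() {
	maxChars := MaxCharsPerRequest(nil) // nil config -> conservative default
	chunkSize := maxChars - utf8.RuneCountInString(continuationHint)
	promptRunes := 2_500_000
	requests := (promptRunes + chunkSize - 1) / chunkSize
	fmt.Println(maxChars, requests)
	// Output: 1000000 3
}
```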
|
||||
File diff suppressed because it is too large
@@ -104,6 +104,34 @@ func GetGeminiModels() []*ModelInfo {
|
||||
OutputTokenLimit: 65536,
|
||||
SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
|
||||
},
|
||||
{
|
||||
ID: "gemini-2.5-flash-image-preview",
|
||||
Object: "model",
|
||||
Created: time.Now().Unix(),
|
||||
OwnedBy: "google",
|
||||
Type: "gemini",
|
||||
Name: "models/gemini-2.5-flash-image-preview",
|
||||
Version: "2.5",
|
||||
DisplayName: "Gemini 2.5 Flash Image Preview",
|
||||
Description: "State-of-the-art image generation and editing model.",
|
||||
InputTokenLimit: 1048576,
|
||||
OutputTokenLimit: 8192,
|
||||
SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
|
||||
},
|
||||
{
|
||||
ID: "gemini-2.5-flash-image",
|
||||
Object: "model",
|
||||
Created: time.Now().Unix(),
|
||||
OwnedBy: "google",
|
||||
Type: "gemini",
|
||||
Name: "models/gemini-2.5-flash-image",
|
||||
Version: "2.5",
|
||||
DisplayName: "Gemini 2.5 Flash Image",
|
||||
Description: "State-of-the-art image generation and editing model.",
|
||||
InputTokenLimit: 1048576,
|
||||
OutputTokenLimit: 8192,
|
||||
SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -166,6 +194,20 @@ func GetGeminiCLIModels() []*ModelInfo {
|
||||
OutputTokenLimit: 8192,
|
||||
SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
|
||||
},
|
||||
{
|
||||
ID: "gemini-2.5-flash-image",
|
||||
Object: "model",
|
||||
Created: time.Now().Unix(),
|
||||
OwnedBy: "google",
|
||||
Type: "gemini",
|
||||
Name: "models/gemini-2.5-flash-image",
|
||||
Version: "2.5",
|
||||
DisplayName: "Gemini 2.5 Flash Image",
|
||||
Description: "State-of-the-art image generation and editing model.",
|
||||
InputTokenLimit: 1048576,
|
||||
OutputTokenLimit: 8192,
|
||||
SupportedGenerationMethods: []string{"generateContent", "countTokens", "createCachedContent", "batchGenerateContent"},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -354,6 +396,7 @@ func GetIFlowModels() []*ModelInfo {
|
||||
{ID: "qwen3-max-preview", DisplayName: "Qwen3-Max-Preview", Description: "Qwen3 Max preview build"},
|
||||
{ID: "kimi-k2-0905", DisplayName: "Kimi-K2-Instruct-0905", Description: "Moonshot Kimi K2 instruct 0905"},
|
||||
{ID: "glm-4.5", DisplayName: "GLM-4.5", Description: "Zhipu GLM 4.5 general model"},
|
||||
{ID: "glm-4.6", DisplayName: "GLM-4.6", Description: "Zhipu GLM 4.6 general model"},
|
||||
{ID: "kimi-k2", DisplayName: "Kimi-K2", Description: "Moonshot Kimi K2 general model"},
|
||||
{ID: "deepseek-v3.2", DisplayName: "DeepSeek-V3.2-Exp", Description: "DeepSeek V3.2 experimental"},
|
||||
{ID: "deepseek-v3.1", DisplayName: "DeepSeek-V3.1-Terminus", Description: "DeepSeek V3.1 Terminus"},
|
||||
|
||||
@@ -14,6 +14,7 @@ import (
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/misc"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
|
||||
cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
|
||||
cliproxyexecutor "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/executor"
|
||||
sdktranslator "github.com/router-for-me/CLIProxyAPI/v6/sdk/translator"
|
||||
@@ -51,7 +52,7 @@ func (e *GeminiCLIExecutor) Identifier() string { return "gemini-cli" }
|
||||
func (e *GeminiCLIExecutor) PrepareRequest(_ *http.Request, _ *cliproxyauth.Auth) error { return nil }
|
||||
|
||||
func (e *GeminiCLIExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (cliproxyexecutor.Response, error) {
|
||||
tokenSource, baseTokenData, err := prepareGeminiCLITokenSource(ctx, auth)
|
||||
tokenSource, baseTokenData, err := prepareGeminiCLITokenSource(ctx, e.cfg, auth)
|
||||
if err != nil {
|
||||
return cliproxyexecutor.Response{}, err
|
||||
}
|
||||
@@ -60,6 +61,7 @@ func (e *GeminiCLIExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth
|
||||
from := opts.SourceFormat
|
||||
to := sdktranslator.FromString("gemini-cli")
|
||||
basePayload := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), false)
|
||||
basePayload = fixGeminiCLIImageAspectRatio(req.Model, basePayload)
|
||||
|
||||
action := "generateContent"
|
||||
if req.Metadata != nil {
|
||||
@@ -139,7 +141,7 @@ func (e *GeminiCLIExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth
|
||||
}
|
||||
|
||||
func (e *GeminiCLIExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (<-chan cliproxyexecutor.StreamChunk, error) {
|
||||
tokenSource, baseTokenData, err := prepareGeminiCLITokenSource(ctx, auth)
|
||||
tokenSource, baseTokenData, err := prepareGeminiCLITokenSource(ctx, e.cfg, auth)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -148,6 +150,7 @@ func (e *GeminiCLIExecutor) ExecuteStream(ctx context.Context, auth *cliproxyaut
|
||||
from := opts.SourceFormat
|
||||
to := sdktranslator.FromString("gemini-cli")
|
||||
basePayload := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), true)
|
||||
basePayload = fixGeminiCLIImageAspectRatio(req.Model, basePayload)
|
||||
|
||||
projectID := strings.TrimSpace(stringValue(auth.Metadata, "project_id"))
|
||||
|
||||
@@ -270,7 +273,7 @@ func (e *GeminiCLIExecutor) ExecuteStream(ctx context.Context, auth *cliproxyaut
|
||||
}
|
||||
|
||||
func (e *GeminiCLIExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (cliproxyexecutor.Response, error) {
|
||||
tokenSource, baseTokenData, err := prepareGeminiCLITokenSource(ctx, auth)
|
||||
tokenSource, baseTokenData, err := prepareGeminiCLITokenSource(ctx, e.cfg, auth)
|
||||
if err != nil {
|
||||
return cliproxyexecutor.Response{}, err
|
||||
}
|
||||
@@ -294,6 +297,7 @@ func (e *GeminiCLIExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.
|
||||
payload = deleteJSONField(payload, "project")
|
||||
payload = deleteJSONField(payload, "model")
|
||||
payload = disableGeminiThinkingConfig(payload, attemptModel)
|
||||
payload = fixGeminiCLIImageAspectRatio(attemptModel, payload)
|
||||
|
||||
tok, errTok := tokenSource.Token()
|
||||
if errTok != nil {
|
||||
@@ -351,7 +355,7 @@ func (e *GeminiCLIExecutor) Refresh(ctx context.Context, auth *cliproxyauth.Auth
|
||||
return auth, nil
|
||||
}
|
||||
|
||||
func prepareGeminiCLITokenSource(ctx context.Context, auth *cliproxyauth.Auth) (oauth2.TokenSource, map[string]any, error) {
|
||||
func prepareGeminiCLITokenSource(ctx context.Context, cfg *config.Config, auth *cliproxyauth.Auth) (oauth2.TokenSource, map[string]any, error) {
|
||||
if auth == nil || auth.Metadata == nil {
|
||||
return nil, nil, fmt.Errorf("gemini-cli auth metadata missing")
|
||||
}
|
||||
@@ -395,8 +399,8 @@ func prepareGeminiCLITokenSource(ctx context.Context, auth *cliproxyauth.Auth) (
|
||||
}
|
||||
|
||||
ctxToken := ctx
|
||||
if rt, ok := ctx.Value("cliproxy.roundtripper").(http.RoundTripper); ok && rt != nil {
|
||||
ctxToken = context.WithValue(ctxToken, oauth2.HTTPClient, &http.Client{Transport: rt})
|
||||
if httpClient := newProxyAwareHTTPClient(ctx, cfg, auth, 0); httpClient != nil {
|
||||
ctxToken = context.WithValue(ctxToken, oauth2.HTTPClient, httpClient)
|
||||
}
|
||||
|
||||
src := conf.TokenSource(ctxToken, &token)
|
||||
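The change above routes token refreshes through a proxy-aware HTTP client built by `newProxyAwareHTTPClient` instead of a request-scoped round tripper. A generic sketch of the underlying mechanism in `golang.org/x/oauth2` (the helper name and proxy URL below are hypothetical, not the project's implementation): the client is stored under `oauth2.HTTPClient` in the context passed to `TokenSource`, so refresh requests use it.

```go
package main

import (
	"context"
	"net/http"
	"net/url"

	"golang.org/x/oauth2"
)

// tokenSourceWithProxy is a hypothetical helper: build an *http.Client that
// dials through proxyURL and hand it to the oauth2 token source via the
// oauth2.HTTPClient context key.
func tokenSourceWithProxy(ctx context.Context, conf *oauth2.Config, tok *oauth2.Token, proxyURL string) (oauth2.TokenSource, error) {
	u, err := url.Parse(proxyURL)
	if err != nil {
		return nil, err
	}
	client := &http.Client{Transport: &http.Transport{Proxy: http.ProxyURL(u)}}
	ctx = context.WithValue(ctx, oauth2.HTTPClient, client)
	return conf.TokenSource(ctx, tok), nil
}

func main() {
	conf := &oauth2.Config{ClientID: "example-client"}
	tok := &oauth2.Token{RefreshToken: "example-refresh"}
	_, _ = tokenSourceWithProxy(context.Background(), conf, tok, "http://127.0.0.1:8080")
}
```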
@@ -518,7 +522,7 @@ func geminiModelDisallowsThinking(model string) bool {
|
||||
return false
|
||||
}
|
||||
lower := strings.ToLower(model)
|
||||
for _, marker := range []string{"gemini-2.5-flash-image-preview"} {
|
||||
for _, marker := range []string{"gemini-2.5-flash-image-preview", "gemini-2.5-flash-image"} {
|
||||
if strings.Contains(lower, marker) {
|
||||
return true
|
||||
}
|
||||
@@ -549,3 +553,45 @@ func deleteJSONField(body []byte, key string) []byte {
|
||||
}
|
||||
return updated
|
||||
}
|
||||
|
||||
func fixGeminiCLIImageAspectRatio(modelName string, rawJSON []byte) []byte {
|
||||
if modelName == "gemini-2.5-flash-image-preview" {
|
||||
aspectRatioResult := gjson.GetBytes(rawJSON, "request.generationConfig.imageConfig.aspectRatio")
|
||||
if aspectRatioResult.Exists() {
|
||||
contents := gjson.GetBytes(rawJSON, "request.contents")
|
||||
contentArray := contents.Array()
|
||||
if len(contentArray) > 0 {
|
||||
hasInlineData := false
|
||||
loopContent:
|
||||
for i := 0; i < len(contentArray); i++ {
|
||||
parts := contentArray[i].Get("parts").Array()
|
||||
for j := 0; j < len(parts); j++ {
|
||||
if parts[j].Get("inlineData").Exists() {
|
||||
hasInlineData = true
|
||||
break loopContent
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !hasInlineData {
|
||||
emptyImageBase64ed, _ := util.CreateWhiteImageBase64(aspectRatioResult.String())
|
||||
emptyImagePart := `{"inlineData":{"mime_type":"image/png","data":""}}`
|
||||
emptyImagePart, _ = sjson.Set(emptyImagePart, "inlineData.data", emptyImageBase64ed)
|
||||
newPartsJson := `[]`
|
||||
newPartsJson, _ = sjson.SetRaw(newPartsJson, "-1", `{"text": "Based on the following requirements, create an image within the uploaded picture. The new content *MUST* completely cover the entire area of the original picture, maintaining its exact proportions, and *NO* blank areas should appear."}`)
|
||||
newPartsJson, _ = sjson.SetRaw(newPartsJson, "-1", emptyImagePart)
|
||||
|
||||
parts := contentArray[0].Get("parts").Array()
|
||||
for j := 0; j < len(parts); j++ {
|
||||
newPartsJson, _ = sjson.SetRaw(newPartsJson, "-1", parts[j].Raw)
|
||||
}
|
||||
|
||||
rawJSON, _ = sjson.SetRawBytes(rawJSON, "request.contents.0.parts", []byte(newPartsJson))
|
||||
rawJSON, _ = sjson.SetRawBytes(rawJSON, "request.generationConfig.responseModalities", []byte(`["Image", "Text"]`))
|
||||
}
|
||||
}
|
||||
rawJSON, _ = sjson.DeleteBytes(rawJSON, "request.generationConfig.imageConfig")
|
||||
}
|
||||
}
|
||||
return rawJSON
|
||||
}
|
||||
|
||||
@@ -78,6 +78,7 @@ func (e *GeminiExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, r
|
||||
to := sdktranslator.FromString("gemini")
|
||||
body := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), false)
|
||||
body = disableGeminiThinkingConfig(body, req.Model)
|
||||
body = fixGeminiImageAspectRatio(req.Model, body)
|
||||
|
||||
action := "generateContent"
|
||||
if req.Metadata != nil {
|
||||
@@ -136,6 +137,7 @@ func (e *GeminiExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.A
|
||||
to := sdktranslator.FromString("gemini")
|
||||
body := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), true)
|
||||
body = disableGeminiThinkingConfig(body, req.Model)
|
||||
body = fixGeminiImageAspectRatio(req.Model, body)
|
||||
|
||||
url := fmt.Sprintf("%s/%s/models/%s:%s", glEndpoint, glAPIVersion, req.Model, "streamGenerateContent")
|
||||
if opts.Alt == "" {
|
||||
@@ -207,6 +209,7 @@ func (e *GeminiExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Aut
|
||||
to := sdktranslator.FromString("gemini")
|
||||
translatedReq := sdktranslator.TranslateRequest(from, to, req.Model, bytes.Clone(req.Payload), false)
|
||||
translatedReq = disableGeminiThinkingConfig(translatedReq, req.Model)
|
||||
translatedReq = fixGeminiImageAspectRatio(req.Model, translatedReq)
|
||||
respCtx := context.WithValue(ctx, "alt", opts.Alt)
|
||||
translatedReq, _ = sjson.DeleteBytes(translatedReq, "tools")
|
||||
translatedReq, _ = sjson.DeleteBytes(translatedReq, "generationConfig")
|
||||
@@ -374,3 +377,45 @@ func geminiCreds(a *cliproxyauth.Auth) (apiKey, bearer string) {
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func fixGeminiImageAspectRatio(modelName string, rawJSON []byte) []byte {
|
||||
if modelName == "gemini-2.5-flash-image-preview" {
|
||||
aspectRatioResult := gjson.GetBytes(rawJSON, "generationConfig.imageConfig.aspectRatio")
|
||||
if aspectRatioResult.Exists() {
|
||||
contents := gjson.GetBytes(rawJSON, "contents")
|
||||
contentArray := contents.Array()
|
||||
if len(contentArray) > 0 {
|
||||
hasInlineData := false
|
||||
loopContent:
|
||||
for i := 0; i < len(contentArray); i++ {
|
||||
parts := contentArray[i].Get("parts").Array()
|
||||
for j := 0; j < len(parts); j++ {
|
||||
if parts[j].Get("inlineData").Exists() {
|
||||
hasInlineData = true
|
||||
break loopContent
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !hasInlineData {
|
||||
emptyImageBase64ed, _ := util.CreateWhiteImageBase64(aspectRatioResult.String())
|
||||
emptyImagePart := `{"inlineData":{"mime_type":"image/png","data":""}}`
|
||||
emptyImagePart, _ = sjson.Set(emptyImagePart, "inlineData.data", emptyImageBase64ed)
|
||||
newPartsJson := `[]`
|
||||
newPartsJson, _ = sjson.SetRaw(newPartsJson, "-1", `{"text": "Based on the following requirements, create an image within the uploaded picture. The new content *MUST* completely cover the entire area of the original picture, maintaining its exact proportions, and *NO* blank areas should appear."}`)
|
||||
newPartsJson, _ = sjson.SetRaw(newPartsJson, "-1", emptyImagePart)
|
||||
|
||||
parts := contentArray[0].Get("parts").Array()
|
||||
for j := 0; j < len(parts); j++ {
|
||||
newPartsJson, _ = sjson.SetRaw(newPartsJson, "-1", parts[j].Raw)
|
||||
}
|
||||
|
||||
rawJSON, _ = sjson.SetRawBytes(rawJSON, "contents.0.parts", []byte(newPartsJson))
|
||||
rawJSON, _ = sjson.SetRawBytes(rawJSON, "generationConfig.responseModalities", []byte(`["Image", "Text"]`))
|
||||
}
|
||||
}
|
||||
rawJSON, _ = sjson.DeleteBytes(rawJSON, "generationConfig.imageConfig")
|
||||
}
|
||||
}
|
||||
return rawJSON
|
||||
}
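The aspect-ratio fix only injects the white placeholder when no part of the request already carries inline image data. A standalone sketch of that probe (illustrative payloads, not the executor's types), using the same gjson scan:

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

// hasInlineData mirrors the scan above: report whether any part of any
// content entry already carries inline image data.
func hasInlineData(rawJSON []byte) bool {
	found := false
	gjson.GetBytes(rawJSON, "contents").ForEach(func(_, content gjson.Result) bool {
		content.Get("parts").ForEach(func(_, part gjson.Result) bool {
			if part.Get("inlineData").Exists() {
				found = true
			}
			return !found // stop iterating once found
		})
		return !found
	})
	return found
}

func main() {
	textOnly := []byte(`{"contents":[{"parts":[{"text":"draw a cat"}]}]}`)
	withImage := []byte(`{"contents":[{"parts":[{"inlineData":{"mime_type":"image/png","data":"..."}}]}]}`)
	fmt.Println(hasInlineData(textOnly), hasInlineData(withImage))
	// Prints: false true
}
```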
|
||||
|
||||
@@ -1,280 +0,0 @@
|
||||
package executor
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/auth/gemini"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
|
||||
geminiwebapi "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web"
|
||||
conversation "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web/conversation"
|
||||
cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
|
||||
cliproxyexecutor "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/executor"
|
||||
sdktranslator "github.com/router-for-me/CLIProxyAPI/v6/sdk/translator"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type GeminiWebExecutor struct {
|
||||
cfg *config.Config
|
||||
mu sync.Mutex
|
||||
}
|
||||
|
||||
func NewGeminiWebExecutor(cfg *config.Config) *GeminiWebExecutor {
|
||||
return &GeminiWebExecutor{cfg: cfg}
|
||||
}
|
||||
|
||||
func (e *GeminiWebExecutor) Identifier() string { return "gemini-web" }
|
||||
|
||||
func (e *GeminiWebExecutor) PrepareRequest(_ *http.Request, _ *cliproxyauth.Auth) error { return nil }
|
||||
|
||||
func (e *GeminiWebExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (cliproxyexecutor.Response, error) {
|
||||
state, err := e.stateFor(auth)
|
||||
if err != nil {
|
||||
return cliproxyexecutor.Response{}, err
|
||||
}
|
||||
if err = state.EnsureClient(); err != nil {
|
||||
return cliproxyexecutor.Response{}, err
|
||||
}
|
||||
match := extractGeminiWebMatch(opts.Metadata)
|
||||
reporter := newUsageReporter(ctx, e.Identifier(), req.Model, auth)
|
||||
|
||||
mutex := state.GetRequestMutex()
|
||||
if mutex != nil {
|
||||
mutex.Lock()
|
||||
defer mutex.Unlock()
|
||||
if match != nil {
|
||||
state.SetPendingMatch(match)
|
||||
}
|
||||
} else if match != nil {
|
||||
state.SetPendingMatch(match)
|
||||
}
|
||||
|
||||
payload := bytes.Clone(req.Payload)
|
||||
resp, errMsg, prep := state.Send(ctx, req.Model, payload, opts)
|
||||
if errMsg != nil {
|
||||
return cliproxyexecutor.Response{}, geminiWebErrorFromMessage(errMsg)
|
||||
}
|
||||
resp = state.ConvertToTarget(ctx, req.Model, prep, resp)
|
||||
reporter.publish(ctx, parseGeminiUsage(resp))
|
||||
|
||||
from := opts.SourceFormat
|
||||
to := sdktranslator.FromString("gemini-web")
|
||||
var param any
|
||||
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), payload, bytes.Clone(resp), ¶m)
|
||||
|
||||
return cliproxyexecutor.Response{Payload: []byte(out)}, nil
|
||||
}
|
||||
|
||||
func (e *GeminiWebExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (<-chan cliproxyexecutor.StreamChunk, error) {
|
||||
state, err := e.stateFor(auth)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = state.EnsureClient(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
match := extractGeminiWebMatch(opts.Metadata)
|
||||
reporter := newUsageReporter(ctx, e.Identifier(), req.Model, auth)
|
||||
|
||||
mutex := state.GetRequestMutex()
|
||||
if mutex != nil {
|
||||
mutex.Lock()
|
||||
if match != nil {
|
||||
state.SetPendingMatch(match)
|
||||
}
|
||||
}
|
||||
if mutex == nil && match != nil {
|
||||
state.SetPendingMatch(match)
|
||||
}
|
||||
|
||||
gemBytes, errMsg, prep := state.Send(ctx, req.Model, bytes.Clone(req.Payload), opts)
|
||||
if errMsg != nil {
|
||||
if mutex != nil {
|
||||
mutex.Unlock()
|
||||
}
|
||||
return nil, geminiWebErrorFromMessage(errMsg)
|
||||
}
|
||||
reporter.publish(ctx, parseGeminiUsage(gemBytes))
|
||||
|
||||
from := opts.SourceFormat
|
||||
to := sdktranslator.FromString("gemini-web")
|
||||
var param any
|
||||
|
||||
lines := state.ConvertStream(ctx, req.Model, prep, gemBytes)
|
||||
done := state.DoneStream(ctx, req.Model, prep)
|
||||
out := make(chan cliproxyexecutor.StreamChunk)
|
||||
go func() {
|
||||
defer close(out)
|
||||
if mutex != nil {
|
||||
defer mutex.Unlock()
|
||||
}
|
||||
for _, line := range lines {
|
||||
lines = sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), req.Payload, bytes.Clone([]byte(line)), ¶m)
|
||||
for _, l := range lines {
|
||||
out <- cliproxyexecutor.StreamChunk{Payload: []byte(l)}
|
||||
}
|
||||
}
|
||||
for _, line := range done {
|
||||
lines = sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), req.Payload, bytes.Clone([]byte(line)), ¶m)
|
||||
for _, l := range lines {
|
||||
out <- cliproxyexecutor.StreamChunk{Payload: []byte(l)}
|
||||
}
|
||||
}
|
||||
}()
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (e *GeminiWebExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (cliproxyexecutor.Response, error) {
|
||||
return cliproxyexecutor.Response{Payload: []byte{}}, fmt.Errorf("not implemented")
|
||||
}
|
||||
|
||||
func (e *GeminiWebExecutor) Refresh(ctx context.Context, auth *cliproxyauth.Auth) (*cliproxyauth.Auth, error) {
|
||||
log.Debugf("gemini web executor: refresh called")
|
||||
state, err := e.stateFor(auth)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = state.Refresh(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ts := state.TokenSnapshot()
|
||||
if auth.Metadata == nil {
|
||||
auth.Metadata = make(map[string]any)
|
||||
}
|
||||
auth.Metadata["secure_1psid"] = ts.Secure1PSID
|
||||
auth.Metadata["secure_1psidts"] = ts.Secure1PSIDTS
|
||||
auth.Metadata["type"] = "gemini-web"
|
||||
auth.Metadata["last_refresh"] = time.Now().Format(time.RFC3339)
|
||||
if v, ok := auth.Metadata["label"].(string); !ok || strings.TrimSpace(v) == "" {
|
||||
if lbl := state.Label(); strings.TrimSpace(lbl) != "" {
|
||||
auth.Metadata["label"] = strings.TrimSpace(lbl)
|
||||
}
|
||||
}
|
||||
return auth, nil
|
||||
}
|
||||
|
||||
type geminiWebRuntime struct {
|
||||
state *geminiwebapi.GeminiWebState
|
||||
}
|
||||
|
||||
func (e *GeminiWebExecutor) stateFor(auth *cliproxyauth.Auth) (*geminiwebapi.GeminiWebState, error) {
|
||||
if auth == nil {
|
||||
return nil, fmt.Errorf("gemini-web executor: auth is nil")
|
||||
}
|
||||
if runtime, ok := auth.Runtime.(*geminiWebRuntime); ok && runtime != nil && runtime.state != nil {
|
||||
// Hot-reload: ensure cached state sees the latest config
|
||||
runtime.state.SetConfig(e.cfg)
|
||||
return runtime.state, nil
|
||||
}
|
||||
|
||||
e.mu.Lock()
|
||||
defer e.mu.Unlock()
|
||||
|
||||
if runtime, ok := auth.Runtime.(*geminiWebRuntime); ok && runtime != nil && runtime.state != nil {
|
||||
// Hot-reload: ensure cached state sees the latest config
|
||||
runtime.state.SetConfig(e.cfg)
|
||||
return runtime.state, nil
|
||||
}
|
||||
|
||||
ts, err := parseGeminiWebToken(auth)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cfg := e.cfg
|
||||
if auth.ProxyURL != "" && cfg != nil {
|
||||
copyCfg := *cfg
|
||||
copyCfg.ProxyURL = auth.ProxyURL
|
||||
cfg = ©Cfg
|
||||
}
|
||||
|
||||
storagePath := ""
|
||||
if auth.Attributes != nil {
|
||||
if p, ok := auth.Attributes["path"]; ok {
|
||||
storagePath = p
|
||||
}
|
||||
}
|
||||
state := geminiwebapi.NewGeminiWebState(cfg, ts, storagePath, auth.Label)
|
||||
runtime := &geminiWebRuntime{state: state}
|
||||
auth.Runtime = runtime
|
||||
return state, nil
|
||||
}
|
||||
|
||||
func parseGeminiWebToken(auth *cliproxyauth.Auth) (*gemini.GeminiWebTokenStorage, error) {
|
||||
if auth == nil {
|
||||
return nil, fmt.Errorf("gemini-web executor: auth is nil")
|
||||
}
|
||||
if auth.Metadata == nil {
|
||||
return nil, fmt.Errorf("gemini-web executor: missing metadata")
|
||||
}
|
||||
psid := stringFromMetadata(auth.Metadata, "secure_1psid", "secure_1psid", "__Secure-1PSID")
|
||||
psidts := stringFromMetadata(auth.Metadata, "secure_1psidts", "secure_1psidts", "__Secure-1PSIDTS")
|
||||
if psid == "" || psidts == "" {
|
||||
return nil, fmt.Errorf("gemini-web executor: incomplete cookie metadata")
|
||||
}
|
||||
label := strings.TrimSpace(stringFromMetadata(auth.Metadata, "label"))
|
||||
return &gemini.GeminiWebTokenStorage{Secure1PSID: psid, Secure1PSIDTS: psidts, Label: label}, nil
|
||||
}
|
||||
|
||||
func stringFromMetadata(meta map[string]any, keys ...string) string {
|
||||
for _, key := range keys {
|
||||
if val, ok := meta[key]; ok {
|
||||
if s, okStr := val.(string); okStr && s != "" {
|
||||
return s
|
||||
}
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func geminiWebErrorFromMessage(msg *interfaces.ErrorMessage) error {
|
||||
if msg == nil {
|
||||
return nil
|
||||
}
|
||||
return geminiWebError{message: msg}
|
||||
}
|
||||
|
||||
type geminiWebError struct {
|
||||
message *interfaces.ErrorMessage
|
||||
}
|
||||
|
||||
func (e geminiWebError) Error() string {
|
||||
if e.message == nil {
|
||||
return "gemini-web error"
|
||||
}
|
||||
if e.message.Error != nil {
|
||||
return e.message.Error.Error()
|
||||
}
|
||||
return fmt.Sprintf("gemini-web error: status %d", e.message.StatusCode)
|
||||
}
|
||||
|
||||
func (e geminiWebError) StatusCode() int {
|
||||
if e.message == nil {
|
||||
return 0
|
||||
}
|
||||
return e.message.StatusCode
|
||||
}
|
||||
|
||||
func extractGeminiWebMatch(metadata map[string]any) *conversation.MatchResult {
|
||||
if metadata == nil {
|
||||
return nil
|
||||
}
|
||||
value, ok := metadata[conversation.MetadataMatchKey]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
switch v := value.(type) {
|
||||
case *conversation.MatchResult:
|
||||
return v
|
||||
case conversation.MatchResult:
|
||||
return &v
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -26,6 +26,21 @@ import (
|
||||
// - []byte: The transformed request data in Gemini CLI API format
|
||||
func ConvertOpenAIRequestToGeminiCLI(modelName string, inputRawJSON []byte, _ bool) []byte {
|
||||
rawJSON := bytes.Clone(inputRawJSON)
|
||||
var pathsToDelete []string
|
||||
root := gjson.ParseBytes(rawJSON)
|
||||
util.Walk(root, "", "additionalProperties", &pathsToDelete)
|
||||
util.Walk(root, "", "$schema", &pathsToDelete)
|
||||
util.Walk(root, "", "ref", &pathsToDelete)
|
||||
util.Walk(root, "", "strict", &pathsToDelete)
|
||||
|
||||
var err error
|
||||
for _, p := range pathsToDelete {
|
||||
rawJSON, err = sjson.DeleteBytes(rawJSON, p)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Base envelope
|
||||
out := []byte(`{"project":"","request":{"contents":[],"generationConfig":{"thinkingConfig":{"include_thoughts":true}}},"model":"gemini-2.5-pro"}`)
|
||||
|
||||
@@ -65,6 +80,31 @@ func ConvertOpenAIRequestToGeminiCLI(modelName string, inputRawJSON []byte, _ bo
|
||||
out, _ = sjson.SetBytes(out, "request.generationConfig.topK", tkr.Num)
|
||||
}
|
||||
|
||||
// Map OpenAI modalities -> Gemini CLI request.generationConfig.responseModalities
|
||||
// e.g. "modalities": ["image", "text"] -> ["Image", "Text"]
|
||||
if mods := gjson.GetBytes(rawJSON, "modalities"); mods.Exists() && mods.IsArray() {
|
||||
var responseMods []string
|
||||
for _, m := range mods.Array() {
|
||||
switch strings.ToLower(m.String()) {
|
||||
case "text":
|
||||
responseMods = append(responseMods, "Text")
|
||||
case "image":
|
||||
responseMods = append(responseMods, "Image")
|
||||
}
|
||||
}
|
||||
if len(responseMods) > 0 {
|
||||
out, _ = sjson.SetBytes(out, "request.generationConfig.responseModalities", responseMods)
|
||||
}
|
||||
}
|
||||
|
||||
// OpenRouter-style image_config support
|
||||
// If the input uses top-level image_config.aspect_ratio, map it into request.generationConfig.imageConfig.aspectRatio.
|
||||
if imgCfg := gjson.GetBytes(rawJSON, "image_config"); imgCfg.Exists() && imgCfg.IsObject() {
|
||||
if ar := imgCfg.Get("aspect_ratio"); ar.Exists() && ar.Type == gjson.String {
|
||||
out, _ = sjson.SetBytes(out, "request.generationConfig.imageConfig.aspectRatio", ar.Str)
|
||||
}
|
||||
}
|
||||
|
||||
// messages -> systemInstruction + contents
|
||||
messages := gjson.GetBytes(rawJSON, "messages")
|
||||
if messages.IsArray() {
|
||||
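To see both new mappings from the caller's side, here is a hypothetical OpenAI-style payload (field values are illustrative) that, after translation, would populate `request.generationConfig.responseModalities` and `request.generationConfig.imageConfig.aspectRatio`:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical client request: "modalities" maps to responseModalities and
// image_config.aspect_ratio maps to imageConfig.aspectRatio in the
// translated Gemini CLI request.
func main() {
	req := map[string]any{
		"model": "gemini-2.5-flash-image",
		"messages": []map[string]any{
			{"role": "user", "content": "Draw a lighthouse at dusk."},
		},
		"modalities":   []string{"image", "text"},
		"image_config": map[string]string{"aspect_ratio": "16:9"},
	}
	body, _ := json.MarshalIndent(req, "", "  ")
	fmt.Println(string(body))
}
```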
@@ -232,7 +272,7 @@ func ConvertOpenAIRequestToGeminiCLI(modelName string, inputRawJSON []byte, _ bo
|
||||
}
|
||||
|
||||
var pathsToType []string
|
||||
root := gjson.ParseBytes(out)
|
||||
root = gjson.ParseBytes(out)
|
||||
util.Walk(root, "", "type", &pathsToType)
|
||||
for _, p := range pathsToType {
|
||||
typeResult := gjson.GetBytes(out, p)
|
||||
|
||||
@@ -20,6 +20,7 @@ import (
|
||||
// convertCliResponseToOpenAIChatParams holds parameters for response conversion.
|
||||
type convertCliResponseToOpenAIChatParams struct {
|
||||
UnixTimestamp int64
|
||||
FunctionIndex int
|
||||
}
|
||||
|
||||
// ConvertCliResponseToOpenAI translates a single chunk of a streaming response from the
|
||||
@@ -40,6 +41,7 @@ func ConvertCliResponseToOpenAI(_ context.Context, _ string, originalRequestRawJ
|
||||
if *param == nil {
|
||||
*param = &convertCliResponseToOpenAIChatParams{
|
||||
UnixTimestamp: 0,
|
||||
FunctionIndex: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -117,13 +119,18 @@ func ConvertCliResponseToOpenAI(_ context.Context, _ string, originalRequestRawJ
|
||||
} else if functionCallResult.Exists() {
|
||||
// Handle function call content.
|
||||
toolCallsResult := gjson.Get(template, "choices.0.delta.tool_calls")
|
||||
if !toolCallsResult.Exists() || !toolCallsResult.IsArray() {
|
||||
functionCallIndex := (*param).(*convertCliResponseToOpenAIChatParams).FunctionIndex
|
||||
(*param).(*convertCliResponseToOpenAIChatParams).FunctionIndex++
|
||||
if toolCallsResult.Exists() && toolCallsResult.IsArray() {
|
||||
functionCallIndex = len(toolCallsResult.Array())
|
||||
} else {
|
||||
template, _ = sjson.SetRaw(template, "choices.0.delta.tool_calls", `[]`)
|
||||
}
|
||||
|
||||
functionCallTemplate := `{"id": "","type": "function","function": {"name": "","arguments": ""}}`
|
||||
functionCallTemplate := `{"id": "","index": 0,"type": "function","function": {"name": "","arguments": ""}}`
|
||||
fcName := functionCallResult.Get("name").String()
|
||||
functionCallTemplate, _ = sjson.Set(functionCallTemplate, "id", fmt.Sprintf("%s-%d", fcName, time.Now().UnixNano()))
|
||||
functionCallTemplate, _ = sjson.Set(functionCallTemplate, "index", functionCallIndex)
|
||||
functionCallTemplate, _ = sjson.Set(functionCallTemplate, "function.name", fcName)
|
||||
if fcArgsResult := functionCallResult.Get("args"); fcArgsResult.Exists() {
|
||||
functionCallTemplate, _ = sjson.Set(functionCallTemplate, "function.arguments", fcArgsResult.Raw)
|
||||
|
||||
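The added `index` field matters for streaming clients, which merge tool-call argument fragments per index across delta chunks. A hedged sketch of two consecutive chunks after this change (values illustrative, not captured output):

```go
package main

import "fmt"

// Illustrative only: with "index" present, a client can attribute the second
// arguments fragment to the same tool call as the first chunk.
func main() {
	chunks := []string{
		`{"choices":[{"delta":{"tool_calls":[{"index":0,"id":"get_weather-1","type":"function","function":{"name":"get_weather","arguments":"{\"city\":"}}]}}]}`,
		`{"choices":[{"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"Paris\"}"}}]}}]}`,
	}
	for _, c := range chunks {
		fmt.Println(c)
	}
}
```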
@@ -1,20 +0,0 @@
|
||||
package chat_completions
|
||||
|
||||
import (
|
||||
. "github.com/router-for-me/CLIProxyAPI/v6/internal/constant"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
|
||||
geminiChat "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/openai/chat-completions"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/translator/translator"
|
||||
)
|
||||
|
||||
func init() {
|
||||
translator.Register(
|
||||
OpenAI,
|
||||
GeminiWeb,
|
||||
geminiChat.ConvertOpenAIRequestToGemini,
|
||||
interfaces.TranslateResponse{
|
||||
Stream: geminiChat.ConvertGeminiResponseToOpenAI,
|
||||
NonStream: geminiChat.ConvertGeminiResponseToOpenAINonStream,
|
||||
},
|
||||
)
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
package responses
|
||||
|
||||
import (
|
||||
. "github.com/router-for-me/CLIProxyAPI/v6/internal/constant"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
|
||||
geminiResponses "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/openai/responses"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/translator/translator"
|
||||
)
|
||||
|
||||
func init() {
|
||||
translator.Register(
|
||||
OpenaiResponse,
|
||||
GeminiWeb,
|
||||
geminiResponses.ConvertOpenAIResponsesRequestToGemini,
|
||||
interfaces.TranslateResponse{
|
||||
Stream: geminiResponses.ConvertGeminiResponseToOpenAIResponses,
|
||||
NonStream: geminiResponses.ConvertGeminiResponseToOpenAIResponsesNonStream,
|
||||
},
|
||||
)
|
||||
}
|
||||
@@ -26,6 +26,21 @@ import (
|
||||
// - []byte: The transformed request data in Gemini API format
|
||||
func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool) []byte {
|
||||
rawJSON := bytes.Clone(inputRawJSON)
|
||||
var pathsToDelete []string
|
||||
root := gjson.ParseBytes(rawJSON)
|
||||
util.Walk(root, "", "additionalProperties", &pathsToDelete)
|
||||
util.Walk(root, "", "$schema", &pathsToDelete)
|
||||
util.Walk(root, "", "ref", &pathsToDelete)
|
||||
util.Walk(root, "", "strict", &pathsToDelete)
|
||||
|
||||
var err error
|
||||
for _, p := range pathsToDelete {
|
||||
rawJSON, err = sjson.DeleteBytes(rawJSON, p)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Base envelope
|
||||
out := []byte(`{"contents":[],"generationConfig":{"thinkingConfig":{"include_thoughts":true}}}`)
|
||||
|
||||
@@ -65,6 +80,31 @@ func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool)
|
||||
out, _ = sjson.SetBytes(out, "generationConfig.topK", tkr.Num)
|
||||
}
|
||||
|
||||
// Map OpenAI modalities -> Gemini generationConfig.responseModalities
|
||||
// e.g. "modalities": ["image", "text"] -> ["Image", "Text"]
|
||||
if mods := gjson.GetBytes(rawJSON, "modalities"); mods.Exists() && mods.IsArray() {
|
||||
var responseMods []string
|
||||
for _, m := range mods.Array() {
|
||||
switch strings.ToLower(m.String()) {
|
||||
case "text":
|
||||
responseMods = append(responseMods, "Text")
|
||||
case "image":
|
||||
responseMods = append(responseMods, "Image")
|
||||
}
|
||||
}
|
||||
if len(responseMods) > 0 {
|
||||
out, _ = sjson.SetBytes(out, "generationConfig.responseModalities", responseMods)
|
||||
}
|
||||
}
|
||||
|
||||
// OpenRouter-style image_config support
|
||||
// If the input uses top-level image_config.aspect_ratio, map it into generationConfig.imageConfig.aspectRatio.
|
||||
if imgCfg := gjson.GetBytes(rawJSON, "image_config"); imgCfg.Exists() && imgCfg.IsObject() {
|
||||
if ar := imgCfg.Get("aspect_ratio"); ar.Exists() && ar.Type == gjson.String {
|
||||
out, _ = sjson.SetBytes(out, "generationConfig.imageConfig.aspectRatio", ar.Str)
|
||||
}
|
||||
}
|
||||
|
||||
// messages -> systemInstruction + contents
|
||||
messages := gjson.GetBytes(rawJSON, "messages")
|
||||
if messages.IsArray() {
|
||||
@@ -257,7 +297,7 @@ func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool)
|
||||
}
|
||||
|
||||
var pathsToType []string
|
||||
root := gjson.ParseBytes(out)
|
||||
root = gjson.ParseBytes(out)
|
||||
util.Walk(root, "", "type", &pathsToType)
|
||||
for _, p := range pathsToType {
|
||||
typeResult := gjson.GetBytes(out, p)
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
// convertGeminiResponseToOpenAIChatParams holds parameters for response conversion.
|
||||
type convertGeminiResponseToOpenAIChatParams struct {
|
||||
UnixTimestamp int64
|
||||
FunctionIndex int
|
||||
}
|
||||
|
||||
// ConvertGeminiResponseToOpenAI translates a single chunk of a streaming response from the
|
||||
@@ -39,6 +40,7 @@ func ConvertGeminiResponseToOpenAI(_ context.Context, _ string, originalRequestR
|
||||
if *param == nil {
|
||||
*param = &convertGeminiResponseToOpenAIChatParams{
|
||||
UnixTimestamp: 0,
|
||||
FunctionIndex: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -120,13 +122,18 @@ func ConvertGeminiResponseToOpenAI(_ context.Context, _ string, originalRequestR
|
||||
} else if functionCallResult.Exists() {
|
||||
// Handle function call content.
|
||||
toolCallsResult := gjson.Get(template, "choices.0.delta.tool_calls")
|
||||
if !toolCallsResult.Exists() || !toolCallsResult.IsArray() {
|
||||
functionCallIndex := (*param).(*convertGeminiResponseToOpenAIChatParams).FunctionIndex
|
||||
(*param).(*convertGeminiResponseToOpenAIChatParams).FunctionIndex++
|
||||
if toolCallsResult.Exists() && toolCallsResult.IsArray() {
|
||||
functionCallIndex = len(toolCallsResult.Array())
|
||||
} else {
|
||||
template, _ = sjson.SetRaw(template, "choices.0.delta.tool_calls", `[]`)
|
||||
}
|
||||
|
||||
functionCallTemplate := `{"id": "","type": "function","function": {"name": "","arguments": ""}}`
|
||||
functionCallTemplate := `{"id": "","index": 0,"type": "function","function": {"name": "","arguments": ""}}`
|
||||
fcName := functionCallResult.Get("name").String()
|
||||
functionCallTemplate, _ = sjson.Set(functionCallTemplate, "id", fmt.Sprintf("%s-%d", fcName, time.Now().UnixNano()))
|
||||
functionCallTemplate, _ = sjson.Set(functionCallTemplate, "index", functionCallIndex)
|
||||
functionCallTemplate, _ = sjson.Set(functionCallTemplate, "function.name", fcName)
|
||||
if fcArgsResult := functionCallResult.Get("args"); fcArgsResult.Exists() {
|
||||
functionCallTemplate, _ = sjson.Set(functionCallTemplate, "function.arguments", fcArgsResult.Raw)
|
||||
|
||||
@@ -23,9 +23,6 @@ import (
|
||||
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/openai/chat-completions"
|
||||
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/openai/responses"
|
||||
|
||||
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini-web/openai/chat-completions"
|
||||
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini-web/openai/responses"
|
||||
|
||||
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/openai/claude"
|
||||
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/openai/gemini"
|
||||
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/openai/gemini-cli"
|
||||
|
||||
59
internal/util/image.go
Normal file
@@ -0,0 +1,59 @@
|
||||
package util
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"image"
|
||||
"image/draw"
|
||||
"image/png"
|
||||
)
|
||||
|
||||
func CreateWhiteImageBase64(aspectRatio string) (string, error) {
|
||||
width := 1024
|
||||
height := 1024
|
||||
|
||||
switch aspectRatio {
|
||||
case "1:1":
|
||||
width = 1024
|
||||
height = 1024
|
||||
case "2:3":
|
||||
width = 832
|
||||
height = 1248
|
||||
case "3:2":
|
||||
width = 1248
|
||||
height = 832
|
||||
case "3:4":
|
||||
width = 864
|
||||
height = 1184
|
||||
case "4:3":
|
||||
width = 1184
|
||||
height = 864
|
||||
case "4:5":
|
||||
width = 896
|
||||
height = 1152
|
||||
case "5:4":
|
||||
width = 1152
|
||||
height = 896
|
||||
case "9:16":
|
||||
width = 768
|
||||
height = 1344
|
||||
case "16:9":
|
||||
width = 1344
|
||||
height = 768
|
||||
case "21:9":
|
||||
width = 1536
|
||||
height = 672
|
||||
}
|
||||
|
||||
img := image.NewRGBA(image.Rect(0, 0, width, height))
|
||||
draw.Draw(img, img.Bounds(), image.White, image.Point{}, draw.Src)
|
||||
|
||||
var buf bytes.Buffer
|
||||
|
||||
if err := png.Encode(&buf, img); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
base64String := base64.StdEncoding.EncodeToString(buf.Bytes())
|
||||
return base64String, nil
|
||||
}
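A quick sanity check of the helper (a sketch for a `_test.go` file alongside it): decode the generated placeholder and confirm the 16:9 preset comes back as a 1344x768 PNG.

```go
package util

import (
	"bytes"
	"encoding/base64"
	"fmt"
	"image/png"
)

// Decode the base64 placeholder and print its dimensions for the 16:9 preset.
func ExampleCreateWhiteImageBase64() {
	b64, err := CreateWhiteImageBase64("16:9")
	if err != nil {
		panic(err)
	}
	raw, err := base64.StdEncoding.DecodeString(b64)
	if err != nil {
		panic(err)
	}
	img, err := png.Decode(bytes.NewReader(raw))
	if err != nil {
		panic(err)
	}
	fmt.Println(img.Bounds().Dx(), img.Bounds().Dy())
	// Output: 1344 768
}
```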
|
||||
@@ -512,21 +512,6 @@ func (w *Watcher) reloadConfig() bool {
|
||||
if oldConfig.RequestRetry != newConfig.RequestRetry {
|
||||
log.Debugf(" request-retry: %d -> %d", oldConfig.RequestRetry, newConfig.RequestRetry)
|
||||
}
|
||||
if oldConfig.GeminiWeb.Context != newConfig.GeminiWeb.Context {
|
||||
log.Debugf(" gemini-web.context: %t -> %t", oldConfig.GeminiWeb.Context, newConfig.GeminiWeb.Context)
|
||||
}
|
||||
if oldConfig.GeminiWeb.MaxCharsPerRequest != newConfig.GeminiWeb.MaxCharsPerRequest {
|
||||
log.Debugf(" gemini-web.max-chars-per-request: %d -> %d", oldConfig.GeminiWeb.MaxCharsPerRequest, newConfig.GeminiWeb.MaxCharsPerRequest)
|
||||
}
|
||||
if oldConfig.GeminiWeb.DisableContinuationHint != newConfig.GeminiWeb.DisableContinuationHint {
|
||||
log.Debugf(" gemini-web.disable-continuation-hint: %t -> %t", oldConfig.GeminiWeb.DisableContinuationHint, newConfig.GeminiWeb.DisableContinuationHint)
|
||||
}
|
||||
if oldConfig.GeminiWeb.GemMode != newConfig.GeminiWeb.GemMode {
|
||||
log.Debugf(" gemini-web.gem-mode: %s -> %s", oldConfig.GeminiWeb.GemMode, newConfig.GeminiWeb.GemMode)
|
||||
}
|
||||
if oldConfig.GeminiWeb.CodeMode != newConfig.GeminiWeb.CodeMode {
|
||||
log.Debugf(" gemini-web.code-mode: %t -> %t", oldConfig.GeminiWeb.CodeMode, newConfig.GeminiWeb.CodeMode)
|
||||
}
|
||||
if len(oldConfig.APIKeys) != len(newConfig.APIKeys) {
|
||||
log.Debugf(" api-keys count: %d -> %d", len(oldConfig.APIKeys), len(newConfig.APIKeys))
|
||||
}
|
||||
@@ -951,6 +936,11 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
|
||||
id = rel
|
||||
}
|
||||
|
||||
proxyURL := ""
|
||||
if p, ok := metadata["proxy_url"].(string); ok {
|
||||
proxyURL = p
|
||||
}
|
||||
|
||||
a := &coreauth.Auth{
|
||||
ID: id,
|
||||
Provider: provider,
|
||||
@@ -960,6 +950,7 @@ func (w *Watcher) SnapshotCoreAuths() []*coreauth.Auth {
|
||||
"source": full,
|
||||
"path": full,
|
||||
},
|
||||
ProxyURL: proxyURL,
|
||||
Metadata: metadata,
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
|
||||
@@ -9,7 +9,6 @@ import (
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
|
||||
conversation "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web/conversation"
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
|
||||
coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
|
||||
coreexecutor "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/executor"
|
||||
@@ -49,8 +48,6 @@ type BaseAPIHandler struct {
|
||||
Cfg *config.SDKConfig
|
||||
}
|
||||
|
||||
const geminiWebProvider = "gemini-web"
|
||||
|
||||
// NewBaseAPIHandlers creates a new API handlers instance.
|
||||
// It takes a slice of clients and configuration as input.
|
||||
//
|
||||
@@ -140,7 +137,6 @@ func (h *BaseAPIHandler) ExecuteWithAuthManager(ctx context.Context, handlerType
|
||||
if len(providers) == 0 {
|
||||
return nil, &interfaces.ErrorMessage{StatusCode: http.StatusBadRequest, Error: fmt.Errorf("unknown provider for model %s", modelName)}
|
||||
}
|
||||
metadata := h.buildGeminiWebMetadata(handlerType, providers, rawJSON)
|
||||
req := coreexecutor.Request{
|
||||
Model: modelName,
|
||||
Payload: cloneBytes(rawJSON),
|
||||
@@ -150,7 +146,6 @@ func (h *BaseAPIHandler) ExecuteWithAuthManager(ctx context.Context, handlerType
|
||||
Alt: alt,
|
||||
OriginalRequest: cloneBytes(rawJSON),
|
||||
SourceFormat: sdktranslator.FromString(handlerType),
|
||||
Metadata: metadata,
|
||||
}
|
||||
resp, err := h.AuthManager.Execute(ctx, providers, req, opts)
|
||||
if err != nil {
|
||||
@@ -166,7 +161,6 @@ func (h *BaseAPIHandler) ExecuteCountWithAuthManager(ctx context.Context, handle
|
||||
if len(providers) == 0 {
|
||||
return nil, &interfaces.ErrorMessage{StatusCode: http.StatusBadRequest, Error: fmt.Errorf("unknown provider for model %s", modelName)}
|
||||
}
|
||||
metadata := h.buildGeminiWebMetadata(handlerType, providers, rawJSON)
|
||||
req := coreexecutor.Request{
|
||||
Model: modelName,
|
||||
Payload: cloneBytes(rawJSON),
|
||||
@@ -176,7 +170,6 @@ func (h *BaseAPIHandler) ExecuteCountWithAuthManager(ctx context.Context, handle
|
||||
Alt: alt,
|
||||
OriginalRequest: cloneBytes(rawJSON),
|
||||
SourceFormat: sdktranslator.FromString(handlerType),
|
||||
Metadata: metadata,
|
||||
}
|
||||
resp, err := h.AuthManager.ExecuteCount(ctx, providers, req, opts)
|
||||
if err != nil {
|
||||
@@ -195,7 +188,6 @@ func (h *BaseAPIHandler) ExecuteStreamWithAuthManager(ctx context.Context, handl
|
||||
close(errChan)
|
||||
return nil, errChan
|
||||
}
|
||||
metadata := h.buildGeminiWebMetadata(handlerType, providers, rawJSON)
|
||||
req := coreexecutor.Request{
|
||||
Model: modelName,
|
||||
Payload: cloneBytes(rawJSON),
|
||||
@@ -205,7 +197,6 @@ func (h *BaseAPIHandler) ExecuteStreamWithAuthManager(ctx context.Context, handl
|
||||
Alt: alt,
|
||||
OriginalRequest: cloneBytes(rawJSON),
|
||||
SourceFormat: sdktranslator.FromString(handlerType),
|
||||
Metadata: metadata,
|
||||
}
|
||||
chunks, err := h.AuthManager.ExecuteStream(ctx, providers, req, opts)
|
||||
if err != nil {
|
||||
@@ -241,18 +232,6 @@ func cloneBytes(src []byte) []byte {
|
||||
return dst
|
||||
}
|
||||
|
||||
func (h *BaseAPIHandler) buildGeminiWebMetadata(handlerType string, providers []string, rawJSON []byte) map[string]any {
|
||||
if !util.InArray(providers, geminiWebProvider) {
|
||||
return nil
|
||||
}
|
||||
meta := make(map[string]any)
|
||||
msgs := conversation.ExtractMessages(handlerType, rawJSON)
|
||||
if len(msgs) > 0 {
|
||||
meta[conversation.MetadataMessagesKey] = msgs
|
||||
}
|
||||
return meta
|
||||
}
|
||||
|
||||
// WriteErrorResponse writes an error message to the response writer using the HTTP status embedded in the message.
|
||||
func (h *BaseAPIHandler) WriteErrorResponse(c *gin.Context, msg *interfaces.ErrorMessage) {
|
||||
status := http.StatusInternalServerError
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
package auth
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
|
||||
coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
|
||||
)
|
||||
|
||||
// GeminiWebAuthenticator provides a minimal wrapper so core components can
// handle Gemini Web credentials through the shared Authenticator contract.
|
||||
type GeminiWebAuthenticator struct{}
|
||||
|
||||
func NewGeminiWebAuthenticator() *GeminiWebAuthenticator { return &GeminiWebAuthenticator{} }
|
||||
|
||||
func (a *GeminiWebAuthenticator) Provider() string { return "gemini-web" }
|
||||
|
||||
func (a *GeminiWebAuthenticator) Login(ctx context.Context, cfg *config.Config, opts *LoginOptions) (*coreauth.Auth, error) {
|
||||
_ = ctx
|
||||
_ = cfg
|
||||
_ = opts
|
||||
return nil, fmt.Errorf("gemini-web authenticator does not support scripted login; use CLI --gemini-web-auth")
|
||||
}
|
||||
|
||||
func (a *GeminiWebAuthenticator) RefreshLead() *time.Duration {
|
||||
d := time.Hour
|
||||
return &d
|
||||
}
|
||||
@@ -13,7 +13,6 @@ func init() {
|
||||
registerRefreshLead("iflow", func() Authenticator { return NewIFlowAuthenticator() })
|
||||
registerRefreshLead("gemini", func() Authenticator { return NewGeminiAuthenticator() })
|
||||
registerRefreshLead("gemini-cli", func() Authenticator { return NewGeminiAuthenticator() })
|
||||
registerRefreshLead("gemini-web", func() Authenticator { return NewGeminiWebAuthenticator() })
|
||||
}
|
||||
|
||||
func registerRefreshLead(provider string, factory func() Authenticator) {
|
||||
|
||||
@@ -285,9 +285,6 @@ func (m *Manager) executeWithProvider(ctx context.Context, provider string, req
|
||||
log.Debugf("Use API key %s for model %s", util.HideAPIKey(accountInfo), req.Model)
|
||||
} else if accountType == "oauth" {
|
||||
log.Debugf("Use OAuth %s for model %s", accountInfo, req.Model)
|
||||
} else if accountType == "cookie" {
|
||||
// Only Gemini Web uses cookie; print stable account label as-is.
|
||||
log.Debugf("Use Cookie %s for model %s", accountInfo, req.Model)
|
||||
}
|
||||
|
||||
tried[auth.ID] = struct{}{}
|
||||
@@ -333,8 +330,6 @@ func (m *Manager) executeCountWithProvider(ctx context.Context, provider string,
|
||||
log.Debugf("Use API key %s for model %s", util.HideAPIKey(accountInfo), req.Model)
|
||||
} else if accountType == "oauth" {
|
||||
log.Debugf("Use OAuth %s for model %s", accountInfo, req.Model)
|
||||
} else if accountType == "cookie" {
|
||||
log.Debugf("Use Cookie %s for model %s", accountInfo, req.Model)
|
||||
}
|
||||
|
||||
tried[auth.ID] = struct{}{}
|
||||
@@ -380,8 +375,6 @@ func (m *Manager) executeStreamWithProvider(ctx context.Context, provider string
|
||||
log.Debugf("Use API key %s for model %s", util.HideAPIKey(accountInfo), req.Model)
|
||||
} else if accountType == "oauth" {
|
||||
log.Debugf("Use OAuth %s for model %s", accountInfo, req.Model)
|
||||
} else if accountType == "cookie" {
|
||||
log.Debugf("Use Cookie %s for model %s", accountInfo, req.Model)
|
||||
}
|
||||
|
||||
tried[auth.ID] = struct{}{}
|
||||
@@ -787,27 +780,31 @@ func (m *Manager) pickNext(ctx context.Context, provider, model string, opts cli
|
||||
return nil, nil, &Error{Code: "executor_not_found", Message: "executor not registered"}
|
||||
}
|
||||
candidates := make([]*Auth, 0, len(m.auths))
|
||||
for _, auth := range m.auths {
|
||||
if auth.Provider != provider || auth.Disabled {
|
||||
for _, candidate := range m.auths {
|
||||
if candidate.Provider != provider || candidate.Disabled {
|
||||
continue
|
||||
}
|
||||
if _, used := tried[auth.ID]; used {
|
||||
if _, used := tried[candidate.ID]; used {
|
||||
continue
|
||||
}
|
||||
candidates = append(candidates, auth.Clone())
|
||||
candidates = append(candidates, candidate)
|
||||
}
|
||||
m.mu.RUnlock()
|
||||
if len(candidates) == 0 {
|
||||
m.mu.RUnlock()
|
||||
return nil, nil, &Error{Code: "auth_not_found", Message: "no auth available"}
|
||||
}
|
||||
auth, errPick := m.selector.Pick(ctx, provider, model, opts, candidates)
|
||||
selected, errPick := m.selector.Pick(ctx, provider, model, opts, candidates)
|
||||
if errPick != nil {
|
||||
m.mu.RUnlock()
|
||||
return nil, nil, errPick
|
||||
}
|
||||
if auth == nil {
|
||||
if selected == nil {
|
||||
m.mu.RUnlock()
|
||||
return nil, nil, &Error{Code: "auth_not_found", Message: "selector returned no auth"}
|
||||
}
|
||||
return auth, executor, nil
|
||||
authCopy := selected.Clone()
|
||||
m.mu.RUnlock()
|
||||
return authCopy, executor, nil
|
||||
}
|
||||
|
||||
func (m *Manager) persist(ctx context.Context, auth *Auth) error {
|
||||
|
||||
@@ -1,125 +0,0 @@
package auth

import (
    "context"
    "strings"

    conversation "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web/conversation"
    cliproxyexecutor "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/executor"
    log "github.com/sirupsen/logrus"
)

const (
    geminiWebProviderKey = "gemini-web"
)

type geminiWebStickySelector struct {
    base Selector
}

func NewGeminiWebStickySelector(base Selector) Selector {
    if selector, ok := base.(*geminiWebStickySelector); ok {
        return selector
    }
    if base == nil {
        base = &RoundRobinSelector{}
    }
    return &geminiWebStickySelector{base: base}
}

func (m *Manager) EnableGeminiWebStickySelector() {
    if m == nil {
        return
    }
    m.mu.Lock()
    defer m.mu.Unlock()
    if _, ok := m.selector.(*geminiWebStickySelector); ok {
        return
    }
    m.selector = NewGeminiWebStickySelector(m.selector)
}

func (s *geminiWebStickySelector) Pick(ctx context.Context, provider, model string, opts cliproxyexecutor.Options, auths []*Auth) (*Auth, error) {
    if !strings.EqualFold(provider, geminiWebProviderKey) {
        if opts.Metadata != nil {
            delete(opts.Metadata, conversation.MetadataMatchKey)
        }
        return s.base.Pick(ctx, provider, model, opts, auths)
    }

    messages := extractGeminiWebMessages(opts.Metadata)
    if len(messages) >= 2 {
        normalizedModel := conversation.NormalizeModel(model)
        candidates := conversation.BuildLookupHashes(normalizedModel, messages)
        for _, candidate := range candidates {
            record, ok, err := conversation.LookupMatch(candidate.Hash)
            if err != nil {
                log.Warnf("gemini-web selector: lookup failed for hash %s: %v", candidate.Hash, err)
                continue
            }
            if !ok {
                continue
            }
            label := strings.TrimSpace(record.AccountLabel)
            if label == "" {
                continue
            }
            auth := findAuthByLabel(auths, label)
            if auth != nil {
                if opts.Metadata != nil {
                    opts.Metadata[conversation.MetadataMatchKey] = &conversation.MatchResult{
                        Hash: candidate.Hash,
                        Record: record,
                        Model: normalizedModel,
                    }
                }
                return auth, nil
            }
            _ = conversation.RemoveMatchForLabel(candidate.Hash, label)
        }
    }

    return s.base.Pick(ctx, provider, model, opts, auths)
}

func extractGeminiWebMessages(metadata map[string]any) []conversation.Message {
    if metadata == nil {
        return nil
    }
    raw, ok := metadata[conversation.MetadataMessagesKey]
    if !ok {
        return nil
    }
    switch v := raw.(type) {
    case []conversation.Message:
        return v
    case *[]conversation.Message:
        if v == nil {
            return nil
        }
        return *v
    default:
        return nil
    }
}

func findAuthByLabel(auths []*Auth, label string) *Auth {
    if len(auths) == 0 {
        return nil
    }
    normalized := strings.ToLower(strings.TrimSpace(label))
    for _, auth := range auths {
        if auth == nil {
            continue
        }
        if strings.ToLower(strings.TrimSpace(auth.Label)) == normalized {
            return auth
        }
        if auth.Metadata != nil {
            if v, ok := auth.Metadata["label"].(string); ok && strings.ToLower(strings.TrimSpace(v)) == normalized {
                return auth
            }
        }
    }
    return nil
}

@@ -134,24 +134,6 @@ func (a *Auth) AccountInfo() (string, string) {
    if a == nil {
        return "", ""
    }
    // For Gemini Web, prefer explicit cookie label for stability.
    if strings.ToLower(a.Provider) == "gemini-web" {
        // Prefer explicit label written into auth file (e.g., gemini-web-<hash>)
        if a.Metadata != nil {
            if v, ok := a.Metadata["label"].(string); ok && strings.TrimSpace(v) != "" {
                return "cookie", strings.TrimSpace(v)
            }
        }
        // Minimal fallback to cookie value for backward compatibility
        if a.Metadata != nil {
            if v, ok := a.Metadata["secure_1psid"].(string); ok && v != "" {
                return "cookie", v
            }
            if v, ok := a.Metadata["__Secure-1PSID"].(string); ok && v != "" {
                return "cookie", v
            }
        }
    }
    // For Gemini CLI, include project ID in the OAuth account info if present.
    if strings.ToLower(a.Provider) == "gemini-cli" {
        if a.Metadata != nil {

@@ -14,8 +14,6 @@ import (

    "github.com/router-for-me/CLIProxyAPI/v6/internal/api"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/config"
    geminiwebclient "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web"
    conversation "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web/conversation"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/runtime/executor"
    _ "github.com/router-for-me/CLIProxyAPI/v6/internal/usage"

@@ -207,23 +205,6 @@ func (s *Service) applyCoreAuthRemoval(ctx context.Context, id string) {
    }
    GlobalModelRegistry().UnregisterClient(id)
    if existing, ok := s.coreManager.GetByID(id); ok && existing != nil {
        if strings.EqualFold(existing.Provider, "gemini-web") {
            // Prefer the stable cookie label stored in metadata when available.
            var label string
            if existing.Metadata != nil {
                if v, ok := existing.Metadata["label"].(string); ok {
                    label = strings.TrimSpace(v)
                }
            }
            if label == "" {
                label = strings.TrimSpace(existing.Label)
            }
            if label != "" {
                if err := conversation.RemoveMatchesByLabel(label); err != nil {
                    log.Debugf("failed to remove gemini web sticky entries for %s: %v", label, err)
                }
            }
        }
        existing.Disabled = true
        existing.Status = coreauth.StatusDisabled
        if _, err := s.coreManager.Update(ctx, existing); err != nil {

@@ -271,9 +252,6 @@ func (s *Service) ensureExecutorsForAuth(a *coreauth.Auth) {
        s.coreManager.RegisterExecutor(executor.NewGeminiExecutor(s.cfg))
    case "gemini-cli":
        s.coreManager.RegisterExecutor(executor.NewGeminiCLIExecutor(s.cfg))
    case "gemini-web":
        s.coreManager.RegisterExecutor(executor.NewGeminiWebExecutor(s.cfg))
        s.coreManager.EnableGeminiWebStickySelector()
    case "claude":
        s.coreManager.RegisterExecutor(executor.NewClaudeExecutor(s.cfg))
    case "codex":

@@ -536,8 +514,6 @@ func (s *Service) registerModelsForAuth(a *coreauth.Auth) {
        models = registry.GetGeminiModels()
    case "gemini-cli":
        models = registry.GetGeminiCLIModels()
    case "gemini-web":
        models = geminiwebclient.GetGeminiWebAliasedModels()
    case "claude":
        models = registry.GetClaudeModels()
    case "codex":