From 67e0b71c1849ffd512dad8f78cf86e6a22e13815 Mon Sep 17 00:00:00 2001
From: Luis Pater
Date: Wed, 3 Sep 2025 01:33:26 +0800
Subject: [PATCH] Add Codex load balancing documentation and refine JSON
 handling logic

- Updated README and README_CN to include a guide for configuring
  multiple-account load balancing with CLI Proxy API.
- Enhanced JSON handling in the gemini translators by differentiating
  object and string outputs.
- Added commented-out debug logging for Gemini CLI request conversion.
---
 README.md                                     | 23 +++++++++++++++++++
 README_CN.md                                  | 23 +++++++++++++++++++
 .../responses/cli_openai-responses_request.go |  1 +
 .../gemini_openai-responses_request.go        |  6 ++++-
 4 files changed, 52 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 5785c862..1fe596f8 100644
--- a/README.md
+++ b/README.md
@@ -430,6 +430,29 @@ export ANTHROPIC_MODEL=qwen3-coder-plus
 export ANTHROPIC_SMALL_FAST_MODEL=qwen3-coder-flash
 ```
 
+## Codex with multiple-account load balancing
+
+Start the CLI Proxy API server, then edit the `~/.codex/config.toml` and `~/.codex/auth.json` files.
+
+config.toml:
+```toml
+model_provider = "cliproxyapi"
+model = "gpt-5" # You can use any of the models that we support.
+model_reasoning_effort = "high"
+
+[model_providers.cliproxyapi]
+name = "cliproxyapi"
+base_url = "http://127.0.0.1:8317/v1"
+wire_api = "responses"
+```
+
+auth.json:
+```json
+{
+  "OPENAI_API_KEY": "sk-dummy"
+}
+```
+
 ## Run with Docker
 
 Run the following command to login (Gemini OAuth on port 8085):

diff --git a/README_CN.md b/README_CN.md
index 626c5b29..a064c43b 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -424,6 +424,29 @@ export ANTHROPIC_MODEL=qwen3-coder-plus
 export ANTHROPIC_SMALL_FAST_MODEL=qwen3-coder-flash
 ```
 
+## Codex 多账户负载均衡
+
+启动 CLI Proxy API 服务器，然后修改 `~/.codex/config.toml` 和 `~/.codex/auth.json` 文件。
+
+config.toml:
+```toml
+model_provider = "cliproxyapi"
+model = "gpt-5" # 你可以使用任何我们支持的模型
+model_reasoning_effort = "high"
+
+[model_providers.cliproxyapi]
+name = "cliproxyapi"
+base_url = "http://127.0.0.1:8317/v1"
+wire_api = "responses"
+```
+
+auth.json:
+```json
+{
+  "OPENAI_API_KEY": "sk-dummy"
+}
+```
+
 ## 使用 Docker 运行
 

diff --git a/internal/translator/gemini-cli/openai/responses/cli_openai-responses_request.go b/internal/translator/gemini-cli/openai/responses/cli_openai-responses_request.go
index 2b7457e9..0331749d 100644
--- a/internal/translator/gemini-cli/openai/responses/cli_openai-responses_request.go
+++ b/internal/translator/gemini-cli/openai/responses/cli_openai-responses_request.go
@@ -10,5 +10,6 @@ import (
 func ConvertOpenAIResponsesRequestToGeminiCLI(modelName string, inputRawJSON []byte, stream bool) []byte {
 	rawJSON := bytes.Clone(inputRawJSON)
 	rawJSON = ConvertOpenAIResponsesRequestToGemini(modelName, rawJSON, stream)
+	// log.Debug(string(rawJSON))
 	return ConvertGeminiRequestToGeminiCLI(modelName, rawJSON, stream)
 }

diff --git a/internal/translator/gemini/openai/responses/gemini_openai-responses_request.go b/internal/translator/gemini/openai/responses/gemini_openai-responses_request.go
index 33fa2897..f78a8e0d 100644
--- a/internal/translator/gemini/openai/responses/gemini_openai-responses_request.go
+++ b/internal/translator/gemini/openai/responses/gemini_openai-responses_request.go
@@ -109,7 +109,11 @@ func ConvertOpenAIResponsesRequestToGemini(modelName string, inputRawJSON []byte
 		// Parse output JSON string and set as response content
 		if output != "" {
 			outputResult := gjson.Parse(output)
-			functionResponse, _ = sjson.SetRaw(functionResponse, "functionResponse.response.content", outputResult.Raw)
"functionResponse.response.content", outputResult.Raw) + if outputResult.IsObject() { + functionResponse, _ = sjson.SetRaw(functionResponse, "functionResponse.response.content", outputResult.String()) + } else { + functionResponse, _ = sjson.Set(functionResponse, "functionResponse.response.content", outputResult.String()) + } } functionContent, _ = sjson.SetRaw(functionContent, "parts.-1", functionResponse)