Mirror of https://github.com/router-for-me/CLIProxyAPI.git, synced 2026-02-14 10:20:51 +08:00

Compare commits (26 commits):
- ae1e8a5191
- b3ccc55f09
- 1ce56d7413
- 41a78be3a2
- 1ff5de9a31
- 46a6853046
- 4b2d40bd67
- 575881cb59
- f361b2716d
- 58e09f8e5f
- a146c6c0aa
- 4c133d3ea9
- dc279de443
- bf1634bda0
- 166d2d24d9
- 4cbcc835d1
- b93026d83a
- 5ed2133ff9
- 1510bfcb6f
- c6bd91b86b
- 349ddcaa89
- bb9fe52f1e
- afe4c1bfb7
- 865af9f19e
- 2b97cb98b5
- 938a799263
@@ -146,6 +146,10 @@ A Windows tray application implemented using PowerShell scripts, without relying

霖君 is a cross-platform desktop application for managing AI programming assistants, supporting macOS, Windows, and Linux systems. Unified management of Claude Code, Gemini CLI, OpenAI Codex, Qwen Code, and other AI coding tools, with local proxy for multi-account quota tracking and one-click configuration.

### [CLIProxyAPI Dashboard](https://github.com/itsmylife44/cliproxyapi-dashboard)

A modern web-based management dashboard for CLIProxyAPI built with Next.js, React, and PostgreSQL. Features real-time log streaming, structured configuration editing, API key management, OAuth provider integration for Claude/Gemini/Codex, usage analytics, container management, and config sync with OpenCode via companion plugin - no manual YAML editing needed.

> [!NOTE]
> If you developed a project based on CLIProxyAPI, please open a PR to add it to this list.
(Chinese README)
@@ -145,6 +145,10 @@ Windows tray application, implemented with PowerShell scripts, without relying on any third-party

霖君 is a cross-platform desktop application for managing AI programming assistants, supporting macOS, Windows, and Linux. It provides unified management of Claude Code, Gemini CLI, OpenAI Codex, Qwen Code, and other AI coding tools, with a local proxy for multi-account quota tracking and one-click configuration.

### [CLIProxyAPI Dashboard](https://github.com/itsmylife44/cliproxyapi-dashboard)

A modern web-based management dashboard for CLIProxyAPI, built with Next.js, React, and PostgreSQL. It supports real-time log streaming, structured configuration editing, API key management, OAuth provider integration for Claude/Gemini/Codex, usage analytics, and container management, and it can sync configuration with OpenCode via a companion plugin, with no manual YAML editing required.

> [!NOTE]
> If you have developed a project based on CLIProxyAPI, please submit a PR (pull request) to add it to this list.
@@ -742,6 +742,20 @@ func GetOpenAIModels() []*ModelInfo {
            SupportedParameters: []string{"tools"},
            Thinking:            &ThinkingSupport{Levels: []string{"low", "medium", "high", "xhigh"}},
        },
        {
            ID:                  "gpt-5.3-codex-spark",
            Object:              "model",
            Created:             1770912000,
            OwnedBy:             "openai",
            Type:                "openai",
            Version:             "gpt-5.3",
            DisplayName:         "GPT 5.3 Codex Spark",
            Description:         "Ultra-fast coding model.",
            ContextLength:       128000,
            MaxCompletionTokens: 128000,
            SupportedParameters: []string{"tools"},
            Thinking:            &ThinkingSupport{Levels: []string{"low", "medium", "high", "xhigh"}},
        },
    }
}
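As an aside for readers of these registry entries, the `Created` fields are plain Unix timestamps in seconds. A minimal standalone sketch (not part of the diff; the variable name is illustrative) for decoding the value used by the new `gpt-5.3-codex-spark` entry:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Created value from the gpt-5.3-codex-spark registry entry above.
	created := int64(1770912000)
	fmt.Println(time.Unix(created, 0).UTC()) // 2026-02-12 16:00:00 +0000 UTC
}
```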
@@ -814,6 +828,7 @@ func GetIFlowModels() []*ModelInfo {
        {ID: "kimi-k2-0905", DisplayName: "Kimi-K2-Instruct-0905", Description: "Moonshot Kimi K2 instruct 0905", Created: 1757030400},
        {ID: "glm-4.6", DisplayName: "GLM-4.6", Description: "Zhipu GLM 4.6 general model", Created: 1759190400, Thinking: iFlowThinkingSupport},
        {ID: "glm-4.7", DisplayName: "GLM-4.7", Description: "Zhipu GLM 4.7 general model", Created: 1766448000, Thinking: iFlowThinkingSupport},
        {ID: "glm-5", DisplayName: "GLM-5", Description: "Zhipu GLM 5 general model", Created: 1770768000, Thinking: iFlowThinkingSupport},
        {ID: "kimi-k2", DisplayName: "Kimi-K2", Description: "Moonshot Kimi K2 general model", Created: 1752192000},
        {ID: "kimi-k2-thinking", DisplayName: "Kimi-K2-Thinking", Description: "Moonshot Kimi K2 thinking model", Created: 1762387200},
        {ID: "deepseek-v3.2-chat", DisplayName: "DeepSeek-V3.2", Description: "DeepSeek V3.2 Chat", Created: 1764576000},
@@ -828,6 +843,7 @@ func GetIFlowModels() []*ModelInfo {
        {ID: "qwen3-235b", DisplayName: "Qwen3-235B-A22B", Description: "Qwen3 235B A22B", Created: 1753401600},
        {ID: "minimax-m2", DisplayName: "MiniMax-M2", Description: "MiniMax M2", Created: 1758672000, Thinking: iFlowThinkingSupport},
        {ID: "minimax-m2.1", DisplayName: "MiniMax-M2.1", Description: "MiniMax M2.1", Created: 1766448000, Thinking: iFlowThinkingSupport},
        {ID: "minimax-m2.5", DisplayName: "MiniMax-M2.5", Description: "MiniMax M2.5", Created: 1770825600, Thinking: iFlowThinkingSupport},
        {ID: "iflow-rome-30ba3b", DisplayName: "iFlow-ROME", Description: "iFlow Rome 30BA3B model", Created: 1736899200},
        {ID: "kimi-k2.5", DisplayName: "Kimi-K2.5", Description: "Moonshot Kimi K2.5", Created: 1769443200, Thinking: iFlowThinkingSupport},
    }
@@ -866,7 +882,7 @@ func GetAntigravityModelConfig() map[string]*AntigravityModelConfig {
        "gemini-3-flash":             {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"minimal", "low", "medium", "high"}}},
        "claude-sonnet-4-5-thinking": {Thinking: &ThinkingSupport{Min: 1024, Max: 128000, ZeroAllowed: true, DynamicAllowed: true}, MaxCompletionTokens: 64000},
        "claude-opus-4-5-thinking":   {Thinking: &ThinkingSupport{Min: 1024, Max: 128000, ZeroAllowed: true, DynamicAllowed: true}, MaxCompletionTokens: 64000},
        "claude-opus-4-6-thinking":   {Thinking: &ThinkingSupport{Min: 1024, Max: 128000, ZeroAllowed: true, DynamicAllowed: true}, MaxCompletionTokens: 128000},
        "claude-opus-4-6-thinking":   {Thinking: &ThinkingSupport{Min: 1024, Max: 128000, ZeroAllowed: true, DynamicAllowed: true}, MaxCompletionTokens: 64000},
        "claude-sonnet-4-5":          {MaxCompletionTokens: 64000},
        "gpt-oss-120b-medium":        {},
        "tab_flash_lite_preview":     {},
@@ -0,0 +1,159 @@
package executor

import (
    "context"
    "encoding/json"
    "io"
    "testing"

    cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)

func TestAntigravityBuildRequest_SanitizesGeminiToolSchema(t *testing.T) {
    body := buildRequestBodyFromPayload(t, "gemini-2.5-pro")

    decl := extractFirstFunctionDeclaration(t, body)
    if _, ok := decl["parametersJsonSchema"]; ok {
        t.Fatalf("parametersJsonSchema should be renamed to parameters")
    }

    params, ok := decl["parameters"].(map[string]any)
    if !ok {
        t.Fatalf("parameters missing or invalid type")
    }
    assertSchemaSanitizedAndPropertyPreserved(t, params)
}

func TestAntigravityBuildRequest_SanitizesAntigravityToolSchema(t *testing.T) {
    body := buildRequestBodyFromPayload(t, "claude-opus-4-6")

    decl := extractFirstFunctionDeclaration(t, body)
    params, ok := decl["parameters"].(map[string]any)
    if !ok {
        t.Fatalf("parameters missing or invalid type")
    }
    assertSchemaSanitizedAndPropertyPreserved(t, params)
}

func buildRequestBodyFromPayload(t *testing.T, modelName string) map[string]any {
    t.Helper()

    executor := &AntigravityExecutor{}
    auth := &cliproxyauth.Auth{}
    payload := []byte(`{
        "request": {
            "tools": [
                {
                    "function_declarations": [
                        {
                            "name": "tool_1",
                            "parametersJsonSchema": {
                                "$schema": "http://json-schema.org/draft-07/schema#",
                                "$id": "root-schema",
                                "type": "object",
                                "properties": {
                                    "$id": {"type": "string"},
                                    "arg": {
                                        "type": "object",
                                        "prefill": "hello",
                                        "properties": {
                                            "mode": {
                                                "type": "string",
                                                "enum": ["a", "b"],
                                                "enumTitles": ["A", "B"]
                                            }
                                        }
                                    }
                                },
                                "patternProperties": {
                                    "^x-": {"type": "string"}
                                }
                            }
                        }
                    ]
                }
            ]
        }
    }`)

    req, err := executor.buildRequest(context.Background(), auth, "token", modelName, payload, false, "", "https://example.com")
    if err != nil {
        t.Fatalf("buildRequest error: %v", err)
    }

    raw, err := io.ReadAll(req.Body)
    if err != nil {
        t.Fatalf("read request body error: %v", err)
    }

    var body map[string]any
    if err := json.Unmarshal(raw, &body); err != nil {
        t.Fatalf("unmarshal request body error: %v, body=%s", err, string(raw))
    }
    return body
}

func extractFirstFunctionDeclaration(t *testing.T, body map[string]any) map[string]any {
    t.Helper()

    request, ok := body["request"].(map[string]any)
    if !ok {
        t.Fatalf("request missing or invalid type")
    }
    tools, ok := request["tools"].([]any)
    if !ok || len(tools) == 0 {
        t.Fatalf("tools missing or empty")
    }
    tool, ok := tools[0].(map[string]any)
    if !ok {
        t.Fatalf("first tool invalid type")
    }
    decls, ok := tool["function_declarations"].([]any)
    if !ok || len(decls) == 0 {
        t.Fatalf("function_declarations missing or empty")
    }
    decl, ok := decls[0].(map[string]any)
    if !ok {
        t.Fatalf("first function declaration invalid type")
    }
    return decl
}

func assertSchemaSanitizedAndPropertyPreserved(t *testing.T, params map[string]any) {
    t.Helper()

    if _, ok := params["$id"]; ok {
        t.Fatalf("root $id should be removed from schema")
    }
    if _, ok := params["patternProperties"]; ok {
        t.Fatalf("patternProperties should be removed from schema")
    }

    props, ok := params["properties"].(map[string]any)
    if !ok {
        t.Fatalf("properties missing or invalid type")
    }
    if _, ok := props["$id"]; !ok {
        t.Fatalf("property named $id should be preserved")
    }

    arg, ok := props["arg"].(map[string]any)
    if !ok {
        t.Fatalf("arg property missing or invalid type")
    }
    if _, ok := arg["prefill"]; ok {
        t.Fatalf("prefill should be removed from nested schema")
    }

    argProps, ok := arg["properties"].(map[string]any)
    if !ok {
        t.Fatalf("arg.properties missing or invalid type")
    }
    mode, ok := argProps["mode"].(map[string]any)
    if !ok {
        t.Fatalf("mode property missing or invalid type")
    }
    if _, ok := mode["enumTitles"]; ok {
        t.Fatalf("enumTitles should be removed from nested schema")
    }
}
@@ -28,8 +28,8 @@ import (
)

const (
    codexClientVersion = "0.98.0"
    codexUserAgent     = "codex_cli_rs/0.98.0 (Mac OS 26.0.1; arm64) Apple_Terminal/464"
    codexClientVersion = "0.101.0"
    codexUserAgent     = "codex_cli_rs/0.101.0 (Mac OS 26.0.1; arm64) Apple_Terminal/464"
)

var dataTag = []byte("data:")
@@ -344,7 +344,8 @@ func ConvertClaudeRequestToAntigravity(modelName string, inputRawJSON []byte, _
    // Inject interleaved thinking hint when both tools and thinking are active
    hasTools := toolDeclCount > 0
    thinkingResult := gjson.GetBytes(rawJSON, "thinking")
    hasThinking := thinkingResult.Exists() && thinkingResult.IsObject() && thinkingResult.Get("type").String() == "enabled"
    thinkingType := thinkingResult.Get("type").String()
    hasThinking := thinkingResult.Exists() && thinkingResult.IsObject() && (thinkingType == "enabled" || thinkingType == "adaptive")
    isClaudeThinking := util.IsClaudeThinkingModel(modelName)

    if hasTools && hasThinking && isClaudeThinking {
@@ -377,12 +378,18 @@ func ConvertClaudeRequestToAntigravity(modelName string, inputRawJSON []byte, _

    // Map Anthropic thinking -> Gemini thinkingBudget/include_thoughts when type==enabled
    if t := gjson.GetBytes(rawJSON, "thinking"); enableThoughtTranslate && t.Exists() && t.IsObject() {
        if t.Get("type").String() == "enabled" {
        switch t.Get("type").String() {
        case "enabled":
            if b := t.Get("budget_tokens"); b.Exists() && b.Type == gjson.Number {
                budget := int(b.Int())
                out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", budget)
                out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.includeThoughts", true)
            }
        case "adaptive":
            // Keep adaptive as a high level sentinel; ApplyThinking resolves it
            // to model-specific max capability.
            out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingLevel", "high")
            out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.includeThoughts", true)
        }
    }
    if v := gjson.GetBytes(rawJSON, "temperature"); v.Exists() && v.Type == gjson.Number {
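To make the enabled/adaptive split above concrete, here is a minimal, self-contained sketch of the same mapping in isolation. It is not the project's translator: the `mapThinking` helper and the bare `generationConfig.*` paths are illustrative (this Antigravity variant prefixes them with `request.`), but the gjson/sjson calls mirror the diff.

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

// mapThinking copies a Claude-style "thinking" object onto Gemini-style
// generationConfig.thinkingConfig fields, following the switch added above.
func mapThinking(claudeJSON, out string) string {
	t := gjson.Get(claudeJSON, "thinking")
	if !t.Exists() || !t.IsObject() {
		return out
	}
	switch t.Get("type").String() {
	case "enabled":
		// Explicit budget: forward it verbatim.
		if b := t.Get("budget_tokens"); b.Exists() && b.Type == gjson.Number {
			out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", int(b.Int()))
			out, _ = sjson.Set(out, "generationConfig.thinkingConfig.includeThoughts", true)
		}
	case "adaptive":
		// No budget given: emit the "high" sentinel and let ApplyThinking
		// clamp it to whatever the target model actually supports.
		out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingLevel", "high")
		out, _ = sjson.Set(out, "generationConfig.thinkingConfig.includeThoughts", true)
	}
	return out
}

func main() {
	fmt.Println(mapThinking(`{"thinking":{"type":"enabled","budget_tokens":20000}}`, `{}`))
	fmt.Println(mapThinking(`{"thinking":{"type":"adaptive"}}`, `{}`))
}
```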
@@ -222,6 +222,10 @@ func ConvertClaudeRequestToCodex(modelName string, inputRawJSON []byte, _ bool)
                reasoningEffort = effort
            }
        }
    case "adaptive":
        // Claude adaptive means "enable with max capacity"; keep it as highest level
        // and let ApplyThinking normalize per target model capability.
        reasoningEffort = string(thinking.LevelXHigh)
    case "disabled":
        if effort, ok := thinking.ConvertBudgetToLevel(0); ok && effort != "" {
            reasoningEffort = effort
@@ -27,6 +27,9 @@ func ConvertOpenAIResponsesRequestToCodex(modelName string, inputRawJSON []byte,
    rawJSON, _ = sjson.DeleteBytes(rawJSON, "top_p")
    rawJSON, _ = sjson.DeleteBytes(rawJSON, "service_tier")

    // Delete the user field as it is not supported by the Codex upstream.
    rawJSON, _ = sjson.DeleteBytes(rawJSON, "user")

    // Convert role "system" to "developer" in input array to comply with Codex API requirements.
    rawJSON = convertSystemRoleToDeveloper(rawJSON)
@@ -263,3 +263,20 @@ func TestConvertSystemRoleToDeveloper_AssistantRole(t *testing.T) {
        t.Errorf("Expected third role 'assistant', got '%s'", thirdRole.String())
    }
}

func TestUserFieldDeletion(t *testing.T) {
    inputJSON := []byte(`{
        "model": "gpt-5.2",
        "user": "test-user",
        "input": [{"role": "user", "content": "Hello"}]
    }`)

    output := ConvertOpenAIResponsesRequestToCodex("gpt-5.2", inputJSON, false)
    outputStr := string(output)

    // Verify user field is deleted
    userField := gjson.Get(outputStr, "user")
    if userField.Exists() {
        t.Errorf("user field should be deleted, but it was found with value: %s", userField.Raw)
    }
}
@@ -173,12 +173,18 @@ func ConvertClaudeRequestToCLI(modelName string, inputRawJSON []byte, _ bool) []

    // Map Anthropic thinking -> Gemini thinkingBudget/include_thoughts when type==enabled
    if t := gjson.GetBytes(rawJSON, "thinking"); t.Exists() && t.IsObject() {
        if t.Get("type").String() == "enabled" {
        switch t.Get("type").String() {
        case "enabled":
            if b := t.Get("budget_tokens"); b.Exists() && b.Type == gjson.Number {
                budget := int(b.Int())
                out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingBudget", budget)
                out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.includeThoughts", true)
            }
        case "adaptive":
            // Keep adaptive as a high level sentinel; ApplyThinking resolves it
            // to model-specific max capability.
            out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.thinkingLevel", "high")
            out, _ = sjson.Set(out, "request.generationConfig.thinkingConfig.includeThoughts", true)
        }
    }
    if v := gjson.GetBytes(rawJSON, "temperature"); v.Exists() && v.Type == gjson.Number {
@@ -154,12 +154,18 @@ func ConvertClaudeRequestToGemini(modelName string, inputRawJSON []byte, _ bool)
    // Map Anthropic thinking -> Gemini thinkingBudget/include_thoughts when enabled
    // Translator only does format conversion, ApplyThinking handles model capability validation.
    if t := gjson.GetBytes(rawJSON, "thinking"); t.Exists() && t.IsObject() {
        if t.Get("type").String() == "enabled" {
        switch t.Get("type").String() {
        case "enabled":
            if b := t.Get("budget_tokens"); b.Exists() && b.Type == gjson.Number {
                budget := int(b.Int())
                out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingBudget", budget)
                out, _ = sjson.Set(out, "generationConfig.thinkingConfig.includeThoughts", true)
            }
        case "adaptive":
            // Keep adaptive as a high level sentinel; ApplyThinking resolves it
            // to model-specific max capability.
            out, _ = sjson.Set(out, "generationConfig.thinkingConfig.thinkingLevel", "high")
            out, _ = sjson.Set(out, "generationConfig.thinkingConfig.includeThoughts", true)
        }
    }
    if v := gjson.GetBytes(rawJSON, "temperature"); v.Exists() && v.Type == gjson.Number {
@@ -117,19 +117,29 @@ func ConvertOpenAIResponsesRequestToGemini(modelName string, inputRawJSON []byte
        switch itemType {
        case "message":
            if strings.EqualFold(itemRole, "system") {
                if contentArray := item.Get("content"); contentArray.Exists() && contentArray.IsArray() {
                    var builder strings.Builder
                    contentArray.ForEach(func(_, contentItem gjson.Result) bool {
                        text := contentItem.Get("text").String()
                        if builder.Len() > 0 && text != "" {
                            builder.WriteByte('\n')
                        }
                        builder.WriteString(text)
                        return true
                    })
                    if !gjson.Get(out, "system_instruction").Exists() {
                        systemInstr := `{"parts":[{"text":""}]}`
                        systemInstr, _ = sjson.Set(systemInstr, "parts.0.text", builder.String())
                if contentArray := item.Get("content"); contentArray.Exists() {
                    systemInstr := ""
                    if systemInstructionResult := gjson.Get(out, "system_instruction"); systemInstructionResult.Exists() {
                        systemInstr = systemInstructionResult.Raw
                    } else {
                        systemInstr = `{"parts":[]}`
                    }

                    if contentArray.IsArray() {
                        contentArray.ForEach(func(_, contentItem gjson.Result) bool {
                            part := `{"text":""}`
                            text := contentItem.Get("text").String()
                            part, _ = sjson.Set(part, "text", text)
                            systemInstr, _ = sjson.SetRaw(systemInstr, "parts.-1", part)
                            return true
                        })
                    } else if contentArray.Type == gjson.String {
                        part := `{"text":""}`
                        part, _ = sjson.Set(part, "text", contentArray.String())
                        systemInstr, _ = sjson.SetRaw(systemInstr, "parts.-1", part)
                    }

                    if systemInstr != `{"parts":[]}` {
                        out, _ = sjson.SetRaw(out, "system_instruction", systemInstr)
                    }
                }
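The new branch leans on sjson's `-1` array path to append one part per system-message chunk instead of concatenating text into a single part. A tiny standalone sketch of that idiom (the example strings and the `{"parts":[]}` seed are illustrative, not from the diff):

```go
package main

import (
	"fmt"

	"github.com/tidwall/sjson"
)

func main() {
	// Start from an empty system_instruction skeleton and append parts,
	// mirroring the sjson.SetRaw(systemInstr, "parts.-1", part) calls above.
	sys := `{"parts":[]}`
	for _, text := range []string{"You are terse.", "Answer in Go."} {
		part, _ := sjson.Set(`{"text":""}`, "text", text)
		sys, _ = sjson.SetRaw(sys, "parts.-1", part)
	}
	fmt.Println(sys)
	// Prints: {"parts":[{"text":"You are terse."},{"text":"Answer in Go."}]}
}
```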
@@ -236,8 +246,22 @@ func ConvertOpenAIResponsesRequestToGemini(modelName string, inputRawJSON []byte
                })

                flush()
            }
            } else if contentArray.Type == gjson.String {
                effRole := "user"
                if itemRole != "" {
                    switch strings.ToLower(itemRole) {
                    case "assistant", "model":
                        effRole = "model"
                    default:
                        effRole = strings.ToLower(itemRole)
                    }
                }

                one := `{"role":"","parts":[{"text":""}]}`
                one, _ = sjson.Set(one, "role", effRole)
                one, _ = sjson.Set(one, "parts.0.text", contentArray.String())
                out, _ = sjson.SetRaw(out, "contents.-1", one)
            }
        case "function_call":
            // Handle function calls - convert to model message with functionCall
            name := item.Get("name").String()
@@ -75,6 +75,10 @@ func ConvertClaudeRequestToOpenAI(modelName string, inputRawJSON []byte, stream
                out, _ = sjson.Set(out, "reasoning_effort", effort)
            }
        }
    case "adaptive":
        // Claude adaptive means "enable with max capacity"; keep it as highest level
        // and let ApplyThinking normalize per target model capability.
        out, _ = sjson.Set(out, "reasoning_effort", string(thinking.LevelXHigh))
    case "disabled":
        if effort, ok := thinking.ConvertBudgetToLevel(0); ok && effort != "" {
            out, _ = sjson.Set(out, "reasoning_effort", effort)
@@ -428,8 +428,9 @@ func flattenTypeArrays(jsonStr string) string {

func removeUnsupportedKeywords(jsonStr string) string {
    keywords := append(unsupportedConstraints,
        "$schema", "$defs", "definitions", "const", "$ref", "additionalProperties",
        "propertyNames", // Gemini doesn't support property name validation
        "$schema", "$defs", "definitions", "const", "$ref", "$id", "additionalProperties",
        "propertyNames", "patternProperties", // Gemini doesn't support these schema keywords
        "enumTitles", "prefill", // Claude/OpenCode schema metadata fields unsupported by Gemini
    )

    deletePaths := make([]string, 0)
@@ -870,6 +870,57 @@ func TestCleanJSONSchemaForAntigravity_BooleanEnumToString(t *testing.T) {
    }
}

func TestCleanJSONSchemaForGemini_RemovesGeminiUnsupportedMetadataFields(t *testing.T) {
    input := `{
        "$schema": "http://json-schema.org/draft-07/schema#",
        "$id": "root-schema",
        "type": "object",
        "properties": {
            "payload": {
                "type": "object",
                "prefill": "hello",
                "properties": {
                    "mode": {
                        "type": "string",
                        "enum": ["a", "b"],
                        "enumTitles": ["A", "B"]
                    }
                },
                "patternProperties": {
                    "^x-": {"type": "string"}
                }
            },
            "$id": {
                "type": "string",
                "description": "property name should not be removed"
            }
        }
    }`

    expected := `{
        "type": "object",
        "properties": {
            "payload": {
                "type": "object",
                "properties": {
                    "mode": {
                        "type": "string",
                        "enum": ["a", "b"],
                        "description": "Allowed: a, b"
                    }
                }
            },
            "$id": {
                "type": "string",
                "description": "property name should not be removed"
            }
        }
    }`

    result := CleanJSONSchemaForGemini(input)
    compareJSON(t, expected, result)
}

func TestRemoveExtensionFields(t *testing.T) {
    tests := []struct {
        name string
@@ -5,6 +5,7 @@ import (
    "fmt"
    "os"
    "path/filepath"
    "strconv"
    "strings"
    "time"
@@ -92,6 +93,9 @@ func (s *FileSynthesizer) Synthesize(ctx *SynthesisContext) ([]*coreauth.Auth, e
            status = coreauth.StatusDisabled
        }

        // Read per-account excluded models from the OAuth JSON file
        perAccountExcluded := extractExcludedModelsFromMetadata(metadata)

        a := &coreauth.Auth{
            ID:       id,
            Provider: provider,
@@ -108,11 +112,23 @@ func (s *FileSynthesizer) Synthesize(ctx *SynthesisContext) ([]*coreauth.Auth, e
            CreatedAt: now,
            UpdatedAt: now,
        }
        ApplyAuthExcludedModelsMeta(a, cfg, nil, "oauth")
        // Read priority from auth file
        if rawPriority, ok := metadata["priority"]; ok {
            switch v := rawPriority.(type) {
            case float64:
                a.Attributes["priority"] = strconv.Itoa(int(v))
            case string:
                priority := strings.TrimSpace(v)
                if _, errAtoi := strconv.Atoi(priority); errAtoi == nil {
                    a.Attributes["priority"] = priority
                }
            }
        }
        ApplyAuthExcludedModelsMeta(a, cfg, perAccountExcluded, "oauth")
        if provider == "gemini-cli" {
            if virtuals := SynthesizeGeminiVirtualAuths(a, metadata, now); len(virtuals) > 0 {
                for _, v := range virtuals {
                    ApplyAuthExcludedModelsMeta(v, cfg, nil, "oauth")
                    ApplyAuthExcludedModelsMeta(v, cfg, perAccountExcluded, "oauth")
                }
                out = append(out, a)
                out = append(out, virtuals...)
@@ -167,6 +183,10 @@ func SynthesizeGeminiVirtualAuths(primary *coreauth.Auth, metadata map[string]an
    if authPath != "" {
        attrs["path"] = authPath
    }
    // Propagate priority from primary auth to virtual auths
    if priorityVal, hasPriority := primary.Attributes["priority"]; hasPriority && priorityVal != "" {
        attrs["priority"] = priorityVal
    }
    metadataCopy := map[string]any{
        "email":      email,
        "project_id": projectID,
@@ -239,3 +259,40 @@ func buildGeminiVirtualID(baseID, projectID string) string {
    replacer := strings.NewReplacer("/", "_", "\\", "_", " ", "_")
    return fmt.Sprintf("%s::%s", baseID, replacer.Replace(project))
}

// extractExcludedModelsFromMetadata reads per-account excluded models from the OAuth JSON metadata.
// Supports both "excluded_models" and "excluded-models" keys, and accepts both []string and []interface{}.
func extractExcludedModelsFromMetadata(metadata map[string]any) []string {
    if metadata == nil {
        return nil
    }
    // Try both key formats
    raw, ok := metadata["excluded_models"]
    if !ok {
        raw, ok = metadata["excluded-models"]
    }
    if !ok || raw == nil {
        return nil
    }
    var stringSlice []string
    switch v := raw.(type) {
    case []string:
        stringSlice = v
    case []interface{}:
        stringSlice = make([]string, 0, len(v))
        for _, item := range v {
            if s, ok := item.(string); ok {
                stringSlice = append(stringSlice, s)
            }
        }
    default:
        return nil
    }
    result := make([]string, 0, len(stringSlice))
    for _, s := range stringSlice {
        if trimmed := strings.TrimSpace(s); trimmed != "" {
            result = append(result, trimmed)
        }
    }
    return result
}
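For orientation, the per-account fields the synthesizer now reads live directly in the OAuth auth JSON file. The sketch below is hypothetical: the key names come from this diff and its tests, the package name and values are illustrative, and the expected attributes are taken from the test expectations that follow (which show the merged list coming back lower-cased, de-duplicated, and sorted).

```go
// Package-level sketch only; not part of the diff.
package synthesizerexample

// sampleAuthJSON is a hypothetical OAuth auth file using the new fields.
const sampleAuthJSON = `{
	"type": "claude",
	"priority": " 10 ",
	"excluded_models": ["custom-model", "MODEL-B"]
}`

// With oauth-excluded-models configured as {"claude": ["shared", "model-b"]},
// the tests below expect the synthesized auth to carry:
//
//	Attributes["priority"]        = "10"                           // trimmed, numeric check passed
//	Attributes["excluded_models"] = "custom-model,model-b,shared"  // merged with the global list
```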
@@ -297,6 +297,117 @@ func TestFileSynthesizer_Synthesize_PrefixValidation(t *testing.T) {
    }
}

func TestFileSynthesizer_Synthesize_PriorityParsing(t *testing.T) {
    tests := []struct {
        name     string
        priority any
        want     string
        hasValue bool
    }{
        {
            name:     "string with spaces",
            priority: " 10 ",
            want:     "10",
            hasValue: true,
        },
        {
            name:     "number",
            priority: 8,
            want:     "8",
            hasValue: true,
        },
        {
            name:     "invalid string",
            priority: "1x",
            hasValue: false,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            tempDir := t.TempDir()
            authData := map[string]any{
                "type":     "claude",
                "priority": tt.priority,
            }
            data, _ := json.Marshal(authData)
            errWriteFile := os.WriteFile(filepath.Join(tempDir, "auth.json"), data, 0644)
            if errWriteFile != nil {
                t.Fatalf("failed to write auth file: %v", errWriteFile)
            }

            synth := NewFileSynthesizer()
            ctx := &SynthesisContext{
                Config:      &config.Config{},
                AuthDir:     tempDir,
                Now:         time.Now(),
                IDGenerator: NewStableIDGenerator(),
            }

            auths, errSynthesize := synth.Synthesize(ctx)
            if errSynthesize != nil {
                t.Fatalf("unexpected error: %v", errSynthesize)
            }
            if len(auths) != 1 {
                t.Fatalf("expected 1 auth, got %d", len(auths))
            }

            value, ok := auths[0].Attributes["priority"]
            if tt.hasValue {
                if !ok {
                    t.Fatal("expected priority attribute to be set")
                }
                if value != tt.want {
                    t.Fatalf("expected priority %q, got %q", tt.want, value)
                }
                return
            }
            if ok {
                t.Fatalf("expected priority attribute to be absent, got %q", value)
            }
        })
    }
}

func TestFileSynthesizer_Synthesize_OAuthExcludedModelsMerged(t *testing.T) {
    tempDir := t.TempDir()
    authData := map[string]any{
        "type":            "claude",
        "excluded_models": []string{"custom-model", "MODEL-B"},
    }
    data, _ := json.Marshal(authData)
    errWriteFile := os.WriteFile(filepath.Join(tempDir, "auth.json"), data, 0644)
    if errWriteFile != nil {
        t.Fatalf("failed to write auth file: %v", errWriteFile)
    }

    synth := NewFileSynthesizer()
    ctx := &SynthesisContext{
        Config: &config.Config{
            OAuthExcludedModels: map[string][]string{
                "claude": {"shared", "model-b"},
            },
        },
        AuthDir:     tempDir,
        Now:         time.Now(),
        IDGenerator: NewStableIDGenerator(),
    }

    auths, errSynthesize := synth.Synthesize(ctx)
    if errSynthesize != nil {
        t.Fatalf("unexpected error: %v", errSynthesize)
    }
    if len(auths) != 1 {
        t.Fatalf("expected 1 auth, got %d", len(auths))
    }

    got := auths[0].Attributes["excluded_models"]
    want := "custom-model,model-b,shared"
    if got != want {
        t.Fatalf("expected excluded_models %q, got %q", want, got)
    }
}

func TestSynthesizeGeminiVirtualAuths_NilInputs(t *testing.T) {
    now := time.Now()
@@ -533,6 +644,7 @@ func TestFileSynthesizer_Synthesize_MultiProjectGemini(t *testing.T) {
        "type":       "gemini",
        "email":      "multi@example.com",
        "project_id": "project-a, project-b, project-c",
        "priority":   " 10 ",
    }
    data, _ := json.Marshal(authData)
    err := os.WriteFile(filepath.Join(tempDir, "gemini-multi.json"), data, 0644)
@@ -565,6 +677,9 @@ func TestFileSynthesizer_Synthesize_MultiProjectGemini(t *testing.T) {
    if primary.Status != coreauth.StatusDisabled {
        t.Errorf("expected primary status disabled, got %s", primary.Status)
    }
    if gotPriority := primary.Attributes["priority"]; gotPriority != "10" {
        t.Errorf("expected primary priority 10, got %q", gotPriority)
    }

    // Remaining auths should be virtuals
    for i := 1; i < 4; i++ {
@@ -575,6 +690,9 @@ func TestFileSynthesizer_Synthesize_MultiProjectGemini(t *testing.T) {
        if v.Attributes["gemini_virtual_parent"] != primary.ID {
            t.Errorf("expected virtual %d parent to be %s, got %s", i, primary.ID, v.Attributes["gemini_virtual_parent"])
        }
        if gotPriority := v.Attributes["priority"]; gotPriority != "10" {
            t.Errorf("expected virtual %d priority 10, got %q", i, gotPriority)
        }
    }
}
@@ -53,6 +53,8 @@ func (g *StableIDGenerator) Next(kind string, parts ...string) (string, string)

// ApplyAuthExcludedModelsMeta applies excluded models metadata to an auth entry.
// It computes a hash of excluded models and sets the auth_kind attribute.
// For OAuth entries, perKey (from the JSON file's excluded-models field) is merged
// with the global oauth-excluded-models config for the provider.
func ApplyAuthExcludedModelsMeta(auth *coreauth.Auth, cfg *config.Config, perKey []string, authKind string) {
    if auth == nil || cfg == nil {
        return
@@ -72,9 +74,13 @@ func ApplyAuthExcludedModelsMeta(auth *coreauth.Auth, cfg *config.Config, perKey
    }
    if authKindKey == "apikey" {
        add(perKey)
    } else if cfg.OAuthExcludedModels != nil {
        providerKey := strings.ToLower(strings.TrimSpace(auth.Provider))
        add(cfg.OAuthExcludedModels[providerKey])
    } else {
        // For OAuth: merge per-account excluded models with global provider-level exclusions
        add(perKey)
        if cfg.OAuthExcludedModels != nil {
            providerKey := strings.ToLower(strings.TrimSpace(auth.Provider))
            add(cfg.OAuthExcludedModels[providerKey])
        }
    }
    combined := make([]string, 0, len(seen))
    for k := range seen {
@@ -88,6 +94,10 @@ func ApplyAuthExcludedModelsMeta(auth *coreauth.Auth, cfg *config.Config, perKey
    if hash != "" {
        auth.Attributes["excluded_models_hash"] = hash
    }
    // Store the combined excluded models list so that routing can read it at runtime
    if len(combined) > 0 {
        auth.Attributes["excluded_models"] = strings.Join(combined, ",")
    }
    if authKind != "" {
        auth.Attributes["auth_kind"] = authKind
    }
@@ -6,6 +6,7 @@ import (
    "testing"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/config"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff"
    coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
)
@@ -200,6 +201,30 @@ func TestApplyAuthExcludedModelsMeta(t *testing.T) {
    }
}

func TestApplyAuthExcludedModelsMeta_OAuthMergeWritesCombinedModels(t *testing.T) {
    auth := &coreauth.Auth{
        Provider:   "claude",
        Attributes: make(map[string]string),
    }
    cfg := &config.Config{
        OAuthExcludedModels: map[string][]string{
            "claude": {"global-a", "shared"},
        },
    }

    ApplyAuthExcludedModelsMeta(auth, cfg, []string{"per", "SHARED"}, "oauth")

    const wantCombined = "global-a,per,shared"
    if gotCombined := auth.Attributes["excluded_models"]; gotCombined != wantCombined {
        t.Fatalf("expected excluded_models=%q, got %q", wantCombined, gotCombined)
    }

    expectedHash := diff.ComputeExcludedModelsHash([]string{"global-a", "per", "shared"})
    if gotHash := auth.Attributes["excluded_models_hash"]; gotHash != expectedHash {
        t.Fatalf("expected excluded_models_hash=%q, got %q", expectedHash, gotHash)
    }
}

func TestAddConfigHeadersToAttrs(t *testing.T) {
    tests := []struct {
        name string
@@ -740,6 +740,13 @@ func (s *Service) registerModelsForAuth(a *coreauth.Auth) {
        provider = "openai-compatibility"
    }
    excluded := s.oauthExcludedModels(provider, authKind)
    // The synthesizer pre-merges per-account and global exclusions into the "excluded_models" attribute.
    // If this attribute is present, it represents the complete list of exclusions and overrides the global config.
    if a.Attributes != nil {
        if val, ok := a.Attributes["excluded_models"]; ok && strings.TrimSpace(val) != "" {
            excluded = strings.Split(val, ",")
        }
    }
    var models []*ModelInfo
    switch provider {
    case "gemini":
sdk/cliproxy/service_excluded_models_test.go (new file, 65 lines)

@@ -0,0 +1,65 @@
package cliproxy

import (
    "strings"
    "testing"

    coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
    "github.com/router-for-me/CLIProxyAPI/v6/sdk/config"
)

func TestRegisterModelsForAuth_UsesPreMergedExcludedModelsAttribute(t *testing.T) {
    service := &Service{
        cfg: &config.Config{
            OAuthExcludedModels: map[string][]string{
                "gemini-cli": {"gemini-2.5-pro"},
            },
        },
    }
    auth := &coreauth.Auth{
        ID:       "auth-gemini-cli",
        Provider: "gemini-cli",
        Status:   coreauth.StatusActive,
        Attributes: map[string]string{
            "auth_kind":       "oauth",
            "excluded_models": "gemini-2.5-flash",
        },
    }

    registry := GlobalModelRegistry()
    registry.UnregisterClient(auth.ID)
    t.Cleanup(func() {
        registry.UnregisterClient(auth.ID)
    })

    service.registerModelsForAuth(auth)

    models := registry.GetAvailableModelsByProvider("gemini-cli")
    if len(models) == 0 {
        t.Fatal("expected gemini-cli models to be registered")
    }

    for _, model := range models {
        if model == nil {
            continue
        }
        modelID := strings.TrimSpace(model.ID)
        if strings.EqualFold(modelID, "gemini-2.5-flash") {
            t.Fatalf("expected model %q to be excluded by auth attribute", modelID)
        }
    }

    seenGlobalExcluded := false
    for _, model := range models {
        if model == nil {
            continue
        }
        if strings.EqualFold(strings.TrimSpace(model.ID), "gemini-2.5-pro") {
            seenGlobalExcluded = true
            break
        }
    }
    if !seenGlobalExcluded {
        t.Fatal("expected global excluded model to be present when attribute override is set")
    }
}
@@ -2590,6 +2590,135 @@ func TestThinkingE2EMatrix_Body(t *testing.T) {
    runThinkingTests(t, cases)
}

// TestThinkingE2EClaudeAdaptive_Body tests Claude thinking.type=adaptive extended body-only cases.
// These cases validate that adaptive means "thinking enabled without explicit budget", and
// cross-protocol conversion should resolve to target-model maximum thinking capability.
func TestThinkingE2EClaudeAdaptive_Body(t *testing.T) {
    reg := registry.GetGlobalRegistry()
    uid := fmt.Sprintf("thinking-e2e-claude-adaptive-%d", time.Now().UnixNano())

    reg.RegisterClient(uid, "test", getTestModels())
    defer reg.UnregisterClient(uid)

    cases := []thinkingTestCase{
        // A1: Claude adaptive to OpenAI level model -> highest supported level
        {
            name:        "A1",
            from:        "claude",
            to:          "openai",
            model:       "level-model",
            inputJSON:   `{"model":"level-model","messages":[{"role":"user","content":"hi"}],"thinking":{"type":"adaptive"}}`,
            expectField: "reasoning_effort",
            expectValue: "high",
            expectErr:   false,
        },
        // A2: Claude adaptive to Gemini level subset model -> highest supported level
        {
            name:            "A2",
            from:            "claude",
            to:              "gemini",
            model:           "level-subset-model",
            inputJSON:       `{"model":"level-subset-model","messages":[{"role":"user","content":"hi"}],"thinking":{"type":"adaptive"}}`,
            expectField:     "generationConfig.thinkingConfig.thinkingLevel",
            expectValue:     "high",
            includeThoughts: "true",
            expectErr:       false,
        },
        // A3: Claude adaptive to Gemini budget model -> max budget
        {
            name:            "A3",
            from:            "claude",
            to:              "gemini",
            model:           "gemini-budget-model",
            inputJSON:       `{"model":"gemini-budget-model","messages":[{"role":"user","content":"hi"}],"thinking":{"type":"adaptive"}}`,
            expectField:     "generationConfig.thinkingConfig.thinkingBudget",
            expectValue:     "20000",
            includeThoughts: "true",
            expectErr:       false,
        },
        // A4: Claude adaptive to Gemini mixed model -> highest supported level
        {
            name:            "A4",
            from:            "claude",
            to:              "gemini",
            model:           "gemini-mixed-model",
            inputJSON:       `{"model":"gemini-mixed-model","messages":[{"role":"user","content":"hi"}],"thinking":{"type":"adaptive"}}`,
            expectField:     "generationConfig.thinkingConfig.thinkingLevel",
            expectValue:     "high",
            includeThoughts: "true",
            expectErr:       false,
        },
        // A5: Claude adaptive passthrough for same protocol
        {
            name:        "A5",
            from:        "claude",
            to:          "claude",
            model:       "claude-budget-model",
            inputJSON:   `{"model":"claude-budget-model","messages":[{"role":"user","content":"hi"}],"thinking":{"type":"adaptive"}}`,
            expectField: "thinking.type",
            expectValue: "adaptive",
            expectErr:   false,
        },
        // A6: Claude adaptive to Antigravity budget model -> max budget
        {
            name:            "A6",
            from:            "claude",
            to:              "antigravity",
            model:           "antigravity-budget-model",
            inputJSON:       `{"model":"antigravity-budget-model","messages":[{"role":"user","content":"hi"}],"thinking":{"type":"adaptive"}}`,
            expectField:     "request.generationConfig.thinkingConfig.thinkingBudget",
            expectValue:     "20000",
            includeThoughts: "true",
            expectErr:       false,
        },
        // A7: Claude adaptive to iFlow GLM -> enabled boolean
        {
            name:        "A7",
            from:        "claude",
            to:          "iflow",
            model:       "glm-test",
            inputJSON:   `{"model":"glm-test","messages":[{"role":"user","content":"hi"}],"thinking":{"type":"adaptive"}}`,
            expectField: "chat_template_kwargs.enable_thinking",
            expectValue: "true",
            expectErr:   false,
        },
        // A8: Claude adaptive to iFlow MiniMax -> enabled boolean
        {
            name:        "A8",
            from:        "claude",
            to:          "iflow",
            model:       "minimax-test",
            inputJSON:   `{"model":"minimax-test","messages":[{"role":"user","content":"hi"}],"thinking":{"type":"adaptive"}}`,
            expectField: "reasoning_split",
            expectValue: "true",
            expectErr:   false,
        },
        // A9: Claude adaptive to Codex level model -> highest supported level
        {
            name:        "A9",
            from:        "claude",
            to:          "codex",
            model:       "level-model",
            inputJSON:   `{"model":"level-model","messages":[{"role":"user","content":"hi"}],"thinking":{"type":"adaptive"}}`,
            expectField: "reasoning.effort",
            expectValue: "high",
            expectErr:   false,
        },
        // A10: Claude adaptive on non-thinking model should still be stripped
        {
            name:        "A10",
            from:        "claude",
            to:          "openai",
            model:       "no-thinking-model",
            inputJSON:   `{"model":"no-thinking-model","messages":[{"role":"user","content":"hi"}],"thinking":{"type":"adaptive"}}`,
            expectField: "",
            expectErr:   false,
        },
    }

    runThinkingTests(t, cases)
}

// getTestModels returns the shared model definitions for E2E tests.
func getTestModels() []*registry.ModelInfo {
    return []*registry.ModelInfo{