mirror of https://github.com/router-for-me/CLIProxyAPI.git
synced 2026-02-03 04:50:52 +08:00

Refactor codebase

This commit is contained in:

internal/translator/openai/claude/init.go (new file, 19 lines added)
@@ -0,0 +1,19 @@
+package claude
+
+import (
+    . "github.com/luispater/CLIProxyAPI/internal/constant"
+    "github.com/luispater/CLIProxyAPI/internal/interfaces"
+    "github.com/luispater/CLIProxyAPI/internal/translator/translator"
+)
+
+func init() {
+    translator.Register(
+        CLAUDE,
+        OPENAI,
+        ConvertClaudeRequestToOpenAI,
+        interfaces.TranslateResponse{
+            Stream:    ConvertOpenAIResponseToClaude,
+            NonStream: ConvertOpenAIResponseToClaudeNonStream,
+        },
+    )
+}
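
For orientation, a minimal sketch of driving the request translator registered above. The payload and target model name are illustrative, not from this commit, and because the package lives under internal/ the snippet assumes it is compiled inside this repository:

package main

import (
    "fmt"

    claude "github.com/luispater/CLIProxyAPI/internal/translator/openai/claude"
)

func main() {
    // Illustrative Anthropic-style request body.
    raw := []byte(`{"model":"claude-3-5-sonnet","max_tokens":256,"messages":[{"role":"user","content":"Hello"}]}`)

    // Under the new signature the body's "model" field is no longer read;
    // the caller passes the target model and the stream flag explicitly.
    out := claude.ConvertClaudeRequestToOpenAI("gpt-4o", raw, false)
    fmt.Println(string(out))
    // Expected shape: {"model":"gpt-4o","messages":[...],"max_tokens":256,"stream":false}
}
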
@@ -13,20 +13,17 @@ import (
     "github.com/tidwall/sjson"
 )
 
-// ConvertAnthropicRequestToOpenAI parses and transforms an Anthropic API request into OpenAI Chat Completions API format.
+// ConvertClaudeRequestToOpenAI parses and transforms an Anthropic API request into OpenAI Chat Completions API format.
 // It extracts the model name, system instruction, message contents, and tool declarations
 // from the raw JSON request and returns them in the format expected by the OpenAI API.
-func ConvertAnthropicRequestToOpenAI(rawJSON []byte) string {
+func ConvertClaudeRequestToOpenAI(modelName string, rawJSON []byte, stream bool) []byte {
     // Base OpenAI Chat Completions API template
     out := `{"model":"","messages":[]}`
 
     root := gjson.ParseBytes(rawJSON)
 
     // Model mapping
-    if model := root.Get("model"); model.Exists() {
-        modelStr := model.String()
-        out, _ = sjson.Set(out, "model", modelStr)
-    }
+    out, _ = sjson.Set(out, "model", modelName)
 
     // Max tokens
     if maxTokens := root.Get("max_tokens"); maxTokens.Exists() {
@@ -62,21 +59,30 @@ func ConvertAnthropicRequestToOpenAI(rawJSON []byte) string {
     }
 
     // Stream
-    if stream := root.Get("stream"); stream.Exists() {
-        out, _ = sjson.Set(out, "stream", stream.Bool())
-    }
+    out, _ = sjson.Set(out, "stream", stream)
 
     // Process messages and system
-    var openAIMessages []interface{}
+    var messagesJSON = "[]"
 
     // Handle system message first
-    if system := root.Get("system"); system.Exists() && system.String() != "" {
-        systemMsg := map[string]interface{}{
-            "role":    "system",
-            "content": system.String(),
-        }
-        openAIMessages = append(openAIMessages, systemMsg)
-    }
+    systemMsgJSON := `{"role":"system","content":[{"type":"text","text":"Use ANY tool, the parameters MUST accord with RFC 8259 (The JavaScript Object Notation (JSON) Data Interchange Format), the keys and value MUST be enclosed in double quotes."}]}`
+    if system := root.Get("system"); system.Exists() {
+        if system.Type == gjson.String {
+            if system.String() != "" {
+                oldSystem := `{"type":"text","text":""}`
+                oldSystem, _ = sjson.Set(oldSystem, "text", system.String())
+                systemMsgJSON, _ = sjson.SetRaw(systemMsgJSON, "content.-1", oldSystem)
+            }
+        } else if system.Type == gjson.JSON {
+            if system.IsArray() {
+                systemResults := system.Array()
+                for i := 0; i < len(systemResults); i++ {
+                    systemMsgJSON, _ = sjson.SetRaw(systemMsgJSON, "content.-1", systemResults[i].Raw)
+                }
+            }
+        }
+    }
+    messagesJSON, _ = sjson.SetRaw(messagesJSON, "-1", systemMsgJSON)
 
     // Process Anthropic messages
     if messages := root.Get("messages"); messages.Exists() && messages.IsArray() {
@@ -84,15 +90,10 @@ func ConvertAnthropicRequestToOpenAI(rawJSON []byte) string {
         role := message.Get("role").String()
         contentResult := message.Get("content")
 
-        msg := map[string]interface{}{
-            "role": role,
-        }
-
         // Handle content
         if contentResult.Exists() && contentResult.IsArray() {
             var textParts []string
             var toolCalls []interface{}
-            var toolResults []interface{}
 
             contentResult.ForEach(func(_, part gjson.Result) bool {
                 partType := part.Get("type").String()
@@ -118,68 +119,62 @@ func ConvertAnthropicRequestToOpenAI(rawJSON []byte) string {
 
                 case "tool_use":
                     // Convert to OpenAI tool call format
-                    toolCall := map[string]interface{}{
-                        "id":   part.Get("id").String(),
-                        "type": "function",
-                        "function": map[string]interface{}{
-                            "name": part.Get("name").String(),
-                        },
-                    }
+                    toolCallJSON := `{"id":"","type":"function","function":{"name":"","arguments":""}}`
+                    toolCallJSON, _ = sjson.Set(toolCallJSON, "id", part.Get("id").String())
+                    toolCallJSON, _ = sjson.Set(toolCallJSON, "function.name", part.Get("name").String())
 
                     // Convert input to arguments JSON string
                     if input := part.Get("input"); input.Exists() {
                         if inputJSON, err := json.Marshal(input.Value()); err == nil {
-                            if function, ok := toolCall["function"].(map[string]interface{}); ok {
-                                function["arguments"] = string(inputJSON)
-                            }
+                            toolCallJSON, _ = sjson.Set(toolCallJSON, "function.arguments", string(inputJSON))
                         } else {
-                            if function, ok := toolCall["function"].(map[string]interface{}); ok {
-                                function["arguments"] = "{}"
-                            }
+                            toolCallJSON, _ = sjson.Set(toolCallJSON, "function.arguments", "{}")
                         }
                     } else {
-                        if function, ok := toolCall["function"].(map[string]interface{}); ok {
-                            function["arguments"] = "{}"
-                        }
+                        toolCallJSON, _ = sjson.Set(toolCallJSON, "function.arguments", "{}")
                     }
 
-                    toolCalls = append(toolCalls, toolCall)
+                    toolCalls = append(toolCalls, gjson.Parse(toolCallJSON).Value())
 
                 case "tool_result":
-                    // Convert to OpenAI tool message format
-                    toolResult := map[string]interface{}{
-                        "role":         "tool",
-                        "tool_call_id": part.Get("tool_use_id").String(),
-                        "content":      part.Get("content").String(),
-                    }
-                    toolResults = append(toolResults, toolResult)
+                    // Convert to OpenAI tool message format and add immediately to preserve order
+                    toolResultJSON := `{"role":"tool","tool_call_id":"","content":""}`
+                    toolResultJSON, _ = sjson.Set(toolResultJSON, "tool_call_id", part.Get("tool_use_id").String())
+                    toolResultJSON, _ = sjson.Set(toolResultJSON, "content", part.Get("content").String())
+                    messagesJSON, _ = sjson.Set(messagesJSON, "-1", gjson.Parse(toolResultJSON).Value())
                 }
                 return true
             })
 
-            // Set content
-            if len(textParts) > 0 {
-                msg["content"] = strings.Join(textParts, "")
-            } else {
-                msg["content"] = ""
-            }
+            // Create main message if there's text content or tool calls
+            if len(textParts) > 0 || len(toolCalls) > 0 {
+                msgJSON := `{"role":"","content":""}`
+                msgJSON, _ = sjson.Set(msgJSON, "role", role)
 
-            // Set tool calls for assistant messages
-            if role == "assistant" && len(toolCalls) > 0 {
-                msg["tool_calls"] = toolCalls
-            }
+                // Set content
+                if len(textParts) > 0 {
+                    msgJSON, _ = sjson.Set(msgJSON, "content", strings.Join(textParts, ""))
+                } else {
+                    msgJSON, _ = sjson.Set(msgJSON, "content", "")
+                }
 
-            openAIMessages = append(openAIMessages, msg)
+                // Set tool calls for assistant messages
+                if role == "assistant" && len(toolCalls) > 0 {
+                    toolCallsJSON, _ := json.Marshal(toolCalls)
+                    msgJSON, _ = sjson.SetRaw(msgJSON, "tool_calls", string(toolCallsJSON))
+                }
 
-            // Add tool result messages separately
-            for _, toolResult := range toolResults {
-                openAIMessages = append(openAIMessages, toolResult)
-            }
+                if gjson.Get(msgJSON, "content").String() != "" || len(toolCalls) != 0 {
+                    messagesJSON, _ = sjson.Set(messagesJSON, "-1", gjson.Parse(msgJSON).Value())
+                }
+            }
 
         } else if contentResult.Exists() && contentResult.Type == gjson.String {
             // Simple string content
-            msg["content"] = contentResult.String()
-            openAIMessages = append(openAIMessages, msg)
+            msgJSON := `{"role":"","content":""}`
+            msgJSON, _ = sjson.Set(msgJSON, "role", role)
+            msgJSON, _ = sjson.Set(msgJSON, "content", contentResult.String())
+            messagesJSON, _ = sjson.Set(messagesJSON, "-1", gjson.Parse(msgJSON).Value())
         }
 
         return true
@@ -187,38 +182,30 @@ func ConvertAnthropicRequestToOpenAI(rawJSON []byte) string {
     }
 
     // Set messages
-    if len(openAIMessages) > 0 {
-        messagesJSON, _ := json.Marshal(openAIMessages)
-        out, _ = sjson.SetRaw(out, "messages", string(messagesJSON))
+    if gjson.Parse(messagesJSON).IsArray() && len(gjson.Parse(messagesJSON).Array()) > 0 {
+        out, _ = sjson.SetRaw(out, "messages", messagesJSON)
     }
 
     // Process tools - convert Anthropic tools to OpenAI functions
     if tools := root.Get("tools"); tools.Exists() && tools.IsArray() {
-        var openAITools []interface{}
+        var toolsJSON = "[]"
 
         tools.ForEach(func(_, tool gjson.Result) bool {
-            openAITool := map[string]interface{}{
-                "type": "function",
-                "function": map[string]interface{}{
-                    "name":        tool.Get("name").String(),
-                    "description": tool.Get("description").String(),
-                },
-            }
+            openAIToolJSON := `{"type":"function","function":{"name":"","description":""}}`
+            openAIToolJSON, _ = sjson.Set(openAIToolJSON, "function.name", tool.Get("name").String())
+            openAIToolJSON, _ = sjson.Set(openAIToolJSON, "function.description", tool.Get("description").String())
 
             // Convert Anthropic input_schema to OpenAI function parameters
             if inputSchema := tool.Get("input_schema"); inputSchema.Exists() {
-                if function, ok := openAITool["function"].(map[string]interface{}); ok {
-                    function["parameters"] = inputSchema.Value()
-                }
+                openAIToolJSON, _ = sjson.Set(openAIToolJSON, "function.parameters", inputSchema.Value())
             }
 
-            openAITools = append(openAITools, openAITool)
+            toolsJSON, _ = sjson.Set(toolsJSON, "-1", gjson.Parse(openAIToolJSON).Value())
             return true
        })
 
-        if len(openAITools) > 0 {
-            toolsJSON, _ := json.Marshal(openAITools)
-            out, _ = sjson.SetRaw(out, "tools", string(toolsJSON))
+        if gjson.Parse(toolsJSON).IsArray() && len(gjson.Parse(toolsJSON).Array()) > 0 {
+            out, _ = sjson.SetRaw(out, "tools", toolsJSON)
        }
    }
 
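
The tools hunk above maps an Anthropic tool declaration onto an OpenAI function tool: name and description copy over, and input_schema becomes function.parameters. A standalone sketch of that shape mapping, using the same gjson/sjson calls; the weather tool itself is made up for illustration:

package main

import (
    "fmt"

    "github.com/tidwall/gjson"
    "github.com/tidwall/sjson"
)

func main() {
    // Illustrative Anthropic tool declaration.
    anthropicTool := `{"name":"get_weather","description":"Look up current weather","input_schema":{"type":"object","properties":{"city":{"type":"string"}},"required":["city"]}}`

    tool := gjson.Parse(anthropicTool)
    openAITool := `{"type":"function","function":{"name":"","description":""}}`
    openAITool, _ = sjson.Set(openAITool, "function.name", tool.Get("name").String())
    openAITool, _ = sjson.Set(openAITool, "function.description", tool.Get("description").String())

    // input_schema is already a JSON Schema object, so it can be attached
    // directly as the OpenAI function's parameters.
    if schema := tool.Get("input_schema"); schema.Exists() {
        openAITool, _ = sjson.Set(openAITool, "function.parameters", schema.Value())
    }

    fmt.Println(openAITool)
    // {"type":"function","function":{"name":"get_weather","description":"Look up current weather","parameters":{...}}}
}
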
@@ -232,12 +219,9 @@ func ConvertAnthropicRequestToOpenAI(rawJSON []byte) string {
         case "tool":
             // Specific tool choice
             toolName := toolChoice.Get("name").String()
-            out, _ = sjson.Set(out, "tool_choice", map[string]interface{}{
-                "type": "function",
-                "function": map[string]interface{}{
-                    "name": toolName,
-                },
-            })
+            toolChoiceJSON := `{"type":"function","function":{"name":""}}`
+            toolChoiceJSON, _ = sjson.Set(toolChoiceJSON, "function.name", toolName)
+            out, _ = sjson.SetRaw(out, "tool_choice", toolChoiceJSON)
         default:
             // Default to auto if not specified
             out, _ = sjson.Set(out, "tool_choice", "auto")
@@ -249,5 +233,5 @@ func ConvertAnthropicRequestToOpenAI(rawJSON []byte) string {
         out, _ = sjson.Set(out, "user", user.String())
     }
 
-    return out
+    return []byte(out)
 }
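
Throughout this file the commit replaces map[string]interface{} building plus json.Marshal with string templates edited via sjson. A minimal sketch of that pattern, including the "-1" path sjson uses to append to an array; the message values are illustrative:

package main

import (
    "fmt"

    "github.com/tidwall/gjson"
    "github.com/tidwall/sjson"
)

func main() {
    // Start from a JSON template and fill fields in place.
    msg := `{"role":"","content":""}`
    msg, _ = sjson.Set(msg, "role", "assistant")
    msg, _ = sjson.Set(msg, "content", "Hi there")

    // The "-1" path appends to a JSON array; SetRaw splices in an
    // already-serialized value without re-encoding it.
    messages := "[]"
    messages, _ = sjson.SetRaw(messages, "-1", msg)

    fmt.Println(gjson.Get(messages, "0.role").String()) // assistant
    fmt.Println(messages)                               // [{"role":"assistant","content":"Hi there"}]
}

Working on serialized strings end to end avoids the intermediate map allocations and the final json.Marshal pass the old code needed, which is presumably the motivation for the refactor.
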
@@ -6,9 +6,11 @@
 package claude
 
 import (
+    "context"
     "encoding/json"
     "strings"
 
+    "github.com/luispater/CLIProxyAPI/internal/util"
     "github.com/tidwall/gjson"
 )
 
@@ -38,14 +40,37 @@ type ToolCallAccumulator struct {
     Arguments strings.Builder
 }
 
-// ConvertOpenAIResponseToAnthropic converts OpenAI streaming response format to Anthropic API format.
+// ConvertOpenAIResponseToClaude converts OpenAI streaming response format to Anthropic API format.
 // This function processes OpenAI streaming chunks and transforms them into Anthropic-compatible JSON responses.
 // It handles text content, tool calls, and usage metadata, outputting responses that match the Anthropic API format.
-func ConvertOpenAIResponseToAnthropic(rawJSON []byte, param *ConvertOpenAIResponseToAnthropicParams) []string {
+//
+// Parameters:
+//   - ctx: The context for the request.
+//   - modelName: The name of the model.
+//   - rawJSON: The raw JSON response from the OpenAI API.
+//   - param: A pointer to a parameter object for the conversion.
+//
+// Returns:
+//   - []string: A slice of strings, each containing an Anthropic-compatible JSON response.
+func ConvertOpenAIResponseToClaude(_ context.Context, _ string, rawJSON []byte, param *any) []string {
+    if *param == nil {
+        *param = &ConvertOpenAIResponseToAnthropicParams{
+            MessageID:               "",
+            Model:                   "",
+            CreatedAt:               0,
+            ContentAccumulator:      strings.Builder{},
+            ToolCallsAccumulator:    nil,
+            TextContentBlockStarted: false,
+            FinishReason:            "",
+            ContentBlocksStopped:    false,
+            MessageDeltaSent:        false,
+        }
+    }
 
     // Check if this is the [DONE] marker
     rawStr := strings.TrimSpace(string(rawJSON))
     if rawStr == "[DONE]" {
-        return convertOpenAIDoneToAnthropic(param)
+        return convertOpenAIDoneToAnthropic((*param).(*ConvertOpenAIResponseToAnthropicParams))
     }
 
     root := gjson.ParseBytes(rawJSON)
@@ -55,7 +80,7 @@ func ConvertOpenAIResponseToAnthropic(rawJSON []byte, param *ConvertOpenAIRespon
 
     if objectType == "chat.completion.chunk" {
         // Handle streaming response
-        return convertOpenAIStreamingChunkToAnthropic(rawJSON, param)
+        return convertOpenAIStreamingChunkToAnthropic(rawJSON, (*param).(*ConvertOpenAIResponseToAnthropicParams))
     } else if objectType == "chat.completion" {
         // Handle non-streaming response
         return convertOpenAINonStreamingToAnthropic(rawJSON)
@@ -164,6 +189,16 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
                 if name := function.Get("name"); name.Exists() {
                     accumulator.Name = name.String()
 
+                    if param.TextContentBlockStarted {
+                        param.TextContentBlockStarted = false
+                        contentBlockStop := map[string]interface{}{
+                            "type":  "content_block_stop",
+                            "index": index,
+                        }
+                        contentBlockStopJSON, _ := json.Marshal(contentBlockStop)
+                        results = append(results, "event: content_block_stop\ndata: "+string(contentBlockStopJSON)+"\n\n")
+                    }
+
                     // Send content_block_start for tool_use
                     contentBlockStart := map[string]interface{}{
                         "type": "content_block_start",
@@ -182,19 +217,9 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
                 // Handle function arguments
                 if args := function.Get("arguments"); args.Exists() {
                     argsText := args.String()
-                    accumulator.Arguments.WriteString(argsText)
-
-                    // Send input_json_delta
-                    inputDelta := map[string]interface{}{
-                        "type":  "content_block_delta",
-                        "index": index + 1,
-                        "delta": map[string]interface{}{
-                            "type":         "input_json_delta",
-                            "partial_json": argsText,
-                        },
-                    }
-                    inputDeltaJSON, _ := json.Marshal(inputDelta)
-                    results = append(results, "event: content_block_delta\ndata: "+string(inputDeltaJSON)+"\n\n")
+                    if argsText != "" {
+                        accumulator.Arguments.WriteString(argsText)
+                    }
                 }
             }
 
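
The streaming converter emits Anthropic-style server-sent events as plain strings, one per element of the returned slice, framed as "event: <type>\ndata: <json>\n\n". A small sketch of that framing; the delta payload here is hand-written for illustration:

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    // Same framing the converter uses for each Anthropic streaming event.
    delta := map[string]interface{}{
        "type":  "content_block_delta",
        "index": 0,
        "delta": map[string]interface{}{
            "type": "text_delta",
            "text": "Hel",
        },
    }
    deltaJSON, _ := json.Marshal(delta)
    event := "event: content_block_delta\ndata: " + string(deltaJSON) + "\n\n"
    fmt.Print(event)
}
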
@@ -221,6 +246,22 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
         // Send content_block_stop for any tool calls
         if !param.ContentBlocksStopped {
             for index := range param.ToolCallsAccumulator {
+                accumulator := param.ToolCallsAccumulator[index]
+
+                // Send complete input_json_delta with all accumulated arguments
+                if accumulator.Arguments.Len() > 0 {
+                    inputDelta := map[string]interface{}{
+                        "type":  "content_block_delta",
+                        "index": index + 1,
+                        "delta": map[string]interface{}{
+                            "type":         "input_json_delta",
+                            "partial_json": util.FixJSON(accumulator.Arguments.String()),
+                        },
+                    }
+                    inputDeltaJSON, _ := json.Marshal(inputDelta)
+                    results = append(results, "event: content_block_delta\ndata: "+string(inputDeltaJSON)+"\n\n")
+                }
+
                 contentBlockStop := map[string]interface{}{
                     "type":  "content_block_stop",
                     "index": index + 1,
@@ -334,6 +375,7 @@ func convertOpenAINonStreamingToAnthropic(rawJSON []byte) []string {
 
             // Parse arguments
             argsStr := toolCall.Get("function.arguments").String()
+            argsStr = util.FixJSON(argsStr)
             if argsStr != "" {
                 var args interface{}
                 if err := json.Unmarshal([]byte(argsStr), &args); err == nil {
@@ -387,3 +429,17 @@ func mapOpenAIFinishReasonToAnthropic(openAIReason string) string {
         return "end_turn"
     }
 }
+
+// ConvertOpenAIResponseToClaudeNonStream converts a non-streaming OpenAI response to a non-streaming Anthropic response.
+//
+// Parameters:
+//   - ctx: The context for the request.
+//   - modelName: The name of the model.
+//   - rawJSON: The raw JSON response from the OpenAI API.
+//   - param: A pointer to a parameter object for the conversion.
+//
+// Returns:
+//   - string: An Anthropic-compatible JSON response.
+func ConvertOpenAIResponseToClaudeNonStream(_ context.Context, _ string, _ []byte, _ *any) string {
+    return ""
+}
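
The new Stream signature threads converter state through a *any: the first chunk allocates the params struct, and every later chunk recovers it with a type assertion. A reduced, self-contained sketch of the pattern; the state type and onChunk function below are stand-ins, not code from this repository:

package main

import (
    "fmt"
    "strings"
)

// state plays the role of ConvertOpenAIResponseToAnthropicParams.
type state struct {
    Content strings.Builder
}

// onChunk lazily initializes *param on the first call, then type-asserts
// to get the concrete state back on every subsequent call.
func onChunk(text string, param *any) {
    if *param == nil {
        *param = &state{}
    }
    s := (*param).(*state)
    s.Content.WriteString(text)
}

func main() {
    var param any
    for _, chunk := range []string{"Hel", "lo"} {
        onChunk(chunk, &param)
    }
    fmt.Println(param.(*state).Content.String()) // Hello
}

Using *any keeps the Stream and NonStream function signatures uniform across all translator pairs, at the cost of a runtime type assertion on every chunk.
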
internal/translator/openai/gemini-cli/init.go (new file, 19 lines added)

@@ -0,0 +1,19 @@
+package geminiCLI
+
+import (
+    . "github.com/luispater/CLIProxyAPI/internal/constant"
+    "github.com/luispater/CLIProxyAPI/internal/interfaces"
+    "github.com/luispater/CLIProxyAPI/internal/translator/translator"
+)
+
+func init() {
+    translator.Register(
+        GEMINICLI,
+        OPENAI,
+        ConvertGeminiCLIRequestToOpenAI,
+        interfaces.TranslateResponse{
+            Stream:    ConvertOpenAIResponseToGeminiCLI,
+            NonStream: ConvertOpenAIResponseToGeminiCLINonStream,
+        },
+    )
+}
@@ -0,0 +1,26 @@
+// Package geminiCLI provides request translation functionality for Gemini to OpenAI API.
+// It handles parsing and transforming Gemini API requests into OpenAI Chat Completions API format,
+// extracting model information, generation config, message contents, and tool declarations.
+// The package performs JSON data transformation to ensure compatibility
+// between Gemini API format and OpenAI API's expected format.
+package geminiCLI
+
+import (
+    . "github.com/luispater/CLIProxyAPI/internal/translator/openai/gemini"
+    "github.com/tidwall/gjson"
+    "github.com/tidwall/sjson"
+)
+
+// ConvertGeminiCLIRequestToOpenAI parses and transforms a Gemini API request into OpenAI Chat Completions API format.
+// It extracts the model name, generation config, message contents, and tool declarations
+// from the raw JSON request and returns them in the format expected by the OpenAI API.
+func ConvertGeminiCLIRequestToOpenAI(modelName string, rawJSON []byte, stream bool) []byte {
+    rawJSON = []byte(gjson.GetBytes(rawJSON, "request").Raw)
+    rawJSON, _ = sjson.SetBytes(rawJSON, "model", modelName)
+    if gjson.GetBytes(rawJSON, "systemInstruction").Exists() {
+        rawJSON, _ = sjson.SetRawBytes(rawJSON, "system_instruction", []byte(gjson.GetBytes(rawJSON, "systemInstruction").Raw))
+        rawJSON, _ = sjson.DeleteBytes(rawJSON, "systemInstruction")
+    }
+
+    return ConvertGeminiRequestToOpenAI(modelName, rawJSON, stream)
+}
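
A sketch of what this wrapper does to an incoming Gemini CLI payload: the inner request object is unwrapped, the caller's model is stamped in, and the camelCase systemInstruction key is renamed before delegating to the plain Gemini translator. The envelope body below is illustrative, not taken from the repository:

package main

import (
    "fmt"

    "github.com/tidwall/gjson"
    "github.com/tidwall/sjson"
)

func main() {
    // Illustrative CLI-style envelope around a Gemini request.
    raw := []byte(`{"model":"gemini-2.0-flash","request":{"systemInstruction":{"parts":[{"text":"Be brief."}]},"contents":[]}}`)

    // Unwrap the inner request, as ConvertGeminiCLIRequestToOpenAI does.
    raw = []byte(gjson.GetBytes(raw, "request").Raw)
    raw, _ = sjson.SetBytes(raw, "model", "gpt-4o")
    if gjson.GetBytes(raw, "systemInstruction").Exists() {
        raw, _ = sjson.SetRawBytes(raw, "system_instruction", []byte(gjson.GetBytes(raw, "systemInstruction").Raw))
        raw, _ = sjson.DeleteBytes(raw, "systemInstruction")
    }

    fmt.Println(string(raw))
    // {"system_instruction":{"parts":[{"text":"Be brief."}]},"contents":[],"model":"gpt-4o"}
}
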
@@ -0,0 +1,53 @@
+// Package geminiCLI provides response translation functionality for OpenAI to Gemini API.
+// This package handles the conversion of OpenAI Chat Completions API responses into Gemini API-compatible
+// JSON format, transforming streaming events and non-streaming responses into the format
+// expected by Gemini API clients. It supports both streaming and non-streaming modes,
+// handling text content, tool calls, and usage metadata appropriately.
+package geminiCLI
+
+import (
+    "context"
+
+    . "github.com/luispater/CLIProxyAPI/internal/translator/openai/gemini"
+    "github.com/tidwall/sjson"
+)
+
+// ConvertOpenAIResponseToGeminiCLI converts OpenAI Chat Completions streaming response format to Gemini API format.
+// This function processes OpenAI streaming chunks and transforms them into Gemini-compatible JSON responses.
+// It handles text content, tool calls, and usage metadata, outputting responses that match the Gemini API format.
+//
+// Parameters:
+//   - ctx: The context for the request.
+//   - modelName: The name of the model.
+//   - rawJSON: The raw JSON response from the OpenAI API.
+//   - param: A pointer to a parameter object for the conversion.
+//
+// Returns:
+//   - []string: A slice of strings, each containing a Gemini-compatible JSON response.
+func ConvertOpenAIResponseToGeminiCLI(ctx context.Context, modelName string, rawJSON []byte, param *any) []string {
+    outputs := ConvertOpenAIResponseToGemini(ctx, modelName, rawJSON, param)
+    newOutputs := make([]string, 0)
+    for i := 0; i < len(outputs); i++ {
+        json := `{"response": {}}`
+        output, _ := sjson.SetRaw(json, "response", outputs[i])
+        newOutputs = append(newOutputs, output)
+    }
+    return newOutputs
+}
+
+// ConvertOpenAIResponseToGeminiCLINonStream converts a non-streaming OpenAI response to a non-streaming Gemini CLI response.
+//
+// Parameters:
+//   - ctx: The context for the request.
+//   - modelName: The name of the model.
+//   - rawJSON: The raw JSON response from the OpenAI API.
+//   - param: A pointer to a parameter object for the conversion.
+//
+// Returns:
+//   - string: A Gemini-compatible JSON response.
+func ConvertOpenAIResponseToGeminiCLINonStream(ctx context.Context, modelName string, rawJSON []byte, param *any) string {
+    strJSON := ConvertOpenAIResponseToGeminiNonStream(ctx, modelName, rawJSON, param)
+    json := `{"response": {}}`
+    strJSON, _ = sjson.SetRaw(json, "response", strJSON)
+    return strJSON
+}
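
The CLI variants differ from the plain Gemini converters only in this `{"response": ...}` envelope. A tiny sketch with a hand-written, illustrative chunk:

package main

import (
    "fmt"

    "github.com/tidwall/sjson"
)

func main() {
    // An illustrative already-converted Gemini chunk.
    chunk := `{"candidates":[{"content":{"role":"model","parts":[{"text":"Hi"}]}}]}`

    // Wrap it the way ConvertOpenAIResponseToGeminiCLI does for each output.
    wrapped, _ := sjson.SetRaw(`{"response": {}}`, "response", chunk)
    fmt.Println(wrapped)
    // {"response": {"candidates":[{"content":{"role":"model","parts":[{"text":"Hi"}]}}]}}
}
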
internal/translator/openai/gemini/init.go (new file, 19 lines added)

@@ -0,0 +1,19 @@
+package gemini
+
+import (
+    . "github.com/luispater/CLIProxyAPI/internal/constant"
+    "github.com/luispater/CLIProxyAPI/internal/interfaces"
+    "github.com/luispater/CLIProxyAPI/internal/translator/translator"
+)
+
+func init() {
+    translator.Register(
+        GEMINI,
+        OPENAI,
+        ConvertGeminiRequestToOpenAI,
+        interfaces.TranslateResponse{
+            Stream:    ConvertOpenAIResponseToGemini,
+            NonStream: ConvertOpenAIResponseToGeminiNonStream,
+        },
+    )
+}
@@ -18,7 +18,7 @@ import (
 // ConvertGeminiRequestToOpenAI parses and transforms a Gemini API request into OpenAI Chat Completions API format.
 // It extracts the model name, generation config, message contents, and tool declarations
 // from the raw JSON request and returns them in the format expected by the OpenAI API.
-func ConvertGeminiRequestToOpenAI(rawJSON []byte) string {
+func ConvertGeminiRequestToOpenAI(modelName string, rawJSON []byte, stream bool) []byte {
     // Base OpenAI Chat Completions API template
     out := `{"model":"","messages":[]}`
 
@@ -37,10 +37,7 @@ func ConvertGeminiRequestToOpenAI(rawJSON []byte) string {
     }
 
     // Model mapping
-    if model := root.Get("model"); model.Exists() {
-        modelStr := model.String()
-        out, _ = sjson.Set(out, "model", modelStr)
-    }
+    out, _ = sjson.Set(out, "model", modelName)
 
     // Generation config mapping
     if genConfig := root.Get("generationConfig"); genConfig.Exists() {
@@ -79,9 +76,7 @@ func ConvertGeminiRequestToOpenAI(rawJSON []byte) string {
     }
 
     // Stream parameter
-    if stream := root.Get("stream"); stream.Exists() {
-        out, _ = sjson.Set(out, "stream", stream.Bool())
-    }
+    out, _ = sjson.Set(out, "stream", stream)
 
     // Process contents (Gemini messages) -> OpenAI messages
     var openAIMessages []interface{}
@@ -355,5 +350,5 @@ func ConvertGeminiRequestToOpenAI(rawJSON []byte) string {
         }
     }
 
-    return out
+    return []byte(out)
 }
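
The Gemini request translator gets the same signature change as the Claude one: model and stream flag become explicit arguments instead of fields scraped from the body. A usage sketch with an illustrative payload, again assuming it is compiled inside this repository since the package is internal:

package main

import (
    "fmt"

    gemini "github.com/luispater/CLIProxyAPI/internal/translator/openai/gemini"
)

func main() {
    // Illustrative Gemini request body.
    raw := []byte(`{"contents":[{"role":"user","parts":[{"text":"Hello"}]}],"generationConfig":{"temperature":0.2}}`)

    out := gemini.ConvertGeminiRequestToOpenAI("gpt-4o", raw, true)
    fmt.Println(string(out))
    // Expected shape: {"model":"gpt-4o","messages":[...],"stream":true,...}
}
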
@@ -6,6 +6,7 @@
 package gemini
 
 import (
+    "context"
     "encoding/json"
     "strings"
 
@@ -33,7 +34,24 @@ type ToolCallAccumulator struct {
 // ConvertOpenAIResponseToGemini converts OpenAI Chat Completions streaming response format to Gemini API format.
 // This function processes OpenAI streaming chunks and transforms them into Gemini-compatible JSON responses.
 // It handles text content, tool calls, and usage metadata, outputting responses that match the Gemini API format.
-func ConvertOpenAIResponseToGemini(rawJSON []byte, param *ConvertOpenAIResponseToGeminiParams) []string {
+//
+// Parameters:
+//   - ctx: The context for the request.
+//   - modelName: The name of the model.
+//   - rawJSON: The raw JSON response from the OpenAI API.
+//   - param: A pointer to a parameter object for the conversion.
+//
+// Returns:
+//   - []string: A slice of strings, each containing a Gemini-compatible JSON response.
+func ConvertOpenAIResponseToGemini(_ context.Context, _ string, rawJSON []byte, param *any) []string {
+    if *param == nil {
+        *param = &ConvertOpenAIResponseToGeminiParams{
+            ToolCallsAccumulator: nil,
+            ContentAccumulator:   strings.Builder{},
+            IsFirstChunk:         false,
+        }
+    }
+
     // Handle [DONE] marker
     if strings.TrimSpace(string(rawJSON)) == "[DONE]" {
         return []string{}
@@ -42,8 +60,8 @@ func ConvertOpenAIResponseToGemini(rawJSON []byte, param *ConvertOpenAIResponseT
     root := gjson.ParseBytes(rawJSON)
 
     // Initialize accumulators if needed
-    if param.ToolCallsAccumulator == nil {
-        param.ToolCallsAccumulator = make(map[int]*ToolCallAccumulator)
+    if (*param).(*ConvertOpenAIResponseToGeminiParams).ToolCallsAccumulator == nil {
+        (*param).(*ConvertOpenAIResponseToGeminiParams).ToolCallsAccumulator = make(map[int]*ToolCallAccumulator)
     }
 
     // Process choices
@@ -85,12 +103,12 @@ func ConvertOpenAIResponseToGemini(rawJSON []byte, param *ConvertOpenAIResponseT
         delta := choice.Get("delta")
 
         // Handle role (only in first chunk)
-        if role := delta.Get("role"); role.Exists() && param.IsFirstChunk {
+        if role := delta.Get("role"); role.Exists() && (*param).(*ConvertOpenAIResponseToGeminiParams).IsFirstChunk {
             // OpenAI assistant -> Gemini model
             if role.String() == "assistant" {
                 template, _ = sjson.Set(template, "candidates.0.content.role", "model")
             }
-            param.IsFirstChunk = false
+            (*param).(*ConvertOpenAIResponseToGeminiParams).IsFirstChunk = false
             results = append(results, template)
             return true
         }
@@ -98,7 +116,7 @@ func ConvertOpenAIResponseToGemini(rawJSON []byte, param *ConvertOpenAIResponseT
         // Handle content delta
         if content := delta.Get("content"); content.Exists() && content.String() != "" {
             contentText := content.String()
-            param.ContentAccumulator.WriteString(contentText)
+            (*param).(*ConvertOpenAIResponseToGeminiParams).ContentAccumulator.WriteString(contentText)
 
             // Create text part for this delta
             parts := []interface{}{
@@ -124,8 +142,8 @@ func ConvertOpenAIResponseToGemini(rawJSON []byte, param *ConvertOpenAIResponseT
             functionArgs := function.Get("arguments").String()
 
             // Initialize accumulator if needed
-            if _, exists := param.ToolCallsAccumulator[toolIndex]; !exists {
-                param.ToolCallsAccumulator[toolIndex] = &ToolCallAccumulator{
+            if _, exists := (*param).(*ConvertOpenAIResponseToGeminiParams).ToolCallsAccumulator[toolIndex]; !exists {
+                (*param).(*ConvertOpenAIResponseToGeminiParams).ToolCallsAccumulator[toolIndex] = &ToolCallAccumulator{
                     ID:   toolID,
                     Name: functionName,
                 }
@@ -133,17 +151,17 @@ func ConvertOpenAIResponseToGemini(rawJSON []byte, param *ConvertOpenAIResponseT
 
             // Update ID if provided
             if toolID != "" {
-                param.ToolCallsAccumulator[toolIndex].ID = toolID
+                (*param).(*ConvertOpenAIResponseToGeminiParams).ToolCallsAccumulator[toolIndex].ID = toolID
             }
 
             // Update name if provided
             if functionName != "" {
-                param.ToolCallsAccumulator[toolIndex].Name = functionName
+                (*param).(*ConvertOpenAIResponseToGeminiParams).ToolCallsAccumulator[toolIndex].Name = functionName
             }
 
             // Accumulate arguments
             if functionArgs != "" {
-                param.ToolCallsAccumulator[toolIndex].Arguments.WriteString(functionArgs)
+                (*param).(*ConvertOpenAIResponseToGeminiParams).ToolCallsAccumulator[toolIndex].Arguments.WriteString(functionArgs)
             }
         }
         return true
@@ -159,9 +177,9 @@ func ConvertOpenAIResponseToGemini(rawJSON []byte, param *ConvertOpenAIResponseT
         template, _ = sjson.Set(template, "candidates.0.finishReason", geminiFinishReason)
 
         // If we have accumulated tool calls, output them now
-        if len(param.ToolCallsAccumulator) > 0 {
+        if len((*param).(*ConvertOpenAIResponseToGeminiParams).ToolCallsAccumulator) > 0 {
             var parts []interface{}
-            for _, accumulator := range param.ToolCallsAccumulator {
+            for _, accumulator := range (*param).(*ConvertOpenAIResponseToGeminiParams).ToolCallsAccumulator {
                 argsStr := accumulator.Arguments.String()
                 var argsMap map[string]interface{}
 
@@ -201,7 +219,7 @@ func ConvertOpenAIResponseToGemini(rawJSON []byte, param *ConvertOpenAIResponseT
         }
 
         // Clear accumulators
-        param.ToolCallsAccumulator = make(map[int]*ToolCallAccumulator)
+        (*param).(*ConvertOpenAIResponseToGeminiParams).ToolCallsAccumulator = make(map[int]*ToolCallAccumulator)
     }
 
     results = append(results, template)
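
Tool-call arguments arrive as string fragments spread across streaming chunks; the converter concatenates them in a strings.Builder and only parses the result once the finish reason arrives. A standalone sketch of that accumulate-then-flush idea; the fragments are invented for illustration:

package main

import (
    "encoding/json"
    "fmt"
    "strings"
)

func main() {
    // Argument fragments as they might arrive across streaming chunks.
    fragments := []string{`{"ci`, `ty":"Par`, `is"}`}

    var args strings.Builder
    for _, f := range fragments {
        args.WriteString(f) // accumulate, as ToolCallAccumulator.Arguments does
    }

    // On finish, the concatenated string parses as one JSON object.
    var parsed map[string]interface{}
    if err := json.Unmarshal([]byte(args.String()), &parsed); err == nil {
        fmt.Println(parsed["city"]) // Paris
    }
}
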
@@ -243,8 +261,17 @@ func mapOpenAIFinishReasonToGemini(openAIReason string) string {
     }
 }
 
-// ConvertOpenAINonStreamResponseToGemini converts OpenAI non-streaming response to Gemini format
-func ConvertOpenAINonStreamResponseToGemini(rawJSON []byte) string {
+// ConvertOpenAIResponseToGeminiNonStream converts a non-streaming OpenAI response to a non-streaming Gemini response.
+//
+// Parameters:
+//   - ctx: The context for the request.
+//   - modelName: The name of the model.
+//   - rawJSON: The raw JSON response from the OpenAI API.
+//   - param: A pointer to a parameter object for the conversion.
+//
+// Returns:
+//   - string: A Gemini-compatible JSON response.
+func ConvertOpenAIResponseToGeminiNonStream(_ context.Context, _ string, rawJSON []byte, _ *any) string {
     root := gjson.ParseBytes(rawJSON)
 
     // Base Gemini response template