Refactor codebase

Luis Pater
2025-08-22 01:31:12 +08:00
parent 2b1762be16
commit 8c555c4e69
109 changed files with 7319 additions and 5735 deletions

View File

@@ -1,242 +1,211 @@
// Package openai provides request translation functionality for OpenAI to Gemini CLI API compatibility.
// It converts OpenAI Chat Completions requests into Gemini CLI compatible JSON using gjson/sjson only.
package openai
import (
"encoding/json"
"fmt"
"strings"
"github.com/luispater/CLIProxyAPI/internal/client"
"github.com/luispater/CLIProxyAPI/internal/misc"
log "github.com/sirupsen/logrus"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
)
// ConvertOpenAIRequestToGeminiCLI converts an OpenAI Chat Completions request (raw JSON)
// into a complete Gemini CLI request JSON. All JSON construction uses sjson and lookups use gjson.
//
// Parameters:
// - modelName: The name of the model to use for the request
// - rawJSON: The raw JSON request data from the OpenAI API
// - stream: A boolean indicating if the request is for a streaming response (unused in current implementation)
//
// Returns:
// - []byte: The transformed request data in Gemini CLI API format
func ConvertOpenAIRequestToGeminiCLI(modelName string, rawJSON []byte, _ bool) []byte {
// Base envelope
out := []byte(`{"project":"","request":{"contents":[],"generationConfig":{"thinkingConfig":{"include_thoughts":true}}},"model":"gemini-2.5-pro"}`)
// Model
out, _ = sjson.SetBytes(out, "model", modelName)
// Reasoning effort -> thinkingBudget/include_thoughts
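// Mapping applied below: "none" -> thinkingBudget 0 with include_thoughts removed,
// "low" -> 1024, "medium" -> 8192, "high" -> 24576, and "auto"/absent/unrecognized -> -1 (dynamic budget).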
re := gjson.GetBytes(rawJSON, "reasoning_effort")
if re.Exists() {
switch re.String() {
case "none":
out, _ = sjson.DeleteBytes(out, "request.generationConfig.thinkingConfig.include_thoughts")
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", 0)
case "auto":
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
case "low":
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", 1024)
case "medium":
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", 8192)
case "high":
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", 24576)
default:
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
}
} else {
out, _ = sjson.SetBytes(out, "request.generationConfig.thinkingConfig.thinkingBudget", -1)
}
// Initialize data structures for processing conversation messages
// contents: stores the processed conversation history
// systemInstruction: stores system-level instructions separate from conversation
contents := make([]client.Content, 0)
var systemInstruction *client.Content
messagesResult := gjson.GetBytes(rawJSON, "messages")
// Temperature/top_p/top_k
if tr := gjson.GetBytes(rawJSON, "temperature"); tr.Exists() && tr.Type == gjson.Number {
out, _ = sjson.SetBytes(out, "request.generationConfig.temperature", tr.Num)
}
if tpr := gjson.GetBytes(rawJSON, "top_p"); tpr.Exists() && tpr.Type == gjson.Number {
out, _ = sjson.SetBytes(out, "request.generationConfig.topP", tpr.Num)
}
if tkr := gjson.GetBytes(rawJSON, "top_k"); tkr.Exists() && tkr.Type == gjson.Number {
out, _ = sjson.SetBytes(out, "request.generationConfig.topK", tkr.Num)
}
// Pre-process messages to create mappings for tool calls and responses
// First pass: collect function call ID to function name mappings
toolCallToFunctionName := make(map[string]string)
toolItems := make(map[string]*client.FunctionResponse)
if messagesResult.IsArray() {
messagesResults := messagesResult.Array()
// First pass: collect function call mappings
for i := 0; i < len(messagesResults); i++ {
messageResult := messagesResults[i]
roleResult := messageResult.Get("role")
if roleResult.Type != gjson.String {
continue
}
// Extract function call ID to function name mappings
if roleResult.String() == "assistant" {
toolCallsResult := messageResult.Get("tool_calls")
if toolCallsResult.Exists() && toolCallsResult.IsArray() {
tcsResult := toolCallsResult.Array()
for j := 0; j < len(tcsResult); j++ {
tcResult := tcsResult[j]
if tcResult.Get("type").String() == "function" {
functionID := tcResult.Get("id").String()
functionName := tcResult.Get("function.name").String()
toolCallToFunctionName[functionID] = functionName
// messages -> systemInstruction + contents
messages := gjson.GetBytes(rawJSON, "messages")
if messages.IsArray() {
arr := messages.Array()
// First pass: assistant tool_calls id->name map
tcID2Name := map[string]string{}
for i := 0; i < len(arr); i++ {
m := arr[i]
if m.Get("role").String() == "assistant" {
tcs := m.Get("tool_calls")
if tcs.IsArray() {
for _, tc := range tcs.Array() {
if tc.Get("type").String() == "function" {
id := tc.Get("id").String()
name := tc.Get("function.name").String()
if id != "" && name != "" {
tcID2Name[id] = name
}
}
}
}
}
}
// Second pass: collect tool responses with correct function names
for i := 0; i < len(messagesResults); i++ {
messageResult := messagesResults[i]
roleResult := messageResult.Get("role")
if roleResult.Type != gjson.String {
continue
}
contentResult := messageResult.Get("content")
// Extract tool responses for later matching with function calls
if roleResult.String() == "tool" {
toolCallID := messageResult.Get("tool_call_id").String()
// Second pass: build the tool response cache
toolResponses := map[string]string{} // tool_call_id -> response text
for i := 0; i < len(arr); i++ {
m := arr[i]
role := m.Get("role").String()
if role == "tool" {
toolCallID := m.Get("tool_call_id").String()
if toolCallID != "" {
var responseData string
// Handle both string and object-based tool response formats
if contentResult.Type == gjson.String {
responseData = contentResult.String()
} else if contentResult.IsObject() && contentResult.Get("type").String() == "text" {
responseData = contentResult.Get("text").String()
c := m.Get("content")
if c.Type == gjson.String {
toolResponses[toolCallID] = c.String()
} else if c.IsObject() && c.Get("type").String() == "text" {
toolResponses[toolCallID] = c.Get("text").String()
}
// Get the correct function name from the mapping
functionName := toolCallToFunctionName[toolCallID]
if functionName == "" {
// Fallback: use tool call ID if function name not found
functionName = toolCallID
}
// Create function response object with correct function name
functionResponse := client.FunctionResponse{Name: functionName, Response: map[string]interface{}{"result": responseData}}
toolItems[toolCallID] = &functionResponse
}
}
}
}
if messagesResult.IsArray() {
messagesResults := messagesResult.Array()
for i := 0; i < len(messagesResults); i++ {
messageResult := messagesResults[i]
roleResult := messageResult.Get("role")
contentResult := messageResult.Get("content")
if roleResult.Type != gjson.String {
continue
}
for i := 0; i < len(arr); i++ {
m := arr[i]
role := m.Get("role").String()
content := m.Get("content")
role := roleResult.String()
if role == "system" && len(messagesResults) > 1 {
// System messages are converted to a user message followed by a model's acknowledgment.
if contentResult.Type == gjson.String {
systemInstruction = &client.Content{Role: "user", Parts: []client.Part{{Text: contentResult.String()}}}
} else if contentResult.IsObject() {
// Handle object-based system messages.
if contentResult.Get("type").String() == "text" {
systemInstruction = &client.Content{Role: "user", Parts: []client.Part{{Text: contentResult.Get("text").String()}}}
}
if role == "system" && len(arr) > 1 {
// system -> request.systemInstruction, expressed as a user-role message
if content.Type == gjson.String {
out, _ = sjson.SetBytes(out, "request.systemInstruction.role", "user")
out, _ = sjson.SetBytes(out, "request.systemInstruction.parts.0.text", content.String())
} else if content.IsObject() && content.Get("type").String() == "text" {
out, _ = sjson.SetBytes(out, "request.systemInstruction.role", "user")
out, _ = sjson.SetBytes(out, "request.systemInstruction.parts.0.text", content.Get("text").String())
}
} else if role == "user" || (role == "system" && len(messagesResults) == 1) { // If there's only a system message, treat it as a user message.
// User messages can contain simple text or a multi-part body.
if contentResult.Type == gjson.String {
contents = append(contents, client.Content{Role: "user", Parts: []client.Part{{Text: contentResult.String()}}})
} else if contentResult.IsArray() {
// Handle multi-part user messages (text, images, files).
contentItemResults := contentResult.Array()
parts := make([]client.Part, 0)
for j := 0; j < len(contentItemResults); j++ {
contentItemResult := contentItemResults[j]
contentTypeResult := contentItemResult.Get("type")
switch contentTypeResult.String() {
} else if role == "user" || (role == "system" && len(arr) == 1) {
// Build single user content node to avoid splitting into multiple contents
node := []byte(`{"role":"user","parts":[]}`)
if content.Type == gjson.String {
node, _ = sjson.SetBytes(node, "parts.0.text", content.String())
} else if content.IsArray() {
items := content.Array()
p := 0
for _, item := range items {
switch item.Get("type").String() {
case "text":
parts = append(parts, client.Part{Text: contentItemResult.Get("text").String()})
node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".text", item.Get("text").String())
p++
case "image_url":
// Parse data URI for images.
imageURL := contentItemResult.Get("image_url.url").String()
imageURL := item.Get("image_url.url").String()
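// Expected data URI form: "data:<mime>;base64,<payload>". The [5:] slice drops "data:",
// SplitN on ";" separates the mime type, and [7:] drops the "base64," prefix.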
if len(imageURL) > 5 {
imageURLs := strings.SplitN(imageURL[5:], ";", 2)
if len(imageURLs) == 2 && len(imageURLs[1]) > 7 {
parts = append(parts, client.Part{InlineData: &client.InlineData{
MimeType: imageURLs[0],
Data: imageURLs[1][7:],
}})
pieces := strings.SplitN(imageURL[5:], ";", 2)
if len(pieces) == 2 && len(pieces[1]) > 7 {
mime := pieces[0]
data := pieces[1][7:]
node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".inlineData.mime_type", mime)
node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".inlineData.data", data)
p++
}
}
case "file":
// Handle file attachments by determining MIME type from extension.
filename := contentItemResult.Get("file.filename").String()
fileData := contentItemResult.Get("file.file_data").String()
filename := item.Get("file.filename").String()
fileData := item.Get("file.file_data").String()
ext := ""
if split := strings.Split(filename, "."); len(split) > 1 {
ext = split[len(split)-1]
if sp := strings.Split(filename, "."); len(sp) > 1 {
ext = sp[len(sp)-1]
}
if mimeType, ok := misc.MimeTypes[ext]; ok {
parts = append(parts, client.Part{InlineData: &client.InlineData{
MimeType: mimeType,
Data: fileData,
}})
node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".inlineData.mime_type", mimeType)
node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".inlineData.data", fileData)
p++
} else {
log.Warnf("Unknown file name extension '%s' at index %d, skipping file", ext, j)
log.Warnf("Unknown file name extension '%s' in user message, skip", ext)
}
}
}
contents = append(contents, client.Content{Role: "user", Parts: parts})
}
out, _ = sjson.SetRawBytes(out, "request.contents.-1", node)
} else if role == "assistant" {
// Assistant messages can contain text responses or tool calls
// In the internal format, assistant messages are converted to "model" role
if contentResult.Type == gjson.String {
// Simple text response from the assistant
contents = append(contents, client.Content{Role: "model", Parts: []client.Part{{Text: contentResult.String()}}})
} else if !contentResult.Exists() || contentResult.Type == gjson.Null {
// Handle complex tool calls made by the assistant
// This processes function calls and matches them with their responses
functionIDs := make([]string, 0)
toolCallsResult := messageResult.Get("tool_calls")
if toolCallsResult.IsArray() {
parts := make([]client.Part, 0)
tcsResult := toolCallsResult.Array()
// Process each tool call in the assistant's message
for j := 0; j < len(tcsResult); j++ {
tcResult := tcsResult[j]
// Extract function call details
functionID := tcResult.Get("id").String()
functionIDs = append(functionIDs, functionID)
functionName := tcResult.Get("function.name").String()
functionArgs := tcResult.Get("function.arguments").String()
// Parse function arguments from JSON string to map
var args map[string]any
if err := json.Unmarshal([]byte(functionArgs), &args); err == nil {
parts = append(parts, client.Part{
FunctionCall: &client.FunctionCall{
Name: functionName,
Args: args,
},
})
if content.Type == gjson.String {
// Assistant text -> single model content
node := []byte(`{"role":"model","parts":[{"text":""}]}`)
node, _ = sjson.SetBytes(node, "parts.0.text", content.String())
out, _ = sjson.SetRawBytes(out, "request.contents.-1", node)
} else if !content.Exists() || content.Type == gjson.Null {
// Tool calls -> single model content with functionCall parts
tcs := m.Get("tool_calls")
if tcs.IsArray() {
node := []byte(`{"role":"model","parts":[]}`)
p := 0
fIDs := make([]string, 0)
for _, tc := range tcs.Array() {
if tc.Get("type").String() != "function" {
continue
}
fid := tc.Get("id").String()
fname := tc.Get("function.name").String()
fargs := tc.Get("function.arguments").String()
node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".functionCall.name", fname)
node, _ = sjson.SetRawBytes(node, "parts."+itoa(p)+".functionCall.args", []byte(fargs))
p++
if fid != "" {
fIDs = append(fIDs, fid)
}
}
out, _ = sjson.SetRawBytes(out, "request.contents.-1", node)
// Add the model's function calls to the conversation
if len(parts) > 0 {
contents = append(contents, client.Content{
Role: "model", Parts: parts,
})
// Create a separate tool response message with the collected responses
// This matches function calls with their corresponding responses
toolParts := make([]client.Part, 0)
for _, functionID := range functionIDs {
if functionResponse, ok := toolItems[functionID]; ok {
toolParts = append(toolParts, client.Part{FunctionResponse: functionResponse})
// Append a single tool content combining name + response per function
toolNode := []byte(`{"role":"tool","parts":[]}`)
pp := 0
for _, fid := range fIDs {
if name, ok := tcID2Name[fid]; ok {
toolNode, _ = sjson.SetBytes(toolNode, "parts."+itoa(pp)+".functionResponse.name", name)
resp := toolResponses[fid]
if resp == "" {
resp = "{}"
}
toolNode, _ = sjson.SetRawBytes(toolNode, "parts."+itoa(pp)+".functionResponse.response", []byte(`{"result":`+quoteIfNeeded(resp)+`}`))
pp++
}
// Add the tool responses as a separate message in the conversation
contents = append(contents, client.Content{Role: "tool", Parts: toolParts})
}
if pp > 0 {
out, _ = sjson.SetRawBytes(out, "request.contents.-1", toolNode)
}
}
}
@@ -244,28 +213,38 @@ func ConvertOpenAIChatRequestToCli(rawJSON []byte) (string, *client.Content, []c
}
}
// Translate the tool declarations from the request.
var tools []client.ToolDeclaration
toolsResult := gjson.GetBytes(rawJSON, "tools")
if toolsResult.IsArray() {
tools = make([]client.ToolDeclaration, 1)
tools[0].FunctionDeclarations = make([]any, 0)
toolsResults := toolsResult.Array()
for i := 0; i < len(toolsResults); i++ {
toolResult := toolsResults[i]
if toolResult.Get("type").String() == "function" {
functionTypeResult := toolResult.Get("function")
if functionTypeResult.Exists() && functionTypeResult.IsObject() {
var functionDeclaration any
if err := json.Unmarshal([]byte(functionTypeResult.Raw), &functionDeclaration); err == nil {
tools[0].FunctionDeclarations = append(tools[0].FunctionDeclarations, functionDeclaration)
}
// tools -> request.tools[0].functionDeclarations
tools := gjson.GetBytes(rawJSON, "tools")
if tools.IsArray() {
out, _ = sjson.SetRawBytes(out, "request.tools", []byte(`[{"functionDeclarations":[]}]`))
fdPath := "request.tools.0.functionDeclarations"
for _, t := range tools.Array() {
if t.Get("type").String() == "function" {
fn := t.Get("function")
if fn.Exists() && fn.IsObject() {
out, _ = sjson.SetRawBytes(out, fdPath+".-1", []byte(fn.Raw))
}
}
}
}
return out
}
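// Illustrative usage (a sketch added for clarity, not part of the original commit; the request body
// and the expectations below are assumptions): a system-plus-user conversation with
// reasoning_effort "low" yields a request.systemInstruction node, one user content node, and a
// thinkingConfig.thinkingBudget of 1024.
func exampleConvertOpenAIRequestToGeminiCLI() []byte {
raw := []byte(`{
"model": "gpt-4o",
"reasoning_effort": "low",
"messages": [
{"role": "system", "content": "You are terse."},
{"role": "user", "content": "Hello"}
]
}`)
return ConvertOpenAIRequestToGeminiCLI("gemini-2.5-pro", raw, false)
}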
// itoa converts an int to a string without importing strconv for the few call sites here.
func itoa(i int) string { return fmt.Sprintf("%d", i) }
// quoteIfNeeded ensures a string is a valid JSON value: plain text is escaped and quoted,
// while JSON objects/arrays pass through unchanged.
func quoteIfNeeded(s string) string {
s = strings.TrimSpace(s)
if s == "" {
return "\"\""
}
if len(s) > 0 && (s[0] == '{' || s[0] == '[') {
return s
}
// Minimally escape backslashes and double quotes.
s = strings.ReplaceAll(s, "\\", "\\\\")
s = strings.ReplaceAll(s, "\"", "\\\"")
return "\"" + s + "\""
}
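// Behavior sketch for quoteIfNeeded (derived from the implementation above, shown here for clarity):
//
// quoteIfNeeded(`{"ok":true}`) -> `{"ok":true}`   (JSON object passes through unchanged)
// quoteIfNeeded(`[1,2]`)       -> `[1,2]`         (JSON array passes through unchanged)
// quoteIfNeeded(`done "ok"`)   -> `"done \"ok\""` (plain text is escaped and wrapped in quotes)
// quoteIfNeeded("")            -> `""`            (empty input becomes an empty JSON string)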

View File

@@ -1,26 +1,49 @@
// Package openai provides response translation functionality for Gemini CLI to OpenAI API compatibility.
// This package handles the conversion of Gemini CLI API responses into OpenAI Chat Completions-compatible
// JSON format, transforming streaming events and non-streaming responses into the format
// expected by OpenAI API clients. It supports both streaming and non-streaming modes,
// handling text content, tool calls, reasoning content, and usage metadata appropriately.
package openai
import (
"bytes"
"context"
"fmt"
"time"
. "github.com/luispater/CLIProxyAPI/internal/translator/gemini/openai"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
)
// convertCliResponseToOpenAIChatParams holds conversion state carried across streaming calls.
type convertCliResponseToOpenAIChatParams struct {
UnixTimestamp int64
}
// ConvertCliResponseToOpenAI translates a single chunk of a streaming response from the
// Gemini CLI API format to the OpenAI Chat Completions streaming format.
// It processes various Gemini CLI event types and transforms them into OpenAI-compatible JSON responses.
// The function handles text content, tool calls, reasoning content, and usage metadata, outputting
// responses that match the OpenAI API format. It supports incremental updates for streaming responses.
//
// Parameters:
// - ctx: The context for the request, used for cancellation and timeout handling
// - modelName: The name of the model being used for the response (unused in current implementation)
// - rawJSON: The raw JSON response from the Gemini CLI API
// - param: A pointer to a parameter object for maintaining state between calls
//
// Returns:
// - []string: A slice of strings, each containing an OpenAI-compatible JSON response
func ConvertCliResponseToOpenAI(_ context.Context, _ string, rawJSON []byte, param *any) []string {
if *param == nil {
*param = &convertCliResponseToOpenAIChatParams{
UnixTimestamp: 0,
}
}
if bytes.Equal(rawJSON, []byte("[DONE]")) {
return []string{}
}
// Initialize the OpenAI SSE template.
@@ -35,11 +58,11 @@ func ConvertCliResponseToOpenAIChat(rawJSON []byte, unixTimestamp int64, isGlAPI
if createTimeResult := gjson.GetBytes(rawJSON, "response.createTime"); createTimeResult.Exists() {
t, err := time.Parse(time.RFC3339Nano, createTimeResult.String())
if err == nil {
unixTimestamp = t.Unix()
(*param).(*convertCliResponseToOpenAIChatParams).UnixTimestamp = t.Unix()
}
template, _ = sjson.Set(template, "created", unixTimestamp)
template, _ = sjson.Set(template, "created", (*param).(*convertCliResponseToOpenAIChatParams).UnixTimestamp)
} else {
template, _ = sjson.Set(template, "created", unixTimestamp)
template, _ = sjson.Set(template, "created", (*param).(*convertCliResponseToOpenAIChatParams).UnixTimestamp)
}
// Extract and set the response ID.
@@ -106,92 +129,26 @@ func ConvertCliResponseToOpenAIChat(rawJSON []byte, unixTimestamp int64, isGlAPI
}
}
return template
return []string{template}
}
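// Illustrative streaming usage (a sketch, not from the original commit; the chunk handling around it
// is an assumption): the caller keeps one state value across chunks so the created timestamp parsed
// from response.createTime is reused on later events.
//
// var state any
// for _, chunk := range chunks { // raw Gemini CLI stream events, terminated by []byte("[DONE]")
//     for _, payload := range ConvertCliResponseToOpenAI(context.Background(), "gemini-2.5-pro", chunk, &state) {
//         _ = payload // one OpenAI-compatible chat completion chunk JSON per entry
//     }
// }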
// ConvertCliResponseToOpenAINonStream converts a non-streaming Gemini CLI response to a non-streaming OpenAI response.
// This function processes the complete Gemini CLI response and transforms it into a single OpenAI-compatible
// JSON response. It handles message content, tool calls, reasoning content, and usage metadata, combining all
// the information into a single response that matches the OpenAI API format.
//
// Parameters:
// - ctx: The context for the request, used for cancellation and timeout handling
// - modelName: The name of the model being used for the response
// - rawJSON: The raw JSON response from the Gemini CLI API
// - param: A pointer to a parameter object for the conversion
//
// Returns:
// - string: An OpenAI-compatible JSON response containing all message content and metadata
func ConvertCliResponseToOpenAINonStream(ctx context.Context, modelName string, rawJSON []byte, param *any) string {
responseResult := gjson.GetBytes(rawJSON, "response")
if responseResult.Exists() {
return ConvertGeminiResponseToOpenAINonStream(ctx, modelName, []byte(responseResult.Raw), param)
}
template := `{"id":"","object":"chat.completion","created":123456,"model":"model","choices":[{"index":0,"message":{"role":"assistant","content":null,"reasoning_content":null,"tool_calls":null},"finish_reason":null,"native_finish_reason":null}]}`
if modelVersionResult := gjson.GetBytes(rawJSON, "response.modelVersion"); modelVersionResult.Exists() {
template, _ = sjson.Set(template, "model", modelVersionResult.String())
}
if createTimeResult := gjson.GetBytes(rawJSON, "response.createTime"); createTimeResult.Exists() {
t, err := time.Parse(time.RFC3339Nano, createTimeResult.String())
if err == nil {
unixTimestamp = t.Unix()
}
template, _ = sjson.Set(template, "created", unixTimestamp)
} else {
template, _ = sjson.Set(template, "created", unixTimestamp)
}
if responseIDResult := gjson.GetBytes(rawJSON, "response.responseId"); responseIDResult.Exists() {
template, _ = sjson.Set(template, "id", responseIDResult.String())
}
if finishReasonResult := gjson.GetBytes(rawJSON, "response.candidates.0.finishReason"); finishReasonResult.Exists() {
template, _ = sjson.Set(template, "choices.0.finish_reason", finishReasonResult.String())
template, _ = sjson.Set(template, "choices.0.native_finish_reason", finishReasonResult.String())
}
if usageResult := gjson.GetBytes(rawJSON, "response.usageMetadata"); usageResult.Exists() {
if candidatesTokenCountResult := usageResult.Get("candidatesTokenCount"); candidatesTokenCountResult.Exists() {
template, _ = sjson.Set(template, "usage.completion_tokens", candidatesTokenCountResult.Int())
}
if totalTokenCountResult := usageResult.Get("totalTokenCount"); totalTokenCountResult.Exists() {
template, _ = sjson.Set(template, "usage.total_tokens", totalTokenCountResult.Int())
}
promptTokenCount := usageResult.Get("promptTokenCount").Int()
thoughtsTokenCount := usageResult.Get("thoughtsTokenCount").Int()
template, _ = sjson.Set(template, "usage.prompt_tokens", promptTokenCount+thoughtsTokenCount)
if thoughtsTokenCount > 0 {
template, _ = sjson.Set(template, "usage.completion_tokens_details.reasoning_tokens", thoughtsTokenCount)
}
}
// Process the main content part of the response.
partsResult := gjson.GetBytes(rawJSON, "response.candidates.0.content.parts")
if partsResult.IsArray() {
partsResults := partsResult.Array()
for i := 0; i < len(partsResults); i++ {
partResult := partsResults[i]
partTextResult := partResult.Get("text")
functionCallResult := partResult.Get("functionCall")
if partTextResult.Exists() {
// Append text content, distinguishing between regular content and reasoning.
if partResult.Get("thought").Bool() {
template, _ = sjson.Set(template, "choices.0.message.reasoning_content", partTextResult.String())
} else {
template, _ = sjson.Set(template, "choices.0.message.content", partTextResult.String())
}
template, _ = sjson.Set(template, "choices.0.message.role", "assistant")
} else if functionCallResult.Exists() {
// Append function call content to the tool_calls array.
toolCallsResult := gjson.Get(template, "choices.0.message.tool_calls")
if !toolCallsResult.Exists() || !toolCallsResult.IsArray() {
template, _ = sjson.SetRaw(template, "choices.0.message.tool_calls", `[]`)
}
functionCallItemTemplate := `{"id": "","type": "function","function": {"name": "","arguments": ""}}`
fcName := functionCallResult.Get("name").String()
functionCallItemTemplate, _ = sjson.Set(functionCallItemTemplate, "id", fmt.Sprintf("%s-%d", fcName, time.Now().UnixNano()))
functionCallItemTemplate, _ = sjson.Set(functionCallItemTemplate, "function.name", fcName)
if fcArgsResult := functionCallResult.Get("args"); fcArgsResult.Exists() {
functionCallItemTemplate, _ = sjson.Set(functionCallItemTemplate, "function.arguments", fcArgsResult.Raw)
}
template, _ = sjson.Set(template, "choices.0.message.role", "assistant")
template, _ = sjson.SetRaw(template, "choices.0.message.tool_calls.-1", functionCallItemTemplate)
} else {
// If no usable content is found, return an empty string.
return ""
}
}
}
return template
return ""
}
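// Illustrative non-streaming usage (a sketch under the signature above; variable names are assumptions):
//
// var state any
// body := ConvertCliResponseToOpenAINonStream(context.Background(), "gemini-2.5-pro", cliResponse, &state)
// // body is a single OpenAI chat.completion JSON document when cliResponse carries a "response"
// // object, and an empty string otherwise.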

View File

@@ -0,0 +1,19 @@
package openai
import (
. "github.com/luispater/CLIProxyAPI/internal/constant"
"github.com/luispater/CLIProxyAPI/internal/interfaces"
"github.com/luispater/CLIProxyAPI/internal/translator/translator"
)
func init() {
translator.Register(
OPENAI,
GEMINICLI,
ConvertOpenAIRequestToGeminiCLI,
interfaces.TranslateResponse{
Stream: ConvertCliResponseToOpenAI,
NonStream: ConvertCliResponseToOpenAINonStream,
},
)
}
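// With this registration in place, a request/response pair for the OPENAI -> GEMINICLI direction flows
// through the converters registered above. A minimal end-to-end sketch using only functions shown in
// this commit (the request body and upstreamResponse are assumptions for illustration):
//
// geminiReq := ConvertOpenAIRequestToGeminiCLI("gemini-2.5-pro",
//     []byte(`{"messages":[{"role":"user","content":"hi"}]}`), false)
// var state any
// openAIBody := ConvertCliResponseToOpenAINonStream(context.Background(), "gemini-2.5-pro", upstreamResponse, &state)
// _, _ = geminiReq, openAIBody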