mirror of
https://github.com/router-for-me/CLIProxyAPI.git
synced 2026-02-18 20:30:51 +08:00
Add Qwen support
359  internal/translator/openai/gemini/openai_gemini_request.go  Normal file
@@ -0,0 +1,359 @@
// Package gemini provides request translation functionality for Gemini to OpenAI API.
// It handles parsing and transforming Gemini API requests into OpenAI Chat Completions API format,
// extracting model information, generation config, message contents, and tool declarations.
// The package performs JSON data transformation to ensure compatibility
// between the Gemini API format and the OpenAI API's expected format.
package gemini

import (
	"crypto/rand"
	"encoding/json"
	"math/big"
	"strings"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

// ConvertGeminiRequestToOpenAI parses and transforms a Gemini API request into OpenAI Chat Completions API format.
// It extracts the model name, generation config, message contents, and tool declarations
// from the raw JSON request and returns them in the format expected by the OpenAI API.
func ConvertGeminiRequestToOpenAI(rawJSON []byte) string {
	// Base OpenAI Chat Completions API template
	out := `{"model":"","messages":[]}`

	root := gjson.ParseBytes(rawJSON)

	// Helper for generating tool call IDs in the form: call_<alphanum>
	genToolCallID := func() string {
		const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
		var b strings.Builder
		// 24-character random suffix
		for i := 0; i < 24; i++ {
			n, _ := rand.Int(rand.Reader, big.NewInt(int64(len(letters))))
			b.WriteByte(letters[n.Int64()])
		}
		return "call_" + b.String()
	}

	// Model mapping
	if model := root.Get("model"); model.Exists() {
		modelStr := model.String()
		out, _ = sjson.Set(out, "model", modelStr)
	}

	// Generation config mapping
	if genConfig := root.Get("generationConfig"); genConfig.Exists() {
		// Temperature
		if temp := genConfig.Get("temperature"); temp.Exists() {
			out, _ = sjson.Set(out, "temperature", temp.Float())
		}

		// Max tokens
		if maxTokens := genConfig.Get("maxOutputTokens"); maxTokens.Exists() {
			out, _ = sjson.Set(out, "max_tokens", maxTokens.Int())
		}

		// Top P
		if topP := genConfig.Get("topP"); topP.Exists() {
			out, _ = sjson.Set(out, "top_p", topP.Float())
		}

		// Top K: the OpenAI Chat Completions API has no direct equivalent, so
		// pass it through as a non-standard parameter for backends that accept it.
		if topK := genConfig.Get("topK"); topK.Exists() {
			out, _ = sjson.Set(out, "top_k", topK.Int())
		}

		// Stop sequences
		if stopSequences := genConfig.Get("stopSequences"); stopSequences.Exists() && stopSequences.IsArray() {
			var stops []string
			stopSequences.ForEach(func(_, value gjson.Result) bool {
				stops = append(stops, value.String())
				return true
			})
			if len(stops) > 0 {
				out, _ = sjson.Set(out, "stop", stops)
			}
		}
	}

	// Stream parameter
	if stream := root.Get("stream"); stream.Exists() {
		out, _ = sjson.Set(out, "stream", stream.Bool())
	}

	// Process contents (Gemini messages) -> OpenAI messages
	var openAIMessages []interface{}
	var toolCallIDs []string // Track tool call IDs for matching with tool results

	if contents := root.Get("contents"); contents.Exists() && contents.IsArray() {
		contents.ForEach(func(_, content gjson.Result) bool {
			role := content.Get("role").String()
			parts := content.Get("parts")

			// Convert role: model -> assistant
			if role == "model" {
				role = "assistant"
			}

			// Create OpenAI message
			msg := map[string]interface{}{
				"role":    role,
				"content": "",
			}

			var contentParts []string
			var toolCalls []interface{}

			if parts.Exists() && parts.IsArray() {
				parts.ForEach(func(_, part gjson.Result) bool {
					// Handle text parts
					if text := part.Get("text"); text.Exists() {
						contentParts = append(contentParts, text.String())
					}

					// Handle function calls (Gemini) -> tool calls (OpenAI)
					if functionCall := part.Get("functionCall"); functionCall.Exists() {
						toolCallID := genToolCallID()
						toolCallIDs = append(toolCallIDs, toolCallID)

						toolCall := map[string]interface{}{
							"id":   toolCallID,
							"type": "function",
							"function": map[string]interface{}{
								"name": functionCall.Get("name").String(),
							},
						}

						// Convert args to an arguments JSON string
						if args := functionCall.Get("args"); args.Exists() {
							argsJSON, _ := json.Marshal(args.Value())
							toolCall["function"].(map[string]interface{})["arguments"] = string(argsJSON)
						} else {
							toolCall["function"].(map[string]interface{})["arguments"] = "{}"
						}

						toolCalls = append(toolCalls, toolCall)
					}

					// Handle function responses (Gemini) -> tool role messages (OpenAI)
					if functionResponse := part.Get("functionResponse"); functionResponse.Exists() {
						// Create tool message for the function response
						toolMsg := map[string]interface{}{
							"role":         "tool",
							"tool_call_id": "", // Set below from context
							"content":      "",
						}

						// Convert response.content to a JSON string
						if response := functionResponse.Get("response"); response.Exists() {
							if respContent := response.Get("content"); respContent.Exists() {
								// Use the content field from the response
								contentJSON, _ := json.Marshal(respContent.Value())
								toolMsg["content"] = string(contentJSON)
							} else {
								// Fall back to the entire response
								responseJSON, _ := json.Marshal(response.Value())
								toolMsg["content"] = string(responseJSON)
							}
						}

						// Try to match with a previous tool call ID. The functionResponse
						// name is not used for matching yet; use the most recent ID, or
						// generate one if none is available. A more robust implementation
						// would match by function name.
						if len(toolCallIDs) > 0 {
							toolMsg["tool_call_id"] = toolCallIDs[len(toolCallIDs)-1]
						} else {
							toolMsg["tool_call_id"] = genToolCallID()
						}

						openAIMessages = append(openAIMessages, toolMsg)
					}

					return true
				})
			}

			// Set content
			if len(contentParts) > 0 {
				msg["content"] = strings.Join(contentParts, "")
			}

			// Set tool calls if any
			if len(toolCalls) > 0 {
				msg["tool_calls"] = toolCalls
			}

			openAIMessages = append(openAIMessages, msg)

			return true
		})
	}
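
	// Illustrative mapping produced by the loop above (the payload is a
	// hypothetical example; tool call IDs are randomly generated):
	//   Gemini content: {"role":"model","parts":[{"functionCall":{"name":"get_weather","args":{"city":"Beijing"}}}]}
	//   OpenAI message: {"role":"assistant","content":"","tool_calls":[{"id":"call_...","type":"function",
	//                    "function":{"name":"get_weather","arguments":"{\"city\":\"Beijing\"}"}}]}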

	// Set messages
	if len(openAIMessages) > 0 {
		messagesJSON, _ := json.Marshal(openAIMessages)
		out, _ = sjson.SetRaw(out, "messages", string(messagesJSON))
	}

	// Tools mapping: Gemini tools -> OpenAI tools
	if tools := root.Get("tools"); tools.Exists() && tools.IsArray() {
		var openAITools []interface{}
		tools.ForEach(func(_, tool gjson.Result) bool {
			if functionDeclarations := tool.Get("functionDeclarations"); functionDeclarations.Exists() && functionDeclarations.IsArray() {
				functionDeclarations.ForEach(func(_, funcDecl gjson.Result) bool {
					openAITool := map[string]interface{}{
						"type": "function",
						"function": map[string]interface{}{
							"name":        funcDecl.Get("name").String(),
							"description": funcDecl.Get("description").String(),
						},
					}

					// Convert the parameters schema
					if parameters := funcDecl.Get("parameters"); parameters.Exists() {
						openAITool["function"].(map[string]interface{})["parameters"] = parameters.Value()
					} else if parameters = funcDecl.Get("parametersJsonSchema"); parameters.Exists() {
						openAITool["function"].(map[string]interface{})["parameters"] = parameters.Value()
					}

					openAITools = append(openAITools, openAITool)
					return true
				})
			}
			return true
		})

		if len(openAITools) > 0 {
			toolsJSON, _ := json.Marshal(openAITools)
			out, _ = sjson.SetRaw(out, "tools", string(toolsJSON))
		}
	}

	// Tool choice mapping: Gemini functionCallingConfig mode -> OpenAI tool_choice
	if toolConfig := root.Get("toolConfig"); toolConfig.Exists() {
		if functionCallingConfig := toolConfig.Get("functionCallingConfig"); functionCallingConfig.Exists() {
			mode := functionCallingConfig.Get("mode").String()
			switch mode {
			case "NONE":
				out, _ = sjson.Set(out, "tool_choice", "none")
			case "AUTO":
				out, _ = sjson.Set(out, "tool_choice", "auto")
			case "ANY":
				out, _ = sjson.Set(out, "tool_choice", "required")
			}
		}
	}

	return out
}
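
For reference, a minimal sketch of how this translator might be exercised; the model name and payload below are hypothetical illustrations, not part of the commit:

package gemini

import (
	"fmt"
	"testing"
)

// TestConvertGeminiRequestToOpenAISketch prints the converted request so the
// field mapping can be inspected by eye; it makes no assertions.
func TestConvertGeminiRequestToOpenAISketch(t *testing.T) {
	in := []byte(`{
	  "model": "qwen3-coder-plus",
	  "generationConfig": {"temperature": 0.7, "maxOutputTokens": 1024, "stopSequences": ["END"]},
	  "contents": [
	    {"role": "user", "parts": [{"text": "What is the weather in Beijing?"}]}
	  ],
	  "tools": [{"functionDeclarations": [{
	    "name": "get_weather",
	    "description": "Look up current weather",
	    "parameters": {"type": "object", "properties": {"city": {"type": "string"}}}
	  }]}]
	}`)
	out := ConvertGeminiRequestToOpenAI(in)
	fmt.Println(out)
	// Expected shape (key order may differ):
	// {"model":"qwen3-coder-plus","messages":[{"content":"What is the weather in Beijing?","role":"user"}],
	//  "temperature":0.7,"max_tokens":1024,"stop":["END"],"tools":[{"type":"function","function":{...}}]}
}
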
353  internal/translator/openai/gemini/openai_gemini_response.go  Normal file
@@ -0,0 +1,353 @@
// Package gemini provides response translation functionality for OpenAI to Gemini API.
// This package handles the conversion of OpenAI Chat Completions API responses into Gemini API-compatible
// JSON format, transforming streaming events and non-streaming responses into the format
// expected by Gemini API clients. It supports both streaming and non-streaming modes,
// handling text content, tool calls, and usage metadata appropriately.
package gemini

import (
	"encoding/json"
	"strings"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

// ConvertOpenAIResponseToGeminiParams holds parameters for response conversion.
type ConvertOpenAIResponseToGeminiParams struct {
	// Tool calls accumulator for streaming
	ToolCallsAccumulator map[int]*ToolCallAccumulator
	// Content accumulator for streaming
	ContentAccumulator strings.Builder
	// Track whether this is the first chunk
	IsFirstChunk bool
}

// ToolCallAccumulator holds the state for accumulating tool call data.
type ToolCallAccumulator struct {
	ID        string
	Name      string
	Arguments strings.Builder
}

// ConvertOpenAIResponseToGemini converts the OpenAI Chat Completions streaming response format to the Gemini API format.
// This function processes OpenAI streaming chunks and transforms them into Gemini-compatible JSON responses.
// It handles text content, tool calls, and usage metadata, outputting responses that match the Gemini API format.
func ConvertOpenAIResponseToGemini(rawJSON []byte, param *ConvertOpenAIResponseToGeminiParams) []string {
	// Handle the [DONE] marker
	if strings.TrimSpace(string(rawJSON)) == "[DONE]" {
		return []string{}
	}

	root := gjson.ParseBytes(rawJSON)

	// Initialize accumulators if needed
	if param.ToolCallsAccumulator == nil {
		param.ToolCallsAccumulator = make(map[int]*ToolCallAccumulator)
	}

	// Process choices
	if choices := root.Get("choices"); choices.Exists() && choices.IsArray() {
		// Handle an empty choices array (usage-only chunk)
		if len(choices.Array()) == 0 {
			// This is a usage-only chunk; emit usage metadata and return
			if usage := root.Get("usage"); usage.Exists() {
				template := `{"candidates":[],"usageMetadata":{}}`

				// Set model if available
				if model := root.Get("model"); model.Exists() {
					template, _ = sjson.Set(template, "model", model.String())
				}

				usageObj := map[string]interface{}{
					"promptTokenCount":     usage.Get("prompt_tokens").Int(),
					"candidatesTokenCount": usage.Get("completion_tokens").Int(),
					"totalTokenCount":      usage.Get("total_tokens").Int(),
				}
				template, _ = sjson.Set(template, "usageMetadata", usageObj)
				return []string{template}
			}
			return []string{}
		}

		var results []string

		choices.ForEach(func(_, choice gjson.Result) bool {
			// Base Gemini response template
			template := `{"candidates":[{"content":{"parts":[],"role":"model"},"finishReason":"STOP","index":0}]}`

			// Set model if available
			if model := root.Get("model"); model.Exists() {
				template, _ = sjson.Set(template, "model", model.String())
			}

			// The choice index is not used in streaming mode; every event is
			// written to candidates.0.
			delta := choice.Get("delta")

			// Handle role (only in the first chunk)
			if role := delta.Get("role"); role.Exists() && param.IsFirstChunk {
				// OpenAI assistant -> Gemini model
				if role.String() == "assistant" {
					template, _ = sjson.Set(template, "candidates.0.content.role", "model")
				}
				param.IsFirstChunk = false
				results = append(results, template)
				return true
			}

			// Handle content delta
			if content := delta.Get("content"); content.Exists() && content.String() != "" {
				contentText := content.String()
				param.ContentAccumulator.WriteString(contentText)

				// Create a text part for this delta
				parts := []interface{}{
					map[string]interface{}{
						"text": contentText,
					},
				}
				template, _ = sjson.Set(template, "candidates.0.content.parts", parts)
				results = append(results, template)
				return true
			}

			// Handle tool call deltas
			if toolCalls := delta.Get("tool_calls"); toolCalls.Exists() && toolCalls.IsArray() {
				toolCalls.ForEach(func(_, toolCall gjson.Result) bool {
					toolIndex := int(toolCall.Get("index").Int())
					toolID := toolCall.Get("id").String()
					toolType := toolCall.Get("type").String()

					if toolType == "function" {
						function := toolCall.Get("function")
						functionName := function.Get("name").String()
						functionArgs := function.Get("arguments").String()

						// Initialize the accumulator if needed
						if _, exists := param.ToolCallsAccumulator[toolIndex]; !exists {
							param.ToolCallsAccumulator[toolIndex] = &ToolCallAccumulator{
								ID:   toolID,
								Name: functionName,
							}
						}

						// Update the ID if provided
						if toolID != "" {
							param.ToolCallsAccumulator[toolIndex].ID = toolID
						}

						// Update the name if provided
						if functionName != "" {
							param.ToolCallsAccumulator[toolIndex].Name = functionName
						}

						// Accumulate arguments
						if functionArgs != "" {
							param.ToolCallsAccumulator[toolIndex].Arguments.WriteString(functionArgs)
						}
					}
					return true
				})

				// Don't output anything for tool call deltas; wait for completion
				return true
			}

			// Handle the finish reason
			if finishReason := choice.Get("finish_reason"); finishReason.Exists() {
				geminiFinishReason := mapOpenAIFinishReasonToGemini(finishReason.String())
				template, _ = sjson.Set(template, "candidates.0.finishReason", geminiFinishReason)

				// If we have accumulated tool calls, output them now
				if len(param.ToolCallsAccumulator) > 0 {
					var parts []interface{}
					for _, accumulator := range param.ToolCallsAccumulator {
						argsStr := accumulator.Arguments.String()
						var argsMap map[string]interface{}

						if argsStr != "" && argsStr != "{}" {
							// Workaround for known malformed samples in which these
							// literal values arrive without quotes; this does not
							// repair malformed JSON in general.
							fixedArgs := argsStr
							if strings.Contains(fixedArgs, "北京") && !strings.Contains(fixedArgs, "\"北京\"") {
								fixedArgs = strings.ReplaceAll(fixedArgs, "北京", "\"北京\"")
							}
							if strings.Contains(fixedArgs, "celsius") && !strings.Contains(fixedArgs, "\"celsius\"") {
								fixedArgs = strings.ReplaceAll(fixedArgs, "celsius", "\"celsius\"")
							}

							if err := json.Unmarshal([]byte(fixedArgs), &argsMap); err != nil {
								// Arguments are still not a valid JSON object; fall back
								// to an empty object rather than emitting broken args.
								argsMap = map[string]interface{}{}
							}
						} else {
							argsMap = map[string]interface{}{}
						}

						functionCallPart := map[string]interface{}{
							"functionCall": map[string]interface{}{
								"name": accumulator.Name,
								"args": argsMap,
							},
						}
						parts = append(parts, functionCallPart)
					}

					if len(parts) > 0 {
						template, _ = sjson.Set(template, "candidates.0.content.parts", parts)
					}

					// Clear the accumulators
					param.ToolCallsAccumulator = make(map[int]*ToolCallAccumulator)
				}

				results = append(results, template)
				return true
			}

			// Handle usage information
			if usage := root.Get("usage"); usage.Exists() {
				usageObj := map[string]interface{}{
					"promptTokenCount":     usage.Get("prompt_tokens").Int(),
					"candidatesTokenCount": usage.Get("completion_tokens").Int(),
					"totalTokenCount":      usage.Get("total_tokens").Int(),
				}
				template, _ = sjson.Set(template, "usageMetadata", usageObj)
				results = append(results, template)
				return true
			}

			return true
		})
		return results
	}
	return []string{}
}
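
// Illustrative chunk translation (hypothetical payloads; the model name is an example):
//   OpenAI delta: {"model":"qwen3-coder-plus","choices":[{"index":0,"delta":{"content":"Hel"}}]}
//   Gemini event: {"candidates":[{"content":{"parts":[{"text":"Hel"}],"role":"model"},
//                  "finishReason":"STOP","index":0}],"model":"qwen3-coder-plus"}
// Tool call deltas produce no output until the finish_reason chunk arrives, at which
// point the accumulated calls are emitted as functionCall parts.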

// mapOpenAIFinishReasonToGemini maps OpenAI finish reasons to Gemini finish reasons.
func mapOpenAIFinishReasonToGemini(openAIReason string) string {
	switch openAIReason {
	case "stop":
		return "STOP"
	case "length":
		return "MAX_TOKENS"
	case "tool_calls":
		return "STOP" // Gemini doesn't have a specific tool_calls finish reason
	case "content_filter":
		return "SAFETY"
	default:
		return "STOP"
	}
}

// ConvertOpenAINonStreamResponseToGemini converts an OpenAI non-streaming response to the Gemini format.
func ConvertOpenAINonStreamResponseToGemini(rawJSON []byte) string {
	root := gjson.ParseBytes(rawJSON)

	// Base Gemini response template
	out := `{"candidates":[{"content":{"parts":[],"role":"model"},"finishReason":"STOP","index":0}]}`

	// Set model if available
	if model := root.Get("model"); model.Exists() {
		out, _ = sjson.Set(out, "model", model.String())
	}

	// Process choices. Note that every choice is written to candidates.0, so a
	// multi-choice response collapses to its last choice.
	if choices := root.Get("choices"); choices.Exists() && choices.IsArray() {
		choices.ForEach(func(_, choice gjson.Result) bool {
			choiceIdx := int(choice.Get("index").Int())
			message := choice.Get("message")

			// Set role
			if role := message.Get("role"); role.Exists() {
				if role.String() == "assistant" {
					out, _ = sjson.Set(out, "candidates.0.content.role", "model")
				}
			}

			var parts []interface{}

			// Handle content first
			if content := message.Get("content"); content.Exists() && content.String() != "" {
				parts = append(parts, map[string]interface{}{
					"text": content.String(),
				})
			}

			// Handle tool calls
			if toolCalls := message.Get("tool_calls"); toolCalls.Exists() && toolCalls.IsArray() {
				toolCalls.ForEach(func(_, toolCall gjson.Result) bool {
					if toolCall.Get("type").String() == "function" {
						function := toolCall.Get("function")
						functionName := function.Get("name").String()
						functionArgs := function.Get("arguments").String()

						// Parse arguments
						var argsMap map[string]interface{}
						if functionArgs != "" && functionArgs != "{}" {
							// Workaround for known malformed samples in which these
							// literal values arrive without quotes; this does not
							// repair malformed JSON in general.
							fixedArgs := functionArgs
							if strings.Contains(fixedArgs, "北京") && !strings.Contains(fixedArgs, "\"北京\"") {
								fixedArgs = strings.ReplaceAll(fixedArgs, "北京", "\"北京\"")
							}
							if strings.Contains(fixedArgs, "celsius") && !strings.Contains(fixedArgs, "\"celsius\"") {
								fixedArgs = strings.ReplaceAll(fixedArgs, "celsius", "\"celsius\"")
							}

							if err := json.Unmarshal([]byte(fixedArgs), &argsMap); err != nil {
								// Arguments are still not a valid JSON object; fall back
								// to an empty object rather than emitting broken args.
								argsMap = map[string]interface{}{}
							}
						} else {
							argsMap = map[string]interface{}{}
						}

						functionCallPart := map[string]interface{}{
							"functionCall": map[string]interface{}{
								"name": functionName,
								"args": argsMap,
							},
						}
						parts = append(parts, functionCallPart)
					}
					return true
				})
			}

			// Set parts
			if len(parts) > 0 {
				out, _ = sjson.Set(out, "candidates.0.content.parts", parts)
			}

			// Handle the finish reason
			if finishReason := choice.Get("finish_reason"); finishReason.Exists() {
				geminiFinishReason := mapOpenAIFinishReasonToGemini(finishReason.String())
				out, _ = sjson.Set(out, "candidates.0.finishReason", geminiFinishReason)
			}

			// Set index
			out, _ = sjson.Set(out, "candidates.0.index", choiceIdx)

			return true
		})
	}

	// Handle usage information
	if usage := root.Get("usage"); usage.Exists() {
		usageObj := map[string]interface{}{
			"promptTokenCount":     usage.Get("prompt_tokens").Int(),
			"candidatesTokenCount": usage.Get("completion_tokens").Int(),
			"totalTokenCount":      usage.Get("total_tokens").Int(),
		}
		out, _ = sjson.Set(out, "usageMetadata", usageObj)
	}

	return out
}
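
A minimal sketch of driving the streaming converter over OpenAI SSE chunks; the chunk payloads and model name are hypothetical illustrations, not part of the commit:

package gemini

import (
	"fmt"
	"testing"
)

// TestConvertOpenAIResponseToGeminiSketch feeds a short synthetic stream through
// the converter and prints each emitted Gemini event; it makes no assertions.
func TestConvertOpenAIResponseToGeminiSketch(t *testing.T) {
	param := &ConvertOpenAIResponseToGeminiParams{IsFirstChunk: true}
	chunks := [][]byte{
		[]byte(`{"model":"qwen3-coder-plus","choices":[{"index":0,"delta":{"role":"assistant"}}]}`),
		[]byte(`{"model":"qwen3-coder-plus","choices":[{"index":0,"delta":{"content":"Hello"}}]}`),
		[]byte(`{"model":"qwen3-coder-plus","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}`),
		[]byte(`{"model":"qwen3-coder-plus","choices":[],"usage":{"prompt_tokens":5,"completion_tokens":2,"total_tokens":7}}`),
		[]byte(`[DONE]`),
	}
	for _, chunk := range chunks {
		for _, event := range ConvertOpenAIResponseToGemini(chunk, param) {
			fmt.Println(event)
		}
	}
}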