Mirror of https://github.com/router-for-me/CLIProxyAPI.git (synced 2026-02-03 13:00:52 +08:00)
refactor: replace json.Marshal and json.Unmarshal with sjson and gjson
Replaced redundant `json.Marshal` and `json.Unmarshal` calls with `sjson` and `gjson` to streamline JSON serialization and deserialization. Introduced a `marshalJSONValue` utility for compact JSON encoding, improving performance and simplifying the code. Removed the now-unused `encoding/json` imports.
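For illustration, here is a minimal sketch of the pattern this commit applies: instead of building a `map[string]interface{}` and calling `json.Marshal`, fields are set on a JSON template string with `sjson`, and inputs are read with `gjson`. The `message_delta` template literal is taken from the diff below; the helper name `buildMessageDelta` and the sample chunk are only illustrative.

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

// buildMessageDelta (illustrative helper) fills a JSON template with sjson
// instead of marshalling a map, mirroring the pattern used in this commit.
func buildMessageDelta(finishReason string, inputTokens, outputTokens int64) string {
	out := `{"type":"message_delta","delta":{"stop_reason":"","stop_sequence":null},"usage":{"input_tokens":0,"output_tokens":0}}`
	out, _ = sjson.Set(out, "delta.stop_reason", finishReason)
	out, _ = sjson.Set(out, "usage.input_tokens", inputTokens)
	out, _ = sjson.Set(out, "usage.output_tokens", outputTokens)
	return out
}

func main() {
	// Read fields from an OpenAI-style chunk with gjson rather than json.Unmarshal.
	chunk := `{"choices":[{"finish_reason":"stop"}],"usage":{"prompt_tokens":12,"completion_tokens":34}}`
	usage := gjson.Get(chunk, "usage")
	fmt.Println(buildMessageDelta(
		gjson.Get(chunk, "choices.0.finish_reason").String(),
		usage.Get("prompt_tokens").Int(),
		usage.Get("completion_tokens").Int(),
	))
}
```

Working on a template string avoids the intermediate map allocation and a full marshal pass per event, which is the performance motivation stated above.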
@@ -7,7 +7,6 @@ package claude
import (
"bytes"
"encoding/json"
"strings"

"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
@@ -138,11 +137,7 @@ func ConvertClaudeRequestToOpenAI(modelName string, inputRawJSON []byte, stream

// Convert input to arguments JSON string
if input := part.Get("input"); input.Exists() {
if inputJSON, err := json.Marshal(input.Value()); err == nil {
toolCallJSON, _ = sjson.Set(toolCallJSON, "function.arguments", string(inputJSON))
} else {
toolCallJSON, _ = sjson.Set(toolCallJSON, "function.arguments", "{}")
}
toolCallJSON, _ = sjson.Set(toolCallJSON, "function.arguments", input.Raw)
} else {
toolCallJSON, _ = sjson.Set(toolCallJSON, "function.arguments", "{}")
}
@@ -191,8 +186,7 @@ func ConvertClaudeRequestToOpenAI(modelName string, inputRawJSON []byte, stream
// Emit tool calls in a separate assistant message
if role == "assistant" && len(toolCalls) > 0 {
toolCallMsgJSON := `{"role":"assistant","tool_calls":[]}`
toolCallsJSON, _ := json.Marshal(toolCalls)
toolCallMsgJSON, _ = sjson.SetRaw(toolCallMsgJSON, "tool_calls", string(toolCallsJSON))
toolCallMsgJSON, _ = sjson.Set(toolCallMsgJSON, "tool_calls", toolCalls)
messagesJSON, _ = sjson.Set(messagesJSON, "-1", gjson.Parse(toolCallMsgJSON).Value())
}
@@ -8,7 +8,6 @@ package claude
import (
"bytes"
"context"
"encoding/json"
"fmt"
"strings"
@@ -133,24 +132,10 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
if delta := root.Get("choices.0.delta"); delta.Exists() {
if !param.MessageStarted {
// Send message_start event
messageStart := map[string]interface{}{
"type": "message_start",
"message": map[string]interface{}{
"id": param.MessageID,
"type": "message",
"role": "assistant",
"model": param.Model,
"content": []interface{}{},
"stop_reason": nil,
"stop_sequence": nil,
"usage": map[string]interface{}{
"input_tokens": 0,
"output_tokens": 0,
},
},
}
messageStartJSON, _ := json.Marshal(messageStart)
results = append(results, "event: message_start\ndata: "+string(messageStartJSON)+"\n\n")
messageStartJSON := `{"type":"message_start","message":{"id":"","type":"message","role":"assistant","model":"","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}`
messageStartJSON, _ = sjson.Set(messageStartJSON, "message.id", param.MessageID)
messageStartJSON, _ = sjson.Set(messageStartJSON, "message.model", param.Model)
results = append(results, "event: message_start\ndata: "+messageStartJSON+"\n\n")
param.MessageStarted = true

// Don't send content_block_start for text here - wait for actual content
@@ -168,29 +153,16 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
param.ThinkingContentBlockIndex = param.NextContentBlockIndex
param.NextContentBlockIndex++
}
contentBlockStart := map[string]interface{}{
"type": "content_block_start",
"index": param.ThinkingContentBlockIndex,
"content_block": map[string]interface{}{
"type": "thinking",
"thinking": "",
},
}
contentBlockStartJSON, _ := json.Marshal(contentBlockStart)
results = append(results, "event: content_block_start\ndata: "+string(contentBlockStartJSON)+"\n\n")
contentBlockStartJSON := `{"type":"content_block_start","index":0,"content_block":{"type":"thinking","thinking":""}}`
contentBlockStartJSON, _ = sjson.Set(contentBlockStartJSON, "index", param.ThinkingContentBlockIndex)
results = append(results, "event: content_block_start\ndata: "+contentBlockStartJSON+"\n\n")
param.ThinkingContentBlockStarted = true
}

thinkingDelta := map[string]interface{}{
"type": "content_block_delta",
"index": param.ThinkingContentBlockIndex,
"delta": map[string]interface{}{
"type": "thinking_delta",
"thinking": reasoningText,
},
}
thinkingDeltaJSON, _ := json.Marshal(thinkingDelta)
results = append(results, "event: content_block_delta\ndata: "+string(thinkingDeltaJSON)+"\n\n")
thinkingDeltaJSON := `{"type":"content_block_delta","index":0,"delta":{"type":"thinking_delta","thinking":""}}`
thinkingDeltaJSON, _ = sjson.Set(thinkingDeltaJSON, "index", param.ThinkingContentBlockIndex)
thinkingDeltaJSON, _ = sjson.Set(thinkingDeltaJSON, "delta.thinking", reasoningText)
results = append(results, "event: content_block_delta\ndata: "+thinkingDeltaJSON+"\n\n")
}
}
@@ -203,29 +175,16 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
param.TextContentBlockIndex = param.NextContentBlockIndex
param.NextContentBlockIndex++
}
contentBlockStart := map[string]interface{}{
"type": "content_block_start",
"index": param.TextContentBlockIndex,
"content_block": map[string]interface{}{
"type": "text",
"text": "",
},
}
contentBlockStartJSON, _ := json.Marshal(contentBlockStart)
results = append(results, "event: content_block_start\ndata: "+string(contentBlockStartJSON)+"\n\n")
contentBlockStartJSON := `{"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}`
contentBlockStartJSON, _ = sjson.Set(contentBlockStartJSON, "index", param.TextContentBlockIndex)
results = append(results, "event: content_block_start\ndata: "+contentBlockStartJSON+"\n\n")
param.TextContentBlockStarted = true
}

contentDelta := map[string]interface{}{
"type": "content_block_delta",
"index": param.TextContentBlockIndex,
"delta": map[string]interface{}{
"type": "text_delta",
"text": content.String(),
},
}
contentDeltaJSON, _ := json.Marshal(contentDelta)
results = append(results, "event: content_block_delta\ndata: "+string(contentDeltaJSON)+"\n\n")
contentDeltaJSON := `{"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":""}}`
contentDeltaJSON, _ = sjson.Set(contentDeltaJSON, "index", param.TextContentBlockIndex)
contentDeltaJSON, _ = sjson.Set(contentDeltaJSON, "delta.text", content.String())
results = append(results, "event: content_block_delta\ndata: "+contentDeltaJSON+"\n\n")

// Accumulate content
param.ContentAccumulator.WriteString(content.String())
@@ -263,18 +222,11 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
stopTextContentBlock(param, &results)

// Send content_block_start for tool_use
contentBlockStart := map[string]interface{}{
"type": "content_block_start",
"index": blockIndex,
"content_block": map[string]interface{}{
"type": "tool_use",
"id": accumulator.ID,
"name": accumulator.Name,
"input": map[string]interface{}{},
},
}
contentBlockStartJSON, _ := json.Marshal(contentBlockStart)
results = append(results, "event: content_block_start\ndata: "+string(contentBlockStartJSON)+"\n\n")
contentBlockStartJSON := `{"type":"content_block_start","index":0,"content_block":{"type":"tool_use","id":"","name":"","input":{}}}`
contentBlockStartJSON, _ = sjson.Set(contentBlockStartJSON, "index", blockIndex)
contentBlockStartJSON, _ = sjson.Set(contentBlockStartJSON, "content_block.id", accumulator.ID)
contentBlockStartJSON, _ = sjson.Set(contentBlockStartJSON, "content_block.name", accumulator.Name)
results = append(results, "event: content_block_start\ndata: "+contentBlockStartJSON+"\n\n")
}

// Handle function arguments
@@ -298,12 +250,9 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI

// Send content_block_stop for thinking content if needed
if param.ThinkingContentBlockStarted {
contentBlockStop := map[string]interface{}{
"type": "content_block_stop",
"index": param.ThinkingContentBlockIndex,
}
contentBlockStopJSON, _ := json.Marshal(contentBlockStop)
results = append(results, "event: content_block_stop\ndata: "+string(contentBlockStopJSON)+"\n\n")
contentBlockStopJSON := `{"type":"content_block_stop","index":0}`
contentBlockStopJSON, _ = sjson.Set(contentBlockStopJSON, "index", param.ThinkingContentBlockIndex)
results = append(results, "event: content_block_stop\ndata: "+contentBlockStopJSON+"\n\n")
param.ThinkingContentBlockStarted = false
param.ThinkingContentBlockIndex = -1
}
@@ -319,24 +268,15 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI

// Send complete input_json_delta with all accumulated arguments
if accumulator.Arguments.Len() > 0 {
inputDelta := map[string]interface{}{
"type": "content_block_delta",
"index": blockIndex,
"delta": map[string]interface{}{
"type": "input_json_delta",
"partial_json": util.FixJSON(accumulator.Arguments.String()),
},
}
inputDeltaJSON, _ := json.Marshal(inputDelta)
results = append(results, "event: content_block_delta\ndata: "+string(inputDeltaJSON)+"\n\n")
inputDeltaJSON := `{"type":"content_block_delta","index":0,"delta":{"type":"input_json_delta","partial_json":""}}`
inputDeltaJSON, _ = sjson.Set(inputDeltaJSON, "index", blockIndex)
inputDeltaJSON, _ = sjson.Set(inputDeltaJSON, "delta.partial_json", util.FixJSON(accumulator.Arguments.String()))
results = append(results, "event: content_block_delta\ndata: "+inputDeltaJSON+"\n\n")
}

contentBlockStop := map[string]interface{}{
"type": "content_block_stop",
"index": blockIndex,
}
contentBlockStopJSON, _ := json.Marshal(contentBlockStop)
results = append(results, "event: content_block_stop\ndata: "+string(contentBlockStopJSON)+"\n\n")
contentBlockStopJSON := `{"type":"content_block_stop","index":0}`
contentBlockStopJSON, _ = sjson.Set(contentBlockStopJSON, "index", blockIndex)
results = append(results, "event: content_block_stop\ndata: "+contentBlockStopJSON+"\n\n")
delete(param.ToolCallBlockIndexes, index)
}
param.ContentBlocksStopped = true
@@ -361,20 +301,11 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
}
}
// Send message_delta with usage
messageDelta := map[string]interface{}{
"type": "message_delta",
"delta": map[string]interface{}{
"stop_reason": mapOpenAIFinishReasonToAnthropic(param.FinishReason),
"stop_sequence": nil,
},
"usage": map[string]interface{}{
"input_tokens": inputTokens,
"output_tokens": outputTokens,
},
}

messageDeltaJSON, _ := json.Marshal(messageDelta)
results = append(results, "event: message_delta\ndata: "+string(messageDeltaJSON)+"\n\n")
messageDeltaJSON := `{"type":"message_delta","delta":{"stop_reason":"","stop_sequence":null},"usage":{"input_tokens":0,"output_tokens":0}}`
messageDeltaJSON, _ = sjson.Set(messageDeltaJSON, "delta.stop_reason", mapOpenAIFinishReasonToAnthropic(param.FinishReason))
messageDeltaJSON, _ = sjson.Set(messageDeltaJSON, "usage.input_tokens", inputTokens)
messageDeltaJSON, _ = sjson.Set(messageDeltaJSON, "usage.output_tokens", outputTokens)
results = append(results, "event: message_delta\ndata: "+messageDeltaJSON+"\n\n")
param.MessageDeltaSent = true

emitMessageStopIfNeeded(param, &results)
@@ -390,12 +321,9 @@ func convertOpenAIDoneToAnthropic(param *ConvertOpenAIResponseToAnthropicParams)

// Ensure all content blocks are stopped before final events
if param.ThinkingContentBlockStarted {
contentBlockStop := map[string]interface{}{
"type": "content_block_stop",
"index": param.ThinkingContentBlockIndex,
}
contentBlockStopJSON, _ := json.Marshal(contentBlockStop)
results = append(results, "event: content_block_stop\ndata: "+string(contentBlockStopJSON)+"\n\n")
contentBlockStopJSON := `{"type":"content_block_stop","index":0}`
contentBlockStopJSON, _ = sjson.Set(contentBlockStopJSON, "index", param.ThinkingContentBlockIndex)
results = append(results, "event: content_block_stop\ndata: "+contentBlockStopJSON+"\n\n")
param.ThinkingContentBlockStarted = false
param.ThinkingContentBlockIndex = -1
}
@@ -408,24 +336,15 @@ func convertOpenAIDoneToAnthropic(param *ConvertOpenAIResponseToAnthropicParams)
blockIndex := param.toolContentBlockIndex(index)

if accumulator.Arguments.Len() > 0 {
inputDelta := map[string]interface{}{
"type": "content_block_delta",
"index": blockIndex,
"delta": map[string]interface{}{
"type": "input_json_delta",
"partial_json": util.FixJSON(accumulator.Arguments.String()),
},
}
inputDeltaJSON, _ := json.Marshal(inputDelta)
results = append(results, "event: content_block_delta\ndata: "+string(inputDeltaJSON)+"\n\n")
inputDeltaJSON := `{"type":"content_block_delta","index":0,"delta":{"type":"input_json_delta","partial_json":""}}`
inputDeltaJSON, _ = sjson.Set(inputDeltaJSON, "index", blockIndex)
inputDeltaJSON, _ = sjson.Set(inputDeltaJSON, "delta.partial_json", util.FixJSON(accumulator.Arguments.String()))
results = append(results, "event: content_block_delta\ndata: "+inputDeltaJSON+"\n\n")
}

contentBlockStop := map[string]interface{}{
"type": "content_block_stop",
"index": blockIndex,
}
contentBlockStopJSON, _ := json.Marshal(contentBlockStop)
results = append(results, "event: content_block_stop\ndata: "+string(contentBlockStopJSON)+"\n\n")
contentBlockStopJSON := `{"type":"content_block_stop","index":0}`
contentBlockStopJSON, _ = sjson.Set(contentBlockStopJSON, "index", blockIndex)
results = append(results, "event: content_block_stop\ndata: "+contentBlockStopJSON+"\n\n")
delete(param.ToolCallBlockIndexes, index)
}
param.ContentBlocksStopped = true
@@ -433,16 +352,9 @@ func convertOpenAIDoneToAnthropic(param *ConvertOpenAIResponseToAnthropicParams)

// If we haven't sent message_delta yet (no usage info was received), send it now
if param.FinishReason != "" && !param.MessageDeltaSent {
messageDelta := map[string]interface{}{
"type": "message_delta",
"delta": map[string]interface{}{
"stop_reason": mapOpenAIFinishReasonToAnthropic(param.FinishReason),
"stop_sequence": nil,
},
}

messageDeltaJSON, _ := json.Marshal(messageDelta)
results = append(results, "event: message_delta\ndata: "+string(messageDeltaJSON)+"\n\n")
messageDeltaJSON := `{"type":"message_delta","delta":{"stop_reason":"","stop_sequence":null}}`
messageDeltaJSON, _ = sjson.Set(messageDeltaJSON, "delta.stop_reason", mapOpenAIFinishReasonToAnthropic(param.FinishReason))
results = append(results, "event: message_delta\ndata: "+messageDeltaJSON+"\n\n")
param.MessageDeltaSent = true
}
@@ -455,105 +367,73 @@ func convertOpenAIDoneToAnthropic(param *ConvertOpenAIResponseToAnthropicParams)
|
||||
func convertOpenAINonStreamingToAnthropic(rawJSON []byte) []string {
|
||||
root := gjson.ParseBytes(rawJSON)
|
||||
|
||||
// Build Anthropic response
|
||||
response := map[string]interface{}{
|
||||
"id": root.Get("id").String(),
|
||||
"type": "message",
|
||||
"role": "assistant",
|
||||
"model": root.Get("model").String(),
|
||||
"content": []interface{}{},
|
||||
"stop_reason": nil,
|
||||
"stop_sequence": nil,
|
||||
"usage": map[string]interface{}{
|
||||
"input_tokens": 0,
|
||||
"output_tokens": 0,
|
||||
},
|
||||
}
|
||||
out := `{"id":"","type":"message","role":"assistant","model":"","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}`
|
||||
out, _ = sjson.Set(out, "id", root.Get("id").String())
|
||||
out, _ = sjson.Set(out, "model", root.Get("model").String())
|
||||
|
||||
// Process message content and tool calls
|
||||
var contentBlocks []interface{}
|
||||
|
||||
if choices := root.Get("choices"); choices.Exists() && choices.IsArray() {
|
||||
if choices := root.Get("choices"); choices.Exists() && choices.IsArray() && len(choices.Array()) > 0 {
|
||||
choice := choices.Array()[0] // Take first choice
|
||||
reasoningNode := choice.Get("message.reasoning_content")
|
||||
allReasoning := collectOpenAIReasoningTexts(reasoningNode)
|
||||
|
||||
for _, reasoningText := range allReasoning {
|
||||
reasoningNode := choice.Get("message.reasoning_content")
|
||||
for _, reasoningText := range collectOpenAIReasoningTexts(reasoningNode) {
|
||||
if reasoningText == "" {
|
||||
continue
|
||||
}
|
||||
contentBlocks = append(contentBlocks, map[string]interface{}{
|
||||
"type": "thinking",
|
||||
"thinking": reasoningText,
|
||||
})
|
||||
block := `{"type":"thinking","thinking":""}`
|
||||
block, _ = sjson.Set(block, "thinking", reasoningText)
|
||||
out, _ = sjson.SetRaw(out, "content.-1", block)
|
||||
}
|
||||
|
||||
// Handle text content
|
||||
if content := choice.Get("message.content"); content.Exists() && content.String() != "" {
|
||||
textBlock := map[string]interface{}{
|
||||
"type": "text",
|
||||
"text": content.String(),
|
||||
}
|
||||
contentBlocks = append(contentBlocks, textBlock)
|
||||
block := `{"type":"text","text":""}`
|
||||
block, _ = sjson.Set(block, "text", content.String())
|
||||
out, _ = sjson.SetRaw(out, "content.-1", block)
|
||||
}
|
||||
|
||||
// Handle tool calls
|
||||
if toolCalls := choice.Get("message.tool_calls"); toolCalls.Exists() && toolCalls.IsArray() {
|
||||
toolCalls.ForEach(func(_, toolCall gjson.Result) bool {
|
||||
toolUseBlock := map[string]interface{}{
|
||||
"type": "tool_use",
|
||||
"id": toolCall.Get("id").String(),
|
||||
"name": toolCall.Get("function.name").String(),
|
||||
}
|
||||
toolUseBlock := `{"type":"tool_use","id":"","name":"","input":{}}`
|
||||
toolUseBlock, _ = sjson.Set(toolUseBlock, "id", toolCall.Get("id").String())
|
||||
toolUseBlock, _ = sjson.Set(toolUseBlock, "name", toolCall.Get("function.name").String())
|
||||
|
||||
// Parse arguments
|
||||
argsStr := toolCall.Get("function.arguments").String()
|
||||
argsStr = util.FixJSON(argsStr)
|
||||
if argsStr != "" {
|
||||
var args interface{}
|
||||
if err := json.Unmarshal([]byte(argsStr), &args); err == nil {
|
||||
toolUseBlock["input"] = args
|
||||
argsStr := util.FixJSON(toolCall.Get("function.arguments").String())
|
||||
if argsStr != "" && gjson.Valid(argsStr) {
|
||||
argsJSON := gjson.Parse(argsStr)
|
||||
if argsJSON.IsObject() {
|
||||
toolUseBlock, _ = sjson.SetRaw(toolUseBlock, "input", argsJSON.Raw)
|
||||
} else {
|
||||
toolUseBlock["input"] = map[string]interface{}{}
|
||||
toolUseBlock, _ = sjson.SetRaw(toolUseBlock, "input", "{}")
|
||||
}
|
||||
} else {
|
||||
toolUseBlock["input"] = map[string]interface{}{}
|
||||
toolUseBlock, _ = sjson.SetRaw(toolUseBlock, "input", "{}")
|
||||
}
|
||||
|
||||
contentBlocks = append(contentBlocks, toolUseBlock)
|
||||
out, _ = sjson.SetRaw(out, "content.-1", toolUseBlock)
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
// Set stop reason
|
||||
if finishReason := choice.Get("finish_reason"); finishReason.Exists() {
|
||||
response["stop_reason"] = mapOpenAIFinishReasonToAnthropic(finishReason.String())
|
||||
out, _ = sjson.Set(out, "stop_reason", mapOpenAIFinishReasonToAnthropic(finishReason.String()))
|
||||
}
|
||||
}
|
||||
|
||||
response["content"] = contentBlocks
|
||||
|
||||
// Set usage information
|
||||
if usage := root.Get("usage"); usage.Exists() {
|
||||
response["usage"] = map[string]interface{}{
|
||||
"input_tokens": usage.Get("prompt_tokens").Int(),
|
||||
"output_tokens": usage.Get("completion_tokens").Int(),
|
||||
"reasoning_tokens": func() int64 {
|
||||
if v := usage.Get("completion_tokens_details.reasoning_tokens"); v.Exists() {
|
||||
return v.Int()
|
||||
}
|
||||
return 0
|
||||
}(),
|
||||
}
|
||||
} else {
|
||||
response["usage"] = map[string]interface{}{
|
||||
"input_tokens": 0,
|
||||
"output_tokens": 0,
|
||||
out, _ = sjson.Set(out, "usage.input_tokens", usage.Get("prompt_tokens").Int())
|
||||
out, _ = sjson.Set(out, "usage.output_tokens", usage.Get("completion_tokens").Int())
|
||||
reasoningTokens := int64(0)
|
||||
if v := usage.Get("completion_tokens_details.reasoning_tokens"); v.Exists() {
|
||||
reasoningTokens = v.Int()
|
||||
}
|
||||
out, _ = sjson.Set(out, "usage.reasoning_tokens", reasoningTokens)
|
||||
}
|
||||
|
||||
responseJSON, _ := json.Marshal(response)
|
||||
return []string{string(responseJSON)}
|
||||
return []string{out}
|
||||
}
|
||||
|
||||
// mapOpenAIFinishReasonToAnthropic maps OpenAI finish reasons to Anthropic equivalents
|
||||
@@ -620,12 +500,9 @@ func stopThinkingContentBlock(param *ConvertOpenAIResponseToAnthropicParams, res
|
||||
if !param.ThinkingContentBlockStarted {
|
||||
return
|
||||
}
|
||||
contentBlockStop := map[string]interface{}{
|
||||
"type": "content_block_stop",
|
||||
"index": param.ThinkingContentBlockIndex,
|
||||
}
|
||||
contentBlockStopJSON, _ := json.Marshal(contentBlockStop)
|
||||
*results = append(*results, "event: content_block_stop\ndata: "+string(contentBlockStopJSON)+"\n\n")
|
||||
contentBlockStopJSON := `{"type":"content_block_stop","index":0}`
|
||||
contentBlockStopJSON, _ = sjson.Set(contentBlockStopJSON, "index", param.ThinkingContentBlockIndex)
|
||||
*results = append(*results, "event: content_block_stop\ndata: "+contentBlockStopJSON+"\n\n")
|
||||
param.ThinkingContentBlockStarted = false
|
||||
param.ThinkingContentBlockIndex = -1
|
||||
}
|
||||
@@ -642,12 +519,9 @@ func stopTextContentBlock(param *ConvertOpenAIResponseToAnthropicParams, results
|
||||
if !param.TextContentBlockStarted {
|
||||
return
|
||||
}
|
||||
contentBlockStop := map[string]interface{}{
|
||||
"type": "content_block_stop",
|
||||
"index": param.TextContentBlockIndex,
|
||||
}
|
||||
contentBlockStopJSON, _ := json.Marshal(contentBlockStop)
|
||||
*results = append(*results, "event: content_block_stop\ndata: "+string(contentBlockStopJSON)+"\n\n")
|
||||
contentBlockStopJSON := `{"type":"content_block_stop","index":0}`
|
||||
contentBlockStopJSON, _ = sjson.Set(contentBlockStopJSON, "index", param.TextContentBlockIndex)
|
||||
*results = append(*results, "event: content_block_stop\ndata: "+contentBlockStopJSON+"\n\n")
|
||||
param.TextContentBlockStarted = false
|
||||
param.TextContentBlockIndex = -1
|
||||
}
|
||||
@@ -667,29 +541,19 @@ func ConvertOpenAIResponseToClaudeNonStream(_ context.Context, _ string, origina
|
||||
_ = requestRawJSON
|
||||
|
||||
root := gjson.ParseBytes(rawJSON)
|
||||
out := `{"id":"","type":"message","role":"assistant","model":"","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}`
|
||||
out, _ = sjson.Set(out, "id", root.Get("id").String())
|
||||
out, _ = sjson.Set(out, "model", root.Get("model").String())
|
||||
|
||||
response := map[string]interface{}{
|
||||
"id": root.Get("id").String(),
|
||||
"type": "message",
|
||||
"role": "assistant",
|
||||
"model": root.Get("model").String(),
|
||||
"content": []interface{}{},
|
||||
"stop_reason": nil,
|
||||
"stop_sequence": nil,
|
||||
"usage": map[string]interface{}{
|
||||
"input_tokens": 0,
|
||||
"output_tokens": 0,
|
||||
},
|
||||
}
|
||||
|
||||
contentBlocks := make([]interface{}, 0)
|
||||
hasToolCall := false
|
||||
stopReasonSet := false
|
||||
|
||||
if choices := root.Get("choices"); choices.Exists() && choices.IsArray() && len(choices.Array()) > 0 {
|
||||
choice := choices.Array()[0]
|
||||
|
||||
if finishReason := choice.Get("finish_reason"); finishReason.Exists() {
|
||||
response["stop_reason"] = mapOpenAIFinishReasonToAnthropic(finishReason.String())
|
||||
out, _ = sjson.Set(out, "stop_reason", mapOpenAIFinishReasonToAnthropic(finishReason.String()))
|
||||
stopReasonSet = true
|
||||
}
|
||||
|
||||
if message := choice.Get("message"); message.Exists() {
|
||||
@@ -702,10 +566,9 @@ func ConvertOpenAIResponseToClaudeNonStream(_ context.Context, _ string, origina
|
||||
if textBuilder.Len() == 0 {
|
||||
return
|
||||
}
|
||||
contentBlocks = append(contentBlocks, map[string]interface{}{
|
||||
"type": "text",
|
||||
"text": textBuilder.String(),
|
||||
})
|
||||
block := `{"type":"text","text":""}`
|
||||
block, _ = sjson.Set(block, "text", textBuilder.String())
|
||||
out, _ = sjson.SetRaw(out, "content.-1", block)
|
||||
textBuilder.Reset()
|
||||
}
|
||||
|
||||
@@ -713,16 +576,14 @@ func ConvertOpenAIResponseToClaudeNonStream(_ context.Context, _ string, origina
|
||||
if thinkingBuilder.Len() == 0 {
|
||||
return
|
||||
}
|
||||
contentBlocks = append(contentBlocks, map[string]interface{}{
|
||||
"type": "thinking",
|
||||
"thinking": thinkingBuilder.String(),
|
||||
})
|
||||
block := `{"type":"thinking","thinking":""}`
|
||||
block, _ = sjson.Set(block, "thinking", thinkingBuilder.String())
|
||||
out, _ = sjson.SetRaw(out, "content.-1", block)
|
||||
thinkingBuilder.Reset()
|
||||
}
|
||||
|
||||
for _, item := range contentResult.Array() {
|
||||
typeStr := item.Get("type").String()
|
||||
switch typeStr {
|
||||
switch item.Get("type").String() {
|
||||
case "text":
|
||||
flushThinking()
|
||||
textBuilder.WriteString(item.Get("text").String())
|
||||
@@ -733,25 +594,23 @@ func ConvertOpenAIResponseToClaudeNonStream(_ context.Context, _ string, origina
|
||||
if toolCalls.IsArray() {
|
||||
toolCalls.ForEach(func(_, tc gjson.Result) bool {
|
||||
hasToolCall = true
|
||||
toolUse := map[string]interface{}{
|
||||
"type": "tool_use",
|
||||
"id": tc.Get("id").String(),
|
||||
"name": tc.Get("function.name").String(),
|
||||
}
|
||||
toolUse := `{"type":"tool_use","id":"","name":"","input":{}}`
|
||||
toolUse, _ = sjson.Set(toolUse, "id", tc.Get("id").String())
|
||||
toolUse, _ = sjson.Set(toolUse, "name", tc.Get("function.name").String())
|
||||
|
||||
argsStr := util.FixJSON(tc.Get("function.arguments").String())
|
||||
if argsStr != "" {
|
||||
var parsed interface{}
|
||||
if err := json.Unmarshal([]byte(argsStr), &parsed); err == nil {
|
||||
toolUse["input"] = parsed
|
||||
if argsStr != "" && gjson.Valid(argsStr) {
|
||||
argsJSON := gjson.Parse(argsStr)
|
||||
if argsJSON.IsObject() {
|
||||
toolUse, _ = sjson.SetRaw(toolUse, "input", argsJSON.Raw)
|
||||
} else {
|
||||
toolUse["input"] = map[string]interface{}{}
|
||||
toolUse, _ = sjson.SetRaw(toolUse, "input", "{}")
|
||||
}
|
||||
} else {
|
||||
toolUse["input"] = map[string]interface{}{}
|
||||
toolUse, _ = sjson.SetRaw(toolUse, "input", "{}")
|
||||
}
|
||||
|
||||
contentBlocks = append(contentBlocks, toolUse)
|
||||
out, _ = sjson.SetRaw(out, "content.-1", toolUse)
|
||||
return true
|
||||
})
|
||||
}
|
||||
@@ -771,10 +630,9 @@ func ConvertOpenAIResponseToClaudeNonStream(_ context.Context, _ string, origina
|
||||
} else if contentResult.Type == gjson.String {
|
||||
textContent := contentResult.String()
|
||||
if textContent != "" {
|
||||
contentBlocks = append(contentBlocks, map[string]interface{}{
|
||||
"type": "text",
|
||||
"text": textContent,
|
||||
})
|
||||
block := `{"type":"text","text":""}`
|
||||
block, _ = sjson.Set(block, "text", textContent)
|
||||
out, _ = sjson.SetRaw(out, "content.-1", block)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -784,81 +642,52 @@ func ConvertOpenAIResponseToClaudeNonStream(_ context.Context, _ string, origina
|
||||
if reasoningText == "" {
|
||||
continue
|
||||
}
|
||||
contentBlocks = append(contentBlocks, map[string]interface{}{
|
||||
"type": "thinking",
|
||||
"thinking": reasoningText,
|
||||
})
|
||||
block := `{"type":"thinking","thinking":""}`
|
||||
block, _ = sjson.Set(block, "thinking", reasoningText)
|
||||
out, _ = sjson.SetRaw(out, "content.-1", block)
|
||||
}
|
||||
}
|
||||
|
||||
if toolCalls := message.Get("tool_calls"); toolCalls.Exists() && toolCalls.IsArray() {
|
||||
toolCalls.ForEach(func(_, toolCall gjson.Result) bool {
|
||||
hasToolCall = true
|
||||
toolUseBlock := map[string]interface{}{
|
||||
"type": "tool_use",
|
||||
"id": toolCall.Get("id").String(),
|
||||
"name": toolCall.Get("function.name").String(),
|
||||
}
|
||||
toolUseBlock := `{"type":"tool_use","id":"","name":"","input":{}}`
|
||||
toolUseBlock, _ = sjson.Set(toolUseBlock, "id", toolCall.Get("id").String())
|
||||
toolUseBlock, _ = sjson.Set(toolUseBlock, "name", toolCall.Get("function.name").String())
|
||||
|
||||
argsStr := toolCall.Get("function.arguments").String()
|
||||
argsStr = util.FixJSON(argsStr)
|
||||
if argsStr != "" {
|
||||
var args interface{}
|
||||
if err := json.Unmarshal([]byte(argsStr), &args); err == nil {
|
||||
toolUseBlock["input"] = args
|
||||
argsStr := util.FixJSON(toolCall.Get("function.arguments").String())
|
||||
if argsStr != "" && gjson.Valid(argsStr) {
|
||||
argsJSON := gjson.Parse(argsStr)
|
||||
if argsJSON.IsObject() {
|
||||
toolUseBlock, _ = sjson.SetRaw(toolUseBlock, "input", argsJSON.Raw)
|
||||
} else {
|
||||
toolUseBlock["input"] = map[string]interface{}{}
|
||||
toolUseBlock, _ = sjson.SetRaw(toolUseBlock, "input", "{}")
|
||||
}
|
||||
} else {
|
||||
toolUseBlock["input"] = map[string]interface{}{}
|
||||
toolUseBlock, _ = sjson.SetRaw(toolUseBlock, "input", "{}")
|
||||
}
|
||||
|
||||
contentBlocks = append(contentBlocks, toolUseBlock)
|
||||
out, _ = sjson.SetRaw(out, "content.-1", toolUseBlock)
|
||||
return true
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
response["content"] = contentBlocks
|
||||
|
||||
if respUsage := root.Get("usage"); respUsage.Exists() {
|
||||
usageJSON := `{}`
|
||||
usageJSON, _ = sjson.Set(usageJSON, "input_tokens", respUsage.Get("prompt_tokens").Int())
|
||||
usageJSON, _ = sjson.Set(usageJSON, "output_tokens", respUsage.Get("completion_tokens").Int())
|
||||
parsedUsage := gjson.Parse(usageJSON).Value().(map[string]interface{})
|
||||
response["usage"] = parsedUsage
|
||||
} else {
|
||||
response["usage"] = `{"input_tokens":0,"output_tokens":0}`
|
||||
out, _ = sjson.Set(out, "usage.input_tokens", respUsage.Get("prompt_tokens").Int())
|
||||
out, _ = sjson.Set(out, "usage.output_tokens", respUsage.Get("completion_tokens").Int())
|
||||
}
|
||||
|
||||
if response["stop_reason"] == nil {
|
||||
if !stopReasonSet {
|
||||
if hasToolCall {
|
||||
response["stop_reason"] = "tool_use"
|
||||
out, _ = sjson.Set(out, "stop_reason", "tool_use")
|
||||
} else {
|
||||
response["stop_reason"] = "end_turn"
|
||||
out, _ = sjson.Set(out, "stop_reason", "end_turn")
|
||||
}
|
||||
}
|
||||
|
||||
if !hasToolCall {
|
||||
if toolBlocks := response["content"].([]interface{}); len(toolBlocks) > 0 {
|
||||
for _, block := range toolBlocks {
|
||||
if m, ok := block.(map[string]interface{}); ok && m["type"] == "tool_use" {
|
||||
hasToolCall = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if hasToolCall {
|
||||
response["stop_reason"] = "tool_use"
|
||||
}
|
||||
}
|
||||
|
||||
responseJSON, err := json.Marshal(response)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
return string(responseJSON)
|
||||
return out
|
||||
}
|
||||
|
||||
func ClaudeTokenCount(ctx context.Context, count int64) string {
|
||||
|
||||
@@ -8,7 +8,6 @@ package gemini
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/rand"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"strings"
|
||||
@@ -94,7 +93,6 @@ func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream
|
||||
out, _ = sjson.Set(out, "stream", stream)
|
||||
|
||||
// Process contents (Gemini messages) -> OpenAI messages
|
||||
var openAIMessages []interface{}
|
||||
var toolCallIDs []string // Track tool call IDs for matching with tool results
|
||||
|
||||
// System instruction -> OpenAI system message
|
||||
@@ -105,22 +103,17 @@ func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream
|
||||
}
|
||||
if systemInstruction.Exists() {
|
||||
parts := systemInstruction.Get("parts")
|
||||
msg := map[string]interface{}{
|
||||
"role": "system",
|
||||
"content": []interface{}{},
|
||||
}
|
||||
|
||||
var aggregatedParts []interface{}
|
||||
msg := `{"role":"system","content":[]}`
|
||||
hasContent := false
|
||||
|
||||
if parts.Exists() && parts.IsArray() {
|
||||
parts.ForEach(func(_, part gjson.Result) bool {
|
||||
// Handle text parts
|
||||
if text := part.Get("text"); text.Exists() {
|
||||
formattedText := text.String()
|
||||
aggregatedParts = append(aggregatedParts, map[string]interface{}{
|
||||
"type": "text",
|
||||
"text": formattedText,
|
||||
})
|
||||
contentPart := `{"type":"text","text":""}`
|
||||
contentPart, _ = sjson.Set(contentPart, "text", text.String())
|
||||
msg, _ = sjson.SetRaw(msg, "content.-1", contentPart)
|
||||
hasContent = true
|
||||
}
|
||||
|
||||
// Handle inline data (e.g., images)
|
||||
@@ -132,20 +125,17 @@ func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream
|
||||
data := inlineData.Get("data").String()
|
||||
imageURL := fmt.Sprintf("data:%s;base64,%s", mimeType, data)
|
||||
|
||||
aggregatedParts = append(aggregatedParts, map[string]interface{}{
|
||||
"type": "image_url",
|
||||
"image_url": map[string]interface{}{
|
||||
"url": imageURL,
|
||||
},
|
||||
})
|
||||
contentPart := `{"type":"image_url","image_url":{"url":""}}`
|
||||
contentPart, _ = sjson.Set(contentPart, "image_url.url", imageURL)
|
||||
msg, _ = sjson.SetRaw(msg, "content.-1", contentPart)
|
||||
hasContent = true
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
if len(aggregatedParts) > 0 {
|
||||
msg["content"] = aggregatedParts
|
||||
openAIMessages = append(openAIMessages, msg)
|
||||
if hasContent {
|
||||
out, _ = sjson.SetRaw(out, "messages.-1", msg)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -159,16 +149,15 @@ func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream
|
||||
role = "assistant"
|
||||
}
|
||||
|
||||
// Create OpenAI message
|
||||
msg := map[string]interface{}{
|
||||
"role": role,
|
||||
"content": "",
|
||||
}
|
||||
msg := `{"role":"","content":""}`
|
||||
msg, _ = sjson.Set(msg, "role", role)
|
||||
|
||||
var textBuilder strings.Builder
|
||||
var aggregatedParts []interface{}
|
||||
contentWrapper := `{"arr":[]}`
|
||||
contentPartsCount := 0
|
||||
onlyTextContent := true
|
||||
var toolCalls []interface{}
|
||||
toolCallsWrapper := `{"arr":[]}`
|
||||
toolCallsCount := 0
|
||||
|
||||
if parts.Exists() && parts.IsArray() {
|
||||
parts.ForEach(func(_, part gjson.Result) bool {
|
||||
@@ -176,10 +165,10 @@ func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream
|
||||
if text := part.Get("text"); text.Exists() {
|
||||
formattedText := text.String()
|
||||
textBuilder.WriteString(formattedText)
|
||||
aggregatedParts = append(aggregatedParts, map[string]interface{}{
|
||||
"type": "text",
|
||||
"text": formattedText,
|
||||
})
|
||||
contentPart := `{"type":"text","text":""}`
|
||||
contentPart, _ = sjson.Set(contentPart, "text", formattedText)
|
||||
contentWrapper, _ = sjson.SetRaw(contentWrapper, "arr.-1", contentPart)
|
||||
contentPartsCount++
|
||||
}
|
||||
|
||||
// Handle inline data (e.g., images)
|
||||
@@ -193,12 +182,10 @@ func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream
|
||||
data := inlineData.Get("data").String()
|
||||
imageURL := fmt.Sprintf("data:%s;base64,%s", mimeType, data)
|
||||
|
||||
aggregatedParts = append(aggregatedParts, map[string]interface{}{
|
||||
"type": "image_url",
|
||||
"image_url": map[string]interface{}{
|
||||
"url": imageURL,
|
||||
},
|
||||
})
|
||||
contentPart := `{"type":"image_url","image_url":{"url":""}}`
|
||||
contentPart, _ = sjson.Set(contentPart, "image_url.url", imageURL)
|
||||
contentWrapper, _ = sjson.SetRaw(contentWrapper, "arr.-1", contentPart)
|
||||
contentPartsCount++
|
||||
}
|
||||
|
||||
// Handle function calls (Gemini) -> tool calls (OpenAI)
|
||||
@@ -206,44 +193,32 @@ func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream
|
||||
toolCallID := genToolCallID()
|
||||
toolCallIDs = append(toolCallIDs, toolCallID)
|
||||
|
||||
toolCall := map[string]interface{}{
|
||||
"id": toolCallID,
|
||||
"type": "function",
|
||||
"function": map[string]interface{}{
|
||||
"name": functionCall.Get("name").String(),
|
||||
},
|
||||
}
|
||||
toolCall := `{"id":"","type":"function","function":{"name":"","arguments":""}}`
|
||||
toolCall, _ = sjson.Set(toolCall, "id", toolCallID)
|
||||
toolCall, _ = sjson.Set(toolCall, "function.name", functionCall.Get("name").String())
|
||||
|
||||
// Convert args to arguments JSON string
|
||||
if args := functionCall.Get("args"); args.Exists() {
|
||||
argsJSON, _ := json.Marshal(args.Value())
|
||||
toolCall["function"].(map[string]interface{})["arguments"] = string(argsJSON)
|
||||
toolCall, _ = sjson.Set(toolCall, "function.arguments", args.Raw)
|
||||
} else {
|
||||
toolCall["function"].(map[string]interface{})["arguments"] = "{}"
|
||||
toolCall, _ = sjson.Set(toolCall, "function.arguments", "{}")
|
||||
}
|
||||
|
||||
toolCalls = append(toolCalls, toolCall)
|
||||
toolCallsWrapper, _ = sjson.SetRaw(toolCallsWrapper, "arr.-1", toolCall)
|
||||
toolCallsCount++
|
||||
}
|
||||
|
||||
// Handle function responses (Gemini) -> tool role messages (OpenAI)
|
||||
if functionResponse := part.Get("functionResponse"); functionResponse.Exists() {
|
||||
// Create tool message for function response
|
||||
toolMsg := map[string]interface{}{
|
||||
"role": "tool",
|
||||
"tool_call_id": "", // Will be set based on context
|
||||
"content": "",
|
||||
}
|
||||
toolMsg := `{"role":"tool","tool_call_id":"","content":""}`
|
||||
|
||||
// Convert response.content to JSON string
|
||||
if response := functionResponse.Get("response"); response.Exists() {
|
||||
if content = response.Get("content"); content.Exists() {
|
||||
// Use the content field from the response
|
||||
contentJSON, _ := json.Marshal(content.Value())
|
||||
toolMsg["content"] = string(contentJSON)
|
||||
if contentField := response.Get("content"); contentField.Exists() {
|
||||
toolMsg, _ = sjson.Set(toolMsg, "content", contentField.Raw)
|
||||
} else {
|
||||
// Fallback to entire response
|
||||
responseJSON, _ := json.Marshal(response.Value())
|
||||
toolMsg["content"] = string(responseJSON)
|
||||
toolMsg, _ = sjson.Set(toolMsg, "content", response.Raw)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -252,13 +227,13 @@ func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream
|
||||
if len(toolCallIDs) > 0 {
|
||||
// Use the last tool call ID (simple matching by function name)
|
||||
// In a real implementation, you might want more sophisticated matching
|
||||
toolMsg["tool_call_id"] = toolCallIDs[len(toolCallIDs)-1]
|
||||
toolMsg, _ = sjson.Set(toolMsg, "tool_call_id", toolCallIDs[len(toolCallIDs)-1])
|
||||
} else {
|
||||
// Generate a tool call ID if none available
|
||||
toolMsg["tool_call_id"] = genToolCallID()
|
||||
toolMsg, _ = sjson.Set(toolMsg, "tool_call_id", genToolCallID())
|
||||
}
|
||||
|
||||
openAIMessages = append(openAIMessages, toolMsg)
|
||||
out, _ = sjson.SetRaw(out, "messages.-1", toolMsg)
|
||||
}
|
||||
|
||||
return true
|
||||
@@ -266,170 +241,46 @@ func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream
|
||||
}
|
||||
|
||||
// Set content
|
||||
if len(aggregatedParts) > 0 {
|
||||
if contentPartsCount > 0 {
|
||||
if onlyTextContent {
|
||||
msg["content"] = textBuilder.String()
|
||||
msg, _ = sjson.Set(msg, "content", textBuilder.String())
|
||||
} else {
|
||||
msg["content"] = aggregatedParts
|
||||
msg, _ = sjson.SetRaw(msg, "content", gjson.Get(contentWrapper, "arr").Raw)
|
||||
}
|
||||
}
|
||||
|
||||
// Set tool calls if any
|
||||
if len(toolCalls) > 0 {
|
||||
msg["tool_calls"] = toolCalls
|
||||
if toolCallsCount > 0 {
|
||||
msg, _ = sjson.SetRaw(msg, "tool_calls", gjson.Get(toolCallsWrapper, "arr").Raw)
|
||||
}
|
||||
|
||||
openAIMessages = append(openAIMessages, msg)
|
||||
|
||||
// switch role {
|
||||
// case "user", "model":
|
||||
// // Convert role: model -> assistant
|
||||
// if role == "model" {
|
||||
// role = "assistant"
|
||||
// }
|
||||
//
|
||||
// // Create OpenAI message
|
||||
// msg := map[string]interface{}{
|
||||
// "role": role,
|
||||
// "content": "",
|
||||
// }
|
||||
//
|
||||
// var contentParts []string
|
||||
// var toolCalls []interface{}
|
||||
//
|
||||
// if parts.Exists() && parts.IsArray() {
|
||||
// parts.ForEach(func(_, part gjson.Result) bool {
|
||||
// // Handle text parts
|
||||
// if text := part.Get("text"); text.Exists() {
|
||||
// contentParts = append(contentParts, text.String())
|
||||
// }
|
||||
//
|
||||
// // Handle function calls (Gemini) -> tool calls (OpenAI)
|
||||
// if functionCall := part.Get("functionCall"); functionCall.Exists() {
|
||||
// toolCallID := genToolCallID()
|
||||
// toolCallIDs = append(toolCallIDs, toolCallID)
|
||||
//
|
||||
// toolCall := map[string]interface{}{
|
||||
// "id": toolCallID,
|
||||
// "type": "function",
|
||||
// "function": map[string]interface{}{
|
||||
// "name": functionCall.Get("name").String(),
|
||||
// },
|
||||
// }
|
||||
//
|
||||
// // Convert args to arguments JSON string
|
||||
// if args := functionCall.Get("args"); args.Exists() {
|
||||
// argsJSON, _ := json.Marshal(args.Value())
|
||||
// toolCall["function"].(map[string]interface{})["arguments"] = string(argsJSON)
|
||||
// } else {
|
||||
// toolCall["function"].(map[string]interface{})["arguments"] = "{}"
|
||||
// }
|
||||
//
|
||||
// toolCalls = append(toolCalls, toolCall)
|
||||
// }
|
||||
//
|
||||
// return true
|
||||
// })
|
||||
// }
|
||||
//
|
||||
// // Set content
|
||||
// if len(contentParts) > 0 {
|
||||
// msg["content"] = strings.Join(contentParts, "")
|
||||
// }
|
||||
//
|
||||
// // Set tool calls if any
|
||||
// if len(toolCalls) > 0 {
|
||||
// msg["tool_calls"] = toolCalls
|
||||
// }
|
||||
//
|
||||
// openAIMessages = append(openAIMessages, msg)
|
||||
//
|
||||
// case "function":
|
||||
// // Handle Gemini function role -> OpenAI tool role
|
||||
// if parts.Exists() && parts.IsArray() {
|
||||
// parts.ForEach(func(_, part gjson.Result) bool {
|
||||
// // Handle function responses (Gemini) -> tool role messages (OpenAI)
|
||||
// if functionResponse := part.Get("functionResponse"); functionResponse.Exists() {
|
||||
// // Create tool message for function response
|
||||
// toolMsg := map[string]interface{}{
|
||||
// "role": "tool",
|
||||
// "tool_call_id": "", // Will be set based on context
|
||||
// "content": "",
|
||||
// }
|
||||
//
|
||||
// // Convert response.content to JSON string
|
||||
// if response := functionResponse.Get("response"); response.Exists() {
|
||||
// if content = response.Get("content"); content.Exists() {
|
||||
// // Use the content field from the response
|
||||
// contentJSON, _ := json.Marshal(content.Value())
|
||||
// toolMsg["content"] = string(contentJSON)
|
||||
// } else {
|
||||
// // Fallback to entire response
|
||||
// responseJSON, _ := json.Marshal(response.Value())
|
||||
// toolMsg["content"] = string(responseJSON)
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// // Try to match with previous tool call ID
|
||||
// _ = functionResponse.Get("name").String() // functionName not used for now
|
||||
// if len(toolCallIDs) > 0 {
|
||||
// // Use the last tool call ID (simple matching by function name)
|
||||
// // In a real implementation, you might want more sophisticated matching
|
||||
// toolMsg["tool_call_id"] = toolCallIDs[len(toolCallIDs)-1]
|
||||
// } else {
|
||||
// // Generate a tool call ID if none available
|
||||
// toolMsg["tool_call_id"] = genToolCallID()
|
||||
// }
|
||||
//
|
||||
// openAIMessages = append(openAIMessages, toolMsg)
|
||||
// }
|
||||
//
|
||||
// return true
|
||||
// })
|
||||
// }
|
||||
// }
|
||||
out, _ = sjson.SetRaw(out, "messages.-1", msg)
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
// Set messages
|
||||
if len(openAIMessages) > 0 {
|
||||
messagesJSON, _ := json.Marshal(openAIMessages)
|
||||
out, _ = sjson.SetRaw(out, "messages", string(messagesJSON))
|
||||
}
|
||||
|
||||
// Tools mapping: Gemini tools -> OpenAI tools
|
||||
if tools := root.Get("tools"); tools.Exists() && tools.IsArray() {
|
||||
var openAITools []interface{}
|
||||
tools.ForEach(func(_, tool gjson.Result) bool {
|
||||
if functionDeclarations := tool.Get("functionDeclarations"); functionDeclarations.Exists() && functionDeclarations.IsArray() {
|
||||
functionDeclarations.ForEach(func(_, funcDecl gjson.Result) bool {
|
||||
openAITool := map[string]interface{}{
|
||||
"type": "function",
|
||||
"function": map[string]interface{}{
|
||||
"name": funcDecl.Get("name").String(),
|
||||
"description": funcDecl.Get("description").String(),
|
||||
},
|
||||
}
|
||||
openAITool := `{"type":"function","function":{"name":"","description":""}}`
|
||||
openAITool, _ = sjson.Set(openAITool, "function.name", funcDecl.Get("name").String())
|
||||
openAITool, _ = sjson.Set(openAITool, "function.description", funcDecl.Get("description").String())
|
||||
|
||||
// Convert parameters schema
|
||||
if parameters := funcDecl.Get("parameters"); parameters.Exists() {
|
||||
openAITool["function"].(map[string]interface{})["parameters"] = parameters.Value()
|
||||
} else if parameters = funcDecl.Get("parametersJsonSchema"); parameters.Exists() {
|
||||
openAITool["function"].(map[string]interface{})["parameters"] = parameters.Value()
|
||||
openAITool, _ = sjson.SetRaw(openAITool, "function.parameters", parameters.Raw)
|
||||
} else if parameters := funcDecl.Get("parametersJsonSchema"); parameters.Exists() {
|
||||
openAITool, _ = sjson.SetRaw(openAITool, "function.parameters", parameters.Raw)
|
||||
}
|
||||
|
||||
openAITools = append(openAITools, openAITool)
|
||||
out, _ = sjson.SetRaw(out, "tools.-1", openAITool)
|
||||
return true
|
||||
})
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
if len(openAITools) > 0 {
|
||||
toolsJSON, _ := json.Marshal(openAITools)
|
||||
out, _ = sjson.SetRaw(out, "tools", string(toolsJSON))
|
||||
}
|
||||
}
|
||||
|
||||
// Tool choice mapping (Gemini doesn't have direct equivalent, but we can handle it)
|
||||
|
||||
@@ -8,7 +8,6 @@ package gemini
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
@@ -84,15 +83,12 @@ func ConvertOpenAIResponseToGemini(_ context.Context, _ string, originalRequestR
|
||||
template, _ = sjson.Set(template, "model", model.String())
|
||||
}
|
||||
|
||||
usageObj := map[string]interface{}{
|
||||
"promptTokenCount": usage.Get("prompt_tokens").Int(),
|
||||
"candidatesTokenCount": usage.Get("completion_tokens").Int(),
|
||||
"totalTokenCount": usage.Get("total_tokens").Int(),
|
||||
}
|
||||
template, _ = sjson.Set(template, "usageMetadata.promptTokenCount", usage.Get("prompt_tokens").Int())
|
||||
template, _ = sjson.Set(template, "usageMetadata.candidatesTokenCount", usage.Get("completion_tokens").Int())
|
||||
template, _ = sjson.Set(template, "usageMetadata.totalTokenCount", usage.Get("total_tokens").Int())
|
||||
if reasoningTokens := reasoningTokensFromUsage(usage); reasoningTokens > 0 {
|
||||
usageObj["thoughtsTokenCount"] = reasoningTokens
|
||||
template, _ = sjson.Set(template, "usageMetadata.thoughtsTokenCount", reasoningTokens)
|
||||
}
|
||||
template, _ = sjson.Set(template, "usageMetadata", usageObj)
|
||||
return []string{template}
|
||||
}
|
||||
return []string{}
|
||||
@@ -133,13 +129,8 @@ func ConvertOpenAIResponseToGemini(_ context.Context, _ string, originalRequestR
|
||||
continue
|
||||
}
|
||||
reasoningTemplate := baseTemplate
|
||||
parts := []interface{}{
|
||||
map[string]interface{}{
|
||||
"thought": true,
|
||||
"text": reasoningText,
|
||||
},
|
||||
}
|
||||
reasoningTemplate, _ = sjson.Set(reasoningTemplate, "candidates.0.content.parts", parts)
|
||||
reasoningTemplate, _ = sjson.Set(reasoningTemplate, "candidates.0.content.parts.0.thought", true)
|
||||
reasoningTemplate, _ = sjson.Set(reasoningTemplate, "candidates.0.content.parts.0.text", reasoningText)
|
||||
chunkOutputs = append(chunkOutputs, reasoningTemplate)
|
||||
}
|
||||
}
|
||||
@@ -150,13 +141,8 @@ func ConvertOpenAIResponseToGemini(_ context.Context, _ string, originalRequestR
|
||||
(*param).(*ConvertOpenAIResponseToGeminiParams).ContentAccumulator.WriteString(contentText)
|
||||
|
||||
// Create text part for this delta
|
||||
parts := []interface{}{
|
||||
map[string]interface{}{
|
||||
"text": contentText,
|
||||
},
|
||||
}
|
||||
contentTemplate := baseTemplate
|
||||
contentTemplate, _ = sjson.Set(contentTemplate, "candidates.0.content.parts", parts)
|
||||
contentTemplate, _ = sjson.Set(contentTemplate, "candidates.0.content.parts.0.text", contentText)
|
||||
chunkOutputs = append(chunkOutputs, contentTemplate)
|
||||
}
|
||||
|
||||
@@ -225,24 +211,13 @@ func ConvertOpenAIResponseToGemini(_ context.Context, _ string, originalRequestR
|
||||
|
||||
// If we have accumulated tool calls, output them now
|
||||
if len((*param).(*ConvertOpenAIResponseToGeminiParams).ToolCallsAccumulator) > 0 {
|
||||
var parts []interface{}
|
||||
partIndex := 0
|
||||
for _, accumulator := range (*param).(*ConvertOpenAIResponseToGeminiParams).ToolCallsAccumulator {
|
||||
argsStr := accumulator.Arguments.String()
|
||||
var argsMap map[string]interface{}
|
||||
|
||||
argsMap = parseArgsToMap(argsStr)
|
||||
|
||||
functionCallPart := map[string]interface{}{
|
||||
"functionCall": map[string]interface{}{
|
||||
"name": accumulator.Name,
|
||||
"args": argsMap,
|
||||
},
|
||||
}
|
||||
parts = append(parts, functionCallPart)
|
||||
}
|
||||
|
||||
if len(parts) > 0 {
|
||||
template, _ = sjson.Set(template, "candidates.0.content.parts", parts)
|
||||
namePath := fmt.Sprintf("candidates.0.content.parts.%d.functionCall.name", partIndex)
|
||||
argsPath := fmt.Sprintf("candidates.0.content.parts.%d.functionCall.args", partIndex)
|
||||
template, _ = sjson.Set(template, namePath, accumulator.Name)
|
||||
template, _ = sjson.SetRaw(template, argsPath, parseArgsToObjectRaw(accumulator.Arguments.String()))
|
||||
partIndex++
|
||||
}
|
||||
|
||||
// Clear accumulators
|
||||
@@ -255,15 +230,12 @@ func ConvertOpenAIResponseToGemini(_ context.Context, _ string, originalRequestR
|
||||
|
||||
// Handle usage information
|
||||
if usage := root.Get("usage"); usage.Exists() {
|
||||
usageObj := map[string]interface{}{
|
||||
"promptTokenCount": usage.Get("prompt_tokens").Int(),
|
||||
"candidatesTokenCount": usage.Get("completion_tokens").Int(),
|
||||
"totalTokenCount": usage.Get("total_tokens").Int(),
|
||||
}
|
||||
template, _ = sjson.Set(template, "usageMetadata.promptTokenCount", usage.Get("prompt_tokens").Int())
|
||||
template, _ = sjson.Set(template, "usageMetadata.candidatesTokenCount", usage.Get("completion_tokens").Int())
|
||||
template, _ = sjson.Set(template, "usageMetadata.totalTokenCount", usage.Get("total_tokens").Int())
|
||||
if reasoningTokens := reasoningTokensFromUsage(usage); reasoningTokens > 0 {
|
||||
usageObj["thoughtsTokenCount"] = reasoningTokens
|
||||
template, _ = sjson.Set(template, "usageMetadata.thoughtsTokenCount", reasoningTokens)
|
||||
}
|
||||
template, _ = sjson.Set(template, "usageMetadata", usageObj)
|
||||
results = append(results, template)
|
||||
return true
|
||||
}
|
||||
@@ -291,46 +263,54 @@ func mapOpenAIFinishReasonToGemini(openAIReason string) string {
|
||||
}
|
||||
}
|
||||
|
||||
// parseArgsToMap safely parses a JSON string of function arguments into a map.
|
||||
// It returns an empty map if the input is empty or cannot be parsed as a JSON object.
|
||||
func parseArgsToMap(argsStr string) map[string]interface{} {
|
||||
// parseArgsToObjectRaw safely parses a JSON string of function arguments into an object JSON string.
|
||||
// It returns "{}" if the input is empty or cannot be parsed as a JSON object.
|
||||
func parseArgsToObjectRaw(argsStr string) string {
|
||||
trimmed := strings.TrimSpace(argsStr)
|
||||
if trimmed == "" || trimmed == "{}" {
|
||||
return map[string]interface{}{}
|
||||
return "{}"
|
||||
}
|
||||
|
||||
// First try strict JSON
|
||||
var out map[string]interface{}
|
||||
if errUnmarshal := json.Unmarshal([]byte(trimmed), &out); errUnmarshal == nil {
|
||||
return out
|
||||
if gjson.Valid(trimmed) {
|
||||
strict := gjson.Parse(trimmed)
|
||||
if strict.IsObject() {
|
||||
return strict.Raw
|
||||
}
|
||||
}
|
||||
|
||||
// Tolerant parse: handle streams where values are barewords (e.g., 北京, celsius)
|
||||
tolerant := tolerantParseJSONMap(trimmed)
|
||||
if len(tolerant) > 0 {
|
||||
tolerant := tolerantParseJSONObjectRaw(trimmed)
|
||||
if tolerant != "{}" {
|
||||
return tolerant
|
||||
}
|
||||
|
||||
// Fallback: return empty object when parsing fails
|
||||
return map[string]interface{}{}
|
||||
return "{}"
|
||||
}
|
||||
|
||||
// tolerantParseJSONMap attempts to parse a JSON-like object string into a map, tolerating
|
||||
func escapeSjsonPathKey(key string) string {
|
||||
key = strings.ReplaceAll(key, `\`, `\\`)
|
||||
key = strings.ReplaceAll(key, `.`, `\.`)
|
||||
return key
|
||||
}
|
||||
|
||||
// tolerantParseJSONObjectRaw attempts to parse a JSON-like object string into a JSON object string, tolerating
// bareword values (unquoted strings) commonly seen during streamed tool calls.
// Example input: {"location": 北京, "unit": celsius}
func tolerantParseJSONMap(s string) map[string]interface{} {
func tolerantParseJSONObjectRaw(s string) string {
// Ensure we operate within the outermost braces if present
start := strings.Index(s, "{")
end := strings.LastIndex(s, "}")
if start == -1 || end == -1 || start >= end {
return map[string]interface{}{}
return "{}"
}
content := s[start+1 : end]

runes := []rune(content)
n := len(runes)
i := 0
result := make(map[string]interface{})
result := "{}"

for i < n {
// Skip whitespace and commas
@@ -356,6 +336,7 @@ func tolerantParseJSONMap(s string) map[string]interface{} {
break
}
keyName := jsonStringTokenToRawString(keyToken)
sjsonKey := escapeSjsonPathKey(keyName)
i = nextIdx

// Skip whitespace
@@ -375,17 +356,16 @@ func tolerantParseJSONMap(s string) map[string]interface{} {
}

// Parse value (string, number, object/array, bareword)
var value interface{}
switch runes[i] {
case '"':
// JSON string
valToken, ni := parseJSONStringRunes(runes, i)
if ni == -1 {
// Malformed; treat as empty string
value = ""
result, _ = sjson.Set(result, sjsonKey, "")
i = n
} else {
value = jsonStringTokenToRawString(valToken)
result, _ = sjson.Set(result, sjsonKey, jsonStringTokenToRawString(valToken))
i = ni
}
case '{', '[':
@@ -394,11 +374,10 @@ func tolerantParseJSONMap(s string) map[string]interface{} {
if ni == -1 {
i = n
} else {
var anyVal interface{}
if errUnmarshal := json.Unmarshal([]byte(seg), &anyVal); errUnmarshal == nil {
value = anyVal
if gjson.Valid(seg) {
result, _ = sjson.SetRaw(result, sjsonKey, seg)
} else {
value = seg
result, _ = sjson.Set(result, sjsonKey, seg)
}
i = ni
}
@@ -411,21 +390,19 @@ func tolerantParseJSONMap(s string) map[string]interface{} {
token := strings.TrimSpace(string(runes[i:j]))
// Interpret common JSON atoms and numbers; otherwise treat as string
if token == "true" {
value = true
result, _ = sjson.Set(result, sjsonKey, true)
} else if token == "false" {
value = false
result, _ = sjson.Set(result, sjsonKey, false)
} else if token == "null" {
value = nil
result, _ = sjson.Set(result, sjsonKey, nil)
} else if numVal, ok := tryParseNumber(token); ok {
value = numVal
result, _ = sjson.Set(result, sjsonKey, numVal)
} else {
value = token
result, _ = sjson.Set(result, sjsonKey, token)
}
i = j
}

result[keyName] = value

// Skip trailing whitespace and optional comma before next pair
for i < n && (runes[i] == ' ' || runes[i] == '\n' || runes[i] == '\r' || runes[i] == '\t') {
i++
@@ -463,9 +440,9 @@ func parseJSONStringRunes(runes []rune, start int) (string, int) {

// jsonStringTokenToRawString converts a JSON string token (including quotes) to a raw Go string value.
func jsonStringTokenToRawString(token string) string {
var s string
if errUnmarshal := json.Unmarshal([]byte(token), &s); errUnmarshal == nil {
return s
r := gjson.Parse(token)
if r.Type == gjson.String {
return r.String()
}
// Fallback: strip surrounding quotes if present
if len(token) >= 2 && token[0] == '"' && token[len(token)-1] == '"' {
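Note (illustrative expectation, not part of the diff): after the rename above, bareword values are coerced into JSON strings, so the documented example input is expected to come back as a valid object string:

raw := tolerantParseJSONObjectRaw(`{"location": 北京, "unit": celsius}`)
// raw is expected to be `{"location":"北京","unit":"celsius"}`
_ = raw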
@@ -579,7 +556,7 @@ func ConvertOpenAIResponseToGeminiNonStream(_ context.Context, _ string, origina
}
}

var parts []interface{}
partIndex := 0

// Handle reasoning content before visible text
if reasoning := message.Get("reasoning_content"); reasoning.Exists() {
@@ -587,18 +564,16 @@ func ConvertOpenAIResponseToGeminiNonStream(_ context.Context, _ string, origina
if reasoningText == "" {
continue
}
parts = append(parts, map[string]interface{}{
"thought": true,
"text": reasoningText,
})
out, _ = sjson.Set(out, fmt.Sprintf("candidates.0.content.parts.%d.thought", partIndex), true)
out, _ = sjson.Set(out, fmt.Sprintf("candidates.0.content.parts.%d.text", partIndex), reasoningText)
partIndex++
}
}

// Handle content first
if content := message.Get("content"); content.Exists() && content.String() != "" {
parts = append(parts, map[string]interface{}{
"text": content.String(),
})
out, _ = sjson.Set(out, fmt.Sprintf("candidates.0.content.parts.%d.text", partIndex), content.String())
partIndex++
}

// Handle tool calls
@@ -609,27 +584,16 @@ func ConvertOpenAIResponseToGeminiNonStream(_ context.Context, _ string, origina
functionName := function.Get("name").String()
functionArgs := function.Get("arguments").String()

// Parse arguments
var argsMap map[string]interface{}
argsMap = parseArgsToMap(functionArgs)

functionCallPart := map[string]interface{}{
"functionCall": map[string]interface{}{
"name": functionName,
"args": argsMap,
},
}
parts = append(parts, functionCallPart)
namePath := fmt.Sprintf("candidates.0.content.parts.%d.functionCall.name", partIndex)
argsPath := fmt.Sprintf("candidates.0.content.parts.%d.functionCall.args", partIndex)
out, _ = sjson.Set(out, namePath, functionName)
out, _ = sjson.SetRaw(out, argsPath, parseArgsToObjectRaw(functionArgs))
partIndex++
}
return true
})
}

// Set parts
if len(parts) > 0 {
out, _ = sjson.Set(out, "candidates.0.content.parts", parts)
}

// Handle finish reason
if finishReason := choice.Get("finish_reason"); finishReason.Exists() {
geminiFinishReason := mapOpenAIFinishReasonToGemini(finishReason.String())
@@ -645,15 +609,12 @@ func ConvertOpenAIResponseToGeminiNonStream(_ context.Context, _ string, origina

// Handle usage information
if usage := root.Get("usage"); usage.Exists() {
usageObj := map[string]interface{}{
"promptTokenCount": usage.Get("prompt_tokens").Int(),
"candidatesTokenCount": usage.Get("completion_tokens").Int(),
"totalTokenCount": usage.Get("total_tokens").Int(),
}
out, _ = sjson.Set(out, "usageMetadata.promptTokenCount", usage.Get("prompt_tokens").Int())
out, _ = sjson.Set(out, "usageMetadata.candidatesTokenCount", usage.Get("completion_tokens").Int())
out, _ = sjson.Set(out, "usageMetadata.totalTokenCount", usage.Get("total_tokens").Int())
if reasoningTokens := reasoningTokensFromUsage(usage); reasoningTokens > 0 {
usageObj["thoughtsTokenCount"] = reasoningTokens
out, _ = sjson.Set(out, "usageMetadata.thoughtsTokenCount", reasoningTokens)
}
out, _ = sjson.Set(out, "usageMetadata", usageObj)
}

return out
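Note (minimal sketch, not part of the diff; token counts are placeholders): the usageMetadata writes above rely on sjson creating intermediate objects for dotted paths:

out := "{}"
out, _ = sjson.Set(out, "usageMetadata.promptTokenCount", 12)
out, _ = sjson.Set(out, "usageMetadata.candidatesTokenCount", 8)
out, _ = sjson.Set(out, "usageMetadata.totalTokenCount", 20)
// out == `{"usageMetadata":{"promptTokenCount":12,"candidatesTokenCount":8,"totalTokenCount":20}}`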
@@ -484,16 +484,12 @@ func ConvertOpenAIChatCompletionsResponseToOpenAIResponses(ctx context.Context,
}
}
// Build response.output using aggregated buffers
var outputs []interface{}
outputsWrapper := `{"arr":[]}`
if st.ReasoningBuf.Len() > 0 {
outputs = append(outputs, map[string]interface{}{
"id": st.ReasoningID,
"type": "reasoning",
"summary": []interface{}{map[string]interface{}{
"type": "summary_text",
"text": st.ReasoningBuf.String(),
}},
})
item := `{"id":"","type":"reasoning","summary":[{"type":"summary_text","text":""}]}`
item, _ = sjson.Set(item, "id", st.ReasoningID)
item, _ = sjson.Set(item, "summary.0.text", st.ReasoningBuf.String())
outputsWrapper, _ = sjson.SetRaw(outputsWrapper, "arr.-1", item)
}
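Note (compact sketch, not part of the diff): the {"arr":[]} wrapper used here and below appends each pre-built item as raw JSON with sjson's "-1" array key, checks the count with gjson's "#" query, and reads the whole array back via .Raw:

wrapper := `{"arr":[]}`
item := `{"id":"x","type":"reasoning","summary":[]}`
wrapper, _ = sjson.SetRaw(wrapper, "arr.-1", item) // append without re-encoding
if gjson.Get(wrapper, "arr.#").Int() > 0 {
	outputRaw := gjson.Get(wrapper, "arr").Raw // [{"id":"x","type":"reasoning","summary":[]}]
	_ = outputRaw
}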
// Append message items in ascending index order
if len(st.MsgItemAdded) > 0 {
@@ -513,18 +509,10 @@ func ConvertOpenAIChatCompletionsResponseToOpenAIResponses(ctx context.Context,
if b := st.MsgTextBuf[i]; b != nil {
txt = b.String()
}
outputs = append(outputs, map[string]interface{}{
"id": fmt.Sprintf("msg_%s_%d", st.ResponseID, i),
"type": "message",
"status": "completed",
"content": []interface{}{map[string]interface{}{
"type": "output_text",
"annotations": []interface{}{},
"logprobs": []interface{}{},
"text": txt,
}},
"role": "assistant",
})
item := `{"id":"","type":"message","status":"completed","content":[{"type":"output_text","annotations":[],"logprobs":[],"text":""}],"role":"assistant"}`
item, _ = sjson.Set(item, "id", fmt.Sprintf("msg_%s_%d", st.ResponseID, i))
item, _ = sjson.Set(item, "content.0.text", txt)
outputsWrapper, _ = sjson.SetRaw(outputsWrapper, "arr.-1", item)
}
}
if len(st.FuncArgsBuf) > 0 {
@@ -547,18 +535,16 @@ func ConvertOpenAIChatCompletionsResponseToOpenAIResponses(ctx context.Context,
}
callID := st.FuncCallIDs[i]
name := st.FuncNames[i]
outputs = append(outputs, map[string]interface{}{
"id": fmt.Sprintf("fc_%s", callID),
"type": "function_call",
"status": "completed",
"arguments": args,
"call_id": callID,
"name": name,
})
item := `{"id":"","type":"function_call","status":"completed","arguments":"","call_id":"","name":""}`
item, _ = sjson.Set(item, "id", fmt.Sprintf("fc_%s", callID))
item, _ = sjson.Set(item, "arguments", args)
item, _ = sjson.Set(item, "call_id", callID)
item, _ = sjson.Set(item, "name", name)
outputsWrapper, _ = sjson.SetRaw(outputsWrapper, "arr.-1", item)
}
}
if len(outputs) > 0 {
completed, _ = sjson.Set(completed, "response.output", outputs)
if gjson.Get(outputsWrapper, "arr.#").Int() > 0 {
completed, _ = sjson.SetRaw(completed, "response.output", gjson.Get(outputsWrapper, "arr").Raw)
}
if st.UsageSeen {
completed, _ = sjson.Set(completed, "response.usage.input_tokens", st.PromptTokens)
@@ -681,7 +667,7 @@ func ConvertOpenAIChatCompletionsResponseToOpenAIResponsesNonStream(_ context.Co
}

// Build output list from choices[...]
var outputs []interface{}
outputsWrapper := `{"arr":[]}`
// Detect and capture reasoning content if present
rcText := gjson.GetBytes(rawJSON, "choices.0.message.reasoning_content").String()
includeReasoning := rcText != ""
@@ -693,21 +679,14 @@ func ConvertOpenAIChatCompletionsResponseToOpenAIResponsesNonStream(_ context.Co
if strings.HasPrefix(rid, "resp_") {
rid = strings.TrimPrefix(rid, "resp_")
}
reasoningItem := map[string]interface{}{
"id": fmt.Sprintf("rs_%s", rid),
"type": "reasoning",
"encrypted_content": "",
}
// Prefer summary_text from reasoning_content; encrypted_content is optional
var summaries []interface{}
reasoningItem := `{"id":"","type":"reasoning","encrypted_content":"","summary":[]}`
reasoningItem, _ = sjson.Set(reasoningItem, "id", fmt.Sprintf("rs_%s", rid))
if rcText != "" {
summaries = append(summaries, map[string]interface{}{
"type": "summary_text",
"text": rcText,
})
reasoningItem, _ = sjson.Set(reasoningItem, "summary.0.type", "summary_text")
reasoningItem, _ = sjson.Set(reasoningItem, "summary.0.text", rcText)
}
reasoningItem["summary"] = summaries
outputs = append(outputs, reasoningItem)
outputsWrapper, _ = sjson.SetRaw(outputsWrapper, "arr.-1", reasoningItem)
}

if choices := root.Get("choices"); choices.Exists() && choices.IsArray() {
@@ -716,18 +695,10 @@ func ConvertOpenAIChatCompletionsResponseToOpenAIResponsesNonStream(_ context.Co
if msg.Exists() {
// Text message part
if c := msg.Get("content"); c.Exists() && c.String() != "" {
outputs = append(outputs, map[string]interface{}{
"id": fmt.Sprintf("msg_%s_%d", id, int(choice.Get("index").Int())),
"type": "message",
"status": "completed",
"content": []interface{}{map[string]interface{}{
"type": "output_text",
"annotations": []interface{}{},
"logprobs": []interface{}{},
"text": c.String(),
}},
"role": "assistant",
})
item := `{"id":"","type":"message","status":"completed","content":[{"type":"output_text","annotations":[],"logprobs":[],"text":""}],"role":"assistant"}`
item, _ = sjson.Set(item, "id", fmt.Sprintf("msg_%s_%d", id, int(choice.Get("index").Int())))
item, _ = sjson.Set(item, "content.0.text", c.String())
outputsWrapper, _ = sjson.SetRaw(outputsWrapper, "arr.-1", item)
}

// Function/tool calls
@@ -736,14 +707,12 @@ func ConvertOpenAIChatCompletionsResponseToOpenAIResponsesNonStream(_ context.Co
callID := tc.Get("id").String()
name := tc.Get("function.name").String()
args := tc.Get("function.arguments").String()
outputs = append(outputs, map[string]interface{}{
"id": fmt.Sprintf("fc_%s", callID),
"type": "function_call",
"status": "completed",
"arguments": args,
"call_id": callID,
"name": name,
})
item := `{"id":"","type":"function_call","status":"completed","arguments":"","call_id":"","name":""}`
item, _ = sjson.Set(item, "id", fmt.Sprintf("fc_%s", callID))
item, _ = sjson.Set(item, "arguments", args)
item, _ = sjson.Set(item, "call_id", callID)
item, _ = sjson.Set(item, "name", name)
outputsWrapper, _ = sjson.SetRaw(outputsWrapper, "arr.-1", item)
return true
})
}
@@ -751,8 +720,8 @@ func ConvertOpenAIChatCompletionsResponseToOpenAIResponsesNonStream(_ context.Co
return true
})
}
if len(outputs) > 0 {
resp, _ = sjson.Set(resp, "output", outputs)
if gjson.Get(outputsWrapper, "arr.#").Int() > 0 {
resp, _ = sjson.SetRaw(resp, "output", gjson.Get(outputsWrapper, "arr").Raw)
}

// usage mapping