Add claude code support

This commit is contained in:
Luis Pater
2025-08-19 01:16:52 +08:00
parent c5cc238308
commit d58cc55cb2
40 changed files with 4429 additions and 185 deletions

View File

@@ -0,0 +1,289 @@
// Package openai provides request translation functionality for OpenAI to Anthropic API.
// It handles parsing and transforming OpenAI Chat Completions API requests into Anthropic API format,
// extracting model information, system instructions, message contents, and tool declarations.
// The package performs JSON data transformation to ensure compatibility
// between OpenAI API format and Anthropic API's expected format.
package openai

import (
	"crypto/rand"
	"encoding/json"
	"math/big"
	"strings"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

// ConvertOpenAIRequestToAnthropic parses and transforms an OpenAI Chat Completions API request into Anthropic API format.
// It extracts the model name, system instruction, message contents, and tool declarations
// from the raw JSON request and returns them in the format expected by the Anthropic API.
func ConvertOpenAIRequestToAnthropic(rawJSON []byte) string {
	// Base Anthropic API template
	out := `{"model":"","max_tokens":32000,"messages":[]}`
	root := gjson.ParseBytes(rawJSON)

	// Helper for generating tool call IDs in the form: toolu_<alphanum>
	genToolCallID := func() string {
		const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
		var b strings.Builder
		// 24-char random suffix
		for i := 0; i < 24; i++ {
			n, _ := rand.Int(rand.Reader, big.NewInt(int64(len(letters))))
			b.WriteByte(letters[n.Int64()])
		}
		return "toolu_" + b.String()
	}

	// Model mapping
	if model := root.Get("model"); model.Exists() {
		modelStr := model.String()
		out, _ = sjson.Set(out, "model", modelStr)
	}
	// Max tokens
	if maxTokens := root.Get("max_tokens"); maxTokens.Exists() {
		out, _ = sjson.Set(out, "max_tokens", maxTokens.Int())
	}
	// Temperature
	if temp := root.Get("temperature"); temp.Exists() {
		out, _ = sjson.Set(out, "temperature", temp.Float())
	}
	// Top P
	if topP := root.Get("top_p"); topP.Exists() {
		out, _ = sjson.Set(out, "top_p", topP.Float())
	}
	// Stop sequences
	if stop := root.Get("stop"); stop.Exists() {
		if stop.IsArray() {
			var stopSequences []string
			stop.ForEach(func(_, value gjson.Result) bool {
				stopSequences = append(stopSequences, value.String())
				return true
			})
			if len(stopSequences) > 0 {
				out, _ = sjson.Set(out, "stop_sequences", stopSequences)
			}
		} else {
			out, _ = sjson.Set(out, "stop_sequences", []string{stop.String()})
		}
	}
	// Stream
	if stream := root.Get("stream"); stream.Exists() {
		out, _ = sjson.Set(out, "stream", stream.Bool())
	}

	// Process messages
	var anthropicMessages []interface{}
	var toolCallIDs []string // Track tool call IDs for matching with tool results
	if messages := root.Get("messages"); messages.Exists() && messages.IsArray() {
		messages.ForEach(func(_, message gjson.Result) bool {
			role := message.Get("role").String()
			contentResult := message.Get("content")
			switch role {
			case "system", "user", "assistant":
				// Create Anthropic message; Anthropic has no system role inside
				// messages, so system content is sent as a user turn
				if role == "system" {
					role = "user"
				}
				msg := map[string]interface{}{
					"role":    role,
					"content": []interface{}{},
				}
				// Handle content
				if contentResult.Exists() && contentResult.Type == gjson.String && contentResult.String() != "" {
					// Simple text content
					msg["content"] = []interface{}{
						map[string]interface{}{
							"type": "text",
							"text": contentResult.String(),
						},
					}
				} else if contentResult.Exists() && contentResult.IsArray() {
					// Array of content parts
					var contentParts []interface{}
					contentResult.ForEach(func(_, part gjson.Result) bool {
						partType := part.Get("type").String()
						switch partType {
						case "text":
							contentParts = append(contentParts, map[string]interface{}{
								"type": "text",
								"text": part.Get("text").String(),
							})
						case "image_url":
							// Convert OpenAI image format to Anthropic format
							imageURL := part.Get("image_url.url").String()
							if strings.HasPrefix(imageURL, "data:") {
								// Extract base64 data and media type
								parts := strings.Split(imageURL, ",")
								if len(parts) == 2 {
									mediaTypePart := strings.Split(parts[0], ";")[0]
									mediaType := strings.TrimPrefix(mediaTypePart, "data:")
									data := parts[1]
									contentParts = append(contentParts, map[string]interface{}{
										"type": "image",
										"source": map[string]interface{}{
											"type":       "base64",
											"media_type": mediaType,
											"data":       data,
										},
									})
								}
							}
						}
						return true
					})
					if len(contentParts) > 0 {
						msg["content"] = contentParts
					}
				} else {
					// Initialize an empty content array for tool calls
					msg["content"] = []interface{}{}
				}
				// Handle tool calls (for assistant messages)
				if toolCalls := message.Get("tool_calls"); toolCalls.Exists() && toolCalls.IsArray() && role == "assistant" {
					var contentParts []interface{}
					// Add existing text content if any
					if existingContent, ok := msg["content"].([]interface{}); ok {
						contentParts = existingContent
					}
					toolCalls.ForEach(func(_, toolCall gjson.Result) bool {
						if toolCall.Get("type").String() == "function" {
							toolCallID := toolCall.Get("id").String()
							if toolCallID == "" {
								toolCallID = genToolCallID()
							}
							toolCallIDs = append(toolCallIDs, toolCallID)
							function := toolCall.Get("function")
							toolUse := map[string]interface{}{
								"type": "tool_use",
								"id":   toolCallID,
								"name": function.Get("name").String(),
							}
							// Parse arguments
							if args := function.Get("arguments"); args.Exists() {
								argsStr := args.String()
								if argsStr != "" {
									var argsMap map[string]interface{}
									if err := json.Unmarshal([]byte(argsStr), &argsMap); err == nil {
										toolUse["input"] = argsMap
									} else {
										toolUse["input"] = map[string]interface{}{}
									}
								} else {
									toolUse["input"] = map[string]interface{}{}
								}
							} else {
								toolUse["input"] = map[string]interface{}{}
							}
							contentParts = append(contentParts, toolUse)
						}
						return true
					})
					msg["content"] = contentParts
				}
				anthropicMessages = append(anthropicMessages, msg)
			case "tool":
				// Handle tool result messages
				toolCallID := message.Get("tool_call_id").String()
				content := message.Get("content").String()
				// Create tool result message
				msg := map[string]interface{}{
					"role": "user",
					"content": []interface{}{
						map[string]interface{}{
							"type":        "tool_result",
							"tool_use_id": toolCallID,
							"content":     content,
						},
					},
				}
				anthropicMessages = append(anthropicMessages, msg)
			}
			return true
		})
	}

	// Set messages
	if len(anthropicMessages) > 0 {
		messagesJSON, _ := json.Marshal(anthropicMessages)
		out, _ = sjson.SetRaw(out, "messages", string(messagesJSON))
	}

	// Tools mapping: OpenAI tools -> Anthropic tools
	if tools := root.Get("tools"); tools.Exists() && tools.IsArray() {
		var anthropicTools []interface{}
		tools.ForEach(func(_, tool gjson.Result) bool {
			if tool.Get("type").String() == "function" {
				function := tool.Get("function")
				anthropicTool := map[string]interface{}{
					"name":        function.Get("name").String(),
					"description": function.Get("description").String(),
				}
				// Convert parameters schema
				if parameters := function.Get("parameters"); parameters.Exists() {
					anthropicTool["input_schema"] = parameters.Value()
				}
				anthropicTools = append(anthropicTools, anthropicTool)
			}
			return true
		})
		if len(anthropicTools) > 0 {
			toolsJSON, _ := json.Marshal(anthropicTools)
			out, _ = sjson.SetRaw(out, "tools", string(toolsJSON))
		}
	}

	// Tool choice mapping
	if toolChoice := root.Get("tool_choice"); toolChoice.Exists() {
		switch toolChoice.Type {
		case gjson.String:
			choice := toolChoice.String()
			switch choice {
			case "none":
				// "none": leave tool_choice unset
			case "auto":
				out, _ = sjson.Set(out, "tool_choice", map[string]interface{}{"type": "auto"})
			case "required":
				out, _ = sjson.Set(out, "tool_choice", map[string]interface{}{"type": "any"})
			}
		case gjson.JSON:
			// Specific tool choice
			if toolChoice.Get("type").String() == "function" {
				functionName := toolChoice.Get("function.name").String()
				out, _ = sjson.Set(out, "tool_choice", map[string]interface{}{
					"type": "tool",
					"name": functionName,
				})
			}
		}
	}
	return out
}
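
// exampleRequestConversion is a minimal usage sketch of ConvertOpenAIRequestToAnthropic.
// The request literal and model name below are illustrative assumptions only; the
// translated fields are simply read back with gjson to show the resulting shape.
func exampleRequestConversion() (model, firstText string) {
	raw := []byte(`{"model":"claude-sonnet-4","max_tokens":1024,"messages":[{"role":"user","content":"Hello"}]}`)
	out := ConvertOpenAIRequestToAnthropic(raw)
	model = gjson.Get(out, "model").String()                         // "claude-sonnet-4"
	firstText = gjson.Get(out, "messages.0.content.0.text").String() // "Hello"
	return model, firstText
}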

View File

@@ -0,0 +1,395 @@
// Package openai provides response translation functionality for Anthropic to OpenAI API.
// It converts Anthropic API responses into OpenAI Chat Completions-compatible JSON,
// transforming Anthropic streaming events either into OpenAI streaming chunks or into a
// single aggregated non-streaming response, as expected by OpenAI API clients. It handles
// text content, tool calls, reasoning (thinking) content, and usage metadata.
package openai

import (
	"encoding/json"
	"strings"
	"time"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

// ConvertAnthropicResponseToOpenAIParams holds parameters for response conversion
type ConvertAnthropicResponseToOpenAIParams struct {
	CreatedAt    int64
	ResponseID   string
	FinishReason string
	// Tool calls accumulator for streaming
	ToolCallsAccumulator map[int]*ToolCallAccumulator
}

// ToolCallAccumulator holds the state for accumulating tool call data
type ToolCallAccumulator struct {
	ID        string
	Name      string
	Arguments strings.Builder
}

// ConvertAnthropicResponseToOpenAI converts Anthropic streaming response format to OpenAI Chat Completions format.
// This function processes various Anthropic event types and transforms them into OpenAI-compatible JSON responses.
// It handles text content, tool calls, and usage metadata, outputting responses that match the OpenAI API format.
func ConvertAnthropicResponseToOpenAI(rawJSON []byte, param *ConvertAnthropicResponseToOpenAIParams) []string {
	root := gjson.ParseBytes(rawJSON)
	eventType := root.Get("type").String()

	// Base OpenAI streaming response template
	template := `{"id":"","object":"chat.completion.chunk","created":0,"model":"","choices":[{"index":0,"delta":{},"finish_reason":null}]}`

	// Set model
	modelResult := gjson.GetBytes(rawJSON, "model")
	modelName := modelResult.String()
	if modelName != "" {
		template, _ = sjson.Set(template, "model", modelName)
	}
	// Set response ID and creation time
	if param.ResponseID != "" {
		template, _ = sjson.Set(template, "id", param.ResponseID)
	}
	if param.CreatedAt > 0 {
		template, _ = sjson.Set(template, "created", param.CreatedAt)
	}

	switch eventType {
	case "message_start":
		// Initialize response with message metadata
		if message := root.Get("message"); message.Exists() {
			param.ResponseID = message.Get("id").String()
			param.CreatedAt = time.Now().Unix()
			template, _ = sjson.Set(template, "id", param.ResponseID)
			template, _ = sjson.Set(template, "model", modelName)
			template, _ = sjson.Set(template, "created", param.CreatedAt)
			// Set initial role
			template, _ = sjson.Set(template, "choices.0.delta.role", "assistant")
			// Initialize tool calls accumulator
			if param.ToolCallsAccumulator == nil {
				param.ToolCallsAccumulator = make(map[int]*ToolCallAccumulator)
			}
		}
		return []string{template}
	case "content_block_start":
		// Start of a content block
		if contentBlock := root.Get("content_block"); contentBlock.Exists() {
			blockType := contentBlock.Get("type").String()
			if blockType == "tool_use" {
				// Start of tool call - initialize accumulator
				toolCallID := contentBlock.Get("id").String()
				toolName := contentBlock.Get("name").String()
				index := int(root.Get("index").Int())
				if param.ToolCallsAccumulator == nil {
					param.ToolCallsAccumulator = make(map[int]*ToolCallAccumulator)
				}
				param.ToolCallsAccumulator[index] = &ToolCallAccumulator{
					ID:   toolCallID,
					Name: toolName,
				}
				// Don't output anything yet - wait for the complete tool call
				return []string{}
			}
		}
		return []string{template}
case "content_block_delta":
// Handle content delta (text or tool use)
if delta := root.Get("delta"); delta.Exists() {
deltaType := delta.Get("type").String()
switch deltaType {
case "text_delta":
// Text content delta
if text := delta.Get("text"); text.Exists() {
template, _ = sjson.Set(template, "choices.0.delta.content", text.String())
}
case "input_json_delta":
// Tool use input delta - accumulate arguments
if partialJSON := delta.Get("partial_json"); partialJSON.Exists() {
index := int(root.Get("index").Int())
if param.ToolCallsAccumulator != nil {
if accumulator, exists := param.ToolCallsAccumulator[index]; exists {
accumulator.Arguments.WriteString(partialJSON.String())
}
}
}
// Don't output anything yet - wait for complete tool call
return []string{}
}
}
return []string{template}
case "content_block_stop":
// End of content block - output complete tool call if it's a tool_use block
index := int(root.Get("index").Int())
if param.ToolCallsAccumulator != nil {
if accumulator, exists := param.ToolCallsAccumulator[index]; exists {
// Build complete tool call
arguments := accumulator.Arguments.String()
if arguments == "" {
arguments = "{}"
}
toolCall := map[string]interface{}{
"index": index,
"id": accumulator.ID,
"type": "function",
"function": map[string]interface{}{
"name": accumulator.Name,
"arguments": arguments,
},
}
template, _ = sjson.Set(template, "choices.0.delta.tool_calls", []interface{}{toolCall})
// Clean up the accumulator for this index
delete(param.ToolCallsAccumulator, index)
return []string{template}
}
}
return []string{}
case "message_delta":
// Handle message-level changes
if delta := root.Get("delta"); delta.Exists() {
if stopReason := delta.Get("stop_reason"); stopReason.Exists() {
param.FinishReason = mapAnthropicStopReasonToOpenAI(stopReason.String())
template, _ = sjson.Set(template, "choices.0.finish_reason", param.FinishReason)
}
}
// Handle usage information
if usage := root.Get("usage"); usage.Exists() {
usageObj := map[string]interface{}{
"prompt_tokens": usage.Get("input_tokens").Int(),
"completion_tokens": usage.Get("output_tokens").Int(),
"total_tokens": usage.Get("input_tokens").Int() + usage.Get("output_tokens").Int(),
}
template, _ = sjson.Set(template, "usage", usageObj)
}
return []string{template}
case "message_stop":
// Final message - send [DONE]
return []string{"[DONE]\n"}
case "ping":
// Ping events - ignore
return []string{}
case "error":
// Error event
if errorData := root.Get("error"); errorData.Exists() {
errorResponse := map[string]interface{}{
"error": map[string]interface{}{
"message": errorData.Get("message").String(),
"type": errorData.Get("type").String(),
},
}
errorJSON, _ := json.Marshal(errorResponse)
return []string{string(errorJSON)}
}
return []string{}
default:
// Unknown event type - ignore
return []string{}
}
}

// mapAnthropicStopReasonToOpenAI maps Anthropic stop reasons to OpenAI finish reasons
func mapAnthropicStopReasonToOpenAI(anthropicReason string) string {
	switch anthropicReason {
	case "end_turn":
		return "stop"
	case "tool_use":
		return "tool_calls"
	case "max_tokens":
		return "length"
	case "stop_sequence":
		return "stop"
	default:
		return "stop"
	}
}
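
// exampleStreamTranslation is a minimal sketch of how a caller might drive
// ConvertAnthropicResponseToOpenAI over a sequence of already-decoded Anthropic
// SSE event payloads. The single params struct is reused for the whole stream so
// tool-call state accumulates correctly; the "data: " prefixing is an assumption
// about how the caller frames its own SSE response body.
func exampleStreamTranslation(events [][]byte) []string {
	var lines []string
	param := &ConvertAnthropicResponseToOpenAIParams{}
	for _, event := range events {
		for _, chunk := range ConvertAnthropicResponseToOpenAI(event, param) {
			lines = append(lines, "data: "+strings.TrimRight(chunk, "\n"))
		}
	}
	return lines
}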

// ConvertAnthropicStreamingResponseToOpenAINonStream aggregates streaming chunks into a single non-streaming response
// following the OpenAI Chat Completions API format, with reasoning content support
func ConvertAnthropicStreamingResponseToOpenAINonStream(chunks [][]byte) string {
	// Base OpenAI non-streaming response template
	out := `{"id":"","object":"chat.completion","created":0,"model":"","choices":[{"index":0,"message":{"role":"assistant","content":""},"finish_reason":"stop"}],"usage":{"prompt_tokens":0,"completion_tokens":0,"total_tokens":0}}`

	var messageID string
	var model string
	var createdAt int64
	var inputTokens, outputTokens int64
	var reasoningTokens int64
	var stopReason string
	var contentParts []string
	var reasoningParts []string
	// Use a map to track tool calls by index for proper merging
	toolCallsMap := make(map[int]map[string]interface{})
	// Track tool call argument accumulation (pointers, so the builders are never copied)
	toolCallArgsMap := make(map[int]*strings.Builder)

	for _, chunk := range chunks {
		root := gjson.ParseBytes(chunk)
		eventType := root.Get("type").String()
		switch eventType {
		case "message_start":
			if message := root.Get("message"); message.Exists() {
				messageID = message.Get("id").String()
				model = message.Get("model").String()
				createdAt = time.Now().Unix()
				if usage := message.Get("usage"); usage.Exists() {
					inputTokens = usage.Get("input_tokens").Int()
				}
			}
		case "content_block_start":
			// Handle different content block types
			if contentBlock := root.Get("content_block"); contentBlock.Exists() {
				blockType := contentBlock.Get("type").String()
				if blockType == "thinking" {
					// Start of thinking/reasoning content
					continue
				} else if blockType == "tool_use" {
					// Initialize tool call tracking
					index := int(root.Get("index").Int())
					toolCallsMap[index] = map[string]interface{}{
						"id":   contentBlock.Get("id").String(),
						"type": "function",
						"function": map[string]interface{}{
							"name":      contentBlock.Get("name").String(),
							"arguments": "",
						},
					}
					// Initialize the arguments builder for this tool call
					toolCallArgsMap[index] = &strings.Builder{}
				}
			}
		case "content_block_delta":
			if delta := root.Get("delta"); delta.Exists() {
				deltaType := delta.Get("type").String()
				switch deltaType {
				case "text_delta":
					if text := delta.Get("text"); text.Exists() {
						contentParts = append(contentParts, text.String())
					}
				case "thinking_delta":
					// Anthropic thinking content -> OpenAI reasoning content
					if thinking := delta.Get("thinking"); thinking.Exists() {
						reasoningParts = append(reasoningParts, thinking.String())
					}
				case "input_json_delta":
					// Accumulate tool call arguments
					if partialJSON := delta.Get("partial_json"); partialJSON.Exists() {
						index := int(root.Get("index").Int())
						if builder, exists := toolCallArgsMap[index]; exists {
							builder.WriteString(partialJSON.String())
						}
					}
				}
			}
		case "content_block_stop":
			// Finalize tool call arguments for this index
			index := int(root.Get("index").Int())
			if toolCall, exists := toolCallsMap[index]; exists {
				if builder, argsExists := toolCallArgsMap[index]; argsExists {
					// Set the accumulated arguments
					arguments := builder.String()
					if arguments == "" {
						arguments = "{}"
					}
					toolCall["function"].(map[string]interface{})["arguments"] = arguments
				}
			}
		case "message_delta":
			if delta := root.Get("delta"); delta.Exists() {
				if sr := delta.Get("stop_reason"); sr.Exists() {
					stopReason = sr.String()
				}
			}
			if usage := root.Get("usage"); usage.Exists() {
				outputTokens = usage.Get("output_tokens").Int()
				// Estimate reasoning tokens from thinking content (rough: ~4 characters per token)
				if len(reasoningParts) > 0 {
					reasoningTokens = int64(len(strings.Join(reasoningParts, "")) / 4)
				}
			}
		}
	}

	// Set basic response fields
	out, _ = sjson.Set(out, "id", messageID)
	out, _ = sjson.Set(out, "created", createdAt)
	out, _ = sjson.Set(out, "model", model)

	// Set message content
	messageContent := strings.Join(contentParts, "")
	out, _ = sjson.Set(out, "choices.0.message.content", messageContent)

	// Add reasoning content if available (following OpenAI reasoning format)
	if len(reasoningParts) > 0 {
		reasoningContent := strings.Join(reasoningParts, "")
		// Add reasoning as a separate field in the message
		out, _ = sjson.Set(out, "choices.0.message.reasoning", reasoningContent)
	}

	// Set tool calls if any
	if len(toolCallsMap) > 0 {
		// Convert the tool calls map to an array, preserving order by index
		var toolCallsArray []interface{}
		// Find the maximum index to determine the range
		maxIndex := -1
		for index := range toolCallsMap {
			if index > maxIndex {
				maxIndex = index
			}
		}
		// Iterate through all possible indices up to maxIndex
		for i := 0; i <= maxIndex; i++ {
			if toolCall, exists := toolCallsMap[i]; exists {
				toolCallsArray = append(toolCallsArray, toolCall)
			}
		}
		if len(toolCallsArray) > 0 {
			out, _ = sjson.Set(out, "choices.0.message.tool_calls", toolCallsArray)
			out, _ = sjson.Set(out, "choices.0.finish_reason", "tool_calls")
		} else {
			out, _ = sjson.Set(out, "choices.0.finish_reason", mapAnthropicStopReasonToOpenAI(stopReason))
		}
	} else {
		out, _ = sjson.Set(out, "choices.0.finish_reason", mapAnthropicStopReasonToOpenAI(stopReason))
	}

	// Set usage information
	totalTokens := inputTokens + outputTokens
	out, _ = sjson.Set(out, "usage.prompt_tokens", inputTokens)
	out, _ = sjson.Set(out, "usage.completion_tokens", outputTokens)
	out, _ = sjson.Set(out, "usage.total_tokens", totalTokens)
	// Add reasoning tokens to usage details if available
	if reasoningTokens > 0 {
		out, _ = sjson.Set(out, "usage.completion_tokens_details.reasoning_tokens", reasoningTokens)
	}
	return out
}
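
// exampleAggregation is a minimal sketch of the non-streaming path: it feeds a few
// illustrative Anthropic streaming events through
// ConvertAnthropicStreamingResponseToOpenAINonStream and reads the aggregated
// OpenAI-format fields back with gjson. The event payloads and model name are
// examples only, not fixtures from this package.
func exampleAggregation() (content, finishReason string, totalTokens int64) {
	chunks := [][]byte{
		[]byte(`{"type":"message_start","message":{"id":"msg_01","model":"claude-sonnet-4","usage":{"input_tokens":12}}}`),
		[]byte(`{"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hi there"}}`),
		[]byte(`{"type":"message_delta","delta":{"stop_reason":"end_turn"},"usage":{"output_tokens":3}}`),
	}
	out := ConvertAnthropicStreamingResponseToOpenAINonStream(chunks)
	content = gjson.Get(out, "choices.0.message.content").String()    // "Hi there"
	finishReason = gjson.Get(out, "choices.0.finish_reason").String() // "stop"
	totalTokens = gjson.Get(out, "usage.total_tokens").Int()          // 15
	return content, finishReason, totalTokens
}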