**fix(claude translator): ensure default token counts when usage data is missing**

author Luis Pater
date   2025-11-16 13:18:21 +08:00
parent faa483b57d
commit 9875565339


@@ -284,29 +284,36 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
     // Handle usage information separately (this comes in a later chunk)
     // Only process if usage has actual values (not null)
-    if usage := root.Get("usage"); usage.Exists() && usage.Type != gjson.Null && param.FinishReason != "" {
-        // Check if usage has actual token counts
-        promptTokens := usage.Get("prompt_tokens")
-        completionTokens := usage.Get("completion_tokens")
-        if promptTokens.Exists() && completionTokens.Exists() {
-            // Send message_delta with usage
-            messageDelta := map[string]interface{}{
-                "type": "message_delta",
-                "delta": map[string]interface{}{
-                    "stop_reason":   mapOpenAIFinishReasonToAnthropic(param.FinishReason),
-                    "stop_sequence": nil,
-                },
-                "usage": map[string]interface{}{
-                    "input_tokens":  promptTokens.Int(),
-                    "output_tokens": completionTokens.Int(),
-                },
-            }
-            messageDeltaJSON, _ := json.Marshal(messageDelta)
-            results = append(results, "event: message_delta\ndata: "+string(messageDeltaJSON)+"\n\n")
-            param.MessageDeltaSent = true
-        }
+    if param.FinishReason != "" {
+        usage := root.Get("usage")
+        var inputTokens, outputTokens int64
+        if usage.Exists() && usage.Type != gjson.Null {
+            // Check if usage has actual token counts
+            promptTokens := usage.Get("prompt_tokens")
+            completionTokens := usage.Get("completion_tokens")
+            if promptTokens.Exists() && completionTokens.Exists() {
+                inputTokens = promptTokens.Int()
+                outputTokens = completionTokens.Int()
+            }
+        }
+        // Send message_delta with usage
+        messageDelta := map[string]interface{}{
+            "type": "message_delta",
+            "delta": map[string]interface{}{
+                "stop_reason":   mapOpenAIFinishReasonToAnthropic(param.FinishReason),
+                "stop_sequence": nil,
+            },
+            "usage": map[string]interface{}{
+                "input_tokens":  inputTokens,
+                "output_tokens": outputTokens,
+            },
+        }
+        messageDeltaJSON, _ := json.Marshal(messageDelta)
+        results = append(results, "event: message_delta\ndata: "+string(messageDeltaJSON)+"\n\n")
+        param.MessageDeltaSent = true
     }
     return results
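
For context, a minimal standalone sketch of what the streaming path now does when the final chunk carries a finish reason but a null `usage` object: the `message_delta` event is still emitted, with token counts falling back to zero instead of the event being skipped. The sample chunk, the hard-coded `end_turn` stop reason, and the `main` wrapper are illustrative assumptions, not code from this repository.

```go
// Sketch of the streaming fallback: null "usage" yields zeroed token counts.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	// Final streaming chunk: finish reason present, usage explicitly null.
	chunk := `{"choices":[{"finish_reason":"stop"}],"usage":null}`
	root := gjson.Parse(chunk)

	var inputTokens, outputTokens int64
	if usage := root.Get("usage"); usage.Exists() && usage.Type != gjson.Null {
		inputTokens = usage.Get("prompt_tokens").Int()
		outputTokens = usage.Get("completion_tokens").Int()
	}

	messageDelta := map[string]interface{}{
		"type": "message_delta",
		"delta": map[string]interface{}{
			"stop_reason":   "end_turn", // stand-in for mapOpenAIFinishReasonToAnthropic("stop")
			"stop_sequence": nil,
		},
		"usage": map[string]interface{}{
			"input_tokens":  inputTokens,
			"output_tokens": outputTokens,
		},
	}
	payload, _ := json.Marshal(messageDelta)
	// Emits a message_delta event whose usage reports 0 input and 0 output tokens.
	fmt.Printf("event: message_delta\ndata: %s\n\n", payload)
}
```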
@@ -413,6 +420,11 @@ func convertOpenAINonStreamingToAnthropic(rawJSON []byte) []string {
             "input_tokens":  usage.Get("prompt_tokens").Int(),
             "output_tokens": usage.Get("completion_tokens").Int(),
         }
+    } else {
+        response["usage"] = map[string]interface{}{
+            "input_tokens":  0,
+            "output_tokens": 0,
+        }
     }
     responseJSON, _ := json.Marshal(response)
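
Similarly, a short sketch of the non-streaming fallback: when the upstream reply has no `usage` block at all, the converted response still carries a zeroed usage object. The placeholder response fields and sample payload are assumptions for illustration only.

```go
// Sketch of the non-streaming fallback: missing "usage" defaults to zeros.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	upstream := `{"choices":[{"message":{"content":"hi"}}]}` // no "usage" field
	response := map[string]interface{}{
		"type": "message",
		"role": "assistant",
	}
	if usage := gjson.Get(upstream, "usage"); usage.Exists() {
		response["usage"] = map[string]interface{}{
			"input_tokens":  usage.Get("prompt_tokens").Int(),
			"output_tokens": usage.Get("completion_tokens").Int(),
		}
	} else {
		response["usage"] = map[string]interface{}{
			"input_tokens":  0,
			"output_tokens": 0,
		}
	}
	out, _ := json.Marshal(response)
	fmt.Println(string(out)) // ..."usage":{"input_tokens":0,"output_tokens":0}
}
```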
@@ -601,6 +613,8 @@ func ConvertOpenAIResponseToClaudeNonStream(_ context.Context, _ string, origina
         usageJSON, _ = sjson.Set(usageJSON, "output_tokens", respUsage.Get("completion_tokens").Int())
         parsedUsage := gjson.Parse(usageJSON).Value().(map[string]interface{})
         response["usage"] = parsedUsage
+    } else {
+        response["usage"] = `{"input_tokens":0,"output_tokens":0}`
     }
     if response["stop_reason"] == nil {