Add OpenAI Responses support

commit 3acdec51bd (parent e11637dc62)
Author: Luis Pater
Date:   2025-09-01 17:38:24 +08:00

54 changed files with 2394 additions and 144 deletions

View File

@@ -34,7 +34,8 @@ import (
 //
 // Returns:
 //   - []byte: The transformed request data in Gemini CLI API format
-func ConvertClaudeRequestToCLI(modelName string, rawJSON []byte, _ bool) []byte {
+func ConvertClaudeRequestToCLI(modelName string, inputRawJSON []byte, _ bool) []byte {
+	rawJSON := bytes.Clone(inputRawJSON)
 	var pathsToDelete []string
 	root := gjson.ParseBytes(rawJSON)
 	util.Walk(root, "", "additionalProperties", &pathsToDelete)
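
A note on the bytes.Clone guard: the raw request bytes now outlive this call (presumably the same buffers travel on to the response translators as originalRequestRawJSON/requestRawJSON), so the converter should not edit the caller's slice in place. A minimal standalone sketch of what the copy buys (example code, not from this repository):

package main

import (
	"bytes"
	"fmt"
)

func main() {
	original := []byte(`{"model":"claude"}`)

	// Without a copy, writing through an alias mutates the caller's buffer.
	alias := original
	alias[2] = 'M'
	fmt.Println(string(original)) // {"Model":"claude"} -- the caller sees the change

	// bytes.Clone copies the bytes, so later edits stay local to the translator.
	original = []byte(`{"model":"claude"}`)
	local := bytes.Clone(original)
	local[2] = 'M'
	fmt.Println(string(original)) // {"model":"claude"} -- the caller's buffer is untouched
}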

View File

@@ -41,7 +41,7 @@ type Params struct {
 //
 // Returns:
 //   - []string: A slice of strings, each containing a Claude Code-compatible JSON response
-func ConvertGeminiCLIResponseToClaude(_ context.Context, _ string, rawJSON []byte, param *any) []string {
+func ConvertGeminiCLIResponseToClaude(_ context.Context, _ string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, param *any) []string {
 	if *param == nil {
 		*param = &Params{
 			HasFirstResponse: false,
@@ -251,6 +251,6 @@ func ConvertGeminiCLIResponseToClaude(_ context.Context, _ string, rawJSON []byte, param *any) []string {
 //
 // Returns:
 //   - string: A Claude-compatible JSON response.
-func ConvertGeminiCLIResponseToClaudeNonStream(_ context.Context, _ string, _ []byte, _ *any) string {
+func ConvertGeminiCLIResponseToClaudeNonStream(_ context.Context, _ string, originalRequestRawJSON, requestRawJSON, _ []byte, _ *any) string {
 	return ""
 }
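
All response converters in this commit move to the same widened parameter list: the client's original request, the upstream request actually sent, and the response payload travel together, with param keeping per-stream state. A hedged sketch of the shared shape (the type names below are illustrative, not taken from the repository):

package translator

import "context"

// StreamTranslator is an illustrative name for the widened streaming
// signature: it returns one or more translated JSON chunks per upstream chunk.
type StreamTranslator func(
	ctx context.Context,
	modelName string,
	originalRequestRawJSON, requestRawJSON, rawJSON []byte,
	param *any,
) []string

// NonStreamTranslator is the non-streaming counterpart, returning a single
// aggregated JSON response.
type NonStreamTranslator func(
	ctx context.Context,
	modelName string,
	originalRequestRawJSON, requestRawJSON, rawJSON []byte,
	param *any,
) string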

View File

@@ -6,6 +6,7 @@
 package gemini

 import (
+	"bytes"
 	"encoding/json"
 	"fmt"
@@ -30,7 +31,8 @@ import (
 //
 // Returns:
 //   - []byte: The transformed request data in Gemini API format
-func ConvertGeminiRequestToGeminiCLI(_ string, rawJSON []byte, _ bool) []byte {
+func ConvertGeminiRequestToGeminiCLI(_ string, inputRawJSON []byte, _ bool) []byte {
+	rawJSON := bytes.Clone(inputRawJSON)
 	template := ""
 	template = `{"project":"","request":{},"model":""}`
 	template, _ = sjson.SetRaw(template, "request", string(rawJSON))
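
Past the lines shown here, the function splices the cloned Gemini request into the CLI envelope with sjson. Roughly, the wrapping looks like this (a simplified sketch; the model and project assignments are placeholders, not the repository's actual logic):

package main

import (
	"fmt"

	"github.com/tidwall/sjson"
)

func main() {
	rawJSON := []byte(`{"contents":[{"role":"user","parts":[{"text":"hi"}]}]}`)

	// Start from the empty CLI envelope and splice the original request in
	// under "request", then fill the remaining top-level fields.
	template := `{"project":"","request":{},"model":""}`
	template, _ = sjson.SetRaw(template, "request", string(rawJSON))
	template, _ = sjson.Set(template, "model", "gemini-2.5-pro")    // placeholder
	template, _ = sjson.Set(template, "project", "example-project") // placeholder
	fmt.Println(template)
}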

View File

@@ -28,7 +28,7 @@ import (
 //
 // Returns:
 //   - []string: The transformed request data in Gemini API format
-func ConvertGeminiCliRequestToGemini(ctx context.Context, _ string, rawJSON []byte, _ *any) []string {
+func ConvertGeminiCliRequestToGemini(ctx context.Context, _ string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, _ *any) []string {
 	if alt, ok := ctx.Value("alt").(string); ok {
 		var chunk []byte
 		if alt == "" {
@@ -67,7 +67,7 @@ func ConvertGeminiCliRequestToGemini(ctx context.Context, _ string, rawJSON []byte, _ *any) []string {
 //
 // Returns:
 //   - string: A Gemini-compatible JSON response containing the response data
-func ConvertGeminiCliRequestToGeminiNonStream(_ context.Context, _ string, rawJSON []byte, _ *any) string {
+func ConvertGeminiCliRequestToGeminiNonStream(_ context.Context, _ string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, _ *any) string {
 	responseResult := gjson.GetBytes(rawJSON, "response")
 	if responseResult.Exists() {
 		return responseResult.Raw
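
Both converters above start by peeling the Gemini CLI wrapper off the payload: CLI responses nest the Gemini response under a top-level "response" key. The same unwrap step, isolated into a helper (the helper is ours, for illustration only):

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

// unwrapResponse returns the nested "response" object when the payload is
// CLI-wrapped, and the payload unchanged otherwise.
func unwrapResponse(rawJSON []byte) []byte {
	responseResult := gjson.GetBytes(rawJSON, "response")
	if responseResult.Exists() {
		return []byte(responseResult.Raw)
	}
	return rawJSON
}

func main() {
	wrapped := []byte(`{"response":{"candidates":[{"content":{"parts":[{"text":"ok"}]}}]}}`)
	fmt.Println(string(unwrapResponse(wrapped)))                     // inner Gemini response
	fmt.Println(string(unwrapResponse([]byte(`{"candidates":[]}`)))) // already unwrapped
}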

View File

@@ -3,6 +3,7 @@
 package chat_completions

 import (
+	"bytes"
 	"fmt"
 	"strings"
@@ -22,7 +23,8 @@ import (
 //
 // Returns:
 //   - []byte: The transformed request data in Gemini CLI API format
-func ConvertOpenAIRequestToGeminiCLI(modelName string, rawJSON []byte, _ bool) []byte {
+func ConvertOpenAIRequestToGeminiCLI(modelName string, inputRawJSON []byte, _ bool) []byte {
+	rawJSON := bytes.Clone(inputRawJSON)
 	// Base envelope
 	out := []byte(`{"project":"","request":{"contents":[],"generationConfig":{"thinkingConfig":{"include_thoughts":true}}},"model":"gemini-2.5-pro"}`)
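
From this base envelope the converter fills request.contents with the translated OpenAI messages. A rough sketch of that kind of append with sjson, where "-1" addresses the slot past the end of the array (the message mapping here is simplified, not the repository's actual conversion):

package main

import (
	"fmt"

	"github.com/tidwall/sjson"
)

func main() {
	// Base envelope from the diff above: empty contents plus thinking config.
	out := []byte(`{"project":"","request":{"contents":[],"generationConfig":{"thinkingConfig":{"include_thoughts":true}}},"model":"gemini-2.5-pro"}`)

	// Append one user turn into request.contents; each "-1" call appends
	// another element to the array.
	turn := `{"role":"user","parts":[{"text":"Hello"}]}`
	out, _ = sjson.SetRawBytes(out, "request.contents.-1", []byte(turn))

	fmt.Println(string(out))
}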

View File

@@ -35,7 +35,7 @@ type convertCliResponseToOpenAIChatParams struct {
 //
 // Returns:
 //   - []string: A slice of strings, each containing an OpenAI-compatible JSON response
-func ConvertCliResponseToOpenAI(_ context.Context, _ string, rawJSON []byte, param *any) []string {
+func ConvertCliResponseToOpenAI(_ context.Context, _ string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, param *any) []string {
 	if *param == nil {
 		*param = &convertCliResponseToOpenAIChatParams{
 			UnixTimestamp: 0,
@@ -145,10 +145,10 @@ func ConvertCliResponseToOpenAI(_ context.Context, _ string, rawJSON []byte, param *any) []string {
 //
 // Returns:
 //   - string: An OpenAI-compatible JSON response containing all message content and metadata
-func ConvertCliResponseToOpenAINonStream(ctx context.Context, modelName string, rawJSON []byte, param *any) string {
+func ConvertCliResponseToOpenAINonStream(ctx context.Context, modelName string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, param *any) string {
 	responseResult := gjson.GetBytes(rawJSON, "response")
 	if responseResult.Exists() {
-		return ConvertGeminiResponseToOpenAINonStream(ctx, modelName, []byte(responseResult.Raw), param)
+		return ConvertGeminiResponseToOpenAINonStream(ctx, modelName, originalRequestRawJSON, requestRawJSON, []byte(responseResult.Raw), param)
 	}
 	return ""
 }
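
The *param argument carries per-stream state: it starts out nil, is populated on the first chunk, and is then reused for every later chunk of the same response. A stripped-down sketch of the pattern (the struct and its ChunkCount field are illustrative; only UnixTimestamp appears in the diff):

package main

import "fmt"

// streamState stands in for convertCliResponseToOpenAIChatParams: whatever
// the converter needs to remember between chunks of one streaming response.
type streamState struct {
	UnixTimestamp int64
	ChunkCount    int
}

// handleChunk lazily allocates the state on the first chunk and reuses it on
// later calls, because the caller passes the same *any for the whole stream.
func handleChunk(param *any, chunk string) {
	if *param == nil {
		*param = &streamState{UnixTimestamp: 0}
	}
	state := (*param).(*streamState)
	state.ChunkCount++
	fmt.Printf("chunk %d: %s\n", state.ChunkCount, chunk)
}

func main() {
	var param any
	handleChunk(&param, `{"text":"Hel"}`)
	handleChunk(&param, `{"text":"lo"}`)
}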

View File

@@ -1,19 +1,14 @@
 package responses

 import (
+	"bytes"
+
 	. "github.com/luispater/CLIProxyAPI/internal/translator/gemini-cli/gemini"
 	. "github.com/luispater/CLIProxyAPI/internal/translator/gemini/openai/responses"
-	"github.com/tidwall/gjson"
-	"github.com/tidwall/sjson"
 )

-func ConvertOpenAIResponsesRequestToGeminiCLI(modelName string, rawJSON []byte, stream bool) []byte {
-	modelResult := gjson.GetBytes(rawJSON, "model")
-	rawJSON = []byte(gjson.GetBytes(rawJSON, "request").Raw)
-	rawJSON, _ = sjson.SetBytes(rawJSON, "model", modelResult.String())
-	if gjson.GetBytes(rawJSON, "systemInstruction").Exists() {
-		rawJSON, _ = sjson.SetRawBytes(rawJSON, "system_instruction", []byte(gjson.GetBytes(rawJSON, "systemInstruction").Raw))
-		rawJSON, _ = sjson.DeleteBytes(rawJSON, "systemInstruction")
-	}
-	return ConvertOpenAIResponsesRequestToGemini(modelName, rawJSON, stream)
+func ConvertOpenAIResponsesRequestToGeminiCLI(modelName string, inputRawJSON []byte, stream bool) []byte {
+	rawJSON := bytes.Clone(inputRawJSON)
+	rawJSON = ConvertOpenAIResponsesRequestToGemini(modelName, rawJSON, stream)
+	return ConvertGeminiRequestToGeminiCLI(modelName, rawJSON, stream)
 }
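
Design note: the previous version unwrapped the CLI "request" envelope and renamed systemInstruction by hand; the rewrite instead chains the two existing converters (OpenAI Responses to Gemini, then Gemini to Gemini CLI), so envelope handling lives in one place and the local gjson/sjson imports are no longer needed.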

View File

@@ -1,11 +1,35 @@
 package responses

-import "context"
+import (
+	"context"
+
+	. "github.com/luispater/CLIProxyAPI/internal/translator/gemini/openai/responses"
+	"github.com/tidwall/gjson"
+)

-func ConvertGeminiCLIResponseToOpenAIResponses(_ context.Context, modelName string, rawJSON []byte, param *any) []string {
-	return nil
+func ConvertGeminiCLIResponseToOpenAIResponses(ctx context.Context, modelName string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, param *any) []string {
+	responseResult := gjson.GetBytes(rawJSON, "response")
+	if responseResult.Exists() {
+		rawJSON = []byte(responseResult.Raw)
+	}
+	return ConvertGeminiResponseToOpenAIResponses(ctx, modelName, originalRequestRawJSON, requestRawJSON, rawJSON, param)
 }

-func ConvertGeminiCLIResponseToOpenAIResponsesNonStream(_ context.Context, _ string, rawJSON []byte, _ *any) string {
-	return ""
+func ConvertGeminiCLIResponseToOpenAIResponsesNonStream(ctx context.Context, modelName string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, param *any) string {
+	responseResult := gjson.GetBytes(rawJSON, "response")
+	if responseResult.Exists() {
+		rawJSON = []byte(responseResult.Raw)
+	}
+	requestResult := gjson.GetBytes(originalRequestRawJSON, "request")
+	if responseResult.Exists() {
+		originalRequestRawJSON = []byte(requestResult.Raw)
+	}
+	requestResult = gjson.GetBytes(requestRawJSON, "request")
+	if responseResult.Exists() {
+		requestRawJSON = []byte(requestResult.Raw)
+	}
+	return ConvertGeminiResponseToOpenAIResponsesNonStream(ctx, modelName, originalRequestRawJSON, requestRawJSON, rawJSON, param)
 }
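
Note that when the payload carries the CLI "response" wrapper, the non-stream converter also strips the matching "request" wrapper from both request copies before delegating, so the plain Gemini translator receives unwrapped Gemini-shaped JSON on every argument. (The request unwrapping is keyed on responseResult.Exists(), i.e. on the payload being CLI-wrapped at all.)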