From 1f46dc271539c734fc4fbef8c7ad49ceb2518b87 Mon Sep 17 00:00:00 2001
From: Luis Pater
Date: Mon, 1 Sep 2025 08:37:41 +0800
Subject: [PATCH] Add OpenAI Responses translator stubs

- Added `openai/responses` translator packages for the Claude, Gemini,
  Gemini CLI, and OpenAI Chat Completions backends, each containing stub
  request/response converters.
- Registered each converter set with the translator registry under the
  `OPENAI_RESPONSE` identifier in the package's `init.go`.
- Imported the new packages in `internal/translator/init.go` so their
  `init` functions run alongside the existing `openai/chat-completions`
  translators.
---
 .../claude_openai-responses_request.go   |  5 +++++
 .../claude_openai-responses_response.go  | 11 +++++++++++
 .../claude/openai/responses/init.go      | 19 +++++++++++++++++++
 .../gemini_openai-responses_request.go   |  5 +++++
 .../gemini_openai-responses_response.go  | 11 +++++++++++
 .../gemini-cli/openai/responses/init.go  | 19 +++++++++++++++++++
 .../gemini_openai-responses_request.go   |  5 +++++
 .../gemini_openai-responses_response.go  | 11 +++++++++++
 .../gemini/openai/responses/init.go      | 19 +++++++++++++++++++
 internal/translator/init.go              |  3 +++
 .../gemini_openai-responses_request.go   |  5 +++++
 .../gemini_openai-responses_response.go  | 11 +++++++++++
 .../openai/openai/responses/init.go      | 19 +++++++++++++++++++
 13 files changed, 143 insertions(+)
 create mode 100644 internal/translator/claude/openai/responses/claude_openai-responses_request.go
 create mode 100644 internal/translator/claude/openai/responses/claude_openai-responses_response.go
 create mode 100644 internal/translator/claude/openai/responses/init.go
 create mode 100644 internal/translator/gemini-cli/openai/responses/gemini_openai-responses_request.go
 create mode 100644 internal/translator/gemini-cli/openai/responses/gemini_openai-responses_response.go
 create mode 100644 internal/translator/gemini-cli/openai/responses/init.go
 create mode 100644 internal/translator/gemini/openai/responses/gemini_openai-responses_request.go
 create mode 100644 internal/translator/gemini/openai/responses/gemini_openai-responses_response.go
 create mode 100644 internal/translator/gemini/openai/responses/init.go
 create mode 100644 internal/translator/openai/openai/responses/gemini_openai-responses_request.go
 create mode 100644 internal/translator/openai/openai/responses/gemini_openai-responses_response.go
 create mode 100644 internal/translator/openai/openai/responses/init.go

diff --git a/internal/translator/claude/openai/responses/claude_openai-responses_request.go b/internal/translator/claude/openai/responses/claude_openai-responses_request.go
new file mode 100644
index 00000000..e1a2e431
--- /dev/null
+++ b/internal/translator/claude/openai/responses/claude_openai-responses_request.go
@@ -0,0 +1,5 @@
+package responses
+
+func ConvertOpenAIResponsesRequestToClaude(modelName string, rawJSON []byte, stream bool) []byte {
+	return nil
+}
diff --git a/internal/translator/claude/openai/responses/claude_openai-responses_response.go b/internal/translator/claude/openai/responses/claude_openai-responses_response.go
new file mode 100644
index 00000000..f5f2afaa
--- /dev/null
+++ b/internal/translator/claude/openai/responses/claude_openai-responses_response.go
@@ -0,0 +1,11 @@
+package responses
+
+import "context"
+
+func ConvertClaudeResponseToOpenAIResponses(_ context.Context, modelName string, rawJSON []byte, param *any) []string {
+	return nil
+}
+
+func ConvertClaudeResponseToOpenAIResponsesNonStream(_ context.Context, _ string, rawJSON []byte, _ *any) string {
+	return ""
+}
diff --git a/internal/translator/claude/openai/responses/init.go b/internal/translator/claude/openai/responses/init.go
new file mode 100644
index 00000000..c233af44
--- /dev/null
+++ b/internal/translator/claude/openai/responses/init.go
@@ -0,0 +1,19 @@
+package responses
+
+import (
+	. "github.com/luispater/CLIProxyAPI/internal/constant"
+	"github.com/luispater/CLIProxyAPI/internal/interfaces"
+	"github.com/luispater/CLIProxyAPI/internal/translator/translator"
+)
+
+func init() {
+	translator.Register(
+		OPENAI_RESPONSE,
+		CLAUDE,
+		ConvertOpenAIResponsesRequestToClaude,
+		interfaces.TranslateResponse{
+			Stream:    ConvertClaudeResponseToOpenAIResponses,
+			NonStream: ConvertClaudeResponseToOpenAIResponsesNonStream,
+		},
+	)
+}
diff --git a/internal/translator/gemini-cli/openai/responses/gemini_openai-responses_request.go b/internal/translator/gemini-cli/openai/responses/gemini_openai-responses_request.go
new file mode 100644
index 00000000..6d4e740d
--- /dev/null
+++ b/internal/translator/gemini-cli/openai/responses/gemini_openai-responses_request.go
@@ -0,0 +1,5 @@
+package responses
+
+func ConvertOpenAIResponsesRequestToGeminiCLI(modelName string, rawJSON []byte, stream bool) []byte {
+	return nil
+}
diff --git a/internal/translator/gemini-cli/openai/responses/gemini_openai-responses_response.go b/internal/translator/gemini-cli/openai/responses/gemini_openai-responses_response.go
new file mode 100644
index 00000000..2e1f8fd3
--- /dev/null
+++ b/internal/translator/gemini-cli/openai/responses/gemini_openai-responses_response.go
@@ -0,0 +1,11 @@
+package responses
+
+import "context"
+
+func ConvertGeminiCLIResponseToOpenAIResponses(_ context.Context, modelName string, rawJSON []byte, param *any) []string {
+	return nil
+}
+
+func ConvertGeminiCLIResponseToOpenAIResponsesNonStream(_ context.Context, _ string, rawJSON []byte, _ *any) string {
+	return ""
+}
diff --git a/internal/translator/gemini-cli/openai/responses/init.go b/internal/translator/gemini-cli/openai/responses/init.go
new file mode 100644
index 00000000..2aa3dc86
--- /dev/null
+++ b/internal/translator/gemini-cli/openai/responses/init.go
@@ -0,0 +1,19 @@
+package responses
+
+import (
"github.com/luispater/CLIProxyAPI/internal/constant" + "github.com/luispater/CLIProxyAPI/internal/interfaces" + "github.com/luispater/CLIProxyAPI/internal/translator/translator" +) + +func init() { + translator.Register( + OPENAI_RESPONSE, + GEMINICLI, + ConvertOpenAIResponsesRequestToGeminiCLI, + interfaces.TranslateResponse{ + Stream: ConvertGeminiCLIResponseToOpenAIResponses, + NonStream: ConvertGeminiCLIResponseToOpenAIResponsesNonStream, + }, + ) +} diff --git a/internal/translator/gemini/openai/responses/gemini_openai-responses_request.go b/internal/translator/gemini/openai/responses/gemini_openai-responses_request.go new file mode 100644 index 00000000..86c6158d --- /dev/null +++ b/internal/translator/gemini/openai/responses/gemini_openai-responses_request.go @@ -0,0 +1,5 @@ +package responses + +func ConvertOpenAIResponsesRequestToGemini(modelName string, rawJSON []byte, stream bool) []byte { + return nil +} diff --git a/internal/translator/gemini/openai/responses/gemini_openai-responses_response.go b/internal/translator/gemini/openai/responses/gemini_openai-responses_response.go new file mode 100644 index 00000000..11544e43 --- /dev/null +++ b/internal/translator/gemini/openai/responses/gemini_openai-responses_response.go @@ -0,0 +1,11 @@ +package responses + +import "context" + +func ConvertGeminiResponseToOpenAIResponses(_ context.Context, modelName string, rawJSON []byte, param *any) []string { + return nil +} + +func ConvertGeminiResponseToOpenAIResponsesNonStream(_ context.Context, _ string, rawJSON []byte, _ *any) string { + return "" +} diff --git a/internal/translator/gemini/openai/responses/init.go b/internal/translator/gemini/openai/responses/init.go new file mode 100644 index 00000000..6017cec4 --- /dev/null +++ b/internal/translator/gemini/openai/responses/init.go @@ -0,0 +1,19 @@ +package responses + +import ( + . 
"github.com/luispater/CLIProxyAPI/internal/constant" + "github.com/luispater/CLIProxyAPI/internal/interfaces" + "github.com/luispater/CLIProxyAPI/internal/translator/translator" +) + +func init() { + translator.Register( + OPENAI_RESPONSE, + GEMINI, + ConvertOpenAIResponsesRequestToGemini, + interfaces.TranslateResponse{ + Stream: ConvertGeminiResponseToOpenAIResponses, + NonStream: ConvertGeminiResponseToOpenAIResponsesNonStream, + }, + ) +} diff --git a/internal/translator/init.go b/internal/translator/init.go index f74e70a4..42db67a2 100644 --- a/internal/translator/init.go +++ b/internal/translator/init.go @@ -4,6 +4,7 @@ import ( _ "github.com/luispater/CLIProxyAPI/internal/translator/claude/gemini" _ "github.com/luispater/CLIProxyAPI/internal/translator/claude/gemini-cli" _ "github.com/luispater/CLIProxyAPI/internal/translator/claude/openai/chat-completions" + _ "github.com/luispater/CLIProxyAPI/internal/translator/claude/openai/responses" _ "github.com/luispater/CLIProxyAPI/internal/translator/codex/claude" _ "github.com/luispater/CLIProxyAPI/internal/translator/codex/gemini" _ "github.com/luispater/CLIProxyAPI/internal/translator/codex/gemini-cli" @@ -11,10 +12,12 @@ import ( _ "github.com/luispater/CLIProxyAPI/internal/translator/gemini-cli/claude" _ "github.com/luispater/CLIProxyAPI/internal/translator/gemini-cli/gemini" _ "github.com/luispater/CLIProxyAPI/internal/translator/gemini-cli/openai/chat-completions" + _ "github.com/luispater/CLIProxyAPI/internal/translator/gemini-cli/openai/responses" _ "github.com/luispater/CLIProxyAPI/internal/translator/gemini/claude" _ "github.com/luispater/CLIProxyAPI/internal/translator/gemini/gemini" _ "github.com/luispater/CLIProxyAPI/internal/translator/gemini/gemini-cli" _ "github.com/luispater/CLIProxyAPI/internal/translator/gemini/openai/chat-completions" + _ "github.com/luispater/CLIProxyAPI/internal/translator/gemini/openai/responses" _ "github.com/luispater/CLIProxyAPI/internal/translator/openai/claude" _ "github.com/luispater/CLIProxyAPI/internal/translator/openai/gemini" _ "github.com/luispater/CLIProxyAPI/internal/translator/openai/gemini-cli" diff --git a/internal/translator/openai/openai/responses/gemini_openai-responses_request.go b/internal/translator/openai/openai/responses/gemini_openai-responses_request.go new file mode 100644 index 00000000..b186369f --- /dev/null +++ b/internal/translator/openai/openai/responses/gemini_openai-responses_request.go @@ -0,0 +1,5 @@ +package responses + +func ConvertOpenAIResponsesRequestToOpenAIChatCompletions(modelName string, rawJSON []byte, stream bool) []byte { + return nil +} diff --git a/internal/translator/openai/openai/responses/gemini_openai-responses_response.go b/internal/translator/openai/openai/responses/gemini_openai-responses_response.go new file mode 100644 index 00000000..ab4592eb --- /dev/null +++ b/internal/translator/openai/openai/responses/gemini_openai-responses_response.go @@ -0,0 +1,11 @@ +package responses + +import "context" + +func ConvertOpenAIChatCompletionsResponseToOpenAIResponses(_ context.Context, modelName string, rawJSON []byte, param *any) []string { + return nil +} + +func ConvertOpenAIChatCompletionsResponseToOpenAIResponsesNonStream(_ context.Context, _ string, rawJSON []byte, _ *any) string { + return "" +} diff --git a/internal/translator/openai/openai/responses/init.go b/internal/translator/openai/openai/responses/init.go new file mode 100644 index 00000000..611f6ed3 --- /dev/null +++ b/internal/translator/openai/openai/responses/init.go @@ -0,0 +1,19 
+package responses
+
+import (
+	. "github.com/luispater/CLIProxyAPI/internal/constant"
+	"github.com/luispater/CLIProxyAPI/internal/interfaces"
+	"github.com/luispater/CLIProxyAPI/internal/translator/translator"
+)
+
+func init() {
+	translator.Register(
+		OPENAI_RESPONSE,
+		OPENAI,
+		ConvertOpenAIResponsesRequestToOpenAIChatCompletions,
+		interfaces.TranslateResponse{
+			Stream:    ConvertOpenAIChatCompletionsResponseToOpenAIResponses,
+			NonStream: ConvertOpenAIChatCompletionsResponseToOpenAIResponsesNonStream,
+		},
+	)
+}
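
Note for reviewers: every converter added by this patch is a stub that returns nil or an empty string. As a point of reference, the sketch below shows one way the OpenAI-to-OpenAI request stub (ConvertOpenAIResponsesRequestToOpenAIChatCompletions) could later be filled in, mapping a Responses request carrying "model", "instructions", and a plain-string "input" onto a Chat Completions body. The field handling and the helper name are assumptions for illustration only, not code from this patch or from CLIProxyAPI.

// Illustrative sketch only; standard library JSON handling, not the committed stub.
package responses

import "encoding/json"

type chatMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// convertResponsesToChatCompletionsSketch maps the simplest Responses request
// shape onto a Chat Completions request body.
func convertResponsesToChatCompletionsSketch(modelName string, rawJSON []byte, stream bool) []byte {
	var in struct {
		Instructions string          `json:"instructions"`
		Input        json.RawMessage `json:"input"`
	}
	if err := json.Unmarshal(rawJSON, &in); err != nil {
		return nil
	}

	// The Responses "input" field may be a plain string or a structured list;
	// this sketch only handles the plain-string case and skips anything else.
	var userText string
	if len(in.Input) > 0 {
		_ = json.Unmarshal(in.Input, &userText)
	}

	messages := make([]chatMessage, 0, 2)
	if in.Instructions != "" {
		messages = append(messages, chatMessage{Role: "system", Content: in.Instructions})
	}
	if userText != "" {
		messages = append(messages, chatMessage{Role: "user", Content: userText})
	}

	out := map[string]any{
		"model":    modelName,
		"messages": messages,
		"stream":   stream,
	}
	payload, err := json.Marshal(out)
	if err != nil {
		return nil
	}
	return payload
}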
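
Going the other way, the non-stream response stubs will eventually have to re-wrap a backend reply as a Responses payload. Below is a minimal sketch for the Chat Completions case: it pulls the first choice's text and the token usage and re-emits them in a Responses-like envelope. The envelope fields follow the commonly documented shapes of both APIs and, like the helper name, are assumptions rather than code from this patch; the streaming variants, which return []string chunks, would instead need to emit incremental Responses events.

// Illustrative sketch only; not the committed stub.
package responses

import "encoding/json"

// convertChatCompletionsToResponsesSketch extracts the first choice's text and
// the token usage from a Chat Completions response, then re-emits them in a
// Responses-style envelope.
func convertChatCompletionsToResponsesSketch(rawJSON []byte) string {
	var in struct {
		ID      string `json:"id"`
		Model   string `json:"model"`
		Choices []struct {
			Message struct {
				Content string `json:"content"`
			} `json:"message"`
		} `json:"choices"`
		Usage struct {
			PromptTokens     int `json:"prompt_tokens"`
			CompletionTokens int `json:"completion_tokens"`
			TotalTokens      int `json:"total_tokens"`
		} `json:"usage"`
	}
	if err := json.Unmarshal(rawJSON, &in); err != nil {
		return ""
	}

	text := ""
	if len(in.Choices) > 0 {
		text = in.Choices[0].Message.Content
	}

	out := map[string]any{
		"id":     in.ID,
		"object": "response",
		"model":  in.Model,
		"output": []map[string]any{{
			"type": "message",
			"role": "assistant",
			"content": []map[string]any{{
				"type": "output_text",
				"text": text,
			}},
		}},
		"usage": map[string]any{
			"input_tokens":  in.Usage.PromptTokens,
			"output_tokens": in.Usage.CompletionTokens,
			"total_tokens":  in.Usage.TotalTokens,
		},
	}
	payload, err := json.Marshal(out)
	if err != nil {
		return ""
	}
	return string(payload)
}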