Mirror of https://github.com/router-for-me/CLIProxyAPI.git, synced 2026-02-19 12:50:51 +08:00
Fix responses-format handling for chat completions
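The diff below does two things: it adds a regression test that feeds a captured Responses-format request through the Responses-to-Chat-Completions converter and the Codex translator, and it teaches the /v1/chat/completions handler to detect Responses-style bodies and convert them before routing. For context, the two request shapes differ in their top-level keys: a Chat Completions body carries a "messages" array, while a Responses body uses "input" and/or "instructions". The standalone sketch below restates that detection rule outside the handler; the sample payloads and the looksLikeResponsesFormat name are illustrative only and not part of the commit.

// Minimal sketch of the detection heuristic introduced by this commit.
// The payloads and function name are illustrative; the real check is
// shouldTreatAsResponsesFormat in the handler hunk further down.
package main

import (
    "fmt"

    "github.com/tidwall/gjson"
)

// looksLikeResponsesFormat mirrors the handler's rule: no "messages" key,
// but an "input" or "instructions" key, means the body is Responses-format.
func looksLikeResponsesFormat(raw []byte) bool {
    if gjson.GetBytes(raw, "messages").Exists() {
        return false
    }
    return gjson.GetBytes(raw, "input").Exists() || gjson.GetBytes(raw, "instructions").Exists()
}

func main() {
    chatBody := []byte(`{"model":"gpt-4o","messages":[{"role":"user","content":"hi"}]}`)
    responsesBody := []byte(`{"model":"gpt-4o","instructions":"be brief","input":[{"role":"user","content":"hi"}]}`)

    fmt.Println(looksLikeResponsesFormat(chatBody))      // false: already Chat Completions
    fmt.Println(looksLikeResponsesFormat(responsesBody)) // true: convert before dispatching
}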
@@ -0,0 +1,43 @@
+package chat_completions
+
+import (
+    "os"
+    "strings"
+    "testing"
+
+    responsesconverter "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/openai/openai/responses"
+    "github.com/tidwall/gjson"
+)
+
+func TestResponsesPayloadToolsArePreserved(t *testing.T) {
+    data, err := os.ReadFile("../../../../../error1.log")
+    if err != nil {
+        t.Fatalf("read log: %v", err)
+    }
+
+    var requestLine string
+    for _, line := range strings.Split(string(data), "\n") {
+        trimmed := strings.TrimSpace(line)
+        if strings.HasPrefix(trimmed, "{\"user\"") {
+            requestLine = trimmed
+            break
+        }
+    }
+    if requestLine == "" {
+        t.Fatalf("failed to extract request body from log")
+    }
+
+    raw := []byte(requestLine)
+    chatPayload := responsesconverter.ConvertOpenAIResponsesRequestToOpenAIChatCompletions("gpt-5.1-codex-max(xhigh)", raw, true)
+    codexPayload := ConvertOpenAIRequestToCodex("gpt-5.1-codex-max(xhigh)", chatPayload, true)
+
+    tools := gjson.GetBytes(codexPayload, "tools")
+    if !tools.IsArray() || len(tools.Array()) == 0 {
+        t.Fatalf("expected tools array, got: %s", tools.Raw)
+    }
+    for i, tool := range tools.Array() {
+        if name := strings.TrimSpace(tool.Get("name").String()); name == "" {
+            t.Fatalf("tool %d missing name after conversion: %s", i, tool.Raw)
+        }
+    }
+}
@@ -17,6 +17,7 @@ import (
     . "github.com/router-for-me/CLIProxyAPI/v6/internal/constant"
     "github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
     "github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
+    responsesconverter "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/openai/openai/responses"
     "github.com/router-for-me/CLIProxyAPI/v6/sdk/api/handlers"
     "github.com/tidwall/gjson"
     "github.com/tidwall/sjson"
@@ -109,7 +110,17 @@ func (h *OpenAIAPIHandler) ChatCompletions(c *gin.Context) {

     // Check if the client requested a streaming response.
     streamResult := gjson.GetBytes(rawJSON, "stream")
-    if streamResult.Type == gjson.True {
+    stream := streamResult.Type == gjson.True
+
+    // Some clients send OpenAI Responses-format payloads to /v1/chat/completions.
+    // Convert them to Chat Completions so downstream translators preserve tool metadata.
+    if shouldTreatAsResponsesFormat(rawJSON) {
+        modelName := gjson.GetBytes(rawJSON, "model").String()
+        rawJSON = responsesconverter.ConvertOpenAIResponsesRequestToOpenAIChatCompletions(modelName, rawJSON, stream)
+        stream = gjson.GetBytes(rawJSON, "stream").Bool()
+    }
+
+    if stream {
         h.handleStreamingResponse(c, rawJSON)
     } else {
         h.handleNonStreamingResponse(c, rawJSON)
@@ -117,6 +128,21 @@ func (h *OpenAIAPIHandler) ChatCompletions(c *gin.Context) {

 }

+// shouldTreatAsResponsesFormat detects OpenAI Responses-style payloads that are
+// accidentally sent to the Chat Completions endpoint.
+func shouldTreatAsResponsesFormat(rawJSON []byte) bool {
+    if gjson.GetBytes(rawJSON, "messages").Exists() {
+        return false
+    }
+    if gjson.GetBytes(rawJSON, "input").Exists() {
+        return true
+    }
+    if gjson.GetBytes(rawJSON, "instructions").Exists() {
+        return true
+    }
+    return false
+}
+
 // Completions handles the /v1/completions endpoint.
 // It determines whether the request is for a streaming or non-streaming response
 // and calls the appropriate handler based on the model provider.
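A companion unit test for the new helper could look like the following sketch. It assumes the test file sits in the same package as the OpenAI handler so the unexported shouldTreatAsResponsesFormat is visible; the package clause is a placeholder for that package's actual name, and the table cases simply mirror the branches of the function in the last hunk.

// Hypothetical table-driven test for shouldTreatAsResponsesFormat.
// Assumption: this lives alongside the handler; "handlerpkg" stands in for
// the handler package's real name.
package handlerpkg

import "testing"

func TestShouldTreatAsResponsesFormat(t *testing.T) {
    cases := []struct {
        name string
        body string
        want bool
    }{
        {"chat completions body", `{"model":"m","messages":[{"role":"user","content":"hi"}]}`, false},
        {"responses body with input", `{"model":"m","input":[{"role":"user","content":"hi"}]}`, true},
        {"responses body with instructions only", `{"model":"m","instructions":"be brief"}`, true},
        {"neither shape", `{"model":"m"}`, false},
    }
    for _, tc := range cases {
        t.Run(tc.name, func(t *testing.T) {
            if got := shouldTreatAsResponsesFormat([]byte(tc.body)); got != tc.want {
                t.Fatalf("shouldTreatAsResponsesFormat(%s) = %v, want %v", tc.body, got, tc.want)
            }
        })
    }
}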