Mirror of https://github.com/router-for-me/CLIProxyAPI.git, synced 2026-02-03 04:50:52 +08:00

Commit: v6 version first commit
@@ -7,8 +7,8 @@ import (
"fmt"
"strings"

- "github.com/luispater/CLIProxyAPI/v5/internal/misc"
- "github.com/luispater/CLIProxyAPI/v5/internal/util"
+ "github.com/router-for-me/CLIProxyAPI/v6/internal/misc"
+ "github.com/router-for-me/CLIProxyAPI/v6/internal/util"
log "github.com/sirupsen/logrus"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@@ -25,6 +25,7 @@ import (
// Returns:
// - []byte: The transformed request data in Gemini API format
func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool) []byte {
+ log.Debug("ConvertOpenAIRequestToGemini")
rawJSON := bytes.Clone(inputRawJSON)
// Base envelope
out := []byte(`{"contents":[],"generationConfig":{"thinkingConfig":{"include_thoughts":true}}}`)
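The converter grows this base envelope by appending content nodes with sjson's "-1" array path. A minimal, self-contained sketch of that append pattern, using the envelope literal from the diff (the surrounding main function and the sample user message are illustrative only):

package main

import (
	"fmt"

	"github.com/tidwall/sjson"
)

func main() {
	// Base envelope, as in the converter above.
	out := []byte(`{"contents":[],"generationConfig":{"thinkingConfig":{"include_thoughts":true}}}`)

	// The "-1" path key appends a new element to the end of the array.
	node := []byte(`{"role":"user","parts":[{"text":""}]}`)
	node, _ = sjson.SetBytes(node, "parts.0.text", "Hello")
	out, _ = sjson.SetRawBytes(out, "contents.-1", node)

	fmt.Println(string(out))
	// {"contents":[{"role":"user","parts":[{"text":"Hello"}]}],"generationConfig":...}
}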
@@ -170,6 +171,31 @@ func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool)
node := []byte(`{"role":"model","parts":[{"text":""}]}`)
node, _ = sjson.SetBytes(node, "parts.0.text", content.String())
out, _ = sjson.SetRawBytes(out, "contents.-1", node)
+ } else if content.IsArray() {
+ // Assistant multimodal content (e.g. text + image) -> single model content with parts
+ node := []byte(`{"role":"model","parts":[]}`)
+ p := 0
+ for _, item := range content.Array() {
+ switch item.Get("type").String() {
+ case "text":
+ node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".text", item.Get("text").String())
+ p++
+ case "image_url":
+ // If the assistant returned an inline data URL, preserve it for history fidelity.
+ imageURL := item.Get("image_url.url").String()
+ if len(imageURL) > 5 { // expect data:...
+ pieces := strings.SplitN(imageURL[5:], ";", 2)
+ if len(pieces) == 2 && len(pieces[1]) > 7 {
+ mime := pieces[0]
+ data := pieces[1][7:]
+ node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".inlineData.mime_type", mime)
+ node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".inlineData.data", data)
+ p++
+ }
+ }
+ }
+ }
+ out, _ = sjson.SetRawBytes(out, "contents.-1", node)
} else if !content.Exists() || content.Type == gjson.Null {
// Tool calls -> single model content with functionCall parts
tcs := m.Get("tool_calls")
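The image_url branch above turns an OpenAI-style data URL back into Gemini inlineData: url[5:] drops the "data:" scheme and pieces[1][7:] drops the "base64," prefix. A minimal sketch of that parsing step as a standalone helper (splitDataURL is a hypothetical name, not part of the diff), assuming the usual data:<mime>;base64,<payload> shape:

package main

import (
	"fmt"
	"strings"
)

// splitDataURL mirrors the slicing in the converter: strip "data:",
// split on the first ";", then strip "base64," from the payload half.
func splitDataURL(url string) (mime, data string, ok bool) {
	if !strings.HasPrefix(url, "data:") {
		return "", "", false
	}
	pieces := strings.SplitN(url[5:], ";", 2)
	if len(pieces) != 2 || !strings.HasPrefix(pieces[1], "base64,") {
		return "", "", false
	}
	return pieces[0], pieces[1][len("base64,"):], true
}

func main() {
	mime, data, ok := splitDataURL("data:image/png;base64,iVBORw0KGgo=")
	fmt.Println(mime, data, ok) // image/png iVBORw0KGgo= true
}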
@@ -8,9 +8,11 @@ package chat_completions
import (
"bytes"
"context"
"encoding/json"
"fmt"
"time"

log "github.com/sirupsen/logrus"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
)
@@ -35,6 +37,7 @@ type convertGeminiResponseToOpenAIChatParams struct {
// Returns:
// - []string: A slice of strings, each containing an OpenAI-compatible JSON response
func ConvertGeminiResponseToOpenAI(_ context.Context, _ string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, param *any) []string {
+ log.Debug("ConvertGeminiResponseToOpenAI")
if *param == nil {
*param = &convertGeminiResponseToOpenAIChatParams{
UnixTimestamp: 0,
@@ -99,6 +102,10 @@ func ConvertGeminiResponseToOpenAI(_ context.Context, _ string, originalRequestR
partResult := partResults[i]
partTextResult := partResult.Get("text")
functionCallResult := partResult.Get("functionCall")
+ inlineDataResult := partResult.Get("inlineData")
+ if !inlineDataResult.Exists() {
+ inlineDataResult = partResult.Get("inline_data")
+ }

if partTextResult.Exists() {
// Handle text content, distinguishing between regular content and reasoning/thoughts.
@@ -124,6 +131,34 @@ func ConvertGeminiResponseToOpenAI(_ context.Context, _ string, originalRequestR
}
template, _ = sjson.Set(template, "choices.0.delta.role", "assistant")
template, _ = sjson.SetRaw(template, "choices.0.delta.tool_calls.-1", functionCallTemplate)
+ } else if inlineDataResult.Exists() {
+ data := inlineDataResult.Get("data").String()
+ if data == "" {
+ continue
+ }
+ mimeType := inlineDataResult.Get("mimeType").String()
+ if mimeType == "" {
+ mimeType = inlineDataResult.Get("mime_type").String()
+ }
+ if mimeType == "" {
+ mimeType = "image/png"
+ }
+ imageURL := fmt.Sprintf("data:%s;base64,%s", mimeType, data)
+ imagePayload, err := json.Marshal(map[string]any{
+ "type": "image_url",
+ "image_url": map[string]string{
+ "url": imageURL,
+ },
+ })
+ if err != nil {
+ continue
+ }
+ imagesResult := gjson.Get(template, "choices.0.delta.images")
+ if !imagesResult.Exists() || !imagesResult.IsArray() {
+ template, _ = sjson.SetRaw(template, "choices.0.delta.images", `[]`)
+ }
+ template, _ = sjson.Set(template, "choices.0.delta.role", "assistant")
+ template, _ = sjson.SetRaw(template, "choices.0.delta.images.-1", string(imagePayload))
}
}
}
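The streaming branch above has to create choices.0.delta.images before it can append to it, because the delta template does not carry that field. A minimal sketch of that ensure-then-append pattern with gjson/sjson (the template literal and payload here are trimmed-down stand-ins, not the full delta template from the code):

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

func main() {
	// A trimmed-down stand-in for the streaming delta template.
	template := `{"choices":[{"index":0,"delta":{"role":"assistant"}}]}`

	// Create the array only if it is missing, then append with the "-1" key.
	if images := gjson.Get(template, "choices.0.delta.images"); !images.Exists() || !images.IsArray() {
		template, _ = sjson.SetRaw(template, "choices.0.delta.images", `[]`)
	}
	payload := `{"type":"image_url","image_url":{"url":"data:image/png;base64,iVBORw0KGgo="}}`
	template, _ = sjson.SetRaw(template, "choices.0.delta.images.-1", payload)

	fmt.Println(template)
}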
@@ -145,6 +180,7 @@ func ConvertGeminiResponseToOpenAI(_ context.Context, _ string, originalRequestR
// Returns:
// - string: An OpenAI-compatible JSON response containing all message content and metadata
func ConvertGeminiResponseToOpenAINonStream(_ context.Context, _ string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, _ *any) string {
+ log.Debug("ConvertGeminiResponseToOpenAINonStream")
var unixTimestamp int64
template := `{"id":"","object":"chat.completion","created":123456,"model":"model","choices":[{"index":0,"message":{"role":"assistant","content":null,"reasoning_content":null,"tool_calls":null},"finish_reason":null,"native_finish_reason":null}]}`
if modelVersionResult := gjson.GetBytes(rawJSON, "modelVersion"); modelVersionResult.Exists() {
@@ -193,6 +229,10 @@ func ConvertGeminiResponseToOpenAINonStream(_ context.Context, _ string, origina
partResult := partsResults[i]
partTextResult := partResult.Get("text")
functionCallResult := partResult.Get("functionCall")
+ inlineDataResult := partResult.Get("inlineData")
+ if !inlineDataResult.Exists() {
+ inlineDataResult = partResult.Get("inline_data")
+ }

if partTextResult.Exists() {
// Append text content, distinguishing between regular content and reasoning.
@@ -217,9 +257,34 @@ func ConvertGeminiResponseToOpenAINonStream(_ context.Context, _ string, origina
}
template, _ = sjson.Set(template, "choices.0.message.role", "assistant")
template, _ = sjson.SetRaw(template, "choices.0.message.tool_calls.-1", functionCallItemTemplate)
- } else {
- // If no usable content is found, return an empty string.
- return ""
+ } else if inlineDataResult.Exists() {
+ data := inlineDataResult.Get("data").String()
+ if data == "" {
+ continue
+ }
+ mimeType := inlineDataResult.Get("mimeType").String()
+ if mimeType == "" {
+ mimeType = inlineDataResult.Get("mime_type").String()
+ }
+ if mimeType == "" {
+ mimeType = "image/png"
+ }
+ imageURL := fmt.Sprintf("data:%s;base64,%s", mimeType, data)
+ imagePayload, err := json.Marshal(map[string]any{
+ "type": "image_url",
+ "image_url": map[string]string{
+ "url": imageURL,
+ },
+ })
+ if err != nil {
+ continue
+ }
+ imagesResult := gjson.Get(template, "choices.0.message.images")
+ if !imagesResult.Exists() || !imagesResult.IsArray() {
+ template, _ = sjson.SetRaw(template, "choices.0.message.images", `[]`)
+ }
+ template, _ = sjson.Set(template, "choices.0.message.role", "assistant")
+ template, _ = sjson.SetRaw(template, "choices.0.message.images.-1", string(imagePayload))
}
}
}
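Both converters accept either inlineData/mimeType (camelCase) or inline_data/mime_type (snake_case), and default to image/png when no MIME type is present. A sketch of that normalization as a standalone helper (the helper name inlinePart is illustrative, not part of the diff; the key names and the fallback mirror the code above):

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

// inlinePart extracts base64 data and MIME type from a Gemini part,
// trying the camelCase keys first and falling back to snake_case,
// then to image/png when no MIME type is present.
func inlinePart(part gjson.Result) (mimeType, data string, ok bool) {
	inline := part.Get("inlineData")
	if !inline.Exists() {
		inline = part.Get("inline_data")
	}
	data = inline.Get("data").String()
	if data == "" {
		return "", "", false
	}
	mimeType = inline.Get("mimeType").String()
	if mimeType == "" {
		mimeType = inline.Get("mime_type").String()
	}
	if mimeType == "" {
		mimeType = "image/png"
	}
	return mimeType, data, true
}

func main() {
	part := gjson.Parse(`{"inline_data":{"mime_type":"image/jpeg","data":"Zm9v"}}`)
	fmt.Println(inlinePart(part)) // image/jpeg Zm9v true
}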
@@ -1,9 +1,9 @@
package chat_completions

import (
- . "github.com/luispater/CLIProxyAPI/v5/internal/constant"
- "github.com/luispater/CLIProxyAPI/v5/internal/interfaces"
- "github.com/luispater/CLIProxyAPI/v5/internal/translator/translator"
+ . "github.com/router-for-me/CLIProxyAPI/v6/internal/constant"
+ "github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
+ "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/translator"
)

func init() {
@@ -4,11 +4,13 @@ import (
"bytes"
"strings"

log "github.com/sirupsen/logrus"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
)

func ConvertOpenAIResponsesRequestToGemini(modelName string, inputRawJSON []byte, stream bool) []byte {
+ log.Debug("ConvertOpenAIResponsesRequestToGemini")
rawJSON := bytes.Clone(inputRawJSON)

// Note: modelName and stream parameters are part of the fixed method signature
@@ -1,11 +1,13 @@
package responses

import (
"bytes"
"context"
"fmt"
"strings"
"time"

log "github.com/sirupsen/logrus"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
)
@@ -37,11 +39,12 @@ type geminiToResponsesState struct {
}

func emitEvent(event string, payload string) string {
- return fmt.Sprintf("event: %s\ndata: %s\n\n", event, payload)
+ return fmt.Sprintf("event: %s\ndata: %s", event, payload)
}

// ConvertGeminiResponseToOpenAIResponses converts Gemini SSE chunks into OpenAI Responses SSE events.
func ConvertGeminiResponseToOpenAIResponses(_ context.Context, modelName string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, param *any) []string {
+ log.Debug("ConvertGeminiResponseToOpenAIResponses")
if *param == nil {
*param = &geminiToResponsesState{
FuncArgsBuf: make(map[int]*strings.Builder),
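emitEvent now returns the event without the trailing blank line, so the "\n\n" record separator required by Server-Sent Events presumably has to be appended by whichever layer writes the event to the client; that writer is not part of this diff. The sketch below only illustrates the framing, with a hypothetical writeSSE helper standing in for that layer:

package main

import (
	"fmt"
	"io"
	"os"
)

// emit mirrors the new emitEvent: event name and data line, no record separator.
func emit(event, payload string) string {
	return fmt.Sprintf("event: %s\ndata: %s", event, payload)
}

// writeSSE is a hypothetical writer: SSE records are terminated by a blank
// line, so the separator is added here rather than in emit.
func writeSSE(w io.Writer, record string) error {
	_, err := fmt.Fprintf(w, "%s\n\n", record)
	return err
}

func main() {
	_ = writeSSE(os.Stdout, emit("response.output_text.delta", `{"delta":"Hi"}`))
}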
@@ -51,6 +54,10 @@ func ConvertGeminiResponseToOpenAIResponses(_ context.Context, modelName string,
}
st := (*param).(*geminiToResponsesState)

+ if bytes.HasPrefix(rawJSON, []byte("data:")) {
+ rawJSON = bytes.TrimSpace(rawJSON[5:])
+ }

root := gjson.ParseBytes(rawJSON)
if !root.Exists() {
return []string{}
@@ -417,6 +424,7 @@ func ConvertGeminiResponseToOpenAIResponses(_ context.Context, modelName string,

// ConvertGeminiResponseToOpenAIResponsesNonStream aggregates Gemini response JSON into a single OpenAI Responses JSON object.
func ConvertGeminiResponseToOpenAIResponsesNonStream(_ context.Context, _ string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, _ *any) string {
+ log.Debug("ConvertGeminiResponseToOpenAIResponsesNonStream")
root := gjson.ParseBytes(rawJSON)

// Base response scaffold
@@ -456,7 +464,7 @@ func ConvertGeminiResponseToOpenAIResponsesNonStream(_ context.Context, _ string
}
if v := req.Get("model"); v.Exists() {
resp, _ = sjson.Set(resp, "model", v.String())
- } else if v := root.Get("modelVersion"); v.Exists() {
+ } else if v = root.Get("modelVersion"); v.Exists() {
resp, _ = sjson.Set(resp, "model", v.String())
}
if v := req.Get("parallel_tool_calls"); v.Exists() {
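The ":=" to "=" change above avoids re-declaring v in the else-if: a variable declared in an if init statement stays in scope for every else-if/else branch of the same chain, so plain assignment reuses it instead of shadowing it. A small self-contained illustration of that scoping rule (the sample values are made up):

package main

import "fmt"

func pick(primary, fallback string) string {
	out := "default"
	if v := primary; v != "" {
		out = v
	} else if v = fallback; v != "" { // plain "=" reuses the v declared above; ":=" would shadow it
		out = v
	}
	return out
}

func main() {
	fmt.Println(pick("", "gemini-2.0-flash")) // gemini-2.0-flash
}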
@@ -1,9 +1,9 @@
package responses

import (
- . "github.com/luispater/CLIProxyAPI/v5/internal/constant"
- "github.com/luispater/CLIProxyAPI/v5/internal/interfaces"
- "github.com/luispater/CLIProxyAPI/v5/internal/translator/translator"
+ . "github.com/router-for-me/CLIProxyAPI/v6/internal/constant"
+ "github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
+ "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/translator"
)

func init() {