Mirror of https://github.com/router-for-me/CLIProxyAPI.git (synced 2026-02-02 12:30:50 +08:00)

Compare commits (8 commits)
SHA1:
e5a6fd2d4f
83a1fa618d
9253bdbf77
41effa5aeb
b07ed71de2
deaa64b080
d42384cdb7
24f243a1bc
@@ -554,7 +554,7 @@ func (c *GeminiCLIClient) SendRawMessageStream(ctx context.Context, modelName st
 	rawJSON, _ = sjson.SetBytes(rawJSON, "project", c.GetProjectID())
 	rawJSON, _ = sjson.SetBytes(rawJSON, "model", modelName)
 
-	dataTag := []byte("data: ")
+	dataTag := []byte("data:")
 	errChan := make(chan *interfaces.ErrorMessage)
 	dataChan := make(chan []byte)
 	// log.Debugf(string(rawJSON))
@@ -619,7 +619,7 @@ func (c *GeminiCLIClient) SendRawMessageStream(ctx context.Context, modelName st
 			for scanner.Scan() {
 				line := scanner.Bytes()
 				if bytes.HasPrefix(line, dataTag) {
-					lines := translator.Response(handlerType, c.Type(), newCtx, modelName, originalRequestRawJSON, rawJSON, line[6:], &param)
+					lines := translator.Response(handlerType, c.Type(), newCtx, modelName, originalRequestRawJSON, rawJSON, bytes.TrimSpace(line[5:]), &param)
 					for i := 0; i < len(lines); i++ {
 						dataChan <- []byte(lines[i])
 					}
@@ -630,7 +630,7 @@ func (c *GeminiCLIClient) SendRawMessageStream(ctx context.Context, modelName st
 			for scanner.Scan() {
 				line := scanner.Bytes()
 				if bytes.HasPrefix(line, dataTag) {
-					dataChan <- line[6:]
+					dataChan <- bytes.TrimSpace(line[5:])
 				}
 				c.AddAPIResponseData(ctx, line)
 			}
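The change repeated across these streaming clients is that the SSE prefix is now matched as the bare `data:` and the payload is taken with `bytes.TrimSpace(line[5:])`, so upstreams that write `data:{...}` without a space are handled the same as `data: {...}`. A minimal, self-contained sketch of that parsing pattern (the function name and sample stream below are illustrative, not taken from the repository):

```go
package main

import (
	"bufio"
	"bytes"
	"fmt"
	"strings"
)

// extractSSEData mirrors the pattern from the diff: match the bare "data:"
// prefix, drop the first five bytes, and trim whitespace so "data: {...}"
// and "data:{...}" both yield the same payload.
func extractSSEData(line []byte) ([]byte, bool) {
	dataTag := []byte("data:")
	if !bytes.HasPrefix(line, dataTag) {
		return nil, false
	}
	return bytes.TrimSpace(line[5:]), true
}

func main() {
	stream := "data: {\"a\":1}\ndata:{\"b\":2}\n: keep-alive comment\n"
	scanner := bufio.NewScanner(strings.NewReader(stream))
	for scanner.Scan() {
		if payload, ok := extractSSEData(scanner.Bytes()); ok {
			fmt.Printf("%s\n", payload) // prints the JSON payloads only
		}
	}
}
```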
@@ -33,6 +33,10 @@ type GeminiClient struct {
 	accountLabel string
 }
 
+var NanoBananaModel = map[string]struct{}{
+	"gemini-2.5-flash-image-preview": {},
+}
+
 // NewGeminiClient creates a client. Pass empty strings to auto-detect via browser cookies (not implemented in Go port).
 func NewGeminiClient(secure1psid string, secure1psidts string, proxy string, opts ...func(*GeminiClient)) *GeminiClient {
 	c := &GeminiClient{
@@ -239,6 +243,14 @@ func (c *GeminiClient) GenerateContent(prompt string, files []string, model Mode
 	}
 }
 
+func ensureAnyLen(slice []any, index int) []any {
+	if index < len(slice) {
+		return slice
+	}
+	gap := index + 1 - len(slice)
+	return append(slice, make([]any, gap)...)
+}
+
 func (c *GeminiClient) generateOnce(prompt string, files []string, model Model, gem *Gem, chat *ChatSession) (ModelOutput, error) {
 	var empty ModelOutput
 	// Build f.req
@@ -266,6 +278,14 @@ func (c *GeminiClient) generateOnce(prompt string, files []string, model Model,
 	}
 
 	inner := []any{item0, nil, item2}
+	requestedModel := strings.ToLower(model.Name)
+	if chat != nil && chat.RequestedModel() != "" {
+		requestedModel = chat.RequestedModel()
+	}
+	if _, ok := NanoBananaModel[requestedModel]; ok {
+		inner = ensureAnyLen(inner, 49)
+		inner[49] = 14
+	}
 	if gem != nil {
 		// pad with 16 nils then gem ID
 		for i := 0; i < 16; i++ {
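The two additions above work together: `ensureAnyLen` pads the positional `f.req` payload with nils until index 49 exists, and `generateOnce` then sets that slot to 14 when the requested model is the image-preview ("Nano Banana") alias. A small standalone check of the padding behavior (the `main` wrapper and sample values are illustrative):

```go
package main

import "fmt"

// ensureAnyLen, as added in the diff: grow the slice with nil entries until
// the requested index is addressable, leaving existing elements untouched.
func ensureAnyLen(slice []any, index int) []any {
	if index < len(slice) {
		return slice
	}
	gap := index + 1 - len(slice)
	return append(slice, make([]any, gap)...)
}

func main() {
	inner := []any{"item0", nil, "item2"}
	inner = ensureAnyLen(inner, 49)
	inner[49] = 14
	fmt.Println(len(inner), inner[49]) // 50 14
}
```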
@@ -674,16 +694,17 @@ func truncateForLog(s string, n int) string {
 
 // StartChat returns a ChatSession attached to the client
 func (c *GeminiClient) StartChat(model Model, gem *Gem, metadata []string) *ChatSession {
-	return &ChatSession{client: c, metadata: normalizeMeta(metadata), model: model, gem: gem}
+	return &ChatSession{client: c, metadata: normalizeMeta(metadata), model: model, gem: gem, requestedModel: strings.ToLower(model.Name)}
 }
 
 // ChatSession holds conversation metadata
 type ChatSession struct {
-	client *GeminiClient
-	metadata []string // cid, rid, rcid
-	lastOutput *ModelOutput
-	model Model
-	gem *Gem
+	client         *GeminiClient
+	metadata       []string // cid, rid, rcid
+	lastOutput     *ModelOutput
+	model          Model
+	gem            *Gem
+	requestedModel string
 }
 
 func (cs *ChatSession) String() string {
@@ -710,6 +731,10 @@ func normalizeMeta(v []string) []string {
 
 func (cs *ChatSession) Metadata() []string { return cs.metadata }
 func (cs *ChatSession) SetMetadata(v []string) { cs.metadata = normalizeMeta(v) }
+func (cs *ChatSession) RequestedModel() string { return cs.requestedModel }
+func (cs *ChatSession) SetRequestedModel(name string) {
+	cs.requestedModel = strings.ToLower(name)
+}
 func (cs *ChatSession) CID() string {
 	if len(cs.metadata) > 0 {
 		return cs.metadata[0]
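The new `requestedModel` field records, lower-cased, the model name the caller actually asked for, so `generateOnce` can detect the image-preview alias even though the underlying `Model` is the regular flash model. A toy version of the accessors (the struct here is stripped down to the one new field, purely for illustration):

```go
package main

import (
	"fmt"
	"strings"
)

// Stripped-down ChatSession holding only the field added in the diff.
type ChatSession struct {
	requestedModel string
}

func (cs *ChatSession) RequestedModel() string { return cs.requestedModel }

// SetRequestedModel stores the name lower-cased, matching the lookup key
// format used by the NanoBananaModel map.
func (cs *ChatSession) SetRequestedModel(name string) {
	cs.requestedModel = strings.ToLower(name)
}

func main() {
	cs := &ChatSession{}
	cs.SetRequestedModel("Gemini-2.5-Flash-Image-Preview")
	fmt.Println(cs.RequestedModel()) // gemini-2.5-flash-image-preview
}
```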
@@ -116,6 +116,8 @@ func EnsureGeminiWebAliasMap() {
 	for _, m := range registry.GetGeminiModels() {
 		if m.ID == "gemini-2.5-flash-lite" {
 			continue
+		} else if m.ID == "gemini-2.5-flash" {
+			GeminiWebAliasMap["gemini-2.5-flash-image-preview"] = "gemini-2.5-flash"
 		}
 		alias := AliasFromModelID(m.ID)
 		GeminiWebAliasMap[strings.ToLower(alias)] = strings.ToLower(m.ID)
@@ -130,6 +132,13 @@ func GetGeminiWebAliasedModels() []*registry.ModelInfo {
 	for _, m := range registry.GetGeminiModels() {
 		if m.ID == "gemini-2.5-flash-lite" {
 			continue
+		} else if m.ID == "gemini-2.5-flash" {
+			cpy := *m
+			cpy.ID = "gemini-2.5-flash-image-preview"
+			cpy.Name = "gemini-2.5-flash-image-preview"
+			cpy.DisplayName = "Nano Banana"
+			cpy.Description = "Gemini 2.5 Flash Preview Image"
+			aliased = append(aliased, &cpy)
 		}
 		cpy := *m
 		cpy.ID = AliasFromModelID(m.ID)
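These two hunks expose `gemini-2.5-flash-image-preview` as an alias that resolves to the plain `gemini-2.5-flash` backend model and advertise it in the model list as "Nano Banana". A sketch of the alias lookup this enables (map contents and function names below are illustrative, taken only from what the hunks show):

```go
package main

import (
	"fmt"
	"strings"
)

// Illustrative subset of the alias map built by EnsureGeminiWebAliasMap:
// the image-preview alias points at the plain flash model.
var geminiWebAliasMap = map[string]string{
	"gemini-2.5-flash":               "gemini-2.5-flash",
	"gemini-2.5-flash-image-preview": "gemini-2.5-flash",
}

// canProvideModel mirrors the case-insensitive lookup used by CanProvideModel.
func canProvideModel(name string) bool {
	_, ok := geminiWebAliasMap[strings.ToLower(name)]
	return ok
}

func main() {
	fmt.Println(canProvideModel("Gemini-2.5-Flash-Image-Preview")) // true
	fmt.Println(canProvideModel("gemini-2.5-flash-lite"))          // false
}
```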
@@ -207,7 +207,7 @@ func (c *GeminiWebClient) registerModelsOnce() {
 	if c.modelsRegistered {
 		return
 	}
-	c.RegisterModels(GEMINI, geminiWeb.GetGeminiWebAliasedModels())
+	c.RegisterModels(GEMINIWEB, geminiWeb.GetGeminiWebAliasedModels())
 	c.modelsRegistered = true
 }
 
@@ -219,8 +219,8 @@ func (c *GeminiWebClient) EnsureRegistered() {
 	}
 }
 
-func (c *GeminiWebClient) Type() string { return GEMINI }
-func (c *GeminiWebClient) Provider() string { return GEMINI }
+func (c *GeminiWebClient) Type() string { return GEMINIWEB }
+func (c *GeminiWebClient) Provider() string { return GEMINIWEB }
 func (c *GeminiWebClient) CanProvideModel(modelName string) bool {
 	geminiWeb.EnsureGeminiWebAliasMap()
 	_, ok := geminiWeb.GeminiWebAliasMap[strings.ToLower(modelName)]
@@ -394,6 +394,7 @@ func (c *GeminiWebClient) prepareChat(ctx context.Context, modelName string, raw
 	c.appendUpstreamRequestLog(ctx, modelName, res.tagged, true, res.prompt, len(uploadedFiles), res.reuse, res.metaLen)
 	gem := c.getConfiguredGem()
 	res.chat = c.gwc.StartChat(model, gem, meta)
+	res.chat.SetRequestedModel(modelName)
 	return res, nil
 }
 
@@ -298,7 +298,7 @@ func (c *GeminiClient) SendRawMessageStream(ctx context.Context, modelName strin
 	handlerType := handler.HandlerType()
 	rawJSON = translator.Request(handlerType, c.Type(), modelName, rawJSON, true)
 
-	dataTag := []byte("data: ")
+	dataTag := []byte("data:")
 	errChan := make(chan *interfaces.ErrorMessage)
 	dataChan := make(chan []byte)
 	// log.Debugf(string(rawJSON))
@@ -342,7 +342,7 @@ func (c *GeminiClient) SendRawMessageStream(ctx context.Context, modelName strin
 			for scanner.Scan() {
 				line := scanner.Bytes()
 				if bytes.HasPrefix(line, dataTag) {
-					lines := translator.Response(handlerType, c.Type(), newCtx, modelName, originalRequestRawJSON, rawJSON, line[6:], &param)
+					lines := translator.Response(handlerType, c.Type(), newCtx, modelName, originalRequestRawJSON, rawJSON, bytes.TrimSpace(line[5:]), &param)
 					for i := 0; i < len(lines); i++ {
 						dataChan <- []byte(lines[i])
 					}
@@ -353,7 +353,7 @@ func (c *GeminiClient) SendRawMessageStream(ctx context.Context, modelName strin
 			for scanner.Scan() {
 				line := scanner.Bytes()
 				if bytes.HasPrefix(line, dataTag) {
-					dataChan <- line[6:]
+					dataChan <- bytes.TrimSpace(line[5:])
 				}
 				c.AddAPIResponseData(ctx, line)
 			}
@@ -291,9 +291,8 @@ func (c *OpenAICompatibilityClient) SendRawMessageStream(ctx context.Context, mo
 	handlerType := handler.HandlerType()
 	rawJSON = translator.Request(handlerType, c.Type(), modelName, rawJSON, true)
 
-	dataTag := []byte("data: ")
-	dataUglyTag := []byte("data:") // Some APIs providers don't add space after "data:", fuck for them all
-	doneTag := []byte("data: [DONE]")
+	dataTag := []byte("data:")
+	doneTag := []byte("[DONE]")
 	errChan := make(chan *interfaces.ErrorMessage)
 	dataChan := make(chan []byte)
 	// log.Debugf(string(rawJSON))
@@ -332,19 +331,10 @@ func (c *OpenAICompatibilityClient) SendRawMessageStream(ctx context.Context, mo
 			for scanner.Scan() {
 				line := scanner.Bytes()
 				if bytes.HasPrefix(line, dataTag) {
-					if bytes.Equal(line, doneTag) {
+					if bytes.Equal(bytes.TrimSpace(line[5:]), doneTag) {
 						break
 					}
-					lines := translator.Response(handlerType, c.Type(), newCtx, modelName, originalRequestRawJSON, rawJSON, line[6:], &param)
-					for i := 0; i < len(lines); i++ {
-						c.AddAPIResponseData(ctx, line)
-						dataChan <- []byte(lines[i])
-					}
-				} else if bytes.HasPrefix(line, dataUglyTag) {
-					if bytes.Equal(line, doneTag) {
-						break
-					}
-					lines := translator.Response(handlerType, c.Type(), newCtx, modelName, originalRequestRawJSON, rawJSON, line[5:], &param)
+					lines := translator.Response(handlerType, c.Type(), newCtx, modelName, originalRequestRawJSON, rawJSON, bytes.TrimSpace(line[5:]), &param)
 					for i := 0; i < len(lines); i++ {
 						c.AddAPIResponseData(ctx, line)
 						dataChan <- []byte(lines[i])
@@ -356,13 +346,10 @@ func (c *OpenAICompatibilityClient) SendRawMessageStream(ctx context.Context, mo
 			for scanner.Scan() {
 				line := scanner.Bytes()
 				if bytes.HasPrefix(line, dataTag) {
-					if bytes.Equal(line, doneTag) {
+					if bytes.Equal(bytes.TrimSpace(line[5:]), doneTag) {
 						break
 					}
-					c.AddAPIResponseData(newCtx, line[6:])
-					dataChan <- line[6:]
-				} else if bytes.HasPrefix(line, dataUglyTag) {
-					c.AddAPIResponseData(newCtx, line[5:])
+					c.AddAPIResponseData(newCtx, bytes.TrimSpace(line[5:]))
 					dataChan <- line[5:]
 				}
 			}
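With the payload now extracted via `bytes.TrimSpace(line[5:])`, the `[DONE]` sentinel is compared against the bare marker, which is what lets the separate `dataUglyTag` branch for providers that omit the space be removed. A sketch of the resulting check (the function name is illustrative):

```go
package main

import (
	"bytes"
	"fmt"
)

// isDone reproduces the revised sentinel check: extract the payload after the
// bare "data:" prefix, trim it, and compare against "[DONE]".
func isDone(line []byte) bool {
	dataTag := []byte("data:")
	doneTag := []byte("[DONE]")
	if !bytes.HasPrefix(line, dataTag) {
		return false
	}
	return bytes.Equal(bytes.TrimSpace(line[5:]), doneTag)
}

func main() {
	fmt.Println(isDone([]byte("data: [DONE]"))) // true
	fmt.Println(isDone([]byte("data:[DONE]")))  // true
	fmt.Println(isDone([]byte("data: {}")))     // false
}
```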
@@ -215,8 +215,8 @@ func (c *QwenClient) SendRawMessageStream(ctx context.Context, modelName string,
 	handlerType := handler.HandlerType()
 	rawJSON = translator.Request(handlerType, c.Type(), modelName, rawJSON, true)
 
-	dataTag := []byte("data: ")
-	doneTag := []byte("data: [DONE]")
+	dataTag := []byte("data:")
+	doneTag := []byte("[DONE]")
 	errChan := make(chan *interfaces.ErrorMessage)
 	dataChan := make(chan []byte)
 
@@ -264,7 +264,7 @@ func (c *QwenClient) SendRawMessageStream(ctx context.Context, modelName string,
 			for scanner.Scan() {
 				line := scanner.Bytes()
 				if bytes.HasPrefix(line, dataTag) {
-					lines := translator.Response(handlerType, c.Type(), ctx, modelName, originalRequestRawJSON, rawJSON, line[6:], &param)
+					lines := translator.Response(handlerType, c.Type(), ctx, modelName, originalRequestRawJSON, rawJSON, bytes.TrimSpace(line[5:]), &param)
 					for i := 0; i < len(lines); i++ {
 						dataChan <- []byte(lines[i])
 					}
@@ -274,9 +274,9 @@ func (c *QwenClient) SendRawMessageStream(ctx context.Context, modelName string,
 		} else {
 			for scanner.Scan() {
 				line := scanner.Bytes()
-				if !bytes.HasPrefix(line, doneTag) {
-					if bytes.HasPrefix(line, dataTag) {
-						dataChan <- line[6:]
+				if bytes.HasPrefix(line, dataTag) {
+					if !bytes.Equal(bytes.TrimSpace(line[5:]), doneTag) {
+						dataChan <- bytes.TrimSpace(line[5:])
 					}
 				}
 				c.AddAPIResponseData(ctx, line)
@@ -3,6 +3,7 @@ package constant
 const (
 	GEMINI = "gemini"
 	GEMINICLI = "gemini-cli"
+	GEMINIWEB = "gemini-web"
 	CODEX = "codex"
 	CLAUDE = "claude"
 	OPENAI = "openai"
@@ -17,7 +17,7 @@ import (
 )
 
 var (
-	dataTag = []byte("data: ")
+	dataTag = []byte("data:")
 )
 
 // ConvertAnthropicResponseToGeminiParams holds parameters for response conversion
@@ -64,7 +64,7 @@ func ConvertClaudeResponseToGemini(_ context.Context, modelName string, original
 	if !bytes.HasPrefix(rawJSON, dataTag) {
 		return []string{}
 	}
-	rawJSON = rawJSON[6:]
+	rawJSON = bytes.TrimSpace(rawJSON[5:])
 
 	root := gjson.ParseBytes(rawJSON)
 	eventType := root.Get("type").String()
@@ -336,7 +336,7 @@ func ConvertClaudeResponseToGeminiNonStream(_ context.Context, modelName string,
 		line := scanner.Bytes()
 		// log.Debug(string(line))
 		if bytes.HasPrefix(line, dataTag) {
-			jsonData := line[6:]
+			jsonData := bytes.TrimSpace(line[5:])
 			streamingEvents = append(streamingEvents, jsonData)
 		}
 	}
@@ -18,7 +18,7 @@ import (
 )
 
 var (
-	dataTag = []byte("data: ")
+	dataTag = []byte("data:")
 )
 
 // ConvertAnthropicResponseToOpenAIParams holds parameters for response conversion
@@ -62,7 +62,7 @@ func ConvertClaudeResponseToOpenAI(_ context.Context, modelName string, original
 	if !bytes.HasPrefix(rawJSON, dataTag) {
 		return []string{}
 	}
-	rawJSON = rawJSON[6:]
+	rawJSON = bytes.TrimSpace(rawJSON[5:])
 
 	root := gjson.ParseBytes(rawJSON)
 	eventType := root.Get("type").String()
@@ -289,7 +289,7 @@ func ConvertClaudeResponseToOpenAINonStream(_ context.Context, _ string, origina
 		if !bytes.HasPrefix(line, dataTag) {
 			continue
 		}
-		chunks = append(chunks, line[6:])
+		chunks = append(chunks, bytes.TrimSpace(rawJSON[5:]))
 	}
 
 	// Base OpenAI non-streaming response template
@@ -34,7 +34,7 @@ type claudeToResponsesState struct {
 	ReasoningIndex int
 }
 
-var dataTag = []byte("data: ")
+var dataTag = []byte("data:")
 
 func emitEvent(event string, payload string) string {
 	return fmt.Sprintf("event: %s\ndata: %s\n\n", event, payload)
@@ -51,7 +51,7 @@ func ConvertClaudeResponseToOpenAIResponses(ctx context.Context, modelName strin
 	if !bytes.HasPrefix(rawJSON, dataTag) {
 		return []string{}
 	}
-	rawJSON = rawJSON[6:]
+	rawJSON = bytes.TrimSpace(rawJSON[5:])
 	root := gjson.ParseBytes(rawJSON)
 	ev := root.Get("type").String()
 	var out []string
@@ -16,7 +16,7 @@ import (
 )
 
 var (
-	dataTag = []byte("data: ")
+	dataTag = []byte("data:")
 )
 
 // ConvertCodexResponseToClaude performs sophisticated streaming response format conversion.
@@ -45,7 +45,7 @@ func ConvertCodexResponseToClaude(_ context.Context, _ string, originalRequestRa
 	if !bytes.HasPrefix(rawJSON, dataTag) {
 		return []string{}
 	}
-	rawJSON = rawJSON[6:]
+	rawJSON = bytes.TrimSpace(rawJSON[5:])
 
 	output := ""
 	rootResult := gjson.ParseBytes(rawJSON)
@@ -16,7 +16,7 @@ import (
 )
 
 var (
-	dataTag = []byte("data: ")
+	dataTag = []byte("data:")
 )
 
 // ConvertCodexResponseToGeminiParams holds parameters for response conversion.
@@ -53,7 +53,7 @@ func ConvertCodexResponseToGemini(_ context.Context, modelName string, originalR
 	if !bytes.HasPrefix(rawJSON, dataTag) {
 		return []string{}
 	}
-	rawJSON = rawJSON[6:]
+	rawJSON = bytes.TrimSpace(rawJSON[5:])
 
 	rootResult := gjson.ParseBytes(rawJSON)
 	typeResult := rootResult.Get("type")
@@ -161,7 +161,7 @@ func ConvertCodexResponseToGeminiNonStream(_ context.Context, modelName string,
 		if !bytes.HasPrefix(line, dataTag) {
 			continue
 		}
-		rawJSON = line[6:]
+		rawJSON = bytes.TrimSpace(rawJSON[5:])
 
 		rootResult := gjson.ParseBytes(rawJSON)
@@ -16,7 +16,7 @@ import (
 )
 
 var (
-	dataTag = []byte("data: ")
+	dataTag = []byte("data:")
 )
 
 // ConvertCliToOpenAIParams holds parameters for response conversion.
@@ -54,7 +54,7 @@ func ConvertCodexResponseToOpenAI(_ context.Context, modelName string, originalR
 	if !bytes.HasPrefix(rawJSON, dataTag) {
 		return []string{}
 	}
-	rawJSON = rawJSON[6:]
+	rawJSON = bytes.TrimSpace(rawJSON[5:])
 
 	// Initialize the OpenAI SSE template.
 	template := `{"id":"","object":"chat.completion.chunk","created":12345,"model":"model","choices":[{"index":0,"delta":{"role":null,"content":null,"reasoning_content":null,"tool_calls":null},"finish_reason":null,"native_finish_reason":null}]}`
@@ -175,7 +175,7 @@ func ConvertCodexResponseToOpenAINonStream(_ context.Context, _ string, original
 		if !bytes.HasPrefix(line, dataTag) {
 			continue
 		}
-		rawJSON = line[6:]
+		rawJSON = bytes.TrimSpace(rawJSON[5:])
 
 		rootResult := gjson.ParseBytes(rawJSON)
 		// Verify this is a response.completed event
@@ -13,8 +13,8 @@ import (
 // ConvertCodexResponseToOpenAIResponses converts OpenAI Chat Completions streaming chunks
 // to OpenAI Responses SSE events (response.*).
 func ConvertCodexResponseToOpenAIResponses(ctx context.Context, modelName string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, param *any) []string {
-	if bytes.HasPrefix(rawJSON, []byte("data: ")) {
-		rawJSON = rawJSON[6:]
+	if bytes.HasPrefix(rawJSON, []byte("data:")) {
+		rawJSON = bytes.TrimSpace(rawJSON[5:])
 		if typeResult := gjson.GetBytes(rawJSON, "type"); typeResult.Exists() {
 			typeStr := typeResult.String()
 			if typeStr == "response.created" || typeStr == "response.in_progress" || typeStr == "response.completed" {
@@ -32,14 +32,14 @@ func ConvertCodexResponseToOpenAIResponsesNonStream(_ context.Context, modelName
 	scanner := bufio.NewScanner(bytes.NewReader(rawJSON))
 	buffer := make([]byte, 10240*1024)
 	scanner.Buffer(buffer, 10240*1024)
-	dataTag := []byte("data: ")
+	dataTag := []byte("data:")
 	for scanner.Scan() {
 		line := scanner.Bytes()
 
 		if !bytes.HasPrefix(line, dataTag) {
 			continue
 		}
-		rawJSON = line[6:]
+		rawJSON = bytes.TrimSpace(rawJSON[5:])
 
 		rootResult := gjson.ParseBytes(rawJSON)
 		// Verify this is a response.completed event
internal/translator/gemini-web/openai/chat-completions/init.go (new file, 20 lines)
@@ -0,0 +1,20 @@
+package chat_completions
+
+import (
+	. "github.com/luispater/CLIProxyAPI/v5/internal/constant"
+	"github.com/luispater/CLIProxyAPI/v5/internal/interfaces"
+	geminiChat "github.com/luispater/CLIProxyAPI/v5/internal/translator/gemini/openai/chat-completions"
+	"github.com/luispater/CLIProxyAPI/v5/internal/translator/translator"
+)
+
+func init() {
+	translator.Register(
+		OPENAI,
+		GEMINIWEB,
+		geminiChat.ConvertOpenAIRequestToGemini,
+		interfaces.TranslateResponse{
+			Stream: geminiChat.ConvertGeminiResponseToOpenAI,
+			NonStream: geminiChat.ConvertGeminiResponseToOpenAINonStream,
+		},
+	)
+}

internal/translator/gemini-web/openai/responses/init.go (new file, 20 lines)
@@ -0,0 +1,20 @@
+package responses
+
+import (
+	. "github.com/luispater/CLIProxyAPI/v5/internal/constant"
+	"github.com/luispater/CLIProxyAPI/v5/internal/interfaces"
+	geminiResponses "github.com/luispater/CLIProxyAPI/v5/internal/translator/gemini/openai/responses"
+	"github.com/luispater/CLIProxyAPI/v5/internal/translator/translator"
+)
+
+func init() {
+	translator.Register(
+		OPENAI_RESPONSE,
+		GEMINIWEB,
+		geminiResponses.ConvertOpenAIResponsesRequestToGemini,
+		interfaces.TranslateResponse{
+			Stream: geminiResponses.ConvertGeminiResponseToOpenAIResponses,
+			NonStream: geminiResponses.ConvertGeminiResponseToOpenAIResponsesNonStream,
+		},
+	)
+}
@@ -170,6 +170,31 @@ func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool)
 			node := []byte(`{"role":"model","parts":[{"text":""}]}`)
 			node, _ = sjson.SetBytes(node, "parts.0.text", content.String())
 			out, _ = sjson.SetRawBytes(out, "contents.-1", node)
+		} else if content.IsArray() {
+			// Assistant multimodal content (e.g. text + image) -> single model content with parts
+			node := []byte(`{"role":"model","parts":[]}`)
+			p := 0
+			for _, item := range content.Array() {
+				switch item.Get("type").String() {
+				case "text":
+					node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".text", item.Get("text").String())
+					p++
+				case "image_url":
+					// If the assistant returned an inline data URL, preserve it for history fidelity.
+					imageURL := item.Get("image_url.url").String()
+					if len(imageURL) > 5 { // expect data:...
+						pieces := strings.SplitN(imageURL[5:], ";", 2)
+						if len(pieces) == 2 && len(pieces[1]) > 7 {
+							mime := pieces[0]
+							data := pieces[1][7:]
+							node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".inlineData.mime_type", mime)
+							node, _ = sjson.SetBytes(node, "parts."+itoa(p)+".inlineData.data", data)
+							p++
+						}
+					}
+				}
+			}
+			out, _ = sjson.SetRawBytes(out, "contents.-1", node)
 		} else if !content.Exists() || content.Type == gjson.Null {
 			// Tool calls -> single model content with functionCall parts
 			tcs := m.Get("tool_calls")
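When replaying assistant history that contains an inline image, the request converter now splits the `data:<mime>;base64,<payload>` URL and stores the pieces as a Gemini `inlineData` part. A standalone sketch of that split (the function name is illustrative; the slicing matches the hunk: drop the 5-byte `data:` scheme, cut at `;`, then drop the 7-byte `base64,` prefix):

```go
package main

import (
	"fmt"
	"strings"
)

// splitDataURL extracts the mime type and base64 payload from a data URL
// using the same offsets as the converter in the diff.
func splitDataURL(u string) (mime, data string, ok bool) {
	if len(u) <= 5 || !strings.HasPrefix(u, "data:") {
		return "", "", false
	}
	pieces := strings.SplitN(u[5:], ";", 2)
	if len(pieces) != 2 || len(pieces[1]) <= 7 {
		return "", "", false
	}
	return pieces[0], pieces[1][7:], true
}

func main() {
	mime, data, ok := splitDataURL("data:image/png;base64,iVBORw0KGgo=")
	fmt.Println(ok, mime, data) // true image/png iVBORw0KGgo=
}
```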
@@ -8,6 +8,7 @@ package chat_completions
 import (
 	"bytes"
 	"context"
+	"encoding/json"
 	"fmt"
 	"time"
 
@@ -99,6 +100,10 @@ func ConvertGeminiResponseToOpenAI(_ context.Context, _ string, originalRequestR
 			partResult := partResults[i]
 			partTextResult := partResult.Get("text")
 			functionCallResult := partResult.Get("functionCall")
+			inlineDataResult := partResult.Get("inlineData")
+			if !inlineDataResult.Exists() {
+				inlineDataResult = partResult.Get("inline_data")
+			}
 
 			if partTextResult.Exists() {
 				// Handle text content, distinguishing between regular content and reasoning/thoughts.
@@ -124,6 +129,34 @@ func ConvertGeminiResponseToOpenAI(_ context.Context, _ string, originalRequestR
 				}
 				template, _ = sjson.Set(template, "choices.0.delta.role", "assistant")
 				template, _ = sjson.SetRaw(template, "choices.0.delta.tool_calls.-1", functionCallTemplate)
+			} else if inlineDataResult.Exists() {
+				data := inlineDataResult.Get("data").String()
+				if data == "" {
+					continue
+				}
+				mimeType := inlineDataResult.Get("mimeType").String()
+				if mimeType == "" {
+					mimeType = inlineDataResult.Get("mime_type").String()
+				}
+				if mimeType == "" {
+					mimeType = "image/png"
+				}
+				imageURL := fmt.Sprintf("data:%s;base64,%s", mimeType, data)
+				imagePayload, err := json.Marshal(map[string]any{
+					"type": "image_url",
+					"image_url": map[string]string{
+						"url": imageURL,
+					},
+				})
+				if err != nil {
+					continue
+				}
+				imagesResult := gjson.Get(template, "choices.0.delta.images")
+				if !imagesResult.Exists() || !imagesResult.IsArray() {
+					template, _ = sjson.SetRaw(template, "choices.0.delta.images", `[]`)
+				}
+				template, _ = sjson.Set(template, "choices.0.delta.role", "assistant")
+				template, _ = sjson.SetRaw(template, "choices.0.delta.images.-1", string(imagePayload))
 			}
 		}
 	}
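On the response side, Gemini `inlineData` parts are re-encoded as OpenAI-style `image_url` entries and appended to `choices.0.delta.images` (or `choices.0.message.images` in the non-stream path below). A sketch of how that payload is built (the helper function name is illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// imagePayload builds the JSON object the converter appends to the images
// array: a data URL wrapped in an OpenAI-style image_url entry.
func imagePayload(mimeType, base64Data string) (string, error) {
	if mimeType == "" {
		mimeType = "image/png" // fallback used in the diff when no mime type is present
	}
	payload, err := json.Marshal(map[string]any{
		"type": "image_url",
		"image_url": map[string]string{
			"url": fmt.Sprintf("data:%s;base64,%s", mimeType, base64Data),
		},
	})
	return string(payload), err
}

func main() {
	p, _ := imagePayload("", "iVBORw0KGgo=")
	fmt.Println(p) // {"image_url":{"url":"data:image/png;base64,iVBORw0KGgo="},"type":"image_url"}
}
```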
@@ -193,6 +226,10 @@ func ConvertGeminiResponseToOpenAINonStream(_ context.Context, _ string, origina
 			partResult := partsResults[i]
 			partTextResult := partResult.Get("text")
 			functionCallResult := partResult.Get("functionCall")
+			inlineDataResult := partResult.Get("inlineData")
+			if !inlineDataResult.Exists() {
+				inlineDataResult = partResult.Get("inline_data")
+			}
 
 			if partTextResult.Exists() {
 				// Append text content, distinguishing between regular content and reasoning.
@@ -217,9 +254,34 @@ func ConvertGeminiResponseToOpenAINonStream(_ context.Context, _ string, origina
 				}
 				template, _ = sjson.Set(template, "choices.0.message.role", "assistant")
 				template, _ = sjson.SetRaw(template, "choices.0.message.tool_calls.-1", functionCallItemTemplate)
-			} else {
-				// If no usable content is found, return an empty string.
-				return ""
+			} else if inlineDataResult.Exists() {
+				data := inlineDataResult.Get("data").String()
+				if data == "" {
+					continue
+				}
+				mimeType := inlineDataResult.Get("mimeType").String()
+				if mimeType == "" {
+					mimeType = inlineDataResult.Get("mime_type").String()
+				}
+				if mimeType == "" {
+					mimeType = "image/png"
+				}
+				imageURL := fmt.Sprintf("data:%s;base64,%s", mimeType, data)
+				imagePayload, err := json.Marshal(map[string]any{
+					"type": "image_url",
+					"image_url": map[string]string{
+						"url": imageURL,
+					},
+				})
+				if err != nil {
+					continue
+				}
+				imagesResult := gjson.Get(template, "choices.0.message.images")
+				if !imagesResult.Exists() || !imagesResult.IsArray() {
+					template, _ = sjson.SetRaw(template, "choices.0.message.images", `[]`)
+				}
+				template, _ = sjson.Set(template, "choices.0.message.role", "assistant")
+				template, _ = sjson.SetRaw(template, "choices.0.message.images.-1", string(imagePayload))
 			}
 		}
 	}
@@ -23,6 +23,9 @@ import (
 	_ "github.com/luispater/CLIProxyAPI/v5/internal/translator/gemini/openai/chat-completions"
 	_ "github.com/luispater/CLIProxyAPI/v5/internal/translator/gemini/openai/responses"
 
+	_ "github.com/luispater/CLIProxyAPI/v5/internal/translator/gemini-web/openai/chat-completions"
+	_ "github.com/luispater/CLIProxyAPI/v5/internal/translator/gemini-web/openai/responses"
+
 	_ "github.com/luispater/CLIProxyAPI/v5/internal/translator/openai/claude"
 	_ "github.com/luispater/CLIProxyAPI/v5/internal/translator/openai/gemini"
 	_ "github.com/luispater/CLIProxyAPI/v5/internal/translator/openai/gemini-cli"