mirror of
https://github.com/router-for-me/CLIProxyAPI.git
synced 2026-02-02 20:40:52 +08:00
Refactor error handling and variable declarations in browser and logging modules
- Simplified variable initialization in `browser.go` for readability.
- Updated error handling in `request_logger.go` with better resource cleanup using deferred anonymous functions.

Refactor API handlers to use `GetContextWithCancel` for streamlined context creation and response handling

- Replaced the redundant `context.WithCancel` and `context.WithValue` logic with the new `GetContextWithCancel` utility in all handlers.
- Centralized API response storage in the request context during cancellation.
- Updated the associated cancellation calls for consistency and better resource management.
- Replaced per-handler `apiResponseData` buffering with `AddAPIResponseData` for centralized response recording.
- Simplified cancellation logic by switching to the variadic `cliCancel` callback, which accepts the final response or error.
- Removed the now-unused `apiResponseData` slices across handlers to reduce memory usage.
- Updated `handlers.go` to support unified response data storage per request context.
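The core of the handler refactor is easiest to see as a before/after fragment. This is an illustrative condensation, not a complete handler: `h`, `c`, `chunk`, `resp`, and `err` come from the surrounding handler code shown in the diff below.

```go
// Before: every handler built its own context and buffered the response locally.
backgroundCtx, cliCancel := context.WithCancel(context.Background())
cliCtx := context.WithValue(backgroundCtx, "gin", c)
apiResponseData := make([]byte, 0)
// ... per chunk:
apiResponseData = append(apiResponseData, chunk...)
// ... on exit:
c.Set("API_RESPONSE", apiResponseData)
cliCancel()

// After: the shared helpers own context creation and response recording.
cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())
// ... per chunk:
h.AddAPIResponseData(c, chunk)
// ... on exit, the cancel callback records the outcome and then cancels:
cliCancel()          // streaming: flush the chunks recorded via AddAPIResponseData
cliCancel(resp)      // non-streaming: record the full response body
cliCancel(err.Error) // failure: record the error text
```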
@@ -108,8 +108,7 @@ func (h *ClaudeCodeAPIHandlers) handleGeminiStreamingResponse(c *gin.Context, ra

 // Create a cancellable context for the backend client request
 // This allows proper cleanup and cancellation of ongoing requests
-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 cliClient = client.NewGeminiClient(nil, nil, nil)
@@ -159,7 +158,6 @@ outLoop:
 responseType := 0
 responseIndex := 0

-apiResponseData := make([]byte, 0)
 // Main streaming loop - handles multiple concurrent events using Go channels
 // This select statement manages four different types of events simultaneously
 for {
@@ -169,7 +167,6 @@ outLoop:
 case <-c.Request.Context().Done():
 if c.Request.Context().Err().Error() == "context canceled" {
 log.Debugf("GeminiClient disconnected: %v", c.Request.Context().Err())
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel() // Cancel the backend request to prevent resource leaks
 return
 }
@@ -186,12 +183,11 @@ outLoop:
 _, _ = c.Writer.Write([]byte("\n\n\n"))

 flusher.Flush()
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel()
 return
 }

-apiResponseData = append(apiResponseData, chunk...)
+h.AddAPIResponseData(c, chunk)
 // Convert the backend response to Claude-compatible format
 // This translation layer ensures API compatibility
 claudeFormat := translatorClaudeCodeToGeminiCli.ConvertCliResponseToClaudeCode(chunk, isGlAPIKey, hasFirstResponse, &responseType, &responseIndex)
@@ -214,8 +210,7 @@ outLoop:
 c.Status(errInfo.StatusCode)
 _, _ = fmt.Fprint(c.Writer, errInfo.Error.Error())
 flusher.Flush()
-c.Set("API_RESPONSE", []byte(errInfo.Error.Error()))
-cliCancel()
+cliCancel(errInfo.Error)
 }
 return
 }
@@ -280,8 +275,7 @@ func (h *ClaudeCodeAPIHandlers) handleCodexStreamingResponse(c *gin.Context, raw
 // return
 // Create a cancellable context for the backend client request
 // This allows proper cleanup and cancellation of ongoing requests
-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -316,7 +310,6 @@ outLoop:
 hasFirstResponse := false
 hasToolCall := false

-apiResponseData := make([]byte, 0)
 // Main streaming loop - handles multiple concurrent events using Go channels
 // This select statement manages four different types of events simultaneously
 for {
@@ -326,7 +319,6 @@ outLoop:
 case <-c.Request.Context().Done():
 if c.Request.Context().Err().Error() == "context canceled" {
 log.Debugf("CodexClient disconnected: %v", c.Request.Context().Err())
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel() // Cancel the backend request to prevent resource leaks
 return
 }
@@ -336,11 +328,12 @@ outLoop:
 case chunk, okStream := <-respChan:
 if !okStream {
 flusher.Flush()
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel()
 return
 }
-apiResponseData = append(apiResponseData, chunk...)
+
+h.AddAPIResponseData(c, chunk)
+
 // Convert the backend response to Claude-compatible format
 // This translation layer ensures API compatibility
 if bytes.HasPrefix(chunk, []byte("data: ")) {
@@ -371,9 +364,8 @@ outLoop:
 // Forward other errors directly to the client
 c.Status(errInfo.StatusCode)
 _, _ = fmt.Fprint(c.Writer, errInfo.Error.Error())
-c.Set("API_RESPONSE", []byte(errInfo.Error.Error()))
 flusher.Flush()
-cliCancel()
+cliCancel(errInfo.Error)
 }
 return
 }
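All of the hunks above revolve around the same streaming select loop. Below is a minimal, self-contained illustration of that control flow; the channel payloads and the backend goroutine are simplified stand-ins for the real client, and the real handlers additionally watch further events (see the comments in the diff).

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

// stream mirrors the handlers' loop: watch for client disconnect, response
// chunks, and backend errors, and always cancel the backend request on exit.
func stream(ctx context.Context, cancel context.CancelFunc, respChan <-chan []byte, errChan <-chan error) {
	defer cancel()
	for {
		select {
		case <-ctx.Done():
			// The client went away; stop reading and release the backend request.
			fmt.Println("client disconnected:", ctx.Err())
			return
		case chunk, ok := <-respChan:
			if !ok {
				// Stream closed normally.
				fmt.Println("stream closed")
				return
			}
			fmt.Printf("chunk: %s\n", chunk)
		case err := <-errChan:
			fmt.Println("backend error:", err)
			return
		}
	}
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	respChan := make(chan []byte)
	errChan := make(chan error, 1)

	go func() {
		respChan <- []byte(`data: {"candidates":[]}`)
		errChan <- errors.New("upstream closed the connection")
	}()
	go func() { time.Sleep(100 * time.Millisecond); cancel() }()

	stream(ctx, cancel, respChan, errChan)
}
```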
@@ -156,8 +156,7 @@ func (h *GeminiCLIAPIHandlers) handleInternalStreamGenerateContent(c *gin.Contex
 modelResult := gjson.GetBytes(rawJSON, "model")
 modelName := modelResult.String()

-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -188,26 +187,23 @@ outLoop:
 respChan, errChan := cliClient.SendRawMessageStream(cliCtx, rawJSON, "")
 hasFirstResponse := false

-apiResponseData := make([]byte, 0)
 for {
 select {
 // Handle client disconnection.
 case <-c.Request.Context().Done():
 if c.Request.Context().Err().Error() == "context canceled" {
 log.Debugf("GeminiClient disconnected: %v", c.Request.Context().Err())
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel() // Cancel the backend request.
 return
 }
 // Process incoming response chunks.
 case chunk, okStream := <-respChan:
 if !okStream {
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel()
 return
 }

-apiResponseData = append(apiResponseData, chunk...)
+h.AddAPIResponseData(c, chunk)

 hasFirstResponse = true
 if cliClient.(*client.GeminiClient).GetGenerativeLanguageAPIKey() != "" {
@@ -227,8 +223,7 @@ outLoop:
 c.Status(err.StatusCode)
 _, _ = fmt.Fprint(c.Writer, err.Error.Error())
 flusher.Flush()
-c.Set("API_RESPONSE", []byte(err.Error.Error()))
-cliCancel()
+cliCancel(err.Error)
 }
 return
 }
@@ -248,8 +243,8 @@ func (h *GeminiCLIAPIHandlers) handleInternalGenerateContent(c *gin.Context, raw
 // log.Debugf("GenerateContent: %s", string(rawJSON))
 modelResult := gjson.GetBytes(rawJSON, "model")
 modelName := modelResult.String()
-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -282,14 +277,12 @@ func (h *GeminiCLIAPIHandlers) handleInternalGenerateContent(c *gin.Context, raw
 c.Status(err.StatusCode)
 _, _ = c.Writer.Write([]byte(err.Error.Error()))
 log.Debugf("code: %d, error: %s", err.StatusCode, err.Error.Error())
-c.Set("API_RESPONSE", []byte(err.Error.Error()))
-cliCancel()
+cliCancel(err.Error)
 }
 break
 } else {
 _, _ = c.Writer.Write(resp)
-c.Set("API_RESPONSE", resp)
-cliCancel()
+cliCancel(resp)
 break
 }
 }
@@ -328,8 +321,7 @@ func (h *GeminiCLIAPIHandlers) handleCodexInternalStreamGenerateContent(c *gin.C

 modelName := gjson.GetBytes(rawJSON, "model")

-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -362,7 +354,6 @@ outLoop:
 ResponseID: "",
 LastStorageOutput: "",
 }
-apiResponseData := make([]byte, 0)

 for {
 select {
@@ -370,20 +361,19 @@ outLoop:
 case <-c.Request.Context().Done():
 if c.Request.Context().Err().Error() == "context canceled" {
 log.Debugf("CodexClient disconnected: %v", c.Request.Context().Err())
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel() // Cancel the backend request.
 return
 }
 // Process incoming response chunks.
 case chunk, okStream := <-respChan:
 if !okStream {
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel()
 return
 }
 // _, _ = logFile.Write(chunk)
 // _, _ = logFile.Write([]byte("\n"))
-apiResponseData = append(apiResponseData, chunk...)
+h.AddAPIResponseData(c, chunk)
+
 if bytes.HasPrefix(chunk, []byte("data: ")) {
 jsonData := chunk[6:]
 data := gjson.ParseBytes(jsonData)
@@ -411,8 +401,7 @@ outLoop:
 c.Status(errMessage.StatusCode)
 _, _ = fmt.Fprint(c.Writer, errMessage.Error.Error())
 flusher.Flush()
-c.Set("API_RESPONSE", []byte(errMessage.Error.Error()))
-cliCancel()
+cliCancel(errMessage.Error)
 }
 return
 }
@@ -438,8 +427,7 @@ func (h *GeminiCLIAPIHandlers) handleCodexInternalGenerateContent(c *gin.Context

 modelName := gjson.GetBytes(rawJSON, "model")

-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -464,25 +452,24 @@ outLoop:

 // Send the message and receive response chunks and errors via channels.
 respChan, errChan := cliClient.SendRawMessageStream(cliCtx, []byte(newRequestJSON), "")
-apiResponseData := make([]byte, 0)
 for {
 select {
 // Handle client disconnection.
 case <-c.Request.Context().Done():
 if c.Request.Context().Err().Error() == "context canceled" {
 log.Debugf("CodexClient disconnected: %v", c.Request.Context().Err())
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel() // Cancel the backend request.
 return
 }
 // Process incoming response chunks.
 case chunk, okStream := <-respChan:
 if !okStream {
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel()
 return
 }
-apiResponseData = append(apiResponseData, chunk...)
+
+h.AddAPIResponseData(c, chunk)
+
 if bytes.HasPrefix(chunk, []byte("data: ")) {
 jsonData := chunk[6:]
 data := gjson.ParseBytes(jsonData)
@@ -506,8 +493,7 @@ outLoop:
 log.Debugf("org: %s", string(orgRawJSON))
 log.Debugf("raw: %s", string(rawJSON))
 log.Debugf("newRequestJSON: %s", newRequestJSON)
-c.Set("API_RESPONSE", []byte(err.Error.Error()))
-cliCancel()
+cliCancel(err.Error)
 }
 return
 }
@@ -262,8 +262,7 @@ func (h *GeminiAPIHandlers) handleGeminiStreamGenerateContent(c *gin.Context, ra
 modelResult := gjson.GetBytes(rawJSON, "model")
 modelName := modelResult.String()

-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -324,25 +323,23 @@ outLoop:

 // Send the message and receive response chunks and errors via channels.
 respChan, errChan := cliClient.SendRawMessageStream(cliCtx, rawJSON, alt)
-apiResponseData := make([]byte, 0)
 for {
 select {
 // Handle client disconnection.
 case <-c.Request.Context().Done():
 if c.Request.Context().Err().Error() == "context canceled" {
 log.Debugf("GeminiClient disconnected: %v", c.Request.Context().Err())
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel() // Cancel the backend request.
 return
 }
 // Process incoming response chunks.
 case chunk, okStream := <-respChan:
 if !okStream {
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel()
 return
 }
-apiResponseData = append(apiResponseData, chunk...)
+
+h.AddAPIResponseData(c, chunk)

 if cliClient.(*client.GeminiClient).GetGenerativeLanguageAPIKey() == "" {
 if alt == "" {
@@ -384,8 +381,7 @@ outLoop:
 c.Status(err.StatusCode)
 _, _ = fmt.Fprint(c.Writer, err.Error.Error())
 flusher.Flush()
-c.Set("API_RESPONSE", []byte(err.Error.Error()))
-cliCancel()
+cliCancel(err.Error)
 }
 return
 }
@@ -403,8 +399,7 @@ func (h *GeminiAPIHandlers) handleGeminiCountTokens(c *gin.Context, rawJSON []by
 // orgrawJSON := rawJSON
 modelResult := gjson.GetBytes(rawJSON, "model")
 modelName := modelResult.String()
-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -446,8 +441,7 @@ func (h *GeminiAPIHandlers) handleGeminiCountTokens(c *gin.Context, rawJSON []by
 } else {
 c.Status(err.StatusCode)
 _, _ = c.Writer.Write([]byte(err.Error.Error()))
-c.Set("API_RESPONSE", []byte(err.Error.Error()))
-cliCancel()
+cliCancel(err.Error)
 // log.Debugf(err.Error.Error())
 // log.Debugf(string(rawJSON))
 // log.Debugf(string(orgrawJSON))
@@ -461,8 +455,7 @@ func (h *GeminiAPIHandlers) handleGeminiCountTokens(c *gin.Context, rawJSON []by
 }
 }
 _, _ = c.Writer.Write(resp)
-c.Set("API_RESPONSE", resp)
-cliCancel()
+cliCancel(resp)
 break
 }
 }
@@ -475,8 +468,7 @@ func (h *GeminiAPIHandlers) handleGeminiGenerateContent(c *gin.Context, rawJSON

 modelResult := gjson.GetBytes(rawJSON, "model")
 modelName := modelResult.String()
-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -538,8 +530,7 @@ func (h *GeminiAPIHandlers) handleGeminiGenerateContent(c *gin.Context, rawJSON
 } else {
 c.Status(err.StatusCode)
 _, _ = c.Writer.Write([]byte(err.Error.Error()))
-c.Set("API_RESPONSE", []byte(err.Error.Error()))
-cliCancel()
+cliCancel(err.Error)
 }
 break
 } else {
@@ -550,8 +541,7 @@ func (h *GeminiAPIHandlers) handleGeminiGenerateContent(c *gin.Context, rawJSON
 }
 }
 _, _ = c.Writer.Write(resp)
-c.Set("API_RESPONSE", resp)
-cliCancel()
+cliCancel(resp)
 break
 }
 }
@@ -581,8 +571,7 @@ func (h *GeminiAPIHandlers) handleCodexStreamGenerateContent(c *gin.Context, raw

 modelName := gjson.GetBytes(rawJSON, "model")

-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -609,8 +598,6 @@ outLoop:
 // Send the message and receive response chunks and errors via channels.
 respChan, errChan := cliClient.SendRawMessageStream(cliCtx, []byte(newRequestJSON), "")

-apiResponseData := make([]byte, 0)
-
 params := &translatorGeminiToCodex.ConvertCodexResponseToGeminiParams{
 Model: modelName.String(),
 CreatedAt: 0,
@@ -623,18 +610,17 @@ outLoop:
 case <-c.Request.Context().Done():
 if c.Request.Context().Err().Error() == "context canceled" {
 log.Debugf("CodexClient disconnected: %v", c.Request.Context().Err())
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel() // Cancel the backend request.
 return
 }
 // Process incoming response chunks.
 case chunk, okStream := <-respChan:
 if !okStream {
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel()
 return
 }
-apiResponseData = append(apiResponseData, chunk...)
+
+h.AddAPIResponseData(c, chunk)

 if bytes.HasPrefix(chunk, []byte("data: ")) {
 jsonData := chunk[6:]
@@ -662,8 +648,7 @@ outLoop:
 c.Status(err.StatusCode)
 _, _ = fmt.Fprint(c.Writer, err.Error.Error())
 flusher.Flush()
-c.Set("API_RESPONSE", []byte(err.Error.Error()))
-cliCancel()
+cliCancel(err.Error)
 }
 return
 }
@@ -683,8 +668,7 @@ func (h *GeminiAPIHandlers) handleCodexGenerateContent(c *gin.Context, rawJSON [

 modelName := gjson.GetBytes(rawJSON, "model")

-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -709,25 +693,23 @@ outLoop:

 // Send the message and receive response chunks and errors via channels.
 respChan, errChan := cliClient.SendRawMessageStream(cliCtx, []byte(newRequestJSON), "")
-apiResponseData := make([]byte, 0)
 for {
 select {
 // Handle client disconnection.
 case <-c.Request.Context().Done():
 if c.Request.Context().Err().Error() == "context canceled" {
 log.Debugf("CodexClient disconnected: %v", c.Request.Context().Err())
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel() // Cancel the backend request.
 return
 }
 // Process incoming response chunks.
 case chunk, okStream := <-respChan:
 if !okStream {
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel()
 return
 }
-apiResponseData = append(apiResponseData, chunk...)
+
+h.AddAPIResponseData(c, chunk)

 if bytes.HasPrefix(chunk, []byte("data: ")) {
 jsonData := chunk[6:]
@@ -749,8 +731,7 @@ outLoop:
 } else {
 c.Status(err.StatusCode)
 _, _ = fmt.Fprint(c.Writer, err.Error.Error())
-c.Set("API_RESPONSE", []byte(err.Error.Error()))
-cliCancel()
+cliCancel(err.Error)
 }
 return
 }
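Several of the Codex paths above strip the SSE `data: ` prefix before reading fields out of the payload with gjson. A standalone illustration of that step follows; the event payload is invented for the example.

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	// One SSE line as the backend would stream it (payload made up here).
	chunk := []byte(`data: {"type":"response.output_text.delta","delta":"Hello"}`)

	if bytes.HasPrefix(chunk, []byte("data: ")) {
		jsonData := chunk[6:] // drop the 6-byte "data: " prefix
		data := gjson.ParseBytes(jsonData)
		fmt.Println(data.Get("type").String())  // response.output_text.delta
		fmt.Println(data.Get("delta").String()) // Hello
	}
}
```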
@@ -12,6 +12,7 @@ import (
 "github.com/luispater/CLIProxyAPI/internal/config"
 "github.com/luispater/CLIProxyAPI/internal/util"
 log "github.com/sirupsen/logrus"
+"golang.org/x/net/context"
 )

 // ErrorResponse represents a standard error response format for the API.
@@ -50,6 +51,9 @@ type APIHandlers struct {
 // LastUsedClientIndex tracks the last used client index for each provider
 // to implement round-robin load balancing.
 LastUsedClientIndex map[string]int
+
+// apiResponseData recording provider api response data
+apiResponseData map[*gin.Context][]byte
 }

 // NewAPIHandlers creates a new API handlers instance.
@@ -67,6 +71,7 @@ func NewAPIHandlers(cliClients []client.Client, cfg *config.Config) *APIHandlers
 Cfg: cfg,
 Mutex: &sync.Mutex{},
 LastUsedClientIndex: make(map[string]int),
+apiResponseData: make(map[*gin.Context][]byte),
 }
 }

@@ -185,3 +190,43 @@ func (h *APIHandlers) GetAlt(c *gin.Context) string {
 }
 return alt
 }
+
+func (h *APIHandlers) GetContextWithCancel(c *gin.Context, ctx context.Context) (context.Context, APIHandlerCancelFunc) {
+newCtx, cancel := context.WithCancel(ctx)
+newCtx = context.WithValue(newCtx, "gin", c)
+return newCtx, func(params ...interface{}) {
+if h.Cfg.RequestLog {
+if len(params) == 1 {
+data := params[0]
+switch data.(type) {
+case []byte:
+c.Set("API_RESPONSE", data.([]byte))
+case error:
+c.Set("API_RESPONSE", []byte(data.(error).Error()))
+case string:
+c.Set("API_RESPONSE", []byte(data.(string)))
+case bool:
+case nil:
+}
+} else {
+if _, hasKey := h.apiResponseData[c]; hasKey {
+c.Set("API_RESPONSE", h.apiResponseData[c])
+delete(h.apiResponseData, c)
+}
+}
+}
+
+cancel()
+}
+}
+
+func (h *APIHandlers) AddAPIResponseData(c *gin.Context, data []byte) {
+if h.Cfg.RequestLog {
+if _, hasKey := h.apiResponseData[c]; !hasKey {
+h.apiResponseData[c] = make([]byte, 0)
+}
+h.apiResponseData[c] = append(h.apiResponseData[c], data...)
+}
+}
+
+type APIHandlerCancelFunc func(params ...interface{})
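The cancel function returned by `GetContextWithCancel` decides what ends up in the `API_RESPONSE` context key based on its single optional argument. Here is a self-contained sketch of that convention; gin and the handler struct are replaced by a plain record function, so this is an illustration of the callback's behaviour rather than the repository's implementation.

```go
package main

import (
	"errors"
	"fmt"
)

// cancelFunc mirrors APIHandlerCancelFunc: zero or one argument describing the outcome.
type cancelFunc func(params ...interface{})

func newCancel(record func([]byte), cancel func()) cancelFunc {
	return func(params ...interface{}) {
		if len(params) == 1 {
			switch v := params[0].(type) {
			case []byte:
				record(v) // non-streaming success: the raw response body
			case error:
				record([]byte(v.Error())) // failure: the error text
			case string:
				record([]byte(v))
			}
		}
		// With no argument, the real callback flushes whatever AddAPIResponseData buffered.
		cancel()
	}
}

func main() {
	record := func(b []byte) { fmt.Printf("API_RESPONSE = %q\n", b) }
	cliCancel := newCancel(record, func() { fmt.Println("backend context cancelled") })

	cliCancel([]byte(`{"ok":true}`))
	cliCancel(errors.New("upstream returned 500"))
}
```

Keying the chunk buffer by `*gin.Context` and deleting the entry inside the no-argument path keeps the map from growing across requests, and the whole mechanism is active only while `RequestLog` is enabled.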
@@ -160,8 +160,7 @@ func (h *OpenAIAPIHandlers) handleGeminiNonStreamingResponse(c *gin.Context, raw
 c.Header("Content-Type", "application/json")

 modelName, systemInstruction, contents, tools := translatorOpenAIToGeminiCli.ConvertOpenAIChatRequestToCli(rawJSON)
-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -195,8 +194,7 @@ func (h *OpenAIAPIHandlers) handleGeminiNonStreamingResponse(c *gin.Context, raw
 } else {
 c.Status(err.StatusCode)
 _, _ = c.Writer.Write([]byte(err.Error.Error()))
-c.Set("API_RESPONSE", []byte(err.Error.Error()))
-cliCancel()
+cliCancel(err.Error)
 }
 break
 } else {
@@ -204,8 +202,7 @@ func (h *OpenAIAPIHandlers) handleGeminiNonStreamingResponse(c *gin.Context, raw
 if openAIFormat != "" {
 _, _ = c.Writer.Write([]byte(openAIFormat))
 }
-c.Set("API_RESPONSE", resp)
-cliCancel()
+cliCancel(resp)
 break
 }
 }
@@ -238,8 +235,7 @@ func (h *OpenAIAPIHandlers) handleGeminiStreamingResponse(c *gin.Context, rawJSO

 // Prepare the request for the backend client.
 modelName, systemInstruction, contents, tools := translatorOpenAIToGeminiCli.ConvertOpenAIChatRequestToCli(rawJSON)
-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -270,7 +266,6 @@ outLoop:
 }
 // Send the message and receive response chunks and errors via channels.
 respChan, errChan := cliClient.SendMessageStream(cliCtx, rawJSON, modelName, systemInstruction, contents, tools)
-apiResponseData := make([]byte, 0)

 hasFirstResponse := false
 for {
@@ -279,7 +274,6 @@ outLoop:
 case <-c.Request.Context().Done():
 if c.Request.Context().Err().Error() == "context canceled" {
 log.Debugf("GeminiClient disconnected: %v", c.Request.Context().Err())
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel() // Cancel the backend request.
 return
 }
@@ -289,11 +283,12 @@ outLoop:
 // Stream is closed, send the final [DONE] message.
 _, _ = fmt.Fprintf(c.Writer, "data: [DONE]\n\n")
 flusher.Flush()
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel()
 return
 }
-apiResponseData = append(apiResponseData, chunk...)
+
+h.AddAPIResponseData(c, chunk)
+
 // Convert the chunk to OpenAI format and send it to the client.
 hasFirstResponse = true
 openAIFormat := translatorOpenAIToGeminiCli.ConvertCliResponseToOpenAIChat(chunk, time.Now().Unix(), isGlAPIKey)
@@ -310,8 +305,7 @@ outLoop:
 c.Status(err.StatusCode)
 _, _ = fmt.Fprint(c.Writer, err.Error.Error())
 flusher.Flush()
-c.Set("API_RESPONSE", []byte(err.Error.Error()))
-cliCancel()
+cliCancel(err.Error)
 }
 return
 }
@@ -338,8 +332,8 @@ func (h *OpenAIAPIHandlers) handleCodexNonStreamingResponse(c *gin.Context, rawJ

 newRequestJSON := translatorOpenAIToCodex.ConvertOpenAIChatRequestToCodex(rawJSON)
 modelName := gjson.GetBytes(rawJSON, "model")
-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -363,25 +357,24 @@ outLoop:

 // Send the message and receive response chunks and errors via channels.
 respChan, errChan := cliClient.SendRawMessageStream(cliCtx, []byte(newRequestJSON), "")
-apiResponseData := make([]byte, 0)
 for {
 select {
 // Handle client disconnection.
 case <-c.Request.Context().Done():
 if c.Request.Context().Err().Error() == "context canceled" {
 log.Debugf("CodexClient disconnected: %v", c.Request.Context().Err())
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel() // Cancel the backend request.
 return
 }
 // Process incoming response chunks.
 case chunk, okStream := <-respChan:
 if !okStream {
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel()
 return
 }
-apiResponseData = append(apiResponseData, chunk...)
+
+h.AddAPIResponseData(c, chunk)
+
 if bytes.HasPrefix(chunk, []byte("data: ")) {
 jsonData := chunk[6:]
 data := gjson.ParseBytes(jsonData)
@@ -400,8 +393,7 @@ outLoop:
 } else {
 c.Status(err.StatusCode)
 _, _ = c.Writer.Write([]byte(err.Error.Error()))
-c.Set("API_RESPONSE", []byte(err.Error.Error()))
-cliCancel()
+cliCancel(err.Error)
 }
 return
 }
@@ -443,8 +435,7 @@ func (h *OpenAIAPIHandlers) handleCodexStreamingResponse(c *gin.Context, rawJSON

 modelName := gjson.GetBytes(rawJSON, "model")

-backgroundCtx, cliCancel := context.WithCancel(context.Background())
-cliCtx := context.WithValue(backgroundCtx, "gin", c)
+cliCtx, cliCancel := h.GetContextWithCancel(c, context.Background())

 var cliClient client.Client
 defer func() {
@@ -471,14 +462,12 @@ outLoop:
 // Send the message and receive response chunks and errors via channels.
 var params *translatorOpenAIToCodex.ConvertCliToOpenAIParams
 respChan, errChan := cliClient.SendRawMessageStream(cliCtx, []byte(newRequestJSON), "")
-apiResponseData := make([]byte, 0)
 for {
 select {
 // Handle client disconnection.
 case <-c.Request.Context().Done():
 if c.Request.Context().Err().Error() == "context canceled" {
 log.Debugf("CodexClient disconnected: %v", c.Request.Context().Err())
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel() // Cancel the backend request.
 return
 }
@@ -487,11 +476,12 @@ outLoop:
 if !okStream {
 _, _ = c.Writer.Write([]byte("[done]\n\n"))
 flusher.Flush()
-c.Set("API_RESPONSE", apiResponseData)
 cliCancel()
 return
 }
-apiResponseData = append(apiResponseData, chunk...)
+
+h.AddAPIResponseData(c, chunk)
+
 // log.Debugf("Response: %s\n", string(chunk))
 // Convert the chunk to OpenAI format and send it to the client.
 if bytes.HasPrefix(chunk, []byte("data: ")) {
@@ -518,9 +508,8 @@ outLoop:
 } else {
 c.Status(err.StatusCode)
 _, _ = fmt.Fprint(c.Writer, err.Error.Error())
-c.Set("API_RESPONSE", []byte(err.Error.Error()))
 flusher.Flush()
-cliCancel()
+cliCancel(err.Error)
 }
 return
 }
@@ -105,7 +105,7 @@ func GetPlatformInfo() map[string]interface{} {
 info["default_command"] = "rundll32"
 case "linux":
 browsers := []string{"xdg-open", "x-www-browser", "www-browser", "firefox", "chromium", "google-chrome"}
-availableBrowsers := []string{}
+var availableBrowsers []string
 for _, browser := range browsers {
 if _, err := exec.LookPath(browser); err == nil {
 availableBrowsers = append(availableBrowsers, browser)
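The `browser.go` change swaps an empty-slice literal for a plain `var` declaration. Both behave the same with `append`, since appending to a nil slice allocates as needed, but the `var` form avoids an up-front allocation and is the form preferred by common Go linters. A small runnable comparison:

```go
package main

import "fmt"

func main() {
	// Before the change: an empty, non-nil slice is allocated up front.
	withLiteral := []string{}
	// After the change: a nil slice; append allocates lazily on first use.
	var withVar []string

	fmt.Println(withLiteral == nil, withVar == nil) // false true

	withVar = append(withVar, "firefox") // append works identically on a nil slice
	fmt.Println(withVar)                 // [firefox]
}
```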
@@ -85,7 +85,7 @@ func (l *FileRequestLogger) LogRequest(url, method string, requestHeaders map[st
 content := l.formatLogContent(url, method, requestHeaders, body, apiRequest, apiResponse, decompressedResponse, statusCode, responseHeaders)

 // Write to file
-if err := os.WriteFile(filePath, []byte(content), 0644); err != nil {
+if err = os.WriteFile(filePath, []byte(content), 0644); err != nil {
 return fmt.Errorf("failed to write log file: %w", err)
 }

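Switching from `:=` to `=` in these if statements presumably reuses an `err` variable that already exists earlier in the function instead of shadowing it, which is the kind of redundant declaration linters flag. A minimal sketch of the pattern, with a hypothetical `writeLog` helper standing in for the logger method:

```go
package main

import (
	"fmt"
	"os"
)

// writeLog mirrors the logger's pattern: err already exists in the function,
// so the if statement assigns with "=" rather than shadowing with ":=".
func writeLog(path string, content []byte) error {
	var err error
	if err = os.WriteFile(path, content, 0644); err != nil {
		return fmt.Errorf("failed to write log file: %w", err)
	}
	return nil
}

func main() {
	if err := writeLog(os.TempDir()+"/cliproxy-example.log", []byte("hello\n")); err != nil {
		fmt.Println(err)
	}
}
```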
@@ -115,7 +115,7 @@ func (l *FileRequestLogger) LogStreamingRequest(url, method string, headers map[

 // Write initial request information
 requestInfo := l.formatRequestInfo(url, method, headers, body)
-if _, err := file.WriteString(requestInfo); err != nil {
+if _, err = file.WriteString(requestInfo); err != nil {
 _ = file.Close()
 return nil, fmt.Errorf("failed to write request info: %w", err)
 }
@@ -257,7 +257,9 @@ func (l *FileRequestLogger) decompressGzip(data []byte) ([]byte, error) {
 if err != nil {
 return nil, fmt.Errorf("failed to create gzip reader: %w", err)
 }
-defer reader.Close()
+defer func() {
+_ = reader.Close()
+}()

 decompressed, err := io.ReadAll(reader)
 if err != nil {
@@ -270,7 +272,9 @@ func (l *FileRequestLogger) decompressGzip(data []byte) ([]byte, error) {
 // decompressDeflate decompresses deflate-encoded data.
 func (l *FileRequestLogger) decompressDeflate(data []byte) ([]byte, error) {
 reader := flate.NewReader(bytes.NewReader(data))
-defer reader.Close()
+defer func() {
+_ = reader.Close()
+}()

 decompressed, err := io.ReadAll(reader)
 if err != nil {
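`defer reader.Close()` silently discards the `Close` error, which tools such as errcheck flag; wrapping the call in a deferred anonymous function and explicitly ignoring the result makes that choice visible. Below is a standalone reconstruction of the gzip path; the second error message is invented here and may differ from the repository's wording.

```go
package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io"
)

func decompressGzip(data []byte) ([]byte, error) {
	reader, err := gzip.NewReader(bytes.NewReader(data))
	if err != nil {
		return nil, fmt.Errorf("failed to create gzip reader: %w", err)
	}
	defer func() {
		_ = reader.Close() // explicitly ignore the close error
	}()

	decompressed, err := io.ReadAll(reader)
	if err != nil {
		return nil, fmt.Errorf("failed to decompress gzip data: %w", err)
	}
	return decompressed, nil
}

func main() {
	// Build a small gzip payload, then round-trip it through the helper.
	var buf bytes.Buffer
	zw := gzip.NewWriter(&buf)
	_, _ = zw.Write([]byte("hello, gzip"))
	_ = zw.Close()

	out, err := decompressGzip(buf.Bytes())
	fmt.Println(string(out), err)
}
```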