Mirror of https://github.com/router-for-me/CLIProxyAPI.git (synced 2026-02-03 04:50:52 +08:00)
refactor(handlers): streamline error and data channel handling in streaming logic
Improved consistency across the OpenAI (chat, completions, and responses), Claude, and Gemini streaming handlers: the initial `select` is now wrapped in a `for` loop, the error channel is received with the comma-ok form, and a cleanly closed error channel is set to nil so the loop keeps waiting on the data channel instead of misreading closure as a failure.
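To see the new control flow in isolation, here is a minimal, self-contained Go sketch of the peek-then-stream loop. Types and names are simplified stand-ins (plain `error` instead of the project's `*interfaces.ErrorMessage`), and the `dataChan` case is an assumption: the hunks in this commit cut off before the data-channel handling, so that branch is illustrative only.

package main

import (
    "context"
    "fmt"
)

// peekAndStream mirrors the handlers' pattern: loop over select until the
// first data chunk or a real error arrives, treating a cleanly closed error
// channel as "keep waiting" rather than as a failure.
func peekAndStream(ctx context.Context, dataChan <-chan []byte, errChan <-chan error) {
    for {
        select {
        case <-ctx.Done():
            return
        case err, ok := <-errChan:
            if !ok {
                // Error channel closed with nothing sent. A receive from a
                // nil channel blocks forever, so nil-ing errChan removes
                // this case from later iterations of the select.
                errChan = nil
                continue
            }
            // Upstream failed before the first chunk: report and stop.
            fmt.Println("upstream error:", err)
            return
        case chunk, ok := <-dataChan: // assumed branch; not shown in the diff
            if !ok {
                return // stream ended with no data and no error
            }
            fmt.Printf("first chunk: %q\n", chunk)
            // The real handlers set headers at this point and hand the
            // channels off to their stream-forwarding helpers.
            return
        }
    }
}

func main() {
    dataChan := make(chan []byte, 1)
    errChan := make(chan error)

    close(errChan) // upstream reported no early error
    dataChan <- []byte("hello")

    peekAndStream(context.Background(), dataChan, errChan)
}

Either scheduling order ends the same way here: the closed error channel is disabled (or skipped) and the buffered chunk is printed.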
@@ -212,11 +212,17 @@ func (h *ClaudeCodeAPIHandler) handleStreamingResponse(c *gin.Context, rawJSON [
     }

     // Peek at the first chunk to determine success or failure before setting headers
-    select {
-    case <-c.Request.Context().Done():
-        cliCancel(c.Request.Context().Err())
-        return
-    case errMsg := <-errChan:
-        // Upstream failed immediately. Return proper error status and JSON.
-        h.WriteErrorResponse(c, errMsg)
-        if errMsg != nil {
+    for {
+        select {
+        case <-c.Request.Context().Done():
+            cliCancel(c.Request.Context().Err())
+            return
+        case errMsg, ok := <-errChan:
+            if !ok {
+                // Err channel closed cleanly; wait for data channel.
+                errChan = nil
+                continue
+            }
+            // Upstream failed immediately. Return proper error status and JSON.
+            h.WriteErrorResponse(c, errMsg)
+            if errMsg != nil {
@@ -245,6 +251,8 @@ func (h *ClaudeCodeAPIHandler) handleStreamingResponse(c *gin.Context, rawJSON [

-        // Continue streaming the rest
-        h.forwardClaudeStream(c, flusher, func(err error) { cliCancel(err) }, dataChan, errChan)
-    }
+            // Continue streaming the rest
+            h.forwardClaudeStream(c, flusher, func(err error) { cliCancel(err) }, dataChan, errChan)
+            return
+        }
+    }
 }

@@ -249,11 +249,17 @@ func (h *GeminiAPIHandler) handleStreamGenerateContent(c *gin.Context, modelName
     }

     // Peek at the first chunk
-    select {
-    case <-c.Request.Context().Done():
-        cliCancel(c.Request.Context().Err())
-        return
-    case errMsg := <-errChan:
-        // Upstream failed immediately. Return proper error status and JSON.
-        h.WriteErrorResponse(c, errMsg)
-        if errMsg != nil {
+    for {
+        select {
+        case <-c.Request.Context().Done():
+            cliCancel(c.Request.Context().Err())
+            return
+        case errMsg, ok := <-errChan:
+            if !ok {
+                // Err channel closed cleanly; wait for data channel.
+                errChan = nil
+                continue
+            }
+            // Upstream failed immediately. Return proper error status and JSON.
+            h.WriteErrorResponse(c, errMsg)
+            if errMsg != nil {
@@ -290,6 +296,8 @@ func (h *GeminiAPIHandler) handleStreamGenerateContent(c *gin.Context, modelName

-        // Continue
-        h.forwardGeminiStream(c, flusher, alt, func(err error) { cliCancel(err) }, dataChan, errChan)
-    }
+            // Continue
+            h.forwardGeminiStream(c, flusher, alt, func(err error) { cliCancel(err) }, dataChan, errChan)
+            return
+        }
+    }
 }

@@ -467,11 +467,17 @@ func (h *OpenAIAPIHandler) handleStreamingResponse(c *gin.Context, rawJSON []byt
     }

     // Peek at the first chunk to determine success or failure before setting headers
-    select {
-    case <-c.Request.Context().Done():
-        cliCancel(c.Request.Context().Err())
-        return
-    case errMsg := <-errChan:
-        // Upstream failed immediately. Return proper error status and JSON.
-        h.WriteErrorResponse(c, errMsg)
-        if errMsg != nil {
+    for {
+        select {
+        case <-c.Request.Context().Done():
+            cliCancel(c.Request.Context().Err())
+            return
+        case errMsg, ok := <-errChan:
+            if !ok {
+                // Err channel closed cleanly; wait for data channel.
+                errChan = nil
+                continue
+            }
+            // Upstream failed immediately. Return proper error status and JSON.
+            h.WriteErrorResponse(c, errMsg)
+            if errMsg != nil {
@@ -498,6 +504,8 @@ func (h *OpenAIAPIHandler) handleStreamingResponse(c *gin.Context, rawJSON []byt

-        // Continue streaming the rest
-        h.handleStreamResult(c, flusher, func(err error) { cliCancel(err) }, dataChan, errChan)
-    }
+            // Continue streaming the rest
+            h.handleStreamResult(c, flusher, func(err error) { cliCancel(err) }, dataChan, errChan)
+            return
+        }
+    }
 }

@@ -562,11 +570,17 @@ func (h *OpenAIAPIHandler) handleCompletionsStreamingResponse(c *gin.Context, ra
     }

     // Peek at the first chunk
-    select {
-    case <-c.Request.Context().Done():
-        cliCancel(c.Request.Context().Err())
-        return
-    case errMsg := <-errChan:
-        h.WriteErrorResponse(c, errMsg)
-        if errMsg != nil {
-            cliCancel(errMsg.Error)
+    for {
+        select {
+        case <-c.Request.Context().Done():
+            cliCancel(c.Request.Context().Err())
+            return
+        case errMsg, ok := <-errChan:
+            if !ok {
+                // Err channel closed cleanly; wait for data channel.
+                errChan = nil
+                continue
+            }
+            h.WriteErrorResponse(c, errMsg)
+            if errMsg != nil {
+                cliCancel(errMsg.Error)
@@ -625,6 +639,8 @@ func (h *OpenAIAPIHandler) handleCompletionsStreamingResponse(c *gin.Context, ra
-            stop()
-            cliCancel(err)
-        }, convertedChan, errChan)
-    }
+                stop()
+                cliCancel(err)
+            }, convertedChan, errChan)
+            return
+        }
+    }
 }
 func (h *OpenAIAPIHandler) handleStreamResult(c *gin.Context, flusher http.Flusher, cancel func(error), data <-chan []byte, errs <-chan *interfaces.ErrorMessage) {

@@ -152,11 +152,17 @@ func (h *OpenAIResponsesAPIHandler) handleStreamingResponse(c *gin.Context, rawJ
     }

     // Peek at the first chunk
-    select {
-    case <-c.Request.Context().Done():
-        cliCancel(c.Request.Context().Err())
-        return
-    case errMsg := <-errChan:
-        // Upstream failed immediately. Return proper error status and JSON.
-        h.WriteErrorResponse(c, errMsg)
-        if errMsg != nil {
+    for {
+        select {
+        case <-c.Request.Context().Done():
+            cliCancel(c.Request.Context().Err())
+            return
+        case errMsg, ok := <-errChan:
+            if !ok {
+                // Err channel closed cleanly; wait for data channel.
+                errChan = nil
+                continue
+            }
+            // Upstream failed immediately. Return proper error status and JSON.
+            h.WriteErrorResponse(c, errMsg)
+            if errMsg != nil {
@@ -188,6 +194,8 @@ func (h *OpenAIResponsesAPIHandler) handleStreamingResponse(c *gin.Context, rawJ

-        // Continue
-        h.forwardResponsesStream(c, flusher, func(err error) { cliCancel(err) }, dataChan, errChan)
-    }
+            // Continue
+            h.forwardResponsesStream(c, flusher, func(err error) { cliCancel(err) }, dataChan, errChan)
+            return
+        }
+    }
 }
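A note on why the comma-ok receive matters in these hunks: a single-value receive from a closed channel returns the zero value immediately, so under the old `case errMsg := <-errChan:` a cleanly closed error channel was indistinguishable from an upstream send of nil. A tiny self-contained illustration of the two receive forms:

package main

import "fmt"

func main() {
    errChan := make(chan error)
    close(errChan)

    // Single-value receive: a closed channel yields its zero value (nil)
    // right away, which looks exactly like a real nil send.
    err1 := <-errChan
    fmt.Println(err1) // <nil>

    // Comma-ok receive: ok == false signals "channel closed", which the
    // handlers now use to disable the case (errChan = nil) and keep waiting.
    err2, ok := <-errChan
    fmt.Println(err2, ok) // <nil> false
}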