feat: introduce custom provider example and remove redundant debug logs

- Added `examples/custom-provider/main.go`, showcasing custom executor and translator integration using the SDK (a rough sketch of the translator shape appears after the commit metadata below).
- Removed redundant debug logging from the translator modules to reduce noise.
- Updated SDK documentation with new usage and advanced examples.
- Expanded the management API with new endpoints, including request logging and GPT-5 Codex features.
Luis Pater
2025-09-22 03:37:53 +08:00
parent d5ad5fab87
commit f81898c906
54 changed files with 907 additions and 101 deletions
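
As a rough orientation for the new custom-provider example, here is a self-contained sketch of the request-translator shape the SDK integration builds on. It mirrors the ConvertOpenAIRequestToClaude(modelName, inputRawJSON, stream) signature touched below; the package name, target field names, and example values are illustrative assumptions, not the SDK's actual API.

package main

import (
	"bytes"
	"fmt"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

// convertOpenAIRequestToCustom is a hypothetical translator following the same
// shape as the converters in this commit: model name + raw JSON in, JSON out.
// The target field names below are illustrative, not a real provider schema.
func convertOpenAIRequestToCustom(modelName string, inputRawJSON []byte, stream bool) []byte {
	rawJSON := bytes.Clone(inputRawJSON)
	out := []byte(`{}`)
	out, _ = sjson.SetBytes(out, "model", modelName)
	out, _ = sjson.SetBytes(out, "stream", stream)
	// Map OpenAI max_tokens onto a provider-specific limit field.
	if v := gjson.GetBytes(rawJSON, "max_tokens"); v.Exists() {
		out, _ = sjson.SetBytes(out, "max_output_tokens", v.Int())
	}
	// Pass the messages array through unchanged.
	if msgs := gjson.GetBytes(rawJSON, "messages"); msgs.Exists() {
		out, _ = sjson.SetRawBytes(out, "messages", []byte(msgs.Raw))
	}
	return out
}

func main() {
	req := []byte(`{"model":"gpt-4o","max_tokens":256,"messages":[{"role":"user","content":"hi"}]}`)
	fmt.Println(string(convertOpenAIRequestToCustom("custom-model", req, true)))
}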

View File

@@ -12,7 +12,6 @@ import (
"math/big"
"strings"
log "github.com/sirupsen/logrus"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
)
@@ -35,7 +34,6 @@ import (
// Returns:
// - []byte: The transformed request data in Claude Code API format
func ConvertOpenAIRequestToClaude(modelName string, inputRawJSON []byte, stream bool) []byte {
log.Debug("ConvertOpenAIRequestToClaude")
rawJSON := bytes.Clone(inputRawJSON)
// Base Claude Code API template with default max_tokens value
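
A minimal call-site sketch for the converter above; the import path and model name are placeholders, since the actual package path is not visible in this diff:

package main

import (
	"fmt"

	// Placeholder import path; the real package path is not shown in this diff.
	translator "example.com/cliproxyapi/translator/claude"
)

func main() {
	openaiReq := []byte(`{"model":"gpt-4o","messages":[{"role":"user","content":"hello"}]}`)
	// Non-streaming conversion; per the doc comment above, the result is a
	// Claude Code API-formatted request body.
	claudeReq := translator.ConvertOpenAIRequestToClaude("claude-sonnet-4", openaiReq, false)
	fmt.Println(string(claudeReq))
}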

View File

@@ -13,7 +13,6 @@ import (
"strings"
"time"
log "github.com/sirupsen/logrus"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
)
@@ -52,7 +51,6 @@ type ToolCallAccumulator struct {
// Returns:
// - []string: A slice of strings, each containing an OpenAI-compatible JSON response
func ConvertClaudeResponseToOpenAI(_ context.Context, modelName string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, param *any) []string {
log.Debug("ConvertClaudeResponseToOpenAI")
if *param == nil {
*param = &ConvertAnthropicResponseToOpenAIParams{
CreatedAt: 0,
@@ -280,7 +278,6 @@ func mapAnthropicStopReasonToOpenAI(anthropicReason string) string {
// Returns:
// - string: An OpenAI-compatible JSON response containing all message content and metadata
func ConvertClaudeResponseToOpenAINonStream(_ context.Context, _ string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, _ *any) string {
log.Debug("ConvertClaudeResponseToOpenAINonStream")
chunks := make([][]byte, 0)
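
A hedged sketch of how the streaming converter above might be driven from an SSE read loop: param starts nil, the converter initializes it on the first call (as the diff shows), and the same pointer is threaded through subsequent chunks. The import path and wiring are assumptions for illustration.

package main

import (
	"bufio"
	"context"
	"fmt"
	"io"
	"os"

	// Placeholder import path; the real package path is not shown in this diff.
	translator "example.com/cliproxyapi/translator/claude"
)

// forwardAsOpenAI reads Claude SSE lines from r and writes OpenAI-compatible
// SSE chunks to w, reusing one accumulator across calls as the converter expects.
func forwardAsOpenAI(ctx context.Context, r io.Reader, w io.Writer, origReq, upstreamReq []byte) {
	var state any
	sc := bufio.NewScanner(r)
	for sc.Scan() {
		for _, chunk := range translator.ConvertClaudeResponseToOpenAI(ctx, "gpt-4o", origReq, upstreamReq, sc.Bytes(), &state) {
			fmt.Fprintf(w, "data: %s\n\n", chunk)
		}
	}
}

func main() {
	// Stdin stands in for the upstream Claude SSE stream in this sketch.
	forwardAsOpenAI(context.Background(), os.Stdin, os.Stdout, []byte(`{}`), []byte(`{}`))
}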

View File

@@ -6,7 +6,6 @@ import (
"math/big"
"strings"
log "github.com/sirupsen/logrus"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
)
@@ -22,7 +21,6 @@ import (
// - max_output_tokens -> max_tokens
// - stream passthrough via parameter
func ConvertOpenAIResponsesRequestToClaude(modelName string, inputRawJSON []byte, stream bool) []byte {
log.Debug("ConvertOpenAIResponsesRequestToClaude")
rawJSON := bytes.Clone(inputRawJSON)
// Base Claude message payload
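
The same pattern for the Responses-style entry point above; per its doc comment, max_output_tokens maps to Claude's max_tokens and streaming comes from the parameter. The import path and payload are illustrative:

package main

import (
	"fmt"

	// Placeholder import path; the real package path is not shown in this diff.
	translator "example.com/cliproxyapi/translator/claude"
)

func main() {
	// Responses-style request; max_output_tokens should land in Claude's max_tokens.
	respReq := []byte(`{"model":"gpt-4o","input":"hello","max_output_tokens":512}`)
	claudeReq := translator.ConvertOpenAIResponsesRequestToClaude("claude-sonnet-4", respReq, true)
	fmt.Println(string(claudeReq))
}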

View File

@@ -8,7 +8,6 @@ import (
"strings"
"time"
log "github.com/sirupsen/logrus"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
)
@@ -43,7 +42,6 @@ func emitEvent(event string, payload string) string {
// ConvertClaudeResponseToOpenAIResponses converts Claude SSE to OpenAI Responses SSE events.
func ConvertClaudeResponseToOpenAIResponses(ctx context.Context, modelName string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, param *any) []string {
log.Debug("ConvertClaudeResponseToOpenAIResponses")
if *param == nil {
*param = &claudeToResponsesState{FuncArgsBuf: make(map[int]*strings.Builder), FuncNames: make(map[int]string), FuncCallIDs: make(map[int]string)}
}
@@ -391,7 +389,6 @@ func ConvertClaudeResponseToOpenAIResponses(ctx context.Context, modelName strin
// ConvertClaudeResponseToOpenAIResponsesNonStream aggregates Claude SSE into a single OpenAI Responses JSON.
func ConvertClaudeResponseToOpenAIResponsesNonStream(_ context.Context, _ string, originalRequestRawJSON, requestRawJSON, rawJSON []byte, _ *any) string {
log.Debug("ConvertClaudeResponseToOpenAIResponsesNonStream")
// Aggregate Claude SSE lines into a single OpenAI Responses JSON (non-stream)
// We follow the same aggregation logic as the streaming variant but produce
// one final object matching docs/out.json structure.
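
For the non-stream aggregation described in the comment above, a hedged call-site sketch: the whole Claude SSE body is read first and converted in a single call. The import path and the source of the body are assumptions.

package main

import (
	"context"
	"fmt"
	"io"
	"os"

	// Placeholder import path; the real package path is not shown in this diff.
	translator "example.com/cliproxyapi/translator/claude"
)

func main() {
	// Read the entire Claude SSE body (stdin stands in for the upstream response),
	// then aggregate it into one OpenAI Responses JSON object in a single call.
	body, _ := io.ReadAll(os.Stdin)
	out := translator.ConvertClaudeResponseToOpenAIResponsesNonStream(
		context.Background(), "", []byte(`{}`), []byte(`{}`), body, nil)
	fmt.Println(out)
}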