Compare commits


14 Commits

Author SHA1 Message Date
Luis Pater
dc9b4dd017 Merge branch 'kimi-provider-support-v2' into dev 2026-02-06 20:51:48 +08:00
Luis Pater
68cb81a258 feat: add Kimi authentication support and streamline device ID handling
- Introduced `RequestKimiToken` API for Kimi authentication flow.
- Integrated device ID management throughout Kimi-related components.
- Enhanced header management for Kimi API requests with device ID context.
2026-02-06 20:43:30 +08:00
hkfires
c874f19f2a refactor(config): disable automatic migration during server startup 2026-02-06 09:57:47 +08:00
test
f5f26f0cbe Add Kimi (Moonshot AI) provider support
- OAuth2 device authorization grant flow (RFC 8628) for authentication
- Streaming and non-streaming chat completions via OpenAI-compatible API
- Models: kimi-k2, kimi-k2-thinking, kimi-k2.5
- CLI `--kimi-login` command for device flow auth
- Token management with automatic refresh
- Thinking/reasoning effort support for thinking-enabled models

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-05 19:24:46 -05:00
Luis Pater
4b00312fef Merge pull request #1435 from tianyicui/fix/haiku-4-5-thinking-support
fix: Enable extended thinking support for Claude Haiku 4.5
2026-02-06 05:44:14 +08:00
Luis Pater
c5fd3db01e Merge pull request #1446 from qyhfrank/fix-claude-opus-4-6-model-metadata
fix(registry): correct Claude Opus 4.6 model metadata
2026-02-06 05:43:32 +08:00
Frank Qing
f870a9d2a7 fix(registry): correct Claude Opus 4.6 model metadata 2026-02-06 05:39:41 +08:00
Luis Pater
b4e034be1c refactor(executor): centralize Codex client version and user agent constants
- Introduced `codexClientVersion` and `codexUserAgent` constants for better maintainability.
- Updated `EnsureHeader` calls to use the new constants.
2026-02-06 05:30:28 +08:00
Luis Pater
a5a25dec57 refactor(translator, executor): remove redundant bytes.Clone calls for improved performance
- Replaced all instances of `bytes.Clone` with direct references to enhance efficiency.
- Simplified payload handling across executors and translators by eliminating unnecessary data duplication.
2026-02-06 03:26:29 +08:00
Luis Pater
c71905e5e8 Merge pull request #1440 from kvokka/add-cc-opus-4-6
feat(registry): register Claude 4.6 static data
2026-02-06 03:23:59 +08:00
kvokka
bc78d668ac feat(registry): register Claude 4.6 static data
Add model definition for Claude 4.6 Opus with 200k context length and thinking support capabilities.
2026-02-05 23:13:36 +04:00
Luis Pater
5bd0896ad7 feat(registry): add GPT 5.3 Codex model to static data 2026-02-06 01:52:41 +08:00
Luis Pater
09ecfbcaed refactor(executor): optimize payload cloning and streamline SDK translator usage
- Replaced unnecessary `bytes.Clone` calls for `opts.OriginalRequest` throughout executors.
- Introduced intermediate variable `originalPayloadSource` to simplify payload processing.
- Ensured better clarity and structure in request translation logic.
2026-02-06 01:44:20 +08:00
Tianyi Cui
706590c62a fix: Enable extended thinking support for Claude Haiku 4.5
Claude Haiku 4.5 (claude-haiku-4-5-20251001) supports extended thinking
according to Anthropic's official documentation:
https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking

The model was incorrectly marked as not supporting thinking in the static
model definitions. This fix adds ThinkingSupport with the same parameters
as other Claude 4.5 models (Sonnet, Opus):
- Min: 1024 tokens
- Max: 128000 tokens
- ZeroAllowed: true
- DynamicAllowed: false
2026-02-05 19:03:23 +08:00
59 changed files with 1687 additions and 220 deletions

View File

@@ -63,6 +63,7 @@ func main() {
var noBrowser bool
var oauthCallbackPort int
var antigravityLogin bool
var kimiLogin bool
var projectID string
var vertexImport string
var configPath string
@@ -78,6 +79,7 @@ func main() {
flag.BoolVar(&noBrowser, "no-browser", false, "Don't open browser automatically for OAuth")
flag.IntVar(&oauthCallbackPort, "oauth-callback-port", 0, "Override OAuth callback port (defaults to provider-specific port)")
flag.BoolVar(&antigravityLogin, "antigravity-login", false, "Login to Antigravity using OAuth")
flag.BoolVar(&kimiLogin, "kimi-login", false, "Login to Kimi using OAuth")
flag.StringVar(&projectID, "project_id", "", "Project ID (Gemini only, not required)")
flag.StringVar(&configPath, "config", DefaultConfigPath, "Configure File Path")
flag.StringVar(&vertexImport, "vertex-import", "", "Import Vertex service account key JSON file")
@@ -468,6 +470,8 @@ func main() {
cmd.DoIFlowLogin(cfg, options)
} else if iflowCookie {
cmd.DoIFlowCookieAuth(cfg, options)
} else if kimiLogin {
cmd.DoKimiLogin(cfg, options)
} else {
// In cloud deploy mode without config file, just wait for shutdown signals
if isCloudDeploy && !configFileExists {

View File

@@ -25,6 +25,7 @@ import (
"github.com/router-for-me/CLIProxyAPI/v6/internal/auth/codex"
geminiAuth "github.com/router-for-me/CLIProxyAPI/v6/internal/auth/gemini"
iflowauth "github.com/router-for-me/CLIProxyAPI/v6/internal/auth/iflow"
"github.com/router-for-me/CLIProxyAPI/v6/internal/auth/kimi"
"github.com/router-for-me/CLIProxyAPI/v6/internal/auth/qwen"
"github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
"github.com/router-for-me/CLIProxyAPI/v6/internal/misc"
@@ -1608,6 +1609,82 @@ func (h *Handler) RequestQwenToken(c *gin.Context) {
c.JSON(200, gin.H{"status": "ok", "url": authURL, "state": state})
}
func (h *Handler) RequestKimiToken(c *gin.Context) {
ctx := context.Background()
fmt.Println("Initializing Kimi authentication...")
state := fmt.Sprintf("kmi-%d", time.Now().UnixNano())
// Initialize Kimi auth service
kimiAuth := kimi.NewKimiAuth(h.cfg)
// Generate authorization URL
deviceFlow, errStartDeviceFlow := kimiAuth.StartDeviceFlow(ctx)
if errStartDeviceFlow != nil {
log.Errorf("Failed to generate authorization URL: %v", errStartDeviceFlow)
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to generate authorization url"})
return
}
authURL := deviceFlow.VerificationURIComplete
if authURL == "" {
authURL = deviceFlow.VerificationURI
}
RegisterOAuthSession(state, "kimi")
go func() {
fmt.Println("Waiting for authentication...")
authBundle, errWaitForAuthorization := kimiAuth.WaitForAuthorization(ctx, deviceFlow)
if errWaitForAuthorization != nil {
SetOAuthSessionError(state, "Authentication failed")
fmt.Printf("Authentication failed: %v\n", errWaitForAuthorization)
return
}
// Create token storage
tokenStorage := kimiAuth.CreateTokenStorage(authBundle)
metadata := map[string]any{
"type": "kimi",
"access_token": authBundle.TokenData.AccessToken,
"refresh_token": authBundle.TokenData.RefreshToken,
"token_type": authBundle.TokenData.TokenType,
"scope": authBundle.TokenData.Scope,
"timestamp": time.Now().UnixMilli(),
}
if authBundle.TokenData.ExpiresAt > 0 {
expired := time.Unix(authBundle.TokenData.ExpiresAt, 0).UTC().Format(time.RFC3339)
metadata["expired"] = expired
}
if strings.TrimSpace(authBundle.DeviceID) != "" {
metadata["device_id"] = strings.TrimSpace(authBundle.DeviceID)
}
fileName := fmt.Sprintf("kimi-%d.json", time.Now().UnixMilli())
record := &coreauth.Auth{
ID: fileName,
Provider: "kimi",
FileName: fileName,
Label: "Kimi User",
Storage: tokenStorage,
Metadata: metadata,
}
savedPath, errSave := h.saveTokenRecord(ctx, record)
if errSave != nil {
log.Errorf("Failed to save authentication tokens: %v", errSave)
SetOAuthSessionError(state, "Failed to save authentication tokens")
return
}
fmt.Printf("Authentication successful! Token saved to %s\n", savedPath)
fmt.Println("You can now use Kimi services through this CLI")
CompleteOAuthSession(state)
CompleteOAuthSessionsByProvider("kimi")
}()
c.JSON(200, gin.H{"status": "ok", "url": authURL, "state": state})
}
func (h *Handler) RequestIFlowToken(c *gin.Context) {
ctx := context.Background()

View File

@@ -623,6 +623,7 @@ func (s *Server) registerManagementRoutes() {
mgmt.GET("/gemini-cli-auth-url", s.mgmt.RequestGeminiCLIToken)
mgmt.GET("/antigravity-auth-url", s.mgmt.RequestAntigravityToken)
mgmt.GET("/qwen-auth-url", s.mgmt.RequestQwenToken)
mgmt.GET("/kimi-auth-url", s.mgmt.RequestKimiToken)
mgmt.GET("/iflow-auth-url", s.mgmt.RequestIFlowToken)
mgmt.POST("/iflow-auth-url", s.mgmt.RequestIFlowCookieToken)
mgmt.POST("/oauth-callback", s.mgmt.PostOAuthCallback)

internal/auth/kimi/kimi.go (new file, 396 lines)
View File

@@ -0,0 +1,396 @@
// Package kimi provides authentication and token management for Kimi (Moonshot AI) API.
// It handles the RFC 8628 OAuth2 Device Authorization Grant flow for secure authentication.
package kimi
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"os"
"runtime"
"strings"
"time"
"github.com/google/uuid"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
"github.com/router-for-me/CLIProxyAPI/v6/internal/util"
log "github.com/sirupsen/logrus"
)
const (
// kimiClientID is Kimi Code's OAuth client ID.
kimiClientID = "17e5f671-d194-4dfb-9706-5516cb48c098"
// kimiOAuthHost is the OAuth server endpoint.
kimiOAuthHost = "https://auth.kimi.com"
// kimiDeviceCodeURL is the endpoint for requesting device codes.
kimiDeviceCodeURL = kimiOAuthHost + "/api/oauth/device_authorization"
// kimiTokenURL is the endpoint for exchanging device codes for tokens.
kimiTokenURL = kimiOAuthHost + "/api/oauth/token"
// KimiAPIBaseURL is the base URL for Kimi API requests.
KimiAPIBaseURL = "https://api.kimi.com/coding/v1"
// defaultPollInterval is the default interval for polling token endpoint.
defaultPollInterval = 5 * time.Second
// maxPollDuration is the maximum time to wait for user authorization.
maxPollDuration = 15 * time.Minute
// refreshThresholdSeconds is when to refresh token before expiry (5 minutes).
refreshThresholdSeconds = 300
)
// KimiAuth handles Kimi authentication flow.
type KimiAuth struct {
deviceClient *DeviceFlowClient
cfg *config.Config
}
// NewKimiAuth creates a new KimiAuth service instance.
func NewKimiAuth(cfg *config.Config) *KimiAuth {
return &KimiAuth{
deviceClient: NewDeviceFlowClient(cfg),
cfg: cfg,
}
}
// StartDeviceFlow initiates the device flow authentication.
func (k *KimiAuth) StartDeviceFlow(ctx context.Context) (*DeviceCodeResponse, error) {
return k.deviceClient.RequestDeviceCode(ctx)
}
// WaitForAuthorization polls for user authorization and returns the auth bundle.
func (k *KimiAuth) WaitForAuthorization(ctx context.Context, deviceCode *DeviceCodeResponse) (*KimiAuthBundle, error) {
tokenData, err := k.deviceClient.PollForToken(ctx, deviceCode)
if err != nil {
return nil, err
}
return &KimiAuthBundle{
TokenData: tokenData,
DeviceID: k.deviceClient.deviceID,
}, nil
}
// CreateTokenStorage creates a new KimiTokenStorage from auth bundle.
func (k *KimiAuth) CreateTokenStorage(bundle *KimiAuthBundle) *KimiTokenStorage {
expired := ""
if bundle.TokenData.ExpiresAt > 0 {
expired = time.Unix(bundle.TokenData.ExpiresAt, 0).UTC().Format(time.RFC3339)
}
return &KimiTokenStorage{
AccessToken: bundle.TokenData.AccessToken,
RefreshToken: bundle.TokenData.RefreshToken,
TokenType: bundle.TokenData.TokenType,
Scope: bundle.TokenData.Scope,
DeviceID: strings.TrimSpace(bundle.DeviceID),
Expired: expired,
Type: "kimi",
}
}
// DeviceFlowClient handles the OAuth2 device flow for Kimi.
type DeviceFlowClient struct {
httpClient *http.Client
cfg *config.Config
deviceID string
}
// NewDeviceFlowClient creates a new device flow client.
func NewDeviceFlowClient(cfg *config.Config) *DeviceFlowClient {
return NewDeviceFlowClientWithDeviceID(cfg, "")
}
// NewDeviceFlowClientWithDeviceID creates a new device flow client with the specified device ID.
func NewDeviceFlowClientWithDeviceID(cfg *config.Config, deviceID string) *DeviceFlowClient {
client := &http.Client{Timeout: 30 * time.Second}
if cfg != nil {
client = util.SetProxy(&cfg.SDKConfig, client)
}
resolvedDeviceID := strings.TrimSpace(deviceID)
if resolvedDeviceID == "" {
resolvedDeviceID = getOrCreateDeviceID()
}
return &DeviceFlowClient{
httpClient: client,
cfg: cfg,
deviceID: resolvedDeviceID,
}
}
// getOrCreateDeviceID returns an in-memory device ID for the current authentication flow.
func getOrCreateDeviceID() string {
return uuid.New().String()
}
// getDeviceModel returns a device model string.
func getDeviceModel() string {
osName := runtime.GOOS
arch := runtime.GOARCH
switch osName {
case "darwin":
return fmt.Sprintf("macOS %s", arch)
case "windows":
return fmt.Sprintf("Windows %s", arch)
case "linux":
return fmt.Sprintf("Linux %s", arch)
default:
return fmt.Sprintf("%s %s", osName, arch)
}
}
// getHostname returns the machine hostname.
func getHostname() string {
hostname, err := os.Hostname()
if err != nil {
return "unknown"
}
return hostname
}
// commonHeaders returns headers required for Kimi API requests.
func (c *DeviceFlowClient) commonHeaders() map[string]string {
return map[string]string{
"X-Msh-Platform": "cli-proxy-api",
"X-Msh-Version": "1.0.0",
"X-Msh-Device-Name": getHostname(),
"X-Msh-Device-Model": getDeviceModel(),
"X-Msh-Device-Id": c.deviceID,
}
}
// RequestDeviceCode initiates the device flow by requesting a device code from Kimi.
func (c *DeviceFlowClient) RequestDeviceCode(ctx context.Context) (*DeviceCodeResponse, error) {
data := url.Values{}
data.Set("client_id", kimiClientID)
req, err := http.NewRequestWithContext(ctx, http.MethodPost, kimiDeviceCodeURL, strings.NewReader(data.Encode()))
if err != nil {
return nil, fmt.Errorf("kimi: failed to create device code request: %w", err)
}
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
req.Header.Set("Accept", "application/json")
for k, v := range c.commonHeaders() {
req.Header.Set(k, v)
}
resp, err := c.httpClient.Do(req)
if err != nil {
return nil, fmt.Errorf("kimi: device code request failed: %w", err)
}
defer func() {
if errClose := resp.Body.Close(); errClose != nil {
log.Errorf("kimi device code: close body error: %v", errClose)
}
}()
bodyBytes, err := io.ReadAll(resp.Body)
if err != nil {
return nil, fmt.Errorf("kimi: failed to read device code response: %w", err)
}
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("kimi: device code request failed with status %d: %s", resp.StatusCode, string(bodyBytes))
}
var deviceCode DeviceCodeResponse
if err = json.Unmarshal(bodyBytes, &deviceCode); err != nil {
return nil, fmt.Errorf("kimi: failed to parse device code response: %w", err)
}
return &deviceCode, nil
}
// PollForToken polls the token endpoint until the user authorizes or the device code expires.
func (c *DeviceFlowClient) PollForToken(ctx context.Context, deviceCode *DeviceCodeResponse) (*KimiTokenData, error) {
if deviceCode == nil {
return nil, fmt.Errorf("kimi: device code is nil")
}
interval := time.Duration(deviceCode.Interval) * time.Second
if interval < defaultPollInterval {
interval = defaultPollInterval
}
deadline := time.Now().Add(maxPollDuration)
if deviceCode.ExpiresIn > 0 {
codeDeadline := time.Now().Add(time.Duration(deviceCode.ExpiresIn) * time.Second)
if codeDeadline.Before(deadline) {
deadline = codeDeadline
}
}
ticker := time.NewTicker(interval)
defer ticker.Stop()
for {
select {
case <-ctx.Done():
return nil, fmt.Errorf("kimi: context cancelled: %w", ctx.Err())
case <-ticker.C:
if time.Now().After(deadline) {
return nil, fmt.Errorf("kimi: device code expired")
}
token, pollErr, shouldContinue := c.exchangeDeviceCode(ctx, deviceCode.DeviceCode)
if token != nil {
return token, nil
}
if !shouldContinue {
return nil, pollErr
}
// Continue polling
}
}
}
// exchangeDeviceCode attempts to exchange the device code for an access token.
// Returns (token, error, shouldContinue).
func (c *DeviceFlowClient) exchangeDeviceCode(ctx context.Context, deviceCode string) (*KimiTokenData, error, bool) {
data := url.Values{}
data.Set("client_id", kimiClientID)
data.Set("device_code", deviceCode)
data.Set("grant_type", "urn:ietf:params:oauth:grant-type:device_code")
req, err := http.NewRequestWithContext(ctx, http.MethodPost, kimiTokenURL, strings.NewReader(data.Encode()))
if err != nil {
return nil, fmt.Errorf("kimi: failed to create token request: %w", err), false
}
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
req.Header.Set("Accept", "application/json")
for k, v := range c.commonHeaders() {
req.Header.Set(k, v)
}
resp, err := c.httpClient.Do(req)
if err != nil {
return nil, fmt.Errorf("kimi: token request failed: %w", err), false
}
defer func() {
if errClose := resp.Body.Close(); errClose != nil {
log.Errorf("kimi token exchange: close body error: %v", errClose)
}
}()
bodyBytes, err := io.ReadAll(resp.Body)
if err != nil {
return nil, fmt.Errorf("kimi: failed to read token response: %w", err), false
}
// Parse response - Kimi returns 200 for both success and pending states
var oauthResp struct {
Error string `json:"error"`
ErrorDescription string `json:"error_description"`
AccessToken string `json:"access_token"`
RefreshToken string `json:"refresh_token"`
TokenType string `json:"token_type"`
ExpiresIn float64 `json:"expires_in"`
Scope string `json:"scope"`
}
if err = json.Unmarshal(bodyBytes, &oauthResp); err != nil {
return nil, fmt.Errorf("kimi: failed to parse token response: %w", err), false
}
if oauthResp.Error != "" {
switch oauthResp.Error {
case "authorization_pending":
return nil, nil, true // Continue polling
case "slow_down":
return nil, nil, true // Continue polling (with increased interval handled by caller)
case "expired_token":
return nil, fmt.Errorf("kimi: device code expired"), false
case "access_denied":
return nil, fmt.Errorf("kimi: access denied by user"), false
default:
return nil, fmt.Errorf("kimi: OAuth error: %s - %s", oauthResp.Error, oauthResp.ErrorDescription), false
}
}
if oauthResp.AccessToken == "" {
return nil, fmt.Errorf("kimi: empty access token in response"), false
}
var expiresAt int64
if oauthResp.ExpiresIn > 0 {
expiresAt = time.Now().Unix() + int64(oauthResp.ExpiresIn)
}
return &KimiTokenData{
AccessToken: oauthResp.AccessToken,
RefreshToken: oauthResp.RefreshToken,
TokenType: oauthResp.TokenType,
ExpiresAt: expiresAt,
Scope: oauthResp.Scope,
}, nil, false
}
// RefreshToken exchanges a refresh token for a new access token.
func (c *DeviceFlowClient) RefreshToken(ctx context.Context, refreshToken string) (*KimiTokenData, error) {
data := url.Values{}
data.Set("client_id", kimiClientID)
data.Set("grant_type", "refresh_token")
data.Set("refresh_token", refreshToken)
req, err := http.NewRequestWithContext(ctx, http.MethodPost, kimiTokenURL, strings.NewReader(data.Encode()))
if err != nil {
return nil, fmt.Errorf("kimi: failed to create refresh request: %w", err)
}
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
req.Header.Set("Accept", "application/json")
for k, v := range c.commonHeaders() {
req.Header.Set(k, v)
}
resp, err := c.httpClient.Do(req)
if err != nil {
return nil, fmt.Errorf("kimi: refresh request failed: %w", err)
}
defer func() {
if errClose := resp.Body.Close(); errClose != nil {
log.Errorf("kimi refresh token: close body error: %v", errClose)
}
}()
bodyBytes, err := io.ReadAll(resp.Body)
if err != nil {
return nil, fmt.Errorf("kimi: failed to read refresh response: %w", err)
}
if resp.StatusCode == http.StatusUnauthorized || resp.StatusCode == http.StatusForbidden {
return nil, fmt.Errorf("kimi: refresh token rejected (status %d)", resp.StatusCode)
}
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("kimi: refresh failed with status %d: %s", resp.StatusCode, string(bodyBytes))
}
var tokenResp struct {
AccessToken string `json:"access_token"`
RefreshToken string `json:"refresh_token"`
TokenType string `json:"token_type"`
ExpiresIn float64 `json:"expires_in"`
Scope string `json:"scope"`
}
if err = json.Unmarshal(bodyBytes, &tokenResp); err != nil {
return nil, fmt.Errorf("kimi: failed to parse refresh response: %w", err)
}
if tokenResp.AccessToken == "" {
return nil, fmt.Errorf("kimi: empty access token in refresh response")
}
var expiresAt int64
if tokenResp.ExpiresIn > 0 {
expiresAt = time.Now().Unix() + int64(tokenResp.ExpiresIn)
}
return &KimiTokenData{
AccessToken: tokenResp.AccessToken,
RefreshToken: tokenResp.RefreshToken,
TokenType: tokenResp.TokenType,
ExpiresAt: expiresAt,
Scope: tokenResp.Scope,
}, nil
}
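Taken together, a minimal sketch of how the API added in this file is meant to be driven end to end (a helper inside this module; error handling trimmed, config and token paths are placeholders):

import (
	"context"
	"fmt"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/auth/kimi"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)

func loginKimi() error {
	cfg, err := config.LoadConfig("config.yaml") // placeholder path
	if err != nil {
		return err
	}
	auth := kimi.NewKimiAuth(cfg)
	ctx := context.Background()
	flow, err := auth.StartDeviceFlow(ctx)
	if err != nil {
		return err
	}
	// Prefer the pre-filled verification URL when the server provides one.
	verifyURL := flow.VerificationURIComplete
	if verifyURL == "" {
		verifyURL = flow.VerificationURI
	}
	fmt.Println("Authorize at:", verifyURL)
	// Polls the token endpoint until approval, denial, or expiry.
	bundle, err := auth.WaitForAuthorization(ctx, flow)
	if err != nil {
		return err
	}
	storage := auth.CreateTokenStorage(bundle)
	return storage.SaveTokenToFile("auths/kimi.json") // placeholder path
}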

internal/auth/kimi/token.go (new file, 116 lines)
View File

@@ -0,0 +1,116 @@
// Package kimi provides authentication and token management functionality
// for Kimi (Moonshot AI) services. It handles OAuth2 device flow token storage,
// serialization, and retrieval for maintaining authenticated sessions with the Kimi API.
package kimi
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"time"
"github.com/router-for-me/CLIProxyAPI/v6/internal/misc"
)
// KimiTokenStorage stores OAuth2 token information for Kimi API authentication.
type KimiTokenStorage struct {
// AccessToken is the OAuth2 access token used for authenticating API requests.
AccessToken string `json:"access_token"`
// RefreshToken is the OAuth2 refresh token used to obtain new access tokens.
RefreshToken string `json:"refresh_token"`
// TokenType is the type of token, typically "Bearer".
TokenType string `json:"token_type"`
// Scope is the OAuth2 scope granted to the token.
Scope string `json:"scope,omitempty"`
// DeviceID is the OAuth device flow identifier used for Kimi requests.
DeviceID string `json:"device_id,omitempty"`
// Expired is the RFC3339 timestamp when the access token expires.
Expired string `json:"expired,omitempty"`
// Type indicates the authentication provider type, always "kimi" for this storage.
Type string `json:"type"`
}
// KimiTokenData holds the raw OAuth token response from Kimi.
type KimiTokenData struct {
// AccessToken is the OAuth2 access token.
AccessToken string `json:"access_token"`
// RefreshToken is the OAuth2 refresh token.
RefreshToken string `json:"refresh_token"`
// TokenType is the type of token, typically "Bearer".
TokenType string `json:"token_type"`
// ExpiresAt is the Unix timestamp when the token expires.
ExpiresAt int64 `json:"expires_at"`
// Scope is the OAuth2 scope granted to the token.
Scope string `json:"scope"`
}
// KimiAuthBundle bundles authentication data for storage.
type KimiAuthBundle struct {
// TokenData contains the OAuth token information.
TokenData *KimiTokenData
// DeviceID is the device identifier used during OAuth device flow.
DeviceID string
}
// DeviceCodeResponse represents Kimi's device code response.
type DeviceCodeResponse struct {
// DeviceCode is the device verification code.
DeviceCode string `json:"device_code"`
// UserCode is the code the user must enter at the verification URI.
UserCode string `json:"user_code"`
// VerificationURI is the URL where the user should enter the code.
VerificationURI string `json:"verification_uri,omitempty"`
// VerificationURIComplete is the URL with the code pre-filled.
VerificationURIComplete string `json:"verification_uri_complete"`
// ExpiresIn is the number of seconds until the device code expires.
ExpiresIn int `json:"expires_in"`
// Interval is the minimum number of seconds to wait between polling requests.
Interval int `json:"interval"`
}
// SaveTokenToFile serializes the Kimi token storage to a JSON file.
func (ts *KimiTokenStorage) SaveTokenToFile(authFilePath string) error {
misc.LogSavingCredentials(authFilePath)
ts.Type = "kimi"
if err := os.MkdirAll(filepath.Dir(authFilePath), 0700); err != nil {
return fmt.Errorf("failed to create directory: %v", err)
}
f, err := os.Create(authFilePath)
if err != nil {
return fmt.Errorf("failed to create token file: %w", err)
}
defer func() {
_ = f.Close()
}()
encoder := json.NewEncoder(f)
encoder.SetIndent("", " ")
if err = encoder.Encode(ts); err != nil {
return fmt.Errorf("failed to write token to file: %w", err)
}
return nil
}
// IsExpired checks if the token has expired.
func (ts *KimiTokenStorage) IsExpired() bool {
if ts.Expired == "" {
return false // No expiry set, assume valid
}
t, err := time.Parse(time.RFC3339, ts.Expired)
if err != nil {
return true // Has expiry string but can't parse
}
// Consider expired if within refresh threshold
return time.Now().Add(time.Duration(refreshThresholdSeconds) * time.Second).After(t)
}
// NeedsRefresh checks if the token should be refreshed.
func (ts *KimiTokenStorage) NeedsRefresh() bool {
if ts.RefreshToken == "" {
return false // Can't refresh without refresh token
}
return ts.IsExpired()
}
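A sketch of how these helpers could pair with DeviceFlowClient.RefreshToken from kimi.go in the same package; reading the JSON back by hand is an assumption, since this diff adds SaveTokenToFile but no matching loader (additional stdlib imports omitted):

func refreshIfNeeded(ctx context.Context, cfg *config.Config, path string) error {
	raw, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	var ts KimiTokenStorage
	if err = json.Unmarshal(raw, &ts); err != nil {
		return err
	}
	// NeedsRefresh is false when there is no refresh token or expiry is not near.
	if !ts.NeedsRefresh() {
		return nil
	}
	// Reuse the stored device ID so Kimi sees the same device across refreshes.
	client := NewDeviceFlowClientWithDeviceID(cfg, ts.DeviceID)
	tok, err := client.RefreshToken(ctx, ts.RefreshToken)
	if err != nil {
		return err
	}
	ts.AccessToken = tok.AccessToken
	if tok.RefreshToken != "" {
		ts.RefreshToken = tok.RefreshToken
	}
	if tok.ExpiresAt > 0 {
		ts.Expired = time.Unix(tok.ExpiresAt, 0).UTC().Format(time.RFC3339)
	}
	return ts.SaveTokenToFile(path)
}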

View File

@@ -19,6 +19,7 @@ func newAuthManager() *sdkAuth.Manager {
sdkAuth.NewQwenAuthenticator(),
sdkAuth.NewIFlowAuthenticator(),
sdkAuth.NewAntigravityAuthenticator(),
sdkAuth.NewKimiAuthenticator(),
)
return manager
}

View File

@@ -0,0 +1,44 @@
package cmd
import (
"context"
"fmt"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
sdkAuth "github.com/router-for-me/CLIProxyAPI/v6/sdk/auth"
log "github.com/sirupsen/logrus"
)
// DoKimiLogin triggers the OAuth device flow for Kimi (Moonshot AI) and saves tokens.
// It initiates the device flow authentication, displays the verification URL for the user,
// and waits for authorization before saving the tokens.
//
// Parameters:
// - cfg: The application configuration containing proxy and auth directory settings
// - options: Login options including browser behavior settings
func DoKimiLogin(cfg *config.Config, options *LoginOptions) {
if options == nil {
options = &LoginOptions{}
}
manager := newAuthManager()
authOpts := &sdkAuth.LoginOptions{
NoBrowser: options.NoBrowser,
Metadata: map[string]string{},
Prompt: options.Prompt,
}
record, savedPath, err := manager.Login(context.Background(), "kimi", cfg, authOpts)
if err != nil {
log.Errorf("Kimi authentication failed: %v", err)
return
}
if savedPath != "" {
fmt.Printf("Authentication saved to %s\n", savedPath)
}
if record != nil && record.Label != "" {
fmt.Printf("Authenticated as %s\n", record.Label)
}
fmt.Println("Kimi authentication successful!")
}
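A minimal sketch of calling this helper programmatically, which is the same path the new --kimi-login flag takes; the config path is a placeholder, and the snippet assumes it sits next to this file in package cmd:

func runKimiLoginExample() {
	cfg, err := config.LoadConfig("config.yaml") // placeholder path
	if err != nil {
		log.Fatalf("load config: %v", err)
	}
	// NoBrowser mirrors the --no-browser flag; other LoginOptions keep their defaults.
	DoKimiLogin(cfg, &LoginOptions{NoBrowser: true})
}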

View File

@@ -493,14 +493,15 @@ func LoadConfig(configFile string) (*Config, error) {
// If optional is true and the file is missing, it returns an empty Config.
// If optional is true and the file is empty or invalid, it returns an empty Config.
func LoadConfigOptional(configFile string, optional bool) (*Config, error) {
// Perform oauth-model-alias migration before loading config.
// This migrates oauth-model-mappings to oauth-model-alias if needed.
if migrated, err := MigrateOAuthModelAlias(configFile); err != nil {
// Log warning but don't fail - config loading should still work
fmt.Printf("Warning: oauth-model-alias migration failed: %v\n", err)
} else if migrated {
fmt.Println("Migrated oauth-model-mappings to oauth-model-alias")
}
// NOTE: Startup oauth-model-alias migration is intentionally disabled.
// Reason: avoid mutating config.yaml during server startup.
// Re-enable the block below if automatic startup migration is needed again.
// if migrated, err := MigrateOAuthModelAlias(configFile); err != nil {
// // Log warning but don't fail - config loading should still work
// fmt.Printf("Warning: oauth-model-alias migration failed: %v\n", err)
// } else if migrated {
// fmt.Println("Migrated oauth-model-mappings to oauth-model-alias")
// }
// Read the entire configuration file into memory.
data, err := os.ReadFile(configFile)
@@ -540,18 +541,21 @@ func LoadConfigOptional(configFile string, optional bool) (*Config, error) {
return nil, fmt.Errorf("failed to parse config file: %w", err)
}
var legacy legacyConfigData
if errLegacy := yaml.Unmarshal(data, &legacy); errLegacy == nil {
if cfg.migrateLegacyGeminiKeys(legacy.LegacyGeminiKeys) {
cfg.legacyMigrationPending = true
}
if cfg.migrateLegacyOpenAICompatibilityKeys(legacy.OpenAICompat) {
cfg.legacyMigrationPending = true
}
if cfg.migrateLegacyAmpConfig(&legacy) {
cfg.legacyMigrationPending = true
}
}
// NOTE: Startup legacy key migration is intentionally disabled.
// Reason: avoid mutating config.yaml during server startup.
// Re-enable the block below if automatic startup migration is needed again.
// var legacy legacyConfigData
// if errLegacy := yaml.Unmarshal(data, &legacy); errLegacy == nil {
// if cfg.migrateLegacyGeminiKeys(legacy.LegacyGeminiKeys) {
// cfg.legacyMigrationPending = true
// }
// if cfg.migrateLegacyOpenAICompatibilityKeys(legacy.OpenAICompat) {
// cfg.legacyMigrationPending = true
// }
// if cfg.migrateLegacyAmpConfig(&legacy) {
// cfg.legacyMigrationPending = true
// }
// }
// Hash remote management key if plaintext is detected (nested)
// We consider a value to be already hashed if it looks like a bcrypt hash ($2a$, $2b$, or $2y$ prefix).
@@ -612,17 +616,20 @@ func LoadConfigOptional(configFile string, optional bool) (*Config, error) {
// Validate raw payload rules and drop invalid entries.
cfg.SanitizePayloadRules()
if cfg.legacyMigrationPending {
fmt.Println("Detected legacy configuration keys, attempting to persist the normalized config...")
if !optional && configFile != "" {
if err := SaveConfigPreserveComments(configFile, &cfg); err != nil {
return nil, fmt.Errorf("failed to persist migrated legacy config: %w", err)
}
fmt.Println("Legacy configuration normalized and persisted.")
} else {
fmt.Println("Legacy configuration normalized in memory; persistence skipped.")
}
}
// NOTE: Legacy migration persistence is intentionally disabled together with
// startup legacy migration to keep startup read-only for config.yaml.
// Re-enable the block below if automatic startup migration is needed again.
// if cfg.legacyMigrationPending {
// fmt.Println("Detected legacy configuration keys, attempting to persist the normalized config...")
// if !optional && configFile != "" {
// if err := SaveConfigPreserveComments(configFile, &cfg); err != nil {
// return nil, fmt.Errorf("failed to persist migrated legacy config: %w", err)
// }
// fmt.Println("Legacy configuration normalized and persisted.")
// } else {
// fmt.Println("Legacy configuration normalized in memory; persistence skipped.")
// }
// }
// Return the populated configuration struct.
return &cfg, nil

View File

@@ -17,6 +17,7 @@ var antigravityModelConversionTable = map[string]string{
"gemini-claude-sonnet-4-5": "claude-sonnet-4-5",
"gemini-claude-sonnet-4-5-thinking": "claude-sonnet-4-5-thinking",
"gemini-claude-opus-4-5-thinking": "claude-opus-4-5-thinking",
"gemini-claude-opus-4-6-thinking": "claude-opus-4-6-thinking",
}
// defaultAntigravityAliases returns the default oauth-model-alias configuration
@@ -30,6 +31,7 @@ func defaultAntigravityAliases() []OAuthModelAlias {
{Name: "claude-sonnet-4-5", Alias: "gemini-claude-sonnet-4-5"},
{Name: "claude-sonnet-4-5-thinking", Alias: "gemini-claude-sonnet-4-5-thinking"},
{Name: "claude-opus-4-5-thinking", Alias: "gemini-claude-opus-4-5-thinking"},
{Name: "claude-opus-4-6-thinking", Alias: "gemini-claude-opus-4-6-thinking"},
}
}

View File

@@ -131,6 +131,9 @@ func TestMigrateOAuthModelAlias_ConvertsAntigravityModels(t *testing.T) {
if !strings.Contains(content, "claude-opus-4-5-thinking") {
t.Fatal("expected missing default alias claude-opus-4-5-thinking to be added")
}
if !strings.Contains(content, "claude-opus-4-6-thinking") {
t.Fatal("expected missing default alias claude-opus-4-6-thinking to be added")
}
}
func TestMigrateOAuthModelAlias_AddsDefaultIfNeitherExists(t *testing.T) {

View File

@@ -15,7 +15,7 @@ func GetClaudeModels() []*ModelInfo {
DisplayName: "Claude 4.5 Haiku",
ContextLength: 200000,
MaxCompletionTokens: 64000,
// Thinking: not supported for Haiku models
Thinking: &ThinkingSupport{Min: 1024, Max: 128000, ZeroAllowed: true, DynamicAllowed: false},
},
{
ID: "claude-sonnet-4-5-20250929",
@@ -28,6 +28,18 @@ func GetClaudeModels() []*ModelInfo {
MaxCompletionTokens: 64000,
Thinking: &ThinkingSupport{Min: 1024, Max: 128000, ZeroAllowed: true, DynamicAllowed: false},
},
{
ID: "claude-opus-4-6",
Object: "model",
Created: 1770318000, // 2026-02-05
OwnedBy: "anthropic",
Type: "claude",
DisplayName: "Claude 4.6 Opus",
Description: "Premium model combining maximum intelligence with practical performance",
ContextLength: 1000000,
MaxCompletionTokens: 128000,
Thinking: &ThinkingSupport{Min: 1024, Max: 128000, ZeroAllowed: true, DynamicAllowed: false},
},
{
ID: "claude-opus-4-5-20251101",
Object: "model",
@@ -716,6 +728,20 @@ func GetOpenAIModels() []*ModelInfo {
SupportedParameters: []string{"tools"},
Thinking: &ThinkingSupport{Levels: []string{"low", "medium", "high", "xhigh"}},
},
{
ID: "gpt-5.3-codex",
Object: "model",
Created: 1770307200,
OwnedBy: "openai",
Type: "openai",
Version: "gpt-5.3",
DisplayName: "GPT 5.3 Codex",
Description: "Stable version of GPT 5.3 Codex, The best model for coding and agentic tasks across domains.",
ContextLength: 400000,
MaxCompletionTokens: 128000,
SupportedParameters: []string{"tools"},
Thinking: &ThinkingSupport{Levels: []string{"low", "medium", "high", "xhigh"}},
},
}
}
@@ -840,8 +866,50 @@ func GetAntigravityModelConfig() map[string]*AntigravityModelConfig {
"gemini-3-flash": {Thinking: &ThinkingSupport{Min: 128, Max: 32768, ZeroAllowed: false, DynamicAllowed: true, Levels: []string{"minimal", "low", "medium", "high"}}},
"claude-sonnet-4-5-thinking": {Thinking: &ThinkingSupport{Min: 1024, Max: 128000, ZeroAllowed: true, DynamicAllowed: true}, MaxCompletionTokens: 64000},
"claude-opus-4-5-thinking": {Thinking: &ThinkingSupport{Min: 1024, Max: 128000, ZeroAllowed: true, DynamicAllowed: true}, MaxCompletionTokens: 64000},
"claude-opus-4-6-thinking": {Thinking: &ThinkingSupport{Min: 1024, Max: 128000, ZeroAllowed: true, DynamicAllowed: true}, MaxCompletionTokens: 128000},
"claude-sonnet-4-5": {MaxCompletionTokens: 64000},
"gpt-oss-120b-medium": {},
"tab_flash_lite_preview": {},
}
}
// GetKimiModels returns the standard Kimi (Moonshot AI) model definitions
func GetKimiModels() []*ModelInfo {
return []*ModelInfo{
{
ID: "kimi-k2",
Object: "model",
Created: 1752192000, // 2025-07-11
OwnedBy: "moonshot",
Type: "kimi",
DisplayName: "Kimi K2",
Description: "Kimi K2 - Moonshot AI's flagship coding model",
ContextLength: 131072,
MaxCompletionTokens: 32768,
},
{
ID: "kimi-k2-thinking",
Object: "model",
Created: 1762387200, // 2025-11-06
OwnedBy: "moonshot",
Type: "kimi",
DisplayName: "Kimi K2 Thinking",
Description: "Kimi K2 Thinking - Extended reasoning model",
ContextLength: 131072,
MaxCompletionTokens: 32768,
Thinking: &ThinkingSupport{Min: 1024, Max: 32000, ZeroAllowed: true, DynamicAllowed: true},
},
{
ID: "kimi-k2.5",
Object: "model",
Created: 1769472000, // 2026-01-26
OwnedBy: "moonshot",
Type: "kimi",
DisplayName: "Kimi K2.5",
Description: "Kimi K2.5 - Latest Moonshot AI coding model with improved capabilities",
ContextLength: 131072,
MaxCompletionTokens: 32768,
Thinking: &ThinkingSupport{Min: 1024, Max: 32000, ZeroAllowed: true, DynamicAllowed: true},
},
}
}
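A quick sketch of how a registry consumer might distinguish the thinking-enabled Kimi entries, using only the fields defined above (assumed to live in the same package, with "fmt" imported):

func printKimiThinkingSupport() {
	for _, m := range GetKimiModels() {
		if m.Thinking != nil {
			fmt.Printf("%s: thinking budget %d..%d tokens (dynamic=%t)\n",
				m.ID, m.Thinking.Min, m.Thinking.Max, m.Thinking.DynamicAllowed)
			continue
		}
		fmt.Printf("%s: no thinking support\n", m.ID)
	}
}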

View File

@@ -141,7 +141,7 @@ func (e *AIStudioExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth,
URL: endpoint,
Method: http.MethodPost,
Headers: wsReq.Headers.Clone(),
Body: bytes.Clone(body.payload),
Body: body.payload,
Provider: e.Identifier(),
AuthID: authID,
AuthLabel: authLabel,
@@ -156,14 +156,14 @@ func (e *AIStudioExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth,
}
recordAPIResponseMetadata(ctx, e.cfg, wsResp.Status, wsResp.Headers.Clone())
if len(wsResp.Body) > 0 {
appendAPIResponseChunk(ctx, e.cfg, bytes.Clone(wsResp.Body))
appendAPIResponseChunk(ctx, e.cfg, wsResp.Body)
}
if wsResp.Status < 200 || wsResp.Status >= 300 {
return resp, statusErr{code: wsResp.Status, msg: string(wsResp.Body)}
}
reporter.publish(ctx, parseGeminiUsage(wsResp.Body))
var param any
out := sdktranslator.TranslateNonStream(ctx, body.toFormat, opts.SourceFormat, req.Model, bytes.Clone(opts.OriginalRequest), bytes.Clone(translatedReq), bytes.Clone(wsResp.Body), &param)
out := sdktranslator.TranslateNonStream(ctx, body.toFormat, opts.SourceFormat, req.Model, opts.OriginalRequest, translatedReq, wsResp.Body, &param)
resp = cliproxyexecutor.Response{Payload: ensureColonSpacedJSON([]byte(out))}
return resp, nil
}
@@ -199,7 +199,7 @@ func (e *AIStudioExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth
URL: endpoint,
Method: http.MethodPost,
Headers: wsReq.Headers.Clone(),
Body: bytes.Clone(body.payload),
Body: body.payload,
Provider: e.Identifier(),
AuthID: authID,
AuthLabel: authLabel,
@@ -225,7 +225,7 @@ func (e *AIStudioExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth
}
var body bytes.Buffer
if len(firstEvent.Payload) > 0 {
appendAPIResponseChunk(ctx, e.cfg, bytes.Clone(firstEvent.Payload))
appendAPIResponseChunk(ctx, e.cfg, firstEvent.Payload)
body.Write(firstEvent.Payload)
}
if firstEvent.Type == wsrelay.MessageTypeStreamEnd {
@@ -244,7 +244,7 @@ func (e *AIStudioExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth
metadataLogged = true
}
if len(event.Payload) > 0 {
appendAPIResponseChunk(ctx, e.cfg, bytes.Clone(event.Payload))
appendAPIResponseChunk(ctx, e.cfg, event.Payload)
body.Write(event.Payload)
}
if event.Type == wsrelay.MessageTypeStreamEnd {
@@ -274,12 +274,12 @@ func (e *AIStudioExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth
}
case wsrelay.MessageTypeStreamChunk:
if len(event.Payload) > 0 {
appendAPIResponseChunk(ctx, e.cfg, bytes.Clone(event.Payload))
appendAPIResponseChunk(ctx, e.cfg, event.Payload)
filtered := FilterSSEUsageMetadata(event.Payload)
if detail, ok := parseGeminiStreamUsage(filtered); ok {
reporter.publish(ctx, detail)
}
lines := sdktranslator.TranslateStream(ctx, body.toFormat, opts.SourceFormat, req.Model, bytes.Clone(opts.OriginalRequest), translatedReq, bytes.Clone(filtered), &param)
lines := sdktranslator.TranslateStream(ctx, body.toFormat, opts.SourceFormat, req.Model, opts.OriginalRequest, translatedReq, filtered, &param)
for i := range lines {
out <- cliproxyexecutor.StreamChunk{Payload: ensureColonSpacedJSON([]byte(lines[i]))}
}
@@ -293,9 +293,9 @@ func (e *AIStudioExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth
metadataLogged = true
}
if len(event.Payload) > 0 {
appendAPIResponseChunk(ctx, e.cfg, bytes.Clone(event.Payload))
appendAPIResponseChunk(ctx, e.cfg, event.Payload)
}
lines := sdktranslator.TranslateStream(ctx, body.toFormat, opts.SourceFormat, req.Model, bytes.Clone(opts.OriginalRequest), translatedReq, bytes.Clone(event.Payload), &param)
lines := sdktranslator.TranslateStream(ctx, body.toFormat, opts.SourceFormat, req.Model, opts.OriginalRequest, translatedReq, event.Payload, &param)
for i := range lines {
out <- cliproxyexecutor.StreamChunk{Payload: ensureColonSpacedJSON([]byte(lines[i]))}
}
@@ -350,7 +350,7 @@ func (e *AIStudioExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.A
URL: endpoint,
Method: http.MethodPost,
Headers: wsReq.Headers.Clone(),
Body: bytes.Clone(body.payload),
Body: body.payload,
Provider: e.Identifier(),
AuthID: authID,
AuthLabel: authLabel,
@@ -364,7 +364,7 @@ func (e *AIStudioExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.A
}
recordAPIResponseMetadata(ctx, e.cfg, resp.Status, resp.Headers.Clone())
if len(resp.Body) > 0 {
appendAPIResponseChunk(ctx, e.cfg, bytes.Clone(resp.Body))
appendAPIResponseChunk(ctx, e.cfg, resp.Body)
}
if resp.Status < 200 || resp.Status >= 300 {
return cliproxyexecutor.Response{}, statusErr{code: resp.Status, msg: string(resp.Body)}
@@ -373,7 +373,7 @@ func (e *AIStudioExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.A
if totalTokens <= 0 {
return cliproxyexecutor.Response{}, fmt.Errorf("wsrelay: totalTokens missing in response")
}
translated := sdktranslator.TranslateTokenCount(ctx, body.toFormat, opts.SourceFormat, totalTokens, bytes.Clone(resp.Body))
translated := sdktranslator.TranslateTokenCount(ctx, body.toFormat, opts.SourceFormat, totalTokens, resp.Body)
return cliproxyexecutor.Response{Payload: []byte(translated)}, nil
}
@@ -393,12 +393,13 @@ func (e *AIStudioExecutor) translateRequest(req cliproxyexecutor.Request, opts c
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, stream)
payload := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), stream)
payload := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, stream)
payload, err := thinking.ApplyThinking(payload, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
return nil, translatedPayload{}, err
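The safety of dropping bytes.Clone here rests on the callees treating these payloads as read-only, which is the premise of this refactor; a standalone illustration (not code from this repo) of the aliasing that the copies used to prevent:

package main

import (
	"bytes"
	"fmt"
)

func main() {
	payload := []byte(`{"model":"kimi-k2"}`)
	alias := payload               // after the refactor: shares the backing array
	cloned := bytes.Clone(payload) // before the refactor: independent copy
	alias[2] = 'M'                 // a mutating consumer would now also change payload
	fmt.Println(string(payload))   // {"Model":"kimi-k2"}
	fmt.Println(string(cloned))    // {"model":"kimi-k2"}
}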

View File

@@ -133,12 +133,13 @@ func (e *AntigravityExecutor) Execute(ctx context.Context, auth *cliproxyauth.Au
from := opts.SourceFormat
to := sdktranslator.FromString("antigravity")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
translated := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
translated := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
translated, err = thinking.ApplyThinking(translated, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -230,7 +231,7 @@ attemptLoop:
reporter.publish(ctx, parseAntigravityUsage(bodyBytes))
var param any
converted := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), translated, bodyBytes, &param)
converted := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, opts.OriginalRequest, translated, bodyBytes, &param)
resp = cliproxyexecutor.Response{Payload: []byte(converted)}
reporter.ensurePublished(ctx)
return resp, nil
@@ -274,12 +275,13 @@ func (e *AntigravityExecutor) executeClaudeNonStream(ctx context.Context, auth *
from := opts.SourceFormat
to := sdktranslator.FromString("antigravity")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
translated := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
translated := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, true)
translated, err = thinking.ApplyThinking(translated, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -433,7 +435,7 @@ attemptLoop:
reporter.publish(ctx, parseAntigravityUsage(resp.Payload))
var param any
converted := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), translated, resp.Payload, &param)
converted := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, opts.OriginalRequest, translated, resp.Payload, &param)
resp = cliproxyexecutor.Response{Payload: []byte(converted)}
reporter.ensurePublished(ctx)
@@ -665,12 +667,13 @@ func (e *AntigravityExecutor) ExecuteStream(ctx context.Context, auth *cliproxya
from := opts.SourceFormat
to := sdktranslator.FromString("antigravity")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
translated := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
translated := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, true)
translated, err = thinking.ApplyThinking(translated, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -800,12 +803,12 @@ attemptLoop:
reporter.publish(ctx, detail)
}
chunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), translated, bytes.Clone(payload), &param)
chunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, opts.OriginalRequest, translated, bytes.Clone(payload), &param)
for i := range chunks {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(chunks[i])}
}
}
tail := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), translated, []byte("[DONE]"), &param)
tail := sdktranslator.TranslateStream(ctx, to, from, req.Model, opts.OriginalRequest, translated, []byte("[DONE]"), &param)
for i := range tail {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(tail[i])}
}
@@ -872,7 +875,7 @@ func (e *AntigravityExecutor) CountTokens(ctx context.Context, auth *cliproxyaut
respCtx := context.WithValue(ctx, "alt", opts.Alt)
// Prepare payload once (doesn't depend on baseURL)
payload := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
payload := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
payload, err := thinking.ApplyThinking(payload, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {

View File

@@ -100,12 +100,13 @@ func (e *ClaudeExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, r
to := sdktranslator.FromString("claude")
// Use streaming translation to preserve function calling, except for claude.
stream := from != to
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, stream)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), stream)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, stream)
body, _ = sjson.SetBytes(body, "model", baseModel)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
@@ -216,7 +217,7 @@ func (e *ClaudeExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, r
to,
from,
req.Model,
bytes.Clone(opts.OriginalRequest),
opts.OriginalRequest,
bodyForTranslation,
data,
&param,
@@ -240,12 +241,13 @@ func (e *ClaudeExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.A
defer reporter.trackFailure(ctx, &err)
from := opts.SourceFormat
to := sdktranslator.FromString("claude")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, true)
body, _ = sjson.SetBytes(body, "model", baseModel)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
@@ -381,7 +383,7 @@ func (e *ClaudeExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.A
to,
from,
req.Model,
bytes.Clone(opts.OriginalRequest),
opts.OriginalRequest,
bodyForTranslation,
bytes.Clone(line),
&param,
@@ -411,7 +413,7 @@ func (e *ClaudeExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Aut
to := sdktranslator.FromString("claude")
// Use streaming translation to preserve function calling, except for claude.
stream := from != to
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), stream)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, stream)
body, _ = sjson.SetBytes(body, "model", baseModel)
if !strings.HasPrefix(baseModel, "claude-3-5-haiku") {

View File

@@ -27,6 +27,11 @@ import (
"github.com/google/uuid"
)
const (
codexClientVersion = "0.98.0"
codexUserAgent = "codex_cli_rs/0.98.0 (Mac OS 26.0.1; arm64) Apple_Terminal/464"
)
var dataTag = []byte("data:")
// CodexExecutor is a stateless executor for Codex (OpenAI Responses API entrypoint).
@@ -88,12 +93,13 @@ func (e *CodexExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, re
from := opts.SourceFormat
to := sdktranslator.FromString("codex")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -176,7 +182,7 @@ func (e *CodexExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, re
}
var param any
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(originalPayload), body, line, &param)
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, originalPayload, body, line, &param)
resp = cliproxyexecutor.Response{Payload: []byte(out)}
return resp, nil
}
@@ -197,12 +203,13 @@ func (e *CodexExecutor) executeCompact(ctx context.Context, auth *cliproxyauth.A
from := opts.SourceFormat
to := sdktranslator.FromString("openai-response")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -265,7 +272,7 @@ func (e *CodexExecutor) executeCompact(ctx context.Context, auth *cliproxyauth.A
reporter.publish(ctx, parseOpenAIUsage(data))
reporter.ensurePublished(ctx)
var param any
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(originalPayload), body, data, &param)
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, originalPayload, body, data, &param)
resp = cliproxyexecutor.Response{Payload: []byte(out)}
return resp, nil
}
@@ -286,12 +293,13 @@ func (e *CodexExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Au
from := opts.SourceFormat
to := sdktranslator.FromString("codex")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, true)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -378,7 +386,7 @@ func (e *CodexExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Au
}
}
chunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(originalPayload), body, bytes.Clone(line), &param)
chunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, originalPayload, body, bytes.Clone(line), &param)
for i := range chunks {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(chunks[i])}
}
@@ -397,7 +405,7 @@ func (e *CodexExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Auth
from := opts.SourceFormat
to := sdktranslator.FromString("codex")
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
body, err := thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -634,10 +642,10 @@ func applyCodexHeaders(r *http.Request, auth *cliproxyauth.Auth, token string, s
ginHeaders = ginCtx.Request.Header
}
misc.EnsureHeader(r.Header, ginHeaders, "Version", "0.21.0")
misc.EnsureHeader(r.Header, ginHeaders, "Version", codexClientVersion)
misc.EnsureHeader(r.Header, ginHeaders, "Openai-Beta", "responses=experimental")
misc.EnsureHeader(r.Header, ginHeaders, "Session_id", uuid.NewString())
misc.EnsureHeader(r.Header, ginHeaders, "User-Agent", "codex_cli_rs/0.50.0 (Mac OS 26.0.1; arm64) Apple_Terminal/464")
misc.EnsureHeader(r.Header, ginHeaders, "User-Agent", codexUserAgent)
if stream {
r.Header.Set("Accept", "text/event-stream")

View File

@@ -119,12 +119,13 @@ func (e *GeminiCLIExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth
from := opts.SourceFormat
to := sdktranslator.FromString("gemini-cli")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
basePayload := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
basePayload := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
basePayload, err = thinking.ApplyThinking(basePayload, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -223,7 +224,7 @@ func (e *GeminiCLIExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth
if httpResp.StatusCode >= 200 && httpResp.StatusCode < 300 {
reporter.publish(ctx, parseGeminiCLIUsage(data))
var param any
out := sdktranslator.TranslateNonStream(respCtx, to, from, attemptModel, bytes.Clone(opts.OriginalRequest), payload, data, &param)
out := sdktranslator.TranslateNonStream(respCtx, to, from, attemptModel, opts.OriginalRequest, payload, data, &param)
resp = cliproxyexecutor.Response{Payload: []byte(out)}
return resp, nil
}
@@ -272,12 +273,13 @@ func (e *GeminiCLIExecutor) ExecuteStream(ctx context.Context, auth *cliproxyaut
from := opts.SourceFormat
to := sdktranslator.FromString("gemini-cli")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
basePayload := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
basePayload := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, true)
basePayload, err = thinking.ApplyThinking(basePayload, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -399,14 +401,14 @@ func (e *GeminiCLIExecutor) ExecuteStream(ctx context.Context, auth *cliproxyaut
reporter.publish(ctx, detail)
}
if bytes.HasPrefix(line, dataTag) {
segments := sdktranslator.TranslateStream(respCtx, to, from, attemptModel, bytes.Clone(opts.OriginalRequest), reqBody, bytes.Clone(line), &param)
segments := sdktranslator.TranslateStream(respCtx, to, from, attemptModel, opts.OriginalRequest, reqBody, bytes.Clone(line), &param)
for i := range segments {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(segments[i])}
}
}
}
segments := sdktranslator.TranslateStream(respCtx, to, from, attemptModel, bytes.Clone(opts.OriginalRequest), reqBody, bytes.Clone([]byte("[DONE]")), &param)
segments := sdktranslator.TranslateStream(respCtx, to, from, attemptModel, opts.OriginalRequest, reqBody, []byte("[DONE]"), &param)
for i := range segments {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(segments[i])}
}
@@ -428,12 +430,12 @@ func (e *GeminiCLIExecutor) ExecuteStream(ctx context.Context, auth *cliproxyaut
appendAPIResponseChunk(ctx, e.cfg, data)
reporter.publish(ctx, parseGeminiCLIUsage(data))
var param any
segments := sdktranslator.TranslateStream(respCtx, to, from, attemptModel, bytes.Clone(opts.OriginalRequest), reqBody, data, &param)
segments := sdktranslator.TranslateStream(respCtx, to, from, attemptModel, opts.OriginalRequest, reqBody, data, &param)
for i := range segments {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(segments[i])}
}
segments = sdktranslator.TranslateStream(respCtx, to, from, attemptModel, bytes.Clone(opts.OriginalRequest), reqBody, bytes.Clone([]byte("[DONE]")), &param)
segments = sdktranslator.TranslateStream(respCtx, to, from, attemptModel, opts.OriginalRequest, reqBody, []byte("[DONE]"), &param)
for i := range segments {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(segments[i])}
}
@@ -485,7 +487,7 @@ func (e *GeminiCLIExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.
// The loop variable attemptModel is only used as the concrete model id sent to the upstream
// Gemini CLI endpoint when iterating fallback variants.
for range models {
payload := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
payload := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
payload, err = thinking.ApplyThinking(payload, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {

View File
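Across these executor hunks the change is uniform: request payloads are passed to the SDK translators directly instead of through bytes.Clone. A minimal, standalone sketch of the slice aliasing this relies on — the clones guarded against callee mutation, so dropping them presumes the translators treat their payload arguments as read-only:

package main

import (
	"bytes"
	"fmt"
)

// Without a clone, caller and callee share the same backing array; a clone
// yields an independent copy that later writes cannot reach.
func main() {
	payload := []byte(`{"model":"gemini-2.5-pro"}`)

	shared := payload              // aliases the same backing array
	cloned := bytes.Clone(payload) // independent copy

	shared[2] = 'M' // a write through the alias is visible via payload

	fmt.Println(string(payload)) // {"Model":"gemini-2.5-pro"}  <- affected
	fmt.Println(string(cloned))  // {"model":"gemini-2.5-pro"}  <- unaffected
}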

@@ -116,12 +116,13 @@ func (e *GeminiExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, r
// Official Gemini API via API key or OAuth bearer
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -203,7 +204,7 @@ func (e *GeminiExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, r
appendAPIResponseChunk(ctx, e.cfg, data)
reporter.publish(ctx, parseGeminiUsage(data))
var param any
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, data, &param)
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, opts.OriginalRequest, body, data, &param)
resp = cliproxyexecutor.Response{Payload: []byte(out)}
return resp, nil
}
@@ -222,12 +223,13 @@ func (e *GeminiExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.A
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, true)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -318,12 +320,12 @@ func (e *GeminiExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.A
if detail, ok := parseGeminiStreamUsage(payload); ok {
reporter.publish(ctx, detail)
}
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, bytes.Clone(payload), &param)
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, opts.OriginalRequest, body, bytes.Clone(payload), &param)
for i := range lines {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(lines[i])}
}
}
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, bytes.Clone([]byte("[DONE]")), &param)
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, opts.OriginalRequest, body, []byte("[DONE]"), &param)
for i := range lines {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(lines[i])}
}
@@ -344,7 +346,7 @@ func (e *GeminiExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Aut
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
translatedReq := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
translatedReq := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
translatedReq, err := thinking.ApplyThinking(translatedReq, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {

View File

@@ -318,12 +318,13 @@ func (e *GeminiVertexExecutor) executeWithServiceAccount(ctx context.Context, au
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
body = sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body = sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -417,7 +418,7 @@ func (e *GeminiVertexExecutor) executeWithServiceAccount(ctx context.Context, au
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
var param any
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, data, &param)
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, opts.OriginalRequest, body, data, &param)
resp = cliproxyexecutor.Response{Payload: []byte(out)}
return resp, nil
}
@@ -432,12 +433,13 @@ func (e *GeminiVertexExecutor) executeWithAPIKey(ctx context.Context, auth *clip
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -521,7 +523,7 @@ func (e *GeminiVertexExecutor) executeWithAPIKey(ctx context.Context, auth *clip
appendAPIResponseChunk(ctx, e.cfg, data)
reporter.publish(ctx, parseGeminiUsage(data))
var param any
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, data, &param)
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, opts.OriginalRequest, body, data, &param)
resp = cliproxyexecutor.Response{Payload: []byte(out)}
return resp, nil
}
@@ -536,12 +538,13 @@ func (e *GeminiVertexExecutor) executeStreamWithServiceAccount(ctx context.Conte
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, true)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -632,12 +635,12 @@ func (e *GeminiVertexExecutor) executeStreamWithServiceAccount(ctx context.Conte
if detail, ok := parseGeminiStreamUsage(line); ok {
reporter.publish(ctx, detail)
}
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, bytes.Clone(line), &param)
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, opts.OriginalRequest, body, bytes.Clone(line), &param)
for i := range lines {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(lines[i])}
}
}
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, []byte("[DONE]"), &param)
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, opts.OriginalRequest, body, []byte("[DONE]"), &param)
for i := range lines {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(lines[i])}
}
@@ -660,12 +663,13 @@ func (e *GeminiVertexExecutor) executeStreamWithAPIKey(ctx context.Context, auth
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, true)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -756,12 +760,12 @@ func (e *GeminiVertexExecutor) executeStreamWithAPIKey(ctx context.Context, auth
if detail, ok := parseGeminiStreamUsage(line); ok {
reporter.publish(ctx, detail)
}
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, bytes.Clone(line), &param)
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, opts.OriginalRequest, body, bytes.Clone(line), &param)
for i := range lines {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(lines[i])}
}
}
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, []byte("[DONE]"), &param)
lines := sdktranslator.TranslateStream(ctx, to, from, req.Model, opts.OriginalRequest, body, []byte("[DONE]"), &param)
for i := range lines {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(lines[i])}
}
@@ -781,7 +785,7 @@ func (e *GeminiVertexExecutor) countTokensWithServiceAccount(ctx context.Context
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
translatedReq := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
translatedReq := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
translatedReq, err := thinking.ApplyThinking(translatedReq, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {
@@ -865,7 +869,7 @@ func (e *GeminiVertexExecutor) countTokensWithAPIKey(ctx context.Context, auth *
from := opts.SourceFormat
to := sdktranslator.FromString("gemini")
translatedReq := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
translatedReq := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
translatedReq, err := thinking.ApplyThinking(translatedReq, req.Model, from.String(), to.String(), e.Identifier())
if err != nil {

View File

@@ -87,12 +87,13 @@ func (e *IFlowExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, re
from := opts.SourceFormat
to := sdktranslator.FromString("openai")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
body, _ = sjson.SetBytes(body, "model", baseModel)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), "iflow", e.Identifier())
@@ -163,7 +164,7 @@ func (e *IFlowExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, re
var param any
// Note: TranslateNonStream uses req.Model (original with suffix) to preserve
// the original model name in the response for client compatibility.
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, data, &param)
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, opts.OriginalRequest, body, data, &param)
resp = cliproxyexecutor.Response{Payload: []byte(out)}
return resp, nil
}
@@ -189,12 +190,13 @@ func (e *IFlowExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Au
from := opts.SourceFormat
to := sdktranslator.FromString("openai")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, true)
body, _ = sjson.SetBytes(body, "model", baseModel)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), "iflow", e.Identifier())
@@ -274,7 +276,7 @@ func (e *IFlowExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Au
if detail, ok := parseOpenAIStreamUsage(line); ok {
reporter.publish(ctx, detail)
}
chunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, bytes.Clone(line), &param)
chunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, opts.OriginalRequest, body, bytes.Clone(line), &param)
for i := range chunks {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(chunks[i])}
}
@@ -296,7 +298,7 @@ func (e *IFlowExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Auth
from := opts.SourceFormat
to := sdktranslator.FromString("openai")
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
enc, err := tokenizerForModel(baseModel)
if err != nil {

View File

@@ -0,0 +1,471 @@
package executor
import (
"bufio"
"bytes"
"context"
"fmt"
"io"
"net/http"
"os"
"path/filepath"
"runtime"
"strings"
"time"
kimiauth "github.com/router-for-me/CLIProxyAPI/v6/internal/auth/kimi"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
"github.com/router-for-me/CLIProxyAPI/v6/internal/thinking"
cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
cliproxyexecutor "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/executor"
sdktranslator "github.com/router-for-me/CLIProxyAPI/v6/sdk/translator"
log "github.com/sirupsen/logrus"
"github.com/tidwall/sjson"
)
// KimiExecutor is a stateless executor for Kimi API using OpenAI-compatible chat completions.
type KimiExecutor struct {
cfg *config.Config
}
// NewKimiExecutor creates a new Kimi executor.
func NewKimiExecutor(cfg *config.Config) *KimiExecutor { return &KimiExecutor{cfg: cfg} }
// Identifier returns the executor identifier.
func (e *KimiExecutor) Identifier() string { return "kimi" }
// PrepareRequest injects Kimi credentials into the outgoing HTTP request.
func (e *KimiExecutor) PrepareRequest(req *http.Request, auth *cliproxyauth.Auth) error {
if req == nil {
return nil
}
token := kimiCreds(auth)
if strings.TrimSpace(token) != "" {
req.Header.Set("Authorization", "Bearer "+token)
}
return nil
}
// HttpRequest injects Kimi credentials into the request and executes it.
func (e *KimiExecutor) HttpRequest(ctx context.Context, auth *cliproxyauth.Auth, req *http.Request) (*http.Response, error) {
if req == nil {
return nil, fmt.Errorf("kimi executor: request is nil")
}
if ctx == nil {
ctx = req.Context()
}
httpReq := req.WithContext(ctx)
if err := e.PrepareRequest(httpReq, auth); err != nil {
return nil, err
}
httpClient := newProxyAwareHTTPClient(ctx, e.cfg, auth, 0)
return httpClient.Do(httpReq)
}
// Execute performs a non-streaming chat completion request to Kimi.
func (e *KimiExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (resp cliproxyexecutor.Response, err error) {
baseModel := thinking.ParseSuffix(req.Model).ModelName
token := kimiCreds(auth)
reporter := newUsageReporter(ctx, e.Identifier(), baseModel, auth)
defer reporter.trackFailure(ctx, &err)
from := opts.SourceFormat
to := sdktranslator.FromString("openai")
originalPayload := bytes.Clone(req.Payload)
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
}
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
// Strip kimi- prefix for upstream API
upstreamModel := stripKimiPrefix(baseModel)
body, err = sjson.SetBytes(body, "model", upstreamModel)
if err != nil {
return resp, fmt.Errorf("kimi executor: failed to set model in payload: %w", err)
}
body, err = thinking.ApplyThinking(body, req.Model, from.String(), "kimi", e.Identifier())
if err != nil {
return resp, err
}
requestedModel := payloadRequestedModel(opts, req.Model)
body = applyPayloadConfigWithRoot(e.cfg, baseModel, to.String(), "", body, originalTranslated, requestedModel)
url := kimiauth.KimiAPIBaseURL + "/chat/completions"
httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body))
if err != nil {
return resp, err
}
applyKimiHeadersWithAuth(httpReq, token, false, auth)
var authID, authLabel, authType, authValue string
if auth != nil {
authID = auth.ID
authLabel = auth.Label
authType, authValue = auth.AccountInfo()
}
recordAPIRequest(ctx, e.cfg, upstreamRequestLog{
URL: url,
Method: http.MethodPost,
Headers: httpReq.Header.Clone(),
Body: body,
Provider: e.Identifier(),
AuthID: authID,
AuthLabel: authLabel,
AuthType: authType,
AuthValue: authValue,
})
httpClient := newProxyAwareHTTPClient(ctx, e.cfg, auth, 0)
httpResp, err := httpClient.Do(httpReq)
if err != nil {
recordAPIResponseError(ctx, e.cfg, err)
return resp, err
}
defer func() {
if errClose := httpResp.Body.Close(); errClose != nil {
log.Errorf("kimi executor: close response body error: %v", errClose)
}
}()
recordAPIResponseMetadata(ctx, e.cfg, httpResp.StatusCode, httpResp.Header.Clone())
if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 {
b, _ := io.ReadAll(httpResp.Body)
appendAPIResponseChunk(ctx, e.cfg, b)
logWithRequestID(ctx).Debugf("request error, error status: %d, error message: %s", httpResp.StatusCode, summarizeErrorBody(httpResp.Header.Get("Content-Type"), b))
err = statusErr{code: httpResp.StatusCode, msg: string(b)}
return resp, err
}
data, err := io.ReadAll(httpResp.Body)
if err != nil {
recordAPIResponseError(ctx, e.cfg, err)
return resp, err
}
appendAPIResponseChunk(ctx, e.cfg, data)
reporter.publish(ctx, parseOpenAIUsage(data))
var param any
// Note: TranslateNonStream uses req.Model (original with suffix) to preserve
// the original model name in the response for client compatibility.
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, data, &param)
resp = cliproxyexecutor.Response{Payload: []byte(out)}
return resp, nil
}
// ExecuteStream performs a streaming chat completion request to Kimi.
func (e *KimiExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (stream <-chan cliproxyexecutor.StreamChunk, err error) {
baseModel := thinking.ParseSuffix(req.Model).ModelName
token := kimiCreds(auth)
reporter := newUsageReporter(ctx, e.Identifier(), baseModel, auth)
defer reporter.trackFailure(ctx, &err)
from := opts.SourceFormat
to := sdktranslator.FromString("openai")
originalPayload := bytes.Clone(req.Payload)
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
}
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
// Strip kimi- prefix for upstream API
upstreamModel := stripKimiPrefix(baseModel)
body, err = sjson.SetBytes(body, "model", upstreamModel)
if err != nil {
return nil, fmt.Errorf("kimi executor: failed to set model in payload: %w", err)
}
body, err = thinking.ApplyThinking(body, req.Model, from.String(), "kimi", e.Identifier())
if err != nil {
return nil, err
}
body, err = sjson.SetBytes(body, "stream_options.include_usage", true)
if err != nil {
return nil, fmt.Errorf("kimi executor: failed to set stream_options in payload: %w", err)
}
requestedModel := payloadRequestedModel(opts, req.Model)
body = applyPayloadConfigWithRoot(e.cfg, baseModel, to.String(), "", body, originalTranslated, requestedModel)
url := kimiauth.KimiAPIBaseURL + "/chat/completions"
httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body))
if err != nil {
return nil, err
}
applyKimiHeadersWithAuth(httpReq, token, true, auth)
var authID, authLabel, authType, authValue string
if auth != nil {
authID = auth.ID
authLabel = auth.Label
authType, authValue = auth.AccountInfo()
}
recordAPIRequest(ctx, e.cfg, upstreamRequestLog{
URL: url,
Method: http.MethodPost,
Headers: httpReq.Header.Clone(),
Body: body,
Provider: e.Identifier(),
AuthID: authID,
AuthLabel: authLabel,
AuthType: authType,
AuthValue: authValue,
})
httpClient := newProxyAwareHTTPClient(ctx, e.cfg, auth, 0)
httpResp, err := httpClient.Do(httpReq)
if err != nil {
recordAPIResponseError(ctx, e.cfg, err)
return nil, err
}
recordAPIResponseMetadata(ctx, e.cfg, httpResp.StatusCode, httpResp.Header.Clone())
if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 {
b, _ := io.ReadAll(httpResp.Body)
appendAPIResponseChunk(ctx, e.cfg, b)
logWithRequestID(ctx).Debugf("request error, error status: %d, error message: %s", httpResp.StatusCode, summarizeErrorBody(httpResp.Header.Get("Content-Type"), b))
if errClose := httpResp.Body.Close(); errClose != nil {
log.Errorf("kimi executor: close response body error: %v", errClose)
}
err = statusErr{code: httpResp.StatusCode, msg: string(b)}
return nil, err
}
out := make(chan cliproxyexecutor.StreamChunk)
stream = out
go func() {
defer close(out)
defer func() {
if errClose := httpResp.Body.Close(); errClose != nil {
log.Errorf("kimi executor: close response body error: %v", errClose)
}
}()
scanner := bufio.NewScanner(httpResp.Body)
scanner.Buffer(nil, 1_048_576) // 1MB
var param any
for scanner.Scan() {
line := scanner.Bytes()
appendAPIResponseChunk(ctx, e.cfg, line)
if detail, ok := parseOpenAIStreamUsage(line); ok {
reporter.publish(ctx, detail)
}
chunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, bytes.Clone(line), &param)
for i := range chunks {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(chunks[i])}
}
}
doneChunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, bytes.Clone([]byte("[DONE]")), &param)
for i := range doneChunks {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(doneChunks[i])}
}
if errScan := scanner.Err(); errScan != nil {
recordAPIResponseError(ctx, e.cfg, errScan)
reporter.publishFailure(ctx)
out <- cliproxyexecutor.StreamChunk{Err: errScan}
}
}()
return stream, nil
}
// CountTokens estimates token count for Kimi requests.
func (e *KimiExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Auth, req cliproxyexecutor.Request, opts cliproxyexecutor.Options) (cliproxyexecutor.Response, error) {
baseModel := thinking.ParseSuffix(req.Model).ModelName
from := opts.SourceFormat
to := sdktranslator.FromString("openai")
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
// Use a generic tokenizer for estimation
enc, err := tokenizerForModel("gpt-4")
if err != nil {
return cliproxyexecutor.Response{}, fmt.Errorf("kimi executor: tokenizer init failed: %w", err)
}
count, err := countOpenAIChatTokens(enc, body)
if err != nil {
return cliproxyexecutor.Response{}, fmt.Errorf("kimi executor: token counting failed: %w", err)
}
usageJSON := buildOpenAIUsageJSON(count)
translated := sdktranslator.TranslateTokenCount(ctx, to, from, count, usageJSON)
return cliproxyexecutor.Response{Payload: []byte(translated)}, nil
}
// Refresh refreshes the Kimi token using the refresh token.
func (e *KimiExecutor) Refresh(ctx context.Context, auth *cliproxyauth.Auth) (*cliproxyauth.Auth, error) {
log.Debugf("kimi executor: refresh called")
if auth == nil {
return nil, fmt.Errorf("kimi executor: auth is nil")
}
// Expect refresh_token in metadata for OAuth-based accounts
var refreshToken string
if auth.Metadata != nil {
if v, ok := auth.Metadata["refresh_token"].(string); ok && strings.TrimSpace(v) != "" {
refreshToken = v
}
}
if strings.TrimSpace(refreshToken) == "" {
// Nothing to refresh
return auth, nil
}
client := kimiauth.NewDeviceFlowClientWithDeviceID(e.cfg, resolveKimiDeviceID(auth))
td, err := client.RefreshToken(ctx, refreshToken)
if err != nil {
return nil, err
}
if auth.Metadata == nil {
auth.Metadata = make(map[string]any)
}
auth.Metadata["access_token"] = td.AccessToken
if td.RefreshToken != "" {
auth.Metadata["refresh_token"] = td.RefreshToken
}
if td.ExpiresAt > 0 {
exp := time.Unix(td.ExpiresAt, 0).UTC().Format(time.RFC3339)
auth.Metadata["expired"] = exp
}
auth.Metadata["type"] = "kimi"
now := time.Now().Format(time.RFC3339)
auth.Metadata["last_refresh"] = now
return auth, nil
}
// applyKimiHeaders sets required headers for Kimi API requests.
// Headers match kimi-cli client for compatibility.
func applyKimiHeaders(r *http.Request, token string, stream bool) {
r.Header.Set("Content-Type", "application/json")
r.Header.Set("Authorization", "Bearer "+token)
// Match kimi-cli headers exactly
r.Header.Set("User-Agent", "KimiCLI/1.10.6")
r.Header.Set("X-Msh-Platform", "kimi_cli")
r.Header.Set("X-Msh-Version", "1.10.6")
r.Header.Set("X-Msh-Device-Name", getKimiHostname())
r.Header.Set("X-Msh-Device-Model", getKimiDeviceModel())
r.Header.Set("X-Msh-Device-Id", getKimiDeviceID())
if stream {
r.Header.Set("Accept", "text/event-stream")
return
}
r.Header.Set("Accept", "application/json")
}
func resolveKimiDeviceIDFromAuth(auth *cliproxyauth.Auth) string {
if auth == nil || auth.Metadata == nil {
return ""
}
deviceIDRaw, ok := auth.Metadata["device_id"]
if !ok {
return ""
}
deviceID, ok := deviceIDRaw.(string)
if !ok {
return ""
}
return strings.TrimSpace(deviceID)
}
func resolveKimiDeviceIDFromStorage(auth *cliproxyauth.Auth) string {
if auth == nil {
return ""
}
storage, ok := auth.Storage.(*kimiauth.KimiTokenStorage)
if !ok || storage == nil {
return ""
}
return strings.TrimSpace(storage.DeviceID)
}
func resolveKimiDeviceID(auth *cliproxyauth.Auth) string {
deviceID := resolveKimiDeviceIDFromAuth(auth)
if deviceID != "" {
return deviceID
}
return resolveKimiDeviceIDFromStorage(auth)
}
func applyKimiHeadersWithAuth(r *http.Request, token string, stream bool, auth *cliproxyauth.Auth) {
applyKimiHeaders(r, token, stream)
if deviceID := resolveKimiDeviceID(auth); deviceID != "" {
r.Header.Set("X-Msh-Device-Id", deviceID)
}
}
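// Illustrative sketch (not part of this file): the device-id precedence wired
// above means a device_id stored in auth metadata overrides the filesystem
// default set by applyKimiHeaders; the token and metadata values below are
// hypothetical.
//
//	httpReq, _ := http.NewRequest(http.MethodPost, kimiauth.KimiAPIBaseURL+"/chat/completions", nil)
//	auth := &cliproxyauth.Auth{Metadata: map[string]any{"device_id": "device-from-login"}}
//	applyKimiHeadersWithAuth(httpReq, "sk-example-token", true, auth)
//	_ = httpReq.Header.Get("X-Msh-Device-Id") // "device-from-login"
//	_ = httpReq.Header.Get("Accept")          // "text/event-stream" because stream=true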
// getKimiHostname returns the machine hostname.
func getKimiHostname() string {
hostname, err := os.Hostname()
if err != nil {
return "unknown"
}
return hostname
}
// getKimiDeviceModel returns a device model string matching kimi-cli format.
func getKimiDeviceModel() string {
return fmt.Sprintf("%s %s", runtime.GOOS, runtime.GOARCH)
}
// getKimiDeviceID returns a stable device ID, matching kimi-cli storage location.
func getKimiDeviceID() string {
homeDir, err := os.UserHomeDir()
if err != nil {
return "cli-proxy-api-device"
}
// Check kimi-cli's device_id location first (platform-specific)
var kimiShareDir string
switch runtime.GOOS {
case "darwin":
kimiShareDir = filepath.Join(homeDir, "Library", "Application Support", "kimi")
case "windows":
appData := os.Getenv("APPDATA")
if appData == "" {
appData = filepath.Join(homeDir, "AppData", "Roaming")
}
kimiShareDir = filepath.Join(appData, "kimi")
default: // linux and other unix-like
kimiShareDir = filepath.Join(homeDir, ".local", "share", "kimi")
}
deviceIDPath := filepath.Join(kimiShareDir, "device_id")
if data, err := os.ReadFile(deviceIDPath); err == nil {
return strings.TrimSpace(string(data))
}
return "cli-proxy-api-device"
}
// kimiCreds extracts the access token from auth.
func kimiCreds(a *cliproxyauth.Auth) (token string) {
if a == nil {
return ""
}
// Check metadata first (OAuth flow stores tokens here)
if a.Metadata != nil {
if v, ok := a.Metadata["access_token"].(string); ok && strings.TrimSpace(v) != "" {
return v
}
}
// Fallback to attributes (API key style)
if a.Attributes != nil {
if v := a.Attributes["access_token"]; v != "" {
return v
}
if v := a.Attributes["api_key"]; v != "" {
return v
}
}
return ""
}
// stripKimiPrefix removes the "kimi-" prefix from model names for the upstream API.
func stripKimiPrefix(model string) string {
model = strings.TrimSpace(model)
if strings.HasPrefix(strings.ToLower(model), "kimi-") {
return model[5:]
}
return model
}
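For illustration, a hypothetical table-driven check of stripKimiPrefix (the expected upstream names follow directly from the trim-and-strip logic above):

package executor

import "testing"

// TestStripKimiPrefix sketches the proxy-facing -> upstream model name mapping
// implied by stripKimiPrefix.
func TestStripKimiPrefix(t *testing.T) {
	cases := map[string]string{
		"kimi-k2":          "k2",
		"Kimi-K2-Thinking": "K2-Thinking", // prefix match is case-insensitive, remainder kept as-is
		"  kimi-k2.5  ":    "k2.5",        // surrounding whitespace is trimmed first
		"moonshot-v1":      "moonshot-v1", // no prefix, returned unchanged
	}
	for in, want := range cases {
		if got := stripKimiPrefix(in); got != want {
			t.Errorf("stripKimiPrefix(%q) = %q, want %q", in, got, want)
		}
	}
}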

View File

@@ -80,7 +80,7 @@ func recordAPIRequest(ctx context.Context, cfg *config.Config, info upstreamRequ
writeHeaders(builder, info.Headers)
builder.WriteString("\nBody:\n")
if len(info.Body) > 0 {
builder.WriteString(string(bytes.Clone(info.Body)))
builder.WriteString(string(info.Body))
} else {
builder.WriteString("<empty>")
}
@@ -152,7 +152,7 @@ func appendAPIResponseChunk(ctx context.Context, cfg *config.Config, chunk []byt
if cfg == nil || !cfg.RequestLog {
return
}
data := bytes.TrimSpace(bytes.Clone(chunk))
data := bytes.TrimSpace(chunk)
if len(data) == 0 {
return
}

View File

@@ -88,12 +88,13 @@ func (e *OpenAICompatExecutor) Execute(ctx context.Context, auth *cliproxyauth.A
to = sdktranslator.FromString("openai-response")
endpoint = "/responses/compact"
}
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, opts.Stream)
translated := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), opts.Stream)
translated := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, opts.Stream)
requestedModel := payloadRequestedModel(opts, req.Model)
translated = applyPayloadConfigWithRoot(e.cfg, baseModel, to.String(), "", translated, originalTranslated, requestedModel)
if opts.Alt == "responses/compact" {
@@ -170,7 +171,7 @@ func (e *OpenAICompatExecutor) Execute(ctx context.Context, auth *cliproxyauth.A
reporter.ensurePublished(ctx)
// Translate response back to source format when needed
var param any
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), translated, body, &param)
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, opts.OriginalRequest, translated, body, &param)
resp = cliproxyexecutor.Response{Payload: []byte(out)}
return resp, nil
}
@@ -189,12 +190,13 @@ func (e *OpenAICompatExecutor) ExecuteStream(ctx context.Context, auth *cliproxy
from := opts.SourceFormat
to := sdktranslator.FromString("openai")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
translated := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
translated := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, true)
requestedModel := payloadRequestedModel(opts, req.Model)
translated = applyPayloadConfigWithRoot(e.cfg, baseModel, to.String(), "", translated, originalTranslated, requestedModel)
@@ -283,7 +285,7 @@ func (e *OpenAICompatExecutor) ExecuteStream(ctx context.Context, auth *cliproxy
// OpenAI-compatible streams are SSE: lines typically prefixed with "data: ".
// Pass through translator; it yields one or more chunks for the target schema.
chunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), translated, bytes.Clone(line), &param)
chunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, opts.OriginalRequest, translated, bytes.Clone(line), &param)
for i := range chunks {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(chunks[i])}
}
@@ -304,7 +306,7 @@ func (e *OpenAICompatExecutor) CountTokens(ctx context.Context, auth *cliproxyau
from := opts.SourceFormat
to := sdktranslator.FromString("openai")
translated := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
translated := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
modelForCounting := baseModel

View File

@@ -81,12 +81,13 @@ func (e *QwenExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, req
from := opts.SourceFormat
to := sdktranslator.FromString("openai")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, false)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
body, _ = sjson.SetBytes(body, "model", baseModel)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
@@ -150,7 +151,7 @@ func (e *QwenExecutor) Execute(ctx context.Context, auth *cliproxyauth.Auth, req
var param any
// Note: TranslateNonStream uses req.Model (original with suffix) to preserve
// the original model name in the response for client compatibility.
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, data, &param)
out := sdktranslator.TranslateNonStream(ctx, to, from, req.Model, opts.OriginalRequest, body, data, &param)
resp = cliproxyexecutor.Response{Payload: []byte(out)}
return resp, nil
}
@@ -171,12 +172,13 @@ func (e *QwenExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Aut
from := opts.SourceFormat
to := sdktranslator.FromString("openai")
originalPayload := bytes.Clone(req.Payload)
originalPayloadSource := req.Payload
if len(opts.OriginalRequest) > 0 {
originalPayload = bytes.Clone(opts.OriginalRequest)
originalPayloadSource = opts.OriginalRequest
}
originalPayload := originalPayloadSource
originalTranslated := sdktranslator.TranslateRequest(from, to, baseModel, originalPayload, true)
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), true)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, true)
body, _ = sjson.SetBytes(body, "model", baseModel)
body, err = thinking.ApplyThinking(body, req.Model, from.String(), to.String(), e.Identifier())
@@ -253,12 +255,12 @@ func (e *QwenExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Aut
if detail, ok := parseOpenAIStreamUsage(line); ok {
reporter.publish(ctx, detail)
}
chunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, bytes.Clone(line), &param)
chunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, opts.OriginalRequest, body, bytes.Clone(line), &param)
for i := range chunks {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(chunks[i])}
}
}
doneChunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, bytes.Clone(opts.OriginalRequest), body, bytes.Clone([]byte("[DONE]")), &param)
doneChunks := sdktranslator.TranslateStream(ctx, to, from, req.Model, opts.OriginalRequest, body, []byte("[DONE]"), &param)
for i := range doneChunks {
out <- cliproxyexecutor.StreamChunk{Payload: []byte(doneChunks[i])}
}
@@ -276,7 +278,7 @@ func (e *QwenExecutor) CountTokens(ctx context.Context, auth *cliproxyauth.Auth,
from := opts.SourceFormat
to := sdktranslator.FromString("openai")
body := sdktranslator.TranslateRequest(from, to, baseModel, bytes.Clone(req.Payload), false)
body := sdktranslator.TranslateRequest(from, to, baseModel, req.Payload, false)
modelName := gjson.GetBytes(body, "model").String()
if strings.TrimSpace(modelName) == "" {

View File

@@ -7,5 +7,6 @@ import (
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/thinking/provider/gemini"
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/thinking/provider/geminicli"
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/thinking/provider/iflow"
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/thinking/provider/kimi"
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/thinking/provider/openai"
)

View File

@@ -18,6 +18,7 @@ var providerAppliers = map[string]ProviderApplier{
"codex": nil,
"iflow": nil,
"antigravity": nil,
"kimi": nil,
}
// GetProviderApplier returns the ProviderApplier for the given provider name.
@@ -326,6 +327,9 @@ func extractThinkingConfig(body []byte, provider string) ThinkingConfig {
return config
}
return extractOpenAIConfig(body)
case "kimi":
// Kimi uses OpenAI-compatible reasoning_effort format
return extractOpenAIConfig(body)
default:
return ThinkingConfig{}
}

View File

@@ -0,0 +1,126 @@
// Package kimi implements thinking configuration for Kimi (Moonshot AI) models.
//
// Kimi models use the OpenAI-compatible reasoning_effort format with discrete levels
// (low/medium/high). The provider strips any existing thinking config and applies
// the unified ThinkingConfig in OpenAI format.
package kimi
import (
"fmt"
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
"github.com/router-for-me/CLIProxyAPI/v6/internal/thinking"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
)
// Applier implements thinking.ProviderApplier for Kimi models.
//
// Kimi-specific behavior:
// - Output format: reasoning_effort (string: low/medium/high)
// - Uses OpenAI-compatible format
// - Supports budget-to-level conversion
type Applier struct{}
var _ thinking.ProviderApplier = (*Applier)(nil)
// NewApplier creates a new Kimi thinking applier.
func NewApplier() *Applier {
return &Applier{}
}
func init() {
thinking.RegisterProvider("kimi", NewApplier())
}
// Apply applies thinking configuration to Kimi request body.
//
// Expected output format:
//
// {
// "reasoning_effort": "high"
// }
func (a *Applier) Apply(body []byte, config thinking.ThinkingConfig, modelInfo *registry.ModelInfo) ([]byte, error) {
if thinking.IsUserDefinedModel(modelInfo) {
return applyCompatibleKimi(body, config)
}
if modelInfo.Thinking == nil {
return body, nil
}
if len(body) == 0 || !gjson.ValidBytes(body) {
body = []byte(`{}`)
}
var effort string
switch config.Mode {
case thinking.ModeLevel:
if config.Level == "" {
return body, nil
}
effort = string(config.Level)
case thinking.ModeNone:
// Kimi uses "none" to disable thinking
effort = string(thinking.LevelNone)
case thinking.ModeBudget:
// Convert budget to level using threshold mapping
level, ok := thinking.ConvertBudgetToLevel(config.Budget)
if !ok {
return body, nil
}
effort = level
case thinking.ModeAuto:
// Auto mode maps to "auto" effort
effort = string(thinking.LevelAuto)
default:
return body, nil
}
if effort == "" {
return body, nil
}
result, err := sjson.SetBytes(body, "reasoning_effort", effort)
if err != nil {
return body, fmt.Errorf("kimi thinking: failed to set reasoning_effort: %w", err)
}
return result, nil
}
// applyCompatibleKimi applies thinking config for user-defined Kimi models.
func applyCompatibleKimi(body []byte, config thinking.ThinkingConfig) ([]byte, error) {
if len(body) == 0 || !gjson.ValidBytes(body) {
body = []byte(`{}`)
}
var effort string
switch config.Mode {
case thinking.ModeLevel:
if config.Level == "" {
return body, nil
}
effort = string(config.Level)
case thinking.ModeNone:
effort = string(thinking.LevelNone)
if config.Level != "" {
effort = string(config.Level)
}
case thinking.ModeAuto:
effort = string(thinking.LevelAuto)
case thinking.ModeBudget:
// Convert budget to level
level, ok := thinking.ConvertBudgetToLevel(config.Budget)
if !ok {
return body, nil
}
effort = level
default:
return body, nil
}
result, err := sjson.SetBytes(body, "reasoning_effort", effort)
if err != nil {
return body, fmt.Errorf("kimi thinking: failed to set reasoning_effort: %w", err)
}
return result, nil
}
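A short sketch of the documented rewrite, using the user-defined-model path so no registry metadata is needed (field names follow the ThinkingConfig usage above; the exact key ordering of the output is whatever sjson produces when appending a new key):

package kimi

import (
	"fmt"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/thinking"
)

// sketchApply shows a level-mode config adding reasoning_effort to an
// otherwise untouched OpenAI-style body, e.g.
// {"model":"k2-thinking","messages":[],"reasoning_effort":"high"}.
func sketchApply() {
	body := []byte(`{"model":"k2-thinking","messages":[]}`)
	cfg := thinking.ThinkingConfig{Mode: thinking.ModeLevel, Level: "high"}
	out, err := applyCompatibleKimi(body, cfg)
	if err != nil {
		fmt.Println("apply failed:", err)
		return
	}
	fmt.Println(string(out))
}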

View File

@@ -6,7 +6,6 @@
package claude
import (
"bytes"
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/cache"
@@ -37,7 +36,7 @@ import (
// - []byte: The transformed request data in Gemini CLI API format
func ConvertClaudeRequestToAntigravity(modelName string, inputRawJSON []byte, _ bool) []byte {
enableThoughtTranslate := true
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
// system instruction
systemInstructionJSON := ""

View File

@@ -6,7 +6,6 @@
package gemini
import (
"bytes"
"fmt"
"strings"
@@ -34,7 +33,7 @@ import (
// Returns:
// - []byte: The transformed request data in Gemini API format
func ConvertGeminiRequestToAntigravity(modelName string, inputRawJSON []byte, _ bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
template := ""
template = `{"project":"","request":{},"model":""}`
template, _ = sjson.SetRaw(template, "request", string(rawJSON))

View File

@@ -1,14 +1,12 @@
package responses
import (
"bytes"
. "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/antigravity/gemini"
. "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/openai/responses"
)
func ConvertOpenAIResponsesRequestToAntigravity(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
rawJSON = ConvertOpenAIResponsesRequestToGemini(modelName, rawJSON, stream)
return ConvertGeminiRequestToAntigravity(modelName, rawJSON, stream)
}

View File

@@ -6,8 +6,6 @@
package geminiCLI
import (
"bytes"
. "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/claude/gemini"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@@ -30,7 +28,7 @@ import (
// Returns:
// - []byte: The transformed request data in Claude Code API format
func ConvertGeminiCLIRequestToClaude(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
modelResult := gjson.GetBytes(rawJSON, "model")
// Extract the inner request object and promote it to the top level

View File

@@ -6,7 +6,6 @@
package gemini
import (
"bytes"
"crypto/rand"
"crypto/sha256"
"encoding/hex"
@@ -46,7 +45,7 @@ var (
// Returns:
// - []byte: The transformed request data in Claude Code API format
func ConvertGeminiRequestToClaude(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
if account == "" {
u, _ := uuid.NewRandom()

View File

@@ -6,7 +6,6 @@
package chat_completions
import (
"bytes"
"crypto/rand"
"crypto/sha256"
"encoding/hex"
@@ -44,7 +43,7 @@ var (
// Returns:
// - []byte: The transformed request data in Claude Code API format
func ConvertOpenAIRequestToClaude(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
if account == "" {
u, _ := uuid.NewRandom()

View File

@@ -1,7 +1,6 @@
package responses
import (
"bytes"
"crypto/rand"
"crypto/sha256"
"encoding/hex"
@@ -32,7 +31,7 @@ var (
// - max_output_tokens -> max_tokens
// - stream passthrough via parameter
func ConvertOpenAIResponsesRequestToClaude(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
if account == "" {
u, _ := uuid.NewRandom()

View File

@@ -6,7 +6,6 @@
package claude
import (
"bytes"
"fmt"
"strconv"
"strings"
@@ -35,7 +34,7 @@ import (
// Returns:
// - []byte: The transformed request data in internal client format
func ConvertClaudeRequestToCodex(modelName string, inputRawJSON []byte, _ bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
template := `{"model":"","instructions":"","input":[]}`

View File

@@ -6,8 +6,6 @@
package geminiCLI
import (
"bytes"
. "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/codex/gemini"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@@ -30,7 +28,7 @@ import (
// Returns:
// - []byte: The transformed request data in Codex API format
func ConvertGeminiCLIRequestToCodex(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
rawJSON = []byte(gjson.GetBytes(rawJSON, "request").Raw)
rawJSON, _ = sjson.SetBytes(rawJSON, "model", modelName)

View File

@@ -6,7 +6,6 @@
package gemini
import (
"bytes"
"crypto/rand"
"fmt"
"math/big"
@@ -37,7 +36,7 @@ import (
// Returns:
// - []byte: The transformed request data in Codex API format
func ConvertGeminiRequestToCodex(modelName string, inputRawJSON []byte, _ bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
// Base template
out := `{"model":"","instructions":"","input":[]}`

View File

@@ -7,8 +7,6 @@
package chat_completions
import (
"bytes"
"strconv"
"strings"
@@ -29,7 +27,7 @@ import (
// Returns:
// - []byte: The transformed request data in OpenAI Responses API format
func ConvertOpenAIRequestToCodex(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
// Start with empty JSON object
out := `{"instructions":""}`

View File

@@ -1,7 +1,6 @@
package responses
import (
"bytes"
"fmt"
"github.com/tidwall/gjson"
@@ -9,7 +8,7 @@ import (
)
func ConvertOpenAIResponsesRequestToCodex(modelName string, inputRawJSON []byte, _ bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
inputResult := gjson.GetBytes(rawJSON, "input")
if inputResult.Type == gjson.String {

View File

@@ -35,7 +35,7 @@ const geminiCLIClaudeThoughtSignature = "skip_thought_signature_validator"
// Returns:
// - []byte: The transformed request data in Gemini CLI API format
func ConvertClaudeRequestToCLI(modelName string, inputRawJSON []byte, _ bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
rawJSON = bytes.Replace(rawJSON, []byte(`"url":{"type":"string","format":"uri",`), []byte(`"url":{"type":"string",`), -1)
// Build output Gemini CLI request JSON

View File

@@ -6,7 +6,6 @@
package gemini
import (
"bytes"
"fmt"
"github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/common"
@@ -33,7 +32,7 @@ import (
// Returns:
// - []byte: The transformed request data in Gemini API format
func ConvertGeminiRequestToGeminiCLI(_ string, inputRawJSON []byte, _ bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
template := ""
template = `{"project":"","request":{},"model":""}`
template, _ = sjson.SetRaw(template, "request", string(rawJSON))

View File

@@ -3,7 +3,6 @@
package chat_completions
import (
"bytes"
"fmt"
"strings"
@@ -28,7 +27,7 @@ const geminiCLIFunctionThoughtSignature = "skip_thought_signature_validator"
// Returns:
// - []byte: The transformed request data in Gemini CLI API format
func ConvertOpenAIRequestToGeminiCLI(modelName string, inputRawJSON []byte, _ bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
// Base envelope (no default thinkingConfig)
out := []byte(`{"project":"","request":{"contents":[]},"model":"gemini-2.5-pro"}`)

View File

@@ -1,14 +1,12 @@
package responses
import (
"bytes"
. "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini-cli/gemini"
. "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/openai/responses"
)
func ConvertOpenAIResponsesRequestToGeminiCLI(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
rawJSON = ConvertOpenAIResponsesRequestToGemini(modelName, rawJSON, stream)
return ConvertGeminiRequestToGeminiCLI(modelName, rawJSON, stream)
}

View File

@@ -28,7 +28,7 @@ const geminiClaudeThoughtSignature = "skip_thought_signature_validator"
// Returns:
// - []byte: The transformed request in Gemini CLI format.
func ConvertClaudeRequestToGemini(modelName string, inputRawJSON []byte, _ bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
rawJSON = bytes.Replace(rawJSON, []byte(`"url":{"type":"string","format":"uri",`), []byte(`"url":{"type":"string",`), -1)
// Build output Gemini CLI request JSON

View File

@@ -6,7 +6,6 @@
package geminiCLI
import (
"bytes"
"fmt"
"github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/common"
@@ -19,7 +18,7 @@ import (
// It extracts the model name, system instruction, message contents, and tool declarations
// from the raw JSON request and returns them in the format expected by the internal client.
func ConvertGeminiCLIRequestToGemini(_ string, inputRawJSON []byte, _ bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
modelResult := gjson.GetBytes(rawJSON, "model")
rawJSON = []byte(gjson.GetBytes(rawJSON, "request").Raw)
rawJSON, _ = sjson.SetBytes(rawJSON, "model", modelResult.String())

View File

@@ -4,7 +4,6 @@
package gemini
import (
"bytes"
"fmt"
"github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/common"
@@ -19,7 +18,7 @@ import (
//
// It keeps the payload otherwise unchanged.
func ConvertGeminiRequestToGemini(_ string, inputRawJSON []byte, _ bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
// Fast path: if no contents field, only attach safety settings
contents := gjson.GetBytes(rawJSON, "contents")
if !contents.Exists() {

View File

@@ -3,7 +3,6 @@
package chat_completions
import (
"bytes"
"fmt"
"strings"
@@ -28,7 +27,7 @@ const geminiFunctionThoughtSignature = "skip_thought_signature_validator"
// Returns:
// - []byte: The transformed request data in Gemini API format
func ConvertOpenAIRequestToGemini(modelName string, inputRawJSON []byte, _ bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
// Base envelope (no default thinkingConfig)
out := []byte(`{"contents":[]}`)

View File

@@ -1,7 +1,6 @@
package responses
import (
"bytes"
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/translator/gemini/common"
@@ -12,7 +11,7 @@ import (
const geminiResponsesThoughtSignature = "skip_thought_signature_validator"
func ConvertOpenAIResponsesRequestToGemini(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
// Note: modelName and stream parameters are part of the fixed method signature
_ = modelName // Unused but required by interface

View File

@@ -6,7 +6,6 @@
package claude
import (
"bytes"
"strings"
"github.com/router-for-me/CLIProxyAPI/v6/internal/thinking"
@@ -18,7 +17,7 @@ import (
// It extracts the model name, system instruction, message contents, and tool declarations
// from the raw JSON request and returns them in the format expected by the OpenAI API.
func ConvertClaudeRequestToOpenAI(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
// Base OpenAI Chat Completions API template
out := `{"model":"","messages":[]}`

View File

@@ -6,8 +6,6 @@
package geminiCLI
import (
"bytes"
. "github.com/router-for-me/CLIProxyAPI/v6/internal/translator/openai/gemini"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@@ -17,7 +15,7 @@ import (
// It extracts the model name, generation config, message contents, and tool declarations
// from the raw JSON request and returns them in the format expected by the OpenAI API.
func ConvertGeminiCLIRequestToOpenAI(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
rawJSON = []byte(gjson.GetBytes(rawJSON, "request").Raw)
rawJSON, _ = sjson.SetBytes(rawJSON, "model", modelName)
if gjson.GetBytes(rawJSON, "systemInstruction").Exists() {

View File

@@ -6,7 +6,6 @@
package gemini
import (
"bytes"
"crypto/rand"
"fmt"
"math/big"
@@ -21,7 +20,7 @@ import (
// It extracts the model name, generation config, message contents, and tool declarations
// from the raw JSON request and returns them in the format expected by the OpenAI API.
func ConvertGeminiRequestToOpenAI(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
// Base OpenAI Chat Completions API template
out := `{"model":"","messages":[]}`

View File

@@ -3,7 +3,6 @@
package chat_completions
import (
"bytes"
"github.com/tidwall/sjson"
)
@@ -25,7 +24,7 @@ func ConvertOpenAIRequestToOpenAI(modelName string, inputRawJSON []byte, _ bool)
// If there's an error, return the original JSON or handle the error appropriately.
// For now, we'll return the original, but in a real scenario, logging or a more robust error
// handling mechanism would be needed.
return bytes.Clone(inputRawJSON)
return inputRawJSON
}
return updatedJSON
}

View File

@@ -1,7 +1,6 @@
package responses
import (
"bytes"
"strings"
"github.com/tidwall/gjson"
@@ -28,7 +27,7 @@ import (
// Returns:
// - []byte: The transformed request data in OpenAI chat completions format
func ConvertOpenAIResponsesRequestToOpenAIChatCompletions(modelName string, inputRawJSON []byte, stream bool) []byte {
rawJSON := bytes.Clone(inputRawJSON)
rawJSON := inputRawJSON
// Base OpenAI chat completions template with default values
out := `{"model":"","messages":[],"stream":false}`

View File

@@ -11,6 +11,7 @@ func TestIsClaudeThinkingModel(t *testing.T) {
// Claude thinking models - should return true
{"claude-sonnet-4-5-thinking", "claude-sonnet-4-5-thinking", true},
{"claude-opus-4-5-thinking", "claude-opus-4-5-thinking", true},
{"claude-opus-4-6-thinking", "claude-opus-4-6-thinking", true},
{"Claude-Sonnet-Thinking uppercase", "Claude-Sonnet-4-5-Thinking", true},
{"claude thinking mixed case", "Claude-THINKING-Model", true},

View File

@@ -377,9 +377,13 @@ func (h *BaseAPIHandler) ExecuteWithAuthManager(ctx context.Context, handlerType
}
reqMeta := requestExecutionMetadata(ctx)
reqMeta[coreexecutor.RequestedModelMetadataKey] = normalizedModel
payload := rawJSON
if len(payload) == 0 {
payload = nil
}
req := coreexecutor.Request{
Model: normalizedModel,
Payload: cloneBytes(rawJSON),
Payload: payload,
}
opts := coreexecutor.Options{
Stream: false,
@@ -404,7 +408,7 @@ func (h *BaseAPIHandler) ExecuteWithAuthManager(ctx context.Context, handlerType
}
return nil, &interfaces.ErrorMessage{StatusCode: status, Error: err, Addon: addon}
}
return cloneBytes(resp.Payload), nil
return resp.Payload, nil
}
// ExecuteCountWithAuthManager executes a non-streaming request via the core auth manager.
@@ -416,9 +420,13 @@ func (h *BaseAPIHandler) ExecuteCountWithAuthManager(ctx context.Context, handle
}
reqMeta := requestExecutionMetadata(ctx)
reqMeta[coreexecutor.RequestedModelMetadataKey] = normalizedModel
payload := rawJSON
if len(payload) == 0 {
payload = nil
}
req := coreexecutor.Request{
Model: normalizedModel,
Payload: cloneBytes(rawJSON),
Payload: payload,
}
opts := coreexecutor.Options{
Stream: false,
@@ -443,7 +451,7 @@ func (h *BaseAPIHandler) ExecuteCountWithAuthManager(ctx context.Context, handle
}
return nil, &interfaces.ErrorMessage{StatusCode: status, Error: err, Addon: addon}
}
return cloneBytes(resp.Payload), nil
return resp.Payload, nil
}
// ExecuteStreamWithAuthManager executes a streaming request via the core auth manager.
@@ -458,9 +466,13 @@ func (h *BaseAPIHandler) ExecuteStreamWithAuthManager(ctx context.Context, handl
}
reqMeta := requestExecutionMetadata(ctx)
reqMeta[coreexecutor.RequestedModelMetadataKey] = normalizedModel
payload := rawJSON
if len(payload) == 0 {
payload = nil
}
req := coreexecutor.Request{
Model: normalizedModel,
Payload: cloneBytes(rawJSON),
Payload: payload,
}
opts := coreexecutor.Options{
Stream: true,
@@ -684,7 +696,7 @@ func (h *BaseAPIHandler) WriteErrorResponse(c *gin.Context, msg *interfaces.Erro
var previous []byte
if existing, exists := c.Get("API_RESPONSE"); exists {
if existingBytes, ok := existing.([]byte); ok && len(existingBytes) > 0 {
previous = bytes.Clone(existingBytes)
previous = existingBytes
}
}
appendAPIResponse(c, body)

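These handler changes replace the defensive `cloneBytes` copies with direct references and fold empty payloads into `nil` before the executor request is built. A minimal sketch (not repository code) of why the single `len(payload) == 0` check covers both cases: `len` reports zero for nil and zero-length slices alike.

```go
package main

import "fmt"

type request struct {
	Model   string
	Payload []byte
}

// buildRequest mirrors the normalization above: one length check folds nil
// and empty slices into a canonical nil payload.
func buildRequest(model string, rawJSON []byte) request {
	payload := rawJSON
	if len(payload) == 0 {
		payload = nil
	}
	return request{Model: model, Payload: payload}
}

func main() {
	fmt.Println(buildRequest("kimi-k2", nil).Payload == nil)        // true
	fmt.Println(buildRequest("kimi-k2", []byte{}).Payload == nil)   // true
	fmt.Println(len(buildRequest("kimi-k2", []byte(`{}`)).Payload)) // 2
}
```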
View File

@@ -18,6 +18,7 @@ type ManagementTokenRequester interface {
RequestCodexToken(*gin.Context)
RequestAntigravityToken(*gin.Context)
RequestQwenToken(*gin.Context)
RequestKimiToken(*gin.Context)
RequestIFlowToken(*gin.Context)
RequestIFlowCookieToken(*gin.Context)
GetAuthStatus(c *gin.Context)
@@ -55,6 +56,10 @@ func (m *managementTokenRequester) RequestQwenToken(c *gin.Context) {
m.handler.RequestQwenToken(c)
}
func (m *managementTokenRequester) RequestKimiToken(c *gin.Context) {
m.handler.RequestKimiToken(c)
}
func (m *managementTokenRequester) RequestIFlowToken(c *gin.Context) {
m.handler.RequestIFlowToken(c)
}

sdk/auth/kimi.go (new file, 123 lines)
View File

@@ -0,0 +1,123 @@
package auth
import (
"context"
"fmt"
"strings"
"time"
"github.com/router-for-me/CLIProxyAPI/v6/internal/auth/kimi"
"github.com/router-for-me/CLIProxyAPI/v6/internal/browser"
"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth"
log "github.com/sirupsen/logrus"
)
// kimiRefreshLead is the duration before token expiry when refresh should occur.
var kimiRefreshLead = 5 * time.Minute
// KimiAuthenticator implements the OAuth device flow login for Kimi (Moonshot AI).
type KimiAuthenticator struct{}
// NewKimiAuthenticator constructs a new Kimi authenticator.
func NewKimiAuthenticator() Authenticator {
return &KimiAuthenticator{}
}
// Provider returns the provider key for kimi.
func (KimiAuthenticator) Provider() string {
return "kimi"
}
// RefreshLead returns the duration before token expiry when refresh should occur.
// Kimi tokens expire and need to be refreshed before expiry.
func (KimiAuthenticator) RefreshLead() *time.Duration {
return &kimiRefreshLead
}
// Login initiates the Kimi device flow authentication.
func (a KimiAuthenticator) Login(ctx context.Context, cfg *config.Config, opts *LoginOptions) (*coreauth.Auth, error) {
if cfg == nil {
return nil, fmt.Errorf("cliproxy auth: configuration is required")
}
if opts == nil {
opts = &LoginOptions{}
}
authSvc := kimi.NewKimiAuth(cfg)
// Start the device flow
fmt.Println("Starting Kimi authentication...")
deviceCode, err := authSvc.StartDeviceFlow(ctx)
if err != nil {
return nil, fmt.Errorf("kimi: failed to start device flow: %w", err)
}
// Display the verification URL
verificationURL := deviceCode.VerificationURIComplete
if verificationURL == "" {
verificationURL = deviceCode.VerificationURI
}
fmt.Printf("\nTo authenticate, please visit:\n%s\n\n", verificationURL)
if deviceCode.UserCode != "" {
fmt.Printf("User code: %s\n\n", deviceCode.UserCode)
}
// Try to open the browser automatically
if !opts.NoBrowser {
if browser.IsAvailable() {
if errOpen := browser.OpenURL(verificationURL); errOpen != nil {
log.Warnf("Failed to open browser automatically: %v", errOpen)
} else {
fmt.Println("Browser opened automatically.")
}
}
}
fmt.Println("Waiting for authorization...")
if deviceCode.ExpiresIn > 0 {
fmt.Printf("(This will timeout in %d seconds if not authorized)\n", deviceCode.ExpiresIn)
}
// Wait for user authorization
authBundle, err := authSvc.WaitForAuthorization(ctx, deviceCode)
if err != nil {
return nil, fmt.Errorf("kimi: %w", err)
}
// Create the token storage
tokenStorage := authSvc.CreateTokenStorage(authBundle)
// Build metadata with token information
metadata := map[string]any{
"type": "kimi",
"access_token": authBundle.TokenData.AccessToken,
"refresh_token": authBundle.TokenData.RefreshToken,
"token_type": authBundle.TokenData.TokenType,
"scope": authBundle.TokenData.Scope,
"timestamp": time.Now().UnixMilli(),
}
if authBundle.TokenData.ExpiresAt > 0 {
exp := time.Unix(authBundle.TokenData.ExpiresAt, 0).UTC().Format(time.RFC3339)
metadata["expired"] = exp
}
if strings.TrimSpace(authBundle.DeviceID) != "" {
metadata["device_id"] = strings.TrimSpace(authBundle.DeviceID)
}
// Generate a unique filename
fileName := fmt.Sprintf("kimi-%d.json", time.Now().UnixMilli())
fmt.Println("\nKimi authentication successful!")
return &coreauth.Auth{
ID: fileName,
Provider: a.Provider(),
FileName: fileName,
Label: "Kimi User",
Storage: tokenStorage,
Metadata: metadata,
}, nil
}

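The new authenticator plugs the device flow into the package's `Authenticator` interface. A hedged usage sketch of driving `Login` directly; the import paths, the empty `config.Config`, the `Authenticator.Login` signature, and the assumption that this code lives inside the CLIProxyAPI module (so the `internal/config` import compiles) are inferred from identifiers visible in this diff rather than confirmed API.

```go
package main

import (
	"context"
	"log"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
	sdkauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/auth"
)

func main() {
	authenticator := sdkauth.NewKimiAuthenticator()

	// Minimal config stand-in; real callers load the proxy's configuration file.
	cfg := &config.Config{}

	// NoBrowser prints the verification URL and user code instead of opening
	// a browser, matching the opts.NoBrowser branch in Login above.
	auth, err := authenticator.Login(context.Background(), cfg, &sdkauth.LoginOptions{NoBrowser: true})
	if err != nil {
		log.Fatalf("kimi login failed: %v", err)
	}
	log.Printf("saved %s for provider %s", auth.FileName, auth.Provider)
}
```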
View File

@@ -14,6 +14,7 @@ func init() {
registerRefreshLead("gemini", func() Authenticator { return NewGeminiAuthenticator() })
registerRefreshLead("gemini-cli", func() Authenticator { return NewGeminiAuthenticator() })
registerRefreshLead("antigravity", func() Authenticator { return NewAntigravityAuthenticator() })
registerRefreshLead("kimi", func() Authenticator { return NewKimiAuthenticator() })
}
func registerRefreshLead(provider string, factory func() Authenticator) {

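Registering the factory here lets the refresh scheduler pick up `kimiRefreshLead` (5 minutes) for the `kimi` provider. A minimal sketch, not repository code, of how such a lead window is typically applied: refresh once the remaining token lifetime falls inside the lead.

```go
package main

import (
	"fmt"
	"time"
)

// needsRefresh reports whether a token should be refreshed now, given a lead
// window before expiry (5 minutes for Kimi per kimiRefreshLead above).
func needsRefresh(expiresAt time.Time, lead time.Duration, now time.Time) bool {
	return !now.Add(lead).Before(expiresAt)
}

func main() {
	lead := 5 * time.Minute
	expiresAt := time.Now().Add(3 * time.Minute) // only 3 minutes of lifetime left
	fmt.Println(needsRefresh(expiresAt, lead, time.Now())) // true: refresh now
}
```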
View File

@@ -398,6 +398,8 @@ func (s *Service) ensureExecutorsForAuth(a *coreauth.Auth) {
s.coreManager.RegisterExecutor(executor.NewQwenExecutor(s.cfg))
case "iflow":
s.coreManager.RegisterExecutor(executor.NewIFlowExecutor(s.cfg))
case "kimi":
s.coreManager.RegisterExecutor(executor.NewKimiExecutor(s.cfg))
default:
providerKey := strings.ToLower(strings.TrimSpace(a.Provider))
if providerKey == "" {
@@ -799,6 +801,9 @@ func (s *Service) registerModelsForAuth(a *coreauth.Auth) {
case "iflow":
models = registry.GetIFlowModels()
models = applyExcludedModels(models, excluded)
case "kimi":
models = registry.GetKimiModels()
models = applyExcludedModels(models, excluded)
default:
// Handle OpenAI-compatibility providers by name using config
if s.cfg != nil {

View File

@@ -15,6 +15,7 @@ import (
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/thinking/provider/gemini"
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/thinking/provider/geminicli"
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/thinking/provider/iflow"
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/thinking/provider/kimi"
_ "github.com/router-for-me/CLIProxyAPI/v6/internal/thinking/provider/openai"
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"