Mirror of https://github.com/router-for-me/CLIProxyAPI.git (synced 2026-02-03 04:50:52 +08:00)
refactor(gemini-web): Move provider logic to its own package
The Gemini Web API client logic has been relocated from `internal/client/gemini-web` to a new, more specific `internal/provider/gemini-web` package. This refactoring improves code organization and modularity by better isolating provider-specific implementations. As a result of this move, the `GeminiWebState` struct and its methods have been exported (capitalized) to make them accessible from the executor. All call sites have been updated to use the new package path and the exported identifiers.
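A minimal sketch (not part of this commit) of what a call site looks like after the move. It assumes the module path seen in this package's own imports; the package keeps the name geminiwebapi under its new internal/provider/gemini-web location, and the executor package and helper below are hypothetical.

package executor // hypothetical caller

import (
    geminiwebapi "github.com/router-for-me/CLIProxyAPI/v6/internal/provider/gemini-web"
)

// newWebClient shows the updated import path; exported identifiers from the
// provider package (e.g. GeminiClient, NewGeminiClient) are reachable here.
func newWebClient(psid, psidts, proxy string) *geminiwebapi.GeminiClient {
    return geminiwebapi.NewGeminiClient(psid, psidts, proxy)
}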
internal/provider/gemini-web/auth.go (new file, 214 lines added)
@@ -0,0 +1,214 @@
package geminiwebapi

import (
    "crypto/tls"
    "errors"
    "io"
    "net/http"
    "net/http/cookiejar"
    "net/url"
    "os"
    "path/filepath"
    "regexp"
    "strings"
    "time"
)

type httpOptions struct {
    ProxyURL        string
    Insecure        bool
    FollowRedirects bool
}

func newHTTPClient(opts httpOptions) *http.Client {
    transport := &http.Transport{}
    if opts.ProxyURL != "" {
        if pu, err := url.Parse(opts.ProxyURL); err == nil {
            transport.Proxy = http.ProxyURL(pu)
        }
    }
    if opts.Insecure {
        transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
    }
    jar, _ := cookiejar.New(nil)
    client := &http.Client{Transport: transport, Timeout: 60 * time.Second, Jar: jar}
    if !opts.FollowRedirects {
        client.CheckRedirect = func(req *http.Request, via []*http.Request) error {
            return http.ErrUseLastResponse
        }
    }
    return client
}

func applyHeaders(req *http.Request, headers http.Header) {
    for k, v := range headers {
        for _, vv := range v {
            req.Header.Add(k, vv)
        }
    }
}

func applyCookies(req *http.Request, cookies map[string]string) {
    for k, v := range cookies {
        req.AddCookie(&http.Cookie{Name: k, Value: v})
    }
}

func sendInitRequest(cookies map[string]string, proxy string, insecure bool) (*http.Response, map[string]string, error) {
    client := newHTTPClient(httpOptions{ProxyURL: proxy, Insecure: insecure, FollowRedirects: true})
    req, _ := http.NewRequest(http.MethodGet, EndpointInit, nil)
    applyHeaders(req, HeadersGemini)
    applyCookies(req, cookies)
    resp, err := client.Do(req)
    if err != nil {
        return nil, nil, err
    }
    if resp.StatusCode < 200 || resp.StatusCode >= 300 {
        return resp, nil, &AuthError{Msg: resp.Status}
    }
    outCookies := map[string]string{}
    for _, c := range resp.Cookies() {
        outCookies[c.Name] = c.Value
    }
    for k, v := range cookies {
        outCookies[k] = v
    }
    return resp, outCookies, nil
}

func getAccessToken(baseCookies map[string]string, proxy string, verbose bool, insecure bool) (string, map[string]string, error) {
    // Warm-up google.com to gain extra cookies (NID, etc.) and capture them.
    extraCookies := map[string]string{}
    {
        client := newHTTPClient(httpOptions{ProxyURL: proxy, Insecure: insecure, FollowRedirects: true})
        req, _ := http.NewRequest(http.MethodGet, EndpointGoogle, nil)
        resp, _ := client.Do(req)
        if resp != nil {
            if u, err := url.Parse(EndpointGoogle); err == nil {
                for _, c := range client.Jar.Cookies(u) {
                    extraCookies[c.Name] = c.Value
                }
            }
            _ = resp.Body.Close()
        }
    }

    trySets := make([]map[string]string, 0, 8)

    if v1, ok1 := baseCookies["__Secure-1PSID"]; ok1 {
        if v2, ok2 := baseCookies["__Secure-1PSIDTS"]; ok2 {
            merged := map[string]string{"__Secure-1PSID": v1, "__Secure-1PSIDTS": v2}
            if nid, ok := baseCookies["NID"]; ok {
                merged["NID"] = nid
            }
            trySets = append(trySets, merged)
        } else if verbose {
            Debug("Skipping base cookies: __Secure-1PSIDTS missing")
        }
    }

    cacheDir := "temp"
    _ = os.MkdirAll(cacheDir, 0o755)
    if v1, ok1 := baseCookies["__Secure-1PSID"]; ok1 {
        cacheFile := filepath.Join(cacheDir, ".cached_1psidts_"+v1+".txt")
        if b, err := os.ReadFile(cacheFile); err == nil {
            cv := strings.TrimSpace(string(b))
            if cv != "" {
                merged := map[string]string{"__Secure-1PSID": v1, "__Secure-1PSIDTS": cv}
                trySets = append(trySets, merged)
            }
        }
    }

    if len(extraCookies) > 0 {
        trySets = append(trySets, extraCookies)
    }

    reToken := regexp.MustCompile(`"SNlM0e":"([^"]+)"`)

    for _, cookies := range trySets {
        resp, mergedCookies, err := sendInitRequest(cookies, proxy, insecure)
        if err != nil {
            if verbose {
                Warning("Failed init request: %v", err)
            }
            continue
        }
        body, err := io.ReadAll(resp.Body)
        _ = resp.Body.Close()
        if err != nil {
            return "", nil, err
        }
        matches := reToken.FindStringSubmatch(string(body))
        if len(matches) >= 2 {
            token := matches[1]
            if verbose {
                Success("Gemini access token acquired.")
            }
            return token, mergedCookies, nil
        }
    }
    return "", nil, &AuthError{Msg: "Failed to retrieve token."}
}

// rotate1PSIDTS refreshes __Secure-1PSIDTS
func rotate1PSIDTS(cookies map[string]string, proxy string, insecure bool) (string, error) {
    _, ok := cookies["__Secure-1PSID"]
    if !ok {
        return "", &AuthError{Msg: "__Secure-1PSID missing"}
    }

    tr := &http.Transport{}
    if proxy != "" {
        if pu, err := url.Parse(proxy); err == nil {
            tr.Proxy = http.ProxyURL(pu)
        }
    }
    if insecure {
        tr.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
    }
    client := &http.Client{Transport: tr, Timeout: 60 * time.Second}

    req, _ := http.NewRequest(http.MethodPost, EndpointRotateCookies, io.NopCloser(stringsReader("[000,\"-0000000000000000000\"]")))
    applyHeaders(req, HeadersRotateCookies)
    applyCookies(req, cookies)

    resp, err := client.Do(req)
    if err != nil {
        return "", err
    }
    defer func() {
        _ = resp.Body.Close()
    }()

    if resp.StatusCode == http.StatusUnauthorized {
        return "", &AuthError{Msg: "unauthorized"}
    }
    if resp.StatusCode < 200 || resp.StatusCode >= 300 {
        return "", errors.New(resp.Status)
    }

    for _, c := range resp.Cookies() {
        if c.Name == "__Secure-1PSIDTS" {
            return c.Value, nil
        }
    }
    return "", nil
}

// Minimal reader helpers to avoid importing strings everywhere.
type constReader struct {
    s string
    i int
}

func (r *constReader) Read(p []byte) (int, error) {
    if r.i >= len(r.s) {
        return 0, io.EOF
    }
    n := copy(p, r.s[r.i:])
    r.i += n
    return n, nil
}

func stringsReader(s string) io.Reader { return &constReader{s: s} }
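An illustrative in-package test (not part of this commit) of the redirect policy above: with FollowRedirects set to false, the client surfaces the 3xx response itself instead of following the Location header. The test server and names are hypothetical.

package geminiwebapi

import (
    "net/http"
    "net/http/httptest"
    "testing"
)

func TestNoFollowRedirects(t *testing.T) {
    srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        http.Redirect(w, r, "/elsewhere", http.StatusFound)
    }))
    defer srv.Close()

    client := newHTTPClient(httpOptions{FollowRedirects: false})
    resp, err := client.Get(srv.URL)
    if err != nil {
        t.Fatal(err)
    }
    defer resp.Body.Close()
    // CheckRedirect returns http.ErrUseLastResponse, so the 302 is returned as-is.
    if resp.StatusCode != http.StatusFound {
        t.Fatalf("expected 302, got %d", resp.StatusCode)
    }
}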
internal/provider/gemini-web/client.go (new file, 692 lines added)
@@ -0,0 +1,692 @@
package geminiwebapi

import (
    "encoding/json"
    "errors"
    "fmt"
    "io"
    "net/http"
    "net/url"
    "regexp"
    "strings"
    "time"
)

// GeminiClient is the async http client interface (Go port)
type GeminiClient struct {
    Cookies     map[string]string
    Proxy       string
    Running     bool
    httpClient  *http.Client
    AccessToken string
    Timeout     time.Duration
    insecure    bool
}

var NanoBananaModel = map[string]struct{}{
    "gemini-2.5-flash-image-preview": {},
}

// NewGeminiClient creates a client. Pass empty strings to auto-detect via browser cookies (not implemented in Go port).
func NewGeminiClient(secure1psid string, secure1psidts string, proxy string, opts ...func(*GeminiClient)) *GeminiClient {
    c := &GeminiClient{
        Cookies:  map[string]string{},
        Proxy:    proxy,
        Running:  false,
        Timeout:  300 * time.Second,
        insecure: false,
    }
    if secure1psid != "" {
        c.Cookies["__Secure-1PSID"] = secure1psid
        if secure1psidts != "" {
            c.Cookies["__Secure-1PSIDTS"] = secure1psidts
        }
    }
    for _, f := range opts {
        f(c)
    }
    return c
}

// WithInsecureTLS sets skipping TLS verification (to mirror httpx verify=False)
func WithInsecureTLS(insecure bool) func(*GeminiClient) {
    return func(c *GeminiClient) { c.insecure = insecure }
}

// Init initializes the access token and http client.
func (c *GeminiClient) Init(timeoutSec float64, verbose bool) error {
    // get access token
    token, validCookies, err := getAccessToken(c.Cookies, c.Proxy, verbose, c.insecure)
    if err != nil {
        c.Close(0)
        return err
    }
    c.AccessToken = token
    c.Cookies = validCookies

    tr := &http.Transport{}
    if c.Proxy != "" {
        if pu, errParse := url.Parse(c.Proxy); errParse == nil {
            tr.Proxy = http.ProxyURL(pu)
        }
    }
    if c.insecure {
        // set via roundtripper in utils_get_access_token for token; here we reuse via default Transport
        // intentionally not adding here, as requests rely on endpoints with normal TLS
    }
    c.httpClient = &http.Client{Transport: tr, Timeout: time.Duration(timeoutSec * float64(time.Second))}
    c.Running = true

    c.Timeout = time.Duration(timeoutSec * float64(time.Second))
    if verbose {
        Success("Gemini client initialized successfully.")
    }
    return nil
}

func (c *GeminiClient) Close(delaySec float64) {
    if delaySec > 0 {
        time.Sleep(time.Duration(delaySec * float64(time.Second)))
    }
    c.Running = false
}

// ensureRunning mirrors the Python decorator behavior and retries on APIError.
func (c *GeminiClient) ensureRunning() error {
    if c.Running {
        return nil
    }
    return c.Init(float64(c.Timeout/time.Second), false)
}

// RotateTS performs a RotateCookies request and returns the new __Secure-1PSIDTS value (if any).
func (c *GeminiClient) RotateTS() (string, error) {
    if c == nil {
        return "", fmt.Errorf("gemini web client is nil")
    }
    return rotate1PSIDTS(c.Cookies, c.Proxy, c.insecure)
}

// GenerateContent sends a prompt (with optional files) and parses the response into ModelOutput.
func (c *GeminiClient) GenerateContent(prompt string, files []string, model Model, gem *Gem, chat *ChatSession) (ModelOutput, error) {
    var empty ModelOutput
    if prompt == "" {
        return empty, &ValueError{Msg: "Prompt cannot be empty."}
    }
    if err := c.ensureRunning(); err != nil {
        return empty, err
    }

    // Retry wrapper similar to decorator (retry=2)
    retries := 2
    for {
        out, err := c.generateOnce(prompt, files, model, gem, chat)
        if err == nil {
            return out, nil
        }
        var apiErr *APIError
        var imgErr *ImageGenerationError
        shouldRetry := false
        if errors.As(err, &imgErr) {
            if retries > 1 {
                retries = 1
            } // only once for image generation
            shouldRetry = true
        } else if errors.As(err, &apiErr) {
            shouldRetry = true
        }
        if shouldRetry && retries > 0 {
            time.Sleep(time.Second)
            retries--
            continue
        }
        return empty, err
    }
}

func ensureAnyLen(slice []any, index int) []any {
    if index < len(slice) {
        return slice
    }
    gap := index + 1 - len(slice)
    return append(slice, make([]any, gap)...)
}

func (c *GeminiClient) generateOnce(prompt string, files []string, model Model, gem *Gem, chat *ChatSession) (ModelOutput, error) {
    var empty ModelOutput
    // Build f.req
    var uploaded [][]any
    for _, fp := range files {
        id, err := uploadFile(fp, c.Proxy, c.insecure)
        if err != nil {
            return empty, err
        }
        name, err := parseFileName(fp)
        if err != nil {
            return empty, err
        }
        uploaded = append(uploaded, []any{[]any{id}, name})
    }
    var item0 any
    if len(uploaded) > 0 {
        item0 = []any{prompt, 0, nil, uploaded}
    } else {
        item0 = []any{prompt}
    }
    var item2 any = nil
    if chat != nil {
        item2 = chat.Metadata()
    }

    inner := []any{item0, nil, item2}
    requestedModel := strings.ToLower(model.Name)
    if chat != nil && chat.RequestedModel() != "" {
        requestedModel = chat.RequestedModel()
    }
    if _, ok := NanoBananaModel[requestedModel]; ok {
        inner = ensureAnyLen(inner, 49)
        inner[49] = 14
    }
    if gem != nil {
        // pad with 16 nils then gem ID
        for i := 0; i < 16; i++ {
            inner = append(inner, nil)
        }
        inner = append(inner, gem.ID)
    }
    innerJSON, _ := json.Marshal(inner)
    outer := []any{nil, string(innerJSON)}
    outerJSON, _ := json.Marshal(outer)

    // form
    form := url.Values{}
    form.Set("at", c.AccessToken)
    form.Set("f.req", string(outerJSON))

    req, _ := http.NewRequest(http.MethodPost, EndpointGenerate, strings.NewReader(form.Encode()))
    // headers
    for k, v := range HeadersGemini {
        for _, vv := range v {
            req.Header.Add(k, vv)
        }
    }
    for k, v := range model.ModelHeader {
        for _, vv := range v {
            req.Header.Add(k, vv)
        }
    }
    req.Header.Set("Content-Type", "application/x-www-form-urlencoded;charset=utf-8")
    for k, v := range c.Cookies {
        req.AddCookie(&http.Cookie{Name: k, Value: v})
    }

    resp, err := c.httpClient.Do(req)
    if err != nil {
        return empty, &TimeoutError{GeminiError{Msg: "Generate content request timed out."}}
    }
    defer func() {
        _ = resp.Body.Close()
    }()

    if resp.StatusCode == 429 {
        // Surface 429 as TemporarilyBlocked to match Python behavior
        c.Close(0)
        return empty, &TemporarilyBlocked{GeminiError{Msg: "Too many requests. IP temporarily blocked."}}
    }
    if resp.StatusCode != 200 {
        c.Close(0)
        return empty, &APIError{Msg: fmt.Sprintf("Failed to generate contents. Status %d", resp.StatusCode)}
    }

    // Read body and split lines; take the 3rd line (index 2)
    b, _ := io.ReadAll(resp.Body)
    parts := strings.Split(string(b), "\n")
    if len(parts) < 3 {
        c.Close(0)
        return empty, &APIError{Msg: "Invalid response data received."}
    }
    var responseJSON []any
    if err = json.Unmarshal([]byte(parts[2]), &responseJSON); err != nil {
        c.Close(0)
        return empty, &APIError{Msg: "Invalid response data received."}
    }

    // find body where main_part[4] exists
    var (
        body      any
        bodyIndex int
    )
    for i, p := range responseJSON {
        arr, ok := p.([]any)
        if !ok || len(arr) < 3 {
            continue
        }
        s, ok := arr[2].(string)
        if !ok {
            continue
        }
        var mainPart []any
        if err = json.Unmarshal([]byte(s), &mainPart); err != nil {
            continue
        }
        if len(mainPart) > 4 && mainPart[4] != nil {
            body = mainPart
            bodyIndex = i
            break
        }
    }
    if body == nil {
        // Fallback: scan subsequent lines to locate a data frame with a non-empty body (mainPart[4]).
        var lastTop []any
        for li := 3; li < len(parts) && body == nil; li++ {
            line := strings.TrimSpace(parts[li])
            if line == "" {
                continue
            }
            var top []any
            if err = json.Unmarshal([]byte(line), &top); err != nil {
                continue
            }
            lastTop = top
            for i, p := range top {
                arr, ok := p.([]any)
                if !ok || len(arr) < 3 {
                    continue
                }
                s, ok := arr[2].(string)
                if !ok {
                    continue
                }
                var mainPart []any
                if err = json.Unmarshal([]byte(s), &mainPart); err != nil {
                    continue
                }
                if len(mainPart) > 4 && mainPart[4] != nil {
                    body = mainPart
                    bodyIndex = i
                    responseJSON = top
                    break
                }
            }
        }
        // Parse nested error code to align with Python mapping
        var top []any
        // Prefer lastTop from fallback scan; otherwise try parts[2]
        if len(lastTop) > 0 {
            top = lastTop
        } else {
            _ = json.Unmarshal([]byte(parts[2]), &top)
        }
        if len(top) > 0 {
            if code, ok := extractErrorCode(top); ok {
                switch code {
                case ErrorUsageLimitExceeded:
                    return empty, &UsageLimitExceeded{GeminiError{Msg: fmt.Sprintf("Failed to generate contents. Usage limit of %s has exceeded. Please try switching to another model.", model.Name)}}
                case ErrorModelInconsistent:
                    return empty, &ModelInvalid{GeminiError{Msg: "Selected model is inconsistent or unavailable."}}
                case ErrorModelHeaderInvalid:
                    return empty, &APIError{Msg: "Invalid model header string. Please update the selected model header."}
                case ErrorIPTemporarilyBlocked:
                    return empty, &TemporarilyBlocked{GeminiError{Msg: "Too many requests. IP temporarily blocked."}}
                }
            }
        }
        // Debug("Invalid response: control frames only; no body found")
        // Close the client to force re-initialization on next request (parity with Python client behavior)
        c.Close(0)
        return empty, &APIError{Msg: "Failed to generate contents. Invalid response data received."}
    }

    bodyArr := body.([]any)
    // metadata
    var metadata []string
    if len(bodyArr) > 1 {
        if metaArr, ok := bodyArr[1].([]any); ok {
            for _, v := range metaArr {
                if s, isOk := v.(string); isOk {
                    metadata = append(metadata, s)
                }
            }
        }
    }

    // candidates parsing
    candContainer, ok := bodyArr[4].([]any)
    if !ok {
        return empty, &APIError{Msg: "Failed to parse response body."}
    }
    candidates := make([]Candidate, 0, len(candContainer))
    reCard := regexp.MustCompile(`^http://googleusercontent\.com/card_content/\d+`)
    reGen := regexp.MustCompile(`http://googleusercontent\.com/image_generation_content/\d+`)

    for ci, candAny := range candContainer {
        cArr, isOk := candAny.([]any)
        if !isOk {
            continue
        }
        // text: cArr[1][0]
        var text string
        if len(cArr) > 1 {
            if sArr, isOk1 := cArr[1].([]any); isOk1 && len(sArr) > 0 {
                text, _ = sArr[0].(string)
            }
        }
        if reCard.MatchString(text) {
            // candidate[22] and candidate[22][0] or text
            if len(cArr) > 22 {
                if arr, isOk1 := cArr[22].([]any); isOk1 && len(arr) > 0 {
                    if s, isOk2 := arr[0].(string); isOk2 {
                        text = s
                    }
                }
            }
        }

        // thoughts: candidate[37][0][0]
        var thoughts *string
        if len(cArr) > 37 {
            if a, ok1 := cArr[37].([]any); ok1 && len(a) > 0 {
                if b1, ok2 := a[0].([]any); ok2 && len(b1) > 0 {
                    if s, ok3 := b1[0].(string); ok3 {
                        ss := decodeHTML(s)
                        thoughts = &ss
                    }
                }
            }
        }

        // web images: candidate[12][1]
        var webImages []WebImage
        var imgSection any
        if len(cArr) > 12 {
            imgSection = cArr[12]
        }
        if arr, ok1 := imgSection.([]any); ok1 && len(arr) > 1 {
            if imagesArr, ok2 := arr[1].([]any); ok2 {
                for _, wiAny := range imagesArr {
                    wiArr, ok3 := wiAny.([]any)
                    if !ok3 {
                        continue
                    }
                    // url: wiArr[0][0][0], title: wiArr[7][0], alt: wiArr[0][4]
                    var urlStr, title, alt string
                    if len(wiArr) > 0 {
                        if a, ok5 := wiArr[0].([]any); ok5 && len(a) > 0 {
                            if b1, ok6 := a[0].([]any); ok6 && len(b1) > 0 {
                                urlStr, _ = b1[0].(string)
                            }
                            if len(a) > 4 {
                                if s, ok6 := a[4].(string); ok6 {
                                    alt = s
                                }
                            }
                        }
                    }
                    if len(wiArr) > 7 {
                        if a, ok4 := wiArr[7].([]any); ok4 && len(a) > 0 {
                            title, _ = a[0].(string)
                        }
                    }
                    webImages = append(webImages, WebImage{Image: Image{URL: urlStr, Title: title, Alt: alt, Proxy: c.Proxy}})
                }
            }
        }

        // generated images
        var genImages []GeneratedImage
        hasGen := false
        if arr, ok1 := imgSection.([]any); ok1 && len(arr) > 7 {
            if a, ok2 := arr[7].([]any); ok2 && len(a) > 0 && a[0] != nil {
                hasGen = true
            }
        }
        if hasGen {
            // find img part
            var imgBody []any
            for pi := bodyIndex; pi < len(responseJSON); pi++ {
                part := responseJSON[pi]
                arr, ok1 := part.([]any)
                if !ok1 || len(arr) < 3 {
                    continue
                }
                s, ok1 := arr[2].(string)
                if !ok1 {
                    continue
                }
                var mp []any
                if err = json.Unmarshal([]byte(s), &mp); err != nil {
                    continue
                }
                if len(mp) > 4 {
                    if tt, ok2 := mp[4].([]any); ok2 && len(tt) > ci {
                        if sec, ok3 := tt[ci].([]any); ok3 && len(sec) > 12 {
                            if ss, ok4 := sec[12].([]any); ok4 && len(ss) > 7 {
                                if first, ok5 := ss[7].([]any); ok5 && len(first) > 0 && first[0] != nil {
                                    imgBody = mp
                                    break
                                }
                            }
                        }
                    }
                }
            }
            if imgBody == nil {
                return empty, &ImageGenerationError{APIError{Msg: "Failed to parse generated images."}}
            }
            imgCand := imgBody[4].([]any)[ci].([]any)
            if len(imgCand) > 1 {
                if a, ok1 := imgCand[1].([]any); ok1 && len(a) > 0 {
                    if s, ok2 := a[0].(string); ok2 {
                        text = strings.TrimSpace(reGen.ReplaceAllString(s, ""))
                    }
                }
            }
            // images list at imgCand[12][7][0]
            if len(imgCand) > 12 {
                if s1, ok1 := imgCand[12].([]any); ok1 && len(s1) > 7 {
                    if s2, ok2 := s1[7].([]any); ok2 && len(s2) > 0 {
                        if s3, ok3 := s2[0].([]any); ok3 {
                            for ii, giAny := range s3 {
                                ga, ok4 := giAny.([]any)
                                if !ok4 || len(ga) < 4 {
                                    continue
                                }
                                // url: ga[0][3][3]
                                var urlStr, title, alt string
                                if a, ok5 := ga[0].([]any); ok5 && len(a) > 3 {
                                    if b1, ok6 := a[3].([]any); ok6 && len(b1) > 3 {
                                        urlStr, _ = b1[3].(string)
                                    }
                                }
                                // title from ga[3][6]
                                if len(ga) > 3 {
                                    if a, ok5 := ga[3].([]any); ok5 {
                                        if len(a) > 6 {
                                            if v, ok6 := a[6].(float64); ok6 && v != 0 {
                                                title = fmt.Sprintf("[Generated Image %.0f]", v)
                                            } else {
                                                title = "[Generated Image]"
                                            }
                                        } else {
                                            title = "[Generated Image]"
                                        }
                                        // alt from ga[3][5][ii] fallback
                                        if len(a) > 5 {
                                            if tt, ok6 := a[5].([]any); ok6 {
                                                if ii < len(tt) {
                                                    if s, ok7 := tt[ii].(string); ok7 {
                                                        alt = s
                                                    }
                                                } else if len(tt) > 0 {
                                                    if s, ok7 := tt[0].(string); ok7 {
                                                        alt = s
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                                genImages = append(genImages, GeneratedImage{Image: Image{URL: urlStr, Title: title, Alt: alt, Proxy: c.Proxy}, Cookies: c.Cookies})
                            }
                        }
                    }
                }
            }
        }

        cand := Candidate{
            RCID:            fmt.Sprintf("%v", cArr[0]),
            Text:            decodeHTML(text),
            Thoughts:        thoughts,
            WebImages:       webImages,
            GeneratedImages: genImages,
        }
        candidates = append(candidates, cand)
    }

    if len(candidates) == 0 {
        return empty, &GeminiError{Msg: "Failed to generate contents. No output data found in response."}
    }
    output := ModelOutput{Metadata: metadata, Candidates: candidates, Chosen: 0}
    if chat != nil {
        chat.lastOutput = &output
    }
    return output, nil
}

// extractErrorCode attempts to navigate the known nested error structure and fetch the integer code.
// Mirrors Python path: response_json[0][5][2][0][1][0]
func extractErrorCode(top []any) (int, bool) {
    if len(top) == 0 {
        return 0, false
    }
    a, ok := top[0].([]any)
    if !ok || len(a) <= 5 {
        return 0, false
    }
    b, ok := a[5].([]any)
    if !ok || len(b) <= 2 {
        return 0, false
    }
    c, ok := b[2].([]any)
    if !ok || len(c) == 0 {
        return 0, false
    }
    d, ok := c[0].([]any)
    if !ok || len(d) <= 1 {
        return 0, false
    }
    e, ok := d[1].([]any)
    if !ok || len(e) == 0 {
        return 0, false
    }
    f, ok := e[0].(float64)
    if !ok {
        return 0, false
    }
    return int(f), true
}

// StartChat returns a ChatSession attached to the client
func (c *GeminiClient) StartChat(model Model, gem *Gem, metadata []string) *ChatSession {
    return &ChatSession{client: c, metadata: normalizeMeta(metadata), model: model, gem: gem, requestedModel: strings.ToLower(model.Name)}
}

// ChatSession holds conversation metadata
type ChatSession struct {
    client         *GeminiClient
    metadata       []string // cid, rid, rcid
    lastOutput     *ModelOutput
    model          Model
    gem            *Gem
    requestedModel string
}

func (cs *ChatSession) String() string {
    var cid, rid, rcid string
    if len(cs.metadata) > 0 {
        cid = cs.metadata[0]
    }
    if len(cs.metadata) > 1 {
        rid = cs.metadata[1]
    }
    if len(cs.metadata) > 2 {
        rcid = cs.metadata[2]
    }
    return fmt.Sprintf("ChatSession(cid='%s', rid='%s', rcid='%s')", cid, rid, rcid)
}

func normalizeMeta(v []string) []string {
    out := []string{"", "", ""}
    for i := 0; i < len(v) && i < 3; i++ {
        out[i] = v[i]
    }
    return out
}

func (cs *ChatSession) Metadata() []string     { return cs.metadata }
func (cs *ChatSession) SetMetadata(v []string) { cs.metadata = normalizeMeta(v) }
func (cs *ChatSession) RequestedModel() string { return cs.requestedModel }
func (cs *ChatSession) SetRequestedModel(name string) {
    cs.requestedModel = strings.ToLower(name)
}
func (cs *ChatSession) CID() string {
    if len(cs.metadata) > 0 {
        return cs.metadata[0]
    }
    return ""
}
func (cs *ChatSession) RID() string {
    if len(cs.metadata) > 1 {
        return cs.metadata[1]
    }
    return ""
}
func (cs *ChatSession) RCID() string {
    if len(cs.metadata) > 2 {
        return cs.metadata[2]
    }
    return ""
}
func (cs *ChatSession) setCID(v string) {
    if len(cs.metadata) < 1 {
        cs.metadata = normalizeMeta(cs.metadata)
    }
    cs.metadata[0] = v
}
func (cs *ChatSession) setRID(v string) {
    if len(cs.metadata) < 2 {
        cs.metadata = normalizeMeta(cs.metadata)
    }
    cs.metadata[1] = v
}
func (cs *ChatSession) setRCID(v string) {
    if len(cs.metadata) < 3 {
        cs.metadata = normalizeMeta(cs.metadata)
    }
    cs.metadata[2] = v
}

// SendMessage shortcut to client's GenerateContent
func (cs *ChatSession) SendMessage(prompt string, files []string) (ModelOutput, error) {
    out, err := cs.client.GenerateContent(prompt, files, cs.model, cs.gem, cs)
    if err == nil {
        cs.lastOutput = &out
        cs.SetMetadata(out.Metadata)
        cs.setRCID(out.RCID())
    }
    return out, err
}

// ChooseCandidate selects a candidate from last output and updates rcid
func (cs *ChatSession) ChooseCandidate(index int) (ModelOutput, error) {
    if cs.lastOutput == nil {
        return ModelOutput{}, &ValueError{Msg: "No previous output data found in this chat session."}
    }
    if index >= len(cs.lastOutput.Candidates) {
        return ModelOutput{}, &ValueError{Msg: fmt.Sprintf("Index %d exceeds candidates", index)}
    }
    cs.lastOutput.Chosen = index
    cs.setRCID(cs.lastOutput.RCID())
    return *cs.lastOutput, nil
}
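A minimal end-to-end sketch (not part of this commit) of the client lifecycle defined above; the cookie values are placeholders and the helper name is hypothetical.

package geminiwebapi

func exampleChat(psid, psidts string) (string, error) {
    client := NewGeminiClient(psid, psidts, "" /* proxy */, WithInsecureTLS(false))
    if err := client.Init(300, true); err != nil {
        return "", err
    }
    chat := client.StartChat(ModelG25Flash, nil /* gem */, nil /* fresh metadata */)
    out, err := chat.SendMessage("Hello from the proxy.", nil)
    if err != nil {
        return "", err
    }
    // Chosen defaults to 0; Text carries the decoded candidate text.
    return out.Candidates[out.Chosen].Text, nil
}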
internal/provider/gemini-web/convert_ext.go (new file, 178 lines added)
@@ -0,0 +1,178 @@
package geminiwebapi

import (
    "bytes"
    "encoding/json"
    "fmt"
    "math"
    "regexp"
    "strings"
    "time"
    "unicode/utf8"
)

var (
    reGoogle   = regexp.MustCompile("(\\()?\\[`([^`]+?)`\\]\\(https://www\\.google\\.com/search\\?q=[^)]*\\)(\\))?")
    reColonNum = regexp.MustCompile(`([^:]+:\d+)`)
    reInline   = regexp.MustCompile("`(\\[[^\\]]+\\]\\([^\\)]+\\))`")
)

func unescapeGeminiText(s string) string {
    if s == "" {
        return s
    }
    s = strings.ReplaceAll(s, "&lt;", "<") // decode the HTML entity form of "<"
    s = strings.ReplaceAll(s, "\\<", "<")
    s = strings.ReplaceAll(s, "\\_", "_")
    s = strings.ReplaceAll(s, "\\>", ">")
    return s
}

func postProcessModelText(text string) string {
    text = reGoogle.ReplaceAllStringFunc(text, func(m string) string {
        subs := reGoogle.FindStringSubmatch(m)
        if len(subs) < 4 {
            return m
        }
        outerOpen := subs[1]
        display := subs[2]
        target := display
        if loc := reColonNum.FindString(display); loc != "" {
            target = loc
        }
        newSeg := "[`" + display + "`](" + target + ")"
        if outerOpen != "" {
            return "(" + newSeg + ")"
        }
        return newSeg
    })
    text = reInline.ReplaceAllString(text, "$1")
    return text
}

func estimateTokens(s string) int {
    if s == "" {
        return 0
    }
    rc := float64(utf8.RuneCountInString(s))
    if rc <= 0 {
        return 0
    }
    est := int(math.Ceil(rc / 4.0))
    if est < 0 {
        return 0
    }
    return est
}

// ConvertOutputToGemini converts simplified ModelOutput to Gemini API-like JSON.
// promptText is used only to estimate usage tokens to populate usage fields.
func ConvertOutputToGemini(output *ModelOutput, modelName string, promptText string) ([]byte, error) {
    if output == nil || len(output.Candidates) == 0 {
        return nil, fmt.Errorf("empty output")
    }

    parts := make([]map[string]any, 0, 2)

    var thoughtsText string
    if output.Candidates[0].Thoughts != nil {
        if t := strings.TrimSpace(*output.Candidates[0].Thoughts); t != "" {
            thoughtsText = unescapeGeminiText(t)
            parts = append(parts, map[string]any{
                "text":    thoughtsText,
                "thought": true,
            })
        }
    }

    visible := unescapeGeminiText(output.Candidates[0].Text)
    finalText := postProcessModelText(visible)
    if finalText != "" {
        parts = append(parts, map[string]any{"text": finalText})
    }

    if imgs := output.Candidates[0].GeneratedImages; len(imgs) > 0 {
        for _, gi := range imgs {
            if mime, data, err := FetchGeneratedImageData(gi); err == nil && data != "" {
                parts = append(parts, map[string]any{
                    "inlineData": map[string]any{
                        "mimeType": mime,
                        "data":     data,
                    },
                })
            }
        }
    }

    promptTokens := estimateTokens(promptText)
    completionTokens := estimateTokens(finalText)
    thoughtsTokens := 0
    if thoughtsText != "" {
        thoughtsTokens = estimateTokens(thoughtsText)
    }
    totalTokens := promptTokens + completionTokens

    now := time.Now()
    resp := map[string]any{
        "candidates": []any{
            map[string]any{
                "content": map[string]any{
                    "parts": parts,
                    "role":  "model",
                },
                "finishReason": "stop",
                "index":        0,
            },
        },
        "createTime":   now.Format(time.RFC3339Nano),
        "responseId":   fmt.Sprintf("gemini-web-%d", now.UnixNano()),
        "modelVersion": modelName,
        "usageMetadata": map[string]any{
            "promptTokenCount":     promptTokens,
            "candidatesTokenCount": completionTokens,
            "thoughtsTokenCount":   thoughtsTokens,
            "totalTokenCount":      totalTokens,
        },
    }
    b, err := json.Marshal(resp)
    if err != nil {
        return nil, fmt.Errorf("failed to marshal gemini response: %w", err)
    }
    return ensureColonSpacing(b), nil
}

// ensureColonSpacing inserts a single space after JSON key-value colons while
// leaving string content untouched. This matches the relaxed formatting used by
// Gemini responses and keeps downstream text-processing tools compatible with
// the proxy output.
func ensureColonSpacing(b []byte) []byte {
    if len(b) == 0 {
        return b
    }
    var out bytes.Buffer
    out.Grow(len(b) + len(b)/8)
    inString := false
    escaped := false
    for i := 0; i < len(b); i++ {
        ch := b[i]
        out.WriteByte(ch)
        if escaped {
            escaped = false
            continue
        }
        switch ch {
        case '\\':
            escaped = true
        case '"':
            inString = !inString
        case ':':
            if !inString && i+1 < len(b) {
                next := b[i+1]
                if next != ' ' && next != '\n' && next != '\r' && next != '\t' {
                    out.WriteByte(' ')
                }
            }
        }
    }
    return out.Bytes()
}
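A short usage sketch (not part of this commit) of the conversion helper above; the prompt is used only for the heuristic token estimate (roughly one token per four runes).

package geminiwebapi

import "fmt"

func exampleConvert(out ModelOutput, prompt string) error {
    raw, err := ConvertOutputToGemini(&out, "gemini-2.5-flash", prompt)
    if err != nil {
        return err
    }
    // raw is Gemini-API-shaped JSON: candidates, usageMetadata, and a single
    // space guaranteed after each key-value colon by ensureColonSpacing.
    fmt.Println(string(raw))
    return nil
}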
internal/provider/gemini-web/errors.go (new file, 47 lines added)
@@ -0,0 +1,47 @@
package geminiwebapi

type AuthError struct{ Msg string }

func (e *AuthError) Error() string {
    if e.Msg == "" {
        return "authentication error"
    }
    return e.Msg
}

type APIError struct{ Msg string }

func (e *APIError) Error() string {
    if e.Msg == "" {
        return "api error"
    }
    return e.Msg
}

type ImageGenerationError struct{ APIError }

type GeminiError struct{ Msg string }

func (e *GeminiError) Error() string {
    if e.Msg == "" {
        return "gemini error"
    }
    return e.Msg
}

type TimeoutError struct{ GeminiError }

type UsageLimitExceeded struct{ GeminiError }

type ModelInvalid struct{ GeminiError }

type TemporarilyBlocked struct{ GeminiError }

type ValueError struct{ Msg string }

func (e *ValueError) Error() string {
    if e.Msg == "" {
        return "value error"
    }
    return e.Msg
}
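An illustrative triage helper (not part of this commit) showing how callers can distinguish the typed errors above with errors.As, loosely mirroring the retry loop in client.go.

package geminiwebapi

import "errors"

func isRetryable(err error) bool {
    var apiErr *APIError
    var blocked *TemporarilyBlocked
    switch {
    case errors.As(err, &blocked):
        return false // back off instead of retrying a blocked IP
    case errors.As(err, &apiErr):
        return true
    default:
        return false
    }
}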
internal/provider/gemini-web/logging.go (new file, 131 lines added)
@@ -0,0 +1,131 @@
package geminiwebapi

import (
    "fmt"
    "os"
    "strings"

    log "github.com/sirupsen/logrus"
)

// init honors GEMINI_WEBAPI_LOG to keep parity with the Python client.
func init() {
    if lvl := os.Getenv("GEMINI_WEBAPI_LOG"); lvl != "" {
        SetLogLevel(lvl)
    }
}

// SetLogLevel adjusts logging verbosity using CLI-style strings.
func SetLogLevel(level string) {
    switch strings.ToUpper(level) {
    case "TRACE":
        log.SetLevel(log.TraceLevel)
    case "DEBUG":
        log.SetLevel(log.DebugLevel)
    case "INFO":
        log.SetLevel(log.InfoLevel)
    case "WARNING", "WARN":
        log.SetLevel(log.WarnLevel)
    case "ERROR":
        log.SetLevel(log.ErrorLevel)
    case "CRITICAL", "FATAL":
        log.SetLevel(log.FatalLevel)
    default:
        log.SetLevel(log.InfoLevel)
    }
}

func prefix(format string) string { return "[gemini_webapi] " + format }

func Debug(format string, v ...any) { log.Debugf(prefix(format), v...) }

// DebugRaw logs without the module prefix; use sparingly for messages
// that should integrate with global formatting without extra tags.
func DebugRaw(format string, v ...any) { log.Debugf(format, v...) }
func Info(format string, v ...any)     { log.Infof(prefix(format), v...) }
func Warning(format string, v ...any)  { log.Warnf(prefix(format), v...) }
func Error(format string, v ...any)    { log.Errorf(prefix(format), v...) }
func Success(format string, v ...any)  { log.Infof(prefix("SUCCESS "+format), v...) }

// MaskToken28 returns a fixed-length (28) masked representation showing:
// first 8 chars + 8 asterisks + 4 middle chars + last 8 chars.
// If the input is shorter than 20 characters, it returns a fully masked string
// of length min(len(s), 28).
func MaskToken28(s string) string {
    n := len(s)
    if n == 0 {
        return ""
    }
    if n < 20 {
        return strings.Repeat("*", n)
    }
    // Pick 4 middle characters around the center
    midStart := n/2 - 2
    if midStart < 8 {
        midStart = 8
    }
    if midStart+4 > n-8 {
        midStart = n - 8 - 4
        if midStart < 8 {
            midStart = 8
        }
    }
    prefixByte := s[:8]
    middle := s[midStart : midStart+4]
    suffix := s[n-8:]
    return prefixByte + strings.Repeat("*", 4) + middle + strings.Repeat("*", 4) + suffix
}

// BuildUpstreamRequestLog builds a compact preview string for upstream request logging.
func BuildUpstreamRequestLog(account string, contextOn bool, useTags, explicitContext bool, prompt string, filesCount int, reuse bool, metaLen int, gem *Gem) string {
    var sb strings.Builder
    sb.WriteString("\n\n=== GEMINI WEB UPSTREAM ===\n")
    sb.WriteString(fmt.Sprintf("account: %s\n", account))
    if contextOn {
        sb.WriteString("context_mode: on\n")
    } else {
        sb.WriteString("context_mode: off\n")
    }
    if reuse {
        sb.WriteString("reuseIdx: 1\n")
    } else {
        sb.WriteString("reuseIdx: 0\n")
    }
    sb.WriteString(fmt.Sprintf("useTags: %t\n", useTags))
    sb.WriteString(fmt.Sprintf("metadata_len: %d\n", metaLen))
    if explicitContext {
        sb.WriteString("explicit_context: true\n")
    } else {
        sb.WriteString("explicit_context: false\n")
    }
    if filesCount > 0 {
        sb.WriteString(fmt.Sprintf("files: %d\n", filesCount))
    }

    if gem != nil {
        sb.WriteString("gem:\n")
        if gem.ID != "" {
            sb.WriteString(fmt.Sprintf(" id: %s\n", gem.ID))
        }
        if gem.Name != "" {
            sb.WriteString(fmt.Sprintf(" name: %s\n", gem.Name))
        }
        sb.WriteString(fmt.Sprintf(" predefined: %t\n", gem.Predefined))
    } else {
        sb.WriteString("gem: none\n")
    }

    chunks := ChunkByRunes(prompt, 4096)
    preview := prompt
    truncated := false
    if len(chunks) > 1 {
        preview = chunks[0]
        truncated = true
    }
    sb.WriteString("prompt_preview:\n")
    sb.WriteString(preview)
    if truncated {
        sb.WriteString("\n... [truncated]\n")
    }
    return sb.String()
}
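A quick sketch (not part of this commit) of the logging helpers above; the token literal is fake.

package geminiwebapi

func exampleLogging() {
    SetLogLevel("DEBUG") // accepts the same strings as the GEMINI_WEBAPI_LOG env var
    token := "g.a000abcdefghijklmnopqrstuvwxyz0123456789"
    // MaskToken28 yields a 28-character form: first 8 chars, "****", 4 middle chars, "****", last 8 chars.
    Info("using __Secure-1PSID %s", MaskToken28(token))
}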
internal/provider/gemini-web/media.go (new file, 394 lines added)
@@ -0,0 +1,394 @@
package geminiwebapi

import (
    "bytes"
    "crypto/tls"
    "encoding/base64"
    "errors"
    "fmt"
    "io"
    "mime/multipart"
    "net/http"
    "net/http/cookiejar"
    "net/url"
    "os"
    "path/filepath"
    "regexp"
    "sort"
    "strings"
    "time"

    "github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
    "github.com/router-for-me/CLIProxyAPI/v6/internal/misc"
    "github.com/tidwall/gjson"
)

// Image helpers ------------------------------------------------------------

type Image struct {
    URL   string
    Title string
    Alt   string
    Proxy string
}

func (i Image) String() string {
    short := i.URL
    if len(short) > 20 {
        short = short[:8] + "..." + short[len(short)-12:]
    }
    return fmt.Sprintf("Image(title='%s', alt='%s', url='%s')", i.Title, i.Alt, short)
}

func (i Image) Save(path string, filename string, cookies map[string]string, verbose bool, skipInvalidFilename bool, insecure bool) (string, error) {
    if filename == "" {
        // Try to parse filename from URL.
        u := i.URL
        if p := strings.Split(u, "/"); len(p) > 0 {
            filename = p[len(p)-1]
        }
        if q := strings.Split(filename, "?"); len(q) > 0 {
            filename = q[0]
        }
    }
    // Regex validation (align with Python: ^(.*\.\w+)) to extract name with extension.
    if filename != "" {
        re := regexp.MustCompile(`^(.*\.\w+)`)
        if m := re.FindStringSubmatch(filename); len(m) >= 2 {
            filename = m[1]
        } else {
            if verbose {
                Warning("Invalid filename: %s", filename)
            }
            if skipInvalidFilename {
                return "", nil
            }
        }
    }
    // Build client with cookie jar so cookies persist across redirects.
    tr := &http.Transport{}
    if i.Proxy != "" {
        if pu, err := url.Parse(i.Proxy); err == nil {
            tr.Proxy = http.ProxyURL(pu)
        }
    }
    if insecure {
        tr.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
    }
    jar, _ := cookiejar.New(nil)
    client := &http.Client{Transport: tr, Timeout: 120 * time.Second, Jar: jar}

    // Helper to set raw Cookie header using provided cookies (to mirror Python client behavior).
    buildCookieHeader := func(m map[string]string) string {
        if len(m) == 0 {
            return ""
        }
        keys := make([]string, 0, len(m))
        for k := range m {
            keys = append(keys, k)
        }
        sort.Strings(keys)
        parts := make([]string, 0, len(keys))
        for _, k := range keys {
            parts = append(parts, fmt.Sprintf("%s=%s", k, m[k]))
        }
        return strings.Join(parts, "; ")
    }
    rawCookie := buildCookieHeader(cookies)

    client.CheckRedirect = func(req *http.Request, via []*http.Request) error {
        // Ensure provided cookies are always sent across redirects (domain-agnostic).
        if rawCookie != "" {
            req.Header.Set("Cookie", rawCookie)
        }
        if len(via) >= 10 {
            return errors.New("stopped after 10 redirects")
        }
        return nil
    }

    req, _ := http.NewRequest(http.MethodGet, i.URL, nil)
    if rawCookie != "" {
        req.Header.Set("Cookie", rawCookie)
    }
    // Add browser-like headers to improve compatibility.
    req.Header.Set("Accept", "image/avif,image/webp,image/apng,image/*,*/*;q=0.8")
    req.Header.Set("Connection", "keep-alive")
    resp, err := client.Do(req)
    if err != nil {
        return "", err
    }
    defer func() {
        _ = resp.Body.Close()
    }()
    if resp.StatusCode != http.StatusOK {
        return "", fmt.Errorf("error downloading image: %d %s", resp.StatusCode, resp.Status)
    }
    if ct := resp.Header.Get("Content-Type"); ct != "" && !strings.Contains(strings.ToLower(ct), "image") {
        Warning("Content type of %s is not image, but %s.", filename, ct)
    }
    if path == "" {
        path = "temp"
    }
    if err = os.MkdirAll(path, 0o755); err != nil {
        return "", err
    }
    dest := filepath.Join(path, filename)
    f, err := os.Create(dest)
    if err != nil {
        return "", err
    }
    _, err = io.Copy(f, resp.Body)
    _ = f.Close()
    if err != nil {
        return "", err
    }
    if verbose {
        Info("Image saved as %s", dest)
    }
    abspath, _ := filepath.Abs(dest)
    return abspath, nil
}

type WebImage struct{ Image }

type GeneratedImage struct {
    Image
    Cookies map[string]string
}

func (g GeneratedImage) Save(path string, filename string, fullSize bool, verbose bool, skipInvalidFilename bool, insecure bool) (string, error) {
    if len(g.Cookies) == 0 {
        return "", &ValueError{Msg: "GeneratedImage requires cookies."}
    }
    strURL := g.URL
    if fullSize {
        strURL = strURL + "=s2048"
    }
    if filename == "" {
        name := time.Now().Format("20060102150405")
        if len(strURL) >= 10 {
            name = fmt.Sprintf("%s_%s.png", name, strURL[len(strURL)-10:])
        } else {
            name += ".png"
        }
        filename = name
    }
    tmp := g.Image
    tmp.URL = strURL
    return tmp.Save(path, filename, g.Cookies, verbose, skipInvalidFilename, insecure)
}

// Request parsing & file helpers -------------------------------------------

func ParseMessagesAndFiles(rawJSON []byte) ([]RoleText, [][]byte, []string, [][]int, error) {
    var messages []RoleText
    var files [][]byte
    var mimes []string
    var perMsgFileIdx [][]int

    contents := gjson.GetBytes(rawJSON, "contents")
    if contents.Exists() {
        contents.ForEach(func(_, content gjson.Result) bool {
            role := NormalizeRole(content.Get("role").String())
            var b strings.Builder
            startFile := len(files)
            content.Get("parts").ForEach(func(_, part gjson.Result) bool {
                if text := part.Get("text"); text.Exists() {
                    if b.Len() > 0 {
                        b.WriteString("\n")
                    }
                    b.WriteString(text.String())
                }
                if inlineData := part.Get("inlineData"); inlineData.Exists() {
                    data := inlineData.Get("data").String()
                    if data != "" {
                        if dec, err := base64.StdEncoding.DecodeString(data); err == nil {
                            files = append(files, dec)
                            m := inlineData.Get("mimeType").String()
                            if m == "" {
                                m = inlineData.Get("mime_type").String()
                            }
                            mimes = append(mimes, m)
                        }
                    }
                }
                return true
            })
            messages = append(messages, RoleText{Role: role, Text: b.String()})
            endFile := len(files)
            if endFile > startFile {
                idxs := make([]int, 0, endFile-startFile)
                for i := startFile; i < endFile; i++ {
                    idxs = append(idxs, i)
                }
                perMsgFileIdx = append(perMsgFileIdx, idxs)
            } else {
                perMsgFileIdx = append(perMsgFileIdx, nil)
            }
            return true
        })
    }
    return messages, files, mimes, perMsgFileIdx, nil
}

func MaterializeInlineFiles(files [][]byte, mimes []string) ([]string, *interfaces.ErrorMessage) {
    if len(files) == 0 {
        return nil, nil
    }
    paths := make([]string, 0, len(files))
    for i, data := range files {
        ext := MimeToExt(mimes, i)
        f, err := os.CreateTemp("", "gemini-upload-*"+ext)
        if err != nil {
            return nil, &interfaces.ErrorMessage{StatusCode: http.StatusInternalServerError, Error: fmt.Errorf("failed to create temp file: %w", err)}
        }
        if _, err = f.Write(data); err != nil {
            _ = f.Close()
            _ = os.Remove(f.Name())
            return nil, &interfaces.ErrorMessage{StatusCode: http.StatusInternalServerError, Error: fmt.Errorf("failed to write temp file: %w", err)}
        }
        if err = f.Close(); err != nil {
            _ = os.Remove(f.Name())
            return nil, &interfaces.ErrorMessage{StatusCode: http.StatusInternalServerError, Error: fmt.Errorf("failed to close temp file: %w", err)}
        }
        paths = append(paths, f.Name())
    }
    return paths, nil
}

func CleanupFiles(paths []string) {
    for _, p := range paths {
        if p != "" {
            _ = os.Remove(p)
        }
    }
}

func FetchGeneratedImageData(gi GeneratedImage) (string, string, error) {
    path, err := gi.Save("", "", true, false, true, false)
    if err != nil {
        return "", "", err
    }
    defer func() { _ = os.Remove(path) }()
    b, err := os.ReadFile(path)
    if err != nil {
        return "", "", err
    }
    mime := http.DetectContentType(b)
    if !strings.HasPrefix(mime, "image/") {
        if guessed := mimeFromExtension(filepath.Ext(path)); guessed != "" {
            mime = guessed
        } else {
            mime = "image/png"
        }
    }
    return mime, base64.StdEncoding.EncodeToString(b), nil
}

func MimeToExt(mimes []string, i int) string {
    if i < len(mimes) {
        return MimeToPreferredExt(strings.ToLower(mimes[i]))
    }
    return ".png"
}

var preferredExtByMIME = map[string]string{
    "image/png":       ".png",
    "image/jpeg":      ".jpg",
    "image/jpg":       ".jpg",
    "image/webp":      ".webp",
    "image/gif":       ".gif",
    "image/bmp":       ".bmp",
    "image/heic":      ".heic",
    "application/pdf": ".pdf",
}

func MimeToPreferredExt(mime string) string {
    normalized := strings.ToLower(strings.TrimSpace(mime))
    if normalized == "" {
        return ".png"
    }
    if ext, ok := preferredExtByMIME[normalized]; ok {
        return ext
    }
    return ".png"
}

func mimeFromExtension(ext string) string {
    cleaned := strings.TrimPrefix(strings.ToLower(ext), ".")
    if cleaned == "" {
        return ""
    }
    if mt, ok := misc.MimeTypes[cleaned]; ok && mt != "" {
        return mt
    }
    return ""
}

// File upload helpers ------------------------------------------------------

func uploadFile(path string, proxy string, insecure bool) (string, error) {
    f, err := os.Open(path)
    if err != nil {
        return "", err
    }
    defer func() {
        _ = f.Close()
    }()

    var buf bytes.Buffer
    mw := multipart.NewWriter(&buf)
    fw, err := mw.CreateFormFile("file", filepath.Base(path))
    if err != nil {
        return "", err
    }
    if _, err = io.Copy(fw, f); err != nil {
        return "", err
    }
    _ = mw.Close()

    tr := &http.Transport{}
    if proxy != "" {
        if pu, errParse := url.Parse(proxy); errParse == nil {
            tr.Proxy = http.ProxyURL(pu)
        }
    }
    if insecure {
        tr.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
    }
    client := &http.Client{Transport: tr, Timeout: 300 * time.Second}

    req, _ := http.NewRequest(http.MethodPost, EndpointUpload, &buf)
    for k, v := range HeadersUpload {
        for _, vv := range v {
            req.Header.Add(k, vv)
        }
    }
    req.Header.Set("Content-Type", mw.FormDataContentType())
    req.Header.Set("Accept", "*/*")
    req.Header.Set("Connection", "keep-alive")

    resp, err := client.Do(req)
    if err != nil {
        return "", err
    }
    defer func() {
        _ = resp.Body.Close()
    }()
    if resp.StatusCode < 200 || resp.StatusCode >= 300 {
        return "", &APIError{Msg: resp.Status}
    }
    b, err := io.ReadAll(resp.Body)
    if err != nil {
        return "", err
    }
    return string(b), nil
}

func parseFileName(path string) (string, error) {
    if st, err := os.Stat(path); err != nil || st.IsDir() {
        return "", &ValueError{Msg: path + " is not a valid file."}
    }
    return filepath.Base(path), nil
}
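A sketch (not part of this commit) of the inline-data path above: decode base64 parts from a Gemini-style request, write them to temp files, and clean up afterwards. The request body literal is made up.

package geminiwebapi

func exampleInlineFiles() error {
    rawJSON := []byte(`{"contents":[{"role":"user","parts":[{"text":"describe this"},{"inlineData":{"mimeType":"image/png","data":"iVBORw0KGgo="}}]}]}`)
    msgs, files, mimes, _, err := ParseMessagesAndFiles(rawJSON)
    if err != nil {
        return err
    }
    paths, errMsg := MaterializeInlineFiles(files, mimes)
    if errMsg != nil {
        return errMsg.Error
    }
    defer CleanupFiles(paths)
    _ = msgs // one RoleText per content entry; text parts are joined with newlines
    return nil
}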
internal/provider/gemini-web/models.go (new file, 168 lines added)
@@ -0,0 +1,168 @@
|
||||
package geminiwebapi
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/router-for-me/CLIProxyAPI/v6/internal/registry"
|
||||
)
|
||||
|
||||
// Endpoints used by the Gemini web app
|
||||
const (
|
||||
EndpointGoogle = "https://www.google.com"
|
||||
EndpointInit = "https://gemini.google.com/app"
|
||||
EndpointGenerate = "https://gemini.google.com/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate"
|
||||
EndpointRotateCookies = "https://accounts.google.com/RotateCookies"
|
||||
EndpointUpload = "https://content-push.googleapis.com/upload"
|
||||
)
|
||||
|
||||
// Default headers
|
||||
var (
|
||||
HeadersGemini = http.Header{
|
||||
"Content-Type": []string{"application/x-www-form-urlencoded;charset=utf-8"},
|
||||
"Host": []string{"gemini.google.com"},
|
||||
"Origin": []string{"https://gemini.google.com"},
|
||||
"Referer": []string{"https://gemini.google.com/"},
|
||||
"User-Agent": []string{"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"},
|
||||
"X-Same-Domain": []string{"1"},
|
||||
}
|
||||
HeadersRotateCookies = http.Header{
|
||||
"Content-Type": []string{"application/json"},
|
||||
}
|
||||
HeadersUpload = http.Header{
|
||||
"Push-ID": []string{"feeds/mcudyrk2a4khkz"},
|
||||
}
|
||||
)
|
||||
|
||||
// Model defines available model names and headers
|
||||
type Model struct {
|
||||
Name string
|
||||
ModelHeader http.Header
|
||||
AdvancedOnly bool
|
||||
}
|
||||
|
||||
var (
|
||||
ModelUnspecified = Model{
|
||||
Name: "unspecified",
|
||||
ModelHeader: http.Header{},
|
||||
AdvancedOnly: false,
|
||||
}
|
||||
ModelG25Flash = Model{
|
||||
Name: "gemini-2.5-flash",
|
||||
ModelHeader: http.Header{
|
||||
"x-goog-ext-525001261-jspb": []string{"[1,null,null,null,\"71c2d248d3b102ff\",null,null,0,[4]]"},
|
||||
},
|
||||
AdvancedOnly: false,
|
||||
}
|
||||
ModelG25Pro = Model{
|
||||
Name: "gemini-2.5-pro",
|
||||
ModelHeader: http.Header{
|
||||
"x-goog-ext-525001261-jspb": []string{"[1,null,null,null,\"4af6c7f5da75d65d\",null,null,0,[4]]"},
|
||||
},
|
||||
AdvancedOnly: false,
|
||||
}
|
||||
ModelG20Flash = Model{ // Deprecated, still supported
|
||||
Name: "gemini-2.0-flash",
|
||||
ModelHeader: http.Header{
|
||||
"x-goog-ext-525001261-jspb": []string{"[1,null,null,null,\"f299729663a2343f\"]"},
|
||||
},
|
||||
AdvancedOnly: false,
|
||||
}
|
||||
ModelG20FlashThinking = Model{ // Deprecated, still supported
|
||||
Name: "gemini-2.0-flash-thinking",
|
||||
ModelHeader: http.Header{
|
||||
"x-goog-ext-525001261-jspb": []string{"[null,null,null,null,\"7ca48d02d802f20a\"]"},
|
||||
},
|
||||
AdvancedOnly: false,
|
||||
}
|
||||
)
|
||||
|
||||
// ModelFromName returns a model by name or error if not found
|
||||
func ModelFromName(name string) (Model, error) {
|
||||
switch name {
|
||||
case ModelUnspecified.Name:
|
||||
return ModelUnspecified, nil
|
||||
case ModelG25Flash.Name:
|
||||
return ModelG25Flash, nil
|
||||
case ModelG25Pro.Name:
|
||||
return ModelG25Pro, nil
|
||||
case ModelG20Flash.Name:
|
||||
return ModelG20Flash, nil
|
||||
case ModelG20FlashThinking.Name:
|
||||
return ModelG20FlashThinking, nil
|
||||
default:
|
||||
return Model{}, &ValueError{Msg: "Unknown model name: " + name}
|
||||
}
|
||||
}
|
||||
|
||||
// Known error codes returned from server
|
||||
const (
|
||||
ErrorUsageLimitExceeded = 1037
|
||||
ErrorModelInconsistent = 1050
|
||||
ErrorModelHeaderInvalid = 1052
|
||||
ErrorIPTemporarilyBlocked = 1060
|
||||
)
|
||||
|
||||
var (
	GeminiWebAliasOnce sync.Once
	GeminiWebAliasMap  map[string]string
)

// EnsureGeminiWebAliasMap initializes alias lookup lazily.
func EnsureGeminiWebAliasMap() {
	GeminiWebAliasOnce.Do(func() {
		GeminiWebAliasMap = make(map[string]string)
		for _, m := range registry.GetGeminiModels() {
			if m.ID == "gemini-2.5-flash-lite" {
				continue
			} else if m.ID == "gemini-2.5-flash" {
				GeminiWebAliasMap["gemini-2.5-flash-image-preview"] = "gemini-2.5-flash"
			}
			alias := AliasFromModelID(m.ID)
			GeminiWebAliasMap[strings.ToLower(alias)] = strings.ToLower(m.ID)
		}
	})
}
// GetGeminiWebAliasedModels returns Gemini models exposed with web aliases.
func GetGeminiWebAliasedModels() []*registry.ModelInfo {
	EnsureGeminiWebAliasMap()
	aliased := make([]*registry.ModelInfo, 0)
	for _, m := range registry.GetGeminiModels() {
		if m.ID == "gemini-2.5-flash-lite" {
			continue
		} else if m.ID == "gemini-2.5-flash" {
			cpy := *m
			cpy.ID = "gemini-2.5-flash-image-preview"
			cpy.Name = "gemini-2.5-flash-image-preview"
			cpy.DisplayName = "Nano Banana"
			cpy.Description = "Gemini 2.5 Flash Preview Image"
			aliased = append(aliased, &cpy)
		}
		cpy := *m
		cpy.ID = AliasFromModelID(m.ID)
		cpy.Name = cpy.ID
		aliased = append(aliased, &cpy)
	}
	return aliased
}
// MapAliasToUnderlying normalizes web aliases back to canonical Gemini IDs.
func MapAliasToUnderlying(name string) string {
	EnsureGeminiWebAliasMap()
	n := strings.ToLower(name)
	if u, ok := GeminiWebAliasMap[n]; ok {
		return u
	}
	const suffix = "-web"
	if strings.HasSuffix(n, suffix) {
		return strings.TrimSuffix(n, suffix)
	}
	return name
}

// AliasFromModelID builds the web alias for a Gemini model identifier.
func AliasFromModelID(modelID string) string {
	return modelID + "-web"
}
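// Example (illustrative sketch): how the alias helpers are expected to compose.
// A registry model ID gains a "-web" suffix when exposed, and an incoming alias is
// normalized back to the canonical ID before the model lookup; unknown names surface
// as *ValueError from ModelFromName.
func exampleAliasRoundTrip() (Model, error) {
	alias := AliasFromModelID("gemini-2.5-pro") // "gemini-2.5-pro-web"
	underlying := MapAliasToUnderlying(alias)   // back to "gemini-2.5-pro"
	return ModelFromName(underlying)            // resolves to ModelG25Pro
}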
364
internal/provider/gemini-web/persistence.go
Normal file
@@ -0,0 +1,364 @@
package geminiwebapi

import (
	"crypto/sha256"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
	"strings"
	"time"

	bolt "go.etcd.io/bbolt"
)

// StoredMessage represents a single message in a conversation record.
type StoredMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
	Name    string `json:"name,omitempty"`
}

// ConversationRecord stores a full conversation with its metadata for persistence.
type ConversationRecord struct {
	Model     string          `json:"model"`
	ClientID  string          `json:"client_id"`
	Metadata  []string        `json:"metadata,omitempty"`
	Messages  []StoredMessage `json:"messages"`
	CreatedAt time.Time       `json:"created_at"`
	UpdatedAt time.Time       `json:"updated_at"`
}

// Sha256Hex computes the SHA256 hash of a string and returns its hex representation.
func Sha256Hex(s string) string {
	sum := sha256.Sum256([]byte(s))
	return hex.EncodeToString(sum[:])
}

// RoleText represents a turn in a conversation with a role and text content.
type RoleText struct {
	Role string
	Text string
}

func ToStoredMessages(msgs []RoleText) []StoredMessage {
	out := make([]StoredMessage, 0, len(msgs))
	for _, m := range msgs {
		out = append(out, StoredMessage{
			Role:    m.Role,
			Content: m.Text,
		})
	}
	return out
}

func HashMessage(m StoredMessage) string {
	s := fmt.Sprintf(`{"content":%q,"role":%q}`, m.Content, strings.ToLower(m.Role))
	return Sha256Hex(s)
}

func HashConversation(clientID, model string, msgs []StoredMessage) string {
	var b strings.Builder
	b.WriteString(clientID)
	b.WriteString("|")
	b.WriteString(model)
	for _, m := range msgs {
		b.WriteString("|")
		b.WriteString(HashMessage(m))
	}
	return Sha256Hex(b.String())
}
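// Example (illustrative sketch): the conversation key is derived by hashing each
// message on its own, then hashing the client ID, model, and per-message hashes
// joined with "|", so editing any earlier turn produces a different key.
func exampleConversationHash() string {
	msgs := []StoredMessage{
		{Role: "user", Content: "hello"},
		{Role: "assistant", Content: "hi there"},
	}
	// The client ID below is a placeholder in the same shape as the stable IDs used elsewhere.
	return HashConversation("gemini-web-0123456789abcdef", "gemini-2.5-pro", msgs)
}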
// ConvStorePath returns the path for account-level metadata persistence based on token file path.
func ConvStorePath(tokenFilePath string) string {
	wd, err := os.Getwd()
	if err != nil || wd == "" {
		wd = "."
	}
	convDir := filepath.Join(wd, "conv")
	base := strings.TrimSuffix(filepath.Base(tokenFilePath), filepath.Ext(tokenFilePath))
	return filepath.Join(convDir, base+".bolt")
}

// ConvDataPath returns the path for full conversation persistence based on token file path.
func ConvDataPath(tokenFilePath string) string {
	wd, err := os.Getwd()
	if err != nil || wd == "" {
		wd = "."
	}
	convDir := filepath.Join(wd, "conv")
	base := strings.TrimSuffix(filepath.Base(tokenFilePath), filepath.Ext(tokenFilePath))
	return filepath.Join(convDir, base+".bolt")
}

// LoadConvStore reads the account-level metadata store from disk.
func LoadConvStore(path string) (map[string][]string, error) {
	if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
		return nil, err
	}
	db, err := bolt.Open(path, 0o600, &bolt.Options{Timeout: time.Second})
	if err != nil {
		return nil, err
	}
	defer func() {
		_ = db.Close()
	}()
	out := map[string][]string{}
	err = db.View(func(tx *bolt.Tx) error {
		b := tx.Bucket([]byte("account_meta"))
		if b == nil {
			return nil
		}
		return b.ForEach(func(k, v []byte) error {
			var arr []string
			if len(v) > 0 {
				if e := json.Unmarshal(v, &arr); e != nil {
					// Skip malformed entries instead of failing the whole load
					return nil
				}
			}
			out[string(k)] = arr
			return nil
		})
	})
	if err != nil {
		return nil, err
	}
	return out, nil
}

// SaveConvStore writes the account-level metadata store to disk atomically.
func SaveConvStore(path string, data map[string][]string) error {
	if data == nil {
		data = map[string][]string{}
	}
	if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
		return err
	}
	db, err := bolt.Open(path, 0o600, &bolt.Options{Timeout: 2 * time.Second})
	if err != nil {
		return err
	}
	defer func() {
		_ = db.Close()
	}()
	return db.Update(func(tx *bolt.Tx) error {
		// Recreate bucket to reflect the given snapshot exactly.
		if b := tx.Bucket([]byte("account_meta")); b != nil {
			if err = tx.DeleteBucket([]byte("account_meta")); err != nil {
				return err
			}
		}
		b, errCreateBucket := tx.CreateBucket([]byte("account_meta"))
		if errCreateBucket != nil {
			return errCreateBucket
		}
		for k, v := range data {
			enc, e := json.Marshal(v)
			if e != nil {
				return e
			}
			if e = b.Put([]byte(k), enc); e != nil {
				return e
			}
		}
		return nil
	})
}

// AccountMetaKey builds the key for account-level metadata map.
func AccountMetaKey(email, modelName string) string {
	return fmt.Sprintf("account-meta|%s|%s", email, modelName)
}
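// Example (illustrative sketch): account-level metadata is keyed via AccountMetaKey and
// saved as a whole snapshot, so a later load returns exactly what was last written.
// The token path and metadata values below are placeholders.
func exampleConvStoreRoundTrip() (map[string][]string, error) {
	path := ConvStorePath("auths/example-account.json")
	data := map[string][]string{
		AccountMetaKey("example-account", "gemini-2.5-pro"): {"cid", "rid", "rcid"},
	}
	if err := SaveConvStore(path, data); err != nil {
		return nil, err
	}
	return LoadConvStore(path)
}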
// LoadConvData reads the full conversation data and index from disk.
func LoadConvData(path string) (map[string]ConversationRecord, map[string]string, error) {
	if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
		return nil, nil, err
	}
	db, err := bolt.Open(path, 0o600, &bolt.Options{Timeout: time.Second})
	if err != nil {
		return nil, nil, err
	}
	defer func() {
		_ = db.Close()
	}()
	items := map[string]ConversationRecord{}
	index := map[string]string{}
	err = db.View(func(tx *bolt.Tx) error {
		// Load conv_items
		if b := tx.Bucket([]byte("conv_items")); b != nil {
			if e := b.ForEach(func(k, v []byte) error {
				var rec ConversationRecord
				if len(v) > 0 {
					if e2 := json.Unmarshal(v, &rec); e2 != nil {
						// Skip malformed
						return nil
					}
					items[string(k)] = rec
				}
				return nil
			}); e != nil {
				return e
			}
		}
		// Load conv_index
		if b := tx.Bucket([]byte("conv_index")); b != nil {
			if e := b.ForEach(func(k, v []byte) error {
				index[string(k)] = string(v)
				return nil
			}); e != nil {
				return e
			}
		}
		return nil
	})
	if err != nil {
		return nil, nil, err
	}
	return items, index, nil
}

// SaveConvData writes the full conversation data and index to disk atomically.
func SaveConvData(path string, items map[string]ConversationRecord, index map[string]string) error {
	if items == nil {
		items = map[string]ConversationRecord{}
	}
	if index == nil {
		index = map[string]string{}
	}
	if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
		return err
	}
	db, err := bolt.Open(path, 0o600, &bolt.Options{Timeout: 2 * time.Second})
	if err != nil {
		return err
	}
	defer func() {
		_ = db.Close()
	}()
	return db.Update(func(tx *bolt.Tx) error {
		// Recreate items bucket
		if b := tx.Bucket([]byte("conv_items")); b != nil {
			if err = tx.DeleteBucket([]byte("conv_items")); err != nil {
				return err
			}
		}
		bi, errCreateBucket := tx.CreateBucket([]byte("conv_items"))
		if errCreateBucket != nil {
			return errCreateBucket
		}
		for k, rec := range items {
			enc, e := json.Marshal(rec)
			if e != nil {
				return e
			}
			if e = bi.Put([]byte(k), enc); e != nil {
				return e
			}
		}

		// Recreate index bucket
		if b := tx.Bucket([]byte("conv_index")); b != nil {
			if err = tx.DeleteBucket([]byte("conv_index")); err != nil {
				return err
			}
		}
		bx, errCreateBucket := tx.CreateBucket([]byte("conv_index"))
		if errCreateBucket != nil {
			return errCreateBucket
		}
		for k, v := range index {
			if e := bx.Put([]byte(k), []byte(v)); e != nil {
				return e
			}
		}
		return nil
	})
}

// BuildConversationRecord constructs a ConversationRecord from history and the latest output.
// Returns false when output is empty or has no candidates.
func BuildConversationRecord(model, clientID string, history []RoleText, output *ModelOutput, metadata []string) (ConversationRecord, bool) {
	if output == nil || len(output.Candidates) == 0 {
		return ConversationRecord{}, false
	}
	text := ""
	if t := output.Candidates[0].Text; t != "" {
		text = RemoveThinkTags(t)
	}
	final := append([]RoleText{}, history...)
	final = append(final, RoleText{Role: "assistant", Text: text})
	rec := ConversationRecord{
		Model:     model,
		ClientID:  clientID,
		Metadata:  metadata,
		Messages:  ToStoredMessages(final),
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	return rec, true
}

// FindByMessageListIn looks up a conversation record by hashed message list.
// It attempts both the stable client ID and a legacy email-based ID.
func FindByMessageListIn(items map[string]ConversationRecord, index map[string]string, stableClientID, email, model string, msgs []RoleText) (ConversationRecord, bool) {
	stored := ToStoredMessages(msgs)
	stableHash := HashConversation(stableClientID, model, stored)
	fallbackHash := HashConversation(email, model, stored)

	// Try stable hash via index indirection first
	if key, ok := index["hash:"+stableHash]; ok {
		if rec, ok2 := items[key]; ok2 {
			return rec, true
		}
	}
	if rec, ok := items[stableHash]; ok {
		return rec, true
	}
	// Fallback to legacy hash (email-based)
	if key, ok := index["hash:"+fallbackHash]; ok {
		if rec, ok2 := items[key]; ok2 {
			return rec, true
		}
	}
	if rec, ok := items[fallbackHash]; ok {
		return rec, true
	}
	return ConversationRecord{}, false
}

// FindConversationIn tries exact then sanitized assistant messages.
func FindConversationIn(items map[string]ConversationRecord, index map[string]string, stableClientID, email, model string, msgs []RoleText) (ConversationRecord, bool) {
	if len(msgs) == 0 {
		return ConversationRecord{}, false
	}
	if rec, ok := FindByMessageListIn(items, index, stableClientID, email, model, msgs); ok {
		return rec, true
	}
	if rec, ok := FindByMessageListIn(items, index, stableClientID, email, model, SanitizeAssistantMessages(msgs)); ok {
		return rec, true
	}
	return ConversationRecord{}, false
}

// FindReusableSessionIn returns reusable metadata and the remaining message suffix.
func FindReusableSessionIn(items map[string]ConversationRecord, index map[string]string, stableClientID, email, model string, msgs []RoleText) ([]string, []RoleText) {
	if len(msgs) < 2 {
		return nil, nil
	}
	searchEnd := len(msgs)
	for searchEnd >= 2 {
		sub := msgs[:searchEnd]
		tail := sub[len(sub)-1]
		if strings.EqualFold(tail.Role, "assistant") || strings.EqualFold(tail.Role, "system") {
			if rec, ok := FindConversationIn(items, index, stableClientID, email, model, sub); ok {
				remain := msgs[searchEnd:]
				return rec.Metadata, remain
			}
		}
		searchEnd--
	}
	return nil, nil
}
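// Example (illustrative sketch): given the full incoming message list, the longest stored
// prefix ending in an assistant (or system) turn is looked up; its metadata is reused and
// only the remaining suffix has to be resent to the web session.
func exampleFindReusableSession(items map[string]ConversationRecord, index map[string]string) ([]string, []RoleText) {
	msgs := []RoleText{
		{Role: "user", Text: "hello"},
		{Role: "assistant", Text: "hi there"},
		{Role: "user", Text: "one more question"},
	}
	// Client ID and account name are placeholders in the shape used by the state layer.
	return FindReusableSessionIn(items, index, "gemini-web-0123456789abcdef", "example-account", "gemini-2.5-pro", msgs)
}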
130
internal/provider/gemini-web/prompt.go
Normal file
@@ -0,0 +1,130 @@
package geminiwebapi

import (
	"math"
	"regexp"
	"strings"
	"unicode/utf8"

	"github.com/tidwall/gjson"
)

var (
	reThink     = regexp.MustCompile(`(?s)^\s*<think>.*?</think>\s*`)
	reXMLAnyTag = regexp.MustCompile(`(?s)<\s*[^>]+>`)
)

// NormalizeRole converts a role to a standard format (lowercase, 'model' -> 'assistant').
func NormalizeRole(role string) string {
	r := strings.ToLower(role)
	if r == "model" {
		return "assistant"
	}
	return r
}

// NeedRoleTags checks if a list of messages requires role tags.
func NeedRoleTags(msgs []RoleText) bool {
	for _, m := range msgs {
		if strings.ToLower(m.Role) != "user" {
			return true
		}
	}
	return false
}

// AddRoleTag wraps content with a role tag.
func AddRoleTag(role, content string, unclose bool) string {
	if role == "" {
		role = "user"
	}
	if unclose {
		return "<|im_start|>" + role + "\n" + content
	}
	return "<|im_start|>" + role + "\n" + content + "\n<|im_end|>"
}

// BuildPrompt constructs the final prompt from a list of messages.
func BuildPrompt(msgs []RoleText, tagged bool, appendAssistant bool) string {
	if len(msgs) == 0 {
		if tagged && appendAssistant {
			return AddRoleTag("assistant", "", true)
		}
		return ""
	}
	if !tagged {
		var sb strings.Builder
		for i, m := range msgs {
			if i > 0 {
				sb.WriteString("\n")
			}
			sb.WriteString(m.Text)
		}
		return sb.String()
	}
	var sb strings.Builder
	for _, m := range msgs {
		sb.WriteString(AddRoleTag(m.Role, m.Text, false))
		sb.WriteString("\n")
	}
	if appendAssistant {
		sb.WriteString(AddRoleTag("assistant", "", true))
	}
	return strings.TrimSpace(sb.String())
}
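// Example (illustrative sketch): once any non-user role is present, every turn is wrapped
// in <|im_start|>/<|im_end|> tags and an unclosed assistant tag is appended so the model
// continues from that point.
func examplePromptBuild() string {
	msgs := []RoleText{
		{Role: "system", Text: "You are terse."},
		{Role: "user", Text: "Say hi."},
	}
	tagged := NeedRoleTags(msgs) // true, because a system turn is present
	return BuildPrompt(msgs, tagged, tagged)
}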
// RemoveThinkTags strips <think>...</think> blocks from a string.
func RemoveThinkTags(s string) string {
	return strings.TrimSpace(reThink.ReplaceAllString(s, ""))
}

// SanitizeAssistantMessages removes think tags from assistant messages.
func SanitizeAssistantMessages(msgs []RoleText) []RoleText {
	out := make([]RoleText, 0, len(msgs))
	for _, m := range msgs {
		if strings.ToLower(m.Role) == "assistant" {
			out = append(out, RoleText{Role: m.Role, Text: RemoveThinkTags(m.Text)})
		} else {
			out = append(out, m)
		}
	}
	return out
}

// AppendXMLWrapHintIfNeeded appends an XML wrap hint to messages containing XML-like blocks.
func AppendXMLWrapHintIfNeeded(msgs []RoleText, disable bool) []RoleText {
	if disable {
		return msgs
	}
	const xmlWrapHint = "\nFor any xml block, e.g. tool call, always wrap it with: \n`````xml\n...\n`````\n"
	out := make([]RoleText, 0, len(msgs))
	for _, m := range msgs {
		t := m.Text
		if reXMLAnyTag.MatchString(t) {
			t = t + xmlWrapHint
		}
		out = append(out, RoleText{Role: m.Role, Text: t})
	}
	return out
}

// EstimateTotalTokensFromRawJSON estimates token count by summing text parts.
func EstimateTotalTokensFromRawJSON(rawJSON []byte) int {
	totalChars := 0
	contents := gjson.GetBytes(rawJSON, "contents")
	if contents.Exists() {
		contents.ForEach(func(_, content gjson.Result) bool {
			content.Get("parts").ForEach(func(_, part gjson.Result) bool {
				if t := part.Get("text"); t.Exists() {
					totalChars += utf8.RuneCountInString(t.String())
				}
				return true
			})
			return true
		})
	}
	if totalChars <= 0 {
		return 0
	}
	return int(math.Ceil(float64(totalChars) / 4.0))
}
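// Example (illustrative sketch): the estimator counts runes in every contents[].parts[].text
// field and assumes roughly four characters per token, rounding up.
func exampleTokenEstimate() int {
	raw := []byte(`{"contents":[{"parts":[{"text":"hello world"}]}]}`)
	return EstimateTotalTokensFromRawJSON(raw) // 11 runes -> ceil(11/4) = 3
}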
102
internal/provider/gemini-web/request.go
Normal file
@@ -0,0 +1,102 @@
package geminiwebapi

import (
	"fmt"
	"strings"
	"unicode/utf8"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
)

const continuationHint = "\n(More messages to come, please reply with just 'ok.')"

func ChunkByRunes(s string, size int) []string {
	if size <= 0 {
		return []string{s}
	}
	chunks := make([]string, 0, (len(s)/size)+1)
	var buf strings.Builder
	count := 0
	for _, r := range s {
		buf.WriteRune(r)
		count++
		if count >= size {
			chunks = append(chunks, buf.String())
			buf.Reset()
			count = 0
		}
	}
	if buf.Len() > 0 {
		chunks = append(chunks, buf.String())
	}
	if len(chunks) == 0 {
		return []string{""}
	}
	return chunks
}
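// Example (illustrative sketch): splitting happens on rune boundaries, so multi-byte
// characters are never cut in half even though chunk byte lengths may differ.
func exampleChunkByRunes() []string {
	return ChunkByRunes("héllo wörld", 4) // ["héll", "o wö", "rld"]
}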
func MaxCharsPerRequest(cfg *config.Config) int {
	// Read max characters per request from config with a conservative default.
	if cfg != nil {
		if v := cfg.GeminiWeb.MaxCharsPerRequest; v > 0 {
			return v
		}
	}
	return 1_000_000
}

func SendWithSplit(chat *ChatSession, text string, files []string, cfg *config.Config) (ModelOutput, error) {
	// Validate chat session
	if chat == nil {
		return ModelOutput{}, fmt.Errorf("nil chat session")
	}

	// Resolve the maximum characters allowed per request
	maxChars := MaxCharsPerRequest(cfg)
	if maxChars <= 0 {
		maxChars = 1_000_000
	}

	// If within limit, send directly
	if utf8.RuneCountInString(text) <= maxChars {
		return chat.SendMessage(text, files)
	}

	// Decide whether to use the continuation hint (enabled by default)
	useHint := true
	if cfg != nil && cfg.GeminiWeb.DisableContinuationHint {
		useHint = false
	}

	// Compute chunk size in runes. If the hint does not fit, disable it for this request.
	hintLen := 0
	if useHint {
		hintLen = utf8.RuneCountInString(continuationHint)
	}
	chunkSize := maxChars - hintLen
	if chunkSize <= 0 {
		// maxChars is too small to accommodate the hint; fall back to no-hint splitting
		useHint = false
		chunkSize = maxChars
	}

	// Split into rune-safe chunks
	chunks := ChunkByRunes(text, chunkSize)
	if len(chunks) == 0 {
		chunks = []string{""}
	}

	// Send all but the last chunk without files, optionally appending the hint
	for i := 0; i < len(chunks)-1; i++ {
		part := chunks[i]
		if useHint {
			part += continuationHint
		}
		if _, err := chat.SendMessage(part, nil); err != nil {
			return ModelOutput{}, err
		}
	}

	// Send the final chunk with files and return the actual output
	return chat.SendMessage(chunks[len(chunks)-1], files)
}
514
internal/provider/gemini-web/state.go
Normal file
@@ -0,0 +1,514 @@
package geminiwebapi

import (
	"bytes"
	"context"
	"errors"
	"fmt"
	"path/filepath"
	"strings"
	"sync"
	"time"

	"github.com/gin-gonic/gin"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/auth/gemini"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/config"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/constant"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces"
	"github.com/router-for-me/CLIProxyAPI/v6/internal/translator/translator"
	cliproxyexecutor "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/executor"
	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

const (
	geminiWebDefaultTimeoutSec = 300
)

type GeminiWebState struct {
	cfg         *config.Config
	token       *gemini.GeminiWebTokenStorage
	storagePath string

	stableClientID string
	accountID      string

	reqMu  sync.Mutex
	client *GeminiClient

	tokenMu    sync.Mutex
	tokenDirty bool

	convMu    sync.RWMutex
	convStore map[string][]string
	convData  map[string]ConversationRecord
	convIndex map[string]string

	lastRefresh time.Time
}

func NewGeminiWebState(cfg *config.Config, token *gemini.GeminiWebTokenStorage, storagePath string) *GeminiWebState {
	state := &GeminiWebState{
		cfg:         cfg,
		token:       token,
		storagePath: storagePath,
		convStore:   make(map[string][]string),
		convData:    make(map[string]ConversationRecord),
		convIndex:   make(map[string]string),
	}
	suffix := Sha256Hex(token.Secure1PSID)
	if len(suffix) > 16 {
		suffix = suffix[:16]
	}
	state.stableClientID = "gemini-web-" + suffix
	if storagePath != "" {
		base := strings.TrimSuffix(filepath.Base(storagePath), filepath.Ext(storagePath))
		if base != "" {
			state.accountID = base
		} else {
			state.accountID = suffix
		}
	} else {
		state.accountID = suffix
	}
	state.loadConversationCaches()
	return state
}

func (s *GeminiWebState) loadConversationCaches() {
	if path := s.convStorePath(); path != "" {
		if store, err := LoadConvStore(path); err == nil {
			s.convStore = store
		}
	}
	if path := s.convDataPath(); path != "" {
		if items, index, err := LoadConvData(path); err == nil {
			s.convData = items
			s.convIndex = index
		}
	}
}

func (s *GeminiWebState) convStorePath() string {
	base := s.storagePath
	if base == "" {
		base = s.accountID + ".json"
	}
	return ConvStorePath(base)
}

func (s *GeminiWebState) convDataPath() string {
	base := s.storagePath
	if base == "" {
		base = s.accountID + ".json"
	}
	return ConvDataPath(base)
}

func (s *GeminiWebState) GetRequestMutex() *sync.Mutex { return &s.reqMu }

func (s *GeminiWebState) EnsureClient() error {
	if s.client != nil && s.client.Running {
		return nil
	}
	proxyURL := ""
	if s.cfg != nil {
		proxyURL = s.cfg.ProxyURL
	}
	s.client = NewGeminiClient(
		s.token.Secure1PSID,
		s.token.Secure1PSIDTS,
		proxyURL,
	)
	timeout := geminiWebDefaultTimeoutSec
	if err := s.client.Init(float64(timeout), false); err != nil {
		s.client = nil
		return err
	}
	s.lastRefresh = time.Now()
	return nil
}

func (s *GeminiWebState) Refresh(ctx context.Context) error {
	_ = ctx
	proxyURL := ""
	if s.cfg != nil {
		proxyURL = s.cfg.ProxyURL
	}
	s.client = NewGeminiClient(
		s.token.Secure1PSID,
		s.token.Secure1PSIDTS,
		proxyURL,
	)
	timeout := geminiWebDefaultTimeoutSec
	if err := s.client.Init(float64(timeout), false); err != nil {
		return err
	}
	// Attempt rotation proactively to persist new TS sooner
	if newTS, err := s.client.RotateTS(); err == nil && newTS != "" && newTS != s.token.Secure1PSIDTS {
		s.tokenMu.Lock()
		s.token.Secure1PSIDTS = newTS
		s.tokenDirty = true
		if s.client != nil && s.client.Cookies != nil {
			s.client.Cookies["__Secure-1PSIDTS"] = newTS
		}
		s.tokenMu.Unlock()
	}
	s.lastRefresh = time.Now()
	return nil
}

func (s *GeminiWebState) TokenSnapshot() *gemini.GeminiWebTokenStorage {
	s.tokenMu.Lock()
	defer s.tokenMu.Unlock()
	c := *s.token
	return &c
}

type geminiWebPrepared struct {
	handlerType   string
	translatedRaw []byte
	prompt        string
	uploaded      []string
	chat          *ChatSession
	cleaned       []RoleText
	underlying    string
	reuse         bool
	tagged        bool
	originalRaw   []byte
}
func (s *GeminiWebState) prepare(ctx context.Context, modelName string, rawJSON []byte, stream bool, original []byte) (*geminiWebPrepared, *interfaces.ErrorMessage) {
	res := &geminiWebPrepared{originalRaw: original}
	res.translatedRaw = bytes.Clone(rawJSON)
	if handler, ok := ctx.Value("handler").(interfaces.APIHandler); ok && handler != nil {
		res.handlerType = handler.HandlerType()
		res.translatedRaw = translator.Request(res.handlerType, constant.GeminiWeb, modelName, res.translatedRaw, stream)
	}
	recordAPIRequest(ctx, s.cfg, res.translatedRaw)

	messages, files, mimes, msgFileIdx, err := ParseMessagesAndFiles(res.translatedRaw)
	if err != nil {
		return nil, &interfaces.ErrorMessage{StatusCode: 400, Error: fmt.Errorf("bad request: %w", err)}
	}
	cleaned := SanitizeAssistantMessages(messages)
	res.cleaned = cleaned
	res.underlying = MapAliasToUnderlying(modelName)
	model, err := ModelFromName(res.underlying)
	if err != nil {
		return nil, &interfaces.ErrorMessage{StatusCode: 400, Error: err}
	}

	var meta []string
	useMsgs := cleaned
	filesSubset := files
	mimesSubset := mimes

	if s.useReusableContext() {
		reuseMeta, remaining := s.findReusableSession(res.underlying, cleaned)
		if len(reuseMeta) > 0 {
			res.reuse = true
			meta = reuseMeta
			if len(remaining) == 1 {
				useMsgs = []RoleText{remaining[0]}
			} else if len(remaining) > 1 {
				useMsgs = remaining
			} else if len(cleaned) > 0 {
				useMsgs = []RoleText{cleaned[len(cleaned)-1]}
			}
			if len(useMsgs) == 1 && len(messages) > 0 && len(msgFileIdx) == len(messages) {
				lastIdx := len(msgFileIdx) - 1
				idxs := msgFileIdx[lastIdx]
				if len(idxs) > 0 {
					filesSubset = make([][]byte, 0, len(idxs))
					mimesSubset = make([]string, 0, len(idxs))
					for _, fi := range idxs {
						if fi >= 0 && fi < len(files) {
							filesSubset = append(filesSubset, files[fi])
							if fi < len(mimes) {
								mimesSubset = append(mimesSubset, mimes[fi])
							} else {
								mimesSubset = append(mimesSubset, "")
							}
						}
					}
				} else {
					filesSubset = nil
					mimesSubset = nil
				}
			} else {
				filesSubset = nil
				mimesSubset = nil
			}
		} else {
			if len(cleaned) >= 2 && strings.EqualFold(cleaned[len(cleaned)-2].Role, "assistant") {
				keyUnderlying := AccountMetaKey(s.accountID, res.underlying)
				keyAlias := AccountMetaKey(s.accountID, modelName)
				s.convMu.RLock()
				fallbackMeta := s.convStore[keyUnderlying]
				if len(fallbackMeta) == 0 {
					fallbackMeta = s.convStore[keyAlias]
				}
				s.convMu.RUnlock()
				if len(fallbackMeta) > 0 {
					meta = fallbackMeta
					useMsgs = []RoleText{cleaned[len(cleaned)-1]}
					res.reuse = true
					filesSubset = nil
					mimesSubset = nil
				}
			}
		}
	} else {
		keyUnderlying := AccountMetaKey(s.accountID, res.underlying)
		keyAlias := AccountMetaKey(s.accountID, modelName)
		s.convMu.RLock()
		if v, ok := s.convStore[keyUnderlying]; ok && len(v) > 0 {
			meta = v
		} else {
			meta = s.convStore[keyAlias]
		}
		s.convMu.RUnlock()
	}

	res.tagged = NeedRoleTags(useMsgs)
	if res.reuse && len(useMsgs) == 1 {
		res.tagged = false
	}

	enableXML := s.cfg != nil && s.cfg.GeminiWeb.CodeMode
	useMsgs = AppendXMLWrapHintIfNeeded(useMsgs, !enableXML)

	res.prompt = BuildPrompt(useMsgs, res.tagged, res.tagged)
	if strings.TrimSpace(res.prompt) == "" {
		return nil, &interfaces.ErrorMessage{StatusCode: 400, Error: errors.New("bad request: empty prompt after filtering system/thought content")}
	}

	uploaded, upErr := MaterializeInlineFiles(filesSubset, mimesSubset)
	if upErr != nil {
		return nil, upErr
	}
	res.uploaded = uploaded

	if err = s.EnsureClient(); err != nil {
		return nil, &interfaces.ErrorMessage{StatusCode: 500, Error: err}
	}
	chat := s.client.StartChat(model, s.getConfiguredGem(), meta)
	chat.SetRequestedModel(modelName)
	res.chat = chat

	return res, nil
}
func (s *GeminiWebState) Send(ctx context.Context, modelName string, reqPayload []byte, opts cliproxyexecutor.Options) ([]byte, *interfaces.ErrorMessage, *geminiWebPrepared) {
	prep, errMsg := s.prepare(ctx, modelName, reqPayload, opts.Stream, opts.OriginalRequest)
	if errMsg != nil {
		return nil, errMsg, nil
	}
	defer CleanupFiles(prep.uploaded)

	output, err := SendWithSplit(prep.chat, prep.prompt, prep.uploaded, s.cfg)
	if err != nil {
		return nil, s.wrapSendError(err), nil
	}

	// Hook: For gemini-2.5-flash-image-preview, if the API returns only images without any text,
	// inject a small textual summary so that conversation persistence has non-empty assistant text.
	// This helps conversation recovery (conv store) to match sessions reliably.
	if strings.EqualFold(modelName, "gemini-2.5-flash-image-preview") {
		if len(output.Candidates) > 0 {
			c := output.Candidates[output.Chosen]
			hasNoText := strings.TrimSpace(c.Text) == ""
			hasImages := len(c.GeneratedImages) > 0 || len(c.WebImages) > 0
			if hasNoText && hasImages {
				// Build a stable, concise fallback text. Avoid dynamic details to keep hashes stable.
				// A short deterministic phrase keeps repeated runs consistent for users.
				fallback := "Done"
				// Mutate the chosen candidate's text so both response conversion and
				// conversation persistence observe the same fallback.
				output.Candidates[output.Chosen].Text = fallback
			}
		}
	}

	gemBytes, err := ConvertOutputToGemini(&output, modelName, prep.prompt)
	if err != nil {
		return nil, &interfaces.ErrorMessage{StatusCode: 500, Error: err}, nil
	}

	s.addAPIResponseData(ctx, gemBytes)
	s.persistConversation(modelName, prep, &output)
	return gemBytes, nil, prep
}

func (s *GeminiWebState) wrapSendError(genErr error) *interfaces.ErrorMessage {
	status := 500
	var usage *UsageLimitExceeded
	var blocked *TemporarilyBlocked
	var invalid *ModelInvalid
	var valueErr *ValueError
	var timeout *TimeoutError
	switch {
	case errors.As(genErr, &usage):
		status = 429
	case errors.As(genErr, &blocked):
		status = 429
	case errors.As(genErr, &invalid):
		status = 400
	case errors.As(genErr, &valueErr):
		status = 400
	case errors.As(genErr, &timeout):
		status = 504
	}
	return &interfaces.ErrorMessage{StatusCode: status, Error: genErr}
}
func (s *GeminiWebState) persistConversation(modelName string, prep *geminiWebPrepared, output *ModelOutput) {
	if output == nil || prep == nil || prep.chat == nil {
		return
	}
	metadata := prep.chat.Metadata()
	if len(metadata) > 0 {
		keyUnderlying := AccountMetaKey(s.accountID, prep.underlying)
		keyAlias := AccountMetaKey(s.accountID, modelName)
		s.convMu.Lock()
		s.convStore[keyUnderlying] = metadata
		s.convStore[keyAlias] = metadata
		storeSnapshot := make(map[string][]string, len(s.convStore))
		for k, v := range s.convStore {
			if v == nil {
				continue
			}
			cp := make([]string, len(v))
			copy(cp, v)
			storeSnapshot[k] = cp
		}
		s.convMu.Unlock()
		_ = SaveConvStore(s.convStorePath(), storeSnapshot)
	}

	if !s.useReusableContext() {
		return
	}
	rec, ok := BuildConversationRecord(prep.underlying, s.stableClientID, prep.cleaned, output, metadata)
	if !ok {
		return
	}
	stableHash := HashConversation(rec.ClientID, prep.underlying, rec.Messages)
	accountHash := HashConversation(s.accountID, prep.underlying, rec.Messages)

	s.convMu.Lock()
	s.convData[stableHash] = rec
	s.convIndex["hash:"+stableHash] = stableHash
	if accountHash != stableHash {
		s.convIndex["hash:"+accountHash] = stableHash
	}
	dataSnapshot := make(map[string]ConversationRecord, len(s.convData))
	for k, v := range s.convData {
		dataSnapshot[k] = v
	}
	indexSnapshot := make(map[string]string, len(s.convIndex))
	for k, v := range s.convIndex {
		indexSnapshot[k] = v
	}
	s.convMu.Unlock()
	_ = SaveConvData(s.convDataPath(), dataSnapshot, indexSnapshot)
}

func (s *GeminiWebState) addAPIResponseData(ctx context.Context, line []byte) {
	appendAPIResponseChunk(ctx, s.cfg, line)
}

func (s *GeminiWebState) ConvertToTarget(ctx context.Context, modelName string, prep *geminiWebPrepared, gemBytes []byte) []byte {
	if prep == nil || prep.handlerType == "" {
		return gemBytes
	}
	if !translator.NeedConvert(prep.handlerType, constant.GeminiWeb) {
		return gemBytes
	}
	var param any
	out := translator.ResponseNonStream(prep.handlerType, constant.GeminiWeb, ctx, modelName, prep.originalRaw, prep.translatedRaw, gemBytes, &param)
	if prep.handlerType == constant.OpenAI && out != "" {
		newID := fmt.Sprintf("chatcmpl-%x", time.Now().UnixNano())
		if v := gjson.Parse(out).Get("id"); v.Exists() {
			out, _ = sjson.Set(out, "id", newID)
		}
	}
	return []byte(out)
}

func (s *GeminiWebState) ConvertStream(ctx context.Context, modelName string, prep *geminiWebPrepared, gemBytes []byte) []string {
	if prep == nil || prep.handlerType == "" {
		return []string{string(gemBytes)}
	}
	if !translator.NeedConvert(prep.handlerType, constant.GeminiWeb) {
		return []string{string(gemBytes)}
	}
	var param any
	return translator.Response(prep.handlerType, constant.GeminiWeb, ctx, modelName, prep.originalRaw, prep.translatedRaw, gemBytes, &param)
}

func (s *GeminiWebState) DoneStream(ctx context.Context, modelName string, prep *geminiWebPrepared) []string {
	if prep == nil || prep.handlerType == "" {
		return nil
	}
	if !translator.NeedConvert(prep.handlerType, constant.GeminiWeb) {
		return nil
	}
	var param any
	return translator.Response(prep.handlerType, constant.GeminiWeb, ctx, modelName, prep.originalRaw, prep.translatedRaw, []byte("[DONE]"), &param)
}

func (s *GeminiWebState) useReusableContext() bool {
	if s.cfg == nil {
		return true
	}
	return s.cfg.GeminiWeb.Context
}

func (s *GeminiWebState) findReusableSession(modelName string, msgs []RoleText) ([]string, []RoleText) {
	s.convMu.RLock()
	items := s.convData
	index := s.convIndex
	s.convMu.RUnlock()
	return FindReusableSessionIn(items, index, s.stableClientID, s.accountID, modelName, msgs)
}

func (s *GeminiWebState) getConfiguredGem() *Gem {
	if s.cfg != nil && s.cfg.GeminiWeb.CodeMode {
		return &Gem{ID: "coding-partner", Name: "Coding partner", Predefined: true}
	}
	return nil
}

// recordAPIRequest stores the upstream request payload in Gin context for request logging.
func recordAPIRequest(ctx context.Context, cfg *config.Config, payload []byte) {
	if cfg == nil || !cfg.RequestLog || len(payload) == 0 {
		return
	}
	if ginCtx, ok := ctx.Value("gin").(*gin.Context); ok && ginCtx != nil {
		ginCtx.Set("API_REQUEST", bytes.Clone(payload))
	}
}

// appendAPIResponseChunk appends an upstream response chunk to Gin context for request logging.
func appendAPIResponseChunk(ctx context.Context, cfg *config.Config, chunk []byte) {
	if cfg == nil || !cfg.RequestLog {
		return
	}
	data := bytes.TrimSpace(bytes.Clone(chunk))
	if len(data) == 0 {
		return
	}
	if ginCtx, ok := ctx.Value("gin").(*gin.Context); ok && ginCtx != nil {
		if existing, exists := ginCtx.Get("API_RESPONSE"); exists {
			if prev, okBytes := existing.([]byte); okBytes {
				prev = append(prev, data...)
				prev = append(prev, []byte("\n\n")...)
				ginCtx.Set("API_RESPONSE", prev)
				return
			}
		}
		ginCtx.Set("API_RESPONSE", data)
	}
}
83
internal/provider/gemini-web/types.go
Normal file
@@ -0,0 +1,83 @@
package geminiwebapi

import (
	"fmt"
	"html"
)

type Candidate struct {
	RCID            string
	Text            string
	Thoughts        *string
	WebImages       []WebImage
	GeneratedImages []GeneratedImage
}

func (c Candidate) String() string {
	t := c.Text
	if len(t) > 20 {
		t = t[:20] + "..."
	}
	return fmt.Sprintf("Candidate(rcid='%s', text='%s', images=%d)", c.RCID, t, len(c.WebImages)+len(c.GeneratedImages))
}

func (c Candidate) Images() []Image {
	images := make([]Image, 0, len(c.WebImages)+len(c.GeneratedImages))
	for _, wi := range c.WebImages {
		images = append(images, wi.Image)
	}
	for _, gi := range c.GeneratedImages {
		images = append(images, gi.Image)
	}
	return images
}

type ModelOutput struct {
	Metadata   []string
	Candidates []Candidate
	Chosen     int
}

func (m ModelOutput) String() string { return m.Text() }

func (m ModelOutput) Text() string {
	if len(m.Candidates) == 0 {
		return ""
	}
	return m.Candidates[m.Chosen].Text
}

func (m ModelOutput) Thoughts() *string {
	if len(m.Candidates) == 0 {
		return nil
	}
	return m.Candidates[m.Chosen].Thoughts
}

func (m ModelOutput) Images() []Image {
	if len(m.Candidates) == 0 {
		return nil
	}
	return m.Candidates[m.Chosen].Images()
}

func (m ModelOutput) RCID() string {
	if len(m.Candidates) == 0 {
		return ""
	}
	return m.Candidates[m.Chosen].RCID
}
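// Example (illustrative sketch): the ModelOutput accessors always read from the chosen
// candidate, so changing Chosen reroutes Text, Thoughts, Images, and RCID without copying.
func exampleModelOutput() string {
	out := ModelOutput{
		Candidates: []Candidate{
			{RCID: "rc_1", Text: "first draft"},
			{RCID: "rc_2", Text: "second draft"},
		},
		Chosen: 1,
	}
	return out.Text() // "second draft"
}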
type Gem struct {
	ID          string
	Name        string
	Description *string
	Prompt      *string
	Predefined  bool
}

func (g Gem) String() string {
	return fmt.Sprintf("Gem(id='%s', name='%s', description='%v', prompt='%v', predefined=%v)", g.ID, g.Name, g.Description, g.Prompt, g.Predefined)
}

func decodeHTML(s string) string { return html.UnescapeString(s) }