Mirror of https://github.com/LifeArchiveProject/WeChatDataAnalysis.git (synced 2026-02-19 14:20:51 +08:00)
Merge remote-tracking branch 'upstream/main' into feat/wx-key
@@ -13,6 +13,7 @@ from .logging_config import setup_logging, get_logger
 from .path_fix import PathFixRoute
 from .chat_realtime_autosync import CHAT_REALTIME_AUTOSYNC
 from .routers.chat import router as _chat_router
+from .routers.chat_contacts import router as _chat_contacts_router
 from .routers.chat_export import router as _chat_export_router
 from .routers.chat_media import router as _chat_media_router
 from .routers.decrypt import router as _decrypt_router
@@ -52,6 +53,7 @@ app.include_router(_decrypt_router)
 app.include_router(_keys_router)
 app.include_router(_media_router)
 app.include_router(_chat_router)
+app.include_router(_chat_contacts_router)
 app.include_router(_chat_export_router)
 app.include_router(_chat_media_router)
 app.include_router(_sns_router)
454  src/wechat_decrypt_tool/avatar_cache.py  Normal file
@@ -0,0 +1,454 @@
from __future__ import annotations

import hashlib
import os
import re
import sqlite3
import time
from email.utils import formatdate
from pathlib import Path
from typing import Any, Optional
from urllib.parse import urlsplit, urlunsplit

from .app_paths import get_output_dir
from .logging_config import get_logger

logger = get_logger(__name__)

AVATAR_CACHE_TTL_SECONDS = 7 * 24 * 60 * 60


def is_avatar_cache_enabled() -> bool:
    v = str(os.environ.get("WECHAT_TOOL_AVATAR_CACHE_ENABLED", "1") or "").strip().lower()
    return v not in {"", "0", "false", "off", "no"}


def get_avatar_cache_root_dir() -> Path:
    return get_output_dir() / "avatar_cache"


def _safe_segment(value: str) -> str:
    cleaned = re.sub(r"[^0-9A-Za-z._-]+", "_", str(value or "").strip())
    cleaned = cleaned.strip("._-")
    return cleaned or "default"


def _account_layout(account: str) -> tuple[Path, Path, Path, Path]:
    account_dir = get_avatar_cache_root_dir() / _safe_segment(account)
    files_dir = account_dir / "files"
    tmp_dir = account_dir / "tmp"
    db_path = account_dir / "avatar_cache.db"
    return account_dir, files_dir, tmp_dir, db_path


def _ensure_account_layout(account: str) -> tuple[Path, Path, Path, Path]:
    account_dir, files_dir, tmp_dir, db_path = _account_layout(account)
    account_dir.mkdir(parents=True, exist_ok=True)
    files_dir.mkdir(parents=True, exist_ok=True)
    tmp_dir.mkdir(parents=True, exist_ok=True)
    return account_dir, files_dir, tmp_dir, db_path


def _connect(account: str) -> sqlite3.Connection:
    _, _, _, db_path = _ensure_account_layout(account)
    conn = sqlite3.connect(str(db_path), timeout=5)
    conn.row_factory = sqlite3.Row
    _ensure_schema(conn)
    return conn


def _ensure_schema(conn: sqlite3.Connection) -> None:
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS avatar_cache_entries (
            account TEXT NOT NULL,
            cache_key TEXT NOT NULL,
            source_kind TEXT NOT NULL,
            username TEXT NOT NULL DEFAULT '',
            source_url TEXT NOT NULL DEFAULT '',
            source_md5 TEXT NOT NULL DEFAULT '',
            source_update_time INTEGER NOT NULL DEFAULT 0,
            rel_path TEXT NOT NULL DEFAULT '',
            media_type TEXT NOT NULL DEFAULT 'application/octet-stream',
            size_bytes INTEGER NOT NULL DEFAULT 0,
            etag TEXT NOT NULL DEFAULT '',
            last_modified TEXT NOT NULL DEFAULT '',
            fetched_at INTEGER NOT NULL DEFAULT 0,
            checked_at INTEGER NOT NULL DEFAULT 0,
            expires_at INTEGER NOT NULL DEFAULT 0,
            PRIMARY KEY (account, cache_key)
        )
        """
    )
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_avatar_cache_entries_account_username ON avatar_cache_entries(account, username)"
    )
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_avatar_cache_entries_account_source ON avatar_cache_entries(account, source_kind, source_url)"
    )
    conn.commit()


def _row_to_dict(row: Optional[sqlite3.Row]) -> Optional[dict[str, Any]]:
    if row is None:
        return None
    out: dict[str, Any] = {}
    for k in row.keys():
        out[str(k)] = row[k]
    return out


def normalize_avatar_source_url(url: str) -> str:
    raw = str(url or "").strip()
    if not raw:
        return ""
    try:
        p = urlsplit(raw)
    except Exception:
        return raw
    scheme = str(p.scheme or "").lower()
    host = str(p.hostname or "").lower()
    if not scheme or not host:
        return raw
    netloc = host
    if p.port:
        netloc = f"{host}:{int(p.port)}"
    path = p.path or "/"
    return urlunsplit((scheme, netloc, path, p.query or "", ""))


def cache_key_for_avatar_user(username: str) -> str:
    u = str(username or "").strip()
    return hashlib.sha1(f"user:{u}".encode("utf-8", errors="ignore")).hexdigest()


def cache_key_for_avatar_url(url: str) -> str:
    u = normalize_avatar_source_url(url)
    return hashlib.sha1(f"url:{u}".encode("utf-8", errors="ignore")).hexdigest()


def get_avatar_cache_entry(account: str, cache_key: str) -> Optional[dict[str, Any]]:
    if (not is_avatar_cache_enabled()) or (not cache_key):
        return None
    try:
        conn = _connect(account)
    except Exception:
        return None
    try:
        row = conn.execute(
            "SELECT * FROM avatar_cache_entries WHERE account = ? AND cache_key = ? LIMIT 1",
            (str(account or ""), str(cache_key or "")),
        ).fetchone()
        return _row_to_dict(row)
    except Exception:
        return None
    finally:
        try:
            conn.close()
        except Exception:
            pass


def get_avatar_cache_user_entry(account: str, username: str) -> Optional[dict[str, Any]]:
    if not username:
        return None
    return get_avatar_cache_entry(account, cache_key_for_avatar_user(username))


def get_avatar_cache_url_entry(account: str, source_url: str) -> Optional[dict[str, Any]]:
    if not source_url:
        return None
    return get_avatar_cache_entry(account, cache_key_for_avatar_url(source_url))


def resolve_avatar_cache_entry_path(account: str, entry: Optional[dict[str, Any]]) -> Optional[Path]:
    if not entry:
        return None
    rel = str(entry.get("rel_path") or "").strip().replace("\\", "/")
    if not rel:
        return None
    account_dir, _, _, _ = _account_layout(account)
    p = account_dir / rel
    try:
        account_dir_resolved = account_dir.resolve()
        p_resolved = p.resolve()
        if p_resolved != account_dir_resolved and account_dir_resolved not in p_resolved.parents:
            return None
        return p_resolved
    except Exception:
        return p


def avatar_cache_entry_file_exists(account: str, entry: Optional[dict[str, Any]]) -> Optional[Path]:
    p = resolve_avatar_cache_entry_path(account, entry)
    if not p:
        return None
    try:
        if p.exists() and p.is_file():
            return p
    except Exception:
        return None
    return None


def avatar_cache_entry_is_fresh(entry: Optional[dict[str, Any]], now_ts: Optional[int] = None) -> bool:
    if not entry:
        return False
    try:
        expires = int(entry.get("expires_at") or 0)
    except Exception:
        expires = 0
    if expires <= 0:
        return False
    now0 = int(now_ts or time.time())
    return expires > now0


def _guess_ext(media_type: str) -> str:
    mt = str(media_type or "").strip().lower()
    if mt == "image/jpeg":
        return "jpg"
    if mt == "image/png":
        return "png"
    if mt == "image/gif":
        return "gif"
    if mt == "image/webp":
        return "webp"
    if mt == "image/bmp":
        return "bmp"
    if mt == "image/svg+xml":
        return "svg"
    if mt == "image/avif":
        return "avif"
    if mt.startswith("image/"):
        return mt.split("/", 1)[1].split("+", 1)[0].split(";", 1)[0] or "img"
    return "dat"


def _http_date_from_ts(ts: Optional[int]) -> str:
    try:
        t = int(ts or 0)
    except Exception:
        t = 0
    if t <= 0:
        return ""
    try:
        return formatdate(timeval=float(t), usegmt=True)
    except Exception:
        return ""


def upsert_avatar_cache_entry(
    account: str,
    *,
    cache_key: str,
    source_kind: str,
    username: str = "",
    source_url: str = "",
    source_md5: str = "",
    source_update_time: int = 0,
    rel_path: str = "",
    media_type: str = "application/octet-stream",
    size_bytes: int = 0,
    etag: str = "",
    last_modified: str = "",
    fetched_at: Optional[int] = None,
    checked_at: Optional[int] = None,
    expires_at: Optional[int] = None,
) -> Optional[dict[str, Any]]:
    if (not is_avatar_cache_enabled()) or (not cache_key):
        return None

    acct = str(account or "").strip()
    ck = str(cache_key or "").strip()
    sk = str(source_kind or "").strip().lower()
    if not acct or not ck or not sk:
        return None

    source_url_norm = normalize_avatar_source_url(source_url) if source_url else ""

    now_ts = int(time.time())
    fetched = int(fetched_at if fetched_at is not None else now_ts)
    checked = int(checked_at if checked_at is not None else now_ts)
    expire_ts = int(expires_at if expires_at is not None else (checked + AVATAR_CACHE_TTL_SECONDS))

    try:
        conn = _connect(acct)
    except Exception as e:
        logger.warning(f"[avatar_cache_error] open db failed account={acct} err={e}")
        return None
    try:
        conn.execute(
            """
            INSERT INTO avatar_cache_entries (
                account, cache_key, source_kind, username, source_url,
                source_md5, source_update_time, rel_path, media_type, size_bytes,
                etag, last_modified, fetched_at, checked_at, expires_at
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT(account, cache_key) DO UPDATE SET
                source_kind=excluded.source_kind,
                username=excluded.username,
                source_url=excluded.source_url,
                source_md5=excluded.source_md5,
                source_update_time=excluded.source_update_time,
                rel_path=excluded.rel_path,
                media_type=excluded.media_type,
                size_bytes=excluded.size_bytes,
                etag=excluded.etag,
                last_modified=excluded.last_modified,
                fetched_at=excluded.fetched_at,
                checked_at=excluded.checked_at,
                expires_at=excluded.expires_at
            """,
            (
                acct,
                ck,
                sk,
                str(username or "").strip(),
                source_url_norm,
                str(source_md5 or "").strip().lower(),
                int(source_update_time or 0),
                str(rel_path or "").strip().replace("\\", "/"),
                str(media_type or "application/octet-stream").strip() or "application/octet-stream",
                int(size_bytes or 0),
                str(etag or "").strip(),
                str(last_modified or "").strip(),
                fetched,
                checked,
                expire_ts,
            ),
        )
        conn.commit()
        row = conn.execute(
            "SELECT * FROM avatar_cache_entries WHERE account = ? AND cache_key = ? LIMIT 1",
            (acct, ck),
        ).fetchone()
        return _row_to_dict(row)
    except Exception as e:
        logger.warning(f"[avatar_cache_error] upsert failed account={acct} cache_key={ck} err={e}")
        return None
    finally:
        try:
            conn.close()
        except Exception:
            pass


def touch_avatar_cache_entry(account: str, cache_key: str, *, ttl_seconds: int = AVATAR_CACHE_TTL_SECONDS) -> bool:
    if (not is_avatar_cache_enabled()) or (not cache_key):
        return False
    now_ts = int(time.time())
    try:
        conn = _connect(account)
    except Exception:
        return False
    try:
        conn.execute(
            "UPDATE avatar_cache_entries SET checked_at = ?, expires_at = ? WHERE account = ? AND cache_key = ?",
            (now_ts, now_ts + max(60, int(ttl_seconds or AVATAR_CACHE_TTL_SECONDS)), str(account or ""), str(cache_key or "")),
        )
        conn.commit()
        return True
    except Exception:
        return False
    finally:
        try:
            conn.close()
        except Exception:
            pass


def write_avatar_cache_payload(
    account: str,
    *,
    source_kind: str,
    username: str = "",
    source_url: str = "",
    payload: bytes,
    media_type: str,
    source_md5: str = "",
    source_update_time: int = 0,
    etag: str = "",
    last_modified: str = "",
    ttl_seconds: int = AVATAR_CACHE_TTL_SECONDS,
) -> tuple[Optional[dict[str, Any]], Optional[Path]]:
    if (not is_avatar_cache_enabled()) or (not payload):
        return None, None

    acct = str(account or "").strip()
    sk = str(source_kind or "").strip().lower()
    if not acct or sk not in {"user", "url"}:
        return None, None

    source_url_norm = normalize_avatar_source_url(source_url) if source_url else ""
    if sk == "user":
        cache_key = cache_key_for_avatar_user(username)
    else:
        cache_key = cache_key_for_avatar_url(source_url_norm)

    digest = hashlib.sha1(bytes(payload)).hexdigest()
    ext = _guess_ext(media_type)
    rel_path = f"files/{digest[:2]}/{digest}.{ext}"

    try:
        account_dir, _, tmp_dir, _ = _ensure_account_layout(acct)
    except Exception as e:
        logger.warning(f"[avatar_cache_error] ensure dirs failed account={acct} err={e}")
        return None, None

    abs_path = account_dir / rel_path
    try:
        abs_path.parent.mkdir(parents=True, exist_ok=True)
        if (not abs_path.exists()) or (int(abs_path.stat().st_size) != len(payload)):
            tmp_path = tmp_dir / f"{digest}.{time.time_ns()}.tmp"
            tmp_path.write_bytes(payload)
            os.replace(str(tmp_path), str(abs_path))
    except Exception as e:
        logger.warning(f"[avatar_cache_error] write file failed account={acct} path={abs_path} err={e}")
        return None, None

    if (not etag) and digest:
        etag = f'"{digest}"'
    if (not last_modified) and source_update_time:
        last_modified = _http_date_from_ts(source_update_time)
    if not last_modified:
        last_modified = _http_date_from_ts(int(time.time()))

    entry = upsert_avatar_cache_entry(
        acct,
        cache_key=cache_key,
        source_kind=sk,
        username=username,
        source_url=source_url_norm,
        source_md5=source_md5,
        source_update_time=int(source_update_time or 0),
        rel_path=rel_path,
        media_type=media_type,
        size_bytes=len(payload),
        etag=etag,
        last_modified=last_modified,
        fetched_at=int(time.time()),
        checked_at=int(time.time()),
        expires_at=int(time.time()) + max(60, int(ttl_seconds or AVATAR_CACHE_TTL_SECONDS)),
    )
    if not entry:
        return None, None
    return entry, abs_path


def build_avatar_cache_response_headers(
    entry: Optional[dict[str, Any]], *, max_age: int = AVATAR_CACHE_TTL_SECONDS
) -> dict[str, str]:
    headers: dict[str, str] = {
        "Cache-Control": f"public, max-age={int(max_age)}",
    }
    if not entry:
        return headers
    etag = str(entry.get("etag") or "").strip()
    last_modified = str(entry.get("last_modified") or "").strip()
    if etag:
        headers["ETag"] = etag
    if last_modified:
        headers["Last-Modified"] = last_modified
    return headers
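[Editorial note] For orientation, a minimal usage sketch of the new module (not part of the commit; the import path assumes the src/ package is installed, and the account name, contact id, and payload bytes are made up):

from wechat_decrypt_tool.avatar_cache import (
    avatar_cache_entry_file_exists,
    avatar_cache_entry_is_fresh,
    build_avatar_cache_response_headers,
    get_avatar_cache_user_entry,
    write_avatar_cache_payload,
)

# Store an avatar fetched elsewhere (account/username/payload are hypothetical).
entry, abs_path = write_avatar_cache_payload(
    "wxid_demo_account",
    source_kind="user",
    username="wxid_some_friend",
    payload=b"\x89PNG\r\n\x1a\n",  # raw image bytes
    media_type="image/png",
)

# Later: serve from disk while the TTL holds.
cached = get_avatar_cache_user_entry("wxid_demo_account", "wxid_some_friend")
if avatar_cache_entry_is_fresh(cached) and avatar_cache_entry_file_exists("wxid_demo_account", cached):
    headers = build_avatar_cache_response_headers(cached)  # Cache-Control, ETag, Last-Modified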
@@ -74,6 +74,25 @@ def _safe_name(s: str, max_len: int = 80) -> str:
     return t
 
 
+def _resolve_export_output_dir(account_dir: Path, output_dir_raw: Any) -> Path:
+    text = str(output_dir_raw or "").strip()
+    if not text:
+        default_dir = account_dir.parents[1] / "exports" / account_dir.name
+        default_dir.mkdir(parents=True, exist_ok=True)
+        return default_dir
+
+    out_dir = Path(text).expanduser()
+    if not out_dir.is_absolute():
+        raise ValueError("output_dir must be an absolute path.")
+
+    try:
+        out_dir.mkdir(parents=True, exist_ok=True)
+    except Exception as e:
+        raise ValueError(f"Failed to prepare output_dir: {e}") from e
+
+    return out_dir.resolve()
+
+
 def _format_ts(ts: int) -> str:
     if not ts:
         return ""
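[Editorial note] The helper's three outcomes, traced against the code above (paths illustrative):

# _resolve_export_output_dir(account_dir, None)
#   -> account_dir.parents[1] / "exports" / account_dir.name   (default, created on demand)
# _resolve_export_output_dir(account_dir, "relative/dir")
#   -> ValueError("output_dir must be an absolute path.")
# _resolve_export_output_dir(account_dir, "/tmp/wechat_exports")
#   -> Path("/tmp/wechat_exports").resolve()                    (created if missing)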
@@ -99,43 +118,54 @@ def _normalize_render_type_key(value: Any) -> str:
     return lower
 
 
-def _render_types_to_local_types(render_types: set[str]) -> Optional[set[int]]:
-    rt = {str(x or "").strip() for x in (render_types or set())}
-    rt = {x for x in rt if x}
-    if not rt:
-        return None
-
-    out: set[int] = set()
-    for k in rt:
-        if k == "text":
-            out.add(1)
-        elif k == "image":
-            out.add(3)
-        elif k == "voice":
-            out.add(34)
-        elif k == "video":
-            out.update({43, 62})
-        elif k == "emoji":
-            out.add(47)
-        elif k == "voip":
-            out.add(50)
-        elif k == "system":
-            out.update({10000, 266287972401})
-        elif k == "quote":
-            out.add(244813135921)
-            out.add(49)  # Some quote messages are embedded as appmsg (local_type=49).
-        elif k in {"link", "file", "transfer", "redpacket"}:
-            out.add(49)
-        else:
-            # Unknown type: cannot safely prefilter by local_type.
-            return None
-    return out
-
-
-def _should_estimate_by_local_type(render_types: set[str]) -> bool:
-    # Only estimate counts when every requested type maps 1:1 to local_type.
-    # App messages (local_type=49) are heterogeneous and cannot be counted accurately without parsing.
-    return not bool(render_types & {"link", "file", "transfer", "redpacket", "quote"})
+def _is_render_type_selected(render_type: Any, selected_render_types: Optional[set[str]]) -> bool:
+    if selected_render_types is None:
+        return True
+    rt = _normalize_render_type_key(render_type) or "text"
+    return rt in selected_render_types
+
+
+def _media_kinds_from_selected_types(selected_render_types: Optional[set[str]]) -> Optional[set[MediaKind]]:
+    if selected_render_types is None:
+        return None
+
+    out: set[MediaKind] = set()
+    if "image" in selected_render_types:
+        out.add("image")
+    if "emoji" in selected_render_types:
+        out.add("emoji")
+    if "video" in selected_render_types:
+        out.add("video")
+        out.add("video_thumb")
+    if "voice" in selected_render_types:
+        out.add("voice")
+    if "file" in selected_render_types:
+        out.add("file")
+    return out
+
+
+def _resolve_effective_media_kinds(
+    *,
+    include_media: bool,
+    media_kinds: list[MediaKind],
+    selected_render_types: Optional[set[str]],
+    privacy_mode: bool,
+) -> tuple[bool, list[MediaKind]]:
+    if privacy_mode or (not include_media):
+        return False, []
+
+    kinds = [k for k in media_kinds if k in {"image", "emoji", "video", "video_thumb", "voice", "file"}]
+    if not kinds:
+        return False, []
+
+    selected_media_kinds = _media_kinds_from_selected_types(selected_render_types)
+    if selected_media_kinds is not None:
+        kinds = [k for k in kinds if k in selected_media_kinds]
+
+    kinds = list(dict.fromkeys(kinds))
+    if not kinds:
+        return False, []
+    return True, kinds
 
 
 @dataclass
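[Editorial note] Taken together, media packing now follows the message-type selection. A quick illustration of _resolve_effective_media_kinds (values are examples, not from the commit):

# Only "voice" selected: image/video kinds are filtered out of packing.
include_media, kinds = _resolve_effective_media_kinds(
    include_media=True,
    media_kinds=["image", "video", "video_thumb", "voice"],
    selected_render_types={"voice"},
    privacy_mode=False,
)
assert (include_media, kinds) == (True, ["voice"])

# privacy_mode always wins, regardless of the other flags.
assert _resolve_effective_media_kinds(
    include_media=True,
    media_kinds=["image"],
    selected_render_types=None,
    privacy_mode=True,
) == (False, [])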
@@ -235,6 +265,7 @@ class ChatExportManager:
         include_media: bool,
         media_kinds: list[MediaKind],
         message_types: list[str],
+        output_dir: Optional[str],
         allow_process_key_extract: bool,
         privacy_mode: bool,
         file_name: Optional[str],
@@ -257,6 +288,7 @@ class ChatExportManager:
             "includeMedia": bool(include_media),
             "mediaKinds": media_kinds,
             "messageTypes": list(dict.fromkeys([str(t or "").strip() for t in (message_types or []) if str(t or "").strip()])),
+            "outputDir": str(output_dir or "").strip(),
             "allowProcessKeyExtract": bool(allow_process_key_extract),
             "privacyMode": bool(privacy_mode),
             "fileName": str(file_name or "").strip(),
@@ -313,10 +345,6 @@ class ChatExportManager:
             if ks in {"image", "emoji", "video", "video_thumb", "voice", "file"}:
                 media_kinds.append(ks)  # type: ignore[arg-type]
 
-        if privacy_mode:
-            include_media = False
-            media_kinds = []
-
         st = int(opts.get("startTime") or 0) or None
         et = int(opts.get("endTime") or 0) or None
 
@@ -328,9 +356,15 @@ class ChatExportManager:
         if want:
             want_types = want
 
-        local_types = _render_types_to_local_types(want_types) if want_types else None
-        can_estimate = (want_types is None) or _should_estimate_by_local_type(want_types)
-        estimate_local_types = local_types if (want_types and can_estimate) else None
+        include_media, media_kinds = _resolve_effective_media_kinds(
+            include_media=include_media,
+            media_kinds=media_kinds,
+            selected_render_types=want_types,
+            privacy_mode=privacy_mode,
+        )
+
+        local_types = None
+        estimate_local_types = None
 
         target_usernames = _resolve_export_targets(
             account_dir=account_dir,
@@ -342,8 +376,7 @@ class ChatExportManager:
         if not target_usernames:
             raise ValueError("No target conversations to export.")
 
-        exports_root = account_dir.parents[1] / "exports" / account_dir.name
-        exports_root.mkdir(parents=True, exist_ok=True)
+        exports_root = _resolve_export_output_dir(account_dir, opts.get("outputDir"))
         ts = datetime.now().strftime("%Y%m%d_%H%M%S")
 
         base_name = str(opts.get("fileName") or "").strip()
@@ -456,16 +489,13 @@ class ChatExportManager:
                 job.progress.current_conversation_messages_total = 0
 
             try:
-                if not can_estimate:
-                    estimated_total = 0
-                else:
-                    estimated_total = _estimate_conversation_message_count(
-                        account_dir=account_dir,
-                        conv_username=conv_username,
-                        start_time=st,
-                        end_time=et,
-                        local_types=estimate_local_types,
-                    )
+                estimated_total = _estimate_conversation_message_count(
+                    account_dir=account_dir,
+                    conv_username=conv_username,
+                    start_time=st,
+                    end_time=et,
+                    local_types=estimate_local_types,
+                )
             except Exception:
                 estimated_total = 0
 
@@ -557,6 +587,8 @@ class ChatExportManager:
             zf.writestr(f"{conv_dir}/meta.json", json.dumps(meta, ensure_ascii=False, indent=2))
 
             with self._lock:
+                job.progress.current_conversation_messages_exported = int(exported_count)
+                job.progress.current_conversation_messages_total = int(exported_count)
                 job.progress.conversations_done += 1
 
         manifest = {
@@ -1325,12 +1357,8 @@ def _write_conversation_json(
             resource_chat_id=resource_chat_id,
             sender_alias=sender_alias,
         )
-        if want_types:
-            rt_key = _normalize_render_type_key(msg.get("renderType"))
-            if rt_key not in want_types:
-                if scanned % 500 == 0 and job.cancel_requested:
-                    raise _JobCancelled()
-                continue
+        if not _is_render_type_selected(msg.get("renderType"), want_types):
+            continue
 
         su = str(msg.get("senderUsername") or "").strip()
         if privacy_mode:
@@ -1506,12 +1534,8 @@ def _write_conversation_txt(
            resource_chat_id=resource_chat_id,
            sender_alias=sender_alias,
        )
-        if want_types:
-            rt_key = _normalize_render_type_key(msg.get("renderType"))
-            if rt_key not in want_types:
-                if scanned % 500 == 0 and job.cancel_requested:
-                    raise _JobCancelled()
-                continue
+        if not _is_render_type_selected(msg.get("renderType"), want_types):
+            continue
 
         su = str(msg.get("senderUsername") or "").strip()
         if privacy_mode:
@@ -45,7 +45,6 @@ from ..chat_helpers import (
     _normalize_xml_url,
     _parse_app_message,
     _parse_pat_message,
-    _pick_avatar_url,
     _pick_display_name,
     _query_head_image_usernames,
     _quote_ident,
@@ -85,6 +84,19 @@ _REALTIME_SYNC_LOCKS: dict[tuple[str, str], threading.Lock] = {}
 _REALTIME_SYNC_ALL_LOCKS: dict[str, threading.Lock] = {}
 
 
+def _avatar_url_unified(
+    *,
+    account_dir: Path,
+    username: str,
+    local_avatar_usernames: set[str] | None = None,
+) -> str:
+    u = str(username or "").strip()
+    if not u:
+        return ""
+    # Unified avatar entrypoint: backend decides local db vs remote fallback + cache.
+    return _build_avatar_url(str(account_dir.name or ""), u)
+
+
 def _realtime_sync_lock(account: str, username: str) -> threading.Lock:
     key = (str(account or "").strip(), str(username or "").strip())
     with _REALTIME_SYNC_MU:
@@ -1946,9 +1958,11 @@ async def chat_search_index_senders(
             continue
         cnt = int(r["c"] or 0)
         row = contact_rows.get(su)
-        avatar_url = _pick_avatar_url(row)
-        if (not avatar_url) and (su in local_sender_avatars):
-            avatar_url = _build_avatar_url(account_dir.name, su)
+        avatar_url = _avatar_url_unified(
+            account_dir=account_dir,
+            username=su,
+            local_avatar_usernames=local_sender_avatars,
+        )
         senders.append(
             {
                 "username": su,
@@ -2568,7 +2582,7 @@ def _postprocess_full_messages(
         row = sender_contact_rows.get(u)
         if _pick_display_name(row, u) == u:
             need_display.append(u)
-        if (not _pick_avatar_url(row)) and (u not in local_sender_avatars):
+        if u not in local_sender_avatars:
             need_avatar.append(u)
 
     need_display = list(dict.fromkeys(need_display))
@@ -2606,13 +2620,11 @@ def _postprocess_full_messages(
         if wd and wd != su:
             display_name = wd
         m["senderDisplayName"] = display_name
-        avatar_url = _pick_avatar_url(row)
-        if not avatar_url and su in local_sender_avatars:
-            avatar_url = base_url + _build_avatar_url(account_dir.name, su)
-        if not avatar_url:
-            wa = str(wcdb_avatar_urls.get(su) or "").strip()
-            if wa.lower().startswith(("http://", "https://")):
-                avatar_url = wa
+        avatar_url = base_url + _avatar_url_unified(
+            account_dir=account_dir,
+            username=su,
+            local_avatar_usernames=local_sender_avatars,
+        )
         m["senderAvatar"] = avatar_url
 
         qu = str(m.get("quoteUsername") or "").strip()
@@ -2922,7 +2934,7 @@ def list_chat_sessions(
             if u not in local_avatar_usernames:
                 need_avatar.append(u)
         else:
-            if (not _pick_avatar_url(row)) and (u not in local_avatar_usernames):
+            if u not in local_avatar_usernames:
                 need_avatar.append(u)
 
     need_display = list(dict.fromkeys(need_display))
@@ -2984,15 +2996,11 @@ def list_chat_sessions(
 
         # Prefer local head_image avatars when available: decrypted contact.db URLs can be stale
        # (or hotlink-protected for browsers). WCDB realtime (when available) is the next best.
-        avatar_url = ""
-        if username in local_avatar_usernames:
-            avatar_url = base_url + _build_avatar_url(account_dir.name, username)
-        if not avatar_url:
-            wa = str(wcdb_avatar_urls.get(username) or "").strip()
-            if wa.lower().startswith(("http://", "https://")):
-                avatar_url = wa
-        if not avatar_url:
-            avatar_url = _pick_avatar_url(c_row) or ""
+        avatar_url = base_url + _avatar_url_unified(
+            account_dir=account_dir,
+            username=username,
+            local_avatar_usernames=local_avatar_usernames,
+        )
 
         last_message = ""
         if preview_mode == "session":
@@ -4388,7 +4396,7 @@ def list_chat_messages(
         row = sender_contact_rows.get(u)
         if _pick_display_name(row, u) == u:
             need_display.append(u)
-        if (not _pick_avatar_url(row)) and (u not in local_sender_avatars):
+        if u not in local_sender_avatars:
            need_avatar.append(u)
 
     need_display = list(dict.fromkeys(need_display))
@@ -4426,13 +4434,11 @@ def list_chat_messages(
         if wd and wd != su:
             display_name = wd
         m["senderDisplayName"] = display_name
-        avatar_url = _pick_avatar_url(row)
-        if not avatar_url and su in local_sender_avatars:
-            avatar_url = base_url + _build_avatar_url(account_dir.name, su)
-        if not avatar_url:
-            wa = str(wcdb_avatar_urls.get(su) or "").strip()
-            if wa.lower().startswith(("http://", "https://")):
-                avatar_url = wa
+        avatar_url = base_url + _avatar_url_unified(
+            account_dir=account_dir,
+            username=su,
+            local_avatar_usernames=local_sender_avatars,
+        )
         m["senderAvatar"] = avatar_url
 
         qu = str(m.get("quoteUsername") or "").strip()
@@ -4897,7 +4903,7 @@ async def _search_chat_messages_via_fts(
             row = contact_rows.get(uu)
             if _pick_display_name(row, uu) == uu:
                 need_display.append(uu)
-            if (not _pick_avatar_url(row)) and (uu not in local_avatar_usernames):
+            if uu not in local_avatar_usernames:
                 need_avatar.append(uu)
 
         need_display = list(dict.fromkeys(need_display))
@@ -4919,13 +4925,11 @@ async def _search_chat_messages_via_fts(
         wd = str(wcdb_display_names.get(username) or "").strip()
         if wd and wd != username:
             conv_name = wd
-        conv_avatar = _pick_avatar_url(conv_row)
-        if (not conv_avatar) and (username in local_avatar_usernames):
-            conv_avatar = base_url + _build_avatar_url(account_dir.name, username)
-        if not conv_avatar:
-            wa = str(wcdb_avatar_urls.get(username) or "").strip()
-            if wa.lower().startswith(("http://", "https://")):
-                conv_avatar = wa
+        conv_avatar = base_url + _avatar_url_unified(
+            account_dir=account_dir,
+            username=username,
+            local_avatar_usernames=local_avatar_usernames,
+        )
 
         for h in hits:
             su = str(h.get("senderUsername") or "").strip()
@@ -4939,13 +4943,11 @@ async def _search_chat_messages_via_fts(
             if wd and wd != su:
                 display_name = wd
             h["senderDisplayName"] = display_name
-            avatar_url = _pick_avatar_url(row)
-            if (not avatar_url) and (su in local_avatar_usernames):
-                avatar_url = base_url + _build_avatar_url(account_dir.name, su)
-            if not avatar_url:
-                wa = str(wcdb_avatar_urls.get(su) or "").strip()
-                if wa.lower().startswith(("http://", "https://")):
-                    avatar_url = wa
+            avatar_url = base_url + _avatar_url_unified(
+                account_dir=account_dir,
+                username=su,
+                local_avatar_usernames=local_avatar_usernames,
+            )
             h["senderAvatar"] = avatar_url
     else:
         uniq_contacts = list(
@@ -4968,7 +4970,7 @@ async def _search_chat_messages_via_fts(
             row = contact_rows.get(uu)
             if _pick_display_name(row, uu) == uu:
                 need_display.append(uu)
-            if (not _pick_avatar_url(row)) and (uu not in local_avatar_usernames):
+            if uu not in local_avatar_usernames:
                 need_avatar.append(uu)
 
         need_display = list(dict.fromkeys(need_display))
@@ -4994,13 +4996,11 @@ async def _search_chat_messages_via_fts(
             if wd and wd != cu:
                 conv_name = wd
             h["conversationName"] = conv_name or cu
-            conv_avatar = _pick_avatar_url(crow)
-            if (not conv_avatar) and cu and (cu in local_avatar_usernames):
-                conv_avatar = base_url + _build_avatar_url(account_dir.name, cu)
-            if not conv_avatar and cu:
-                wa = str(wcdb_avatar_urls.get(cu) or "").strip()
-                if wa.lower().startswith(("http://", "https://")):
-                    conv_avatar = wa
+            conv_avatar = base_url + _avatar_url_unified(
+                account_dir=account_dir,
+                username=cu,
+                local_avatar_usernames=local_avatar_usernames,
+            )
             h["conversationAvatar"] = conv_avatar
             if su:
                 row = contact_rows.get(su)
@@ -5010,13 +5010,11 @@ async def _search_chat_messages_via_fts(
                 if wd and wd != su:
                     display_name = wd
                 h["senderDisplayName"] = display_name
-                avatar_url = _pick_avatar_url(row)
-                if (not avatar_url) and (su in local_avatar_usernames):
-                    avatar_url = base_url + _build_avatar_url(account_dir.name, su)
-                if not avatar_url:
-                    wa = str(wcdb_avatar_urls.get(su) or "").strip()
-                    if wa.lower().startswith(("http://", "https://")):
-                        avatar_url = wa
+                avatar_url = base_url + _avatar_url_unified(
+                    account_dir=account_dir,
+                    username=su,
+                    local_avatar_usernames=local_avatar_usernames,
+                )
                 h["senderAvatar"] = avatar_url
 
     return {
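[Editorial note] Net effect of these hunks: every call site that used to chain three fallbacks (decrypted contact.db URL, local head_image file, WCDB realtime URL) now emits one backend-served URL, and the avatar route decides the source and caches it. Note that _avatar_url_unified accepts local_avatar_usernames but, in this version, does not branch on it. A before/after sketch (illustrative):

# before (repeated at each call site):
#   avatar = _pick_avatar_url(row) or local head_image URL or wcdb_avatar_urls.get(su)
# after (one line everywhere):
#   avatar = base_url + _avatar_url_unified(account_dir=account_dir, username=su,
#                                           local_avatar_usernames=local_sender_avatars)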
749  src/wechat_decrypt_tool/routers/chat_contacts.py  Normal file
@@ -0,0 +1,749 @@
import csv
import json
import re
import sqlite3
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Literal, Optional

from fastapi import APIRouter, HTTPException, Request
from pydantic import BaseModel, Field

from ..chat_helpers import (
    _build_avatar_url,
    _pick_avatar_url,
    _pick_display_name,
    _resolve_account_dir,
    _should_keep_session,
)
from ..path_fix import PathFixRoute

router = APIRouter(route_class=PathFixRoute)


_SYSTEM_USERNAMES = {
    "filehelper",
    "fmessage",
    "floatbottle",
    "medianote",
    "newsapp",
    "qmessage",
    "qqmail",
    "tmessage",
    "brandsessionholder",
    "brandservicesessionholder",
    "notifymessage",
    "opencustomerservicemsg",
    "notification_messages",
    "userexperience_alarm",
}

_SOURCE_SCENE_LABELS = {
    1: "通过QQ号添加",
    3: "通过微信号添加",
    6: "通过手机号添加",
    10: "通过名片添加",
    14: "通过群聊添加",
    30: "通过扫一扫添加",
}

_COUNTRY_LABELS = {
    "CN": "中国大陆",
}


class ContactTypeFilter(BaseModel):
    friends: bool = True
    groups: bool = True
    officials: bool = True


class ContactExportRequest(BaseModel):
    account: Optional[str] = Field(None, description="账号目录名(可选,默认使用第一个)")
    output_dir: str = Field(..., description="导出目录绝对路径")
    format: str = Field("json", description="导出格式,仅支持 json/csv")
    include_avatar_link: bool = Field(True, description="是否导出 avatarLink 字段")
    contact_types: ContactTypeFilter = Field(default_factory=ContactTypeFilter)
    keyword: Optional[str] = Field(None, description="关键词筛选(可选)")


def _normalize_text(v: Any) -> str:
    if v is None:
        return ""
    return str(v).strip()


def _to_int(v: Any) -> int:
    try:
        return int(v or 0)
    except Exception:
        return 0


def _to_optional_int(v: Any) -> Optional[int]:
    if v is None:
        return None
    if isinstance(v, bool):
        return int(v)
    if isinstance(v, int):
        return v
    s = _normalize_text(v)
    if not s:
        return None
    try:
        return int(s)
    except Exception:
        return None


def _decode_varint(raw: bytes, offset: int) -> tuple[Optional[int], int]:
    value = 0
    shift = 0
    pos = int(offset)
    n = len(raw)
    while pos < n:
        byte = raw[pos]
        pos += 1
        value |= (byte & 0x7F) << shift
        if (byte & 0x80) == 0:
            return value, pos
        shift += 7
        if shift > 63:
            return None, n
    return None, n


def _decode_proto_text(raw: bytes) -> str:
    if not raw:
        return ""
    try:
        text = raw.decode("utf-8", errors="ignore")
    except Exception:
        return ""
    return re.sub(r"[\x00-\x08\x0b\x0c\x0e-\x1f]", "", text).strip()


def _parse_contact_extra_buffer(extra_buffer: Any) -> dict[str, Any]:
    out = {
        "signature": "",
        "country": "",
        "province": "",
        "city": "",
        "source_scene": None,
    }
    if extra_buffer is None:
        return out

    raw: bytes
    if isinstance(extra_buffer, memoryview):
        raw = extra_buffer.tobytes()
    elif isinstance(extra_buffer, (bytes, bytearray)):
        raw = bytes(extra_buffer)
    else:
        return out

    if not raw:
        return out

    idx = 0
    n = len(raw)
    while idx < n:
        tag, idx_next = _decode_varint(raw, idx)
        if tag is None:
            break
        idx = idx_next
        field_no = tag >> 3
        wire_type = tag & 0x7

        if wire_type == 0:
            val, idx_next = _decode_varint(raw, idx)
            if val is None:
                break
            idx = idx_next
            if field_no == 8:
                out["source_scene"] = int(val)
            continue

        if wire_type == 2:
            size, idx_next = _decode_varint(raw, idx)
            if size is None:
                break
            idx = idx_next
            end = idx + int(size)
            if end > n:
                break
            chunk = raw[idx:end]
            idx = end

            if field_no in {4, 5, 6, 7}:
                text = _decode_proto_text(chunk)
                if field_no == 4:
                    out["signature"] = text
                elif field_no == 5:
                    out["country"] = text
                elif field_no == 6:
                    out["province"] = text
                elif field_no == 7:
                    out["city"] = text
            continue

        if wire_type == 1:
            idx += 8
            continue
        if wire_type == 5:
            idx += 4
            continue

        break

    return out

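[Editorial note] A worked trace of the parser on a tiny made-up buffer: tag 0x40 is field 8/varint (source_scene), tag 0x2A is field 5/length-delimited (country):

buf = b"\x40\x1e\x2a\x02CN"  # field 8 = 30, then field 5 = "CN" (illustrative)
assert _parse_contact_extra_buffer(buf) == {
    "signature": "",
    "country": "CN",
    "province": "",
    "city": "",
    "source_scene": 30,
}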
def _country_label(country: str) -> str:
|
||||
c = _normalize_text(country)
|
||||
if not c:
|
||||
return ""
|
||||
return _COUNTRY_LABELS.get(c.upper(), c)
|
||||
|
||||
|
||||
def _source_scene_label(source_scene: Optional[int]) -> str:
|
||||
if source_scene is None:
|
||||
return ""
|
||||
if source_scene in _SOURCE_SCENE_LABELS:
|
||||
return _SOURCE_SCENE_LABELS[source_scene]
|
||||
return f"场景码 {source_scene}"
|
||||
|
||||
|
||||
def _build_region(country: str, province: str, city: str) -> str:
|
||||
parts: list[str] = []
|
||||
country_text = _country_label(country)
|
||||
province_text = _normalize_text(province)
|
||||
city_text = _normalize_text(city)
|
||||
if country_text:
|
||||
parts.append(country_text)
|
||||
if province_text:
|
||||
parts.append(province_text)
|
||||
if city_text:
|
||||
parts.append(city_text)
|
||||
return "·".join(parts)
|
||||
|
||||
|
||||
def _safe_export_part(s: str) -> str:
|
||||
cleaned = re.sub(r"[^0-9A-Za-z._-]+", "_", str(s or "").strip())
|
||||
cleaned = cleaned.strip("._-")
|
||||
return cleaned or "account"
|
||||
|
||||
|
||||
def _is_valid_contact_username(username: str) -> bool:
|
||||
u = _normalize_text(username)
|
||||
if not u:
|
||||
return False
|
||||
if u in _SYSTEM_USERNAMES:
|
||||
return False
|
||||
if u.startswith("fake_"):
|
||||
return False
|
||||
if not _should_keep_session(u, include_official=True) and not u.startswith("gh_") and u != "weixin":
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _get_table_columns(conn: sqlite3.Connection, table: str) -> set[str]:
|
||||
try:
|
||||
rows = conn.execute(f"PRAGMA table_info({table})").fetchall()
|
||||
except Exception:
|
||||
return set()
|
||||
|
||||
out: set[str] = set()
|
||||
for row in rows:
|
||||
try:
|
||||
name = _normalize_text(row["name"] if "name" in row.keys() else row[1]).lower()
|
||||
except Exception:
|
||||
continue
|
||||
if name:
|
||||
out.add(name)
|
||||
return out
|
||||
|
||||
|
||||
def _build_contact_select_sql(table: str, columns: set[str]) -> Optional[str]:
|
||||
if "username" not in columns:
|
||||
return None
|
||||
|
||||
specs: list[tuple[str, str, str]] = [
|
||||
("username", "username", "''"),
|
||||
("remark", "remark", "''"),
|
||||
("nick_name", "nick_name", "''"),
|
||||
("alias", "alias", "''"),
|
||||
("local_type", "local_type", "0"),
|
||||
("verify_flag", "verify_flag", "0"),
|
||||
("big_head_url", "big_head_url", "''"),
|
||||
("small_head_url", "small_head_url", "''"),
|
||||
("extra_buffer", "extra_buffer", "x''"),
|
||||
]
|
||||
|
||||
select_parts: list[str] = []
|
||||
for key, alias, fallback in specs:
|
||||
if key in columns:
|
||||
select_parts.append(key)
|
||||
else:
|
||||
select_parts.append(f"{fallback} AS {alias}")
|
||||
return f"SELECT {', '.join(select_parts)} FROM {table}"
|
||||
|
||||
|
||||
def _load_contact_rows_map(contact_db_path: Path) -> dict[str, dict[str, Any]]:
|
||||
out: dict[str, dict[str, Any]] = {}
|
||||
if not contact_db_path.exists():
|
||||
return out
|
||||
|
||||
conn = sqlite3.connect(str(contact_db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
try:
|
||||
def read_rows(table: str) -> list[sqlite3.Row]:
|
||||
columns = _get_table_columns(conn, table)
|
||||
sql = _build_contact_select_sql(table, columns)
|
||||
if not sql:
|
||||
return []
|
||||
try:
|
||||
return conn.execute(sql).fetchall()
|
||||
except Exception:
|
||||
return []
|
||||
return []
|
||||
|
||||
for table in ("contact", "stranger"):
|
||||
rows = read_rows(table)
|
||||
for row in rows:
|
||||
username = _normalize_text(row["username"] if "username" in row.keys() else "")
|
||||
if (not username) or (username in out):
|
||||
continue
|
||||
|
||||
extra_info = _parse_contact_extra_buffer(
|
||||
row["extra_buffer"] if "extra_buffer" in row.keys() else b""
|
||||
)
|
||||
out[username] = {
|
||||
"username": username,
|
||||
"remark": _normalize_text(row["remark"] if "remark" in row.keys() else ""),
|
||||
"nick_name": _normalize_text(row["nick_name"] if "nick_name" in row.keys() else ""),
|
||||
"alias": _normalize_text(row["alias"] if "alias" in row.keys() else ""),
|
||||
"local_type": _to_int(row["local_type"] if "local_type" in row.keys() else 0),
|
||||
"verify_flag": _to_int(row["verify_flag"] if "verify_flag" in row.keys() else 0),
|
||||
"big_head_url": _normalize_text(row["big_head_url"] if "big_head_url" in row.keys() else ""),
|
||||
"small_head_url": _normalize_text(row["small_head_url"] if "small_head_url" in row.keys() else ""),
|
||||
"country": _normalize_text(extra_info.get("country")),
|
||||
"province": _normalize_text(extra_info.get("province")),
|
||||
"city": _normalize_text(extra_info.get("city")),
|
||||
"source_scene": _to_optional_int(extra_info.get("source_scene")),
|
||||
}
|
||||
return out
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _load_session_sort_timestamps(session_db_path: Path) -> dict[str, int]:
|
||||
out: dict[str, int] = {}
|
||||
if not session_db_path.exists():
|
||||
return out
|
||||
|
||||
conn = sqlite3.connect(str(session_db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
try:
|
||||
rows: list[sqlite3.Row] = []
|
||||
queries = [
|
||||
"SELECT username, COALESCE(sort_timestamp, 0) AS ts FROM SessionTable",
|
||||
"SELECT username, COALESCE(last_timestamp, 0) AS ts FROM SessionTable",
|
||||
]
|
||||
for sql in queries:
|
||||
try:
|
||||
rows = conn.execute(sql).fetchall()
|
||||
break
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
for row in rows:
|
||||
username = _normalize_text(row["username"] if "username" in row.keys() else "")
|
||||
if not username:
|
||||
continue
|
||||
ts = _to_int(row["ts"] if "ts" in row.keys() else 0)
|
||||
prev = out.get(username, 0)
|
||||
if ts > prev:
|
||||
out[username] = ts
|
||||
return out
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _load_session_group_usernames(session_db_path: Path) -> set[str]:
|
||||
out: set[str] = set()
|
||||
if not session_db_path.exists():
|
||||
return out
|
||||
|
||||
conn = sqlite3.connect(str(session_db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
try:
|
||||
queries = [
|
||||
"SELECT username FROM SessionTable",
|
||||
"SELECT username FROM sessiontable",
|
||||
]
|
||||
for sql in queries:
|
||||
try:
|
||||
rows = conn.execute(sql).fetchall()
|
||||
except Exception:
|
||||
continue
|
||||
for row in rows:
|
||||
username = _normalize_text(row["username"] if "username" in row.keys() else "")
|
||||
if username and ("@chatroom" in username):
|
||||
out.add(username)
|
||||
return out
|
||||
return out
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _infer_contact_type(username: str, row: dict[str, Any]) -> Optional[str]:
|
||||
if not username:
|
||||
return None
|
||||
|
||||
if "@chatroom" in username:
|
||||
return "group"
|
||||
|
||||
verify_flag = _to_int(row.get("verify_flag"))
|
||||
if username.startswith("gh_") or verify_flag != 0:
|
||||
return "official"
|
||||
|
||||
local_type = _to_int(row.get("local_type"))
|
||||
if local_type == 1:
|
||||
return "friend"
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _matches_keyword(contact: dict[str, Any], keyword: str) -> bool:
|
||||
kw = _normalize_text(keyword).lower()
|
||||
if not kw:
|
||||
return True
|
||||
|
||||
fields = [
|
||||
contact.get("username", ""),
|
||||
contact.get("displayName", ""),
|
||||
contact.get("remark", ""),
|
||||
contact.get("nickname", ""),
|
||||
contact.get("alias", ""),
|
||||
contact.get("region", ""),
|
||||
contact.get("source", ""),
|
||||
contact.get("country", ""),
|
||||
contact.get("province", ""),
|
||||
contact.get("city", ""),
|
||||
]
|
||||
for field in fields:
|
||||
if kw in _normalize_text(field).lower():
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _collect_contacts_for_account(
|
||||
*,
|
||||
account_dir: Path,
|
||||
base_url: str,
|
||||
keyword: Optional[str],
|
||||
include_friends: bool,
|
||||
include_groups: bool,
|
||||
include_officials: bool,
|
||||
) -> list[dict[str, Any]]:
|
||||
if not (include_friends or include_groups or include_officials):
|
||||
return []
|
||||
|
||||
contact_db_path = account_dir / "contact.db"
|
||||
session_db_path = account_dir / "session.db"
|
||||
contact_rows = _load_contact_rows_map(contact_db_path)
|
||||
session_ts_map = _load_session_sort_timestamps(session_db_path)
|
||||
session_group_usernames = _load_session_group_usernames(session_db_path)
|
||||
|
||||
contacts: list[dict[str, Any]] = []
|
||||
for username, row in contact_rows.items():
|
||||
if not _is_valid_contact_username(username):
|
||||
continue
|
||||
|
||||
contact_type = _infer_contact_type(username, row)
|
||||
if contact_type is None:
|
||||
continue
|
||||
if contact_type == "friend" and not include_friends:
|
||||
continue
|
||||
if contact_type == "group" and not include_groups:
|
||||
continue
|
||||
if contact_type == "official" and not include_officials:
|
||||
continue
|
||||
|
||||
display_name = _pick_display_name(row, username)
|
||||
if not display_name:
|
||||
display_name = username
|
||||
|
||||
avatar_link = _normalize_text(_pick_avatar_url(row) or "")
|
||||
avatar = base_url + _build_avatar_url(account_dir.name, username)
|
||||
country = _normalize_text(row.get("country"))
|
||||
province = _normalize_text(row.get("province"))
|
||||
city = _normalize_text(row.get("city"))
|
||||
source_scene = _to_optional_int(row.get("source_scene"))
|
||||
|
||||
item = {
|
||||
"username": username,
|
||||
"displayName": display_name,
|
||||
"remark": _normalize_text(row.get("remark")),
|
||||
"nickname": _normalize_text(row.get("nick_name")),
|
||||
"alias": _normalize_text(row.get("alias")),
|
||||
"type": contact_type,
|
||||
"country": country,
|
||||
"province": province,
|
||||
"city": city,
|
||||
"region": _build_region(country, province, city),
|
||||
"sourceScene": source_scene,
|
||||
"source": _source_scene_label(source_scene),
|
||||
"avatar": avatar,
|
||||
"avatarLink": avatar_link,
|
||||
"_sortTs": _to_int(session_ts_map.get(username, 0)),
|
||||
}
|
||||
|
||||
if not _matches_keyword(item, keyword or ""):
|
||||
continue
|
||||
contacts.append(item)
|
||||
|
||||
if include_groups:
|
||||
for username in session_group_usernames:
|
||||
if username in contact_rows:
|
||||
continue
|
||||
if not _is_valid_contact_username(username):
|
||||
continue
|
||||
|
||||
avatar_link = ""
|
||||
avatar = base_url + _build_avatar_url(account_dir.name, username)
|
||||
|
||||
item = {
|
||||
"username": username,
|
||||
"displayName": username,
|
||||
"remark": "",
|
||||
"nickname": "",
|
||||
"alias": "",
|
||||
"type": "group",
|
||||
"country": "",
|
||||
"province": "",
|
||||
"city": "",
|
||||
"region": "",
|
||||
"sourceScene": None,
|
||||
"source": "",
|
||||
"avatar": avatar,
|
||||
"avatarLink": avatar_link,
|
||||
"_sortTs": _to_int(session_ts_map.get(username, 0)),
|
||||
}
|
||||
|
||||
if not _matches_keyword(item, keyword or ""):
|
||||
continue
|
||||
contacts.append(item)
|
||||
|
||||
contacts.sort(
|
||||
key=lambda x: (
|
||||
-_to_int(x.get("_sortTs", 0)),
|
||||
_normalize_text(x.get("displayName", "")).lower(),
|
||||
_normalize_text(x.get("username", "")).lower(),
|
||||
)
|
||||
)
|
||||
for item in contacts:
|
||||
item.pop("_sortTs", None)
|
||||
return contacts
|
||||
|
||||
|
||||
def _build_counts(contacts: list[dict[str, Any]]) -> dict[str, int]:
|
||||
counts = {
|
||||
"friends": 0,
|
||||
"groups": 0,
|
||||
"officials": 0,
|
||||
"total": 0,
|
||||
}
|
||||
for item in contacts:
|
||||
t = _normalize_text(item.get("type"))
|
||||
if t == "friend":
|
||||
counts["friends"] += 1
|
||||
elif t == "group":
|
||||
counts["groups"] += 1
|
||||
elif t == "official":
|
||||
counts["officials"] += 1
|
||||
counts["total"] = len(contacts)
|
||||
return counts
|
||||
|
||||
|
||||
def _build_export_contacts(
|
||||
contacts: list[dict[str, Any]],
|
||||
*,
|
||||
include_avatar_link: bool,
|
||||
) -> list[dict[str, Any]]:
|
||||
out: list[dict[str, Any]] = []
|
||||
for item in contacts:
|
||||
row = {
|
||||
"username": _normalize_text(item.get("username")),
|
||||
"displayName": _normalize_text(item.get("displayName")),
|
||||
"remark": _normalize_text(item.get("remark")),
|
||||
"nickname": _normalize_text(item.get("nickname")),
|
||||
"alias": _normalize_text(item.get("alias")),
|
||||
"type": _normalize_text(item.get("type")),
|
||||
"region": _normalize_text(item.get("region")),
|
||||
"country": _normalize_text(item.get("country")),
|
||||
"province": _normalize_text(item.get("province")),
|
||||
"city": _normalize_text(item.get("city")),
|
||||
"source": _normalize_text(item.get("source")),
|
||||
"sourceScene": _to_optional_int(item.get("sourceScene")),
|
||||
}
|
||||
if include_avatar_link:
|
||||
row["avatarLink"] = _normalize_text(item.get("avatarLink"))
|
||||
out.append(row)
|
||||
return out
|
||||
|
||||
|
||||
def _write_json_export(
|
||||
output_path: Path,
|
||||
*,
|
||||
account: str,
|
||||
contacts: list[dict[str, Any]],
|
||||
include_avatar_link: bool,
|
||||
keyword: str,
|
||||
contact_types: ContactTypeFilter,
|
||||
) -> None:
|
||||
payload = {
|
||||
"exportedAt": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
"account": account,
|
||||
"count": len(contacts),
|
||||
"filters": {
|
||||
"keyword": keyword,
|
||||
"contactTypes": {
|
||||
"friends": bool(contact_types.friends),
|
||||
"groups": bool(contact_types.groups),
|
||||
"officials": bool(contact_types.officials),
|
||||
},
|
||||
"includeAvatarLink": bool(include_avatar_link),
|
||||
},
|
||||
"contacts": contacts,
|
||||
}
|
||||
output_path.write_text(json.dumps(payload, ensure_ascii=False, indent=2), encoding="utf-8")
|
||||
|
||||
|
||||
def _write_csv_export(
|
||||
output_path: Path,
|
||||
*,
|
||||
contacts: list[dict[str, Any]],
|
||||
include_avatar_link: bool,
|
||||
) -> None:
|
||||
columns: list[tuple[str, str]] = [
|
||||
("username", "用户名"),
|
||||
("displayName", "显示名称"),
|
||||
("remark", "备注"),
|
||||
("nickname", "昵称"),
|
||||
("alias", "微信号"),
|
||||
("type", "类型"),
|
||||
("region", "地区"),
|
||||
("country", "国家/地区码"),
|
||||
("province", "省份"),
|
||||
("city", "城市"),
|
||||
("source", "来源"),
|
||||
("sourceScene", "来源场景码"),
|
||||
]
|
||||
if include_avatar_link:
|
||||
columns.append(("avatarLink", "头像链接"))
|
||||
|
||||
with output_path.open("w", encoding="utf-8-sig", newline="") as f:
|
||||
writer = csv.writer(f)
|
||||
writer.writerow([label for _, label in columns])
|
||||
for item in contacts:
|
||||
writer.writerow([_normalize_text(item.get(key, "")) for key, _ in columns])
|
||||
|
||||
|
||||
@router.get("/api/chat/contacts", summary="获取联系人列表")
|
||||
def list_chat_contacts(
|
||||
request: Request,
|
||||
account: Optional[str] = None,
|
||||
keyword: Optional[str] = None,
|
||||
include_friends: bool = True,
|
||||
include_groups: bool = True,
|
||||
include_officials: bool = True,
|
||||
):
|
||||
account_dir = _resolve_account_dir(account)
|
||||
base_url = str(request.base_url).rstrip("/")
|
||||
|
||||
contacts = _collect_contacts_for_account(
|
||||
account_dir=account_dir,
|
||||
base_url=base_url,
|
||||
keyword=keyword,
|
||||
include_friends=bool(include_friends),
|
||||
include_groups=bool(include_groups),
|
||||
include_officials=bool(include_officials),
|
||||
)
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"account": account_dir.name,
|
||||
"total": len(contacts),
|
||||
"counts": _build_counts(contacts),
|
||||
"contacts": contacts,
|
||||
}
|
||||
|
||||
|
||||
@router.post("/api/chat/contacts/export", summary="导出联系人")
def export_chat_contacts(request: Request, req: ContactExportRequest):
    account_dir = _resolve_account_dir(req.account)

    output_dir_raw = _normalize_text(req.output_dir)
    if not output_dir_raw:
        raise HTTPException(status_code=400, detail="output_dir is required.")

    output_dir = Path(output_dir_raw).expanduser()
    if not output_dir.is_absolute():
        raise HTTPException(status_code=400, detail="output_dir must be an absolute path.")

    try:
        output_dir.mkdir(parents=True, exist_ok=True)
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Failed to prepare output_dir: {e}")

    base_url = str(request.base_url).rstrip("/")
    contacts = _collect_contacts_for_account(
        account_dir=account_dir,
        base_url=base_url,
        keyword=req.keyword,
        include_friends=bool(req.contact_types.friends),
        include_groups=bool(req.contact_types.groups),
        include_officials=bool(req.contact_types.officials),
    )

    export_contacts = _build_export_contacts(
        contacts,
        include_avatar_link=bool(req.include_avatar_link),
    )

    fmt = _normalize_text(req.format).lower()
    if fmt not in {"json", "csv"}:
        raise HTTPException(status_code=400, detail="Unsupported format, use 'json' or 'csv'.")

    ts = datetime.now().strftime("%Y%m%d_%H%M%S")
    safe_account = _safe_export_part(account_dir.name)
    output_path = output_dir / f"contacts_{safe_account}_{ts}.{fmt}"

    try:
        if fmt == "json":
            _write_json_export(
                output_path,
                account=account_dir.name,
                contacts=export_contacts,
                include_avatar_link=bool(req.include_avatar_link),
                keyword=_normalize_text(req.keyword),
                contact_types=req.contact_types,
            )
        else:
            _write_csv_export(
                output_path,
                contacts=export_contacts,
                include_avatar_link=bool(req.include_avatar_link),
            )
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to export contacts: {e}")

    return {
        "status": "success",
        "account": account_dir.name,
        "format": fmt,
        "outputPath": str(output_path),
        "count": len(export_contacts),
    }
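A matching sketch for the export endpoint; the field names follow ContactExportRequest as used above, while the concrete values (account name, Windows path) are assumptions:

import requests

resp = requests.post(
    "http://127.0.0.1:8000/api/chat/contacts/export",
    json={
        "account": "wxid_example",                          # hypothetical account
        "output_dir": "C:/wechat_exports",                  # must be an absolute path
        "format": "csv",                                    # "json" or "csv"
        "include_avatar_link": True,
        "contact_types": {"friends": True, "groups": True, "officials": False},
    },
    timeout=60,
)
print(resp.json()["outputPath"])
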
@@ -27,15 +27,16 @@ class ChatExportCreateRequest(BaseModel):
    end_time: Optional[int] = Field(None, description="结束时间(Unix 秒,含)")
    include_hidden: bool = Field(False, description="是否包含隐藏会话(scope!=selected 时)")
    include_official: bool = Field(False, description="是否包含公众号/官方账号会话(scope!=selected 时)")
    include_media: bool = Field(True, description="是否打包离线媒体(图片/表情/视频/语音/文件)")
    include_media: bool = Field(True, description="是否允许打包离线媒体(最终仍受 message_types 与 privacy_mode 约束)")
    media_kinds: list[MediaKind] = Field(
        default_factory=lambda: ["image", "emoji", "video", "video_thumb", "voice", "file"],
        description="打包的媒体类型",
        description="允许打包的媒体类型(最终仍受 message_types 勾选约束)",
    )
    message_types: list[MessageType] = Field(
        default_factory=list,
        description="导出消息类型(renderType)过滤:为空=导出全部消息;可多选(如仅 voice / 仅 transfer / 仅 redPacket 等)",
        description="导出消息类型(renderType)过滤:为空=导出全部类型;不为空时,仅导出勾选类型",
    )
    output_dir: Optional[str] = Field(None, description="导出目录绝对路径(可选;不填时使用默认目录)")
    allow_process_key_extract: bool = Field(
        False,
        description="预留字段:本项目不从微信进程提取媒体密钥,请使用 wx_key 获取并保存/批量解密",

@@ -61,6 +62,7 @@ async def create_chat_export(req: ChatExportCreateRequest):
        include_media=req.include_media,
        media_kinds=req.media_kinds,
        message_types=req.message_types,
        output_dir=req.output_dir,
        allow_process_key_extract=req.allow_process_key_extract,
        privacy_mode=req.privacy_mode,
        file_name=req.file_name,
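Concretely, under these sharpened semantics a request body like the following sketch (values illustrative) would export only voice messages and pack only voice media, regardless of the broader media_kinds default:

export_request = {
    "include_media": True,
    "media_kinds": ["voice"],          # allowed kinds; still gated by message_types
    "message_types": ["voice"],        # non-empty => only these renderTypes are exported
    "output_dir": None,                # None/absent => default export directory
}
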
@@ -8,7 +8,7 @@ import os
import sqlite3
import subprocess
from pathlib import Path
from typing import Optional
from typing import Any, Optional
from urllib.parse import urlparse

import requests

@@ -16,6 +16,21 @@ from fastapi import APIRouter, HTTPException
from fastapi.responses import FileResponse, Response
from pydantic import BaseModel, Field

from ..avatar_cache import (
    AVATAR_CACHE_TTL_SECONDS,
    avatar_cache_entry_file_exists,
    avatar_cache_entry_is_fresh,
    build_avatar_cache_response_headers,
    cache_key_for_avatar_user,
    cache_key_for_avatar_url,
    get_avatar_cache_url_entry,
    get_avatar_cache_user_entry,
    is_avatar_cache_enabled,
    normalize_avatar_source_url,
    touch_avatar_cache_entry,
    upsert_avatar_cache_entry,
    write_avatar_cache_payload,
)
from ..logging_config import get_logger
from ..media_helpers import (
    _convert_silk_to_wav,

@@ -43,14 +58,56 @@ from ..media_helpers import (
    _try_find_decrypted_resource,
    _try_strip_media_prefix,
)
from ..chat_helpers import _extract_md5_from_packed_info
from ..chat_helpers import _extract_md5_from_packed_info, _load_contact_rows, _pick_avatar_url
from ..path_fix import PathFixRoute
from ..wcdb_realtime import WCDB_REALTIME, get_avatar_urls as _wcdb_get_avatar_urls

logger = get_logger(__name__)

router = APIRouter(route_class=PathFixRoute)

def _resolve_avatar_remote_url(*, account_dir: Path, username: str) -> str:
    u = str(username or "").strip()
    if not u:
        return ""

    # 1) contact.db first (cheap local lookup)
    try:
        rows = _load_contact_rows(account_dir / "contact.db", [u])
        row = rows.get(u)
        raw = str(_pick_avatar_url(row) or "").strip()
        if raw.lower().startswith(("http://", "https://")):
            return normalize_avatar_source_url(raw)
    except Exception:
        pass

    # 2) WCDB fallback (more complete on enterprise/openim IDs)
    try:
        wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir)
        with wcdb_conn.lock:
            mp = _wcdb_get_avatar_urls(wcdb_conn.handle, [u])
        wa = str(mp.get(u) or "").strip()
        if wa.lower().startswith(("http://", "https://")):
            return normalize_avatar_source_url(wa)
    except Exception:
        pass

    return ""

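Illustrative use of the helper; the account directory below is a placeholder, and real callers pass the resolved per-account directory:

from pathlib import Path

account_dir = Path("output") / "accounts" / "wxid_example"    # hypothetical layout
url = _resolve_avatar_remote_url(account_dir=account_dir, username="wxid_friend001")
print(url or "<no remote avatar url>")                        # normalized http(s) URL, or ""
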
def _parse_304_headers(headers: Any) -> tuple[str, str]:
    try:
        etag = str((headers or {}).get("ETag") or "").strip()
    except Exception:
        etag = ""
    try:
        last_modified = str((headers or {}).get("Last-Modified") or "").strip()
    except Exception:
        last_modified = ""
    return etag, last_modified

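The two values extracted here feed standard HTTP conditional requests. A self-contained sketch of that revalidation pattern, with a placeholder URL and hypothetical cached validators:

import requests

cached_etag = '"abc123"'                                    # from a prior response (hypothetical)
cached_last_modified = "Mon, 01 Jan 2024 00:00:00 GMT"
r = requests.get(
    "https://example.com/avatar.jpg",                       # placeholder URL
    headers={
        "If-None-Match": cached_etag,
        "If-Modified-Since": cached_last_modified,
    },
    timeout=20,
)
if r.status_code == 304:
    print("not modified; keep serving the cached file")
else:
    print("fresh payload:", len(r.content), "bytes")
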
@lru_cache(maxsize=4096)
def _fast_probe_image_path_in_chat_attach(
    *,
@@ -267,27 +324,309 @@ async def get_chat_avatar(username: str, account: Optional[str] = None):
    if not username:
        raise HTTPException(status_code=400, detail="Missing username.")
    account_dir = _resolve_account_dir(account)
    account_name = str(account_dir.name or "").strip()
    user_key = str(username or "").strip()

    # 1) Try on-disk cache first (fast path)
    user_entry = None
    cached_file = None
    if is_avatar_cache_enabled() and account_name and user_key:
        try:
            user_entry = get_avatar_cache_user_entry(account_name, user_key)
            cached_file = avatar_cache_entry_file_exists(account_name, user_entry)
            if cached_file is not None:
                logger.info(f"[avatar_cache_hit] kind=user account={account_name} username={user_key}")
        except Exception as e:
            logger.warning(f"[avatar_cache_error] read user cache failed account={account_name} username={user_key} err={e}")

    head_image_db_path = account_dir / "head_image.db"
    if not head_image_db_path.exists():
        # No local head_image.db: allow fallback from cached/remote URL path.
        if cached_file is not None and user_entry:
            headers = build_avatar_cache_response_headers(user_entry)
            return FileResponse(
                str(cached_file),
                media_type=str(user_entry.get("media_type") or "application/octet-stream"),
                headers=headers,
            )
        raise HTTPException(status_code=404, detail="head_image.db not found.")

    conn = sqlite3.connect(str(head_image_db_path))
    try:
        row = conn.execute(
            "SELECT image_buffer FROM head_image WHERE username = ? ORDER BY update_time DESC LIMIT 1",
        meta = conn.execute(
            "SELECT md5, update_time FROM head_image WHERE username = ? ORDER BY update_time DESC LIMIT 1",
            (username,),
        ).fetchone()
        if meta and meta[0] is not None:
            db_md5 = str(meta[0] or "").strip().lower()
            try:
                db_update_time = int(meta[1] or 0)
            except Exception:
                db_update_time = 0

            # Cache still valid against head_image metadata.
            if cached_file is not None and user_entry:
                cached_md5 = str(user_entry.get("source_md5") or "").strip().lower()
                try:
                    cached_update = int(user_entry.get("source_update_time") or 0)
                except Exception:
                    cached_update = 0
                if cached_md5 == db_md5 and cached_update == db_update_time:
                    touch_avatar_cache_entry(account_name, str(user_entry.get("cache_key") or ""))
                    headers = build_avatar_cache_response_headers(user_entry)
                    return FileResponse(
                        str(cached_file),
                        media_type=str(user_entry.get("media_type") or "application/octet-stream"),
                        headers=headers,
                    )

            # Refresh from blob (changed or first-load)
            row = conn.execute(
                "SELECT image_buffer FROM head_image WHERE username = ? ORDER BY update_time DESC LIMIT 1",
                (username,),
            ).fetchone()
            if row and row[0] is not None:
                data = bytes(row[0]) if isinstance(row[0], (memoryview, bytearray)) else row[0]
                if not isinstance(data, (bytes, bytearray)):
                    data = bytes(data)
                if data:
                    media_type = _detect_image_media_type(data)
                    media_type = media_type if media_type.startswith("image/") else "application/octet-stream"
                    entry, out_path = write_avatar_cache_payload(
                        account_name,
                        source_kind="user",
                        username=user_key,
                        payload=bytes(data),
                        media_type=media_type,
                        source_md5=db_md5,
                        source_update_time=db_update_time,
                        ttl_seconds=AVATAR_CACHE_TTL_SECONDS,
                    )
                    if entry and out_path:
                        logger.info(
                            f"[avatar_cache_download] kind=user account={account_name} username={user_key} src=head_image"
                        )
                        headers = build_avatar_cache_response_headers(entry)
                        return FileResponse(str(out_path), media_type=media_type, headers=headers)

                    # cache write failed: fallback to response bytes
                    logger.warning(
                        f"[avatar_cache_error] kind=user account={account_name} username={user_key} action=write_fallback"
                    )
                    return Response(content=bytes(data), media_type=media_type)

        # meta not found (no local avatar blob)
        row = None
    finally:
        conn.close()

    if not row or row[0] is None:
        raise HTTPException(status_code=404, detail="Avatar not found.")
    # 2) Fallback: remote avatar URL (contact/WCDB), cache by URL.
    remote_url = _resolve_avatar_remote_url(account_dir=account_dir, username=user_key)
    if remote_url and is_avatar_cache_enabled():
        url_entry = get_avatar_cache_url_entry(account_name, remote_url)
        url_file = avatar_cache_entry_file_exists(account_name, url_entry)
        if url_entry and url_file and avatar_cache_entry_is_fresh(url_entry):
            logger.info(f"[avatar_cache_hit] kind=url account={account_name} username={user_key}")
            touch_avatar_cache_entry(account_name, str(url_entry.get("cache_key") or ""))
            # Keep user-key mapping aligned, so next user lookup is direct.
            try:
                upsert_avatar_cache_entry(
                    account_name,
                    cache_key=cache_key_for_avatar_user(user_key),
                    source_kind="user",
                    username=user_key,
                    source_url=remote_url,
                    source_md5=str(url_entry.get("source_md5") or ""),
                    source_update_time=int(url_entry.get("source_update_time") or 0),
                    rel_path=str(url_entry.get("rel_path") or ""),
                    media_type=str(url_entry.get("media_type") or "application/octet-stream"),
                    size_bytes=int(url_entry.get("size_bytes") or 0),
                    etag=str(url_entry.get("etag") or ""),
                    last_modified=str(url_entry.get("last_modified") or ""),
                    fetched_at=int(url_entry.get("fetched_at") or 0),
                    checked_at=int(url_entry.get("checked_at") or 0),
                    expires_at=int(url_entry.get("expires_at") or 0),
                )
            except Exception:
                pass
            headers = build_avatar_cache_response_headers(url_entry)
            return FileResponse(
                str(url_file),
                media_type=str(url_entry.get("media_type") or "application/octet-stream"),
                headers=headers,
            )

    data = bytes(row[0]) if isinstance(row[0], (memoryview, bytearray)) else row[0]
    if not isinstance(data, (bytes, bytearray)):
        data = bytes(data)
    media_type = _detect_image_media_type(data)
    return Response(content=data, media_type=media_type)
        # Revalidate / download remote avatar
        def _download_remote_avatar(
            source_url: str,
            *,
            etag: str,
            last_modified: str,
        ) -> tuple[bytes, str, str, str, bool]:
            base_headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120 Safari/537.36",
                "Accept": "image/avif,image/webp,image/apng,image/*,*/*;q=0.8",
            }

            header_variants = [
                {
                    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 MicroMessenger/7.0.20.1781(0x6700143B) WindowsWechat(0x63090719) XWEB/8351",
                    "Accept": "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8",
                    "Accept-Language": "zh-CN,zh;q=0.9",
                    "Referer": "https://servicewechat.com/",
                    "Origin": "https://servicewechat.com",
                    "Range": "bytes=0-",
                },
                {"Referer": "https://wx.qq.com/", "Origin": "https://wx.qq.com"},
                {"Referer": "https://mp.weixin.qq.com/", "Origin": "https://mp.weixin.qq.com"},
                {"Referer": "https://www.baidu.com/", "Origin": "https://www.baidu.com"},
                {},
            ]

            last_err: Exception | None = None
            for extra in header_variants:
                headers = dict(base_headers)
                headers.update(extra)
                if etag:
                    headers["If-None-Match"] = etag
                if last_modified:
                    headers["If-Modified-Since"] = last_modified

                r = requests.get(source_url, headers=headers, timeout=20, stream=True)
                try:
                    if r.status_code == 304:
                        e2, lm2 = _parse_304_headers(r.headers)
                        return b"", "", (e2 or etag), (lm2 or last_modified), True
                    r.raise_for_status()
                    content_type = str(r.headers.get("Content-Type") or "").strip()
                    e2, lm2 = _parse_304_headers(r.headers)
                    max_bytes = 10 * 1024 * 1024
                    chunks: list[bytes] = []
                    total = 0
                    for ch in r.iter_content(chunk_size=64 * 1024):
                        if not ch:
                            continue
                        chunks.append(ch)
                        total += len(ch)
                        if total > max_bytes:
                            raise HTTPException(status_code=400, detail="Avatar too large (>10MB).")
                    return b"".join(chunks), content_type, e2, lm2, False
                except HTTPException:
                    raise
                except Exception as e:
                    last_err = e
                finally:
                    try:
                        r.close()
                    except Exception:
                        pass

            raise last_err or RuntimeError("avatar remote download failed")

        etag0 = str((url_entry or {}).get("etag") or "").strip()
        lm0 = str((url_entry or {}).get("last_modified") or "").strip()
        try:
            payload, ct, etag_new, lm_new, not_modified = await asyncio.to_thread(
                _download_remote_avatar,
                remote_url,
                etag=etag0,
                last_modified=lm0,
            )
        except Exception as e:
            logger.warning(f"[avatar_cache_error] kind=url account={account_name} username={user_key} err={e}")
            if url_entry and url_file:
                headers = build_avatar_cache_response_headers(url_entry)
                return FileResponse(
                    str(url_file),
                    media_type=str(url_entry.get("media_type") or "application/octet-stream"),
                    headers=headers,
                )
            raise HTTPException(status_code=404, detail="Avatar not found.")

        if not_modified and url_entry and url_file:
            touch_avatar_cache_entry(account_name, cache_key_for_avatar_url(remote_url))
            if etag_new or lm_new:
                try:
                    upsert_avatar_cache_entry(
                        account_name,
                        cache_key=cache_key_for_avatar_url(remote_url),
                        source_kind="url",
                        username=user_key,
                        source_url=remote_url,
                        source_md5=str(url_entry.get("source_md5") or ""),
                        source_update_time=int(url_entry.get("source_update_time") or 0),
                        rel_path=str(url_entry.get("rel_path") or ""),
                        media_type=str(url_entry.get("media_type") or "application/octet-stream"),
                        size_bytes=int(url_entry.get("size_bytes") or 0),
                        etag=etag_new or etag0,
                        last_modified=lm_new or lm0,
                    )
                except Exception:
                    pass
            logger.info(f"[avatar_cache_revalidate] kind=url account={account_name} username={user_key} status=304")
            headers = build_avatar_cache_response_headers(url_entry)
            return FileResponse(
                str(url_file),
                media_type=str(url_entry.get("media_type") or "application/octet-stream"),
                headers=headers,
            )

        if payload:
            payload2, media_type, _ext = _detect_media_type_and_ext(payload)
            if media_type == "application/octet-stream" and ct:
                try:
                    mt = ct.split(";")[0].strip()
                    if mt.startswith("image/"):
                        media_type = mt
                except Exception:
                    pass
            if str(media_type or "").startswith("image/"):
                entry, out_path = write_avatar_cache_payload(
                    account_name,
                    source_kind="url",
                    username=user_key,
                    source_url=remote_url,
                    payload=payload2,
                    media_type=media_type,
                    etag=etag_new,
                    last_modified=lm_new,
                    ttl_seconds=AVATAR_CACHE_TTL_SECONDS,
                )
                if entry and out_path:
                    # bind user-key record to same file for quicker next access
                    try:
                        upsert_avatar_cache_entry(
                            account_name,
                            cache_key=cache_key_for_avatar_user(user_key),
                            source_kind="user",
                            username=user_key,
                            source_url=remote_url,
                            source_md5=str(entry.get("source_md5") or ""),
                            source_update_time=int(entry.get("source_update_time") or 0),
                            rel_path=str(entry.get("rel_path") or ""),
                            media_type=str(entry.get("media_type") or "application/octet-stream"),
                            size_bytes=int(entry.get("size_bytes") or 0),
                            etag=str(entry.get("etag") or ""),
                            last_modified=str(entry.get("last_modified") or ""),
                            fetched_at=int(entry.get("fetched_at") or 0),
                            checked_at=int(entry.get("checked_at") or 0),
                            expires_at=int(entry.get("expires_at") or 0),
                        )
                    except Exception:
                        pass
                    logger.info(f"[avatar_cache_download] kind=url account={account_name} username={user_key}")
                    headers = build_avatar_cache_response_headers(entry)
                    return FileResponse(str(out_path), media_type=media_type, headers=headers)

    if cached_file is not None and user_entry:
        headers = build_avatar_cache_response_headers(user_entry)
        return FileResponse(
            str(cached_file),
            media_type=str(user_entry.get("media_type") or "application/octet-stream"),
            headers=headers,
        )

    raise HTTPException(status_code=404, detail="Avatar not found.")

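Taken together, the rewritten get_chat_avatar resolves in this order: fresh user-keyed cache file, then the head_image.db blob revalidated by (md5, update_time), then a remote URL from contact.db/WCDB with ETag/Last-Modified revalidation, then any remaining cached file, and finally 404. A client sketch follows; the route path is an assumption, since it is not visible in this hunk:

import requests

r = requests.get(
    "http://127.0.0.1:8000/api/chat/avatar",                # assumed route path
    params={"username": "wxid_friend001", "account": "wxid_example"},
    timeout=30,
)
if r.ok:
    with open("avatar.bin", "wb") as fh:
        fh.write(r.content)
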
class EmojiDownloadRequest(BaseModel):
@@ -434,7 +773,25 @@ async def proxy_image(url: str):
    if not _is_allowed_proxy_image_host(host):
        raise HTTPException(status_code=400, detail="Unsupported url host for proxy_image.")

    def _download_bytes() -> tuple[bytes, str]:
    source_url = normalize_avatar_source_url(u)
    proxy_account = "_proxy"
    cache_entry = get_avatar_cache_url_entry(proxy_account, source_url) if is_avatar_cache_enabled() else None
    cache_file = avatar_cache_entry_file_exists(proxy_account, cache_entry)
    if cache_entry and cache_file and avatar_cache_entry_is_fresh(cache_entry):
        logger.info(f"[avatar_cache_hit] kind=proxy_url account={proxy_account}")
        touch_avatar_cache_entry(proxy_account, cache_key_for_avatar_url(source_url))
        headers = build_avatar_cache_response_headers(cache_entry)
        return FileResponse(
            str(cache_file),
            media_type=str(cache_entry.get("media_type") or "application/octet-stream"),
            headers=headers,
        )

    def _download_bytes(
        *,
        if_none_match: str = "",
        if_modified_since: str = "",
    ) -> tuple[bytes, str, str, str, bool]:
        base_headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120 Safari/537.36",
            "Accept": "image/avif,image/webp,image/apng,image/*,*/*;q=0.8",

@@ -464,10 +821,20 @@ async def proxy_image(url: str):
        for extra in header_variants:
            headers = dict(base_headers)
            headers.update(extra)
            if if_none_match:
                headers["If-None-Match"] = if_none_match
            if if_modified_since:
                headers["If-Modified-Since"] = if_modified_since
            r = requests.get(u, headers=headers, timeout=20, stream=True)
            try:
                if r.status_code == 304:
                    etag0 = str(r.headers.get("ETag") or "").strip()
                    lm0 = str(r.headers.get("Last-Modified") or "").strip()
                    return b"", "", etag0, lm0, True
                r.raise_for_status()
                content_type = str(r.headers.get("Content-Type") or "").strip()
                etag0 = str(r.headers.get("ETag") or "").strip()
                lm0 = str(r.headers.get("Last-Modified") or "").strip()
                max_bytes = 10 * 1024 * 1024
                chunks: list[bytes] = []
                total = 0

@@ -478,7 +845,7 @@ async def proxy_image(url: str):
                    total += len(ch)
                    if total > max_bytes:
                        raise HTTPException(status_code=400, detail="Proxy image too large (>10MB).")
                return b"".join(chunks), content_type
                return b"".join(chunks), content_type, etag0, lm0, False
            except HTTPException:
                # Hard failure, don't retry with another referer.
                raise

@@ -493,14 +860,50 @@ async def proxy_image(url: str):
        # All variants failed.
        raise last_err or RuntimeError("proxy_image download failed")

    etag0 = str((cache_entry or {}).get("etag") or "").strip()
    lm0 = str((cache_entry or {}).get("last_modified") or "").strip()
    try:
        data, ct = await asyncio.to_thread(_download_bytes)
        data, ct, etag_new, lm_new, not_modified = await asyncio.to_thread(
            _download_bytes,
            if_none_match=etag0,
            if_modified_since=lm0,
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.warning(f"proxy_image failed: url={u} err={e}")
        if cache_entry and cache_file:
            headers = build_avatar_cache_response_headers(cache_entry)
            return FileResponse(
                str(cache_file),
                media_type=str(cache_entry.get("media_type") or "application/octet-stream"),
                headers=headers,
            )
        raise HTTPException(status_code=502, detail=f"Proxy image failed: {e}")

    if not_modified and cache_entry and cache_file:
        logger.info(f"[avatar_cache_revalidate] kind=proxy_url account={proxy_account} status=304")
        upsert_avatar_cache_entry(
            proxy_account,
            cache_key=cache_key_for_avatar_url(source_url),
            source_kind="url",
            source_url=source_url,
            username="",
            source_md5=str(cache_entry.get("source_md5") or ""),
            source_update_time=int(cache_entry.get("source_update_time") or 0),
            rel_path=str(cache_entry.get("rel_path") or ""),
            media_type=str(cache_entry.get("media_type") or "application/octet-stream"),
            size_bytes=int(cache_entry.get("size_bytes") or 0),
            etag=etag_new or etag0,
            last_modified=lm_new or lm0,
        )
        headers = build_avatar_cache_response_headers(cache_entry)
        return FileResponse(
            str(cache_file),
            media_type=str(cache_entry.get("media_type") or "application/octet-stream"),
            headers=headers,
        )

    if not data:
        raise HTTPException(status_code=502, detail="Proxy returned empty body.")

@@ -518,8 +921,24 @@ async def proxy_image(url: str):
    if not str(media_type or "").startswith("image/"):
        raise HTTPException(status_code=502, detail="Proxy did not return an image.")

    if is_avatar_cache_enabled():
        entry, out_path = write_avatar_cache_payload(
            proxy_account,
            source_kind="url",
            source_url=source_url,
            payload=payload,
            media_type=media_type,
            etag=etag_new,
            last_modified=lm_new,
            ttl_seconds=AVATAR_CACHE_TTL_SECONDS,
        )
        if entry and out_path:
            logger.info(f"[avatar_cache_download] kind=proxy_url account={proxy_account}")
            headers = build_avatar_cache_response_headers(entry)
            return FileResponse(str(out_path), media_type=media_type, headers=headers)

    resp = Response(content=payload, media_type=media_type)
    resp.headers["Cache-Control"] = "public, max-age=86400"
    resp.headers["Cache-Control"] = f"public, max-age={AVATAR_CACHE_TTL_SECONDS}"
    return resp

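proxy_image now shares the same cache/revalidate machinery under the pseudo-account "_proxy". A client sketch; both the route path and the upstream URL are placeholders:

import requests

r = requests.get(
    "http://127.0.0.1:8000/api/chat/proxy_image",           # assumed route path
    params={"url": "https://wx.qlogo.cn/mmhead/example/0"},  # placeholder upstream avatar URL
    timeout=30,
)
print(r.headers.get("Content-Type"), len(r.content))
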
@@ -17,7 +17,6 @@ from ...chat_helpers import (
    _decode_sqlite_text,
    _iter_message_db_paths,
    _load_contact_rows,
    _pick_avatar_url,
    _pick_display_name,
    _quote_ident,
    _should_keep_session,

@@ -701,7 +700,7 @@ def build_card_00_global_overview(
        u, cnt = stats.top_contact
        row = contact_rows.get(u)
        display = _pick_display_name(row, u)
        avatar = _pick_avatar_url(row) or (_build_avatar_url(str(account_dir.name or ""), u) if u else "")
        avatar = _build_avatar_url(str(account_dir.name or ""), u) if u else ""
        top_contact_obj = {
            "username": u,
            "displayName": display,

@@ -716,7 +715,7 @@
        u, cnt = stats.top_group
        row = contact_rows.get(u)
        display = _pick_display_name(row, u)
        avatar = _pick_avatar_url(row) or (_build_avatar_url(str(account_dir.name or ""), u) if u else "")
        avatar = _build_avatar_url(str(account_dir.name or ""), u) if u else ""
        top_group_obj = {
            "username": u,
            "displayName": display,

@@ -14,7 +14,6 @@ from ...chat_helpers import (
    _build_avatar_url,
    _iter_message_db_paths,
    _load_contact_rows,
    _pick_avatar_url,
    _pick_display_name,
    _quote_ident,
    _row_to_search_hit,

@@ -713,7 +712,7 @@ def _fetch_message_moment_payload(

    contact_row = contact_rows.get(username)
    display = _pick_display_name(contact_row, username)
    avatar = _pick_avatar_url(contact_row) or (_build_avatar_url(str(account_dir.name or ""), username) if username else "")
    avatar = _build_avatar_url(str(account_dir.name or ""), username) if username else ""

    return {
        "timestamp": int(ref.ts),

@@ -12,7 +12,6 @@ from typing import Any, Optional
from ...chat_helpers import (
    _build_avatar_url,
    _load_contact_rows,
    _pick_avatar_url,
    _pick_display_name,
    _should_keep_session,
)

@@ -385,7 +384,7 @@ def compute_reply_speed_stats(*, account_dir: Path, year: int) -> dict[str, Any]
    def conv_to_obj(score: float | None, agg: _ConvAgg) -> dict[str, Any]:
        row = contact_rows.get(agg.username)
        display = _pick_display_name(row, agg.username)
        avatar = _pick_avatar_url(row) or (_build_avatar_url(str(account_dir.name or ""), agg.username) if agg.username else "")
        avatar = _build_avatar_url(str(account_dir.name or ""), agg.username) if agg.username else ""
        avg_s = agg.avg_gap()
        out: dict[str, Any] = {
            "username": agg.username,

@@ -420,7 +419,7 @@ def compute_reply_speed_stats(*, account_dir: Path, year: int) -> dict[str, Any]
    else:
        row = contact_rows.get(global_fastest_u)
        display = _pick_display_name(row, global_fastest_u)
        avatar = _pick_avatar_url(row) or (_build_avatar_url(str(account_dir.name or ""), global_fastest_u) if global_fastest_u else "")
        avatar = _build_avatar_url(str(account_dir.name or ""), global_fastest_u) if global_fastest_u else ""
        fastest_obj = {
            "username": global_fastest_u,
            "displayName": display,

@@ -440,7 +439,7 @@ def compute_reply_speed_stats(*, account_dir: Path, year: int) -> dict[str, Any]
    else:
        row = contact_rows.get(global_slowest_u)
        display = _pick_display_name(row, global_slowest_u)
        avatar = _pick_avatar_url(row) or (_build_avatar_url(str(account_dir.name or ""), global_slowest_u) if global_slowest_u else "")
        avatar = _build_avatar_url(str(account_dir.name or ""), global_slowest_u) if global_slowest_u else ""
        slowest_obj = {
            "username": global_slowest_u,
            "displayName": display,

@@ -547,7 +546,7 @@ def compute_reply_speed_stats(*, account_dir: Path, year: int) -> dict[str, Any]

        row = contact_rows.get(u)
        display = _pick_display_name(row, u)
        avatar = _pick_avatar_url(row) or (_build_avatar_url(str(account_dir.name or ""), u) if u else "")
        avatar = _build_avatar_url(str(account_dir.name or ""), u) if u else ""
        series.append(
            {
                "username": u,

@@ -595,7 +594,7 @@ def compute_reply_speed_stats(*, account_dir: Path, year: int) -> dict[str, Any]
        if not u:
            continue
        display = _pick_display_name(r, u)
        avatar = _pick_avatar_url(r) or (_build_avatar_url(str(account_dir.name or ""), u) if u else "")
        avatar = _build_avatar_url(str(account_dir.name or ""), u) if u else ""
        all_contacts_list.append({
            "username": u,
            "displayName": display,