Mirror of https://github.com/LifeArchiveProject/WeChatDataAnalysis.git (synced 2026-02-19 14:20:51 +08:00)
Merge remote-tracking branch 'upstream/main' into feat/wx-key
22 README.md
@@ -5,7 +5,7 @@
<div align="center">
<h1>WeChatDataAnalysis - 微信数据库解密与分析工具</h1>
<p>一个专门用于微信4.x版本数据库解密的工具(支持聊天记录实时更新)</p>
<p><b>特别致谢</b>:<a href="https://github.com/ycccccccy/echotrace">echotrace</a>(本项目大量功能参考其实现,提供了重要技术支持)</p>
<p><b>特别致谢</b>:<a href="https://github.com/ycccccccy/echotrace">echotrace</a>、<a href="https://github.com/hicccc77/WeFlow">WeFlow</a>(本项目大量功能参考其实现,提供了重要技术支持)</p>
<img src="https://img.shields.io/github/v/tag/LifeArchiveProject/WeChatDataAnalysis" alt="Version" />
<img src="https://img.shields.io/github/stars/LifeArchiveProject/WeChatDataAnalysis" alt="Stars" />
<img src="https://gh-down-badges.linkof.link/LifeArchiveProject/WeChatDataAnalysis" alt="Downloads" />
@@ -66,7 +66,7 @@

## 年度总结

年度总结现在支持 4 种不同风格(style1-4)。如果你对某个风格有更好的修改建议,或有新风格的点子,欢迎到 Issue 区反馈:https://github.com/LifeArchiveProject/WeChatDataAnalysis/issues
年度总结现在支持 3 种不同风格(style1、style2、style3)。如果你对某个风格有更好的修改建议,或有新风格的点子,欢迎到 Issue 区反馈:https://github.com/LifeArchiveProject/WeChatDataAnalysis/issues

> ⚠️ **提醒**:年度总结目前还不是最终版本,后续还会增加新总结或新风格。
@@ -82,12 +82,10 @@
<td><img src="frontend/public/style2.png" alt="年度总结 Style 2" width="400"/></td>
</tr>
<tr>
<td align="center"><b>Style 3</b></td>
<td align="center"><b>Style 4</b></td>
<td align="center" colspan="2"><b>Style 3</b></td>
</tr>
<tr>
<td><img src="frontend/public/style3.png" alt="年度总结 Style 3" width="400"/></td>
<td><img src="frontend/public/style4.png" alt="年度总结 Style 4" width="400"/></td>
<td align="center" colspan="2"><img src="frontend/public/style3.png" alt="年度总结 Style 3" width="400"/></td>
</tr>
</table>
@@ -196,19 +194,22 @@ npm run dist
1. **[echotrace](https://github.com/ycccccccy/echotrace)** - 微信数据解析/取证工具
   - 本项目大量功能参考并复用其实现思路,提供了重要技术支持

2. **[wx_key](https://github.com/ycccccccy/wx_key)** - 微信数据库与图片密钥提取工具
2. **[WeFlow](https://github.com/hicccc77/WeFlow)** - 微信数据分析工具
   - 提供了重要的功能参考和技术支持

3. **[wx_key](https://github.com/ycccccccy/wx_key)** - 微信数据库与图片密钥提取工具
   - 支持获取微信 4.x 数据库密钥与缓存图片密钥
   - 本项目推荐使用此工具获取密钥

3. **[wechat-dump-rs](https://github.com/0xlane/wechat-dump-rs)** - Rust实现的微信数据库解密工具
4. **[wechat-dump-rs](https://github.com/0xlane/wechat-dump-rs)** - Rust实现的微信数据库解密工具
   - 提供了SQLCipher 4.0解密的正确实现参考
   - 本项目的HMAC验证和页面处理逻辑基于此项目的实现

4. **[oh-my-wechat](https://github.com/chclt/oh-my-wechat)** - 微信聊天记录查看工具
5. **[oh-my-wechat](https://github.com/chclt/oh-my-wechat)** - 微信聊天记录查看工具
   - 提供了优秀的聊天记录界面设计参考
   - 本项目的聊天界面风格参考了此项目的实现

5. **[vue3-wechat-tool](https://github.com/Ele-Cat/vue3-wechat-tool)** - 微信聊天记录工具(Vue3)
6. **[vue3-wechat-tool](https://github.com/Ele-Cat/vue3-wechat-tool)** - 微信聊天记录工具(Vue3)
   - 提供了聊天记录展示与交互的实现参考
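The wechat-dump-rs entry above is credited for the SQLCipher 4.0 HMAC verification and page handling. As a rough illustration of what that check involves, here is a minimal Python sketch that validates a 64-hex WeChat 4.x key (for example one extracted with wx_key) against a database's first page. The page size, reserve size, XOR constant and iteration count are the SQLCipher 4 defaults referenced by wechat-dump-rs; treat this as an outline under those assumptions, not this project's actual implementation.

```python
# Hedged sketch: check a raw 32-byte (64-hex) key against a SQLCipher 4 database by
# recomputing the HMAC of page 1. Constants follow SQLCipher 4 defaults and may need
# adjusting for other builds.
import hashlib
import hmac

PAGE_SIZE = 4096      # SQLCipher 4 default page size
HMAC_KEY_ITER = 2     # PBKDF2 rounds used to derive the HMAC key from the page key
RESERVE = 80          # per-page reserve: 16-byte IV + 64-byte HMAC-SHA512
IV_SIZE = 16

def verify_wechat_v4_key(db_path: str, key_hex: str) -> bool:
    with open(db_path, "rb") as f:
        first_page = f.read(PAGE_SIZE)
    salt = first_page[:16]                    # encrypted DBs start with the KDF salt, not "SQLite format 3"
    page_key = bytes.fromhex(key_hex)         # raw-key mode: the hex key is used directly
    mac_salt = bytes(b ^ 0x3A for b in salt)  # HMAC salt = KDF salt XOR 0x3a
    mac_key = hashlib.pbkdf2_hmac("sha512", page_key, mac_salt, HMAC_KEY_ITER, dklen=64)

    data_end = PAGE_SIZE - RESERVE + IV_SIZE  # ciphertext plus per-page IV are covered by the HMAC
    mac = hmac.new(mac_key, first_page[16:data_end], hashlib.sha512)
    mac.update((1).to_bytes(4, "little"))     # page number, little-endian
    stored_mac = first_page[data_end:data_end + 64]
    return hmac.compare_digest(mac.digest(), stored_mac)
```

If the digest matches, the key is valid and each page can then be decrypted with AES-256-CBC using the per-page IV stored in the reserve area; a failed match is how a wrong or stale key gets rejected.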
## Star History

@@ -222,3 +223,4 @@ npm run dist
---

**免责声明**: 本工具仅供学习研究使用,使用者需自行承担使用风险。开发者不对因使用本工具造成的任何损失负责。
@@ -611,6 +611,25 @@ function registerWindowIpc() {
|
||||
return getCloseBehavior();
|
||||
}
|
||||
});
|
||||
|
||||
ipcMain.handle("dialog:chooseDirectory", async (_event, options) => {
|
||||
try {
|
||||
const result = await dialog.showOpenDialog({
|
||||
title: String(options?.title || "选择文件夹"),
|
||||
properties: ["openDirectory", "createDirectory"],
|
||||
});
|
||||
return {
|
||||
canceled: !!result?.canceled,
|
||||
filePaths: Array.isArray(result?.filePaths) ? result.filePaths : [],
|
||||
};
|
||||
} catch (err) {
|
||||
logMain(`[main] dialog:chooseDirectory failed: ${err?.message || err}`);
|
||||
return {
|
||||
canceled: true,
|
||||
filePaths: [],
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function main() {
|
||||
|
||||
@@ -11,4 +11,6 @@ contextBridge.exposeInMainWorld("wechatDesktop", {
|
||||
|
||||
getCloseBehavior: () => ipcRenderer.invoke("app:getCloseBehavior"),
|
||||
setCloseBehavior: (behavior) => ipcRenderer.invoke("app:setCloseBehavior", String(behavior || "")),
|
||||
|
||||
chooseDirectory: (options = {}) => ipcRenderer.invoke("dialog:chooseDirectory", options),
|
||||
});
|
||||
|
||||
@@ -30,7 +30,7 @@ onBeforeUnmount(() => {
|
||||
})
|
||||
|
||||
const route = useRoute()
|
||||
const isChatRoute = computed(() => route.path?.startsWith('/chat') || route.path?.startsWith('/sns'))
|
||||
const isChatRoute = computed(() => route.path?.startsWith('/chat') || route.path?.startsWith('/sns') || route.path?.startsWith('/contacts'))
|
||||
|
||||
const rootClass = computed(() => {
|
||||
const base = 'bg-gradient-to-br from-green-50 via-emerald-50 to-green-100'
|
||||
|
||||
@@ -730,35 +730,39 @@
|
||||
}
|
||||
|
||||
.header-btn {
|
||||
@apply flex items-center gap-1.5 text-xs px-3 py-1.5 rounded-lg bg-white border border-gray-200 text-gray-700 transition-all duration-200 disabled:opacity-50 disabled:cursor-not-allowed;
|
||||
@apply flex items-center gap-1.5 text-xs px-3 py-1.5 rounded-md bg-white border border-gray-200 text-gray-700 transition-all duration-150 disabled:opacity-50 disabled:cursor-not-allowed shadow-sm;
|
||||
}
|
||||
|
||||
.header-btn:hover:not(:disabled) {
|
||||
@apply bg-gray-50 border-gray-300;
|
||||
@apply bg-gray-50 border-gray-300 shadow;
|
||||
}
|
||||
|
||||
.header-btn:active:not(:disabled) {
|
||||
@apply bg-gray-100;
|
||||
@apply bg-gray-100 scale-95;
|
||||
}
|
||||
|
||||
.header-btn svg {
|
||||
@apply w-3.5 h-3.5;
|
||||
}
|
||||
|
||||
.header-btn-icon {
|
||||
@apply w-8 h-8 flex items-center justify-center rounded-lg bg-white border border-gray-200 text-gray-600 transition-all duration-200;
|
||||
@apply w-8 h-8 flex items-center justify-center rounded-lg bg-transparent border border-transparent text-gray-600 transition-all duration-200 disabled:opacity-50 disabled:cursor-not-allowed;
|
||||
}
|
||||
|
||||
.header-btn-icon:hover {
|
||||
@apply bg-gray-50 border-gray-300 text-gray-800;
|
||||
@apply bg-transparent border-transparent text-gray-800;
|
||||
}
|
||||
|
||||
.header-btn-icon-active {
|
||||
@apply bg-[#03C160]/10 border-[#03C160] text-[#03C160];
|
||||
@apply bg-transparent border-transparent text-[#03C160];
|
||||
}
|
||||
|
||||
.header-btn-icon-active:hover {
|
||||
@apply bg-[#03C160]/15;
|
||||
@apply bg-transparent;
|
||||
}
|
||||
|
||||
.message-filter-select {
|
||||
@apply text-xs px-2 py-1.5 rounded-lg bg-white border border-gray-200 text-gray-700 focus:outline-none focus:ring-2 focus:ring-[#03C160]/20 focus:border-[#03C160] transition-all disabled:opacity-50 disabled:cursor-not-allowed;
|
||||
@apply text-xs px-2 py-1.5 rounded-lg bg-transparent border-0 text-gray-700 focus:outline-none focus:ring-0 transition-all disabled:opacity-50 disabled:cursor-not-allowed;
|
||||
}
|
||||
|
||||
/* 搜索侧边栏样式 */
|
||||
|
||||
@@ -292,6 +292,7 @@ export const useApi = () => {
|
||||
message_types: Array.isArray(data.message_types) ? data.message_types : [],
|
||||
include_media: data.include_media == null ? true : !!data.include_media,
|
||||
media_kinds: Array.isArray(data.media_kinds) ? data.media_kinds : ['image', 'emoji', 'video', 'video_thumb', 'voice', 'file'],
|
||||
output_dir: data.output_dir == null ? null : String(data.output_dir || '').trim(),
|
||||
allow_process_key_extract: !!data.allow_process_key_extract,
|
||||
privacy_mode: !!data.privacy_mode,
|
||||
file_name: data.file_name || null
|
||||
@@ -313,6 +314,36 @@ export const useApi = () => {
|
||||
return await request(`/chat/exports/${encodeURIComponent(String(exportId))}`, { method: 'DELETE' })
|
||||
}
|
||||
|
||||
// 联系人
|
||||
const listChatContacts = async (params = {}) => {
|
||||
const query = new URLSearchParams()
|
||||
if (params && params.account) query.set('account', params.account)
|
||||
if (params && params.keyword) query.set('keyword', params.keyword)
|
||||
if (params && params.include_friends != null) query.set('include_friends', String(!!params.include_friends))
|
||||
if (params && params.include_groups != null) query.set('include_groups', String(!!params.include_groups))
|
||||
if (params && params.include_officials != null) query.set('include_officials', String(!!params.include_officials))
|
||||
const url = '/chat/contacts' + (query.toString() ? `?${query.toString()}` : '')
|
||||
return await request(url)
|
||||
}
|
||||
|
||||
const exportChatContacts = async (payload = {}) => {
|
||||
return await request('/chat/contacts/export', {
|
||||
method: 'POST',
|
||||
body: {
|
||||
account: payload.account || null,
|
||||
output_dir: payload.output_dir || '',
|
||||
format: payload.format || 'json',
|
||||
include_avatar_link: payload.include_avatar_link == null ? true : !!payload.include_avatar_link,
|
||||
keyword: payload.keyword || null,
|
||||
contact_types: {
|
||||
friends: payload?.contact_types?.friends == null ? true : !!payload.contact_types.friends,
|
||||
groups: payload?.contact_types?.groups == null ? true : !!payload.contact_types.groups,
|
||||
officials: payload?.contact_types?.officials == null ? true : !!payload.contact_types.officials,
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// WeChat Wrapped(年度总结)
|
||||
const getWrappedAnnual = async (params = {}) => {
|
||||
const query = new URLSearchParams()
|
||||
@@ -388,6 +419,8 @@ export const useApi = () => {
|
||||
getChatExport,
|
||||
listChatExports,
|
||||
cancelChatExport,
|
||||
listChatContacts,
|
||||
exportChatContacts,
|
||||
getWrappedAnnual,
|
||||
getWrappedAnnualMeta,
|
||||
getWrappedAnnualCard,
|
||||
|
||||
File diff suppressed because it is too large

572 frontend/pages/contacts.vue (new file)
@@ -0,0 +1,572 @@
|
||||
<template>
|
||||
<div class="h-screen flex overflow-hidden" style="background-color: #EDEDED">
|
||||
<div class="border-r border-gray-200 flex flex-col" style="background-color: #e8e7e7; width: 60px; min-width: 60px; max-width: 60px">
|
||||
<div class="flex-1 flex flex-col justify-start pt-0 gap-0">
|
||||
<div class="w-full h-[60px] flex items-center justify-center">
|
||||
<div class="w-[40px] h-[40px] rounded-md overflow-hidden bg-gray-300 flex-shrink-0">
|
||||
<img v-if="selfAvatarUrl" :src="selfAvatarUrl" alt="avatar" class="w-full h-full object-cover" />
|
||||
<div v-else class="w-full h-full flex items-center justify-center text-white text-xs font-bold" style="background-color: #4B5563">我</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="w-full h-[var(--sidebar-rail-step)] flex items-center justify-center cursor-pointer group" title="聊天" @click="goChat">
|
||||
<div class="w-[var(--sidebar-rail-btn)] h-[var(--sidebar-rail-btn)] rounded-md flex items-center justify-center transition-colors bg-transparent group-hover:bg-[#E1E1E1]">
|
||||
<div class="w-[var(--sidebar-rail-icon)] h-[var(--sidebar-rail-icon)]" :class="isChatRoute ? 'text-[#07b75b]' : 'text-[#5d5d5d]'">
|
||||
<svg class="w-full h-full" viewBox="0 0 24 24" fill="currentColor" aria-hidden="true">
|
||||
<path d="M12 19.8C17.52 19.8 22 15.99 22 11.3C22 6.6 17.52 2.8 12 2.8C6.48 2.8 2 6.6 2 11.3C2 13.29 2.8 15.12 4.15 16.57C4.6 17.05 4.82 17.29 4.92 17.44C5.14 17.79 5.21 17.99 5.23 18.4C5.24 18.59 5.22 18.81 5.16 19.26C5.1 19.75 5.07 19.99 5.13 20.16C5.23 20.49 5.53 20.71 5.87 20.72C6.04 20.72 6.27 20.63 6.72 20.43L8.07 19.86C8.43 19.71 8.61 19.63 8.77 19.59C8.95 19.55 9.04 19.54 9.22 19.54C9.39 19.53 9.64 19.57 10.14 19.65C10.74 19.75 11.37 19.8 12 19.8Z" />
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="w-full h-[var(--sidebar-rail-step)] flex items-center justify-center cursor-pointer group" title="朋友圈" @click="goSns">
|
||||
<div class="w-[var(--sidebar-rail-btn)] h-[var(--sidebar-rail-btn)] rounded-md flex items-center justify-center transition-colors bg-transparent group-hover:bg-[#E1E1E1]">
|
||||
<div class="w-[var(--sidebar-rail-icon)] h-[var(--sidebar-rail-icon)]" :class="isSnsRoute ? 'text-[#07b75b]' : 'text-[#5d5d5d]'">
|
||||
<svg class="w-full h-full" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round" aria-hidden="true">
|
||||
<circle cx="12" cy="12" r="10" />
|
||||
<line x1="14.31" y1="8" x2="20.05" y2="17.94" />
|
||||
<line x1="9.69" y1="8" x2="21.17" y2="8" />
|
||||
<line x1="7.38" y1="12" x2="13.12" y2="2.06" />
|
||||
<line x1="9.69" y1="16" x2="3.95" y2="6.06" />
|
||||
<line x1="14.31" y1="16" x2="2.83" y2="16" />
|
||||
<line x1="16.62" y1="12" x2="10.88" y2="21.94" />
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="w-full h-[var(--sidebar-rail-step)] flex items-center justify-center cursor-pointer group" title="联系人">
|
||||
<div class="w-[var(--sidebar-rail-btn)] h-[var(--sidebar-rail-btn)] rounded-md flex items-center justify-center transition-colors bg-transparent group-hover:bg-[#E1E1E1]">
|
||||
<div class="w-[var(--sidebar-rail-icon)] h-[var(--sidebar-rail-icon)] text-[#07b75b]">
|
||||
<svg class="w-full h-full" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" stroke-linecap="round" stroke-linejoin="round" aria-hidden="true">
|
||||
<path d="M17 21v-2a4 4 0 0 0-4-4H7a4 4 0 0 0-4 4v2" />
|
||||
<circle cx="10" cy="7" r="4" />
|
||||
<path d="M23 21v-2a4 4 0 0 0-3-3.87" />
|
||||
<path d="M16 3.13a4 4 0 0 1 0 7.75" />
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="w-full h-[var(--sidebar-rail-step)] flex items-center justify-center cursor-pointer group" title="年度总结" @click="goWrapped">
|
||||
<div class="w-[var(--sidebar-rail-btn)] h-[var(--sidebar-rail-btn)] rounded-md flex items-center justify-center transition-colors bg-transparent group-hover:bg-[#E1E1E1]">
|
||||
<div class="w-[var(--sidebar-rail-icon)] h-[var(--sidebar-rail-icon)]" :class="isWrappedRoute ? 'text-[#07b75b]' : 'text-[#5d5d5d]'">
|
||||
<svg class="w-full h-full" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round" aria-hidden="true">
|
||||
<rect x="4" y="4" width="16" height="16" rx="2" />
|
||||
<path d="M8 16v-5" />
|
||||
<path d="M12 16v-8" />
|
||||
<path d="M16 16v-3" />
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="w-full h-[var(--sidebar-rail-step)] flex items-center justify-center cursor-pointer group" @click="privacyMode = !privacyMode" :title="privacyMode ? '关闭隐私模式' : '开启隐私模式'">
|
||||
<div class="w-[var(--sidebar-rail-btn)] h-[var(--sidebar-rail-btn)] rounded-md flex items-center justify-center transition-colors bg-transparent group-hover:bg-[#E1E1E1]">
|
||||
<svg class="w-[var(--sidebar-rail-icon)] h-[var(--sidebar-rail-icon)]" :class="privacyMode ? 'text-[#07b75b]' : 'text-[#5d5d5d]'" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.5">
|
||||
<path v-if="privacyMode" stroke-linecap="round" stroke-linejoin="round" d="M3.98 8.223A10.477 10.477 0 001.934 12C3.226 16.338 7.244 19.5 12 19.5c.993 0 1.953-.138 2.863-.395M6.228 6.228A10.45 10.45 0 0112 4.5c4.756 0 8.773 3.162 10.065 7.498a10.523 10.523 0 01-4.293 5.774M6.228 6.228L3 3m3.228 3.228l3.65 3.65m7.894 7.894L21 21m-3.228-3.228l-3.65-3.65m0 0a3 3 0 10-4.243-4.243m4.242 4.242L9.88 9.88" />
|
||||
<path v-else stroke-linecap="round" stroke-linejoin="round" d="M2.036 12.322a1.012 1.012 0 010-.639C3.423 7.51 7.36 4.5 12 4.5c4.638 0 8.573 3.007 9.963 7.178.07.207.07.431 0 .639C20.577 16.49 16.64 19.5 12 19.5c-4.638 0-8.573-3.007-9.963-7.178z" />
|
||||
<circle v-if="!privacyMode" cx="12" cy="12" r="3" />
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex-1 flex flex-col min-h-0" style="background-color: #EDEDED">
|
||||
<DesktopTitleBar />
|
||||
<div class="flex-1 min-h-0 overflow-hidden p-4">
|
||||
<div class="h-full grid grid-cols-1 lg:grid-cols-[400px_minmax(0,1fr)] gap-4">
|
||||
<div class="bg-white border border-gray-200 rounded-lg flex flex-col min-h-0 overflow-hidden">
|
||||
<div class="p-3 border-b border-gray-200" style="background-color: #F7F7F7">
|
||||
<div class="flex items-center gap-2">
|
||||
<div class="contact-search-wrapper flex-1" :class="{ 'privacy-blur': privacyMode }">
|
||||
<svg class="contact-search-icon" fill="none" stroke="currentColor" viewBox="0 0 16 16">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5" d="M7.33333 12.6667C10.2789 12.6667 12.6667 10.2789 12.6667 7.33333C12.6667 4.38781 10.2789 2 7.33333 2C4.38781 2 2 4.38781 2 7.33333C2 10.2789 4.38781 12.6667 7.33333 12.6667Z" />
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="1.5" d="M14 14L11.1 11.1" />
|
||||
</svg>
|
||||
<input v-model="searchKeyword" class="contact-search-input" type="text" placeholder="搜索联系人" />
|
||||
<button v-if="searchKeyword" type="button" class="contact-search-clear" @click="searchKeyword = ''">
|
||||
<svg class="w-3.5 h-3.5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<select v-if="availableAccounts.length > 1" v-model="selectedAccount" class="account-select">
|
||||
<option v-for="acc in availableAccounts" :key="acc" :value="acc">{{ acc }}</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="px-3 py-2 border-b border-gray-200 bg-white flex items-center gap-4 text-sm">
|
||||
<label class="flex items-center gap-2">
|
||||
<input v-model="contactTypes.friends" type="checkbox" />
|
||||
<span>好友 {{ counts.friends }}</span>
|
||||
</label>
|
||||
<label class="flex items-center gap-2">
|
||||
<input v-model="contactTypes.groups" type="checkbox" />
|
||||
<span>群聊 {{ counts.groups }}</span>
|
||||
</label>
|
||||
<label class="flex items-center gap-2">
|
||||
<input v-model="contactTypes.officials" type="checkbox" />
|
||||
<span>公众号 {{ counts.officials }}</span>
|
||||
</label>
|
||||
<span class="ml-auto text-gray-500">总计 {{ counts.total }}</span>
|
||||
</div>
|
||||
|
||||
<div class="flex-1 min-h-0 overflow-auto">
|
||||
<div v-if="loading" class="p-4 text-sm text-gray-500">加载中…</div>
|
||||
<div v-else-if="error" class="p-4 text-sm text-red-500 whitespace-pre-wrap">{{ error }}</div>
|
||||
<div v-else-if="contacts.length === 0" class="p-4 text-sm text-gray-500">暂无联系人</div>
|
||||
<div v-else>
|
||||
<div
|
||||
v-for="contact in contacts"
|
||||
:key="contact.username"
|
||||
class="px-3 py-2 border-b border-gray-100 flex items-center gap-3"
|
||||
>
|
||||
<div class="w-10 h-10 rounded-md overflow-hidden bg-gray-300 shrink-0" :class="{ 'privacy-blur': privacyMode }">
|
||||
<img v-if="contact.avatar" :src="contact.avatar" :alt="contact.displayName" class="w-full h-full object-cover" referrerpolicy="no-referrer" />
|
||||
<div v-else class="w-full h-full flex items-center justify-center text-white text-xs font-bold" style="background-color:#4B5563">{{ contact.displayName?.charAt(0) || '?' }}</div>
|
||||
</div>
|
||||
<div class="min-w-0 flex-1" :class="{ 'privacy-blur': privacyMode }">
|
||||
<div class="text-sm text-gray-900 truncate">{{ contact.displayName }}</div>
|
||||
<div class="text-xs text-gray-500 truncate">{{ contact.username }}</div>
|
||||
<div class="text-[11px] text-gray-500 truncate" v-if="contact.type !== 'group' && (contact.region || contact.source)">
|
||||
<span v-if="contact.region">地区:{{ contact.region }}</span>
|
||||
<span v-if="contact.region && contact.source"> · </span>
|
||||
<span
|
||||
v-if="contact.source"
|
||||
:title="contact.sourceScene != null ? `来源场景码:${contact.sourceScene}` : ''"
|
||||
>来源:{{ contact.source }}</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="text-xs px-2 py-0.5 rounded" :class="typeBadgeClass(contact.type)">
|
||||
{{ typeLabel(contact.type) }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="bg-white border border-gray-200 rounded-lg p-4 flex flex-col gap-3">
|
||||
<div>
|
||||
<div class="text-base font-medium text-gray-900">导出联系人</div>
|
||||
<div class="text-xs text-gray-500 mt-1">支持 JSON / CSV,默认包含头像链接</div>
|
||||
</div>
|
||||
|
||||
<div class="space-y-2 text-sm">
|
||||
<div class="font-medium text-gray-800">导出格式</div>
|
||||
<label class="flex items-center gap-2"><input v-model="exportFormat" type="radio" value="json" /> JSON</label>
|
||||
<label class="flex items-center gap-2"><input v-model="exportFormat" type="radio" value="csv" /> CSV (Excel)</label>
|
||||
</div>
|
||||
|
||||
<div class="space-y-2 text-sm">
|
||||
<div class="font-medium text-gray-800">导出类型(多选)</div>
|
||||
<label class="flex items-center gap-2"><input v-model="exportTypes.friends" type="checkbox" /> 好友</label>
|
||||
<label class="flex items-center gap-2"><input v-model="exportTypes.groups" type="checkbox" /> 群聊</label>
|
||||
<label class="flex items-center gap-2"><input v-model="exportTypes.officials" type="checkbox" /> 公众号</label>
|
||||
</div>
|
||||
|
||||
<label class="flex items-center gap-2 text-sm">
|
||||
<input v-model="includeAvatarLink" type="checkbox" />
|
||||
导出头像链接
|
||||
</label>
|
||||
|
||||
<div class="space-y-2 text-sm">
|
||||
<div class="font-medium text-gray-800">导出目录</div>
|
||||
<div class="px-2 py-2 rounded border border-gray-200 bg-gray-50 text-xs break-all min-h-[38px]">{{ exportFolder || '未选择' }}</div>
|
||||
<button type="button" class="w-full px-3 py-2 rounded border border-gray-200 hover:bg-gray-50" @click="chooseExportFolder">选择文件夹</button>
|
||||
</div>
|
||||
|
||||
<button
|
||||
type="button"
|
||||
class="mt-2 w-full px-3 py-2 rounded text-white"
|
||||
:class="canExport && !exporting ? 'bg-[#03C160] hover:bg-[#02ad56]' : 'bg-gray-300 cursor-not-allowed'"
|
||||
:disabled="!canExport || exporting"
|
||||
@click="startExport"
|
||||
>
|
||||
{{ exporting ? '导出中…' : '开始导出' }}
|
||||
</button>
|
||||
|
||||
<div v-if="exportMsg" class="text-xs whitespace-pre-wrap" :class="exportOk ? 'text-green-600' : 'text-red-500'">{{ exportMsg }}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup>
|
||||
useHead({ title: '联系人 - 微信数据分析助手' })
|
||||
|
||||
const route = useRoute()
|
||||
const api = useApi()
|
||||
|
||||
const isChatRoute = computed(() => route.path?.startsWith('/chat'))
|
||||
const isSnsRoute = computed(() => route.path?.startsWith('/sns'))
|
||||
const isWrappedRoute = computed(() => route.path?.startsWith('/wrapped'))
|
||||
|
||||
const PRIVACY_MODE_KEY = 'ui.privacy_mode'
|
||||
const privacyMode = ref(false)
|
||||
onMounted(() => {
|
||||
if (!process.client) return
|
||||
try {
|
||||
privacyMode.value = localStorage.getItem(PRIVACY_MODE_KEY) === '1'
|
||||
} catch {}
|
||||
})
|
||||
watch(() => privacyMode.value, (v) => {
|
||||
if (!process.client) return
|
||||
try {
|
||||
localStorage.setItem(PRIVACY_MODE_KEY, v ? '1' : '0')
|
||||
} catch {}
|
||||
})
|
||||
|
||||
const sidebarMediaBase = process.client ? 'http://localhost:8000' : ''
|
||||
|
||||
const availableAccounts = ref([])
|
||||
const selectedAccount = ref(null)
|
||||
const searchKeyword = ref('')
|
||||
|
||||
const contactTypes = reactive({
|
||||
friends: true,
|
||||
groups: true,
|
||||
officials: true,
|
||||
})
|
||||
|
||||
const contacts = ref([])
|
||||
const counts = reactive({
|
||||
friends: 0,
|
||||
groups: 0,
|
||||
officials: 0,
|
||||
total: 0,
|
||||
})
|
||||
|
||||
const loading = ref(false)
|
||||
const error = ref('')
|
||||
|
||||
const exportFormat = ref('json')
|
||||
const includeAvatarLink = ref(true)
|
||||
const exportTypes = reactive({
|
||||
friends: true,
|
||||
groups: true,
|
||||
officials: true,
|
||||
})
|
||||
const exportFolder = ref('')
|
||||
const exportFolderHandle = ref(null)
|
||||
const exporting = ref(false)
|
||||
const exportMsg = ref('')
|
||||
const exportOk = ref(false)
|
||||
|
||||
const selfAvatarUrl = computed(() => {
|
||||
const acc = String(selectedAccount.value || '').trim()
|
||||
if (!acc) return ''
|
||||
return `${sidebarMediaBase}/api/chat/avatar?account=${encodeURIComponent(acc)}&username=${encodeURIComponent(acc)}`
|
||||
})
|
||||
|
||||
const typeLabel = (type) => {
|
||||
if (type === 'friend') return '好友'
|
||||
if (type === 'group') return '群聊'
|
||||
if (type === 'official') return '公众号'
|
||||
return '其他'
|
||||
}
|
||||
|
||||
const typeBadgeClass = (type) => {
|
||||
if (type === 'friend') return 'bg-blue-100 text-blue-700'
|
||||
if (type === 'group') return 'bg-green-100 text-green-700'
|
||||
if (type === 'official') return 'bg-orange-100 text-orange-700'
|
||||
return 'bg-gray-100 text-gray-600'
|
||||
}
|
||||
|
||||
const goChat = async () => {
|
||||
await navigateTo('/chat')
|
||||
}
|
||||
|
||||
const goSns = async () => {
|
||||
await navigateTo('/sns')
|
||||
}
|
||||
|
||||
const goWrapped = async () => {
|
||||
await navigateTo('/wrapped')
|
||||
}
|
||||
|
||||
const isDesktopExportRuntime = () => {
|
||||
return !!(process.client && window?.wechatDesktop?.chooseDirectory)
|
||||
}
|
||||
|
||||
const isWebDirectoryPickerSupported = () => {
|
||||
return !!(process.client && typeof window.showDirectoryPicker === 'function')
|
||||
}
|
||||
|
||||
const canExport = computed(() => {
|
||||
const hasExportTarget = isDesktopExportRuntime()
|
||||
? !!exportFolder.value
|
||||
: !!exportFolderHandle.value
|
||||
return !!selectedAccount.value && hasExportTarget && (exportTypes.friends || exportTypes.groups || exportTypes.officials)
|
||||
})
|
||||
|
||||
const safeExportPart = (value) => {
|
||||
const cleaned = String(value || '').trim().replace(/[^0-9A-Za-z._-]+/g, '_').replace(/^[._-]+|[._-]+$/g, '')
|
||||
return cleaned || 'account'
|
||||
}
|
||||
|
||||
const buildExportTimestamp = () => {
|
||||
const now = new Date()
|
||||
const pad = (n) => String(n).padStart(2, '0')
|
||||
return `${now.getFullYear()}${pad(now.getMonth() + 1)}${pad(now.getDate())}_${pad(now.getHours())}${pad(now.getMinutes())}${pad(now.getSeconds())}`
|
||||
}
|
||||
|
||||
const escapeCsvCell = (value) => {
|
||||
const text = String(value == null ? '' : value)
|
||||
if (/[",\n\r]/.test(text)) return `"${text.replace(/"/g, '""')}"`
|
||||
return text
|
||||
}
|
||||
|
||||
const buildExportContactsPayload = async () => {
|
||||
const resp = await api.listChatContacts({
|
||||
account: selectedAccount.value,
|
||||
keyword: searchKeyword.value || '',
|
||||
include_friends: exportTypes.friends,
|
||||
include_groups: exportTypes.groups,
|
||||
include_officials: exportTypes.officials,
|
||||
})
|
||||
const contactsList = Array.isArray(resp?.contacts) ? resp.contacts : []
|
||||
const exportContacts = contactsList.map((item) => {
|
||||
const row = {
|
||||
username: String(item?.username || ''),
|
||||
displayName: String(item?.displayName || ''),
|
||||
remark: String(item?.remark || ''),
|
||||
nickname: String(item?.nickname || ''),
|
||||
alias: String(item?.alias || ''),
|
||||
type: String(item?.type || ''),
|
||||
region: String(item?.region || ''),
|
||||
country: String(item?.country || ''),
|
||||
province: String(item?.province || ''),
|
||||
city: String(item?.city || ''),
|
||||
source: String(item?.source || ''),
|
||||
sourceScene: item?.sourceScene == null ? '' : String(item?.sourceScene),
|
||||
}
|
||||
if (includeAvatarLink.value) {
|
||||
row.avatarLink = String(item?.avatarLink || '')
|
||||
}
|
||||
return row
|
||||
})
|
||||
|
||||
return {
|
||||
account: String(selectedAccount.value || ''),
|
||||
count: exportContacts.length,
|
||||
contacts: exportContacts,
|
||||
}
|
||||
}
|
||||
|
||||
const writeWebExportFile = async ({ fileName, content }) => {
|
||||
if (!exportFolderHandle.value || typeof exportFolderHandle.value.getFileHandle !== 'function') {
|
||||
throw new Error('未选择浏览器导出目录')
|
||||
}
|
||||
const fileHandle = await exportFolderHandle.value.getFileHandle(fileName, { create: true })
|
||||
const writable = await fileHandle.createWritable()
|
||||
await writable.write(content)
|
||||
await writable.close()
|
||||
}
|
||||
|
||||
const exportContactsInWeb = async () => {
|
||||
const fmt = String(exportFormat.value || 'json').toLowerCase()
|
||||
if (fmt !== 'json' && fmt !== 'csv') {
|
||||
throw new Error('网页端仅支持 JSON/CSV 导出')
|
||||
}
|
||||
if (!exportFolderHandle.value) {
|
||||
throw new Error('请先选择导出目录')
|
||||
}
|
||||
|
||||
const payload = await buildExportContactsPayload()
|
||||
const fileName = `contacts_${safeExportPart(payload.account)}_${buildExportTimestamp()}.${fmt}`
|
||||
|
||||
if (fmt === 'json') {
|
||||
const jsonPayload = {
|
||||
exportedAt: new Date().toISOString().replace(/\.\d{3}Z$/, 'Z'),
|
||||
account: payload.account,
|
||||
count: payload.count,
|
||||
filters: {
|
||||
keyword: String(searchKeyword.value || ''),
|
||||
contactTypes: {
|
||||
friends: !!exportTypes.friends,
|
||||
groups: !!exportTypes.groups,
|
||||
officials: !!exportTypes.officials,
|
||||
},
|
||||
includeAvatarLink: !!includeAvatarLink.value,
|
||||
},
|
||||
contacts: payload.contacts,
|
||||
}
|
||||
await writeWebExportFile({ fileName, content: JSON.stringify(jsonPayload, null, 2) })
|
||||
} else {
|
||||
const columns = [
|
||||
['username', '用户名'],
|
||||
['displayName', '显示名称'],
|
||||
['remark', '备注'],
|
||||
['nickname', '昵称'],
|
||||
['alias', '微信号'],
|
||||
['type', '类型'],
|
||||
['region', '地区'],
|
||||
['country', '国家/地区码'],
|
||||
['province', '省份'],
|
||||
['city', '城市'],
|
||||
['source', '来源'],
|
||||
['sourceScene', '来源场景码'],
|
||||
]
|
||||
if (includeAvatarLink.value) {
|
||||
columns.push(['avatarLink', '头像链接'])
|
||||
}
|
||||
const lines = [columns.map(([, label]) => escapeCsvCell(label)).join(',')]
|
||||
for (const row of payload.contacts) {
|
||||
lines.push(columns.map(([key]) => escapeCsvCell(row[key])).join(','))
|
||||
}
|
||||
await writeWebExportFile({ fileName, content: `\uFEFF${lines.join('\n')}` })
|
||||
}
|
||||
|
||||
return {
|
||||
count: payload.count,
|
||||
outputPath: `${exportFolder.value}/${fileName}`,
|
||||
}
|
||||
}
|
||||
|
||||
const loadAccounts = async () => {
|
||||
try {
|
||||
const resp = await api.listChatAccounts()
|
||||
const accounts = resp?.accounts || []
|
||||
availableAccounts.value = accounts
|
||||
selectedAccount.value = selectedAccount.value || resp?.default_account || accounts[0] || null
|
||||
} catch (e) {
|
||||
availableAccounts.value = []
|
||||
selectedAccount.value = null
|
||||
}
|
||||
}
|
||||
|
||||
const loadContacts = async () => {
|
||||
if (!selectedAccount.value) {
|
||||
contacts.value = []
|
||||
counts.friends = 0
|
||||
counts.groups = 0
|
||||
counts.officials = 0
|
||||
counts.total = 0
|
||||
return
|
||||
}
|
||||
loading.value = true
|
||||
error.value = ''
|
||||
try {
|
||||
const resp = await api.listChatContacts({
|
||||
account: selectedAccount.value,
|
||||
keyword: searchKeyword.value || '',
|
||||
include_friends: contactTypes.friends,
|
||||
include_groups: contactTypes.groups,
|
||||
include_officials: contactTypes.officials,
|
||||
})
|
||||
contacts.value = Array.isArray(resp?.contacts) ? resp.contacts : []
|
||||
counts.friends = Number(resp?.counts?.friends || 0)
|
||||
counts.groups = Number(resp?.counts?.groups || 0)
|
||||
counts.officials = Number(resp?.counts?.officials || 0)
|
||||
counts.total = Number(resp?.counts?.total || contacts.value.length)
|
||||
} catch (e) {
|
||||
contacts.value = []
|
||||
error.value = e?.message || '加载联系人失败'
|
||||
} finally {
|
||||
loading.value = false
|
||||
}
|
||||
}
|
||||
|
||||
let keywordTimer = null
|
||||
watch(() => searchKeyword.value, () => {
|
||||
if (keywordTimer) clearTimeout(keywordTimer)
|
||||
keywordTimer = setTimeout(() => {
|
||||
void loadContacts()
|
||||
}, 250)
|
||||
})
|
||||
|
||||
watch(() => [selectedAccount.value, contactTypes.friends, contactTypes.groups, contactTypes.officials], () => {
|
||||
void loadContacts()
|
||||
})
|
||||
|
||||
const chooseExportFolder = async () => {
|
||||
exportMsg.value = ''
|
||||
exportOk.value = false
|
||||
try {
|
||||
if (!process.client) {
|
||||
exportMsg.value = '当前环境不支持选择导出目录'
|
||||
return
|
||||
}
|
||||
|
||||
if (isDesktopExportRuntime()) {
|
||||
const result = await window.wechatDesktop.chooseDirectory({ title: '选择导出目录' })
|
||||
if (result && !result.canceled && Array.isArray(result.filePaths) && result.filePaths.length > 0) {
|
||||
exportFolder.value = String(result.filePaths[0] || '')
|
||||
exportFolderHandle.value = null
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if (isWebDirectoryPickerSupported()) {
|
||||
const handle = await window.showDirectoryPicker()
|
||||
if (handle) {
|
||||
exportFolderHandle.value = handle
|
||||
exportFolder.value = `浏览器目录:${String(handle.name || '已选择')}`
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
exportMsg.value = '当前浏览器不支持目录选择,请使用桌面端或 Chromium 新版浏览器'
|
||||
} catch (e) {
|
||||
exportMsg.value = e?.message || '选择文件夹失败'
|
||||
exportOk.value = false
|
||||
}
|
||||
}
|
||||
|
||||
const startExport = async () => {
|
||||
exportMsg.value = ''
|
||||
exportOk.value = false
|
||||
|
||||
if (!canExport.value) {
|
||||
exportMsg.value = '请先选择账号、导出目录,并至少勾选一种联系人类型'
|
||||
return
|
||||
}
|
||||
|
||||
exporting.value = true
|
||||
try {
|
||||
const resp = isDesktopExportRuntime()
|
||||
? await api.exportChatContacts({
|
||||
account: selectedAccount.value,
|
||||
output_dir: exportFolder.value,
|
||||
format: exportFormat.value,
|
||||
include_avatar_link: includeAvatarLink.value,
|
||||
keyword: searchKeyword.value || '',
|
||||
contact_types: {
|
||||
friends: exportTypes.friends,
|
||||
groups: exportTypes.groups,
|
||||
officials: exportTypes.officials,
|
||||
}
|
||||
})
|
||||
: await exportContactsInWeb()
|
||||
exportOk.value = true
|
||||
exportMsg.value = `导出成功:${resp?.outputPath || ''}\n共 ${Number(resp?.count || 0)} 个联系人`
|
||||
} catch (e) {
|
||||
exportOk.value = false
|
||||
exportMsg.value = e?.message || '导出失败'
|
||||
} finally {
|
||||
exporting.value = false
|
||||
}
|
||||
}
|
||||
|
||||
onMounted(async () => {
|
||||
await loadAccounts()
|
||||
await loadContacts()
|
||||
})
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.privacy-blur {
|
||||
filter: blur(9px);
|
||||
transition: filter 0.2s ease;
|
||||
}
|
||||
|
||||
.privacy-blur:hover {
|
||||
filter: none;
|
||||
}
|
||||
</style>
|
||||
@@ -68,6 +68,26 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- 联系人图标 -->
|
||||
<div
|
||||
class="w-full h-[var(--sidebar-rail-step)] flex items-center justify-center cursor-pointer group"
|
||||
title="联系人"
|
||||
@click="goContacts"
|
||||
>
|
||||
<div
|
||||
class="w-[var(--sidebar-rail-btn)] h-[var(--sidebar-rail-btn)] rounded-md flex items-center justify-center transition-colors bg-transparent group-hover:bg-[#E1E1E1]"
|
||||
>
|
||||
<div class="w-[var(--sidebar-rail-icon)] h-[var(--sidebar-rail-icon)]" :class="isContactsRoute ? 'text-[#07b75b]' : 'text-[#5d5d5d]'">
|
||||
<svg class="w-full h-full" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.8" stroke-linecap="round" stroke-linejoin="round" aria-hidden="true">
|
||||
<path d="M17 21v-2a4 4 0 0 0-4-4H7a4 4 0 0 0-4 4v2" />
|
||||
<circle cx="10" cy="7" r="4" />
|
||||
<path d="M23 21v-2a4 4 0 0 0-3-3.87" />
|
||||
<path d="M16 3.13a4 4 0 0 1 0 7.75" />
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- 年度总结图标 -->
|
||||
<div
|
||||
class="w-full h-[var(--sidebar-rail-step)] flex items-center justify-center cursor-pointer group"
|
||||
@@ -411,6 +431,7 @@ const route = useRoute()
|
||||
|
||||
const isChatRoute = computed(() => route.path?.startsWith('/chat'))
|
||||
const isSnsRoute = computed(() => route.path?.startsWith('/sns'))
|
||||
const isContactsRoute = computed(() => route.path?.startsWith('/contacts'))
|
||||
const isWrappedRoute = computed(() => route.path?.startsWith('/wrapped'))
|
||||
|
||||
// 隐私模式(聊天/朋友圈共用本地开关)
|
||||
@@ -1051,6 +1072,10 @@ const goSns = async () => {
|
||||
await navigateTo('/sns')
|
||||
}
|
||||
|
||||
const goContacts = async () => {
|
||||
await navigateTo('/contacts')
|
||||
}
|
||||
|
||||
const goWrapped = async () => {
|
||||
await navigateTo('/wrapped')
|
||||
}
|
||||
|
||||
Binary file not shown. (Before: 1.9 MiB, After: 97 KiB)
Binary file not shown. (Before: 97 KiB)
@@ -13,6 +13,7 @@ from .logging_config import setup_logging, get_logger
|
||||
from .path_fix import PathFixRoute
|
||||
from .chat_realtime_autosync import CHAT_REALTIME_AUTOSYNC
|
||||
from .routers.chat import router as _chat_router
|
||||
from .routers.chat_contacts import router as _chat_contacts_router
|
||||
from .routers.chat_export import router as _chat_export_router
|
||||
from .routers.chat_media import router as _chat_media_router
|
||||
from .routers.decrypt import router as _decrypt_router
|
||||
@@ -52,6 +53,7 @@ app.include_router(_decrypt_router)
|
||||
app.include_router(_keys_router)
|
||||
app.include_router(_media_router)
|
||||
app.include_router(_chat_router)
|
||||
app.include_router(_chat_contacts_router)
|
||||
app.include_router(_chat_export_router)
|
||||
app.include_router(_chat_media_router)
|
||||
app.include_router(_sns_router)
|
||||
|
||||
454 src/wechat_decrypt_tool/avatar_cache.py (new file)
@@ -0,0 +1,454 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import sqlite3
|
||||
import time
|
||||
from email.utils import formatdate
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional
|
||||
from urllib.parse import urlsplit, urlunsplit
|
||||
|
||||
from .app_paths import get_output_dir
|
||||
from .logging_config import get_logger
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
AVATAR_CACHE_TTL_SECONDS = 7 * 24 * 60 * 60
|
||||
|
||||
|
||||
def is_avatar_cache_enabled() -> bool:
|
||||
v = str(os.environ.get("WECHAT_TOOL_AVATAR_CACHE_ENABLED", "1") or "").strip().lower()
|
||||
return v not in {"", "0", "false", "off", "no"}
|
||||
|
||||
|
||||
def get_avatar_cache_root_dir() -> Path:
|
||||
return get_output_dir() / "avatar_cache"
|
||||
|
||||
|
||||
def _safe_segment(value: str) -> str:
|
||||
cleaned = re.sub(r"[^0-9A-Za-z._-]+", "_", str(value or "").strip())
|
||||
cleaned = cleaned.strip("._-")
|
||||
return cleaned or "default"
|
||||
|
||||
|
||||
def _account_layout(account: str) -> tuple[Path, Path, Path, Path]:
|
||||
account_dir = get_avatar_cache_root_dir() / _safe_segment(account)
|
||||
files_dir = account_dir / "files"
|
||||
tmp_dir = account_dir / "tmp"
|
||||
db_path = account_dir / "avatar_cache.db"
|
||||
return account_dir, files_dir, tmp_dir, db_path
|
||||
|
||||
|
||||
def _ensure_account_layout(account: str) -> tuple[Path, Path, Path, Path]:
|
||||
account_dir, files_dir, tmp_dir, db_path = _account_layout(account)
|
||||
account_dir.mkdir(parents=True, exist_ok=True)
|
||||
files_dir.mkdir(parents=True, exist_ok=True)
|
||||
tmp_dir.mkdir(parents=True, exist_ok=True)
|
||||
return account_dir, files_dir, tmp_dir, db_path
|
||||
|
||||
|
||||
def _connect(account: str) -> sqlite3.Connection:
|
||||
_, _, _, db_path = _ensure_account_layout(account)
|
||||
conn = sqlite3.connect(str(db_path), timeout=5)
|
||||
conn.row_factory = sqlite3.Row
|
||||
_ensure_schema(conn)
|
||||
return conn
|
||||
|
||||
|
||||
def _ensure_schema(conn: sqlite3.Connection) -> None:
|
||||
conn.execute(
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS avatar_cache_entries (
|
||||
account TEXT NOT NULL,
|
||||
cache_key TEXT NOT NULL,
|
||||
source_kind TEXT NOT NULL,
|
||||
username TEXT NOT NULL DEFAULT '',
|
||||
source_url TEXT NOT NULL DEFAULT '',
|
||||
source_md5 TEXT NOT NULL DEFAULT '',
|
||||
source_update_time INTEGER NOT NULL DEFAULT 0,
|
||||
rel_path TEXT NOT NULL DEFAULT '',
|
||||
media_type TEXT NOT NULL DEFAULT 'application/octet-stream',
|
||||
size_bytes INTEGER NOT NULL DEFAULT 0,
|
||||
etag TEXT NOT NULL DEFAULT '',
|
||||
last_modified TEXT NOT NULL DEFAULT '',
|
||||
fetched_at INTEGER NOT NULL DEFAULT 0,
|
||||
checked_at INTEGER NOT NULL DEFAULT 0,
|
||||
expires_at INTEGER NOT NULL DEFAULT 0,
|
||||
PRIMARY KEY (account, cache_key)
|
||||
)
|
||||
"""
|
||||
)
|
||||
conn.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_avatar_cache_entries_account_username ON avatar_cache_entries(account, username)"
|
||||
)
|
||||
conn.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_avatar_cache_entries_account_source ON avatar_cache_entries(account, source_kind, source_url)"
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
|
||||
def _row_to_dict(row: Optional[sqlite3.Row]) -> Optional[dict[str, Any]]:
|
||||
if row is None:
|
||||
return None
|
||||
out: dict[str, Any] = {}
|
||||
for k in row.keys():
|
||||
out[str(k)] = row[k]
|
||||
return out
|
||||
|
||||
|
||||
def normalize_avatar_source_url(url: str) -> str:
|
||||
raw = str(url or "").strip()
|
||||
if not raw:
|
||||
return ""
|
||||
try:
|
||||
p = urlsplit(raw)
|
||||
except Exception:
|
||||
return raw
|
||||
scheme = str(p.scheme or "").lower()
|
||||
host = str(p.hostname or "").lower()
|
||||
if not scheme or not host:
|
||||
return raw
|
||||
netloc = host
|
||||
if p.port:
|
||||
netloc = f"{host}:{int(p.port)}"
|
||||
path = p.path or "/"
|
||||
return urlunsplit((scheme, netloc, path, p.query or "", ""))
|
||||
|
||||
|
||||
def cache_key_for_avatar_user(username: str) -> str:
|
||||
u = str(username or "").strip()
|
||||
return hashlib.sha1(f"user:{u}".encode("utf-8", errors="ignore")).hexdigest()
|
||||
|
||||
|
||||
def cache_key_for_avatar_url(url: str) -> str:
|
||||
u = normalize_avatar_source_url(url)
|
||||
return hashlib.sha1(f"url:{u}".encode("utf-8", errors="ignore")).hexdigest()
|
||||
|
||||
|
||||
def get_avatar_cache_entry(account: str, cache_key: str) -> Optional[dict[str, Any]]:
|
||||
if (not is_avatar_cache_enabled()) or (not cache_key):
|
||||
return None
|
||||
try:
|
||||
conn = _connect(account)
|
||||
except Exception:
|
||||
return None
|
||||
try:
|
||||
row = conn.execute(
|
||||
"SELECT * FROM avatar_cache_entries WHERE account = ? AND cache_key = ? LIMIT 1",
|
||||
(str(account or ""), str(cache_key or "")),
|
||||
).fetchone()
|
||||
return _row_to_dict(row)
|
||||
except Exception:
|
||||
return None
|
||||
finally:
|
||||
try:
|
||||
conn.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def get_avatar_cache_user_entry(account: str, username: str) -> Optional[dict[str, Any]]:
|
||||
if not username:
|
||||
return None
|
||||
return get_avatar_cache_entry(account, cache_key_for_avatar_user(username))
|
||||
|
||||
|
||||
def get_avatar_cache_url_entry(account: str, source_url: str) -> Optional[dict[str, Any]]:
|
||||
if not source_url:
|
||||
return None
|
||||
return get_avatar_cache_entry(account, cache_key_for_avatar_url(source_url))
|
||||
|
||||
|
||||
def resolve_avatar_cache_entry_path(account: str, entry: Optional[dict[str, Any]]) -> Optional[Path]:
|
||||
if not entry:
|
||||
return None
|
||||
rel = str(entry.get("rel_path") or "").strip().replace("\\", "/")
|
||||
if not rel:
|
||||
return None
|
||||
account_dir, _, _, _ = _account_layout(account)
|
||||
p = account_dir / rel
|
||||
try:
|
||||
account_dir_resolved = account_dir.resolve()
|
||||
p_resolved = p.resolve()
|
||||
if p_resolved != account_dir_resolved and account_dir_resolved not in p_resolved.parents:
|
||||
return None
|
||||
return p_resolved
|
||||
except Exception:
|
||||
return p
|
||||
|
||||
|
||||
def avatar_cache_entry_file_exists(account: str, entry: Optional[dict[str, Any]]) -> Optional[Path]:
|
||||
p = resolve_avatar_cache_entry_path(account, entry)
|
||||
if not p:
|
||||
return None
|
||||
try:
|
||||
if p.exists() and p.is_file():
|
||||
return p
|
||||
except Exception:
|
||||
return None
|
||||
return None
|
||||
|
||||
|
||||
def avatar_cache_entry_is_fresh(entry: Optional[dict[str, Any]], now_ts: Optional[int] = None) -> bool:
|
||||
if not entry:
|
||||
return False
|
||||
try:
|
||||
expires = int(entry.get("expires_at") or 0)
|
||||
except Exception:
|
||||
expires = 0
|
||||
if expires <= 0:
|
||||
return False
|
||||
now0 = int(now_ts or time.time())
|
||||
return expires > now0
|
||||
|
||||
|
||||
def _guess_ext(media_type: str) -> str:
|
||||
mt = str(media_type or "").strip().lower()
|
||||
if mt == "image/jpeg":
|
||||
return "jpg"
|
||||
if mt == "image/png":
|
||||
return "png"
|
||||
if mt == "image/gif":
|
||||
return "gif"
|
||||
if mt == "image/webp":
|
||||
return "webp"
|
||||
if mt == "image/bmp":
|
||||
return "bmp"
|
||||
if mt == "image/svg+xml":
|
||||
return "svg"
|
||||
if mt == "image/avif":
|
||||
return "avif"
|
||||
if mt.startswith("image/"):
|
||||
return mt.split("/", 1)[1].split("+", 1)[0].split(";", 1)[0] or "img"
|
||||
return "dat"
|
||||
|
||||
|
||||
def _http_date_from_ts(ts: Optional[int]) -> str:
|
||||
try:
|
||||
t = int(ts or 0)
|
||||
except Exception:
|
||||
t = 0
|
||||
if t <= 0:
|
||||
return ""
|
||||
try:
|
||||
return formatdate(timeval=float(t), usegmt=True)
|
||||
except Exception:
|
||||
return ""
|
||||
|
||||
|
||||
def upsert_avatar_cache_entry(
|
||||
account: str,
|
||||
*,
|
||||
cache_key: str,
|
||||
source_kind: str,
|
||||
username: str = "",
|
||||
source_url: str = "",
|
||||
source_md5: str = "",
|
||||
source_update_time: int = 0,
|
||||
rel_path: str = "",
|
||||
media_type: str = "application/octet-stream",
|
||||
size_bytes: int = 0,
|
||||
etag: str = "",
|
||||
last_modified: str = "",
|
||||
fetched_at: Optional[int] = None,
|
||||
checked_at: Optional[int] = None,
|
||||
expires_at: Optional[int] = None,
|
||||
) -> Optional[dict[str, Any]]:
|
||||
if (not is_avatar_cache_enabled()) or (not cache_key):
|
||||
return None
|
||||
|
||||
acct = str(account or "").strip()
|
||||
ck = str(cache_key or "").strip()
|
||||
sk = str(source_kind or "").strip().lower()
|
||||
if not acct or not ck or not sk:
|
||||
return None
|
||||
|
||||
source_url_norm = normalize_avatar_source_url(source_url) if source_url else ""
|
||||
|
||||
now_ts = int(time.time())
|
||||
fetched = int(fetched_at if fetched_at is not None else now_ts)
|
||||
checked = int(checked_at if checked_at is not None else now_ts)
|
||||
expire_ts = int(expires_at if expires_at is not None else (checked + AVATAR_CACHE_TTL_SECONDS))
|
||||
|
||||
try:
|
||||
conn = _connect(acct)
|
||||
except Exception as e:
|
||||
logger.warning(f"[avatar_cache_error] open db failed account={acct} err={e}")
|
||||
return None
|
||||
try:
|
||||
conn.execute(
|
||||
"""
|
||||
INSERT INTO avatar_cache_entries (
|
||||
account, cache_key, source_kind, username, source_url,
|
||||
source_md5, source_update_time, rel_path, media_type, size_bytes,
|
||||
etag, last_modified, fetched_at, checked_at, expires_at
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(account, cache_key) DO UPDATE SET
|
||||
source_kind=excluded.source_kind,
|
||||
username=excluded.username,
|
||||
source_url=excluded.source_url,
|
||||
source_md5=excluded.source_md5,
|
||||
source_update_time=excluded.source_update_time,
|
||||
rel_path=excluded.rel_path,
|
||||
media_type=excluded.media_type,
|
||||
size_bytes=excluded.size_bytes,
|
||||
etag=excluded.etag,
|
||||
last_modified=excluded.last_modified,
|
||||
fetched_at=excluded.fetched_at,
|
||||
checked_at=excluded.checked_at,
|
||||
expires_at=excluded.expires_at
|
||||
""",
|
||||
(
|
||||
acct,
|
||||
ck,
|
||||
sk,
|
||||
str(username or "").strip(),
|
||||
source_url_norm,
|
||||
str(source_md5 or "").strip().lower(),
|
||||
int(source_update_time or 0),
|
||||
str(rel_path or "").strip().replace("\\", "/"),
|
||||
str(media_type or "application/octet-stream").strip() or "application/octet-stream",
|
||||
int(size_bytes or 0),
|
||||
str(etag or "").strip(),
|
||||
str(last_modified or "").strip(),
|
||||
fetched,
|
||||
checked,
|
||||
expire_ts,
|
||||
),
|
||||
)
|
||||
conn.commit()
|
||||
row = conn.execute(
|
||||
"SELECT * FROM avatar_cache_entries WHERE account = ? AND cache_key = ? LIMIT 1",
|
||||
(acct, ck),
|
||||
).fetchone()
|
||||
return _row_to_dict(row)
|
||||
except Exception as e:
|
||||
logger.warning(f"[avatar_cache_error] upsert failed account={acct} cache_key={ck} err={e}")
|
||||
return None
|
||||
finally:
|
||||
try:
|
||||
conn.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def touch_avatar_cache_entry(account: str, cache_key: str, *, ttl_seconds: int = AVATAR_CACHE_TTL_SECONDS) -> bool:
|
||||
if (not is_avatar_cache_enabled()) or (not cache_key):
|
||||
return False
|
||||
now_ts = int(time.time())
|
||||
try:
|
||||
conn = _connect(account)
|
||||
except Exception:
|
||||
return False
|
||||
try:
|
||||
conn.execute(
|
||||
"UPDATE avatar_cache_entries SET checked_at = ?, expires_at = ? WHERE account = ? AND cache_key = ?",
|
||||
(now_ts, now_ts + max(60, int(ttl_seconds or AVATAR_CACHE_TTL_SECONDS)), str(account or ""), str(cache_key or "")),
|
||||
)
|
||||
conn.commit()
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
finally:
|
||||
try:
|
||||
conn.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def write_avatar_cache_payload(
|
||||
account: str,
|
||||
*,
|
||||
source_kind: str,
|
||||
username: str = "",
|
||||
source_url: str = "",
|
||||
payload: bytes,
|
||||
media_type: str,
|
||||
source_md5: str = "",
|
||||
source_update_time: int = 0,
|
||||
etag: str = "",
|
||||
last_modified: str = "",
|
||||
ttl_seconds: int = AVATAR_CACHE_TTL_SECONDS,
|
||||
) -> tuple[Optional[dict[str, Any]], Optional[Path]]:
|
||||
if (not is_avatar_cache_enabled()) or (not payload):
|
||||
return None, None
|
||||
|
||||
acct = str(account or "").strip()
|
||||
sk = str(source_kind or "").strip().lower()
|
||||
if not acct or sk not in {"user", "url"}:
|
||||
return None, None
|
||||
|
||||
source_url_norm = normalize_avatar_source_url(source_url) if source_url else ""
|
||||
if sk == "user":
|
||||
cache_key = cache_key_for_avatar_user(username)
|
||||
else:
|
||||
cache_key = cache_key_for_avatar_url(source_url_norm)
|
||||
|
||||
digest = hashlib.sha1(bytes(payload)).hexdigest()
|
||||
ext = _guess_ext(media_type)
|
||||
rel_path = f"files/{digest[:2]}/{digest}.{ext}"
|
||||
|
||||
try:
|
||||
account_dir, _, tmp_dir, _ = _ensure_account_layout(acct)
|
||||
except Exception as e:
|
||||
logger.warning(f"[avatar_cache_error] ensure dirs failed account={acct} err={e}")
|
||||
return None, None
|
||||
|
||||
abs_path = account_dir / rel_path
|
||||
try:
|
||||
abs_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
if (not abs_path.exists()) or (int(abs_path.stat().st_size) != len(payload)):
|
||||
tmp_path = tmp_dir / f"{digest}.{time.time_ns()}.tmp"
|
||||
tmp_path.write_bytes(payload)
|
||||
os.replace(str(tmp_path), str(abs_path))
|
||||
except Exception as e:
|
||||
logger.warning(f"[avatar_cache_error] write file failed account={acct} path={abs_path} err={e}")
|
||||
return None, None
|
||||
|
||||
if (not etag) and digest:
|
||||
etag = f'"{digest}"'
|
||||
if (not last_modified) and source_update_time:
|
||||
last_modified = _http_date_from_ts(source_update_time)
|
||||
if not last_modified:
|
||||
last_modified = _http_date_from_ts(int(time.time()))
|
||||
|
||||
entry = upsert_avatar_cache_entry(
|
||||
acct,
|
||||
cache_key=cache_key,
|
||||
source_kind=sk,
|
||||
username=username,
|
||||
source_url=source_url_norm,
|
||||
source_md5=source_md5,
|
||||
source_update_time=int(source_update_time or 0),
|
||||
rel_path=rel_path,
|
||||
media_type=media_type,
|
||||
size_bytes=len(payload),
|
||||
etag=etag,
|
||||
last_modified=last_modified,
|
||||
fetched_at=int(time.time()),
|
||||
checked_at=int(time.time()),
|
||||
expires_at=int(time.time()) + max(60, int(ttl_seconds or AVATAR_CACHE_TTL_SECONDS)),
|
||||
)
|
||||
if not entry:
|
||||
return None, None
|
||||
return entry, abs_path
|
||||
|
||||
|
||||
def build_avatar_cache_response_headers(
|
||||
entry: Optional[dict[str, Any]], *, max_age: int = AVATAR_CACHE_TTL_SECONDS
|
||||
) -> dict[str, str]:
|
||||
headers: dict[str, str] = {
|
||||
"Cache-Control": f"public, max-age={int(max_age)}",
|
||||
}
|
||||
if not entry:
|
||||
return headers
|
||||
etag = str(entry.get("etag") or "").strip()
|
||||
last_modified = str(entry.get("last_modified") or "").strip()
|
||||
if etag:
|
||||
headers["ETag"] = etag
|
||||
if last_modified:
|
||||
headers["Last-Modified"] = last_modified
|
||||
return headers
|
||||
|
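A minimal usage sketch for the avatar cache module above. The account id, contact id, URL and payload bytes are placeholders, and the import path assumes the `wechat_decrypt_tool` package is importable as it is elsewhere in this repository; the function names and signatures come from the diff itself.

```python
# Hypothetical usage of the avatar_cache helpers defined above; values marked
# "example" are placeholders, not data from the project.
from wechat_decrypt_tool.avatar_cache import (
    avatar_cache_entry_file_exists,
    avatar_cache_entry_is_fresh,
    build_avatar_cache_response_headers,
    get_avatar_cache_user_entry,
    write_avatar_cache_payload,
)

account = "wxid_example_account"    # placeholder account id
username = "wxid_example_contact"   # placeholder contact id
avatar_bytes = b"..."               # placeholder image payload (e.g. a downloaded JPEG)

# Store a freshly downloaded avatar; the entry is keyed by sha1("user:<username>") and the
# file is written under <output>/avatar_cache/<account>/files/<digest[:2]>/<digest>.jpg.
entry, path = write_avatar_cache_payload(
    account,
    source_kind="user",
    username=username,
    source_url="https://wx.qlogo.cn/mmhead/example/0",  # placeholder avatar URL
    payload=avatar_bytes,
    media_type="image/jpeg",
)

# On later requests, reuse the cached copy while the default 7-day TTL has not expired.
cached = get_avatar_cache_user_entry(account, username)
if avatar_cache_entry_is_fresh(cached) and avatar_cache_entry_file_exists(account, cached):
    headers = build_avatar_cache_response_headers(cached)  # Cache-Control / ETag / Last-Modified
```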
||||
@@ -74,6 +74,25 @@ def _safe_name(s: str, max_len: int = 80) -> str:
|
||||
return t
|
||||
|
||||
|
||||
def _resolve_export_output_dir(account_dir: Path, output_dir_raw: Any) -> Path:
|
||||
text = str(output_dir_raw or "").strip()
|
||||
if not text:
|
||||
default_dir = account_dir.parents[1] / "exports" / account_dir.name
|
||||
default_dir.mkdir(parents=True, exist_ok=True)
|
||||
return default_dir
|
||||
|
||||
out_dir = Path(text).expanduser()
|
||||
if not out_dir.is_absolute():
|
||||
raise ValueError("output_dir must be an absolute path.")
|
||||
|
||||
try:
|
||||
out_dir.mkdir(parents=True, exist_ok=True)
|
||||
except Exception as e:
|
||||
raise ValueError(f"Failed to prepare output_dir: {e}") from e
|
||||
|
||||
return out_dir.resolve()
|
||||
|
||||
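Illustrative behaviour of `_resolve_export_output_dir` above, with hypothetical Windows-style paths:

```python
# Assumed example paths; only the function's documented behaviour is shown.
from pathlib import Path

account_dir = Path("D:/wx_output/decrypted/wxid_example")

_resolve_export_output_dir(account_dir, "")             # -> D:/wx_output/exports/wxid_example (created on demand)
_resolve_export_output_dir(account_dir, "exports/out")  # raises ValueError: output_dir must be an absolute path.
_resolve_export_output_dir(account_dir, "D:/backups")   # -> D:/backups, created if missing and resolved
```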
|
||||
def _format_ts(ts: int) -> str:
|
||||
if not ts:
|
||||
return ""
|
||||
@@ -99,43 +118,54 @@ def _normalize_render_type_key(value: Any) -> str:
return lower


def _render_types_to_local_types(render_types: set[str]) -> Optional[set[int]]:
rt = {str(x or "").strip() for x in (render_types or set())}
rt = {x for x in rt if x}
if not rt:
def _is_render_type_selected(render_type: Any, selected_render_types: Optional[set[str]]) -> bool:
if selected_render_types is None:
return True
rt = _normalize_render_type_key(render_type) or "text"
return rt in selected_render_types


def _media_kinds_from_selected_types(selected_render_types: Optional[set[str]]) -> Optional[set[MediaKind]]:
if selected_render_types is None:
return None

out: set[int] = set()
for k in rt:
if k == "text":
out.add(1)
elif k == "image":
out.add(3)
elif k == "voice":
out.add(34)
elif k == "video":
out.update({43, 62})
elif k == "emoji":
out.add(47)
elif k == "voip":
out.add(50)
elif k == "system":
out.update({10000, 266287972401})
elif k == "quote":
out.add(244813135921)
out.add(49) # Some quote messages are embedded as appmsg (local_type=49).
elif k in {"link", "file", "transfer", "redpacket"}:
out.add(49)
else:
# Unknown type: cannot safely prefilter by local_type.
return None
out: set[MediaKind] = set()
if "image" in selected_render_types:
out.add("image")
if "emoji" in selected_render_types:
out.add("emoji")
if "video" in selected_render_types:
out.add("video")
out.add("video_thumb")
if "voice" in selected_render_types:
out.add("voice")
if "file" in selected_render_types:
out.add("file")
return out


def _should_estimate_by_local_type(render_types: set[str]) -> bool:
# Only estimate counts when every requested type maps 1:1 to local_type.
# App messages (local_type=49) are heterogeneous and cannot be counted accurately without parsing.
return not bool(render_types & {"link", "file", "transfer", "redpacket", "quote"})
def _resolve_effective_media_kinds(
*,
include_media: bool,
media_kinds: list[MediaKind],
selected_render_types: Optional[set[str]],
privacy_mode: bool,
) -> tuple[bool, list[MediaKind]]:
if privacy_mode or (not include_media):
return False, []

kinds = [k for k in media_kinds if k in {"image", "emoji", "video", "video_thumb", "voice", "file"}]
if not kinds:
return False, []

selected_media_kinds = _media_kinds_from_selected_types(selected_render_types)
if selected_media_kinds is not None:
kinds = [k for k in kinds if k in selected_media_kinds]

kinds = list(dict.fromkeys(kinds))
if not kinds:
return False, []
return True, kinds


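For reference, the renderType to local_type mapping encoded in `_render_types_to_local_types` above, restated as a literal; several UI-level types collapse onto the heterogeneous appmsg type 49, which is exactly why `_should_estimate_by_local_type` refuses to estimate message counts for them.

```python
# Restated from _render_types_to_local_types above; values are WeChat local_type codes.
RENDER_TYPE_TO_LOCAL_TYPES: dict[str, set[int]] = {
    "text": {1},
    "image": {3},
    "voice": {34},
    "video": {43, 62},
    "emoji": {47},
    "voip": {50},
    "system": {10000, 266287972401},
    "quote": {244813135921, 49},   # some quotes are embedded as appmsg
    "link": {49},
    "file": {49},
    "transfer": {49},
    "redpacket": {49},
}

# link/file/transfer/redpacket (and quote) all share local_type=49, so a count based on
# local_type alone would overestimate them; _should_estimate_by_local_type returns False there.
```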
@dataclass
|
||||
@@ -235,6 +265,7 @@ class ChatExportManager:
|
||||
include_media: bool,
|
||||
media_kinds: list[MediaKind],
|
||||
message_types: list[str],
|
||||
output_dir: Optional[str],
|
||||
allow_process_key_extract: bool,
|
||||
privacy_mode: bool,
|
||||
file_name: Optional[str],
|
||||
@@ -257,6 +288,7 @@ class ChatExportManager:
|
||||
"includeMedia": bool(include_media),
|
||||
"mediaKinds": media_kinds,
|
||||
"messageTypes": list(dict.fromkeys([str(t or "").strip() for t in (message_types or []) if str(t or "").strip()])),
|
||||
"outputDir": str(output_dir or "").strip(),
|
||||
"allowProcessKeyExtract": bool(allow_process_key_extract),
|
||||
"privacyMode": bool(privacy_mode),
|
||||
"fileName": str(file_name or "").strip(),
|
||||
@@ -313,10 +345,6 @@ class ChatExportManager:
|
||||
if ks in {"image", "emoji", "video", "video_thumb", "voice", "file"}:
|
||||
media_kinds.append(ks) # type: ignore[arg-type]
|
||||
|
||||
if privacy_mode:
|
||||
include_media = False
|
||||
media_kinds = []
|
||||
|
||||
st = int(opts.get("startTime") or 0) or None
|
||||
et = int(opts.get("endTime") or 0) or None
|
||||
|
||||
@@ -328,9 +356,15 @@ class ChatExportManager:
|
||||
if want:
|
||||
want_types = want
|
||||
|
||||
local_types = _render_types_to_local_types(want_types) if want_types else None
|
||||
can_estimate = (want_types is None) or _should_estimate_by_local_type(want_types)
|
||||
estimate_local_types = local_types if (want_types and can_estimate) else None
|
||||
include_media, media_kinds = _resolve_effective_media_kinds(
|
||||
include_media=include_media,
|
||||
media_kinds=media_kinds,
|
||||
selected_render_types=want_types,
|
||||
privacy_mode=privacy_mode,
|
||||
)
|
||||
|
||||
local_types = None
|
||||
estimate_local_types = None
|
||||
|
||||
target_usernames = _resolve_export_targets(
|
||||
account_dir=account_dir,
|
||||
@@ -342,8 +376,7 @@ class ChatExportManager:
|
||||
if not target_usernames:
|
||||
raise ValueError("No target conversations to export.")
|
||||
|
||||
exports_root = account_dir.parents[1] / "exports" / account_dir.name
|
||||
exports_root.mkdir(parents=True, exist_ok=True)
|
||||
exports_root = _resolve_export_output_dir(account_dir, opts.get("outputDir"))
|
||||
ts = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
|
||||
base_name = str(opts.get("fileName") or "").strip()
|
||||
@@ -456,16 +489,13 @@ class ChatExportManager:
|
||||
job.progress.current_conversation_messages_total = 0
|
||||
|
||||
try:
|
||||
if not can_estimate:
|
||||
estimated_total = 0
|
||||
else:
|
||||
estimated_total = _estimate_conversation_message_count(
|
||||
account_dir=account_dir,
|
||||
conv_username=conv_username,
|
||||
start_time=st,
|
||||
end_time=et,
|
||||
local_types=estimate_local_types,
|
||||
)
|
||||
estimated_total = _estimate_conversation_message_count(
|
||||
account_dir=account_dir,
|
||||
conv_username=conv_username,
|
||||
start_time=st,
|
||||
end_time=et,
|
||||
local_types=estimate_local_types,
|
||||
)
|
||||
except Exception:
|
||||
estimated_total = 0
|
||||
|
||||
@@ -557,6 +587,8 @@ class ChatExportManager:
|
||||
zf.writestr(f"{conv_dir}/meta.json", json.dumps(meta, ensure_ascii=False, indent=2))
|
||||
|
||||
with self._lock:
|
||||
job.progress.current_conversation_messages_exported = int(exported_count)
|
||||
job.progress.current_conversation_messages_total = int(exported_count)
|
||||
job.progress.conversations_done += 1
|
||||
|
||||
manifest = {
|
||||
@@ -1325,12 +1357,8 @@ def _write_conversation_json(
|
||||
resource_chat_id=resource_chat_id,
|
||||
sender_alias=sender_alias,
|
||||
)
|
||||
if want_types:
|
||||
rt_key = _normalize_render_type_key(msg.get("renderType"))
|
||||
if rt_key not in want_types:
|
||||
if scanned % 500 == 0 and job.cancel_requested:
|
||||
raise _JobCancelled()
|
||||
continue
|
||||
if not _is_render_type_selected(msg.get("renderType"), want_types):
|
||||
continue
|
||||
|
||||
su = str(msg.get("senderUsername") or "").strip()
|
||||
if privacy_mode:
|
||||
@@ -1506,12 +1534,8 @@ def _write_conversation_txt(
|
||||
resource_chat_id=resource_chat_id,
|
||||
sender_alias=sender_alias,
|
||||
)
|
||||
if want_types:
|
||||
rt_key = _normalize_render_type_key(msg.get("renderType"))
|
||||
if rt_key not in want_types:
|
||||
if scanned % 500 == 0 and job.cancel_requested:
|
||||
raise _JobCancelled()
|
||||
continue
|
||||
if not _is_render_type_selected(msg.get("renderType"), want_types):
|
||||
continue
|
||||
|
||||
su = str(msg.get("senderUsername") or "").strip()
|
||||
if privacy_mode:
|
||||
|
||||
@@ -45,7 +45,6 @@ from ..chat_helpers import (
|
||||
_normalize_xml_url,
|
||||
_parse_app_message,
|
||||
_parse_pat_message,
|
||||
_pick_avatar_url,
|
||||
_pick_display_name,
|
||||
_query_head_image_usernames,
|
||||
_quote_ident,
|
||||
@@ -85,6 +84,19 @@ _REALTIME_SYNC_LOCKS: dict[tuple[str, str], threading.Lock] = {}
|
||||
_REALTIME_SYNC_ALL_LOCKS: dict[str, threading.Lock] = {}
|
||||
|
||||
|
||||
def _avatar_url_unified(
*,
account_dir: Path,
username: str,
local_avatar_usernames: set[str] | None = None,
) -> str:
u = str(username or "").strip()
if not u:
return ""
# Unified avatar entrypoint: backend decides local db vs remote fallback + cache.
return _build_avatar_url(str(account_dir.name or ""), u)


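`_avatar_url_unified` is the single entry point that the hunks below substitute for the old per-call-site fallback chain (local head_image, then WCDB URL, then contact.db URL); the selection now happens once, inside the avatar endpoint. A call-site sketch with illustrative values:

```python
from pathlib import Path

base_url = "http://127.0.0.1:5030"          # illustrative; taken from request.base_url in the routers
account_dir = Path("decrypted/wxid_demo")   # illustrative account directory
local_sender_avatars: set[str] = {"wxid_friend"}

avatar_url = base_url + _avatar_url_unified(
    account_dir=account_dir,
    username="wxid_friend",
    local_avatar_usernames=local_sender_avatars,
)
# The path component comes from _build_avatar_url, which is not shown in this diff.
```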
def _realtime_sync_lock(account: str, username: str) -> threading.Lock:
|
||||
key = (str(account or "").strip(), str(username or "").strip())
|
||||
with _REALTIME_SYNC_MU:
|
||||
@@ -1946,9 +1958,11 @@ async def chat_search_index_senders(
|
||||
continue
|
||||
cnt = int(r["c"] or 0)
|
||||
row = contact_rows.get(su)
|
||||
avatar_url = _pick_avatar_url(row)
|
||||
if (not avatar_url) and (su in local_sender_avatars):
|
||||
avatar_url = _build_avatar_url(account_dir.name, su)
|
||||
avatar_url = _avatar_url_unified(
|
||||
account_dir=account_dir,
|
||||
username=su,
|
||||
local_avatar_usernames=local_sender_avatars,
|
||||
)
|
||||
senders.append(
|
||||
{
|
||||
"username": su,
|
||||
@@ -2568,7 +2582,7 @@ def _postprocess_full_messages(
|
||||
row = sender_contact_rows.get(u)
|
||||
if _pick_display_name(row, u) == u:
|
||||
need_display.append(u)
|
||||
if (not _pick_avatar_url(row)) and (u not in local_sender_avatars):
|
||||
if u not in local_sender_avatars:
|
||||
need_avatar.append(u)
|
||||
|
||||
need_display = list(dict.fromkeys(need_display))
|
||||
@@ -2606,13 +2620,11 @@ def _postprocess_full_messages(
|
||||
if wd and wd != su:
|
||||
display_name = wd
|
||||
m["senderDisplayName"] = display_name
|
||||
avatar_url = _pick_avatar_url(row)
|
||||
if not avatar_url and su in local_sender_avatars:
|
||||
avatar_url = base_url + _build_avatar_url(account_dir.name, su)
|
||||
if not avatar_url:
|
||||
wa = str(wcdb_avatar_urls.get(su) or "").strip()
|
||||
if wa.lower().startswith(("http://", "https://")):
|
||||
avatar_url = wa
|
||||
avatar_url = base_url + _avatar_url_unified(
|
||||
account_dir=account_dir,
|
||||
username=su,
|
||||
local_avatar_usernames=local_sender_avatars,
|
||||
)
|
||||
m["senderAvatar"] = avatar_url
|
||||
|
||||
qu = str(m.get("quoteUsername") or "").strip()
|
||||
@@ -2922,7 +2934,7 @@ def list_chat_sessions(
|
||||
if u not in local_avatar_usernames:
|
||||
need_avatar.append(u)
|
||||
else:
|
||||
if (not _pick_avatar_url(row)) and (u not in local_avatar_usernames):
|
||||
if u not in local_avatar_usernames:
|
||||
need_avatar.append(u)
|
||||
|
||||
need_display = list(dict.fromkeys(need_display))
|
||||
@@ -2984,15 +2996,11 @@ def list_chat_sessions(
|
||||
|
||||
# Prefer local head_image avatars when available: decrypted contact.db URLs can be stale
|
||||
# (or hotlink-protected for browsers). WCDB realtime (when available) is the next best.
|
||||
avatar_url = ""
|
||||
if username in local_avatar_usernames:
|
||||
avatar_url = base_url + _build_avatar_url(account_dir.name, username)
|
||||
if not avatar_url:
|
||||
wa = str(wcdb_avatar_urls.get(username) or "").strip()
|
||||
if wa.lower().startswith(("http://", "https://")):
|
||||
avatar_url = wa
|
||||
if not avatar_url:
|
||||
avatar_url = _pick_avatar_url(c_row) or ""
|
||||
avatar_url = base_url + _avatar_url_unified(
|
||||
account_dir=account_dir,
|
||||
username=username,
|
||||
local_avatar_usernames=local_avatar_usernames,
|
||||
)
|
||||
|
||||
last_message = ""
|
||||
if preview_mode == "session":
|
||||
@@ -4388,7 +4396,7 @@ def list_chat_messages(
|
||||
row = sender_contact_rows.get(u)
|
||||
if _pick_display_name(row, u) == u:
|
||||
need_display.append(u)
|
||||
if (not _pick_avatar_url(row)) and (u not in local_sender_avatars):
|
||||
if u not in local_sender_avatars:
|
||||
need_avatar.append(u)
|
||||
|
||||
need_display = list(dict.fromkeys(need_display))
|
||||
@@ -4426,13 +4434,11 @@ def list_chat_messages(
|
||||
if wd and wd != su:
|
||||
display_name = wd
|
||||
m["senderDisplayName"] = display_name
|
||||
avatar_url = _pick_avatar_url(row)
|
||||
if not avatar_url and su in local_sender_avatars:
|
||||
avatar_url = base_url + _build_avatar_url(account_dir.name, su)
|
||||
if not avatar_url:
|
||||
wa = str(wcdb_avatar_urls.get(su) or "").strip()
|
||||
if wa.lower().startswith(("http://", "https://")):
|
||||
avatar_url = wa
|
||||
avatar_url = base_url + _avatar_url_unified(
|
||||
account_dir=account_dir,
|
||||
username=su,
|
||||
local_avatar_usernames=local_sender_avatars,
|
||||
)
|
||||
m["senderAvatar"] = avatar_url
|
||||
|
||||
qu = str(m.get("quoteUsername") or "").strip()
|
||||
@@ -4897,7 +4903,7 @@ async def _search_chat_messages_via_fts(
|
||||
row = contact_rows.get(uu)
|
||||
if _pick_display_name(row, uu) == uu:
|
||||
need_display.append(uu)
|
||||
if (not _pick_avatar_url(row)) and (uu not in local_avatar_usernames):
|
||||
if uu not in local_avatar_usernames:
|
||||
need_avatar.append(uu)
|
||||
|
||||
need_display = list(dict.fromkeys(need_display))
|
||||
@@ -4919,13 +4925,11 @@ async def _search_chat_messages_via_fts(
|
||||
wd = str(wcdb_display_names.get(username) or "").strip()
|
||||
if wd and wd != username:
|
||||
conv_name = wd
|
||||
conv_avatar = _pick_avatar_url(conv_row)
|
||||
if (not conv_avatar) and (username in local_avatar_usernames):
|
||||
conv_avatar = base_url + _build_avatar_url(account_dir.name, username)
|
||||
if not conv_avatar:
|
||||
wa = str(wcdb_avatar_urls.get(username) or "").strip()
|
||||
if wa.lower().startswith(("http://", "https://")):
|
||||
conv_avatar = wa
|
||||
conv_avatar = base_url + _avatar_url_unified(
|
||||
account_dir=account_dir,
|
||||
username=username,
|
||||
local_avatar_usernames=local_avatar_usernames,
|
||||
)
|
||||
|
||||
for h in hits:
|
||||
su = str(h.get("senderUsername") or "").strip()
|
||||
@@ -4939,13 +4943,11 @@ async def _search_chat_messages_via_fts(
|
||||
if wd and wd != su:
|
||||
display_name = wd
|
||||
h["senderDisplayName"] = display_name
|
||||
avatar_url = _pick_avatar_url(row)
|
||||
if (not avatar_url) and (su in local_avatar_usernames):
|
||||
avatar_url = base_url + _build_avatar_url(account_dir.name, su)
|
||||
if not avatar_url:
|
||||
wa = str(wcdb_avatar_urls.get(su) or "").strip()
|
||||
if wa.lower().startswith(("http://", "https://")):
|
||||
avatar_url = wa
|
||||
avatar_url = base_url + _avatar_url_unified(
|
||||
account_dir=account_dir,
|
||||
username=su,
|
||||
local_avatar_usernames=local_avatar_usernames,
|
||||
)
|
||||
h["senderAvatar"] = avatar_url
|
||||
else:
|
||||
uniq_contacts = list(
|
||||
@@ -4968,7 +4970,7 @@ async def _search_chat_messages_via_fts(
|
||||
row = contact_rows.get(uu)
|
||||
if _pick_display_name(row, uu) == uu:
|
||||
need_display.append(uu)
|
||||
if (not _pick_avatar_url(row)) and (uu not in local_avatar_usernames):
|
||||
if uu not in local_avatar_usernames:
|
||||
need_avatar.append(uu)
|
||||
|
||||
need_display = list(dict.fromkeys(need_display))
|
||||
@@ -4994,13 +4996,11 @@ async def _search_chat_messages_via_fts(
|
||||
if wd and wd != cu:
|
||||
conv_name = wd
|
||||
h["conversationName"] = conv_name or cu
|
||||
conv_avatar = _pick_avatar_url(crow)
|
||||
if (not conv_avatar) and cu and (cu in local_avatar_usernames):
|
||||
conv_avatar = base_url + _build_avatar_url(account_dir.name, cu)
|
||||
if not conv_avatar and cu:
|
||||
wa = str(wcdb_avatar_urls.get(cu) or "").strip()
|
||||
if wa.lower().startswith(("http://", "https://")):
|
||||
conv_avatar = wa
|
||||
conv_avatar = base_url + _avatar_url_unified(
|
||||
account_dir=account_dir,
|
||||
username=cu,
|
||||
local_avatar_usernames=local_avatar_usernames,
|
||||
)
|
||||
h["conversationAvatar"] = conv_avatar
|
||||
if su:
|
||||
row = contact_rows.get(su)
|
||||
@@ -5010,13 +5010,11 @@ async def _search_chat_messages_via_fts(
|
||||
if wd and wd != su:
|
||||
display_name = wd
|
||||
h["senderDisplayName"] = display_name
|
||||
avatar_url = _pick_avatar_url(row)
|
||||
if (not avatar_url) and (su in local_avatar_usernames):
|
||||
avatar_url = base_url + _build_avatar_url(account_dir.name, su)
|
||||
if not avatar_url:
|
||||
wa = str(wcdb_avatar_urls.get(su) or "").strip()
|
||||
if wa.lower().startswith(("http://", "https://")):
|
||||
avatar_url = wa
|
||||
avatar_url = base_url + _avatar_url_unified(
|
||||
account_dir=account_dir,
|
||||
username=su,
|
||||
local_avatar_usernames=local_avatar_usernames,
|
||||
)
|
||||
h["senderAvatar"] = avatar_url
|
||||
|
||||
return {
|
||||
|
||||
749
src/wechat_decrypt_tool/routers/chat_contacts.py
Normal file
@@ -0,0 +1,749 @@
|
||||
import csv
|
||||
import json
|
||||
import re
|
||||
import sqlite3
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Literal, Optional
|
||||
|
||||
from fastapi import APIRouter, HTTPException, Request
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from ..chat_helpers import (
|
||||
_build_avatar_url,
|
||||
_pick_avatar_url,
|
||||
_pick_display_name,
|
||||
_resolve_account_dir,
|
||||
_should_keep_session,
|
||||
)
|
||||
from ..path_fix import PathFixRoute
|
||||
|
||||
router = APIRouter(route_class=PathFixRoute)
|
||||
|
||||
|
||||
_SYSTEM_USERNAMES = {
|
||||
"filehelper",
|
||||
"fmessage",
|
||||
"floatbottle",
|
||||
"medianote",
|
||||
"newsapp",
|
||||
"qmessage",
|
||||
"qqmail",
|
||||
"tmessage",
|
||||
"brandsessionholder",
|
||||
"brandservicesessionholder",
|
||||
"notifymessage",
|
||||
"opencustomerservicemsg",
|
||||
"notification_messages",
|
||||
"userexperience_alarm",
|
||||
}
|
||||
|
||||
_SOURCE_SCENE_LABELS = {
|
||||
1: "通过QQ号添加",
|
||||
3: "通过微信号添加",
|
||||
6: "通过手机号添加",
|
||||
10: "通过名片添加",
|
||||
14: "通过群聊添加",
|
||||
30: "通过扫一扫添加",
|
||||
}
|
||||
|
||||
_COUNTRY_LABELS = {
|
||||
"CN": "中国大陆",
|
||||
}
|
||||
|
||||
|
||||
class ContactTypeFilter(BaseModel):
|
||||
friends: bool = True
|
||||
groups: bool = True
|
||||
officials: bool = True
|
||||
|
||||
|
||||
class ContactExportRequest(BaseModel):
|
||||
account: Optional[str] = Field(None, description="账号目录名(可选,默认使用第一个)")
|
||||
output_dir: str = Field(..., description="导出目录绝对路径")
|
||||
format: str = Field("json", description="导出格式,仅支持 json/csv")
|
||||
include_avatar_link: bool = Field(True, description="是否导出 avatarLink 字段")
|
||||
contact_types: ContactTypeFilter = Field(default_factory=ContactTypeFilter)
|
||||
keyword: Optional[str] = Field(None, description="关键词筛选(可选)")
|
||||
|
||||
|
||||
def _normalize_text(v: Any) -> str:
|
||||
if v is None:
|
||||
return ""
|
||||
return str(v).strip()
|
||||
|
||||
|
||||
def _to_int(v: Any) -> int:
|
||||
try:
|
||||
return int(v or 0)
|
||||
except Exception:
|
||||
return 0
|
||||
|
||||
|
||||
def _to_optional_int(v: Any) -> Optional[int]:
|
||||
if v is None:
|
||||
return None
|
||||
if isinstance(v, bool):
|
||||
return int(v)
|
||||
if isinstance(v, int):
|
||||
return v
|
||||
s = _normalize_text(v)
|
||||
if not s:
|
||||
return None
|
||||
try:
|
||||
return int(s)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def _decode_varint(raw: bytes, offset: int) -> tuple[Optional[int], int]:
value = 0
shift = 0
pos = int(offset)
n = len(raw)
while pos < n:
byte = raw[pos]
pos += 1
value |= (byte & 0x7F) << shift
if (byte & 0x80) == 0:
return value, pos
shift += 7
if shift > 63:
return None, n
return None, n


def _decode_proto_text(raw: bytes) -> str:
if not raw:
return ""
try:
text = raw.decode("utf-8", errors="ignore")
except Exception:
return ""
return re.sub(r"[\x00-\x08\x0b\x0c\x0e-\x1f]", "", text).strip()


def _parse_contact_extra_buffer(extra_buffer: Any) -> dict[str, Any]:
|
||||
out = {
|
||||
"signature": "",
|
||||
"country": "",
|
||||
"province": "",
|
||||
"city": "",
|
||||
"source_scene": None,
|
||||
}
|
||||
if extra_buffer is None:
|
||||
return out
|
||||
|
||||
raw: bytes
|
||||
if isinstance(extra_buffer, memoryview):
|
||||
raw = extra_buffer.tobytes()
|
||||
elif isinstance(extra_buffer, (bytes, bytearray)):
|
||||
raw = bytes(extra_buffer)
|
||||
else:
|
||||
return out
|
||||
|
||||
if not raw:
|
||||
return out
|
||||
|
||||
idx = 0
|
||||
n = len(raw)
|
||||
while idx < n:
|
||||
tag, idx_next = _decode_varint(raw, idx)
|
||||
if tag is None:
|
||||
break
|
||||
idx = idx_next
|
||||
field_no = tag >> 3
|
||||
wire_type = tag & 0x7
|
||||
|
||||
if wire_type == 0:
|
||||
val, idx_next = _decode_varint(raw, idx)
|
||||
if val is None:
|
||||
break
|
||||
idx = idx_next
|
||||
if field_no == 8:
|
||||
out["source_scene"] = int(val)
|
||||
continue
|
||||
|
||||
if wire_type == 2:
|
||||
size, idx_next = _decode_varint(raw, idx)
|
||||
if size is None:
|
||||
break
|
||||
idx = idx_next
|
||||
end = idx + int(size)
|
||||
if end > n:
|
||||
break
|
||||
chunk = raw[idx:end]
|
||||
idx = end
|
||||
|
||||
if field_no in {4, 5, 6, 7}:
|
||||
text = _decode_proto_text(chunk)
|
||||
if field_no == 4:
|
||||
out["signature"] = text
|
||||
elif field_no == 5:
|
||||
out["country"] = text
|
||||
elif field_no == 6:
|
||||
out["province"] = text
|
||||
elif field_no == 7:
|
||||
out["city"] = text
|
||||
continue
|
||||
|
||||
if wire_type == 1:
|
||||
idx += 8
|
||||
continue
|
||||
if wire_type == 5:
|
||||
idx += 4
|
||||
continue
|
||||
|
||||
break
|
||||
|
||||
return out
|
||||
|
||||
|
||||
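`_parse_contact_extra_buffer` above walks the raw protobuf wire format by hand: each varint tag is `(field_no << 3) | wire_type`, length-delimited fields 4 to 7 are kept as signature/country/province/city, varint field 8 is the add-source scene, and fixed 64/32-bit fields are skipped. A hand-built payload that matches that layout (constructed for illustration, not captured from a real contact.db):

```python
# Field 5 (country), wire type 2 (length-delimited): tag = (5 << 3) | 2 = 0x2A, length 2, b"CN".
# Field 8 (source_scene), wire type 0 (varint):      tag = (8 << 3) | 0 = 0x40, value 30.
extra_buffer = bytes([0x2A, 0x02]) + b"CN" + bytes([0x40, 30])

# _parse_contact_extra_buffer(extra_buffer) should yield
#   {"signature": "", "country": "CN", "province": "", "city": "", "source_scene": 30}
# and 30 is then rendered via _SOURCE_SCENE_LABELS as "通过扫一扫添加".
```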
def _country_label(country: str) -> str:
|
||||
c = _normalize_text(country)
|
||||
if not c:
|
||||
return ""
|
||||
return _COUNTRY_LABELS.get(c.upper(), c)
|
||||
|
||||
|
||||
def _source_scene_label(source_scene: Optional[int]) -> str:
|
||||
if source_scene is None:
|
||||
return ""
|
||||
if source_scene in _SOURCE_SCENE_LABELS:
|
||||
return _SOURCE_SCENE_LABELS[source_scene]
|
||||
return f"场景码 {source_scene}"
|
||||
|
||||
|
||||
def _build_region(country: str, province: str, city: str) -> str:
|
||||
parts: list[str] = []
|
||||
country_text = _country_label(country)
|
||||
province_text = _normalize_text(province)
|
||||
city_text = _normalize_text(city)
|
||||
if country_text:
|
||||
parts.append(country_text)
|
||||
if province_text:
|
||||
parts.append(province_text)
|
||||
if city_text:
|
||||
parts.append(city_text)
|
||||
return "·".join(parts)
|
||||
|
||||
|
||||
def _safe_export_part(s: str) -> str:
|
||||
cleaned = re.sub(r"[^0-9A-Za-z._-]+", "_", str(s or "").strip())
|
||||
cleaned = cleaned.strip("._-")
|
||||
return cleaned or "account"
|
||||
|
||||
|
||||
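`_safe_export_part` keeps only ASCII letters, digits, dot, underscore and hyphen, so the account name is always safe inside an export file name; names that are stripped entirely collapse to "account". For example:

```python
# _safe_export_part("wxid_abc123")   -> "wxid_abc123"
# _safe_export_part("张三 (test)!")   -> "test"      (disallowed runs become "_" and are trimmed)
# _safe_export_part("!!!")           -> "account"   (nothing left after cleaning)
```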
def _is_valid_contact_username(username: str) -> bool:
|
||||
u = _normalize_text(username)
|
||||
if not u:
|
||||
return False
|
||||
if u in _SYSTEM_USERNAMES:
|
||||
return False
|
||||
if u.startswith("fake_"):
|
||||
return False
|
||||
if not _should_keep_session(u, include_official=True) and not u.startswith("gh_") and u != "weixin":
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _get_table_columns(conn: sqlite3.Connection, table: str) -> set[str]:
|
||||
try:
|
||||
rows = conn.execute(f"PRAGMA table_info({table})").fetchall()
|
||||
except Exception:
|
||||
return set()
|
||||
|
||||
out: set[str] = set()
|
||||
for row in rows:
|
||||
try:
|
||||
name = _normalize_text(row["name"] if "name" in row.keys() else row[1]).lower()
|
||||
except Exception:
|
||||
continue
|
||||
if name:
|
||||
out.add(name)
|
||||
return out
|
||||
|
||||
|
||||
def _build_contact_select_sql(table: str, columns: set[str]) -> Optional[str]:
if "username" not in columns:
return None

specs: list[tuple[str, str, str]] = [
("username", "username", "''"),
("remark", "remark", "''"),
("nick_name", "nick_name", "''"),
("alias", "alias", "''"),
("local_type", "local_type", "0"),
("verify_flag", "verify_flag", "0"),
("big_head_url", "big_head_url", "''"),
("small_head_url", "small_head_url", "''"),
("extra_buffer", "extra_buffer", "x''"),
]

select_parts: list[str] = []
for key, alias, fallback in specs:
if key in columns:
select_parts.append(key)
else:
select_parts.append(f"{fallback} AS {alias}")
return f"SELECT {', '.join(select_parts)} FROM {table}"


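`_build_contact_select_sql` always selects the same nine columns, substituting typed fallbacks (`''`, `0`, `x''`) for columns a particular contact.db schema lacks, so row handling downstream never needs per-schema branches. If a `stranger` table exposed only `username` and `nick_name`, the generated statement would be (reconstructed from the spec list above, not captured from a live database):

```python
sql = (
    "SELECT username, '' AS remark, nick_name, '' AS alias, "
    "0 AS local_type, 0 AS verify_flag, '' AS big_head_url, "
    "'' AS small_head_url, x'' AS extra_buffer FROM stranger"
)
```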
def _load_contact_rows_map(contact_db_path: Path) -> dict[str, dict[str, Any]]:
|
||||
out: dict[str, dict[str, Any]] = {}
|
||||
if not contact_db_path.exists():
|
||||
return out
|
||||
|
||||
conn = sqlite3.connect(str(contact_db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
try:
|
||||
def read_rows(table: str) -> list[sqlite3.Row]:
|
||||
columns = _get_table_columns(conn, table)
|
||||
sql = _build_contact_select_sql(table, columns)
|
||||
if not sql:
|
||||
return []
|
||||
try:
|
||||
return conn.execute(sql).fetchall()
|
||||
except Exception:
|
||||
return []
|
||||
return []
|
||||
|
||||
for table in ("contact", "stranger"):
|
||||
rows = read_rows(table)
|
||||
for row in rows:
|
||||
username = _normalize_text(row["username"] if "username" in row.keys() else "")
|
||||
if (not username) or (username in out):
|
||||
continue
|
||||
|
||||
extra_info = _parse_contact_extra_buffer(
|
||||
row["extra_buffer"] if "extra_buffer" in row.keys() else b""
|
||||
)
|
||||
out[username] = {
|
||||
"username": username,
|
||||
"remark": _normalize_text(row["remark"] if "remark" in row.keys() else ""),
|
||||
"nick_name": _normalize_text(row["nick_name"] if "nick_name" in row.keys() else ""),
|
||||
"alias": _normalize_text(row["alias"] if "alias" in row.keys() else ""),
|
||||
"local_type": _to_int(row["local_type"] if "local_type" in row.keys() else 0),
|
||||
"verify_flag": _to_int(row["verify_flag"] if "verify_flag" in row.keys() else 0),
|
||||
"big_head_url": _normalize_text(row["big_head_url"] if "big_head_url" in row.keys() else ""),
|
||||
"small_head_url": _normalize_text(row["small_head_url"] if "small_head_url" in row.keys() else ""),
|
||||
"country": _normalize_text(extra_info.get("country")),
|
||||
"province": _normalize_text(extra_info.get("province")),
|
||||
"city": _normalize_text(extra_info.get("city")),
|
||||
"source_scene": _to_optional_int(extra_info.get("source_scene")),
|
||||
}
|
||||
return out
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _load_session_sort_timestamps(session_db_path: Path) -> dict[str, int]:
|
||||
out: dict[str, int] = {}
|
||||
if not session_db_path.exists():
|
||||
return out
|
||||
|
||||
conn = sqlite3.connect(str(session_db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
try:
|
||||
rows: list[sqlite3.Row] = []
|
||||
queries = [
|
||||
"SELECT username, COALESCE(sort_timestamp, 0) AS ts FROM SessionTable",
|
||||
"SELECT username, COALESCE(last_timestamp, 0) AS ts FROM SessionTable",
|
||||
]
|
||||
for sql in queries:
|
||||
try:
|
||||
rows = conn.execute(sql).fetchall()
|
||||
break
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
for row in rows:
|
||||
username = _normalize_text(row["username"] if "username" in row.keys() else "")
|
||||
if not username:
|
||||
continue
|
||||
ts = _to_int(row["ts"] if "ts" in row.keys() else 0)
|
||||
prev = out.get(username, 0)
|
||||
if ts > prev:
|
||||
out[username] = ts
|
||||
return out
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _load_session_group_usernames(session_db_path: Path) -> set[str]:
|
||||
out: set[str] = set()
|
||||
if not session_db_path.exists():
|
||||
return out
|
||||
|
||||
conn = sqlite3.connect(str(session_db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
try:
|
||||
queries = [
|
||||
"SELECT username FROM SessionTable",
|
||||
"SELECT username FROM sessiontable",
|
||||
]
|
||||
for sql in queries:
|
||||
try:
|
||||
rows = conn.execute(sql).fetchall()
|
||||
except Exception:
|
||||
continue
|
||||
for row in rows:
|
||||
username = _normalize_text(row["username"] if "username" in row.keys() else "")
|
||||
if username and ("@chatroom" in username):
|
||||
out.add(username)
|
||||
return out
|
||||
return out
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _infer_contact_type(username: str, row: dict[str, Any]) -> Optional[str]:
if not username:
return None

if "@chatroom" in username:
return "group"

verify_flag = _to_int(row.get("verify_flag"))
if username.startswith("gh_") or verify_flag != 0:
return "official"

local_type = _to_int(row.get("local_type"))
if local_type == 1:
return "friend"

return None


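`_infer_contact_type` classifies contacts from cheap structural signals, first match wins: chatroom ids are groups, a `gh_` prefix or non-zero `verify_flag` marks official accounts, and only `local_type == 1` rows count as friends; anything else is dropped from listings and exports. Illustrative inputs and outcomes:

```python
examples = {
    "1234567@chatroom": "group",     # "@chatroom" suffix
    "gh_abcdef":        "official",  # gh_ prefix (or any row with verify_flag != 0)
    "wxid_friend":      "friend",    # assuming its row has local_type == 1
    "wxid_stranger":    None,        # local_type != 1 and no other signal -> excluded
}
```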
def _matches_keyword(contact: dict[str, Any], keyword: str) -> bool:
|
||||
kw = _normalize_text(keyword).lower()
|
||||
if not kw:
|
||||
return True
|
||||
|
||||
fields = [
|
||||
contact.get("username", ""),
|
||||
contact.get("displayName", ""),
|
||||
contact.get("remark", ""),
|
||||
contact.get("nickname", ""),
|
||||
contact.get("alias", ""),
|
||||
contact.get("region", ""),
|
||||
contact.get("source", ""),
|
||||
contact.get("country", ""),
|
||||
contact.get("province", ""),
|
||||
contact.get("city", ""),
|
||||
]
|
||||
for field in fields:
|
||||
if kw in _normalize_text(field).lower():
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _collect_contacts_for_account(
|
||||
*,
|
||||
account_dir: Path,
|
||||
base_url: str,
|
||||
keyword: Optional[str],
|
||||
include_friends: bool,
|
||||
include_groups: bool,
|
||||
include_officials: bool,
|
||||
) -> list[dict[str, Any]]:
|
||||
if not (include_friends or include_groups or include_officials):
|
||||
return []
|
||||
|
||||
contact_db_path = account_dir / "contact.db"
|
||||
session_db_path = account_dir / "session.db"
|
||||
contact_rows = _load_contact_rows_map(contact_db_path)
|
||||
session_ts_map = _load_session_sort_timestamps(session_db_path)
|
||||
session_group_usernames = _load_session_group_usernames(session_db_path)
|
||||
|
||||
contacts: list[dict[str, Any]] = []
|
||||
for username, row in contact_rows.items():
|
||||
if not _is_valid_contact_username(username):
|
||||
continue
|
||||
|
||||
contact_type = _infer_contact_type(username, row)
|
||||
if contact_type is None:
|
||||
continue
|
||||
if contact_type == "friend" and not include_friends:
|
||||
continue
|
||||
if contact_type == "group" and not include_groups:
|
||||
continue
|
||||
if contact_type == "official" and not include_officials:
|
||||
continue
|
||||
|
||||
display_name = _pick_display_name(row, username)
|
||||
if not display_name:
|
||||
display_name = username
|
||||
|
||||
avatar_link = _normalize_text(_pick_avatar_url(row) or "")
|
||||
avatar = base_url + _build_avatar_url(account_dir.name, username)
|
||||
country = _normalize_text(row.get("country"))
|
||||
province = _normalize_text(row.get("province"))
|
||||
city = _normalize_text(row.get("city"))
|
||||
source_scene = _to_optional_int(row.get("source_scene"))
|
||||
|
||||
item = {
|
||||
"username": username,
|
||||
"displayName": display_name,
|
||||
"remark": _normalize_text(row.get("remark")),
|
||||
"nickname": _normalize_text(row.get("nick_name")),
|
||||
"alias": _normalize_text(row.get("alias")),
|
||||
"type": contact_type,
|
||||
"country": country,
|
||||
"province": province,
|
||||
"city": city,
|
||||
"region": _build_region(country, province, city),
|
||||
"sourceScene": source_scene,
|
||||
"source": _source_scene_label(source_scene),
|
||||
"avatar": avatar,
|
||||
"avatarLink": avatar_link,
|
||||
"_sortTs": _to_int(session_ts_map.get(username, 0)),
|
||||
}
|
||||
|
||||
if not _matches_keyword(item, keyword or ""):
|
||||
continue
|
||||
contacts.append(item)
|
||||
|
||||
if include_groups:
|
||||
for username in session_group_usernames:
|
||||
if username in contact_rows:
|
||||
continue
|
||||
if not _is_valid_contact_username(username):
|
||||
continue
|
||||
|
||||
avatar_link = ""
|
||||
avatar = base_url + _build_avatar_url(account_dir.name, username)
|
||||
|
||||
item = {
|
||||
"username": username,
|
||||
"displayName": username,
|
||||
"remark": "",
|
||||
"nickname": "",
|
||||
"alias": "",
|
||||
"type": "group",
|
||||
"country": "",
|
||||
"province": "",
|
||||
"city": "",
|
||||
"region": "",
|
||||
"sourceScene": None,
|
||||
"source": "",
|
||||
"avatar": avatar,
|
||||
"avatarLink": avatar_link,
|
||||
"_sortTs": _to_int(session_ts_map.get(username, 0)),
|
||||
}
|
||||
|
||||
if not _matches_keyword(item, keyword or ""):
|
||||
continue
|
||||
contacts.append(item)
|
||||
|
||||
contacts.sort(
|
||||
key=lambda x: (
|
||||
-_to_int(x.get("_sortTs", 0)),
|
||||
_normalize_text(x.get("displayName", "")).lower(),
|
||||
_normalize_text(x.get("username", "")).lower(),
|
||||
)
|
||||
)
|
||||
for item in contacts:
|
||||
item.pop("_sortTs", None)
|
||||
return contacts
|
||||
|
||||
|
||||
def _build_counts(contacts: list[dict[str, Any]]) -> dict[str, int]:
|
||||
counts = {
|
||||
"friends": 0,
|
||||
"groups": 0,
|
||||
"officials": 0,
|
||||
"total": 0,
|
||||
}
|
||||
for item in contacts:
|
||||
t = _normalize_text(item.get("type"))
|
||||
if t == "friend":
|
||||
counts["friends"] += 1
|
||||
elif t == "group":
|
||||
counts["groups"] += 1
|
||||
elif t == "official":
|
||||
counts["officials"] += 1
|
||||
counts["total"] = len(contacts)
|
||||
return counts
|
||||
|
||||
|
||||
def _build_export_contacts(
|
||||
contacts: list[dict[str, Any]],
|
||||
*,
|
||||
include_avatar_link: bool,
|
||||
) -> list[dict[str, Any]]:
|
||||
out: list[dict[str, Any]] = []
|
||||
for item in contacts:
|
||||
row = {
|
||||
"username": _normalize_text(item.get("username")),
|
||||
"displayName": _normalize_text(item.get("displayName")),
|
||||
"remark": _normalize_text(item.get("remark")),
|
||||
"nickname": _normalize_text(item.get("nickname")),
|
||||
"alias": _normalize_text(item.get("alias")),
|
||||
"type": _normalize_text(item.get("type")),
|
||||
"region": _normalize_text(item.get("region")),
|
||||
"country": _normalize_text(item.get("country")),
|
||||
"province": _normalize_text(item.get("province")),
|
||||
"city": _normalize_text(item.get("city")),
|
||||
"source": _normalize_text(item.get("source")),
|
||||
"sourceScene": _to_optional_int(item.get("sourceScene")),
|
||||
}
|
||||
if include_avatar_link:
|
||||
row["avatarLink"] = _normalize_text(item.get("avatarLink"))
|
||||
out.append(row)
|
||||
return out
|
||||
|
||||
|
||||
def _write_json_export(
|
||||
output_path: Path,
|
||||
*,
|
||||
account: str,
|
||||
contacts: list[dict[str, Any]],
|
||||
include_avatar_link: bool,
|
||||
keyword: str,
|
||||
contact_types: ContactTypeFilter,
|
||||
) -> None:
|
||||
payload = {
|
||||
"exportedAt": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
"account": account,
|
||||
"count": len(contacts),
|
||||
"filters": {
|
||||
"keyword": keyword,
|
||||
"contactTypes": {
|
||||
"friends": bool(contact_types.friends),
|
||||
"groups": bool(contact_types.groups),
|
||||
"officials": bool(contact_types.officials),
|
||||
},
|
||||
"includeAvatarLink": bool(include_avatar_link),
|
||||
},
|
||||
"contacts": contacts,
|
||||
}
|
||||
output_path.write_text(json.dumps(payload, ensure_ascii=False, indent=2), encoding="utf-8")
|
||||
|
||||
|
||||
def _write_csv_export(
|
||||
output_path: Path,
|
||||
*,
|
||||
contacts: list[dict[str, Any]],
|
||||
include_avatar_link: bool,
|
||||
) -> None:
|
||||
columns: list[tuple[str, str]] = [
|
||||
("username", "用户名"),
|
||||
("displayName", "显示名称"),
|
||||
("remark", "备注"),
|
||||
("nickname", "昵称"),
|
||||
("alias", "微信号"),
|
||||
("type", "类型"),
|
||||
("region", "地区"),
|
||||
("country", "国家/地区码"),
|
||||
("province", "省份"),
|
||||
("city", "城市"),
|
||||
("source", "来源"),
|
||||
("sourceScene", "来源场景码"),
|
||||
]
|
||||
if include_avatar_link:
|
||||
columns.append(("avatarLink", "头像链接"))
|
||||
|
||||
with output_path.open("w", encoding="utf-8-sig", newline="") as f:
|
||||
writer = csv.writer(f)
|
||||
writer.writerow([label for _, label in columns])
|
||||
for item in contacts:
|
||||
writer.writerow([_normalize_text(item.get(key, "")) for key, _ in columns])
|
||||
|
||||
|
||||
@router.get("/api/chat/contacts", summary="获取联系人列表")
|
||||
def list_chat_contacts(
|
||||
request: Request,
|
||||
account: Optional[str] = None,
|
||||
keyword: Optional[str] = None,
|
||||
include_friends: bool = True,
|
||||
include_groups: bool = True,
|
||||
include_officials: bool = True,
|
||||
):
|
||||
account_dir = _resolve_account_dir(account)
|
||||
base_url = str(request.base_url).rstrip("/")
|
||||
|
||||
contacts = _collect_contacts_for_account(
|
||||
account_dir=account_dir,
|
||||
base_url=base_url,
|
||||
keyword=keyword,
|
||||
include_friends=bool(include_friends),
|
||||
include_groups=bool(include_groups),
|
||||
include_officials=bool(include_officials),
|
||||
)
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"account": account_dir.name,
|
||||
"total": len(contacts),
|
||||
"counts": _build_counts(contacts),
|
||||
"contacts": contacts,
|
||||
}
|
||||
|
||||
|
||||
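A request sketch for the listing endpoint above; the host and port are assumptions about the local backend, while the query parameters and response keys mirror the handler:

```python
import requests

resp = requests.get(
    "http://127.0.0.1:5030/api/chat/contacts",   # hypothetical local address
    params={
        "account": "wxid_demo",        # optional; defaults to the first account directory
        "keyword": "张",               # optional keyword filter
        "include_friends": "true",
        "include_groups": "false",
        "include_officials": "false",
    },
    timeout=10,
)
data = resp.json()
print(data["counts"], len(data["contacts"]))
```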
@router.post("/api/chat/contacts/export", summary="导出联系人")
|
||||
def export_chat_contacts(request: Request, req: ContactExportRequest):
|
||||
account_dir = _resolve_account_dir(req.account)
|
||||
|
||||
output_dir_raw = _normalize_text(req.output_dir)
|
||||
if not output_dir_raw:
|
||||
raise HTTPException(status_code=400, detail="output_dir is required.")
|
||||
|
||||
output_dir = Path(output_dir_raw).expanduser()
|
||||
if not output_dir.is_absolute():
|
||||
raise HTTPException(status_code=400, detail="output_dir must be an absolute path.")
|
||||
|
||||
try:
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=f"Failed to prepare output_dir: {e}")
|
||||
|
||||
base_url = str(request.base_url).rstrip("/")
|
||||
contacts = _collect_contacts_for_account(
|
||||
account_dir=account_dir,
|
||||
base_url=base_url,
|
||||
keyword=req.keyword,
|
||||
include_friends=bool(req.contact_types.friends),
|
||||
include_groups=bool(req.contact_types.groups),
|
||||
include_officials=bool(req.contact_types.officials),
|
||||
)
|
||||
|
||||
export_contacts = _build_export_contacts(
|
||||
contacts,
|
||||
include_avatar_link=bool(req.include_avatar_link),
|
||||
)
|
||||
|
||||
fmt = _normalize_text(req.format).lower()
|
||||
if fmt not in {"json", "csv"}:
|
||||
raise HTTPException(status_code=400, detail="Unsupported format, use 'json' or 'csv'.")
|
||||
|
||||
ts = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
safe_account = _safe_export_part(account_dir.name)
|
||||
output_path = output_dir / f"contacts_{safe_account}_{ts}.{fmt}"
|
||||
|
||||
try:
|
||||
if fmt == "json":
|
||||
_write_json_export(
|
||||
output_path,
|
||||
account=account_dir.name,
|
||||
contacts=export_contacts,
|
||||
include_avatar_link=bool(req.include_avatar_link),
|
||||
keyword=_normalize_text(req.keyword),
|
||||
contact_types=req.contact_types,
|
||||
)
|
||||
else:
|
||||
_write_csv_export(
|
||||
output_path,
|
||||
contacts=export_contacts,
|
||||
include_avatar_link=bool(req.include_avatar_link),
|
||||
)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to export contacts: {e}")
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"account": account_dir.name,
|
||||
"format": fmt,
|
||||
"outputPath": str(output_path),
|
||||
"count": len(export_contacts),
|
||||
}
|
||||
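The matching export call; the body fields come from `ContactExportRequest` above, and `output_dir` must be absolute or the endpoint answers 400 (host and port are again assumptions):

```python
import requests

resp = requests.post(
    "http://127.0.0.1:5030/api/chat/contacts/export",   # hypothetical local address
    json={
        "account": "wxid_demo",
        "output_dir": r"D:\wx_exports",                  # absolute path required
        "format": "csv",                                 # "json" or "csv"
        "include_avatar_link": True,
        "contact_types": {"friends": True, "groups": True, "officials": False},
        "keyword": None,
    },
    timeout=30,
)
print(resp.json()["outputPath"])   # contacts_<account>_<timestamp>.csv
```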
@@ -27,15 +27,16 @@ class ChatExportCreateRequest(BaseModel):
end_time: Optional[int] = Field(None, description="结束时间(Unix 秒,含)")
include_hidden: bool = Field(False, description="是否包含隐藏会话(scope!=selected 时)")
include_official: bool = Field(False, description="是否包含公众号/官方账号会话(scope!=selected 时)")
include_media: bool = Field(True, description="是否打包离线媒体(图片/表情/视频/语音/文件)")
include_media: bool = Field(True, description="是否允许打包离线媒体(最终仍受 message_types 与 privacy_mode 约束)")
media_kinds: list[MediaKind] = Field(
default_factory=lambda: ["image", "emoji", "video", "video_thumb", "voice", "file"],
description="打包的媒体类型",
description="允许打包的媒体类型(最终仍受 message_types 勾选约束)",
)
message_types: list[MessageType] = Field(
default_factory=list,
description="导出消息类型(renderType)过滤:为空=导出全部消息;可多选(如仅 voice / 仅 transfer / 仅 redPacket 等)",
description="导出消息类型(renderType)过滤:为空=导出全部类型;不为空时,仅导出勾选类型",
)
output_dir: Optional[str] = Field(None, description="导出目录绝对路径(可选;不填时使用默认目录)")
allow_process_key_extract: bool = Field(
False,
description="预留字段:本项目不从微信进程提取媒体密钥,请使用 wx_key 获取并保存/批量解密",
@@ -61,6 +62,7 @@ async def create_chat_export(req: ChatExportCreateRequest):
include_media=req.include_media,
media_kinds=req.media_kinds,
message_types=req.message_types,
output_dir=req.output_dir,
allow_process_key_extract=req.allow_process_key_extract,
privacy_mode=req.privacy_mode,
file_name=req.file_name,

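Under the reworded semantics above, `message_types` is the primary filter and `include_media`/`media_kinds` only say what is allowed to be packed; a voice-only export therefore never packs images even with the default `media_kinds`. A request body sketch (field names from the model above, values illustrative):

```python
export_request = {
    "message_types": ["voice"],          # only voice messages are exported
    "include_media": True,               # media packing is allowed ...
    "media_kinds": ["image", "emoji", "video", "video_thumb", "voice", "file"],
    # ... but _resolve_effective_media_kinds narrows the kinds to ["voice"];
    # privacy_mode=True would disable media packing entirely.
    "privacy_mode": False,
    "output_dir": None,                  # None -> default exports/<account> directory
}
```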
@@ -8,7 +8,7 @@ import os
|
||||
import sqlite3
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from typing import Any, Optional
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import requests
|
||||
@@ -16,6 +16,21 @@ from fastapi import APIRouter, HTTPException
|
||||
from fastapi.responses import FileResponse, Response
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from ..avatar_cache import (
|
||||
AVATAR_CACHE_TTL_SECONDS,
|
||||
avatar_cache_entry_file_exists,
|
||||
avatar_cache_entry_is_fresh,
|
||||
build_avatar_cache_response_headers,
|
||||
cache_key_for_avatar_user,
|
||||
cache_key_for_avatar_url,
|
||||
get_avatar_cache_url_entry,
|
||||
get_avatar_cache_user_entry,
|
||||
is_avatar_cache_enabled,
|
||||
normalize_avatar_source_url,
|
||||
touch_avatar_cache_entry,
|
||||
upsert_avatar_cache_entry,
|
||||
write_avatar_cache_payload,
|
||||
)
|
||||
from ..logging_config import get_logger
|
||||
from ..media_helpers import (
|
||||
_convert_silk_to_wav,
|
||||
@@ -43,14 +58,56 @@ from ..media_helpers import (
|
||||
_try_find_decrypted_resource,
|
||||
_try_strip_media_prefix,
|
||||
)
|
||||
from ..chat_helpers import _extract_md5_from_packed_info
|
||||
from ..chat_helpers import _extract_md5_from_packed_info, _load_contact_rows, _pick_avatar_url
|
||||
from ..path_fix import PathFixRoute
|
||||
from ..wcdb_realtime import WCDB_REALTIME, get_avatar_urls as _wcdb_get_avatar_urls
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
router = APIRouter(route_class=PathFixRoute)
|
||||
|
||||
|
||||
def _resolve_avatar_remote_url(*, account_dir: Path, username: str) -> str:
|
||||
u = str(username or "").strip()
|
||||
if not u:
|
||||
return ""
|
||||
|
||||
# 1) contact.db first (cheap local lookup)
|
||||
try:
|
||||
rows = _load_contact_rows(account_dir / "contact.db", [u])
|
||||
row = rows.get(u)
|
||||
raw = str(_pick_avatar_url(row) or "").strip()
|
||||
if raw.lower().startswith(("http://", "https://")):
|
||||
return normalize_avatar_source_url(raw)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# 2) WCDB fallback (more complete on enterprise/openim IDs)
|
||||
try:
|
||||
wcdb_conn = WCDB_REALTIME.ensure_connected(account_dir)
|
||||
with wcdb_conn.lock:
|
||||
mp = _wcdb_get_avatar_urls(wcdb_conn.handle, [u])
|
||||
wa = str(mp.get(u) or "").strip()
|
||||
if wa.lower().startswith(("http://", "https://")):
|
||||
return normalize_avatar_source_url(wa)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
def _parse_304_headers(headers: Any) -> tuple[str, str]:
|
||||
try:
|
||||
etag = str((headers or {}).get("ETag") or "").strip()
|
||||
except Exception:
|
||||
etag = ""
|
||||
try:
|
||||
last_modified = str((headers or {}).get("Last-Modified") or "").strip()
|
||||
except Exception:
|
||||
last_modified = ""
|
||||
return etag, last_modified
|
||||
|
||||
|
||||
@lru_cache(maxsize=4096)
|
||||
def _fast_probe_image_path_in_chat_attach(
|
||||
*,
|
||||
@@ -267,27 +324,309 @@ async def get_chat_avatar(username: str, account: Optional[str] = None):
|
||||
if not username:
|
||||
raise HTTPException(status_code=400, detail="Missing username.")
|
||||
account_dir = _resolve_account_dir(account)
|
||||
account_name = str(account_dir.name or "").strip()
|
||||
user_key = str(username or "").strip()
|
||||
|
||||
# 1) Try on-disk cache first (fast path)
|
||||
user_entry = None
|
||||
cached_file = None
|
||||
if is_avatar_cache_enabled() and account_name and user_key:
|
||||
try:
|
||||
user_entry = get_avatar_cache_user_entry(account_name, user_key)
|
||||
cached_file = avatar_cache_entry_file_exists(account_name, user_entry)
|
||||
if cached_file is not None:
|
||||
logger.info(f"[avatar_cache_hit] kind=user account={account_name} username={user_key}")
|
||||
except Exception as e:
|
||||
logger.warning(f"[avatar_cache_error] read user cache failed account={account_name} username={user_key} err={e}")
|
||||
|
||||
head_image_db_path = account_dir / "head_image.db"
|
||||
if not head_image_db_path.exists():
|
||||
# No local head_image.db: allow fallback from cached/remote URL path.
|
||||
if cached_file is not None and user_entry:
|
||||
headers = build_avatar_cache_response_headers(user_entry)
|
||||
return FileResponse(
|
||||
str(cached_file),
|
||||
media_type=str(user_entry.get("media_type") or "application/octet-stream"),
|
||||
headers=headers,
|
||||
)
|
||||
raise HTTPException(status_code=404, detail="head_image.db not found.")
|
||||
|
||||
conn = sqlite3.connect(str(head_image_db_path))
|
||||
try:
|
||||
row = conn.execute(
|
||||
"SELECT image_buffer FROM head_image WHERE username = ? ORDER BY update_time DESC LIMIT 1",
|
||||
meta = conn.execute(
|
||||
"SELECT md5, update_time FROM head_image WHERE username = ? ORDER BY update_time DESC LIMIT 1",
|
||||
(username,),
|
||||
).fetchone()
|
||||
if meta and meta[0] is not None:
|
||||
db_md5 = str(meta[0] or "").strip().lower()
|
||||
try:
|
||||
db_update_time = int(meta[1] or 0)
|
||||
except Exception:
|
||||
db_update_time = 0
|
||||
|
||||
# Cache still valid against head_image metadata.
|
||||
if cached_file is not None and user_entry:
|
||||
cached_md5 = str(user_entry.get("source_md5") or "").strip().lower()
|
||||
try:
|
||||
cached_update = int(user_entry.get("source_update_time") or 0)
|
||||
except Exception:
|
||||
cached_update = 0
|
||||
if cached_md5 == db_md5 and cached_update == db_update_time:
|
||||
touch_avatar_cache_entry(account_name, str(user_entry.get("cache_key") or ""))
|
||||
headers = build_avatar_cache_response_headers(user_entry)
|
||||
return FileResponse(
|
||||
str(cached_file),
|
||||
media_type=str(user_entry.get("media_type") or "application/octet-stream"),
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
# Refresh from blob (changed or first-load)
|
||||
row = conn.execute(
|
||||
"SELECT image_buffer FROM head_image WHERE username = ? ORDER BY update_time DESC LIMIT 1",
|
||||
(username,),
|
||||
).fetchone()
|
||||
if row and row[0] is not None:
|
||||
data = bytes(row[0]) if isinstance(row[0], (memoryview, bytearray)) else row[0]
|
||||
if not isinstance(data, (bytes, bytearray)):
|
||||
data = bytes(data)
|
||||
if data:
|
||||
media_type = _detect_image_media_type(data)
|
||||
media_type = media_type if media_type.startswith("image/") else "application/octet-stream"
|
||||
entry, out_path = write_avatar_cache_payload(
|
||||
account_name,
|
||||
source_kind="user",
|
||||
username=user_key,
|
||||
payload=bytes(data),
|
||||
media_type=media_type,
|
||||
source_md5=db_md5,
|
||||
source_update_time=db_update_time,
|
||||
ttl_seconds=AVATAR_CACHE_TTL_SECONDS,
|
||||
)
|
||||
if entry and out_path:
|
||||
logger.info(
|
||||
f"[avatar_cache_download] kind=user account={account_name} username={user_key} src=head_image"
|
||||
)
|
||||
headers = build_avatar_cache_response_headers(entry)
|
||||
return FileResponse(str(out_path), media_type=media_type, headers=headers)
|
||||
|
||||
# cache write failed: fallback to response bytes
|
||||
logger.warning(
|
||||
f"[avatar_cache_error] kind=user account={account_name} username={user_key} action=write_fallback"
|
||||
)
|
||||
return Response(content=bytes(data), media_type=media_type)
|
||||
|
||||
# meta not found (no local avatar blob)
|
||||
row = None
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
if not row or row[0] is None:
|
||||
raise HTTPException(status_code=404, detail="Avatar not found.")
|
||||
# 2) Fallback: remote avatar URL (contact/WCDB), cache by URL.
|
||||
remote_url = _resolve_avatar_remote_url(account_dir=account_dir, username=user_key)
|
||||
if remote_url and is_avatar_cache_enabled():
|
||||
url_entry = get_avatar_cache_url_entry(account_name, remote_url)
|
||||
url_file = avatar_cache_entry_file_exists(account_name, url_entry)
|
||||
if url_entry and url_file and avatar_cache_entry_is_fresh(url_entry):
|
||||
logger.info(f"[avatar_cache_hit] kind=url account={account_name} username={user_key}")
|
||||
touch_avatar_cache_entry(account_name, str(url_entry.get("cache_key") or ""))
|
||||
# Keep user-key mapping aligned, so next user lookup is direct.
|
||||
try:
|
||||
upsert_avatar_cache_entry(
|
||||
account_name,
|
||||
cache_key=cache_key_for_avatar_user(user_key),
|
||||
source_kind="user",
|
||||
username=user_key,
|
||||
source_url=remote_url,
|
||||
source_md5=str(url_entry.get("source_md5") or ""),
|
||||
source_update_time=int(url_entry.get("source_update_time") or 0),
|
||||
rel_path=str(url_entry.get("rel_path") or ""),
|
||||
media_type=str(url_entry.get("media_type") or "application/octet-stream"),
|
||||
size_bytes=int(url_entry.get("size_bytes") or 0),
|
||||
etag=str(url_entry.get("etag") or ""),
|
||||
last_modified=str(url_entry.get("last_modified") or ""),
|
||||
fetched_at=int(url_entry.get("fetched_at") or 0),
|
||||
checked_at=int(url_entry.get("checked_at") or 0),
|
||||
expires_at=int(url_entry.get("expires_at") or 0),
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
headers = build_avatar_cache_response_headers(url_entry)
|
||||
return FileResponse(
|
||||
str(url_file),
|
||||
media_type=str(url_entry.get("media_type") or "application/octet-stream"),
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
data = bytes(row[0]) if isinstance(row[0], (memoryview, bytearray)) else row[0]
|
||||
if not isinstance(data, (bytes, bytearray)):
|
||||
data = bytes(data)
|
||||
media_type = _detect_image_media_type(data)
|
||||
return Response(content=data, media_type=media_type)
|
||||
# Revalidate / download remote avatar
|
||||
def _download_remote_avatar(
|
||||
source_url: str,
|
||||
*,
|
||||
etag: str,
|
||||
last_modified: str,
|
||||
) -> tuple[bytes, str, str, str, bool]:
|
||||
base_headers = {
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120 Safari/537.36",
|
||||
"Accept": "image/avif,image/webp,image/apng,image/*,*/*;q=0.8",
|
||||
}
|
||||
|
||||
header_variants = [
|
||||
{
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 MicroMessenger/7.0.20.1781(0x6700143B) WindowsWechat(0x63090719) XWEB/8351",
|
||||
"Accept": "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8",
|
||||
"Accept-Language": "zh-CN,zh;q=0.9",
|
||||
"Referer": "https://servicewechat.com/",
|
||||
"Origin": "https://servicewechat.com",
|
||||
"Range": "bytes=0-",
|
||||
},
|
||||
{"Referer": "https://wx.qq.com/", "Origin": "https://wx.qq.com"},
|
||||
{"Referer": "https://mp.weixin.qq.com/", "Origin": "https://mp.weixin.qq.com"},
|
||||
{"Referer": "https://www.baidu.com/", "Origin": "https://www.baidu.com"},
|
||||
{},
|
||||
]
|
||||
|
||||
last_err: Exception | None = None
|
||||
for extra in header_variants:
|
||||
headers = dict(base_headers)
|
||||
headers.update(extra)
|
||||
if etag:
|
||||
headers["If-None-Match"] = etag
|
||||
if last_modified:
|
||||
headers["If-Modified-Since"] = last_modified
|
||||
|
||||
r = requests.get(source_url, headers=headers, timeout=20, stream=True)
|
||||
try:
|
||||
if r.status_code == 304:
|
||||
e2, lm2 = _parse_304_headers(r.headers)
|
||||
return b"", "", (e2 or etag), (lm2 or last_modified), True
|
||||
r.raise_for_status()
|
||||
content_type = str(r.headers.get("Content-Type") or "").strip()
|
||||
e2, lm2 = _parse_304_headers(r.headers)
|
||||
max_bytes = 10 * 1024 * 1024
|
||||
chunks: list[bytes] = []
|
||||
total = 0
|
||||
for ch in r.iter_content(chunk_size=64 * 1024):
|
||||
if not ch:
|
||||
continue
|
||||
chunks.append(ch)
|
||||
total += len(ch)
|
||||
if total > max_bytes:
|
||||
raise HTTPException(status_code=400, detail="Avatar too large (>10MB).")
|
||||
return b"".join(chunks), content_type, e2, lm2, False
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
last_err = e
|
||||
finally:
|
||||
try:
|
||||
r.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
raise last_err or RuntimeError("avatar remote download failed")
|
||||
|
||||
etag0 = str((url_entry or {}).get("etag") or "").strip()
|
||||
lm0 = str((url_entry or {}).get("last_modified") or "").strip()
|
||||
try:
|
||||
payload, ct, etag_new, lm_new, not_modified = await asyncio.to_thread(
|
||||
_download_remote_avatar,
|
||||
remote_url,
|
||||
etag=etag0,
|
||||
last_modified=lm0,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"[avatar_cache_error] kind=url account={account_name} username={user_key} err={e}")
|
||||
if url_entry and url_file:
|
||||
headers = build_avatar_cache_response_headers(url_entry)
|
||||
return FileResponse(
|
||||
str(url_file),
|
||||
media_type=str(url_entry.get("media_type") or "application/octet-stream"),
|
||||
headers=headers,
|
||||
)
|
||||
raise HTTPException(status_code=404, detail="Avatar not found.")
|
||||
|
||||
if not_modified and url_entry and url_file:
|
||||
touch_avatar_cache_entry(account_name, cache_key_for_avatar_url(remote_url))
|
||||
if etag_new or lm_new:
|
||||
try:
|
||||
upsert_avatar_cache_entry(
|
||||
account_name,
|
||||
cache_key=cache_key_for_avatar_url(remote_url),
|
||||
source_kind="url",
|
||||
username=user_key,
|
||||
source_url=remote_url,
|
||||
source_md5=str(url_entry.get("source_md5") or ""),
|
||||
source_update_time=int(url_entry.get("source_update_time") or 0),
|
||||
rel_path=str(url_entry.get("rel_path") or ""),
|
||||
media_type=str(url_entry.get("media_type") or "application/octet-stream"),
|
||||
size_bytes=int(url_entry.get("size_bytes") or 0),
|
||||
etag=etag_new or etag0,
|
||||
last_modified=lm_new or lm0,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
logger.info(f"[avatar_cache_revalidate] kind=url account={account_name} username={user_key} status=304")
|
||||
headers = build_avatar_cache_response_headers(url_entry)
|
||||
return FileResponse(
|
||||
str(url_file),
|
||||
media_type=str(url_entry.get("media_type") or "application/octet-stream"),
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
if payload:
|
||||
payload2, media_type, _ext = _detect_media_type_and_ext(payload)
|
||||
if media_type == "application/octet-stream" and ct:
|
||||
try:
|
||||
mt = ct.split(";")[0].strip()
|
||||
if mt.startswith("image/"):
|
||||
media_type = mt
|
||||
except Exception:
|
||||
pass
|
||||
if str(media_type or "").startswith("image/"):
|
||||
entry, out_path = write_avatar_cache_payload(
|
||||
account_name,
|
||||
source_kind="url",
|
||||
username=user_key,
|
||||
source_url=remote_url,
|
||||
payload=payload2,
|
||||
media_type=media_type,
|
||||
etag=etag_new,
|
||||
last_modified=lm_new,
|
||||
ttl_seconds=AVATAR_CACHE_TTL_SECONDS,
|
||||
)
|
||||
if entry and out_path:
|
||||
# bind user-key record to same file for quicker next access
|
||||
try:
|
||||
upsert_avatar_cache_entry(
|
||||
account_name,
|
||||
cache_key=cache_key_for_avatar_user(user_key),
|
||||
source_kind="user",
|
||||
username=user_key,
|
||||
source_url=remote_url,
|
||||
source_md5=str(entry.get("source_md5") or ""),
|
||||
source_update_time=int(entry.get("source_update_time") or 0),
|
||||
rel_path=str(entry.get("rel_path") or ""),
|
||||
media_type=str(entry.get("media_type") or "application/octet-stream"),
|
||||
size_bytes=int(entry.get("size_bytes") or 0),
|
||||
etag=str(entry.get("etag") or ""),
|
||||
last_modified=str(entry.get("last_modified") or ""),
|
||||
fetched_at=int(entry.get("fetched_at") or 0),
|
||||
checked_at=int(entry.get("checked_at") or 0),
|
||||
expires_at=int(entry.get("expires_at") or 0),
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
logger.info(f"[avatar_cache_download] kind=url account={account_name} username={user_key}")
|
||||
headers = build_avatar_cache_response_headers(entry)
|
||||
return FileResponse(str(out_path), media_type=media_type, headers=headers)
|
||||
|
||||
if cached_file is not None and user_entry:
|
||||
headers = build_avatar_cache_response_headers(user_entry)
|
||||
return FileResponse(
|
||||
str(cached_file),
|
||||
media_type=str(user_entry.get("media_type") or "application/octet-stream"),
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
raise HTTPException(status_code=404, detail="Avatar not found.")
|
||||
|
||||
|
||||
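The avatar endpoint above now layers three levels: an on-disk user-keyed cache validated against head_image.db md5/update_time, a URL-keyed cache for remote avatars, and HTTP revalidation with If-None-Match / If-Modified-Since so a 304 answer refreshes the cache entry without re-downloading. The conditional-GET part in isolation, as a standalone sketch (not the project helper):

```python
import requests

def fetch_if_changed(url: str, etag: str = "", last_modified: str = "") -> tuple[bytes | None, str, str]:
    """Return (body, etag, last_modified); body is None when the server answers 304 Not Modified."""
    headers: dict[str, str] = {}
    if etag:
        headers["If-None-Match"] = etag
    if last_modified:
        headers["If-Modified-Since"] = last_modified
    r = requests.get(url, headers=headers, timeout=20)
    if r.status_code == 304:
        return None, etag, last_modified
    r.raise_for_status()
    return r.content, r.headers.get("ETag", ""), r.headers.get("Last-Modified", "")
```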
class EmojiDownloadRequest(BaseModel):
|
||||
@@ -434,7 +773,25 @@ async def proxy_image(url: str):
|
||||
if not _is_allowed_proxy_image_host(host):
|
||||
raise HTTPException(status_code=400, detail="Unsupported url host for proxy_image.")
|
||||
|
||||
def _download_bytes() -> tuple[bytes, str]:
|
||||
source_url = normalize_avatar_source_url(u)
|
||||
proxy_account = "_proxy"
|
||||
cache_entry = get_avatar_cache_url_entry(proxy_account, source_url) if is_avatar_cache_enabled() else None
|
||||
cache_file = avatar_cache_entry_file_exists(proxy_account, cache_entry)
|
||||
if cache_entry and cache_file and avatar_cache_entry_is_fresh(cache_entry):
|
||||
logger.info(f"[avatar_cache_hit] kind=proxy_url account={proxy_account}")
|
||||
touch_avatar_cache_entry(proxy_account, cache_key_for_avatar_url(source_url))
|
||||
headers = build_avatar_cache_response_headers(cache_entry)
|
||||
return FileResponse(
|
||||
str(cache_file),
|
||||
media_type=str(cache_entry.get("media_type") or "application/octet-stream"),
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
def _download_bytes(
|
||||
*,
|
||||
if_none_match: str = "",
|
||||
if_modified_since: str = "",
|
||||
) -> tuple[bytes, str, str, str, bool]:
|
||||
base_headers = {
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120 Safari/537.36",
|
||||
"Accept": "image/avif,image/webp,image/apng,image/*,*/*;q=0.8",
|
||||
@@ -464,10 +821,20 @@ async def proxy_image(url: str):
for extra in header_variants:
headers = dict(base_headers)
headers.update(extra)
if if_none_match:
headers["If-None-Match"] = if_none_match
if if_modified_since:
headers["If-Modified-Since"] = if_modified_since
r = requests.get(u, headers=headers, timeout=20, stream=True)
try:
if r.status_code == 304:
etag0 = str(r.headers.get("ETag") or "").strip()
lm0 = str(r.headers.get("Last-Modified") or "").strip()
return b"", "", etag0, lm0, True
r.raise_for_status()
content_type = str(r.headers.get("Content-Type") or "").strip()
etag0 = str(r.headers.get("ETag") or "").strip()
lm0 = str(r.headers.get("Last-Modified") or "").strip()
max_bytes = 10 * 1024 * 1024
chunks: list[bytes] = []
total = 0
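This hunk adds HTTP revalidation to the proxy download: when a cached entry already carries an ETag or Last-Modified value, they are sent back as If-None-Match / If-Modified-Since, and a 304 response short-circuits the transfer. A standalone sketch of that pattern with requests (the function name and return shape here are illustrative, not the project's API):

```python
import requests


def conditional_fetch(url: str, etag: str = "", last_modified: str = "", timeout: int = 20):
    """Return (body, content_type, etag, last_modified, not_modified)."""
    headers = {}
    if etag:
        headers["If-None-Match"] = etag
    if last_modified:
        headers["If-Modified-Since"] = last_modified
    r = requests.get(url, headers=headers, timeout=timeout)
    if r.status_code == 304:
        # The origin confirms our copy is still current; no body is transferred.
        return b"", "", r.headers.get("ETag", ""), r.headers.get("Last-Modified", ""), True
    r.raise_for_status()
    return (
        r.content,
        r.headers.get("Content-Type", ""),
        r.headers.get("ETag", ""),
        r.headers.get("Last-Modified", ""),
        False,
    )
```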
@@ -478,7 +845,7 @@ async def proxy_image(url: str):
total += len(ch)
if total > max_bytes:
raise HTTPException(status_code=400, detail="Proxy image too large (>10MB).")
return b"".join(chunks), content_type
return b"".join(chunks), content_type, etag0, lm0, False
except HTTPException:
# Hard failure, don't retry with another referer.
raise
@@ -493,14 +860,50 @@ async def proxy_image(url: str):
# All variants failed.
raise last_err or RuntimeError("proxy_image download failed")

etag0 = str((cache_entry or {}).get("etag") or "").strip()
lm0 = str((cache_entry or {}).get("last_modified") or "").strip()
try:
data, ct = await asyncio.to_thread(_download_bytes)
data, ct, etag_new, lm_new, not_modified = await asyncio.to_thread(
_download_bytes,
if_none_match=etag0,
if_modified_since=lm0,
)
except HTTPException:
raise
except Exception as e:
logger.warning(f"proxy_image failed: url={u} err={e}")
if cache_entry and cache_file:
headers = build_avatar_cache_response_headers(cache_entry)
return FileResponse(
str(cache_file),
media_type=str(cache_entry.get("media_type") or "application/octet-stream"),
headers=headers,
)
raise HTTPException(status_code=502, detail=f"Proxy image failed: {e}")

if not_modified and cache_entry and cache_file:
logger.info(f"[avatar_cache_revalidate] kind=proxy_url account={proxy_account} status=304")
upsert_avatar_cache_entry(
proxy_account,
cache_key=cache_key_for_avatar_url(source_url),
source_kind="url",
source_url=source_url,
username="",
source_md5=str(cache_entry.get("source_md5") or ""),
source_update_time=int(cache_entry.get("source_update_time") or 0),
rel_path=str(cache_entry.get("rel_path") or ""),
media_type=str(cache_entry.get("media_type") or "application/octet-stream"),
size_bytes=int(cache_entry.get("size_bytes") or 0),
etag=etag_new or etag0,
last_modified=lm_new or lm0,
)
headers = build_avatar_cache_response_headers(cache_entry)
return FileResponse(
str(cache_file),
media_type=str(cache_entry.get("media_type") or "application/octet-stream"),
headers=headers,
)

if not data:
raise HTTPException(status_code=502, detail="Proxy returned empty body.")

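Two fallback paths are visible above: on a non-HTTP failure the proxy degrades to serving the previously cached file, and on a 304 it only refreshes the entry's validators (etag / last_modified) and re-serves the cached copy without downloading anything. A compact sketch of that "revalidate or serve stale" control flow; the callables and dict-based entry below are placeholders for the project's helpers:

```python
def serve_with_revalidation(entry, cached_file, fetch, serve_file, serve_bytes):
    """entry: cached metadata dict or None; cached_file: Path or None.
    fetch(etag, last_modified) -> (data, content_type, etag, last_modified, not_modified)."""
    etag = (entry or {}).get("etag", "")
    last_modified = (entry or {}).get("last_modified", "")
    try:
        data, ct, etag_new, lm_new, not_modified = fetch(etag, last_modified)
    except Exception:
        if entry and cached_file:
            return serve_file(cached_file, entry)   # degrade to the stale copy
        raise
    if not_modified and entry and cached_file:
        entry["etag"] = etag_new or etag            # refresh validators only
        entry["last_modified"] = lm_new or last_modified
        return serve_file(cached_file, entry)
    return serve_bytes(data, ct)
```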
@@ -518,8 +921,24 @@ async def proxy_image(url: str):
if not str(media_type or "").startswith("image/"):
raise HTTPException(status_code=502, detail="Proxy did not return an image.")

if is_avatar_cache_enabled():
entry, out_path = write_avatar_cache_payload(
proxy_account,
source_kind="url",
source_url=source_url,
payload=payload,
media_type=media_type,
etag=etag_new,
last_modified=lm_new,
ttl_seconds=AVATAR_CACHE_TTL_SECONDS,
)
if entry and out_path:
logger.info(f"[avatar_cache_download] kind=proxy_url account={proxy_account}")
headers = build_avatar_cache_response_headers(entry)
return FileResponse(str(out_path), media_type=media_type, headers=headers)

resp = Response(content=payload, media_type=media_type)
resp.headers["Cache-Control"] = "public, max-age=86400"
resp.headers["Cache-Control"] = f"public, max-age={AVATAR_CACHE_TTL_SECONDS}"
return resp

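The last two lines are the old and new fallback response: the hard-coded one-day max-age is replaced so the browser-side cache lifetime always matches the server-side TTL. A trivial sketch of building such headers from a single TTL constant (the value below is an assumption; the real AVATAR_CACHE_TTL_SECONDS lives in the project's configuration):

```python
AVATAR_CACHE_TTL_SECONDS = 7 * 24 * 3600  # assumed value for illustration only


def cache_headers(etag: str = "", last_modified: str = "") -> dict[str, str]:
    headers = {"Cache-Control": f"public, max-age={AVATAR_CACHE_TTL_SECONDS}"}
    if etag:
        headers["ETag"] = etag
    if last_modified:
        headers["Last-Modified"] = last_modified
    return headers
```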
@@ -17,7 +17,6 @@ from ...chat_helpers import (
_decode_sqlite_text,
_iter_message_db_paths,
_load_contact_rows,
_pick_avatar_url,
_pick_display_name,
_quote_ident,
_should_keep_session,
@@ -701,7 +700,7 @@ def build_card_00_global_overview(
u, cnt = stats.top_contact
row = contact_rows.get(u)
display = _pick_display_name(row, u)
avatar = _pick_avatar_url(row) or (_build_avatar_url(str(account_dir.name or ""), u) if u else "")
avatar = _build_avatar_url(str(account_dir.name or ""), u) if u else ""
top_contact_obj = {
"username": u,
"displayName": display,
@@ -716,7 +715,7 @@ def build_card_00_global_overview(
u, cnt = stats.top_group
row = contact_rows.get(u)
display = _pick_display_name(row, u)
avatar = _pick_avatar_url(row) or (_build_avatar_url(str(account_dir.name or ""), u) if u else "")
avatar = _build_avatar_url(str(account_dir.name or ""), u) if u else ""
top_group_obj = {
"username": u,
"displayName": display,

@@ -14,7 +14,6 @@ from ...chat_helpers import (
_build_avatar_url,
_iter_message_db_paths,
_load_contact_rows,
_pick_avatar_url,
_pick_display_name,
_quote_ident,
_row_to_search_hit,
@@ -713,7 +712,7 @@ def _fetch_message_moment_payload(

contact_row = contact_rows.get(username)
display = _pick_display_name(contact_row, username)
avatar = _pick_avatar_url(contact_row) or (_build_avatar_url(str(account_dir.name or ""), username) if username else "")
avatar = _build_avatar_url(str(account_dir.name or ""), username) if username else ""

return {
"timestamp": int(ref.ts),

@@ -12,7 +12,6 @@ from typing import Any, Optional
from ...chat_helpers import (
_build_avatar_url,
_load_contact_rows,
_pick_avatar_url,
_pick_display_name,
_should_keep_session,
)
@@ -385,7 +384,7 @@ def compute_reply_speed_stats(*, account_dir: Path, year: int) -> dict[str, Any]
def conv_to_obj(score: float | None, agg: _ConvAgg) -> dict[str, Any]:
row = contact_rows.get(agg.username)
display = _pick_display_name(row, agg.username)
avatar = _pick_avatar_url(row) or (_build_avatar_url(str(account_dir.name or ""), agg.username) if agg.username else "")
avatar = _build_avatar_url(str(account_dir.name or ""), agg.username) if agg.username else ""
avg_s = agg.avg_gap()
out: dict[str, Any] = {
"username": agg.username,
@@ -420,7 +419,7 @@ def compute_reply_speed_stats(*, account_dir: Path, year: int) -> dict[str, Any]
else:
row = contact_rows.get(global_fastest_u)
display = _pick_display_name(row, global_fastest_u)
avatar = _pick_avatar_url(row) or (_build_avatar_url(str(account_dir.name or ""), global_fastest_u) if global_fastest_u else "")
avatar = _build_avatar_url(str(account_dir.name or ""), global_fastest_u) if global_fastest_u else ""
fastest_obj = {
"username": global_fastest_u,
"displayName": display,
@@ -440,7 +439,7 @@ def compute_reply_speed_stats(*, account_dir: Path, year: int) -> dict[str, Any]
else:
row = contact_rows.get(global_slowest_u)
display = _pick_display_name(row, global_slowest_u)
avatar = _pick_avatar_url(row) or (_build_avatar_url(str(account_dir.name or ""), global_slowest_u) if global_slowest_u else "")
avatar = _build_avatar_url(str(account_dir.name or ""), global_slowest_u) if global_slowest_u else ""
slowest_obj = {
"username": global_slowest_u,
"displayName": display,
@@ -547,7 +546,7 @@ def compute_reply_speed_stats(*, account_dir: Path, year: int) -> dict[str, Any]

row = contact_rows.get(u)
display = _pick_display_name(row, u)
avatar = _pick_avatar_url(row) or (_build_avatar_url(str(account_dir.name or ""), u) if u else "")
avatar = _build_avatar_url(str(account_dir.name or ""), u) if u else ""
series.append(
{
"username": u,
@@ -595,7 +594,7 @@ def compute_reply_speed_stats(*, account_dir: Path, year: int) -> dict[str, Any]
if not u:
continue
display = _pick_display_name(r, u)
avatar = _pick_avatar_url(r) or (_build_avatar_url(str(account_dir.name or ""), u) if u else "")
avatar = _build_avatar_url(str(account_dir.name or ""), u) if u else ""
all_contacts_list.append({
"username": u,
"displayName": display,

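Every one of these hunks makes the same substitution: the annual-summary cards stop preferring the remote head-image URL from the contact row (_pick_avatar_url) and always emit the app's own avatar URL (_build_avatar_url), so the frontend fetches avatars through the local endpoint and the new avatar cache instead of hitting the remote CDN directly. A hedged sketch of what such a builder can look like; the real _build_avatar_url lives in chat_helpers and its exact output is not shown in this diff, only the "/api/chat/avatar" endpoint exercised by the tests below:

```python
from urllib.parse import urlencode


def build_avatar_url(account: str, username: str) -> str:
    # Points at the local avatar endpoint used in the tests below;
    # the query construction here is illustrative.
    if not username:
        return ""
    return "/api/chat/avatar?" + urlencode({"account": account, "username": username})
```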
tests/test_avatar_cache_chat_media.py (new file, 173 lines)
@@ -0,0 +1,173 @@
import os
import sqlite3
import sys
import unittest
import importlib
from pathlib import Path
from tempfile import TemporaryDirectory


ROOT = Path(__file__).resolve().parents[1]
sys.path.insert(0, str(ROOT / "src"))


class TestAvatarCacheChatMedia(unittest.TestCase):
    def _seed_contact_db(self, path: Path, *, username: str = "wxid_friend") -> None:
        conn = sqlite3.connect(str(path))
        try:
            conn.execute(
                """
                CREATE TABLE contact (
                    username TEXT,
                    remark TEXT,
                    nick_name TEXT,
                    alias TEXT,
                    local_type INTEGER,
                    verify_flag INTEGER,
                    big_head_url TEXT,
                    small_head_url TEXT
                )
                """
            )
            conn.execute(
                """
                CREATE TABLE stranger (
                    username TEXT,
                    remark TEXT,
                    nick_name TEXT,
                    alias TEXT,
                    local_type INTEGER,
                    verify_flag INTEGER,
                    big_head_url TEXT,
                    small_head_url TEXT
                )
                """
            )
            conn.execute(
                "INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
                (
                    username,
                    "",
                    "测试好友",
                    "",
                    1,
                    0,
                    "https://wx.qlogo.cn/mmhead/ver_1/test_remote_avatar/132",
                    "",
                ),
            )
            conn.commit()
        finally:
            conn.close()

    def _seed_session_db(self, path: Path, *, username: str = "wxid_friend") -> None:
        conn = sqlite3.connect(str(path))
        try:
            conn.execute(
                """
                CREATE TABLE SessionTable (
                    username TEXT,
                    sort_timestamp INTEGER,
                    last_timestamp INTEGER
                )
                """
            )
            conn.execute("INSERT INTO SessionTable VALUES (?, ?, ?)", (username, 200, 200))
            conn.commit()
        finally:
            conn.close()

    def _seed_head_image_db(self, path: Path, *, username: str = "wxid_friend") -> None:
        # 1x1 PNG
        png = bytes.fromhex(
            "89504E470D0A1A0A"
            "0000000D49484452000000010000000108060000001F15C489"
            "0000000D49444154789C6360606060000000050001A5F64540"
            "0000000049454E44AE426082"
        )
        conn = sqlite3.connect(str(path))
        try:
            conn.execute("CREATE TABLE head_image(username TEXT PRIMARY KEY, md5 TEXT, image_buffer BLOB, update_time INTEGER)")
            conn.execute(
                "INSERT INTO head_image VALUES (?, ?, ?, ?)",
                (username, "0123456789abcdef0123456789abcdef", sqlite3.Binary(png), 1735689600),
            )
            conn.commit()
        finally:
            conn.close()

    def test_chat_avatar_caches_to_output_avatar_cache(self):
        from fastapi import FastAPI
        from fastapi.testclient import TestClient

        with TemporaryDirectory() as td:
            root = Path(td)
            account = "wxid_test"
            username = "wxid_friend"
            account_dir = root / "output" / "databases" / account
            account_dir.mkdir(parents=True, exist_ok=True)

            self._seed_contact_db(account_dir / "contact.db", username=username)
            self._seed_session_db(account_dir / "session.db", username=username)
            self._seed_head_image_db(account_dir / "head_image.db", username=username)

            prev_data = None
            prev_cache = None
            try:
                prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR")
                prev_cache = os.environ.get("WECHAT_TOOL_AVATAR_CACHE_ENABLED")
                os.environ["WECHAT_TOOL_DATA_DIR"] = str(root)
                os.environ["WECHAT_TOOL_AVATAR_CACHE_ENABLED"] = "1"

                import wechat_decrypt_tool.app_paths as app_paths
                import wechat_decrypt_tool.chat_helpers as chat_helpers
                import wechat_decrypt_tool.avatar_cache as avatar_cache
                import wechat_decrypt_tool.routers.chat_media as chat_media

                importlib.reload(app_paths)
                importlib.reload(chat_helpers)
                importlib.reload(avatar_cache)
                importlib.reload(chat_media)

                app = FastAPI()
                app.include_router(chat_media.router)
                client = TestClient(app)

                resp = client.get("/api/chat/avatar", params={"account": account, "username": username})
                self.assertEqual(resp.status_code, 200)
                self.assertTrue(resp.headers.get("content-type", "").startswith("image/"))

                cache_db = root / "output" / "avatar_cache" / account / "avatar_cache.db"
                self.assertTrue(cache_db.exists())

                conn = sqlite3.connect(str(cache_db))
                try:
                    row = conn.execute(
                        "SELECT cache_key, source_kind, username, rel_path, media_type FROM avatar_cache_entries WHERE source_kind = 'user' LIMIT 1"
                    ).fetchone()
                    self.assertIsNotNone(row)
                    rel_path = str(row[3] or "")
                finally:
                    conn.close()

                self.assertTrue(rel_path)
                cache_file = (root / "output" / "avatar_cache" / account / rel_path).resolve()
                self.assertTrue(cache_file.exists())

                resp2 = client.get("/api/chat/avatar", params={"account": account, "username": username})
                self.assertEqual(resp2.status_code, 200)
                self.assertEqual(resp2.content, resp.content)
            finally:
                if prev_data is None:
                    os.environ.pop("WECHAT_TOOL_DATA_DIR", None)
                else:
                    os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data
                if prev_cache is None:
                    os.environ.pop("WECHAT_TOOL_AVATAR_CACHE_ENABLED", None)
                else:
                    os.environ["WECHAT_TOOL_AVATAR_CACHE_ENABLED"] = prev_cache


if __name__ == "__main__":
    unittest.main()

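The test pins down the on-disk layout of the new cache: decrypted databases live under output/databases/&lt;account&gt;/, while cached avatars go to output/avatar_cache/&lt;account&gt;/ with an avatar_cache.db index whose avatar_cache_entries rows point at files via rel_path. A small inspection sketch built only on what the test asserts (columns beyond those selected above are not guaranteed to exist):

```python
import sqlite3
from pathlib import Path


def list_avatar_cache_entries(data_dir: Path, account: str) -> None:
    """Print the cached avatar entries for one account, following the layout used in the test."""
    cache_dir = data_dir / "output" / "avatar_cache" / account
    with sqlite3.connect(str(cache_dir / "avatar_cache.db")) as conn:
        rows = conn.execute(
            "SELECT cache_key, source_kind, username, rel_path, media_type FROM avatar_cache_entries"
        ).fetchall()
    for cache_key, source_kind, username, rel_path, media_type in rows:
        f = cache_dir / rel_path
        print(f"{source_kind or '?':<6} {username or '-':<20} {media_type or '?':<12} {f} exists={f.exists()}")
```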
tests/test_chat_export_message_types_semantics.py (new file, 418 lines)
@@ -0,0 +1,418 @@
|
||||
import os
|
||||
import json
|
||||
import hashlib
|
||||
import sqlite3
|
||||
import sys
|
||||
import unittest
|
||||
import zipfile
|
||||
import importlib
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[1]
|
||||
sys.path.insert(0, str(ROOT / "src"))
|
||||
|
||||
|
||||
class TestChatExportMessageTypesSemantics(unittest.TestCase):
|
||||
def _reload_export_modules(self):
|
||||
import wechat_decrypt_tool.app_paths as app_paths
|
||||
import wechat_decrypt_tool.chat_helpers as chat_helpers
|
||||
import wechat_decrypt_tool.media_helpers as media_helpers
|
||||
import wechat_decrypt_tool.chat_export_service as chat_export_service
|
||||
|
||||
importlib.reload(app_paths)
|
||||
importlib.reload(chat_helpers)
|
||||
importlib.reload(media_helpers)
|
||||
importlib.reload(chat_export_service)
|
||||
return chat_export_service
|
||||
|
||||
def _seed_contact_db(self, path: Path, *, account: str, username: str) -> None:
|
||||
conn = sqlite3.connect(str(path))
|
||||
try:
|
||||
conn.execute(
|
||||
"""
|
||||
CREATE TABLE contact (
|
||||
username TEXT,
|
||||
remark TEXT,
|
||||
nick_name TEXT,
|
||||
alias TEXT,
|
||||
local_type INTEGER,
|
||||
verify_flag INTEGER,
|
||||
big_head_url TEXT,
|
||||
small_head_url TEXT
|
||||
)
|
||||
"""
|
||||
)
|
||||
conn.execute(
|
||||
"""
|
||||
CREATE TABLE stranger (
|
||||
username TEXT,
|
||||
remark TEXT,
|
||||
nick_name TEXT,
|
||||
alias TEXT,
|
||||
local_type INTEGER,
|
||||
verify_flag INTEGER,
|
||||
big_head_url TEXT,
|
||||
small_head_url TEXT
|
||||
)
|
||||
"""
|
||||
)
|
||||
conn.execute(
|
||||
"INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(account, "", "我", "", 1, 0, "", ""),
|
||||
)
|
||||
conn.execute(
|
||||
"INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(username, "", "测试好友", "", 1, 0, "", ""),
|
||||
)
|
||||
conn.commit()
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
def _seed_session_db(self, path: Path, *, username: str) -> None:
|
||||
conn = sqlite3.connect(str(path))
|
||||
try:
|
||||
conn.execute(
|
||||
"""
|
||||
CREATE TABLE SessionTable (
|
||||
username TEXT,
|
||||
is_hidden INTEGER,
|
||||
sort_timestamp INTEGER
|
||||
)
|
||||
"""
|
||||
)
|
||||
conn.execute(
|
||||
"INSERT INTO SessionTable VALUES (?, ?, ?)",
|
||||
(username, 0, 1735689600),
|
||||
)
|
||||
conn.commit()
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
def _seed_message_db(self, path: Path, *, account: str, username: str) -> None:
|
||||
conn = sqlite3.connect(str(path))
|
||||
try:
|
||||
conn.execute("CREATE TABLE Name2Id (rowid INTEGER PRIMARY KEY, user_name TEXT)")
|
||||
conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (1, account))
|
||||
conn.execute("INSERT INTO Name2Id(rowid, user_name) VALUES (?, ?)", (2, username))
|
||||
|
||||
table_name = f"msg_{hashlib.md5(username.encode('utf-8')).hexdigest()}"
|
||||
conn.execute(
|
||||
f"""
|
||||
CREATE TABLE {table_name} (
|
||||
local_id INTEGER,
|
||||
server_id INTEGER,
|
||||
local_type INTEGER,
|
||||
sort_seq INTEGER,
|
||||
real_sender_id INTEGER,
|
||||
create_time INTEGER,
|
||||
message_content TEXT,
|
||||
compress_content BLOB
|
||||
)
|
||||
"""
|
||||
)
|
||||
|
||||
image_xml = '<msg><img md5="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" cdnthumburl="img_file_id_1" /></msg>'
|
||||
video_xml = '<msg><videomsg md5="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" cdnthumbmd5="cccccccccccccccccccccccccccccccc" cdnvideourl="video_file_id_1" cdnthumburl="video_thumb_id_1" /></msg>'
|
||||
|
||||
rows = [
|
||||
(1, 1001, 3, 1, 2, 1735689601, image_xml, None),
|
||||
(2, 1002, 43, 2, 2, 1735689602, video_xml, None),
|
||||
(3, 1003, 49, 3, 2, 1735689603, '<msg><appmsg><type>2000</type><des>收到转账0.01元</des></appmsg></msg>', None),
|
||||
(4, 1004, 1, 4, 2, 1735689604, '普通文本消息', None),
|
||||
(5, 1005, 10000, 5, 2, 1735689605, '系统提示消息', None),
|
||||
]
|
||||
conn.executemany(
|
||||
f"INSERT INTO {table_name} (local_id, server_id, local_type, sort_seq, real_sender_id, create_time, message_content, compress_content) VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
rows,
|
||||
)
|
||||
conn.commit()
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
def _seed_media_files(self, account_dir: Path) -> None:
|
||||
resource_root = account_dir / "resource"
|
||||
(resource_root / "aa").mkdir(parents=True, exist_ok=True)
|
||||
(resource_root / "bb").mkdir(parents=True, exist_ok=True)
|
||||
(resource_root / "cc").mkdir(parents=True, exist_ok=True)
|
||||
|
||||
(resource_root / "aa" / "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.jpg").write_bytes(b"\xff\xd8\xff\xd9")
|
||||
(resource_root / "bb" / "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb.mp4").write_bytes(b"video-bytes")
|
||||
(resource_root / "cc" / "cccccccccccccccccccccccccccccccc.jpg").write_bytes(b"\xff\xd8\xff\xd9")
|
||||
|
||||
def _seed_source_info(self, account_dir: Path, wxid_dir: Path) -> None:
|
||||
payload = {
|
||||
"wxid_dir": str(wxid_dir),
|
||||
"db_storage_path": str(wxid_dir / "db_storage"),
|
||||
}
|
||||
(account_dir / "_source.json").write_text(json.dumps(payload, ensure_ascii=False), encoding="utf-8")
|
||||
|
||||
def _seed_wxid_media_files(self, wxid_dir: Path) -> None:
|
||||
(wxid_dir / "msg" / "video").mkdir(parents=True, exist_ok=True)
|
||||
(wxid_dir / "msg" / "attach").mkdir(parents=True, exist_ok=True)
|
||||
(wxid_dir / "cache").mkdir(parents=True, exist_ok=True)
|
||||
(wxid_dir / "db_storage").mkdir(parents=True, exist_ok=True)
|
||||
|
||||
(wxid_dir / "msg" / "video" / "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb.mp4").write_bytes(b"video-bytes")
|
||||
(wxid_dir / "msg" / "video" / "cccccccccccccccccccccccccccccccc.jpg").write_bytes(b"\xff\xd8\xff\xd9")
|
||||
|
||||
def _prepare_account(self, root: Path, *, account: str, username: str) -> Path:
|
||||
account_dir = root / "output" / "databases" / account
|
||||
account_dir.mkdir(parents=True, exist_ok=True)
|
||||
wxid_dir = root / "wxid_data" / account
|
||||
|
||||
self._seed_contact_db(account_dir / "contact.db", account=account, username=username)
|
||||
self._seed_session_db(account_dir / "session.db", username=username)
|
||||
self._seed_message_db(account_dir / "message_0.db", account=account, username=username)
|
||||
self._seed_media_files(account_dir)
|
||||
self._seed_wxid_media_files(wxid_dir)
|
||||
self._seed_source_info(account_dir, wxid_dir)
|
||||
return account_dir
|
||||
|
||||
def _create_job(self, manager, *, account: str, username: str, message_types, include_media=True, media_kinds=None, privacy_mode=False):
|
||||
if media_kinds is None:
|
||||
media_kinds = ["image", "emoji", "video", "video_thumb", "voice", "file"]
|
||||
|
||||
job = manager.create_job(
|
||||
account=account,
|
||||
scope="selected",
|
||||
usernames=[username],
|
||||
export_format="json",
|
||||
start_time=None,
|
||||
end_time=None,
|
||||
include_hidden=False,
|
||||
include_official=False,
|
||||
include_media=include_media,
|
||||
media_kinds=media_kinds,
|
||||
message_types=message_types,
|
||||
output_dir=None,
|
||||
allow_process_key_extract=False,
|
||||
privacy_mode=privacy_mode,
|
||||
file_name=None,
|
||||
)
|
||||
|
||||
for _ in range(200):
|
||||
latest = manager.get_job(job.export_id)
|
||||
if latest and latest.status in {"done", "error", "cancelled"}:
|
||||
return latest
|
||||
import time as _time
|
||||
|
||||
_time.sleep(0.05)
|
||||
self.fail("export job did not finish in time")
|
||||
|
||||
def _load_export_payload(self, zip_path: Path):
|
||||
self.assertTrue(zip_path.exists())
|
||||
with zipfile.ZipFile(zip_path, "r") as zf:
|
||||
names = set(zf.namelist())
|
||||
msg_path = next((n for n in names if n.endswith("/messages.json")), "")
|
||||
self.assertTrue(msg_path)
|
||||
import json as _json
|
||||
|
||||
payload = _json.loads(zf.read(msg_path).decode("utf-8"))
|
||||
manifest = _json.loads(zf.read("manifest.json").decode("utf-8"))
|
||||
return payload, manifest, names
|
||||
|
||||
def test_unchecked_image_is_filtered_out(self):
|
||||
with TemporaryDirectory() as td:
|
||||
root = Path(td)
|
||||
account = "wxid_test"
|
||||
username = "wxid_friend"
|
||||
self._prepare_account(root, account=account, username=username)
|
||||
|
||||
prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR")
|
||||
try:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = str(root)
|
||||
svc = self._reload_export_modules()
|
||||
job = self._create_job(
|
||||
svc.CHAT_EXPORT_MANAGER,
|
||||
account=account,
|
||||
username=username,
|
||||
message_types=["text", "transfer"],
|
||||
include_media=True,
|
||||
)
|
||||
self.assertEqual(job.status, "done", msg=job.error)
|
||||
|
||||
payload, _, names = self._load_export_payload(job.zip_path)
|
||||
image_msg = next((m for m in payload.get("messages", []) if int(m.get("type") or 0) == 3), None)
|
||||
self.assertIsNone(image_msg)
|
||||
render_types = {str(m.get("renderType") or "") for m in payload.get("messages", [])}
|
||||
self.assertTrue(render_types.issubset({"text", "transfer"}))
|
||||
self.assertFalse(any(n.startswith("media/images/") for n in names))
|
||||
finally:
|
||||
if prev_data is None:
|
||||
os.environ.pop("WECHAT_TOOL_DATA_DIR", None)
|
||||
else:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data
|
||||
|
||||
def test_checked_image_exports_media_file(self):
|
||||
with TemporaryDirectory() as td:
|
||||
root = Path(td)
|
||||
account = "wxid_test"
|
||||
username = "wxid_friend"
|
||||
self._prepare_account(root, account=account, username=username)
|
||||
|
||||
prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR")
|
||||
try:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = str(root)
|
||||
svc = self._reload_export_modules()
|
||||
job = self._create_job(
|
||||
svc.CHAT_EXPORT_MANAGER,
|
||||
account=account,
|
||||
username=username,
|
||||
message_types=["image", "text"],
|
||||
include_media=True,
|
||||
)
|
||||
self.assertEqual(job.status, "done", msg=job.error)
|
||||
|
||||
payload, _, names = self._load_export_payload(job.zip_path)
|
||||
image_msg = next((m for m in payload.get("messages", []) if int(m.get("type") or 0) == 3), None)
|
||||
self.assertIsNotNone(image_msg)
|
||||
self.assertEqual(str(image_msg.get("renderType") or ""), "image")
|
||||
self.assertTrue(isinstance(image_msg.get("offlineMedia"), list) and image_msg.get("offlineMedia"))
|
||||
self.assertTrue(any(n.startswith("media/images/") for n in names))
|
||||
finally:
|
||||
if prev_data is None:
|
||||
os.environ.pop("WECHAT_TOOL_DATA_DIR", None)
|
||||
else:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data
|
||||
|
||||
def test_unchecked_non_media_type_is_filtered_out(self):
|
||||
with TemporaryDirectory() as td:
|
||||
root = Path(td)
|
||||
account = "wxid_test"
|
||||
username = "wxid_friend"
|
||||
self._prepare_account(root, account=account, username=username)
|
||||
|
||||
prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR")
|
||||
try:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = str(root)
|
||||
svc = self._reload_export_modules()
|
||||
job = self._create_job(
|
||||
svc.CHAT_EXPORT_MANAGER,
|
||||
account=account,
|
||||
username=username,
|
||||
message_types=["text"],
|
||||
include_media=True,
|
||||
)
|
||||
self.assertEqual(job.status, "done", msg=job.error)
|
||||
|
||||
payload, manifest, _ = self._load_export_payload(job.zip_path)
|
||||
system_msg = next((m for m in payload.get("messages", []) if int(m.get("type") or 0) == 10000), None)
|
||||
self.assertIsNone(system_msg)
|
||||
self.assertTrue(all(str(m.get("renderType") or "") == "text" for m in payload.get("messages", [])))
|
||||
self.assertEqual(manifest.get("filters", {}).get("messageTypes"), ["text"])
|
||||
finally:
|
||||
if prev_data is None:
|
||||
os.environ.pop("WECHAT_TOOL_DATA_DIR", None)
|
||||
else:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data
|
||||
|
||||
def test_checked_video_exports_video_and_thumb(self):
|
||||
with TemporaryDirectory() as td:
|
||||
root = Path(td)
|
||||
account = "wxid_test"
|
||||
username = "wxid_friend"
|
||||
self._prepare_account(root, account=account, username=username)
|
||||
|
||||
prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR")
|
||||
try:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = str(root)
|
||||
svc = self._reload_export_modules()
|
||||
job = self._create_job(
|
||||
svc.CHAT_EXPORT_MANAGER,
|
||||
account=account,
|
||||
username=username,
|
||||
message_types=["video", "text"],
|
||||
include_media=True,
|
||||
)
|
||||
self.assertEqual(job.status, "done", msg=job.error)
|
||||
|
||||
payload, _, names = self._load_export_payload(job.zip_path)
|
||||
video_msg = next((m for m in payload.get("messages", []) if int(m.get("type") or 0) == 43), None)
|
||||
self.assertIsNotNone(video_msg)
|
||||
self.assertEqual(str(video_msg.get("renderType") or ""), "video")
|
||||
image_msg = next((m for m in payload.get("messages", []) if int(m.get("type") or 0) == 3), None)
|
||||
self.assertIsNone(image_msg)
|
||||
media_items = video_msg.get("offlineMedia") or []
|
||||
kinds = sorted(str(x.get("kind") or "") for x in media_items)
|
||||
self.assertIn("video", kinds)
|
||||
self.assertIn("video_thumb", kinds)
|
||||
self.assertTrue(any(n.startswith("media/videos/") for n in names))
|
||||
self.assertTrue(any(n.startswith("media/video_thumbs/") for n in names))
|
||||
finally:
|
||||
if prev_data is None:
|
||||
os.environ.pop("WECHAT_TOOL_DATA_DIR", None)
|
||||
else:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data
|
||||
|
||||
def test_privacy_mode_never_exports_media(self):
|
||||
with TemporaryDirectory() as td:
|
||||
root = Path(td)
|
||||
account = "wxid_test"
|
||||
username = "wxid_friend"
|
||||
self._prepare_account(root, account=account, username=username)
|
||||
|
||||
prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR")
|
||||
try:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = str(root)
|
||||
svc = self._reload_export_modules()
|
||||
job = self._create_job(
|
||||
svc.CHAT_EXPORT_MANAGER,
|
||||
account=account,
|
||||
username=username,
|
||||
message_types=["image", "video", "text"],
|
||||
include_media=True,
|
||||
privacy_mode=True,
|
||||
)
|
||||
self.assertEqual(job.status, "done", msg=job.error)
|
||||
|
||||
payload, manifest, names = self._load_export_payload(job.zip_path)
|
||||
self.assertFalse(any(n.startswith("media/images/") for n in names))
|
||||
self.assertFalse(any(n.startswith("media/videos/") for n in names))
|
||||
self.assertFalse(any(n.startswith("media/video_thumbs/") for n in names))
|
||||
|
||||
for msg in payload.get("messages", []):
|
||||
self.assertFalse(msg.get("offlineMedia"))
|
||||
|
||||
self.assertFalse(bool(manifest.get("options", {}).get("includeMedia")))
|
||||
finally:
|
||||
if prev_data is None:
|
||||
os.environ.pop("WECHAT_TOOL_DATA_DIR", None)
|
||||
else:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data
|
||||
|
||||
def test_transfer_only_exports_transfer_messages(self):
|
||||
with TemporaryDirectory() as td:
|
||||
root = Path(td)
|
||||
account = "wxid_test"
|
||||
username = "wxid_friend"
|
||||
self._prepare_account(root, account=account, username=username)
|
||||
|
||||
prev_data = os.environ.get("WECHAT_TOOL_DATA_DIR")
|
||||
try:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = str(root)
|
||||
svc = self._reload_export_modules()
|
||||
job = self._create_job(
|
||||
svc.CHAT_EXPORT_MANAGER,
|
||||
account=account,
|
||||
username=username,
|
||||
message_types=["transfer"],
|
||||
include_media=True,
|
||||
)
|
||||
self.assertEqual(job.status, "done", msg=job.error)
|
||||
|
||||
payload, manifest, _ = self._load_export_payload(job.zip_path)
|
||||
messages = list(payload.get("messages", []))
|
||||
self.assertEqual(len(messages), 1)
|
||||
self.assertTrue(all(str(m.get("renderType") or "") == "transfer" for m in messages))
|
||||
self.assertEqual(manifest.get("filters", {}).get("messageTypes"), ["transfer"])
|
||||
finally:
|
||||
if prev_data is None:
|
||||
os.environ.pop("WECHAT_TOOL_DATA_DIR", None)
|
||||
else:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = prev_data
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
tests/test_contact_type_detection.py (new file, 71 lines)
@@ -0,0 +1,71 @@
import sys
import unittest
from pathlib import Path


ROOT = Path(__file__).resolve().parents[1]
sys.path.insert(0, str(ROOT / "src"))


class TestContactTypeDetection(unittest.TestCase):
    def test_infer_group(self):
        from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type

        row = {"local_type": 0, "alias": "", "remark": "", "nick_name": ""}
        self.assertEqual(_infer_contact_type("123@chatroom", row), "group")

    def test_infer_official_by_prefix(self):
        from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type

        row = {"local_type": 0, "verify_flag": 0, "alias": "", "remark": "", "nick_name": ""}
        self.assertEqual(_infer_contact_type("gh_xxx", row), "official")

    def test_infer_official_by_verify_flag(self):
        from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type

        row = {"local_type": 1, "verify_flag": 24, "alias": "", "remark": "", "nick_name": ""}
        self.assertEqual(_infer_contact_type("wxid_xxx", row), "official")

    def test_infer_none_for_local_type_3_without_verify(self):
        from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type

        row = {"local_type": 3, "verify_flag": 0, "alias": "", "remark": "", "nick_name": "普通联系人"}
        self.assertIsNone(_infer_contact_type("wxid_xxx", row))

    def test_infer_none_from_wxid_alias_when_local_type_not_1(self):
        from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type

        row = {"local_type": 0, "verify_flag": 0, "alias": "wechat_id", "remark": "", "nick_name": ""}
        self.assertIsNone(_infer_contact_type("wxid_xxx", row))

    def test_infer_friend_from_local_type_1(self):
        from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type

        row = {"local_type": 1, "verify_flag": 0, "alias": "", "remark": "", "nick_name": ""}
        self.assertEqual(_infer_contact_type("wxid_xxx", row), "friend")

    def test_infer_none_from_local_type_2(self):
        from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type

        row = {"local_type": 2, "verify_flag": 0, "alias": "", "remark": "", "nick_name": ""}
        self.assertIsNone(_infer_contact_type("wxid_xxx", row))

    def test_infer_none_when_empty_type_0(self):
        from wechat_decrypt_tool.routers.chat_contacts import _infer_contact_type

        row = {"local_type": 0, "verify_flag": 0, "alias": "", "remark": "", "nick_name": ""}
        self.assertIsNone(_infer_contact_type("wxid_xxx", row))

    def test_valid_contact_username_filters_system_accounts(self):
        from wechat_decrypt_tool.routers.chat_contacts import _is_valid_contact_username

        self.assertFalse(_is_valid_contact_username("filehelper"))
        self.assertFalse(_is_valid_contact_username("notifymessage"))
        self.assertFalse(_is_valid_contact_username("fake_abc"))
        self.assertTrue(_is_valid_contact_username("weixin"))
        self.assertTrue(_is_valid_contact_username("wxid_abc"))
        self.assertTrue(_is_valid_contact_username("123@chatroom"))


if __name__ == "__main__":
    unittest.main()

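For orientation, one plausible shape of the classifier these tests describe: chatroom suffixes map to groups, the gh_ prefix or a non-zero verify_flag maps to official accounts, local_type 1 maps to friends, and everything else (strangers, local_type 2/3, bare wxid rows) is excluded. This is a hedged reconstruction consistent with the assertions above, not the actual _infer_contact_type in chat_contacts.py:

```python
from typing import Any, Optional


def infer_contact_type(username: str, row: dict[str, Any]) -> Optional[str]:
    """Hypothetical re-derivation of _infer_contact_type from the test assertions."""
    if username.endswith("@chatroom"):
        return "group"
    if username.startswith("gh_") or int(row.get("verify_flag") or 0) != 0:
        return "official"
    if int(row.get("local_type") or 0) == 1:
        return "friend"
    return None  # strangers / unknown rows are not counted as contacts
```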
tests/test_contacts_export.py (new file, 546 lines)
@@ -0,0 +1,546 @@
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
import sys
|
||||
import unittest
|
||||
import importlib
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[1]
|
||||
sys.path.insert(0, str(ROOT / "src"))
|
||||
|
||||
|
||||
class TestContactsExport(unittest.TestCase):
|
||||
@staticmethod
|
||||
def _encode_varint(value: int) -> bytes:
|
||||
v = int(value)
|
||||
out = bytearray()
|
||||
while True:
|
||||
b = v & 0x7F
|
||||
v >>= 7
|
||||
if v:
|
||||
out.append(b | 0x80)
|
||||
else:
|
||||
out.append(b)
|
||||
break
|
||||
return bytes(out)
|
||||
|
||||
@classmethod
|
||||
def _encode_field_len(cls, field_no: int, raw: bytes) -> bytes:
|
||||
tag = (int(field_no) << 3) | 2
|
||||
payload = bytes(raw)
|
||||
return cls._encode_varint(tag) + cls._encode_varint(len(payload)) + payload
|
||||
|
||||
@classmethod
|
||||
def _encode_field_varint(cls, field_no: int, value: int) -> bytes:
|
||||
tag = int(field_no) << 3
|
||||
return cls._encode_varint(tag) + cls._encode_varint(int(value))
|
||||
|
||||
@classmethod
|
||||
def _build_extra_buffer(cls, *, country: str, province: str, city: str, source_scene: int) -> bytes:
|
||||
return b"".join(
|
||||
[
|
||||
cls._encode_field_len(5, country.encode("utf-8")),
|
||||
cls._encode_field_len(6, province.encode("utf-8")),
|
||||
cls._encode_field_len(7, city.encode("utf-8")),
|
||||
cls._encode_field_varint(8, source_scene),
|
||||
]
|
||||
)
|
||||
|
||||
def _seed_contact_db(self, path: Path) -> None:
|
||||
conn = sqlite3.connect(str(path))
|
||||
try:
|
||||
conn.execute(
|
||||
"""
|
||||
CREATE TABLE contact (
|
||||
username TEXT,
|
||||
remark TEXT,
|
||||
nick_name TEXT,
|
||||
alias TEXT,
|
||||
local_type INTEGER,
|
||||
verify_flag INTEGER,
|
||||
big_head_url TEXT,
|
||||
small_head_url TEXT,
|
||||
extra_buffer BLOB
|
||||
)
|
||||
"""
|
||||
)
|
||||
conn.execute(
|
||||
"""
|
||||
CREATE TABLE stranger (
|
||||
username TEXT,
|
||||
remark TEXT,
|
||||
nick_name TEXT,
|
||||
alias TEXT,
|
||||
local_type INTEGER,
|
||||
verify_flag INTEGER,
|
||||
big_head_url TEXT,
|
||||
small_head_url TEXT,
|
||||
extra_buffer BLOB
|
||||
)
|
||||
"""
|
||||
)
|
||||
|
||||
friend_extra_buffer = self._build_extra_buffer(
|
||||
country="CN",
|
||||
province="Sichuan",
|
||||
city="Chengdu",
|
||||
source_scene=14,
|
||||
)
|
||||
|
||||
conn.execute(
|
||||
"INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(
|
||||
"wxid_friend",
|
||||
"好友备注",
|
||||
"好友昵称",
|
||||
"friend_alias",
|
||||
1,
|
||||
0,
|
||||
"https://cdn.example.com/friend_big.jpg",
|
||||
"https://cdn.example.com/friend_small.jpg",
|
||||
friend_extra_buffer,
|
||||
),
|
||||
)
|
||||
conn.execute(
|
||||
"INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(
|
||||
"room@chatroom",
|
||||
"",
|
||||
"测试群",
|
||||
"",
|
||||
0,
|
||||
0,
|
||||
"https://cdn.example.com/group_big.jpg",
|
||||
"",
|
||||
b"",
|
||||
),
|
||||
)
|
||||
conn.execute(
|
||||
"INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(
|
||||
"gh_official",
|
||||
"",
|
||||
"公众号",
|
||||
"",
|
||||
4,
|
||||
8,
|
||||
"",
|
||||
"https://cdn.example.com/official_small.jpg",
|
||||
b"",
|
||||
),
|
||||
)
|
||||
conn.execute(
|
||||
"INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(
|
||||
"wxid_local_type_3",
|
||||
"",
|
||||
"不应计入联系人",
|
||||
"",
|
||||
3,
|
||||
0,
|
||||
"",
|
||||
"",
|
||||
b"",
|
||||
),
|
||||
)
|
||||
conn.execute(
|
||||
"INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(
|
||||
"weixin",
|
||||
"",
|
||||
"微信团队",
|
||||
"",
|
||||
1,
|
||||
56,
|
||||
"",
|
||||
"",
|
||||
b"",
|
||||
),
|
||||
)
|
||||
conn.execute(
|
||||
"INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(
|
||||
"filehelper",
|
||||
"",
|
||||
"文件传输助手",
|
||||
"",
|
||||
0,
|
||||
0,
|
||||
"",
|
||||
"",
|
||||
b"",
|
||||
),
|
||||
)
|
||||
conn.execute(
|
||||
"INSERT INTO stranger VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(
|
||||
"stranger_verified",
|
||||
"",
|
||||
"陌生人认证号",
|
||||
"",
|
||||
4,
|
||||
24,
|
||||
"",
|
||||
"",
|
||||
b"",
|
||||
),
|
||||
)
|
||||
conn.commit()
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
def _seed_session_db(self, path: Path) -> None:
|
||||
conn = sqlite3.connect(str(path))
|
||||
try:
|
||||
conn.execute(
|
||||
"""
|
||||
CREATE TABLE SessionTable (
|
||||
username TEXT,
|
||||
sort_timestamp INTEGER,
|
||||
last_timestamp INTEGER
|
||||
)
|
||||
"""
|
||||
)
|
||||
conn.execute("INSERT INTO SessionTable VALUES (?, ?, ?)", ("room@chatroom", 300, 300))
|
||||
conn.execute("INSERT INTO SessionTable VALUES (?, ?, ?)", ("wxid_friend", 200, 200))
|
||||
conn.execute("INSERT INTO SessionTable VALUES (?, ?, ?)", ("gh_official", 100, 100))
|
||||
conn.execute("INSERT INTO SessionTable VALUES (?, ?, ?)", ("missing@chatroom", 250, 250))
|
||||
conn.commit()
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
def _seed_contact_db_legacy(self, path: Path) -> None:
|
||||
conn = sqlite3.connect(str(path))
|
||||
try:
|
||||
conn.execute(
|
||||
"""
|
||||
CREATE TABLE contact (
|
||||
username TEXT,
|
||||
remark TEXT,
|
||||
nick_name TEXT,
|
||||
alias TEXT,
|
||||
local_type INTEGER,
|
||||
verify_flag INTEGER,
|
||||
big_head_url TEXT,
|
||||
small_head_url TEXT
|
||||
)
|
||||
"""
|
||||
)
|
||||
conn.execute(
|
||||
"""
|
||||
CREATE TABLE stranger (
|
||||
username TEXT,
|
||||
remark TEXT,
|
||||
nick_name TEXT,
|
||||
alias TEXT,
|
||||
local_type INTEGER,
|
||||
verify_flag INTEGER,
|
||||
big_head_url TEXT,
|
||||
small_head_url TEXT
|
||||
)
|
||||
"""
|
||||
)
|
||||
conn.execute(
|
||||
"INSERT INTO contact VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
|
||||
(
|
||||
"wxid_legacy_friend",
|
||||
"旧版好友备注",
|
||||
"旧版好友昵称",
|
||||
"legacy_friend_alias",
|
||||
1,
|
||||
0,
|
||||
"",
|
||||
"",
|
||||
),
|
||||
)
|
||||
conn.commit()
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
def test_export_json_and_csv(self):
|
||||
from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
with TemporaryDirectory() as td:
|
||||
root = Path(td)
|
||||
account = "wxid_test"
|
||||
account_dir = root / "output" / "databases" / account
|
||||
account_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
self._seed_contact_db(account_dir / "contact.db")
|
||||
self._seed_session_db(account_dir / "session.db")
|
||||
|
||||
prev = None
|
||||
try:
|
||||
prev = os.environ.get("WECHAT_TOOL_DATA_DIR")
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = str(root)
|
||||
|
||||
import wechat_decrypt_tool.chat_helpers as chat_helpers
|
||||
import wechat_decrypt_tool.routers.chat_contacts as chat_contacts
|
||||
|
||||
importlib.reload(chat_helpers)
|
||||
importlib.reload(chat_contacts)
|
||||
|
||||
app = FastAPI()
|
||||
app.include_router(chat_contacts.router)
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
list_resp = client.get(
|
||||
"/api/chat/contacts",
|
||||
params={
|
||||
"account": account,
|
||||
"include_friends": True,
|
||||
"include_groups": True,
|
||||
"include_officials": True,
|
||||
},
|
||||
)
|
||||
self.assertEqual(list_resp.status_code, 200)
|
||||
list_payload = list_resp.json()
|
||||
self.assertEqual(list_payload["status"], "success")
|
||||
self.assertEqual(list_payload["total"], 6)
|
||||
self.assertEqual(list_payload["counts"]["friends"], 1)
|
||||
self.assertEqual(list_payload["counts"]["groups"], 2)
|
||||
self.assertEqual(list_payload["counts"]["officials"], 3)
|
||||
usernames = {str(x.get("username")) for x in list_payload.get("contacts", [])}
|
||||
self.assertIn("missing@chatroom", usernames)
|
||||
self.assertIn("weixin", usernames)
|
||||
self.assertNotIn("wxid_local_type_3", usernames)
|
||||
first = list_payload["contacts"][0]
|
||||
self.assertIn("avatarLink", first)
|
||||
|
||||
friend_contact = next(
|
||||
(x for x in list_payload.get("contacts", []) if str(x.get("username")) == "wxid_friend"),
|
||||
{},
|
||||
)
|
||||
self.assertEqual(friend_contact.get("country"), "CN")
|
||||
self.assertEqual(friend_contact.get("province"), "Sichuan")
|
||||
self.assertEqual(friend_contact.get("city"), "Chengdu")
|
||||
self.assertEqual(friend_contact.get("region"), "中国大陆·Sichuan·Chengdu")
|
||||
self.assertEqual(friend_contact.get("sourceScene"), 14)
|
||||
self.assertEqual(friend_contact.get("source"), "通过群聊添加")
|
||||
|
||||
export_dir = root / "exports"
|
||||
export_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
json_resp = client.post(
|
||||
"/api/chat/contacts/export",
|
||||
json={
|
||||
"account": account,
|
||||
"output_dir": str(export_dir),
|
||||
"format": "json",
|
||||
"include_avatar_link": True,
|
||||
"contact_types": {
|
||||
"friends": True,
|
||||
"groups": True,
|
||||
"officials": True,
|
||||
},
|
||||
},
|
||||
)
|
||||
self.assertEqual(json_resp.status_code, 200)
|
||||
json_payload = json_resp.json()
|
||||
self.assertEqual(json_payload["status"], "success")
|
||||
self.assertEqual(json_payload["count"], 6)
|
||||
json_path = Path(json_payload["outputPath"])
|
||||
self.assertTrue(json_path.exists())
|
||||
|
||||
data = json.loads(json_path.read_text(encoding="utf-8"))
|
||||
self.assertEqual(data["count"], 6)
|
||||
self.assertIn("avatarLink", data["contacts"][0])
|
||||
self.assertIn("region", data["contacts"][0])
|
||||
self.assertIn("country", data["contacts"][0])
|
||||
self.assertIn("province", data["contacts"][0])
|
||||
self.assertIn("city", data["contacts"][0])
|
||||
self.assertIn("source", data["contacts"][0])
|
||||
self.assertIn("sourceScene", data["contacts"][0])
|
||||
export_usernames = {str(x.get("username")) for x in data.get("contacts", [])}
|
||||
self.assertIn("missing@chatroom", export_usernames)
|
||||
self.assertNotIn("wxid_local_type_3", export_usernames)
|
||||
|
||||
friend_export = next(
|
||||
(x for x in data.get("contacts", []) if str(x.get("username")) == "wxid_friend"),
|
||||
{},
|
||||
)
|
||||
self.assertEqual(friend_export.get("region"), "中国大陆·Sichuan·Chengdu")
|
||||
self.assertEqual(friend_export.get("sourceScene"), 14)
|
||||
self.assertEqual(friend_export.get("source"), "通过群聊添加")
|
||||
|
||||
csv_resp = client.post(
|
||||
"/api/chat/contacts/export",
|
||||
json={
|
||||
"account": account,
|
||||
"output_dir": str(export_dir),
|
||||
"format": "csv",
|
||||
"include_avatar_link": False,
|
||||
"contact_types": {
|
||||
"friends": True,
|
||||
"groups": False,
|
||||
"officials": False,
|
||||
},
|
||||
},
|
||||
)
|
||||
self.assertEqual(csv_resp.status_code, 200)
|
||||
csv_payload = csv_resp.json()
|
||||
self.assertEqual(csv_payload["count"], 1)
|
||||
csv_path = Path(csv_payload["outputPath"])
|
||||
text = csv_path.read_text(encoding="utf-8-sig")
|
||||
self.assertIn("用户名,显示名称,备注,昵称,微信号,类型,地区,国家/地区码,省份,城市,来源,来源场景码", text.splitlines()[0])
|
||||
self.assertNotIn("头像链接", text.splitlines()[0])
|
||||
self.assertIn("wxid_friend", text)
|
||||
self.assertIn("中国大陆·Sichuan·Chengdu", text)
|
||||
self.assertIn("通过群聊添加", text)
|
||||
self.assertIn(",14", text)
|
||||
self.assertNotIn("wxid_local_type_3", text)
|
||||
finally:
|
||||
if prev is None:
|
||||
os.environ.pop("WECHAT_TOOL_DATA_DIR", None)
|
||||
else:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = prev
|
||||
|
||||
def test_export_invalid_format_returns_400(self):
|
||||
from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
with TemporaryDirectory() as td:
|
||||
root = Path(td)
|
||||
account = "wxid_test"
|
||||
account_dir = root / "output" / "databases" / account
|
||||
account_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
self._seed_contact_db(account_dir / "contact.db")
|
||||
self._seed_session_db(account_dir / "session.db")
|
||||
|
||||
prev = None
|
||||
try:
|
||||
prev = os.environ.get("WECHAT_TOOL_DATA_DIR")
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = str(root)
|
||||
|
||||
import wechat_decrypt_tool.chat_helpers as chat_helpers
|
||||
import wechat_decrypt_tool.routers.chat_contacts as chat_contacts
|
||||
|
||||
importlib.reload(chat_helpers)
|
||||
importlib.reload(chat_contacts)
|
||||
|
||||
app = FastAPI()
|
||||
app.include_router(chat_contacts.router)
|
||||
|
||||
client = TestClient(app)
|
||||
resp = client.post(
|
||||
"/api/chat/contacts/export",
|
||||
json={
|
||||
"account": account,
|
||||
"output_dir": str(root / "exports"),
|
||||
"format": "vcf",
|
||||
"include_avatar_link": True,
|
||||
"contact_types": {
|
||||
"friends": True,
|
||||
"groups": True,
|
||||
"officials": True,
|
||||
},
|
||||
},
|
||||
)
|
||||
self.assertEqual(resp.status_code, 400)
|
||||
finally:
|
||||
if prev is None:
|
||||
os.environ.pop("WECHAT_TOOL_DATA_DIR", None)
|
||||
else:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = prev
|
||||
|
||||
def test_missing_contact_db_returns_404(self):
|
||||
from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
with TemporaryDirectory() as td:
|
||||
root = Path(td)
|
||||
account = "wxid_test"
|
||||
account_dir = root / "output" / "databases" / account
|
||||
account_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# only session.db exists
|
||||
self._seed_session_db(account_dir / "session.db")
|
||||
|
||||
prev = None
|
||||
try:
|
||||
prev = os.environ.get("WECHAT_TOOL_DATA_DIR")
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = str(root)
|
||||
|
||||
import wechat_decrypt_tool.chat_helpers as chat_helpers
|
||||
import wechat_decrypt_tool.routers.chat_contacts as chat_contacts
|
||||
|
||||
importlib.reload(chat_helpers)
|
||||
importlib.reload(chat_contacts)
|
||||
|
||||
app = FastAPI()
|
||||
app.include_router(chat_contacts.router)
|
||||
client = TestClient(app)
|
||||
|
||||
resp = client.get("/api/chat/contacts", params={"account": account})
|
||||
self.assertEqual(resp.status_code, 404)
|
||||
finally:
|
||||
if prev is None:
|
||||
os.environ.pop("WECHAT_TOOL_DATA_DIR", None)
|
||||
else:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = prev
|
||||
|
||||
def test_legacy_schema_without_extra_buffer_is_compatible(self):
|
||||
from fastapi import FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
with TemporaryDirectory() as td:
|
||||
root = Path(td)
|
||||
account = "wxid_legacy"
|
||||
account_dir = root / "output" / "databases" / account
|
||||
account_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
self._seed_contact_db_legacy(account_dir / "contact.db")
|
||||
self._seed_session_db(account_dir / "session.db")
|
||||
|
||||
prev = None
|
||||
try:
|
||||
prev = os.environ.get("WECHAT_TOOL_DATA_DIR")
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = str(root)
|
||||
|
||||
import wechat_decrypt_tool.chat_helpers as chat_helpers
|
||||
import wechat_decrypt_tool.routers.chat_contacts as chat_contacts
|
||||
|
||||
importlib.reload(chat_helpers)
|
||||
importlib.reload(chat_contacts)
|
||||
|
||||
app = FastAPI()
|
||||
app.include_router(chat_contacts.router)
|
||||
client = TestClient(app)
|
||||
|
||||
resp = client.get(
|
||||
"/api/chat/contacts",
|
||||
params={
|
||||
"account": account,
|
||||
"include_friends": True,
|
||||
"include_groups": False,
|
||||
"include_officials": False,
|
||||
},
|
||||
)
|
||||
self.assertEqual(resp.status_code, 200)
|
||||
payload = resp.json()
|
||||
self.assertEqual(payload.get("status"), "success")
|
||||
self.assertEqual(int(payload.get("total", 0)), 1)
|
||||
|
||||
contact = payload.get("contacts", [])[0]
|
||||
self.assertEqual(contact.get("username"), "wxid_legacy_friend")
|
||||
self.assertEqual(contact.get("country"), "")
|
||||
self.assertEqual(contact.get("province"), "")
|
||||
self.assertEqual(contact.get("city"), "")
|
||||
self.assertEqual(contact.get("region"), "")
|
||||
self.assertIsNone(contact.get("sourceScene"))
|
||||
self.assertEqual(contact.get("source"), "")
|
||||
finally:
|
||||
if prev is None:
|
||||
os.environ.pop("WECHAT_TOOL_DATA_DIR", None)
|
||||
else:
|
||||
os.environ["WECHAT_TOOL_DATA_DIR"] = prev
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()