Compare commits

39 Commits

| SHA1 |
|---|
| 18fe644715 |
| 40cde8c69a |
| 4b0af47906 |
| 9365b3c8cd |
| 4b9f015ea7 |
| c42d4a084e |
| 5bb3feb05b |
| 05f776ed8b |
| 9cec809485 |
| 429f909152 |
| 084dd23df1 |
| e55afdd739 |
| 72128a132b |
| 92ca2cddad |
| 3db0d1dfe5 |
| 57907323e6 |
| dbdca44c5f |
| fe1dd2201f |
| e0ae194cc3 |
| 6fc5700457 |
| c4fdcf86d4 |
| 3088500c8d |
| 861f3a3624 |
| c55783e4d9 |
| 955e284d41 |
| fc4c47427e |
| e2d7563faa |
| 27d69f7f8d |
| a77bb5af44 |
| 00286261a4 |
| 0b898dccaa |
| a1d9ac4e68 |
| 4150939e23 |
| 8f84b7f063 |
| 04b245ac64 |
| 12f7e62957 |
| 9600d310c7 |
| dec5a2472a |
| 13eb7c6ea2 |
@@ -12,10 +12,12 @@ BiliBili ShadowReplay is a tool for caching live streams and editing and uploading clips in real time

Currently, only Bilibili and Douyin live streams are supported.

[](https://www.star-history.com/#Xinrea/bili-shadowreplay&Date)

## Installation and Usage

Go to the website for instructions: [BiliBili ShadowReplay](https://bsr.xinrea.cn/)

## Contributing
@@ -18,15 +18,43 @@ export default defineConfig({
{
text: "开始使用",
items: [
{ text: "安装准备", link: "/getting-started/installation" },
{ text: "配置使用", link: "/getting-started/configuration" },
{ text: "FFmpeg 配置", link: "/getting-started/ffmpeg" },
{
text: "安装准备",
items: [
{
text: "桌面端安装",
link: "/getting-started/installation/desktop",
},
{
text: "Docker 安装",
link: "/getting-started/installation/docker",
},
],
},
{
text: "配置使用",
items: [
{ text: "账号配置", link: "/getting-started/config/account" },
{ text: "FFmpeg 配置", link: "/getting-started/config/ffmpeg" },
{ text: "Whisper 配置", link: "/getting-started/config/whisper" },
{ text: "LLM 配置", link: "/getting-started/config/llm" },
],
},
],
},
{
text: "说明文档",
items: [
{ text: "功能说明", link: "/usage/features" },
{
text: "功能说明",
items: [
{ text: "工作流程", link: "/usage/features/workflow" },
{ text: "直播间管理", link: "/usage/features/room" },
{ text: "切片功能", link: "/usage/features/clip" },
{ text: "字幕功能", link: "/usage/features/subtitle" },
{ text: "弹幕功能", link: "/usage/features/danmaku" },
],
},
{ text: "常见问题", link: "/usage/faq" },
],
},
@@ -1,27 +1,12 @@

# Configuration

## Account Setup
# Account Setup

To add a live room, at least one account on the same platform must be configured. On the accounts page, you can add an account via the "Add Account" button.

- Bilibili accounts: QR-code login and manual cookie configuration are both supported; QR-code login is recommended
- Douyin accounts: only manual cookie configuration is currently supported

### Douyin Account Setup
## Douyin Account Setup

First make sure you are logged in to Douyin, then open your [profile page](https://www.douyin.com/user/self). Right-click the page, choose "Inspect" to open the developer tools, switch to the "Network" tab, and refresh the page. Find the `self` request in the list (usually the first one), click it, and look at its request headers. Locate the `Cookie` header, copy its value completely, and paste it into the `Cookie` input on the configuration page.

## FFmpeg Setup

If you want to use clip generation and encoding, make sure FFmpeg is configured correctly. Except for the Windows builds, which bundle FFmpeg, other platforms require a manual FFmpeg installation; see [FFmpeg Setup](/getting-started/ffmpeg).

## Whisper Model Setup

To use AI subtitle recognition, configure the Whisper model path on the settings page. Model files can be downloaded from the web, for example:

- [Whisper.cpp (China mirror, somewhat outdated)](https://www.modelscope.cn/models/cjc1887415157/whisper.cpp/files)
- [Whisper.cpp](https://huggingface.co/ggerganov/whisper.cpp/tree/main)

Choose a model that fits your needs; note that models with `en` in the name are English-only, while the others are multilingual.
9 docs/getting-started/config/llm.md Normal file
@@ -0,0 +1,9 @@
# LLM Configuration

The AI Agent assistant on the assistant page requires a large language model. Currently, only services compatible with the OpenAI protocol are supported.

This software does not provide an LLM service itself; choose a provider on your own. Note that the AI Agent consumes considerably more tokens than a plain chat, so make sure you have a sufficient token balance.

In addition, the AI Agent requires a model that supports function calling; otherwise it cannot invoke tools.
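A quick way to verify that a provider is OpenAI-protocol compatible and that the chosen model supports function calling is to send a chat completion containing a `tools` array. This is only a sketch; the endpoint, API key, model name, and tool name below are placeholders, not values shipped with BSR:

```bash
# Hypothetical check against an OpenAI-compatible endpoint; replace URL, key, and model.
curl https://api.openai.com/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer $OPENAI_API_KEY" \
  -d '{
    "model": "gpt-4o-mini",
    "messages": [{"role": "user", "content": "List my recorded streams"}],
    "tools": [{
      "type": "function",
      "function": {
        "name": "list_archives",
        "description": "Hypothetical tool: list recorded streams",
        "parameters": {"type": "object", "properties": {}}
      }
    }]
  }'
```

If the response contains a `tool_calls` entry instead of plain text, the model can drive tools the way the AI Agent expects.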
35 docs/getting-started/config/whisper.md Normal file
@@ -0,0 +1,35 @@
# Whisper Configuration

To use AI subtitle recognition, configure Whisper on the settings page. You can either run a Whisper model locally or use an online Whisper service (which usually requires a paid API key).

> [!NOTE]
> There are better Chinese speech-recognition solutions, but they usually require uploading files to object storage and processing them asynchronously. To keep the implementation simple, this project runs a Whisper model locally or calls an online Whisper service, so the subtitle result comes back directly in the response.

## Running a Whisper Model Locally

To generate subtitles with a locally run Whisper model, download a Whisper.cpp model and set its path in the settings. Model files can be downloaded from the web, for example:

- [Whisper.cpp (China mirror, somewhat outdated)](https://www.modelscope.cn/models/cjc1887415157/whisper.cpp/files)
- [Whisper.cpp](https://huggingface.co/ggerganov/whisper.cpp/tree/main)

Choose a model that fits your needs; note that models with `en` in the name are English-only, while the others are multilingual.

Larger model files generally mean higher resource usage at runtime, so pick a model that matches your hardware. Also, the GPU build is **dramatically faster** than the CPU build at subtitle generation, so the GPU build is recommended for local processing (currently Nvidia GPUs only).

## Using an Online Whisper Service

To generate subtitles with an online Whisper service, switch to online Whisper in the settings and configure an API key. OpenAI is not the only provider; many cloud platforms offer Whisper services as well.

## Tuning Subtitle Quality

The settings currently support a Whisper language and a Whisper prompt; both apply to local and online Whisper alike.

Normally the `auto` language option detects the spoken language and produces subtitles in that language. If you need subtitles in another language, or the detected language is wrong, set the language explicitly. According to the OpenAI documentation for the `language` parameter, the supported languages are:

Afrikaans, Arabic, Armenian, Azerbaijani, Belarusian, Bosnian, Bulgarian, Catalan, Chinese, Croatian, Czech, Danish, Dutch, English, Estonian, Finnish, French, Galician, German, Greek, Hebrew, Hindi, Hungarian, Icelandic, Indonesian, Italian, Japanese, Kannada, Kazakh, Korean, Latvian, Lithuanian, Macedonian, Malay, Marathi, Maori, Nepali, Norwegian, Persian, Polish, Portuguese, Romanian, Russian, Serbian, Slovak, Slovenian, Spanish, Swahili, Swedish, Tagalog, Tamil, Thai, Turkish, Ukrainian, Urdu, Vietnamese, and Welsh.

The prompt can steer the style of the generated subtitles (and, to some extent, their quality). Note that Whisper cannot understand complex prompts; use short descriptions that bias it toward vocabulary from the relevant domain, or that hint at the punctuation style you want, so that unrelated terms do not slip in.
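For reference, an online Whisper request that applies the language and prompt settings discussed above looks roughly like this. The sketch assumes the OpenAI-hosted endpoint and a placeholder audio file and key; other providers expose the same Whisper API shape at different URLs:

```bash
# Hypothetical transcription request; audio.wav, the key, and the endpoint are placeholders.
curl https://api.openai.com/v1/audio/transcriptions \
  -H "Authorization: Bearer $OPENAI_API_KEY" \
  -F file=@audio.wav \
  -F model=whisper-1 \
  -F language=zh \
  -F prompt="游戏直播相关词汇,使用简体中文标点。"
```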
@@ -1,66 +0,0 @@

# Installation

## Desktop Installation

Desktop installers are currently provided for Windows, Linux, and MacOS.

Each installer comes in two flavors: a normal build, suitable for most users, and a debug build that includes extra diagnostic information for developers. Because the program manages sensitive information such as accounts, download it only from trusted sources; every build is available on the [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) page.

### Windows

Because the program ships with built-in Whisper subtitle recognition, there are two Windows builds:

- **Normal build**: bundles Whisper GPU acceleration; subtitle recognition is faster, the package is larger, and only Nvidia GPUs are supported
- **CPU build**: runs subtitle recognition on the CPU, which is slower

Choose the build that matches your GPU.

### Linux

The Linux build currently supports CPU inference only and has received less testing; please report any issues you encounter.

### MacOS

The MacOS build ships with Metal GPU acceleration. On first launch after installation, the system will warn that it cannot open software downloaded from the internet; go to Settings → Privacy & Security and choose "Open Anyway" to allow the program to run.

## Docker Deployment

BiliBili ShadowReplay can also be deployed server-side with a web control interface, which is useful on headless servers.

### Pulling the Image

```bash
# Pull the latest version
docker pull ghcr.io/xinrea/bili-shadowreplay:latest
# Pull a specific version
docker pull ghcr.io/xinrea/bili-shadowreplay:2.5.0
# Too slow? Pull from a mirror
docker pull ghcr.nju.edu.cn/xinrea/bili-shadowreplay:latest
```

### Running the Image

Usage:

```bash
sudo docker run -it -d \
  -p 3000:3000 \
  -v $DATA_DIR:/app/data \
  -v $CACHE_DIR:/app/cache \
  -v $OUTPUT_DIR:/app/output \
  -v $WHISPER_MODEL:/app/whisper_model.bin \
  --name bili-shadowreplay \
  ghcr.io/xinrea/bili-shadowreplay:latest
```

Where:

- `$DATA_DIR`: the data directory, matching the desktop version's data directory;

  on Windows it is `C:\Users\{username}\AppData\Roaming\cn.vjoi.bilishadowreplay`;

  on MacOS it is `/Users/{user}/Library/Application Support/cn.vjoi.bilishadowreplay`

- `$CACHE_DIR`: the cache directory, matching the desktop version's cache directory;
- `$OUTPUT_DIR`: the output directory, matching the desktop version's output directory;
- `$WHISPER_MODEL`: the path to the Whisper model file, matching the desktop version's Whisper model path.
22 docs/getting-started/installation/desktop.md Normal file
@@ -0,0 +1,22 @@
# Desktop Installation

Desktop installers are currently provided for Windows, Linux, and MacOS.

Each installer comes in two flavors: a normal build, suitable for most users, and a debug build that includes extra diagnostic information for developers. Because the program manages sensitive information such as accounts, download it only from trusted sources; every build is available on the [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) page.

## Windows

Because the program ships with built-in Whisper subtitle recognition, there are two Windows builds:

- **Normal build**: bundles Whisper GPU acceleration; subtitle recognition is faster, the package is larger, and only Nvidia GPUs are supported
- **CPU build**: runs subtitle recognition on the CPU, which is slower

Choose the build that matches your GPU.

## Linux

The Linux build currently supports CPU inference only and has received less testing; please report any issues you encounter.

## MacOS

The MacOS build ships with Metal GPU acceleration. On first launch after installation, the system will warn that it cannot open software downloaded from the internet; go to Settings → Privacy & Security and choose "Open Anyway" to allow the program to run.
41 docs/getting-started/installation/docker.md Normal file
@@ -0,0 +1,41 @@
# Docker Deployment

BiliBili ShadowReplay can also be deployed server-side with a web control interface, which is useful on headless servers.

## Pulling the Image

```bash
# Pull the latest version
docker pull ghcr.io/xinrea/bili-shadowreplay:latest
# Pull a specific version
docker pull ghcr.io/xinrea/bili-shadowreplay:2.5.0
# Too slow? Pull from a mirror
docker pull ghcr.nju.edu.cn/xinrea/bili-shadowreplay:latest
```

## Running the Image

Usage:

```bash
sudo docker run -it -d \
  -p 3000:3000 \
  -v $DATA_DIR:/app/data \
  -v $CACHE_DIR:/app/cache \
  -v $OUTPUT_DIR:/app/output \
  -v $WHISPER_MODEL:/app/whisper_model.bin \
  --name bili-shadowreplay \
  ghcr.io/xinrea/bili-shadowreplay:latest
```

Where:

- `$DATA_DIR`: the data directory, matching the desktop version's data directory;

  on Windows it is `C:\Users\{username}\AppData\Roaming\cn.vjoi.bilishadowreplay`;

  on MacOS it is `/Users/{user}/Library/Application Support/cn.vjoi.bilishadowreplay`

- `$CACHE_DIR`: the cache directory, matching the desktop version's cache directory;
- `$OUTPUT_DIR`: the output directory, matching the desktop version's output directory;
- `$WHISPER_MODEL`: the path to the Whisper model file, matching the desktop version's Whisper model path.
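For illustration, a hypothetical invocation on a Linux host with the variables filled in; every path below is an example, not a default shipped with the image:

```bash
sudo docker run -it -d \
  -p 3000:3000 \
  -v /srv/bsr/data:/app/data \
  -v /srv/bsr/cache:/app/cache \
  -v /srv/bsr/output:/app/output \
  -v /srv/bsr/models/ggml-large-v3.bin:/app/whisper_model.bin \
  --name bili-shadowreplay \
  ghcr.io/xinrea/bili-shadowreplay:latest
```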
@@ -11,10 +11,10 @@ hero:
actions:
- theme: brand
text: 开始使用
link: /getting-started/installation
link: /getting-started/installation/desktop
- theme: alt
text: 说明文档
link: /usage/features
link: /usage/features/room_manage

features:
- icon: 📹
@@ -38,9 +38,9 @@ features:
- icon: 🔍
title: 云端部署
details: 支持 Docker 部署,提供 Web 控制界面
- icon: 📦
title: 多平台支持
details: 桌面端支持 Windows/Linux/macOS
- icon: 🤖
title: AI Agent 支持
details: 支持 AI 助手管理录播,分析直播内容,生成切片
---

## Overview
@@ -63,7 +63,7 @@ features:

## Cover Editing

## Settings
Binary file changes:

- (unnamed image) modified: 555 KiB → 195 KiB
- docs/public/images/ai_agent.png (new): 261 KiB
- (unnamed image) modified: 1.2 MiB → 434 KiB
- docs/public/images/clip_manage.png (new): 234 KiB
- docs/public/images/clip_preview.png (new): 2.3 MiB
- docs/public/images/cover_edit.png (new): 2.1 MiB
- (unnamed image) deleted: 2.9 MiB
- (unnamed image) modified: 2.8 MiB → 2.1 MiB
- docs/public/images/model_config.png (new): 383 KiB
- (unnamed image) modified: 1.9 MiB → 949 KiB
- (unnamed image) modified: 622 KiB → 244 KiB
- (unnamed image) modified: 397 KiB → 372 KiB
- docs/public/images/tasks.png (new): 201 KiB
- docs/public/images/whisper_local.png (new): 194 KiB
- docs/public/images/whisper_online.png (new): 199 KiB
- docs/public/images/workflow.excalidraw.png (new): 516 KiB
- docs/public/videos/deeplinking.mp4 (new)
1 docs/usage/features/clip.md Normal file
@@ -0,0 +1 @@
# Clips

1 docs/usage/features/danmaku.md Normal file
@@ -0,0 +1 @@
# Danmaku
20 docs/usage/features/room.md Normal file
@@ -0,0 +1,20 @@
# Live Rooms

> [!WARNING]
> Before adding or managing live rooms, make sure the account list contains a usable account for the corresponding platform.

## Adding a Live Room

### Adding a Room Manually

On the BSR live-room page, click the button to add a room manually. You need to pick the platform and enter the room ID.

The room ID is usually the number at the end of the live room's URL, for example `123456` in `https://live.bilibili.com/123456` or in `https://live.douyin.com/123456`.

Douyin rooms are special: while the streamer is offline there is no entry point to the room, so you need to open the room page while it is live and note its room ID.

### Adding a Room Quickly via Deep Linking

<video src="/videos/deeplinking.mp4" loop autoplay muted style="border-radius: 10px;"></video>

While watching a stream in the browser, replace `https://` in the room URL in the address bar with `bsr://` to quickly open BSR and add the room.
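As a concrete sketch of that substitution (assuming the `bsr://` handler was registered when BSR was installed), opening such a link from a terminal also brings up BSR with the room pre-filled:

```bash
# https://live.bilibili.com/123456  ->  bsr://live.bilibili.com/123456
open "bsr://live.bilibili.com/123456"      # macOS
xdg-open "bsr://live.bilibili.com/123456"  # Linux
```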
1 docs/usage/features/subtitle.md Normal file
@@ -0,0 +1 @@
# Subtitles

7 docs/usage/features/workflow.md Normal file
@@ -0,0 +1,7 @@
# Workflow

## 1. Live Rooms and Recording

After a live room is added, recording starts automatically whenever the room goes live, and each recording produces a stream-archive entry. Click the history button at the bottom-right of the room card to view the archives.
@@ -1,7 +1,7 @@
{
"name": "bili-shadowreplay",
"private": true,
"version": "2.9.0",
"version": "2.10.3",
"type": "module",
"scripts": {
"dev": "vite",
@@ -11,14 +11,16 @@
"tauri": "tauri",
"docs:dev": "vitepress dev docs",
"docs:build": "vitepress build docs",
"docs:preview": "vitepress preview docs"
"docs:preview": "vitepress preview docs",
"bump": "node scripts/bump.cjs"
},
"dependencies": {
"@langchain/core": "^0.3.64",
"@langchain/deepseek": "^0.1.0",
"@langchain/langgraph": "^0.3.10",
"@langchain/ollama": "^0.2.3",
"@tauri-apps/api": "^2.4.1",
"@tauri-apps/api": "^2.6.2",
"@tauri-apps/plugin-deep-link": "~2",
"@tauri-apps/plugin-dialog": "~2",
"@tauri-apps/plugin-fs": "~2",
"@tauri-apps/plugin-http": "~2",
58 scripts/bump.cjs Normal file
@@ -0,0 +1,58 @@
#!/usr/bin/env node

const fs = require("fs");
const path = require("path");

function updatePackageJson(version) {
  const packageJsonPath = path.join(process.cwd(), "package.json");
  const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
  packageJson.version = version;
  fs.writeFileSync(
    packageJsonPath,
    JSON.stringify(packageJson, null, 2) + "\n"
  );
  console.log(`✅ Updated package.json version to ${version}`);
}

function updateCargoToml(version) {
  const cargoTomlPath = path.join(process.cwd(), "src-tauri", "Cargo.toml");
  let cargoToml = fs.readFileSync(cargoTomlPath, "utf8");

  // Update the version in the [package] section
  cargoToml = cargoToml.replace(/^version = ".*"$/m, `version = "${version}"`);

  fs.writeFileSync(cargoTomlPath, cargoToml);
  console.log(`✅ Updated Cargo.toml version to ${version}`);
}

function main() {
  const args = process.argv.slice(2);

  if (args.length === 0) {
    console.error("❌ Please provide a version number");
    console.error("Usage: yarn bump <version>");
    console.error("Example: yarn bump 3.1.0");
    process.exit(1);
  }

  const version = args[0];

  // Validate version format (simple check)
  if (!/^\d+\.\d+\.\d+/.test(version)) {
    console.error(
      "❌ Invalid version format. Please use semantic versioning (e.g., 3.1.0)"
    );
    process.exit(1);
  }

  try {
    updatePackageJson(version);
    updateCargoToml(version);
    console.log(`🎉 Successfully bumped version to ${version}`);
  } catch (error) {
    console.error("❌ Error updating version:", error.message);
    process.exit(1);
  }
}

main();
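The new `bump` entry in package.json wires this script into the package manager, so a release bump is a single command; the version number here is only an example:

```bash
yarn bump 2.10.3   # rewrites "version" in package.json and src-tauri/Cargo.toml
```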
88 src-tauri/Cargo.lock generated
@@ -536,7 +536,7 @@ checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba"

[[package]]
name = "bili-shadowreplay"
version = "1.0.0"
version = "2.10.3"
dependencies = [
"async-ffmpeg-sidecar",
"async-std",
@@ -571,6 +571,7 @@ dependencies = [
"sysinfo",
"tauri",
"tauri-build",
"tauri-plugin-deep-link",
"tauri-plugin-dialog",
"tauri-plugin-fs",
"tauri-plugin-http",
@@ -975,6 +976,26 @@ version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"

[[package]]
name = "const-random"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359"
dependencies = [
"const-random-macro",
]

[[package]]
name = "const-random-macro"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e"
dependencies = [
"getrandom 0.2.16",
"once_cell",
"tiny-keccak",
]

[[package]]
name = "convert_case"
version = "0.4.0"
@@ -1151,6 +1172,12 @@ version = "0.8.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"

[[package]]
name = "crunchy"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"

[[package]]
name = "crypto-common"
version = "0.1.6"
@@ -1529,6 +1556,15 @@ dependencies = [
"syn 2.0.104",
]

[[package]]
name = "dlv-list"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f"
dependencies = [
"const-random",
]

[[package]]
name = "document-features"
version = "0.2.11"
@@ -3927,6 +3963,16 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"

[[package]]
name = "ordered-multimap"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79"
dependencies = [
"dlv-list",
"hashbrown 0.14.5",
]

[[package]]
name = "ordered-stream"
version = "0.2.0"
@@ -4987,6 +5033,16 @@ dependencies = [
"zeroize",
]

[[package]]
name = "rust-ini"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7295b7ce3bf4806b419dc3420745998b447178b7005e2011947b38fc5aa6791"
dependencies = [
"cfg-if",
"ordered-multimap",
]

[[package]]
name = "rustc-demangle"
version = "0.1.25"
@@ -6320,6 +6376,26 @@ dependencies = [
"walkdir",
]

[[package]]
name = "tauri-plugin-deep-link"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fec67f32d7a06d80bd3dc009fdb678c35a66116d9cb8cd2bb32e406c2b5bbd2"
dependencies = [
"dunce",
"rust-ini",
"serde",
"serde_json",
"tauri",
"tauri-plugin",
"tauri-utils",
"thiserror 2.0.12",
"tracing",
"url",
"windows-registry",
"windows-result 0.3.4",
]

[[package]]
name = "tauri-plugin-dialog"
version = "2.3.0"
@@ -6451,6 +6527,7 @@ dependencies = [
"serde",
"serde_json",
"tauri",
"tauri-plugin-deep-link",
"thiserror 2.0.12",
"tracing",
"windows-sys 0.60.2",
@@ -6692,6 +6769,15 @@ dependencies = [
"time-core",
]

[[package]]
name = "tiny-keccak"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237"
dependencies = [
"crunchy",
]

[[package]]
name = "tinystr"
version = "0.8.1"
@@ -4,7 +4,7 @@ resolver = "2"

[package]
name = "bili-shadowreplay"
version = "1.0.0"
version = "2.10.3"
description = "BiliBili ShadowReplay"
authors = ["Xinrea"]
license = ""
@@ -71,6 +71,7 @@ gui = [
"tauri-utils",
"tauri-plugin-os",
"tauri-plugin-notification",
"tauri-plugin-deep-link",
"fix-path-env",
"tauri-build",
]
@@ -83,6 +84,7 @@ optional = true
[dependencies.tauri-plugin-single-instance]
version = "2"
optional = true
features = ["deep-link"]

[dependencies.tauri-plugin-dialog]
version = "2"
@@ -117,6 +119,10 @@ optional = true
version = "2"
optional = true

[dependencies.tauri-plugin-deep-link]
version = "2"
optional = true

[dependencies.fix-path-env]
git = "https://github.com/tauri-apps/fix-path-env-rs"
optional = true
@@ -2,7 +2,11 @@
"identifier": "migrated",
"description": "permissions that were migrated from v1",
"local": true,
"windows": ["main", "Live*", "Clip*"],
"windows": [
"main",
"Live*",
"Clip*"
],
"permissions": [
"core:default",
"fs:allow-read-file",
@@ -16,7 +20,9 @@
"fs:allow-exists",
{
"identifier": "fs:scope",
"allow": ["**"]
"allow": [
"**"
]
},
"core:window:default",
"core:window:allow-start-dragging",
@@ -65,6 +71,7 @@
"shell:default",
"sql:default",
"os:default",
"dialog:default"
"dialog:default",
"deep-link:default"
]
}
}
@@ -123,7 +123,8 @@ impl BiliDanmu {
tx: mpsc::UnboundedSender<DanmuMessageType>,
) -> Result<(), DanmuStreamError> {
let wbi_key = self.get_wbi_key().await?;
let danmu_info = self.get_danmu_info(&wbi_key, self.room_id).await?;
let real_room = self.get_real_room(&wbi_key, self.room_id).await?;
let danmu_info = self.get_danmu_info(&wbi_key, real_room).await?;
let ws_hosts = danmu_info.data.host_list.clone();
let mut conn = None;
log::debug!("ws_hosts: {:?}", ws_hosts);
@@ -152,7 +153,7 @@ impl BiliDanmu {
*self.write.write().await = Some(write);

let json = serde_json::to_string(&WsSend {
roomid: self.room_id,
roomid: real_room,
key: danmu_info.data.token,
uid: self.user_id,
protover: 3,
@@ -239,7 +240,6 @@ impl BiliDanmu {
wbi_key: &str,
room_id: u64,
) -> Result<DanmuInfo, DanmuStreamError> {
let room_id = self.get_real_room(wbi_key, room_id).await?;
let params = self
.get_sign(
wbi_key,
@@ -1 +1 @@
{"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main","Live*","Clip*"],"permissions":["core:default","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-copy-file","fs:allow-mkdir","fs:allow-remove","fs:allow-remove","fs:allow-rename","fs:allow-exists",{"identifier":"fs:scope","allow":["**"]},"core:window:default","core:window:allow-start-dragging","core:window:allow-close","core:window:allow-minimize","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-set-title","sql:allow-execute","shell:allow-open","dialog:allow-open","dialog:allow-save","dialog:allow-message","dialog:allow-ask","dialog:allow-confirm",{"identifier":"http:default","allow":[{"url":"https://*.hdslb.com/"},{"url":"https://afdian.com/"},{"url":"https://*.afdiancdn.com/"},{"url":"https://*.douyin.com/"},{"url":"https://*.douyinpic.com/"}]},"dialog:default","shell:default","fs:default","http:default","sql:default","os:default","notification:default","dialog:default","fs:default","http:default","shell:default","sql:default","os:default","dialog:default"]}}
{"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main","Live*","Clip*"],"permissions":["core:default","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-copy-file","fs:allow-mkdir","fs:allow-remove","fs:allow-remove","fs:allow-rename","fs:allow-exists",{"identifier":"fs:scope","allow":["**"]},"core:window:default","core:window:allow-start-dragging","core:window:allow-close","core:window:allow-minimize","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-set-title","sql:allow-execute","shell:allow-open","dialog:allow-open","dialog:allow-save","dialog:allow-message","dialog:allow-ask","dialog:allow-confirm",{"identifier":"http:default","allow":[{"url":"https://*.hdslb.com/"},{"url":"https://afdian.com/"},{"url":"https://*.afdiancdn.com/"},{"url":"https://*.douyin.com/"},{"url":"https://*.douyinpic.com/"}]},"dialog:default","shell:default","fs:default","http:default","sql:default","os:default","notification:default","dialog:default","fs:default","http:default","shell:default","sql:default","os:default","dialog:default","deep-link:default"]}}
@@ -4220,6 +4220,60 @@
"const": "core:window:deny-unminimize",
"markdownDescription": "Denies the unminimize command without any pre-configured scope."
},
{
"description": "Allows reading the opened deep link via the get_current command\n#### This default permission set includes:\n\n- `allow-get-current`",
"type": "string",
"const": "deep-link:default",
"markdownDescription": "Allows reading the opened deep link via the get_current command\n#### This default permission set includes:\n\n- `allow-get-current`"
},
{
"description": "Enables the get_current command without any pre-configured scope.",
"type": "string",
"const": "deep-link:allow-get-current",
"markdownDescription": "Enables the get_current command without any pre-configured scope."
},
{
"description": "Enables the is_registered command without any pre-configured scope.",
"type": "string",
"const": "deep-link:allow-is-registered",
"markdownDescription": "Enables the is_registered command without any pre-configured scope."
},
{
"description": "Enables the register command without any pre-configured scope.",
"type": "string",
"const": "deep-link:allow-register",
"markdownDescription": "Enables the register command without any pre-configured scope."
},
{
"description": "Enables the unregister command without any pre-configured scope.",
"type": "string",
"const": "deep-link:allow-unregister",
"markdownDescription": "Enables the unregister command without any pre-configured scope."
},
{
"description": "Denies the get_current command without any pre-configured scope.",
"type": "string",
"const": "deep-link:deny-get-current",
"markdownDescription": "Denies the get_current command without any pre-configured scope."
},
{
"description": "Denies the is_registered command without any pre-configured scope.",
"type": "string",
"const": "deep-link:deny-is-registered",
"markdownDescription": "Denies the is_registered command without any pre-configured scope."
},
{
"description": "Denies the register command without any pre-configured scope.",
"type": "string",
"const": "deep-link:deny-register",
"markdownDescription": "Denies the register command without any pre-configured scope."
},
{
"description": "Denies the unregister command without any pre-configured scope.",
"type": "string",
"const": "deep-link:deny-unregister",
"markdownDescription": "Denies the unregister command without any pre-configured scope."
},
{
"description": "This permission set configures the types of dialogs\navailable from the dialog plugin.\n\n#### Granted Permissions\n\nAll dialog types are enabled.\n\n\n\n#### This default permission set includes:\n\n- `allow-ask`\n- `allow-confirm`\n- `allow-message`\n- `allow-save`\n- `allow-open`",
"type": "string",
@@ -4220,6 +4220,60 @@ (the same deep-link permission entries as the hunk above, added to a second generated schema file)
@@ -35,6 +35,8 @@ pub struct Config {
pub config_path: String,
#[serde(default = "default_whisper_language")]
pub whisper_language: String,
#[serde(default = "default_user_agent")]
pub user_agent: String,
}

#[derive(Deserialize, Serialize, Clone)]
@@ -86,6 +88,10 @@ fn default_whisper_language() -> String {
"auto".to_string()
}

fn default_user_agent() -> String {
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36".to_string()
}

impl Config {
pub fn load(
config_path: &PathBuf,
@@ -123,6 +129,7 @@ impl Config {
status_check_interval: default_status_check_interval(),
config_path: config_path.to_str().unwrap().into(),
whisper_language: default_whisper_language(),
user_agent: default_user_agent(),
};

config.save();
@@ -155,6 +162,12 @@ impl Config {
self.save();
}

#[allow(dead_code)]
pub fn set_user_agent(&mut self, user_agent: &str) {
self.user_agent = user_agent.to_string();
self.save();
}

pub fn generate_clip_name(&self, params: &ClipRangeParams) -> PathBuf {
let platform = PlatformType::from_str(&params.platform).unwrap();
@@ -9,7 +9,8 @@ use rand::Rng;
#[derive(Debug, Clone, serde::Serialize, sqlx::FromRow)]
pub struct AccountRow {
pub platform: String,
pub uid: u64,
pub uid: u64, // Keep for Bilibili compatibility
pub id_str: Option<String>, // New field for string IDs like Douyin sec_uid
pub name: String,
pub avatar: String,
pub csrf: String,
@@ -50,9 +51,10 @@ impl Database {
return Err(DatabaseError::InvalidCookiesError);
}

// parse uid
let uid = if platform == PlatformType::BiliBili {
cookies
// parse uid and id_str based on platform
let (uid, id_str) = if platform == PlatformType::BiliBili {
// For Bilibili, extract numeric uid from cookies
let uid = cookies
.split("DedeUserID=")
.collect::<Vec<&str>>()
.get(1)
@@ -63,15 +65,18 @@ impl Database {
.unwrap()
.to_string()
.parse::<u64>()
.map_err(|_| DatabaseError::InvalidCookiesError)?
.map_err(|_| DatabaseError::InvalidCookiesError)?;
(uid, None)
} else {
// generate a random uid
rand::thread_rng().gen_range(10000..=i32::MAX) as u64
// For Douyin, use temporary uid and will set id_str later with real sec_uid
let temp_uid = rand::thread_rng().gen_range(10000..=i32::MAX) as u64;
(temp_uid, Some(format!("temp_{}", temp_uid)))
};

let account = AccountRow {
platform: platform.as_str().to_string(),
uid,
id_str,
name: "".into(),
avatar: "".into(),
csrf: csrf.unwrap(),
@@ -79,7 +84,7 @@ impl Database {
created_at: Utc::now().to_rfc3339(),
};

sqlx::query("INSERT INTO accounts (uid, platform, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7)").bind(account.uid as i64).bind(&account.platform).bind(&account.name).bind(&account.avatar).bind(&account.csrf).bind(&account.cookies).bind(&account.created_at).execute(&lock).await?;
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)").bind(account.uid as i64).bind(&account.platform).bind(&account.id_str).bind(&account.name).bind(&account.avatar).bind(&account.csrf).bind(&account.cookies).bind(&account.created_at).execute(&lock).await?;

Ok(account)
}
@@ -120,6 +125,52 @@ impl Database {
Ok(())
}

pub async fn update_account_with_id_str(
&self,
old_account: &AccountRow,
new_id_str: &str,
name: &str,
avatar: &str,
) -> Result<(), DatabaseError> {
let lock = self.db.read().await.clone().unwrap();

// If the id_str changed, we need to delete the old record and create a new one
if old_account.id_str.as_deref() != Some(new_id_str) {
// Delete the old record (for Douyin accounts, we use uid to identify)
sqlx::query("DELETE FROM accounts WHERE uid = $1 and platform = $2")
.bind(old_account.uid as i64)
.bind(&old_account.platform)
.execute(&lock)
.await?;

// Insert the new record with updated id_str
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)")
.bind(old_account.uid as i64)
.bind(&old_account.platform)
.bind(new_id_str)
.bind(name)
.bind(avatar)
.bind(&old_account.csrf)
.bind(&old_account.cookies)
.bind(&old_account.created_at)
.execute(&lock)
.await?;
} else {
// id_str is the same, just update name and avatar
sqlx::query(
"UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4",
)
.bind(name)
.bind(avatar)
.bind(old_account.uid as i64)
.bind(&old_account.platform)
.execute(&lock)
.await?;
}

Ok(())
}

pub async fn get_accounts(&self) -> Result<Vec<AccountRow>, DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
Ok(sqlx::query_as::<_, AccountRow>("SELECT * FROM accounts")
@@ -133,9 +133,9 @@ impl Database {
"SELECT * FROM records ORDER BY created_at DESC LIMIT $1 OFFSET $2",
)
.bind(limit as i64)
.bind(offset as i64)
.fetch_all(&lock)
.await?)
.bind(offset as i64)
.fetch_all(&lock)
.await?)
} else {
Ok(sqlx::query_as::<_, RecordRow>(
"SELECT * FROM records WHERE room_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
@@ -2,11 +2,13 @@ use std::path::{Path, PathBuf};
use std::process::Stdio;

use crate::progress_reporter::{ProgressReporter, ProgressReporterTrait};
use crate::subtitle_generator::{whisper_cpp, GenerateResult, SubtitleGenerator, SubtitleGeneratorType};
use crate::subtitle_generator::whisper_online;
use crate::subtitle_generator::{
whisper_cpp, GenerateResult, SubtitleGenerator, SubtitleGeneratorType,
};
use async_ffmpeg_sidecar::event::{FfmpegEvent, LogLevel};
use async_ffmpeg_sidecar::log_parser::FfmpegLogParser;
use tokio::io::BufReader;
use tokio::io::{AsyncBufReadExt, BufReader};

pub async fn clip_from_m3u8(
reporter: Option<&impl ProgressReporterTrait>,
@@ -249,6 +251,53 @@ async fn get_audio_duration(file: &Path) -> Result<u64, String> {
duration.ok_or_else(|| "Failed to parse duration".to_string())
}

/// Get the precise duration of a video segment (TS/MP4) in seconds
pub async fn get_segment_duration(file: &Path) -> Result<f64, String> {
// Use ffprobe to get the exact duration of the segment
let child = tokio::process::Command::new(ffprobe_path())
.args(["-v", "quiet"])
.args(["-show_entries", "format=duration"])
.args(["-of", "csv=p=0"])
.args(["-i", file.to_str().unwrap()])
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn();

if let Err(e) = child {
return Err(format!(
"Failed to spawn ffprobe process for segment: {}",
e
));
}

let mut child = child.unwrap();
let stdout = child.stdout.take().unwrap();
let reader = BufReader::new(stdout);
let mut parser = FfmpegLogParser::new(reader);

let mut duration = None;
while let Ok(event) = parser.parse_next_event().await {
match event {
FfmpegEvent::LogEOF => break,
FfmpegEvent::Log(_level, content) => {
// Parse the exact duration as f64 for precise timing
if let Ok(seconds_f64) = content.trim().parse::<f64>() {
duration = Some(seconds_f64);
log::debug!("Parsed segment duration: {} seconds", seconds_f64);
}
}
_ => {}
}
}

if let Err(e) = child.wait().await {
log::error!("Failed to get segment duration: {}", e);
return Err(e.to_string());
}

duration.ok_or_else(|| "Failed to parse segment duration".to_string())
}

pub async fn encode_video_subtitle(
reporter: &impl ProgressReporterTrait,
file: &Path,
@@ -421,10 +470,7 @@ pub async fn encode_video_danmu(
}
}

pub async fn generic_ffmpeg_command(
args: &[&str],
) -> Result<String, String> {
pub async fn generic_ffmpeg_command(args: &[&str]) -> Result<String, String> {
let child = tokio::process::Command::new(ffmpeg_path())
.args(args)
.stderr(Stdio::piped())
@@ -474,8 +520,7 @@ pub async fn generate_video_subtitle(
if whisper_model.is_empty() {
return Err("Whisper model not configured".to_string());
}
if let Ok(generator) =
whisper_cpp::new(Path::new(&whisper_model), whisper_prompt).await
if let Ok(generator) = whisper_cpp::new(Path::new(&whisper_model), whisper_prompt).await
{
let chunk_dir = extract_audio_chunks(file, "wav").await?;

@@ -584,7 +629,6 @@ pub async fn generate_video_subtitle(
}
}

/// Trying to run ffmpeg for version
pub async fn check_ffmpeg() -> Result<String, String> {
let child = tokio::process::Command::new(ffmpeg_path())
@@ -624,6 +668,48 @@ pub async fn check_ffmpeg() -> Result<String, String> {
}
}

pub async fn get_video_resolution(file: &str) -> Result<String, String> {
// ffprobe -v error -select_streams v:0 -show_entries stream=width,height -of csv=s=x:p=0 input.mp4
let child = tokio::process::Command::new(ffprobe_path())
.arg("-i")
.arg(file)
.arg("-v")
.arg("error")
.arg("-select_streams")
.arg("v:0")
.arg("-show_entries")
.arg("stream=width,height")
.arg("-of")
.arg("csv=s=x:p=0")
.stdout(Stdio::piped())
.spawn();
if let Err(e) = child {
log::error!("Failed to spawn ffprobe process: {e}");
return Err(e.to_string());
}

let mut child = child.unwrap();
let stdout = child.stdout.take();
if stdout.is_none() {
log::error!("Failed to take ffprobe output");
return Err("Failed to take ffprobe output".into());
}

let stdout = stdout.unwrap();
let reader = BufReader::new(stdout);
let mut lines = reader.lines();
let line = lines.next_line().await.unwrap();
if line.is_none() {
return Err("Failed to parse resolution from output".into());
}
let line = line.unwrap();
let resolution = line.split("x").collect::<Vec<&str>>();
if resolution.len() != 2 {
return Err("Failed to parse resolution from output".into());
}
Ok(format!("{}x{}", resolution[0], resolution[1]))
}

fn ffmpeg_path() -> PathBuf {
let mut path = Path::new("ffmpeg").to_path_buf();
if cfg!(windows) {
@@ -641,3 +727,16 @@ fn ffprobe_path() -> PathBuf {

path
}

// tests
#[cfg(test)]
mod tests {
use super::*;

#[tokio::test]
async fn test_get_video_size() {
let file = Path::new("/Users/xinreasuper/Desktop/shadowreplay-test/output2/[1789714684][1753965688317][摄像头被前夫抛妻弃子直播挣点奶粉][2025-07-31_12-58-14].mp4");
let resolution = get_video_resolution(file.to_str().unwrap()).await.unwrap();
println!("Resolution: {}", resolution);
}
}
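Both new helpers wrap plain ffprobe invocations; run outside the app, the equivalent commands look like this (input paths are placeholders):

```bash
# get_segment_duration: exact duration of a segment, in seconds
ffprobe -v quiet -show_entries format=duration -of csv=p=0 -i segment.ts

# get_video_resolution: "WIDTHxHEIGHT" of the first video stream
ffprobe -i input.mp4 -v error -select_streams v:0 \
  -show_entries stream=width,height -of csv=s=x:p=0
```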
@@ -37,6 +37,37 @@ pub async fn add_account(
&account_info.user_avatar_url,
)
.await?;
} else if platform == "douyin" {
// Get user info from Douyin API
let douyin_client = crate::recorder::douyin::client::DouyinClient::new(
&state.config.read().await.user_agent,
&account,
);
match douyin_client.get_user_info().await {
Ok(user_info) => {
// For Douyin, use sec_uid as the primary identifier in id_str field
let avatar_url = user_info
.avatar_thumb
.url_list
.first()
.cloned()
.unwrap_or_default();

state
.db
.update_account_with_id_str(
&account,
&user_info.sec_uid,
&user_info.nickname,
&avatar_url,
)
.await?;
}
Err(e) => {
log::warn!("Failed to get Douyin user info: {}", e);
// Keep the account but with default values
}
}
}
Ok(account)
}
@@ -245,3 +245,10 @@ pub async fn update_whisper_language(
state.config.write().await.save();
Ok(())
}

#[cfg_attr(feature = "gui", tauri::command)]
pub async fn update_user_agent(state: state_type!(), user_agent: String) -> Result<(), ()> {
log::info!("Updating user agent to {}", user_agent);
state.config.write().await.set_user_agent(&user_agent);
Ok(())
}
@@ -147,7 +147,10 @@ pub async fn get_archive_subtitle(
if platform.is_none() {
return Err("Unsupported platform".to_string());
}
Ok(state.recorder_manager.get_archive_subtitle(platform.unwrap(), room_id, &live_id).await?)
Ok(state
.recorder_manager
.get_archive_subtitle(platform.unwrap(), room_id, &live_id)
.await?)
}

#[cfg_attr(feature = "gui", tauri::command)]
@@ -161,7 +164,10 @@ pub async fn generate_archive_subtitle(
if platform.is_none() {
return Err("Unsupported platform".to_string());
}
Ok(state.recorder_manager.generate_archive_subtitle(platform.unwrap(), room_id, &live_id).await?)
Ok(state
.recorder_manager
.generate_archive_subtitle(platform.unwrap(), room_id, &live_id)
.await?)
}

#[cfg_attr(feature = "gui", tauri::command)]
@@ -301,4 +301,4 @@ pub async fn list_folder(_state: state_type!(), path: String) -> Result<Vec<Stri
files.push(entry.path().to_str().unwrap().to_string());
}
Ok(files)
}
}
@@ -411,7 +411,18 @@ pub async fn generate_video_subtitle(
let filepath = Path::new(state.config.read().await.output.as_str()).join(&video.file);
let file = Path::new(&filepath);

match ffmpeg::generate_video_subtitle(Some(&reporter), file, generator_type, &whisper_model, &whisper_prompt, &openai_api_key, &openai_api_endpoint, language_hint).await {
match ffmpeg::generate_video_subtitle(
Some(&reporter),
file,
generator_type,
&whisper_model,
&whisper_prompt,
&openai_api_key,
&openai_api_endpoint,
language_hint,
)
.await
{
Ok(result) => {
reporter.finish(true, "字幕生成完成").await;
// for local whisper, we need to update the task status to success
@@ -552,7 +563,6 @@ async fn encode_video_subtitle_inner(
Ok(new_video)
}

#[cfg_attr(feature = "gui", tauri::command)]
pub async fn generic_ffmpeg_command(
_state: state_type!(),
@@ -560,4 +570,4 @@ pub async fn generic_ffmpeg_command(
) -> Result<String, String> {
let args_str: Vec<&str> = args.iter().map(|s| s.as_str()).collect();
ffmpeg::generic_ffmpeg_command(&args_str).await
}
}
@@ -18,21 +18,22 @@ use crate::{
get_config, update_auto_generate, update_clip_name_format, update_notify,
update_openai_api_endpoint, update_openai_api_key, update_status_check_interval,
update_subtitle_generator_type, update_subtitle_setting, update_whisper_language,
update_whisper_model, update_whisper_prompt,
update_user_agent, update_whisper_model, update_whisper_prompt,
},
message::{delete_message, get_messages, read_message},
recorder::{
add_recorder, delete_archive, export_danmu, fetch_hls, get_archive, get_archive_subtitle, get_archives,
get_danmu_record, get_recent_record, get_recorder_list, get_room_info,
get_today_record_count, get_total_length, remove_recorder, send_danmaku, set_enable,
ExportDanmuOptions, generate_archive_subtitle,
add_recorder, delete_archive, export_danmu, fetch_hls, generate_archive_subtitle,
get_archive, get_archive_subtitle, get_archives, get_danmu_record, get_recent_record,
get_recorder_list, get_room_info, get_today_record_count, get_total_length,
remove_recorder, send_danmaku, set_enable, ExportDanmuOptions,
},
task::{delete_task, get_tasks},
utils::{console_log, get_disk_info, list_folder, DiskInfo},
video::{
cancel, clip_range, delete_video, encode_video_subtitle, generate_video_subtitle,
get_all_videos, get_video, get_video_cover, get_video_subtitle, get_video_typelist,
get_videos, update_video_cover, update_video_subtitle, upload_procedure, generic_ffmpeg_command,
generic_ffmpeg_command, get_all_videos, get_video, get_video_cover, get_video_subtitle,
get_video_typelist, get_videos, update_video_cover, update_video_subtitle,
upload_procedure,
},
AccountInfo,
},
@@ -273,6 +274,22 @@ struct UpdateSubtitleSettingRequest {
auto_subtitle: bool,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct UpdateUserAgentRequest {
user_agent: String,
}

async fn handler_update_user_agent(
state: axum::extract::State<State>,
Json(user_agent): Json<UpdateUserAgentRequest>,
) -> Result<Json<ApiResponse<()>>, ApiError> {
update_user_agent(state.0, user_agent.user_agent)
.await
.expect("Failed to update user agent");
Ok(Json(ApiResponse::success(())))
}

async fn handler_update_subtitle_setting(
state: axum::extract::State<State>,
Json(subtitle_setting): Json<UpdateSubtitleSettingRequest>,
@@ -518,7 +535,8 @@ async fn handler_get_archive_subtitle(
state: axum::extract::State<State>,
Json(param): Json<GetArchiveSubtitleRequest>,
) -> Result<Json<ApiResponse<String>>, ApiError> {
let subtitle = get_archive_subtitle(state.0, param.platform, param.room_id, param.live_id).await?;
let subtitle =
get_archive_subtitle(state.0, param.platform, param.room_id, param.live_id).await?;
Ok(Json(ApiResponse::success(subtitle)))
}

@@ -534,7 +552,8 @@ async fn handler_generate_archive_subtitle(
state: axum::extract::State<State>,
Json(param): Json<GenerateArchiveSubtitleRequest>,
) -> Result<Json<ApiResponse<String>>, ApiError> {
let subtitle = generate_archive_subtitle(state.0, param.platform, param.room_id, param.live_id).await?;
let subtitle =
generate_archive_subtitle(state.0, param.platform, param.room_id, param.live_id).await?;
Ok(Json(ApiResponse::success(subtitle)))
}

@@ -613,7 +632,8 @@ async fn handler_get_recent_record(
state: axum::extract::State<State>,
Json(param): Json<GetRecentRecordRequest>,
) -> Result<Json<ApiResponse<Vec<RecordRow>>>, ApiError> {
let recent_record = get_recent_record(state.0, param.room_id, param.offset, param.limit).await?;
let recent_record =
get_recent_record(state.0, param.room_id, param.offset, param.limit).await?;
Ok(Json(ApiResponse::success(recent_record)))
}

@@ -1316,6 +1336,10 @@ pub async fn start_api_server(state: State) {
.route(
"/api/update_whisper_language",
post(handler_update_whisper_language),
)
.route(
"/api/update_user_agent",
post(handler_update_user_agent),
);
} else {
log::info!("Running in readonly mode, some api routes are disabled");
@@ -1333,7 +1357,10 @@ pub async fn start_api_server(state: State) {
.route("/api/get_room_info", post(handler_get_room_info))
.route("/api/get_archives", post(handler_get_archives))
.route("/api/get_archive", post(handler_get_archive))
.route("/api/get_archive_subtitle", post(handler_get_archive_subtitle))
.route(
"/api/get_archive_subtitle",
post(handler_get_archive_subtitle),
)
.route("/api/get_danmu_record", post(handler_get_danmu_record))
.route("/api/get_total_length", post(handler_get_total_length))
.route(
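In headless/Docker deployments the new route can be exercised directly when the server is not in readonly mode. A hypothetical request, assuming the web interface listens on port 3000 as in the Docker docs and noting that the body uses camelCase per the serde rename:

```bash
curl -X POST http://localhost:3000/api/update_user_agent \
  -H "Content-Type: application/json" \
  -d '{"userAgent": "Mozilla/5.0 (X11; Linux x86_64) ..."}'
```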
@@ -26,6 +26,7 @@ use chrono::Utc;
|
||||
use config::Config;
|
||||
use database::Database;
|
||||
use recorder::bilibili::client::BiliClient;
|
||||
use recorder::PlatformType;
|
||||
use recorder_manager::RecorderManager;
|
||||
use simplelog::ConfigBuilder;
|
||||
use state::State;
|
||||
@@ -42,7 +43,6 @@ use std::os::windows::fs::MetadataExt;
|
||||
|
||||
#[cfg(feature = "gui")]
|
||||
use {
|
||||
recorder::PlatformType,
|
||||
tauri::{Manager, WindowEvent},
|
||||
tauri_plugin_sql::{Migration, MigrationKind},
|
||||
};
|
||||
@@ -117,6 +117,9 @@ async fn setup_logging(log_dir: &Path) -> Result<(), Box<dyn std::error::Error>>
|
||||
),
|
||||
])?;
|
||||
|
||||
// logging current package version
|
||||
log::info!("Current version: {}", env!("CARGO_PKG_VERSION"));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -155,6 +158,13 @@ fn get_migrations() -> Vec<Migration> {
|
||||
sql: r#"CREATE TABLE tasks (id TEXT PRIMARY KEY, type TEXT, status TEXT, message TEXT, metadata TEXT, created_at TEXT);"#,
|
||||
kind: MigrationKind::Up,
|
||||
},
|
||||
// add id_str column to support string IDs like Douyin sec_uid while keeping uid for Bilibili compatibility
|
||||
Migration {
|
||||
version: 5,
|
||||
description: "add_id_str_column",
|
||||
sql: r#"ALTER TABLE accounts ADD COLUMN id_str TEXT;"#,
|
||||
kind: MigrationKind::Up,
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
@@ -202,7 +212,7 @@ async fn setup_server_state(args: Args) -> Result<State, Box<dyn std::error::Err
|
||||
return Err(e.into());
|
||||
}
|
||||
};
|
||||
let client = Arc::new(BiliClient::new()?);
|
||||
let client = Arc::new(BiliClient::new(&config.user_agent)?);
|
||||
let config = Arc::new(RwLock::new(config));
|
||||
let db = Arc::new(Database::new());
|
||||
// connect to sqlite database
|
||||
@@ -233,6 +243,63 @@ async fn setup_server_state(args: Args) -> Result<State, Box<dyn std::error::Err
    let progress_manager = Arc::new(ProgressManager::new());
    let emitter = EventEmitter::new(progress_manager.get_event_sender());
    let recorder_manager = Arc::new(RecorderManager::new(emitter, db.clone(), config.clone()));

    // Update account infos for headless mode
    let accounts = db.get_accounts().await?;
    for account in accounts {
        let platform = PlatformType::from_str(&account.platform).unwrap();

        if platform == PlatformType::BiliBili {
            match client.get_user_info(&account, account.uid).await {
                Ok(account_info) => {
                    if let Err(e) = db
                        .update_account(
                            &account.platform,
                            account_info.user_id,
                            &account_info.user_name,
                            &account_info.user_avatar_url,
                        )
                        .await
                    {
                        log::error!("Error when updating Bilibili account info {}", e);
                    }
                }
                Err(e) => {
                    log::error!("Get Bilibili user info failed {}", e);
                }
            }
        } else if platform == PlatformType::Douyin {
            // Update Douyin account info
            use crate::recorder::douyin::client::DouyinClient;
            let douyin_client = DouyinClient::new(&config.read().await.user_agent, &account);
            match douyin_client.get_user_info().await {
                Ok(user_info) => {
                    let avatar_url = user_info
                        .avatar_thumb
                        .url_list
                        .first()
                        .cloned()
                        .unwrap_or_default();

                    if let Err(e) = db
                        .update_account_with_id_str(
                            &account,
                            &user_info.sec_uid,
                            &user_info.nickname,
                            &avatar_url,
                        )
                        .await
                    {
                        log::error!("Error when updating Douyin account info {}", e);
                    }
                }
                Err(e) => {
                    log::error!("Get Douyin user info failed {}", e);
                }
            }
        }
    }

    let _ = try_rebuild_archives(&db, config.read().await.cache.clone().into()).await;

    Ok(State {
@@ -267,7 +334,7 @@ async fn setup_app_state(app: &tauri::App) -> Result<State, Box<dyn std::error::
        }
    };

    let client = Arc::new(BiliClient::new()?);
    let client = Arc::new(BiliClient::new(&config.user_agent)?);
    let config = Arc::new(RwLock::new(config));
    let config_clone = config.clone();
    let dbs = app.state::<tauri_plugin_sql::DbInstances>().inner();
@@ -304,28 +371,55 @@ async fn setup_app_state(app: &tauri::App) -> Result<State, Box<dyn std::error::
|
||||
|
||||
// update account infos
|
||||
for account in accounts {
|
||||
// only update bilibili account
|
||||
let platform = PlatformType::from_str(&account.platform).unwrap();
|
||||
if platform != PlatformType::BiliBili {
|
||||
continue;
|
||||
}
|
||||
|
||||
match client_clone.get_user_info(&account, account.uid).await {
|
||||
Ok(account_info) => {
|
||||
if let Err(e) = db_clone
|
||||
.update_account(
|
||||
&account.platform,
|
||||
account_info.user_id,
|
||||
&account_info.user_name,
|
||||
&account_info.user_avatar_url,
|
||||
)
|
||||
.await
|
||||
{
|
||||
log::error!("Error when updating account info {}", e);
|
||||
if platform == PlatformType::BiliBili {
|
||||
match client_clone.get_user_info(&account, account.uid).await {
|
||||
Ok(account_info) => {
|
||||
if let Err(e) = db_clone
|
||||
.update_account(
|
||||
&account.platform,
|
||||
account_info.user_id,
|
||||
&account_info.user_name,
|
||||
&account_info.user_avatar_url,
|
||||
)
|
||||
.await
|
||||
{
|
||||
log::error!("Error when updating Bilibili account info {}", e);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Get Bilibili user info failed {}", e);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Get user info failed {}", e);
|
||||
} else if platform == PlatformType::Douyin {
|
||||
// Update Douyin account info
|
||||
use crate::recorder::douyin::client::DouyinClient;
|
||||
let douyin_client = DouyinClient::new(&config_clone.read().await.user_agent, &account);
|
||||
match douyin_client.get_user_info().await {
|
||||
Ok(user_info) => {
|
||||
let avatar_url = user_info
|
||||
.avatar_thumb
|
||||
.url_list
|
||||
.first()
|
||||
.cloned()
|
||||
.unwrap_or_default();
|
||||
|
||||
if let Err(e) = db_clone
|
||||
.update_account_with_id_str(
|
||||
&account,
|
||||
&user_info.sec_uid,
|
||||
&user_info.nickname,
|
||||
&avatar_url,
|
||||
)
|
||||
.await
|
||||
{
|
||||
log::error!("Error when updating Douyin account info {}", e);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Get Douyin user info failed {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -410,6 +504,7 @@ fn setup_invoke_handlers(builder: tauri::Builder<tauri::Wry>) -> tauri::Builder<
    crate::handlers::config::update_auto_generate,
    crate::handlers::config::update_status_check_interval,
    crate::handlers::config::update_whisper_language,
    crate::handlers::config::update_user_agent,
    crate::handlers::message::get_messages,
    crate::handlers::message::read_message,
    crate::handlers::message::delete_message,
@@ -462,7 +557,7 @@ fn setup_invoke_handlers(builder: tauri::Builder<tauri::Wry>) -> tauri::Builder<
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let _ = fix_path_env::fix();

    let builder = tauri::Builder::default();
    let builder = tauri::Builder::default().plugin(tauri_plugin_deep_link::init());
    let builder = setup_plugins(builder);
    let builder = setup_event_handlers(builder);
    let builder = setup_invoke_handlers(builder);

@@ -82,7 +82,10 @@ pub trait Recorder: Send + Sync + 'static {
    async fn comments(&self, live_id: &str) -> Result<Vec<DanmuEntry>, errors::RecorderError>;
    async fn is_recording(&self, live_id: &str) -> bool;
    async fn get_archive_subtitle(&self, live_id: &str) -> Result<String, errors::RecorderError>;
    async fn generate_archive_subtitle(&self, live_id: &str) -> Result<String, errors::RecorderError>;
    async fn generate_archive_subtitle(
        &self,
        live_id: &str,
    ) -> Result<String, errors::RecorderError>;
    async fn enable(&self);
    async fn disable(&self);
}

@@ -6,6 +6,7 @@ use super::entry::{EntryStore, Range};
|
||||
use super::errors::RecorderError;
|
||||
use super::PlatformType;
|
||||
use crate::database::account::AccountRow;
|
||||
use crate::ffmpeg::get_video_resolution;
|
||||
use crate::progress_manager::Event;
|
||||
use crate::progress_reporter::EventEmitter;
|
||||
use crate::recorder_manager::RecorderEvent;
|
||||
@@ -13,7 +14,6 @@ use crate::subtitle_generator::item_to_srt;
|
||||
|
||||
use super::danmu::{DanmuEntry, DanmuStorage};
|
||||
use super::entry::TsEntry;
|
||||
use std::path::Path;
|
||||
use chrono::Utc;
|
||||
use client::{BiliClient, BiliStream, RoomInfo, StreamType, UserInfo};
|
||||
use danmu_stream::danmu_stream::DanmuStream;
|
||||
@@ -22,11 +22,12 @@ use danmu_stream::DanmuMessageType;
|
||||
use errors::BiliClientError;
|
||||
use m3u8_rs::{Playlist, QuotedOrUnquoted, VariantStream};
|
||||
use regex::Regex;
|
||||
use tokio::fs::File;
|
||||
use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
|
||||
use std::path::Path;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tokio::fs::File;
|
||||
use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
|
||||
use tokio::sync::{broadcast, Mutex, RwLock};
|
||||
use tokio::task::JoinHandle;
|
||||
use url::Url;
|
||||
@@ -68,9 +69,12 @@ pub struct BiliRecorder {
    danmu_storage: Arc<RwLock<Option<DanmuStorage>>>,
    live_end_channel: broadcast::Sender<RecorderEvent>,
    enabled: Arc<RwLock<bool>>,
    last_segment_offset: Arc<RwLock<Option<i64>>>, // offset of the last segment handled in the previous pass
    current_header_info: Arc<RwLock<Option<HeaderInfo>>>, // current header url and resolution

    danmu_task: Arc<Mutex<Option<JoinHandle<()>>>>,
    record_task: Arc<Mutex<Option<JoinHandle<()>>>>,
    master_manifest: Arc<RwLock<Option<String>>>,
}

impl From<DatabaseError> for super::errors::RecorderError {
@@ -97,9 +101,15 @@ pub struct BiliRecorderOptions {
    pub channel: broadcast::Sender<RecorderEvent>,
}

#[derive(Debug, Clone)]
struct HeaderInfo {
    url: String,
    resolution: String,
}

impl BiliRecorder {
    pub async fn new(options: BiliRecorderOptions) -> Result<Self, super::errors::RecorderError> {
        let client = BiliClient::new()?;
        let client = BiliClient::new(&options.config.read().await.user_agent)?;
        let room_info = client
            .get_room_info(&options.account, options.room_id)
            .await?;
@@ -140,9 +150,11 @@ impl BiliRecorder {
|
||||
danmu_storage: Arc::new(RwLock::new(None)),
|
||||
live_end_channel: options.channel,
|
||||
enabled: Arc::new(RwLock::new(options.auto_start)),
|
||||
|
||||
last_segment_offset: Arc::new(RwLock::new(None)),
|
||||
current_header_info: Arc::new(RwLock::new(None)),
|
||||
danmu_task: Arc::new(Mutex::new(None)),
|
||||
record_task: Arc::new(Mutex::new(None)),
|
||||
master_manifest: Arc::new(RwLock::new(None)),
|
||||
};
|
||||
log::info!("Recorder for room {} created.", options.room_id);
|
||||
Ok(recorder)
|
||||
@@ -154,6 +166,8 @@ impl BiliRecorder {
|
||||
*self.live_stream.write().await = None;
|
||||
*self.last_update.write().await = Utc::now().timestamp();
|
||||
*self.danmu_storage.write().await = None;
|
||||
*self.last_segment_offset.write().await = None;
|
||||
*self.current_header_info.write().await = None;
|
||||
}
|
||||
|
||||
async fn should_record(&self) -> bool {
|
||||
@@ -259,11 +273,13 @@ impl BiliRecorder {
|
||||
return true;
|
||||
}
|
||||
|
||||
let master_manifest =
|
||||
m3u8_rs::parse_playlist_res(master_manifest.as_ref().unwrap().as_bytes())
|
||||
.map_err(|_| super::errors::RecorderError::M3u8ParseFailed {
|
||||
content: master_manifest.as_ref().unwrap().clone(),
|
||||
});
|
||||
let master_manifest = master_manifest.unwrap();
|
||||
*self.master_manifest.write().await = Some(master_manifest.clone());
|
||||
|
||||
let master_manifest = m3u8_rs::parse_playlist_res(master_manifest.as_bytes())
|
||||
.map_err(|_| super::errors::RecorderError::M3u8ParseFailed {
|
||||
content: master_manifest.clone(),
|
||||
});
|
||||
if master_manifest.is_err() {
|
||||
log::error!(
|
||||
"[{}]Parse master manifest failed: {}",
|
||||
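For context, `m3u8_rs::parse_playlist_res` is the same entry point used on the variant playlists further down; a minimal, self-contained example against a hand-written master manifest (not a real Bilibili response):

```rust
// Assumes the m3u8-rs crate, which this project already depends on.
fn main() {
    let manifest = "#EXTM3U\n#EXT-X-STREAM-INF:BANDWIDTH=1280000,RESOLUTION=1920x1080\nindex.m3u8\n";
    match m3u8_rs::parse_playlist_res(manifest.as_bytes()) {
        Ok(m3u8_rs::Playlist::MasterPlaylist(pl)) => {
            // One variant stream pointing at its media playlist.
            assert_eq!(pl.variants.len(), 1);
            println!("variant uri: {}", pl.variants[0].uri);
        }
        Ok(m3u8_rs::Playlist::MediaPlaylist(_)) => println!("media playlist"),
        Err(e) => eprintln!("parse failed: {:?}", e),
    }
}
```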
@@ -317,26 +333,12 @@ impl BiliRecorder {
|
||||
let stream = new_stream.unwrap();
|
||||
|
||||
let should_update_stream = self.live_stream.read().await.is_none()
|
||||
|| !self
|
||||
.live_stream
|
||||
.read()
|
||||
.await
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.is_same(&stream)
|
||||
|| self.force_update.load(Ordering::Relaxed);
|
||||
|
||||
if should_update_stream {
|
||||
log::info!(
|
||||
"[{}]Update to a new stream: {:?} => {}",
|
||||
self.room_id,
|
||||
self.live_stream.read().await.clone(),
|
||||
stream
|
||||
);
|
||||
|
||||
self.force_update.store(false, Ordering::Relaxed);
|
||||
|
||||
let new_stream = self.fetch_real_stream(stream).await;
|
||||
let new_stream = self.fetch_real_stream(&stream).await;
|
||||
if new_stream.is_err() {
|
||||
log::error!(
|
||||
"[{}]Fetch real stream failed: {}",
|
||||
@@ -349,6 +351,13 @@ impl BiliRecorder {
|
||||
let new_stream = new_stream.unwrap();
|
||||
*self.live_stream.write().await = Some(new_stream);
|
||||
*self.last_update.write().await = Utc::now().timestamp();
|
||||
|
||||
log::info!(
|
||||
"[{}]Update to a new stream: {:?} => {}",
|
||||
self.room_id,
|
||||
self.live_stream.read().await.clone(),
|
||||
stream
|
||||
);
|
||||
}
|
||||
|
||||
true
|
||||
@@ -455,6 +464,10 @@ impl BiliRecorder {
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed fetching index content from {}", stream.index());
|
||||
log::error!(
|
||||
"Master manifest: {}",
|
||||
self.master_manifest.read().await.as_ref().unwrap()
|
||||
);
|
||||
Err(super::errors::RecorderError::BiliClientError { err: e })
|
||||
}
|
||||
}
|
||||
@@ -466,6 +479,7 @@ impl BiliRecorder {
|
||||
return Err(super::errors::RecorderError::NoStreamAvailable);
|
||||
}
|
||||
let stream = stream.unwrap();
|
||||
|
||||
let index_content = self
|
||||
.client
|
||||
.read()
|
||||
@@ -480,6 +494,7 @@ impl BiliRecorder {
|
||||
url: stream.index(),
|
||||
});
|
||||
}
|
||||
|
||||
let mut header_url = String::from("");
|
||||
let re = Regex::new(r"h.*\.m4s").unwrap();
|
||||
if let Some(captures) = re.captures(&index_content) {
|
||||
@@ -488,12 +503,24 @@ impl BiliRecorder {
|
||||
if header_url.is_empty() {
|
||||
log::warn!("Parse header url failed: {}", index_content);
|
||||
}
|
||||
|
||||
Ok(header_url)
|
||||
}
|
||||
|
||||
async fn get_resolution(
|
||||
&self,
|
||||
header_url: &str,
|
||||
) -> Result<String, super::errors::RecorderError> {
|
||||
log::debug!("Get resolution from {}", header_url);
|
||||
let resolution = get_video_resolution(header_url)
|
||||
.await
|
||||
.map_err(|e| super::errors::RecorderError::FfmpegError { err: e })?;
|
||||
Ok(resolution)
|
||||
}
|
||||
|
||||
async fn fetch_real_stream(
|
||||
&self,
|
||||
stream: BiliStream,
|
||||
stream: &BiliStream,
|
||||
) -> Result<BiliStream, super::errors::RecorderError> {
|
||||
let index_content = self
|
||||
.client
|
||||
@@ -502,16 +529,9 @@ impl BiliRecorder {
|
||||
.get_index_content(&self.account, &stream.index())
|
||||
.await?;
|
||||
if index_content.is_empty() {
|
||||
return Err(super::errors::RecorderError::InvalidStream { stream });
|
||||
}
|
||||
let index_content = self
|
||||
.client
|
||||
.read()
|
||||
.await
|
||||
.get_index_content(&self.account, &stream.index())
|
||||
.await?;
|
||||
if index_content.is_empty() {
|
||||
return Err(super::errors::RecorderError::InvalidStream { stream });
|
||||
return Err(super::errors::RecorderError::InvalidStream {
|
||||
stream: stream.clone(),
|
||||
});
|
||||
}
|
||||
if index_content.contains("Not Found") {
|
||||
return Err(super::errors::RecorderError::IndexNotFound {
|
||||
@@ -522,14 +542,23 @@ impl BiliRecorder {
// this index content provides another m3u8 url
// example: https://765b047cec3b099771d4b1851136046f.v.smtcdns.net/d1--cn-gotcha204-3.bilivideo.com/live-bvc/246284/live_1323355750_55526594/index.m3u8?expires=1741318366&len=0&oi=1961017843&pt=h5&qn=10000&trid=1007049a5300422eeffd2d6995d67b67ca5a&sigparams=cdn,expires,len,oi,pt,qn,trid&cdn=cn-gotcha204&sign=7ef1241439467ef27d3c804c1eda8d4d&site=1c89ef99adec13fab3a3592ee4db26d3&free_type=0&mid=475210&sche=ban&bvchls=1&trace=16&isp=ct&rg=East&pv=Shanghai&source=puv3_onetier&p2p_type=-1&score=1&suffix=origin&deploy_env=prod&flvsk=e5c4d6fb512ed7832b706f0a92f7a8c8&sk=246b3930727a89629f17520b1b551a2f&pp=rtmp&hot_cdn=57345&origin_bitrate=657300&sl=1&info_source=cache&vd=bc&src=puv3&order=1&TxLiveCode=cold_stream&TxDispType=3&svr_type=live_oc&tencent_test_client_ip=116.226.193.243&dispatch_from=OC_MGR61.170.74.11&utime=1741314857497
let new_url = index_content.lines().last().unwrap();
let base_url = new_url.split('/').next().unwrap();
let host = base_url.split('/').next().unwrap();
// extra is params after index.m3u8
let extra = new_url.split(base_url).last().unwrap();
let new_stream = BiliStream::new(StreamType::FMP4, base_url, host, extra);
return Box::pin(self.fetch_real_stream(new_stream)).await;

// extract host: cn-gotcha204-3.bilivideo.com
let host = new_url.split('/').nth(2).unwrap_or_default();
let extra = new_url.split('?').nth(1).unwrap_or_default();
// extract base url: live-bvc/246284/live_1323355750_55526594/
let base_url = new_url
    .split('/')
    .skip(3)
    .take_while(|&part| !part.contains('?') && part != "index.m3u8")
    .collect::<Vec<&str>>()
    .join("/")
    + "/";

let new_stream = BiliStream::new(StreamType::FMP4, base_url.as_str(), host, extra);
return Box::pin(self.fetch_real_stream(&new_stream)).await;
}
Ok(stream)
Ok(stream.clone())
}

async fn get_work_dir(&self, live_id: &str) -> String {
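A self-contained sketch of the host/base/query extraction used in `fetch_real_stream` above, run against a URL shaped like the redirect example in the comment (the concrete URL below is illustrative):

```rust
fn main() {
    let new_url = "https://cn-gotcha204-3.bilivideo.com/live-bvc/246284/live_1323355750_55526594/index.m3u8?expires=1741318366&qn=10000";
    // host: everything between the scheme and the first path segment
    let host = new_url.split('/').nth(2).unwrap_or_default();
    // extra: the query string after index.m3u8
    let extra = new_url.split('?').nth(1).unwrap_or_default();
    // base url: the path segments up to (but excluding) index.m3u8
    let base_url = new_url
        .split('/')
        .skip(3)
        .take_while(|&part| !part.contains('?') && part != "index.m3u8")
        .collect::<Vec<&str>>()
        .join("/")
        + "/";
    assert_eq!(host, "cn-gotcha204-3.bilivideo.com");
    assert_eq!(extra, "expires=1741318366&qn=10000");
    assert_eq!(base_url, "live-bvc/246284/live_1323355750_55526594/");
}
```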
@@ -549,8 +578,24 @@ impl BiliRecorder {
|
||||
}
|
||||
let current_stream = current_stream.unwrap();
|
||||
let parsed = self.get_playlist().await;
|
||||
if parsed.is_err() {
|
||||
self.force_update.store(true, Ordering::Relaxed);
|
||||
return Err(parsed.err().unwrap());
|
||||
}
|
||||
|
||||
let playlist = parsed.unwrap();
|
||||
|
||||
let mut timestamp: i64 = self.live_id.read().await.parse::<i64>().unwrap_or(0);
|
||||
let mut work_dir = self.get_work_dir(timestamp.to_string().as_str()).await;
|
||||
let mut work_dir;
|
||||
let mut is_first_record = false;
|
||||
|
||||
// Get url from EXT-X-MAP
|
||||
let header_url = self.get_header_url().await?;
|
||||
if header_url.is_empty() {
|
||||
return Err(super::errors::RecorderError::EmptyHeader);
|
||||
}
|
||||
let full_header_url = current_stream.ts_url(&header_url);
|
||||
|
||||
// Check header if None
|
||||
if (self.entry_store.read().await.as_ref().is_none()
|
||||
|| self
|
||||
@@ -563,34 +608,11 @@ impl BiliRecorder {
|
||||
.is_none())
|
||||
&& current_stream.format == StreamType::FMP4
|
||||
{
|
||||
// Get url from EXT-X-MAP
|
||||
let header_url = self.get_header_url().await?;
|
||||
if header_url.is_empty() {
|
||||
return Err(super::errors::RecorderError::EmptyHeader);
|
||||
}
|
||||
timestamp = Utc::now().timestamp_millis();
|
||||
*self.live_id.write().await = timestamp.to_string();
|
||||
|
||||
self.db
|
||||
.add_record(
|
||||
PlatformType::BiliBili,
|
||||
timestamp.to_string().as_str(),
|
||||
self.room_id,
|
||||
&self.room_info.read().await.room_title,
|
||||
self.cover.read().await.clone(),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
// now work dir is confirmed
|
||||
work_dir = self.get_work_dir(timestamp.to_string().as_str()).await;
|
||||
is_first_record = true;
|
||||
|
||||
let entry_store = EntryStore::new(&work_dir).await;
|
||||
*self.entry_store.write().await = Some(entry_store);
|
||||
|
||||
// danmau file
|
||||
let danmu_file_path = format!("{}{}", work_dir, "danmu.txt");
|
||||
*self.danmu_storage.write().await = DanmuStorage::new(&danmu_file_path).await;
|
||||
let full_header_url = current_stream.ts_url(&header_url);
|
||||
let file_name = header_url.split('/').next_back().unwrap();
|
||||
let mut header = TsEntry {
|
||||
url: file_name.to_string(),
|
||||
@@ -600,6 +622,12 @@ impl BiliRecorder {
|
||||
ts: timestamp,
|
||||
is_header: true,
|
||||
};
|
||||
|
||||
// Create work directory before download
|
||||
tokio::fs::create_dir_all(&work_dir)
|
||||
.await
|
||||
.map_err(|e| super::errors::RecorderError::IoError { err: e })?;
|
||||
|
||||
// Download header
|
||||
match self
|
||||
.client
|
||||
@@ -611,11 +639,39 @@ impl BiliRecorder {
|
||||
Ok(size) => {
|
||||
if size == 0 {
|
||||
log::error!("Download header failed: {}", full_header_url);
|
||||
// Clean up empty directory since header download failed
|
||||
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
|
||||
log::warn!(
|
||||
"Failed to cleanup empty work directory {}: {}",
|
||||
work_dir,
|
||||
cleanup_err
|
||||
);
|
||||
}
|
||||
return Err(super::errors::RecorderError::InvalidStream {
|
||||
stream: current_stream,
|
||||
});
|
||||
}
|
||||
header.size = size;
|
||||
|
||||
// Now that download succeeded, create the record and setup stores
|
||||
self.db
|
||||
.add_record(
|
||||
PlatformType::BiliBili,
|
||||
timestamp.to_string().as_str(),
|
||||
self.room_id,
|
||||
&self.room_info.read().await.room_title,
|
||||
self.cover.read().await.clone(),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let entry_store = EntryStore::new(&work_dir).await;
|
||||
*self.entry_store.write().await = Some(entry_store);
|
||||
|
||||
// danmu file
|
||||
let danmu_file_path = format!("{}{}", work_dir, "danmu.txt");
|
||||
*self.danmu_storage.write().await = DanmuStorage::new(&danmu_file_path).await;
|
||||
|
||||
self.entry_store
|
||||
.write()
|
||||
.await
|
||||
@@ -623,23 +679,133 @@ impl BiliRecorder {
|
||||
.unwrap()
|
||||
.add_entry(header)
|
||||
.await;
|
||||
|
||||
let new_resolution = self.get_resolution(&full_header_url).await?;
|
||||
|
||||
log::info!(
|
||||
"[{}] Initial header resolution: {} {}",
|
||||
self.room_id,
|
||||
header_url,
|
||||
new_resolution
|
||||
);
|
||||
|
||||
*self.current_header_info.write().await = Some(HeaderInfo {
|
||||
url: header_url.clone(),
|
||||
resolution: new_resolution,
|
||||
});
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Download header failed: {}", e);
|
||||
// Clean up empty directory since header download failed
|
||||
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
|
||||
log::warn!(
|
||||
"Failed to cleanup empty work directory {}: {}",
|
||||
work_dir,
|
||||
cleanup_err
|
||||
);
|
||||
}
|
||||
return Err(e.into());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
work_dir = self.get_work_dir(timestamp.to_string().as_str()).await;
|
||||
// For non-FMP4 streams, check if we need to initialize
|
||||
if self.entry_store.read().await.as_ref().is_none() {
|
||||
timestamp = Utc::now().timestamp_millis();
|
||||
*self.live_id.write().await = timestamp.to_string();
|
||||
work_dir = self.get_work_dir(timestamp.to_string().as_str()).await;
|
||||
is_first_record = true;
|
||||
}
|
||||
}
|
||||
|
||||
// check resolution change
|
||||
let current_header_info = self.current_header_info.read().await.clone();
|
||||
if current_header_info.is_some() {
|
||||
let current_header_info = current_header_info.unwrap();
|
||||
if current_header_info.url != header_url {
|
||||
let new_resolution = self.get_resolution(&full_header_url).await?;
|
||||
log::debug!(
|
||||
"[{}] Header url changed: {} => {}, resolution: {} => {}",
|
||||
self.room_id,
|
||||
current_header_info.url,
|
||||
header_url,
|
||||
current_header_info.resolution,
|
||||
new_resolution
|
||||
);
|
||||
if current_header_info.resolution != new_resolution {
|
||||
self.reset().await;
|
||||
|
||||
return Err(super::errors::RecorderError::ResolutionChanged {
|
||||
err: format!(
|
||||
"Resolution changed: {} => {}",
|
||||
current_header_info.resolution, new_resolution
|
||||
),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
match parsed {
|
||||
Ok(Playlist::MasterPlaylist(pl)) => log::debug!("Master playlist:\n{:?}", pl),
|
||||
Ok(Playlist::MediaPlaylist(pl)) => {
|
||||
|
||||
match playlist {
|
||||
Playlist::MasterPlaylist(pl) => log::debug!("Master playlist:\n{:?}", pl),
|
||||
Playlist::MediaPlaylist(pl) => {
|
||||
let mut new_segment_fetched = false;
|
||||
let last_sequence = self
|
||||
.entry_store
|
||||
.read()
|
||||
.await
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.last_sequence;
|
||||
.map(|store| store.last_sequence)
|
||||
.unwrap_or(0); // For first-time recording, start from 0
|
||||
|
||||
// Parse BILI-AUX offsets to calculate precise durations for FMP4
|
||||
let mut segment_offsets = Vec::new();
|
||||
for ts in pl.segments.iter() {
|
||||
let mut seg_offset: i64 = 0;
|
||||
for tag in &ts.unknown_tags {
|
||||
if tag.tag == "BILI-AUX" {
|
||||
if let Some(rest) = &tag.rest {
|
||||
let parts: Vec<&str> = rest.split('|').collect();
|
||||
if !parts.is_empty() {
|
||||
let offset_hex = parts.first().unwrap();
|
||||
if let Ok(offset) = i64::from_str_radix(offset_hex, 16) {
|
||||
seg_offset = offset;
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
segment_offsets.push(seg_offset);
|
||||
}
|
||||
|
||||
// Extract stream start timestamp from header if available for FMP4
|
||||
let stream_start_timestamp = if current_stream.format == StreamType::FMP4 {
|
||||
if let Some(header_entry) = self
|
||||
.entry_store
|
||||
.read()
|
||||
.await
|
||||
.as_ref()
|
||||
.and_then(|store| store.get_header())
|
||||
{
|
||||
// Parse timestamp from header filename like "h1753276580.m4s"
|
||||
if let Some(timestamp_str) = header_entry
|
||||
.url
|
||||
.strip_prefix("h")
|
||||
.and_then(|s| s.strip_suffix(".m4s"))
|
||||
{
|
||||
timestamp_str.parse::<i64>().unwrap_or(0)
|
||||
} else {
|
||||
0
|
||||
}
|
||||
} else {
|
||||
0
|
||||
}
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
// Get the last segment offset from previous processing
|
||||
let mut last_offset = *self.last_segment_offset.read().await;
|
||||
|
||||
for (i, ts) in pl.segments.iter().enumerate() {
|
||||
let sequence = pl.media_sequence + i as u64;
|
||||
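The segment-duration math in this and the following hunk leans on each BILI-AUX tag carrying a hex offset (milliseconds from the stream start) as the first `|`-separated field, so the difference between consecutive offsets is the segment length. A small self-contained sketch with made-up offsets:

```rust
// rest looks like "3e8|<other fields>": offset first, hex encoded (values here are invented).
fn parse_bili_aux_offset(rest: &str) -> Option<i64> {
    let offset_hex = rest.split('|').next()?;
    i64::from_str_radix(offset_hex, 16).ok()
}

fn main() {
    let offsets: Vec<i64> = ["3e8", "7d0", "bb8"] // 1000 ms, 2000 ms, 3000 ms
        .iter()
        .filter_map(|hex| parse_bili_aux_offset(hex))
        .collect();
    // Consecutive offset deltas give per-segment durations in seconds.
    let durations: Vec<f64> = offsets
        .windows(2)
        .map(|w| (w[1] - w[0]) as f64 / 1000.0)
        .collect();
    assert_eq!(durations, vec![1.0, 1.0]);
}
```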
@@ -653,15 +819,83 @@ impl BiliRecorder {
|
||||
continue;
|
||||
}
|
||||
|
||||
let ts_mili = Utc::now().timestamp_millis();
|
||||
// Calculate precise timestamp from stream start + BILI-AUX offset for FMP4
|
||||
let ts_mili = if current_stream.format == StreamType::FMP4
|
||||
&& stream_start_timestamp > 0
|
||||
&& i < segment_offsets.len()
|
||||
{
|
||||
let seg_offset = segment_offsets[i];
|
||||
|
||||
stream_start_timestamp * 1000 + seg_offset
|
||||
} else {
|
||||
// Fallback to current time if parsing fails or not FMP4
|
||||
Utc::now().timestamp_millis()
|
||||
};
|
||||
|
||||
// encode segment offset into filename
|
||||
let file_name = ts.uri.split('/').next_back().unwrap_or(&ts.uri);
|
||||
let ts_length = pl.target_duration as f64;
|
||||
|
||||
// Calculate precise duration from BILI-AUX offsets for FMP4
|
||||
let precise_length_from_aux =
|
||||
if current_stream.format == StreamType::FMP4 && i < segment_offsets.len() {
|
||||
let current_offset = segment_offsets[i];
|
||||
|
||||
// Get the previous offset for duration calculation
|
||||
let prev_offset = if i > 0 {
|
||||
// Use previous segment in current M3U8
|
||||
Some(segment_offsets[i - 1])
|
||||
} else {
|
||||
// Use saved last offset from previous M3U8 processing
|
||||
last_offset
|
||||
};
|
||||
|
||||
if let Some(prev) = prev_offset {
|
||||
let duration_ms = current_offset - prev;
|
||||
if duration_ms > 0 {
|
||||
Some(duration_ms as f64 / 1000.0) // Convert ms to seconds
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
// No previous offset available, use target duration
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let client = self.client.clone();
|
||||
let mut retry = 0;
|
||||
let mut work_dir_created_for_non_fmp4 = false;
|
||||
|
||||
// For non-FMP4 streams, create record on first successful ts download
|
||||
if is_first_record && current_stream.format != StreamType::FMP4 {
|
||||
// Create work directory before first ts download
|
||||
tokio::fs::create_dir_all(&work_dir)
|
||||
.await
|
||||
.map_err(|e| super::errors::RecorderError::IoError { err: e })?;
|
||||
work_dir_created_for_non_fmp4 = true;
|
||||
}
|
||||
|
||||
loop {
|
||||
if retry > 3 {
|
||||
log::error!("Download ts failed after retry");
|
||||
|
||||
// Clean up empty directory if first ts download failed for non-FMP4
|
||||
if is_first_record
|
||||
&& current_stream.format != StreamType::FMP4
|
||||
&& work_dir_created_for_non_fmp4
|
||||
{
|
||||
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await
|
||||
{
|
||||
log::warn!(
|
||||
"Failed to cleanup empty work directory {}: {}",
|
||||
work_dir,
|
||||
cleanup_err
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
match client
|
||||
@@ -673,11 +907,84 @@ impl BiliRecorder {
|
||||
Ok(size) => {
|
||||
if size == 0 {
|
||||
log::error!("Segment with size 0, stream might be corrupted");
|
||||
|
||||
// Clean up empty directory if first ts download failed for non-FMP4
|
||||
if is_first_record
|
||||
&& current_stream.format != StreamType::FMP4
|
||||
&& work_dir_created_for_non_fmp4
|
||||
{
|
||||
if let Err(cleanup_err) =
|
||||
tokio::fs::remove_dir_all(&work_dir).await
|
||||
{
|
||||
log::warn!(
|
||||
"Failed to cleanup empty work directory {}: {}",
|
||||
work_dir,
|
||||
cleanup_err
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return Err(super::errors::RecorderError::InvalidStream {
|
||||
stream: current_stream,
|
||||
});
|
||||
}
|
||||
|
||||
// Create record and setup stores on first successful download for non-FMP4
|
||||
if is_first_record && current_stream.format != StreamType::FMP4 {
|
||||
self.db
|
||||
.add_record(
|
||||
PlatformType::BiliBili,
|
||||
timestamp.to_string().as_str(),
|
||||
self.room_id,
|
||||
&self.room_info.read().await.room_title,
|
||||
self.cover.read().await.clone(),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let entry_store = EntryStore::new(&work_dir).await;
|
||||
*self.entry_store.write().await = Some(entry_store);
|
||||
|
||||
// danmu file
|
||||
let danmu_file_path = format!("{}{}", work_dir, "danmu.txt");
|
||||
*self.danmu_storage.write().await =
|
||||
DanmuStorage::new(&danmu_file_path).await;
|
||||
|
||||
is_first_record = false;
|
||||
}
|
||||
|
||||
// Get precise duration - prioritize BILI-AUX for FMP4, fallback to ffprobe if needed
|
||||
let precise_length = if let Some(aux_duration) =
|
||||
precise_length_from_aux
|
||||
{
|
||||
aux_duration
|
||||
} else if current_stream.format != StreamType::FMP4 {
|
||||
// For regular TS segments, use direct ffprobe
|
||||
let file_path = format!("{}/{}", work_dir, file_name);
|
||||
match crate::ffmpeg::get_segment_duration(std::path::Path::new(
|
||||
&file_path,
|
||||
))
|
||||
.await
|
||||
{
|
||||
Ok(duration) => {
|
||||
log::debug!(
|
||||
"Precise TS segment duration: {}s (original: {}s)",
|
||||
duration,
|
||||
ts_length
|
||||
);
|
||||
duration
|
||||
}
|
||||
Err(e) => {
|
||||
log::warn!("Failed to get precise TS duration for {}: {}, using fallback", file_name, e);
|
||||
ts_length
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// FMP4 segment without BILI-AUX info, use fallback
|
||||
log::debug!("No BILI-AUX data available for FMP4 segment {}, using target duration", file_name);
|
||||
ts_length
|
||||
};
|
||||
|
||||
self.entry_store
|
||||
.write()
|
||||
.await
|
||||
@@ -686,18 +993,43 @@ impl BiliRecorder {
|
||||
.add_entry(TsEntry {
|
||||
url: file_name.into(),
|
||||
sequence,
|
||||
length: ts_length,
|
||||
length: precise_length,
|
||||
size,
|
||||
ts: ts_mili,
|
||||
is_header: false,
|
||||
})
|
||||
.await;
|
||||
|
||||
// Update last offset for next segment calculation
|
||||
if current_stream.format == StreamType::FMP4
|
||||
&& i < segment_offsets.len()
|
||||
{
|
||||
last_offset = Some(segment_offsets[i]);
|
||||
}
|
||||
|
||||
new_segment_fetched = true;
|
||||
break;
|
||||
}
|
||||
Err(e) => {
|
||||
retry += 1;
|
||||
log::warn!("Download ts failed, retry {}: {}", retry, e);
|
||||
|
||||
// If this is the last retry and it's the first record for non-FMP4, clean up
|
||||
if retry > 3
|
||||
&& is_first_record
|
||||
&& current_stream.format != StreamType::FMP4
|
||||
&& work_dir_created_for_non_fmp4
|
||||
{
|
||||
if let Err(cleanup_err) =
|
||||
tokio::fs::remove_dir_all(&work_dir).await
|
||||
{
|
||||
log::warn!(
|
||||
"Failed to cleanup empty work directory {}: {}",
|
||||
work_dir,
|
||||
cleanup_err
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -705,6 +1037,12 @@ impl BiliRecorder {
|
||||
|
||||
if new_segment_fetched {
|
||||
*self.last_update.write().await = Utc::now().timestamp();
|
||||
|
||||
// Save the last offset for next M3U8 processing
|
||||
if current_stream.format == StreamType::FMP4 {
|
||||
*self.last_segment_offset.write().await = last_offset;
|
||||
}
|
||||
|
||||
self.db
|
||||
.update_record(
|
||||
timestamp.to_string().as_str(),
|
||||
@@ -727,19 +1065,17 @@ impl BiliRecorder {
|
||||
}
|
||||
}
|
||||
// check the current stream is too slow or not
|
||||
if let Some(last_ts) = self.entry_store.read().await.as_ref().unwrap().last_ts() {
|
||||
if last_ts < Utc::now().timestamp() - 10 {
|
||||
log::error!("Stream is too slow, last entry ts is at {}", last_ts);
|
||||
return Err(super::errors::RecorderError::SlowStream {
|
||||
stream: current_stream,
|
||||
});
|
||||
if let Some(entry_store) = self.entry_store.read().await.as_ref() {
|
||||
if let Some(last_ts) = entry_store.last_ts() {
|
||||
if last_ts < Utc::now().timestamp() - 10 {
|
||||
log::error!("Stream is too slow, last entry ts is at {}", last_ts);
|
||||
return Err(super::errors::RecorderError::SlowStream {
|
||||
stream: current_stream,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
self.force_update.store(true, Ordering::Relaxed);
|
||||
return Err(e);
|
||||
}
|
||||
}
|
||||
|
||||
// check stream is nearly expired
|
||||
@@ -788,11 +1124,12 @@ impl BiliRecorder {
|
||||
None
|
||||
};
|
||||
|
||||
self.entry_store.read().await.as_ref().unwrap().manifest(
|
||||
!live_status || range.is_some(),
|
||||
true,
|
||||
range,
|
||||
)
|
||||
if let Some(entry_store) = self.entry_store.read().await.as_ref() {
|
||||
entry_store.manifest(!live_status || range.is_some(), true, range)
|
||||
} else {
|
||||
// Return empty manifest if entry_store is not initialized yet
|
||||
"#EXTM3U\n#EXT-X-VERSION:3\n".to_string()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -938,7 +1275,11 @@ impl super::Recorder for BiliRecorder {
|
||||
Ok(if live_id == *self.live_id.read().await {
|
||||
// just return current cache content
|
||||
match self.danmu_storage.read().await.as_ref() {
|
||||
Some(storage) => storage.get_entries(self.first_segment_ts(live_id).await).await,
|
||||
Some(storage) => {
|
||||
storage
|
||||
.get_entries(self.first_segment_ts(live_id).await)
|
||||
.await
|
||||
}
|
||||
None => Vec::new(),
|
||||
}
|
||||
} else {
|
||||
@@ -956,7 +1297,9 @@ impl super::Recorder for BiliRecorder {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
let storage = storage.unwrap();
|
||||
storage.get_entries(self.first_segment_ts(live_id).await).await
|
||||
storage
|
||||
.get_entries(self.first_segment_ts(live_id).await)
|
||||
.await
|
||||
})
|
||||
}
|
||||
|
||||
@@ -964,7 +1307,10 @@ impl super::Recorder for BiliRecorder {
|
||||
*self.live_id.read().await == live_id && *self.live_status.read().await
|
||||
}
|
||||
|
||||
async fn get_archive_subtitle(&self, live_id: &str) -> Result<String, super::errors::RecorderError> {
|
||||
async fn get_archive_subtitle(
|
||||
&self,
|
||||
live_id: &str,
|
||||
) -> Result<String, super::errors::RecorderError> {
|
||||
// read subtitle file under work_dir
|
||||
let work_dir = self.get_work_dir(live_id).await;
|
||||
let subtitle_file_path = format!("{}/{}", work_dir, "subtitle.srt");
|
||||
@@ -981,7 +1327,10 @@ impl super::Recorder for BiliRecorder {
|
||||
Ok(subtitle_content)
|
||||
}
|
||||
|
||||
async fn generate_archive_subtitle(&self, live_id: &str) -> Result<String, super::errors::RecorderError> {
|
||||
async fn generate_archive_subtitle(
|
||||
&self,
|
||||
live_id: &str,
|
||||
) -> Result<String, super::errors::RecorderError> {
|
||||
// generate subtitle file under work_dir
|
||||
let work_dir = self.get_work_dir(live_id).await;
|
||||
let subtitle_file_path = format!("{}/{}", work_dir, "subtitle.srt");
|
||||
@@ -994,7 +1343,13 @@ impl super::Recorder for BiliRecorder {
|
||||
log::info!("M3U8 index file generated: {}", m3u8_index_file_path);
|
||||
// generate a tmp clip file
|
||||
let clip_file_path = format!("{}/{}", work_dir, "tmp.mp4");
|
||||
if let Err(e) = crate::ffmpeg::clip_from_m3u8(None::<&crate::progress_reporter::ProgressReporter>, Path::new(&m3u8_index_file_path), Path::new(&clip_file_path)).await {
|
||||
if let Err(e) = crate::ffmpeg::clip_from_m3u8(
|
||||
None::<&crate::progress_reporter::ProgressReporter>,
|
||||
Path::new(&m3u8_index_file_path),
|
||||
Path::new(&clip_file_path),
|
||||
)
|
||||
.await
|
||||
{
|
||||
return Err(super::errors::RecorderError::SubtitleGenerationFailed {
|
||||
error: e.to_string(),
|
||||
});
|
||||
@@ -1002,7 +1357,17 @@ impl super::Recorder for BiliRecorder {
|
||||
log::info!("Temp clip file generated: {}", clip_file_path);
|
||||
// generate subtitle file
|
||||
let config = self.config.read().await;
|
||||
let result = crate::ffmpeg::generate_video_subtitle(None, Path::new(&clip_file_path), "whisper", &config.whisper_model, &config.whisper_prompt, &config.openai_api_key, &config.openai_api_endpoint, &config.whisper_language).await;
|
||||
let result = crate::ffmpeg::generate_video_subtitle(
|
||||
None,
|
||||
Path::new(&clip_file_path),
|
||||
"whisper",
|
||||
&config.whisper_model,
|
||||
&config.whisper_prompt,
|
||||
&config.openai_api_key,
|
||||
&config.openai_api_endpoint,
|
||||
&config.whisper_language,
|
||||
)
|
||||
.await;
|
||||
// write subtitle file
|
||||
if let Err(e) = result {
|
||||
return Err(super::errors::RecorderError::SubtitleGenerationFailed {
|
||||
@@ -1011,7 +1376,12 @@ impl super::Recorder for BiliRecorder {
|
||||
}
|
||||
log::info!("Subtitle generated");
|
||||
let result = result.unwrap();
|
||||
let subtitle_content = result.subtitle_content.iter().map(item_to_srt).collect::<Vec<String>>().join("");
|
||||
let subtitle_content = result
|
||||
.subtitle_content
|
||||
.iter()
|
||||
.map(item_to_srt)
|
||||
.collect::<Vec<String>>()
|
||||
.join("");
|
||||
subtitle_file.write_all(subtitle_content.as_bytes()).await?;
|
||||
log::info!("Subtitle file written");
|
||||
// remove tmp file
|
||||
|
||||
@@ -138,25 +138,12 @@ impl BiliStream {
        }
    })
}

pub fn is_same(&self, other: &BiliStream) -> bool {
    // Extract live_id part from path (e.g., live_1848752274_71463808)
    let get_live_id = |path: &str| {
        path.split('/')
            .find(|part| part.starts_with("live_"))
            .unwrap_or("")
            .to_string()
    };
    let self_live_id = get_live_id(&self.path);
    let other_live_id = get_live_id(&other.path);
    self_live_id == other_live_id
}
}

impl BiliClient {
    pub fn new() -> Result<BiliClient, BiliClientError> {
    pub fn new(user_agent: &str) -> Result<BiliClient, BiliClientError> {
        let mut headers = reqwest::header::HeaderMap::new();
        headers.insert("user-agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36".parse().unwrap());
        headers.insert("user-agent", user_agent.parse().unwrap());

        if let Ok(client) = Client::builder().timeout(Duration::from_secs(10)).build() {
            Ok(BiliClient { client, headers })

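Since the configured user agent now flows into a `HeaderValue` via `.parse().unwrap()`, an invalid value (one containing a newline, say) would panic when the client is built. A hedged sketch of a fallback variant, not the project's actual code; the default string is made up:

```rust
use reqwest::header::{HeaderMap, HeaderValue, USER_AGENT};

fn build_headers(user_agent: &str) -> HeaderMap {
    let mut headers = HeaderMap::new();
    // Fall back to a fixed UA instead of panicking on an invalid header value.
    let ua = HeaderValue::from_str(user_agent)
        .unwrap_or_else(|_| HeaderValue::from_static("Mozilla/5.0 (compatible; bili-shadowreplay)"));
    headers.insert(USER_AGENT, ua);
    headers
}

fn main() {
    let headers = build_headers("my-custom-agent/1.0");
    assert!(headers.contains_key(USER_AGENT));
}
```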
@@ -67,12 +67,16 @@ impl DanmuStorage {

    // get entries with ts relative to live start time
    pub async fn get_entries(&self, live_start_ts: i64) -> Vec<DanmuEntry> {
        let mut danmus: Vec<DanmuEntry> = self.cache.read().await.iter().map(|entry| {
            DanmuEntry {
        let mut danmus: Vec<DanmuEntry> = self
            .cache
            .read()
            .await
            .iter()
            .map(|entry| DanmuEntry {
                ts: entry.ts - live_start_ts,
                content: entry.content.clone(),
            }
        }).collect();
            })
            .collect();
        // filter out danmus with ts < 0
        danmus.retain(|entry| entry.ts >= 0);
        danmus

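A minimal sketch of the conversion `get_entries` performs: timestamps become relative to the first segment, and anything sent before it is dropped (the tuples below stand in for the cached `DanmuEntry` values):

```rust
struct DanmuEntry {
    ts: i64,
    content: String,
}

fn to_relative(cache: &[(i64, &str)], live_start_ts: i64) -> Vec<DanmuEntry> {
    let mut danmus: Vec<DanmuEntry> = cache
        .iter()
        .map(|(ts, content)| DanmuEntry {
            ts: ts - live_start_ts,
            content: content.to_string(),
        })
        .collect();
    // Danmu recorded before the first segment end up with a negative offset and are dropped.
    danmus.retain(|e| e.ts >= 0);
    danmus
}

fn main() {
    let cache = [(990, "early"), (1000, "hello"), (1005, "world")];
    let rel = to_relative(&cache, 1000);
    assert_eq!(rel.len(), 2);
    assert_eq!(rel[0].ts, 0);
    assert_eq!(rel[1].content, "world");
}
```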
@@ -19,11 +19,11 @@ use danmu_stream::danmu_stream::DanmuStream;
|
||||
use danmu_stream::provider::ProviderType;
|
||||
use danmu_stream::DanmuMessageType;
|
||||
use rand::random;
|
||||
use tokio::fs::File;
|
||||
use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tokio::fs::File;
|
||||
use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
|
||||
use tokio::sync::{broadcast, Mutex, RwLock};
|
||||
use tokio::task::JoinHandle;
|
||||
|
||||
@@ -90,7 +90,7 @@ impl DouyinRecorder {
|
||||
enabled: bool,
|
||||
channel: broadcast::Sender<RecorderEvent>,
|
||||
) -> Result<Self, super::errors::RecorderError> {
|
||||
let client = client::DouyinClient::new(account);
|
||||
let client = client::DouyinClient::new(&config.read().await.user_agent, account);
|
||||
let room_info = client.get_room_info(room_id).await?;
|
||||
let mut live_status = LiveStatus::Offline;
|
||||
if room_info.data.room_status == 0 {
|
||||
@@ -209,59 +209,8 @@ impl DouyinRecorder {
|
||||
.hls_pull_url
|
||||
.is_empty()
|
||||
{
|
||||
*self.live_id.write().await = Utc::now().timestamp_millis().to_string();
|
||||
*self.danmu_room_id.write().await = info.data.data[0].id_str.clone();
|
||||
// create a new record
|
||||
let cover_url = info.data.data[0]
|
||||
.cover
|
||||
.as_ref()
|
||||
.map(|cover| cover.url_list[0].clone());
|
||||
let cover = if let Some(url) = cover_url {
|
||||
Some(self.client.get_cover_base64(&url).await.unwrap())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Err(e) = self
|
||||
.db
|
||||
.add_record(
|
||||
PlatformType::Douyin,
|
||||
self.live_id.read().await.as_str(),
|
||||
self.room_id,
|
||||
&info.data.data[0].title,
|
||||
cover,
|
||||
None,
|
||||
)
|
||||
.await
|
||||
{
|
||||
log::error!("Failed to add record: {}", e);
|
||||
}
|
||||
|
||||
// setup entry store
|
||||
let work_dir = self.get_work_dir(self.live_id.read().await.as_str()).await;
|
||||
let entry_store = EntryStore::new(&work_dir).await;
|
||||
*self.entry_store.write().await = Some(entry_store);
|
||||
|
||||
// setup danmu store
|
||||
let danmu_file_path = format!("{}{}", work_dir, "danmu.txt");
|
||||
let danmu_store = DanmuStorage::new(&danmu_file_path).await;
|
||||
*self.danmu_store.write().await = danmu_store;
|
||||
|
||||
// start danmu task
|
||||
if let Some(danmu_task) = self.danmu_task.lock().await.as_mut() {
|
||||
danmu_task.abort();
|
||||
}
|
||||
if let Some(danmu_stream_task) = self.danmu_stream_task.lock().await.as_mut() {
|
||||
danmu_stream_task.abort();
|
||||
}
|
||||
let live_id = self.live_id.read().await.clone();
|
||||
let self_clone = self.clone();
|
||||
*self.danmu_task.lock().await = Some(tokio::spawn(async move {
|
||||
log::info!("Start fetching danmu for live {}", live_id);
|
||||
let _ = self_clone.danmu().await;
|
||||
}));
|
||||
|
||||
// setup stream url
|
||||
// Only set stream URL, don't create record yet
|
||||
// Record will be created when first ts download succeeds
|
||||
let new_stream_url = self.get_best_stream_url(&info).await;
|
||||
if new_stream_url.is_none() {
|
||||
log::error!("No stream url found in room_info: {:#?}", info);
|
||||
@@ -270,6 +219,7 @@ impl DouyinRecorder {
|
||||
|
||||
log::info!("New douyin stream URL: {}", new_stream_url.clone().unwrap());
|
||||
*self.stream_url.write().await = Some(new_stream_url.unwrap());
|
||||
*self.danmu_room_id.write().await = info.data.data[0].id_str.clone();
|
||||
}
|
||||
|
||||
true
|
||||
@@ -283,7 +233,13 @@ impl DouyinRecorder {
|
||||
|
||||
async fn danmu(&self) -> Result<(), super::errors::RecorderError> {
|
||||
let cookies = self.account.cookies.clone();
|
||||
let danmu_room_id = self.danmu_room_id.read().await.clone().parse::<u64>().unwrap_or(0);
|
||||
let danmu_room_id = self
|
||||
.danmu_room_id
|
||||
.read()
|
||||
.await
|
||||
.clone()
|
||||
.parse::<u64>()
|
||||
.unwrap_or(0);
|
||||
let danmu_stream = DanmuStream::new(ProviderType::Douyin, &cookies, danmu_room_id).await;
|
||||
if danmu_stream.is_err() {
|
||||
let err = danmu_stream.err().unwrap();
|
||||
@@ -369,6 +325,25 @@ impl DouyinRecorder {
        }
    }

    fn parse_stream_url(&self, stream_url: &str) -> (String, String) {
        // Parse stream URL to extract base URL and query parameters
        // Example: http://7167739a741646b4651b6949b2f3eb8e.livehwc3.cn/pull-hls-l26.douyincdn.com/third/stream-693342996808860134_or4.m3u8?sub_m3u8=true&user_session_id=16090eb45ab8a2f042f7c46563936187&major_anchor_level=common&edge_slice=true&expire=67d944ec&sign=47b95cc6e8de20d82f3d404412fa8406

        let base_url = stream_url
            .rfind('/')
            .map(|i| &stream_url[..=i])
            .unwrap_or(stream_url)
            .to_string();

        let query_params = stream_url
            .find('?')
            .map(|i| &stream_url[i..])
            .unwrap_or("")
            .to_string();

        (base_url, query_params)
    }

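For reference, this is what the split produces on a URL shaped like the example in the comment above (shortened, values illustrative):

```rust
// Free-function copy of parse_stream_url so the sketch is self-contained.
fn parse_stream_url(stream_url: &str) -> (String, String) {
    let base_url = stream_url
        .rfind('/')
        .map(|i| &stream_url[..=i])
        .unwrap_or(stream_url)
        .to_string();
    let query_params = stream_url
        .find('?')
        .map(|i| &stream_url[i..])
        .unwrap_or("")
        .to_string();
    (base_url, query_params)
}

fn main() {
    let url = "http://example.livehwc3.cn/pull-hls-l26.douyincdn.com/third/stream-693.m3u8?sub_m3u8=true&sign=abc";
    let (base, query) = parse_stream_url(url);
    assert_eq!(base, "http://example.livehwc3.cn/pull-hls-l26.douyincdn.com/third/");
    assert_eq!(query, "?sub_m3u8=true&sign=abc");
}
```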
async fn update_entries(&self) -> Result<u128, RecorderError> {
|
||||
let task_begin_time = std::time::Instant::now();
|
||||
|
||||
@@ -391,18 +366,29 @@ impl DouyinRecorder {
|
||||
*self.stream_url.write().await = Some(updated_stream_url);
|
||||
|
||||
let mut new_segment_fetched = false;
|
||||
let work_dir = self.get_work_dir(self.live_id.read().await.as_str()).await;
|
||||
let mut is_first_segment = self.entry_store.read().await.is_none();
|
||||
let work_dir;
|
||||
|
||||
// Create work directory if not exists
|
||||
tokio::fs::create_dir_all(&work_dir).await?;
|
||||
// If this is the first segment, prepare but don't create directories yet
|
||||
if is_first_segment {
|
||||
// Generate live_id for potential use
|
||||
let live_id = Utc::now().timestamp_millis().to_string();
|
||||
*self.live_id.write().await = live_id.clone();
|
||||
work_dir = self.get_work_dir(&live_id).await;
|
||||
} else {
|
||||
work_dir = self.get_work_dir(self.live_id.read().await.as_str()).await;
|
||||
}
|
||||
|
||||
let last_sequence = self
|
||||
.entry_store
|
||||
.read()
|
||||
.await
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.last_sequence;
|
||||
let last_sequence = if is_first_segment {
|
||||
0
|
||||
} else {
|
||||
self.entry_store
|
||||
.read()
|
||||
.await
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.last_sequence
|
||||
};
|
||||
|
||||
for segment in playlist.segments.iter() {
|
||||
let formated_ts_name = segment.uri.clone();
|
||||
@@ -421,64 +407,206 @@ impl DouyinRecorder {
|
||||
}
|
||||
|
||||
// example: pull-l3.douyincdn.com_stream-405850027547689439_or4-1752675567719.ts
|
||||
let mut uri = segment.uri.clone();
|
||||
// if uri contains ?params, remove it
|
||||
if let Some(pos) = uri.find('?') {
|
||||
uri = uri[..pos].to_string();
|
||||
}
|
||||
let uri = segment.uri.clone();
|
||||
|
||||
let ts_url = if uri.starts_with("http") {
|
||||
uri.clone()
|
||||
} else {
|
||||
// Get the base URL without the filename and query parameters
|
||||
let base_url = stream_url
|
||||
.rfind('/')
|
||||
.map(|i| &stream_url[..=i])
|
||||
.unwrap_or(&stream_url);
|
||||
// Get the query parameters
|
||||
let query = stream_url.find('?').map(|i| &stream_url[i..]).unwrap_or("");
|
||||
// Combine: base_url + new_filename + query_params
|
||||
format!("{}{}{}", base_url, uri, query)
|
||||
// Parse the stream URL to extract base URL and query parameters
|
||||
let (base_url, query_params) = self.parse_stream_url(&stream_url);
|
||||
|
||||
// Check if the segment URI already has query parameters
|
||||
if uri.contains('?') {
|
||||
// If segment URI has query params, append m3u8 query params with &
|
||||
format!("{}{}&{}", base_url, uri, &query_params[1..]) // Remove leading ? from query_params
|
||||
} else {
|
||||
// If segment URI has no query params, append m3u8 query params with ?
|
||||
format!("{}{}{}", base_url, uri, query_params)
|
||||
}
|
||||
};
|
||||
|
||||
let file_name = format!("{}.ts", sequence);
|
||||
// Download segment with retry mechanism
|
||||
let mut retry_count = 0;
|
||||
let max_retries = 3;
|
||||
let mut download_success = false;
|
||||
let mut work_dir_created = false;
|
||||
|
||||
// Download segment
|
||||
match self
|
||||
.client
|
||||
.download_ts(&ts_url, &format!("{}/{}", work_dir, file_name))
|
||||
.await
|
||||
{
|
||||
Ok(size) => {
|
||||
if size == 0 {
|
||||
log::error!("Download segment failed: {}", ts_url);
|
||||
continue;
|
||||
while retry_count < max_retries && !download_success {
|
||||
let file_name = format!("{}.ts", sequence);
|
||||
let file_path = format!("{}/{}", work_dir, file_name);
|
||||
|
||||
// If this is the first segment, create work directory before first download attempt
|
||||
if is_first_segment && !work_dir_created {
|
||||
// Create work directory only when we're about to download
|
||||
if let Err(e) = tokio::fs::create_dir_all(&work_dir).await {
|
||||
log::error!("Failed to create work directory: {}", e);
|
||||
return Err(e.into());
|
||||
}
|
||||
let ts_entry = TsEntry {
|
||||
url: file_name,
|
||||
sequence,
|
||||
length: segment.duration as f64,
|
||||
size,
|
||||
ts: Utc::now().timestamp_millis(),
|
||||
is_header: false,
|
||||
};
|
||||
|
||||
self.entry_store
|
||||
.write()
|
||||
.await
|
||||
.as_mut()
|
||||
.unwrap()
|
||||
.add_entry(ts_entry)
|
||||
.await;
|
||||
|
||||
new_segment_fetched = true;
|
||||
work_dir_created = true;
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to download segment: {}", e);
|
||||
*self.stream_url.write().await = None;
|
||||
return Err(e.into());
|
||||
|
||||
match self.client.download_ts(&ts_url, &file_path).await {
|
||||
Ok(size) => {
|
||||
if size == 0 {
|
||||
log::error!("Download segment failed (empty response): {}", ts_url);
|
||||
retry_count += 1;
|
||||
if retry_count < max_retries {
|
||||
tokio::time::sleep(Duration::from_millis(500)).await;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
// If this is the first successful download, create record and initialize stores
|
||||
if is_first_segment {
|
||||
// Create database record
|
||||
let room_info = room_info.as_ref().unwrap();
|
||||
let cover_url = room_info.data.data[0]
|
||||
.cover
|
||||
.as_ref()
|
||||
.map(|cover| cover.url_list[0].clone());
|
||||
let cover = if let Some(url) = cover_url {
|
||||
Some(self.client.get_cover_base64(&url).await.unwrap_or_default())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Err(e) = self
|
||||
.db
|
||||
.add_record(
|
||||
PlatformType::Douyin,
|
||||
self.live_id.read().await.as_str(),
|
||||
self.room_id,
|
||||
&room_info.data.data[0].title,
|
||||
cover,
|
||||
None,
|
||||
)
|
||||
.await
|
||||
{
|
||||
log::error!("Failed to add record: {}", e);
|
||||
}
|
||||
|
||||
// Setup entry store
|
||||
let entry_store = EntryStore::new(&work_dir).await;
|
||||
*self.entry_store.write().await = Some(entry_store);
|
||||
|
||||
// Setup danmu store
|
||||
let danmu_file_path = format!("{}{}", work_dir, "danmu.txt");
|
||||
let danmu_store = DanmuStorage::new(&danmu_file_path).await;
|
||||
*self.danmu_store.write().await = danmu_store;
|
||||
|
||||
// Start danmu task
|
||||
if let Some(danmu_task) = self.danmu_task.lock().await.as_mut() {
|
||||
danmu_task.abort();
|
||||
}
|
||||
if let Some(danmu_stream_task) =
|
||||
self.danmu_stream_task.lock().await.as_mut()
|
||||
{
|
||||
danmu_stream_task.abort();
|
||||
}
|
||||
let live_id = self.live_id.read().await.clone();
|
||||
let self_clone = self.clone();
|
||||
*self.danmu_task.lock().await = Some(tokio::spawn(async move {
|
||||
log::info!("Start fetching danmu for live {}", live_id);
|
||||
let _ = self_clone.danmu().await;
|
||||
}));
|
||||
|
||||
is_first_segment = false;
|
||||
}
|
||||
|
||||
let ts_entry = TsEntry {
|
||||
url: file_name,
|
||||
sequence,
|
||||
length: segment.duration as f64,
|
||||
size,
|
||||
ts: Utc::now().timestamp_millis(),
|
||||
is_header: false,
|
||||
};
|
||||
|
||||
self.entry_store
|
||||
.write()
|
||||
.await
|
||||
.as_mut()
|
||||
.unwrap()
|
||||
.add_entry(ts_entry)
|
||||
.await;
|
||||
|
||||
new_segment_fetched = true;
|
||||
download_success = true;
|
||||
}
|
||||
Err(e) => {
|
||||
log::warn!(
|
||||
"Failed to download segment (attempt {}/{}): {} - URL: {}",
|
||||
retry_count + 1,
|
||||
max_retries,
|
||||
e,
|
||||
ts_url
|
||||
);
|
||||
retry_count += 1;
|
||||
if retry_count < max_retries {
|
||||
tokio::time::sleep(Duration::from_millis(1000 * retry_count as u64))
|
||||
.await;
|
||||
continue;
|
||||
}
|
||||
// If all retries failed, check if it's a 400 error
|
||||
if e.to_string().contains("400") {
|
||||
log::error!(
|
||||
"HTTP 400 error for segment, stream URL may be expired: {}",
|
||||
ts_url
|
||||
);
|
||||
*self.stream_url.write().await = None;
|
||||
|
||||
// Clean up empty directory if first segment failed
|
||||
if is_first_segment && work_dir_created {
|
||||
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await
|
||||
{
|
||||
log::warn!(
|
||||
"Failed to cleanup empty work directory {}: {}",
|
||||
work_dir,
|
||||
cleanup_err
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return Err(RecorderError::NoStreamAvailable);
|
||||
}
|
||||
|
||||
// Clean up empty directory if first segment failed
|
||||
if is_first_segment && work_dir_created {
|
||||
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
|
||||
log::warn!(
|
||||
"Failed to cleanup empty work directory {}: {}",
|
||||
work_dir,
|
||||
cleanup_err
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return Err(e.into());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !download_success {
|
||||
log::error!(
|
||||
"Failed to download segment after {} retries: {}",
|
||||
max_retries,
|
||||
ts_url
|
||||
);
|
||||
|
||||
// Clean up empty directory if first segment failed after all retries
|
||||
if is_first_segment && work_dir_created {
|
||||
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
|
||||
log::warn!(
|
||||
"Failed to cleanup empty work directory {}: {}",
|
||||
work_dir,
|
||||
cleanup_err
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if new_segment_fetched {
|
||||
@@ -637,7 +765,10 @@ impl Recorder for DouyinRecorder {
|
||||
m3u8_content
|
||||
}
|
||||
|
||||
async fn get_archive_subtitle(&self, live_id: &str) -> Result<String, super::errors::RecorderError> {
|
||||
async fn get_archive_subtitle(
|
||||
&self,
|
||||
live_id: &str,
|
||||
) -> Result<String, super::errors::RecorderError> {
|
||||
let work_dir = self.get_work_dir(live_id).await;
|
||||
let subtitle_file_path = format!("{}/{}", work_dir, "subtitle.srt");
|
||||
let subtitle_file = File::open(subtitle_file_path).await;
|
||||
@@ -653,7 +784,10 @@ impl Recorder for DouyinRecorder {
|
||||
Ok(subtitle_content)
|
||||
}
|
||||
|
||||
async fn generate_archive_subtitle(&self, live_id: &str) -> Result<String, super::errors::RecorderError> {
|
||||
async fn generate_archive_subtitle(
|
||||
&self,
|
||||
live_id: &str,
|
||||
) -> Result<String, super::errors::RecorderError> {
|
||||
// generate subtitle file under work_dir
|
||||
let work_dir = self.get_work_dir(live_id).await;
|
||||
let subtitle_file_path = format!("{}/{}", work_dir, "subtitle.srt");
|
||||
@@ -665,22 +799,43 @@ impl Recorder for DouyinRecorder {
|
||||
tokio::fs::write(&m3u8_index_file_path, m3u8_content).await?;
|
||||
// generate a tmp clip file
|
||||
let clip_file_path = format!("{}/{}", work_dir, "tmp.mp4");
|
||||
if let Err(e) = crate::ffmpeg::clip_from_m3u8(None::<&crate::progress_reporter::ProgressReporter>, Path::new(&m3u8_index_file_path), Path::new(&clip_file_path)).await {
|
||||
if let Err(e) = crate::ffmpeg::clip_from_m3u8(
|
||||
None::<&crate::progress_reporter::ProgressReporter>,
|
||||
Path::new(&m3u8_index_file_path),
|
||||
Path::new(&clip_file_path),
|
||||
)
|
||||
.await
|
||||
{
|
||||
return Err(super::errors::RecorderError::SubtitleGenerationFailed {
|
||||
error: e.to_string(),
|
||||
});
|
||||
}
|
||||
// generate subtitle file
|
||||
let config = self.config.read().await;
|
||||
let result = crate::ffmpeg::generate_video_subtitle(None, Path::new(&clip_file_path), "whisper", &config.whisper_model, &config.whisper_prompt, &config.openai_api_key, &config.openai_api_endpoint, &config.whisper_language).await;
|
||||
let result = crate::ffmpeg::generate_video_subtitle(
|
||||
None,
|
||||
Path::new(&clip_file_path),
|
||||
"whisper",
|
||||
&config.whisper_model,
|
||||
&config.whisper_prompt,
|
||||
&config.openai_api_key,
|
||||
&config.openai_api_endpoint,
|
||||
&config.whisper_language,
|
||||
)
|
||||
.await;
|
||||
// write subtitle file
|
||||
if let Err(e) = result {
|
||||
return Err(super::errors::RecorderError::SubtitleGenerationFailed {
|
||||
error: e.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
let result = result.unwrap();
|
||||
let subtitle_content = result.subtitle_content.iter().map(item_to_srt).collect::<Vec<String>>().join("");
|
||||
let subtitle_content = result
|
||||
.subtitle_content
|
||||
.iter()
|
||||
.map(item_to_srt)
|
||||
.collect::<Vec<String>>()
|
||||
.join("");
|
||||
subtitle_file.write_all(subtitle_content.as_bytes()).await?;
|
||||
|
||||
// remove tmp file
|
||||
@@ -757,7 +912,11 @@ impl Recorder for DouyinRecorder {
|
||||
Ok(if live_id == *self.live_id.read().await {
|
||||
// just return current cache content
|
||||
match self.danmu_store.read().await.as_ref() {
|
||||
Some(storage) => storage.get_entries(self.first_segment_ts(live_id).await).await,
|
||||
Some(storage) => {
|
||||
storage
|
||||
.get_entries(self.first_segment_ts(live_id).await)
|
||||
.await
|
||||
}
|
||||
None => Vec::new(),
|
||||
}
|
||||
} else {
|
||||
@@ -775,7 +934,9 @@ impl Recorder for DouyinRecorder {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
let storage = storage.unwrap();
|
||||
storage.get_entries(self.first_segment_ts(live_id).await).await
|
||||
storage
|
||||
.get_entries(self.first_segment_ts(live_id).await)
|
||||
.await
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -6,11 +6,9 @@ use reqwest::{Client, Error as ReqwestError};
use super::response::DouyinRoomInfoResponse;
use std::fmt;

const USER_AGENT: &str = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36";

#[derive(Debug)]
pub enum DouyinClientError {
    Network(ReqwestError),
    Network(String),
    Io(std::io::Error),
    Playlist(String),
}
@@ -27,7 +25,7 @@ impl fmt::Display for DouyinClientError {

impl From<ReqwestError> for DouyinClientError {
    fn from(err: ReqwestError) -> Self {
        DouyinClientError::Network(err)
        DouyinClientError::Network(err.to_string())
    }
}

@@ -44,8 +42,8 @@ pub struct DouyinClient {
}

impl DouyinClient {
    pub fn new(account: &AccountRow) -> Self {
        let client = Client::builder().user_agent(USER_AGENT).build().unwrap();
    pub fn new(user_agent: &str, account: &AccountRow) -> Self {
        let client = Client::builder().user_agent(user_agent).build().unwrap();
        Self {
            client,
            cookies: account.cookies.clone(),
@@ -57,7 +55,7 @@ impl DouyinClient {
        room_id: u64,
    ) -> Result<DouyinRoomInfoResponse, DouyinClientError> {
        let url = format!(
            "https://live.douyin.com/webcast/room/web/enter/?aid=6383&app_name=douyin_web&live_id=1&device_platform=web&language=zh-CN&enter_from=web_live&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Chrome&browser_version=122.0.0.0&web_rid={}",
            "https://live.douyin.com/webcast/room/web/enter/?aid=6383&app_name=douyin_web&live_id=1&device_platform=web&language=zh-CN&enter_from=web_live&a_bogus=0&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Chrome&browser_version=122.0.0.0&web_rid={}",
            room_id
        );

@@ -65,14 +63,97 @@ impl DouyinClient {
            .client
            .get(&url)
            .header("Referer", "https://live.douyin.com/")
            .header("User-Agent", USER_AGENT)
            .header("Cookie", self.cookies.clone())
            .send()
            .await?
            .json::<DouyinRoomInfoResponse>()
            .await?;

        Ok(resp)
        let status = resp.status();
        let text = resp.text().await?;

        if status.is_success() {
            if let Ok(data) = serde_json::from_str::<DouyinRoomInfoResponse>(&text) {
                return Ok(data);
            } else {
                log::error!("Failed to parse room info response: {}", text);
                return Err(DouyinClientError::Network(format!(
                    "Failed to parse room info response: {}",
                    text
                )));
            }
        }

        log::error!("Failed to get room info: {}", status);
        Err(DouyinClientError::Network(format!(
            "Failed to get room info: {} {}",
            status, text
        )))
    }

    pub async fn get_user_info(&self) -> Result<super::response::User, DouyinClientError> {
        // Use the IM spotlight relation API to get user info
        let url = "https://www.douyin.com/aweme/v1/web/im/spotlight/relation/";
        let resp = self
            .client
            .get(url)
            .header("Referer", "https://www.douyin.com/")
            .header("Cookie", self.cookies.clone())
            .send()
            .await?;

        let status = resp.status();
        let text = resp.text().await?;

        if status.is_success() {
            if let Ok(data) = serde_json::from_str::<super::response::DouyinRelationResponse>(&text)
            {
                if data.status_code == 0 {
                    let owner_sec_uid = &data.owner_sec_uid;

                    // Find the user's own info in the followings list by matching sec_uid
                    if let Some(followings) = &data.followings {
                        for following in followings {
                            if following.sec_uid == *owner_sec_uid {
                                let user = super::response::User {
                                    id_str: following.uid.clone(),
                                    sec_uid: following.sec_uid.clone(),
                                    nickname: following.nickname.clone(),
                                    avatar_thumb: following.avatar_thumb.clone(),
                                    follow_info: super::response::FollowInfo::default(),
                                    foreign_user: 0,
                                    open_id_str: "".to_string(),
                                };
                                return Ok(user);
                            }
                        }
                    }

                    // If not found in followings, create a minimal user info from owner_sec_uid
                    let user = super::response::User {
                        id_str: "".to_string(), // We don't have the numeric UID
                        sec_uid: owner_sec_uid.clone(),
                        nickname: "抖音用户".to_string(), // Default nickname
                        avatar_thumb: super::response::AvatarThumb { url_list: vec![] },
                        follow_info: super::response::FollowInfo::default(),
                        foreign_user: 0,
                        open_id_str: "".to_string(),
                    };
                    return Ok(user);
                }
            } else {
                log::error!("Failed to parse user info response: {}", text);
                return Err(DouyinClientError::Network(format!(
                    "Failed to parse user info response: {}",
                    text
                )));
            }
        }

        log::error!("Failed to get user info: {}", status);

        Err(DouyinClientError::Io(std::io::Error::new(
            std::io::ErrorKind::NotFound,
            "Failed to get user info from Douyin relation API",
        )))
    }

    pub async fn get_cover_base64(&self, url: &str) -> Result<String, DouyinClientError> {
@@ -90,7 +171,22 @@ impl DouyinClient {
        &self,
        url: &str,
    ) -> Result<(MediaPlaylist, String), DouyinClientError> {
        let content = self.client.get(url).send().await?.text().await?;
        let content = self
            .client
            .get(url)
            .header("Referer", "https://live.douyin.com/")
            .header("Cookie", self.cookies.clone())
            .header("Accept", "*/*")
            .header("Accept-Language", "zh-CN,zh;q=0.9,en;q=0.8")
            .header("Accept-Encoding", "gzip, deflate, br")
            .header("Connection", "keep-alive")
            .header("Sec-Fetch-Dest", "empty")
            .header("Sec-Fetch-Mode", "cors")
            .header("Sec-Fetch-Site", "cross-site")
            .send()
            .await?
            .text()
            .await?;
        // m3u8 content: #EXTM3U
        // #EXT-X-VERSION:3
        // #EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=2560000
@@ -110,12 +206,25 @@ impl DouyinClient {
    }

    pub async fn download_ts(&self, url: &str, path: &str) -> Result<u64, DouyinClientError> {
        let response = self.client.get(url).send().await?;
        let response = self
            .client
            .get(url)
            .header("Referer", "https://live.douyin.com/")
            .header("Cookie", self.cookies.clone())
            .header("Accept", "*/*")
            .header("Accept-Language", "zh-CN,zh;q=0.9,en;q=0.8")
            .header("Accept-Encoding", "gzip, deflate, br")
            .header("Connection", "keep-alive")
            .header("Sec-Fetch-Dest", "empty")
            .header("Sec-Fetch-Mode", "cors")
            .header("Sec-Fetch-Site", "cross-site")
            .send()
            .await?;

        if response.status() != reqwest::StatusCode::OK {
            return Err(DouyinClientError::Network(
                response.error_for_status().unwrap_err(),
            ));
            let error = response.error_for_status().unwrap_err();
            log::error!("HTTP error: {} for URL: {}", error, url);
            return Err(DouyinClientError::Network(error.to_string()));
        }

        let mut file = tokio::fs::File::create(path).await?;

@@ -182,8 +182,7 @@ pub struct Extra {

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PullDatas {
}
pub struct PullDatas {}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -436,8 +435,7 @@ pub struct Stats {

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LinkerMap {
}
pub struct LinkerMap {}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -478,13 +476,11 @@ pub struct LinkerDetail {

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LinkerMapStr {
}
pub struct LinkerMapStr {}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PlaymodeDetail {
}
pub struct PlaymodeDetail {}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -589,4 +585,91 @@ pub struct User {
    pub foreign_user: i64,
    #[serde(rename = "open_id_str")]
    pub open_id_str: String,
}
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DouyinRelationResponse {
    pub extra: Option<Extra2>,
    pub followings: Option<Vec<Following>>,
    #[serde(rename = "owner_sec_uid")]
    pub owner_sec_uid: String,
    #[serde(rename = "status_code")]
    pub status_code: i64,
    #[serde(rename = "log_pb")]
    pub log_pb: Option<LogPb>,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Extra2 {
    #[serde(rename = "fatal_item_ids")]
    pub fatal_item_ids: Vec<String>,
    pub logid: String,
    pub now: i64,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LogPb {
    #[serde(rename = "impr_id")]
    pub impr_id: String,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Following {
    #[serde(rename = "account_cert_info")]
    pub account_cert_info: String,
    #[serde(rename = "avatar_signature")]
    pub avatar_signature: String,
    #[serde(rename = "avatar_small")]
    pub avatar_small: AvatarSmall,
    #[serde(rename = "avatar_thumb")]
    pub avatar_thumb: AvatarThumb,
    #[serde(rename = "birthday_hide_level")]
    pub birthday_hide_level: i64,
    #[serde(rename = "commerce_user_level")]
    pub commerce_user_level: i64,
    #[serde(rename = "custom_verify")]
    pub custom_verify: String,
    #[serde(rename = "enterprise_verify_reason")]
    pub enterprise_verify_reason: String,
    #[serde(rename = "follow_status")]
    pub follow_status: i64,
    #[serde(rename = "follower_status")]
    pub follower_status: i64,
    #[serde(rename = "has_e_account_role")]
    pub has_e_account_role: bool,
    #[serde(rename = "im_activeness")]
    pub im_activeness: i64,
    #[serde(rename = "im_role_ids")]
    pub im_role_ids: Vec<serde_json::Value>,
    #[serde(rename = "is_im_oversea_user")]
    pub is_im_oversea_user: i64,
    pub nickname: String,
    #[serde(rename = "sec_uid")]
    pub sec_uid: String,
    #[serde(rename = "short_id")]
    pub short_id: String,
    pub signature: String,
    #[serde(rename = "social_relation_sub_type")]
    pub social_relation_sub_type: i64,
    #[serde(rename = "social_relation_type")]
    pub social_relation_type: i64,
    pub uid: String,
    #[serde(rename = "unique_id")]
    pub unique_id: String,
    #[serde(rename = "verification_type")]
    pub verification_type: i64,
    #[serde(rename = "webcast_sp_info")]
    pub webcast_sp_info: serde_json::Value,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AvatarSmall {
    pub uri: String,
    #[serde(rename = "url_list")]
    pub url_list: Vec<String>,
}

@@ -212,7 +212,6 @@ impl EntryStore {
    /// `vod` indicates the manifest is for stream or video.
    /// `force_time` adds DATE-TIME tag for each entry.
    pub fn manifest(&self, vod: bool, force_time: bool, range: Option<Range>) -> String {
        log::info!("Generate manifest for range: {:?} with vod: {} and force_time: {}", range, vod, force_time);
        let mut m3u8_content = "#EXTM3U\n".to_string();
        m3u8_content += "#EXT-X-VERSION:6\n";
        m3u8_content += if vod {
@@ -240,12 +239,6 @@ impl EntryStore {
        // Collect entries in range
        let first_entry = self.entries.first().unwrap();
        let first_entry_ts = first_entry.ts_seconds();
        log::debug!("First entry ts: {}", first_entry_ts);
        let last_entry = self.entries.last().unwrap();
        let last_entry_ts = last_entry.ts_seconds();
        log::debug!("Last entry ts: {}", last_entry_ts);
        log::debug!("Full length: {}", last_entry_ts - first_entry_ts);
        log::debug!("Range: {:?}", range);
        let mut entries_in_range = vec![];
        for e in &self.entries {
            // ignore header, cause it's already in EXT-X-MAP

@@ -22,4 +22,6 @@ custom_error! {pub RecorderError
    DanmuStreamError {err: danmu_stream::DanmuStreamError} = "Danmu stream error: {err}",
    SubtitleNotFound {live_id: String} = "Subtitle not found: {live_id}",
    SubtitleGenerationFailed {error: String} = "Subtitle generation failed: {error}",
    FfmpegError {err: String} = "FFmpeg error: {err}",
    ResolutionChanged {err: String} = "Resolution changed: {err}",
}

@@ -613,7 +613,12 @@ impl RecorderManager {
        Ok(self.db.get_record(room_id, live_id).await?)
    }

    pub async fn get_archive_subtitle(&self, platform: PlatformType, room_id: u64, live_id: &str) -> Result<String, RecorderManagerError> {
    pub async fn get_archive_subtitle(
        &self,
        platform: PlatformType,
        room_id: u64,
        live_id: &str,
    ) -> Result<String, RecorderManagerError> {
        let recorder_id = format!("{}:{}", platform.as_str(), room_id);
        if let Some(recorder_ref) = self.recorders.read().await.get(&recorder_id) {
            let recorder = recorder_ref.as_ref();
@@ -623,7 +628,12 @@ impl RecorderManager {
        }
    }

    pub async fn generate_archive_subtitle(&self, platform: PlatformType, room_id: u64, live_id: &str) -> Result<String, RecorderManagerError> {
    pub async fn generate_archive_subtitle(
        &self,
        platform: PlatformType,
        room_id: u64,
        live_id: &str,
    ) -> Result<String, RecorderManagerError> {
        let recorder_id = format!("{}:{}", platform.as_str(), room_id);
        if let Some(recorder_ref) = self.recorders.read().await.get(&recorder_id) {
            let recorder = recorder_ref.as_ref();

@@ -22,6 +22,11 @@
  "plugins": {
    "sql": {
      "preload": ["sqlite:data_v2.db"]
    },
    "deep-link": {
      "desktop": {
        "schemes": ["bsr"]
      }
    }
  },
  "app": {

@@ -5,12 +5,47 @@
  import Setting from "./page/Setting.svelte";
  import Account from "./page/Account.svelte";
  import About from "./page/About.svelte";
  import { log } from "./lib/invoker";
  import { log, onOpenUrl } from "./lib/invoker";
  import Clip from "./page/Clip.svelte";
  import Task from "./page/Task.svelte";
  import AI from "./page/AI.svelte";
  import { onMount } from "svelte";

  let active = "总览";

  onMount(async () => {
    await onOpenUrl((urls: string[]) => {
      console.log("Received Deep Link:", urls);
      if (urls.length > 0) {
        const url = urls[0];
        // extract platform and room_id from url
        // url example:
        // bsr://live.bilibili.com/167537?live_from=85001&spm_id_from=333.1365.live_users.item.click
        // bsr://live.douyin.com/200525029536

        let platform = "";
        let room_id = "";

        if (url.startsWith("bsr://live.bilibili.com/")) {
          // 1. remove bsr://live.bilibili.com/
          // 2. remove all query params
          room_id = url.replace("bsr://live.bilibili.com/", "").split("?")[0];
          platform = "bilibili";
        }

        if (url.startsWith("bsr://live.douyin.com/")) {
          room_id = url.replace("bsr://live.douyin.com/", "").split("?")[0];
          platform = "douyin";
        }

        if (platform && room_id) {
          // switch to room page
          active = "直播间";
        }
      }
    });
  });

  log.info("App loaded");
</script>

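The same `bsr://` parsing appears inline both in this hunk and again in the room page further below. A minimal standalone sketch of that logic, for reference only (the helper name `parseBsrUrl` is illustrative and does not exist in the repository):

```ts
// Illustrative helper, not part of the codebase: extracts the platform and
// room id from a bsr:// deep link, mirroring the inline logic in the diff above.
function parseBsrUrl(url: string): { platform: string; room_id: string } | null {
  const hosts: Record<string, string> = {
    "bsr://live.bilibili.com/": "bilibili",
    "bsr://live.douyin.com/": "douyin",
  };
  for (const [prefix, platform] of Object.entries(hosts)) {
    if (url.startsWith(prefix)) {
      // strip the scheme/host prefix, then drop any query parameters
      const room_id = url.replace(prefix, "").split("?")[0];
      return { platform, room_id };
    }
  }
  return null; // unrecognized scheme or host
}

// Example: parseBsrUrl("bsr://live.douyin.com/200525029536")
// -> { platform: "douyin", room_id: "200525029536" }
```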
@@ -42,7 +42,9 @@
    }

    // find video in videos
    video = videos.find((v) => v.id === parseInt(videoId));
    let new_video = videos.find((v) => v.id === parseInt(videoId));

    handleVideoChange(new_video);

    // 显示视频预览
    showVideoPreview = true;
@@ -55,6 +57,11 @@
  });

  async function handleVideoChange(newVideo: VideoItem) {
    if (newVideo) {
      // get cover from video
      const cover = await invoke("get_video_cover", { id: newVideo.id });
      newVideo.cover = cover as string;
    }
    video = newVideo;
  }

@@ -142,8 +142,10 @@
  }

  if (TAURI_ENV) {
    console.log("register tauri network plugin");
    shaka.net.NetworkingEngine.registerScheme("http", tauriNetworkPlugin);
    shaka.net.NetworkingEngine.registerScheme("https", tauriNetworkPlugin);
    shaka.net.NetworkingEngine.registerScheme("tauri", tauriNetworkPlugin);
  }

  async function update_stream_list() {

@@ -29,7 +29,6 @@
  import TypeSelect from "./TypeSelect.svelte";
  import { invoke, TAURI_ENV, listen, log, close_window } from "../lib/invoker";
  import { onDestroy, onMount } from "svelte";
  import { getCurrentWebviewWindow } from "@tauri-apps/api/webviewWindow";
  import { listen as tauriListen } from "@tauri-apps/api/event";
  import type { AccountInfo } from "./db";

@@ -18,6 +18,7 @@ export interface RecorderItem {
export interface AccountItem {
  platform: string;
  uid: number;
  id_str?: string; // For platforms like Douyin that use string IDs
  name: string;
  avatar: string;
  csrf: string;

@@ -4,6 +4,7 @@ import { fetch as tauri_fetch } from "@tauri-apps/plugin-http";
import { convertFileSrc as tauri_convert } from "@tauri-apps/api/core";
import { listen as tauri_listen } from "@tauri-apps/api/event";
import { open as tauri_open } from "@tauri-apps/plugin-shell";
import { onOpenUrl as tauri_onOpenUrl } from "@tauri-apps/plugin-deep-link";

declare global {
  interface Window {
@@ -169,6 +170,12 @@ async function close_window() {
  window.close();
}

async function onOpenUrl(func: (urls: string[]) => void) {
  if (TAURI_ENV) {
    return await tauri_onOpenUrl(func);
  }
}

export {
  invoke,
  get,
@@ -180,4 +187,5 @@ export {
  open,
  log,
  close_window,
  onOpenUrl,
};

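The wrapper above simply no-ops outside of Tauri, so pages can register a deep-link handler unconditionally. A small usage sketch under the assumption that the underlying plugin call resolves to an unlisten function (not shown in this diff):

```ts
import { onDestroy, onMount } from "svelte";
import { onOpenUrl } from "../lib/invoker";

let unlisten: (() => void) | undefined;

onMount(async () => {
  // In a plain browser build the wrapper resolves to undefined and nothing is registered.
  unlisten = await onOpenUrl((urls) => {
    console.log("deep link:", urls);
  });
});

onDestroy(() => {
  // Remove the listener if one was registered (assumes the plugin returns an unlisten fn).
  unlisten?.();
});
```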
@@ -622,22 +622,24 @@
            on:click={loadModels}
            disabled={!settings.endpoint || !settings.api_key || isLoadingModels}
          >
            {isLoadingModels ? '加载中...' : '刷新列表'}
            {isLoadingModels ? '加载中...' : '刷新模型列表'}
          </button>
        </div>
        <select
          id="model"
          bind:value={settings.model}
          class="w-full px-4 py-3 border border-gray-300 dark:border-gray-600 rounded-xl bg-white dark:bg-gray-700 text-gray-900 dark:text-white focus:ring-2 focus:ring-blue-500 focus:border-transparent transition-all duration-200"
        >
          {#if availableModels.length === 0}
            <option value="">请先配置 API 并刷新模型列表</option>
          {:else}
        <div class="relative">
          <input
            id="model"
            type="text"
            bind:value={settings.model}
            list="model-options"
            placeholder="输入模型名称或从列表中选择"
            class="w-full px-4 py-3 border border-gray-300 dark:border-gray-600 rounded-xl bg-white dark:bg-gray-700 text-gray-900 dark:text-white placeholder-gray-500 dark:placeholder-gray-400 focus:ring-2 focus:ring-blue-500 focus:border-transparent transition-all duration-200"
          />
          <datalist id="model-options">
            {#each availableModels as model}
              <option value={model.value}>{model.label}</option>
            {/each}
          {/if}
        </select>
          </datalist>
        </div>
      </div>
    </div>

@@ -160,21 +160,16 @@
              <div>
                <div class="flex items-center space-x-2">
                  <h3 class="font-medium text-gray-900 dark:text-white">
                    {account.platform === "bilibili"
                      ? account.name
                      : "抖音账号" + account.uid}
                    {account.name || (account.platform === "bilibili" ? "B站账号" : "抖音账号") + account.uid}
                  </h3>
                </div>
                {#if account.platform === "bilibili"}
                  <p class="text-sm text-gray-600 dark:text-gray-400">
                <p class="text-sm text-gray-600 dark:text-gray-400">
                  {#if account.platform === "bilibili"}
                    UID: {account.uid}
                  </p>
                {/if}
                {#if account.platform === "douyin"}
                  <p class="text-sm text-gray-600 dark:text-gray-400">
                    仅用于获取直播流
                  </p>
                {/if}
                  {:else if account.platform === "douyin"}
                    ID: {account.id_str || account.uid} • 仅用于获取直播流
                  {/if}
                </p>
              </div>
            </div>
            <div class="flex items-center space-x-3">

@@ -1,5 +1,5 @@
<script lang="ts">
  import { invoke, open } from "../lib/invoker";
  import { invoke, open, onOpenUrl } from "../lib/invoker";
  import { message } from "@tauri-apps/plugin-dialog";
  import { fade, scale } from "svelte/transition";
  import { Dropdown, DropdownItem } from "flowbite-svelte";
@@ -15,11 +15,11 @@
    Trash2,
    X,
    History,
    Activity,
  } from "lucide-svelte";
  import BilibiliIcon from "../lib/BilibiliIcon.svelte";
  import DouyinIcon from "../lib/DouyinIcon.svelte";
  import AutoRecordIcon from "../lib/AutoRecordIcon.svelte";
  import { onMount } from "svelte";

  export let room_count = 0;
  let room_active = 0;
@@ -62,13 +62,6 @@
  update_summary();
  setInterval(update_summary, 5000);

  function format_time(time: number) {
    let hours = Math.floor(time / 3600);
    let minutes = Math.floor((time % 3600) / 60);
    let seconds = Math.floor(time % 60);
    return `${hours.toString().padStart(2, "0")}:${minutes.toString().padStart(2, "0")}:${seconds.toString().padStart(2, "0")}`;
  }

  // modals
  let deleteModal = false;
  let deleteRoom = null;
@@ -82,21 +75,30 @@
  let archiveModal = false;
  let archiveRoom = null;
  let archives: RecordItem[] = [];

  async function showArchives(room_id: number) {
    archives = await invoke("get_archives", { roomId: room_id });
    // sort archives by ts in descending order
    archives.sort((a, b) => {
    updateArchives();
    archiveModal = true;
    console.log(archives);
  }

  async function updateArchives() {
    let updated_archives = (await invoke("get_archives", {
      roomId: archiveRoom.room_id,
    })) as RecordItem[];
    updated_archives.sort((a, b) => {
      return (
        new Date(b.created_at).getTime() - new Date(a.created_at).getTime()
      );
    });
    archiveModal = true;
    console.log(archives);
    archives = updated_archives;
  }

  function format_ts(ts_string: string) {
    const date = new Date(ts_string);
    return date.toLocaleString();
  }

  function format_duration(duration: number) {
    const hours = Math.floor(duration / 3600)
      .toString()
@@ -108,6 +110,7 @@

    return `${hours}:${minutes}:${seconds}`;
  }

  function format_size(size: number) {
    if (size < 1024) {
      return `${size} B`;
@@ -119,6 +122,7 @@
      return `${(size / 1024 / 1024 / 1024).toFixed(2)} GiB`;
    }
  }

  function calc_bitrate(size: number, duration: number) {
    return ((size * 8) / duration / 1024).toFixed(0);
  }
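For reference, the size and bitrate helpers above take bytes and seconds; a quick check of the arithmetic with sample inputs (values chosen here only for illustration):

```ts
// Sample values only: a 2 GiB recording that lasted one hour.
const size = 2 * 1024 * 1024 * 1024; // 2147483648 bytes
const duration = 3600; // seconds

// format_size path for sizes >= 1 GiB: 2147483648 / 1024 / 1024 / 1024 = 2.00 -> "2.00 GiB"
console.log((size / 1024 / 1024 / 1024).toFixed(2) + " GiB");

// calc_bitrate: bytes * 8 -> bits, / seconds -> bit/s, / 1024 -> kbps
// 2147483648 * 8 / 3600 / 1024 ≈ 4660 -> "4660" kbps
console.log(((size * 8) / duration / 1024).toFixed(0) + " kbps");
```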
@@ -135,9 +139,6 @@
    }
  }

  // Add toggle state for auto-recording
  let autoRecordStates = new Map<string, boolean>();

  // Function to toggle auto-record state
  function toggleEnabled(room: RecorderInfo) {
    invoke("set_enable", {
@@ -165,6 +166,61 @@
      open("https://live.douyin.com/" + room.room_id);
    }
  }

  function addNewRecorder(room_id: number, platform: string) {
    invoke("add_recorder", {
      roomId: room_id,
      platform: platform,
    })
      .then(() => {
        addModal = false;
        addRoom = "";
      })
      .catch(async (e) => {
        await message(e);
      });
  }

  onMount(async () => {
    await onOpenUrl((urls: string[]) => {
      console.log("Received Deep Link:", urls);
      if (urls.length > 0) {
        const url = urls[0];
        // extract platform and room_id from url
        // url example:
        // bsr://live.bilibili.com/167537?live_from=85001&spm_id_from=333.1365.live_users.item.click
        // bsr://live.douyin.com/200525029536

        let platform = "";
        let room_id = "";

        if (url.startsWith("bsr://live.bilibili.com/")) {
          // 1. remove bsr://live.bilibili.com/
          // 2. remove all query params
          room_id = url.replace("bsr://live.bilibili.com/", "").split("?")[0];
          platform = "bilibili";
        }

        if (url.startsWith("bsr://live.douyin.com/")) {
          room_id = url.replace("bsr://live.douyin.com/", "").split("?")[0];
          platform = "douyin";
        }

        if (platform && room_id) {
          addModal = true;
          addRoom = room_id;
          selectedPlatform = platform;

          if (Number.isInteger(Number(room_id))) {
            addValid = true;
          } else {
            addErrorMsg = "ID格式错误,请检查输入";
            addValid = false;
          }
        }
      }
    });
  });
</script>

<div class="flex-1 p-6 overflow-auto custom-scrollbar-light bg-gray-50">

@@ -515,17 +571,9 @@
              class="px-4 py-2 bg-[#0A84FF] hover:bg-[#0A84FF]/90 text-white text-sm font-medium rounded-lg transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
              disabled={!addValid}
              on:click={() => {
                invoke("add_recorder", {
                  roomId: Number(addRoom),
                  platform: selectedPlatform,
                })
                  .then(() => {
                    addModal = false;
                    addRoom = "";
                  })
                  .catch(async (e) => {
                    await message(e);
                  });
                addNewRecorder(Number(addRoom), selectedPlatform);
                addModal = false;
                addRoom = "";
              }}
            >
              添加
@@ -599,7 +647,7 @@
            </tr>
          </thead>
          <tbody class="divide-y divide-gray-200 dark:divide-gray-700/50">
            {#each archives as archive}
            {#each archives as archive (archive.live_id)}
              <tr
                class="group hover:bg-[#f5f5f7] dark:hover:bg-[#3a3a3c] transition-colors"
              >
@@ -663,9 +711,7 @@
                        liveId: archive.live_id,
                      })
                        .then(async () => {
                          archives = await invoke("get_archives", {
                            roomId: archiveRoom.room_id,
                          });
                          await updateArchives();
                        })
                        .catch((e) => {
                          alert(e);

@@ -37,6 +37,7 @@
    },
    status_check_interval: 30, // 默认30秒
    whisper_language: "",
    user_agent: "",
  };

  let showModal = false;
@@ -182,6 +183,30 @@
        </div>
      </div>
    </div>
    <div class="p-4">
      <div class="flex items-center justify-between">
        <div>
          <h3 class="text-sm font-medium text-gray-900 dark:text-white">
            User-Agent
          </h3>
          <p class="text-sm text-gray-500 dark:text-gray-400">
            当出现风控问题时,可以尝试修改此项来解决,改动需要重启程序才能生效
          </p>
        </div>
        <div class="flex items-center space-x-2">
          <input
            type="text"
            class="px-3 py-2 bg-gray-100 dark:bg-gray-700 rounded-lg border border-gray-200 dark:border-gray-600 text-gray-900 dark:text-white w-96"
            bind:value={setting_model.user_agent}
            on:change={async () => {
              await invoke("update_user_agent", {
                userAgent: setting_model.user_agent,
              });
            }}
          />
        </div>
      </div>
    </div>
  </div>
</div>
<!-- API Server Settings -->
@@ -612,7 +637,7 @@
          Whisper 提示词
        </h3>
        <p class="text-sm text-gray-500 dark:text-gray-400">
          生成字幕时使用的提示词,提示词的含义无意义,只用于设定风格
          生成字幕时使用的提示词,尽量简洁明了,提示音频内容偏向的领域以及字幕的风格
        </p>
      </div>
      <div class="flex items-center space-x-2">

yarn.lock (14 changed lines)

@@ -817,11 +817,16 @@
    svelte-hmr "^0.15.3"
    vitefu "^0.2.4"

"@tauri-apps/api@^2.4.1", "@tauri-apps/api@^2.6.0":
"@tauri-apps/api@^2.6.0":
  version "2.6.0"
  resolved "https://registry.yarnpkg.com/@tauri-apps/api/-/api-2.6.0.tgz#efd873bf04b0d72cea81f9397e16218f5deafe0f"
  integrity sha512-hRNcdercfgpzgFrMXWwNDBN0B7vNzOzRepy6ZAmhxi5mDLVPNrTpo9MGg2tN/F7JRugj4d2aF7E1rtPXAHaetg==

"@tauri-apps/api@^2.6.2":
  version "2.7.0"
  resolved "https://registry.yarnpkg.com/@tauri-apps/api/-/api-2.7.0.tgz#44319e7cd34e898d21cc770961209bd50ac4cefe"
  integrity sha512-v7fVE8jqBl8xJFOcBafDzXFc8FnicoH3j8o8DNNs0tHuEBmXUDqrCOAzMRX0UkfpwqZLqvrvK0GNQ45DfnoVDg==

"@tauri-apps/cli-darwin-arm64@2.6.2":
  version "2.6.2"
  resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-2.6.2.tgz#c69478438cae93dd892ea43d6cf7934a1c7f7839"
@@ -894,6 +899,13 @@
    "@tauri-apps/cli-win32-ia32-msvc" "2.6.2"
    "@tauri-apps/cli-win32-x64-msvc" "2.6.2"

"@tauri-apps/plugin-deep-link@~2":
  version "2.4.1"
  resolved "https://registry.yarnpkg.com/@tauri-apps/plugin-deep-link/-/plugin-deep-link-2.4.1.tgz#2f22d01d3e3795a607a2b31857cf99fb56126701"
  integrity sha512-I8Bo+spcAKGhIIJ1qN/gapp/Ot3mosQL98znxr975Zn2ODAkUZ++BQ9FnTpR7PDwfIl5ANSGdIW/YU01zVTcJw==
  dependencies:
    "@tauri-apps/api" "^2.6.0"

"@tauri-apps/plugin-dialog@~2":
  version "2.3.0"
  resolved "https://registry.yarnpkg.com/@tauri-apps/plugin-dialog/-/plugin-dialog-2.3.0.tgz#123d2cd3d98467b9b115d23ad71eef469d6ead35"