Compare commits
121 Commits
Commit SHA1s (author and date columns were not preserved in this export):

8cb875f449, e6bbe65723, f4a71a2476, 47b9362b0a, c1aad0806e, 4ccc90f9fb, 7dc63440e6, 4094e8b80d, e27cbaf715, 1f39b27d79,
f45891fd95, 18fe644715, 40cde8c69a, 4b0af47906, 9365b3c8cd, 4b9f015ea7, c42d4a084e, 5bb3feb05b, 05f776ed8b, 9cec809485,
429f909152, 084dd23df1, e55afdd739, 72128a132b, 92ca2cddad, 3db0d1dfe5, 57907323e6, dbdca44c5f, fe1dd2201f, e0ae194cc3,
6fc5700457, c4fdcf86d4, 3088500c8d, 861f3a3624, c55783e4d9, 955e284d41, fc4c47427e, e2d7563faa, 27d69f7f8d, a77bb5af44,
00286261a4, 0b898dccaa, a1d9ac4e68, 4150939e23, 8f84b7f063, 04b245ac64, 12f7e62957, 9600d310c7, dec5a2472a, 13eb7c6ea2,
2356cfa10a, 3bfaefb3b0, 78b8c25d96, c1d2ff2b96, 24aee9446a, 2fb094ec31, 53897c66ee, ca4e266ae6, 6612a1e16f, 55ceb65dfb,
6cad3d6afb, 151e1bdb8a, 44a3cfd1ff, 9cbc3028a7, 8c30730d7b, acfb870f9d, 3813528f50, e3bb014644, 76a7afde76, 1184f9f3f5,
b754f8938f, 6b30ff04b7, 1c40acca63, a5a7a8afaf, 583ac13a37, 3e58972072, f15aa27727, 2581014dbd, baaaa1b57e, 160fbb3590,
6f3253678c, 563ad66243, a8d002cc53, 0615410fa4, fc98e065f8, 66f671ffa0, 69a35af456, e462bd0b4c, ae6483427f, ad97677104,
996d15ef25, 06de32ffe7, dd43074e46, 93495e13db, 16950edae4, 4af1203360, 55b5bd1fd2, f0a7cf4ed0, 62e7412abf, 275bf647d2,
00af723be9, 19da577836, bf3a2b469b, bf31bfd099, d02fea99f2, 2404bacb4e, b6c274c181, f9b472aee7, 45f277741b, 94179f59cd,
c7b550a3e3, fd51fd2387, 23d1798ab6, 90e81d0d4d, 6a7a19547d, 1550849ee2, 15116e2197, 63eda5179b, d7b1277363, 337c933b92,
b01b2cc9c0
2
.github/workflows/main.yml
vendored

@@ -57,7 +57,7 @@ jobs:
      - name: Install CUDA toolkit (Windows CUDA only)
        if: matrix.platform == 'windows-latest' && matrix.features == 'cuda'
        uses: Jimver/cuda-toolkit@master
        uses: Jimver/cuda-toolkit@v0.2.24

      - name: Rust cache
        uses: swatinem/rust-cache@v2

5
.gitignore
vendored

@@ -32,4 +32,7 @@ src-tauri/tests/audio/*.srt
.env

docs/.vitepress/cache
docs/.vitepress/dist

*.debug.js
*.debug.map

@@ -1,6 +1,7 @@
[[language]]
name = "rust"
auto-format = true
rulers = []

[[language]]
name = "svelte"

@@ -42,6 +42,7 @@ RUN apt-get update && apt-get install -y \
# Copy Rust project files
COPY src-tauri/Cargo.toml src-tauri/Cargo.lock ./src-tauri/
COPY src-tauri/src ./src-tauri/src
COPY src-tauri/crates ./src-tauri/crates

# Build Rust backend
WORKDIR /app/src-tauri
10
README.md

@@ -4,23 +4,27 @@

[](https://deepwiki.com/Xinrea/bili-shadowreplay)

BiliBili ShadowReplay 是一个缓存直播并进行实时编辑投稿的工具。通过划定时间区间,并编辑简单的必需信息,即可完成直播切片以及投稿,将整个流程压缩到分钟级。同时,也支持对缓存的历史直播进行回放,以及相同的切片编辑投稿处理流程。

目前仅支持 B 站和抖音平台的直播。

[](https://www.star-history.com/#Xinrea/bili-shadowreplay&Date)

## 安装和使用

前往网站查看说明:[BiliBili ShadowReplay](https://bsr.xinrea.cn/)

## 参与开发

[Contributing](.github/CONTRIBUTING.md)
可以通过 [DeepWiki](https://deepwiki.com/Xinrea/bili-shadowreplay) 了解本项目。

贡献指南:[Contributing](.github/CONTRIBUTING.md)

## 赞助
@@ -1,7 +1,8 @@
import { defineConfig } from "vitepress";
import { withMermaid } from "vitepress-plugin-mermaid";

// https://vitepress.dev/reference/site-config
export default defineConfig({
export default withMermaid({
  title: "BiliBili ShadowReplay",
  description: "直播录制/实时回放/剪辑/投稿工具",
  themeConfig: {
@@ -18,21 +19,54 @@ export default defineConfig({
      {
        text: "开始使用",
        items: [
          { text: "安装准备", link: "/getting-started/installation" },
          { text: "配置使用", link: "/getting-started/configuration" },
          { text: "FFmpeg 配置", link: "/getting-started/ffmpeg" },
          {
            text: "安装准备",
            items: [
              {
                text: "桌面端安装",
                link: "/getting-started/installation/desktop",
              },
              {
                text: "Docker 安装",
                link: "/getting-started/installation/docker",
              },
            ],
          },
          {
            text: "配置使用",
            items: [
              { text: "账号配置", link: "/getting-started/config/account" },
              { text: "FFmpeg 配置", link: "/getting-started/config/ffmpeg" },
              { text: "Whisper 配置", link: "/getting-started/config/whisper" },
              { text: "LLM 配置", link: "/getting-started/config/llm" },
            ],
          },
        ],
      },
      {
        text: "说明文档",
        items: [
          { text: "功能说明", link: "/usage/features" },
          {
            text: "功能说明",
            items: [
              { text: "工作流程", link: "/usage/features/workflow" },
              { text: "直播间管理", link: "/usage/features/room" },
              { text: "切片功能", link: "/usage/features/clip" },
              { text: "字幕功能", link: "/usage/features/subtitle" },
              { text: "弹幕功能", link: "/usage/features/danmaku" },
            ],
          },
          { text: "常见问题", link: "/usage/faq" },
        ],
      },
      {
        text: "开发文档",
        items: [{ text: "架构设计", link: "/develop/architecture" }],
        items: [
          {
            text: "DeepWiki",
            link: "https://deepwiki.com/Xinrea/bili-shadowreplay",
          },
        ],
      },
    ],
@@ -1 +0,0 @@
# 架构设计
12
docs/getting-started/config/account.md
Normal file

@@ -0,0 +1,12 @@
# 账号配置

要添加直播间,至少需要配置一个同平台的账号。在账号页面,你可以通过添加账号按钮添加一个账号。

- B 站账号:目前支持扫码登录和 Cookie 手动配置两种方式,推荐使用扫码登录
- 抖音账号:目前仅支持 Cookie 手动配置登录

## 抖音账号配置

1. 确保已经登录抖音,打开[个人主页](https://www.douyin.com/user/self);
2. 右键单击网页,在菜单中选择 `检查(Inspect)` 打开开发者工具,切换到 `网络(Network)` 选项卡;
3. 刷新网页,在请求列表中找到 `self` 请求(一般是列表中第一个),单击该请求;
4. 在 `请求标头` 中找到 `Cookie`,完整复制该字段的值,粘贴到配置页面的 `Cookie` 输入框中,要注意复制完全。
9
docs/getting-started/config/llm.md
Normal file

@@ -0,0 +1,9 @@
# LLM 配置

助手页面的 AI Agent 助手功能需要配置大模型,目前仅支持配置 OpenAI 协议兼容的大模型服务。

本软件并不提供大模型服务,请自行选择服务提供商。要注意,使用 AI Agent 助手需要消耗比普通对话更多的 Token,请确保有足够的 Token 余额。

此外,AI Agent 的功能需要大模型支持 Function Calling 功能,否则无法正常调用工具。
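下面是一个最小的连通性验证示意(假设服务商兼容 OpenAI 协议;其中的接口地址、模型名称与 Key 均为示例占位,请替换为所选服务商提供的实际值):

```bash
# 示意:调用 OpenAI 协议兼容的 Chat Completions 接口,验证服务是否可用
curl https://api.openai.com/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer $YOUR_API_KEY" \
  -d '{
    "model": "gpt-4o",
    "messages": [{"role": "user", "content": "你好"}]
  }'
```

如果返回正常的 JSON 响应,说明该服务可以作为 OpenAI 协议兼容的大模型服务使用;注意 AI Agent 功能还要求所选模型支持 Function Calling。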
35
docs/getting-started/config/whisper.md
Normal file

@@ -0,0 +1,35 @@
# Whisper 配置

要使用 AI 字幕识别功能,需要在设置页面配置 Whisper。目前可以选择使用本地运行 Whisper 模型,或是使用在线的 Whisper 服务(通常需要付费获取 API Key)。

> [!NOTE]
> 其实有许多更好的中文字幕识别解决方案,但是这类服务通常需要将文件上传到对象存储后异步处理,考虑到实现的复杂度,选择了使用本地运行 Whisper 模型或是使用在线的 Whisper 服务,在请求返回时能够直接获取字幕生成结果。

## 本地运行 Whisper 模型

如果需要使用本地运行 Whisper 模型进行字幕生成,需要下载 Whisper.cpp 模型,并在设置中指定模型路径。模型文件可以从网络上下载,例如:

- [Whisper.cpp(国内镜像,内容较旧)](https://www.modelscope.cn/models/cjc1887415157/whisper.cpp/files)
- [Whisper.cpp](https://huggingface.co/ggerganov/whisper.cpp/tree/main)

可以根据自己的需求选择不同的模型,要注意带有 `en` 的模型是英文模型,其他模型为多语言模型。

模型文件的大小通常意味着其在运行时资源占用的大小,因此请根据电脑配置选择合适的模型。此外,GPU 版本与 CPU 版本在字幕生成速度上存在**巨大差异**,因此推荐使用 GPU 版本进行本地处理(目前仅支持 Nvidia GPU)。
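以下是一个下载模型文件的参考示例(以 Hugging Face 上的 `ggml-medium.bin` 为例,模型与保存路径仅为示意,可按需替换为其他模型):

```bash
# 下载 Whisper.cpp 的 medium 多语言模型(约 1.5 GB),保存为 whisper_model.bin
wget https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.bin -O ./whisper_model.bin
```

下载完成后,在设置页面将模型路径指向该文件即可。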
## 使用在线 Whisper 服务

如果需要使用在线的 Whisper 服务进行字幕生成,可以在设置中切换为在线 Whisper,并配置好 API Key。提供 Whisper 服务的平台并非只有 OpenAI 一家,许多云服务平台也提供 Whisper 服务。

## 字幕识别质量的调优

目前在设置中支持设置 Whisper 语言和 Whisper 提示词,这些设置对于本地和在线的 Whisper 服务都有效。

通常情况下,`auto` 语言选项能够自动识别语音语言,并生成相应语言的字幕。如果需要生成其他语言的字幕,或是生成的字幕语言不匹配,可以手动配置指定的语言。根据 OpenAI 官方文档中对于 `language` 参数的描述,目前支持的语言包括:

Afrikaans, Arabic, Armenian, Azerbaijani, Belarusian, Bosnian, Bulgarian, Catalan, Chinese, Croatian, Czech, Danish, Dutch, English, Estonian, Finnish, French, Galician, German, Greek, Hebrew, Hindi, Hungarian, Icelandic, Indonesian, Italian, Japanese, Kannada, Kazakh, Korean, Latvian, Lithuanian, Macedonian, Malay, Marathi, Maori, Nepali, Norwegian, Persian, Polish, Portuguese, Romanian, Russian, Serbian, Slovak, Slovenian, Spanish, Swahili, Swedish, Tagalog, Tamil, Thai, Turkish, Ukrainian, Urdu, Vietnamese, and Welsh.

提示词可以优化生成的字幕的风格(也会一定程度上影响质量)。要注意,Whisper 无法理解复杂的提示词,你可以在提示词中使用一些简单的描述,让其在选择词汇时偏向于提示词所描述领域的相关词汇,以避免出现毫不相干领域的词汇;或是让它在标点符号的使用上参照提示词的风格,例如「以下是一段使用规范标点的简体中文直播内容。」。
@@ -1,21 +0,0 @@
# 配置使用

## 账号配置

要添加直播间,至少需要配置一个同平台的账号。在账号页面,你可以通过添加账号按钮添加一个账号。

- B 站账号:目前支持扫码登录和 Cookie 手动配置两种方式
- 抖音账号:目前仅支持 Cookie 手动配置登陆

## FFmpeg 配置

如果想要使用切片生成和压制功能,请确保 FFmpeg 已正确配置;除了 Windows 平台打包自带 FFfmpeg 以外,其他平台需要手动安装 FFfmpeg,请参考 [FFfmpeg 配置](/getting-started/ffmpeg)。

## Whisper 模型配置

要使用 AI 字幕识别功能,需要在设置页面配置 Whisper 模型路径,模型文件可以从网络上下载,例如:

- [Whisper.cpp(国内镜像,内容较旧)](https://www.modelscope.cn/models/cjc1887415157/whisper.cpp/files)
- [Whisper.cpp](https://huggingface.co/ggerganov/whisper.cpp/tree/main)

可以跟据自己的需求选择不同的模型,要注意带有 `en` 的模型是英文模型,其他模型为多语言模型。
@@ -1,66 +0,0 @@
# 安装准备

## 桌面端安装

桌面端目前提供了 Windows、Linux 和 MacOS 三个平台的安装包。

安装包分为两个版本,普通版和 debug 版,普通版适合大部分用户使用,debug 版包含了更多的调试信息,适合开发者使用;由于程序会对账号等敏感信息进行管理,请从信任的来源进行下载;所有版本均可在 [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) 页面下载安装。

### Windows

由于程序内置 Whisper 字幕识别模型支持,Windows 版本分为两种:

- **普通版本**:内置了 Whisper GPU 加速,字幕识别较快,体积较大,只支持 Nvidia 显卡
- **CPU 版本**: 使用 CPU 进行字幕识别推理,速度较慢

请根据自己的显卡情况选择合适的版本进行下载。

### Linux

Linux 版本目前仅支持使用 CPU 推理,且测试较少,可能存在一些问题,遇到问题请及时反馈。

### MacOS

MacOS 版本内置 Metal GPU 加速;安装后首次运行,会提示无法打开从网络下载的软件,请在设置-隐私与安全性下,选择仍然打开以允许程序运行。

## Docker 部署

BiliBili ShadowReplay 提供了服务端部署的能力,提供 Web 控制界面,可以用于在服务器等无图形界面环境下部署使用。

### 镜像获取

```bash
# 拉取最新版本
docker pull ghcr.io/xinrea/bili-shadowreplay:latest
# 拉取指定版本
docker pull ghcr.io/xinrea/bili-shadowreplay:2.5.0
# 速度太慢?从镜像源拉取
docker pull ghcr.nju.edu.cn/xinrea/bili-shadowreplay:latest
```

### 镜像使用

使用方法:

```bash
sudo docker run -it -d\
  -p 3000:3000 \
  -v $DATA_DIR:/app/data \
  -v $CACHE_DIR:/app/cache \
  -v $OUTPUT_DIR:/app/output \
  -v $WHISPER_MODEL:/app/whisper_model.bin \
  --name bili-shadowreplay \
  ghcr.io/xinrea/bili-shadowreplay:latest
```

其中:

- `$DATA_DIR`:为数据目录,对应于桌面版的数据目录,

  Windows 下位于 `C:\Users\{用户名}\AppData\Roaming\cn.vjoi.bilishadowreplay`;

  MacOS 下位于 `/Users/{user}/Library/Application Support/cn.vjoi.bilishadowreplay`

- `$CACHE_DIR`:为缓存目录,对应于桌面版的缓存目录;
- `$OUTPUT_DIR`:为输出目录,对应于桌面版的输出目录;
- `$WHISPER_MODEL`:为 Whisper 模型文件路径,对应于桌面版的 Whisper 模型文件路径。
22
docs/getting-started/installation/desktop.md
Normal file

@@ -0,0 +1,22 @@
# 桌面端安装

桌面端目前提供了 Windows、Linux 和 MacOS 三个平台的安装包。

安装包分为两个版本,普通版和 debug 版,普通版适合大部分用户使用,debug 版包含了更多的调试信息,适合开发者使用;由于程序会对账号等敏感信息进行管理,请从信任的来源进行下载;所有版本均可在 [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) 页面下载安装。

## Windows

由于程序内置 Whisper 字幕识别模型支持,Windows 版本分为两种:

- **普通版本**:内置了 Whisper GPU 加速,字幕识别较快,体积较大,只支持 Nvidia 显卡
- **CPU 版本**:使用 CPU 进行字幕识别推理,速度较慢

请根据自己的显卡情况选择合适的版本进行下载。

## Linux

Linux 版本目前仅支持使用 CPU 推理,且测试较少,可能存在一些问题,遇到问题请及时反馈。

## MacOS

MacOS 版本内置 Metal GPU 加速;安装后首次运行,会提示无法打开从网络下载的软件,请在设置-隐私与安全性下,选择仍然打开以允许程序运行。
41
docs/getting-started/installation/docker.md
Normal file

@@ -0,0 +1,41 @@
# Docker 部署

BiliBili ShadowReplay 提供了服务端部署的能力,提供 Web 控制界面,可以用于在服务器等无图形界面环境下部署使用。

## 镜像获取

```bash
# 拉取最新版本
docker pull ghcr.io/xinrea/bili-shadowreplay:latest
# 拉取指定版本
docker pull ghcr.io/xinrea/bili-shadowreplay:2.5.0
# 速度太慢?从镜像源拉取
docker pull ghcr.nju.edu.cn/xinrea/bili-shadowreplay:latest
```

## 镜像使用

使用方法:

```bash
sudo docker run -it -d \
  -p 3000:3000 \
  -v $DATA_DIR:/app/data \
  -v $CACHE_DIR:/app/cache \
  -v $OUTPUT_DIR:/app/output \
  -v $WHISPER_MODEL:/app/whisper_model.bin \
  --name bili-shadowreplay \
  ghcr.io/xinrea/bili-shadowreplay:latest
```

其中:

- `$DATA_DIR`:为数据目录,对应于桌面版的数据目录,

  Windows 下位于 `C:\Users\{用户名}\AppData\Roaming\cn.vjoi.bilishadowreplay`;

  MacOS 下位于 `/Users/{user}/Library/Application Support/cn.vjoi.bilishadowreplay`

- `$CACHE_DIR`:为缓存目录,对应于桌面版的缓存目录;
- `$OUTPUT_DIR`:为输出目录,对应于桌面版的输出目录;
- `$WHISPER_MODEL`:为 Whisper 模型文件路径,对应于桌面版的 Whisper 模型文件路径。
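下面是一个填入具体路径的启动示意(目录路径与模型文件位置仅为示例,请替换为实际环境中的路径):

```bash
# 示意:先定义宿主机目录,再挂载到容器内对应位置
DATA_DIR=/srv/bsr/data
CACHE_DIR=/srv/bsr/cache
OUTPUT_DIR=/srv/bsr/output
WHISPER_MODEL=/srv/bsr/whisper_model.bin

sudo docker run -it -d \
  -p 3000:3000 \
  -v $DATA_DIR:/app/data \
  -v $CACHE_DIR:/app/cache \
  -v $OUTPUT_DIR:/app/output \
  -v $WHISPER_MODEL:/app/whisper_model.bin \
  --name bili-shadowreplay \
  ghcr.io/xinrea/bili-shadowreplay:latest
```

启动后,通常即可通过 `http://服务器地址:3000` 访问 Web 控制界面。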
@@ -11,10 +11,10 @@ hero:
  actions:
    - theme: brand
      text: 开始使用
      link: /getting-started/installation
      link: /getting-started/installation/desktop
    - theme: alt
      text: 说明文档
      link: /usage/features
      link: /usage/features/workflow

features:
  - icon: 📹
@@ -38,9 +38,9 @@ features:
  - icon: 🔍
    title: 云端部署
    details: 支持 Docker 部署,提供 Web 控制界面
  - icon: 📦
    title: 多平台支持
    details: 桌面端支持 Windows/Linux/macOS
  - icon: 🤖
    title: AI Agent 支持
    details: 支持 AI 助手管理录播,分析直播内容,生成切片
---

## 总览
@@ -63,7 +63,7 @@

## 封面编辑

## 设置
Modified image: 555 KiB → 195 KiB
BIN docs/public/images/ai_agent.png (Normal file, 261 KiB)
Modified image: 1.2 MiB → 434 KiB
BIN docs/public/images/clip_manage.png (Normal file, 234 KiB)
BIN docs/public/images/clip_preview.png (Normal file, 2.3 MiB)
BIN docs/public/images/cover_edit.png (Normal file, 2.1 MiB)
Removed image: 2.9 MiB
BIN docs/public/images/douyin_cookie.png (Normal file, 548 KiB)
Modified image: 2.8 MiB → 2.1 MiB
BIN docs/public/images/model_config.png (Normal file, 383 KiB)
Modified image: 1.9 MiB → 949 KiB
Modified image: 622 KiB → 244 KiB
Modified image: 721 KiB → 372 KiB
BIN docs/public/images/tasks.png (Normal file, 201 KiB)
BIN docs/public/images/whisper_local.png (Normal file, 194 KiB)
BIN docs/public/images/whisper_online.png (Normal file, 199 KiB)
BIN docs/public/images/whole_clip.png (Normal file, 67 KiB)
BIN docs/public/videos/deeplinking.mp4 (Normal file)
BIN docs/public/videos/room_remove.mp4 (Normal file)
@@ -0,0 +1,31 @@
# 常见问题

## 一、在哪里反馈问题?

你可以前往 [Github Issues](https://github.com/Xinrea/bili-shadowreplay/issues/new?template=bug_report.md) 提交问题,或是加入[反馈交流群](https://qm.qq.com/q/v4lrE6gyum)。

1. 在提交问题前,请先阅读其它常见问题,确保你的问题已有解答;
2. 其次,请确保你的程序已更新到最新版本;
3. 最后,你应准备好提供你的程序日志文件,以便更好地定位问题。

## 二、在哪里查看日志?

在主窗口的设置页面,提供了一键打开日志目录所在位置的按钮。当你打开日志目录所在位置后,进入 `logs` 目录,找到后缀名为 `log` 的文件,这便是你需要提供给开发者的日志文件。

## 三、无法预览直播或是生成切片

如果你是 macOS 或 Linux 用户,请确保你已安装了 `ffmpeg` 和 `ffprobe` 工具;如果不知道如何安装,请参考 [FFmpeg 配置](/getting-started/config/ffmpeg)。

如果你是 Windows 用户,程序目录下应当自带了 `ffmpeg` 和 `ffprobe` 工具,如果无法预览直播或是生成切片,请向开发者反馈。
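下面给出常见平台安装 FFmpeg(包含 ffprobe)的参考命令(仅为示例,具体以所用系统的包管理器为准):

```bash
# macOS(Homebrew)
brew install ffmpeg

# Debian / Ubuntu
sudo apt install ffmpeg
```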
## 四、添加 B 站直播间出现 -352 错误

`-352` 错误是由 B 站风控机制导致的,如果你添加了大量的 B 站直播间进行录制,可以在设置页面调整直播间状态的检查间隔,尽量避免风控;如果你在直播间数量较少的情况下出现该错误,请向开发者反馈。

## 五、录播为什么都是碎片文件?

缓存目录下的录播文件并非用于直接播放或是投稿,而是用于直播流的预览与实时回放。如果你需要录播文件用于投稿,请打开对应录播的预览界面,使用快捷键创建选区,生成所需范围的切片,切片文件为常规的 mp4 文件,位于你所设置的切片目录下。

如果你将 BSR 作为单纯的录播软件使用,在设置中可以开启`整场录播生成`,这样在直播结束后,BSR 会自动生成整场录播的切片。
1
docs/usage/features/clip.md
Normal file

@@ -0,0 +1 @@
# 切片

1
docs/usage/features/danmaku.md
Normal file

@@ -0,0 +1 @@
# 弹幕
38
docs/usage/features/room.md
Normal file

@@ -0,0 +1,38 @@
# 直播间

> [!WARNING]
> 在添加管理直播间前,请确保账号列表中有对应平台的可用账号。

## 添加直播间

### 手动添加直播间

你可以在 BSR 直播间页面,点击按钮手动添加直播间。你需要选择平台,并输入直播间号。

直播间号通常是直播间网页地址末尾的一串数字,例如 `https://live.bilibili.com/123456` 中的 `123456`,或是 `https://live.douyin.com/123456` 中的 `123456`。

抖音直播间比较特殊,未开播时无法找到直播间的入口,因此你需要在直播间开播时找到直播间网页地址,并记录其直播间号。

此外,抖音直播间还需要输入主播的 sec_uid,你可以在主播主页的 URL 中找到,例如 `https://www.douyin.com/user/MS4wLjABAAAA` 中的 `MS4wLjABAAAA`。

### 使用 DeepLinking 快速添加直播间

<video src="/videos/deeplinking.mp4" loop autoplay muted style="border-radius: 10px;"></video>

在浏览器中观看直播时,将地址栏中直播间地址的 `https://` 替换为 `bsr://`,即可快速唤起 BSR 添加直播间;例如将 `https://live.bilibili.com/123456` 改为 `bsr://live.bilibili.com/123456`。

## 启用/禁用直播间

你可以点击直播间卡片右上角的菜单按钮,选择启用/禁用直播间。

- 启用后,当直播间开播时,会自动开始录制
- 禁用后,当直播间开播时,不会自动开始录制

## 移除直播间

> [!CAUTION]
> 移除直播间后,该直播间相关的所有录播都会被删除,请谨慎操作。

你可以点击直播间卡片右上角的菜单按钮,选择移除直播间。

<video src="/videos/room_remove.mp4" loop autoplay muted style="border-radius: 10px;"></video>
1
docs/usage/features/subtitle.md
Normal file

@@ -0,0 +1 @@
# 字幕
30
docs/usage/features/workflow.md
Normal file

@@ -0,0 +1,30 @@
# 工作流程

- 直播间:各个平台的直播间
- 录播:直播流的存档,每次录制会自动生成一场录播记录
- 切片:从直播流中剪切生成的视频片段
- 投稿:将切片上传到各个平台(目前仅支持 Bilibili)

下图展示了它们之间的关系:

```mermaid
flowchart TD
    A[直播间] -->|录制| B[录播 01]
    A -->|录制| C[录播 02]
    A -->|录制| E[录播 N]

    B --> F[直播流预览窗口]

    F -->|区间生成| G[切片 01]
    F -->|区间生成| H[切片 02]
    F -->|区间生成| I[切片 N]

    G --> J[切片预览窗口]

    J -->|字幕压制| K[新切片]

    K --> J

    J -->|投稿| L[Bilibili]
```
13
index_clip.html
Normal file
@@ -0,0 +1,13 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="zh-cn">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>切片窗口</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<script type="module" src="/src/main_clip.ts"></script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -8,15 +8,20 @@
|
||||
<link rel="stylesheet" href="shaka-player/youtube-theme.css" />
|
||||
<script src="shaka-player/shaka-player.ui.js"></script>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<script type="module" src="src/live_main.ts"></script>
|
||||
<script type="module" src="src/main_live.ts"></script>
|
||||
<style>
|
||||
input[type="range"]::-webkit-slider-thumb {
|
||||
width: 12px; /* 设置滑块按钮宽度 */
|
||||
height: 12px; /* 设置滑块按钮高度 */
|
||||
border-radius: 50%; /* 设置为圆形 */
|
||||
width: 12px;
|
||||
/* 设置滑块按钮宽度 */
|
||||
height: 12px;
|
||||
/* 设置滑块按钮高度 */
|
||||
border-radius: 50%;
|
||||
/* 设置为圆形 */
|
||||
}
|
||||
|
||||
html {
|
||||
scrollbar-face-color: #646464;
|
||||
scrollbar-base-color: #646464;
|
||||
@@ -31,20 +36,25 @@
|
||||
width: 8px;
|
||||
height: 3px;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-button {
|
||||
background-color: #666;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-track {
|
||||
background-color: #646464;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-track-piece {
|
||||
background-color: #000;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-thumb {
|
||||
height: 50px;
|
||||
background-color: #666;
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-corner {
|
||||
background-color: #646464;
|
||||
}
|
||||
19
package.json
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "bili-shadowreplay",
|
||||
"private": true,
|
||||
"version": "2.5.3",
|
||||
"version": "2.10.6",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
@@ -11,10 +11,16 @@
|
||||
"tauri": "tauri",
|
||||
"docs:dev": "vitepress dev docs",
|
||||
"docs:build": "vitepress build docs",
|
||||
"docs:preview": "vitepress preview docs"
|
||||
"docs:preview": "vitepress preview docs",
|
||||
"bump": "node scripts/bump.cjs"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tauri-apps/api": "^2.4.1",
|
||||
"@langchain/core": "^0.3.64",
|
||||
"@langchain/deepseek": "^0.1.0",
|
||||
"@langchain/langgraph": "^0.3.10",
|
||||
"@langchain/ollama": "^0.2.3",
|
||||
"@tauri-apps/api": "^2.6.2",
|
||||
"@tauri-apps/plugin-deep-link": "~2",
|
||||
"@tauri-apps/plugin-dialog": "~2",
|
||||
"@tauri-apps/plugin-fs": "~2",
|
||||
"@tauri-apps/plugin-http": "~2",
|
||||
@@ -23,6 +29,7 @@
|
||||
"@tauri-apps/plugin-shell": "~2",
|
||||
"@tauri-apps/plugin-sql": "~2",
|
||||
"lucide-svelte": "^0.479.0",
|
||||
"marked": "^16.1.1",
|
||||
"qrcode": "^1.5.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -35,6 +42,7 @@
|
||||
"flowbite": "^2.5.1",
|
||||
"flowbite-svelte": "^0.46.16",
|
||||
"flowbite-svelte-icons": "^1.6.1",
|
||||
"mermaid": "^11.9.0",
|
||||
"postcss": "^8.4.21",
|
||||
"svelte": "^3.54.0",
|
||||
"svelte-check": "^3.0.0",
|
||||
@@ -44,6 +52,7 @@
|
||||
"tslib": "^2.4.1",
|
||||
"typescript": "^4.6.4",
|
||||
"vite": "^4.0.0",
|
||||
"vitepress": "^1.6.3"
|
||||
"vitepress": "^1.6.3",
|
||||
"vitepress-plugin-mermaid": "^2.0.17"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
58
scripts/bump.cjs
Normal file
@@ -0,0 +1,58 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
|
||||
function updatePackageJson(version) {
|
||||
const packageJsonPath = path.join(process.cwd(), "package.json");
|
||||
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
|
||||
packageJson.version = version;
|
||||
fs.writeFileSync(
|
||||
packageJsonPath,
|
||||
JSON.stringify(packageJson, null, 2) + "\n"
|
||||
);
|
||||
console.log(`✅ Updated package.json version to ${version}`);
|
||||
}
|
||||
|
||||
function updateCargoToml(version) {
|
||||
const cargoTomlPath = path.join(process.cwd(), "src-tauri", "Cargo.toml");
|
||||
let cargoToml = fs.readFileSync(cargoTomlPath, "utf8");
|
||||
|
||||
// Update the version in the [package] section
|
||||
cargoToml = cargoToml.replace(/^version = ".*"$/m, `version = "${version}"`);
|
||||
|
||||
fs.writeFileSync(cargoTomlPath, cargoToml);
|
||||
console.log(`✅ Updated Cargo.toml version to ${version}`);
|
||||
}
|
||||
|
||||
function main() {
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
if (args.length === 0) {
|
||||
console.error("❌ Please provide a version number");
|
||||
console.error("Usage: yarn bump <version>");
|
||||
console.error("Example: yarn bump 3.1.0");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const version = args[0];
|
||||
|
||||
// Validate version format (simple check)
|
||||
if (!/^\d+\.\d+\.\d+/.test(version)) {
|
||||
console.error(
|
||||
"❌ Invalid version format. Please use semantic versioning (e.g., 3.1.0)"
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
try {
|
||||
updatePackageJson(version);
|
||||
updateCargoToml(version);
|
||||
console.log(`🎉 Successfully bumped version to ${version}`);
|
||||
} catch (error) {
|
||||
console.error("❌ Error updating version:", error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
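A minimal usage sketch for the `bump` script wired up in package.json above (the version number here is only an example):

```bash
# updates the version field in both package.json and src-tauri/Cargo.toml
yarn bump 2.10.6
```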
2393
src-tauri/Cargo.lock
generated (diff not shown)

src-tauri/Cargo.toml
@@ -1,6 +1,10 @@
|
||||
[workspace]
|
||||
members = ["crates/danmu_stream"]
|
||||
resolver = "2"
|
||||
|
||||
[package]
|
||||
name = "bili-shadowreplay"
|
||||
version = "1.0.0"
|
||||
version = "2.10.6"
|
||||
description = "BiliBili ShadowReplay"
|
||||
authors = ["Xinrea"]
|
||||
license = ""
|
||||
@@ -10,8 +14,9 @@ edition = "2021"
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
danmu_stream = { path = "crates/danmu_stream" }
|
||||
serde_json = "1.0"
|
||||
reqwest = { version = "0.11", features = ["blocking", "json"] }
|
||||
reqwest = { version = "0.11", features = ["blocking", "json", "multipart"] }
|
||||
serde_derive = "1.0.158"
|
||||
serde = "1.0.158"
|
||||
sysinfo = "0.32.0"
|
||||
@@ -21,7 +26,6 @@ async-ffmpeg-sidecar = "0.0.1"
|
||||
chrono = { version = "0.4.24", features = ["serde"] }
|
||||
toml = "0.7.3"
|
||||
custom_error = "1.9.2"
|
||||
felgens = { git = "https://github.com/Xinrea/felgens.git", tag = "v0.4.3" }
|
||||
regex = "1.7.3"
|
||||
tokio = { version = "1.27.0", features = ["process"] }
|
||||
platform-dirs = "0.3.0"
|
||||
@@ -41,12 +45,13 @@ whisper-rs = "0.14.2"
|
||||
hound = "3.5.1"
|
||||
uuid = { version = "1.4", features = ["v4"] }
|
||||
axum = { version = "0.7", features = ["macros"] }
|
||||
tower-http = { version = "0.5", features = ["cors", "fs", "limit"] }
|
||||
tower-http = { version = "0.5", features = ["cors", "fs"] }
|
||||
futures-core = "0.3"
|
||||
futures = "0.3"
|
||||
tokio-util = { version = "0.7", features = ["io"] }
|
||||
clap = { version = "4.5.37", features = ["derive"] }
|
||||
url = "2.5.4"
|
||||
srtparse = "0.2.0"
|
||||
|
||||
[features]
|
||||
# this feature is used for production builds or when `devPath` points to the filesystem
|
||||
@@ -66,6 +71,7 @@ gui = [
|
||||
"tauri-utils",
|
||||
"tauri-plugin-os",
|
||||
"tauri-plugin-notification",
|
||||
"tauri-plugin-deep-link",
|
||||
"fix-path-env",
|
||||
"tauri-build",
|
||||
]
|
||||
@@ -78,6 +84,7 @@ optional = true
|
||||
[dependencies.tauri-plugin-single-instance]
|
||||
version = "2"
|
||||
optional = true
|
||||
features = ["deep-link"]
|
||||
|
||||
[dependencies.tauri-plugin-dialog]
|
||||
version = "2"
|
||||
@@ -112,6 +119,10 @@ optional = true
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.tauri-plugin-deep-link]
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.fix-path-env]
|
||||
git = "https://github.com/tauri-apps/fix-path-env-rs"
|
||||
optional = true
|
||||
|
||||
@@ -4,7 +4,8 @@
|
||||
"local": true,
|
||||
"windows": [
|
||||
"main",
|
||||
"Live*"
|
||||
"Live*",
|
||||
"Clip*"
|
||||
],
|
||||
"permissions": [
|
||||
"core:default",
|
||||
@@ -70,6 +71,7 @@
|
||||
"shell:default",
|
||||
"sql:default",
|
||||
"os:default",
|
||||
"dialog:default"
|
||||
"dialog:default",
|
||||
"deep-link:default"
|
||||
]
|
||||
}
|
||||
@@ -5,8 +5,10 @@ live_end_notify = true
|
||||
clip_notify = true
|
||||
post_notify = true
|
||||
auto_subtitle = false
|
||||
subtitle_generator_type = "whisper_online"
|
||||
whisper_model = "./whisper_model.bin"
|
||||
whisper_prompt = "这是一段中文 你们好"
|
||||
openai_api_key = ""
|
||||
clip_name_format = "[{room_id}][{live_id}][{title}][{created_at}].mp4"
|
||||
|
||||
[auto_generate]
|
||||
|
||||
44
src-tauri/crates/danmu_stream/Cargo.toml
Normal file
@@ -0,0 +1,44 @@
|
||||
[package]
|
||||
name = "danmu_stream"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "danmu_stream"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[[example]]
|
||||
name = "douyin"
|
||||
path = "examples/douyin.rs"
|
||||
|
||||
[dependencies]
|
||||
tokio = { version = "1.0", features = ["full"] }
|
||||
tokio-tungstenite = { version = "0.20", features = ["native-tls"] }
|
||||
futures-util = "0.3"
|
||||
prost = "0.12"
|
||||
chrono = "0.4"
|
||||
log = "0.4"
|
||||
env_logger = "0.10"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
reqwest = { version = "0.11", features = ["json"] }
|
||||
url = "2.4"
|
||||
md5 = "0.7"
|
||||
regex = "1.9"
|
||||
deno_core = "0.242.0"
|
||||
pct-str = "2.0.0"
|
||||
custom_error = "1.9.2"
|
||||
flate2 = "1.0"
|
||||
scroll = "0.13.0"
|
||||
scroll_derive = "0.13.0"
|
||||
brotli = "8.0.1"
|
||||
http = "1.0"
|
||||
rand = "0.9.1"
|
||||
urlencoding = "2.1.3"
|
||||
gzip = "0.1.2"
|
||||
hex = "0.4.3"
|
||||
async-trait = "0.1.88"
|
||||
uuid = "1.17.0"
|
||||
|
||||
[build-dependencies]
|
||||
tonic-build = "0.10"
|
||||
41
src-tauri/crates/danmu_stream/examples/bilibili.rs
Normal file
@@ -0,0 +1,41 @@
|
||||
use std::{sync::Arc, time::Duration};
|
||||
|
||||
use danmu_stream::{danmu_stream::DanmuStream, provider::ProviderType, DanmuMessageType};
|
||||
use tokio::time::sleep;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Initialize logging
|
||||
env_logger::init();
|
||||
// Replace these with actual values
|
||||
let room_id = 768756;
|
||||
let cookie = "";
|
||||
let stream = Arc::new(DanmuStream::new(ProviderType::BiliBili, cookie, room_id).await?);
|
||||
|
||||
log::info!("Start to receive danmu messages: {}", cookie);
|
||||
|
||||
let stream_clone = stream.clone();
|
||||
tokio::spawn(async move {
|
||||
loop {
|
||||
log::info!("Waitting for message");
|
||||
if let Ok(Some(msg)) = stream_clone.recv().await {
|
||||
match msg {
|
||||
DanmuMessageType::DanmuMessage(danmu) => {
|
||||
log::info!("Received danmu message: {:?}", danmu.message);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::info!("Channel closed");
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let _ = stream.start().await;
|
||||
|
||||
sleep(Duration::from_secs(10)).await;
|
||||
|
||||
stream.stop().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
40
src-tauri/crates/danmu_stream/examples/douyin.rs
Normal file
@@ -0,0 +1,40 @@
|
||||
use std::{sync::Arc, time::Duration};
|
||||
|
||||
use danmu_stream::{danmu_stream::DanmuStream, provider::ProviderType, DanmuMessageType};
|
||||
use tokio::time::sleep;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Initialize logging
|
||||
env_logger::init();
|
||||
// Replace these with actual values
|
||||
let room_id = 7514298567821937427; // Replace with actual Douyin room_id. When live starts, the room_id will be generated, so it's more like a live_id.
|
||||
let cookie = "your_cookie";
|
||||
let stream = Arc::new(DanmuStream::new(ProviderType::Douyin, cookie, room_id).await?);
|
||||
|
||||
log::info!("Start to receive danmu messages");
|
||||
|
||||
let _ = stream.start().await;
|
||||
|
||||
let stream_clone = stream.clone();
|
||||
tokio::spawn(async move {
|
||||
loop {
|
||||
if let Ok(Some(msg)) = stream_clone.recv().await {
|
||||
match msg {
|
||||
DanmuMessageType::DanmuMessage(danmu) => {
|
||||
log::info!("Received danmu message: {:?}", danmu.message);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::info!("Channel closed");
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
sleep(Duration::from_secs(10)).await;
|
||||
|
||||
stream.stop().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
51
src-tauri/crates/danmu_stream/src/danmu_stream.rs
Normal file
@@ -0,0 +1,51 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::{
|
||||
provider::{new, DanmuProvider, ProviderType},
|
||||
DanmuMessageType, DanmuStreamError,
|
||||
};
|
||||
use tokio::sync::{mpsc, RwLock};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DanmuStream {
|
||||
pub provider_type: ProviderType,
|
||||
pub identifier: String,
|
||||
pub room_id: u64,
|
||||
pub provider: Arc<RwLock<Box<dyn DanmuProvider>>>,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
rx: Arc<RwLock<mpsc::UnboundedReceiver<DanmuMessageType>>>,
|
||||
}
|
||||
|
||||
impl DanmuStream {
|
||||
pub async fn new(
|
||||
provider_type: ProviderType,
|
||||
identifier: &str,
|
||||
room_id: u64,
|
||||
) -> Result<Self, DanmuStreamError> {
|
||||
let (tx, rx) = mpsc::unbounded_channel();
|
||||
let provider = new(provider_type, identifier, room_id).await?;
|
||||
Ok(Self {
|
||||
provider_type,
|
||||
identifier: identifier.to_string(),
|
||||
room_id,
|
||||
provider: Arc::new(RwLock::new(provider)),
|
||||
tx,
|
||||
rx: Arc::new(RwLock::new(rx)),
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn start(&self) -> Result<(), DanmuStreamError> {
|
||||
self.provider.write().await.start(self.tx.clone()).await
|
||||
}
|
||||
|
||||
pub async fn stop(&self) -> Result<(), DanmuStreamError> {
|
||||
self.provider.write().await.stop().await?;
|
||||
// close channel
|
||||
self.rx.write().await.close();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn recv(&self) -> Result<Option<DanmuMessageType>, DanmuStreamError> {
|
||||
Ok(self.rx.write().await.recv().await)
|
||||
}
|
||||
}
|
||||
51
src-tauri/crates/danmu_stream/src/http_client.rs
Normal file
@@ -0,0 +1,51 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
use reqwest::header::HeaderMap;
|
||||
|
||||
impl From<reqwest::Error> for DanmuStreamError {
|
||||
fn from(value: reqwest::Error) -> Self {
|
||||
Self::HttpError { err: value }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<url::ParseError> for DanmuStreamError {
|
||||
fn from(value: url::ParseError) -> Self {
|
||||
Self::ParseError { err: value }
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ApiClient {
|
||||
client: reqwest::Client,
|
||||
header: HeaderMap,
|
||||
}
|
||||
|
||||
impl ApiClient {
|
||||
pub fn new(cookies: &str) -> Self {
|
||||
let mut header = HeaderMap::new();
|
||||
header.insert("cookie", cookies.parse().unwrap());
|
||||
|
||||
Self {
|
||||
client: reqwest::Client::new(),
|
||||
header,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get(
|
||||
&self,
|
||||
url: &str,
|
||||
query: Option<&[(&str, &str)]>,
|
||||
) -> Result<reqwest::Response, DanmuStreamError> {
|
||||
let resp = self
|
||||
.client
|
||||
.get(url)
|
||||
.query(query.unwrap_or_default())
|
||||
.headers(self.header.clone())
|
||||
.timeout(Duration::from_secs(10))
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?;
|
||||
|
||||
Ok(resp)
|
||||
}
|
||||
}
|
||||
31
src-tauri/crates/danmu_stream/src/lib.rs
Normal file
@@ -0,0 +1,31 @@
|
||||
pub mod danmu_stream;
|
||||
mod http_client;
|
||||
pub mod provider;
|
||||
|
||||
use custom_error::custom_error;
|
||||
|
||||
custom_error! {pub DanmuStreamError
|
||||
HttpError {err: reqwest::Error} = "HttpError {err}",
|
||||
ParseError {err: url::ParseError} = "ParseError {err}",
|
||||
WebsocketError {err: String } = "WebsocketError {err}",
|
||||
PackError {err: String} = "PackError {err}",
|
||||
UnsupportProto {proto: u16} = "UnsupportProto {proto}",
|
||||
MessageParseError {err: String} = "MessageParseError {err}",
|
||||
InvalidIdentifier {err: String} = "InvalidIdentifier {err}"
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum DanmuMessageType {
|
||||
DanmuMessage(DanmuMessage),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DanmuMessage {
|
||||
pub room_id: u64,
|
||||
pub user_id: u64,
|
||||
pub user_name: String,
|
||||
pub message: String,
|
||||
pub color: u32,
|
||||
/// timestamp in milliseconds
|
||||
pub timestamp: i64,
|
||||
}
|
||||
72
src-tauri/crates/danmu_stream/src/provider.rs
Normal file
@@ -0,0 +1,72 @@
|
||||
mod bilibili;
|
||||
mod douyin;
|
||||
|
||||
use async_trait::async_trait;
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
use crate::{
|
||||
provider::bilibili::BiliDanmu, provider::douyin::DouyinDanmu, DanmuMessageType,
|
||||
DanmuStreamError,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum ProviderType {
|
||||
BiliBili,
|
||||
Douyin,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait DanmuProvider: Send + Sync {
|
||||
async fn new(identifier: &str, room_id: u64) -> Result<Self, DanmuStreamError>
|
||||
where
|
||||
Self: Sized;
|
||||
|
||||
async fn start(
|
||||
&self,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError>;
|
||||
|
||||
async fn stop(&self) -> Result<(), DanmuStreamError>;
|
||||
}
|
||||
|
||||
/// Creates a new danmu stream provider for the specified platform.
///
/// This function initializes a danmu provider for the specified platform type. Once started,
/// the provider fetches danmu messages and sends them through the channel passed to `start`.
///
/// # Arguments
///
/// * `provider_type` - The platform to fetch danmu from (BiliBili or Douyin)
/// * `identifier` - User validation information (e.g., cookies) required by the platform
/// * `room_id` - The unique identifier of the room/channel to fetch danmu from. Notice that a Douyin room_id is more like a live_id: it changes every time the live starts.
///
/// # Returns
///
/// Returns `Result<Box<dyn DanmuProvider>, DanmuStreamError>` where:
/// * `Ok(provider)` is the initialized provider; call `start` on it to begin receiving messages (it only returns after disconnect)
/// * `Err(DanmuStreamError)` indicates an error occurred during initialization
///
/// # Examples
///
/// ```rust
/// use tokio::sync::mpsc;
/// let (tx, mut rx) = mpsc::unbounded_channel();
/// let provider = new(ProviderType::BiliBili, "your_cookie", 123456).await?;
/// provider.start(tx).await?;
/// ```
pub async fn new(
|
||||
provider_type: ProviderType,
|
||||
identifier: &str,
|
||||
room_id: u64,
|
||||
) -> Result<Box<dyn DanmuProvider>, DanmuStreamError> {
|
||||
match provider_type {
|
||||
ProviderType::BiliBili => {
|
||||
let bili = BiliDanmu::new(identifier, room_id).await?;
|
||||
Ok(Box::new(bili))
|
||||
}
|
||||
ProviderType::Douyin => {
|
||||
let douyin = DouyinDanmu::new(identifier, room_id).await?;
|
||||
Ok(Box::new(douyin))
|
||||
}
|
||||
}
|
||||
}
|
||||
440
src-tauri/crates/danmu_stream/src/provider/bilibili.rs
Normal file
@@ -0,0 +1,440 @@
|
||||
mod dannmu_msg;
|
||||
mod interact_word;
|
||||
mod pack;
|
||||
mod send_gift;
|
||||
mod stream;
|
||||
mod super_chat;
|
||||
|
||||
use std::{sync::Arc, time::SystemTime};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use futures_util::{SinkExt, StreamExt, TryStreamExt};
|
||||
use log::{error, info};
|
||||
use pct_str::{PctString, URIReserved};
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::{
|
||||
sync::{mpsc, RwLock},
|
||||
time::{sleep, Duration},
|
||||
};
|
||||
use tokio_tungstenite::{connect_async, tungstenite::Message};
|
||||
|
||||
use crate::{
|
||||
http_client::ApiClient,
|
||||
provider::{DanmuMessageType, DanmuProvider},
|
||||
DanmuStreamError,
|
||||
};
|
||||
|
||||
type WsReadType = futures_util::stream::SplitStream<
|
||||
tokio_tungstenite::WebSocketStream<tokio_tungstenite::MaybeTlsStream<tokio::net::TcpStream>>,
|
||||
>;
|
||||
|
||||
type WsWriteType = futures_util::stream::SplitSink<
|
||||
tokio_tungstenite::WebSocketStream<tokio_tungstenite::MaybeTlsStream<tokio::net::TcpStream>>,
|
||||
Message,
|
||||
>;
|
||||
|
||||
pub struct BiliDanmu {
|
||||
client: ApiClient,
|
||||
room_id: u64,
|
||||
user_id: u64,
|
||||
stop: Arc<RwLock<bool>>,
|
||||
write: Arc<RwLock<Option<WsWriteType>>>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl DanmuProvider for BiliDanmu {
|
||||
async fn new(cookie: &str, room_id: u64) -> Result<Self, DanmuStreamError> {
|
||||
// find DedeUserID=<user_id> in cookie str
|
||||
let user_id = BiliDanmu::parse_user_id(cookie)?;
|
||||
// add buvid3 to cookie
|
||||
let cookie = format!("{};buvid3={}", cookie, uuid::Uuid::new_v4());
|
||||
let client = ApiClient::new(&cookie);
|
||||
|
||||
Ok(Self {
|
||||
client,
|
||||
user_id,
|
||||
room_id,
|
||||
stop: Arc::new(RwLock::new(false)),
|
||||
write: Arc::new(RwLock::new(None)),
|
||||
})
|
||||
}
|
||||
|
||||
async fn start(
|
||||
&self,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let mut retry_count = 0;
|
||||
const MAX_RETRIES: u32 = 5;
|
||||
const RETRY_DELAY: Duration = Duration::from_secs(5);
|
||||
info!(
|
||||
"Bilibili WebSocket connection started, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
|
||||
loop {
|
||||
if *self.stop.read().await {
|
||||
break;
|
||||
}
|
||||
|
||||
match self.connect_and_handle(tx.clone()).await {
|
||||
Ok(_) => {
|
||||
info!("Bilibili WebSocket connection closed normally");
|
||||
break;
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Bilibili WebSocket connection error: {}", e);
|
||||
retry_count += 1;
|
||||
|
||||
if retry_count >= MAX_RETRIES {
|
||||
return Err(DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to connect after {} retries", MAX_RETRIES),
|
||||
});
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}/{})",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
MAX_RETRIES
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn stop(&self) -> Result<(), DanmuStreamError> {
|
||||
*self.stop.write().await = true;
|
||||
if let Some(mut write) = self.write.write().await.take() {
|
||||
if let Err(e) = write.close().await {
|
||||
error!("Failed to close WebSocket connection: {}", e);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl BiliDanmu {
|
||||
async fn connect_and_handle(
|
||||
&self,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let wbi_key = self.get_wbi_key().await?;
|
||||
let real_room = self.get_real_room(&wbi_key, self.room_id).await?;
|
||||
let danmu_info = self.get_danmu_info(&wbi_key, real_room).await?;
|
||||
let ws_hosts = danmu_info.data.host_list.clone();
|
||||
let mut conn = None;
|
||||
log::debug!("ws_hosts: {:?}", ws_hosts);
|
||||
// Try each ws host in turn; once a connection succeeds, send the auth packet with the token
|
||||
for i in ws_hosts {
|
||||
let host = format!("wss://{}/sub", i.host);
|
||||
match connect_async(&host).await {
|
||||
Ok((c, _)) => {
|
||||
conn = Some(c);
|
||||
break;
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!(
|
||||
"Connect ws host: {} has error, trying next host ...\n{:?}\n{:?}",
|
||||
host, i, e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let conn = conn.ok_or(DanmuStreamError::WebsocketError {
|
||||
err: "Failed to connect to ws host".into(),
|
||||
})?;
|
||||
|
||||
let (write, read) = conn.split();
|
||||
*self.write.write().await = Some(write);
|
||||
|
||||
let json = serde_json::to_string(&WsSend {
|
||||
roomid: real_room,
|
||||
key: danmu_info.data.token,
|
||||
uid: self.user_id,
|
||||
protover: 3,
|
||||
platform: "web".to_string(),
|
||||
t: 2,
|
||||
})
|
||||
.map_err(|e| DanmuStreamError::WebsocketError { err: e.to_string() })?;
|
||||
|
||||
let json = pack::encode(&json, 7);
|
||||
if let Some(write) = self.write.write().await.as_mut() {
|
||||
write
|
||||
.send(Message::binary(json))
|
||||
.await
|
||||
.map_err(|e| DanmuStreamError::WebsocketError { err: e.to_string() })?;
|
||||
}
|
||||
|
||||
tokio::select! {
|
||||
v = BiliDanmu::send_heartbeat_packets(Arc::clone(&self.write)) => v,
|
||||
v = BiliDanmu::recv(read, tx, Arc::clone(&self.stop)) => v
|
||||
}?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn send_heartbeat_packets(
|
||||
write: Arc<RwLock<Option<WsWriteType>>>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
loop {
|
||||
if let Some(write) = write.write().await.as_mut() {
|
||||
write
|
||||
.send(Message::binary(pack::encode("", 2)))
|
||||
.await
|
||||
.map_err(|e| DanmuStreamError::WebsocketError { err: e.to_string() })?;
|
||||
}
|
||||
sleep(Duration::from_secs(30)).await;
|
||||
}
|
||||
}
|
||||
|
||||
async fn recv(
|
||||
mut read: WsReadType,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
stop: Arc<RwLock<bool>>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
while let Ok(Some(msg)) = read.try_next().await {
|
||||
if *stop.read().await {
|
||||
log::info!("Stopping bilibili danmu stream");
|
||||
break;
|
||||
}
|
||||
let data = msg.into_data();
|
||||
|
||||
if !data.is_empty() {
|
||||
let s = pack::build_pack(&data);
|
||||
|
||||
if let Ok(msgs) = s {
|
||||
for i in msgs {
|
||||
let ws = stream::WsStreamCtx::new(&i);
|
||||
if let Ok(ws) = ws {
|
||||
match ws.match_msg() {
|
||||
Ok(v) => {
|
||||
log::debug!("Received message: {:?}", v);
|
||||
tx.send(v).map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: e.to_string(),
|
||||
})?;
|
||||
}
|
||||
Err(e) => {
|
||||
log::trace!(
|
||||
"This message parsing is not yet supported:\nMessage: {i}\nErr: {e:#?}"
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::error!("{}", ws.unwrap_err());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_danmu_info(
|
||||
&self,
|
||||
wbi_key: &str,
|
||||
room_id: u64,
|
||||
) -> Result<DanmuInfo, DanmuStreamError> {
|
||||
let params = self
|
||||
.get_sign(
|
||||
wbi_key,
|
||||
serde_json::json!({
|
||||
"id": room_id,
|
||||
"type": 0,
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
let resp = self
|
||||
.client
|
||||
.get(
|
||||
&format!(
|
||||
"https://api.live.bilibili.com/xlive/web-room/v1/index/getDanmuInfo?{}",
|
||||
params
|
||||
),
|
||||
None,
|
||||
)
|
||||
.await?
|
||||
.json::<DanmuInfo>()
|
||||
.await?;
|
||||
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn get_real_room(&self, wbi_key: &str, room_id: u64) -> Result<u64, DanmuStreamError> {
|
||||
let params = self
|
||||
.get_sign(
|
||||
wbi_key,
|
||||
serde_json::json!({
|
||||
"id": room_id,
|
||||
"from": "room",
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
let resp = self
|
||||
.client
|
||||
.get(
|
||||
&format!(
|
||||
"https://api.live.bilibili.com/room/v1/Room/room_init?{}",
|
||||
params
|
||||
),
|
||||
None,
|
||||
)
|
||||
.await?
|
||||
.json::<RoomInit>()
|
||||
.await?
|
||||
.data
|
||||
.room_id;
|
||||
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
fn parse_user_id(cookie: &str) -> Result<u64, DanmuStreamError> {
|
||||
let mut user_id = None;
|
||||
|
||||
// find DedeUserID=<user_id> in cookie str
|
||||
let re = Regex::new(r"DedeUserID=(\d+)").unwrap();
|
||||
if let Some(captures) = re.captures(cookie) {
|
||||
if let Some(user) = captures.get(1) {
|
||||
user_id = Some(user.as_str().parse::<u64>().unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(user_id) = user_id {
|
||||
Ok(user_id)
|
||||
} else {
|
||||
Err(DanmuStreamError::InvalidIdentifier {
|
||||
err: format!("Failed to find user_id in cookie: {cookie}"),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_wbi_key(&self) -> Result<String, DanmuStreamError> {
|
||||
let nav_info: serde_json::Value = self
|
||||
.client
|
||||
.get("https://api.bilibili.com/x/web-interface/nav", None)
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
let re = Regex::new(r"wbi/(.*).png").unwrap();
|
||||
let img = re
|
||||
.captures(nav_info["data"]["wbi_img"]["img_url"].as_str().unwrap())
|
||||
.unwrap()
|
||||
.get(1)
|
||||
.unwrap()
|
||||
.as_str();
|
||||
let sub = re
|
||||
.captures(nav_info["data"]["wbi_img"]["sub_url"].as_str().unwrap())
|
||||
.unwrap()
|
||||
.get(1)
|
||||
.unwrap()
|
||||
.as_str();
|
||||
let raw_string = format!("{}{}", img, sub);
|
||||
Ok(raw_string)
|
||||
}
|
||||
|
||||
pub async fn get_sign(
|
||||
&self,
|
||||
wbi_key: &str,
|
||||
mut parameters: serde_json::Value,
|
||||
) -> Result<String, DanmuStreamError> {
|
||||
let table = vec![
|
||||
46, 47, 18, 2, 53, 8, 23, 32, 15, 50, 10, 31, 58, 3, 45, 35, 27, 43, 5, 49, 33, 9, 42,
|
||||
19, 29, 28, 14, 39, 12, 38, 41, 13, 37, 48, 7, 16, 24, 55, 40, 61, 26, 17, 0, 1, 60,
|
||||
51, 30, 4, 22, 25, 54, 21, 56, 59, 6, 63, 57, 62, 11, 36, 20, 34, 44, 52,
|
||||
];
|
||||
let raw_string = wbi_key;
|
||||
let mut encoded = Vec::new();
|
||||
table.into_iter().for_each(|x| {
|
||||
if x < raw_string.len() {
|
||||
encoded.push(raw_string.as_bytes()[x]);
|
||||
}
|
||||
});
|
||||
// only keep 32 bytes of encoded
|
||||
encoded = encoded[0..32].to_vec();
|
||||
let encoded = String::from_utf8(encoded).unwrap();
|
||||
// Timestamp in seconds
|
||||
let wts = SystemTime::now()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs();
|
||||
parameters
|
||||
.as_object_mut()
|
||||
.unwrap()
|
||||
.insert("wts".to_owned(), serde_json::Value::String(wts.to_string()));
|
||||
// Get all keys from parameters into vec
|
||||
let mut keys = parameters
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.keys()
|
||||
.map(|x| x.to_owned())
|
||||
.collect::<Vec<String>>();
|
||||
// sort keys
|
||||
keys.sort();
|
||||
let mut params = String::new();
|
||||
keys.iter().for_each(|x| {
|
||||
params.push_str(x);
|
||||
params.push('=');
|
||||
// Convert value to string based on its type
|
||||
let value = match parameters.get(x).unwrap() {
|
||||
serde_json::Value::String(s) => s.clone(),
|
||||
serde_json::Value::Number(n) => n.to_string(),
|
||||
serde_json::Value::Bool(b) => b.to_string(),
|
||||
_ => "".to_string(),
|
||||
};
|
||||
// Value filters !'()* characters
|
||||
let value = value.replace(['!', '\'', '(', ')', '*'], "");
|
||||
let value = PctString::encode(value.chars(), URIReserved);
|
||||
params.push_str(value.as_str());
|
||||
// add & if not last
|
||||
if x != keys.last().unwrap() {
|
||||
params.push('&');
|
||||
}
|
||||
});
|
||||
// md5 params+encoded
|
||||
let w_rid = md5::compute(params.to_string() + encoded.as_str());
|
||||
let params = params + format!("&w_rid={:x}", w_rid).as_str();
|
||||
Ok(params)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct WsSend {
|
||||
uid: u64,
|
||||
roomid: u64,
|
||||
key: String,
|
||||
protover: u32,
|
||||
platform: String,
|
||||
#[serde(rename = "type")]
|
||||
t: u32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct DanmuInfo {
|
||||
pub data: DanmuInfoData,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct DanmuInfoData {
|
||||
pub token: String,
|
||||
pub host_list: Vec<WsHost>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct WsHost {
|
||||
pub host: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct RoomInit {
|
||||
data: RoomInitData,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct RoomInitData {
|
||||
room_id: u64,
|
||||
}
|
||||
@@ -0,0 +1,88 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
pub struct BiliDanmuMessage {
|
||||
pub uid: u64,
|
||||
pub username: String,
|
||||
pub msg: String,
|
||||
pub fan: Option<String>,
|
||||
pub fan_level: Option<u64>,
|
||||
pub timestamp: i64,
|
||||
}
|
||||
|
||||
impl BiliDanmuMessage {
|
||||
pub fn new_from_ctx(ctx: &WsStreamCtx) -> Result<Self, DanmuStreamError> {
|
||||
let info = ctx
|
||||
.info
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "info is None".to_string(),
|
||||
})?;
|
||||
|
||||
let array_2 = info
|
||||
.get(2)
|
||||
.and_then(|x| x.as_array())
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "array_2 is None".to_string(),
|
||||
})?
|
||||
.to_owned();
|
||||
|
||||
let uid = array_2.first().and_then(|x| x.as_u64()).ok_or_else(|| {
|
||||
DanmuStreamError::MessageParseError {
|
||||
err: "uid is None".to_string(),
|
||||
}
|
||||
})?;
|
||||
|
||||
let username = array_2
|
||||
.get(1)
|
||||
.and_then(|x| x.as_str())
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "username is None".to_string(),
|
||||
})?
|
||||
.to_string();
|
||||
|
||||
let msg = info
|
||||
.get(1)
|
||||
.and_then(|x| x.as_str())
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "msg is None".to_string(),
|
||||
})?
|
||||
.to_string();
|
||||
|
||||
let array_3 = info
|
||||
.get(3)
|
||||
.and_then(|x| x.as_array())
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "array_3 is None".to_string(),
|
||||
})?
|
||||
.to_owned();
|
||||
|
||||
let fan = array_3
|
||||
.get(1)
|
||||
.and_then(|x| x.as_str())
|
||||
.map(|x| x.to_owned());
|
||||
|
||||
let fan_level = array_3.first().and_then(|x| x.as_u64());
|
||||
|
||||
let timestamp = info
|
||||
.first()
|
||||
.and_then(|x| x.as_array())
|
||||
.and_then(|x| x.get(4))
|
||||
.and_then(|x| x.as_i64())
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "timestamp is None".to_string(),
|
||||
})?;
|
||||
|
||||
Ok(Self {
|
||||
uid,
|
||||
username,
|
||||
msg,
|
||||
fan,
|
||||
fan_level,
|
||||
timestamp,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,67 @@
|
||||
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
|
||||
|
||||
#[derive(Debug)]
|
||||
#[allow(dead_code)]
|
||||
pub struct InteractWord {
|
||||
pub uid: u64,
|
||||
pub uname: String,
|
||||
pub fan: Option<String>,
|
||||
pub fan_level: Option<u32>,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl InteractWord {
|
||||
pub fn new_from_ctx(ctx: &WsStreamCtx) -> Result<Self, DanmuStreamError> {
|
||||
let data = ctx
|
||||
.data
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "data is None".to_string(),
|
||||
})?;
|
||||
|
||||
let uname = data
|
||||
.uname
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "uname is None".to_string(),
|
||||
})?
|
||||
.to_string();
|
||||
|
||||
let uid = data
|
||||
.uid
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "uid is None".to_string(),
|
||||
})?
|
||||
.as_u64()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "uid is None".to_string(),
|
||||
})?;
|
||||
|
||||
let fan = data
|
||||
.fans_medal
|
||||
.as_ref()
|
||||
.and_then(|x| x.medal_name.to_owned());
|
||||
|
||||
let fan = if fan == Some("".to_string()) {
|
||||
None
|
||||
} else {
|
||||
fan
|
||||
};
|
||||
|
||||
let fan_level = data.fans_medal.as_ref().and_then(|x| x.medal_level);
|
||||
|
||||
let fan_level = if fan_level == Some(0) {
|
||||
None
|
||||
} else {
|
||||
fan_level
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
uid,
|
||||
uname,
|
||||
fan,
|
||||
fan_level,
|
||||
})
|
||||
}
|
||||
}
|
||||
161
src-tauri/crates/danmu_stream/src/provider/bilibili/pack.rs
Normal file
@@ -0,0 +1,161 @@
|
||||
// This file is copied from https://github.com/eatradish/felgens/blob/master/src/pack.rs
|
||||
|
||||
use std::io::Read;
|
||||
|
||||
use flate2::read::ZlibDecoder;
|
||||
use scroll::Pread;
|
||||
use scroll_derive::Pread;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
#[derive(Debug, Pread, Clone)]
|
||||
struct BilibiliPackHeader {
|
||||
pack_len: u32,
|
||||
_header_len: u16,
|
||||
ver: u16,
|
||||
_op: u32,
|
||||
_seq: u32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Pread)]
|
||||
struct PackHotCount {
|
||||
count: u32,
|
||||
}
|
||||
|
||||
type BilibiliPackCtx<'a> = (BilibiliPackHeader, &'a [u8]);
|
||||
|
||||
fn pack(buffer: &[u8]) -> Result<BilibiliPackCtx, DanmuStreamError> {
|
||||
let data = buffer
|
||||
.pread_with(0, scroll::BE)
|
||||
.map_err(|e: scroll::Error| DanmuStreamError::PackError { err: e.to_string() })?;
|
||||
|
||||
let buf = &buffer[16..];
|
||||
|
||||
Ok((data, buf))
|
||||
}
|
||||
|
||||
fn write_int(buffer: &[u8], start: usize, val: u32) -> Vec<u8> {
|
||||
let val_bytes = val.to_be_bytes();
|
||||
|
||||
let mut buf = buffer.to_vec();
|
||||
|
||||
for (i, c) in val_bytes.iter().enumerate() {
|
||||
buf[start + i] = *c;
|
||||
}
|
||||
|
||||
buf
|
||||
}
|
||||
|
||||
pub fn encode(s: &str, op: u8) -> Vec<u8> {
|
||||
let data = s.as_bytes();
|
||||
let packet_len = 16 + data.len();
|
||||
let header = vec![0, 0, 0, 0, 0, 16, 0, 1, 0, 0, 0, op, 0, 0, 0, 1];
|
||||
|
||||
let header = write_int(&header, 0, packet_len as u32);
|
||||
|
||||
[&header, data].concat()
|
||||
}
|
||||
|
||||
pub fn build_pack(buf: &[u8]) -> Result<Vec<String>, DanmuStreamError> {
|
||||
let ctx = pack(buf)?;
|
||||
let msgs = decode(ctx)?;
|
||||
|
||||
Ok(msgs)
|
||||
}
|
||||
|
||||
fn get_hot_count(body: &[u8]) -> Result<u32, DanmuStreamError> {
|
||||
let count = body
|
||||
.pread_with::<PackHotCount>(0, scroll::BE)
|
||||
.map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?
|
||||
.count;
|
||||
|
||||
Ok(count)
|
||||
}
|
||||
|
||||
fn zlib_decode(body: &[u8]) -> Result<(BilibiliPackHeader, Vec<u8>), DanmuStreamError> {
|
||||
let mut buf = vec![];
|
||||
let mut z = ZlibDecoder::new(body);
|
||||
z.read_to_end(&mut buf)
|
||||
.map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?;
|
||||
|
||||
let ctx = pack(&buf)?;
|
||||
let header = ctx.0;
|
||||
let buf = ctx.1.to_vec();
|
||||
|
||||
Ok((header, buf))
|
||||
}
|
||||
|
||||
fn decode(ctx: BilibiliPackCtx) -> Result<Vec<String>, DanmuStreamError> {
|
||||
let (mut header, body) = ctx;
|
||||
|
||||
let mut buf = body.to_vec();
|
||||
|
||||
loop {
|
||||
(header, buf) = match header.ver {
|
||||
2 => zlib_decode(&buf)?,
|
||||
3 => brotli_decode(&buf)?,
|
||||
0 | 1 => break,
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
|
||||
let msgs = match header.ver {
|
||||
0 => split_msgs(buf, header)?,
|
||||
1 => vec![format!("{{\"count\": {}}}", get_hot_count(&buf)?)],
|
||||
x => return Err(DanmuStreamError::UnsupportProto { proto: x }),
|
||||
};
|
||||
|
||||
Ok(msgs)
|
||||
}
|
||||
|
||||
fn split_msgs(buf: Vec<u8>, header: BilibiliPackHeader) -> Result<Vec<String>, DanmuStreamError> {
|
||||
let mut buf = buf;
|
||||
let mut header = header;
|
||||
let mut msgs = vec![];
|
||||
let mut offset = 0;
|
||||
let buf_len = buf.len();
|
||||
|
||||
msgs.push(
|
||||
std::str::from_utf8(&buf[..(header.pack_len - 16) as usize])
|
||||
.map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?
|
||||
.to_string(),
|
||||
);
|
||||
buf = buf[(header.pack_len - 16) as usize..].to_vec();
|
||||
offset += header.pack_len - 16;
|
||||
|
||||
while offset != buf_len as u32 {
|
||||
let ctx = pack(&buf).map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?;
|
||||
|
||||
header = ctx.0;
|
||||
buf = ctx.1.to_vec();
|
||||
|
||||
msgs.push(
|
||||
std::str::from_utf8(&buf[..(header.pack_len - 16) as usize])
|
||||
.map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?
|
||||
.to_string(),
|
||||
);
|
||||
|
||||
buf = buf[(header.pack_len - 16) as usize..].to_vec();
|
||||
|
||||
offset += header.pack_len;
|
||||
}
|
||||
|
||||
Ok(msgs)
|
||||
}
|
||||
|
||||
fn brotli_decode(body: &[u8]) -> Result<(BilibiliPackHeader, Vec<u8>), DanmuStreamError> {
|
||||
let mut reader = brotli::Decompressor::new(body, 4096);
|
||||
|
||||
let mut buf = Vec::new();
|
||||
|
||||
reader
|
||||
.read_to_end(&mut buf)
|
||||
.map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?;
|
||||
|
||||
let ctx = pack(&buf).map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?;
|
||||
|
||||
let header = ctx.0;
|
||||
let buf = ctx.1.to_vec();
|
||||
|
||||
Ok((header, buf))
|
||||
}
|
||||
115
src-tauri/crates/danmu_stream/src/provider/bilibili/send_gift.rs
Normal file
@@ -0,0 +1,115 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
pub struct SendGift {
|
||||
pub action: String,
|
||||
pub gift_name: String,
|
||||
pub num: u64,
|
||||
pub uname: String,
|
||||
pub uid: u64,
|
||||
pub medal_name: Option<String>,
|
||||
pub medal_level: Option<u32>,
|
||||
pub price: u32,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl SendGift {
|
||||
pub fn new_from_ctx(ctx: &WsStreamCtx) -> Result<Self, DanmuStreamError> {
|
||||
let data = ctx
|
||||
.data
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "data is None".to_string(),
|
||||
})?;
|
||||
|
||||
let action = data
|
||||
.action
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "action is None".to_string(),
|
||||
})?
|
||||
.to_owned();
|
||||
|
||||
let combo_send = data.combo_send.clone();
|
||||
|
||||
let gift_name = if let Some(gift) = data.gift_name.as_ref() {
|
||||
gift.to_owned()
|
||||
} else if let Some(gift) = combo_send.clone().and_then(|x| x.gift_name) {
|
||||
gift
|
||||
} else {
|
||||
return Err(DanmuStreamError::MessageParseError {
|
||||
err: "gift_name is None".to_string(),
|
||||
});
|
||||
};
|
||||
|
||||
let num = if let Some(num) = combo_send.clone().and_then(|x| x.combo_num) {
|
||||
num
|
||||
} else if let Some(num) = data.num {
|
||||
num
|
||||
} else if let Some(num) = combo_send.and_then(|x| x.gift_num) {
|
||||
num
|
||||
} else {
|
||||
return Err(DanmuStreamError::MessageParseError {
|
||||
err: "num is None".to_string(),
|
||||
});
|
||||
};
|
||||
|
||||
let uname = data
|
||||
.uname
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "uname is None".to_string(),
|
||||
})?
|
||||
.to_owned();
|
||||
|
||||
let uid = data
|
||||
.uid
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "uid is None".to_string(),
|
||||
})?
|
||||
.as_u64()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "uid is None".to_string(),
|
||||
})?;
|
||||
|
||||
let medal_name = data
|
||||
.medal_info
|
||||
.as_ref()
|
||||
.and_then(|x| x.medal_name.to_owned());
|
||||
|
||||
let medal_level = data.medal_info.as_ref().and_then(|x| x.medal_level);
|
||||
|
||||
let medal_name = if medal_name == Some("".to_string()) {
|
||||
None
|
||||
} else {
|
||||
medal_name
|
||||
};
|
||||
|
||||
let medal_level = if medal_level == Some(0) {
|
||||
None
|
||||
} else {
|
||||
medal_level
|
||||
};
|
||||
|
||||
let price = data
|
||||
.price
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "price is None".to_string(),
|
||||
})?;
|
||||
|
||||
Ok(Self {
|
||||
action,
|
||||
gift_name,
|
||||
num,
|
||||
uname,
|
||||
uid,
|
||||
medal_name,
|
||||
medal_level,
|
||||
price,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,97 @@
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::{
|
||||
provider::{bilibili::dannmu_msg::BiliDanmuMessage, DanmuMessageType},
|
||||
DanmuMessage, DanmuStreamError,
|
||||
};
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct WsStreamCtx {
|
||||
pub cmd: Option<String>,
|
||||
pub info: Option<Vec<Value>>,
|
||||
pub data: Option<WsStreamCtxData>,
|
||||
#[serde(flatten)]
|
||||
_v: Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
#[allow(dead_code)]
|
||||
pub struct WsStreamCtxData {
|
||||
pub message: Option<String>,
|
||||
pub price: Option<u32>,
|
||||
pub start_time: Option<u64>,
|
||||
pub time: Option<u32>,
|
||||
pub uid: Option<Value>,
|
||||
pub user_info: Option<WsStreamCtxDataUser>,
|
||||
pub medal_info: Option<WsStreamCtxDataMedalInfo>,
|
||||
pub uname: Option<String>,
|
||||
pub fans_medal: Option<WsStreamCtxDataMedalInfo>,
|
||||
pub action: Option<String>,
|
||||
#[serde(rename = "giftName")]
|
||||
pub gift_name: Option<String>,
|
||||
pub num: Option<u64>,
|
||||
pub combo_num: Option<u64>,
|
||||
pub gift_num: Option<u64>,
|
||||
pub combo_send: Box<Option<WsStreamCtxData>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct WsStreamCtxDataMedalInfo {
|
||||
pub medal_name: Option<String>,
|
||||
pub medal_level: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
#[allow(dead_code)]
|
||||
pub struct WsStreamCtxDataUser {
|
||||
pub face: String,
|
||||
pub uname: String,
|
||||
}
|
||||
|
||||
impl WsStreamCtx {
|
||||
pub fn new(s: &str) -> Result<Self, DanmuStreamError> {
|
||||
serde_json::from_str(s).map_err(|_| DanmuStreamError::MessageParseError {
|
||||
err: "Failed to parse message".to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn match_msg(&self) -> Result<DanmuMessageType, DanmuStreamError> {
|
||||
let cmd = self.handle_cmd();
|
||||
|
||||
let danmu_msg = match cmd {
|
||||
Some(c) if c.contains("DANMU_MSG") => Some(BiliDanmuMessage::new_from_ctx(self)?),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
if let Some(danmu_msg) = danmu_msg {
|
||||
Ok(DanmuMessageType::DanmuMessage(DanmuMessage {
|
||||
room_id: 0,
|
||||
user_id: danmu_msg.uid,
|
||||
user_name: danmu_msg.username,
|
||||
message: danmu_msg.msg,
|
||||
color: 0,
|
||||
timestamp: danmu_msg.timestamp,
|
||||
}))
|
||||
} else {
|
||||
Err(DanmuStreamError::MessageParseError {
|
||||
err: "Unknown message".to_string(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_cmd(&self) -> Option<&str> {
|
||||
// handle DANMU_MSG:4:0:2:2:2:0
|
||||
let cmd = if let Some(c) = self.cmd.as_deref() {
|
||||
if c.starts_with("DM_INTERACTION") {
|
||||
Some("DANMU_MSG")
|
||||
} else {
|
||||
Some(c)
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
cmd
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,93 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
pub struct SuperChatMessage {
|
||||
pub uname: String,
|
||||
pub uid: u64,
|
||||
pub face: String,
|
||||
pub price: u32,
|
||||
pub start_time: u64,
|
||||
pub time: u32,
|
||||
pub msg: String,
|
||||
pub medal_name: Option<String>,
|
||||
pub medal_level: Option<u32>,
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl SuperChatMessage {
|
||||
pub fn new_from_ctx(ctx: &WsStreamCtx) -> Result<Self, DanmuStreamError> {
|
||||
let data = ctx
|
||||
.data
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "data is None".to_string(),
|
||||
})?;
|
||||
|
||||
let user_info =
|
||||
data.user_info
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "user_info is None".to_string(),
|
||||
})?;
|
||||
|
||||
let uname = user_info.uname.to_owned();
|
||||
|
||||
let uid = data.uid.as_ref().and_then(|x| x.as_u64()).ok_or_else(|| {
|
||||
DanmuStreamError::MessageParseError {
|
||||
err: "uid is None".to_string(),
|
||||
}
|
||||
})?;
|
||||
|
||||
let face = user_info.face.to_owned();
|
||||
|
||||
let price = data
|
||||
.price
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "price is None".to_string(),
|
||||
})?;
|
||||
|
||||
let start_time = data
|
||||
.start_time
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "start_time is None".to_string(),
|
||||
})?;
|
||||
|
||||
let time = data
|
||||
.time
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "time is None".to_string(),
|
||||
})?;
|
||||
|
||||
let msg = data
|
||||
.message
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "message is None".to_string(),
|
||||
})?
|
||||
.to_owned();
|
||||
|
||||
let medal = data
|
||||
.medal_info
|
||||
.as_ref()
|
||||
.map(|x| (x.medal_name.to_owned(), x.medal_level.to_owned()));
|
||||
|
||||
let medal_name = medal.as_ref().and_then(|(name, _)| name.to_owned());
|
||||
|
||||
let medal_level = medal.and_then(|(_, level)| level);
|
||||
|
||||
Ok(Self {
|
||||
uname,
|
||||
uid,
|
||||
face,
|
||||
price,
|
||||
start_time,
|
||||
time,
|
||||
msg,
|
||||
medal_name,
|
||||
medal_level,
|
||||
})
|
||||
}
|
||||
}
|
||||
462
src-tauri/crates/danmu_stream/src/provider/douyin.rs
Normal file
@@ -0,0 +1,462 @@
|
||||
use crate::{provider::DanmuProvider, DanmuMessage, DanmuMessageType, DanmuStreamError};
|
||||
use async_trait::async_trait;
|
||||
use deno_core::v8;
|
||||
use deno_core::JsRuntime;
|
||||
use deno_core::RuntimeOptions;
|
||||
use flate2::read::GzDecoder;
|
||||
use futures_util::{SinkExt, StreamExt, TryStreamExt};
|
||||
use log::debug;
|
||||
use log::{error, info};
|
||||
use prost::bytes::Bytes;
|
||||
use prost::Message;
|
||||
use std::io::Read;
|
||||
use std::sync::Arc;
|
||||
use std::time::{Duration, SystemTime};
|
||||
use tokio::net::TcpStream;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::RwLock;
|
||||
use tokio_tungstenite::{
|
||||
connect_async, tungstenite::Message as WsMessage, MaybeTlsStream, WebSocketStream,
|
||||
};
|
||||
|
||||
mod messages;
|
||||
use messages::*;
|
||||
|
||||
const USER_AGENT: &str = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36";
|
||||
|
||||
const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(10);
|
||||
|
||||
type WsReadType = futures_util::stream::SplitStream<WebSocketStream<MaybeTlsStream<TcpStream>>>;
|
||||
type WsWriteType =
|
||||
futures_util::stream::SplitSink<WebSocketStream<MaybeTlsStream<TcpStream>>, WsMessage>;
|
||||
|
||||
pub struct DouyinDanmu {
|
||||
room_id: u64,
|
||||
cookie: String,
|
||||
stop: Arc<RwLock<bool>>,
|
||||
write: Arc<RwLock<Option<WsWriteType>>>,
|
||||
}
|
||||
|
||||
impl DouyinDanmu {
|
||||
async fn connect_and_handle(
|
||||
&self,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let url = self.get_wss_url().await?;
|
||||
|
||||
let request = tokio_tungstenite::tungstenite::http::Request::builder()
|
||||
.uri(url)
|
||||
.header(
|
||||
tokio_tungstenite::tungstenite::http::header::COOKIE,
|
||||
self.cookie.as_str(),
|
||||
)
|
||||
.header(
|
||||
tokio_tungstenite::tungstenite::http::header::REFERER,
|
||||
"https://live.douyin.com/",
|
||||
)
|
||||
.header(
|
||||
tokio_tungstenite::tungstenite::http::header::USER_AGENT,
|
||||
USER_AGENT,
|
||||
)
|
||||
.header(
|
||||
tokio_tungstenite::tungstenite::http::header::HOST,
|
||||
"webcast5-ws-web-hl.douyin.com",
|
||||
)
|
||||
.header(
|
||||
tokio_tungstenite::tungstenite::http::header::UPGRADE,
|
||||
"websocket",
|
||||
)
|
||||
.header(
|
||||
tokio_tungstenite::tungstenite::http::header::CONNECTION,
|
||||
"Upgrade",
|
||||
)
|
||||
.header(
|
||||
tokio_tungstenite::tungstenite::http::header::SEC_WEBSOCKET_VERSION,
|
||||
"13",
|
||||
)
|
||||
.header(
|
||||
tokio_tungstenite::tungstenite::http::header::SEC_WEBSOCKET_EXTENSIONS,
|
||||
"permessage-deflate; client_max_window_bits",
|
||||
)
|
||||
.header(
|
||||
tokio_tungstenite::tungstenite::http::header::SEC_WEBSOCKET_KEY,
|
||||
"V1Yza5x1zcfkembl6u/0Pg==",
|
||||
)
|
||||
.body(())
|
||||
.unwrap();
|
||||
|
||||
let (ws_stream, response) =
|
||||
connect_async(request)
|
||||
.await
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to connect to douyin websocket: {}", e),
|
||||
})?;
|
||||
|
||||
// Log the response status for debugging
|
||||
info!("WebSocket connection response: {:?}", response.status());
|
||||
|
||||
let (write, read) = ws_stream.split();
|
||||
*self.write.write().await = Some(write);
|
||||
self.handle_connection(read, tx).await
|
||||
}
|
||||
|
||||
async fn get_wss_url(&self) -> Result<String, DanmuStreamError> {
|
||||
// Create a new V8 runtime
|
||||
let mut runtime = JsRuntime::new(RuntimeOptions::default());
|
||||
|
||||
// Add global CryptoJS object
|
||||
let crypto_js = include_str!("douyin/crypto-js.min.js");
|
||||
runtime
|
||||
.execute_script(
|
||||
"<crypto-js.min.js>",
|
||||
deno_core::FastString::Static(crypto_js),
|
||||
)
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to execute crypto-js: {}", e),
|
||||
})?;
|
||||
|
||||
// Load and execute the sign.js file
|
||||
let js_code = include_str!("douyin/webmssdk.js");
|
||||
runtime
|
||||
.execute_script("<sign.js>", deno_core::FastString::Static(js_code))
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to execute JavaScript: {}", e),
|
||||
})?;
|
||||
|
||||
// Call the get_wss_url function
|
||||
let sign_call = format!("get_wss_url(\"{}\")", self.room_id);
|
||||
let result = runtime
|
||||
.execute_script(
|
||||
"<sign_call>",
|
||||
deno_core::FastString::Owned(sign_call.into_boxed_str()),
|
||||
)
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to execute JavaScript: {}", e),
|
||||
})?;
|
||||
|
||||
// Get the result from the V8 runtime
|
||||
let scope = &mut runtime.handle_scope();
|
||||
let local = v8::Local::new(scope, result);
|
||||
let url = local.to_string(scope).unwrap().to_rust_string_lossy(scope);
|
||||
|
||||
debug!("Douyin wss url: {}", url);
|
||||
|
||||
Ok(url)
|
||||
}
|
||||
|
||||
async fn handle_connection(
|
||||
&self,
|
||||
mut read: WsReadType,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
// Start heartbeat task with error handling
|
||||
let (tx_write, mut _rx_write) = mpsc::channel(32);
|
||||
let tx_write_clone = tx_write.clone();
|
||||
let stop = Arc::clone(&self.stop);
|
||||
let heartbeat_handle = tokio::spawn(async move {
|
||||
let mut last_heartbeat = SystemTime::now();
|
||||
let mut consecutive_failures = 0;
|
||||
const MAX_FAILURES: u32 = 3;
|
||||
|
||||
loop {
|
||||
if *stop.read().await {
|
||||
log::info!("Stopping douyin danmu stream");
|
||||
break;
|
||||
}
|
||||
|
||||
tokio::time::sleep(HEARTBEAT_INTERVAL).await;
|
||||
|
||||
match Self::send_heartbeat(&tx_write_clone).await {
|
||||
Ok(_) => {
|
||||
last_heartbeat = SystemTime::now();
|
||||
consecutive_failures = 0;
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Failed to send heartbeat: {}", e);
|
||||
consecutive_failures += 1;
|
||||
|
||||
if consecutive_failures >= MAX_FAILURES {
|
||||
error!("Too many consecutive heartbeat failures, closing connection");
|
||||
break;
|
||||
}
|
||||
|
||||
// Check if we've exceeded the maximum time without a successful heartbeat
|
||||
if let Ok(duration) = last_heartbeat.elapsed() {
|
||||
if duration > HEARTBEAT_INTERVAL * 2 {
|
||||
error!("No successful heartbeat for too long, closing connection");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Main message handling loop
|
||||
let room_id = self.room_id;
|
||||
let stop = Arc::clone(&self.stop);
|
||||
let write = Arc::clone(&self.write);
|
||||
let message_handle = tokio::spawn(async move {
|
||||
while let Some(msg) =
|
||||
read.try_next()
|
||||
.await
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to read message: {}", e),
|
||||
})?
|
||||
{
|
||||
if *stop.read().await {
|
||||
log::info!("Stopping douyin danmu stream");
|
||||
break;
|
||||
}
|
||||
|
||||
match msg {
|
||||
WsMessage::Binary(data) => {
|
||||
if let Ok(Some(ack)) = handle_binary_message(&data, &tx, room_id).await {
|
||||
if let Some(write) = write.write().await.as_mut() {
|
||||
if let Err(e) =
|
||||
write.send(WsMessage::Binary(ack.encode_to_vec())).await
|
||||
{
|
||||
error!("Failed to send ack: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
WsMessage::Close(_) => {
|
||||
info!("WebSocket connection closed");
|
||||
break;
|
||||
}
|
||||
WsMessage::Ping(data) => {
|
||||
// Respond to ping with pong
|
||||
if let Err(e) = tx_write.send(WsMessage::Pong(data)).await {
|
||||
error!("Failed to send pong: {}", e);
|
||||
break;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Ok::<(), DanmuStreamError>(())
|
||||
});
|
||||
|
||||
// Wait for either the heartbeat or message handling to complete
|
||||
tokio::select! {
|
||||
result = heartbeat_handle => {
|
||||
if let Err(e) = result {
|
||||
error!("Heartbeat task failed: {}", e);
|
||||
}
|
||||
}
|
||||
result = message_handle => {
|
||||
if let Err(e) = result {
|
||||
error!("Message handling task failed: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn send_heartbeat(tx: &mpsc::Sender<WsMessage>) -> Result<(), DanmuStreamError> {
|
||||
// heartbeat message: 3A 02 68 62
|
||||
tx.send(WsMessage::Binary(vec![0x3A, 0x02, 0x68, 0x62]))
|
||||
.await
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to send heartbeat message: {}", e),
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_binary_message(
|
||||
data: &[u8],
|
||||
tx: &mpsc::UnboundedSender<DanmuMessageType>,
|
||||
room_id: u64,
|
||||
) -> Result<Option<PushFrame>, DanmuStreamError> {
|
||||
// First decode the PushFrame
|
||||
let push_frame = PushFrame::decode(Bytes::from(data.to_vec())).map_err(|e| {
|
||||
DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to decode PushFrame: {}", e),
|
||||
}
|
||||
})?;
|
||||
|
||||
// Decompress the payload
|
||||
let mut decoder = GzDecoder::new(push_frame.payload.as_slice());
|
||||
let mut decompressed = Vec::new();
|
||||
decoder
|
||||
.read_to_end(&mut decompressed)
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to decompress payload: {}", e),
|
||||
})?;
|
||||
|
||||
// Decode the Response from decompressed payload
|
||||
let response = Response::decode(Bytes::from(decompressed)).map_err(|e| {
|
||||
DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to decode Response: {}", e),
|
||||
}
|
||||
})?;
|
||||
|
||||
// if payload_package.needAck:
|
||||
// obj = PushFrame()
|
||||
// obj.payloadType = 'ack'
|
||||
// obj.logId = log_id
|
||||
// obj.payloadType = payload_package.internalExt
|
||||
// ack = obj.SerializeToString()
|
||||
let mut ack = None;
|
||||
if response.need_ack {
|
||||
let ack_msg = PushFrame {
|
||||
payload_type: "ack".to_string(),
|
||||
log_id: push_frame.log_id,
|
||||
payload_encoding: "".to_string(),
|
||||
payload: vec![],
|
||||
seq_id: 0,
|
||||
service: 0,
|
||||
method: 0,
|
||||
headers_list: vec![],
|
||||
};
|
||||
|
||||
debug!("Need to respond ack: {:?}", ack_msg);
|
||||
|
||||
ack = Some(ack_msg);
|
||||
}
|
||||
|
||||
for message in response.messages_list {
|
||||
match message.method.as_str() {
|
||||
"WebcastChatMessage" => {
|
||||
let chat_msg =
|
||||
DouyinChatMessage::decode(message.payload.as_slice()).map_err(|e| {
|
||||
DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to decode chat message: {}", e),
|
||||
}
|
||||
})?;
|
||||
if let Some(user) = chat_msg.user {
|
||||
let danmu_msg = DanmuMessage {
|
||||
room_id,
|
||||
user_id: user.id,
|
||||
user_name: user.nick_name,
|
||||
message: chat_msg.content,
|
||||
color: 0xffffff,
|
||||
timestamp: chat_msg.event_time as i64 * 1000,
|
||||
};
|
||||
debug!("Received danmu message: {:?}", danmu_msg);
|
||||
tx.send(DanmuMessageType::DanmuMessage(danmu_msg))
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to send message to channel: {}", e),
|
||||
})?;
|
||||
}
|
||||
}
|
||||
"WebcastGiftMessage" => {
|
||||
let gift_msg = GiftMessage::decode(message.payload.as_slice()).map_err(|e| {
|
||||
DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to decode gift message: {}", e),
|
||||
}
|
||||
})?;
|
||||
if let Some(user) = gift_msg.user {
|
||||
if let Some(gift) = gift_msg.gift {
|
||||
log::debug!("Received gift: {} from user: {}", gift.name, user.nick_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
"WebcastLikeMessage" => {
|
||||
let like_msg = LikeMessage::decode(message.payload.as_slice()).map_err(|e| {
|
||||
DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to decode like message: {}", e),
|
||||
}
|
||||
})?;
|
||||
if let Some(user) = like_msg.user {
|
||||
log::debug!(
|
||||
"Received {} likes from user: {}",
|
||||
like_msg.count,
|
||||
user.nick_name
|
||||
);
|
||||
}
|
||||
}
|
||||
"WebcastMemberMessage" => {
|
||||
let member_msg =
|
||||
MemberMessage::decode(message.payload.as_slice()).map_err(|e| {
|
||||
DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to decode member message: {}", e),
|
||||
}
|
||||
})?;
|
||||
if let Some(user) = member_msg.user {
|
||||
log::debug!(
|
||||
"Member joined: {} (Action: {})",
|
||||
user.nick_name,
|
||||
member_msg.action_description
|
||||
);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
debug!("Unknown message: {:?}", message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(ack)
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl DanmuProvider for DouyinDanmu {
|
||||
async fn new(identifier: &str, room_id: u64) -> Result<Self, DanmuStreamError> {
|
||||
Ok(Self {
|
||||
room_id,
|
||||
cookie: identifier.to_string(),
|
||||
stop: Arc::new(RwLock::new(false)),
|
||||
write: Arc::new(RwLock::new(None)),
|
||||
})
|
||||
}
|
||||
|
||||
async fn start(
|
||||
&self,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let mut retry_count = 0;
|
||||
const MAX_RETRIES: u32 = 5;
|
||||
const RETRY_DELAY: Duration = Duration::from_secs(5);
|
||||
info!(
|
||||
"Douyin WebSocket connection started, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
|
||||
loop {
|
||||
if *self.stop.read().await {
|
||||
break;
|
||||
}
|
||||
|
||||
match self.connect_and_handle(tx.clone()).await {
|
||||
Ok(_) => {
|
||||
info!("Douyin WebSocket connection closed normally");
|
||||
break;
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Douyin WebSocket connection error: {}", e);
|
||||
retry_count += 1;
|
||||
|
||||
if retry_count >= MAX_RETRIES {
|
||||
return Err(DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to connect after {} retries", MAX_RETRIES),
|
||||
});
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}/{})",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
MAX_RETRIES
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn stop(&self) -> Result<(), DanmuStreamError> {
|
||||
*self.stop.write().await = true;
|
||||
if let Some(mut write) = self.write.write().await.take() {
|
||||
if let Err(e) = write.close().await {
|
||||
error!("Failed to close WebSocket connection: {}", e);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
1
src-tauri/crates/danmu_stream/src/provider/douyin/crypto-js.min.js
vendored
Normal file
861
src-tauri/crates/danmu_stream/src/provider/douyin/messages.rs
Normal file
@@ -0,0 +1,861 @@
|
||||
use prost::Message;
|
||||
use std::collections::HashMap;
|
||||
|
||||
// message Response {
|
||||
// repeated Message messagesList = 1;
|
||||
// string cursor = 2;
|
||||
// uint64 fetchInterval = 3;
|
||||
// uint64 now = 4;
|
||||
// string internalExt = 5;
|
||||
// uint32 fetchType = 6;
|
||||
// map<string, string> routeParams = 7;
|
||||
// uint64 heartbeatDuration = 8;
|
||||
// bool needAck = 9;
|
||||
// string pushServer = 10;
|
||||
// string liveCursor = 11;
|
||||
// bool historyNoMore = 12;
|
||||
// }
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct Response {
|
||||
#[prost(message, repeated, tag = "1")]
|
||||
pub messages_list: Vec<CommonMessage>,
|
||||
#[prost(string, tag = "2")]
|
||||
pub cursor: String,
|
||||
#[prost(uint64, tag = "3")]
|
||||
pub fetch_interval: u64,
|
||||
#[prost(uint64, tag = "4")]
|
||||
pub now: u64,
|
||||
#[prost(string, tag = "5")]
|
||||
pub internal_ext: String,
|
||||
#[prost(uint32, tag = "6")]
|
||||
pub fetch_type: u32,
|
||||
#[prost(map = "string, string", tag = "7")]
|
||||
pub route_params: HashMap<String, String>,
|
||||
#[prost(uint64, tag = "8")]
|
||||
pub heartbeat_duration: u64,
|
||||
#[prost(bool, tag = "9")]
|
||||
pub need_ack: bool,
|
||||
#[prost(string, tag = "10")]
|
||||
pub push_server: String,
|
||||
#[prost(string, tag = "11")]
|
||||
pub live_cursor: String,
|
||||
#[prost(bool, tag = "12")]
|
||||
pub history_no_more: bool,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct CommonMessage {
|
||||
#[prost(string, tag = "1")]
|
||||
pub method: String,
|
||||
#[prost(bytes, tag = "2")]
|
||||
pub payload: Vec<u8>,
|
||||
#[prost(int64, tag = "3")]
|
||||
pub msg_id: i64,
|
||||
#[prost(int32, tag = "4")]
|
||||
pub msg_type: i32,
|
||||
#[prost(int64, tag = "5")]
|
||||
pub offset: i64,
|
||||
#[prost(bool, tag = "6")]
|
||||
pub need_wrds_store: bool,
|
||||
#[prost(int64, tag = "7")]
|
||||
pub wrds_version: i64,
|
||||
#[prost(string, tag = "8")]
|
||||
pub wrds_sub_key: String,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct Common {
|
||||
#[prost(string, tag = "1")]
|
||||
pub method: String,
|
||||
#[prost(uint64, tag = "2")]
|
||||
pub msg_id: u64,
|
||||
#[prost(uint64, tag = "3")]
|
||||
pub room_id: u64,
|
||||
#[prost(uint64, tag = "4")]
|
||||
pub create_time: u64,
|
||||
#[prost(uint32, tag = "5")]
|
||||
pub monitor: u32,
|
||||
#[prost(bool, tag = "6")]
|
||||
pub is_show_msg: bool,
|
||||
#[prost(string, tag = "7")]
|
||||
pub describe: String,
|
||||
#[prost(uint64, tag = "9")]
|
||||
pub fold_type: u64,
|
||||
#[prost(uint64, tag = "10")]
|
||||
pub anchor_fold_type: u64,
|
||||
#[prost(uint64, tag = "11")]
|
||||
pub priority_score: u64,
|
||||
#[prost(string, tag = "12")]
|
||||
pub log_id: String,
|
||||
#[prost(string, tag = "13")]
|
||||
pub msg_process_filter_k: String,
|
||||
#[prost(string, tag = "14")]
|
||||
pub msg_process_filter_v: String,
|
||||
#[prost(message, optional, tag = "15")]
|
||||
pub user: Option<User>,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct User {
|
||||
#[prost(uint64, tag = "1")]
|
||||
pub id: u64,
|
||||
#[prost(uint64, tag = "2")]
|
||||
pub short_id: u64,
|
||||
#[prost(string, tag = "3")]
|
||||
pub nick_name: String,
|
||||
#[prost(uint32, tag = "4")]
|
||||
pub gender: u32,
|
||||
#[prost(string, tag = "5")]
|
||||
pub signature: String,
|
||||
#[prost(uint32, tag = "6")]
|
||||
pub level: u32,
|
||||
#[prost(uint64, tag = "7")]
|
||||
pub birthday: u64,
|
||||
#[prost(string, tag = "8")]
|
||||
pub telephone: String,
|
||||
#[prost(message, optional, tag = "9")]
|
||||
pub avatar_thumb: Option<Image>,
|
||||
#[prost(message, optional, tag = "10")]
|
||||
pub avatar_medium: Option<Image>,
|
||||
#[prost(message, optional, tag = "11")]
|
||||
pub avatar_large: Option<Image>,
|
||||
#[prost(bool, tag = "12")]
|
||||
pub verified: bool,
|
||||
#[prost(uint32, tag = "13")]
|
||||
pub experience: u32,
|
||||
#[prost(string, tag = "14")]
|
||||
pub city: String,
|
||||
#[prost(int32, tag = "15")]
|
||||
pub status: i32,
|
||||
#[prost(uint64, tag = "16")]
|
||||
pub create_time: u64,
|
||||
#[prost(uint64, tag = "17")]
|
||||
pub modify_time: u64,
|
||||
#[prost(uint32, tag = "18")]
|
||||
pub secret: u32,
|
||||
#[prost(string, tag = "19")]
|
||||
pub share_qrcode_uri: String,
|
||||
#[prost(uint32, tag = "20")]
|
||||
pub income_share_percent: u32,
|
||||
#[prost(message, repeated, tag = "21")]
|
||||
pub badge_image_list: Vec<Image>,
|
||||
#[prost(message, optional, tag = "22")]
|
||||
pub follow_info: Option<FollowInfo>,
|
||||
#[prost(message, optional, tag = "23")]
|
||||
pub pay_grade: Option<PayGrade>,
|
||||
#[prost(message, optional, tag = "24")]
|
||||
pub fans_club: Option<FansClub>,
|
||||
#[prost(string, tag = "26")]
|
||||
pub special_id: String,
|
||||
#[prost(message, optional, tag = "27")]
|
||||
pub avatar_border: Option<Image>,
|
||||
#[prost(message, optional, tag = "28")]
|
||||
pub medal: Option<Image>,
|
||||
#[prost(message, repeated, tag = "29")]
|
||||
pub real_time_icons_list: Vec<Image>,
|
||||
#[prost(string, tag = "38")]
|
||||
pub display_id: String,
|
||||
#[prost(string, tag = "46")]
|
||||
pub sec_uid: String,
|
||||
#[prost(uint64, tag = "1022")]
|
||||
pub fan_ticket_count: u64,
|
||||
#[prost(string, tag = "1028")]
|
||||
pub id_str: String,
|
||||
#[prost(uint32, tag = "1045")]
|
||||
pub age_range: u32,
|
||||
}
|
||||
|
||||
#[derive(Message, PartialEq)]
|
||||
pub struct Image {
|
||||
#[prost(string, repeated, tag = "1")]
|
||||
pub url_list_list: Vec<String>,
|
||||
#[prost(string, tag = "2")]
|
||||
pub uri: String,
|
||||
#[prost(uint64, tag = "3")]
|
||||
pub height: u64,
|
||||
#[prost(uint64, tag = "4")]
|
||||
pub width: u64,
|
||||
#[prost(string, tag = "5")]
|
||||
pub avg_color: String,
|
||||
#[prost(uint32, tag = "6")]
|
||||
pub image_type: u32,
|
||||
#[prost(string, tag = "7")]
|
||||
pub open_web_url: String,
|
||||
#[prost(message, optional, tag = "8")]
|
||||
pub content: Option<ImageContent>,
|
||||
#[prost(bool, tag = "9")]
|
||||
pub is_animated: bool,
|
||||
#[prost(message, optional, tag = "10")]
|
||||
pub flex_setting_list: Option<NinePatchSetting>,
|
||||
#[prost(message, optional, tag = "11")]
|
||||
pub text_setting_list: Option<NinePatchSetting>,
|
||||
}
|
||||
|
||||
#[derive(Message, PartialEq)]
|
||||
pub struct ImageContent {
|
||||
#[prost(string, tag = "1")]
|
||||
pub name: String,
|
||||
#[prost(string, tag = "2")]
|
||||
pub font_color: String,
|
||||
#[prost(uint64, tag = "3")]
|
||||
pub level: u64,
|
||||
#[prost(string, tag = "4")]
|
||||
pub alternative_text: String,
|
||||
}
|
||||
|
||||
#[derive(Message, PartialEq)]
|
||||
pub struct NinePatchSetting {
|
||||
#[prost(string, repeated, tag = "1")]
|
||||
pub setting_list_list: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct FollowInfo {
|
||||
#[prost(uint64, tag = "1")]
|
||||
pub following_count: u64,
|
||||
#[prost(uint64, tag = "2")]
|
||||
pub follower_count: u64,
|
||||
#[prost(uint64, tag = "3")]
|
||||
pub follow_status: u64,
|
||||
#[prost(uint64, tag = "4")]
|
||||
pub push_status: u64,
|
||||
#[prost(string, tag = "5")]
|
||||
pub remark_name: String,
|
||||
#[prost(string, tag = "6")]
|
||||
pub follower_count_str: String,
|
||||
#[prost(string, tag = "7")]
|
||||
pub following_count_str: String,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct PayGrade {
|
||||
#[prost(int64, tag = "1")]
|
||||
pub total_diamond_count: i64,
|
||||
#[prost(message, optional, tag = "2")]
|
||||
pub diamond_icon: Option<Image>,
|
||||
#[prost(string, tag = "3")]
|
||||
pub name: String,
|
||||
#[prost(message, optional, tag = "4")]
|
||||
pub icon: Option<Image>,
|
||||
#[prost(string, tag = "5")]
|
||||
pub next_name: String,
|
||||
#[prost(int64, tag = "6")]
|
||||
pub level: i64,
|
||||
#[prost(message, optional, tag = "7")]
|
||||
pub next_icon: Option<Image>,
|
||||
#[prost(int64, tag = "8")]
|
||||
pub next_diamond: i64,
|
||||
#[prost(int64, tag = "9")]
|
||||
pub now_diamond: i64,
|
||||
#[prost(int64, tag = "10")]
|
||||
pub this_grade_min_diamond: i64,
|
||||
#[prost(int64, tag = "11")]
|
||||
pub this_grade_max_diamond: i64,
|
||||
#[prost(int64, tag = "12")]
|
||||
pub pay_diamond_bak: i64,
|
||||
#[prost(string, tag = "13")]
|
||||
pub grade_describe: String,
|
||||
#[prost(message, repeated, tag = "14")]
|
||||
pub grade_icon_list: Vec<GradeIcon>,
|
||||
#[prost(int64, tag = "15")]
|
||||
pub screen_chat_type: i64,
|
||||
#[prost(message, optional, tag = "16")]
|
||||
pub im_icon: Option<Image>,
|
||||
#[prost(message, optional, tag = "17")]
|
||||
pub im_icon_with_level: Option<Image>,
|
||||
#[prost(message, optional, tag = "18")]
|
||||
pub live_icon: Option<Image>,
|
||||
#[prost(message, optional, tag = "19")]
|
||||
pub new_im_icon_with_level: Option<Image>,
|
||||
#[prost(message, optional, tag = "20")]
|
||||
pub new_live_icon: Option<Image>,
|
||||
#[prost(int64, tag = "21")]
|
||||
pub upgrade_need_consume: i64,
|
||||
#[prost(string, tag = "22")]
|
||||
pub next_privileges: String,
|
||||
#[prost(message, optional, tag = "23")]
|
||||
pub background: Option<Image>,
|
||||
#[prost(message, optional, tag = "24")]
|
||||
pub background_back: Option<Image>,
|
||||
#[prost(int64, tag = "25")]
|
||||
pub score: i64,
|
||||
#[prost(message, optional, tag = "26")]
|
||||
pub buff_info: Option<GradeBuffInfo>,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct GradeIcon {
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub icon: Option<Image>,
|
||||
#[prost(int64, tag = "2")]
|
||||
pub icon_diamond: i64,
|
||||
#[prost(int64, tag = "3")]
|
||||
pub level: i64,
|
||||
#[prost(string, tag = "4")]
|
||||
pub level_str: String,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct GradeBuffInfo {}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct FansClub {
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub data: Option<FansClubData>,
|
||||
#[prost(map = "int32, message", tag = "2")]
|
||||
pub prefer_data: HashMap<i32, FansClubData>,
|
||||
}
|
||||
|
||||
#[derive(Message, PartialEq)]
|
||||
pub struct FansClubData {
|
||||
#[prost(string, tag = "1")]
|
||||
pub club_name: String,
|
||||
#[prost(int32, tag = "2")]
|
||||
pub level: i32,
|
||||
#[prost(int32, tag = "3")]
|
||||
pub user_fans_club_status: i32,
|
||||
#[prost(message, optional, tag = "4")]
|
||||
pub badge: Option<UserBadge>,
|
||||
#[prost(int64, repeated, tag = "5")]
|
||||
pub available_gift_ids: Vec<i64>,
|
||||
#[prost(int64, tag = "6")]
|
||||
pub anchor_id: i64,
|
||||
}
|
||||
|
||||
#[derive(Message, PartialEq)]
|
||||
pub struct UserBadge {
|
||||
#[prost(map = "int32, message", tag = "1")]
|
||||
pub icons: HashMap<i32, Image>,
|
||||
#[prost(string, tag = "2")]
|
||||
pub title: String,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct PublicAreaCommon {
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub user_label: Option<Image>,
|
||||
#[prost(uint64, tag = "2")]
|
||||
pub user_consume_in_room: u64,
|
||||
#[prost(uint64, tag = "3")]
|
||||
pub user_send_gift_cnt_in_room: u64,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct LandscapeAreaCommon {
|
||||
#[prost(bool, tag = "1")]
|
||||
pub show_head: bool,
|
||||
#[prost(bool, tag = "2")]
|
||||
pub show_nickname: bool,
|
||||
#[prost(bool, tag = "3")]
|
||||
pub show_font_color: bool,
|
||||
#[prost(string, repeated, tag = "4")]
|
||||
pub color_value_list: Vec<String>,
|
||||
#[prost(enumeration = "CommentTypeTag", repeated, tag = "5")]
|
||||
pub comment_type_tags_list: Vec<i32>,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct Text {
|
||||
#[prost(string, tag = "1")]
|
||||
pub key: String,
|
||||
#[prost(string, tag = "2")]
|
||||
pub default_patter: String,
|
||||
#[prost(message, optional, tag = "3")]
|
||||
pub default_format: Option<TextFormat>,
|
||||
#[prost(message, repeated, tag = "4")]
|
||||
pub pieces_list: Vec<TextPiece>,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct TextFormat {
|
||||
#[prost(string, tag = "1")]
|
||||
pub color: String,
|
||||
#[prost(bool, tag = "2")]
|
||||
pub bold: bool,
|
||||
#[prost(bool, tag = "3")]
|
||||
pub italic: bool,
|
||||
#[prost(uint32, tag = "4")]
|
||||
pub weight: u32,
|
||||
#[prost(uint32, tag = "5")]
|
||||
pub italic_angle: u32,
|
||||
#[prost(uint32, tag = "6")]
|
||||
pub font_size: u32,
|
||||
#[prost(bool, tag = "7")]
|
||||
pub use_heigh_light_color: bool,
|
||||
#[prost(bool, tag = "8")]
|
||||
pub use_remote_clor: bool,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct TextPiece {
|
||||
#[prost(bool, tag = "1")]
|
||||
pub r#type: bool,
|
||||
#[prost(message, optional, tag = "2")]
|
||||
pub format: Option<TextFormat>,
|
||||
#[prost(string, tag = "3")]
|
||||
pub string_value: String,
|
||||
#[prost(message, optional, tag = "4")]
|
||||
pub user_value: Option<TextPieceUser>,
|
||||
#[prost(message, optional, tag = "5")]
|
||||
pub gift_value: Option<TextPieceGift>,
|
||||
#[prost(message, optional, tag = "6")]
|
||||
pub heart_value: Option<TextPieceHeart>,
|
||||
#[prost(message, optional, tag = "7")]
|
||||
pub pattern_ref_value: Option<TextPiecePatternRef>,
|
||||
#[prost(message, optional, tag = "8")]
|
||||
pub image_value: Option<TextPieceImage>,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct TextPieceUser {
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub user: Option<User>,
|
||||
#[prost(bool, tag = "2")]
|
||||
pub with_colon: bool,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct TextPieceGift {
|
||||
#[prost(uint64, tag = "1")]
|
||||
pub gift_id: u64,
|
||||
#[prost(message, optional, tag = "2")]
|
||||
pub name_ref: Option<PatternRef>,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct PatternRef {
|
||||
#[prost(string, tag = "1")]
|
||||
pub key: String,
|
||||
#[prost(string, tag = "2")]
|
||||
pub default_pattern: String,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct TextPieceHeart {
|
||||
#[prost(string, tag = "1")]
|
||||
pub color: String,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct TextPiecePatternRef {
|
||||
#[prost(string, tag = "1")]
|
||||
pub key: String,
|
||||
#[prost(string, tag = "2")]
|
||||
pub default_pattern: String,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct TextPieceImage {
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub image: Option<Image>,
|
||||
#[prost(float, tag = "2")]
|
||||
pub scaling_rate: f32,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
|
||||
#[repr(i32)]
|
||||
pub enum CommentTypeTag {
|
||||
CommentTypeTagUnknown = 0,
|
||||
CommentTypeTagStar = 1,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct DouyinChatMessage {
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub common: Option<Common>,
|
||||
#[prost(message, optional, tag = "2")]
|
||||
pub user: Option<User>,
|
||||
#[prost(string, tag = "3")]
|
||||
pub content: String,
|
||||
#[prost(bool, tag = "4")]
|
||||
pub visible_to_sender: bool,
|
||||
#[prost(message, optional, tag = "5")]
|
||||
pub background_image: Option<Image>,
|
||||
#[prost(string, tag = "6")]
|
||||
pub full_screen_text_color: String,
|
||||
#[prost(message, optional, tag = "7")]
|
||||
pub background_image_v2: Option<Image>,
|
||||
#[prost(message, optional, tag = "9")]
|
||||
pub public_area_common: Option<PublicAreaCommon>,
|
||||
#[prost(message, optional, tag = "10")]
|
||||
pub gift_image: Option<Image>,
|
||||
#[prost(uint64, tag = "11")]
|
||||
pub agree_msg_id: u64,
|
||||
#[prost(uint32, tag = "12")]
|
||||
pub priority_level: u32,
|
||||
#[prost(message, optional, tag = "13")]
|
||||
pub landscape_area_common: Option<LandscapeAreaCommon>,
|
||||
#[prost(uint64, tag = "15")]
|
||||
pub event_time: u64,
|
||||
#[prost(bool, tag = "16")]
|
||||
pub send_review: bool,
|
||||
#[prost(bool, tag = "17")]
|
||||
pub from_intercom: bool,
|
||||
#[prost(bool, tag = "18")]
|
||||
pub intercom_hide_user_card: bool,
|
||||
#[prost(string, tag = "20")]
|
||||
pub chat_by: String,
|
||||
#[prost(uint32, tag = "21")]
|
||||
pub individual_chat_priority: u32,
|
||||
#[prost(message, optional, tag = "22")]
|
||||
pub rtf_content: Option<Text>,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct GiftMessage {
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub common: Option<Common>,
|
||||
#[prost(uint64, tag = "2")]
|
||||
pub gift_id: u64,
|
||||
#[prost(uint64, tag = "3")]
|
||||
pub fan_ticket_count: u64,
|
||||
#[prost(uint64, tag = "4")]
|
||||
pub group_count: u64,
|
||||
#[prost(uint64, tag = "5")]
|
||||
pub repeat_count: u64,
|
||||
#[prost(uint64, tag = "6")]
|
||||
pub combo_count: u64,
|
||||
#[prost(message, optional, tag = "7")]
|
||||
pub user: Option<User>,
|
||||
#[prost(message, optional, tag = "8")]
|
||||
pub to_user: Option<User>,
|
||||
#[prost(uint32, tag = "9")]
|
||||
pub repeat_end: u32,
|
||||
#[prost(message, optional, tag = "10")]
|
||||
pub text_effect: Option<TextEffect>,
|
||||
#[prost(uint64, tag = "11")]
|
||||
pub group_id: u64,
|
||||
#[prost(uint64, tag = "12")]
|
||||
pub income_taskgifts: u64,
|
||||
#[prost(uint64, tag = "13")]
|
||||
pub room_fan_ticket_count: u64,
|
||||
#[prost(message, optional, tag = "14")]
|
||||
pub priority: Option<GiftIMPriority>,
|
||||
#[prost(message, optional, tag = "15")]
|
||||
pub gift: Option<GiftStruct>,
|
||||
#[prost(string, tag = "16")]
|
||||
pub log_id: String,
|
||||
#[prost(uint64, tag = "17")]
|
||||
pub send_type: u64,
|
||||
#[prost(message, optional, tag = "18")]
|
||||
pub public_area_common: Option<PublicAreaCommon>,
|
||||
#[prost(message, optional, tag = "19")]
|
||||
pub tray_display_text: Option<Text>,
|
||||
#[prost(uint64, tag = "20")]
|
||||
pub banned_display_effects: u64,
|
||||
#[prost(bool, tag = "25")]
|
||||
pub display_for_self: bool,
|
||||
#[prost(string, tag = "26")]
|
||||
pub interact_gift_info: String,
|
||||
#[prost(string, tag = "27")]
|
||||
pub diy_item_info: String,
|
||||
#[prost(uint64, repeated, tag = "28")]
|
||||
pub min_asset_set_list: Vec<u64>,
|
||||
#[prost(uint64, tag = "29")]
|
||||
pub total_count: u64,
|
||||
#[prost(uint32, tag = "30")]
|
||||
pub client_gift_source: u32,
|
||||
#[prost(uint64, repeated, tag = "32")]
|
||||
pub to_user_ids_list: Vec<u64>,
|
||||
#[prost(uint64, tag = "33")]
|
||||
pub send_time: u64,
|
||||
#[prost(uint64, tag = "34")]
|
||||
pub force_display_effects: u64,
|
||||
#[prost(string, tag = "35")]
|
||||
pub trace_id: String,
|
||||
#[prost(uint64, tag = "36")]
|
||||
pub effect_display_ts: u64,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct GiftStruct {
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub image: Option<Image>,
|
||||
#[prost(string, tag = "2")]
|
||||
pub describe: String,
|
||||
#[prost(bool, tag = "3")]
|
||||
pub notify: bool,
|
||||
#[prost(uint64, tag = "4")]
|
||||
pub duration: u64,
|
||||
#[prost(uint64, tag = "5")]
|
||||
pub id: u64,
|
||||
#[prost(bool, tag = "7")]
|
||||
pub for_linkmic: bool,
|
||||
#[prost(bool, tag = "8")]
|
||||
pub doodle: bool,
|
||||
#[prost(bool, tag = "9")]
|
||||
pub for_fansclub: bool,
|
||||
#[prost(bool, tag = "10")]
|
||||
pub combo: bool,
|
||||
#[prost(uint32, tag = "11")]
|
||||
pub r#type: u32,
|
||||
#[prost(uint32, tag = "12")]
|
||||
pub diamond_count: u32,
|
||||
#[prost(bool, tag = "13")]
|
||||
pub is_displayed_on_panel: bool,
|
||||
#[prost(uint64, tag = "14")]
|
||||
pub primary_effect_id: u64,
|
||||
#[prost(message, optional, tag = "15")]
|
||||
pub gift_label_icon: Option<Image>,
|
||||
#[prost(string, tag = "16")]
|
||||
pub name: String,
|
||||
#[prost(string, tag = "17")]
|
||||
pub region: String,
|
||||
#[prost(string, tag = "18")]
|
||||
pub manual: String,
|
||||
#[prost(bool, tag = "19")]
|
||||
pub for_custom: bool,
|
||||
#[prost(message, optional, tag = "21")]
|
||||
pub icon: Option<Image>,
|
||||
#[prost(uint32, tag = "22")]
|
||||
pub action_type: u32,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct GiftIMPriority {
|
||||
#[prost(uint64, repeated, tag = "1")]
|
||||
pub queue_sizes_list: Vec<u64>,
|
||||
#[prost(uint64, tag = "2")]
|
||||
pub self_queue_priority: u64,
|
||||
#[prost(uint64, tag = "3")]
|
||||
pub priority: u64,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct TextEffect {
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub portrait: Option<TextEffectDetail>,
|
||||
#[prost(message, optional, tag = "2")]
|
||||
pub landscape: Option<TextEffectDetail>,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct TextEffectDetail {
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub text: Option<Text>,
|
||||
#[prost(uint32, tag = "2")]
|
||||
pub text_font_size: u32,
|
||||
#[prost(message, optional, tag = "3")]
|
||||
pub background: Option<Image>,
|
||||
#[prost(uint32, tag = "4")]
|
||||
pub start: u32,
|
||||
#[prost(uint32, tag = "5")]
|
||||
pub duration: u32,
|
||||
#[prost(uint32, tag = "6")]
|
||||
pub x: u32,
|
||||
#[prost(uint32, tag = "7")]
|
||||
pub y: u32,
|
||||
#[prost(uint32, tag = "8")]
|
||||
pub width: u32,
|
||||
#[prost(uint32, tag = "9")]
|
||||
pub height: u32,
|
||||
#[prost(uint32, tag = "10")]
|
||||
pub shadow_dx: u32,
|
||||
#[prost(uint32, tag = "11")]
|
||||
pub shadow_dy: u32,
|
||||
#[prost(uint32, tag = "12")]
|
||||
pub shadow_radius: u32,
|
||||
#[prost(string, tag = "13")]
|
||||
pub shadow_color: String,
|
||||
#[prost(string, tag = "14")]
|
||||
pub stroke_color: String,
|
||||
#[prost(uint32, tag = "15")]
|
||||
pub stroke_width: u32,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct LikeMessage {
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub common: Option<Common>,
|
||||
#[prost(uint64, tag = "2")]
|
||||
pub count: u64,
|
||||
#[prost(uint64, tag = "3")]
|
||||
pub total: u64,
|
||||
#[prost(uint64, tag = "4")]
|
||||
pub color: u64,
|
||||
#[prost(message, optional, tag = "5")]
|
||||
pub user: Option<User>,
|
||||
#[prost(string, tag = "6")]
|
||||
pub icon: String,
|
||||
#[prost(message, optional, tag = "7")]
|
||||
pub double_like_detail: Option<DoubleLikeDetail>,
|
||||
#[prost(message, optional, tag = "8")]
|
||||
pub display_control_info: Option<DisplayControlInfo>,
|
||||
#[prost(uint64, tag = "9")]
|
||||
pub linkmic_guest_uid: u64,
|
||||
#[prost(string, tag = "10")]
|
||||
pub scene: String,
|
||||
#[prost(message, optional, tag = "11")]
|
||||
pub pico_display_info: Option<PicoDisplayInfo>,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct DoubleLikeDetail {
|
||||
#[prost(bool, tag = "1")]
|
||||
pub double_flag: bool,
|
||||
#[prost(uint32, tag = "2")]
|
||||
pub seq_id: u32,
|
||||
#[prost(uint32, tag = "3")]
|
||||
pub renewals_num: u32,
|
||||
#[prost(uint32, tag = "4")]
|
||||
pub triggers_num: u32,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct DisplayControlInfo {
|
||||
#[prost(bool, tag = "1")]
|
||||
pub show_text: bool,
|
||||
#[prost(bool, tag = "2")]
|
||||
pub show_icons: bool,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct PicoDisplayInfo {
|
||||
#[prost(uint64, tag = "1")]
|
||||
pub combo_sum_count: u64,
|
||||
#[prost(string, tag = "2")]
|
||||
pub emoji: String,
|
||||
#[prost(message, optional, tag = "3")]
|
||||
pub emoji_icon: Option<Image>,
|
||||
#[prost(string, tag = "4")]
|
||||
pub emoji_text: String,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct MemberMessage {
|
||||
#[prost(message, optional, tag = "1")]
|
||||
pub common: Option<Common>,
|
||||
#[prost(message, optional, tag = "2")]
|
||||
pub user: Option<User>,
|
||||
#[prost(uint64, tag = "3")]
|
||||
pub member_count: u64,
|
||||
#[prost(message, optional, tag = "4")]
|
||||
pub operator: Option<User>,
|
||||
#[prost(bool, tag = "5")]
|
||||
pub is_set_to_admin: bool,
|
||||
#[prost(bool, tag = "6")]
|
||||
pub is_top_user: bool,
|
||||
#[prost(uint64, tag = "7")]
|
||||
pub rank_score: u64,
|
||||
#[prost(uint64, tag = "8")]
|
||||
pub top_user_no: u64,
|
||||
#[prost(uint64, tag = "9")]
|
||||
pub enter_type: u64,
|
||||
#[prost(uint64, tag = "10")]
|
||||
pub action: u64,
|
||||
#[prost(string, tag = "11")]
|
||||
pub action_description: String,
|
||||
#[prost(uint64, tag = "12")]
|
||||
pub user_id: u64,
|
||||
#[prost(message, optional, tag = "13")]
|
||||
pub effect_config: Option<EffectConfig>,
|
||||
#[prost(string, tag = "14")]
|
||||
pub pop_str: String,
|
||||
#[prost(message, optional, tag = "15")]
|
||||
pub enter_effect_config: Option<EffectConfig>,
|
||||
#[prost(message, optional, tag = "16")]
|
||||
pub background_image: Option<Image>,
|
||||
#[prost(message, optional, tag = "17")]
|
||||
pub background_image_v2: Option<Image>,
|
||||
#[prost(message, optional, tag = "18")]
|
||||
pub anchor_display_text: Option<Text>,
|
||||
#[prost(message, optional, tag = "19")]
|
||||
pub public_area_common: Option<PublicAreaCommon>,
|
||||
#[prost(uint64, tag = "20")]
|
||||
pub user_enter_tip_type: u64,
|
||||
#[prost(uint64, tag = "21")]
|
||||
pub anchor_enter_tip_type: u64,
|
||||
}
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct EffectConfig {
|
||||
#[prost(uint64, tag = "1")]
|
||||
pub r#type: u64,
|
||||
#[prost(message, optional, tag = "2")]
|
||||
pub icon: Option<Image>,
|
||||
#[prost(uint64, tag = "3")]
|
||||
pub avatar_pos: u64,
|
||||
#[prost(message, optional, tag = "4")]
|
||||
pub text: Option<Text>,
|
||||
#[prost(message, optional, tag = "5")]
|
||||
pub text_icon: Option<Image>,
|
||||
#[prost(uint32, tag = "6")]
|
||||
pub stay_time: u32,
|
||||
#[prost(uint64, tag = "7")]
|
||||
pub anim_asset_id: u64,
|
||||
#[prost(message, optional, tag = "8")]
|
||||
pub badge: Option<Image>,
|
||||
#[prost(uint64, repeated, tag = "9")]
|
||||
pub flex_setting_array_list: Vec<u64>,
|
||||
#[prost(message, optional, tag = "10")]
|
||||
pub text_icon_overlay: Option<Image>,
|
||||
#[prost(message, optional, tag = "11")]
|
||||
pub animated_badge: Option<Image>,
|
||||
#[prost(bool, tag = "12")]
|
||||
pub has_sweep_light: bool,
|
||||
#[prost(uint64, repeated, tag = "13")]
|
||||
pub text_flex_setting_array_list: Vec<u64>,
|
||||
#[prost(uint64, tag = "14")]
|
||||
pub center_anim_asset_id: u64,
|
||||
#[prost(message, optional, tag = "15")]
|
||||
pub dynamic_image: Option<Image>,
|
||||
#[prost(map = "string, string", tag = "16")]
|
||||
pub extra_map: HashMap<String, String>,
|
||||
#[prost(uint64, tag = "17")]
|
||||
pub mp4_anim_asset_id: u64,
|
||||
#[prost(uint64, tag = "18")]
|
||||
pub priority: u64,
|
||||
#[prost(uint64, tag = "19")]
|
||||
pub max_wait_time: u64,
|
||||
#[prost(string, tag = "20")]
|
||||
pub dress_id: String,
|
||||
#[prost(uint64, tag = "21")]
|
||||
pub alignment: u64,
|
||||
#[prost(uint64, tag = "22")]
|
||||
pub alignment_offset: u64,
|
||||
}
|
||||
|
||||
// message PushFrame {
|
||||
// uint64 seqId = 1;
|
||||
// uint64 logId = 2;
|
||||
// uint64 service = 3;
|
||||
// uint64 method = 4;
|
||||
// repeated HeadersList headersList = 5;
|
||||
// string payloadEncoding = 6;
|
||||
// string payloadType = 7;
|
||||
// bytes payload = 8;
|
||||
// }
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct PushFrame {
|
||||
#[prost(uint64, tag = "1")]
|
||||
pub seq_id: u64,
|
||||
#[prost(uint64, tag = "2")]
|
||||
pub log_id: u64,
|
||||
#[prost(uint64, tag = "3")]
|
||||
pub service: u64,
|
||||
#[prost(uint64, tag = "4")]
|
||||
pub method: u64,
|
||||
#[prost(message, repeated, tag = "5")]
|
||||
pub headers_list: Vec<HeadersList>,
|
||||
#[prost(string, tag = "6")]
|
||||
pub payload_encoding: String,
|
||||
#[prost(string, tag = "7")]
|
||||
pub payload_type: String,
|
||||
#[prost(bytes, tag = "8")]
|
||||
pub payload: Vec<u8>,
|
||||
}
|
||||
|
||||
// message HeadersList {
|
||||
// string key = 1;
|
||||
// string value = 2;
|
||||
// }
|
||||
|
||||
#[derive(Message)]
|
||||
pub struct HeadersList {
|
||||
#[prost(string, tag = "1")]
|
||||
pub key: String,
|
||||
#[prost(string, tag = "2")]
|
||||
pub value: String,
|
||||
}
|
||||
13167
src-tauri/crates/danmu_stream/src/provider/douyin/webmssdk.js
Normal file
@@ -1 +1 @@
|
||||
{"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main","Live*"],"permissions":["core:default","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-copy-file","fs:allow-mkdir","fs:allow-remove","fs:allow-remove","fs:allow-rename","fs:allow-exists",{"identifier":"fs:scope","allow":["**"]},"core:window:default","core:window:allow-start-dragging","core:window:allow-close","core:window:allow-minimize","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-set-title","sql:allow-execute","shell:allow-open","dialog:allow-open","dialog:allow-save","dialog:allow-message","dialog:allow-ask","dialog:allow-confirm",{"identifier":"http:default","allow":[{"url":"https://*.hdslb.com/"},{"url":"https://afdian.com/"},{"url":"https://*.afdiancdn.com/"},{"url":"https://*.douyin.com/"},{"url":"https://*.douyinpic.com/"}]},"dialog:default","shell:default","fs:default","http:default","sql:default","os:default","notification:default","dialog:default","fs:default","http:default","shell:default","sql:default","os:default","dialog:default"]}}
|
||||
{"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main","Live*","Clip*"],"permissions":["core:default","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-copy-file","fs:allow-mkdir","fs:allow-remove","fs:allow-remove","fs:allow-rename","fs:allow-exists",{"identifier":"fs:scope","allow":["**"]},"core:window:default","core:window:allow-start-dragging","core:window:allow-close","core:window:allow-minimize","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-set-title","sql:allow-execute","shell:allow-open","dialog:allow-open","dialog:allow-save","dialog:allow-message","dialog:allow-ask","dialog:allow-confirm",{"identifier":"http:default","allow":[{"url":"https://*.hdslb.com/"},{"url":"https://afdian.com/"},{"url":"https://*.afdiancdn.com/"},{"url":"https://*.douyin.com/"},{"url":"https://*.douyinpic.com/"}]},"dialog:default","shell:default","fs:default","http:default","sql:default","os:default","notification:default","dialog:default","fs:default","http:default","shell:default","sql:default","os:default","dialog:default","deep-link:default"]}}
|
||||
@@ -37,7 +37,7 @@
|
||||
],
|
||||
"definitions": {
|
||||
"Capability": {
|
||||
"description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\nIt controls application windows' and webviews' fine grained access to the Tauri core, application, or plugin commands. If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\nThis can be done to create groups of windows, based on their required system access, which can reduce impact of frontend vulnerabilities in less privileged windows. Windows can be added to a capability by exact name (e.g. `main-window`) or glob patterns like `*` or `admin-*`. A Window can have none, one, or multiple associated capabilities.\n\n## Example\n\n```json { \"identifier\": \"main-user-files-write\", \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programatic access to files selected by the user.\", \"windows\": [ \"main\" ], \"permissions\": [ \"core:default\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] }, ], \"platforms\": [\"macOS\",\"windows\"] } ```",
|
||||
"description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\nIt controls application windows' and webviews' fine grained access to the Tauri core, application, or plugin commands. If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\nThis can be done to create groups of windows, based on their required system access, which can reduce impact of frontend vulnerabilities in less privileged windows. Windows can be added to a capability by exact name (e.g. `main-window`) or glob patterns like `*` or `admin-*`. A Window can have none, one, or multiple associated capabilities.\n\n## Example\n\n```json { \"identifier\": \"main-user-files-write\", \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.\", \"windows\": [ \"main\" ], \"permissions\": [ \"core:default\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] }, ], \"platforms\": [\"macOS\",\"windows\"] } ```",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"identifier",
|
||||
@@ -49,7 +49,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"description": "Description of what the capability is intended to allow on associated windows.\n\nIt should contain a description of what the grouped permissions should allow.\n\n## Example\n\nThis capability allows the `main` window access to `filesystem` write related commands and `dialog` commands to enable programatic access to files selected by the user.",
|
||||
"description": "Description of what the capability is intended to allow on associated windows.\n\nIt should contain a description of what the grouped permissions should allow.\n\n## Example\n\nThis capability allows the `main` window access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.",
|
||||
"default": "",
|
||||
"type": "string"
|
||||
},
|
||||
@@ -3152,6 +3152,12 @@
|
||||
"const": "core:webview:allow-reparent",
|
||||
"markdownDescription": "Enables the reparent command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the set_webview_auto_resize command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "core:webview:allow-set-webview-auto-resize",
|
||||
"markdownDescription": "Enables the set_webview_auto_resize command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the set_webview_background_color command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
@@ -3254,6 +3260,12 @@
|
||||
"const": "core:webview:deny-reparent",
|
||||
"markdownDescription": "Denies the reparent command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the set_webview_auto_resize command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "core:webview:deny-set-webview-auto-resize",
|
||||
"markdownDescription": "Denies the set_webview_auto_resize command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the set_webview_background_color command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
@@ -4208,6 +4220,60 @@
|
||||
"const": "core:window:deny-unminimize",
|
||||
"markdownDescription": "Denies the unminimize command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Allows reading the opened deep link via the get_current command\n#### This default permission set includes:\n\n- `allow-get-current`",
|
||||
"type": "string",
|
||||
"const": "deep-link:default",
|
||||
"markdownDescription": "Allows reading the opened deep link via the get_current command\n#### This default permission set includes:\n\n- `allow-get-current`"
|
||||
},
|
||||
{
|
||||
"description": "Enables the get_current command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:allow-get-current",
|
||||
"markdownDescription": "Enables the get_current command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the is_registered command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:allow-is-registered",
|
||||
"markdownDescription": "Enables the is_registered command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the register command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:allow-register",
|
||||
"markdownDescription": "Enables the register command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the unregister command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:allow-unregister",
|
||||
"markdownDescription": "Enables the unregister command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the get_current command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:deny-get-current",
|
||||
"markdownDescription": "Denies the get_current command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the is_registered command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:deny-is-registered",
|
||||
"markdownDescription": "Denies the is_registered command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the register command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:deny-register",
|
||||
"markdownDescription": "Denies the register command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the unregister command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:deny-unregister",
|
||||
"markdownDescription": "Denies the unregister command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "This permission set configures the types of dialogs\navailable from the dialog plugin.\n\n#### Granted Permissions\n\nAll dialog types are enabled.\n\n\n\n#### This default permission set includes:\n\n- `allow-ask`\n- `allow-confirm`\n- `allow-message`\n- `allow-save`\n- `allow-open`",
|
||||
"type": "string",
|
||||
|
||||
@@ -15,16 +15,28 @@ pub struct Config {
|
||||
pub post_notify: bool,
|
||||
#[serde(default = "default_auto_subtitle")]
|
||||
pub auto_subtitle: bool,
|
||||
#[serde(default = "default_subtitle_generator_type")]
|
||||
pub subtitle_generator_type: String,
|
||||
#[serde(default = "default_whisper_model")]
|
||||
pub whisper_model: String,
|
||||
#[serde(default = "default_whisper_prompt")]
|
||||
pub whisper_prompt: String,
|
||||
#[serde(default = "default_openai_api_endpoint")]
|
||||
pub openai_api_endpoint: String,
|
||||
#[serde(default = "default_openai_api_key")]
|
||||
pub openai_api_key: String,
|
||||
#[serde(default = "default_clip_name_format")]
|
||||
pub clip_name_format: String,
|
||||
#[serde(default = "default_auto_generate_config")]
|
||||
pub auto_generate: AutoGenerateConfig,
|
||||
#[serde(default = "default_status_check_interval")]
|
||||
pub status_check_interval: u64,
|
||||
#[serde(skip)]
|
||||
pub config_path: String,
|
||||
#[serde(default = "default_whisper_language")]
|
||||
pub whisper_language: String,
|
||||
#[serde(default = "default_user_agent")]
|
||||
pub user_agent: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Clone)]
|
||||
@@ -37,6 +49,10 @@ fn default_auto_subtitle() -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn default_subtitle_generator_type() -> String {
|
||||
"whisper".to_string()
|
||||
}
|
||||
|
||||
fn default_whisper_model() -> String {
|
||||
"whisper_model.bin".to_string()
|
||||
}
|
||||
@@ -45,6 +61,14 @@ fn default_whisper_prompt() -> String {
|
||||
"这是一段中文 你们好".to_string()
|
||||
}
|
||||
|
||||
fn default_openai_api_endpoint() -> String {
|
||||
"https://api.openai.com/v1".to_string()
|
||||
}
|
||||
|
||||
fn default_openai_api_key() -> String {
|
||||
"".to_string()
|
||||
}
|
||||
|
||||
fn default_clip_name_format() -> String {
|
||||
"[{room_id}][{live_id}][{title}][{created_at}].mp4".to_string()
|
||||
}
|
||||
@@ -56,11 +80,23 @@ fn default_auto_generate_config() -> AutoGenerateConfig {
|
||||
}
|
||||
}
|
||||
|
||||
fn default_status_check_interval() -> u64 {
|
||||
30
|
||||
}
|
||||
|
||||
fn default_whisper_language() -> String {
|
||||
"auto".to_string()
|
||||
}
|
||||
|
||||
fn default_user_agent() -> String {
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36".to_string()
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn load(
|
||||
config_path: &PathBuf,
|
||||
default_cache: &PathBuf,
|
||||
default_output: &PathBuf,
|
||||
default_cache: &Path,
|
||||
default_output: &Path,
|
||||
) -> Result<Self, String> {
|
||||
if let Ok(content) = std::fs::read_to_string(config_path) {
|
||||
if let Ok(mut config) = toml::from_str::<Config>(&content) {
|
||||
@@ -83,13 +119,21 @@ impl Config {
|
||||
clip_notify: true,
|
||||
post_notify: true,
|
||||
auto_subtitle: false,
|
||||
whisper_model: "whisper_model.bin".to_string(),
|
||||
whisper_prompt: "这是一段中文 你们好".to_string(),
|
||||
clip_name_format: "[{room_id}][{live_id}][{title}][{created_at}].mp4".to_string(),
|
||||
subtitle_generator_type: default_subtitle_generator_type(),
|
||||
whisper_model: default_whisper_model(),
|
||||
whisper_prompt: default_whisper_prompt(),
|
||||
openai_api_endpoint: default_openai_api_endpoint(),
|
||||
openai_api_key: default_openai_api_key(),
|
||||
clip_name_format: default_clip_name_format(),
|
||||
auto_generate: default_auto_generate_config(),
|
||||
status_check_interval: default_status_check_interval(),
|
||||
config_path: config_path.to_str().unwrap().into(),
|
||||
whisper_language: default_whisper_language(),
|
||||
user_agent: default_user_agent(),
|
||||
};
|
||||
|
||||
config.save();
|
||||
|
||||
Ok(config)
|
||||
}
|
||||
|
||||
@@ -100,16 +144,30 @@ impl Config {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn set_cache_path(&mut self, path: &str) {
|
||||
self.cache = path.to_string();
|
||||
self.save();
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn set_output_path(&mut self, path: &str) {
|
||||
self.output = path.into();
|
||||
self.save();
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn set_whisper_language(&mut self, language: &str) {
|
||||
self.whisper_language = language.to_string();
|
||||
self.save();
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn set_user_agent(&mut self, user_agent: &str) {
|
||||
self.user_agent = user_agent.to_string();
|
||||
self.save();
|
||||
}
|
||||
|
||||
pub fn generate_clip_name(&self, params: &ClipRangeParams) -> PathBuf {
|
||||
let platform = PlatformType::from_str(&params.platform).unwrap();
|
||||
|
||||
|
||||
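Every new field carries a `#[serde(default = "...")]` function, so config files written by an older release still deserialize and simply pick up the defaults; a small self-contained sketch of that pattern (the `Demo` struct is hypothetical, not from the repo, and only the serde and toml crates are assumed):

```rust
use serde::Deserialize;

fn default_whisper_language() -> String {
    "auto".to_string()
}

#[derive(Debug, Deserialize)]
struct Demo {
    cache: String,
    // Absent from older config files; serde fills it in from the default fn.
    #[serde(default = "default_whisper_language")]
    whisper_language: String,
}

fn main() {
    // An "old" config without the new key still parses cleanly.
    let demo: Demo = toml::from_str(r#"cache = "./cache""#).unwrap();
    assert_eq!(demo.whisper_language, "auto");
}
```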
@@ -7,6 +7,7 @@ pub mod account;
|
||||
pub mod message;
|
||||
pub mod record;
|
||||
pub mod recorder;
|
||||
pub mod task;
|
||||
pub mod video;
|
||||
|
||||
pub struct Database {
|
||||
|
||||
@@ -3,12 +3,14 @@ use crate::recorder::PlatformType;
|
||||
use super::Database;
|
||||
use super::DatabaseError;
|
||||
use chrono::Utc;
|
||||
use rand::seq::SliceRandom;
|
||||
use rand::Rng;
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, sqlx::FromRow)]
|
||||
pub struct AccountRow {
|
||||
pub platform: String,
|
||||
pub uid: u64,
|
||||
pub uid: u64, // Keep for Bilibili compatibility
|
||||
pub id_str: Option<String>, // New field for string IDs like Douyin sec_uid
|
||||
pub name: String,
|
||||
pub avatar: String,
|
||||
pub csrf: String,
|
||||
@@ -19,7 +21,11 @@ pub struct AccountRow {
|
||||
// accounts
|
||||
impl Database {
|
||||
// CREATE TABLE accounts (uid INTEGER PRIMARY KEY, name TEXT, avatar TEXT, csrf TEXT, cookies TEXT, created_at TEXT);
|
||||
pub async fn add_account(&self, platform: &str, cookies: &str) -> Result<AccountRow, DatabaseError> {
|
||||
pub async fn add_account(
|
||||
&self,
|
||||
platform: &str,
|
||||
cookies: &str,
|
||||
) -> Result<AccountRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let platform = PlatformType::from_str(platform).unwrap();
|
||||
|
||||
@@ -45,9 +51,10 @@ impl Database {
|
||||
return Err(DatabaseError::InvalidCookiesError);
|
||||
}
|
||||
|
||||
// parse uid
|
||||
let uid = if platform == PlatformType::BiliBili {
|
||||
cookies
|
||||
// parse uid and id_str based on platform
|
||||
let (uid, id_str) = if platform == PlatformType::BiliBili {
|
||||
// For Bilibili, extract numeric uid from cookies
|
||||
let uid = cookies
|
||||
.split("DedeUserID=")
|
||||
.collect::<Vec<&str>>()
|
||||
.get(1)
|
||||
@@ -58,15 +65,18 @@ impl Database {
|
||||
.unwrap()
|
||||
.to_string()
|
||||
.parse::<u64>()
|
||||
.map_err(|_| DatabaseError::InvalidCookiesError)?
|
||||
.map_err(|_| DatabaseError::InvalidCookiesError)?;
|
||||
(uid, None)
|
||||
} else {
|
||||
// generate a random uid
|
||||
rand::thread_rng().gen_range(10000..=i32::MAX) as u64
|
||||
// For Douyin, use temporary uid and will set id_str later with real sec_uid
|
||||
let temp_uid = rand::thread_rng().gen_range(10000..=i32::MAX) as u64;
|
||||
(temp_uid, Some(format!("temp_{}", temp_uid)))
|
||||
};
|
||||
|
||||
let account = AccountRow {
|
||||
platform: platform.as_str().to_string(),
|
||||
uid,
|
||||
id_str,
|
||||
name: "".into(),
|
||||
avatar: "".into(),
|
||||
csrf: csrf.unwrap(),
|
||||
@@ -74,7 +84,7 @@ impl Database {
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
};
|
||||
|
||||
sqlx::query("INSERT INTO accounts (uid, platform, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7)").bind(account.uid as i64).bind(&account.platform).bind(&account.name).bind(&account.avatar).bind(&account.csrf).bind(&account.cookies).bind(&account.created_at).execute(&lock).await?;
|
||||
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)").bind(account.uid as i64).bind(&account.platform).bind(&account.id_str).bind(&account.name).bind(&account.avatar).bind(&account.csrf).bind(&account.cookies).bind(&account.created_at).execute(&lock).await?;
|
||||
|
||||
Ok(account)
|
||||
}
|
||||
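For Bilibili the numeric uid is lifted out of the `DedeUserID` cookie, while Douyin accounts start with a placeholder uid until the real `sec_uid` arrives; the cookie parsing boils down to something like this standalone sketch (illustrative only, not the exact splitting used above):

```rust
// Hypothetical helper mirroring the DedeUserID extraction above.
fn parse_dede_user_id(cookies: &str) -> Option<u64> {
    cookies
        .split(';')
        .map(str::trim)
        .find_map(|kv| kv.strip_prefix("DedeUserID="))
        .and_then(|v| v.parse::<u64>().ok())
}

fn main() {
    let cookies = "buvid3=abc; DedeUserID=12345; SESSDATA=xyz";
    assert_eq!(parse_dede_user_id(cookies), Some(12345));
}
```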
@@ -100,19 +110,67 @@ impl Database {
|
||||
avatar: &str,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let sql = sqlx::query("UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4")
|
||||
.bind(name)
|
||||
.bind(avatar)
|
||||
.bind(uid as i64)
|
||||
.bind(platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
let sql = sqlx::query(
|
||||
"UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4",
|
||||
)
|
||||
.bind(name)
|
||||
.bind(avatar)
|
||||
.bind(uid as i64)
|
||||
.bind(platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
if sql.rows_affected() != 1 {
|
||||
return Err(DatabaseError::NotFoundError);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_account_with_id_str(
|
||||
&self,
|
||||
old_account: &AccountRow,
|
||||
new_id_str: &str,
|
||||
name: &str,
|
||||
avatar: &str,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
|
||||
// If the id_str changed, we need to delete the old record and create a new one
|
||||
if old_account.id_str.as_deref() != Some(new_id_str) {
|
||||
// Delete the old record (for Douyin accounts, we use uid to identify)
|
||||
sqlx::query("DELETE FROM accounts WHERE uid = $1 and platform = $2")
|
||||
.bind(old_account.uid as i64)
|
||||
.bind(&old_account.platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
|
||||
// Insert the new record with updated id_str
|
||||
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)")
|
||||
.bind(old_account.uid as i64)
|
||||
.bind(&old_account.platform)
|
||||
.bind(new_id_str)
|
||||
.bind(name)
|
||||
.bind(avatar)
|
||||
.bind(&old_account.csrf)
|
||||
.bind(&old_account.cookies)
|
||||
.bind(&old_account.created_at)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
} else {
|
||||
// id_str is the same, just update name and avatar
|
||||
sqlx::query(
|
||||
"UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4",
|
||||
)
|
||||
.bind(name)
|
||||
.bind(avatar)
|
||||
.bind(old_account.uid as i64)
|
||||
.bind(&old_account.platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_accounts(&self) -> Result<Vec<AccountRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, AccountRow>("SELECT * FROM accounts")
|
||||
@@ -122,20 +180,30 @@ impl Database {
|
||||
|
||||
pub async fn get_account(&self, platform: &str, uid: u64) -> Result<AccountRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(
|
||||
sqlx::query_as::<_, AccountRow>("SELECT * FROM accounts WHERE uid = $1 and platform = $2")
|
||||
.bind(uid as i64)
|
||||
.bind(platform)
|
||||
.fetch_one(&lock)
|
||||
.await?,
|
||||
Ok(sqlx::query_as::<_, AccountRow>(
|
||||
"SELECT * FROM accounts WHERE uid = $1 and platform = $2",
|
||||
)
|
||||
.bind(uid as i64)
|
||||
.bind(platform)
|
||||
.fetch_one(&lock)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_account_by_platform(&self, platform: &str) -> Result<AccountRow, DatabaseError> {
|
||||
pub async fn get_account_by_platform(
|
||||
&self,
|
||||
platform: &str,
|
||||
) -> Result<AccountRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, AccountRow>("SELECT * FROM accounts WHERE platform = $1")
|
||||
.bind(platform)
|
||||
.fetch_one(&lock)
|
||||
.await?)
|
||||
let accounts =
|
||||
sqlx::query_as::<_, AccountRow>("SELECT * FROM accounts WHERE platform = $1")
|
||||
.bind(platform)
|
||||
.fetch_all(&lock)
|
||||
.await?;
|
||||
if accounts.is_empty() {
|
||||
return Err(DatabaseError::NotFoundError);
|
||||
}
|
||||
// randomly select one account
|
||||
let account = accounts.choose(&mut rand::thread_rng()).unwrap();
|
||||
Ok(account.clone())
|
||||
}
|
||||
}
|
||||
|
||||
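With several accounts on one platform, `get_account_by_platform` now picks one of the matching rows at random via `rand`'s `SliceRandom::choose`; a minimal standalone use of that API:

```rust
use rand::seq::SliceRandom;

fn main() {
    let accounts = vec!["account_a", "account_b", "account_c"];
    // `choose` returns None only for an empty slice, which the code above
    // already guards against with an is_empty() check.
    let picked = accounts.choose(&mut rand::thread_rng()).unwrap();
    println!("picked {picked}");
}
```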
@@ -123,16 +123,28 @@ impl Database {
|
||||
|
||||
pub async fn get_recent_record(
|
||||
&self,
|
||||
room_id: u64,
|
||||
offset: u64,
|
||||
limit: u64,
|
||||
) -> Result<Vec<RecordRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records ORDER BY created_at DESC LIMIT $1 OFFSET $2",
|
||||
)
|
||||
.bind(limit as i64)
|
||||
.bind(offset as i64)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
if room_id == 0 {
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records ORDER BY created_at DESC LIMIT $1 OFFSET $2",
|
||||
)
|
||||
.bind(limit as i64)
|
||||
.bind(offset as i64)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
} else {
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records WHERE room_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
|
||||
)
|
||||
.bind(room_id as i64)
|
||||
.bind(limit as i64)
|
||||
.bind(offset as i64)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
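The new branch treats `room_id == 0` as an "all rooms" sentinel rather than a real room id; a hypothetical call site would look like the sketch below (the `crate::database` import paths are assumptions made for illustration):

```rust
use crate::database::record::RecordRow;
use crate::database::{Database, DatabaseError};

// Page through the newest records, either globally or for a single room.
async fn recent(db: &Database, room_id: u64) -> Result<Vec<RecordRow>, DatabaseError> {
    // offset 0, first 20 rows, newest first; room_id == 0 means "across all rooms".
    db.get_recent_record(room_id, 0, 20).await
}
```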
@@ -10,6 +10,7 @@ pub struct RecorderRow {
|
||||
pub created_at: String,
|
||||
pub platform: String,
|
||||
pub auto_start: bool,
|
||||
pub extra: String,
|
||||
}
|
||||
|
||||
// recorders
|
||||
@@ -18,6 +19,7 @@ impl Database {
|
||||
&self,
|
||||
platform: PlatformType,
|
||||
room_id: u64,
|
||||
extra: &str,
|
||||
) -> Result<RecorderRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let recorder = RecorderRow {
|
||||
@@ -25,14 +27,16 @@ impl Database {
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
platform: platform.as_str().to_string(),
|
||||
auto_start: true,
|
||||
extra: extra.to_string(),
|
||||
};
|
||||
let _ = sqlx::query(
|
||||
"INSERT INTO recorders (room_id, created_at, platform, auto_start) VALUES ($1, $2, $3, $4)",
|
||||
"INSERT OR REPLACE INTO recorders (room_id, created_at, platform, auto_start, extra) VALUES ($1, $2, $3, $4, $5)",
|
||||
)
|
||||
.bind(room_id as i64)
|
||||
.bind(&recorder.created_at)
|
||||
.bind(platform.as_str())
|
||||
.bind(recorder.auto_start)
|
||||
.bind(extra)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(recorder)
|
||||
@@ -56,7 +60,7 @@ impl Database {
|
||||
pub async fn get_recorders(&self) -> Result<Vec<RecorderRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, RecorderRow>(
|
||||
"SELECT room_id, created_at, platform, auto_start FROM recorders",
|
||||
"SELECT room_id, created_at, platform, auto_start, extra FROM recorders",
|
||||
)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
|
||||
86
src-tauri/src/database/task.rs
Normal file
@@ -0,0 +1,86 @@
|
||||
use super::Database;
|
||||
use super::DatabaseError;
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, sqlx::FromRow)]
|
||||
pub struct TaskRow {
|
||||
pub id: String,
|
||||
#[sqlx(rename = "type")]
|
||||
pub task_type: String,
|
||||
pub status: String,
|
||||
pub message: String,
|
||||
pub metadata: String,
|
||||
pub created_at: String,
|
||||
}
|
||||
|
||||
impl Database {
|
||||
pub async fn add_task(&self, task: &TaskRow) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let _ = sqlx::query(
|
||||
"INSERT INTO tasks (id, type, status, message, metadata, created_at) VALUES ($1, $2, $3, $4, $5, $6)",
|
||||
)
|
||||
.bind(&task.id)
|
||||
.bind(&task.task_type)
|
||||
.bind(&task.status)
|
||||
.bind(&task.message)
|
||||
.bind(&task.metadata)
|
||||
.bind(&task.created_at)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_tasks(&self) -> Result<Vec<TaskRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let tasks = sqlx::query_as::<_, TaskRow>("SELECT * FROM tasks")
|
||||
.fetch_all(&lock)
|
||||
.await?;
|
||||
Ok(tasks)
|
||||
}
|
||||
|
||||
pub async fn update_task(
|
||||
&self,
|
||||
id: &str,
|
||||
status: &str,
|
||||
message: &str,
|
||||
metadata: Option<&str>,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
if let Some(metadata) = metadata {
|
||||
let _ = sqlx::query(
|
||||
"UPDATE tasks SET status = $1, message = $2, metadata = $3 WHERE id = $4",
|
||||
)
|
||||
.bind(status)
|
||||
.bind(message)
|
||||
.bind(metadata)
|
||||
.bind(id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
} else {
|
||||
let _ = sqlx::query("UPDATE tasks SET status = $1, message = $2 WHERE id = $3")
|
||||
.bind(status)
|
||||
.bind(message)
|
||||
.bind(id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn delete_task(&self, id: &str) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let _ = sqlx::query("DELETE FROM tasks WHERE id = $1")
|
||||
.bind(id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn finish_pending_tasks(&self) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let _ = sqlx::query("UPDATE tasks SET status = 'failed' WHERE status = 'pending'")
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
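A row in the new `tasks` table is just a string id plus status, message, and metadata, so callers can track long-running jobs such as clipping or subtitle generation; a hedged sketch of a call site, assuming the types above are in scope (the "clip_range" label is an assumption, not taken from the repo):

```rust
use chrono::Utc;

// Hypothetical call site for the new task table.
async fn start_task(db: &Database, id: String) -> Result<TaskRow, DatabaseError> {
    let task = TaskRow {
        id,                                  // any unique string, e.g. a UUID
        task_type: "clip_range".to_string(), // assumed label, not from the repo
        status: "pending".to_string(),
        message: String::new(),
        metadata: "{}".to_string(),
        created_at: Utc::now().to_rfc3339(),
    };
    db.add_task(&task).await?;
    Ok(task)
}
```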
@@ -17,17 +17,34 @@ pub struct VideoRow {
|
||||
pub tags: String,
|
||||
pub area: i64,
|
||||
pub created_at: String,
|
||||
pub platform: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, sqlx::FromRow)]
|
||||
pub struct VideoNoCover {
|
||||
pub id: i64,
|
||||
pub room_id: u64,
|
||||
pub file: String,
|
||||
pub length: i64,
|
||||
pub size: i64,
|
||||
pub status: i64,
|
||||
pub bvid: String,
|
||||
pub title: String,
|
||||
pub desc: String,
|
||||
pub tags: String,
|
||||
pub area: i64,
|
||||
pub created_at: String,
|
||||
pub platform: String,
|
||||
}
|
||||
|
||||
impl Database {
|
||||
pub async fn get_videos(&self, room_id: u64) -> Result<Vec<VideoRow>, DatabaseError> {
|
||||
pub async fn get_videos(&self, room_id: u64) -> Result<Vec<VideoNoCover>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(
|
||||
sqlx::query_as::<_, VideoRow>("SELECT * FROM videos WHERE room_id = $1;")
|
||||
.bind(room_id as i64)
|
||||
.fetch_all(&lock)
|
||||
.await?,
|
||||
)
|
||||
let videos = sqlx::query_as::<_, VideoNoCover>("SELECT * FROM videos WHERE room_id = $1;")
|
||||
.bind(room_id as i64)
|
||||
.fetch_all(&lock)
|
||||
.await?;
|
||||
Ok(videos)
|
||||
}
|
||||
|
||||
pub async fn get_video(&self, id: i64) -> Result<VideoRow, DatabaseError> {
|
||||
@@ -66,7 +83,7 @@ impl Database {
|
||||
|
||||
pub async fn add_video(&self, video: &VideoRow) -> Result<VideoRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let sql = sqlx::query("INSERT INTO videos (room_id, cover, file, length, size, status, bvid, title, desc, tags, area, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)")
|
||||
let sql = sqlx::query("INSERT INTO videos (room_id, cover, file, length, size, status, bvid, title, desc, tags, area, created_at, platform) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)")
|
||||
.bind(video.room_id as i64)
|
||||
.bind(&video.cover)
|
||||
.bind(&video.file)
|
||||
@@ -79,6 +96,7 @@ impl Database {
|
||||
.bind(&video.tags)
|
||||
.bind(video.area)
|
||||
.bind(&video.created_at)
|
||||
.bind(&video.platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
let video = VideoRow {
|
||||
@@ -97,4 +115,22 @@ impl Database {
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_all_videos(&self) -> Result<Vec<VideoNoCover>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let videos =
|
||||
sqlx::query_as::<_, VideoNoCover>("SELECT * FROM videos ORDER BY created_at DESC;")
|
||||
.fetch_all(&lock)
|
||||
.await?;
|
||||
Ok(videos)
|
||||
}
|
||||
|
||||
pub async fn get_video_cover(&self, id: i64) -> Result<String, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let video = sqlx::query_as::<_, VideoRow>("SELECT * FROM videos WHERE id = $1")
|
||||
.bind(id)
|
||||
.fetch_one(&lock)
|
||||
.await?;
|
||||
Ok(video.cover)
|
||||
}
|
||||
}
|
||||
|
||||
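`VideoNoCover` mirrors `VideoRow` minus the `cover` column, presumably so list queries stay light while the potentially large cover string is fetched on demand through `get_video_cover`; a hypothetical call flow, assuming the database types above are in scope:

```rust
// Hypothetical call flow: list videos without covers, then pull one cover lazily.
async fn show_room_videos(db: &Database, room_id: u64) -> Result<(), DatabaseError> {
    let videos = db.get_videos(room_id).await?; // Vec<VideoNoCover>
    if let Some(first) = videos.first() {
        // Only now is the potentially large cover payload loaded.
        let cover = db.get_video_cover(first.id).await?;
        log::debug!("cover for video {} is {} bytes", first.id, cover.len());
    }
    Ok(())
}
```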
@@ -1,10 +1,19 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Stdio;
|
||||
|
||||
use crate::progress_reporter::ProgressReporterTrait;
|
||||
use async_ffmpeg_sidecar::event::FfmpegEvent;
|
||||
use crate::progress_reporter::{ProgressReporter, ProgressReporterTrait};
|
||||
use crate::subtitle_generator::whisper_online;
|
||||
use crate::subtitle_generator::{
|
||||
whisper_cpp, GenerateResult, SubtitleGenerator, SubtitleGeneratorType,
|
||||
};
|
||||
use async_ffmpeg_sidecar::event::{FfmpegEvent, LogLevel};
|
||||
use async_ffmpeg_sidecar::log_parser::FfmpegLogParser;
|
||||
use tokio::io::BufReader;
|
||||
use tokio::io::{AsyncBufReadExt, BufReader};
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
const CREATE_NO_WINDOW: u32 = 0x08000000;
|
||||
#[cfg(target_os = "windows")]
|
||||
use std::os::windows::process::CommandExt;
|
||||
|
||||
pub async fn clip_from_m3u8(
|
||||
reporter: Option<&impl ProgressReporterTrait>,
|
||||
@@ -21,7 +30,11 @@ pub async fn clip_from_m3u8(
|
||||
std::fs::create_dir_all(output_folder).unwrap();
|
||||
}
|
||||
|
||||
let child = tokio::process::Command::new("ffmpeg")
|
||||
let mut ffmpeg_process = tokio::process::Command::new(ffmpeg_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffmpeg_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let child = ffmpeg_process
|
||||
.args(["-i", &format!("{}", m3u8_index.display())])
|
||||
.args(["-c", "copy"])
|
||||
.args(["-y", output_path.to_str().unwrap()])
|
||||
@@ -50,6 +63,12 @@ pub async fn clip_from_m3u8(
|
||||
.update(format!("编码中:{}", p.time).as_str())
|
||||
}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Log(level, content) => {
|
||||
// log error if content contains error
|
||||
if content.contains("error") || level == LogLevel::Error {
|
||||
log::error!("Clip error: {}", content);
|
||||
}
|
||||
}
|
||||
FfmpegEvent::Error(e) => {
|
||||
log::error!("Clip error: {}", e);
|
||||
clip_error = Some(e.to_string());
|
||||
@@ -72,20 +91,92 @@ pub async fn clip_from_m3u8(
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn extract_audio(file: &Path) -> Result<(), String> {
|
||||
pub async fn extract_audio_chunks(file: &Path, format: &str) -> Result<PathBuf, String> {
|
||||
// ffmpeg -i fixed_\[30655190\]1742887114_0325084106_81.5.mp4 -ar 16000 test.wav
|
||||
log::info!("Extract audio task start: {}", file.display());
|
||||
let output_path = file.with_extension("wav");
|
||||
let output_path = file.with_extension(format);
|
||||
let mut extract_error = None;
|
||||
|
||||
let child = tokio::process::Command::new("ffmpeg")
|
||||
.args(["-i", file.to_str().unwrap()])
|
||||
.args(["-ar", "16000"])
|
||||
.args([output_path.to_str().unwrap()])
|
||||
.args(["-y"])
|
||||
.args(["-progress", "pipe:2"])
|
||||
.stderr(Stdio::piped())
|
||||
.spawn();
|
||||
// Lower the sample rate to speed up processing while keeping enough quality for speech recognition
|
||||
let sample_rate = if format == "mp3" { "22050" } else { "16000" };
|
||||
|
||||
// First, get the duration of the input file
|
||||
let duration = get_audio_duration(file).await?;
|
||||
log::info!("Audio duration: {} seconds", duration);
|
||||
|
||||
// Split into chunks of 30 seconds
|
||||
let chunk_duration = 30;
|
||||
let chunk_count = (duration as f64 / chunk_duration as f64).ceil() as usize;
|
||||
log::info!(
|
||||
"Splitting into {} chunks of {} seconds each",
|
||||
chunk_count,
|
||||
chunk_duration
|
||||
);
|
||||
|
||||
// Create output directory for chunks
|
||||
let output_dir = output_path.parent().unwrap();
|
||||
let base_name = output_path.file_stem().unwrap().to_str().unwrap();
|
||||
let chunk_dir = output_dir.join(format!("{}_chunks", base_name));
|
||||
|
||||
if !chunk_dir.exists() {
|
||||
std::fs::create_dir_all(&chunk_dir)
|
||||
.map_err(|e| format!("Failed to create chunk directory: {}", e))?;
|
||||
}
|
||||
|
||||
// Use ffmpeg segment feature to split audio into chunks
|
||||
let segment_pattern = chunk_dir.join(format!("{}_%03d.{}", base_name, format));
|
||||
|
||||
// Build the optimized ffmpeg argument list
|
||||
let file_str = file.to_str().unwrap();
|
||||
let chunk_duration_str = chunk_duration.to_string();
|
||||
let segment_pattern_str = segment_pattern.to_str().unwrap();
|
||||
|
||||
let mut args = vec![
|
||||
"-i",
|
||||
file_str,
|
||||
"-ar",
|
||||
sample_rate,
|
||||
"-vn",
|
||||
"-f",
|
||||
"segment",
|
||||
"-segment_time",
|
||||
&chunk_duration_str,
|
||||
"-reset_timestamps",
|
||||
"1",
|
||||
"-y",
|
||||
"-progress",
|
||||
"pipe:2",
|
||||
];
|
||||
|
||||
// Add format-specific encoding parameters tuned for speed
|
||||
if format == "mp3" {
|
||||
args.extend_from_slice(&[
|
||||
"-c:a",
|
||||
"mp3",
|
||||
"-b:a",
|
||||
"64k", // 降低比特率以提高速度
|
||||
"-compression_level",
|
||||
"0", // 最快压缩
|
||||
]);
|
||||
} else {
|
||||
args.extend_from_slice(&[
|
||||
"-c:a",
|
||||
"pcm_s16le", // 使用PCM编码,速度更快
|
||||
]);
|
||||
}
|
||||
|
||||
// Add performance tuning parameters
|
||||
args.extend_from_slice(&[
|
||||
"-threads", "0", // 使用所有可用CPU核心
|
||||
]);
|
||||
|
||||
args.push(segment_pattern_str);
|
||||
|
||||
let mut ffmpeg_process = tokio::process::Command::new(ffmpeg_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffmpeg_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let child = ffmpeg_process.args(&args).stderr(Stdio::piped()).spawn();
|
||||
|
||||
if let Err(e) = child {
|
||||
return Err(e.to_string());
|
||||
@@ -103,6 +194,7 @@ pub async fn extract_audio(file: &Path) -> Result<(), String> {
|
||||
extract_error = Some(e.to_string());
|
||||
}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Log(_level, _content) => {}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
@@ -116,11 +208,114 @@ pub async fn extract_audio(file: &Path) -> Result<(), String> {
|
||||
log::error!("Extract audio error: {}", error);
|
||||
Err(error)
|
||||
} else {
|
||||
log::info!("Extract audio task end: {}", output_path.display());
|
||||
Ok(())
|
||||
log::info!(
|
||||
"Extract audio task end: {} chunks created in {}",
|
||||
chunk_count,
|
||||
chunk_dir.display()
|
||||
);
|
||||
Ok(chunk_dir)
|
||||
}
|
||||
}
|
||||
|
||||
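Put together, the argument vector above amounts to a single ffmpeg segmenting pass per input (roughly sketched in the comment below for the wav path), and the logged chunk count is just the duration rounded up to 30-second windows; a small illustration of that arithmetic:

```rust
// Roughly the command the argument vector assembles for the wav/whisper path:
//   ffmpeg -i input.mp4 -ar 16000 -vn -f segment -segment_time 30 \
//          -reset_timestamps 1 -y -progress pipe:2 -c:a pcm_s16le -threads 0 \
//          input_chunks/input_%03d.wav
fn chunk_count(duration_secs: u64, chunk_secs: u64) -> usize {
    (duration_secs as f64 / chunk_secs as f64).ceil() as usize
}

fn main() {
    assert_eq!(chunk_count(60, 30), 2);
    assert_eq!(chunk_count(61, 30), 3);
    assert_eq!(chunk_count(29, 30), 1);
}
```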
/// Get the duration of an audio/video file in seconds
|
||||
async fn get_audio_duration(file: &Path) -> Result<u64, String> {
|
||||
// Use ffprobe with format option to get duration
|
||||
let mut ffprobe_process = tokio::process::Command::new(ffprobe_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffprobe_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let child = ffprobe_process
|
||||
.args(["-v", "quiet"])
|
||||
.args(["-show_entries", "format=duration"])
|
||||
.args(["-of", "csv=p=0"])
|
||||
.args(["-i", file.to_str().unwrap()])
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn();
|
||||
|
||||
if let Err(e) = child {
|
||||
return Err(format!("Failed to spawn ffprobe process: {}", e));
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
let stdout = child.stdout.take().unwrap();
|
||||
let reader = BufReader::new(stdout);
|
||||
let mut parser = FfmpegLogParser::new(reader);
|
||||
|
||||
let mut duration = None;
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Log(_level, content) => {
|
||||
// The new command outputs duration directly as a float
|
||||
if let Ok(seconds_f64) = content.trim().parse::<f64>() {
|
||||
duration = Some(seconds_f64.ceil() as u64);
|
||||
log::debug!("Parsed duration: {} seconds", seconds_f64);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if let Err(e) = child.wait().await {
|
||||
log::error!("Failed to get duration: {}", e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
duration.ok_or_else(|| "Failed to parse duration".to_string())
|
||||
}
|
||||
|
||||
/// Get the precise duration of a video segment (TS/MP4) in seconds
|
||||
pub async fn get_segment_duration(file: &Path) -> Result<f64, String> {
|
||||
// Use ffprobe to get the exact duration of the segment
|
||||
let mut ffprobe_process = tokio::process::Command::new(ffprobe_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffprobe_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let child = ffprobe_process
|
||||
.args(["-v", "quiet"])
|
||||
.args(["-show_entries", "format=duration"])
|
||||
.args(["-of", "csv=p=0"])
|
||||
.args(["-i", file.to_str().unwrap()])
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn();
|
||||
|
||||
if let Err(e) = child {
|
||||
return Err(format!(
|
||||
"Failed to spawn ffprobe process for segment: {}",
|
||||
e
|
||||
));
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
let stdout = child.stdout.take().unwrap();
|
||||
let reader = BufReader::new(stdout);
|
||||
let mut parser = FfmpegLogParser::new(reader);
|
||||
|
||||
let mut duration = None;
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Log(_level, content) => {
|
||||
// Parse the exact duration as f64 for precise timing
|
||||
if let Ok(seconds_f64) = content.trim().parse::<f64>() {
|
||||
duration = Some(seconds_f64);
|
||||
log::debug!("Parsed segment duration: {} seconds", seconds_f64);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if let Err(e) = child.wait().await {
|
||||
log::error!("Failed to get segment duration: {}", e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
duration.ok_or_else(|| "Failed to parse segment duration".to_string())
|
||||
}
|
||||
|
||||
pub async fn encode_video_subtitle(
|
||||
reporter: &impl ProgressReporterTrait,
|
||||
file: &Path,
|
||||
@@ -157,7 +352,11 @@ pub async fn encode_video_subtitle(
|
||||
let vf = format!("subtitles={}:force_style='{}'", subtitle, srt_style);
|
||||
log::info!("vf: {}", vf);
|
||||
|
||||
let child = tokio::process::Command::new("ffmpeg")
|
||||
let mut ffmpeg_process = tokio::process::Command::new(ffmpeg_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffmpeg_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let child = ffmpeg_process
|
||||
.args(["-i", file.to_str().unwrap()])
|
||||
.args(["-vf", vf.as_str()])
|
||||
.args(["-c:v", "libx264"])
|
||||
@@ -188,9 +387,7 @@ pub async fn encode_video_subtitle(
|
||||
reporter.update(format!("压制中:{}", p.time).as_str());
|
||||
}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Log(_level, content) => {
|
||||
log::debug!("{}", content);
|
||||
}
|
||||
FfmpegEvent::Log(_level, _content) => {}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
@@ -240,7 +437,11 @@ pub async fn encode_video_danmu(
|
||||
format!("'{}'", subtitle.display())
|
||||
};
|
||||
|
||||
let child = tokio::process::Command::new("ffmpeg")
|
||||
let mut ffmpeg_process = tokio::process::Command::new(ffmpeg_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffmpeg_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let child = ffmpeg_process
|
||||
.args(["-i", file.to_str().unwrap()])
|
||||
.args(["-vf", &format!("ass={}", subtitle)])
|
||||
.args(["-c:v", "libx264"])
|
||||
@@ -275,9 +476,7 @@ pub async fn encode_video_danmu(
|
||||
.unwrap()
|
||||
.update(format!("压制中:{}", p.time).as_str());
|
||||
}
|
||||
FfmpegEvent::Log(_level, content) => {
|
||||
log::debug!("{}", content);
|
||||
}
|
||||
FfmpegEvent::Log(_level, _content) => {}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
_ => {}
|
||||
}
|
||||
@@ -296,3 +495,279 @@ pub async fn encode_video_danmu(
|
||||
Ok(output_path)
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn generic_ffmpeg_command(args: &[&str]) -> Result<String, String> {
|
||||
let mut ffmpeg_process = tokio::process::Command::new(ffmpeg_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffmpeg_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let child = ffmpeg_process.args(args).stderr(Stdio::piped()).spawn();
|
||||
if let Err(e) = child {
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
let stderr = child.stderr.take().unwrap();
|
||||
let reader = BufReader::new(stderr);
|
||||
let mut parser = FfmpegLogParser::new(reader);
|
||||
|
||||
let mut logs = Vec::new();
|
||||
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::Log(_level, content) => {
|
||||
logs.push(content);
|
||||
}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if let Err(e) = child.wait().await {
|
||||
log::error!("Generic ffmpeg command error: {}", e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
Ok(logs.join("\n"))
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn generate_video_subtitle(
|
||||
reporter: Option<&ProgressReporter>,
|
||||
file: &Path,
|
||||
generator_type: &str,
|
||||
whisper_model: &str,
|
||||
whisper_prompt: &str,
|
||||
openai_api_key: &str,
|
||||
openai_api_endpoint: &str,
|
||||
language_hint: &str,
|
||||
) -> Result<GenerateResult, String> {
|
||||
match generator_type {
|
||||
"whisper" => {
|
||||
if whisper_model.is_empty() {
|
||||
return Err("Whisper model not configured".to_string());
|
||||
}
|
||||
if let Ok(generator) = whisper_cpp::new(Path::new(&whisper_model), whisper_prompt).await
|
||||
{
|
||||
let chunk_dir = extract_audio_chunks(file, "wav").await?;
|
||||
|
||||
let mut full_result = GenerateResult {
|
||||
subtitle_id: "".to_string(),
|
||||
subtitle_content: vec![],
|
||||
generator_type: SubtitleGeneratorType::Whisper,
|
||||
};
|
||||
|
||||
let mut chunk_paths = vec![];
|
||||
for entry in std::fs::read_dir(&chunk_dir)
|
||||
.map_err(|e| format!("Failed to read chunk directory: {}", e))?
|
||||
{
|
||||
let entry =
|
||||
entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
|
||||
let path = entry.path();
|
||||
chunk_paths.push(path);
|
||||
}
|
||||
|
||||
// sort chunk paths by name
|
||||
chunk_paths
|
||||
.sort_by_key(|path| path.file_name().unwrap().to_str().unwrap().to_string());
|
||||
|
||||
let mut results = Vec::new();
|
||||
for path in chunk_paths {
|
||||
let result = generator
|
||||
.generate_subtitle(reporter, &path, language_hint)
|
||||
.await;
|
||||
results.push(result);
|
||||
}
|
||||
|
||||
for (i, result) in results.iter().enumerate() {
|
||||
if let Ok(result) = result {
|
||||
full_result.subtitle_id = result.subtitle_id.clone();
|
||||
full_result.concat(result, 30 * i as u64);
|
||||
}
|
||||
}
|
||||
|
||||
// delete chunk directory
|
||||
let _ = tokio::fs::remove_dir_all(chunk_dir).await;
|
||||
|
||||
Ok(full_result)
|
||||
} else {
|
||||
Err("Failed to initialize Whisper model".to_string())
|
||||
}
|
||||
}
|
||||
"whisper_online" => {
|
||||
if openai_api_key.is_empty() {
|
||||
return Err("API key not configured".to_string());
|
||||
}
|
||||
if let Ok(generator) = whisper_online::new(
|
||||
Some(openai_api_endpoint),
|
||||
Some(openai_api_key),
|
||||
Some(whisper_prompt),
|
||||
)
|
||||
.await
|
||||
{
|
||||
let chunk_dir = extract_audio_chunks(file, "mp3").await?;
|
||||
|
||||
let mut full_result = GenerateResult {
|
||||
subtitle_id: "".to_string(),
|
||||
subtitle_content: vec![],
|
||||
generator_type: SubtitleGeneratorType::WhisperOnline,
|
||||
};
|
||||
|
||||
let mut chunk_paths = vec![];
|
||||
for entry in std::fs::read_dir(&chunk_dir)
|
||||
.map_err(|e| format!("Failed to read chunk directory: {}", e))?
|
||||
{
|
||||
let entry =
|
||||
entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
|
||||
let path = entry.path();
|
||||
chunk_paths.push(path);
|
||||
}
|
||||
// sort chunk paths by name
|
||||
chunk_paths
|
||||
.sort_by_key(|path| path.file_name().unwrap().to_str().unwrap().to_string());
|
||||
|
||||
let mut results = Vec::new();
|
||||
for path in chunk_paths {
|
||||
let result = generator
|
||||
.generate_subtitle(reporter, &path, language_hint)
|
||||
.await;
|
||||
results.push(result);
|
||||
}
|
||||
|
||||
for (i, result) in results.iter().enumerate() {
|
||||
if let Ok(result) = result {
|
||||
full_result.subtitle_id = result.subtitle_id.clone();
|
||||
full_result.concat(result, 30 * i as u64);
|
||||
}
|
||||
}
|
||||
|
||||
// delete chunk directory
|
||||
let _ = tokio::fs::remove_dir_all(chunk_dir).await;
|
||||
|
||||
Ok(full_result)
|
||||
} else {
|
||||
Err("Failed to initialize Whisper Online".to_string())
|
||||
}
|
||||
}
|
||||
_ => Err(format!(
|
||||
"Unknown subtitle generator type: {}",
|
||||
generator_type
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
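Each chunk is transcribed independently, so every chunk's subtitle timestamps start at zero; `concat(result, 30 * i)` is what shifts chunk `i` back onto the original timeline. A toy illustration of that offset arithmetic (not the repo's actual `concat` implementation):

```rust
#[derive(Debug)]
struct Cue {
    start_secs: u64,
    end_secs: u64,
    text: String,
}

// Shift one chunk's cues back onto the original timeline.
fn shift(cues: &mut [Cue], offset_secs: u64) {
    for cue in cues {
        cue.start_secs += offset_secs;
        cue.end_secs += offset_secs;
    }
}

fn main() {
    // Chunk index 1 of 30-second chunks: everything moves by 30 seconds.
    let mut cues = vec![Cue { start_secs: 2, end_secs: 4, text: "你好".into() }];
    shift(&mut cues, 30);
    assert_eq!((cues[0].start_secs, cues[0].end_secs), (32, 34));
}
```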
/// Try running ffmpeg to confirm it is available and report its version
|
||||
pub async fn check_ffmpeg() -> Result<String, String> {
|
||||
let child = tokio::process::Command::new(ffmpeg_path())
|
||||
.arg("-version")
|
||||
.stdout(Stdio::piped())
|
||||
.spawn();
|
||||
if let Err(e) = child {
|
||||
log::error!("Faild to spwan ffmpeg process: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
|
||||
let stdout = child.stdout.take();
|
||||
if stdout.is_none() {
|
||||
log::error!("Failed to take ffmpeg output");
|
||||
return Err("Failed to take ffmpeg output".into());
|
||||
}
|
||||
|
||||
let stdout = stdout.unwrap();
|
||||
let reader = BufReader::new(stdout);
|
||||
let mut parser = FfmpegLogParser::new(reader);
|
||||
|
||||
let mut version = None;
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::ParsedVersion(v) => version = Some(v.version),
|
||||
FfmpegEvent::LogEOF => break,
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(version) = version {
|
||||
Ok(version)
|
||||
} else {
|
||||
Err("Failed to parse version from output".into())
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_video_resolution(file: &str) -> Result<String, String> {
|
||||
// ffprobe -v error -select_streams v:0 -show_entries stream=width,height -of csv=s=x:p=0 input.mp4
|
||||
let mut ffprobe_process = tokio::process::Command::new(ffprobe_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffprobe_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let child = ffprobe_process
|
||||
.arg("-i")
|
||||
.arg(file)
|
||||
.arg("-v")
|
||||
.arg("error")
|
||||
.arg("-select_streams")
|
||||
.arg("v:0")
|
||||
.arg("-show_entries")
|
||||
.arg("stream=width,height")
|
||||
.arg("-of")
|
||||
.arg("csv=s=x:p=0")
|
||||
.stdout(Stdio::piped())
|
||||
.spawn();
|
||||
if let Err(e) = child {
|
||||
log::error!("Faild to spwan ffprobe process: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
let stdout = child.stdout.take();
|
||||
if stdout.is_none() {
|
||||
log::error!("Failed to take ffprobe output");
|
||||
return Err("Failed to take ffprobe output".into());
|
||||
}
|
||||
|
||||
let stdout = stdout.unwrap();
|
||||
let reader = BufReader::new(stdout);
|
||||
let mut lines = reader.lines();
|
||||
let line = lines.next_line().await.unwrap();
|
||||
if line.is_none() {
|
||||
return Err("Failed to parse resolution from output".into());
|
||||
}
|
||||
let line = line.unwrap();
|
||||
let resolution = line.split("x").collect::<Vec<&str>>();
|
||||
if resolution.len() != 2 {
|
||||
return Err("Failed to parse resolution from output".into());
|
||||
}
|
||||
Ok(format!("{}x{}", resolution[0], resolution[1]))
|
||||
}
|
||||
|
||||
fn ffmpeg_path() -> PathBuf {
|
||||
let mut path = Path::new("ffmpeg").to_path_buf();
|
||||
if cfg!(windows) {
|
||||
path.set_extension("exe");
|
||||
}
|
||||
|
||||
path
|
||||
}
|
||||
|
||||
fn ffprobe_path() -> PathBuf {
|
||||
let mut path = Path::new("ffprobe").to_path_buf();
|
||||
if cfg!(windows) {
|
||||
path.set_extension("exe");
|
||||
}
|
||||
|
||||
path
|
||||
}
|
||||
|
||||
// tests
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_get_video_size() {
|
||||
let file = Path::new("/Users/xinreasuper/Desktop/shadowreplay-test/output2/[1789714684][1753965688317][摄像头被前夫抛妻弃子直播挣点奶粉][2025-07-31_12-58-14].mp4");
|
||||
let resolution = get_video_resolution(file.to_str().unwrap()).await.unwrap();
|
||||
println!("Resolution: {}", resolution);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ use crate::recorder::bilibili::client::{QrInfo, QrStatus};
|
||||
use crate::state::State;
|
||||
use crate::state_type;
|
||||
|
||||
use hyper::header::HeaderValue;
|
||||
#[cfg(feature = "gui")]
|
||||
use tauri::State as TauriState;
|
||||
|
||||
@@ -20,6 +21,10 @@ pub async fn add_account(
|
||||
platform: String,
|
||||
cookies: &str,
|
||||
) -> Result<AccountRow, String> {
|
||||
// check if cookies is valid
|
||||
if let Err(e) = cookies.parse::<HeaderValue>() {
|
||||
return Err(format!("Invalid cookies: {}", e));
|
||||
}
|
||||
let account = state.db.add_account(&platform, cookies).await?;
|
||||
if platform == "bilibili" {
|
||||
let account_info = state.client.get_user_info(&account, account.uid).await?;
|
||||
@@ -32,6 +37,37 @@ pub async fn add_account(
|
||||
&account_info.user_avatar_url,
|
||||
)
|
||||
.await?;
|
||||
} else if platform == "douyin" {
|
||||
// Get user info from Douyin API
|
||||
let douyin_client = crate::recorder::douyin::client::DouyinClient::new(
|
||||
&state.config.read().await.user_agent,
|
||||
&account,
|
||||
);
|
||||
match douyin_client.get_user_info().await {
|
||||
Ok(user_info) => {
|
||||
// For Douyin, use sec_uid as the primary identifier in id_str field
|
||||
let avatar_url = user_info
|
||||
.avatar_thumb
|
||||
.url_list
|
||||
.first()
|
||||
.cloned()
|
||||
.unwrap_or_default();
|
||||
|
||||
state
|
||||
.db
|
||||
.update_account_with_id_str(
|
||||
&account,
|
||||
&user_info.sec_uid,
|
||||
&user_info.nickname,
|
||||
&avatar_url,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
Err(e) => {
|
||||
log::warn!("Failed to get Douyin user info: {}", e);
|
||||
// Keep the account but with default values
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(account)
|
||||
}
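A hedged illustration of the cookie check introduced above (not part of the diff); the helper name and sample values are made up.

// Hypothetical wrapper around the HeaderValue parse used by add_account.
fn cookie_header_is_valid(cookies: &str) -> bool {
    cookies.parse::<hyper::header::HeaderValue>().is_ok()
}
// cookie_header_is_valid("SESSDATA=abc; bili_jct=xyz") -> true
// cookie_header_is_valid("bad\ncookie") -> false (control characters are rejected)
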
|
||||
|
||||
@@ -11,6 +11,7 @@ pub async fn get_config(state: state_type!()) -> Result<Config, ()> {
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
#[allow(dead_code)]
|
||||
pub async fn set_cache_path(state: state_type!(), cache_path: String) -> Result<(), String> {
|
||||
let old_cache_path = state.config.read().await.cache.clone();
|
||||
if old_cache_path == cache_path {
|
||||
@@ -77,6 +78,7 @@ pub async fn set_cache_path(state: state_type!(), cache_path: String) -> Result<
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
#[allow(dead_code)]
|
||||
pub async fn set_output_path(state: state_type!(), output_path: String) -> Result<(), ()> {
|
||||
let mut config = state.config.write().await;
|
||||
let old_output_path = config.output.clone();
|
||||
@@ -170,6 +172,42 @@ pub async fn update_whisper_prompt(state: state_type!(), whisper_prompt: String)
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn update_subtitle_generator_type(
|
||||
state: state_type!(),
|
||||
subtitle_generator_type: String,
|
||||
) -> Result<(), ()> {
|
||||
log::info!(
|
||||
"Updating subtitle generator type to {}",
|
||||
subtitle_generator_type
|
||||
);
|
||||
let mut config = state.config.write().await;
|
||||
config.subtitle_generator_type = subtitle_generator_type;
|
||||
config.save();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn update_openai_api_key(state: state_type!(), openai_api_key: String) -> Result<(), ()> {
|
||||
log::info!("Updating openai api key");
|
||||
let mut config = state.config.write().await;
|
||||
config.openai_api_key = openai_api_key;
|
||||
config.save();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn update_openai_api_endpoint(
|
||||
state: state_type!(),
|
||||
openai_api_endpoint: String,
|
||||
) -> Result<(), ()> {
|
||||
log::info!("Updating openai api endpoint to {}", openai_api_endpoint);
|
||||
let mut config = state.config.write().await;
|
||||
config.openai_api_endpoint = openai_api_endpoint;
|
||||
config.save();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn update_auto_generate(
|
||||
state: state_type!(),
|
||||
@@ -182,3 +220,35 @@ pub async fn update_auto_generate(
|
||||
config.save();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn update_status_check_interval(
|
||||
state: state_type!(),
|
||||
mut interval: u64,
|
||||
) -> Result<(), ()> {
|
||||
if interval < 10 {
|
||||
interval = 10; // Minimum interval of 10 seconds
|
||||
}
|
||||
log::info!("Updating status check interval to {} seconds", interval);
|
||||
state.config.write().await.status_check_interval = interval;
|
||||
state.config.write().await.save();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn update_whisper_language(
|
||||
state: state_type!(),
|
||||
whisper_language: String,
|
||||
) -> Result<(), ()> {
|
||||
log::info!("Updating whisper language to {}", whisper_language);
|
||||
state.config.write().await.whisper_language = whisper_language;
|
||||
state.config.write().await.save();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn update_user_agent(state: state_type!(), user_agent: String) -> Result<(), ()> {
|
||||
log::info!("Updating user agent to {}", user_agent);
|
||||
state.config.write().await.set_user_agent(&user_agent);
|
||||
Ok(())
|
||||
}
|
||||
@@ -3,6 +3,7 @@ pub mod config;
|
||||
pub mod macros;
|
||||
pub mod message;
|
||||
pub mod recorder;
|
||||
pub mod task;
|
||||
pub mod utils;
|
||||
pub mod video;
|
||||
|
||||
|
||||
@@ -24,6 +24,7 @@ pub async fn add_recorder(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
extra: String,
|
||||
) -> Result<RecorderRow, String> {
|
||||
log::info!("Add recorder: {} {}", platform, room_id);
|
||||
let platform = PlatformType::from_str(&platform).unwrap();
|
||||
@@ -32,6 +33,7 @@ pub async fn add_recorder(
|
||||
if let Ok(account) = state.db.get_account_by_platform("bilibili").await {
|
||||
Ok(account)
|
||||
} else {
|
||||
log::error!("No available bilibili account found");
|
||||
Err("没有可用账号,请先添加账号".to_string())
|
||||
}
|
||||
}
|
||||
@@ -39,6 +41,7 @@ pub async fn add_recorder(
|
||||
if let Ok(account) = state.db.get_account_by_platform("douyin").await {
|
||||
Ok(account)
|
||||
} else {
|
||||
log::error!("No available douyin account found");
|
||||
Err("没有可用账号,请先添加账号".to_string())
|
||||
}
|
||||
}
|
||||
@@ -48,20 +51,26 @@ pub async fn add_recorder(
|
||||
match account {
|
||||
Ok(account) => match state
|
||||
.recorder_manager
|
||||
.add_recorder(&account, platform, room_id, true)
|
||||
.add_recorder(&account, platform, room_id, &extra, true)
|
||||
.await
|
||||
{
|
||||
Ok(()) => {
|
||||
let room = state.db.add_recorder(platform, room_id).await?;
|
||||
let room = state.db.add_recorder(platform, room_id, &extra).await?;
|
||||
state
|
||||
.db
|
||||
.new_message("添加直播间", &format!("添加了新直播间 {}", room_id))
|
||||
.await?;
|
||||
Ok(room)
|
||||
}
|
||||
Err(e) => Err(format!("添加失败: {}", e)),
|
||||
Err(e) => {
|
||||
log::error!("Failed to add recorder: {}", e);
|
||||
Err(format!("添加失败: {}", e))
|
||||
}
|
||||
},
|
||||
Err(e) => Err(format!("添加失败: {}", e)),
|
||||
Err(e) => {
|
||||
log::error!("Failed to add recorder: {}", e);
|
||||
Err(format!("添加失败: {}", e))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -71,6 +80,7 @@ pub async fn remove_recorder(
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
) -> Result<(), String> {
|
||||
log::info!("Remove recorder: {} {}", platform, room_id);
|
||||
let platform = PlatformType::from_str(&platform).unwrap();
|
||||
match state
|
||||
.recorder_manager
|
||||
@@ -82,9 +92,13 @@ pub async fn remove_recorder(
|
||||
.db
|
||||
.new_message("移除直播间", &format!("移除了直播间 {}", room_id))
|
||||
.await?;
|
||||
Ok(state.db.remove_recorder(room_id).await?)
|
||||
log::info!("Removed recorder: {} {}", platform.as_str(), room_id);
|
||||
Ok(())
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to remove recorder: {}", e);
|
||||
Err(e.to_string())
|
||||
}
|
||||
Err(e) => Err(e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -123,6 +137,40 @@ pub async fn get_archive(
|
||||
.await?)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_archive_subtitle(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
live_id: String,
|
||||
) -> Result<String, String> {
|
||||
let platform = PlatformType::from_str(&platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
Ok(state
|
||||
.recorder_manager
|
||||
.get_archive_subtitle(platform.unwrap(), room_id, &live_id)
|
||||
.await?)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn generate_archive_subtitle(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
live_id: String,
|
||||
) -> Result<String, String> {
|
||||
let platform = PlatformType::from_str(&platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
Ok(state
|
||||
.recorder_manager
|
||||
.generate_archive_subtitle(platform.unwrap(), room_id, &live_id)
|
||||
.await?)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn delete_archive(
|
||||
state: state_type!(),
|
||||
@@ -130,10 +178,13 @@ pub async fn delete_archive(
|
||||
room_id: u64,
|
||||
live_id: String,
|
||||
) -> Result<(), String> {
|
||||
let platform = PlatformType::from_str(&platform).unwrap();
|
||||
let platform = PlatformType::from_str(&platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
state
|
||||
.recorder_manager
|
||||
.delete_archive(platform, room_id, &live_id)
|
||||
.delete_archive(platform.unwrap(), room_id, &live_id)
|
||||
.await?;
|
||||
state
|
||||
.db
|
||||
@@ -152,10 +203,13 @@ pub async fn get_danmu_record(
|
||||
room_id: u64,
|
||||
live_id: String,
|
||||
) -> Result<Vec<DanmuEntry>, String> {
|
||||
let platform = PlatformType::from_str(&platform).unwrap();
|
||||
let platform = PlatformType::from_str(&platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
Ok(state
|
||||
.recorder_manager
|
||||
.get_danmu(platform, room_id, &live_id)
|
||||
.get_danmu(platform.unwrap(), room_id, &live_id)
|
||||
.await?)
|
||||
}
|
||||
|
||||
@@ -175,10 +229,13 @@ pub async fn export_danmu(
|
||||
state: state_type!(),
|
||||
options: ExportDanmuOptions,
|
||||
) -> Result<String, String> {
|
||||
let platform = PlatformType::from_str(&options.platform).unwrap();
|
||||
let platform = PlatformType::from_str(&options.platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
let mut danmus = state
|
||||
.recorder_manager
|
||||
.get_danmu(platform, options.room_id, &options.live_id)
|
||||
.get_danmu(platform.unwrap(), options.room_id, &options.live_id)
|
||||
.await?;
|
||||
|
||||
log::debug!("First danmu entry: {:?}", danmus.first());
|
||||
@@ -236,52 +293,35 @@ pub async fn get_today_record_count(state: state_type!()) -> Result<i64, String>
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_recent_record(
|
||||
state: state_type!(),
|
||||
room_id: u64,
|
||||
offset: u64,
|
||||
limit: u64,
|
||||
) -> Result<Vec<RecordRow>, String> {
|
||||
match state.db.get_recent_record(offset, limit).await {
|
||||
match state.db.get_recent_record(room_id, offset, limit).await {
|
||||
Ok(records) => Ok(records),
|
||||
Err(e) => Err(format!("Failed to get recent record: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn set_auto_start(
|
||||
pub async fn set_enable(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
auto_start: bool,
|
||||
enabled: bool,
|
||||
) -> Result<(), String> {
|
||||
let platform = PlatformType::from_str(&platform).unwrap();
|
||||
log::info!("Set enable for recorder {platform} {room_id} {enabled}");
|
||||
let platform = PlatformType::from_str(&platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
state
|
||||
.recorder_manager
|
||||
.set_auto_start(platform, room_id, auto_start)
|
||||
.set_enable(platform.unwrap(), room_id, enabled)
|
||||
.await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn force_start(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
) -> Result<(), String> {
|
||||
let platform = PlatformType::from_str(&platform).unwrap();
|
||||
state.recorder_manager.force_start(platform, room_id).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn force_stop(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
) -> Result<(), String> {
|
||||
let platform = PlatformType::from_str(&platform).unwrap();
|
||||
state.recorder_manager.force_stop(platform, room_id).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn fetch_hls(state: state_type!(), uri: String) -> Result<Vec<u8>, String> {
|
||||
// Handle wildcard pattern in the URI
|
||||
|
||||
src-tauri/src/handlers/task.rs (new file, 15 lines)
@@ -0,0 +1,15 @@
#[cfg(feature = "gui")]
use tauri::State as TauriState;

use crate::state::State;
use crate::{database::task::TaskRow, state_type};

#[cfg_attr(feature = "gui", tauri::command)]
pub async fn get_tasks(state: state_type!()) -> Result<Vec<TaskRow>, String> {
    Ok(state.db.get_tasks().await?)
}

#[cfg_attr(feature = "gui", tauri::command)]
pub async fn delete_task(state: state_type!(), id: &str) -> Result<(), String> {
    Ok(state.db.delete_task(id).await?)
}

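The rows these two commands read and delete are created elsewhere in this change; for orientation, a hedged sketch of such a row, with field values that are purely illustrative.

// Hypothetical TaskRow construction; mirrors the fields used by the handlers in this change.
let task = TaskRow {
    id: "demo-event-id".to_string(),
    task_type: "clip_range".to_string(),
    status: "pending".to_string(),
    message: "".to_string(),
    metadata: serde_json::json!({ "params": "..." }).to_string(),
    created_at: chrono::Utc::now().to_rfc3339(),
};
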
@@ -14,6 +14,7 @@ use {
|
||||
tokio::io::AsyncWriteExt,
|
||||
};
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn copy_dir_all(
|
||||
src: impl AsRef<std::path::Path>,
|
||||
dst: impl AsRef<std::path::Path>,
|
||||
@@ -88,7 +89,7 @@ pub fn show_in_folder(path: String) {
|
||||
pub struct DiskInfo {
|
||||
disk: String,
|
||||
total: u64,
|
||||
free: u64,
|
||||
pub free: u64,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
@@ -101,11 +102,27 @@ pub async fn get_disk_info(state: state_type!()) -> Result<DiskInfo, ()> {
|
||||
let cwd = std::env::current_dir().unwrap();
|
||||
cache = cwd.join(cache);
|
||||
}
|
||||
|
||||
get_disk_info_inner(cache).await
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn console_log(_state: state_type!(), level: &str, message: &str) -> Result<(), ()> {
|
||||
match level {
|
||||
"error" => log::error!("[frontend] {}", message),
|
||||
"warn" => log::warn!("[frontend] {}", message),
|
||||
"info" => log::info!("[frontend] {}", message),
|
||||
_ => log::debug!("[frontend] {}", message),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_disk_info_inner(target: PathBuf) -> Result<DiskInfo, ()> {
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// get disk info from df command
|
||||
let output = tokio::process::Command::new("df")
|
||||
.arg(cache)
|
||||
.arg(target)
|
||||
.output()
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -129,7 +146,7 @@ pub async fn get_disk_info(state: state_type!()) -> Result<DiskInfo, ()> {
|
||||
{
|
||||
// check system disk info
|
||||
let disks = sysinfo::Disks::new_with_refreshed_list();
|
||||
// get cache disk info
|
||||
// get target disk info
|
||||
let mut disk_info = DiskInfo {
|
||||
disk: "".into(),
|
||||
total: 0,
|
||||
@@ -140,7 +157,7 @@ pub async fn get_disk_info(state: state_type!()) -> Result<DiskInfo, ()> {
|
||||
let mut longest_match = 0;
|
||||
for disk in disks.list() {
|
||||
let mount_point = disk.mount_point().to_str().unwrap();
|
||||
if cache.starts_with(mount_point) && mount_point.split("/").count() > longest_match {
|
||||
if target.starts_with(mount_point) && mount_point.split("/").count() > longest_match {
|
||||
disk_info.disk = mount_point.into();
|
||||
disk_info.total = disk.total_space();
|
||||
disk_info.free = disk.available_space();
|
||||
@@ -211,7 +228,7 @@ pub async fn open_live(
|
||||
format!("Live:{}:{}", room_id, live_id),
|
||||
tauri::WebviewUrl::App(
|
||||
format!(
|
||||
"live_index.html?platform={}&room_id={}&live_id={}",
|
||||
"index_live.html?platform={}&room_id={}&live_id={}",
|
||||
platform.as_str(),
|
||||
room_id,
|
||||
live_id
|
||||
@@ -242,3 +259,46 @@ pub async fn open_live(
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(feature = "gui")]
|
||||
#[tauri::command]
|
||||
pub async fn open_clip(state: state_type!(), video_id: i64) -> Result<(), String> {
|
||||
log::info!("Open clip window: {}", video_id);
|
||||
let builder = tauri::WebviewWindowBuilder::new(
|
||||
&state.app_handle,
|
||||
format!("Clip:{}", video_id),
|
||||
tauri::WebviewUrl::App(format!("index_clip.html?id={}", video_id).into()),
|
||||
)
|
||||
.title(format!("Clip window:{}", video_id))
|
||||
.theme(Some(Theme::Light))
|
||||
.inner_size(1200.0, 800.0)
|
||||
.effects(WindowEffectsConfig {
|
||||
effects: vec![
|
||||
tauri_utils::WindowEffect::Tabbed,
|
||||
tauri_utils::WindowEffect::Mica,
|
||||
],
|
||||
state: None,
|
||||
radius: None,
|
||||
color: None,
|
||||
});
|
||||
|
||||
if let Err(e) = builder.decorations(true).build() {
|
||||
log::error!("clip window build failed: {}", e);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn list_folder(_state: state_type!(), path: String) -> Result<Vec<String>, String> {
|
||||
let path = PathBuf::from(path);
|
||||
let entries = std::fs::read_dir(path);
|
||||
if entries.is_err() {
|
||||
return Err(format!("Read directory failed: {}", entries.err().unwrap()));
|
||||
}
|
||||
let mut files = Vec::new();
|
||||
for entry in entries.unwrap().flatten() {
|
||||
files.push(entry.path().to_str().unwrap().to_string());
|
||||
}
|
||||
Ok(files)
|
||||
}
|
||||
|
||||
@@ -1,14 +1,16 @@
|
||||
use crate::database::video::VideoRow;
|
||||
use crate::database::task::TaskRow;
|
||||
use crate::database::video::{VideoNoCover, VideoRow};
|
||||
use crate::ffmpeg;
|
||||
use crate::handlers::utils::get_disk_info_inner;
|
||||
use crate::progress_reporter::{
|
||||
cancel_progress, EventEmitter, ProgressReporter, ProgressReporterTrait,
|
||||
};
|
||||
use crate::recorder::bilibili::profile::Profile;
|
||||
use crate::recorder_manager::ClipRangeParams;
|
||||
use crate::subtitle_generator::whisper::{self};
|
||||
use crate::subtitle_generator::SubtitleGenerator;
|
||||
use crate::subtitle_generator::item_to_srt;
|
||||
use chrono::Utc;
|
||||
use std::path::Path;
|
||||
use serde_json::json;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::state::State;
|
||||
use crate::state_type;
|
||||
@@ -22,25 +24,76 @@ pub async fn clip_range(
|
||||
event_id: String,
|
||||
params: ClipRangeParams,
|
||||
) -> Result<VideoRow, String> {
|
||||
// check storage space, preserve 1GB for other usage
|
||||
let output = state.config.read().await.output.clone();
|
||||
let mut output = PathBuf::from(&output);
|
||||
if output.is_relative() {
|
||||
// get current working directory
|
||||
let cwd = std::env::current_dir().unwrap();
|
||||
output = cwd.join(output);
|
||||
}
|
||||
if let Ok(disk_info) = get_disk_info_inner(output).await {
|
||||
// if free space is less than 1GB, return error
|
||||
if disk_info.free < 1024 * 1024 * 1024 {
|
||||
return Err("Storage space is not enough, clip canceled".to_string());
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "gui")]
|
||||
let emitter = EventEmitter::new(state.app_handle.clone());
|
||||
#[cfg(feature = "headless")]
|
||||
let emitter = EventEmitter::new(state.progress_manager.get_event_sender());
|
||||
let reporter = ProgressReporter::new(&emitter, &event_id).await?;
|
||||
match clip_range_inner(state, &reporter, params).await {
|
||||
let mut params_without_cover = params.clone();
|
||||
params_without_cover.cover = "".to_string();
|
||||
let task = TaskRow {
|
||||
id: event_id.clone(),
|
||||
task_type: "clip_range".to_string(),
|
||||
status: "pending".to_string(),
|
||||
message: "".to_string(),
|
||||
metadata: json!({
|
||||
"params": params_without_cover,
|
||||
})
|
||||
.to_string(),
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
};
|
||||
state.db.add_task(&task).await?;
|
||||
log::info!("Create task: {} {}", task.id, task.task_type);
|
||||
match clip_range_inner(&state, &reporter, params).await {
|
||||
Ok(video) => {
|
||||
reporter.finish(true, "切片完成").await;
|
||||
state
|
||||
.db
|
||||
.update_task(&event_id, "success", "切片完成", None)
|
||||
.await?;
|
||||
if state.config.read().await.auto_subtitle {
|
||||
// generate a subtitle task event id
|
||||
let subtitle_event_id = format!("{}_subtitle", event_id);
|
||||
let result =
|
||||
generate_video_subtitle(state.clone(), subtitle_event_id, video.id).await;
|
||||
if let Ok(subtitle) = result {
|
||||
let result = update_video_subtitle(state.clone(), video.id, subtitle).await;
|
||||
if let Err(e) = result {
|
||||
log::error!("Update video subtitle error: {}", e);
|
||||
}
|
||||
} else {
|
||||
log::error!("Generate video subtitle error: {}", result.err().unwrap());
|
||||
}
|
||||
}
|
||||
Ok(video)
|
||||
}
|
||||
Err(e) => {
|
||||
reporter.finish(false, &format!("切片失败: {}", e)).await;
|
||||
state
|
||||
.db
|
||||
.update_task(&event_id, "failed", &format!("切片失败: {}", e), None)
|
||||
.await?;
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn clip_range_inner(
|
||||
state: state_type!(),
|
||||
state: &state_type!(),
|
||||
reporter: &ProgressReporter,
|
||||
params: ClipRangeParams,
|
||||
) -> Result<VideoRow, String> {
|
||||
@@ -88,27 +141,9 @@ async fn clip_range_inner(
|
||||
desc: "".into(),
|
||||
tags: "".into(),
|
||||
area: 0,
|
||||
platform: params.platform.clone(),
|
||||
})
|
||||
.await?;
|
||||
if state.config.read().await.auto_subtitle
|
||||
&& !state.config.read().await.whisper_model.is_empty()
|
||||
{
|
||||
log::info!("Auto subtitle enabled");
|
||||
if let Ok(generator) = whisper::new(
|
||||
Path::new(&state.config.read().await.whisper_model),
|
||||
&state.config.read().await.whisper_prompt,
|
||||
)
|
||||
.await
|
||||
{
|
||||
reporter.update("提取音频中");
|
||||
let audio_path = file.with_extension("wav");
|
||||
ffmpeg::extract_audio(&file).await?;
|
||||
reporter.update("生成字幕中");
|
||||
generator
|
||||
.generate_subtitle(reporter, &audio_path, &file.with_extension("srt"))
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
state
|
||||
.db
|
||||
.new_message(
|
||||
@@ -156,20 +191,44 @@ pub async fn upload_procedure(
|
||||
#[cfg(feature = "headless")]
|
||||
let emitter = EventEmitter::new(state.progress_manager.get_event_sender());
|
||||
let reporter = ProgressReporter::new(&emitter, &event_id).await?;
|
||||
match upload_procedure_inner(state, &reporter, uid, room_id, video_id, cover, profile).await {
|
||||
let task = TaskRow {
|
||||
id: event_id.clone(),
|
||||
task_type: "upload_procedure".to_string(),
|
||||
status: "pending".to_string(),
|
||||
message: "".to_string(),
|
||||
metadata: json!({
|
||||
"uid": uid,
|
||||
"room_id": room_id,
|
||||
"video_id": video_id,
|
||||
"profile": profile,
|
||||
})
|
||||
.to_string(),
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
};
|
||||
state.db.add_task(&task).await?;
|
||||
log::info!("Create task: {:?}", task);
|
||||
match upload_procedure_inner(&state, &reporter, uid, room_id, video_id, cover, profile).await {
|
||||
Ok(bvid) => {
|
||||
reporter.finish(true, "投稿成功").await;
|
||||
state
|
||||
.db
|
||||
.update_task(&event_id, "success", "投稿成功", None)
|
||||
.await?;
|
||||
Ok(bvid)
|
||||
}
|
||||
Err(e) => {
|
||||
reporter.finish(false, &format!("投稿失败: {}", e)).await;
|
||||
state
|
||||
.db
|
||||
.update_task(&event_id, "failed", &format!("投稿失败: {}", e), None)
|
||||
.await?;
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn upload_procedure_inner(
|
||||
state: state_type!(),
|
||||
state: &state_type!(),
|
||||
reporter: &ProgressReporter,
|
||||
uid: u64,
|
||||
room_id: u64,
|
||||
@@ -246,8 +305,26 @@ pub async fn get_video(state: state_type!(), id: i64) -> Result<VideoRow, String
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_videos(state: state_type!(), room_id: u64) -> Result<Vec<VideoRow>, String> {
|
||||
Ok(state.db.get_videos(room_id).await?)
|
||||
pub async fn get_videos(state: state_type!(), room_id: u64) -> Result<Vec<VideoNoCover>, String> {
|
||||
state
|
||||
.db
|
||||
.get_videos(room_id)
|
||||
.await
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_all_videos(state: state_type!()) -> Result<Vec<VideoNoCover>, String> {
|
||||
state.db.get_all_videos().await.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_video_cover(state: state_type!(), id: i64) -> Result<String, String> {
|
||||
state
|
||||
.db
|
||||
.get_video_cover(id)
|
||||
.await
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
@@ -259,19 +336,17 @@ pub async fn delete_video(state: state_type!(), id: i64) -> Result<(), String> {
    // delete video files
    let filepath = Path::new(state.config.read().await.output.as_str()).join(&video.file);
    let file = Path::new(&filepath);
    if let Err(e) = std::fs::remove_file(file) {
        log::warn!("Delete video file error: {}", e);
    }
    let _ = std::fs::remove_file(file);

    // delete srt file
    let srt_path = file.with_extension("srt");
    if let Err(e) = std::fs::remove_file(srt_path) {
        log::warn!("Delete srt file error: {}", e);
    }
    let _ = std::fs::remove_file(srt_path);
    // delete wav file
    let wav_path = file.with_extension("wav");
    if let Err(e) = std::fs::remove_file(wav_path) {
        log::warn!("Delete wav file error: {}", e);
    }
    let _ = std::fs::remove_file(wav_path);
    // delete mp3 file
    let mp3_path = file.with_extension("mp3");
    let _ = std::fs::remove_file(mp3_path);
    Ok(())
}

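A compact equivalent of the per-extension cleanup above, shown only as a sketch; the helper name is hypothetical and not part of the change.

// Hypothetical consolidation of the side-file removals in delete_video.
fn remove_side_files(video_path: &std::path::Path) {
    for ext in ["srt", "wav", "mp3"] {
        // Ignore missing files, matching the `let _ =` pattern in the diff.
        let _ = std::fs::remove_file(video_path.with_extension(ext));
    }
}
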
@@ -294,6 +369,7 @@ pub async fn update_video_cover(
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_video_subtitle(state: state_type!(), id: i64) -> Result<String, String> {
|
||||
log::debug!("Get video subtitle: {}", id);
|
||||
let video = state.db.get_video(id).await?;
|
||||
let filepath = Path::new(state.config.read().await.output.as_str()).join(&video.file);
|
||||
let file = Path::new(&filepath);
|
||||
@@ -316,46 +392,90 @@ pub async fn generate_video_subtitle(
|
||||
#[cfg(feature = "headless")]
|
||||
let emitter = EventEmitter::new(state.progress_manager.get_event_sender());
|
||||
let reporter = ProgressReporter::new(&emitter, &event_id).await?;
|
||||
match generate_video_subtitle_inner(state, &reporter, id).await {
|
||||
Ok(subtitle) => {
|
||||
let task = TaskRow {
|
||||
id: event_id.clone(),
|
||||
task_type: "generate_video_subtitle".to_string(),
|
||||
status: "pending".to_string(),
|
||||
message: "".to_string(),
|
||||
metadata: json!({
|
||||
"video_id": id,
|
||||
})
|
||||
.to_string(),
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
};
|
||||
state.db.add_task(&task).await?;
|
||||
log::info!("Create task: {:?}", task);
|
||||
let config = state.config.read().await;
|
||||
let generator_type = config.subtitle_generator_type.as_str();
|
||||
let whisper_model = config.whisper_model.clone();
|
||||
let whisper_prompt = config.whisper_prompt.clone();
|
||||
let openai_api_key = config.openai_api_key.clone();
|
||||
let openai_api_endpoint = config.openai_api_endpoint.clone();
|
||||
let language_hint = state.config.read().await.whisper_language.clone();
|
||||
let language_hint = language_hint.as_str();
|
||||
|
||||
let video = state.db.get_video(id).await?;
|
||||
let filepath = Path::new(state.config.read().await.output.as_str()).join(&video.file);
|
||||
let file = Path::new(&filepath);
|
||||
|
||||
match ffmpeg::generate_video_subtitle(
|
||||
Some(&reporter),
|
||||
file,
|
||||
generator_type,
|
||||
&whisper_model,
|
||||
&whisper_prompt,
|
||||
&openai_api_key,
|
||||
&openai_api_endpoint,
|
||||
language_hint,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(result) => {
|
||||
reporter.finish(true, "字幕生成完成").await;
|
||||
// for local whisper, we need to update the task status to success
|
||||
state
|
||||
.db
|
||||
.update_task(
|
||||
&event_id,
|
||||
"success",
|
||||
"字幕生成完成",
|
||||
Some(
|
||||
json!({
|
||||
"task_id": result.subtitle_id,
|
||||
"service": result.generator_type.as_str(),
|
||||
})
|
||||
.to_string()
|
||||
.as_str(),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let subtitle = result
|
||||
.subtitle_content
|
||||
.iter()
|
||||
.map(item_to_srt)
|
||||
.collect::<Vec<String>>()
|
||||
.join("");
|
||||
|
||||
let result = update_video_subtitle(state.clone(), id, subtitle.clone()).await;
|
||||
if let Err(e) = result {
|
||||
log::error!("Update video subtitle error: {}", e);
|
||||
}
|
||||
Ok(subtitle)
|
||||
}
|
||||
Err(e) => {
|
||||
reporter
|
||||
.finish(false, &format!("字幕生成失败: {}", e))
|
||||
.await;
|
||||
state
|
||||
.db
|
||||
.update_task(&event_id, "failed", &format!("字幕生成失败: {}", e), None)
|
||||
.await?;
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn generate_video_subtitle_inner(
|
||||
state: state_type!(),
|
||||
reporter: &ProgressReporter,
|
||||
id: i64,
|
||||
) -> Result<String, String> {
|
||||
let video = state.db.get_video(id).await?;
|
||||
let filepath = Path::new(state.config.read().await.output.as_str()).join(&video.file);
|
||||
let file = Path::new(&filepath);
|
||||
if let Ok(generator) = whisper::new(
|
||||
Path::new(&state.config.read().await.whisper_model),
|
||||
&state.config.read().await.whisper_prompt,
|
||||
)
|
||||
.await
|
||||
{
|
||||
let audio_path = file.with_extension("wav");
|
||||
ffmpeg::extract_audio(file).await?;
|
||||
|
||||
let subtitle = generator
|
||||
.generate_subtitle(reporter, &audio_path, &file.with_extension("srt"))
|
||||
.await?;
|
||||
Ok(subtitle)
|
||||
} else {
|
||||
Err("Whisper model not found".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn update_video_subtitle(
|
||||
state: state_type!(),
|
||||
@@ -384,22 +504,44 @@ pub async fn encode_video_subtitle(
|
||||
#[cfg(feature = "headless")]
|
||||
let emitter = EventEmitter::new(state.progress_manager.get_event_sender());
|
||||
let reporter = ProgressReporter::new(&emitter, &event_id).await?;
|
||||
match encode_video_subtitle_inner(state, &reporter, id, srt_style).await {
|
||||
let task = TaskRow {
|
||||
id: event_id.clone(),
|
||||
task_type: "encode_video_subtitle".to_string(),
|
||||
status: "pending".to_string(),
|
||||
message: "".to_string(),
|
||||
metadata: json!({
|
||||
"video_id": id,
|
||||
"srt_style": srt_style,
|
||||
})
|
||||
.to_string(),
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
};
|
||||
state.db.add_task(&task).await?;
|
||||
log::info!("Create task: {:?}", task);
|
||||
match encode_video_subtitle_inner(&state, &reporter, id, srt_style).await {
|
||||
Ok(video) => {
|
||||
reporter.finish(true, "字幕编码完成").await;
|
||||
state
|
||||
.db
|
||||
.update_task(&event_id, "success", "字幕编码完成", None)
|
||||
.await?;
|
||||
Ok(video)
|
||||
}
|
||||
Err(e) => {
|
||||
reporter
|
||||
.finish(false, &format!("字幕编码失败: {}", e))
|
||||
.await;
|
||||
state
|
||||
.db
|
||||
.update_task(&event_id, "failed", &format!("字幕编码失败: {}", e), None)
|
||||
.await?;
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn encode_video_subtitle_inner(
|
||||
state: state_type!(),
|
||||
state: &state_type!(),
|
||||
reporter: &ProgressReporter,
|
||||
id: i64,
|
||||
srt_style: String,
|
||||
@@ -427,8 +569,18 @@ async fn encode_video_subtitle_inner(
|
||||
desc: video.desc.clone(),
|
||||
tags: video.tags.clone(),
|
||||
area: video.area,
|
||||
platform: video.platform,
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(new_video)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn generic_ffmpeg_command(
|
||||
_state: state_type!(),
|
||||
args: Vec<String>,
|
||||
) -> Result<String, String> {
|
||||
let args_str: Vec<&str> = args.iter().map(|s| s.as_str()).collect();
|
||||
ffmpeg::generic_ffmpeg_command(&args_str).await
|
||||
}
|
||||
|
||||
@@ -3,30 +3,37 @@ use std::fmt::{self, Display};
|
||||
use crate::{
|
||||
config::Config,
|
||||
database::{
|
||||
account::AccountRow, message::MessageRow, record::RecordRow, recorder::RecorderRow,
|
||||
video::VideoRow,
|
||||
account::AccountRow,
|
||||
message::MessageRow,
|
||||
record::RecordRow,
|
||||
recorder::RecorderRow,
|
||||
task::TaskRow,
|
||||
video::{VideoNoCover, VideoRow},
|
||||
},
|
||||
handlers::{
|
||||
account::{
|
||||
add_account, get_account_count, get_accounts, get_qr, get_qr_status, remove_account,
|
||||
},
|
||||
config::{
|
||||
get_config, set_cache_path, set_output_path, update_auto_generate,
|
||||
update_clip_name_format, update_notify, update_subtitle_setting, update_whisper_model,
|
||||
update_whisper_prompt,
|
||||
get_config, update_auto_generate, update_clip_name_format, update_notify,
|
||||
update_openai_api_endpoint, update_openai_api_key, update_status_check_interval,
|
||||
update_subtitle_generator_type, update_subtitle_setting, update_user_agent,
|
||||
update_whisper_language, update_whisper_model, update_whisper_prompt,
|
||||
},
|
||||
message::{delete_message, get_messages, read_message},
|
||||
recorder::{
|
||||
add_recorder, delete_archive, export_danmu, fetch_hls, force_start, force_stop,
|
||||
get_archive, get_archives, get_danmu_record, get_recent_record, get_recorder_list,
|
||||
get_room_info, get_today_record_count, get_total_length, remove_recorder, send_danmaku,
|
||||
set_auto_start, ExportDanmuOptions,
|
||||
add_recorder, delete_archive, export_danmu, fetch_hls, generate_archive_subtitle,
|
||||
get_archive, get_archive_subtitle, get_archives, get_danmu_record, get_recent_record,
|
||||
get_recorder_list, get_room_info, get_today_record_count, get_total_length,
|
||||
remove_recorder, send_danmaku, set_enable, ExportDanmuOptions,
|
||||
},
|
||||
utils::{get_disk_info, DiskInfo},
|
||||
task::{delete_task, get_tasks},
|
||||
utils::{console_log, get_disk_info, list_folder, DiskInfo},
|
||||
video::{
|
||||
cancel, clip_range, delete_video, encode_video_subtitle, generate_video_subtitle,
|
||||
get_video, get_video_subtitle, get_video_typelist, get_videos, update_video_cover,
|
||||
update_video_subtitle, upload_procedure,
|
||||
generic_ffmpeg_command, get_all_videos, get_video, get_video_cover, get_video_subtitle,
|
||||
get_video_typelist, get_videos, update_video_cover, update_video_subtitle,
|
||||
upload_procedure,
|
||||
},
|
||||
AccountInfo,
|
||||
},
|
||||
@@ -190,33 +197,17 @@ async fn handler_get_config(
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct SetCachePathRequest {
|
||||
cache_path: String,
|
||||
struct UpdateStatusCheckIntervalRequest {
|
||||
interval: u64,
|
||||
}
|
||||
|
||||
async fn handler_set_cache_path(
|
||||
async fn handler_update_status_check_interval(
|
||||
state: axum::extract::State<State>,
|
||||
Json(cache_path): Json<SetCachePathRequest>,
|
||||
Json(request): Json<UpdateStatusCheckIntervalRequest>,
|
||||
) -> Result<Json<ApiResponse<()>>, ApiError> {
|
||||
set_cache_path(state.0, cache_path.cache_path)
|
||||
update_status_check_interval(state.0, request.interval)
|
||||
.await
|
||||
.expect("Failed to set cache path");
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct SetOutputPathRequest {
|
||||
output_path: String,
|
||||
}
|
||||
|
||||
async fn handler_set_output_path(
|
||||
state: axum::extract::State<State>,
|
||||
Json(output_path): Json<SetOutputPathRequest>,
|
||||
) -> Result<Json<ApiResponse<()>>, ApiError> {
|
||||
set_output_path(state.0, output_path.output_path)
|
||||
.await
|
||||
.expect("Failed to set output path");
|
||||
.expect("Failed to update status check interval");
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
@@ -261,12 +252,44 @@ async fn handler_update_whisper_model(
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct UpdateWhisperLanguageRequest {
|
||||
whisper_language: String,
|
||||
}
|
||||
|
||||
async fn handler_update_whisper_language(
|
||||
state: axum::extract::State<State>,
|
||||
Json(whisper_language): Json<UpdateWhisperLanguageRequest>,
|
||||
) -> Result<Json<ApiResponse<()>>, ApiError> {
|
||||
update_whisper_language(state.0, whisper_language.whisper_language)
|
||||
.await
|
||||
.expect("Failed to update whisper language");
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct UpdateSubtitleSettingRequest {
|
||||
auto_subtitle: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct UpdateUserAgentRequest {
|
||||
user_agent: String,
|
||||
}
|
||||
|
||||
async fn handler_update_user_agent(
|
||||
state: axum::extract::State<State>,
|
||||
Json(user_agent): Json<UpdateUserAgentRequest>,
|
||||
) -> Result<Json<ApiResponse<()>>, ApiError> {
|
||||
update_user_agent(state.0, user_agent.user_agent)
|
||||
.await
|
||||
.expect("Failed to update user agent");
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
async fn handler_update_subtitle_setting(
|
||||
state: axum::extract::State<State>,
|
||||
Json(subtitle_setting): Json<UpdateSubtitleSettingRequest>,
|
||||
@@ -309,6 +332,54 @@ async fn handler_update_whisper_prompt(
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct UpdateSubtitleGeneratorTypeRequest {
|
||||
subtitle_generator_type: String,
|
||||
}
|
||||
|
||||
async fn handler_update_subtitle_generator_type(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<UpdateSubtitleGeneratorTypeRequest>,
|
||||
) -> Result<Json<ApiResponse<()>>, ApiError> {
|
||||
update_subtitle_generator_type(state.0, param.subtitle_generator_type)
|
||||
.await
|
||||
.expect("Failed to update subtitle generator type");
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct UpdateOpenaiApiEndpointRequest {
|
||||
openai_api_endpoint: String,
|
||||
}
|
||||
|
||||
async fn handler_update_openai_api_endpoint(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<UpdateOpenaiApiEndpointRequest>,
|
||||
) -> Result<Json<ApiResponse<()>>, ApiError> {
|
||||
update_openai_api_endpoint(state.0, param.openai_api_endpoint)
|
||||
.await
|
||||
.expect("Failed to update openai api endpoint");
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct UpdateOpenaiApiKeyRequest {
|
||||
openai_api_key: String,
|
||||
}
|
||||
|
||||
async fn handler_update_openai_api_key(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<UpdateOpenaiApiKeyRequest>,
|
||||
) -> Result<Json<ApiResponse<()>>, ApiError> {
|
||||
update_openai_api_key(state.0, param.openai_api_key)
|
||||
.await
|
||||
.expect("Failed to update openai api key");
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct UpdateAutoGenerateRequest {
|
||||
@@ -379,13 +450,14 @@ async fn handler_get_recorder_list(
|
||||
struct AddRecorderRequest {
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
extra: String,
|
||||
}
|
||||
|
||||
async fn handler_add_recorder(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<AddRecorderRequest>,
|
||||
) -> Result<Json<ApiResponse<RecorderRow>>, ApiError> {
|
||||
let recorder = add_recorder(state.0, param.platform, param.room_id)
|
||||
let recorder = add_recorder(state.0, param.platform, param.room_id, param.extra)
|
||||
.await
|
||||
.expect("Failed to add recorder");
|
||||
Ok(Json(ApiResponse::success(recorder)))
|
||||
@@ -452,6 +524,40 @@ async fn handler_get_archive(
|
||||
Ok(Json(ApiResponse::success(archive)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct GetArchiveSubtitleRequest {
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
live_id: String,
|
||||
}
|
||||
|
||||
async fn handler_get_archive_subtitle(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<GetArchiveSubtitleRequest>,
|
||||
) -> Result<Json<ApiResponse<String>>, ApiError> {
|
||||
let subtitle =
|
||||
get_archive_subtitle(state.0, param.platform, param.room_id, param.live_id).await?;
|
||||
Ok(Json(ApiResponse::success(subtitle)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct GenerateArchiveSubtitleRequest {
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
live_id: String,
|
||||
}
|
||||
|
||||
async fn handler_generate_archive_subtitle(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<GenerateArchiveSubtitleRequest>,
|
||||
) -> Result<Json<ApiResponse<String>>, ApiError> {
|
||||
let subtitle =
|
||||
generate_archive_subtitle(state.0, param.platform, param.room_id, param.live_id).await?;
|
||||
Ok(Json(ApiResponse::success(subtitle)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct DeleteArchiveRequest {
|
||||
@@ -518,6 +624,7 @@ async fn handler_get_today_record_count(
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct GetRecentRecordRequest {
|
||||
room_id: u64,
|
||||
offset: u64,
|
||||
limit: u64,
|
||||
}
|
||||
@@ -526,52 +633,24 @@ async fn handler_get_recent_record(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<GetRecentRecordRequest>,
|
||||
) -> Result<Json<ApiResponse<Vec<RecordRow>>>, ApiError> {
|
||||
let recent_record = get_recent_record(state.0, param.offset, param.limit).await?;
|
||||
let recent_record =
|
||||
get_recent_record(state.0, param.room_id, param.offset, param.limit).await?;
|
||||
Ok(Json(ApiResponse::success(recent_record)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct SetAutoStartRequest {
|
||||
struct SetEnableRequest {
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
auto_start: bool,
|
||||
enabled: bool,
|
||||
}
|
||||
async fn handler_set_auto_start(
|
||||
|
||||
async fn handler_set_enable(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<SetAutoStartRequest>,
|
||||
Json(param): Json<SetEnableRequest>,
|
||||
) -> Result<Json<ApiResponse<()>>, ApiError> {
|
||||
set_auto_start(state.0, param.platform, param.room_id, param.auto_start).await?;
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct ForceStartRequest {
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
}
|
||||
|
||||
async fn handler_force_start(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<ForceStartRequest>,
|
||||
) -> Result<Json<ApiResponse<()>>, ApiError> {
|
||||
force_start(state.0, param.platform, param.room_id).await?;
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct ForceStopRequest {
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
}
|
||||
|
||||
async fn handler_force_stop(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<ForceStopRequest>,
|
||||
) -> Result<Json<ApiResponse<()>>, ApiError> {
|
||||
force_stop(state.0, param.platform, param.room_id).await?;
|
||||
set_enable(state.0, param.platform, param.room_id, param.enabled).await?;
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
@@ -651,14 +730,36 @@ async fn handler_get_video(
|
||||
struct GetVideosRequest {
|
||||
room_id: u64,
|
||||
}
|
||||
|
||||
async fn handler_get_videos(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<GetVideosRequest>,
|
||||
) -> Result<Json<ApiResponse<Vec<VideoRow>>>, ApiError> {
|
||||
) -> Result<Json<ApiResponse<Vec<VideoNoCover>>>, ApiError> {
|
||||
let videos = get_videos(state.0, param.room_id).await?;
|
||||
Ok(Json(ApiResponse::success(videos)))
|
||||
}
|
||||
|
||||
async fn handler_get_all_videos(
|
||||
state: axum::extract::State<State>,
|
||||
) -> Result<Json<ApiResponse<Vec<VideoNoCover>>>, ApiError> {
|
||||
let videos = get_all_videos(state.0).await?;
|
||||
Ok(Json(ApiResponse::success(videos)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct GetVideoCoverRequest {
|
||||
id: i64,
|
||||
}
|
||||
|
||||
async fn handler_get_video_cover(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<GetVideoCoverRequest>,
|
||||
) -> Result<Json<ApiResponse<String>>, ApiError> {
|
||||
let video_cover = get_video_cover(state.0, param.id).await?;
|
||||
Ok(Json(ApiResponse::success(video_cover)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct DeleteVideoRequest {
|
||||
@@ -706,8 +807,8 @@ async fn handler_generate_video_subtitle(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<GenerateVideoSubtitleRequest>,
|
||||
) -> Result<Json<ApiResponse<String>>, ApiError> {
|
||||
generate_video_subtitle(state.0, param.event_id.clone(), param.id).await?;
|
||||
Ok(Json(ApiResponse::success(param.event_id)))
|
||||
let result = generate_video_subtitle(state.0, param.event_id.clone(), param.id).await?;
|
||||
Ok(Json(ApiResponse::success(result)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
@@ -762,6 +863,14 @@ async fn handler_encode_video_subtitle(
|
||||
encode_video_subtitle_param.event_id,
|
||||
)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct ConsoleLogRequest {
|
||||
level: String,
|
||||
message: String,
|
||||
}
|
||||
|
||||
async fn handler_get_disk_info(
|
||||
state: axum::extract::State<State>,
|
||||
) -> Result<Json<ApiResponse<DiskInfo>>, ApiError> {
|
||||
@@ -771,6 +880,14 @@ async fn handler_get_disk_info(
|
||||
Ok(Json(ApiResponse::success(disk_info)))
|
||||
}
|
||||
|
||||
async fn handler_console_log(
|
||||
state: axum::extract::State<State>,
|
||||
Json(param): Json<ConsoleLogRequest>,
|
||||
) -> Result<Json<ApiResponse<()>>, ApiError> {
|
||||
let _ = console_log(state.0, &param.level, &param.message).await;
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct HttpProxyRequest {
|
||||
@@ -848,6 +965,55 @@ async fn handler_export_danmu(
|
||||
Ok(Json(ApiResponse::success(result)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct DeleteTaskRequest {
|
||||
id: String,
|
||||
}
|
||||
|
||||
async fn handler_delete_task(
|
||||
state: axum::extract::State<State>,
|
||||
Json(params): Json<DeleteTaskRequest>,
|
||||
) -> Result<Json<ApiResponse<()>>, ApiError> {
|
||||
delete_task(state.0, &params.id).await?;
|
||||
Ok(Json(ApiResponse::success(())))
|
||||
}
|
||||
|
||||
async fn handler_get_tasks(
|
||||
state: axum::extract::State<State>,
|
||||
) -> Result<Json<ApiResponse<Vec<TaskRow>>>, ApiError> {
|
||||
let tasks = get_tasks(state.0).await?;
|
||||
Ok(Json(ApiResponse::success(tasks)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct GenericFfmpegCommandRequest {
|
||||
args: Vec<String>,
|
||||
}
|
||||
|
||||
async fn handler_generic_ffmpeg_command(
|
||||
state: axum::extract::State<State>,
|
||||
Json(params): Json<GenericFfmpegCommandRequest>,
|
||||
) -> Result<Json<ApiResponse<String>>, ApiError> {
|
||||
let result = generic_ffmpeg_command(state.0, params.args).await?;
|
||||
Ok(Json(ApiResponse::success(result)))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct ListFolderRequest {
|
||||
path: String,
|
||||
}
|
||||
|
||||
async fn handler_list_folder(
|
||||
state: axum::extract::State<State>,
|
||||
Json(params): Json<ListFolderRequest>,
|
||||
) -> Result<Json<ApiResponse<Vec<String>>>, ApiError> {
|
||||
let result = list_folder(state.0, params.path).await?;
|
||||
Ok(Json(ApiResponse::success(result)))
|
||||
}
|
||||
|
||||
async fn handler_hls(
|
||||
state: axum::extract::State<State>,
|
||||
Path(uri): Path<String>,
|
||||
@@ -876,7 +1042,7 @@ async fn handler_hls(
|
||||
.map_err(|_| StatusCode::NOT_FOUND)?;
|
||||
|
||||
// Set appropriate content type based on file extension
|
||||
let content_type = match filename.split('.').last() {
|
||||
let content_type = match filename.split('.').next_back() {
|
||||
Some("m3u8") => "application/vnd.apple.mpegurl",
|
||||
Some("ts") => "video/mp2t",
|
||||
Some("aac") => "audio/aac",
|
||||
@@ -1088,100 +1254,146 @@ pub async fn start_api_server(state: State) {
|
||||
.allow_methods(Any)
|
||||
.allow_headers(Any);
|
||||
|
||||
let app = Router::new()
|
||||
let mut app = Router::new()
|
||||
// Serve static files from dist directory
|
||||
.nest_service("/", ServeDir::new("./dist"))
|
||||
// Account commands
|
||||
.route("/api/get_accounts", post(handler_get_accounts))
|
||||
.route("/api/add_account", post(handler_add_account))
|
||||
.route("/api/remove_account", post(handler_remove_account))
|
||||
.route("/api/get_account_count", post(handler_get_account_count))
|
||||
.route("/api/get_qr", post(handler_get_qr))
|
||||
.route("/api/get_qr_status", post(handler_get_qr_status))
|
||||
.route("/api/get_account_count", post(handler_get_account_count));
|
||||
|
||||
// Only add add/remove routes if not in readonly mode
|
||||
if !state.readonly {
|
||||
app = app
|
||||
.route("/api/get_qr", post(handler_get_qr))
|
||||
.route("/api/get_qr_status", post(handler_get_qr_status))
|
||||
.route("/api/add_account", post(handler_add_account))
|
||||
.route("/api/remove_account", post(handler_remove_account))
|
||||
.route(
|
||||
"/api/update_whisper_model",
|
||||
post(handler_update_whisper_model),
|
||||
)
|
||||
.route(
|
||||
"/api/update_subtitle_setting",
|
||||
post(handler_update_subtitle_setting),
|
||||
)
|
||||
.route(
|
||||
"/api/update_clip_name_format",
|
||||
post(handler_update_clip_name_format),
|
||||
)
|
||||
.route("/api/add_recorder", post(handler_add_recorder))
|
||||
.route("/api/remove_recorder", post(handler_remove_recorder))
|
||||
.route("/api/delete_archive", post(handler_delete_archive))
|
||||
.route("/api/send_danmaku", post(handler_send_danmaku))
|
||||
.route("/api/set_enable", post(handler_set_enable))
|
||||
.route("/api/upload_procedure", post(handler_upload_procedure))
|
||||
.route("/api/cancel", post(handler_cancel))
|
||||
.route("/api/delete_video", post(handler_delete_video))
|
||||
.route(
|
||||
"/api/generate_video_subtitle",
|
||||
post(handler_generate_video_subtitle),
|
||||
)
|
||||
.route(
|
||||
"/api/generate_archive_subtitle",
|
||||
post(handler_generate_archive_subtitle),
|
||||
)
|
||||
.route(
|
||||
"/api/generic_ffmpeg_command",
|
||||
post(handler_generic_ffmpeg_command),
|
||||
)
|
||||
.route(
|
||||
"/api/update_video_subtitle",
|
||||
post(handler_update_video_subtitle),
|
||||
)
|
||||
.route("/api/update_video_cover", post(handler_update_video_cover))
|
||||
.route(
|
||||
"/api/encode_video_subtitle",
|
||||
post(handler_encode_video_subtitle),
|
||||
)
|
||||
.route("/api/update_notify", post(handler_update_notify))
|
||||
.route(
|
||||
"/api/update_status_check_interval",
|
||||
post(handler_update_status_check_interval),
|
||||
)
|
||||
.route(
|
||||
"/api/update_whisper_prompt",
|
||||
post(handler_update_whisper_prompt),
|
||||
)
|
||||
.route(
|
||||
"/api/update_subtitle_generator_type",
|
||||
post(handler_update_subtitle_generator_type),
|
||||
)
|
||||
.route(
|
||||
"/api/update_openai_api_endpoint",
|
||||
post(handler_update_openai_api_endpoint),
|
||||
)
|
||||
.route(
|
||||
"/api/update_openai_api_key",
|
||||
post(handler_update_openai_api_key),
|
||||
)
|
||||
.route(
|
||||
"/api/update_auto_generate",
|
||||
post(handler_update_auto_generate),
|
||||
)
|
||||
.route(
|
||||
"/api/update_whisper_language",
|
||||
post(handler_update_whisper_language),
|
||||
)
|
||||
.route("/api/update_user_agent", post(handler_update_user_agent));
|
||||
} else {
|
||||
log::info!("Running in readonly mode, some api routes are disabled");
|
||||
}
|
app = app
// Config commands
.route("/api/get_config", post(handler_get_config))
.route("/api/set_cache_path", post(handler_set_cache_path))
.route("/api/set_output_path", post(handler_set_output_path))
.route("/api/update_notify", post(handler_update_notify))
.route(
"/api/update_whisper_model",
post(handler_update_whisper_model),
)
.route(
"/api/update_subtitle_setting",
post(handler_update_subtitle_setting),
)
.route(
"/api/update_clip_name_format",
post(handler_update_clip_name_format),
)
.route(
"/api/update_whisper_prompt",
post(handler_update_whisper_prompt),
)
.route(
"/api/update_auto_generate",
post(handler_update_auto_generate),
)
// Message commands
.route("/api/get_messages", post(handler_get_messages))
.route("/api/read_message", post(handler_read_message))
.route("/api/delete_message", post(handler_delete_message))
// Recorder commands
.route("/api/get_recorder_list", post(handler_get_recorder_list))
.route("/api/add_recorder", post(handler_add_recorder))
.route("/api/remove_recorder", post(handler_remove_recorder))
.route("/api/get_room_info", post(handler_get_room_info))
.route("/api/get_archives", post(handler_get_archives))
.route("/api/get_archive", post(handler_get_archive))
.route("/api/delete_archive", post(handler_delete_archive))
.route(
"/api/get_archive_subtitle",
post(handler_get_archive_subtitle),
)
.route("/api/get_danmu_record", post(handler_get_danmu_record))
.route("/api/send_danmaku", post(handler_send_danmaku))
.route("/api/get_total_length", post(handler_get_total_length))
.route(
"/api/get_today_record_count",
post(handler_get_today_record_count),
)
.route("/api/get_recent_record", post(handler_get_recent_record))
.route("/api/set_auto_start", post(handler_set_auto_start))
.route("/api/force_start", post(handler_force_start))
.route("/api/force_stop", post(handler_force_stop))
// Video commands
.route("/api/clip_range", post(handler_clip_range))
.route("/api/upload_procedure", post(handler_upload_procedure))
.route("/api/cancel", post(handler_cancel))
.route("/api/get_video", post(handler_get_video))
.route("/api/get_videos", post(handler_get_videos))
.route("/api/delete_video", post(handler_delete_video))
.route("/api/get_video_cover", post(handler_get_video_cover))
.route("/api/get_all_videos", post(handler_get_all_videos))
.route("/api/get_video_typelist", post(handler_get_video_typelist))
.route("/api/update_video_cover", post(handler_update_video_cover))
.route(
"/api/generate_video_subtitle",
post(handler_generate_video_subtitle),
)
.route("/api/get_video_subtitle", post(handler_get_video_subtitle))
.route(
"/api/update_video_subtitle",
post(handler_update_video_subtitle),
)
.route(
"/api/encode_video_subtitle",
post(handler_encode_video_subtitle),
)
.route("/api/delete_task", post(handler_delete_task))
.route("/api/get_tasks", post(handler_get_tasks))
.route("/api/export_danmu", post(handler_export_danmu))
// Utils commands
.route("/api/get_disk_info", post(handler_get_disk_info))
.route("/api/console_log", post(handler_console_log))
.route("/api/list_folder", post(handler_list_folder))
.route("/api/fetch", post(handler_fetch))
.route("/hls/*uri", get(handler_hls))
.route("/output/*uri", get(handler_output))
.route("/api/sse", get(handler_sse))
.route("/api/sse", get(handler_sse));

let router = app
.layer(cors)
.layer(DefaultBodyLimit::max(20 * 1024 * 1024))
.with_state(state);

let addr = "0.0.0.0:3000";
println!("API server listening on http://{}", addr);
log::info!("API server listening on http://{}", addr);

let listener = tokio::net::TcpListener::bind(addr).await.unwrap();
axum::serve(listener, app).await.unwrap();
axum::serve(listener, router).await.unwrap();
}

@@ -21,9 +21,12 @@ mod subtitle_generator;
mod tray;

use archive_migration::try_rebuild_archives;
use async_std::fs;
use chrono::Utc;
use config::Config;
use database::Database;
use recorder::bilibili::client::BiliClient;
use recorder::PlatformType;
use recorder_manager::RecorderManager;
use simplelog::ConfigBuilder;
use state::State;
@@ -32,9 +35,14 @@ use std::path::Path;
use std::sync::Arc;
use tokio::sync::RwLock;

#[cfg(not(target_os = "windows"))]
use std::os::unix::fs::MetadataExt;

#[cfg(target_os = "windows")]
use std::os::windows::fs::MetadataExt;

#[cfg(feature = "gui")]
use {
recorder::PlatformType,
tauri::{Manager, WindowEvent},
tauri_plugin_sql::{Migration, MigrationKind},
};
@@ -53,16 +61,36 @@ use {
},
};

/// open a log file, if file size exceeds 1MB, backup log file and create a new one.
async fn open_log_file(log_dir: &Path) -> Result<File, Box<dyn std::error::Error>> {
let log_filename = log_dir.join("bsr.log");

if let Ok(meta) = fs::metadata(&log_filename).await {
#[cfg(target_os = "windows")]
let file_size = meta.file_size();
#[cfg(not(target_os = "windows"))]
let file_size = meta.size();
if file_size > 1024 * 1024 {
// move original file to backup
let date_str = Utc::now().format("%Y-%m-%d_%H-%M-%S").to_string();
let backup_filename = log_dir.join(format!("bsr-{date_str}.log"));
fs::rename(&log_filename, backup_filename).await?;
}
}

Ok(File::options()
.create(true)
.append(true)
.open(&log_filename)?)
}

async fn setup_logging(log_dir: &Path) -> Result<(), Box<dyn std::error::Error>> {
// mkdir if not exists
if !log_dir.exists() {
std::fs::create_dir_all(log_dir)?;
}

let log_file = log_dir.join("bsr.log");

// open file with append mode
let file = File::options().create(true).append(true).open(&log_file)?;
let file = open_log_file(log_dir).await?;

let config = ConfigBuilder::new()
.set_target_level(simplelog::LevelFilter::Debug)
@@ -72,6 +100,7 @@ async fn setup_logging(log_dir: &Path) -> Result<(), Box<dyn std::error::Error>>
.add_filter_ignore_str("sqlx")
.add_filter_ignore_str("reqwest")
.add_filter_ignore_str("h2")
.add_filter_ignore_str("danmu_stream")
.build();

simplelog::CombinedLogger::init(vec![
@@ -88,6 +117,9 @@ async fn setup_logging(log_dir: &Path) -> Result<(), Box<dyn std::error::Error>>
),
])?;

// logging current package version
log::info!("Current version: {}", env!("CARGO_PKG_VERSION"));

Ok(())
}

@@ -112,6 +144,34 @@ fn get_migrations() -> Vec<Migration> {
sql: r#"ALTER TABLE recorders ADD COLUMN auto_start INTEGER NOT NULL DEFAULT 1;"#,
kind: MigrationKind::Up,
},
// add platform column to videos table
Migration {
version: 3,
description: "add_platform_column",
sql: r#"ALTER TABLE videos ADD COLUMN platform TEXT;"#,
kind: MigrationKind::Up,
},
// add task table to record encode/upload task
Migration {
version: 4,
description: "add_task_table",
sql: r#"CREATE TABLE tasks (id TEXT PRIMARY KEY, type TEXT, status TEXT, message TEXT, metadata TEXT, created_at TEXT);"#,
kind: MigrationKind::Up,
},
// add id_str column to support string IDs like Douyin sec_uid while keeping uid for Bilibili compatibility
Migration {
version: 5,
description: "add_id_str_column",
sql: r#"ALTER TABLE accounts ADD COLUMN id_str TEXT;"#,
kind: MigrationKind::Up,
},
// add extra column to recorders
Migration {
version: 6,
description: "add_extra_column_to_recorders",
sql: r#"ALTER TABLE recorders ADD COLUMN extra TEXT;"#,
kind: MigrationKind::Up,
},
]
}

@@ -159,7 +219,7 @@ async fn setup_server_state(args: Args) -> Result<State, Box<dyn std::error::Err
return Err(e.into());
}
};
let client = Arc::new(BiliClient::new()?);
let client = Arc::new(BiliClient::new(&config.user_agent)?);
let config = Arc::new(RwLock::new(config));
let db = Arc::new(Database::new());
// connect to sqlite database
@@ -185,10 +245,68 @@ async fn setup_server_state(args: Args) -> Result<State, Box<dyn std::error::Err
.expect("Failed to run migrations");

db.set(db_pool).await;
db.finish_pending_tasks().await?;

let progress_manager = Arc::new(ProgressManager::new());
let emitter = EventEmitter::new(progress_manager.get_event_sender());
let recorder_manager = Arc::new(RecorderManager::new(emitter, db.clone(), config.clone()));

// Update account infos for headless mode
let accounts = db.get_accounts().await?;
for account in accounts {
let platform = PlatformType::from_str(&account.platform).unwrap();

if platform == PlatformType::BiliBili {
match client.get_user_info(&account, account.uid).await {
Ok(account_info) => {
if let Err(e) = db
.update_account(
&account.platform,
account_info.user_id,
&account_info.user_name,
&account_info.user_avatar_url,
)
.await
{
log::error!("Error when updating Bilibili account info {}", e);
}
}
Err(e) => {
log::error!("Get Bilibili user info failed {}", e);
}
}
} else if platform == PlatformType::Douyin {
// Update Douyin account info
use crate::recorder::douyin::client::DouyinClient;
let douyin_client = DouyinClient::new(&config.read().await.user_agent, &account);
match douyin_client.get_user_info().await {
Ok(user_info) => {
let avatar_url = user_info
.avatar_thumb
.url_list
.first()
.cloned()
.unwrap_or_default();

if let Err(e) = db
.update_account_with_id_str(
&account,
&user_info.sec_uid,
&user_info.nickname,
&avatar_url,
)
.await
{
log::error!("Error when updating Douyin account info {}", e);
}
}
Err(e) => {
log::error!("Get Douyin user info failed {}", e);
}
}
}
}

let _ = try_rebuild_archives(&db, config.read().await.cache.clone().into()).await;

Ok(State {
@@ -197,6 +315,7 @@ async fn setup_server_state(args: Args) -> Result<State, Box<dyn std::error::Err
config,
recorder_manager,
progress_manager,
readonly: args.readonly,
})
}

@@ -222,7 +341,7 @@ async fn setup_app_state(app: &tauri::App) -> Result<State, Box<dyn std::error::
}
};

let client = Arc::new(BiliClient::new()?);
let client = Arc::new(BiliClient::new(&config.user_agent)?);
let config = Arc::new(RwLock::new(config));
let config_clone = config.clone();
let dbs = app.state::<tauri_plugin_sql::DbInstances>().inner();
@@ -230,6 +349,13 @@ async fn setup_app_state(app: &tauri::App) -> Result<State, Box<dyn std::error::
let db_clone = db.clone();
let client_clone = client.clone();
let emitter = EventEmitter::new(app.handle().clone());
let binding = dbs.0.read().await;
let dbpool = binding.get("sqlite:data_v2.db").unwrap();
let sqlite_pool = match dbpool {
tauri_plugin_sql::DbPool::Sqlite(pool) => Some(pool),
};
db_clone.set(sqlite_pool.unwrap().clone()).await;
db_clone.finish_pending_tasks().await?;

let recorder_manager = Arc::new(RecorderManager::new(
app.app_handle().clone(),
@@ -237,12 +363,6 @@ async fn setup_app_state(app: &tauri::App) -> Result<State, Box<dyn std::error::
db.clone(),
config.clone(),
));
let binding = dbs.0.read().await;
let dbpool = binding.get("sqlite:data_v2.db").unwrap();
let sqlite_pool = match dbpool {
tauri_plugin_sql::DbPool::Sqlite(pool) => Some(pool),
};
db_clone.set(sqlite_pool.unwrap().clone()).await;

let accounts = db_clone.get_accounts().await?;
if accounts.is_empty() {
@@ -258,28 +378,55 @@ async fn setup_app_state(app: &tauri::App) -> Result<State, Box<dyn std::error::

// update account infos
for account in accounts {
// only update bilibili account
let platform = PlatformType::from_str(&account.platform).unwrap();
if platform != PlatformType::BiliBili {
continue;
}

match client_clone.get_user_info(&account, account.uid).await {
Ok(account_info) => {
if let Err(e) = db_clone
.update_account(
&account.platform,
account_info.user_id,
&account_info.user_name,
&account_info.user_avatar_url,
)
.await
{
log::error!("Error when updating account info {}", e);
if platform == PlatformType::BiliBili {
match client_clone.get_user_info(&account, account.uid).await {
Ok(account_info) => {
if let Err(e) = db_clone
.update_account(
&account.platform,
account_info.user_id,
&account_info.user_name,
&account_info.user_avatar_url,
)
.await
{
log::error!("Error when updating Bilibili account info {}", e);
}
}
Err(e) => {
log::error!("Get Bilibili user info failed {}", e);
}
}
Err(e) => {
log::error!("Get user info failed {}", e);
} else if platform == PlatformType::Douyin {
// Update Douyin account info
use crate::recorder::douyin::client::DouyinClient;
let douyin_client = DouyinClient::new(&config_clone.read().await.user_agent, &account);
match douyin_client.get_user_info().await {
Ok(user_info) => {
let avatar_url = user_info
.avatar_thumb
.url_list
.first()
.cloned()
.unwrap_or_default();

if let Err(e) = db_clone
.update_account_with_id_str(
&account,
&user_info.sec_uid,
&user_info.nickname,
&avatar_url,
)
.await
{
log::error!("Error when updating Douyin account info {}", e);
}
}
Err(e) => {
log::error!("Get Douyin user info failed {}", e);
}
}
}
}
@@ -332,7 +479,8 @@ fn setup_plugins(builder: tauri::Builder<tauri::Wry>) -> tauri::Builder<tauri::W
fn setup_event_handlers(builder: tauri::Builder<tauri::Wry>) -> tauri::Builder<tauri::Wry> {
builder.on_window_event(|window, event| {
if let WindowEvent::CloseRequested { api, .. } = event {
if !window.label().starts_with("Live") {
// main window is not closable
if window.label() == "main" {
window.hide().unwrap();
api.prevent_close();
}
@@ -357,7 +505,13 @@ fn setup_invoke_handlers(builder: tauri::Builder<tauri::Wry>) -> tauri::Builder<
crate::handlers::config::update_subtitle_setting,
crate::handlers::config::update_clip_name_format,
crate::handlers::config::update_whisper_prompt,
crate::handlers::config::update_subtitle_generator_type,
crate::handlers::config::update_openai_api_key,
crate::handlers::config::update_openai_api_endpoint,
crate::handlers::config::update_auto_generate,
crate::handlers::config::update_status_check_interval,
crate::handlers::config::update_whisper_language,
crate::handlers::config::update_user_agent,
crate::handlers::message::get_messages,
crate::handlers::message::read_message,
crate::handlers::message::delete_message,
@@ -367,6 +521,8 @@ fn setup_invoke_handlers(builder: tauri::Builder<tauri::Wry>) -> tauri::Builder<
crate::handlers::recorder::get_room_info,
crate::handlers::recorder::get_archives,
crate::handlers::recorder::get_archive,
crate::handlers::recorder::get_archive_subtitle,
crate::handlers::recorder::generate_archive_subtitle,
crate::handlers::recorder::delete_archive,
crate::handlers::recorder::get_danmu_record,
crate::handlers::recorder::export_danmu,
@@ -374,15 +530,15 @@ fn setup_invoke_handlers(builder: tauri::Builder<tauri::Wry>) -> tauri::Builder<
crate::handlers::recorder::get_total_length,
crate::handlers::recorder::get_today_record_count,
crate::handlers::recorder::get_recent_record,
crate::handlers::recorder::set_auto_start,
crate::handlers::recorder::force_start,
crate::handlers::recorder::force_stop,
crate::handlers::recorder::set_enable,
crate::handlers::recorder::fetch_hls,
crate::handlers::video::clip_range,
crate::handlers::video::upload_procedure,
crate::handlers::video::cancel,
crate::handlers::video::get_video,
crate::handlers::video::get_videos,
crate::handlers::video::get_all_videos,
crate::handlers::video::get_video_cover,
crate::handlers::video::delete_video,
crate::handlers::video::get_video_typelist,
crate::handlers::video::update_video_cover,
@@ -390,18 +546,25 @@ fn setup_invoke_handlers(builder: tauri::Builder<tauri::Wry>) -> tauri::Builder<
crate::handlers::video::get_video_subtitle,
crate::handlers::video::update_video_subtitle,
crate::handlers::video::encode_video_subtitle,
crate::handlers::video::generic_ffmpeg_command,
crate::handlers::task::get_tasks,
crate::handlers::task::delete_task,
crate::handlers::utils::show_in_folder,
crate::handlers::utils::export_to_file,
crate::handlers::utils::get_disk_info,
crate::handlers::utils::open_live,
crate::handlers::utils::open_clip,
crate::handlers::utils::open_log_folder,
crate::handlers::utils::console_log,
crate::handlers::utils::list_folder,
])
}

#[cfg(feature = "gui")]
fn main() -> Result<(), Box<dyn std::error::Error>> {
let _ = fix_path_env::fix();
let builder = tauri::Builder::default();

let builder = tauri::Builder::default().plugin(tauri_plugin_deep_link::init());
let builder = setup_plugins(builder);
let builder = setup_event_handlers(builder);
let builder = setup_invoke_handlers(builder);
@@ -412,6 +575,12 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let state = setup_app_state(app).await?;
let _ = tray::create_tray(app.handle());

// check ffmpeg status
match ffmpeg::check_ffmpeg().await {
Err(e) => log::error!("Failed to check ffmpeg version: {e}"),
Ok(v) => log::info!("Checked ffmpeg version: {v}"),
}

app.manage(state);
Ok(())
})
@@ -432,6 +601,10 @@ struct Args {
/// Path to the database folder
#[arg(short, long, default_value_t = String::from("./data"))]
db: String,

/// ReadOnly mode
#[arg(short, long, default_value_t = false)]
readonly: bool,
}

#[cfg(feature = "headless")]
@@ -443,6 +616,12 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
.await
.expect("Failed to setup server state");

// check ffmpeg status
match ffmpeg::check_ffmpeg().await {
Err(e) => log::error!("Failed to check ffmpeg version: {e}"),
Ok(v) => log::info!("Checked ffmpeg version: {v}"),
}

http_server::start_api_server(state).await;
Ok(())
}

@@ -1,5 +1,4 @@
use async_trait::async_trait;
use serde::Serialize;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use std::sync::LazyLock;
@@ -10,6 +9,7 @@ use crate::progress_manager::Event;
#[cfg(feature = "gui")]
use {
crate::recorder::danmu::DanmuEntry,
serde::Serialize,
tauri::{AppHandle, Emitter},
};

@@ -89,7 +89,7 @@ impl EventEmitter {
"progress-finished",
FinishEvent {
id,
success: success.clone(),
success: *success,
message,
},
)

@@ -81,7 +81,11 @@ pub trait Recorder: Send + Sync + 'static {
async fn info(&self) -> RecorderInfo;
async fn comments(&self, live_id: &str) -> Result<Vec<DanmuEntry>, errors::RecorderError>;
async fn is_recording(&self, live_id: &str) -> bool;
async fn force_start(&self);
async fn force_stop(&self);
async fn set_auto_start(&self, auto_start: bool);
async fn get_archive_subtitle(&self, live_id: &str) -> Result<String, errors::RecorderError>;
async fn generate_archive_subtitle(
&self,
live_id: &str,
) -> Result<String, errors::RecorderError>;
async fn enable(&self);
async fn disable(&self);
}

@@ -138,25 +138,12 @@ impl BiliStream {
}
})
}

pub fn is_same(&self, other: &BiliStream) -> bool {
// Extract live_id part from path (e.g., live_1848752274_71463808)
let get_live_id = |path: &str| {
path.split('/')
.find(|part| part.starts_with("live_"))
.unwrap_or("")
.to_string()
};
let self_live_id = get_live_id(&self.path);
let other_live_id = get_live_id(&other.path);
self_live_id == other_live_id
}
}

impl BiliClient {
pub fn new() -> Result<BiliClient, BiliClientError> {
pub fn new(user_agent: &str) -> Result<BiliClient, BiliClientError> {
let mut headers = reqwest::header::HeaderMap::new();
headers.insert("user-agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36".parse().unwrap());
headers.insert("user-agent", user_agent.parse().unwrap());

if let Ok(client) = Client::builder().timeout(Duration::from_secs(10)).build() {
Ok(BiliClient { client, headers })
@@ -214,7 +201,11 @@ impl BiliClient {
pub async fn logout(&self, account: &AccountRow) -> Result<(), BiliClientError> {
let url = "https://passport.bilibili.com/login/exit/v2";
let mut headers = self.headers.clone();
headers.insert("cookie", account.cookies.parse().unwrap());
if let Ok(cookies) = account.cookies.parse() {
headers.insert("cookie", cookies);
} else {
return Err(BiliClientError::InvalidCookie);
}
let params = [("csrf", account.csrf.clone())];
let _ = self
.client
@@ -241,8 +232,12 @@ impl BiliClient {
});
let params = self.get_sign(params).await?;
let mut headers = self.headers.clone();
headers.insert("cookie", account.cookies.parse().unwrap());
let res: serde_json::Value = self
if let Ok(cookies) = account.cookies.parse() {
headers.insert("cookie", cookies);
} else {
return Err(BiliClientError::InvalidCookie);
}
let resp = self
.client
.get(format!(
"https://api.bilibili.com/x/space/wbi/acc/info?{}",
@@ -250,15 +245,24 @@ impl BiliClient {
))
.headers(headers)
.send()
.await?
.json()
.await?;
if res["code"].as_i64().unwrap_or(-1) != 0 {
log::error!(
"Get user info failed {}",
res["code"].as_i64().unwrap_or(-1)
);
return Err(BiliClientError::InvalidCode);

if !resp.status().is_success() {
if resp.status() == reqwest::StatusCode::PRECONDITION_FAILED {
return Err(BiliClientError::SecurityControlError);
}
return Err(BiliClientError::InvalidResponseStatus {
status: resp.status(),
});
}

let res: serde_json::Value = resp.json().await?;
let code = res["code"]
.as_u64()
.ok_or(BiliClientError::InvalidResponseJson { resp: res.clone() })?;
if code != 0 {
log::error!("Get user info failed {}", code);
return Err(BiliClientError::InvalidMessageCode { code });
}
Ok(UserInfo {
user_id,
@@ -274,8 +278,12 @@ impl BiliClient {
room_id: u64,
) -> Result<RoomInfo, BiliClientError> {
let mut headers = self.headers.clone();
headers.insert("cookie", account.cookies.parse().unwrap());
let res: serde_json::Value = self
if let Ok(cookies) = account.cookies.parse() {
headers.insert("cookie", cookies);
} else {
return Err(BiliClientError::InvalidCookie);
}
let response = self
.client
.get(format!(
"https://api.live.bilibili.com/room/v1/Room/get_info?room_id={}",
@@ -283,12 +291,23 @@ impl BiliClient {
))
.headers(headers)
.send()
.await?
.json()
.await?;
let code = res["code"].as_u64().ok_or(BiliClientError::InvalidValue)?;

if !response.status().is_success() {
if response.status() == reqwest::StatusCode::PRECONDITION_FAILED {
return Err(BiliClientError::SecurityControlError);
}
return Err(BiliClientError::InvalidResponseStatus {
status: response.status(),
});
}

let res: serde_json::Value = response.json().await?;
let code = res["code"]
.as_u64()
.ok_or(BiliClientError::InvalidResponseJson { resp: res.clone() })?;
if code != 0 {
return Err(BiliClientError::InvalidCode);
return Err(BiliClientError::InvalidMessageCode { code });
}

let room_id = res["data"]["room_id"]
@@ -339,7 +358,11 @@ impl BiliClient {
url: &String,
) -> Result<String, BiliClientError> {
let mut headers = self.headers.clone();
headers.insert("cookie", account.cookies.parse().unwrap());
if let Ok(cookies) = account.cookies.parse() {
headers.insert("cookie", cookies);
} else {
return Err(BiliClientError::InvalidCookie);
}
let response = self
.client
.get(url.to_owned())
@@ -362,11 +385,11 @@ impl BiliClient {
.headers(self.headers.clone())
.send()
.await?;
let mut file = std::fs::File::create(file_path)?;
let mut file = tokio::fs::File::create(file_path).await?;
let bytes = res.bytes().await?;
let size = bytes.len() as u64;
let mut content = std::io::Cursor::new(bytes);
std::io::copy(&mut content, &mut file)?;
tokio::io::copy(&mut content, &mut file).await?;
Ok(size)
}

@@ -456,7 +479,11 @@ impl BiliClient {
video_file: &Path,
) -> Result<PreuploadResponse, BiliClientError> {
let mut headers = self.headers.clone();
headers.insert("cookie", account.cookies.parse().unwrap());
if let Ok(cookies) = account.cookies.parse() {
headers.insert("cookie", cookies);
} else {
return Err(BiliClientError::InvalidCookie);
}
let url = format!(
"https://member.bilibili.com/preupload?name={}&r=upos&profile=ugcfx/bup",
video_file.file_name().unwrap().to_str().unwrap()
@@ -695,7 +722,11 @@ impl BiliClient {
video: &profile::Video,
) -> Result<VideoSubmitData, BiliClientError> {
let mut headers = self.headers.clone();
headers.insert("cookie", account.cookies.parse().unwrap());
if let Ok(cookies) = account.cookies.parse() {
headers.insert("cookie", cookies);
} else {
return Err(BiliClientError::InvalidCookie);
}
let url = format!(
"https://member.bilibili.com/x/vu/web/add/v3?ts={}&csrf={}",
chrono::Local::now().timestamp(),
@@ -713,19 +744,19 @@ impl BiliClient {
.await
{
Ok(raw_resp) => {
let json = raw_resp.json().await?;
if let Ok(resp) = serde_json::from_value::<GeneralResponse>(json) {
let json: Value = raw_resp.json().await?;
if let Ok(resp) = serde_json::from_value::<GeneralResponse>(json.clone()) {
match resp.data {
response::Data::VideoSubmit(data) => Ok(data),
_ => Err(BiliClientError::InvalidResponse),
}
} else {
println!("Parse response failed");
log::error!("Parse response failed: {}", json);
Err(BiliClientError::InvalidResponse)
}
}
Err(e) => {
println!("Send failed {}", e);
log::error!("Send failed {}", e);
Err(BiliClientError::InvalidResponse)
}
}
@@ -741,7 +772,11 @@ impl BiliClient {
chrono::Local::now().timestamp(),
);
let mut headers = self.headers.clone();
headers.insert("cookie", account.cookies.parse().unwrap());
if let Ok(cookies) = account.cookies.parse() {
headers.insert("cookie", cookies);
} else {
return Err(BiliClientError::InvalidCookie);
}
let params = [("csrf", account.csrf.clone()), ("cover", cover.to_string())];
match self
.client
@@ -753,19 +788,19 @@ impl BiliClient {
.await
{
Ok(raw_resp) => {
let json = raw_resp.json().await?;
if let Ok(resp) = serde_json::from_value::<GeneralResponse>(json) {
let json: Value = raw_resp.json().await?;
if let Ok(resp) = serde_json::from_value::<GeneralResponse>(json.clone()) {
match resp.data {
response::Data::Cover(data) => Ok(data.url),
_ => Err(BiliClientError::InvalidResponse),
}
} else {
println!("Parse response failed");
log::error!("Parse response failed: {}", json);
Err(BiliClientError::InvalidResponse)
}
}
Err(e) => {
println!("Send failed {}", e);
log::error!("Send failed {}", e);
Err(BiliClientError::InvalidResponse)
}
}
@@ -779,7 +814,11 @@ impl BiliClient {
) -> Result<(), BiliClientError> {
let url = "https://api.live.bilibili.com/msg/send".to_string();
let mut headers = self.headers.clone();
headers.insert("cookie", account.cookies.parse().unwrap());
if let Ok(cookies) = account.cookies.parse() {
headers.insert("cookie", cookies);
} else {
return Err(BiliClientError::InvalidCookie);
}
let params = [
("bubble", "0"),
("msg", message),
@@ -809,7 +848,11 @@ impl BiliClient {
) -> Result<Vec<response::Typelist>, BiliClientError> {
let url = "https://member.bilibili.com/x/vupre/web/archive/pre?lang=cn";
let mut headers = self.headers.clone();
headers.insert("cookie", account.cookies.parse().unwrap());
if let Ok(cookies) = account.cookies.parse() {
headers.insert("cookie", cookies);
} else {
return Err(BiliClientError::InvalidCookie);
}
let resp: GeneralResponse = self
.client
.get(url)

@@ -3,16 +3,20 @@ use custom_error::custom_error;
custom_error! {pub BiliClientError
InvalidResponse = "Invalid response",
InitClientError = "Client init error",
InvalidCode = "Invalid Code",
InvalidResponseStatus{ status: reqwest::StatusCode } = "Invalid response status: {status}",
InvalidResponseJson{ resp: serde_json::Value } = "Invalid response json: {resp}",
InvalidMessageCode{ code: u64 } = "Invalid message code: {code}",
InvalidValue = "Invalid value",
InvalidUrl = "Invalid url",
InvalidFormat = "Invalid stream format",
InvalidStream = "Invalid stream",
InvalidCookie = "Invalid cookie",
UploadError{err: String} = "Upload error: {err}",
UploadCancelled = "Upload was cancelled by user",
EmptyCache = "Empty cache",
ClientError{err: reqwest::Error} = "Client error: {err}",
IOError{err: std::io::Error} = "IO error: {err}",
SecurityControlError = "Security control error",
}

impl From<reqwest::Error> for BiliClientError {

@@ -12,6 +12,7 @@ pub struct GeneralResponse {

#[derive(Serialize, Deserialize, Debug)]
#[serde(untagged)]
#[allow(clippy::large_enum_variant)]
pub enum Data {
VideoSubmit(VideoSubmitData),
Cover(CoverData),

@@ -65,7 +65,20 @@ impl DanmuStorage {
.await;
}

pub async fn get_entries(&self) -> Vec<DanmuEntry> {
self.cache.read().await.clone()
// get entries with ts relative to live start time
pub async fn get_entries(&self, live_start_ts: i64) -> Vec<DanmuEntry> {
let mut danmus: Vec<DanmuEntry> = self
.cache
.read()
.await
.iter()
.map(|entry| DanmuEntry {
ts: entry.ts - live_start_ts,
content: entry.content.clone(),
})
.collect();
// filter out danmus with ts < 0
danmus.retain(|entry| entry.ts >= 0);
danmus
}
}
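
For context, a minimal sketch of the timestamp normalisation that the new `get_entries` performs; the struct below is a simplified stand-in for the real `DanmuEntry`, and the numbers are made up:

```rust
#[derive(Clone, Debug, PartialEq)]
struct DanmuEntry {
    ts: i64, // absolute timestamp in milliseconds
    content: String,
}

// Shift absolute danmu timestamps to be relative to the live start
// and drop anything recorded before the live began.
fn normalize(entries: &[DanmuEntry], live_start_ts: i64) -> Vec<DanmuEntry> {
    entries
        .iter()
        .map(|e| DanmuEntry { ts: e.ts - live_start_ts, content: e.content.clone() })
        .filter(|e| e.ts >= 0)
        .collect()
}

fn main() {
    let entries = vec![
        DanmuEntry { ts: 1_000, content: "early".into() },
        DanmuEntry { ts: 5_000, content: "hello".into() },
    ];
    // With the live starting at ts = 2_000, "early" is dropped and "hello" becomes ts = 3_000.
    assert_eq!(
        normalize(&entries, 2_000),
        vec![DanmuEntry { ts: 3_000, content: "hello".into() }]
    );
}
```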

@@ -7,15 +7,27 @@ use super::{
UserInfo,
};
use crate::database::Database;
use crate::progress_manager::Event;
use crate::progress_reporter::EventEmitter;
use crate::recorder_manager::RecorderEvent;
use crate::subtitle_generator::item_to_srt;
use crate::{config::Config, database::account::AccountRow};
use async_trait::async_trait;
use chrono::Utc;
use client::DouyinClientError;
use dashmap::DashMap;
use danmu_stream::danmu_stream::DanmuStream;
use danmu_stream::provider::ProviderType;
use danmu_stream::DanmuMessageType;
use rand::random;
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::{broadcast, RwLock};
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
use tokio::sync::{broadcast, Mutex, RwLock};
use tokio::task::JoinHandle;

use super::danmu::DanmuStorage;

#[cfg(not(feature = "headless"))]
use {tauri::AppHandle, tauri_plugin_notification::NotificationExt};
@@ -42,60 +54,77 @@ impl From<DouyinClientError> for RecorderError {
pub struct DouyinRecorder {
#[cfg(not(feature = "headless"))]
app_handle: AppHandle,
emitter: EventEmitter,
client: client::DouyinClient,
db: Arc<Database>,
pub room_id: u64,
pub room_info: Arc<RwLock<Option<response::DouyinRoomInfoResponse>>>,
pub stream_url: Arc<RwLock<Option<String>>>,
pub entry_store: Arc<RwLock<Option<EntryStore>>>,
pub live_id: Arc<RwLock<String>>,
pub live_status: Arc<RwLock<LiveStatus>>,
account: AccountRow,
room_id: u64,
sec_user_id: String,
room_info: Arc<RwLock<Option<client::DouyinBasicRoomInfo>>>,
stream_url: Arc<RwLock<Option<String>>>,
entry_store: Arc<RwLock<Option<EntryStore>>>,
danmu_store: Arc<RwLock<Option<DanmuStorage>>>,
live_id: Arc<RwLock<String>>,
danmu_room_id: Arc<RwLock<String>>,
live_status: Arc<RwLock<LiveStatus>>,
is_recording: Arc<RwLock<bool>>,
auto_start: Arc<RwLock<bool>>,
current_record: Arc<RwLock<bool>>,
running: Arc<RwLock<bool>>,
last_update: Arc<RwLock<i64>>,
m3u8_cache: DashMap<String, String>,
config: Arc<RwLock<Config>>,
live_end_channel: broadcast::Sender<RecorderEvent>,
enabled: Arc<RwLock<bool>>,

danmu_stream_task: Arc<Mutex<Option<JoinHandle<()>>>>,
danmu_task: Arc<Mutex<Option<JoinHandle<()>>>>,
record_task: Arc<Mutex<Option<JoinHandle<()>>>>,
}

impl DouyinRecorder {
#[allow(clippy::too_many_arguments)]
pub async fn new(
#[cfg(not(feature = "headless"))] app_handle: AppHandle,
emitter: EventEmitter,
room_id: u64,
sec_user_id: &str,
config: Arc<RwLock<Config>>,
douyin_account: &AccountRow,
account: &AccountRow,
db: &Arc<Database>,
auto_start: bool,
enabled: bool,
channel: broadcast::Sender<RecorderEvent>,
) -> Result<Self, super::errors::RecorderError> {
let client = client::DouyinClient::new(douyin_account);
let room_info = client.get_room_info(room_id).await?;
let client = client::DouyinClient::new(&config.read().await.user_agent, account);
let room_info = client.get_room_info(room_id, sec_user_id).await?;
let mut live_status = LiveStatus::Offline;
if room_info.data.room_status == 0 {
if room_info.status == 0 {
live_status = LiveStatus::Live;
}

Ok(Self {
#[cfg(not(feature = "headless"))]
app_handle,
emitter,
db: db.clone(),
account: account.clone(),
room_id,
sec_user_id: sec_user_id.to_string(),
live_id: Arc::new(RwLock::new(String::new())),
danmu_room_id: Arc::new(RwLock::new(String::new())),
entry_store: Arc::new(RwLock::new(None)),
danmu_store: Arc::new(RwLock::new(None)),
client,
room_info: Arc::new(RwLock::new(Some(room_info))),
stream_url: Arc::new(RwLock::new(None)),
live_status: Arc::new(RwLock::new(live_status)),
running: Arc::new(RwLock::new(false)),
is_recording: Arc::new(RwLock::new(false)),
auto_start: Arc::new(RwLock::new(auto_start)),
current_record: Arc::new(RwLock::new(false)),
enabled: Arc::new(RwLock::new(enabled)),
last_update: Arc::new(RwLock::new(Utc::now().timestamp())),
m3u8_cache: DashMap::new(),
config,
live_end_channel: channel,

danmu_stream_task: Arc::new(Mutex::new(None)),
danmu_task: Arc::new(Mutex::new(None)),
record_task: Arc::new(Mutex::new(None)),
})
}

@@ -104,27 +133,27 @@ impl DouyinRecorder {
return false;
}

*self.current_record.read().await
*self.enabled.read().await
}

async fn check_status(&self) -> bool {
match self.client.get_room_info(self.room_id).await {
match self
.client
.get_room_info(self.room_id, &self.sec_user_id)
.await
{
Ok(info) => {
let live_status = info.data.room_status == 0; // room_status == 0 表示正在直播
let previous_liveid = self.live_id.read().await.clone();
let live_status = info.status == 0; // room_status == 0 表示正在直播

*self.room_info.write().await = Some(info.clone());

if (*self.live_status.read().await == LiveStatus::Live) != live_status {
// live status changed, reset current record flag
*self.current_record.write().await = false;

log::info!(
"[{}]Live status changed to {}, current_record: {}, auto_start: {}",
"[{}]Live status changed to {}, auto_start: {}",
self.room_id,
live_status,
*self.current_record.read().await,
*self.auto_start.read().await
*self.enabled.read().await
);

if live_status {
@@ -135,7 +164,7 @@ impl DouyinRecorder {
.title("BiliShadowReplay - 直播开始")
.body(format!(
"{} 开启了直播:{}",
info.data.user.nickname, info.data.data[0].title
info.user_name, info.room_title
))
.show()
.unwrap();
@@ -147,7 +176,7 @@ impl DouyinRecorder {
.title("BiliShadowReplay - 直播结束")
.body(format!(
"{} 关闭了直播:{}",
info.data.user.nickname, info.data.data[0].title
info.user_name, info.room_title
))
.show()
.unwrap();
@@ -168,65 +197,30 @@ impl DouyinRecorder {
}

if !live_status {
*self.current_record.write().await = false;
self.reset().await;

return false;
}

if !*self.current_record.read().await && !*self.auto_start.read().await {
let should_record = self.should_record().await;

if !should_record {
return true;
}

if *self.auto_start.read().await
&& previous_liveid != info.data.data[0].id_str.clone()
{
*self.current_record.write().await = true;
}

if *self.current_record.read().await {
// Get stream URL when live starts
if !info.data.data[0]
.stream_url
.as_ref()
.unwrap()
.hls_pull_url
.is_empty()
{
*self.live_id.write().await = info.data.data[0].id_str.clone();
// create a new record
let cover_url = info.data.data[0]
.cover
.as_ref()
.map(|cover| cover.url_list[0].clone());
let cover = if let Some(url) = cover_url {
Some(self.client.get_cover_base64(&url).await.unwrap())
} else {
None
};

if let Err(e) = self
.db
.add_record(
PlatformType::Douyin,
self.live_id.read().await.as_str(),
self.room_id,
&info.data.data[0].title,
cover,
None,
)
.await
{
log::error!("Failed to add record: {}", e);
}

// setup entry store
let work_dir = self.get_work_dir(self.live_id.read().await.as_str()).await;
let entry_store = EntryStore::new(&work_dir).await;
*self.entry_store.write().await = Some(entry_store);
// Get stream URL when live starts
if !info.hls_url.is_empty() {
// Only set stream URL, don't create record yet
// Record will be created when first ts download succeeds
let new_stream_url = self.get_best_stream_url(&info).await;
if new_stream_url.is_none() {
log::error!("No stream url found in room_info: {:#?}", info);
return false;
}

return true;
log::info!("New douyin stream URL: {}", new_stream_url.clone().unwrap());
*self.stream_url.write().await = Some(new_stream_url.unwrap());
*self.danmu_room_id.write().await = info.room_id_str.clone();
}

true
@@ -238,9 +232,58 @@ impl DouyinRecorder {
}
}

async fn danmu(&self) -> Result<(), super::errors::RecorderError> {
let cookies = self.account.cookies.clone();
let danmu_room_id = self
.danmu_room_id
.read()
.await
.clone()
.parse::<u64>()
.unwrap_or(0);
let danmu_stream = DanmuStream::new(ProviderType::Douyin, &cookies, danmu_room_id).await;
if danmu_stream.is_err() {
let err = danmu_stream.err().unwrap();
log::error!("Failed to create danmu stream: {}", err);
return Err(super::errors::RecorderError::DanmuStreamError { err });
}
let danmu_stream = danmu_stream.unwrap();

let danmu_stream_clone = danmu_stream.clone();
*self.danmu_stream_task.lock().await = Some(tokio::spawn(async move {
let _ = danmu_stream_clone.start().await;
}));

loop {
if let Ok(Some(msg)) = danmu_stream.recv().await {
match msg {
DanmuMessageType::DanmuMessage(danmu) => {
let ts = Utc::now().timestamp_millis();
self.emitter.emit(&Event::DanmuReceived {
room: self.room_id,
ts,
content: danmu.message.clone(),
});
if let Some(storage) = self.danmu_store.read().await.as_ref() {
storage.add_line(ts, &danmu.message).await;
}
}
}
} else {
log::error!("Failed to receive danmu message");
return Err(super::errors::RecorderError::DanmuStreamError {
err: danmu_stream::DanmuStreamError::WebsocketError {
err: "Failed to receive danmu message".to_string(),
},
});
}
}
}

async fn reset(&self) {
*self.entry_store.write().await = None;
*self.live_id.write().await = String::new();
*self.danmu_room_id.write().await = String::new();
*self.last_update.write().await = Utc::now().timestamp();
*self.stream_url.write().await = None;
}
@@ -254,18 +297,8 @@ impl DouyinRecorder {
)
}

async fn get_best_stream_url(
&self,
room_info: &response::DouyinRoomInfoResponse,
) -> Option<String> {
let stream_data = room_info.data.data[0]
.stream_url
.as_ref()
.unwrap()
.live_core_sdk_data
.pull_data
.stream_data
.clone();
async fn get_best_stream_url(&self, room_info: &client::DouyinBasicRoomInfo) -> Option<String> {
let stream_data = room_info.stream_data.clone();
// parse stream_data into stream_info
let stream_info = serde_json::from_str::<stream_info::StreamInfo>(&stream_data);
if let Ok(stream_info) = stream_info {
@@ -283,6 +316,25 @@ impl DouyinRecorder {
}
}

fn parse_stream_url(&self, stream_url: &str) -> (String, String) {
// Parse stream URL to extract base URL and query parameters
// Example: http://7167739a741646b4651b6949b2f3eb8e.livehwc3.cn/pull-hls-l26.douyincdn.com/third/stream-693342996808860134_or4.m3u8?sub_m3u8=true&user_session_id=16090eb45ab8a2f042f7c46563936187&major_anchor_level=common&edge_slice=true&expire=67d944ec&sign=47b95cc6e8de20d82f3d404412fa8406

let base_url = stream_url
.rfind('/')
.map(|i| &stream_url[..=i])
.unwrap_or(stream_url)
.to_string();

let query_params = stream_url
.find('?')
.map(|i| &stream_url[i..])
.unwrap_or("")
.to_string();

(base_url, query_params)
}
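
For illustration only, here is how that base-URL/query split behaves on a shortened, hypothetical Douyin pull URL (the helper name `split_stream_url` is made up; it mirrors `parse_stream_url` above):

```rust
fn split_stream_url(stream_url: &str) -> (String, String) {
    // Everything up to and including the last '/' is the base URL.
    let base_url = stream_url
        .rfind('/')
        .map(|i| stream_url[..=i].to_string())
        .unwrap_or_else(|| stream_url.to_string());
    // Everything from the first '?' onward is the query string.
    let query_params = stream_url
        .find('?')
        .map(|i| stream_url[i..].to_string())
        .unwrap_or_default();
    (base_url, query_params)
}

fn main() {
    let url = "http://example.livehwc3.cn/pull-hls.douyincdn.com/third/stream-1_or4.m3u8?sub_m3u8=true&expire=67d944ec";
    let (base, query) = split_stream_url(url);
    assert_eq!(base, "http://example.livehwc3.cn/pull-hls.douyincdn.com/third/");
    assert_eq!(query, "?sub_m3u8=true&expire=67d944ec");
}
```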

async fn update_entries(&self) -> Result<u128, RecorderError> {
let task_begin_time = std::time::Instant::now();

@@ -294,91 +346,256 @@ impl DouyinRecorder {
}

if self.stream_url.read().await.is_none() {
let new_stream_url = self.get_best_stream_url(room_info.as_ref().unwrap()).await;
if new_stream_url.is_none() {
return Err(RecorderError::NoStreamAvailable);
}
log::info!("New douyin stream URL: {}", new_stream_url.clone().unwrap());
*self.stream_url.write().await = Some(new_stream_url.unwrap());
return Err(RecorderError::NoStreamAvailable);
}
let stream_url = self.stream_url.read().await.as_ref().unwrap().clone();

let mut stream_url = self.stream_url.read().await.as_ref().unwrap().clone();

// Get m3u8 playlist
let (playlist, updated_stream_url) = self.client.get_m3u8_content(&stream_url).await?;

*self.stream_url.write().await = Some(updated_stream_url);
*self.stream_url.write().await = Some(updated_stream_url.clone());
stream_url = updated_stream_url;

let mut new_segment_fetched = false;
let work_dir = self.get_work_dir(self.live_id.read().await.as_str()).await;
let mut is_first_segment = self.entry_store.read().await.is_none();
let work_dir;

// Create work directory if not exists
tokio::fs::create_dir_all(&work_dir).await?;
// If this is the first segment, prepare but don't create directories yet
if is_first_segment {
// Generate live_id for potential use
let live_id = Utc::now().timestamp_millis().to_string();
*self.live_id.write().await = live_id.clone();
work_dir = self.get_work_dir(&live_id).await;
} else {
work_dir = self.get_work_dir(self.live_id.read().await.as_str()).await;
}

let last_sequence = self
.entry_store
.read()
.await
.as_ref()
.unwrap()
.last_sequence();
let last_sequence = if is_first_segment {
0
} else {
self.entry_store
.read()
.await
.as_ref()
.unwrap()
.last_sequence
};

for (i, segment) in playlist.segments.iter().enumerate() {
let sequence = playlist.media_sequence + i as u64;
for segment in playlist.segments.iter() {
let formated_ts_name = segment.uri.clone();
let sequence = extract_sequence_from(&formated_ts_name);
if sequence.is_none() {
log::error!(
"No timestamp extracted from douyin ts name: {}",
formated_ts_name
);
continue;
}

let sequence = sequence.unwrap();
if sequence <= last_sequence {
continue;
}

new_segment_fetched = true;
let mut uri = segment.uri.clone();
// if uri contains ?params, remove it
if let Some(pos) = uri.find('?') {
uri = uri[..pos].to_string();
}
// example: pull-l3.douyincdn.com_stream-405850027547689439_or4-1752675567719.ts
let uri = segment.uri.clone();

let ts_url = if uri.starts_with("http") {
uri.clone()
} else {
// Get the base URL without the filename and query parameters
let base_url = stream_url
.rfind('/')
.map(|i| &stream_url[..=i])
.unwrap_or(&stream_url);
// Get the query parameters
let query = stream_url.find('?').map(|i| &stream_url[i..]).unwrap_or("");
// Combine: base_url + new_filename + query_params
format!("{}{}{}", base_url, uri, query)
// Parse the stream URL to extract base URL and query parameters
let (base_url, query_params) = self.parse_stream_url(&stream_url);

// Check if the segment URI already has query parameters
if uri.contains('?') {
// If segment URI has query params, append m3u8 query params with &
format!("{}{}&{}", base_url, uri, &query_params[1..]) // Remove leading ? from query_params
} else {
// If segment URI has no query params, append m3u8 query params with ?
format!("{}{}{}", base_url, uri, query_params)
}
};

let file_name = format!("{}.ts", sequence);
// Download segment with retry mechanism
let mut retry_count = 0;
let max_retries = 3;
let mut download_success = false;
let mut work_dir_created = false;

// Download segment
match self
.client
.download_ts(&ts_url, &format!("{}/{}", work_dir, file_name))
.await
{
Ok(size) => {
let ts_entry = TsEntry {
url: file_name,
sequence,
length: segment.duration as f64,
size,
ts: Utc::now().timestamp(),
is_header: false,
};
while retry_count < max_retries && !download_success {
let file_name = format!("{}.ts", sequence);
let file_path = format!("{}/{}", work_dir, file_name);

self.entry_store
.write()
.await
.as_mut()
.unwrap()
.add_entry(ts_entry)
.await;
// If this is the first segment, create work directory before first download attempt
if is_first_segment && !work_dir_created {
// Create work directory only when we're about to download
if let Err(e) = tokio::fs::create_dir_all(&work_dir).await {
log::error!("Failed to create work directory: {}", e);
return Err(e.into());
}
work_dir_created = true;
}
Err(e) => {
log::error!("Failed to download segment: {}", e);

match self.client.download_ts(&ts_url, &file_path).await {
Ok(size) => {
if size == 0 {
log::error!("Download segment failed (empty response): {}", ts_url);
retry_count += 1;
if retry_count < max_retries {
tokio::time::sleep(Duration::from_millis(500)).await;
continue;
}
break;
}

// If this is the first successful download, create record and initialize stores
if is_first_segment {
// Create database record
let room_info = room_info.as_ref().unwrap();
let cover_url = room_info.cover.clone();
let cover = if let Some(url) = cover_url {
Some(self.client.get_cover_base64(&url).await.unwrap_or_default())
} else {
None
};

if let Err(e) = self
.db
.add_record(
PlatformType::Douyin,
self.live_id.read().await.as_str(),
self.room_id,
&room_info.room_title,
cover,
None,
)
.await
{
log::error!("Failed to add record: {}", e);
}

// Setup entry store
let entry_store = EntryStore::new(&work_dir).await;
*self.entry_store.write().await = Some(entry_store);

// Setup danmu store
let danmu_file_path = format!("{}{}", work_dir, "danmu.txt");
let danmu_store = DanmuStorage::new(&danmu_file_path).await;
*self.danmu_store.write().await = danmu_store;

// Start danmu task
if let Some(danmu_task) = self.danmu_task.lock().await.as_mut() {
danmu_task.abort();
}
if let Some(danmu_stream_task) =
self.danmu_stream_task.lock().await.as_mut()
{
danmu_stream_task.abort();
}
let live_id = self.live_id.read().await.clone();
let self_clone = self.clone();
*self.danmu_task.lock().await = Some(tokio::spawn(async move {
log::info!("Start fetching danmu for live {}", live_id);
let _ = self_clone.danmu().await;
}));

is_first_segment = false;
}

let ts_entry = TsEntry {
url: file_name,
sequence,
length: segment.duration as f64,
size,
ts: Utc::now().timestamp_millis(),
is_header: false,
};

self.entry_store
.write()
.await
.as_mut()
.unwrap()
.add_entry(ts_entry)
.await;

new_segment_fetched = true;
download_success = true;
}
Err(e) => {
log::warn!(
"Failed to download segment (attempt {}/{}): {} - URL: {}",
retry_count + 1,
max_retries,
e,
ts_url
);
retry_count += 1;
if retry_count < max_retries {
tokio::time::sleep(Duration::from_millis(1000 * retry_count as u64))
.await;
continue;
}
// If all retries failed, check if it's a 400 error
if e.to_string().contains("400") {
log::error!(
"HTTP 400 error for segment, stream URL may be expired: {}",
ts_url
);
*self.stream_url.write().await = None;

// Clean up empty directory if first segment failed
if is_first_segment && work_dir_created {
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await
{
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
);
}
}

return Err(RecorderError::NoStreamAvailable);
}

// Clean up empty directory if first segment failed
if is_first_segment && work_dir_created {
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
);
}
}

return Err(e.into());
}
}
}

if !download_success {
log::error!(
"Failed to download segment after {} retries: {}",
max_retries,
ts_url
);

// Clean up empty directory if first segment failed after all retries
if is_first_segment && work_dir_created {
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
);
}
}

continue;
}
}

if new_segment_fetched {
@@ -386,6 +603,14 @@ impl DouyinRecorder {
self.update_record().await;
}

// if no new segment fetched for 10 seconds
if *self.last_update.read().await + 10 < Utc::now().timestamp() {
log::warn!("No new segment fetched for 10 seconds");
*self.stream_url.write().await = None;
*self.last_update.write().await = Utc::now().timestamp();
return Err(RecorderError::NoStreamAvailable);
}

Ok(task_begin_time.elapsed().as_millis())
}

@@ -409,6 +634,7 @@ impl DouyinRecorder {
}

async fn generate_m3u8(&self, live_id: &str, start: i64, end: i64) -> String {
log::debug!("Generate m3u8 for {live_id}:{start}:{end}");
let range = if start != 0 || end != 0 {
Some(Range {
x: start as f32,
@@ -425,22 +651,32 @@ impl DouyinRecorder {
.await
.as_ref()
.unwrap()
.manifest(false, range)
.manifest(range.is_some(), false, range)
} else {
let work_dir = self.get_work_dir(live_id).await;
EntryStore::new(&work_dir).await.manifest(true, range)
EntryStore::new(&work_dir)
.await
.manifest(true, false, range)
}
}
}

fn extract_sequence_from(name: &str) -> Option<u64> {
use regex::Regex;
let re = Regex::new(r"(\d+)\.ts").ok()?;
let captures = re.captures(name)?;
captures.get(1)?.as_str().parse().ok()
}
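
For reference, a small check of what that regex extracts from the segment name shown in the earlier comment (the helper is repeated here only to keep the snippet self-contained):

```rust
use regex::Regex;

// `(\d+)\.ts` captures the digit run immediately before the ".ts" suffix,
// which for Douyin segment names is the trailing millisecond timestamp.
fn extract_sequence_from(name: &str) -> Option<u64> {
    let re = Regex::new(r"(\d+)\.ts").ok()?;
    let captures = re.captures(name)?;
    captures.get(1)?.as_str().parse().ok()
}

fn main() {
    let name = "pull-l3.douyincdn.com_stream-405850027547689439_or4-1752675567719.ts";
    assert_eq!(extract_sequence_from(name), Some(1_752_675_567_719));
    assert_eq!(extract_sequence_from("no-sequence.ts"), None);
}
```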
|
||||
|
||||
#[async_trait]
|
||||
impl Recorder for DouyinRecorder {
|
||||
async fn run(&self) {
|
||||
*self.running.write().await = true;
|
||||
|
||||
let self_clone = self.clone();
|
||||
tokio::spawn(async move {
|
||||
*self.record_task.lock().await = Some(tokio::spawn(async move {
|
||||
while *self_clone.running.read().await {
|
||||
let mut connection_fail_count = 0;
|
||||
if self_clone.check_status().await {
|
||||
// Live status is ok, start recording
|
||||
while self_clone.should_record().await {
|
||||
@@ -458,56 +694,145 @@ impl Recorder for DouyinRecorder {
|
||||
);
|
||||
}
|
||||
*self_clone.is_recording.write().await = true;
|
||||
connection_fail_count = 0;
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("[{}]Update entries error: {}", self_clone.room_id, e);
|
||||
if let RecorderError::DouyinClientError { err: _e } = e {
|
||||
connection_fail_count =
|
||||
std::cmp::min(5, connection_fail_count + 1);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
*self_clone.is_recording.write().await = false;
|
||||
// Check status again after 2-5 seconds
|
||||
tokio::time::sleep(Duration::from_secs(2)).await;
|
||||
// Check status again after some seconds
|
||||
let secs = random::<u64>() % 5;
|
||||
tokio::time::sleep(Duration::from_secs(
|
||||
secs + 2_u64.pow(connection_fail_count),
|
||||
))
|
||||
.await;
|
||||
continue;
|
||||
}
|
||||
// Check live status every 10s
|
||||
tokio::time::sleep(Duration::from_secs(10)).await;
|
||||
|
||||
let interval = self_clone.config.read().await.status_check_interval;
|
||||
tokio::time::sleep(Duration::from_secs(interval)).await;
|
||||
}
|
||||
log::info!("recording thread {} quit.", self_clone.room_id);
|
||||
});
|
||||
}));
|
||||
}

async fn stop(&self) {
*self.running.write().await = false;
// stop 3 tasks
if let Some(danmu_task) = self.danmu_task.lock().await.as_mut() {
let _ = danmu_task.abort();
}
if let Some(danmu_stream_task) = self.danmu_stream_task.lock().await.as_mut() {
let _ = danmu_stream_task.abort();
}
if let Some(record_task) = self.record_task.lock().await.as_mut() {
let _ = record_task.abort();
}
log::info!("Recorder for room {} quit.", self.room_id);
}

async fn m3u8_content(&self, live_id: &str, start: i64, end: i64) -> String {
let cache_key = format!("{}:{}:{}", live_id, start, end);
let range_required = start != 0 || end != 0;
if !range_required {
return self.generate_m3u8(live_id, start, end).await;
}

if let Some(cached) = self.m3u8_cache.get(&cache_key) {
return cached.clone();
}
let m3u8_content = self.generate_m3u8(live_id, start, end).await;
self.m3u8_cache.insert(cache_key, m3u8_content.clone());
m3u8_content
self.generate_m3u8(live_id, start, end).await
}

async fn master_m3u8(&self, _live_id: &str, start: i64, end: i64) -> String {
async fn master_m3u8(&self, live_id: &str, start: i64, end: i64) -> String {
let mut m3u8_content = "#EXTM3U\n".to_string();
m3u8_content += "#EXT-X-VERSION:6\n";
m3u8_content += format!(
"#EXT-X-STREAM-INF:{}\n",
"BANDWIDTH=1280000,RESOLUTION=1920x1080,CODECS=\"avc1.64001F,mp4a.40.2\""
"#EXT-X-STREAM-INF:BANDWIDTH=1280000,RESOLUTION=1920x1080,CODECS=\"avc1.64001F,mp4a.40.2\",DANMU={}\n",
self.first_segment_ts(live_id).await / 1000
)
.as_str();
m3u8_content += &format!("playlist.m3u8?start={}&end={}\n", start, end);
m3u8_content
}

async fn get_archive_subtitle(
&self,
live_id: &str,
) -> Result<String, super::errors::RecorderError> {
let work_dir = self.get_work_dir(live_id).await;
let subtitle_file_path = format!("{}/{}", work_dir, "subtitle.srt");
let subtitle_file = File::open(subtitle_file_path).await;
if subtitle_file.is_err() {
return Err(super::errors::RecorderError::SubtitleNotFound {
live_id: live_id.to_string(),
});
}
let subtitle_file = subtitle_file.unwrap();
let mut subtitle_file = BufReader::new(subtitle_file);
let mut subtitle_content = String::new();
subtitle_file.read_to_string(&mut subtitle_content).await?;
Ok(subtitle_content)
}

async fn generate_archive_subtitle(
&self,
live_id: &str,
) -> Result<String, super::errors::RecorderError> {
// generate subtitle file under work_dir
let work_dir = self.get_work_dir(live_id).await;
let subtitle_file_path = format!("{}/{}", work_dir, "subtitle.srt");
let mut subtitle_file = File::create(subtitle_file_path).await?;
// first generate a tmp clip file
// generate a tmp m3u8 index file
let m3u8_index_file_path = format!("{}/{}", work_dir, "tmp.m3u8");
let m3u8_content = self.m3u8_content(live_id, 0, 0).await;
tokio::fs::write(&m3u8_index_file_path, m3u8_content).await?;
// generate a tmp clip file
let clip_file_path = format!("{}/{}", work_dir, "tmp.mp4");
if let Err(e) = crate::ffmpeg::clip_from_m3u8(
None::<&crate::progress_reporter::ProgressReporter>,
Path::new(&m3u8_index_file_path),
Path::new(&clip_file_path),
)
.await
{
return Err(super::errors::RecorderError::SubtitleGenerationFailed {
error: e.to_string(),
});
}
// generate subtitle file
let config = self.config.read().await;
let result = crate::ffmpeg::generate_video_subtitle(
None,
Path::new(&clip_file_path),
"whisper",
&config.whisper_model,
&config.whisper_prompt,
&config.openai_api_key,
&config.openai_api_endpoint,
&config.whisper_language,
)
.await;
// write subtitle file
if let Err(e) = result {
return Err(super::errors::RecorderError::SubtitleGenerationFailed {
error: e.to_string(),
});
}
let result = result.unwrap();
let subtitle_content = result
.subtitle_content
.iter()
.map(item_to_srt)
.collect::<Vec<String>>()
.join("");
subtitle_file.write_all(subtitle_content.as_bytes()).await?;

// remove tmp file
tokio::fs::remove_file(&m3u8_index_file_path).await?;
tokio::fs::remove_file(&clip_file_path).await?;
Ok(subtitle_content)
}

async fn first_segment_ts(&self, live_id: &str) -> i64 {
if *self.live_id.read().await == live_id {
let entry_store = self.entry_store.read().await;

@@ -526,17 +851,11 @@ impl Recorder for DouyinRecorder {
let room_info = self.room_info.read().await;
let room_cover_url = room_info
.as_ref()
.and_then(|info| {
info.data
.data
.first()
.and_then(|data| data.cover.as_ref())
.map(|cover| cover.url_list[0].clone())
})
.and_then(|info| info.cover.clone())
.unwrap_or_default();
let room_title = room_info
.as_ref()
.and_then(|info| info.data.data.first().map(|data| data.title.clone()))
.map(|info| info.room_title.clone())
.unwrap_or_default();
RecorderInfo {
room_id: self.room_id,

@@ -548,15 +867,15 @@ impl Recorder for DouyinRecorder {
user_info: UserInfo {
user_id: room_info
.as_ref()
.map(|info| info.data.user.sec_uid.clone())
.map(|info| info.sec_user_id.clone())
.unwrap_or_default(),
user_name: room_info
.as_ref()
.map(|info| info.data.user.nickname.clone())
.map(|info| info.user_name.clone())
.unwrap_or_default(),
user_avatar: room_info
.as_ref()
.map(|info| info.data.user.avatar_thumb.url_list[0].clone())
.map(|info| info.user_avatar.clone())
.unwrap_or_default(),
},
total_length: if let Some(store) = self.entry_store.read().await.as_ref() {

@@ -567,28 +886,52 @@ impl Recorder for DouyinRecorder {
current_live_id: self.live_id.read().await.clone(),
live_status: *self.live_status.read().await == LiveStatus::Live,
is_recording: *self.is_recording.read().await,
auto_start: *self.auto_start.read().await,
auto_start: *self.enabled.read().await,
platform: PlatformType::Douyin.as_str().to_string(),
}
}

async fn comments(&self, _live_id: &str) -> Result<Vec<DanmuEntry>, RecorderError> {
Ok(vec![])
async fn comments(&self, live_id: &str) -> Result<Vec<DanmuEntry>, RecorderError> {
Ok(if live_id == *self.live_id.read().await {
// just return current cache content
match self.danmu_store.read().await.as_ref() {
Some(storage) => {
storage
.get_entries(self.first_segment_ts(live_id).await)
.await
}
None => Vec::new(),
}
} else {
// load disk cache
let cache_file_path = format!(
"{}/douyin/{}/{}/{}",
self.config.read().await.cache,
self.room_id,
live_id,
"danmu.txt"
);
log::debug!("loading danmu cache from {}", cache_file_path);
let storage = DanmuStorage::new(&cache_file_path).await;
if storage.is_none() {
return Ok(Vec::new());
}
let storage = storage.unwrap();
storage
.get_entries(self.first_segment_ts(live_id).await)
.await
})
}

async fn is_recording(&self, live_id: &str) -> bool {
*self.live_id.read().await == live_id && *self.live_status.read().await == LiveStatus::Live
}

async fn force_start(&self) {
*self.current_record.write().await = true;
async fn enable(&self) {
*self.enabled.write().await = true;
}

async fn force_stop(&self) {
*self.current_record.write().await = false;
}

async fn set_auto_start(&self, auto_start: bool) {
*self.auto_start.write().await = auto_start;
async fn disable(&self) {
*self.enabled.write().await = false;
}
}

@@ -1,18 +1,14 @@
use base64::Engine;
use reqwest::{Client, Error as ReqwestError};
use m3u8_rs::{Playlist, MediaPlaylist};
use tokio::fs::File;
use tokio::io::AsyncWriteExt;
use crate::database::account::AccountRow;
use base64::Engine;
use m3u8_rs::{MediaPlaylist, Playlist};
use reqwest::{Client, Error as ReqwestError};

use super::response::DouyinRoomInfoResponse;
use std::fmt;

const USER_AGENT: &str = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36";

#[derive(Debug)]
pub enum DouyinClientError {
Network(ReqwestError),
Network(String),
Io(std::io::Error),
Playlist(String),
}

@@ -29,7 +25,7 @@ impl fmt::Display for DouyinClientError {

impl From<ReqwestError> for DouyinClientError {
fn from(err: ReqwestError) -> Self {
DouyinClientError::Network(err)
DouyinClientError::Network(err.to_string())
}
}

@@ -39,37 +35,300 @@ impl From<std::io::Error> for DouyinClientError {
}
}

#[derive(Debug, Clone)]
pub struct DouyinBasicRoomInfo {
pub room_id_str: String,
pub room_title: String,
pub cover: Option<String>,
pub status: i64,
pub hls_url: String,
pub stream_data: String,
// user related
pub user_name: String,
pub user_avatar: String,
pub sec_user_id: String,
}

#[derive(Clone)]
pub struct DouyinClient {
client: Client,
cookies: String,
account: AccountRow,
}

impl DouyinClient {
pub fn new(account: &AccountRow) -> Self {
let client = Client::builder()
.user_agent(USER_AGENT)
.build()
.unwrap();
Self { client, cookies: account.cookies.clone() }
pub fn new(user_agent: &str, account: &AccountRow) -> Self {
let client = Client::builder().user_agent(user_agent).build().unwrap();
Self {
client,
account: account.clone(),
}
}

pub async fn get_room_info(&self, room_id: u64) -> Result<DouyinRoomInfoResponse, DouyinClientError> {
pub async fn get_room_info(
&self,
room_id: u64,
sec_user_id: &str,
) -> Result<DouyinBasicRoomInfo, DouyinClientError> {
let url = format!(
"https://live.douyin.com/webcast/room/web/enter/?aid=6383&app_name=douyin_web&live_id=1&device_platform=web&language=zh-CN&enter_from=web_live&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Chrome&browser_version=122.0.0.0&web_rid={}",
"https://live.douyin.com/webcast/room/web/enter/?aid=6383&app_name=douyin_web&live_id=1&device_platform=web&language=zh-CN&enter_from=web_live&a_bogus=0&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Chrome&browser_version=122.0.0.0&web_rid={}",
room_id
);

let resp = self.client.get(&url)
let resp = self
.client
.get(&url)
.header("Referer", "https://live.douyin.com/")
.header("User-Agent", USER_AGENT)
.header("Cookie", self.cookies.clone())
.header("Cookie", self.account.cookies.clone())
.send()
.await?
.json::<DouyinRoomInfoResponse>()
.await?;

Ok(resp)
let status = resp.status();
let text = resp.text().await?;

if text.is_empty() {
log::warn!("Empty room info response, trying H5 API");
return self.get_room_info_h5(room_id, sec_user_id).await;
}

if status.is_success() {
if let Ok(data) = serde_json::from_str::<DouyinRoomInfoResponse>(&text) {
let cover = data
.data
.data
.first()
.and_then(|data| data.cover.as_ref())
.map(|cover| cover.url_list[0].clone());
return Ok(DouyinBasicRoomInfo {
room_id_str: data.data.data[0].id_str.clone(),
sec_user_id: sec_user_id.to_string(),
cover,
room_title: data.data.data[0].title.clone(),
user_name: data.data.user.nickname.clone(),
user_avatar: data.data.user.avatar_thumb.url_list[0].clone(),
status: data.data.room_status,
hls_url: data.data.data[0]
.stream_url
.as_ref()
.map(|stream_url| stream_url.hls_pull_url.clone())
.unwrap_or_default(),
stream_data: data.data.data[0]
.stream_url
.as_ref()
.map(|s| s.live_core_sdk_data.pull_data.stream_data.clone())
.unwrap_or_default(),
});
} else {
log::error!("Failed to parse room info response: {}", text);
return self.get_room_info_h5(room_id, sec_user_id).await;
}
}

log::error!("Failed to get room info: {}", status);
return self.get_room_info_h5(room_id, sec_user_id).await;
}

pub async fn get_room_info_h5(
&self,
room_id: u64,
sec_user_id: &str,
) -> Result<DouyinBasicRoomInfo, DouyinClientError> {
// Follow the biliup implementation and build the full set of URL parameters
let room_id_str = room_id.to_string();
// https://webcast.amemv.com/webcast/room/reflow/info/?type_id=0&live_id=1&version_code=99.99.99&app_id=1128&room_id=10000&sec_user_id=MS4wLjAB&aid=6383&device_platform=web&browser_language=zh-CN&browser_platform=Win32&browser_name=Mozilla&browser_version=5.0
let url_params = [
("type_id", "0"),
("live_id", "1"),
("version_code", "99.99.99"),
("app_id", "1128"),
("room_id", &room_id_str),
("sec_user_id", sec_user_id),
("aid", "6383"),
("device_platform", "web"),
];

// Build the URL
let query_string = url_params
.iter()
.map(|(k, v)| format!("{}={}", k, v))
.collect::<Vec<_>>()
.join("&");
let url = format!(
"https://webcast.amemv.com/webcast/room/reflow/info/?{}",
query_string
);

log::info!("get_room_info_h5: {}", url);

let resp = self
.client
.get(&url)
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36")
.header("Referer", "https://live.douyin.com/")
.header("Cookie", self.account.cookies.clone())
.send()
.await?;

let status = resp.status();
let text = resp.text().await?;

if status.is_success() {
// Try to parse as H5 response format
if let Ok(h5_data) =
serde_json::from_str::<super::response::DouyinH5RoomInfoResponse>(&text)
{
// Extract RoomBasicInfo from H5 response
let room = &h5_data.data.room;
let owner = &room.owner;

let cover = room
.cover
.as_ref()
.and_then(|c| c.url_list.first().cloned());
let hls_url = room
.stream_url
.as_ref()
.map(|s| s.hls_pull_url.clone())
.unwrap_or_default();

return Ok(DouyinBasicRoomInfo {
room_id_str: room.id_str.clone(),
room_title: room.title.clone(),
cover,
status: if room.status == 2 { 0 } else { 1 },
hls_url,
user_name: owner.nickname.clone(),
user_avatar: owner
.avatar_thumb
.url_list
.first()
.unwrap_or(&String::new())
.clone(),
sec_user_id: owner.sec_uid.clone(),
stream_data: room
.stream_url
.as_ref()
.map(|s| s.live_core_sdk_data.pull_data.stream_data.clone())
.unwrap_or_default(),
});
}

// If that fails, try to parse as a generic JSON to see what we got
if let Ok(json_value) = serde_json::from_str::<serde_json::Value>(&text) {
log::error!(
"Unexpected response structure: {}",
serde_json::to_string_pretty(&json_value).unwrap_or_default()
);

// Check if it's an error response
if let Some(status_code) = json_value.get("status_code").and_then(|v| v.as_i64()) {
if status_code != 0 {
let error_msg = json_value
.get("status_message")
.and_then(|v| v.as_str())
.unwrap_or("Unknown error");
return Err(DouyinClientError::Network(format!(
"API returned error status_code: {} - {}",
status_code, error_msg
)));
}
}

// Check whether this is an "invalid session" error
if let Some(status_message) =
json_value.get("status_message").and_then(|v| v.as_str())
{
if status_message.contains("invalid session") {
return Err(DouyinClientError::Network(
"Invalid session - please check your cookies. Make sure you have valid sessionid, passport_csrf_token, and other authentication cookies from douyin.com".to_string(),
));
}
}

return Err(DouyinClientError::Network(format!(
"Failed to parse h5 room info response: {}",
text
)));
} else {
log::error!("Failed to parse h5 room info response: {}", text);
return Err(DouyinClientError::Network(format!(
"Failed to parse h5 room info response: {}",
text
)));
}
}

log::error!("Failed to get h5 room info: {}", status);
Err(DouyinClientError::Network(format!(
"Failed to get h5 room info: {} {}",
status, text
)))
}

pub async fn get_user_info(&self) -> Result<super::response::User, DouyinClientError> {
// Use the IM spotlight relation API to get user info
let url = "https://www.douyin.com/aweme/v1/web/im/spotlight/relation/";
let resp = self
.client
.get(url)
.header("Referer", "https://www.douyin.com/")
.header("Cookie", self.account.cookies.clone())
.send()
.await?;

let status = resp.status();
let text = resp.text().await?;

if status.is_success() {
if let Ok(data) = serde_json::from_str::<super::response::DouyinRelationResponse>(&text)
{
if data.status_code == 0 {
let owner_sec_uid = &data.owner_sec_uid;

// Find the user's own info in the followings list by matching sec_uid
if let Some(followings) = &data.followings {
for following in followings {
if following.sec_uid == *owner_sec_uid {
let user = super::response::User {
id_str: following.uid.clone(),
sec_uid: following.sec_uid.clone(),
nickname: following.nickname.clone(),
avatar_thumb: following.avatar_thumb.clone(),
follow_info: super::response::FollowInfo::default(),
foreign_user: 0,
open_id_str: "".to_string(),
};
return Ok(user);
}
}
}

// If not found in followings, create a minimal user info from owner_sec_uid
let user = super::response::User {
id_str: "".to_string(), // We don't have the numeric UID
sec_uid: owner_sec_uid.clone(),
nickname: "抖音用户".to_string(), // Default nickname
avatar_thumb: super::response::AvatarThumb { url_list: vec![] },
follow_info: super::response::FollowInfo::default(),
foreign_user: 0,
open_id_str: "".to_string(),
};
return Ok(user);
}
} else {
log::error!("Failed to parse user info response: {}", text);
return Err(DouyinClientError::Network(format!(
"Failed to parse user info response: {}",
text
)));
}
}

log::error!("Failed to get user info: {}", status);

Err(DouyinClientError::Io(std::io::Error::new(
std::io::ErrorKind::NotFound,
"Failed to get user info from Douyin relation API",
)))
}

pub async fn get_cover_base64(&self, url: &str) -> Result<String, DouyinClientError> {

@@ -77,47 +336,50 @@ impl DouyinClient {
let response = self.client.get(url).send().await?;
let bytes = response.bytes().await?;
let base64 = base64::engine::general_purpose::STANDARD.encode(bytes);
let mime_type = mime_guess::from_path(url).first_or_octet_stream().to_string();
let mime_type = mime_guess::from_path(url)
.first_or_octet_stream()
.to_string();
Ok(format!("data:{};base64,{}", mime_type, base64))
}
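
Aside (not part of the diff): `get_cover_base64` above turns the downloaded cover bytes into a `data:` URL. A minimal sketch of that encoding step, assuming the `base64` and `mime_guess` crates; the URL below is a placeholder:

use base64::Engine;

// Encode raw image bytes as a data URL, guessing the MIME type from the URL's extension
// and falling back to application/octet-stream.
fn to_data_url(url: &str, bytes: &[u8]) -> String {
    let b64 = base64::engine::general_purpose::STANDARD.encode(bytes);
    let mime = mime_guess::from_path(url).first_or_octet_stream().to_string();
    format!("data:{};base64,{}", mime, b64)
}

fn main() {
    let data_url = to_data_url("https://example.com/cover.png", &[0x89, 0x50, 0x4E, 0x47]);
    assert!(data_url.starts_with("data:image/png;base64,"));
}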

pub async fn get_m3u8_content(&self, url: &str) -> Result<(MediaPlaylist, String), DouyinClientError> {
let content = self.client.get(url)
.send()
.await?
.text()
.await?;
pub async fn get_m3u8_content(
&self,
url: &str,
) -> Result<(MediaPlaylist, String), DouyinClientError> {
let content = self.client.get(url).send().await?.text().await?;
// m3u8 content: #EXTM3U
// #EXT-X-VERSION:3
// #EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=2560000
// http://7167739a741646b4651b6949b2f3eb8e.livehwc3.cn/pull-hls-l26.douyincdn.com/third/stream-693342996808860134_or4.m3u8?sub_m3u8=true&user_session_id=16090eb45ab8a2f042f7c46563936187&major_anchor_level=common&edge_slice=true&expire=67d944ec&sign=47b95cc6e8de20d82f3d404412fa8406
if content.contains("BANDWIDTH") {
log::info!("Master manifest with playlist URL: {}", url);
let new_url = content.lines().last().unwrap();
return Box::pin(self.get_m3u8_content(new_url)).await;
}

match m3u8_rs::parse_playlist_res(content.as_bytes()) {
Ok(Playlist::MasterPlaylist(_)) => {
Err(DouyinClientError::Playlist("Unexpected master playlist".to_string()))
}
Ok(Playlist::MasterPlaylist(_)) => Err(DouyinClientError::Playlist(
"Unexpected master playlist".to_string(),
)),
Ok(Playlist::MediaPlaylist(pl)) => Ok((pl, url.to_string())),
Err(e) => Err(DouyinClientError::Playlist(e.to_string())),
}
}

pub async fn download_ts(&self, url: &str, path: &str) -> Result<u64, DouyinClientError> {
let response = self.client.get(url)
.send()
.await?;
let response = self.client.get(url).send().await?;

if response.status() != reqwest::StatusCode::OK {
return Err(DouyinClientError::Network(response.error_for_status().unwrap_err()));
let error = response.error_for_status().unwrap_err();
log::error!("HTTP error: {} for URL: {}", error, url);
return Err(DouyinClientError::Network(error.to_string()));
}

let content = response.bytes().await?;
let mut file = File::create(path).await?;
file.write_all(&content).await?;

Ok(content.len() as u64)

let mut file = tokio::fs::File::create(path).await?;
let bytes = response.bytes().await?;
let size = bytes.len() as u64;
let mut content = std::io::Cursor::new(bytes);
tokio::io::copy(&mut content, &mut file).await?;
Ok(size)
}
}

@@ -182,8 +182,7 @@ pub struct Extra {

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PullDatas {
}
pub struct PullDatas {}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]

@@ -436,8 +435,7 @@ pub struct Stats {

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LinkerMap {
}
pub struct LinkerMap {}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]

@@ -478,13 +476,11 @@ pub struct LinkerDetail {

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LinkerMapStr {
}
pub struct LinkerMapStr {}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PlaymodeDetail {
}
pub struct PlaymodeDetail {}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]

@@ -589,4 +585,208 @@ pub struct User {
pub foreign_user: i64,
#[serde(rename = "open_id_str")]
pub open_id_str: String,
}
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DouyinRelationResponse {
pub extra: Option<Extra2>,
pub followings: Option<Vec<Following>>,
#[serde(rename = "owner_sec_uid")]
pub owner_sec_uid: String,
#[serde(rename = "status_code")]
pub status_code: i64,
#[serde(rename = "log_pb")]
pub log_pb: Option<LogPb>,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Extra2 {
#[serde(rename = "fatal_item_ids")]
pub fatal_item_ids: Vec<String>,
pub logid: String,
pub now: i64,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LogPb {
#[serde(rename = "impr_id")]
pub impr_id: String,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Following {
#[serde(rename = "account_cert_info")]
pub account_cert_info: String,
#[serde(rename = "avatar_signature")]
pub avatar_signature: String,
#[serde(rename = "avatar_small")]
pub avatar_small: AvatarSmall,
#[serde(rename = "avatar_thumb")]
pub avatar_thumb: AvatarThumb,
#[serde(rename = "birthday_hide_level")]
pub birthday_hide_level: i64,
#[serde(rename = "commerce_user_level")]
pub commerce_user_level: i64,
#[serde(rename = "custom_verify")]
pub custom_verify: String,
#[serde(rename = "enterprise_verify_reason")]
pub enterprise_verify_reason: String,
#[serde(rename = "follow_status")]
pub follow_status: i64,
#[serde(rename = "follower_status")]
pub follower_status: i64,
#[serde(rename = "has_e_account_role")]
pub has_e_account_role: bool,
#[serde(rename = "im_activeness")]
pub im_activeness: i64,
#[serde(rename = "im_role_ids")]
pub im_role_ids: Vec<serde_json::Value>,
#[serde(rename = "is_im_oversea_user")]
pub is_im_oversea_user: i64,
pub nickname: String,
#[serde(rename = "sec_uid")]
pub sec_uid: String,
#[serde(rename = "short_id")]
pub short_id: String,
pub signature: String,
#[serde(rename = "social_relation_sub_type")]
pub social_relation_sub_type: i64,
#[serde(rename = "social_relation_type")]
pub social_relation_type: i64,
pub uid: String,
#[serde(rename = "unique_id")]
pub unique_id: String,
#[serde(rename = "verification_type")]
pub verification_type: i64,
#[serde(rename = "webcast_sp_info")]
pub webcast_sp_info: serde_json::Value,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AvatarSmall {
pub uri: String,
#[serde(rename = "url_list")]
pub url_list: Vec<String>,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DouyinH5RoomInfoResponse {
pub data: H5Data,
pub extra: H5Extra,
#[serde(rename = "status_code")]
pub status_code: i64,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct H5Data {
pub room: H5Room,
pub user: H5User,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct H5Room {
pub id: u64,
#[serde(rename = "id_str")]
pub id_str: String,
pub status: i64,
pub title: String,
pub cover: Option<H5Cover>,
#[serde(rename = "stream_url")]
pub stream_url: Option<H5StreamUrl>,
pub owner: H5Owner,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct H5Cover {
#[serde(rename = "url_list")]
pub url_list: Vec<String>,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct H5StreamUrl {
pub provider: i64,
pub id: u64,
#[serde(rename = "id_str")]
pub id_str: String,
#[serde(rename = "default_resolution")]
pub default_resolution: String,
#[serde(rename = "rtmp_pull_url")]
pub rtmp_pull_url: String,
#[serde(rename = "flv_pull_url")]
pub flv_pull_url: H5FlvPullUrl,
#[serde(rename = "hls_pull_url")]
pub hls_pull_url: String,
#[serde(rename = "hls_pull_url_map")]
pub hls_pull_url_map: H5HlsPullUrlMap,
#[serde(rename = "live_core_sdk_data")]
pub live_core_sdk_data: LiveCoreSdkData,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct H5FlvPullUrl {
#[serde(rename = "FULL_HD1")]
pub full_hd1: Option<String>,
#[serde(rename = "HD1")]
pub hd1: Option<String>,
#[serde(rename = "SD1")]
pub sd1: Option<String>,
#[serde(rename = "SD2")]
pub sd2: Option<String>,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct H5HlsPullUrlMap {
#[serde(rename = "FULL_HD1")]
pub full_hd1: Option<String>,
#[serde(rename = "HD1")]
pub hd1: Option<String>,
#[serde(rename = "SD1")]
pub sd1: Option<String>,
#[serde(rename = "SD2")]
pub sd2: Option<String>,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct H5Owner {
pub nickname: String,
#[serde(rename = "avatar_thumb")]
pub avatar_thumb: H5AvatarThumb,
#[serde(rename = "sec_uid")]
pub sec_uid: String,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct H5AvatarThumb {
#[serde(rename = "url_list")]
pub url_list: Vec<String>,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct H5User {
pub nickname: String,
#[serde(rename = "avatar_thumb")]
pub avatar_thumb: Option<H5AvatarThumb>,
#[serde(rename = "sec_uid")]
pub sec_uid: String,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct H5Extra {
pub now: i64,
}

@@ -47,19 +47,42 @@ impl TsEntry {
})
}

pub fn to_segment(&self, continuous: bool) -> String {
/// Get timestamp in seconds
pub fn ts_seconds(&self) -> i64 {
// For some legacy problem, douyin entry's ts is s, bilibili entry's ts is ms.
// This should be fixed after 2.5.6, but we need to support entry.log generated by previous version.
if self.ts > 10000000000 {
self.ts / 1000
} else {
self.ts
}
}

pub fn ts_mili(&self) -> i64 {
// if already in ms, return as is
if self.ts > 10000000000 {
self.ts
} else {
self.ts * 1000
}
}
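
Aside (not part of the diff): the `ts_seconds`/`ts_mili` pair above disambiguates legacy second-based timestamps from millisecond ones with a simple magnitude check. A small sketch of that heuristic:

// Values above 10_000_000_000 cannot be plausible Unix seconds (that would be roughly year 2286),
// so they are treated as milliseconds; smaller values are treated as seconds.
fn ts_seconds(ts: i64) -> i64 {
    if ts > 10_000_000_000 { ts / 1000 } else { ts }
}

fn ts_millis(ts: i64) -> i64 {
    if ts > 10_000_000_000 { ts } else { ts * 1000 }
}

fn main() {
    assert_eq!(ts_seconds(1_700_000_000), 1_700_000_000);      // already seconds
    assert_eq!(ts_seconds(1_700_000_000_000), 1_700_000_000);  // milliseconds collapsed
    assert_eq!(ts_millis(1_700_000_000), 1_700_000_000_000);   // seconds expanded
}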

pub fn date_time(&self) -> String {
let date_str = Utc
.timestamp_opt(self.ts_seconds(), 0)
.unwrap()
.to_rfc3339();
format!("#EXT-X-PROGRAM-DATE-TIME:{}\n", date_str)
}

/// Convert entry into a segment in HLS manifest.
pub fn to_segment(&self) -> String {
if self.is_header {
return "".into();
}

let mut content = if continuous {
String::new()
} else {
"#EXT-X-DISCONTINUITY\n".into()
};
let mut content = String::new();

let date_str = Utc.timestamp_opt(self.ts / 1000, 0).unwrap().to_rfc3339();
content += &format!("#EXT-X-PROGRAM-DATE-TIME:{}\n", date_str);
content += &format!("#EXTINF:{:.2},\n", self.length);
content += &format!("{}\n", self.url);

@@ -86,9 +109,7 @@ pub struct EntryStore {
entries: Vec<TsEntry>,
total_duration: f64,
total_size: u64,
last_sequence: u64,

pub continue_sequence: u64,
pub last_sequence: u64,
}

impl EntryStore {

@@ -111,7 +132,6 @@ impl EntryStore {
total_duration: 0.0,
total_size: 0,
last_sequence: 0,
continue_sequence: 0,
};

entry_store.load(work_dir).await;

@@ -136,9 +156,7 @@ impl EntryStore {

let entry = entry.unwrap();

if entry.sequence > self.last_sequence {
self.last_sequence = entry.sequence;
}
self.last_sequence = std::cmp::max(self.last_sequence, entry.sequence);

if entry.is_header {
self.header = Some(entry.clone());

@@ -149,8 +167,6 @@ impl EntryStore {
self.total_duration += entry.length;
self.total_size += entry.size;
}

self.continue_sequence = self.last_sequence + 100;
}

pub async fn add_entry(&mut self, entry: TsEntry) {

@@ -166,9 +182,7 @@ impl EntryStore {

self.log_file.flush().await.unwrap();

if self.last_sequence < entry.sequence {
self.last_sequence = entry.sequence;
}
self.last_sequence = std::cmp::max(self.last_sequence, entry.sequence);

self.total_duration += entry.length;
self.total_size += entry.size;

@@ -186,20 +200,18 @@ impl EntryStore {
self.total_size
}

pub fn last_sequence(&self) -> u64 {
self.last_sequence
pub fn first_ts(&self) -> Option<i64> {
self.entries.first().map(|x| x.ts_mili())
}

pub fn last_ts(&self) -> Option<i64> {
self.entries.last().map(|entry| entry.ts)
self.entries.last().map(|x| x.ts_mili())
}

pub fn first_ts(&self) -> Option<i64> {
self.entries.first().map(|e| e.ts)
}

/// Generate a hls manifest for selected range
pub fn manifest(&self, vod: bool, range: Option<Range>) -> String {
/// Generate a hls manifest for selected range.
/// `vod` indicates the manifest is for stream or video.
/// `force_time` adds DATE-TIME tag for each entry.
pub fn manifest(&self, vod: bool, force_time: bool, range: Option<Range>) -> String {
let mut m3u8_content = "#EXTM3U\n".to_string();
m3u8_content += "#EXT-X-VERSION:6\n";
m3u8_content += if vod {

@@ -224,23 +236,42 @@ impl EntryStore {
m3u8_content += &format!("#EXT-X-MAP:URI=\"{}\"\n", header.url);
}

// Collect entries in range
let first_entry = self.entries.first().unwrap();
let first_entry_ts = first_entry.ts / 1000;
let mut previous_seq = first_entry.sequence;
let first_entry_ts = first_entry.ts_seconds();
let mut entries_in_range = vec![];
for e in &self.entries {
// ignore header, cause it's already in EXT-X-MAP
if e.is_header {
continue;
}
let discontinuous = e.sequence < previous_seq || e.sequence - previous_seq > 1;
previous_seq = e.sequence;

let entry_offset = (e.ts / 1000 - first_entry_ts) as f32;
let entry_offset = (e.ts_seconds() - first_entry_ts) as f32;
if range.is_none_or(|r| r.is_in(entry_offset)) {
m3u8_content += &e.to_segment(!discontinuous);
entries_in_range.push(e);
}
}

if entries_in_range.is_empty() {
m3u8_content += end_content;
log::warn!("No entries in range, return empty manifest");
return m3u8_content;
}

let mut previous_seq = entries_in_range.first().unwrap().sequence;
for (i, e) in entries_in_range.iter().enumerate() {
let discontinuous = e.sequence < previous_seq || e.sequence - previous_seq > 1;
if discontinuous {
m3u8_content += "#EXT-X-DISCONTINUITY\n";
}
// Add date time under these situations.
if i == 0 || i == entries_in_range.len() - 1 || force_time || discontinuous {
m3u8_content += &e.date_time();
}
m3u8_content += &e.to_segment();

previous_seq = e.sequence;
}

m3u8_content += end_content;
m3u8_content
}
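
Aside (not part of the diff): the manifest loop above filters entries by their offset from the first segment and emits `#EXT-X-DISCONTINUITY` whenever the sequence numbers jump. A simplified sketch of that selection logic, with a hypothetical `Range` standing in for the recorder's real type:

// Hypothetical stand-in for the recorder's Range: an inclusive window in seconds.
#[derive(Clone, Copy)]
struct Range { x: f32, y: f32 }

impl Range {
    fn is_in(&self, offset: f32) -> bool {
        offset >= self.x && offset <= self.y
    }
}

// Entries are (sequence, ts_seconds) pairs; returns the kept offsets plus whether a
// discontinuity (gap or reset in sequence numbers) was observed among the kept entries.
fn select(entries: &[(u64, i64)], range: Option<Range>) -> (Vec<f32>, bool) {
    let first_ts = entries[0].1;
    let mut kept = Vec::new();
    let mut discontinuity = false;
    let mut previous_seq = entries[0].0;
    for &(seq, ts) in entries {
        let offset = (ts - first_ts) as f32;
        if range.map_or(true, |r| r.is_in(offset)) {
            if seq < previous_seq || seq - previous_seq > 1 {
                discontinuity = true; // a jump in sequence numbers marks a stream restart
            }
            previous_seq = seq;
            kept.push(offset);
        }
    }
    (kept, discontinuity)
}

fn main() {
    let entries = [(1, 100), (2, 102), (5, 110)]; // sequence jumps from 2 to 5
    let (kept, disc) = select(&entries, Some(Range { x: 0.0, y: 20.0 }));
    assert_eq!(kept.len(), 3);
    assert!(disc);
}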