Compare commits
116 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
cf439d60c4 | ||
|
|
6127c67cd3 | ||
|
|
183eb063bb | ||
|
|
532609c57d | ||
|
|
83c6979973 | ||
|
|
8bea9336ae | ||
|
|
617a6a0b8e | ||
|
|
140ab772d0 | ||
|
|
e7d8c8814d | ||
|
|
588559c645 | ||
|
|
e7411d25b4 | ||
|
|
ed0bd88e3b | ||
|
|
b4fb2d058a | ||
|
|
4058b425c8 | ||
|
|
55d872a38c | ||
|
|
1f666d402d | ||
|
|
64dec36773 | ||
|
|
0b3df59015 | ||
|
|
e1b4f8ede9 | ||
|
|
b8c3f0a464 | ||
|
|
ee714c855b | ||
|
|
c5165c752d | ||
|
|
8c275c2edb | ||
|
|
2fc6f673d3 | ||
|
|
b7c9d12e41 | ||
|
|
8ca2934e8d | ||
|
|
868a2a1940 | ||
|
|
a1e57c5b9c | ||
|
|
b7a76e8f10 | ||
|
|
e82159b9a2 | ||
|
|
35d068e109 | ||
|
|
136e1a3774 | ||
|
|
fc6c6adfce | ||
|
|
5981d97d5f | ||
|
|
42c12b3bf9 | ||
|
|
39a8d0d741 | ||
|
|
1db0609961 | ||
|
|
26d60cecbf | ||
|
|
a97ec33c07 | ||
|
|
9a1bc0f449 | ||
|
|
e4d1024082 | ||
|
|
3d5f97f635 | ||
|
|
7c82766549 | ||
|
|
cfe91e0782 | ||
|
|
9a26683a76 | ||
|
|
8a9344e3ee | ||
|
|
d200c7cf09 | ||
|
|
eb01d62e53 | ||
|
|
f65f375283 | ||
|
|
08979d2079 | ||
|
|
c6efe07303 | ||
|
|
7294f0ca6d | ||
|
|
eac1c09149 | ||
|
|
1e9cd61eba | ||
|
|
7b7f341fa0 | ||
|
|
ac806b49b2 | ||
|
|
f20636a107 | ||
|
|
787a30e6f7 | ||
|
|
d1d217be18 | ||
|
|
944d0a371a | ||
|
|
0df03e0c9c | ||
|
|
7ffdf65705 | ||
|
|
89cdf91a48 | ||
|
|
43ebc27044 | ||
|
|
e6159555f3 | ||
|
|
1f2508aae9 | ||
|
|
ad13f58fa7 | ||
|
|
de4959d49f | ||
|
|
b5b75129e7 | ||
|
|
84346a486f | ||
|
|
3bdcddf5a2 | ||
|
|
98f68a5e14 | ||
|
|
2249b86af3 | ||
|
|
fd889922d8 | ||
|
|
8db7c6e320 | ||
|
|
5bc4ed6dfd | ||
|
|
22ad5f7fea | ||
|
|
c0369c1a14 | ||
|
|
322f4a3ca5 | ||
|
|
4e32453441 | ||
|
|
66725b8a64 | ||
|
|
f7bcbbca83 | ||
|
|
07a3b33040 | ||
|
|
2f9b4582f8 | ||
|
|
c3f63c58cf | ||
|
|
4a3529bc2e | ||
|
|
b0355a919f | ||
|
|
cfe1a0b4b9 | ||
|
|
b655e98f35 | ||
|
|
2d1021bc42 | ||
|
|
33d74999b9 | ||
|
|
84b7dd7a3c | ||
|
|
0c678fbda3 | ||
|
|
3486f7d050 | ||
|
|
d42a1010b8 | ||
|
|
ece6ceea45 | ||
|
|
b22ebb399e | ||
|
|
4431b10cb7 | ||
|
|
01a0c929e8 | ||
|
|
b06f6e8d09 | ||
|
|
753227acbb | ||
|
|
c7dd9091d0 | ||
|
|
bae20ce011 | ||
|
|
8da4759668 | ||
|
|
eb7c6d91e9 | ||
|
|
3c24dfe8a6 | ||
|
|
bb916daaaf | ||
|
|
3931e484c2 | ||
|
|
b67e258c31 | ||
|
|
1a7e6f5a43 | ||
|
|
437204dbe6 | ||
|
|
af105277d9 | ||
|
|
7efd327a36 | ||
|
|
0141586fa9 | ||
|
|
df1d8ccac6 | ||
|
|
10b6b95e4d |
@@ -2,20 +2,27 @@
|
||||
|
||||
## AI Components
|
||||
|
||||
- **LangChain Integration**: Uses `@langchain/core`, `@langchain/deepseek`, `@langchain/langgraph`, `@langchain/ollama`
|
||||
- **Whisper Transcription**: Local and online transcription via `whisper-rs` in Rust backend
|
||||
- **LangChain Integration**: Uses `@langchain/core`, `@langchain/deepseek`,
|
||||
`@langchain/langgraph`, `@langchain/ollama`
|
||||
- **Whisper Transcription**: Local and online transcription via `whisper-rs` in
|
||||
Rust backend
|
||||
- **AI Agent**: Located in [src/lib/agent/](mdc:src/lib/agent/) directory
|
||||
|
||||
## Frontend AI Features
|
||||
|
||||
- **AI Page**: [src/page/AI.svelte](mdc:src/page/AI.svelte) - Main AI interface
|
||||
- **Agent Logic**: [src/lib/agent/](mdc:src/lib/agent/) - AI agent implementation
|
||||
- **Interface**: [src/lib/interface.ts](mdc:src/lib/interface.ts) - AI communication layer
|
||||
- **Interface**: [src/lib/interface.ts](mdc:src/lib/interface.ts)
|
||||
\- AI communication layer
|
||||
|
||||
## Backend AI Features
|
||||
|
||||
- **Subtitle Generation**: [src-tauri/src/subtitle_generator/](mdc:src-tauri/src/subtitle_generator/) - AI-powered subtitle creation
|
||||
- **Whisper Integration**: [src-tauri/src/subtitle_generator.rs](mdc:src-tauri/src/subtitle_generator.rs) - Speech-to-text processing
|
||||
- **Subtitle Generation**:
|
||||
[src-tauri/src/subtitle_generator/](mdc:src-tauri/src/subtitle_generator/) -
|
||||
AI-powered subtitle creation
|
||||
- **Whisper Integration**:
|
||||
[src-tauri/src/subtitle_generator.rs](mdc:src-tauri/src/subtitle_generator.rs)
|
||||
\- Speech-to-text processing
|
||||
- **CUDA Support**: Optional CUDA acceleration for Whisper via feature flag
|
||||
|
||||
## AI Workflows
|
||||
|
||||
@@ -3,13 +3,16 @@
|
||||
## Build Scripts
|
||||
|
||||
- **PowerShell**: [build.ps1](mdc:build.ps1) - Windows build script
|
||||
- **FFmpeg Setup**: [ffmpeg_setup.ps1](mdc:ffmpeg_setup.ps1) - FFmpeg installation script
|
||||
- **Version Bump**: [scripts/bump.cjs](mdc:scripts/bump.cjs) - Version management script
|
||||
- **FFmpeg Setup**: [ffmpeg_setup.ps1](mdc:ffmpeg_setup.ps1)
|
||||
\- FFmpeg installation script
|
||||
- **Version Bump**: [scripts/bump.cjs](mdc:scripts/bump.cjs)
|
||||
\- Version management script
|
||||
|
||||
## Package Management
|
||||
|
||||
- **Node.js**: [package.json](mdc:package.json) - Frontend dependencies and scripts
|
||||
- **Rust**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml) - Backend dependencies and features
|
||||
- **Rust**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml)
|
||||
\- Backend dependencies and features
|
||||
- **Lock Files**: [yarn.lock](mdc:yarn.lock) - Yarn dependency lock
|
||||
|
||||
## Build Configuration
|
||||
@@ -17,16 +20,22 @@
|
||||
- **Vite**: [vite.config.ts](mdc:vite.config.ts) - Frontend build tool configuration
|
||||
- **Tailwind**: [tailwind.config.cjs](mdc:tailwind.config.cjs) - CSS framework configuration
|
||||
- **PostCSS**: [postcss.config.cjs](mdc:postcss.config.cjs) - CSS processing configuration
|
||||
- **TypeScript**: [tsconfig.json](mdc:tsconfig.json), [tsconfig.node.json](mdc:tsconfig.node.json) - TypeScript configuration
|
||||
- **TypeScript**: [tsconfig.json](mdc:tsconfig.json),
|
||||
[tsconfig.node.json](mdc:tsconfig.node.json) - TypeScript configuration
|
||||
|
||||
## Tauri Configuration
|
||||
|
||||
- **Main Config**: [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json) - Core Tauri settings
|
||||
- **Main Config**: [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
|
||||
\- Core Tauri settings
|
||||
- **Platform Configs**:
|
||||
- [src-tauri/tauri.macos.conf.json](mdc:src-tauri/tauri.macos.conf.json) - macOS specific
|
||||
- [src-tauri/tauri.linux.conf.json](mdc:src-tauri/tauri.linux.conf.json) - Linux specific
|
||||
- [src-tauri/tauri.windows.conf.json](mdc:src-tauri/tauri.windows.conf.json) - Windows specific
|
||||
- [src-tauri/tauri.windows.cuda.conf.json](mdc:src-tauri/tauri.windows.cuda.conf.json) - Windows with CUDA
|
||||
- [src-tauri/tauri.macos.conf.json](mdc:src-tauri/tauri.macos.conf.json)
|
||||
\- macOS specific
|
||||
- [src-tauri/tauri.linux.conf.json](mdc:src-tauri/tauri.linux.conf.json)
|
||||
\- Linux specific
|
||||
- [src-tauri/tauri.windows.conf.json](mdc:src-tauri/tauri.windows.conf.json)
|
||||
\- Windows specific
|
||||
- [src-tauri/tauri.windows.cuda.conf.json](mdc:src-tauri/tauri.windows.cuda.conf.json)
|
||||
\- Windows with CUDA
|
||||
|
||||
## Docker Support
|
||||
|
||||
|
||||
@@ -3,8 +3,10 @@
|
||||
## Database Architecture
|
||||
|
||||
- **SQLite Database**: Primary data storage using `sqlx` with async runtime
|
||||
- **Database Module**: [src-tauri/src/database/](mdc:src-tauri/src/database/) - Core database operations
|
||||
- **Migration System**: [src-tauri/src/migration.rs](mdc:src-tauri/src/migration.rs) - Database schema management
|
||||
- **Database Module**: [src-tauri/src/database/](mdc:src-tauri/src/database/)
|
||||
\- Core database operations
|
||||
- **Migration System**: [src-tauri/src/migration.rs](mdc:src-tauri/src/migration.rs)
|
||||
\- Database schema management
|
||||
|
||||
## Data Models
|
||||
|
||||
@@ -15,9 +17,11 @@
|
||||
|
||||
## Frontend Data Layer
|
||||
|
||||
- **Database Interface**: [src/lib/db.ts](mdc:src/lib/db.ts) - Frontend database operations
|
||||
- **Database Interface**: [src/lib/db.ts](mdc:src/lib/db.ts)
|
||||
\- Frontend database operations
|
||||
- **Stores**: [src/lib/stores/](mdc:src/lib/stores/) - State management for data
|
||||
- **Version Management**: [src/lib/stores/version.ts](mdc:src/lib/stores/version.ts) - Version tracking
|
||||
- **Version Management**: [src/lib/stores/version.ts](mdc:src/lib/stores/version.ts)
|
||||
\- Version tracking
|
||||
|
||||
## Data Operations
|
||||
|
||||
@@ -28,13 +32,17 @@
|
||||
|
||||
## File Management
|
||||
|
||||
- **Cache Directory**: [src-tauri/cache/](mdc:src-tauri/cache/) - Temporary file storage
|
||||
- **Upload Directory**: [src-tauri/cache/uploads/](mdc:src-tauri/cache/uploads/) - User upload storage
|
||||
- **Bilibili Cache**: [src-tauri/cache/bilibili/](mdc:src-tauri/cache/bilibili/) - Platform-specific cache
|
||||
- **Cache Directory**: [src-tauri/cache/](mdc:src-tauri/cache/)
|
||||
\- Temporary file storage
|
||||
- **Upload Directory**: [src-tauri/cache/uploads/](mdc:src-tauri/cache/uploads/)
|
||||
\- User upload storage
|
||||
- **Bilibili Cache**: [src-tauri/cache/bilibili/](mdc:src-tauri/cache/bilibili/)
|
||||
\- Platform-specific cache
|
||||
|
||||
## Data Persistence
|
||||
|
||||
- **SQLite Files**: [src-tauri/data/data_v2.db](mdc:src-tauri/data/data_v2.db) - Main database file
|
||||
- **SQLite Files**: [src-tauri/data/data_v2.db](mdc:src-tauri/data/data_v2.db)
|
||||
\- Main database file
|
||||
- **Write-Ahead Logging**: WAL mode for concurrent access and performance
|
||||
- **Backup Strategy**: Database backup and recovery procedures
|
||||
- **Migration Handling**: Automatic schema updates and data migration
|
||||
|
||||
@@ -17,8 +17,10 @@
|
||||
## Component Structure
|
||||
|
||||
- **Page components**: Located in [src/page/](mdc:src/page/) directory
|
||||
- **Reusable components**: Located in [src/lib/components/](mdc:src/lib/components/) directory
|
||||
- **Layout components**: [src/App.svelte](mdc:src/App.svelte), [src/AppClip.svelte](mdc:src/AppClip.svelte), [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
- **Reusable components**: Located in [src/lib/components/](mdc:src/lib/components/)
|
||||
directory
|
||||
- **Layout components**: [src/App.svelte](mdc:src/App.svelte),
|
||||
[src/AppClip.svelte](mdc:src/AppClip.svelte), [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
|
||||
## Styling
|
||||
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
# BiliBili ShadowReplay Project Overview
|
||||
|
||||
This is a Tauri-based desktop application for caching live streams and performing real-time editing and submission. It supports Bilibili and Douyin platforms.
|
||||
This is a Tauri-based desktop application for caching live streams and performing
|
||||
real-time editing and submission. It supports Bilibili and Douyin platforms.
|
||||
|
||||
## Project Structure
|
||||
|
||||
### Frontend (Svelte + TypeScript)
|
||||
|
||||
- **Main entry points**: [src/main.ts](mdc:src/main.ts), [src/main_clip.ts](mdc:src/main_clip.ts), [src/main_live.ts](mdc:src/main_live.ts)
|
||||
- **App components**: [src/App.svelte](mdc:src/App.svelte), [src/AppClip.svelte](mdc:src/AppClip.svelte), [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
- **Main entry points**: [src/main.ts](mdc:src/main.ts),
|
||||
[src/main_clip.ts](mdc:src/main_clip.ts), [src/main_live.ts](mdc:src/main_live.ts)
|
||||
- **App components**: [src/App.svelte](mdc:src/App.svelte),
|
||||
[src/AppClip.svelte](mdc:src/AppClip.svelte), [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
- **Pages**: Located in [src/page/](mdc:src/page/) directory
|
||||
- **Components**: Located in [src/lib/components/](mdc:src/lib/components/) directory
|
||||
- **Stores**: Located in [src/lib/stores/](mdc:src/lib/stores/) directory
|
||||
@@ -19,11 +22,14 @@ This is a Tauri-based desktop application for caching live streams and performin
|
||||
- [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/) - Stream recording functionality
|
||||
- [src-tauri/src/database/](mdc:src-tauri/src/database/) - Database operations
|
||||
- [src-tauri/src/handlers/](mdc:src-tauri/src/handlers/) - Tauri command handlers
|
||||
- **Custom crate**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/) - Danmaku stream processing
|
||||
- **Custom crate**:
|
||||
[src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/) -
|
||||
Danmaku stream processing
|
||||
|
||||
### Configuration
|
||||
|
||||
- **Frontend config**: [tsconfig.json](mdc:tsconfig.json), [vite.config.ts](mdc:vite.config.ts), [tailwind.config.cjs](mdc:tailwind.config.cjs)
|
||||
- **Frontend config**: [tsconfig.json](mdc:tsconfig.json),
|
||||
[vite.config.ts](mdc:vite.config.ts), [tailwind.config.cjs](mdc:tailwind.config.cjs)
|
||||
- **Backend config**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml), [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
|
||||
- **Example config**: [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
|
||||
|
||||
@@ -2,16 +2,22 @@
|
||||
|
||||
## Project Structure
|
||||
|
||||
- **Main entry**: [src-tauri/src/main.rs](mdc:src-tauri/src/main.rs) - Application entry point
|
||||
- **Main entry**: [src-tauri/src/main.rs](mdc:src-tauri/src/main.rs)
|
||||
\- Application entry point
|
||||
- **Core modules**:
|
||||
- [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/) - Stream recording and management
|
||||
- [src-tauri/src/database/](mdc:src-tauri/src/database/) - SQLite database operations
|
||||
- [src-tauri/src/handlers/](mdc:src-tauri/src/handlers/) - Tauri command handlers
|
||||
- [src-tauri/src/subtitle_generator/](mdc:src-tauri/src/subtitle_generator/) - AI-powered subtitle generation
|
||||
- [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/)
|
||||
\- Stream recording and management
|
||||
- [src-tauri/src/database/](mdc:src-tauri/src/database/)
|
||||
\- SQLite database operations
|
||||
- [src-tauri/src/handlers/](mdc:src-tauri/src/handlers/)
|
||||
\- Tauri command handlers
|
||||
- [src-tauri/src/subtitle_generator/](mdc:src-tauri/src/subtitle_generator/)
|
||||
\- AI-powered subtitle generation
|
||||
|
||||
## Custom Crates
|
||||
|
||||
- **danmu_stream**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/) - Danmaku stream processing library
|
||||
- **danmu_stream**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/)
|
||||
\- Danmaku stream processing library
|
||||
|
||||
## Dependencies
|
||||
|
||||
@@ -23,9 +29,12 @@
|
||||
|
||||
## Configuration
|
||||
|
||||
- **Cargo.toml**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml) - Dependencies and features
|
||||
- **Tauri config**: [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json) - App configuration
|
||||
- **Example config**: [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml) - User configuration template
|
||||
- **Cargo.toml**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml)
|
||||
\- Dependencies and features
|
||||
- **Tauri config**: [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
|
||||
\- App configuration
|
||||
- **Example config**: [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
\- User configuration template
|
||||
|
||||
## Features
|
||||
|
||||
|
||||
@@ -2,9 +2,12 @@
|
||||
|
||||
## Core Recording Components
|
||||
|
||||
- **Recorder Manager**: [src-tauri/src/recorder_manager.rs](mdc:src-tauri/src/recorder_manager.rs) - Main recording orchestration
|
||||
- **Recorder**: [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/) - Individual stream recording logic
|
||||
- **Danmaku Stream**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/) - Custom crate for bullet comment processing
|
||||
- **Recorder Manager**: [src-tauri/src/recorder_manager.rs](mdc:src-tauri/src/recorder_manager.rs)
|
||||
\- Main recording orchestration
|
||||
- **Recorder**: [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/)
|
||||
\- Individual stream recording logic
|
||||
- **Danmaku Stream**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/)
|
||||
\- Custom crate for bullet comment processing
|
||||
|
||||
## Supported Platforms
|
||||
|
||||
@@ -21,10 +24,14 @@
|
||||
|
||||
## Frontend Interfaces
|
||||
|
||||
- **Live Mode**: [src/AppLive.svelte](mdc:src/AppLive.svelte) - Live streaming interface
|
||||
- **Clip Mode**: [src/AppClip.svelte](mdc:src/AppClip.svelte) - Video editing and clipping
|
||||
- **Room Management**: [src/page/Room.svelte](mdc:src/page/Room.svelte) - Stream room configuration
|
||||
- **Task Management**: [src/page/Task.svelte](mdc:src/page/Task.svelte) - Recording task monitoring
|
||||
- **Live Mode**: [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
\- Live streaming interface
|
||||
- **Clip Mode**: [src/AppClip.svelte](mdc:src/AppClip.svelte)
|
||||
\- Video editing and clipping
|
||||
- **Room Management**: [src/page/Room.svelte](mdc:src/page/Room.svelte)
|
||||
\- Stream room configuration
|
||||
- **Task Management**: [src/page/Task.svelte](mdc:src/page/Task.svelte)
|
||||
\- Recording task monitoring
|
||||
|
||||
## Technical Implementation
|
||||
|
||||
|
||||
7
.github/CONTRIBUTING.md
vendored
@@ -12,7 +12,8 @@
|
||||
|
||||
### Windows
|
||||
|
||||
Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于 Whisper 是否使用 GPU 加速。`cpu` 版本使用 CPU 进行推理,`cuda` 版本使用 GPU 进行推理。
|
||||
Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于 Whisper 是否使用 GPU 加速。
|
||||
`cpu` 版本使用 CPU 进行推理,`cuda` 版本使用 GPU 进行推理。
|
||||
|
||||
默认运行为 `cpu` 版本,使用 `yarn tauri dev --features cuda` 命令运行 `cuda` 版本。
|
||||
|
||||
@@ -20,7 +21,9 @@ Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于
|
||||
|
||||
1. 安装 LLVM 且配置相关环境变量,详情见 [LLVM Windows Setup](https://llvm.org/docs/GettingStarted.html#building-llvm-on-windows);
|
||||
|
||||
2. 安装 CUDA Toolkit,详情见 [CUDA Windows Setup](https://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html);要注意,安装时请勾选 **VisualStudio integration**。
|
||||
2. 安装 CUDA Toolkit,详情见
|
||||
[CUDA Windows Setup](https://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html);
|
||||
要注意,安装时请勾选 **VisualStudio integration**。
|
||||
|
||||
### 常见问题
|
||||
|
||||
|
||||
43
.github/workflows/check.yml
vendored
@@ -1,43 +0,0 @@
|
||||
name: Check
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [ "main" ]
|
||||
paths:
|
||||
- 'src-tauri/**'
|
||||
- '.github/workflows/check.yml'
|
||||
|
||||
jobs:
|
||||
check:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: rustfmt
|
||||
|
||||
- name: Cache cargo registry
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
~/.cargo/git
|
||||
src-tauri/target
|
||||
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
|
||||
|
||||
- name: Install dependencies (ubuntu only)
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf ffmpeg
|
||||
|
||||
- name: Check formatting
|
||||
run: cargo fmt --check
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Check tests
|
||||
run: cargo test -v && cargo test --no-default-features --features headless -v
|
||||
working-directory: src-tauri
|
||||
1
.github/workflows/main.yml
vendored
@@ -108,4 +108,3 @@ jobs:
|
||||
releaseDraft: true
|
||||
prerelease: false
|
||||
args: ${{ matrix.args }} ${{ matrix.platform == 'windows-latest' && matrix.features == 'cuda' && '--config src-tauri/tauri.windows.cuda.conf.json' || '' }}
|
||||
includeDebug: true
|
||||
|
||||
5
.markdownlint.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"MD033": {
|
||||
"allowed_elements": ["nobr", "sup"]
|
||||
}
|
||||
}
|
||||
46
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,46 @@
|
||||
fail_fast: true
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v6.0.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
exclude: '(\.json$|public/)'
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: cargo-fmt
|
||||
name: cargo fmt
|
||||
entry: cargo fmt --manifest-path src-tauri/Cargo.toml --
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- id: cargo-clippy
|
||||
name: cargo clippy
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo clippy --manifest-path src-tauri/Cargo.toml
|
||||
|
||||
- id: cargo-clippy-headless
|
||||
name: cargo clippy headless
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo clippy --manifest-path src-tauri/Cargo.toml --no-default-features --features headless
|
||||
|
||||
- id: cargo-test
|
||||
name: cargo test
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo test --manifest-path src-tauri/Cargo.toml
|
||||
|
||||
- id: cargo-test-headless
|
||||
name: cargo test headless
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo test --manifest-path src-tauri/Cargo.toml --no-default-features --features headless
|
||||
14
Dockerfile
@@ -23,7 +23,7 @@ COPY . .
|
||||
RUN yarn build
|
||||
|
||||
# Build Rust backend
|
||||
FROM rust:1.86-slim AS rust-builder
|
||||
FROM rust:1.90-slim AS rust-builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@@ -48,15 +48,9 @@ COPY src-tauri/crates ./src-tauri/crates
|
||||
WORKDIR /app/src-tauri
|
||||
RUN rustup component add rustfmt
|
||||
RUN cargo build --no-default-features --features headless --release
|
||||
# Download and install FFmpeg static build
|
||||
RUN wget https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz \
|
||||
&& tar xf ffmpeg-release-amd64-static.tar.xz \
|
||||
&& mv ffmpeg-*-static/ffmpeg ./ \
|
||||
&& mv ffmpeg-*-static/ffprobe ./ \
|
||||
&& rm -rf ffmpeg-*-static ffmpeg-release-amd64-static.tar.xz
|
||||
|
||||
# Final stage
|
||||
FROM debian:bookworm-slim AS final
|
||||
FROM debian:trixie-slim AS final
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@@ -67,13 +61,13 @@ RUN apt-get update && apt-get install -y \
|
||||
fonts-wqy-microhei \
|
||||
netbase \
|
||||
nscd \
|
||||
ffmpeg \
|
||||
&& update-ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
|
||||
RUN touch /etc/netgroup
|
||||
RUN mkdir -p /var/run/nscd && chmod 755 /var/run/nscd
|
||||
RUN nscd
|
||||
|
||||
# Add /app to PATH
|
||||
ENV PATH="/app:${PATH}"
|
||||
@@ -83,8 +77,6 @@ COPY --from=frontend-builder /app/dist ./dist
|
||||
|
||||
# Copy built Rust binary
|
||||
COPY --from=rust-builder /app/src-tauri/target/release/bili-shadowreplay .
|
||||
COPY --from=rust-builder /app/src-tauri/ffmpeg ./ffmpeg
|
||||
COPY --from=rust-builder /app/src-tauri/ffprobe ./ffprobe
|
||||
|
||||
# Expose port
|
||||
EXPOSE 3000
|
||||
|
||||
@@ -28,4 +28,5 @@ BiliBili ShadowReplay 是一个缓存直播并进行实时编辑投稿的工具
|
||||
|
||||
## 赞助
|
||||
|
||||

|
||||
<!-- markdownlint-disable MD033 -->
|
||||
<img src="docs/public/images/donate.png" alt="donate" width="300">
|
||||
|
||||
2
_typos.toml
Normal file
@@ -0,0 +1,2 @@
|
||||
[default.extend-identifiers]
|
||||
pull_datas = "pull_datas"
|
||||
@@ -1,9 +1,11 @@
|
||||
# Whisper 配置
|
||||
|
||||
要使用 AI 字幕识别功能,需要在设置页面配置 Whisper。目前可以选择使用本地运行 Whisper 模型,或是使用在线的 Whisper 服务(通常需要付费获取 API Key)。
|
||||
要使用 AI 字幕识别功能,需要在设置页面配置 Whisper。目前可以选择使用本地运行 Whisper 模型,或是使用在线的 Whisper 服务(通常需要付
|
||||
费获取 API Key)。
|
||||
|
||||
> [!NOTE]
|
||||
> 其实有许多更好的中文字幕识别解决方案,但是这类服务通常需要将文件上传到对象存储后异步处理,考虑到实现的复杂度,选择了使用本地运行 Whisper 模型或是使用在线的 Whisper 服务,在请求返回时能够直接获取字幕生成结果。
|
||||
> 其实有许多更好的中文字幕识别解决方案,但是这类服务通常需要将文件上传到对象存储后异步处理,考虑到实现的复杂度,选择了使用本地运行 Whisper 模型或是使
|
||||
> 用在线的 Whisper 服务,在请求返回时能够直接获取字幕生成结果。
|
||||
|
||||
## 本地运行 Whisper 模型
|
||||
|
||||
@@ -16,20 +18,29 @@
|
||||
|
||||
可以跟据自己的需求选择不同的模型,要注意带有 `en` 的模型是英文模型,其他模型为多语言模型。
|
||||
|
||||
模型文件的大小通常意味着其在运行时资源占用的大小,因此请根据电脑配置选择合适的模型。此外,GPU 版本与 CPU 版本在字幕生成速度上存在**巨大差异**,因此推荐使用 GPU 版本进行本地处理(目前仅支持 Nvidia GPU)。
|
||||
模型文件的大小通常意味着其在运行时资源占用的大小,因此请根据电脑配置选择合适的模型。此外,GPU 版本与 CPU 版本在字幕生成速度上存在**巨大差异**,因此
|
||||
推荐使用 GPU 版本进行本地处理(目前仅支持 Nvidia GPU)。
|
||||
|
||||
## 使用在线 Whisper 服务
|
||||
|
||||

|
||||
|
||||
如果需要使用在线的 Whisper 服务进行字幕生成,可以在设置中切换为在线 Whisper,并配置好 API Key。提供 Whisper 服务的平台并非只有 OpenAI 一家,许多云服务平台也提供 Whisper 服务。
|
||||
如果需要使用在线的 Whisper 服务进行字幕生成,可以在设置中切换为在线 Whisper,并配置好 API Key。提供 Whisper 服务的平台并非只有
|
||||
OpenAI 一家,许多云服务平台也提供 Whisper 服务。
|
||||
|
||||
## 字幕识别质量的调优
|
||||
|
||||
目前在设置中支持设置 Whisper 语言和 Whisper 提示词,这些设置对于本地和在线的 Whisper 服务都有效。
|
||||
|
||||
通常情况下,`auto` 语言选项能够自动识别语音语言,并生成相应语言的字幕。如果需要生成其他语言的字幕,或是生成的字幕语言不匹配,可以手动配置指定的语言。根据 OpenAI 官方文档中对于 `language` 参数的描述,目前支持的语言包括
|
||||
通常情况下,`auto` 语言选项能够自动识别语音语言,并生成相应语言的字幕。如果需要生成其他语言的字幕,或是生成的字幕语言不匹配,可以手动配置指定的语言。
|
||||
根据 OpenAI 官方文档中对于 `language` 参数的描述,目前支持的语言包括
|
||||
|
||||
Afrikaans, Arabic, Armenian, Azerbaijani, Belarusian, Bosnian, Bulgarian, Catalan, Chinese, Croatian, Czech, Danish, Dutch, English, Estonian, Finnish, French, Galician, German, Greek, Hebrew, Hindi, Hungarian, Icelandic, Indonesian, Italian, Japanese, Kannada, Kazakh, Korean, Latvian, Lithuanian, Macedonian, Malay, Marathi, Maori, Nepali, Norwegian, Persian, Polish, Portuguese, Romanian, Russian, Serbian, Slovak, Slovenian, Spanish, Swahili, Swedish, Tagalog, Tamil, Thai, Turkish, Ukrainian, Urdu, Vietnamese, and Welsh.
|
||||
Afrikaans, Arabic, Armenian, Azerbaijani, Belarusian, Bosnian, Bulgarian,
|
||||
Catalan, Chinese, Croatian, Czech, Danish, Dutch, English, Estonian, Finnish,
|
||||
French, Galician, German, Greek, Hebrew, Hindi, Hungarian, Icelandic,
|
||||
Indonesian, Italian, Japanese, Kannada, Kazakh, Korean, Latvian, Lithuanian,
|
||||
Macedonian, Malay, Marathi, Maori, Nepali, Norwegian, Persian, Polish,
|
||||
Portuguese, Romanian, Russian, Serbian, Slovak, Slovenian, Spanish, Swahili,
|
||||
Swedish, Tagalog, Tamil, Thai, Turkish, Ukrainian, Urdu, Vietnamese, and Welsh.
|
||||
|
||||
提示词可以优化生成的字幕的风格(也会一定程度上影响质量),要注意,Whisper 无法理解复杂的提示词,你可以在提示词中使用一些简单的描述,让其在选择词汇时使用偏向于提示词所描述的领域相关的词汇,以避免出现毫不相干领域的词汇;或是让它在标点符号的使用上参照提示词的风格。
|
||||
|
||||
@@ -2,7 +2,9 @@
|
||||
|
||||
桌面端目前提供了 Windows、Linux 和 MacOS 三个平台的安装包。
|
||||
|
||||
安装包分为两个版本,普通版和 debug 版,普通版适合大部分用户使用,debug 版包含了更多的调试信息,适合开发者使用;由于程序会对账号等敏感信息进行管理,请从信任的来源进行下载;所有版本均可在 [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) 页面下载安装。
|
||||
由于程序会对账号等敏感信息进行管理,请从信任的来源进行下载;所有版本均可在 [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) 页面下载安装。
|
||||
|
||||
对于 MacOS 用户,请先手动安装 FFmpeg,详情见 [FFmpeg 配置](../config/ffmpeg.md)。
|
||||
|
||||
## Windows
|
||||
|
||||
|
||||
@@ -17,6 +17,8 @@
|
||||
|
||||
### 使用 DeepLinking 快速添加直播间
|
||||
|
||||
<!-- MD033 -->
|
||||
|
||||
<video src="/videos/deeplinking.mp4" loop autoplay muted style="border-radius: 10px;"></video>
|
||||
|
||||
在浏览器中观看直播时,替换地址栏中直播间地址中的 `https://` 为 `bsr://` 即可快速唤起 BSR 添加直播间。
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
"id": "a96a5e9f-9857-4c13-b889-91da2ace208a",
|
||||
"event": "recorder.added",
|
||||
"payload": {
|
||||
"room_id": 26966466,
|
||||
"room_id": "26966466",
|
||||
"created_at": "2025-09-07T03:33:14.258796+00:00",
|
||||
"platform": "bilibili",
|
||||
"auto_start": true,
|
||||
@@ -35,7 +35,7 @@
|
||||
"id": "e33623d4-e040-4390-88f5-d351ceeeace7",
|
||||
"event": "recorder.removed",
|
||||
"payload": {
|
||||
"room_id": 27183290,
|
||||
"room_id": "27183290",
|
||||
"created_at": "2025-08-30T10:54:18.569198+00:00",
|
||||
"platform": "bilibili",
|
||||
"auto_start": true,
|
||||
@@ -57,9 +57,9 @@
|
||||
"id": "f12f3424-f7d8-4b2f-a8b7-55477411482e",
|
||||
"event": "live.started",
|
||||
"payload": {
|
||||
"room_id": 843610,
|
||||
"room_id": "843610",
|
||||
"room_info": {
|
||||
"room_id": 843610,
|
||||
"room_id": "843610",
|
||||
"room_title": "登顶!",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/new_room_cover/73aea43f4b4624c314d62fea4b424822fb506dfb.jpg"
|
||||
},
|
||||
@@ -86,9 +86,9 @@
|
||||
"id": "e8b0756a-02f9-4655-b5ae-a170bf9547bd",
|
||||
"event": "live.ended",
|
||||
"payload": {
|
||||
"room_id": 843610,
|
||||
"room_id": "843610",
|
||||
"room_info": {
|
||||
"room_id": 843610,
|
||||
"room_id": "843610",
|
||||
"room_title": "登顶!",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/new_room_cover/73aea43f4b4624c314d62fea4b424822fb506dfb.jpg"
|
||||
},
|
||||
@@ -117,9 +117,9 @@
|
||||
"id": "5ec1ea10-2b31-48fd-8deb-f2d7d2ea5985",
|
||||
"event": "record.started",
|
||||
"payload": {
|
||||
"room_id": 26966466,
|
||||
"room_id": "26966466",
|
||||
"room_info": {
|
||||
"room_id": 26966466,
|
||||
"room_id": "26966466",
|
||||
"room_title": "早安獭獭栞!下播前抽fufu",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/user_cover/b810c36855168034557e905e5916b1dba1761fa4.jpg"
|
||||
},
|
||||
@@ -146,9 +146,9 @@
|
||||
"id": "56fd03e5-3965-4c2e-a6a9-bb6932347eb3",
|
||||
"event": "record.ended",
|
||||
"payload": {
|
||||
"room_id": 26966466,
|
||||
"room_id": "26966466",
|
||||
"room_info": {
|
||||
"room_id": 26966466,
|
||||
"room_id": "26966466",
|
||||
"room_title": "早安獭獭栞!下播前抽fufu",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/user_cover/b810c36855168034557e905e5916b1dba1761fa4.jpg"
|
||||
},
|
||||
@@ -177,7 +177,7 @@
|
||||
"payload": {
|
||||
"platform": "bilibili",
|
||||
"live_id": "1756607084705",
|
||||
"room_id": 1967212929,
|
||||
"room_id": "1967212929",
|
||||
"title": "灶台O.o",
|
||||
"length": 9,
|
||||
"size": 1927112,
|
||||
@@ -198,7 +198,7 @@
|
||||
"event": "clip.generated",
|
||||
"payload": {
|
||||
"id": 316,
|
||||
"room_id": 27183290,
|
||||
"room_id": "27183290",
|
||||
"cover": "[27183290][1757172501727][一起看凡人修仙传][2025-09-07_00-16-11].jpg",
|
||||
"file": "[27183290][1757172501727][一起看凡人修仙传][2025-09-07_00-16-11].mp4",
|
||||
"note": "",
|
||||
@@ -225,7 +225,7 @@
|
||||
"event": "clip.deleted",
|
||||
"payload": {
|
||||
"id": 313,
|
||||
"room_id": 27183290,
|
||||
"room_id": "27183290",
|
||||
"cover": "[27183290][1756903953470][不出非洲之心不下播][2025-09-03_21-10-54].jpg",
|
||||
"file": "[27183290][1756903953470][不出非洲之心不下播][2025-09-03_21-10-54].mp4",
|
||||
"note": "",
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<link rel="stylesheet" href="shaka-player/controls.min.css" />
|
||||
<link rel="stylesheet" href="shaka-player/controls.css" />
|
||||
<link rel="stylesheet" href="shaka-player/youtube-theme.css" />
|
||||
<script src="shaka-player/shaka-player.ui.js"></script>
|
||||
</head>
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "bili-shadowreplay",
|
||||
"private": true,
|
||||
"version": "2.12.2",
|
||||
"version": "2.16.1",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
@@ -30,7 +30,9 @@
|
||||
"@tauri-apps/plugin-sql": "~2",
|
||||
"lucide-svelte": "^0.479.0",
|
||||
"marked": "^16.1.1",
|
||||
"qrcode": "^1.5.4"
|
||||
"qrcode": "^1.5.4",
|
||||
"socket.io-client": "^4.8.1",
|
||||
"wavesurfer.js": "^7.11.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sveltejs/vite-plugin-svelte": "^2.0.0",
|
||||
|
||||
BIN
public/imgs/bilibili.png
Normal file
|
After Width: | Height: | Size: 26 KiB |
BIN
public/imgs/bilibili_avatar.png
Normal file
|
After Width: | Height: | Size: 38 KiB |
|
Before Width: | Height: | Size: 306 KiB After Width: | Height: | Size: 246 KiB |
BIN
public/imgs/douyin_avatar.png
Normal file
|
After Width: | Height: | Size: 153 KiB |
BIN
public/imgs/huya.png
Normal file
|
After Width: | Height: | Size: 219 KiB |
BIN
public/imgs/huya_avatar.png
Normal file
|
After Width: | Height: | Size: 865 KiB |
983
public/shaka-player/controls.css
Normal file
@@ -0,0 +1,983 @@
|
||||
/*! @license
|
||||
* Shaka Player
|
||||
* Copyright 2016 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
.shaka-hidden {
|
||||
display: none !important;
|
||||
}
|
||||
.shaka-video-container {
|
||||
position: relative;
|
||||
top: 0;
|
||||
left: 0;
|
||||
display: flex;
|
||||
font-family: Roboto, sans-serif, TengwarTelcontar;
|
||||
font-weight: 400;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
}
|
||||
.shaka-video-container .material-svg-icon {
|
||||
font-size: 24px;
|
||||
}
|
||||
.shaka-video-container:fullscreen {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #000;
|
||||
}
|
||||
.shaka-video-container:fullscreen .shaka-text-container {
|
||||
font-size: 4.4vmin;
|
||||
}
|
||||
.shaka-video-container:-webkit-full-screen {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #000;
|
||||
}
|
||||
.shaka-video-container:-webkit-full-screen .shaka-text-container {
|
||||
font-size: 4.4vmin;
|
||||
}
|
||||
.shaka-video-container:-moz-full-screen {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #000;
|
||||
}
|
||||
.shaka-video-container:-moz-full-screen .shaka-text-container {
|
||||
font-size: 4.4vmin;
|
||||
}
|
||||
.shaka-video-container:-ms-fullscreen {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #000;
|
||||
}
|
||||
.shaka-video-container:-ms-fullscreen .shaka-text-container {
|
||||
font-size: 4.4vmin;
|
||||
}
|
||||
.shaka-controls-container {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
box-sizing: border-box;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: flex-end;
|
||||
align-items: center;
|
||||
z-index: 1;
|
||||
}
|
||||
.shaka-video-container:not([shaka-controls="true"]) .shaka-controls-container {
|
||||
display: none;
|
||||
}
|
||||
.shaka-controls-container * {
|
||||
flex-shrink: 0;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-fullscreen-button {
|
||||
display: none;
|
||||
}
|
||||
.shaka-canvas-container {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
pointer-events: none;
|
||||
}
|
||||
.shaka-vr-canvas-container {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
pointer-events: none;
|
||||
}
|
||||
.shaka-bottom-controls {
|
||||
width: 98%;
|
||||
padding: 0;
|
||||
z-index: 1;
|
||||
}
|
||||
.shaka-controls-button-panel {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: flex-end;
|
||||
align-items: center;
|
||||
overflow: hidden;
|
||||
min-width: 48px;
|
||||
font-size: 12px;
|
||||
font-weight: 400;
|
||||
font-style: normal;
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-controls-button-panel,
|
||||
.shaka-controls-container[shown="true"] .shaka-controls-button-panel {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-controls-button-panel > * {
|
||||
color: #fff;
|
||||
height: 48px;
|
||||
width: 48px;
|
||||
line-height: 0.5;
|
||||
padding: 0 2px;
|
||||
background: 0 0;
|
||||
border: 0;
|
||||
cursor: pointer;
|
||||
opacity: 0.9;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.1s;
|
||||
text-shadow: 0 0 2px rgba(0, 0, 0, 0.5);
|
||||
}
|
||||
.shaka-controls-button-panel > .shaka-fast-forward-button .material-svg-icon,
|
||||
.shaka-controls-button-panel > .shaka-rewind-button .material-svg-icon,
|
||||
.shaka-controls-button-panel > .shaka-skip-next-button .material-svg-icon,
|
||||
.shaka-controls-button-panel > .shaka-skip-previous-button .material-svg-icon,
|
||||
.shaka-controls-button-panel > .shaka-small-play-button .material-svg-icon {
|
||||
font-size: 32px;
|
||||
}
|
||||
.shaka-controls-button-panel > .shaka-fullscreen-button .material-svg-icon {
|
||||
font-size: 24px;
|
||||
}
|
||||
.shaka-controls-button-panel > .shaka-overflow-menu-button {
|
||||
position: relative;
|
||||
}
|
||||
.shaka-controls-button-panel > .shaka-overflow-menu-button .material-svg-icon {
|
||||
font-size: 24px;
|
||||
}
|
||||
.shaka-controls-button-panel > :hover {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-controls-button-panel .shaka-overflow-menu-only {
|
||||
display: none;
|
||||
}
|
||||
.shaka-play-button-container {
|
||||
margin: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
flex-shrink: 1;
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
z-index: 1;
|
||||
}
|
||||
.shaka-statistics-container {
|
||||
overflow-x: hidden;
|
||||
overflow-y: auto;
|
||||
scrollbar-color: white rgba(0, 0, 0, 0.5);
|
||||
scrollbar-width: thin;
|
||||
min-width: 300px;
|
||||
color: #fff;
|
||||
background-color: rgba(35, 35, 35, 0.9);
|
||||
font-size: 14px;
|
||||
padding: 5px 10px;
|
||||
border-radius: 2px;
|
||||
position: absolute;
|
||||
z-index: 2;
|
||||
left: 15px;
|
||||
top: 15px;
|
||||
max-height: calc(100% - 115px);
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-statistics-container,
|
||||
.shaka-controls-container[shown="true"] .shaka-statistics-container {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-statistics-container div {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
}
|
||||
.shaka-statistics-container span {
|
||||
color: #969696;
|
||||
}
|
||||
.shaka-ad-statistics-container {
|
||||
overflow-x: hidden;
|
||||
overflow-y: auto;
|
||||
scrollbar-color: white rgba(0, 0, 0, 0.5);
|
||||
scrollbar-width: thin;
|
||||
min-width: 150px;
|
||||
color: #fff;
|
||||
background-color: rgba(35, 35, 35, 0.9);
|
||||
font-size: 14px;
|
||||
padding: 5px 10px;
|
||||
border-radius: 2px;
|
||||
position: absolute;
|
||||
z-index: 2;
|
||||
right: 15px;
|
||||
top: 15px;
|
||||
max-height: calc(100% - 115px);
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-ad-statistics-container,
|
||||
.shaka-controls-container[shown="true"] .shaka-ad-statistics-container {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-ad-statistics-container div {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
}
|
||||
.shaka-ad-statistics-container span {
|
||||
color: #969696;
|
||||
}
|
||||
.shaka-context-menu {
|
||||
overflow-x: hidden;
|
||||
overflow-y: auto;
|
||||
white-space: nowrap;
|
||||
background: rgba(28, 28, 28, 0.9);
|
||||
border-radius: 2px;
|
||||
min-width: 190px;
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: stretch;
|
||||
position: absolute;
|
||||
z-index: 3;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-context-menu,
|
||||
.shaka-controls-container[shown="true"] .shaka-context-menu {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-context-menu button {
|
||||
font-size: 14px;
|
||||
background: 0 0;
|
||||
color: #fff;
|
||||
border: none;
|
||||
min-height: 30px;
|
||||
padding: 10px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
cursor: pointer;
|
||||
}
|
||||
.shaka-context-menu button:hover {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
.shaka-context-menu button label {
|
||||
cursor: pointer;
|
||||
margin-left: 5px;
|
||||
}
|
||||
.shaka-keyboard-navigation .shaka-context-menu button:focus {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
.shaka-context-menu button .shaka-current-selection-span {
|
||||
display: none;
|
||||
}
|
||||
.shaka-scrim-container {
|
||||
margin: 0;
|
||||
width: 100%;
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
flex-shrink: 1;
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
height: 61px;
|
||||
background: linear-gradient(rgba(0, 0, 0, 0) 0, rgba(0, 0, 0, 0.5) 100%);
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-scrim-container,
|
||||
.shaka-controls-container[shown="true"] .shaka-scrim-container {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-text-container {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
pointer-events: none;
|
||||
bottom: 0;
|
||||
width: 100%;
|
||||
min-width: 48px;
|
||||
transition: bottom cubic-bezier(0.4, 0, 0.6, 1) 0.1s;
|
||||
transition-delay: 0.5s;
|
||||
font-size: 20px;
|
||||
line-height: 1.4;
|
||||
color: #fff;
|
||||
}
|
||||
.shaka-text-container span.shaka-text-wrapper {
|
||||
display: inline;
|
||||
background: 0 0;
|
||||
}
|
||||
.shaka-controls-container[shown="true"] ~ .shaka-text-container {
|
||||
transition-delay: 0s;
|
||||
}
|
||||
.shaka-spinner-container {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
flex-shrink: 1;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
.shaka-video-container:not([shaka-controls="true"]) .shaka-spinner-container {
|
||||
display: none;
|
||||
}
|
||||
.shaka-hidden-fast-forward-container,
|
||||
.shaka-hidden-rewind-container {
|
||||
height: 100%;
|
||||
width: 40%;
|
||||
flex-shrink: 1;
|
||||
z-index: 1;
|
||||
}
|
||||
.shaka-hidden-fast-forward-container {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
left: 60%;
|
||||
}
|
||||
.shaka-hidden-rewind-container {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
}
|
||||
.shaka-video-container.no-cursor {
|
||||
cursor: none !important;
|
||||
}
|
||||
.shaka-video-container.no-cursor * {
|
||||
cursor: none !important;
|
||||
}
|
||||
.shaka-play-button {
|
||||
box-sizing: border-box;
|
||||
padding: calc(15% / 2);
|
||||
width: 0;
|
||||
height: 0;
|
||||
margin: 0;
|
||||
border-radius: 50%;
|
||||
box-shadow: rgba(0, 0, 0, 0.1) 0 0 20px 0;
|
||||
border: none;
|
||||
background-size: 50%;
|
||||
background-repeat: no-repeat;
|
||||
background-position: center center;
|
||||
background-color: rgba(255, 255, 255, 0.9);
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-play-button,
|
||||
.shaka-controls-container[shown="true"] .shaka-play-button {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-play-button[icon="play"] {
|
||||
background-image: url("data:image/svg+xml,%3Csvg%20fill%3D%22%23000000%22%20height%3D%2224%22%20viewBox%3D%220%200%2024%2024%22%20width%3D%2224%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%0A%20%20%20%20%3Cpath%20d%3D%22M8%205v14l11-7z%22%2F%3E%0A%20%20%20%20%3Cpath%20d%3D%22M0%200h24v24H0z%22%20fill%3D%22none%22%2F%3E%0A%3C%2Fsvg%3E");
|
||||
}
|
||||
.shaka-play-button[icon="pause"] {
|
||||
background-image: url("data:image/svg+xml,%3Csvg%20fill%3D%22%23000000%22%20height%3D%2224%22%20viewBox%3D%220%200%2024%2024%22%20width%3D%2224%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%0A%20%20%20%20%3Cpath%20d%3D%22M6%2019h4V5H6v14zm8-14v14h4V5h-4z%22%2F%3E%0A%20%20%20%20%3Cpath%20d%3D%22M0%200h24v24H0z%22%20fill%3D%22none%22%2F%3E%0A%3C%2Fsvg%3E");
|
||||
}
|
||||
.shaka-play-button[icon="replay"] {
|
||||
background-image: url("data:image/svg+xml,%3Csvg%20fill%3D%22%231f1f1f%22%20height%3D%2224px%22%20viewBox%3D%220%20-960%20960%20960%22%20width%3D%2224px%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%0A%20%20%3Cpath%20d%3D%22M480-80q-75%200-140.5-28.5t-114-77q-48.5-48.5-77-114T120-440h80q0%20117%2081.5%20198.5T480-160q117%200%20198.5-81.5T760-440q0-117-81.5-198.5T480-720h-6l62%2062-56%2058-160-160%20160-160%2056%2058-62%2062h6q75%200%20140.5%2028.5t114%2077q48.5%2048.5%2077%20114T840-440q0%2075-28.5%20140.5t-77%20114q-48.5%2048.5-114%2077T480-80Z%22%2F%3E%0A%3C%2Fsvg%3E");
|
||||
}
|
||||
@media (prefers-reduced-transparency: no-preference) {
|
||||
.shaka-controls-container[shown="true"] .shaka-play-button {
|
||||
opacity: 0.75;
|
||||
}
|
||||
}
|
||||
.shaka-current-time {
|
||||
font-size: 14px;
|
||||
color: #fff;
|
||||
cursor: pointer;
|
||||
width: auto;
|
||||
padding: 0 5px;
|
||||
}
|
||||
.shaka-current-time[disabled] {
|
||||
background-color: transparent;
|
||||
color: #fff;
|
||||
cursor: default;
|
||||
}
|
||||
.shaka-controls-container button:focus,
|
||||
.shaka-controls-container input:focus {
|
||||
outline: 1px solid Highlight;
|
||||
}
|
||||
.shaka-controls-container button:-moz-focus-inner,
|
||||
.shaka-controls-container input:-moz-focus-outer {
|
||||
outline: 0;
|
||||
border: 0;
|
||||
}
|
||||
.shaka-controls-container:not(.shaka-keyboard-navigation) button:focus,
|
||||
.shaka-controls-container:not(.shaka-keyboard-navigation) input:focus {
|
||||
outline: 0;
|
||||
}
|
||||
.shaka-fast-forward-container,
|
||||
.shaka-rewind-container {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
flex-shrink: 1;
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
margin: 0;
|
||||
border: none;
|
||||
color: #fff;
|
||||
background-color: rgba(0, 0, 0, 0.5);
|
||||
cursor: default;
|
||||
font-size: 20px;
|
||||
opacity: 0;
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
}
|
||||
.shaka-fast-forward-container {
|
||||
border-radius: 40% 0 0 40%;
|
||||
}
|
||||
.shaka-rewind-container {
|
||||
border-radius: 0 40% 40% 0;
|
||||
}
|
||||
.shaka-forward-rewind-container-icon {
|
||||
font-size: 32px;
|
||||
}
|
||||
.shaka-range-container {
|
||||
position: relative;
|
||||
top: 0;
|
||||
left: 0;
|
||||
margin: calc((12px - 4px) / 2) 6px;
|
||||
height: 4px;
|
||||
border-radius: 4px;
|
||||
background: #fff;
|
||||
box-sizing: content-box;
|
||||
}
|
||||
.shaka-volume-bar-container {
|
||||
width: 100px;
|
||||
padding: 0;
|
||||
transition-property: opacity, width;
|
||||
transition-duration: 250ms;
|
||||
transition-timing-function: cubic-bezier(0.4, 0, 0.6, 1);
|
||||
}
|
||||
.shaka-volume-bar-container:hover {
|
||||
width: 100px !important;
|
||||
opacity: 1 !important;
|
||||
}
|
||||
@media (max-width: 474px) {
|
||||
.shaka-volume-bar-container {
|
||||
width: 50px;
|
||||
}
|
||||
.shaka-volume-bar-container:hover {
|
||||
width: 50px !important;
|
||||
}
|
||||
.shaka-mute-button:hover + .shaka-volume-bar-container-allow-hiding {
|
||||
width: 50px;
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
.shaka-mute-button
|
||||
+ .shaka-volume-bar-container-allow-hiding:not(:focus-within) {
|
||||
width: 0;
|
||||
opacity: 0;
|
||||
}
|
||||
@media (min-width: 475px) {
|
||||
.shaka-mute-button:hover + .shaka-volume-bar-container-allow-hiding {
|
||||
width: 100px;
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
.shaka-range-element {
|
||||
-webkit-appearance: none;
|
||||
background: 0 0;
|
||||
cursor: pointer;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
height: 12px;
|
||||
top: calc((4px - 12px) / 2);
|
||||
z-index: 1;
|
||||
}
|
||||
.shaka-range-element::-webkit-slider-runnable-track {
|
||||
width: 100%;
|
||||
cursor: pointer;
|
||||
height: 12px;
|
||||
background: 0 0;
|
||||
color: transparent;
|
||||
border: none;
|
||||
}
|
||||
.shaka-range-element::-webkit-slider-thumb {
|
||||
-webkit-appearance: none;
|
||||
border: none;
|
||||
border-radius: 12px;
|
||||
height: 12px;
|
||||
width: 12px;
|
||||
background: #fff;
|
||||
}
|
||||
.shaka-range-element::-moz-range-track {
|
||||
width: 100%;
|
||||
cursor: pointer;
|
||||
height: 12px;
|
||||
background: 0 0;
|
||||
color: transparent;
|
||||
border: none;
|
||||
}
|
||||
.shaka-range-element::-moz-range-thumb {
|
||||
-webkit-appearance: none;
|
||||
border: none;
|
||||
border-radius: 12px;
|
||||
height: 12px;
|
||||
width: 12px;
|
||||
background: #fff;
|
||||
}
|
||||
.shaka-seek-bar-container {
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
top: 5px;
|
||||
height: 5px;
|
||||
margin-bottom: 0;
|
||||
background-clip: padding-box !important;
|
||||
border-top: 4px solid transparent;
|
||||
border-bottom: 4px solid transparent;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-seek-bar-container,
|
||||
.shaka-controls-container[shown="true"] .shaka-seek-bar-container {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-seek-bar-container .shaka-seek-bar {
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 250ms;
|
||||
opacity: 0;
|
||||
}
|
||||
.shaka-seek-bar-container:hover .shaka-seek-bar {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-ad-markers {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
.shaka-spacer {
|
||||
cursor: default;
|
||||
flex-shrink: 1;
|
||||
flex-grow: 1;
|
||||
margin: 0;
|
||||
}
|
||||
.shaka-overflow-menu,
|
||||
.shaka-settings-menu {
|
||||
overflow-x: hidden;
|
||||
overflow-y: auto;
|
||||
scrollbar-color: white rgba(0, 0, 0, 0.5);
|
||||
scrollbar-width: thin;
|
||||
white-space: nowrap;
|
||||
background: rgba(28, 28, 28, 0.9);
|
||||
border-radius: 15px;
|
||||
max-height: 250px;
|
||||
min-width: 190px;
|
||||
padding: 5px 0;
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: stretch;
|
||||
position: absolute;
|
||||
z-index: 2;
|
||||
right: 15px;
|
||||
bottom: 62px;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-overflow-menu,
|
||||
.shaka-controls-container[casting="true"] .shaka-settings-menu,
|
||||
.shaka-controls-container[shown="true"] .shaka-overflow-menu,
|
||||
.shaka-controls-container[shown="true"] .shaka-settings-menu {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-overflow-menu button,
|
||||
.shaka-settings-menu button {
|
||||
font-size: 14px;
|
||||
background: 0 0;
|
||||
color: #fff;
|
||||
border: none;
|
||||
min-height: 30px;
|
||||
padding: 10px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
cursor: pointer;
|
||||
}
|
||||
.shaka-overflow-menu button:hover,
|
||||
.shaka-settings-menu button:hover {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
.shaka-overflow-menu button label,
|
||||
.shaka-settings-menu button label {
|
||||
cursor: pointer;
|
||||
}
|
||||
.shaka-keyboard-navigation .shaka-overflow-menu button:focus,
|
||||
.shaka-keyboard-navigation .shaka-settings-menu button:focus {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
.shaka-overflow-menu .material-svg-icon,
|
||||
.shaka-settings-menu .material-svg-icon {
|
||||
padding-left: 0;
|
||||
padding-right: 10px;
|
||||
}
|
||||
.shaka-overflow-menu .material-svg-icon.shaka-chosen-item,
|
||||
.shaka-settings-menu .material-svg-icon.shaka-chosen-item {
|
||||
order: -1;
|
||||
line-height: 17px;
|
||||
font-size: 18px;
|
||||
}
|
||||
.shaka-overflow-menu.shaka-low-position,
|
||||
.shaka-settings-menu.shaka-low-position {
|
||||
bottom: 48px;
|
||||
}
|
||||
.shaka-overflow-menu span {
|
||||
text-align: left;
|
||||
}
|
||||
.shaka-overflow-button-label {
|
||||
position: relative;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
.shaka-overflow-button-label-inline {
|
||||
box-sizing: border-box;
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
width: calc(100% - 34px);
|
||||
padding-right: 28px;
|
||||
background-image: url("data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIGhlaWdodD0iMjRweCIgdmlld0JveD0iMCAwIDI0IDI0IiB3aWR0aD0iMjRweCIgZmlsbD0iI2VlZWVlZSI+PHBhdGggZD0iTTAgMGgyNHYyNEgwVjB6IiBmaWxsPSJub25lIi8+PHBhdGggZD0iTTguNTkgMTYuNTlMMTMuMTcgMTIgOC41OSA3LjQxIDEwIDZsNiA2LTYgNi0xLjQxLTEuNDF6Ii8+PC9zdmc+");
|
||||
background-repeat: no-repeat;
|
||||
background-position: right 5px center;
|
||||
background-size: 24px 24px;
|
||||
}
|
||||
.shaka-simple-overflow-button-label-inline {
|
||||
box-sizing: border-box;
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
width: calc(100% - 50px);
|
||||
}
|
||||
.shaka-current-selection-span {
|
||||
font-size: 12px;
|
||||
padding-left: 10px;
|
||||
}
|
||||
.shaka-current-auto-quality {
|
||||
margin-left: 5px;
|
||||
font-size: 11px;
|
||||
color: #ccc;
|
||||
}
|
||||
.shaka-current-quality-mark,
|
||||
.shaka-quality-mark {
|
||||
color: red;
|
||||
margin-left: 2px !important;
|
||||
font-size: 10px;
|
||||
height: 17px;
|
||||
}
|
||||
.shaka-quality-mark {
|
||||
line-height: 6px;
|
||||
}
|
||||
.shaka-overflow-playback-rate-mark,
|
||||
.shaka-overflow-quality-mark {
|
||||
background: red;
|
||||
color: #fff;
|
||||
border-radius: 2px;
|
||||
font-family: Roboto, sans-serif, TengwarTelcontar;
|
||||
font-size: 10px;
|
||||
font-weight: 700;
|
||||
line-height: 10px;
|
||||
text-shadow: none;
|
||||
padding: 1px;
|
||||
position: absolute;
|
||||
right: 4px;
|
||||
top: 10px;
|
||||
}
|
||||
.shaka-settings-menu span {
|
||||
margin-left: 28px;
|
||||
}
|
||||
.shaka-settings-menu span.shaka-chosen-item {
|
||||
margin-left: 0;
|
||||
}
|
||||
.shaka-settings-menu .shaka-chapter {
|
||||
margin-left: 10px;
|
||||
}
|
||||
.shaka-back-to-overflow-button {
|
||||
border-bottom: 1px solid rgba(255, 255, 255, 0.2) !important;
|
||||
}
|
||||
.shaka-back-to-overflow-button span {
|
||||
margin-left: 0;
|
||||
}
|
||||
.shaka-back-to-overflow-button .material-svg-icon {
|
||||
padding-right: 10px;
|
||||
font-size: 18px !important;
|
||||
}
|
||||
.shaka-back-to-overflow-button:hover {
|
||||
background: 0 0 !important;
|
||||
}
|
||||
.shaka-controls-container[ad-active="true"] {
|
||||
pointer-events: none;
|
||||
}
|
||||
.shaka-controls-container[ad-active="true"] .shaka-bottom-controls {
|
||||
pointer-events: auto;
|
||||
}
|
||||
.shaka-client-side-ad-container,
|
||||
.shaka-server-side-ad-container {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
}
|
||||
.shaka-video-container[shaka-controls="true"]
|
||||
.shaka-client-side-ad-container
|
||||
iframe,
|
||||
.shaka-video-container[shaka-controls="true"]
|
||||
.shaka-server-side-ad-container
|
||||
iframe {
|
||||
height: 90%;
|
||||
}
|
||||
.shaka-ad-controls {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
z-index: 1;
|
||||
padding-bottom: 1%;
|
||||
}
|
||||
.shaka-video-container:not([shaka-controls="true"]) .shaka-ad-controls {
|
||||
display: none;
|
||||
}
|
||||
.shaka-ad-controls button,
|
||||
.shaka-ad-controls div {
|
||||
color: #fff;
|
||||
font-size: initial;
|
||||
}
|
||||
.shaka-ad-info {
|
||||
font-size: 14px;
|
||||
color: #fff;
|
||||
width: auto;
|
||||
padding: 0 5px;
|
||||
}
|
||||
.shaka-ad-info[disabled] {
|
||||
background-color: transparent;
|
||||
color: #fff;
|
||||
cursor: default;
|
||||
padding: 0;
|
||||
}
|
||||
.shaka-skip-ad-container {
|
||||
position: relative;
|
||||
right: calc((100% - 98%) / 2 * -1);
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
margin: 0;
|
||||
margin-left: auto;
|
||||
}
|
||||
.shaka-skip-ad-button {
|
||||
padding: 5px 15px;
|
||||
background: rgba(0, 0, 0, 0.7);
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
}
|
||||
.shaka-skip-ad-button:disabled {
|
||||
background: rgba(0, 0, 0, 0.3);
|
||||
}
|
||||
.shaka-skip-ad-counter {
|
||||
padding: 5px;
|
||||
background: rgba(0, 0, 0, 0.7);
|
||||
margin: 0;
|
||||
} /*!
|
||||
* @license
|
||||
* The tooltip is based on https://github.com/felipefialho/css-components/
|
||||
* Local modifications have been performed.
|
||||
*
|
||||
* Copyright (c) 2017 Felipe Fialho
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
.shaka-tooltips-on {
|
||||
overflow: visible;
|
||||
}
|
||||
.shaka-tooltips-on > .shaka-tooltip,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status {
|
||||
position: relative;
|
||||
}
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:active:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:focus-visible:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:hover:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip:active:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip:focus-visible:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip:hover:after {
|
||||
content: attr(aria-label);
|
||||
font-family: Roboto, sans-serif, TengwarTelcontar;
|
||||
line-height: 20px;
|
||||
white-space: nowrap;
|
||||
font-size: 14px;
|
||||
background: rgba(0, 0, 0, 0.5);
|
||||
color: #fff;
|
||||
border-radius: 2px;
|
||||
padding: 2px 10px;
|
||||
position: absolute;
|
||||
bottom: 62px;
|
||||
left: calc(48px / 2);
|
||||
-webkit-transform: translateX(-50%);
|
||||
-moz-transform: translateX(-50%);
|
||||
-ms-transform: translateX(-50%);
|
||||
-o-transform: translateX(-50%);
|
||||
transform: translateX(-50%);
|
||||
}
|
||||
@media (prefers-reduced-transparency) {
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:active:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:focus-visible:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:hover:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip:active:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip:focus-visible:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip:hover:after {
|
||||
background-color: rgba(0, 0, 0, 0.9);
|
||||
}
|
||||
}
|
||||
.shaka-tooltips-on.shaka-tooltips-low-position > .shaka-tooltip:active:after,
|
||||
.shaka-tooltips-on.shaka-tooltips-low-position
|
||||
> .shaka-tooltip:focus-visible:after,
|
||||
.shaka-tooltips-on.shaka-tooltips-low-position > .shaka-tooltip:hover:after {
|
||||
bottom: 48px;
|
||||
}
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:active:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:focus-visible:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:hover:after {
|
||||
content: attr(aria-label) " (" attr(shaka-status) ")";
|
||||
}
|
||||
.shaka-tooltips-on button:first-child:active:after,
|
||||
.shaka-tooltips-on button:first-child:focus-visible:after,
|
||||
.shaka-tooltips-on button:first-child:hover:after {
|
||||
left: 0;
|
||||
-webkit-transform: translateX(0);
|
||||
-moz-transform: translateX(0);
|
||||
-ms-transform: translateX(0);
|
||||
-o-transform: translateX(0);
|
||||
transform: translateX(0);
|
||||
}
|
||||
.shaka-tooltips-on button:last-child:active:after,
|
||||
.shaka-tooltips-on button:last-child:focus-visible:after,
|
||||
.shaka-tooltips-on button:last-child:hover:after {
|
||||
left: 48px;
|
||||
-webkit-transform: translateX(-100%);
|
||||
-moz-transform: translateX(-100%);
|
||||
-ms-transform: translateX(-100%);
|
||||
-o-transform: translateX(-100%);
|
||||
transform: translateX(-100%);
|
||||
}
|
||||
#shaka-player-ui-thumbnail-container {
|
||||
background-color: #000;
|
||||
border: 1px solid #000;
|
||||
box-shadow: 0 8px 8px 0 rgba(0, 0, 0, 0.5);
|
||||
min-width: 150px;
|
||||
overflow: hidden;
|
||||
position: absolute;
|
||||
visibility: hidden;
|
||||
width: 15%;
|
||||
z-index: 1;
|
||||
pointer-events: none;
|
||||
}
|
||||
#shaka-player-ui-thumbnail-container #shaka-player-ui-thumbnail-image {
|
||||
position: absolute;
|
||||
}
|
||||
#shaka-player-ui-thumbnail-container #shaka-player-ui-thumbnail-time-container {
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
position: absolute;
|
||||
right: 0;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
#shaka-player-ui-thumbnail-container
|
||||
#shaka-player-ui-thumbnail-time-container
|
||||
#shaka-player-ui-thumbnail-time {
|
||||
background-color: rgba(0, 0, 0, 0.5);
|
||||
border-radius: 14px;
|
||||
color: #fff;
|
||||
font-size: 14px;
|
||||
padding: 0 5px;
|
||||
}
|
||||
@media (prefers-reduced-transparency) {
|
||||
#shaka-player-ui-thumbnail-container
|
||||
#shaka-player-ui-thumbnail-time-container
|
||||
#shaka-player-ui-thumbnail-time {
|
||||
background-color: rgba(0, 0, 0, 0.9);
|
||||
}
|
||||
}
|
||||
#shaka-player-ui-thumbnail-container.portrait-thumbnail {
|
||||
min-width: 75px;
|
||||
width: 7.5%;
|
||||
}
|
||||
#shaka-player-ui-time-container {
|
||||
background-color: rgba(0, 0, 0, 0.5);
|
||||
border-radius: 5px;
|
||||
color: #fff;
|
||||
display: flex;
|
||||
font-size: 14px;
|
||||
justify-content: center;
|
||||
overflow: hidden;
|
||||
padding: 0 3px;
|
||||
position: absolute;
|
||||
visibility: hidden;
|
||||
z-index: 1;
|
||||
}
|
||||
@media (prefers-reduced-transparency) {
|
||||
#shaka-player-ui-time-container {
|
||||
background-color: rgba(0, 0, 0, 0.9);
|
||||
}
|
||||
}
|
||||
.material-svg-icon {
|
||||
display: inline-block;
|
||||
fill: currentcolor;
|
||||
width: 1em;
|
||||
height: 1em;
|
||||
}
|
||||
@font-face {
|
||||
font-family: Roboto;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
font-stretch: normal;
|
||||
src: url(./fonts/KFOMCnqEu92Fr1ME7kSn66aGLdTylUAMQXC89YmC2DPNWubEbVmUiA8.ttf)
|
||||
format("truetype");
|
||||
} /*# sourceMappingURL=controls.css.map */
|
||||
1
public/shaka-player/controls.css.map
Normal file
53
public/shaka-player/controls.min.css
vendored
BIN
public/shaka-player/fonts/KFOlCnqEu92Fr1MmEU9vAw.ttf
Normal file
BIN
public/shaka-player/fonts/KFOmCnqEu92Fr1Me5Q.ttf
Normal file
7727
public/shaka-player/shaka-player.ui.debug.externs.js
Normal file
7727
public/shaka-player/shaka-player.ui.externs.js
Normal file
@@ -1,19 +1,19 @@
|
||||
@font-face {
|
||||
font-family: 'Roboto';
|
||||
font-family: "Roboto";
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
font-display: swap;
|
||||
src: url(https://fonts.gstatic.com/s/roboto/v27/KFOmCnqEu92Fr1Me5Q.ttf) format('truetype');
|
||||
src: url(./fonts/KFOmCnqEu92Fr1Me5Q.ttf) format("truetype");
|
||||
}
|
||||
@font-face {
|
||||
font-family: 'Roboto';
|
||||
font-family: "Roboto";
|
||||
font-style: normal;
|
||||
font-weight: 500;
|
||||
font-display: swap;
|
||||
src: url(https://fonts.gstatic.com/s/roboto/v27/KFOlCnqEu92Fr1MmEU9vAw.ttf) format('truetype');
|
||||
src: url(./fonts/KFOlCnqEu92Fr1MmEU9vAw.ttf) format("truetype");
|
||||
}
|
||||
.youtube-theme {
|
||||
font-family: 'Roboto', sans-serif;
|
||||
font-family: "Roboto", sans-serif;
|
||||
}
|
||||
.youtube-theme .shaka-bottom-controls {
|
||||
width: 100%;
|
||||
@@ -27,18 +27,18 @@
|
||||
display: flex;
|
||||
-webkit-box-orient: vertical;
|
||||
-webkit-box-direction: normal;
|
||||
-ms-flex-direction: column;
|
||||
flex-direction: column;
|
||||
-ms-flex-direction: column;
|
||||
flex-direction: column;
|
||||
}
|
||||
.youtube-theme .shaka-ad-controls {
|
||||
-webkit-box-ordinal-group: 2;
|
||||
-ms-flex-order: 1;
|
||||
order: 1;
|
||||
-ms-flex-order: 1;
|
||||
order: 1;
|
||||
}
|
||||
.youtube-theme .shaka-controls-button-panel {
|
||||
-webkit-box-ordinal-group: 3;
|
||||
-ms-flex-order: 2;
|
||||
order: 2;
|
||||
-ms-flex-order: 2;
|
||||
order: 2;
|
||||
height: 40px;
|
||||
padding: 0 10px;
|
||||
}
|
||||
@@ -48,36 +48,36 @@
|
||||
}
|
||||
.youtube-theme .shaka-small-play-button {
|
||||
-webkit-box-ordinal-group: -2;
|
||||
-ms-flex-order: -3;
|
||||
order: -3;
|
||||
-ms-flex-order: -3;
|
||||
order: -3;
|
||||
}
|
||||
.youtube-theme .shaka-mute-button {
|
||||
-webkit-box-ordinal-group: -1;
|
||||
-ms-flex-order: -2;
|
||||
order: -2;
|
||||
-ms-flex-order: -2;
|
||||
order: -2;
|
||||
}
|
||||
.youtube-theme .shaka-controls-button-panel > * {
|
||||
margin: 0;
|
||||
padding: 3px 8px;
|
||||
color: #EEE;
|
||||
color: #eee;
|
||||
height: 40px;
|
||||
}
|
||||
.youtube-theme .shaka-controls-button-panel > *:focus {
|
||||
outline: none;
|
||||
-webkit-box-shadow: inset 0 0 0 2px rgba(27, 127, 204, 0.8);
|
||||
box-shadow: inset 0 0 0 2px rgba(27, 127, 204, 0.8);
|
||||
color: #FFF;
|
||||
box-shadow: inset 0 0 0 2px rgba(27, 127, 204, 0.8);
|
||||
color: #fff;
|
||||
}
|
||||
.youtube-theme .shaka-controls-button-panel > *:hover {
|
||||
color: #FFF;
|
||||
color: #fff;
|
||||
}
|
||||
.youtube-theme .shaka-controls-button-panel .shaka-volume-bar-container {
|
||||
position: relative;
|
||||
z-index: 10;
|
||||
left: -1px;
|
||||
-webkit-box-ordinal-group: 0;
|
||||
-ms-flex-order: -1;
|
||||
order: -1;
|
||||
-ms-flex-order: -1;
|
||||
order: -1;
|
||||
opacity: 0;
|
||||
width: 0px;
|
||||
-webkit-transition: width 0.2s cubic-bezier(0.4, 0, 1, 1);
|
||||
@@ -120,23 +120,25 @@
|
||||
opacity: 1;
|
||||
cursor: pointer;
|
||||
}
|
||||
.youtube-theme .shaka-seek-bar-container input[type=range]::-webkit-slider-thumb {
|
||||
background: #FF0000;
|
||||
.youtube-theme
|
||||
.shaka-seek-bar-container
|
||||
input[type="range"]::-webkit-slider-thumb {
|
||||
background: #ff0000;
|
||||
cursor: pointer;
|
||||
}
|
||||
.youtube-theme .shaka-seek-bar-container input[type=range]::-moz-range-thumb {
|
||||
background: #FF0000;
|
||||
.youtube-theme .shaka-seek-bar-container input[type="range"]::-moz-range-thumb {
|
||||
background: #ff0000;
|
||||
cursor: pointer;
|
||||
}
|
||||
.youtube-theme .shaka-seek-bar-container input[type=range]::-ms-thumb {
|
||||
background: #FF0000;
|
||||
.youtube-theme .shaka-seek-bar-container input[type="range"]::-ms-thumb {
|
||||
background: #ff0000;
|
||||
cursor: pointer;
|
||||
}
|
||||
.youtube-theme .shaka-video-container * {
|
||||
font-family: 'Roboto', sans-serif;
|
||||
font-family: "Roboto", sans-serif;
|
||||
}
|
||||
.youtube-theme .shaka-video-container .material-icons-round {
|
||||
font-family: 'Material Icons Sharp';
|
||||
font-family: "Material Icons Sharp";
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu,
|
||||
.youtube-theme .shaka-settings-menu {
|
||||
@@ -170,14 +172,14 @@
|
||||
}
|
||||
.youtube-theme .shaka-settings-menu button[aria-selected="true"] span {
|
||||
-webkit-box-ordinal-group: 3;
|
||||
-ms-flex-order: 2;
|
||||
order: 2;
|
||||
-ms-flex-order: 2;
|
||||
order: 2;
|
||||
margin-left: 0;
|
||||
}
|
||||
.youtube-theme .shaka-settings-menu button[aria-selected="true"] i {
|
||||
-webkit-box-ordinal-group: 2;
|
||||
-ms-flex-order: 1;
|
||||
order: 1;
|
||||
-ms-flex-order: 1;
|
||||
order: 1;
|
||||
font-size: 18px;
|
||||
padding-left: 5px;
|
||||
}
|
||||
@@ -192,25 +194,25 @@
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-box-pack: justify;
|
||||
-ms-flex-pack: justify;
|
||||
justify-content: space-between;
|
||||
-ms-flex-pack: justify;
|
||||
justify-content: space-between;
|
||||
-webkit-box-orient: horizontal;
|
||||
-webkit-box-direction: normal;
|
||||
-ms-flex-direction: row;
|
||||
flex-direction: row;
|
||||
-ms-flex-direction: row;
|
||||
flex-direction: row;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
cursor: default;
|
||||
outline: none;
|
||||
height: 40px;
|
||||
-webkit-box-flex: 0;
|
||||
-ms-flex: 0 0 100%;
|
||||
flex: 0 0 100%;
|
||||
-ms-flex: 0 0 100%;
|
||||
flex: 0 0 100%;
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu button .shaka-overflow-button-label span {
|
||||
-ms-flex-negative: initial;
|
||||
flex-shrink: initial;
|
||||
flex-shrink: initial;
|
||||
padding-left: 15px;
|
||||
font-size: 13px;
|
||||
font-weight: 500;
|
||||
@@ -218,11 +220,11 @@
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu span + span {
|
||||
color: #FFF;
|
||||
color: #fff;
|
||||
font-weight: 400 !important;
|
||||
font-size: 12px !important;
|
||||
padding-right: 8px;
|
||||
@@ -230,7 +232,7 @@
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu span + span:after {
|
||||
content: "navigate_next";
|
||||
font-family: 'Material Icons Sharp';
|
||||
font-family: "Material Icons Sharp";
|
||||
font-size: 20px;
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu .shaka-pip-button span + span {
|
||||
@@ -270,10 +272,10 @@
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu button,
|
||||
.youtube-theme .shaka-settings-menu button {
|
||||
color: #EEE;
|
||||
color: #eee;
|
||||
}
|
||||
.youtube-theme .shaka-captions-off {
|
||||
color: #BFBFBF;
|
||||
color: #bfbfbf;
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu-button {
|
||||
font-size: 18px;
|
||||
|
||||
678
src-tauri/Cargo.lock
generated
@@ -1,22 +1,30 @@
|
||||
[workspace]
|
||||
members = ["crates/danmu_stream"]
|
||||
members = ["crates/danmu_stream", "crates/recorder"]
|
||||
resolver = "2"
|
||||
|
||||
[package]
|
||||
name = "bili-shadowreplay"
|
||||
version = "2.12.2"
|
||||
version = "2.16.1"
|
||||
description = "BiliBili ShadowReplay"
|
||||
authors = ["Xinrea"]
|
||||
license = ""
|
||||
repository = ""
|
||||
edition = "2021"
|
||||
|
||||
[lints.clippy]
|
||||
correctness="deny"
|
||||
suspicious="deny"
|
||||
complexity="deny"
|
||||
style="deny"
|
||||
perf="deny"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
danmu_stream = { path = "crates/danmu_stream" }
|
||||
recorder = { path = "crates/recorder" }
|
||||
serde_json = "1.0"
|
||||
reqwest = { version = "0.11", features = ["blocking", "json", "multipart"] }
|
||||
reqwest = { workspace = true}
|
||||
serde_derive = "1.0.158"
|
||||
serde = "1.0.158"
|
||||
sysinfo = "0.32.0"
|
||||
@@ -25,7 +33,6 @@ async-std = "1.12.0"
|
||||
async-ffmpeg-sidecar = "0.0.1"
|
||||
chrono = { version = "0.4.24", features = ["serde"] }
|
||||
toml = "0.7.3"
|
||||
custom_error = "1.9.2"
|
||||
regex = "1.7.3"
|
||||
tokio = { version = "1.27.0", features = ["process"] }
|
||||
platform-dirs = "0.3.0"
|
||||
@@ -43,16 +50,21 @@ mime_guess = "2.0"
|
||||
async-trait = "0.1.87"
|
||||
whisper-rs = "0.14.2"
|
||||
hound = "3.5.1"
|
||||
uuid = { version = "1.4", features = ["v4"] }
|
||||
uuid = { workspace = true }
|
||||
axum = { version = "0.7", features = ["macros", "multipart"] }
|
||||
tower-http = { version = "0.5", features = ["cors", "fs"] }
|
||||
futures-core = "0.3"
|
||||
futures = "0.3"
|
||||
tokio-util = { version = "0.7", features = ["io"] }
|
||||
tokio-stream = "0.1"
|
||||
clap = { version = "4.5.37", features = ["derive"] }
|
||||
url = "2.5.4"
|
||||
srtparse = "0.2.0"
|
||||
thiserror = "1.0"
|
||||
thiserror = "2"
|
||||
deno_core = "0.355"
|
||||
sanitize-filename = "0.6.0"
|
||||
socketioxide = "0.17.2"
|
||||
scraper = "0.24.0"
|
||||
|
||||
[features]
|
||||
# this feature is used for production builds or when `devPath` points to the filesystem
|
||||
@@ -139,3 +151,7 @@ whisper-rs = { version = "0.14.2", default-features = false }
|
||||
[target.'cfg(darwin)'.dependencies.whisper-rs]
|
||||
version = "0.14.2"
|
||||
features = ["metal"]
|
||||
|
||||
[workspace.dependencies]
|
||||
reqwest = { version = "0.11", features = ["blocking", "json", "multipart", "gzip"] }
|
||||
uuid = { version = "1.4", features = ["v4"] }
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
fn main() {
|
||||
#[cfg(feature = "gui")]
|
||||
tauri_build::build()
|
||||
tauri_build::build();
|
||||
}
|
||||
|
||||
@@ -2,11 +2,7 @@
|
||||
"identifier": "migrated",
|
||||
"description": "permissions that were migrated from v1",
|
||||
"local": true,
|
||||
"windows": [
|
||||
"main",
|
||||
"Live*",
|
||||
"Clip*"
|
||||
],
|
||||
"windows": ["main", "Live*", "Clip*"],
|
||||
"permissions": [
|
||||
"core:default",
|
||||
"fs:allow-read-file",
|
||||
@@ -20,9 +16,7 @@
|
||||
"fs:allow-exists",
|
||||
{
|
||||
"identifier": "fs:scope",
|
||||
"allow": [
|
||||
"**"
|
||||
]
|
||||
"allow": ["**"]
|
||||
},
|
||||
"core:window:default",
|
||||
"core:window:allow-start-dragging",
|
||||
@@ -42,19 +36,10 @@
|
||||
"identifier": "http:default",
|
||||
"allow": [
|
||||
{
|
||||
"url": "https://*.hdslb.com/"
|
||||
"url": "https://*.*"
|
||||
},
|
||||
{
|
||||
"url": "https://afdian.com/"
|
||||
},
|
||||
{
|
||||
"url": "https://*.afdiancdn.com/"
|
||||
},
|
||||
{
|
||||
"url": "https://*.douyin.com/"
|
||||
},
|
||||
{
|
||||
"url": "https://*.douyinpic.com/"
|
||||
"url": "http://*.*"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -74,4 +59,4 @@
|
||||
"dialog:default",
|
||||
"deep-link:default"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,9 +10,6 @@ whisper_model = "./whisper_model.bin"
|
||||
whisper_prompt = "这是一段中文 你们好"
|
||||
openai_api_key = ""
|
||||
clip_name_format = "[{room_id}][{live_id}][{title}][{created_at}].mp4"
|
||||
# FLV 转换后自动清理源文件
|
||||
# 启用后,导入 FLV 视频并自动转换为 MP4 后,会删除原始 FLV 文件以节省存储空间
|
||||
cleanup_source_flv_after_import = false
|
||||
|
||||
[auto_generate]
|
||||
enabled = false
|
||||
|
||||
@@ -42,7 +42,7 @@ urlencoding = "2.1"
|
||||
gzip = "0.1.2"
|
||||
hex = "0.4.3"
|
||||
async-trait = "0.1"
|
||||
uuid = "1"
|
||||
uuid = { workspace = true}
|
||||
|
||||
[build-dependencies]
|
||||
tonic-build = "0.14"
|
||||
|
||||
@@ -8,7 +8,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Initialize logging
|
||||
env_logger::init();
|
||||
// Replace these with actual values
|
||||
let room_id = 768756;
|
||||
let room_id = "768756";
|
||||
let cookie = "";
|
||||
let stream = Arc::new(DanmuStream::new(ProviderType::BiliBili, cookie, room_id).await?);
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Initialize logging
|
||||
env_logger::init();
|
||||
// Replace these with actual values
|
||||
let room_id = 7514298567821937427; // Replace with actual Douyin room_id. When live starts, the room_id will be generated, so it's more like a live_id.
|
||||
let room_id = "7514298567821937427"; // Replace with actual Douyin room_id. When live starts, the room_id will be generated, so it's more like a live_id.
|
||||
let cookie = "your_cookie";
|
||||
let stream = Arc::new(DanmuStream::new(ProviderType::Douyin, cookie, room_id).await?);
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ use crate::{
|
||||
pub struct DanmuStream {
|
||||
pub provider_type: ProviderType,
|
||||
pub identifier: String,
|
||||
pub room_id: u64,
|
||||
pub room_id: String,
|
||||
pub provider: Arc<RwLock<Box<dyn DanmuProvider>>>,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
rx: Arc<RwLock<mpsc::UnboundedReceiver<DanmuMessageType>>>,
|
||||
@@ -21,14 +21,14 @@ impl DanmuStream {
|
||||
pub async fn new(
|
||||
provider_type: ProviderType,
|
||||
identifier: &str,
|
||||
room_id: u64,
|
||||
room_id: &str,
|
||||
) -> Result<Self, DanmuStreamError> {
|
||||
let (tx, rx) = mpsc::unbounded_channel();
|
||||
let provider = new(provider_type, identifier, room_id).await?;
|
||||
Ok(Self {
|
||||
provider_type,
|
||||
identifier: identifier.to_string(),
|
||||
room_id,
|
||||
room_id: room_id.to_string(),
|
||||
provider: Arc::new(RwLock::new(provider)),
|
||||
tx,
|
||||
rx: Arc::new(RwLock::new(rx)),
|
||||
|
||||
@@ -29,7 +29,7 @@ pub enum DanmuMessageType {
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DanmuMessage {
|
||||
pub room_id: u64,
|
||||
pub room_id: String,
|
||||
pub user_id: u64,
|
||||
pub user_name: String,
|
||||
pub message: String,
|
||||
|
||||
@@ -36,15 +36,15 @@ type WsWriteType = futures_util::stream::SplitSink<
|
||||
|
||||
pub struct BiliDanmu {
|
||||
client: ApiClient,
|
||||
room_id: u64,
|
||||
user_id: u64,
|
||||
room_id: String,
|
||||
user_id: i64,
|
||||
stop: Arc<RwLock<bool>>,
|
||||
write: Arc<RwLock<Option<WsWriteType>>>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl DanmuProvider for BiliDanmu {
|
||||
async fn new(cookie: &str, room_id: u64) -> Result<Self, DanmuStreamError> {
|
||||
async fn new(cookie: &str, room_id: &str) -> Result<Self, DanmuStreamError> {
|
||||
// find DedeUserID=<user_id> in cookie str
|
||||
let user_id = BiliDanmu::parse_user_id(cookie)?;
|
||||
// add buvid3 to cookie
|
||||
@@ -54,7 +54,7 @@ impl DanmuProvider for BiliDanmu {
|
||||
Ok(Self {
|
||||
client,
|
||||
user_id,
|
||||
room_id,
|
||||
room_id: room_id.to_string(),
|
||||
stop: Arc::new(RwLock::new(false)),
|
||||
write: Arc::new(RwLock::new(None)),
|
||||
})
|
||||
@@ -86,7 +86,7 @@ impl DanmuProvider for BiliDanmu {
|
||||
"Bilibili WebSocket connection closed normally, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
break;
|
||||
retry_count = 0;
|
||||
}
|
||||
Err(e) => {
|
||||
error!(
|
||||
@@ -126,8 +126,10 @@ impl BiliDanmu {
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let wbi_key = self.get_wbi_key().await?;
|
||||
let real_room = self.get_real_room(&wbi_key, self.room_id).await?;
|
||||
let danmu_info = self.get_danmu_info(&wbi_key, real_room).await?;
|
||||
let real_room = self.get_real_room(&wbi_key, &self.room_id).await?;
|
||||
let danmu_info = self
|
||||
.get_danmu_info(&wbi_key, real_room.to_string().as_str())
|
||||
.await?;
|
||||
let ws_hosts = danmu_info.data.host_list.clone();
|
||||
let mut conn = None;
|
||||
log::debug!("ws_hosts: {:?}", ws_hosts);
|
||||
@@ -241,7 +243,7 @@ impl BiliDanmu {
|
||||
async fn get_danmu_info(
|
||||
&self,
|
||||
wbi_key: &str,
|
||||
room_id: u64,
|
||||
room_id: &str,
|
||||
) -> Result<DanmuInfo, DanmuStreamError> {
|
||||
let params = self
|
||||
.get_sign(
|
||||
@@ -268,7 +270,7 @@ impl BiliDanmu {
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn get_real_room(&self, wbi_key: &str, room_id: u64) -> Result<u64, DanmuStreamError> {
|
||||
async fn get_real_room(&self, wbi_key: &str, room_id: &str) -> Result<i64, DanmuStreamError> {
|
||||
let params = self
|
||||
.get_sign(
|
||||
wbi_key,
|
||||
@@ -296,14 +298,14 @@ impl BiliDanmu {
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
fn parse_user_id(cookie: &str) -> Result<u64, DanmuStreamError> {
|
||||
fn parse_user_id(cookie: &str) -> Result<i64, DanmuStreamError> {
|
||||
let mut user_id = None;
|
||||
|
||||
// find DedeUserID=<user_id> in cookie str
|
||||
let re = Regex::new(r"DedeUserID=(\d+)").unwrap();
|
||||
if let Some(captures) = re.captures(cookie) {
|
||||
if let Some(user) = captures.get(1) {
|
||||
user_id = Some(user.as_str().parse::<u64>().unwrap());
|
||||
user_id = Some(user.as_str().parse::<i64>().unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -407,8 +409,8 @@ impl BiliDanmu {
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct WsSend {
|
||||
uid: u64,
|
||||
roomid: u64,
|
||||
uid: i64,
|
||||
roomid: i64,
|
||||
key: String,
|
||||
protover: u32,
|
||||
platform: String,
|
||||
@@ -439,5 +441,5 @@ pub struct RoomInit {
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct RoomInitData {
|
||||
room_id: u64,
|
||||
room_id: i64,
|
||||
}
|
||||
|
||||
@@ -65,7 +65,7 @@ impl WsStreamCtx {
|
||||
|
||||
if let Some(danmu_msg) = danmu_msg {
|
||||
Ok(DanmuMessageType::DanmuMessage(DanmuMessage {
|
||||
room_id: 0,
|
||||
room_id: "".to_string(),
|
||||
user_id: danmu_msg.uid,
|
||||
user_name: danmu_msg.username,
|
||||
message: danmu_msg.msg,
|
||||
|
||||
@@ -33,7 +33,7 @@ type WsWriteType =
|
||||
futures_util::stream::SplitSink<WebSocketStream<MaybeTlsStream<TcpStream>>, WsMessage>;
|
||||
|
||||
pub struct DouyinDanmu {
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
cookie: String,
|
||||
stop: Arc<RwLock<bool>>,
|
||||
write: Arc<RwLock<Option<WsWriteType>>>,
|
||||
@@ -192,7 +192,7 @@ impl DouyinDanmu {
|
||||
});
|
||||
|
||||
// Main message handling loop
|
||||
let room_id = self.room_id;
|
||||
let room_id = self.room_id.clone();
|
||||
let stop = Arc::clone(&self.stop);
|
||||
let write = Arc::clone(&self.write);
|
||||
let message_handle = tokio::spawn(async move {
|
||||
@@ -210,7 +210,7 @@ impl DouyinDanmu {
|
||||
|
||||
match msg {
|
||||
WsMessage::Binary(data) => {
|
||||
if let Ok(Some(ack)) = handle_binary_message(&data, &tx, room_id).await {
|
||||
if let Ok(Some(ack)) = handle_binary_message(&data, &tx, &room_id).await {
|
||||
if let Some(write) = write.write().await.as_mut() {
|
||||
if let Err(e) =
|
||||
write.send(WsMessage::binary(ack.encode_to_vec())).await
|
||||
@@ -268,7 +268,7 @@ impl DouyinDanmu {
|
||||
async fn handle_binary_message(
|
||||
data: &[u8],
|
||||
tx: &mpsc::UnboundedSender<DanmuMessageType>,
|
||||
room_id: u64,
|
||||
room_id: &str,
|
||||
) -> Result<Option<PushFrame>, DanmuStreamError> {
|
||||
// First decode the PushFrame
|
||||
let push_frame = PushFrame::decode(Bytes::from(data.to_vec())).map_err(|e| {
|
||||
@@ -328,7 +328,7 @@ async fn handle_binary_message(
|
||||
})?;
|
||||
if let Some(user) = chat_msg.user {
|
||||
let danmu_msg = DanmuMessage {
|
||||
room_id,
|
||||
room_id: room_id.to_string(),
|
||||
user_id: user.id,
|
||||
user_name: user.nick_name,
|
||||
message: chat_msg.content,
|
||||
@@ -394,9 +394,9 @@ async fn handle_binary_message(
|
||||
|
||||
#[async_trait]
|
||||
impl DanmuProvider for DouyinDanmu {
|
||||
async fn new(identifier: &str, room_id: u64) -> Result<Self, DanmuStreamError> {
|
||||
async fn new(identifier: &str, room_id: &str) -> Result<Self, DanmuStreamError> {
|
||||
Ok(Self {
|
||||
room_id,
|
||||
room_id: room_id.to_string(),
|
||||
cookie: identifier.to_string(),
|
||||
stop: Arc::new(RwLock::new(false)),
|
||||
write: Arc::new(RwLock::new(None)),
|
||||
@@ -408,7 +408,6 @@ impl DanmuProvider for DouyinDanmu {
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let mut retry_count = 0;
|
||||
const MAX_RETRIES: u32 = 5;
|
||||
const RETRY_DELAY: Duration = Duration::from_secs(5);
|
||||
info!(
|
||||
"Douyin WebSocket connection started, room_id: {}",
|
||||
@@ -422,28 +421,25 @@ impl DanmuProvider for DouyinDanmu {
|
||||
|
||||
match self.connect_and_handle(tx.clone()).await {
|
||||
Ok(_) => {
|
||||
info!("Douyin WebSocket connection closed normally");
|
||||
break;
|
||||
info!(
|
||||
"Douyin WebSocket connection closed normally, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
retry_count = 0;
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Douyin WebSocket connection error: {}", e);
|
||||
retry_count += 1;
|
||||
|
||||
if retry_count >= MAX_RETRIES {
|
||||
return Err(DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to connect after {} retries", MAX_RETRIES),
|
||||
});
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}/{})",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
MAX_RETRIES
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
}
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}), room_id: {}",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
self.room_id
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -17,7 +17,7 @@ pub enum ProviderType {
|
||||
|
||||
#[async_trait]
|
||||
pub trait DanmuProvider: Send + Sync {
|
||||
async fn new(identifier: &str, room_id: u64) -> Result<Self, DanmuStreamError>
|
||||
async fn new(identifier: &str, room_id: &str) -> Result<Self, DanmuStreamError>
|
||||
where
|
||||
Self: Sized;
|
||||
|
||||
@@ -57,7 +57,7 @@ pub trait DanmuProvider: Send + Sync {
|
||||
pub async fn new(
|
||||
provider_type: ProviderType,
|
||||
identifier: &str,
|
||||
room_id: u64,
|
||||
room_id: &str,
|
||||
) -> Result<Box<dyn DanmuProvider>, DanmuStreamError> {
|
||||
match provider_type {
|
||||
ProviderType::BiliBili => {
|
||||
|
||||
37
src-tauri/crates/recorder/Cargo.toml
Normal file
@@ -0,0 +1,37 @@
|
||||
[package]
|
||||
name = "recorder"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "recorder"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
danmu_stream = { path = "../danmu_stream" }
|
||||
async-trait = "0.1.89"
|
||||
rand = "0.9.2"
|
||||
chrono = "0.4.42"
|
||||
tokio = "1.48.0"
|
||||
reqwest = { workspace = true}
|
||||
pct-str = "2.0.0"
|
||||
serde_json = "1.0.145"
|
||||
serde = "1.0.228"
|
||||
regex = "1.12.2"
|
||||
deno_core = "0.355"
|
||||
uuid = { workspace = true}
|
||||
serde_derive = "1.0.228"
|
||||
thiserror = "2.0.17"
|
||||
log = "0.4.28"
|
||||
sanitize-filename = "0.6.0"
|
||||
m3u8-rs = "6.0.0"
|
||||
async-ffmpeg-sidecar = "0.0.3"
|
||||
md5 = "0.8.0"
|
||||
scraper = "0.24.0"
|
||||
base64 = "0.22.1"
|
||||
url = "2.5.0"
|
||||
urlencoding = "2.1.3"
|
||||
fastrand = "2.0.1"
|
||||
|
||||
[dev-dependencies]
|
||||
env_logger = "0.11"
|
||||
9
src-tauri/crates/recorder/src/account.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct Account {
|
||||
pub platform: String,
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub avatar: String,
|
||||
pub csrf: String,
|
||||
pub cookies: String,
|
||||
}
|
||||
431
src-tauri/crates/recorder/src/core/hls_recorder.rs
Normal file
@@ -0,0 +1,431 @@
|
||||
use std::path::Path;
|
||||
use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering};
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
|
||||
use m3u8_rs::{MediaPlaylist, Playlist};
|
||||
use reqwest::header::HeaderMap;
|
||||
use std::time::Duration;
|
||||
use tokio::fs::{File, OpenOptions};
|
||||
use tokio::io::{AsyncReadExt, AsyncWriteExt};
|
||||
use tokio::sync::{broadcast, Mutex, RwLock};
|
||||
|
||||
use crate::core::playlist::HlsPlaylist;
|
||||
use crate::core::{Codec, Format};
|
||||
use crate::errors::RecorderError;
|
||||
use crate::ffmpeg::VideoMetadata;
|
||||
use crate::{core::HlsStream, events::RecorderEvent};
|
||||
|
||||
const UPDATE_TIMEOUT: Duration = Duration::from_secs(20);
|
||||
const UPDATE_INTERVAL: Duration = Duration::from_secs(1);
|
||||
const PLAYLIST_FILE_NAME: &str = "playlist.m3u8";
|
||||
const DOWNLOAD_RETRY: u32 = 3;
|
||||
/// A recorder for HLS streams
|
||||
///
|
||||
/// This recorder fetches, caches and serves TS entries, currently supporting `StreamType::FMP4, StreamType::TS`.
|
||||
///
|
||||
/// Segments will be downloaded to work_dir, and `playlist.m3u8` will be generated in work_dir.
|
||||
#[derive(Clone)]
|
||||
pub struct HlsRecorder {
|
||||
room_id: String,
|
||||
stream: Arc<HlsStream>,
|
||||
client: reqwest::Client,
|
||||
event_channel: broadcast::Sender<RecorderEvent>,
|
||||
work_dir: PathBuf,
|
||||
playlist: Arc<Mutex<HlsPlaylist>>,
|
||||
headers: HeaderMap,
|
||||
|
||||
enabled: Arc<AtomicBool>,
|
||||
|
||||
sequence: Arc<AtomicU64>,
|
||||
updated_at: Arc<AtomicI64>,
|
||||
|
||||
cached_duration_secs: Arc<AtomicU64>,
|
||||
cached_size_bytes: Arc<AtomicU64>,
|
||||
|
||||
pre_metadata: Arc<RwLock<Option<VideoMetadata>>>,
|
||||
}
|
||||
|
||||
impl HlsRecorder {
|
||||
pub async fn new(
|
||||
room_id: String,
|
||||
stream: Arc<HlsStream>,
|
||||
client: reqwest::Client,
|
||||
cookies: Option<String>,
|
||||
event_channel: broadcast::Sender<RecorderEvent>,
|
||||
work_dir: PathBuf,
|
||||
enabled: Arc<AtomicBool>,
|
||||
) -> Self {
|
||||
// try to create work_dir
|
||||
if !work_dir.exists() {
|
||||
std::fs::create_dir_all(&work_dir).unwrap();
|
||||
}
|
||||
let playlist_path = work_dir.join(PLAYLIST_FILE_NAME);
|
||||
|
||||
// set user agent
|
||||
let user_agent =
|
||||
crate::utils::user_agent_generator::UserAgentGenerator::new().generate(false);
|
||||
let mut headers = reqwest::header::HeaderMap::new();
|
||||
headers.insert("user-agent", user_agent.parse().unwrap());
|
||||
if let Some(cookies) = cookies {
|
||||
headers.insert("cookie", cookies.parse().unwrap());
|
||||
}
|
||||
Self {
|
||||
room_id,
|
||||
stream,
|
||||
client,
|
||||
event_channel,
|
||||
work_dir,
|
||||
playlist: Arc::new(Mutex::new(HlsPlaylist::new(playlist_path).await)),
|
||||
headers,
|
||||
enabled,
|
||||
sequence: Arc::new(AtomicU64::new(0)),
|
||||
updated_at: Arc::new(AtomicI64::new(chrono::Utc::now().timestamp_millis())),
|
||||
cached_duration_secs: Arc::new(AtomicU64::new(0)),
|
||||
cached_size_bytes: Arc::new(AtomicU64::new(0)),
|
||||
pre_metadata: Arc::new(RwLock::new(None)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Start the recorder blockingly
|
||||
///
|
||||
/// This will start the recorder and update the entries periodically.
|
||||
pub async fn start(&self) -> Result<(), RecorderError> {
|
||||
while self.enabled.load(Ordering::Relaxed) {
|
||||
let result = self.update_entries().await;
|
||||
if let Err(e) = result {
|
||||
match e {
|
||||
RecorderError::ResolutionChanged { .. } => {
|
||||
log::error!("Resolution changed: {}", e);
|
||||
self.playlist.lock().await.close().await?;
|
||||
return Err(e);
|
||||
}
|
||||
RecorderError::UpdateTimeout => {
|
||||
log::error!(
|
||||
"Source playlist is not updated for a long time, stop recording"
|
||||
);
|
||||
self.playlist.lock().await.close().await?;
|
||||
return Err(e);
|
||||
}
|
||||
RecorderError::M3u8ParseFailed { .. } => {
|
||||
log::error!("[{}]M3u8 parse failed: {}", self.room_id, e);
|
||||
return Err(e);
|
||||
}
|
||||
_ => {
|
||||
// Other errors are not critical, just log it
|
||||
log::error!("[{}]Update entries error: {}", self.room_id, e);
|
||||
return Err(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tokio::time::sleep(UPDATE_INTERVAL).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn stop(&self) {
|
||||
self.enabled.store(false, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
async fn query_playlist(&self, stream: &HlsStream) -> Result<Playlist, RecorderError> {
|
||||
let url = stream.index();
|
||||
let response = self
|
||||
.client
|
||||
.get(url)
|
||||
.headers(self.headers.clone())
|
||||
.send()
|
||||
.await?;
|
||||
let bytes = response.bytes().await?;
|
||||
let (_, playlist) =
|
||||
m3u8_rs::parse_playlist(&bytes).map_err(|_| RecorderError::M3u8ParseFailed {
|
||||
content: String::from_utf8(bytes.to_vec()).unwrap(),
|
||||
})?;
|
||||
Ok(playlist)
|
||||
}
|
||||
|
||||
async fn query_media_playlist(&self) -> Result<MediaPlaylist, RecorderError> {
|
||||
let playlist = self.query_playlist(&self.stream).await?;
|
||||
match playlist {
|
||||
Playlist::MediaPlaylist(playlist) => Ok(playlist),
|
||||
Playlist::MasterPlaylist(playlist) => {
|
||||
// just return the first variant
|
||||
match playlist.variants.first() {
|
||||
Some(variant) => {
|
||||
let real_stream = construct_stream_from_variant(
|
||||
&self.stream.id,
|
||||
&variant.uri,
|
||||
self.stream.format.clone(),
|
||||
self.stream.codec.clone(),
|
||||
)
|
||||
.await?;
|
||||
let playlist = self.query_playlist(&real_stream).await?;
|
||||
match playlist {
|
||||
Playlist::MediaPlaylist(playlist) => Ok(playlist),
|
||||
Playlist::MasterPlaylist(_) => Err(RecorderError::M3u8ParseFailed {
|
||||
content: "No media playlist found".to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
None => Err(RecorderError::M3u8ParseFailed {
|
||||
content: "No variants found".to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn update_entries(&self) -> Result<(), RecorderError> {
|
||||
let media_playlist = self.query_media_playlist().await?;
|
||||
let playlist_sequence = media_playlist.media_sequence;
|
||||
let last_sequence = self.sequence.load(Ordering::Relaxed);
|
||||
let last_metadata = self.pre_metadata.read().await.clone();
|
||||
let mut updated = false;
|
||||
for (i, segment) in media_playlist.segments.iter().enumerate() {
|
||||
let segment_sequence = playlist_sequence + i as u64;
|
||||
if segment_sequence <= last_sequence {
|
||||
continue;
|
||||
}
|
||||
|
||||
let segment_full_url = self.stream.ts_url(&segment.uri);
|
||||
// to get filename, we need to remove the query parameters
|
||||
// for example: 1.ts?expires=1760808243
|
||||
// we need to remove the query parameters: 1.ts
|
||||
let filename = segment.uri.split('?').next().unwrap_or(&segment.uri);
|
||||
let segment_path = self.work_dir.join(filename);
|
||||
let Ok(size) = download(
|
||||
&self.client,
|
||||
&segment_full_url,
|
||||
&segment_path,
|
||||
DOWNLOAD_RETRY,
|
||||
)
|
||||
.await
|
||||
else {
|
||||
log::error!("Download failed: {:#?}", segment);
|
||||
return Err(RecorderError::IoError(std::io::Error::other(
|
||||
"Download failed",
|
||||
)));
|
||||
};
|
||||
|
||||
// check if the stream is changed
|
||||
let segment_metadata = crate::ffmpeg::extract_video_metadata(&segment_path)
|
||||
.await
|
||||
.map_err(RecorderError::FfmpegError)?;
|
||||
|
||||
// IMPORTANT: This handles bilibili ts stream segment, which might lack of SPS/PPS and need to be appended behind last segment
|
||||
if segment_metadata.seems_corrupted() {
|
||||
let mut playlist = self.playlist.lock().await;
|
||||
if playlist.is_empty().await {
|
||||
// ignore this segment
|
||||
log::error!(
|
||||
"Segment is corrupted and has no previous segment, ignore: {}",
|
||||
segment_path.display()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
let last_segment = playlist.last_segment().await;
|
||||
let last_segment_uri = last_segment.unwrap().uri.clone();
|
||||
let last_segment_path = segment_path.with_file_name(last_segment_uri);
|
||||
// append segment data behind last segment data
|
||||
let mut last_segment_file = OpenOptions::new()
|
||||
.append(true)
|
||||
.open(&last_segment_path)
|
||||
.await?;
|
||||
log::debug!(
|
||||
"Appending segment data behind last segment: {}",
|
||||
last_segment_path.display()
|
||||
);
|
||||
let mut segment_file = File::open(&segment_path).await?;
|
||||
let mut buffer = Vec::new();
|
||||
segment_file.read_to_end(&mut buffer).await?;
|
||||
last_segment_file.write_all(&buffer).await?;
|
||||
let _ = tokio::fs::remove_file(&segment_path).await;
|
||||
playlist.append_last_segment(segment.clone()).await?;
|
||||
|
||||
self.cached_duration_secs
|
||||
.fetch_add(segment_metadata.duration as u64, Ordering::Relaxed);
|
||||
self.cached_size_bytes.fetch_add(size, Ordering::Relaxed);
|
||||
self.sequence.store(segment_sequence, Ordering::Relaxed);
|
||||
self.updated_at
|
||||
.store(chrono::Utc::now().timestamp_millis(), Ordering::Relaxed);
|
||||
updated = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(last_metadata) = &last_metadata {
|
||||
if last_metadata != &segment_metadata {
|
||||
return Err(RecorderError::ResolutionChanged {
|
||||
err: "Resolution changed".to_string(),
|
||||
});
|
||||
}
|
||||
} else {
|
||||
self.pre_metadata
|
||||
.write()
|
||||
.await
|
||||
.replace(segment_metadata.clone());
|
||||
}
|
||||
|
||||
let mut new_segment = segment.clone();
|
||||
new_segment.duration = segment_metadata.duration as f32;
|
||||
|
||||
self.playlist.lock().await.add_segment(new_segment).await?;
|
||||
|
||||
self.cached_duration_secs
|
||||
.fetch_add(segment_metadata.duration as u64, Ordering::Relaxed);
|
||||
self.cached_size_bytes.fetch_add(size, Ordering::Relaxed);
|
||||
self.sequence.store(segment_sequence, Ordering::Relaxed);
|
||||
self.updated_at
|
||||
.store(chrono::Utc::now().timestamp_millis(), Ordering::Relaxed);
|
||||
updated = true;
|
||||
}
|
||||
|
||||
// Source playlist may not be updated for a long time, check if it's timeout
|
||||
let current_time = chrono::Utc::now().timestamp_millis();
|
||||
if self.updated_at.load(Ordering::Relaxed) + (UPDATE_TIMEOUT.as_millis() as i64)
|
||||
< current_time
|
||||
{
|
||||
return Err(RecorderError::UpdateTimeout);
|
||||
}
|
||||
|
||||
if updated {
|
||||
let _ = self.event_channel.send(RecorderEvent::RecordUpdate {
|
||||
live_id: self.stream.id.clone(),
|
||||
duration_secs: self.cached_duration_secs.load(Ordering::Relaxed),
|
||||
cached_size_bytes: self.cached_size_bytes.load(Ordering::Relaxed),
|
||||
});
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Download url content into fpath
|
||||
async fn download_inner(
|
||||
client: &reqwest::Client,
|
||||
url: &str,
|
||||
path: &Path,
|
||||
) -> Result<u64, RecorderError> {
|
||||
if !path.parent().unwrap().exists() {
|
||||
std::fs::create_dir_all(path.parent().unwrap()).unwrap();
|
||||
}
|
||||
let response = client.get(url).send().await?;
|
||||
if !response.status().is_success() {
|
||||
let status = response.status();
|
||||
log::warn!("Download segment failed: {url}: {status}");
|
||||
return Err(RecorderError::InvalidResponseStatus { status });
|
||||
}
|
||||
let bytes = response.bytes().await?;
|
||||
let size = bytes.len() as u64;
|
||||
let mut file = tokio::fs::File::create(&path).await?;
|
||||
let mut content = std::io::Cursor::new(bytes.clone());
|
||||
tokio::io::copy(&mut content, &mut file).await?;
|
||||
Ok(size)
|
||||
}
|
||||
|
||||
async fn download(
|
||||
client: &reqwest::Client,
|
||||
url: &str,
|
||||
path: &Path,
|
||||
retry: u32,
|
||||
) -> Result<u64, RecorderError> {
|
||||
for i in 0..retry {
|
||||
let result = download_inner(client, url, path).await;
|
||||
if let Ok(size) = result {
|
||||
return Ok(size);
|
||||
}
|
||||
log::error!("Download failed, retry: {}", i);
|
||||
// sleep for 500 ms
|
||||
tokio::time::sleep(Duration::from_millis(500)).await;
|
||||
}
|
||||
|
||||
Err(RecorderError::IoError(std::io::Error::other(
|
||||
"Download failed",
|
||||
)))
|
||||
}
|
||||
|
||||
pub async fn construct_stream_from_variant(
|
||||
id: &str,
|
||||
variant_url: &str,
|
||||
format: Format,
|
||||
codec: Codec,
|
||||
) -> Result<HlsStream, RecorderError> {
|
||||
// construct the real stream from variant
|
||||
// example: https://cn-jsnt-ct-01-07.bilivideo.com/live-bvc/930889/live_2124647716_1414766_bluray/index.m3u8?expires=1760808243
|
||||
let (body, extra) = variant_url.split_once('?').unwrap_or((variant_url, ""));
|
||||
// body example: https://cn-jsnt-ct-01-07.bilivideo.com/live-bvc/930889/live_2124647716_1414766_bluray/index.m3u8
|
||||
|
||||
// extract host, should be like: https://cn-jsnt-ct-01-07.bilivideo.com, which contains http schema
|
||||
let host = if let Some(schema_end) = body.find("://") {
|
||||
let after_schema = &body[schema_end + 3..];
|
||||
if let Some(path_start) = after_schema.find('/') {
|
||||
format!("{}{}", &body[..schema_end + 3], &after_schema[..path_start])
|
||||
} else {
|
||||
body.to_string()
|
||||
}
|
||||
} else {
|
||||
return Err(RecorderError::M3u8ParseFailed {
|
||||
content: "Invalid URL format: missing protocol".to_string(),
|
||||
});
|
||||
};
|
||||
|
||||
// extract base, should be like: /live-bvc/930889/live_2124647716_1414766_bluray/index.m3u8
|
||||
let base = if let Some(schema_end) = body.find("://") {
|
||||
let after_schema = &body[schema_end + 3..];
|
||||
if let Some(path_start) = after_schema.find('/') {
|
||||
format!("/{}", &after_schema[path_start + 1..])
|
||||
} else {
|
||||
"/".to_string()
|
||||
}
|
||||
} else {
|
||||
return Err(RecorderError::M3u8ParseFailed {
|
||||
content: "Invalid URL format: missing protocol".to_string(),
|
||||
});
|
||||
};
|
||||
|
||||
// Add '?' to base if there are query parameters, to match the expected format
|
||||
let base_with_query = if !extra.is_empty() {
|
||||
format!("{}?", base)
|
||||
} else {
|
||||
base
|
||||
};
|
||||
|
||||
let real_stream = HlsStream::new(
|
||||
id.to_string(),
|
||||
host,
|
||||
base_with_query,
|
||||
extra.to_string(),
|
||||
format,
|
||||
codec,
|
||||
);
|
||||
|
||||
Ok(real_stream)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use crate::core::{Codec, Format};

    use super::*;

    /// Verify that a variant url is decomposed into host/base/extra and that
    /// segment urls merge the stream's query parameters correctly, both for
    /// bare segment names and for segment names that already carry their own
    /// query parameters.
    #[tokio::test]
    async fn test_construct_stream_from_variant() {
        let stream = construct_stream_from_variant(
            "test",
            "https://hs.hls.huya.com/huyalive/156976698-156976698-674209784144068608-314076852-10057-A-0-1.m3u8?ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103",
            Format::TS,
            Codec::Avc,
        ).await.unwrap();
        // index() must reproduce the original variant url byte-for-byte.
        assert_eq!(stream.index(), "https://hs.hls.huya.com/huyalive/156976698-156976698-674209784144068608-314076852-10057-A-0-1.m3u8?ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103");
        // A plain segment name gets the extra params attached with '?'.
        assert_eq!(stream.ts_url("1.ts"), "https://hs.hls.huya.com/huyalive/1.ts?ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103");
        // A segment name with its own params gets extra appended with '&'.
        assert_eq!(stream.ts_url("1.ts?expires=1760808243"), "https://hs.hls.huya.com/huyalive/1.ts?expires=1760808243&ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103");
        assert_eq!(stream.host, "https://hs.hls.huya.com");
        // base keeps a trailing '?' because query parameters were present.
        assert_eq!(
            stream.base,
            "/huyalive/156976698-156976698-674209784144068608-314076852-10057-A-0-1.m3u8?"
        );
        assert_eq!(stream.extra, "ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103");
        assert_eq!(stream.format, Format::TS);
        assert_eq!(stream.codec, Codec::Avc);
    }
}
|
||||
97
src-tauri/crates/recorder/src/core/mod.rs
Normal file
@@ -0,0 +1,97 @@
|
||||
use std::fmt;
|
||||
pub mod hls_recorder;
|
||||
pub mod playlist;
|
||||
|
||||
/// Container format of a live stream.
// Fieldless enum: derive `Copy` and `Eq` so it can be passed by value and
// used in exhaustive-equality contexts without cloning.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Format {
    Flv,
    TS,
    FMP4,
}
|
||||
|
||||
impl fmt::Display for Format {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{:?}", self)
|
||||
}
|
||||
}
|
||||
|
||||
/// Video codec of a live stream.
// Fieldless enum: derive `Copy` and `Eq` so it can be passed by value and
// compared without cloning.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Codec {
    Avc,
    Hevc,
}
|
||||
|
||||
impl fmt::Display for Codec {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{:?}", self)
|
||||
}
|
||||
}
|
||||
|
||||
/// A descriptor for an HLS stream.
///
/// This struct provides a common representation for HLS streams.
/// For example:
/// ```text
/// host: https://d1--cn-gotcha104b.bilivideo.com
/// base: /live-bvc/375028/live_2124647716_1414766_bluray.m3u8?
/// extra: expire=1734567890&oi=1234567890&s=1234567890&pt=0&ps=0&bw=1000000&tk=1234567890
/// ```
#[derive(Debug, Clone)]
pub struct HlsStream {
    // Recorder-assigned identifier for this stream.
    id: String,
    // Schema + authority, e.g. `https://example.com` (no trailing slash).
    host: String,
    // Url path of the playlist; ends with '?' when `extra` is non-empty.
    base: String,
    // Raw query string, without the leading '?'.
    extra: String,
    // Container format of the stream.
    format: Format,
    // Video codec of the stream.
    codec: Codec,
}
|
||||
|
||||
impl HlsStream {
|
||||
pub fn new(
|
||||
id: String,
|
||||
host: String,
|
||||
base: String,
|
||||
extra: String,
|
||||
format: Format,
|
||||
codec: Codec,
|
||||
) -> Self {
|
||||
Self {
|
||||
id,
|
||||
host,
|
||||
base,
|
||||
extra,
|
||||
format,
|
||||
codec,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn index(&self) -> String {
|
||||
if self.extra.is_empty() {
|
||||
format!("{}{}", self.host, self.base)
|
||||
} else {
|
||||
format!("{}{}{}", self.host, self.base, self.extra)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ts_url(&self, seg_name: &str) -> String {
|
||||
let base = self.base.clone();
|
||||
let m3u8_filename = base.split('/').next_back().unwrap();
|
||||
let base_url = base.replace(m3u8_filename, seg_name);
|
||||
if self.extra.is_empty() {
|
||||
format!("{}{}", self.host, base_url)
|
||||
} else {
|
||||
// Check if base_url already contains query parameters
|
||||
if base_url.contains('?') {
|
||||
// If seg_name already has query params, append extra with '&'
|
||||
// Remove trailing '?' or '&' before appending
|
||||
let base_trimmed = base_url.trim_end_matches('?').trim_end_matches('&');
|
||||
format!("{}{}&{}", self.host, base_trimmed, self.extra)
|
||||
} else {
|
||||
// If no query params, add them with '?'
|
||||
// Remove trailing '?' from base_url if present
|
||||
let base_without_query = base_url.trim_end_matches('?');
|
||||
format!("{}{}?{}", self.host, base_without_query, self.extra)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
87
src-tauri/crates/recorder/src/core/playlist.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
use m3u8_rs::{MediaPlaylist, MediaPlaylistType, MediaSegment};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::errors::RecorderError;
|
||||
|
||||
/// An on-disk m3u8 media playlist together with its backing file path.
pub struct HlsPlaylist {
    // In-memory representation of the playlist; persisted via `flush`.
    pub playlist: MediaPlaylist,
    // Path of the m3u8 file this playlist is persisted to.
    pub file_path: PathBuf,
}
|
||||
|
||||
impl HlsPlaylist {
|
||||
pub async fn new(file_path: PathBuf) -> Self {
|
||||
if file_path.exists() {
|
||||
let bytes = tokio::fs::read(&file_path).await.unwrap();
|
||||
let (_, playlist) = m3u8_rs::parse_media_playlist(&bytes).unwrap();
|
||||
Self {
|
||||
playlist,
|
||||
file_path,
|
||||
}
|
||||
} else {
|
||||
Self {
|
||||
playlist: MediaPlaylist::default(),
|
||||
file_path,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn last_segment(&self) -> Option<&MediaSegment> {
|
||||
self.playlist.segments.last()
|
||||
}
|
||||
|
||||
pub async fn append_last_segment(
|
||||
&mut self,
|
||||
segment: MediaSegment,
|
||||
) -> Result<(), RecorderError> {
|
||||
if self.is_empty().await {
|
||||
self.add_segment(segment).await?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
{
|
||||
let last = self.playlist.segments.last_mut().unwrap();
|
||||
let new_duration = last.duration + segment.duration;
|
||||
last.duration = new_duration;
|
||||
self.playlist.target_duration =
|
||||
std::cmp::max(self.playlist.target_duration, new_duration as u64);
|
||||
self.flush().await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn add_segment(&mut self, segment: MediaSegment) -> Result<(), RecorderError> {
|
||||
self.playlist.segments.push(segment);
|
||||
self.flush().await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn flush(&self) -> Result<(), RecorderError> {
|
||||
// Create an in-memory buffer to serialize the playlist into.
|
||||
// `Vec<u8>` implements `std::io::Write`, which `m3u8_rs::MediaPlaylist::write_to` expects.
|
||||
let mut buffer = Vec::new();
|
||||
|
||||
// Serialize the playlist into the buffer.
|
||||
self.playlist
|
||||
.write_to(&mut buffer)
|
||||
.map_err(RecorderError::IoError)?;
|
||||
|
||||
// Write the buffer to the file
|
||||
tokio::fs::write(&self.file_path, buffer)
|
||||
.await
|
||||
.map_err(RecorderError::IoError)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn close(&mut self) -> Result<(), RecorderError> {
|
||||
self.playlist.end_list = true;
|
||||
self.playlist.playlist_type = Some(MediaPlaylistType::Vod);
|
||||
self.flush().await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn is_empty(&self) -> bool {
|
||||
self.playlist.segments.is_empty()
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,5 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use serde::Serialize;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::{
|
||||
@@ -18,7 +20,7 @@ pub struct DanmuStorage {
|
||||
}
|
||||
|
||||
impl DanmuStorage {
|
||||
pub async fn new(file_path: &str) -> Option<DanmuStorage> {
|
||||
pub async fn new(file_path: &PathBuf) -> Option<DanmuStorage> {
|
||||
let file = OpenOptions::new()
|
||||
.read(true)
|
||||
.write(true)
|
||||
@@ -38,7 +40,7 @@ impl DanmuStorage {
|
||||
let parts: Vec<&str> = line.split(':').collect();
|
||||
let ts: i64 = parts[0].parse().unwrap();
|
||||
let content = parts[1].to_string();
|
||||
preload_cache.push(DanmuEntry { ts, content })
|
||||
preload_cache.push(DanmuEntry { ts, content });
|
||||
}
|
||||
let file = OpenOptions::new()
|
||||
.append(true)
|
||||
@@ -61,7 +63,7 @@ impl DanmuStorage {
|
||||
.file
|
||||
.write()
|
||||
.await
|
||||
.write(format!("{}:{}\n", ts, content).as_bytes())
|
||||
.write(format!("{ts}:{content}\n").as_bytes())
|
||||
.await;
|
||||
}
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
use core::fmt;
|
||||
use std::fmt::Display;
|
||||
|
||||
use async_std::{
|
||||
fs::{File, OpenOptions},
|
||||
io::{prelude::BufReadExt, BufReader, WriteExt},
|
||||
path::Path,
|
||||
stream::StreamExt,
|
||||
};
|
||||
use chrono::{TimeZone, Utc};
|
||||
use core::fmt;
|
||||
use std::{fmt::Display, path::Path};
|
||||
use tokio::{
|
||||
fs::OpenOptions,
|
||||
io::{AsyncBufReadExt, BufReader},
|
||||
};
|
||||
|
||||
const ENTRY_FILE_NAME: &str = "entries.log";
|
||||
|
||||
@@ -31,19 +28,19 @@ impl TsEntry {
|
||||
url: parts[0].to_string(),
|
||||
sequence: parts[1]
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse sequence: {}", e))?,
|
||||
.map_err(|e| format!("Failed to parse sequence: {e}"))?,
|
||||
length: parts[2]
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse length: {}", e))?,
|
||||
.map_err(|e| format!("Failed to parse length: {e}"))?,
|
||||
size: parts[3]
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse size: {}", e))?,
|
||||
.map_err(|e| format!("Failed to parse size: {e}"))?,
|
||||
ts: parts[4]
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse timestamp: {}", e))?,
|
||||
.map_err(|e| format!("Failed to parse timestamp: {e}"))?,
|
||||
is_header: parts[5]
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse is_header: {}", e))?,
|
||||
.map_err(|e| format!("Failed to parse is_header: {e}"))?,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -51,34 +48,25 @@ impl TsEntry {
|
||||
pub fn ts_seconds(&self) -> i64 {
|
||||
// For some legacy problem, douyin entry's ts is s, bilibili entry's ts is ms.
|
||||
// This should be fixed after 2.5.6, but we need to support entry.log generated by previous version.
|
||||
if self.ts > 10000000000 {
|
||||
if self.ts > 10_000_000_000 {
|
||||
self.ts / 1000
|
||||
} else {
|
||||
self.ts
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ts_mili(&self) -> i64 {
|
||||
// if already in ms, return as is
|
||||
if self.ts > 10000000000 {
|
||||
self.ts
|
||||
} else {
|
||||
self.ts * 1000
|
||||
}
|
||||
}
|
||||
|
||||
pub fn date_time(&self) -> String {
|
||||
let date_str = Utc
|
||||
.timestamp_opt(self.ts_seconds(), 0)
|
||||
.unwrap()
|
||||
.to_rfc3339();
|
||||
format!("#EXT-X-PROGRAM-DATE-TIME:{}\n", date_str)
|
||||
format!("#EXT-X-PROGRAM-DATE-TIME:{date_str}\n")
|
||||
}
|
||||
|
||||
/// Convert entry into a segment in HLS manifest.
|
||||
pub fn to_segment(&self) -> String {
|
||||
if self.is_header {
|
||||
return "".into();
|
||||
return String::new();
|
||||
}
|
||||
|
||||
let mut content = String::new();
|
||||
@@ -100,11 +88,9 @@ impl Display for TsEntry {
|
||||
}
|
||||
}
|
||||
|
||||
/// EntryStore is used to management stream segments, which is basicly a simple version of hls manifest,
|
||||
/// and of course, provids methods to generate hls manifest for frontend player.
|
||||
/// `EntryStore` is used to management stream segments, which is basically a simple version of hls manifest,
|
||||
/// and of course, provides methods to generate hls manifest for frontend player.
|
||||
pub struct EntryStore {
|
||||
// append only log file
|
||||
log_file: File,
|
||||
header: Option<TsEntry>,
|
||||
entries: Vec<TsEntry>,
|
||||
total_duration: f64,
|
||||
@@ -115,18 +101,11 @@ pub struct EntryStore {
|
||||
impl EntryStore {
|
||||
pub async fn new(work_dir: &str) -> Self {
|
||||
// if work_dir is not exists, create it
|
||||
if !Path::new(work_dir).exists().await {
|
||||
if !Path::new(work_dir).exists() {
|
||||
std::fs::create_dir_all(work_dir).unwrap();
|
||||
}
|
||||
// open append only log file
|
||||
let log_file = OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(format!("{}/{}", work_dir, ENTRY_FILE_NAME))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut entry_store = Self {
|
||||
log_file,
|
||||
header: None,
|
||||
entries: vec![],
|
||||
total_duration: 0.0,
|
||||
@@ -143,14 +122,26 @@ impl EntryStore {
|
||||
let file = OpenOptions::new()
|
||||
.create(false)
|
||||
.read(true)
|
||||
.open(format!("{}/{}", work_dir, ENTRY_FILE_NAME))
|
||||
.await
|
||||
.unwrap();
|
||||
let mut lines = BufReader::new(file).lines();
|
||||
while let Some(Ok(line)) = lines.next().await {
|
||||
let entry = TsEntry::from(&line);
|
||||
.open(format!("{work_dir}/{ENTRY_FILE_NAME}"))
|
||||
.await; // The `file` variable from the previous line now holds `Result<tokio::fs::File, tokio::io::Error>`
|
||||
let file_handle = match file {
|
||||
Ok(f) => f,
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
log::info!(
|
||||
"Entry file not found at {work_dir}/{ENTRY_FILE_NAME}, starting fresh."
|
||||
);
|
||||
} else {
|
||||
log::error!("Failed to open entry file: {e}");
|
||||
}
|
||||
return; // Exit the load function if file cannot be opened
|
||||
}
|
||||
};
|
||||
let mut lines = BufReader::new(file_handle).lines();
|
||||
while let Ok(Some(line)) = lines.next_line().await {
|
||||
let entry = TsEntry::from(line.as_str());
|
||||
if let Err(e) = entry {
|
||||
log::error!("Failed to parse entry: {} {}", e, line);
|
||||
log::error!("Failed to parse entry: {e} {line}");
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -169,45 +160,12 @@ impl EntryStore {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn add_entry(&mut self, entry: TsEntry) {
|
||||
if entry.is_header {
|
||||
self.header = Some(entry.clone());
|
||||
} else {
|
||||
self.entries.push(entry.clone());
|
||||
}
|
||||
|
||||
if let Err(e) = self.log_file.write_all(entry.to_string().as_bytes()).await {
|
||||
log::error!("Failed to write entry to log file: {}", e);
|
||||
}
|
||||
|
||||
self.log_file.flush().await.unwrap();
|
||||
|
||||
self.last_sequence = std::cmp::max(self.last_sequence, entry.sequence);
|
||||
|
||||
self.total_duration += entry.length;
|
||||
self.total_size += entry.size;
|
||||
pub fn len(&self) -> usize {
|
||||
self.entries.len()
|
||||
}
|
||||
|
||||
pub fn get_header(&self) -> Option<&TsEntry> {
|
||||
self.header.as_ref()
|
||||
}
|
||||
|
||||
pub fn total_duration(&self) -> f64 {
|
||||
self.total_duration
|
||||
}
|
||||
|
||||
pub fn total_size(&self) -> u64 {
|
||||
self.total_size
|
||||
}
|
||||
|
||||
/// Get first timestamp in milliseconds
|
||||
pub fn first_ts(&self) -> Option<i64> {
|
||||
self.entries.first().map(|x| x.ts_mili())
|
||||
}
|
||||
|
||||
/// Get last timestamp in milliseconds
|
||||
pub fn last_ts(&self) -> Option<i64> {
|
||||
self.entries.last().map(|x| x.ts_mili())
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.entries.is_empty()
|
||||
}
|
||||
|
||||
/// Generate a hls manifest for selected range.
|
||||
81
src-tauri/crates/recorder/src/errors.rs
Normal file
@@ -0,0 +1,81 @@
|
||||
use super::platforms::bilibili::api::BiliStream;
|
||||
use super::platforms::douyin::stream_info::DouyinStream;
|
||||
use thiserror::Error;
|
||||
|
||||
/// Platform-specific stream descriptor, carried inside `RecorderError`
/// variants to provide context in error messages.
#[derive(Debug, Clone)]
pub enum Stream {
    BiliBili(BiliStream),
    Douyin(DouyinStream),
}
|
||||
|
||||
/// Unified error type for all recorder operations: network requests, m3u8
/// parsing, filesystem access, ffmpeg invocation, platform APIs and upload.
/// Each variant's `#[error]` attribute is its user-facing message.
#[derive(Error, Debug)]
pub enum RecorderError {
    #[error("Index not found: {url}")]
    IndexNotFound { url: String },
    #[error("Can not delete current stream: {live_id}")]
    ArchiveInUse { live_id: String },
    #[error("Cache is empty")]
    EmptyCache,
    #[error("Parse m3u8 content failed: {content}")]
    M3u8ParseFailed { content: String },
    #[error("No available stream provided")]
    NoStreamAvailable,
    #[error("Stream is freezed: {stream:#?}")]
    FreezedStream { stream: Stream },
    #[error("Stream is nearly expired: {stream:#?}")]
    StreamExpired { stream: Stream },
    #[error("No room info provided")]
    NoRoomInfo,
    #[error("Invalid stream: {stream:#?}")]
    InvalidStream { stream: Stream },
    #[error("Stream is too slow: {stream:#?}")]
    SlowStream { stream: Stream },
    #[error("Header url is empty")]
    EmptyHeader,
    #[error("Header timestamp is invalid")]
    InvalidTimestamp,
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    #[error("Danmu stream error: {0}")]
    DanmuStreamError(#[from] danmu_stream::DanmuStreamError),
    #[error("Subtitle not found: {live_id}")]
    SubtitleNotFound { live_id: String },
    #[error("Subtitle generation failed: {error}")]
    SubtitleGenerationFailed { error: String },
    #[error("Resolution changed: {err}")]
    ResolutionChanged { err: String },
    #[error("Ffmpeg error: {0}")]
    FfmpegError(String),
    #[error("Format not found: {format}")]
    FormatNotFound { format: String },
    #[error("Codec not found: {codecs}")]
    CodecNotFound { codecs: String },
    #[error("Invalid cookies")]
    InvalidCookies,
    #[error("API error: {error}")]
    ApiError { error: String },
    #[error("Invalid value")]
    InvalidValue,
    #[error("Invalid response")]
    InvalidResponse,
    #[error("Invalid response json: {resp}")]
    InvalidResponseJson { resp: serde_json::Value },
    #[error("Invalid response status: {status}")]
    InvalidResponseStatus { status: reqwest::StatusCode },
    #[error("Upload cancelled")]
    UploadCancelled,
    #[error("Upload error: {err}")]
    UploadError { err: String },
    #[error("Client error: {0}")]
    ClientError(#[from] reqwest::Error),
    #[error("Security control error")]
    SecurityControlError,
    #[error("JavaScript runtime error: {0}")]
    JsRuntimeError(String),
    #[error("Update timeout")]
    UpdateTimeout,
    #[error("Unsupported stream")]
    UnsupportedStream,
    #[error("Empty record")]
    EmptyRecord,
}
|
||||
39
src-tauri/crates/recorder/src/events.rs
Normal file
@@ -0,0 +1,39 @@
|
||||
use crate::platforms::PlatformType;
|
||||
use crate::RecorderInfo;
|
||||
|
||||
/// Events broadcast by recorders to subscribers via the event channel.
#[derive(Debug, Clone)]
pub enum RecorderEvent {
    /// A monitored room went live.
    LiveStart {
        recorder: RecorderInfo,
    },
    /// A monitored room's live ended.
    LiveEnd {
        room_id: String,
        platform: PlatformType,
        recorder: RecorderInfo,
    },
    /// Recording of a live started.
    RecordStart {
        recorder: RecorderInfo,
    },
    /// Recording of a live ended.
    RecordEnd {
        recorder: RecorderInfo,
    },
    /// Periodic progress update for an ongoing recording.
    RecordUpdate {
        live_id: String,
        // Total recorded duration so far, in seconds.
        duration_secs: u64,
        // Total cached size so far, in bytes.
        cached_size_bytes: u64,
    },
    /// Free-form progress text for a long-running task identified by `id`.
    ProgressUpdate {
        id: String,
        content: String,
    },
    /// Terminal status for a long-running task identified by `id`.
    ProgressFinished {
        id: String,
        success: bool,
        message: String,
    },
    /// A danmu (chat) message was received for `room` at timestamp `ts`.
    DanmuReceived {
        room: String,
        ts: i64,
        content: String,
    },
}
|
||||
111
src-tauri/crates/recorder/src/ffmpeg/mod.rs
Normal file
@@ -0,0 +1,111 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
// Video metadata structure, populated from ffprobe's JSON output.
#[derive(Debug, Clone)]
pub struct VideoMetadata {
    // Duration as reported by ffprobe's stream `duration` field
    // (seconds per ffprobe convention); 0.0 when unavailable.
    pub duration: f64,
    // Frame width in pixels; 0 when the probe reported none.
    pub width: u32,
    // Frame height in pixels; 0 when the probe reported none.
    pub height: u32,
    // Codec name of the video stream (e.g. from ffprobe `codec_name`).
    pub video_codec: String,
    // Codec name of the audio stream; empty when absent.
    pub audio_codec: String,
}
|
||||
|
||||
impl VideoMetadata {
|
||||
pub fn seems_corrupted(&self) -> bool {
|
||||
self.width == 0 && self.height == 0
|
||||
}
|
||||
}
|
||||
|
||||
// Manual PartialEq: equality is based on dimensions and codecs only.
// `duration` is not compared, so segments of different lengths from the
// same stream still compare equal (used for stream-change detection).
impl std::cmp::PartialEq for VideoMetadata {
    fn eq(&self, other: &Self) -> bool {
        self.width == other.width
            && self.height == other.height
            && self.video_codec == other.video_codec
            && self.audio_codec == other.audio_codec
    }
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
const CREATE_NO_WINDOW: u32 = 0x08000000;
|
||||
#[cfg(target_os = "windows")]
|
||||
#[allow(unused_imports)]
|
||||
use std::os::windows::process::CommandExt;
|
||||
|
||||
/// Name of the ffprobe binary to invoke: `ffprobe.exe` on Windows,
/// `ffprobe` elsewhere (resolved via PATH by the process spawner).
fn ffprobe_path() -> PathBuf {
    let binary = if cfg!(windows) { "ffprobe.exe" } else { "ffprobe" };
    PathBuf::from(binary)
}
|
||||
|
||||
/// Extract basic information from a video file.
///
/// Runs `ffprobe` with JSON output and parses stream entries into a
/// [`VideoMetadata`]. Missing fields default to `0`/empty rather than
/// failing.
///
/// # Arguments
/// * `file_path` - The path to the video file.
///
/// # Returns
/// A `Result` containing the video metadata or an error message.
pub async fn extract_video_metadata(file_path: &Path) -> Result<VideoMetadata, String> {
    let mut ffprobe_process = tokio::process::Command::new(ffprobe_path());
    // Prevent a console window from flashing up on Windows.
    #[cfg(target_os = "windows")]
    ffprobe_process.creation_flags(CREATE_NO_WINDOW);

    let output = ffprobe_process
        .args([
            "-v",
            "quiet",
            "-print_format",
            "json",
            "-show_format",
            "-show_streams",
            &format!("{}", file_path.display()),
        ])
        .output()
        .await
        .map_err(|e| format!("执行ffprobe失败: {e}"))?;

    if !output.status.success() {
        return Err(format!(
            "ffprobe执行失败: {}",
            String::from_utf8_lossy(&output.stderr)
        ));
    }

    let json_str = String::from_utf8_lossy(&output.stdout);
    let json: serde_json::Value =
        serde_json::from_str(&json_str).map_err(|e| format!("解析ffprobe输出失败: {e}"))?;

    // Parse the stream entries reported by ffprobe.
    let streams = json["streams"].as_array().ok_or("未找到视频流信息")?;

    if streams.is_empty() {
        return Err("未找到视频流".to_string());
    }

    // Start from zero/empty defaults; fields stay defaulted when the probe
    // doesn't provide them.
    let mut metadata = VideoMetadata {
        duration: 0.0,
        width: 0,
        height: 0,
        video_codec: String::new(),
        audio_codec: String::new(),
    };

    for stream in streams {
        let codec_name = stream["codec_type"].as_str().unwrap_or("");
        if codec_name == "video" {
            metadata.video_codec = stream["codec_name"].as_str().unwrap_or("").to_owned();
            metadata.width = stream["width"].as_u64().unwrap_or(0) as u32;
            metadata.height = stream["height"].as_u64().unwrap_or(0) as u32;
            // ffprobe encodes duration as a string; parse it leniently.
            metadata.duration = stream["duration"]
                .as_str()
                .unwrap_or("0.0")
                .parse::<f64>()
                .unwrap_or(0.0);
        } else if codec_name == "audio" {
            metadata.audio_codec = stream["codec_name"].as_str().unwrap_or("").to_owned();
        }
    }
    Ok(metadata)
}
|
||||
251
src-tauri/crates/recorder/src/lib.rs
Normal file
@@ -0,0 +1,251 @@
|
||||
pub mod account;
|
||||
pub mod core;
|
||||
pub mod danmu;
|
||||
pub mod entry;
|
||||
pub mod errors;
|
||||
pub mod events;
|
||||
mod ffmpeg;
|
||||
pub mod platforms;
|
||||
pub mod traits;
|
||||
pub mod utils;
|
||||
use crate::danmu::DanmuStorage;
|
||||
use crate::events::RecorderEvent;
|
||||
use crate::{account::Account, platforms::PlatformType};
|
||||
|
||||
use std::{
|
||||
fmt::Display,
|
||||
path::PathBuf,
|
||||
sync::{atomic, Arc},
|
||||
};
|
||||
use tokio::{
|
||||
sync::{broadcast, Mutex, RwLock},
|
||||
task::JoinHandle,
|
||||
};
|
||||
|
||||
/// Snapshot of a recorder's current state, serialized for consumers
/// (e.g. the frontend) over the event channel.
#[derive(serde::Deserialize, serde::Serialize, Clone, Debug)]
pub struct RecorderInfo {
    pub room_info: RoomInfo,
    pub user_info: UserInfo,
    /// Live id assigned by the platform.
    pub platform_live_id: String,
    /// Local live id (generally the recording start timestamp).
    pub live_id: String,
    /// Whether a recording is currently in progress.
    pub recording: bool,
    /// Whether this recorder is enabled.
    pub enabled: bool,
}
|
||||
|
||||
/// Basic information about a monitored live room.
#[derive(serde::Deserialize, serde::Serialize, Clone, Debug, Default)]
pub struct RoomInfo {
    /// Platform name (string form of `PlatformType`).
    pub platform: String,
    pub room_id: String,
    pub room_title: String,
    /// Cover image url of the room.
    pub room_cover: String,
    /// Whether the room is live
    pub status: bool,
}
|
||||
|
||||
/// Basic information about the streamer owning a room.
#[derive(serde::Deserialize, serde::Serialize, Clone, Debug, Default)]
pub struct UserInfo {
    pub user_id: String,
    pub user_name: String,
    /// Avatar image url of the user.
    pub user_avatar: String,
}
|
||||
|
||||
/// `Recorder` is the base struct for all recorders.
/// It contains the basic information for a recorder
/// and the extra, platform-specific information `T`.
///
/// Shared state is wrapped in `Arc`/atomics so clones of the recorder
/// observe the same recording state.
#[derive(Clone)]
pub struct Recorder<T>
where
    T: Send + Sync,
{
    /// Platform this recorder targets.
    platform: PlatformType,
    /// Room id on that platform.
    room_id: String,
    /// The account for the recorder
    account: Account,
    /// The client for the recorder
    client: reqwest::Client,
    /// The event channel for the recorder
    event_channel: broadcast::Sender<RecorderEvent>,
    /// The cache directory for the recorder
    cache_dir: PathBuf,
    /// Whether the recorder is quitting
    quit: Arc<atomic::AtomicBool>,
    /// Whether the recorder is enabled
    enabled: Arc<atomic::AtomicBool>,
    /// Whether the recorder is recording
    is_recording: Arc<atomic::AtomicBool>,
    /// The room info for the recorder
    room_info: Arc<RwLock<RoomInfo>>,
    /// The user info for the recorder
    user_info: Arc<RwLock<UserInfo>>,
    /// The update interval for room status
    update_interval: Arc<atomic::AtomicU64>,

    /// The platform live id for the current recording
    platform_live_id: Arc<RwLock<String>>,
    /// The live id for the current recording, generally is the timestamp of the recording start time
    live_id: Arc<RwLock<String>>,
    /// The danmu task for the current recording
    danmu_task: Arc<Mutex<Option<JoinHandle<()>>>>,
    /// The record task for the current recording
    record_task: Arc<Mutex<Option<JoinHandle<()>>>>,
    /// The danmu storage for the current recording
    danmu_storage: Arc<RwLock<Option<DanmuStorage>>>,
    /// The last update time of the current recording
    last_update: Arc<atomic::AtomicI64>,
    /// The last sequence of the current recording
    last_sequence: Arc<atomic::AtomicU64>,
    /// The total duration of the current recording in milliseconds
    total_duration: Arc<atomic::AtomicU64>,
    /// The total size of the current recording in bytes
    total_size: Arc<atomic::AtomicU64>,

    /// The extra information for the recorder
    extra: T,
}
|
||||
|
||||
// Accessor boilerplate: exposes the shared recorder state to
// platform-specific implementations through `RecorderBasicTrait`.
// `Arc`-typed getters hand out clones of the handle (cheap refcount bump),
// not copies of the underlying data.
impl<T: Send + Sync> traits::RecorderBasicTrait<T> for Recorder<T> {
    fn platform(&self) -> PlatformType {
        self.platform
    }

    fn room_id(&self) -> String {
        self.room_id.clone()
    }

    fn account(&self) -> &Account {
        &self.account
    }

    fn client(&self) -> &reqwest::Client {
        &self.client
    }

    fn event_channel(&self) -> &broadcast::Sender<RecorderEvent> {
        &self.event_channel
    }

    fn cache_dir(&self) -> PathBuf {
        self.cache_dir.clone()
    }

    fn quit(&self) -> &atomic::AtomicBool {
        &self.quit
    }

    fn enabled(&self) -> &atomic::AtomicBool {
        &self.enabled
    }

    fn is_recording(&self) -> &atomic::AtomicBool {
        &self.is_recording
    }

    fn room_info(&self) -> Arc<RwLock<RoomInfo>> {
        self.room_info.clone()
    }

    fn user_info(&self) -> Arc<RwLock<UserInfo>> {
        self.user_info.clone()
    }

    fn platform_live_id(&self) -> Arc<RwLock<String>> {
        self.platform_live_id.clone()
    }

    fn live_id(&self) -> Arc<RwLock<String>> {
        self.live_id.clone()
    }

    fn danmu_task(&self) -> Arc<Mutex<Option<JoinHandle<()>>>> {
        self.danmu_task.clone()
    }

    fn record_task(&self) -> Arc<Mutex<Option<JoinHandle<()>>>> {
        self.record_task.clone()
    }

    fn danmu_storage(&self) -> Arc<RwLock<Option<DanmuStorage>>> {
        self.danmu_storage.clone()
    }

    fn last_update(&self) -> &atomic::AtomicI64 {
        &self.last_update
    }

    fn last_sequence(&self) -> &atomic::AtomicU64 {
        &self.last_sequence
    }

    fn total_duration(&self) -> &atomic::AtomicU64 {
        &self.total_duration
    }

    fn total_size(&self) -> &atomic::AtomicU64 {
        &self.total_size
    }

    fn extra(&self) -> &T {
        &self.extra
    }
}
|
||||
|
||||
/// Cache path is relative to cache path in config
#[derive(Clone)]
pub struct CachePath {
    /// Configured cache root directory; `relative_path()` is joined onto this.
    pub cache_path: PathBuf,
    /// Platform the recording belongs to (first path component).
    pub platform: PlatformType,
    /// Room id (second path component).
    pub room_id: String,
    /// Live id (third path component).
    pub live_id: String,
    /// Optional final file-name component; sanitized when set via `with_filename`.
    pub file_name: Option<String>,
}
|
||||
|
||||
impl CachePath {
|
||||
pub fn new(cache_path: PathBuf, platform: PlatformType, room_id: &str, live_id: &str) -> Self {
|
||||
Self {
|
||||
cache_path,
|
||||
platform,
|
||||
room_id: room_id.to_string(),
|
||||
live_id: live_id.to_string(),
|
||||
file_name: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Sanitize filename and set it
|
||||
pub fn with_filename(&self, file_name: &str) -> Self {
|
||||
let sanitized_filename = sanitize_filename::sanitize(file_name);
|
||||
Self {
|
||||
file_name: Some(sanitized_filename),
|
||||
..self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
/// Get relative path to cache path
|
||||
pub fn relative_path(&self) -> PathBuf {
|
||||
if let Some(file_name) = &self.file_name {
|
||||
return PathBuf::from(format!(
|
||||
"{}/{}/{}/{}",
|
||||
self.platform.as_str(),
|
||||
self.room_id,
|
||||
self.live_id,
|
||||
file_name
|
||||
));
|
||||
}
|
||||
|
||||
PathBuf::from(format!(
|
||||
"{}/{}/{}",
|
||||
self.platform.as_str(),
|
||||
self.room_id,
|
||||
self.live_id
|
||||
))
|
||||
}
|
||||
|
||||
pub fn full_path(&self) -> PathBuf {
|
||||
self.cache_path.clone().join(self.relative_path())
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for CachePath {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.full_path().display())
|
||||
}
|
||||
}
|
||||
474
src-tauri/crates/recorder/src/platforms/bilibili.rs
Normal file
@@ -0,0 +1,474 @@
|
||||
pub mod api;
|
||||
pub mod profile;
|
||||
pub mod response;
|
||||
use crate::account::Account;
|
||||
use crate::core::hls_recorder::HlsRecorder;
|
||||
use crate::events::RecorderEvent;
|
||||
use crate::platforms::bilibili::api::{Protocol, Qn};
|
||||
use crate::platforms::PlatformType;
|
||||
use crate::traits::RecorderTrait;
|
||||
use crate::{Recorder, RoomInfo, UserInfo};
|
||||
|
||||
use crate::core::Format;
|
||||
use crate::core::{Codec, HlsStream};
|
||||
use crate::danmu::DanmuStorage;
|
||||
use crate::platforms::bilibili::api::BiliStream;
|
||||
use chrono::Utc;
|
||||
use danmu_stream::danmu_stream::DanmuStream;
|
||||
use danmu_stream::provider::ProviderType;
|
||||
use danmu_stream::DanmuMessageType;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{atomic, Arc};
|
||||
use std::time::Duration;
|
||||
use tokio::sync::{broadcast, Mutex, RwLock};
|
||||
|
||||
use async_trait::async_trait;
|
||||
|
||||
/// A recorder for `BiliBili` live streams
///
/// This recorder fetches, caches and serves TS entries, currently supporting only `StreamType::FMP4`.
/// As high-quality streams are accessible only to logged-in users, the use of a `BiliClient`, which manages cookies, is required.
#[derive(Clone)]
pub struct BiliExtra {
    /// Cache-relative path of the downloaded room cover image, if any.
    cover: Arc<RwLock<Option<String>>>,
    /// The currently selected live stream; `None` while offline or after `reset()`.
    live_stream: Arc<RwLock<Option<BiliStream>>>,
}
|
||||
|
||||
/// `BiliBili`-specific recorder: the generic [`Recorder`] parameterized with [`BiliExtra`].
pub type BiliRecorder = Recorder<BiliExtra>;
|
||||
|
||||
impl BiliRecorder {
    /// Builds a new `BiliBili` recorder for `room_id`.
    ///
    /// All shared state starts empty/zeroed; `last_update` is seeded with
    /// the current timestamp. No network request or background task is
    /// started here — call `run()` to begin monitoring.
    pub async fn new(
        room_id: &str,
        account: &Account,
        cache_dir: PathBuf,
        event_channel: broadcast::Sender<RecorderEvent>,
        update_interval: Arc<atomic::AtomicU64>,
        enabled: bool,
    ) -> Result<Self, crate::errors::RecorderError> {
        let client = reqwest::Client::new();
        let extra = BiliExtra {
            cover: Arc::new(RwLock::new(None)),
            live_stream: Arc::new(RwLock::new(None)),
        };

        let recorder = Self {
            platform: PlatformType::BiliBili,
            room_id: room_id.to_string(),
            account: account.clone(),
            client,
            event_channel,
            cache_dir,
            quit: Arc::new(atomic::AtomicBool::new(false)),
            enabled: Arc::new(atomic::AtomicBool::new(enabled)),
            update_interval,
            is_recording: Arc::new(atomic::AtomicBool::new(false)),
            room_info: Arc::new(RwLock::new(RoomInfo::default())),
            user_info: Arc::new(RwLock::new(UserInfo::default())),
            platform_live_id: Arc::new(RwLock::new(String::new())),
            live_id: Arc::new(RwLock::new(String::new())),
            danmu_storage: Arc::new(RwLock::new(None)),
            last_update: Arc::new(atomic::AtomicI64::new(Utc::now().timestamp())),
            last_sequence: Arc::new(atomic::AtomicU64::new(0)),
            danmu_task: Arc::new(Mutex::new(None)),
            record_task: Arc::new(Mutex::new(None)),
            total_duration: Arc::new(atomic::AtomicU64::new(0)),
            total_size: Arc::new(atomic::AtomicU64::new(0)),
            extra,
        };

        log::info!("[{}]Recorder for room {} created.", room_id, room_id);

        Ok(recorder)
    }

    // Log helpers that prefix every message with the room id.
    fn log_info(&self, message: &str) {
        log::info!("[{}]{}", self.room_id, message);
    }

    fn log_error(&self, message: &str) {
        log::error!("[{}]{}", self.room_id, message);
    }

    /// Clears all per-recording state (stream handle, danmu storage, live
    /// ids, duration/size counters) and bumps `last_update` to now.
    pub async fn reset(&self) {
        *self.extra.live_stream.write().await = None;
        self.last_update
            .store(Utc::now().timestamp(), atomic::Ordering::Relaxed);
        *self.danmu_storage.write().await = None;
        *self.platform_live_id.write().await = String::new();
        *self.live_id.write().await = String::new();
        self.total_duration.store(0, atomic::Ordering::Relaxed);
        self.total_size.store(0, atomic::Ordering::Relaxed);
    }

    /// Polls the room info API and reconciles local state with it.
    ///
    /// Returns `true` when the room is live (even if fetching the stream
    /// URL failed), `false` when offline. On a live-status flip it emits
    /// `LiveStart`/`LiveEnd` and calls `reset()`. On an API error it keeps
    /// and returns the previous status.
    async fn check_status(&self) -> bool {
        let pre_live_status = self.room_info.read().await.status;
        match api::get_room_info(&self.client, &self.account, &self.room_id).await {
            Ok(room_info) => {
                *self.room_info.write().await = RoomInfo {
                    platform: "bilibili".to_string(),
                    room_id: self.room_id.to_string(),
                    room_title: room_info.room_title,
                    room_cover: room_info.room_cover_url.clone(),
                    status: room_info.live_status == 1,
                };
                // Only update user info once
                if self.user_info.read().await.user_id != room_info.user_id {
                    let user_id = room_info.user_id;
                    let user_info = api::get_user_info(&self.client, &self.account, &user_id).await;
                    if let Ok(user_info) = user_info {
                        *self.user_info.write().await = UserInfo {
                            user_id: user_id.to_string(),
                            user_name: user_info.user_name,
                            user_avatar: user_info.user_avatar_url,
                        }
                    } else {
                        self.log_error(&format!(
                            "Failed to get user info: {}",
                            user_info.err().unwrap()
                        ));
                    }
                }
                let live_status = room_info.live_status == 1;

                // handle live notification
                if pre_live_status != live_status {
                    self.log_info(&format!(
                        "Live status changed to {}, enabled: {}",
                        live_status,
                        self.enabled.load(atomic::Ordering::Relaxed)
                    ));

                    if live_status {
                        // Get cover image
                        let room_cover_path = Path::new(PlatformType::BiliBili.as_str())
                            .join(&self.room_id)
                            .join("cover.jpg");
                        let full_room_cover_path = self.cache_dir.join(&room_cover_path);
                        // Cover download is best-effort: failure leaves the
                        // previous cover in place.
                        if (api::download_file(
                            &self.client,
                            &room_info.room_cover_url,
                            &full_room_cover_path,
                        )
                        .await)
                        .is_ok()
                        {
                            *self.extra.cover.write().await =
                                Some(room_cover_path.to_str().unwrap().to_string());
                        }
                        let _ = self.event_channel.send(RecorderEvent::LiveStart {
                            recorder: self.info().await,
                        });
                    } else {
                        let _ = self.event_channel.send(RecorderEvent::LiveEnd {
                            platform: PlatformType::BiliBili,
                            room_id: self.room_id.to_string(),
                            recorder: self.info().await,
                        });
                        *self.live_id.write().await = String::new();
                    }

                    // just doing reset, cuz live status is changed
                    self.reset().await;
                }

                *self.platform_live_id.write().await = room_info.live_start_time.to_string();

                if !live_status {
                    return false;
                }

                // no need to check stream if should not record
                if !self.should_record().await {
                    return true;
                }

                // current_record => update stream
                // auto_start+is_new_stream => update stream and current_record=true
                let new_stream = api::get_stream_info(
                    &self.client,
                    &self.account,
                    &self.room_id,
                    Protocol::HttpHls,
                    Format::TS,
                    &[Codec::Avc, Codec::Hevc],
                    Qn::Q4K,
                )
                .await;

                match new_stream {
                    Ok(stream) => {
                        let pre_live_stream = self.extra.live_stream.read().await.clone();
                        *self.extra.live_stream.write().await = Some(stream.clone());
                        self.last_update
                            .store(Utc::now().timestamp(), atomic::Ordering::Relaxed);

                        log::info!(
                            "[{}]Update to a new stream: {:#?} => {:#?}",
                            &self.room_id,
                            pre_live_stream,
                            stream
                        );

                        true
                    }
                    Err(e) => {
                        // Stream fetch failures are logged but still report
                        // "live" — the caller will retry on the next poll.
                        if let crate::errors::RecorderError::FormatNotFound { format } = e {
                            log::error!("[{}]Format {} not found", &self.room_id, format);

                            true
                        } else {
                            log::error!("[{}]Fetch stream failed: {}", &self.room_id, e);

                            true
                        }
                    }
                }
            }
            Err(e) => {
                log::error!("[{}]Update room status failed: {}", &self.room_id, e);
                // may encounter internet issues, not sure whether the stream is closed or started, just remain
                pre_live_status
            }
        }
    }

    /// Runs the danmaku (chat) stream until it finishes, errors, or closes.
    ///
    /// Every received message is broadcast as a `DanmuReceived` event and,
    /// when a recording is active (danmu storage present), appended to the
    /// danmu file.
    async fn danmu(&self) -> Result<(), crate::errors::RecorderError> {
        let cookies = self.account.cookies.clone();
        let room_id = self.room_id.clone();
        let danmu_stream = DanmuStream::new(ProviderType::BiliBili, &cookies, &room_id).await;
        if danmu_stream.is_err() {
            let err = danmu_stream.err().unwrap();
            log::error!("[{}]Failed to create danmu stream: {}", &self.room_id, err);
            return Err(crate::errors::RecorderError::DanmuStreamError(err));
        }
        let danmu_stream = danmu_stream.unwrap();

        // Drive the stream's `start` future and `recv` concurrently:
        // completion of `start` (Ok or Err) ends the loop, as does a closed
        // or failing `recv`.
        let mut start_fut = Box::pin(danmu_stream.start());

        loop {
            tokio::select! {
                start_res = &mut start_fut => {
                    match start_res {
                        Ok(_) => {
                            log::info!("[{}]Danmu stream finished", &self.room_id);
                            return Ok(());
                        }
                        Err(err) => {
                            log::error!("[{}]Danmu stream start error: {}", &self.room_id, err);
                            return Err(crate::errors::RecorderError::DanmuStreamError(err));
                        }
                    }
                }
                recv_res = danmu_stream.recv() => {
                    match recv_res {
                        Ok(Some(msg)) => {
                            match msg {
                                DanmuMessageType::DanmuMessage(danmu) => {
                                    let ts = Utc::now().timestamp_millis();
                                    let _ = self.event_channel.send(RecorderEvent::DanmuReceived {
                                        room: self.room_id.clone(),
                                        ts,
                                        content: danmu.message.clone(),
                                    });
                                    if let Some(storage) = self.danmu_storage.write().await.as_ref() {
                                        storage.add_line(ts, &danmu.message).await;
                                    }
                                }
                            }
                        }
                        Ok(None) => {
                            log::info!("[{}]Danmu stream closed", &self.room_id);
                            return Ok(());
                        }
                        Err(err) => {
                            log::error!("[{}]Failed to receive danmu message: {}", &self.room_id, err);
                            return Err(crate::errors::RecorderError::DanmuStreamError(err));
                        }
                    }
                }
            }
        }
    }

    /// Update entries for a new live
    ///
    /// Prepares the working directory, danmu storage and cover copy for
    /// `live_id`, emits `RecordStart`, then drives the inner `HlsRecorder`
    /// until the recording stops or fails. Requires a stream to have been
    /// selected by `check_status` first (`NoStreamAvailable` otherwise).
    async fn update_entries(&self, live_id: &str) -> Result<(), crate::errors::RecorderError> {
        let current_stream = self.extra.live_stream.read().await.clone();
        let Some(current_stream) = current_stream else {
            return Err(crate::errors::RecorderError::NoStreamAvailable);
        };

        let work_dir = self.work_dir(live_id).await;
        log::info!("[{}]New record started: {}", self.room_id, live_id);

        let _ = tokio::fs::create_dir_all(&work_dir.full_path()).await;

        let danmu_path = work_dir.with_filename("danmu.txt");
        *self.danmu_storage.write().await = DanmuStorage::new(&danmu_path.full_path()).await;

        // Copy the room-level cover (downloaded in `check_status`) into this
        // recording's directory.
        let cover_path = work_dir.with_filename("cover.jpg");
        let room_cover_path = self
            .cache_dir
            .join(PlatformType::BiliBili.as_str())
            .join(&self.room_id)
            .join("cover.jpg");

        tokio::fs::copy(room_cover_path, &cover_path.full_path())
            .await
            .map_err(crate::errors::RecorderError::IoError)?;

        *self.live_id.write().await = live_id.to_string();

        // send record start event
        let _ = self.event_channel.send(RecorderEvent::RecordStart {
            recorder: self.info().await,
        });

        self.is_recording.store(true, atomic::Ordering::Relaxed);

        let stream = Arc::new(HlsStream::new(
            live_id.to_string(),
            current_stream.url_info.first().unwrap().host.clone(),
            current_stream.base_url.clone(),
            current_stream.url_info.first().unwrap().extra.clone(),
            current_stream.format,
            current_stream.codec,
        ));
        let hls_recorder = HlsRecorder::new(
            self.room_id.to_string(),
            stream,
            self.client.clone(),
            None,
            self.event_channel.clone(),
            work_dir.full_path(),
            self.enabled.clone(),
        )
        .await;
        if let Err(e) = hls_recorder.start().await {
            log::error!("[{}]Failed to start hls recorder: {}", self.room_id, e);
            return Err(e);
        }

        Ok(())
    }
}
|
||||
|
||||
#[async_trait]
impl crate::traits::RecorderTrait<BiliExtra> for BiliRecorder {
    /// Spawns the danmaku task and the main monitor loop, storing both
    /// `JoinHandle`s on the recorder.
    ///
    /// The monitor loop polls `check_status()` until `quit` is set. While
    /// the room is live and recording is wanted, `update_entries` blocks for
    /// the duration of one recording; afterwards `RecordEnd` is emitted,
    /// state is reset, and the loop re-checks after a random 2-5 s delay.
    /// When the room is offline it sleeps `update_interval` seconds between
    /// polls.
    async fn run(&self) {
        let self_clone = self.clone();
        let danmu_task = tokio::spawn(async move {
            let _ = self_clone.danmu().await;
        });
        *self.danmu_task.lock().await = Some(danmu_task);

        let self_clone = self.clone();
        *self.record_task.lock().await = Some(tokio::spawn(async move {
            log::info!("[{}]Start running recorder", self_clone.room_id);
            while !self_clone.quit.load(atomic::Ordering::Relaxed) {
                if self_clone.check_status().await {
                    // Live status is ok, start recording.
                    if self_clone.should_record().await {
                        let live_id = Utc::now().timestamp_millis().to_string();

                        if let Err(e) = self_clone.update_entries(&live_id).await {
                            log::error!("[{}]Update entries error: {}", self_clone.room_id, e);
                        }

                        // NOTE(review): RecordEnd is emitted even when
                        // update_entries returned an error — confirm
                        // downstream consumers tolerate end-without-start.
                        let _ = self_clone.event_channel.send(RecorderEvent::RecordEnd {
                            recorder: self_clone.info().await,
                        });
                    }

                    self_clone
                        .is_recording
                        .store(false, atomic::Ordering::Relaxed);

                    self_clone.reset().await;
                    // go check status again after random 2-5 secs
                    let secs = rand::random::<u64>() % 4 + 2;
                    tokio::time::sleep(Duration::from_secs(secs)).await;
                    continue;
                }

                tokio::time::sleep(Duration::from_secs(
                    self_clone.update_interval.load(atomic::Ordering::Relaxed),
                ))
                .await;
            }
        }));
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    /// Verifies that bilibili's fMP4 media playlists parse with `m3u8_rs`,
    /// and that the init-segment URI (`#EXT-X-MAP`) and the keyframe flag
    /// (second `|`-separated field of `#EXT-BILI-AUX`) can be recovered from
    /// the parser's `unknown_tags`. The fixture deliberately contains the
    /// escaped quotes (`\"`) exactly as the server emits them.
    #[test]
    fn parse_fmp4_playlist() {
        let content = r#"#EXTM3U
#EXT-X-VERSION:7
#EXT-X-START:TIME-OFFSET=0
#EXT-X-MEDIA-SEQUENCE:323066244
#EXT-X-TARGETDURATION:1
#EXT-X-MAP:URI=\"h1758715459.m4s\"
#EXT-BILI-AUX:97d350|K|7d1e3|fe1425ab
#EXTINF:1.00,7d1e3|fe1425ab
323066244.m4s
#EXT-BILI-AUX:97d706|N|757d4|c9094969
#EXTINF:1.00,757d4|c9094969
323066245.m4s
#EXT-BILI-AUX:97daee|N|8223d|f307566a
#EXTINF:1.00,8223d|f307566a
323066246.m4s
#EXT-BILI-AUX:97dee7|N|775cc|428d567
#EXTINF:1.00,775cc|428d567
323066247.m4s
#EXT-BILI-AUX:97e2df|N|10410|9a62fe61
#EXTINF:0.17,10410|9a62fe61
323066248.m4s
#EXT-BILI-AUX:97e397|K|679d2|8fbee7df
#EXTINF:1.00,679d2|8fbee7df
323066249.m4s
#EXT-BILI-AUX:97e74d|N|8907b|67d1c6ad
#EXTINF:1.00,8907b|67d1c6ad
323066250.m4s
#EXT-BILI-AUX:97eb35|N|87374|f6406797
#EXTINF:1.00,87374|f6406797
323066251.m4s
#EXT-BILI-AUX:97ef2d|N|6b792|b8125097
#EXTINF:1.00,6b792|b8125097
323066252.m4s
#EXT-BILI-AUX:97f326|N|e213|b30c02c6
#EXTINF:0.17,e213|b30c02c6
323066253.m4s
#EXT-BILI-AUX:97f3de|K|65754|7ea6dcc8
#EXTINF:1.00,65754|7ea6dcc8
323066254.m4s
"#;
        let (_, pl) = m3u8_rs::parse_media_playlist(content.as_bytes()).unwrap();
        // ExtTag { tag: "X-MAP", rest: Some("URI=\\\"h1758715459.m4s\\\"") }
        let header_url = pl
            .segments
            .first()
            .unwrap()
            .unknown_tags
            .iter()
            .find(|t| t.tag == "X-MAP")
            .map(|t| {
                let rest = t.rest.clone().unwrap();
                rest.split('=').nth(1).unwrap().replace("\\\"", "")
            });
        // #EXT-BILI-AUX:a5e4e0|K|79b3e|ebde469e
        let is_key = pl
            .segments
            .first()
            .unwrap()
            .unknown_tags
            .iter()
            .find(|t| t.tag == "BILI-AUX")
            .map(|t| {
                let rest = t.rest.clone().unwrap();
                rest.split('|').nth(1).unwrap() == "K"
            });
        assert_eq!(is_key, Some(true));
        assert_eq!(header_url, Some("h1758715459.m4s".to_string()));
    }
}
|
||||
966
src-tauri/crates/recorder/src/platforms/bilibili/api.rs
Normal file
@@ -0,0 +1,966 @@
|
||||
use super::profile;
|
||||
use super::profile::Profile;
|
||||
use super::response;
|
||||
use super::response::GeneralResponse;
|
||||
use super::response::PostVideoMetaResponse;
|
||||
use super::response::PreuploadResponse;
|
||||
use super::response::VideoSubmitData;
|
||||
use crate::account::Account;
|
||||
use crate::core::Codec;
|
||||
use crate::core::Format;
|
||||
use crate::errors::RecorderError;
|
||||
use crate::utils::user_agent_generator;
|
||||
use chrono::TimeZone;
|
||||
use pct_str::PctString;
|
||||
use pct_str::URIReserved;
|
||||
use rand::seq::IndexedRandom;
|
||||
use rand::seq::SliceRandom;
|
||||
use regex::Regex;
|
||||
use reqwest::Client;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use serde_json::json;
|
||||
use serde_json::Value;
|
||||
use std::fmt;
|
||||
use std::path::Path;
|
||||
use std::time::Duration;
|
||||
use std::time::SystemTime;
|
||||
use tokio::fs::File;
|
||||
use tokio::io::AsyncReadExt;
|
||||
use tokio::time::Instant;
|
||||
|
||||
/// Borrowed bundle of everything needed to upload one video file.
#[derive(Clone)]
struct UploadParams<'a> {
    /// Response of the preupload API call.
    preupload_response: &'a PreuploadResponse,
    /// Response of the post-video-meta API call.
    post_video_meta_response: &'a PostVideoMetaResponse,
    /// Local path of the video file to upload.
    video_file: &'a Path,
}
|
||||
|
||||
/// Room metadata as returned by `get_room_info`.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct RoomInfo {
    /// Raw live status from the API; `1` means currently live.
    pub live_status: u8,
    /// URL of the room cover image (`data.user_cover`).
    pub room_cover_url: String,
    /// Canonical room id (the API may resolve short ids).
    pub room_id: String,
    /// URL of the latest keyframe snapshot (`data.keyframe`).
    pub room_keyframe_url: String,
    /// Room title.
    pub room_title: String,
    /// Streamer's user id (`data.uid`).
    pub user_id: String,
    /// Live start time as a UTC unix timestamp; `0` when not live.
    pub live_start_time: i64,
}
|
||||
|
||||
/// Streamer profile as returned by `get_user_info`.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct UserInfo {
    /// User id (mid).
    pub user_id: String,
    /// Display name.
    pub user_name: String,
    /// Profile signature/bio.
    pub user_sign: String,
    /// Avatar image URL.
    pub user_avatar_url: String,
}
|
||||
|
||||
/// A freshly generated login QR code.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct QrInfo {
    /// The `qrcode_key` used to poll scan status via `get_qr_status`.
    pub oauth_key: String,
    /// URL to encode into the QR image for the user to scan.
    pub url: String,
}
|
||||
|
||||
/// Result of polling the QR login endpoint.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct QrStatus {
    /// Status code; `0` means the login was confirmed.
    pub code: u8,
    /// Login cookies (`;`-separated), non-empty only when `code == 0`.
    pub cookies: String,
}
|
||||
|
||||
/// A playable live stream selected from the room play-info API.
#[derive(Clone, Debug)]
pub struct BiliStream {
    /// Container format (e.g. TS/fMP4).
    pub format: Format,
    /// Video codec of this stream.
    pub codec: Codec,
    /// Host-relative playlist path shared by all `url_info` hosts.
    pub base_url: String,
    /// Candidate hosts with their per-host query extras; one is chosen at random.
    pub url_info: Vec<UrlInfo>,
    /// Whether the stream is DRM-protected.
    pub drm: bool,
    /// Optional master playlist URL.
    pub master_url: Option<String>,
}
|
||||
|
||||
/// One CDN host option for a [`BiliStream`].
#[derive(Clone, Debug)]
pub struct UrlInfo {
    /// Scheme + host prefix to put before `base_url`.
    pub host: String,
    /// Per-host query-string extras (auth tokens etc.).
    pub extra: String,
}
|
||||
|
||||
/// Transport protocol requested from the play-info API
/// (serialized as `http_stream`/`http_hls` in `get_stream_info`).
#[derive(Clone, Debug)]
#[allow(dead_code)]
pub enum Protocol {
    HttpStream,
    HttpHls,
}
|
||||
|
||||
impl fmt::Display for Protocol {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{:?}", self)
|
||||
}
|
||||
}
|
||||
|
||||
// Quality (`qn`) values accepted by the live play-info API:
// 30000 Dolby (杜比)
// 20000 4K
// 15000 2K
// 10000 Original quality (原画)
// 400 Blu-ray (蓝光)
// 250 Super HD (超清)
// 150 HD (高清)
// 80 Smooth (流畅)

/// Requested stream quality; discriminants are the API's `qn` values listed above.
#[derive(Clone, Debug)]
#[allow(dead_code)]
pub enum Qn {
    Dolby = 30000,
    Q4K = 20000,
    Q2K = 15000,
    Q1080PH = 10000,
    Q1080P = 400,
    Q720P = 250,
    Hd = 150,
    Smooth = 80,
}
|
||||
|
||||
impl fmt::Display for Qn {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{:?}", self)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for BiliStream {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"type: {:?}, codec: {:?}, base_url: {}, url_info: {:?}, drm: {}, master_url: {:?}",
|
||||
self.format, self.codec, self.base_url, self.url_info, self.drm, self.master_url
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl BiliStream {
    /// Bundles the raw fields returned by the play-info API into a stream handle.
    pub fn new(
        format: Format,
        codec: Codec,
        base_url: &str,
        url_info: Vec<UrlInfo>,
        drm: bool,
        master_url: Option<String>,
    ) -> BiliStream {
        BiliStream {
            format,
            codec,
            base_url: base_url.into(),
            url_info,
            drm,
            master_url,
        }
    }

    /// Full playlist URL: a randomly chosen host + `base_url` + that host's `extra`.
    ///
    /// NOTE(review): no `?` is inserted before `extra` here, while `ts_url`
    /// does insert one — presumably `extra` already begins with `?` in the
    /// playlist case; confirm against the API response.
    ///
    /// # Panics
    /// Panics if `url_info` is empty (`choose` returns `None`).
    pub fn index(&self) -> String {
        let url_info = self.url_info.choose(&mut rand::rng()).unwrap();
        format!("{}{}{}", url_info.host, self.base_url, url_info.extra)
    }

    /// Segment URL: replaces the playlist file name at the end of `base_url`
    /// with `seg_name`, prefixes a random host and appends its `extra` as the
    /// query string.
    ///
    /// # Panics
    /// Panics if `url_info` is empty.
    pub fn ts_url(&self, seg_name: &str) -> String {
        let m3u8_filename = self.base_url.split('/').next_back().unwrap();
        let base_url = self.base_url.replace(m3u8_filename, seg_name);
        let url_info = self.url_info.choose(&mut rand::rng()).unwrap();
        format!("{}{}?{}", url_info.host, base_url, url_info.extra)
    }
}
|
||||
|
||||
fn generate_user_agent_header() -> reqwest::header::HeaderMap {
|
||||
let user_agent = user_agent_generator::UserAgentGenerator::new().generate(false);
|
||||
let mut headers = reqwest::header::HeaderMap::new();
|
||||
headers.insert("user-agent", user_agent.parse().unwrap());
|
||||
headers
|
||||
}
|
||||
|
||||
/// Requests a new login QR code from the passport API.
///
/// Returns the `qrcode_key` (stored as `oauth_key`) for later polling and
/// the URL to encode into the QR image.
///
/// # Errors
/// Network/JSON errors from `reqwest`, or `InvalidValue` when the expected
/// fields are missing from the response.
pub async fn get_qr(client: &Client) -> Result<QrInfo, RecorderError> {
    let headers = generate_user_agent_header();
    let res: serde_json::Value = client
        .get("https://passport.bilibili.com/x/passport-login/web/qrcode/generate")
        .headers(headers)
        .send()
        .await?
        .json()
        .await?;
    Ok(QrInfo {
        oauth_key: res["data"]["qrcode_key"]
            .as_str()
            .ok_or(RecorderError::InvalidValue)?
            .to_string(),
        url: res["data"]["url"]
            .as_str()
            .ok_or(RecorderError::InvalidValue)?
            .to_string(),
    })
}
|
||||
|
||||
/// Polls the QR login endpoint for the scan status of `qrcode_key`.
///
/// On success (`data.code == 0`) the login cookies are extracted from the
/// query string of the returned URL, with `&` replaced by `;` so the value
/// can be used directly as a `cookie` header.
pub async fn get_qr_status(client: &Client, qrcode_key: &str) -> Result<QrStatus, RecorderError> {
    let headers = generate_user_agent_header();
    let res: serde_json::Value = client
        .get(format!(
            "https://passport.bilibili.com/x/passport-login/web/qrcode/poll?qrcode_key={qrcode_key}"
        ))
        .headers(headers)
        .send()
        .await?
        .json()
        .await?;
    // NOTE(review): the API's pending/expired codes (e.g. 86038, 86090,
    // 86101) exceed u8 range, so the `as u8` cast truncates them (and a
    // missing code becomes 400 -> 144). Callers appear to rely only on
    // `code == 0`, where this remains correct — confirm.
    let code: u8 = res["data"]["code"].as_u64().unwrap_or(400) as u8;
    let mut cookies: String = String::new();
    if code == 0 {
        let url = res["data"]["url"]
            .as_str()
            .ok_or(RecorderError::InvalidValue)?
            .to_string();
        let query_str = url.split('?').next_back().unwrap();
        cookies = query_str.replace('&', ";");
    }
    Ok(QrStatus { code, cookies })
}
|
||||
|
||||
/// Logs the account out via the passport exit endpoint (best-effort: the
/// response body is ignored).
///
/// # Errors
/// `InvalidCookies` when the stored cookies cannot be used as a header
/// value; otherwise only transport errors from `reqwest`.
pub async fn logout(client: &Client, account: &Account) -> Result<(), RecorderError> {
    let mut headers = generate_user_agent_header();
    let url = "https://passport.bilibili.com/login/exit/v2";
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let params = [("csrf", account.csrf.clone())];
    let _ = client
        .post(url)
        .headers(headers)
        .header("Content-Type", "application/x-www-form-urlencoded")
        .form(&params)
        .send()
        .await?;
    Ok(())
}
|
||||
|
||||
/// Fetches a user's public profile from the WBI-signed space API.
///
/// The query parameters are signed via `get_sign` before the request.
/// Requires the account's cookies; HTTP 412 is mapped to
/// `SecurityControlError` (bilibili's risk control).
///
/// # Errors
/// `InvalidCookies`, `InvalidResponseStatus` for non-2xx responses, and
/// `InvalidResponseJson` when `code` is missing or non-zero.
pub async fn get_user_info(
    client: &Client,
    account: &Account,
    user_id: &str,
) -> Result<UserInfo, RecorderError> {
    let params: Value = json!({
        "mid": user_id.to_string(),
        "platform": "web",
        "web_location": "1550101",
        "token": "",
        "w_webid": "",
    });
    let params = get_sign(client, params).await?;
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let resp = client
        .get(format!(
            "https://api.bilibili.com/x/space/wbi/acc/info?{params}"
        ))
        .headers(headers)
        .send()
        .await?;

    if !resp.status().is_success() {
        if resp.status() == reqwest::StatusCode::PRECONDITION_FAILED {
            return Err(RecorderError::SecurityControlError);
        }
        return Err(RecorderError::InvalidResponseStatus {
            status: resp.status(),
        });
    }

    let res: serde_json::Value = resp.json().await?;
    let code = res["code"]
        .as_u64()
        .ok_or(RecorderError::InvalidResponseJson { resp: res.clone() })?;
    if code != 0 {
        log::error!("Get user info failed {code}");
        return Err(RecorderError::InvalidResponseJson { resp: res.clone() });
    }
    // Missing profile fields degrade to empty strings rather than erroring.
    Ok(UserInfo {
        user_id: user_id.to_string(),
        user_name: res["data"]["name"].as_str().unwrap_or("").to_string(),
        user_sign: res["data"]["sign"].as_str().unwrap_or("").to_string(),
        user_avatar_url: res["data"]["face"].as_str().unwrap_or("").to_string(),
    })
}
|
||||
|
||||
/// Fetches room metadata from the live room-info endpoint.
///
/// Requires the account's cookies; HTTP 412 is mapped to
/// `SecurityControlError` (bilibili's risk control). The reported
/// `live_time` (an Asia/Shanghai local datetime string) is converted to a
/// UTC unix timestamp; the sentinel `"0000-00-00 00:00:00"` yields `0`.
///
/// # Errors
/// `InvalidCookies`, `InvalidResponseStatus` for non-2xx responses,
/// `InvalidResponseJson` when `code` is missing or non-zero, and
/// `InvalidValue` when an expected field is absent or malformed.
pub async fn get_room_info(
    client: &Client,
    account: &Account,
    room_id: &str,
) -> Result<RoomInfo, RecorderError> {
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let response = client
        .get(format!(
            "https://api.live.bilibili.com/room/v1/Room/get_info?room_id={room_id}"
        ))
        .headers(headers)
        .send()
        .await?;

    if !response.status().is_success() {
        if response.status() == reqwest::StatusCode::PRECONDITION_FAILED {
            return Err(RecorderError::SecurityControlError);
        }
        return Err(RecorderError::InvalidResponseStatus {
            status: response.status(),
        });
    }

    let res: serde_json::Value = response.json().await?;
    let code = res["code"]
        .as_u64()
        .ok_or(RecorderError::InvalidResponseJson { resp: res.clone() })?;
    if code != 0 {
        return Err(RecorderError::InvalidResponseJson { resp: res.clone() });
    }

    let room_id = res["data"]["room_id"]
        .as_i64()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let room_title = res["data"]["title"]
        .as_str()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let room_cover_url = res["data"]["user_cover"]
        .as_str()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let room_keyframe_url = res["data"]["keyframe"]
        .as_str()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let user_id = res["data"]["uid"]
        .as_i64()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let live_status = res["data"]["live_status"]
        .as_u64()
        .ok_or(RecorderError::InvalidValue)? as u8;
    // "live_time": "2025-08-09 18:33:35",
    let live_start_time_str = res["data"]["live_time"]
        .as_str()
        .ok_or(RecorderError::InvalidValue)?;
    let live_start_time = if live_start_time_str == "0000-00-00 00:00:00" {
        0
    } else {
        // this is a fixed Asia/Shanghai datetime str
        let naive = chrono::NaiveDateTime::parse_from_str(live_start_time_str, "%Y-%m-%d %H:%M:%S")
            .map_err(|_| RecorderError::InvalidValue)?;
        // Interpret the naive time as if it were UTC, then subtract 8 hours
        // to correct for the Asia/Shanghai (UTC+8) offset, yielding the true
        // UTC unix timestamp.
        chrono::Utc
            .from_local_datetime(&naive)
            .earliest()
            .ok_or(RecorderError::InvalidValue)?
            .timestamp()
            - 8 * 3600
    };
    Ok(RoomInfo {
        live_status,
        room_cover_url,
        room_id,
        room_keyframe_url,
        room_title,
        user_id,
        live_start_time,
    })
}
|
||||
|
||||
/// Get stream info from room id
|
||||
///
|
||||
/// https://socialsisteryi.github.io/bilibili-API-collect/docs/live/info.html#%E8%8E%B7%E5%8F%96%E7%9B%B4%E6%92%AD%E9%97%B4%E4%BF%A1%E6%81%AF-1
|
||||
/// https://api.live.bilibili.com/xlive/web-room/v2/index/getRoomPlayInfo?room_id=31368705&protocol=1&format=1&codec=0&qn=10000&platform=h5
|
||||
pub async fn get_stream_info(
|
||||
client: &Client,
|
||||
account: &Account,
|
||||
room_id: &str,
|
||||
protocol: Protocol,
|
||||
format: Format,
|
||||
codec: &[Codec],
|
||||
qn: Qn,
|
||||
) -> Result<BiliStream, RecorderError> {
|
||||
let url = format!(
|
||||
"https://api.live.bilibili.com/xlive/web-room/v2/index/getRoomPlayInfo?room_id={}&protocol={}&format={}&codec={}&qn={}&platform=h5",
|
||||
room_id,
|
||||
protocol.clone() as u8,
|
||||
format.clone() as u8,
|
||||
codec.iter().map(|c| (c.clone() as u8).to_string()).collect::<Vec<String>>().join(","),
|
||||
qn as i64,
|
||||
);
|
||||
let mut headers = generate_user_agent_header();
|
||||
if let Ok(cookies) = account.cookies.parse() {
|
||||
headers.insert("cookie", cookies);
|
||||
} else {
|
||||
return Err(RecorderError::InvalidCookies);
|
||||
}
|
||||
let response = client.get(url).headers(headers).send().await?;
|
||||
let res: serde_json::Value = response.json().await?;
|
||||
|
||||
let code = res["code"].as_u64().unwrap_or(0);
|
||||
let message = res["message"].as_str().unwrap_or("");
|
||||
if code != 0 {
|
||||
return Err(RecorderError::ApiError {
|
||||
error: format!("Code {code} not found, message: {message}"),
|
||||
});
|
||||
}
|
||||
|
||||
log::debug!("Get stream info response: {res}");
|
||||
|
||||
// Parse the new API response structure
|
||||
let playurl_info = &res["data"]["playurl_info"]["playurl"];
|
||||
let empty_vec = vec![];
|
||||
let streams = playurl_info["stream"].as_array().unwrap_or(&empty_vec);
|
||||
|
||||
if streams.is_empty() {
|
||||
return Err(RecorderError::ApiError {
|
||||
error: "No streams available".to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
// Find the matching protocol
|
||||
let target_protocol = match protocol {
|
||||
Protocol::HttpStream => "http_stream",
|
||||
Protocol::HttpHls => "http_hls",
|
||||
};
|
||||
|
||||
let stream = streams
|
||||
.iter()
|
||||
.find(|s| s["protocol_name"].as_str() == Some(target_protocol))
|
||||
.ok_or_else(|| RecorderError::ApiError {
|
||||
error: format!("Protocol {target_protocol} not found"),
|
||||
})?;
|
||||
|
||||
// Find the matching format
|
||||
let target_format = match format {
|
||||
Format::Flv => "flv",
|
||||
Format::TS => "ts",
|
||||
Format::FMP4 => "fmp4",
|
||||
};
|
||||
|
||||
let empty_vec = vec![];
|
||||
let format_info = stream["format"]
|
||||
.as_array()
|
||||
.unwrap_or(&empty_vec)
|
||||
.iter()
|
||||
.find(|f| f["format_name"].as_str() == Some(target_format))
|
||||
.ok_or_else(|| RecorderError::FormatNotFound {
|
||||
format: target_format.to_owned(),
|
||||
})?;
|
||||
|
||||
// Find the matching codec
|
||||
let target_codecs = codec
|
||||
.iter()
|
||||
.map(|c| match c {
|
||||
Codec::Avc => "avc",
|
||||
Codec::Hevc => "hevc",
|
||||
})
|
||||
.collect::<Vec<&str>>();
|
||||
|
||||
let codec_info = format_info["codec"]
|
||||
.as_array()
|
||||
.unwrap_or(&empty_vec)
|
||||
.iter()
|
||||
.find(|c| target_codecs.contains(&c["codec_name"].as_str().unwrap_or("")))
|
||||
.ok_or_else(|| RecorderError::CodecNotFound {
|
||||
codecs: target_codecs.join(","),
|
||||
})?;
|
||||
|
||||
let url_info = codec_info["url_info"].as_array().unwrap_or(&empty_vec);
|
||||
|
||||
let mut url_info = url_info
|
||||
.iter()
|
||||
.map(|u| UrlInfo {
|
||||
host: u["host"].as_str().unwrap_or("").to_string(),
|
||||
extra: u["extra"].as_str().unwrap_or("").to_string(),
|
||||
})
|
||||
.collect::<Vec<UrlInfo>>();
|
||||
|
||||
url_info.shuffle(&mut rand::rng());
|
||||
|
||||
let drm = codec_info["drm"].as_bool().unwrap_or(false);
|
||||
let base_url = codec_info["base_url"].as_str().unwrap_or("").to_string();
|
||||
let master_url = format_info["master_url"].as_str().map(|s| s.to_string());
|
||||
let codec = codec_info["codec_name"].as_str().unwrap_or("");
|
||||
let codec = match codec {
|
||||
"avc" => Codec::Avc,
|
||||
"hevc" => Codec::Hevc,
|
||||
_ => {
|
||||
return Err(RecorderError::CodecNotFound {
|
||||
codecs: codec.to_string(),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
Ok(BiliStream {
|
||||
format,
|
||||
codec,
|
||||
base_url,
|
||||
url_info,
|
||||
drm,
|
||||
master_url,
|
||||
})
|
||||
}
|
||||
|
||||
/// Download file from url to path
|
||||
pub async fn download_file(client: &Client, url: &str, path: &Path) -> Result<(), RecorderError> {
|
||||
if !path.parent().unwrap().exists() {
|
||||
std::fs::create_dir_all(path.parent().unwrap()).unwrap();
|
||||
}
|
||||
let response = client.get(url).send().await?;
|
||||
let bytes = response.bytes().await?;
|
||||
let mut file = tokio::fs::File::create(&path).await?;
|
||||
let mut content = std::io::Cursor::new(bytes);
|
||||
tokio::io::copy(&mut content, &mut file).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Method from js code
|
||||
pub async fn get_sign(client: &Client, mut parameters: Value) -> Result<String, RecorderError> {
|
||||
let table = vec![
|
||||
46, 47, 18, 2, 53, 8, 23, 32, 15, 50, 10, 31, 58, 3, 45, 35, 27, 43, 5, 49, 33, 9, 42, 19,
|
||||
29, 28, 14, 39, 12, 38, 41, 13, 37, 48, 7, 16, 24, 55, 40, 61, 26, 17, 0, 1, 60, 51, 30, 4,
|
||||
22, 25, 54, 21, 56, 59, 6, 63, 57, 62, 11, 36, 20, 34, 44, 52,
|
||||
];
|
||||
let nav_info: Value = client
|
||||
.get("https://api.bilibili.com/x/web-interface/nav")
|
||||
.headers(generate_user_agent_header())
|
||||
.send()
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
let re = Regex::new(r"wbi/(.*).png").unwrap();
|
||||
let img = re
|
||||
.captures(nav_info["data"]["wbi_img"]["img_url"].as_str().unwrap())
|
||||
.unwrap()
|
||||
.get(1)
|
||||
.unwrap()
|
||||
.as_str();
|
||||
let sub = re
|
||||
.captures(nav_info["data"]["wbi_img"]["sub_url"].as_str().unwrap())
|
||||
.unwrap()
|
||||
.get(1)
|
||||
.unwrap()
|
||||
.as_str();
|
||||
let raw_string = format!("{img}{sub}");
|
||||
let mut encoded = Vec::new();
|
||||
for x in table {
|
||||
if x < raw_string.len() {
|
||||
encoded.push(raw_string.as_bytes()[x]);
|
||||
}
|
||||
}
|
||||
// only keep 32 bytes of encoded
|
||||
encoded = encoded[0..32].to_vec();
|
||||
let encoded = String::from_utf8(encoded).unwrap();
|
||||
// Timestamp in seconds
|
||||
let wts = SystemTime::now()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs();
|
||||
parameters
|
||||
.as_object_mut()
|
||||
.unwrap()
|
||||
.insert("wts".to_owned(), serde_json::Value::String(wts.to_string()));
|
||||
// Get all keys from parameters into vec
|
||||
let mut keys = parameters
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.keys()
|
||||
.map(std::borrow::ToOwned::to_owned)
|
||||
.collect::<Vec<String>>();
|
||||
// sort keys
|
||||
keys.sort();
|
||||
let mut params = String::new();
|
||||
for x in &keys {
|
||||
params.push_str(x);
|
||||
params.push('=');
|
||||
// Value filters !'()* characters
|
||||
let value = parameters
|
||||
.get(x)
|
||||
.unwrap()
|
||||
.as_str()
|
||||
.unwrap()
|
||||
.replace(['!', '\'', '(', ')', '*'], "");
|
||||
let value = PctString::encode(value.chars(), URIReserved);
|
||||
params.push_str(value.as_str());
|
||||
// add & if not last
|
||||
if x != keys.last().unwrap() {
|
||||
params.push('&');
|
||||
}
|
||||
}
|
||||
// md5 params+encoded
|
||||
let w_rid = md5::compute(params.to_string() + encoded.as_str());
|
||||
let params = params + format!("&w_rid={w_rid:x}").as_str();
|
||||
Ok(params)
|
||||
}
|
||||
|
||||
async fn preupload_video(
|
||||
client: &Client,
|
||||
account: &Account,
|
||||
video_file: &Path,
|
||||
) -> Result<PreuploadResponse, RecorderError> {
|
||||
let mut headers = generate_user_agent_header();
|
||||
if let Ok(cookies) = account.cookies.parse() {
|
||||
headers.insert("cookie", cookies);
|
||||
} else {
|
||||
return Err(RecorderError::InvalidCookies);
|
||||
}
|
||||
let url = format!(
|
||||
"https://member.bilibili.com/preupload?name={}&r=upos&profile=ugcfx/bup",
|
||||
video_file.file_name().unwrap().to_str().unwrap()
|
||||
);
|
||||
let response = client
|
||||
.get(&url)
|
||||
.headers(headers)
|
||||
.send()
|
||||
.await?
|
||||
.json::<PreuploadResponse>()
|
||||
.await?;
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
async fn post_video_meta(
|
||||
client: &Client,
|
||||
preupload_response: &PreuploadResponse,
|
||||
video_file: &Path,
|
||||
) -> Result<PostVideoMetaResponse, RecorderError> {
|
||||
let url = format!(
|
||||
"https:{}{}?uploads=&output=json&profile=ugcfx/bup&filesize={}&partsize={}&biz_id={}",
|
||||
preupload_response.endpoint,
|
||||
preupload_response.upos_uri.replace("upos:/", ""),
|
||||
video_file.metadata().unwrap().len(),
|
||||
preupload_response.chunk_size,
|
||||
preupload_response.biz_id
|
||||
);
|
||||
let response = client
|
||||
.post(&url)
|
||||
.header("X-Upos-Auth", &preupload_response.auth)
|
||||
.send()
|
||||
.await?
|
||||
.json::<PostVideoMetaResponse>()
|
||||
.await?;
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
/// Upload the video file in chunks to the upos endpoint obtained from
/// `preupload_video` / `post_video_meta`.
///
/// Reads `chunk_size`-byte chunks, PUTs each with up to 3 retries and
/// exponential backoff, and returns the total number of chunks so the caller
/// can finalise the upload via `end_upload`.
///
/// NOTE(review): if `file.read` returns Err mid-file, the `while let` loop
/// exits silently and `Ok(total_chunks)` is still returned — confirm whether
/// a partial upload should instead surface an error.
async fn upload_video(client: &Client, params: UploadParams<'_>) -> Result<usize, RecorderError> {
    let mut file = File::open(params.video_file).await?;
    let mut buffer = vec![0; params.preupload_response.chunk_size];
    let file_size = params.video_file.metadata()?.len();
    let chunk_size = params.preupload_response.chunk_size as u64;
    // Last chunk may be short, hence the ceiling division.
    let total_chunks = (file_size as f64 / chunk_size as f64).ceil() as usize;

    let start = Instant::now();
    let mut chunk = 0;
    let mut read_total = 0;
    let max_retries = 3;
    let timeout = Duration::from_secs(30);

    // `read` may return fewer bytes than requested, so keep filling the buffer
    // until a full chunk is accumulated (or EOF flushes a short final chunk).
    while let Ok(size) = file.read(&mut buffer[read_total..]).await {
        read_total += size;
        log::debug!("size: {size}, total: {read_total}");
        if size > 0 && (read_total as u64) < chunk_size {
            // Partial read: keep accumulating into the same buffer.
            continue;
        }
        if size == 0 && read_total == 0 {
            // Clean EOF with nothing buffered: all chunks sent.
            break;
        }

        let mut retry_count = 0;
        let mut success = false;

        while retry_count < max_retries && !success {
            // partNumber is 1-based while chunk is 0-based; start/end are byte
            // offsets of this chunk within the whole file.
            let url = format!(
                "https:{}{}?partNumber={}&uploadId={}&chunk={}&chunks={}&size={}&start={}&end={}&total={}",
                params.preupload_response.endpoint,
                params.preupload_response.upos_uri.replace("upos:/", ""),
                chunk + 1,
                params.post_video_meta_response.upload_id,
                chunk,
                total_chunks,
                read_total,
                chunk * params.preupload_response.chunk_size,
                chunk * params.preupload_response.chunk_size + read_total,
                params.video_file.metadata().unwrap().len()
            );

            match client
                .put(&url)
                .header("X-Upos-Auth", &params.preupload_response.auth)
                .header("Content-Type", "application/octet-stream")
                .header("Content-Length", read_total.to_string())
                .timeout(timeout)
                .body(buffer[..read_total].to_vec())
                .send()
                .await
            {
                Ok(response) => {
                    if response.status().is_success() {
                        success = true;
                        // Drain the body so the connection can be reused.
                        let _ = response.text().await?;
                    } else {
                        log::error!("Upload failed with status: {}", response.status());
                        retry_count += 1;
                        if retry_count < max_retries {
                            // Exponential backoff: 2s, 4s, ...
                            tokio::time::sleep(Duration::from_secs(2u64.pow(retry_count as u32)))
                                .await;
                        }
                    }
                }
                Err(e) => {
                    log::error!("Upload error: {e}");
                    retry_count += 1;
                    if retry_count < max_retries {
                        tokio::time::sleep(Duration::from_secs(2u64.pow(retry_count as u32))).await;
                    }
                }
            }
        }

        if !success {
            return Err(RecorderError::UploadError {
                err: format!("Failed to upload chunk {chunk} after {max_retries} retries"),
            });
        }

        chunk += 1;
        read_total = 0;
        log::debug!(
            "[bili]speed: {:.1} KiB/s",
            (chunk * params.preupload_response.chunk_size) as f64
                / start.elapsed().as_secs_f64()
                / 1024.0
        );
    }
    Ok(total_chunks)
}
|
||||
|
||||
async fn end_upload(
|
||||
client: &Client,
|
||||
preupload_response: &PreuploadResponse,
|
||||
post_video_meta_response: &PostVideoMetaResponse,
|
||||
chunks: usize,
|
||||
) -> Result<(), RecorderError> {
|
||||
let url = format!(
|
||||
"https:{}{}?output=json&name={}&profile=ugcfx/bup&uploadId={}&biz_id={}",
|
||||
preupload_response.endpoint,
|
||||
preupload_response.upos_uri.replace("upos:/", ""),
|
||||
preupload_response.upos_uri,
|
||||
post_video_meta_response.upload_id,
|
||||
preupload_response.biz_id
|
||||
);
|
||||
let parts: Vec<Value> = (1..=chunks)
|
||||
.map(|i| json!({ "partNumber": i, "eTag": "etag" }))
|
||||
.collect();
|
||||
let body = json!({ "parts": parts });
|
||||
client
|
||||
.post(&url)
|
||||
.header("X-Upos-Auth", &preupload_response.auth)
|
||||
.header("Content-Type", "application/json; charset=UTF-8")
|
||||
.body(body.to_string())
|
||||
.send()
|
||||
.await?
|
||||
.text()
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn prepare_video(
|
||||
client: &Client,
|
||||
account: &Account,
|
||||
video_file: &Path,
|
||||
) -> Result<profile::Video, RecorderError> {
|
||||
log::info!("Start Preparing Video: {}", video_file.to_str().unwrap());
|
||||
let preupload = preupload_video(client, account, video_file).await?;
|
||||
log::info!("Preupload Response: {preupload:?}");
|
||||
let metaposted = post_video_meta(client, &preupload, video_file).await?;
|
||||
log::info!("Post Video Meta Response: {metaposted:?}");
|
||||
let uploaded = upload_video(
|
||||
client,
|
||||
UploadParams {
|
||||
preupload_response: &preupload,
|
||||
post_video_meta_response: &metaposted,
|
||||
video_file,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
log::info!("Uploaded: {uploaded}");
|
||||
end_upload(client, &preupload, &metaposted, uploaded).await?;
|
||||
let filename = Path::new(&metaposted.key)
|
||||
.file_stem()
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap();
|
||||
Ok(profile::Video {
|
||||
title: filename.to_string(),
|
||||
filename: filename.to_string(),
|
||||
desc: String::new(),
|
||||
cid: preupload.biz_id,
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn submit_video(
|
||||
client: &Client,
|
||||
account: &Account,
|
||||
profile_template: &Profile,
|
||||
video: &profile::Video,
|
||||
) -> Result<VideoSubmitData, RecorderError> {
|
||||
let mut headers = generate_user_agent_header();
|
||||
if let Ok(cookies) = account.cookies.parse() {
|
||||
headers.insert("cookie", cookies);
|
||||
} else {
|
||||
return Err(RecorderError::InvalidCookies);
|
||||
}
|
||||
let url = format!(
|
||||
"https://member.bilibili.com/x/vu/web/add/v3?ts={}&csrf={}",
|
||||
chrono::Local::now().timestamp(),
|
||||
account.csrf
|
||||
);
|
||||
let mut preprofile = profile_template.clone();
|
||||
preprofile.videos.push(video.clone());
|
||||
match client
|
||||
.post(&url)
|
||||
.headers(headers)
|
||||
.header("Content-Type", "application/json; charset=UTF-8")
|
||||
.body(serde_json::ser::to_string(&preprofile).unwrap_or_default())
|
||||
.send()
|
||||
.await
|
||||
{
|
||||
Ok(raw_resp) => {
|
||||
let json: Value = raw_resp.json().await?;
|
||||
if let Ok(resp) = serde_json::from_value::<GeneralResponse>(json.clone()) {
|
||||
match resp.data {
|
||||
response::Data::VideoSubmit(data) => Ok(data),
|
||||
_ => Err(RecorderError::InvalidResponse),
|
||||
}
|
||||
} else {
|
||||
log::error!("Parse response failed: {json}");
|
||||
Err(RecorderError::InvalidResponse)
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Send failed {e}");
|
||||
Err(RecorderError::InvalidResponse)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn upload_cover(
|
||||
client: &Client,
|
||||
account: &Account,
|
||||
cover: &str,
|
||||
) -> Result<String, RecorderError> {
|
||||
let url = format!(
|
||||
"https://member.bilibili.com/x/vu/web/cover/up?ts={}&csrf={}",
|
||||
chrono::Local::now().timestamp_millis(),
|
||||
account.csrf
|
||||
);
|
||||
let mut headers = generate_user_agent_header();
|
||||
if let Ok(cookies) = account.cookies.parse() {
|
||||
headers.insert("cookie", cookies);
|
||||
} else {
|
||||
return Err(RecorderError::InvalidCookies);
|
||||
}
|
||||
let params = [("csrf", account.csrf.clone()), ("cover", cover.to_string())];
|
||||
match client
|
||||
.post(&url)
|
||||
.headers(headers)
|
||||
.header("Content-Type", "application/x-www-form-urlencoded")
|
||||
.form(¶ms)
|
||||
.send()
|
||||
.await
|
||||
{
|
||||
Ok(raw_resp) => {
|
||||
let json: Value = raw_resp.json().await?;
|
||||
if let Ok(resp) = serde_json::from_value::<GeneralResponse>(json.clone()) {
|
||||
match resp.data {
|
||||
response::Data::Cover(data) => Ok(data.url),
|
||||
_ => Err(RecorderError::InvalidResponse),
|
||||
}
|
||||
} else {
|
||||
log::error!("Parse response failed: {json}");
|
||||
Err(RecorderError::InvalidResponse)
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Send failed {e}");
|
||||
Err(RecorderError::InvalidResponse)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send_danmaku(
|
||||
client: &Client,
|
||||
account: &Account,
|
||||
room_id: &str,
|
||||
message: &str,
|
||||
) -> Result<(), RecorderError> {
|
||||
let url = "https://api.live.bilibili.com/msg/send".to_string();
|
||||
let mut headers = generate_user_agent_header();
|
||||
if let Ok(cookies) = account.cookies.parse() {
|
||||
headers.insert("cookie", cookies);
|
||||
} else {
|
||||
return Err(RecorderError::InvalidCookies);
|
||||
}
|
||||
let params = [
|
||||
("bubble", "0"),
|
||||
("msg", message),
|
||||
("color", "16777215"),
|
||||
("mode", "1"),
|
||||
("fontsize", "25"),
|
||||
("room_type", "0"),
|
||||
("rnd", &format!("{}", chrono::Local::now().timestamp())),
|
||||
("roomid", room_id),
|
||||
("csrf", &account.csrf),
|
||||
("csrf_token", &account.csrf),
|
||||
];
|
||||
let _ = client
|
||||
.post(&url)
|
||||
.headers(headers)
|
||||
.header("Content-Type", "application/x-www-form-urlencoded")
|
||||
.form(¶ms)
|
||||
.send()
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_video_typelist(
|
||||
client: &Client,
|
||||
account: &Account,
|
||||
) -> Result<Vec<response::Typelist>, RecorderError> {
|
||||
let url = "https://member.bilibili.com/x/vupre/web/archive/pre?lang=cn";
|
||||
let mut headers = generate_user_agent_header();
|
||||
if let Ok(cookies) = account.cookies.parse() {
|
||||
headers.insert("cookie", cookies);
|
||||
} else {
|
||||
return Err(RecorderError::InvalidCookies);
|
||||
}
|
||||
let resp: GeneralResponse = client
|
||||
.get(url)
|
||||
.headers(headers)
|
||||
.send()
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
if resp.code == 0 {
|
||||
if let response::Data::VideoTypeList(data) = resp.data {
|
||||
Ok(data.typelist)
|
||||
} else {
|
||||
Err(RecorderError::InvalidResponse)
|
||||
}
|
||||
} else {
|
||||
log::error!("Get video typelist failed with code {}", resp.code);
|
||||
Err(RecorderError::InvalidResponse)
|
||||
}
|
||||
}
|
||||
355
src-tauri/crates/recorder/src/platforms/douyin.rs
Normal file
@@ -0,0 +1,355 @@
|
||||
pub mod api;
|
||||
mod response;
|
||||
pub mod stream_info;
|
||||
use crate::account::Account;
|
||||
use crate::core::hls_recorder::{construct_stream_from_variant, HlsRecorder};
|
||||
use crate::core::{Codec, Format};
|
||||
use crate::errors::RecorderError;
|
||||
use crate::events::RecorderEvent;
|
||||
use crate::platforms::douyin::stream_info::DouyinStream;
|
||||
use crate::traits::RecorderTrait;
|
||||
use crate::{Recorder, RoomInfo, UserInfo};
|
||||
use async_trait::async_trait;
|
||||
use chrono::Utc;
|
||||
use danmu_stream::danmu_stream::DanmuStream;
|
||||
use danmu_stream::provider::ProviderType;
|
||||
use danmu_stream::DanmuMessageType;
|
||||
use rand::random;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::{atomic, Arc};
|
||||
use std::time::Duration;
|
||||
use tokio::sync::{broadcast, Mutex, RwLock};
|
||||
|
||||
use crate::danmu::DanmuStorage;
|
||||
use crate::platforms::PlatformType;
|
||||
|
||||
/// Recorder specialised for douyin live rooms.
pub type DouyinRecorder = Recorder<DouyinExtra>;

/// Douyin-specific recorder state carried alongside the generic `Recorder`.
#[derive(Clone)]
pub struct DouyinExtra {
    // Douyin's encrypted user id (`sec_user_id`), required by room-info queries.
    sec_user_id: String,
    // Most recently parsed stream description; None while the room is offline
    // or before the first successful status check.
    live_stream: Arc<RwLock<Option<DouyinStream>>>,
}
|
||||
|
||||
fn get_best_stream_url(stream: &DouyinStream) -> Option<String> {
|
||||
// find the best stream url
|
||||
if stream.data.origin.main.hls.is_empty() {
|
||||
log::error!("No stream url found in stream_data: {stream:#?}");
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(stream.data.origin.main.hls.clone())
|
||||
}
|
||||
|
||||
impl DouyinRecorder {
    /// Build a new douyin recorder. This only assembles state; nothing is
    /// fetched or recorded until the `run()` task is started.
    pub async fn new(
        room_id: &str,
        sec_user_id: &str,
        account: &Account,
        cache_dir: PathBuf,
        channel: broadcast::Sender<RecorderEvent>,
        update_interval: Arc<atomic::AtomicU64>,
        enabled: bool,
    ) -> Result<Self, crate::errors::RecorderError> {
        Ok(Self {
            platform: PlatformType::Douyin,
            room_id: room_id.to_string(),
            account: account.clone(),
            client: reqwest::Client::new(),
            event_channel: channel,
            cache_dir,
            quit: Arc::new(atomic::AtomicBool::new(false)),
            enabled: Arc::new(atomic::AtomicBool::new(enabled)),
            is_recording: Arc::new(atomic::AtomicBool::new(false)),
            room_info: Arc::new(RwLock::new(RoomInfo::default())),
            user_info: Arc::new(RwLock::new(UserInfo::default())),
            platform_live_id: Arc::new(RwLock::new(String::new())),
            live_id: Arc::new(RwLock::new(String::new())),
            danmu_storage: Arc::new(RwLock::new(None)),
            last_update: Arc::new(atomic::AtomicI64::new(Utc::now().timestamp())),
            last_sequence: Arc::new(atomic::AtomicU64::new(0)),
            danmu_task: Arc::new(Mutex::new(None)),
            record_task: Arc::new(Mutex::new(None)),
            update_interval,
            total_duration: Arc::new(atomic::AtomicU64::new(0)),
            total_size: Arc::new(atomic::AtomicU64::new(0)),
            extra: DouyinExtra {
                sec_user_id: sec_user_id.to_string(),
                live_stream: Arc::new(RwLock::new(None)),
            },
        })
    }

    /// Poll douyin for the room's live status and refresh the cached
    /// room/user info.
    ///
    /// Returns `true` when the room is live, `false` otherwise. On a failed
    /// status request the previous status is returned unchanged. Side
    /// effects: emits `LiveStart`/`LiveEnd` events on status transitions,
    /// resets per-session state when the room goes offline, and — when
    /// recording should proceed — parses and caches the stream description
    /// plus the platform live id.
    async fn check_status(&self) -> bool {
        let pre_live_status = self.room_info.read().await.status;
        match api::get_room_info(
            &self.client,
            &self.account,
            &self.room_id,
            &self.extra.sec_user_id,
        )
        .await
        {
            Ok(info) => {
                let live_status = info.status == 0; // room_status == 0 means the room is live

                *self.room_info.write().await = RoomInfo {
                    platform: PlatformType::Douyin.as_str().to_string(),
                    room_id: self.room_id.to_string(),
                    room_title: info.room_title.clone(),
                    room_cover: info.cover.clone().unwrap_or_default(),
                    status: live_status,
                };

                *self.user_info.write().await = UserInfo {
                    user_id: info.sec_user_id.clone(),
                    user_name: info.user_name.clone(),
                    user_avatar: info.user_avatar.clone(),
                };

                if pre_live_status != live_status {
                    // live status changed, reset current record flag
                    log::info!(
                        "[{}]Live status changed to {}, auto_start: {}",
                        self.room_id,
                        live_status,
                        self.enabled.load(atomic::Ordering::Relaxed)
                    );

                    if live_status {
                        let _ = self.event_channel.send(RecorderEvent::LiveStart {
                            recorder: self.info().await,
                        });
                    } else {
                        let _ = self.event_channel.send(RecorderEvent::LiveEnd {
                            platform: PlatformType::Douyin,
                            room_id: self.room_id.clone(),
                            recorder: self.info().await,
                        });
                    }

                    self.reset().await;
                }

                if !live_status {
                    self.reset().await;

                    return false;
                }

                let should_record = self.should_record().await;

                if !should_record {
                    // Live but recording disabled: report live without
                    // touching stream state.
                    return true;
                }

                // Get stream URL when live starts
                if !info.hls_url.is_empty() {
                    // Only set stream URL, don't create record yet
                    // Record will be created when first ts download succeeds
                    // parse info.stream_data into DouyinStream
                    let stream_data = info.stream_data.clone();
                    let Ok(stream) = serde_json::from_str::<DouyinStream>(&stream_data) else {
                        log::error!("Failed to parse stream data: {:#?}", &info);
                        return false;
                    };
                    let Some(new_stream_url) = get_best_stream_url(&stream) else {
                        log::error!("No stream url found in stream_data: {stream:#?}");
                        return false;
                    };

                    log::info!("New douyin stream URL: {}", new_stream_url.clone());
                    *self.extra.live_stream.write().await = Some(stream);
                    (*self.platform_live_id.write().await).clone_from(&info.room_id_str);
                }

                true
            }
            Err(e) => {
                log::warn!("[{}]Update room status failed: {}", &self.room_id, e);
                pre_live_status
            }
        }
    }

    /// Connect to douyin's danmu provider and forward received messages as
    /// `DanmuReceived` events, also persisting them to the current
    /// `DanmuStorage` (when one is set up by `update_entries`).
    ///
    /// Runs until the stream finishes, closes, or errors.
    async fn danmu(&self) -> Result<(), crate::errors::RecorderError> {
        let cookies = self.account.cookies.clone();
        // The danmu provider expects the numeric platform live id; fall back
        // to 0 when it is unset/unparsable.
        let danmu_room_id = self
            .platform_live_id
            .read()
            .await
            .clone()
            .parse::<i64>()
            .unwrap_or(0);
        let danmu_stream =
            DanmuStream::new(ProviderType::Douyin, &cookies, &danmu_room_id.to_string()).await;
        if danmu_stream.is_err() {
            let err = danmu_stream.err().unwrap();
            log::error!("Failed to create danmu stream: {err}");
            return Err(crate::errors::RecorderError::DanmuStreamError(err));
        }
        let danmu_stream = danmu_stream.unwrap();

        // Drive the connection future and the receive loop concurrently:
        // `start()` is polled alongside `recv()` via select!.
        let mut start_fut = Box::pin(danmu_stream.start());

        loop {
            tokio::select! {
                start_res = &mut start_fut => {
                    match start_res {
                        Ok(_) => {
                            log::info!("Danmu stream finished");
                            return Ok(());
                        }
                        Err(err) => {
                            log::error!("Danmu stream start error: {err}");
                            return Err(crate::errors::RecorderError::DanmuStreamError(err));
                        }
                    }
                }
                recv_res = danmu_stream.recv() => {
                    match recv_res {
                        Ok(Some(msg)) => {
                            match msg {
                                DanmuMessageType::DanmuMessage(danmu) => {
                                    let ts = Utc::now().timestamp_millis();
                                    let _ = self.event_channel.send(RecorderEvent::DanmuReceived {
                                        room: self.room_id.clone(),
                                        ts,
                                        content: danmu.message.clone(),
                                    });

                                    if let Some(danmu_storage) = self.danmu_storage.read().await.as_ref() {
                                        danmu_storage.add_line(ts, &danmu.message).await;
                                    }
                                }
                            }
                        }
                        Ok(None) => {
                            log::info!("Danmu stream closed");
                            return Ok(());
                        }
                        Err(err) => {
                            log::error!("Failed to receive danmu message: {err}");
                            return Err(crate::errors::RecorderError::DanmuStreamError(err));
                        }
                    }
                }
            }
        }
    }

    /// Clear all per-live-session state (live id, counters, cached stream)
    /// and abort a running danmu task, if any.
    async fn reset(&self) {
        *self.platform_live_id.write().await = String::new();
        self.last_update
            .store(Utc::now().timestamp(), atomic::Ordering::Relaxed);
        self.last_sequence.store(0, atomic::Ordering::Relaxed);
        self.total_duration.store(0, atomic::Ordering::Relaxed);
        self.total_size.store(0, atomic::Ordering::Relaxed);
        *self.extra.live_stream.write().await = None;
        if let Some(danmu_task) = self.danmu_task.lock().await.as_mut() {
            danmu_task.abort();
            // Await the aborted handle so the task is fully torn down.
            let _ = danmu_task.await;
            log::info!("Danmu task aborted");
        }
    }

    /// Set up a recording session for `live_id` and run the HLS recorder
    /// until the live ends or errors.
    ///
    /// Side effects: creates the work directory, downloads the room cover,
    /// opens danmu storage, spawns the danmu task and emits `RecordStart`.
    /// Blocks (async) for the whole duration of the recording.
    async fn update_entries(&self, live_id: &str) -> Result<(), RecorderError> {
        // Get current room info and stream URL
        let room_info = self.room_info.read().await.clone();
        let Some(stream) = self.extra.live_stream.read().await.clone() else {
            return Err(RecorderError::NoStreamAvailable);
        };
        let Some(stream_url) = get_best_stream_url(&stream) else {
            return Err(RecorderError::NoStreamAvailable);
        };

        let work_dir = self.work_dir(live_id).await;
        let _ = tokio::fs::create_dir_all(&work_dir.full_path()).await;

        // download cover (best effort, failures are ignored)
        let cover_url = room_info.room_cover.clone();
        let cover_path = work_dir.with_filename("cover.jpg");
        let _ = api::download_file(&self.client, &cover_url, &cover_path.full_path()).await;

        // Setup danmu store
        let danmu_file_path = work_dir.with_filename("danmu.txt");
        let danmu_storage = DanmuStorage::new(&danmu_file_path.full_path()).await;
        *self.danmu_storage.write().await = danmu_storage;

        // Start danmu task
        *self.live_id.write().await = live_id.to_string();

        let self_clone = self.clone();
        log::info!("Start fetching danmu for live {live_id}");
        *self.danmu_task.lock().await = Some(tokio::spawn(async move {
            let _ = self_clone.danmu().await;
        }));

        let _ = self.event_channel.send(RecorderEvent::RecordStart {
            recorder: self.info().await,
        });

        // Hand the variant URL to the generic HLS recorder; it runs until the
        // stream ends or an error occurs.
        let hls_stream =
            construct_stream_from_variant(live_id, &stream_url, Format::TS, Codec::Avc)
                .await
                .map_err(|_| RecorderError::NoStreamAvailable)?;
        let hls_recorder = HlsRecorder::new(
            self.room_id.to_string(),
            Arc::new(hls_stream),
            self.client.clone(),
            None,
            self.event_channel.clone(),
            work_dir.full_path(),
            self.enabled.clone(),
        )
        .await;
        if let Err(e) = hls_recorder.start().await {
            log::error!("[{}]Error from hls recorder: {}", self.room_id, e);
            return Err(e);
        }

        Ok(())
    }
}
|
||||
|
||||
#[async_trait]
impl crate::traits::RecorderTrait<DouyinExtra> for DouyinRecorder {
    /// Spawn the main recording loop as a background task (stored in
    /// `record_task`) and return immediately.
    ///
    /// The loop polls `check_status` until `quit` is set: while live and
    /// enabled it records via `update_entries` (which blocks for the whole
    /// live), emits `RecordEnd` afterwards, resets session state, then sleeps
    /// briefly (random 0-4s) before re-checking; while offline it sleeps for
    /// `update_interval` seconds between polls.
    async fn run(&self) {
        let self_clone = self.clone();
        *self.record_task.lock().await = Some(tokio::spawn(async move {
            while !self_clone.quit.load(atomic::Ordering::Relaxed) {
                if self_clone.check_status().await {
                    // Live status is ok, start recording
                    if self_clone.should_record().await {
                        self_clone
                            .is_recording
                            .store(true, atomic::Ordering::Relaxed);
                        // Session id is the start timestamp in milliseconds.
                        let live_id = Utc::now().timestamp_millis().to_string();
                        if let Err(e) = self_clone.update_entries(&live_id).await {
                            log::error!("[{}]Update entries error: {}", self_clone.room_id, e);
                        }
                    }
                    // Only emit RecordEnd when a recording actually started.
                    if self_clone.is_recording.load(atomic::Ordering::Relaxed) {
                        let _ = self_clone.event_channel.send(RecorderEvent::RecordEnd {
                            recorder: self_clone.info().await,
                        });
                    }
                    self_clone
                        .is_recording
                        .store(false, atomic::Ordering::Relaxed);
                    self_clone.reset().await;
                    // Check status again after some seconds
                    let secs = random::<u64>() % 5;
                    tokio::time::sleep(Duration::from_secs(secs)).await;
                    continue;
                }

                tokio::time::sleep(Duration::from_secs(
                    self_clone.update_interval.load(atomic::Ordering::Relaxed),
                ))
                .await;
            }
            log::info!("[{}]Recording thread quit.", self_clone.room_id);
        }));
    }
}
|
||||
392
src-tauri/crates/recorder/src/platforms/douyin/api.rs
Normal file
@@ -0,0 +1,392 @@
|
||||
use crate::account::Account;
|
||||
use crate::errors::RecorderError;
|
||||
use crate::utils::user_agent_generator;
|
||||
use deno_core::JsRuntime;
|
||||
use deno_core::RuntimeOptions;
|
||||
use regex::Regex;
|
||||
use reqwest::Client;
|
||||
use uuid::Uuid;
|
||||
|
||||
use super::response::DouyinRoomInfoResponse;
|
||||
use std::path::Path;
|
||||
|
||||
/// Minimal, flattened view of douyin's room-info response containing just
/// the fields the recorder needs.
#[derive(Debug, Clone)]
pub struct DouyinBasicRoomInfo {
    // Numeric room id as a string; used as the danmu room id downstream.
    pub room_id_str: String,
    pub room_title: String,
    // Cover image URL, when the response provides one.
    pub cover: Option<String>,
    // Raw status code from the API; 0 means the room is currently live
    // (see `check_status` in the douyin recorder).
    pub status: i64,
    pub hls_url: String,
    // Raw JSON of the stream description; parsed into `DouyinStream` lazily.
    pub stream_data: String,
    // user related
    pub user_name: String,
    pub user_avatar: String,
    // Douyin's encrypted user identifier.
    pub sec_user_id: String,
}
|
||||
|
||||
fn setup_js_runtime() -> Result<JsRuntime, RecorderError> {
|
||||
// Create a new V8 runtime
|
||||
let mut runtime = JsRuntime::new(RuntimeOptions::default());
|
||||
|
||||
// Add global CryptoJS object
|
||||
let crypto_js = include_str!("js/a_bogus.js");
|
||||
runtime
|
||||
.execute_script(
|
||||
"<a_bogus.js>",
|
||||
deno_core::FastString::from_static(crypto_js),
|
||||
)
|
||||
.map_err(|e| RecorderError::JsRuntimeError(format!("Failed to execute crypto-js: {e}")))?;
|
||||
Ok(runtime)
|
||||
}
|
||||
|
||||
async fn generate_a_bogus(params: &str, user_agent: &str) -> Result<String, RecorderError> {
|
||||
let mut runtime = setup_js_runtime()?;
|
||||
// Call the get_wss_url function
|
||||
let sign_call = format!("generate_a_bogus(\"{params}\", \"{user_agent}\")");
|
||||
let result = runtime
|
||||
.execute_script("<sign_call>", deno_core::FastString::from(sign_call))
|
||||
.map_err(|e| RecorderError::JsRuntimeError(format!("Failed to execute JavaScript: {e}")))?;
|
||||
|
||||
// Get the result from the V8 runtime
|
||||
let mut scope = runtime.handle_scope();
|
||||
let local = deno_core::v8::Local::new(&mut scope, result);
|
||||
let url = local
|
||||
.to_string(&mut scope)
|
||||
.unwrap()
|
||||
.to_rust_string_lossy(&mut scope);
|
||||
Ok(url)
|
||||
}
|
||||
|
||||
async fn generate_ms_token() -> String {
|
||||
// generate a random 32 characters uuid string
|
||||
let uuid = Uuid::new_v4();
|
||||
uuid.to_string()
|
||||
}
|
||||
|
||||
pub fn generate_user_agent_header() -> reqwest::header::HeaderMap {
|
||||
let user_agent = user_agent_generator::UserAgentGenerator::new().generate(false);
|
||||
let mut headers = reqwest::header::HeaderMap::new();
|
||||
headers.insert("user-agent", user_agent.parse().unwrap());
|
||||
headers
|
||||
}
|
||||
|
||||
pub async fn get_room_info(
|
||||
client: &Client,
|
||||
account: &Account,
|
||||
room_id: &str,
|
||||
sec_user_id: &str,
|
||||
) -> Result<DouyinBasicRoomInfo, RecorderError> {
|
||||
let mut headers = generate_user_agent_header();
|
||||
headers.insert("Referer", "https://live.douyin.com/".parse().unwrap());
|
||||
headers.insert("Cookie", account.cookies.clone().parse().unwrap());
|
||||
let ms_token = generate_ms_token().await;
|
||||
let user_agent = headers.get("user-agent").unwrap().to_str().unwrap();
|
||||
let params = format!(
|
||||
"aid=6383&app_name=douyin_web&live_id=1&device_platform=web&language=zh-CN&enter_from=web_live&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Chrome&browser_version=122.0.0.0&web_rid={room_id}&ms_token={ms_token}");
|
||||
let a_bogus = generate_a_bogus(¶ms, user_agent).await?;
|
||||
// log::debug!("params: {params}");
|
||||
// log::debug!("user_agent: {user_agent}");
|
||||
// log::debug!("a_bogus: {a_bogus}");
|
||||
let url = format!(
|
||||
"https://live.douyin.com/webcast/room/web/enter/?aid=6383&app_name=douyin_web&live_id=1&device_platform=web&language=zh-CN&enter_from=web_live&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Chrome&browser_version=122.0.0.0&web_rid={room_id}&ms_token={ms_token}&a_bogus={a_bogus}"
|
||||
);
|
||||
|
||||
let resp = client.get(&url).headers(headers).send().await?;
|
||||
|
||||
let status = resp.status();
|
||||
let text = resp.text().await?;
|
||||
|
||||
if text.is_empty() {
|
||||
log::debug!("Empty room info response, trying H5 API");
|
||||
return get_room_info_h5(client, account, room_id, sec_user_id).await;
|
||||
}
|
||||
|
||||
if status.is_success() {
|
||||
if let Ok(data) = serde_json::from_str::<DouyinRoomInfoResponse>(&text) {
|
||||
let cover = data
|
||||
.data
|
||||
.data
|
||||
.first()
|
||||
.and_then(|data| data.cover.as_ref())
|
||||
.map(|cover| cover.url_list[0].clone());
|
||||
return Ok(DouyinBasicRoomInfo {
|
||||
room_id_str: data.data.data[0].id_str.clone(),
|
||||
sec_user_id: sec_user_id.to_string(),
|
||||
cover,
|
||||
room_title: data.data.data[0].title.clone(),
|
||||
user_name: data.data.user.nickname.clone(),
|
||||
user_avatar: data.data.user.avatar_thumb.url_list[0].clone(),
|
||||
status: data.data.room_status,
|
||||
hls_url: data.data.data[0]
|
||||
.stream_url
|
||||
.as_ref()
|
||||
.map(|stream_url| stream_url.hls_pull_url.clone())
|
||||
.unwrap_or_default(),
|
||||
stream_data: data.data.data[0]
|
||||
.stream_url
|
||||
.as_ref()
|
||||
.map(|s| s.live_core_sdk_data.pull_data.stream_data.clone())
|
||||
.unwrap_or_default(),
|
||||
});
|
||||
}
|
||||
log::error!("Failed to parse room info response: {text}");
|
||||
return get_room_info_h5(client, account, room_id, sec_user_id).await;
|
||||
}
|
||||
|
||||
log::error!("Failed to get room info: {status}");
|
||||
return get_room_info_h5(client, account, room_id, sec_user_id).await;
|
||||
}
|
||||
|
||||
pub async fn get_room_info_h5(
|
||||
client: &Client,
|
||||
account: &Account,
|
||||
room_id: &str,
|
||||
sec_user_id: &str,
|
||||
) -> Result<DouyinBasicRoomInfo, RecorderError> {
|
||||
// 参考biliup实现,构建完整的URL参数
|
||||
let room_id_str = room_id.to_string();
|
||||
// https://webcast.amemv.com/webcast/room/reflow/info/?type_id=0&live_id=1&version_code=99.99.99&app_id=1128&room_id=10000&sec_user_id=MS4wLjAB&aid=6383&device_platform=web&browser_language=zh-CN&browser_platform=Win32&browser_name=Mozilla&browser_version=5.0
|
||||
let url_params = [
|
||||
("type_id", "0"),
|
||||
("live_id", "1"),
|
||||
("version_code", "99.99.99"),
|
||||
("app_id", "1128"),
|
||||
("room_id", &room_id_str),
|
||||
("sec_user_id", sec_user_id),
|
||||
("aid", "6383"),
|
||||
("device_platform", "web"),
|
||||
];
|
||||
|
||||
// 构建URL
|
||||
let query_string = url_params
|
||||
.iter()
|
||||
.map(|(k, v)| format!("{k}={v}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join("&");
|
||||
let url = format!("https://webcast.amemv.com/webcast/room/reflow/info/?{query_string}");
|
||||
|
||||
let mut headers = generate_user_agent_header();
|
||||
headers.insert("Referer", "https://live.douyin.com/".parse().unwrap());
|
||||
headers.insert("Cookie", account.cookies.clone().parse().unwrap());
|
||||
|
||||
let resp = client.get(&url).headers(headers).send().await?;
|
||||
|
||||
let status = resp.status();
|
||||
let text = resp.text().await?;
|
||||
|
||||
if status.is_success() {
|
||||
// Try to parse as H5 response format
|
||||
if let Ok(h5_data) =
|
||||
serde_json::from_str::<super::response::DouyinH5RoomInfoResponse>(&text)
|
||||
{
|
||||
// Extract RoomBasicInfo from H5 response
|
||||
let room = &h5_data.data.room;
|
||||
let owner = &room.owner;
|
||||
|
||||
let cover = room
|
||||
.cover
|
||||
.as_ref()
|
||||
.and_then(|c| c.url_list.first().cloned());
|
||||
let hls_url = room
|
||||
.stream_url
|
||||
.as_ref()
|
||||
.map(|s| s.hls_pull_url.clone())
|
||||
.unwrap_or_default();
|
||||
|
||||
return Ok(DouyinBasicRoomInfo {
|
||||
room_id_str: room.id_str.clone(),
|
||||
room_title: room.title.clone(),
|
||||
cover,
|
||||
status: if room.status == 2 { 0 } else { 1 },
|
||||
hls_url,
|
||||
user_name: owner.nickname.clone(),
|
||||
user_avatar: owner
|
||||
.avatar_thumb
|
||||
.url_list
|
||||
.first()
|
||||
.unwrap_or(&String::new())
|
||||
.clone(),
|
||||
sec_user_id: owner.sec_uid.clone(),
|
||||
stream_data: room
|
||||
.stream_url
|
||||
.as_ref()
|
||||
.map(|s| s.live_core_sdk_data.pull_data.stream_data.clone())
|
||||
.unwrap_or_default(),
|
||||
});
|
||||
}
|
||||
|
||||
// If that fails, try to parse as a generic JSON to see what we got
|
||||
if let Ok(json_value) = serde_json::from_str::<serde_json::Value>(&text) {
|
||||
// Check if it's an error response
|
||||
if let Some(status_code) = json_value
|
||||
.get("status_code")
|
||||
.and_then(serde_json::Value::as_i64)
|
||||
{
|
||||
if status_code != 0 {
|
||||
let error_msg = json_value
|
||||
.get("data")
|
||||
.and_then(|v| v.get("message").and_then(|v| v.as_str()))
|
||||
.unwrap_or("Unknown error");
|
||||
|
||||
if status_code == 10011 {
|
||||
return Err(RecorderError::ApiError {
|
||||
error: error_msg.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
return Err(RecorderError::ApiError {
|
||||
error: format!(
|
||||
"API returned error status_code: {status_code} - {error_msg}"
|
||||
),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// 检查是否是"invalid session"错误
|
||||
if let Some(status_message) = json_value.get("status_message").and_then(|v| v.as_str())
|
||||
{
|
||||
if status_message.contains("invalid session") {
|
||||
return Err(RecorderError::ApiError { error:
|
||||
"Invalid session - please check your cookies. Make sure you have valid sessionid, passport_csrf_token, and other authentication cookies from douyin.com".to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return Err(RecorderError::ApiError {
|
||||
error: format!("Failed to parse h5 room info response: {text}"),
|
||||
});
|
||||
}
|
||||
log::error!("Failed to parse h5 room info response: {text}");
|
||||
return Err(RecorderError::ApiError {
|
||||
error: format!("Failed to parse h5 room info response: {text}"),
|
||||
});
|
||||
}
|
||||
|
||||
log::error!("Failed to get h5 room info: {status}");
|
||||
Err(RecorderError::ApiError {
|
||||
error: format!("Failed to get h5 room info: {status} {text}"),
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_user_info(
|
||||
client: &Client,
|
||||
account: &Account,
|
||||
) -> Result<super::response::User, RecorderError> {
|
||||
// Use the IM spotlight relation API to get user info
|
||||
let url = "https://www.douyin.com/aweme/v1/web/im/spotlight/relation/";
|
||||
let mut headers = generate_user_agent_header();
|
||||
headers.insert("Referer", "https://www.douyin.com/".parse().unwrap());
|
||||
headers.insert("Cookie", account.cookies.clone().parse().unwrap());
|
||||
|
||||
let resp = client.get(url).headers(headers).send().await?;
|
||||
|
||||
let status = resp.status();
|
||||
let text = resp.text().await?;
|
||||
|
||||
if status.is_success() {
|
||||
if let Ok(data) = serde_json::from_str::<super::response::DouyinRelationResponse>(&text) {
|
||||
if data.status_code == 0 {
|
||||
let owner_sec_uid = &data.owner_sec_uid;
|
||||
|
||||
// Find the user's own info in the followings list by matching sec_uid
|
||||
if let Some(followings) = &data.followings {
|
||||
for following in followings {
|
||||
if following.sec_uid == *owner_sec_uid {
|
||||
let user = super::response::User {
|
||||
id_str: following.uid.clone(),
|
||||
sec_uid: following.sec_uid.clone(),
|
||||
nickname: following.nickname.clone(),
|
||||
avatar_thumb: following.avatar_thumb.clone(),
|
||||
follow_info: super::response::FollowInfo::default(),
|
||||
foreign_user: 0,
|
||||
open_id_str: String::new(),
|
||||
};
|
||||
return Ok(user);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If not found in followings, create a minimal user info from owner_sec_uid
|
||||
let user = super::response::User {
|
||||
id_str: String::new(), // We don't have the numeric UID
|
||||
sec_uid: owner_sec_uid.clone(),
|
||||
nickname: "抖音用户".to_string(), // Default nickname
|
||||
avatar_thumb: super::response::AvatarThumb { url_list: vec![] },
|
||||
follow_info: super::response::FollowInfo::default(),
|
||||
foreign_user: 0,
|
||||
open_id_str: String::new(),
|
||||
};
|
||||
return Ok(user);
|
||||
}
|
||||
} else {
|
||||
log::error!("Failed to parse user info response: {text}");
|
||||
return Err(RecorderError::ApiError {
|
||||
error: format!("Failed to parse user info response: {text}"),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
log::error!("Failed to get user info: {status}");
|
||||
|
||||
Err(RecorderError::ApiError {
|
||||
error: format!("Failed to get user info: {status} {text}"),
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_room_owner_sec_uid(
|
||||
client: &Client,
|
||||
room_id: &str,
|
||||
) -> Result<String, RecorderError> {
|
||||
let url = format!("https://live.douyin.com/{room_id}");
|
||||
let mut headers = generate_user_agent_header();
|
||||
headers.insert("Referer", "https://live.douyin.com/".parse().unwrap());
|
||||
let resp = client.get(url).headers(headers).send().await?;
|
||||
let status = resp.status();
|
||||
let text = resp.text().await?;
|
||||
if !status.is_success() {
|
||||
return Err(RecorderError::ApiError {
|
||||
error: format!("Failed to get room owner sec uid: {status} {text}"),
|
||||
});
|
||||
}
|
||||
// match to get sec_uid from text like \"sec_uid\":\"MS4wLjABAAAAdFmmud36bynPjXOvoMjatb42856_zryHsGmlkpIECDA\"
|
||||
let sec_uid = Regex::new(r#"\\"sec_uid\\":\\"(.*?)\\""#)
|
||||
.unwrap()
|
||||
.captures(&text)
|
||||
.and_then(|c| c.get(1))
|
||||
.ok_or_else(|| RecorderError::ApiError {
|
||||
error: "Failed to find sec_uid in room page".to_string(),
|
||||
})?
|
||||
.as_str()
|
||||
.to_string();
|
||||
Ok(sec_uid)
|
||||
}
|
||||
|
||||
/// Download file from url to path
|
||||
pub async fn download_file(client: &Client, url: &str, path: &Path) -> Result<(), RecorderError> {
|
||||
if !path.parent().unwrap().exists() {
|
||||
std::fs::create_dir_all(path.parent().unwrap()).unwrap();
|
||||
}
|
||||
let response = client.get(url).send().await?;
|
||||
let bytes = response.bytes().await?;
|
||||
let mut file = tokio::fs::File::create(&path).await?;
|
||||
let mut content = std::io::Cursor::new(bytes);
|
||||
tokio::io::copy(&mut content, &mut file).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_get_room_owner_sec_uid() {
|
||||
let client = Client::new();
|
||||
let sec_uid = get_room_owner_sec_uid(&client, "200525029536")
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
sec_uid,
|
||||
"MS4wLjABAAAAdFmmud36bynPjXOvoMjatb42856_zryHsGmlkpIECDA"
|
||||
);
|
||||
}
|
||||
}
|
||||
550
src-tauri/crates/recorder/src/platforms/douyin/js/a_bogus.js
Normal file
@@ -0,0 +1,550 @@
|
||||
// Script from https://github.com/JoeanAmier/TikTokDownloader/blob/master/static/js/a_bogus.js
|
||||
// All the content in this article is only for learning and communication use, not for any other purpose, strictly prohibited for commercial use and illegal use, otherwise all the consequences are irrelevant to the author!
|
||||
function rc4_encrypt(plaintext, key) {
|
||||
var s = [];
|
||||
for (var i = 0; i < 256; i++) {
|
||||
s[i] = i;
|
||||
}
|
||||
var j = 0;
|
||||
for (var i = 0; i < 256; i++) {
|
||||
j = (j + s[i] + key.charCodeAt(i % key.length)) % 256;
|
||||
var temp = s[i];
|
||||
s[i] = s[j];
|
||||
s[j] = temp;
|
||||
}
|
||||
|
||||
var i = 0;
|
||||
var j = 0;
|
||||
var cipher = [];
|
||||
for (var k = 0; k < plaintext.length; k++) {
|
||||
i = (i + 1) % 256;
|
||||
j = (j + s[i]) % 256;
|
||||
var temp = s[i];
|
||||
s[i] = s[j];
|
||||
s[j] = temp;
|
||||
var t = (s[i] + s[j]) % 256;
|
||||
cipher.push(String.fromCharCode(s[t] ^ plaintext.charCodeAt(k)));
|
||||
}
|
||||
return cipher.join("");
|
||||
}
|
||||
|
||||
function le(e, r) {
|
||||
return ((e << (r %= 32)) | (e >>> (32 - r))) >>> 0;
|
||||
}
|
||||
|
||||
function de(e) {
|
||||
return 0 <= e && e < 16
|
||||
? 2043430169
|
||||
: 16 <= e && e < 64
|
||||
? 2055708042
|
||||
: void console["error"]("invalid j for constant Tj");
|
||||
}
|
||||
|
||||
function pe(e, r, t, n) {
|
||||
return 0 <= e && e < 16
|
||||
? (r ^ t ^ n) >>> 0
|
||||
: 16 <= e && e < 64
|
||||
? ((r & t) | (r & n) | (t & n)) >>> 0
|
||||
: (console["error"]("invalid j for bool function FF"), 0);
|
||||
}
|
||||
|
||||
function he(e, r, t, n) {
|
||||
return 0 <= e && e < 16
|
||||
? (r ^ t ^ n) >>> 0
|
||||
: 16 <= e && e < 64
|
||||
? ((r & t) | (~r & n)) >>> 0
|
||||
: (console["error"]("invalid j for bool function GG"), 0);
|
||||
}
|
||||
|
||||
function reset() {
|
||||
(this.reg[0] = 1937774191),
|
||||
(this.reg[1] = 1226093241),
|
||||
(this.reg[2] = 388252375),
|
||||
(this.reg[3] = 3666478592),
|
||||
(this.reg[4] = 2842636476),
|
||||
(this.reg[5] = 372324522),
|
||||
(this.reg[6] = 3817729613),
|
||||
(this.reg[7] = 2969243214),
|
||||
(this["chunk"] = []),
|
||||
(this["size"] = 0);
|
||||
}
|
||||
|
||||
function write(e) {
|
||||
var a =
|
||||
"string" == typeof e
|
||||
? (function (e) {
|
||||
(n = encodeURIComponent(e)["replace"](
|
||||
/%([0-9A-F]{2})/g,
|
||||
function (e, r) {
|
||||
return String["fromCharCode"]("0x" + r);
|
||||
}
|
||||
)),
|
||||
(a = new Array(n["length"]));
|
||||
return (
|
||||
Array["prototype"]["forEach"]["call"](n, function (e, r) {
|
||||
a[r] = e.charCodeAt(0);
|
||||
}),
|
||||
a
|
||||
);
|
||||
})(e)
|
||||
: e;
|
||||
this.size += a.length;
|
||||
var f = 64 - this["chunk"]["length"];
|
||||
if (a["length"] < f) this["chunk"] = this["chunk"].concat(a);
|
||||
else
|
||||
for (
|
||||
this["chunk"] = this["chunk"].concat(a.slice(0, f));
|
||||
this["chunk"].length >= 64;
|
||||
|
||||
)
|
||||
this["_compress"](this["chunk"]),
|
||||
f < a["length"]
|
||||
? (this["chunk"] = a["slice"](f, Math["min"](f + 64, a["length"])))
|
||||
: (this["chunk"] = []),
|
||||
(f += 64);
|
||||
}
|
||||
|
||||
function sum(e, t) {
|
||||
e && (this["reset"](), this["write"](e)), this["_fill"]();
|
||||
for (var f = 0; f < this.chunk["length"]; f += 64)
|
||||
this._compress(this["chunk"]["slice"](f, f + 64));
|
||||
var i = null;
|
||||
if (t == "hex") {
|
||||
i = "";
|
||||
for (f = 0; f < 8; f++) i += se(this["reg"][f]["toString"](16), 8, "0");
|
||||
} else
|
||||
for (i = new Array(32), f = 0; f < 8; f++) {
|
||||
var c = this.reg[f];
|
||||
(i[4 * f + 3] = (255 & c) >>> 0),
|
||||
(c >>>= 8),
|
||||
(i[4 * f + 2] = (255 & c) >>> 0),
|
||||
(c >>>= 8),
|
||||
(i[4 * f + 1] = (255 & c) >>> 0),
|
||||
(c >>>= 8),
|
||||
(i[4 * f] = (255 & c) >>> 0);
|
||||
}
|
||||
return this["reset"](), i;
|
||||
}
|
||||
|
||||
function _compress(t) {
|
||||
if (t < 64) console.error("compress error: not enough data");
|
||||
else {
|
||||
for (
|
||||
var f = (function (e) {
|
||||
for (var r = new Array(132), t = 0; t < 16; t++)
|
||||
(r[t] = e[4 * t] << 24),
|
||||
(r[t] |= e[4 * t + 1] << 16),
|
||||
(r[t] |= e[4 * t + 2] << 8),
|
||||
(r[t] |= e[4 * t + 3]),
|
||||
(r[t] >>>= 0);
|
||||
for (var n = 16; n < 68; n++) {
|
||||
var a = r[n - 16] ^ r[n - 9] ^ le(r[n - 3], 15);
|
||||
(a = a ^ le(a, 15) ^ le(a, 23)),
|
||||
(r[n] = (a ^ le(r[n - 13], 7) ^ r[n - 6]) >>> 0);
|
||||
}
|
||||
for (n = 0; n < 64; n++) r[n + 68] = (r[n] ^ r[n + 4]) >>> 0;
|
||||
return r;
|
||||
})(t),
|
||||
i = this["reg"].slice(0),
|
||||
c = 0;
|
||||
c < 64;
|
||||
c++
|
||||
) {
|
||||
var o = le(i[0], 12) + i[4] + le(de(c), c),
|
||||
s = ((o = le((o = (4294967295 & o) >>> 0), 7)) ^ le(i[0], 12)) >>> 0,
|
||||
u = pe(c, i[0], i[1], i[2]);
|
||||
u = (4294967295 & (u = u + i[3] + s + f[c + 68])) >>> 0;
|
||||
var b = he(c, i[4], i[5], i[6]);
|
||||
(b = (4294967295 & (b = b + i[7] + o + f[c])) >>> 0),
|
||||
(i[3] = i[2]),
|
||||
(i[2] = le(i[1], 9)),
|
||||
(i[1] = i[0]),
|
||||
(i[0] = u),
|
||||
(i[7] = i[6]),
|
||||
(i[6] = le(i[5], 19)),
|
||||
(i[5] = i[4]),
|
||||
(i[4] = (b ^ le(b, 9) ^ le(b, 17)) >>> 0);
|
||||
}
|
||||
for (var l = 0; l < 8; l++) this["reg"][l] = (this["reg"][l] ^ i[l]) >>> 0;
|
||||
}
|
||||
}
|
||||
|
||||
function _fill() {
|
||||
var a = 8 * this["size"],
|
||||
f = this["chunk"]["push"](128) % 64;
|
||||
for (64 - f < 8 && (f -= 64); f < 56; f++) this.chunk["push"](0);
|
||||
for (var i = 0; i < 4; i++) {
|
||||
var c = Math["floor"](a / 4294967296);
|
||||
this["chunk"].push((c >>> (8 * (3 - i))) & 255);
|
||||
}
|
||||
for (i = 0; i < 4; i++) this["chunk"]["push"]((a >>> (8 * (3 - i))) & 255);
|
||||
}
|
||||
|
||||
function SM3() {
|
||||
this.reg = [];
|
||||
this.chunk = [];
|
||||
this.size = 0;
|
||||
this.reset();
|
||||
}
|
||||
SM3.prototype.reset = reset;
|
||||
SM3.prototype.write = write;
|
||||
SM3.prototype.sum = sum;
|
||||
SM3.prototype._compress = _compress;
|
||||
SM3.prototype._fill = _fill;
|
||||
|
||||
function result_encrypt(long_str, num = null) {
|
||||
let s_obj = {
|
||||
s0: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
|
||||
s1: "Dkdpgh4ZKsQB80/Mfvw36XI1R25+WUAlEi7NLboqYTOPuzmFjJnryx9HVGcaStCe=",
|
||||
s2: "Dkdpgh4ZKsQB80/Mfvw36XI1R25-WUAlEi7NLboqYTOPuzmFjJnryx9HVGcaStCe=",
|
||||
s3: "ckdp1h4ZKsUB80/Mfvw36XIgR25+WQAlEi7NLboqYTOPuzmFjJnryx9HVGDaStCe",
|
||||
s4: "Dkdpgh2ZmsQB80/MfvV36XI1R45-WUAlEixNLwoqYTOPuzKFjJnry79HbGcaStCe",
|
||||
};
|
||||
let constant = {
|
||||
0: 16515072,
|
||||
1: 258048,
|
||||
2: 4032,
|
||||
str: s_obj[num],
|
||||
};
|
||||
|
||||
let result = "";
|
||||
let lound = 0;
|
||||
let long_int = get_long_int(lound, long_str);
|
||||
for (let i = 0; i < (long_str.length / 3) * 4; i++) {
|
||||
if (Math.floor(i / 4) !== lound) {
|
||||
lound += 1;
|
||||
long_int = get_long_int(lound, long_str);
|
||||
}
|
||||
let key = i % 4;
|
||||
switch (key) {
|
||||
case 0:
|
||||
temp_int = (long_int & constant["0"]) >> 18;
|
||||
result += constant["str"].charAt(temp_int);
|
||||
break;
|
||||
case 1:
|
||||
temp_int = (long_int & constant["1"]) >> 12;
|
||||
result += constant["str"].charAt(temp_int);
|
||||
break;
|
||||
case 2:
|
||||
temp_int = (long_int & constant["2"]) >> 6;
|
||||
result += constant["str"].charAt(temp_int);
|
||||
break;
|
||||
case 3:
|
||||
temp_int = long_int & 63;
|
||||
result += constant["str"].charAt(temp_int);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function get_long_int(round, long_str) {
|
||||
round = round * 3;
|
||||
return (
|
||||
(long_str.charCodeAt(round) << 16) |
|
||||
(long_str.charCodeAt(round + 1) << 8) |
|
||||
long_str.charCodeAt(round + 2)
|
||||
);
|
||||
}
|
||||
|
||||
function gener_random(random, option) {
|
||||
return [
|
||||
(random & 255 & 170) | (option[0] & 85), // 163
|
||||
(random & 255 & 85) | (option[0] & 170), //87
|
||||
((random >> 8) & 255 & 170) | (option[1] & 85), //37
|
||||
((random >> 8) & 255 & 85) | (option[1] & 170), //41
|
||||
];
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////
|
||||
function generate_rc4_bb_str(
|
||||
url_search_params,
|
||||
user_agent,
|
||||
window_env_str,
|
||||
suffix = "cus",
|
||||
Arguments = [0, 1, 14]
|
||||
) {
|
||||
let sm3 = new SM3();
|
||||
let start_time = Date.now();
|
||||
/**
|
||||
* 进行3次加密处理
|
||||
* 1: url_search_params两次sm3之的结果
|
||||
* 2: 对后缀两次sm3之的结果
|
||||
* 3: 对ua处理之后的结果
|
||||
*/
|
||||
// url_search_params两次sm3之的结果
|
||||
let url_search_params_list = sm3.sum(sm3.sum(url_search_params + suffix));
|
||||
// 对后缀两次sm3之的结果
|
||||
let cus = sm3.sum(sm3.sum(suffix));
|
||||
// 对ua处理之后的结果
|
||||
let ua = sm3.sum(
|
||||
result_encrypt(
|
||||
rc4_encrypt(
|
||||
user_agent,
|
||||
String.fromCharCode.apply(null, [0.00390625, 1, 14])
|
||||
),
|
||||
"s3"
|
||||
)
|
||||
);
|
||||
//
|
||||
let end_time = Date.now();
|
||||
// b
|
||||
let b = {
|
||||
8: 3, // 固定
|
||||
10: end_time, //3次加密结束时间
|
||||
15: {
|
||||
aid: 6383,
|
||||
pageId: 6241,
|
||||
boe: false,
|
||||
ddrt: 7,
|
||||
paths: {
|
||||
include: [{}, {}, {}, {}, {}, {}, {}],
|
||||
exclude: [],
|
||||
},
|
||||
track: {
|
||||
mode: 0,
|
||||
delay: 300,
|
||||
paths: [],
|
||||
},
|
||||
dump: true,
|
||||
rpU: "",
|
||||
},
|
||||
16: start_time, //3次加密开始时间
|
||||
18: 44, //固定
|
||||
19: [1, 0, 1, 5],
|
||||
};
|
||||
|
||||
//3次加密开始时间
|
||||
b[20] = (b[16] >> 24) & 255;
|
||||
b[21] = (b[16] >> 16) & 255;
|
||||
b[22] = (b[16] >> 8) & 255;
|
||||
b[23] = b[16] & 255;
|
||||
b[24] = (b[16] / 256 / 256 / 256 / 256) >> 0;
|
||||
b[25] = (b[16] / 256 / 256 / 256 / 256 / 256) >> 0;
|
||||
|
||||
// 参数Arguments [0, 1, 14, ...]
|
||||
// let Arguments = [0, 1, 14]
|
||||
b[26] = (Arguments[0] >> 24) & 255;
|
||||
b[27] = (Arguments[0] >> 16) & 255;
|
||||
b[28] = (Arguments[0] >> 8) & 255;
|
||||
b[29] = Arguments[0] & 255;
|
||||
|
||||
b[30] = (Arguments[1] / 256) & 255;
|
||||
b[31] = Arguments[1] % 256 & 255;
|
||||
b[32] = (Arguments[1] >> 24) & 255;
|
||||
b[33] = (Arguments[1] >> 16) & 255;
|
||||
|
||||
b[34] = (Arguments[2] >> 24) & 255;
|
||||
b[35] = (Arguments[2] >> 16) & 255;
|
||||
b[36] = (Arguments[2] >> 8) & 255;
|
||||
b[37] = Arguments[2] & 255;
|
||||
|
||||
// (url_search_params + "cus") 两次sm3之的结果
|
||||
/**let url_search_params_list = [
|
||||
91, 186, 35, 86, 143, 253, 6, 76,
|
||||
34, 21, 167, 148, 7, 42, 192, 219,
|
||||
188, 20, 182, 85, 213, 74, 213, 147,
|
||||
37, 155, 93, 139, 85, 118, 228, 213
|
||||
]*/
|
||||
b[38] = url_search_params_list[21];
|
||||
b[39] = url_search_params_list[22];
|
||||
|
||||
// ("cus") 对后缀两次sm3之的结果
|
||||
/**
|
||||
* let cus = [
|
||||
136, 101, 114, 147, 58, 77, 207, 201,
|
||||
215, 162, 154, 93, 248, 13, 142, 160,
|
||||
105, 73, 215, 241, 83, 58, 51, 43,
|
||||
255, 38, 168, 141, 216, 194, 35, 236
|
||||
]*/
|
||||
b[40] = cus[21];
|
||||
b[41] = cus[22];
|
||||
|
||||
// 对ua处理之后的结果
|
||||
/**
|
||||
* let ua = [
|
||||
129, 190, 70, 186, 86, 196, 199, 53,
|
||||
99, 38, 29, 209, 243, 17, 157, 69,
|
||||
147, 104, 53, 23, 114, 126, 66, 228,
|
||||
135, 30, 168, 185, 109, 156, 251, 88
|
||||
]*/
|
||||
b[42] = ua[23];
|
||||
b[43] = ua[24];
|
||||
|
||||
//3次加密结束时间
|
||||
b[44] = (b[10] >> 24) & 255;
|
||||
b[45] = (b[10] >> 16) & 255;
|
||||
b[46] = (b[10] >> 8) & 255;
|
||||
b[47] = b[10] & 255;
|
||||
b[48] = b[8];
|
||||
b[49] = (b[10] / 256 / 256 / 256 / 256) >> 0;
|
||||
b[50] = (b[10] / 256 / 256 / 256 / 256 / 256) >> 0;
|
||||
|
||||
// object配置项
|
||||
b[51] = b[15]["pageId"];
|
||||
b[52] = (b[15]["pageId"] >> 24) & 255;
|
||||
b[53] = (b[15]["pageId"] >> 16) & 255;
|
||||
b[54] = (b[15]["pageId"] >> 8) & 255;
|
||||
b[55] = b[15]["pageId"] & 255;
|
||||
|
||||
b[56] = b[15]["aid"];
|
||||
b[57] = b[15]["aid"] & 255;
|
||||
b[58] = (b[15]["aid"] >> 8) & 255;
|
||||
b[59] = (b[15]["aid"] >> 16) & 255;
|
||||
b[60] = (b[15]["aid"] >> 24) & 255;
|
||||
|
||||
// 中间进行了环境检测
|
||||
// 代码索引: 2496 索引值: 17 (索引64关键条件)
|
||||
// '1536|747|1536|834|0|30|0|0|1536|834|1536|864|1525|747|24|24|Win32'.charCodeAt()得到65位数组
|
||||
/**
|
||||
* let window_env_list = [49, 53, 51, 54, 124, 55, 52, 55, 124, 49, 53, 51, 54, 124, 56, 51, 52, 124, 48, 124, 51,
|
||||
* 48, 124, 48, 124, 48, 124, 49, 53, 51, 54, 124, 56, 51, 52, 124, 49, 53, 51, 54, 124, 56,
|
||||
* 54, 52, 124, 49, 53, 50, 53, 124, 55, 52, 55, 124, 50, 52, 124, 50, 52, 124, 87, 105, 110,
|
||||
* 51, 50]
|
||||
*/
|
||||
let window_env_list = [];
|
||||
for (let index = 0; index < window_env_str.length; index++) {
|
||||
window_env_list.push(window_env_str.charCodeAt(index));
|
||||
}
|
||||
b[64] = window_env_list.length;
|
||||
b[65] = b[64] & 255;
|
||||
b[66] = (b[64] >> 8) & 255;
|
||||
|
||||
b[69] = [].length;
|
||||
b[70] = b[69] & 255;
|
||||
b[71] = (b[69] >> 8) & 255;
|
||||
|
||||
b[72] =
|
||||
b[18] ^
|
||||
b[20] ^
|
||||
b[26] ^
|
||||
b[30] ^
|
||||
b[38] ^
|
||||
b[40] ^
|
||||
b[42] ^
|
||||
b[21] ^
|
||||
b[27] ^
|
||||
b[31] ^
|
||||
b[35] ^
|
||||
b[39] ^
|
||||
b[41] ^
|
||||
b[43] ^
|
||||
b[22] ^
|
||||
b[28] ^
|
||||
b[32] ^
|
||||
b[36] ^
|
||||
b[23] ^
|
||||
b[29] ^
|
||||
b[33] ^
|
||||
b[37] ^
|
||||
b[44] ^
|
||||
b[45] ^
|
||||
b[46] ^
|
||||
b[47] ^
|
||||
b[48] ^
|
||||
b[49] ^
|
||||
b[50] ^
|
||||
b[24] ^
|
||||
b[25] ^
|
||||
b[52] ^
|
||||
b[53] ^
|
||||
b[54] ^
|
||||
b[55] ^
|
||||
b[57] ^
|
||||
b[58] ^
|
||||
b[59] ^
|
||||
b[60] ^
|
||||
b[65] ^
|
||||
b[66] ^
|
||||
b[70] ^
|
||||
b[71];
|
||||
let bb = [
|
||||
b[18],
|
||||
b[20],
|
||||
b[52],
|
||||
b[26],
|
||||
b[30],
|
||||
b[34],
|
||||
b[58],
|
||||
b[38],
|
||||
b[40],
|
||||
b[53],
|
||||
b[42],
|
||||
b[21],
|
||||
b[27],
|
||||
b[54],
|
||||
b[55],
|
||||
b[31],
|
||||
b[35],
|
||||
b[57],
|
||||
b[39],
|
||||
b[41],
|
||||
b[43],
|
||||
b[22],
|
||||
b[28],
|
||||
b[32],
|
||||
b[60],
|
||||
b[36],
|
||||
b[23],
|
||||
b[29],
|
||||
b[33],
|
||||
b[37],
|
||||
b[44],
|
||||
b[45],
|
||||
b[59],
|
||||
b[46],
|
||||
b[47],
|
||||
b[48],
|
||||
b[49],
|
||||
b[50],
|
||||
b[24],
|
||||
b[25],
|
||||
b[65],
|
||||
b[66],
|
||||
b[70],
|
||||
b[71],
|
||||
];
|
||||
bb = bb.concat(window_env_list).concat(b[72]);
|
||||
return rc4_encrypt(
|
||||
String.fromCharCode.apply(null, bb),
|
||||
String.fromCharCode.apply(null, [121])
|
||||
);
|
||||
}
|
||||
|
||||
function generate_random_str() {
|
||||
let random_str_list = [];
|
||||
random_str_list = random_str_list.concat(
|
||||
gener_random(Math.random() * 10000, [3, 45])
|
||||
);
|
||||
random_str_list = random_str_list.concat(
|
||||
gener_random(Math.random() * 10000, [1, 0])
|
||||
);
|
||||
random_str_list = random_str_list.concat(
|
||||
gener_random(Math.random() * 10000, [1, 5])
|
||||
);
|
||||
return String.fromCharCode.apply(null, random_str_list);
|
||||
}
|
||||
|
||||
function generate_a_bogus(url_search_params, user_agent) {
|
||||
/**
|
||||
* url_search_params:"device_platform=webapp&aid=6383&channel=channel_pc_web&update_version_code=170400&pc_client_type=1&version_code=170400&version_name=17.4.0&cookie_enabled=true&screen_width=1536&screen_height=864&browser_language=zh-CN&browser_platform=Win32&browser_name=Chrome&browser_version=123.0.0.0&browser_online=true&engine_name=Blink&engine_version=123.0.0.0&os_name=Windows&os_version=10&cpu_core_num=16&device_memory=8&platform=PC&downlink=10&effective_type=4g&round_trip_time=50&webid=7362810250930783783&msToken=VkDUvz1y24CppXSl80iFPr6ez-3FiizcwD7fI1OqBt6IICq9RWG7nCvxKb8IVi55mFd-wnqoNkXGnxHrikQb4PuKob5Q-YhDp5Um215JzlBszkUyiEvR"
|
||||
* user_agent:"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36"
|
||||
*/
|
||||
let result_str =
|
||||
generate_random_str() +
|
||||
generate_rc4_bb_str(
|
||||
url_search_params,
|
||||
user_agent,
|
||||
"1536|747|1536|834|0|30|0|0|1536|834|1536|864|1525|747|24|24|Win32"
|
||||
);
|
||||
|
||||
return encodeURIComponent(result_encrypt(result_str, "s4") + "=");
|
||||
}
|
||||
|
||||
//测试调用
|
||||
// console.log(generate_a_bogus(
|
||||
// "device_platform=webapp&aid=6383&channel=channel_pc_web&update_version_code=170400&pc_client_type=1&version_code=170400&version_name=17.4.0&cookie_enabled=true&screen_width=1536&screen_height=864&browser_language=zh-CN&browser_platform=Win32&browser_name=Chrome&browser_version=123.0.0.0&browser_online=true&engine_name=Blink&engine_version=123.0.0.0&os_name=Windows&os_version=10&cpu_core_num=16&device_memory=8&platform=PC&downlink=10&effective_type=4g&round_trip_time=50&webid=7362810250930783783&msToken=VkDUvz1y24CppXSl80iFPr6ez-3FiizcwD7fI1OqBt6IICq9RWG7nCvxKb8IVi55mFd-wnqoNkXGnxHrikQb4PuKob5Q-YhDp5Um215JzlBszkUyiEvR",
|
||||
// "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36"
|
||||
// ));
|
||||
@@ -1,11 +1,14 @@
|
||||
use serde_derive::Deserialize;
|
||||
use serde_derive::Serialize;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct DouyinRoomInfoResponse {
|
||||
pub data: Data,
|
||||
#[serde(default)]
|
||||
pub extra: Option<serde_json::Value>,
|
||||
#[serde(rename = "status_code")]
|
||||
pub status_code: i64,
|
||||
}
|
||||
@@ -14,9 +17,29 @@ pub struct DouyinRoomInfoResponse {
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Data {
|
||||
pub data: Vec<Daum>,
|
||||
#[serde(rename = "enter_room_id", default)]
|
||||
pub enter_room_id: Option<String>,
|
||||
#[serde(default)]
|
||||
pub extra: Option<serde_json::Value>,
|
||||
pub user: User,
|
||||
#[serde(rename = "qrcode_url", default)]
|
||||
pub qrcode_url: Option<String>,
|
||||
#[serde(rename = "enter_mode", default)]
|
||||
pub enter_mode: Option<i64>,
|
||||
#[serde(rename = "room_status")]
|
||||
pub room_status: i64,
|
||||
#[serde(rename = "partition_road_map", default)]
|
||||
pub partition_road_map: Option<serde_json::Value>,
|
||||
#[serde(rename = "similar_rooms", default)]
|
||||
pub similar_rooms: Option<Vec<serde_json::Value>>,
|
||||
#[serde(rename = "shark_decision_conf", default)]
|
||||
pub shark_decision_conf: Option<String>,
|
||||
#[serde(rename = "web_stream_url", default)]
|
||||
pub web_stream_url: Option<serde_json::Value>,
|
||||
#[serde(rename = "login_lead", default)]
|
||||
pub login_lead: Option<serde_json::Value>,
|
||||
#[serde(rename = "auth_cert_info", default)]
|
||||
pub auth_cert_info: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
@@ -28,9 +51,36 @@ pub struct Daum {
|
||||
#[serde(rename = "status_str")]
|
||||
pub status_str: String,
|
||||
pub title: String,
|
||||
#[serde(rename = "user_count_str", default)]
|
||||
pub user_count_str: Option<String>,
|
||||
pub cover: Option<Cover>,
|
||||
#[serde(rename = "stream_url")]
|
||||
pub stream_url: Option<StreamUrl>,
|
||||
#[serde(default)]
|
||||
pub owner: Option<Owner>,
|
||||
#[serde(rename = "room_auth", default)]
|
||||
pub room_auth: Option<RoomAuth>,
|
||||
#[serde(rename = "live_room_mode", default)]
|
||||
pub live_room_mode: Option<i64>,
|
||||
#[serde(default)]
|
||||
pub stats: Option<Stats>,
|
||||
#[serde(rename = "has_commerce_goods", default)]
|
||||
pub has_commerce_goods: Option<bool>,
|
||||
#[serde(rename = "linker_map", default)]
|
||||
pub linker_map: Option<LinkerMap>,
|
||||
#[serde(rename = "linker_detail", default)]
|
||||
pub linker_detail: Option<LinkerDetail>,
|
||||
#[serde(rename = "room_view_stats", default)]
|
||||
pub room_view_stats: Option<RoomViewStats>,
|
||||
#[serde(rename = "scene_type_info", default)]
|
||||
pub scene_type_info: Option<SceneTypeInfo>,
|
||||
#[serde(rename = "like_count", default)]
|
||||
pub like_count: Option<i64>,
|
||||
#[serde(rename = "owner_user_id_str", default)]
|
||||
pub owner_user_id_str: Option<String>,
|
||||
// Many other fields that can be ignored for now
|
||||
#[serde(flatten)]
|
||||
pub other_fields: HashMap<String, serde_json::Value>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
@@ -56,8 +106,8 @@ pub struct StreamUrl {
|
||||
#[serde(rename = "live_core_sdk_data")]
|
||||
pub live_core_sdk_data: LiveCoreSdkData,
|
||||
pub extra: Extra,
|
||||
#[serde(rename = "pull_datas")]
|
||||
pub pull_datas: PullDatas,
|
||||
#[serde(rename = "pull_datas", default)]
|
||||
pub pull_datas: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
@@ -182,10 +232,7 @@ pub struct Extra {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
// Placeholder: the `pull_datas` object's fields are not modeled yet.
pub struct PullDatas {}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
pub struct Owner {
|
||||
#[serde(rename = "id_str")]
|
||||
pub id_str: String,
|
||||
@@ -234,6 +281,7 @@ pub struct Subscribe {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
pub struct RoomAuth {
|
||||
#[serde(rename = "Chat")]
|
||||
pub chat: bool,
|
||||
@@ -383,6 +431,7 @@ pub struct RoomAuth {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
pub struct SpecialStyle {
|
||||
#[serde(rename = "Chat")]
|
||||
pub chat: Chat,
|
||||
@@ -392,6 +441,7 @@ pub struct SpecialStyle {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
pub struct Chat {
|
||||
#[serde(rename = "UnableStyle")]
|
||||
pub unable_style: i64,
|
||||
@@ -407,6 +457,7 @@ pub struct Chat {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
pub struct Like {
|
||||
#[serde(rename = "UnableStyle")]
|
||||
pub unable_style: i64,
|
||||
@@ -422,6 +473,7 @@ pub struct Like {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
pub struct Stats {
|
||||
#[serde(rename = "total_user_desp")]
|
||||
pub total_user_desp: String,
|
||||
@@ -435,10 +487,12 @@ pub struct Stats {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
// Placeholder: `linker_map` payload is not modeled yet.
pub struct LinkerMap {}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
pub struct LinkerDetail {
|
||||
#[serde(rename = "linker_play_modes")]
|
||||
pub linker_play_modes: Vec<Value>,
|
||||
@@ -476,14 +530,17 @@ pub struct LinkerDetail {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
// Placeholder: `linker_map_str` payload is not modeled yet.
pub struct LinkerMapStr {}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
// Placeholder: `playmode_detail` payload is not modeled yet.
pub struct PlaymodeDetail {}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
pub struct RoomViewStats {
|
||||
#[serde(rename = "is_hidden")]
|
||||
pub is_hidden: bool,
|
||||
@@ -510,6 +567,7 @@ pub struct RoomViewStats {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
pub struct SceneTypeInfo {
|
||||
#[serde(rename = "is_union_live_room")]
|
||||
pub is_union_live_room: bool,
|
||||
@@ -529,6 +587,7 @@ pub struct SceneTypeInfo {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
pub struct EntranceList {
|
||||
#[serde(rename = "group_id")]
|
||||
pub group_id: i64,
|
||||
@@ -549,6 +608,7 @@ pub struct EntranceList {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
pub struct Icon {
|
||||
#[serde(rename = "url_list")]
|
||||
pub url_list: Vec<String>,
|
||||
@@ -770,6 +830,7 @@ pub struct H5Owner {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[allow(dead_code)]
|
||||
pub struct H5AvatarThumb {
|
||||
#[serde(rename = "url_list")]
|
||||
pub url_list: Vec<String>,
|
||||
@@ -3,7 +3,7 @@ use serde_derive::Serialize;
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct StreamInfo {
|
||||
pub struct DouyinStream {
|
||||
pub data: Data,
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ pub struct Data {
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
// Low-definition stream variant; wraps the shared `Main` payload.
pub struct Ld {
    pub main: Main,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
// Medium-definition stream variant.
pub struct Md {
    pub main: Main,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
// Standard-definition stream variant.
pub struct Sd {
    pub main: Main,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
// High-definition stream variant.
pub struct Hd {
    pub main: Main,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
// "ao" variant — presumably audio-only; TODO confirm against Douyin payloads.
pub struct Ao {
    pub main: Main,
}

#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
// Ultra-high-definition stream variant.
pub struct Uhd {
    pub main: Main,
}
|
||||
156
src-tauri/crates/recorder/src/platforms/huya/api.rs
Normal file
@@ -0,0 +1,156 @@
|
||||
use crate::account::Account;
|
||||
use crate::platforms::huya::extractor::StreamInfo;
|
||||
use crate::utils::user_agent_generator;
|
||||
use crate::RoomInfo;
|
||||
use crate::UserInfo;
|
||||
|
||||
use super::errors::HuyaClientError;
|
||||
|
||||
use reqwest::Client;
|
||||
use scraper::Html;
|
||||
use scraper::Selector;
|
||||
use std::path::Path;
|
||||
|
||||
fn generate_user_agent_header() -> reqwest::header::HeaderMap {
|
||||
let user_agent = user_agent_generator::UserAgentGenerator::new().generate(true);
|
||||
let mut headers = reqwest::header::HeaderMap::new();
|
||||
headers.insert("user-agent", user_agent.parse().unwrap());
|
||||
headers
|
||||
}
|
||||
|
||||
/// Fetch the Huya user's display name and avatar by scraping the mobile
/// video page `https://m.huya.com/video/u/{account.id}`.
///
/// Returns `HuyaClientError::InvalidCookie` when the account's cookie string
/// cannot be converted into a header value; network errors propagate via `?`.
/// Missing name/avatar in the page degrade to empty strings.
pub async fn get_user_info(
    client: &Client,
    account: &Account,
) -> Result<UserInfo, HuyaClientError> {
    // Example page: https://m.huya.com/video/u/2246697169
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(HuyaClientError::InvalidCookie);
    }
    let url = format!("https://m.huya.com/video/u/{}", account.id);
    let response = client.get(url).headers(headers).send().await?;
    let raw_content = response.text().await?;
    // The markup being scraped looks like:
    // <div class="video-list-info">
    //   <div class="podcast-box clearfix">
    //     <img src="http://huyaimg.msstatic.com/avatar/1060/3f/0e6c0694867ef98e9f869589608ce3_180_135.jpg" alt="">
    //     <div class="podcast-info-intro">
    //       <h2>X inrea 丶</h2>
    //       <p></p>
    //     </div>
    //   </div>
    // </div>
    let document = Html::parse_document(&raw_content);

    // Static selector strings are valid CSS, so `unwrap` cannot fail here.
    let avatar_selector = Selector::parse(".video-list-info .podcast-box img").unwrap();
    let name_selector = Selector::parse(".video-list-info .podcast-info-intro h2").unwrap();

    // Extract the avatar URL (the img's `src` attribute).
    let avatar = document
        .select(&avatar_selector)
        .next()
        .and_then(|img| img.value().attr("src"))
        .map(|src| src.to_string());

    // Extract the display name (h2 text), discarding whitespace-only results.
    let name = document
        .select(&name_selector)
        .next()
        .map(|h2| h2.text().collect::<String>().trim().to_string())
        .filter(|s| !s.is_empty());

    Ok(UserInfo {
        user_id: account.id.clone(),
        user_name: name.unwrap_or_default(),
        user_avatar: avatar.unwrap_or_default(),
    })
}
|
||||
|
||||
pub async fn get_room_info(
|
||||
client: &Client,
|
||||
account: &Account,
|
||||
room_id: &str,
|
||||
) -> Result<(UserInfo, RoomInfo, StreamInfo), HuyaClientError> {
|
||||
let mut headers = generate_user_agent_header();
|
||||
if let Ok(cookies) = account.cookies.parse() {
|
||||
headers.insert("cookie", cookies);
|
||||
} else {
|
||||
return Err(HuyaClientError::InvalidCookie);
|
||||
}
|
||||
headers.insert("Referer", "https://m.huya.com/".parse().unwrap());
|
||||
let url = format!("https://m.huya.com/{room_id}");
|
||||
let response = client.get(url).headers(headers).send().await?;
|
||||
let raw_content = response.text().await?;
|
||||
let (user_info, room_info, stream_info) =
|
||||
super::extractor::LiveStreamExtractor::extract_infos(&raw_content)?;
|
||||
|
||||
Ok((user_info, room_info, stream_info))
|
||||
}
|
||||
|
||||
/// Download file from url to path
|
||||
pub async fn download_file(client: &Client, url: &str, path: &Path) -> Result<(), HuyaClientError> {
|
||||
if !path.parent().unwrap().exists() {
|
||||
std::fs::create_dir_all(path.parent().unwrap()).unwrap();
|
||||
}
|
||||
let response = client.get(url).send().await?;
|
||||
let bytes = response.bytes().await?;
|
||||
let mut file = tokio::fs::File::create(&path).await?;
|
||||
let mut content = std::io::Cursor::new(bytes);
|
||||
tokio::io::copy(&mut content, &mut file).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// GET `url` (typically an HLS playlist) and return the response body as text.
///
/// Non-success HTTP statuses are logged and mapped to
/// `HuyaClientError::InvalidStream`; transport errors propagate via `?`.
pub async fn get_index_content(client: &Client, url: &str) -> Result<String, HuyaClientError> {
    let headers = generate_user_agent_header();
    let response = client.get(url).headers(headers).send().await?;

    if response.status().is_success() {
        Ok(response.text().await?)
    } else {
        log::error!("get_index_content failed: {}", response.status());
        Err(HuyaClientError::InvalidStream)
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use crate::platforms::PlatformType;

    use super::*;

    // NOTE(review): these tests hit live Huya endpoints, so they need network
    // access (and, for room info, a currently-live room) to pass.
    #[tokio::test]
    async fn test_get_user_info() {
        let client = Client::new();
        let account = Account {
            platform: PlatformType::Huya.as_str().to_string(),
            id: "2246697169".to_string(),
            name: "X inrea 丶".to_string(),
            avatar: "https://huyaimg.msstatic.com/avatar/1060/3f/0e6c0694867ef98e9f869589608ce3_180_135.jpg".to_string(),
            csrf: "".to_string(),
            cookies: "".to_string(),
        };
        let user_info = get_user_info(&client, &account).await.unwrap();
        println!("{:?}", user_info);
    }

    #[tokio::test]
    async fn test_get_room_info() {
        // Set log level to debug for this test run.
        std::env::set_var("RUST_LOG", "debug");
        let _ = env_logger::try_init();
        let client = Client::new();
        let account = Account::default();
        let (user_info, room_info, stream_info) =
            get_room_info(&client, &account, "599934").await.unwrap();
        println!("{:?}", user_info);
        println!("{:?}", room_info);
        println!("{:?}", stream_info);

        // Query the playlist served at the extracted HLS URL.
        let index_content = get_index_content(&client, &stream_info.hls_url)
            .await
            .unwrap();
        println!("{:?}", index_content);
    }
}
|
||||
51
src-tauri/crates/recorder/src/platforms/huya/errors.rs
Normal file
@@ -0,0 +1,51 @@
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Error, Debug)]
/// Error type for every fallible Huya client operation (HTTP requests, page
/// extraction, stream handling, uploads, cache access).
///
/// `reqwest::Error` and `std::io::Error` convert automatically via `#[from]`,
/// so `?` works directly on network and filesystem calls.
pub enum HuyaClientError {
    #[error("Invalid response")]
    InvalidResponse,
    #[error("Client init error")]
    InitClientError,
    #[error("Invalid response status: {status}")]
    InvalidResponseStatus { status: reqwest::StatusCode },
    #[error("Invalid response json: {resp}")]
    InvalidResponseJson { resp: serde_json::Value },
    #[error("Invalid message code: {code}")]
    InvalidMessageCode { code: u64 },
    #[error("Invalid value")]
    InvalidValue,
    #[error("Invalid url")]
    InvalidUrl,
    #[error("Invalid stream format")]
    InvalidFormat,
    #[error("Invalid stream")]
    InvalidStream,
    #[error("Invalid cookie")]
    InvalidCookie,
    #[error("Upload error: {err}")]
    UploadError { err: String },
    #[error("Upload was cancelled by user")]
    UploadCancelled,
    #[error("Empty cache")]
    EmptyCache,
    // Transport-level failures from reqwest (`?`-compatible via #[from]).
    #[error("Client error: {0}")]
    ClientError(#[from] reqwest::Error),
    // Filesystem failures (`?`-compatible via #[from]).
    #[error("IO error: {0}")]
    IOError(#[from] std::io::Error),
    // Presumably raised when Huya's anti-bot/risk control blocks the request
    // — TODO confirm where this is produced.
    #[error("Security control error")]
    SecurityControlError,
    #[error("API error: {0}")]
    ApiError(String),
    #[error("Format not found: {0}")]
    FormatNotFound(String),
    #[error("Codec not found: {0}")]
    CodecNotFound(String),
    #[error("Extractor error: {0}")]
    ExtractorError(String),
}
|
||||
|
||||
/// Allow Huya client errors to be handed to callers that expect plain
/// `String` errors (e.g. Tauri command results).
impl From<HuyaClientError> for String {
    fn from(err: HuyaClientError) -> Self {
        // Render through the `Display` impl generated by `thiserror`.
        format!("{err}")
    }
}
|
||||
1563
src-tauri/crates/recorder/src/platforms/huya/extractor.rs
Normal file
237
src-tauri/crates/recorder/src/platforms/huya/mod.rs
Normal file
@@ -0,0 +1,237 @@
|
||||
pub mod api;
|
||||
pub mod errors;
|
||||
mod extractor;
|
||||
pub mod url_builder;
|
||||
use crate::account::Account;
|
||||
use crate::core::hls_recorder::{construct_stream_from_variant, HlsRecorder};
|
||||
use crate::core::{Codec, Format};
|
||||
use crate::errors::RecorderError;
|
||||
use crate::events::RecorderEvent;
|
||||
use crate::platforms::huya::extractor::StreamInfo;
|
||||
use crate::traits::RecorderTrait;
|
||||
use crate::{Recorder, RoomInfo, UserInfo};
|
||||
use async_trait::async_trait;
|
||||
use chrono::Utc;
|
||||
use rand::random;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::{atomic, Arc};
|
||||
use std::time::Duration;
|
||||
use tokio::sync::{broadcast, Mutex, RwLock};
|
||||
|
||||
use crate::danmu::DanmuStorage;
|
||||
use crate::platforms::PlatformType;
|
||||
|
||||
pub type HuyaRecorder = Recorder<HuyaExtra>;
|
||||
|
||||
#[derive(Clone)]
// Huya-specific state attached to the generic `Recorder`.
pub struct HuyaExtra {
    // Most recently extracted stream info; `None` while the room is offline
    // or between recording sessions (cleared by `reset()`).
    live_stream: Arc<RwLock<Option<StreamInfo>>>,
}
|
||||
|
||||
impl HuyaRecorder {
|
||||
pub async fn new(
|
||||
room_id: &str,
|
||||
account: &Account,
|
||||
cache_dir: PathBuf,
|
||||
channel: broadcast::Sender<RecorderEvent>,
|
||||
update_interval: Arc<atomic::AtomicU64>,
|
||||
enabled: bool,
|
||||
) -> Result<Self, crate::errors::RecorderError> {
|
||||
Ok(Self {
|
||||
platform: PlatformType::Huya,
|
||||
room_id: room_id.to_string(),
|
||||
account: account.clone(),
|
||||
client: reqwest::Client::new(),
|
||||
event_channel: channel,
|
||||
cache_dir,
|
||||
quit: Arc::new(atomic::AtomicBool::new(false)),
|
||||
enabled: Arc::new(atomic::AtomicBool::new(enabled)),
|
||||
is_recording: Arc::new(atomic::AtomicBool::new(false)),
|
||||
room_info: Arc::new(RwLock::new(RoomInfo::default())),
|
||||
user_info: Arc::new(RwLock::new(UserInfo::default())),
|
||||
platform_live_id: Arc::new(RwLock::new(String::new())),
|
||||
live_id: Arc::new(RwLock::new(String::new())),
|
||||
danmu_storage: Arc::new(RwLock::new(None)),
|
||||
last_update: Arc::new(atomic::AtomicI64::new(Utc::now().timestamp())),
|
||||
last_sequence: Arc::new(atomic::AtomicU64::new(0)),
|
||||
danmu_task: Arc::new(Mutex::new(None)),
|
||||
record_task: Arc::new(Mutex::new(None)),
|
||||
update_interval,
|
||||
total_duration: Arc::new(atomic::AtomicU64::new(0)),
|
||||
total_size: Arc::new(atomic::AtomicU64::new(0)),
|
||||
extra: HuyaExtra {
|
||||
live_stream: Arc::new(RwLock::new(None)),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
async fn check_status(&self) -> bool {
|
||||
let pre_live_status = self.room_info.read().await.status;
|
||||
match api::get_room_info(&self.client, &self.account, &self.room_id).await {
|
||||
Ok((user_info, room_info, stream_info)) => {
|
||||
let live_status = room_info.status;
|
||||
|
||||
*self.room_info.write().await = room_info;
|
||||
|
||||
*self.user_info.write().await = user_info;
|
||||
|
||||
if pre_live_status != live_status {
|
||||
// live status changed, reset current record flag
|
||||
log::info!(
|
||||
"[{}]Live status changed to {}, auto_start: {}",
|
||||
&self.room_id,
|
||||
live_status,
|
||||
self.enabled.load(atomic::Ordering::Relaxed)
|
||||
);
|
||||
|
||||
if live_status {
|
||||
let _ = self.event_channel.send(RecorderEvent::LiveStart {
|
||||
recorder: self.info().await,
|
||||
});
|
||||
} else {
|
||||
let _ = self.event_channel.send(RecorderEvent::LiveEnd {
|
||||
platform: PlatformType::Douyin,
|
||||
room_id: self.room_id.clone(),
|
||||
recorder: self.info().await,
|
||||
});
|
||||
}
|
||||
|
||||
self.reset().await;
|
||||
}
|
||||
|
||||
if !live_status {
|
||||
self.reset().await;
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
let should_record = self.should_record().await;
|
||||
|
||||
if !should_record {
|
||||
return true;
|
||||
}
|
||||
|
||||
*self.extra.live_stream.write().await = Some(stream_info.clone());
|
||||
let platform_live_id = stream_info.id();
|
||||
*self.platform_live_id.write().await = platform_live_id;
|
||||
|
||||
true
|
||||
}
|
||||
Err(e) => {
|
||||
log::warn!("[{}]Update room status failed: {}", &self.room_id, e);
|
||||
pre_live_status
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn reset(&self) {
|
||||
*self.platform_live_id.write().await = String::new();
|
||||
self.last_update
|
||||
.store(Utc::now().timestamp(), atomic::Ordering::Relaxed);
|
||||
self.last_sequence.store(0, atomic::Ordering::Relaxed);
|
||||
self.total_duration.store(0, atomic::Ordering::Relaxed);
|
||||
self.total_size.store(0, atomic::Ordering::Relaxed);
|
||||
*self.extra.live_stream.write().await = None;
|
||||
}
|
||||
|
||||
async fn update_entries(&self, live_id: &str) -> Result<(), RecorderError> {
|
||||
// Get current room info and stream URL
|
||||
let room_info = self.room_info.read().await.clone();
|
||||
let Some(stream) = self.extra.live_stream.read().await.clone() else {
|
||||
return Err(RecorderError::NoStreamAvailable);
|
||||
};
|
||||
|
||||
let work_dir = self.work_dir(live_id).await;
|
||||
let _ = tokio::fs::create_dir_all(&work_dir.full_path()).await;
|
||||
|
||||
// download cover
|
||||
let cover_url = room_info.room_cover.clone();
|
||||
let cover_path = work_dir.with_filename("cover.jpg");
|
||||
let _ = api::download_file(&self.client, &cover_url, &cover_path.full_path()).await;
|
||||
|
||||
*self.live_id.write().await = live_id.to_string();
|
||||
|
||||
// Setup danmu store
|
||||
let danmu_file_path = work_dir.with_filename("danmu.txt");
|
||||
let danmu_storage = DanmuStorage::new(&danmu_file_path.full_path()).await;
|
||||
*self.danmu_storage.write().await = danmu_storage;
|
||||
|
||||
// Start danmu task
|
||||
if let Some(danmu_task) = self.danmu_task.lock().await.as_mut() {
|
||||
danmu_task.abort();
|
||||
}
|
||||
if let Some(danmu_stream_task) = self.danmu_task.lock().await.as_mut() {
|
||||
danmu_stream_task.abort();
|
||||
}
|
||||
|
||||
let _ = self.event_channel.send(RecorderEvent::RecordStart {
|
||||
recorder: self.info().await,
|
||||
});
|
||||
|
||||
log::debug!("[{}]Stream URL: {}", &self.room_id, stream.hls_url);
|
||||
|
||||
let hls_stream =
|
||||
construct_stream_from_variant(live_id, &stream.hls_url, Format::TS, Codec::Avc)
|
||||
.await
|
||||
.map_err(|_| RecorderError::NoStreamAvailable)?;
|
||||
let hls_recorder = HlsRecorder::new(
|
||||
self.room_id.clone(),
|
||||
Arc::new(hls_stream),
|
||||
self.client.clone(),
|
||||
Some(self.account.cookies.clone()),
|
||||
self.event_channel.clone(),
|
||||
work_dir.full_path(),
|
||||
self.enabled.clone(),
|
||||
)
|
||||
.await;
|
||||
|
||||
if let Err(e) = hls_recorder.start().await {
|
||||
log::error!("[{}]Failed to start hls recorder: {}", &self.room_id, e);
|
||||
return Err(e);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
impl crate::traits::RecorderTrait<HuyaExtra> for HuyaRecorder {
    /// Spawn the background monitor/record loop and store its JoinHandle in
    /// `record_task`. The loop keeps running until `quit` is set.
    async fn run(&self) {
        let self_clone = self.clone();
        *self.record_task.lock().await = Some(tokio::spawn(async move {
            while !self_clone.quit.load(atomic::Ordering::Relaxed) {
                if self_clone.check_status().await {
                    // Live status is ok, start recording
                    if self_clone.should_record().await {
                        self_clone
                            .is_recording
                            .store(true, atomic::Ordering::Relaxed);
                        let live_id = Utc::now().timestamp_millis().to_string();
                        // update_entries blocks for the whole recording
                        // session; an error means the session could not be
                        // started or ended abnormally.
                        if let Err(e) = self_clone.update_entries(&live_id).await {
                            log::error!("[{}]Update entries error: {}", &self_clone.room_id, e);
                        }
                    }
                    if self_clone.is_recording.load(atomic::Ordering::Relaxed) {
                        // We were recording, so announce that the record ended.
                        let _ = self_clone.event_channel.send(RecorderEvent::RecordEnd {
                            recorder: self_clone.info().await,
                        });
                    }
                    self_clone
                        .is_recording
                        .store(false, atomic::Ordering::Relaxed);
                    self_clone.reset().await;
                    // Check status again after some seconds (0-4s jitter).
                    let secs = random::<u64>() % 5;
                    tokio::time::sleep(Duration::from_secs(secs)).await;
                    continue;
                }

                // Room offline: wait a full update interval before re-polling.
                tokio::time::sleep(Duration::from_secs(
                    self_clone.update_interval.load(atomic::Ordering::Relaxed),
                ))
                .await;
            }
            log::info!("[{}]Recording thread quit.", &self_clone.room_id);
        }));
    }
}
|
||||
215
src-tauri/crates/recorder/src/platforms/huya/reference.js
Normal file
@@ -0,0 +1,215 @@
|
||||
/**
|
||||
* 虎牙直播播放器URL构建函数
|
||||
* 基于对虎牙直播JavaScript代码的深入分析
|
||||
* 谢谢 Claude 4.5 Sonnet
|
||||
*/
|
||||
|
||||
/**
 * Build the final player URL from a player configuration object.
 *
 * Appends, in order: the anti-leech token for the player type, identity
 * parameters (uid/sGuid/appid), stream parameters (sStreamName/presenterUid),
 * playback configuration (playTimeout/h5Root), dynamic parameters (t/seqId)
 * and static version parameters (ver/sv).
 *
 * @param {Object} info - player configuration (url, anti-codes, uid, sGuid,
 *   appid, type, sStreamName, presenterUid, playTimeout, h5Root)
 * @returns {string} the complete playback URL
 * @throws {Error} when info.url is missing
 */
function buildPlayerUrl(info) {
  if (!info.url) {
    throw new Error("URL is required");
  }

  const pieces = [info.url];

  // Make sure the base ends with a query separator before params are appended.
  if (!info.url.includes("?")) {
    pieces.push("?");
  } else if (!info.url.endsWith("&") && !info.url.endsWith("?")) {
    pieces.push("&");
  }

  // Anti-leech token depends on the player type; skipped when absent.
  const antiCodes = {
    P2PFLV: info.sFlvAntiCode,
    HLS: info.sHlsAntiCode,
    P2P: info.sP2pAntiCode,
  };
  if (antiCodes[info.type]) {
    pieces.push(antiCodes[info.type]);
  }

  // Identity parameters (uid/appid may legitimately be 0, hence !== undefined).
  if (info.uid !== undefined) {
    pieces.push("&uid=" + encodeURIComponent(info.uid));
  }
  if (info.sGuid) {
    pieces.push("&sGuid=" + encodeURIComponent(info.sGuid));
  }
  if (info.appid !== undefined) {
    pieces.push("&appid=" + encodeURIComponent(info.appid));
  }

  // Stream parameters.
  if (info.sStreamName) {
    pieces.push("&sStreamName=" + encodeURIComponent(info.sStreamName));
  }
  if (info.presenterUid) {
    pieces.push("&presenterUid=" + encodeURIComponent(info.presenterUid));
  }

  // Playback configuration.
  if (info.playTimeout) {
    pieces.push("&playTimeout=" + encodeURIComponent(info.playTimeout));
  }
  if (info.h5Root) {
    pieces.push("&h5Root=" + encodeURIComponent(info.h5Root));
  }

  // Dynamic parameters: current timestamp and a per-request sequence id.
  pieces.push("&t=" + Date.now());
  pieces.push("&seqId=" + generateSeqId());

  // Static version parameters.
  pieces.push("&ver=1");
  pieces.push("&sv=" + getVersion());

  return pieces.join("");
}
|
||||
|
||||
/**
 * Generate a pseudo-unique sequence id (mirrors the player's internal
 * getAnticodeSeqid() behavior).
 * @returns {string} "<epoch-ms>_<random 0..999999>"
 */
function generateSeqId() {
  const randomPart = Math.floor(Math.random() * 1000000);
  return `${Date.now()}_${randomPart}`;
}
|
||||
|
||||
/**
 * Build a version string in Huya's `YYYYMMDDhhmm` format from the current
 * local time (mirrors the player's internal version logic).
 * @returns {string} 12-digit version string
 */
function getVersion() {
  const pad2 = (v) => String(v).padStart(2, "0");
  const now = new Date();
  return (
    now.getFullYear() +
    pad2(now.getMonth() + 1) +
    pad2(now.getDate()) +
    pad2(now.getHours()) +
    pad2(now.getMinutes())
  );
}
|
||||
|
||||
/**
 * Build the full playback URL from a Base64-encoded liveLineUrl.
 * @param {string} liveLineUrl - Base64-encoded liveLineUrl
 * @param {Object} streamInfo - stream information object
 * @param {Object} userInfo - user information object
 * @returns {string} the complete playback URL
 */
function buildUrlFromLiveLineUrl(liveLineUrl, streamInfo, userInfo) {
  // Decode the Base64 liveLineUrl into the raw base URL.
  const decodedUrl = atob(liveLineUrl);

  // Assemble the configuration expected by buildPlayerUrl(), falling back to
  // the defaults the player itself uses (appid 66, P2PFLV, 5s timeout).
  const playerInfo = {
    url: decodedUrl,
    sStreamName: streamInfo.sStreamName,
    presenterUid: streamInfo.presenterUid,
    sFlvAntiCode: streamInfo.sFlvAntiCode,
    sHlsAntiCode: streamInfo.sHlsAntiCode,
    sP2pAntiCode: streamInfo.sP2pAntiCode,
    uid: userInfo.uid || 0,
    sGuid: userInfo.sGuid || "",
    appid: userInfo.appid || 66,
    type: streamInfo.type || "P2PFLV",
    playTimeout: streamInfo.playTimeout || 5000,
    h5Root: "https://hd.huya.com/cdn_libs/mobile/",
  };

  return buildPlayerUrl(playerInfo);
}
|
||||
|
||||
/**
 * Split a full player URL into its base (origin + path) and a plain object
 * of its query parameters.
 * @param {string} url - the complete playback URL
 * @returns {{baseUrl: string, params: Object}} parsed components
 */
function parsePlayerUrl(url) {
  const parsed = new URL(url);
  // Duplicate keys keep the last value, same as assigning in iteration order.
  const params = Object.fromEntries(parsed.searchParams.entries());

  return {
    baseUrl: parsed.origin + parsed.pathname,
    params: params,
  };
}
|
||||
|
||||
/**
 * Check whether `url` parses as a URL and carries every query parameter the
 * player requires (uid, sGuid, appid, seqId, t).
 * @param {string} url - playback URL to validate
 * @returns {boolean} true when all required parameters are present
 */
function validatePlayerUrl(url) {
  const REQUIRED_PARAMS = ["uid", "sGuid", "appid", "seqId", "t"];
  try {
    const { searchParams } = new URL(url);
    return REQUIRED_PARAMS.every((name) => searchParams.has(name));
  } catch (e) {
    // Unparseable input is simply invalid, never an exception for callers.
    return false;
  }
}
|
||||
|
||||
console.log("虎牙直播播放器URL构建函数已加载");

// Example usage: build a playable URL from a captured stream configuration.
const exampleInfo = {
  url: "https://tx.hls.huya.com/src/431653844-431653844-1853939143172685824-863431144-10057-A-0-1-imgplus.m3u8?ratio=2000&wsSecret=725304fc2867cbe6254f12b264055136&wsTime=68fb9aa9&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103",
  sStreamName:
    "431653844-431653844-1853939143172685824-863431144-10057-A-0-1-imgplus",
  presenterUid: 431653844,
  sFlvAntiCode:
    "wsSecret=820369d885b161baa5a7a82170881d78&wsTime=68fb97be&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103",
  uid: 2246697169,
  sGuid: "0af264cd4955d5688902472c482cb47c",
  appid: 66,
  type: "HLS",
  playTimeout: 5000,
  h5Root: "https://hd.huya.com/cdn_libs/mobile/",
};

const playerUrl = buildPlayerUrl(exampleInfo);
console.log("构建的播放URL:", playerUrl);
|
||||
215
src-tauri/crates/recorder/src/platforms/huya/url_builder.rs
Normal file
@@ -0,0 +1,215 @@
|
||||
use std::collections::HashMap;
|
||||
use url::Url;
|
||||
|
||||
/// Player configuration used to build the final playback URL.
#[derive(Debug, Clone)]
pub struct PlayerInfo {
    /// Decoded base URL.
    pub url: String,
    /// Stream name.
    pub s_stream_name: Option<String>,
    /// Presenter (streamer) UID.
    pub presenter_uid: Option<String>,
    /// HLS anti-leech token.
    pub s_hls_anti_code: Option<String>,
}
|
||||
|
||||
/// Stateless builder for Huya playback URLs (namespace for associated fns).
pub struct UrlBuilder;
|
||||
|
||||
impl UrlBuilder {
|
||||
/// Generate a pseudo-random uid from the current epoch millis and a random
/// component.
///
/// NOTE(review): `millis * 1000 + random(0..1_000_000)` produces overlapping
/// ranges for adjacent milliseconds, so collisions are possible — presumably
/// strict uniqueness is not required here; confirm with callers.
fn generate_uid() -> u64 {
    let timestamp = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap()
        .as_millis();
    let random = fastrand::u32(0..1000000);
    timestamp as u64 * 1000 + random as u64
}
|
||||
|
||||
/// Generate a pseudo-random device GUID string: "<epoch-ms>_<random>".
fn generate_s_guid() -> String {
    let timestamp = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap()
        .as_millis();
    let random = fastrand::u32(0..1000000);
    format!("{}_{}", timestamp, random)
}
|
||||
|
||||
/// Build the complete player URL for the given configuration.
///
/// Appends, in order: the HLS anti-leech token, generated identity
/// parameters (uid/sGuid/appid), stream parameters, playback configuration,
/// dynamic parameters (t/seqId) and static version parameters (ver/sv).
///
/// # Arguments
/// * `info` - player configuration
///
/// # Returns
/// * `Result<String, String>` - the complete playback URL, or an error
///   message when `info.url` is empty
pub fn build_player_url(info: &PlayerInfo) -> Result<String, String> {
    if info.url.is_empty() {
        return Err("URL is required".to_string());
    }

    let mut base_url = info.url.clone();

    // Make sure the base ends with a query separator before appending params.
    if !base_url.contains('?') {
        base_url.push('?');
    } else if !base_url.ends_with('&') && !base_url.ends_with('?') {
        base_url.push('&');
    }

    // HLS anti-leech token (already URL-encoded by the upstream source).
    if let Some(anti_code) = &info.s_hls_anti_code {
        base_url.push_str(anti_code);
    }

    // Generated identity parameters.
    base_url.push_str(&format!("&uid={}", Self::generate_uid()));
    base_url.push_str(&format!("&sGuid={}", Self::generate_s_guid()));
    base_url.push_str(&format!("&appid={}", 66));

    // Stream parameters.
    if let Some(s_stream_name) = &info.s_stream_name {
        base_url.push_str(&format!(
            "&sStreamName={}",
            urlencoding::encode(s_stream_name)
        ));
    }

    if let Some(presenter_uid) = &info.presenter_uid {
        base_url.push_str(&format!(
            "&presenterUid={}",
            urlencoding::encode(presenter_uid)
        ));
    }

    // Playback configuration.
    base_url.push_str(&format!("&playTimeout={}", 5000));
    base_url.push_str(&format!(
        "&h5Root={}",
        "https://hd.huya.com/cdn_libs/mobile/"
    ));

    // Dynamic parameters: current timestamp plus a per-request sequence id.
    let timestamp = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap()
        .as_millis();
    base_url.push_str(&format!("&t={}", timestamp));

    let seq_id = Self::generate_seq_id();
    base_url.push_str(&format!("&seqId={}", seq_id));

    // Static version parameters.
    base_url.push_str("&ver=1");
    base_url.push_str(&format!("&sv={}", Self::get_version()));

    Ok(base_url)
}
|
||||
|
||||
/// 解析虎牙直播URL参数
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `url` - 完整的播放URL
|
||||
///
|
||||
/// # Returns
|
||||
/// * `Result<(String, HashMap<String, String>), String>` - 基础URL和参数映射
|
||||
pub fn parse_player_url(url: &str) -> Result<(String, HashMap<String, String>), String> {
|
||||
let url_obj = Url::parse(url).map_err(|e| format!("Failed to parse URL: {}", e))?;
|
||||
let mut params = HashMap::new();
|
||||
|
||||
for (key, value) in url_obj.query_pairs() {
|
||||
params.insert(key.to_string(), value.to_string());
|
||||
}
|
||||
|
||||
let base_url = format!(
|
||||
"{}://{}{}",
|
||||
url_obj.scheme(),
|
||||
url_obj.host_str().unwrap_or(""),
|
||||
url_obj.path()
|
||||
);
|
||||
Ok((base_url, params))
|
||||
}
|
||||
|
||||
/// 验证播放URL是否有效
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `url` - 播放URL
|
||||
///
|
||||
/// # Returns
|
||||
/// * `bool` - 是否有效
|
||||
pub fn validate_player_url(url: &str) -> bool {
|
||||
match Url::parse(url) {
|
||||
Ok(url_obj) => {
|
||||
let params: HashMap<String, String> = url_obj
|
||||
.query_pairs()
|
||||
.map(|(k, v)| (k.to_string(), v.to_string()))
|
||||
.collect();
|
||||
|
||||
// 检查必需参数
|
||||
let required_params = ["uid", "sGuid", "appid", "seqId", "t"];
|
||||
required_params
|
||||
.iter()
|
||||
.all(|param| params.contains_key(*param))
|
||||
}
|
||||
Err(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// 生成序列ID
|
||||
/// 模拟播放器内部的getAnticodeSeqid()方法
|
||||
///
|
||||
/// # Returns
|
||||
/// * `String` - 序列ID
|
||||
fn generate_seq_id() -> String {
|
||||
let timestamp = std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_millis();
|
||||
let random = fastrand::u32(0..1000000);
|
||||
format!("{}_{}", timestamp, random)
|
||||
}
|
||||
|
||||
/// 获取版本号
|
||||
/// 模拟播放器内部的版本获取逻辑
|
||||
///
|
||||
/// # Returns
|
||||
/// * `String` - 版本号
|
||||
fn get_version() -> String {
|
||||
let now = chrono::Utc::now();
|
||||
now.format("%Y%m%d%H%M").to_string()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Building with realistic Huya data must succeed and carry the
    /// injected identity / sequencing parameters.
    #[test]
    fn test_build_player_url() {
        let info = PlayerInfo {
            url: "https://tx.hls.huya.com/src/431653844-431653844-1853939143172685824-863431144-10057-A-0-1-imgplus.m3u8?ratio=2000&wsSecret=725304fc2867cbe6254f12b264055136&wsTime=68fb9aa9&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103".to_string(),
            s_stream_name: Some("431653844-431653844-1853939143172685824-863431144-10057-A-0-1-imgplus".to_string()),
            presenter_uid: Some("431653844".to_string()),
            s_hls_anti_code: Some("wsSecret=820369d885b161baa5a7a82170881d78&wsTime=68fb97be&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103".to_string()),
        };

        let result = UrlBuilder::build_player_url(&info);
        assert!(result.is_ok());
        let url = result.unwrap();
        println!("url: {}", url);
        // Parameters added unconditionally by the builder.
        assert!(url.contains("appid=66"));
        assert!(url.contains("seqId="));
        assert!(url.contains("t="));
    }

    /// A URL validates only when uid, sGuid, appid, seqId and t are all present.
    #[test]
    fn test_validate_player_url() {
        let valid_url =
            "https://example.com/stream.m3u8?uid=123&sGuid=abc&appid=66&seqId=123_456&t=1234567890";
        assert!(UrlBuilder::validate_player_url(valid_url));

        // Missing appid/seqId/t -> must be rejected.
        let invalid_url = "https://example.com/stream.m3u8?uid=123&sGuid=abc";
        assert!(!UrlBuilder::validate_player_url(invalid_url));
    }
}
|
||||
42
src-tauri/crates/recorder/src/platforms/mod.rs
Normal file
@@ -0,0 +1,42 @@
|
||||
pub mod bilibili;
|
||||
pub mod douyin;
|
||||
pub mod huya;
|
||||
/// Streaming platform discriminator used across recorders, cache paths
/// and the database.
///
/// `Hash` is derived: for a fieldless `Copy + Eq` enum, the derived impl
/// hashes the discriminant, making the previous manual
/// `mem::discriminant`-based `Hash` impl (and its `std::hash` imports)
/// redundant.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum PlatformType {
    BiliBili,
    Douyin,
    Huya,
    Youtube,
}

impl PlatformType {
    /// Canonical lowercase identifier, the inverse of [`FromStr`] parsing.
    pub fn as_str(&self) -> &'static str {
        match self {
            PlatformType::BiliBili => "bilibili",
            PlatformType::Douyin => "douyin",
            PlatformType::Huya => "huya",
            PlatformType::Youtube => "youtube",
        }
    }
}

impl std::str::FromStr for PlatformType {
    type Err = String;

    /// Parse the canonical identifier produced by [`PlatformType::as_str`];
    /// any other string is an error.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "bilibili" => Ok(PlatformType::BiliBili),
            "douyin" => Ok(PlatformType::Douyin),
            "huya" => Ok(PlatformType::Huya),
            "youtube" => Ok(PlatformType::Youtube),
            _ => Err(format!("Invalid platform type: {s}")),
        }
    }
}
|
||||
96
src-tauri/crates/recorder/src/traits.rs
Normal file
@@ -0,0 +1,96 @@
|
||||
use std::{
|
||||
path::PathBuf,
|
||||
sync::{atomic, Arc},
|
||||
};
|
||||
|
||||
use crate::platforms::PlatformType;
|
||||
use crate::{
|
||||
account::Account, danmu::DanmuStorage, events::RecorderEvent, CachePath, RecorderInfo,
|
||||
RoomInfo, UserInfo,
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use tokio::{
|
||||
sync::{broadcast, Mutex, RwLock},
|
||||
task::JoinHandle,
|
||||
};
|
||||
|
||||
/// Read-only accessors every platform recorder must expose.
///
/// The default methods on `RecorderTrait` are implemented purely in terms
/// of these getters. `T` is a platform-specific extra-state type returned
/// by [`Self::extra`].
#[allow(dead_code)]
pub trait RecorderBasicTrait<T> {
    /// Which streaming platform this recorder targets.
    fn platform(&self) -> PlatformType;
    /// Room identifier as a string.
    fn room_id(&self) -> String;
    /// Account used for platform requests.
    fn account(&self) -> &Account;
    /// Shared HTTP client.
    fn client(&self) -> &reqwest::Client;
    /// Broadcast channel for recorder events.
    fn event_channel(&self) -> &broadcast::Sender<RecorderEvent>;
    /// Root cache directory for recorded data.
    fn cache_dir(&self) -> PathBuf;
    /// Shutdown flag; set once on stop.
    fn quit(&self) -> &atomic::AtomicBool;
    /// Whether recording is enabled for this room.
    fn enabled(&self) -> &atomic::AtomicBool;
    /// Whether a recording is currently in progress.
    fn is_recording(&self) -> &atomic::AtomicBool;
    /// Latest room metadata.
    fn room_info(&self) -> Arc<RwLock<RoomInfo>>;
    /// Latest streamer metadata.
    fn user_info(&self) -> Arc<RwLock<UserInfo>>;
    /// Live id as reported by the platform.
    fn platform_live_id(&self) -> Arc<RwLock<String>>;
    /// Internal live id.
    fn live_id(&self) -> Arc<RwLock<String>>;
    /// Handle of the danmu (chat) collection task, if running.
    fn danmu_task(&self) -> Arc<Mutex<Option<JoinHandle<()>>>>;
    /// Handle of the stream recording task, if running.
    fn record_task(&self) -> Arc<Mutex<Option<JoinHandle<()>>>>;
    /// Danmu persistence backend, if configured.
    fn danmu_storage(&self) -> Arc<RwLock<Option<DanmuStorage>>>;
    /// Marker of the last update (units defined by implementors — verify).
    fn last_update(&self) -> &atomic::AtomicI64;
    /// Last processed segment sequence number.
    fn last_sequence(&self) -> &atomic::AtomicU64;
    /// Running total of recorded duration.
    fn total_duration(&self) -> &atomic::AtomicU64;
    /// Running total of recorded size.
    fn total_size(&self) -> &atomic::AtomicU64;
    /// Platform-specific extra state.
    fn extra(&self) -> &T;
}
|
||||
|
||||
/// Default recording lifecycle shared by all platform recorders, built on
/// top of the accessors from `RecorderBasicTrait`.
#[async_trait]
pub trait RecorderTrait<T>: RecorderBasicTrait<T> {
    /// Platform-specific main loop; implementations provide this.
    async fn run(&self);
    /// Signal shutdown, then abort and reap the danmu and record tasks.
    async fn stop(&self) {
        self.quit().store(true, atomic::Ordering::Relaxed);
        // Abort first, then await, so each task is fully terminated before
        // we return; the JoinError produced by abort is intentionally ignored.
        if let Some(danmu_task) = self.danmu_task().lock().await.take() {
            danmu_task.abort();
            let _ = danmu_task.await;
        }
        if let Some(record_task) = self.record_task().lock().await.take() {
            record_task.abort();
            let _ = record_task.await;
        }
    }
    /// Record only when enabled and not shutting down.
    async fn should_record(&self) -> bool {
        if self.quit().load(atomic::Ordering::Relaxed) {
            return false;
        }

        self.enabled().load(atomic::Ordering::Relaxed)
    }

    /// Cache directory for a specific live session.
    async fn work_dir(&self, live_id: &str) -> CachePath {
        CachePath::new(self.cache_dir(), self.platform(), &self.room_id(), live_id)
    }
    /// Snapshot of the recorder's current state (room, user, flags).
    async fn info(&self) -> RecorderInfo {
        let room_info = self.room_info().read().await.clone();
        let user_info = self.user_info().read().await.clone();
        let is_recording = self.is_recording().load(atomic::Ordering::Relaxed);
        RecorderInfo {
            platform_live_id: self.platform_live_id().read().await.clone(),
            live_id: self.live_id().read().await.clone(),
            recording: is_recording,
            enabled: self.enabled().load(atomic::Ordering::Relaxed),
            room_info: RoomInfo {
                platform: self.platform().as_str().to_string(),
                room_id: self.room_id().to_string(),
                room_title: room_info.room_title.clone(),
                room_cover: room_info.room_cover.clone(),
                status: room_info.status,
            },
            user_info: UserInfo {
                user_id: user_info.user_id.to_string(),
                user_name: user_info.user_name.clone(),
                user_avatar: user_info.user_avatar.clone(),
            },
        }
    }
    /// Enable recording for this room.
    async fn enable(&self) {
        self.enabled().store(true, atomic::Ordering::Relaxed);
    }
    /// Disable recording for this room.
    async fn disable(&self) {
        self.enabled().store(false, atomic::Ordering::Relaxed);
    }
}
|
||||
1
src-tauri/crates/recorder/src/utils/mod.rs
Normal file
@@ -0,0 +1 @@
|
||||
pub mod user_agent_generator;
|
||||
234
src-tauri/crates/recorder/src/utils/user_agent_generator.rs
Normal file
@@ -0,0 +1,234 @@
|
||||
use rand::prelude::*;
|
||||
|
||||
/// Random browser User-Agent string generator.
pub struct UserAgentGenerator {
    // Thread-local random number generator reused across calls.
    rng: ThreadRng,
}

impl Default for UserAgentGenerator {
    fn default() -> Self {
        Self::new()
    }
}
|
||||
|
||||
impl UserAgentGenerator {
    /// Create a generator backed by the thread-local RNG.
    pub fn new() -> Self {
        Self { rng: rand::rng() }
    }

    /// Generate a user agent
    ///
    /// # Arguments
    ///
    /// * `mobile` - Whether to generate a mobile user agent
    ///
    /// # Returns
    ///
    /// A string representing the user agent
    pub fn generate(&mut self, mobile: bool) -> String {
        if mobile {
            return self.generate_mobile();
        }
        // Pick one of the four desktop browser families uniformly.
        let browser_type = self.rng.random_range(0..4);

        match browser_type {
            0 => self.generate_chrome(),
            1 => self.generate_firefox(),
            2 => self.generate_safari(),
            _ => self.generate_edge(),
        }
    }

    /// Mobile UA: 70% Android Chrome, 30% iOS Safari.
    fn generate_mobile(&mut self) -> String {
        let mobile_versions = [
            "120.0.0.0",
            "119.0.0.0",
            "118.0.0.0",
            "117.0.0.0",
            "116.0.0.0",
            "115.0.0.0",
            "114.0.0.0",
        ];
        let mobile_version = mobile_versions.choose(&mut self.rng).unwrap();

        // Randomly choose between Android and iOS.
        if self.rng.random_bool(0.7) {
            // Android User-Agent
            let android_versions = ["13", "12", "11", "10", "9"];
            let android_version = android_versions.choose(&mut self.rng).unwrap();
            let device_models = [
                "SM-G991B",
                "SM-G996B",
                "SM-G998B",
                "SM-A525F",
                "SM-A725F",
                "Pixel 6",
                "Pixel 7",
                "Pixel 8",
                "OnePlus 9",
                "OnePlus 10",
            ];
            let device_model = device_models.choose(&mut self.rng).unwrap();

            format!("Mozilla/5.0 (Linux; Android {android_version}; {device_model}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{mobile_version} Mobile Safari/537.36")
        } else {
            // iOS User-Agent
            let ios_versions = ["17_1", "16_7", "16_6", "15_7", "14_8"];
            let ios_version = ios_versions.choose(&mut self.rng).unwrap();
            let device_types = ["iPhone; CPU iPhone OS", "iPad; CPU OS"];
            let device_type = device_types.choose(&mut self.rng).unwrap();

            format!("Mozilla/5.0 ({device_type} {ios_version} like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.1 Mobile/15E148 Safari/604.1")
        }
    }

    /// Desktop Chrome UA with a random OS / version / WebKit build.
    fn generate_chrome(&mut self) -> String {
        let chrome_versions = [
            "120.0.0.0",
            "119.0.0.0",
            "118.0.0.0",
            "117.0.0.0",
            "116.0.0.0",
            "115.0.0.0",
            "114.0.0.0",
        ];
        let webkit_versions = ["537.36", "537.35", "537.34"];

        let os = self.get_random_os();
        let chrome_version = chrome_versions.choose(&mut self.rng).unwrap();
        let webkit_version = webkit_versions.choose(&mut self.rng).unwrap();

        format!(
            "Mozilla/5.0 ({os}) AppleWebKit/{webkit_version} (KHTML, like Gecko) Chrome/{chrome_version} Safari/{webkit_version}"
        )
    }

    /// Desktop Firefox UA with a random OS / version.
    fn generate_firefox(&mut self) -> String {
        let firefox_versions = ["121.0", "120.0", "119.0", "118.0", "117.0", "116.0"];

        let os = self.get_random_os_firefox();
        let firefox_version = firefox_versions.choose(&mut self.rng).unwrap();

        format!("Mozilla/5.0 ({os}; rv:{firefox_version}) Gecko/20100101 Firefox/{firefox_version}")
    }

    /// Safari UA — 30% iOS devices, otherwise macOS.
    fn generate_safari(&mut self) -> String {
        let safari_versions = ["17.1", "17.0", "16.6", "16.5", "16.4", "16.3"];
        let webkit_versions = ["605.1.15", "605.1.14", "605.1.13"];

        let safari_version = safari_versions.choose(&mut self.rng).unwrap();
        let webkit_version = webkit_versions.choose(&mut self.rng).unwrap();

        // Safari only ships on macOS and iOS.
        let is_mobile = self.rng.random_bool(0.3);

        if is_mobile {
            let ios_versions = ["17_1", "16_7", "16_6", "15_7"];
            let ios_version = ios_versions.choose(&mut self.rng).unwrap();
            let device = ["iPhone; CPU iPhone OS", "iPad; CPU OS"]
                .choose(&mut self.rng)
                .unwrap();

            format!(
                "Mozilla/5.0 ({device} {ios_version} like Mac OS X) AppleWebKit/{webkit_version} (KHTML, like Gecko) Version/{safari_version} Mobile/15E148 Safari/{webkit_version}"
            )
        } else {
            let macos_versions = ["14_1", "13_6", "12_7"];
            let macos_version = macos_versions.choose(&mut self.rng).unwrap();

            format!(
                "Mozilla/5.0 (Macintosh; Intel Mac OS X {macos_version}) AppleWebKit/{webkit_version} (KHTML, like Gecko) Version/{safari_version} Safari/{webkit_version}"
            )
        }
    }

    /// Desktop Edge UA (Chromium-based, so it carries both Chrome and Edg tokens).
    fn generate_edge(&mut self) -> String {
        let edge_versions = ["119.0.0.0", "118.0.0.0", "117.0.0.0", "116.0.0.0"];
        let chrome_versions = ["119.0.0.0", "118.0.0.0", "117.0.0.0", "116.0.0.0"];

        let os = self.get_random_os();
        let edge_version = edge_versions.choose(&mut self.rng).unwrap();
        let chrome_version = chrome_versions.choose(&mut self.rng).unwrap();

        format!(
            "Mozilla/5.0 ({os}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{chrome_version} Safari/537.36 Edg/{edge_version}"
        )
    }

    /// Random desktop OS token for Chrome/Edge UAs.
    fn get_random_os(&mut self) -> &'static str {
        let os_list = [
            "Windows NT 10.0; Win64; x64",
            "Windows NT 11.0; Win64; x64",
            "Macintosh; Intel Mac OS X 10_15_7",
            "Macintosh; Intel Mac OS X 10_14_6",
            "X11; Linux x86_64",
            "X11; Ubuntu; Linux x86_64",
        ];

        os_list.choose(&mut self.rng).unwrap()
    }

    /// Random desktop OS token for Firefox UAs (Firefox uses dotted macOS versions).
    fn get_random_os_firefox(&mut self) -> &'static str {
        let os_list = [
            "Windows NT 10.0; Win64; x64",
            "Windows NT 11.0; Win64; x64",
            "Macintosh; Intel Mac OS X 10.15",
            "X11; Linux x86_64",
            "X11; Ubuntu; Linux i686",
        ];

        os_list.choose(&mut self.rng).unwrap()
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Desktop UAs: non-empty, Mozilla-prefixed, from a known browser family.
    #[test]
    fn test_generate_user_agents() {
        let mut generator = UserAgentGenerator::new();

        for _ in 0..100 {
            let ua = generator.generate(false);
            assert!(!ua.is_empty());
            assert!(ua.starts_with("Mozilla/5.0"));

            // Verify a common browser identifier is present.
            assert!(
                ua.contains("Chrome")
                    || ua.contains("Firefox")
                    || ua.contains("Safari")
                    || ua.contains("Edg")
            );
        }
    }

    /// Chrome UAs carry the Chrome / Safari / AppleWebKit tokens.
    #[test]
    fn test_chrome_user_agent_format() {
        let mut generator = UserAgentGenerator::new();
        let ua = generator.generate_chrome();

        assert!(ua.contains("Chrome"));
        assert!(ua.contains("Safari"));
        assert!(ua.contains("AppleWebKit"));
    }

    /// Mobile UAs identify a mobile device and a mobile browser.
    #[test]
    fn test_mobile_user_agent_format() {
        let mut generator = UserAgentGenerator::new();

        for _ in 0..50 {
            let ua = generator.generate(true);
            assert!(!ua.is_empty());
            assert!(ua.starts_with("Mozilla/5.0"));

            // Verify a mobile device identifier is present.
            assert!(ua.contains("Android") || ua.contains("iPhone") || ua.contains("iPad"));

            // Verify a mobile browser identifier is present:
            // Android carries Chrome and Mobile Safari,
            // iOS carries Safari.
            assert!(ua.contains("Mobile Safari") || ua.contains("Chrome") || ua.contains("Safari"));
        }
    }
}
|
||||
@@ -1 +1 @@
|
||||
{"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main","Live*","Clip*"],"permissions":["core:default","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-copy-file","fs:allow-mkdir","fs:allow-remove","fs:allow-remove","fs:allow-rename","fs:allow-exists",{"identifier":"fs:scope","allow":["**"]},"core:window:default","core:window:allow-start-dragging","core:window:allow-close","core:window:allow-minimize","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-set-title","sql:allow-execute","shell:allow-open","dialog:allow-open","dialog:allow-save","dialog:allow-message","dialog:allow-ask","dialog:allow-confirm",{"identifier":"http:default","allow":[{"url":"https://*.hdslb.com/"},{"url":"https://afdian.com/"},{"url":"https://*.afdiancdn.com/"},{"url":"https://*.douyin.com/"},{"url":"https://*.douyinpic.com/"}]},"dialog:default","shell:default","fs:default","http:default","sql:default","os:default","notification:default","dialog:default","fs:default","http:default","shell:default","sql:default","os:default","dialog:default","deep-link:default"]}}
|
||||
{"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main","Live*","Clip*"],"permissions":["core:default","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-copy-file","fs:allow-mkdir","fs:allow-remove","fs:allow-remove","fs:allow-rename","fs:allow-exists",{"identifier":"fs:scope","allow":["**"]},"core:window:default","core:window:allow-start-dragging","core:window:allow-close","core:window:allow-minimize","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-set-title","sql:allow-execute","shell:allow-open","dialog:allow-open","dialog:allow-save","dialog:allow-message","dialog:allow-ask","dialog:allow-confirm",{"identifier":"http:default","allow":[{"url":"https://*.*"},{"url":"http://*.*"}]},"dialog:default","shell:default","fs:default","http:default","sql:default","os:default","notification:default","dialog:default","fs:default","http:default","shell:default","sql:default","os:default","dialog:default","deep-link:default"]}}
|
||||
@@ -1,9 +1,10 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use chrono::Local;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::atomic::{self, AtomicU64};
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::{recorder::PlatformType, recorder_manager::ClipRangeParams};
|
||||
use crate::{danmu2ass::Danmu2AssOptions, recorder_manager::ClipRangeParams};
|
||||
|
||||
#[derive(Deserialize, Serialize, Clone)]
|
||||
pub struct Config {
|
||||
@@ -35,12 +36,12 @@ pub struct Config {
|
||||
pub config_path: String,
|
||||
#[serde(default = "default_whisper_language")]
|
||||
pub whisper_language: String,
|
||||
#[serde(default = "default_user_agent")]
|
||||
pub user_agent: String,
|
||||
#[serde(default = "default_cleanup_source_flv")]
|
||||
pub cleanup_source_flv_after_import: bool,
|
||||
#[serde(default = "default_webhook_url")]
|
||||
pub webhook_url: String,
|
||||
#[serde(default = "default_danmu_ass_options")]
|
||||
pub danmu_ass_options: Danmu2AssOptions,
|
||||
#[serde(skip)]
|
||||
pub update_interval: Arc<AtomicU64>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Clone)]
|
||||
@@ -49,6 +50,10 @@ pub struct AutoGenerateConfig {
|
||||
pub encode_danmu: bool,
|
||||
}
|
||||
|
||||
fn default_danmu_ass_options() -> Danmu2AssOptions {
|
||||
Danmu2AssOptions::default()
|
||||
}
|
||||
|
||||
fn default_auto_subtitle() -> bool {
|
||||
false
|
||||
}
|
||||
@@ -70,7 +75,7 @@ fn default_openai_api_endpoint() -> String {
|
||||
}
|
||||
|
||||
fn default_openai_api_key() -> String {
|
||||
"".to_string()
|
||||
String::new()
|
||||
}
|
||||
|
||||
fn default_clip_name_format() -> String {
|
||||
@@ -92,16 +97,8 @@ fn default_whisper_language() -> String {
|
||||
"auto".to_string()
|
||||
}
|
||||
|
||||
fn default_user_agent() -> String {
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36".to_string()
|
||||
}
|
||||
|
||||
fn default_cleanup_source_flv() -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn default_webhook_url() -> String {
|
||||
"".to_string()
|
||||
String::new()
|
||||
}
|
||||
|
||||
impl Config {
|
||||
@@ -113,6 +110,7 @@ impl Config {
|
||||
if let Ok(content) = std::fs::read_to_string(config_path) {
|
||||
if let Ok(mut config) = toml::from_str::<Config>(&content) {
|
||||
config.config_path = config_path.to_str().unwrap().into();
|
||||
config.update_interval = Arc::new(AtomicU64::new(config.status_check_interval));
|
||||
return Ok(config);
|
||||
}
|
||||
}
|
||||
@@ -141,9 +139,9 @@ impl Config {
|
||||
status_check_interval: default_status_check_interval(),
|
||||
config_path: config_path.to_str().unwrap().into(),
|
||||
whisper_language: default_whisper_language(),
|
||||
user_agent: default_user_agent(),
|
||||
cleanup_source_flv_after_import: default_cleanup_source_flv(),
|
||||
webhook_url: default_webhook_url(),
|
||||
danmu_ass_options: default_danmu_ass_options(),
|
||||
update_interval: Arc::new(AtomicU64::new(default_status_check_interval())),
|
||||
};
|
||||
|
||||
config.save();
|
||||
@@ -177,20 +175,12 @@ impl Config {
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn set_user_agent(&mut self, user_agent: &str) {
|
||||
self.user_agent = user_agent.to_string();
|
||||
self.save();
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn set_cleanup_source_flv(&mut self, cleanup: bool) {
|
||||
self.cleanup_source_flv_after_import = cleanup;
|
||||
pub fn set_danmu_ass_options(&mut self, options: Danmu2AssOptions) {
|
||||
self.danmu_ass_options = options;
|
||||
self.save();
|
||||
}
|
||||
|
||||
pub fn generate_clip_name(&self, params: &ClipRangeParams) -> PathBuf {
|
||||
let platform = PlatformType::from_str(¶ms.platform).unwrap();
|
||||
|
||||
// get format config
|
||||
// filter special characters from title to make sure file name is valid
|
||||
let title = params
|
||||
@@ -200,9 +190,10 @@ impl Config {
|
||||
.collect::<String>();
|
||||
let format_config = self.clip_name_format.clone();
|
||||
let format_config = format_config.replace("{title}", &title);
|
||||
let format_config = format_config.replace("{platform}", platform.as_str());
|
||||
let format_config = format_config.replace("{platform}", ¶ms.platform);
|
||||
let format_config = format_config.replace("{room_id}", ¶ms.room_id.to_string());
|
||||
let format_config = format_config.replace("{live_id}", ¶ms.live_id);
|
||||
let format_config = format_config.replace("{note}", ¶ms.note);
|
||||
let format_config = format_config.replace(
|
||||
"{x}",
|
||||
¶ms
|
||||
@@ -229,8 +220,16 @@ impl Config {
|
||||
.map_or("0".to_string(), |r| r.duration().to_string()),
|
||||
);
|
||||
|
||||
let sanitized = sanitize_filename::sanitize(&format_config);
|
||||
let output = self.output.clone();
|
||||
|
||||
Path::new(&output).join(&format_config)
|
||||
Path::new(&output).join(&sanitized)
|
||||
}
|
||||
|
||||
pub fn set_status_check_interval(&mut self, interval: u64) {
|
||||
self.status_check_interval = interval;
|
||||
self.update_interval
|
||||
.store(interval, atomic::Ordering::Relaxed);
|
||||
self.save();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use crate::recorder::danmu::DanmuEntry;
|
||||
use recorder::danmu::DanmuEntry;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::VecDeque;
|
||||
|
||||
// code reference: https://github.com/tiansh/us-danmaku/blob/master/bilibili/bilibili_ASS_Danmaku_Downloader.user.js
|
||||
@@ -30,9 +31,32 @@ const BOTTOM_RESERVED: f64 = 50.0;
|
||||
const R2L_TIME: f64 = 8.0;
|
||||
const MAX_DELAY: f64 = 6.0;
|
||||
|
||||
pub fn danmu_to_ass(danmus: Vec<DanmuEntry>) -> String {
|
||||
#[derive(Deserialize, Serialize, Clone)]
|
||||
pub struct Danmu2AssOptions {
|
||||
pub font_size: f64,
|
||||
pub opacity: f64, // 透明度,范围 0.0-1.0,0.0为完全透明,1.0为完全不透明
|
||||
}
|
||||
|
||||
impl Default for Danmu2AssOptions {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
font_size: 36.0,
|
||||
opacity: 0.8, // 默认80%透明度
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn danmu_to_ass(danmus: Vec<DanmuEntry>, options: Danmu2AssOptions) -> String {
|
||||
let font_size = options.font_size; // Default font size
|
||||
let opacity = options.opacity; // 透明度参数
|
||||
|
||||
// 将透明度转换为十六进制Alpha值 (0.0-1.0 -> 0x00-0xFF)
|
||||
let alpha = ((1.0 - opacity) * 255.0) as u8;
|
||||
let alpha_hex = format!("{:02X}", alpha);
|
||||
|
||||
// ASS header
|
||||
let header = r#"[Script Info]
|
||||
let header = format!(
|
||||
r"[Script Info]
|
||||
Title: Bilibili Danmaku
|
||||
ScriptType: v4.00+
|
||||
Collisions: Normal
|
||||
@@ -42,14 +66,15 @@ Timer: 10.0000
|
||||
|
||||
[V4+ Styles]
|
||||
Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding
|
||||
Style: Default,微软雅黑,36,&H7fFFFFFF,&H7fFFFFFF,&H7f000000,&H7f000000,0,0,0,0,100,100,0,0,1,1,0,2,20,20,2,0
|
||||
Style: Default,微软雅黑,{},&H{}FFFFFF,&H{}FFFFFF,&H{}000000,&H{}000000,0,0,0,0,100,100,0,0,1,1,0,2,20,20,2,0
|
||||
|
||||
[Events]
|
||||
Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
|
||||
"#;
|
||||
",
|
||||
font_size, alpha_hex, alpha_hex, alpha_hex, alpha_hex
|
||||
);
|
||||
|
||||
let mut normal = normal_danmaku();
|
||||
let font_size = 36.0; // Default font size
|
||||
|
||||
// Convert danmus to ASS events
|
||||
let events = danmus
|
||||
@@ -87,22 +112,22 @@ Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
|
||||
.join("\n");
|
||||
|
||||
// Combine header and events
|
||||
format!("{}\n{}", header, events)
|
||||
format!("{header}\n{events}")
|
||||
}
|
||||
|
||||
fn format_time(seconds: f64) -> String {
|
||||
let hours = (seconds / 3600.0) as i32;
|
||||
let minutes = ((seconds % 3600.0) / 60.0) as i32;
|
||||
let seconds = seconds % 60.0;
|
||||
format!("{}:{:02}:{:05.2}", hours, minutes, seconds)
|
||||
format!("{hours}:{minutes:02}:{seconds:05.2}")
|
||||
}
|
||||
|
||||
fn escape_text(text: &str) -> String {
|
||||
text.replace("\\", "\\\\")
|
||||
.replace("{", "{")
|
||||
.replace("}", "}")
|
||||
.replace("\r", "")
|
||||
.replace("\n", "\\N")
|
||||
text.replace('\\', "\\\\")
|
||||
.replace('{', "{")
|
||||
.replace('}', "}")
|
||||
.replace('\r', "")
|
||||
.replace('\n', "\\N")
|
||||
}
|
||||
|
||||
fn normal_danmaku() -> impl FnMut(f64, f64, f64, bool) -> Option<DanmakuPosition> {
|
||||
@@ -144,8 +169,8 @@ fn normal_danmaku() -> impl FnMut(f64, f64, f64, bool) -> Option<DanmakuPosition
|
||||
|
||||
let p = space.m;
|
||||
let m = p + hv;
|
||||
let mut tas = t0s;
|
||||
let mut tal = t0l;
|
||||
let mut time_actual_start = t0s;
|
||||
let mut time_actual_leave = t0l;
|
||||
|
||||
for other in &used {
|
||||
if other.p >= m || other.m <= p {
|
||||
@@ -154,13 +179,13 @@ fn normal_danmaku() -> impl FnMut(f64, f64, f64, bool) -> Option<DanmakuPosition
|
||||
if other.b && b {
|
||||
continue;
|
||||
}
|
||||
tas = tas.max(other.tf);
|
||||
tal = tal.max(other.td);
|
||||
time_actual_start = time_actual_start.max(other.tf);
|
||||
time_actual_leave = time_actual_leave.max(other.td);
|
||||
}
|
||||
|
||||
suggestions.push(PositionSuggestion {
|
||||
p,
|
||||
r: (tas - t0s).max(tal - t0l),
|
||||
r: (time_actual_start - t0s).max(time_actual_leave - t0l),
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -1,16 +1,13 @@
|
||||
use crate::recorder::PlatformType;
|
||||
use recorder::account::Account;
|
||||
|
||||
use super::Database;
|
||||
use super::DatabaseError;
|
||||
use chrono::Utc;
|
||||
use rand::seq::SliceRandom;
|
||||
use rand::Rng;
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
|
||||
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
|
||||
pub struct AccountRow {
|
||||
pub platform: String,
|
||||
pub uid: u64, // Keep for Bilibili compatibility
|
||||
pub id_str: Option<String>, // New field for string IDs like Douyin sec_uid
|
||||
pub uid: String,
|
||||
pub name: String,
|
||||
pub avatar: String,
|
||||
pub csrf: String,
|
||||
@@ -18,159 +15,42 @@ pub struct AccountRow {
|
||||
pub created_at: String,
|
||||
}
|
||||
|
||||
impl AccountRow {
|
||||
pub fn to_account(&self) -> Account {
|
||||
Account {
|
||||
platform: self.platform.clone(),
|
||||
id: self.uid.clone(),
|
||||
name: self.name.clone(),
|
||||
avatar: self.avatar.clone(),
|
||||
csrf: self.csrf.clone(),
|
||||
cookies: self.cookies.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// accounts
|
||||
impl Database {
|
||||
// CREATE TABLE accounts (uid INTEGER PRIMARY KEY, name TEXT, avatar TEXT, csrf TEXT, cookies TEXT, created_at TEXT);
|
||||
pub async fn add_account(
|
||||
&self,
|
||||
platform: &str,
|
||||
cookies: &str,
|
||||
) -> Result<AccountRow, DatabaseError> {
|
||||
pub async fn add_account(&self, account: &AccountRow) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let platform = PlatformType::from_str(platform).unwrap();
|
||||
sqlx::query("INSERT INTO accounts (uid, platform, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7)").bind(&account.uid).bind(&account.platform).bind(&account.name).bind(&account.avatar).bind(&account.csrf).bind(&account.cookies).bind(&account.created_at).execute(&lock).await?;
|
||||
|
||||
let csrf = if platform == PlatformType::Douyin {
|
||||
Some("".to_string())
|
||||
} else {
|
||||
// parse cookies
|
||||
cookies
|
||||
.split(';')
|
||||
.map(|cookie| cookie.trim())
|
||||
.find_map(|cookie| -> Option<String> {
|
||||
match cookie.starts_with("bili_jct=") {
|
||||
true => {
|
||||
let var_name = &"bili_jct=";
|
||||
Some(cookie[var_name.len()..].to_string())
|
||||
}
|
||||
false => None,
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
if csrf.is_none() {
|
||||
return Err(DatabaseError::InvalidCookiesError);
|
||||
}
|
||||
|
||||
// parse uid and id_str based on platform
|
||||
let (uid, id_str) = if platform == PlatformType::BiliBili {
|
||||
// For Bilibili, extract numeric uid from cookies
|
||||
let uid = cookies
|
||||
.split("DedeUserID=")
|
||||
.collect::<Vec<&str>>()
|
||||
.get(1)
|
||||
.unwrap()
|
||||
.split(";")
|
||||
.collect::<Vec<&str>>()
|
||||
.first()
|
||||
.unwrap()
|
||||
.to_string()
|
||||
.parse::<u64>()
|
||||
.map_err(|_| DatabaseError::InvalidCookiesError)?;
|
||||
(uid, None)
|
||||
} else {
|
||||
// For Douyin, use temporary uid and will set id_str later with real sec_uid
|
||||
let temp_uid = rand::thread_rng().gen_range(10000..=i32::MAX) as u64;
|
||||
(temp_uid, Some(format!("temp_{}", temp_uid)))
|
||||
};
|
||||
|
||||
let account = AccountRow {
|
||||
platform: platform.as_str().to_string(),
|
||||
uid,
|
||||
id_str,
|
||||
name: "".into(),
|
||||
avatar: "".into(),
|
||||
csrf: csrf.unwrap(),
|
||||
cookies: cookies.into(),
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
};
|
||||
|
||||
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)").bind(account.uid as i64).bind(&account.platform).bind(&account.id_str).bind(&account.name).bind(&account.avatar).bind(&account.csrf).bind(&account.cookies).bind(&account.created_at).execute(&lock).await?;
|
||||
|
||||
Ok(account)
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn remove_account(&self, platform: &str, uid: u64) -> Result<(), DatabaseError> {
|
||||
pub async fn remove_account(&self, platform: &str, uid: &str) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let sql = sqlx::query("DELETE FROM accounts WHERE uid = $1 and platform = $2")
|
||||
.bind(uid as i64)
|
||||
.bind(uid)
|
||||
.bind(platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
if sql.rows_affected() != 1 {
|
||||
return Err(DatabaseError::NotFoundError);
|
||||
return Err(DatabaseError::NotFound);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_account(
|
||||
&self,
|
||||
platform: &str,
|
||||
uid: u64,
|
||||
name: &str,
|
||||
avatar: &str,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let sql = sqlx::query(
|
||||
"UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4",
|
||||
)
|
||||
.bind(name)
|
||||
.bind(avatar)
|
||||
.bind(uid as i64)
|
||||
.bind(platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
if sql.rows_affected() != 1 {
|
||||
return Err(DatabaseError::NotFoundError);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_account_with_id_str(
|
||||
&self,
|
||||
old_account: &AccountRow,
|
||||
new_id_str: &str,
|
||||
name: &str,
|
||||
avatar: &str,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
|
||||
// If the id_str changed, we need to delete the old record and create a new one
|
||||
if old_account.id_str.as_deref() != Some(new_id_str) {
|
||||
// Delete the old record (for Douyin accounts, we use uid to identify)
|
||||
sqlx::query("DELETE FROM accounts WHERE uid = $1 and platform = $2")
|
||||
.bind(old_account.uid as i64)
|
||||
.bind(&old_account.platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
|
||||
// Insert the new record with updated id_str
|
||||
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)")
|
||||
.bind(old_account.uid as i64)
|
||||
.bind(&old_account.platform)
|
||||
.bind(new_id_str)
|
||||
.bind(name)
|
||||
.bind(avatar)
|
||||
.bind(&old_account.csrf)
|
||||
.bind(&old_account.cookies)
|
||||
.bind(&old_account.created_at)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
} else {
|
||||
// id_str is the same, just update name and avatar
|
||||
sqlx::query(
|
||||
"UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4",
|
||||
)
|
||||
.bind(name)
|
||||
.bind(avatar)
|
||||
.bind(old_account.uid as i64)
|
||||
.bind(&old_account.platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_accounts(&self) -> Result<Vec<AccountRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, AccountRow>("SELECT * FROM accounts")
|
||||
@@ -178,12 +58,16 @@ impl Database {
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_account(&self, platform: &str, uid: u64) -> Result<AccountRow, DatabaseError> {
|
||||
pub async fn get_account(
|
||||
&self,
|
||||
platform: &str,
|
||||
uid: &str,
|
||||
) -> Result<AccountRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, AccountRow>(
|
||||
"SELECT * FROM accounts WHERE uid = $1 and platform = $2",
|
||||
)
|
||||
.bind(uid as i64)
|
||||
.bind(uid)
|
||||
.bind(platform)
|
||||
.fetch_one(&lock)
|
||||
.await?)
|
||||
@@ -200,7 +84,7 @@ impl Database {
|
||||
.fetch_all(&lock)
|
||||
.await?;
|
||||
if accounts.is_empty() {
|
||||
return Err(DatabaseError::NotFoundError);
|
||||
return Err(DatabaseError::NotFound);
|
||||
}
|
||||
// randomly select one account
|
||||
let account = accounts.choose(&mut rand::thread_rng()).unwrap();
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use custom_error::custom_error;
|
||||
use sqlx::Pool;
|
||||
use sqlx::Sqlite;
|
||||
use thiserror::Error;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
pub mod account;
|
||||
@@ -14,23 +14,25 @@ pub struct Database {
|
||||
db: RwLock<Option<Pool<Sqlite>>>,
|
||||
}
|
||||
|
||||
custom_error! { pub DatabaseError
|
||||
InsertError = "Entry insert failed",
|
||||
NotFoundError = "Entry not found",
|
||||
InvalidCookiesError = "Cookies are invalid",
|
||||
DBError {err: sqlx::Error } = "DB error: {err}",
|
||||
SQLError { sql: String } = "SQL is incorret: {sql}"
|
||||
#[derive(Error, Debug)]
|
||||
pub enum DatabaseError {
|
||||
#[error("Entry insert failed")]
|
||||
Insert,
|
||||
#[error("Entry not found")]
|
||||
NotFound,
|
||||
#[error("Cookies are invalid")]
|
||||
InvalidCookies,
|
||||
#[error("Number exceed i64 range")]
|
||||
NumberExceedI64Range,
|
||||
#[error("DB error: {0}")]
|
||||
DB(#[from] sqlx::Error),
|
||||
#[error("SQL is incorret: {sql}")]
|
||||
Sql { sql: String },
|
||||
}
|
||||
|
||||
impl From<DatabaseError> for String {
|
||||
fn from(value: DatabaseError) -> Self {
|
||||
value.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<sqlx::Error> for DatabaseError {
|
||||
fn from(value: sqlx::Error) -> Self {
|
||||
DatabaseError::DBError { err: value }
|
||||
fn from(err: DatabaseError) -> Self {
|
||||
err.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::recorder::PlatformType;
|
||||
use recorder::platforms::PlatformType;
|
||||
|
||||
use super::Database;
|
||||
use super::DatabaseError;
|
||||
@@ -7,8 +7,9 @@ use chrono::Utc;
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
|
||||
pub struct RecordRow {
|
||||
pub platform: String,
|
||||
pub parent_id: String,
|
||||
pub live_id: String,
|
||||
pub room_id: u64,
|
||||
pub room_id: String,
|
||||
pub title: String,
|
||||
pub length: i64,
|
||||
pub size: i64,
|
||||
@@ -16,62 +17,78 @@ pub struct RecordRow {
|
||||
pub cover: Option<String>,
|
||||
}
|
||||
|
||||
// CREATE TABLE records (live_id INTEGER PRIMARY KEY, room_id INTEGER, title TEXT, length INTEGER, size INTEGER, created_at TEXT);
|
||||
// CREATE TABLE records (live_id INTEGER PRIMARY KEY, room_id TEXT, title TEXT, length INTEGER, size INTEGER, created_at TEXT);
|
||||
impl Database {
|
||||
pub async fn get_records(
|
||||
&self,
|
||||
room_id: u64,
|
||||
offset: u64,
|
||||
limit: u64,
|
||||
room_id: &str,
|
||||
offset: i64,
|
||||
limit: i64,
|
||||
) -> Result<Vec<RecordRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records WHERE room_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
|
||||
)
|
||||
.bind(room_id as i64)
|
||||
.bind(limit as i64)
|
||||
.bind(offset as i64)
|
||||
.bind(room_id)
|
||||
.bind(limit)
|
||||
.bind(offset)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_record(
|
||||
&self,
|
||||
room_id: u64,
|
||||
room_id: &str,
|
||||
live_id: &str,
|
||||
) -> Result<RecordRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records WHERE room_id = $1 and live_id = $2",
|
||||
)
|
||||
.bind(room_id as i64)
|
||||
.bind(room_id)
|
||||
.bind(live_id)
|
||||
.fetch_one(&lock)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_archives_by_parent_id(
|
||||
&self,
|
||||
room_id: &str,
|
||||
parent_id: &str,
|
||||
) -> Result<Vec<RecordRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records WHERE room_id = $1 and parent_id = $2",
|
||||
)
|
||||
.bind(room_id)
|
||||
.bind(parent_id)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn add_record(
|
||||
&self,
|
||||
platform: PlatformType,
|
||||
parent_id: &str,
|
||||
live_id: &str,
|
||||
room_id: u64,
|
||||
room_id: &str,
|
||||
title: &str,
|
||||
cover: Option<String>,
|
||||
created_at: Option<&str>,
|
||||
) -> Result<RecordRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let record = RecordRow {
|
||||
platform: platform.as_str().to_string(),
|
||||
parent_id: parent_id.to_string(),
|
||||
live_id: live_id.to_string(),
|
||||
room_id,
|
||||
room_id: room_id.to_string(),
|
||||
title: title.into(),
|
||||
length: 0,
|
||||
size: 0,
|
||||
created_at: created_at.unwrap_or(&Utc::now().to_rfc3339()).to_string(),
|
||||
created_at: Utc::now().to_rfc3339().to_string(),
|
||||
cover,
|
||||
};
|
||||
if let Err(e) = sqlx::query("INSERT INTO records (live_id, room_id, title, length, size, cover, created_at, platform) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)").bind(record.live_id.clone())
|
||||
.bind(record.room_id as i64).bind(&record.title).bind(0).bind(0).bind(&record.cover).bind(&record.created_at).bind(platform.as_str().to_string()).execute(&lock).await {
|
||||
if let Err(e) = sqlx::query("INSERT INTO records (live_id, room_id, title, length, size, cover, created_at, platform, parent_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)").bind(record.live_id.clone())
|
||||
.bind(&record.room_id).bind(&record.title).bind(0).bind(0).bind(&record.cover).bind(&record.created_at).bind(platform.as_str().to_string()).bind(parent_id).execute(&lock).await {
|
||||
// if the record already exists, return the existing record
|
||||
if e.to_string().contains("UNIQUE constraint failed") {
|
||||
return self.get_record(room_id, live_id).await;
|
||||
@@ -100,9 +117,24 @@ impl Database {
|
||||
size: u64,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let size = i64::try_from(size).map_err(|_| DatabaseError::NumberExceedI64Range)?;
|
||||
sqlx::query("UPDATE records SET length = $1, size = $2 WHERE live_id = $3")
|
||||
.bind(length)
|
||||
.bind(size as i64)
|
||||
.bind(size)
|
||||
.bind(live_id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_record_parent_id(
|
||||
&self,
|
||||
live_id: &str,
|
||||
parent_id: &str,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
sqlx::query("UPDATE records SET parent_id = $1 WHERE live_id = $2")
|
||||
.bind(parent_id)
|
||||
.bind(live_id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
@@ -148,36 +180,36 @@ impl Database {
|
||||
|
||||
pub async fn get_recent_record(
|
||||
&self,
|
||||
room_id: u64,
|
||||
offset: u64,
|
||||
limit: u64,
|
||||
room_id: &str,
|
||||
offset: i64,
|
||||
limit: i64,
|
||||
) -> Result<Vec<RecordRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
if room_id == 0 {
|
||||
if room_id.is_empty() {
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records ORDER BY created_at DESC LIMIT $1 OFFSET $2",
|
||||
)
|
||||
.bind(limit as i64)
|
||||
.bind(offset as i64)
|
||||
.bind(limit)
|
||||
.bind(offset)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
} else {
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records WHERE room_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
|
||||
)
|
||||
.bind(room_id as i64)
|
||||
.bind(limit as i64)
|
||||
.bind(offset as i64)
|
||||
.bind(room_id)
|
||||
.bind(limit)
|
||||
.bind(offset)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_record_disk_usage(&self) -> Result<u64, DatabaseError> {
|
||||
pub async fn get_record_disk_usage(&self) -> Result<i64, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let result: (i64,) = sqlx::query_as("SELECT SUM(size) FROM records;")
|
||||
.fetch_one(&lock)
|
||||
.await?;
|
||||
Ok(result.0 as u64)
|
||||
Ok(result.0)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
use super::Database;
|
||||
use super::DatabaseError;
|
||||
use crate::recorder::PlatformType;
|
||||
use chrono::Utc;
|
||||
use recorder::platforms::PlatformType;
|
||||
/// Recorder in database is pretty simple
|
||||
/// because many room infos are collected in realtime
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
|
||||
pub struct RecorderRow {
|
||||
pub room_id: u64,
|
||||
pub room_id: String,
|
||||
pub created_at: String,
|
||||
pub platform: String,
|
||||
pub auto_start: bool,
|
||||
@@ -18,12 +18,12 @@ impl Database {
|
||||
pub async fn add_recorder(
|
||||
&self,
|
||||
platform: PlatformType,
|
||||
room_id: u64,
|
||||
room_id: &str,
|
||||
extra: &str,
|
||||
) -> Result<RecorderRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let recorder = RecorderRow {
|
||||
room_id,
|
||||
room_id: room_id.to_string(),
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
platform: platform.as_str().to_string(),
|
||||
auto_start: true,
|
||||
@@ -32,7 +32,7 @@ impl Database {
|
||||
let _ = sqlx::query(
|
||||
"INSERT OR REPLACE INTO recorders (room_id, created_at, platform, auto_start, extra) VALUES ($1, $2, $3, $4, $5)",
|
||||
)
|
||||
.bind(room_id as i64)
|
||||
.bind(room_id)
|
||||
.bind(&recorder.created_at)
|
||||
.bind(platform.as_str())
|
||||
.bind(recorder.auto_start)
|
||||
@@ -42,19 +42,19 @@ impl Database {
|
||||
Ok(recorder)
|
||||
}
|
||||
|
||||
pub async fn remove_recorder(&self, room_id: u64) -> Result<RecorderRow, DatabaseError> {
|
||||
pub async fn remove_recorder(&self, room_id: &str) -> Result<RecorderRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let recorder =
|
||||
sqlx::query_as::<_, RecorderRow>("SELECT * FROM recorders WHERE room_id = $1")
|
||||
.bind(room_id as i64)
|
||||
.bind(room_id)
|
||||
.fetch_one(&lock)
|
||||
.await?;
|
||||
let sql = sqlx::query("DELETE FROM recorders WHERE room_id = $1")
|
||||
.bind(room_id as i64)
|
||||
.bind(room_id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
if sql.rows_affected() != 1 {
|
||||
return Err(DatabaseError::NotFoundError);
|
||||
return Err(DatabaseError::NotFound);
|
||||
}
|
||||
|
||||
// remove related archive
|
||||
@@ -71,10 +71,10 @@ impl Database {
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn remove_archive(&self, room_id: u64) -> Result<(), DatabaseError> {
|
||||
pub async fn remove_archive(&self, room_id: &str) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let _ = sqlx::query("DELETE FROM records WHERE room_id = $1")
|
||||
.bind(room_id as i64)
|
||||
.bind(room_id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(())
|
||||
@@ -83,7 +83,7 @@ impl Database {
|
||||
pub async fn update_recorder(
|
||||
&self,
|
||||
platform: PlatformType,
|
||||
room_id: u64,
|
||||
room_id: &str,
|
||||
auto_start: bool,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
@@ -92,7 +92,7 @@ impl Database {
|
||||
)
|
||||
.bind(auto_start)
|
||||
.bind(platform.as_str().to_string())
|
||||
.bind(room_id as i64)
|
||||
.bind(room_id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(())
|
||||
|
||||
@@ -13,6 +13,27 @@ pub struct TaskRow {
|
||||
}
|
||||
|
||||
impl Database {
|
||||
pub async fn generate_task(
|
||||
&self,
|
||||
task_type: &str,
|
||||
message: &str,
|
||||
metadata: &str,
|
||||
) -> Result<TaskRow, DatabaseError> {
|
||||
let task_id = uuid::Uuid::new_v4().to_string();
|
||||
let task = TaskRow {
|
||||
id: task_id,
|
||||
task_type: task_type.to_string(),
|
||||
status: "pending".to_string(),
|
||||
message: message.to_string(),
|
||||
metadata: metadata.to_string(),
|
||||
created_at: chrono::Utc::now().to_rfc3339(),
|
||||
};
|
||||
|
||||
self.add_task(&task).await?;
|
||||
|
||||
Ok(task)
|
||||
}
|
||||
|
||||
pub async fn add_task(&self, task: &TaskRow) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let _ = sqlx::query(
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use super::Database;
|
||||
use super::DatabaseError;
|
||||
|
||||
// CREATE TABLE videos (id INTEGER PRIMARY KEY, room_id INTEGER, cover TEXT, file TEXT, length INTEGER, size INTEGER, status INTEGER, bvid TEXT, title TEXT, desc TEXT, tags TEXT, area INTEGER, created_at TEXT);
|
||||
// CREATE TABLE videos (id INTEGER PRIMARY KEY, room_id TEXT, cover TEXT, file TEXT, length INTEGER, size INTEGER, status INTEGER, bvid TEXT, title TEXT, desc TEXT, tags TEXT, area INTEGER, created_at TEXT);
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
|
||||
pub struct VideoRow {
|
||||
pub id: i64,
|
||||
pub room_id: u64,
|
||||
pub room_id: String,
|
||||
pub cover: String,
|
||||
pub file: String,
|
||||
pub note: String,
|
||||
@@ -22,10 +22,10 @@ pub struct VideoRow {
|
||||
}
|
||||
|
||||
impl Database {
|
||||
pub async fn get_videos(&self, room_id: u64) -> Result<Vec<VideoRow>, DatabaseError> {
|
||||
pub async fn get_videos(&self, room_id: &str) -> Result<Vec<VideoRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let videos = sqlx::query_as::<_, VideoRow>("SELECT * FROM videos WHERE room_id = $1;")
|
||||
.bind(room_id as i64)
|
||||
.bind(room_id)
|
||||
.fetch_all(&lock)
|
||||
.await?;
|
||||
Ok(videos)
|
||||
@@ -69,7 +69,7 @@ impl Database {
|
||||
pub async fn add_video(&self, video: &VideoRow) -> Result<VideoRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let sql = sqlx::query("INSERT INTO videos (room_id, cover, file, note, length, size, status, bvid, title, desc, tags, area, created_at, platform) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)")
|
||||
.bind(video.room_id as i64)
|
||||
.bind(&video.room_id)
|
||||
.bind(&video.cover)
|
||||
.bind(&video.file)
|
||||
.bind(&video.note)
|
||||
|
||||
127
src-tauri/src/ffmpeg/general.rs
Normal file
@@ -0,0 +1,127 @@
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
process::Stdio,
|
||||
};
|
||||
|
||||
use async_ffmpeg_sidecar::{event::FfmpegEvent, log_parser::FfmpegLogParser};
|
||||
use tokio::io::{AsyncWriteExt, BufReader};
|
||||
|
||||
use crate::{ffmpeg::hwaccel, progress::progress_reporter::ProgressReporterTrait};
|
||||
|
||||
use super::ffmpeg_path;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
const CREATE_NO_WINDOW: u32 = 0x08000000;
|
||||
#[cfg(target_os = "windows")]
|
||||
#[allow(unused_imports)]
|
||||
use std::os::windows::process::CommandExt;
|
||||
|
||||
/// Generate a random filename in hex
|
||||
pub async fn random_filename() -> String {
|
||||
format!("{:x}", rand::random::<u64>())
|
||||
}
|
||||
|
||||
pub async fn handle_ffmpeg_process(
|
||||
reporter: Option<&impl ProgressReporterTrait>,
|
||||
ffmpeg_process: &mut tokio::process::Command,
|
||||
) -> Result<(), String> {
|
||||
let child = ffmpeg_process
|
||||
.stderr(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.spawn();
|
||||
if let Err(e) = child {
|
||||
return Err(e.to_string());
|
||||
}
|
||||
let mut child = child.unwrap();
|
||||
let stderr = child.stderr.take().unwrap();
|
||||
let reader = BufReader::new(stderr);
|
||||
let mut parser = FfmpegLogParser::new(reader);
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::Log(_level, content) => {
|
||||
// if contains "out_time_ms=66654667", by the way, it's actually in us
|
||||
if content.starts_with("out_time_ms") {
|
||||
let time_str = content.strip_prefix("out_time_ms=").unwrap_or_default();
|
||||
if let Some(reporter) = reporter {
|
||||
reporter.update(time_str).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Error(e) => {
|
||||
log::error!("[FFmpeg Error] {}", e);
|
||||
return Err(e);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
if let Err(e) = child.wait().await {
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn concat_videos(
|
||||
reporter: Option<&impl ProgressReporterTrait>,
|
||||
videos: &[PathBuf],
|
||||
output_path: &Path,
|
||||
) -> Result<(), String> {
|
||||
// ffmpeg -i input1.mp4 -i input2.mp4 -i input3.mp4 -c copy output.mp4
|
||||
let mut ffmpeg_process = tokio::process::Command::new(ffmpeg_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffmpeg_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let output_folder = output_path.parent().unwrap();
|
||||
if !output_folder.exists() {
|
||||
std::fs::create_dir_all(output_folder).unwrap();
|
||||
}
|
||||
|
||||
let filelist_filename = format!("filelist_{}.txt", random_filename().await);
|
||||
|
||||
let mut filelist = tokio::fs::File::create(&output_folder.join(&filelist_filename))
|
||||
.await
|
||||
.unwrap();
|
||||
for video in videos {
|
||||
filelist
|
||||
.write_all(format!("file '{}'\n", video.display()).as_bytes())
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
filelist.flush().await.unwrap();
|
||||
|
||||
// Convert &[PathBuf] to &[&Path] for check_videos
|
||||
let video_refs: Vec<&Path> = videos.iter().map(|p| p.as_path()).collect();
|
||||
let should_encode = !super::check_videos(&video_refs).await;
|
||||
|
||||
ffmpeg_process.args([
|
||||
"-f",
|
||||
"concat",
|
||||
"-safe",
|
||||
"0",
|
||||
"-i",
|
||||
output_folder.join(&filelist_filename).to_str().unwrap(),
|
||||
]);
|
||||
if should_encode {
|
||||
let video_encoder = hwaccel::get_x264_encoder().await;
|
||||
ffmpeg_process.args(["-vf", "scale=1920:1080:force_original_aspect_ratio=decrease,pad=1920:1080:(ow-iw)/2:(oh-ih)/2"]);
|
||||
ffmpeg_process.args(["-r", "60"]);
|
||||
ffmpeg_process.args(["-c:v", video_encoder]);
|
||||
ffmpeg_process.args(["-c:a", "aac"]);
|
||||
ffmpeg_process.args(["-b:v", "6000k"]);
|
||||
ffmpeg_process.args(["-b:a", "128k"]);
|
||||
ffmpeg_process.args(["-threads", "0"]);
|
||||
} else {
|
||||
ffmpeg_process.args(["-c", "copy"]);
|
||||
}
|
||||
ffmpeg_process.args([output_path.to_str().unwrap()]);
|
||||
ffmpeg_process.args(["-progress", "pipe:2"]);
|
||||
ffmpeg_process.args(["-y"]);
|
||||
|
||||
handle_ffmpeg_process(reporter, &mut ffmpeg_process).await?;
|
||||
|
||||
// clean up filelist
|
||||
let _ = tokio::fs::remove_file(output_folder.join(&filelist_filename)).await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
230
src-tauri/src/ffmpeg/hwaccel.rs
Normal file
@@ -0,0 +1,230 @@
|
||||
use std::{collections::HashSet, process::Stdio, sync::OnceLock};
|
||||
|
||||
use tokio::io::AsyncReadExt;
|
||||
|
||||
use super::ffmpeg_path;
|
||||
|
||||
const TARGET_ENCODERS: [&str; 7] = [
|
||||
"h264_nvenc",
|
||||
"h264_videotoolbox",
|
||||
"h264_qsv",
|
||||
"h264_amf",
|
||||
"h264_mf",
|
||||
"h264_vaapi",
|
||||
"h264_v4l2m2m",
|
||||
];
|
||||
|
||||
// 缓存选中的编码器,避免重复检查
|
||||
static ENCODER_CACHE: OnceLock<String> = OnceLock::new();
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
const CREATE_NO_WINDOW: u32 = 0x08000000;
|
||||
#[cfg(target_os = "windows")]
|
||||
use std::os::windows::process::CommandExt;
|
||||
|
||||
/// 检测当前环境下 FFmpeg 支持的 H.264 硬件编码器。
|
||||
///
|
||||
/// 返回值为可直接用于 `-c:v <value>` 的编码器名称列表。
|
||||
pub async fn list_supported_hwaccels() -> Result<Vec<String>, String> {
|
||||
let mut command = tokio::process::Command::new(ffmpeg_path());
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
command.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let mut child = command
|
||||
.arg("-hide_banner")
|
||||
.arg("-encoders")
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.map_err(|e| format!("无法启动 ffmpeg 进程: {e}"))?;
|
||||
|
||||
let mut stdout = child
|
||||
.stdout
|
||||
.take()
|
||||
.ok_or_else(|| "无法获取 ffmpeg 标准输出".to_string())?;
|
||||
let mut stderr = child
|
||||
.stderr
|
||||
.take()
|
||||
.ok_or_else(|| "无法获取 ffmpeg 标准错误输出".to_string())?;
|
||||
|
||||
let (mut stdout_buf, mut stderr_buf) = (String::new(), String::new());
|
||||
|
||||
let stdout_future = stdout.read_to_string(&mut stdout_buf);
|
||||
let stderr_future = stderr.read_to_string(&mut stderr_buf);
|
||||
|
||||
let (stdout_res, stderr_res, status) = tokio::join!(stdout_future, stderr_future, child.wait());
|
||||
|
||||
stdout_res.map_err(|e| format!("读取 ffmpeg 标准输出失败: {e}"))?;
|
||||
stderr_res.map_err(|e| format!("读取 ffmpeg 标准错误输出失败: {e}"))?;
|
||||
|
||||
let status = status.map_err(|e| format!("等待 ffmpeg 进程退出失败: {e}"))?;
|
||||
|
||||
if !status.success() {
|
||||
let err = if stderr_buf.trim().is_empty() {
|
||||
stdout_buf.trim().to_string()
|
||||
} else {
|
||||
stderr_buf.trim().to_string()
|
||||
};
|
||||
log::error!("ffmpeg -encoders 运行失败: {err}");
|
||||
return Err(format!("ffmpeg -encoders 运行失败: {err}"));
|
||||
}
|
||||
|
||||
let mut hwaccels = Vec::new();
|
||||
|
||||
for line in stdout_buf.lines() {
|
||||
let trimmed = line.trim_start();
|
||||
if trimmed.is_empty() || !trimmed.starts_with('V') {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut parts = trimmed.split_whitespace();
|
||||
let flags = parts.next().unwrap_or_default();
|
||||
if !flags.starts_with('V') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(name) = parts.next() {
|
||||
if TARGET_ENCODERS
|
||||
.iter()
|
||||
.any(|candidate| candidate.eq_ignore_ascii_case(name))
|
||||
{
|
||||
hwaccels.push(name.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 去重并保持原有顺序(即第一次出现时保留)
|
||||
let mut seen = HashSet::new();
|
||||
hwaccels.retain(|value| seen.insert(value.clone()));
|
||||
|
||||
Ok(hwaccels)
|
||||
}
|
||||
|
||||
/// 测试指定的编码器是否在当前硬件上真正可用
|
||||
///
|
||||
/// 通过尝试对测试流进行编码来验证编码器可用性
|
||||
async fn test_encoder_availability(encoder: &str) -> bool {
|
||||
let mut command = tokio::process::Command::new(ffmpeg_path());
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
command.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
// 使用合成输入源 (testsrc2) 测试编码器
|
||||
// -t 0.1 只编码0.1秒,-frames:v 3 只编码3帧,快速测试
|
||||
// -f null 丢弃输出,不需要实际文件
|
||||
let child = command
|
||||
.arg("-hide_banner")
|
||||
.arg("-loglevel")
|
||||
.arg("error")
|
||||
.arg("-f")
|
||||
.arg("lavfi")
|
||||
.arg("-i")
|
||||
.arg("testsrc2=duration=0.1:size=320x240:rate=1")
|
||||
.arg("-c:v")
|
||||
.arg(encoder)
|
||||
.arg("-frames:v")
|
||||
.arg("3")
|
||||
.arg("-f")
|
||||
.arg("null")
|
||||
.arg("-")
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn();
|
||||
|
||||
match child {
|
||||
Ok(process) => {
|
||||
let output = process.wait_with_output().await;
|
||||
match output {
|
||||
Ok(output) => {
|
||||
// 如果退出码为0,说明编码器可用
|
||||
if output.status.success() {
|
||||
log::debug!("Encoder {encoder} is available");
|
||||
true
|
||||
} else {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
log::debug!("Encoder {encoder} failed: {stderr}");
|
||||
false
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
log::debug!("Encoder {encoder} test error: {err}");
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
log::debug!("Failed to spawn ffmpeg process to test {encoder}: {err}");
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the preferred hardware encoder for x264
|
||||
///
|
||||
/// Returns the preferred hardware encoder for x264, or "libx264" if no hardware acceleration is available.
|
||||
/// This function not only checks if the encoder is compiled into ffmpeg, but also verifies it's actually
|
||||
/// usable on the current hardware.
|
||||
///
|
||||
/// The result is cached to avoid repeated checks during the program's lifetime.
|
||||
pub async fn get_x264_encoder() -> &'static str {
|
||||
// 先检查缓存,如果已存在直接返回
|
||||
if let Some(encoder) = ENCODER_CACHE.get() {
|
||||
return encoder.as_str();
|
||||
}
|
||||
|
||||
// 执行硬件编码器检测和验证
|
||||
let encoder = match list_supported_hwaccels().await {
|
||||
Ok(hwaccels) => {
|
||||
// 按优先级顺序测试每个硬件编码器
|
||||
const PRIORITY: [&str; 7] = [
|
||||
"h264_nvenc",
|
||||
"h264_videotoolbox",
|
||||
"h264_qsv",
|
||||
"h264_amf",
|
||||
"h264_mf",
|
||||
"h264_vaapi",
|
||||
"h264_v4l2m2m",
|
||||
];
|
||||
|
||||
let mut selected = None;
|
||||
for &candidate in &PRIORITY {
|
||||
// 检查编码器是否在支持列表中
|
||||
if hwaccels
|
||||
.iter()
|
||||
.any(|value| value.eq_ignore_ascii_case(candidate))
|
||||
{
|
||||
// 测试编码器在实际硬件上是否可用
|
||||
if test_encoder_availability(candidate).await {
|
||||
log::info!("Found available hardware encoder: {candidate}");
|
||||
selected = Some(candidate.to_string());
|
||||
break;
|
||||
} else {
|
||||
log::debug!("Hardware encoder {candidate} is compiled in but not usable");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
selected.unwrap_or_else(|| {
|
||||
log::info!("No usable hardware encoder found, falling back to libx264");
|
||||
"libx264".to_string()
|
||||
})
|
||||
}
|
||||
Err(err) => {
|
||||
log::warn!("Failed to query hardware encoders: {err}");
|
||||
"libx264".to_string()
|
||||
}
|
||||
};
|
||||
|
||||
log::info!("Selected x264 encoder: {}", encoder);
|
||||
|
||||
// 存入缓存,如果设置成功则从缓存返回,否则返回刚才得到的值
|
||||
// 注意:set() 可能被其他线程抢先,但每个线程都会得到相同的 encoder 值
|
||||
match ENCODER_CACHE.set(encoder.clone()) {
|
||||
Ok(_) => ENCODER_CACHE.get().unwrap().as_str(),
|
||||
Err(_) => {
|
||||
// 其他线程已经设置了,返回缓存的值
|
||||
ENCODER_CACHE.get().unwrap().as_str()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,8 +2,12 @@ use std::fmt;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Stdio;
|
||||
|
||||
pub mod general;
|
||||
pub mod hwaccel;
|
||||
pub mod playlist;
|
||||
|
||||
use crate::constants;
|
||||
use crate::progress_reporter::{ProgressReporter, ProgressReporterTrait};
|
||||
use crate::progress::progress_reporter::{ProgressReporter, ProgressReporterTrait};
|
||||
use crate::subtitle_generator::whisper_online;
|
||||
use crate::subtitle_generator::{
|
||||
whisper_cpp, GenerateResult, SubtitleGenerator, SubtitleGeneratorType,
|
||||
@@ -11,16 +15,16 @@ use crate::subtitle_generator::{
|
||||
use async_ffmpeg_sidecar::event::{FfmpegEvent, LogLevel};
|
||||
use async_ffmpeg_sidecar::log_parser::FfmpegLogParser;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::io::{AsyncBufReadExt, BufReader};
|
||||
use tokio::io::BufReader;
|
||||
|
||||
// 视频元数据结构
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct VideoMetadata {
|
||||
pub duration: f64,
|
||||
#[allow(unused)]
|
||||
pub width: u32,
|
||||
#[allow(unused)]
|
||||
pub height: u32,
|
||||
pub video_codec: String,
|
||||
pub audio_codec: String,
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
@@ -45,54 +49,104 @@ impl Range {
|
||||
pub fn duration(&self) -> f64 {
|
||||
self.end - self.start
|
||||
}
|
||||
|
||||
pub fn is_in(&self, v: f64) -> bool {
|
||||
v >= self.start && v <= self.end
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn clip_from_m3u8(
|
||||
pub async fn transcode(
|
||||
reporter: Option<&impl ProgressReporterTrait>,
|
||||
m3u8_index: &Path,
|
||||
file: &Path,
|
||||
output_path: &Path,
|
||||
range: Option<&Range>,
|
||||
fix_encoding: bool,
|
||||
copy_codecs: bool,
|
||||
) -> Result<(), String> {
|
||||
// first check output folder exists
|
||||
let output_folder = output_path.parent().unwrap();
|
||||
if !output_folder.exists() {
|
||||
log::warn!(
|
||||
"Output folder does not exist, creating: {}",
|
||||
output_folder.display()
|
||||
);
|
||||
std::fs::create_dir_all(output_folder).unwrap();
|
||||
}
|
||||
|
||||
// ffmpeg -i fixed_\[30655190\]1742887114_0325084106_81.5.mp4 -c:v libx264 -c:a aac -b:v 6000k -b:a 64k -compression_level 0 -threads 0 output.mp3
|
||||
log::info!("Transcode: {} copy: {}", file.display(), copy_codecs);
|
||||
let mut ffmpeg_process = tokio::process::Command::new(ffmpeg_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffmpeg_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let child_command = ffmpeg_process.args(["-i", &format!("{}", m3u8_index.display())]);
|
||||
ffmpeg_process.args(["-i", file.to_str().unwrap()]);
|
||||
|
||||
if let Some(range) = range {
|
||||
child_command
|
||||
.args(["-ss", &range.start.to_string()])
|
||||
.args(["-t", &range.duration().to_string()]);
|
||||
}
|
||||
|
||||
if fix_encoding {
|
||||
child_command
|
||||
.args(["-c:v", "libx264"])
|
||||
.args(["-c:a", "copy"])
|
||||
.args(["-b:v", "6000k"]);
|
||||
if copy_codecs {
|
||||
ffmpeg_process.args(["-c:v", "copy"]).args(["-c:a", "copy"]);
|
||||
} else {
|
||||
child_command.args(["-c", "copy"]);
|
||||
let video_encoder = hwaccel::get_x264_encoder().await;
|
||||
ffmpeg_process
|
||||
.args(["-vf", "scale=1920:1080:force_original_aspect_ratio=decrease,pad=1920:1080:(ow-iw)/2:(oh-ih)/2"])
|
||||
.args(["-c:v", video_encoder])
|
||||
.args(["-c:a", "aac"])
|
||||
.args(["-b:v", "6000k"])
|
||||
.args(["-b:a", "128k"])
|
||||
.args(["-threads", "0"]);
|
||||
}
|
||||
|
||||
let child = child_command
|
||||
.args(["-y", output_path.to_str().unwrap()])
|
||||
let child = ffmpeg_process
|
||||
.args([output_path.to_str().unwrap()])
|
||||
.args(["-y"])
|
||||
.args(["-progress", "pipe:2"])
|
||||
.stderr(Stdio::piped())
|
||||
.spawn();
|
||||
|
||||
if let Err(e) = child {
|
||||
return Err(format!("Spawn ffmpeg process failed: {}", e));
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
let stderr = child.stderr.take().unwrap();
|
||||
let reader = BufReader::new(stderr);
|
||||
let mut parser = FfmpegLogParser::new(reader);
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::Progress(p) => {
|
||||
if reporter.is_none() {
|
||||
continue;
|
||||
}
|
||||
reporter
|
||||
.unwrap()
|
||||
.update(format!("压制中:{}", p.time).as_str())
|
||||
.await;
|
||||
}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Error(e) => {
|
||||
log::error!("Transcode error: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if let Err(e) = child.wait().await {
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn trim_video(
|
||||
reporter: Option<&impl ProgressReporterTrait>,
|
||||
file: &Path,
|
||||
output_path: &Path,
|
||||
start_time: f64,
|
||||
duration: f64,
|
||||
) -> Result<(), String> {
|
||||
// ffmpeg -i fixed_\[30655190\]1742887114_0325084106_81.5.mp4 -ss 0 -t 10 output.mp4
|
||||
log::info!("Trim video task start: {}", file.display());
|
||||
let mut ffmpeg_process = tokio::process::Command::new(ffmpeg_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffmpeg_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
ffmpeg_process.args(["-ss", &start_time.to_string()]);
|
||||
ffmpeg_process.args(["-i", file.to_str().unwrap()]);
|
||||
ffmpeg_process.args(["-t", &duration.to_string()]);
|
||||
ffmpeg_process.args(["-c", "copy"]);
|
||||
ffmpeg_process.args([output_path.to_str().unwrap()]);
|
||||
ffmpeg_process.args(["-y"]);
|
||||
ffmpeg_process.args(["-progress", "pipe:2"]);
|
||||
ffmpeg_process.stderr(Stdio::piped());
|
||||
let child = ffmpeg_process.spawn();
|
||||
if let Err(e) = child {
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
@@ -100,45 +154,33 @@ pub async fn clip_from_m3u8(
|
||||
let reader = BufReader::new(stderr);
|
||||
let mut parser = FfmpegLogParser::new(reader);
|
||||
|
||||
let mut clip_error = None;
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::Progress(p) => {
|
||||
if reporter.is_none() {
|
||||
continue;
|
||||
}
|
||||
log::debug!("Clip progress: {}", p.time);
|
||||
reporter
|
||||
.unwrap()
|
||||
.update(format!("编码中:{}", p.time).as_str())
|
||||
.update(format!("切片中:{}", p.time).as_str())
|
||||
.await;
|
||||
}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Log(level, content) => {
|
||||
// log error if content contains error
|
||||
if content.contains("error") || level == LogLevel::Error {
|
||||
log::error!("Clip error: {}", content);
|
||||
}
|
||||
}
|
||||
FfmpegEvent::Error(e) => {
|
||||
log::error!("Clip error: {}", e);
|
||||
clip_error = Some(e.to_string());
|
||||
log::error!("Trim video error: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if let Err(e) = child.wait().await {
|
||||
log::error!("Clip error: {}", e);
|
||||
log::error!("Trim video error: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
if let Some(error) = clip_error {
|
||||
log::error!("Clip error: {}", error);
|
||||
Err(error)
|
||||
} else {
|
||||
log::info!("Clip task end: {}", output_path.display());
|
||||
Ok(())
|
||||
}
|
||||
log::info!("Trim video task end: {}", output_path.display());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn extract_audio_chunks(file: &Path, format: &str) -> Result<PathBuf, String> {
|
||||
@@ -152,29 +194,25 @@ pub async fn extract_audio_chunks(file: &Path, format: &str) -> Result<PathBuf,
|
||||
|
||||
// First, get the duration of the input file
|
||||
let duration = get_audio_duration(file).await?;
|
||||
log::info!("Audio duration: {} seconds", duration);
|
||||
log::info!("Audio duration: {duration} seconds");
|
||||
|
||||
// Split into chunks of 30 seconds
|
||||
let chunk_duration = 30;
|
||||
let chunk_count = (duration as f64 / chunk_duration as f64).ceil() as usize;
|
||||
log::info!(
|
||||
"Splitting into {} chunks of {} seconds each",
|
||||
chunk_count,
|
||||
chunk_duration
|
||||
);
|
||||
let chunk_count = (duration as f64 / f64::from(chunk_duration)).ceil() as usize;
|
||||
log::info!("Splitting into {chunk_count} chunks of {chunk_duration} seconds each");
|
||||
|
||||
// Create output directory for chunks
|
||||
let output_dir = output_path.parent().unwrap();
|
||||
let base_name = output_path.file_stem().unwrap().to_str().unwrap();
|
||||
let chunk_dir = output_dir.join(format!("{}_chunks", base_name));
|
||||
let chunk_dir = output_dir.join(format!("{base_name}_chunks"));
|
||||
|
||||
if !chunk_dir.exists() {
|
||||
std::fs::create_dir_all(&chunk_dir)
|
||||
.map_err(|e| format!("Failed to create chunk directory: {}", e))?;
|
||||
.map_err(|e| format!("Failed to create chunk directory: {e}"))?;
|
||||
}
|
||||
|
||||
// Use ffmpeg segment feature to split audio into chunks
|
||||
let segment_pattern = chunk_dir.join(format!("{}_%03d.{}", base_name, format));
|
||||
let segment_pattern = chunk_dir.join(format!("{base_name}_%03d.{format}"));
|
||||
|
||||
// 构建优化的ffmpeg命令参数
|
||||
let file_str = file.to_str().unwrap();
|
||||
@@ -240,7 +278,7 @@ pub async fn extract_audio_chunks(file: &Path, format: &str) -> Result<PathBuf,
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::Error(e) => {
|
||||
log::error!("Extract audio error: {}", e);
|
||||
log::error!("Extract audio error: {e}");
|
||||
extract_error = Some(e.to_string());
|
||||
}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
@@ -250,12 +288,12 @@ pub async fn extract_audio_chunks(file: &Path, format: &str) -> Result<PathBuf,
|
||||
}
|
||||
|
||||
if let Err(e) = child.wait().await {
|
||||
log::error!("Extract audio error: {}", e);
|
||||
log::error!("Extract audio error: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
if let Some(error) = extract_error {
|
||||
log::error!("Extract audio error: {}", error);
|
||||
log::error!("Extract audio error: {error}");
|
||||
Err(error)
|
||||
} else {
|
||||
log::info!(
|
||||
@@ -284,7 +322,7 @@ async fn get_audio_duration(file: &Path) -> Result<u64, String> {
|
||||
.spawn();
|
||||
|
||||
if let Err(e) = child {
|
||||
return Err(format!("Failed to spawn ffprobe process: {}", e));
|
||||
return Err(format!("Failed to spawn ffprobe process: {e}"));
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
@@ -300,7 +338,7 @@ async fn get_audio_duration(file: &Path) -> Result<u64, String> {
|
||||
// The new command outputs duration directly as a float
|
||||
if let Ok(seconds_f64) = content.trim().parse::<f64>() {
|
||||
duration = Some(seconds_f64.ceil() as u64);
|
||||
log::debug!("Parsed duration: {} seconds", seconds_f64);
|
||||
log::debug!("Parsed duration: {seconds_f64} seconds");
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@@ -308,64 +346,13 @@ async fn get_audio_duration(file: &Path) -> Result<u64, String> {
|
||||
}
|
||||
|
||||
if let Err(e) = child.wait().await {
|
||||
log::error!("Failed to get duration: {}", e);
|
||||
log::error!("Failed to get duration: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
duration.ok_or_else(|| "Failed to parse duration".to_string())
|
||||
}
|
||||
|
||||
/// Get the precise duration of a video segment (TS/MP4) in seconds
|
||||
pub async fn get_segment_duration(file: &Path) -> Result<f64, String> {
|
||||
// Use ffprobe to get the exact duration of the segment
|
||||
let mut ffprobe_process = tokio::process::Command::new(ffprobe_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffprobe_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let child = ffprobe_process
|
||||
.args(["-v", "quiet"])
|
||||
.args(["-show_entries", "format=duration"])
|
||||
.args(["-of", "csv=p=0"])
|
||||
.args(["-i", file.to_str().unwrap()])
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn();
|
||||
|
||||
if let Err(e) = child {
|
||||
return Err(format!(
|
||||
"Failed to spawn ffprobe process for segment: {}",
|
||||
e
|
||||
));
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
let stdout = child.stdout.take().unwrap();
|
||||
let reader = BufReader::new(stdout);
|
||||
let mut parser = FfmpegLogParser::new(reader);
|
||||
|
||||
let mut duration = None;
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Log(_level, content) => {
|
||||
// Parse the exact duration as f64 for precise timing
|
||||
if let Ok(seconds_f64) = content.trim().parse::<f64>() {
|
||||
duration = Some(seconds_f64);
|
||||
log::debug!("Parsed segment duration: {} seconds", seconds_f64);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if let Err(e) = child.wait().await {
|
||||
log::error!("Failed to get segment duration: {}", e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
duration.ok_or_else(|| "Failed to parse segment duration".to_string())
|
||||
}
|
||||
|
||||
/// Encode video subtitle using ffmpeg, output is file name with prefix [subtitle]
|
||||
pub async fn encode_video_subtitle(
|
||||
reporter: &impl ProgressReporterTrait,
|
||||
@@ -375,7 +362,7 @@ pub async fn encode_video_subtitle(
|
||||
) -> Result<String, String> {
|
||||
// ffmpeg -i fixed_\[30655190\]1742887114_0325084106_81.5.mp4 -vf "subtitles=test.srt:force_style='FontSize=24'" -c:v libx264 -c:a copy output.mp4
|
||||
log::info!("Encode video subtitle task start: {}", file.display());
|
||||
log::info!("SRT style: {}", srt_style);
|
||||
log::info!("SRT style: {srt_style}");
|
||||
// output path is file with prefix [subtitle]
|
||||
let output_filename = format!(
|
||||
"{}{}",
|
||||
@@ -400,23 +387,25 @@ pub async fn encode_video_subtitle(
|
||||
let subtitle = subtitle
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.replace("\\", "\\\\")
|
||||
.replace(":", "\\:");
|
||||
format!("'{}'", subtitle)
|
||||
.replace('\\', "\\\\")
|
||||
.replace(':', "\\:");
|
||||
format!("'{subtitle}'")
|
||||
} else {
|
||||
format!("'{}'", subtitle.display())
|
||||
};
|
||||
let vf = format!("subtitles={}:force_style='{}'", subtitle, srt_style);
|
||||
log::info!("vf: {}", vf);
|
||||
let vf = format!("subtitles={subtitle}:force_style='{srt_style}'");
|
||||
log::info!("vf: {vf}");
|
||||
|
||||
let mut ffmpeg_process = tokio::process::Command::new(ffmpeg_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffmpeg_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let video_encoder = hwaccel::get_x264_encoder().await;
|
||||
|
||||
let child = ffmpeg_process
|
||||
.args(["-i", file.to_str().unwrap()])
|
||||
.args(["-vf", vf.as_str()])
|
||||
.args(["-c:v", "libx264"])
|
||||
.args(["-c:v", video_encoder])
|
||||
.args(["-c:a", "copy"])
|
||||
.args(["-b:v", "6000k"])
|
||||
.args([output_path.to_str().unwrap()])
|
||||
@@ -437,12 +426,14 @@ pub async fn encode_video_subtitle(
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::Error(e) => {
|
||||
log::error!("Encode video subtitle error: {}", e);
|
||||
log::error!("Encode video subtitle error: {e}");
|
||||
command_error = Some(e.to_string());
|
||||
}
|
||||
FfmpegEvent::Progress(p) => {
|
||||
log::info!("Encode video subtitle progress: {}", p.time);
|
||||
reporter.update(format!("压制中:{}", p.time).as_str());
|
||||
reporter
|
||||
.update(format!("压制中:{}", p.time).as_str())
|
||||
.await;
|
||||
}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Log(_level, _content) => {}
|
||||
@@ -451,12 +442,12 @@ pub async fn encode_video_subtitle(
|
||||
}
|
||||
|
||||
if let Err(e) = child.wait().await {
|
||||
log::error!("Encode video subtitle error: {}", e);
|
||||
log::error!("Encode video subtitle error: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
if let Some(error) = command_error {
|
||||
log::error!("Encode video subtitle error: {}", error);
|
||||
log::error!("Encode video subtitle error: {error}");
|
||||
Err(error)
|
||||
} else {
|
||||
log::info!("Encode video subtitle task end: {}", output_path.display());
|
||||
@@ -494,9 +485,9 @@ pub async fn encode_video_danmu(
|
||||
let subtitle = subtitle
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.replace("\\", "\\\\")
|
||||
.replace(":", "\\:");
|
||||
format!("'{}'", subtitle)
|
||||
.replace('\\', "\\\\")
|
||||
.replace(':', "\\:");
|
||||
format!("'{subtitle}'")
|
||||
} else {
|
||||
format!("'{}'", subtitle.display())
|
||||
};
|
||||
@@ -505,10 +496,12 @@ pub async fn encode_video_danmu(
|
||||
#[cfg(target_os = "windows")]
|
||||
ffmpeg_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let video_encoder = hwaccel::get_x264_encoder().await;
|
||||
|
||||
let child = ffmpeg_process
|
||||
.args(["-i", file.to_str().unwrap()])
|
||||
.args(["-vf", &format!("ass={}", subtitle)])
|
||||
.args(["-c:v", "libx264"])
|
||||
.args(["-vf", &format!("ass={subtitle}")])
|
||||
.args(["-c:v", video_encoder])
|
||||
.args(["-c:a", "copy"])
|
||||
.args(["-b:v", "6000k"])
|
||||
.args([output_file_path.to_str().unwrap()])
|
||||
@@ -529,7 +522,7 @@ pub async fn encode_video_danmu(
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::Error(e) => {
|
||||
log::error!("Encode video danmu error: {}", e);
|
||||
log::error!("Encode video danmu error: {e}");
|
||||
command_error = Some(e.to_string());
|
||||
}
|
||||
FfmpegEvent::Progress(p) => {
|
||||
@@ -539,7 +532,8 @@ pub async fn encode_video_danmu(
|
||||
}
|
||||
reporter
|
||||
.unwrap()
|
||||
.update(format!("压制中:{}", p.time).as_str());
|
||||
.update(format!("压制中:{}", p.time).as_str())
|
||||
.await;
|
||||
}
|
||||
FfmpegEvent::Log(_level, _content) => {}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
@@ -548,12 +542,12 @@ pub async fn encode_video_danmu(
|
||||
}
|
||||
|
||||
if let Err(e) = child.wait().await {
|
||||
log::error!("Encode video danmu error: {}", e);
|
||||
log::error!("Encode video danmu error: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
if let Some(error) = command_error {
|
||||
log::error!("Encode video danmu error: {}", error);
|
||||
log::error!("Encode video danmu error: {error}");
|
||||
Err(error)
|
||||
} else {
|
||||
log::info!(
|
||||
@@ -592,7 +586,7 @@ pub async fn generic_ffmpeg_command(args: &[&str]) -> Result<String, String> {
|
||||
}
|
||||
|
||||
if let Err(e) = child.wait().await {
|
||||
log::error!("Generic ffmpeg command error: {}", e);
|
||||
log::error!("Generic ffmpeg command error: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
@@ -620,17 +614,17 @@ pub async fn generate_video_subtitle(
|
||||
let chunk_dir = extract_audio_chunks(file, "wav").await?;
|
||||
|
||||
let mut full_result = GenerateResult {
|
||||
subtitle_id: "".to_string(),
|
||||
subtitle_id: String::new(),
|
||||
subtitle_content: vec![],
|
||||
generator_type: SubtitleGeneratorType::Whisper,
|
||||
};
|
||||
|
||||
let mut chunk_paths = vec![];
|
||||
for entry in std::fs::read_dir(&chunk_dir)
|
||||
.map_err(|e| format!("Failed to read chunk directory: {}", e))?
|
||||
.map_err(|e| format!("Failed to read chunk directory: {e}"))?
|
||||
{
|
||||
let entry =
|
||||
entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
|
||||
entry.map_err(|e| format!("Failed to read directory entry: {e}"))?;
|
||||
let path = entry.path();
|
||||
chunk_paths.push(path);
|
||||
}
|
||||
@@ -676,17 +670,17 @@ pub async fn generate_video_subtitle(
|
||||
let chunk_dir = extract_audio_chunks(file, "mp3").await?;
|
||||
|
||||
let mut full_result = GenerateResult {
|
||||
subtitle_id: "".to_string(),
|
||||
subtitle_id: String::new(),
|
||||
subtitle_content: vec![],
|
||||
generator_type: SubtitleGeneratorType::WhisperOnline,
|
||||
};
|
||||
|
||||
let mut chunk_paths = vec![];
|
||||
for entry in std::fs::read_dir(&chunk_dir)
|
||||
.map_err(|e| format!("Failed to read chunk directory: {}", e))?
|
||||
.map_err(|e| format!("Failed to read chunk directory: {e}"))?
|
||||
{
|
||||
let entry =
|
||||
entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
|
||||
entry.map_err(|e| format!("Failed to read directory entry: {e}"))?;
|
||||
let path = entry.path();
|
||||
chunk_paths.push(path);
|
||||
}
|
||||
@@ -717,10 +711,7 @@ pub async fn generate_video_subtitle(
|
||||
Err("Failed to initialize Whisper Online".to_string())
|
||||
}
|
||||
}
|
||||
_ => Err(format!(
|
||||
"Unknown subtitle generator type: {}",
|
||||
generator_type
|
||||
)),
|
||||
_ => Err(format!("Unknown subtitle generator type: {generator_type}")),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -731,7 +722,7 @@ pub async fn check_ffmpeg() -> Result<String, String> {
|
||||
.stdout(Stdio::piped())
|
||||
.spawn();
|
||||
if let Err(e) = child {
|
||||
log::error!("Faild to spwan ffmpeg process: {e}");
|
||||
log::error!("Failed to spawn ffmpeg process: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
@@ -763,52 +754,6 @@ pub async fn check_ffmpeg() -> Result<String, String> {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_video_resolution(file: &str) -> Result<String, String> {
|
||||
// ffprobe -v error -select_streams v:0 -show_entries stream=width,height -of csv=s=x:p=0 input.mp4
|
||||
let mut ffprobe_process = tokio::process::Command::new(ffprobe_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffprobe_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let child = ffprobe_process
|
||||
.arg("-i")
|
||||
.arg(file)
|
||||
.arg("-v")
|
||||
.arg("error")
|
||||
.arg("-select_streams")
|
||||
.arg("v:0")
|
||||
.arg("-show_entries")
|
||||
.arg("stream=width,height")
|
||||
.arg("-of")
|
||||
.arg("csv=s=x:p=0")
|
||||
.stdout(Stdio::piped())
|
||||
.spawn();
|
||||
if let Err(e) = child {
|
||||
log::error!("Faild to spwan ffprobe process: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
let stdout = child.stdout.take();
|
||||
if stdout.is_none() {
|
||||
log::error!("Failed to take ffprobe output");
|
||||
return Err("Failed to take ffprobe output".into());
|
||||
}
|
||||
|
||||
let stdout = stdout.unwrap();
|
||||
let reader = BufReader::new(stdout);
|
||||
let mut lines = reader.lines();
|
||||
let line = lines.next_line().await.unwrap();
|
||||
if line.is_none() {
|
||||
return Err("Failed to parse resolution from output".into());
|
||||
}
|
||||
let line = line.unwrap();
|
||||
let resolution = line.split("x").collect::<Vec<&str>>();
|
||||
if resolution.len() != 2 {
|
||||
return Err("Failed to parse resolution from output".into());
|
||||
}
|
||||
Ok(format!("{}x{}", resolution[0], resolution[1]))
|
||||
}
|
||||
|
||||
fn ffmpeg_path() -> PathBuf {
|
||||
let mut path = Path::new("ffmpeg").to_path_buf();
|
||||
if cfg!(windows) {
|
||||
@@ -844,11 +789,13 @@ pub async fn clip_from_video_file(
|
||||
#[cfg(target_os = "windows")]
|
||||
ffmpeg_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let video_encoder = hwaccel::get_x264_encoder().await;
|
||||
|
||||
let child = ffmpeg_process
|
||||
.args(["-i", &format!("{}", input_path.display())])
|
||||
.args(["-ss", &start_time.to_string()])
|
||||
.args(["-t", &duration.to_string()])
|
||||
.args(["-c:v", "libx264"])
|
||||
.args(["-c:v", video_encoder])
|
||||
.args(["-c:a", "aac"])
|
||||
.args(["-b:v", "6000k"])
|
||||
.args(["-avoid_negative_ts", "make_zero"])
|
||||
@@ -858,7 +805,7 @@ pub async fn clip_from_video_file(
|
||||
.spawn();
|
||||
|
||||
if let Err(e) = child {
|
||||
return Err(format!("启动ffmpeg进程失败: {}", e));
|
||||
return Err(format!("启动ffmpeg进程失败: {e}"));
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
@@ -871,17 +818,17 @@ pub async fn clip_from_video_file(
|
||||
match event {
|
||||
FfmpegEvent::Progress(p) => {
|
||||
if let Some(reporter) = reporter {
|
||||
reporter.update(&format!("切片进度: {}", p.time));
|
||||
reporter.update(&format!("切片进度: {}", p.time)).await;
|
||||
}
|
||||
}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Log(level, content) => {
|
||||
if content.contains("error") || level == LogLevel::Error {
|
||||
log::error!("切片错误: {}", content);
|
||||
log::error!("切片错误: {content}");
|
||||
}
|
||||
}
|
||||
FfmpegEvent::Error(e) => {
|
||||
log::error!("切片错误: {}", e);
|
||||
log::error!("切片错误: {e}");
|
||||
clip_error = Some(e.to_string());
|
||||
}
|
||||
_ => {}
|
||||
@@ -920,13 +867,11 @@ pub async fn extract_video_metadata(file_path: &Path) -> Result<VideoMetadata, S
|
||||
"json",
|
||||
"-show_format",
|
||||
"-show_streams",
|
||||
"-select_streams",
|
||||
"v:0",
|
||||
&format!("{}", file_path.display()),
|
||||
])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| format!("执行ffprobe失败: {}", e))?;
|
||||
.map_err(|e| format!("执行ffprobe失败: {e}"))?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(format!(
|
||||
@@ -937,7 +882,7 @@ pub async fn extract_video_metadata(file_path: &Path) -> Result<VideoMetadata, S
|
||||
|
||||
let json_str = String::from_utf8_lossy(&output.stdout);
|
||||
let json: serde_json::Value =
|
||||
serde_json::from_str(&json_str).map_err(|e| format!("解析ffprobe输出失败: {}", e))?;
|
||||
serde_json::from_str(&json_str).map_err(|e| format!("解析ffprobe输出失败: {e}"))?;
|
||||
|
||||
// 解析视频流信息
|
||||
let streams = json["streams"].as_array().ok_or("未找到视频流信息")?;
|
||||
@@ -946,22 +891,30 @@ pub async fn extract_video_metadata(file_path: &Path) -> Result<VideoMetadata, S
|
||||
return Err("未找到视频流".to_string());
|
||||
}
|
||||
|
||||
let video_stream = &streams[0];
|
||||
let format = &json["format"];
|
||||
let mut metadata = VideoMetadata {
|
||||
duration: 0.0,
|
||||
width: 0,
|
||||
height: 0,
|
||||
video_codec: String::new(),
|
||||
audio_codec: String::new(),
|
||||
};
|
||||
|
||||
let duration = format["duration"]
|
||||
.as_str()
|
||||
.and_then(|d| d.parse::<f64>().ok())
|
||||
.unwrap_or(0.0);
|
||||
|
||||
let width = video_stream["width"].as_u64().unwrap_or(0) as u32;
|
||||
let height = video_stream["height"].as_u64().unwrap_or(0) as u32;
|
||||
|
||||
Ok(VideoMetadata {
|
||||
duration,
|
||||
width,
|
||||
height,
|
||||
})
|
||||
for stream in streams {
|
||||
let codec_name = stream["codec_type"].as_str().unwrap_or("");
|
||||
if codec_name == "video" {
|
||||
metadata.video_codec = stream["codec_name"].as_str().unwrap_or("").to_owned();
|
||||
metadata.width = stream["width"].as_u64().unwrap_or(0) as u32;
|
||||
metadata.height = stream["height"].as_u64().unwrap_or(0) as u32;
|
||||
metadata.duration = stream["duration"]
|
||||
.as_str()
|
||||
.unwrap_or("0.0")
|
||||
.parse::<f64>()
|
||||
.unwrap_or(0.0);
|
||||
} else if codec_name == "audio" {
|
||||
metadata.audio_codec = stream["codec_name"].as_str().unwrap_or("").to_owned();
|
||||
}
|
||||
}
|
||||
Ok(metadata)
|
||||
}
|
||||
|
||||
/// Generate thumbnail file from video, capturing a frame at the specified timestamp.
|
||||
@@ -986,7 +939,7 @@ pub async fn generate_thumbnail(video_full_path: &Path, timestamp: f64) -> Resul
|
||||
.args(["-y", thumbnail_full_path.to_str().unwrap()])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| format!("生成缩略图失败: {}", e))?;
|
||||
.map_err(|e| format!("生成缩略图失败: {e}"))?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(format!(
|
||||
@@ -1022,7 +975,7 @@ pub async fn execute_ffmpeg_conversion(
|
||||
let mut child = cmd
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.map_err(|e| format!("启动FFmpeg进程失败: {}", e))?;
|
||||
.map_err(|e| format!("启动FFmpeg进程失败: {e}"))?;
|
||||
|
||||
let stderr = child.stderr.take().unwrap();
|
||||
let reader = BufReader::new(stderr);
|
||||
@@ -1032,7 +985,9 @@ pub async fn execute_ffmpeg_conversion(
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::Progress(p) => {
|
||||
reporter.update(&format!("正在转换视频格式... {} ({})", p.time, mode_name));
|
||||
reporter
|
||||
.update(&format!("正在转换视频格式... {} ({})", p.time, mode_name))
|
||||
.await;
|
||||
}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Log(level, content) => {
|
||||
@@ -1052,15 +1007,17 @@ pub async fn execute_ffmpeg_conversion(
|
||||
let status = child
|
||||
.wait()
|
||||
.await
|
||||
.map_err(|e| format!("等待FFmpeg进程失败: {}", e))?;
|
||||
.map_err(|e| format!("等待FFmpeg进程失败: {e}"))?;
|
||||
|
||||
if !status.success() {
|
||||
let error_msg = conversion_error
|
||||
.unwrap_or_else(|| format!("FFmpeg退出码: {}", status.code().unwrap_or(-1)));
|
||||
return Err(format!("视频格式转换失败 ({}): {}", mode_name, error_msg));
|
||||
return Err(format!("视频格式转换失败 ({mode_name}): {error_msg}"));
|
||||
}
|
||||
|
||||
reporter.update(&format!("视频格式转换完成 100% ({})", mode_name));
|
||||
reporter
|
||||
.update(&format!("视频格式转换完成 100% ({mode_name})"))
|
||||
.await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1070,7 +1027,7 @@ pub async fn try_stream_copy_conversion(
|
||||
dest: &Path,
|
||||
reporter: &ProgressReporter,
|
||||
) -> Result<(), String> {
|
||||
reporter.update("正在转换视频格式... 0% (无损模式)");
|
||||
reporter.update("正在转换视频格式... 0% (无损模式)").await;
|
||||
|
||||
// 构建ffmpeg命令 - 流复制模式
|
||||
let mut cmd = tokio::process::Command::new(ffmpeg_path());
|
||||
@@ -1103,7 +1060,7 @@ pub async fn try_high_quality_conversion(
|
||||
dest: &Path,
|
||||
reporter: &ProgressReporter,
|
||||
) -> Result<(), String> {
|
||||
reporter.update("正在转换视频格式... 0% (高质量模式)");
|
||||
reporter.update("正在转换视频格式... 0% (高质量模式)").await;
|
||||
|
||||
// 构建ffmpeg命令 - 高质量重编码
|
||||
let mut cmd = tokio::process::Command::new(ffmpeg_path());
|
||||
@@ -1146,16 +1103,70 @@ pub async fn convert_video_format(
|
||||
match try_stream_copy_conversion(source, dest, reporter).await {
|
||||
Ok(()) => Ok(()),
|
||||
Err(stream_copy_error) => {
|
||||
reporter.update("流复制失败,使用高质量重编码模式...");
|
||||
log::warn!(
|
||||
"Stream copy failed: {}, falling back to re-encoding",
|
||||
stream_copy_error
|
||||
);
|
||||
reporter.update("流复制失败,使用高质量重编码模式...").await;
|
||||
log::warn!("Stream copy failed: {stream_copy_error}, falling back to re-encoding");
|
||||
try_high_quality_conversion(source, dest, reporter).await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if all videos have same encoding and resolution
|
||||
pub async fn check_videos(video_paths: &[&Path]) -> bool {
|
||||
// check if all playlist paths exist
|
||||
let mut video_codec = "".to_owned();
|
||||
let mut audio_codec = "".to_owned();
|
||||
let mut width = 0;
|
||||
let mut height = 0;
|
||||
for video_path in video_paths.iter() {
|
||||
if !Path::new(video_path).exists() {
|
||||
continue;
|
||||
}
|
||||
let metadata = extract_video_metadata(Path::new(video_path)).await;
|
||||
if metadata.is_err() {
|
||||
log::error!(
|
||||
"Failed to extract video metadata: {}",
|
||||
metadata.unwrap_err()
|
||||
);
|
||||
return false;
|
||||
}
|
||||
let metadata = metadata.unwrap();
|
||||
|
||||
// check video codec
|
||||
if !video_codec.is_empty() && metadata.video_codec != video_codec {
|
||||
log::error!("Video codec does not match: {}", video_path.display());
|
||||
return false;
|
||||
} else {
|
||||
video_codec = metadata.video_codec;
|
||||
}
|
||||
|
||||
// check audio codec
|
||||
if !audio_codec.is_empty() && metadata.audio_codec != audio_codec {
|
||||
log::error!("Audio codec does not match: {}", video_path.display());
|
||||
return false;
|
||||
} else {
|
||||
audio_codec = metadata.audio_codec;
|
||||
}
|
||||
|
||||
// check width
|
||||
if width > 0 && metadata.width != width {
|
||||
log::error!("Video width does not match: {}", video_path.display());
|
||||
return false;
|
||||
} else {
|
||||
width = metadata.width;
|
||||
}
|
||||
|
||||
// check height
|
||||
if height > 0 && metadata.height != height {
|
||||
log::error!("Video height does not match: {}", video_path.display());
|
||||
return false;
|
||||
} else {
|
||||
height = metadata.height;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
// tests
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
@@ -1224,6 +1235,7 @@ mod tests {
|
||||
let test_video = Path::new("tests/video/test.mp4");
|
||||
if test_video.exists() {
|
||||
let metadata = extract_video_metadata(test_video).await.unwrap();
|
||||
println!("metadata: {:?}", metadata);
|
||||
assert!(metadata.duration > 0.0);
|
||||
assert!(metadata.width > 0);
|
||||
assert!(metadata.height > 0);
|
||||
@@ -1240,16 +1252,6 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
// 测试视频分辨率获取
|
||||
#[tokio::test]
|
||||
async fn test_get_video_resolution() {
|
||||
let file = Path::new("tests/video/h_test.m4s");
|
||||
if file.exists() {
|
||||
let resolution = get_video_resolution(file.to_str().unwrap()).await.unwrap();
|
||||
assert_eq!(resolution, "1920x1080");
|
||||
}
|
||||
}
|
||||
|
||||
// 测试缩略图生成
|
||||
#[tokio::test]
|
||||
async fn test_generate_thumbnail() {
|
||||
@@ -1296,6 +1298,23 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
// 测试硬件加速能力探测
|
||||
#[tokio::test]
|
||||
async fn test_list_supported_hwaccels() {
|
||||
match super::hwaccel::list_supported_hwaccels().await {
|
||||
Ok(hwaccels) => {
|
||||
println!("hwaccels: {:?}", hwaccels);
|
||||
let mut sorted = hwaccels.clone();
|
||||
sorted.sort();
|
||||
sorted.dedup();
|
||||
assert_eq!(sorted.len(), hwaccels.len());
|
||||
}
|
||||
Err(_) => {
|
||||
println!("FFmpeg hardware acceleration query not available for testing");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 测试字幕生成错误处理
|
||||
#[tokio::test]
|
||||
async fn test_generate_video_subtitle_errors() {
|
||||
@@ -1341,18 +1360,6 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
// 测试错误处理
|
||||
#[tokio::test]
|
||||
async fn test_error_handling() {
|
||||
// 测试不存在的文件
|
||||
let non_existent_file = Path::new("tests/nonexistent/test.mp4");
|
||||
let result = extract_video_metadata(non_existent_file).await;
|
||||
assert!(result.is_err());
|
||||
|
||||
let result = get_video_resolution("tests/nonexistent/test.mp4").await;
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
// 测试文件名和路径处理
|
||||
#[test]
|
||||
fn test_filename_processing() {
|
||||
@@ -1384,7 +1391,7 @@ mod tests {
|
||||
let output_path = test_file.with_extension("wav");
|
||||
let output_dir = output_path.parent().unwrap();
|
||||
let base_name = output_path.file_stem().unwrap().to_str().unwrap();
|
||||
let chunk_dir = output_dir.join(format!("{}_chunks", base_name));
|
||||
let chunk_dir = output_dir.join(format!("{base_name}_chunks"));
|
||||
|
||||
assert!(chunk_dir.to_string_lossy().contains("_chunks"));
|
||||
assert!(chunk_dir.to_string_lossy().contains("test"));
|
||||
162
src-tauri/src/ffmpeg/playlist.rs
Normal file
@@ -0,0 +1,162 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use m3u8_rs::Map;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
use crate::progress::progress_reporter::ProgressReporterTrait;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
use crate::ffmpeg::CREATE_NO_WINDOW;
|
||||
#[cfg(target_os = "windows")]
|
||||
#[allow(unused_imports)]
|
||||
use std::os::windows::process::CommandExt;
|
||||
|
||||
use super::Range;
|
||||
|
||||
pub async fn playlist_to_video(
|
||||
reporter: Option<&impl ProgressReporterTrait>,
|
||||
playlist_path: &Path,
|
||||
output_path: &Path,
|
||||
range: Option<Range>,
|
||||
) -> Result<(), String> {
|
||||
let (_, playlist) = m3u8_rs::parse_media_playlist(
|
||||
&tokio::fs::read(playlist_path)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?,
|
||||
)
|
||||
.unwrap();
|
||||
let mut start_offset = None;
|
||||
let mut segments = Vec::new();
|
||||
if let Some(range) = &range {
|
||||
let mut duration = 0.0;
|
||||
for s in playlist.segments.clone() {
|
||||
if range.is_in(duration) || range.is_in(duration + s.duration as f64) {
|
||||
segments.push(s.clone());
|
||||
if start_offset.is_none() {
|
||||
start_offset = Some(range.start - duration);
|
||||
}
|
||||
}
|
||||
duration += s.duration as f64;
|
||||
}
|
||||
} else {
|
||||
segments = playlist.segments.clone();
|
||||
}
|
||||
|
||||
if segments.is_empty() {
|
||||
return Err("No segments found".to_string());
|
||||
}
|
||||
|
||||
let first_segment = playlist.segments.first().unwrap().clone();
|
||||
let mut header_url = first_segment
|
||||
.unknown_tags
|
||||
.iter()
|
||||
.find(|t| t.tag == "X-MAP")
|
||||
.map(|t| {
|
||||
let rest = t.rest.clone().unwrap();
|
||||
rest.split('=').nth(1).unwrap().replace("\\\"", "")
|
||||
});
|
||||
if header_url.is_none() {
|
||||
// map: Some(Map { uri: "h1758725308.m4s"
|
||||
if let Some(Map { uri, .. }) = &first_segment.map {
|
||||
header_url = Some(uri.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// write all segments to clip_file
|
||||
{
|
||||
let playlist_folder = playlist_path.parent().unwrap();
|
||||
let output_folder = output_path.parent().unwrap();
|
||||
if !output_folder.exists() {
|
||||
std::fs::create_dir_all(output_folder).unwrap();
|
||||
}
|
||||
let mut file = tokio::fs::File::create(&output_path)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to create output file: {}", e))?;
|
||||
if let Some(header_url) = header_url {
|
||||
let header_data = tokio::fs::read(playlist_folder.join(header_url))
|
||||
.await
|
||||
.map_err(|e| format!("Failed to read header file: {}", e))?;
|
||||
file.write_all(&header_data)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to write header file: {}", e))?;
|
||||
}
|
||||
for s in segments {
|
||||
// read segment
|
||||
let uri = s.uri.split('?').next().unwrap_or(&s.uri);
|
||||
let segment_file_path = playlist_folder.join(uri);
|
||||
let segment_data = tokio::fs::read(&segment_file_path)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to read segment file: {}", e))?;
|
||||
// append segment data to clip_file
|
||||
file.write_all(&segment_data)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to write segment file: {}", e))?;
|
||||
}
|
||||
file.flush()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to flush file: {}", e))?;
|
||||
}
|
||||
|
||||
// transcode copy to fix timestamp
|
||||
{
|
||||
let tmp_output_path = output_path.with_extension("tmp.mp4");
|
||||
super::transcode(reporter, output_path, &tmp_output_path, true).await?;
|
||||
|
||||
// remove original file
|
||||
let _ = tokio::fs::remove_file(output_path).await;
|
||||
// rename tmp_output_path to output_path
|
||||
let _ = tokio::fs::rename(tmp_output_path, output_path).await;
|
||||
}
|
||||
|
||||
// trim for precised duration
|
||||
if let Some(start_offset) = start_offset {
|
||||
let tmp_output_path = output_path.with_extension("tmp.mp4");
|
||||
super::trim_video(
|
||||
reporter,
|
||||
output_path,
|
||||
&tmp_output_path,
|
||||
start_offset,
|
||||
range.as_ref().unwrap().duration(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
// remove original file
|
||||
let _ = tokio::fs::remove_file(output_path).await;
|
||||
// rename tmp_output_path to output_path
|
||||
let _ = tokio::fs::rename(tmp_output_path, output_path).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn playlists_to_video(
|
||||
reporter: Option<&impl ProgressReporterTrait>,
|
||||
playlists: &[&Path],
|
||||
danmu_ass_files: Vec<Option<PathBuf>>,
|
||||
output_path: &Path,
|
||||
) -> Result<(), String> {
|
||||
let mut to_remove = Vec::new();
|
||||
let mut segments = Vec::new();
|
||||
for (i, playlist) in playlists.iter().enumerate() {
|
||||
let mut video_path = output_path.with_extension(format!("{}.mp4", i));
|
||||
if let Err(e) = playlist_to_video(reporter, playlist, &video_path, None).await {
|
||||
log::error!("Failed to generate playlist video: {e}");
|
||||
continue;
|
||||
}
|
||||
to_remove.push(video_path.clone());
|
||||
if let Some(danmu_ass_file) = &danmu_ass_files[i] {
|
||||
video_path = super::encode_video_danmu(reporter, &video_path, danmu_ass_file).await?;
|
||||
to_remove.push(video_path.clone());
|
||||
}
|
||||
segments.push(video_path);
|
||||
}
|
||||
|
||||
super::general::concat_videos(reporter, &segments, output_path).await?;
|
||||
|
||||
// clean up segments
|
||||
for segment in to_remove {
|
||||
let _ = tokio::fs::remove_file(segment).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,7 +1,12 @@
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::database::account::AccountRow;
|
||||
use crate::recorder::bilibili::client::{QrInfo, QrStatus};
|
||||
use crate::state::State;
|
||||
use crate::state_type;
|
||||
use chrono::Utc;
|
||||
use recorder::platforms::bilibili::api::{QrInfo, QrStatus};
|
||||
use recorder::platforms::{bilibili, douyin, huya, PlatformType};
|
||||
use recorder::UserInfo;
|
||||
|
||||
use hyper::header::HeaderValue;
|
||||
#[cfg(feature = "gui")]
|
||||
@@ -15,74 +20,163 @@ pub async fn get_accounts(state: state_type!()) -> Result<super::AccountInfo, St
|
||||
Ok(account_info)
|
||||
}
|
||||
|
||||
fn get_item_from_cookies(name: &str, cookies: &str) -> Result<String, String> {
|
||||
Ok(cookies
|
||||
.split(';')
|
||||
.map(str::trim)
|
||||
.find_map(|cookie| cookie.strip_prefix(format!("{name}=").as_str()))
|
||||
.ok_or_else(|| format!("Invalid cookies: missing {name}").to_string())?
|
||||
.to_string())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn add_account(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
cookies: &str,
|
||||
) -> Result<AccountRow, String> {
|
||||
) -> Result<(), String> {
|
||||
// check if cookies is valid
|
||||
if let Err(e) = cookies.parse::<HeaderValue>() {
|
||||
return Err(format!("Invalid cookies: {}", e));
|
||||
return Err(format!("Invalid cookies: {e}"));
|
||||
}
|
||||
let account = state.db.add_account(&platform, cookies).await?;
|
||||
if platform == "bilibili" {
|
||||
let account_info = state.client.get_user_info(&account, account.uid).await?;
|
||||
state
|
||||
.db
|
||||
.update_account(
|
||||
&platform,
|
||||
account_info.user_id,
|
||||
&account_info.user_name,
|
||||
&account_info.user_avatar_url,
|
||||
)
|
||||
.await?;
|
||||
} else if platform == "douyin" {
|
||||
// Get user info from Douyin API
|
||||
let douyin_client = crate::recorder::douyin::client::DouyinClient::new(
|
||||
&state.config.read().await.user_agent,
|
||||
&account,
|
||||
);
|
||||
match douyin_client.get_user_info().await {
|
||||
Ok(user_info) => {
|
||||
// For Douyin, use sec_uid as the primary identifier in id_str field
|
||||
let avatar_url = user_info
|
||||
.avatar_thumb
|
||||
.url_list
|
||||
.first()
|
||||
.cloned()
|
||||
.unwrap_or_default();
|
||||
|
||||
state
|
||||
.db
|
||||
.update_account_with_id_str(
|
||||
&account,
|
||||
&user_info.sec_uid,
|
||||
&user_info.nickname,
|
||||
&avatar_url,
|
||||
)
|
||||
.await?;
|
||||
let platform = PlatformType::from_str(&platform).map_err(|_| "Invalid platform".to_string())?;
|
||||
|
||||
let csrf = match platform {
|
||||
PlatformType::BiliBili => {
|
||||
cookies
|
||||
.split(';')
|
||||
.map(str::trim)
|
||||
.find_map(|cookie| -> Option<String> {
|
||||
if cookie.starts_with("bili_jct=") {
|
||||
let var_name = &"bili_jct=";
|
||||
Some(cookie[var_name.len()..].to_string())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
_ => Some(String::new()),
|
||||
};
|
||||
|
||||
// fetch basic account user info
|
||||
let client = reqwest::Client::new();
|
||||
let user_info = match platform {
|
||||
PlatformType::BiliBili => {
|
||||
// For Bilibili, extract numeric uid from cookies
|
||||
if csrf.is_none() {
|
||||
return Err("Invalid bilibili cookies".to_string());
|
||||
}
|
||||
Err(e) => {
|
||||
log::warn!("Failed to get Douyin user info: {}", e);
|
||||
// Keep the account but with default values
|
||||
let uid = get_item_from_cookies("DedeUserID", cookies)?;
|
||||
let tmp_account = AccountRow {
|
||||
platform: platform.as_str().to_string(),
|
||||
uid,
|
||||
name: String::new(),
|
||||
avatar: String::new(),
|
||||
csrf: csrf.clone().unwrap(),
|
||||
cookies: cookies.into(),
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
};
|
||||
match bilibili::api::get_user_info(&client, &tmp_account.to_account(), &tmp_account.uid)
|
||||
.await
|
||||
{
|
||||
Ok(user_info) => UserInfo {
|
||||
user_id: user_info.user_id,
|
||||
user_name: user_info.user_name,
|
||||
user_avatar: user_info.user_avatar_url,
|
||||
},
|
||||
Err(e) => {
|
||||
return Err(e.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(account)
|
||||
PlatformType::Douyin => {
|
||||
let tmp_account = AccountRow {
|
||||
platform: platform.as_str().to_string(),
|
||||
uid: "".into(),
|
||||
name: String::new(),
|
||||
avatar: String::new(),
|
||||
csrf: "".into(),
|
||||
cookies: cookies.into(),
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
};
|
||||
|
||||
match douyin::api::get_user_info(&client, &tmp_account.to_account()).await {
|
||||
Ok(user_info) => {
|
||||
// For Douyin, use sec_uid as the primary identifier in id_str field
|
||||
let avatar_url = user_info
|
||||
.avatar_thumb
|
||||
.url_list
|
||||
.first()
|
||||
.cloned()
|
||||
.unwrap_or_default();
|
||||
|
||||
UserInfo {
|
||||
user_id: user_info.sec_uid,
|
||||
user_name: user_info.nickname,
|
||||
user_avatar: avatar_url,
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(format!("Failed to get Douyin user info: {e}"));
|
||||
}
|
||||
}
|
||||
}
|
||||
PlatformType::Huya => {
|
||||
let user_id = get_item_from_cookies("yyuid", cookies)?;
|
||||
|
||||
let tmp_account = AccountRow {
|
||||
platform: platform.as_str().to_string(),
|
||||
uid: user_id,
|
||||
name: String::new(),
|
||||
avatar: String::new(),
|
||||
csrf: "".into(),
|
||||
cookies: cookies.into(),
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
};
|
||||
|
||||
match huya::api::get_user_info(&client, &tmp_account.to_account()).await {
|
||||
Ok(user_info) => UserInfo {
|
||||
user_id: user_info.user_id,
|
||||
user_name: user_info.user_name,
|
||||
user_avatar: user_info.user_avatar,
|
||||
},
|
||||
Err(e) => {
|
||||
return Err(format!("Failed to get Huya user info: {e}"));
|
||||
}
|
||||
}
|
||||
}
|
||||
PlatformType::Youtube => {
|
||||
// unsupported
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
};
|
||||
|
||||
let account = AccountRow {
|
||||
platform: platform.as_str().to_string(),
|
||||
uid: user_info.user_id,
|
||||
name: user_info.user_name,
|
||||
avatar: user_info.user_avatar,
|
||||
csrf: csrf.unwrap(),
|
||||
cookies: cookies.into(),
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
};
|
||||
state.db.add_account(&account).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn remove_account(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
uid: u64,
|
||||
uid: String,
|
||||
) -> Result<(), String> {
|
||||
if platform == "bilibili" {
|
||||
let account = state.db.get_account(&platform, uid).await?;
|
||||
state.client.logout(&account).await?;
|
||||
let account = state.db.get_account(&platform, &uid).await?;
|
||||
let client = reqwest::Client::new();
|
||||
let _ = bilibili::api::logout(&client, &account.to_account()).await;
|
||||
}
|
||||
Ok(state.db.remove_account(&platform, uid).await?)
|
||||
Ok(state.db.remove_account(&platform, &uid).await?)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
@@ -91,17 +185,37 @@ pub async fn get_account_count(state: state_type!()) -> Result<u64, String> {
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_qr_status(state: state_type!(), qrcode_key: &str) -> Result<QrStatus, ()> {
|
||||
match state.client.get_qr_status(qrcode_key).await {
|
||||
pub async fn get_qr_status(_state: state_type!(), qrcode_key: &str) -> Result<QrStatus, ()> {
|
||||
let client = reqwest::Client::new();
|
||||
match bilibili::api::get_qr_status(&client, qrcode_key).await {
|
||||
Ok(qr_status) => Ok(qr_status),
|
||||
Err(_e) => Err(()),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_qr(state: state_type!()) -> Result<QrInfo, ()> {
|
||||
match state.client.get_qr().await {
|
||||
pub async fn get_qr(_state: state_type!()) -> Result<QrInfo, ()> {
|
||||
let client = reqwest::Client::new();
|
||||
match bilibili::api::get_qr(&client).await {
|
||||
Ok(qr_info) => Ok(qr_info),
|
||||
Err(_e) => Err(()),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_get_item_from_cookies() {
|
||||
let cookies = "DedeUserID=1234567890; bili_jct=1234567890; yyuid=1234567890";
|
||||
let uid = get_item_from_cookies("DedeUserID", cookies).unwrap();
|
||||
assert_eq!(uid, "1234567890");
|
||||
let uid = get_item_from_cookies("yyuid", cookies).unwrap();
|
||||
assert_eq!(uid, "1234567890");
|
||||
let uid = get_item_from_cookies("bili_jct", cookies).unwrap();
|
||||
assert_eq!(uid, "1234567890");
|
||||
let uid = get_item_from_cookies("unknown", cookies).unwrap_err();
|
||||
assert_eq!(uid, "Invalid cookies: missing unknown");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use crate::config::Config;
|
||||
use crate::danmu2ass::Danmu2AssOptions;
|
||||
use crate::state::State;
|
||||
use crate::state_type;
|
||||
|
||||
@@ -14,11 +15,7 @@ pub async fn get_config(state: state_type!()) -> Result<Config, ()> {
|
||||
#[allow(dead_code)]
|
||||
pub async fn set_cache_path(state: state_type!(), cache_path: String) -> Result<(), String> {
|
||||
let old_cache_path = state.config.read().await.cache.clone();
|
||||
log::info!(
|
||||
"Try to set cache path: {} -> {}",
|
||||
old_cache_path,
|
||||
cache_path
|
||||
);
|
||||
log::info!("Try to set cache path: {old_cache_path} -> {cache_path}");
|
||||
if old_cache_path == cache_path {
|
||||
return Ok(());
|
||||
}
|
||||
@@ -27,20 +24,16 @@ pub async fn set_cache_path(state: state_type!(), cache_path: String) -> Result<
|
||||
let new_cache_path_obj = std::path::Path::new(&cache_path);
|
||||
// check if new cache path is under old cache path
|
||||
if new_cache_path_obj.starts_with(old_cache_path_obj) {
|
||||
log::error!(
|
||||
"New cache path is under old cache path: {} -> {}",
|
||||
old_cache_path,
|
||||
cache_path
|
||||
);
|
||||
log::error!("New cache path is under old cache path: {old_cache_path} -> {cache_path}");
|
||||
return Err("New cache path cannot be under old cache path".to_string());
|
||||
}
|
||||
|
||||
state.recorder_manager.set_migrating(true).await;
|
||||
state.recorder_manager.set_migrating(true);
|
||||
// stop and clear all recorders
|
||||
state.recorder_manager.stop_all().await;
|
||||
// first switch to new cache
|
||||
state.config.write().await.set_cache_path(&cache_path);
|
||||
log::info!("Cache path changed: {}", cache_path);
|
||||
log::info!("Cache path changed: {cache_path}");
|
||||
// Copy old cache to new cache
|
||||
log::info!("Start copy old cache to new cache");
|
||||
state
|
||||
@@ -68,11 +61,11 @@ pub async fn set_cache_path(state: state_type!(), cache_path: String) -> Result<
|
||||
// if entry is a folder
|
||||
if entry.is_dir() {
|
||||
if let Err(e) = crate::handlers::utils::copy_dir_all(entry, &new_entry) {
|
||||
log::error!("Copy old cache to new cache error: {}", e);
|
||||
log::error!("Copy old cache to new cache error: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
} else if let Err(e) = std::fs::copy(entry, &new_entry) {
|
||||
log::error!("Copy old cache to new cache error: {}", e);
|
||||
log::error!("Copy old cache to new cache error: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
}
|
||||
@@ -80,16 +73,16 @@ pub async fn set_cache_path(state: state_type!(), cache_path: String) -> Result<
|
||||
log::info!("Copy old cache to new cache done");
|
||||
state.db.new_message("缓存目录切换", "缓存切换完成").await?;
|
||||
|
||||
state.recorder_manager.set_migrating(false).await;
|
||||
state.recorder_manager.set_migrating(false);
|
||||
|
||||
// remove all old cache entries
|
||||
for entry in old_cache_entries {
|
||||
if entry.is_dir() {
|
||||
if let Err(e) = std::fs::remove_dir_all(&entry) {
|
||||
log::error!("Remove old cache error: {}", e);
|
||||
log::error!("Remove old cache error: {e}");
|
||||
}
|
||||
} else if let Err(e) = std::fs::remove_file(&entry) {
|
||||
log::error!("Remove old cache error: {}", e);
|
||||
log::error!("Remove old cache error: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -101,11 +94,7 @@ pub async fn set_cache_path(state: state_type!(), cache_path: String) -> Result<
|
||||
pub async fn set_output_path(state: state_type!(), output_path: String) -> Result<(), String> {
|
||||
let mut config = state.config.write().await;
|
||||
let old_output_path = config.output.clone();
|
||||
log::info!(
|
||||
"Try to set output path: {} -> {}",
|
||||
old_output_path,
|
||||
output_path
|
||||
);
|
||||
log::info!("Try to set output path: {old_output_path} -> {output_path}");
|
||||
if old_output_path == output_path {
|
||||
return Ok(());
|
||||
}
|
||||
@@ -114,11 +103,7 @@ pub async fn set_output_path(state: state_type!(), output_path: String) -> Resul
|
||||
let new_output_path_obj = std::path::Path::new(&output_path);
|
||||
// check if new output path is under old output path
|
||||
if new_output_path_obj.starts_with(old_output_path_obj) {
|
||||
log::error!(
|
||||
"New output path is under old output path: {} -> {}",
|
||||
old_output_path,
|
||||
output_path
|
||||
);
|
||||
log::error!("New output path is under old output path: {old_output_path} -> {output_path}");
|
||||
return Err("New output path cannot be under old output path".to_string());
|
||||
}
|
||||
|
||||
@@ -140,11 +125,11 @@ pub async fn set_output_path(state: state_type!(), output_path: String) -> Resul
|
||||
// if entry is a folder
|
||||
if entry.is_dir() {
|
||||
if let Err(e) = crate::handlers::utils::copy_dir_all(entry, &new_entry) {
|
||||
log::error!("Copy old output to new output error: {}", e);
|
||||
log::error!("Copy old output to new output error: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
} else if let Err(e) = std::fs::copy(entry, &new_entry) {
|
||||
log::error!("Copy old output to new output error: {}", e);
|
||||
log::error!("Copy old output to new output error: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
}
|
||||
@@ -153,10 +138,10 @@ pub async fn set_output_path(state: state_type!(), output_path: String) -> Resul
|
||||
for entry in old_output_entries {
|
||||
if entry.is_dir() {
|
||||
if let Err(e) = std::fs::remove_dir_all(&entry) {
|
||||
log::error!("Remove old output error: {}", e);
|
||||
log::error!("Remove old output error: {e}");
|
||||
}
|
||||
} else if let Err(e) = std::fs::remove_file(&entry) {
|
||||
log::error!("Remove old output error: {}", e);
|
||||
log::error!("Remove old output error: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -216,10 +201,7 @@ pub async fn update_subtitle_generator_type(
|
||||
state: state_type!(),
|
||||
subtitle_generator_type: String,
|
||||
) -> Result<(), ()> {
|
||||
log::info!(
|
||||
"Updating subtitle generator type to {}",
|
||||
subtitle_generator_type
|
||||
);
|
||||
log::info!("Updating subtitle generator type to {subtitle_generator_type}");
|
||||
let mut config = state.config.write().await;
|
||||
config.subtitle_generator_type = subtitle_generator_type;
|
||||
config.save();
|
||||
@@ -240,7 +222,7 @@ pub async fn update_openai_api_endpoint(
|
||||
state: state_type!(),
|
||||
openai_api_endpoint: String,
|
||||
) -> Result<(), ()> {
|
||||
log::info!("Updating openai api endpoint to {}", openai_api_endpoint);
|
||||
log::info!("Updating openai api endpoint to {openai_api_endpoint}");
|
||||
let mut config = state.config.write().await;
|
||||
config.openai_api_endpoint = openai_api_endpoint;
|
||||
config.save();
|
||||
@@ -268,9 +250,12 @@ pub async fn update_status_check_interval(
|
||||
if interval < 10 {
|
||||
interval = 10; // Minimum interval of 10 seconds
|
||||
}
|
||||
log::info!("Updating status check interval to {} seconds", interval);
|
||||
state.config.write().await.status_check_interval = interval;
|
||||
state.config.write().await.save();
|
||||
log::info!("Updating status check interval to {interval} seconds");
|
||||
state
|
||||
.config
|
||||
.write()
|
||||
.await
|
||||
.set_status_check_interval(interval);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -279,30 +264,15 @@ pub async fn update_whisper_language(
|
||||
state: state_type!(),
|
||||
whisper_language: String,
|
||||
) -> Result<(), ()> {
|
||||
log::info!("Updating whisper language to {}", whisper_language);
|
||||
log::info!("Updating whisper language to {whisper_language}");
|
||||
state.config.write().await.whisper_language = whisper_language;
|
||||
state.config.write().await.save();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn update_user_agent(state: state_type!(), user_agent: String) -> Result<(), ()> {
|
||||
log::info!("Updating user agent to {}", user_agent);
|
||||
state.config.write().await.set_user_agent(&user_agent);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
#[cfg(feature = "gui")]
|
||||
pub async fn update_cleanup_source_flv(state: state_type!(), cleanup: bool) -> Result<(), ()> {
|
||||
log::info!("Updating cleanup source FLV after import to {}", cleanup);
|
||||
state.config.write().await.set_cleanup_source_flv(cleanup);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn update_webhook_url(state: state_type!(), webhook_url: String) -> Result<(), ()> {
|
||||
log::info!("Updating webhook url to {}", webhook_url);
|
||||
log::info!("Updating webhook url to {webhook_url}");
|
||||
let _ = state
|
||||
.webhook_poster
|
||||
.update_config(crate::webhook::poster::WebhookConfig {
|
||||
@@ -314,3 +284,18 @@ pub async fn update_webhook_url(state: state_type!(), webhook_url: String) -> Re
|
||||
state.config.write().await.save();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn update_danmu_ass_options(
|
||||
state: state_type!(),
|
||||
font_size: f64,
|
||||
opacity: f64,
|
||||
) -> Result<(), ()> {
|
||||
log::info!("Updating danmu ass options");
|
||||
state
|
||||
.config
|
||||
.write()
|
||||
.await
|
||||
.set_danmu_ass_options(Danmu2AssOptions { font_size, opacity });
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -13,10 +13,3 @@ use crate::database::account::AccountRow;
|
||||
pub struct AccountInfo {
|
||||
pub accounts: Vec<AccountRow>,
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
pub struct DiskInfo {
|
||||
pub disk: String,
|
||||
pub total: u64,
|
||||
pub free: u64,
|
||||
}
|
||||
|
||||
@@ -1,13 +1,24 @@
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::danmu2ass;
|
||||
use crate::database::record::RecordRow;
|
||||
use crate::database::recorder::RecorderRow;
|
||||
use crate::recorder::danmu::DanmuEntry;
|
||||
use crate::recorder::PlatformType;
|
||||
use crate::recorder::RecorderInfo;
|
||||
use crate::database::task::TaskRow;
|
||||
use crate::progress::progress_reporter::EventEmitter;
|
||||
use crate::progress::progress_reporter::ProgressReporter;
|
||||
use crate::progress::progress_reporter::ProgressReporterTrait;
|
||||
use crate::recorder_manager::RecorderList;
|
||||
use crate::state::State;
|
||||
use crate::state_type;
|
||||
use crate::task::Task;
|
||||
use crate::task::TaskPriority;
|
||||
use crate::webhook::events;
|
||||
use recorder::account::Account;
|
||||
use recorder::danmu::DanmuEntry;
|
||||
use recorder::platforms::bilibili;
|
||||
use recorder::platforms::douyin;
|
||||
use recorder::platforms::PlatformType;
|
||||
use recorder::RecorderInfo;
|
||||
|
||||
#[cfg(feature = "gui")]
|
||||
use tauri::State as TauriState;
|
||||
@@ -24,42 +35,55 @@ pub async fn get_recorder_list(state: state_type!()) -> Result<RecorderList, ()>
|
||||
pub async fn add_recorder(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
extra: String,
|
||||
room_id: String,
|
||||
mut extra: String,
|
||||
) -> Result<RecorderRow, String> {
|
||||
log::info!("Add recorder: {} {}", platform, room_id);
|
||||
log::info!("Add recorder: {platform} {room_id}");
|
||||
let platform = PlatformType::from_str(&platform).unwrap();
|
||||
let account = match platform {
|
||||
PlatformType::BiliBili => {
|
||||
if let Ok(account) = state.db.get_account_by_platform("bilibili").await {
|
||||
Ok(account)
|
||||
Ok(account.to_account())
|
||||
} else {
|
||||
log::error!("No available bilibili account found");
|
||||
Err("没有可用账号,请先添加账号".to_string())
|
||||
}
|
||||
}
|
||||
PlatformType::Douyin => {
|
||||
let client = reqwest::Client::new();
|
||||
let sec_uid = douyin::api::get_room_owner_sec_uid(&client, &room_id)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
extra = sec_uid;
|
||||
|
||||
if let Ok(account) = state.db.get_account_by_platform("douyin").await {
|
||||
Ok(account)
|
||||
Ok(account.to_account())
|
||||
} else {
|
||||
log::error!("No available douyin account found");
|
||||
Err("没有可用账号,请先添加账号".to_string())
|
||||
}
|
||||
}
|
||||
PlatformType::Huya => {
|
||||
if let Ok(account) = state.db.get_account_by_platform("huya").await {
|
||||
Ok(account.to_account())
|
||||
} else {
|
||||
Ok(Account::default())
|
||||
}
|
||||
}
|
||||
_ => Err("不支持的平台".to_string()),
|
||||
};
|
||||
|
||||
match account {
|
||||
Ok(account) => match state
|
||||
.recorder_manager
|
||||
.add_recorder(&account, platform, room_id, &extra, true)
|
||||
.add_recorder(&account, platform, &room_id, &extra, true)
|
||||
.await
|
||||
{
|
||||
Ok(()) => {
|
||||
let room = state.db.add_recorder(platform, room_id, &extra).await?;
|
||||
let room = state.db.add_recorder(platform, &room_id, &extra).await?;
|
||||
state
|
||||
.db
|
||||
.new_message("添加直播间", &format!("添加了新直播间 {}", room_id))
|
||||
.new_message("添加直播间", &format!("添加了新直播间 {room_id}"))
|
||||
.await?;
|
||||
// post webhook event
|
||||
let event = events::new_webhook_event(
|
||||
@@ -67,18 +91,18 @@ pub async fn add_recorder(
|
||||
events::Payload::Recorder(room.clone()),
|
||||
);
|
||||
if let Err(e) = state.webhook_poster.post_event(&event).await {
|
||||
log::error!("Post webhook event error: {}", e);
|
||||
log::error!("Post webhook event error: {e}");
|
||||
}
|
||||
Ok(room)
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to add recorder: {}", e);
|
||||
Err(format!("添加失败: {}", e))
|
||||
log::error!("Failed to add recorder: {e}");
|
||||
Err(format!("添加失败: {e}"))
|
||||
}
|
||||
},
|
||||
Err(e) => {
|
||||
log::error!("Failed to add recorder: {}", e);
|
||||
Err(format!("添加失败: {}", e))
|
||||
log::error!("Failed to add recorder: {e}");
|
||||
Err(format!("添加失败: {e}"))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -87,19 +111,19 @@ pub async fn add_recorder(
|
||||
pub async fn remove_recorder(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
) -> Result<(), String> {
|
||||
log::info!("Remove recorder: {} {}", platform, room_id);
|
||||
log::info!("Remove recorder: {platform} {room_id}");
|
||||
let platform = PlatformType::from_str(&platform).unwrap();
|
||||
match state
|
||||
.recorder_manager
|
||||
.remove_recorder(platform, room_id)
|
||||
.remove_recorder(platform, &room_id)
|
||||
.await
|
||||
{
|
||||
Ok(recorder) => {
|
||||
state
|
||||
.db
|
||||
.new_message("移除直播间", &format!("移除了直播间 {}", room_id))
|
||||
.new_message("移除直播间", &format!("移除了直播间 {room_id}"))
|
||||
.await?;
|
||||
// post webhook event
|
||||
let event = events::new_webhook_event(
|
||||
@@ -107,13 +131,13 @@ pub async fn remove_recorder(
|
||||
events::Payload::Recorder(recorder),
|
||||
);
|
||||
if let Err(e) = state.webhook_poster.post_event(&event).await {
|
||||
log::error!("Post webhook event error: {}", e);
|
||||
log::error!("Post webhook event error: {e}");
|
||||
}
|
||||
log::info!("Removed recorder: {} {}", platform.as_str(), room_id);
|
||||
Ok(())
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to remove recorder: {}", e);
|
||||
log::error!("Failed to remove recorder: {e}");
|
||||
Err(e.to_string())
|
||||
}
|
||||
}
|
||||
@@ -123,12 +147,12 @@ pub async fn remove_recorder(
|
||||
pub async fn get_room_info(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
) -> Result<RecorderInfo, String> {
|
||||
let platform = PlatformType::from_str(&platform).unwrap();
|
||||
if let Some(info) = state
|
||||
.recorder_manager
|
||||
.get_recorder_info(platform, room_id)
|
||||
.get_recorder_info(platform, &room_id)
|
||||
.await
|
||||
{
|
||||
Ok(info)
|
||||
@@ -138,32 +162,44 @@ pub async fn get_room_info(
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_archive_disk_usage(state: state_type!()) -> Result<u64, String> {
|
||||
pub async fn get_archive_disk_usage(state: state_type!()) -> Result<i64, String> {
|
||||
Ok(state.recorder_manager.get_archive_disk_usage().await?)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_archives(
|
||||
state: state_type!(),
|
||||
room_id: u64,
|
||||
offset: u64,
|
||||
limit: u64,
|
||||
room_id: String,
|
||||
offset: i64,
|
||||
limit: i64,
|
||||
) -> Result<Vec<RecordRow>, String> {
|
||||
Ok(state
|
||||
.recorder_manager
|
||||
.get_archives(room_id, offset, limit)
|
||||
.get_archives(&room_id, offset, limit)
|
||||
.await?)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_archive(
|
||||
state: state_type!(),
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
live_id: String,
|
||||
) -> Result<RecordRow, String> {
|
||||
Ok(state
|
||||
.recorder_manager
|
||||
.get_archive(room_id, &live_id)
|
||||
.get_archive(&room_id, &live_id)
|
||||
.await?)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_archives_by_parent_id(
|
||||
state: state_type!(),
|
||||
room_id: String,
|
||||
parent_id: String,
|
||||
) -> Result<Vec<RecordRow>, String> {
|
||||
Ok(state
|
||||
.db
|
||||
.get_archives_by_parent_id(&room_id, &parent_id)
|
||||
.await?)
|
||||
}
|
||||
|
||||
@@ -171,16 +207,13 @@ pub async fn get_archive(
|
||||
pub async fn get_archive_subtitle(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
live_id: String,
|
||||
) -> Result<String, String> {
|
||||
let platform = PlatformType::from_str(&platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
let platform = PlatformType::from_str(&platform)?;
|
||||
Ok(state
|
||||
.recorder_manager
|
||||
.get_archive_subtitle(platform.unwrap(), room_id, &live_id)
|
||||
.get_archive_subtitle(platform, &room_id, &live_id)
|
||||
.await?)
|
||||
}
|
||||
|
||||
@@ -188,16 +221,13 @@ pub async fn get_archive_subtitle(
|
||||
pub async fn generate_archive_subtitle(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
live_id: String,
|
||||
) -> Result<String, String> {
|
||||
let platform = PlatformType::from_str(&platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
let platform = PlatformType::from_str(&platform)?;
|
||||
Ok(state
|
||||
.recorder_manager
|
||||
.generate_archive_subtitle(platform.unwrap(), room_id, &live_id)
|
||||
.generate_archive_subtitle(platform, &room_id, &live_id)
|
||||
.await?)
|
||||
}
|
||||
|
||||
@@ -205,29 +235,26 @@ pub async fn generate_archive_subtitle(
|
||||
pub async fn delete_archive(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
live_id: String,
|
||||
) -> Result<(), String> {
|
||||
let platform = PlatformType::from_str(&platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
let platform = PlatformType::from_str(&platform)?;
|
||||
let to_delete = state
|
||||
.recorder_manager
|
||||
.delete_archive(platform.unwrap(), room_id, &live_id)
|
||||
.delete_archive(platform, &room_id, &live_id)
|
||||
.await?;
|
||||
state
|
||||
.db
|
||||
.new_message(
|
||||
"删除历史缓存",
|
||||
&format!("删除了房间 {} 的历史缓存 {}", room_id, live_id),
|
||||
&format!("删除了房间 {room_id} 的历史缓存 {live_id}"),
|
||||
)
|
||||
.await?;
|
||||
// post webhook event
|
||||
let event =
|
||||
events::new_webhook_event(events::ARCHIVE_DELETED, events::Payload::Archive(to_delete));
|
||||
if let Err(e) = state.webhook_poster.post_event(&event).await {
|
||||
log::error!("Post webhook event error: {}", e);
|
||||
log::error!("Post webhook event error: {e}");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -236,19 +263,19 @@ pub async fn delete_archive(
|
||||
pub async fn delete_archives(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
live_ids: Vec<String>,
|
||||
) -> Result<(), String> {
|
||||
let platform = PlatformType::from_str(&platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
let platform = PlatformType::from_str(&platform)?;
|
||||
let to_deletes = state
|
||||
.recorder_manager
|
||||
.delete_archives(
|
||||
platform.unwrap(),
|
||||
room_id,
|
||||
&live_ids.iter().map(|s| s.as_str()).collect::<Vec<&str>>(),
|
||||
platform,
|
||||
&room_id,
|
||||
&live_ids
|
||||
.iter()
|
||||
.map(std::string::String::as_str)
|
||||
.collect::<Vec<&str>>(),
|
||||
)
|
||||
.await?;
|
||||
state
|
||||
@@ -263,7 +290,7 @@ pub async fn delete_archives(
|
||||
let event =
|
||||
events::new_webhook_event(events::ARCHIVE_DELETED, events::Payload::Archive(to_delete));
|
||||
if let Err(e) = state.webhook_poster.post_event(&event).await {
|
||||
log::error!("Post webhook event error: {}", e);
|
||||
log::error!("Post webhook event error: {e}");
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
@@ -273,16 +300,13 @@ pub async fn delete_archives(
|
||||
pub async fn get_danmu_record(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
live_id: String,
|
||||
) -> Result<Vec<DanmuEntry>, String> {
|
||||
let platform = PlatformType::from_str(&platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
let platform = PlatformType::from_str(&platform)?;
|
||||
Ok(state
|
||||
.recorder_manager
|
||||
.get_danmu(platform.unwrap(), room_id, &live_id)
|
||||
.load_danmus(platform, &room_id, &live_id)
|
||||
.await?)
|
||||
}
|
||||
|
||||
@@ -290,7 +314,7 @@ pub async fn get_danmu_record(
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ExportDanmuOptions {
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
live_id: String,
|
||||
x: i64,
|
||||
y: i64,
|
||||
@@ -302,13 +326,10 @@ pub async fn export_danmu(
|
||||
state: state_type!(),
|
||||
options: ExportDanmuOptions,
|
||||
) -> Result<String, String> {
|
||||
let platform = PlatformType::from_str(&options.platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
let platform = PlatformType::from_str(&options.platform)?;
|
||||
let mut danmus = state
|
||||
.recorder_manager
|
||||
.get_danmu(platform.unwrap(), options.room_id, &options.live_id)
|
||||
.load_danmus(platform, &options.room_id, &options.live_id)
|
||||
.await?;
|
||||
|
||||
log::debug!("First danmu entry: {:?}", danmus.first());
|
||||
@@ -321,7 +342,10 @@ pub async fn export_danmu(
|
||||
}
|
||||
|
||||
if options.ass {
|
||||
Ok(danmu2ass::danmu_to_ass(danmus))
|
||||
Ok(danmu2ass::danmu_to_ass(
|
||||
danmus,
|
||||
danmu2ass::Danmu2AssOptions::default(),
|
||||
))
|
||||
} else {
|
||||
// map and join entries
|
||||
Ok(danmus
|
||||
@@ -335,23 +359,23 @@ pub async fn export_danmu(
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn send_danmaku(
|
||||
state: state_type!(),
|
||||
uid: u64,
|
||||
room_id: u64,
|
||||
uid: String,
|
||||
room_id: String,
|
||||
message: String,
|
||||
) -> Result<(), String> {
|
||||
let account = state.db.get_account("bilibili", uid).await?;
|
||||
state
|
||||
.client
|
||||
.send_danmaku(&account, room_id, &message)
|
||||
.await?;
|
||||
Ok(())
|
||||
let account = state.db.get_account("bilibili", &uid).await?;
|
||||
let client = reqwest::Client::new();
|
||||
match bilibili::api::send_danmaku(&client, &account.to_account(), &room_id, &message).await {
|
||||
Ok(()) => Ok(()),
|
||||
Err(e) => Err(e.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_total_length(state: state_type!()) -> Result<i64, String> {
|
||||
match state.db.get_total_length().await {
|
||||
Ok(total_length) => Ok(total_length),
|
||||
Err(e) => Err(format!("Failed to get total length: {}", e)),
|
||||
Err(e) => Err(format!("Failed to get total length: {e}")),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -359,20 +383,20 @@ pub async fn get_total_length(state: state_type!()) -> Result<i64, String> {
|
||||
pub async fn get_today_record_count(state: state_type!()) -> Result<i64, String> {
|
||||
match state.db.get_today_record_count().await {
|
||||
Ok(count) => Ok(count),
|
||||
Err(e) => Err(format!("Failed to get today record count: {}", e)),
|
||||
Err(e) => Err(format!("Failed to get today record count: {e}")),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn get_recent_record(
|
||||
state: state_type!(),
|
||||
room_id: u64,
|
||||
offset: u64,
|
||||
limit: u64,
|
||||
room_id: String,
|
||||
offset: i64,
|
||||
limit: i64,
|
||||
) -> Result<Vec<RecordRow>, String> {
|
||||
match state.db.get_recent_record(room_id, offset, limit).await {
|
||||
match state.db.get_recent_record(&room_id, offset, limit).await {
|
||||
Ok(records) => Ok(records),
|
||||
Err(e) => Err(format!("Failed to get recent record: {}", e)),
|
||||
Err(e) => Err(format!("Failed to get recent record: {e}")),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -380,17 +404,14 @@ pub async fn get_recent_record(
|
||||
pub async fn set_enable(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
enabled: bool,
|
||||
) -> Result<(), String> {
|
||||
log::info!("Set enable for recorder {platform} {room_id} {enabled}");
|
||||
let platform = PlatformType::from_str(&platform);
|
||||
if platform.is_none() {
|
||||
return Err("Unsupported platform".to_string());
|
||||
}
|
||||
let platform = PlatformType::from_str(&platform)?;
|
||||
state
|
||||
.recorder_manager
|
||||
.set_enable(platform.unwrap(), room_id, enabled)
|
||||
.set_enable(platform, &room_id, enabled)
|
||||
.await;
|
||||
Ok(())
|
||||
}
|
||||
@@ -409,3 +430,81 @@ pub async fn fetch_hls(state: state_type!(), uri: String) -> Result<Vec<u8>, Str
|
||||
.await
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn generate_whole_clip(
|
||||
state: state_type!(),
|
||||
encode_danmu: bool,
|
||||
platform: String,
|
||||
room_id: String,
|
||||
parent_id: String,
|
||||
) -> Result<TaskRow, String> {
|
||||
log::info!("Generate whole clip for {platform} {room_id} {parent_id}");
|
||||
|
||||
let task = state
|
||||
.db
|
||||
.generate_task(
|
||||
"generate_whole_clip",
|
||||
"",
|
||||
&serde_json::json!({
|
||||
"platform": platform,
|
||||
"room_id": room_id,
|
||||
"parent_id": parent_id,
|
||||
})
|
||||
.to_string(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
#[cfg(feature = "gui")]
|
||||
let emitter = EventEmitter::new(state.app_handle.clone());
|
||||
#[cfg(feature = "headless")]
|
||||
let emitter = EventEmitter::new(state.progress_manager.get_event_sender());
|
||||
let reporter = ProgressReporter::new(state.db.clone(), &emitter, &task.id).await?;
|
||||
|
||||
log::info!("Create task: {} {}", task.id, task.task_type);
|
||||
// create a tokio task to run in background
|
||||
#[cfg(feature = "gui")]
|
||||
let state_clone = (*state).clone();
|
||||
#[cfg(feature = "headless")]
|
||||
let state_clone = state.clone();
|
||||
|
||||
let task_id = task.id.clone();
|
||||
state
|
||||
.task_manager
|
||||
.add_task(Task::new(
|
||||
task_id.clone(),
|
||||
TaskPriority::Normal,
|
||||
async move {
|
||||
match state_clone
|
||||
.recorder_manager
|
||||
.generate_whole_clip(
|
||||
Some(&reporter),
|
||||
encode_danmu,
|
||||
platform,
|
||||
&room_id,
|
||||
parent_id,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(()) => {
|
||||
reporter.finish(true, "切片生成完成").await;
|
||||
let _ = state_clone
|
||||
.db
|
||||
.update_task(&task_id, "success", "切片生成完成", None)
|
||||
.await;
|
||||
Ok(())
|
||||
}
|
||||
Err(e) => {
|
||||
reporter.finish(false, &format!("切片生成失败: {e}")).await;
|
||||
let _ = state_clone
|
||||
.db
|
||||
.update_task(&task_id, "failed", &format!("切片生成失败: {e}"), None)
|
||||
.await;
|
||||
Err(format!("切片生成失败: {e}"))
|
||||
}
|
||||
}
|
||||
},
|
||||
))
|
||||
.await?;
|
||||
Ok(task)
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ use crate::state_type;
|
||||
|
||||
#[cfg(feature = "gui")]
|
||||
use {
|
||||
crate::recorder::PlatformType,
|
||||
recorder::platforms::PlatformType,
|
||||
std::process::Command,
|
||||
tauri::State as TauriState,
|
||||
tauri::{Manager, Theme},
|
||||
@@ -57,9 +57,13 @@ pub fn show_in_folder(path: String) {
|
||||
path2.into_os_string().into_string().unwrap()
|
||||
}
|
||||
};
|
||||
Command::new("xdg-open").arg(&new_path).spawn().unwrap();
|
||||
let _ = Command::new("xdg-open")
|
||||
.arg(&new_path)
|
||||
.spawn()
|
||||
.unwrap()
|
||||
.wait();
|
||||
} else {
|
||||
Command::new("dbus-send")
|
||||
let _ = Command::new("dbus-send")
|
||||
.args([
|
||||
"--session",
|
||||
"--dest=org.freedesktop.FileManager1",
|
||||
@@ -70,7 +74,8 @@ pub fn show_in_folder(path: String) {
|
||||
"string:\"\"",
|
||||
])
|
||||
.spawn()
|
||||
.unwrap();
|
||||
.unwrap()
|
||||
.wait();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -109,10 +114,10 @@ pub async fn get_disk_info(state: state_type!()) -> Result<DiskInfo, ()> {
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
pub async fn console_log(_state: state_type!(), level: &str, message: &str) -> Result<(), ()> {
|
||||
match level {
|
||||
"error" => log::error!("[frontend] {}", message),
|
||||
"warn" => log::warn!("[frontend] {}", message),
|
||||
"info" => log::info!("[frontend] {}", message),
|
||||
_ => log::debug!("[frontend] {}", message),
|
||||
"error" => log::error!("[frontend] {message}"),
|
||||
"warn" => log::warn!("[frontend] {message}"),
|
||||
"info" => log::info!("[frontend] {message}"),
|
||||
_ => log::debug!("[frontend] {message}"),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -139,7 +144,7 @@ pub async fn get_disk_info_inner(target: PathBuf) -> Result<DiskInfo, ()> {
|
||||
let total = parts[1].parse::<u64>().unwrap() * 1024;
|
||||
let free = parts[3].parse::<u64>().unwrap() * 1024;
|
||||
|
||||
return Ok(DiskInfo { disk, total, free });
|
||||
Ok(DiskInfo { disk, total, free })
|
||||
}
|
||||
|
||||
#[cfg(any(target_os = "windows", target_os = "macos"))]
|
||||
@@ -148,7 +153,7 @@ pub async fn get_disk_info_inner(target: PathBuf) -> Result<DiskInfo, ()> {
|
||||
let disks = sysinfo::Disks::new_with_refreshed_list();
|
||||
// get target disk info
|
||||
let mut disk_info = DiskInfo {
|
||||
disk: "".into(),
|
||||
disk: String::new(),
|
||||
total: 0,
|
||||
free: 0,
|
||||
};
|
||||
@@ -157,11 +162,11 @@ pub async fn get_disk_info_inner(target: PathBuf) -> Result<DiskInfo, ()> {
|
||||
let mut longest_match = 0;
|
||||
for disk in disks.list() {
|
||||
let mount_point = disk.mount_point().to_str().unwrap();
|
||||
if target.starts_with(mount_point) && mount_point.split("/").count() > longest_match {
|
||||
if target.starts_with(mount_point) && mount_point.split('/').count() > longest_match {
|
||||
disk_info.disk = mount_point.into();
|
||||
disk_info.total = disk.total_space();
|
||||
disk_info.free = disk.available_space();
|
||||
longest_match = mount_point.split("/").count();
|
||||
longest_match = mount_point.split('/').count();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -187,10 +192,10 @@ pub async fn export_to_file(
|
||||
}
|
||||
let mut file = file.unwrap();
|
||||
if let Err(e) = file.write_all(content.as_bytes()).await {
|
||||
return Err(format!("Write file failed: {}", e));
|
||||
return Err(format!("Write file failed: {e}"));
|
||||
}
|
||||
if let Err(e) = file.flush().await {
|
||||
return Err(format!("Flush file failed: {}", e));
|
||||
return Err(format!("Flush file failed: {e}"));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -211,21 +216,18 @@ pub async fn open_log_folder(state: state_type!()) -> Result<(), String> {
|
||||
pub async fn open_live(
|
||||
state: state_type!(),
|
||||
platform: String,
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
live_id: String,
|
||||
) -> Result<(), String> {
|
||||
log::info!("Open player window: {} {}", room_id, live_id);
|
||||
log::info!("Open player window: {room_id} {live_id}");
|
||||
#[cfg(feature = "gui")]
|
||||
{
|
||||
let platform = PlatformType::from_str(&platform).unwrap();
|
||||
let recorder_info = state
|
||||
.recorder_manager
|
||||
.get_recorder_info(platform, room_id)
|
||||
.await
|
||||
.unwrap();
|
||||
use std::str::FromStr;
|
||||
|
||||
let platform = PlatformType::from_str(&platform)?;
|
||||
let builder = tauri::WebviewWindowBuilder::new(
|
||||
&state.app_handle,
|
||||
format!("Live:{}:{}", room_id, live_id),
|
||||
format!("Live:{room_id}:{live_id}"),
|
||||
tauri::WebviewUrl::App(
|
||||
format!(
|
||||
"index_live.html?platform={}&room_id={}&live_id={}",
|
||||
@@ -236,10 +238,7 @@ pub async fn open_live(
|
||||
.into(),
|
||||
),
|
||||
)
|
||||
.title(format!(
|
||||
"Live[{}] {}",
|
||||
room_id, recorder_info.room_info.room_title
|
||||
))
|
||||
.title(format!("Live[{}] {}", room_id, live_id))
|
||||
.theme(Some(Theme::Light))
|
||||
.inner_size(1200.0, 800.0)
|
||||
.effects(WindowEffectsConfig {
|
||||
@@ -253,7 +252,7 @@ pub async fn open_live(
|
||||
});
|
||||
|
||||
if let Err(e) = builder.decorations(true).build() {
|
||||
log::error!("live window build failed: {}", e);
|
||||
log::error!("live window build failed: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -263,13 +262,13 @@ pub async fn open_live(
|
||||
#[cfg(feature = "gui")]
|
||||
#[tauri::command]
|
||||
pub async fn open_clip(state: state_type!(), video_id: i64) -> Result<(), String> {
|
||||
log::info!("Open clip window: {}", video_id);
|
||||
log::info!("Open clip window: {video_id}");
|
||||
let builder = tauri::WebviewWindowBuilder::new(
|
||||
&state.app_handle,
|
||||
format!("Clip:{}", video_id),
|
||||
tauri::WebviewUrl::App(format!("index_clip.html?id={}", video_id).into()),
|
||||
format!("Clip:{video_id}"),
|
||||
tauri::WebviewUrl::App(format!("index_clip.html?id={video_id}").into()),
|
||||
)
|
||||
.title(format!("Clip window:{}", video_id))
|
||||
.title(format!("Clip window:{video_id}"))
|
||||
.theme(Some(Theme::Light))
|
||||
.inner_size(1200.0, 800.0)
|
||||
.effects(WindowEffectsConfig {
|
||||
@@ -283,7 +282,7 @@ pub async fn open_clip(state: state_type!(), video_id: i64) -> Result<(), String
|
||||
});
|
||||
|
||||
if let Err(e) = builder.decorations(true).build() {
|
||||
log::error!("clip window build failed: {}", e);
|
||||
log::error!("clip window build failed: {e}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -303,6 +302,16 @@ pub async fn list_folder(_state: state_type!(), path: String) -> Result<Vec<Stri
|
||||
Ok(files)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "gui", tauri::command)]
|
||||
#[allow(dead_code)]
|
||||
pub async fn file_exists(_state: state_type!(), path: String) -> Result<bool, String> {
|
||||
let path = PathBuf::from(path);
|
||||
match std::fs::metadata(&path) {
|
||||
Ok(metadata) => Ok(metadata.is_file()),
|
||||
Err(_) => Ok(false),
|
||||
}
|
||||
}
|
||||
|
||||
/// 高级文件名清理函数,全面处理各种危险字符和控制字符
|
||||
///
|
||||
/// 适用于需要严格文件名清理的场景,支持中文字符
|
||||
|
||||