Compare commits
211 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6127c67cd3 | ||
|
|
183eb063bb | ||
|
|
532609c57d | ||
|
|
83c6979973 | ||
|
|
8bea9336ae | ||
|
|
617a6a0b8e | ||
|
|
140ab772d0 | ||
|
|
e7d8c8814d | ||
|
|
588559c645 | ||
|
|
e7411d25b4 | ||
|
|
ed0bd88e3b | ||
|
|
b4fb2d058a | ||
|
|
4058b425c8 | ||
|
|
55d872a38c | ||
|
|
1f666d402d | ||
|
|
64dec36773 | ||
|
|
0b3df59015 | ||
|
|
e1b4f8ede9 | ||
|
|
b8c3f0a464 | ||
|
|
ee714c855b | ||
|
|
c5165c752d | ||
|
|
8c275c2edb | ||
|
|
2fc6f673d3 | ||
|
|
b7c9d12e41 | ||
|
|
8ca2934e8d | ||
|
|
868a2a1940 | ||
|
|
a1e57c5b9c | ||
|
|
b7a76e8f10 | ||
|
|
e82159b9a2 | ||
|
|
35d068e109 | ||
|
|
136e1a3774 | ||
|
|
fc6c6adfce | ||
|
|
5981d97d5f | ||
|
|
42c12b3bf9 | ||
|
|
39a8d0d741 | ||
|
|
1db0609961 | ||
|
|
26d60cecbf | ||
|
|
a97ec33c07 | ||
|
|
9a1bc0f449 | ||
|
|
e4d1024082 | ||
|
|
3d5f97f635 | ||
|
|
7c82766549 | ||
|
|
cfe91e0782 | ||
|
|
9a26683a76 | ||
|
|
8a9344e3ee | ||
|
|
d200c7cf09 | ||
|
|
eb01d62e53 | ||
|
|
f65f375283 | ||
|
|
08979d2079 | ||
|
|
c6efe07303 | ||
|
|
7294f0ca6d | ||
|
|
eac1c09149 | ||
|
|
1e9cd61eba | ||
|
|
7b7f341fa0 | ||
|
|
ac806b49b2 | ||
|
|
f20636a107 | ||
|
|
787a30e6f7 | ||
|
|
d1d217be18 | ||
|
|
944d0a371a | ||
|
|
0df03e0c9c | ||
|
|
7ffdf65705 | ||
|
|
89cdf91a48 | ||
|
|
43ebc27044 | ||
|
|
e6159555f3 | ||
|
|
1f2508aae9 | ||
|
|
ad13f58fa7 | ||
|
|
de4959d49f | ||
|
|
b5b75129e7 | ||
|
|
84346a486f | ||
|
|
3bdcddf5a2 | ||
|
|
98f68a5e14 | ||
|
|
2249b86af3 | ||
|
|
fd889922d8 | ||
|
|
8db7c6e320 | ||
|
|
5bc4ed6dfd | ||
|
|
22ad5f7fea | ||
|
|
c0369c1a14 | ||
|
|
322f4a3ca5 | ||
|
|
4e32453441 | ||
|
|
66725b8a64 | ||
|
|
f7bcbbca83 | ||
|
|
07a3b33040 | ||
|
|
2f9b4582f8 | ||
|
|
c3f63c58cf | ||
|
|
4a3529bc2e | ||
|
|
b0355a919f | ||
|
|
cfe1a0b4b9 | ||
|
|
b655e98f35 | ||
|
|
2d1021bc42 | ||
|
|
33d74999b9 | ||
|
|
84b7dd7a3c | ||
|
|
0c678fbda3 | ||
|
|
3486f7d050 | ||
|
|
d42a1010b8 | ||
|
|
ece6ceea45 | ||
|
|
b22ebb399e | ||
|
|
4431b10cb7 | ||
|
|
01a0c929e8 | ||
|
|
b06f6e8d09 | ||
|
|
753227acbb | ||
|
|
c7dd9091d0 | ||
|
|
bae20ce011 | ||
|
|
8da4759668 | ||
|
|
eb7c6d91e9 | ||
|
|
3c24dfe8a6 | ||
|
|
bb916daaaf | ||
|
|
3931e484c2 | ||
|
|
b67e258c31 | ||
|
|
1a7e6f5a43 | ||
|
|
437204dbe6 | ||
|
|
af105277d9 | ||
|
|
7efd327a36 | ||
|
|
0141586fa9 | ||
|
|
df1d8ccac6 | ||
|
|
10b6b95e4d | ||
|
|
a58e6f77bd | ||
|
|
fe2bd80ac6 | ||
|
|
870b44a973 | ||
|
|
48fd9ca7b2 | ||
|
|
14d03b7eb9 | ||
|
|
6f1db6c038 | ||
|
|
cd2d208e5c | ||
|
|
7d6ec72002 | ||
|
|
837cb6a978 | ||
|
|
aeeb0c08d7 | ||
|
|
72d8a7f485 | ||
|
|
5d3692c7a0 | ||
|
|
7e54231bef | ||
|
|
80a885dbf3 | ||
|
|
134c6bbb5f | ||
|
|
49a153adf7 | ||
|
|
99e15b0bda | ||
|
|
4de8a73af2 | ||
|
|
d104ba3180 | ||
|
|
abf0d4748f | ||
|
|
d2a9c44601 | ||
|
|
c269558bae | ||
|
|
cc22453a40 | ||
|
|
d525d92de4 | ||
|
|
2197dfe65c | ||
|
|
38ee00f474 | ||
|
|
8fdad41c71 | ||
|
|
f269995bb7 | ||
|
|
03a2db8c44 | ||
|
|
6d9cd3c6a8 | ||
|
|
303b2f7036 | ||
|
|
ec25c2ffd9 | ||
|
|
50ab608ddb | ||
|
|
3c76be9b81 | ||
|
|
ab7f0cf0b4 | ||
|
|
f9f590c4dc | ||
|
|
8d38fe582a | ||
|
|
dc4a26561d | ||
|
|
10c1d1f3a8 | ||
|
|
66bcf53d01 | ||
|
|
8ab4b7d693 | ||
|
|
ce2f097d32 | ||
|
|
f7575cd327 | ||
|
|
8634c6a211 | ||
|
|
b070013efc | ||
|
|
d2d9112f6c | ||
|
|
9fea18f2de | ||
|
|
74480f91ce | ||
|
|
b2e13b631f | ||
|
|
001d995c8f | ||
|
|
8cb2acea88 | ||
|
|
7c0d57d84e | ||
|
|
8cb875f449 | ||
|
|
e6bbe65723 | ||
|
|
f4a71a2476 | ||
|
|
47b9362b0a | ||
|
|
c1aad0806e | ||
|
|
4ccc90f9fb | ||
|
|
7dc63440e6 | ||
|
|
4094e8b80d | ||
|
|
e27cbaf715 | ||
|
|
1f39b27d79 | ||
|
|
f45891fd95 | ||
|
|
18fe644715 | ||
|
|
40cde8c69a | ||
|
|
4b0af47906 | ||
|
|
9365b3c8cd | ||
|
|
4b9f015ea7 | ||
|
|
c42d4a084e | ||
|
|
5bb3feb05b | ||
|
|
05f776ed8b | ||
|
|
9cec809485 | ||
|
|
429f909152 | ||
|
|
084dd23df1 | ||
|
|
e55afdd739 | ||
|
|
72128a132b | ||
|
|
92ca2cddad | ||
|
|
3db0d1dfe5 | ||
|
|
57907323e6 | ||
|
|
dbdca44c5f | ||
|
|
fe1dd2201f | ||
|
|
e0ae194cc3 | ||
|
|
6fc5700457 | ||
|
|
c4fdcf86d4 | ||
|
|
3088500c8d | ||
|
|
861f3a3624 | ||
|
|
c55783e4d9 | ||
|
|
955e284d41 | ||
|
|
fc4c47427e | ||
|
|
e2d7563faa | ||
|
|
27d69f7f8d | ||
|
|
a77bb5af44 | ||
|
|
00286261a4 | ||
|
|
0b898dccaa | ||
|
|
a1d9ac4e68 | ||
|
|
4150939e23 |
51
.cursor/rules/ai-features.mdc
Normal file
@@ -0,0 +1,51 @@
|
||||
# AI Features and LangChain Integration
|
||||
|
||||
## AI Components
|
||||
|
||||
- **LangChain Integration**: Uses `@langchain/core`, `@langchain/deepseek`,
|
||||
`@langchain/langgraph`, `@langchain/ollama`
|
||||
- **Whisper Transcription**: Local and online transcription via `whisper-rs` in
|
||||
Rust backend
|
||||
- **AI Agent**: Located in [src/lib/agent/](mdc:src/lib/agent/) directory
|
||||
|
||||
## Frontend AI Features
|
||||
|
||||
- **AI Page**: [src/page/AI.svelte](mdc:src/page/AI.svelte) - Main AI interface
|
||||
- **Agent Logic**: [src/lib/agent/](mdc:src/lib/agent/) - AI agent implementation
|
||||
- **Interface**: [src/lib/interface.ts](mdc:src/lib/interface.ts)
|
||||
\- AI communication layer
|
||||
|
||||
## Backend AI Features
|
||||
|
||||
- **Subtitle Generation**:
|
||||
[src-tauri/src/subtitle_generator/](mdc:src-tauri/src/subtitle_generator/) -
|
||||
AI-powered subtitle creation
|
||||
- **Whisper Integration**:
|
||||
[src-tauri/src/subtitle_generator.rs](mdc:src-tauri/src/subtitle_generator.rs)
|
||||
\- Speech-to-text processing
|
||||
- **CUDA Support**: Optional CUDA acceleration for Whisper via feature flag
|
||||
|
||||
## AI Workflows
|
||||
|
||||
- **Live Transcription**: Real-time speech-to-text during live streams
|
||||
- **Content Summarization**: AI-powered content analysis and summarization
|
||||
- **Smart Editing**: AI-assisted video editing and clip generation
|
||||
- **Danmaku Processing**: AI analysis of danmaku (bullet comments) streams
|
||||
|
||||
## Configuration
|
||||
|
||||
- **LLM Settings**: Configure AI models in [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
- **Whisper Models**: Local model configuration for offline transcription
|
||||
- **API Keys**: External AI service configuration for online features
|
||||
|
||||
## Development Notes
|
||||
|
||||
- AI features require proper model configuration
|
||||
- CUDA feature enables GPU acceleration for Whisper
|
||||
- LangChain integration supports multiple AI providers
|
||||
- AI agent can work with both local and cloud-based models
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
62
.cursor/rules/build-deployment.mdc
Normal file
@@ -0,0 +1,62 @@
|
||||
# Build and Deployment Configuration
|
||||
|
||||
## Build Scripts
|
||||
|
||||
- **PowerShell**: [build.ps1](mdc:build.ps1) - Windows build script
|
||||
- **FFmpeg Setup**: [ffmpeg_setup.ps1](mdc:ffmpeg_setup.ps1)
|
||||
\- FFmpeg installation script
|
||||
- **Version Bump**: [scripts/bump.cjs](mdc:scripts/bump.cjs)
|
||||
\- Version management script
|
||||
|
||||
## Package Management
|
||||
|
||||
- **Node.js**: [package.json](mdc:package.json) - Frontend dependencies and scripts
|
||||
- **Rust**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml)
|
||||
\- Backend dependencies and features
|
||||
- **Lock Files**: [yarn.lock](mdc:yarn.lock) - Yarn dependency lock
|
||||
|
||||
## Build Configuration
|
||||
|
||||
- **Vite**: [vite.config.ts](mdc:vite.config.ts) - Frontend build tool configuration
|
||||
- **Tailwind**: [tailwind.config.cjs](mdc:tailwind.config.cjs) - CSS framework configuration
|
||||
- **PostCSS**: [postcss.config.cjs](mdc:postcss.config.cjs) - CSS processing configuration
|
||||
- **TypeScript**: [tsconfig.json](mdc:tsconfig.json),
|
||||
[tsconfig.node.json](mdc:tsconfig.node.json) - TypeScript configuration
|
||||
|
||||
## Tauri Configuration
|
||||
|
||||
- **Main Config**: [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
|
||||
\- Core Tauri settings
|
||||
- **Platform Configs**:
|
||||
- [src-tauri/tauri.macos.conf.json](mdc:src-tauri/tauri.macos.conf.json)
|
||||
\- macOS specific
|
||||
- [src-tauri/tauri.linux.conf.json](mdc:src-tauri/tauri.linux.conf.json)
|
||||
\- Linux specific
|
||||
- [src-tauri/tauri.windows.conf.json](mdc:src-tauri/tauri.windows.conf.json)
|
||||
\- Windows specific
|
||||
- [src-tauri/tauri.windows.cuda.conf.json](mdc:src-tauri/tauri.windows.cuda.conf.json)
|
||||
\- Windows with CUDA
|
||||
|
||||
## Docker Support
|
||||
|
||||
- **Dockerfile**: [Dockerfile](mdc:Dockerfile) - Container deployment configuration
|
||||
- **Documentation**: [docs/](mdc:docs/) - VitePress-based documentation site
|
||||
|
||||
## Build Commands
|
||||
|
||||
- **Frontend**: `yarn build` - Build production frontend
|
||||
- **Tauri**: `yarn tauri build` - Build desktop application
|
||||
- **Documentation**: `yarn docs:build` - Build documentation site
|
||||
- **Type Check**: `yarn check` - TypeScript and Svelte validation
|
||||
|
||||
## Deployment Targets
|
||||
|
||||
- **Desktop**: Native Tauri applications for Windows, macOS, Linux
|
||||
- **Docker**: Containerized deployment option
|
||||
- **Documentation**: Static site deployment via VitePress
|
||||
- **Assets**: Static asset distribution for web components
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
61
.cursor/rules/database-data.mdc
Normal file
@@ -0,0 +1,61 @@
|
||||
# Database and Data Management
|
||||
|
||||
## Database Architecture
|
||||
|
||||
- **SQLite Database**: Primary data storage using `sqlx` with async runtime
|
||||
- **Database Module**: [src-tauri/src/database/](mdc:src-tauri/src/database/)
|
||||
\- Core database operations
|
||||
- **Migration System**: [src-tauri/src/migration.rs](mdc:src-tauri/src/migration.rs)
|
||||
\- Database schema management
|
||||
|
||||
## Data Models
|
||||
|
||||
- **Recording Data**: Stream metadata, recording sessions, and file information
|
||||
- **Room Configuration**: Stream room settings and platform credentials
|
||||
- **Task Management**: Recording task status and progress tracking
|
||||
- **User Preferences**: Application settings and user configurations
|
||||
|
||||
## Frontend Data Layer
|
||||
|
||||
- **Database Interface**: [src/lib/db.ts](mdc:src/lib/db.ts)
|
||||
\- Frontend database operations
|
||||
- **Stores**: [src/lib/stores/](mdc:src/lib/stores/) - State management for data
|
||||
- **Version Management**: [src/lib/stores/version.ts](mdc:src/lib/stores/version.ts)
|
||||
\- Version tracking
|
||||
|
||||
## Data Operations
|
||||
|
||||
- **CRUD Operations**: Create, read, update, delete for all data entities
|
||||
- **Query Optimization**: Efficient SQL queries with proper indexing
|
||||
- **Transaction Support**: ACID compliance for critical operations
|
||||
- **Data Validation**: Input validation and sanitization
|
||||
|
||||
## File Management
|
||||
|
||||
- **Cache Directory**: [src-tauri/cache/](mdc:src-tauri/cache/)
|
||||
\- Temporary file storage
|
||||
- **Upload Directory**: [src-tauri/cache/uploads/](mdc:src-tauri/cache/uploads/)
|
||||
\- User upload storage
|
||||
- **Bilibili Cache**: [src-tauri/cache/bilibili/](mdc:src-tauri/cache/bilibili/)
|
||||
\- Platform-specific cache
|
||||
|
||||
## Data Persistence
|
||||
|
||||
- **SQLite Files**: [src-tauri/data/data_v2.db](mdc:src-tauri/data/data_v2.db)
|
||||
\- Main database file
|
||||
- **Write-Ahead Logging**: WAL mode for concurrent access and performance
|
||||
- **Backup Strategy**: Database backup and recovery procedures
|
||||
- **Migration Handling**: Automatic schema updates and data migration
|
||||
|
||||
## Development Guidelines
|
||||
|
||||
- Use prepared statements to prevent SQL injection
|
||||
- Implement proper error handling for database operations
|
||||
- Use transactions for multi-step operations
|
||||
- Follow database naming conventions consistently
|
||||
- Test database operations with sample data
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
47
.cursor/rules/frontend-development.mdc
Normal file
@@ -0,0 +1,47 @@
|
||||
# Frontend Development Guidelines
|
||||
|
||||
## Svelte 3 Best Practices
|
||||
|
||||
- Use Svelte 3 syntax with `<script>` tags for component logic
|
||||
- Prefer reactive statements with `$:` for derived state
|
||||
- Use stores from [src/lib/stores/](mdc:src/lib/stores/) for global state management
|
||||
- Import components from [src/lib/components/](mdc:src/lib/components/)
|
||||
|
||||
## TypeScript Configuration
|
||||
|
||||
- Follow the configuration in [tsconfig.json](mdc:tsconfig.json)
|
||||
- Use strict type checking with `checkJs: true`
|
||||
- Extends `@tsconfig/svelte` for Svelte-specific TypeScript settings
|
||||
- Base URL is set to workspace root for clean imports
|
||||
|
||||
## Component Structure
|
||||
|
||||
- **Page components**: Located in [src/page/](mdc:src/page/) directory
|
||||
- **Reusable components**: Located in [src/lib/components/](mdc:src/lib/components/)
|
||||
directory
|
||||
- **Layout components**: [src/App.svelte](mdc:src/App.svelte),
|
||||
[src/AppClip.svelte](mdc:src/AppClip.svelte), [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
|
||||
## Styling
|
||||
|
||||
- Use Tailwind CSS classes for styling
|
||||
- Configuration in [tailwind.config.cjs](mdc:tailwind.config.cjs)
|
||||
- PostCSS configuration in [postcss.config.cjs](mdc:postcss.config.cjs)
|
||||
- Global styles in [src/styles.css](mdc:src/styles.css)
|
||||
|
||||
## Entry Points
|
||||
|
||||
- **Main app**: [src/main.ts](mdc:src/main.ts) - Main application entry
|
||||
- **Clip mode**: [src/main_clip.ts](mdc:src/main_clip.ts) - Clip editing interface
|
||||
- **Live mode**: [src/main_live.ts](mdc:src/main_live.ts) - Live streaming interface
|
||||
|
||||
## Development Workflow
|
||||
|
||||
- Use `yarn dev` for frontend-only development
|
||||
- Use `yarn tauri dev` for full Tauri development
|
||||
- Use `yarn check` for TypeScript and Svelte type checking
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
53
.cursor/rules/project-overview.mdc
Normal file
@@ -0,0 +1,53 @@
|
||||
# BiliBili ShadowReplay Project Overview
|
||||
|
||||
This is a Tauri-based desktop application for caching live streams and performing
|
||||
real-time editing and submission. It supports Bilibili and Douyin platforms.
|
||||
|
||||
## Project Structure
|
||||
|
||||
### Frontend (Svelte + TypeScript)
|
||||
|
||||
- **Main entry points**: [src/main.ts](mdc:src/main.ts),
|
||||
[src/main_clip.ts](mdc:src/main_clip.ts), [src/main_live.ts](mdc:src/main_live.ts)
|
||||
- **App components**: [src/App.svelte](mdc:src/App.svelte),
|
||||
[src/AppClip.svelte](mdc:src/AppClip.svelte), [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
- **Pages**: Located in [src/page/](mdc:src/page/) directory
|
||||
- **Components**: Located in [src/lib/components/](mdc:src/lib/components/) directory
|
||||
- **Stores**: Located in [src/lib/stores/](mdc:src/lib/stores/) directory
|
||||
|
||||
### Backend (Rust + Tauri)
|
||||
|
||||
- **Main entry**: [src-tauri/src/main.rs](mdc:src-tauri/src/main.rs)
|
||||
- **Core modules**:
|
||||
- [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/) - Stream recording functionality
|
||||
- [src-tauri/src/database/](mdc:src-tauri/src/database/) - Database operations
|
||||
- [src-tauri/src/handlers/](mdc:src-tauri/src/handlers/) - Tauri command handlers
|
||||
- **Custom crate**:
|
||||
[src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/) -
|
||||
Danmaku stream processing
|
||||
|
||||
### Configuration
|
||||
|
||||
- **Frontend config**: [tsconfig.json](mdc:tsconfig.json),
|
||||
[vite.config.ts](mdc:vite.config.ts), [tailwind.config.cjs](mdc:tailwind.config.cjs)
|
||||
- **Backend config**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml), [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
|
||||
- **Example config**: [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
|
||||
## Key Technologies
|
||||
|
||||
- **Frontend**: Svelte 3, TypeScript, Tailwind CSS, Flowbite
|
||||
- **Backend**: Rust, Tauri 2, SQLite, FFmpeg
|
||||
- **AI Features**: LangChain, Whisper for transcription
|
||||
- **Build Tools**: Vite, VitePress for documentation
|
||||
|
||||
## Development Commands
|
||||
|
||||
- `yarn dev` - Start development server
|
||||
- `yarn tauri dev` - Start Tauri development
|
||||
- `yarn build` - Build frontend
|
||||
- `yarn docs:dev` - Start documentation server
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
56
.cursor/rules/rust-backend.mdc
Normal file
@@ -0,0 +1,56 @@
|
||||
# Rust Backend Development Guidelines
|
||||
|
||||
## Project Structure
|
||||
|
||||
- **Main entry**: [src-tauri/src/main.rs](mdc:src-tauri/src/main.rs)
|
||||
\- Application entry point
|
||||
- **Core modules**:
|
||||
- [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/)
|
||||
\- Stream recording and management
|
||||
- [src-tauri/src/database/](mdc:src-tauri/src/database/)
|
||||
\- SQLite database operations
|
||||
- [src-tauri/src/handlers/](mdc:src-tauri/src/handlers/)
|
||||
\- Tauri command handlers
|
||||
- [src-tauri/src/subtitle_generator/](mdc:src-tauri/src/subtitle_generator/)
|
||||
\- AI-powered subtitle generation
|
||||
|
||||
## Custom Crates
|
||||
|
||||
- **danmu_stream**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/)
|
||||
\- Danmaku stream processing library
|
||||
|
||||
## Dependencies
|
||||
|
||||
- **Tauri 2**: Core framework for desktop app functionality
|
||||
- **FFmpeg**: Video/audio processing via `async-ffmpeg-sidecar`
|
||||
- **Whisper**: AI transcription via `whisper-rs` (CUDA support available)
|
||||
- **LangChain**: AI agent functionality
|
||||
- **SQLite**: Database via `sqlx` with async runtime
|
||||
|
||||
## Configuration
|
||||
|
||||
- **Cargo.toml**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml)
|
||||
\- Dependencies and features
|
||||
- **Tauri config**: [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
|
||||
\- App configuration
|
||||
- **Example config**: [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
\- User configuration template
|
||||
|
||||
## Features
|
||||
|
||||
- **default**: Includes GUI and core functionality
|
||||
- **cuda**: Enables CUDA acceleration for Whisper transcription
|
||||
- **headless**: Headless mode without GUI
|
||||
- **custom-protocol**: Required for production builds
|
||||
|
||||
## Development Commands
|
||||
|
||||
- `yarn tauri dev` - Start Tauri development with hot reload
|
||||
- `yarn tauri build` - Build production application
|
||||
- `cargo check` - Check Rust code without building
|
||||
- `cargo test` - Run Rust tests
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
60
.cursor/rules/streaming-recording.mdc
Normal file
@@ -0,0 +1,60 @@
|
||||
# Streaming and Recording System
|
||||
|
||||
## Core Recording Components
|
||||
|
||||
- **Recorder Manager**: [src-tauri/src/recorder_manager.rs](mdc:src-tauri/src/recorder_manager.rs)
|
||||
\- Main recording orchestration
|
||||
- **Recorder**: [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/)
|
||||
\- Individual stream recording logic
|
||||
- **Danmaku Stream**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/)
|
||||
\- Custom crate for bullet comment processing
|
||||
|
||||
## Supported Platforms
|
||||
|
||||
- **Bilibili**: Main platform support with live stream caching
|
||||
- **Douyin**: TikTok's Chinese platform support
|
||||
- **Multi-stream**: Support for recording multiple streams simultaneously
|
||||
|
||||
## Recording Features
|
||||
|
||||
- **Live Caching**: Real-time stream recording and buffering
|
||||
- **Time-based Clipping**: Extract specific time segments from recorded streams
|
||||
- **Danmaku Capture**: Record bullet comments and chat messages
|
||||
- **Quality Control**: Configurable recording quality and format options
|
||||
|
||||
## Frontend Interfaces
|
||||
|
||||
- **Live Mode**: [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
\- Live streaming interface
|
||||
- **Clip Mode**: [src/AppClip.svelte](mdc:src/AppClip.svelte)
|
||||
\- Video editing and clipping
|
||||
- **Room Management**: [src/page/Room.svelte](mdc:src/page/Room.svelte)
|
||||
\- Stream room configuration
|
||||
- **Task Management**: [src/page/Task.svelte](mdc:src/page/Task.svelte)
|
||||
\- Recording task monitoring
|
||||
|
||||
## Technical Implementation
|
||||
|
||||
- **FFmpeg Integration**: Video/audio processing via `async-ffmpeg-sidecar`
|
||||
- **M3U8 Support**: HLS stream processing with `m3u8-rs`
|
||||
- **Async Processing**: Non-blocking I/O with `tokio` runtime
|
||||
- **Database Storage**: SQLite for metadata and recording information
|
||||
|
||||
## Configuration
|
||||
|
||||
- **Recording Settings**: Configure in [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
- **FFmpeg Path**: Set FFmpeg binary location for video processing
|
||||
- **Storage Paths**: Configure cache and output directories
|
||||
- **Quality Settings**: Adjust recording bitrate and format options
|
||||
|
||||
## Development Workflow
|
||||
|
||||
- Use [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/) for core recording logic
|
||||
- Test with [src-tauri/tests/](mdc:src-tauri/tests/) directory
|
||||
- Monitor recording progress via progress manager
|
||||
- Handle errors gracefully with custom error types
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
36
.devcontainer/Dockerfile
Normal file
@@ -0,0 +1,36 @@
|
||||
ARG VARIANT=bookworm-slim
|
||||
FROM debian:${VARIANT}
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Arguments
|
||||
ARG CONTAINER_USER=vscode
|
||||
ARG CONTAINER_GROUP=vscode
|
||||
|
||||
# Install dependencies
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y \
|
||||
build-essential \
|
||||
clang \
|
||||
cmake \
|
||||
curl \
|
||||
file \
|
||||
git \
|
||||
libayatana-appindicator3-dev \
|
||||
librsvg2-dev \
|
||||
libssl-dev \
|
||||
libwebkit2gtk-4.1-dev \
|
||||
libxdo-dev \
|
||||
pkg-config \
|
||||
wget \
|
||||
&& apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts
|
||||
|
||||
# Set users
|
||||
RUN adduser --disabled-password --gecos "" ${CONTAINER_USER}
|
||||
USER ${CONTAINER_USER}
|
||||
WORKDIR /home/${CONTAINER_USER}
|
||||
|
||||
# Install rustup
|
||||
RUN curl --proto "=https" --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal
|
||||
ENV PATH=${PATH}:/home/${CONTAINER_USER}/.cargo/bin
|
||||
|
||||
CMD [ "/bin/bash" ]
|
||||
31
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,31 @@
|
||||
{
|
||||
"name": "vscode",
|
||||
"build": {
|
||||
"dockerfile": "Dockerfile",
|
||||
"args": {
|
||||
"CONTAINER_USER": "vscode",
|
||||
"CONTAINER_GROUP": "vscode"
|
||||
}
|
||||
},
|
||||
"features": {
|
||||
"ghcr.io/devcontainers/features/node:1": {
|
||||
"version": "latest"
|
||||
}
|
||||
},
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"settings": {
|
||||
"lldb.executable": "/usr/bin/lldb",
|
||||
"files.watcherExclude": {
|
||||
"**/target/**": true
|
||||
}
|
||||
},
|
||||
"extensions": [
|
||||
"vadimcn.vscode-lldb",
|
||||
"rust-lang.rust-analyzer",
|
||||
"tamasfe.even-better-toml"
|
||||
]
|
||||
}
|
||||
},
|
||||
"remoteUser": "vscode"
|
||||
}
|
||||
7
.github/CONTRIBUTING.md
vendored
@@ -12,7 +12,8 @@
|
||||
|
||||
### Windows
|
||||
|
||||
Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于 Whisper 是否使用 GPU 加速。`cpu` 版本使用 CPU 进行推理,`cuda` 版本使用 GPU 进行推理。
|
||||
Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于 Whisper 是否使用 GPU 加速。
|
||||
`cpu` 版本使用 CPU 进行推理,`cuda` 版本使用 GPU 进行推理。
|
||||
|
||||
默认运行为 `cpu` 版本,使用 `yarn tauri dev --features cuda` 命令运行 `cuda` 版本。
|
||||
|
||||
@@ -20,7 +21,9 @@ Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于
|
||||
|
||||
1. 安装 LLVM 且配置相关环境变量,详情见 [LLVM Windows Setup](https://llvm.org/docs/GettingStarted.html#building-llvm-on-windows);
|
||||
|
||||
2. 安装 CUDA Toolkit,详情见 [CUDA Windows Setup](https://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html);要注意,安装时请勾选 **VisualStudio integration**。
|
||||
2. 安装 CUDA Toolkit,详情见
|
||||
[CUDA Windows Setup](https://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html);
|
||||
要注意,安装时请勾选 **VisualStudio integration**。
|
||||
|
||||
### 常见问题
|
||||
|
||||
|
||||
21
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -1,21 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: 提交一个 BUG
|
||||
title: "[BUG]"
|
||||
labels: bug
|
||||
assignees: Xinrea
|
||||
---
|
||||
|
||||
**描述:**
|
||||
简要描述一下这个 BUG 的现象
|
||||
|
||||
**日志和截图:**
|
||||
如果可以的话,请尽量附上相关截图和日志文件(日志是位于安装目录下,名为 bsr.log 的文件)。
|
||||
|
||||
**相关信息:**
|
||||
|
||||
- 程序版本:
|
||||
- 系统类型:
|
||||
|
||||
**其他**
|
||||
任何其他想说的
|
||||
47
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
name: Bug Report
|
||||
description: 提交 BUG 报告.
|
||||
title: "[bug] "
|
||||
labels: ["bug"]
|
||||
assignees:
|
||||
- Xinrea
|
||||
body:
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: 提交须知
|
||||
description: 请确认以下内容
|
||||
options:
|
||||
- label: 我是在最新版本上发现的此问题
|
||||
required: true
|
||||
- label: 我已阅读 [常见问题](https://bsr.xinrea.cn/usage/faq.html) 的说明
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: app_type
|
||||
attributes:
|
||||
label: 以哪种方式使用的该软件?
|
||||
multiple: false
|
||||
options:
|
||||
- Docker 镜像
|
||||
- 桌面应用
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: 运行环境
|
||||
multiple: false
|
||||
options:
|
||||
- Linux
|
||||
- Windows
|
||||
- MacOS
|
||||
- Docker
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: BUG 描述
|
||||
description: 请尽可能详细描述 BUG 的现象以及复现的方法
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: 日志
|
||||
description: 请粘贴日志内容或是上传日志文件(在主窗口的设置页面,提供了一键打开日志目录所在位置的按钮;当你打开日志目录所在位置后,进入 logs 目录,找到后缀名为 log 的文件)
|
||||
validations:
|
||||
required: true
|
||||
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -1,20 +0,0 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: 提交一个新功能的建议
|
||||
title: "[feature]"
|
||||
labels: enhancement
|
||||
assignees: Xinrea
|
||||
|
||||
---
|
||||
|
||||
**遇到的问题:**
|
||||
在使用过程中遇到了什么问题让你想要提出建议
|
||||
|
||||
**想要的功能:**
|
||||
想要怎样的新功能来解决这个问题
|
||||
|
||||
**通过什么方式实现(有思路的话):**
|
||||
如果有相关的实现思路或者是参考,可以在此提供
|
||||
|
||||
**其他:**
|
||||
其他任何想说的话
|
||||
13
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
name: Feature Request
|
||||
description: 提交新功能的需求
|
||||
title: "[feature] "
|
||||
labels: ["feature"]
|
||||
assignees:
|
||||
- Xinrea
|
||||
body:
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 需求描述
|
||||
description: 请尽可能详细描述你想要的新功能
|
||||
validations:
|
||||
required: true
|
||||
21
.github/workflows/main.yml
vendored
@@ -59,11 +59,6 @@ jobs:
|
||||
if: matrix.platform == 'windows-latest' && matrix.features == 'cuda'
|
||||
uses: Jimver/cuda-toolkit@v0.2.24
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: "./src-tauri -> target"
|
||||
|
||||
- name: Setup ffmpeg
|
||||
if: matrix.platform == 'windows-latest'
|
||||
working-directory: ./
|
||||
@@ -87,6 +82,19 @@ jobs:
|
||||
Copy-Item "$cudaPath\cublas64*.dll" -Destination $targetPath
|
||||
Copy-Item "$cudaPath\cublasLt64*.dll" -Destination $targetPath
|
||||
|
||||
- name: Get previous tag
|
||||
id: get_previous_tag
|
||||
run: |
|
||||
# Get the previous tag (excluding the current one being pushed)
|
||||
PREVIOUS_TAG=$(git describe --tags --abbrev=0 HEAD~1 2>/dev/null || echo "")
|
||||
if [ -z "$PREVIOUS_TAG" ]; then
|
||||
# If no previous tag found, use the first commit
|
||||
PREVIOUS_TAG=$(git rev-list --max-parents=0 HEAD | head -1)
|
||||
fi
|
||||
echo "previous_tag=$PREVIOUS_TAG" >> $GITHUB_OUTPUT
|
||||
echo "current_tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||
shell: bash
|
||||
|
||||
- uses: tauri-apps/tauri-action@v0
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -96,8 +104,7 @@ jobs:
|
||||
with:
|
||||
tagName: v__VERSION__
|
||||
releaseName: "BiliBili ShadowReplay v__VERSION__"
|
||||
releaseBody: "See the assets to download this version and install."
|
||||
releaseBody: "> [!NOTE]\n> 如果你是第一次下载安装,请参考 [安装准备](https://bsr.xinrea.cn/getting-started/installation/desktop.html) 选择合适的版本。\n> Changelog: https://github.com/Xinrea/bili-shadowreplay/compare/${{ steps.get_previous_tag.outputs.previous_tag }}...${{ steps.get_previous_tag.outputs.current_tag }}"
|
||||
releaseDraft: true
|
||||
prerelease: false
|
||||
args: ${{ matrix.args }} ${{ matrix.platform == 'windows-latest' && matrix.features == 'cuda' && '--config src-tauri/tauri.windows.cuda.conf.json' || '' }}
|
||||
includeDebug: true
|
||||
|
||||
1
.gitignore
vendored
@@ -11,6 +11,7 @@ node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
/target/
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
|
||||
5
.markdownlint.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"MD033": {
|
||||
"allowed_elements": ["nobr", "sup"]
|
||||
}
|
||||
}
|
||||
46
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,46 @@
|
||||
fail_fast: true
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v6.0.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
exclude: '(\.json$|public/)'
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: cargo-fmt
|
||||
name: cargo fmt
|
||||
entry: cargo fmt --manifest-path src-tauri/Cargo.toml --
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- id: cargo-clippy
|
||||
name: cargo clippy
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo clippy --manifest-path src-tauri/Cargo.toml
|
||||
|
||||
- id: cargo-clippy-headless
|
||||
name: cargo clippy headless
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo clippy --manifest-path src-tauri/Cargo.toml --no-default-features --features headless
|
||||
|
||||
- id: cargo-test
|
||||
name: cargo test
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo test --manifest-path src-tauri/Cargo.toml
|
||||
|
||||
- id: cargo-test-headless
|
||||
name: cargo test headless
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo test --manifest-path src-tauri/Cargo.toml --no-default-features --features headless
|
||||
21
Dockerfile
@@ -23,7 +23,7 @@ COPY . .
|
||||
RUN yarn build
|
||||
|
||||
# Build Rust backend
|
||||
FROM rust:1.86-slim AS rust-builder
|
||||
FROM rust:1.90-slim AS rust-builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@@ -48,15 +48,9 @@ COPY src-tauri/crates ./src-tauri/crates
|
||||
WORKDIR /app/src-tauri
|
||||
RUN rustup component add rustfmt
|
||||
RUN cargo build --no-default-features --features headless --release
|
||||
# Download and install FFmpeg static build
|
||||
RUN wget https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz \
|
||||
&& tar xf ffmpeg-release-amd64-static.tar.xz \
|
||||
&& mv ffmpeg-*-static/ffmpeg ./ \
|
||||
&& mv ffmpeg-*-static/ffprobe ./ \
|
||||
&& rm -rf ffmpeg-*-static ffmpeg-release-amd64-static.tar.xz
|
||||
|
||||
# Final stage
|
||||
FROM debian:bookworm-slim AS final
|
||||
FROM debian:trixie-slim AS final
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@@ -65,9 +59,16 @@ RUN apt-get update && apt-get install -y \
|
||||
libssl3 \
|
||||
ca-certificates \
|
||||
fonts-wqy-microhei \
|
||||
netbase \
|
||||
nscd \
|
||||
ffmpeg \
|
||||
&& update-ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
|
||||
RUN touch /etc/netgroup
|
||||
RUN mkdir -p /var/run/nscd && chmod 755 /var/run/nscd
|
||||
|
||||
# Add /app to PATH
|
||||
ENV PATH="/app:${PATH}"
|
||||
|
||||
@@ -76,11 +77,9 @@ COPY --from=frontend-builder /app/dist ./dist
|
||||
|
||||
# Copy built Rust binary
|
||||
COPY --from=rust-builder /app/src-tauri/target/release/bili-shadowreplay .
|
||||
COPY --from=rust-builder /app/src-tauri/ffmpeg ./ffmpeg
|
||||
COPY --from=rust-builder /app/src-tauri/ffprobe ./ffprobe
|
||||
|
||||
# Expose port
|
||||
EXPOSE 3000
|
||||
|
||||
# Run the application
|
||||
CMD ["./bili-shadowreplay"]
|
||||
CMD ["sh", "-c", "nscd && ./bili-shadowreplay"]
|
||||
|
||||
13
README.md
@@ -4,24 +4,29 @@
|
||||
|
||||

|
||||

|
||||
|
||||

|
||||

|
||||
[](https://deepwiki.com/Xinrea/bili-shadowreplay)
|
||||
|
||||
BiliBili ShadowReplay 是一个缓存直播并进行实时编辑投稿的工具。通过划定时间区间,并编辑简单的必需信息,即可完成直播切片以及投稿,将整个流程压缩到分钟级。同时,也支持对缓存的历史直播进行回放,以及相同的切片编辑投稿处理流程。
|
||||
|
||||
目前仅支持 B 站和抖音平台的直播。
|
||||
|
||||

|
||||
[](https://www.star-history.com/#Xinrea/bili-shadowreplay&Date)
|
||||
|
||||
## 安装和使用
|
||||
|
||||

|
||||
|
||||
前往网站查看说明:[BiliBili ShadowReplay](https://bsr.xinrea.cn/)
|
||||
|
||||
## 参与开发
|
||||
|
||||
[Contributing](.github/CONTRIBUTING.md)
|
||||
可以通过 [DeepWiki](https://deepwiki.com/Xinrea/bili-shadowreplay) 了解本项目。
|
||||
|
||||
贡献指南:[Contributing](.github/CONTRIBUTING.md)
|
||||
|
||||
## 赞助
|
||||
|
||||

|
||||
<!-- markdownlint-disable MD033 -->
|
||||
<img src="docs/public/images/donate.png" alt="donate" width="300">
|
||||
|
||||
2
_typos.toml
Normal file
@@ -0,0 +1,2 @@
|
||||
[default.extend-identifiers]
|
||||
pull_datas = "pull_datas"
|
||||
@@ -1,7 +1,8 @@
|
||||
import { defineConfig } from "vitepress";
|
||||
import { withMermaid } from "vitepress-plugin-mermaid";
|
||||
|
||||
// https://vitepress.dev/reference/site-config
|
||||
export default defineConfig({
|
||||
export default withMermaid({
|
||||
title: "BiliBili ShadowReplay",
|
||||
description: "直播录制/实时回放/剪辑/投稿工具",
|
||||
themeConfig: {
|
||||
@@ -18,21 +19,55 @@ export default defineConfig({
|
||||
{
|
||||
text: "开始使用",
|
||||
items: [
|
||||
{ text: "安装准备", link: "/getting-started/installation" },
|
||||
{ text: "配置使用", link: "/getting-started/configuration" },
|
||||
{ text: "FFmpeg 配置", link: "/getting-started/ffmpeg" },
|
||||
{
|
||||
text: "安装准备",
|
||||
items: [
|
||||
{
|
||||
text: "桌面端安装",
|
||||
link: "/getting-started/installation/desktop",
|
||||
},
|
||||
{
|
||||
text: "Docker 安装",
|
||||
link: "/getting-started/installation/docker",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
text: "配置使用",
|
||||
items: [
|
||||
{ text: "账号配置", link: "/getting-started/config/account" },
|
||||
{ text: "FFmpeg 配置", link: "/getting-started/config/ffmpeg" },
|
||||
{ text: "Whisper 配置", link: "/getting-started/config/whisper" },
|
||||
{ text: "LLM 配置", link: "/getting-started/config/llm" },
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
text: "说明文档",
|
||||
items: [
|
||||
{ text: "功能说明", link: "/usage/features" },
|
||||
{
|
||||
text: "功能说明",
|
||||
items: [
|
||||
{ text: "工作流程", link: "/usage/features/workflow" },
|
||||
{ text: "直播间管理", link: "/usage/features/room" },
|
||||
{ text: "切片功能", link: "/usage/features/clip" },
|
||||
{ text: "字幕功能", link: "/usage/features/subtitle" },
|
||||
{ text: "弹幕功能", link: "/usage/features/danmaku" },
|
||||
{ text: "Webhook", link: "/usage/features/webhook" },
|
||||
],
|
||||
},
|
||||
{ text: "常见问题", link: "/usage/faq" },
|
||||
],
|
||||
},
|
||||
{
|
||||
text: "开发文档",
|
||||
items: [{ text: "架构设计", link: "/develop/architecture" }],
|
||||
items: [
|
||||
{
|
||||
text: "DeepWiki",
|
||||
link: "https://deepwiki.com/Xinrea/bili-shadowreplay",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
# 架构设计
|
||||
12
docs/getting-started/config/account.md
Normal file
@@ -0,0 +1,12 @@
|
||||
# 账号配置
|
||||
|
||||
要添加直播间,至少需要配置一个同平台的账号。在账号页面,你可以通过添加账号按钮添加一个账号。
|
||||
|
||||
- B 站账号:目前支持扫码登录和 Cookie 手动配置两种方式,推荐使用扫码登录
|
||||
- 抖音账号:目前仅支持 Cookie 手动配置登陆
|
||||
|
||||
## 抖音账号配置
|
||||
|
||||
首先确保已经登录抖音,然后打开[个人主页](https://www.douyin.com/user/self),右键单击网页,在菜单中选择 `检查(Inspect)`,打开开发者工具,切换到 `网络(Network)` 选项卡,然后刷新网页,此时能在列表中找到 `self` 请求(一般是列表中第一个),单击该请求,查看`请求标头`,在 `请求标头` 中找到 `Cookie`,复制该字段的值,粘贴到配置页面的 `Cookie` 输入框中,要注意复制完全。
|
||||
|
||||

|
||||
9
docs/getting-started/config/llm.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# LLM 配置
|
||||
|
||||

|
||||
|
||||
助手页面的 AI Agent 助手功能需要配置大模型,目前仅支持配置 OpenAI 协议兼容的大模型服务。
|
||||
|
||||
本软件并不提供大模型服务,请自行选择服务提供商。要注意,使用 AI Agent 助手需要消耗比普通对话更多的 Token,请确保有足够的 Token 余额。
|
||||
|
||||
此外,AI Agent 的功能需要大模型支持 Function Calling 功能,否则无法正常调用工具。
|
||||
46
docs/getting-started/config/whisper.md
Normal file
@@ -0,0 +1,46 @@
|
||||
# Whisper 配置
|
||||
|
||||
要使用 AI 字幕识别功能,需要在设置页面配置 Whisper。目前可以选择使用本地运行 Whisper 模型,或是使用在线的 Whisper 服务(通常需要付
|
||||
费获取 API Key)。
|
||||
|
||||
> [!NOTE]
|
||||
> 其实有许多更好的中文字幕识别解决方案,但是这类服务通常需要将文件上传到对象存储后异步处理,考虑到实现的复杂度,选择了使用本地运行 Whisper 模型或是使
|
||||
> 用在线的 Whisper 服务,在请求返回时能够直接获取字幕生成结果。
|
||||
|
||||
## 本地运行 Whisper 模型
|
||||
|
||||

|
||||
|
||||
如果需要使用本地运行 Whisper 模型进行字幕生成,需要下载 Whisper.cpp 模型,并在设置中指定模型路径。模型文件可以从网络上下载,例如:
|
||||
|
||||
- [Whisper.cpp(国内镜像,内容较旧)](https://www.modelscope.cn/models/cjc1887415157/whisper.cpp/files)
|
||||
- [Whisper.cpp](https://huggingface.co/ggerganov/whisper.cpp/tree/main)
|
||||
|
||||
可以跟据自己的需求选择不同的模型,要注意带有 `en` 的模型是英文模型,其他模型为多语言模型。
|
||||
|
||||
模型文件的大小通常意味着其在运行时资源占用的大小,因此请根据电脑配置选择合适的模型。此外,GPU 版本与 CPU 版本在字幕生成速度上存在**巨大差异**,因此
|
||||
推荐使用 GPU 版本进行本地处理(目前仅支持 Nvidia GPU)。
|
||||
|
||||
## 使用在线 Whisper 服务
|
||||
|
||||

|
||||
|
||||
如果需要使用在线的 Whisper 服务进行字幕生成,可以在设置中切换为在线 Whisper,并配置好 API Key。提供 Whisper 服务的平台并非只有
|
||||
OpenAI 一家,许多云服务平台也提供 Whisper 服务。
|
||||
|
||||
## 字幕识别质量的调优
|
||||
|
||||
目前在设置中支持设置 Whisper 语言和 Whisper 提示词,这些设置对于本地和在线的 Whisper 服务都有效。
|
||||
|
||||
通常情况下,`auto` 语言选项能够自动识别语音语言,并生成相应语言的字幕。如果需要生成其他语言的字幕,或是生成的字幕语言不匹配,可以手动配置指定的语言。
|
||||
根据 OpenAI 官方文档中对于 `language` 参数的描述,目前支持的语言包括
|
||||
|
||||
Afrikaans, Arabic, Armenian, Azerbaijani, Belarusian, Bosnian, Bulgarian,
|
||||
Catalan, Chinese, Croatian, Czech, Danish, Dutch, English, Estonian, Finnish,
|
||||
French, Galician, German, Greek, Hebrew, Hindi, Hungarian, Icelandic,
|
||||
Indonesian, Italian, Japanese, Kannada, Kazakh, Korean, Latvian, Lithuanian,
|
||||
Macedonian, Malay, Marathi, Maori, Nepali, Norwegian, Persian, Polish,
|
||||
Portuguese, Romanian, Russian, Serbian, Slovak, Slovenian, Spanish, Swahili,
|
||||
Swedish, Tagalog, Tamil, Thai, Turkish, Ukrainian, Urdu, Vietnamese, and Welsh.
|
||||
|
||||
提示词可以优化生成的字幕的风格(也会一定程度上影响质量),要注意,Whisper 无法理解复杂的提示词,你可以在提示词中使用一些简单的描述,让其在选择词汇时使用偏向于提示词所描述的领域相关的词汇,以避免出现毫不相干领域的词汇;或是让它在标点符号的使用上参照提示词的风格。
|
||||
@@ -1,57 +0,0 @@
|
||||
# 配置使用
|
||||
|
||||
## 账号配置
|
||||
|
||||
要添加直播间,至少需要配置一个同平台的账号。在账号页面,你可以通过添加账号按钮添加一个账号。
|
||||
|
||||
- B 站账号:目前支持扫码登录和 Cookie 手动配置两种方式,推荐使用扫码登录
|
||||
- 抖音账号:目前仅支持 Cookie 手动配置登陆
|
||||
|
||||
### 抖音账号配置
|
||||
|
||||
首先确保已经登录抖音,然后打开[个人主页](https://www.douyin.com/user/self),右键单击网页,在菜单中选择 `检查(Inspect)`,打开开发者工具,切换到 `网络(Network)` 选项卡,然后刷新网页,此时能在列表中找到 `self` 请求(一般是列表中第一个),单击该请求,查看`请求标头`,在 `请求标头` 中找到 `Cookie`,复制该字段的值,粘贴到配置页面的 `Cookie` 输入框中,要注意复制完全。
|
||||
|
||||

|
||||
|
||||
## FFmpeg 配置
|
||||
|
||||
如果想要使用切片生成和压制功能,请确保 FFmpeg 已正确配置;除了 Windows 平台打包自带 FFmpeg 以外,其他平台需要手动安装 FFmpeg,请参考 [FFmpeg 配置](/getting-started/ffmpeg)。
|
||||
|
||||
## Whisper 配置
|
||||
|
||||
要使用 AI 字幕识别功能,需要在设置页面配置 Whisper。目前可以选择使用本地运行 Whisper 模型,或是使用在线的 Whisper 服务(通常需要付费获取 API Key)。
|
||||
|
||||
> [!NOTE]
|
||||
> 其实有许多更好的中文字幕识别解决方案,但是这类服务通常需要将文件上传到对象存储后异步处理,考虑到实现的复杂度,选择了使用本地运行 Whisper 模型或是使用在线的 Whisper 服务,在请求返回时能够直接获取字幕生成结果。
|
||||
|
||||
### 本地运行 Whisper 模型
|
||||
|
||||

|
||||
|
||||
如果需要使用本地运行 Whisper 模型进行字幕生成,需要下载 Whisper.cpp 模型,并在设置中指定模型路径。模型文件可以从网络上下载,例如:
|
||||
|
||||
- [Whisper.cpp(国内镜像,内容较旧)](https://www.modelscope.cn/models/cjc1887415157/whisper.cpp/files)
|
||||
- [Whisper.cpp](https://huggingface.co/ggerganov/whisper.cpp/tree/main)
|
||||
|
||||
可以跟据自己的需求选择不同的模型,要注意带有 `en` 的模型是英文模型,其他模型为多语言模型。
|
||||
|
||||
模型文件的大小通常意味着其在运行时资源占用的大小,因此请根据电脑配置选择合适的模型。此外,GPU 版本与 CPU 版本在字幕生成速度上存在**巨大差异**,因此推荐使用 GPU 版本进行本地处理(目前仅支持 Nvidia GPU)。
|
||||
|
||||
### 使用在线 Whisper 服务
|
||||
|
||||

|
||||
|
||||
如果需要使用在线的 Whisper 服务进行字幕生成,可以在设置中切换为在线 Whisper,并配置好 API Key。提供 Whisper 服务的平台并非只有 OpenAI 一家,许多云服务平台也提供 Whisper 服务。
|
||||
|
||||
### 字幕识别质量的调优
|
||||
|
||||
目前在设置中支持设置 Whisper 语言和 Whisper 提示词,这些设置对于本地和在线的 Whisper 服务都有效。
|
||||
|
||||
通常情况下,`auto` 语言选项能够自动识别语音语言,并生成相应语言的字幕。如果需要生成其他语言的字幕,或是生成的字幕语言不匹配,可以手动配置指定的语言。根据 OpenAI 官方文档中对于 `language` 参数的描述,目前支持的语言包括
|
||||
|
||||
Afrikaans, Arabic, Armenian, Azerbaijani, Belarusian, Bosnian, Bulgarian, Catalan, Chinese, Croatian, Czech, Danish, Dutch, English, Estonian, Finnish, French, Galician, German, Greek, Hebrew, Hindi, Hungarian, Icelandic, Indonesian, Italian, Japanese, Kannada, Kazakh, Korean, Latvian, Lithuanian, Macedonian, Malay, Marathi, Maori, Nepali, Norwegian, Persian, Polish, Portuguese, Romanian, Russian, Serbian, Slovak, Slovenian, Spanish, Swahili, Swedish, Tagalog, Tamil, Thai, Turkish, Ukrainian, Urdu, Vietnamese, and Welsh.
|
||||
|
||||
提示词可以优化生成的字幕的风格(也会一定程度上影响质量),要注意,Whisper 无法理解复杂的提示词,你可以在提示词中使用一些简单的描述,让其在选择词汇时使用偏向于提示词所描述的领域相关的词汇,以避免出现毫不相干领域的词汇;或是让它在标点符号的使用上参照提示词的风格。
|
||||
|
||||
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
# 安装准备
|
||||
|
||||
## 桌面端安装
|
||||
|
||||
桌面端目前提供了 Windows、Linux 和 MacOS 三个平台的安装包。
|
||||
|
||||
安装包分为两个版本,普通版和 debug 版,普通版适合大部分用户使用,debug 版包含了更多的调试信息,适合开发者使用;由于程序会对账号等敏感信息进行管理,请从信任的来源进行下载;所有版本均可在 [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) 页面下载安装。
|
||||
|
||||
### Windows
|
||||
|
||||
由于程序内置 Whisper 字幕识别模型支持,Windows 版本分为两种:
|
||||
|
||||
- **普通版本**:内置了 Whisper GPU 加速,字幕识别较快,体积较大,只支持 Nvidia 显卡
|
||||
- **CPU 版本**: 使用 CPU 进行字幕识别推理,速度较慢
|
||||
|
||||
请根据自己的显卡情况选择合适的版本进行下载。
|
||||
|
||||
### Linux
|
||||
|
||||
Linux 版本目前仅支持使用 CPU 推理,且测试较少,可能存在一些问题,遇到问题请及时反馈。
|
||||
|
||||
### MacOS
|
||||
|
||||
MacOS 版本内置 Metal GPU 加速;安装后首次运行,会提示无法打开从网络下载的软件,请在设置-隐私与安全性下,选择仍然打开以允许程序运行。
|
||||
|
||||
## Docker 部署
|
||||
|
||||
BiliBili ShadowReplay 提供了服务端部署的能力,提供 Web 控制界面,可以用于在服务器等无图形界面环境下部署使用。
|
||||
|
||||
### 镜像获取
|
||||
|
||||
```bash
|
||||
# 拉取最新版本
|
||||
docker pull ghcr.io/xinrea/bili-shadowreplay:latest
|
||||
# 拉取指定版本
|
||||
docker pull ghcr.io/xinrea/bili-shadowreplay:2.5.0
|
||||
# 速度太慢?从镜像源拉取
|
||||
docker pull ghcr.nju.edu.cn/xinrea/bili-shadowreplay:latest
|
||||
```
|
||||
|
||||
### 镜像使用
|
||||
|
||||
使用方法:
|
||||
|
||||
```bash
|
||||
sudo docker run -it -d\
|
||||
-p 3000:3000 \
|
||||
-v $DATA_DIR:/app/data \
|
||||
-v $CACHE_DIR:/app/cache \
|
||||
-v $OUTPUT_DIR:/app/output \
|
||||
-v $WHISPER_MODEL:/app/whisper_model.bin \
|
||||
--name bili-shadowreplay \
|
||||
ghcr.io/xinrea/bili-shadowreplay:latest
|
||||
```
|
||||
|
||||
其中:
|
||||
|
||||
- `$DATA_DIR`:为数据目录,对应于桌面版的数据目录,
|
||||
|
||||
Windows 下位于 `C:\Users\{用户名}\AppData\Roaming\cn.vjoi.bilishadowreplay`;
|
||||
|
||||
MacOS 下位于 `/Users/{user}/Library/Application Support/cn.vjoi.bilishadowreplay`
|
||||
|
||||
- `$CACHE_DIR`:为缓存目录,对应于桌面版的缓存目录;
|
||||
- `$OUTPUT_DIR`:为输出目录,对应于桌面版的输出目录;
|
||||
- `$WHISPER_MODEL`:为 Whisper 模型文件路径,对应于桌面版的 Whisper 模型文件路径。
|
||||
24
docs/getting-started/installation/desktop.md
Normal file
@@ -0,0 +1,24 @@
|
||||
# 桌面端安装
|
||||
|
||||
桌面端目前提供了 Windows、Linux 和 MacOS 三个平台的安装包。
|
||||
|
||||
由于程序会对账号等敏感信息进行管理,请从信任的来源进行下载;所有版本均可在 [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) 页面下载安装。
|
||||
|
||||
对于 MacOS 用户,请先手动安装 FFmpeg,详情见 [FFmpeg 配置](../config/ffmpeg.md)。
|
||||
|
||||
## Windows
|
||||
|
||||
由于程序内置 Whisper 字幕识别模型支持,Windows 版本分为两种:
|
||||
|
||||
- **普通版本**:内置了 Whisper GPU 加速,字幕识别较快,体积较大,只支持 Nvidia 显卡
|
||||
- **CPU 版本**: 使用 CPU 进行字幕识别推理,速度较慢
|
||||
|
||||
请根据自己的显卡情况选择合适的版本进行下载。
|
||||
|
||||
## Linux
|
||||
|
||||
Linux 版本目前仅支持使用 CPU 推理,且测试较少,可能存在一些问题,遇到问题请及时反馈。
|
||||
|
||||
## MacOS
|
||||
|
||||
MacOS 版本内置 Metal GPU 加速;安装后首次运行,会提示无法打开从网络下载的软件,请在设置-隐私与安全性下,选择仍然打开以允许程序运行。
|
||||
41
docs/getting-started/installation/docker.md
Normal file
@@ -0,0 +1,41 @@
|
||||
# Docker 部署
|
||||
|
||||
BiliBili ShadowReplay 提供了服务端部署的能力,提供 Web 控制界面,可以用于在服务器等无图形界面环境下部署使用。
|
||||
|
||||
## 镜像获取
|
||||
|
||||
```bash
|
||||
# 拉取最新版本
|
||||
docker pull ghcr.io/xinrea/bili-shadowreplay:latest
|
||||
# 拉取指定版本
|
||||
docker pull ghcr.io/xinrea/bili-shadowreplay:2.5.0
|
||||
# 速度太慢?从镜像源拉取
|
||||
docker pull ghcr.nju.edu.cn/xinrea/bili-shadowreplay:latest
|
||||
```
|
||||
|
||||
## 镜像使用
|
||||
|
||||
使用方法:
|
||||
|
||||
```bash
|
||||
sudo docker run -it -d\
|
||||
-p 3000:3000 \
|
||||
-v $DATA_DIR:/app/data \
|
||||
-v $CACHE_DIR:/app/cache \
|
||||
-v $OUTPUT_DIR:/app/output \
|
||||
-v $WHISPER_MODEL:/app/whisper_model.bin \
|
||||
--name bili-shadowreplay \
|
||||
ghcr.io/xinrea/bili-shadowreplay:latest
|
||||
```
|
||||
|
||||
其中:
|
||||
|
||||
- `$DATA_DIR`:为数据目录,对应于桌面版的数据目录,
|
||||
|
||||
Windows 下位于 `C:\Users\{用户名}\AppData\Roaming\cn.vjoi.bilishadowreplay`;
|
||||
|
||||
MacOS 下位于 `/Users/{user}/Library/Application Support/cn.vjoi.bilishadowreplay`
|
||||
|
||||
- `$CACHE_DIR`:为缓存目录,对应于桌面版的缓存目录;
|
||||
- `$OUTPUT_DIR`:为输出目录,对应于桌面版的输出目录;
|
||||
- `$WHISPER_MODEL`:为 Whisper 模型文件路径,对应于桌面版的 Whisper 模型文件路径。
|
||||
@@ -11,10 +11,10 @@ hero:
|
||||
actions:
|
||||
- theme: brand
|
||||
text: 开始使用
|
||||
link: /getting-started/installation
|
||||
link: /getting-started/installation/desktop
|
||||
- theme: alt
|
||||
text: 说明文档
|
||||
link: /usage/features
|
||||
link: /usage/features/workflow
|
||||
|
||||
features:
|
||||
- icon: 📹
|
||||
|
||||
BIN
docs/public/images/model_config.png
Normal file
|
After Width: | Height: | Size: 383 KiB |
BIN
docs/public/images/whole_clip.png
Normal file
|
After Width: | Height: | Size: 67 KiB |
BIN
docs/public/videos/deeplinking.mp4
Normal file
BIN
docs/public/videos/room_remove.mp4
Normal file
@@ -0,0 +1,31 @@
|
||||
# 常见问题
|
||||
|
||||
## 一、在哪里反馈问题?
|
||||
|
||||
你可以前往 [Github Issues](https://github.com/Xinrea/bili-shadowreplay/issues/new?template=bug_report.md) 提交问题,或是加入[反馈交流群](https://qm.qq.com/q/v4lrE6gyum)。
|
||||
|
||||
1. 在提交问题前,请先阅读其它常见问题,确保你的问题已有解答;
|
||||
2. 其次,请确保你的程序已更新到最新版本;
|
||||
3. 最后,你应准备好提供你的程序日志文件,以便更好地定位问题。
|
||||
|
||||
## 二、在哪里查看日志?
|
||||
|
||||
在主窗口的设置页面,提供了一键打开日志目录所在位置的按钮。当你打开日志目录所在位置后,进入 `logs` 目录,找到后缀名为 `log` 的文件,这便是你需要提供给开发者的日志文件。
|
||||
|
||||
## 三、无法预览直播或是生成切片
|
||||
|
||||
如果你是 macOS 或 Linux 用户,请确保你已安装了 `ffmpeg` 和 `ffprobe` 工具;如果不知道如何安装,请参考 [FFmpeg 配置](/getting-started/config/ffmpeg)。
|
||||
|
||||
如果你是 Windows 用户,程序目录下应当自带了 `ffmpeg` 和 `ffprobe` 工具,如果无法预览直播或是生成切片,请向开发者反馈。
|
||||
|
||||
## 四、添加 B 站直播间出现 -352 错误
|
||||
|
||||
`-352` 错误是由 B 站风控机制导致的,如果你添加了大量的 B 站直播间进行录制,可以在设置页面调整直播间状态的检查间隔,尽量避免风控;如果你在直播间数量较少的情况下出现该错误,请向开发者反馈。
|
||||
|
||||
## 五、录播为什么都是碎片文件?
|
||||
|
||||
缓存目录下的录播文件并非用于直接播放或是投稿,而是用于直播流的预览与实时回放。如果你需要录播文件用于投稿,请打开对应录播的预览界面,使用快捷键创建选区,生成所需范围的切片,切片文件为常规的 mp4 文件,位于你所设置的切片目录下。
|
||||
|
||||
如果你将 BSR 作为单纯的录播软件使用,在设置中可以开启`整场录播生成`,这样在直播结束后,BSR 会自动生成整场录播的切片。
|
||||
|
||||

|
||||
|
||||
1
docs/usage/features/clip.md
Normal file
@@ -0,0 +1 @@
|
||||
# 切片
|
||||
1
docs/usage/features/danmaku.md
Normal file
@@ -0,0 +1 @@
|
||||
# 弹幕
|
||||
40
docs/usage/features/room.md
Normal file
@@ -0,0 +1,40 @@
|
||||
# 直播间
|
||||
|
||||
> [!WARNING]
|
||||
> 在添加管理直播间前,请确保账号列表中有对应平台的可用账号。
|
||||
|
||||
## 添加直播间
|
||||
|
||||
### 手动添加直播间
|
||||
|
||||
你可以在 BSR 直播间页面,点击按钮手动添加直播间。你需要选择平台,并输入直播间号。
|
||||
|
||||
直播间号通常是直播间网页地址尾部的遗传数字,例如 `https://live.bilibili.com/123456` 中的 `123456`,或是 `https://live.douyin.com/123456` 中的 `123456`。
|
||||
|
||||
抖音直播间比较特殊,当未开播时,你无法找到直播间的入口,因此你需要当直播间开播时找到直播间网页地址,并记录其直播间号。
|
||||
|
||||
抖音直播间需要输入主播的 sec_uid,你可以在主播主页的 URL 中找到,例如 `https://www.douyin.com/user/MS4wLjABAAAA` 中的 `MS4wLjABAAAA`。
|
||||
|
||||
### 使用 DeepLinking 快速添加直播间
|
||||
|
||||
<!-- MD033 -->
|
||||
|
||||
<video src="/videos/deeplinking.mp4" loop autoplay muted style="border-radius: 10px;"></video>
|
||||
|
||||
在浏览器中观看直播时,替换地址栏中直播间地址中的 `https://` 为 `bsr://` 即可快速唤起 BSR 添加直播间。
|
||||
|
||||
## 启用/禁用直播间
|
||||
|
||||
你可以点击直播间卡片右上角的菜单按钮,选择启用/禁用直播间。
|
||||
|
||||
- 启用后,当直播间开播时,会自动开始录制
|
||||
- 禁用后,当直播间开播时,不会自动开始录制
|
||||
|
||||
## 移除直播间
|
||||
|
||||
> [!CAUTION]
|
||||
> 移除直播间后,该直播间相关的所有录播都会被删除,请谨慎操作。
|
||||
|
||||
你可以点击直播间卡片右上角的菜单按钮,选择移除直播间。
|
||||
|
||||
<video src="/videos/room_remove.mp4" loop autoplay muted style="border-radius: 10px;"></video>
|
||||
1
docs/usage/features/subtitle.md
Normal file
@@ -0,0 +1 @@
|
||||
# 字幕
|
||||
245
docs/usage/features/webhook.md
Normal file
@@ -0,0 +1,245 @@
|
||||
# Webhook
|
||||
|
||||
> [!NOTE]
|
||||
> 你可以使用 <https://webhook.site> 来测试 Webhook 功能。
|
||||
|
||||
## 设置 Webhook
|
||||
|
||||
打开 BSR 设置页面,在基础设置中设置 Webhook 地址。
|
||||
|
||||
## Webhook Events
|
||||
|
||||
### 直播间相关
|
||||
|
||||
#### 添加直播间
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "a96a5e9f-9857-4c13-b889-91da2ace208a",
|
||||
"event": "recorder.added",
|
||||
"payload": {
|
||||
"room_id": "26966466",
|
||||
"created_at": "2025-09-07T03:33:14.258796+00:00",
|
||||
"platform": "bilibili",
|
||||
"auto_start": true,
|
||||
"extra": ""
|
||||
},
|
||||
"timestamp": 1757215994
|
||||
}
|
||||
```
|
||||
|
||||
#### 移除直播间
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "e33623d4-e040-4390-88f5-d351ceeeace7",
|
||||
"event": "recorder.removed",
|
||||
"payload": {
|
||||
"room_id": "27183290",
|
||||
"created_at": "2025-08-30T10:54:18.569198+00:00",
|
||||
"platform": "bilibili",
|
||||
"auto_start": true,
|
||||
"extra": ""
|
||||
},
|
||||
"timestamp": 1757217015
|
||||
}
|
||||
```
|
||||
|
||||
### 直播相关
|
||||
|
||||
> [!NOTE]
|
||||
> 直播开始和结束,不意味着录制的开始和结束。
|
||||
|
||||
#### 直播开始
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "f12f3424-f7d8-4b2f-a8b7-55477411482e",
|
||||
"event": "live.started",
|
||||
"payload": {
|
||||
"room_id": "843610",
|
||||
"room_info": {
|
||||
"room_id": "843610",
|
||||
"room_title": "登顶!",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/new_room_cover/73aea43f4b4624c314d62fea4b424822fb506dfb.jpg"
|
||||
},
|
||||
"user_info": {
|
||||
"user_id": "475210",
|
||||
"user_name": "Xinrea",
|
||||
"user_avatar": "https://i1.hdslb.com/bfs/face/91beb3bf444b295fe12bae1f3dc6d9fc4fe4c224.jpg"
|
||||
},
|
||||
"total_length": 0,
|
||||
"current_live_id": "",
|
||||
"live_status": false,
|
||||
"is_recording": false,
|
||||
"auto_start": true,
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757217190
|
||||
}
|
||||
```
|
||||
|
||||
#### 直播结束
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "e8b0756a-02f9-4655-b5ae-a170bf9547bd",
|
||||
"event": "live.ended",
|
||||
"payload": {
|
||||
"room_id": "843610",
|
||||
"room_info": {
|
||||
"room_id": "843610",
|
||||
"room_title": "登顶!",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/new_room_cover/73aea43f4b4624c314d62fea4b424822fb506dfb.jpg"
|
||||
},
|
||||
"user_info": {
|
||||
"user_id": "475210",
|
||||
"user_name": "Xinrea",
|
||||
"user_avatar": "https://i1.hdslb.com/bfs/face/91beb3bf444b295fe12bae1f3dc6d9fc4fe4c224.jpg"
|
||||
},
|
||||
"total_length": 0,
|
||||
"current_live_id": "",
|
||||
"live_status": true,
|
||||
"is_recording": false,
|
||||
"auto_start": true,
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757217365
|
||||
}
|
||||
```
|
||||
|
||||
### 录播相关
|
||||
|
||||
#### 开始录制
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "5ec1ea10-2b31-48fd-8deb-f2d7d2ea5985",
|
||||
"event": "record.started",
|
||||
"payload": {
|
||||
"room_id": "26966466",
|
||||
"room_info": {
|
||||
"room_id": "26966466",
|
||||
"room_title": "早安獭獭栞!下播前抽fufu",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/user_cover/b810c36855168034557e905e5916b1dba1761fa4.jpg"
|
||||
},
|
||||
"user_info": {
|
||||
"user_id": "1609526545",
|
||||
"user_name": "栞栞Shiori",
|
||||
"user_avatar": "https://i1.hdslb.com/bfs/face/47e8dbabb895de44ec6cace085d4dc1d40307277.jpg"
|
||||
},
|
||||
"total_length": 0,
|
||||
"current_live_id": "1757216045412",
|
||||
"live_status": true,
|
||||
"is_recording": false,
|
||||
"auto_start": true,
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757216045
|
||||
}
|
||||
```
|
||||
|
||||
#### 结束录制
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "56fd03e5-3965-4c2e-a6a9-bb6932347eb3",
|
||||
"event": "record.ended",
|
||||
"payload": {
|
||||
"room_id": "26966466",
|
||||
"room_info": {
|
||||
"room_id": "26966466",
|
||||
"room_title": "早安獭獭栞!下播前抽fufu",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/user_cover/b810c36855168034557e905e5916b1dba1761fa4.jpg"
|
||||
},
|
||||
"user_info": {
|
||||
"user_id": "1609526545",
|
||||
"user_name": "栞栞Shiori",
|
||||
"user_avatar": "https://i1.hdslb.com/bfs/face/47e8dbabb895de44ec6cace085d4dc1d40307277.jpg"
|
||||
},
|
||||
"total_length": 52.96700000000001,
|
||||
"current_live_id": "1757215994597",
|
||||
"live_status": true,
|
||||
"is_recording": true,
|
||||
"auto_start": true,
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757216040
|
||||
}
|
||||
```
|
||||
|
||||
#### 删除录播
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "c32bc811-ab4b-49fd-84c7-897727905d16",
|
||||
"event": "archive.deleted",
|
||||
"payload": {
|
||||
"platform": "bilibili",
|
||||
"live_id": "1756607084705",
|
||||
"room_id": "1967212929",
|
||||
"title": "灶台O.o",
|
||||
"length": 9,
|
||||
"size": 1927112,
|
||||
"created_at": "2025-08-31T02:24:44.728616+00:00",
|
||||
"cover": "bilibili/1967212929/1756607084705/cover.jpg"
|
||||
},
|
||||
"timestamp": 1757176219
|
||||
}
|
||||
```
|
||||
|
||||
### 切片相关
|
||||
|
||||
#### 切片生成
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "f542e0e1-688b-4f1a-8ce1-e5e51530cf5d",
|
||||
"event": "clip.generated",
|
||||
"payload": {
|
||||
"id": 316,
|
||||
"room_id": "27183290",
|
||||
"cover": "[27183290][1757172501727][一起看凡人修仙传][2025-09-07_00-16-11].jpg",
|
||||
"file": "[27183290][1757172501727][一起看凡人修仙传][2025-09-07_00-16-11].mp4",
|
||||
"note": "",
|
||||
"length": 121,
|
||||
"size": 53049119,
|
||||
"status": 0,
|
||||
"bvid": "",
|
||||
"title": "",
|
||||
"desc": "",
|
||||
"tags": "",
|
||||
"area": 0,
|
||||
"created_at": "2025-09-07T00:16:11.747461+08:00",
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757175371
|
||||
}
|
||||
```
|
||||
|
||||
#### 切片删除
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "5c7ca728-753d-4a7d-a0b4-02c997ad2f92",
|
||||
"event": "clip.deleted",
|
||||
"payload": {
|
||||
"id": 313,
|
||||
"room_id": "27183290",
|
||||
"cover": "[27183290][1756903953470][不出非洲之心不下播][2025-09-03_21-10-54].jpg",
|
||||
"file": "[27183290][1756903953470][不出非洲之心不下播][2025-09-03_21-10-54].mp4",
|
||||
"note": "",
|
||||
"length": 32,
|
||||
"size": 18530098,
|
||||
"status": 0,
|
||||
"bvid": "",
|
||||
"title": "",
|
||||
"desc": "",
|
||||
"tags": "",
|
||||
"area": 0,
|
||||
"created_at": "2025-09-03T21:10:54.943682+08:00",
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757147617
|
||||
}
|
||||
```
|
||||
30
docs/usage/features/workflow.md
Normal file
@@ -0,0 +1,30 @@
|
||||
# 工作流程
|
||||
|
||||
- 直播间:各个平台的直播间
|
||||
- 录播:直播流的存档,每次录制会自动生成一场录播记录
|
||||
- 切片:从直播流中剪切生成的视频片段
|
||||
- 投稿:将切片上传到各个平台(目前仅支持 Bilibili)
|
||||
|
||||
下图展示了它们之间的关系:
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
A[直播间] -->|录制| B[录播 01]
|
||||
A -->|录制| C[录播 02]
|
||||
A -->|录制| E[录播 N]
|
||||
|
||||
B --> F[直播流预览窗口]
|
||||
|
||||
F -->|区间生成| G[切片 01]
|
||||
F -->|区间生成| H[切片 02]
|
||||
F -->|区间生成| I[切片 N]
|
||||
|
||||
G --> J[切片预览窗口]
|
||||
|
||||
J -->|字幕压制| K[新切片]
|
||||
|
||||
K --> J
|
||||
|
||||
J -->|投稿| L[Bilibili]
|
||||
|
||||
```
|
||||
@@ -4,7 +4,7 @@
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<link rel="stylesheet" href="shaka-player/controls.min.css" />
|
||||
<link rel="stylesheet" href="shaka-player/controls.css" />
|
||||
<link rel="stylesheet" href="shaka-player/youtube-theme.css" />
|
||||
<script src="shaka-player/shaka-player.ui.js"></script>
|
||||
</head>
|
||||
|
||||
18
package.json
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "bili-shadowreplay",
|
||||
"private": true,
|
||||
"version": "2.9.1",
|
||||
"version": "2.16.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
@@ -11,14 +11,16 @@
|
||||
"tauri": "tauri",
|
||||
"docs:dev": "vitepress dev docs",
|
||||
"docs:build": "vitepress build docs",
|
||||
"docs:preview": "vitepress preview docs"
|
||||
"docs:preview": "vitepress preview docs",
|
||||
"bump": "node scripts/bump.cjs"
|
||||
},
|
||||
"dependencies": {
|
||||
"@langchain/core": "^0.3.64",
|
||||
"@langchain/deepseek": "^0.1.0",
|
||||
"@langchain/langgraph": "^0.3.10",
|
||||
"@langchain/ollama": "^0.2.3",
|
||||
"@tauri-apps/api": "^2.4.1",
|
||||
"@tauri-apps/api": "^2.6.2",
|
||||
"@tauri-apps/plugin-deep-link": "~2",
|
||||
"@tauri-apps/plugin-dialog": "~2",
|
||||
"@tauri-apps/plugin-fs": "~2",
|
||||
"@tauri-apps/plugin-http": "~2",
|
||||
@@ -28,7 +30,9 @@
|
||||
"@tauri-apps/plugin-sql": "~2",
|
||||
"lucide-svelte": "^0.479.0",
|
||||
"marked": "^16.1.1",
|
||||
"qrcode": "^1.5.4"
|
||||
"qrcode": "^1.5.4",
|
||||
"socket.io-client": "^4.8.1",
|
||||
"wavesurfer.js": "^7.11.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sveltejs/vite-plugin-svelte": "^2.0.0",
|
||||
@@ -40,6 +44,7 @@
|
||||
"flowbite": "^2.5.1",
|
||||
"flowbite-svelte": "^0.46.16",
|
||||
"flowbite-svelte-icons": "^1.6.1",
|
||||
"mermaid": "^11.9.0",
|
||||
"postcss": "^8.4.21",
|
||||
"svelte": "^3.54.0",
|
||||
"svelte-check": "^3.0.0",
|
||||
@@ -47,8 +52,9 @@
|
||||
"tailwindcss": "^3.3.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"tslib": "^2.4.1",
|
||||
"typescript": "^4.6.4",
|
||||
"typescript": "^5.0.0",
|
||||
"vite": "^4.0.0",
|
||||
"vitepress": "^1.6.3"
|
||||
"vitepress": "^1.6.3",
|
||||
"vitepress-plugin-mermaid": "^2.0.17"
|
||||
}
|
||||
}
|
||||
|
||||
BIN
public/imgs/bilibili.png
Normal file
|
After Width: | Height: | Size: 26 KiB |
BIN
public/imgs/bilibili_avatar.png
Normal file
|
After Width: | Height: | Size: 38 KiB |
|
Before Width: | Height: | Size: 32 KiB After Width: | Height: | Size: 246 KiB |
BIN
public/imgs/douyin_avatar.png
Normal file
|
After Width: | Height: | Size: 153 KiB |
BIN
public/imgs/huya.png
Normal file
|
After Width: | Height: | Size: 219 KiB |
BIN
public/imgs/huya_avatar.png
Normal file
|
After Width: | Height: | Size: 865 KiB |
983
public/shaka-player/controls.css
Normal file
@@ -0,0 +1,983 @@
|
||||
/*! @license
|
||||
* Shaka Player
|
||||
* Copyright 2016 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
.shaka-hidden {
|
||||
display: none !important;
|
||||
}
|
||||
.shaka-video-container {
|
||||
position: relative;
|
||||
top: 0;
|
||||
left: 0;
|
||||
display: flex;
|
||||
font-family: Roboto, sans-serif, TengwarTelcontar;
|
||||
font-weight: 400;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
}
|
||||
.shaka-video-container .material-svg-icon {
|
||||
font-size: 24px;
|
||||
}
|
||||
.shaka-video-container:fullscreen {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #000;
|
||||
}
|
||||
.shaka-video-container:fullscreen .shaka-text-container {
|
||||
font-size: 4.4vmin;
|
||||
}
|
||||
.shaka-video-container:-webkit-full-screen {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #000;
|
||||
}
|
||||
.shaka-video-container:-webkit-full-screen .shaka-text-container {
|
||||
font-size: 4.4vmin;
|
||||
}
|
||||
.shaka-video-container:-moz-full-screen {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #000;
|
||||
}
|
||||
.shaka-video-container:-moz-full-screen .shaka-text-container {
|
||||
font-size: 4.4vmin;
|
||||
}
|
||||
.shaka-video-container:-ms-fullscreen {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
background-color: #000;
|
||||
}
|
||||
.shaka-video-container:-ms-fullscreen .shaka-text-container {
|
||||
font-size: 4.4vmin;
|
||||
}
|
||||
.shaka-controls-container {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
box-sizing: border-box;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: flex-end;
|
||||
align-items: center;
|
||||
z-index: 1;
|
||||
}
|
||||
.shaka-video-container:not([shaka-controls="true"]) .shaka-controls-container {
|
||||
display: none;
|
||||
}
|
||||
.shaka-controls-container * {
|
||||
flex-shrink: 0;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-fullscreen-button {
|
||||
display: none;
|
||||
}
|
||||
.shaka-canvas-container {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
pointer-events: none;
|
||||
}
|
||||
.shaka-vr-canvas-container {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
pointer-events: none;
|
||||
}
|
||||
.shaka-bottom-controls {
|
||||
width: 98%;
|
||||
padding: 0;
|
||||
z-index: 1;
|
||||
}
|
||||
.shaka-controls-button-panel {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: flex-end;
|
||||
align-items: center;
|
||||
overflow: hidden;
|
||||
min-width: 48px;
|
||||
font-size: 12px;
|
||||
font-weight: 400;
|
||||
font-style: normal;
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-controls-button-panel,
|
||||
.shaka-controls-container[shown="true"] .shaka-controls-button-panel {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-controls-button-panel > * {
|
||||
color: #fff;
|
||||
height: 48px;
|
||||
width: 48px;
|
||||
line-height: 0.5;
|
||||
padding: 0 2px;
|
||||
background: 0 0;
|
||||
border: 0;
|
||||
cursor: pointer;
|
||||
opacity: 0.9;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.1s;
|
||||
text-shadow: 0 0 2px rgba(0, 0, 0, 0.5);
|
||||
}
|
||||
.shaka-controls-button-panel > .shaka-fast-forward-button .material-svg-icon,
|
||||
.shaka-controls-button-panel > .shaka-rewind-button .material-svg-icon,
|
||||
.shaka-controls-button-panel > .shaka-skip-next-button .material-svg-icon,
|
||||
.shaka-controls-button-panel > .shaka-skip-previous-button .material-svg-icon,
|
||||
.shaka-controls-button-panel > .shaka-small-play-button .material-svg-icon {
|
||||
font-size: 32px;
|
||||
}
|
||||
.shaka-controls-button-panel > .shaka-fullscreen-button .material-svg-icon {
|
||||
font-size: 24px;
|
||||
}
|
||||
.shaka-controls-button-panel > .shaka-overflow-menu-button {
|
||||
position: relative;
|
||||
}
|
||||
.shaka-controls-button-panel > .shaka-overflow-menu-button .material-svg-icon {
|
||||
font-size: 24px;
|
||||
}
|
||||
.shaka-controls-button-panel > :hover {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-controls-button-panel .shaka-overflow-menu-only {
|
||||
display: none;
|
||||
}
|
||||
.shaka-play-button-container {
|
||||
margin: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
flex-shrink: 1;
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
z-index: 1;
|
||||
}
|
||||
.shaka-statistics-container {
|
||||
overflow-x: hidden;
|
||||
overflow-y: auto;
|
||||
scrollbar-color: white rgba(0, 0, 0, 0.5);
|
||||
scrollbar-width: thin;
|
||||
min-width: 300px;
|
||||
color: #fff;
|
||||
background-color: rgba(35, 35, 35, 0.9);
|
||||
font-size: 14px;
|
||||
padding: 5px 10px;
|
||||
border-radius: 2px;
|
||||
position: absolute;
|
||||
z-index: 2;
|
||||
left: 15px;
|
||||
top: 15px;
|
||||
max-height: calc(100% - 115px);
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-statistics-container,
|
||||
.shaka-controls-container[shown="true"] .shaka-statistics-container {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-statistics-container div {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
}
|
||||
.shaka-statistics-container span {
|
||||
color: #969696;
|
||||
}
|
||||
.shaka-ad-statistics-container {
|
||||
overflow-x: hidden;
|
||||
overflow-y: auto;
|
||||
scrollbar-color: white rgba(0, 0, 0, 0.5);
|
||||
scrollbar-width: thin;
|
||||
min-width: 150px;
|
||||
color: #fff;
|
||||
background-color: rgba(35, 35, 35, 0.9);
|
||||
font-size: 14px;
|
||||
padding: 5px 10px;
|
||||
border-radius: 2px;
|
||||
position: absolute;
|
||||
z-index: 2;
|
||||
right: 15px;
|
||||
top: 15px;
|
||||
max-height: calc(100% - 115px);
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-ad-statistics-container,
|
||||
.shaka-controls-container[shown="true"] .shaka-ad-statistics-container {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-ad-statistics-container div {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
}
|
||||
.shaka-ad-statistics-container span {
|
||||
color: #969696;
|
||||
}
|
||||
.shaka-context-menu {
|
||||
overflow-x: hidden;
|
||||
overflow-y: auto;
|
||||
white-space: nowrap;
|
||||
background: rgba(28, 28, 28, 0.9);
|
||||
border-radius: 2px;
|
||||
min-width: 190px;
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: stretch;
|
||||
position: absolute;
|
||||
z-index: 3;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-context-menu,
|
||||
.shaka-controls-container[shown="true"] .shaka-context-menu {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-context-menu button {
|
||||
font-size: 14px;
|
||||
background: 0 0;
|
||||
color: #fff;
|
||||
border: none;
|
||||
min-height: 30px;
|
||||
padding: 10px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
cursor: pointer;
|
||||
}
|
||||
.shaka-context-menu button:hover {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
.shaka-context-menu button label {
|
||||
cursor: pointer;
|
||||
margin-left: 5px;
|
||||
}
|
||||
.shaka-keyboard-navigation .shaka-context-menu button:focus {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
.shaka-context-menu button .shaka-current-selection-span {
|
||||
display: none;
|
||||
}
|
||||
.shaka-scrim-container {
|
||||
margin: 0;
|
||||
width: 100%;
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
flex-shrink: 1;
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
height: 61px;
|
||||
background: linear-gradient(rgba(0, 0, 0, 0) 0, rgba(0, 0, 0, 0.5) 100%);
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-scrim-container,
|
||||
.shaka-controls-container[shown="true"] .shaka-scrim-container {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-text-container {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
pointer-events: none;
|
||||
bottom: 0;
|
||||
width: 100%;
|
||||
min-width: 48px;
|
||||
transition: bottom cubic-bezier(0.4, 0, 0.6, 1) 0.1s;
|
||||
transition-delay: 0.5s;
|
||||
font-size: 20px;
|
||||
line-height: 1.4;
|
||||
color: #fff;
|
||||
}
|
||||
.shaka-text-container span.shaka-text-wrapper {
|
||||
display: inline;
|
||||
background: 0 0;
|
||||
}
|
||||
.shaka-controls-container[shown="true"] ~ .shaka-text-container {
|
||||
transition-delay: 0s;
|
||||
}
|
||||
.shaka-spinner-container {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
flex-shrink: 1;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
.shaka-video-container:not([shaka-controls="true"]) .shaka-spinner-container {
|
||||
display: none;
|
||||
}
|
||||
.shaka-hidden-fast-forward-container,
|
||||
.shaka-hidden-rewind-container {
|
||||
height: 100%;
|
||||
width: 40%;
|
||||
flex-shrink: 1;
|
||||
z-index: 1;
|
||||
}
|
||||
.shaka-hidden-fast-forward-container {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
left: 60%;
|
||||
}
|
||||
.shaka-hidden-rewind-container {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
}
|
||||
.shaka-video-container.no-cursor {
|
||||
cursor: none !important;
|
||||
}
|
||||
.shaka-video-container.no-cursor * {
|
||||
cursor: none !important;
|
||||
}
|
||||
.shaka-play-button {
|
||||
box-sizing: border-box;
|
||||
padding: calc(15% / 2);
|
||||
width: 0;
|
||||
height: 0;
|
||||
margin: 0;
|
||||
border-radius: 50%;
|
||||
box-shadow: rgba(0, 0, 0, 0.1) 0 0 20px 0;
|
||||
border: none;
|
||||
background-size: 50%;
|
||||
background-repeat: no-repeat;
|
||||
background-position: center center;
|
||||
background-color: rgba(255, 255, 255, 0.9);
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-play-button,
|
||||
.shaka-controls-container[shown="true"] .shaka-play-button {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-play-button[icon="play"] {
|
||||
background-image: url("data:image/svg+xml,%3Csvg%20fill%3D%22%23000000%22%20height%3D%2224%22%20viewBox%3D%220%200%2024%2024%22%20width%3D%2224%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%0A%20%20%20%20%3Cpath%20d%3D%22M8%205v14l11-7z%22%2F%3E%0A%20%20%20%20%3Cpath%20d%3D%22M0%200h24v24H0z%22%20fill%3D%22none%22%2F%3E%0A%3C%2Fsvg%3E");
|
||||
}
|
||||
.shaka-play-button[icon="pause"] {
|
||||
background-image: url("data:image/svg+xml,%3Csvg%20fill%3D%22%23000000%22%20height%3D%2224%22%20viewBox%3D%220%200%2024%2024%22%20width%3D%2224%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%0A%20%20%20%20%3Cpath%20d%3D%22M6%2019h4V5H6v14zm8-14v14h4V5h-4z%22%2F%3E%0A%20%20%20%20%3Cpath%20d%3D%22M0%200h24v24H0z%22%20fill%3D%22none%22%2F%3E%0A%3C%2Fsvg%3E");
|
||||
}
|
||||
.shaka-play-button[icon="replay"] {
|
||||
background-image: url("data:image/svg+xml,%3Csvg%20fill%3D%22%231f1f1f%22%20height%3D%2224px%22%20viewBox%3D%220%20-960%20960%20960%22%20width%3D%2224px%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%0A%20%20%3Cpath%20d%3D%22M480-80q-75%200-140.5-28.5t-114-77q-48.5-48.5-77-114T120-440h80q0%20117%2081.5%20198.5T480-160q117%200%20198.5-81.5T760-440q0-117-81.5-198.5T480-720h-6l62%2062-56%2058-160-160%20160-160%2056%2058-62%2062h6q75%200%20140.5%2028.5t114%2077q48.5%2048.5%2077%20114T840-440q0%2075-28.5%20140.5t-77%20114q-48.5%2048.5-114%2077T480-80Z%22%2F%3E%0A%3C%2Fsvg%3E");
|
||||
}
|
||||
@media (prefers-reduced-transparency: no-preference) {
|
||||
.shaka-controls-container[shown="true"] .shaka-play-button {
|
||||
opacity: 0.75;
|
||||
}
|
||||
}
|
||||
.shaka-current-time {
|
||||
font-size: 14px;
|
||||
color: #fff;
|
||||
cursor: pointer;
|
||||
width: auto;
|
||||
padding: 0 5px;
|
||||
}
|
||||
.shaka-current-time[disabled] {
|
||||
background-color: transparent;
|
||||
color: #fff;
|
||||
cursor: default;
|
||||
}
|
||||
.shaka-controls-container button:focus,
|
||||
.shaka-controls-container input:focus {
|
||||
outline: 1px solid Highlight;
|
||||
}
|
||||
.shaka-controls-container button:-moz-focus-inner,
|
||||
.shaka-controls-container input:-moz-focus-outer {
|
||||
outline: 0;
|
||||
border: 0;
|
||||
}
|
||||
.shaka-controls-container:not(.shaka-keyboard-navigation) button:focus,
|
||||
.shaka-controls-container:not(.shaka-keyboard-navigation) input:focus {
|
||||
outline: 0;
|
||||
}
|
||||
.shaka-fast-forward-container,
|
||||
.shaka-rewind-container {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
flex-shrink: 1;
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
margin: 0;
|
||||
border: none;
|
||||
color: #fff;
|
||||
background-color: rgba(0, 0, 0, 0.5);
|
||||
cursor: default;
|
||||
font-size: 20px;
|
||||
opacity: 0;
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
}
|
||||
.shaka-fast-forward-container {
|
||||
border-radius: 40% 0 0 40%;
|
||||
}
|
||||
.shaka-rewind-container {
|
||||
border-radius: 0 40% 40% 0;
|
||||
}
|
||||
.shaka-forward-rewind-container-icon {
|
||||
font-size: 32px;
|
||||
}
|
||||
.shaka-range-container {
|
||||
position: relative;
|
||||
top: 0;
|
||||
left: 0;
|
||||
margin: calc((12px - 4px) / 2) 6px;
|
||||
height: 4px;
|
||||
border-radius: 4px;
|
||||
background: #fff;
|
||||
box-sizing: content-box;
|
||||
}
|
||||
.shaka-volume-bar-container {
|
||||
width: 100px;
|
||||
padding: 0;
|
||||
transition-property: opacity, width;
|
||||
transition-duration: 250ms;
|
||||
transition-timing-function: cubic-bezier(0.4, 0, 0.6, 1);
|
||||
}
|
||||
.shaka-volume-bar-container:hover {
|
||||
width: 100px !important;
|
||||
opacity: 1 !important;
|
||||
}
|
||||
@media (max-width: 474px) {
|
||||
.shaka-volume-bar-container {
|
||||
width: 50px;
|
||||
}
|
||||
.shaka-volume-bar-container:hover {
|
||||
width: 50px !important;
|
||||
}
|
||||
.shaka-mute-button:hover + .shaka-volume-bar-container-allow-hiding {
|
||||
width: 50px;
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
.shaka-mute-button
|
||||
+ .shaka-volume-bar-container-allow-hiding:not(:focus-within) {
|
||||
width: 0;
|
||||
opacity: 0;
|
||||
}
|
||||
@media (min-width: 475px) {
|
||||
.shaka-mute-button:hover + .shaka-volume-bar-container-allow-hiding {
|
||||
width: 100px;
|
||||
opacity: 1;
|
||||
}
|
||||
}
|
||||
.shaka-range-element {
|
||||
-webkit-appearance: none;
|
||||
background: 0 0;
|
||||
cursor: pointer;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
height: 12px;
|
||||
top: calc((4px - 12px) / 2);
|
||||
z-index: 1;
|
||||
}
|
||||
.shaka-range-element::-webkit-slider-runnable-track {
|
||||
width: 100%;
|
||||
cursor: pointer;
|
||||
height: 12px;
|
||||
background: 0 0;
|
||||
color: transparent;
|
||||
border: none;
|
||||
}
|
||||
.shaka-range-element::-webkit-slider-thumb {
|
||||
-webkit-appearance: none;
|
||||
border: none;
|
||||
border-radius: 12px;
|
||||
height: 12px;
|
||||
width: 12px;
|
||||
background: #fff;
|
||||
}
|
||||
.shaka-range-element::-moz-range-track {
|
||||
width: 100%;
|
||||
cursor: pointer;
|
||||
height: 12px;
|
||||
background: 0 0;
|
||||
color: transparent;
|
||||
border: none;
|
||||
}
|
||||
.shaka-range-element::-moz-range-thumb {
|
||||
-webkit-appearance: none;
|
||||
border: none;
|
||||
border-radius: 12px;
|
||||
height: 12px;
|
||||
width: 12px;
|
||||
background: #fff;
|
||||
}
|
||||
.shaka-seek-bar-container {
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
top: 5px;
|
||||
height: 5px;
|
||||
margin-bottom: 0;
|
||||
background-clip: padding-box !important;
|
||||
border-top: 4px solid transparent;
|
||||
border-bottom: 4px solid transparent;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-seek-bar-container,
|
||||
.shaka-controls-container[shown="true"] .shaka-seek-bar-container {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-seek-bar-container .shaka-seek-bar {
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 250ms;
|
||||
opacity: 0;
|
||||
}
|
||||
.shaka-seek-bar-container:hover .shaka-seek-bar {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-ad-markers {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
.shaka-spacer {
|
||||
cursor: default;
|
||||
flex-shrink: 1;
|
||||
flex-grow: 1;
|
||||
margin: 0;
|
||||
}
|
||||
.shaka-overflow-menu,
|
||||
.shaka-settings-menu {
|
||||
overflow-x: hidden;
|
||||
overflow-y: auto;
|
||||
scrollbar-color: white rgba(0, 0, 0, 0.5);
|
||||
scrollbar-width: thin;
|
||||
white-space: nowrap;
|
||||
background: rgba(28, 28, 28, 0.9);
|
||||
border-radius: 15px;
|
||||
max-height: 250px;
|
||||
min-width: 190px;
|
||||
padding: 5px 0;
|
||||
opacity: 0;
|
||||
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: stretch;
|
||||
position: absolute;
|
||||
z-index: 2;
|
||||
right: 15px;
|
||||
bottom: 62px;
|
||||
}
|
||||
.shaka-controls-container[casting="true"] .shaka-overflow-menu,
|
||||
.shaka-controls-container[casting="true"] .shaka-settings-menu,
|
||||
.shaka-controls-container[shown="true"] .shaka-overflow-menu,
|
||||
.shaka-controls-container[shown="true"] .shaka-settings-menu {
|
||||
opacity: 1;
|
||||
}
|
||||
.shaka-overflow-menu button,
|
||||
.shaka-settings-menu button {
|
||||
font-size: 14px;
|
||||
background: 0 0;
|
||||
color: #fff;
|
||||
border: none;
|
||||
min-height: 30px;
|
||||
padding: 10px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
cursor: pointer;
|
||||
}
|
||||
.shaka-overflow-menu button:hover,
|
||||
.shaka-settings-menu button:hover {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
.shaka-overflow-menu button label,
|
||||
.shaka-settings-menu button label {
|
||||
cursor: pointer;
|
||||
}
|
||||
.shaka-keyboard-navigation .shaka-overflow-menu button:focus,
|
||||
.shaka-keyboard-navigation .shaka-settings-menu button:focus {
|
||||
background: rgba(255, 255, 255, 0.1);
|
||||
}
|
||||
.shaka-overflow-menu .material-svg-icon,
|
||||
.shaka-settings-menu .material-svg-icon {
|
||||
padding-left: 0;
|
||||
padding-right: 10px;
|
||||
}
|
||||
.shaka-overflow-menu .material-svg-icon.shaka-chosen-item,
|
||||
.shaka-settings-menu .material-svg-icon.shaka-chosen-item {
|
||||
order: -1;
|
||||
line-height: 17px;
|
||||
font-size: 18px;
|
||||
}
|
||||
.shaka-overflow-menu.shaka-low-position,
|
||||
.shaka-settings-menu.shaka-low-position {
|
||||
bottom: 48px;
|
||||
}
|
||||
.shaka-overflow-menu span {
|
||||
text-align: left;
|
||||
}
|
||||
.shaka-overflow-button-label {
|
||||
position: relative;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
.shaka-overflow-button-label-inline {
|
||||
box-sizing: border-box;
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
width: calc(100% - 34px);
|
||||
padding-right: 28px;
|
||||
background-image: url("data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIGhlaWdodD0iMjRweCIgdmlld0JveD0iMCAwIDI0IDI0IiB3aWR0aD0iMjRweCIgZmlsbD0iI2VlZWVlZSI+PHBhdGggZD0iTTAgMGgyNHYyNEgwVjB6IiBmaWxsPSJub25lIi8+PHBhdGggZD0iTTguNTkgMTYuNTlMMTMuMTcgMTIgOC41OSA3LjQxIDEwIDZsNiA2LTYgNi0xLjQxLTEuNDF6Ii8+PC9zdmc+");
|
||||
background-repeat: no-repeat;
|
||||
background-position: right 5px center;
|
||||
background-size: 24px 24px;
|
||||
}
|
||||
.shaka-simple-overflow-button-label-inline {
|
||||
box-sizing: border-box;
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
width: calc(100% - 50px);
|
||||
}
|
||||
.shaka-current-selection-span {
|
||||
font-size: 12px;
|
||||
padding-left: 10px;
|
||||
}
|
||||
.shaka-current-auto-quality {
|
||||
margin-left: 5px;
|
||||
font-size: 11px;
|
||||
color: #ccc;
|
||||
}
|
||||
.shaka-current-quality-mark,
|
||||
.shaka-quality-mark {
|
||||
color: red;
|
||||
margin-left: 2px !important;
|
||||
font-size: 10px;
|
||||
height: 17px;
|
||||
}
|
||||
.shaka-quality-mark {
|
||||
line-height: 6px;
|
||||
}
|
||||
.shaka-overflow-playback-rate-mark,
|
||||
.shaka-overflow-quality-mark {
|
||||
background: red;
|
||||
color: #fff;
|
||||
border-radius: 2px;
|
||||
font-family: Roboto, sans-serif, TengwarTelcontar;
|
||||
font-size: 10px;
|
||||
font-weight: 700;
|
||||
line-height: 10px;
|
||||
text-shadow: none;
|
||||
padding: 1px;
|
||||
position: absolute;
|
||||
right: 4px;
|
||||
top: 10px;
|
||||
}
|
||||
.shaka-settings-menu span {
|
||||
margin-left: 28px;
|
||||
}
|
||||
.shaka-settings-menu span.shaka-chosen-item {
|
||||
margin-left: 0;
|
||||
}
|
||||
.shaka-settings-menu .shaka-chapter {
|
||||
margin-left: 10px;
|
||||
}
|
||||
.shaka-back-to-overflow-button {
|
||||
border-bottom: 1px solid rgba(255, 255, 255, 0.2) !important;
|
||||
}
|
||||
.shaka-back-to-overflow-button span {
|
||||
margin-left: 0;
|
||||
}
|
||||
.shaka-back-to-overflow-button .material-svg-icon {
|
||||
padding-right: 10px;
|
||||
font-size: 18px !important;
|
||||
}
|
||||
.shaka-back-to-overflow-button:hover {
|
||||
background: 0 0 !important;
|
||||
}
|
||||
.shaka-controls-container[ad-active="true"] {
|
||||
pointer-events: none;
|
||||
}
|
||||
.shaka-controls-container[ad-active="true"] .shaka-bottom-controls {
|
||||
pointer-events: auto;
|
||||
}
|
||||
.shaka-client-side-ad-container,
|
||||
.shaka-server-side-ad-container {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
}
|
||||
.shaka-video-container[shaka-controls="true"]
|
||||
.shaka-client-side-ad-container
|
||||
iframe,
|
||||
.shaka-video-container[shaka-controls="true"]
|
||||
.shaka-server-side-ad-container
|
||||
iframe {
|
||||
height: 90%;
|
||||
}
|
||||
.shaka-ad-controls {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
z-index: 1;
|
||||
padding-bottom: 1%;
|
||||
}
|
||||
.shaka-video-container:not([shaka-controls="true"]) .shaka-ad-controls {
|
||||
display: none;
|
||||
}
|
||||
.shaka-ad-controls button,
|
||||
.shaka-ad-controls div {
|
||||
color: #fff;
|
||||
font-size: initial;
|
||||
}
|
||||
.shaka-ad-info {
|
||||
font-size: 14px;
|
||||
color: #fff;
|
||||
width: auto;
|
||||
padding: 0 5px;
|
||||
}
|
||||
.shaka-ad-info[disabled] {
|
||||
background-color: transparent;
|
||||
color: #fff;
|
||||
cursor: default;
|
||||
padding: 0;
|
||||
}
|
||||
.shaka-skip-ad-container {
|
||||
position: relative;
|
||||
right: calc((100% - 98%) / 2 * -1);
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
margin: 0;
|
||||
margin-left: auto;
|
||||
}
|
||||
.shaka-skip-ad-button {
|
||||
padding: 5px 15px;
|
||||
background: rgba(0, 0, 0, 0.7);
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
}
|
||||
.shaka-skip-ad-button:disabled {
|
||||
background: rgba(0, 0, 0, 0.3);
|
||||
}
|
||||
.shaka-skip-ad-counter {
|
||||
padding: 5px;
|
||||
background: rgba(0, 0, 0, 0.7);
|
||||
margin: 0;
|
||||
} /*!
|
||||
* @license
|
||||
* The tooltip is based on https://github.com/felipefialho/css-components/
|
||||
* Local modifications have been performed.
|
||||
*
|
||||
* Copyright (c) 2017 Felipe Fialho
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
.shaka-tooltips-on {
|
||||
overflow: visible;
|
||||
}
|
||||
.shaka-tooltips-on > .shaka-tooltip,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status {
|
||||
position: relative;
|
||||
}
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:active:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:focus-visible:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:hover:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip:active:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip:focus-visible:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip:hover:after {
|
||||
content: attr(aria-label);
|
||||
font-family: Roboto, sans-serif, TengwarTelcontar;
|
||||
line-height: 20px;
|
||||
white-space: nowrap;
|
||||
font-size: 14px;
|
||||
background: rgba(0, 0, 0, 0.5);
|
||||
color: #fff;
|
||||
border-radius: 2px;
|
||||
padding: 2px 10px;
|
||||
position: absolute;
|
||||
bottom: 62px;
|
||||
left: calc(48px / 2);
|
||||
-webkit-transform: translateX(-50%);
|
||||
-moz-transform: translateX(-50%);
|
||||
-ms-transform: translateX(-50%);
|
||||
-o-transform: translateX(-50%);
|
||||
transform: translateX(-50%);
|
||||
}
|
||||
@media (prefers-reduced-transparency) {
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:active:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:focus-visible:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:hover:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip:active:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip:focus-visible:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip:hover:after {
|
||||
background-color: rgba(0, 0, 0, 0.9);
|
||||
}
|
||||
}
|
||||
.shaka-tooltips-on.shaka-tooltips-low-position > .shaka-tooltip:active:after,
|
||||
.shaka-tooltips-on.shaka-tooltips-low-position
|
||||
> .shaka-tooltip:focus-visible:after,
|
||||
.shaka-tooltips-on.shaka-tooltips-low-position > .shaka-tooltip:hover:after {
|
||||
bottom: 48px;
|
||||
}
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:active:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:focus-visible:after,
|
||||
.shaka-tooltips-on > .shaka-tooltip-status:hover:after {
|
||||
content: attr(aria-label) " (" attr(shaka-status) ")";
|
||||
}
|
||||
.shaka-tooltips-on button:first-child:active:after,
|
||||
.shaka-tooltips-on button:first-child:focus-visible:after,
|
||||
.shaka-tooltips-on button:first-child:hover:after {
|
||||
left: 0;
|
||||
-webkit-transform: translateX(0);
|
||||
-moz-transform: translateX(0);
|
||||
-ms-transform: translateX(0);
|
||||
-o-transform: translateX(0);
|
||||
transform: translateX(0);
|
||||
}
|
||||
.shaka-tooltips-on button:last-child:active:after,
|
||||
.shaka-tooltips-on button:last-child:focus-visible:after,
|
||||
.shaka-tooltips-on button:last-child:hover:after {
|
||||
left: 48px;
|
||||
-webkit-transform: translateX(-100%);
|
||||
-moz-transform: translateX(-100%);
|
||||
-ms-transform: translateX(-100%);
|
||||
-o-transform: translateX(-100%);
|
||||
transform: translateX(-100%);
|
||||
}
|
||||
#shaka-player-ui-thumbnail-container {
|
||||
background-color: #000;
|
||||
border: 1px solid #000;
|
||||
box-shadow: 0 8px 8px 0 rgba(0, 0, 0, 0.5);
|
||||
min-width: 150px;
|
||||
overflow: hidden;
|
||||
position: absolute;
|
||||
visibility: hidden;
|
||||
width: 15%;
|
||||
z-index: 1;
|
||||
pointer-events: none;
|
||||
}
|
||||
#shaka-player-ui-thumbnail-container #shaka-player-ui-thumbnail-image {
|
||||
position: absolute;
|
||||
}
|
||||
#shaka-player-ui-thumbnail-container #shaka-player-ui-thumbnail-time-container {
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
position: absolute;
|
||||
right: 0;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
#shaka-player-ui-thumbnail-container
|
||||
#shaka-player-ui-thumbnail-time-container
|
||||
#shaka-player-ui-thumbnail-time {
|
||||
background-color: rgba(0, 0, 0, 0.5);
|
||||
border-radius: 14px;
|
||||
color: #fff;
|
||||
font-size: 14px;
|
||||
padding: 0 5px;
|
||||
}
|
||||
@media (prefers-reduced-transparency) {
|
||||
#shaka-player-ui-thumbnail-container
|
||||
#shaka-player-ui-thumbnail-time-container
|
||||
#shaka-player-ui-thumbnail-time {
|
||||
background-color: rgba(0, 0, 0, 0.9);
|
||||
}
|
||||
}
|
||||
#shaka-player-ui-thumbnail-container.portrait-thumbnail {
|
||||
min-width: 75px;
|
||||
width: 7.5%;
|
||||
}
|
||||
#shaka-player-ui-time-container {
|
||||
background-color: rgba(0, 0, 0, 0.5);
|
||||
border-radius: 5px;
|
||||
color: #fff;
|
||||
display: flex;
|
||||
font-size: 14px;
|
||||
justify-content: center;
|
||||
overflow: hidden;
|
||||
padding: 0 3px;
|
||||
position: absolute;
|
||||
visibility: hidden;
|
||||
z-index: 1;
|
||||
}
|
||||
@media (prefers-reduced-transparency) {
|
||||
#shaka-player-ui-time-container {
|
||||
background-color: rgba(0, 0, 0, 0.9);
|
||||
}
|
||||
}
|
||||
.material-svg-icon {
|
||||
display: inline-block;
|
||||
fill: currentcolor;
|
||||
width: 1em;
|
||||
height: 1em;
|
||||
}
|
||||
@font-face {
|
||||
font-family: Roboto;
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
font-stretch: normal;
|
||||
src: url(./fonts/KFOMCnqEu92Fr1ME7kSn66aGLdTylUAMQXC89YmC2DPNWubEbVmUiA8.ttf)
|
||||
format("truetype");
|
||||
} /*# sourceMappingURL=controls.css.map */
|
||||
1
public/shaka-player/controls.css.map
Normal file
53
public/shaka-player/controls.min.css
vendored
BIN
public/shaka-player/fonts/KFOlCnqEu92Fr1MmEU9vAw.ttf
Normal file
BIN
public/shaka-player/fonts/KFOmCnqEu92Fr1Me5Q.ttf
Normal file
7727
public/shaka-player/shaka-player.ui.debug.externs.js
Normal file
7727
public/shaka-player/shaka-player.ui.externs.js
Normal file
@@ -1,19 +1,19 @@
|
||||
@font-face {
|
||||
font-family: 'Roboto';
|
||||
font-family: "Roboto";
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
font-display: swap;
|
||||
src: url(https://fonts.gstatic.com/s/roboto/v27/KFOmCnqEu92Fr1Me5Q.ttf) format('truetype');
|
||||
src: url(./fonts/KFOmCnqEu92Fr1Me5Q.ttf) format("truetype");
|
||||
}
|
||||
@font-face {
|
||||
font-family: 'Roboto';
|
||||
font-family: "Roboto";
|
||||
font-style: normal;
|
||||
font-weight: 500;
|
||||
font-display: swap;
|
||||
src: url(https://fonts.gstatic.com/s/roboto/v27/KFOlCnqEu92Fr1MmEU9vAw.ttf) format('truetype');
|
||||
src: url(./fonts/KFOlCnqEu92Fr1MmEU9vAw.ttf) format("truetype");
|
||||
}
|
||||
.youtube-theme {
|
||||
font-family: 'Roboto', sans-serif;
|
||||
font-family: "Roboto", sans-serif;
|
||||
}
|
||||
.youtube-theme .shaka-bottom-controls {
|
||||
width: 100%;
|
||||
@@ -27,18 +27,18 @@
|
||||
display: flex;
|
||||
-webkit-box-orient: vertical;
|
||||
-webkit-box-direction: normal;
|
||||
-ms-flex-direction: column;
|
||||
flex-direction: column;
|
||||
-ms-flex-direction: column;
|
||||
flex-direction: column;
|
||||
}
|
||||
.youtube-theme .shaka-ad-controls {
|
||||
-webkit-box-ordinal-group: 2;
|
||||
-ms-flex-order: 1;
|
||||
order: 1;
|
||||
-ms-flex-order: 1;
|
||||
order: 1;
|
||||
}
|
||||
.youtube-theme .shaka-controls-button-panel {
|
||||
-webkit-box-ordinal-group: 3;
|
||||
-ms-flex-order: 2;
|
||||
order: 2;
|
||||
-ms-flex-order: 2;
|
||||
order: 2;
|
||||
height: 40px;
|
||||
padding: 0 10px;
|
||||
}
|
||||
@@ -48,36 +48,36 @@
|
||||
}
|
||||
.youtube-theme .shaka-small-play-button {
|
||||
-webkit-box-ordinal-group: -2;
|
||||
-ms-flex-order: -3;
|
||||
order: -3;
|
||||
-ms-flex-order: -3;
|
||||
order: -3;
|
||||
}
|
||||
.youtube-theme .shaka-mute-button {
|
||||
-webkit-box-ordinal-group: -1;
|
||||
-ms-flex-order: -2;
|
||||
order: -2;
|
||||
-ms-flex-order: -2;
|
||||
order: -2;
|
||||
}
|
||||
.youtube-theme .shaka-controls-button-panel > * {
|
||||
margin: 0;
|
||||
padding: 3px 8px;
|
||||
color: #EEE;
|
||||
color: #eee;
|
||||
height: 40px;
|
||||
}
|
||||
.youtube-theme .shaka-controls-button-panel > *:focus {
|
||||
outline: none;
|
||||
-webkit-box-shadow: inset 0 0 0 2px rgba(27, 127, 204, 0.8);
|
||||
box-shadow: inset 0 0 0 2px rgba(27, 127, 204, 0.8);
|
||||
color: #FFF;
|
||||
box-shadow: inset 0 0 0 2px rgba(27, 127, 204, 0.8);
|
||||
color: #fff;
|
||||
}
|
||||
.youtube-theme .shaka-controls-button-panel > *:hover {
|
||||
color: #FFF;
|
||||
color: #fff;
|
||||
}
|
||||
.youtube-theme .shaka-controls-button-panel .shaka-volume-bar-container {
|
||||
position: relative;
|
||||
z-index: 10;
|
||||
left: -1px;
|
||||
-webkit-box-ordinal-group: 0;
|
||||
-ms-flex-order: -1;
|
||||
order: -1;
|
||||
-ms-flex-order: -1;
|
||||
order: -1;
|
||||
opacity: 0;
|
||||
width: 0px;
|
||||
-webkit-transition: width 0.2s cubic-bezier(0.4, 0, 1, 1);
|
||||
@@ -120,23 +120,25 @@
|
||||
opacity: 1;
|
||||
cursor: pointer;
|
||||
}
|
||||
.youtube-theme .shaka-seek-bar-container input[type=range]::-webkit-slider-thumb {
|
||||
background: #FF0000;
|
||||
.youtube-theme
|
||||
.shaka-seek-bar-container
|
||||
input[type="range"]::-webkit-slider-thumb {
|
||||
background: #ff0000;
|
||||
cursor: pointer;
|
||||
}
|
||||
.youtube-theme .shaka-seek-bar-container input[type=range]::-moz-range-thumb {
|
||||
background: #FF0000;
|
||||
.youtube-theme .shaka-seek-bar-container input[type="range"]::-moz-range-thumb {
|
||||
background: #ff0000;
|
||||
cursor: pointer;
|
||||
}
|
||||
.youtube-theme .shaka-seek-bar-container input[type=range]::-ms-thumb {
|
||||
background: #FF0000;
|
||||
.youtube-theme .shaka-seek-bar-container input[type="range"]::-ms-thumb {
|
||||
background: #ff0000;
|
||||
cursor: pointer;
|
||||
}
|
||||
.youtube-theme .shaka-video-container * {
|
||||
font-family: 'Roboto', sans-serif;
|
||||
font-family: "Roboto", sans-serif;
|
||||
}
|
||||
.youtube-theme .shaka-video-container .material-icons-round {
|
||||
font-family: 'Material Icons Sharp';
|
||||
font-family: "Material Icons Sharp";
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu,
|
||||
.youtube-theme .shaka-settings-menu {
|
||||
@@ -170,14 +172,14 @@
|
||||
}
|
||||
.youtube-theme .shaka-settings-menu button[aria-selected="true"] span {
|
||||
-webkit-box-ordinal-group: 3;
|
||||
-ms-flex-order: 2;
|
||||
order: 2;
|
||||
-ms-flex-order: 2;
|
||||
order: 2;
|
||||
margin-left: 0;
|
||||
}
|
||||
.youtube-theme .shaka-settings-menu button[aria-selected="true"] i {
|
||||
-webkit-box-ordinal-group: 2;
|
||||
-ms-flex-order: 1;
|
||||
order: 1;
|
||||
-ms-flex-order: 1;
|
||||
order: 1;
|
||||
font-size: 18px;
|
||||
padding-left: 5px;
|
||||
}
|
||||
@@ -192,25 +194,25 @@
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-box-pack: justify;
|
||||
-ms-flex-pack: justify;
|
||||
justify-content: space-between;
|
||||
-ms-flex-pack: justify;
|
||||
justify-content: space-between;
|
||||
-webkit-box-orient: horizontal;
|
||||
-webkit-box-direction: normal;
|
||||
-ms-flex-direction: row;
|
||||
flex-direction: row;
|
||||
-ms-flex-direction: row;
|
||||
flex-direction: row;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
cursor: default;
|
||||
outline: none;
|
||||
height: 40px;
|
||||
-webkit-box-flex: 0;
|
||||
-ms-flex: 0 0 100%;
|
||||
flex: 0 0 100%;
|
||||
-ms-flex: 0 0 100%;
|
||||
flex: 0 0 100%;
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu button .shaka-overflow-button-label span {
|
||||
-ms-flex-negative: initial;
|
||||
flex-shrink: initial;
|
||||
flex-shrink: initial;
|
||||
padding-left: 15px;
|
||||
font-size: 13px;
|
||||
font-weight: 500;
|
||||
@@ -218,11 +220,11 @@
|
||||
display: -ms-flexbox;
|
||||
display: flex;
|
||||
-webkit-box-align: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
-ms-flex-align: center;
|
||||
align-items: center;
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu span + span {
|
||||
color: #FFF;
|
||||
color: #fff;
|
||||
font-weight: 400 !important;
|
||||
font-size: 12px !important;
|
||||
padding-right: 8px;
|
||||
@@ -230,7 +232,7 @@
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu span + span:after {
|
||||
content: "navigate_next";
|
||||
font-family: 'Material Icons Sharp';
|
||||
font-family: "Material Icons Sharp";
|
||||
font-size: 20px;
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu .shaka-pip-button span + span {
|
||||
@@ -270,10 +272,10 @@
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu button,
|
||||
.youtube-theme .shaka-settings-menu button {
|
||||
color: #EEE;
|
||||
color: #eee;
|
||||
}
|
||||
.youtube-theme .shaka-captions-off {
|
||||
color: #BFBFBF;
|
||||
color: #bfbfbf;
|
||||
}
|
||||
.youtube-theme .shaka-overflow-menu-button {
|
||||
font-size: 18px;
|
||||
|
||||
58
scripts/bump.cjs
Normal file
@@ -0,0 +1,58 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
|
||||
function updatePackageJson(version) {
|
||||
const packageJsonPath = path.join(process.cwd(), "package.json");
|
||||
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
|
||||
packageJson.version = version;
|
||||
fs.writeFileSync(
|
||||
packageJsonPath,
|
||||
JSON.stringify(packageJson, null, 2) + "\n"
|
||||
);
|
||||
console.log(`✅ Updated package.json version to ${version}`);
|
||||
}
|
||||
|
||||
function updateCargoToml(version) {
|
||||
const cargoTomlPath = path.join(process.cwd(), "src-tauri", "Cargo.toml");
|
||||
let cargoToml = fs.readFileSync(cargoTomlPath, "utf8");
|
||||
|
||||
// Update the version in the [package] section
|
||||
cargoToml = cargoToml.replace(/^version = ".*"$/m, `version = "${version}"`);
|
||||
|
||||
fs.writeFileSync(cargoTomlPath, cargoToml);
|
||||
console.log(`✅ Updated Cargo.toml version to ${version}`);
|
||||
}
|
||||
|
||||
function main() {
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
if (args.length === 0) {
|
||||
console.error("❌ Please provide a version number");
|
||||
console.error("Usage: yarn bump <version>");
|
||||
console.error("Example: yarn bump 3.1.0");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const version = args[0];
|
||||
|
||||
// Validate version format (simple check)
|
||||
if (!/^\d+\.\d+\.\d+/.test(version)) {
|
||||
console.error(
|
||||
"❌ Invalid version format. Please use semantic versioning (e.g., 3.1.0)"
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
try {
|
||||
updatePackageJson(version);
|
||||
updateCargoToml(version);
|
||||
console.log(`🎉 Successfully bumped version to ${version}`);
|
||||
} catch (error) {
|
||||
console.error("❌ Error updating version:", error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
1264
src-tauri/Cargo.lock
generated
@@ -1,22 +1,30 @@
|
||||
[workspace]
|
||||
members = ["crates/danmu_stream"]
|
||||
members = ["crates/danmu_stream", "crates/recorder"]
|
||||
resolver = "2"
|
||||
|
||||
[package]
|
||||
name = "bili-shadowreplay"
|
||||
version = "1.0.0"
|
||||
version = "2.16.0"
|
||||
description = "BiliBili ShadowReplay"
|
||||
authors = ["Xinrea"]
|
||||
license = ""
|
||||
repository = ""
|
||||
edition = "2021"
|
||||
|
||||
[lints.clippy]
|
||||
correctness="deny"
|
||||
suspicious="deny"
|
||||
complexity="deny"
|
||||
style="deny"
|
||||
perf="deny"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
danmu_stream = { path = "crates/danmu_stream" }
|
||||
recorder = { path = "crates/recorder" }
|
||||
serde_json = "1.0"
|
||||
reqwest = { version = "0.11", features = ["blocking", "json", "multipart"] }
|
||||
reqwest = { workspace = true}
|
||||
serde_derive = "1.0.158"
|
||||
serde = "1.0.158"
|
||||
sysinfo = "0.32.0"
|
||||
@@ -25,7 +33,6 @@ async-std = "1.12.0"
|
||||
async-ffmpeg-sidecar = "0.0.1"
|
||||
chrono = { version = "0.4.24", features = ["serde"] }
|
||||
toml = "0.7.3"
|
||||
custom_error = "1.9.2"
|
||||
regex = "1.7.3"
|
||||
tokio = { version = "1.27.0", features = ["process"] }
|
||||
platform-dirs = "0.3.0"
|
||||
@@ -43,15 +50,21 @@ mime_guess = "2.0"
|
||||
async-trait = "0.1.87"
|
||||
whisper-rs = "0.14.2"
|
||||
hound = "3.5.1"
|
||||
uuid = { version = "1.4", features = ["v4"] }
|
||||
axum = { version = "0.7", features = ["macros"] }
|
||||
uuid = { workspace = true }
|
||||
axum = { version = "0.7", features = ["macros", "multipart"] }
|
||||
tower-http = { version = "0.5", features = ["cors", "fs"] }
|
||||
futures-core = "0.3"
|
||||
futures = "0.3"
|
||||
tokio-util = { version = "0.7", features = ["io"] }
|
||||
tokio-stream = "0.1"
|
||||
clap = { version = "4.5.37", features = ["derive"] }
|
||||
url = "2.5.4"
|
||||
srtparse = "0.2.0"
|
||||
thiserror = "2"
|
||||
deno_core = "0.355"
|
||||
sanitize-filename = "0.6.0"
|
||||
socketioxide = "0.17.2"
|
||||
scraper = "0.24.0"
|
||||
|
||||
[features]
|
||||
# this feature is used for production builds or when `devPath` points to the filesystem
|
||||
@@ -71,6 +84,7 @@ gui = [
|
||||
"tauri-utils",
|
||||
"tauri-plugin-os",
|
||||
"tauri-plugin-notification",
|
||||
"tauri-plugin-deep-link",
|
||||
"fix-path-env",
|
||||
"tauri-build",
|
||||
]
|
||||
@@ -83,6 +97,7 @@ optional = true
|
||||
[dependencies.tauri-plugin-single-instance]
|
||||
version = "2"
|
||||
optional = true
|
||||
features = ["deep-link"]
|
||||
|
||||
[dependencies.tauri-plugin-dialog]
|
||||
version = "2"
|
||||
@@ -117,6 +132,10 @@ optional = true
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.tauri-plugin-deep-link]
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.fix-path-env]
|
||||
git = "https://github.com/tauri-apps/fix-path-env-rs"
|
||||
optional = true
|
||||
@@ -132,3 +151,7 @@ whisper-rs = { version = "0.14.2", default-features = false }
|
||||
[target.'cfg(darwin)'.dependencies.whisper-rs]
|
||||
version = "0.14.2"
|
||||
features = ["metal"]
|
||||
|
||||
[workspace.dependencies]
|
||||
reqwest = { version = "0.11", features = ["blocking", "json", "multipart", "gzip"] }
|
||||
uuid = { version = "1.4", features = ["v4"] }
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
fn main() {
|
||||
#[cfg(feature = "gui")]
|
||||
tauri_build::build()
|
||||
tauri_build::build();
|
||||
}
|
||||
|
||||
@@ -36,19 +36,10 @@
|
||||
"identifier": "http:default",
|
||||
"allow": [
|
||||
{
|
||||
"url": "https://*.hdslb.com/"
|
||||
"url": "https://*.*"
|
||||
},
|
||||
{
|
||||
"url": "https://afdian.com/"
|
||||
},
|
||||
{
|
||||
"url": "https://*.afdiancdn.com/"
|
||||
},
|
||||
{
|
||||
"url": "https://*.douyin.com/"
|
||||
},
|
||||
{
|
||||
"url": "https://*.douyinpic.com/"
|
||||
"url": "http://*.*"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -65,6 +56,7 @@
|
||||
"shell:default",
|
||||
"sql:default",
|
||||
"os:default",
|
||||
"dialog:default"
|
||||
"dialog:default",
|
||||
"deep-link:default"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -7,38 +7,42 @@ edition = "2021"
|
||||
name = "danmu_stream"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[[example]]
|
||||
name = "bilibili"
|
||||
path = "examples/bilibili.rs"
|
||||
|
||||
[[example]]
|
||||
name = "douyin"
|
||||
path = "examples/douyin.rs"
|
||||
|
||||
[dependencies]
|
||||
tokio = { version = "1.0", features = ["full"] }
|
||||
tokio-tungstenite = { version = "0.20", features = ["native-tls"] }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tokio-tungstenite = { version = "0.27", features = ["native-tls"] }
|
||||
futures-util = "0.3"
|
||||
prost = "0.12"
|
||||
prost = "0.14"
|
||||
chrono = "0.4"
|
||||
log = "0.4"
|
||||
env_logger = "0.10"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
reqwest = { version = "0.11", features = ["json"] }
|
||||
env_logger = "0.11"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
reqwest = { version = "0.12", features = ["json"] }
|
||||
url = "2.4"
|
||||
md5 = "0.7"
|
||||
md5 = "0.8"
|
||||
regex = "1.9"
|
||||
deno_core = "0.242.0"
|
||||
pct-str = "2.0.0"
|
||||
custom_error = "1.9.2"
|
||||
deno_core = "0.355"
|
||||
pct-str = "2.0"
|
||||
thiserror = "2.0"
|
||||
flate2 = "1.0"
|
||||
scroll = "0.13.0"
|
||||
scroll_derive = "0.13.0"
|
||||
brotli = "8.0.1"
|
||||
scroll = "0.13"
|
||||
scroll_derive = "0.13"
|
||||
brotli = "8.0"
|
||||
http = "1.0"
|
||||
rand = "0.9.1"
|
||||
urlencoding = "2.1.3"
|
||||
rand = "0.9"
|
||||
urlencoding = "2.1"
|
||||
gzip = "0.1.2"
|
||||
hex = "0.4.3"
|
||||
async-trait = "0.1.88"
|
||||
uuid = "1.17.0"
|
||||
async-trait = "0.1"
|
||||
uuid = { workspace = true}
|
||||
|
||||
[build-dependencies]
|
||||
tonic-build = "0.10"
|
||||
tonic-build = "0.14"
|
||||
|
||||
@@ -8,7 +8,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Initialize logging
|
||||
env_logger::init();
|
||||
// Replace these with actual values
|
||||
let room_id = 768756;
|
||||
let room_id = "768756";
|
||||
let cookie = "";
|
||||
let stream = Arc::new(DanmuStream::new(ProviderType::BiliBili, cookie, room_id).await?);
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Initialize logging
|
||||
env_logger::init();
|
||||
// Replace these with actual values
|
||||
let room_id = 7514298567821937427; // Replace with actual Douyin room_id. When live starts, the room_id will be generated, so it's more like a live_id.
|
||||
let room_id = "7514298567821937427"; // Replace with actual Douyin room_id. When live starts, the room_id will be generated, so it's more like a live_id.
|
||||
let cookie = "your_cookie";
|
||||
let stream = Arc::new(DanmuStream::new(ProviderType::Douyin, cookie, room_id).await?);
|
||||
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use tokio::sync::{mpsc, RwLock};
|
||||
|
||||
use crate::{
|
||||
provider::{new, DanmuProvider, ProviderType},
|
||||
DanmuMessageType, DanmuStreamError,
|
||||
};
|
||||
use tokio::sync::{mpsc, RwLock};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DanmuStream {
|
||||
pub provider_type: ProviderType,
|
||||
pub identifier: String,
|
||||
pub room_id: u64,
|
||||
pub room_id: String,
|
||||
pub provider: Arc<RwLock<Box<dyn DanmuProvider>>>,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
rx: Arc<RwLock<mpsc::UnboundedReceiver<DanmuMessageType>>>,
|
||||
@@ -20,14 +21,14 @@ impl DanmuStream {
|
||||
pub async fn new(
|
||||
provider_type: ProviderType,
|
||||
identifier: &str,
|
||||
room_id: u64,
|
||||
room_id: &str,
|
||||
) -> Result<Self, DanmuStreamError> {
|
||||
let (tx, rx) = mpsc::unbounded_channel();
|
||||
let provider = new(provider_type, identifier, room_id).await?;
|
||||
Ok(Self {
|
||||
provider_type,
|
||||
identifier: identifier.to_string(),
|
||||
room_id,
|
||||
room_id: room_id.to_string(),
|
||||
provider: Arc::new(RwLock::new(provider)),
|
||||
tx,
|
||||
rx: Arc::new(RwLock::new(rx)),
|
||||
|
||||
@@ -1,19 +1,8 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
use reqwest::header::HeaderMap;
|
||||
|
||||
impl From<reqwest::Error> for DanmuStreamError {
|
||||
fn from(value: reqwest::Error) -> Self {
|
||||
Self::HttpError { err: value }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<url::ParseError> for DanmuStreamError {
|
||||
fn from(value: url::ParseError) -> Self {
|
||||
Self::ParseError { err: value }
|
||||
}
|
||||
}
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
pub struct ApiClient {
|
||||
client: reqwest::Client,
|
||||
|
||||
@@ -2,16 +2,24 @@ pub mod danmu_stream;
|
||||
mod http_client;
|
||||
pub mod provider;
|
||||
|
||||
use custom_error::custom_error;
|
||||
use thiserror::Error;
|
||||
|
||||
custom_error! {pub DanmuStreamError
|
||||
HttpError {err: reqwest::Error} = "HttpError {err}",
|
||||
ParseError {err: url::ParseError} = "ParseError {err}",
|
||||
WebsocketError {err: String } = "WebsocketError {err}",
|
||||
PackError {err: String} = "PackError {err}",
|
||||
UnsupportProto {proto: u16} = "UnsupportProto {proto}",
|
||||
MessageParseError {err: String} = "MessageParseError {err}",
|
||||
InvalidIdentifier {err: String} = "InvalidIdentifier {err}"
|
||||
#[derive(Error, Debug)]
|
||||
pub enum DanmuStreamError {
|
||||
#[error("HttpError {0:?}")]
|
||||
HttpError(#[from] reqwest::Error),
|
||||
#[error("ParseError {0:?}")]
|
||||
ParseError(#[from] url::ParseError),
|
||||
#[error("WebsocketError {err}")]
|
||||
WebsocketError { err: String },
|
||||
#[error("PackError {err}")]
|
||||
PackError { err: String },
|
||||
#[error("UnsupportProto {proto}")]
|
||||
UnsupportProto { proto: u16 },
|
||||
#[error("MessageParseError {err}")]
|
||||
MessageParseError { err: String },
|
||||
#[error("InvalidIdentifier {err}")]
|
||||
InvalidIdentifier { err: String },
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -21,7 +29,7 @@ pub enum DanmuMessageType {
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DanmuMessage {
|
||||
pub room_id: u64,
|
||||
pub room_id: String,
|
||||
pub user_id: u64,
|
||||
pub user_name: String,
|
||||
pub message: String,
|
||||
|
||||
@@ -36,15 +36,15 @@ type WsWriteType = futures_util::stream::SplitSink<
|
||||
|
||||
pub struct BiliDanmu {
|
||||
client: ApiClient,
|
||||
room_id: u64,
|
||||
user_id: u64,
|
||||
room_id: String,
|
||||
user_id: i64,
|
||||
stop: Arc<RwLock<bool>>,
|
||||
write: Arc<RwLock<Option<WsWriteType>>>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl DanmuProvider for BiliDanmu {
|
||||
async fn new(cookie: &str, room_id: u64) -> Result<Self, DanmuStreamError> {
|
||||
async fn new(cookie: &str, room_id: &str) -> Result<Self, DanmuStreamError> {
|
||||
// find DedeUserID=<user_id> in cookie str
|
||||
let user_id = BiliDanmu::parse_user_id(cookie)?;
|
||||
// add buvid3 to cookie
|
||||
@@ -54,7 +54,7 @@ impl DanmuProvider for BiliDanmu {
|
||||
Ok(Self {
|
||||
client,
|
||||
user_id,
|
||||
room_id,
|
||||
room_id: room_id.to_string(),
|
||||
stop: Arc::new(RwLock::new(false)),
|
||||
write: Arc::new(RwLock::new(None)),
|
||||
})
|
||||
@@ -65,7 +65,6 @@ impl DanmuProvider for BiliDanmu {
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let mut retry_count = 0;
|
||||
const MAX_RETRIES: u32 = 5;
|
||||
const RETRY_DELAY: Duration = Duration::from_secs(5);
|
||||
info!(
|
||||
"Bilibili WebSocket connection started, room_id: {}",
|
||||
@@ -74,33 +73,37 @@ impl DanmuProvider for BiliDanmu {
|
||||
|
||||
loop {
|
||||
if *self.stop.read().await {
|
||||
info!(
|
||||
"Bilibili WebSocket connection stopped, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
match self.connect_and_handle(tx.clone()).await {
|
||||
Ok(_) => {
|
||||
info!("Bilibili WebSocket connection closed normally");
|
||||
break;
|
||||
info!(
|
||||
"Bilibili WebSocket connection closed normally, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
retry_count = 0;
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Bilibili WebSocket connection error: {}", e);
|
||||
retry_count += 1;
|
||||
|
||||
if retry_count >= MAX_RETRIES {
|
||||
return Err(DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to connect after {} retries", MAX_RETRIES),
|
||||
});
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}/{})",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
MAX_RETRIES
|
||||
error!(
|
||||
"Bilibili WebSocket connection error, room_id: {}, error: {}",
|
||||
self.room_id, e
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
retry_count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}), room_id: {}",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
self.room_id
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -123,7 +126,10 @@ impl BiliDanmu {
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let wbi_key = self.get_wbi_key().await?;
|
||||
let danmu_info = self.get_danmu_info(&wbi_key, self.room_id).await?;
|
||||
let real_room = self.get_real_room(&wbi_key, &self.room_id).await?;
|
||||
let danmu_info = self
|
||||
.get_danmu_info(&wbi_key, real_room.to_string().as_str())
|
||||
.await?;
|
||||
let ws_hosts = danmu_info.data.host_list.clone();
|
||||
let mut conn = None;
|
||||
log::debug!("ws_hosts: {:?}", ws_hosts);
|
||||
@@ -152,7 +158,7 @@ impl BiliDanmu {
|
||||
*self.write.write().await = Some(write);
|
||||
|
||||
let json = serde_json::to_string(&WsSend {
|
||||
roomid: self.room_id,
|
||||
roomid: real_room,
|
||||
key: danmu_info.data.token,
|
||||
uid: self.user_id,
|
||||
protover: 3,
|
||||
@@ -237,9 +243,8 @@ impl BiliDanmu {
|
||||
async fn get_danmu_info(
|
||||
&self,
|
||||
wbi_key: &str,
|
||||
room_id: u64,
|
||||
room_id: &str,
|
||||
) -> Result<DanmuInfo, DanmuStreamError> {
|
||||
let room_id = self.get_real_room(wbi_key, room_id).await?;
|
||||
let params = self
|
||||
.get_sign(
|
||||
wbi_key,
|
||||
@@ -265,7 +270,7 @@ impl BiliDanmu {
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn get_real_room(&self, wbi_key: &str, room_id: u64) -> Result<u64, DanmuStreamError> {
|
||||
async fn get_real_room(&self, wbi_key: &str, room_id: &str) -> Result<i64, DanmuStreamError> {
|
||||
let params = self
|
||||
.get_sign(
|
||||
wbi_key,
|
||||
@@ -293,14 +298,14 @@ impl BiliDanmu {
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
fn parse_user_id(cookie: &str) -> Result<u64, DanmuStreamError> {
|
||||
fn parse_user_id(cookie: &str) -> Result<i64, DanmuStreamError> {
|
||||
let mut user_id = None;
|
||||
|
||||
// find DedeUserID=<user_id> in cookie str
|
||||
let re = Regex::new(r"DedeUserID=(\d+)").unwrap();
|
||||
if let Some(captures) = re.captures(cookie) {
|
||||
if let Some(user) = captures.get(1) {
|
||||
user_id = Some(user.as_str().parse::<u64>().unwrap());
|
||||
user_id = Some(user.as_str().parse::<i64>().unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -404,8 +409,8 @@ impl BiliDanmu {
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct WsSend {
|
||||
uid: u64,
|
||||
roomid: u64,
|
||||
uid: i64,
|
||||
roomid: i64,
|
||||
key: String,
|
||||
protover: u32,
|
||||
platform: String,
|
||||
@@ -436,5 +441,5 @@ pub struct RoomInit {
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct RoomInitData {
|
||||
room_id: u64,
|
||||
room_id: i64,
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
|
||||
use super::stream::WsStreamCtx;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
|
||||
use super::stream::WsStreamCtx;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
#[derive(Debug)]
|
||||
#[allow(dead_code)]
|
||||
|
||||
@@ -24,7 +24,7 @@ struct PackHotCount {
|
||||
|
||||
type BilibiliPackCtx<'a> = (BilibiliPackHeader, &'a [u8]);
|
||||
|
||||
fn pack(buffer: &[u8]) -> Result<BilibiliPackCtx, DanmuStreamError> {
|
||||
fn pack(buffer: &[u8]) -> Result<BilibiliPackCtx<'_>, DanmuStreamError> {
|
||||
let data = buffer
|
||||
.pread_with(0, scroll::BE)
|
||||
.map_err(|e: scroll::Error| DanmuStreamError::PackError { err: e.to_string() })?;
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
|
||||
use super::stream::WsStreamCtx;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::{
|
||||
provider::{bilibili::dannmu_msg::BiliDanmuMessage, DanmuMessageType},
|
||||
DanmuMessage, DanmuStreamError,
|
||||
};
|
||||
use super::dannmu_msg::BiliDanmuMessage;
|
||||
|
||||
use crate::{provider::DanmuMessageType, DanmuMessage, DanmuStreamError};
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct WsStreamCtx {
|
||||
@@ -66,7 +65,7 @@ impl WsStreamCtx {
|
||||
|
||||
if let Some(danmu_msg) = danmu_msg {
|
||||
Ok(DanmuMessageType::DanmuMessage(DanmuMessage {
|
||||
room_id: 0,
|
||||
room_id: "".to_string(),
|
||||
user_id: danmu_msg.uid,
|
||||
user_name: danmu_msg.username,
|
||||
message: danmu_msg.msg,
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
|
||||
use super::stream::WsStreamCtx;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
|
||||
@@ -1,4 +1,9 @@
|
||||
use crate::{provider::DanmuProvider, DanmuMessage, DanmuMessageType, DanmuStreamError};
|
||||
mod messages;
|
||||
|
||||
use std::io::Read;
|
||||
use std::sync::Arc;
|
||||
use std::time::{Duration, SystemTime};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use deno_core::v8;
|
||||
use deno_core::JsRuntime;
|
||||
@@ -7,11 +12,9 @@ use flate2::read::GzDecoder;
|
||||
use futures_util::{SinkExt, StreamExt, TryStreamExt};
|
||||
use log::debug;
|
||||
use log::{error, info};
|
||||
use messages::*;
|
||||
use prost::bytes::Bytes;
|
||||
use prost::Message;
|
||||
use std::io::Read;
|
||||
use std::sync::Arc;
|
||||
use std::time::{Duration, SystemTime};
|
||||
use tokio::net::TcpStream;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::RwLock;
|
||||
@@ -19,8 +22,7 @@ use tokio_tungstenite::{
|
||||
connect_async, tungstenite::Message as WsMessage, MaybeTlsStream, WebSocketStream,
|
||||
};
|
||||
|
||||
mod messages;
|
||||
use messages::*;
|
||||
use crate::{provider::DanmuProvider, DanmuMessage, DanmuMessageType, DanmuStreamError};
|
||||
|
||||
const USER_AGENT: &str = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36";
|
||||
|
||||
@@ -31,7 +33,7 @@ type WsWriteType =
|
||||
futures_util::stream::SplitSink<WebSocketStream<MaybeTlsStream<TcpStream>>, WsMessage>;
|
||||
|
||||
pub struct DouyinDanmu {
|
||||
room_id: u64,
|
||||
room_id: String,
|
||||
cookie: String,
|
||||
stop: Arc<RwLock<bool>>,
|
||||
write: Arc<RwLock<Option<WsWriteType>>>,
|
||||
@@ -109,7 +111,7 @@ impl DouyinDanmu {
|
||||
runtime
|
||||
.execute_script(
|
||||
"<crypto-js.min.js>",
|
||||
deno_core::FastString::Static(crypto_js),
|
||||
deno_core::FastString::from_static(crypto_js),
|
||||
)
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to execute crypto-js: {}", e),
|
||||
@@ -118,7 +120,7 @@ impl DouyinDanmu {
|
||||
// Load and execute the sign.js file
|
||||
let js_code = include_str!("douyin/webmssdk.js");
|
||||
runtime
|
||||
.execute_script("<sign.js>", deno_core::FastString::Static(js_code))
|
||||
.execute_script("<sign.js>", deno_core::FastString::from_static(js_code))
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to execute JavaScript: {}", e),
|
||||
})?;
|
||||
@@ -126,10 +128,7 @@ impl DouyinDanmu {
|
||||
// Call the get_wss_url function
|
||||
let sign_call = format!("get_wss_url(\"{}\")", self.room_id);
|
||||
let result = runtime
|
||||
.execute_script(
|
||||
"<sign_call>",
|
||||
deno_core::FastString::Owned(sign_call.into_boxed_str()),
|
||||
)
|
||||
.execute_script("<sign_call>", deno_core::FastString::from(sign_call))
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to execute JavaScript: {}", e),
|
||||
})?;
|
||||
@@ -193,7 +192,7 @@ impl DouyinDanmu {
|
||||
});
|
||||
|
||||
// Main message handling loop
|
||||
let room_id = self.room_id;
|
||||
let room_id = self.room_id.clone();
|
||||
let stop = Arc::clone(&self.stop);
|
||||
let write = Arc::clone(&self.write);
|
||||
let message_handle = tokio::spawn(async move {
|
||||
@@ -211,10 +210,10 @@ impl DouyinDanmu {
|
||||
|
||||
match msg {
|
||||
WsMessage::Binary(data) => {
|
||||
if let Ok(Some(ack)) = handle_binary_message(&data, &tx, room_id).await {
|
||||
if let Ok(Some(ack)) = handle_binary_message(&data, &tx, &room_id).await {
|
||||
if let Some(write) = write.write().await.as_mut() {
|
||||
if let Err(e) =
|
||||
write.send(WsMessage::Binary(ack.encode_to_vec())).await
|
||||
write.send(WsMessage::binary(ack.encode_to_vec())).await
|
||||
{
|
||||
error!("Failed to send ack: {}", e);
|
||||
}
|
||||
@@ -257,7 +256,7 @@ impl DouyinDanmu {
|
||||
|
||||
async fn send_heartbeat(tx: &mpsc::Sender<WsMessage>) -> Result<(), DanmuStreamError> {
|
||||
// heartbeat message: 3A 02 68 62
|
||||
tx.send(WsMessage::Binary(vec![0x3A, 0x02, 0x68, 0x62]))
|
||||
tx.send(WsMessage::binary(vec![0x3A, 0x02, 0x68, 0x62]))
|
||||
.await
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to send heartbeat message: {}", e),
|
||||
@@ -269,7 +268,7 @@ impl DouyinDanmu {
|
||||
async fn handle_binary_message(
|
||||
data: &[u8],
|
||||
tx: &mpsc::UnboundedSender<DanmuMessageType>,
|
||||
room_id: u64,
|
||||
room_id: &str,
|
||||
) -> Result<Option<PushFrame>, DanmuStreamError> {
|
||||
// First decode the PushFrame
|
||||
let push_frame = PushFrame::decode(Bytes::from(data.to_vec())).map_err(|e| {
|
||||
@@ -329,7 +328,7 @@ async fn handle_binary_message(
|
||||
})?;
|
||||
if let Some(user) = chat_msg.user {
|
||||
let danmu_msg = DanmuMessage {
|
||||
room_id,
|
||||
room_id: room_id.to_string(),
|
||||
user_id: user.id,
|
||||
user_name: user.nick_name,
|
||||
message: chat_msg.content,
|
||||
@@ -395,9 +394,9 @@ async fn handle_binary_message(
|
||||
|
||||
#[async_trait]
|
||||
impl DanmuProvider for DouyinDanmu {
|
||||
async fn new(identifier: &str, room_id: u64) -> Result<Self, DanmuStreamError> {
|
||||
async fn new(identifier: &str, room_id: &str) -> Result<Self, DanmuStreamError> {
|
||||
Ok(Self {
|
||||
room_id,
|
||||
room_id: room_id.to_string(),
|
||||
cookie: identifier.to_string(),
|
||||
stop: Arc::new(RwLock::new(false)),
|
||||
write: Arc::new(RwLock::new(None)),
|
||||
@@ -409,7 +408,6 @@ impl DanmuProvider for DouyinDanmu {
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let mut retry_count = 0;
|
||||
const MAX_RETRIES: u32 = 5;
|
||||
const RETRY_DELAY: Duration = Duration::from_secs(5);
|
||||
info!(
|
||||
"Douyin WebSocket connection started, room_id: {}",
|
||||
@@ -423,28 +421,25 @@ impl DanmuProvider for DouyinDanmu {
|
||||
|
||||
match self.connect_and_handle(tx.clone()).await {
|
||||
Ok(_) => {
|
||||
info!("Douyin WebSocket connection closed normally");
|
||||
break;
|
||||
info!(
|
||||
"Douyin WebSocket connection closed normally, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
retry_count = 0;
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Douyin WebSocket connection error: {}", e);
|
||||
retry_count += 1;
|
||||
|
||||
if retry_count >= MAX_RETRIES {
|
||||
return Err(DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to connect after {} retries", MAX_RETRIES),
|
||||
});
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}/{})",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
MAX_RETRIES
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
}
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}), room_id: {}",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
self.room_id
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use prost::Message;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use prost::Message;
|
||||
|
||||
// message Response {
|
||||
// repeated Message messagesList = 1;
|
||||
// string cursor = 2;
|
||||
|
||||
@@ -4,10 +4,10 @@ mod douyin;
|
||||
use async_trait::async_trait;
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
use crate::{
|
||||
provider::bilibili::BiliDanmu, provider::douyin::DouyinDanmu, DanmuMessageType,
|
||||
DanmuStreamError,
|
||||
};
|
||||
use self::bilibili::BiliDanmu;
|
||||
use self::douyin::DouyinDanmu;
|
||||
|
||||
use crate::{DanmuMessageType, DanmuStreamError};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum ProviderType {
|
||||
@@ -17,7 +17,7 @@ pub enum ProviderType {
|
||||
|
||||
#[async_trait]
|
||||
pub trait DanmuProvider: Send + Sync {
|
||||
async fn new(identifier: &str, room_id: u64) -> Result<Self, DanmuStreamError>
|
||||
async fn new(identifier: &str, room_id: &str) -> Result<Self, DanmuStreamError>
|
||||
where
|
||||
Self: Sized;
|
||||
|
||||
@@ -57,7 +57,7 @@ pub trait DanmuProvider: Send + Sync {
|
||||
pub async fn new(
|
||||
provider_type: ProviderType,
|
||||
identifier: &str,
|
||||
room_id: u64,
|
||||
room_id: &str,
|
||||
) -> Result<Box<dyn DanmuProvider>, DanmuStreamError> {
|
||||
match provider_type {
|
||||
ProviderType::BiliBili => {
|
||||
37
src-tauri/crates/recorder/Cargo.toml
Normal file
@@ -0,0 +1,37 @@
|
||||
[package]
|
||||
name = "recorder"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "recorder"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
danmu_stream = { path = "../danmu_stream" }
|
||||
async-trait = "0.1.89"
|
||||
rand = "0.9.2"
|
||||
chrono = "0.4.42"
|
||||
tokio = "1.48.0"
|
||||
reqwest = { workspace = true}
|
||||
pct-str = "2.0.0"
|
||||
serde_json = "1.0.145"
|
||||
serde = "1.0.228"
|
||||
regex = "1.12.2"
|
||||
deno_core = "0.355"
|
||||
uuid = { workspace = true}
|
||||
serde_derive = "1.0.228"
|
||||
thiserror = "2.0.17"
|
||||
log = "0.4.28"
|
||||
sanitize-filename = "0.6.0"
|
||||
m3u8-rs = "6.0.0"
|
||||
async-ffmpeg-sidecar = "0.0.3"
|
||||
md5 = "0.8.0"
|
||||
scraper = "0.24.0"
|
||||
base64 = "0.22.1"
|
||||
url = "2.5.0"
|
||||
urlencoding = "2.1.3"
|
||||
fastrand = "2.0.1"
|
||||
|
||||
[dev-dependencies]
|
||||
env_logger = "0.11"
|
||||
9
src-tauri/crates/recorder/src/account.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct Account {
|
||||
pub platform: String,
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub avatar: String,
|
||||
pub csrf: String,
|
||||
pub cookies: String,
|
||||
}
|
||||
431
src-tauri/crates/recorder/src/core/hls_recorder.rs
Normal file
@@ -0,0 +1,431 @@
|
||||
use std::path::Path;
|
||||
use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering};
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
|
||||
use m3u8_rs::{MediaPlaylist, Playlist};
|
||||
use reqwest::header::HeaderMap;
|
||||
use std::time::Duration;
|
||||
use tokio::fs::{File, OpenOptions};
|
||||
use tokio::io::{AsyncReadExt, AsyncWriteExt};
|
||||
use tokio::sync::{broadcast, Mutex, RwLock};
|
||||
|
||||
use crate::core::playlist::HlsPlaylist;
|
||||
use crate::core::{Codec, Format};
|
||||
use crate::errors::RecorderError;
|
||||
use crate::ffmpeg::VideoMetadata;
|
||||
use crate::{core::HlsStream, events::RecorderEvent};
|
||||
|
||||
const UPDATE_TIMEOUT: Duration = Duration::from_secs(20);
|
||||
const UPDATE_INTERVAL: Duration = Duration::from_secs(1);
|
||||
const PLAYLIST_FILE_NAME: &str = "playlist.m3u8";
|
||||
const DOWNLOAD_RETRY: u32 = 3;
|
||||
/// A recorder for HLS streams
|
||||
///
|
||||
/// This recorder fetches, caches and serves TS entries, currently supporting `StreamType::FMP4, StreamType::TS`.
|
||||
///
|
||||
/// Segments will be downloaded to work_dir, and `playlist.m3u8` will be generated in work_dir.
|
||||
#[derive(Clone)]
|
||||
pub struct HlsRecorder {
|
||||
room_id: String,
|
||||
stream: Arc<HlsStream>,
|
||||
client: reqwest::Client,
|
||||
event_channel: broadcast::Sender<RecorderEvent>,
|
||||
work_dir: PathBuf,
|
||||
playlist: Arc<Mutex<HlsPlaylist>>,
|
||||
headers: HeaderMap,
|
||||
|
||||
enabled: Arc<AtomicBool>,
|
||||
|
||||
sequence: Arc<AtomicU64>,
|
||||
updated_at: Arc<AtomicI64>,
|
||||
|
||||
cached_duration_secs: Arc<AtomicU64>,
|
||||
cached_size_bytes: Arc<AtomicU64>,
|
||||
|
||||
pre_metadata: Arc<RwLock<Option<VideoMetadata>>>,
|
||||
}
|
||||
|
||||
impl HlsRecorder {
    /// Build a new recorder for `stream`, rooted at `work_dir`.
    ///
    /// Creates `work_dir` if missing (panics on failure) and opens/resumes the
    /// output `playlist.m3u8` inside it. A fresh user-agent header is generated
    /// for every recorder; `cookies`, when provided, are attached to every
    /// request. Recording in `start` proceeds only while `enabled` is `true`.
    pub async fn new(
        room_id: String,
        stream: Arc<HlsStream>,
        client: reqwest::Client,
        cookies: Option<String>,
        event_channel: broadcast::Sender<RecorderEvent>,
        work_dir: PathBuf,
        enabled: Arc<AtomicBool>,
    ) -> Self {
        // try to create work_dir
        if !work_dir.exists() {
            std::fs::create_dir_all(&work_dir).unwrap();
        }
        let playlist_path = work_dir.join(PLAYLIST_FILE_NAME);

        // set user agent
        let user_agent =
            crate::utils::user_agent_generator::UserAgentGenerator::new().generate(false);
        let mut headers = reqwest::header::HeaderMap::new();
        headers.insert("user-agent", user_agent.parse().unwrap());
        if let Some(cookies) = cookies {
            headers.insert("cookie", cookies.parse().unwrap());
        }
        Self {
            room_id,
            stream,
            client,
            event_channel,
            work_dir,
            playlist: Arc::new(Mutex::new(HlsPlaylist::new(playlist_path).await)),
            headers,
            enabled,
            sequence: Arc::new(AtomicU64::new(0)),
            // Seeded with "now" so the UpdateTimeout check cannot fire before
            // the first poll has had a chance to run.
            updated_at: Arc::new(AtomicI64::new(chrono::Utc::now().timestamp_millis())),
            cached_duration_secs: Arc::new(AtomicU64::new(0)),
            cached_size_bytes: Arc::new(AtomicU64::new(0)),
            pre_metadata: Arc::new(RwLock::new(None)),
        }
    }

    /// Start the recorder blockingly
    ///
    /// This will start the recorder and update the entries periodically.
    /// Returns Ok(()) when `enabled` becomes false; returns the error on any
    /// failure from `update_entries` (every error arm below returns `Err`).
    pub async fn start(&self) -> Result<(), RecorderError> {
        while self.enabled.load(Ordering::Relaxed) {
            let result = self.update_entries().await;
            if let Err(e) = result {
                match e {
                    RecorderError::ResolutionChanged { .. } => {
                        log::error!("Resolution changed: {}", e);
                        // Finalize the playlist (VOD/ENDLIST) before bailing out.
                        self.playlist.lock().await.close().await?;
                        return Err(e);
                    }
                    RecorderError::UpdateTimeout => {
                        log::error!(
                            "Source playlist is not updated for a long time, stop recording"
                        );
                        self.playlist.lock().await.close().await?;
                        return Err(e);
                    }
                    RecorderError::M3u8ParseFailed { .. } => {
                        log::error!("[{}]M3u8 parse failed: {}", self.room_id, e);
                        return Err(e);
                    }
                    _ => {
                        // Other errors are not critical, just log it
                        // NOTE(review): despite the comment, this arm also
                        // returns Err and ends the loop — confirm intent.
                        log::error!("[{}]Update entries error: {}", self.room_id, e);
                        return Err(e);
                    }
                }
            }

            tokio::time::sleep(UPDATE_INTERVAL).await;
        }

        Ok(())
    }

    /// Request the polling loop in `start` to exit after its current iteration.
    pub async fn stop(&self) {
        self.enabled.store(false, Ordering::Relaxed);
    }

    /// Fetch and parse the playlist at `stream.index()`, returning either a
    /// media or a master playlist.
    async fn query_playlist(&self, stream: &HlsStream) -> Result<Playlist, RecorderError> {
        let url = stream.index();
        let response = self
            .client
            .get(url)
            .headers(self.headers.clone())
            .send()
            .await?;
        let bytes = response.bytes().await?;
        let (_, playlist) =
            m3u8_rs::parse_playlist(&bytes).map_err(|_| RecorderError::M3u8ParseFailed {
                // NOTE(review): from_utf8(...).unwrap() panics on non-UTF-8 bodies.
                content: String::from_utf8(bytes.to_vec()).unwrap(),
            })?;
        Ok(playlist)
    }

    /// Resolve `self.stream` to a media playlist.
    ///
    /// If the index URL yields a master playlist, the first variant is followed
    /// (one level deep only); a master-of-master or an empty variant list is an
    /// `M3u8ParseFailed` error.
    async fn query_media_playlist(&self) -> Result<MediaPlaylist, RecorderError> {
        let playlist = self.query_playlist(&self.stream).await?;
        match playlist {
            Playlist::MediaPlaylist(playlist) => Ok(playlist),
            Playlist::MasterPlaylist(playlist) => {
                // just return the first variant
                match playlist.variants.first() {
                    Some(variant) => {
                        let real_stream = construct_stream_from_variant(
                            &self.stream.id,
                            &variant.uri,
                            self.stream.format.clone(),
                            self.stream.codec.clone(),
                        )
                        .await?;
                        let playlist = self.query_playlist(&real_stream).await?;
                        match playlist {
                            Playlist::MediaPlaylist(playlist) => Ok(playlist),
                            Playlist::MasterPlaylist(_) => Err(RecorderError::M3u8ParseFailed {
                                content: "No media playlist found".to_string(),
                            }),
                        }
                    }
                    None => Err(RecorderError::M3u8ParseFailed {
                        content: "No variants found".to_string(),
                    }),
                }
            }
        }
    }

    /// One polling step: fetch the media playlist, download every segment newer
    /// than the last recorded sequence, append it to the local playlist, and
    /// update the duration/size/sequence counters. Emits a `RecordUpdate` event
    /// when anything new was written; errors with `UpdateTimeout` when the
    /// source playlist has been stale longer than `UPDATE_TIMEOUT`.
    async fn update_entries(&self) -> Result<(), RecorderError> {
        let media_playlist = self.query_media_playlist().await?;
        let playlist_sequence = media_playlist.media_sequence;
        let last_sequence = self.sequence.load(Ordering::Relaxed);
        let last_metadata = self.pre_metadata.read().await.clone();
        let mut updated = false;
        for (i, segment) in media_playlist.segments.iter().enumerate() {
            // Absolute media-sequence number of this segment; skip ones we
            // already have.
            let segment_sequence = playlist_sequence + i as u64;
            if segment_sequence <= last_sequence {
                continue;
            }

            let segment_full_url = self.stream.ts_url(&segment.uri);
            // to get filename, we need to remove the query parameters
            // for example: 1.ts?expires=1760808243
            // we need to remove the query parameters: 1.ts
            let filename = segment.uri.split('?').next().unwrap_or(&segment.uri);
            let segment_path = self.work_dir.join(filename);
            let Ok(size) = download(
                &self.client,
                &segment_full_url,
                &segment_path,
                DOWNLOAD_RETRY,
            )
            .await
            else {
                log::error!("Download failed: {:#?}", segment);
                return Err(RecorderError::IoError(std::io::Error::other(
                    "Download failed",
                )));
            };

            // check if the stream is changed
            let segment_metadata = crate::ffmpeg::extract_video_metadata(&segment_path)
                .await
                .map_err(RecorderError::FfmpegError)?;

            // IMPORTANT: This handles bilibili ts stream segment, which might lack of SPS/PPS and need to be appended behind last segment
            if segment_metadata.seems_corrupted() {
                let mut playlist = self.playlist.lock().await;
                if playlist.is_empty().await {
                    // ignore this segment
                    log::error!(
                        "Segment is corrupted and has no previous segment, ignore: {}",
                        segment_path.display()
                    );
                    continue;
                }

                // Non-empty playlist guaranteed above, so last_segment is Some.
                let last_segment = playlist.last_segment().await;
                let last_segment_uri = last_segment.unwrap().uri.clone();
                let last_segment_path = segment_path.with_file_name(last_segment_uri);
                // append segment data behind last segment data
                let mut last_segment_file = OpenOptions::new()
                    .append(true)
                    .open(&last_segment_path)
                    .await?;
                log::debug!(
                    "Appending segment data behind last segment: {}",
                    last_segment_path.display()
                );
                let mut segment_file = File::open(&segment_path).await?;
                let mut buffer = Vec::new();
                segment_file.read_to_end(&mut buffer).await?;
                last_segment_file.write_all(&buffer).await?;
                // The standalone file is no longer needed once merged; removal
                // failure is deliberately ignored.
                let _ = tokio::fs::remove_file(&segment_path).await;
                playlist.append_last_segment(segment.clone()).await?;

                self.cached_duration_secs
                    .fetch_add(segment_metadata.duration as u64, Ordering::Relaxed);
                self.cached_size_bytes.fetch_add(size, Ordering::Relaxed);
                self.sequence.store(segment_sequence, Ordering::Relaxed);
                self.updated_at
                    .store(chrono::Utc::now().timestamp_millis(), Ordering::Relaxed);
                updated = true;
                continue;
            }

            // Compare against the first good segment's metadata; any mismatch
            // (resolution/codec, per VideoMetadata's PartialEq) aborts recording.
            if let Some(last_metadata) = &last_metadata {
                if last_metadata != &segment_metadata {
                    return Err(RecorderError::ResolutionChanged {
                        err: "Resolution changed".to_string(),
                    });
                }
            } else {
                self.pre_metadata
                    .write()
                    .await
                    .replace(segment_metadata.clone());
            }

            // Use the probed duration rather than the playlist's advertised one.
            let mut new_segment = segment.clone();
            new_segment.duration = segment_metadata.duration as f32;

            self.playlist.lock().await.add_segment(new_segment).await?;

            self.cached_duration_secs
                .fetch_add(segment_metadata.duration as u64, Ordering::Relaxed);
            self.cached_size_bytes.fetch_add(size, Ordering::Relaxed);
            self.sequence.store(segment_sequence, Ordering::Relaxed);
            self.updated_at
                .store(chrono::Utc::now().timestamp_millis(), Ordering::Relaxed);
            updated = true;
        }

        // Source playlist may not be updated for a long time, check if it's timeout
        let current_time = chrono::Utc::now().timestamp_millis();
        if self.updated_at.load(Ordering::Relaxed) + (UPDATE_TIMEOUT.as_millis() as i64)
            < current_time
        {
            return Err(RecorderError::UpdateTimeout);
        }

        if updated {
            // Receiver lagging/closed is not an error for the recorder itself.
            let _ = self.event_channel.send(RecorderEvent::RecordUpdate {
                live_id: self.stream.id.clone(),
                duration_secs: self.cached_duration_secs.load(Ordering::Relaxed),
                cached_size_bytes: self.cached_size_bytes.load(Ordering::Relaxed),
            });
        }

        Ok(())
    }
}
|
||||
|
||||
/// Download url content into fpath
|
||||
async fn download_inner(
|
||||
client: &reqwest::Client,
|
||||
url: &str,
|
||||
path: &Path,
|
||||
) -> Result<u64, RecorderError> {
|
||||
if !path.parent().unwrap().exists() {
|
||||
std::fs::create_dir_all(path.parent().unwrap()).unwrap();
|
||||
}
|
||||
let response = client.get(url).send().await?;
|
||||
if !response.status().is_success() {
|
||||
let status = response.status();
|
||||
log::warn!("Download segment failed: {url}: {status}");
|
||||
return Err(RecorderError::InvalidResponseStatus { status });
|
||||
}
|
||||
let bytes = response.bytes().await?;
|
||||
let size = bytes.len() as u64;
|
||||
let mut file = tokio::fs::File::create(&path).await?;
|
||||
let mut content = std::io::Cursor::new(bytes.clone());
|
||||
tokio::io::copy(&mut content, &mut file).await?;
|
||||
Ok(size)
|
||||
}
|
||||
|
||||
async fn download(
|
||||
client: &reqwest::Client,
|
||||
url: &str,
|
||||
path: &Path,
|
||||
retry: u32,
|
||||
) -> Result<u64, RecorderError> {
|
||||
for i in 0..retry {
|
||||
let result = download_inner(client, url, path).await;
|
||||
if let Ok(size) = result {
|
||||
return Ok(size);
|
||||
}
|
||||
log::error!("Download failed, retry: {}", i);
|
||||
// sleep for 500 ms
|
||||
tokio::time::sleep(Duration::from_millis(500)).await;
|
||||
}
|
||||
|
||||
Err(RecorderError::IoError(std::io::Error::other(
|
||||
"Download failed",
|
||||
)))
|
||||
}
|
||||
|
||||
pub async fn construct_stream_from_variant(
|
||||
id: &str,
|
||||
variant_url: &str,
|
||||
format: Format,
|
||||
codec: Codec,
|
||||
) -> Result<HlsStream, RecorderError> {
|
||||
// construct the real stream from variant
|
||||
// example: https://cn-jsnt-ct-01-07.bilivideo.com/live-bvc/930889/live_2124647716_1414766_bluray/index.m3u8?expires=1760808243
|
||||
let (body, extra) = variant_url.split_once('?').unwrap_or((variant_url, ""));
|
||||
// body example: https://cn-jsnt-ct-01-07.bilivideo.com/live-bvc/930889/live_2124647716_1414766_bluray/index.m3u8
|
||||
|
||||
// extract host, should be like: https://cn-jsnt-ct-01-07.bilivideo.com, which contains http schema
|
||||
let host = if let Some(schema_end) = body.find("://") {
|
||||
let after_schema = &body[schema_end + 3..];
|
||||
if let Some(path_start) = after_schema.find('/') {
|
||||
format!("{}{}", &body[..schema_end + 3], &after_schema[..path_start])
|
||||
} else {
|
||||
body.to_string()
|
||||
}
|
||||
} else {
|
||||
return Err(RecorderError::M3u8ParseFailed {
|
||||
content: "Invalid URL format: missing protocol".to_string(),
|
||||
});
|
||||
};
|
||||
|
||||
// extract base, should be like: /live-bvc/930889/live_2124647716_1414766_bluray/index.m3u8
|
||||
let base = if let Some(schema_end) = body.find("://") {
|
||||
let after_schema = &body[schema_end + 3..];
|
||||
if let Some(path_start) = after_schema.find('/') {
|
||||
format!("/{}", &after_schema[path_start + 1..])
|
||||
} else {
|
||||
"/".to_string()
|
||||
}
|
||||
} else {
|
||||
return Err(RecorderError::M3u8ParseFailed {
|
||||
content: "Invalid URL format: missing protocol".to_string(),
|
||||
});
|
||||
};
|
||||
|
||||
// Add '?' to base if there are query parameters, to match the expected format
|
||||
let base_with_query = if !extra.is_empty() {
|
||||
format!("{}?", base)
|
||||
} else {
|
||||
base
|
||||
};
|
||||
|
||||
let real_stream = HlsStream::new(
|
||||
id.to_string(),
|
||||
host,
|
||||
base_with_query,
|
||||
extra.to_string(),
|
||||
format,
|
||||
codec,
|
||||
);
|
||||
|
||||
Ok(real_stream)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use crate::core::{Codec, Format};

    use super::*;

    /// Round-trip a real Huya variant URL through `construct_stream_from_variant`
    /// and verify the host/base/extra split plus the derived index and
    /// segment-URL shapes (including query-parameter merging).
    #[tokio::test]
    async fn test_construct_stream_from_variant() {
        let stream = construct_stream_from_variant(
            "test",
            "https://hs.hls.huya.com/huyalive/156976698-156976698-674209784144068608-314076852-10057-A-0-1.m3u8?ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103",
            Format::TS,
            Codec::Avc,
        ).await.unwrap();
        // index() reconstructs the original URL exactly.
        assert_eq!(stream.index(), "https://hs.hls.huya.com/huyalive/156976698-156976698-674209784144068608-314076852-10057-A-0-1.m3u8?ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103");
        // Segment without its own query: extra is appended after '?'.
        assert_eq!(stream.ts_url("1.ts"), "https://hs.hls.huya.com/huyalive/1.ts?ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103");
        // Segment with its own query: extra is merged with '&'.
        assert_eq!(stream.ts_url("1.ts?expires=1760808243"), "https://hs.hls.huya.com/huyalive/1.ts?expires=1760808243&ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103");
        assert_eq!(stream.host, "https://hs.hls.huya.com");
        // base keeps a trailing '?' because the source URL carried a query.
        assert_eq!(
            stream.base,
            "/huyalive/156976698-156976698-674209784144068608-314076852-10057-A-0-1.m3u8?"
        );
        assert_eq!(stream.extra, "ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103");
        assert_eq!(stream.format, Format::TS);
        assert_eq!(stream.codec, Codec::Avc);
    }
}
|
||||
97
src-tauri/crates/recorder/src/core/mod.rs
Normal file
@@ -0,0 +1,97 @@
|
||||
use std::fmt;
|
||||
pub mod hls_recorder;
|
||||
pub mod playlist;
|
||||
|
||||
/// Container format of a live stream.
#[derive(Clone, Debug, PartialEq)]
pub enum Format {
    Flv,
    TS,
    FMP4,
}

impl fmt::Display for Format {
    /// Render the format using its variant name ("Flv", "TS", "FMP4"),
    /// delegating to the derived `Debug` representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
|
||||
|
||||
/// Video codec of a live stream.
#[derive(Clone, Debug, PartialEq)]
pub enum Codec {
    Avc,
    Hevc,
}

impl fmt::Display for Codec {
    /// Render the codec using its variant name ("Avc", "Hevc"), delegating to
    /// the derived `Debug` representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
|
||||
|
||||
/// An HLS stream location, split into host / base path / query string.
///
/// For example:
/// ```text
/// host: https://d1--cn-gotcha104b.bilivideo.com
/// base: /live-bvc/375028/live_2124647716_1414766_bluray.m3u8?
/// extra: expire=1734567890&oi=1234567890&s=1234567890&pt=0&ps=0&bw=1000000&tk=1234567890
/// ```
/// Note the trailing '?' on `base` when a query string (`extra`) exists —
/// `index()` relies on it when concatenating the parts back together.
#[derive(Debug, Clone)]
pub struct HlsStream {
    // Stream/live identifier, surfaced as `live_id` in recorder events.
    id: String,
    // Scheme + authority, e.g. "https://example.com" (no trailing slash).
    host: String,
    // Absolute path to the index playlist, '?'-suffixed when extra is non-empty.
    base: String,
    // Query string without the leading '?'.
    extra: String,
    format: Format,
    codec: Codec,
}

impl HlsStream {
    /// Assemble a stream from its pre-split parts; see the struct docs for the
    /// expected shape of `host`, `base` and `extra`.
    pub fn new(
        id: String,
        host: String,
        base: String,
        extra: String,
        format: Format,
        codec: Codec,
    ) -> Self {
        Self {
            id,
            host,
            base,
            extra,
            format,
            codec,
        }
    }

    /// Full URL of the index playlist: `host + base [+ extra]`.
    pub fn index(&self) -> String {
        if self.extra.is_empty() {
            format!("{}{}", self.host, self.base)
        } else {
            format!("{}{}{}", self.host, self.base, self.extra)
        }
    }

    /// Full URL for segment `seg_name`, obtained by substituting the playlist
    /// filename in `base` with the segment name and re-attaching `extra`.
    ///
    /// NOTE(review): `replace` substitutes every occurrence of the playlist
    /// filename in `base`; this is only safe while that filename cannot appear
    /// earlier in the path — confirm for new platforms.
    pub fn ts_url(&self, seg_name: &str) -> String {
        let base = self.base.clone();
        // Last path component, which includes the trailing '?' when present.
        let m3u8_filename = base.split('/').next_back().unwrap();
        let base_url = base.replace(m3u8_filename, seg_name);
        if self.extra.is_empty() {
            format!("{}{}", self.host, base_url)
        } else {
            // Check if base_url already contains query parameters
            if base_url.contains('?') {
                // If seg_name already has query params, append extra with '&'
                // Remove trailing '?' or '&' before appending
                let base_trimmed = base_url.trim_end_matches('?').trim_end_matches('&');
                format!("{}{}&{}", self.host, base_trimmed, self.extra)
            } else {
                // If no query params, add them with '?'
                // Remove trailing '?' from base_url if present
                let base_without_query = base_url.trim_end_matches('?');
                format!("{}{}?{}", self.host, base_without_query, self.extra)
            }
        }
    }
}
|
||||
87
src-tauri/crates/recorder/src/core/playlist.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
use m3u8_rs::{MediaPlaylist, MediaPlaylistType, MediaSegment};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::errors::RecorderError;
|
||||
|
||||
/// An HLS media playlist persisted to disk: every mutation is flushed back to
/// `file_path` so the on-disk `playlist.m3u8` always matches memory.
pub struct HlsPlaylist {
    // In-memory playlist state (segments, target duration, end-list flag).
    pub playlist: MediaPlaylist,
    // Where the serialized playlist is written on each flush.
    pub file_path: PathBuf,
}
|
||||
|
||||
impl HlsPlaylist {
    /// Open the playlist at `file_path`, resuming from the existing file when
    /// present, otherwise starting from an empty default playlist.
    ///
    /// NOTE(review): both the read and the parse `unwrap()` — a corrupted
    /// on-disk playlist panics here instead of starting fresh.
    pub async fn new(file_path: PathBuf) -> Self {
        if file_path.exists() {
            let bytes = tokio::fs::read(&file_path).await.unwrap();
            let (_, playlist) = m3u8_rs::parse_media_playlist(&bytes).unwrap();
            Self {
                playlist,
                file_path,
            }
        } else {
            Self {
                playlist: MediaPlaylist::default(),
                file_path,
            }
        }
    }

    /// The most recently added segment, if any.
    pub async fn last_segment(&self) -> Option<&MediaSegment> {
        self.playlist.segments.last()
    }

    /// Merge `segment` into the last existing segment by extending its
    /// duration (the segment's media data was appended to the last segment's
    /// file by the caller). Falls back to a plain add when the playlist is
    /// still empty.
    pub async fn append_last_segment(
        &mut self,
        segment: MediaSegment,
    ) -> Result<(), RecorderError> {
        if self.is_empty().await {
            self.add_segment(segment).await?;
            return Ok(());
        }

        {
            let last = self.playlist.segments.last_mut().unwrap();
            let new_duration = last.duration + segment.duration;
            last.duration = new_duration;
            // Keep target_duration >= every segment duration
            // (`as u64` truncates the fractional part).
            self.playlist.target_duration =
                std::cmp::max(self.playlist.target_duration, new_duration as u64);
            self.flush().await?;
        }

        Ok(())
    }

    /// Append a segment and persist the playlist.
    pub async fn add_segment(&mut self, segment: MediaSegment) -> Result<(), RecorderError> {
        self.playlist.segments.push(segment);
        self.flush().await?;
        Ok(())
    }

    /// Serialize the current playlist state and overwrite `file_path` with it.
    pub async fn flush(&self) -> Result<(), RecorderError> {
        // Create an in-memory buffer to serialize the playlist into.
        // `Vec<u8>` implements `std::io::Write`, which `m3u8_rs::MediaPlaylist::write_to` expects.
        let mut buffer = Vec::new();

        // Serialize the playlist into the buffer.
        self.playlist
            .write_to(&mut buffer)
            .map_err(RecorderError::IoError)?;

        // Write the buffer to the file
        tokio::fs::write(&self.file_path, buffer)
            .await
            .map_err(RecorderError::IoError)?;

        Ok(())
    }

    /// Finalize the playlist as a VOD (sets ENDLIST + playlist type) and flush.
    pub async fn close(&mut self) -> Result<(), RecorderError> {
        self.playlist.end_list = true;
        self.playlist.playlist_type = Some(MediaPlaylistType::Vod);
        self.flush().await?;
        Ok(())
    }

    /// True when no segments have been recorded yet.
    pub async fn is_empty(&self) -> bool {
        self.playlist.segments.is_empty()
    }
}
|
||||
@@ -1,3 +1,5 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use serde::Serialize;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
use tokio::{
|
||||
@@ -18,7 +20,7 @@ pub struct DanmuStorage {
|
||||
}
|
||||
|
||||
impl DanmuStorage {
|
||||
pub async fn new(file_path: &str) -> Option<DanmuStorage> {
|
||||
pub async fn new(file_path: &PathBuf) -> Option<DanmuStorage> {
|
||||
let file = OpenOptions::new()
|
||||
.read(true)
|
||||
.write(true)
|
||||
@@ -38,7 +40,7 @@ impl DanmuStorage {
|
||||
let parts: Vec<&str> = line.split(':').collect();
|
||||
let ts: i64 = parts[0].parse().unwrap();
|
||||
let content = parts[1].to_string();
|
||||
preload_cache.push(DanmuEntry { ts, content })
|
||||
preload_cache.push(DanmuEntry { ts, content });
|
||||
}
|
||||
let file = OpenOptions::new()
|
||||
.append(true)
|
||||
@@ -61,18 +63,22 @@ impl DanmuStorage {
|
||||
.file
|
||||
.write()
|
||||
.await
|
||||
.write(format!("{}:{}\n", ts, content).as_bytes())
|
||||
.write(format!("{ts}:{content}\n").as_bytes())
|
||||
.await;
|
||||
}
|
||||
|
||||
// get entries with ts relative to live start time
|
||||
pub async fn get_entries(&self, live_start_ts: i64) -> Vec<DanmuEntry> {
|
||||
let mut danmus: Vec<DanmuEntry> = self.cache.read().await.iter().map(|entry| {
|
||||
DanmuEntry {
|
||||
let mut danmus: Vec<DanmuEntry> = self
|
||||
.cache
|
||||
.read()
|
||||
.await
|
||||
.iter()
|
||||
.map(|entry| DanmuEntry {
|
||||
ts: entry.ts - live_start_ts,
|
||||
content: entry.content.clone(),
|
||||
}
|
||||
}).collect();
|
||||
})
|
||||
.collect();
|
||||
// filter out danmus with ts < 0
|
||||
danmus.retain(|entry| entry.ts >= 0);
|
||||
danmus
|
||||
@@ -1,13 +1,10 @@
|
||||
use core::fmt;
|
||||
use std::fmt::Display;
|
||||
|
||||
use async_std::{
|
||||
fs::{File, OpenOptions},
|
||||
io::{prelude::BufReadExt, BufReader, WriteExt},
|
||||
path::Path,
|
||||
stream::StreamExt,
|
||||
};
|
||||
use chrono::{TimeZone, Utc};
|
||||
use core::fmt;
|
||||
use std::{fmt::Display, path::Path};
|
||||
use tokio::{
|
||||
fs::OpenOptions,
|
||||
io::{AsyncBufReadExt, BufReader},
|
||||
};
|
||||
|
||||
const ENTRY_FILE_NAME: &str = "entries.log";
|
||||
|
||||
@@ -31,19 +28,19 @@ impl TsEntry {
|
||||
url: parts[0].to_string(),
|
||||
sequence: parts[1]
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse sequence: {}", e))?,
|
||||
.map_err(|e| format!("Failed to parse sequence: {e}"))?,
|
||||
length: parts[2]
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse length: {}", e))?,
|
||||
.map_err(|e| format!("Failed to parse length: {e}"))?,
|
||||
size: parts[3]
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse size: {}", e))?,
|
||||
.map_err(|e| format!("Failed to parse size: {e}"))?,
|
||||
ts: parts[4]
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse timestamp: {}", e))?,
|
||||
.map_err(|e| format!("Failed to parse timestamp: {e}"))?,
|
||||
is_header: parts[5]
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse is_header: {}", e))?,
|
||||
.map_err(|e| format!("Failed to parse is_header: {e}"))?,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -51,39 +48,30 @@ impl TsEntry {
|
||||
pub fn ts_seconds(&self) -> i64 {
|
||||
// For some legacy problem, douyin entry's ts is s, bilibili entry's ts is ms.
|
||||
// This should be fixed after 2.5.6, but we need to support entry.log generated by previous version.
|
||||
if self.ts > 10000000000 {
|
||||
if self.ts > 10_000_000_000 {
|
||||
self.ts / 1000
|
||||
} else {
|
||||
self.ts
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ts_mili(&self) -> i64 {
|
||||
// if already in ms, return as is
|
||||
if self.ts > 10000000000 {
|
||||
self.ts
|
||||
} else {
|
||||
self.ts * 1000
|
||||
}
|
||||
}
|
||||
|
||||
pub fn date_time(&self) -> String {
|
||||
let date_str = Utc
|
||||
.timestamp_opt(self.ts_seconds(), 0)
|
||||
.unwrap()
|
||||
.to_rfc3339();
|
||||
format!("#EXT-X-PROGRAM-DATE-TIME:{}\n", date_str)
|
||||
format!("#EXT-X-PROGRAM-DATE-TIME:{date_str}\n")
|
||||
}
|
||||
|
||||
/// Convert entry into a segment in HLS manifest.
|
||||
pub fn to_segment(&self) -> String {
|
||||
if self.is_header {
|
||||
return "".into();
|
||||
return String::new();
|
||||
}
|
||||
|
||||
let mut content = String::new();
|
||||
|
||||
content += &format!("#EXTINF:{:.2},\n", self.length);
|
||||
content += &format!("#EXTINF:{:.4},\n", self.length);
|
||||
content += &format!("{}\n", self.url);
|
||||
|
||||
content
|
||||
@@ -100,11 +88,9 @@ impl Display for TsEntry {
|
||||
}
|
||||
}
|
||||
|
||||
/// EntryStore is used to management stream segments, which is basicly a simple version of hls manifest,
|
||||
/// and of course, provids methods to generate hls manifest for frontend player.
|
||||
/// `EntryStore` is used to management stream segments, which is basically a simple version of hls manifest,
|
||||
/// and of course, provides methods to generate hls manifest for frontend player.
|
||||
pub struct EntryStore {
|
||||
// append only log file
|
||||
log_file: File,
|
||||
header: Option<TsEntry>,
|
||||
entries: Vec<TsEntry>,
|
||||
total_duration: f64,
|
||||
@@ -115,18 +101,11 @@ pub struct EntryStore {
|
||||
impl EntryStore {
|
||||
pub async fn new(work_dir: &str) -> Self {
|
||||
// if work_dir is not exists, create it
|
||||
if !Path::new(work_dir).exists().await {
|
||||
if !Path::new(work_dir).exists() {
|
||||
std::fs::create_dir_all(work_dir).unwrap();
|
||||
}
|
||||
// open append only log file
|
||||
let log_file = OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(format!("{}/{}", work_dir, ENTRY_FILE_NAME))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut entry_store = Self {
|
||||
log_file,
|
||||
header: None,
|
||||
entries: vec![],
|
||||
total_duration: 0.0,
|
||||
@@ -143,14 +122,26 @@ impl EntryStore {
|
||||
let file = OpenOptions::new()
|
||||
.create(false)
|
||||
.read(true)
|
||||
.open(format!("{}/{}", work_dir, ENTRY_FILE_NAME))
|
||||
.await
|
||||
.unwrap();
|
||||
let mut lines = BufReader::new(file).lines();
|
||||
while let Some(Ok(line)) = lines.next().await {
|
||||
let entry = TsEntry::from(&line);
|
||||
.open(format!("{work_dir}/{ENTRY_FILE_NAME}"))
|
||||
.await; // The `file` variable from the previous line now holds `Result<tokio::fs::File, tokio::io::Error>`
|
||||
let file_handle = match file {
|
||||
Ok(f) => f,
|
||||
Err(e) => {
|
||||
if e.kind() == std::io::ErrorKind::NotFound {
|
||||
log::info!(
|
||||
"Entry file not found at {work_dir}/{ENTRY_FILE_NAME}, starting fresh."
|
||||
);
|
||||
} else {
|
||||
log::error!("Failed to open entry file: {e}");
|
||||
}
|
||||
return; // Exit the load function if file cannot be opened
|
||||
}
|
||||
};
|
||||
let mut lines = BufReader::new(file_handle).lines();
|
||||
while let Ok(Some(line)) = lines.next_line().await {
|
||||
let entry = TsEntry::from(line.as_str());
|
||||
if let Err(e) = entry {
|
||||
log::error!("Failed to parse entry: {} {}", e, line);
|
||||
log::error!("Failed to parse entry: {e} {line}");
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -169,50 +160,18 @@ impl EntryStore {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn add_entry(&mut self, entry: TsEntry) {
|
||||
if entry.is_header {
|
||||
self.header = Some(entry.clone());
|
||||
} else {
|
||||
self.entries.push(entry.clone());
|
||||
}
|
||||
|
||||
if let Err(e) = self.log_file.write_all(entry.to_string().as_bytes()).await {
|
||||
log::error!("Failed to write entry to log file: {}", e);
|
||||
}
|
||||
|
||||
self.log_file.flush().await.unwrap();
|
||||
|
||||
self.last_sequence = std::cmp::max(self.last_sequence, entry.sequence);
|
||||
|
||||
self.total_duration += entry.length;
|
||||
self.total_size += entry.size;
|
||||
pub fn len(&self) -> usize {
|
||||
self.entries.len()
|
||||
}
|
||||
|
||||
pub fn get_header(&self) -> Option<&TsEntry> {
|
||||
self.header.as_ref()
|
||||
}
|
||||
|
||||
pub fn total_duration(&self) -> f64 {
|
||||
self.total_duration
|
||||
}
|
||||
|
||||
pub fn total_size(&self) -> u64 {
|
||||
self.total_size
|
||||
}
|
||||
|
||||
pub fn first_ts(&self) -> Option<i64> {
|
||||
self.entries.first().map(|x| x.ts_mili())
|
||||
}
|
||||
|
||||
pub fn last_ts(&self) -> Option<i64> {
|
||||
self.entries.last().map(|x| x.ts_mili())
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.entries.is_empty()
|
||||
}
|
||||
|
||||
/// Generate a hls manifest for selected range.
|
||||
/// `vod` indicates the manifest is for stream or video.
|
||||
/// `force_time` adds DATE-TIME tag for each entry.
|
||||
pub fn manifest(&self, vod: bool, force_time: bool, range: Option<Range>) -> String {
|
||||
log::info!("Generate manifest for range: {:?} with vod: {} and force_time: {}", range, vod, force_time);
|
||||
let mut m3u8_content = "#EXTM3U\n".to_string();
|
||||
m3u8_content += "#EXT-X-VERSION:6\n";
|
||||
m3u8_content += if vod {
|
||||
@@ -240,12 +199,6 @@ impl EntryStore {
|
||||
// Collect entries in range
|
||||
let first_entry = self.entries.first().unwrap();
|
||||
let first_entry_ts = first_entry.ts_seconds();
|
||||
log::debug!("First entry ts: {}", first_entry_ts);
|
||||
let last_entry = self.entries.last().unwrap();
|
||||
let last_entry_ts = last_entry.ts_seconds();
|
||||
log::debug!("Last entry ts: {}", last_entry_ts);
|
||||
log::debug!("Full length: {}", last_entry_ts - first_entry_ts);
|
||||
log::debug!("Range: {:?}", range);
|
||||
let mut entries_in_range = vec![];
|
||||
for e in &self.entries {
|
||||
// ignore header, cause it's already in EXT-X-MAP
|
||||
81
src-tauri/crates/recorder/src/errors.rs
Normal file
@@ -0,0 +1,81 @@
|
||||
use super::platforms::bilibili::api::BiliStream;
|
||||
use super::platforms::douyin::stream_info::DouyinStream;
|
||||
use thiserror::Error;
|
||||
|
||||
/// A platform-specific stream handle, carried in the error variants below so
/// error messages can include the full stream description via `{:#?}`.
#[derive(Debug, Clone)]
pub enum Stream {
    BiliBili(BiliStream),
    Douyin(DouyinStream),
}
|
||||
|
||||
/// Every error the recorder crate can produce. `Display` messages come from
/// the `thiserror` attributes; `IoError`, `DanmuStreamError` and `ClientError`
/// also get automatic `From` conversions via `#[from]`.
#[derive(Error, Debug)]
pub enum RecorderError {
    // --- playlist / cache lookup ---
    #[error("Index not found: {url}")]
    IndexNotFound { url: String },
    #[error("Can not delete current stream: {live_id}")]
    ArchiveInUse { live_id: String },
    #[error("Cache is empty")]
    EmptyCache,
    #[error("Parse m3u8 content failed: {content}")]
    M3u8ParseFailed { content: String },

    // --- stream acquisition / health ---
    #[error("No available stream provided")]
    NoStreamAvailable,
    #[error("Stream is freezed: {stream:#?}")]
    FreezedStream { stream: Stream },
    #[error("Stream is nearly expired: {stream:#?}")]
    StreamExpired { stream: Stream },
    #[error("No room info provided")]
    NoRoomInfo,
    #[error("Invalid stream: {stream:#?}")]
    InvalidStream { stream: Stream },
    #[error("Stream is too slow: {stream:#?}")]
    SlowStream { stream: Stream },
    #[error("Header url is empty")]
    EmptyHeader,
    #[error("Header timestamp is invalid")]
    InvalidTimestamp,

    // --- wrapped lower-level errors ---
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    #[error("Danmu stream error: {0}")]
    DanmuStreamError(#[from] danmu_stream::DanmuStreamError),

    // --- post-processing ---
    #[error("Subtitle not found: {live_id}")]
    SubtitleNotFound { live_id: String },
    #[error("Subtitle generation failed: {error}")]
    SubtitleGenerationFailed { error: String },
    #[error("Resolution changed: {err}")]
    ResolutionChanged { err: String },
    #[error("Ffmpeg error: {0}")]
    FfmpegError(String),
    #[error("Format not found: {format}")]
    FormatNotFound { format: String },
    #[error("Codec not found: {codecs}")]
    CodecNotFound { codecs: String },

    // --- platform API / HTTP ---
    #[error("Invalid cookies")]
    InvalidCookies,
    #[error("API error: {error}")]
    ApiError { error: String },
    #[error("Invalid value")]
    InvalidValue,
    #[error("Invalid response")]
    InvalidResponse,
    #[error("Invalid response json: {resp}")]
    InvalidResponseJson { resp: serde_json::Value },
    #[error("Invalid response status: {status}")]
    InvalidResponseStatus { status: reqwest::StatusCode },

    // --- upload ---
    #[error("Upload cancelled")]
    UploadCancelled,
    #[error("Upload error: {err}")]
    UploadError { err: String },

    // --- transport / runtime ---
    #[error("Client error: {0}")]
    ClientError(#[from] reqwest::Error),
    #[error("Security control error")]
    SecurityControlError,
    #[error("JavaScript runtime error: {0}")]
    JsRuntimeError(String),
    #[error("Update timeout")]
    UpdateTimeout,
    #[error("Unsupported stream")]
    UnsupportedStream,
    #[error("Empty record")]
    EmptyRecord,
}
|
||||
39
src-tauri/crates/recorder/src/events.rs
Normal file
@@ -0,0 +1,39 @@
|
||||
use crate::platforms::PlatformType;
|
||||
use crate::RecorderInfo;
|
||||
|
||||
/// Events broadcast by recorders over the shared `broadcast` channel.
#[derive(Debug, Clone)]
pub enum RecorderEvent {
    /// A monitored room went live.
    LiveStart {
        recorder: RecorderInfo,
    },
    /// A monitored room went offline.
    LiveEnd {
        room_id: String,
        platform: PlatformType,
        recorder: RecorderInfo,
    },
    /// A recording session started.
    RecordStart {
        recorder: RecorderInfo,
    },
    /// A recording session ended.
    RecordEnd {
        recorder: RecorderInfo,
    },
    /// Periodic progress for an ongoing recording.
    RecordUpdate {
        live_id: String,
        /// Recorded duration so far, in seconds.
        duration_secs: u64,
        /// Bytes cached on disk so far.
        cached_size_bytes: u64,
    },
    /// Free-form progress text for a long-running job identified by `id`.
    ProgressUpdate {
        id: String,
        content: String,
    },
    /// Terminal status for a long-running job identified by `id`.
    ProgressFinished {
        id: String,
        success: bool,
        message: String,
    },
    /// A danmu (chat) message was received.
    DanmuReceived {
        /// Room id the message belongs to.
        room: String,
        /// Receive timestamp in milliseconds (set via `timestamp_millis()` by the producer).
        ts: i64,
        content: String,
    },
}
|
||||
111
src-tauri/crates/recorder/src/ffmpeg/mod.rs
Normal file
@@ -0,0 +1,111 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// Basic media properties probed from a video file.
#[derive(Debug, Clone)]
pub struct VideoMetadata {
    pub duration: f64,
    pub width: u32,
    pub height: u32,
    pub video_codec: String,
    pub audio_codec: String,
}

impl VideoMetadata {
    /// A file whose probed resolution is 0x0 is treated as unreadable/corrupted.
    pub fn seems_corrupted(&self) -> bool {
        matches!((self.width, self.height), (0, 0))
    }
}

/// Equality compares stream characteristics only; `duration` is deliberately
/// left out so two probes of the same stream at different lengths compare equal.
impl std::cmp::PartialEq for VideoMetadata {
    fn eq(&self, other: &Self) -> bool {
        let same_resolution = self.width == other.width && self.height == other.height;
        let same_codecs =
            self.video_codec == other.video_codec && self.audio_codec == other.audio_codec;
        same_resolution && same_codecs
    }
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
const CREATE_NO_WINDOW: u32 = 0x08000000;
|
||||
#[cfg(target_os = "windows")]
|
||||
#[allow(unused_imports)]
|
||||
use std::os::windows::process::CommandExt;
|
||||
|
||||
/// Name of the ffprobe executable for the current platform:
/// `ffprobe` on Unix-likes, `ffprobe.exe` on Windows.
fn ffprobe_path() -> PathBuf {
    let name = if cfg!(windows) { "ffprobe.exe" } else { "ffprobe" };
    PathBuf::from(name)
}
|
||||
|
||||
/// Extract basic information from a video file.
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `file_path` - The path to the video file.
|
||||
///
|
||||
/// # Returns
|
||||
/// A `Result` containing the video metadata or an error message.
|
||||
pub async fn extract_video_metadata(file_path: &Path) -> Result<VideoMetadata, String> {
|
||||
let mut ffprobe_process = tokio::process::Command::new(ffprobe_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffprobe_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let output = ffprobe_process
|
||||
.args([
|
||||
"-v",
|
||||
"quiet",
|
||||
"-print_format",
|
||||
"json",
|
||||
"-show_format",
|
||||
"-show_streams",
|
||||
&format!("{}", file_path.display()),
|
||||
])
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| format!("执行ffprobe失败: {e}"))?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(format!(
|
||||
"ffprobe执行失败: {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
let json_str = String::from_utf8_lossy(&output.stdout);
|
||||
let json: serde_json::Value =
|
||||
serde_json::from_str(&json_str).map_err(|e| format!("解析ffprobe输出失败: {e}"))?;
|
||||
|
||||
// 解析视频流信息
|
||||
let streams = json["streams"].as_array().ok_or("未找到视频流信息")?;
|
||||
|
||||
if streams.is_empty() {
|
||||
return Err("未找到视频流".to_string());
|
||||
}
|
||||
|
||||
let mut metadata = VideoMetadata {
|
||||
duration: 0.0,
|
||||
width: 0,
|
||||
height: 0,
|
||||
video_codec: String::new(),
|
||||
audio_codec: String::new(),
|
||||
};
|
||||
|
||||
for stream in streams {
|
||||
let codec_name = stream["codec_type"].as_str().unwrap_or("");
|
||||
if codec_name == "video" {
|
||||
metadata.video_codec = stream["codec_name"].as_str().unwrap_or("").to_owned();
|
||||
metadata.width = stream["width"].as_u64().unwrap_or(0) as u32;
|
||||
metadata.height = stream["height"].as_u64().unwrap_or(0) as u32;
|
||||
metadata.duration = stream["duration"]
|
||||
.as_str()
|
||||
.unwrap_or("0.0")
|
||||
.parse::<f64>()
|
||||
.unwrap_or(0.0);
|
||||
} else if codec_name == "audio" {
|
||||
metadata.audio_codec = stream["codec_name"].as_str().unwrap_or("").to_owned();
|
||||
}
|
||||
}
|
||||
Ok(metadata)
|
||||
}
|
||||
251
src-tauri/crates/recorder/src/lib.rs
Normal file
@@ -0,0 +1,251 @@
|
||||
pub mod account;
|
||||
pub mod core;
|
||||
pub mod danmu;
|
||||
pub mod entry;
|
||||
pub mod errors;
|
||||
pub mod events;
|
||||
mod ffmpeg;
|
||||
pub mod platforms;
|
||||
pub mod traits;
|
||||
pub mod utils;
|
||||
use crate::danmu::DanmuStorage;
|
||||
use crate::events::RecorderEvent;
|
||||
use crate::{account::Account, platforms::PlatformType};
|
||||
|
||||
use std::{
|
||||
fmt::Display,
|
||||
path::PathBuf,
|
||||
sync::{atomic, Arc},
|
||||
};
|
||||
use tokio::{
|
||||
sync::{broadcast, Mutex, RwLock},
|
||||
task::JoinHandle,
|
||||
};
|
||||
|
||||
/// Serde-serializable snapshot of a recorder's current state; carried by
/// most `RecorderEvent` variants.
#[derive(serde::Deserialize, serde::Serialize, Clone, Debug)]
pub struct RecorderInfo {
    pub room_info: RoomInfo,
    pub user_info: UserInfo,
    /// Platform-side id of the ongoing live session (empty when offline).
    pub platform_live_id: String,
    /// Local recording id (empty when not recording).
    pub live_id: String,
    /// Whether a recording is currently in progress.
    pub recording: bool,
    /// Whether this recorder is allowed to record.
    pub enabled: bool,
}
|
||||
|
||||
/// Platform-agnostic live-room metadata.
#[derive(serde::Deserialize, serde::Serialize, Clone, Debug, Default)]
pub struct RoomInfo {
    /// Platform identifier string (e.g. "bilibili").
    pub platform: String,
    pub room_id: String,
    pub room_title: String,
    /// Room cover image URL.
    pub room_cover: String,
    /// Whether the room is live
    pub status: bool,
}
|
||||
|
||||
/// Streamer profile info associated with a room.
#[derive(serde::Deserialize, serde::Serialize, Clone, Debug, Default)]
pub struct UserInfo {
    pub user_id: String,
    pub user_name: String,
    /// Avatar image URL.
    pub user_avatar: String,
}
|
||||
|
||||
/// `Recorder` is the base struct for all recorders.
/// It contains the basic information for a recorder
/// and the platform-specific extra information (`T`).
#[derive(Clone)]
pub struct Recorder<T>
where
    T: Send + Sync,
{
    /// Which platform this recorder targets
    platform: PlatformType,
    /// Platform-side room identifier
    room_id: String,
    /// The account for the recorder
    account: Account,
    /// The client for the recorder
    client: reqwest::Client,
    /// The event channel for the recorder
    event_channel: broadcast::Sender<RecorderEvent>,
    /// The cache directory for the recorder
    cache_dir: PathBuf,
    /// Whether the recorder is quitting
    quit: Arc<atomic::AtomicBool>,
    /// Whether the recorder is enabled
    enabled: Arc<atomic::AtomicBool>,
    /// Whether the recorder is recording
    is_recording: Arc<atomic::AtomicBool>,
    /// The room info for the recorder
    room_info: Arc<RwLock<RoomInfo>>,
    /// The user info for the recorder
    user_info: Arc<RwLock<UserInfo>>,
    /// The update interval for room status, in seconds
    update_interval: Arc<atomic::AtomicU64>,

    /// The platform live id for the current recording
    platform_live_id: Arc<RwLock<String>>,
    /// The live id for the current recording, generally is the timestamp of the recording start time
    live_id: Arc<RwLock<String>>,
    /// The danmu task for the current recording
    danmu_task: Arc<Mutex<Option<JoinHandle<()>>>>,
    /// The record task for the current recording
    record_task: Arc<Mutex<Option<JoinHandle<()>>>>,
    /// The danmu storage for the current recording
    danmu_storage: Arc<RwLock<Option<DanmuStorage>>>,
    /// The last update time of the current recording (unix seconds)
    last_update: Arc<atomic::AtomicI64>,
    /// The last sequence of the current recording
    last_sequence: Arc<atomic::AtomicU64>,
    /// The total duration of the current recording in milliseconds
    total_duration: Arc<atomic::AtomicU64>,
    /// The total size of the current recording in bytes
    total_size: Arc<atomic::AtomicU64>,

    /// The extra information for the recorder
    extra: T,
}
|
||||
|
||||
impl<T: Send + Sync> traits::RecorderBasicTrait<T> for Recorder<T> {
|
||||
fn platform(&self) -> PlatformType {
|
||||
self.platform
|
||||
}
|
||||
|
||||
fn room_id(&self) -> String {
|
||||
self.room_id.clone()
|
||||
}
|
||||
|
||||
fn account(&self) -> &Account {
|
||||
&self.account
|
||||
}
|
||||
|
||||
fn client(&self) -> &reqwest::Client {
|
||||
&self.client
|
||||
}
|
||||
|
||||
fn event_channel(&self) -> &broadcast::Sender<RecorderEvent> {
|
||||
&self.event_channel
|
||||
}
|
||||
|
||||
fn cache_dir(&self) -> PathBuf {
|
||||
self.cache_dir.clone()
|
||||
}
|
||||
|
||||
fn quit(&self) -> &atomic::AtomicBool {
|
||||
&self.quit
|
||||
}
|
||||
|
||||
fn enabled(&self) -> &atomic::AtomicBool {
|
||||
&self.enabled
|
||||
}
|
||||
|
||||
fn is_recording(&self) -> &atomic::AtomicBool {
|
||||
&self.is_recording
|
||||
}
|
||||
|
||||
fn room_info(&self) -> Arc<RwLock<RoomInfo>> {
|
||||
self.room_info.clone()
|
||||
}
|
||||
|
||||
fn user_info(&self) -> Arc<RwLock<UserInfo>> {
|
||||
self.user_info.clone()
|
||||
}
|
||||
|
||||
fn platform_live_id(&self) -> Arc<RwLock<String>> {
|
||||
self.platform_live_id.clone()
|
||||
}
|
||||
|
||||
fn live_id(&self) -> Arc<RwLock<String>> {
|
||||
self.live_id.clone()
|
||||
}
|
||||
|
||||
fn danmu_task(&self) -> Arc<Mutex<Option<JoinHandle<()>>>> {
|
||||
self.danmu_task.clone()
|
||||
}
|
||||
|
||||
fn record_task(&self) -> Arc<Mutex<Option<JoinHandle<()>>>> {
|
||||
self.record_task.clone()
|
||||
}
|
||||
|
||||
fn danmu_storage(&self) -> Arc<RwLock<Option<DanmuStorage>>> {
|
||||
self.danmu_storage.clone()
|
||||
}
|
||||
|
||||
fn last_update(&self) -> &atomic::AtomicI64 {
|
||||
&self.last_update
|
||||
}
|
||||
|
||||
fn last_sequence(&self) -> &atomic::AtomicU64 {
|
||||
&self.last_sequence
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> &atomic::AtomicU64 {
|
||||
&self.total_duration
|
||||
}
|
||||
|
||||
fn total_size(&self) -> &atomic::AtomicU64 {
|
||||
&self.total_size
|
||||
}
|
||||
|
||||
fn extra(&self) -> &T {
|
||||
&self.extra
|
||||
}
|
||||
}
|
||||
|
||||
/// Cache path is relative to cache path in config
|
||||
#[derive(Clone)]
|
||||
pub struct CachePath {
|
||||
pub cache_path: PathBuf,
|
||||
pub platform: PlatformType,
|
||||
pub room_id: String,
|
||||
pub live_id: String,
|
||||
pub file_name: Option<String>,
|
||||
}
|
||||
|
||||
impl CachePath {
|
||||
pub fn new(cache_path: PathBuf, platform: PlatformType, room_id: &str, live_id: &str) -> Self {
|
||||
Self {
|
||||
cache_path,
|
||||
platform,
|
||||
room_id: room_id.to_string(),
|
||||
live_id: live_id.to_string(),
|
||||
file_name: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Sanitize filename and set it
|
||||
pub fn with_filename(&self, file_name: &str) -> Self {
|
||||
let sanitized_filename = sanitize_filename::sanitize(file_name);
|
||||
Self {
|
||||
file_name: Some(sanitized_filename),
|
||||
..self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
/// Get relative path to cache path
|
||||
pub fn relative_path(&self) -> PathBuf {
|
||||
if let Some(file_name) = &self.file_name {
|
||||
return PathBuf::from(format!(
|
||||
"{}/{}/{}/{}",
|
||||
self.platform.as_str(),
|
||||
self.room_id,
|
||||
self.live_id,
|
||||
file_name
|
||||
));
|
||||
}
|
||||
|
||||
PathBuf::from(format!(
|
||||
"{}/{}/{}",
|
||||
self.platform.as_str(),
|
||||
self.room_id,
|
||||
self.live_id
|
||||
))
|
||||
}
|
||||
|
||||
pub fn full_path(&self) -> PathBuf {
|
||||
self.cache_path.clone().join(self.relative_path())
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for CachePath {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.full_path().display())
|
||||
}
|
||||
}
|
||||
474
src-tauri/crates/recorder/src/platforms/bilibili.rs
Normal file
@@ -0,0 +1,474 @@
|
||||
pub mod api;
|
||||
pub mod profile;
|
||||
pub mod response;
|
||||
use crate::account::Account;
|
||||
use crate::core::hls_recorder::HlsRecorder;
|
||||
use crate::events::RecorderEvent;
|
||||
use crate::platforms::bilibili::api::{Protocol, Qn};
|
||||
use crate::platforms::PlatformType;
|
||||
use crate::traits::RecorderTrait;
|
||||
use crate::{Recorder, RoomInfo, UserInfo};
|
||||
|
||||
use crate::core::Format;
|
||||
use crate::core::{Codec, HlsStream};
|
||||
use crate::danmu::DanmuStorage;
|
||||
use crate::platforms::bilibili::api::BiliStream;
|
||||
use chrono::Utc;
|
||||
use danmu_stream::danmu_stream::DanmuStream;
|
||||
use danmu_stream::provider::ProviderType;
|
||||
use danmu_stream::DanmuMessageType;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{atomic, Arc};
|
||||
use std::time::Duration;
|
||||
use tokio::sync::{broadcast, Mutex, RwLock};
|
||||
|
||||
use async_trait::async_trait;
|
||||
|
||||
/// A recorder for `BiliBili` live streams
///
/// This recorder fetches, caches and serves TS entries, currently supporting only `StreamType::FMP4`.
/// As high-quality streams are accessible only to logged-in users, the use of a `BiliClient`, which manages cookies, is required.
#[derive(Clone)]
pub struct BiliExtra {
    /// Relative cache path of the downloaded room cover, once fetched.
    cover: Arc<RwLock<Option<String>>>,
    /// Most recently resolved live stream descriptor, if any.
    live_stream: Arc<RwLock<Option<BiliStream>>>,
}

pub type BiliRecorder = Recorder<BiliExtra>;
|
||||
|
||||
impl BiliRecorder {
    /// Create a new recorder for `room_id`.
    ///
    /// The recorder starts idle: no tasks are spawned here. Call
    /// `RecorderTrait::run` to start status polling and danmu collection.
    pub async fn new(
        room_id: &str,
        account: &Account,
        cache_dir: PathBuf,
        event_channel: broadcast::Sender<RecorderEvent>,
        update_interval: Arc<atomic::AtomicU64>,
        enabled: bool,
    ) -> Result<Self, crate::errors::RecorderError> {
        let client = reqwest::Client::new();
        let extra = BiliExtra {
            cover: Arc::new(RwLock::new(None)),
            live_stream: Arc::new(RwLock::new(None)),
        };

        let recorder = Self {
            platform: PlatformType::BiliBili,
            room_id: room_id.to_string(),
            account: account.clone(),
            client,
            event_channel,
            cache_dir,
            quit: Arc::new(atomic::AtomicBool::new(false)),
            enabled: Arc::new(atomic::AtomicBool::new(enabled)),
            update_interval,
            is_recording: Arc::new(atomic::AtomicBool::new(false)),
            room_info: Arc::new(RwLock::new(RoomInfo::default())),
            user_info: Arc::new(RwLock::new(UserInfo::default())),
            platform_live_id: Arc::new(RwLock::new(String::new())),
            live_id: Arc::new(RwLock::new(String::new())),
            danmu_storage: Arc::new(RwLock::new(None)),
            last_update: Arc::new(atomic::AtomicI64::new(Utc::now().timestamp())),
            last_sequence: Arc::new(atomic::AtomicU64::new(0)),
            danmu_task: Arc::new(Mutex::new(None)),
            record_task: Arc::new(Mutex::new(None)),
            total_duration: Arc::new(atomic::AtomicU64::new(0)),
            total_size: Arc::new(atomic::AtomicU64::new(0)),
            extra,
        };

        // NOTE(review): `room_id` fills both placeholders, so the id is logged
        // twice — looks unintended, but harmless.
        log::info!("[{}]Recorder for room {} created.", room_id, room_id);

        Ok(recorder)
    }

    /// Log an info message prefixed with this room's id.
    fn log_info(&self, message: &str) {
        log::info!("[{}]{}", self.room_id, message);
    }

    /// Log an error message prefixed with this room's id.
    fn log_error(&self, message: &str) {
        log::error!("[{}]{}", self.room_id, message);
    }

    /// Clear all per-recording state (stream, danmu storage, ids, counters)
    /// so the next live session starts from a clean slate.
    pub async fn reset(&self) {
        *self.extra.live_stream.write().await = None;
        self.last_update
            .store(Utc::now().timestamp(), atomic::Ordering::Relaxed);
        *self.danmu_storage.write().await = None;
        *self.platform_live_id.write().await = String::new();
        *self.live_id.write().await = String::new();
        self.total_duration.store(0, atomic::Ordering::Relaxed);
        self.total_size.store(0, atomic::Ordering::Relaxed);
    }

    /// Poll the room's live status and refresh cached room/user/stream info.
    ///
    /// Returns `true` when the room is live (and, when recording should
    /// happen, a fresh stream lookup has been attempted); `false` when
    /// offline. On API failure the previous status is returned unchanged.
    async fn check_status(&self) -> bool {
        let pre_live_status = self.room_info.read().await.status;
        match api::get_room_info(&self.client, &self.account, &self.room_id).await {
            Ok(room_info) => {
                *self.room_info.write().await = RoomInfo {
                    platform: "bilibili".to_string(),
                    room_id: self.room_id.to_string(),
                    room_title: room_info.room_title,
                    room_cover: room_info.room_cover_url.clone(),
                    // API reports live_status == 1 for "live".
                    status: room_info.live_status == 1,
                };
                // Only update user info once (when the cached user id differs).
                if self.user_info.read().await.user_id != room_info.user_id {
                    let user_id = room_info.user_id;
                    let user_info = api::get_user_info(&self.client, &self.account, &user_id).await;
                    if let Ok(user_info) = user_info {
                        *self.user_info.write().await = UserInfo {
                            user_id: user_id.to_string(),
                            user_name: user_info.user_name,
                            user_avatar: user_info.user_avatar_url,
                        }
                    } else {
                        self.log_error(&format!(
                            "Failed to get user info: {}",
                            user_info.err().unwrap()
                        ));
                    }
                }
                let live_status = room_info.live_status == 1;

                // Handle live-status transitions (offline<->live): notify and reset.
                if pre_live_status != live_status {
                    self.log_info(&format!(
                        "Live status changed to {}, enabled: {}",
                        live_status,
                        self.enabled.load(atomic::Ordering::Relaxed)
                    ));

                    if live_status {
                        // Download the cover image into the room's cache directory;
                        // on success remember its relative path.
                        let room_cover_path = Path::new(PlatformType::BiliBili.as_str())
                            .join(&self.room_id)
                            .join("cover.jpg");
                        let full_room_cover_path = self.cache_dir.join(&room_cover_path);
                        if (api::download_file(
                            &self.client,
                            &room_info.room_cover_url,
                            &full_room_cover_path,
                        )
                        .await)
                        .is_ok()
                        {
                            *self.extra.cover.write().await =
                                Some(room_cover_path.to_str().unwrap().to_string());
                        }
                        let _ = self.event_channel.send(RecorderEvent::LiveStart {
                            recorder: self.info().await,
                        });
                    } else {
                        let _ = self.event_channel.send(RecorderEvent::LiveEnd {
                            platform: PlatformType::BiliBili,
                            room_id: self.room_id.to_string(),
                            recorder: self.info().await,
                        });
                        *self.live_id.write().await = String::new();
                    }

                    // The live status changed, so drop all per-recording state.
                    self.reset().await;
                }

                *self.platform_live_id.write().await = room_info.live_start_time.to_string();

                if !live_status {
                    return false;
                }

                // No need to resolve a stream if we should not record.
                if !self.should_record().await {
                    return true;
                }

                // current_record => update stream
                // auto_start+is_new_stream => update stream and current_record=true
                let new_stream = api::get_stream_info(
                    &self.client,
                    &self.account,
                    &self.room_id,
                    Protocol::HttpHls,
                    Format::TS,
                    &[Codec::Avc, Codec::Hevc],
                    Qn::Q4K,
                )
                .await;

                match new_stream {
                    Ok(stream) => {
                        let pre_live_stream = self.extra.live_stream.read().await.clone();
                        *self.extra.live_stream.write().await = Some(stream.clone());
                        self.last_update
                            .store(Utc::now().timestamp(), atomic::Ordering::Relaxed);

                        log::info!(
                            "[{}]Update to a new stream: {:#?} => {:#?}",
                            &self.room_id,
                            pre_live_stream,
                            stream
                        );

                        true
                    }
                    Err(e) => {
                        // Stream lookup failures are logged but do not flip the
                        // live status: the room is still live.
                        if let crate::errors::RecorderError::FormatNotFound { format } = e {
                            log::error!("[{}]Format {} not found", &self.room_id, format);

                            true
                        } else {
                            log::error!("[{}]Fetch stream failed: {}", &self.room_id, e);

                            true
                        }
                    }
                }
            }
            Err(e) => {
                log::error!("[{}]Update room status failed: {}", &self.room_id, e);
                // May be a transient network issue; keep the previous status.
                pre_live_status
            }
        }
    }

    /// Run the danmu (chat) connection for this room.
    ///
    /// Drives the connection future and the message receiver concurrently via
    /// `select!`; each message is broadcast as `DanmuReceived` and appended to
    /// the current recording's danmu storage (if one is active).
    async fn danmu(&self) -> Result<(), crate::errors::RecorderError> {
        let cookies = self.account.cookies.clone();
        let room_id = self.room_id.clone();
        let danmu_stream = DanmuStream::new(ProviderType::BiliBili, &cookies, &room_id).await;
        if danmu_stream.is_err() {
            let err = danmu_stream.err().unwrap();
            log::error!("[{}]Failed to create danmu stream: {}", &self.room_id, err);
            return Err(crate::errors::RecorderError::DanmuStreamError(err));
        }
        let danmu_stream = danmu_stream.unwrap();

        // Pin the long-running `start()` future so it can be polled repeatedly
        // inside `select!`.
        let mut start_fut = Box::pin(danmu_stream.start());

        loop {
            tokio::select! {
                start_res = &mut start_fut => {
                    match start_res {
                        Ok(_) => {
                            log::info!("[{}]Danmu stream finished", &self.room_id);
                            return Ok(());
                        }
                        Err(err) => {
                            log::error!("[{}]Danmu stream start error: {}", &self.room_id, err);
                            return Err(crate::errors::RecorderError::DanmuStreamError(err));
                        }
                    }
                }
                recv_res = danmu_stream.recv() => {
                    match recv_res {
                        Ok(Some(msg)) => {
                            match msg {
                                DanmuMessageType::DanmuMessage(danmu) => {
                                    // Timestamp messages locally, in milliseconds.
                                    let ts = Utc::now().timestamp_millis();
                                    let _ = self.event_channel.send(RecorderEvent::DanmuReceived {
                                        room: self.room_id.clone(),
                                        ts,
                                        content: danmu.message.clone(),
                                    });
                                    if let Some(storage) = self.danmu_storage.write().await.as_ref() {
                                        storage.add_line(ts, &danmu.message).await;
                                    }
                                }
                            }
                        }
                        Ok(None) => {
                            log::info!("[{}]Danmu stream closed", &self.room_id);
                            return Ok(());
                        }
                        Err(err) => {
                            log::error!("[{}]Failed to receive danmu message: {}", &self.room_id, err);
                            return Err(crate::errors::RecorderError::DanmuStreamError(err));
                        }
                    }
                }
            }
        }
    }

    /// Update entries for a new live.
    ///
    /// Prepares the work directory (danmu storage, cover copy), announces
    /// `RecordStart`, then hands off to `HlsRecorder` — which presumably runs
    /// until the session ends (the caller sends `RecordEnd` after this
    /// returns; confirm against `HlsRecorder::start`).
    async fn update_entries(&self, live_id: &str) -> Result<(), crate::errors::RecorderError> {
        let current_stream = self.extra.live_stream.read().await.clone();
        let Some(current_stream) = current_stream else {
            return Err(crate::errors::RecorderError::NoStreamAvailable);
        };

        let work_dir = self.work_dir(live_id).await;
        log::info!("[{}]New record started: {}", self.room_id, live_id);

        let _ = tokio::fs::create_dir_all(&work_dir.full_path()).await;

        let danmu_path = work_dir.with_filename("danmu.txt");
        *self.danmu_storage.write().await = DanmuStorage::new(&danmu_path.full_path()).await;

        let cover_path = work_dir.with_filename("cover.jpg");
        let room_cover_path = self
            .cache_dir
            .join(PlatformType::BiliBili.as_str())
            .join(&self.room_id)
            .join("cover.jpg");

        // NOTE(review): this fails the whole record if the cover was never
        // downloaded in `check_status` — confirm that is intended.
        tokio::fs::copy(room_cover_path, &cover_path.full_path())
            .await
            .map_err(crate::errors::RecorderError::IoError)?;

        *self.live_id.write().await = live_id.to_string();

        // Send record start event.
        let _ = self.event_channel.send(RecorderEvent::RecordStart {
            recorder: self.info().await,
        });

        self.is_recording.store(true, atomic::Ordering::Relaxed);

        // Only the first host entry is used for the actual HLS download.
        let stream = Arc::new(HlsStream::new(
            live_id.to_string(),
            current_stream.url_info.first().unwrap().host.clone(),
            current_stream.base_url.clone(),
            current_stream.url_info.first().unwrap().extra.clone(),
            current_stream.format,
            current_stream.codec,
        ));
        let hls_recorder = HlsRecorder::new(
            self.room_id.to_string(),
            stream,
            self.client.clone(),
            None,
            self.event_channel.clone(),
            work_dir.full_path(),
            self.enabled.clone(),
        )
        .await;
        if let Err(e) = hls_recorder.start().await {
            log::error!("[{}]Failed to start hls recorder: {}", self.room_id, e);
            return Err(e);
        }

        Ok(())
    }
}
|
||||
|
||||
#[async_trait]
impl crate::traits::RecorderTrait<BiliExtra> for BiliRecorder {
    /// Spawn the danmu task and the main status-poll/record loop for this room.
    async fn run(&self) {
        // Danmu (chat) collection runs independently of the record loop.
        let self_clone = self.clone();
        let danmu_task = tokio::spawn(async move {
            let _ = self_clone.danmu().await;
        });
        *self.danmu_task.lock().await = Some(danmu_task);

        let self_clone = self.clone();
        *self.record_task.lock().await = Some(tokio::spawn(async move {
            log::info!("[{}]Start running recorder", self_clone.room_id);
            while !self_clone.quit.load(atomic::Ordering::Relaxed) {
                if self_clone.check_status().await {
                    // Live status is ok, start recording.
                    if self_clone.should_record().await {
                        // Local live id: recording start time in milliseconds.
                        let live_id = Utc::now().timestamp_millis().to_string();

                        // `update_entries` returns when the recording session is over.
                        if let Err(e) = self_clone.update_entries(&live_id).await {
                            log::error!("[{}]Update entries error: {}", self_clone.room_id, e);
                        }

                        let _ = self_clone.event_channel.send(RecorderEvent::RecordEnd {
                            recorder: self_clone.info().await,
                        });
                    }

                    self_clone
                        .is_recording
                        .store(false, atomic::Ordering::Relaxed);

                    self_clone.reset().await;
                    // Go check status again after a random 2-5 secs (jitter).
                    let secs = rand::random::<u64>() % 4 + 2;
                    tokio::time::sleep(Duration::from_secs(secs)).await;
                    continue;
                }

                // Offline: wait the configured interval before polling again.
                tokio::time::sleep(Duration::from_secs(
                    self_clone.update_interval.load(atomic::Ordering::Relaxed),
                ))
                .await;
            }
        }));
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    /// The FMP4 header (`EXT-X-MAP`) and keyframe marker (`EXT-BILI-AUX`) are
    /// surfaced by `m3u8_rs` as unknown tags; this pins the ad-hoc string
    /// parsing used to extract them from a real captured playlist.
    #[test]
    fn parse_fmp4_playlist() {
        let content = r#"#EXTM3U
#EXT-X-VERSION:7
#EXT-X-START:TIME-OFFSET=0
#EXT-X-MEDIA-SEQUENCE:323066244
#EXT-X-TARGETDURATION:1
#EXT-X-MAP:URI=\"h1758715459.m4s\"
#EXT-BILI-AUX:97d350|K|7d1e3|fe1425ab
#EXTINF:1.00,7d1e3|fe1425ab
323066244.m4s
#EXT-BILI-AUX:97d706|N|757d4|c9094969
#EXTINF:1.00,757d4|c9094969
323066245.m4s
#EXT-BILI-AUX:97daee|N|8223d|f307566a
#EXTINF:1.00,8223d|f307566a
323066246.m4s
#EXT-BILI-AUX:97dee7|N|775cc|428d567
#EXTINF:1.00,775cc|428d567
323066247.m4s
#EXT-BILI-AUX:97e2df|N|10410|9a62fe61
#EXTINF:0.17,10410|9a62fe61
323066248.m4s
#EXT-BILI-AUX:97e397|K|679d2|8fbee7df
#EXTINF:1.00,679d2|8fbee7df
323066249.m4s
#EXT-BILI-AUX:97e74d|N|8907b|67d1c6ad
#EXTINF:1.00,8907b|67d1c6ad
323066250.m4s
#EXT-BILI-AUX:97eb35|N|87374|f6406797
#EXTINF:1.00,87374|f6406797
323066251.m4s
#EXT-BILI-AUX:97ef2d|N|6b792|b8125097
#EXTINF:1.00,6b792|b8125097
323066252.m4s
#EXT-BILI-AUX:97f326|N|e213|b30c02c6
#EXTINF:0.17,e213|b30c02c6
323066253.m4s
#EXT-BILI-AUX:97f3de|K|65754|7ea6dcc8
#EXTINF:1.00,65754|7ea6dcc8
323066254.m4s
"#;
        let (_, pl) = m3u8_rs::parse_media_playlist(content.as_bytes()).unwrap();
        // The parser yields the tag as:
        // ExtTag { tag: "X-MAP", rest: Some("URI=\\\"h1758715459.m4s\\\"") }
        let header_url = pl
            .segments
            .first()
            .unwrap()
            .unknown_tags
            .iter()
            .find(|t| t.tag == "X-MAP")
            .map(|t| {
                let rest = t.rest.clone().unwrap();
                // Strip the literal backslash-quote wrapping around the URI.
                rest.split('=').nth(1).unwrap().replace("\\\"", "")
            });
        // #EXT-BILI-AUX:a5e4e0|K|79b3e|ebde469e — second field "K" marks a keyframe.
        let is_key = pl
            .segments
            .first()
            .unwrap()
            .unknown_tags
            .iter()
            .find(|t| t.tag == "BILI-AUX")
            .map(|t| {
                let rest = t.rest.clone().unwrap();
                rest.split('|').nth(1).unwrap() == "K"
            });
        assert_eq!(is_key, Some(true));
        assert_eq!(header_url, Some("h1758715459.m4s".to_string()));
    }
}
|
||||
966
src-tauri/crates/recorder/src/platforms/bilibili/api.rs
Normal file
@@ -0,0 +1,966 @@
|
||||
use super::profile;
|
||||
use super::profile::Profile;
|
||||
use super::response;
|
||||
use super::response::GeneralResponse;
|
||||
use super::response::PostVideoMetaResponse;
|
||||
use super::response::PreuploadResponse;
|
||||
use super::response::VideoSubmitData;
|
||||
use crate::account::Account;
|
||||
use crate::core::Codec;
|
||||
use crate::core::Format;
|
||||
use crate::errors::RecorderError;
|
||||
use crate::utils::user_agent_generator;
|
||||
use chrono::TimeZone;
|
||||
use pct_str::PctString;
|
||||
use pct_str::URIReserved;
|
||||
use rand::seq::IndexedRandom;
|
||||
use rand::seq::SliceRandom;
|
||||
use regex::Regex;
|
||||
use reqwest::Client;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use serde_json::json;
|
||||
use serde_json::Value;
|
||||
use std::fmt;
|
||||
use std::path::Path;
|
||||
use std::time::Duration;
|
||||
use std::time::SystemTime;
|
||||
use tokio::fs::File;
|
||||
use tokio::io::AsyncReadExt;
|
||||
use tokio::time::Instant;
|
||||
|
||||
/// Borrowed bundle of everything needed to upload one video file.
#[derive(Clone)]
struct UploadParams<'a> {
    /// Response from the preupload endpoint.
    preupload_response: &'a PreuploadResponse,
    /// Response from posting the video meta.
    post_video_meta_response: &'a PostVideoMetaResponse,
    /// Local video file to upload.
    video_file: &'a Path,
}
|
||||
|
||||
/// Room snapshot as returned by the bilibili room-info API.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct RoomInfo {
    /// Raw live-status flag; callers treat `1` as "live".
    pub live_status: u8,
    pub room_cover_url: String,
    pub room_id: String,
    pub room_keyframe_url: String,
    pub room_title: String,
    pub user_id: String,
    /// Live session start time; used by callers as the platform live id.
    pub live_start_time: i64,
}
|
||||
|
||||
/// Streamer profile as returned by the bilibili user-info API.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct UserInfo {
    pub user_id: String,
    pub user_name: String,
    /// User signature / bio text.
    pub user_sign: String,
    pub user_avatar_url: String,
}
|
||||
|
||||
/// Data needed to render a login QR code.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct QrInfo {
    /// Key identifying this QR session; used when polling its status.
    pub oauth_key: String,
    /// URL to encode into the QR image.
    pub url: String,
}
|
||||
|
||||
/// Result of polling a login QR code.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct QrStatus {
    /// Poll status code from the API; `0` means the login was confirmed.
    pub code: u8,
    /// Session cookies captured on successful login (empty otherwise).
    pub cookies: String,
}
|
||||
|
||||
/// A resolved bilibili live stream: shared path plus candidate hosts.
#[derive(Clone, Debug)]
pub struct BiliStream {
    /// Container format reported by the stream API.
    pub format: Format,
    /// Codec reported by the stream API.
    pub codec: Codec,
    /// Path component shared by all hosts (includes the playlist file name).
    pub base_url: String,
    /// Candidate hosts (with per-host extras) to fetch from.
    pub url_info: Vec<UrlInfo>,
    /// Whether the stream is DRM-protected.
    pub drm: bool,
    /// Optional master playlist URL.
    pub master_url: Option<String>,
}

/// One host candidate for a `BiliStream`.
#[derive(Clone, Debug)]
pub struct UrlInfo {
    /// Host prefix, prepended to `BiliStream::base_url`.
    pub host: String,
    /// Per-host query string appended after the path — presumably auth
    /// tokens; confirm against the stream API response.
    pub extra: String,
}
|
||||
|
||||
/// Stream transport protocols selectable when requesting stream info.
#[derive(Clone, Debug)]
#[allow(dead_code)]
pub enum Protocol {
    HttpStream,
    HttpHls,
}

impl fmt::Display for Protocol {
    /// Renders the variant name (`HttpStream` / `HttpHls`) by delegating to `Debug`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Inline argument capture, consistent with the rest of this file's
        // format strings (e.g. `{qrcode_key}`).
        write!(f, "{self:?}")
    }
}
|
||||
|
||||
// Bilibili stream quality numbers (qn):
// 30000 Dolby
// 20000 4K
// 15000 2K
// 10000 original quality
//   400 blu-ray
//   250 ultra HD
//   150 HD
//    80 smooth

/// Stream quality selector; discriminants are the API's `qn` values.
///
/// NOTE(review): some variant names do not line up with the qn table above
/// (e.g. `Q1080PH` = 10000 "original", `Q1080P` = 400 "blu-ray") — confirm
/// against the live API before relying on the names.
#[derive(Clone, Debug)]
#[allow(dead_code)]
pub enum Qn {
    Dolby = 30000,
    Q4K = 20000,
    Q2K = 15000,
    Q1080PH = 10000,
    Q1080P = 400,
    Q720P = 250,
    Hd = 150,
    Smooth = 80,
}

impl fmt::Display for Qn {
    /// Renders the variant name (e.g. `Q4K`) by delegating to `Debug`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Inline argument capture, consistent with the rest of this file.
        write!(f, "{self:?}")
    }
}
|
||||
|
||||
impl fmt::Display for BiliStream {
    /// One-line human-readable summary of the stream, used in logs.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "type: {:?}, codec: {:?}, base_url: {}, url_info: {:?}, drm: {}, master_url: {:?}",
            self.format, self.codec, self.base_url, self.url_info, self.drm, self.master_url
        )
    }
}
|
||||
|
||||
impl BiliStream {
    /// Assembles a `BiliStream`; `base_url` is copied from the borrowed
    /// `&str`, everything else is moved in.
    pub fn new(
        format: Format,
        codec: Codec,
        base_url: &str,
        url_info: Vec<UrlInfo>,
        drm: bool,
        master_url: Option<String>,
    ) -> BiliStream {
        BiliStream {
            format,
            codec,
            base_url: base_url.into(),
            url_info,
            drm,
            master_url,
        }
    }

    /// Full index/playlist URL using a randomly chosen CDN host.
    ///
    /// # Panics
    /// Panics if `url_info` is empty (`choose` returns `None`).
    pub fn index(&self) -> String {
        let url_info = self.url_info.choose(&mut rand::rng()).unwrap();
        // NOTE(review): `extra` is concatenated directly here, while
        // `ts_url` inserts a `?` itself — presumably `extra` already
        // carries its separator in this format; confirm against the API.
        format!("{}{}{}", url_info.host, self.base_url, url_info.extra)
    }

    /// URL for a single segment `seg_name`, derived by swapping the
    /// playlist filename inside `base_url` for the segment name and
    /// appending the host's query extra.
    ///
    /// # Panics
    /// Panics if `url_info` is empty.
    pub fn ts_url(&self, seg_name: &str) -> String {
        // Last path component of base_url is the playlist filename.
        let m3u8_filename = self.base_url.split('/').next_back().unwrap();
        let base_url = self.base_url.replace(m3u8_filename, seg_name);
        let url_info = self.url_info.choose(&mut rand::rng()).unwrap();
        format!("{}{}?{}", url_info.host, base_url, url_info.extra)
    }
}
|
||||
|
||||
fn generate_user_agent_header() -> reqwest::header::HeaderMap {
|
||||
let user_agent = user_agent_generator::UserAgentGenerator::new().generate(false);
|
||||
let mut headers = reqwest::header::HeaderMap::new();
|
||||
headers.insert("user-agent", user_agent.parse().unwrap());
|
||||
headers
|
||||
}
|
||||
|
||||
/// Requests a new login QR code from the bilibili passport API and
/// returns its key plus the URL the QR encodes.
///
/// # Errors
/// `RecorderError::InvalidValue` when the response JSON lacks the
/// expected string fields; network/JSON errors propagate via `?`.
pub async fn get_qr(client: &Client) -> Result<QrInfo, RecorderError> {
    let headers = generate_user_agent_header();
    let res: serde_json::Value = client
        .get("https://passport.bilibili.com/x/passport-login/web/qrcode/generate")
        .headers(headers)
        .send()
        .await?
        .json()
        .await?;
    Ok(QrInfo {
        // The API field is `qrcode_key`; the struct keeps the name
        // `oauth_key`.
        oauth_key: res["data"]["qrcode_key"]
            .as_str()
            .ok_or(RecorderError::InvalidValue)?
            .to_string(),
        url: res["data"]["url"]
            .as_str()
            .ok_or(RecorderError::InvalidValue)?
            .to_string(),
    })
}
|
||||
|
||||
/// Polls the login status of a QR code previously issued by `get_qr`.
///
/// When the poll reports success (`data.code == 0`), the confirmation
/// URL's query string is converted into a cookie string; otherwise
/// `cookies` stays empty and `code` carries the poll state.
pub async fn get_qr_status(client: &Client, qrcode_key: &str) -> Result<QrStatus, RecorderError> {
    let headers = generate_user_agent_header();
    let res: serde_json::Value = client
        .get(format!(
            "https://passport.bilibili.com/x/passport-login/web/qrcode/poll?qrcode_key={qrcode_key}"
        ))
        .headers(headers)
        .send()
        .await?
        .json()
        .await?;
    // A missing/non-numeric code falls back to 400 (not confirmed).
    let code: u8 = res["data"]["code"].as_u64().unwrap_or(400) as u8;
    let mut cookies: String = String::new();
    if code == 0 {
        let url = res["data"]["url"]
            .as_str()
            .ok_or(RecorderError::InvalidValue)?
            .to_string();
        // The cookies are carried as query parameters of the returned
        // URL; turn `a=1&b=2` into cookie-header form `a=1;b=2`.
        let query_str = url.split('?').next_back().unwrap();
        cookies = query_str.replace('&', ";");
    }
    Ok(QrStatus { code, cookies })
}
|
||||
|
||||
/// Logs the account out via bilibili's `login/exit/v2` endpoint.
/// The response body is deliberately ignored.
///
/// # Errors
/// `InvalidCookies` if the stored cookie string is not a valid header
/// value; transport errors propagate via `?`.
pub async fn logout(client: &Client, account: &Account) -> Result<(), RecorderError> {
    let mut headers = generate_user_agent_header();
    let url = "https://passport.bilibili.com/login/exit/v2";
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    // The endpoint requires the CSRF token as form data.
    let params = [("csrf", account.csrf.clone())];
    let _ = client
        .post(url)
        .headers(headers)
        .header("Content-Type", "application/x-www-form-urlencoded")
        .form(&params)
        .send()
        .await?;
    Ok(())
}
|
||||
|
||||
/// Fetches a bilibili user's profile (name, sign, avatar) by `user_id`
/// through the WBI-signed `x/space/wbi/acc/info` endpoint; the query is
/// first signed via `get_sign`.
///
/// # Errors
/// - `InvalidCookies` when the account cookie string is unusable.
/// - `SecurityControlError` on HTTP 412 (risk-control rejection).
/// - `InvalidResponseStatus` for other non-success HTTP statuses.
/// - `InvalidResponseJson` when `code` is missing or non-zero.
pub async fn get_user_info(
    client: &Client,
    account: &Account,
    user_id: &str,
) -> Result<UserInfo, RecorderError> {
    // Parameters the WBI scheme signs; `token`/`w_webid` may be empty
    // but must be present.
    let params: Value = json!({
        "mid": user_id.to_string(),
        "platform": "web",
        "web_location": "1550101",
        "token": "",
        "w_webid": "",
    });
    let params = get_sign(client, params).await?;
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let resp = client
        .get(format!(
            "https://api.bilibili.com/x/space/wbi/acc/info?{params}"
        ))
        .headers(headers)
        .send()
        .await?;

    if !resp.status().is_success() {
        // 412 is bilibili's security-control (risk control) status.
        if resp.status() == reqwest::StatusCode::PRECONDITION_FAILED {
            return Err(RecorderError::SecurityControlError);
        }
        return Err(RecorderError::InvalidResponseStatus {
            status: resp.status(),
        });
    }

    let res: serde_json::Value = resp.json().await?;
    let code = res["code"]
        .as_u64()
        .ok_or(RecorderError::InvalidResponseJson { resp: res.clone() })?;
    if code != 0 {
        log::error!("Get user info failed {code}");
        return Err(RecorderError::InvalidResponseJson { resp: res.clone() });
    }
    Ok(UserInfo {
        user_id: user_id.to_string(),
        // Missing profile fields degrade to empty strings.
        user_name: res["data"]["name"].as_str().unwrap_or("").to_string(),
        user_sign: res["data"]["sign"].as_str().unwrap_or("").to_string(),
        user_avatar_url: res["data"]["face"].as_str().unwrap_or("").to_string(),
    })
}
|
||||
|
||||
/// Fetches live-room metadata (canonical room id, title, cover,
/// keyframe, owner uid, live status and start time) for `room_id` from
/// `room/v1/Room/get_info`.
///
/// # Errors
/// - `InvalidCookies` for an unusable cookie string.
/// - `SecurityControlError` on HTTP 412.
/// - `InvalidResponseStatus` / `InvalidResponseJson` / `InvalidValue`
///   for other unexpected responses.
pub async fn get_room_info(
    client: &Client,
    account: &Account,
    room_id: &str,
) -> Result<RoomInfo, RecorderError> {
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let response = client
        .get(format!(
            "https://api.live.bilibili.com/room/v1/Room/get_info?room_id={room_id}"
        ))
        .headers(headers)
        .send()
        .await?;

    if !response.status().is_success() {
        // 412 is bilibili's risk-control rejection.
        if response.status() == reqwest::StatusCode::PRECONDITION_FAILED {
            return Err(RecorderError::SecurityControlError);
        }
        return Err(RecorderError::InvalidResponseStatus {
            status: response.status(),
        });
    }

    let res: serde_json::Value = response.json().await?;
    let code = res["code"]
        .as_u64()
        .ok_or(RecorderError::InvalidResponseJson { resp: res.clone() })?;
    if code != 0 {
        return Err(RecorderError::InvalidResponseJson { resp: res.clone() });
    }

    // Use the id returned by the API (it may differ from the one we
    // queried with, e.g. short ids).
    let room_id = res["data"]["room_id"]
        .as_i64()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let room_title = res["data"]["title"]
        .as_str()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let room_cover_url = res["data"]["user_cover"]
        .as_str()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let room_keyframe_url = res["data"]["keyframe"]
        .as_str()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let user_id = res["data"]["uid"]
        .as_i64()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let live_status = res["data"]["live_status"]
        .as_u64()
        .ok_or(RecorderError::InvalidValue)? as u8;
    // "live_time": "2025-08-09 18:33:35",
    let live_start_time_str = res["data"]["live_time"]
        .as_str()
        .ok_or(RecorderError::InvalidValue)?;
    // Sentinel value meaning "not live": map to timestamp 0.
    let live_start_time = if live_start_time_str == "0000-00-00 00:00:00" {
        0
    } else {
        // this is a fixed Asia/Shanghai datetime str
        let naive = chrono::NaiveDateTime::parse_from_str(live_start_time_str, "%Y-%m-%d %H:%M:%S")
            .map_err(|_| RecorderError::InvalidValue)?;
        // parse as UTC datetime and convert to timestamp, then subtract
        // the fixed UTC+8 offset to obtain real UTC epoch seconds
        chrono::Utc
            .from_local_datetime(&naive)
            .earliest()
            .ok_or(RecorderError::InvalidValue)?
            .timestamp()
            - 8 * 3600
    };
    Ok(RoomInfo {
        live_status,
        room_cover_url,
        room_id,
        room_keyframe_url,
        room_title,
        user_id,
        live_start_time,
    })
}
|
||||
|
||||
/// Get stream info from room id
|
||||
///
|
||||
/// https://socialsisteryi.github.io/bilibili-API-collect/docs/live/info.html#%E8%8E%B7%E5%8F%96%E7%9B%B4%E6%92%AD%E9%97%B4%E4%BF%A1%E6%81%AF-1
|
||||
/// https://api.live.bilibili.com/xlive/web-room/v2/index/getRoomPlayInfo?room_id=31368705&protocol=1&format=1&codec=0&qn=10000&platform=h5
|
||||
pub async fn get_stream_info(
|
||||
client: &Client,
|
||||
account: &Account,
|
||||
room_id: &str,
|
||||
protocol: Protocol,
|
||||
format: Format,
|
||||
codec: &[Codec],
|
||||
qn: Qn,
|
||||
) -> Result<BiliStream, RecorderError> {
|
||||
let url = format!(
|
||||
"https://api.live.bilibili.com/xlive/web-room/v2/index/getRoomPlayInfo?room_id={}&protocol={}&format={}&codec={}&qn={}&platform=h5",
|
||||
room_id,
|
||||
protocol.clone() as u8,
|
||||
format.clone() as u8,
|
||||
codec.iter().map(|c| (c.clone() as u8).to_string()).collect::<Vec<String>>().join(","),
|
||||
qn as i64,
|
||||
);
|
||||
let mut headers = generate_user_agent_header();
|
||||
if let Ok(cookies) = account.cookies.parse() {
|
||||
headers.insert("cookie", cookies);
|
||||
} else {
|
||||
return Err(RecorderError::InvalidCookies);
|
||||
}
|
||||
let response = client.get(url).headers(headers).send().await?;
|
||||
let res: serde_json::Value = response.json().await?;
|
||||
|
||||
let code = res["code"].as_u64().unwrap_or(0);
|
||||
let message = res["message"].as_str().unwrap_or("");
|
||||
if code != 0 {
|
||||
return Err(RecorderError::ApiError {
|
||||
error: format!("Code {code} not found, message: {message}"),
|
||||
});
|
||||
}
|
||||
|
||||
log::debug!("Get stream info response: {res}");
|
||||
|
||||
// Parse the new API response structure
|
||||
let playurl_info = &res["data"]["playurl_info"]["playurl"];
|
||||
let empty_vec = vec![];
|
||||
let streams = playurl_info["stream"].as_array().unwrap_or(&empty_vec);
|
||||
|
||||
if streams.is_empty() {
|
||||
return Err(RecorderError::ApiError {
|
||||
error: "No streams available".to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
// Find the matching protocol
|
||||
let target_protocol = match protocol {
|
||||
Protocol::HttpStream => "http_stream",
|
||||
Protocol::HttpHls => "http_hls",
|
||||
};
|
||||
|
||||
let stream = streams
|
||||
.iter()
|
||||
.find(|s| s["protocol_name"].as_str() == Some(target_protocol))
|
||||
.ok_or_else(|| RecorderError::ApiError {
|
||||
error: format!("Protocol {target_protocol} not found"),
|
||||
})?;
|
||||
|
||||
// Find the matching format
|
||||
let target_format = match format {
|
||||
Format::Flv => "flv",
|
||||
Format::TS => "ts",
|
||||
Format::FMP4 => "fmp4",
|
||||
};
|
||||
|
||||
let empty_vec = vec![];
|
||||
let format_info = stream["format"]
|
||||
.as_array()
|
||||
.unwrap_or(&empty_vec)
|
||||
.iter()
|
||||
.find(|f| f["format_name"].as_str() == Some(target_format))
|
||||
.ok_or_else(|| RecorderError::FormatNotFound {
|
||||
format: target_format.to_owned(),
|
||||
})?;
|
||||
|
||||
// Find the matching codec
|
||||
let target_codecs = codec
|
||||
.iter()
|
||||
.map(|c| match c {
|
||||
Codec::Avc => "avc",
|
||||
Codec::Hevc => "hevc",
|
||||
})
|
||||
.collect::<Vec<&str>>();
|
||||
|
||||
let codec_info = format_info["codec"]
|
||||
.as_array()
|
||||
.unwrap_or(&empty_vec)
|
||||
.iter()
|
||||
.find(|c| target_codecs.contains(&c["codec_name"].as_str().unwrap_or("")))
|
||||
.ok_or_else(|| RecorderError::CodecNotFound {
|
||||
codecs: target_codecs.join(","),
|
||||
})?;
|
||||
|
||||
let url_info = codec_info["url_info"].as_array().unwrap_or(&empty_vec);
|
||||
|
||||
let mut url_info = url_info
|
||||
.iter()
|
||||
.map(|u| UrlInfo {
|
||||
host: u["host"].as_str().unwrap_or("").to_string(),
|
||||
extra: u["extra"].as_str().unwrap_or("").to_string(),
|
||||
})
|
||||
.collect::<Vec<UrlInfo>>();
|
||||
|
||||
url_info.shuffle(&mut rand::rng());
|
||||
|
||||
let drm = codec_info["drm"].as_bool().unwrap_or(false);
|
||||
let base_url = codec_info["base_url"].as_str().unwrap_or("").to_string();
|
||||
let master_url = format_info["master_url"].as_str().map(|s| s.to_string());
|
||||
let codec = codec_info["codec_name"].as_str().unwrap_or("");
|
||||
let codec = match codec {
|
||||
"avc" => Codec::Avc,
|
||||
"hevc" => Codec::Hevc,
|
||||
_ => {
|
||||
return Err(RecorderError::CodecNotFound {
|
||||
codecs: codec.to_string(),
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
Ok(BiliStream {
|
||||
format,
|
||||
codec,
|
||||
base_url,
|
||||
url_info,
|
||||
drm,
|
||||
master_url,
|
||||
})
|
||||
}
|
||||
|
||||
/// Download file from url to path
|
||||
pub async fn download_file(client: &Client, url: &str, path: &Path) -> Result<(), RecorderError> {
|
||||
if !path.parent().unwrap().exists() {
|
||||
std::fs::create_dir_all(path.parent().unwrap()).unwrap();
|
||||
}
|
||||
let response = client.get(url).send().await?;
|
||||
let bytes = response.bytes().await?;
|
||||
let mut file = tokio::fs::File::create(&path).await?;
|
||||
let mut content = std::io::Cursor::new(bytes);
|
||||
tokio::io::copy(&mut content, &mut file).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Method from js code
|
||||
pub async fn get_sign(client: &Client, mut parameters: Value) -> Result<String, RecorderError> {
|
||||
let table = vec![
|
||||
46, 47, 18, 2, 53, 8, 23, 32, 15, 50, 10, 31, 58, 3, 45, 35, 27, 43, 5, 49, 33, 9, 42, 19,
|
||||
29, 28, 14, 39, 12, 38, 41, 13, 37, 48, 7, 16, 24, 55, 40, 61, 26, 17, 0, 1, 60, 51, 30, 4,
|
||||
22, 25, 54, 21, 56, 59, 6, 63, 57, 62, 11, 36, 20, 34, 44, 52,
|
||||
];
|
||||
let nav_info: Value = client
|
||||
.get("https://api.bilibili.com/x/web-interface/nav")
|
||||
.headers(generate_user_agent_header())
|
||||
.send()
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
let re = Regex::new(r"wbi/(.*).png").unwrap();
|
||||
let img = re
|
||||
.captures(nav_info["data"]["wbi_img"]["img_url"].as_str().unwrap())
|
||||
.unwrap()
|
||||
.get(1)
|
||||
.unwrap()
|
||||
.as_str();
|
||||
let sub = re
|
||||
.captures(nav_info["data"]["wbi_img"]["sub_url"].as_str().unwrap())
|
||||
.unwrap()
|
||||
.get(1)
|
||||
.unwrap()
|
||||
.as_str();
|
||||
let raw_string = format!("{img}{sub}");
|
||||
let mut encoded = Vec::new();
|
||||
for x in table {
|
||||
if x < raw_string.len() {
|
||||
encoded.push(raw_string.as_bytes()[x]);
|
||||
}
|
||||
}
|
||||
// only keep 32 bytes of encoded
|
||||
encoded = encoded[0..32].to_vec();
|
||||
let encoded = String::from_utf8(encoded).unwrap();
|
||||
// Timestamp in seconds
|
||||
let wts = SystemTime::now()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs();
|
||||
parameters
|
||||
.as_object_mut()
|
||||
.unwrap()
|
||||
.insert("wts".to_owned(), serde_json::Value::String(wts.to_string()));
|
||||
// Get all keys from parameters into vec
|
||||
let mut keys = parameters
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.keys()
|
||||
.map(std::borrow::ToOwned::to_owned)
|
||||
.collect::<Vec<String>>();
|
||||
// sort keys
|
||||
keys.sort();
|
||||
let mut params = String::new();
|
||||
for x in &keys {
|
||||
params.push_str(x);
|
||||
params.push('=');
|
||||
// Value filters !'()* characters
|
||||
let value = parameters
|
||||
.get(x)
|
||||
.unwrap()
|
||||
.as_str()
|
||||
.unwrap()
|
||||
.replace(['!', '\'', '(', ')', '*'], "");
|
||||
let value = PctString::encode(value.chars(), URIReserved);
|
||||
params.push_str(value.as_str());
|
||||
// add & if not last
|
||||
if x != keys.last().unwrap() {
|
||||
params.push('&');
|
||||
}
|
||||
}
|
||||
// md5 params+encoded
|
||||
let w_rid = md5::compute(params.to_string() + encoded.as_str());
|
||||
let params = params + format!("&w_rid={w_rid:x}").as_str();
|
||||
Ok(params)
|
||||
}
|
||||
|
||||
/// Requests an upload slot from `member.bilibili.com/preupload` for
/// `video_file`, returning the UPOS endpoint, uri, auth token, chunk
/// size and biz id used by the later upload steps.
///
/// # Errors
/// `InvalidCookies` for an unusable cookie string; network/JSON errors
/// propagate. NOTE(review): panics if `video_file` has no filename or
/// a non-UTF-8 one (`.unwrap()` chain on `file_name`).
async fn preupload_video(
    client: &Client,
    account: &Account,
    video_file: &Path,
) -> Result<PreuploadResponse, RecorderError> {
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let url = format!(
        "https://member.bilibili.com/preupload?name={}&r=upos&profile=ugcfx/bup",
        video_file.file_name().unwrap().to_str().unwrap()
    );
    let response = client
        .get(&url)
        .headers(headers)
        .send()
        .await?
        .json::<PreuploadResponse>()
        .await?;
    Ok(response)
}
|
||||
|
||||
/// Registers the upload with the UPOS endpoint (multipart "uploads"
/// init), sending file size, chunk size and biz id; returns the upload
/// id and key used for the chunk PUTs and finalization.
///
/// NOTE(review): `video_file.metadata().unwrap()` panics if the file
/// disappeared between preupload and this call.
async fn post_video_meta(
    client: &Client,
    preupload_response: &PreuploadResponse,
    video_file: &Path,
) -> Result<PostVideoMetaResponse, RecorderError> {
    let url = format!(
        "https:{}{}?uploads=&output=json&profile=ugcfx/bup&filesize={}&partsize={}&biz_id={}",
        preupload_response.endpoint,
        // upos_uri looks like "upos://bucket/key"; stripping "upos:/"
        // leaves the "/bucket/key" path component.
        preupload_response.upos_uri.replace("upos:/", ""),
        video_file.metadata().unwrap().len(),
        preupload_response.chunk_size,
        preupload_response.biz_id
    );
    let response = client
        .post(&url)
        .header("X-Upos-Auth", &preupload_response.auth)
        .send()
        .await?
        .json::<PostVideoMetaResponse>()
        .await?;
    Ok(response)
}
|
||||
|
||||
/// Uploads the video to the UPOS endpoint chunk by chunk, retrying each
/// chunk up to 3 times with exponential backoff.
///
/// Returns the number of chunks the file was split into (used by
/// `end_upload` to build the part list).
async fn upload_video(client: &Client, params: UploadParams<'_>) -> Result<usize, RecorderError> {
    let mut file = File::open(params.video_file).await?;
    // Single reusable chunk-sized read buffer.
    let mut buffer = vec![0; params.preupload_response.chunk_size];
    let file_size = params.video_file.metadata()?.len();
    let chunk_size = params.preupload_response.chunk_size as u64;
    let total_chunks = (file_size as f64 / chunk_size as f64).ceil() as usize;

    let start = Instant::now();
    let mut chunk = 0;
    // Bytes accumulated in `buffer` for the current chunk.
    let mut read_total = 0;
    let max_retries = 3;
    let timeout = Duration::from_secs(30);

    // Loop exits on read error (while-let) or on clean EOF (break).
    while let Ok(size) = file.read(&mut buffer[read_total..]).await {
        read_total += size;
        log::debug!("size: {size}, total: {read_total}");
        // Short reads are normal: keep filling until the chunk buffer
        // is full. A final partial chunk is flushed at EOF (size == 0).
        if size > 0 && (read_total as u64) < chunk_size {
            continue;
        }
        // EOF with nothing buffered: the upload is complete.
        if size == 0 && read_total == 0 {
            break;
        }

        let mut retry_count = 0;
        let mut success = false;

        while retry_count < max_retries && !success {
            // partNumber is 1-based; chunk index is 0-based.
            let url = format!(
                "https:{}{}?partNumber={}&uploadId={}&chunk={}&chunks={}&size={}&start={}&end={}&total={}",
                params.preupload_response.endpoint,
                params.preupload_response.upos_uri.replace("upos:/", ""),
                chunk + 1,
                params.post_video_meta_response.upload_id,
                chunk,
                total_chunks,
                read_total,
                chunk * params.preupload_response.chunk_size,
                chunk * params.preupload_response.chunk_size + read_total,
                params.video_file.metadata().unwrap().len()
            );

            match client
                .put(&url)
                .header("X-Upos-Auth", &params.preupload_response.auth)
                .header("Content-Type", "application/octet-stream")
                .header("Content-Length", read_total.to_string())
                .timeout(timeout)
                // Only the filled prefix of the buffer is sent.
                .body(buffer[..read_total].to_vec())
                .send()
                .await
            {
                Ok(response) => {
                    if response.status().is_success() {
                        success = true;
                        // Drain the body; the content is not needed.
                        let _ = response.text().await?;
                    } else {
                        log::error!("Upload failed with status: {}", response.status());
                        retry_count += 1;
                        if retry_count < max_retries {
                            // Exponential backoff: 2s, 4s, ...
                            tokio::time::sleep(Duration::from_secs(2u64.pow(retry_count as u32)))
                                .await;
                        }
                    }
                }
                Err(e) => {
                    log::error!("Upload error: {e}");
                    retry_count += 1;
                    if retry_count < max_retries {
                        tokio::time::sleep(Duration::from_secs(2u64.pow(retry_count as u32))).await;
                    }
                }
            }
        }

        if !success {
            return Err(RecorderError::UploadError {
                err: format!("Failed to upload chunk {chunk} after {max_retries} retries"),
            });
        }

        chunk += 1;
        read_total = 0;
        log::debug!(
            "[bili]speed: {:.1} KiB/s",
            (chunk * params.preupload_response.chunk_size) as f64
                / start.elapsed().as_secs_f64()
                / 1024.0
        );
    }
    Ok(total_chunks)
}
|
||||
|
||||
/// Finalizes a chunked UPOS upload by posting the part list
/// (`partNumber` 1..=chunks) to the endpoint; the response body is
/// fetched but discarded.
async fn end_upload(
    client: &Client,
    preupload_response: &PreuploadResponse,
    post_video_meta_response: &PostVideoMetaResponse,
    chunks: usize,
) -> Result<(), RecorderError> {
    // NOTE(review): `name` is set to the full upos_uri here rather than
    // a bare filename — confirm this matches the server's expectation.
    let url = format!(
        "https:{}{}?output=json&name={}&profile=ugcfx/bup&uploadId={}&biz_id={}",
        preupload_response.endpoint,
        preupload_response.upos_uri.replace("upos:/", ""),
        preupload_response.upos_uri,
        post_video_meta_response.upload_id,
        preupload_response.biz_id
    );
    // The server ignores the eTag values; a placeholder is sufficient.
    let parts: Vec<Value> = (1..=chunks)
        .map(|i| json!({ "partNumber": i, "eTag": "etag" }))
        .collect();
    let body = json!({ "parts": parts });
    client
        .post(&url)
        .header("X-Upos-Auth", &preupload_response.auth)
        .header("Content-Type", "application/json; charset=UTF-8")
        .body(body.to_string())
        .send()
        .await?
        .text()
        .await?;
    Ok(())
}
|
||||
|
||||
/// Runs the full upload pipeline for `video_file`:
/// preupload -> post meta -> chunked upload -> finalize, and returns a
/// video descriptor whose title/filename derive from the server key.
pub async fn prepare_video(
    client: &Client,
    account: &Account,
    video_file: &Path,
) -> Result<profile::Video, RecorderError> {
    log::info!("Start Preparing Video: {}", video_file.to_str().unwrap());
    let preupload = preupload_video(client, account, video_file).await?;
    log::info!("Preupload Response: {preupload:?}");
    let metaposted = post_video_meta(client, &preupload, video_file).await?;
    log::info!("Post Video Meta Response: {metaposted:?}");
    let uploaded = upload_video(
        client,
        UploadParams {
            preupload_response: &preupload,
            post_video_meta_response: &metaposted,
            video_file,
        },
    )
    .await?;
    log::info!("Uploaded: {uploaded}");
    end_upload(client, &preupload, &metaposted, uploaded).await?;
    // The stem of the server-side key becomes both title and filename.
    let filename = Path::new(&metaposted.key)
        .file_stem()
        .unwrap()
        .to_str()
        .unwrap();
    Ok(profile::Video {
        title: filename.to_string(),
        filename: filename.to_string(),
        desc: String::new(),
        cid: preupload.biz_id,
    })
}
|
||||
|
||||
/// Submits a prepared video for publication via `x/vu/web/add/v3`.
///
/// Clones the profile template, appends `video` to its video list,
/// posts the whole profile as JSON, and expects a `VideoSubmit`
/// payload back.
pub async fn submit_video(
    client: &Client,
    account: &Account,
    profile_template: &Profile,
    video: &profile::Video,
) -> Result<VideoSubmitData, RecorderError> {
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    // CSRF token plus a timestamp go in the query string.
    let url = format!(
        "https://member.bilibili.com/x/vu/web/add/v3?ts={}&csrf={}",
        chrono::Local::now().timestamp(),
        account.csrf
    );
    let mut preprofile = profile_template.clone();
    preprofile.videos.push(video.clone());
    match client
        .post(&url)
        .headers(headers)
        .header("Content-Type", "application/json; charset=UTF-8")
        .body(serde_json::ser::to_string(&preprofile).unwrap_or_default())
        .send()
        .await
    {
        Ok(raw_resp) => {
            let json: Value = raw_resp.json().await?;
            if let Ok(resp) = serde_json::from_value::<GeneralResponse>(json.clone()) {
                match resp.data {
                    response::Data::VideoSubmit(data) => Ok(data),
                    // Any other payload type means the API changed or
                    // the submission failed in an unexpected way.
                    _ => Err(RecorderError::InvalidResponse),
                }
            } else {
                log::error!("Parse response failed: {json}");
                Err(RecorderError::InvalidResponse)
            }
        }
        Err(e) => {
            log::error!("Send failed {e}");
            Err(RecorderError::InvalidResponse)
        }
    }
}
|
||||
|
||||
/// Uploads a cover image (the `cover` payload string) via
/// `x/vu/web/cover/up` and returns the hosted image URL.
pub async fn upload_cover(
    client: &Client,
    account: &Account,
    cover: &str,
) -> Result<String, RecorderError> {
    let url = format!(
        "https://member.bilibili.com/x/vu/web/cover/up?ts={}&csrf={}",
        chrono::Local::now().timestamp_millis(),
        account.csrf
    );
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    // csrf is required both in the query string and the form body.
    let params = [("csrf", account.csrf.clone()), ("cover", cover.to_string())];
    match client
        .post(&url)
        .headers(headers)
        .header("Content-Type", "application/x-www-form-urlencoded")
        .form(&params)
        .send()
        .await
    {
        Ok(raw_resp) => {
            let json: Value = raw_resp.json().await?;
            if let Ok(resp) = serde_json::from_value::<GeneralResponse>(json.clone()) {
                match resp.data {
                    response::Data::Cover(data) => Ok(data.url),
                    _ => Err(RecorderError::InvalidResponse),
                }
            } else {
                log::error!("Parse response failed: {json}");
                Err(RecorderError::InvalidResponse)
            }
        }
        Err(e) => {
            log::error!("Send failed {e}");
            Err(RecorderError::InvalidResponse)
        }
    }
}
|
||||
|
||||
/// Sends a chat (danmaku) message to a live room via `msg/send`.
/// Color/mode/fontsize are fixed to the site defaults; the response
/// body is ignored.
pub async fn send_danmaku(
    client: &Client,
    account: &Account,
    room_id: &str,
    message: &str,
) -> Result<(), RecorderError> {
    let url = "https://api.live.bilibili.com/msg/send".to_string();
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let params = [
        ("bubble", "0"),
        ("msg", message),
        // 16777215 == 0xFFFFFF (white), mode 1 = scrolling danmaku.
        ("color", "16777215"),
        ("mode", "1"),
        ("fontsize", "25"),
        ("room_type", "0"),
        // `rnd` is a per-request nonce; a timestamp suffices.
        ("rnd", &format!("{}", chrono::Local::now().timestamp())),
        ("roomid", room_id),
        ("csrf", &account.csrf),
        ("csrf_token", &account.csrf),
    ];
    let _ = client
        .post(&url)
        .headers(headers)
        .header("Content-Type", "application/x-www-form-urlencoded")
        .form(&params)
        .send()
        .await?;
    Ok(())
}
|
||||
|
||||
/// Fetches the list of video categories ("typelist") offered by the
/// archive pre-submit endpoint, used when submitting videos.
pub async fn get_video_typelist(
    client: &Client,
    account: &Account,
) -> Result<Vec<response::Typelist>, RecorderError> {
    let url = "https://member.bilibili.com/x/vupre/web/archive/pre?lang=cn";
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let resp: GeneralResponse = client
        .get(url)
        .headers(headers)
        .send()
        .await?
        .json()
        .await?;
    if resp.code == 0 {
        if let response::Data::VideoTypeList(data) = resp.data {
            Ok(data.typelist)
        } else {
            // code == 0 but an unexpected payload shape.
            Err(RecorderError::InvalidResponse)
        }
    } else {
        log::error!("Get video typelist failed with code {}", resp.code);
        Err(RecorderError::InvalidResponse)
    }
}
|
||||
355
src-tauri/crates/recorder/src/platforms/douyin.rs
Normal file
@@ -0,0 +1,355 @@
|
||||
pub mod api;
|
||||
mod response;
|
||||
pub mod stream_info;
|
||||
use crate::account::Account;
|
||||
use crate::core::hls_recorder::{construct_stream_from_variant, HlsRecorder};
|
||||
use crate::core::{Codec, Format};
|
||||
use crate::errors::RecorderError;
|
||||
use crate::events::RecorderEvent;
|
||||
use crate::platforms::douyin::stream_info::DouyinStream;
|
||||
use crate::traits::RecorderTrait;
|
||||
use crate::{Recorder, RoomInfo, UserInfo};
|
||||
use async_trait::async_trait;
|
||||
use chrono::Utc;
|
||||
use danmu_stream::danmu_stream::DanmuStream;
|
||||
use danmu_stream::provider::ProviderType;
|
||||
use danmu_stream::DanmuMessageType;
|
||||
use rand::random;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::{atomic, Arc};
|
||||
use std::time::Duration;
|
||||
use tokio::sync::{broadcast, Mutex, RwLock};
|
||||
|
||||
use crate::danmu::DanmuStorage;
|
||||
use crate::platforms::PlatformType;
|
||||
|
||||
// The Douyin recorder is the generic `Recorder` specialized with
// Douyin-only state.
pub type DouyinRecorder = Recorder<DouyinExtra>;

/// Douyin-specific state attached to the generic [`Recorder`].
#[derive(Clone)]
pub struct DouyinExtra {
    // sec_uid used by the Douyin web API to identify the streamer.
    sec_user_id: String,
    // Most recently resolved live stream; `None` while offline.
    live_stream: Arc<RwLock<Option<DouyinStream>>>,
}
|
||||
|
||||
fn get_best_stream_url(stream: &DouyinStream) -> Option<String> {
|
||||
// find the best stream url
|
||||
if stream.data.origin.main.hls.is_empty() {
|
||||
log::error!("No stream url found in stream_data: {stream:#?}");
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(stream.data.origin.main.hls.clone())
|
||||
}
|
||||
|
||||
impl DouyinRecorder {
|
||||
/// Creates a Douyin recorder for `room_id` in an idle state; live
/// metadata is populated later by `check_status`.
///
/// `enabled` seeds the auto-record flag; `update_interval` is shared
/// with the caller so the poll rate can be changed at runtime.
/// NOTE: currently awaits nothing and always returns `Ok`.
pub async fn new(
    room_id: &str,
    sec_user_id: &str,
    account: &Account,
    cache_dir: PathBuf,
    channel: broadcast::Sender<RecorderEvent>,
    update_interval: Arc<atomic::AtomicU64>,
    enabled: bool,
) -> Result<Self, crate::errors::RecorderError> {
    Ok(Self {
        platform: PlatformType::Douyin,
        room_id: room_id.to_string(),
        account: account.clone(),
        client: reqwest::Client::new(),
        event_channel: channel,
        cache_dir,
        quit: Arc::new(atomic::AtomicBool::new(false)),
        enabled: Arc::new(atomic::AtomicBool::new(enabled)),
        is_recording: Arc::new(atomic::AtomicBool::new(false)),
        room_info: Arc::new(RwLock::new(RoomInfo::default())),
        user_info: Arc::new(RwLock::new(UserInfo::default())),
        platform_live_id: Arc::new(RwLock::new(String::new())),
        live_id: Arc::new(RwLock::new(String::new())),
        danmu_storage: Arc::new(RwLock::new(None)),
        // Pretend we just updated so the first poll isn't considered
        // overdue.
        last_update: Arc::new(atomic::AtomicI64::new(Utc::now().timestamp())),
        last_sequence: Arc::new(atomic::AtomicU64::new(0)),
        danmu_task: Arc::new(Mutex::new(None)),
        record_task: Arc::new(Mutex::new(None)),
        update_interval,
        total_duration: Arc::new(atomic::AtomicU64::new(0)),
        total_size: Arc::new(atomic::AtomicU64::new(0)),
        extra: DouyinExtra {
            sec_user_id: sec_user_id.to_string(),
            live_stream: Arc::new(RwLock::new(None)),
        },
    })
}
|
||||
|
||||
/// Polls Douyin's room API once, refreshes the cached `RoomInfo` and
/// `UserInfo`, and returns whether the room is currently live.
///
/// On a live-status transition it emits `LiveStart`/`LiveEnd` events
/// and resets recorder state. While live and allowed to record, it
/// parses `stream_data` and caches the chosen stream plus the platform
/// live id. API failures keep reporting the previous status.
async fn check_status(&self) -> bool {
    let pre_live_status = self.room_info.read().await.status;
    match api::get_room_info(
        &self.client,
        &self.account,
        &self.room_id,
        &self.extra.sec_user_id,
    )
    .await
    {
        Ok(info) => {
            let live_status = info.status == 0; // room_status == 0 means currently live

            *self.room_info.write().await = RoomInfo {
                platform: PlatformType::Douyin.as_str().to_string(),
                room_id: self.room_id.to_string(),
                room_title: info.room_title.clone(),
                room_cover: info.cover.clone().unwrap_or_default(),
                status: live_status,
            };

            *self.user_info.write().await = UserInfo {
                user_id: info.sec_user_id.clone(),
                user_name: info.user_name.clone(),
                user_avatar: info.user_avatar.clone(),
            };

            if pre_live_status != live_status {
                // live status changed, reset current record flag
                log::info!(
                    "[{}]Live status changed to {}, auto_start: {}",
                    self.room_id,
                    live_status,
                    self.enabled.load(atomic::Ordering::Relaxed)
                );

                if live_status {
                    let _ = self.event_channel.send(RecorderEvent::LiveStart {
                        recorder: self.info().await,
                    });
                } else {
                    let _ = self.event_channel.send(RecorderEvent::LiveEnd {
                        platform: PlatformType::Douyin,
                        room_id: self.room_id.clone(),
                        recorder: self.info().await,
                    });
                }

                self.reset().await;
            }

            if !live_status {
                self.reset().await;

                return false;
            }

            let should_record = self.should_record().await;

            if !should_record {
                // Live but recording disabled: only report the status.
                return true;
            }

            // Get stream URL when live starts
            if !info.hls_url.is_empty() {
                // Only set stream URL, don't create record yet
                // Record will be created when first ts download succeeds
                // parse info.stream_data into DouyinStream
                let stream_data = info.stream_data.clone();
                let Ok(stream) = serde_json::from_str::<DouyinStream>(&stream_data) else {
                    log::error!("Failed to parse stream data: {:#?}", &info);
                    return false;
                };
                let Some(new_stream_url) = get_best_stream_url(&stream) else {
                    log::error!("No stream url found in stream_data: {stream:#?}");
                    return false;
                };

                log::info!("New douyin stream URL: {}", new_stream_url.clone());
                *self.extra.live_stream.write().await = Some(stream);
                (*self.platform_live_id.write().await).clone_from(&info.room_id_str);
            }

            true
        }
        Err(e) => {
            // Transient API failure: keep reporting the last known
            // status instead of flapping to offline.
            log::warn!("[{}]Update room status failed: {}", &self.room_id, e);
            pre_live_status
        }
    }
}
|
||||
|
||||
async fn danmu(&self) -> Result<(), crate::errors::RecorderError> {
|
||||
let cookies = self.account.cookies.clone();
|
||||
let danmu_room_id = self
|
||||
.platform_live_id
|
||||
.read()
|
||||
.await
|
||||
.clone()
|
||||
.parse::<i64>()
|
||||
.unwrap_or(0);
|
||||
let danmu_stream =
|
||||
DanmuStream::new(ProviderType::Douyin, &cookies, &danmu_room_id.to_string()).await;
|
||||
if danmu_stream.is_err() {
|
||||
let err = danmu_stream.err().unwrap();
|
||||
log::error!("Failed to create danmu stream: {err}");
|
||||
return Err(crate::errors::RecorderError::DanmuStreamError(err));
|
||||
}
|
||||
let danmu_stream = danmu_stream.unwrap();
|
||||
|
||||
let mut start_fut = Box::pin(danmu_stream.start());
|
||||
|
||||
loop {
|
||||
tokio::select! {
|
||||
start_res = &mut start_fut => {
|
||||
match start_res {
|
||||
Ok(_) => {
|
||||
log::info!("Danmu stream finished");
|
||||
return Ok(());
|
||||
}
|
||||
Err(err) => {
|
||||
log::error!("Danmu stream start error: {err}");
|
||||
return Err(crate::errors::RecorderError::DanmuStreamError(err));
|
||||
}
|
||||
}
|
||||
}
|
||||
recv_res = danmu_stream.recv() => {
|
||||
match recv_res {
|
||||
Ok(Some(msg)) => {
|
||||
match msg {
|
||||
DanmuMessageType::DanmuMessage(danmu) => {
|
||||
let ts = Utc::now().timestamp_millis();
|
||||
let _ = self.event_channel.send(RecorderEvent::DanmuReceived {
|
||||
room: self.room_id.clone(),
|
||||
ts,
|
||||
content: danmu.message.clone(),
|
||||
});
|
||||
|
||||
if let Some(danmu_storage) = self.danmu_storage.read().await.as_ref() {
|
||||
danmu_storage.add_line(ts, &danmu.message).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(None) => {
|
||||
log::info!("Danmu stream closed");
|
||||
return Ok(());
|
||||
}
|
||||
Err(err) => {
|
||||
log::error!("Failed to receive danmu message: {err}");
|
||||
return Err(crate::errors::RecorderError::DanmuStreamError(err));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn reset(&self) {
|
||||
*self.platform_live_id.write().await = String::new();
|
||||
self.last_update
|
||||
.store(Utc::now().timestamp(), atomic::Ordering::Relaxed);
|
||||
self.last_sequence.store(0, atomic::Ordering::Relaxed);
|
||||
self.total_duration.store(0, atomic::Ordering::Relaxed);
|
||||
self.total_size.store(0, atomic::Ordering::Relaxed);
|
||||
*self.extra.live_stream.write().await = None;
|
||||
if let Some(danmu_task) = self.danmu_task.lock().await.as_mut() {
|
||||
danmu_task.abort();
|
||||
let _ = danmu_task.await;
|
||||
log::info!("Danmu task aborted");
|
||||
}
|
||||
}
|
||||
|
||||
    /// Record one live session identified by `live_id`.
    ///
    /// Resolves the best stream URL from the cached `DouyinStream`, prepares
    /// the session work directory (cover image, danmu file), spawns the danmu
    /// task, announces `RecorderEvent::RecordStart`, and then drives an
    /// `HlsRecorder` to completion — this call does not return until the HLS
    /// recorder stops.
    ///
    /// # Errors
    ///
    /// Returns `RecorderError::NoStreamAvailable` when no cached stream or
    /// usable URL exists (or the variant cannot be constructed), and
    /// propagates any error from `HlsRecorder::start`.
    async fn update_entries(&self, live_id: &str) -> Result<(), RecorderError> {
        // Get current room info and stream URL
        let room_info = self.room_info.read().await.clone();
        let Some(stream) = self.extra.live_stream.read().await.clone() else {
            return Err(RecorderError::NoStreamAvailable);
        };
        let Some(stream_url) = get_best_stream_url(&stream) else {
            return Err(RecorderError::NoStreamAvailable);
        };

        // Best-effort: a directory-creation failure is ignored here; real I/O
        // problems will surface from the recorder below.
        let work_dir = self.work_dir(live_id).await;
        let _ = tokio::fs::create_dir_all(&work_dir.full_path()).await;

        // download cover (best-effort; failure leaves the session coverless)
        let cover_url = room_info.room_cover.clone();
        let cover_path = work_dir.with_filename("cover.jpg");
        let _ = api::download_file(&self.client, &cover_url, &cover_path.full_path()).await;

        // Setup danmu store — replaces any previous session's storage.
        let danmu_file_path = work_dir.with_filename("danmu.txt");
        let danmu_storage = DanmuStorage::new(&danmu_file_path.full_path()).await;
        *self.danmu_storage.write().await = danmu_storage;

        // Start danmu task
        *self.live_id.write().await = live_id.to_string();

        // The spawned task holds its own clone of the recorder; its Result is
        // deliberately discarded — danmu failure must not stop recording.
        let self_clone = self.clone();
        log::info!("Start fetching danmu for live {live_id}");
        *self.danmu_task.lock().await = Some(tokio::spawn(async move {
            let _ = self_clone.danmu().await;
        }));

        let _ = self.event_channel.send(RecorderEvent::RecordStart {
            recorder: self.info().await,
        });

        // Build the HLS playlist handle for the chosen variant; any construction
        // failure is collapsed into NoStreamAvailable.
        let hls_stream =
            construct_stream_from_variant(live_id, &stream_url, Format::TS, Codec::Avc)
                .await
                .map_err(|_| RecorderError::NoStreamAvailable)?;
        let hls_recorder = HlsRecorder::new(
            self.room_id.to_string(),
            Arc::new(hls_stream),
            self.client.clone(),
            None,
            self.event_channel.clone(),
            work_dir.full_path(),
            self.enabled.clone(),
        )
        .await;
        // Runs until the live/recording ends or an error occurs.
        if let Err(e) = hls_recorder.start().await {
            log::error!("[{}]Error from hls recorder: {}", self.room_id, e);
            return Err(e);
        }

        Ok(())
    }
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl crate::traits::RecorderTrait<DouyinExtra> for DouyinRecorder {
|
||||
async fn run(&self) {
|
||||
let self_clone = self.clone();
|
||||
*self.record_task.lock().await = Some(tokio::spawn(async move {
|
||||
while !self_clone.quit.load(atomic::Ordering::Relaxed) {
|
||||
if self_clone.check_status().await {
|
||||
// Live status is ok, start recording
|
||||
if self_clone.should_record().await {
|
||||
self_clone
|
||||
.is_recording
|
||||
.store(true, atomic::Ordering::Relaxed);
|
||||
let live_id = Utc::now().timestamp_millis().to_string();
|
||||
if let Err(e) = self_clone.update_entries(&live_id).await {
|
||||
log::error!("[{}]Update entries error: {}", self_clone.room_id, e);
|
||||
}
|
||||
}
|
||||
if self_clone.is_recording.load(atomic::Ordering::Relaxed) {
|
||||
let _ = self_clone.event_channel.send(RecorderEvent::RecordEnd {
|
||||
recorder: self_clone.info().await,
|
||||
});
|
||||
}
|
||||
self_clone
|
||||
.is_recording
|
||||
.store(false, atomic::Ordering::Relaxed);
|
||||
self_clone.reset().await;
|
||||
// Check status again after some seconds
|
||||
let secs = random::<u64>() % 5;
|
||||
tokio::time::sleep(Duration::from_secs(secs)).await;
|
||||
continue;
|
||||
}
|
||||
|
||||
tokio::time::sleep(Duration::from_secs(
|
||||
self_clone.update_interval.load(atomic::Ordering::Relaxed),
|
||||
))
|
||||
.await;
|
||||
}
|
||||
log::info!("[{}]Recording thread quit.", self_clone.room_id);
|
||||
}));
|
||||
}
|
||||
}
|
||||