Compare commits
305 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f20636a107 | ||
|
|
787a30e6f7 | ||
|
|
d1d217be18 | ||
|
|
944d0a371a | ||
|
|
0df03e0c9c | ||
|
|
7ffdf65705 | ||
|
|
89cdf91a48 | ||
|
|
43ebc27044 | ||
|
|
e6159555f3 | ||
|
|
1f2508aae9 | ||
|
|
ad13f58fa7 | ||
|
|
de4959d49f | ||
|
|
b5b75129e7 | ||
|
|
84346a486f | ||
|
|
3bdcddf5a2 | ||
|
|
98f68a5e14 | ||
|
|
2249b86af3 | ||
|
|
fd889922d8 | ||
|
|
8db7c6e320 | ||
|
|
5bc4ed6dfd | ||
|
|
22ad5f7fea | ||
|
|
c0369c1a14 | ||
|
|
322f4a3ca5 | ||
|
|
4e32453441 | ||
|
|
66725b8a64 | ||
|
|
f7bcbbca83 | ||
|
|
07a3b33040 | ||
|
|
2f9b4582f8 | ||
|
|
c3f63c58cf | ||
|
|
4a3529bc2e | ||
|
|
b0355a919f | ||
|
|
cfe1a0b4b9 | ||
|
|
b655e98f35 | ||
|
|
2d1021bc42 | ||
|
|
33d74999b9 | ||
|
|
84b7dd7a3c | ||
|
|
0c678fbda3 | ||
|
|
3486f7d050 | ||
|
|
d42a1010b8 | ||
|
|
ece6ceea45 | ||
|
|
b22ebb399e | ||
|
|
4431b10cb7 | ||
|
|
01a0c929e8 | ||
|
|
b06f6e8d09 | ||
|
|
753227acbb | ||
|
|
c7dd9091d0 | ||
|
|
bae20ce011 | ||
|
|
8da4759668 | ||
|
|
eb7c6d91e9 | ||
|
|
3c24dfe8a6 | ||
|
|
bb916daaaf | ||
|
|
3931e484c2 | ||
|
|
b67e258c31 | ||
|
|
1a7e6f5a43 | ||
|
|
437204dbe6 | ||
|
|
af105277d9 | ||
|
|
7efd327a36 | ||
|
|
0141586fa9 | ||
|
|
df1d8ccac6 | ||
|
|
10b6b95e4d | ||
|
|
a58e6f77bd | ||
|
|
fe2bd80ac6 | ||
|
|
870b44a973 | ||
|
|
48fd9ca7b2 | ||
|
|
14d03b7eb9 | ||
|
|
6f1db6c038 | ||
|
|
cd2d208e5c | ||
|
|
7d6ec72002 | ||
|
|
837cb6a978 | ||
|
|
aeeb0c08d7 | ||
|
|
72d8a7f485 | ||
|
|
5d3692c7a0 | ||
|
|
7e54231bef | ||
|
|
80a885dbf3 | ||
|
|
134c6bbb5f | ||
|
|
49a153adf7 | ||
|
|
99e15b0bda | ||
|
|
4de8a73af2 | ||
|
|
d104ba3180 | ||
|
|
abf0d4748f | ||
|
|
d2a9c44601 | ||
|
|
c269558bae | ||
|
|
cc22453a40 | ||
|
|
d525d92de4 | ||
|
|
2197dfe65c | ||
|
|
38ee00f474 | ||
|
|
8fdad41c71 | ||
|
|
f269995bb7 | ||
|
|
03a2db8c44 | ||
|
|
6d9cd3c6a8 | ||
|
|
303b2f7036 | ||
|
|
ec25c2ffd9 | ||
|
|
50ab608ddb | ||
|
|
3c76be9b81 | ||
|
|
ab7f0cf0b4 | ||
|
|
f9f590c4dc | ||
|
|
8d38fe582a | ||
|
|
dc4a26561d | ||
|
|
10c1d1f3a8 | ||
|
|
66bcf53d01 | ||
|
|
8ab4b7d693 | ||
|
|
ce2f097d32 | ||
|
|
f7575cd327 | ||
|
|
8634c6a211 | ||
|
|
b070013efc | ||
|
|
d2d9112f6c | ||
|
|
9fea18f2de | ||
|
|
74480f91ce | ||
|
|
b2e13b631f | ||
|
|
001d995c8f | ||
|
|
8cb2acea88 | ||
|
|
7c0d57d84e | ||
|
|
8cb875f449 | ||
|
|
e6bbe65723 | ||
|
|
f4a71a2476 | ||
|
|
47b9362b0a | ||
|
|
c1aad0806e | ||
|
|
4ccc90f9fb | ||
|
|
7dc63440e6 | ||
|
|
4094e8b80d | ||
|
|
e27cbaf715 | ||
|
|
1f39b27d79 | ||
|
|
f45891fd95 | ||
|
|
18fe644715 | ||
|
|
40cde8c69a | ||
|
|
4b0af47906 | ||
|
|
9365b3c8cd | ||
|
|
4b9f015ea7 | ||
|
|
c42d4a084e | ||
|
|
5bb3feb05b | ||
|
|
05f776ed8b | ||
|
|
9cec809485 | ||
|
|
429f909152 | ||
|
|
084dd23df1 | ||
|
|
e55afdd739 | ||
|
|
72128a132b | ||
|
|
92ca2cddad | ||
|
|
3db0d1dfe5 | ||
|
|
57907323e6 | ||
|
|
dbdca44c5f | ||
|
|
fe1dd2201f | ||
|
|
e0ae194cc3 | ||
|
|
6fc5700457 | ||
|
|
c4fdcf86d4 | ||
|
|
3088500c8d | ||
|
|
861f3a3624 | ||
|
|
c55783e4d9 | ||
|
|
955e284d41 | ||
|
|
fc4c47427e | ||
|
|
e2d7563faa | ||
|
|
27d69f7f8d | ||
|
|
a77bb5af44 | ||
|
|
00286261a4 | ||
|
|
0b898dccaa | ||
|
|
a1d9ac4e68 | ||
|
|
4150939e23 | ||
|
|
8f84b7f063 | ||
|
|
04b245ac64 | ||
|
|
12f7e62957 | ||
|
|
9600d310c7 | ||
|
|
dec5a2472a | ||
|
|
13eb7c6ea2 | ||
|
|
2356cfa10a | ||
|
|
3bfaefb3b0 | ||
|
|
78b8c25d96 | ||
|
|
c1d2ff2b96 | ||
|
|
24aee9446a | ||
|
|
2fb094ec31 | ||
|
|
53897c66ee | ||
|
|
ca4e266ae6 | ||
|
|
6612a1e16f | ||
|
|
55ceb65dfb | ||
|
|
6cad3d6afb | ||
|
|
151e1bdb8a | ||
|
|
44a3cfd1ff | ||
|
|
9cbc3028a7 | ||
|
|
8c30730d7b | ||
|
|
acfb870f9d | ||
|
|
3813528f50 | ||
|
|
e3bb014644 | ||
|
|
76a7afde76 | ||
|
|
1184f9f3f5 | ||
|
|
b754f8938f | ||
|
|
6b30ff04b7 | ||
|
|
1c40acca63 | ||
|
|
a5a7a8afaf | ||
|
|
583ac13a37 | ||
|
|
3e58972072 | ||
|
|
f15aa27727 | ||
|
|
2581014dbd | ||
|
|
baaaa1b57e | ||
|
|
160fbb3590 | ||
|
|
6f3253678c | ||
|
|
563ad66243 | ||
|
|
a8d002cc53 | ||
|
|
0615410fa4 | ||
|
|
fc98e065f8 | ||
|
|
66f671ffa0 | ||
|
|
69a35af456 | ||
|
|
e462bd0b4c | ||
|
|
ae6483427f | ||
|
|
ad97677104 | ||
|
|
996d15ef25 | ||
|
|
06de32ffe7 | ||
|
|
dd43074e46 | ||
|
|
93495e13db | ||
|
|
16950edae4 | ||
|
|
4af1203360 | ||
|
|
55b5bd1fd2 | ||
|
|
f0a7cf4ed0 | ||
|
|
62e7412abf | ||
|
|
275bf647d2 | ||
|
|
00af723be9 | ||
|
|
19da577836 | ||
|
|
bf3a2b469b | ||
|
|
bf31bfd099 | ||
|
|
d02fea99f2 | ||
|
|
2404bacb4e | ||
|
|
b6c274c181 | ||
|
|
f9b472aee7 | ||
|
|
45f277741b | ||
|
|
94179f59cd | ||
|
|
c7b550a3e3 | ||
|
|
fd51fd2387 | ||
|
|
23d1798ab6 | ||
|
|
90e81d0d4d | ||
|
|
6a7a19547d | ||
|
|
1550849ee2 | ||
|
|
15116e2197 | ||
|
|
63eda5179b | ||
|
|
d7b1277363 | ||
|
|
337c933b92 | ||
|
|
b01b2cc9c0 | ||
|
|
30069b2f33 | ||
|
|
c5bd57468c | ||
|
|
c050c65675 | ||
|
|
e1bd7e7563 | ||
|
|
cc129f6384 | ||
|
|
e7ea0c0ff0 | ||
|
|
9630d51c4c | ||
|
|
ceb140a4c2 | ||
|
|
fe8410ab98 | ||
|
|
00731cda93 | ||
|
|
c05979cb11 | ||
|
|
6e1a10e45c | ||
|
|
bd74dfdb26 | ||
|
|
b7c2fd3387 | ||
|
|
b65e41ca23 | ||
|
|
ec70eded14 | ||
|
|
dcf9047d82 | ||
|
|
cd85e9f65a | ||
|
|
066fd4fb77 | ||
|
|
9a6bb30e73 | ||
|
|
99d9f27618 | ||
|
|
02ddac6b17 | ||
|
|
017438ee50 | ||
|
|
d938982107 | ||
|
|
bdde1969f7 | ||
|
|
c8eb038190 | ||
|
|
2d90b79f73 | ||
|
|
f39d3baff5 | ||
|
|
84664ee272 | ||
|
|
d603216baf | ||
|
|
522873c7fb | ||
|
|
a6548f9941 | ||
|
|
3843dd88b2 | ||
|
|
baddb4e9d4 | ||
|
|
4aa51b51bd | ||
|
|
725494db7d | ||
|
|
292caa4158 | ||
|
|
29e9656919 | ||
|
|
78f4682efb | ||
|
|
fa090b0b66 | ||
|
|
32b7e9c3c2 | ||
|
|
4d3e069a81 | ||
|
|
3ed658a31c | ||
|
|
efb24798c8 | ||
|
|
e72e9027ef | ||
|
|
17c93fb716 | ||
|
|
a826666ad6 | ||
|
|
c8282cb66f | ||
|
|
592fd3940e | ||
|
|
7e9980b098 | ||
|
|
283ee06034 | ||
|
|
9a00693bb3 | ||
|
|
16906a46cd | ||
|
|
bdf017024a | ||
|
|
58ae1ef426 | ||
|
|
98e6544c25 | ||
|
|
1b57beeea6 | ||
|
|
1625a5f889 | ||
|
|
ae20e7fad7 | ||
|
|
fc594b12e0 | ||
|
|
0d25f32101 | ||
|
|
cfd4522036 | ||
|
|
f638d4aee0 | ||
|
|
b237b78300 | ||
|
|
ed2983c073 | ||
|
|
730227ac45 | ||
|
|
7fb4f41f01 | ||
|
|
d92e013413 | ||
|
|
980fd145d0 | ||
|
|
693734e12a | ||
|
|
cbeae9b40d | ||
|
|
4d0cc2c3b6 |
51
.cursor/rules/ai-features.mdc
Normal file
@@ -0,0 +1,51 @@
|
||||
# AI Features and LangChain Integration
|
||||
|
||||
## AI Components
|
||||
|
||||
- **LangChain Integration**: Uses `@langchain/core`, `@langchain/deepseek`,
|
||||
`@langchain/langgraph`, `@langchain/ollama`
|
||||
- **Whisper Transcription**: Local and online transcription via `whisper-rs` in
|
||||
Rust backend
|
||||
- **AI Agent**: Located in [src/lib/agent/](mdc:src/lib/agent/) directory
|
||||
|
||||
## Frontend AI Features
|
||||
|
||||
- **AI Page**: [src/page/AI.svelte](mdc:src/page/AI.svelte) - Main AI interface
|
||||
- **Agent Logic**: [src/lib/agent/](mdc:src/lib/agent/) - AI agent implementation
|
||||
- **Interface**: [src/lib/interface.ts](mdc:src/lib/interface.ts)
|
||||
\- AI communication layer
|
||||
|
||||
## Backend AI Features
|
||||
|
||||
- **Subtitle Generation**:
|
||||
[src-tauri/src/subtitle_generator/](mdc:src-tauri/src/subtitle_generator/) -
|
||||
AI-powered subtitle creation
|
||||
- **Whisper Integration**:
|
||||
[src-tauri/src/subtitle_generator.rs](mdc:src-tauri/src/subtitle_generator.rs)
|
||||
\- Speech-to-text processing
|
||||
- **CUDA Support**: Optional CUDA acceleration for Whisper via feature flag
|
||||
|
||||
## AI Workflows
|
||||
|
||||
- **Live Transcription**: Real-time speech-to-text during live streams
|
||||
- **Content Summarization**: AI-powered content analysis and summarization
|
||||
- **Smart Editing**: AI-assisted video editing and clip generation
|
||||
- **Danmaku Processing**: AI analysis of danmaku (bullet comments) streams
|
||||
|
||||
## Configuration
|
||||
|
||||
- **LLM Settings**: Configure AI models in [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
- **Whisper Models**: Local model configuration for offline transcription
|
||||
- **API Keys**: External AI service configuration for online features
|
||||
|
||||
## Development Notes
|
||||
|
||||
- AI features require proper model configuration
|
||||
- CUDA feature enables GPU acceleration for Whisper
|
||||
- LangChain integration supports multiple AI providers
|
||||
- AI agent can work with both local and cloud-based models
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
62
.cursor/rules/build-deployment.mdc
Normal file
@@ -0,0 +1,62 @@
|
||||
# Build and Deployment Configuration
|
||||
|
||||
## Build Scripts
|
||||
|
||||
- **PowerShell**: [build.ps1](mdc:build.ps1) - Windows build script
|
||||
- **FFmpeg Setup**: [ffmpeg_setup.ps1](mdc:ffmpeg_setup.ps1)
|
||||
\- FFmpeg installation script
|
||||
- **Version Bump**: [scripts/bump.cjs](mdc:scripts/bump.cjs)
|
||||
\- Version management script
|
||||
|
||||
## Package Management
|
||||
|
||||
- **Node.js**: [package.json](mdc:package.json) - Frontend dependencies and scripts
|
||||
- **Rust**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml)
|
||||
\- Backend dependencies and features
|
||||
- **Lock Files**: [yarn.lock](mdc:yarn.lock) - Yarn dependency lock
|
||||
|
||||
## Build Configuration
|
||||
|
||||
- **Vite**: [vite.config.ts](mdc:vite.config.ts) - Frontend build tool configuration
|
||||
- **Tailwind**: [tailwind.config.cjs](mdc:tailwind.config.cjs) - CSS framework configuration
|
||||
- **PostCSS**: [postcss.config.cjs](mdc:postcss.config.cjs) - CSS processing configuration
|
||||
- **TypeScript**: [tsconfig.json](mdc:tsconfig.json),
|
||||
[tsconfig.node.json](mdc:tsconfig.node.json) - TypeScript configuration
|
||||
|
||||
## Tauri Configuration
|
||||
|
||||
- **Main Config**: [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
|
||||
\- Core Tauri settings
|
||||
- **Platform Configs**:
|
||||
- [src-tauri/tauri.macos.conf.json](mdc:src-tauri/tauri.macos.conf.json)
|
||||
\- macOS specific
|
||||
- [src-tauri/tauri.linux.conf.json](mdc:src-tauri/tauri.linux.conf.json)
|
||||
\- Linux specific
|
||||
- [src-tauri/tauri.windows.conf.json](mdc:src-tauri/tauri.windows.conf.json)
|
||||
\- Windows specific
|
||||
- [src-tauri/tauri.windows.cuda.conf.json](mdc:src-tauri/tauri.windows.cuda.conf.json)
|
||||
\- Windows with CUDA
|
||||
|
||||
## Docker Support
|
||||
|
||||
- **Dockerfile**: [Dockerfile](mdc:Dockerfile) - Container deployment configuration
|
||||
- **Documentation**: [docs/](mdc:docs/) - VitePress-based documentation site
|
||||
|
||||
## Build Commands
|
||||
|
||||
- **Frontend**: `yarn build` - Build production frontend
|
||||
- **Tauri**: `yarn tauri build` - Build desktop application
|
||||
- **Documentation**: `yarn docs:build` - Build documentation site
|
||||
- **Type Check**: `yarn check` - TypeScript and Svelte validation
|
||||
|
||||
## Deployment Targets
|
||||
|
||||
- **Desktop**: Native Tauri applications for Windows, macOS, Linux
|
||||
- **Docker**: Containerized deployment option
|
||||
- **Documentation**: Static site deployment via VitePress
|
||||
- **Assets**: Static asset distribution for web components
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
61
.cursor/rules/database-data.mdc
Normal file
@@ -0,0 +1,61 @@
|
||||
# Database and Data Management
|
||||
|
||||
## Database Architecture
|
||||
|
||||
- **SQLite Database**: Primary data storage using `sqlx` with async runtime
|
||||
- **Database Module**: [src-tauri/src/database/](mdc:src-tauri/src/database/)
|
||||
\- Core database operations
|
||||
- **Migration System**: [src-tauri/src/migration.rs](mdc:src-tauri/src/migration.rs)
|
||||
\- Database schema management
|
||||
|
||||
## Data Models
|
||||
|
||||
- **Recording Data**: Stream metadata, recording sessions, and file information
|
||||
- **Room Configuration**: Stream room settings and platform credentials
|
||||
- **Task Management**: Recording task status and progress tracking
|
||||
- **User Preferences**: Application settings and user configurations
|
||||
|
||||
## Frontend Data Layer
|
||||
|
||||
- **Database Interface**: [src/lib/db.ts](mdc:src/lib/db.ts)
|
||||
\- Frontend database operations
|
||||
- **Stores**: [src/lib/stores/](mdc:src/lib/stores/) - State management for data
|
||||
- **Version Management**: [src/lib/stores/version.ts](mdc:src/lib/stores/version.ts)
|
||||
\- Version tracking
|
||||
|
||||
## Data Operations
|
||||
|
||||
- **CRUD Operations**: Create, read, update, delete for all data entities
|
||||
- **Query Optimization**: Efficient SQL queries with proper indexing
|
||||
- **Transaction Support**: ACID compliance for critical operations
|
||||
- **Data Validation**: Input validation and sanitization
|
||||
|
||||
## File Management
|
||||
|
||||
- **Cache Directory**: [src-tauri/cache/](mdc:src-tauri/cache/)
|
||||
\- Temporary file storage
|
||||
- **Upload Directory**: [src-tauri/cache/uploads/](mdc:src-tauri/cache/uploads/)
|
||||
\- User upload storage
|
||||
- **Bilibili Cache**: [src-tauri/cache/bilibili/](mdc:src-tauri/cache/bilibili/)
|
||||
\- Platform-specific cache
|
||||
|
||||
## Data Persistence
|
||||
|
||||
- **SQLite Files**: [src-tauri/data/data_v2.db](mdc:src-tauri/data/data_v2.db)
|
||||
\- Main database file
|
||||
- **Write-Ahead Logging**: WAL mode for concurrent access and performance
|
||||
- **Backup Strategy**: Database backup and recovery procedures
|
||||
- **Migration Handling**: Automatic schema updates and data migration
|
||||
|
||||
## Development Guidelines
|
||||
|
||||
- Use prepared statements to prevent SQL injection
|
||||
- Implement proper error handling for database operations
|
||||
- Use transactions for multi-step operations
|
||||
- Follow database naming conventions consistently
|
||||
- Test database operations with sample data
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
47
.cursor/rules/frontend-development.mdc
Normal file
@@ -0,0 +1,47 @@
|
||||
# Frontend Development Guidelines
|
||||
|
||||
## Svelte 3 Best Practices
|
||||
|
||||
- Use Svelte 3 syntax with `<script>` tags for component logic
|
||||
- Prefer reactive statements with `$:` for derived state
|
||||
- Use stores from [src/lib/stores/](mdc:src/lib/stores/) for global state management
|
||||
- Import components from [src/lib/components/](mdc:src/lib/components/)
|
||||
|
||||
## TypeScript Configuration
|
||||
|
||||
- Follow the configuration in [tsconfig.json](mdc:tsconfig.json)
|
||||
- Use strict type checking with `checkJs: true`
|
||||
- Extends `@tsconfig/svelte` for Svelte-specific TypeScript settings
|
||||
- Base URL is set to workspace root for clean imports
|
||||
|
||||
## Component Structure
|
||||
|
||||
- **Page components**: Located in [src/page/](mdc:src/page/) directory
|
||||
- **Reusable components**: Located in [src/lib/components/](mdc:src/lib/components/)
|
||||
directory
|
||||
- **Layout components**: [src/App.svelte](mdc:src/App.svelte),
|
||||
[src/AppClip.svelte](mdc:src/AppClip.svelte), [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
|
||||
## Styling
|
||||
|
||||
- Use Tailwind CSS classes for styling
|
||||
- Configuration in [tailwind.config.cjs](mdc:tailwind.config.cjs)
|
||||
- PostCSS configuration in [postcss.config.cjs](mdc:postcss.config.cjs)
|
||||
- Global styles in [src/styles.css](mdc:src/styles.css)
|
||||
|
||||
## Entry Points
|
||||
|
||||
- **Main app**: [src/main.ts](mdc:src/main.ts) - Main application entry
|
||||
- **Clip mode**: [src/main_clip.ts](mdc:src/main_clip.ts) - Clip editing interface
|
||||
- **Live mode**: [src/main_live.ts](mdc:src/main_live.ts) - Live streaming interface
|
||||
|
||||
## Development Workflow
|
||||
|
||||
- Use `yarn dev` for frontend-only development
|
||||
- Use `yarn tauri dev` for full Tauri development
|
||||
- Use `yarn check` for TypeScript and Svelte type checking
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
53
.cursor/rules/project-overview.mdc
Normal file
@@ -0,0 +1,53 @@
|
||||
# BiliBili ShadowReplay Project Overview
|
||||
|
||||
This is a Tauri-based desktop application for caching live streams and performing
|
||||
real-time editing and submission. It supports Bilibili and Douyin platforms.
|
||||
|
||||
## Project Structure
|
||||
|
||||
### Frontend (Svelte + TypeScript)
|
||||
|
||||
- **Main entry points**: [src/main.ts](mdc:src/main.ts),
|
||||
[src/main_clip.ts](mdc:src/main_clip.ts), [src/main_live.ts](mdc:src/main_live.ts)
|
||||
- **App components**: [src/App.svelte](mdc:src/App.svelte),
|
||||
[src/AppClip.svelte](mdc:src/AppClip.svelte), [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
- **Pages**: Located in [src/page/](mdc:src/page/) directory
|
||||
- **Components**: Located in [src/lib/components/](mdc:src/lib/components/) directory
|
||||
- **Stores**: Located in [src/lib/stores/](mdc:src/lib/stores/) directory
|
||||
|
||||
### Backend (Rust + Tauri)
|
||||
|
||||
- **Main entry**: [src-tauri/src/main.rs](mdc:src-tauri/src/main.rs)
|
||||
- **Core modules**:
|
||||
- [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/) - Stream recording functionality
|
||||
- [src-tauri/src/database/](mdc:src-tauri/src/database/) - Database operations
|
||||
- [src-tauri/src/handlers/](mdc:src-tauri/src/handlers/) - Tauri command handlers
|
||||
- **Custom crate**:
|
||||
[src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/) -
|
||||
Danmaku stream processing
|
||||
|
||||
### Configuration
|
||||
|
||||
- **Frontend config**: [tsconfig.json](mdc:tsconfig.json),
|
||||
[vite.config.ts](mdc:vite.config.ts), [tailwind.config.cjs](mdc:tailwind.config.cjs)
|
||||
- **Backend config**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml), [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
|
||||
- **Example config**: [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
|
||||
## Key Technologies
|
||||
|
||||
- **Frontend**: Svelte 3, TypeScript, Tailwind CSS, Flowbite
|
||||
- **Backend**: Rust, Tauri 2, SQLite, FFmpeg
|
||||
- **AI Features**: LangChain, Whisper for transcription
|
||||
- **Build Tools**: Vite, VitePress for documentation
|
||||
|
||||
## Development Commands
|
||||
|
||||
- `yarn dev` - Start development server
|
||||
- `yarn tauri dev` - Start Tauri development
|
||||
- `yarn build` - Build frontend
|
||||
- `yarn docs:dev` - Start documentation server
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
56
.cursor/rules/rust-backend.mdc
Normal file
@@ -0,0 +1,56 @@
|
||||
# Rust Backend Development Guidelines
|
||||
|
||||
## Project Structure
|
||||
|
||||
- **Main entry**: [src-tauri/src/main.rs](mdc:src-tauri/src/main.rs)
|
||||
\- Application entry point
|
||||
- **Core modules**:
|
||||
- [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/)
|
||||
\- Stream recording and management
|
||||
- [src-tauri/src/database/](mdc:src-tauri/src/database/)
|
||||
\- SQLite database operations
|
||||
- [src-tauri/src/handlers/](mdc:src-tauri/src/handlers/)
|
||||
\- Tauri command handlers
|
||||
- [src-tauri/src/subtitle_generator/](mdc:src-tauri/src/subtitle_generator/)
|
||||
\- AI-powered subtitle generation
|
||||
|
||||
## Custom Crates
|
||||
|
||||
- **danmu_stream**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/)
|
||||
\- Danmaku stream processing library
|
||||
|
||||
## Dependencies
|
||||
|
||||
- **Tauri 2**: Core framework for desktop app functionality
|
||||
- **FFmpeg**: Video/audio processing via `async-ffmpeg-sidecar`
|
||||
- **Whisper**: AI transcription via `whisper-rs` (CUDA support available)
|
||||
- **LangChain**: AI agent functionality
|
||||
- **SQLite**: Database via `sqlx` with async runtime
|
||||
|
||||
## Configuration
|
||||
|
||||
- **Cargo.toml**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml)
|
||||
\- Dependencies and features
|
||||
- **Tauri config**: [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
|
||||
\- App configuration
|
||||
- **Example config**: [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
\- User configuration template
|
||||
|
||||
## Features
|
||||
|
||||
- **default**: Includes GUI and core functionality
|
||||
- **cuda**: Enables CUDA acceleration for Whisper transcription
|
||||
- **headless**: Headless mode without GUI
|
||||
- **custom-protocol**: Required for production builds
|
||||
|
||||
## Development Commands
|
||||
|
||||
- `yarn tauri dev` - Start Tauri development with hot reload
|
||||
- `yarn tauri build` - Build production application
|
||||
- `cargo check` - Check Rust code without building
|
||||
- `cargo test` - Run Rust tests
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
60
.cursor/rules/streaming-recording.mdc
Normal file
@@ -0,0 +1,60 @@
|
||||
# Streaming and Recording System
|
||||
|
||||
## Core Recording Components
|
||||
|
||||
- **Recorder Manager**: [src-tauri/src/recorder_manager.rs](mdc:src-tauri/src/recorder_manager.rs)
|
||||
\- Main recording orchestration
|
||||
- **Recorder**: [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/)
|
||||
\- Individual stream recording logic
|
||||
- **Danmaku Stream**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/)
|
||||
\- Custom crate for bullet comment processing
|
||||
|
||||
## Supported Platforms
|
||||
|
||||
- **Bilibili**: Main platform support with live stream caching
|
||||
- **Douyin**: TikTok's Chinese platform support
|
||||
- **Multi-stream**: Support for recording multiple streams simultaneously
|
||||
|
||||
## Recording Features
|
||||
|
||||
- **Live Caching**: Real-time stream recording and buffering
|
||||
- **Time-based Clipping**: Extract specific time segments from recorded streams
|
||||
- **Danmaku Capture**: Record bullet comments and chat messages
|
||||
- **Quality Control**: Configurable recording quality and format options
|
||||
|
||||
## Frontend Interfaces
|
||||
|
||||
- **Live Mode**: [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
\- Live streaming interface
|
||||
- **Clip Mode**: [src/AppClip.svelte](mdc:src/AppClip.svelte)
|
||||
\- Video editing and clipping
|
||||
- **Room Management**: [src/page/Room.svelte](mdc:src/page/Room.svelte)
|
||||
\- Stream room configuration
|
||||
- **Task Management**: [src/page/Task.svelte](mdc:src/page/Task.svelte)
|
||||
\- Recording task monitoring
|
||||
|
||||
## Technical Implementation
|
||||
|
||||
- **FFmpeg Integration**: Video/audio processing via `async-ffmpeg-sidecar`
|
||||
- **M3U8 Support**: HLS stream processing with `m3u8-rs`
|
||||
- **Async Processing**: Non-blocking I/O with `tokio` runtime
|
||||
- **Database Storage**: SQLite for metadata and recording information
|
||||
|
||||
## Configuration
|
||||
|
||||
- **Recording Settings**: Configure in [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
- **FFmpeg Path**: Set FFmpeg binary location for video processing
|
||||
- **Storage Paths**: Configure cache and output directories
|
||||
- **Quality Settings**: Adjust recording bitrate and format options
|
||||
|
||||
## Development Workflow
|
||||
|
||||
- Use [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/) for core recording logic
|
||||
- Test with [src-tauri/tests/](mdc:src-tauri/tests/) directory
|
||||
- Monitor recording progress via progress manager
|
||||
- Handle errors gracefully with custom error types
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
36
.devcontainer/Dockerfile
Normal file
@@ -0,0 +1,36 @@
|
||||
ARG VARIANT=bookworm-slim
|
||||
FROM debian:${VARIANT}
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Arguments
|
||||
ARG CONTAINER_USER=vscode
|
||||
ARG CONTAINER_GROUP=vscode
|
||||
|
||||
# Install dependencies
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y \
|
||||
build-essential \
|
||||
clang \
|
||||
cmake \
|
||||
curl \
|
||||
file \
|
||||
git \
|
||||
libayatana-appindicator3-dev \
|
||||
librsvg2-dev \
|
||||
libssl-dev \
|
||||
libwebkit2gtk-4.1-dev \
|
||||
libxdo-dev \
|
||||
pkg-config \
|
||||
wget \
|
||||
&& apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts
|
||||
|
||||
# Set users
|
||||
RUN adduser --disabled-password --gecos "" ${CONTAINER_USER}
|
||||
USER ${CONTAINER_USER}
|
||||
WORKDIR /home/${CONTAINER_USER}
|
||||
|
||||
# Install rustup
|
||||
RUN curl --proto "=https" --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal
|
||||
ENV PATH=${PATH}:/home/${CONTAINER_USER}/.cargo/bin
|
||||
|
||||
CMD [ "/bin/bash" ]
|
||||
31
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,31 @@
|
||||
{
|
||||
"name": "vscode",
|
||||
"build": {
|
||||
"dockerfile": "Dockerfile",
|
||||
"args": {
|
||||
"CONTAINER_USER": "vscode",
|
||||
"CONTAINER_GROUP": "vscode"
|
||||
}
|
||||
},
|
||||
"features": {
|
||||
"ghcr.io/devcontainers/features/node:1": {
|
||||
"version": "latest"
|
||||
}
|
||||
},
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"settings": {
|
||||
"lldb.executable": "/usr/bin/lldb",
|
||||
"files.watcherExclude": {
|
||||
"**/target/**": true
|
||||
}
|
||||
},
|
||||
"extensions": [
|
||||
"vadimcn.vscode-lldb",
|
||||
"rust-lang.rust-analyzer",
|
||||
"tamasfe.even-better-toml"
|
||||
]
|
||||
}
|
||||
},
|
||||
"remoteUser": "vscode"
|
||||
}
|
||||
39
.dockerignore
Normal file
@@ -0,0 +1,39 @@
|
||||
# Dependencies
|
||||
node_modules
|
||||
.pnpm-store
|
||||
.npm
|
||||
.yarn/cache
|
||||
.yarn/unplugged
|
||||
.yarn/build-state.yml
|
||||
.yarn/install-state.gz
|
||||
|
||||
# Build outputs
|
||||
dist
|
||||
build
|
||||
target
|
||||
*.log
|
||||
|
||||
# Version control
|
||||
.git
|
||||
.gitignore
|
||||
|
||||
# IDE and editor files
|
||||
.idea
|
||||
.vscode
|
||||
*.swp
|
||||
*.swo
|
||||
.DS_Store
|
||||
|
||||
# Environment files
|
||||
.env
|
||||
.env.local
|
||||
.env.*.local
|
||||
|
||||
# Debug files
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
# Tauri specific
|
||||
src-tauri/target
|
||||
src-tauri/dist
|
||||
7
.github/CONTRIBUTING.md
vendored
@@ -12,7 +12,8 @@
|
||||
|
||||
### Windows
|
||||
|
||||
Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于 Whisper 是否使用 GPU 加速。`cpu` 版本使用 CPU 进行推理,`cuda` 版本使用 GPU 进行推理。
|
||||
Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于 Whisper 是否使用 GPU 加速。
|
||||
`cpu` 版本使用 CPU 进行推理,`cuda` 版本使用 GPU 进行推理。
|
||||
|
||||
默认运行为 `cpu` 版本,使用 `yarn tauri dev --features cuda` 命令运行 `cuda` 版本。
|
||||
|
||||
@@ -20,7 +21,9 @@ Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于
|
||||
|
||||
1. 安装 LLVM 且配置相关环境变量,详情见 [LLVM Windows Setup](https://llvm.org/docs/GettingStarted.html#building-llvm-on-windows);
|
||||
|
||||
2. 安装 CUDA Toolkit,详情见 [CUDA Windows Setup](https://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html);要注意,安装时请勾选 **VisualStudio integration**。
|
||||
2. 安装 CUDA Toolkit,详情见
|
||||
[CUDA Windows Setup](https://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html);
|
||||
要注意,安装时请勾选 **VisualStudio integration**。
|
||||
|
||||
### 常见问题
|
||||
|
||||
|
||||
21
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -1,21 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: 提交一个 BUG
|
||||
title: "[BUG]"
|
||||
labels: bug
|
||||
assignees: Xinrea
|
||||
---
|
||||
|
||||
**描述:**
|
||||
简要描述一下这个 BUG 的现象
|
||||
|
||||
**日志和截图:**
|
||||
如果可以的话,请尽量附上相关截图和日志文件(日志是位于安装目录下,名为 bsr.log 的文件)。
|
||||
|
||||
**相关信息:**
|
||||
|
||||
- 程序版本:
|
||||
- 系统类型:
|
||||
|
||||
**其他**
|
||||
任何其他想说的
|
||||
47
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
name: Bug Report
|
||||
description: 提交 BUG 报告.
|
||||
title: "[bug] "
|
||||
labels: ["bug"]
|
||||
assignees:
|
||||
- Xinrea
|
||||
body:
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: 提交须知
|
||||
description: 请确认以下内容
|
||||
options:
|
||||
- label: 我是在最新版本上发现的此问题
|
||||
required: true
|
||||
- label: 我已阅读 [常见问题](https://bsr.xinrea.cn/usage/faq.html) 的说明
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: app_type
|
||||
attributes:
|
||||
label: 以哪种方式使用的该软件?
|
||||
multiple: false
|
||||
options:
|
||||
- Docker 镜像
|
||||
- 桌面应用
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: 运行环境
|
||||
multiple: false
|
||||
options:
|
||||
- Linux
|
||||
- Windows
|
||||
- MacOS
|
||||
- Docker
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: BUG 描述
|
||||
description: 请尽可能详细描述 BUG 的现象以及复现的方法
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: 日志
|
||||
description: 请粘贴日志内容或是上传日志文件(在主窗口的设置页面,提供了一键打开日志目录所在位置的按钮;当你打开日志目录所在位置后,进入 logs 目录,找到后缀名为 log 的文件)
|
||||
validations:
|
||||
required: true
|
||||
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -1,20 +0,0 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: 提交一个新功能的建议
|
||||
title: "[feature]"
|
||||
labels: enhancement
|
||||
assignees: Xinrea
|
||||
|
||||
---
|
||||
|
||||
**遇到的问题:**
|
||||
在使用过程中遇到了什么问题让你想要提出建议
|
||||
|
||||
**想要的功能:**
|
||||
想要怎样的新功能来解决这个问题
|
||||
|
||||
**通过什么方式实现(有思路的话):**
|
||||
如果有相关的实现思路或者是参考,可以在此提供
|
||||
|
||||
**其他:**
|
||||
其他任何想说的话
|
||||
13
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
name: Feature Request
|
||||
description: 提交新功能的需求
|
||||
title: "[feature] "
|
||||
labels: ["feature"]
|
||||
assignees:
|
||||
- Xinrea
|
||||
body:
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 需求描述
|
||||
description: 请尽可能详细描述你想要的新功能
|
||||
validations:
|
||||
required: true
|
||||
46
.github/workflows/check.yml
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
name: Rust Check
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- "**/*.rs"
|
||||
- "src-tauri/Cargo.toml"
|
||||
- "src-tauri/Cargo.lock"
|
||||
|
||||
jobs:
|
||||
check:
|
||||
runs-on: self-linux
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: rustfmt clippy
|
||||
|
||||
- name: Install dependencies (ubuntu only)
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf ffmpeg
|
||||
|
||||
- name: Check formatting
|
||||
run: cargo fmt --check
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Check clippy
|
||||
run: cargo clippy
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Check clippy (headless)
|
||||
run: cargo clippy --no-default-features --features headless
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Check tests
|
||||
run: cargo test -v
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Check tests (headless)
|
||||
run: cargo test --no-default-features --features headless -v
|
||||
working-directory: src-tauri
|
||||
27
.github/workflows/main.yml
vendored
@@ -55,18 +55,9 @@ jobs:
|
||||
# Those targets are only used on macos runners so it's in an `if` to slightly speed up windows and linux builds.
|
||||
targets: ${{ matrix.platform == 'macos-latest' && 'aarch64-apple-darwin,x86_64-apple-darwin' || '' }}
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: "./src-tauri -> target"
|
||||
|
||||
- name: Install CUDA toolkit (Windows CUDA only)
|
||||
if: matrix.platform == 'windows-latest' && matrix.features == 'cuda'
|
||||
uses: Jimver/cuda-toolkit@master
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: "./src-tauri -> target"
|
||||
uses: Jimver/cuda-toolkit@v0.2.24
|
||||
|
||||
- name: Setup ffmpeg
|
||||
if: matrix.platform == 'windows-latest'
|
||||
@@ -91,6 +82,19 @@ jobs:
|
||||
Copy-Item "$cudaPath\cublas64*.dll" -Destination $targetPath
|
||||
Copy-Item "$cudaPath\cublasLt64*.dll" -Destination $targetPath
|
||||
|
||||
- name: Get previous tag
|
||||
id: get_previous_tag
|
||||
run: |
|
||||
# Get the previous tag (excluding the current one being pushed)
|
||||
PREVIOUS_TAG=$(git describe --tags --abbrev=0 HEAD~1 2>/dev/null || echo "")
|
||||
if [ -z "$PREVIOUS_TAG" ]; then
|
||||
# If no previous tag found, use the first commit
|
||||
PREVIOUS_TAG=$(git rev-list --max-parents=0 HEAD | head -1)
|
||||
fi
|
||||
echo "previous_tag=$PREVIOUS_TAG" >> $GITHUB_OUTPUT
|
||||
echo "current_tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||
shell: bash
|
||||
|
||||
- uses: tauri-apps/tauri-action@v0
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -100,8 +104,7 @@ jobs:
|
||||
with:
|
||||
tagName: v__VERSION__
|
||||
releaseName: "BiliBili ShadowReplay v__VERSION__"
|
||||
releaseBody: "See the assets to download this version and install."
|
||||
releaseBody: "> [!NOTE]\n> 如果你是第一次下载安装,请参考 [安装准备](https://bsr.xinrea.cn/getting-started/installation/desktop.html) 选择合适的版本。\n> Changelog: https://github.com/Xinrea/bili-shadowreplay/compare/${{ steps.get_previous_tag.outputs.previous_tag }}...${{ steps.get_previous_tag.outputs.current_tag }}"
|
||||
releaseDraft: true
|
||||
prerelease: false
|
||||
args: ${{ matrix.args }} ${{ matrix.platform == 'windows-latest' && matrix.features == 'cuda' && '--config src-tauri/tauri.windows.cuda.conf.json' || '' }}
|
||||
includeDebug: true
|
||||
|
||||
51
.github/workflows/package.yml
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
name: Docker Build and Push
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=sha,format=long
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=raw,value=latest,enable={{is_default_branch}}
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
66
.github/workflows/pages.yml
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
name: Deploy VitePress site to Pages
|
||||
|
||||
on:
|
||||
# Runs on pushes targeting the `main` branch. Change this to `master` if you're
|
||||
# using the `master` branch as the default branch.
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- docs/**
|
||||
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
|
||||
permissions:
|
||||
contents: read
|
||||
pages: write
|
||||
id-token: write
|
||||
|
||||
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
|
||||
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
|
||||
concurrency:
|
||||
group: pages
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
# Build job
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Not needed if lastUpdated is not enabled
|
||||
# - uses: pnpm/action-setup@v3 # Uncomment this block if you're using pnpm
|
||||
# with:
|
||||
# version: 9 # Not needed if you've set "packageManager" in package.json
|
||||
# - uses: oven-sh/setup-bun@v1 # Uncomment this if you're using Bun
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 22
|
||||
cache: npm # or pnpm / yarn
|
||||
- name: Setup Pages
|
||||
uses: actions/configure-pages@v4
|
||||
- name: Install dependencies
|
||||
run: yarn install # or pnpm install / yarn install / bun install
|
||||
- name: Build with VitePress
|
||||
run: yarn run docs:build # or pnpm docs:build / yarn docs:build / bun run docs:build
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-pages-artifact@v3
|
||||
with:
|
||||
path: docs/.vitepress/dist
|
||||
|
||||
# Deployment job
|
||||
deploy:
|
||||
environment:
|
||||
name: github-pages
|
||||
url: ${{ steps.deployment.outputs.page_url }}
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
name: Deploy
|
||||
steps:
|
||||
- name: Deploy to GitHub Pages
|
||||
id: deployment
|
||||
uses: actions/deploy-pages@v4
|
||||
11
.gitignore
vendored
@@ -11,6 +11,7 @@ node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
/target/
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
@@ -27,4 +28,12 @@ dist-ssr
|
||||
src-tauri/*.exe
|
||||
|
||||
# test files
|
||||
src-tauri/tests/audio/*.srt
|
||||
src-tauri/tests/audio/*.srt
|
||||
|
||||
.env
|
||||
|
||||
docs/.vitepress/cache
|
||||
docs/.vitepress/dist
|
||||
|
||||
*.debug.js
|
||||
*.debug.map
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
[[language]]
|
||||
name = "rust"
|
||||
auto-format = true
|
||||
rulers = []
|
||||
|
||||
[[language]]
|
||||
name = "svelte"
|
||||
|
||||
5
.markdownlint.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"MD033": {
|
||||
"allowed_elements": ["nobr", "sup"]
|
||||
}
|
||||
}
|
||||
51
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,51 @@
|
||||
fail_fast: true
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v6.0.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
exclude: \.json$
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.36.2
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: cargo-fmt
|
||||
name: cargo fmt
|
||||
entry: cargo fmt --manifest-path src-tauri/Cargo.toml --
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- id: cargo-clippy
|
||||
name: cargo clippy
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo clippy --manifest-path src-tauri/Cargo.toml
|
||||
|
||||
- id: cargo-clippy-headless
|
||||
name: cargo clippy headless
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo clippy --manifest-path src-tauri/Cargo.toml --no-default-features --features headless
|
||||
|
||||
- id: cargo-test
|
||||
name: cargo test
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo test --manifest-path src-tauri/Cargo.toml
|
||||
|
||||
- id: cargo-test-headless
|
||||
name: cargo test headless
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo test --manifest-path src-tauri/Cargo.toml --no-default-features --features headless
|
||||
85
Dockerfile
Normal file
@@ -0,0 +1,85 @@
|
||||
# Build frontend
|
||||
FROM node:20-bullseye AS frontend-builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install system dependencies
|
||||
RUN apt-get update && apt-get install -y \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy package files
|
||||
COPY package.json yarn.lock ./
|
||||
|
||||
# Install dependencies with specific flags
|
||||
RUN yarn install --frozen-lockfile
|
||||
|
||||
# Copy source files
|
||||
COPY . .
|
||||
|
||||
# Build frontend
|
||||
RUN yarn build
|
||||
|
||||
# Build Rust backend
|
||||
FROM rust:1.90-slim AS rust-builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install required system dependencies
|
||||
RUN apt-get update && apt-get install -y \
|
||||
cmake \
|
||||
pkg-config \
|
||||
libssl-dev \
|
||||
glib-2.0-dev \
|
||||
libclang-dev \
|
||||
g++ \
|
||||
wget \
|
||||
xz-utils \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy Rust project files
|
||||
COPY src-tauri/Cargo.toml src-tauri/Cargo.lock ./src-tauri/
|
||||
COPY src-tauri/src ./src-tauri/src
|
||||
COPY src-tauri/crates ./src-tauri/crates
|
||||
|
||||
# Build Rust backend
|
||||
WORKDIR /app/src-tauri
|
||||
RUN rustup component add rustfmt
|
||||
RUN cargo build --no-default-features --features headless --release
|
||||
|
||||
# Final stage
|
||||
FROM debian:trixie-slim AS final
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install runtime dependencies, SSL certificates and Chinese fonts
|
||||
RUN apt-get update && apt-get install -y \
|
||||
libssl3 \
|
||||
ca-certificates \
|
||||
fonts-wqy-microhei \
|
||||
netbase \
|
||||
nscd \
|
||||
ffmpeg \
|
||||
&& update-ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
|
||||
RUN touch /etc/netgroup
|
||||
RUN mkdir -p /var/run/nscd && chmod 755 /var/run/nscd
|
||||
|
||||
# Add /app to PATH
|
||||
ENV PATH="/app:${PATH}"
|
||||
|
||||
# Copy built frontend
|
||||
COPY --from=frontend-builder /app/dist ./dist
|
||||
|
||||
# Copy built Rust binary
|
||||
COPY --from=rust-builder /app/src-tauri/target/release/bili-shadowreplay .
|
||||
|
||||
# Expose port
|
||||
EXPOSE 3000
|
||||
|
||||
# Run the application
|
||||
CMD ["sh", "-c", "nscd && ./bili-shadowreplay"]
|
||||
71
README.md
@@ -1,72 +1,31 @@
|
||||
# BiliBili ShadowReplay
|
||||
|
||||

|
||||

|
||||
|
||||

|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
> [!WARNING]
|
||||
> v2.0.0 版本为重大更新,将不兼容 v1.x 版本的数据。
|
||||
[](https://deepwiki.com/Xinrea/bili-shadowreplay)
|
||||
|
||||
BiliBili ShadowReplay 是一个缓存直播并进行实时编辑投稿的工具。通过划定时间区间,并编辑简单的必需信息,即可完成直播切片以及投稿,将整个流程压缩到分钟级。同时,也支持对缓存的历史直播进行回放,以及相同的切片编辑投稿处理流程。
|
||||
|
||||
目前仅支持 B 站和抖音平台的直播。
|
||||
|
||||

|
||||
[](https://www.star-history.com/#Xinrea/bili-shadowreplay&Date)
|
||||
|
||||
## 安装和使用
|
||||
|
||||

|
||||
|
||||
前往网站查看说明:[BiliBili ShadowReplay](https://bsr.xinrea.cn/)
|
||||
|
||||
## 参与开发
|
||||
|
||||
[Contributing](.github/CONTRIBUTING.md)
|
||||
可以通过 [DeepWiki](https://deepwiki.com/Xinrea/bili-shadowreplay) 了解本项目。
|
||||
|
||||
## 总览
|
||||
贡献指南:[Contributing](.github/CONTRIBUTING.md)
|
||||
|
||||

|
||||
## 赞助
|
||||
|
||||
## 直播间管理
|
||||
|
||||

|
||||
|
||||
显示当前缓存的直播间列表,在添加前需要在账号页面添加至少一个账号(主账号)用于直播流以及用户信息的获取。
|
||||
操作菜单包含打开直播流、查看历史记录以及删除等操作。其中历史记录以列表形式展示,可以进行回放以及删除。
|
||||
|
||||

|
||||
|
||||
无论是正在进行的直播还是历史录播,都可在预览窗口进行回放,同时也可以进行切片编辑以及投稿。关于预览窗口的相关说明请见 [预览窗口](#预览窗口)。
|
||||
|
||||
## 账号管理
|
||||
|
||||

|
||||
|
||||
程序需要至少一个账号用于直播流以及用户信息的获取,可以在此页面添加账号。
|
||||
|
||||
你可以添加多个账号,但只有一个账号会被标记为主账号,主账号用于直播流的获取。所有账号都可在切片投稿或是观看直播流发送弹幕时自由选择,详情见 [预览窗口](#预览窗口)。
|
||||
|
||||
抖音账号目前仅支持手动 Cookie 添加,且账号仅用于获取直播信息和直播流。
|
||||
|
||||
## 预览窗口
|
||||
|
||||

|
||||
|
||||
预览窗口是一个多功能的窗口,可以用于观看直播流、回放历史录播、编辑切片、记录时间点以及投稿等操作。如果当前播放的是直播流,那么会有实时弹幕观看以及发送弹幕相关的选项。
|
||||
|
||||
通过预览窗口的快捷键操作,可以快速选择时间区间,进行切片生成以及投稿。
|
||||
|
||||
无论是弹幕发送还是投稿,均可自由选择账号,只要在账号管理中添加了该账号。
|
||||
|
||||
进度条上方会显示弹幕频率图,可以直观地看到弹幕的分布情况;右侧的弹幕统计过滤器可以用于过滤弹幕,只显示含有指定文字的弹幕的统计情况。
|
||||
|
||||
## 封面编辑
|
||||
|
||||

|
||||
|
||||
在预览窗口中,生成切片后可以进行封面编辑,包括关键帧的选择、文字的添加和拖动等。
|
||||
|
||||
## 设置
|
||||
|
||||

|
||||
|
||||
在设置页面可以进行一些基本的设置,包括缓存和切片的保存路径,以及相关事件是否显示通知等。
|
||||
|
||||
> [!WARNING]
|
||||
> 缓存目录进行切换时,会有文件复制等操作,如果缓存量较大,可能会耗费较长时间;且在此期间预览功能会暂时失效,需要等待操作完成。
|
||||

|
||||
|
||||
2
_typos.toml
Normal file
@@ -0,0 +1,2 @@
|
||||
[default.extend-identifiers]
|
||||
pull_datas = "pull_datas"
|
||||
78
docs/.vitepress/config.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import { defineConfig } from "vitepress";
|
||||
import { withMermaid } from "vitepress-plugin-mermaid";
|
||||
|
||||
// https://vitepress.dev/reference/site-config
|
||||
export default withMermaid({
|
||||
title: "BiliBili ShadowReplay",
|
||||
description: "直播录制/实时回放/剪辑/投稿工具",
|
||||
themeConfig: {
|
||||
// https://vitepress.dev/reference/default-theme-config
|
||||
nav: [
|
||||
{ text: "Home", link: "/" },
|
||||
{
|
||||
text: "Releases",
|
||||
link: "https://github.com/Xinrea/bili-shadowreplay/releases",
|
||||
},
|
||||
],
|
||||
|
||||
sidebar: [
|
||||
{
|
||||
text: "开始使用",
|
||||
items: [
|
||||
{
|
||||
text: "安装准备",
|
||||
items: [
|
||||
{
|
||||
text: "桌面端安装",
|
||||
link: "/getting-started/installation/desktop",
|
||||
},
|
||||
{
|
||||
text: "Docker 安装",
|
||||
link: "/getting-started/installation/docker",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
text: "配置使用",
|
||||
items: [
|
||||
{ text: "账号配置", link: "/getting-started/config/account" },
|
||||
{ text: "FFmpeg 配置", link: "/getting-started/config/ffmpeg" },
|
||||
{ text: "Whisper 配置", link: "/getting-started/config/whisper" },
|
||||
{ text: "LLM 配置", link: "/getting-started/config/llm" },
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
text: "说明文档",
|
||||
items: [
|
||||
{
|
||||
text: "功能说明",
|
||||
items: [
|
||||
{ text: "工作流程", link: "/usage/features/workflow" },
|
||||
{ text: "直播间管理", link: "/usage/features/room" },
|
||||
{ text: "切片功能", link: "/usage/features/clip" },
|
||||
{ text: "字幕功能", link: "/usage/features/subtitle" },
|
||||
{ text: "弹幕功能", link: "/usage/features/danmaku" },
|
||||
{ text: "Webhook", link: "/usage/features/webhook" },
|
||||
],
|
||||
},
|
||||
{ text: "常见问题", link: "/usage/faq" },
|
||||
],
|
||||
},
|
||||
{
|
||||
text: "开发文档",
|
||||
items: [
|
||||
{
|
||||
text: "DeepWiki",
|
||||
link: "https://deepwiki.com/Xinrea/bili-shadowreplay",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
|
||||
socialLinks: [
|
||||
{ icon: "github", link: "https://github.com/Xinrea/bili-shadowreplay" },
|
||||
],
|
||||
},
|
||||
});
|
||||
|
Before Width: | Height: | Size: 555 KiB |
|
Before Width: | Height: | Size: 1.2 MiB |
|
Before Width: | Height: | Size: 2.9 MiB |
12
docs/getting-started/config/account.md
Normal file
@@ -0,0 +1,12 @@
|
||||
# 账号配置
|
||||
|
||||
要添加直播间,至少需要配置一个同平台的账号。在账号页面,你可以通过添加账号按钮添加一个账号。
|
||||
|
||||
- B 站账号:目前支持扫码登录和 Cookie 手动配置两种方式,推荐使用扫码登录
|
||||
- 抖音账号:目前仅支持 Cookie 手动配置登陆
|
||||
|
||||
## 抖音账号配置
|
||||
|
||||
首先确保已经登录抖音,然后打开[个人主页](https://www.douyin.com/user/self),右键单击网页,在菜单中选择 `检查(Inspect)`,打开开发者工具,切换到 `网络(Network)` 选项卡,然后刷新网页,此时能在列表中找到 `self` 请求(一般是列表中第一个),单击该请求,查看`请求标头`,在 `请求标头` 中找到 `Cookie`,复制该字段的值,粘贴到配置页面的 `Cookie` 输入框中,要注意复制完全。
|
||||
|
||||

|
||||
47
docs/getting-started/config/ffmpeg.md
Normal file
@@ -0,0 +1,47 @@
|
||||
# FFmpeg 配置
|
||||
|
||||
FFmpeg 是一个开源的音视频处理工具,支持多种格式的音视频编解码、转码、剪辑、合并等操作。
|
||||
在本项目中,FFmpeg 用于切片生成以及字幕和弹幕的硬编码处理,因此需要确保安装了 FFmpeg。
|
||||
|
||||
## MacOS
|
||||
|
||||
在 MacOS 上安装 FFmpeg 非常简单,可以使用 Homebrew 来安装:
|
||||
|
||||
```bash
|
||||
brew install ffmpeg
|
||||
```
|
||||
|
||||
如果没有安装 Homebrew,可以参考 [Homebrew 官网](https://brew.sh/) 进行安装。
|
||||
|
||||
## Linux
|
||||
|
||||
在 Linux 上安装 FFmpeg 可以使用系统自带的包管理器进行安装,例如:
|
||||
|
||||
- Ubuntu/Debian 系统:
|
||||
|
||||
```bash
|
||||
sudo apt install ffmpeg
|
||||
```
|
||||
|
||||
- Fedora 系统:
|
||||
|
||||
```bash
|
||||
sudo dnf install ffmpeg
|
||||
```
|
||||
|
||||
- Arch Linux 系统:
|
||||
|
||||
```bash
|
||||
sudo pacman -S ffmpeg
|
||||
```
|
||||
|
||||
- CentOS 系统:
|
||||
|
||||
```bash
|
||||
sudo yum install epel-release
|
||||
sudo yum install ffmpeg
|
||||
```
|
||||
|
||||
## Windows
|
||||
|
||||
Windows 版本安装后,FFmpeg 已经放置在了程序目录下,因此不需要额外安装。
|
||||
9
docs/getting-started/config/llm.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# LLM 配置
|
||||
|
||||

|
||||
|
||||
助手页面的 AI Agent 助手功能需要配置大模型,目前仅支持配置 OpenAI 协议兼容的大模型服务。
|
||||
|
||||
本软件并不提供大模型服务,请自行选择服务提供商。要注意,使用 AI Agent 助手需要消耗比普通对话更多的 Token,请确保有足够的 Token 余额。
|
||||
|
||||
此外,AI Agent 的功能需要大模型支持 Function Calling 功能,否则无法正常调用工具。
|
||||
46
docs/getting-started/config/whisper.md
Normal file
@@ -0,0 +1,46 @@
|
||||
# Whisper 配置
|
||||
|
||||
要使用 AI 字幕识别功能,需要在设置页面配置 Whisper。目前可以选择使用本地运行 Whisper 模型,或是使用在线的 Whisper 服务(通常需要付
|
||||
费获取 API Key)。
|
||||
|
||||
> [!NOTE]
|
||||
> 其实有许多更好的中文字幕识别解决方案,但是这类服务通常需要将文件上传到对象存储后异步处理,考虑到实现的复杂度,选择了使用本地运行 Whisper 模型或是使
|
||||
> 用在线的 Whisper 服务,在请求返回时能够直接获取字幕生成结果。
|
||||
|
||||
## 本地运行 Whisper 模型
|
||||
|
||||

|
||||
|
||||
如果需要使用本地运行 Whisper 模型进行字幕生成,需要下载 Whisper.cpp 模型,并在设置中指定模型路径。模型文件可以从网络上下载,例如:
|
||||
|
||||
- [Whisper.cpp(国内镜像,内容较旧)](https://www.modelscope.cn/models/cjc1887415157/whisper.cpp/files)
|
||||
- [Whisper.cpp](https://huggingface.co/ggerganov/whisper.cpp/tree/main)
|
||||
|
||||
可以跟据自己的需求选择不同的模型,要注意带有 `en` 的模型是英文模型,其他模型为多语言模型。
|
||||
|
||||
模型文件的大小通常意味着其在运行时资源占用的大小,因此请根据电脑配置选择合适的模型。此外,GPU 版本与 CPU 版本在字幕生成速度上存在**巨大差异**,因此
|
||||
推荐使用 GPU 版本进行本地处理(目前仅支持 Nvidia GPU)。
|
||||
|
||||
## 使用在线 Whisper 服务
|
||||
|
||||

|
||||
|
||||
如果需要使用在线的 Whisper 服务进行字幕生成,可以在设置中切换为在线 Whisper,并配置好 API Key。提供 Whisper 服务的平台并非只有
|
||||
OpenAI 一家,许多云服务平台也提供 Whisper 服务。
|
||||
|
||||
## 字幕识别质量的调优
|
||||
|
||||
目前在设置中支持设置 Whisper 语言和 Whisper 提示词,这些设置对于本地和在线的 Whisper 服务都有效。
|
||||
|
||||
通常情况下,`auto` 语言选项能够自动识别语音语言,并生成相应语言的字幕。如果需要生成其他语言的字幕,或是生成的字幕语言不匹配,可以手动配置指定的语言。
|
||||
根据 OpenAI 官方文档中对于 `language` 参数的描述,目前支持的语言包括
|
||||
|
||||
Afrikaans, Arabic, Armenian, Azerbaijani, Belarusian, Bosnian, Bulgarian,
|
||||
Catalan, Chinese, Croatian, Czech, Danish, Dutch, English, Estonian, Finnish,
|
||||
French, Galician, German, Greek, Hebrew, Hindi, Hungarian, Icelandic,
|
||||
Indonesian, Italian, Japanese, Kannada, Kazakh, Korean, Latvian, Lithuanian,
|
||||
Macedonian, Malay, Marathi, Maori, Nepali, Norwegian, Persian, Polish,
|
||||
Portuguese, Romanian, Russian, Serbian, Slovak, Slovenian, Spanish, Swahili,
|
||||
Swedish, Tagalog, Tamil, Thai, Turkish, Ukrainian, Urdu, Vietnamese, and Welsh.
|
||||
|
||||
提示词可以优化生成的字幕的风格(也会一定程度上影响质量),要注意,Whisper 无法理解复杂的提示词,你可以在提示词中使用一些简单的描述,让其在选择词汇时使用偏向于提示词所描述的领域相关的词汇,以避免出现毫不相干领域的词汇;或是让它在标点符号的使用上参照提示词的风格。
|
||||
22
docs/getting-started/installation/desktop.md
Normal file
@@ -0,0 +1,22 @@
|
||||
# 桌面端安装
|
||||
|
||||
桌面端目前提供了 Windows、Linux 和 MacOS 三个平台的安装包。
|
||||
|
||||
由于程序会对账号等敏感信息进行管理,请从信任的来源进行下载;所有版本均可在 [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) 页面下载安装。
|
||||
|
||||
## Windows
|
||||
|
||||
由于程序内置 Whisper 字幕识别模型支持,Windows 版本分为两种:
|
||||
|
||||
- **普通版本**:内置了 Whisper GPU 加速,字幕识别较快,体积较大,只支持 Nvidia 显卡
|
||||
- **CPU 版本**: 使用 CPU 进行字幕识别推理,速度较慢
|
||||
|
||||
请根据自己的显卡情况选择合适的版本进行下载。
|
||||
|
||||
## Linux
|
||||
|
||||
Linux 版本目前仅支持使用 CPU 推理,且测试较少,可能存在一些问题,遇到问题请及时反馈。
|
||||
|
||||
## MacOS
|
||||
|
||||
MacOS 版本内置 Metal GPU 加速;安装后首次运行,会提示无法打开从网络下载的软件,请在设置-隐私与安全性下,选择仍然打开以允许程序运行。
|
||||
41
docs/getting-started/installation/docker.md
Normal file
@@ -0,0 +1,41 @@
|
||||
# Docker 部署
|
||||
|
||||
BiliBili ShadowReplay 提供了服务端部署的能力,提供 Web 控制界面,可以用于在服务器等无图形界面环境下部署使用。
|
||||
|
||||
## 镜像获取
|
||||
|
||||
```bash
|
||||
# 拉取最新版本
|
||||
docker pull ghcr.io/xinrea/bili-shadowreplay:latest
|
||||
# 拉取指定版本
|
||||
docker pull ghcr.io/xinrea/bili-shadowreplay:2.5.0
|
||||
# 速度太慢?从镜像源拉取
|
||||
docker pull ghcr.nju.edu.cn/xinrea/bili-shadowreplay:latest
|
||||
```
|
||||
|
||||
## 镜像使用
|
||||
|
||||
使用方法:
|
||||
|
||||
```bash
|
||||
sudo docker run -it -d\
|
||||
-p 3000:3000 \
|
||||
-v $DATA_DIR:/app/data \
|
||||
-v $CACHE_DIR:/app/cache \
|
||||
-v $OUTPUT_DIR:/app/output \
|
||||
-v $WHISPER_MODEL:/app/whisper_model.bin \
|
||||
--name bili-shadowreplay \
|
||||
ghcr.io/xinrea/bili-shadowreplay:latest
|
||||
```
|
||||
|
||||
其中:
|
||||
|
||||
- `$DATA_DIR`:为数据目录,对应于桌面版的数据目录,
|
||||
|
||||
Windows 下位于 `C:\Users\{用户名}\AppData\Roaming\cn.vjoi.bilishadowreplay`;
|
||||
|
||||
MacOS 下位于 `/Users/{user}/Library/Application Support/cn.vjoi.bilishadowreplay`
|
||||
|
||||
- `$CACHE_DIR`:为缓存目录,对应于桌面版的缓存目录;
|
||||
- `$OUTPUT_DIR`:为输出目录,对应于桌面版的输出目录;
|
||||
- `$WHISPER_MODEL`:为 Whisper 模型文件路径,对应于桌面版的 Whisper 模型文件路径。
|
||||
70
docs/index.md
Normal file
@@ -0,0 +1,70 @@
|
||||
---
|
||||
# https://vitepress.dev/reference/default-theme-home-page
|
||||
layout: home
|
||||
|
||||
hero:
|
||||
name: "BiliBili ShadowReplay"
|
||||
tagline: "直播录制/实时回放/剪辑/投稿工具"
|
||||
image:
|
||||
src: /images/icon.png
|
||||
alt: BiliBili ShadowReplay
|
||||
actions:
|
||||
- theme: brand
|
||||
text: 开始使用
|
||||
link: /getting-started/installation/desktop
|
||||
- theme: alt
|
||||
text: 说明文档
|
||||
link: /usage/features/workflow
|
||||
|
||||
features:
|
||||
- icon: 📹
|
||||
title: 直播录制
|
||||
details: 缓存直播流,直播结束自动生成整场录播
|
||||
- icon: 📺
|
||||
title: 实时回放
|
||||
details: 实时回放当前直播,不错过任何内容
|
||||
- icon: ✂️
|
||||
title: 剪辑投稿
|
||||
details: 剪辑切片,封面编辑,一键投稿
|
||||
- icon: 📝
|
||||
title: 字幕生成
|
||||
details: 支持 Wisper 模型生成字幕,编辑与压制
|
||||
- icon: 📄
|
||||
title: 弹幕支持
|
||||
details: 直播间弹幕压制到切片,并支持直播弹幕发送和导出
|
||||
- icon: 🌐
|
||||
title: 多直播平台支持
|
||||
details: 目前支持 B 站和抖音直播
|
||||
- icon: 🔍
|
||||
title: 云端部署
|
||||
details: 支持 Docker 部署,提供 Web 控制界面
|
||||
- icon: 🤖
|
||||
title: AI Agent 支持
|
||||
details: 支持 AI 助手管理录播,分析直播内容,生成切片
|
||||
---
|
||||
|
||||
## 总览
|
||||
|
||||

|
||||
|
||||
## 直播间管理
|
||||
|
||||

|
||||
|
||||

|
||||
|
||||
## 账号管理
|
||||
|
||||

|
||||
|
||||
## 预览窗口
|
||||
|
||||

|
||||
|
||||
## 封面编辑
|
||||
|
||||

|
||||
|
||||
## 设置
|
||||
|
||||

|
||||
|
Before Width: | Height: | Size: 2.8 MiB |
BIN
docs/public/images/accounts.png
Normal file
|
After Width: | Height: | Size: 195 KiB |
BIN
docs/public/images/ai_agent.png
Normal file
|
After Width: | Height: | Size: 261 KiB |
BIN
docs/public/images/archives.png
Normal file
|
After Width: | Height: | Size: 434 KiB |
BIN
docs/public/images/clip_manage.png
Normal file
|
After Width: | Height: | Size: 234 KiB |
BIN
docs/public/images/clip_preview.png
Normal file
|
After Width: | Height: | Size: 2.3 MiB |
BIN
docs/public/images/cover_edit.png
Normal file
|
After Width: | Height: | Size: 2.1 MiB |
BIN
docs/public/images/donate.png
Normal file
|
After Width: | Height: | Size: 474 KiB |
BIN
docs/public/images/douyin_cookie.png
Normal file
|
After Width: | Height: | Size: 548 KiB |
|
Before Width: | Height: | Size: 114 KiB After Width: | Height: | Size: 114 KiB |
|
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 18 KiB |
BIN
docs/public/images/livewindow.png
Normal file
|
After Width: | Height: | Size: 2.1 MiB |
BIN
docs/public/images/model_config.png
Normal file
|
After Width: | Height: | Size: 383 KiB |
BIN
docs/public/images/rooms.png
Normal file
|
After Width: | Height: | Size: 949 KiB |
BIN
docs/public/images/settings.png
Normal file
|
After Width: | Height: | Size: 244 KiB |
BIN
docs/public/images/summary.png
Normal file
|
After Width: | Height: | Size: 372 KiB |
BIN
docs/public/images/tasks.png
Normal file
|
After Width: | Height: | Size: 201 KiB |
BIN
docs/public/images/whisper_local.png
Normal file
|
After Width: | Height: | Size: 194 KiB |
BIN
docs/public/images/whisper_online.png
Normal file
|
After Width: | Height: | Size: 199 KiB |
BIN
docs/public/images/whole_clip.png
Normal file
|
After Width: | Height: | Size: 67 KiB |
BIN
docs/public/videos/deeplinking.mp4
Normal file
BIN
docs/public/videos/room_remove.mp4
Normal file
BIN
docs/rooms.png
|
Before Width: | Height: | Size: 1.9 MiB |
|
Before Width: | Height: | Size: 622 KiB |
BIN
docs/summary.png
|
Before Width: | Height: | Size: 721 KiB |
31
docs/usage/faq.md
Normal file
@@ -0,0 +1,31 @@
|
||||
# 常见问题
|
||||
|
||||
## 一、在哪里反馈问题?
|
||||
|
||||
你可以前往 [Github Issues](https://github.com/Xinrea/bili-shadowreplay/issues/new?template=bug_report.md) 提交问题,或是加入[反馈交流群](https://qm.qq.com/q/v4lrE6gyum)。
|
||||
|
||||
1. 在提交问题前,请先阅读其它常见问题,确保你的问题已有解答;
|
||||
2. 其次,请确保你的程序已更新到最新版本;
|
||||
3. 最后,你应准备好提供你的程序日志文件,以便更好地定位问题。
|
||||
|
||||
## 二、在哪里查看日志?
|
||||
|
||||
在主窗口的设置页面,提供了一键打开日志目录所在位置的按钮。当你打开日志目录所在位置后,进入 `logs` 目录,找到后缀名为 `log` 的文件,这便是你需要提供给开发者的日志文件。
|
||||
|
||||
## 三、无法预览直播或是生成切片
|
||||
|
||||
如果你是 macOS 或 Linux 用户,请确保你已安装了 `ffmpeg` 和 `ffprobe` 工具;如果不知道如何安装,请参考 [FFmpeg 配置](/getting-started/config/ffmpeg)。
|
||||
|
||||
如果你是 Windows 用户,程序目录下应当自带了 `ffmpeg` 和 `ffprobe` 工具,如果无法预览直播或是生成切片,请向开发者反馈。
|
||||
|
||||
## 四、添加 B 站直播间出现 -352 错误
|
||||
|
||||
`-352` 错误是由 B 站风控机制导致的,如果你添加了大量的 B 站直播间进行录制,可以在设置页面调整直播间状态的检查间隔,尽量避免风控;如果你在直播间数量较少的情况下出现该错误,请向开发者反馈。
|
||||
|
||||
## 五、录播为什么都是碎片文件?
|
||||
|
||||
缓存目录下的录播文件并非用于直接播放或是投稿,而是用于直播流的预览与实时回放。如果你需要录播文件用于投稿,请打开对应录播的预览界面,使用快捷键创建选区,生成所需范围的切片,切片文件为常规的 mp4 文件,位于你所设置的切片目录下。
|
||||
|
||||
如果你将 BSR 作为单纯的录播软件使用,在设置中可以开启`整场录播生成`,这样在直播结束后,BSR 会自动生成整场录播的切片。
|
||||
|
||||

|
||||
1
docs/usage/features/clip.md
Normal file
@@ -0,0 +1 @@
|
||||
# 切片
|
||||
1
docs/usage/features/danmaku.md
Normal file
@@ -0,0 +1 @@
|
||||
# 弹幕
|
||||
40
docs/usage/features/room.md
Normal file
@@ -0,0 +1,40 @@
|
||||
# 直播间
|
||||
|
||||
> [!WARNING]
|
||||
> 在添加管理直播间前,请确保账号列表中有对应平台的可用账号。
|
||||
|
||||
## 添加直播间
|
||||
|
||||
### 手动添加直播间
|
||||
|
||||
你可以在 BSR 直播间页面,点击按钮手动添加直播间。你需要选择平台,并输入直播间号。
|
||||
|
||||
直播间号通常是直播间网页地址尾部的遗传数字,例如 `https://live.bilibili.com/123456` 中的 `123456`,或是 `https://live.douyin.com/123456` 中的 `123456`。
|
||||
|
||||
抖音直播间比较特殊,当未开播时,你无法找到直播间的入口,因此你需要当直播间开播时找到直播间网页地址,并记录其直播间号。
|
||||
|
||||
抖音直播间需要输入主播的 sec_uid,你可以在主播主页的 URL 中找到,例如 `https://www.douyin.com/user/MS4wLjABAAAA` 中的 `MS4wLjABAAAA`。
|
||||
|
||||
### 使用 DeepLinking 快速添加直播间
|
||||
|
||||
<!-- MD033 -->
|
||||
|
||||
<video src="/videos/deeplinking.mp4" loop autoplay muted style="border-radius: 10px;"></video>
|
||||
|
||||
在浏览器中观看直播时,替换地址栏中直播间地址中的 `https://` 为 `bsr://` 即可快速唤起 BSR 添加直播间。
|
||||
|
||||
## 启用/禁用直播间
|
||||
|
||||
你可以点击直播间卡片右上角的菜单按钮,选择启用/禁用直播间。
|
||||
|
||||
- 启用后,当直播间开播时,会自动开始录制
|
||||
- 禁用后,当直播间开播时,不会自动开始录制
|
||||
|
||||
## 移除直播间
|
||||
|
||||
> [!CAUTION]
|
||||
> 移除直播间后,该直播间相关的所有录播都会被删除,请谨慎操作。
|
||||
|
||||
你可以点击直播间卡片右上角的菜单按钮,选择移除直播间。
|
||||
|
||||
<video src="/videos/room_remove.mp4" loop autoplay muted style="border-radius: 10px;"></video>
|
||||
1
docs/usage/features/subtitle.md
Normal file
@@ -0,0 +1 @@
|
||||
# 字幕
|
||||
245
docs/usage/features/webhook.md
Normal file
@@ -0,0 +1,245 @@
|
||||
# Webhook
|
||||
|
||||
> [!NOTE]
|
||||
> 你可以使用 <https://webhook.site> 来测试 Webhook 功能。
|
||||
|
||||
## 设置 Webhook
|
||||
|
||||
打开 BSR 设置页面,在基础设置中设置 Webhook 地址。
|
||||
|
||||
## Webhook Events
|
||||
|
||||
### 直播间相关
|
||||
|
||||
#### 添加直播间
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "a96a5e9f-9857-4c13-b889-91da2ace208a",
|
||||
"event": "recorder.added",
|
||||
"payload": {
|
||||
"room_id": 26966466,
|
||||
"created_at": "2025-09-07T03:33:14.258796+00:00",
|
||||
"platform": "bilibili",
|
||||
"auto_start": true,
|
||||
"extra": ""
|
||||
},
|
||||
"timestamp": 1757215994
|
||||
}
|
||||
```
|
||||
|
||||
#### 移除直播间
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "e33623d4-e040-4390-88f5-d351ceeeace7",
|
||||
"event": "recorder.removed",
|
||||
"payload": {
|
||||
"room_id": 27183290,
|
||||
"created_at": "2025-08-30T10:54:18.569198+00:00",
|
||||
"platform": "bilibili",
|
||||
"auto_start": true,
|
||||
"extra": ""
|
||||
},
|
||||
"timestamp": 1757217015
|
||||
}
|
||||
```
|
||||
|
||||
### 直播相关
|
||||
|
||||
> [!NOTE]
|
||||
> 直播开始和结束,不意味着录制的开始和结束。
|
||||
|
||||
#### 直播开始
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "f12f3424-f7d8-4b2f-a8b7-55477411482e",
|
||||
"event": "live.started",
|
||||
"payload": {
|
||||
"room_id": 843610,
|
||||
"room_info": {
|
||||
"room_id": 843610,
|
||||
"room_title": "登顶!",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/new_room_cover/73aea43f4b4624c314d62fea4b424822fb506dfb.jpg"
|
||||
},
|
||||
"user_info": {
|
||||
"user_id": "475210",
|
||||
"user_name": "Xinrea",
|
||||
"user_avatar": "https://i1.hdslb.com/bfs/face/91beb3bf444b295fe12bae1f3dc6d9fc4fe4c224.jpg"
|
||||
},
|
||||
"total_length": 0,
|
||||
"current_live_id": "",
|
||||
"live_status": false,
|
||||
"is_recording": false,
|
||||
"auto_start": true,
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757217190
|
||||
}
|
||||
```
|
||||
|
||||
#### 直播结束
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "e8b0756a-02f9-4655-b5ae-a170bf9547bd",
|
||||
"event": "live.ended",
|
||||
"payload": {
|
||||
"room_id": 843610,
|
||||
"room_info": {
|
||||
"room_id": 843610,
|
||||
"room_title": "登顶!",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/new_room_cover/73aea43f4b4624c314d62fea4b424822fb506dfb.jpg"
|
||||
},
|
||||
"user_info": {
|
||||
"user_id": "475210",
|
||||
"user_name": "Xinrea",
|
||||
"user_avatar": "https://i1.hdslb.com/bfs/face/91beb3bf444b295fe12bae1f3dc6d9fc4fe4c224.jpg"
|
||||
},
|
||||
"total_length": 0,
|
||||
"current_live_id": "",
|
||||
"live_status": true,
|
||||
"is_recording": false,
|
||||
"auto_start": true,
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757217365
|
||||
}
|
||||
```
|
||||
|
||||
### 录播相关
|
||||
|
||||
#### 开始录制
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "5ec1ea10-2b31-48fd-8deb-f2d7d2ea5985",
|
||||
"event": "record.started",
|
||||
"payload": {
|
||||
"room_id": 26966466,
|
||||
"room_info": {
|
||||
"room_id": 26966466,
|
||||
"room_title": "早安獭獭栞!下播前抽fufu",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/user_cover/b810c36855168034557e905e5916b1dba1761fa4.jpg"
|
||||
},
|
||||
"user_info": {
|
||||
"user_id": "1609526545",
|
||||
"user_name": "栞栞Shiori",
|
||||
"user_avatar": "https://i1.hdslb.com/bfs/face/47e8dbabb895de44ec6cace085d4dc1d40307277.jpg"
|
||||
},
|
||||
"total_length": 0,
|
||||
"current_live_id": "1757216045412",
|
||||
"live_status": true,
|
||||
"is_recording": false,
|
||||
"auto_start": true,
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757216045
|
||||
}
|
||||
```
|
||||
|
||||
#### 结束录制
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "56fd03e5-3965-4c2e-a6a9-bb6932347eb3",
|
||||
"event": "record.ended",
|
||||
"payload": {
|
||||
"room_id": 26966466,
|
||||
"room_info": {
|
||||
"room_id": 26966466,
|
||||
"room_title": "早安獭獭栞!下播前抽fufu",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/user_cover/b810c36855168034557e905e5916b1dba1761fa4.jpg"
|
||||
},
|
||||
"user_info": {
|
||||
"user_id": "1609526545",
|
||||
"user_name": "栞栞Shiori",
|
||||
"user_avatar": "https://i1.hdslb.com/bfs/face/47e8dbabb895de44ec6cace085d4dc1d40307277.jpg"
|
||||
},
|
||||
"total_length": 52.96700000000001,
|
||||
"current_live_id": "1757215994597",
|
||||
"live_status": true,
|
||||
"is_recording": true,
|
||||
"auto_start": true,
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757216040
|
||||
}
|
||||
```
|
||||
|
||||
#### 删除录播
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "c32bc811-ab4b-49fd-84c7-897727905d16",
|
||||
"event": "archive.deleted",
|
||||
"payload": {
|
||||
"platform": "bilibili",
|
||||
"live_id": "1756607084705",
|
||||
"room_id": 1967212929,
|
||||
"title": "灶台O.o",
|
||||
"length": 9,
|
||||
"size": 1927112,
|
||||
"created_at": "2025-08-31T02:24:44.728616+00:00",
|
||||
"cover": "bilibili/1967212929/1756607084705/cover.jpg"
|
||||
},
|
||||
"timestamp": 1757176219
|
||||
}
|
||||
```
|
||||
|
||||
### 切片相关
|
||||
|
||||
#### 切片生成
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "f542e0e1-688b-4f1a-8ce1-e5e51530cf5d",
|
||||
"event": "clip.generated",
|
||||
"payload": {
|
||||
"id": 316,
|
||||
"room_id": 27183290,
|
||||
"cover": "[27183290][1757172501727][一起看凡人修仙传][2025-09-07_00-16-11].jpg",
|
||||
"file": "[27183290][1757172501727][一起看凡人修仙传][2025-09-07_00-16-11].mp4",
|
||||
"note": "",
|
||||
"length": 121,
|
||||
"size": 53049119,
|
||||
"status": 0,
|
||||
"bvid": "",
|
||||
"title": "",
|
||||
"desc": "",
|
||||
"tags": "",
|
||||
"area": 0,
|
||||
"created_at": "2025-09-07T00:16:11.747461+08:00",
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757175371
|
||||
}
|
||||
```
|
||||
|
||||
#### 切片删除
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "5c7ca728-753d-4a7d-a0b4-02c997ad2f92",
|
||||
"event": "clip.deleted",
|
||||
"payload": {
|
||||
"id": 313,
|
||||
"room_id": 27183290,
|
||||
"cover": "[27183290][1756903953470][不出非洲之心不下播][2025-09-03_21-10-54].jpg",
|
||||
"file": "[27183290][1756903953470][不出非洲之心不下播][2025-09-03_21-10-54].mp4",
|
||||
"note": "",
|
||||
"length": 32,
|
||||
"size": 18530098,
|
||||
"status": 0,
|
||||
"bvid": "",
|
||||
"title": "",
|
||||
"desc": "",
|
||||
"tags": "",
|
||||
"area": 0,
|
||||
"created_at": "2025-09-03T21:10:54.943682+08:00",
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757147617
|
||||
}
|
||||
```
|
||||
30
docs/usage/features/workflow.md
Normal file
@@ -0,0 +1,30 @@
|
||||
# 工作流程
|
||||
|
||||
- 直播间:各个平台的直播间
|
||||
- 录播:直播流的存档,每次录制会自动生成一场录播记录
|
||||
- 切片:从直播流中剪切生成的视频片段
|
||||
- 投稿:将切片上传到各个平台(目前仅支持 Bilibili)
|
||||
|
||||
下图展示了它们之间的关系:
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
A[直播间] -->|录制| B[录播 01]
|
||||
A -->|录制| C[录播 02]
|
||||
A -->|录制| E[录播 N]
|
||||
|
||||
B --> F[直播流预览窗口]
|
||||
|
||||
F -->|区间生成| G[切片 01]
|
||||
F -->|区间生成| H[切片 02]
|
||||
F -->|区间生成| I[切片 N]
|
||||
|
||||
G --> J[切片预览窗口]
|
||||
|
||||
J -->|字幕压制| K[新切片]
|
||||
|
||||
K --> J
|
||||
|
||||
J -->|投稿| L[Bilibili]
|
||||
|
||||
```
|
||||
21
index.html
@@ -1,14 +1,13 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="zh-cn">
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<script type="module" src="/src/main.ts"></script>
|
||||
</body>
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>BiliBili ShadowReplay</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<script type="module" src="/src/main.ts"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
13
index_clip.html
Normal file
@@ -0,0 +1,13 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="zh-cn">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>切片窗口</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<script type="module" src="/src/main_clip.ts"></script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -8,15 +8,20 @@
|
||||
<link rel="stylesheet" href="shaka-player/youtube-theme.css" />
|
||||
<script src="shaka-player/shaka-player.ui.js"></script>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<script type="module" src="src/live_main.ts"></script>
|
||||
<script type="module" src="src/main_live.ts"></script>
|
||||
<style>
|
||||
input[type="range"]::-webkit-slider-thumb {
|
||||
width: 12px; /* 设置滑块按钮宽度 */
|
||||
height: 12px; /* 设置滑块按钮高度 */
|
||||
border-radius: 50%; /* 设置为圆形 */
|
||||
width: 12px;
|
||||
/* 设置滑块按钮宽度 */
|
||||
height: 12px;
|
||||
/* 设置滑块按钮高度 */
|
||||
border-radius: 50%;
|
||||
/* 设置为圆形 */
|
||||
}
|
||||
|
||||
html {
|
||||
scrollbar-face-color: #646464;
|
||||
scrollbar-base-color: #646464;
|
||||
@@ -31,20 +36,25 @@
|
||||
width: 8px;
|
||||
height: 3px;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-button {
|
||||
background-color: #666;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-track {
|
||||
background-color: #646464;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-track-piece {
|
||||
background-color: #000;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-thumb {
|
||||
height: 50px;
|
||||
background-color: #666;
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-corner {
|
||||
background-color: #646464;
|
||||
}
|
||||
27
package.json
@@ -1,17 +1,26 @@
|
||||
{
|
||||
"name": "bili-shadowreplay",
|
||||
"private": true,
|
||||
"version": "2.4.3",
|
||||
"version": "2.13.6",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "vite build",
|
||||
"preview": "vite preview",
|
||||
"check": "svelte-check --tsconfig ./tsconfig.json",
|
||||
"tauri": "tauri"
|
||||
"tauri": "tauri",
|
||||
"docs:dev": "vitepress dev docs",
|
||||
"docs:build": "vitepress build docs",
|
||||
"docs:preview": "vitepress preview docs",
|
||||
"bump": "node scripts/bump.cjs"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tauri-apps/api": "^2.4.1",
|
||||
"@langchain/core": "^0.3.64",
|
||||
"@langchain/deepseek": "^0.1.0",
|
||||
"@langchain/langgraph": "^0.3.10",
|
||||
"@langchain/ollama": "^0.2.3",
|
||||
"@tauri-apps/api": "^2.6.2",
|
||||
"@tauri-apps/plugin-deep-link": "~2",
|
||||
"@tauri-apps/plugin-dialog": "~2",
|
||||
"@tauri-apps/plugin-fs": "~2",
|
||||
"@tauri-apps/plugin-http": "~2",
|
||||
@@ -19,9 +28,10 @@
|
||||
"@tauri-apps/plugin-os": "~2",
|
||||
"@tauri-apps/plugin-shell": "~2",
|
||||
"@tauri-apps/plugin-sql": "~2",
|
||||
"html2canvas": "^1.4.1",
|
||||
"lucide-svelte": "^0.479.0",
|
||||
"qrcode": "^1.5.4"
|
||||
"marked": "^16.1.1",
|
||||
"qrcode": "^1.5.4",
|
||||
"socket.io-client": "^4.8.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sveltejs/vite-plugin-svelte": "^2.0.0",
|
||||
@@ -33,6 +43,7 @@
|
||||
"flowbite": "^2.5.1",
|
||||
"flowbite-svelte": "^0.46.16",
|
||||
"flowbite-svelte-icons": "^1.6.1",
|
||||
"mermaid": "^11.9.0",
|
||||
"postcss": "^8.4.21",
|
||||
"svelte": "^3.54.0",
|
||||
"svelte-check": "^3.0.0",
|
||||
@@ -40,7 +51,9 @@
|
||||
"tailwindcss": "^3.3.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"tslib": "^2.4.1",
|
||||
"typescript": "^4.6.4",
|
||||
"vite": "^4.0.0"
|
||||
"typescript": "^5.0.0",
|
||||
"vite": "^4.0.0",
|
||||
"vitepress": "^1.6.3",
|
||||
"vitepress-plugin-mermaid": "^2.0.17"
|
||||
}
|
||||
}
|
||||
|
||||
BIN
public/imgs/bilibili.png
Normal file
|
After Width: | Height: | Size: 26 KiB |
BIN
public/imgs/bilibili_avatar.png
Normal file
|
After Width: | Height: | Size: 38 KiB |
|
Before Width: | Height: | Size: 32 KiB After Width: | Height: | Size: 246 KiB |
BIN
public/imgs/douyin_avatar.png
Normal file
|
After Width: | Height: | Size: 153 KiB |
58
scripts/bump.cjs
Normal file
@@ -0,0 +1,58 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
|
||||
function updatePackageJson(version) {
|
||||
const packageJsonPath = path.join(process.cwd(), "package.json");
|
||||
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
|
||||
packageJson.version = version;
|
||||
fs.writeFileSync(
|
||||
packageJsonPath,
|
||||
JSON.stringify(packageJson, null, 2) + "\n"
|
||||
);
|
||||
console.log(`✅ Updated package.json version to ${version}`);
|
||||
}
|
||||
|
||||
function updateCargoToml(version) {
|
||||
const cargoTomlPath = path.join(process.cwd(), "src-tauri", "Cargo.toml");
|
||||
let cargoToml = fs.readFileSync(cargoTomlPath, "utf8");
|
||||
|
||||
// Update the version in the [package] section
|
||||
cargoToml = cargoToml.replace(/^version = ".*"$/m, `version = "${version}"`);
|
||||
|
||||
fs.writeFileSync(cargoTomlPath, cargoToml);
|
||||
console.log(`✅ Updated Cargo.toml version to ${version}`);
|
||||
}
|
||||
|
||||
function main() {
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
if (args.length === 0) {
|
||||
console.error("❌ Please provide a version number");
|
||||
console.error("Usage: yarn bump <version>");
|
||||
console.error("Example: yarn bump 3.1.0");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const version = args[0];
|
||||
|
||||
// Validate version format (simple check)
|
||||
if (!/^\d+\.\d+\.\d+/.test(version)) {
|
||||
console.error(
|
||||
"❌ Invalid version format. Please use semantic versioning (e.g., 3.1.0)"
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
try {
|
||||
updatePackageJson(version);
|
||||
updateCargoToml(version);
|
||||
console.log(`🎉 Successfully bumped version to ${version}`);
|
||||
} catch (error) {
|
||||
console.error("❌ Error updating version:", error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
4
src-tauri/.gitignore
vendored
@@ -1,5 +1,9 @@
|
||||
# Generated by Cargo
|
||||
# will have compiled files and executables
|
||||
/target/
|
||||
cache
|
||||
output
|
||||
tmps
|
||||
clips
|
||||
data
|
||||
config.toml
|
||||
3191
src-tauri/Cargo.lock
generated
@@ -1,21 +1,29 @@
|
||||
[workspace]
|
||||
members = ["crates/danmu_stream"]
|
||||
resolver = "2"
|
||||
|
||||
[package]
|
||||
name = "bili-shadowreplay"
|
||||
version = "1.0.0"
|
||||
version = "2.13.6"
|
||||
description = "BiliBili ShadowReplay"
|
||||
authors = ["Xinrea"]
|
||||
license = ""
|
||||
repository = ""
|
||||
edition = "2021"
|
||||
|
||||
[lints.clippy]
|
||||
correctness="deny"
|
||||
suspicious="deny"
|
||||
complexity="deny"
|
||||
style="deny"
|
||||
perf="deny"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[build-dependencies]
|
||||
tauri-build = { version = "2", features = [] }
|
||||
|
||||
[dependencies]
|
||||
tauri = { version = "2", features = ["protocol-asset", "tray-icon"] }
|
||||
danmu_stream = { path = "crates/danmu_stream" }
|
||||
serde_json = "1.0"
|
||||
reqwest = { version = "0.11", features = ["blocking", "json"] }
|
||||
reqwest = { version = "0.11", features = ["blocking", "json", "multipart"] }
|
||||
serde_derive = "1.0.158"
|
||||
serde = "1.0.158"
|
||||
sysinfo = "0.32.0"
|
||||
@@ -24,8 +32,6 @@ async-std = "1.12.0"
|
||||
async-ffmpeg-sidecar = "0.0.1"
|
||||
chrono = { version = "0.4.24", features = ["serde"] }
|
||||
toml = "0.7.3"
|
||||
custom_error = "1.9.2"
|
||||
felgens = { git = "https://github.com/Xinrea/felgens.git", tag = "v0.4.2" }
|
||||
regex = "1.7.3"
|
||||
tokio = { version = "1.27.0", features = ["process"] }
|
||||
platform-dirs = "0.3.0"
|
||||
@@ -37,31 +43,105 @@ urlencoding = "2.1.3"
|
||||
log = "0.4.22"
|
||||
simplelog = "0.12.2"
|
||||
sqlx = { version = "0.8", features = ["runtime-tokio", "sqlite"] }
|
||||
tauri-plugin-dialog = "2"
|
||||
tauri-plugin-shell = "2"
|
||||
tauri-plugin-fs = "2"
|
||||
tauri-plugin-http = "2"
|
||||
tauri-utils = "2"
|
||||
tauri-plugin-sql = { version = "2", features = ["sqlite"] }
|
||||
tauri-plugin-os = "2"
|
||||
tauri-plugin-notification = "2"
|
||||
rand = "0.8.5"
|
||||
base64 = "0.21"
|
||||
mime_guess = "2.0"
|
||||
async-trait = "0.1.87"
|
||||
whisper-rs = "0.14.2"
|
||||
hound = "3.5.1"
|
||||
fix-path-env = { git = "https://github.com/tauri-apps/fix-path-env-rs" }
|
||||
uuid = { version = "1.4", features = ["v4"] }
|
||||
axum = { version = "0.7", features = ["macros", "multipart"] }
|
||||
tower-http = { version = "0.5", features = ["cors", "fs"] }
|
||||
futures-core = "0.3"
|
||||
futures = "0.3"
|
||||
tokio-util = { version = "0.7", features = ["io"] }
|
||||
tokio-stream = "0.1"
|
||||
clap = { version = "4.5.37", features = ["derive"] }
|
||||
url = "2.5.4"
|
||||
srtparse = "0.2.0"
|
||||
thiserror = "2"
|
||||
deno_core = "0.355"
|
||||
sanitize-filename = "0.6.0"
|
||||
socketioxide = "0.17.2"
|
||||
|
||||
[features]
|
||||
# this feature is used for production builds or when `devPath` points to the filesystem
|
||||
# DO NOT REMOVE!!
|
||||
custom-protocol = ["tauri/custom-protocol"]
|
||||
cuda = ["whisper-rs/cuda"]
|
||||
headless = []
|
||||
default = ["gui"]
|
||||
gui = [
|
||||
"tauri",
|
||||
"tauri-plugin-single-instance",
|
||||
"tauri-plugin-dialog",
|
||||
"tauri-plugin-shell",
|
||||
"tauri-plugin-fs",
|
||||
"tauri-plugin-http",
|
||||
"tauri-plugin-sql",
|
||||
"tauri-utils",
|
||||
"tauri-plugin-os",
|
||||
"tauri-plugin-notification",
|
||||
"tauri-plugin-deep-link",
|
||||
"fix-path-env",
|
||||
"tauri-build",
|
||||
]
|
||||
|
||||
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies]
|
||||
tauri-plugin-single-instance = "2"
|
||||
[dependencies.tauri]
|
||||
version = "2"
|
||||
features = ["protocol-asset", "tray-icon"]
|
||||
optional = true
|
||||
|
||||
[dependencies.tauri-plugin-single-instance]
|
||||
version = "2"
|
||||
optional = true
|
||||
features = ["deep-link"]
|
||||
|
||||
[dependencies.tauri-plugin-dialog]
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.tauri-plugin-shell]
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.tauri-plugin-fs]
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.tauri-plugin-http]
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.tauri-plugin-sql]
|
||||
version = "2"
|
||||
optional = true
|
||||
features = ["sqlite"]
|
||||
|
||||
[dependencies.tauri-utils]
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.tauri-plugin-os]
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.tauri-plugin-notification]
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.tauri-plugin-deep-link]
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.fix-path-env]
|
||||
git = "https://github.com/tauri-apps/fix-path-env-rs"
|
||||
optional = true
|
||||
|
||||
[build-dependencies.tauri-build]
|
||||
version = "2"
|
||||
features = []
|
||||
optional = true
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
whisper-rs = { version = "0.14.2", default-features = false }
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
fn main() {
|
||||
tauri_build::build()
|
||||
#[cfg(feature = "gui")]
|
||||
tauri_build::build();
|
||||
}
|
||||
|
||||
@@ -2,10 +2,7 @@
|
||||
"identifier": "migrated",
|
||||
"description": "permissions that were migrated from v1",
|
||||
"local": true,
|
||||
"windows": [
|
||||
"main",
|
||||
"Live*"
|
||||
],
|
||||
"windows": ["main", "Live*", "Clip*"],
|
||||
"permissions": [
|
||||
"core:default",
|
||||
"fs:allow-read-file",
|
||||
@@ -19,9 +16,7 @@
|
||||
"fs:allow-exists",
|
||||
{
|
||||
"identifier": "fs:scope",
|
||||
"allow": [
|
||||
"**"
|
||||
]
|
||||
"allow": ["**"]
|
||||
},
|
||||
"core:window:default",
|
||||
"core:window:allow-start-dragging",
|
||||
@@ -54,6 +49,9 @@
|
||||
},
|
||||
{
|
||||
"url": "https://*.douyinpic.com/"
|
||||
},
|
||||
{
|
||||
"url": "http://tauri.localhost/*"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -70,6 +68,7 @@
|
||||
"shell:default",
|
||||
"sql:default",
|
||||
"os:default",
|
||||
"dialog:default"
|
||||
"dialog:default",
|
||||
"deep-link:default"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
16
src-tauri/config.example.toml
Normal file
@@ -0,0 +1,16 @@
|
||||
cache = "./cache"
|
||||
output = "./output"
|
||||
live_start_notify = true
|
||||
live_end_notify = true
|
||||
clip_notify = true
|
||||
post_notify = true
|
||||
auto_subtitle = false
|
||||
subtitle_generator_type = "whisper_online"
|
||||
whisper_model = "./whisper_model.bin"
|
||||
whisper_prompt = "这是一段中文 你们好"
|
||||
openai_api_key = ""
|
||||
clip_name_format = "[{room_id}][{live_id}][{title}][{created_at}].mp4"
|
||||
|
||||
[auto_generate]
|
||||
enabled = false
|
||||
encode_danmu = false
|
||||
48
src-tauri/crates/danmu_stream/Cargo.toml
Normal file
@@ -0,0 +1,48 @@
|
||||
[package]
|
||||
name = "danmu_stream"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "danmu_stream"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[[example]]
|
||||
name = "bilibili"
|
||||
path = "examples/bilibili.rs"
|
||||
|
||||
[[example]]
|
||||
name = "douyin"
|
||||
path = "examples/douyin.rs"
|
||||
|
||||
[dependencies]
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tokio-tungstenite = { version = "0.27", features = ["native-tls"] }
|
||||
futures-util = "0.3"
|
||||
prost = "0.14"
|
||||
chrono = "0.4"
|
||||
log = "0.4"
|
||||
env_logger = "0.11"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
reqwest = { version = "0.12", features = ["json"] }
|
||||
url = "2.4"
|
||||
md5 = "0.8"
|
||||
regex = "1.9"
|
||||
deno_core = "0.355"
|
||||
pct-str = "2.0"
|
||||
thiserror = "2.0"
|
||||
flate2 = "1.0"
|
||||
scroll = "0.13"
|
||||
scroll_derive = "0.13"
|
||||
brotli = "8.0"
|
||||
http = "1.0"
|
||||
rand = "0.9"
|
||||
urlencoding = "2.1"
|
||||
gzip = "0.1.2"
|
||||
hex = "0.4.3"
|
||||
async-trait = "0.1"
|
||||
uuid = "1"
|
||||
|
||||
[build-dependencies]
|
||||
tonic-build = "0.14"
|
||||
41
src-tauri/crates/danmu_stream/examples/bilibili.rs
Normal file
@@ -0,0 +1,41 @@
|
||||
use std::{sync::Arc, time::Duration};
|
||||
|
||||
use danmu_stream::{danmu_stream::DanmuStream, provider::ProviderType, DanmuMessageType};
|
||||
use tokio::time::sleep;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Initialize logging
|
||||
env_logger::init();
|
||||
// Replace these with actual values
|
||||
let room_id = 768756;
|
||||
let cookie = "";
|
||||
let stream = Arc::new(DanmuStream::new(ProviderType::BiliBili, cookie, room_id).await?);
|
||||
|
||||
log::info!("Start to receive danmu messages: {}", cookie);
|
||||
|
||||
let stream_clone = stream.clone();
|
||||
tokio::spawn(async move {
|
||||
loop {
|
||||
log::info!("Waitting for message");
|
||||
if let Ok(Some(msg)) = stream_clone.recv().await {
|
||||
match msg {
|
||||
DanmuMessageType::DanmuMessage(danmu) => {
|
||||
log::info!("Received danmu message: {:?}", danmu.message);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::info!("Channel closed");
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let _ = stream.start().await;
|
||||
|
||||
sleep(Duration::from_secs(10)).await;
|
||||
|
||||
stream.stop().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
40
src-tauri/crates/danmu_stream/examples/douyin.rs
Normal file
@@ -0,0 +1,40 @@
|
||||
use std::{sync::Arc, time::Duration};
|
||||
|
||||
use danmu_stream::{danmu_stream::DanmuStream, provider::ProviderType, DanmuMessageType};
|
||||
use tokio::time::sleep;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Initialize logging
|
||||
env_logger::init();
|
||||
// Replace these with actual values
|
||||
let room_id = 7514298567821937427; // Replace with actual Douyin room_id. When live starts, the room_id will be generated, so it's more like a live_id.
|
||||
let cookie = "your_cookie";
|
||||
let stream = Arc::new(DanmuStream::new(ProviderType::Douyin, cookie, room_id).await?);
|
||||
|
||||
log::info!("Start to receive danmu messages");
|
||||
|
||||
let _ = stream.start().await;
|
||||
|
||||
let stream_clone = stream.clone();
|
||||
tokio::spawn(async move {
|
||||
loop {
|
||||
if let Ok(Some(msg)) = stream_clone.recv().await {
|
||||
match msg {
|
||||
DanmuMessageType::DanmuMessage(danmu) => {
|
||||
log::info!("Received danmu message: {:?}", danmu.message);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::info!("Channel closed");
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
sleep(Duration::from_secs(10)).await;
|
||||
|
||||
stream.stop().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
52
src-tauri/crates/danmu_stream/src/danmu_stream.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use tokio::sync::{mpsc, RwLock};
|
||||
|
||||
use crate::{
|
||||
provider::{new, DanmuProvider, ProviderType},
|
||||
DanmuMessageType, DanmuStreamError,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DanmuStream {
|
||||
pub provider_type: ProviderType,
|
||||
pub identifier: String,
|
||||
pub room_id: i64,
|
||||
pub provider: Arc<RwLock<Box<dyn DanmuProvider>>>,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
rx: Arc<RwLock<mpsc::UnboundedReceiver<DanmuMessageType>>>,
|
||||
}
|
||||
|
||||
impl DanmuStream {
|
||||
pub async fn new(
|
||||
provider_type: ProviderType,
|
||||
identifier: &str,
|
||||
room_id: i64,
|
||||
) -> Result<Self, DanmuStreamError> {
|
||||
let (tx, rx) = mpsc::unbounded_channel();
|
||||
let provider = new(provider_type, identifier, room_id).await?;
|
||||
Ok(Self {
|
||||
provider_type,
|
||||
identifier: identifier.to_string(),
|
||||
room_id,
|
||||
provider: Arc::new(RwLock::new(provider)),
|
||||
tx,
|
||||
rx: Arc::new(RwLock::new(rx)),
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn start(&self) -> Result<(), DanmuStreamError> {
|
||||
self.provider.write().await.start(self.tx.clone()).await
|
||||
}
|
||||
|
||||
pub async fn stop(&self) -> Result<(), DanmuStreamError> {
|
||||
self.provider.write().await.stop().await?;
|
||||
// close channel
|
||||
self.rx.write().await.close();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn recv(&self) -> Result<Option<DanmuMessageType>, DanmuStreamError> {
|
||||
Ok(self.rx.write().await.recv().await)
|
||||
}
|
||||
}
|
||||
40
src-tauri/crates/danmu_stream/src/http_client.rs
Normal file
@@ -0,0 +1,40 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use reqwest::header::HeaderMap;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
pub struct ApiClient {
|
||||
client: reqwest::Client,
|
||||
header: HeaderMap,
|
||||
}
|
||||
|
||||
impl ApiClient {
|
||||
pub fn new(cookies: &str) -> Self {
|
||||
let mut header = HeaderMap::new();
|
||||
header.insert("cookie", cookies.parse().unwrap());
|
||||
|
||||
Self {
|
||||
client: reqwest::Client::new(),
|
||||
header,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get(
|
||||
&self,
|
||||
url: &str,
|
||||
query: Option<&[(&str, &str)]>,
|
||||
) -> Result<reqwest::Response, DanmuStreamError> {
|
||||
let resp = self
|
||||
.client
|
||||
.get(url)
|
||||
.query(query.unwrap_or_default())
|
||||
.headers(self.header.clone())
|
||||
.timeout(Duration::from_secs(10))
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?;
|
||||
|
||||
Ok(resp)
|
||||
}
|
||||
}
|
||||
39
src-tauri/crates/danmu_stream/src/lib.rs
Normal file
@@ -0,0 +1,39 @@
|
||||
pub mod danmu_stream;
|
||||
mod http_client;
|
||||
pub mod provider;
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum DanmuStreamError {
|
||||
#[error("HttpError {0:?}")]
|
||||
HttpError(#[from] reqwest::Error),
|
||||
#[error("ParseError {0:?}")]
|
||||
ParseError(#[from] url::ParseError),
|
||||
#[error("WebsocketError {err}")]
|
||||
WebsocketError { err: String },
|
||||
#[error("PackError {err}")]
|
||||
PackError { err: String },
|
||||
#[error("UnsupportProto {proto}")]
|
||||
UnsupportProto { proto: u16 },
|
||||
#[error("MessageParseError {err}")]
|
||||
MessageParseError { err: String },
|
||||
#[error("InvalidIdentifier {err}")]
|
||||
InvalidIdentifier { err: String },
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum DanmuMessageType {
|
||||
DanmuMessage(DanmuMessage),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DanmuMessage {
|
||||
pub room_id: i64,
|
||||
pub user_id: u64,
|
||||
pub user_name: String,
|
||||
pub message: String,
|
||||
pub color: u32,
|
||||
/// timestamp in milliseconds
|
||||
pub timestamp: i64,
|
||||
}
|
||||
443
src-tauri/crates/danmu_stream/src/provider/bilibili.rs
Normal file
@@ -0,0 +1,443 @@
|
||||
mod dannmu_msg;
|
||||
mod interact_word;
|
||||
mod pack;
|
||||
mod send_gift;
|
||||
mod stream;
|
||||
mod super_chat;
|
||||
|
||||
use std::{sync::Arc, time::SystemTime};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use futures_util::{SinkExt, StreamExt, TryStreamExt};
|
||||
use log::{error, info};
|
||||
use pct_str::{PctString, URIReserved};
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::{
|
||||
sync::{mpsc, RwLock},
|
||||
time::{sleep, Duration},
|
||||
};
|
||||
use tokio_tungstenite::{connect_async, tungstenite::Message};
|
||||
|
||||
use crate::{
|
||||
http_client::ApiClient,
|
||||
provider::{DanmuMessageType, DanmuProvider},
|
||||
DanmuStreamError,
|
||||
};
|
||||
|
||||
type WsReadType = futures_util::stream::SplitStream<
|
||||
tokio_tungstenite::WebSocketStream<tokio_tungstenite::MaybeTlsStream<tokio::net::TcpStream>>,
|
||||
>;
|
||||
|
||||
type WsWriteType = futures_util::stream::SplitSink<
|
||||
tokio_tungstenite::WebSocketStream<tokio_tungstenite::MaybeTlsStream<tokio::net::TcpStream>>,
|
||||
Message,
|
||||
>;
|
||||
|
||||
pub struct BiliDanmu {
|
||||
client: ApiClient,
|
||||
room_id: i64,
|
||||
user_id: i64,
|
||||
stop: Arc<RwLock<bool>>,
|
||||
write: Arc<RwLock<Option<WsWriteType>>>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl DanmuProvider for BiliDanmu {
|
||||
async fn new(cookie: &str, room_id: i64) -> Result<Self, DanmuStreamError> {
|
||||
// find DedeUserID=<user_id> in cookie str
|
||||
let user_id = BiliDanmu::parse_user_id(cookie)?;
|
||||
// add buvid3 to cookie
|
||||
let cookie = format!("{};buvid3={}", cookie, uuid::Uuid::new_v4());
|
||||
let client = ApiClient::new(&cookie);
|
||||
|
||||
Ok(Self {
|
||||
client,
|
||||
user_id,
|
||||
room_id,
|
||||
stop: Arc::new(RwLock::new(false)),
|
||||
write: Arc::new(RwLock::new(None)),
|
||||
})
|
||||
}
|
||||
|
||||
async fn start(
|
||||
&self,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let mut retry_count = 0;
|
||||
const RETRY_DELAY: Duration = Duration::from_secs(5);
|
||||
info!(
|
||||
"Bilibili WebSocket connection started, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
|
||||
loop {
|
||||
if *self.stop.read().await {
|
||||
info!(
|
||||
"Bilibili WebSocket connection stopped, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
match self.connect_and_handle(tx.clone()).await {
|
||||
Ok(_) => {
|
||||
info!(
|
||||
"Bilibili WebSocket connection closed normally, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
retry_count = 0;
|
||||
}
|
||||
Err(e) => {
|
||||
error!(
|
||||
"Bilibili WebSocket connection error, room_id: {}, error: {}",
|
||||
self.room_id, e
|
||||
);
|
||||
retry_count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}), room_id: {}",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
self.room_id
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn stop(&self) -> Result<(), DanmuStreamError> {
|
||||
*self.stop.write().await = true;
|
||||
if let Some(mut write) = self.write.write().await.take() {
|
||||
if let Err(e) = write.close().await {
|
||||
error!("Failed to close WebSocket connection: {}", e);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl BiliDanmu {
|
||||
async fn connect_and_handle(
|
||||
&self,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let wbi_key = self.get_wbi_key().await?;
|
||||
let real_room = self.get_real_room(&wbi_key, self.room_id).await?;
|
||||
let danmu_info = self.get_danmu_info(&wbi_key, real_room).await?;
|
||||
let ws_hosts = danmu_info.data.host_list.clone();
|
||||
let mut conn = None;
|
||||
log::debug!("ws_hosts: {:?}", ws_hosts);
|
||||
// try to connect to ws_hsots, once success, send the token to the tx
|
||||
for i in ws_hosts {
|
||||
let host = format!("wss://{}/sub", i.host);
|
||||
match connect_async(&host).await {
|
||||
Ok((c, _)) => {
|
||||
conn = Some(c);
|
||||
break;
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!(
|
||||
"Connect ws host: {} has error, trying next host ...\n{:?}\n{:?}",
|
||||
host, i, e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let conn = conn.ok_or(DanmuStreamError::WebsocketError {
|
||||
err: "Failed to connect to ws host".into(),
|
||||
})?;
|
||||
|
||||
let (write, read) = conn.split();
|
||||
*self.write.write().await = Some(write);
|
||||
|
||||
let json = serde_json::to_string(&WsSend {
|
||||
roomid: real_room,
|
||||
key: danmu_info.data.token,
|
||||
uid: self.user_id,
|
||||
protover: 3,
|
||||
platform: "web".to_string(),
|
||||
t: 2,
|
||||
})
|
||||
.map_err(|e| DanmuStreamError::WebsocketError { err: e.to_string() })?;
|
||||
|
||||
let json = pack::encode(&json, 7);
|
||||
if let Some(write) = self.write.write().await.as_mut() {
|
||||
write
|
||||
.send(Message::binary(json))
|
||||
.await
|
||||
.map_err(|e| DanmuStreamError::WebsocketError { err: e.to_string() })?;
|
||||
}
|
||||
|
||||
tokio::select! {
|
||||
v = BiliDanmu::send_heartbeat_packets(Arc::clone(&self.write)) => v,
|
||||
v = BiliDanmu::recv(read, tx, Arc::clone(&self.stop)) => v
|
||||
}?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn send_heartbeat_packets(
|
||||
write: Arc<RwLock<Option<WsWriteType>>>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
loop {
|
||||
if let Some(write) = write.write().await.as_mut() {
|
||||
write
|
||||
.send(Message::binary(pack::encode("", 2)))
|
||||
.await
|
||||
.map_err(|e| DanmuStreamError::WebsocketError { err: e.to_string() })?;
|
||||
}
|
||||
sleep(Duration::from_secs(30)).await;
|
||||
}
|
||||
}
|
||||
|
||||
async fn recv(
|
||||
mut read: WsReadType,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
stop: Arc<RwLock<bool>>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
while let Ok(Some(msg)) = read.try_next().await {
|
||||
if *stop.read().await {
|
||||
log::info!("Stopping bilibili danmu stream");
|
||||
break;
|
||||
}
|
||||
let data = msg.into_data();
|
||||
|
||||
if !data.is_empty() {
|
||||
let s = pack::build_pack(&data);
|
||||
|
||||
if let Ok(msgs) = s {
|
||||
for i in msgs {
|
||||
let ws = stream::WsStreamCtx::new(&i);
|
||||
if let Ok(ws) = ws {
|
||||
match ws.match_msg() {
|
||||
Ok(v) => {
|
||||
log::debug!("Received message: {:?}", v);
|
||||
tx.send(v).map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: e.to_string(),
|
||||
})?;
|
||||
}
|
||||
Err(e) => {
|
||||
log::trace!(
|
||||
"This message parsing is not yet supported:\nMessage: {i}\nErr: {e:#?}"
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::error!("{}", ws.unwrap_err());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_danmu_info(
|
||||
&self,
|
||||
wbi_key: &str,
|
||||
room_id: i64,
|
||||
) -> Result<DanmuInfo, DanmuStreamError> {
|
||||
let params = self
|
||||
.get_sign(
|
||||
wbi_key,
|
||||
serde_json::json!({
|
||||
"id": room_id,
|
||||
"type": 0,
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
let resp = self
|
||||
.client
|
||||
.get(
|
||||
&format!(
|
||||
"https://api.live.bilibili.com/xlive/web-room/v1/index/getDanmuInfo?{}",
|
||||
params
|
||||
),
|
||||
None,
|
||||
)
|
||||
.await?
|
||||
.json::<DanmuInfo>()
|
||||
.await?;
|
||||
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn get_real_room(&self, wbi_key: &str, room_id: i64) -> Result<i64, DanmuStreamError> {
|
||||
let params = self
|
||||
.get_sign(
|
||||
wbi_key,
|
||||
serde_json::json!({
|
||||
"id": room_id,
|
||||
"from": "room",
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
let resp = self
|
||||
.client
|
||||
.get(
|
||||
&format!(
|
||||
"https://api.live.bilibili.com/room/v1/Room/room_init?{}",
|
||||
params
|
||||
),
|
||||
None,
|
||||
)
|
||||
.await?
|
||||
.json::<RoomInit>()
|
||||
.await?
|
||||
.data
|
||||
.room_id;
|
||||
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
fn parse_user_id(cookie: &str) -> Result<i64, DanmuStreamError> {
|
||||
let mut user_id = None;
|
||||
|
||||
// find DedeUserID=<user_id> in cookie str
|
||||
let re = Regex::new(r"DedeUserID=(\d+)").unwrap();
|
||||
if let Some(captures) = re.captures(cookie) {
|
||||
if let Some(user) = captures.get(1) {
|
||||
user_id = Some(user.as_str().parse::<i64>().unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(user_id) = user_id {
|
||||
Ok(user_id)
|
||||
} else {
|
||||
Err(DanmuStreamError::InvalidIdentifier {
|
||||
err: format!("Failed to find user_id in cookie: {cookie}"),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_wbi_key(&self) -> Result<String, DanmuStreamError> {
|
||||
let nav_info: serde_json::Value = self
|
||||
.client
|
||||
.get("https://api.bilibili.com/x/web-interface/nav", None)
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
let re = Regex::new(r"wbi/(.*).png").unwrap();
|
||||
let img = re
|
||||
.captures(nav_info["data"]["wbi_img"]["img_url"].as_str().unwrap())
|
||||
.unwrap()
|
||||
.get(1)
|
||||
.unwrap()
|
||||
.as_str();
|
||||
let sub = re
|
||||
.captures(nav_info["data"]["wbi_img"]["sub_url"].as_str().unwrap())
|
||||
.unwrap()
|
||||
.get(1)
|
||||
.unwrap()
|
||||
.as_str();
|
||||
let raw_string = format!("{}{}", img, sub);
|
||||
Ok(raw_string)
|
||||
}
|
||||
|
||||
pub async fn get_sign(
|
||||
&self,
|
||||
wbi_key: &str,
|
||||
mut parameters: serde_json::Value,
|
||||
) -> Result<String, DanmuStreamError> {
|
||||
let table = vec![
|
||||
46, 47, 18, 2, 53, 8, 23, 32, 15, 50, 10, 31, 58, 3, 45, 35, 27, 43, 5, 49, 33, 9, 42,
|
||||
19, 29, 28, 14, 39, 12, 38, 41, 13, 37, 48, 7, 16, 24, 55, 40, 61, 26, 17, 0, 1, 60,
|
||||
51, 30, 4, 22, 25, 54, 21, 56, 59, 6, 63, 57, 62, 11, 36, 20, 34, 44, 52,
|
||||
];
|
||||
let raw_string = wbi_key;
|
||||
let mut encoded = Vec::new();
|
||||
table.into_iter().for_each(|x| {
|
||||
if x < raw_string.len() {
|
||||
encoded.push(raw_string.as_bytes()[x]);
|
||||
}
|
||||
});
|
||||
// only keep 32 bytes of encoded
|
||||
encoded = encoded[0..32].to_vec();
|
||||
let encoded = String::from_utf8(encoded).unwrap();
|
||||
// Timestamp in seconds
|
||||
let wts = SystemTime::now()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs();
|
||||
parameters
|
||||
.as_object_mut()
|
||||
.unwrap()
|
||||
.insert("wts".to_owned(), serde_json::Value::String(wts.to_string()));
|
||||
// Get all keys from parameters into vec
|
||||
let mut keys = parameters
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.keys()
|
||||
.map(|x| x.to_owned())
|
||||
.collect::<Vec<String>>();
|
||||
// sort keys
|
||||
keys.sort();
|
||||
let mut params = String::new();
|
||||
keys.iter().for_each(|x| {
|
||||
params.push_str(x);
|
||||
params.push('=');
|
||||
// Convert value to string based on its type
|
||||
let value = match parameters.get(x).unwrap() {
|
||||
serde_json::Value::String(s) => s.clone(),
|
||||
serde_json::Value::Number(n) => n.to_string(),
|
||||
serde_json::Value::Bool(b) => b.to_string(),
|
||||
_ => "".to_string(),
|
||||
};
|
||||
// Value filters !'()* characters
|
||||
let value = value.replace(['!', '\'', '(', ')', '*'], "");
|
||||
let value = PctString::encode(value.chars(), URIReserved);
|
||||
params.push_str(value.as_str());
|
||||
// add & if not last
|
||||
if x != keys.last().unwrap() {
|
||||
params.push('&');
|
||||
}
|
||||
});
|
||||
// md5 params+encoded
|
||||
let w_rid = md5::compute(params.to_string() + encoded.as_str());
|
||||
let params = params + format!("&w_rid={:x}", w_rid).as_str();
|
||||
Ok(params)
|
||||
}
|
||||
}
|
||||
|
||||
/// Body of the join/auth packet sent over the danmu websocket.
#[derive(Serialize)]
struct WsSend {
    uid: i64,
    roomid: i64,
    // NOTE(review): presumably the token from `DanmuInfoData::token` —
    // confirm at the call site.
    key: String,
    protover: u32,
    platform: String,
    // Serialized as "type" (a Rust keyword, hence the rename).
    #[serde(rename = "type")]
    t: u32,
}
|
||||
|
||||
/// Response envelope of the bilibili danmu-info endpoint.
#[derive(Debug, Deserialize, Clone)]
pub struct DanmuInfo {
    pub data: DanmuInfoData,
}
|
||||
|
||||
/// Danmu-info payload: the auth token plus candidate websocket hosts.
#[derive(Debug, Deserialize, Clone)]
pub struct DanmuInfoData {
    pub token: String,
    pub host_list: Vec<WsHost>,
}
|
||||
|
||||
/// A single websocket host entry from the danmu-info host list.
#[derive(Debug, Deserialize, Clone)]
pub struct WsHost {
    pub host: String,
}
|
||||
|
||||
/// Response envelope of the `room_init` endpoint.
#[derive(Debug, Deserialize, Clone)]
pub struct RoomInit {
    data: RoomInitData,
}
|
||||
|
||||
/// `room_init` payload; carries the canonical (long-form) room id.
#[derive(Debug, Deserialize, Clone)]
pub struct RoomInitData {
    room_id: i64,
}
|
||||
@@ -0,0 +1,90 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use super::stream::WsStreamCtx;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
/// A parsed bilibili `DANMU_MSG` chat message.
#[derive(Debug, Deserialize)]
#[allow(dead_code)]
pub struct BiliDanmuMessage {
    pub uid: u64,
    pub username: String,
    pub msg: String,
    // Fan-medal name/level; `None` when absent in the frame.
    pub fan: Option<String>,
    pub fan_level: Option<u64>,
    // Taken from info[0][4]; NOTE(review): unit (ms vs s) not visible here — confirm.
    pub timestamp: i64,
}
|
||||
|
||||
impl BiliDanmuMessage {
    /// Build a danmu message from the positional `info` array of a
    /// `DANMU_MSG` frame.
    ///
    /// Layout read below: `info[0][4]` timestamp, `info[1]` message text,
    /// `info[2] = [uid, username, ...]`, `info[3] = [fan_level, fan_name, ...]`.
    ///
    /// # Errors
    /// `MessageParseError` when any required element is missing or has the
    /// wrong JSON type (fan fields are optional).
    pub fn new_from_ctx(ctx: &WsStreamCtx) -> Result<Self, DanmuStreamError> {
        let info = ctx
            .info
            .as_ref()
            .ok_or_else(|| DanmuStreamError::MessageParseError {
                err: "info is None".to_string(),
            })?;

        // info[2]: sender identity block.
        let array_2 = info
            .get(2)
            .and_then(|x| x.as_array())
            .ok_or_else(|| DanmuStreamError::MessageParseError {
                err: "array_2 is None".to_string(),
            })?
            .to_owned();

        let uid = array_2.first().and_then(|x| x.as_u64()).ok_or_else(|| {
            DanmuStreamError::MessageParseError {
                err: "uid is None".to_string(),
            }
        })?;

        let username = array_2
            .get(1)
            .and_then(|x| x.as_str())
            .ok_or_else(|| DanmuStreamError::MessageParseError {
                err: "username is None".to_string(),
            })?
            .to_string();

        // info[1]: the chat text itself.
        let msg = info
            .get(1)
            .and_then(|x| x.as_str())
            .ok_or_else(|| DanmuStreamError::MessageParseError {
                err: "msg is None".to_string(),
            })?
            .to_string();

        // info[3]: fan-medal block; both fields optional.
        let array_3 = info
            .get(3)
            .and_then(|x| x.as_array())
            .ok_or_else(|| DanmuStreamError::MessageParseError {
                err: "array_3 is None".to_string(),
            })?
            .to_owned();

        let fan = array_3
            .get(1)
            .and_then(|x| x.as_str())
            .map(|x| x.to_owned());

        let fan_level = array_3.first().and_then(|x| x.as_u64());

        // info[0][4]: send timestamp.
        let timestamp = info
            .first()
            .and_then(|x| x.as_array())
            .and_then(|x| x.get(4))
            .and_then(|x| x.as_i64())
            .ok_or_else(|| DanmuStreamError::MessageParseError {
                err: "timestamp is None".to_string(),
            })?;

        Ok(Self {
            uid,
            username,
            msg,
            fan,
            fan_level,
            timestamp,
        })
    }
}
|
||||
@@ -0,0 +1,69 @@
|
||||
use super::stream::WsStreamCtx;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
/// A parsed interact-word event (viewer interaction with the room).
// NOTE(review): exact event semantics (enter/follow/share) not visible here —
// confirm against the cmd dispatch.
#[derive(Debug)]
#[allow(dead_code)]
pub struct InteractWord {
    pub uid: u64,
    pub uname: String,
    // Fan-medal name/level, normalized so an empty name or level 0 become `None`.
    pub fan: Option<String>,
    pub fan_level: Option<u32>,
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl InteractWord {
|
||||
pub fn new_from_ctx(ctx: &WsStreamCtx) -> Result<Self, DanmuStreamError> {
|
||||
let data = ctx
|
||||
.data
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "data is None".to_string(),
|
||||
})?;
|
||||
|
||||
let uname = data
|
||||
.uname
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "uname is None".to_string(),
|
||||
})?
|
||||
.to_string();
|
||||
|
||||
let uid = data
|
||||
.uid
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "uid is None".to_string(),
|
||||
})?
|
||||
.as_u64()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "uid is None".to_string(),
|
||||
})?;
|
||||
|
||||
let fan = data
|
||||
.fans_medal
|
||||
.as_ref()
|
||||
.and_then(|x| x.medal_name.to_owned());
|
||||
|
||||
let fan = if fan == Some("".to_string()) {
|
||||
None
|
||||
} else {
|
||||
fan
|
||||
};
|
||||
|
||||
let fan_level = data.fans_medal.as_ref().and_then(|x| x.medal_level);
|
||||
|
||||
let fan_level = if fan_level == Some(0) {
|
||||
None
|
||||
} else {
|
||||
fan_level
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
uid,
|
||||
uname,
|
||||
fan,
|
||||
fan_level,
|
||||
})
|
||||
}
|
||||
}
|
||||
161
src-tauri/crates/danmu_stream/src/provider/bilibili/pack.rs
Normal file
@@ -0,0 +1,161 @@
|
||||
// This file is copied from https://github.com/eatradish/felgens/blob/master/src/pack.rs
|
||||
|
||||
use std::io::Read;
|
||||
|
||||
use flate2::read::ZlibDecoder;
|
||||
use scroll::Pread;
|
||||
use scroll_derive::Pread;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
/// Wire header of a bilibili websocket packet (16 bytes, big-endian).
/// Field order must match the wire layout exactly — `Pread` derives the
/// parser positionally.
#[derive(Debug, Pread, Clone)]
struct BilibiliPackHeader {
    /// Total packet length, header included (body is `pack_len - 16`).
    pack_len: u32,
    _header_len: u16,
    /// Protocol version: 0 = plain messages, 1 = hot count,
    /// 2 = zlib-wrapped, 3 = brotli-wrapped inner packet.
    ver: u16,
    _op: u32,
    _seq: u32,
}
|
||||
|
||||
/// Big-endian viewer "hot count" carried by ver-1 packet bodies.
#[derive(Debug, Pread)]
struct PackHotCount {
    count: u32,
}
|
||||
|
||||
type BilibiliPackCtx<'a> = (BilibiliPackHeader, &'a [u8]);
|
||||
|
||||
fn pack(buffer: &[u8]) -> Result<BilibiliPackCtx<'_>, DanmuStreamError> {
|
||||
let data = buffer
|
||||
.pread_with(0, scroll::BE)
|
||||
.map_err(|e: scroll::Error| DanmuStreamError::PackError { err: e.to_string() })?;
|
||||
|
||||
let buf = &buffer[16..];
|
||||
|
||||
Ok((data, buf))
|
||||
}
|
||||
|
||||
/// Return a copy of `buffer` with the four bytes at `start` overwritten by
/// `val` in big-endian order.
fn write_int(buffer: &[u8], start: usize, val: u32) -> Vec<u8> {
    let mut patched = buffer.to_vec();
    patched[start..start + 4].copy_from_slice(&val.to_be_bytes());
    patched
}
|
||||
|
||||
/// Build an outgoing packet: a 16-byte header (total length, header length 16,
/// version 1, operation `op`, sequence 1) followed by the UTF-8 body.
pub fn encode(s: &str, op: u8) -> Vec<u8> {
    let body = s.as_bytes();
    let total_len = (16 + body.len()) as u32;

    let mut packet = Vec::with_capacity(16 + body.len());
    packet.extend_from_slice(&total_len.to_be_bytes()); // pack_len
    packet.extend_from_slice(&[0, 16, 0, 1]); // header_len = 16, ver = 1
    packet.extend_from_slice(&[0, 0, 0, op]); // operation
    packet.extend_from_slice(&[0, 0, 0, 1]); // sequence
    packet.extend_from_slice(body);
    packet
}
|
||||
|
||||
pub fn build_pack(buf: &[u8]) -> Result<Vec<String>, DanmuStreamError> {
|
||||
let ctx = pack(buf)?;
|
||||
let msgs = decode(ctx)?;
|
||||
|
||||
Ok(msgs)
|
||||
}
|
||||
|
||||
/// Read the big-endian viewer "hot count" from a ver-1 packet body.
fn get_hot_count(body: &[u8]) -> Result<u32, DanmuStreamError> {
    let count = body
        .pread_with::<PackHotCount>(0, scroll::BE)
        .map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?
        .count;

    Ok(count)
}
|
||||
|
||||
fn zlib_decode(body: &[u8]) -> Result<(BilibiliPackHeader, Vec<u8>), DanmuStreamError> {
|
||||
let mut buf = vec![];
|
||||
let mut z = ZlibDecoder::new(body);
|
||||
z.read_to_end(&mut buf)
|
||||
.map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?;
|
||||
|
||||
let ctx = pack(&buf)?;
|
||||
let header = ctx.0;
|
||||
let buf = ctx.1.to_vec();
|
||||
|
||||
Ok((header, buf))
|
||||
}
|
||||
|
||||
fn decode(ctx: BilibiliPackCtx) -> Result<Vec<String>, DanmuStreamError> {
|
||||
let (mut header, body) = ctx;
|
||||
|
||||
let mut buf = body.to_vec();
|
||||
|
||||
loop {
|
||||
(header, buf) = match header.ver {
|
||||
2 => zlib_decode(&buf)?,
|
||||
3 => brotli_decode(&buf)?,
|
||||
0 | 1 => break,
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
|
||||
let msgs = match header.ver {
|
||||
0 => split_msgs(buf, header)?,
|
||||
1 => vec![format!("{{\"count\": {}}}", get_hot_count(&buf)?)],
|
||||
x => return Err(DanmuStreamError::UnsupportProto { proto: x }),
|
||||
};
|
||||
|
||||
Ok(msgs)
|
||||
}
|
||||
|
||||
/// Split a concatenated run of ver-0 packets into the UTF-8 message strings
/// carried by each packet.
///
/// `buf` starts at the *body* of the first packet (its 16-byte header was
/// already consumed by the caller), while every subsequent packet still
/// carries its own header — which is why the first advance adds
/// `pack_len - 16` to `offset` but the loop adds the full `pack_len`.
// NOTE(review): a malformed `pack_len` (less than 16, or pointing past the
// end of `buf`) would panic on the slices below — confirm inputs are
// validated upstream.
fn split_msgs(buf: Vec<u8>, header: BilibiliPackHeader) -> Result<Vec<String>, DanmuStreamError> {
    let mut buf = buf;
    let mut header = header;
    let mut msgs = vec![];
    let mut offset = 0;
    let buf_len = buf.len();

    // First message: header already stripped, body is the leading bytes.
    msgs.push(
        std::str::from_utf8(&buf[..(header.pack_len - 16) as usize])
            .map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?
            .to_string(),
    );
    buf = buf[(header.pack_len - 16) as usize..].to_vec();
    offset += header.pack_len - 16;

    // Remaining packets: parse each header, then slice out its body.
    while offset != buf_len as u32 {
        let ctx = pack(&buf).map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?;

        header = ctx.0;
        buf = ctx.1.to_vec();

        msgs.push(
            std::str::from_utf8(&buf[..(header.pack_len - 16) as usize])
                .map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?
                .to_string(),
        );

        buf = buf[(header.pack_len - 16) as usize..].to_vec();

        offset += header.pack_len;
    }

    Ok(msgs)
}
|
||||
|
||||
fn brotli_decode(body: &[u8]) -> Result<(BilibiliPackHeader, Vec<u8>), DanmuStreamError> {
|
||||
let mut reader = brotli::Decompressor::new(body, 4096);
|
||||
|
||||
let mut buf = Vec::new();
|
||||
|
||||
reader
|
||||
.read_to_end(&mut buf)
|
||||
.map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?;
|
||||
|
||||
let ctx = pack(&buf).map_err(|e| DanmuStreamError::PackError { err: e.to_string() })?;
|
||||
|
||||
let header = ctx.0;
|
||||
let buf = ctx.1.to_vec();
|
||||
|
||||
Ok((header, buf))
|
||||
}
|
||||
117
src-tauri/crates/danmu_stream/src/provider/bilibili/send_gift.rs
Normal file
@@ -0,0 +1,117 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use super::stream::WsStreamCtx;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
/// A parsed gift event.
#[derive(Debug, Deserialize)]
#[allow(dead_code)]
pub struct SendGift {
    pub action: String,
    pub gift_name: String,
    pub num: u64,
    pub uname: String,
    pub uid: u64,
    // Fan-medal fields, normalized so an empty name / level 0 become `None`.
    pub medal_name: Option<String>,
    pub medal_level: Option<u32>,
    // NOTE(review): currency/unit of `price` not visible in this file — confirm.
    pub price: u32,
}
|
||||
|
||||
#[allow(dead_code)]
impl SendGift {
    /// Build a `SendGift` from a raw websocket frame.
    ///
    /// Gift name and count may arrive either on the frame itself or inside
    /// the nested `combo_send` payload, so both locations are tried in order.
    ///
    /// # Errors
    /// `MessageParseError` when a required field is absent from every
    /// location it can appear in.
    pub fn new_from_ctx(ctx: &WsStreamCtx) -> Result<Self, DanmuStreamError> {
        let data = ctx
            .data
            .as_ref()
            .ok_or_else(|| DanmuStreamError::MessageParseError {
                err: "data is None".to_string(),
            })?;

        let action = data
            .action
            .as_ref()
            .ok_or_else(|| DanmuStreamError::MessageParseError {
                err: "action is None".to_string(),
            })?
            .to_owned();

        // Boxed optional combo payload; cloned once and consumed piecewise below.
        let combo_send = data.combo_send.clone();

        // Prefer the top-level gift name, then fall back to the combo payload.
        let gift_name = if let Some(gift) = data.gift_name.as_ref() {
            gift.to_owned()
        } else if let Some(gift) = combo_send.clone().and_then(|x| x.gift_name) {
            gift
        } else {
            return Err(DanmuStreamError::MessageParseError {
                err: "gift_name is None".to_string(),
            });
        };

        // Count precedence: combo_num, then the frame's own num, then gift_num.
        // NOTE(review): this ordering is assumed intentional — confirm.
        let num = if let Some(num) = combo_send.clone().and_then(|x| x.combo_num) {
            num
        } else if let Some(num) = data.num {
            num
        } else if let Some(num) = combo_send.and_then(|x| x.gift_num) {
            num
        } else {
            return Err(DanmuStreamError::MessageParseError {
                err: "num is None".to_string(),
            });
        };

        let uname = data
            .uname
            .as_ref()
            .ok_or_else(|| DanmuStreamError::MessageParseError {
                err: "uname is None".to_string(),
            })?
            .to_owned();

        let uid = data
            .uid
            .as_ref()
            .ok_or_else(|| DanmuStreamError::MessageParseError {
                err: "uid is None".to_string(),
            })?
            .as_u64()
            .ok_or_else(|| DanmuStreamError::MessageParseError {
                err: "uid is None".to_string(),
            })?;

        let medal_name = data
            .medal_info
            .as_ref()
            .and_then(|x| x.medal_name.to_owned());

        let medal_level = data.medal_info.as_ref().and_then(|x| x.medal_level);

        // Normalize "no medal": empty name or level 0 become None.
        let medal_name = if medal_name == Some("".to_string()) {
            None
        } else {
            medal_name
        };

        let medal_level = if medal_level == Some(0) {
            None
        } else {
            medal_level
        };

        let price = data
            .price
            .ok_or_else(|| DanmuStreamError::MessageParseError {
                err: "price is None".to_string(),
            })?;

        Ok(Self {
            action,
            gift_name,
            num,
            uname,
            uid,
            medal_name,
            medal_level,
            price,
        })
    }
}
|
||||
@@ -0,0 +1,96 @@
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
|
||||
use super::dannmu_msg::BiliDanmuMessage;
|
||||
|
||||
use crate::{provider::DanmuMessageType, DanmuMessage, DanmuStreamError};
|
||||
|
||||
/// Raw deserialized websocket frame; typed event structs are built from it.
#[derive(Debug, Deserialize, Clone)]
pub struct WsStreamCtx {
    /// Command name, e.g. `DANMU_MSG:4:0:2:2:2:0`.
    pub cmd: Option<String>,
    /// Positional payload used by `DANMU_MSG` frames.
    pub info: Option<Vec<Value>>,
    /// Object payload used by the other parsed frame types.
    pub data: Option<WsStreamCtxData>,
    // Catch-all: keeps unknown fields so unrecognized frames still deserialize.
    #[serde(flatten)]
    _v: Value,
}
|
||||
|
||||
/// Union of the `data` fields consumed by the parsed event types (super
/// chat, interact word, send gift); everything is optional because each
/// event populates a different subset.
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub struct WsStreamCtxData {
    pub message: Option<String>,
    pub price: Option<u32>,
    pub start_time: Option<u64>,
    pub time: Option<u32>,
    // Kept as raw JSON; consumers read it with `as_u64()`.
    pub uid: Option<Value>,
    pub user_info: Option<WsStreamCtxDataUser>,
    pub medal_info: Option<WsStreamCtxDataMedalInfo>,
    pub uname: Option<String>,
    pub fans_medal: Option<WsStreamCtxDataMedalInfo>,
    pub action: Option<String>,
    #[serde(rename = "giftName")]
    pub gift_name: Option<String>,
    pub num: Option<u64>,
    pub combo_num: Option<u64>,
    pub gift_num: Option<u64>,
    // Boxed because the type is recursive (combo payload nests this struct).
    pub combo_send: Box<Option<WsStreamCtxData>>,
}
|
||||
|
||||
/// Fan-medal info shared by the `medal_info` and `fans_medal` payloads.
#[derive(Debug, Deserialize, Clone)]
pub struct WsStreamCtxDataMedalInfo {
    pub medal_name: Option<String>,
    pub medal_level: Option<u32>,
}
|
||||
|
||||
/// Minimal user info embedded in super-chat frames (`user_info`).
#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
pub struct WsStreamCtxDataUser {
    pub face: String,
    pub uname: String,
}
|
||||
|
||||
impl WsStreamCtx {
|
||||
pub fn new(s: &str) -> Result<Self, DanmuStreamError> {
|
||||
serde_json::from_str(s).map_err(|_| DanmuStreamError::MessageParseError {
|
||||
err: "Failed to parse message".to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn match_msg(&self) -> Result<DanmuMessageType, DanmuStreamError> {
|
||||
let cmd = self.handle_cmd();
|
||||
|
||||
let danmu_msg = match cmd {
|
||||
Some(c) if c.contains("DANMU_MSG") => Some(BiliDanmuMessage::new_from_ctx(self)?),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
if let Some(danmu_msg) = danmu_msg {
|
||||
Ok(DanmuMessageType::DanmuMessage(DanmuMessage {
|
||||
room_id: 0,
|
||||
user_id: danmu_msg.uid,
|
||||
user_name: danmu_msg.username,
|
||||
message: danmu_msg.msg,
|
||||
color: 0,
|
||||
timestamp: danmu_msg.timestamp,
|
||||
}))
|
||||
} else {
|
||||
Err(DanmuStreamError::MessageParseError {
|
||||
err: "Unknown message".to_string(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_cmd(&self) -> Option<&str> {
|
||||
// handle DANMU_MSG:4:0:2:2:2:0
|
||||
let cmd = if let Some(c) = self.cmd.as_deref() {
|
||||
if c.starts_with("DM_INTERACTION") {
|
||||
Some("DANMU_MSG")
|
||||
} else {
|
||||
Some(c)
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
cmd
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,95 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use super::stream::WsStreamCtx;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
/// A parsed super-chat (paid message) event.
#[derive(Debug, Deserialize)]
#[allow(dead_code)]
pub struct SuperChatMessage {
    pub uname: String,
    pub uid: u64,
    pub face: String,
    pub price: u32,
    pub start_time: u64,
    pub time: u32,
    pub msg: String,
    // Fan-medal fields; `None` when the frame carries no medal info.
    pub medal_name: Option<String>,
    pub medal_level: Option<u32>,
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl SuperChatMessage {
|
||||
pub fn new_from_ctx(ctx: &WsStreamCtx) -> Result<Self, DanmuStreamError> {
|
||||
let data = ctx
|
||||
.data
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "data is None".to_string(),
|
||||
})?;
|
||||
|
||||
let user_info =
|
||||
data.user_info
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "user_info is None".to_string(),
|
||||
})?;
|
||||
|
||||
let uname = user_info.uname.to_owned();
|
||||
|
||||
let uid = data.uid.as_ref().and_then(|x| x.as_u64()).ok_or_else(|| {
|
||||
DanmuStreamError::MessageParseError {
|
||||
err: "uid is None".to_string(),
|
||||
}
|
||||
})?;
|
||||
|
||||
let face = user_info.face.to_owned();
|
||||
|
||||
let price = data
|
||||
.price
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "price is None".to_string(),
|
||||
})?;
|
||||
|
||||
let start_time = data
|
||||
.start_time
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "start_time is None".to_string(),
|
||||
})?;
|
||||
|
||||
let time = data
|
||||
.time
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "time is None".to_string(),
|
||||
})?;
|
||||
|
||||
let msg = data
|
||||
.message
|
||||
.as_ref()
|
||||
.ok_or_else(|| DanmuStreamError::MessageParseError {
|
||||
err: "message is None".to_string(),
|
||||
})?
|
||||
.to_owned();
|
||||
|
||||
let medal = data
|
||||
.medal_info
|
||||
.as_ref()
|
||||
.map(|x| (x.medal_name.to_owned(), x.medal_level.to_owned()));
|
||||
|
||||
let medal_name = medal.as_ref().and_then(|(name, _)| name.to_owned());
|
||||
|
||||
let medal_level = medal.and_then(|(_, level)| level);
|
||||
|
||||
Ok(Self {
|
||||
uname,
|
||||
uid,
|
||||
face,
|
||||
price,
|
||||
start_time,
|
||||
time,
|
||||
msg,
|
||||
medal_name,
|
||||
medal_level,
|
||||
})
|
||||
}
|
||||
}
|
||||
457
src-tauri/crates/danmu_stream/src/provider/douyin.rs
Normal file
@@ -0,0 +1,457 @@
|
||||
mod messages;
|
||||
|
||||
use std::io::Read;
|
||||
use std::sync::Arc;
|
||||
use std::time::{Duration, SystemTime};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use deno_core::v8;
|
||||
use deno_core::JsRuntime;
|
||||
use deno_core::RuntimeOptions;
|
||||
use flate2::read::GzDecoder;
|
||||
use futures_util::{SinkExt, StreamExt, TryStreamExt};
|
||||
use log::debug;
|
||||
use log::{error, info};
|
||||
use messages::*;
|
||||
use prost::bytes::Bytes;
|
||||
use prost::Message;
|
||||
use tokio::net::TcpStream;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::RwLock;
|
||||
use tokio_tungstenite::{
|
||||
connect_async, tungstenite::Message as WsMessage, MaybeTlsStream, WebSocketStream,
|
||||
};
|
||||
|
||||
use crate::{provider::DanmuProvider, DanmuMessage, DanmuMessageType, DanmuStreamError};
|
||||
|
||||
const USER_AGENT: &str = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36";
|
||||
|
||||
const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(10);
|
||||
|
||||
type WsReadType = futures_util::stream::SplitStream<WebSocketStream<MaybeTlsStream<TcpStream>>>;
|
||||
type WsWriteType =
|
||||
futures_util::stream::SplitSink<WebSocketStream<MaybeTlsStream<TcpStream>>, WsMessage>;
|
||||
|
||||
/// Danmu provider for douyin.com live rooms.
pub struct DouyinDanmu {
    room_id: i64,
    // Full cookie string passed in as the provider identifier.
    cookie: String,
    // Cooperative stop flag polled by the read/heartbeat loops.
    stop: Arc<RwLock<bool>>,
    // Websocket write half, populated once the connection is established.
    write: Arc<RwLock<Option<WsWriteType>>>,
}
|
||||
|
||||
impl DouyinDanmu {
|
||||
/// Open the signed douyin websocket and run the message loop on it.
///
/// Builds a browser-like upgrade request (cookie, referer, UA, and a fixed
/// websocket key) around the URL produced by `get_wss_url`, stores the write
/// half in `self.write`, and hands the read half to `handle_connection`.
// NOTE(review): the Host header and Sec-WebSocket-Key are hard-coded; if the
// signed URL ever points at a different webcast host, the Host header will
// disagree with it — confirm this is accepted server-side.
async fn connect_and_handle(
    &self,
    tx: mpsc::UnboundedSender<DanmuMessageType>,
) -> Result<(), DanmuStreamError> {
    let url = self.get_wss_url().await?;

    let request = tokio_tungstenite::tungstenite::http::Request::builder()
        .uri(url)
        .header(
            tokio_tungstenite::tungstenite::http::header::COOKIE,
            self.cookie.as_str(),
        )
        .header(
            tokio_tungstenite::tungstenite::http::header::REFERER,
            "https://live.douyin.com/",
        )
        .header(
            tokio_tungstenite::tungstenite::http::header::USER_AGENT,
            USER_AGENT,
        )
        .header(
            tokio_tungstenite::tungstenite::http::header::HOST,
            "webcast5-ws-web-hl.douyin.com",
        )
        .header(
            tokio_tungstenite::tungstenite::http::header::UPGRADE,
            "websocket",
        )
        .header(
            tokio_tungstenite::tungstenite::http::header::CONNECTION,
            "Upgrade",
        )
        .header(
            tokio_tungstenite::tungstenite::http::header::SEC_WEBSOCKET_VERSION,
            "13",
        )
        .header(
            tokio_tungstenite::tungstenite::http::header::SEC_WEBSOCKET_EXTENSIONS,
            "permessage-deflate; client_max_window_bits",
        )
        .header(
            tokio_tungstenite::tungstenite::http::header::SEC_WEBSOCKET_KEY,
            "V1Yza5x1zcfkembl6u/0Pg==",
        )
        .body(())
        .unwrap();

    let (ws_stream, response) =
        connect_async(request)
            .await
            .map_err(|e| DanmuStreamError::WebsocketError {
                err: format!("Failed to connect to douyin websocket: {}", e),
            })?;

    // Log the response status for debugging
    info!("WebSocket connection response: {:?}", response.status());

    let (write, read) = ws_stream.split();
    // Stash the write half so other tasks (acks, heartbeats) can reach it.
    *self.write.write().await = Some(write);
    self.handle_connection(read, tx).await
}
|
||||
|
||||
/// Compute the signed websocket URL for this room by running douyin's own
/// obfuscated signing JavaScript (webmssdk + CryptoJS) inside an embedded
/// V8 runtime and calling its `get_wss_url(room_id)` entry point.
// NOTE(review): the JS `unwrap()`s below panic if the bundled scripts fail
// to evaluate or return a non-stringifiable value — acceptable only if the
// scripts are known-good at build time.
async fn get_wss_url(&self) -> Result<String, DanmuStreamError> {
    // Create a new V8 runtime
    let mut runtime = JsRuntime::new(RuntimeOptions::default());

    // Add global CryptoJS object
    let crypto_js = include_str!("douyin/crypto-js.min.js");
    runtime
        .execute_script(
            "<crypto-js.min.js>",
            deno_core::FastString::from_static(crypto_js),
        )
        .map_err(|e| DanmuStreamError::WebsocketError {
            err: format!("Failed to execute crypto-js: {}", e),
        })?;

    // Load and execute the sign.js file
    let js_code = include_str!("douyin/webmssdk.js");
    runtime
        .execute_script("<sign.js>", deno_core::FastString::from_static(js_code))
        .map_err(|e| DanmuStreamError::WebsocketError {
            err: format!("Failed to execute JavaScript: {}", e),
        })?;

    // Call the get_wss_url function
    let sign_call = format!("get_wss_url(\"{}\")", self.room_id);
    let result = runtime
        .execute_script("<sign_call>", deno_core::FastString::from(sign_call))
        .map_err(|e| DanmuStreamError::WebsocketError {
            err: format!("Failed to execute JavaScript: {}", e),
        })?;

    // Get the result from the V8 runtime
    let scope = &mut runtime.handle_scope();
    let local = v8::Local::new(scope, result);
    let url = local.to_string(scope).unwrap().to_rust_string_lossy(scope);

    debug!("Douyin wss url: {}", url);

    Ok(url)
}
|
||||
|
||||
async fn handle_connection(
|
||||
&self,
|
||||
mut read: WsReadType,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
// Start heartbeat task with error handling
|
||||
let (tx_write, mut _rx_write) = mpsc::channel(32);
|
||||
let tx_write_clone = tx_write.clone();
|
||||
let stop = Arc::clone(&self.stop);
|
||||
let heartbeat_handle = tokio::spawn(async move {
|
||||
let mut last_heartbeat = SystemTime::now();
|
||||
let mut consecutive_failures = 0;
|
||||
const MAX_FAILURES: u32 = 3;
|
||||
|
||||
loop {
|
||||
if *stop.read().await {
|
||||
log::info!("Stopping douyin danmu stream");
|
||||
break;
|
||||
}
|
||||
|
||||
tokio::time::sleep(HEARTBEAT_INTERVAL).await;
|
||||
|
||||
match Self::send_heartbeat(&tx_write_clone).await {
|
||||
Ok(_) => {
|
||||
last_heartbeat = SystemTime::now();
|
||||
consecutive_failures = 0;
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Failed to send heartbeat: {}", e);
|
||||
consecutive_failures += 1;
|
||||
|
||||
if consecutive_failures >= MAX_FAILURES {
|
||||
error!("Too many consecutive heartbeat failures, closing connection");
|
||||
break;
|
||||
}
|
||||
|
||||
// Check if we've exceeded the maximum time without a successful heartbeat
|
||||
if let Ok(duration) = last_heartbeat.elapsed() {
|
||||
if duration > HEARTBEAT_INTERVAL * 2 {
|
||||
error!("No successful heartbeat for too long, closing connection");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Main message handling loop
|
||||
let room_id = self.room_id;
|
||||
let stop = Arc::clone(&self.stop);
|
||||
let write = Arc::clone(&self.write);
|
||||
let message_handle = tokio::spawn(async move {
|
||||
while let Some(msg) =
|
||||
read.try_next()
|
||||
.await
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to read message: {}", e),
|
||||
})?
|
||||
{
|
||||
if *stop.read().await {
|
||||
log::info!("Stopping douyin danmu stream");
|
||||
break;
|
||||
}
|
||||
|
||||
match msg {
|
||||
WsMessage::Binary(data) => {
|
||||
if let Ok(Some(ack)) = handle_binary_message(&data, &tx, room_id).await {
|
||||
if let Some(write) = write.write().await.as_mut() {
|
||||
if let Err(e) =
|
||||
write.send(WsMessage::binary(ack.encode_to_vec())).await
|
||||
{
|
||||
error!("Failed to send ack: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
WsMessage::Close(_) => {
|
||||
info!("WebSocket connection closed");
|
||||
break;
|
||||
}
|
||||
WsMessage::Ping(data) => {
|
||||
// Respond to ping with pong
|
||||
if let Err(e) = tx_write.send(WsMessage::Pong(data)).await {
|
||||
error!("Failed to send pong: {}", e);
|
||||
break;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Ok::<(), DanmuStreamError>(())
|
||||
});
|
||||
|
||||
// Wait for either the heartbeat or message handling to complete
|
||||
tokio::select! {
|
||||
result = heartbeat_handle => {
|
||||
if let Err(e) = result {
|
||||
error!("Heartbeat task failed: {}", e);
|
||||
}
|
||||
}
|
||||
result = message_handle => {
|
||||
if let Err(e) = result {
|
||||
error!("Message handling task failed: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn send_heartbeat(tx: &mpsc::Sender<WsMessage>) -> Result<(), DanmuStreamError> {
|
||||
// heartbeat message: 3A 02 68 62
|
||||
tx.send(WsMessage::binary(vec![0x3A, 0x02, 0x68, 0x62]))
|
||||
.await
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to send heartbeat message: {}", e),
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Decode one binary websocket frame from douyin and dispatch its messages.
///
/// Pipeline: protobuf `PushFrame` → gzip-decompress payload → protobuf
/// `Response` → per-message dispatch by `method`. Only chat messages are
/// forwarded on `tx`; gifts/likes/joins are logged at debug level. Returns
/// the ack `PushFrame` to send back when the response requests one.
// NOTE(review): payload is assumed gzip-compressed unconditionally —
// `payload_encoding` is never consulted; confirm uncompressed frames cannot
// occur.
async fn handle_binary_message(
    data: &[u8],
    tx: &mpsc::UnboundedSender<DanmuMessageType>,
    room_id: i64,
) -> Result<Option<PushFrame>, DanmuStreamError> {
    // First decode the PushFrame
    let push_frame = PushFrame::decode(Bytes::from(data.to_vec())).map_err(|e| {
        DanmuStreamError::WebsocketError {
            err: format!("Failed to decode PushFrame: {}", e),
        }
    })?;

    // Decompress the payload
    let mut decoder = GzDecoder::new(push_frame.payload.as_slice());
    let mut decompressed = Vec::new();
    decoder
        .read_to_end(&mut decompressed)
        .map_err(|e| DanmuStreamError::WebsocketError {
            err: format!("Failed to decompress payload: {}", e),
        })?;

    // Decode the Response from decompressed payload
    let response = Response::decode(Bytes::from(decompressed)).map_err(|e| {
        DanmuStreamError::WebsocketError {
            err: format!("Failed to decode Response: {}", e),
        }
    })?;

    // Reference (python) ack construction this mirrors:
    // if payload_package.needAck:
    //     obj = PushFrame()
    //     obj.payloadType = 'ack'
    //     obj.logId = log_id
    //     obj.payloadType = payload_package.internalExt
    //     ack = obj.SerializeToString()
    let mut ack = None;
    if response.need_ack {
        let ack_msg = PushFrame {
            payload_type: "ack".to_string(),
            log_id: push_frame.log_id,
            payload_encoding: "".to_string(),
            payload: vec![],
            seq_id: 0,
            service: 0,
            method: 0,
            headers_list: vec![],
        };

        debug!("Need to respond ack: {:?}", ack_msg);

        ack = Some(ack_msg);
    }

    // Dispatch every inner message by its method name.
    for message in response.messages_list {
        match message.method.as_str() {
            "WebcastChatMessage" => {
                let chat_msg =
                    DouyinChatMessage::decode(message.payload.as_slice()).map_err(|e| {
                        DanmuStreamError::WebsocketError {
                            err: format!("Failed to decode chat message: {}", e),
                        }
                    })?;
                if let Some(user) = chat_msg.user {
                    let danmu_msg = DanmuMessage {
                        room_id,
                        user_id: user.id,
                        user_name: user.nick_name,
                        message: chat_msg.content,
                        // Douyin frames carry no color; default to white.
                        color: 0xffffff,
                        // event_time is in seconds; convert to milliseconds.
                        timestamp: chat_msg.event_time as i64 * 1000,
                    };
                    debug!("Received danmu message: {:?}", danmu_msg);
                    tx.send(DanmuMessageType::DanmuMessage(danmu_msg))
                        .map_err(|e| DanmuStreamError::WebsocketError {
                            err: format!("Failed to send message to channel: {}", e),
                        })?;
                }
            }
            "WebcastGiftMessage" => {
                let gift_msg = GiftMessage::decode(message.payload.as_slice()).map_err(|e| {
                    DanmuStreamError::WebsocketError {
                        err: format!("Failed to decode gift message: {}", e),
                    }
                })?;
                if let Some(user) = gift_msg.user {
                    if let Some(gift) = gift_msg.gift {
                        log::debug!("Received gift: {} from user: {}", gift.name, user.nick_name);
                    }
                }
            }
            "WebcastLikeMessage" => {
                let like_msg = LikeMessage::decode(message.payload.as_slice()).map_err(|e| {
                    DanmuStreamError::WebsocketError {
                        err: format!("Failed to decode like message: {}", e),
                    }
                })?;
                if let Some(user) = like_msg.user {
                    log::debug!(
                        "Received {} likes from user: {}",
                        like_msg.count,
                        user.nick_name
                    );
                }
            }
            "WebcastMemberMessage" => {
                let member_msg =
                    MemberMessage::decode(message.payload.as_slice()).map_err(|e| {
                        DanmuStreamError::WebsocketError {
                            err: format!("Failed to decode member message: {}", e),
                        }
                    })?;
                if let Some(user) = member_msg.user {
                    log::debug!(
                        "Member joined: {} (Action: {})",
                        user.nick_name,
                        member_msg.action_description
                    );
                }
            }
            _ => {
                debug!("Unknown message: {:?}", message);
            }
        }
    }

    Ok(ack)
}
|
||||
|
||||
#[async_trait]
|
||||
impl DanmuProvider for DouyinDanmu {
|
||||
async fn new(identifier: &str, room_id: i64) -> Result<Self, DanmuStreamError> {
|
||||
Ok(Self {
|
||||
room_id,
|
||||
cookie: identifier.to_string(),
|
||||
stop: Arc::new(RwLock::new(false)),
|
||||
write: Arc::new(RwLock::new(None)),
|
||||
})
|
||||
}
|
||||
|
||||
async fn start(
|
||||
&self,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let mut retry_count = 0;
|
||||
const RETRY_DELAY: Duration = Duration::from_secs(5);
|
||||
info!(
|
||||
"Douyin WebSocket connection started, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
|
||||
loop {
|
||||
if *self.stop.read().await {
|
||||
break;
|
||||
}
|
||||
|
||||
match self.connect_and_handle(tx.clone()).await {
|
||||
Ok(_) => {
|
||||
info!(
|
||||
"Douyin WebSocket connection closed normally, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
retry_count = 0;
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Douyin WebSocket connection error: {}", e);
|
||||
retry_count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}), room_id: {}",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
self.room_id
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn stop(&self) -> Result<(), DanmuStreamError> {
|
||||
*self.stop.write().await = true;
|
||||
if let Some(mut write) = self.write.write().await.take() {
|
||||
if let Err(e) = write.close().await {
|
||||
error!("Failed to close WebSocket connection: {}", e);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||