Compare commits
170 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1e9cd61eba | ||
|
|
7b7f341fa0 | ||
|
|
ac806b49b2 | ||
|
|
f20636a107 | ||
|
|
787a30e6f7 | ||
|
|
d1d217be18 | ||
|
|
944d0a371a | ||
|
|
0df03e0c9c | ||
|
|
7ffdf65705 | ||
|
|
89cdf91a48 | ||
|
|
43ebc27044 | ||
|
|
e6159555f3 | ||
|
|
1f2508aae9 | ||
|
|
ad13f58fa7 | ||
|
|
de4959d49f | ||
|
|
b5b75129e7 | ||
|
|
84346a486f | ||
|
|
3bdcddf5a2 | ||
|
|
98f68a5e14 | ||
|
|
2249b86af3 | ||
|
|
fd889922d8 | ||
|
|
8db7c6e320 | ||
|
|
5bc4ed6dfd | ||
|
|
22ad5f7fea | ||
|
|
c0369c1a14 | ||
|
|
322f4a3ca5 | ||
|
|
4e32453441 | ||
|
|
66725b8a64 | ||
|
|
f7bcbbca83 | ||
|
|
07a3b33040 | ||
|
|
2f9b4582f8 | ||
|
|
c3f63c58cf | ||
|
|
4a3529bc2e | ||
|
|
b0355a919f | ||
|
|
cfe1a0b4b9 | ||
|
|
b655e98f35 | ||
|
|
2d1021bc42 | ||
|
|
33d74999b9 | ||
|
|
84b7dd7a3c | ||
|
|
0c678fbda3 | ||
|
|
3486f7d050 | ||
|
|
d42a1010b8 | ||
|
|
ece6ceea45 | ||
|
|
b22ebb399e | ||
|
|
4431b10cb7 | ||
|
|
01a0c929e8 | ||
|
|
b06f6e8d09 | ||
|
|
753227acbb | ||
|
|
c7dd9091d0 | ||
|
|
bae20ce011 | ||
|
|
8da4759668 | ||
|
|
eb7c6d91e9 | ||
|
|
3c24dfe8a6 | ||
|
|
bb916daaaf | ||
|
|
3931e484c2 | ||
|
|
b67e258c31 | ||
|
|
1a7e6f5a43 | ||
|
|
437204dbe6 | ||
|
|
af105277d9 | ||
|
|
7efd327a36 | ||
|
|
0141586fa9 | ||
|
|
df1d8ccac6 | ||
|
|
10b6b95e4d | ||
|
|
a58e6f77bd | ||
|
|
fe2bd80ac6 | ||
|
|
870b44a973 | ||
|
|
48fd9ca7b2 | ||
|
|
14d03b7eb9 | ||
|
|
6f1db6c038 | ||
|
|
cd2d208e5c | ||
|
|
7d6ec72002 | ||
|
|
837cb6a978 | ||
|
|
aeeb0c08d7 | ||
|
|
72d8a7f485 | ||
|
|
5d3692c7a0 | ||
|
|
7e54231bef | ||
|
|
80a885dbf3 | ||
|
|
134c6bbb5f | ||
|
|
49a153adf7 | ||
|
|
99e15b0bda | ||
|
|
4de8a73af2 | ||
|
|
d104ba3180 | ||
|
|
abf0d4748f | ||
|
|
d2a9c44601 | ||
|
|
c269558bae | ||
|
|
cc22453a40 | ||
|
|
d525d92de4 | ||
|
|
2197dfe65c | ||
|
|
38ee00f474 | ||
|
|
8fdad41c71 | ||
|
|
f269995bb7 | ||
|
|
03a2db8c44 | ||
|
|
6d9cd3c6a8 | ||
|
|
303b2f7036 | ||
|
|
ec25c2ffd9 | ||
|
|
50ab608ddb | ||
|
|
3c76be9b81 | ||
|
|
ab7f0cf0b4 | ||
|
|
f9f590c4dc | ||
|
|
8d38fe582a | ||
|
|
dc4a26561d | ||
|
|
10c1d1f3a8 | ||
|
|
66bcf53d01 | ||
|
|
8ab4b7d693 | ||
|
|
ce2f097d32 | ||
|
|
f7575cd327 | ||
|
|
8634c6a211 | ||
|
|
b070013efc | ||
|
|
d2d9112f6c | ||
|
|
9fea18f2de | ||
|
|
74480f91ce | ||
|
|
b2e13b631f | ||
|
|
001d995c8f | ||
|
|
8cb2acea88 | ||
|
|
7c0d57d84e | ||
|
|
8cb875f449 | ||
|
|
e6bbe65723 | ||
|
|
f4a71a2476 | ||
|
|
47b9362b0a | ||
|
|
c1aad0806e | ||
|
|
4ccc90f9fb | ||
|
|
7dc63440e6 | ||
|
|
4094e8b80d | ||
|
|
e27cbaf715 | ||
|
|
1f39b27d79 | ||
|
|
f45891fd95 | ||
|
|
18fe644715 | ||
|
|
40cde8c69a | ||
|
|
4b0af47906 | ||
|
|
9365b3c8cd | ||
|
|
4b9f015ea7 | ||
|
|
c42d4a084e | ||
|
|
5bb3feb05b | ||
|
|
05f776ed8b | ||
|
|
9cec809485 | ||
|
|
429f909152 | ||
|
|
084dd23df1 | ||
|
|
e55afdd739 | ||
|
|
72128a132b | ||
|
|
92ca2cddad | ||
|
|
3db0d1dfe5 | ||
|
|
57907323e6 | ||
|
|
dbdca44c5f | ||
|
|
fe1dd2201f | ||
|
|
e0ae194cc3 | ||
|
|
6fc5700457 | ||
|
|
c4fdcf86d4 | ||
|
|
3088500c8d | ||
|
|
861f3a3624 | ||
|
|
c55783e4d9 | ||
|
|
955e284d41 | ||
|
|
fc4c47427e | ||
|
|
e2d7563faa | ||
|
|
27d69f7f8d | ||
|
|
a77bb5af44 | ||
|
|
00286261a4 | ||
|
|
0b898dccaa | ||
|
|
a1d9ac4e68 | ||
|
|
4150939e23 | ||
|
|
8f84b7f063 | ||
|
|
04b245ac64 | ||
|
|
12f7e62957 | ||
|
|
9600d310c7 | ||
|
|
dec5a2472a | ||
|
|
13eb7c6ea2 | ||
|
|
2356cfa10a | ||
|
|
3bfaefb3b0 | ||
|
|
78b8c25d96 | ||
|
|
c1d2ff2b96 | ||
|
|
24aee9446a |
51
.cursor/rules/ai-features.mdc
Normal file
@@ -0,0 +1,51 @@
|
||||
# AI Features and LangChain Integration
|
||||
|
||||
## AI Components
|
||||
|
||||
- **LangChain Integration**: Uses `@langchain/core`, `@langchain/deepseek`,
|
||||
`@langchain/langgraph`, `@langchain/ollama`
|
||||
- **Whisper Transcription**: Local and online transcription via `whisper-rs` in
|
||||
Rust backend
|
||||
- **AI Agent**: Located in [src/lib/agent/](mdc:src/lib/agent/) directory
|
||||
|
||||
## Frontend AI Features
|
||||
|
||||
- **AI Page**: [src/page/AI.svelte](mdc:src/page/AI.svelte) - Main AI interface
|
||||
- **Agent Logic**: [src/lib/agent/](mdc:src/lib/agent/) - AI agent implementation
|
||||
- **Interface**: [src/lib/interface.ts](mdc:src/lib/interface.ts)
|
||||
\- AI communication layer
|
||||
|
||||
## Backend AI Features
|
||||
|
||||
- **Subtitle Generation**:
|
||||
[src-tauri/src/subtitle_generator/](mdc:src-tauri/src/subtitle_generator/) -
|
||||
AI-powered subtitle creation
|
||||
- **Whisper Integration**:
|
||||
[src-tauri/src/subtitle_generator.rs](mdc:src-tauri/src/subtitle_generator.rs)
|
||||
\- Speech-to-text processing
|
||||
- **CUDA Support**: Optional CUDA acceleration for Whisper via feature flag
|
||||
|
||||
## AI Workflows
|
||||
|
||||
- **Live Transcription**: Real-time speech-to-text during live streams
|
||||
- **Content Summarization**: AI-powered content analysis and summarization
|
||||
- **Smart Editing**: AI-assisted video editing and clip generation
|
||||
- **Danmaku Processing**: AI analysis of danmaku (bullet comments) streams
|
||||
|
||||
## Configuration
|
||||
|
||||
- **LLM Settings**: Configure AI models in [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
- **Whisper Models**: Local model configuration for offline transcription
|
||||
- **API Keys**: External AI service configuration for online features
|
||||
|
||||
## Development Notes
|
||||
|
||||
- AI features require proper model configuration
|
||||
- CUDA feature enables GPU acceleration for Whisper
|
||||
- LangChain integration supports multiple AI providers
|
||||
- AI agent can work with both local and cloud-based models
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
62
.cursor/rules/build-deployment.mdc
Normal file
@@ -0,0 +1,62 @@
|
||||
# Build and Deployment Configuration
|
||||
|
||||
## Build Scripts
|
||||
|
||||
- **PowerShell**: [build.ps1](mdc:build.ps1) - Windows build script
|
||||
- **FFmpeg Setup**: [ffmpeg_setup.ps1](mdc:ffmpeg_setup.ps1)
|
||||
\- FFmpeg installation script
|
||||
- **Version Bump**: [scripts/bump.cjs](mdc:scripts/bump.cjs)
|
||||
\- Version management script
|
||||
|
||||
## Package Management
|
||||
|
||||
- **Node.js**: [package.json](mdc:package.json) - Frontend dependencies and scripts
|
||||
- **Rust**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml)
|
||||
\- Backend dependencies and features
|
||||
- **Lock Files**: [yarn.lock](mdc:yarn.lock) - Yarn dependency lock
|
||||
|
||||
## Build Configuration
|
||||
|
||||
- **Vite**: [vite.config.ts](mdc:vite.config.ts) - Frontend build tool configuration
|
||||
- **Tailwind**: [tailwind.config.cjs](mdc:tailwind.config.cjs) - CSS framework configuration
|
||||
- **PostCSS**: [postcss.config.cjs](mdc:postcss.config.cjs) - CSS processing configuration
|
||||
- **TypeScript**: [tsconfig.json](mdc:tsconfig.json),
|
||||
[tsconfig.node.json](mdc:tsconfig.node.json) - TypeScript configuration
|
||||
|
||||
## Tauri Configuration
|
||||
|
||||
- **Main Config**: [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
|
||||
\- Core Tauri settings
|
||||
- **Platform Configs**:
|
||||
- [src-tauri/tauri.macos.conf.json](mdc:src-tauri/tauri.macos.conf.json)
|
||||
\- macOS specific
|
||||
- [src-tauri/tauri.linux.conf.json](mdc:src-tauri/tauri.linux.conf.json)
|
||||
\- Linux specific
|
||||
- [src-tauri/tauri.windows.conf.json](mdc:src-tauri/tauri.windows.conf.json)
|
||||
\- Windows specific
|
||||
- [src-tauri/tauri.windows.cuda.conf.json](mdc:src-tauri/tauri.windows.cuda.conf.json)
|
||||
\- Windows with CUDA
|
||||
|
||||
## Docker Support
|
||||
|
||||
- **Dockerfile**: [Dockerfile](mdc:Dockerfile) - Container deployment configuration
|
||||
- **Documentation**: [docs/](mdc:docs/) - VitePress-based documentation site
|
||||
|
||||
## Build Commands
|
||||
|
||||
- **Frontend**: `yarn build` - Build production frontend
|
||||
- **Tauri**: `yarn tauri build` - Build desktop application
|
||||
- **Documentation**: `yarn docs:build` - Build documentation site
|
||||
- **Type Check**: `yarn check` - TypeScript and Svelte validation
|
||||
|
||||
## Deployment Targets
|
||||
|
||||
- **Desktop**: Native Tauri applications for Windows, macOS, Linux
|
||||
- **Docker**: Containerized deployment option
|
||||
- **Documentation**: Static site deployment via VitePress
|
||||
- **Assets**: Static asset distribution for web components
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
61
.cursor/rules/database-data.mdc
Normal file
@@ -0,0 +1,61 @@
|
||||
# Database and Data Management
|
||||
|
||||
## Database Architecture
|
||||
|
||||
- **SQLite Database**: Primary data storage using `sqlx` with async runtime
|
||||
- **Database Module**: [src-tauri/src/database/](mdc:src-tauri/src/database/)
|
||||
\- Core database operations
|
||||
- **Migration System**: [src-tauri/src/migration.rs](mdc:src-tauri/src/migration.rs)
|
||||
\- Database schema management
|
||||
|
||||
## Data Models
|
||||
|
||||
- **Recording Data**: Stream metadata, recording sessions, and file information
|
||||
- **Room Configuration**: Stream room settings and platform credentials
|
||||
- **Task Management**: Recording task status and progress tracking
|
||||
- **User Preferences**: Application settings and user configurations
|
||||
|
||||
## Frontend Data Layer
|
||||
|
||||
- **Database Interface**: [src/lib/db.ts](mdc:src/lib/db.ts)
|
||||
\- Frontend database operations
|
||||
- **Stores**: [src/lib/stores/](mdc:src/lib/stores/) - State management for data
|
||||
- **Version Management**: [src/lib/stores/version.ts](mdc:src/lib/stores/version.ts)
|
||||
\- Version tracking
|
||||
|
||||
## Data Operations
|
||||
|
||||
- **CRUD Operations**: Create, read, update, delete for all data entities
|
||||
- **Query Optimization**: Efficient SQL queries with proper indexing
|
||||
- **Transaction Support**: ACID compliance for critical operations
|
||||
- **Data Validation**: Input validation and sanitization
|
||||
|
||||
## File Management
|
||||
|
||||
- **Cache Directory**: [src-tauri/cache/](mdc:src-tauri/cache/)
|
||||
\- Temporary file storage
|
||||
- **Upload Directory**: [src-tauri/cache/uploads/](mdc:src-tauri/cache/uploads/)
|
||||
\- User upload storage
|
||||
- **Bilibili Cache**: [src-tauri/cache/bilibili/](mdc:src-tauri/cache/bilibili/)
|
||||
\- Platform-specific cache
|
||||
|
||||
## Data Persistence
|
||||
|
||||
- **SQLite Files**: [src-tauri/data/data_v2.db](mdc:src-tauri/data/data_v2.db)
|
||||
\- Main database file
|
||||
- **Write-Ahead Logging**: WAL mode for concurrent access and performance
|
||||
- **Backup Strategy**: Database backup and recovery procedures
|
||||
- **Migration Handling**: Automatic schema updates and data migration
|
||||
|
||||
## Development Guidelines
|
||||
|
||||
- Use prepared statements to prevent SQL injection
|
||||
- Implement proper error handling for database operations
|
||||
- Use transactions for multi-step operations
|
||||
- Follow database naming conventions consistently
|
||||
- Test database operations with sample data
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
47
.cursor/rules/frontend-development.mdc
Normal file
@@ -0,0 +1,47 @@
|
||||
# Frontend Development Guidelines
|
||||
|
||||
## Svelte 3 Best Practices
|
||||
|
||||
- Use Svelte 3 syntax with `<script>` tags for component logic
|
||||
- Prefer reactive statements with `$:` for derived state
|
||||
- Use stores from [src/lib/stores/](mdc:src/lib/stores/) for global state management
|
||||
- Import components from [src/lib/components/](mdc:src/lib/components/)
|
||||
|
||||
## TypeScript Configuration
|
||||
|
||||
- Follow the configuration in [tsconfig.json](mdc:tsconfig.json)
|
||||
- Use strict type checking with `checkJs: true`
|
||||
- Extends `@tsconfig/svelte` for Svelte-specific TypeScript settings
|
||||
- Base URL is set to workspace root for clean imports
|
||||
|
||||
## Component Structure
|
||||
|
||||
- **Page components**: Located in [src/page/](mdc:src/page/) directory
|
||||
- **Reusable components**: Located in [src/lib/components/](mdc:src/lib/components/)
|
||||
directory
|
||||
- **Layout components**: [src/App.svelte](mdc:src/App.svelte),
|
||||
[src/AppClip.svelte](mdc:src/AppClip.svelte), [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
|
||||
## Styling
|
||||
|
||||
- Use Tailwind CSS classes for styling
|
||||
- Configuration in [tailwind.config.cjs](mdc:tailwind.config.cjs)
|
||||
- PostCSS configuration in [postcss.config.cjs](mdc:postcss.config.cjs)
|
||||
- Global styles in [src/styles.css](mdc:src/styles.css)
|
||||
|
||||
## Entry Points
|
||||
|
||||
- **Main app**: [src/main.ts](mdc:src/main.ts) - Main application entry
|
||||
- **Clip mode**: [src/main_clip.ts](mdc:src/main_clip.ts) - Clip editing interface
|
||||
- **Live mode**: [src/main_live.ts](mdc:src/main_live.ts) - Live streaming interface
|
||||
|
||||
## Development Workflow
|
||||
|
||||
- Use `yarn dev` for frontend-only development
|
||||
- Use `yarn tauri dev` for full Tauri development
|
||||
- Use `yarn check` for TypeScript and Svelte type checking
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
53
.cursor/rules/project-overview.mdc
Normal file
@@ -0,0 +1,53 @@
|
||||
# BiliBili ShadowReplay Project Overview
|
||||
|
||||
This is a Tauri-based desktop application for caching live streams and performing
|
||||
real-time editing and submission. It supports Bilibili and Douyin platforms.
|
||||
|
||||
## Project Structure
|
||||
|
||||
### Frontend (Svelte + TypeScript)
|
||||
|
||||
- **Main entry points**: [src/main.ts](mdc:src/main.ts),
|
||||
[src/main_clip.ts](mdc:src/main_clip.ts), [src/main_live.ts](mdc:src/main_live.ts)
|
||||
- **App components**: [src/App.svelte](mdc:src/App.svelte),
|
||||
[src/AppClip.svelte](mdc:src/AppClip.svelte), [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
- **Pages**: Located in [src/page/](mdc:src/page/) directory
|
||||
- **Components**: Located in [src/lib/components/](mdc:src/lib/components/) directory
|
||||
- **Stores**: Located in [src/lib/stores/](mdc:src/lib/stores/) directory
|
||||
|
||||
### Backend (Rust + Tauri)
|
||||
|
||||
- **Main entry**: [src-tauri/src/main.rs](mdc:src-tauri/src/main.rs)
|
||||
- **Core modules**:
|
||||
- [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/) - Stream recording functionality
|
||||
- [src-tauri/src/database/](mdc:src-tauri/src/database/) - Database operations
|
||||
- [src-tauri/src/handlers/](mdc:src-tauri/src/handlers/) - Tauri command handlers
|
||||
- **Custom crate**:
|
||||
[src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/) -
|
||||
Danmaku stream processing
|
||||
|
||||
### Configuration
|
||||
|
||||
- **Frontend config**: [tsconfig.json](mdc:tsconfig.json),
|
||||
[vite.config.ts](mdc:vite.config.ts), [tailwind.config.cjs](mdc:tailwind.config.cjs)
|
||||
- **Backend config**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml), [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
|
||||
- **Example config**: [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
|
||||
## Key Technologies
|
||||
|
||||
- **Frontend**: Svelte 3, TypeScript, Tailwind CSS, Flowbite
|
||||
- **Backend**: Rust, Tauri 2, SQLite, FFmpeg
|
||||
- **AI Features**: LangChain, Whisper for transcription
|
||||
- **Build Tools**: Vite, VitePress for documentation
|
||||
|
||||
## Development Commands
|
||||
|
||||
- `yarn dev` - Start development server
|
||||
- `yarn tauri dev` - Start Tauri development
|
||||
- `yarn build` - Build frontend
|
||||
- `yarn docs:dev` - Start documentation server
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
56
.cursor/rules/rust-backend.mdc
Normal file
@@ -0,0 +1,56 @@
|
||||
# Rust Backend Development Guidelines
|
||||
|
||||
## Project Structure
|
||||
|
||||
- **Main entry**: [src-tauri/src/main.rs](mdc:src-tauri/src/main.rs)
|
||||
\- Application entry point
|
||||
- **Core modules**:
|
||||
- [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/)
|
||||
\- Stream recording and management
|
||||
- [src-tauri/src/database/](mdc:src-tauri/src/database/)
|
||||
\- SQLite database operations
|
||||
- [src-tauri/src/handlers/](mdc:src-tauri/src/handlers/)
|
||||
\- Tauri command handlers
|
||||
- [src-tauri/src/subtitle_generator/](mdc:src-tauri/src/subtitle_generator/)
|
||||
\- AI-powered subtitle generation
|
||||
|
||||
## Custom Crates
|
||||
|
||||
- **danmu_stream**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/)
|
||||
\- Danmaku stream processing library
|
||||
|
||||
## Dependencies
|
||||
|
||||
- **Tauri 2**: Core framework for desktop app functionality
|
||||
- **FFmpeg**: Video/audio processing via `async-ffmpeg-sidecar`
|
||||
- **Whisper**: AI transcription via `whisper-rs` (CUDA support available)
|
||||
- **LangChain**: AI agent functionality
|
||||
- **SQLite**: Database via `sqlx` with async runtime
|
||||
|
||||
## Configuration
|
||||
|
||||
- **Cargo.toml**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml)
|
||||
\- Dependencies and features
|
||||
- **Tauri config**: [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
|
||||
\- App configuration
|
||||
- **Example config**: [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
\- User configuration template
|
||||
|
||||
## Features
|
||||
|
||||
- **default**: Includes GUI and core functionality
|
||||
- **cuda**: Enables CUDA acceleration for Whisper transcription
|
||||
- **headless**: Headless mode without GUI
|
||||
- **custom-protocol**: Required for production builds
|
||||
|
||||
## Development Commands
|
||||
|
||||
- `yarn tauri dev` - Start Tauri development with hot reload
|
||||
- `yarn tauri build` - Build production application
|
||||
- `cargo check` - Check Rust code without building
|
||||
- `cargo test` - Run Rust tests
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
60
.cursor/rules/streaming-recording.mdc
Normal file
@@ -0,0 +1,60 @@
|
||||
# Streaming and Recording System
|
||||
|
||||
## Core Recording Components
|
||||
|
||||
- **Recorder Manager**: [src-tauri/src/recorder_manager.rs](mdc:src-tauri/src/recorder_manager.rs)
|
||||
\- Main recording orchestration
|
||||
- **Recorder**: [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/)
|
||||
\- Individual stream recording logic
|
||||
- **Danmaku Stream**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/)
|
||||
\- Custom crate for bullet comment processing
|
||||
|
||||
## Supported Platforms
|
||||
|
||||
- **Bilibili**: Main platform support with live stream caching
|
||||
- **Douyin**: TikTok's Chinese platform support
|
||||
- **Multi-stream**: Support for recording multiple streams simultaneously
|
||||
|
||||
## Recording Features
|
||||
|
||||
- **Live Caching**: Real-time stream recording and buffering
|
||||
- **Time-based Clipping**: Extract specific time segments from recorded streams
|
||||
- **Danmaku Capture**: Record bullet comments and chat messages
|
||||
- **Quality Control**: Configurable recording quality and format options
|
||||
|
||||
## Frontend Interfaces
|
||||
|
||||
- **Live Mode**: [src/AppLive.svelte](mdc:src/AppLive.svelte)
|
||||
\- Live streaming interface
|
||||
- **Clip Mode**: [src/AppClip.svelte](mdc:src/AppClip.svelte)
|
||||
\- Video editing and clipping
|
||||
- **Room Management**: [src/page/Room.svelte](mdc:src/page/Room.svelte)
|
||||
\- Stream room configuration
|
||||
- **Task Management**: [src/page/Task.svelte](mdc:src/page/Task.svelte)
|
||||
\- Recording task monitoring
|
||||
|
||||
## Technical Implementation
|
||||
|
||||
- **FFmpeg Integration**: Video/audio processing via `async-ffmpeg-sidecar`
|
||||
- **M3U8 Support**: HLS stream processing with `m3u8-rs`
|
||||
- **Async Processing**: Non-blocking I/O with `tokio` runtime
|
||||
- **Database Storage**: SQLite for metadata and recording information
|
||||
|
||||
## Configuration
|
||||
|
||||
- **Recording Settings**: Configure in [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
|
||||
- **FFmpeg Path**: Set FFmpeg binary location for video processing
|
||||
- **Storage Paths**: Configure cache and output directories
|
||||
- **Quality Settings**: Adjust recording bitrate and format options
|
||||
|
||||
## Development Workflow
|
||||
|
||||
- Use [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/) for core recording logic
|
||||
- Test with [src-tauri/tests/](mdc:src-tauri/tests/) directory
|
||||
- Monitor recording progress via progress manager
|
||||
- Handle errors gracefully with custom error types
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
|
||||
---
|
||||
36
.devcontainer/Dockerfile
Normal file
@@ -0,0 +1,36 @@
|
||||
ARG VARIANT=bookworm-slim
|
||||
FROM debian:${VARIANT}
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Arguments
|
||||
ARG CONTAINER_USER=vscode
|
||||
ARG CONTAINER_GROUP=vscode
|
||||
|
||||
# Install dependencies
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y \
|
||||
build-essential \
|
||||
clang \
|
||||
cmake \
|
||||
curl \
|
||||
file \
|
||||
git \
|
||||
libayatana-appindicator3-dev \
|
||||
librsvg2-dev \
|
||||
libssl-dev \
|
||||
libwebkit2gtk-4.1-dev \
|
||||
libxdo-dev \
|
||||
pkg-config \
|
||||
wget \
|
||||
&& apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts
|
||||
|
||||
# Set users
|
||||
RUN adduser --disabled-password --gecos "" ${CONTAINER_USER}
|
||||
USER ${CONTAINER_USER}
|
||||
WORKDIR /home/${CONTAINER_USER}
|
||||
|
||||
# Install rustup
|
||||
RUN curl --proto "=https" --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal
|
||||
ENV PATH=${PATH}:/home/${CONTAINER_USER}/.cargo/bin
|
||||
|
||||
CMD [ "/bin/bash" ]
|
||||
31
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,31 @@
|
||||
{
|
||||
"name": "vscode",
|
||||
"build": {
|
||||
"dockerfile": "Dockerfile",
|
||||
"args": {
|
||||
"CONTAINER_USER": "vscode",
|
||||
"CONTAINER_GROUP": "vscode"
|
||||
}
|
||||
},
|
||||
"features": {
|
||||
"ghcr.io/devcontainers/features/node:1": {
|
||||
"version": "latest"
|
||||
}
|
||||
},
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"settings": {
|
||||
"lldb.executable": "/usr/bin/lldb",
|
||||
"files.watcherExclude": {
|
||||
"**/target/**": true
|
||||
}
|
||||
},
|
||||
"extensions": [
|
||||
"vadimcn.vscode-lldb",
|
||||
"rust-lang.rust-analyzer",
|
||||
"tamasfe.even-better-toml"
|
||||
]
|
||||
}
|
||||
},
|
||||
"remoteUser": "vscode"
|
||||
}
|
||||
7
.github/CONTRIBUTING.md
vendored
@@ -12,7 +12,8 @@
|
||||
|
||||
### Windows
|
||||
|
||||
Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于 Whisper 是否使用 GPU 加速。`cpu` 版本使用 CPU 进行推理,`cuda` 版本使用 GPU 进行推理。
|
||||
Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于 Whisper 是否使用 GPU 加速。
|
||||
`cpu` 版本使用 CPU 进行推理,`cuda` 版本使用 GPU 进行推理。
|
||||
|
||||
默认运行为 `cpu` 版本,使用 `yarn tauri dev --features cuda` 命令运行 `cuda` 版本。
|
||||
|
||||
@@ -20,7 +21,9 @@ Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于
|
||||
|
||||
1. 安装 LLVM 且配置相关环境变量,详情见 [LLVM Windows Setup](https://llvm.org/docs/GettingStarted.html#building-llvm-on-windows);
|
||||
|
||||
2. 安装 CUDA Toolkit,详情见 [CUDA Windows Setup](https://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html);要注意,安装时请勾选 **VisualStudio integration**。
|
||||
2. 安装 CUDA Toolkit,详情见
|
||||
[CUDA Windows Setup](https://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html);
|
||||
要注意,安装时请勾选 **VisualStudio integration**。
|
||||
|
||||
### 常见问题
|
||||
|
||||
|
||||
21
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -1,21 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: 提交一个 BUG
|
||||
title: "[BUG]"
|
||||
labels: bug
|
||||
assignees: Xinrea
|
||||
---
|
||||
|
||||
**描述:**
|
||||
简要描述一下这个 BUG 的现象
|
||||
|
||||
**日志和截图:**
|
||||
如果可以的话,请尽量附上相关截图和日志文件(日志是位于安装目录下,名为 bsr.log 的文件)。
|
||||
|
||||
**相关信息:**
|
||||
|
||||
- 程序版本:
|
||||
- 系统类型:
|
||||
|
||||
**其他**
|
||||
任何其他想说的
|
||||
47
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
name: Bug Report
|
||||
description: 提交 BUG 报告.
|
||||
title: "[bug] "
|
||||
labels: ["bug"]
|
||||
assignees:
|
||||
- Xinrea
|
||||
body:
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: 提交须知
|
||||
description: 请确认以下内容
|
||||
options:
|
||||
- label: 我是在最新版本上发现的此问题
|
||||
required: true
|
||||
- label: 我已阅读 [常见问题](https://bsr.xinrea.cn/usage/faq.html) 的说明
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: app_type
|
||||
attributes:
|
||||
label: 以哪种方式使用的该软件?
|
||||
multiple: false
|
||||
options:
|
||||
- Docker 镜像
|
||||
- 桌面应用
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: 运行环境
|
||||
multiple: false
|
||||
options:
|
||||
- Linux
|
||||
- Windows
|
||||
- MacOS
|
||||
- Docker
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: BUG 描述
|
||||
description: 请尽可能详细描述 BUG 的现象以及复现的方法
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: 日志
|
||||
description: 请粘贴日志内容或是上传日志文件(在主窗口的设置页面,提供了一键打开日志目录所在位置的按钮;当你打开日志目录所在位置后,进入 logs 目录,找到后缀名为 log 的文件)
|
||||
validations:
|
||||
required: true
|
||||
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -1,20 +0,0 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: 提交一个新功能的建议
|
||||
title: "[feature]"
|
||||
labels: enhancement
|
||||
assignees: Xinrea
|
||||
|
||||
---
|
||||
|
||||
**遇到的问题:**
|
||||
在使用过程中遇到了什么问题让你想要提出建议
|
||||
|
||||
**想要的功能:**
|
||||
想要怎样的新功能来解决这个问题
|
||||
|
||||
**通过什么方式实现(有思路的话):**
|
||||
如果有相关的实现思路或者是参考,可以在此提供
|
||||
|
||||
**其他:**
|
||||
其他任何想说的话
|
||||
13
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
name: Feature Request
|
||||
description: 提交新功能的需求
|
||||
title: "[feature] "
|
||||
labels: ["feature"]
|
||||
assignees:
|
||||
- Xinrea
|
||||
body:
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 需求描述
|
||||
description: 请尽可能详细描述你想要的新功能
|
||||
validations:
|
||||
required: true
|
||||
46
.github/workflows/check.yml
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
name: Rust Check
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- "**/*.rs"
|
||||
- "src-tauri/Cargo.toml"
|
||||
- "src-tauri/Cargo.lock"
|
||||
|
||||
jobs:
|
||||
check:
|
||||
runs-on: self-linux
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: rustfmt clippy
|
||||
|
||||
- name: Install dependencies (ubuntu only)
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf ffmpeg
|
||||
|
||||
- name: Check formatting
|
||||
run: cargo fmt --check
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Check clippy
|
||||
run: cargo clippy
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Check clippy (headless)
|
||||
run: cargo clippy --no-default-features --features headless
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Check tests
|
||||
run: cargo test -v
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Check tests (headless)
|
||||
run: cargo test --no-default-features --features headless -v
|
||||
working-directory: src-tauri
|
||||
21
.github/workflows/main.yml
vendored
@@ -59,11 +59,6 @@ jobs:
|
||||
if: matrix.platform == 'windows-latest' && matrix.features == 'cuda'
|
||||
uses: Jimver/cuda-toolkit@v0.2.24
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: "./src-tauri -> target"
|
||||
|
||||
- name: Setup ffmpeg
|
||||
if: matrix.platform == 'windows-latest'
|
||||
working-directory: ./
|
||||
@@ -87,6 +82,19 @@ jobs:
|
||||
Copy-Item "$cudaPath\cublas64*.dll" -Destination $targetPath
|
||||
Copy-Item "$cudaPath\cublasLt64*.dll" -Destination $targetPath
|
||||
|
||||
- name: Get previous tag
|
||||
id: get_previous_tag
|
||||
run: |
|
||||
# Get the previous tag (excluding the current one being pushed)
|
||||
PREVIOUS_TAG=$(git describe --tags --abbrev=0 HEAD~1 2>/dev/null || echo "")
|
||||
if [ -z "$PREVIOUS_TAG" ]; then
|
||||
# If no previous tag found, use the first commit
|
||||
PREVIOUS_TAG=$(git rev-list --max-parents=0 HEAD | head -1)
|
||||
fi
|
||||
echo "previous_tag=$PREVIOUS_TAG" >> $GITHUB_OUTPUT
|
||||
echo "current_tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||
shell: bash
|
||||
|
||||
- uses: tauri-apps/tauri-action@v0
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -96,8 +104,7 @@ jobs:
|
||||
with:
|
||||
tagName: v__VERSION__
|
||||
releaseName: "BiliBili ShadowReplay v__VERSION__"
|
||||
releaseBody: "See the assets to download this version and install."
|
||||
releaseBody: "> [!NOTE]\n> 如果你是第一次下载安装,请参考 [安装准备](https://bsr.xinrea.cn/getting-started/installation/desktop.html) 选择合适的版本。\n> Changelog: https://github.com/Xinrea/bili-shadowreplay/compare/${{ steps.get_previous_tag.outputs.previous_tag }}...${{ steps.get_previous_tag.outputs.current_tag }}"
|
||||
releaseDraft: true
|
||||
prerelease: false
|
||||
args: ${{ matrix.args }} ${{ matrix.platform == 'windows-latest' && matrix.features == 'cuda' && '--config src-tauri/tauri.windows.cuda.conf.json' || '' }}
|
||||
includeDebug: true
|
||||
|
||||
1
.gitignore
vendored
@@ -11,6 +11,7 @@ node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
/target/
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
|
||||
5
.markdownlint.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"MD033": {
|
||||
"allowed_elements": ["nobr", "sup"]
|
||||
}
|
||||
}
|
||||
51
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,51 @@
|
||||
fail_fast: true
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v6.0.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
exclude: \.json$
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.36.2
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: cargo-fmt
|
||||
name: cargo fmt
|
||||
entry: cargo fmt --manifest-path src-tauri/Cargo.toml --
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- id: cargo-clippy
|
||||
name: cargo clippy
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo clippy --manifest-path src-tauri/Cargo.toml
|
||||
|
||||
- id: cargo-clippy-headless
|
||||
name: cargo clippy headless
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo clippy --manifest-path src-tauri/Cargo.toml --no-default-features --features headless
|
||||
|
||||
- id: cargo-test
|
||||
name: cargo test
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo test --manifest-path src-tauri/Cargo.toml
|
||||
|
||||
- id: cargo-test-headless
|
||||
name: cargo test headless
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
entry: cargo test --manifest-path src-tauri/Cargo.toml --no-default-features --features headless
|
||||
21
Dockerfile
@@ -23,7 +23,7 @@ COPY . .
|
||||
RUN yarn build
|
||||
|
||||
# Build Rust backend
|
||||
FROM rust:1.86-slim AS rust-builder
|
||||
FROM rust:1.90-slim AS rust-builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@@ -48,15 +48,9 @@ COPY src-tauri/crates ./src-tauri/crates
|
||||
WORKDIR /app/src-tauri
|
||||
RUN rustup component add rustfmt
|
||||
RUN cargo build --no-default-features --features headless --release
|
||||
# Download and install FFmpeg static build
|
||||
RUN wget https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz \
|
||||
&& tar xf ffmpeg-release-amd64-static.tar.xz \
|
||||
&& mv ffmpeg-*-static/ffmpeg ./ \
|
||||
&& mv ffmpeg-*-static/ffprobe ./ \
|
||||
&& rm -rf ffmpeg-*-static ffmpeg-release-amd64-static.tar.xz
|
||||
|
||||
# Final stage
|
||||
FROM debian:bookworm-slim AS final
|
||||
FROM debian:trixie-slim AS final
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
@@ -65,9 +59,16 @@ RUN apt-get update && apt-get install -y \
|
||||
libssl3 \
|
||||
ca-certificates \
|
||||
fonts-wqy-microhei \
|
||||
netbase \
|
||||
nscd \
|
||||
ffmpeg \
|
||||
&& update-ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
|
||||
RUN touch /etc/netgroup
|
||||
RUN mkdir -p /var/run/nscd && chmod 755 /var/run/nscd
|
||||
|
||||
# Add /app to PATH
|
||||
ENV PATH="/app:${PATH}"
|
||||
|
||||
@@ -76,11 +77,9 @@ COPY --from=frontend-builder /app/dist ./dist
|
||||
|
||||
# Copy built Rust binary
|
||||
COPY --from=rust-builder /app/src-tauri/target/release/bili-shadowreplay .
|
||||
COPY --from=rust-builder /app/src-tauri/ffmpeg ./ffmpeg
|
||||
COPY --from=rust-builder /app/src-tauri/ffprobe ./ffprobe
|
||||
|
||||
# Expose port
|
||||
EXPOSE 3000
|
||||
|
||||
# Run the application
|
||||
CMD ["./bili-shadowreplay"]
|
||||
CMD ["sh", "-c", "nscd && ./bili-shadowreplay"]
|
||||
|
||||
13
README.md
@@ -4,24 +4,29 @@
|
||||
|
||||

|
||||

|
||||
|
||||

|
||||

|
||||
[](https://deepwiki.com/Xinrea/bili-shadowreplay)
|
||||
|
||||
BiliBili ShadowReplay 是一个缓存直播并进行实时编辑投稿的工具。通过划定时间区间,并编辑简单的必需信息,即可完成直播切片以及投稿,将整个流程压缩到分钟级。同时,也支持对缓存的历史直播进行回放,以及相同的切片编辑投稿处理流程。
|
||||
|
||||
目前仅支持 B 站和抖音平台的直播。
|
||||
|
||||

|
||||
[](https://www.star-history.com/#Xinrea/bili-shadowreplay&Date)
|
||||
|
||||
## 安装和使用
|
||||
|
||||

|
||||
|
||||
前往网站查看说明:[BiliBili ShadowReplay](https://bsr.xinrea.cn/)
|
||||
|
||||
## 参与开发
|
||||
|
||||
[Contributing](.github/CONTRIBUTING.md)
|
||||
可以通过 [DeepWiki](https://deepwiki.com/Xinrea/bili-shadowreplay) 了解本项目。
|
||||
|
||||
贡献指南:[Contributing](.github/CONTRIBUTING.md)
|
||||
|
||||
## 赞助
|
||||
|
||||

|
||||
<!-- markdownlint-disable MD033 -->
|
||||
<img src="docs/public/images/donate.png" alt="donate" width="300">
|
||||
|
||||
2
_typos.toml
Normal file
@@ -0,0 +1,2 @@
|
||||
[default.extend-identifiers]
|
||||
pull_datas = "pull_datas"
|
||||
@@ -1,7 +1,8 @@
|
||||
import { defineConfig } from "vitepress";
|
||||
import { withMermaid } from "vitepress-plugin-mermaid";
|
||||
|
||||
// https://vitepress.dev/reference/site-config
|
||||
export default defineConfig({
|
||||
export default withMermaid({
|
||||
title: "BiliBili ShadowReplay",
|
||||
description: "直播录制/实时回放/剪辑/投稿工具",
|
||||
themeConfig: {
|
||||
@@ -18,21 +19,55 @@ export default defineConfig({
|
||||
{
|
||||
text: "开始使用",
|
||||
items: [
|
||||
{ text: "安装准备", link: "/getting-started/installation" },
|
||||
{ text: "配置使用", link: "/getting-started/configuration" },
|
||||
{ text: "FFmpeg 配置", link: "/getting-started/ffmpeg" },
|
||||
{
|
||||
text: "安装准备",
|
||||
items: [
|
||||
{
|
||||
text: "桌面端安装",
|
||||
link: "/getting-started/installation/desktop",
|
||||
},
|
||||
{
|
||||
text: "Docker 安装",
|
||||
link: "/getting-started/installation/docker",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
text: "配置使用",
|
||||
items: [
|
||||
{ text: "账号配置", link: "/getting-started/config/account" },
|
||||
{ text: "FFmpeg 配置", link: "/getting-started/config/ffmpeg" },
|
||||
{ text: "Whisper 配置", link: "/getting-started/config/whisper" },
|
||||
{ text: "LLM 配置", link: "/getting-started/config/llm" },
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
text: "说明文档",
|
||||
items: [
|
||||
{ text: "功能说明", link: "/usage/features" },
|
||||
{
|
||||
text: "功能说明",
|
||||
items: [
|
||||
{ text: "工作流程", link: "/usage/features/workflow" },
|
||||
{ text: "直播间管理", link: "/usage/features/room" },
|
||||
{ text: "切片功能", link: "/usage/features/clip" },
|
||||
{ text: "字幕功能", link: "/usage/features/subtitle" },
|
||||
{ text: "弹幕功能", link: "/usage/features/danmaku" },
|
||||
{ text: "Webhook", link: "/usage/features/webhook" },
|
||||
],
|
||||
},
|
||||
{ text: "常见问题", link: "/usage/faq" },
|
||||
],
|
||||
},
|
||||
{
|
||||
text: "开发文档",
|
||||
items: [{ text: "架构设计", link: "/develop/architecture" }],
|
||||
items: [
|
||||
{
|
||||
text: "DeepWiki",
|
||||
link: "https://deepwiki.com/Xinrea/bili-shadowreplay",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
# 架构设计
|
||||
@@ -1,27 +1,12 @@
|
||||
# 配置使用
|
||||
|
||||
## 账号配置
|
||||
# 账号配置
|
||||
|
||||
要添加直播间,至少需要配置一个同平台的账号。在账号页面,你可以通过添加账号按钮添加一个账号。
|
||||
|
||||
- B 站账号:目前支持扫码登录和 Cookie 手动配置两种方式,推荐使用扫码登录
|
||||
- 抖音账号:目前仅支持 Cookie 手动配置登陆
|
||||
|
||||
### 抖音账号配置
|
||||
## 抖音账号配置
|
||||
|
||||
首先确保已经登录抖音,然后打开[个人主页](https://www.douyin.com/user/self),右键单击网页,在菜单中选择 `检查(Inspect)`,打开开发者工具,切换到 `网络(Network)` 选项卡,然后刷新网页,此时能在列表中找到 `self` 请求(一般是列表中第一个),单击该请求,查看`请求标头`,在 `请求标头` 中找到 `Cookie`,复制该字段的值,粘贴到配置页面的 `Cookie` 输入框中,要注意复制完全。
|
||||
|
||||

|
||||
|
||||
## FFmpeg 配置
|
||||
|
||||
如果想要使用切片生成和压制功能,请确保 FFmpeg 已正确配置;除了 Windows 平台打包自带 FFfmpeg 以外,其他平台需要手动安装 FFfmpeg,请参考 [FFfmpeg 配置](/getting-started/ffmpeg)。
|
||||
|
||||
## Whisper 模型配置
|
||||
|
||||
要使用 AI 字幕识别功能,需要在设置页面配置 Whisper 模型路径,模型文件可以从网络上下载,例如:
|
||||
|
||||
- [Whisper.cpp(国内镜像,内容较旧)](https://www.modelscope.cn/models/cjc1887415157/whisper.cpp/files)
|
||||
- [Whisper.cpp](https://huggingface.co/ggerganov/whisper.cpp/tree/main)
|
||||
|
||||
可以跟据自己的需求选择不同的模型,要注意带有 `en` 的模型是英文模型,其他模型为多语言模型。
|
||||
9
docs/getting-started/config/llm.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# LLM 配置
|
||||
|
||||

|
||||
|
||||
助手页面的 AI Agent 助手功能需要配置大模型,目前仅支持配置 OpenAI 协议兼容的大模型服务。
|
||||
|
||||
本软件并不提供大模型服务,请自行选择服务提供商。要注意,使用 AI Agent 助手需要消耗比普通对话更多的 Token,请确保有足够的 Token 余额。
|
||||
|
||||
此外,AI Agent 的功能需要大模型支持 Function Calling 功能,否则无法正常调用工具。
|
||||
46
docs/getting-started/config/whisper.md
Normal file
@@ -0,0 +1,46 @@
|
||||
# Whisper 配置
|
||||
|
||||
要使用 AI 字幕识别功能,需要在设置页面配置 Whisper。目前可以选择使用本地运行 Whisper 模型,或是使用在线的 Whisper 服务(通常需要付
|
||||
费获取 API Key)。
|
||||
|
||||
> [!NOTE]
|
||||
> 其实有许多更好的中文字幕识别解决方案,但是这类服务通常需要将文件上传到对象存储后异步处理,考虑到实现的复杂度,选择了使用本地运行 Whisper 模型或是使
|
||||
> 用在线的 Whisper 服务,在请求返回时能够直接获取字幕生成结果。
|
||||
|
||||
## 本地运行 Whisper 模型
|
||||
|
||||

|
||||
|
||||
如果需要使用本地运行 Whisper 模型进行字幕生成,需要下载 Whisper.cpp 模型,并在设置中指定模型路径。模型文件可以从网络上下载,例如:
|
||||
|
||||
- [Whisper.cpp(国内镜像,内容较旧)](https://www.modelscope.cn/models/cjc1887415157/whisper.cpp/files)
|
||||
- [Whisper.cpp](https://huggingface.co/ggerganov/whisper.cpp/tree/main)
|
||||
|
||||
可以跟据自己的需求选择不同的模型,要注意带有 `en` 的模型是英文模型,其他模型为多语言模型。
|
||||
|
||||
模型文件的大小通常意味着其在运行时资源占用的大小,因此请根据电脑配置选择合适的模型。此外,GPU 版本与 CPU 版本在字幕生成速度上存在**巨大差异**,因此
|
||||
推荐使用 GPU 版本进行本地处理(目前仅支持 Nvidia GPU)。
|
||||
|
||||
## 使用在线 Whisper 服务
|
||||
|
||||

|
||||
|
||||
如果需要使用在线的 Whisper 服务进行字幕生成,可以在设置中切换为在线 Whisper,并配置好 API Key。提供 Whisper 服务的平台并非只有
|
||||
OpenAI 一家,许多云服务平台也提供 Whisper 服务。
|
||||
|
||||
## 字幕识别质量的调优
|
||||
|
||||
目前在设置中支持设置 Whisper 语言和 Whisper 提示词,这些设置对于本地和在线的 Whisper 服务都有效。
|
||||
|
||||
通常情况下,`auto` 语言选项能够自动识别语音语言,并生成相应语言的字幕。如果需要生成其他语言的字幕,或是生成的字幕语言不匹配,可以手动配置指定的语言。
|
||||
根据 OpenAI 官方文档中对于 `language` 参数的描述,目前支持的语言包括
|
||||
|
||||
Afrikaans, Arabic, Armenian, Azerbaijani, Belarusian, Bosnian, Bulgarian,
|
||||
Catalan, Chinese, Croatian, Czech, Danish, Dutch, English, Estonian, Finnish,
|
||||
French, Galician, German, Greek, Hebrew, Hindi, Hungarian, Icelandic,
|
||||
Indonesian, Italian, Japanese, Kannada, Kazakh, Korean, Latvian, Lithuanian,
|
||||
Macedonian, Malay, Marathi, Maori, Nepali, Norwegian, Persian, Polish,
|
||||
Portuguese, Romanian, Russian, Serbian, Slovak, Slovenian, Spanish, Swahili,
|
||||
Swedish, Tagalog, Tamil, Thai, Turkish, Ukrainian, Urdu, Vietnamese, and Welsh.
|
||||
|
||||
提示词可以优化生成的字幕的风格(也会一定程度上影响质量),要注意,Whisper 无法理解复杂的提示词,你可以在提示词中使用一些简单的描述,让其在选择词汇时使用偏向于提示词所描述的领域相关的词汇,以避免出现毫不相干领域的词汇;或是让它在标点符号的使用上参照提示词的风格。
|
||||
@@ -1,66 +0,0 @@
|
||||
# 安装准备
|
||||
|
||||
## 桌面端安装
|
||||
|
||||
桌面端目前提供了 Windows、Linux 和 MacOS 三个平台的安装包。
|
||||
|
||||
安装包分为两个版本,普通版和 debug 版,普通版适合大部分用户使用,debug 版包含了更多的调试信息,适合开发者使用;由于程序会对账号等敏感信息进行管理,请从信任的来源进行下载;所有版本均可在 [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) 页面下载安装。
|
||||
|
||||
### Windows
|
||||
|
||||
由于程序内置 Whisper 字幕识别模型支持,Windows 版本分为两种:
|
||||
|
||||
- **普通版本**:内置了 Whisper GPU 加速,字幕识别较快,体积较大,只支持 Nvidia 显卡
|
||||
- **CPU 版本**: 使用 CPU 进行字幕识别推理,速度较慢
|
||||
|
||||
请根据自己的显卡情况选择合适的版本进行下载。
|
||||
|
||||
### Linux
|
||||
|
||||
Linux 版本目前仅支持使用 CPU 推理,且测试较少,可能存在一些问题,遇到问题请及时反馈。
|
||||
|
||||
### MacOS
|
||||
|
||||
MacOS 版本内置 Metal GPU 加速;安装后首次运行,会提示无法打开从网络下载的软件,请在设置-隐私与安全性下,选择仍然打开以允许程序运行。
|
||||
|
||||
## Docker 部署
|
||||
|
||||
BiliBili ShadowReplay 提供了服务端部署的能力,提供 Web 控制界面,可以用于在服务器等无图形界面环境下部署使用。
|
||||
|
||||
### 镜像获取
|
||||
|
||||
```bash
|
||||
# 拉取最新版本
|
||||
docker pull ghcr.io/xinrea/bili-shadowreplay:latest
|
||||
# 拉取指定版本
|
||||
docker pull ghcr.io/xinrea/bili-shadowreplay:2.5.0
|
||||
# 速度太慢?从镜像源拉取
|
||||
docker pull ghcr.nju.edu.cn/xinrea/bili-shadowreplay:latest
|
||||
```
|
||||
|
||||
### 镜像使用
|
||||
|
||||
使用方法:
|
||||
|
||||
```bash
|
||||
sudo docker run -it -d\
|
||||
-p 3000:3000 \
|
||||
-v $DATA_DIR:/app/data \
|
||||
-v $CACHE_DIR:/app/cache \
|
||||
-v $OUTPUT_DIR:/app/output \
|
||||
-v $WHISPER_MODEL:/app/whisper_model.bin \
|
||||
--name bili-shadowreplay \
|
||||
ghcr.io/xinrea/bili-shadowreplay:latest
|
||||
```
|
||||
|
||||
其中:
|
||||
|
||||
- `$DATA_DIR`:为数据目录,对应于桌面版的数据目录,
|
||||
|
||||
Windows 下位于 `C:\Users\{用户名}\AppData\Roaming\cn.vjoi.bilishadowreplay`;
|
||||
|
||||
MacOS 下位于 `/Users/{user}/Library/Application Support/cn.vjoi.bilishadowreplay`
|
||||
|
||||
- `$CACHE_DIR`:为缓存目录,对应于桌面版的缓存目录;
|
||||
- `$OUTPUT_DIR`:为输出目录,对应于桌面版的输出目录;
|
||||
- `$WHISPER_MODEL`:为 Whisper 模型文件路径,对应于桌面版的 Whisper 模型文件路径。
|
||||
22
docs/getting-started/installation/desktop.md
Normal file
@@ -0,0 +1,22 @@
|
||||
# 桌面端安装
|
||||
|
||||
桌面端目前提供了 Windows、Linux 和 MacOS 三个平台的安装包。
|
||||
|
||||
由于程序会对账号等敏感信息进行管理,请从信任的来源进行下载;所有版本均可在 [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) 页面下载安装。
|
||||
|
||||
## Windows
|
||||
|
||||
由于程序内置 Whisper 字幕识别模型支持,Windows 版本分为两种:
|
||||
|
||||
- **普通版本**:内置了 Whisper GPU 加速,字幕识别较快,体积较大,只支持 Nvidia 显卡
|
||||
- **CPU 版本**: 使用 CPU 进行字幕识别推理,速度较慢
|
||||
|
||||
请根据自己的显卡情况选择合适的版本进行下载。
|
||||
|
||||
## Linux
|
||||
|
||||
Linux 版本目前仅支持使用 CPU 推理,且测试较少,可能存在一些问题,遇到问题请及时反馈。
|
||||
|
||||
## MacOS
|
||||
|
||||
MacOS 版本内置 Metal GPU 加速;安装后首次运行,会提示无法打开从网络下载的软件,请在设置-隐私与安全性下,选择仍然打开以允许程序运行。
|
||||
41
docs/getting-started/installation/docker.md
Normal file
@@ -0,0 +1,41 @@
|
||||
# Docker 部署
|
||||
|
||||
BiliBili ShadowReplay 提供了服务端部署的能力,提供 Web 控制界面,可以用于在服务器等无图形界面环境下部署使用。
|
||||
|
||||
## 镜像获取
|
||||
|
||||
```bash
|
||||
# 拉取最新版本
|
||||
docker pull ghcr.io/xinrea/bili-shadowreplay:latest
|
||||
# 拉取指定版本
|
||||
docker pull ghcr.io/xinrea/bili-shadowreplay:2.5.0
|
||||
# 速度太慢?从镜像源拉取
|
||||
docker pull ghcr.nju.edu.cn/xinrea/bili-shadowreplay:latest
|
||||
```
|
||||
|
||||
## 镜像使用
|
||||
|
||||
使用方法:
|
||||
|
||||
```bash
|
||||
sudo docker run -it -d\
|
||||
-p 3000:3000 \
|
||||
-v $DATA_DIR:/app/data \
|
||||
-v $CACHE_DIR:/app/cache \
|
||||
-v $OUTPUT_DIR:/app/output \
|
||||
-v $WHISPER_MODEL:/app/whisper_model.bin \
|
||||
--name bili-shadowreplay \
|
||||
ghcr.io/xinrea/bili-shadowreplay:latest
|
||||
```
|
||||
|
||||
其中:
|
||||
|
||||
- `$DATA_DIR`:为数据目录,对应于桌面版的数据目录,
|
||||
|
||||
Windows 下位于 `C:\Users\{用户名}\AppData\Roaming\cn.vjoi.bilishadowreplay`;
|
||||
|
||||
MacOS 下位于 `/Users/{user}/Library/Application Support/cn.vjoi.bilishadowreplay`
|
||||
|
||||
- `$CACHE_DIR`:为缓存目录,对应于桌面版的缓存目录;
|
||||
- `$OUTPUT_DIR`:为输出目录,对应于桌面版的输出目录;
|
||||
- `$WHISPER_MODEL`:为 Whisper 模型文件路径,对应于桌面版的 Whisper 模型文件路径。
|
||||
@@ -11,10 +11,10 @@ hero:
|
||||
actions:
|
||||
- theme: brand
|
||||
text: 开始使用
|
||||
link: /getting-started/installation
|
||||
link: /getting-started/installation/desktop
|
||||
- theme: alt
|
||||
text: 说明文档
|
||||
link: /usage/features
|
||||
link: /usage/features/workflow
|
||||
|
||||
features:
|
||||
- icon: 📹
|
||||
@@ -38,9 +38,9 @@ features:
|
||||
- icon: 🔍
|
||||
title: 云端部署
|
||||
details: 支持 Docker 部署,提供 Web 控制界面
|
||||
- icon: 📦
|
||||
title: 多平台支持
|
||||
details: 桌面端支持 Windows/Linux/macOS
|
||||
- icon: 🤖
|
||||
title: AI Agent 支持
|
||||
details: 支持 AI 助手管理录播,分析直播内容,生成切片
|
||||
---
|
||||
|
||||
## 总览
|
||||
@@ -63,7 +63,7 @@ features:
|
||||
|
||||
## 封面编辑
|
||||
|
||||

|
||||

|
||||
|
||||
## 设置
|
||||
|
||||
|
||||
|
Before Width: | Height: | Size: 555 KiB After Width: | Height: | Size: 195 KiB |
BIN
docs/public/images/ai_agent.png
Normal file
|
After Width: | Height: | Size: 261 KiB |
|
Before Width: | Height: | Size: 1.2 MiB After Width: | Height: | Size: 434 KiB |
BIN
docs/public/images/clip_manage.png
Normal file
|
After Width: | Height: | Size: 234 KiB |
BIN
docs/public/images/clip_preview.png
Normal file
|
After Width: | Height: | Size: 2.3 MiB |
BIN
docs/public/images/cover_edit.png
Normal file
|
After Width: | Height: | Size: 2.1 MiB |
|
Before Width: | Height: | Size: 2.9 MiB |
|
Before Width: | Height: | Size: 2.8 MiB After Width: | Height: | Size: 2.1 MiB |
BIN
docs/public/images/model_config.png
Normal file
|
After Width: | Height: | Size: 383 KiB |
|
Before Width: | Height: | Size: 1.9 MiB After Width: | Height: | Size: 949 KiB |
|
Before Width: | Height: | Size: 622 KiB After Width: | Height: | Size: 244 KiB |
|
Before Width: | Height: | Size: 721 KiB After Width: | Height: | Size: 372 KiB |
BIN
docs/public/images/tasks.png
Normal file
|
After Width: | Height: | Size: 201 KiB |
BIN
docs/public/images/whisper_local.png
Normal file
|
After Width: | Height: | Size: 194 KiB |
BIN
docs/public/images/whisper_online.png
Normal file
|
After Width: | Height: | Size: 199 KiB |
BIN
docs/public/images/whole_clip.png
Normal file
|
After Width: | Height: | Size: 67 KiB |
BIN
docs/public/videos/deeplinking.mp4
Normal file
BIN
docs/public/videos/room_remove.mp4
Normal file
@@ -0,0 +1,31 @@
|
||||
# 常见问题
|
||||
|
||||
## 一、在哪里反馈问题?
|
||||
|
||||
你可以前往 [Github Issues](https://github.com/Xinrea/bili-shadowreplay/issues/new?template=bug_report.md) 提交问题,或是加入[反馈交流群](https://qm.qq.com/q/v4lrE6gyum)。
|
||||
|
||||
1. 在提交问题前,请先阅读其它常见问题,确保你的问题已有解答;
|
||||
2. 其次,请确保你的程序已更新到最新版本;
|
||||
3. 最后,你应准备好提供你的程序日志文件,以便更好地定位问题。
|
||||
|
||||
## 二、在哪里查看日志?
|
||||
|
||||
在主窗口的设置页面,提供了一键打开日志目录所在位置的按钮。当你打开日志目录所在位置后,进入 `logs` 目录,找到后缀名为 `log` 的文件,这便是你需要提供给开发者的日志文件。
|
||||
|
||||
## 三、无法预览直播或是生成切片
|
||||
|
||||
如果你是 macOS 或 Linux 用户,请确保你已安装了 `ffmpeg` 和 `ffprobe` 工具;如果不知道如何安装,请参考 [FFmpeg 配置](/getting-started/config/ffmpeg)。
|
||||
|
||||
如果你是 Windows 用户,程序目录下应当自带了 `ffmpeg` 和 `ffprobe` 工具,如果无法预览直播或是生成切片,请向开发者反馈。
|
||||
|
||||
## 四、添加 B 站直播间出现 -352 错误
|
||||
|
||||
`-352` 错误是由 B 站风控机制导致的,如果你添加了大量的 B 站直播间进行录制,可以在设置页面调整直播间状态的检查间隔,尽量避免风控;如果你在直播间数量较少的情况下出现该错误,请向开发者反馈。
|
||||
|
||||
## 五、录播为什么都是碎片文件?
|
||||
|
||||
缓存目录下的录播文件并非用于直接播放或是投稿,而是用于直播流的预览与实时回放。如果你需要录播文件用于投稿,请打开对应录播的预览界面,使用快捷键创建选区,生成所需范围的切片,切片文件为常规的 mp4 文件,位于你所设置的切片目录下。
|
||||
|
||||
如果你将 BSR 作为单纯的录播软件使用,在设置中可以开启`整场录播生成`,这样在直播结束后,BSR 会自动生成整场录播的切片。
|
||||
|
||||

|
||||
|
||||
1
docs/usage/features/clip.md
Normal file
@@ -0,0 +1 @@
|
||||
# 切片
|
||||
1
docs/usage/features/danmaku.md
Normal file
@@ -0,0 +1 @@
|
||||
# 弹幕
|
||||
40
docs/usage/features/room.md
Normal file
@@ -0,0 +1,40 @@
|
||||
# 直播间
|
||||
|
||||
> [!WARNING]
|
||||
> 在添加管理直播间前,请确保账号列表中有对应平台的可用账号。
|
||||
|
||||
## 添加直播间
|
||||
|
||||
### 手动添加直播间
|
||||
|
||||
你可以在 BSR 直播间页面,点击按钮手动添加直播间。你需要选择平台,并输入直播间号。
|
||||
|
||||
直播间号通常是直播间网页地址尾部的遗传数字,例如 `https://live.bilibili.com/123456` 中的 `123456`,或是 `https://live.douyin.com/123456` 中的 `123456`。
|
||||
|
||||
抖音直播间比较特殊,当未开播时,你无法找到直播间的入口,因此你需要当直播间开播时找到直播间网页地址,并记录其直播间号。
|
||||
|
||||
抖音直播间需要输入主播的 sec_uid,你可以在主播主页的 URL 中找到,例如 `https://www.douyin.com/user/MS4wLjABAAAA` 中的 `MS4wLjABAAAA`。
|
||||
|
||||
### 使用 DeepLinking 快速添加直播间
|
||||
|
||||
<!-- MD033 -->
|
||||
|
||||
<video src="/videos/deeplinking.mp4" loop autoplay muted style="border-radius: 10px;"></video>
|
||||
|
||||
在浏览器中观看直播时,替换地址栏中直播间地址中的 `https://` 为 `bsr://` 即可快速唤起 BSR 添加直播间。
|
||||
|
||||
## 启用/禁用直播间
|
||||
|
||||
你可以点击直播间卡片右上角的菜单按钮,选择启用/禁用直播间。
|
||||
|
||||
- 启用后,当直播间开播时,会自动开始录制
|
||||
- 禁用后,当直播间开播时,不会自动开始录制
|
||||
|
||||
## 移除直播间
|
||||
|
||||
> [!CAUTION]
|
||||
> 移除直播间后,该直播间相关的所有录播都会被删除,请谨慎操作。
|
||||
|
||||
你可以点击直播间卡片右上角的菜单按钮,选择移除直播间。
|
||||
|
||||
<video src="/videos/room_remove.mp4" loop autoplay muted style="border-radius: 10px;"></video>
|
||||
1
docs/usage/features/subtitle.md
Normal file
@@ -0,0 +1 @@
|
||||
# 字幕
|
||||
245
docs/usage/features/webhook.md
Normal file
@@ -0,0 +1,245 @@
|
||||
# Webhook
|
||||
|
||||
> [!NOTE]
|
||||
> 你可以使用 <https://webhook.site> 来测试 Webhook 功能。
|
||||
|
||||
## 设置 Webhook
|
||||
|
||||
打开 BSR 设置页面,在基础设置中设置 Webhook 地址。
|
||||
|
||||
## Webhook Events
|
||||
|
||||
### 直播间相关
|
||||
|
||||
#### 添加直播间
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "a96a5e9f-9857-4c13-b889-91da2ace208a",
|
||||
"event": "recorder.added",
|
||||
"payload": {
|
||||
"room_id": 26966466,
|
||||
"created_at": "2025-09-07T03:33:14.258796+00:00",
|
||||
"platform": "bilibili",
|
||||
"auto_start": true,
|
||||
"extra": ""
|
||||
},
|
||||
"timestamp": 1757215994
|
||||
}
|
||||
```
|
||||
|
||||
#### 移除直播间
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "e33623d4-e040-4390-88f5-d351ceeeace7",
|
||||
"event": "recorder.removed",
|
||||
"payload": {
|
||||
"room_id": 27183290,
|
||||
"created_at": "2025-08-30T10:54:18.569198+00:00",
|
||||
"platform": "bilibili",
|
||||
"auto_start": true,
|
||||
"extra": ""
|
||||
},
|
||||
"timestamp": 1757217015
|
||||
}
|
||||
```
|
||||
|
||||
### 直播相关
|
||||
|
||||
> [!NOTE]
|
||||
> 直播开始和结束,不意味着录制的开始和结束。
|
||||
|
||||
#### 直播开始
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "f12f3424-f7d8-4b2f-a8b7-55477411482e",
|
||||
"event": "live.started",
|
||||
"payload": {
|
||||
"room_id": 843610,
|
||||
"room_info": {
|
||||
"room_id": 843610,
|
||||
"room_title": "登顶!",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/new_room_cover/73aea43f4b4624c314d62fea4b424822fb506dfb.jpg"
|
||||
},
|
||||
"user_info": {
|
||||
"user_id": "475210",
|
||||
"user_name": "Xinrea",
|
||||
"user_avatar": "https://i1.hdslb.com/bfs/face/91beb3bf444b295fe12bae1f3dc6d9fc4fe4c224.jpg"
|
||||
},
|
||||
"total_length": 0,
|
||||
"current_live_id": "",
|
||||
"live_status": false,
|
||||
"is_recording": false,
|
||||
"auto_start": true,
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757217190
|
||||
}
|
||||
```
|
||||
|
||||
#### 直播结束
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "e8b0756a-02f9-4655-b5ae-a170bf9547bd",
|
||||
"event": "live.ended",
|
||||
"payload": {
|
||||
"room_id": 843610,
|
||||
"room_info": {
|
||||
"room_id": 843610,
|
||||
"room_title": "登顶!",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/new_room_cover/73aea43f4b4624c314d62fea4b424822fb506dfb.jpg"
|
||||
},
|
||||
"user_info": {
|
||||
"user_id": "475210",
|
||||
"user_name": "Xinrea",
|
||||
"user_avatar": "https://i1.hdslb.com/bfs/face/91beb3bf444b295fe12bae1f3dc6d9fc4fe4c224.jpg"
|
||||
},
|
||||
"total_length": 0,
|
||||
"current_live_id": "",
|
||||
"live_status": true,
|
||||
"is_recording": false,
|
||||
"auto_start": true,
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757217365
|
||||
}
|
||||
```
|
||||
|
||||
### 录播相关
|
||||
|
||||
#### 开始录制
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "5ec1ea10-2b31-48fd-8deb-f2d7d2ea5985",
|
||||
"event": "record.started",
|
||||
"payload": {
|
||||
"room_id": 26966466,
|
||||
"room_info": {
|
||||
"room_id": 26966466,
|
||||
"room_title": "早安獭獭栞!下播前抽fufu",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/user_cover/b810c36855168034557e905e5916b1dba1761fa4.jpg"
|
||||
},
|
||||
"user_info": {
|
||||
"user_id": "1609526545",
|
||||
"user_name": "栞栞Shiori",
|
||||
"user_avatar": "https://i1.hdslb.com/bfs/face/47e8dbabb895de44ec6cace085d4dc1d40307277.jpg"
|
||||
},
|
||||
"total_length": 0,
|
||||
"current_live_id": "1757216045412",
|
||||
"live_status": true,
|
||||
"is_recording": false,
|
||||
"auto_start": true,
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757216045
|
||||
}
|
||||
```
|
||||
|
||||
#### 结束录制
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "56fd03e5-3965-4c2e-a6a9-bb6932347eb3",
|
||||
"event": "record.ended",
|
||||
"payload": {
|
||||
"room_id": 26966466,
|
||||
"room_info": {
|
||||
"room_id": 26966466,
|
||||
"room_title": "早安獭獭栞!下播前抽fufu",
|
||||
"room_cover": "https://i0.hdslb.com/bfs/live/user_cover/b810c36855168034557e905e5916b1dba1761fa4.jpg"
|
||||
},
|
||||
"user_info": {
|
||||
"user_id": "1609526545",
|
||||
"user_name": "栞栞Shiori",
|
||||
"user_avatar": "https://i1.hdslb.com/bfs/face/47e8dbabb895de44ec6cace085d4dc1d40307277.jpg"
|
||||
},
|
||||
"total_length": 52.96700000000001,
|
||||
"current_live_id": "1757215994597",
|
||||
"live_status": true,
|
||||
"is_recording": true,
|
||||
"auto_start": true,
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757216040
|
||||
}
|
||||
```
|
||||
|
||||
#### 删除录播
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "c32bc811-ab4b-49fd-84c7-897727905d16",
|
||||
"event": "archive.deleted",
|
||||
"payload": {
|
||||
"platform": "bilibili",
|
||||
"live_id": "1756607084705",
|
||||
"room_id": 1967212929,
|
||||
"title": "灶台O.o",
|
||||
"length": 9,
|
||||
"size": 1927112,
|
||||
"created_at": "2025-08-31T02:24:44.728616+00:00",
|
||||
"cover": "bilibili/1967212929/1756607084705/cover.jpg"
|
||||
},
|
||||
"timestamp": 1757176219
|
||||
}
|
||||
```
|
||||
|
||||
### 切片相关
|
||||
|
||||
#### 切片生成
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "f542e0e1-688b-4f1a-8ce1-e5e51530cf5d",
|
||||
"event": "clip.generated",
|
||||
"payload": {
|
||||
"id": 316,
|
||||
"room_id": 27183290,
|
||||
"cover": "[27183290][1757172501727][一起看凡人修仙传][2025-09-07_00-16-11].jpg",
|
||||
"file": "[27183290][1757172501727][一起看凡人修仙传][2025-09-07_00-16-11].mp4",
|
||||
"note": "",
|
||||
"length": 121,
|
||||
"size": 53049119,
|
||||
"status": 0,
|
||||
"bvid": "",
|
||||
"title": "",
|
||||
"desc": "",
|
||||
"tags": "",
|
||||
"area": 0,
|
||||
"created_at": "2025-09-07T00:16:11.747461+08:00",
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757175371
|
||||
}
|
||||
```
|
||||
|
||||
#### 切片删除
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "5c7ca728-753d-4a7d-a0b4-02c997ad2f92",
|
||||
"event": "clip.deleted",
|
||||
"payload": {
|
||||
"id": 313,
|
||||
"room_id": 27183290,
|
||||
"cover": "[27183290][1756903953470][不出非洲之心不下播][2025-09-03_21-10-54].jpg",
|
||||
"file": "[27183290][1756903953470][不出非洲之心不下播][2025-09-03_21-10-54].mp4",
|
||||
"note": "",
|
||||
"length": 32,
|
||||
"size": 18530098,
|
||||
"status": 0,
|
||||
"bvid": "",
|
||||
"title": "",
|
||||
"desc": "",
|
||||
"tags": "",
|
||||
"area": 0,
|
||||
"created_at": "2025-09-03T21:10:54.943682+08:00",
|
||||
"platform": "bilibili"
|
||||
},
|
||||
"timestamp": 1757147617
|
||||
}
|
||||
```
|
||||
30
docs/usage/features/workflow.md
Normal file
@@ -0,0 +1,30 @@
|
||||
# 工作流程
|
||||
|
||||
- 直播间:各个平台的直播间
|
||||
- 录播:直播流的存档,每次录制会自动生成一场录播记录
|
||||
- 切片:从直播流中剪切生成的视频片段
|
||||
- 投稿:将切片上传到各个平台(目前仅支持 Bilibili)
|
||||
|
||||
下图展示了它们之间的关系:
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
A[直播间] -->|录制| B[录播 01]
|
||||
A -->|录制| C[录播 02]
|
||||
A -->|录制| E[录播 N]
|
||||
|
||||
B --> F[直播流预览窗口]
|
||||
|
||||
F -->|区间生成| G[切片 01]
|
||||
F -->|区间生成| H[切片 02]
|
||||
F -->|区间生成| I[切片 N]
|
||||
|
||||
G --> J[切片预览窗口]
|
||||
|
||||
J -->|字幕压制| K[新切片]
|
||||
|
||||
K --> J
|
||||
|
||||
J -->|投稿| L[Bilibili]
|
||||
|
||||
```
|
||||
22
package.json
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "bili-shadowreplay",
|
||||
"private": true,
|
||||
"version": "2.8.0",
|
||||
"version": "2.13.7",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
@@ -11,10 +11,16 @@
|
||||
"tauri": "tauri",
|
||||
"docs:dev": "vitepress dev docs",
|
||||
"docs:build": "vitepress build docs",
|
||||
"docs:preview": "vitepress preview docs"
|
||||
"docs:preview": "vitepress preview docs",
|
||||
"bump": "node scripts/bump.cjs"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tauri-apps/api": "^2.4.1",
|
||||
"@langchain/core": "^0.3.64",
|
||||
"@langchain/deepseek": "^0.1.0",
|
||||
"@langchain/langgraph": "^0.3.10",
|
||||
"@langchain/ollama": "^0.2.3",
|
||||
"@tauri-apps/api": "^2.6.2",
|
||||
"@tauri-apps/plugin-deep-link": "~2",
|
||||
"@tauri-apps/plugin-dialog": "~2",
|
||||
"@tauri-apps/plugin-fs": "~2",
|
||||
"@tauri-apps/plugin-http": "~2",
|
||||
@@ -23,7 +29,9 @@
|
||||
"@tauri-apps/plugin-shell": "~2",
|
||||
"@tauri-apps/plugin-sql": "~2",
|
||||
"lucide-svelte": "^0.479.0",
|
||||
"qrcode": "^1.5.4"
|
||||
"marked": "^16.1.1",
|
||||
"qrcode": "^1.5.4",
|
||||
"socket.io-client": "^4.8.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sveltejs/vite-plugin-svelte": "^2.0.0",
|
||||
@@ -35,6 +43,7 @@
|
||||
"flowbite": "^2.5.1",
|
||||
"flowbite-svelte": "^0.46.16",
|
||||
"flowbite-svelte-icons": "^1.6.1",
|
||||
"mermaid": "^11.9.0",
|
||||
"postcss": "^8.4.21",
|
||||
"svelte": "^3.54.0",
|
||||
"svelte-check": "^3.0.0",
|
||||
@@ -42,8 +51,9 @@
|
||||
"tailwindcss": "^3.3.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"tslib": "^2.4.1",
|
||||
"typescript": "^4.6.4",
|
||||
"typescript": "^5.0.0",
|
||||
"vite": "^4.0.0",
|
||||
"vitepress": "^1.6.3"
|
||||
"vitepress": "^1.6.3",
|
||||
"vitepress-plugin-mermaid": "^2.0.17"
|
||||
}
|
||||
}
|
||||
|
||||
BIN
public/imgs/bilibili.png
Normal file
|
After Width: | Height: | Size: 26 KiB |
BIN
public/imgs/bilibili_avatar.png
Normal file
|
After Width: | Height: | Size: 38 KiB |
|
Before Width: | Height: | Size: 32 KiB After Width: | Height: | Size: 246 KiB |
BIN
public/imgs/douyin_avatar.png
Normal file
|
After Width: | Height: | Size: 153 KiB |
58
scripts/bump.cjs
Normal file
@@ -0,0 +1,58 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
|
||||
function updatePackageJson(version) {
|
||||
const packageJsonPath = path.join(process.cwd(), "package.json");
|
||||
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
|
||||
packageJson.version = version;
|
||||
fs.writeFileSync(
|
||||
packageJsonPath,
|
||||
JSON.stringify(packageJson, null, 2) + "\n"
|
||||
);
|
||||
console.log(`✅ Updated package.json version to ${version}`);
|
||||
}
|
||||
|
||||
function updateCargoToml(version) {
|
||||
const cargoTomlPath = path.join(process.cwd(), "src-tauri", "Cargo.toml");
|
||||
let cargoToml = fs.readFileSync(cargoTomlPath, "utf8");
|
||||
|
||||
// Update the version in the [package] section
|
||||
cargoToml = cargoToml.replace(/^version = ".*"$/m, `version = "${version}"`);
|
||||
|
||||
fs.writeFileSync(cargoTomlPath, cargoToml);
|
||||
console.log(`✅ Updated Cargo.toml version to ${version}`);
|
||||
}
|
||||
|
||||
function main() {
|
||||
const args = process.argv.slice(2);
|
||||
|
||||
if (args.length === 0) {
|
||||
console.error("❌ Please provide a version number");
|
||||
console.error("Usage: yarn bump <version>");
|
||||
console.error("Example: yarn bump 3.1.0");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const version = args[0];
|
||||
|
||||
// Validate version format (simple check)
|
||||
if (!/^\d+\.\d+\.\d+/.test(version)) {
|
||||
console.error(
|
||||
"❌ Invalid version format. Please use semantic versioning (e.g., 3.1.0)"
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
try {
|
||||
updatePackageJson(version);
|
||||
updateCargoToml(version);
|
||||
console.log(`🎉 Successfully bumped version to ${version}`);
|
||||
} catch (error) {
|
||||
console.error("❌ Error updating version:", error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
1622
src-tauri/Cargo.lock
generated
@@ -4,13 +4,20 @@ resolver = "2"
|
||||
|
||||
[package]
|
||||
name = "bili-shadowreplay"
|
||||
version = "1.0.0"
|
||||
version = "2.13.7"
|
||||
description = "BiliBili ShadowReplay"
|
||||
authors = ["Xinrea"]
|
||||
license = ""
|
||||
repository = ""
|
||||
edition = "2021"
|
||||
|
||||
[lints.clippy]
|
||||
correctness="deny"
|
||||
suspicious="deny"
|
||||
complexity="deny"
|
||||
style="deny"
|
||||
perf="deny"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
@@ -25,7 +32,6 @@ async-std = "1.12.0"
|
||||
async-ffmpeg-sidecar = "0.0.1"
|
||||
chrono = { version = "0.4.24", features = ["serde"] }
|
||||
toml = "0.7.3"
|
||||
custom_error = "1.9.2"
|
||||
regex = "1.7.3"
|
||||
tokio = { version = "1.27.0", features = ["process"] }
|
||||
platform-dirs = "0.3.0"
|
||||
@@ -44,14 +50,19 @@ async-trait = "0.1.87"
|
||||
whisper-rs = "0.14.2"
|
||||
hound = "3.5.1"
|
||||
uuid = { version = "1.4", features = ["v4"] }
|
||||
axum = { version = "0.7", features = ["macros"] }
|
||||
axum = { version = "0.7", features = ["macros", "multipart"] }
|
||||
tower-http = { version = "0.5", features = ["cors", "fs"] }
|
||||
futures-core = "0.3"
|
||||
futures = "0.3"
|
||||
tokio-util = { version = "0.7", features = ["io"] }
|
||||
tokio-stream = "0.1"
|
||||
clap = { version = "4.5.37", features = ["derive"] }
|
||||
url = "2.5.4"
|
||||
srtparse = "0.2.0"
|
||||
thiserror = "2"
|
||||
deno_core = "0.355"
|
||||
sanitize-filename = "0.6.0"
|
||||
socketioxide = "0.17.2"
|
||||
|
||||
[features]
|
||||
# this feature is used for production builds or when `devPath` points to the filesystem
|
||||
@@ -71,6 +82,7 @@ gui = [
|
||||
"tauri-utils",
|
||||
"tauri-plugin-os",
|
||||
"tauri-plugin-notification",
|
||||
"tauri-plugin-deep-link",
|
||||
"fix-path-env",
|
||||
"tauri-build",
|
||||
]
|
||||
@@ -83,6 +95,7 @@ optional = true
|
||||
[dependencies.tauri-plugin-single-instance]
|
||||
version = "2"
|
||||
optional = true
|
||||
features = ["deep-link"]
|
||||
|
||||
[dependencies.tauri-plugin-dialog]
|
||||
version = "2"
|
||||
@@ -117,6 +130,10 @@ optional = true
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.tauri-plugin-deep-link]
|
||||
version = "2"
|
||||
optional = true
|
||||
|
||||
[dependencies.fix-path-env]
|
||||
git = "https://github.com/tauri-apps/fix-path-env-rs"
|
||||
optional = true
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
fn main() {
|
||||
#[cfg(feature = "gui")]
|
||||
tauri_build::build()
|
||||
tauri_build::build();
|
||||
}
|
||||
|
||||
@@ -49,6 +49,12 @@
|
||||
},
|
||||
{
|
||||
"url": "https://*.douyinpic.com/"
|
||||
},
|
||||
{
|
||||
"url": "http://tauri.localhost/*"
|
||||
},
|
||||
{
|
||||
"url": "http://localhost:8054/*"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -65,6 +71,7 @@
|
||||
"shell:default",
|
||||
"sql:default",
|
||||
"os:default",
|
||||
"dialog:default"
|
||||
"dialog:default",
|
||||
"deep-link:default"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -7,38 +7,42 @@ edition = "2021"
|
||||
name = "danmu_stream"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[[example]]
|
||||
name = "bilibili"
|
||||
path = "examples/bilibili.rs"
|
||||
|
||||
[[example]]
|
||||
name = "douyin"
|
||||
path = "examples/douyin.rs"
|
||||
|
||||
[dependencies]
|
||||
tokio = { version = "1.0", features = ["full"] }
|
||||
tokio-tungstenite = { version = "0.20", features = ["native-tls"] }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tokio-tungstenite = { version = "0.27", features = ["native-tls"] }
|
||||
futures-util = "0.3"
|
||||
prost = "0.12"
|
||||
prost = "0.14"
|
||||
chrono = "0.4"
|
||||
log = "0.4"
|
||||
env_logger = "0.10"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
reqwest = { version = "0.11", features = ["json"] }
|
||||
env_logger = "0.11"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
reqwest = { version = "0.12", features = ["json"] }
|
||||
url = "2.4"
|
||||
md5 = "0.7"
|
||||
md5 = "0.8"
|
||||
regex = "1.9"
|
||||
deno_core = "0.242.0"
|
||||
pct-str = "2.0.0"
|
||||
custom_error = "1.9.2"
|
||||
deno_core = "0.355"
|
||||
pct-str = "2.0"
|
||||
thiserror = "2.0"
|
||||
flate2 = "1.0"
|
||||
scroll = "0.13.0"
|
||||
scroll_derive = "0.13.0"
|
||||
brotli = "8.0.1"
|
||||
scroll = "0.13"
|
||||
scroll_derive = "0.13"
|
||||
brotli = "8.0"
|
||||
http = "1.0"
|
||||
rand = "0.9.1"
|
||||
urlencoding = "2.1.3"
|
||||
rand = "0.9"
|
||||
urlencoding = "2.1"
|
||||
gzip = "0.1.2"
|
||||
hex = "0.4.3"
|
||||
async-trait = "0.1.88"
|
||||
uuid = "1.17.0"
|
||||
async-trait = "0.1"
|
||||
uuid = "1"
|
||||
|
||||
[build-dependencies]
|
||||
tonic-build = "0.10"
|
||||
tonic-build = "0.14"
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use tokio::sync::{mpsc, RwLock};
|
||||
|
||||
use crate::{
|
||||
provider::{new, DanmuProvider, ProviderType},
|
||||
DanmuMessageType, DanmuStreamError,
|
||||
};
|
||||
use tokio::sync::{mpsc, RwLock};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DanmuStream {
|
||||
pub provider_type: ProviderType,
|
||||
pub identifier: String,
|
||||
pub room_id: u64,
|
||||
pub room_id: i64,
|
||||
pub provider: Arc<RwLock<Box<dyn DanmuProvider>>>,
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
rx: Arc<RwLock<mpsc::UnboundedReceiver<DanmuMessageType>>>,
|
||||
@@ -20,7 +21,7 @@ impl DanmuStream {
|
||||
pub async fn new(
|
||||
provider_type: ProviderType,
|
||||
identifier: &str,
|
||||
room_id: u64,
|
||||
room_id: i64,
|
||||
) -> Result<Self, DanmuStreamError> {
|
||||
let (tx, rx) = mpsc::unbounded_channel();
|
||||
let provider = new(provider_type, identifier, room_id).await?;
|
||||
|
||||
@@ -1,19 +1,8 @@
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
use reqwest::header::HeaderMap;
|
||||
|
||||
impl From<reqwest::Error> for DanmuStreamError {
|
||||
fn from(value: reqwest::Error) -> Self {
|
||||
Self::HttpError { err: value }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<url::ParseError> for DanmuStreamError {
|
||||
fn from(value: url::ParseError) -> Self {
|
||||
Self::ParseError { err: value }
|
||||
}
|
||||
}
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
pub struct ApiClient {
|
||||
client: reqwest::Client,
|
||||
|
||||
@@ -2,16 +2,24 @@ pub mod danmu_stream;
|
||||
mod http_client;
|
||||
pub mod provider;
|
||||
|
||||
use custom_error::custom_error;
|
||||
use thiserror::Error;
|
||||
|
||||
custom_error! {pub DanmuStreamError
|
||||
HttpError {err: reqwest::Error} = "HttpError {err}",
|
||||
ParseError {err: url::ParseError} = "ParseError {err}",
|
||||
WebsocketError {err: String } = "WebsocketError {err}",
|
||||
PackError {err: String} = "PackError {err}",
|
||||
UnsupportProto {proto: u16} = "UnsupportProto {proto}",
|
||||
MessageParseError {err: String} = "MessageParseError {err}",
|
||||
InvalidIdentifier {err: String} = "InvalidIdentifier {err}"
|
||||
#[derive(Error, Debug)]
|
||||
pub enum DanmuStreamError {
|
||||
#[error("HttpError {0:?}")]
|
||||
HttpError(#[from] reqwest::Error),
|
||||
#[error("ParseError {0:?}")]
|
||||
ParseError(#[from] url::ParseError),
|
||||
#[error("WebsocketError {err}")]
|
||||
WebsocketError { err: String },
|
||||
#[error("PackError {err}")]
|
||||
PackError { err: String },
|
||||
#[error("UnsupportProto {proto}")]
|
||||
UnsupportProto { proto: u16 },
|
||||
#[error("MessageParseError {err}")]
|
||||
MessageParseError { err: String },
|
||||
#[error("InvalidIdentifier {err}")]
|
||||
InvalidIdentifier { err: String },
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -21,7 +29,7 @@ pub enum DanmuMessageType {
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DanmuMessage {
|
||||
pub room_id: u64,
|
||||
pub room_id: i64,
|
||||
pub user_id: u64,
|
||||
pub user_name: String,
|
||||
pub message: String,
|
||||
|
||||
@@ -36,15 +36,15 @@ type WsWriteType = futures_util::stream::SplitSink<
|
||||
|
||||
pub struct BiliDanmu {
|
||||
client: ApiClient,
|
||||
room_id: u64,
|
||||
user_id: u64,
|
||||
room_id: i64,
|
||||
user_id: i64,
|
||||
stop: Arc<RwLock<bool>>,
|
||||
write: Arc<RwLock<Option<WsWriteType>>>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl DanmuProvider for BiliDanmu {
|
||||
async fn new(cookie: &str, room_id: u64) -> Result<Self, DanmuStreamError> {
|
||||
async fn new(cookie: &str, room_id: i64) -> Result<Self, DanmuStreamError> {
|
||||
// find DedeUserID=<user_id> in cookie str
|
||||
let user_id = BiliDanmu::parse_user_id(cookie)?;
|
||||
// add buvid3 to cookie
|
||||
@@ -65,7 +65,6 @@ impl DanmuProvider for BiliDanmu {
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let mut retry_count = 0;
|
||||
const MAX_RETRIES: u32 = 5;
|
||||
const RETRY_DELAY: Duration = Duration::from_secs(5);
|
||||
info!(
|
||||
"Bilibili WebSocket connection started, room_id: {}",
|
||||
@@ -74,33 +73,37 @@ impl DanmuProvider for BiliDanmu {
|
||||
|
||||
loop {
|
||||
if *self.stop.read().await {
|
||||
info!(
|
||||
"Bilibili WebSocket connection stopped, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
match self.connect_and_handle(tx.clone()).await {
|
||||
Ok(_) => {
|
||||
info!("Bilibili WebSocket connection closed normally");
|
||||
break;
|
||||
info!(
|
||||
"Bilibili WebSocket connection closed normally, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
retry_count = 0;
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Bilibili WebSocket connection error: {}", e);
|
||||
retry_count += 1;
|
||||
|
||||
if retry_count >= MAX_RETRIES {
|
||||
return Err(DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to connect after {} retries", MAX_RETRIES),
|
||||
});
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}/{})",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
MAX_RETRIES
|
||||
error!(
|
||||
"Bilibili WebSocket connection error, room_id: {}, error: {}",
|
||||
self.room_id, e
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
retry_count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}), room_id: {}",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
self.room_id
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -123,7 +126,8 @@ impl BiliDanmu {
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let wbi_key = self.get_wbi_key().await?;
|
||||
let danmu_info = self.get_danmu_info(&wbi_key, self.room_id).await?;
|
||||
let real_room = self.get_real_room(&wbi_key, self.room_id).await?;
|
||||
let danmu_info = self.get_danmu_info(&wbi_key, real_room).await?;
|
||||
let ws_hosts = danmu_info.data.host_list.clone();
|
||||
let mut conn = None;
|
||||
log::debug!("ws_hosts: {:?}", ws_hosts);
|
||||
@@ -152,7 +156,7 @@ impl BiliDanmu {
|
||||
*self.write.write().await = Some(write);
|
||||
|
||||
let json = serde_json::to_string(&WsSend {
|
||||
roomid: self.room_id,
|
||||
roomid: real_room,
|
||||
key: danmu_info.data.token,
|
||||
uid: self.user_id,
|
||||
protover: 3,
|
||||
@@ -237,9 +241,8 @@ impl BiliDanmu {
|
||||
async fn get_danmu_info(
|
||||
&self,
|
||||
wbi_key: &str,
|
||||
room_id: u64,
|
||||
room_id: i64,
|
||||
) -> Result<DanmuInfo, DanmuStreamError> {
|
||||
let room_id = self.get_real_room(wbi_key, room_id).await?;
|
||||
let params = self
|
||||
.get_sign(
|
||||
wbi_key,
|
||||
@@ -265,7 +268,7 @@ impl BiliDanmu {
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn get_real_room(&self, wbi_key: &str, room_id: u64) -> Result<u64, DanmuStreamError> {
|
||||
async fn get_real_room(&self, wbi_key: &str, room_id: i64) -> Result<i64, DanmuStreamError> {
|
||||
let params = self
|
||||
.get_sign(
|
||||
wbi_key,
|
||||
@@ -293,14 +296,14 @@ impl BiliDanmu {
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
fn parse_user_id(cookie: &str) -> Result<u64, DanmuStreamError> {
|
||||
fn parse_user_id(cookie: &str) -> Result<i64, DanmuStreamError> {
|
||||
let mut user_id = None;
|
||||
|
||||
// find DedeUserID=<user_id> in cookie str
|
||||
let re = Regex::new(r"DedeUserID=(\d+)").unwrap();
|
||||
if let Some(captures) = re.captures(cookie) {
|
||||
if let Some(user) = captures.get(1) {
|
||||
user_id = Some(user.as_str().parse::<u64>().unwrap());
|
||||
user_id = Some(user.as_str().parse::<i64>().unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -404,8 +407,8 @@ impl BiliDanmu {
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct WsSend {
|
||||
uid: u64,
|
||||
roomid: u64,
|
||||
uid: i64,
|
||||
roomid: i64,
|
||||
key: String,
|
||||
protover: u32,
|
||||
platform: String,
|
||||
@@ -436,5 +439,5 @@ pub struct RoomInit {
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct RoomInitData {
|
||||
room_id: u64,
|
||||
room_id: i64,
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
|
||||
use super::stream::WsStreamCtx;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
|
||||
use super::stream::WsStreamCtx;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
#[derive(Debug)]
|
||||
#[allow(dead_code)]
|
||||
|
||||
@@ -24,7 +24,7 @@ struct PackHotCount {
|
||||
|
||||
type BilibiliPackCtx<'a> = (BilibiliPackHeader, &'a [u8]);
|
||||
|
||||
fn pack(buffer: &[u8]) -> Result<BilibiliPackCtx, DanmuStreamError> {
|
||||
fn pack(buffer: &[u8]) -> Result<BilibiliPackCtx<'_>, DanmuStreamError> {
|
||||
let data = buffer
|
||||
.pread_with(0, scroll::BE)
|
||||
.map_err(|e: scroll::Error| DanmuStreamError::PackError { err: e.to_string() })?;
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
|
||||
use super::stream::WsStreamCtx;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::{
|
||||
provider::{bilibili::dannmu_msg::BiliDanmuMessage, DanmuMessageType},
|
||||
DanmuMessage, DanmuStreamError,
|
||||
};
|
||||
use super::dannmu_msg::BiliDanmuMessage;
|
||||
|
||||
use crate::{provider::DanmuMessageType, DanmuMessage, DanmuStreamError};
|
||||
|
||||
#[derive(Debug, Deserialize, Clone)]
|
||||
pub struct WsStreamCtx {
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
|
||||
use super::stream::WsStreamCtx;
|
||||
|
||||
use crate::DanmuStreamError;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
|
||||
@@ -1,4 +1,9 @@
|
||||
use crate::{provider::DanmuProvider, DanmuMessage, DanmuMessageType, DanmuStreamError};
|
||||
mod messages;
|
||||
|
||||
use std::io::Read;
|
||||
use std::sync::Arc;
|
||||
use std::time::{Duration, SystemTime};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use deno_core::v8;
|
||||
use deno_core::JsRuntime;
|
||||
@@ -7,11 +12,9 @@ use flate2::read::GzDecoder;
|
||||
use futures_util::{SinkExt, StreamExt, TryStreamExt};
|
||||
use log::debug;
|
||||
use log::{error, info};
|
||||
use messages::*;
|
||||
use prost::bytes::Bytes;
|
||||
use prost::Message;
|
||||
use std::io::Read;
|
||||
use std::sync::Arc;
|
||||
use std::time::{Duration, SystemTime};
|
||||
use tokio::net::TcpStream;
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::sync::RwLock;
|
||||
@@ -19,8 +22,7 @@ use tokio_tungstenite::{
|
||||
connect_async, tungstenite::Message as WsMessage, MaybeTlsStream, WebSocketStream,
|
||||
};
|
||||
|
||||
mod messages;
|
||||
use messages::*;
|
||||
use crate::{provider::DanmuProvider, DanmuMessage, DanmuMessageType, DanmuStreamError};
|
||||
|
||||
const USER_AGENT: &str = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36";
|
||||
|
||||
@@ -31,7 +33,7 @@ type WsWriteType =
|
||||
futures_util::stream::SplitSink<WebSocketStream<MaybeTlsStream<TcpStream>>, WsMessage>;
|
||||
|
||||
pub struct DouyinDanmu {
|
||||
room_id: u64,
|
||||
room_id: i64,
|
||||
cookie: String,
|
||||
stop: Arc<RwLock<bool>>,
|
||||
write: Arc<RwLock<Option<WsWriteType>>>,
|
||||
@@ -109,7 +111,7 @@ impl DouyinDanmu {
|
||||
runtime
|
||||
.execute_script(
|
||||
"<crypto-js.min.js>",
|
||||
deno_core::FastString::Static(crypto_js),
|
||||
deno_core::FastString::from_static(crypto_js),
|
||||
)
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to execute crypto-js: {}", e),
|
||||
@@ -118,7 +120,7 @@ impl DouyinDanmu {
|
||||
// Load and execute the sign.js file
|
||||
let js_code = include_str!("douyin/webmssdk.js");
|
||||
runtime
|
||||
.execute_script("<sign.js>", deno_core::FastString::Static(js_code))
|
||||
.execute_script("<sign.js>", deno_core::FastString::from_static(js_code))
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to execute JavaScript: {}", e),
|
||||
})?;
|
||||
@@ -126,10 +128,7 @@ impl DouyinDanmu {
|
||||
// Call the get_wss_url function
|
||||
let sign_call = format!("get_wss_url(\"{}\")", self.room_id);
|
||||
let result = runtime
|
||||
.execute_script(
|
||||
"<sign_call>",
|
||||
deno_core::FastString::Owned(sign_call.into_boxed_str()),
|
||||
)
|
||||
.execute_script("<sign_call>", deno_core::FastString::from(sign_call))
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to execute JavaScript: {}", e),
|
||||
})?;
|
||||
@@ -214,7 +213,7 @@ impl DouyinDanmu {
|
||||
if let Ok(Some(ack)) = handle_binary_message(&data, &tx, room_id).await {
|
||||
if let Some(write) = write.write().await.as_mut() {
|
||||
if let Err(e) =
|
||||
write.send(WsMessage::Binary(ack.encode_to_vec())).await
|
||||
write.send(WsMessage::binary(ack.encode_to_vec())).await
|
||||
{
|
||||
error!("Failed to send ack: {}", e);
|
||||
}
|
||||
@@ -257,7 +256,7 @@ impl DouyinDanmu {
|
||||
|
||||
async fn send_heartbeat(tx: &mpsc::Sender<WsMessage>) -> Result<(), DanmuStreamError> {
|
||||
// heartbeat message: 3A 02 68 62
|
||||
tx.send(WsMessage::Binary(vec![0x3A, 0x02, 0x68, 0x62]))
|
||||
tx.send(WsMessage::binary(vec![0x3A, 0x02, 0x68, 0x62]))
|
||||
.await
|
||||
.map_err(|e| DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to send heartbeat message: {}", e),
|
||||
@@ -269,7 +268,7 @@ impl DouyinDanmu {
|
||||
async fn handle_binary_message(
|
||||
data: &[u8],
|
||||
tx: &mpsc::UnboundedSender<DanmuMessageType>,
|
||||
room_id: u64,
|
||||
room_id: i64,
|
||||
) -> Result<Option<PushFrame>, DanmuStreamError> {
|
||||
// First decode the PushFrame
|
||||
let push_frame = PushFrame::decode(Bytes::from(data.to_vec())).map_err(|e| {
|
||||
@@ -395,7 +394,7 @@ async fn handle_binary_message(
|
||||
|
||||
#[async_trait]
|
||||
impl DanmuProvider for DouyinDanmu {
|
||||
async fn new(identifier: &str, room_id: u64) -> Result<Self, DanmuStreamError> {
|
||||
async fn new(identifier: &str, room_id: i64) -> Result<Self, DanmuStreamError> {
|
||||
Ok(Self {
|
||||
room_id,
|
||||
cookie: identifier.to_string(),
|
||||
@@ -409,7 +408,6 @@ impl DanmuProvider for DouyinDanmu {
|
||||
tx: mpsc::UnboundedSender<DanmuMessageType>,
|
||||
) -> Result<(), DanmuStreamError> {
|
||||
let mut retry_count = 0;
|
||||
const MAX_RETRIES: u32 = 5;
|
||||
const RETRY_DELAY: Duration = Duration::from_secs(5);
|
||||
info!(
|
||||
"Douyin WebSocket connection started, room_id: {}",
|
||||
@@ -423,28 +421,25 @@ impl DanmuProvider for DouyinDanmu {
|
||||
|
||||
match self.connect_and_handle(tx.clone()).await {
|
||||
Ok(_) => {
|
||||
info!("Douyin WebSocket connection closed normally");
|
||||
break;
|
||||
info!(
|
||||
"Douyin WebSocket connection closed normally, room_id: {}",
|
||||
self.room_id
|
||||
);
|
||||
retry_count = 0;
|
||||
}
|
||||
Err(e) => {
|
||||
error!("Douyin WebSocket connection error: {}", e);
|
||||
retry_count += 1;
|
||||
|
||||
if retry_count >= MAX_RETRIES {
|
||||
return Err(DanmuStreamError::WebsocketError {
|
||||
err: format!("Failed to connect after {} retries", MAX_RETRIES),
|
||||
});
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}/{})",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
MAX_RETRIES
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
}
|
||||
}
|
||||
|
||||
info!(
|
||||
"Retrying connection in {} seconds... (Attempt {}), room_id: {}",
|
||||
RETRY_DELAY.as_secs(),
|
||||
retry_count,
|
||||
self.room_id
|
||||
);
|
||||
tokio::time::sleep(RETRY_DELAY).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use prost::Message;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use prost::Message;
|
||||
|
||||
// message Response {
|
||||
// repeated Message messagesList = 1;
|
||||
// string cursor = 2;
|
||||
|
||||
@@ -4,10 +4,10 @@ mod douyin;
|
||||
use async_trait::async_trait;
|
||||
use tokio::sync::mpsc;
|
||||
|
||||
use crate::{
|
||||
provider::bilibili::BiliDanmu, provider::douyin::DouyinDanmu, DanmuMessageType,
|
||||
DanmuStreamError,
|
||||
};
|
||||
use self::bilibili::BiliDanmu;
|
||||
use self::douyin::DouyinDanmu;
|
||||
|
||||
use crate::{DanmuMessageType, DanmuStreamError};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum ProviderType {
|
||||
@@ -17,7 +17,7 @@ pub enum ProviderType {
|
||||
|
||||
#[async_trait]
|
||||
pub trait DanmuProvider: Send + Sync {
|
||||
async fn new(identifier: &str, room_id: u64) -> Result<Self, DanmuStreamError>
|
||||
async fn new(identifier: &str, room_id: i64) -> Result<Self, DanmuStreamError>
|
||||
where
|
||||
Self: Sized;
|
||||
|
||||
@@ -57,7 +57,7 @@ pub trait DanmuProvider: Send + Sync {
|
||||
pub async fn new(
|
||||
provider_type: ProviderType,
|
||||
identifier: &str,
|
||||
room_id: u64,
|
||||
room_id: i64,
|
||||
) -> Result<Box<dyn DanmuProvider>, DanmuStreamError> {
|
||||
match provider_type {
|
||||
ProviderType::BiliBili => {
|
||||
@@ -1 +1 @@
|
||||
{"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main","Live*","Clip*"],"permissions":["core:default","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-copy-file","fs:allow-mkdir","fs:allow-remove","fs:allow-remove","fs:allow-rename","fs:allow-exists",{"identifier":"fs:scope","allow":["**"]},"core:window:default","core:window:allow-start-dragging","core:window:allow-close","core:window:allow-minimize","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-set-title","sql:allow-execute","shell:allow-open","dialog:allow-open","dialog:allow-save","dialog:allow-message","dialog:allow-ask","dialog:allow-confirm",{"identifier":"http:default","allow":[{"url":"https://*.hdslb.com/"},{"url":"https://afdian.com/"},{"url":"https://*.afdiancdn.com/"},{"url":"https://*.douyin.com/"},{"url":"https://*.douyinpic.com/"}]},"dialog:default","shell:default","fs:default","http:default","sql:default","os:default","notification:default","dialog:default","fs:default","http:default","shell:default","sql:default","os:default","dialog:default"]}}
|
||||
{"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main","Live*","Clip*"],"permissions":["core:default","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-copy-file","fs:allow-mkdir","fs:allow-remove","fs:allow-remove","fs:allow-rename","fs:allow-exists",{"identifier":"fs:scope","allow":["**"]},"core:window:default","core:window:allow-start-dragging","core:window:allow-close","core:window:allow-minimize","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-set-title","sql:allow-execute","shell:allow-open","dialog:allow-open","dialog:allow-save","dialog:allow-message","dialog:allow-ask","dialog:allow-confirm",{"identifier":"http:default","allow":[{"url":"https://*.hdslb.com/"},{"url":"https://afdian.com/"},{"url":"https://*.afdiancdn.com/"},{"url":"https://*.douyin.com/"},{"url":"https://*.douyinpic.com/"},{"url":"http://tauri.localhost/*"},{"url":"http://localhost:8054/*"}]},"dialog:default","shell:default","fs:default","http:default","sql:default","os:default","notification:default","dialog:default","fs:default","http:default","shell:default","sql:default","os:default","dialog:default","deep-link:default"]}}
|
||||
@@ -37,7 +37,7 @@
|
||||
],
|
||||
"definitions": {
|
||||
"Capability": {
|
||||
"description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\nIt controls application windows' and webviews' fine grained access to the Tauri core, application, or plugin commands. If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\nThis can be done to create groups of windows, based on their required system access, which can reduce impact of frontend vulnerabilities in less privileged windows. Windows can be added to a capability by exact name (e.g. `main-window`) or glob patterns like `*` or `admin-*`. A Window can have none, one, or multiple associated capabilities.\n\n## Example\n\n```json { \"identifier\": \"main-user-files-write\", \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programatic access to files selected by the user.\", \"windows\": [ \"main\" ], \"permissions\": [ \"core:default\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] }, ], \"platforms\": [\"macOS\",\"windows\"] } ```",
|
||||
"description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\nIt controls application windows' and webviews' fine grained access to the Tauri core, application, or plugin commands. If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\nThis can be done to create groups of windows, based on their required system access, which can reduce impact of frontend vulnerabilities in less privileged windows. Windows can be added to a capability by exact name (e.g. `main-window`) or glob patterns like `*` or `admin-*`. A Window can have none, one, or multiple associated capabilities.\n\n## Example\n\n```json { \"identifier\": \"main-user-files-write\", \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.\", \"windows\": [ \"main\" ], \"permissions\": [ \"core:default\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] }, ], \"platforms\": [\"macOS\",\"windows\"] } ```",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"identifier",
|
||||
@@ -49,7 +49,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"description": "Description of what the capability is intended to allow on associated windows.\n\nIt should contain a description of what the grouped permissions should allow.\n\n## Example\n\nThis capability allows the `main` window access to `filesystem` write related commands and `dialog` commands to enable programatic access to files selected by the user.",
|
||||
"description": "Description of what the capability is intended to allow on associated windows.\n\nIt should contain a description of what the grouped permissions should allow.\n\n## Example\n\nThis capability allows the `main` window access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.",
|
||||
"default": "",
|
||||
"type": "string"
|
||||
},
|
||||
@@ -3152,6 +3152,12 @@
|
||||
"const": "core:webview:allow-reparent",
|
||||
"markdownDescription": "Enables the reparent command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the set_webview_auto_resize command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "core:webview:allow-set-webview-auto-resize",
|
||||
"markdownDescription": "Enables the set_webview_auto_resize command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the set_webview_background_color command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
@@ -3254,6 +3260,12 @@
|
||||
"const": "core:webview:deny-reparent",
|
||||
"markdownDescription": "Denies the reparent command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the set_webview_auto_resize command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "core:webview:deny-set-webview-auto-resize",
|
||||
"markdownDescription": "Denies the set_webview_auto_resize command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the set_webview_background_color command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
@@ -4208,6 +4220,60 @@
|
||||
"const": "core:window:deny-unminimize",
|
||||
"markdownDescription": "Denies the unminimize command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Allows reading the opened deep link via the get_current command\n#### This default permission set includes:\n\n- `allow-get-current`",
|
||||
"type": "string",
|
||||
"const": "deep-link:default",
|
||||
"markdownDescription": "Allows reading the opened deep link via the get_current command\n#### This default permission set includes:\n\n- `allow-get-current`"
|
||||
},
|
||||
{
|
||||
"description": "Enables the get_current command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:allow-get-current",
|
||||
"markdownDescription": "Enables the get_current command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the is_registered command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:allow-is-registered",
|
||||
"markdownDescription": "Enables the is_registered command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the register command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:allow-register",
|
||||
"markdownDescription": "Enables the register command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the unregister command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:allow-unregister",
|
||||
"markdownDescription": "Enables the unregister command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the get_current command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:deny-get-current",
|
||||
"markdownDescription": "Denies the get_current command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the is_registered command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:deny-is-registered",
|
||||
"markdownDescription": "Denies the is_registered command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the register command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:deny-register",
|
||||
"markdownDescription": "Denies the register command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the unregister command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:deny-unregister",
|
||||
"markdownDescription": "Denies the unregister command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "This permission set configures the types of dialogs\navailable from the dialog plugin.\n\n#### Granted Permissions\n\nAll dialog types are enabled.\n\n\n\n#### This default permission set includes:\n\n- `allow-ask`\n- `allow-confirm`\n- `allow-message`\n- `allow-save`\n- `allow-open`",
|
||||
"type": "string",
|
||||
|
||||
@@ -37,7 +37,7 @@
|
||||
],
|
||||
"definitions": {
|
||||
"Capability": {
|
||||
"description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\nIt controls application windows' and webviews' fine grained access to the Tauri core, application, or plugin commands. If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\nThis can be done to create groups of windows, based on their required system access, which can reduce impact of frontend vulnerabilities in less privileged windows. Windows can be added to a capability by exact name (e.g. `main-window`) or glob patterns like `*` or `admin-*`. A Window can have none, one, or multiple associated capabilities.\n\n## Example\n\n```json { \"identifier\": \"main-user-files-write\", \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programatic access to files selected by the user.\", \"windows\": [ \"main\" ], \"permissions\": [ \"core:default\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] }, ], \"platforms\": [\"macOS\",\"windows\"] } ```",
|
||||
"description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\nIt controls application windows' and webviews' fine grained access to the Tauri core, application, or plugin commands. If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\nThis can be done to create groups of windows, based on their required system access, which can reduce impact of frontend vulnerabilities in less privileged windows. Windows can be added to a capability by exact name (e.g. `main-window`) or glob patterns like `*` or `admin-*`. A Window can have none, one, or multiple associated capabilities.\n\n## Example\n\n```json { \"identifier\": \"main-user-files-write\", \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.\", \"windows\": [ \"main\" ], \"permissions\": [ \"core:default\", \"dialog:open\", { \"identifier\": \"fs:allow-write-text-file\", \"allow\": [{ \"path\": \"$HOME/test.txt\" }] }, ], \"platforms\": [\"macOS\",\"windows\"] } ```",
|
||||
"type": "object",
|
||||
"required": [
|
||||
"identifier",
|
||||
@@ -49,7 +49,7 @@
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"description": "Description of what the capability is intended to allow on associated windows.\n\nIt should contain a description of what the grouped permissions should allow.\n\n## Example\n\nThis capability allows the `main` window access to `filesystem` write related commands and `dialog` commands to enable programatic access to files selected by the user.",
|
||||
"description": "Description of what the capability is intended to allow on associated windows.\n\nIt should contain a description of what the grouped permissions should allow.\n\n## Example\n\nThis capability allows the `main` window access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.",
|
||||
"default": "",
|
||||
"type": "string"
|
||||
},
|
||||
@@ -3152,6 +3152,12 @@
|
||||
"const": "core:webview:allow-reparent",
|
||||
"markdownDescription": "Enables the reparent command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the set_webview_auto_resize command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "core:webview:allow-set-webview-auto-resize",
|
||||
"markdownDescription": "Enables the set_webview_auto_resize command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the set_webview_background_color command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
@@ -3254,6 +3260,12 @@
|
||||
"const": "core:webview:deny-reparent",
|
||||
"markdownDescription": "Denies the reparent command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the set_webview_auto_resize command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "core:webview:deny-set-webview-auto-resize",
|
||||
"markdownDescription": "Denies the set_webview_auto_resize command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the set_webview_background_color command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
@@ -4208,6 +4220,60 @@
|
||||
"const": "core:window:deny-unminimize",
|
||||
"markdownDescription": "Denies the unminimize command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Allows reading the opened deep link via the get_current command\n#### This default permission set includes:\n\n- `allow-get-current`",
|
||||
"type": "string",
|
||||
"const": "deep-link:default",
|
||||
"markdownDescription": "Allows reading the opened deep link via the get_current command\n#### This default permission set includes:\n\n- `allow-get-current`"
|
||||
},
|
||||
{
|
||||
"description": "Enables the get_current command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:allow-get-current",
|
||||
"markdownDescription": "Enables the get_current command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the is_registered command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:allow-is-registered",
|
||||
"markdownDescription": "Enables the is_registered command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the register command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:allow-register",
|
||||
"markdownDescription": "Enables the register command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Enables the unregister command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:allow-unregister",
|
||||
"markdownDescription": "Enables the unregister command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the get_current command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:deny-get-current",
|
||||
"markdownDescription": "Denies the get_current command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the is_registered command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:deny-is-registered",
|
||||
"markdownDescription": "Denies the is_registered command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the register command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:deny-register",
|
||||
"markdownDescription": "Denies the register command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "Denies the unregister command without any pre-configured scope.",
|
||||
"type": "string",
|
||||
"const": "deep-link:deny-unregister",
|
||||
"markdownDescription": "Denies the unregister command without any pre-configured scope."
|
||||
},
|
||||
{
|
||||
"description": "This permission set configures the types of dialogs\navailable from the dialog plugin.\n\n#### Granted Permissions\n\nAll dialog types are enabled.\n\n\n\n#### This default permission set includes:\n\n- `allow-ask`\n- `allow-confirm`\n- `allow-message`\n- `allow-save`\n- `allow-open`",
|
||||
"type": "string",
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use chrono::Utc;
|
||||
|
||||
use crate::database::Database;
|
||||
use crate::recorder::PlatformType;
|
||||
|
||||
pub async fn try_rebuild_archives(
|
||||
db: &Arc<Database>,
|
||||
cache_path: PathBuf,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let rooms = db.get_recorders().await?;
|
||||
for room in rooms {
|
||||
let room_id = room.room_id;
|
||||
let room_cache_path = cache_path.join(format!("{}/{}", room.platform, room_id));
|
||||
let mut files = tokio::fs::read_dir(room_cache_path).await?;
|
||||
while let Some(file) = files.next_entry().await? {
|
||||
if file.file_type().await?.is_dir() {
|
||||
// use folder name as live_id
|
||||
let live_id = file.file_name();
|
||||
let live_id = live_id.to_str().unwrap();
|
||||
// check if live_id is in db
|
||||
let record = db.get_record(room_id, live_id).await;
|
||||
if record.is_ok() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// get created_at from folder metadata
|
||||
let metadata = file.metadata().await?;
|
||||
let created_at = metadata.created();
|
||||
if created_at.is_err() {
|
||||
continue;
|
||||
}
|
||||
let created_at = created_at.unwrap();
|
||||
let created_at = chrono::DateTime::<Utc>::from(created_at)
|
||||
.format("%Y-%m-%dT%H:%M:%S.%fZ")
|
||||
.to_string();
|
||||
// create a record for this live_id
|
||||
let record = db
|
||||
.add_record(
|
||||
PlatformType::from_str(room.platform.as_str()).unwrap(),
|
||||
live_id,
|
||||
room_id,
|
||||
&format!("UnknownLive {}", live_id),
|
||||
None,
|
||||
Some(&created_at),
|
||||
)
|
||||
.await?;
|
||||
|
||||
log::info!("rebuild archive {:?}", record);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use chrono::Utc;
|
||||
use chrono::Local;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{recorder::PlatformType, recorder_manager::ClipRangeParams};
|
||||
@@ -35,6 +35,8 @@ pub struct Config {
|
||||
pub config_path: String,
|
||||
#[serde(default = "default_whisper_language")]
|
||||
pub whisper_language: String,
|
||||
#[serde(default = "default_webhook_url")]
|
||||
pub webhook_url: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Clone)]
|
||||
@@ -64,7 +66,7 @@ fn default_openai_api_endpoint() -> String {
|
||||
}
|
||||
|
||||
fn default_openai_api_key() -> String {
|
||||
"".to_string()
|
||||
String::new()
|
||||
}
|
||||
|
||||
fn default_clip_name_format() -> String {
|
||||
@@ -86,6 +88,10 @@ fn default_whisper_language() -> String {
|
||||
"auto".to_string()
|
||||
}
|
||||
|
||||
fn default_webhook_url() -> String {
|
||||
String::new()
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn load(
|
||||
config_path: &PathBuf,
|
||||
@@ -123,6 +129,7 @@ impl Config {
|
||||
status_check_interval: default_status_check_interval(),
|
||||
config_path: config_path.to_str().unwrap().into(),
|
||||
whisper_language: default_whisper_language(),
|
||||
webhook_url: default_webhook_url(),
|
||||
};
|
||||
|
||||
config.save();
|
||||
@@ -170,13 +177,31 @@ impl Config {
|
||||
let format_config = format_config.replace("{platform}", platform.as_str());
|
||||
let format_config = format_config.replace("{room_id}", ¶ms.room_id.to_string());
|
||||
let format_config = format_config.replace("{live_id}", ¶ms.live_id);
|
||||
let format_config = format_config.replace("{x}", ¶ms.x.to_string());
|
||||
let format_config = format_config.replace("{y}", ¶ms.y.to_string());
|
||||
let format_config = format_config.replace(
|
||||
"{x}",
|
||||
¶ms
|
||||
.range
|
||||
.as_ref()
|
||||
.map_or("0".to_string(), |r| r.start.to_string()),
|
||||
);
|
||||
let format_config = format_config.replace(
|
||||
"{y}",
|
||||
¶ms
|
||||
.range
|
||||
.as_ref()
|
||||
.map_or("0".to_string(), |r| r.end.to_string()),
|
||||
);
|
||||
let format_config = format_config.replace(
|
||||
"{created_at}",
|
||||
&Utc::now().format("%Y-%m-%d_%H-%M-%S").to_string(),
|
||||
&Local::now().format("%Y-%m-%d_%H-%M-%S").to_string(),
|
||||
);
|
||||
let format_config = format_config.replace(
|
||||
"{length}",
|
||||
¶ms
|
||||
.range
|
||||
.as_ref()
|
||||
.map_or("0".to_string(), |r| r.duration().to_string()),
|
||||
);
|
||||
let format_config = format_config.replace("{length}", &(params.y - params.x).to_string());
|
||||
|
||||
let output = self.output.clone();
|
||||
|
||||
|
||||
4
src-tauri/src/constants.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
pub const PREFIX_SUBTITLE: &str = "[subtitle]";
|
||||
pub const PREFIX_IMPORTED: &str = "[imported]";
|
||||
pub const PREFIX_DANMAKU: &str = "[danmaku]";
|
||||
pub const PREFIX_CLIP: &str = "[clip]";
|
||||
@@ -24,32 +24,32 @@ struct DanmakuPosition {
|
||||
time: f64,
|
||||
}
|
||||
|
||||
const PLAY_RES_X: f64 = 1920.0;
|
||||
const PLAY_RES_Y: f64 = 1080.0;
|
||||
const PLAY_RES_X: f64 = 1280.0;
|
||||
const PLAY_RES_Y: f64 = 720.0;
|
||||
const BOTTOM_RESERVED: f64 = 50.0;
|
||||
const R2L_TIME: f64 = 8.0;
|
||||
const MAX_DELAY: f64 = 6.0;
|
||||
|
||||
pub fn danmu_to_ass(danmus: Vec<DanmuEntry>) -> String {
|
||||
// ASS header
|
||||
let header = r#"[Script Info]
|
||||
let header = r"[Script Info]
|
||||
Title: Bilibili Danmaku
|
||||
ScriptType: v4.00+
|
||||
Collisions: Normal
|
||||
PlayResX: 1920
|
||||
PlayResY: 1080
|
||||
PlayResX: 1280
|
||||
PlayResY: 720
|
||||
Timer: 10.0000
|
||||
|
||||
[V4+ Styles]
|
||||
Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding
|
||||
Style: Default,Microsoft YaHei,48,&H00FFFFFF,&H000000FF,&H00000000,&H00000000,0,0,0,0,100,100,0,0,1,2,0,2,20,20,2,0
|
||||
Style: Default,微软雅黑,36,&H7fFFFFFF,&H7fFFFFFF,&H7f000000,&H7f000000,0,0,0,0,100,100,0,0,1,1,0,2,20,20,2,0
|
||||
|
||||
[Events]
|
||||
Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
|
||||
"#;
|
||||
";
|
||||
|
||||
let mut normal = normal_danmaku();
|
||||
let font_size = 48.0; // Default font size
|
||||
let font_size = 36.0; // Default font size
|
||||
|
||||
// Convert danmus to ASS events
|
||||
let events = danmus
|
||||
@@ -76,7 +76,7 @@ Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
|
||||
"Dialogue: 0,{},{},Default,,0,0,0,,{{\\move({},{},{},{})}}{}",
|
||||
start_time,
|
||||
end_time,
|
||||
PLAY_RES_X,
|
||||
PLAY_RES_X + text_width / 2.0,
|
||||
pos.top + font_size, // Start position
|
||||
-text_width,
|
||||
pos.top + font_size, // End position
|
||||
@@ -87,22 +87,22 @@ Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
|
||||
.join("\n");
|
||||
|
||||
// Combine header and events
|
||||
format!("{}\n{}", header, events)
|
||||
format!("{header}\n{events}")
|
||||
}
|
||||
|
||||
fn format_time(seconds: f64) -> String {
|
||||
let hours = (seconds / 3600.0) as i32;
|
||||
let minutes = ((seconds % 3600.0) / 60.0) as i32;
|
||||
let seconds = seconds % 60.0;
|
||||
format!("{}:{:02}:{:05.2}", hours, minutes, seconds)
|
||||
format!("{hours}:{minutes:02}:{seconds:05.2}")
|
||||
}
|
||||
|
||||
fn escape_text(text: &str) -> String {
|
||||
text.replace("\\", "\\\\")
|
||||
.replace("{", "{")
|
||||
.replace("}", "}")
|
||||
.replace("\r", "")
|
||||
.replace("\n", "\\N")
|
||||
text.replace('\\', "\\\\")
|
||||
.replace('{', "{")
|
||||
.replace('}', "}")
|
||||
.replace('\r', "")
|
||||
.replace('\n', "\\N")
|
||||
}
|
||||
|
||||
fn normal_danmaku() -> impl FnMut(f64, f64, f64, bool) -> Option<DanmakuPosition> {
|
||||
@@ -144,8 +144,8 @@ fn normal_danmaku() -> impl FnMut(f64, f64, f64, bool) -> Option<DanmakuPosition
|
||||
|
||||
let p = space.m;
|
||||
let m = p + hv;
|
||||
let mut tas = t0s;
|
||||
let mut tal = t0l;
|
||||
let mut time_actual_start = t0s;
|
||||
let mut time_actual_leave = t0l;
|
||||
|
||||
for other in &used {
|
||||
if other.p >= m || other.m <= p {
|
||||
@@ -154,13 +154,13 @@ fn normal_danmaku() -> impl FnMut(f64, f64, f64, bool) -> Option<DanmakuPosition
|
||||
if other.b && b {
|
||||
continue;
|
||||
}
|
||||
tas = tas.max(other.tf);
|
||||
tal = tal.max(other.td);
|
||||
time_actual_start = time_actual_start.max(other.tf);
|
||||
time_actual_leave = time_actual_leave.max(other.td);
|
||||
}
|
||||
|
||||
suggestions.push(PositionSuggestion {
|
||||
p,
|
||||
r: (tas - t0s).max(tal - t0l),
|
||||
r: (time_actual_start - t0s).max(time_actual_leave - t0l),
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -6,10 +6,11 @@ use chrono::Utc;
|
||||
use rand::seq::SliceRandom;
|
||||
use rand::Rng;
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, sqlx::FromRow)]
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
|
||||
pub struct AccountRow {
|
||||
pub platform: String,
|
||||
pub uid: u64,
|
||||
pub uid: i64, // Keep for Bilibili compatibility
|
||||
pub id_str: Option<String>, // New field for string IDs like Douyin sec_uid
|
||||
pub name: String,
|
||||
pub avatar: String,
|
||||
pub csrf: String,
|
||||
@@ -29,70 +30,76 @@ impl Database {
|
||||
let platform = PlatformType::from_str(platform).unwrap();
|
||||
|
||||
let csrf = if platform == PlatformType::Douyin {
|
||||
Some("".to_string())
|
||||
Some(String::new())
|
||||
} else {
|
||||
// parse cookies
|
||||
cookies
|
||||
.split(';')
|
||||
.map(|cookie| cookie.trim())
|
||||
.map(str::trim)
|
||||
.find_map(|cookie| -> Option<String> {
|
||||
match cookie.starts_with("bili_jct=") {
|
||||
true => {
|
||||
let var_name = &"bili_jct=";
|
||||
Some(cookie[var_name.len()..].to_string())
|
||||
}
|
||||
false => None,
|
||||
if cookie.starts_with("bili_jct=") {
|
||||
let var_name = &"bili_jct=";
|
||||
Some(cookie[var_name.len()..].to_string())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
if csrf.is_none() {
|
||||
return Err(DatabaseError::InvalidCookiesError);
|
||||
return Err(DatabaseError::InvalidCookies);
|
||||
}
|
||||
|
||||
// parse uid
|
||||
let uid = if platform == PlatformType::BiliBili {
|
||||
cookies
|
||||
// parse uid and id_str based on platform
|
||||
let (uid, id_str) = if platform == PlatformType::BiliBili {
|
||||
// For Bilibili, extract numeric uid from cookies
|
||||
let uid = (*cookies
|
||||
.split("DedeUserID=")
|
||||
.collect::<Vec<&str>>()
|
||||
.get(1)
|
||||
.unwrap()
|
||||
.split(";")
|
||||
.split(';')
|
||||
.collect::<Vec<&str>>()
|
||||
.first()
|
||||
.unwrap()
|
||||
.to_string()
|
||||
.parse::<u64>()
|
||||
.map_err(|_| DatabaseError::InvalidCookiesError)?
|
||||
.unwrap())
|
||||
.to_string()
|
||||
.parse::<u64>()
|
||||
.map_err(|_| DatabaseError::InvalidCookies)?;
|
||||
(uid, None)
|
||||
} else {
|
||||
// generate a random uid
|
||||
rand::thread_rng().gen_range(10000..=i32::MAX) as u64
|
||||
// For Douyin, use temporary uid and will set id_str later with real sec_uid
|
||||
// Fix: Generate a u32 within the desired range and then cast to u64 to avoid `clippy::cast-sign-loss`.
|
||||
let temp_uid = rand::thread_rng().gen_range(10000u64..=i32::MAX as u64);
|
||||
(temp_uid, Some(format!("temp_{temp_uid}")))
|
||||
};
|
||||
|
||||
let uid = i64::try_from(uid).map_err(|_| DatabaseError::InvalidCookies)?;
|
||||
|
||||
let account = AccountRow {
|
||||
platform: platform.as_str().to_string(),
|
||||
uid,
|
||||
name: "".into(),
|
||||
avatar: "".into(),
|
||||
id_str,
|
||||
name: String::new(),
|
||||
avatar: String::new(),
|
||||
csrf: csrf.unwrap(),
|
||||
cookies: cookies.into(),
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
};
|
||||
|
||||
sqlx::query("INSERT INTO accounts (uid, platform, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7)").bind(account.uid as i64).bind(&account.platform).bind(&account.name).bind(&account.avatar).bind(&account.csrf).bind(&account.cookies).bind(&account.created_at).execute(&lock).await?;
|
||||
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)").bind(uid).bind(&account.platform).bind(&account.id_str).bind(&account.name).bind(&account.avatar).bind(&account.csrf).bind(&account.cookies).bind(&account.created_at).execute(&lock).await?;
|
||||
|
||||
Ok(account)
|
||||
}
|
||||
|
||||
pub async fn remove_account(&self, platform: &str, uid: u64) -> Result<(), DatabaseError> {
|
||||
pub async fn remove_account(&self, platform: &str, uid: i64) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let sql = sqlx::query("DELETE FROM accounts WHERE uid = $1 and platform = $2")
|
||||
.bind(uid as i64)
|
||||
.bind(uid)
|
||||
.bind(platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
if sql.rows_affected() != 1 {
|
||||
return Err(DatabaseError::NotFoundError);
|
||||
return Err(DatabaseError::NotFound);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -100,7 +107,7 @@ impl Database {
|
||||
pub async fn update_account(
|
||||
&self,
|
||||
platform: &str,
|
||||
uid: u64,
|
||||
uid: i64,
|
||||
name: &str,
|
||||
avatar: &str,
|
||||
) -> Result<(), DatabaseError> {
|
||||
@@ -110,16 +117,62 @@ impl Database {
|
||||
)
|
||||
.bind(name)
|
||||
.bind(avatar)
|
||||
.bind(uid as i64)
|
||||
.bind(uid)
|
||||
.bind(platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
if sql.rows_affected() != 1 {
|
||||
return Err(DatabaseError::NotFoundError);
|
||||
return Err(DatabaseError::NotFound);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_account_with_id_str(
|
||||
&self,
|
||||
old_account: &AccountRow,
|
||||
new_id_str: &str,
|
||||
name: &str,
|
||||
avatar: &str,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
|
||||
// If the id_str changed, we need to delete the old record and create a new one
|
||||
if old_account.id_str.as_deref() == Some(new_id_str) {
|
||||
// id_str is the same, just update name and avatar
|
||||
sqlx::query(
|
||||
"UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4",
|
||||
)
|
||||
.bind(name)
|
||||
.bind(avatar)
|
||||
.bind(old_account.uid)
|
||||
.bind(&old_account.platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
} else {
|
||||
// Delete the old record (for Douyin accounts, we use uid to identify)
|
||||
sqlx::query("DELETE FROM accounts WHERE uid = $1 and platform = $2")
|
||||
.bind(old_account.uid)
|
||||
.bind(&old_account.platform)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
|
||||
// Insert the new record with updated id_str
|
||||
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)")
|
||||
.bind(old_account.uid)
|
||||
.bind(&old_account.platform)
|
||||
.bind(new_id_str)
|
||||
.bind(name)
|
||||
.bind(avatar)
|
||||
.bind(&old_account.csrf)
|
||||
.bind(&old_account.cookies)
|
||||
.bind(&old_account.created_at)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_accounts(&self) -> Result<Vec<AccountRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, AccountRow>("SELECT * FROM accounts")
|
||||
@@ -127,12 +180,12 @@ impl Database {
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_account(&self, platform: &str, uid: u64) -> Result<AccountRow, DatabaseError> {
|
||||
pub async fn get_account(&self, platform: &str, uid: i64) -> Result<AccountRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, AccountRow>(
|
||||
"SELECT * FROM accounts WHERE uid = $1 and platform = $2",
|
||||
)
|
||||
.bind(uid as i64)
|
||||
.bind(uid)
|
||||
.bind(platform)
|
||||
.fetch_one(&lock)
|
||||
.await?)
|
||||
@@ -149,7 +202,7 @@ impl Database {
|
||||
.fetch_all(&lock)
|
||||
.await?;
|
||||
if accounts.is_empty() {
|
||||
return Err(DatabaseError::NotFoundError);
|
||||
return Err(DatabaseError::NotFound);
|
||||
}
|
||||
// randomly select one account
|
||||
let account = accounts.choose(&mut rand::thread_rng()).unwrap();
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use custom_error::custom_error;
|
||||
use sqlx::Pool;
|
||||
use sqlx::Sqlite;
|
||||
use thiserror::Error;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
pub mod account;
|
||||
@@ -14,23 +14,25 @@ pub struct Database {
|
||||
db: RwLock<Option<Pool<Sqlite>>>,
|
||||
}
|
||||
|
||||
custom_error! { pub DatabaseError
|
||||
InsertError = "Entry insert failed",
|
||||
NotFoundError = "Entry not found",
|
||||
InvalidCookiesError = "Cookies are invalid",
|
||||
DBError {err: sqlx::Error } = "DB error: {err}",
|
||||
SQLError { sql: String } = "SQL is incorret: {sql}"
|
||||
#[derive(Error, Debug)]
|
||||
pub enum DatabaseError {
|
||||
#[error("Entry insert failed")]
|
||||
Insert,
|
||||
#[error("Entry not found")]
|
||||
NotFound,
|
||||
#[error("Cookies are invalid")]
|
||||
InvalidCookies,
|
||||
#[error("Number exceed i64 range")]
|
||||
NumberExceedI64Range,
|
||||
#[error("DB error: {0}")]
|
||||
DB(#[from] sqlx::Error),
|
||||
#[error("SQL is incorret: {sql}")]
|
||||
Sql { sql: String },
|
||||
}
|
||||
|
||||
impl From<DatabaseError> for String {
|
||||
fn from(value: DatabaseError) -> Self {
|
||||
value.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<sqlx::Error> for DatabaseError {
|
||||
fn from(value: sqlx::Error) -> Self {
|
||||
DatabaseError::DBError { err: value }
|
||||
fn from(err: DatabaseError) -> Self {
|
||||
err.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,11 +4,12 @@ use super::Database;
|
||||
use super::DatabaseError;
|
||||
use chrono::Utc;
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, sqlx::FromRow)]
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
|
||||
pub struct RecordRow {
|
||||
pub platform: String,
|
||||
pub parent_id: String,
|
||||
pub live_id: String,
|
||||
pub room_id: u64,
|
||||
pub room_id: i64,
|
||||
pub title: String,
|
||||
pub length: i64,
|
||||
pub size: i64,
|
||||
@@ -18,53 +19,76 @@ pub struct RecordRow {
|
||||
|
||||
// CREATE TABLE records (live_id INTEGER PRIMARY KEY, room_id INTEGER, title TEXT, length INTEGER, size INTEGER, created_at TEXT);
|
||||
impl Database {
|
||||
pub async fn get_records(&self, room_id: u64) -> Result<Vec<RecordRow>, DatabaseError> {
|
||||
pub async fn get_records(
|
||||
&self,
|
||||
room_id: i64,
|
||||
offset: i64,
|
||||
limit: i64,
|
||||
) -> Result<Vec<RecordRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(
|
||||
sqlx::query_as::<_, RecordRow>("SELECT * FROM records WHERE room_id = $1")
|
||||
.bind(room_id as i64)
|
||||
.fetch_all(&lock)
|
||||
.await?,
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records WHERE room_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
|
||||
)
|
||||
.bind(room_id)
|
||||
.bind(limit)
|
||||
.bind(offset)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_record(
|
||||
&self,
|
||||
room_id: u64,
|
||||
room_id: i64,
|
||||
live_id: &str,
|
||||
) -> Result<RecordRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records WHERE live_id = $1 and room_id = $2",
|
||||
"SELECT * FROM records WHERE room_id = $1 and live_id = $2",
|
||||
)
|
||||
.bind(room_id)
|
||||
.bind(live_id)
|
||||
.bind(room_id as i64)
|
||||
.fetch_one(&lock)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn get_archives_by_parent_id(
|
||||
&self,
|
||||
room_id: i64,
|
||||
parent_id: &str,
|
||||
) -> Result<Vec<RecordRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records WHERE room_id = $1 and parent_id = $2",
|
||||
)
|
||||
.bind(room_id)
|
||||
.bind(parent_id)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn add_record(
|
||||
&self,
|
||||
platform: PlatformType,
|
||||
parent_id: &str,
|
||||
live_id: &str,
|
||||
room_id: u64,
|
||||
room_id: i64,
|
||||
title: &str,
|
||||
cover: Option<String>,
|
||||
created_at: Option<&str>,
|
||||
) -> Result<RecordRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let record = RecordRow {
|
||||
platform: platform.as_str().to_string(),
|
||||
parent_id: parent_id.to_string(),
|
||||
live_id: live_id.to_string(),
|
||||
room_id,
|
||||
title: title.into(),
|
||||
length: 0,
|
||||
size: 0,
|
||||
created_at: created_at.unwrap_or(&Utc::now().to_rfc3339()).to_string(),
|
||||
created_at: Utc::now().to_rfc3339().to_string(),
|
||||
cover,
|
||||
};
|
||||
if let Err(e) = sqlx::query("INSERT INTO records (live_id, room_id, title, length, size, cover, created_at, platform) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)").bind(record.live_id.clone())
|
||||
.bind(record.room_id as i64).bind(&record.title).bind(0).bind(0).bind(&record.cover).bind(&record.created_at).bind(platform.as_str().to_string()).execute(&lock).await {
|
||||
if let Err(e) = sqlx::query("INSERT INTO records (live_id, room_id, title, length, size, cover, created_at, platform, parent_id) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)").bind(record.live_id.clone())
|
||||
.bind(record.room_id).bind(&record.title).bind(0).bind(0).bind(&record.cover).bind(&record.created_at).bind(platform.as_str().to_string()).bind(parent_id).execute(&lock).await {
|
||||
// if the record already exists, return the existing record
|
||||
if e.to_string().contains("UNIQUE constraint failed") {
|
||||
return self.get_record(room_id, live_id).await;
|
||||
@@ -73,13 +97,17 @@ impl Database {
|
||||
Ok(record)
|
||||
}
|
||||
|
||||
pub async fn remove_record(&self, live_id: &str) -> Result<(), DatabaseError> {
|
||||
pub async fn remove_record(&self, live_id: &str) -> Result<RecordRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let to_delete = sqlx::query_as::<_, RecordRow>("SELECT * FROM records WHERE live_id = $1")
|
||||
.bind(live_id)
|
||||
.fetch_one(&lock)
|
||||
.await?;
|
||||
sqlx::query("DELETE FROM records WHERE live_id = $1")
|
||||
.bind(live_id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(())
|
||||
Ok(to_delete)
|
||||
}
|
||||
|
||||
pub async fn update_record(
|
||||
@@ -89,9 +117,38 @@ impl Database {
|
||||
size: u64,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let size = i64::try_from(size).map_err(|_| DatabaseError::NumberExceedI64Range)?;
|
||||
sqlx::query("UPDATE records SET length = $1, size = $2 WHERE live_id = $3")
|
||||
.bind(length)
|
||||
.bind(size as i64)
|
||||
.bind(size)
|
||||
.bind(live_id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_record_parent_id(
|
||||
&self,
|
||||
live_id: &str,
|
||||
parent_id: &str,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
sqlx::query("UPDATE records SET parent_id = $1 WHERE live_id = $2")
|
||||
.bind(parent_id)
|
||||
.bind(live_id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_record_cover(
|
||||
&self,
|
||||
live_id: &str,
|
||||
cover: Option<String>,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
sqlx::query("UPDATE records SET cover = $1 WHERE live_id = $2")
|
||||
.bind(cover)
|
||||
.bind(live_id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
@@ -123,16 +180,36 @@ impl Database {
|
||||
|
||||
pub async fn get_recent_record(
|
||||
&self,
|
||||
offset: u64,
|
||||
limit: u64,
|
||||
room_id: i64,
|
||||
offset: i64,
|
||||
limit: i64,
|
||||
) -> Result<Vec<RecordRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records ORDER BY created_at DESC LIMIT $1 OFFSET $2",
|
||||
)
|
||||
.bind(limit as i64)
|
||||
.bind(offset as i64)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
if room_id == 0 {
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records ORDER BY created_at DESC LIMIT $1 OFFSET $2",
|
||||
)
|
||||
.bind(limit)
|
||||
.bind(offset)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
} else {
|
||||
Ok(sqlx::query_as::<_, RecordRow>(
|
||||
"SELECT * FROM records WHERE room_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
|
||||
)
|
||||
.bind(room_id)
|
||||
.bind(limit)
|
||||
.bind(offset)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_record_disk_usage(&self) -> Result<i64, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let result: (i64,) = sqlx::query_as("SELECT SUM(size) FROM records;")
|
||||
.fetch_one(&lock)
|
||||
.await?;
|
||||
Ok(result.0)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,12 +4,13 @@ use crate::recorder::PlatformType;
|
||||
use chrono::Utc;
|
||||
/// Recorder in database is pretty simple
|
||||
/// because many room infos are collected in realtime
|
||||
#[derive(Debug, Clone, serde::Serialize, sqlx::FromRow)]
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
|
||||
pub struct RecorderRow {
|
||||
pub room_id: u64,
|
||||
pub room_id: i64,
|
||||
pub created_at: String,
|
||||
pub platform: String,
|
||||
pub auto_start: bool,
|
||||
pub extra: String,
|
||||
}
|
||||
|
||||
// recorders
|
||||
@@ -17,7 +18,8 @@ impl Database {
|
||||
pub async fn add_recorder(
|
||||
&self,
|
||||
platform: PlatformType,
|
||||
room_id: u64,
|
||||
room_id: i64,
|
||||
extra: &str,
|
||||
) -> Result<RecorderRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let recorder = RecorderRow {
|
||||
@@ -25,47 +27,54 @@ impl Database {
|
||||
created_at: Utc::now().to_rfc3339(),
|
||||
platform: platform.as_str().to_string(),
|
||||
auto_start: true,
|
||||
extra: extra.to_string(),
|
||||
};
|
||||
let _ = sqlx::query(
|
||||
"INSERT INTO recorders (room_id, created_at, platform, auto_start) VALUES ($1, $2, $3, $4)",
|
||||
"INSERT OR REPLACE INTO recorders (room_id, created_at, platform, auto_start, extra) VALUES ($1, $2, $3, $4, $5)",
|
||||
)
|
||||
.bind(room_id as i64)
|
||||
.bind(room_id)
|
||||
.bind(&recorder.created_at)
|
||||
.bind(platform.as_str())
|
||||
.bind(recorder.auto_start)
|
||||
.bind(extra)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(recorder)
|
||||
}
|
||||
|
||||
pub async fn remove_recorder(&self, room_id: u64) -> Result<(), DatabaseError> {
|
||||
pub async fn remove_recorder(&self, room_id: i64) -> Result<RecorderRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let recorder =
|
||||
sqlx::query_as::<_, RecorderRow>("SELECT * FROM recorders WHERE room_id = $1")
|
||||
.bind(room_id)
|
||||
.fetch_one(&lock)
|
||||
.await?;
|
||||
let sql = sqlx::query("DELETE FROM recorders WHERE room_id = $1")
|
||||
.bind(room_id as i64)
|
||||
.bind(room_id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
if sql.rows_affected() != 1 {
|
||||
return Err(DatabaseError::NotFoundError);
|
||||
return Err(DatabaseError::NotFound);
|
||||
}
|
||||
|
||||
// remove related archive
|
||||
let _ = self.remove_archive(room_id).await;
|
||||
Ok(())
|
||||
Ok(recorder)
|
||||
}
|
||||
|
||||
pub async fn get_recorders(&self) -> Result<Vec<RecorderRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
Ok(sqlx::query_as::<_, RecorderRow>(
|
||||
"SELECT room_id, created_at, platform, auto_start FROM recorders",
|
||||
"SELECT room_id, created_at, platform, auto_start, extra FROM recorders",
|
||||
)
|
||||
.fetch_all(&lock)
|
||||
.await?)
|
||||
}
|
||||
|
||||
pub async fn remove_archive(&self, room_id: u64) -> Result<(), DatabaseError> {
|
||||
pub async fn remove_archive(&self, room_id: i64) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let _ = sqlx::query("DELETE FROM records WHERE room_id = $1")
|
||||
.bind(room_id as i64)
|
||||
.bind(room_id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(())
|
||||
@@ -74,7 +83,7 @@ impl Database {
|
||||
pub async fn update_recorder(
|
||||
&self,
|
||||
platform: PlatformType,
|
||||
room_id: u64,
|
||||
room_id: i64,
|
||||
auto_start: bool,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
@@ -83,7 +92,7 @@ impl Database {
|
||||
)
|
||||
.bind(auto_start)
|
||||
.bind(platform.as_str().to_string())
|
||||
.bind(room_id as i64)
|
||||
.bind(room_id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
Ok(())
|
||||
|
||||
@@ -13,6 +13,27 @@ pub struct TaskRow {
|
||||
}
|
||||
|
||||
impl Database {
|
||||
pub async fn generate_task(
|
||||
&self,
|
||||
task_type: &str,
|
||||
message: &str,
|
||||
metadata: &str,
|
||||
) -> Result<TaskRow, DatabaseError> {
|
||||
let task_id = uuid::Uuid::new_v4().to_string();
|
||||
let task = TaskRow {
|
||||
id: task_id,
|
||||
task_type: task_type.to_string(),
|
||||
status: "pending".to_string(),
|
||||
message: message.to_string(),
|
||||
metadata: metadata.to_string(),
|
||||
created_at: chrono::Utc::now().to_rfc3339(),
|
||||
};
|
||||
|
||||
self.add_task(&task).await?;
|
||||
|
||||
Ok(task)
|
||||
}
|
||||
|
||||
pub async fn add_task(&self, task: &TaskRow) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let _ = sqlx::query(
|
||||
|
||||
@@ -2,29 +2,13 @@ use super::Database;
|
||||
use super::DatabaseError;
|
||||
|
||||
// CREATE TABLE videos (id INTEGER PRIMARY KEY, room_id INTEGER, cover TEXT, file TEXT, length INTEGER, size INTEGER, status INTEGER, bvid TEXT, title TEXT, desc TEXT, tags TEXT, area INTEGER, created_at TEXT);
|
||||
#[derive(Debug, Clone, serde::Serialize, sqlx::FromRow)]
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
|
||||
pub struct VideoRow {
|
||||
pub id: i64,
|
||||
pub room_id: u64,
|
||||
pub room_id: i64,
|
||||
pub cover: String,
|
||||
pub file: String,
|
||||
pub length: i64,
|
||||
pub size: i64,
|
||||
pub status: i64,
|
||||
pub bvid: String,
|
||||
pub title: String,
|
||||
pub desc: String,
|
||||
pub tags: String,
|
||||
pub area: i64,
|
||||
pub created_at: String,
|
||||
pub platform: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, serde::Serialize, sqlx::FromRow)]
|
||||
pub struct VideoNoCover {
|
||||
pub id: i64,
|
||||
pub room_id: u64,
|
||||
pub file: String,
|
||||
pub note: String,
|
||||
pub length: i64,
|
||||
pub size: i64,
|
||||
pub status: i64,
|
||||
@@ -38,10 +22,10 @@ pub struct VideoNoCover {
|
||||
}
|
||||
|
||||
impl Database {
|
||||
pub async fn get_videos(&self, room_id: u64) -> Result<Vec<VideoNoCover>, DatabaseError> {
|
||||
pub async fn get_videos(&self, room_id: i64) -> Result<Vec<VideoRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let videos = sqlx::query_as::<_, VideoNoCover>("SELECT * FROM videos WHERE room_id = $1;")
|
||||
.bind(room_id as i64)
|
||||
let videos = sqlx::query_as::<_, VideoRow>("SELECT * FROM videos WHERE room_id = $1;")
|
||||
.bind(room_id)
|
||||
.fetch_all(&lock)
|
||||
.await?;
|
||||
Ok(videos)
|
||||
@@ -59,13 +43,14 @@ impl Database {
|
||||
|
||||
pub async fn update_video(&self, video_row: &VideoRow) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
sqlx::query("UPDATE videos SET status = $1, bvid = $2, title = $3, desc = $4, tags = $5, area = $6 WHERE id = $7")
|
||||
sqlx::query("UPDATE videos SET status = $1, bvid = $2, title = $3, desc = $4, tags = $5, area = $6, note = $7 WHERE id = $8")
|
||||
.bind(video_row.status)
|
||||
.bind(&video_row.bvid)
|
||||
.bind(&video_row.title)
|
||||
.bind(&video_row.desc)
|
||||
.bind(&video_row.tags)
|
||||
.bind(video_row.area)
|
||||
.bind(&video_row.note)
|
||||
.bind(video_row.id)
|
||||
.execute(&lock)
|
||||
.await?;
|
||||
@@ -83,10 +68,11 @@ impl Database {
|
||||
|
||||
pub async fn add_video(&self, video: &VideoRow) -> Result<VideoRow, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let sql = sqlx::query("INSERT INTO videos (room_id, cover, file, length, size, status, bvid, title, desc, tags, area, created_at, platform) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)")
|
||||
.bind(video.room_id as i64)
|
||||
let sql = sqlx::query("INSERT INTO videos (room_id, cover, file, note, length, size, status, bvid, title, desc, tags, area, created_at, platform) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)")
|
||||
.bind(video.room_id)
|
||||
.bind(&video.cover)
|
||||
.bind(&video.file)
|
||||
.bind(&video.note)
|
||||
.bind(video.length)
|
||||
.bind(video.size)
|
||||
.bind(video.status)
|
||||
@@ -106,7 +92,7 @@ impl Database {
|
||||
Ok(video)
|
||||
}
|
||||
|
||||
pub async fn update_video_cover(&self, id: i64, cover: String) -> Result<(), DatabaseError> {
|
||||
pub async fn update_video_cover(&self, id: i64, cover: &str) -> Result<(), DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
sqlx::query("UPDATE videos SET cover = $1 WHERE id = $2")
|
||||
.bind(cover)
|
||||
@@ -116,10 +102,10 @@ impl Database {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_all_videos(&self) -> Result<Vec<VideoNoCover>, DatabaseError> {
|
||||
pub async fn get_all_videos(&self) -> Result<Vec<VideoRow>, DatabaseError> {
|
||||
let lock = self.db.read().await.clone().unwrap();
|
||||
let videos =
|
||||
sqlx::query_as::<_, VideoNoCover>("SELECT * FROM videos ORDER BY created_at DESC;")
|
||||
sqlx::query_as::<_, VideoRow>("SELECT * FROM videos ORDER BY created_at DESC;")
|
||||
.fetch_all(&lock)
|
||||
.await?;
|
||||
Ok(videos)
|
||||
|
||||
@@ -1,472 +0,0 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Stdio;
|
||||
|
||||
use crate::progress_reporter::ProgressReporterTrait;
|
||||
use async_ffmpeg_sidecar::event::FfmpegEvent;
|
||||
use async_ffmpeg_sidecar::log_parser::FfmpegLogParser;
|
||||
use tokio::io::BufReader;
|
||||
|
||||
pub async fn clip_from_m3u8(
|
||||
reporter: Option<&impl ProgressReporterTrait>,
|
||||
m3u8_index: &Path,
|
||||
output_path: &Path,
|
||||
) -> Result<(), String> {
|
||||
// first check output folder exists
|
||||
let output_folder = output_path.parent().unwrap();
|
||||
if !output_folder.exists() {
|
||||
log::warn!(
|
||||
"Output folder does not exist, creating: {}",
|
||||
output_folder.display()
|
||||
);
|
||||
std::fs::create_dir_all(output_folder).unwrap();
|
||||
}
|
||||
|
||||
let child = tokio::process::Command::new(ffmpeg_path())
|
||||
.args(["-i", &format!("{}", m3u8_index.display())])
|
||||
.args(["-c", "copy"])
|
||||
.args(["-y", output_path.to_str().unwrap()])
|
||||
.args(["-progress", "pipe:2"])
|
||||
.stderr(Stdio::piped())
|
||||
.spawn();
|
||||
|
||||
if let Err(e) = child {
|
||||
return Err(format!("Spawn ffmpeg process failed: {}", e));
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
let stderr = child.stderr.take().unwrap();
|
||||
let reader = BufReader::new(stderr);
|
||||
let mut parser = FfmpegLogParser::new(reader);
|
||||
|
||||
let mut clip_error = None;
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::Progress(p) => {
|
||||
if reporter.is_none() {
|
||||
continue;
|
||||
}
|
||||
reporter
|
||||
.unwrap()
|
||||
.update(format!("编码中:{}", p.time).as_str())
|
||||
}
|
||||
FfmpegEvent::LogEOF => break,
|
||||
FfmpegEvent::Log(_level, _content) => {}
|
||||
FfmpegEvent::Error(e) => {
|
||||
log::error!("Clip error: {}", e);
|
||||
clip_error = Some(e.to_string());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if let Err(e) = child.wait().await {
|
||||
log::error!("Clip error: {}", e);
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
if let Some(error) = clip_error {
|
||||
log::error!("Clip error: {}", error);
|
||||
Err(error)
|
||||
} else {
|
||||
log::info!("Clip task end: {}", output_path.display());
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract the audio track of `file` and split it into 30-second chunks
/// in the given `format` ("mp3" is special-cased; anything else is encoded
/// as 16-bit PCM, e.g. "wav"). Chunks are written to a sibling directory
/// named `<stem>_chunks` and that directory's path is returned.
///
/// # Errors
/// Returns a `String` describing any ffprobe/ffmpeg failure.
pub async fn extract_audio_chunks(file: &Path, format: &str) -> Result<PathBuf, String> {
    // ffmpeg -i fixed_\[30655190\]1742887114_0325084106_81.5.mp4 -ar 16000 test.wav
    log::info!("Extract audio task start: {}", file.display());
    let output_path = file.with_extension(format);
    let mut extract_error = None;

    // Lower sample rates speed up processing while keeping enough audio
    // quality for speech recognition.
    let sample_rate = if format == "mp3" { "22050" } else { "16000" };

    // First, get the duration of the input file
    let duration = get_audio_duration(file).await?;
    log::info!("Audio duration: {} seconds", duration);

    // Split into chunks of 30 seconds
    let chunk_duration = 30;
    let chunk_count = (duration as f64 / chunk_duration as f64).ceil() as usize;
    log::info!(
        "Splitting into {} chunks of {} seconds each",
        chunk_count,
        chunk_duration
    );

    // Create output directory for chunks
    let output_dir = output_path.parent().unwrap();
    let base_name = output_path.file_stem().unwrap().to_str().unwrap();
    let chunk_dir = output_dir.join(format!("{}_chunks", base_name));

    if !chunk_dir.exists() {
        std::fs::create_dir_all(&chunk_dir)
            .map_err(|e| format!("Failed to create chunk directory: {}", e))?;
    }

    // Use ffmpeg segment feature to split audio into chunks
    let segment_pattern = chunk_dir.join(format!("{}_%03d.{}", base_name, format));

    // Build the ffmpeg argument list. The string values below are bound to
    // locals first because `args` holds `&str` borrows into them.
    let file_str = file.to_str().unwrap();
    let chunk_duration_str = chunk_duration.to_string();
    let segment_pattern_str = segment_pattern.to_str().unwrap();

    let mut args = vec![
        "-i",
        file_str,
        "-ar",
        sample_rate,
        "-vn",
        "-f",
        "segment",
        "-segment_time",
        &chunk_duration_str,
        "-reset_timestamps",
        "1",
        "-y",
        "-progress",
        "pipe:2",
    ];

    // Add format-specific encoder options tuned for speed.
    if format == "mp3" {
        args.extend_from_slice(&[
            "-c:a",
            "mp3",
            "-b:a",
            "64k", // lower bitrate for faster encoding
            "-compression_level",
            "0", // fastest compression
        ]);
    } else {
        args.extend_from_slice(&[
            "-c:a",
            "pcm_s16le", // PCM encoding is faster
        ]);
    }

    // Performance tuning.
    args.extend_from_slice(&[
        "-threads", "0", // use all available CPU cores
    ]);

    // The segment output pattern must be the last argument.
    args.push(segment_pattern_str);

    let child = tokio::process::Command::new(ffmpeg_path())
        .args(&args)
        .stderr(Stdio::piped())
        .spawn();

    if let Err(e) = child {
        return Err(e.to_string());
    }

    let mut child = child.unwrap();
    let stderr = child.stderr.take().unwrap();
    let reader = BufReader::new(stderr);
    let mut parser = FfmpegLogParser::new(reader);

    // Drain ffmpeg's stderr, remembering the last reported error (if any).
    while let Ok(event) = parser.parse_next_event().await {
        match event {
            FfmpegEvent::Error(e) => {
                log::error!("Extract audio error: {}", e);
                extract_error = Some(e.to_string());
            }
            FfmpegEvent::LogEOF => break,
            FfmpegEvent::Log(_level, _content) => {}
            _ => {}
        }
    }

    if let Err(e) = child.wait().await {
        log::error!("Extract audio error: {}", e);
        return Err(e.to_string());
    }

    if let Some(error) = extract_error {
        log::error!("Extract audio error: {}", error);
        Err(error)
    } else {
        log::info!(
            "Extract audio task end: {} chunks created in {}",
            chunk_count,
            chunk_dir.display()
        );
        Ok(chunk_dir)
    }
}
|
||||
|
||||
/// Get the duration of an audio/video file in seconds.
///
/// Runs `ffprobe -show_entries format=duration -of csv=p=0` and parses the
/// single float it prints, rounded up to whole seconds.
///
/// # Errors
/// Returns a `String` when ffprobe cannot be spawned, exits abnormally, or
/// its output cannot be parsed as a duration.
async fn get_audio_duration(file: &Path) -> Result<u64, String> {
    // Use ffprobe with format option to get duration
    let child = tokio::process::Command::new(ffprobe_path())
        .args(["-v", "quiet"])
        .args(["-show_entries", "format=duration"])
        .args(["-of", "csv=p=0"])
        .args(["-i", file.to_str().unwrap()])
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn();

    if let Err(e) = child {
        return Err(format!("Failed to spawn ffprobe process: {}", e));
    }

    let mut child = child.unwrap();
    // ffprobe prints the duration on stdout (stderr is silenced by -v quiet).
    let stdout = child.stdout.take().unwrap();
    let reader = BufReader::new(stdout);
    let mut parser = FfmpegLogParser::new(reader);

    let mut duration = None;
    while let Ok(event) = parser.parse_next_event().await {
        match event {
            FfmpegEvent::LogEOF => break,
            FfmpegEvent::Log(_level, content) => {
                // The new command outputs duration directly as a float;
                // keep the last successfully parsed value.
                if let Ok(seconds_f64) = content.trim().parse::<f64>() {
                    duration = Some(seconds_f64.ceil() as u64);
                    log::debug!("Parsed duration: {} seconds", seconds_f64);
                }
            }
            _ => {}
        }
    }

    if let Err(e) = child.wait().await {
        log::error!("Failed to get duration: {}", e);
        return Err(e.to_string());
    }

    duration.ok_or_else(|| "Failed to parse duration".to_string())
}
|
||||
|
||||
/// Burn an SRT subtitle file into `file` using libx264 (audio copied),
/// writing the result next to the input with a "[subtitle]" filename
/// prefix. Returns the output file *name* (not the full path).
///
/// # Errors
/// Returns a `String` when the output already exists, ffmpeg cannot be
/// spawned, exits abnormally, or reports an error event.
pub async fn encode_video_subtitle(
    reporter: &impl ProgressReporterTrait,
    file: &Path,
    subtitle: &Path,
    srt_style: String,
) -> Result<String, String> {
    // ffmpeg -i fixed_\[30655190\]1742887114_0325084106_81.5.mp4 -vf "subtitles=test.srt:force_style='FontSize=24'" -c:v libx264 -c:a copy output.mp4
    log::info!("Encode video subtitle task start: {}", file.display());
    log::info!("srt_style: {}", srt_style);
    // output path is file with prefix [subtitle]
    let output_filename = format!("[subtitle]{}", file.file_name().unwrap().to_str().unwrap());
    let output_path = file.with_file_name(&output_filename);

    // check output path exists
    if output_path.exists() {
        log::info!("Output path already exists: {}", output_path.display());
        return Err("Output path already exists".to_string());
    }

    let mut command_error = None;

    // On Windows the subtitle path is embedded in the filter string, so
    // backslashes and drive-letter colons must be escaped.
    let subtitle = if cfg!(target_os = "windows") {
        // escape characters in subtitle path
        let subtitle = subtitle
            .to_str()
            .unwrap()
            .replace("\\", "\\\\")
            .replace(":", "\\:");
        format!("'{}'", subtitle)
    } else {
        format!("'{}'", subtitle.display())
    };
    let vf = format!("subtitles={}:force_style='{}'", subtitle, srt_style);
    log::info!("vf: {}", vf);

    // NOTE(review): `-y` and `-progress` are placed after the output file;
    // ffmpeg treats options following the last output as trailing and may
    // ignore them — confirm this ordering is intended.
    let child = tokio::process::Command::new(ffmpeg_path())
        .args(["-i", file.to_str().unwrap()])
        .args(["-vf", vf.as_str()])
        .args(["-c:v", "libx264"])
        .args(["-c:a", "copy"])
        .args([output_path.to_str().unwrap()])
        .args(["-y"])
        .args(["-progress", "pipe:2"])
        .stderr(Stdio::piped())
        .spawn();

    if let Err(e) = child {
        return Err(e.to_string());
    }

    let mut child = child.unwrap();
    let stderr = child.stderr.take().unwrap();
    let reader = BufReader::new(stderr);
    let mut parser = FfmpegLogParser::new(reader);

    // Drain ffmpeg's stderr: forward progress to the reporter, remember the
    // last reported error (if any).
    while let Ok(event) = parser.parse_next_event().await {
        match event {
            FfmpegEvent::Error(e) => {
                log::error!("Encode video subtitle error: {}", e);
                command_error = Some(e.to_string());
            }
            FfmpegEvent::Progress(p) => {
                log::info!("Encode video subtitle progress: {}", p.time);
                reporter.update(format!("压制中:{}", p.time).as_str());
            }
            FfmpegEvent::LogEOF => break,
            FfmpegEvent::Log(_level, _content) => {}
            _ => {}
        }
    }

    if let Err(e) = child.wait().await {
        log::error!("Encode video subtitle error: {}", e);
        return Err(e.to_string());
    }

    if let Some(error) = command_error {
        log::error!("Encode video subtitle error: {}", error);
        Err(error)
    } else {
        log::info!("Encode video subtitle task end: {}", output_path.display());
        Ok(output_filename)
    }
}
|
||||
|
||||
/// Burn an ASS danmu (bullet-comment) subtitle file into `file` using
/// libx264 (audio copied), writing the result next to the input with a
/// "[danmu]" filename prefix. Returns the full output path.
///
/// # Errors
/// Returns a `String` when the output already exists, ffmpeg cannot be
/// spawned, exits abnormally, or reports an error event.
pub async fn encode_video_danmu(
    reporter: Option<&impl ProgressReporterTrait>,
    file: &Path,
    subtitle: &Path,
) -> Result<PathBuf, String> {
    // ffmpeg -i fixed_\[30655190\]1742887114_0325084106_81.5.mp4 -vf ass=subtitle.ass -c:v libx264 -c:a copy output.mp4
    log::info!("Encode video danmu task start: {}", file.display());
    let danmu_filename = format!("[danmu]{}", file.file_name().unwrap().to_str().unwrap());
    let output_path = file.with_file_name(danmu_filename);

    // check output path exists
    if output_path.exists() {
        log::info!("Output path already exists: {}", output_path.display());
        return Err("Output path already exists".to_string());
    }

    let mut command_error = None;

    // On Windows the subtitle path is embedded in the filter string, so
    // backslashes and drive-letter colons must be escaped.
    let subtitle = if cfg!(target_os = "windows") {
        // escape characters in subtitle path
        let subtitle = subtitle
            .to_str()
            .unwrap()
            .replace("\\", "\\\\")
            .replace(":", "\\:");
        format!("'{}'", subtitle)
    } else {
        format!("'{}'", subtitle.display())
    };

    // NOTE(review): `-y` and `-progress` are placed after the output file;
    // ffmpeg treats options following the last output as trailing and may
    // ignore them — confirm this ordering is intended.
    let child = tokio::process::Command::new(ffmpeg_path())
        .args(["-i", file.to_str().unwrap()])
        .args(["-vf", &format!("ass={}", subtitle)])
        .args(["-c:v", "libx264"])
        .args(["-c:a", "copy"])
        .args([output_path.to_str().unwrap()])
        .args(["-y"])
        .args(["-progress", "pipe:2"])
        .stderr(Stdio::piped())
        .spawn();

    if let Err(e) = child {
        return Err(e.to_string());
    }

    let mut child = child.unwrap();
    let stderr = child.stderr.take().unwrap();
    let reader = BufReader::new(stderr);
    let mut parser = FfmpegLogParser::new(reader);

    // Drain ffmpeg's stderr: forward progress to the optional reporter,
    // remember the last reported error (if any).
    while let Ok(event) = parser.parse_next_event().await {
        match event {
            FfmpegEvent::Error(e) => {
                log::error!("Encode video danmu error: {}", e);
                command_error = Some(e.to_string());
            }
            FfmpegEvent::Progress(p) => {
                log::info!("Encode video danmu progress: {}", p.time);
                if reporter.is_none() {
                    continue;
                }
                reporter
                    .unwrap()
                    .update(format!("压制中:{}", p.time).as_str());
            }
            FfmpegEvent::Log(_level, _content) => {}
            FfmpegEvent::LogEOF => break,
            _ => {}
        }
    }

    if let Err(e) = child.wait().await {
        log::error!("Encode video danmu error: {}", e);
        return Err(e.to_string());
    }

    if let Some(error) = command_error {
        log::error!("Encode video danmu error: {}", error);
        Err(error)
    } else {
        log::info!("Encode video danmu task end: {}", output_path.display());
        Ok(output_path)
    }
}
|
||||
|
||||
/// Trying to run ffmpeg for version
|
||||
pub async fn check_ffmpeg() -> Result<String, String> {
|
||||
let child = tokio::process::Command::new(ffmpeg_path())
|
||||
.arg("-version")
|
||||
.stdout(Stdio::piped())
|
||||
.spawn();
|
||||
if let Err(e) = child {
|
||||
log::error!("Faild to spwan ffmpeg process: {e}");
|
||||
return Err(e.to_string());
|
||||
}
|
||||
|
||||
let mut child = child.unwrap();
|
||||
|
||||
let stdout = child.stdout.take();
|
||||
if stdout.is_none() {
|
||||
log::error!("Failed to take ffmpeg output");
|
||||
return Err("Failed to take ffmpeg output".into());
|
||||
}
|
||||
|
||||
let stdout = stdout.unwrap();
|
||||
let reader = BufReader::new(stdout);
|
||||
let mut parser = FfmpegLogParser::new(reader);
|
||||
|
||||
let mut version = None;
|
||||
while let Ok(event) = parser.parse_next_event().await {
|
||||
match event {
|
||||
FfmpegEvent::ParsedVersion(v) => version = Some(v.version),
|
||||
FfmpegEvent::LogEOF => break,
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(version) = version {
|
||||
Ok(version)
|
||||
} else {
|
||||
Err("Failed to parse version from output".into())
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve the ffmpeg executable name: "ffmpeg", or "ffmpeg.exe" on Windows.
fn ffmpeg_path() -> PathBuf {
    let mut path = PathBuf::from("ffmpeg");
    if cfg!(windows) {
        path.set_extension("exe");
    }
    path
}
|
||||
|
||||
/// Resolve the ffprobe executable name: "ffprobe", or "ffprobe.exe" on Windows.
fn ffprobe_path() -> PathBuf {
    let mut path = PathBuf::from("ffprobe");
    if cfg!(windows) {
        path.set_extension("exe");
    }
    path
}
|
||||
122
src-tauri/src/ffmpeg/general.rs
Normal file
@@ -0,0 +1,122 @@
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
process::Stdio,
|
||||
};
|
||||
|
||||
use async_ffmpeg_sidecar::{event::FfmpegEvent, log_parser::FfmpegLogParser};
|
||||
use tokio::io::{AsyncWriteExt, BufReader};
|
||||
|
||||
use crate::progress::progress_reporter::ProgressReporterTrait;
|
||||
|
||||
use super::ffmpeg_path;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
const CREATE_NO_WINDOW: u32 = 0x08000000;
|
||||
#[cfg(target_os = "windows")]
|
||||
#[allow(unused_imports)]
|
||||
use std::os::windows::process::CommandExt;
|
||||
|
||||
/// Generate a random filename in hex
|
||||
pub async fn random_filename() -> String {
|
||||
format!("{:x}", rand::random::<u64>())
|
||||
}
|
||||
|
||||
/// Run a prepared ffmpeg command to completion, streaming progress to the
/// optional `reporter` and surfacing ffmpeg-reported errors.
///
/// The command's stderr is piped and parsed for events; the caller must
/// have set `-progress pipe:2` for progress events to appear. On success,
/// the reporter (if any) receives a final "合成完成" update.
///
/// # Errors
/// Returns a `String` when the process cannot be spawned, ffmpeg emits an
/// error event, or waiting on the child fails.
pub async fn handle_ffmpeg_process(
    reporter: Option<&impl ProgressReporterTrait>,
    ffmpeg_process: &mut tokio::process::Command,
) -> Result<(), String> {
    let child = ffmpeg_process.stderr(Stdio::piped()).spawn();
    if let Err(e) = child {
        return Err(e.to_string());
    }
    let mut child = child.unwrap();
    let stderr = child.stderr.take().unwrap();
    let reader = BufReader::new(stderr);
    let mut parser = FfmpegLogParser::new(reader);
    while let Ok(event) = parser.parse_next_event().await {
        match event {
            FfmpegEvent::Progress(p) => {
                if let Some(reporter) = reporter {
                    reporter.update(p.time.to_string().as_str());
                }
            }
            FfmpegEvent::LogEOF => break,
            FfmpegEvent::Error(e) => {
                // NOTE(review): returning here leaves the child unreaped and
                // running — confirm whether an explicit wait/kill is wanted.
                return Err(e.to_string());
            }
            _ => {}
        }
    }
    if let Err(e) = child.wait().await {
        return Err(e.to_string());
    }

    if let Some(reporter) = reporter {
        reporter.update("合成完成");
    }

    Ok(())
}
|
||||
|
||||
pub async fn concat_videos(
|
||||
reporter: Option<&impl ProgressReporterTrait>,
|
||||
videos: &[PathBuf],
|
||||
output_path: &Path,
|
||||
) -> Result<(), String> {
|
||||
// ffmpeg -i input1.mp4 -i input2.mp4 -i input3.mp4 -c copy output.mp4
|
||||
let mut ffmpeg_process = tokio::process::Command::new(ffmpeg_path());
|
||||
#[cfg(target_os = "windows")]
|
||||
ffmpeg_process.creation_flags(CREATE_NO_WINDOW);
|
||||
|
||||
let output_folder = output_path.parent().unwrap();
|
||||
if !output_folder.exists() {
|
||||
std::fs::create_dir_all(output_folder).unwrap();
|
||||
}
|
||||
|
||||
let filelist_filename = format!("filelist_{}.txt", random_filename().await);
|
||||
|
||||
let mut filelist = tokio::fs::File::create(&output_folder.join(&filelist_filename))
|
||||
.await
|
||||
.unwrap();
|
||||
for video in videos {
|
||||
filelist
|
||||
.write_all(format!("file '{}'\n", video.display()).as_bytes())
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
filelist.flush().await.unwrap();
|
||||
|
||||
// Convert &[PathBuf] to &[&Path] for check_videos
|
||||
let video_refs: Vec<&Path> = videos.iter().map(|p| p.as_path()).collect();
|
||||
let should_encode = !super::check_videos(&video_refs).await;
|
||||
|
||||
ffmpeg_process.args([
|
||||
"-f",
|
||||
"concat",
|
||||
"-safe",
|
||||
"0",
|
||||
"-i",
|
||||
output_folder.join(&filelist_filename).to_str().unwrap(),
|
||||
]);
|
||||
if should_encode {
|
||||
ffmpeg_process.args(["-vf", "scale=1920:1080"]);
|
||||
ffmpeg_process.args(["-r", "60"]);
|
||||
ffmpeg_process.args(["-c", "libx264"]);
|
||||
ffmpeg_process.args(["-c:a", "aac"]);
|
||||
ffmpeg_process.args(["-b:v", "6000k"]);
|
||||
ffmpeg_process.args(["-b:a", "128k"]);
|
||||
ffmpeg_process.args(["-threads", "0"]);
|
||||
} else {
|
||||
ffmpeg_process.args(["-c", "copy"]);
|
||||
}
|
||||
ffmpeg_process.args([output_path.to_str().unwrap()]);
|
||||
ffmpeg_process.args(["-progress", "pipe:2"]);
|
||||
ffmpeg_process.args(["-y"]);
|
||||
|
||||
handle_ffmpeg_process(reporter, &mut ffmpeg_process).await?;
|
||||
|
||||
// clean up filelist
|
||||
let _ = tokio::fs::remove_file(output_folder.join(&filelist_filename)).await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||