Compare commits

...

161 Commits

Author SHA1 Message Date
Xinrea
d3fe8dee2c feat: introduce static server for file access (close #220) (#223) 2025-11-09 22:18:26 +08:00
Xinrea
c97e0649a9 bump version to 2.16.3 2025-11-09 17:07:40 +08:00
Xinrea
26e1271681 fix: video import with number roomId (#222) 2025-11-09 15:52:43 +08:00
Xinrea
7155fef677 bump version to 2.16.2 2025-11-06 22:11:38 +08:00
Xinrea
0566ae78e7 fix: douyin encoding with danmu (#221) 2025-11-06 22:09:41 +08:00
Xinrea
cf439d60c4 feat: segment with query param (#219)
* bump version to 2.16.1

* fix: read segment with params
2025-11-04 23:00:34 +08:00
Xinrea
6127c67cd3 fix: check encoders list (#218) 2025-11-02 13:52:01 +08:00
Xinrea
183eb063bb bump version to 2.16.0 2025-11-02 11:54:35 +08:00
Xinrea
532609c57d fix: cover uploading error (#217) 2025-11-02 11:52:07 +08:00
Xinrea
83c6979973 feat: task scheduler (#216)
* feat: update progress in task message

* feat: task scheduler for queued tasks
2025-11-02 11:20:27 +08:00
Xinrea
8bea9336ae fix: multiple danmu task running (#215) 2025-11-01 23:43:40 +08:00
Xinrea
617a6a0b8e fix: panic generating whole live without danmu (#214) 2025-11-01 22:07:52 +08:00
Xinrea
140ab772d0 feat: auto using hwaccel x264 encoder (#213)
* feat: auto using hwaccel x264 encoder

* fix: ffmpeg args error
2025-11-01 21:50:03 +08:00
Xinrea
e7d8c8814d fix: shortcut key conflicts with input (#212)
* fix: shortcut key conflicts with input

* Update src/lib/components/VideoPreview.svelte

Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com>

---------

Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com>
2025-11-01 20:44:27 +08:00
Xinrea
588559c645 refactor: migrate ids to str for compatibility (#211)
* refactor: migrate ids to str for compatibility

* feat: handle get item from cookies
2025-11-01 19:27:39 +08:00
Xinrea
e7411d25b4 fix(bilibili): add shuffle for url_info (#207) 2025-10-31 00:13:27 +08:00
Xinrea
ed0bd88e3b fix: douyin segment with params 2025-10-29 00:28:00 +08:00
Xinrea
b4fb2d058a fix: update interval setting 2025-10-29 00:16:06 +08:00
Xinrea
4058b425c8 feat: only query userinfo once 2025-10-28 23:31:15 +08:00
Xinrea
55d872a38c bump version to 2.15.4 2025-10-26 23:13:01 +08:00
Xinrea
1f666d402d feat: simplify douyin room-adding (close #196) 2025-10-26 22:03:04 +08:00
Xinrea
64dec36773 fix: douyin recording record live_id 2025-10-25 22:25:20 +08:00
Xinrea
0b3df59015 bump version to 2.15.3 2025-10-25 20:37:55 +08:00
Xinrea
e1b4f8ede9 fix: clip danmu not in range 2025-10-25 20:30:22 +08:00
Xinrea
b8c3f0a464 bump version to 2.15.2 2025-10-25 18:48:01 +08:00
Xinrea
ee714c855b fix: quit recording when error 2025-10-25 18:44:52 +08:00
Xinrea
c5165c752d fix: room filter 2025-10-25 17:47:15 +08:00
Xinrea
8c275c2edb fix: config cache 2025-10-25 13:59:55 +08:00
Xinrea
2fc6f673d3 fix: reduce requests for config 2025-10-25 13:52:04 +08:00
Xinrea
b7c9d12e41 fix: cannot remove bilibili account 2025-10-25 13:48:59 +08:00
Xinrea
8ca2934e8d docs: update 2025-10-25 13:26:47 +08:00
Xinrea
868a2a1940 fix: api in headless 2025-10-25 13:23:04 +08:00
Xinrea
a1e57c5b9c feat: basic danmu encoding configuration (close #103) 2025-10-25 13:16:59 +08:00
Xinrea
b7a76e8f10 chore: bundle font assets 2025-10-25 12:40:15 +08:00
Xinrea
e82159b9a2 feat: video waveform (close #184) 2025-10-25 12:22:36 +08:00
Xinrea
35d068e109 fix: handle segment download error 2025-10-25 11:12:34 +08:00
Xinrea
136e1a3774 feat: support {note} in filename (close #203) 2025-10-25 02:13:24 +08:00
Xinrea
fc6c6adfce feat: generate whole live with danmu 2025-10-25 02:03:41 +08:00
Xinrea
5981d97d5f fix: target duration 2025-10-24 23:36:24 +08:00
Xinrea
42c12b3bf9 fix: live notification 2025-10-24 22:51:33 +08:00
Xinrea
39a8d0d741 fix: break recording when parsing failed 2025-10-24 22:29:27 +08:00
Xinrea
1db0609961 bump version to 2.15.0 2025-10-24 22:19:55 +08:00
Xinrea
26d60cecbf feat: recorder crate and huya support (#204)
* feat: huya account support

* refactor: recorder

* fix: first segment ts

* refactor: separate crate

* feat: general hls recorder

* feat: auto end playlist for archives

* feat: handle bilibili ts stream

* fix(bilibili): ts stream only allowed

* feat(huya): huya recorder
2025-10-24 00:46:49 +08:00
Xinrea
a97ec33c07 bump version to 2.14.1 2025-10-14 08:12:03 +08:00
Xinrea
9a1bc0f449 feat: switch to TS as primary for bilibili 2025-10-14 08:10:24 +08:00
Xinrea
e4d1024082 fix: windows build 2025-10-10 20:49:37 +08:00
Xinrea
3d5f97f635 chore: add tips for encoding 2025-10-10 20:23:47 +08:00
Xinrea
7c82766549 bump version to 2.14.0 2025-10-10 20:19:59 +08:00
Xinrea
cfe91e0782 feat: adjust styles 2025-10-10 20:19:18 +08:00
Xinrea
9a26683a76 fix: shaka player js 2025-10-09 22:06:13 +08:00
Xinrea
8a9344e3ee feat: update shaka-player 2025-10-09 21:55:22 +08:00
Xinrea
d200c7cf09 fix: transcode video aspect ratio 2025-10-09 07:27:32 +08:00
Xinrea
eb01d62e53 fix: playlist path when generating whole clip 2025-10-09 07:24:11 +08:00
Xinrea
f65f375283 feat: migrate to ffmpeg (#201)
* feat: handle hls with ffmpeg

* fix: douyin cover and title

* refactor: cache path handle

* feat: auto remove empty record

* feat: calculate ts file size
2025-10-04 16:57:08 +08:00
Xinrea
08979d2079 bump version to 2.13.8 2025-09-29 19:36:50 +08:00
Xinrea
c6efe07303 fix: clip black screen at beginning 2025-09-29 01:06:47 +08:00
Xinrea
7294f0ca6d fix: progress report for transcode 2025-09-28 19:54:49 +08:00
Xinrea
eac1c09149 feat: add hevc option for bilibili
clip from hevc live needs fix-encoding
2025-09-28 19:48:33 +08:00
Xinrea
1e9cd61eba bump version to 2.13.7 2025-09-28 01:56:12 +08:00
Xinrea
7b7f341fa0 fix: progress and danmu events 2025-09-28 01:54:37 +08:00
Xinrea
ac806b49b2 chore: update README 2025-09-28 00:26:43 +08:00
Xinrea
f20636a107 bump version to 2.13.6 2025-09-28 00:12:26 +08:00
Xinrea
787a30e6f7 fix: clip on fmp4 -> ts archive 2025-09-28 00:10:31 +08:00
Xinrea
d1d217be18 bump version to 2.13.5 2025-09-27 15:07:53 +08:00
Xinrea
944d0a371a fix: danmu from ws 2025-09-27 15:06:53 +08:00
Xinrea
0df03e0c9c fix: create proxy master playlist for old fmp4 playlist 2025-09-27 14:54:13 +08:00
Xinrea
7ffdf65705 Revert "feat: convert old fmp4 archives into ts"
This reverts commit 89cdf91a48.
2025-09-27 01:00:39 +08:00
Xinrea
89cdf91a48 feat: convert old fmp4 archives into ts 2025-09-27 00:25:57 +08:00
Xinrea
43ebc27044 ci/cd: update docker script 2025-09-26 22:10:44 +08:00
Xinrea
e6159555f3 feat: migrate sse to websocket 2025-09-26 21:30:55 +08:00
Xinrea
1f2508aae9 fix: disable http2 for http server 2025-09-26 19:52:57 +08:00
Xinrea
ad13f58fa7 ci/cd: update final image 2025-09-26 19:19:24 +08:00
Xinrea
de4959d49f bump version to 2.13.4 2025-09-26 00:16:16 +08:00
Xinrea
b5b75129e7 fix: sanitize filename 2025-09-26 00:14:41 +08:00
Xinrea
84346a486f chore: ignore check json 2025-09-26 00:13:50 +08:00
Xinrea
3bdcddf5a2 fix: ignore resolution checks on packet interleaving stream 2025-09-25 22:27:24 +08:00
Xinrea
98f68a5e14 bump version to 2.13.3 2025-09-25 22:22:10 +08:00
Xinrea
2249b86af3 fix: bilibili using TS as fallback 2025-09-25 21:38:33 +08:00
Xinrea
fd889922d8 ci/cd: update builder version 2025-09-25 08:37:43 +08:00
Xinrea
8db7c6e320 fix: douyin stream statistics 2025-09-25 08:18:53 +08:00
Xinrea
5bc4ed6dfd bump version to 2.13.2 2025-09-25 01:24:37 +08:00
Xinrea
22ad5f7fea feat: using fMP4 as fallback plan 2025-09-25 01:22:10 +08:00
Xinrea
c0369c1a14 fix: wrong length and size statistics 2025-09-24 22:39:57 +08:00
Xinrea
322f4a3ca5 chore: update allow list 2025-09-24 21:54:40 +08:00
Xinrea
4e32453441 fix: danmu encoding 2025-09-24 21:45:53 +08:00
Xinrea
66725b8a64 bump version to 2.13.1 2025-09-24 07:49:08 +08:00
Xinrea
f7bcbbca83 feat: default cover and avatar for rooms not initialized 2025-09-24 01:16:59 +08:00
Xinrea
07a3b33040 fix: douyin room info parse error 2025-09-24 00:38:47 +08:00
Xinrea
2f9b4582f8 fix: clip range drifting 2025-09-24 00:19:19 +08:00
Xinrea
c3f63c58cf fix: danmu statistics 2025-09-23 23:28:05 +08:00
Xinrea
4a3529bc2e fix: generate whole clip when live ends 2025-09-23 20:33:08 +08:00
Xinrea
b0355a919f ci/cd: only run checks when modifying rust code 2025-09-22 23:51:43 +08:00
Xinrea
cfe1a0b4b9 bump version to 2.13.0 2025-09-22 23:47:24 +08:00
Xinrea
b655e98f35 fix: remove unused config item 2025-09-22 23:39:46 +08:00
Xinrea
2d1021bc42 feat: fast generate for whole live clip 2025-09-22 23:25:13 +08:00
Xinrea
33d74999b9 feat: archive batch delete (close #193)
* feat: add tool for agent

* feat: archive management
2025-09-22 23:21:26 +08:00
Xinrea
84b7dd7a3c feat: generate whole live (close #190) 2025-09-22 01:45:30 +08:00
Xinrea
0c678fbda3 feat: input seek when clipping on ts stream (close #183) 2025-09-19 01:13:39 +08:00
Xinrea
3486f7d050 feat: migrate bili stream to ts (close #154) 2025-09-19 00:59:52 +08:00
Xinrea
d42a1010b8 fix: event not sent when auto generate clip (close #187) 2025-09-17 22:58:58 +08:00
Xinrea
ece6ceea45 fix: code refactor (#189)
* fix: code refactor

* fix: code errors

* fix: show in folder

* fix: show in folder on linux

* ci/cd: remove cache for self-hosted runner

* fix: unused result
2025-09-15 23:42:29 +08:00
Xinrea
b22ebb399e feat: commit check (#188)
* ci/cd: run checks on self-hosted runner

* ci/cd: add clippy check

* ci/cd: fix add clippy

* ci/cd: remove pull request check

* feat: add prek for commit hooks
2025-09-15 22:58:39 +08:00
Xinrea
4431b10cb7 refactor: cleanup code 2025-09-11 23:55:39 +08:00
Xinrea
01a0c929e8 refactor: rust module structure 2025-09-11 01:33:38 +08:00
Xinrea
b06f6e8d09 refactor: migrate to thiserror 2025-09-11 01:25:45 +08:00
Xinrea
753227acbb chore: markdown lint 2025-09-11 01:23:58 +08:00
Xinrea
c7dd9091d0 docs: update 2025-09-11 00:35:09 +08:00
Xinrea
bae20ce011 ci/cd: switch to github-hosted runner 2025-09-10 22:55:19 +08:00
Xinrea
8da4759668 ci/cd: fix runner 2025-09-10 22:19:44 +08:00
Xinrea
eb7c6d91e9 ci/cd: fix mixed label 2025-09-10 22:13:13 +08:00
Xinrea
3c24dfe8a6 ci/cd: build on self-linux 2025-09-10 22:08:42 +08:00
Xinrea
bb916daaaf fix: post with video title 2025-09-10 22:08:10 +08:00
Xinrea
3931e484c2 chore: remove unused code 2025-09-10 21:52:19 +08:00
Xinrea
b67e258c31 fix: douyin a_bogus check 2025-09-10 21:49:44 +08:00
Xinrea
1a7e6f5a43 feat: randomly generated user-agent 2025-09-10 20:19:42 +08:00
Xinrea
437204dbe6 ci/cd: remove cuda installation 2025-09-10 00:09:11 +08:00
Xinrea
af105277d9 ci/cd: switch to self-hosted win runner 2025-09-09 23:26:21 +08:00
Xinrea
7efd327a36 bump version to 2.12.4 2025-09-09 23:11:35 +08:00
Xinrea
0141586fa9 fix: danmu ws not reconnecting 2025-09-09 21:42:35 +08:00
Xinrea
df1d8ccac6 bump version to 2.12.3 2025-09-09 21:32:17 +08:00
Xinrea
10b6b95e4d fix: bilibili stream offset timezone error 2025-09-09 21:31:10 +08:00
Xinrea
a58e6f77bd bump version to 2.12.2 2025-09-09 20:58:54 +08:00
Xinrea
fe2bd80ac6 chore: adjust logs for douyin h5 api error 2025-09-09 20:58:54 +08:00
Xinrea
870b44a973 fix: room without cover (close #181) 2025-09-09 20:58:44 +08:00
Xinrea
48fd9ca7b2 fix: clip auto-generate not works 2025-09-09 20:15:33 +08:00
Xinrea
14d03b7eb9 ci/cd: switch to github-hosted runner 2025-09-09 00:49:36 +08:00
Xinrea
6f1db6c038 bump version to 2.12.1 2025-09-08 23:55:41 +08:00
Xinrea
cd2d208e5c fix: output not created while importing (close #180) 2025-09-08 23:53:50 +08:00
Xinrea
7d6ec72002 ci/cd: add self-hosted runner 2025-09-08 22:39:25 +08:00
Xinrea
837cb6a978 chore: adjust default danmaku style 2025-09-08 00:52:31 +08:00
Xinrea
aeeb0c08d7 refactor: delete video with related cover file 2025-09-08 00:30:07 +08:00
Xinrea
72d8a7f485 fix: danmu start position 2025-09-08 00:00:18 +08:00
Xinrea
5d3692c7a0 fix: update video with new cover 2025-09-07 23:30:37 +08:00
Xinrea
7e54231bef ci/cd: fix get previous tag 2025-09-07 19:11:49 +08:00
Xinrea
80a885dbf3 Release 2.12.0 (#179)
* chore: add devcontainer config (#175)

* refactor: refactor error handling and update dependencies (#176)

* docs: update webhook

* feat: add webhook module

* feat: webhook url settings

* feat: update webhook poster instead of recreating

* feat: add link for webhook docs

* refactor: using relative path for all covers

* fix: webhook in headless mode

* feat: implement all webhook events

* fix: webhook in headless mode

* feat: static host cache/output directory

* tests: add more tests (#178)

* chore: add tests

* chore: update

* fix: wrong cover type

* bump version to 2.12.0

* feat: change default clip bitrate to 6000k

---------

Co-authored-by: Sieluna <seele.peng@gmail.com>
2025-09-07 18:38:16 +08:00
Xinrea
134c6bbb5f chore: update dependencies 2025-08-31 11:01:32 +08:00
Xinrea
49a153adf7 chore: add cursor rules 2025-08-31 10:52:44 +08:00
Xinrea
99e15b0bda ci/cd: publish workflow with release body 2025-08-30 23:35:06 +08:00
Xinrea
4de8a73af2 chore: add logs for ts filename 2025-08-30 23:19:06 +08:00
Xinrea
d104ba3180 chore: update issue template 2025-08-30 10:52:00 +08:00
Xinrea
abf0d4748f chore: update issue template 2025-08-30 10:46:45 +08:00
Xinrea
d2a9c44601 feat: clip note support (#173)
* refactor: move components

* feat: note for clip (close #170)

* fix: import video handler

* fix: sort by note

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

---------

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-30 10:24:53 +08:00
Xinrea
c269558bae refactor: using post for all handlers (#172)
* refactor: using post for all handlers

* chore: code format

* ci/cd: tests verbose

* ci/cd: remove compilation test

* ci/cd: add ffmpeg in test
2025-08-24 22:48:51 +08:00
Xinrea
cc22453a40 chore: fix tests 2025-08-24 21:20:26 +08:00
Xinrea
d525d92de4 ci/cd: add checks for pr 2025-08-24 21:13:16 +08:00
Xinrea
2197dfe65c chore: format code 2025-08-24 21:05:22 +08:00
Xinrea
38ee00f474 bump version to 2.11.7 2025-08-24 20:58:20 +08:00
Eeeeep4
8fdad41c71 fix: resolve subtitle drag sync issues with timeline scaling (#171)
- Fix double scaling in getSubtitleStyle causing position misalignment
- Improve edge detection logic for better drag precision
- Add time boundary constraints to prevent negative/overflow values
- Refactor drag handlers with helper functions for better maintainability
- Ensure subtitle blocks align with timeline markers at all zoom levels
2025-08-24 17:08:35 +08:00
Eeeeep4
f269995bb7 feat: batch video import (#163)
* feat(import): auto-detect and import new videos from the import directory

- Scan the configured import directory for newly added video files
- Automatically enqueue/import detected files into the application library
- Extend config and app initialization to support import directory settings
- Update video/config handlers to expose and trigger the import flow

* refactor(import): improve video import and conversion workflow

- Optimize auto-import loading state management with proper polling
- Separate large file (>500MB) async conversion from small file sync conversion
- Extract reusable helper functions for file cleanup and thumbnail generation
- Enhance UI progress display for large file conversions

* feat(import): add batch video import functionality

- Add batch_import_external_videos API endpoint with progress tracking
- Support multiple file selection in import dialog
- Display batch import progress with current file information

* feat: improve video import on headless mode

- Enhance batch video import workflow and progress tracking
- Improve HTTP server API structure and error handling

* fix(video): prevent OOM in headless mode by using fixed 64KB buffers
- introduce sanitize_filename_advanced with tests
- use fixed 64KB buffer and 1% progress reporting

* fix: resolve compilation warnings and improve SSE reliability

  - Add feature-gated compilation for platform-specific functions
  - Enhance SSE error handling and connection stability
  - Increase broadcast channel capacity and optimize keep-alive timing
2025-08-24 10:10:38 +08:00
Xinrea
03a2db8c44 feat: randomly choose stream variant (#168) 2025-08-20 23:40:56 +08:00
Xinrea
6d9cd3c6a8 fix: danmu reconnection (#167) 2025-08-20 23:08:41 +08:00
Xinrea
303b2f7036 fix: record breaks after stream expired (#166) 2025-08-20 22:55:06 +08:00
Xinrea
ec25c2ffd9 bump version to 2.11.6 2025-08-20 22:23:25 +08:00
Xinrea
50ab608ddb fix: cache/output dir migration (close #159) (#165)
* fix: cache/output dir migration (close #159)

* chore: adjust wrong log info

* fix: more accurate way to check path
2025-08-20 22:15:46 +08:00
Xinrea
3c76be9b81 feat: add batch delete for archives API and tool 2025-08-19 00:27:59 +08:00
Xinrea
ab7f0cf0b4 bump version to 2.11.5 2025-08-15 22:50:38 +08:00
Xinrea
f9f590c4dc fix: docker start with nscd 2025-08-15 22:47:52 +08:00
Xinrea
8d38fe582a fix: ffprobe segment fault in docker environment 2025-08-15 22:31:10 +08:00
Xinrea
dc4a26561d bump version to 2.11.4 2025-08-14 22:08:44 +08:00
Xinrea
10c1d1f3a8 feat: add video export button in clip list (close #156) 2025-08-14 22:05:11 +08:00
Xinrea
66bcf53d01 fix: database operation optimization (close #157) 2025-08-14 21:52:08 +08:00
183 changed files with 36984 additions and 9682 deletions

View File

@@ -0,0 +1,51 @@
# AI Features and LangChain Integration
## AI Components
- **LangChain Integration**: Uses `@langchain/core`, `@langchain/deepseek`,
`@langchain/langgraph`, `@langchain/ollama`
- **Whisper Transcription**: Local and online transcription via `whisper-rs` in
Rust backend
- **AI Agent**: Located in [src/lib/agent/](mdc:src/lib/agent/) directory
## Frontend AI Features
- **AI Page**: [src/page/AI.svelte](mdc:src/page/AI.svelte) - Main AI interface
- **Agent Logic**: [src/lib/agent/](mdc:src/lib/agent/) - AI agent implementation
- **Interface**: [src/lib/interface.ts](mdc:src/lib/interface.ts)
\- AI communication layer
## Backend AI Features
- **Subtitle Generation**:
[src-tauri/src/subtitle_generator/](mdc:src-tauri/src/subtitle_generator/) -
AI-powered subtitle creation
- **Whisper Integration**:
[src-tauri/src/subtitle_generator.rs](mdc:src-tauri/src/subtitle_generator.rs)
\- Speech-to-text processing
- **CUDA Support**: Optional CUDA acceleration for Whisper via feature flag
## AI Workflows
- **Live Transcription**: Real-time speech-to-text during live streams
- **Content Summarization**: AI-powered content analysis and summarization
- **Smart Editing**: AI-assisted video editing and clip generation
- **Danmaku Processing**: AI analysis of danmaku (bullet comments) streams
## Configuration
- **LLM Settings**: Configure AI models in [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
- **Whisper Models**: Local model configuration for offline transcription
- **API Keys**: External AI service configuration for online features
## Development Notes
- AI features require proper model configuration
- CUDA feature enables GPU acceleration for Whisper
- LangChain integration supports multiple AI providers
- AI agent can work with both local and cloud-based models
description:
globs:
alwaysApply: true
---

View File

@@ -0,0 +1,62 @@
# Build and Deployment Configuration
## Build Scripts
- **PowerShell**: [build.ps1](mdc:build.ps1) - Windows build script
- **FFmpeg Setup**: [ffmpeg_setup.ps1](mdc:ffmpeg_setup.ps1)
\- FFmpeg installation script
- **Version Bump**: [scripts/bump.cjs](mdc:scripts/bump.cjs)
\- Version management script
## Package Management
- **Node.js**: [package.json](mdc:package.json) - Frontend dependencies and scripts
- **Rust**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml)
\- Backend dependencies and features
- **Lock Files**: [yarn.lock](mdc:yarn.lock) - Yarn dependency lock
## Build Configuration
- **Vite**: [vite.config.ts](mdc:vite.config.ts) - Frontend build tool configuration
- **Tailwind**: [tailwind.config.cjs](mdc:tailwind.config.cjs) - CSS framework configuration
- **PostCSS**: [postcss.config.cjs](mdc:postcss.config.cjs) - CSS processing configuration
- **TypeScript**: [tsconfig.json](mdc:tsconfig.json),
[tsconfig.node.json](mdc:tsconfig.node.json) - TypeScript configuration
## Tauri Configuration
- **Main Config**: [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
\- Core Tauri settings
- **Platform Configs**:
- [src-tauri/tauri.macos.conf.json](mdc:src-tauri/tauri.macos.conf.json)
\- macOS specific
- [src-tauri/tauri.linux.conf.json](mdc:src-tauri/tauri.linux.conf.json)
\- Linux specific
- [src-tauri/tauri.windows.conf.json](mdc:src-tauri/tauri.windows.conf.json)
\- Windows specific
- [src-tauri/tauri.windows.cuda.conf.json](mdc:src-tauri/tauri.windows.cuda.conf.json)
\- Windows with CUDA
## Docker Support
- **Dockerfile**: [Dockerfile](mdc:Dockerfile) - Container deployment configuration
- **Documentation**: [docs/](mdc:docs/) - VitePress-based documentation site
## Build Commands
- **Frontend**: `yarn build` - Build production frontend
- **Tauri**: `yarn tauri build` - Build desktop application
- **Documentation**: `yarn docs:build` - Build documentation site
- **Type Check**: `yarn check` - TypeScript and Svelte validation
## Deployment Targets
- **Desktop**: Native Tauri applications for Windows, macOS, Linux
- **Docker**: Containerized deployment option
- **Documentation**: Static site deployment via VitePress
- **Assets**: Static asset distribution for web components
description:
globs:
alwaysApply: true
---

View File

@@ -0,0 +1,61 @@
# Database and Data Management
## Database Architecture
- **SQLite Database**: Primary data storage using `sqlx` with async runtime
- **Database Module**: [src-tauri/src/database/](mdc:src-tauri/src/database/)
\- Core database operations
- **Migration System**: [src-tauri/src/migration.rs](mdc:src-tauri/src/migration.rs)
\- Database schema management
## Data Models
- **Recording Data**: Stream metadata, recording sessions, and file information
- **Room Configuration**: Stream room settings and platform credentials
- **Task Management**: Recording task status and progress tracking
- **User Preferences**: Application settings and user configurations
## Frontend Data Layer
- **Database Interface**: [src/lib/db.ts](mdc:src/lib/db.ts)
\- Frontend database operations
- **Stores**: [src/lib/stores/](mdc:src/lib/stores/) - State management for data
- **Version Management**: [src/lib/stores/version.ts](mdc:src/lib/stores/version.ts)
\- Version tracking
## Data Operations
- **CRUD Operations**: Create, read, update, delete for all data entities
- **Query Optimization**: Efficient SQL queries with proper indexing
- **Transaction Support**: ACID compliance for critical operations
- **Data Validation**: Input validation and sanitization
## File Management
- **Cache Directory**: [src-tauri/cache/](mdc:src-tauri/cache/)
\- Temporary file storage
- **Upload Directory**: [src-tauri/cache/uploads/](mdc:src-tauri/cache/uploads/)
\- User upload storage
- **Bilibili Cache**: [src-tauri/cache/bilibili/](mdc:src-tauri/cache/bilibili/)
\- Platform-specific cache
## Data Persistence
- **SQLite Files**: [src-tauri/data/data_v2.db](mdc:src-tauri/data/data_v2.db)
\- Main database file
- **Write-Ahead Logging**: WAL mode for concurrent access and performance
- **Backup Strategy**: Database backup and recovery procedures
- **Migration Handling**: Automatic schema updates and data migration
## Development Guidelines
- Use prepared statements to prevent SQL injection
- Implement proper error handling for database operations
- Use transactions for multi-step operations
- Follow database naming conventions consistently
- Test database operations with sample data
description:
globs:
alwaysApply: true
---

View File

@@ -0,0 +1,47 @@
# Frontend Development Guidelines
## Svelte 3 Best Practices
- Use Svelte 3 syntax with `<script>` tags for component logic
- Prefer reactive statements with `$:` for derived state
- Use stores from [src/lib/stores/](mdc:src/lib/stores/) for global state management
- Import components from [src/lib/components/](mdc:src/lib/components/)
## TypeScript Configuration
- Follow the configuration in [tsconfig.json](mdc:tsconfig.json)
- Use strict type checking with `checkJs: true`
- Extends `@tsconfig/svelte` for Svelte-specific TypeScript settings
- Base URL is set to workspace root for clean imports
## Component Structure
- **Page components**: Located in [src/page/](mdc:src/page/) directory
- **Reusable components**: Located in [src/lib/components/](mdc:src/lib/components/)
directory
- **Layout components**: [src/App.svelte](mdc:src/App.svelte),
[src/AppClip.svelte](mdc:src/AppClip.svelte), [src/AppLive.svelte](mdc:src/AppLive.svelte)
## Styling
- Use Tailwind CSS classes for styling
- Configuration in [tailwind.config.cjs](mdc:tailwind.config.cjs)
- PostCSS configuration in [postcss.config.cjs](mdc:postcss.config.cjs)
- Global styles in [src/styles.css](mdc:src/styles.css)
## Entry Points
- **Main app**: [src/main.ts](mdc:src/main.ts) - Main application entry
- **Clip mode**: [src/main_clip.ts](mdc:src/main_clip.ts) - Clip editing interface
- **Live mode**: [src/main_live.ts](mdc:src/main_live.ts) - Live streaming interface
## Development Workflow
- Use `yarn dev` for frontend-only development
- Use `yarn tauri dev` for full Tauri development
- Use `yarn check` for TypeScript and Svelte type checking
description:
globs:
alwaysApply: true
---

View File

@@ -0,0 +1,53 @@
# BiliBili ShadowReplay Project Overview
This is a Tauri-based desktop application for caching live streams and performing
real-time editing and submission. It supports Bilibili and Douyin platforms.
## Project Structure
### Frontend (Svelte + TypeScript)
- **Main entry points**: [src/main.ts](mdc:src/main.ts),
[src/main_clip.ts](mdc:src/main_clip.ts), [src/main_live.ts](mdc:src/main_live.ts)
- **App components**: [src/App.svelte](mdc:src/App.svelte),
[src/AppClip.svelte](mdc:src/AppClip.svelte), [src/AppLive.svelte](mdc:src/AppLive.svelte)
- **Pages**: Located in [src/page/](mdc:src/page/) directory
- **Components**: Located in [src/lib/components/](mdc:src/lib/components/) directory
- **Stores**: Located in [src/lib/stores/](mdc:src/lib/stores/) directory
### Backend (Rust + Tauri)
- **Main entry**: [src-tauri/src/main.rs](mdc:src-tauri/src/main.rs)
- **Core modules**:
- [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/) - Stream recording functionality
- [src-tauri/src/database/](mdc:src-tauri/src/database/) - Database operations
- [src-tauri/src/handlers/](mdc:src-tauri/src/handlers/) - Tauri command handlers
- **Custom crate**:
[src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/) -
Danmaku stream processing
### Configuration
- **Frontend config**: [tsconfig.json](mdc:tsconfig.json),
[vite.config.ts](mdc:vite.config.ts), [tailwind.config.cjs](mdc:tailwind.config.cjs)
- **Backend config**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml), [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
- **Example config**: [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
## Key Technologies
- **Frontend**: Svelte 3, TypeScript, Tailwind CSS, Flowbite
- **Backend**: Rust, Tauri 2, SQLite, FFmpeg
- **AI Features**: LangChain, Whisper for transcription
- **Build Tools**: Vite, VitePress for documentation
## Development Commands
- `yarn dev` - Start development server
- `yarn tauri dev` - Start Tauri development
- `yarn build` - Build frontend
- `yarn docs:dev` - Start documentation server
description:
globs:
alwaysApply: true
---

View File

@@ -0,0 +1,56 @@
# Rust Backend Development Guidelines
## Project Structure
- **Main entry**: [src-tauri/src/main.rs](mdc:src-tauri/src/main.rs)
\- Application entry point
- **Core modules**:
- [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/)
\- Stream recording and management
- [src-tauri/src/database/](mdc:src-tauri/src/database/)
\- SQLite database operations
- [src-tauri/src/handlers/](mdc:src-tauri/src/handlers/)
\- Tauri command handlers
- [src-tauri/src/subtitle_generator/](mdc:src-tauri/src/subtitle_generator/)
\- AI-powered subtitle generation
## Custom Crates
- **danmu_stream**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/)
\- Danmaku stream processing library
## Dependencies
- **Tauri 2**: Core framework for desktop app functionality
- **FFmpeg**: Video/audio processing via `async-ffmpeg-sidecar`
- **Whisper**: AI transcription via `whisper-rs` (CUDA support available)
- **LangChain**: AI agent functionality
- **SQLite**: Database via `sqlx` with async runtime
## Configuration
- **Cargo.toml**: [src-tauri/Cargo.toml](mdc:src-tauri/Cargo.toml)
\- Dependencies and features
- **Tauri config**: [src-tauri/tauri.conf.json](mdc:src-tauri/tauri.conf.json)
\- App configuration
- **Example config**: [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
\- User configuration template
## Features
- **default**: Includes GUI and core functionality
- **cuda**: Enables CUDA acceleration for Whisper transcription
- **headless**: Headless mode without GUI
- **custom-protocol**: Required for production builds
## Development Commands
- `yarn tauri dev` - Start Tauri development with hot reload
- `yarn tauri build` - Build production application
- `cargo check` - Check Rust code without building
- `cargo test` - Run Rust tests
description:
globs:
alwaysApply: true
---

View File

@@ -0,0 +1,60 @@
# Streaming and Recording System
## Core Recording Components
- **Recorder Manager**: [src-tauri/src/recorder_manager.rs](mdc:src-tauri/src/recorder_manager.rs)
\- Main recording orchestration
- **Recorder**: [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/)
\- Individual stream recording logic
- **Danmaku Stream**: [src-tauri/crates/danmu_stream/](mdc:src-tauri/crates/danmu_stream/)
\- Custom crate for bullet comment processing
## Supported Platforms
- **Bilibili**: Main platform support with live stream caching
- **Douyin**: TikTok's Chinese platform support
- **Multi-stream**: Support for recording multiple streams simultaneously
## Recording Features
- **Live Caching**: Real-time stream recording and buffering
- **Time-based Clipping**: Extract specific time segments from recorded streams
- **Danmaku Capture**: Record bullet comments and chat messages
- **Quality Control**: Configurable recording quality and format options
## Frontend Interfaces
- **Live Mode**: [src/AppLive.svelte](mdc:src/AppLive.svelte)
\- Live streaming interface
- **Clip Mode**: [src/AppClip.svelte](mdc:src/AppClip.svelte)
\- Video editing and clipping
- **Room Management**: [src/page/Room.svelte](mdc:src/page/Room.svelte)
\- Stream room configuration
- **Task Management**: [src/page/Task.svelte](mdc:src/page/Task.svelte)
\- Recording task monitoring
## Technical Implementation
- **FFmpeg Integration**: Video/audio processing via `async-ffmpeg-sidecar`
- **M3U8 Support**: HLS stream processing with `m3u8-rs`
- **Async Processing**: Non-blocking I/O with `tokio` runtime
- **Database Storage**: SQLite for metadata and recording information
## Configuration
- **Recording Settings**: Configure in [src-tauri/config.example.toml](mdc:src-tauri/config.example.toml)
- **FFmpeg Path**: Set FFmpeg binary location for video processing
- **Storage Paths**: Configure cache and output directories
- **Quality Settings**: Adjust recording bitrate and format options
## Development Workflow
- Use [src-tauri/src/recorder/](mdc:src-tauri/src/recorder/) for core recording logic
- Test with [src-tauri/tests/](mdc:src-tauri/tests/) directory
- Monitor recording progress via progress manager
- Handle errors gracefully with custom error types
description:
globs:
alwaysApply: true
---

36
.devcontainer/Dockerfile Normal file
View File

@@ -0,0 +1,36 @@
# Dev-container image for building this project on Debian.
# The base Debian variant can be overridden at build time (--build-arg VARIANT=...).
ARG VARIANT=bookworm-slim
FROM debian:${VARIANT}
# Suppress interactive prompts from apt during image build
ENV DEBIAN_FRONTEND=noninteractive
# Arguments: name of the unprivileged user/group created for dev-container use
ARG CONTAINER_USER=vscode
ARG CONTAINER_GROUP=vscode
# Install dependencies: C/C++ toolchain plus the GTK/WebKit/appindicator
# libraries required to build the Tauri desktop app on Linux.
# Cleans apt caches in the same layer to keep the image small.
RUN apt-get update \
&& apt-get install -y \
build-essential \
clang \
cmake \
curl \
file \
git \
libayatana-appindicator3-dev \
librsvg2-dev \
libssl-dev \
libwebkit2gtk-4.1-dev \
libxdo-dev \
pkg-config \
wget \
&& apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts
# Set users: create the unprivileged user and run all subsequent steps as it
RUN adduser --disabled-password --gecos "" ${CONTAINER_USER}
USER ${CONTAINER_USER}
WORKDIR /home/${CONTAINER_USER}
# Install rustup non-interactively with the minimal profile, then expose cargo on PATH
RUN curl --proto "=https" --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal
ENV PATH=${PATH}:/home/${CONTAINER_USER}/.cargo/bin
CMD [ "/bin/bash" ]

View File

@@ -0,0 +1,31 @@
{
"name": "vscode",
"build": {
"dockerfile": "Dockerfile",
"args": {
"CONTAINER_USER": "vscode",
"CONTAINER_GROUP": "vscode"
}
},
"features": {
"ghcr.io/devcontainers/features/node:1": {
"version": "latest"
}
},
"customizations": {
"vscode": {
"settings": {
"lldb.executable": "/usr/bin/lldb",
"files.watcherExclude": {
"**/target/**": true
}
},
"extensions": [
"vadimcn.vscode-lldb",
"rust-lang.rust-analyzer",
"tamasfe.even-better-toml"
]
}
},
"remoteUser": "vscode"
}

View File

@@ -12,7 +12,8 @@
### Windows
Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于 Whisper 是否使用 GPU 加速。`cpu` 版本使用 CPU 进行推理,`cuda` 版本使用 GPU 进行推理。
Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于 Whisper 是否使用 GPU 加速。
`cpu` 版本使用 CPU 进行推理,`cuda` 版本使用 GPU 进行推理。
默认运行为 `cpu` 版本,使用 `yarn tauri dev --features cuda` 命令运行 `cuda` 版本。
@@ -20,7 +21,9 @@ Windows 下分为两个版本,分别是 `cpu` 和 `cuda` 版本。区别在于
1. 安装 LLVM 且配置相关环境变量,详情见 [LLVM Windows Setup](https://llvm.org/docs/GettingStarted.html#building-llvm-on-windows)
2. 安装 CUDA Toolkit详情见 [CUDA Windows Setup](https://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html);要注意,安装时请勾选 **VisualStudio integration**
2. 安装 CUDA Toolkit详情见
[CUDA Windows Setup](https://docs.nvidia.com/cuda/cuda-installation-guide-microsoft-windows/index.html)
要注意,安装时请勾选 **VisualStudio integration**
### 常见问题

View File

@@ -1,21 +0,0 @@
---
name: Bug report
about: 提交一个 BUG
title: "[BUG]"
labels: bug
assignees: Xinrea
---
**描述:**
简要描述一下这个 BUG 的现象
**日志和截图:**
如果可以的话,请尽量附上相关截图和日志文件(日志是位于安装目录下,名为 bsr.log 的文件)。
**相关信息:**
- 程序版本:
- 系统类型:
**其他**
任何其他想说的

47
.github/ISSUE_TEMPLATE/bug_report.yml vendored Normal file
View File

@@ -0,0 +1,47 @@
name: Bug Report
description: 提交 BUG 报告.
title: "[bug] "
labels: ["bug"]
assignees:
- Xinrea
body:
- type: checkboxes
attributes:
label: 提交须知
description: 请确认以下内容
options:
- label: 我是在最新版本上发现的此问题
required: true
- label: 我已阅读 [常见问题](https://bsr.xinrea.cn/usage/faq.html) 的说明
required: true
- type: dropdown
id: app_type
attributes:
label: 以哪种方式使用的该软件?
multiple: false
options:
- Docker 镜像
- 桌面应用
- type: dropdown
id: os
attributes:
label: 运行环境
multiple: false
options:
- Linux
- Windows
- MacOS
- Docker
- type: textarea
attributes:
label: BUG 描述
description: 请尽可能详细描述 BUG 的现象以及复现的方法
validations:
required: true
- type: textarea
id: logs
attributes:
label: 日志
description: 请粘贴日志内容或是上传日志文件(在主窗口的设置页面,提供了一键打开日志目录所在位置的按钮;当你打开日志目录所在位置后,进入 logs 目录,找到后缀名为 log 的文件)
validations:
required: true

View File

@@ -1,20 +0,0 @@
---
name: Feature request
about: 提交一个新功能的建议
title: "[feature]"
labels: enhancement
assignees: Xinrea
---
**遇到的问题:**
在使用过程中遇到了什么问题让你想要提出建议
**想要的功能:**
想要怎样的新功能来解决这个问题
**通过什么方式实现(有思路的话):**
如果有相关的实现思路或者是参考,可以在此提供
**其他:**
其他任何想说的话

View File

@@ -0,0 +1,13 @@
name: Feature Request
description: 提交新功能的需求
title: "[feature] "
labels: ["feature"]
assignees:
- Xinrea
body:
- type: textarea
attributes:
label: 需求描述
description: 请尽可能详细描述你想要的新功能
validations:
required: true

View File

@@ -82,6 +82,19 @@ jobs:
Copy-Item "$cudaPath\cublas64*.dll" -Destination $targetPath
Copy-Item "$cudaPath\cublasLt64*.dll" -Destination $targetPath
- name: Get previous tag
id: get_previous_tag
run: |
# Get the previous tag (excluding the current one being pushed)
PREVIOUS_TAG=$(git describe --tags --abbrev=0 HEAD~1 2>/dev/null || echo "")
if [ -z "$PREVIOUS_TAG" ]; then
# If no previous tag found, use the first commit
PREVIOUS_TAG=$(git rev-list --max-parents=0 HEAD | head -1)
fi
echo "previous_tag=$PREVIOUS_TAG" >> $GITHUB_OUTPUT
echo "current_tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
shell: bash
- uses: tauri-apps/tauri-action@v0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -91,8 +104,7 @@ jobs:
with:
tagName: v__VERSION__
releaseName: "BiliBili ShadowReplay v__VERSION__"
releaseBody: "See the assets to download this version and install."
releaseBody: "> [!NOTE]\n> 如果你是第一次下载安装,请参考 [安装准备](https://bsr.xinrea.cn/getting-started/installation/desktop.html) 选择合适的版本。\n> Changelog: https://github.com/Xinrea/bili-shadowreplay/compare/${{ steps.get_previous_tag.outputs.previous_tag }}...${{ steps.get_previous_tag.outputs.current_tag }}"
releaseDraft: true
prerelease: false
args: ${{ matrix.args }} ${{ matrix.platform == 'windows-latest' && matrix.features == 'cuda' && '--config src-tauri/tauri.windows.cuda.conf.json' || '' }}
includeDebug: true

5
.markdownlint.json Normal file
View File

@@ -0,0 +1,5 @@
{
"MD033": {
"allowed_elements": ["nobr", "sup"]
}
}

46
.pre-commit-config.yaml Normal file
View File

@@ -0,0 +1,46 @@
# pre-commit configuration: whitespace hygiene hooks plus the full Rust
# fmt/clippy/test matrix, run once for the default feature set and once
# for the headless build.
# fail_fast stops at the first failing hook, so the expensive cargo
# hooks are skipped when an earlier, cheaper hook already failed.
fail_fast: true
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v6.0.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
        # Keep JSON files and public/ assets byte-exact.
        exclude: '(\.json$|public/)'
  - repo: local
    hooks:
      - id: cargo-fmt
        name: cargo fmt
        entry: cargo fmt --manifest-path src-tauri/Cargo.toml --
        language: system
        types: [rust]
        pass_filenames: false # This makes it a lot faster
      - id: cargo-clippy
        name: cargo clippy
        language: system
        types: [rust]
        pass_filenames: false
        entry: cargo clippy --manifest-path src-tauri/Cargo.toml
      # Same lint pass, but with the headless feature set used by the
      # Docker build (no default Tauri desktop features).
      - id: cargo-clippy-headless
        name: cargo clippy headless
        language: system
        types: [rust]
        pass_filenames: false
        entry: cargo clippy --manifest-path src-tauri/Cargo.toml --no-default-features --features headless
      - id: cargo-test
        name: cargo test
        language: system
        types: [rust]
        pass_filenames: false
        entry: cargo test --manifest-path src-tauri/Cargo.toml
      - id: cargo-test-headless
        name: cargo test headless
        language: system
        types: [rust]
        pass_filenames: false
        entry: cargo test --manifest-path src-tauri/Cargo.toml --no-default-features --features headless

View File

@@ -23,7 +23,7 @@ COPY . .
RUN yarn build
# Build Rust backend
FROM rust:1.86-slim AS rust-builder
FROM rust:1.90-slim AS rust-builder
WORKDIR /app
@@ -48,15 +48,9 @@ COPY src-tauri/crates ./src-tauri/crates
WORKDIR /app/src-tauri
RUN rustup component add rustfmt
RUN cargo build --no-default-features --features headless --release
# Download and install FFmpeg static build
RUN wget https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz \
&& tar xf ffmpeg-release-amd64-static.tar.xz \
&& mv ffmpeg-*-static/ffmpeg ./ \
&& mv ffmpeg-*-static/ffprobe ./ \
&& rm -rf ffmpeg-*-static ffmpeg-release-amd64-static.tar.xz
# Final stage
FROM debian:bookworm-slim AS final
FROM debian:trixie-slim AS final
WORKDIR /app
@@ -65,9 +59,16 @@ RUN apt-get update && apt-get install -y \
libssl3 \
ca-certificates \
fonts-wqy-microhei \
netbase \
nscd \
ffmpeg \
&& update-ca-certificates \
&& rm -rf /var/lib/apt/lists/*
RUN touch /etc/netgroup
RUN mkdir -p /var/run/nscd && chmod 755 /var/run/nscd
# Add /app to PATH
ENV PATH="/app:${PATH}"
@@ -76,11 +77,9 @@ COPY --from=frontend-builder /app/dist ./dist
# Copy built Rust binary
COPY --from=rust-builder /app/src-tauri/target/release/bili-shadowreplay .
COPY --from=rust-builder /app/src-tauri/ffmpeg ./ffmpeg
COPY --from=rust-builder /app/src-tauri/ffprobe ./ffprobe
# Expose port
EXPOSE 3000
# Run the application
CMD ["./bili-shadowreplay"]
CMD ["sh", "-c", "nscd && ./bili-shadowreplay"]

View File

@@ -28,4 +28,5 @@ BiliBili ShadowReplay 是一个缓存直播并进行实时编辑投稿的工具
## 赞助
![donate](docs/public/images/donate.png)
<!-- markdownlint-disable MD033 -->
<img src="docs/public/images/donate.png" alt="donate" width="300">

2
_typos.toml Normal file
View File

@@ -0,0 +1,2 @@
# Configuration for the `typos` spell checker: whitelist identifiers
# that look like misspellings but are intentional in this codebase.
[default.extend-identifiers]
# "pull_datas" is an existing identifier; keep it as-is so the checker
# does not flag "datas".
pull_datas = "pull_datas"

View File

@@ -54,6 +54,7 @@ export default withMermaid({
{ text: "切片功能", link: "/usage/features/clip" },
{ text: "字幕功能", link: "/usage/features/subtitle" },
{ text: "弹幕功能", link: "/usage/features/danmaku" },
{ text: "Webhook", link: "/usage/features/webhook" },
],
},
{ text: "常见问题", link: "/usage/faq" },

View File

@@ -1,9 +1,11 @@
# Whisper 配置
要使用 AI 字幕识别功能,需要在设置页面配置 Whisper。目前可以选择使用本地运行 Whisper 模型,或是使用在线的 Whisper 服务(通常需要付费获取 API Key)。
要使用 AI 字幕识别功能,需要在设置页面配置 Whisper。目前可以选择使用本地运行 Whisper 模型,或是使用在线的 Whisper 服务(通常需要付
费获取 API Key)。
> [!NOTE]
> 其实有许多更好的中文字幕识别解决方案,但是这类服务通常需要将文件上传到对象存储后异步处理,考虑到实现的复杂度,选择了使用本地运行 Whisper 模型或是使用在线的 Whisper 服务,在请求返回时能够直接获取字幕生成结果。
> 其实有许多更好的中文字幕识别解决方案,但是这类服务通常需要将文件上传到对象存储后异步处理,考虑到实现的复杂度,选择了使用本地运行 Whisper 模型或是使
> 用在线的 Whisper 服务,在请求返回时能够直接获取字幕生成结果。
## 本地运行 Whisper 模型
@@ -16,20 +18,29 @@
可以跟据自己的需求选择不同的模型,要注意带有 `en` 的模型是英文模型,其他模型为多语言模型。
模型文件的大小通常意味着其在运行时资源占用的大小因此请根据电脑配置选择合适的模型。此外GPU 版本与 CPU 版本在字幕生成速度上存在**巨大差异**,因此推荐使用 GPU 版本进行本地处理(目前仅支持 Nvidia GPU
模型文件的大小通常意味着其在运行时资源占用的大小因此请根据电脑配置选择合适的模型。此外GPU 版本与 CPU 版本在字幕生成速度上存在**巨大差异**,因此
推荐使用 GPU 版本进行本地处理(目前仅支持 Nvidia GPU
## 使用在线 Whisper 服务
![WhisperOnline](/images/whisper_online.png)
如果需要使用在线的 Whisper 服务进行字幕生成,可以在设置中切换为在线 Whisper并配置好 API Key。提供 Whisper 服务的平台并非只有 OpenAI 一家,许多云服务平台也提供 Whisper 服务。
如果需要使用在线的 Whisper 服务进行字幕生成,可以在设置中切换为在线 Whisper并配置好 API Key。提供 Whisper 服务的平台并非只有
OpenAI 一家,许多云服务平台也提供 Whisper 服务。
## 字幕识别质量的调优
目前在设置中支持设置 Whisper 语言和 Whisper 提示词,这些设置对于本地和在线的 Whisper 服务都有效。
通常情况下,`auto` 语言选项能够自动识别语音语言,并生成相应语言的字幕。如果需要生成其他语言的字幕,或是生成的字幕语言不匹配,可以手动配置指定的语言。根据 OpenAI 官方文档中对于 `language` 参数的描述,目前支持的语言包括
通常情况下,`auto` 语言选项能够自动识别语音语言,并生成相应语言的字幕。如果需要生成其他语言的字幕,或是生成的字幕语言不匹配,可以手动配置指定的语言。
根据 OpenAI 官方文档中对于 `language` 参数的描述,目前支持的语言包括
Afrikaans, Arabic, Armenian, Azerbaijani, Belarusian, Bosnian, Bulgarian, Catalan, Chinese, Croatian, Czech, Danish, Dutch, English, Estonian, Finnish, French, Galician, German, Greek, Hebrew, Hindi, Hungarian, Icelandic, Indonesian, Italian, Japanese, Kannada, Kazakh, Korean, Latvian, Lithuanian, Macedonian, Malay, Marathi, Maori, Nepali, Norwegian, Persian, Polish, Portuguese, Romanian, Russian, Serbian, Slovak, Slovenian, Spanish, Swahili, Swedish, Tagalog, Tamil, Thai, Turkish, Ukrainian, Urdu, Vietnamese, and Welsh.
Afrikaans, Arabic, Armenian, Azerbaijani, Belarusian, Bosnian, Bulgarian,
Catalan, Chinese, Croatian, Czech, Danish, Dutch, English, Estonian, Finnish,
French, Galician, German, Greek, Hebrew, Hindi, Hungarian, Icelandic,
Indonesian, Italian, Japanese, Kannada, Kazakh, Korean, Latvian, Lithuanian,
Macedonian, Malay, Marathi, Maori, Nepali, Norwegian, Persian, Polish,
Portuguese, Romanian, Russian, Serbian, Slovak, Slovenian, Spanish, Swahili,
Swedish, Tagalog, Tamil, Thai, Turkish, Ukrainian, Urdu, Vietnamese, and Welsh.
提示词可以优化生成的字幕的风格也会一定程度上影响质量要注意Whisper 无法理解复杂的提示词,你可以在提示词中使用一些简单的描述,让其在选择词汇时使用偏向于提示词所描述的领域相关的词汇,以避免出现毫不相干领域的词汇;或是让它在标点符号的使用上参照提示词的风格。

View File

@@ -2,7 +2,9 @@
桌面端目前提供了 Windows、Linux 和 MacOS 三个平台的安装包。
安装包分为两个版本,普通版和 debug 版普通版适合大部分用户使用debug 版包含了更多的调试信息,适合开发者使用;由于程序会对账号等敏感信息进行管理,请从信任的来源进行下载;所有版本均可在 [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) 页面下载安装。
由于程序会对账号等敏感信息进行管理,请从信任的来源进行下载;所有版本均可在 [GitHub Releases](https://github.com/Xinrea/bili-shadowreplay/releases) 页面下载安装。
对于 MacOS 用户,请先手动安装 FFmpeg详情见 [FFmpeg 配置](../config/ffmpeg.md)。
## Windows

View File

@@ -17,6 +17,8 @@
### 使用 DeepLinking 快速添加直播间
<!-- MD033 -->
<video src="/videos/deeplinking.mp4" loop autoplay muted style="border-radius: 10px;"></video>
在浏览器中观看直播时,替换地址栏中直播间地址中的 `https://``bsr://` 即可快速唤起 BSR 添加直播间。

View File

@@ -0,0 +1,245 @@
# Webhook
> [!NOTE]
> 你可以使用 <https://webhook.site> 来测试 Webhook 功能。
## 设置 Webhook
打开 BSR 设置页面,在基础设置中设置 Webhook 地址。
## Webhook Events
### 直播间相关
#### 添加直播间
```json
{
"id": "a96a5e9f-9857-4c13-b889-91da2ace208a",
"event": "recorder.added",
"payload": {
"room_id": "26966466",
"created_at": "2025-09-07T03:33:14.258796+00:00",
"platform": "bilibili",
"auto_start": true,
"extra": ""
},
"timestamp": 1757215994
}
```
#### 移除直播间
```json
{
"id": "e33623d4-e040-4390-88f5-d351ceeeace7",
"event": "recorder.removed",
"payload": {
"room_id": "27183290",
"created_at": "2025-08-30T10:54:18.569198+00:00",
"platform": "bilibili",
"auto_start": true,
"extra": ""
},
"timestamp": 1757217015
}
```
### 直播相关
> [!NOTE]
> 直播开始和结束,不意味着录制的开始和结束。
#### 直播开始
```json
{
"id": "f12f3424-f7d8-4b2f-a8b7-55477411482e",
"event": "live.started",
"payload": {
"room_id": "843610",
"room_info": {
"room_id": "843610",
"room_title": "登顶!",
"room_cover": "https://i0.hdslb.com/bfs/live/new_room_cover/73aea43f4b4624c314d62fea4b424822fb506dfb.jpg"
},
"user_info": {
"user_id": "475210",
"user_name": "Xinrea",
"user_avatar": "https://i1.hdslb.com/bfs/face/91beb3bf444b295fe12bae1f3dc6d9fc4fe4c224.jpg"
},
"total_length": 0,
"current_live_id": "",
"live_status": false,
"is_recording": false,
"auto_start": true,
"platform": "bilibili"
},
"timestamp": 1757217190
}
```
#### 直播结束
```json
{
"id": "e8b0756a-02f9-4655-b5ae-a170bf9547bd",
"event": "live.ended",
"payload": {
"room_id": "843610",
"room_info": {
"room_id": "843610",
"room_title": "登顶!",
"room_cover": "https://i0.hdslb.com/bfs/live/new_room_cover/73aea43f4b4624c314d62fea4b424822fb506dfb.jpg"
},
"user_info": {
"user_id": "475210",
"user_name": "Xinrea",
"user_avatar": "https://i1.hdslb.com/bfs/face/91beb3bf444b295fe12bae1f3dc6d9fc4fe4c224.jpg"
},
"total_length": 0,
"current_live_id": "",
"live_status": true,
"is_recording": false,
"auto_start": true,
"platform": "bilibili"
},
"timestamp": 1757217365
}
```
### 录播相关
#### 开始录制
```json
{
"id": "5ec1ea10-2b31-48fd-8deb-f2d7d2ea5985",
"event": "record.started",
"payload": {
"room_id": "26966466",
"room_info": {
"room_id": "26966466",
"room_title": "早安獭獭栞下播前抽fufu",
"room_cover": "https://i0.hdslb.com/bfs/live/user_cover/b810c36855168034557e905e5916b1dba1761fa4.jpg"
},
"user_info": {
"user_id": "1609526545",
"user_name": "栞栞Shiori",
"user_avatar": "https://i1.hdslb.com/bfs/face/47e8dbabb895de44ec6cace085d4dc1d40307277.jpg"
},
"total_length": 0,
"current_live_id": "1757216045412",
"live_status": true,
"is_recording": false,
"auto_start": true,
"platform": "bilibili"
},
"timestamp": 1757216045
}
```
#### 结束录制
```json
{
"id": "56fd03e5-3965-4c2e-a6a9-bb6932347eb3",
"event": "record.ended",
"payload": {
"room_id": "26966466",
"room_info": {
"room_id": "26966466",
"room_title": "早安獭獭栞下播前抽fufu",
"room_cover": "https://i0.hdslb.com/bfs/live/user_cover/b810c36855168034557e905e5916b1dba1761fa4.jpg"
},
"user_info": {
"user_id": "1609526545",
"user_name": "栞栞Shiori",
"user_avatar": "https://i1.hdslb.com/bfs/face/47e8dbabb895de44ec6cace085d4dc1d40307277.jpg"
},
"total_length": 52.96700000000001,
"current_live_id": "1757215994597",
"live_status": true,
"is_recording": true,
"auto_start": true,
"platform": "bilibili"
},
"timestamp": 1757216040
}
```
#### 删除录播
```json
{
"id": "c32bc811-ab4b-49fd-84c7-897727905d16",
"event": "archive.deleted",
"payload": {
"platform": "bilibili",
"live_id": "1756607084705",
"room_id": "1967212929",
"title": "灶台O.o",
"length": 9,
"size": 1927112,
"created_at": "2025-08-31T02:24:44.728616+00:00",
"cover": "bilibili/1967212929/1756607084705/cover.jpg"
},
"timestamp": 1757176219
}
```
### 切片相关
#### 切片生成
```json
{
"id": "f542e0e1-688b-4f1a-8ce1-e5e51530cf5d",
"event": "clip.generated",
"payload": {
"id": 316,
"room_id": "27183290",
"cover": "[27183290][1757172501727][一起看凡人修仙传][2025-09-07_00-16-11].jpg",
"file": "[27183290][1757172501727][一起看凡人修仙传][2025-09-07_00-16-11].mp4",
"note": "",
"length": 121,
"size": 53049119,
"status": 0,
"bvid": "",
"title": "",
"desc": "",
"tags": "",
"area": 0,
"created_at": "2025-09-07T00:16:11.747461+08:00",
"platform": "bilibili"
},
"timestamp": 1757175371
}
```
#### 切片删除
```json
{
"id": "5c7ca728-753d-4a7d-a0b4-02c997ad2f92",
"event": "clip.deleted",
"payload": {
"id": 313,
"room_id": "27183290",
"cover": "[27183290][1756903953470][不出非洲之心不下播][2025-09-03_21-10-54].jpg",
"file": "[27183290][1756903953470][不出非洲之心不下播][2025-09-03_21-10-54].mp4",
"note": "",
"length": 32,
"size": 18530098,
"status": 0,
"bvid": "",
"title": "",
"desc": "",
"tags": "",
"area": 0,
"created_at": "2025-09-03T21:10:54.943682+08:00",
"platform": "bilibili"
},
"timestamp": 1757147617
}
```

View File

@@ -4,7 +4,7 @@
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="stylesheet" href="shaka-player/controls.min.css" />
<link rel="stylesheet" href="shaka-player/controls.css" />
<link rel="stylesheet" href="shaka-player/youtube-theme.css" />
<script src="shaka-player/shaka-player.ui.js"></script>
</head>

View File

@@ -1,7 +1,7 @@
{
"name": "bili-shadowreplay",
"private": true,
"version": "2.11.3",
"version": "2.16.3",
"type": "module",
"scripts": {
"dev": "vite",
@@ -30,7 +30,9 @@
"@tauri-apps/plugin-sql": "~2",
"lucide-svelte": "^0.479.0",
"marked": "^16.1.1",
"qrcode": "^1.5.4"
"qrcode": "^1.5.4",
"socket.io-client": "^4.8.1",
"wavesurfer.js": "^7.11.0"
},
"devDependencies": {
"@sveltejs/vite-plugin-svelte": "^2.0.0",
@@ -50,7 +52,7 @@
"tailwindcss": "^3.3.0",
"ts-node": "^10.9.1",
"tslib": "^2.4.1",
"typescript": "^4.6.4",
"typescript": "^5.0.0",
"vite": "^4.0.0",
"vitepress": "^1.6.3",
"vitepress-plugin-mermaid": "^2.0.17"

BIN
public/imgs/bilibili.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 38 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 32 KiB

After

Width:  |  Height:  |  Size: 246 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 153 KiB

BIN
public/imgs/huya.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 219 KiB

BIN
public/imgs/huya_avatar.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 865 KiB

View File

@@ -0,0 +1,983 @@
/*! @license
* Shaka Player
* Copyright 2016 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
.shaka-hidden {
display: none !important;
}
.shaka-video-container {
position: relative;
top: 0;
left: 0;
display: flex;
font-family: Roboto, sans-serif, TengwarTelcontar;
font-weight: 400;
-webkit-font-smoothing: antialiased;
user-select: none;
-webkit-user-select: none;
}
.shaka-video-container .material-svg-icon {
font-size: 24px;
}
.shaka-video-container:fullscreen {
width: 100%;
height: 100%;
background-color: #000;
}
.shaka-video-container:fullscreen .shaka-text-container {
font-size: 4.4vmin;
}
.shaka-video-container:-webkit-full-screen {
width: 100%;
height: 100%;
background-color: #000;
}
.shaka-video-container:-webkit-full-screen .shaka-text-container {
font-size: 4.4vmin;
}
.shaka-video-container:-moz-full-screen {
width: 100%;
height: 100%;
background-color: #000;
}
.shaka-video-container:-moz-full-screen .shaka-text-container {
font-size: 4.4vmin;
}
.shaka-video-container:-ms-fullscreen {
width: 100%;
height: 100%;
background-color: #000;
}
.shaka-video-container:-ms-fullscreen .shaka-text-container {
font-size: 4.4vmin;
}
.shaka-controls-container {
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
margin: 0;
padding: 0;
width: 100%;
height: 100%;
box-sizing: border-box;
display: flex;
flex-direction: column;
justify-content: flex-end;
align-items: center;
z-index: 1;
}
.shaka-video-container:not([shaka-controls="true"]) .shaka-controls-container {
display: none;
}
.shaka-controls-container * {
flex-shrink: 0;
}
.shaka-controls-container[casting="true"] .shaka-fullscreen-button {
display: none;
}
.shaka-canvas-container {
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
margin: 0;
padding: 0;
width: 100%;
height: 100%;
pointer-events: none;
}
.shaka-vr-canvas-container {
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
margin: 0;
padding: 0;
width: 100%;
height: 100%;
pointer-events: none;
}
.shaka-bottom-controls {
width: 98%;
padding: 0;
z-index: 1;
}
.shaka-controls-button-panel {
padding: 0;
margin: 0;
display: flex;
flex-direction: row;
justify-content: flex-end;
align-items: center;
overflow: hidden;
min-width: 48px;
font-size: 12px;
font-weight: 400;
font-style: normal;
user-select: none;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
opacity: 0;
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
}
.shaka-controls-container[casting="true"] .shaka-controls-button-panel,
.shaka-controls-container[shown="true"] .shaka-controls-button-panel {
opacity: 1;
}
.shaka-controls-button-panel > * {
color: #fff;
height: 48px;
width: 48px;
line-height: 0.5;
padding: 0 2px;
background: 0 0;
border: 0;
cursor: pointer;
opacity: 0.9;
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.1s;
text-shadow: 0 0 2px rgba(0, 0, 0, 0.5);
}
.shaka-controls-button-panel > .shaka-fast-forward-button .material-svg-icon,
.shaka-controls-button-panel > .shaka-rewind-button .material-svg-icon,
.shaka-controls-button-panel > .shaka-skip-next-button .material-svg-icon,
.shaka-controls-button-panel > .shaka-skip-previous-button .material-svg-icon,
.shaka-controls-button-panel > .shaka-small-play-button .material-svg-icon {
font-size: 32px;
}
.shaka-controls-button-panel > .shaka-fullscreen-button .material-svg-icon {
font-size: 24px;
}
.shaka-controls-button-panel > .shaka-overflow-menu-button {
position: relative;
}
.shaka-controls-button-panel > .shaka-overflow-menu-button .material-svg-icon {
font-size: 24px;
}
.shaka-controls-button-panel > :hover {
opacity: 1;
}
.shaka-controls-button-panel .shaka-overflow-menu-only {
display: none;
}
.shaka-play-button-container {
margin: 0;
width: 100%;
height: 100%;
flex-shrink: 1;
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0;
display: flex;
justify-content: center;
align-items: center;
z-index: 1;
}
.shaka-statistics-container {
overflow-x: hidden;
overflow-y: auto;
scrollbar-color: white rgba(0, 0, 0, 0.5);
scrollbar-width: thin;
min-width: 300px;
color: #fff;
background-color: rgba(35, 35, 35, 0.9);
font-size: 14px;
padding: 5px 10px;
border-radius: 2px;
position: absolute;
z-index: 2;
left: 15px;
top: 15px;
max-height: calc(100% - 115px);
opacity: 0;
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
}
.shaka-controls-container[casting="true"] .shaka-statistics-container,
.shaka-controls-container[shown="true"] .shaka-statistics-container {
opacity: 1;
}
.shaka-statistics-container div {
display: flex;
justify-content: space-between;
}
.shaka-statistics-container span {
color: #969696;
}
.shaka-ad-statistics-container {
overflow-x: hidden;
overflow-y: auto;
scrollbar-color: white rgba(0, 0, 0, 0.5);
scrollbar-width: thin;
min-width: 150px;
color: #fff;
background-color: rgba(35, 35, 35, 0.9);
font-size: 14px;
padding: 5px 10px;
border-radius: 2px;
position: absolute;
z-index: 2;
right: 15px;
top: 15px;
max-height: calc(100% - 115px);
opacity: 0;
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
}
.shaka-controls-container[casting="true"] .shaka-ad-statistics-container,
.shaka-controls-container[shown="true"] .shaka-ad-statistics-container {
opacity: 1;
}
.shaka-ad-statistics-container div {
display: flex;
justify-content: space-between;
}
.shaka-ad-statistics-container span {
color: #969696;
}
.shaka-context-menu {
overflow-x: hidden;
overflow-y: auto;
white-space: nowrap;
background: rgba(28, 28, 28, 0.9);
border-radius: 2px;
min-width: 190px;
opacity: 0;
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
display: flex;
flex-direction: column;
align-items: stretch;
position: absolute;
z-index: 3;
}
.shaka-controls-container[casting="true"] .shaka-context-menu,
.shaka-controls-container[shown="true"] .shaka-context-menu {
opacity: 1;
}
.shaka-context-menu button {
font-size: 14px;
background: 0 0;
color: #fff;
border: none;
min-height: 30px;
padding: 10px;
display: flex;
align-items: center;
cursor: pointer;
}
.shaka-context-menu button:hover {
background: rgba(255, 255, 255, 0.1);
}
.shaka-context-menu button label {
cursor: pointer;
margin-left: 5px;
}
.shaka-keyboard-navigation .shaka-context-menu button:focus {
background: rgba(255, 255, 255, 0.1);
}
.shaka-context-menu button .shaka-current-selection-span {
display: none;
}
.shaka-scrim-container {
margin: 0;
width: 100%;
position: absolute;
left: 0;
right: 0;
bottom: 0;
flex-shrink: 1;
opacity: 0;
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
height: 61px;
background: linear-gradient(rgba(0, 0, 0, 0) 0, rgba(0, 0, 0, 0.5) 100%);
}
.shaka-controls-container[casting="true"] .shaka-scrim-container,
.shaka-controls-container[shown="true"] .shaka-scrim-container {
opacity: 1;
}
.shaka-text-container {
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0;
pointer-events: none;
bottom: 0;
width: 100%;
min-width: 48px;
transition: bottom cubic-bezier(0.4, 0, 0.6, 1) 0.1s;
transition-delay: 0.5s;
font-size: 20px;
line-height: 1.4;
color: #fff;
}
.shaka-text-container span.shaka-text-wrapper {
display: inline;
background: 0 0;
}
.shaka-controls-container[shown="true"] ~ .shaka-text-container {
transition-delay: 0s;
}
.shaka-spinner-container {
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0;
width: 100%;
height: 100%;
flex-shrink: 1;
display: flex;
justify-content: center;
align-items: center;
}
.shaka-video-container:not([shaka-controls="true"]) .shaka-spinner-container {
display: none;
}
.shaka-hidden-fast-forward-container,
.shaka-hidden-rewind-container {
height: 100%;
width: 40%;
flex-shrink: 1;
z-index: 1;
}
.shaka-hidden-fast-forward-container {
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0;
left: 60%;
}
.shaka-hidden-rewind-container {
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0;
}
.shaka-video-container.no-cursor {
cursor: none !important;
}
.shaka-video-container.no-cursor * {
cursor: none !important;
}
.shaka-play-button {
box-sizing: border-box;
padding: calc(15% / 2);
width: 0;
height: 0;
margin: 0;
border-radius: 50%;
box-shadow: rgba(0, 0, 0, 0.1) 0 0 20px 0;
border: none;
background-size: 50%;
background-repeat: no-repeat;
background-position: center center;
background-color: rgba(255, 255, 255, 0.9);
opacity: 0;
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
}
.shaka-controls-container[casting="true"] .shaka-play-button,
.shaka-controls-container[shown="true"] .shaka-play-button {
opacity: 1;
}
.shaka-play-button[icon="play"] {
background-image: url("data:image/svg+xml,%3Csvg%20fill%3D%22%23000000%22%20height%3D%2224%22%20viewBox%3D%220%200%2024%2024%22%20width%3D%2224%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%0A%20%20%20%20%3Cpath%20d%3D%22M8%205v14l11-7z%22%2F%3E%0A%20%20%20%20%3Cpath%20d%3D%22M0%200h24v24H0z%22%20fill%3D%22none%22%2F%3E%0A%3C%2Fsvg%3E");
}
.shaka-play-button[icon="pause"] {
background-image: url("data:image/svg+xml,%3Csvg%20fill%3D%22%23000000%22%20height%3D%2224%22%20viewBox%3D%220%200%2024%2024%22%20width%3D%2224%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%0A%20%20%20%20%3Cpath%20d%3D%22M6%2019h4V5H6v14zm8-14v14h4V5h-4z%22%2F%3E%0A%20%20%20%20%3Cpath%20d%3D%22M0%200h24v24H0z%22%20fill%3D%22none%22%2F%3E%0A%3C%2Fsvg%3E");
}
.shaka-play-button[icon="replay"] {
background-image: url("data:image/svg+xml,%3Csvg%20fill%3D%22%231f1f1f%22%20height%3D%2224px%22%20viewBox%3D%220%20-960%20960%20960%22%20width%3D%2224px%22%20xmlns%3D%22http%3A%2F%2Fwww.w3.org%2F2000%2Fsvg%22%3E%0A%20%20%3Cpath%20d%3D%22M480-80q-75%200-140.5-28.5t-114-77q-48.5-48.5-77-114T120-440h80q0%20117%2081.5%20198.5T480-160q117%200%20198.5-81.5T760-440q0-117-81.5-198.5T480-720h-6l62%2062-56%2058-160-160%20160-160%2056%2058-62%2062h6q75%200%20140.5%2028.5t114%2077q48.5%2048.5%2077%20114T840-440q0%2075-28.5%20140.5t-77%20114q-48.5%2048.5-114%2077T480-80Z%22%2F%3E%0A%3C%2Fsvg%3E");
}
@media (prefers-reduced-transparency: no-preference) {
.shaka-controls-container[shown="true"] .shaka-play-button {
opacity: 0.75;
}
}
.shaka-current-time {
font-size: 14px;
color: #fff;
cursor: pointer;
width: auto;
padding: 0 5px;
}
.shaka-current-time[disabled] {
background-color: transparent;
color: #fff;
cursor: default;
}
.shaka-controls-container button:focus,
.shaka-controls-container input:focus {
outline: 1px solid Highlight;
}
.shaka-controls-container button:-moz-focus-inner,
.shaka-controls-container input:-moz-focus-outer {
outline: 0;
border: 0;
}
.shaka-controls-container:not(.shaka-keyboard-navigation) button:focus,
.shaka-controls-container:not(.shaka-keyboard-navigation) input:focus {
outline: 0;
}
.shaka-fast-forward-container,
.shaka-rewind-container {
height: 100%;
width: 100%;
flex-shrink: 1;
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0;
display: flex;
flex-direction: row;
justify-content: center;
align-items: center;
margin: 0;
border: none;
color: #fff;
background-color: rgba(0, 0, 0, 0.5);
cursor: default;
font-size: 20px;
opacity: 0;
user-select: none;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
}
.shaka-fast-forward-container {
border-radius: 40% 0 0 40%;
}
.shaka-rewind-container {
border-radius: 0 40% 40% 0;
}
.shaka-forward-rewind-container-icon {
font-size: 32px;
}
.shaka-range-container {
position: relative;
top: 0;
left: 0;
margin: calc((12px - 4px) / 2) 6px;
height: 4px;
border-radius: 4px;
background: #fff;
box-sizing: content-box;
}
.shaka-volume-bar-container {
width: 100px;
padding: 0;
transition-property: opacity, width;
transition-duration: 250ms;
transition-timing-function: cubic-bezier(0.4, 0, 0.6, 1);
}
.shaka-volume-bar-container:hover {
width: 100px !important;
opacity: 1 !important;
}
@media (max-width: 474px) {
.shaka-volume-bar-container {
width: 50px;
}
.shaka-volume-bar-container:hover {
width: 50px !important;
}
.shaka-mute-button:hover + .shaka-volume-bar-container-allow-hiding {
width: 50px;
opacity: 1;
}
}
.shaka-mute-button
+ .shaka-volume-bar-container-allow-hiding:not(:focus-within) {
width: 0;
opacity: 0;
}
@media (min-width: 475px) {
.shaka-mute-button:hover + .shaka-volume-bar-container-allow-hiding {
width: 100px;
opacity: 1;
}
}
.shaka-range-element {
-webkit-appearance: none;
background: 0 0;
cursor: pointer;
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
margin: 0;
padding: 0;
width: 100%;
height: 100%;
height: 12px;
top: calc((4px - 12px) / 2);
z-index: 1;
}
.shaka-range-element::-webkit-slider-runnable-track {
width: 100%;
cursor: pointer;
height: 12px;
background: 0 0;
color: transparent;
border: none;
}
.shaka-range-element::-webkit-slider-thumb {
-webkit-appearance: none;
border: none;
border-radius: 12px;
height: 12px;
width: 12px;
background: #fff;
}
.shaka-range-element::-moz-range-track {
width: 100%;
cursor: pointer;
height: 12px;
background: 0 0;
color: transparent;
border: none;
}
.shaka-range-element::-moz-range-thumb {
-webkit-appearance: none;
border: none;
border-radius: 12px;
height: 12px;
width: 12px;
background: #fff;
}
.shaka-seek-bar-container {
opacity: 0;
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
top: 5px;
height: 5px;
margin-bottom: 0;
background-clip: padding-box !important;
border-top: 4px solid transparent;
border-bottom: 4px solid transparent;
}
.shaka-controls-container[casting="true"] .shaka-seek-bar-container,
.shaka-controls-container[shown="true"] .shaka-seek-bar-container {
opacity: 1;
}
.shaka-seek-bar-container .shaka-seek-bar {
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 250ms;
opacity: 0;
}
.shaka-seek-bar-container:hover .shaka-seek-bar {
opacity: 1;
}
.shaka-ad-markers {
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
margin: 0;
padding: 0;
width: 100%;
height: 100%;
}
.shaka-spacer {
cursor: default;
flex-shrink: 1;
flex-grow: 1;
margin: 0;
}
.shaka-overflow-menu,
.shaka-settings-menu {
overflow-x: hidden;
overflow-y: auto;
scrollbar-color: white rgba(0, 0, 0, 0.5);
scrollbar-width: thin;
white-space: nowrap;
background: rgba(28, 28, 28, 0.9);
border-radius: 15px;
max-height: 250px;
min-width: 190px;
padding: 5px 0;
opacity: 0;
transition: opacity cubic-bezier(0.4, 0, 0.6, 1) 0.6s;
display: flex;
flex-direction: column;
align-items: stretch;
position: absolute;
z-index: 2;
right: 15px;
bottom: 62px;
}
.shaka-controls-container[casting="true"] .shaka-overflow-menu,
.shaka-controls-container[casting="true"] .shaka-settings-menu,
.shaka-controls-container[shown="true"] .shaka-overflow-menu,
.shaka-controls-container[shown="true"] .shaka-settings-menu {
opacity: 1;
}
.shaka-overflow-menu button,
.shaka-settings-menu button {
font-size: 14px;
background: 0 0;
color: #fff;
border: none;
min-height: 30px;
padding: 10px;
display: flex;
align-items: center;
cursor: pointer;
}
.shaka-overflow-menu button:hover,
.shaka-settings-menu button:hover {
background: rgba(255, 255, 255, 0.1);
}
.shaka-overflow-menu button label,
.shaka-settings-menu button label {
cursor: pointer;
}
.shaka-keyboard-navigation .shaka-overflow-menu button:focus,
.shaka-keyboard-navigation .shaka-settings-menu button:focus {
background: rgba(255, 255, 255, 0.1);
}
.shaka-overflow-menu .material-svg-icon,
.shaka-settings-menu .material-svg-icon {
padding-left: 0;
padding-right: 10px;
}
.shaka-overflow-menu .material-svg-icon.shaka-chosen-item,
.shaka-settings-menu .material-svg-icon.shaka-chosen-item {
order: -1;
line-height: 17px;
font-size: 18px;
}
.shaka-overflow-menu.shaka-low-position,
.shaka-settings-menu.shaka-low-position {
bottom: 48px;
}
.shaka-overflow-menu span {
text-align: left;
}
.shaka-overflow-button-label {
position: relative;
display: flex;
flex-direction: column;
}
.shaka-overflow-button-label-inline {
box-sizing: border-box;
flex-direction: row;
justify-content: space-between;
width: calc(100% - 34px);
padding-right: 28px;
background-image: url("data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIGhlaWdodD0iMjRweCIgdmlld0JveD0iMCAwIDI0IDI0IiB3aWR0aD0iMjRweCIgZmlsbD0iI2VlZWVlZSI+PHBhdGggZD0iTTAgMGgyNHYyNEgwVjB6IiBmaWxsPSJub25lIi8+PHBhdGggZD0iTTguNTkgMTYuNTlMMTMuMTcgMTIgOC41OSA3LjQxIDEwIDZsNiA2LTYgNi0xLjQxLTEuNDF6Ii8+PC9zdmc+");
background-repeat: no-repeat;
background-position: right 5px center;
background-size: 24px 24px;
}
.shaka-simple-overflow-button-label-inline {
box-sizing: border-box;
flex-direction: row;
justify-content: space-between;
width: calc(100% - 50px);
}
.shaka-current-selection-span {
font-size: 12px;
padding-left: 10px;
}
.shaka-current-auto-quality {
margin-left: 5px;
font-size: 11px;
color: #ccc;
}
.shaka-current-quality-mark,
.shaka-quality-mark {
color: red;
margin-left: 2px !important;
font-size: 10px;
height: 17px;
}
.shaka-quality-mark {
line-height: 6px;
}
.shaka-overflow-playback-rate-mark,
.shaka-overflow-quality-mark {
background: red;
color: #fff;
border-radius: 2px;
font-family: Roboto, sans-serif, TengwarTelcontar;
font-size: 10px;
font-weight: 700;
line-height: 10px;
text-shadow: none;
padding: 1px;
position: absolute;
right: 4px;
top: 10px;
}
.shaka-settings-menu span {
margin-left: 28px;
}
.shaka-settings-menu span.shaka-chosen-item {
margin-left: 0;
}
.shaka-settings-menu .shaka-chapter {
margin-left: 10px;
}
.shaka-back-to-overflow-button {
border-bottom: 1px solid rgba(255, 255, 255, 0.2) !important;
}
.shaka-back-to-overflow-button span {
margin-left: 0;
}
.shaka-back-to-overflow-button .material-svg-icon {
padding-right: 10px;
font-size: 18px !important;
}
.shaka-back-to-overflow-button:hover {
background: 0 0 !important;
}
.shaka-controls-container[ad-active="true"] {
pointer-events: none;
}
.shaka-controls-container[ad-active="true"] .shaka-bottom-controls {
pointer-events: auto;
}
.shaka-client-side-ad-container,
.shaka-server-side-ad-container {
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0;
}
.shaka-video-container[shaka-controls="true"]
.shaka-client-side-ad-container
iframe,
.shaka-video-container[shaka-controls="true"]
.shaka-server-side-ad-container
iframe {
height: 90%;
}
.shaka-ad-controls {
display: flex;
flex-direction: row;
z-index: 1;
padding-bottom: 1%;
}
.shaka-video-container:not([shaka-controls="true"]) .shaka-ad-controls {
display: none;
}
.shaka-ad-controls button,
.shaka-ad-controls div {
color: #fff;
font-size: initial;
}
.shaka-ad-info {
font-size: 14px;
color: #fff;
width: auto;
padding: 0 5px;
}
.shaka-ad-info[disabled] {
background-color: transparent;
color: #fff;
cursor: default;
padding: 0;
}
.shaka-skip-ad-container {
position: relative;
right: calc((100% - 98%) / 2 * -1);
display: flex;
flex-direction: row;
margin: 0;
margin-left: auto;
}
.shaka-skip-ad-button {
padding: 5px 15px;
background: rgba(0, 0, 0, 0.7);
border: none;
cursor: pointer;
}
.shaka-skip-ad-button:disabled {
background: rgba(0, 0, 0, 0.3);
}
.shaka-skip-ad-counter {
padding: 5px;
background: rgba(0, 0, 0, 0.7);
margin: 0;
} /*!
* @license
* The tooltip is based on https://github.com/felipefialho/css-components/
* Local modifications have been performed.
*
* Copyright (c) 2017 Felipe Fialho
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
.shaka-tooltips-on {
overflow: visible;
}
.shaka-tooltips-on > .shaka-tooltip,
.shaka-tooltips-on > .shaka-tooltip-status {
position: relative;
}
.shaka-tooltips-on > .shaka-tooltip-status:active:after,
.shaka-tooltips-on > .shaka-tooltip-status:focus-visible:after,
.shaka-tooltips-on > .shaka-tooltip-status:hover:after,
.shaka-tooltips-on > .shaka-tooltip:active:after,
.shaka-tooltips-on > .shaka-tooltip:focus-visible:after,
.shaka-tooltips-on > .shaka-tooltip:hover:after {
content: attr(aria-label);
font-family: Roboto, sans-serif, TengwarTelcontar;
line-height: 20px;
white-space: nowrap;
font-size: 14px;
background: rgba(0, 0, 0, 0.5);
color: #fff;
border-radius: 2px;
padding: 2px 10px;
position: absolute;
bottom: 62px;
left: calc(48px / 2);
-webkit-transform: translateX(-50%);
-moz-transform: translateX(-50%);
-ms-transform: translateX(-50%);
-o-transform: translateX(-50%);
transform: translateX(-50%);
}
@media (prefers-reduced-transparency) {
.shaka-tooltips-on > .shaka-tooltip-status:active:after,
.shaka-tooltips-on > .shaka-tooltip-status:focus-visible:after,
.shaka-tooltips-on > .shaka-tooltip-status:hover:after,
.shaka-tooltips-on > .shaka-tooltip:active:after,
.shaka-tooltips-on > .shaka-tooltip:focus-visible:after,
.shaka-tooltips-on > .shaka-tooltip:hover:after {
background-color: rgba(0, 0, 0, 0.9);
}
}
.shaka-tooltips-on.shaka-tooltips-low-position > .shaka-tooltip:active:after,
.shaka-tooltips-on.shaka-tooltips-low-position
> .shaka-tooltip:focus-visible:after,
.shaka-tooltips-on.shaka-tooltips-low-position > .shaka-tooltip:hover:after {
bottom: 48px;
}
.shaka-tooltips-on > .shaka-tooltip-status:active:after,
.shaka-tooltips-on > .shaka-tooltip-status:focus-visible:after,
.shaka-tooltips-on > .shaka-tooltip-status:hover:after {
content: attr(aria-label) " (" attr(shaka-status) ")";
}
.shaka-tooltips-on button:first-child:active:after,
.shaka-tooltips-on button:first-child:focus-visible:after,
.shaka-tooltips-on button:first-child:hover:after {
left: 0;
-webkit-transform: translateX(0);
-moz-transform: translateX(0);
-ms-transform: translateX(0);
-o-transform: translateX(0);
transform: translateX(0);
}
.shaka-tooltips-on button:last-child:active:after,
.shaka-tooltips-on button:last-child:focus-visible:after,
.shaka-tooltips-on button:last-child:hover:after {
left: 48px;
-webkit-transform: translateX(-100%);
-moz-transform: translateX(-100%);
-ms-transform: translateX(-100%);
-o-transform: translateX(-100%);
transform: translateX(-100%);
}
#shaka-player-ui-thumbnail-container {
background-color: #000;
border: 1px solid #000;
box-shadow: 0 8px 8px 0 rgba(0, 0, 0, 0.5);
min-width: 150px;
overflow: hidden;
position: absolute;
visibility: hidden;
width: 15%;
z-index: 1;
pointer-events: none;
}
#shaka-player-ui-thumbnail-container #shaka-player-ui-thumbnail-image {
position: absolute;
}
#shaka-player-ui-thumbnail-container #shaka-player-ui-thumbnail-time-container {
bottom: 0;
left: 0;
position: absolute;
right: 0;
display: flex;
justify-content: center;
}
#shaka-player-ui-thumbnail-container
#shaka-player-ui-thumbnail-time-container
#shaka-player-ui-thumbnail-time {
background-color: rgba(0, 0, 0, 0.5);
border-radius: 14px;
color: #fff;
font-size: 14px;
padding: 0 5px;
}
@media (prefers-reduced-transparency) {
#shaka-player-ui-thumbnail-container
#shaka-player-ui-thumbnail-time-container
#shaka-player-ui-thumbnail-time {
background-color: rgba(0, 0, 0, 0.9);
}
}
#shaka-player-ui-thumbnail-container.portrait-thumbnail {
min-width: 75px;
width: 7.5%;
}
#shaka-player-ui-time-container {
background-color: rgba(0, 0, 0, 0.5);
border-radius: 5px;
color: #fff;
display: flex;
font-size: 14px;
justify-content: center;
overflow: hidden;
padding: 0 3px;
position: absolute;
visibility: hidden;
z-index: 1;
}
@media (prefers-reduced-transparency) {
#shaka-player-ui-time-container {
background-color: rgba(0, 0, 0, 0.9);
}
}
.material-svg-icon {
display: inline-block;
fill: currentcolor;
width: 1em;
height: 1em;
}
@font-face {
font-family: Roboto;
font-style: normal;
font-weight: 400;
font-stretch: normal;
src: url(./fonts/KFOMCnqEu92Fr1ME7kSn66aGLdTylUAMQXC89YmC2DPNWubEbVmUiA8.ttf)
format("truetype");
} /*# sourceMappingURL=controls.css.map */

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -1,19 +1,19 @@
@font-face {
font-family: 'Roboto';
font-family: "Roboto";
font-style: normal;
font-weight: 400;
font-display: swap;
src: url(https://fonts.gstatic.com/s/roboto/v27/KFOmCnqEu92Fr1Me5Q.ttf) format('truetype');
src: url(./fonts/KFOmCnqEu92Fr1Me5Q.ttf) format("truetype");
}
@font-face {
font-family: 'Roboto';
font-family: "Roboto";
font-style: normal;
font-weight: 500;
font-display: swap;
src: url(https://fonts.gstatic.com/s/roboto/v27/KFOlCnqEu92Fr1MmEU9vAw.ttf) format('truetype');
src: url(./fonts/KFOlCnqEu92Fr1MmEU9vAw.ttf) format("truetype");
}
.youtube-theme {
font-family: 'Roboto', sans-serif;
font-family: "Roboto", sans-serif;
}
.youtube-theme .shaka-bottom-controls {
width: 100%;
@@ -27,18 +27,18 @@
display: flex;
-webkit-box-orient: vertical;
-webkit-box-direction: normal;
-ms-flex-direction: column;
flex-direction: column;
-ms-flex-direction: column;
flex-direction: column;
}
.youtube-theme .shaka-ad-controls {
-webkit-box-ordinal-group: 2;
-ms-flex-order: 1;
order: 1;
-ms-flex-order: 1;
order: 1;
}
.youtube-theme .shaka-controls-button-panel {
-webkit-box-ordinal-group: 3;
-ms-flex-order: 2;
order: 2;
-ms-flex-order: 2;
order: 2;
height: 40px;
padding: 0 10px;
}
@@ -48,36 +48,36 @@
}
.youtube-theme .shaka-small-play-button {
-webkit-box-ordinal-group: -2;
-ms-flex-order: -3;
order: -3;
-ms-flex-order: -3;
order: -3;
}
.youtube-theme .shaka-mute-button {
-webkit-box-ordinal-group: -1;
-ms-flex-order: -2;
order: -2;
-ms-flex-order: -2;
order: -2;
}
.youtube-theme .shaka-controls-button-panel > * {
margin: 0;
padding: 3px 8px;
color: #EEE;
color: #eee;
height: 40px;
}
.youtube-theme .shaka-controls-button-panel > *:focus {
outline: none;
-webkit-box-shadow: inset 0 0 0 2px rgba(27, 127, 204, 0.8);
box-shadow: inset 0 0 0 2px rgba(27, 127, 204, 0.8);
color: #FFF;
box-shadow: inset 0 0 0 2px rgba(27, 127, 204, 0.8);
color: #fff;
}
.youtube-theme .shaka-controls-button-panel > *:hover {
color: #FFF;
color: #fff;
}
.youtube-theme .shaka-controls-button-panel .shaka-volume-bar-container {
position: relative;
z-index: 10;
left: -1px;
-webkit-box-ordinal-group: 0;
-ms-flex-order: -1;
order: -1;
-ms-flex-order: -1;
order: -1;
opacity: 0;
width: 0px;
-webkit-transition: width 0.2s cubic-bezier(0.4, 0, 1, 1);
@@ -120,23 +120,25 @@
opacity: 1;
cursor: pointer;
}
.youtube-theme .shaka-seek-bar-container input[type=range]::-webkit-slider-thumb {
background: #FF0000;
.youtube-theme
.shaka-seek-bar-container
input[type="range"]::-webkit-slider-thumb {
background: #ff0000;
cursor: pointer;
}
.youtube-theme .shaka-seek-bar-container input[type=range]::-moz-range-thumb {
background: #FF0000;
.youtube-theme .shaka-seek-bar-container input[type="range"]::-moz-range-thumb {
background: #ff0000;
cursor: pointer;
}
.youtube-theme .shaka-seek-bar-container input[type=range]::-ms-thumb {
background: #FF0000;
.youtube-theme .shaka-seek-bar-container input[type="range"]::-ms-thumb {
background: #ff0000;
cursor: pointer;
}
.youtube-theme .shaka-video-container * {
font-family: 'Roboto', sans-serif;
font-family: "Roboto", sans-serif;
}
.youtube-theme .shaka-video-container .material-icons-round {
font-family: 'Material Icons Sharp';
font-family: "Material Icons Sharp";
}
.youtube-theme .shaka-overflow-menu,
.youtube-theme .shaka-settings-menu {
@@ -170,14 +172,14 @@
}
.youtube-theme .shaka-settings-menu button[aria-selected="true"] span {
-webkit-box-ordinal-group: 3;
-ms-flex-order: 2;
order: 2;
-ms-flex-order: 2;
order: 2;
margin-left: 0;
}
.youtube-theme .shaka-settings-menu button[aria-selected="true"] i {
-webkit-box-ordinal-group: 2;
-ms-flex-order: 1;
order: 1;
-ms-flex-order: 1;
order: 1;
font-size: 18px;
padding-left: 5px;
}
@@ -192,25 +194,25 @@
display: -ms-flexbox;
display: flex;
-webkit-box-pack: justify;
-ms-flex-pack: justify;
justify-content: space-between;
-ms-flex-pack: justify;
justify-content: space-between;
-webkit-box-orient: horizontal;
-webkit-box-direction: normal;
-ms-flex-direction: row;
flex-direction: row;
-ms-flex-direction: row;
flex-direction: row;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
-ms-flex-align: center;
align-items: center;
cursor: default;
outline: none;
height: 40px;
-webkit-box-flex: 0;
-ms-flex: 0 0 100%;
flex: 0 0 100%;
-ms-flex: 0 0 100%;
flex: 0 0 100%;
}
.youtube-theme .shaka-overflow-menu button .shaka-overflow-button-label span {
-ms-flex-negative: initial;
flex-shrink: initial;
flex-shrink: initial;
padding-left: 15px;
font-size: 13px;
font-weight: 500;
@@ -218,11 +220,11 @@
display: -ms-flexbox;
display: flex;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
-ms-flex-align: center;
align-items: center;
}
.youtube-theme .shaka-overflow-menu span + span {
color: #FFF;
color: #fff;
font-weight: 400 !important;
font-size: 12px !important;
padding-right: 8px;
@@ -230,7 +232,7 @@
}
.youtube-theme .shaka-overflow-menu span + span:after {
content: "navigate_next";
font-family: 'Material Icons Sharp';
font-family: "Material Icons Sharp";
font-size: 20px;
}
.youtube-theme .shaka-overflow-menu .shaka-pip-button span + span {
@@ -270,10 +272,10 @@
}
.youtube-theme .shaka-overflow-menu button,
.youtube-theme .shaka-settings-menu button {
color: #EEE;
color: #eee;
}
.youtube-theme .shaka-captions-off {
color: #BFBFBF;
color: #bfbfbf;
}
.youtube-theme .shaka-overflow-menu-button {
font-size: 18px;

1174
src-tauri/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,22 +1,30 @@
[workspace]
members = ["crates/danmu_stream"]
members = ["crates/danmu_stream", "crates/recorder"]
resolver = "2"
[package]
name = "bili-shadowreplay"
version = "2.11.3"
version = "2.16.3"
description = "BiliBili ShadowReplay"
authors = ["Xinrea"]
license = ""
repository = ""
edition = "2021"
[lints.clippy]
correctness="deny"
suspicious="deny"
complexity="deny"
style="deny"
perf="deny"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
danmu_stream = { path = "crates/danmu_stream" }
recorder = { path = "crates/recorder" }
serde_json = "1.0"
reqwest = { version = "0.11", features = ["blocking", "json", "multipart"] }
reqwest = { workspace = true}
serde_derive = "1.0.158"
serde = "1.0.158"
sysinfo = "0.32.0"
@@ -25,7 +33,6 @@ async-std = "1.12.0"
async-ffmpeg-sidecar = "0.0.1"
chrono = { version = "0.4.24", features = ["serde"] }
toml = "0.7.3"
custom_error = "1.9.2"
regex = "1.7.3"
tokio = { version = "1.27.0", features = ["process"] }
platform-dirs = "0.3.0"
@@ -43,15 +50,21 @@ mime_guess = "2.0"
async-trait = "0.1.87"
whisper-rs = "0.14.2"
hound = "3.5.1"
uuid = { version = "1.4", features = ["v4"] }
uuid = { workspace = true }
axum = { version = "0.7", features = ["macros", "multipart"] }
tower-http = { version = "0.5", features = ["cors", "fs"] }
futures-core = "0.3"
futures = "0.3"
tokio-util = { version = "0.7", features = ["io"] }
tokio-stream = "0.1"
clap = { version = "4.5.37", features = ["derive"] }
url = "2.5.4"
srtparse = "0.2.0"
thiserror = "2"
deno_core = "0.355"
sanitize-filename = "0.6.0"
socketioxide = "0.17.2"
scraper = "0.24.0"
[features]
# this feature is used for production builds or when `devPath` points to the filesystem
@@ -138,3 +151,7 @@ whisper-rs = { version = "0.14.2", default-features = false }
[target.'cfg(darwin)'.dependencies.whisper-rs]
version = "0.14.2"
features = ["metal"]
[workspace.dependencies]
reqwest = { version = "0.11", features = ["blocking", "json", "multipart", "gzip"] }
uuid = { version = "1.4", features = ["v4"] }

View File

@@ -1,4 +1,4 @@
fn main() {
#[cfg(feature = "gui")]
tauri_build::build()
tauri_build::build();
}

View File

@@ -2,11 +2,7 @@
"identifier": "migrated",
"description": "permissions that were migrated from v1",
"local": true,
"windows": [
"main",
"Live*",
"Clip*"
],
"windows": ["main", "Live*", "Clip*"],
"permissions": [
"core:default",
"fs:allow-read-file",
@@ -20,9 +16,7 @@
"fs:allow-exists",
{
"identifier": "fs:scope",
"allow": [
"**"
]
"allow": ["**"]
},
"core:window:default",
"core:window:allow-start-dragging",
@@ -42,19 +36,10 @@
"identifier": "http:default",
"allow": [
{
"url": "https://*.hdslb.com/"
"url": "https://*.*"
},
{
"url": "https://afdian.com/"
},
{
"url": "https://*.afdiancdn.com/"
},
{
"url": "https://*.douyin.com/"
},
{
"url": "https://*.douyinpic.com/"
"url": "http://*.*"
}
]
},
@@ -74,4 +59,4 @@
"dialog:default",
"deep-link:default"
]
}
}

View File

@@ -7,38 +7,42 @@ edition = "2021"
name = "danmu_stream"
path = "src/lib.rs"
[[example]]
name = "bilibili"
path = "examples/bilibili.rs"
[[example]]
name = "douyin"
path = "examples/douyin.rs"
[dependencies]
tokio = { version = "1.0", features = ["full"] }
tokio-tungstenite = { version = "0.20", features = ["native-tls"] }
tokio = { version = "1", features = ["full"] }
tokio-tungstenite = { version = "0.27", features = ["native-tls"] }
futures-util = "0.3"
prost = "0.12"
prost = "0.14"
chrono = "0.4"
log = "0.4"
env_logger = "0.10"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
reqwest = { version = "0.11", features = ["json"] }
env_logger = "0.11"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
reqwest = { version = "0.12", features = ["json"] }
url = "2.4"
md5 = "0.7"
md5 = "0.8"
regex = "1.9"
deno_core = "0.242.0"
pct-str = "2.0.0"
custom_error = "1.9.2"
deno_core = "0.355"
pct-str = "2.0"
thiserror = "2.0"
flate2 = "1.0"
scroll = "0.13.0"
scroll_derive = "0.13.0"
brotli = "8.0.1"
scroll = "0.13"
scroll_derive = "0.13"
brotli = "8.0"
http = "1.0"
rand = "0.9.1"
urlencoding = "2.1.3"
rand = "0.9"
urlencoding = "2.1"
gzip = "0.1.2"
hex = "0.4.3"
async-trait = "0.1.88"
uuid = "1.17.0"
async-trait = "0.1"
uuid = { workspace = true}
[build-dependencies]
tonic-build = "0.10"
tonic-build = "0.14"

View File

@@ -8,7 +8,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
// Initialize logging
env_logger::init();
// Replace these with actual values
let room_id = 768756;
let room_id = "768756";
let cookie = "";
let stream = Arc::new(DanmuStream::new(ProviderType::BiliBili, cookie, room_id).await?);

View File

@@ -8,7 +8,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
// Initialize logging
env_logger::init();
// Replace these with actual values
let room_id = 7514298567821937427; // Replace with actual Douyin room_id. When live starts, the room_id will be generated, so it's more like a live_id.
let room_id = "7514298567821937427"; // Replace with actual Douyin room_id. When live starts, the room_id will be generated, so it's more like a live_id.
let cookie = "your_cookie";
let stream = Arc::new(DanmuStream::new(ProviderType::Douyin, cookie, room_id).await?);

View File

@@ -1,16 +1,17 @@
use std::sync::Arc;
use tokio::sync::{mpsc, RwLock};
use crate::{
provider::{new, DanmuProvider, ProviderType},
DanmuMessageType, DanmuStreamError,
};
use tokio::sync::{mpsc, RwLock};
#[derive(Clone)]
pub struct DanmuStream {
pub provider_type: ProviderType,
pub identifier: String,
pub room_id: u64,
pub room_id: String,
pub provider: Arc<RwLock<Box<dyn DanmuProvider>>>,
tx: mpsc::UnboundedSender<DanmuMessageType>,
rx: Arc<RwLock<mpsc::UnboundedReceiver<DanmuMessageType>>>,
@@ -20,14 +21,14 @@ impl DanmuStream {
pub async fn new(
provider_type: ProviderType,
identifier: &str,
room_id: u64,
room_id: &str,
) -> Result<Self, DanmuStreamError> {
let (tx, rx) = mpsc::unbounded_channel();
let provider = new(provider_type, identifier, room_id).await?;
Ok(Self {
provider_type,
identifier: identifier.to_string(),
room_id,
room_id: room_id.to_string(),
provider: Arc::new(RwLock::new(provider)),
tx,
rx: Arc::new(RwLock::new(rx)),

View File

@@ -1,19 +1,8 @@
use std::time::Duration;
use crate::DanmuStreamError;
use reqwest::header::HeaderMap;
impl From<reqwest::Error> for DanmuStreamError {
fn from(value: reqwest::Error) -> Self {
Self::HttpError { err: value }
}
}
impl From<url::ParseError> for DanmuStreamError {
fn from(value: url::ParseError) -> Self {
Self::ParseError { err: value }
}
}
use crate::DanmuStreamError;
pub struct ApiClient {
client: reqwest::Client,

View File

@@ -2,16 +2,24 @@ pub mod danmu_stream;
mod http_client;
pub mod provider;
use custom_error::custom_error;
use thiserror::Error;
custom_error! {pub DanmuStreamError
HttpError {err: reqwest::Error} = "HttpError {err}",
ParseError {err: url::ParseError} = "ParseError {err}",
WebsocketError {err: String } = "WebsocketError {err}",
PackError {err: String} = "PackError {err}",
UnsupportProto {proto: u16} = "UnsupportProto {proto}",
MessageParseError {err: String} = "MessageParseError {err}",
InvalidIdentifier {err: String} = "InvalidIdentifier {err}"
#[derive(Error, Debug)]
pub enum DanmuStreamError {
#[error("HttpError {0:?}")]
HttpError(#[from] reqwest::Error),
#[error("ParseError {0:?}")]
ParseError(#[from] url::ParseError),
#[error("WebsocketError {err}")]
WebsocketError { err: String },
#[error("PackError {err}")]
PackError { err: String },
#[error("UnsupportProto {proto}")]
UnsupportProto { proto: u16 },
#[error("MessageParseError {err}")]
MessageParseError { err: String },
#[error("InvalidIdentifier {err}")]
InvalidIdentifier { err: String },
}
#[derive(Debug)]
@@ -21,7 +29,7 @@ pub enum DanmuMessageType {
#[derive(Debug, Clone)]
pub struct DanmuMessage {
pub room_id: u64,
pub room_id: String,
pub user_id: u64,
pub user_name: String,
pub message: String,

View File

@@ -36,15 +36,15 @@ type WsWriteType = futures_util::stream::SplitSink<
pub struct BiliDanmu {
client: ApiClient,
room_id: u64,
user_id: u64,
room_id: String,
user_id: i64,
stop: Arc<RwLock<bool>>,
write: Arc<RwLock<Option<WsWriteType>>>,
}
#[async_trait]
impl DanmuProvider for BiliDanmu {
async fn new(cookie: &str, room_id: u64) -> Result<Self, DanmuStreamError> {
async fn new(cookie: &str, room_id: &str) -> Result<Self, DanmuStreamError> {
// find DedeUserID=<user_id> in cookie str
let user_id = BiliDanmu::parse_user_id(cookie)?;
// add buvid3 to cookie
@@ -54,7 +54,7 @@ impl DanmuProvider for BiliDanmu {
Ok(Self {
client,
user_id,
room_id,
room_id: room_id.to_string(),
stop: Arc::new(RwLock::new(false)),
write: Arc::new(RwLock::new(None)),
})
@@ -65,7 +65,6 @@ impl DanmuProvider for BiliDanmu {
tx: mpsc::UnboundedSender<DanmuMessageType>,
) -> Result<(), DanmuStreamError> {
let mut retry_count = 0;
const MAX_RETRIES: u32 = 5;
const RETRY_DELAY: Duration = Duration::from_secs(5);
info!(
"Bilibili WebSocket connection started, room_id: {}",
@@ -74,33 +73,37 @@ impl DanmuProvider for BiliDanmu {
loop {
if *self.stop.read().await {
info!(
"Bilibili WebSocket connection stopped, room_id: {}",
self.room_id
);
break;
}
match self.connect_and_handle(tx.clone()).await {
Ok(_) => {
info!("Bilibili WebSocket connection closed normally");
break;
info!(
"Bilibili WebSocket connection closed normally, room_id: {}",
self.room_id
);
retry_count = 0;
}
Err(e) => {
error!("Bilibili WebSocket connection error: {}", e);
retry_count += 1;
if retry_count >= MAX_RETRIES {
return Err(DanmuStreamError::WebsocketError {
err: format!("Failed to connect after {} retries", MAX_RETRIES),
});
}
info!(
"Retrying connection in {} seconds... (Attempt {}/{})",
RETRY_DELAY.as_secs(),
retry_count,
MAX_RETRIES
error!(
"Bilibili WebSocket connection error, room_id: {}, error: {}",
self.room_id, e
);
tokio::time::sleep(RETRY_DELAY).await;
retry_count += 1;
}
}
info!(
"Retrying connection in {} seconds... (Attempt {}), room_id: {}",
RETRY_DELAY.as_secs(),
retry_count,
self.room_id
);
tokio::time::sleep(RETRY_DELAY).await;
}
Ok(())
@@ -123,8 +126,10 @@ impl BiliDanmu {
tx: mpsc::UnboundedSender<DanmuMessageType>,
) -> Result<(), DanmuStreamError> {
let wbi_key = self.get_wbi_key().await?;
let real_room = self.get_real_room(&wbi_key, self.room_id).await?;
let danmu_info = self.get_danmu_info(&wbi_key, real_room).await?;
let real_room = self.get_real_room(&wbi_key, &self.room_id).await?;
let danmu_info = self
.get_danmu_info(&wbi_key, real_room.to_string().as_str())
.await?;
let ws_hosts = danmu_info.data.host_list.clone();
let mut conn = None;
log::debug!("ws_hosts: {:?}", ws_hosts);
@@ -238,7 +243,7 @@ impl BiliDanmu {
async fn get_danmu_info(
&self,
wbi_key: &str,
room_id: u64,
room_id: &str,
) -> Result<DanmuInfo, DanmuStreamError> {
let params = self
.get_sign(
@@ -265,7 +270,7 @@ impl BiliDanmu {
Ok(resp)
}
async fn get_real_room(&self, wbi_key: &str, room_id: u64) -> Result<u64, DanmuStreamError> {
async fn get_real_room(&self, wbi_key: &str, room_id: &str) -> Result<i64, DanmuStreamError> {
let params = self
.get_sign(
wbi_key,
@@ -293,14 +298,14 @@ impl BiliDanmu {
Ok(resp)
}
fn parse_user_id(cookie: &str) -> Result<u64, DanmuStreamError> {
fn parse_user_id(cookie: &str) -> Result<i64, DanmuStreamError> {
let mut user_id = None;
// find DedeUserID=<user_id> in cookie str
let re = Regex::new(r"DedeUserID=(\d+)").unwrap();
if let Some(captures) = re.captures(cookie) {
if let Some(user) = captures.get(1) {
user_id = Some(user.as_str().parse::<u64>().unwrap());
user_id = Some(user.as_str().parse::<i64>().unwrap());
}
}
@@ -404,8 +409,8 @@ impl BiliDanmu {
#[derive(Serialize)]
struct WsSend {
uid: u64,
roomid: u64,
uid: i64,
roomid: i64,
key: String,
protover: u32,
platform: String,
@@ -436,5 +441,5 @@ pub struct RoomInit {
#[derive(Debug, Deserialize, Clone)]
pub struct RoomInitData {
room_id: u64,
room_id: i64,
}

View File

@@ -1,6 +1,8 @@
use serde::Deserialize;
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
use super::stream::WsStreamCtx;
use crate::DanmuStreamError;
#[derive(Debug, Deserialize)]
#[allow(dead_code)]

View File

@@ -1,4 +1,6 @@
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
use super::stream::WsStreamCtx;
use crate::DanmuStreamError;
#[derive(Debug)]
#[allow(dead_code)]

View File

@@ -24,7 +24,7 @@ struct PackHotCount {
type BilibiliPackCtx<'a> = (BilibiliPackHeader, &'a [u8]);
fn pack(buffer: &[u8]) -> Result<BilibiliPackCtx, DanmuStreamError> {
fn pack(buffer: &[u8]) -> Result<BilibiliPackCtx<'_>, DanmuStreamError> {
let data = buffer
.pread_with(0, scroll::BE)
.map_err(|e: scroll::Error| DanmuStreamError::PackError { err: e.to_string() })?;

View File

@@ -1,6 +1,8 @@
use serde::Deserialize;
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
use super::stream::WsStreamCtx;
use crate::DanmuStreamError;
#[derive(Debug, Deserialize)]
#[allow(dead_code)]

View File

@@ -1,10 +1,9 @@
use serde::Deserialize;
use serde_json::Value;
use crate::{
provider::{bilibili::dannmu_msg::BiliDanmuMessage, DanmuMessageType},
DanmuMessage, DanmuStreamError,
};
use super::dannmu_msg::BiliDanmuMessage;
use crate::{provider::DanmuMessageType, DanmuMessage, DanmuStreamError};
#[derive(Debug, Deserialize, Clone)]
pub struct WsStreamCtx {
@@ -66,7 +65,7 @@ impl WsStreamCtx {
if let Some(danmu_msg) = danmu_msg {
Ok(DanmuMessageType::DanmuMessage(DanmuMessage {
room_id: 0,
room_id: "".to_string(),
user_id: danmu_msg.uid,
user_name: danmu_msg.username,
message: danmu_msg.msg,

View File

@@ -1,6 +1,8 @@
use serde::Deserialize;
use crate::{provider::bilibili::stream::WsStreamCtx, DanmuStreamError};
use super::stream::WsStreamCtx;
use crate::DanmuStreamError;
#[derive(Debug, Deserialize)]
#[allow(dead_code)]

View File

@@ -1,4 +1,9 @@
use crate::{provider::DanmuProvider, DanmuMessage, DanmuMessageType, DanmuStreamError};
mod messages;
use std::io::Read;
use std::sync::Arc;
use std::time::{Duration, SystemTime};
use async_trait::async_trait;
use deno_core::v8;
use deno_core::JsRuntime;
@@ -7,11 +12,9 @@ use flate2::read::GzDecoder;
use futures_util::{SinkExt, StreamExt, TryStreamExt};
use log::debug;
use log::{error, info};
use messages::*;
use prost::bytes::Bytes;
use prost::Message;
use std::io::Read;
use std::sync::Arc;
use std::time::{Duration, SystemTime};
use tokio::net::TcpStream;
use tokio::sync::mpsc;
use tokio::sync::RwLock;
@@ -19,8 +22,7 @@ use tokio_tungstenite::{
connect_async, tungstenite::Message as WsMessage, MaybeTlsStream, WebSocketStream,
};
mod messages;
use messages::*;
use crate::{provider::DanmuProvider, DanmuMessage, DanmuMessageType, DanmuStreamError};
const USER_AGENT: &str = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36";
@@ -31,7 +33,7 @@ type WsWriteType =
futures_util::stream::SplitSink<WebSocketStream<MaybeTlsStream<TcpStream>>, WsMessage>;
pub struct DouyinDanmu {
room_id: u64,
room_id: String,
cookie: String,
stop: Arc<RwLock<bool>>,
write: Arc<RwLock<Option<WsWriteType>>>,
@@ -109,7 +111,7 @@ impl DouyinDanmu {
runtime
.execute_script(
"<crypto-js.min.js>",
deno_core::FastString::Static(crypto_js),
deno_core::FastString::from_static(crypto_js),
)
.map_err(|e| DanmuStreamError::WebsocketError {
err: format!("Failed to execute crypto-js: {}", e),
@@ -118,7 +120,7 @@ impl DouyinDanmu {
// Load and execute the sign.js file
let js_code = include_str!("douyin/webmssdk.js");
runtime
.execute_script("<sign.js>", deno_core::FastString::Static(js_code))
.execute_script("<sign.js>", deno_core::FastString::from_static(js_code))
.map_err(|e| DanmuStreamError::WebsocketError {
err: format!("Failed to execute JavaScript: {}", e),
})?;
@@ -126,10 +128,7 @@ impl DouyinDanmu {
// Call the get_wss_url function
let sign_call = format!("get_wss_url(\"{}\")", self.room_id);
let result = runtime
.execute_script(
"<sign_call>",
deno_core::FastString::Owned(sign_call.into_boxed_str()),
)
.execute_script("<sign_call>", deno_core::FastString::from(sign_call))
.map_err(|e| DanmuStreamError::WebsocketError {
err: format!("Failed to execute JavaScript: {}", e),
})?;
@@ -193,7 +192,7 @@ impl DouyinDanmu {
});
// Main message handling loop
let room_id = self.room_id;
let room_id = self.room_id.clone();
let stop = Arc::clone(&self.stop);
let write = Arc::clone(&self.write);
let message_handle = tokio::spawn(async move {
@@ -211,10 +210,10 @@ impl DouyinDanmu {
match msg {
WsMessage::Binary(data) => {
if let Ok(Some(ack)) = handle_binary_message(&data, &tx, room_id).await {
if let Ok(Some(ack)) = handle_binary_message(&data, &tx, &room_id).await {
if let Some(write) = write.write().await.as_mut() {
if let Err(e) =
write.send(WsMessage::Binary(ack.encode_to_vec())).await
write.send(WsMessage::binary(ack.encode_to_vec())).await
{
error!("Failed to send ack: {}", e);
}
@@ -257,7 +256,7 @@ impl DouyinDanmu {
async fn send_heartbeat(tx: &mpsc::Sender<WsMessage>) -> Result<(), DanmuStreamError> {
// heartbeat message: 3A 02 68 62
tx.send(WsMessage::Binary(vec![0x3A, 0x02, 0x68, 0x62]))
tx.send(WsMessage::binary(vec![0x3A, 0x02, 0x68, 0x62]))
.await
.map_err(|e| DanmuStreamError::WebsocketError {
err: format!("Failed to send heartbeat message: {}", e),
@@ -269,7 +268,7 @@ impl DouyinDanmu {
async fn handle_binary_message(
data: &[u8],
tx: &mpsc::UnboundedSender<DanmuMessageType>,
room_id: u64,
room_id: &str,
) -> Result<Option<PushFrame>, DanmuStreamError> {
// First decode the PushFrame
let push_frame = PushFrame::decode(Bytes::from(data.to_vec())).map_err(|e| {
@@ -329,7 +328,7 @@ async fn handle_binary_message(
})?;
if let Some(user) = chat_msg.user {
let danmu_msg = DanmuMessage {
room_id,
room_id: room_id.to_string(),
user_id: user.id,
user_name: user.nick_name,
message: chat_msg.content,
@@ -395,9 +394,9 @@ async fn handle_binary_message(
#[async_trait]
impl DanmuProvider for DouyinDanmu {
async fn new(identifier: &str, room_id: u64) -> Result<Self, DanmuStreamError> {
async fn new(identifier: &str, room_id: &str) -> Result<Self, DanmuStreamError> {
Ok(Self {
room_id,
room_id: room_id.to_string(),
cookie: identifier.to_string(),
stop: Arc::new(RwLock::new(false)),
write: Arc::new(RwLock::new(None)),
@@ -409,7 +408,6 @@ impl DanmuProvider for DouyinDanmu {
tx: mpsc::UnboundedSender<DanmuMessageType>,
) -> Result<(), DanmuStreamError> {
let mut retry_count = 0;
const MAX_RETRIES: u32 = 5;
const RETRY_DELAY: Duration = Duration::from_secs(5);
info!(
"Douyin WebSocket connection started, room_id: {}",
@@ -423,28 +421,25 @@ impl DanmuProvider for DouyinDanmu {
match self.connect_and_handle(tx.clone()).await {
Ok(_) => {
info!("Douyin WebSocket connection closed normally");
break;
info!(
"Douyin WebSocket connection closed normally, room_id: {}",
self.room_id
);
retry_count = 0;
}
Err(e) => {
error!("Douyin WebSocket connection error: {}", e);
retry_count += 1;
if retry_count >= MAX_RETRIES {
return Err(DanmuStreamError::WebsocketError {
err: format!("Failed to connect after {} retries", MAX_RETRIES),
});
}
info!(
"Retrying connection in {} seconds... (Attempt {}/{})",
RETRY_DELAY.as_secs(),
retry_count,
MAX_RETRIES
);
tokio::time::sleep(RETRY_DELAY).await;
}
}
info!(
"Retrying connection in {} seconds... (Attempt {}), room_id: {}",
RETRY_DELAY.as_secs(),
retry_count,
self.room_id
);
tokio::time::sleep(RETRY_DELAY).await;
}
Ok(())

View File

@@ -1,6 +1,7 @@
use prost::Message;
use std::collections::HashMap;
use prost::Message;
// message Response {
// repeated Message messagesList = 1;
// string cursor = 2;

View File

@@ -4,10 +4,10 @@ mod douyin;
use async_trait::async_trait;
use tokio::sync::mpsc;
use crate::{
provider::bilibili::BiliDanmu, provider::douyin::DouyinDanmu, DanmuMessageType,
DanmuStreamError,
};
use self::bilibili::BiliDanmu;
use self::douyin::DouyinDanmu;
use crate::{DanmuMessageType, DanmuStreamError};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ProviderType {
@@ -17,7 +17,7 @@ pub enum ProviderType {
#[async_trait]
pub trait DanmuProvider: Send + Sync {
async fn new(identifier: &str, room_id: u64) -> Result<Self, DanmuStreamError>
async fn new(identifier: &str, room_id: &str) -> Result<Self, DanmuStreamError>
where
Self: Sized;
@@ -57,7 +57,7 @@ pub trait DanmuProvider: Send + Sync {
pub async fn new(
provider_type: ProviderType,
identifier: &str,
room_id: u64,
room_id: &str,
) -> Result<Box<dyn DanmuProvider>, DanmuStreamError> {
match provider_type {
ProviderType::BiliBili => {

View File

@@ -0,0 +1,37 @@
[package]
name = "recorder"
version = "0.1.0"
edition = "2021"
[lib]
name = "recorder"
path = "src/lib.rs"
[dependencies]
danmu_stream = { path = "../danmu_stream" }
async-trait = "0.1.89"
rand = "0.9.2"
chrono = "0.4.42"
tokio = "1.48.0"
reqwest = { workspace = true}
pct-str = "2.0.0"
serde_json = "1.0.145"
serde = "1.0.228"
regex = "1.12.2"
deno_core = "0.355"
uuid = { workspace = true}
serde_derive = "1.0.228"
thiserror = "2.0.17"
log = "0.4.28"
sanitize-filename = "0.6.0"
m3u8-rs = "6.0.0"
async-ffmpeg-sidecar = "0.0.3"
md5 = "0.8.0"
scraper = "0.24.0"
base64 = "0.22.1"
url = "2.5.0"
urlencoding = "2.1.3"
fastrand = "2.0.1"
[dev-dependencies]
env_logger = "0.11"

View File

@@ -0,0 +1,9 @@
/// Identity and credentials for a logged-in streaming-platform account.
///
/// All fields are plain strings so the value can be stored and restored
/// without platform-specific parsing.
#[derive(Debug, Clone, Default)]
pub struct Account {
    /// Platform identifier -- presumably "bilibili"/"douyin"; confirm against callers.
    pub platform: String,
    /// Account id on the platform (kept as a string for compatibility).
    pub id: String,
    /// Display name of the account.
    pub name: String,
    /// Avatar image URL.
    pub avatar: String,
    /// CSRF token -- assumed to be derived from the cookies; TODO confirm.
    pub csrf: String,
    /// Raw cookie string used for authenticated requests.
    pub cookies: String,
}

View File

@@ -0,0 +1,437 @@
use std::path::Path;
use std::sync::atomic::{AtomicBool, AtomicI64, AtomicU64, Ordering};
use std::{path::PathBuf, sync::Arc};
use chrono::Utc;
use m3u8_rs::{MediaPlaylist, Playlist};
use reqwest::header::HeaderMap;
use std::time::Duration;
use tokio::fs::{File, OpenOptions};
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::sync::{broadcast, Mutex, RwLock};
use crate::core::playlist::HlsPlaylist;
use crate::core::{Codec, Format};
use crate::errors::RecorderError;
use crate::ffmpeg::VideoMetadata;
use crate::{core::HlsStream, events::RecorderEvent};
// How long the source playlist may go without producing a new segment
// before recording aborts with `RecorderError::UpdateTimeout`.
const UPDATE_TIMEOUT: Duration = Duration::from_secs(20);
// Polling interval between playlist refreshes.
const UPDATE_INTERVAL: Duration = Duration::from_secs(1);
// Name of the locally generated playlist inside `work_dir`.
const PLAYLIST_FILE_NAME: &str = "playlist.m3u8";
// Number of attempts made for each segment download.
const DOWNLOAD_RETRY: u32 = 3;
/// A recorder for HLS streams
///
/// This recorder fetches, caches and serves TS entries, currently supporting `StreamType::FMP4, StreamType::TS`.
///
/// Segments will be downloaded to work_dir, and `playlist.m3u8` will be generated in work_dir.
#[derive(Clone)]
pub struct HlsRecorder {
    /// Room id, used only as log context in this file.
    room_id: String,
    /// The upstream HLS stream being recorded.
    stream: Arc<HlsStream>,
    /// HTTP client used for playlist and segment requests.
    client: reqwest::Client,
    /// Channel used to broadcast `RecorderEvent::RecordUpdate` progress events.
    event_channel: broadcast::Sender<RecorderEvent>,
    /// Directory receiving downloaded segments and the local playlist.
    work_dir: PathBuf,
    /// Local playlist mirroring the downloaded segments.
    playlist: Arc<Mutex<HlsPlaylist>>,
    /// Headers (user-agent, optional cookie) attached to every request.
    headers: HeaderMap,
    /// Recording keeps running while this flag is true; `stop` clears it.
    enabled: Arc<AtomicBool>,
    /// Highest absolute media sequence number processed so far.
    sequence: Arc<AtomicU64>,
    /// Timestamp (ms) of the last successful segment update; drives the timeout check.
    updated_at: Arc<AtomicI64>,
    /// Accumulated duration of downloaded segments, in whole seconds.
    cached_duration_secs: Arc<AtomicU64>,
    /// Accumulated size of downloaded segments, in bytes.
    cached_size_bytes: Arc<AtomicU64>,
    /// Metadata of the first good segment; used to detect stream changes.
    pre_metadata: Arc<RwLock<Option<VideoMetadata>>>,
}
impl HlsRecorder {
    /// Creates a recorder for `stream`, writing segments and the local
    /// playlist into `work_dir` (created if missing).
    ///
    /// `cookies`, when provided, is attached to every HTTP request; a
    /// user-agent is generated once per recorder instance.
    pub async fn new(
        room_id: String,
        stream: Arc<HlsStream>,
        client: reqwest::Client,
        cookies: Option<String>,
        event_channel: broadcast::Sender<RecorderEvent>,
        work_dir: PathBuf,
        enabled: Arc<AtomicBool>,
    ) -> Self {
        // try to create work_dir
        // NOTE(review): panics on I/O failure; consider returning an error.
        if !work_dir.exists() {
            std::fs::create_dir_all(&work_dir).unwrap();
        }
        let playlist_path = work_dir.join(PLAYLIST_FILE_NAME);
        // set user agent
        let user_agent =
            crate::utils::user_agent_generator::UserAgentGenerator::new().generate(false);
        let mut headers = reqwest::header::HeaderMap::new();
        headers.insert("user-agent", user_agent.parse().unwrap());
        if let Some(cookies) = cookies {
            // NOTE(review): `parse` panics if the cookie string contains
            // bytes that are invalid in an HTTP header value.
            headers.insert("cookie", cookies.parse().unwrap());
        }
        Self {
            room_id,
            stream,
            client,
            event_channel,
            work_dir,
            // Resumes an existing playlist.m3u8 if one is already on disk.
            playlist: Arc::new(Mutex::new(HlsPlaylist::new(playlist_path).await)),
            headers,
            enabled,
            sequence: Arc::new(AtomicU64::new(0)),
            updated_at: Arc::new(AtomicI64::new(chrono::Utc::now().timestamp_millis())),
            cached_duration_secs: Arc::new(AtomicU64::new(0)),
            cached_size_bytes: Arc::new(AtomicU64::new(0)),
            pre_metadata: Arc::new(RwLock::new(None)),
        }
    }
    /// Start the recorder blockingly
    ///
    /// This will start the recorder and update the entries periodically.
    /// Any error from `update_entries` terminates the loop and is returned.
    pub async fn start(&self) -> Result<(), RecorderError> {
        while self.enabled.load(Ordering::Relaxed) {
            let result = self.update_entries().await;
            if let Err(e) = result {
                match e {
                    RecorderError::ResolutionChanged { .. } => {
                        log::error!("Resolution changed: {}", e);
                        // Finalize the playlist (VOD + ENDLIST) before bailing out.
                        self.playlist.lock().await.close().await?;
                        return Err(e);
                    }
                    RecorderError::UpdateTimeout => {
                        log::error!(
                            "Source playlist is not updated for a long time, stop recording"
                        );
                        self.playlist.lock().await.close().await?;
                        return Err(e);
                    }
                    RecorderError::M3u8ParseFailed { .. } => {
                        log::error!("[{}]M3u8 parse failed: {}", self.room_id, e);
                        return Err(e);
                    }
                    _ => {
                        // NOTE(review): every error currently aborts recording,
                        // and this branch does NOT finalize the playlist.
                        log::error!("[{}]Update entries error: {}", self.room_id, e);
                        return Err(e);
                    }
                }
            }
            tokio::time::sleep(UPDATE_INTERVAL).await;
        }
        Ok(())
    }
    /// Signals the `start` loop to exit after its current iteration.
    pub async fn stop(&self) {
        self.enabled.store(false, Ordering::Relaxed);
    }
    /// Fetches and parses the playlist (media or master) at `stream.index()`.
    async fn query_playlist(&self, stream: &HlsStream) -> Result<Playlist, RecorderError> {
        let url = stream.index();
        let response = self
            .client
            .get(url)
            .headers(self.headers.clone())
            .send()
            .await?;
        let bytes = response.bytes().await?;
        let (_, playlist) =
            m3u8_rs::parse_playlist(&bytes).map_err(|_| RecorderError::M3u8ParseFailed {
                // NOTE(review): `from_utf8(...).unwrap()` panics on a
                // non-UTF-8 response body.
                content: String::from_utf8(bytes.to_vec()).unwrap(),
            })?;
        Ok(playlist)
    }
    /// Resolves `self.stream` to a media playlist, following the first
    /// variant when the index turns out to be a master playlist.
    async fn query_media_playlist(&self) -> Result<MediaPlaylist, RecorderError> {
        let playlist = self.query_playlist(&self.stream).await?;
        match playlist {
            Playlist::MediaPlaylist(playlist) => Ok(playlist),
            Playlist::MasterPlaylist(playlist) => {
                // just return the first variant
                match playlist.variants.first() {
                    Some(variant) => {
                        let real_stream = construct_stream_from_variant(
                            &self.stream.id,
                            &variant.uri,
                            self.stream.format.clone(),
                            self.stream.codec.clone(),
                        )
                        .await?;
                        let playlist = self.query_playlist(&real_stream).await?;
                        match playlist {
                            Playlist::MediaPlaylist(playlist) => Ok(playlist),
                            // A master playlist nested inside a variant is
                            // not supported.
                            Playlist::MasterPlaylist(_) => Err(RecorderError::M3u8ParseFailed {
                                content: "No media playlist found".to_string(),
                            }),
                        }
                    }
                    None => Err(RecorderError::M3u8ParseFailed {
                        content: "No variants found".to_string(),
                    }),
                }
            }
        }
    }
    /// Downloads every segment newer than `self.sequence`, appends it to
    /// the local playlist and updates the cached duration/size counters.
    ///
    /// Returns `RecorderError::ResolutionChanged` when a segment's probed
    /// metadata differs from the first recorded segment, and
    /// `RecorderError::UpdateTimeout` when nothing new has been seen for
    /// `UPDATE_TIMEOUT`.
    async fn update_entries(&self) -> Result<(), RecorderError> {
        let media_playlist = self.query_media_playlist().await?;
        let playlist_sequence = media_playlist.media_sequence;
        let last_sequence = self.sequence.load(Ordering::Relaxed);
        let last_metadata = self.pre_metadata.read().await.clone();
        let mut updated = false;
        for (i, segment) in media_playlist.segments.iter().enumerate() {
            // Absolute sequence number of this segment within the live stream.
            let segment_sequence = playlist_sequence + i as u64;
            // NOTE(review): `sequence` starts at 0, so a very first segment
            // whose absolute sequence is 0 would be skipped here.
            if segment_sequence <= last_sequence {
                continue;
            }
            let segment_full_url = self.stream.ts_url(&segment.uri);
            // to get filename, we need to remove the query parameters
            // for example: 1.ts?expires=1760808243
            // we need to remove the query parameters: 1.ts
            let filename = segment.uri.split('?').next().unwrap_or(&segment.uri);
            let segment_path = self.work_dir.join(filename);
            let Ok(size) = download(
                &self.client,
                &segment_full_url,
                &segment_path,
                DOWNLOAD_RETRY,
            )
            .await
            else {
                log::error!("Download failed: {:#?}", segment);
                return Err(RecorderError::IoError(std::io::Error::other(
                    "Download failed",
                )));
            };
            let mut segment = segment.clone();
            if segment.program_date_time.is_none() {
                // Segments lacking EXT-X-PROGRAM-DATE-TIME are stamped with
                // the local download time.
                segment.program_date_time.replace(Utc::now().into());
            }
            // check if the stream is changed
            let segment_metadata = crate::ffmpeg::extract_video_metadata(&segment_path)
                .await
                .map_err(RecorderError::FfmpegError)?;
            // IMPORTANT: This handles bilibili ts stream segment, which might lack of SPS/PPS and need to be appended behind last segment
            if segment_metadata.seems_corrupted() {
                let mut playlist = self.playlist.lock().await;
                if playlist.is_empty().await {
                    // ignore this segment
                    log::error!(
                        "Segment is corrupted and has no previous segment, ignore: {}",
                        segment_path.display()
                    );
                    continue;
                }
                let last_segment = playlist.last_segment().await;
                let last_segment_uri = last_segment.unwrap().uri.clone();
                let last_segment_path = segment_path.with_file_name(last_segment_uri);
                // append segment data behind last segment data
                let mut last_segment_file = OpenOptions::new()
                    .append(true)
                    .open(&last_segment_path)
                    .await?;
                log::debug!(
                    "Appending segment data behind last segment: {}",
                    last_segment_path.display()
                );
                let mut segment_file = File::open(&segment_path).await?;
                let mut buffer = Vec::new();
                segment_file.read_to_end(&mut buffer).await?;
                last_segment_file.write_all(&buffer).await?;
                // The merged bytes now live in the previous segment's file.
                let _ = tokio::fs::remove_file(&segment_path).await;
                playlist.append_last_segment(segment.clone()).await?;
                self.cached_duration_secs
                    .fetch_add(segment_metadata.duration as u64, Ordering::Relaxed);
                self.cached_size_bytes.fetch_add(size, Ordering::Relaxed);
                self.sequence.store(segment_sequence, Ordering::Relaxed);
                self.updated_at
                    .store(chrono::Utc::now().timestamp_millis(), Ordering::Relaxed);
                updated = true;
                continue;
            }
            if let Some(last_metadata) = &last_metadata {
                if last_metadata != &segment_metadata {
                    return Err(RecorderError::ResolutionChanged {
                        err: "Resolution changed".to_string(),
                    });
                }
            } else {
                // First good segment: remember its metadata as the baseline.
                self.pre_metadata
                    .write()
                    .await
                    .replace(segment_metadata.clone());
            }
            let mut new_segment = segment.clone();
            // Prefer the probed duration over the playlist-declared one.
            new_segment.duration = segment_metadata.duration as f32;
            self.playlist.lock().await.add_segment(new_segment).await?;
            self.cached_duration_secs
                .fetch_add(segment_metadata.duration as u64, Ordering::Relaxed);
            self.cached_size_bytes.fetch_add(size, Ordering::Relaxed);
            self.sequence.store(segment_sequence, Ordering::Relaxed);
            self.updated_at
                .store(chrono::Utc::now().timestamp_millis(), Ordering::Relaxed);
            updated = true;
        }
        // Source playlist may not be updated for a long time, check if it's timeout
        let current_time = chrono::Utc::now().timestamp_millis();
        if self.updated_at.load(Ordering::Relaxed) + (UPDATE_TIMEOUT.as_millis() as i64)
            < current_time
        {
            return Err(RecorderError::UpdateTimeout);
        }
        if updated {
            // Best-effort progress notification; a send error (no active
            // receivers) is deliberately ignored.
            let _ = self.event_channel.send(RecorderEvent::RecordUpdate {
                live_id: self.stream.id.clone(),
                duration_secs: self.cached_duration_secs.load(Ordering::Relaxed),
                cached_size_bytes: self.cached_size_bytes.load(Ordering::Relaxed),
            });
        }
        Ok(())
    }
}
/// Download the content at `url` into `path`.
///
/// Creates the parent directory when missing and returns the number of
/// bytes written on success.
///
/// # Errors
/// Returns a `RecorderError` when the request fails, the server responds
/// with a non-success status, or any filesystem operation fails.
async fn download_inner(
    client: &reqwest::Client,
    url: &str,
    path: &Path,
) -> Result<u64, RecorderError> {
    // Ensure the target directory exists. Propagate I/O errors through the
    // Result instead of panicking, tolerate paths without a parent, and use
    // the async fs API rather than blocking std::fs inside an async fn.
    if let Some(parent) = path.parent() {
        if !parent.exists() {
            tokio::fs::create_dir_all(parent).await?;
        }
    }
    let response = client.get(url).send().await?;
    if !response.status().is_success() {
        let status = response.status();
        log::warn!("Download segment failed: {url}: {status}");
        return Err(RecorderError::InvalidResponseStatus { status });
    }
    let bytes = response.bytes().await?;
    let size = bytes.len() as u64;
    // Write the whole body in one call; avoids the previous extra clone and
    // manual copy through an in-memory cursor.
    tokio::fs::write(path, &bytes).await?;
    Ok(size)
}
async fn download(
client: &reqwest::Client,
url: &str,
path: &Path,
retry: u32,
) -> Result<u64, RecorderError> {
for i in 0..retry {
let result = download_inner(client, url, path).await;
if let Ok(size) = result {
return Ok(size);
}
log::error!("Download failed, retry: {}", i);
// sleep for 500 ms
tokio::time::sleep(Duration::from_millis(500)).await;
}
Err(RecorderError::IoError(std::io::Error::other(
"Download failed",
)))
}
/// Build the concrete `HlsStream` for a master-playlist variant URL.
///
/// Splits `variant_url` into host (`scheme://authority`), base path and
/// query string. For
/// `https://cn-jsnt-ct-01-07.bilivideo.com/live-bvc/930889/live_2124647716_1414766_bluray/index.m3u8?expires=1760808243`
/// the host is `https://cn-jsnt-ct-01-07.bilivideo.com`, the base is
/// `/live-bvc/930889/live_2124647716_1414766_bluray/index.m3u8?` (the
/// trailing '?' is kept when a query is present) and the extra is
/// `expires=1760808243`.
///
/// # Errors
/// Returns `RecorderError::M3u8ParseFailed` when the URL lacks a
/// `scheme://` prefix.
pub async fn construct_stream_from_variant(
    id: &str,
    variant_url: &str,
    format: Format,
    codec: Codec,
) -> Result<HlsStream, RecorderError> {
    // Separate the query string first: everything after the first '?'.
    let (body, extra) = variant_url.split_once('?').unwrap_or((variant_url, ""));
    // Parse "scheme://authority/path" exactly once; the original code ran
    // the same `find("://")` scan twice with duplicated error arms.
    let Some(schema_end) = body.find("://") else {
        return Err(RecorderError::M3u8ParseFailed {
            content: "Invalid URL format: missing protocol".to_string(),
        });
    };
    let after_schema = &body[schema_end + 3..];
    let (host, base) = match after_schema.find('/') {
        Some(path_start) => (
            format!("{}{}", &body[..schema_end + 3], &after_schema[..path_start]),
            format!("/{}", &after_schema[path_start + 1..]),
        ),
        // No path component: the whole body is the host and the path is "/".
        None => (body.to_string(), "/".to_string()),
    };
    // Add '?' to base if there are query parameters, to match the expected format
    let base_with_query = if !extra.is_empty() {
        format!("{}?", base)
    } else {
        base
    };
    let real_stream = HlsStream::new(
        id.to_string(),
        host,
        base_with_query,
        extra.to_string(),
        format,
        codec,
    );
    Ok(real_stream)
}
#[cfg(test)]
mod tests {
    use crate::core::{Codec, Format};
    use super::*;
    /// Verifies host/base/extra splitting and URL reconstruction against a
    /// real-world Huya variant URL that carries query parameters, including
    /// query merging for segments with and without their own parameters.
    #[tokio::test]
    async fn test_construct_stream_from_variant() {
        let stream = construct_stream_from_variant(
            "test",
            "https://hs.hls.huya.com/huyalive/156976698-156976698-674209784144068608-314076852-10057-A-0-1.m3u8?ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103",
            Format::TS,
            Codec::Avc,
        ).await.unwrap();
        // Index URL must round-trip to the original variant URL.
        assert_eq!(stream.index(), "https://hs.hls.huya.com/huyalive/156976698-156976698-674209784144068608-314076852-10057-A-0-1.m3u8?ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103");
        // Plain segment name: shared query parameters appended with '?'.
        assert_eq!(stream.ts_url("1.ts"), "https://hs.hls.huya.com/huyalive/1.ts?ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103");
        // Segment with its own query: shared parameters merged with '&'.
        assert_eq!(stream.ts_url("1.ts?expires=1760808243"), "https://hs.hls.huya.com/huyalive/1.ts?expires=1760808243&ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103");
        assert_eq!(stream.host, "https://hs.hls.huya.com");
        assert_eq!(
            stream.base,
            "/huyalive/156976698-156976698-674209784144068608-314076852-10057-A-0-1.m3u8?"
        );
        assert_eq!(stream.extra, "ratio=2000&wsSecret=7abc7dec8809146f31f92046eb044e3b&wsTime=68fa41ba&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103");
        assert_eq!(stream.format, Format::TS);
        assert_eq!(stream.codec, Codec::Avc);
    }
}

View File

@@ -0,0 +1,97 @@
use std::fmt;
pub mod hls_recorder;
pub mod playlist;
/// Container format of a live stream.
#[derive(Clone, Debug, PartialEq)]
pub enum Format {
    Flv,
    TS,
    FMP4,
}
impl fmt::Display for Format {
    /// Writes the variant name, matching its `Debug` spelling.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            Format::Flv => "Flv",
            Format::TS => "TS",
            Format::FMP4 => "FMP4",
        };
        f.write_str(name)
    }
}
/// Video codec of a live stream.
#[derive(Clone, Debug, PartialEq)]
pub enum Codec {
    Avc,
    Hevc,
}
impl fmt::Display for Codec {
    /// Writes the variant name, matching its `Debug` spelling.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Codec::Avc => "Avc",
            Codec::Hevc => "Hevc",
        })
    }
}
/// Descriptor for an HLS stream, split into host, base path and query.
///
/// For example:
/// ```text
/// host: https://d1--cn-gotcha104b.bilivideo.com
/// base: /live-bvc/375028/live_2124647716_1414766_bluray.m3u8?
/// extra: expire=1734567890&oi=1234567890&s=1234567890&pt=0&ps=0&bw=1000000&tk=1234567890
/// ```
#[derive(Debug, Clone)]
pub struct HlsStream {
    /// Live id this stream belongs to.
    id: String,
    /// `scheme://authority`, no trailing slash.
    host: String,
    /// Path to the index playlist; ends with '?' when `extra` is non-empty.
    base: String,
    /// Query string shared by the index and every segment URL.
    extra: String,
    /// Container format of the stream.
    format: Format,
    /// Video codec of the stream.
    codec: Codec,
}
impl HlsStream {
    /// Creates a stream descriptor from its parts.
    pub fn new(
        id: String,
        host: String,
        base: String,
        extra: String,
        format: Format,
        codec: Codec,
    ) -> Self {
        Self {
            id,
            host,
            base,
            extra,
            format,
            codec,
        }
    }
    /// Full URL of the index playlist: `host + base` plus `extra` when present.
    pub fn index(&self) -> String {
        if self.extra.is_empty() {
            format!("{}{}", self.host, self.base)
        } else {
            format!("{}{}{}", self.host, self.base, self.extra)
        }
    }
    /// Full URL for a segment: the playlist filename in `base` is replaced
    /// by `seg_name` and the shared query parameters are merged in.
    pub fn ts_url(&self, seg_name: &str) -> String {
        // Swap only the final path component for `seg_name`.
        // (The previous `str::replace` on the whole base would also rewrite
        // an earlier path segment that happened to contain the playlist
        // filename as a substring.)
        let base_url = match self.base.rsplit_once('/') {
            Some((dir, _m3u8_filename)) => format!("{}/{}", dir, seg_name),
            None => seg_name.to_string(),
        };
        if self.extra.is_empty() {
            format!("{}{}", self.host, base_url)
        } else if base_url.contains('?') {
            // seg_name already carries query params: append `extra` with '&',
            // trimming a trailing '?' or '&' first.
            let base_trimmed = base_url.trim_end_matches('?').trim_end_matches('&');
            format!("{}{}&{}", self.host, base_trimmed, self.extra)
        } else {
            // No query params yet: attach `extra` with '?'.
            let base_without_query = base_url.trim_end_matches('?');
            format!("{}{}?{}", self.host, base_without_query, self.extra)
        }
    }
}

View File

@@ -0,0 +1,87 @@
use m3u8_rs::{MediaPlaylist, MediaPlaylistType, MediaSegment};
use std::path::PathBuf;
use crate::errors::RecorderError;
/// An in-memory HLS media playlist mirrored to a `.m3u8` file on disk.
pub struct HlsPlaylist {
    /// The parsed playlist content.
    pub playlist: MediaPlaylist,
    /// Path of the backing `.m3u8` file; rewritten on every mutation.
    pub file_path: PathBuf,
}
impl HlsPlaylist {
    /// Opens the playlist at `file_path`, parsing the existing file when it
    /// is present, otherwise starting from an empty default playlist.
    pub async fn new(file_path: PathBuf) -> Self {
        if file_path.exists() {
            // NOTE(review): both the read and the parse panic on failure; a
            // corrupted playlist file on disk would crash the recorder.
            let bytes = tokio::fs::read(&file_path).await.unwrap();
            let (_, playlist) = m3u8_rs::parse_media_playlist(&bytes).unwrap();
            Self {
                playlist,
                file_path,
            }
        } else {
            Self {
                playlist: MediaPlaylist::default(),
                file_path,
            }
        }
    }
    /// Returns the most recently added segment, if any.
    pub async fn last_segment(&self) -> Option<&MediaSegment> {
        self.playlist.segments.last()
    }
    /// Merges `segment` into the last existing segment by extending its
    /// duration (used when the segment's bytes were appended to the previous
    /// segment's file), then flushes the playlist to disk.
    pub async fn append_last_segment(
        &mut self,
        segment: MediaSegment,
    ) -> Result<(), RecorderError> {
        if self.is_empty().await {
            // Nothing to merge into: fall back to a plain append.
            self.add_segment(segment).await?;
            return Ok(());
        }
        {
            let last = self.playlist.segments.last_mut().unwrap();
            let new_duration = last.duration + segment.duration;
            last.duration = new_duration;
            // NOTE(review): the `as u64` cast truncates fractional seconds,
            // so target_duration may end up one second short.
            self.playlist.target_duration =
                std::cmp::max(self.playlist.target_duration, new_duration as u64);
            self.flush().await?;
        }
        Ok(())
    }
    /// Appends `segment` and flushes the playlist to disk.
    pub async fn add_segment(&mut self, segment: MediaSegment) -> Result<(), RecorderError> {
        self.playlist.segments.push(segment);
        self.flush().await?;
        Ok(())
    }
    /// Serializes the playlist and rewrites the backing file in full.
    pub async fn flush(&self) -> Result<(), RecorderError> {
        // Create an in-memory buffer to serialize the playlist into.
        // `Vec<u8>` implements `std::io::Write`, which `m3u8_rs::MediaPlaylist::write_to` expects.
        let mut buffer = Vec::new();
        // Serialize the playlist into the buffer.
        self.playlist
            .write_to(&mut buffer)
            .map_err(RecorderError::IoError)?;
        // Write the buffer to the file
        tokio::fs::write(&self.file_path, buffer)
            .await
            .map_err(RecorderError::IoError)?;
        Ok(())
    }
    /// Marks the playlist as a finished VOD (EXT-X-ENDLIST) and flushes.
    pub async fn close(&mut self) -> Result<(), RecorderError> {
        self.playlist.end_list = true;
        self.playlist.playlist_type = Some(MediaPlaylistType::Vod);
        self.flush().await?;
        Ok(())
    }
    /// True when the playlist contains no segments.
    pub async fn is_empty(&self) -> bool {
        self.playlist.segments.is_empty()
    }
}

View File

@@ -1,3 +1,5 @@
use std::path::PathBuf;
use serde::Serialize;
use tokio::io::AsyncWriteExt;
use tokio::{
@@ -18,7 +20,7 @@ pub struct DanmuStorage {
}
impl DanmuStorage {
pub async fn new(file_path: &str) -> Option<DanmuStorage> {
pub async fn new(file_path: &PathBuf) -> Option<DanmuStorage> {
let file = OpenOptions::new()
.read(true)
.write(true)
@@ -38,7 +40,7 @@ impl DanmuStorage {
let parts: Vec<&str> = line.split(':').collect();
let ts: i64 = parts[0].parse().unwrap();
let content = parts[1].to_string();
preload_cache.push(DanmuEntry { ts, content })
preload_cache.push(DanmuEntry { ts, content });
}
let file = OpenOptions::new()
.append(true)
@@ -61,7 +63,7 @@ impl DanmuStorage {
.file
.write()
.await
.write(format!("{}:{}\n", ts, content).as_bytes())
.write(format!("{ts}:{content}\n").as_bytes())
.await;
}

View File

@@ -1,13 +1,10 @@
use core::fmt;
use std::fmt::Display;
use async_std::{
fs::{File, OpenOptions},
io::{prelude::BufReadExt, BufReader, WriteExt},
path::Path,
stream::StreamExt,
};
use chrono::{TimeZone, Utc};
use core::fmt;
use std::{fmt::Display, path::Path};
use tokio::{
fs::OpenOptions,
io::{AsyncBufReadExt, BufReader},
};
const ENTRY_FILE_NAME: &str = "entries.log";
@@ -31,19 +28,19 @@ impl TsEntry {
url: parts[0].to_string(),
sequence: parts[1]
.parse()
.map_err(|e| format!("Failed to parse sequence: {}", e))?,
.map_err(|e| format!("Failed to parse sequence: {e}"))?,
length: parts[2]
.parse()
.map_err(|e| format!("Failed to parse length: {}", e))?,
.map_err(|e| format!("Failed to parse length: {e}"))?,
size: parts[3]
.parse()
.map_err(|e| format!("Failed to parse size: {}", e))?,
.map_err(|e| format!("Failed to parse size: {e}"))?,
ts: parts[4]
.parse()
.map_err(|e| format!("Failed to parse timestamp: {}", e))?,
.map_err(|e| format!("Failed to parse timestamp: {e}"))?,
is_header: parts[5]
.parse()
.map_err(|e| format!("Failed to parse is_header: {}", e))?,
.map_err(|e| format!("Failed to parse is_header: {e}"))?,
})
}
@@ -51,34 +48,25 @@ impl TsEntry {
pub fn ts_seconds(&self) -> i64 {
// For some legacy problem, douyin entry's ts is s, bilibili entry's ts is ms.
// This should be fixed after 2.5.6, but we need to support entry.log generated by previous version.
if self.ts > 10000000000 {
if self.ts > 10_000_000_000 {
self.ts / 1000
} else {
self.ts
}
}
pub fn ts_mili(&self) -> i64 {
// if already in ms, return as is
if self.ts > 10000000000 {
self.ts
} else {
self.ts * 1000
}
}
pub fn date_time(&self) -> String {
let date_str = Utc
.timestamp_opt(self.ts_seconds(), 0)
.unwrap()
.to_rfc3339();
format!("#EXT-X-PROGRAM-DATE-TIME:{}\n", date_str)
format!("#EXT-X-PROGRAM-DATE-TIME:{date_str}\n")
}
/// Convert entry into a segment in HLS manifest.
pub fn to_segment(&self) -> String {
if self.is_header {
return "".into();
return String::new();
}
let mut content = String::new();
@@ -100,11 +88,9 @@ impl Display for TsEntry {
}
}
/// EntryStore is used to management stream segments, which is basicly a simple version of hls manifest,
/// and of course, provids methods to generate hls manifest for frontend player.
/// `EntryStore` is used to management stream segments, which is basically a simple version of hls manifest,
/// and of course, provides methods to generate hls manifest for frontend player.
pub struct EntryStore {
// append only log file
log_file: File,
header: Option<TsEntry>,
entries: Vec<TsEntry>,
total_duration: f64,
@@ -115,18 +101,11 @@ pub struct EntryStore {
impl EntryStore {
pub async fn new(work_dir: &str) -> Self {
// if work_dir is not exists, create it
if !Path::new(work_dir).exists().await {
if !Path::new(work_dir).exists() {
std::fs::create_dir_all(work_dir).unwrap();
}
// open append only log file
let log_file = OpenOptions::new()
.create(true)
.append(true)
.open(format!("{}/{}", work_dir, ENTRY_FILE_NAME))
.await
.unwrap();
let mut entry_store = Self {
log_file,
header: None,
entries: vec![],
total_duration: 0.0,
@@ -143,14 +122,26 @@ impl EntryStore {
let file = OpenOptions::new()
.create(false)
.read(true)
.open(format!("{}/{}", work_dir, ENTRY_FILE_NAME))
.await
.unwrap();
let mut lines = BufReader::new(file).lines();
while let Some(Ok(line)) = lines.next().await {
let entry = TsEntry::from(&line);
.open(format!("{work_dir}/{ENTRY_FILE_NAME}"))
.await; // The `file` variable from the previous line now holds `Result<tokio::fs::File, tokio::io::Error>`
let file_handle = match file {
Ok(f) => f,
Err(e) => {
if e.kind() == std::io::ErrorKind::NotFound {
log::info!(
"Entry file not found at {work_dir}/{ENTRY_FILE_NAME}, starting fresh."
);
} else {
log::error!("Failed to open entry file: {e}");
}
return; // Exit the load function if file cannot be opened
}
};
let mut lines = BufReader::new(file_handle).lines();
while let Ok(Some(line)) = lines.next_line().await {
let entry = TsEntry::from(line.as_str());
if let Err(e) = entry {
log::error!("Failed to parse entry: {} {}", e, line);
log::error!("Failed to parse entry: {e} {line}");
continue;
}
@@ -169,45 +160,12 @@ impl EntryStore {
}
}
pub async fn add_entry(&mut self, entry: TsEntry) {
if entry.is_header {
self.header = Some(entry.clone());
} else {
self.entries.push(entry.clone());
}
if let Err(e) = self.log_file.write_all(entry.to_string().as_bytes()).await {
log::error!("Failed to write entry to log file: {}", e);
}
self.log_file.flush().await.unwrap();
self.last_sequence = std::cmp::max(self.last_sequence, entry.sequence);
self.total_duration += entry.length;
self.total_size += entry.size;
pub fn len(&self) -> usize {
self.entries.len()
}
pub fn get_header(&self) -> Option<&TsEntry> {
self.header.as_ref()
}
pub fn total_duration(&self) -> f64 {
self.total_duration
}
pub fn total_size(&self) -> u64 {
self.total_size
}
/// Get first timestamp in milliseconds
pub fn first_ts(&self) -> Option<i64> {
self.entries.first().map(|x| x.ts_mili())
}
/// Get last timestamp in milliseconds
pub fn last_ts(&self) -> Option<i64> {
self.entries.last().map(|x| x.ts_mili())
pub fn is_empty(&self) -> bool {
self.entries.is_empty()
}
/// Generate a hls manifest for selected range.

View File

@@ -0,0 +1,81 @@
use super::platforms::bilibili::api::BiliStream;
use super::platforms::douyin::stream_info::DouyinStream;
use thiserror::Error;
/// Platform-specific live-stream description; carried inside the error
/// variants below so failures can report exactly which stream misbehaved.
#[derive(Debug, Clone)]
pub enum Stream {
    BiliBili(BiliStream),
    Douyin(DouyinStream),
}
/// Unified error type for recorder operations: stream fetching/caching,
/// playlist parsing, danmu handling, uploading, and platform API calls.
/// Display strings come from the `thiserror` attributes below.
#[derive(Error, Debug)]
pub enum RecorderError {
    #[error("Index not found: {url}")]
    IndexNotFound { url: String },
    #[error("Can not delete current stream: {live_id}")]
    ArchiveInUse { live_id: String },
    #[error("Cache is empty")]
    EmptyCache,
    #[error("Parse m3u8 content failed: {content}")]
    M3u8ParseFailed { content: String },
    #[error("No available stream provided")]
    NoStreamAvailable,
    // NOTE(review): "freezed" should read "frozen"; left unchanged here since
    // the message is a user/log-visible string.
    #[error("Stream is freezed: {stream:#?}")]
    FreezedStream { stream: Stream },
    #[error("Stream is nearly expired: {stream:#?}")]
    StreamExpired { stream: Stream },
    #[error("No room info provided")]
    NoRoomInfo,
    #[error("Invalid stream: {stream:#?}")]
    InvalidStream { stream: Stream },
    #[error("Stream is too slow: {stream:#?}")]
    SlowStream { stream: Stream },
    #[error("Header url is empty")]
    EmptyHeader,
    #[error("Header timestamp is invalid")]
    InvalidTimestamp,
    #[error("IO error: {0}")]
    IoError(#[from] std::io::Error),
    #[error("Danmu stream error: {0}")]
    DanmuStreamError(#[from] danmu_stream::DanmuStreamError),
    #[error("Subtitle not found: {live_id}")]
    SubtitleNotFound { live_id: String },
    #[error("Subtitle generation failed: {error}")]
    SubtitleGenerationFailed { error: String },
    #[error("Resolution changed: {err}")]
    ResolutionChanged { err: String },
    #[error("Ffmpeg error: {0}")]
    FfmpegError(String),
    #[error("Format not found: {format}")]
    FormatNotFound { format: String },
    #[error("Codec not found: {codecs}")]
    CodecNotFound { codecs: String },
    #[error("Invalid cookies")]
    InvalidCookies,
    #[error("API error: {error}")]
    ApiError { error: String },
    #[error("Invalid value")]
    InvalidValue,
    #[error("Invalid response")]
    InvalidResponse,
    #[error("Invalid response json: {resp}")]
    InvalidResponseJson { resp: serde_json::Value },
    #[error("Invalid response status: {status}")]
    InvalidResponseStatus { status: reqwest::StatusCode },
    #[error("Upload cancelled")]
    UploadCancelled,
    #[error("Upload error: {err}")]
    UploadError { err: String },
    #[error("Client error: {0}")]
    ClientError(#[from] reqwest::Error),
    #[error("Security control error")]
    SecurityControlError,
    #[error("JavaScript runtime error: {0}")]
    JsRuntimeError(String),
    #[error("Update timeout")]
    UpdateTimeout,
    #[error("Unsupported stream")]
    UnsupportedStream,
    #[error("Empty record")]
    EmptyRecord,
}

View File

@@ -0,0 +1,39 @@
use crate::platforms::PlatformType;
use crate::RecorderInfo;
/// Events emitted by recorders over a broadcast channel: live/record
/// lifecycle transitions, progress notifications, and received danmu.
#[derive(Debug, Clone)]
pub enum RecorderEvent {
    /// The monitored room went live.
    LiveStart {
        recorder: RecorderInfo,
    },
    /// The monitored room stopped streaming.
    LiveEnd {
        room_id: String,
        platform: PlatformType,
        recorder: RecorderInfo,
    },
    /// A new recording session started.
    RecordStart {
        recorder: RecorderInfo,
    },
    /// The current recording session ended.
    RecordEnd {
        recorder: RecorderInfo,
    },
    /// Periodic stats for an ongoing recording.
    RecordUpdate {
        live_id: String,
        duration_secs: u64,
        cached_size_bytes: u64,
    },
    /// Progress text for a long-running task identified by `id`.
    ProgressUpdate {
        id: String,
        content: String,
    },
    /// Terminal state of a long-running task identified by `id`.
    ProgressFinished {
        id: String,
        success: bool,
        message: String,
    },
    /// A danmu (chat) message was received; `ts` is in milliseconds.
    DanmuReceived {
        room: String,
        ts: i64,
        content: String,
    },
}

View File

@@ -0,0 +1,111 @@
use std::path::{Path, PathBuf};
// Basic metadata of a video file, as reported by ffprobe.
#[derive(Debug, Clone)]
pub struct VideoMetadata {
    // Duration parsed from the video stream's "duration" field (seconds per ffprobe).
    pub duration: f64,
    pub width: u32,
    pub height: u32,
    pub video_codec: String,
    pub audio_codec: String,
}
impl VideoMetadata {
    /// Heuristic corruption check: a probe result reporting neither a width
    /// nor a height means no usable video stream was parsed.
    pub fn seems_corrupted(&self) -> bool {
        matches!((self.width, self.height), (0, 0))
    }
}
// NOTE(review): `duration` is not part of equality — presumably so that two
// files with identical format (resolution + codecs) but different lengths
// compare equal; confirm this is intended before deriving PartialEq instead.
impl std::cmp::PartialEq for VideoMetadata {
    fn eq(&self, other: &Self) -> bool {
        self.width == other.width
            && self.height == other.height
            && self.video_codec == other.video_codec
            && self.audio_codec == other.audio_codec
    }
}
// Windows process-creation flag passed to spawned ffprobe processes below so
// no console window flashes up when probing files from a GUI app.
#[cfg(target_os = "windows")]
const CREATE_NO_WINDOW: u32 = 0x08000000;
#[cfg(target_os = "windows")]
#[allow(unused_imports)]
use std::os::windows::process::CommandExt;
/// Resolve the ffprobe executable name for the current platform:
/// `ffprobe.exe` on Windows, plain `ffprobe` elsewhere.
fn ffprobe_path() -> PathBuf {
    let name = if cfg!(windows) { "ffprobe.exe" } else { "ffprobe" };
    PathBuf::from(name)
}
/// Extract basic information from a video file.
///
/// Runs `ffprobe` with JSON output and collects the first video stream's
/// codec/resolution/duration plus the (last seen) audio stream's codec.
///
/// # Arguments
/// * `file_path` - The path to the video file.
///
/// # Returns
/// A `Result` containing the video metadata or an error message.
pub async fn extract_video_metadata(file_path: &Path) -> Result<VideoMetadata, String> {
    let mut ffprobe_process = tokio::process::Command::new(ffprobe_path());
    // Keep the probe silent on Windows (no console window).
    #[cfg(target_os = "windows")]
    ffprobe_process.creation_flags(CREATE_NO_WINDOW);
    let output = ffprobe_process
        .args([
            "-v",
            "quiet",
            "-print_format",
            "json",
            "-show_format",
            "-show_streams",
        ])
        // Pass the path as a raw OsStr instead of a lossily formatted String,
        // so non-UTF-8 paths reach ffprobe intact.
        .arg(file_path)
        .output()
        .await
        .map_err(|e| format!("执行ffprobe失败: {e}"))?;
    if !output.status.success() {
        return Err(format!(
            "ffprobe执行失败: {}",
            String::from_utf8_lossy(&output.stderr)
        ));
    }
    let json_str = String::from_utf8_lossy(&output.stdout);
    let json: serde_json::Value =
        serde_json::from_str(&json_str).map_err(|e| format!("解析ffprobe输出失败: {e}"))?;
    // Walk the reported streams and pick out video/audio properties.
    let streams = json["streams"].as_array().ok_or("未找到视频流信息")?;
    if streams.is_empty() {
        return Err("未找到视频流".to_string());
    }
    let mut metadata = VideoMetadata {
        duration: 0.0,
        width: 0,
        height: 0,
        video_codec: String::new(),
        audio_codec: String::new(),
    };
    for stream in streams {
        // Renamed from `codec_name`: this field is the stream *type*
        // ("video"/"audio"), not the codec identifier.
        let codec_type = stream["codec_type"].as_str().unwrap_or("");
        if codec_type == "video" {
            metadata.video_codec = stream["codec_name"].as_str().unwrap_or("").to_owned();
            metadata.width = stream["width"].as_u64().unwrap_or(0) as u32;
            metadata.height = stream["height"].as_u64().unwrap_or(0) as u32;
            // ffprobe reports duration as a decimal string of seconds.
            metadata.duration = stream["duration"]
                .as_str()
                .unwrap_or("0.0")
                .parse::<f64>()
                .unwrap_or(0.0);
        } else if codec_type == "audio" {
            metadata.audio_codec = stream["codec_name"].as_str().unwrap_or("").to_owned();
        }
    }
    Ok(metadata)
}

View File

@@ -0,0 +1,251 @@
pub mod account;
pub mod core;
pub mod danmu;
pub mod entry;
pub mod errors;
pub mod events;
mod ffmpeg;
pub mod platforms;
pub mod traits;
pub mod utils;
use crate::danmu::DanmuStorage;
use crate::events::RecorderEvent;
use crate::{account::Account, platforms::PlatformType};
use std::{
fmt::Display,
path::PathBuf,
sync::{atomic, Arc},
};
use tokio::{
sync::{broadcast, Mutex, RwLock},
task::JoinHandle,
};
/// Serializable snapshot of a recorder's current state (room, user, live
/// identifiers and on/off flags), as exposed to consumers of the crate.
#[derive(serde::Deserialize, serde::Serialize, Clone, Debug)]
pub struct RecorderInfo {
    pub room_info: RoomInfo,
    pub user_info: UserInfo,
    pub platform_live_id: String,
    pub live_id: String,
    pub recording: bool,
    pub enabled: bool,
}
/// Basic information about a live room, refreshed on each status poll.
#[derive(serde::Deserialize, serde::Serialize, Clone, Debug, Default)]
pub struct RoomInfo {
    pub platform: String,
    pub room_id: String,
    pub room_title: String,
    pub room_cover: String,
    /// Whether the room is live
    pub status: bool,
}
/// Basic information about the streamer who owns the room.
#[derive(serde::Deserialize, serde::Serialize, Clone, Debug, Default)]
pub struct UserInfo {
    pub user_id: String,
    pub user_name: String,
    pub user_avatar: String,
}
/// `Recorder` is the base struct for all recorders
/// It contains the basic information for a recorder
/// and the extra information for the recorder
#[derive(Clone)]
pub struct Recorder<T>
where
    T: Send + Sync,
{
    /// The platform this recorder targets
    platform: PlatformType,
    /// The room id being monitored
    room_id: String,
    /// The account for the recorder
    account: Account,
    /// The client for the recorder
    client: reqwest::Client,
    /// The event channel for the recorder
    event_channel: broadcast::Sender<RecorderEvent>,
    /// The cache directory for the recorder
    cache_dir: PathBuf,
    /// Whether the recorder is quitting
    quit: Arc<atomic::AtomicBool>,
    /// Whether the recorder is enabled
    enabled: Arc<atomic::AtomicBool>,
    /// Whether the recorder is recording
    is_recording: Arc<atomic::AtomicBool>,
    /// The room info for the recorder
    room_info: Arc<RwLock<RoomInfo>>,
    /// The user info for the recorder
    user_info: Arc<RwLock<UserInfo>>,
    /// The update interval for room status
    update_interval: Arc<atomic::AtomicU64>,
    /// The platform live id for the current recording
    platform_live_id: Arc<RwLock<String>>,
    /// The live id for the current recording, generally is the timestamp of the recording start time
    live_id: Arc<RwLock<String>>,
    /// The danmu task for the current recording
    danmu_task: Arc<Mutex<Option<JoinHandle<()>>>>,
    /// The record task for the current recording
    record_task: Arc<Mutex<Option<JoinHandle<()>>>>,
    /// The danmu storage for the current recording
    danmu_storage: Arc<RwLock<Option<DanmuStorage>>>,
    /// The last update time of the current recording
    last_update: Arc<atomic::AtomicI64>,
    /// The last sequence of the current recording
    last_sequence: Arc<atomic::AtomicU64>,
    /// The total duration of the current recording in milliseconds
    total_duration: Arc<atomic::AtomicU64>,
    /// The total size of the current recording in bytes
    total_size: Arc<atomic::AtomicU64>,
    /// The extra information for the recorder
    extra: T,
}
// Plain accessor implementations: each method exposes the corresponding
// field, cloning `Arc` handles (cheap refcount bump) or owned values where
// the trait requires ownership.
impl<T: Send + Sync> traits::RecorderBasicTrait<T> for Recorder<T> {
    fn platform(&self) -> PlatformType {
        self.platform
    }
    fn room_id(&self) -> String {
        self.room_id.clone()
    }
    fn account(&self) -> &Account {
        &self.account
    }
    fn client(&self) -> &reqwest::Client {
        &self.client
    }
    fn event_channel(&self) -> &broadcast::Sender<RecorderEvent> {
        &self.event_channel
    }
    fn cache_dir(&self) -> PathBuf {
        self.cache_dir.clone()
    }
    fn quit(&self) -> &atomic::AtomicBool {
        &self.quit
    }
    fn enabled(&self) -> &atomic::AtomicBool {
        &self.enabled
    }
    fn is_recording(&self) -> &atomic::AtomicBool {
        &self.is_recording
    }
    fn room_info(&self) -> Arc<RwLock<RoomInfo>> {
        self.room_info.clone()
    }
    fn user_info(&self) -> Arc<RwLock<UserInfo>> {
        self.user_info.clone()
    }
    fn platform_live_id(&self) -> Arc<RwLock<String>> {
        self.platform_live_id.clone()
    }
    fn live_id(&self) -> Arc<RwLock<String>> {
        self.live_id.clone()
    }
    fn danmu_task(&self) -> Arc<Mutex<Option<JoinHandle<()>>>> {
        self.danmu_task.clone()
    }
    fn record_task(&self) -> Arc<Mutex<Option<JoinHandle<()>>>> {
        self.record_task.clone()
    }
    fn danmu_storage(&self) -> Arc<RwLock<Option<DanmuStorage>>> {
        self.danmu_storage.clone()
    }
    fn last_update(&self) -> &atomic::AtomicI64 {
        &self.last_update
    }
    fn last_sequence(&self) -> &atomic::AtomicU64 {
        &self.last_sequence
    }
    fn total_duration(&self) -> &atomic::AtomicU64 {
        &self.total_duration
    }
    fn total_size(&self) -> &atomic::AtomicU64 {
        &self.total_size
    }
    fn extra(&self) -> &T {
        &self.extra
    }
}
/// Cache path is relative to cache path in config
#[derive(Clone)]
pub struct CachePath {
    /// Root cache directory (from config)
    pub cache_path: PathBuf,
    pub platform: PlatformType,
    pub room_id: String,
    pub live_id: String,
    /// Optional file inside the live directory; sanitized before being set
    pub file_name: Option<String>,
}
impl CachePath {
    /// Create a cache path for one live session (no file name yet).
    pub fn new(cache_path: PathBuf, platform: PlatformType, room_id: &str, live_id: &str) -> Self {
        Self {
            cache_path,
            platform,
            room_id: room_id.to_string(),
            live_id: live_id.to_string(),
            file_name: None,
        }
    }
    /// Sanitize filename and set it
    pub fn with_filename(&self, file_name: &str) -> Self {
        let sanitized_filename = sanitize_filename::sanitize(file_name);
        Self {
            file_name: Some(sanitized_filename),
            ..self.clone()
        }
    }
    /// Get relative path to cache path:
    /// `<platform>/<room_id>/<live_id>[/<file_name>]`.
    pub fn relative_path(&self) -> PathBuf {
        // Build via PathBuf::push instead of string formatting so separators
        // are platform-correct and the optional file name needs no duplicate
        // format! branch.
        let mut path = PathBuf::from(self.platform.as_str());
        path.push(&self.room_id);
        path.push(&self.live_id);
        if let Some(file_name) = &self.file_name {
            path.push(file_name);
        }
        path
    }
    /// Absolute path: cache root joined with the relative path.
    pub fn full_path(&self) -> PathBuf {
        // `Path::join` borrows, so the previous `clone()` was redundant.
        self.cache_path.join(self.relative_path())
    }
}
impl Display for CachePath {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.full_path().display())
}
}

View File

@@ -0,0 +1,474 @@
pub mod api;
pub mod profile;
pub mod response;
use crate::account::Account;
use crate::core::hls_recorder::HlsRecorder;
use crate::events::RecorderEvent;
use crate::platforms::bilibili::api::{Protocol, Qn};
use crate::platforms::PlatformType;
use crate::traits::RecorderTrait;
use crate::{Recorder, RoomInfo, UserInfo};
use crate::core::Format;
use crate::core::{Codec, HlsStream};
use crate::danmu::DanmuStorage;
use crate::platforms::bilibili::api::BiliStream;
use chrono::Utc;
use danmu_stream::danmu_stream::DanmuStream;
use danmu_stream::provider::ProviderType;
use danmu_stream::DanmuMessageType;
use std::path::{Path, PathBuf};
use std::sync::{atomic, Arc};
use std::time::Duration;
use tokio::sync::{broadcast, Mutex, RwLock};
use async_trait::async_trait;
/// A recorder for `BiliBili` live streams
///
/// This recorder fetches, caches and serves TS entries, currently supporting only `StreamType::FMP4`.
/// As high-quality streams are accessible only to logged-in users, the use of a `BiliClient`, which manages cookies, is required.
#[derive(Clone)]
pub struct BiliExtra {
    /// Relative path of the downloaded room cover image, once fetched.
    cover: Arc<RwLock<Option<String>>>,
    /// The most recently fetched stream info; `None` when offline or after reset.
    live_stream: Arc<RwLock<Option<BiliStream>>>,
}
pub type BiliRecorder = Recorder<BiliExtra>;
impl BiliRecorder {
    /// Build a BiliBili recorder for `room_id` with a fresh HTTP client and
    /// all state (flags, counters, task handles) initialized to idle.
    pub async fn new(
        room_id: &str,
        account: &Account,
        cache_dir: PathBuf,
        event_channel: broadcast::Sender<RecorderEvent>,
        update_interval: Arc<atomic::AtomicU64>,
        enabled: bool,
    ) -> Result<Self, crate::errors::RecorderError> {
        let client = reqwest::Client::new();
        let extra = BiliExtra {
            cover: Arc::new(RwLock::new(None)),
            live_stream: Arc::new(RwLock::new(None)),
        };
        let recorder = Self {
            platform: PlatformType::BiliBili,
            room_id: room_id.to_string(),
            account: account.clone(),
            client,
            event_channel,
            cache_dir,
            quit: Arc::new(atomic::AtomicBool::new(false)),
            enabled: Arc::new(atomic::AtomicBool::new(enabled)),
            update_interval,
            is_recording: Arc::new(atomic::AtomicBool::new(false)),
            room_info: Arc::new(RwLock::new(RoomInfo::default())),
            user_info: Arc::new(RwLock::new(UserInfo::default())),
            platform_live_id: Arc::new(RwLock::new(String::new())),
            live_id: Arc::new(RwLock::new(String::new())),
            danmu_storage: Arc::new(RwLock::new(None)),
            last_update: Arc::new(atomic::AtomicI64::new(Utc::now().timestamp())),
            last_sequence: Arc::new(atomic::AtomicU64::new(0)),
            danmu_task: Arc::new(Mutex::new(None)),
            record_task: Arc::new(Mutex::new(None)),
            total_duration: Arc::new(atomic::AtomicU64::new(0)),
            total_size: Arc::new(atomic::AtomicU64::new(0)),
            extra,
        };
        log::info!("[{}]Recorder for room {} created.", room_id, room_id);
        Ok(recorder)
    }
    // Log helpers that prefix every message with the room id.
    fn log_info(&self, message: &str) {
        log::info!("[{}]{}", self.room_id, message);
    }
    fn log_error(&self, message: &str) {
        log::error!("[{}]{}", self.room_id, message);
    }
    /// Clear all per-recording state (stream, danmu storage, ids, counters)
    /// so the next live session starts from a clean slate.
    pub async fn reset(&self) {
        *self.extra.live_stream.write().await = None;
        self.last_update
            .store(Utc::now().timestamp(), atomic::Ordering::Relaxed);
        *self.danmu_storage.write().await = None;
        *self.platform_live_id.write().await = String::new();
        *self.live_id.write().await = String::new();
        self.total_duration.store(0, atomic::Ordering::Relaxed);
        self.total_size.store(0, atomic::Ordering::Relaxed);
    }
    /// Poll room status, refresh room/user info, emit LiveStart/LiveEnd on
    /// transitions, and (when recording should proceed) fetch fresh stream
    /// info. Returns whether the room is currently considered live; on API
    /// failure the previous status is kept.
    async fn check_status(&self) -> bool {
        let pre_live_status = self.room_info.read().await.status;
        match api::get_room_info(&self.client, &self.account, &self.room_id).await {
            Ok(room_info) => {
                *self.room_info.write().await = RoomInfo {
                    platform: "bilibili".to_string(),
                    room_id: self.room_id.to_string(),
                    room_title: room_info.room_title,
                    room_cover: room_info.room_cover_url.clone(),
                    status: room_info.live_status == 1,
                };
                // Only update user info once
                if self.user_info.read().await.user_id != room_info.user_id {
                    let user_id = room_info.user_id;
                    let user_info = api::get_user_info(&self.client, &self.account, &user_id).await;
                    if let Ok(user_info) = user_info {
                        *self.user_info.write().await = UserInfo {
                            user_id: user_id.to_string(),
                            user_name: user_info.user_name,
                            user_avatar: user_info.user_avatar_url,
                        }
                    } else {
                        self.log_error(&format!(
                            "Failed to get user info: {}",
                            user_info.err().unwrap()
                        ));
                    }
                }
                let live_status = room_info.live_status == 1;
                // handle live notification
                if pre_live_status != live_status {
                    self.log_info(&format!(
                        "Live status changed to {}, enabled: {}",
                        live_status,
                        self.enabled.load(atomic::Ordering::Relaxed)
                    ));
                    if live_status {
                        // Get cover image
                        let room_cover_path = Path::new(PlatformType::BiliBili.as_str())
                            .join(&self.room_id)
                            .join("cover.jpg");
                        let full_room_cover_path = self.cache_dir.join(&room_cover_path);
                        if (api::download_file(
                            &self.client,
                            &room_info.room_cover_url,
                            &full_room_cover_path,
                        )
                        .await)
                        .is_ok()
                        {
                            *self.extra.cover.write().await =
                                Some(room_cover_path.to_str().unwrap().to_string());
                        }
                        let _ = self.event_channel.send(RecorderEvent::LiveStart {
                            recorder: self.info().await,
                        });
                    } else {
                        let _ = self.event_channel.send(RecorderEvent::LiveEnd {
                            platform: PlatformType::BiliBili,
                            room_id: self.room_id.to_string(),
                            recorder: self.info().await,
                        });
                        *self.live_id.write().await = String::new();
                    }
                    // just doing reset, cuz live status is changed
                    self.reset().await;
                }
                // platform_live_id mirrors the platform-reported live start time
                *self.platform_live_id.write().await = room_info.live_start_time.to_string();
                if !live_status {
                    return false;
                }
                // no need to check stream if should not record
                if !self.should_record().await {
                    return true;
                }
                // current_record => update stream
                // auto_start+is_new_stream => update stream and current_record=true
                let new_stream = api::get_stream_info(
                    &self.client,
                    &self.account,
                    &self.room_id,
                    Protocol::HttpHls,
                    Format::TS,
                    &[Codec::Avc, Codec::Hevc],
                    Qn::Q4K,
                )
                .await;
                match new_stream {
                    Ok(stream) => {
                        let pre_live_stream = self.extra.live_stream.read().await.clone();
                        *self.extra.live_stream.write().await = Some(stream.clone());
                        self.last_update
                            .store(Utc::now().timestamp(), atomic::Ordering::Relaxed);
                        log::info!(
                            "[{}]Update to a new stream: {:#?} => {:#?}",
                            &self.room_id,
                            pre_live_stream,
                            stream
                        );
                        true
                    }
                    Err(e) => {
                        // Stream fetch failures do not flip live status; the
                        // room is still live, recording just retries later.
                        if let crate::errors::RecorderError::FormatNotFound { format } = e {
                            log::error!("[{}]Format {} not found", &self.room_id, format);
                            true
                        } else {
                            log::error!("[{}]Fetch stream failed: {}", &self.room_id, e);
                            true
                        }
                    }
                }
            }
            Err(e) => {
                log::error!("[{}]Update room status failed: {}", &self.room_id, e);
                // may encounter internet issues, not sure whether the stream is closed or started, just remain
                pre_live_status
            }
        }
    }
    /// Run the danmu (chat) stream until it finishes or errors, forwarding
    /// each message to the event channel and the per-live danmu storage.
    async fn danmu(&self) -> Result<(), crate::errors::RecorderError> {
        let cookies = self.account.cookies.clone();
        let room_id = self.room_id.clone();
        let danmu_stream = DanmuStream::new(ProviderType::BiliBili, &cookies, &room_id).await;
        if danmu_stream.is_err() {
            let err = danmu_stream.err().unwrap();
            log::error!("[{}]Failed to create danmu stream: {}", &self.room_id, err);
            return Err(crate::errors::RecorderError::DanmuStreamError(err));
        }
        let danmu_stream = danmu_stream.unwrap();
        let mut start_fut = Box::pin(danmu_stream.start());
        loop {
            // Drive the connection future and the receive loop concurrently;
            // whichever resolves first decides how the task ends.
            tokio::select! {
                start_res = &mut start_fut => {
                    match start_res {
                        Ok(_) => {
                            log::info!("[{}]Danmu stream finished", &self.room_id);
                            return Ok(());
                        }
                        Err(err) => {
                            log::error!("[{}]Danmu stream start error: {}", &self.room_id, err);
                            return Err(crate::errors::RecorderError::DanmuStreamError(err));
                        }
                    }
                }
                recv_res = danmu_stream.recv() => {
                    match recv_res {
                        Ok(Some(msg)) => {
                            match msg {
                                DanmuMessageType::DanmuMessage(danmu) => {
                                    let ts = Utc::now().timestamp_millis();
                                    let _ = self.event_channel.send(RecorderEvent::DanmuReceived {
                                        room: self.room_id.clone(),
                                        ts,
                                        content: danmu.message.clone(),
                                    });
                                    if let Some(storage) = self.danmu_storage.write().await.as_ref() {
                                        storage.add_line(ts, &danmu.message).await;
                                    }
                                }
                            }
                        }
                        Ok(None) => {
                            log::info!("[{}]Danmu stream closed", &self.room_id);
                            return Ok(());
                        }
                        Err(err) => {
                            log::error!("[{}]Failed to receive danmu message: {}", &self.room_id, err);
                            return Err(crate::errors::RecorderError::DanmuStreamError(err));
                        }
                    }
                }
            }
        }
    }
    /// Update entries for a new live
    async fn update_entries(&self, live_id: &str) -> Result<(), crate::errors::RecorderError> {
        let current_stream = self.extra.live_stream.read().await.clone();
        let Some(current_stream) = current_stream else {
            return Err(crate::errors::RecorderError::NoStreamAvailable);
        };
        let work_dir = self.work_dir(live_id).await;
        log::info!("[{}]New record started: {}", self.room_id, live_id);
        let _ = tokio::fs::create_dir_all(&work_dir.full_path()).await;
        let danmu_path = work_dir.with_filename("danmu.txt");
        *self.danmu_storage.write().await = DanmuStorage::new(&danmu_path.full_path()).await;
        let cover_path = work_dir.with_filename("cover.jpg");
        let room_cover_path = self
            .cache_dir
            .join(PlatformType::BiliBili.as_str())
            .join(&self.room_id)
            .join("cover.jpg");
        tokio::fs::copy(room_cover_path, &cover_path.full_path())
            .await
            .map_err(crate::errors::RecorderError::IoError)?;
        *self.live_id.write().await = live_id.to_string();
        // send record start event
        let _ = self.event_channel.send(RecorderEvent::RecordStart {
            recorder: self.info().await,
        });
        self.is_recording.store(true, atomic::Ordering::Relaxed);
        // NOTE(review): `url_info.first().unwrap()` panics if the stream was
        // returned with an empty url_info list — confirm the API layer
        // guarantees at least one entry.
        let stream = Arc::new(HlsStream::new(
            live_id.to_string(),
            current_stream.url_info.first().unwrap().host.clone(),
            current_stream.base_url.clone(),
            current_stream.url_info.first().unwrap().extra.clone(),
            current_stream.format,
            current_stream.codec,
        ));
        let hls_recorder = HlsRecorder::new(
            self.room_id.to_string(),
            stream,
            self.client.clone(),
            None,
            self.event_channel.clone(),
            work_dir.full_path(),
            self.enabled.clone(),
        )
        .await;
        // Blocks until the HLS recorder stops (live end, error, or disable).
        if let Err(e) = hls_recorder.start().await {
            log::error!("[{}]Failed to start hls recorder: {}", self.room_id, e);
            return Err(e);
        }
        Ok(())
    }
}
#[async_trait]
impl crate::traits::RecorderTrait<BiliExtra> for BiliRecorder {
    /// Spawn the two long-lived tasks for this room: the danmu listener and
    /// the status-poll/record loop. Both handles are stored so they can be
    /// awaited/aborted later; the loop exits when `quit` is set.
    async fn run(&self) {
        let self_clone = self.clone();
        let danmu_task = tokio::spawn(async move {
            let _ = self_clone.danmu().await;
        });
        *self.danmu_task.lock().await = Some(danmu_task);
        let self_clone = self.clone();
        *self.record_task.lock().await = Some(tokio::spawn(async move {
            log::info!("[{}]Start running recorder", self_clone.room_id);
            while !self_clone.quit.load(atomic::Ordering::Relaxed) {
                if self_clone.check_status().await {
                    // Live status is ok, start recording.
                    if self_clone.should_record().await {
                        let live_id = Utc::now().timestamp_millis().to_string();
                        // update_entries blocks for the whole recording session.
                        if let Err(e) = self_clone.update_entries(&live_id).await {
                            log::error!("[{}]Update entries error: {}", self_clone.room_id, e);
                        }
                        let _ = self_clone.event_channel.send(RecorderEvent::RecordEnd {
                            recorder: self_clone.info().await,
                        });
                    }
                    self_clone
                        .is_recording
                        .store(false, atomic::Ordering::Relaxed);
                    self_clone.reset().await;
                    // go check status again after random 2-5 secs
                    let secs = rand::random::<u64>() % 4 + 2;
                    tokio::time::sleep(Duration::from_secs(secs)).await;
                    continue;
                }
                tokio::time::sleep(Duration::from_secs(
                    self_clone.update_interval.load(atomic::Ordering::Relaxed),
                ))
                .await;
            }
        }));
    }
}
#[cfg(test)]
mod tests {
    /// Verifies that m3u8-rs exposes BiliBili's non-standard playlist tags
    /// (`#EXT-X-MAP` header URI and `#EXT-BILI-AUX` keyframe flag) via
    /// `unknown_tags`, and that our extraction of both succeeds.
    #[test]
    fn parse_fmp4_playlist() {
        let content = r#"#EXTM3U
#EXT-X-VERSION:7
#EXT-X-START:TIME-OFFSET=0
#EXT-X-MEDIA-SEQUENCE:323066244
#EXT-X-TARGETDURATION:1
#EXT-X-MAP:URI=\"h1758715459.m4s\"
#EXT-BILI-AUX:97d350|K|7d1e3|fe1425ab
#EXTINF:1.00,7d1e3|fe1425ab
323066244.m4s
#EXT-BILI-AUX:97d706|N|757d4|c9094969
#EXTINF:1.00,757d4|c9094969
323066245.m4s
#EXT-BILI-AUX:97daee|N|8223d|f307566a
#EXTINF:1.00,8223d|f307566a
323066246.m4s
#EXT-BILI-AUX:97dee7|N|775cc|428d567
#EXTINF:1.00,775cc|428d567
323066247.m4s
#EXT-BILI-AUX:97e2df|N|10410|9a62fe61
#EXTINF:0.17,10410|9a62fe61
323066248.m4s
#EXT-BILI-AUX:97e397|K|679d2|8fbee7df
#EXTINF:1.00,679d2|8fbee7df
323066249.m4s
#EXT-BILI-AUX:97e74d|N|8907b|67d1c6ad
#EXTINF:1.00,8907b|67d1c6ad
323066250.m4s
#EXT-BILI-AUX:97eb35|N|87374|f6406797
#EXTINF:1.00,87374|f6406797
323066251.m4s
#EXT-BILI-AUX:97ef2d|N|6b792|b8125097
#EXTINF:1.00,6b792|b8125097
323066252.m4s
#EXT-BILI-AUX:97f326|N|e213|b30c02c6
#EXTINF:0.17,e213|b30c02c6
323066253.m4s
#EXT-BILI-AUX:97f3de|K|65754|7ea6dcc8
#EXTINF:1.00,65754|7ea6dcc8
323066254.m4s
"#;
        let (_, pl) = m3u8_rs::parse_media_playlist(content.as_bytes()).unwrap();
        // ExtTag { tag: "X-MAP", rest: Some("URI=\\\"h1758715459.m4s\\\"") }
        let header_url = pl
            .segments
            .first()
            .unwrap()
            .unknown_tags
            .iter()
            .find(|t| t.tag == "X-MAP")
            .map(|t| {
                let rest = t.rest.clone().unwrap();
                rest.split('=').nth(1).unwrap().replace("\\\"", "")
            });
        // #EXT-BILI-AUX:a5e4e0|K|79b3e|ebde469e
        // Second field of the aux tag is "K" for keyframe segments, "N" otherwise.
        let is_key = pl
            .segments
            .first()
            .unwrap()
            .unknown_tags
            .iter()
            .find(|t| t.tag == "BILI-AUX")
            .map(|t| {
                let rest = t.rest.clone().unwrap();
                rest.split('|').nth(1).unwrap() == "K"
            });
        assert_eq!(is_key, Some(true));
        assert_eq!(header_url, Some("h1758715459.m4s".to_string()));
    }
}

View File

@@ -0,0 +1,966 @@
use super::profile;
use super::profile::Profile;
use super::response;
use super::response::GeneralResponse;
use super::response::PostVideoMetaResponse;
use super::response::PreuploadResponse;
use super::response::VideoSubmitData;
use crate::account::Account;
use crate::core::Codec;
use crate::core::Format;
use crate::errors::RecorderError;
use crate::utils::user_agent_generator;
use chrono::TimeZone;
use pct_str::PctString;
use pct_str::URIReserved;
use rand::seq::IndexedRandom;
use rand::seq::SliceRandom;
use regex::Regex;
use reqwest::Client;
use serde::Deserialize;
use serde::Serialize;
use serde_json::json;
use serde_json::Value;
use std::fmt;
use std::path::Path;
use std::time::Duration;
use std::time::SystemTime;
use tokio::fs::File;
use tokio::io::AsyncReadExt;
use tokio::time::Instant;
/// References threaded through the chunked video-upload flow.
#[derive(Clone)]
struct UploadParams<'a> {
    preupload_response: &'a PreuploadResponse,
    post_video_meta_response: &'a PostVideoMetaResponse,
    video_file: &'a Path,
}
/// Room information as returned by the BiliBili room API.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct RoomInfo {
    // 1 means live (see get_room_info / check_status usage).
    pub live_status: u8,
    pub room_cover_url: String,
    pub room_id: String,
    pub room_keyframe_url: String,
    pub room_title: String,
    pub user_id: String,
    // Unix timestamp (seconds, UTC); 0 when not live.
    pub live_start_time: i64,
}
/// Public user profile as returned by the BiliBili space API.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct UserInfo {
    pub user_id: String,
    pub user_name: String,
    pub user_sign: String,
    pub user_avatar_url: String,
}
/// QR-code login bootstrap data from the passport API (key + QR content URL).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct QrInfo {
    pub oauth_key: String,
    pub url: String,
}
/// QR-code login poll result; `code == 0` means confirmed and `cookies`
/// holds the session cookie string (see `get_qr_status`).
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct QrStatus {
    pub code: u8,
    pub cookies: String,
}
/// A playable BiliBili stream: format/codec plus the base URL and the
/// per-CDN host/query pairs needed to assemble full URLs.
#[derive(Clone, Debug)]
pub struct BiliStream {
    pub format: Format,
    pub codec: Codec,
    pub base_url: String,
    pub url_info: Vec<UrlInfo>,
    pub drm: bool,
    pub master_url: Option<String>,
}
/// One CDN endpoint for a stream: host prefix plus extra query string.
#[derive(Clone, Debug)]
pub struct UrlInfo {
    pub host: String,
    pub extra: String,
}
/// Stream delivery protocol; the discriminant value is sent as the
/// `protocol` query parameter (see `get_stream_info`).
#[derive(Clone, Debug)]
#[allow(dead_code)]
pub enum Protocol {
    HttpStream,
    HttpHls,
}
impl fmt::Display for Protocol {
    /// Display the protocol via its Debug name (e.g. "HttpHls").
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
// BiliBili quality numbers (qn):
// 30000 Dolby
// 20000 4K
// 15000 2K
// 10000 Original quality
// 400   Blu-ray
// 250   Super HD
// 150   HD
// 80    Smooth
#[derive(Clone, Debug)]
#[allow(dead_code)]
pub enum Qn {
    Dolby = 30000,
    Q4K = 20000,
    Q2K = 15000,
    Q1080PH = 10000,
    Q1080P = 400,
    Q720P = 250,
    Hd = 150,
    Smooth = 80,
}
impl fmt::Display for Qn {
    /// Display the quality via its Debug name (e.g. "Q4K").
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
// Human-readable one-line summary used in logs.
impl fmt::Display for BiliStream {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "type: {:?}, codec: {:?}, base_url: {}, url_info: {:?}, drm: {}, master_url: {:?}",
            self.format, self.codec, self.base_url, self.url_info, self.drm, self.master_url
        )
    }
}
impl BiliStream {
    /// Plain constructor; `base_url` is copied, everything else moved in.
    pub fn new(
        format: Format,
        codec: Codec,
        base_url: &str,
        url_info: Vec<UrlInfo>,
        drm: bool,
        master_url: Option<String>,
    ) -> BiliStream {
        BiliStream {
            format,
            codec,
            base_url: base_url.into(),
            url_info,
            drm,
            master_url,
        }
    }
    /// Build the playlist index URL using a randomly chosen CDN host.
    // NOTE(review): no '?' is inserted here, while ts_url adds one —
    // `extra` is presumably already prefixed appropriately; confirm.
    // `choose(...).unwrap()` panics on an empty url_info list.
    pub fn index(&self) -> String {
        let url_info = self.url_info.choose(&mut rand::rng()).unwrap();
        format!("{}{}{}", url_info.host, self.base_url, url_info.extra)
    }
    /// Build a segment URL by swapping the playlist filename in `base_url`
    /// for `seg_name`, again on a randomly chosen CDN host.
    pub fn ts_url(&self, seg_name: &str) -> String {
        let m3u8_filename = self.base_url.split('/').next_back().unwrap();
        let base_url = self.base_url.replace(m3u8_filename, seg_name);
        let url_info = self.url_info.choose(&mut rand::rng()).unwrap();
        format!("{}{}?{}", url_info.host, base_url, url_info.extra)
    }
}
// Build a header map containing only a freshly generated (desktop) user-agent.
// NOTE(review): the unwrap assumes the generator never emits characters that
// are invalid in a header value — confirm against UserAgentGenerator.
fn generate_user_agent_header() -> reqwest::header::HeaderMap {
    let user_agent = user_agent_generator::UserAgentGenerator::new().generate(false);
    let mut headers = reqwest::header::HeaderMap::new();
    headers.insert("user-agent", user_agent.parse().unwrap());
    headers
}
/// Request a new login QR code from the passport API.
///
/// # Errors
/// `ClientError` on transport failure; `InvalidValue` when the response
/// lacks the expected `qrcode_key`/`url` fields.
pub async fn get_qr(client: &Client) -> Result<QrInfo, RecorderError> {
    let headers = generate_user_agent_header();
    let res: serde_json::Value = client
        .get("https://passport.bilibili.com/x/passport-login/web/qrcode/generate")
        .headers(headers)
        .send()
        .await?
        .json()
        .await?;
    Ok(QrInfo {
        oauth_key: res["data"]["qrcode_key"]
            .as_str()
            .ok_or(RecorderError::InvalidValue)?
            .to_string(),
        url: res["data"]["url"]
            .as_str()
            .ok_or(RecorderError::InvalidValue)?
            .to_string(),
    })
}
/// Poll the QR login status; when confirmed (`code == 0`) the cookies are
/// extracted from the redirect URL's query string.
// NOTE(review): `unwrap_or(400) as u8` truncates 400 to 144. Any nonzero
// value still means "not confirmed", but the sentinel is misleading; real
// platform codes (> 255) are also truncated. Left as-is since callers may
// compare against the truncated values — worth auditing.
pub async fn get_qr_status(client: &Client, qrcode_key: &str) -> Result<QrStatus, RecorderError> {
    let headers = generate_user_agent_header();
    let res: serde_json::Value = client
        .get(format!(
            "https://passport.bilibili.com/x/passport-login/web/qrcode/poll?qrcode_key={qrcode_key}"
        ))
        .headers(headers)
        .send()
        .await?
        .json()
        .await?;
    let code: u8 = res["data"]["code"].as_u64().unwrap_or(400) as u8;
    let mut cookies: String = String::new();
    if code == 0 {
        let url = res["data"]["url"]
            .as_str()
            .ok_or(RecorderError::InvalidValue)?
            .to_string();
        // Cookie key=value pairs arrive as the URL query; '&' separators
        // become ';' to form a cookie header string.
        let query_str = url.split('?').next_back().unwrap();
        cookies = query_str.replace('&', ";");
    }
    Ok(QrStatus { code, cookies })
}
/// Log the account out via the passport exit endpoint (response ignored).
///
/// # Errors
/// `InvalidCookies` when the stored cookie string cannot be used as a
/// header value; `ClientError` on transport failure.
pub async fn logout(client: &Client, account: &Account) -> Result<(), RecorderError> {
    let mut headers = generate_user_agent_header();
    let url = "https://passport.bilibili.com/login/exit/v2";
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let params = [("csrf", account.csrf.clone())];
    let _ = client
        .post(url)
        .headers(headers)
        .header("Content-Type", "application/x-www-form-urlencoded")
        .form(&params)
        .send()
        .await?;
    Ok(())
}
/// Fetch a user's public profile via the WBI-signed space API.
///
/// # Errors
/// - `InvalidCookies` when the cookie string cannot become a header value.
/// - `SecurityControlError` on HTTP 412 (risk control).
/// - `InvalidResponseStatus` / `InvalidResponseJson` for other failures.
pub async fn get_user_info(
    client: &Client,
    account: &Account,
    user_id: &str,
) -> Result<UserInfo, RecorderError> {
    let params: Value = json!({
        "mid": user_id.to_string(),
        "platform": "web",
        "web_location": "1550101",
        "token": "",
        "w_webid": "",
    });
    // WBI-sign the query parameters before hitting the endpoint.
    let params = get_sign(client, params).await?;
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let resp = client
        .get(format!(
            "https://api.bilibili.com/x/space/wbi/acc/info?{params}"
        ))
        .headers(headers)
        .send()
        .await?;
    if !resp.status().is_success() {
        if resp.status() == reqwest::StatusCode::PRECONDITION_FAILED {
            return Err(RecorderError::SecurityControlError);
        }
        return Err(RecorderError::InvalidResponseStatus {
            status: resp.status(),
        });
    }
    let res: serde_json::Value = resp.json().await?;
    // Fix: BiliBili error codes can be negative (e.g. -404). `as_u64`
    // returned `None` for those, skipping the logging branch below; `as_i64`
    // routes them through the normal non-zero-code path. The returned error
    // type is the same either way, so callers are unaffected.
    let code = res["code"]
        .as_i64()
        .ok_or(RecorderError::InvalidResponseJson { resp: res.clone() })?;
    if code != 0 {
        log::error!("Get user info failed {code}");
        return Err(RecorderError::InvalidResponseJson { resp: res.clone() });
    }
    Ok(UserInfo {
        user_id: user_id.to_string(),
        user_name: res["data"]["name"].as_str().unwrap_or("").to_string(),
        user_sign: res["data"]["sign"].as_str().unwrap_or("").to_string(),
        user_avatar_url: res["data"]["face"].as_str().unwrap_or("").to_string(),
    })
}
/// Fetch live-room information (title, cover, status, owner, start time).
///
/// # Errors
/// - `InvalidCookies` when the cookie string cannot become a header value.
/// - `SecurityControlError` on HTTP 412 (risk control).
/// - `InvalidResponseStatus` / `InvalidResponseJson` / `InvalidValue` for
///   other failures or missing fields.
pub async fn get_room_info(
    client: &Client,
    account: &Account,
    room_id: &str,
) -> Result<RoomInfo, RecorderError> {
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let response = client
        .get(format!(
            "https://api.live.bilibili.com/room/v1/Room/get_info?room_id={room_id}"
        ))
        .headers(headers)
        .send()
        .await?;
    if !response.status().is_success() {
        if response.status() == reqwest::StatusCode::PRECONDITION_FAILED {
            return Err(RecorderError::SecurityControlError);
        }
        return Err(RecorderError::InvalidResponseStatus {
            status: response.status(),
        });
    }
    let res: serde_json::Value = response.json().await?;
    // Fix: BiliBili error codes can be negative; `as_u64` misclassified
    // those as malformed JSON. `as_i64` handles the full code range while
    // the non-zero check below stays the same.
    let code = res["code"]
        .as_i64()
        .ok_or(RecorderError::InvalidResponseJson { resp: res.clone() })?;
    if code != 0 {
        return Err(RecorderError::InvalidResponseJson { resp: res.clone() });
    }
    // The API returns the canonical room id (short ids are resolved).
    let room_id = res["data"]["room_id"]
        .as_i64()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let room_title = res["data"]["title"]
        .as_str()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let room_cover_url = res["data"]["user_cover"]
        .as_str()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let room_keyframe_url = res["data"]["keyframe"]
        .as_str()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let user_id = res["data"]["uid"]
        .as_i64()
        .ok_or(RecorderError::InvalidValue)?
        .to_string();
    let live_status = res["data"]["live_status"]
        .as_u64()
        .ok_or(RecorderError::InvalidValue)? as u8;
    // "live_time": "2025-08-09 18:33:35",
    let live_start_time_str = res["data"]["live_time"]
        .as_str()
        .ok_or(RecorderError::InvalidValue)?;
    let live_start_time = if live_start_time_str == "0000-00-00 00:00:00" {
        0
    } else {
        // this is a fixed Asia/Shanghai datetime str
        let naive = chrono::NaiveDateTime::parse_from_str(live_start_time_str, "%Y-%m-%d %H:%M:%S")
            .map_err(|_| RecorderError::InvalidValue)?;
        // parse as UTC datetime and convert to timestamp, then subtract the
        // UTC+8 offset to get the true UTC epoch seconds
        chrono::Utc
            .from_local_datetime(&naive)
            .earliest()
            .ok_or(RecorderError::InvalidValue)?
            .timestamp()
            - 8 * 3600
    };
    Ok(RoomInfo {
        live_status,
        room_cover_url,
        room_id,
        room_keyframe_url,
        room_title,
        user_id,
        live_start_time,
    })
}
/// Get stream info from room id
///
/// Queries `getRoomPlayInfo` and selects the stream matching the requested
/// protocol, format, and one of the requested codecs. Returns a `BiliStream`
/// with the host list shuffled for naive load distribution.
///
/// https://socialsisteryi.github.io/bilibili-API-collect/docs/live/info.html#%E8%8E%B7%E5%8F%96%E7%9B%B4%E6%92%AD%E9%97%B4%E4%BF%A1%E6%81%AF-1
/// https://api.live.bilibili.com/xlive/web-room/v2/index/getRoomPlayInfo?room_id=31368705&protocol=1&format=1&codec=0&qn=10000&platform=h5
pub async fn get_stream_info(
    client: &Client,
    account: &Account,
    room_id: &str,
    protocol: Protocol,
    format: Format,
    codec: &[Codec],
    qn: Qn,
) -> Result<BiliStream, RecorderError> {
    let url = format!(
        "https://api.live.bilibili.com/xlive/web-room/v2/index/getRoomPlayInfo?room_id={}&protocol={}&format={}&codec={}&qn={}&platform=h5",
        room_id,
        protocol.clone() as u8,
        format.clone() as u8,
        codec.iter().map(|c| (c.clone() as u8).to_string()).collect::<Vec<String>>().join(","),
        qn as i64,
    );
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let response = client.get(url).headers(headers).send().await?;
    let res: serde_json::Value = response.json().await?;
    // Bilibili error codes can be negative (e.g. -400). `as_u64()` yields None
    // for negative numbers, so the previous `as_u64().unwrap_or(0)` silently
    // treated every negative error code as success; read it as i64 instead.
    let code = res["code"].as_i64().unwrap_or(0);
    let message = res["message"].as_str().unwrap_or("");
    if code != 0 {
        return Err(RecorderError::ApiError {
            error: format!("Request failed with code {code}, message: {message}"),
        });
    }
    log::debug!("Get stream info response: {res}");
    // Parse the new API response structure
    let playurl_info = &res["data"]["playurl_info"]["playurl"];
    let empty_vec = vec![];
    let streams = playurl_info["stream"].as_array().unwrap_or(&empty_vec);
    if streams.is_empty() {
        return Err(RecorderError::ApiError {
            error: "No streams available".to_string(),
        });
    }
    // Find the matching protocol
    let target_protocol = match protocol {
        Protocol::HttpStream => "http_stream",
        Protocol::HttpHls => "http_hls",
    };
    let stream = streams
        .iter()
        .find(|s| s["protocol_name"].as_str() == Some(target_protocol))
        .ok_or_else(|| RecorderError::ApiError {
            error: format!("Protocol {target_protocol} not found"),
        })?;
    // Find the matching format
    let target_format = match format {
        Format::Flv => "flv",
        Format::TS => "ts",
        Format::FMP4 => "fmp4",
    };
    let empty_vec = vec![];
    let format_info = stream["format"]
        .as_array()
        .unwrap_or(&empty_vec)
        .iter()
        .find(|f| f["format_name"].as_str() == Some(target_format))
        .ok_or_else(|| RecorderError::FormatNotFound {
            format: target_format.to_owned(),
        })?;
    // Find the first codec entry whose name matches any requested codec
    let target_codecs = codec
        .iter()
        .map(|c| match c {
            Codec::Avc => "avc",
            Codec::Hevc => "hevc",
        })
        .collect::<Vec<&str>>();
    let codec_info = format_info["codec"]
        .as_array()
        .unwrap_or(&empty_vec)
        .iter()
        .find(|c| target_codecs.contains(&c["codec_name"].as_str().unwrap_or("")))
        .ok_or_else(|| RecorderError::CodecNotFound {
            codecs: target_codecs.join(","),
        })?;
    let url_info = codec_info["url_info"].as_array().unwrap_or(&empty_vec);
    let mut url_info = url_info
        .iter()
        .map(|u| UrlInfo {
            host: u["host"].as_str().unwrap_or("").to_string(),
            extra: u["extra"].as_str().unwrap_or("").to_string(),
        })
        .collect::<Vec<UrlInfo>>();
    // Shuffle so repeated connects don't always hit the first CDN host.
    url_info.shuffle(&mut rand::rng());
    let drm = codec_info["drm"].as_bool().unwrap_or(false);
    let base_url = codec_info["base_url"].as_str().unwrap_or("").to_string();
    let master_url = format_info["master_url"].as_str().map(|s| s.to_string());
    // Report the codec the server actually selected, not the requested list.
    let codec = codec_info["codec_name"].as_str().unwrap_or("");
    let codec = match codec {
        "avc" => Codec::Avc,
        "hevc" => Codec::Hevc,
        _ => {
            return Err(RecorderError::CodecNotFound {
                codecs: codec.to_string(),
            })
        }
    };
    Ok(BiliStream {
        format,
        codec,
        base_url,
        url_info,
        drm,
        master_url,
    })
}
/// Download file from url to path
///
/// Creates the parent directory if needed, then fetches the full body into
/// memory and writes it to `path`.
///
/// # Errors
/// Returns a `RecorderError` for network failures and filesystem I/O errors
/// (previously directory creation used blocking `std::fs` calls and `unwrap()`,
/// panicking inside an async fn on any I/O failure or a path without a parent).
pub async fn download_file(client: &Client, url: &str, path: &Path) -> Result<(), RecorderError> {
    if let Some(parent) = path.parent() {
        if !parent.exists() {
            tokio::fs::create_dir_all(parent).await?;
        }
    }
    let response = client.get(url).send().await?;
    let bytes = response.bytes().await?;
    // Single buffered write; no intermediate Cursor/copy needed.
    tokio::fs::write(path, &bytes).await?;
    Ok(())
}
// Method from js code
/// Build a Bilibili WBI-signed query string from `parameters`.
///
/// Steps: fetch the current wbi `img_url`/`sub_url` keys from the nav
/// endpoint, derive a 32-char mixin key via the fixed permutation `table`,
/// add a `wts` timestamp, percent-encode the sorted parameters, then append
/// `w_rid = md5(params + mixin_key)`.
///
/// NOTE(review): nav response fields and parameter values are unwrapped
/// directly, so a malformed nav response or a non-string parameter value
/// panics instead of returning an error — confirm callers only pass string
/// values.
pub async fn get_sign(client: &Client, mut parameters: Value) -> Result<String, RecorderError> {
    // Fixed permutation used to derive the mixin key from img_key + sub_key.
    let table = vec![
        46, 47, 18, 2, 53, 8, 23, 32, 15, 50, 10, 31, 58, 3, 45, 35, 27, 43, 5, 49, 33, 9, 42, 19,
        29, 28, 14, 39, 12, 38, 41, 13, 37, 48, 7, 16, 24, 55, 40, 61, 26, 17, 0, 1, 60, 51, 30, 4,
        22, 25, 54, 21, 56, 59, 6, 63, 57, 62, 11, 36, 20, 34, 44, 52,
    ];
    let nav_info: Value = client
        .get("https://api.bilibili.com/x/web-interface/nav")
        .headers(generate_user_agent_header())
        .send()
        .await?
        .json()
        .await?;
    // The two key halves are the basenames of the wbi image urls.
    let re = Regex::new(r"wbi/(.*).png").unwrap();
    let img = re
        .captures(nav_info["data"]["wbi_img"]["img_url"].as_str().unwrap())
        .unwrap()
        .get(1)
        .unwrap()
        .as_str();
    let sub = re
        .captures(nav_info["data"]["wbi_img"]["sub_url"].as_str().unwrap())
        .unwrap()
        .get(1)
        .unwrap()
        .as_str();
    let raw_string = format!("{img}{sub}");
    let mut encoded = Vec::new();
    for x in table {
        if x < raw_string.len() {
            encoded.push(raw_string.as_bytes()[x]);
        }
    }
    // only keep 32 bytes of encoded
    encoded = encoded[0..32].to_vec();
    let encoded = String::from_utf8(encoded).unwrap();
    // Timestamp in seconds
    let wts = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap()
        .as_secs();
    parameters
        .as_object_mut()
        .unwrap()
        .insert("wts".to_owned(), serde_json::Value::String(wts.to_string()));
    // Get all keys from parameters into vec
    let mut keys = parameters
        .as_object()
        .unwrap()
        .keys()
        .map(std::borrow::ToOwned::to_owned)
        .collect::<Vec<String>>();
    // sort keys — the signature is computed over the lexicographically sorted query
    keys.sort();
    let mut params = String::new();
    for x in &keys {
        params.push_str(x);
        params.push('=');
        // Value filters !'()* characters
        let value = parameters
            .get(x)
            .unwrap()
            .as_str()
            .unwrap()
            .replace(['!', '\'', '(', ')', '*'], "");
        let value = PctString::encode(value.chars(), URIReserved);
        params.push_str(value.as_str());
        // add & if not last
        if x != keys.last().unwrap() {
            params.push('&');
        }
    }
    // md5 params+encoded
    let w_rid = md5::compute(params.to_string() + encoded.as_str());
    let params = params + format!("&w_rid={w_rid:x}").as_str();
    Ok(params)
}
/// Request upload credentials (endpoint, upos uri, chunk size, biz id, …)
/// for `video_file` from the member preupload endpoint.
async fn preupload_video(
    client: &Client,
    account: &Account,
    video_file: &Path,
) -> Result<PreuploadResponse, RecorderError> {
    let mut headers = generate_user_agent_header();
    let Ok(cookie_value) = account.cookies.parse() else {
        return Err(RecorderError::InvalidCookies);
    };
    headers.insert("cookie", cookie_value);
    let file_name = video_file.file_name().unwrap().to_str().unwrap();
    let url =
        format!("https://member.bilibili.com/preupload?name={file_name}&r=upos&profile=ugcfx/bup");
    Ok(client
        .get(&url)
        .headers(headers)
        .send()
        .await?
        .json::<PreuploadResponse>()
        .await?)
}
/// Register the upload's metadata (file size, part size, biz id) with the
/// upos endpoint and obtain the `upload_id` used for the chunked upload.
///
/// # Errors
/// Propagates network failures and filesystem errors. The file-size lookup
/// previously used `metadata().unwrap()` and panicked on I/O errors; it now
/// propagates via `?`, consistent with `upload_video`.
async fn post_video_meta(
    client: &Client,
    preupload_response: &PreuploadResponse,
    video_file: &Path,
) -> Result<PostVideoMetaResponse, RecorderError> {
    let url = format!(
        "https:{}{}?uploads=&output=json&profile=ugcfx/bup&filesize={}&partsize={}&biz_id={}",
        preupload_response.endpoint,
        preupload_response.upos_uri.replace("upos:/", ""),
        video_file.metadata()?.len(),
        preupload_response.chunk_size,
        preupload_response.biz_id
    );
    let response = client
        .post(&url)
        .header("X-Upos-Auth", &preupload_response.auth)
        .send()
        .await?
        .json::<PostVideoMetaResponse>()
        .await?;
    Ok(response)
}
/// Upload a video file to the upos endpoint in `chunk_size`-sized parts.
///
/// Reads are accumulated into one buffer until a full chunk is available,
/// then each chunk is PUT with up to `max_retries` attempts and exponential
/// backoff. Returns the total number of chunks on success.
async fn upload_video(client: &Client, params: UploadParams<'_>) -> Result<usize, RecorderError> {
    let mut file = File::open(params.video_file).await?;
    let mut buffer = vec![0; params.preupload_response.chunk_size];
    let file_size = params.video_file.metadata()?.len();
    let chunk_size = params.preupload_response.chunk_size as u64;
    let total_chunks = (file_size as f64 / chunk_size as f64).ceil() as usize;
    let start = Instant::now();
    let mut chunk = 0;
    let mut read_total = 0;
    let max_retries = 3;
    let timeout = Duration::from_secs(30);
    // A single read may return fewer bytes than chunk_size, so keep reading
    // into the remainder of the buffer until a full chunk is accumulated.
    while let Ok(size) = file.read(&mut buffer[read_total..]).await {
        read_total += size;
        log::debug!("size: {size}, total: {read_total}");
        if size > 0 && (read_total as u64) < chunk_size {
            continue;
        }
        // EOF with an empty buffer: all chunks (including a short final one) sent.
        if size == 0 && read_total == 0 {
            break;
        }
        let mut retry_count = 0;
        let mut success = false;
        while retry_count < max_retries && !success {
            // partNumber is 1-based while chunk is 0-based.
            let url = format!(
                "https:{}{}?partNumber={}&uploadId={}&chunk={}&chunks={}&size={}&start={}&end={}&total={}",
                params.preupload_response.endpoint,
                params.preupload_response.upos_uri.replace("upos:/", ""),
                chunk + 1,
                params.post_video_meta_response.upload_id,
                chunk,
                total_chunks,
                read_total,
                chunk * params.preupload_response.chunk_size,
                chunk * params.preupload_response.chunk_size + read_total,
                params.video_file.metadata().unwrap().len()
            );
            match client
                .put(&url)
                .header("X-Upos-Auth", &params.preupload_response.auth)
                .header("Content-Type", "application/octet-stream")
                .header("Content-Length", read_total.to_string())
                .timeout(timeout)
                .body(buffer[..read_total].to_vec())
                .send()
                .await
            {
                Ok(response) => {
                    if response.status().is_success() {
                        success = true;
                        let _ = response.text().await?;
                    } else {
                        log::error!("Upload failed with status: {}", response.status());
                        retry_count += 1;
                        if retry_count < max_retries {
                            // Exponential backoff: 2^retry_count seconds.
                            tokio::time::sleep(Duration::from_secs(2u64.pow(retry_count as u32)))
                                .await;
                        }
                    }
                }
                Err(e) => {
                    log::error!("Upload error: {e}");
                    retry_count += 1;
                    if retry_count < max_retries {
                        tokio::time::sleep(Duration::from_secs(2u64.pow(retry_count as u32))).await;
                    }
                }
            }
        }
        if !success {
            return Err(RecorderError::UploadError {
                err: format!("Failed to upload chunk {chunk} after {max_retries} retries"),
            });
        }
        chunk += 1;
        read_total = 0;
        log::debug!(
            "[bili]speed: {:.1} KiB/s",
            (chunk * params.preupload_response.chunk_size) as f64
                / start.elapsed().as_secs_f64()
                / 1024.0
        );
    }
    Ok(total_chunks)
}
/// Finish a multipart upload by reporting every part number to the upos
/// endpoint so the server can assemble the file.
async fn end_upload(
    client: &Client,
    preupload_response: &PreuploadResponse,
    post_video_meta_response: &PostVideoMetaResponse,
    chunks: usize,
) -> Result<(), RecorderError> {
    let upload_path = preupload_response.upos_uri.replace("upos:/", "");
    let url = format!(
        "https:{}{}?output=json&name={}&profile=ugcfx/bup&uploadId={}&biz_id={}",
        preupload_response.endpoint,
        upload_path,
        preupload_response.upos_uri,
        post_video_meta_response.upload_id,
        preupload_response.biz_id
    );
    // Each part is reported with a constant placeholder eTag.
    let part_list: Vec<Value> = (1..=chunks)
        .map(|part_number| json!({ "partNumber": part_number, "eTag": "etag" }))
        .collect();
    let payload = json!({ "parts": part_list }).to_string();
    client
        .post(&url)
        .header("X-Upos-Auth", &preupload_response.auth)
        .header("Content-Type", "application/json; charset=UTF-8")
        .body(payload)
        .send()
        .await?
        .text()
        .await?;
    Ok(())
}
/// Upload `video_file` end-to-end (preupload -> meta -> chunked upload ->
/// finalize) and return the metadata needed for a later submit call.
///
/// # Errors
/// Propagates failures from any upload phase. Panics if the video path or the
/// returned remote key is not valid UTF-8 or has no file stem.
pub async fn prepare_video(
    client: &Client,
    account: &Account,
    video_file: &Path,
) -> Result<profile::Video, RecorderError> {
    log::info!("Start Preparing Video: {}", video_file.to_str().unwrap());
    let preupload = preupload_video(client, account, video_file).await?;
    log::info!("Preupload Response: {preupload:?}");
    let metaposted = post_video_meta(client, &preupload, video_file).await?;
    log::info!("Post Video Meta Response: {metaposted:?}");
    let uploaded = upload_video(
        client,
        UploadParams {
            preupload_response: &preupload,
            post_video_meta_response: &metaposted,
            video_file,
        },
    )
    .await?;
    log::info!("Uploaded: {uploaded}");
    end_upload(client, &preupload, &metaposted, uploaded).await?;
    // Both title and filename are derived from the remote key's file stem.
    let filename = Path::new(&metaposted.key)
        .file_stem()
        .unwrap()
        .to_str()
        .unwrap();
    Ok(profile::Video {
        title: filename.to_string(),
        filename: filename.to_string(),
        desc: String::new(),
        cid: preupload.biz_id,
    })
}
/// Submit an uploaded video for publication via the `web/add/v3` endpoint.
///
/// Clones `profile_template`, appends `video` to its video list, and POSTs
/// the combined profile as JSON with the account's csrf token.
///
/// # Errors
/// `InvalidCookies` for unusable cookies; `InvalidResponse` when the request
/// fails, the response doesn't parse as a `GeneralResponse`, or the payload
/// is not `VideoSubmit` data.
pub async fn submit_video(
    client: &Client,
    account: &Account,
    profile_template: &Profile,
    video: &profile::Video,
) -> Result<VideoSubmitData, RecorderError> {
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(RecorderError::InvalidCookies);
    }
    let url = format!(
        "https://member.bilibili.com/x/vu/web/add/v3?ts={}&csrf={}",
        chrono::Local::now().timestamp(),
        account.csrf
    );
    let mut preprofile = profile_template.clone();
    preprofile.videos.push(video.clone());
    match client
        .post(&url)
        .headers(headers)
        .header("Content-Type", "application/json; charset=UTF-8")
        .body(serde_json::ser::to_string(&preprofile).unwrap_or_default())
        .send()
        .await
    {
        Ok(raw_resp) => {
            let json: Value = raw_resp.json().await?;
            if let Ok(resp) = serde_json::from_value::<GeneralResponse>(json.clone()) {
                match resp.data {
                    response::Data::VideoSubmit(data) => Ok(data),
                    _ => Err(RecorderError::InvalidResponse),
                }
            } else {
                log::error!("Parse response failed: {json}");
                Err(RecorderError::InvalidResponse)
            }
        }
        Err(e) => {
            log::error!("Send failed {e}");
            Err(RecorderError::InvalidResponse)
        }
    }
}
/// Upload a cover image (base64 data in `cover`) and return the hosted url.
///
/// Returns `InvalidCookies` for unusable cookies and `InvalidResponse` when
/// the request fails or the response is not `Cover` data.
pub async fn upload_cover(
    client: &Client,
    account: &Account,
    cover: &str,
) -> Result<String, RecorderError> {
    let url = format!(
        "https://member.bilibili.com/x/vu/web/cover/up?ts={}&csrf={}",
        chrono::Local::now().timestamp_millis(),
        account.csrf
    );
    let mut headers = generate_user_agent_header();
    let Ok(cookie_value) = account.cookies.parse() else {
        return Err(RecorderError::InvalidCookies);
    };
    headers.insert("cookie", cookie_value);
    let form_fields = [("csrf", account.csrf.clone()), ("cover", cover.to_string())];
    let send_result = client
        .post(&url)
        .headers(headers)
        .header("Content-Type", "application/x-www-form-urlencoded")
        .form(&form_fields)
        .send()
        .await;
    let raw_resp = match send_result {
        Ok(resp) => resp,
        Err(e) => {
            log::error!("Send failed {e}");
            return Err(RecorderError::InvalidResponse);
        }
    };
    let json: Value = raw_resp.json().await?;
    let Ok(resp) = serde_json::from_value::<GeneralResponse>(json.clone()) else {
        log::error!("Parse response failed: {json}");
        return Err(RecorderError::InvalidResponse);
    };
    match resp.data {
        response::Data::Cover(data) => Ok(data.url),
        _ => Err(RecorderError::InvalidResponse),
    }
}
/// Send a danmaku (chat message) to a live room.
///
/// The message is posted best-effort: network/cookie failures are returned,
/// but the API response body is not inspected.
pub async fn send_danmaku(
    client: &Client,
    account: &Account,
    room_id: &str,
    message: &str,
) -> Result<(), RecorderError> {
    let mut headers = generate_user_agent_header();
    let Ok(cookie_value) = account.cookies.parse() else {
        return Err(RecorderError::InvalidCookies);
    };
    headers.insert("cookie", cookie_value);
    let rnd = chrono::Local::now().timestamp().to_string();
    // Fixed styling (white, mode 1, font 25) matching the web client defaults.
    let form_fields = [
        ("bubble", "0"),
        ("msg", message),
        ("color", "16777215"),
        ("mode", "1"),
        ("fontsize", "25"),
        ("room_type", "0"),
        ("rnd", rnd.as_str()),
        ("roomid", room_id),
        ("csrf", account.csrf.as_str()),
        ("csrf_token", account.csrf.as_str()),
    ];
    client
        .post("https://api.live.bilibili.com/msg/send")
        .headers(headers)
        .header("Content-Type", "application/x-www-form-urlencoded")
        .form(&form_fields)
        .send()
        .await?;
    Ok(())
}
/// Fetch the list of available video categories for submission.
///
/// Returns `InvalidCookies` for unusable cookies and `InvalidResponse` when
/// the API reports a non-zero code or unexpected payload.
pub async fn get_video_typelist(
    client: &Client,
    account: &Account,
) -> Result<Vec<response::Typelist>, RecorderError> {
    let mut headers = generate_user_agent_header();
    let Ok(cookie_value) = account.cookies.parse() else {
        return Err(RecorderError::InvalidCookies);
    };
    headers.insert("cookie", cookie_value);
    let resp: GeneralResponse = client
        .get("https://member.bilibili.com/x/vupre/web/archive/pre?lang=cn")
        .headers(headers)
        .send()
        .await?
        .json()
        .await?;
    if resp.code != 0 {
        log::error!("Get video typelist failed with code {}", resp.code);
        return Err(RecorderError::InvalidResponse);
    }
    match resp.data {
        response::Data::VideoTypeList(data) => Ok(data.typelist),
        _ => Err(RecorderError::InvalidResponse),
    }
}

View File

@@ -0,0 +1,355 @@
pub mod api;
mod response;
pub mod stream_info;
use crate::account::Account;
use crate::core::hls_recorder::{construct_stream_from_variant, HlsRecorder};
use crate::core::{Codec, Format};
use crate::errors::RecorderError;
use crate::events::RecorderEvent;
use crate::platforms::douyin::stream_info::DouyinStream;
use crate::traits::RecorderTrait;
use crate::{Recorder, RoomInfo, UserInfo};
use async_trait::async_trait;
use chrono::Utc;
use danmu_stream::danmu_stream::DanmuStream;
use danmu_stream::provider::ProviderType;
use danmu_stream::DanmuMessageType;
use rand::random;
use std::path::PathBuf;
use std::sync::{atomic, Arc};
use std::time::Duration;
use tokio::sync::{broadcast, Mutex, RwLock};
use crate::danmu::DanmuStorage;
use crate::platforms::PlatformType;
/// Douyin live recorder, specialized from the generic [`Recorder`].
pub type DouyinRecorder = Recorder<DouyinExtra>;
/// Douyin-specific recorder state carried alongside the generic fields.
#[derive(Clone)]
pub struct DouyinExtra {
    // Streamer account identifier ("sec uid") used for room-info requests.
    sec_user_id: String,
    // Latest parsed stream description; None while offline or after reset().
    live_stream: Arc<RwLock<Option<DouyinStream>>>,
}
/// Pick the HLS url from the stream's `origin.main` entry.
/// Returns `None` (and logs an error) when that url field is empty.
fn get_best_stream_url(stream: &DouyinStream) -> Option<String> {
    let hls_url = &stream.data.origin.main.hls;
    if hls_url.is_empty() {
        log::error!("No stream url found in stream_data: {stream:#?}");
        None
    } else {
        Some(hls_url.clone())
    }
}
impl DouyinRecorder {
    /// Construct a Douyin recorder for `room_id` / `sec_user_id`.
    ///
    /// Only initializes state; no network request is made here. Work begins
    /// once `run()` is called and `check_status` reports a live room.
    pub async fn new(
        room_id: &str,
        sec_user_id: &str,
        account: &Account,
        cache_dir: PathBuf,
        channel: broadcast::Sender<RecorderEvent>,
        update_interval: Arc<atomic::AtomicU64>,
        enabled: bool,
    ) -> Result<Self, crate::errors::RecorderError> {
        Ok(Self {
            platform: PlatformType::Douyin,
            room_id: room_id.to_string(),
            account: account.clone(),
            client: reqwest::Client::new(),
            event_channel: channel,
            cache_dir,
            quit: Arc::new(atomic::AtomicBool::new(false)),
            enabled: Arc::new(atomic::AtomicBool::new(enabled)),
            is_recording: Arc::new(atomic::AtomicBool::new(false)),
            room_info: Arc::new(RwLock::new(RoomInfo::default())),
            user_info: Arc::new(RwLock::new(UserInfo::default())),
            platform_live_id: Arc::new(RwLock::new(String::new())),
            live_id: Arc::new(RwLock::new(String::new())),
            danmu_storage: Arc::new(RwLock::new(None)),
            last_update: Arc::new(atomic::AtomicI64::new(Utc::now().timestamp())),
            last_sequence: Arc::new(atomic::AtomicU64::new(0)),
            danmu_task: Arc::new(Mutex::new(None)),
            record_task: Arc::new(Mutex::new(None)),
            update_interval,
            total_duration: Arc::new(atomic::AtomicU64::new(0)),
            total_size: Arc::new(atomic::AtomicU64::new(0)),
            extra: DouyinExtra {
                sec_user_id: sec_user_id.to_string(),
                live_stream: Arc::new(RwLock::new(None)),
            },
        })
    }
    /// Poll room info, refresh cached room/user state, emit LiveStart/LiveEnd
    /// on status transitions, and cache the latest stream description.
    ///
    /// Returns `true` when the room is live and recordable; on a fetch error
    /// it returns the previously known status instead of flapping.
    async fn check_status(&self) -> bool {
        let pre_live_status = self.room_info.read().await.status;
        match api::get_room_info(
            &self.client,
            &self.account,
            &self.room_id,
            &self.extra.sec_user_id,
        )
        .await
        {
            Ok(info) => {
                let live_status = info.status == 0; // room_status == 0 means the room is live
                *self.room_info.write().await = RoomInfo {
                    platform: PlatformType::Douyin.as_str().to_string(),
                    room_id: self.room_id.to_string(),
                    room_title: info.room_title.clone(),
                    room_cover: info.cover.clone().unwrap_or_default(),
                    status: live_status,
                };
                *self.user_info.write().await = UserInfo {
                    user_id: info.sec_user_id.clone(),
                    user_name: info.user_name.clone(),
                    user_avatar: info.user_avatar.clone(),
                };
                if pre_live_status != live_status {
                    // live status changed, reset current record flag
                    log::info!(
                        "[{}]Live status changed to {}, auto_start: {}",
                        self.room_id,
                        live_status,
                        self.enabled.load(atomic::Ordering::Relaxed)
                    );
                    if live_status {
                        let _ = self.event_channel.send(RecorderEvent::LiveStart {
                            recorder: self.info().await,
                        });
                    } else {
                        let _ = self.event_channel.send(RecorderEvent::LiveEnd {
                            platform: PlatformType::Douyin,
                            room_id: self.room_id.clone(),
                            recorder: self.info().await,
                        });
                    }
                    self.reset().await;
                }
                if !live_status {
                    self.reset().await;
                    return false;
                }
                let should_record = self.should_record().await;
                if !should_record {
                    return true;
                }
                // Get stream URL when live starts
                if !info.hls_url.is_empty() {
                    // Only set stream URL, don't create record yet
                    // Record will be created when first ts download succeeds
                    // parse info.stream_data into DouyinStream
                    let stream_data = info.stream_data.clone();
                    let Ok(stream) = serde_json::from_str::<DouyinStream>(&stream_data) else {
                        log::error!("Failed to parse stream data: {:#?}", &info);
                        return false;
                    };
                    let Some(new_stream_url) = get_best_stream_url(&stream) else {
                        log::error!("No stream url found in stream_data: {stream:#?}");
                        return false;
                    };
                    log::info!("New douyin stream URL: {}", new_stream_url.clone());
                    *self.extra.live_stream.write().await = Some(stream);
                    (*self.platform_live_id.write().await).clone_from(&info.room_id_str);
                }
                true
            }
            Err(e) => {
                log::warn!("[{}]Update room status failed: {}", &self.room_id, e);
                pre_live_status
            }
        }
    }
    /// Consume the danmu (chat) stream for the current live, forwarding each
    /// message on the event channel and persisting it to danmu storage.
    ///
    /// Runs until the danmu stream finishes, closes, or errors.
    async fn danmu(&self) -> Result<(), crate::errors::RecorderError> {
        let cookies = self.account.cookies.clone();
        // NOTE(review): a non-numeric platform_live_id silently becomes room id 0 here.
        let danmu_room_id = self
            .platform_live_id
            .read()
            .await
            .clone()
            .parse::<i64>()
            .unwrap_or(0);
        let danmu_stream =
            DanmuStream::new(ProviderType::Douyin, &cookies, &danmu_room_id.to_string()).await;
        if danmu_stream.is_err() {
            let err = danmu_stream.err().unwrap();
            log::error!("Failed to create danmu stream: {err}");
            return Err(crate::errors::RecorderError::DanmuStreamError(err));
        }
        let danmu_stream = danmu_stream.unwrap();
        // Drive the connection future and the receive loop concurrently.
        let mut start_fut = Box::pin(danmu_stream.start());
        loop {
            tokio::select! {
                start_res = &mut start_fut => {
                    match start_res {
                        Ok(_) => {
                            log::info!("Danmu stream finished");
                            return Ok(());
                        }
                        Err(err) => {
                            log::error!("Danmu stream start error: {err}");
                            return Err(crate::errors::RecorderError::DanmuStreamError(err));
                        }
                    }
                }
                recv_res = danmu_stream.recv() => {
                    match recv_res {
                        Ok(Some(msg)) => {
                            match msg {
                                DanmuMessageType::DanmuMessage(danmu) => {
                                    let ts = Utc::now().timestamp_millis();
                                    let _ = self.event_channel.send(RecorderEvent::DanmuReceived {
                                        room: self.room_id.clone(),
                                        ts,
                                        content: danmu.message.clone(),
                                    });
                                    if let Some(danmu_storage) = self.danmu_storage.read().await.as_ref() {
                                        danmu_storage.add_line(ts, &danmu.message).await;
                                    }
                                }
                            }
                        }
                        Ok(None) => {
                            log::info!("Danmu stream closed");
                            return Ok(());
                        }
                        Err(err) => {
                            log::error!("Failed to receive danmu message: {err}");
                            return Err(crate::errors::RecorderError::DanmuStreamError(err));
                        }
                    }
                }
            }
        }
    }
    /// Clear per-live state (live id, counters, cached stream) and abort any
    /// running danmu task, waiting for it to wind down.
    async fn reset(&self) {
        *self.platform_live_id.write().await = String::new();
        self.last_update
            .store(Utc::now().timestamp(), atomic::Ordering::Relaxed);
        self.last_sequence.store(0, atomic::Ordering::Relaxed);
        self.total_duration.store(0, atomic::Ordering::Relaxed);
        self.total_size.store(0, atomic::Ordering::Relaxed);
        *self.extra.live_stream.write().await = None;
        if let Some(danmu_task) = self.danmu_task.lock().await.as_mut() {
            danmu_task.abort();
            let _ = danmu_task.await;
            log::info!("Danmu task aborted");
        }
    }
    /// Run one recording session for `live_id`: prepare the work dir, download
    /// the cover, open danmu storage, spawn the danmu task, then drive the
    /// HLS recorder. Blocks until `HlsRecorder::start()` returns, which
    /// presumably spans the whole live session.
    async fn update_entries(&self, live_id: &str) -> Result<(), RecorderError> {
        // Get current room info and stream URL
        let room_info = self.room_info.read().await.clone();
        let Some(stream) = self.extra.live_stream.read().await.clone() else {
            return Err(RecorderError::NoStreamAvailable);
        };
        let Some(stream_url) = get_best_stream_url(&stream) else {
            return Err(RecorderError::NoStreamAvailable);
        };
        let work_dir = self.work_dir(live_id).await;
        let _ = tokio::fs::create_dir_all(&work_dir.full_path()).await;
        // download cover (best-effort; failures are ignored)
        let cover_url = room_info.room_cover.clone();
        let cover_path = work_dir.with_filename("cover.jpg");
        let _ = api::download_file(&self.client, &cover_url, &cover_path.full_path()).await;
        // Setup danmu store
        let danmu_file_path = work_dir.with_filename("danmu.txt");
        let danmu_storage = DanmuStorage::new(&danmu_file_path.full_path()).await;
        *self.danmu_storage.write().await = danmu_storage;
        // Start danmu task
        *self.live_id.write().await = live_id.to_string();
        let self_clone = self.clone();
        log::info!("Start fetching danmu for live {live_id}");
        *self.danmu_task.lock().await = Some(tokio::spawn(async move {
            let _ = self_clone.danmu().await;
        }));
        let _ = self.event_channel.send(RecorderEvent::RecordStart {
            recorder: self.info().await,
        });
        let hls_stream =
            construct_stream_from_variant(live_id, &stream_url, Format::TS, Codec::Avc)
                .await
                .map_err(|_| RecorderError::NoStreamAvailable)?;
        let hls_recorder = HlsRecorder::new(
            self.room_id.to_string(),
            Arc::new(hls_stream),
            self.client.clone(),
            None,
            self.event_channel.clone(),
            work_dir.full_path(),
            self.enabled.clone(),
        )
        .await;
        if let Err(e) = hls_recorder.start().await {
            log::error!("[{}]Error from hls recorder: {}", self.room_id, e);
            return Err(e);
        }
        Ok(())
    }
}
#[async_trait]
impl crate::traits::RecorderTrait<DouyinExtra> for DouyinRecorder {
    /// Spawn the background polling loop and store its handle in `record_task`.
    ///
    /// The loop checks room status, runs a full recording session while live
    /// (via `update_entries`), emits `RecordEnd` when a session finishes, and
    /// sleeps `update_interval` seconds between polls while offline.
    async fn run(&self) {
        let self_clone = self.clone();
        *self.record_task.lock().await = Some(tokio::spawn(async move {
            while !self_clone.quit.load(atomic::Ordering::Relaxed) {
                if self_clone.check_status().await {
                    // Live status is ok, start recording
                    if self_clone.should_record().await {
                        self_clone
                            .is_recording
                            .store(true, atomic::Ordering::Relaxed);
                        // The live id is the session's start time in milliseconds.
                        let live_id = Utc::now().timestamp_millis().to_string();
                        if let Err(e) = self_clone.update_entries(&live_id).await {
                            log::error!("[{}]Update entries error: {}", self_clone.room_id, e);
                        }
                    }
                    if self_clone.is_recording.load(atomic::Ordering::Relaxed) {
                        let _ = self_clone.event_channel.send(RecorderEvent::RecordEnd {
                            recorder: self_clone.info().await,
                        });
                    }
                    self_clone
                        .is_recording
                        .store(false, atomic::Ordering::Relaxed);
                    self_clone.reset().await;
                    // Check status again after some seconds (random 0-4s jitter)
                    let secs = random::<u64>() % 5;
                    tokio::time::sleep(Duration::from_secs(secs)).await;
                    continue;
                }
                tokio::time::sleep(Duration::from_secs(
                    self_clone.update_interval.load(atomic::Ordering::Relaxed),
                ))
                .await;
            }
            log::info!("[{}]Recording thread quit.", self_clone.room_id);
        }));
    }
}

View File

@@ -0,0 +1,392 @@
use crate::account::Account;
use crate::errors::RecorderError;
use crate::utils::user_agent_generator;
use deno_core::JsRuntime;
use deno_core::RuntimeOptions;
use regex::Regex;
use reqwest::Client;
use uuid::Uuid;
use super::response::DouyinRoomInfoResponse;
use std::path::Path;
/// Normalized room + streamer info, assembled from either the web
/// (`room/web/enter`) or the H5 (`room/reflow/info`) Douyin endpoint.
#[derive(Debug, Clone)]
pub struct DouyinBasicRoomInfo {
    pub room_id_str: String,
    pub room_title: String,
    // Cover image url, when the API provides one.
    pub cover: Option<String>,
    // 0 = live; callers test `status == 0` (see the recorder's check_status).
    pub status: i64,
    // HLS pull url; empty string when the room carries no stream_url.
    pub hls_url: String,
    // Raw JSON blob from live_core_sdk_data.pull_data.stream_data,
    // parsed later into a DouyinStream.
    pub stream_data: String,
    // user related
    pub user_name: String,
    pub user_avatar: String,
    pub sec_user_id: String,
}
/// Create a V8 runtime with the bundled `a_bogus.js` signing script preloaded.
fn setup_js_runtime() -> Result<JsRuntime, RecorderError> {
    // Create a new V8 runtime
    let mut runtime = JsRuntime::new(RuntimeOptions::default());
    // Preload the bundled script so its globals (e.g. generate_a_bogus) are defined.
    let crypto_js = include_str!("js/a_bogus.js");
    runtime
        .execute_script(
            "<a_bogus.js>",
            deno_core::FastString::from_static(crypto_js),
        )
        .map_err(|e| RecorderError::JsRuntimeError(format!("Failed to execute crypto-js: {e}")))?;
    Ok(runtime)
}
/// Compute the `a_bogus` anti-bot signature for a Douyin request query string
/// by running the bundled JS implementation in an embedded V8 runtime.
///
/// NOTE(review): `params` and `user_agent` are spliced into the JS source
/// unescaped — assumes neither contains `"` or `\`; confirm for all callers.
async fn generate_a_bogus(params: &str, user_agent: &str) -> Result<String, RecorderError> {
    let mut runtime = setup_js_runtime()?;
    // Invoke generate_a_bogus(params, user_agent) defined by the preloaded script.
    let sign_call = format!("generate_a_bogus(\"{params}\", \"{user_agent}\")");
    let result = runtime
        .execute_script("<sign_call>", deno_core::FastString::from(sign_call))
        .map_err(|e| RecorderError::JsRuntimeError(format!("Failed to execute JavaScript: {e}")))?;
    // Get the result from the V8 runtime
    let mut scope = runtime.handle_scope();
    let local = deno_core::v8::Local::new(&mut scope, result);
    let url = local
        .to_string(&mut scope)
        .unwrap()
        .to_rust_string_lossy(&mut scope);
    Ok(url)
}
/// Generate a pseudo-random `ms_token` query value.
async fn generate_ms_token() -> String {
    // A hyphenated UUID v4 string (36 chars, not 32); Douyin appears to
    // accept this format as an ms_token.
    let uuid = Uuid::new_v4();
    uuid.to_string()
}
/// Build a header map containing only a freshly generated `user-agent` value.
pub fn generate_user_agent_header() -> reqwest::header::HeaderMap {
    let mut header_map = reqwest::header::HeaderMap::new();
    let generated_ua = user_agent_generator::UserAgentGenerator::new().generate(false);
    header_map.insert("user-agent", generated_ua.parse().unwrap());
    header_map
}
pub async fn get_room_info(
client: &Client,
account: &Account,
room_id: &str,
sec_user_id: &str,
) -> Result<DouyinBasicRoomInfo, RecorderError> {
let mut headers = generate_user_agent_header();
headers.insert("Referer", "https://live.douyin.com/".parse().unwrap());
headers.insert("Cookie", account.cookies.clone().parse().unwrap());
let ms_token = generate_ms_token().await;
let user_agent = headers.get("user-agent").unwrap().to_str().unwrap();
let params = format!(
"aid=6383&app_name=douyin_web&live_id=1&device_platform=web&language=zh-CN&enter_from=web_live&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Chrome&browser_version=122.0.0.0&web_rid={room_id}&ms_token={ms_token}");
let a_bogus = generate_a_bogus(&params, user_agent).await?;
// log::debug!("params: {params}");
// log::debug!("user_agent: {user_agent}");
// log::debug!("a_bogus: {a_bogus}");
let url = format!(
"https://live.douyin.com/webcast/room/web/enter/?aid=6383&app_name=douyin_web&live_id=1&device_platform=web&language=zh-CN&enter_from=web_live&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Chrome&browser_version=122.0.0.0&web_rid={room_id}&ms_token={ms_token}&a_bogus={a_bogus}"
);
let resp = client.get(&url).headers(headers).send().await?;
let status = resp.status();
let text = resp.text().await?;
if text.is_empty() {
log::debug!("Empty room info response, trying H5 API");
return get_room_info_h5(client, account, room_id, sec_user_id).await;
}
if status.is_success() {
if let Ok(data) = serde_json::from_str::<DouyinRoomInfoResponse>(&text) {
let cover = data
.data
.data
.first()
.and_then(|data| data.cover.as_ref())
.map(|cover| cover.url_list[0].clone());
return Ok(DouyinBasicRoomInfo {
room_id_str: data.data.data[0].id_str.clone(),
sec_user_id: sec_user_id.to_string(),
cover,
room_title: data.data.data[0].title.clone(),
user_name: data.data.user.nickname.clone(),
user_avatar: data.data.user.avatar_thumb.url_list[0].clone(),
status: data.data.room_status,
hls_url: data.data.data[0]
.stream_url
.as_ref()
.map(|stream_url| stream_url.hls_pull_url.clone())
.unwrap_or_default(),
stream_data: data.data.data[0]
.stream_url
.as_ref()
.map(|s| s.live_core_sdk_data.pull_data.stream_data.clone())
.unwrap_or_default(),
});
}
log::error!("Failed to parse room info response: {text}");
return get_room_info_h5(client, account, room_id, sec_user_id).await;
}
log::error!("Failed to get room info: {status}");
return get_room_info_h5(client, account, room_id, sec_user_id).await;
}
/// Fetch basic room info from Douyin's H5/mobile "reflow" endpoint
/// (`webcast.amemv.com`), used as a fallback when the desktop web API
/// response cannot be parsed.
///
/// # Errors
/// Returns `RecorderError::ApiError` when the endpoint reports a non-zero
/// `status_code`, the session is invalid, the body cannot be parsed, or the
/// HTTP request completes with a non-success status.
pub async fn get_room_info_h5(
    client: &Client,
    account: &Account,
    room_id: &str,
    sec_user_id: &str,
) -> Result<DouyinBasicRoomInfo, RecorderError> {
    // Build the complete URL parameter set, following the biliup implementation.
    let room_id_str = room_id.to_string();
    // Example of the full URL shape this endpoint expects:
    // https://webcast.amemv.com/webcast/room/reflow/info/?type_id=0&live_id=1&version_code=99.99.99&app_id=1128&room_id=10000&sec_user_id=MS4wLjAB&aid=6383&device_platform=web&browser_language=zh-CN&browser_platform=Win32&browser_name=Mozilla&browser_version=5.0
    let url_params = [
        ("type_id", "0"),
        ("live_id", "1"),
        ("version_code", "99.99.99"),
        ("app_id", "1128"),
        ("room_id", &room_id_str),
        ("sec_user_id", sec_user_id),
        ("aid", "6383"),
        ("device_platform", "web"),
    ];
    // Assemble the query string and the final request URL.
    let query_string = url_params
        .iter()
        .map(|(k, v)| format!("{k}={v}"))
        .collect::<Vec<_>>()
        .join("&");
    let url = format!("https://webcast.amemv.com/webcast/room/reflow/info/?{query_string}");
    let mut headers = generate_user_agent_header();
    headers.insert("Referer", "https://live.douyin.com/".parse().unwrap());
    headers.insert("Cookie", account.cookies.clone().parse().unwrap());
    let resp = client.get(&url).headers(headers).send().await?;
    let status = resp.status();
    let text = resp.text().await?;
    if status.is_success() {
        // Try to parse as H5 response format
        if let Ok(h5_data) =
            serde_json::from_str::<super::response::DouyinH5RoomInfoResponse>(&text)
        {
            // Extract RoomBasicInfo from H5 response
            let room = &h5_data.data.room;
            let owner = &room.owner;
            let cover = room
                .cover
                .as_ref()
                .and_then(|c| c.url_list.first().cloned());
            let hls_url = room
                .stream_url
                .as_ref()
                .map(|s| s.hls_pull_url.clone())
                .unwrap_or_default();
            return Ok(DouyinBasicRoomInfo {
                room_id_str: room.id_str.clone(),
                room_title: room.title.clone(),
                cover,
                // NOTE(review): room.status == 2 is mapped to 0 and anything
                // else to 1 — presumably 2 means "live" upstream; confirm the
                // normalized meaning against the callers.
                status: if room.status == 2 { 0 } else { 1 },
                hls_url,
                user_name: owner.nickname.clone(),
                user_avatar: owner
                    .avatar_thumb
                    .url_list
                    .first()
                    .unwrap_or(&String::new())
                    .clone(),
                sec_user_id: owner.sec_uid.clone(),
                stream_data: room
                    .stream_url
                    .as_ref()
                    .map(|s| s.live_core_sdk_data.pull_data.stream_data.clone())
                    .unwrap_or_default(),
            });
        }
        // If that fails, try to parse as a generic JSON to see what we got
        if let Ok(json_value) = serde_json::from_str::<serde_json::Value>(&text) {
            // Check if it's an error response
            if let Some(status_code) = json_value
                .get("status_code")
                .and_then(serde_json::Value::as_i64)
            {
                if status_code != 0 {
                    let error_msg = json_value
                        .get("data")
                        .and_then(|v| v.get("message").and_then(|v| v.as_str()))
                        .unwrap_or("Unknown error");
                    // 10011 is surfaced as-is so callers can recognize it;
                    // the generic branch wraps the code and message together.
                    if status_code == 10011 {
                        return Err(RecorderError::ApiError {
                            error: error_msg.to_string(),
                        });
                    }
                    return Err(RecorderError::ApiError {
                        error: format!(
                            "API returned error status_code: {status_code} - {error_msg}"
                        ),
                    });
                }
            }
            // Check for an "invalid session" error so the user gets an
            // actionable cookie hint instead of a raw parse failure.
            if let Some(status_message) = json_value.get("status_message").and_then(|v| v.as_str())
            {
                if status_message.contains("invalid session") {
                    return Err(RecorderError::ApiError { error:
                        "Invalid session - please check your cookies. Make sure you have valid sessionid, passport_csrf_token, and other authentication cookies from douyin.com".to_string(),
                    });
                }
            }
            return Err(RecorderError::ApiError {
                error: format!("Failed to parse h5 room info response: {text}"),
            });
        }
        log::error!("Failed to parse h5 room info response: {text}");
        return Err(RecorderError::ApiError {
            error: format!("Failed to parse h5 room info response: {text}"),
        });
    }
    log::error!("Failed to get h5 room info: {status}");
    Err(RecorderError::ApiError {
        error: format!("Failed to get h5 room info: {status} {text}"),
    })
}
/// Resolve the logged-in Douyin user's identity via the IM "spotlight
/// relation" endpoint.
///
/// The user's own entry is looked up in the returned `followings` list by
/// matching `owner_sec_uid`; if not present there, a minimal `User` with
/// only the sec_uid and a placeholder nickname is returned.
///
/// # Errors
/// `RecorderError::ApiError` when the response cannot be parsed or the
/// HTTP status is not a success.
pub async fn get_user_info(
    client: &Client,
    account: &Account,
) -> Result<super::response::User, RecorderError> {
    // Use the IM spotlight relation API to get user info
    let url = "https://www.douyin.com/aweme/v1/web/im/spotlight/relation/";
    let mut headers = generate_user_agent_header();
    headers.insert("Referer", "https://www.douyin.com/".parse().unwrap());
    headers.insert("Cookie", account.cookies.clone().parse().unwrap());
    let resp = client.get(url).headers(headers).send().await?;
    let status = resp.status();
    let text = resp.text().await?;
    if status.is_success() {
        if let Ok(data) = serde_json::from_str::<super::response::DouyinRelationResponse>(&text) {
            if data.status_code == 0 {
                let owner_sec_uid = &data.owner_sec_uid;
                // Find the user's own info in the followings list by matching sec_uid
                if let Some(followings) = &data.followings {
                    for following in followings {
                        if following.sec_uid == *owner_sec_uid {
                            let user = super::response::User {
                                id_str: following.uid.clone(),
                                sec_uid: following.sec_uid.clone(),
                                nickname: following.nickname.clone(),
                                avatar_thumb: following.avatar_thumb.clone(),
                                follow_info: super::response::FollowInfo::default(),
                                foreign_user: 0,
                                open_id_str: String::new(),
                            };
                            return Ok(user);
                        }
                    }
                }
                // If not found in followings, create a minimal user info from owner_sec_uid
                let user = super::response::User {
                    id_str: String::new(), // We don't have the numeric UID
                    sec_uid: owner_sec_uid.clone(),
                    nickname: "抖音用户".to_string(), // Default nickname
                    avatar_thumb: super::response::AvatarThumb { url_list: vec![] },
                    follow_info: super::response::FollowInfo::default(),
                    foreign_user: 0,
                    open_id_str: String::new(),
                };
                return Ok(user);
            }
            // NOTE(review): a parsed response with status_code != 0 falls
            // through to the generic failure below.
        } else {
            log::error!("Failed to parse user info response: {text}");
            return Err(RecorderError::ApiError {
                error: format!("Failed to parse user info response: {text}"),
            });
        }
    }
    log::error!("Failed to get user info: {status}");
    Err(RecorderError::ApiError {
        error: format!("Failed to get user info: {status} {text}"),
    })
}
/// Scrape the live-room page and extract the owner's `sec_uid` from the
/// embedded, escaped JSON (it appears as `\"sec_uid\":\"...\"` inside a
/// script tag, hence the escaped-quote pattern).
///
/// # Errors
/// `RecorderError::ApiError` on a non-success HTTP status or when the
/// page contains no `sec_uid`.
pub async fn get_room_owner_sec_uid(
    client: &Client,
    room_id: &str,
) -> Result<String, RecorderError> {
    let page_url = format!("https://live.douyin.com/{room_id}");
    let mut headers = generate_user_agent_header();
    headers.insert("Referer", "https://live.douyin.com/".parse().unwrap());

    let resp = client.get(page_url).headers(headers).send().await?;
    let status = resp.status();
    let text = resp.text().await?;
    if !status.is_success() {
        return Err(RecorderError::ApiError {
            error: format!("Failed to get room owner sec uid: {status} {text}"),
        });
    }

    // Capture the value between the escaped quotes.
    let pattern = Regex::new(r#"\\"sec_uid\\":\\"(.*?)\\""#).unwrap();
    let found = pattern
        .captures(&text)
        .and_then(|caps| caps.get(1))
        .ok_or_else(|| RecorderError::ApiError {
            error: "Failed to find sec_uid in room page".to_string(),
        })?;
    Ok(found.as_str().to_string())
}
/// Download file from url to path.
///
/// Creates the destination directory if necessary, then fetches the body
/// and writes it to `path` in one shot.
///
/// Fixes over the previous version: no longer panics via
/// `path.parent().unwrap()` when `path` has no parent component, and uses
/// async `tokio::fs` instead of blocking `std::fs` inside an async fn.
///
/// # Errors
/// Propagates `reqwest` transport errors and filesystem I/O errors as
/// `RecorderError`.
pub async fn download_file(client: &Client, url: &str, path: &Path) -> Result<(), RecorderError> {
    if let Some(parent) = path.parent() {
        if !parent.exists() {
            tokio::fs::create_dir_all(parent).await?;
        }
    }
    let response = client.get(url).send().await?;
    // NOTE: buffers the whole body in memory; acceptable for small assets
    // such as covers and avatars.
    let bytes = response.bytes().await?;
    tokio::fs::write(path, &bytes).await?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    // Live integration test: hits douyin.com over the network and asserts
    // a specific streamer's sec_uid. It will fail offline, if room
    // 200525029536 changes owner, or if the page format changes — it is a
    // smoke test for the scraping regex, not a unit test.
    #[tokio::test]
    async fn test_get_room_owner_sec_uid() {
        let client = Client::new();
        let sec_uid = get_room_owner_sec_uid(&client, "200525029536")
            .await
            .unwrap();
        assert_eq!(
            sec_uid,
            "MS4wLjABAAAAdFmmud36bynPjXOvoMjatb42856_zryHsGmlkpIECDA"
        );
    }
}

View File

@@ -0,0 +1,550 @@
// Script from https://github.com/JoeanAmier/TikTokDownloader/blob/master/static/js/a_bogus.js
// All content in this file is provided solely for learning and research purposes. Commercial or unlawful use is strictly prohibited, and the author accepts no liability for any consequences of misuse.
// Classic RC4 stream cipher over a "binary string": key-scheduling (KSA)
// followed by keystream generation (PRGA), XOR-ing each keystream byte
// with the plaintext's char codes. The cipher is symmetric — applying it
// twice with the same key restores the input.
function rc4_encrypt(plaintext, key) {
  // KSA: initialize the state to the identity permutation, then shuffle
  // it using the key bytes.
  const state = Array.from({ length: 256 }, (_, idx) => idx);
  let swapIdx = 0;
  for (let idx = 0; idx < 256; idx++) {
    swapIdx = (swapIdx + state[idx] + key.charCodeAt(idx % key.length)) % 256;
    const held = state[idx];
    state[idx] = state[swapIdx];
    state[swapIdx] = held;
  }
  // PRGA: emit one keystream byte per input character and XOR them.
  let i = 0;
  let j = 0;
  const out = [];
  for (let k = 0; k < plaintext.length; k++) {
    i = (i + 1) % 256;
    j = (j + state[i]) % 256;
    const held = state[i];
    state[i] = state[j];
    state[j] = held;
    const keyByte = state[(state[i] + state[j]) % 256];
    out.push(String.fromCharCode(keyByte ^ plaintext.charCodeAt(k)));
  }
  return out.join("");
}
// Rotate the 32-bit value `e` left by `r` bits (r is reduced mod 32) and
// return the result as an unsigned 32-bit integer.
function le(e, r) {
  r %= 32;
  const rotated = (e << r) | (e >>> (32 - r));
  return rotated >>> 0;
}
// SM3 round constant Tj: one value for rounds 0-15, another for 16-63.
// An out-of-range index logs an error and yields undefined, matching the
// original behavior.
function de(e) {
  if (e >= 0 && e < 16) return 2043430169;
  if (e >= 16 && e < 64) return 2055708042;
  return void console["error"]("invalid j for constant Tj");
}
// SM3 boolean function FF: XOR of the three words for rounds 0-15,
// bitwise majority for rounds 16-63. Out-of-range rounds log an error
// and return 0, matching the original.
function pe(e, r, t, n) {
  if (e >= 0 && e < 16) return (r ^ t ^ n) >>> 0;
  if (e >= 16 && e < 64) return ((r & t) | (r & n) | (t & n)) >>> 0;
  console["error"]("invalid j for bool function FF");
  return 0;
}
// SM3 boolean function GG: XOR of the three words for rounds 0-15,
// bitwise select (r ? t-bit : n-bit) for rounds 16-63. Out-of-range
// rounds log an error and return 0, matching the original.
function he(e, r, t, n) {
  if (e >= 0 && e < 16) return (r ^ t ^ n) >>> 0;
  if (e >= 16 && e < 64) return ((r & t) | (~r & n)) >>> 0;
  console["error"]("invalid j for bool function GG");
  return 0;
}
// Reset the SM3 hash state: load the eight standard initialization-vector
// words into this.reg, and clear both the pending-chunk buffer and the
// total byte counter. Installed as SM3.prototype.reset.
function reset() {
  (this.reg[0] = 1937774191),
    (this.reg[1] = 1226093241),
    (this.reg[2] = 388252375),
    (this.reg[3] = 3666478592),
    (this.reg[4] = 2842636476),
    (this.reg[5] = 372324522),
    (this.reg[6] = 3817729613),
    (this.reg[7] = 2969243214),
    (this["chunk"] = []),
    (this["size"] = 0);
}
// Absorb input into the SM3 state. A string argument is first converted
// to its UTF-8 byte array via the encodeURIComponent/%XX trick; an array
// argument is used as-is. Bytes are buffered into 64-byte chunks and each
// full chunk is compressed immediately. Installed as SM3.prototype.write.
// NOTE(review): inside the string-conversion IIFE, `n` and `a` are
// assigned without declaration, so they leak as globals in sloppy mode
// (and would throw in strict mode) — kept as-is here.
function write(e) {
  var a =
    "string" == typeof e
      ? (function (e) {
          // Percent-encode, then fold each %XX escape back into a raw
          // byte so `n` holds a latin-1 string of the UTF-8 bytes.
          (n = encodeURIComponent(e)["replace"](
            /%([0-9A-F]{2})/g,
            function (e, r) {
              return String["fromCharCode"]("0x" + r);
            }
          )),
            (a = new Array(n["length"]));
          return (
            Array["prototype"]["forEach"]["call"](n, function (e, r) {
              a[r] = e.charCodeAt(0);
            }),
            a
          );
        })(e)
      : e;
  this.size += a.length;
  // Fill the current chunk; compress every complete 64-byte block and
  // keep the remainder buffered for the next write/sum.
  var f = 64 - this["chunk"]["length"];
  if (a["length"] < f) this["chunk"] = this["chunk"].concat(a);
  else
    for (
      this["chunk"] = this["chunk"].concat(a.slice(0, f));
      this["chunk"].length >= 64;

    )
      this["_compress"](this["chunk"]),
        f < a["length"]
          ? (this["chunk"] = a["slice"](f, Math["min"](f + 64, a["length"])))
          : (this["chunk"] = []),
        (f += 64);
}
// Finalize the SM3 hash and return the digest. With an argument `e`, the
// state is reset and `e` is hashed from scratch. After padding, any
// remaining chunks are compressed; t == "hex" returns a hex string,
// otherwise a 32-byte array (big-endian bytes per state word). The state
// is reset afterwards so the instance can be reused.
// Installed as SM3.prototype.sum.
// NOTE(review): the hex branch calls se(), which is not defined anywhere
// in this file — taking that path would throw a ReferenceError. Only the
// byte-array form is used by generate_rc4_bb_str.
function sum(e, t) {
  e && (this["reset"](), this["write"](e)), this["_fill"]();
  for (var f = 0; f < this.chunk["length"]; f += 64)
    this._compress(this["chunk"]["slice"](f, f + 64));
  var i = null;
  if (t == "hex") {
    i = "";
    for (f = 0; f < 8; f++) i += se(this["reg"][f]["toString"](16), 8, "0");
  } else
    // Serialize each 32-bit state word into 4 bytes, most significant
    // byte first.
    for (i = new Array(32), f = 0; f < 8; f++) {
      var c = this.reg[f];
      (i[4 * f + 3] = (255 & c) >>> 0),
        (c >>>= 8),
        (i[4 * f + 2] = (255 & c) >>> 0),
        (c >>>= 8),
        (i[4 * f + 1] = (255 & c) >>> 0),
        (c >>>= 8),
        (i[4 * f] = (255 & c) >>> 0);
    }
  return this["reset"](), i;
}
// SM3 compression function: fold one 64-byte block into this.reg.
// Installed as SM3.prototype._compress.
// NOTE(review): `t` is an array, so the guard "t < 64" string-coerces the
// array and is effectively never true for real input; it was probably
// intended as "t.length < 64". Kept as-is here.
function _compress(t) {
  if (t < 64) console.error("compress error: not enough data");
  else {
    // Message expansion: pack the 64 bytes into 16 big-endian words,
    // expand to W[0..67], and store W'[j] = W[j] ^ W[j+4] at offsets
    // 68..131 of the same array.
    for (
      var f = (function (e) {
        for (var r = new Array(132), t = 0; t < 16; t++)
          (r[t] = e[4 * t] << 24),
            (r[t] |= e[4 * t + 1] << 16),
            (r[t] |= e[4 * t + 2] << 8),
            (r[t] |= e[4 * t + 3]),
            (r[t] >>>= 0);
        for (var n = 16; n < 68; n++) {
          var a = r[n - 16] ^ r[n - 9] ^ le(r[n - 3], 15);
          (a = a ^ le(a, 15) ^ le(a, 23)),
            (r[n] = (a ^ le(r[n - 13], 7) ^ r[n - 6]) >>> 0);
        }
        for (n = 0; n < 64; n++) r[n + 68] = (r[n] ^ r[n + 4]) >>> 0;
        return r;
      })(t),
      i = this["reg"].slice(0),
      c = 0;
      c < 64;
      c++
    ) {
      // One SM3 round: compute SS1/SS2 from the rotating constant Tj
      // (de), apply the round-dependent boolean functions FF (pe) and
      // GG (he), then rotate the working registers.
      var o = le(i[0], 12) + i[4] + le(de(c), c),
        s = ((o = le((o = (4294967295 & o) >>> 0), 7)) ^ le(i[0], 12)) >>> 0,
        u = pe(c, i[0], i[1], i[2]);
      u = (4294967295 & (u = u + i[3] + s + f[c + 68])) >>> 0;
      var b = he(c, i[4], i[5], i[6]);
      (b = (4294967295 & (b = b + i[7] + o + f[c])) >>> 0),
        (i[3] = i[2]),
        (i[2] = le(i[1], 9)),
        (i[1] = i[0]),
        (i[0] = u),
        (i[7] = i[6]),
        (i[6] = le(i[5], 19)),
        (i[5] = i[4]),
        (i[4] = (b ^ le(b, 9) ^ le(b, 17)) >>> 0);
    }
    // Feed-forward: XOR the working registers back into the state.
    for (var l = 0; l < 8; l++) this["reg"][l] = (this["reg"][l] ^ i[l]) >>> 0;
  }
}
// Merkle–Damgård style padding: append the 0x80 marker byte, zero-pad so
// the length field lands at the end of a 64-byte block, then append the
// total message length in bits as a 64-bit big-endian integer.
// Installed as SM3.prototype._fill.
function _fill() {
  var a = 8 * this["size"],
    f = this["chunk"]["push"](128) % 64;
  // If fewer than 8 bytes remain in this block, spill into the next one.
  for (64 - f < 8 && (f -= 64); f < 56; f++) this.chunk["push"](0);
  // High 32 bits of the bit length (via float division, since JS bitwise
  // ops are 32-bit), then the low 32 bits, both big-endian.
  for (var i = 0; i < 4; i++) {
    var c = Math["floor"](a / 4294967296);
    this["chunk"].push((c >>> (8 * (3 - i))) & 255);
  }
  for (i = 0; i < 4; i++) this["chunk"]["push"]((a >>> (8 * (3 - i))) & 255);
}
// SM3 hash implemented in pre-ES6 constructor style: per-instance state
// on `this`, shared methods attached to the prototype below.
function SM3() {
  this.reg = []; // eight 32-bit state words (set by reset)
  this.chunk = []; // buffered bytes not yet compressed
  this.size = 0; // total bytes written so far
  this.reset();
}
SM3.prototype.reset = reset;
SM3.prototype.write = write;
SM3.prototype.sum = sum;
SM3.prototype._compress = _compress;
SM3.prototype._fill = _fill;
// Encode a "binary string" with one of five base64-style alphabets
// selected by `num` ("s0".."s4"); every 3 input bytes become 4 output
// characters, using the masks in `constant` to slice out 6-bit groups.
// Fix: `temp_int` was assigned without declaration (an implicit global —
// it leaks state between calls and throws in strict mode); it is now a
// proper local.
function result_encrypt(long_str, num = null) {
  let s_obj = {
    s0: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
    s1: "Dkdpgh4ZKsQB80/Mfvw36XI1R25+WUAlEi7NLboqYTOPuzmFjJnryx9HVGcaStCe=",
    s2: "Dkdpgh4ZKsQB80/Mfvw36XI1R25-WUAlEi7NLboqYTOPuzmFjJnryx9HVGcaStCe=",
    s3: "ckdp1h4ZKsUB80/Mfvw36XIgR25+WQAlEi7NLboqYTOPuzmFjJnryx9HVGDaStCe",
    s4: "Dkdpgh2ZmsQB80/MfvV36XI1R45-WUAlEixNLwoqYTOPuzKFjJnry79HbGcaStCe",
  };
  let constant = {
    0: 16515072, // top 6 bits of a 24-bit group
    1: 258048, // next 6 bits
    2: 4032, // next 6 bits
    str: s_obj[num],
  };
  let result = "";
  let lound = 0;
  let long_int = get_long_int(lound, long_str);
  let temp_int = 0; // was implicitly global in the original
  for (let i = 0; i < (long_str.length / 3) * 4; i++) {
    // Refresh the packed 24-bit value when moving to the next 3-byte
    // input group.
    if (Math.floor(i / 4) !== lound) {
      lound += 1;
      long_int = get_long_int(lound, long_str);
    }
    let key = i % 4;
    switch (key) {
      case 0:
        temp_int = (long_int & constant["0"]) >> 18;
        result += constant["str"].charAt(temp_int);
        break;
      case 1:
        temp_int = (long_int & constant["1"]) >> 12;
        result += constant["str"].charAt(temp_int);
        break;
      case 2:
        temp_int = (long_int & constant["2"]) >> 6;
        result += constant["str"].charAt(temp_int);
        break;
      case 3:
        temp_int = long_int & 63;
        result += constant["str"].charAt(temp_int);
        break;
      default:
        break;
    }
  }
  return result;
}
// Pack three consecutive char codes of `long_str`, starting at index
// round*3, into one 24-bit big-endian integer.
function get_long_int(round, long_str) {
  const base = round * 3;
  const hi = long_str.charCodeAt(base) << 16;
  const mid = long_str.charCodeAt(base + 1) << 8;
  const lo = long_str.charCodeAt(base + 2);
  return hi | mid | lo;
}
// Mix the low 16 bits of `random` with the two option bytes: each output
// byte takes alternating bit positions (mask 170 = 0b10101010,
// 85 = 0b01010101) from the random value and from the option.
function gener_random(random, option) {
  const low = random & 255;
  const high = (random >> 8) & 255;
  return [
    (low & 170) | (option[0] & 85),
    (low & 85) | (option[0] & 170),
    (high & 170) | (option[1] & 85),
    (high & 85) | (option[1] & 170),
  ];
}
//////////////////////////////////////////////
function generate_rc4_bb_str(
url_search_params,
user_agent,
window_env_str,
suffix = "cus",
Arguments = [0, 1, 14]
) {
let sm3 = new SM3();
let start_time = Date.now();
/**
* 进行3次加密处理
* 1: url_search_params两次sm3之的结果
* 2: 对后缀两次sm3之的结果
* 3: 对ua处理之后的结果
*/
// url_search_params两次sm3之的结果
let url_search_params_list = sm3.sum(sm3.sum(url_search_params + suffix));
// 对后缀两次sm3之的结果
let cus = sm3.sum(sm3.sum(suffix));
// 对ua处理之后的结果
let ua = sm3.sum(
result_encrypt(
rc4_encrypt(
user_agent,
String.fromCharCode.apply(null, [0.00390625, 1, 14])
),
"s3"
)
);
//
let end_time = Date.now();
// b
let b = {
8: 3, // 固定
10: end_time, //3次加密结束时间
15: {
aid: 6383,
pageId: 6241,
boe: false,
ddrt: 7,
paths: {
include: [{}, {}, {}, {}, {}, {}, {}],
exclude: [],
},
track: {
mode: 0,
delay: 300,
paths: [],
},
dump: true,
rpU: "",
},
16: start_time, //3次加密开始时间
18: 44, //固定
19: [1, 0, 1, 5],
};
//3次加密开始时间
b[20] = (b[16] >> 24) & 255;
b[21] = (b[16] >> 16) & 255;
b[22] = (b[16] >> 8) & 255;
b[23] = b[16] & 255;
b[24] = (b[16] / 256 / 256 / 256 / 256) >> 0;
b[25] = (b[16] / 256 / 256 / 256 / 256 / 256) >> 0;
// 参数Arguments [0, 1, 14, ...]
// let Arguments = [0, 1, 14]
b[26] = (Arguments[0] >> 24) & 255;
b[27] = (Arguments[0] >> 16) & 255;
b[28] = (Arguments[0] >> 8) & 255;
b[29] = Arguments[0] & 255;
b[30] = (Arguments[1] / 256) & 255;
b[31] = Arguments[1] % 256 & 255;
b[32] = (Arguments[1] >> 24) & 255;
b[33] = (Arguments[1] >> 16) & 255;
b[34] = (Arguments[2] >> 24) & 255;
b[35] = (Arguments[2] >> 16) & 255;
b[36] = (Arguments[2] >> 8) & 255;
b[37] = Arguments[2] & 255;
// (url_search_params + "cus") 两次sm3之的结果
/**let url_search_params_list = [
91, 186, 35, 86, 143, 253, 6, 76,
34, 21, 167, 148, 7, 42, 192, 219,
188, 20, 182, 85, 213, 74, 213, 147,
37, 155, 93, 139, 85, 118, 228, 213
]*/
b[38] = url_search_params_list[21];
b[39] = url_search_params_list[22];
// ("cus") 对后缀两次sm3之的结果
/**
* let cus = [
136, 101, 114, 147, 58, 77, 207, 201,
215, 162, 154, 93, 248, 13, 142, 160,
105, 73, 215, 241, 83, 58, 51, 43,
255, 38, 168, 141, 216, 194, 35, 236
]*/
b[40] = cus[21];
b[41] = cus[22];
// 对ua处理之后的结果
/**
* let ua = [
129, 190, 70, 186, 86, 196, 199, 53,
99, 38, 29, 209, 243, 17, 157, 69,
147, 104, 53, 23, 114, 126, 66, 228,
135, 30, 168, 185, 109, 156, 251, 88
]*/
b[42] = ua[23];
b[43] = ua[24];
//3次加密结束时间
b[44] = (b[10] >> 24) & 255;
b[45] = (b[10] >> 16) & 255;
b[46] = (b[10] >> 8) & 255;
b[47] = b[10] & 255;
b[48] = b[8];
b[49] = (b[10] / 256 / 256 / 256 / 256) >> 0;
b[50] = (b[10] / 256 / 256 / 256 / 256 / 256) >> 0;
// object配置项
b[51] = b[15]["pageId"];
b[52] = (b[15]["pageId"] >> 24) & 255;
b[53] = (b[15]["pageId"] >> 16) & 255;
b[54] = (b[15]["pageId"] >> 8) & 255;
b[55] = b[15]["pageId"] & 255;
b[56] = b[15]["aid"];
b[57] = b[15]["aid"] & 255;
b[58] = (b[15]["aid"] >> 8) & 255;
b[59] = (b[15]["aid"] >> 16) & 255;
b[60] = (b[15]["aid"] >> 24) & 255;
// 中间进行了环境检测
// 代码索引: 2496 索引值: 17 索引64关键条件
// '1536|747|1536|834|0|30|0|0|1536|834|1536|864|1525|747|24|24|Win32'.charCodeAt()得到65位数组
/**
* let window_env_list = [49, 53, 51, 54, 124, 55, 52, 55, 124, 49, 53, 51, 54, 124, 56, 51, 52, 124, 48, 124, 51,
* 48, 124, 48, 124, 48, 124, 49, 53, 51, 54, 124, 56, 51, 52, 124, 49, 53, 51, 54, 124, 56,
* 54, 52, 124, 49, 53, 50, 53, 124, 55, 52, 55, 124, 50, 52, 124, 50, 52, 124, 87, 105, 110,
* 51, 50]
*/
let window_env_list = [];
for (let index = 0; index < window_env_str.length; index++) {
window_env_list.push(window_env_str.charCodeAt(index));
}
b[64] = window_env_list.length;
b[65] = b[64] & 255;
b[66] = (b[64] >> 8) & 255;
b[69] = [].length;
b[70] = b[69] & 255;
b[71] = (b[69] >> 8) & 255;
b[72] =
b[18] ^
b[20] ^
b[26] ^
b[30] ^
b[38] ^
b[40] ^
b[42] ^
b[21] ^
b[27] ^
b[31] ^
b[35] ^
b[39] ^
b[41] ^
b[43] ^
b[22] ^
b[28] ^
b[32] ^
b[36] ^
b[23] ^
b[29] ^
b[33] ^
b[37] ^
b[44] ^
b[45] ^
b[46] ^
b[47] ^
b[48] ^
b[49] ^
b[50] ^
b[24] ^
b[25] ^
b[52] ^
b[53] ^
b[54] ^
b[55] ^
b[57] ^
b[58] ^
b[59] ^
b[60] ^
b[65] ^
b[66] ^
b[70] ^
b[71];
let bb = [
b[18],
b[20],
b[52],
b[26],
b[30],
b[34],
b[58],
b[38],
b[40],
b[53],
b[42],
b[21],
b[27],
b[54],
b[55],
b[31],
b[35],
b[57],
b[39],
b[41],
b[43],
b[22],
b[28],
b[32],
b[60],
b[36],
b[23],
b[29],
b[33],
b[37],
b[44],
b[45],
b[59],
b[46],
b[47],
b[48],
b[49],
b[50],
b[24],
b[25],
b[65],
b[66],
b[70],
b[71],
];
bb = bb.concat(window_env_list).concat(b[72]);
return rc4_encrypt(
String.fromCharCode.apply(null, bb),
String.fromCharCode.apply(null, [121])
);
}
// Produce the 12-character random prefix of the a_bogus string: three
// 4-byte groups from gener_random, each mixed with a fixed option pair,
// concatenated as char codes.
function generate_random_str() {
  const optionPairs = [
    [3, 45],
    [1, 0],
    [1, 5],
  ];
  let codes = [];
  for (const option of optionPairs) {
    codes = codes.concat(gener_random(Math.random() * 10000, option));
  }
  return String.fromCharCode.apply(null, codes);
}
// Compose the final a_bogus query value: a 12-character random prefix
// plus the RC4 "bb" payload built from the query string, the user agent,
// and a fixed browser-environment fingerprint; the whole thing is then
// encoded with the "s4" alphabet, suffixed with "=", and URL-encoded.
/**
 * url_search_params"device_platform=webapp&aid=6383&channel=channel_pc_web&update_version_code=170400&pc_client_type=1&version_code=170400&version_name=17.4.0&cookie_enabled=true&screen_width=1536&screen_height=864&browser_language=zh-CN&browser_platform=Win32&browser_name=Chrome&browser_version=123.0.0.0&browser_online=true&engine_name=Blink&engine_version=123.0.0.0&os_name=Windows&os_version=10&cpu_core_num=16&device_memory=8&platform=PC&downlink=10&effective_type=4g&round_trip_time=50&webid=7362810250930783783&msToken=VkDUvz1y24CppXSl80iFPr6ez-3FiizcwD7fI1OqBt6IICq9RWG7nCvxKb8IVi55mFd-wnqoNkXGnxHrikQb4PuKob5Q-YhDp5Um215JzlBszkUyiEvR"
 * user_agent"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36"
 */
function generate_a_bogus(url_search_params, user_agent) {
  const fingerprint =
    "1536|747|1536|834|0|30|0|0|1536|834|1536|864|1525|747|24|24|Win32";
  const payload = generate_rc4_bb_str(url_search_params, user_agent, fingerprint);
  const result_str = generate_random_str() + payload;
  return encodeURIComponent(result_encrypt(result_str, "s4") + "=");
}
//测试调用
// console.log(generate_a_bogus(
// "device_platform=webapp&aid=6383&channel=channel_pc_web&update_version_code=170400&pc_client_type=1&version_code=170400&version_name=17.4.0&cookie_enabled=true&screen_width=1536&screen_height=864&browser_language=zh-CN&browser_platform=Win32&browser_name=Chrome&browser_version=123.0.0.0&browser_online=true&engine_name=Blink&engine_version=123.0.0.0&os_name=Windows&os_version=10&cpu_core_num=16&device_memory=8&platform=PC&downlink=10&effective_type=4g&round_trip_time=50&webid=7362810250930783783&msToken=VkDUvz1y24CppXSl80iFPr6ez-3FiizcwD7fI1OqBt6IICq9RWG7nCvxKb8IVi55mFd-wnqoNkXGnxHrikQb4PuKob5Q-YhDp5Um215JzlBszkUyiEvR",
// "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36"
// ));

View File

@@ -1,11 +1,14 @@
use serde_derive::Deserialize;
use serde_derive::Serialize;
use serde_json::Value;
use std::collections::HashMap;
/// Top-level payload returned by Douyin's desktop web room-info API.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DouyinRoomInfoResponse {
    pub data: Data,
    // Opaque extra metadata; accepted but unused.
    #[serde(default)]
    pub extra: Option<serde_json::Value>,
    // NOTE(review): 0 appears to mean success, following the convention
    // of the other Douyin endpoints in this client — confirm with callers.
    #[serde(rename = "status_code")]
    pub status_code: i64,
}
@@ -14,9 +17,29 @@ pub struct DouyinRoomInfoResponse {
#[serde(rename_all = "camelCase")]
pub struct Data {
pub data: Vec<Daum>,
#[serde(rename = "enter_room_id", default)]
pub enter_room_id: Option<String>,
#[serde(default)]
pub extra: Option<serde_json::Value>,
pub user: User,
#[serde(rename = "qrcode_url", default)]
pub qrcode_url: Option<String>,
#[serde(rename = "enter_mode", default)]
pub enter_mode: Option<i64>,
#[serde(rename = "room_status")]
pub room_status: i64,
#[serde(rename = "partition_road_map", default)]
pub partition_road_map: Option<serde_json::Value>,
#[serde(rename = "similar_rooms", default)]
pub similar_rooms: Option<Vec<serde_json::Value>>,
#[serde(rename = "shark_decision_conf", default)]
pub shark_decision_conf: Option<String>,
#[serde(rename = "web_stream_url", default)]
pub web_stream_url: Option<serde_json::Value>,
#[serde(rename = "login_lead", default)]
pub login_lead: Option<serde_json::Value>,
#[serde(rename = "auth_cert_info", default)]
pub auth_cert_info: Option<String>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
@@ -28,9 +51,36 @@ pub struct Daum {
#[serde(rename = "status_str")]
pub status_str: String,
pub title: String,
#[serde(rename = "user_count_str", default)]
pub user_count_str: Option<String>,
pub cover: Option<Cover>,
#[serde(rename = "stream_url")]
pub stream_url: Option<StreamUrl>,
#[serde(default)]
pub owner: Option<Owner>,
#[serde(rename = "room_auth", default)]
pub room_auth: Option<RoomAuth>,
#[serde(rename = "live_room_mode", default)]
pub live_room_mode: Option<i64>,
#[serde(default)]
pub stats: Option<Stats>,
#[serde(rename = "has_commerce_goods", default)]
pub has_commerce_goods: Option<bool>,
#[serde(rename = "linker_map", default)]
pub linker_map: Option<LinkerMap>,
#[serde(rename = "linker_detail", default)]
pub linker_detail: Option<LinkerDetail>,
#[serde(rename = "room_view_stats", default)]
pub room_view_stats: Option<RoomViewStats>,
#[serde(rename = "scene_type_info", default)]
pub scene_type_info: Option<SceneTypeInfo>,
#[serde(rename = "like_count", default)]
pub like_count: Option<i64>,
#[serde(rename = "owner_user_id_str", default)]
pub owner_user_id_str: Option<String>,
// Many other fields that can be ignored for now
#[serde(flatten)]
pub other_fields: HashMap<String, serde_json::Value>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
@@ -56,8 +106,8 @@ pub struct StreamUrl {
#[serde(rename = "live_core_sdk_data")]
pub live_core_sdk_data: LiveCoreSdkData,
pub extra: Extra,
#[serde(rename = "pull_datas")]
pub pull_datas: PullDatas,
#[serde(rename = "pull_datas", default)]
pub pull_datas: Option<serde_json::Value>,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
@@ -182,10 +232,7 @@ pub struct Extra {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PullDatas {}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct Owner {
#[serde(rename = "id_str")]
pub id_str: String,
@@ -234,6 +281,7 @@ pub struct Subscribe {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct RoomAuth {
#[serde(rename = "Chat")]
pub chat: bool,
@@ -383,6 +431,7 @@ pub struct RoomAuth {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct SpecialStyle {
#[serde(rename = "Chat")]
pub chat: Chat,
@@ -392,6 +441,7 @@ pub struct SpecialStyle {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct Chat {
#[serde(rename = "UnableStyle")]
pub unable_style: i64,
@@ -407,6 +457,7 @@ pub struct Chat {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct Like {
#[serde(rename = "UnableStyle")]
pub unable_style: i64,
@@ -422,6 +473,7 @@ pub struct Like {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct Stats {
#[serde(rename = "total_user_desp")]
pub total_user_desp: String,
@@ -435,10 +487,12 @@ pub struct Stats {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct LinkerMap {}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct LinkerDetail {
#[serde(rename = "linker_play_modes")]
pub linker_play_modes: Vec<Value>,
@@ -476,14 +530,17 @@ pub struct LinkerDetail {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct LinkerMapStr {}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct PlaymodeDetail {}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct RoomViewStats {
#[serde(rename = "is_hidden")]
pub is_hidden: bool,
@@ -510,6 +567,7 @@ pub struct RoomViewStats {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct SceneTypeInfo {
#[serde(rename = "is_union_live_room")]
pub is_union_live_room: bool,
@@ -529,6 +587,7 @@ pub struct SceneTypeInfo {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct EntranceList {
#[serde(rename = "group_id")]
pub group_id: i64,
@@ -549,6 +608,7 @@ pub struct EntranceList {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct Icon {
#[serde(rename = "url_list")]
pub url_list: Vec<String>,
@@ -770,6 +830,7 @@ pub struct H5Owner {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct H5AvatarThumb {
#[serde(rename = "url_list")]
pub url_list: Vec<String>,

View File

@@ -3,7 +3,7 @@ use serde_derive::Serialize;
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct StreamInfo {
pub struct DouyinStream {
pub data: Data,
}
@@ -15,6 +15,7 @@ pub struct Data {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct Ld {
pub main: Main,
}
@@ -28,6 +29,7 @@ pub struct Main {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct Md {
pub main: Main,
}
@@ -40,23 +42,27 @@ pub struct Origin {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct Sd {
pub main: Main,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct Hd {
pub main: Main,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct Ao {
pub main: Main,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[allow(dead_code)]
pub struct Uhd {
pub main: Main,
}

View File

@@ -0,0 +1,156 @@
use crate::account::Account;
use crate::platforms::huya::extractor::StreamInfo;
use crate::utils::user_agent_generator;
use crate::RoomInfo;
use crate::UserInfo;
use super::errors::HuyaClientError;
use reqwest::Client;
use scraper::Html;
use scraper::Selector;
use std::path::Path;
/// Build a header map that carries only a freshly generated `user-agent`.
fn generate_user_agent_header() -> reqwest::header::HeaderMap {
    let mut headers = reqwest::header::HeaderMap::new();
    let ua = user_agent_generator::UserAgentGenerator::new().generate(true);
    headers.insert("user-agent", ua.parse().unwrap());
    headers
}
/// Fetch a Huya user's name and avatar by scraping their mobile video
/// page (`m.huya.com/video/u/{id}`), since no public JSON endpoint is
/// used here.
///
/// # Errors
/// `HuyaClientError::InvalidCookie` when the account cookies are not a
/// valid header value; HTTP errors are propagated.
pub async fn get_user_info(
    client: &Client,
    account: &Account,
) -> Result<UserInfo, HuyaClientError> {
    // https://m.huya.com/video/u/2246697169
    let mut headers = generate_user_agent_header();
    if let Ok(cookies) = account.cookies.parse() {
        headers.insert("cookie", cookies);
    } else {
        return Err(HuyaClientError::InvalidCookie);
    }
    let url = format!("https://m.huya.com/video/u/{}", account.id);
    let response = client.get(url).headers(headers).send().await?;
    let raw_content = response.text().await?;
    // Expected page fragment being scraped:
    // <div class="video-list-info">
    //     <div class="podcast-box clearfix">
    //         <img src="http://huyaimg.msstatic.com/avatar/1060/3f/0e6c0694867ef98e9f869589608ce3_180_135.jpg" alt="">
    //         <div class="podcast-info-intro">
    //             <h2>X inrea </h2>
    //             <p></p>
    //         </div>
    //     </div>
    // </div>
    let document = Html::parse_document(&raw_content);
    let avatar_selector = Selector::parse(".video-list-info .podcast-box img").unwrap();
    let name_selector = Selector::parse(".video-list-info .podcast-info-intro h2").unwrap();
    // Extract the avatar (img src attribute).
    let avatar = document
        .select(&avatar_selector)
        .next()
        .and_then(|img| img.value().attr("src"))
        .map(|src| src.to_string());
    // Extract the display name (h2 text), dropping empty results.
    let name = document
        .select(&name_selector)
        .next()
        .map(|h2| h2.text().collect::<String>().trim().to_string())
        .filter(|s| !s.is_empty());
    // Missing fields degrade to empty strings rather than an error.
    Ok(UserInfo {
        user_id: account.id.clone(),
        user_name: name.unwrap_or_default(),
        user_avatar: avatar.unwrap_or_default(),
    })
}
/// Fetch the mobile room page for `room_id` and extract the user, room
/// and stream information embedded in it.
///
/// # Errors
/// `HuyaClientError::InvalidCookie` when the account cookies are not a
/// valid header value; HTTP and extractor errors are propagated.
pub async fn get_room_info(
    client: &Client,
    account: &Account,
    room_id: &str,
) -> Result<(UserInfo, RoomInfo, StreamInfo), HuyaClientError> {
    let mut headers = generate_user_agent_header();
    let Ok(cookie_value) = account.cookies.parse() else {
        return Err(HuyaClientError::InvalidCookie);
    };
    headers.insert("cookie", cookie_value);
    headers.insert("Referer", "https://m.huya.com/".parse().unwrap());

    let page_url = format!("https://m.huya.com/{room_id}");
    let body = client
        .get(page_url)
        .headers(headers)
        .send()
        .await?
        .text()
        .await?;
    Ok(super::extractor::LiveStreamExtractor::extract_infos(&body)?)
}
/// Download file from url to path.
///
/// Creates the destination directory if necessary, then fetches the body
/// and writes it to `path` in one shot.
///
/// Fixes over the previous version: no longer panics via
/// `path.parent().unwrap()` when `path` has no parent component, and uses
/// async `tokio::fs` instead of blocking `std::fs` inside an async fn.
///
/// # Errors
/// Propagates `reqwest` transport errors and filesystem I/O errors as
/// `HuyaClientError`.
pub async fn download_file(client: &Client, url: &str, path: &Path) -> Result<(), HuyaClientError> {
    if let Some(parent) = path.parent() {
        if !parent.exists() {
            tokio::fs::create_dir_all(parent).await?;
        }
    }
    let response = client.get(url).send().await?;
    // NOTE: buffers the whole body in memory; acceptable for small assets
    // such as covers and avatars.
    let bytes = response.bytes().await?;
    tokio::fs::write(path, &bytes).await?;
    Ok(())
}
pub async fn get_index_content(client: &Client, url: &str) -> Result<String, HuyaClientError> {
let headers = generate_user_agent_header();
let response = client.get(url).headers(headers).send().await?;
if response.status().is_success() {
Ok(response.text().await?)
} else {
log::error!("get_index_content failed: {}", response.status());
Err(HuyaClientError::InvalidStream)
}
}
#[cfg(test)]
mod tests {
    use crate::platforms::PlatformType;
    use super::*;
    // Live integration test: scrapes a real m.huya.com profile page over
    // the network. It only prints the result (no assertions beyond not
    // panicking) and will fail offline or if the page layout changes.
    #[tokio::test]
    async fn test_get_user_info() {
        let client = Client::new();
        let account = Account {
            platform: PlatformType::Huya.as_str().to_string(),
            id: "2246697169".to_string(),
            name: "X inrea ".to_string(),
            avatar: "https://huyaimg.msstatic.com/avatar/1060/3f/0e6c0694867ef98e9f869589608ce3_180_135.jpg".to_string(),
            csrf: "".to_string(),
            cookies: "".to_string(),
        };
        let user_info = get_user_info(&client, &account).await.unwrap();
        println!("{:?}", user_info);
    }
    // Live integration test: extracts room/stream info for a real room and
    // then fetches its HLS index. Network-dependent smoke test only.
    #[tokio::test]
    async fn test_get_room_info() {
        // set log level to debug
        std::env::set_var("RUST_LOG", "debug");
        let _ = env_logger::try_init();
        let client = Client::new();
        let account = Account::default();
        let (user_info, room_info, stream_info) =
            get_room_info(&client, &account, "599934").await.unwrap();
        println!("{:?}", user_info);
        println!("{:?}", room_info);
        println!("{:?}", stream_info);
        // query index content
        let index_content = get_index_content(&client, &stream_info.hls_url)
            .await
            .unwrap();
        println!("{:?}", index_content);
    }
}

View File

@@ -0,0 +1,51 @@
use thiserror::Error;
/// Error type for the Huya platform client.
///
/// Covers HTTP transport failures (`ClientError`), local I/O (`IOError`),
/// malformed or unexpected responses, cookie problems, stream/format
/// lookup failures and upload lifecycle errors. Display text comes from
/// the `#[error]` attributes.
#[derive(Error, Debug)]
pub enum HuyaClientError {
    #[error("Invalid response")]
    InvalidResponse,
    #[error("Client init error")]
    InitClientError,
    #[error("Invalid response status: {status}")]
    InvalidResponseStatus { status: reqwest::StatusCode },
    #[error("Invalid response json: {resp}")]
    InvalidResponseJson { resp: serde_json::Value },
    #[error("Invalid message code: {code}")]
    InvalidMessageCode { code: u64 },
    #[error("Invalid value")]
    InvalidValue,
    #[error("Invalid url")]
    InvalidUrl,
    #[error("Invalid stream format")]
    InvalidFormat,
    #[error("Invalid stream")]
    InvalidStream,
    // Account cookies could not be turned into a valid header value.
    #[error("Invalid cookie")]
    InvalidCookie,
    #[error("Upload error: {err}")]
    UploadError { err: String },
    #[error("Upload was cancelled by user")]
    UploadCancelled,
    #[error("Empty cache")]
    EmptyCache,
    // `#[from]` lets `?` convert reqwest/io errors automatically.
    #[error("Client error: {0}")]
    ClientError(#[from] reqwest::Error),
    #[error("IO error: {0}")]
    IOError(#[from] std::io::Error),
    #[error("Security control error")]
    SecurityControlError,
    #[error("API error: {0}")]
    ApiError(String),
    #[error("Format not found: {0}")]
    FormatNotFound(String),
    #[error("Codec not found: {0}")]
    CodecNotFound(String),
    #[error("Extractor error: {0}")]
    ExtractorError(String),
}
/// Convert the error into its `Display` text so it can be returned
/// wherever a plain `String` error is expected.
impl From<HuyaClientError> for String {
    fn from(err: HuyaClientError) -> Self {
        format!("{err}")
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,237 @@
pub mod api;
pub mod errors;
mod extractor;
pub mod url_builder;
use crate::account::Account;
use crate::core::hls_recorder::{construct_stream_from_variant, HlsRecorder};
use crate::core::{Codec, Format};
use crate::errors::RecorderError;
use crate::events::RecorderEvent;
use crate::platforms::huya::extractor::StreamInfo;
use crate::traits::RecorderTrait;
use crate::{Recorder, RoomInfo, UserInfo};
use async_trait::async_trait;
use chrono::Utc;
use rand::random;
use std::path::PathBuf;
use std::sync::{atomic, Arc};
use std::time::Duration;
use tokio::sync::{broadcast, Mutex, RwLock};
use crate::danmu::DanmuStorage;
use crate::platforms::PlatformType;
pub type HuyaRecorder = Recorder<HuyaExtra>;
/// Huya-specific recorder state carried by the generic `Recorder`
/// (see the `HuyaRecorder` type alias above).
#[derive(Clone)]
pub struct HuyaExtra {
    /// Stream info of the current live session; `None` while offline.
    live_stream: Arc<RwLock<Option<StreamInfo>>>,
}
impl HuyaRecorder {
    /// Create a recorder for the given Huya `room_id`.
    ///
    /// `channel` receives lifecycle events (live start/end, record start/end),
    /// `update_interval` is the status-poll interval in seconds, and `enabled`
    /// controls whether recording starts automatically when the room is live.
    pub async fn new(
        room_id: &str,
        account: &Account,
        cache_dir: PathBuf,
        channel: broadcast::Sender<RecorderEvent>,
        update_interval: Arc<atomic::AtomicU64>,
        enabled: bool,
    ) -> Result<Self, crate::errors::RecorderError> {
        Ok(Self {
            platform: PlatformType::Huya,
            room_id: room_id.to_string(),
            account: account.clone(),
            client: reqwest::Client::new(),
            event_channel: channel,
            cache_dir,
            quit: Arc::new(atomic::AtomicBool::new(false)),
            enabled: Arc::new(atomic::AtomicBool::new(enabled)),
            is_recording: Arc::new(atomic::AtomicBool::new(false)),
            room_info: Arc::new(RwLock::new(RoomInfo::default())),
            user_info: Arc::new(RwLock::new(UserInfo::default())),
            platform_live_id: Arc::new(RwLock::new(String::new())),
            live_id: Arc::new(RwLock::new(String::new())),
            danmu_storage: Arc::new(RwLock::new(None)),
            last_update: Arc::new(atomic::AtomicI64::new(Utc::now().timestamp())),
            last_sequence: Arc::new(atomic::AtomicU64::new(0)),
            danmu_task: Arc::new(Mutex::new(None)),
            record_task: Arc::new(Mutex::new(None)),
            update_interval,
            total_duration: Arc::new(atomic::AtomicU64::new(0)),
            total_size: Arc::new(atomic::AtomicU64::new(0)),
            extra: HuyaExtra {
                live_stream: Arc::new(RwLock::new(None)),
            },
        })
    }

    /// Poll the room and refresh the cached room/user/stream info.
    ///
    /// Returns `true` when the room is live. On a fetch error the previous
    /// status is returned unchanged, so a transient network failure does not
    /// tear down an ongoing recording.
    async fn check_status(&self) -> bool {
        let pre_live_status = self.room_info.read().await.status;
        match api::get_room_info(&self.client, &self.account, &self.room_id).await {
            Ok((user_info, room_info, stream_info)) => {
                let live_status = room_info.status;
                *self.room_info.write().await = room_info;
                *self.user_info.write().await = user_info;
                if pre_live_status != live_status {
                    // live status changed, reset current record flag
                    log::info!(
                        "[{}]Live status changed to {}, auto_start: {}",
                        &self.room_id,
                        live_status,
                        self.enabled.load(atomic::Ordering::Relaxed)
                    );
                    if live_status {
                        let _ = self.event_channel.send(RecorderEvent::LiveStart {
                            recorder: self.info().await,
                        });
                    } else {
                        let _ = self.event_channel.send(RecorderEvent::LiveEnd {
                            // Fixed: this recorder is for Huya; the event
                            // previously reported PlatformType::Douyin
                            // (copy-paste from the Douyin recorder).
                            platform: PlatformType::Huya,
                            room_id: self.room_id.clone(),
                            recorder: self.info().await,
                        });
                    }
                    self.reset().await;
                }
                if !live_status {
                    self.reset().await;
                    return false;
                }
                let should_record = self.should_record().await;
                if !should_record {
                    return true;
                }
                // Cache the stream info for update_entries and publish the
                // platform-side live id.
                *self.extra.live_stream.write().await = Some(stream_info.clone());
                let platform_live_id = stream_info.id();
                *self.platform_live_id.write().await = platform_live_id;
                true
            }
            Err(e) => {
                log::warn!("[{}]Update room status failed: {}", &self.room_id, e);
                pre_live_status
            }
        }
    }

    /// Reset per-session state: live id, counters, and cached stream info.
    async fn reset(&self) {
        *self.platform_live_id.write().await = String::new();
        self.last_update
            .store(Utc::now().timestamp(), atomic::Ordering::Relaxed);
        self.last_sequence.store(0, atomic::Ordering::Relaxed);
        self.total_duration.store(0, atomic::Ordering::Relaxed);
        self.total_size.store(0, atomic::Ordering::Relaxed);
        *self.extra.live_stream.write().await = None;
    }

    /// Prepare the work directory for `live_id` (cover download, danmu
    /// storage), then drive the HLS recorder for this session.
    async fn update_entries(&self, live_id: &str) -> Result<(), RecorderError> {
        // Get current room info and stream URL
        let room_info = self.room_info.read().await.clone();
        let Some(stream) = self.extra.live_stream.read().await.clone() else {
            return Err(RecorderError::NoStreamAvailable);
        };
        let work_dir = self.work_dir(live_id).await;
        let _ = tokio::fs::create_dir_all(&work_dir.full_path()).await;
        // download cover (best-effort; failures are ignored)
        let cover_url = room_info.room_cover.clone();
        let cover_path = work_dir.with_filename("cover.jpg");
        let _ = api::download_file(&self.client, &cover_url, &cover_path.full_path()).await;
        *self.live_id.write().await = live_id.to_string();
        // Setup danmu store
        let danmu_file_path = work_dir.with_filename("danmu.txt");
        let danmu_storage = DanmuStorage::new(&danmu_file_path.full_path()).await;
        *self.danmu_storage.write().await = danmu_storage;
        // Abort any danmu task left over from a previous session.
        // (The original code performed this identical abort twice in a row;
        // once is sufficient.)
        if let Some(danmu_task) = self.danmu_task.lock().await.as_mut() {
            danmu_task.abort();
        }
        let _ = self.event_channel.send(RecorderEvent::RecordStart {
            recorder: self.info().await,
        });
        log::debug!("[{}]Stream URL: {}", &self.room_id, stream.hls_url);
        let hls_stream =
            construct_stream_from_variant(live_id, &stream.hls_url, Format::TS, Codec::Avc)
                .await
                .map_err(|_| RecorderError::NoStreamAvailable)?;
        let hls_recorder = HlsRecorder::new(
            self.room_id.clone(),
            Arc::new(hls_stream),
            self.client.clone(),
            Some(self.account.cookies.clone()),
            self.event_channel.clone(),
            work_dir.full_path(),
            self.enabled.clone(),
        )
        .await;
        if let Err(e) = hls_recorder.start().await {
            log::error!("[{}]Failed to start hls recorder: {}", &self.room_id, e);
            return Err(e);
        }
        Ok(())
    }
}
#[async_trait]
impl crate::traits::RecorderTrait<HuyaExtra> for HuyaRecorder {
    /// Spawn the background polling/recording loop and store its handle in
    /// `record_task` so `stop()` (trait default) can abort it later.
    async fn run(&self) {
        let self_clone = self.clone();
        *self.record_task.lock().await = Some(tokio::spawn(async move {
            while !self_clone.quit.load(atomic::Ordering::Relaxed) {
                if self_clone.check_status().await {
                    // Live status is ok, start recording
                    if self_clone.should_record().await {
                        self_clone
                            .is_recording
                            .store(true, atomic::Ordering::Relaxed);
                        // Local live id is a millisecond timestamp.
                        let live_id = Utc::now().timestamp_millis().to_string();
                        // update_entries appears to run for the whole recording
                        // session (it awaits the HLS recorder) — confirm.
                        if let Err(e) = self_clone.update_entries(&live_id).await {
                            log::error!("[{}]Update entries error: {}", &self_clone.room_id, e);
                        }
                    }
                    if self_clone.is_recording.load(atomic::Ordering::Relaxed) {
                        // A recording session just finished; notify listeners.
                        let _ = self_clone.event_channel.send(RecorderEvent::RecordEnd {
                            recorder: self_clone.info().await,
                        });
                    }
                    self_clone
                        .is_recording
                        .store(false, atomic::Ordering::Relaxed);
                    self_clone.reset().await;
                    // Check status again after some seconds
                    // (0-4 s random jitter before re-polling a live room).
                    let secs = random::<u64>() % 5;
                    tokio::time::sleep(Duration::from_secs(secs)).await;
                    continue;
                }
                // Room offline: sleep for the configured poll interval.
                tokio::time::sleep(Duration::from_secs(
                    self_clone.update_interval.load(atomic::Ordering::Relaxed),
                ))
                .await;
            }
            log::info!("[{}]Recording thread quit.", &self_clone.room_id);
        }));
    }
}

View File

@@ -0,0 +1,215 @@
/**
* 虎牙直播播放器URL构建函数
* 基于对虎牙直播JavaScript代码的深入分析
* 谢谢 Claude 4.5 Sonnet
*/
/**
 * Build the final playback URL for a Huya stream.
 *
 * @param {Object} info - player configuration
 * @param {string} info.url - decoded base URL
 * @param {string} info.sStreamName - stream name
 * @param {string} info.presenterUid - presenter (streamer) UID
 * @param {string} info.sFlvAntiCode - FLV anti-hotlinking query fragment
 * @param {string} info.sHlsAntiCode - HLS anti-hotlinking query fragment
 * @param {string} info.sP2pAntiCode - P2P anti-hotlinking query fragment
 * @param {number} info.uid - user id
 * @param {string} info.sGuid - device GUID
 * @param {number} info.appid - application id
 * @param {string} info.type - player type (P2PFLV / HLS / P2P)
 * @param {number} info.playTimeout - playback timeout
 * @param {string} info.h5Root - H5 root path
 * @returns {string} the complete playback URL
 * @throws {Error} when info.url is missing
 */
function buildPlayerUrl(info) {
  if (!info.url) {
    throw new Error("URL is required");
  }

  let url = info.url;
  // Make sure the base URL ends at a query-string boundary.
  if (!url.includes("?")) {
    url += "?";
  } else if (!url.endsWith("&") && !url.endsWith("?")) {
    url += "&";
  }

  // Append the anti-hotlinking fragment matching the player type.
  const antiCodes = {
    P2PFLV: info.sFlvAntiCode,
    HLS: info.sHlsAntiCode,
    P2P: info.sP2pAntiCode,
  };
  if (antiCodes[info.type]) {
    url += antiCodes[info.type];
  }

  // Helper: append one URL-encoded key=value pair.
  const add = (key, value) => {
    url += "&" + key + "=" + encodeURIComponent(value);
  };

  // Identity parameters.
  if (info.uid !== undefined) add("uid", info.uid);
  if (info.sGuid) add("sGuid", info.sGuid);
  if (info.appid !== undefined) add("appid", info.appid);

  // Stream parameters.
  if (info.sStreamName) add("sStreamName", info.sStreamName);
  if (info.presenterUid) add("presenterUid", info.presenterUid);

  // Playback configuration.
  if (info.playTimeout) add("playTimeout", info.playTimeout);
  if (info.h5Root) add("h5Root", info.h5Root);

  // Dynamic per-request parameters (not URL-encoded, matching the player).
  url += "&t=" + Date.now();
  url += "&seqId=" + generateSeqId();
  url += "&ver=1";
  url += "&sv=" + getVersion();

  return url;
}
/**
 * Generate a request sequence id, mimicking the player's internal
 * getAnticodeSeqid() method.
 * @returns {string} "<ms-timestamp>_<random>"
 */
function generateSeqId() {
  const rand = Math.floor(Math.random() * 1000000);
  return `${Date.now()}_${rand}`;
}
/**
 * Current version tag in YYYYMMDDHHmm format (local time),
 * mimicking the player's internal version string.
 * @returns {string} 12-digit version tag
 */
function getVersion() {
  const pad = (v) => String(v).padStart(2, "0");
  const d = new Date();
  return (
    "" +
    d.getFullYear() +
    pad(d.getMonth() + 1) +
    pad(d.getDate()) +
    pad(d.getHours()) +
    pad(d.getMinutes())
  );
}
/**
 * Build a complete playback URL from a Base64-encoded liveLineUrl.
 * @param {string} liveLineUrl - Base64-encoded liveLineUrl
 * @param {Object} streamInfo - stream information object
 * @param {Object} userInfo - user information object
 * @returns {string} the complete playback URL
 */
function buildUrlFromLiveLineUrl(liveLineUrl, streamInfo, userInfo) {
  // Decode the Base64 URL, then delegate to buildPlayerUrl with defaults
  // filled in for missing user/stream fields.
  return buildPlayerUrl({
    url: atob(liveLineUrl),
    sStreamName: streamInfo.sStreamName,
    presenterUid: streamInfo.presenterUid,
    sFlvAntiCode: streamInfo.sFlvAntiCode,
    sHlsAntiCode: streamInfo.sHlsAntiCode,
    sP2pAntiCode: streamInfo.sP2pAntiCode,
    uid: userInfo.uid || 0,
    sGuid: userInfo.sGuid || "",
    appid: userInfo.appid || 66,
    type: streamInfo.type || "P2PFLV",
    playTimeout: streamInfo.playTimeout || 5000,
    h5Root: "https://hd.huya.com/cdn_libs/mobile/",
  });
}
/**
 * Split a Huya playback URL into its base URL and query parameters.
 * @param {string} url - the complete playback URL
 * @returns {Object} { baseUrl, params } where params is a plain object
 */
function parsePlayerUrl(url) {
  const u = new URL(url);
  return {
    baseUrl: u.origin + u.pathname,
    // Last occurrence wins for duplicate keys, same as assigning in a loop.
    params: Object.fromEntries(u.searchParams),
  };
}
/**
 * Check whether a playback URL carries all required query parameters
 * (uid, sGuid, appid, seqId, t).
 * @param {string} url - playback URL
 * @returns {boolean} true when the URL parses and has every required param
 */
function validatePlayerUrl(url) {
  let parsed;
  try {
    parsed = new URL(url);
  } catch (e) {
    return false;
  }
  return ["uid", "sGuid", "appid", "seqId", "t"].every((p) =>
    parsed.searchParams.has(p)
  );
}
// Module-load notice (user-facing Chinese message, kept as-is).
console.log("虎牙直播播放器URL构建函数已加载");

// Example usage: build a URL from captured real-world stream parameters.
const exampleInfo = {
  url: "https://tx.hls.huya.com/src/431653844-431653844-1853939143172685824-863431144-10057-A-0-1-imgplus.m3u8?ratio=2000&wsSecret=725304fc2867cbe6254f12b264055136&wsTime=68fb9aa9&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103",
  sStreamName:
    "431653844-431653844-1853939143172685824-863431144-10057-A-0-1-imgplus",
  presenterUid: 431653844,
  sFlvAntiCode:
    "wsSecret=820369d885b161baa5a7a82170881d78&wsTime=68fb97be&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103",
  uid: 2246697169,
  sGuid: "0af264cd4955d5688902472c482cb47c",
  appid: 66,
  type: "HLS",
  playTimeout: 5000,
  h5Root: "https://hd.huya.com/cdn_libs/mobile/",
};

const playerUrl = buildPlayerUrl(exampleInfo);
console.log("构建的播放URL:", playerUrl);

View File

@@ -0,0 +1,215 @@
use std::collections::HashMap;
use url::Url;
/// Player configuration used to build a playable Huya URL.
#[derive(Debug, Clone)]
pub struct PlayerInfo {
    /// Decoded base URL.
    pub url: String,
    /// Stream name (`sStreamName`).
    pub s_stream_name: Option<String>,
    /// Presenter (streamer) UID.
    pub presenter_uid: Option<String>,
    /// HLS anti-hotlinking query fragment (`sHlsAntiCode`).
    pub s_hls_anti_code: Option<String>,
}
/// URL builder: stateless namespace for Huya player-URL helpers.
pub struct UrlBuilder;
impl UrlBuilder {
    /// Generate a pseudo-unique anonymous uid from the current millisecond
    /// timestamp scaled by 1000 plus a random component.
    fn generate_uid() -> u64 {
        let timestamp = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap()
            .as_millis();
        let random = fastrand::u32(0..1000000);
        timestamp as u64 * 1000 + random as u64
    }

    /// Generate a pseudo-unique device GUID.
    ///
    /// Shares the `<millis>_<random>` shape with [`Self::generate_seq_id`],
    /// so it delegates instead of duplicating the logic (the previous body
    /// was a verbatim copy).
    fn generate_s_guid() -> String {
        Self::generate_seq_id()
    }

    /// Build the final player URL.
    ///
    /// # Arguments
    /// * `info` - player configuration
    ///
    /// # Returns
    /// * `Result<String, String>` - the complete playback URL, or an error
    ///   message when `info.url` is empty
    pub fn build_player_url(info: &PlayerInfo) -> Result<String, String> {
        if info.url.is_empty() {
            return Err("URL is required".to_string());
        }
        let mut base_url = info.url.clone();
        // Make sure the base URL ends at a query-string boundary ('?' or '&').
        if !base_url.contains('?') {
            base_url.push('?');
        } else if !base_url.ends_with('&') && !base_url.ends_with('?') {
            base_url.push('&');
        }
        // Append the HLS anti-hotlinking fragment, if any.
        if let Some(anti_code) = &info.s_hls_anti_code {
            base_url.push_str(anti_code);
        }
        // Identity parameters (generated, not user-supplied).
        base_url.push_str(&format!("&uid={}", Self::generate_uid()));
        base_url.push_str(&format!("&sGuid={}", Self::generate_s_guid()));
        base_url.push_str(&format!("&appid={}", 66));
        // Stream parameters (URL-encoded).
        if let Some(s_stream_name) = &info.s_stream_name {
            base_url.push_str(&format!(
                "&sStreamName={}",
                urlencoding::encode(s_stream_name)
            ));
        }
        if let Some(presenter_uid) = &info.presenter_uid {
            base_url.push_str(&format!(
                "&presenterUid={}",
                urlencoding::encode(presenter_uid)
            ));
        }
        // Playback configuration.
        base_url.push_str(&format!("&playTimeout={}", 5000));
        base_url.push_str(&format!(
            "&h5Root={}",
            "https://hd.huya.com/cdn_libs/mobile/"
        ));
        // Dynamic per-request parameters.
        let timestamp = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap()
            .as_millis();
        base_url.push_str(&format!("&t={}", timestamp));
        let seq_id = Self::generate_seq_id();
        base_url.push_str(&format!("&seqId={}", seq_id));
        // Remaining fixed parameters.
        base_url.push_str("&ver=1");
        base_url.push_str(&format!("&sv={}", Self::get_version()));
        Ok(base_url)
    }

    /// Parse a Huya playback URL into its base URL and query parameters.
    ///
    /// # Arguments
    /// * `url` - the complete playback URL
    ///
    /// # Returns
    /// * `Result<(String, HashMap<String, String>), String>` - base URL and
    ///   parameter map (last occurrence wins for duplicate keys)
    pub fn parse_player_url(url: &str) -> Result<(String, HashMap<String, String>), String> {
        let url_obj = Url::parse(url).map_err(|e| format!("Failed to parse URL: {}", e))?;
        let mut params = HashMap::new();
        for (key, value) in url_obj.query_pairs() {
            params.insert(key.to_string(), value.to_string());
        }
        let base_url = format!(
            "{}://{}{}",
            url_obj.scheme(),
            url_obj.host_str().unwrap_or(""),
            url_obj.path()
        );
        Ok((base_url, params))
    }

    /// Check whether a playback URL is valid, i.e. parses and carries all
    /// required query parameters (`uid`, `sGuid`, `appid`, `seqId`, `t`).
    ///
    /// # Arguments
    /// * `url` - playback URL
    ///
    /// # Returns
    /// * `bool` - whether the URL is valid
    pub fn validate_player_url(url: &str) -> bool {
        match Url::parse(url) {
            Ok(url_obj) => {
                let params: HashMap<String, String> = url_obj
                    .query_pairs()
                    .map(|(k, v)| (k.to_string(), v.to_string()))
                    .collect();
                let required_params = ["uid", "sGuid", "appid", "seqId", "t"];
                required_params
                    .iter()
                    .all(|param| params.contains_key(*param))
            }
            Err(_) => false,
        }
    }

    /// Generate a sequence id, mimicking the player's internal
    /// `getAnticodeSeqid()` method: `<millis>_<random>`.
    fn generate_seq_id() -> String {
        let timestamp = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap()
            .as_millis();
        let random = fastrand::u32(0..1000000);
        format!("{}_{}", timestamp, random)
    }

    /// Version tag in `YYYYMMDDHHMM` (UTC), mimicking the player's
    /// version string.
    fn get_version() -> String {
        let now = chrono::Utc::now();
        now.format("%Y%m%d%H%M").to_string()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Building a URL from captured real-world stream data should succeed
    /// and include both the fixed and the generated query parameters.
    #[test]
    fn test_build_player_url() {
        let info = PlayerInfo {
            url: "https://tx.hls.huya.com/src/431653844-431653844-1853939143172685824-863431144-10057-A-0-1-imgplus.m3u8?ratio=2000&wsSecret=725304fc2867cbe6254f12b264055136&wsTime=68fb9aa9&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103".to_string(),
            s_stream_name: Some("431653844-431653844-1853939143172685824-863431144-10057-A-0-1-imgplus".to_string()),
            presenter_uid: Some("431653844".to_string()),
            s_hls_anti_code: Some("wsSecret=820369d885b161baa5a7a82170881d78&wsTime=68fb97be&fm=RFdxOEJjSjNoNkRKdDZUWV8kMF8kMV8kMl8kMw%3D%3D&ctype=tars_mobile&fs=bgct&t=103".to_string()),
        };
        let result = UrlBuilder::build_player_url(&info);
        assert!(result.is_ok());
        let url = result.unwrap();
        println!("url: {}", url);
        // Dynamic values (uid, seqId, t) cannot be pinned exactly, so only
        // check for their presence.
        assert!(url.contains("appid=66"));
        assert!(url.contains("seqId="));
        assert!(url.contains("t="));
    }

    /// A URL with all required parameters validates; one missing any of
    /// them does not.
    #[test]
    fn test_validate_player_url() {
        let valid_url =
            "https://example.com/stream.m3u8?uid=123&sGuid=abc&appid=66&seqId=123_456&t=1234567890";
        assert!(UrlBuilder::validate_player_url(valid_url));
        let invalid_url = "https://example.com/stream.m3u8?uid=123&sGuid=abc";
        assert!(!UrlBuilder::validate_player_url(invalid_url));
    }
}

View File

@@ -0,0 +1,42 @@
pub mod bilibili;
pub mod douyin;
pub mod huya;
use std::hash::{Hash, Hasher};
/// Streaming platforms supported by the recorder.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PlatformType {
    BiliBili,
    Douyin,
    Huya,
    Youtube,
}

impl PlatformType {
    /// Canonical lowercase identifier for this platform.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::BiliBili => "bilibili",
            Self::Douyin => "douyin",
            Self::Huya => "huya",
            Self::Youtube => "youtube",
        }
    }
}

impl std::str::FromStr for PlatformType {
    type Err = String;

    /// Parse the canonical identifier produced by [`PlatformType::as_str`].
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "bilibili" => Ok(Self::BiliBili),
            "douyin" => Ok(Self::Douyin),
            "huya" => Ok(Self::Huya),
            "youtube" => Ok(Self::Youtube),
            _ => Err(format!("Invalid platform type: {s}")),
        }
    }
}

impl Hash for PlatformType {
    /// Hash only the discriminant — equivalent to a derived `Hash`
    /// for this field-less enum.
    fn hash<H: Hasher>(&self, state: &mut H) {
        std::mem::discriminant(self).hash(state);
    }
}

View File

@@ -0,0 +1,96 @@
use std::{
path::PathBuf,
sync::{atomic, Arc},
};
use crate::platforms::PlatformType;
use crate::{
account::Account, danmu::DanmuStorage, events::RecorderEvent, CachePath, RecorderInfo,
RoomInfo, UserInfo,
};
use async_trait::async_trait;
use tokio::{
sync::{broadcast, Mutex, RwLock},
task::JoinHandle,
};
/// Accessors over the shared recorder state, implemented by platform
/// recorders so the default methods of `RecorderTrait` can operate
/// generically. `T` is the platform-specific extra state.
#[allow(dead_code)]
pub trait RecorderBasicTrait<T> {
    // Static configuration.
    fn platform(&self) -> PlatformType;
    fn room_id(&self) -> String;
    fn account(&self) -> &Account;
    fn client(&self) -> &reqwest::Client;
    fn event_channel(&self) -> &broadcast::Sender<RecorderEvent>;
    fn cache_dir(&self) -> PathBuf;
    // Control flags.
    fn quit(&self) -> &atomic::AtomicBool;
    fn enabled(&self) -> &atomic::AtomicBool;
    fn is_recording(&self) -> &atomic::AtomicBool;
    // Shared mutable state (async-locked).
    fn room_info(&self) -> Arc<RwLock<RoomInfo>>;
    fn user_info(&self) -> Arc<RwLock<UserInfo>>;
    fn platform_live_id(&self) -> Arc<RwLock<String>>;
    fn live_id(&self) -> Arc<RwLock<String>>;
    // Background task handles.
    fn danmu_task(&self) -> Arc<Mutex<Option<JoinHandle<()>>>>;
    fn record_task(&self) -> Arc<Mutex<Option<JoinHandle<()>>>>;
    fn danmu_storage(&self) -> Arc<RwLock<Option<DanmuStorage>>>;
    // Session counters.
    fn last_update(&self) -> &atomic::AtomicI64;
    fn last_sequence(&self) -> &atomic::AtomicU64;
    fn total_duration(&self) -> &atomic::AtomicU64;
    fn total_size(&self) -> &atomic::AtomicU64;
    /// Platform-specific extra state.
    fn extra(&self) -> &T;
}
/// High-level recorder behavior with default implementations built on
/// the accessors of `RecorderBasicTrait`.
#[async_trait]
pub trait RecorderTrait<T>: RecorderBasicTrait<T> {
    /// Start the recorder's background loop (implementation-specific).
    async fn run(&self);

    /// Signal the loop to quit, then abort and reap both background tasks.
    async fn stop(&self) {
        self.quit().store(true, atomic::Ordering::Relaxed);
        if let Some(danmu_task) = self.danmu_task().lock().await.take() {
            danmu_task.abort();
            // Await the aborted handle so the task is fully torn down;
            // the resulting JoinError is expected and ignored.
            let _ = danmu_task.await;
        }
        if let Some(record_task) = self.record_task().lock().await.take() {
            record_task.abort();
            let _ = record_task.await;
        }
    }

    /// Whether recording should proceed: not quitting AND enabled.
    async fn should_record(&self) -> bool {
        if self.quit().load(atomic::Ordering::Relaxed) {
            return false;
        }
        self.enabled().load(atomic::Ordering::Relaxed)
    }

    /// Cache directory for a single live session.
    async fn work_dir(&self, live_id: &str) -> CachePath {
        CachePath::new(self.cache_dir(), self.platform(), &self.room_id(), live_id)
    }

    /// Snapshot of the recorder's current state (room, user, flags),
    /// used for events and status reporting.
    async fn info(&self) -> RecorderInfo {
        let room_info = self.room_info().read().await.clone();
        let user_info = self.user_info().read().await.clone();
        let is_recording = self.is_recording().load(atomic::Ordering::Relaxed);
        RecorderInfo {
            platform_live_id: self.platform_live_id().read().await.clone(),
            live_id: self.live_id().read().await.clone(),
            recording: is_recording,
            enabled: self.enabled().load(atomic::Ordering::Relaxed),
            room_info: RoomInfo {
                platform: self.platform().as_str().to_string(),
                room_id: self.room_id().to_string(),
                room_title: room_info.room_title.clone(),
                room_cover: room_info.room_cover.clone(),
                status: room_info.status,
            },
            user_info: UserInfo {
                user_id: user_info.user_id.to_string(),
                user_name: user_info.user_name.clone(),
                user_avatar: user_info.user_avatar.clone(),
            },
        }
    }

    /// Enable automatic recording.
    async fn enable(&self) {
        self.enabled().store(true, atomic::Ordering::Relaxed);
    }

    /// Disable automatic recording.
    async fn disable(&self) {
        self.enabled().store(false, atomic::Ordering::Relaxed);
    }
}

View File

@@ -0,0 +1 @@
pub mod user_agent_generator;

View File

@@ -0,0 +1,234 @@
use rand::prelude::*;
/// Random browser User-Agent string generator.
pub struct UserAgentGenerator {
    /// RNG used for all random choices.
    rng: ThreadRng,
}
impl Default for UserAgentGenerator {
    /// Equivalent to [`UserAgentGenerator::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl UserAgentGenerator {
pub fn new() -> Self {
Self { rng: rand::rng() }
}
/// Generate a user agent
///
/// # Arguments
///
/// * `mobile` - Whether to generate a mobile user agent
///
/// # Returns
///
/// A string representing the user agent
pub fn generate(&mut self, mobile: bool) -> String {
if mobile {
return self.generate_mobile();
}
let browser_type = self.rng.random_range(0..4);
match browser_type {
0 => self.generate_chrome(),
1 => self.generate_firefox(),
2 => self.generate_safari(),
_ => self.generate_edge(),
}
}
fn generate_mobile(&mut self) -> String {
let mobile_versions = [
"120.0.0.0",
"119.0.0.0",
"118.0.0.0",
"117.0.0.0",
"116.0.0.0",
"115.0.0.0",
"114.0.0.0",
];
let mobile_version = mobile_versions.choose(&mut self.rng).unwrap();
// 随机选择 Android 或 iOS
if self.rng.random_bool(0.7) {
// Android User-Agent
let android_versions = ["13", "12", "11", "10", "9"];
let android_version = android_versions.choose(&mut self.rng).unwrap();
let device_models = [
"SM-G991B",
"SM-G996B",
"SM-G998B",
"SM-A525F",
"SM-A725F",
"Pixel 6",
"Pixel 7",
"Pixel 8",
"OnePlus 9",
"OnePlus 10",
];
let device_model = device_models.choose(&mut self.rng).unwrap();
format!("Mozilla/5.0 (Linux; Android {android_version}; {device_model}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{mobile_version} Mobile Safari/537.36")
} else {
// iOS User-Agent
let ios_versions = ["17_1", "16_7", "16_6", "15_7", "14_8"];
let ios_version = ios_versions.choose(&mut self.rng).unwrap();
let device_types = ["iPhone; CPU iPhone OS", "iPad; CPU OS"];
let device_type = device_types.choose(&mut self.rng).unwrap();
format!("Mozilla/5.0 ({device_type} {ios_version} like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.1 Mobile/15E148 Safari/604.1")
}
}
fn generate_chrome(&mut self) -> String {
let chrome_versions = [
"120.0.0.0",
"119.0.0.0",
"118.0.0.0",
"117.0.0.0",
"116.0.0.0",
"115.0.0.0",
"114.0.0.0",
];
let webkit_versions = ["537.36", "537.35", "537.34"];
let os = self.get_random_os();
let chrome_version = chrome_versions.choose(&mut self.rng).unwrap();
let webkit_version = webkit_versions.choose(&mut self.rng).unwrap();
format!(
"Mozilla/5.0 ({os}) AppleWebKit/{webkit_version} (KHTML, like Gecko) Chrome/{chrome_version} Safari/{webkit_version}"
)
}
fn generate_firefox(&mut self) -> String {
let firefox_versions = ["121.0", "120.0", "119.0", "118.0", "117.0", "116.0"];
let os = self.get_random_os_firefox();
let firefox_version = firefox_versions.choose(&mut self.rng).unwrap();
format!("Mozilla/5.0 ({os}; rv:{firefox_version}) Gecko/20100101 Firefox/{firefox_version}")
}
fn generate_safari(&mut self) -> String {
let safari_versions = ["17.1", "17.0", "16.6", "16.5", "16.4", "16.3"];
let webkit_versions = ["605.1.15", "605.1.14", "605.1.13"];
let safari_version = safari_versions.choose(&mut self.rng).unwrap();
let webkit_version = webkit_versions.choose(&mut self.rng).unwrap();
// Safari 只在 macOS 和 iOS 上
let is_mobile = self.rng.random_bool(0.3);
if is_mobile {
let ios_versions = ["17_1", "16_7", "16_6", "15_7"];
let ios_version = ios_versions.choose(&mut self.rng).unwrap();
let device = ["iPhone; CPU iPhone OS", "iPad; CPU OS"]
.choose(&mut self.rng)
.unwrap();
format!(
"Mozilla/5.0 ({device} {ios_version} like Mac OS X) AppleWebKit/{webkit_version} (KHTML, like Gecko) Version/{safari_version} Mobile/15E148 Safari/{webkit_version}"
)
} else {
let macos_versions = ["14_1", "13_6", "12_7"];
let macos_version = macos_versions.choose(&mut self.rng).unwrap();
format!(
"Mozilla/5.0 (Macintosh; Intel Mac OS X {macos_version}) AppleWebKit/{webkit_version} (KHTML, like Gecko) Version/{safari_version} Safari/{webkit_version}"
)
}
}
fn generate_edge(&mut self) -> String {
let edge_versions = ["119.0.0.0", "118.0.0.0", "117.0.0.0", "116.0.0.0"];
let chrome_versions = ["119.0.0.0", "118.0.0.0", "117.0.0.0", "116.0.0.0"];
let os = self.get_random_os();
let edge_version = edge_versions.choose(&mut self.rng).unwrap();
let chrome_version = chrome_versions.choose(&mut self.rng).unwrap();
format!(
"Mozilla/5.0 ({os}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{chrome_version} Safari/537.36 Edg/{edge_version}"
)
}
fn get_random_os(&mut self) -> &'static str {
let os_list = [
"Windows NT 10.0; Win64; x64",
"Windows NT 11.0; Win64; x64",
"Macintosh; Intel Mac OS X 10_15_7",
"Macintosh; Intel Mac OS X 10_14_6",
"X11; Linux x86_64",
"X11; Ubuntu; Linux x86_64",
];
os_list.choose(&mut self.rng).unwrap()
}
fn get_random_os_firefox(&mut self) -> &'static str {
let os_list = [
"Windows NT 10.0; Win64; x64",
"Windows NT 11.0; Win64; x64",
"Macintosh; Intel Mac OS X 10.15",
"X11; Linux x86_64",
"X11; Ubuntu; Linux i686",
];
os_list.choose(&mut self.rng).unwrap()
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Desktop UAs must be well-formed and name a known browser family.
    #[test]
    fn test_generate_user_agents() {
        let mut generator = UserAgentGenerator::new();
        for _ in 0..100 {
            let ua = generator.generate(false);
            assert!(!ua.is_empty());
            assert!(ua.starts_with("Mozilla/5.0"));
            // Verify that a common browser identifier is present.
            assert!(
                ua.contains("Chrome")
                    || ua.contains("Firefox")
                    || ua.contains("Safari")
                    || ua.contains("Edg")
            );
        }
    }

    /// Chrome UAs carry the Chrome, Safari and AppleWebKit tokens.
    #[test]
    fn test_chrome_user_agent_format() {
        let mut generator = UserAgentGenerator::new();
        let ua = generator.generate_chrome();
        assert!(ua.contains("Chrome"));
        assert!(ua.contains("Safari"));
        assert!(ua.contains("AppleWebKit"));
    }

    /// Mobile UAs must identify a mobile device and browser.
    #[test]
    fn test_mobile_user_agent_format() {
        let mut generator = UserAgentGenerator::new();
        for _ in 0..50 {
            let ua = generator.generate(true);
            assert!(!ua.is_empty());
            assert!(ua.starts_with("Mozilla/5.0"));
            // Verify that a mobile device identifier is present.
            assert!(ua.contains("Android") || ua.contains("iPhone") || ua.contains("iPad"));
            // Verify that a mobile browser identifier is present:
            // Android UAs contain Chrome and Mobile Safari,
            // iOS UAs contain Safari.
            assert!(ua.contains("Mobile Safari") || ua.contains("Chrome") || ua.contains("Safari"));
        }
    }
}

View File

@@ -1 +1 @@
{"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main","Live*","Clip*"],"permissions":["core:default","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-copy-file","fs:allow-mkdir","fs:allow-remove","fs:allow-remove","fs:allow-rename","fs:allow-exists",{"identifier":"fs:scope","allow":["**"]},"core:window:default","core:window:allow-start-dragging","core:window:allow-close","core:window:allow-minimize","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-set-title","sql:allow-execute","shell:allow-open","dialog:allow-open","dialog:allow-save","dialog:allow-message","dialog:allow-ask","dialog:allow-confirm",{"identifier":"http:default","allow":[{"url":"https://*.hdslb.com/"},{"url":"https://afdian.com/"},{"url":"https://*.afdiancdn.com/"},{"url":"https://*.douyin.com/"},{"url":"https://*.douyinpic.com/"}]},"dialog:default","shell:default","fs:default","http:default","sql:default","os:default","notification:default","dialog:default","fs:default","http:default","shell:default","sql:default","os:default","dialog:default","deep-link:default"]}}
{"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main","Live*","Clip*"],"permissions":["core:default","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-copy-file","fs:allow-mkdir","fs:allow-remove","fs:allow-remove","fs:allow-rename","fs:allow-exists",{"identifier":"fs:scope","allow":["**"]},"core:window:default","core:window:allow-start-dragging","core:window:allow-close","core:window:allow-minimize","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-set-title","sql:allow-execute","shell:allow-open","dialog:allow-open","dialog:allow-save","dialog:allow-message","dialog:allow-ask","dialog:allow-confirm",{"identifier":"http:default","allow":[{"url":"https://*.*"},{"url":"http://*.*"}]},"dialog:default","shell:default","fs:default","http:default","sql:default","os:default","notification:default","dialog:default","fs:default","http:default","shell:default","sql:default","os:default","dialog:default","deep-link:default"]}}

View File

@@ -1,56 +0,0 @@
use std::path::PathBuf;
use std::sync::Arc;
use chrono::Utc;
use crate::database::Database;
use crate::recorder::PlatformType;
/// Scan the cache directory and create database records for live-session
/// folders that exist on disk but are missing from the database.
///
/// For each recorder's cache folder, every sub-directory is treated as a
/// `live_id`; unknown ones get a placeholder record whose timestamp comes
/// from the folder's creation time.
pub async fn try_rebuild_archives(
    db: &Arc<Database>,
    cache_path: PathBuf,
) -> Result<(), Box<dyn std::error::Error>> {
    let rooms = db.get_recorders().await?;
    for room in rooms {
        let room_id = room.room_id;
        let room_cache_path = cache_path.join(format!("{}/{}", room.platform, room_id));
        let mut files = tokio::fs::read_dir(room_cache_path).await?;
        while let Some(file) = files.next_entry().await? {
            if file.file_type().await?.is_dir() {
                // use folder name as live_id
                let live_id = file.file_name();
                let live_id = live_id.to_str().unwrap();
                // check if live_id is in db
                let record = db.get_record(room_id, live_id).await;
                if record.is_ok() {
                    // Already tracked; nothing to rebuild.
                    continue;
                }
                // get created_at from folder metadata
                let metadata = file.metadata().await?;
                let created_at = metadata.created();
                if created_at.is_err() {
                    // Filesystem does not report a creation time; skip.
                    continue;
                }
                let created_at = created_at.unwrap();
                let created_at = chrono::DateTime::<Utc>::from(created_at)
                    .format("%Y-%m-%dT%H:%M:%S.%fZ")
                    .to_string();
                // create a record for this live_id
                let record = db
                    .add_record(
                        PlatformType::from_str(room.platform.as_str()).unwrap(),
                        live_id,
                        room_id,
                        &format!("UnknownLive {}", live_id),
                        None,
                        Some(&created_at),
                    )
                    .await?;
                log::info!("rebuild archive {:?}", record);
            }
        }
    }
    Ok(())
}

View File

@@ -1,9 +1,10 @@
use std::path::{Path, PathBuf};
use chrono::Local;
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use std::sync::atomic::{self, AtomicU64};
use std::sync::Arc;
use crate::{recorder::PlatformType, recorder_manager::ClipRangeParams};
use crate::{danmu2ass::Danmu2AssOptions, recorder_manager::ClipRangeParams};
#[derive(Deserialize, Serialize, Clone)]
pub struct Config {
@@ -35,8 +36,12 @@ pub struct Config {
pub config_path: String,
#[serde(default = "default_whisper_language")]
pub whisper_language: String,
#[serde(default = "default_user_agent")]
pub user_agent: String,
#[serde(default = "default_webhook_url")]
pub webhook_url: String,
#[serde(default = "default_danmu_ass_options")]
pub danmu_ass_options: Danmu2AssOptions,
#[serde(skip)]
pub update_interval: Arc<AtomicU64>,
}
#[derive(Deserialize, Serialize, Clone)]
@@ -45,6 +50,10 @@ pub struct AutoGenerateConfig {
pub encode_danmu: bool,
}
fn default_danmu_ass_options() -> Danmu2AssOptions {
Danmu2AssOptions::default()
}
fn default_auto_subtitle() -> bool {
false
}
@@ -66,7 +75,7 @@ fn default_openai_api_endpoint() -> String {
}
fn default_openai_api_key() -> String {
"".to_string()
String::new()
}
fn default_clip_name_format() -> String {
@@ -88,8 +97,8 @@ fn default_whisper_language() -> String {
"auto".to_string()
}
fn default_user_agent() -> String {
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36".to_string()
fn default_webhook_url() -> String {
String::new()
}
impl Config {
@@ -101,6 +110,7 @@ impl Config {
if let Ok(content) = std::fs::read_to_string(config_path) {
if let Ok(mut config) = toml::from_str::<Config>(&content) {
config.config_path = config_path.to_str().unwrap().into();
config.update_interval = Arc::new(AtomicU64::new(config.status_check_interval));
return Ok(config);
}
}
@@ -129,7 +139,9 @@ impl Config {
status_check_interval: default_status_check_interval(),
config_path: config_path.to_str().unwrap().into(),
whisper_language: default_whisper_language(),
user_agent: default_user_agent(),
webhook_url: default_webhook_url(),
danmu_ass_options: default_danmu_ass_options(),
update_interval: Arc::new(AtomicU64::new(default_status_check_interval())),
};
config.save();
@@ -163,14 +175,12 @@ impl Config {
}
#[allow(dead_code)]
pub fn set_user_agent(&mut self, user_agent: &str) {
self.user_agent = user_agent.to_string();
pub fn set_danmu_ass_options(&mut self, options: Danmu2AssOptions) {
self.danmu_ass_options = options;
self.save();
}
pub fn generate_clip_name(&self, params: &ClipRangeParams) -> PathBuf {
let platform = PlatformType::from_str(&params.platform).unwrap();
// get format config
// filter special characters from title to make sure file name is valid
let title = params
@@ -180,9 +190,10 @@ impl Config {
.collect::<String>();
let format_config = self.clip_name_format.clone();
let format_config = format_config.replace("{title}", &title);
let format_config = format_config.replace("{platform}", platform.as_str());
let format_config = format_config.replace("{platform}", &params.platform);
let format_config = format_config.replace("{room_id}", &params.room_id.to_string());
let format_config = format_config.replace("{live_id}", &params.live_id);
let format_config = format_config.replace("{note}", &params.note);
let format_config = format_config.replace(
"{x}",
&params
@@ -209,8 +220,16 @@ impl Config {
.map_or("0".to_string(), |r| r.duration().to_string()),
);
let sanitized = sanitize_filename::sanitize(&format_config);
let output = self.output.clone();
Path::new(&output).join(&format_config)
Path::new(&output).join(&sanitized)
}
pub fn set_status_check_interval(&mut self, interval: u64) {
self.status_check_interval = interval;
self.update_interval
.store(interval, atomic::Ordering::Relaxed);
self.save();
}
}

View File

@@ -0,0 +1,4 @@
pub const PREFIX_SUBTITLE: &str = "[subtitle]";
pub const PREFIX_IMPORTED: &str = "[imported]";
pub const PREFIX_DANMAKU: &str = "[danmaku]";
pub const PREFIX_CLIP: &str = "[clip]";

View File

@@ -1,4 +1,5 @@
use crate::recorder::danmu::DanmuEntry;
use recorder::danmu::DanmuEntry;
use serde::{Deserialize, Serialize};
use std::collections::VecDeque;
// code reference: https://github.com/tiansh/us-danmaku/blob/master/bilibili/bilibili_ASS_Danmaku_Downloader.user.js
@@ -24,32 +25,56 @@ struct DanmakuPosition {
time: f64,
}
const PLAY_RES_X: f64 = 1920.0;
const PLAY_RES_Y: f64 = 1080.0;
const PLAY_RES_X: f64 = 1280.0;
const PLAY_RES_Y: f64 = 720.0;
const BOTTOM_RESERVED: f64 = 50.0;
const R2L_TIME: f64 = 8.0;
const MAX_DELAY: f64 = 6.0;
pub fn danmu_to_ass(danmus: Vec<DanmuEntry>) -> String {
#[derive(Deserialize, Serialize, Clone)]
pub struct Danmu2AssOptions {
pub font_size: f64,
pub opacity: f64, // 透明度,范围 0.0-1.00.0为完全透明1.0为完全不透明
}
impl Default for Danmu2AssOptions {
fn default() -> Self {
Self {
font_size: 36.0,
opacity: 0.8, // 默认80%透明度
}
}
}
pub fn danmu_to_ass(danmus: Vec<DanmuEntry>, options: Danmu2AssOptions) -> String {
let font_size = options.font_size; // Default font size
let opacity = options.opacity; // 透明度参数
// 将透明度转换为十六进制Alpha值 (0.0-1.0 -> 0x00-0xFF)
let alpha = ((1.0 - opacity) * 255.0) as u8;
let alpha_hex = format!("{:02X}", alpha);
// ASS header
let header = r#"[Script Info]
let header = format!(
r"[Script Info]
Title: Bilibili Danmaku
ScriptType: v4.00+
Collisions: Normal
PlayResX: 1920
PlayResY: 1080
PlayResX: 1280
PlayResY: 720
Timer: 10.0000
[V4+ Styles]
Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding
Style: Default,Microsoft YaHei,48,&H00FFFFFF,&H000000FF,&H00000000,&H00000000,0,0,0,0,100,100,0,0,1,2,0,2,20,20,2,0
Style: Default,微软雅黑,{},&H{}FFFFFF,&H{}FFFFFF,&H{}000000,&H{}000000,0,0,0,0,100,100,0,0,1,1,0,2,20,20,2,0
[Events]
Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
"#;
",
font_size, alpha_hex, alpha_hex, alpha_hex, alpha_hex
);
let mut normal = normal_danmaku();
let font_size = 48.0; // Default font size
// Convert danmus to ASS events
let events = danmus
@@ -76,7 +101,7 @@ Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
"Dialogue: 0,{},{},Default,,0,0,0,,{{\\move({},{},{},{})}}{}",
start_time,
end_time,
PLAY_RES_X,
PLAY_RES_X + text_width / 2.0,
pos.top + font_size, // Start position
-text_width,
pos.top + font_size, // End position
@@ -87,22 +112,22 @@ Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
.join("\n");
// Combine header and events
format!("{}\n{}", header, events)
format!("{header}\n{events}")
}
fn format_time(seconds: f64) -> String {
let hours = (seconds / 3600.0) as i32;
let minutes = ((seconds % 3600.0) / 60.0) as i32;
let seconds = seconds % 60.0;
format!("{}:{:02}:{:05.2}", hours, minutes, seconds)
format!("{hours}:{minutes:02}:{seconds:05.2}")
}
fn escape_text(text: &str) -> String {
text.replace("\\", "\\\\")
.replace("{", "")
.replace("}", "")
.replace("\r", "")
.replace("\n", "\\N")
text.replace('\\', "\\\\")
.replace('{', "")
.replace('}', "")
.replace('\r', "")
.replace('\n', "\\N")
}
fn normal_danmaku() -> impl FnMut(f64, f64, f64, bool) -> Option<DanmakuPosition> {
@@ -144,8 +169,8 @@ fn normal_danmaku() -> impl FnMut(f64, f64, f64, bool) -> Option<DanmakuPosition
let p = space.m;
let m = p + hv;
let mut tas = t0s;
let mut tal = t0l;
let mut time_actual_start = t0s;
let mut time_actual_leave = t0l;
for other in &used {
if other.p >= m || other.m <= p {
@@ -154,13 +179,13 @@ fn normal_danmaku() -> impl FnMut(f64, f64, f64, bool) -> Option<DanmakuPosition
if other.b && b {
continue;
}
tas = tas.max(other.tf);
tal = tal.max(other.td);
time_actual_start = time_actual_start.max(other.tf);
time_actual_leave = time_actual_leave.max(other.td);
}
suggestions.push(PositionSuggestion {
p,
r: (tas - t0s).max(tal - t0l),
r: (time_actual_start - t0s).max(time_actual_leave - t0l),
});
}

View File

@@ -1,16 +1,13 @@
use crate::recorder::PlatformType;
use recorder::account::Account;
use super::Database;
use super::DatabaseError;
use chrono::Utc;
use rand::seq::SliceRandom;
use rand::Rng;
#[derive(Debug, Clone, serde::Serialize, sqlx::FromRow)]
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
pub struct AccountRow {
pub platform: String,
pub uid: u64, // Keep for Bilibili compatibility
pub id_str: Option<String>, // New field for string IDs like Douyin sec_uid
pub uid: String,
pub name: String,
pub avatar: String,
pub csrf: String,
@@ -18,159 +15,42 @@ pub struct AccountRow {
pub created_at: String,
}
impl AccountRow {
pub fn to_account(&self) -> Account {
Account {
platform: self.platform.clone(),
id: self.uid.clone(),
name: self.name.clone(),
avatar: self.avatar.clone(),
csrf: self.csrf.clone(),
cookies: self.cookies.clone(),
}
}
}
// accounts
impl Database {
// CREATE TABLE accounts (uid INTEGER PRIMARY KEY, name TEXT, avatar TEXT, csrf TEXT, cookies TEXT, created_at TEXT);
pub async fn add_account(
&self,
platform: &str,
cookies: &str,
) -> Result<AccountRow, DatabaseError> {
pub async fn add_account(&self, account: &AccountRow) -> Result<(), DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
let platform = PlatformType::from_str(platform).unwrap();
sqlx::query("INSERT INTO accounts (uid, platform, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7)").bind(&account.uid).bind(&account.platform).bind(&account.name).bind(&account.avatar).bind(&account.csrf).bind(&account.cookies).bind(&account.created_at).execute(&lock).await?;
let csrf = if platform == PlatformType::Douyin {
Some("".to_string())
} else {
// parse cookies
cookies
.split(';')
.map(|cookie| cookie.trim())
.find_map(|cookie| -> Option<String> {
match cookie.starts_with("bili_jct=") {
true => {
let var_name = &"bili_jct=";
Some(cookie[var_name.len()..].to_string())
}
false => None,
}
})
};
if csrf.is_none() {
return Err(DatabaseError::InvalidCookiesError);
}
// parse uid and id_str based on platform
let (uid, id_str) = if platform == PlatformType::BiliBili {
// For Bilibili, extract numeric uid from cookies
let uid = cookies
.split("DedeUserID=")
.collect::<Vec<&str>>()
.get(1)
.unwrap()
.split(";")
.collect::<Vec<&str>>()
.first()
.unwrap()
.to_string()
.parse::<u64>()
.map_err(|_| DatabaseError::InvalidCookiesError)?;
(uid, None)
} else {
// For Douyin, use temporary uid and will set id_str later with real sec_uid
let temp_uid = rand::thread_rng().gen_range(10000..=i32::MAX) as u64;
(temp_uid, Some(format!("temp_{}", temp_uid)))
};
let account = AccountRow {
platform: platform.as_str().to_string(),
uid,
id_str,
name: "".into(),
avatar: "".into(),
csrf: csrf.unwrap(),
cookies: cookies.into(),
created_at: Utc::now().to_rfc3339(),
};
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)").bind(account.uid as i64).bind(&account.platform).bind(&account.id_str).bind(&account.name).bind(&account.avatar).bind(&account.csrf).bind(&account.cookies).bind(&account.created_at).execute(&lock).await?;
Ok(account)
Ok(())
}
pub async fn remove_account(&self, platform: &str, uid: u64) -> Result<(), DatabaseError> {
pub async fn remove_account(&self, platform: &str, uid: &str) -> Result<(), DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
let sql = sqlx::query("DELETE FROM accounts WHERE uid = $1 and platform = $2")
.bind(uid as i64)
.bind(uid)
.bind(platform)
.execute(&lock)
.await?;
if sql.rows_affected() != 1 {
return Err(DatabaseError::NotFoundError);
return Err(DatabaseError::NotFound);
}
Ok(())
}
pub async fn update_account(
&self,
platform: &str,
uid: u64,
name: &str,
avatar: &str,
) -> Result<(), DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
let sql = sqlx::query(
"UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4",
)
.bind(name)
.bind(avatar)
.bind(uid as i64)
.bind(platform)
.execute(&lock)
.await?;
if sql.rows_affected() != 1 {
return Err(DatabaseError::NotFoundError);
}
Ok(())
}
pub async fn update_account_with_id_str(
&self,
old_account: &AccountRow,
new_id_str: &str,
name: &str,
avatar: &str,
) -> Result<(), DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
// If the id_str changed, we need to delete the old record and create a new one
if old_account.id_str.as_deref() != Some(new_id_str) {
// Delete the old record (for Douyin accounts, we use uid to identify)
sqlx::query("DELETE FROM accounts WHERE uid = $1 and platform = $2")
.bind(old_account.uid as i64)
.bind(&old_account.platform)
.execute(&lock)
.await?;
// Insert the new record with updated id_str
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)")
.bind(old_account.uid as i64)
.bind(&old_account.platform)
.bind(new_id_str)
.bind(name)
.bind(avatar)
.bind(&old_account.csrf)
.bind(&old_account.cookies)
.bind(&old_account.created_at)
.execute(&lock)
.await?;
} else {
// id_str is the same, just update name and avatar
sqlx::query(
"UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4",
)
.bind(name)
.bind(avatar)
.bind(old_account.uid as i64)
.bind(&old_account.platform)
.execute(&lock)
.await?;
}
Ok(())
}
pub async fn get_accounts(&self) -> Result<Vec<AccountRow>, DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
Ok(sqlx::query_as::<_, AccountRow>("SELECT * FROM accounts")
@@ -178,12 +58,16 @@ impl Database {
.await?)
}
pub async fn get_account(&self, platform: &str, uid: u64) -> Result<AccountRow, DatabaseError> {
pub async fn get_account(
&self,
platform: &str,
uid: &str,
) -> Result<AccountRow, DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
Ok(sqlx::query_as::<_, AccountRow>(
"SELECT * FROM accounts WHERE uid = $1 and platform = $2",
)
.bind(uid as i64)
.bind(uid)
.bind(platform)
.fetch_one(&lock)
.await?)
@@ -200,7 +84,7 @@ impl Database {
.fetch_all(&lock)
.await?;
if accounts.is_empty() {
return Err(DatabaseError::NotFoundError);
return Err(DatabaseError::NotFound);
}
// randomly select one account
let account = accounts.choose(&mut rand::thread_rng()).unwrap();

Some files were not shown because too many files have changed in this diff Show More