Compare commits

...

15 Commits

Author SHA1 Message Date
Xinrea
92ca2cddad fix: dependencies 2025-07-29 00:59:21 +08:00
Xinrea
3db0d1dfe5 feat: manual input model name (close #143) 2025-07-29 00:09:06 +08:00
Xinrea
57907323e6 bump version to 2.10.0 2025-07-27 19:52:53 +08:00
Xinrea
dbdca44c5f feat: deep-link support bsr:// 2025-07-27 19:51:58 +08:00
Xinrea
fe1dd2201f fix: prevent list corruption when deleting archived items 2025-07-26 22:52:45 +08:00
Xinrea
e0ae194cc3 bump version to 2.9.5 2025-07-26 22:40:50 +08:00
Xinrea
6fc5700457 ci/cd: add script to bump version 2025-07-26 22:40:49 +08:00
Xinrea
c4fdcf86d4 fix: bilibili stream pathway not update (close #117) 2025-07-26 22:40:46 +08:00
Xinrea
3088500c8d bump version to 2.9.4 2025-07-25 21:10:04 +08:00
Xinrea
861f3a3624 fix: tauri schema not handled by custom plugin for shaka-player 2025-07-25 21:09:41 +08:00
Xinrea
c55783e4d9 chore: update @tauri-apps/api 2025-07-25 20:13:04 +08:00
Xinrea
955e284d41 fix: start a new recording when header changes 2025-07-24 23:03:09 +08:00
Xinrea
fc4c47427e chore: adjust log level 2025-07-24 21:57:04 +08:00
Xinrea
e2d7563faa bump version to 2.9.3 2025-07-24 21:28:35 +08:00
Xinrea
27d69f7f8d fix: clip video cover not loaded 2025-07-24 21:28:10 +08:00
37 changed files with 1007 additions and 397 deletions

View File

@@ -1,7 +1,7 @@
{
"name": "bili-shadowreplay",
"private": true,
"version": "2.9.2",
"version": "2.10.0",
"type": "module",
"scripts": {
"dev": "vite",
@@ -11,14 +11,16 @@
"tauri": "tauri",
"docs:dev": "vitepress dev docs",
"docs:build": "vitepress build docs",
"docs:preview": "vitepress preview docs"
"docs:preview": "vitepress preview docs",
"bump": "node scripts/bump.cjs"
},
"dependencies": {
"@langchain/core": "^0.3.64",
"@langchain/deepseek": "^0.1.0",
"@langchain/langgraph": "^0.3.10",
"@langchain/ollama": "^0.2.3",
"@tauri-apps/api": "^2.4.1",
"@tauri-apps/api": "^2.6.2",
"@tauri-apps/plugin-deep-link": "~2",
"@tauri-apps/plugin-dialog": "~2",
"@tauri-apps/plugin-fs": "~2",
"@tauri-apps/plugin-http": "~2",

58
scripts/bump.cjs Normal file
View File

@@ -0,0 +1,58 @@
#!/usr/bin/env node
const fs = require("fs");
const path = require("path");
function updatePackageJson(version) {
  // Rewrite the `version` field of ./package.json in place, preserving
  // 2-space indentation and the trailing newline.
  const file = path.join(process.cwd(), "package.json");
  const manifest = JSON.parse(fs.readFileSync(file, "utf8"));
  manifest.version = version;
  const serialized = `${JSON.stringify(manifest, null, 2)}\n`;
  fs.writeFileSync(file, serialized);
  console.log(`✅ Updated package.json version to ${version}`);
}
function updateCargoToml(version) {
  // Rewrite the first `version = "..."` line of src-tauri/Cargo.toml
  // (assumed to be the [package] section's — TODO confirm the [package]
  // table always precedes any dependency tables with version lines).
  const cargoTomlPath = path.join(process.cwd(), "src-tauri", "Cargo.toml");
  let cargoToml = fs.readFileSync(cargoTomlPath, "utf8");
  const versionLine = /^version = ".*"$/m;
  // Fail loudly instead of silently writing the file back unchanged when
  // no version line is present.
  if (!versionLine.test(cargoToml)) {
    throw new Error(`No version line found in ${cargoTomlPath}`);
  }
  // Use a replacer function so `$` sequences in the input can never be
  // interpreted as regex replacement patterns.
  cargoToml = cargoToml.replace(versionLine, () => `version = "${version}"`);
  fs.writeFileSync(cargoTomlPath, cargoToml);
  console.log(`✅ Updated Cargo.toml version to ${version}`);
}
function main() {
  // CLI entry point: `node scripts/bump.cjs <version>`.
  // Bumps the version in both package.json and src-tauri/Cargo.toml.
  const args = process.argv.slice(2);
  if (args.length === 0) {
    console.error("❌ Please provide a version number");
    console.error("Usage: yarn bump <version>");
    console.error("Example: yarn bump 3.1.0");
    process.exit(1);
  }
  const version = args[0];
  // Validate semantic-versioning format. Anchored at both ends so trailing
  // garbage (e.g. "3.1.0abc") is rejected, while still accepting pre-release
  // and build-metadata suffixes such as "3.1.0-beta.1+build5".
  if (!/^\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?(?:\+[0-9A-Za-z.-]+)?$/.test(version)) {
    console.error(
      "❌ Invalid version format. Please use semantic versioning (e.g., 3.1.0)"
    );
    process.exit(1);
  }
  try {
    updatePackageJson(version);
    updateCargoToml(version);
    console.log(`🎉 Successfully bumped version to ${version}`);
  } catch (error) {
    console.error("❌ Error updating version:", error.message);
    process.exit(1);
  }
}
// Script entry: run immediately when invoked as `node scripts/bump.cjs`.
main();

88
src-tauri/Cargo.lock generated
View File

@@ -536,7 +536,7 @@ checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba"
[[package]]
name = "bili-shadowreplay"
version = "1.0.0"
version = "2.10.0"
dependencies = [
"async-ffmpeg-sidecar",
"async-std",
@@ -571,6 +571,7 @@ dependencies = [
"sysinfo",
"tauri",
"tauri-build",
"tauri-plugin-deep-link",
"tauri-plugin-dialog",
"tauri-plugin-fs",
"tauri-plugin-http",
@@ -975,6 +976,26 @@ version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "const-random"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359"
dependencies = [
"const-random-macro",
]
[[package]]
name = "const-random-macro"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e"
dependencies = [
"getrandom 0.2.16",
"once_cell",
"tiny-keccak",
]
[[package]]
name = "convert_case"
version = "0.4.0"
@@ -1151,6 +1172,12 @@ version = "0.8.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
[[package]]
name = "crunchy"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
name = "crypto-common"
version = "0.1.6"
@@ -1529,6 +1556,15 @@ dependencies = [
"syn 2.0.104",
]
[[package]]
name = "dlv-list"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f"
dependencies = [
"const-random",
]
[[package]]
name = "document-features"
version = "0.2.11"
@@ -3927,6 +3963,16 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "ordered-multimap"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79"
dependencies = [
"dlv-list",
"hashbrown 0.14.5",
]
[[package]]
name = "ordered-stream"
version = "0.2.0"
@@ -4987,6 +5033,16 @@ dependencies = [
"zeroize",
]
[[package]]
name = "rust-ini"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7295b7ce3bf4806b419dc3420745998b447178b7005e2011947b38fc5aa6791"
dependencies = [
"cfg-if",
"ordered-multimap",
]
[[package]]
name = "rustc-demangle"
version = "0.1.25"
@@ -6320,6 +6376,26 @@ dependencies = [
"walkdir",
]
[[package]]
name = "tauri-plugin-deep-link"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fec67f32d7a06d80bd3dc009fdb678c35a66116d9cb8cd2bb32e406c2b5bbd2"
dependencies = [
"dunce",
"rust-ini",
"serde",
"serde_json",
"tauri",
"tauri-plugin",
"tauri-utils",
"thiserror 2.0.12",
"tracing",
"url",
"windows-registry",
"windows-result 0.3.4",
]
[[package]]
name = "tauri-plugin-dialog"
version = "2.3.0"
@@ -6451,6 +6527,7 @@ dependencies = [
"serde",
"serde_json",
"tauri",
"tauri-plugin-deep-link",
"thiserror 2.0.12",
"tracing",
"windows-sys 0.60.2",
@@ -6692,6 +6769,15 @@ dependencies = [
"time-core",
]
[[package]]
name = "tiny-keccak"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237"
dependencies = [
"crunchy",
]
[[package]]
name = "tinystr"
version = "0.8.1"

View File

@@ -4,7 +4,7 @@ resolver = "2"
[package]
name = "bili-shadowreplay"
version = "1.0.0"
version = "2.10.0"
description = "BiliBili ShadowReplay"
authors = ["Xinrea"]
license = ""
@@ -71,6 +71,7 @@ gui = [
"tauri-utils",
"tauri-plugin-os",
"tauri-plugin-notification",
"tauri-plugin-deep-link",
"fix-path-env",
"tauri-build",
]
@@ -83,6 +84,7 @@ optional = true
[dependencies.tauri-plugin-single-instance]
version = "2"
optional = true
features = ["deep-link"]
[dependencies.tauri-plugin-dialog]
version = "2"
@@ -117,6 +119,10 @@ optional = true
version = "2"
optional = true
[dependencies.tauri-plugin-deep-link]
version = "2"
optional = true
[dependencies.fix-path-env]
git = "https://github.com/tauri-apps/fix-path-env-rs"
optional = true

View File

@@ -2,7 +2,11 @@
"identifier": "migrated",
"description": "permissions that were migrated from v1",
"local": true,
"windows": ["main", "Live*", "Clip*"],
"windows": [
"main",
"Live*",
"Clip*"
],
"permissions": [
"core:default",
"fs:allow-read-file",
@@ -16,7 +20,9 @@
"fs:allow-exists",
{
"identifier": "fs:scope",
"allow": ["**"]
"allow": [
"**"
]
},
"core:window:default",
"core:window:allow-start-dragging",
@@ -65,6 +71,7 @@
"shell:default",
"sql:default",
"os:default",
"dialog:default"
"dialog:default",
"deep-link:default"
]
}
}

File diff suppressed because one or more lines are too long

View File

@@ -1 +1 @@
{"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main","Live*","Clip*"],"permissions":["core:default","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-copy-file","fs:allow-mkdir","fs:allow-remove","fs:allow-remove","fs:allow-rename","fs:allow-exists",{"identifier":"fs:scope","allow":["**"]},"core:window:default","core:window:allow-start-dragging","core:window:allow-close","core:window:allow-minimize","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-set-title","sql:allow-execute","shell:allow-open","dialog:allow-open","dialog:allow-save","dialog:allow-message","dialog:allow-ask","dialog:allow-confirm",{"identifier":"http:default","allow":[{"url":"https://*.hdslb.com/"},{"url":"https://afdian.com/"},{"url":"https://*.afdiancdn.com/"},{"url":"https://*.douyin.com/"},{"url":"https://*.douyinpic.com/"}]},"dialog:default","shell:default","fs:default","http:default","sql:default","os:default","notification:default","dialog:default","fs:default","http:default","shell:default","sql:default","os:default","dialog:default"]}}
{"migrated":{"identifier":"migrated","description":"permissions that were migrated from v1","local":true,"windows":["main","Live*","Clip*"],"permissions":["core:default","fs:allow-read-file","fs:allow-write-file","fs:allow-read-dir","fs:allow-copy-file","fs:allow-mkdir","fs:allow-remove","fs:allow-remove","fs:allow-rename","fs:allow-exists",{"identifier":"fs:scope","allow":["**"]},"core:window:default","core:window:allow-start-dragging","core:window:allow-close","core:window:allow-minimize","core:window:allow-maximize","core:window:allow-unmaximize","core:window:allow-set-title","sql:allow-execute","shell:allow-open","dialog:allow-open","dialog:allow-save","dialog:allow-message","dialog:allow-ask","dialog:allow-confirm",{"identifier":"http:default","allow":[{"url":"https://*.hdslb.com/"},{"url":"https://afdian.com/"},{"url":"https://*.afdiancdn.com/"},{"url":"https://*.douyin.com/"},{"url":"https://*.douyinpic.com/"}]},"dialog:default","shell:default","fs:default","http:default","sql:default","os:default","notification:default","dialog:default","fs:default","http:default","shell:default","sql:default","os:default","dialog:default","deep-link:default"]}}

View File

@@ -4220,6 +4220,60 @@
"const": "core:window:deny-unminimize",
"markdownDescription": "Denies the unminimize command without any pre-configured scope."
},
{
"description": "Allows reading the opened deep link via the get_current command\n#### This default permission set includes:\n\n- `allow-get-current`",
"type": "string",
"const": "deep-link:default",
"markdownDescription": "Allows reading the opened deep link via the get_current command\n#### This default permission set includes:\n\n- `allow-get-current`"
},
{
"description": "Enables the get_current command without any pre-configured scope.",
"type": "string",
"const": "deep-link:allow-get-current",
"markdownDescription": "Enables the get_current command without any pre-configured scope."
},
{
"description": "Enables the is_registered command without any pre-configured scope.",
"type": "string",
"const": "deep-link:allow-is-registered",
"markdownDescription": "Enables the is_registered command without any pre-configured scope."
},
{
"description": "Enables the register command without any pre-configured scope.",
"type": "string",
"const": "deep-link:allow-register",
"markdownDescription": "Enables the register command without any pre-configured scope."
},
{
"description": "Enables the unregister command without any pre-configured scope.",
"type": "string",
"const": "deep-link:allow-unregister",
"markdownDescription": "Enables the unregister command without any pre-configured scope."
},
{
"description": "Denies the get_current command without any pre-configured scope.",
"type": "string",
"const": "deep-link:deny-get-current",
"markdownDescription": "Denies the get_current command without any pre-configured scope."
},
{
"description": "Denies the is_registered command without any pre-configured scope.",
"type": "string",
"const": "deep-link:deny-is-registered",
"markdownDescription": "Denies the is_registered command without any pre-configured scope."
},
{
"description": "Denies the register command without any pre-configured scope.",
"type": "string",
"const": "deep-link:deny-register",
"markdownDescription": "Denies the register command without any pre-configured scope."
},
{
"description": "Denies the unregister command without any pre-configured scope.",
"type": "string",
"const": "deep-link:deny-unregister",
"markdownDescription": "Denies the unregister command without any pre-configured scope."
},
{
"description": "This permission set configures the types of dialogs\navailable from the dialog plugin.\n\n#### Granted Permissions\n\nAll dialog types are enabled.\n\n\n\n#### This default permission set includes:\n\n- `allow-ask`\n- `allow-confirm`\n- `allow-message`\n- `allow-save`\n- `allow-open`",
"type": "string",

View File

@@ -4220,6 +4220,60 @@
"const": "core:window:deny-unminimize",
"markdownDescription": "Denies the unminimize command without any pre-configured scope."
},
{
"description": "Allows reading the opened deep link via the get_current command\n#### This default permission set includes:\n\n- `allow-get-current`",
"type": "string",
"const": "deep-link:default",
"markdownDescription": "Allows reading the opened deep link via the get_current command\n#### This default permission set includes:\n\n- `allow-get-current`"
},
{
"description": "Enables the get_current command without any pre-configured scope.",
"type": "string",
"const": "deep-link:allow-get-current",
"markdownDescription": "Enables the get_current command without any pre-configured scope."
},
{
"description": "Enables the is_registered command without any pre-configured scope.",
"type": "string",
"const": "deep-link:allow-is-registered",
"markdownDescription": "Enables the is_registered command without any pre-configured scope."
},
{
"description": "Enables the register command without any pre-configured scope.",
"type": "string",
"const": "deep-link:allow-register",
"markdownDescription": "Enables the register command without any pre-configured scope."
},
{
"description": "Enables the unregister command without any pre-configured scope.",
"type": "string",
"const": "deep-link:allow-unregister",
"markdownDescription": "Enables the unregister command without any pre-configured scope."
},
{
"description": "Denies the get_current command without any pre-configured scope.",
"type": "string",
"const": "deep-link:deny-get-current",
"markdownDescription": "Denies the get_current command without any pre-configured scope."
},
{
"description": "Denies the is_registered command without any pre-configured scope.",
"type": "string",
"const": "deep-link:deny-is-registered",
"markdownDescription": "Denies the is_registered command without any pre-configured scope."
},
{
"description": "Denies the register command without any pre-configured scope.",
"type": "string",
"const": "deep-link:deny-register",
"markdownDescription": "Denies the register command without any pre-configured scope."
},
{
"description": "Denies the unregister command without any pre-configured scope.",
"type": "string",
"const": "deep-link:deny-unregister",
"markdownDescription": "Denies the unregister command without any pre-configured scope."
},
{
"description": "This permission set configures the types of dialogs\navailable from the dialog plugin.\n\n#### Granted Permissions\n\nAll dialog types are enabled.\n\n\n\n#### This default permission set includes:\n\n- `allow-ask`\n- `allow-confirm`\n- `allow-message`\n- `allow-save`\n- `allow-open`",
"type": "string",

View File

@@ -9,7 +9,7 @@ use rand::Rng;
#[derive(Debug, Clone, serde::Serialize, sqlx::FromRow)]
pub struct AccountRow {
pub platform: String,
pub uid: u64, // Keep for Bilibili compatibility
pub uid: u64, // Keep for Bilibili compatibility
pub id_str: Option<String>, // New field for string IDs like Douyin sec_uid
pub name: String,
pub avatar: String,
@@ -133,7 +133,7 @@ impl Database {
avatar: &str,
) -> Result<(), DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
// If the id_str changed, we need to delete the old record and create a new one
if old_account.id_str.as_deref() != Some(new_id_str) {
// Delete the old record (for Douyin accounts, we use uid to identify)
@@ -142,7 +142,7 @@ impl Database {
.bind(&old_account.platform)
.execute(&lock)
.await?;
// Insert the new record with updated id_str
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)")
.bind(old_account.uid as i64)
@@ -157,15 +157,17 @@ impl Database {
.await?;
} else {
// id_str is the same, just update name and avatar
sqlx::query("UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4")
.bind(name)
.bind(avatar)
.bind(old_account.uid as i64)
.bind(&old_account.platform)
.execute(&lock)
.await?;
sqlx::query(
"UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4",
)
.bind(name)
.bind(avatar)
.bind(old_account.uid as i64)
.bind(&old_account.platform)
.execute(&lock)
.await?;
}
Ok(())
}

View File

@@ -133,9 +133,9 @@ impl Database {
"SELECT * FROM records ORDER BY created_at DESC LIMIT $1 OFFSET $2",
)
.bind(limit as i64)
.bind(offset as i64)
.fetch_all(&lock)
.await?)
.bind(offset as i64)
.fetch_all(&lock)
.await?)
} else {
Ok(sqlx::query_as::<_, RecordRow>(
"SELECT * FROM records WHERE room_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",

View File

@@ -2,8 +2,10 @@ use std::path::{Path, PathBuf};
use std::process::Stdio;
use crate::progress_reporter::{ProgressReporter, ProgressReporterTrait};
use crate::subtitle_generator::{whisper_cpp, GenerateResult, SubtitleGenerator, SubtitleGeneratorType};
use crate::subtitle_generator::whisper_online;
use crate::subtitle_generator::{
whisper_cpp, GenerateResult, SubtitleGenerator, SubtitleGeneratorType,
};
use async_ffmpeg_sidecar::event::{FfmpegEvent, LogLevel};
use async_ffmpeg_sidecar::log_parser::FfmpegLogParser;
use tokio::io::BufReader;
@@ -262,7 +264,10 @@ pub async fn get_segment_duration(file: &Path) -> Result<f64, String> {
.spawn();
if let Err(e) = child {
return Err(format!("Failed to spawn ffprobe process for segment: {}", e));
return Err(format!(
"Failed to spawn ffprobe process for segment: {}",
e
));
}
let mut child = child.unwrap();
@@ -293,8 +298,6 @@ pub async fn get_segment_duration(file: &Path) -> Result<f64, String> {
duration.ok_or_else(|| "Failed to parse segment duration".to_string())
}
pub async fn encode_video_subtitle(
reporter: &impl ProgressReporterTrait,
file: &Path,
@@ -467,10 +470,7 @@ pub async fn encode_video_danmu(
}
}
pub async fn generic_ffmpeg_command(
args: &[&str],
) -> Result<String, String> {
pub async fn generic_ffmpeg_command(args: &[&str]) -> Result<String, String> {
let child = tokio::process::Command::new(ffmpeg_path())
.args(args)
.stderr(Stdio::piped())
@@ -520,8 +520,7 @@ pub async fn generate_video_subtitle(
if whisper_model.is_empty() {
return Err("Whisper model not configured".to_string());
}
if let Ok(generator) =
whisper_cpp::new(Path::new(&whisper_model), whisper_prompt).await
if let Ok(generator) = whisper_cpp::new(Path::new(&whisper_model), whisper_prompt).await
{
let chunk_dir = extract_audio_chunks(file, "wav").await?;
@@ -630,7 +629,6 @@ pub async fn generate_video_subtitle(
}
}
/// Trying to run ffmpeg for version
pub async fn check_ffmpeg() -> Result<String, String> {
let child = tokio::process::Command::new(ffmpeg_path())

View File

@@ -43,8 +43,13 @@ pub async fn add_account(
match douyin_client.get_user_info().await {
Ok(user_info) => {
// For Douyin, use sec_uid as the primary identifier in id_str field
let avatar_url = user_info.avatar_thumb.url_list.first().cloned().unwrap_or_default();
let avatar_url = user_info
.avatar_thumb
.url_list
.first()
.cloned()
.unwrap_or_default();
state
.db
.update_account_with_id_str(

View File

@@ -147,7 +147,10 @@ pub async fn get_archive_subtitle(
if platform.is_none() {
return Err("Unsupported platform".to_string());
}
Ok(state.recorder_manager.get_archive_subtitle(platform.unwrap(), room_id, &live_id).await?)
Ok(state
.recorder_manager
.get_archive_subtitle(platform.unwrap(), room_id, &live_id)
.await?)
}
#[cfg_attr(feature = "gui", tauri::command)]
@@ -161,7 +164,10 @@ pub async fn generate_archive_subtitle(
if platform.is_none() {
return Err("Unsupported platform".to_string());
}
Ok(state.recorder_manager.generate_archive_subtitle(platform.unwrap(), room_id, &live_id).await?)
Ok(state
.recorder_manager
.generate_archive_subtitle(platform.unwrap(), room_id, &live_id)
.await?)
}
#[cfg_attr(feature = "gui", tauri::command)]

View File

@@ -301,4 +301,4 @@ pub async fn list_folder(_state: state_type!(), path: String) -> Result<Vec<Stri
files.push(entry.path().to_str().unwrap().to_string());
}
Ok(files)
}
}

View File

@@ -411,7 +411,18 @@ pub async fn generate_video_subtitle(
let filepath = Path::new(state.config.read().await.output.as_str()).join(&video.file);
let file = Path::new(&filepath);
match ffmpeg::generate_video_subtitle(Some(&reporter), file, generator_type, &whisper_model, &whisper_prompt, &openai_api_key, &openai_api_endpoint, language_hint).await {
match ffmpeg::generate_video_subtitle(
Some(&reporter),
file,
generator_type,
&whisper_model,
&whisper_prompt,
&openai_api_key,
&openai_api_endpoint,
language_hint,
)
.await
{
Ok(result) => {
reporter.finish(true, "字幕生成完成").await;
// for local whisper, we need to update the task status to success
@@ -552,7 +563,6 @@ async fn encode_video_subtitle_inner(
Ok(new_video)
}
#[cfg_attr(feature = "gui", tauri::command)]
pub async fn generic_ffmpeg_command(
_state: state_type!(),
@@ -560,4 +570,4 @@ pub async fn generic_ffmpeg_command(
) -> Result<String, String> {
let args_str: Vec<&str> = args.iter().map(|s| s.as_str()).collect();
ffmpeg::generic_ffmpeg_command(&args_str).await
}
}

View File

@@ -22,17 +22,18 @@ use crate::{
},
message::{delete_message, get_messages, read_message},
recorder::{
add_recorder, delete_archive, export_danmu, fetch_hls, get_archive, get_archive_subtitle, get_archives,
get_danmu_record, get_recent_record, get_recorder_list, get_room_info,
get_today_record_count, get_total_length, remove_recorder, send_danmaku, set_enable,
ExportDanmuOptions, generate_archive_subtitle,
add_recorder, delete_archive, export_danmu, fetch_hls, generate_archive_subtitle,
get_archive, get_archive_subtitle, get_archives, get_danmu_record, get_recent_record,
get_recorder_list, get_room_info, get_today_record_count, get_total_length,
remove_recorder, send_danmaku, set_enable, ExportDanmuOptions,
},
task::{delete_task, get_tasks},
utils::{console_log, get_disk_info, list_folder, DiskInfo},
video::{
cancel, clip_range, delete_video, encode_video_subtitle, generate_video_subtitle,
get_all_videos, get_video, get_video_cover, get_video_subtitle, get_video_typelist,
get_videos, update_video_cover, update_video_subtitle, upload_procedure, generic_ffmpeg_command,
generic_ffmpeg_command, get_all_videos, get_video, get_video_cover, get_video_subtitle,
get_video_typelist, get_videos, update_video_cover, update_video_subtitle,
upload_procedure,
},
AccountInfo,
},
@@ -518,7 +519,8 @@ async fn handler_get_archive_subtitle(
state: axum::extract::State<State>,
Json(param): Json<GetArchiveSubtitleRequest>,
) -> Result<Json<ApiResponse<String>>, ApiError> {
let subtitle = get_archive_subtitle(state.0, param.platform, param.room_id, param.live_id).await?;
let subtitle =
get_archive_subtitle(state.0, param.platform, param.room_id, param.live_id).await?;
Ok(Json(ApiResponse::success(subtitle)))
}
@@ -534,7 +536,8 @@ async fn handler_generate_archive_subtitle(
state: axum::extract::State<State>,
Json(param): Json<GenerateArchiveSubtitleRequest>,
) -> Result<Json<ApiResponse<String>>, ApiError> {
let subtitle = generate_archive_subtitle(state.0, param.platform, param.room_id, param.live_id).await?;
let subtitle =
generate_archive_subtitle(state.0, param.platform, param.room_id, param.live_id).await?;
Ok(Json(ApiResponse::success(subtitle)))
}
@@ -613,7 +616,8 @@ async fn handler_get_recent_record(
state: axum::extract::State<State>,
Json(param): Json<GetRecentRecordRequest>,
) -> Result<Json<ApiResponse<Vec<RecordRow>>>, ApiError> {
let recent_record = get_recent_record(state.0, param.room_id, param.offset, param.limit).await?;
let recent_record =
get_recent_record(state.0, param.room_id, param.offset, param.limit).await?;
Ok(Json(ApiResponse::success(recent_record)))
}
@@ -1333,7 +1337,10 @@ pub async fn start_api_server(state: State) {
.route("/api/get_room_info", post(handler_get_room_info))
.route("/api/get_archives", post(handler_get_archives))
.route("/api/get_archive", post(handler_get_archive))
.route("/api/get_archive_subtitle", post(handler_get_archive_subtitle))
.route(
"/api/get_archive_subtitle",
post(handler_get_archive_subtitle),
)
.route("/api/get_danmu_record", post(handler_get_danmu_record))
.route("/api/get_total_length", post(handler_get_total_length))
.route(

View File

@@ -117,6 +117,9 @@ async fn setup_logging(log_dir: &Path) -> Result<(), Box<dyn std::error::Error>>
),
])?;
// logging current package version
log::info!("Current version: {}", env!("CARGO_PKG_VERSION"));
Ok(())
}
@@ -553,7 +556,7 @@ fn setup_invoke_handlers(builder: tauri::Builder<tauri::Wry>) -> tauri::Builder<
fn main() -> Result<(), Box<dyn std::error::Error>> {
let _ = fix_path_env::fix();
let builder = tauri::Builder::default();
let builder = tauri::Builder::default().plugin(tauri_plugin_deep_link::init());
let builder = setup_plugins(builder);
let builder = setup_event_handlers(builder);
let builder = setup_invoke_handlers(builder);

View File

@@ -82,7 +82,10 @@ pub trait Recorder: Send + Sync + 'static {
async fn comments(&self, live_id: &str) -> Result<Vec<DanmuEntry>, errors::RecorderError>;
async fn is_recording(&self, live_id: &str) -> bool;
async fn get_archive_subtitle(&self, live_id: &str) -> Result<String, errors::RecorderError>;
async fn generate_archive_subtitle(&self, live_id: &str) -> Result<String, errors::RecorderError>;
async fn generate_archive_subtitle(
&self,
live_id: &str,
) -> Result<String, errors::RecorderError>;
async fn enable(&self);
async fn disable(&self);
}

View File

@@ -13,7 +13,6 @@ use crate::subtitle_generator::item_to_srt;
use super::danmu::{DanmuEntry, DanmuStorage};
use super::entry::TsEntry;
use std::path::Path;
use chrono::Utc;
use client::{BiliClient, BiliStream, RoomInfo, StreamType, UserInfo};
use danmu_stream::danmu_stream::DanmuStream;
@@ -22,11 +21,12 @@ use danmu_stream::DanmuMessageType;
use errors::BiliClientError;
use m3u8_rs::{Playlist, QuotedOrUnquoted, VariantStream};
use regex::Regex;
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::time::Duration;
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
use tokio::sync::{broadcast, Mutex, RwLock};
use tokio::task::JoinHandle;
use url::Url;
@@ -69,9 +69,11 @@ pub struct BiliRecorder {
live_end_channel: broadcast::Sender<RecorderEvent>,
enabled: Arc<RwLock<bool>>,
last_segment_offset: Arc<RwLock<Option<i64>>>, // 保存上次处理的最后一个片段的偏移
current_header_url: Arc<RwLock<Option<String>>>, // 保存当前的 header URL
header_changed_recently: Arc<AtomicBool>, // 标记最近是否发生了 header 变化
danmu_task: Arc<Mutex<Option<JoinHandle<()>>>>,
record_task: Arc<Mutex<Option<JoinHandle<()>>>>,
master_manifest: Arc<RwLock<Option<String>>>,
}
impl From<DatabaseError> for super::errors::RecorderError {
@@ -142,9 +144,11 @@ impl BiliRecorder {
live_end_channel: options.channel,
enabled: Arc::new(RwLock::new(options.auto_start)),
last_segment_offset: Arc::new(RwLock::new(None)),
current_header_url: Arc::new(RwLock::new(None)),
header_changed_recently: Arc::new(AtomicBool::new(false)),
danmu_task: Arc::new(Mutex::new(None)),
record_task: Arc::new(Mutex::new(None)),
master_manifest: Arc::new(RwLock::new(None)),
};
log::info!("Recorder for room {} created.", options.room_id);
Ok(recorder)
@@ -157,6 +161,8 @@ impl BiliRecorder {
*self.last_update.write().await = Utc::now().timestamp();
*self.danmu_storage.write().await = None;
*self.last_segment_offset.write().await = None;
*self.current_header_url.write().await = None;
self.header_changed_recently.store(false, Ordering::Relaxed);
}
async fn should_record(&self) -> bool {
@@ -262,11 +268,13 @@ impl BiliRecorder {
return true;
}
let master_manifest =
m3u8_rs::parse_playlist_res(master_manifest.as_ref().unwrap().as_bytes())
.map_err(|_| super::errors::RecorderError::M3u8ParseFailed {
content: master_manifest.as_ref().unwrap().clone(),
});
let master_manifest = master_manifest.unwrap();
*self.master_manifest.write().await = Some(master_manifest.clone());
let master_manifest = m3u8_rs::parse_playlist_res(master_manifest.as_bytes())
.map_err(|_| super::errors::RecorderError::M3u8ParseFailed {
content: master_manifest.clone(),
});
if master_manifest.is_err() {
log::error!(
"[{}]Parse master manifest failed: {}",
@@ -307,6 +315,8 @@ impl BiliRecorder {
let variant = variant.unwrap();
log::info!("Variant: {:?}", variant);
let new_stream = self.stream_from_variant(variant).await;
if new_stream.is_err() {
log::error!(
@@ -320,26 +330,13 @@ impl BiliRecorder {
let stream = new_stream.unwrap();
let should_update_stream = self.live_stream.read().await.is_none()
|| !self
.live_stream
.read()
.await
.as_ref()
.unwrap()
.is_same(&stream)
|| self.force_update.load(Ordering::Relaxed);
|| self.force_update.load(Ordering::Relaxed)
|| self.header_changed_recently.load(Ordering::Relaxed);
if should_update_stream {
log::info!(
"[{}]Update to a new stream: {:?} => {}",
self.room_id,
self.live_stream.read().await.clone(),
stream
);
self.force_update.store(false, Ordering::Relaxed);
let new_stream = self.fetch_real_stream(stream).await;
let new_stream = self.fetch_real_stream(&stream).await;
if new_stream.is_err() {
log::error!(
"[{}]Fetch real stream failed: {}",
@@ -352,6 +349,13 @@ impl BiliRecorder {
let new_stream = new_stream.unwrap();
*self.live_stream.write().await = Some(new_stream);
*self.last_update.write().await = Utc::now().timestamp();
log::info!(
"[{}]Update to a new stream: {:?} => {}",
self.room_id,
self.live_stream.read().await.clone(),
stream
);
}
true
@@ -458,6 +462,10 @@ impl BiliRecorder {
}
Err(e) => {
log::error!("Failed fetching index content from {}", stream.index());
log::error!(
"Master manifest: {}",
self.master_manifest.read().await.as_ref().unwrap()
);
Err(super::errors::RecorderError::BiliClientError { err: e })
}
}
@@ -469,7 +477,7 @@ impl BiliRecorder {
return Err(super::errors::RecorderError::NoStreamAvailable);
}
let stream = stream.unwrap();
let index_content = self
.client
.read()
@@ -484,7 +492,7 @@ impl BiliRecorder {
url: stream.index(),
});
}
let mut header_url = String::from("");
let re = Regex::new(r"h.*\.m4s").unwrap();
if let Some(captures) = re.captures(&index_content) {
@@ -493,13 +501,13 @@ impl BiliRecorder {
if header_url.is_empty() {
log::warn!("Parse header url failed: {}", index_content);
}
Ok(header_url)
}
async fn fetch_real_stream(
&self,
stream: BiliStream,
stream: &BiliStream,
) -> Result<BiliStream, super::errors::RecorderError> {
let index_content = self
.client
@@ -508,7 +516,9 @@ impl BiliRecorder {
.get_index_content(&self.account, &stream.index())
.await?;
if index_content.is_empty() {
return Err(super::errors::RecorderError::InvalidStream { stream });
return Err(super::errors::RecorderError::InvalidStream {
stream: stream.clone(),
});
}
if index_content.contains("Not Found") {
return Err(super::errors::RecorderError::IndexNotFound {
@@ -519,7 +529,7 @@ impl BiliRecorder {
// this index content provides another m3u8 url
// example: https://765b047cec3b099771d4b1851136046f.v.smtcdns.net/d1--cn-gotcha204-3.bilivideo.com/live-bvc/246284/live_1323355750_55526594/index.m3u8?expires=1741318366&len=0&oi=1961017843&pt=h5&qn=10000&trid=1007049a5300422eeffd2d6995d67b67ca5a&sigparams=cdn,expires,len,oi,pt,qn,trid&cdn=cn-gotcha204&sign=7ef1241439467ef27d3c804c1eda8d4d&site=1c89ef99adec13fab3a3592ee4db26d3&free_type=0&mid=475210&sche=ban&bvchls=1&trace=16&isp=ct&rg=East&pv=Shanghai&source=puv3_onetier&p2p_type=-1&score=1&suffix=origin&deploy_env=prod&flvsk=e5c4d6fb512ed7832b706f0a92f7a8c8&sk=246b3930727a89629f17520b1b551a2f&pp=rtmp&hot_cdn=57345&origin_bitrate=657300&sl=1&info_source=cache&vd=bc&src=puv3&order=1&TxLiveCode=cold_stream&TxDispType=3&svr_type=live_oc&tencent_test_client_ip=116.226.193.243&dispatch_from=OC_MGR61.170.74.11&utime=1741314857497
let new_url = index_content.lines().last().unwrap();
// extract host: cn-gotcha204-3.bilivideo.com
let host = new_url.split('/').nth(2).unwrap_or_default();
let extra = new_url.split('?').nth(1).unwrap_or_default();
@@ -531,11 +541,11 @@ impl BiliRecorder {
.collect::<Vec<&str>>()
.join("/")
+ "/";
let new_stream = BiliStream::new(StreamType::FMP4, base_url.as_str(), host, extra);
return Box::pin(self.fetch_real_stream(new_stream)).await;
return Box::pin(self.fetch_real_stream(&new_stream)).await;
}
Ok(stream)
Ok(stream.clone())
}
async fn get_work_dir(&self, live_id: &str) -> String {
@@ -555,106 +565,151 @@ impl BiliRecorder {
}
let current_stream = current_stream.unwrap();
let parsed = self.get_playlist().await;
if parsed.is_err() {
self.force_update.store(true, Ordering::Relaxed);
return Err(parsed.err().unwrap());
}
let playlist = parsed.unwrap();
let mut timestamp: i64 = self.live_id.read().await.parse::<i64>().unwrap_or(0);
let mut work_dir;
let mut is_first_record = false;
// Check header if None
if (self.entry_store.read().await.as_ref().is_none()
|| self
.entry_store
.read()
.await
.as_ref()
.unwrap()
.get_header()
.is_none())
&& current_stream.format == StreamType::FMP4
{
// Check header for FMP4 streams
if current_stream.format == StreamType::FMP4 {
// Get url from EXT-X-MAP
let header_url = self.get_header_url().await?;
if header_url.is_empty() {
return Err(super::errors::RecorderError::EmptyHeader);
}
timestamp = Utc::now().timestamp_millis();
*self.live_id.write().await = timestamp.to_string();
work_dir = self.get_work_dir(timestamp.to_string().as_str()).await;
is_first_record = true;
let full_header_url = current_stream.ts_url(&header_url);
let file_name = header_url.split('/').next_back().unwrap();
let mut header = TsEntry {
url: file_name.to_string(),
sequence: 0,
length: 0.0,
size: 0,
ts: timestamp,
is_header: true,
// Check if header URL has changed
let current_header = self.current_header_url.read().await.clone();
let header_changed = match &current_header {
Some(prev_url) => prev_url != &header_url,
None => true, // First time, treat as changed
};
// Create work directory before download
tokio::fs::create_dir_all(&work_dir).await.map_err(|e| {
super::errors::RecorderError::IoError { err: e }
})?;
// Download header
match self
.client
.read()
.await
.download_ts(&full_header_url, &format!("{}/{}", work_dir, file_name))
.await
{
Ok(size) => {
if size == 0 {
log::error!("Download header failed: {}", full_header_url);
let need_new_recording = self.entry_store.read().await.as_ref().is_none()
|| self
.entry_store
.read()
.await
.as_ref()
.unwrap()
.get_header()
.is_none()
|| header_changed;
if need_new_recording {
if header_changed && current_header.is_some() {
log::info!(
"[{}] Header URL changed from {:?} to {}, starting new recording segment",
self.room_id,
current_header,
header_url
);
// Reset current recording to start a new segment
self.reset().await;
// Return HeaderChanged error to break recording loop and re-enter check_status
return Err(super::errors::RecorderError::HeaderChanged);
}
timestamp = Utc::now().timestamp_millis();
*self.live_id.write().await = timestamp.to_string();
work_dir = self.get_work_dir(timestamp.to_string().as_str()).await;
is_first_record = true;
let full_header_url = current_stream.ts_url(&header_url);
let file_name = header_url.split('/').next_back().unwrap();
let mut header = TsEntry {
url: file_name.to_string(),
sequence: 0,
length: 0.0,
size: 0,
ts: timestamp,
is_header: true,
};
// Create work directory before download
tokio::fs::create_dir_all(&work_dir)
.await
.map_err(|e| super::errors::RecorderError::IoError { err: e })?;
// Download header
match self
.client
.read()
.await
.download_ts(&full_header_url, &format!("{}/{}", work_dir, file_name))
.await
{
Ok(size) => {
if size == 0 {
log::error!("Download header failed: {}", full_header_url);
// Clean up empty directory since header download failed
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
);
}
return Err(super::errors::RecorderError::InvalidStream {
stream: current_stream,
});
}
header.size = size;
// Now that download succeeded, create the record and setup stores
self.db
.add_record(
PlatformType::BiliBili,
timestamp.to_string().as_str(),
self.room_id,
&self.room_info.read().await.room_title,
self.cover.read().await.clone(),
None,
)
.await?;
let entry_store = EntryStore::new(&work_dir).await;
*self.entry_store.write().await = Some(entry_store);
// danmu file
let danmu_file_path = format!("{}{}", work_dir, "danmu.txt");
*self.danmu_storage.write().await =
DanmuStorage::new(&danmu_file_path).await;
self.entry_store
.write()
.await
.as_mut()
.unwrap()
.add_entry(header)
.await;
// Save the new header URL
*self.current_header_url.write().await = Some(header_url.clone());
}
Err(e) => {
log::error!("Download header failed: {}", e);
// Clean up empty directory since header download failed
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!("Failed to cleanup empty work directory {}: {}", work_dir, cleanup_err);
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
);
}
return Err(super::errors::RecorderError::InvalidStream {
stream: current_stream,
});
return Err(e.into());
}
header.size = size;
// Now that download succeeded, create the record and setup stores
self.db
.add_record(
PlatformType::BiliBili,
timestamp.to_string().as_str(),
self.room_id,
&self.room_info.read().await.room_title,
self.cover.read().await.clone(),
None,
)
.await?;
let entry_store = EntryStore::new(&work_dir).await;
*self.entry_store.write().await = Some(entry_store);
// danmu file
let danmu_file_path = format!("{}{}", work_dir, "danmu.txt");
*self.danmu_storage.write().await = DanmuStorage::new(&danmu_file_path).await;
self.entry_store
.write()
.await
.as_mut()
.unwrap()
.add_entry(header)
.await;
}
Err(e) => {
log::error!("Download header failed: {}", e);
// Clean up empty directory since header download failed
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!("Failed to cleanup empty work directory {}: {}", work_dir, cleanup_err);
}
return Err(e.into());
}
} else {
// Header exists and hasn't changed, use existing work_dir
work_dir = self.get_work_dir(self.live_id.read().await.as_str()).await;
}
} else {
work_dir = self.get_work_dir(timestamp.to_string().as_str()).await;
@@ -666,10 +721,10 @@ impl BiliRecorder {
is_first_record = true;
}
}
match parsed {
Ok(Playlist::MasterPlaylist(pl)) => log::debug!("Master playlist:\n{:?}", pl),
Ok(Playlist::MediaPlaylist(pl)) => {
match playlist {
Playlist::MasterPlaylist(pl) => log::debug!("Master playlist:\n{:?}", pl),
Playlist::MediaPlaylist(pl) => {
let mut new_segment_fetched = false;
let last_sequence = self
.entry_store
@@ -699,12 +754,22 @@ impl BiliRecorder {
}
segment_offsets.push(seg_offset);
}
// Extract stream start timestamp from header if available for FMP4
let stream_start_timestamp = if current_stream.format == StreamType::FMP4 {
if let Some(header_entry) = self.entry_store.read().await.as_ref().and_then(|store| store.get_header()) {
if let Some(header_entry) = self
.entry_store
.read()
.await
.as_ref()
.and_then(|store| store.get_header())
{
// Parse timestamp from header filename like "h1753276580.m4s"
if let Some(timestamp_str) = header_entry.url.strip_prefix("h").and_then(|s| s.strip_suffix(".m4s")) {
if let Some(timestamp_str) = header_entry
.url
.strip_prefix("h")
.and_then(|s| s.strip_suffix(".m4s"))
{
timestamp_str.parse::<i64>().unwrap_or(0)
} else {
0
@@ -732,70 +797,82 @@ impl BiliRecorder {
}
// Calculate precise timestamp from stream start + BILI-AUX offset for FMP4
let ts_mili = if current_stream.format == StreamType::FMP4 && stream_start_timestamp > 0 && i < segment_offsets.len() {
let ts_mili = if current_stream.format == StreamType::FMP4
&& stream_start_timestamp > 0
&& i < segment_offsets.len()
{
let seg_offset = segment_offsets[i];
stream_start_timestamp * 1000 + seg_offset
} else {
// Fallback to current time if parsing fails or not FMP4
Utc::now().timestamp_millis()
};
// encode segment offset into filename
let file_name = ts.uri.split('/').next_back().unwrap_or(&ts.uri);
let ts_length = pl.target_duration as f64;
// Calculate precise duration from BILI-AUX offsets for FMP4
let precise_length_from_aux = if current_stream.format == StreamType::FMP4 && i < segment_offsets.len() {
let current_offset = segment_offsets[i];
// Get the previous offset for duration calculation
let prev_offset = if i > 0 {
// Use previous segment in current M3U8
Some(segment_offsets[i - 1])
} else {
// Use saved last offset from previous M3U8 processing
last_offset
};
if let Some(prev) = prev_offset {
let duration_ms = current_offset - prev;
if duration_ms > 0 {
Some(duration_ms as f64 / 1000.0) // Convert ms to seconds
let precise_length_from_aux =
if current_stream.format == StreamType::FMP4 && i < segment_offsets.len() {
let current_offset = segment_offsets[i];
// Get the previous offset for duration calculation
let prev_offset = if i > 0 {
// Use previous segment in current M3U8
Some(segment_offsets[i - 1])
} else {
// Use saved last offset from previous M3U8 processing
last_offset
};
if let Some(prev) = prev_offset {
let duration_ms = current_offset - prev;
if duration_ms > 0 {
Some(duration_ms as f64 / 1000.0) // Convert ms to seconds
} else {
None
}
} else {
// No previous offset available, use target duration
None
}
} else {
// No previous offset available, use target duration
None
}
} else {
None
};
};
let client = self.client.clone();
let mut retry = 0;
let mut work_dir_created_for_non_fmp4 = false;
// For non-FMP4 streams, create record on first successful ts download
if is_first_record && current_stream.format != StreamType::FMP4 {
// Create work directory before first ts download
tokio::fs::create_dir_all(&work_dir).await.map_err(|e| {
super::errors::RecorderError::IoError { err: e }
})?;
tokio::fs::create_dir_all(&work_dir)
.await
.map_err(|e| super::errors::RecorderError::IoError { err: e })?;
work_dir_created_for_non_fmp4 = true;
}
loop {
if retry > 3 {
log::error!("Download ts failed after retry");
// Clean up empty directory if first ts download failed for non-FMP4
if is_first_record && current_stream.format != StreamType::FMP4 && work_dir_created_for_non_fmp4 {
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!("Failed to cleanup empty work directory {}: {}", work_dir, cleanup_err);
if is_first_record
&& current_stream.format != StreamType::FMP4
&& work_dir_created_for_non_fmp4
{
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await
{
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
);
}
}
break;
}
match client
@@ -807,19 +884,28 @@ impl BiliRecorder {
Ok(size) => {
if size == 0 {
log::error!("Segment with size 0, stream might be corrupted");
// Clean up empty directory if first ts download failed for non-FMP4
if is_first_record && current_stream.format != StreamType::FMP4 && work_dir_created_for_non_fmp4 {
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!("Failed to cleanup empty work directory {}: {}", work_dir, cleanup_err);
if is_first_record
&& current_stream.format != StreamType::FMP4
&& work_dir_created_for_non_fmp4
{
if let Err(cleanup_err) =
tokio::fs::remove_dir_all(&work_dir).await
{
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
);
}
}
return Err(super::errors::RecorderError::InvalidStream {
stream: current_stream,
});
}
// Create record and setup stores on first successful download for non-FMP4
if is_first_record && current_stream.format != StreamType::FMP4 {
self.db
@@ -838,20 +924,31 @@ impl BiliRecorder {
// danmu file
let danmu_file_path = format!("{}{}", work_dir, "danmu.txt");
*self.danmu_storage.write().await = DanmuStorage::new(&danmu_file_path).await;
*self.danmu_storage.write().await =
DanmuStorage::new(&danmu_file_path).await;
is_first_record = false;
}
// Get precise duration - prioritize BILI-AUX for FMP4, fallback to ffprobe if needed
let precise_length = if let Some(aux_duration) = precise_length_from_aux {
let precise_length = if let Some(aux_duration) =
precise_length_from_aux
{
aux_duration
} else if current_stream.format != StreamType::FMP4 {
// For regular TS segments, use direct ffprobe
let file_path = format!("{}/{}", work_dir, file_name);
match crate::ffmpeg::get_segment_duration(std::path::Path::new(&file_path)).await {
match crate::ffmpeg::get_segment_duration(std::path::Path::new(
&file_path,
))
.await
{
Ok(duration) => {
log::debug!("Precise TS segment duration: {}s (original: {}s)", duration, ts_length);
log::debug!(
"Precise TS segment duration: {}s (original: {}s)",
duration,
ts_length
);
duration
}
Err(e) => {
@@ -879,23 +976,35 @@ impl BiliRecorder {
is_header: false,
})
.await;
// Update last offset for next segment calculation
if current_stream.format == StreamType::FMP4 && i < segment_offsets.len() {
if current_stream.format == StreamType::FMP4
&& i < segment_offsets.len()
{
last_offset = Some(segment_offsets[i]);
}
new_segment_fetched = true;
break;
}
Err(e) => {
retry += 1;
log::warn!("Download ts failed, retry {}: {}", retry, e);
// If this is the last retry and it's the first record for non-FMP4, clean up
if retry > 3 && is_first_record && current_stream.format != StreamType::FMP4 && work_dir_created_for_non_fmp4 {
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!("Failed to cleanup empty work directory {}: {}", work_dir, cleanup_err);
if retry > 3
&& is_first_record
&& current_stream.format != StreamType::FMP4
&& work_dir_created_for_non_fmp4
{
if let Err(cleanup_err) =
tokio::fs::remove_dir_all(&work_dir).await
{
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
);
}
}
}
@@ -905,12 +1014,12 @@ impl BiliRecorder {
if new_segment_fetched {
*self.last_update.write().await = Utc::now().timestamp();
// Save the last offset for next M3U8 processing
if current_stream.format == StreamType::FMP4 {
*self.last_segment_offset.write().await = last_offset;
}
self.db
.update_record(
timestamp.to_string().as_str(),
@@ -944,10 +1053,6 @@ impl BiliRecorder {
}
}
}
Err(e) => {
self.force_update.store(true, Ordering::Relaxed);
return Err(e);
}
}
// check stream is nearly expired
@@ -997,11 +1102,7 @@ impl BiliRecorder {
};
if let Some(entry_store) = self.entry_store.read().await.as_ref() {
entry_store.manifest(
!live_status || range.is_some(),
true,
range,
)
entry_store.manifest(!live_status || range.is_some(), true, range)
} else {
// Return empty manifest if entry_store is not initialized yet
"#EXTM3U\n#EXT-X-VERSION:3\n".to_string()
@@ -1047,18 +1148,34 @@ impl super::Recorder for BiliRecorder {
if let RecorderError::BiliClientError { err: _ } = e {
connection_fail_count =
std::cmp::min(5, connection_fail_count + 1);
} else if let RecorderError::HeaderChanged = e {
// Mark that exit was triggered by header change
self_clone
.header_changed_recently
.store(true, Ordering::Relaxed);
}
break;
}
}
}
*self_clone.is_recording.write().await = false;
// go check status again after random 2-5 secs
let secs = rand::random::<u64>() % 4 + 2;
tokio::time::sleep(Duration::from_secs(
secs + 2_u64.pow(connection_fail_count),
))
.await;
// Check if exit was triggered by header change for faster recovery
let sleep_duration =
if self_clone.header_changed_recently.load(Ordering::Relaxed) {
// Clear the flag after checking
self_clone
.header_changed_recently
.store(false, Ordering::Relaxed);
// Quick recovery for header change - only 500ms
Duration::from_millis(500)
} else {
// Normal random delay for other errors
let secs = rand::random::<u64>() % 4 + 2;
Duration::from_secs(secs + 2_u64.pow(connection_fail_count))
};
tokio::time::sleep(sleep_duration).await;
continue;
}
@@ -1151,7 +1268,11 @@ impl super::Recorder for BiliRecorder {
Ok(if live_id == *self.live_id.read().await {
// just return current cache content
match self.danmu_storage.read().await.as_ref() {
Some(storage) => storage.get_entries(self.first_segment_ts(live_id).await).await,
Some(storage) => {
storage
.get_entries(self.first_segment_ts(live_id).await)
.await
}
None => Vec::new(),
}
} else {
@@ -1169,7 +1290,9 @@ impl super::Recorder for BiliRecorder {
return Ok(Vec::new());
}
let storage = storage.unwrap();
storage.get_entries(self.first_segment_ts(live_id).await).await
storage
.get_entries(self.first_segment_ts(live_id).await)
.await
})
}
@@ -1177,7 +1300,10 @@ impl super::Recorder for BiliRecorder {
*self.live_id.read().await == live_id && *self.live_status.read().await
}
async fn get_archive_subtitle(&self, live_id: &str) -> Result<String, super::errors::RecorderError> {
async fn get_archive_subtitle(
&self,
live_id: &str,
) -> Result<String, super::errors::RecorderError> {
// read subtitle file under work_dir
let work_dir = self.get_work_dir(live_id).await;
let subtitle_file_path = format!("{}/{}", work_dir, "subtitle.srt");
@@ -1194,7 +1320,10 @@ impl super::Recorder for BiliRecorder {
Ok(subtitle_content)
}
async fn generate_archive_subtitle(&self, live_id: &str) -> Result<String, super::errors::RecorderError> {
async fn generate_archive_subtitle(
&self,
live_id: &str,
) -> Result<String, super::errors::RecorderError> {
// generate subtitle file under work_dir
let work_dir = self.get_work_dir(live_id).await;
let subtitle_file_path = format!("{}/{}", work_dir, "subtitle.srt");
@@ -1207,7 +1336,13 @@ impl super::Recorder for BiliRecorder {
log::info!("M3U8 index file generated: {}", m3u8_index_file_path);
// generate a tmp clip file
let clip_file_path = format!("{}/{}", work_dir, "tmp.mp4");
if let Err(e) = crate::ffmpeg::clip_from_m3u8(None::<&crate::progress_reporter::ProgressReporter>, Path::new(&m3u8_index_file_path), Path::new(&clip_file_path)).await {
if let Err(e) = crate::ffmpeg::clip_from_m3u8(
None::<&crate::progress_reporter::ProgressReporter>,
Path::new(&m3u8_index_file_path),
Path::new(&clip_file_path),
)
.await
{
return Err(super::errors::RecorderError::SubtitleGenerationFailed {
error: e.to_string(),
});
@@ -1215,7 +1350,17 @@ impl super::Recorder for BiliRecorder {
log::info!("Temp clip file generated: {}", clip_file_path);
// generate subtitle file
let config = self.config.read().await;
let result = crate::ffmpeg::generate_video_subtitle(None, Path::new(&clip_file_path), "whisper", &config.whisper_model, &config.whisper_prompt, &config.openai_api_key, &config.openai_api_endpoint, &config.whisper_language).await;
let result = crate::ffmpeg::generate_video_subtitle(
None,
Path::new(&clip_file_path),
"whisper",
&config.whisper_model,
&config.whisper_prompt,
&config.openai_api_key,
&config.openai_api_endpoint,
&config.whisper_language,
)
.await;
// write subtitle file
if let Err(e) = result {
return Err(super::errors::RecorderError::SubtitleGenerationFailed {
@@ -1224,7 +1369,12 @@ impl super::Recorder for BiliRecorder {
}
log::info!("Subtitle generated");
let result = result.unwrap();
let subtitle_content = result.subtitle_content.iter().map(item_to_srt).collect::<Vec<String>>().join("");
let subtitle_content = result
.subtitle_content
.iter()
.map(item_to_srt)
.collect::<Vec<String>>()
.join("");
subtitle_file.write_all(subtitle_content.as_bytes()).await?;
log::info!("Subtitle file written");
// remove tmp file

View File

@@ -138,19 +138,6 @@ impl BiliStream {
}
})
}
pub fn is_same(&self, other: &BiliStream) -> bool {
    // Two streams are considered the same when their paths reference the
    // same live session, identified by the "live_<uid>_<sid>" path
    // component (e.g. "live_1848752274_71463808"). Host/query differences
    // are deliberately ignored.
    fn extract_live_id(path: &str) -> &str {
        path.split('/')
            .find(|segment| segment.starts_with("live_"))
            .unwrap_or("")
    }
    extract_live_id(&self.path) == extract_live_id(&other.path)
}
}
impl BiliClient {

View File

@@ -67,12 +67,16 @@ impl DanmuStorage {
// get entries with ts relative to live start time
pub async fn get_entries(&self, live_start_ts: i64) -> Vec<DanmuEntry> {
let mut danmus: Vec<DanmuEntry> = self.cache.read().await.iter().map(|entry| {
DanmuEntry {
let mut danmus: Vec<DanmuEntry> = self
.cache
.read()
.await
.iter()
.map(|entry| DanmuEntry {
ts: entry.ts - live_start_ts,
content: entry.content.clone(),
}
}).collect();
})
.collect();
// filter out danmus with ts < 0
danmus.retain(|entry| entry.ts >= 0);
danmus

View File

@@ -19,11 +19,11 @@ use danmu_stream::danmu_stream::DanmuStream;
use danmu_stream::provider::ProviderType;
use danmu_stream::DanmuMessageType;
use rand::random;
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncWriteExt, BufReader};
use tokio::sync::{broadcast, Mutex, RwLock};
use tokio::task::JoinHandle;
@@ -233,7 +233,13 @@ impl DouyinRecorder {
async fn danmu(&self) -> Result<(), super::errors::RecorderError> {
let cookies = self.account.cookies.clone();
let danmu_room_id = self.danmu_room_id.read().await.clone().parse::<u64>().unwrap_or(0);
let danmu_room_id = self
.danmu_room_id
.read()
.await
.clone()
.parse::<u64>()
.unwrap_or(0);
let danmu_stream = DanmuStream::new(ProviderType::Douyin, &cookies, danmu_room_id).await;
if danmu_stream.is_err() {
let err = danmu_stream.err().unwrap();
@@ -322,24 +328,22 @@ impl DouyinRecorder {
fn parse_stream_url(&self, stream_url: &str) -> (String, String) {
    // Split a stream URL into its base URL (everything up to and including
    // the last '/') and its query string (everything from '?' onward).
    // Example:
    //   "http://host/pull-hls/stream-123.m3u8?sub_m3u8=true&sign=abc"
    //   -> ("http://host/pull-hls/", "?sub_m3u8=true&sign=abc")
    // If no '/' is present the whole input is used as the base; if no '?'
    // is present the query part is empty.
    let base_url = match stream_url.rfind('/') {
        Some(idx) => stream_url[..=idx].to_string(),
        None => stream_url.to_string(),
    };
    let query_params = match stream_url.find('?') {
        Some(idx) => stream_url[idx..].to_string(),
        None => String::new(),
    };
    (base_url, query_params)
}
async fn update_entries(&self) -> Result<u128, RecorderError> {
let task_begin_time = std::time::Instant::now();
@@ -363,8 +367,8 @@ impl DouyinRecorder {
let mut new_segment_fetched = false;
let mut is_first_segment = self.entry_store.read().await.is_none();
let work_dir ;
let work_dir;
// If this is the first segment, prepare but don't create directories yet
if is_first_segment {
// Generate live_id for potential use
@@ -410,7 +414,7 @@ impl DouyinRecorder {
} else {
// Parse the stream URL to extract base URL and query parameters
let (base_url, query_params) = self.parse_stream_url(&stream_url);
// Check if the segment URI already has query parameters
if uri.contains('?') {
// If segment URI has query params, append m3u8 query params with &
@@ -420,17 +424,17 @@ impl DouyinRecorder {
format!("{}{}{}", base_url, uri, query_params)
}
};
// Download segment with retry mechanism
let mut retry_count = 0;
let max_retries = 3;
let mut download_success = false;
let mut work_dir_created = false;
while retry_count < max_retries && !download_success {
let file_name = format!("{}.ts", sequence);
let file_path = format!("{}/{}", work_dir, file_name);
// If this is the first segment, create work directory before first download attempt
if is_first_segment && !work_dir_created {
// Create work directory only when we're about to download
@@ -440,12 +444,8 @@ impl DouyinRecorder {
}
work_dir_created = true;
}
match self
.client
.download_ts(&ts_url, &file_path)
.await
{
match self.client.download_ts(&ts_url, &file_path).await {
Ok(size) => {
if size == 0 {
log::error!("Download segment failed (empty response): {}", ts_url);
@@ -499,7 +499,9 @@ impl DouyinRecorder {
if let Some(danmu_task) = self.danmu_task.lock().await.as_mut() {
danmu_task.abort();
}
if let Some(danmu_stream_task) = self.danmu_stream_task.lock().await.as_mut() {
if let Some(danmu_stream_task) =
self.danmu_stream_task.lock().await.as_mut()
{
danmu_stream_task.abort();
}
let live_id = self.live_id.read().await.clone();
@@ -533,50 +535,76 @@ impl DouyinRecorder {
download_success = true;
}
Err(e) => {
log::warn!("Failed to download segment (attempt {}/{}): {} - URL: {}",
retry_count + 1, max_retries, e, ts_url);
log::warn!(
"Failed to download segment (attempt {}/{}): {} - URL: {}",
retry_count + 1,
max_retries,
e,
ts_url
);
retry_count += 1;
if retry_count < max_retries {
tokio::time::sleep(Duration::from_millis(1000 * retry_count as u64)).await;
tokio::time::sleep(Duration::from_millis(1000 * retry_count as u64))
.await;
continue;
}
// If all retries failed, check if it's a 400 error
if e.to_string().contains("400") {
log::error!("HTTP 400 error for segment, stream URL may be expired: {}", ts_url);
log::error!(
"HTTP 400 error for segment, stream URL may be expired: {}",
ts_url
);
*self.stream_url.write().await = None;
// Clean up empty directory if first segment failed
if is_first_segment && work_dir_created {
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!("Failed to cleanup empty work directory {}: {}", work_dir, cleanup_err);
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await
{
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
);
}
}
return Err(RecorderError::NoStreamAvailable);
}
// Clean up empty directory if first segment failed
if is_first_segment && work_dir_created {
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!("Failed to cleanup empty work directory {}: {}", work_dir, cleanup_err);
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
);
}
}
return Err(e.into());
}
}
}
if !download_success {
log::error!("Failed to download segment after {} retries: {}", max_retries, ts_url);
log::error!(
"Failed to download segment after {} retries: {}",
max_retries,
ts_url
);
// Clean up empty directory if first segment failed after all retries
if is_first_segment && work_dir_created {
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!("Failed to cleanup empty work directory {}: {}", work_dir, cleanup_err);
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
);
}
}
continue;
}
}
@@ -737,7 +765,10 @@ impl Recorder for DouyinRecorder {
m3u8_content
}
async fn get_archive_subtitle(&self, live_id: &str) -> Result<String, super::errors::RecorderError> {
async fn get_archive_subtitle(
&self,
live_id: &str,
) -> Result<String, super::errors::RecorderError> {
let work_dir = self.get_work_dir(live_id).await;
let subtitle_file_path = format!("{}/{}", work_dir, "subtitle.srt");
let subtitle_file = File::open(subtitle_file_path).await;
@@ -753,7 +784,10 @@ impl Recorder for DouyinRecorder {
Ok(subtitle_content)
}
async fn generate_archive_subtitle(&self, live_id: &str) -> Result<String, super::errors::RecorderError> {
async fn generate_archive_subtitle(
&self,
live_id: &str,
) -> Result<String, super::errors::RecorderError> {
// generate subtitle file under work_dir
let work_dir = self.get_work_dir(live_id).await;
let subtitle_file_path = format!("{}/{}", work_dir, "subtitle.srt");
@@ -765,22 +799,43 @@ impl Recorder for DouyinRecorder {
tokio::fs::write(&m3u8_index_file_path, m3u8_content).await?;
// generate a tmp clip file
let clip_file_path = format!("{}/{}", work_dir, "tmp.mp4");
if let Err(e) = crate::ffmpeg::clip_from_m3u8(None::<&crate::progress_reporter::ProgressReporter>, Path::new(&m3u8_index_file_path), Path::new(&clip_file_path)).await {
if let Err(e) = crate::ffmpeg::clip_from_m3u8(
None::<&crate::progress_reporter::ProgressReporter>,
Path::new(&m3u8_index_file_path),
Path::new(&clip_file_path),
)
.await
{
return Err(super::errors::RecorderError::SubtitleGenerationFailed {
error: e.to_string(),
});
}
// generate subtitle file
let config = self.config.read().await;
let result = crate::ffmpeg::generate_video_subtitle(None, Path::new(&clip_file_path), "whisper", &config.whisper_model, &config.whisper_prompt, &config.openai_api_key, &config.openai_api_endpoint, &config.whisper_language).await;
let result = crate::ffmpeg::generate_video_subtitle(
None,
Path::new(&clip_file_path),
"whisper",
&config.whisper_model,
&config.whisper_prompt,
&config.openai_api_key,
&config.openai_api_endpoint,
&config.whisper_language,
)
.await;
// write subtitle file
if let Err(e) = result {
return Err(super::errors::RecorderError::SubtitleGenerationFailed {
error: e.to_string(),
});
}
}
let result = result.unwrap();
let subtitle_content = result.subtitle_content.iter().map(item_to_srt).collect::<Vec<String>>().join("");
let subtitle_content = result
.subtitle_content
.iter()
.map(item_to_srt)
.collect::<Vec<String>>()
.join("");
subtitle_file.write_all(subtitle_content.as_bytes()).await?;
// remove tmp file
@@ -857,7 +912,11 @@ impl Recorder for DouyinRecorder {
Ok(if live_id == *self.live_id.read().await {
// just return current cache content
match self.danmu_store.read().await.as_ref() {
Some(storage) => storage.get_entries(self.first_segment_ts(live_id).await).await,
Some(storage) => {
storage
.get_entries(self.first_segment_ts(live_id).await)
.await
}
None => Vec::new(),
}
} else {
@@ -875,7 +934,9 @@ impl Recorder for DouyinRecorder {
return Ok(Vec::new());
}
let storage = storage.unwrap();
storage.get_entries(self.first_segment_ts(live_id).await).await
storage
.get_entries(self.first_segment_ts(live_id).await)
.await
})
}

View File

@@ -91,7 +91,7 @@ impl DouyinClient {
if let Ok(data) = resp.json::<super::response::DouyinRelationResponse>().await {
if data.status_code == 0 {
let owner_sec_uid = &data.owner_sec_uid;
// Find the user's own info in the followings list by matching sec_uid
if let Some(followings) = &data.followings {
for following in followings {
@@ -109,15 +109,13 @@ impl DouyinClient {
}
}
}
// If not found in followings, create a minimal user info from owner_sec_uid
let user = super::response::User {
id_str: "".to_string(), // We don't have the numeric UID
sec_uid: owner_sec_uid.clone(),
nickname: "抖音用户".to_string(), // Default nickname
avatar_thumb: super::response::AvatarThumb {
url_list: vec![],
},
avatar_thumb: super::response::AvatarThumb { url_list: vec![] },
follow_info: super::response::FollowInfo::default(),
foreign_user: 0,
open_id_str: "".to_string(),
@@ -126,10 +124,10 @@ impl DouyinClient {
}
}
}
Err(DouyinClientError::Io(std::io::Error::new(
std::io::ErrorKind::NotFound,
"Failed to get user info from Douyin relation API"
"Failed to get user info from Douyin relation API",
)))
}
@@ -148,7 +146,8 @@ impl DouyinClient {
&self,
url: &str,
) -> Result<(MediaPlaylist, String), DouyinClientError> {
let content = self.client
let content = self
.client
.get(url)
.header("Referer", "https://live.douyin.com/")
.header("User-Agent", USER_AGENT)
@@ -183,7 +182,8 @@ impl DouyinClient {
}
pub async fn download_ts(&self, url: &str, path: &str) -> Result<u64, DouyinClientError> {
let response = self.client
let response = self
.client
.get(url)
.header("Referer", "https://live.douyin.com/")
.header("User-Agent", USER_AGENT)
@@ -212,5 +212,3 @@ impl DouyinClient {
Ok(size)
}
}

View File

@@ -182,8 +182,7 @@ pub struct Extra {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PullDatas {
}
pub struct PullDatas {}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -436,8 +435,7 @@ pub struct Stats {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LinkerMap {
}
pub struct LinkerMap {}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -478,13 +476,11 @@ pub struct LinkerDetail {
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LinkerMapStr {
}
pub struct LinkerMapStr {}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PlaymodeDetail {
}
pub struct PlaymodeDetail {}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -676,4 +672,4 @@ pub struct AvatarSmall {
pub uri: String,
#[serde(rename = "url_list")]
pub url_list: Vec<String>,
}
}

View File

@@ -212,7 +212,6 @@ impl EntryStore {
/// `vod` indicates the manifest is for stream or video.
/// `force_time` adds DATE-TIME tag for each entry.
pub fn manifest(&self, vod: bool, force_time: bool, range: Option<Range>) -> String {
log::info!("Generate manifest for range: {:?} with vod: {} and force_time: {}", range, vod, force_time);
let mut m3u8_content = "#EXTM3U\n".to_string();
m3u8_content += "#EXT-X-VERSION:6\n";
m3u8_content += if vod {
@@ -240,12 +239,6 @@ impl EntryStore {
// Collect entries in range
let first_entry = self.entries.first().unwrap();
let first_entry_ts = first_entry.ts_seconds();
log::debug!("First entry ts: {}", first_entry_ts);
let last_entry = self.entries.last().unwrap();
let last_entry_ts = last_entry.ts_seconds();
log::debug!("Last entry ts: {}", last_entry_ts);
log::debug!("Full length: {}", last_entry_ts - first_entry_ts);
log::debug!("Range: {:?}", range);
let mut entries_in_range = vec![];
for e in &self.entries {
// ignore header, cause it's already in EXT-X-MAP

View File

@@ -14,6 +14,7 @@ custom_error! {pub RecorderError
InvalidStream {stream: BiliStream} = "Invalid stream: {stream}",
SlowStream {stream: BiliStream} = "Stream is too slow: {stream}",
EmptyHeader = "Header url is empty",
HeaderChanged = "Header URL changed, need to restart recording",
InvalidTimestamp = "Header timestamp is invalid",
InvalidDBOP {err: crate::database::DatabaseError } = "Database error: {err}",
BiliClientError {err: super::bilibili::errors::BiliClientError} = "BiliClient error: {err}",

View File

@@ -613,7 +613,12 @@ impl RecorderManager {
Ok(self.db.get_record(room_id, live_id).await?)
}
pub async fn get_archive_subtitle(&self, platform: PlatformType, room_id: u64, live_id: &str) -> Result<String, RecorderManagerError> {
pub async fn get_archive_subtitle(
&self,
platform: PlatformType,
room_id: u64,
live_id: &str,
) -> Result<String, RecorderManagerError> {
let recorder_id = format!("{}:{}", platform.as_str(), room_id);
if let Some(recorder_ref) = self.recorders.read().await.get(&recorder_id) {
let recorder = recorder_ref.as_ref();
@@ -623,7 +628,12 @@ impl RecorderManager {
}
}
pub async fn generate_archive_subtitle(&self, platform: PlatformType, room_id: u64, live_id: &str) -> Result<String, RecorderManagerError> {
pub async fn generate_archive_subtitle(
&self,
platform: PlatformType,
room_id: u64,
live_id: &str,
) -> Result<String, RecorderManagerError> {
let recorder_id = format!("{}:{}", platform.as_str(), room_id);
if let Some(recorder_ref) = self.recorders.read().await.get(&recorder_id) {
let recorder = recorder_ref.as_ref();

View File

@@ -22,6 +22,11 @@
"plugins": {
"sql": {
"preload": ["sqlite:data_v2.db"]
},
"deep-link": {
"desktop": {
"schemes": ["bsr"]
}
}
},
"app": {

View File

@@ -5,12 +5,47 @@
import Setting from "./page/Setting.svelte";
import Account from "./page/Account.svelte";
import About from "./page/About.svelte";
import { log } from "./lib/invoker";
import { log, onOpenUrl } from "./lib/invoker";
import Clip from "./page/Clip.svelte";
import Task from "./page/Task.svelte";
import AI from "./page/AI.svelte";
import { onMount } from "svelte";
let active = "总览";
onMount(async () => {
await onOpenUrl((urls: string[]) => {
console.log("Received Deep Link:", urls);
if (urls.length > 0) {
const url = urls[0];
// extract platform and room_id from url
// url example:
// bsr://live.bilibili.com/167537?live_from=85001&spm_id_from=333.1365.live_users.item.click
// bsr://live.douyin.com/200525029536
let platform = "";
let room_id = "";
if (url.startsWith("bsr://live.bilibili.com/")) {
// 1. remove bsr://live.bilibili.com/
// 2. remove all query params
room_id = url.replace("bsr://live.bilibili.com/", "").split("?")[0];
platform = "bilibili";
}
if (url.startsWith("bsr://live.douyin.com/")) {
room_id = url.replace("bsr://live.douyin.com/", "").split("?")[0];
platform = "douyin";
}
if (platform && room_id) {
// switch to room page
active = "直播间";
}
}
});
});
log.info("App loaded");
</script>

View File

@@ -42,7 +42,9 @@
}
// find video in videos
video = videos.find((v) => v.id === parseInt(videoId));
let new_video = videos.find((v) => v.id === parseInt(videoId));
handleVideoChange(new_video);
// 显示视频预览
showVideoPreview = true;
@@ -55,6 +57,11 @@
});
async function handleVideoChange(newVideo: VideoItem) {
if (newVideo) {
// get cover from video
const cover = await invoke("get_video_cover", { id: newVideo.id });
newVideo.cover = cover as string;
}
video = newVideo;
}

View File

@@ -142,8 +142,10 @@
}
if (TAURI_ENV) {
console.log("register tauri network plugin");
shaka.net.NetworkingEngine.registerScheme("http", tauriNetworkPlugin);
shaka.net.NetworkingEngine.registerScheme("https", tauriNetworkPlugin);
shaka.net.NetworkingEngine.registerScheme("tauri", tauriNetworkPlugin);
}
async function update_stream_list() {

View File

@@ -29,7 +29,6 @@
import TypeSelect from "./TypeSelect.svelte";
import { invoke, TAURI_ENV, listen, log, close_window } from "../lib/invoker";
import { onDestroy, onMount } from "svelte";
import { getCurrentWebviewWindow } from "@tauri-apps/api/webviewWindow";
import { listen as tauriListen } from "@tauri-apps/api/event";
import type { AccountInfo } from "./db";

View File

@@ -4,6 +4,7 @@ import { fetch as tauri_fetch } from "@tauri-apps/plugin-http";
import { convertFileSrc as tauri_convert } from "@tauri-apps/api/core";
import { listen as tauri_listen } from "@tauri-apps/api/event";
import { open as tauri_open } from "@tauri-apps/plugin-shell";
import { onOpenUrl as tauri_onOpenUrl } from "@tauri-apps/plugin-deep-link";
declare global {
interface Window {
@@ -169,6 +170,12 @@ async function close_window() {
window.close();
}
async function onOpenUrl(func: (urls: string[]) => void) {
if (TAURI_ENV) {
return await tauri_onOpenUrl(func);
}
}
export {
invoke,
get,
@@ -180,4 +187,5 @@ export {
open,
log,
close_window,
onOpenUrl,
};

View File

@@ -622,22 +622,24 @@
on:click={loadModels}
disabled={!settings.endpoint || !settings.api_key || isLoadingModels}
>
{isLoadingModels ? '加载中...' : '刷新列表'}
{isLoadingModels ? '加载中...' : '刷新模型列表'}
</button>
</div>
<select
id="model"
bind:value={settings.model}
class="w-full px-4 py-3 border border-gray-300 dark:border-gray-600 rounded-xl bg-white dark:bg-gray-700 text-gray-900 dark:text-white focus:ring-2 focus:ring-blue-500 focus:border-transparent transition-all duration-200"
>
{#if availableModels.length === 0}
<option value="">请先配置 API 并刷新模型列表</option>
{:else}
<div class="relative">
<input
id="model"
type="text"
bind:value={settings.model}
list="model-options"
placeholder="输入模型名称或从列表中选择"
class="w-full px-4 py-3 border border-gray-300 dark:border-gray-600 rounded-xl bg-white dark:bg-gray-700 text-gray-900 dark:text-white placeholder-gray-500 dark:placeholder-gray-400 focus:ring-2 focus:ring-blue-500 focus:border-transparent transition-all duration-200"
/>
<datalist id="model-options">
{#each availableModels as model}
<option value={model.value}>{model.label}</option>
{/each}
{/if}
</select>
</datalist>
</div>
</div>
</div>

View File

@@ -1,5 +1,5 @@
<script lang="ts">
import { invoke, open } from "../lib/invoker";
import { invoke, open, onOpenUrl } from "../lib/invoker";
import { message } from "@tauri-apps/plugin-dialog";
import { fade, scale } from "svelte/transition";
import { Dropdown, DropdownItem } from "flowbite-svelte";
@@ -15,11 +15,11 @@
Trash2,
X,
History,
Activity,
} from "lucide-svelte";
import BilibiliIcon from "../lib/BilibiliIcon.svelte";
import DouyinIcon from "../lib/DouyinIcon.svelte";
import AutoRecordIcon from "../lib/AutoRecordIcon.svelte";
import { onMount } from "svelte";
export let room_count = 0;
let room_active = 0;
@@ -62,13 +62,6 @@
update_summary();
setInterval(update_summary, 5000);
function format_time(time: number) {
let hours = Math.floor(time / 3600);
let minutes = Math.floor((time % 3600) / 60);
let seconds = Math.floor(time % 60);
return `${hours.toString().padStart(2, "0")}:${minutes.toString().padStart(2, "0")}:${seconds.toString().padStart(2, "0")}`;
}
// modals
let deleteModal = false;
let deleteRoom = null;
@@ -82,21 +75,30 @@
let archiveModal = false;
let archiveRoom = null;
let archives: RecordItem[] = [];
async function showArchives(room_id: number) {
archives = await invoke("get_archives", { roomId: room_id });
// sort archives by ts in descending order
archives.sort((a, b) => {
updateArchives();
archiveModal = true;
console.log(archives);
}
async function updateArchives() {
let updated_archives = (await invoke("get_archives", {
roomId: archiveRoom.room_id,
})) as RecordItem[];
updated_archives.sort((a, b) => {
return (
new Date(b.created_at).getTime() - new Date(a.created_at).getTime()
);
});
archiveModal = true;
console.log(archives);
archives = updated_archives;
}
function format_ts(ts_string: string) {
const date = new Date(ts_string);
return date.toLocaleString();
}
function format_duration(duration: number) {
const hours = Math.floor(duration / 3600)
.toString()
@@ -108,6 +110,7 @@
return `${hours}:${minutes}:${seconds}`;
}
function format_size(size: number) {
if (size < 1024) {
return `${size} B`;
@@ -119,6 +122,7 @@
return `${(size / 1024 / 1024 / 1024).toFixed(2)} GiB`;
}
}
function calc_bitrate(size: number, duration: number) {
return ((size * 8) / duration / 1024).toFixed(0);
}
@@ -135,9 +139,6 @@
}
}
// Add toggle state for auto-recording
let autoRecordStates = new Map<string, boolean>();
// Function to toggle auto-record state
function toggleEnabled(room: RecorderInfo) {
invoke("set_enable", {
@@ -165,6 +166,54 @@
open("https://live.douyin.com/" + room.room_id);
}
}
function addNewRecorder(room_id: number, platform: string) {
invoke("add_recorder", {
roomId: room_id,
platform: platform,
})
.then(() => {
addModal = false;
addRoom = "";
})
.catch(async (e) => {
await message(e);
});
}
onMount(async () => {
await onOpenUrl((urls: string[]) => {
console.log("Received Deep Link:", urls);
if (urls.length > 0) {
const url = urls[0];
// extract platform and room_id from url
// url example:
// bsr://live.bilibili.com/167537?live_from=85001&spm_id_from=333.1365.live_users.item.click
// bsr://live.douyin.com/200525029536
let platform = "";
let room_id = "";
if (url.startsWith("bsr://live.bilibili.com/")) {
// 1. remove bsr://live.bilibili.com/
// 2. remove all query params
room_id = url.replace("bsr://live.bilibili.com/", "").split("?")[0];
platform = "bilibili";
}
if (url.startsWith("bsr://live.douyin.com/")) {
room_id = url.replace("bsr://live.douyin.com/", "").split("?")[0];
platform = "douyin";
}
if (platform && room_id) {
addModal = true;
addRoom = room_id;
selectedPlatform = platform;
}
}
});
});
</script>
<div class="flex-1 p-6 overflow-auto custom-scrollbar-light bg-gray-50">
@@ -515,17 +564,9 @@
class="px-4 py-2 bg-[#0A84FF] hover:bg-[#0A84FF]/90 text-white text-sm font-medium rounded-lg transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
disabled={!addValid}
on:click={() => {
invoke("add_recorder", {
roomId: Number(addRoom),
platform: selectedPlatform,
})
.then(() => {
addModal = false;
addRoom = "";
})
.catch(async (e) => {
await message(e);
});
addNewRecorder(Number(addRoom), selectedPlatform);
addModal = false;
addRoom = "";
}}
>
添加
@@ -599,7 +640,7 @@
</tr>
</thead>
<tbody class="divide-y divide-gray-200 dark:divide-gray-700/50">
{#each archives as archive}
{#each archives as archive (archive.live_id)}
<tr
class="group hover:bg-[#f5f5f7] dark:hover:bg-[#3a3a3c] transition-colors"
>
@@ -663,9 +704,7 @@
liveId: archive.live_id,
})
.then(async () => {
archives = await invoke("get_archives", {
roomId: archiveRoom.room_id,
});
await updateArchives();
})
.catch((e) => {
alert(e);

View File

@@ -817,11 +817,16 @@
svelte-hmr "^0.15.3"
vitefu "^0.2.4"
"@tauri-apps/api@^2.4.1", "@tauri-apps/api@^2.6.0":
"@tauri-apps/api@^2.6.0":
version "2.6.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/api/-/api-2.6.0.tgz#efd873bf04b0d72cea81f9397e16218f5deafe0f"
integrity sha512-hRNcdercfgpzgFrMXWwNDBN0B7vNzOzRepy6ZAmhxi5mDLVPNrTpo9MGg2tN/F7JRugj4d2aF7E1rtPXAHaetg==
"@tauri-apps/api@^2.6.2":
version "2.7.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/api/-/api-2.7.0.tgz#44319e7cd34e898d21cc770961209bd50ac4cefe"
integrity sha512-v7fVE8jqBl8xJFOcBafDzXFc8FnicoH3j8o8DNNs0tHuEBmXUDqrCOAzMRX0UkfpwqZLqvrvK0GNQ45DfnoVDg==
"@tauri-apps/cli-darwin-arm64@2.6.2":
version "2.6.2"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-2.6.2.tgz#c69478438cae93dd892ea43d6cf7934a1c7f7839"
@@ -894,6 +899,13 @@
"@tauri-apps/cli-win32-ia32-msvc" "2.6.2"
"@tauri-apps/cli-win32-x64-msvc" "2.6.2"
"@tauri-apps/plugin-deep-link@~2":
version "2.4.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/plugin-deep-link/-/plugin-deep-link-2.4.1.tgz#2f22d01d3e3795a607a2b31857cf99fb56126701"
integrity sha512-I8Bo+spcAKGhIIJ1qN/gapp/Ot3mosQL98znxr975Zn2ODAkUZ++BQ9FnTpR7PDwfIl5ANSGdIW/YU01zVTcJw==
dependencies:
"@tauri-apps/api" "^2.6.0"
"@tauri-apps/plugin-dialog@~2":
version "2.3.0"
resolved "https://registry.yarnpkg.com/@tauri-apps/plugin-dialog/-/plugin-dialog-2.3.0.tgz#123d2cd3d98467b9b115d23ad71eef469d6ead35"