refactor: cleanup code

This commit is contained in:
Xinrea
2025-09-11 22:49:11 +08:00
parent 01a0c929e8
commit 4431b10cb7
37 changed files with 740 additions and 806 deletions

View File

@@ -11,6 +11,13 @@ license = ""
repository = ""
edition = "2021"
[lints.clippy]
correctness="deny"
suspicious="deny"
complexity="deny"
style="deny"
perf="deny"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]

View File

@@ -1,4 +1,4 @@
fn main() {
#[cfg(feature = "gui")]
tauri_build::build()
tauri_build::build();
}

View File

@@ -11,7 +11,7 @@ use crate::{
pub struct DanmuStream {
pub provider_type: ProviderType,
pub identifier: String,
pub room_id: u64,
pub room_id: i64,
pub provider: Arc<RwLock<Box<dyn DanmuProvider>>>,
tx: mpsc::UnboundedSender<DanmuMessageType>,
rx: Arc<RwLock<mpsc::UnboundedReceiver<DanmuMessageType>>>,
@@ -21,7 +21,7 @@ impl DanmuStream {
pub async fn new(
provider_type: ProviderType,
identifier: &str,
room_id: u64,
room_id: i64,
) -> Result<Self, DanmuStreamError> {
let (tx, rx) = mpsc::unbounded_channel();
let provider = new(provider_type, identifier, room_id).await?;

View File

@@ -29,7 +29,7 @@ pub enum DanmuMessageType {
#[derive(Debug, Clone)]
pub struct DanmuMessage {
pub room_id: u64,
pub room_id: i64,
pub user_id: u64,
pub user_name: String,
pub message: String,

View File

@@ -36,15 +36,15 @@ type WsWriteType = futures_util::stream::SplitSink<
pub struct BiliDanmu {
client: ApiClient,
room_id: u64,
user_id: u64,
room_id: i64,
user_id: i64,
stop: Arc<RwLock<bool>>,
write: Arc<RwLock<Option<WsWriteType>>>,
}
#[async_trait]
impl DanmuProvider for BiliDanmu {
async fn new(cookie: &str, room_id: u64) -> Result<Self, DanmuStreamError> {
async fn new(cookie: &str, room_id: i64) -> Result<Self, DanmuStreamError> {
// find DedeUserID=<user_id> in cookie str
let user_id = BiliDanmu::parse_user_id(cookie)?;
// add buvid3 to cookie
@@ -241,7 +241,7 @@ impl BiliDanmu {
async fn get_danmu_info(
&self,
wbi_key: &str,
room_id: u64,
room_id: i64,
) -> Result<DanmuInfo, DanmuStreamError> {
let params = self
.get_sign(
@@ -268,7 +268,7 @@ impl BiliDanmu {
Ok(resp)
}
async fn get_real_room(&self, wbi_key: &str, room_id: u64) -> Result<u64, DanmuStreamError> {
async fn get_real_room(&self, wbi_key: &str, room_id: i64) -> Result<i64, DanmuStreamError> {
let params = self
.get_sign(
wbi_key,
@@ -296,14 +296,14 @@ impl BiliDanmu {
Ok(resp)
}
fn parse_user_id(cookie: &str) -> Result<u64, DanmuStreamError> {
fn parse_user_id(cookie: &str) -> Result<i64, DanmuStreamError> {
let mut user_id = None;
// find DedeUserID=<user_id> in cookie str
let re = Regex::new(r"DedeUserID=(\d+)").unwrap();
if let Some(captures) = re.captures(cookie) {
if let Some(user) = captures.get(1) {
user_id = Some(user.as_str().parse::<u64>().unwrap());
user_id = Some(user.as_str().parse::<i64>().unwrap());
}
}
@@ -407,8 +407,8 @@ impl BiliDanmu {
#[derive(Serialize)]
struct WsSend {
uid: u64,
roomid: u64,
uid: i64,
roomid: i64,
key: String,
protover: u32,
platform: String,
@@ -439,5 +439,5 @@ pub struct RoomInit {
#[derive(Debug, Deserialize, Clone)]
pub struct RoomInitData {
room_id: u64,
room_id: i64,
}

View File

@@ -33,7 +33,7 @@ type WsWriteType =
futures_util::stream::SplitSink<WebSocketStream<MaybeTlsStream<TcpStream>>, WsMessage>;
pub struct DouyinDanmu {
room_id: u64,
room_id: i64,
cookie: String,
stop: Arc<RwLock<bool>>,
write: Arc<RwLock<Option<WsWriteType>>>,
@@ -268,7 +268,7 @@ impl DouyinDanmu {
async fn handle_binary_message(
data: &[u8],
tx: &mpsc::UnboundedSender<DanmuMessageType>,
room_id: u64,
room_id: i64,
) -> Result<Option<PushFrame>, DanmuStreamError> {
// First decode the PushFrame
let push_frame = PushFrame::decode(Bytes::from(data.to_vec())).map_err(|e| {
@@ -394,7 +394,7 @@ async fn handle_binary_message(
#[async_trait]
impl DanmuProvider for DouyinDanmu {
async fn new(identifier: &str, room_id: u64) -> Result<Self, DanmuStreamError> {
async fn new(identifier: &str, room_id: i64) -> Result<Self, DanmuStreamError> {
Ok(Self {
room_id,
cookie: identifier.to_string(),

View File

@@ -17,7 +17,7 @@ pub enum ProviderType {
#[async_trait]
pub trait DanmuProvider: Send + Sync {
async fn new(identifier: &str, room_id: u64) -> Result<Self, DanmuStreamError>
async fn new(identifier: &str, room_id: i64) -> Result<Self, DanmuStreamError>
where
Self: Sized;
@@ -57,7 +57,7 @@ pub trait DanmuProvider: Send + Sync {
pub async fn new(
provider_type: ProviderType,
identifier: &str,
room_id: u64,
room_id: i64,
) -> Result<Box<dyn DanmuProvider>, DanmuStreamError> {
match provider_type {
ProviderType::BiliBili => {

View File

@@ -68,7 +68,7 @@ fn default_openai_api_endpoint() -> String {
}
fn default_openai_api_key() -> String {
"".to_string()
String::new()
}
fn default_clip_name_format() -> String {
@@ -95,7 +95,7 @@ fn default_cleanup_source_flv() -> bool {
}
fn default_webhook_url() -> String {
"".to_string()
String::new()
}
impl Config {

View File

@@ -32,7 +32,7 @@ const MAX_DELAY: f64 = 6.0;
pub fn danmu_to_ass(danmus: Vec<DanmuEntry>) -> String {
// ASS header
let header = r#"[Script Info]
let header = r"[Script Info]
Title: Bilibili Danmaku
ScriptType: v4.00+
Collisions: Normal
@@ -46,7 +46,7 @@ Style: Default,微软雅黑,36,&H7fFFFFFF,&H7fFFFFFF,&H7f000000,&H7f000000,0,0,0
[Events]
Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
"#;
";
let mut normal = normal_danmaku();
let font_size = 36.0; // Default font size
@@ -87,22 +87,22 @@ Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
.join("\n");
// Combine header and events
format!("{}\n{}", header, events)
format!("{header}\n{events}")
}
fn format_time(seconds: f64) -> String {
let hours = (seconds / 3600.0) as i32;
let minutes = ((seconds % 3600.0) / 60.0) as i32;
let seconds = seconds % 60.0;
format!("{}:{:02}:{:05.2}", hours, minutes, seconds)
format!("{hours}:{minutes:02}:{seconds:05.2}")
}
fn escape_text(text: &str) -> String {
text.replace("\\", "\\\\")
.replace("{", "")
.replace("}", "")
.replace("\r", "")
.replace("\n", "\\N")
text.replace('\\', "\\\\")
.replace('{', "")
.replace('}', "")
.replace('\r', "")
.replace('\n', "\\N")
}
fn normal_danmaku() -> impl FnMut(f64, f64, f64, bool) -> Option<DanmakuPosition> {
@@ -144,8 +144,8 @@ fn normal_danmaku() -> impl FnMut(f64, f64, f64, bool) -> Option<DanmakuPosition
let p = space.m;
let m = p + hv;
let mut tas = t0s;
let mut tal = t0l;
let mut time_actual_start = t0s;
let mut time_actual_leave = t0l;
for other in &used {
if other.p >= m || other.m <= p {
@@ -154,13 +154,13 @@ fn normal_danmaku() -> impl FnMut(f64, f64, f64, bool) -> Option<DanmakuPosition
if other.b && b {
continue;
}
tas = tas.max(other.tf);
tal = tal.max(other.td);
time_actual_start = time_actual_start.max(other.tf);
time_actual_leave = time_actual_leave.max(other.td);
}
suggestions.push(PositionSuggestion {
p,
r: (tas - t0s).max(tal - t0l),
r: (time_actual_start - t0s).max(time_actual_leave - t0l),
});
}

View File

@@ -9,7 +9,7 @@ use rand::Rng;
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
pub struct AccountRow {
pub platform: String,
pub uid: u64, // Keep for Bilibili compatibility
pub uid: i64, // Keep for Bilibili compatibility
pub id_str: Option<String>, // New field for string IDs like Douyin sec_uid
pub name: String,
pub avatar: String,
@@ -30,19 +30,18 @@ impl Database {
let platform = PlatformType::from_str(platform).unwrap();
let csrf = if platform == PlatformType::Douyin {
Some("".to_string())
Some(String::new())
} else {
// parse cookies
cookies
.split(';')
.map(|cookie| cookie.trim())
.map(str::trim)
.find_map(|cookie| -> Option<String> {
match cookie.starts_with("bili_jct=") {
true => {
let var_name = &"bili_jct=";
Some(cookie[var_name.len()..].to_string())
}
false => None,
if cookie.starts_with("bili_jct=") {
let var_name = &"bili_jct=";
Some(cookie[var_name.len()..].to_string())
} else {
None
}
})
};
@@ -54,45 +53,48 @@ impl Database {
// parse uid and id_str based on platform
let (uid, id_str) = if platform == PlatformType::BiliBili {
// For Bilibili, extract numeric uid from cookies
let uid = cookies
let uid = (*cookies
.split("DedeUserID=")
.collect::<Vec<&str>>()
.get(1)
.unwrap()
.split(";")
.split(';')
.collect::<Vec<&str>>()
.first()
.unwrap()
.to_string()
.parse::<u64>()
.map_err(|_| DatabaseError::InvalidCookies)?;
.unwrap())
.to_string()
.parse::<u64>()
.map_err(|_| DatabaseError::InvalidCookies)?;
(uid, None)
} else {
// For Douyin, use temporary uid and will set id_str later with real sec_uid
let temp_uid = rand::thread_rng().gen_range(10000..=i32::MAX) as u64;
(temp_uid, Some(format!("temp_{}", temp_uid)))
// Fix: Generate the temporary uid directly as a u64 in 10000..=i32::MAX so no signed-to-unsigned cast is needed, avoiding `clippy::cast-sign-loss`.
let temp_uid = rand::thread_rng().gen_range(10000u64..=i32::MAX as u64);
(temp_uid, Some(format!("temp_{temp_uid}")))
};
let uid = i64::try_from(uid).map_err(|_| DatabaseError::InvalidCookies)?;
let account = AccountRow {
platform: platform.as_str().to_string(),
uid,
id_str,
name: "".into(),
avatar: "".into(),
name: String::new(),
avatar: String::new(),
csrf: csrf.unwrap(),
cookies: cookies.into(),
created_at: Utc::now().to_rfc3339(),
};
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)").bind(account.uid as i64).bind(&account.platform).bind(&account.id_str).bind(&account.name).bind(&account.avatar).bind(&account.csrf).bind(&account.cookies).bind(&account.created_at).execute(&lock).await?;
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)").bind(uid).bind(&account.platform).bind(&account.id_str).bind(&account.name).bind(&account.avatar).bind(&account.csrf).bind(&account.cookies).bind(&account.created_at).execute(&lock).await?;
Ok(account)
}
pub async fn remove_account(&self, platform: &str, uid: u64) -> Result<(), DatabaseError> {
pub async fn remove_account(&self, platform: &str, uid: i64) -> Result<(), DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
let sql = sqlx::query("DELETE FROM accounts WHERE uid = $1 and platform = $2")
.bind(uid as i64)
.bind(uid)
.bind(platform)
.execute(&lock)
.await?;
@@ -105,7 +107,7 @@ impl Database {
pub async fn update_account(
&self,
platform: &str,
uid: u64,
uid: i64,
name: &str,
avatar: &str,
) -> Result<(), DatabaseError> {
@@ -115,7 +117,7 @@ impl Database {
)
.bind(name)
.bind(avatar)
.bind(uid as i64)
.bind(uid)
.bind(platform)
.execute(&lock)
.await?;
@@ -135,17 +137,28 @@ impl Database {
let lock = self.db.read().await.clone().unwrap();
// If the id_str changed, we need to delete the old record and create a new one
if old_account.id_str.as_deref() != Some(new_id_str) {
if old_account.id_str.as_deref() == Some(new_id_str) {
// id_str is the same, just update name and avatar
sqlx::query(
"UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4",
)
.bind(name)
.bind(avatar)
.bind(old_account.uid)
.bind(&old_account.platform)
.execute(&lock)
.await?;
} else {
// Delete the old record (for Douyin accounts, we use uid to identify)
sqlx::query("DELETE FROM accounts WHERE uid = $1 and platform = $2")
.bind(old_account.uid as i64)
.bind(old_account.uid)
.bind(&old_account.platform)
.execute(&lock)
.await?;
// Insert the new record with updated id_str
sqlx::query("INSERT INTO accounts (uid, platform, id_str, name, avatar, csrf, cookies, created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)")
.bind(old_account.uid as i64)
.bind(old_account.uid)
.bind(&old_account.platform)
.bind(new_id_str)
.bind(name)
@@ -155,17 +168,6 @@ impl Database {
.bind(&old_account.created_at)
.execute(&lock)
.await?;
} else {
// id_str is the same, just update name and avatar
sqlx::query(
"UPDATE accounts SET name = $1, avatar = $2 WHERE uid = $3 and platform = $4",
)
.bind(name)
.bind(avatar)
.bind(old_account.uid as i64)
.bind(&old_account.platform)
.execute(&lock)
.await?;
}
Ok(())
@@ -178,12 +180,12 @@ impl Database {
.await?)
}
pub async fn get_account(&self, platform: &str, uid: u64) -> Result<AccountRow, DatabaseError> {
pub async fn get_account(&self, platform: &str, uid: i64) -> Result<AccountRow, DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
Ok(sqlx::query_as::<_, AccountRow>(
"SELECT * FROM accounts WHERE uid = $1 and platform = $2",
)
.bind(uid as i64)
.bind(uid)
.bind(platform)
.fetch_one(&lock)
.await?)

View File

@@ -22,6 +22,8 @@ pub enum DatabaseError {
NotFound,
#[error("Cookies are invalid")]
InvalidCookies,
#[error("Number exceed i64 range")]
NumberExceedI64Range,
#[error("DB error: {0}")]
DB(#[from] sqlx::Error),
#[error("SQL is incorret: {sql}")]

View File

@@ -8,7 +8,7 @@ use chrono::Utc;
pub struct RecordRow {
pub platform: String,
pub live_id: String,
pub room_id: u64,
pub room_id: i64,
pub title: String,
pub length: i64,
pub size: i64,
@@ -20,31 +20,31 @@ pub struct RecordRow {
impl Database {
pub async fn get_records(
&self,
room_id: u64,
offset: u64,
limit: u64,
room_id: i64,
offset: i64,
limit: i64,
) -> Result<Vec<RecordRow>, DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
Ok(sqlx::query_as::<_, RecordRow>(
"SELECT * FROM records WHERE room_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
)
.bind(room_id as i64)
.bind(limit as i64)
.bind(offset as i64)
.bind(room_id)
.bind(limit)
.bind(offset)
.fetch_all(&lock)
.await?)
}
pub async fn get_record(
&self,
room_id: u64,
room_id: i64,
live_id: &str,
) -> Result<RecordRow, DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
Ok(sqlx::query_as::<_, RecordRow>(
"SELECT * FROM records WHERE room_id = $1 and live_id = $2",
)
.bind(room_id as i64)
.bind(room_id)
.bind(live_id)
.fetch_one(&lock)
.await?)
@@ -54,7 +54,7 @@ impl Database {
&self,
platform: PlatformType,
live_id: &str,
room_id: u64,
room_id: i64,
title: &str,
cover: Option<String>,
created_at: Option<&str>,
@@ -71,7 +71,7 @@ impl Database {
cover,
};
if let Err(e) = sqlx::query("INSERT INTO records (live_id, room_id, title, length, size, cover, created_at, platform) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)").bind(record.live_id.clone())
.bind(record.room_id as i64).bind(&record.title).bind(0).bind(0).bind(&record.cover).bind(&record.created_at).bind(platform.as_str().to_string()).execute(&lock).await {
.bind(record.room_id).bind(&record.title).bind(0).bind(0).bind(&record.cover).bind(&record.created_at).bind(platform.as_str().to_string()).execute(&lock).await {
// if the record already exists, return the existing record
if e.to_string().contains("UNIQUE constraint failed") {
return self.get_record(room_id, live_id).await;
@@ -100,9 +100,10 @@ impl Database {
size: u64,
) -> Result<(), DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
let size = i64::try_from(size).map_err(|_| DatabaseError::NumberExceedI64Range)?;
sqlx::query("UPDATE records SET length = $1, size = $2 WHERE live_id = $3")
.bind(length)
.bind(size as i64)
.bind(size)
.bind(live_id)
.execute(&lock)
.await?;
@@ -148,36 +149,36 @@ impl Database {
pub async fn get_recent_record(
&self,
room_id: u64,
offset: u64,
limit: u64,
room_id: i64,
offset: i64,
limit: i64,
) -> Result<Vec<RecordRow>, DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
if room_id == 0 {
Ok(sqlx::query_as::<_, RecordRow>(
"SELECT * FROM records ORDER BY created_at DESC LIMIT $1 OFFSET $2",
)
.bind(limit as i64)
.bind(offset as i64)
.bind(limit)
.bind(offset)
.fetch_all(&lock)
.await?)
} else {
Ok(sqlx::query_as::<_, RecordRow>(
"SELECT * FROM records WHERE room_id = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3",
)
.bind(room_id as i64)
.bind(limit as i64)
.bind(offset as i64)
.bind(room_id)
.bind(limit)
.bind(offset)
.fetch_all(&lock)
.await?)
}
}
pub async fn get_record_disk_usage(&self) -> Result<u64, DatabaseError> {
pub async fn get_record_disk_usage(&self) -> Result<i64, DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
let result: (i64,) = sqlx::query_as("SELECT SUM(size) FROM records;")
.fetch_one(&lock)
.await?;
Ok(result.0 as u64)
Ok(result.0)
}
}

View File

@@ -6,7 +6,7 @@ use chrono::Utc;
/// because many room infos are collected in realtime
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
pub struct RecorderRow {
pub room_id: u64,
pub room_id: i64,
pub created_at: String,
pub platform: String,
pub auto_start: bool,
@@ -18,7 +18,7 @@ impl Database {
pub async fn add_recorder(
&self,
platform: PlatformType,
room_id: u64,
room_id: i64,
extra: &str,
) -> Result<RecorderRow, DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
@@ -32,7 +32,7 @@ impl Database {
let _ = sqlx::query(
"INSERT OR REPLACE INTO recorders (room_id, created_at, platform, auto_start, extra) VALUES ($1, $2, $3, $4, $5)",
)
.bind(room_id as i64)
.bind(room_id)
.bind(&recorder.created_at)
.bind(platform.as_str())
.bind(recorder.auto_start)
@@ -42,15 +42,15 @@ impl Database {
Ok(recorder)
}
pub async fn remove_recorder(&self, room_id: u64) -> Result<RecorderRow, DatabaseError> {
pub async fn remove_recorder(&self, room_id: i64) -> Result<RecorderRow, DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
let recorder =
sqlx::query_as::<_, RecorderRow>("SELECT * FROM recorders WHERE room_id = $1")
.bind(room_id as i64)
.bind(room_id)
.fetch_one(&lock)
.await?;
let sql = sqlx::query("DELETE FROM recorders WHERE room_id = $1")
.bind(room_id as i64)
.bind(room_id)
.execute(&lock)
.await?;
if sql.rows_affected() != 1 {
@@ -71,10 +71,10 @@ impl Database {
.await?)
}
pub async fn remove_archive(&self, room_id: u64) -> Result<(), DatabaseError> {
pub async fn remove_archive(&self, room_id: i64) -> Result<(), DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
let _ = sqlx::query("DELETE FROM records WHERE room_id = $1")
.bind(room_id as i64)
.bind(room_id)
.execute(&lock)
.await?;
Ok(())
@@ -83,7 +83,7 @@ impl Database {
pub async fn update_recorder(
&self,
platform: PlatformType,
room_id: u64,
room_id: i64,
auto_start: bool,
) -> Result<(), DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
@@ -92,7 +92,7 @@ impl Database {
)
.bind(auto_start)
.bind(platform.as_str().to_string())
.bind(room_id as i64)
.bind(room_id)
.execute(&lock)
.await?;
Ok(())

View File

@@ -5,7 +5,7 @@ use super::DatabaseError;
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, sqlx::FromRow)]
pub struct VideoRow {
pub id: i64,
pub room_id: u64,
pub room_id: i64,
pub cover: String,
pub file: String,
pub note: String,
@@ -22,10 +22,10 @@ pub struct VideoRow {
}
impl Database {
pub async fn get_videos(&self, room_id: u64) -> Result<Vec<VideoRow>, DatabaseError> {
pub async fn get_videos(&self, room_id: i64) -> Result<Vec<VideoRow>, DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
let videos = sqlx::query_as::<_, VideoRow>("SELECT * FROM videos WHERE room_id = $1;")
.bind(room_id as i64)
.bind(room_id)
.fetch_all(&lock)
.await?;
Ok(videos)
@@ -69,7 +69,7 @@ impl Database {
pub async fn add_video(&self, video: &VideoRow) -> Result<VideoRow, DatabaseError> {
let lock = self.db.read().await.clone().unwrap();
let sql = sqlx::query("INSERT INTO videos (room_id, cover, file, note, length, size, status, bvid, title, desc, tags, area, created_at, platform) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)")
.bind(video.room_id as i64)
.bind(video.room_id)
.bind(&video.cover)
.bind(&video.file)
.bind(&video.note)

View File

@@ -92,7 +92,7 @@ pub async fn clip_from_m3u8(
.spawn();
if let Err(e) = child {
return Err(format!("Spawn ffmpeg process failed: {}", e));
return Err(format!("Spawn ffmpeg process failed: {e}"));
}
let mut child = child.unwrap();
@@ -110,17 +110,17 @@ pub async fn clip_from_m3u8(
log::debug!("Clip progress: {}", p.time);
reporter
.unwrap()
.update(format!("编码中:{}", p.time).as_str())
.update(format!("编码中:{}", p.time).as_str());
}
FfmpegEvent::LogEOF => break,
FfmpegEvent::Log(level, content) => {
// log error if content contains error
if content.contains("error") || level == LogLevel::Error {
log::error!("Clip error: {}", content);
log::error!("Clip error: {content}");
}
}
FfmpegEvent::Error(e) => {
log::error!("Clip error: {}", e);
log::error!("Clip error: {e}");
clip_error = Some(e.to_string());
}
_ => {}
@@ -128,12 +128,12 @@ pub async fn clip_from_m3u8(
}
if let Err(e) = child.wait().await {
log::error!("Clip error: {}", e);
log::error!("Clip error: {e}");
return Err(e.to_string());
}
if let Some(error) = clip_error {
log::error!("Clip error: {}", error);
log::error!("Clip error: {error}");
Err(error)
} else {
log::info!("Clip task end: {}", output_path.display());
@@ -152,29 +152,25 @@ pub async fn extract_audio_chunks(file: &Path, format: &str) -> Result<PathBuf,
// First, get the duration of the input file
let duration = get_audio_duration(file).await?;
log::info!("Audio duration: {} seconds", duration);
log::info!("Audio duration: {duration} seconds");
// Split into chunks of 30 seconds
let chunk_duration = 30;
let chunk_count = (duration as f64 / chunk_duration as f64).ceil() as usize;
log::info!(
"Splitting into {} chunks of {} seconds each",
chunk_count,
chunk_duration
);
let chunk_count = (duration as f64 / f64::from(chunk_duration)).ceil() as usize;
log::info!("Splitting into {chunk_count} chunks of {chunk_duration} seconds each");
// Create output directory for chunks
let output_dir = output_path.parent().unwrap();
let base_name = output_path.file_stem().unwrap().to_str().unwrap();
let chunk_dir = output_dir.join(format!("{}_chunks", base_name));
let chunk_dir = output_dir.join(format!("{base_name}_chunks"));
if !chunk_dir.exists() {
std::fs::create_dir_all(&chunk_dir)
.map_err(|e| format!("Failed to create chunk directory: {}", e))?;
.map_err(|e| format!("Failed to create chunk directory: {e}"))?;
}
// Use ffmpeg segment feature to split audio into chunks
let segment_pattern = chunk_dir.join(format!("{}_%03d.{}", base_name, format));
let segment_pattern = chunk_dir.join(format!("{base_name}_%03d.{format}"));
// 构建优化的ffmpeg命令参数
let file_str = file.to_str().unwrap();
@@ -240,7 +236,7 @@ pub async fn extract_audio_chunks(file: &Path, format: &str) -> Result<PathBuf,
while let Ok(event) = parser.parse_next_event().await {
match event {
FfmpegEvent::Error(e) => {
log::error!("Extract audio error: {}", e);
log::error!("Extract audio error: {e}");
extract_error = Some(e.to_string());
}
FfmpegEvent::LogEOF => break,
@@ -250,12 +246,12 @@ pub async fn extract_audio_chunks(file: &Path, format: &str) -> Result<PathBuf,
}
if let Err(e) = child.wait().await {
log::error!("Extract audio error: {}", e);
log::error!("Extract audio error: {e}");
return Err(e.to_string());
}
if let Some(error) = extract_error {
log::error!("Extract audio error: {}", error);
log::error!("Extract audio error: {error}");
Err(error)
} else {
log::info!(
@@ -284,7 +280,7 @@ async fn get_audio_duration(file: &Path) -> Result<u64, String> {
.spawn();
if let Err(e) = child {
return Err(format!("Failed to spawn ffprobe process: {}", e));
return Err(format!("Failed to spawn ffprobe process: {e}"));
}
let mut child = child.unwrap();
@@ -300,7 +296,7 @@ async fn get_audio_duration(file: &Path) -> Result<u64, String> {
// The new command outputs duration directly as a float
if let Ok(seconds_f64) = content.trim().parse::<f64>() {
duration = Some(seconds_f64.ceil() as u64);
log::debug!("Parsed duration: {} seconds", seconds_f64);
log::debug!("Parsed duration: {seconds_f64} seconds");
}
}
_ => {}
@@ -308,7 +304,7 @@ async fn get_audio_duration(file: &Path) -> Result<u64, String> {
}
if let Err(e) = child.wait().await {
log::error!("Failed to get duration: {}", e);
log::error!("Failed to get duration: {e}");
return Err(e.to_string());
}
@@ -332,10 +328,7 @@ pub async fn get_segment_duration(file: &Path) -> Result<f64, String> {
.spawn();
if let Err(e) = child {
return Err(format!(
"Failed to spawn ffprobe process for segment: {}",
e
));
return Err(format!("Failed to spawn ffprobe process for segment: {e}"));
}
let mut child = child.unwrap();
@@ -351,7 +344,7 @@ pub async fn get_segment_duration(file: &Path) -> Result<f64, String> {
// Parse the exact duration as f64 for precise timing
if let Ok(seconds_f64) = content.trim().parse::<f64>() {
duration = Some(seconds_f64);
log::debug!("Parsed segment duration: {} seconds", seconds_f64);
log::debug!("Parsed segment duration: {seconds_f64} seconds");
}
}
_ => {}
@@ -359,7 +352,7 @@ pub async fn get_segment_duration(file: &Path) -> Result<f64, String> {
}
if let Err(e) = child.wait().await {
log::error!("Failed to get segment duration: {}", e);
log::error!("Failed to get segment duration: {e}");
return Err(e.to_string());
}
@@ -375,7 +368,7 @@ pub async fn encode_video_subtitle(
) -> Result<String, String> {
// ffmpeg -i fixed_\[30655190\]1742887114_0325084106_81.5.mp4 -vf "subtitles=test.srt:force_style='FontSize=24'" -c:v libx264 -c:a copy output.mp4
log::info!("Encode video subtitle task start: {}", file.display());
log::info!("SRT style: {}", srt_style);
log::info!("SRT style: {srt_style}");
// output path is file with prefix [subtitle]
let output_filename = format!(
"{}{}",
@@ -400,14 +393,14 @@ pub async fn encode_video_subtitle(
let subtitle = subtitle
.to_str()
.unwrap()
.replace("\\", "\\\\")
.replace(":", "\\:");
format!("'{}'", subtitle)
.replace('\\', "\\\\")
.replace(':', "\\:");
format!("'{subtitle}'")
} else {
format!("'{}'", subtitle.display())
};
let vf = format!("subtitles={}:force_style='{}'", subtitle, srt_style);
log::info!("vf: {}", vf);
let vf = format!("subtitles={subtitle}:force_style='{srt_style}'");
log::info!("vf: {vf}");
let mut ffmpeg_process = tokio::process::Command::new(ffmpeg_path());
#[cfg(target_os = "windows")]
@@ -437,7 +430,7 @@ pub async fn encode_video_subtitle(
while let Ok(event) = parser.parse_next_event().await {
match event {
FfmpegEvent::Error(e) => {
log::error!("Encode video subtitle error: {}", e);
log::error!("Encode video subtitle error: {e}");
command_error = Some(e.to_string());
}
FfmpegEvent::Progress(p) => {
@@ -451,12 +444,12 @@ pub async fn encode_video_subtitle(
}
if let Err(e) = child.wait().await {
log::error!("Encode video subtitle error: {}", e);
log::error!("Encode video subtitle error: {e}");
return Err(e.to_string());
}
if let Some(error) = command_error {
log::error!("Encode video subtitle error: {}", error);
log::error!("Encode video subtitle error: {error}");
Err(error)
} else {
log::info!("Encode video subtitle task end: {}", output_path.display());
@@ -494,9 +487,9 @@ pub async fn encode_video_danmu(
let subtitle = subtitle
.to_str()
.unwrap()
.replace("\\", "\\\\")
.replace(":", "\\:");
format!("'{}'", subtitle)
.replace('\\', "\\\\")
.replace(':', "\\:");
format!("'{subtitle}'")
} else {
format!("'{}'", subtitle.display())
};
@@ -507,7 +500,7 @@ pub async fn encode_video_danmu(
let child = ffmpeg_process
.args(["-i", file.to_str().unwrap()])
.args(["-vf", &format!("ass={}", subtitle)])
.args(["-vf", &format!("ass={subtitle}")])
.args(["-c:v", "libx264"])
.args(["-c:a", "copy"])
.args(["-b:v", "6000k"])
@@ -529,7 +522,7 @@ pub async fn encode_video_danmu(
while let Ok(event) = parser.parse_next_event().await {
match event {
FfmpegEvent::Error(e) => {
log::error!("Encode video danmu error: {}", e);
log::error!("Encode video danmu error: {e}");
command_error = Some(e.to_string());
}
FfmpegEvent::Progress(p) => {
@@ -548,12 +541,12 @@ pub async fn encode_video_danmu(
}
if let Err(e) = child.wait().await {
log::error!("Encode video danmu error: {}", e);
log::error!("Encode video danmu error: {e}");
return Err(e.to_string());
}
if let Some(error) = command_error {
log::error!("Encode video danmu error: {}", error);
log::error!("Encode video danmu error: {error}");
Err(error)
} else {
log::info!(
@@ -592,7 +585,7 @@ pub async fn generic_ffmpeg_command(args: &[&str]) -> Result<String, String> {
}
if let Err(e) = child.wait().await {
log::error!("Generic ffmpeg command error: {}", e);
log::error!("Generic ffmpeg command error: {e}");
return Err(e.to_string());
}
@@ -620,17 +613,17 @@ pub async fn generate_video_subtitle(
let chunk_dir = extract_audio_chunks(file, "wav").await?;
let mut full_result = GenerateResult {
subtitle_id: "".to_string(),
subtitle_id: String::new(),
subtitle_content: vec![],
generator_type: SubtitleGeneratorType::Whisper,
};
let mut chunk_paths = vec![];
for entry in std::fs::read_dir(&chunk_dir)
.map_err(|e| format!("Failed to read chunk directory: {}", e))?
.map_err(|e| format!("Failed to read chunk directory: {e}"))?
{
let entry =
entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
entry.map_err(|e| format!("Failed to read directory entry: {e}"))?;
let path = entry.path();
chunk_paths.push(path);
}
@@ -676,17 +669,17 @@ pub async fn generate_video_subtitle(
let chunk_dir = extract_audio_chunks(file, "mp3").await?;
let mut full_result = GenerateResult {
subtitle_id: "".to_string(),
subtitle_id: String::new(),
subtitle_content: vec![],
generator_type: SubtitleGeneratorType::WhisperOnline,
};
let mut chunk_paths = vec![];
for entry in std::fs::read_dir(&chunk_dir)
.map_err(|e| format!("Failed to read chunk directory: {}", e))?
.map_err(|e| format!("Failed to read chunk directory: {e}"))?
{
let entry =
entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
entry.map_err(|e| format!("Failed to read directory entry: {e}"))?;
let path = entry.path();
chunk_paths.push(path);
}
@@ -717,10 +710,7 @@ pub async fn generate_video_subtitle(
Err("Failed to initialize Whisper Online".to_string())
}
}
_ => Err(format!(
"Unknown subtitle generator type: {}",
generator_type
)),
_ => Err(format!("Unknown subtitle generator type: {generator_type}")),
}
}
@@ -802,7 +792,7 @@ pub async fn get_video_resolution(file: &str) -> Result<String, String> {
return Err("Failed to parse resolution from output".into());
}
let line = line.unwrap();
let resolution = line.split("x").collect::<Vec<&str>>();
let resolution = line.split('x').collect::<Vec<&str>>();
if resolution.len() != 2 {
return Err("Failed to parse resolution from output".into());
}
@@ -858,7 +848,7 @@ pub async fn clip_from_video_file(
.spawn();
if let Err(e) = child {
return Err(format!("启动ffmpeg进程失败: {}", e));
return Err(format!("启动ffmpeg进程失败: {e}"));
}
let mut child = child.unwrap();
@@ -877,11 +867,11 @@ pub async fn clip_from_video_file(
FfmpegEvent::LogEOF => break,
FfmpegEvent::Log(level, content) => {
if content.contains("error") || level == LogLevel::Error {
log::error!("切片错误: {}", content);
log::error!("切片错误: {content}");
}
}
FfmpegEvent::Error(e) => {
log::error!("切片错误: {}", e);
log::error!("切片错误: {e}");
clip_error = Some(e.to_string());
}
_ => {}
@@ -926,7 +916,7 @@ pub async fn extract_video_metadata(file_path: &Path) -> Result<VideoMetadata, S
])
.output()
.await
.map_err(|e| format!("执行ffprobe失败: {}", e))?;
.map_err(|e| format!("执行ffprobe失败: {e}"))?;
if !output.status.success() {
return Err(format!(
@@ -937,7 +927,7 @@ pub async fn extract_video_metadata(file_path: &Path) -> Result<VideoMetadata, S
let json_str = String::from_utf8_lossy(&output.stdout);
let json: serde_json::Value =
serde_json::from_str(&json_str).map_err(|e| format!("解析ffprobe输出失败: {}", e))?;
serde_json::from_str(&json_str).map_err(|e| format!("解析ffprobe输出失败: {e}"))?;
// 解析视频流信息
let streams = json["streams"].as_array().ok_or("未找到视频流信息")?;
@@ -986,7 +976,7 @@ pub async fn generate_thumbnail(video_full_path: &Path, timestamp: f64) -> Resul
.args(["-y", thumbnail_full_path.to_str().unwrap()])
.output()
.await
.map_err(|e| format!("生成缩略图失败: {}", e))?;
.map_err(|e| format!("生成缩略图失败: {e}"))?;
if !output.status.success() {
return Err(format!(
@@ -1022,7 +1012,7 @@ pub async fn execute_ffmpeg_conversion(
let mut child = cmd
.stderr(Stdio::piped())
.spawn()
.map_err(|e| format!("启动FFmpeg进程失败: {}", e))?;
.map_err(|e| format!("启动FFmpeg进程失败: {e}"))?;
let stderr = child.stderr.take().unwrap();
let reader = BufReader::new(stderr);
@@ -1052,15 +1042,15 @@ pub async fn execute_ffmpeg_conversion(
let status = child
.wait()
.await
.map_err(|e| format!("等待FFmpeg进程失败: {}", e))?;
.map_err(|e| format!("等待FFmpeg进程失败: {e}"))?;
if !status.success() {
let error_msg = conversion_error
.unwrap_or_else(|| format!("FFmpeg退出码: {}", status.code().unwrap_or(-1)));
return Err(format!("视频格式转换失败 ({}): {}", mode_name, error_msg));
return Err(format!("视频格式转换失败 ({mode_name}): {error_msg}"));
}
reporter.update(&format!("视频格式转换完成 100% ({})", mode_name));
reporter.update(&format!("视频格式转换完成 100% ({mode_name})"));
Ok(())
}
@@ -1147,10 +1137,7 @@ pub async fn convert_video_format(
Ok(()) => Ok(()),
Err(stream_copy_error) => {
reporter.update("流复制失败,使用高质量重编码模式...");
log::warn!(
"Stream copy failed: {}, falling back to re-encoding",
stream_copy_error
);
log::warn!("Stream copy failed: {stream_copy_error}, falling back to re-encoding");
try_high_quality_conversion(source, dest, reporter).await
}
}
@@ -1384,7 +1371,7 @@ mod tests {
let output_path = test_file.with_extension("wav");
let output_dir = output_path.parent().unwrap();
let base_name = output_path.file_stem().unwrap().to_str().unwrap();
let chunk_dir = output_dir.join(format!("{}_chunks", base_name));
let chunk_dir = output_dir.join(format!("{base_name}_chunks"));
assert!(chunk_dir.to_string_lossy().contains("_chunks"));
assert!(chunk_dir.to_string_lossy().contains("test"));

View File

@@ -23,7 +23,7 @@ pub async fn add_account(
) -> Result<AccountRow, String> {
// check if cookies is valid
if let Err(e) = cookies.parse::<HeaderValue>() {
return Err(format!("Invalid cookies: {}", e));
return Err(format!("Invalid cookies: {e}"));
}
let account = state.db.add_account(&platform, cookies).await?;
if platform == "bilibili" {
@@ -61,7 +61,7 @@ pub async fn add_account(
.await?;
}
Err(e) => {
log::warn!("Failed to get Douyin user info: {}", e);
log::warn!("Failed to get Douyin user info: {e}");
// Keep the account but with default values
}
}
@@ -73,7 +73,7 @@ pub async fn add_account(
pub async fn remove_account(
state: state_type!(),
platform: String,
uid: u64,
uid: i64,
) -> Result<(), String> {
if platform == "bilibili" {
let account = state.db.get_account(&platform, uid).await?;

View File

@@ -14,11 +14,7 @@ pub async fn get_config(state: state_type!()) -> Result<Config, ()> {
#[allow(dead_code)]
pub async fn set_cache_path(state: state_type!(), cache_path: String) -> Result<(), String> {
let old_cache_path = state.config.read().await.cache.clone();
log::info!(
"Try to set cache path: {} -> {}",
old_cache_path,
cache_path
);
log::info!("Try to set cache path: {old_cache_path} -> {cache_path}");
if old_cache_path == cache_path {
return Ok(());
}
@@ -27,20 +23,16 @@ pub async fn set_cache_path(state: state_type!(), cache_path: String) -> Result<
let new_cache_path_obj = std::path::Path::new(&cache_path);
// check if new cache path is under old cache path
if new_cache_path_obj.starts_with(old_cache_path_obj) {
log::error!(
"New cache path is under old cache path: {} -> {}",
old_cache_path,
cache_path
);
log::error!("New cache path is under old cache path: {old_cache_path} -> {cache_path}");
return Err("New cache path cannot be under old cache path".to_string());
}
state.recorder_manager.set_migrating(true).await;
state.recorder_manager.set_migrating(true);
// stop and clear all recorders
state.recorder_manager.stop_all().await;
// first switch to new cache
state.config.write().await.set_cache_path(&cache_path);
log::info!("Cache path changed: {}", cache_path);
log::info!("Cache path changed: {cache_path}");
// Copy old cache to new cache
log::info!("Start copy old cache to new cache");
state
@@ -68,11 +60,11 @@ pub async fn set_cache_path(state: state_type!(), cache_path: String) -> Result<
// if entry is a folder
if entry.is_dir() {
if let Err(e) = crate::handlers::utils::copy_dir_all(entry, &new_entry) {
log::error!("Copy old cache to new cache error: {}", e);
log::error!("Copy old cache to new cache error: {e}");
return Err(e.to_string());
}
} else if let Err(e) = std::fs::copy(entry, &new_entry) {
log::error!("Copy old cache to new cache error: {}", e);
log::error!("Copy old cache to new cache error: {e}");
return Err(e.to_string());
}
}
@@ -80,16 +72,16 @@ pub async fn set_cache_path(state: state_type!(), cache_path: String) -> Result<
log::info!("Copy old cache to new cache done");
state.db.new_message("缓存目录切换", "缓存切换完成").await?;
state.recorder_manager.set_migrating(false).await;
state.recorder_manager.set_migrating(false);
// remove all old cache entries
for entry in old_cache_entries {
if entry.is_dir() {
if let Err(e) = std::fs::remove_dir_all(&entry) {
log::error!("Remove old cache error: {}", e);
log::error!("Remove old cache error: {e}");
}
} else if let Err(e) = std::fs::remove_file(&entry) {
log::error!("Remove old cache error: {}", e);
log::error!("Remove old cache error: {e}");
}
}
@@ -101,11 +93,7 @@ pub async fn set_cache_path(state: state_type!(), cache_path: String) -> Result<
pub async fn set_output_path(state: state_type!(), output_path: String) -> Result<(), String> {
let mut config = state.config.write().await;
let old_output_path = config.output.clone();
log::info!(
"Try to set output path: {} -> {}",
old_output_path,
output_path
);
log::info!("Try to set output path: {old_output_path} -> {output_path}");
if old_output_path == output_path {
return Ok(());
}
@@ -114,11 +102,7 @@ pub async fn set_output_path(state: state_type!(), output_path: String) -> Resul
let new_output_path_obj = std::path::Path::new(&output_path);
// check if new output path is under old output path
if new_output_path_obj.starts_with(old_output_path_obj) {
log::error!(
"New output path is under old output path: {} -> {}",
old_output_path,
output_path
);
log::error!("New output path is under old output path: {old_output_path} -> {output_path}");
return Err("New output path cannot be under old output path".to_string());
}
@@ -140,11 +124,11 @@ pub async fn set_output_path(state: state_type!(), output_path: String) -> Resul
// if entry is a folder
if entry.is_dir() {
if let Err(e) = crate::handlers::utils::copy_dir_all(entry, &new_entry) {
log::error!("Copy old output to new output error: {}", e);
log::error!("Copy old output to new output error: {e}");
return Err(e.to_string());
}
} else if let Err(e) = std::fs::copy(entry, &new_entry) {
log::error!("Copy old output to new output error: {}", e);
log::error!("Copy old output to new output error: {e}");
return Err(e.to_string());
}
}
@@ -153,10 +137,10 @@ pub async fn set_output_path(state: state_type!(), output_path: String) -> Resul
for entry in old_output_entries {
if entry.is_dir() {
if let Err(e) = std::fs::remove_dir_all(&entry) {
log::error!("Remove old output error: {}", e);
log::error!("Remove old output error: {e}");
}
} else if let Err(e) = std::fs::remove_file(&entry) {
log::error!("Remove old output error: {}", e);
log::error!("Remove old output error: {e}");
}
}
@@ -216,10 +200,7 @@ pub async fn update_subtitle_generator_type(
state: state_type!(),
subtitle_generator_type: String,
) -> Result<(), ()> {
log::info!(
"Updating subtitle generator type to {}",
subtitle_generator_type
);
log::info!("Updating subtitle generator type to {subtitle_generator_type}");
let mut config = state.config.write().await;
config.subtitle_generator_type = subtitle_generator_type;
config.save();
@@ -240,7 +221,7 @@ pub async fn update_openai_api_endpoint(
state: state_type!(),
openai_api_endpoint: String,
) -> Result<(), ()> {
log::info!("Updating openai api endpoint to {}", openai_api_endpoint);
log::info!("Updating openai api endpoint to {openai_api_endpoint}");
let mut config = state.config.write().await;
config.openai_api_endpoint = openai_api_endpoint;
config.save();
@@ -268,7 +249,7 @@ pub async fn update_status_check_interval(
if interval < 10 {
interval = 10; // Minimum interval of 10 seconds
}
log::info!("Updating status check interval to {} seconds", interval);
log::info!("Updating status check interval to {interval} seconds");
state.config.write().await.status_check_interval = interval;
state.config.write().await.save();
Ok(())
@@ -279,7 +260,7 @@ pub async fn update_whisper_language(
state: state_type!(),
whisper_language: String,
) -> Result<(), ()> {
log::info!("Updating whisper language to {}", whisper_language);
log::info!("Updating whisper language to {whisper_language}");
state.config.write().await.whisper_language = whisper_language;
state.config.write().await.save();
Ok(())
@@ -288,14 +269,14 @@ pub async fn update_whisper_language(
#[cfg_attr(feature = "gui", tauri::command)]
#[cfg(feature = "gui")]
pub async fn update_cleanup_source_flv(state: state_type!(), cleanup: bool) -> Result<(), ()> {
log::info!("Updating cleanup source FLV after import to {}", cleanup);
log::info!("Updating cleanup source FLV after import to {cleanup}");
state.config.write().await.set_cleanup_source_flv(cleanup);
Ok(())
}
#[cfg_attr(feature = "gui", tauri::command)]
pub async fn update_webhook_url(state: state_type!(), webhook_url: String) -> Result<(), ()> {
log::info!("Updating webhook url to {}", webhook_url);
log::info!("Updating webhook url to {webhook_url}");
let _ = state
.webhook_poster
.update_config(crate::webhook::poster::WebhookConfig {

View File

@@ -24,10 +24,10 @@ pub async fn get_recorder_list(state: state_type!()) -> Result<RecorderList, ()>
pub async fn add_recorder(
state: state_type!(),
platform: String,
room_id: u64,
room_id: i64,
extra: String,
) -> Result<RecorderRow, String> {
log::info!("Add recorder: {} {}", platform, room_id);
log::info!("Add recorder: {platform} {room_id}");
let platform = PlatformType::from_str(&platform).unwrap();
let account = match platform {
PlatformType::BiliBili => {
@@ -59,7 +59,7 @@ pub async fn add_recorder(
let room = state.db.add_recorder(platform, room_id, &extra).await?;
state
.db
.new_message("添加直播间", &format!("添加了新直播间 {}", room_id))
.new_message("添加直播间", &format!("添加了新直播间 {room_id}"))
.await?;
// post webhook event
let event = events::new_webhook_event(
@@ -67,18 +67,18 @@ pub async fn add_recorder(
events::Payload::Recorder(room.clone()),
);
if let Err(e) = state.webhook_poster.post_event(&event).await {
log::error!("Post webhook event error: {}", e);
log::error!("Post webhook event error: {e}");
}
Ok(room)
}
Err(e) => {
log::error!("Failed to add recorder: {}", e);
Err(format!("添加失败: {}", e))
log::error!("Failed to add recorder: {e}");
Err(format!("添加失败: {e}"))
}
},
Err(e) => {
log::error!("Failed to add recorder: {}", e);
Err(format!("添加失败: {}", e))
log::error!("Failed to add recorder: {e}");
Err(format!("添加失败: {e}"))
}
}
}
@@ -87,9 +87,9 @@ pub async fn add_recorder(
pub async fn remove_recorder(
state: state_type!(),
platform: String,
room_id: u64,
room_id: i64,
) -> Result<(), String> {
log::info!("Remove recorder: {} {}", platform, room_id);
log::info!("Remove recorder: {platform} {room_id}");
let platform = PlatformType::from_str(&platform).unwrap();
match state
.recorder_manager
@@ -99,7 +99,7 @@ pub async fn remove_recorder(
Ok(recorder) => {
state
.db
.new_message("移除直播间", &format!("移除了直播间 {}", room_id))
.new_message("移除直播间", &format!("移除了直播间 {room_id}"))
.await?;
// post webhook event
let event = events::new_webhook_event(
@@ -107,13 +107,13 @@ pub async fn remove_recorder(
events::Payload::Recorder(recorder),
);
if let Err(e) = state.webhook_poster.post_event(&event).await {
log::error!("Post webhook event error: {}", e);
log::error!("Post webhook event error: {e}");
}
log::info!("Removed recorder: {} {}", platform.as_str(), room_id);
Ok(())
}
Err(e) => {
log::error!("Failed to remove recorder: {}", e);
log::error!("Failed to remove recorder: {e}");
Err(e.to_string())
}
}
@@ -123,7 +123,7 @@ pub async fn remove_recorder(
pub async fn get_room_info(
state: state_type!(),
platform: String,
room_id: u64,
room_id: i64,
) -> Result<RecorderInfo, String> {
let platform = PlatformType::from_str(&platform).unwrap();
if let Some(info) = state
@@ -138,16 +138,16 @@ pub async fn get_room_info(
}
#[cfg_attr(feature = "gui", tauri::command)]
pub async fn get_archive_disk_usage(state: state_type!()) -> Result<u64, String> {
pub async fn get_archive_disk_usage(state: state_type!()) -> Result<i64, String> {
Ok(state.recorder_manager.get_archive_disk_usage().await?)
}
#[cfg_attr(feature = "gui", tauri::command)]
pub async fn get_archives(
state: state_type!(),
room_id: u64,
offset: u64,
limit: u64,
room_id: i64,
offset: i64,
limit: i64,
) -> Result<Vec<RecordRow>, String> {
Ok(state
.recorder_manager
@@ -158,7 +158,7 @@ pub async fn get_archives(
#[cfg_attr(feature = "gui", tauri::command)]
pub async fn get_archive(
state: state_type!(),
room_id: u64,
room_id: i64,
live_id: String,
) -> Result<RecordRow, String> {
Ok(state
@@ -171,7 +171,7 @@ pub async fn get_archive(
pub async fn get_archive_subtitle(
state: state_type!(),
platform: String,
room_id: u64,
room_id: i64,
live_id: String,
) -> Result<String, String> {
let platform = PlatformType::from_str(&platform);
@@ -188,7 +188,7 @@ pub async fn get_archive_subtitle(
pub async fn generate_archive_subtitle(
state: state_type!(),
platform: String,
room_id: u64,
room_id: i64,
live_id: String,
) -> Result<String, String> {
let platform = PlatformType::from_str(&platform);
@@ -205,7 +205,7 @@ pub async fn generate_archive_subtitle(
pub async fn delete_archive(
state: state_type!(),
platform: String,
room_id: u64,
room_id: i64,
live_id: String,
) -> Result<(), String> {
let platform = PlatformType::from_str(&platform);
@@ -220,14 +220,14 @@ pub async fn delete_archive(
.db
.new_message(
"删除历史缓存",
&format!("删除了房间 {} 的历史缓存 {}", room_id, live_id),
&format!("删除了房间 {room_id} 的历史缓存 {live_id}"),
)
.await?;
// post webhook event
let event =
events::new_webhook_event(events::ARCHIVE_DELETED, events::Payload::Archive(to_delete));
if let Err(e) = state.webhook_poster.post_event(&event).await {
log::error!("Post webhook event error: {}", e);
log::error!("Post webhook event error: {e}");
}
Ok(())
}
@@ -236,7 +236,7 @@ pub async fn delete_archive(
pub async fn delete_archives(
state: state_type!(),
platform: String,
room_id: u64,
room_id: i64,
live_ids: Vec<String>,
) -> Result<(), String> {
let platform = PlatformType::from_str(&platform);
@@ -248,7 +248,10 @@ pub async fn delete_archives(
.delete_archives(
platform.unwrap(),
room_id,
&live_ids.iter().map(|s| s.as_str()).collect::<Vec<&str>>(),
&live_ids
.iter()
.map(std::string::String::as_str)
.collect::<Vec<&str>>(),
)
.await?;
state
@@ -263,7 +266,7 @@ pub async fn delete_archives(
let event =
events::new_webhook_event(events::ARCHIVE_DELETED, events::Payload::Archive(to_delete));
if let Err(e) = state.webhook_poster.post_event(&event).await {
log::error!("Post webhook event error: {}", e);
log::error!("Post webhook event error: {e}");
}
}
Ok(())
@@ -273,7 +276,7 @@ pub async fn delete_archives(
pub async fn get_danmu_record(
state: state_type!(),
platform: String,
room_id: u64,
room_id: i64,
live_id: String,
) -> Result<Vec<DanmuEntry>, String> {
let platform = PlatformType::from_str(&platform);
@@ -290,7 +293,7 @@ pub async fn get_danmu_record(
#[serde(rename_all = "camelCase")]
pub struct ExportDanmuOptions {
platform: String,
room_id: u64,
room_id: i64,
live_id: String,
x: i64,
y: i64,
@@ -335,8 +338,8 @@ pub async fn export_danmu(
#[cfg_attr(feature = "gui", tauri::command)]
pub async fn send_danmaku(
state: state_type!(),
uid: u64,
room_id: u64,
uid: i64,
room_id: i64,
message: String,
) -> Result<(), String> {
let account = state.db.get_account("bilibili", uid).await?;
@@ -351,7 +354,7 @@ pub async fn send_danmaku(
pub async fn get_total_length(state: state_type!()) -> Result<i64, String> {
match state.db.get_total_length().await {
Ok(total_length) => Ok(total_length),
Err(e) => Err(format!("Failed to get total length: {}", e)),
Err(e) => Err(format!("Failed to get total length: {e}")),
}
}
@@ -359,20 +362,20 @@ pub async fn get_total_length(state: state_type!()) -> Result<i64, String> {
pub async fn get_today_record_count(state: state_type!()) -> Result<i64, String> {
match state.db.get_today_record_count().await {
Ok(count) => Ok(count),
Err(e) => Err(format!("Failed to get today record count: {}", e)),
Err(e) => Err(format!("Failed to get today record count: {e}")),
}
}
#[cfg_attr(feature = "gui", tauri::command)]
pub async fn get_recent_record(
state: state_type!(),
room_id: u64,
offset: u64,
limit: u64,
room_id: i64,
offset: i64,
limit: i64,
) -> Result<Vec<RecordRow>, String> {
match state.db.get_recent_record(room_id, offset, limit).await {
Ok(records) => Ok(records),
Err(e) => Err(format!("Failed to get recent record: {}", e)),
Err(e) => Err(format!("Failed to get recent record: {e}")),
}
}
@@ -380,7 +383,7 @@ pub async fn get_recent_record(
pub async fn set_enable(
state: state_type!(),
platform: String,
room_id: u64,
room_id: i64,
enabled: bool,
) -> Result<(), String> {
log::info!("Set enable for recorder {platform} {room_id} {enabled}");

View File

@@ -109,10 +109,10 @@ pub async fn get_disk_info(state: state_type!()) -> Result<DiskInfo, ()> {
#[cfg_attr(feature = "gui", tauri::command)]
pub async fn console_log(_state: state_type!(), level: &str, message: &str) -> Result<(), ()> {
match level {
"error" => log::error!("[frontend] {}", message),
"warn" => log::warn!("[frontend] {}", message),
"info" => log::info!("[frontend] {}", message),
_ => log::debug!("[frontend] {}", message),
"error" => log::error!("[frontend] {message}"),
"warn" => log::warn!("[frontend] {message}"),
"info" => log::info!("[frontend] {message}"),
_ => log::debug!("[frontend] {message}"),
}
Ok(())
}
@@ -148,7 +148,7 @@ pub async fn get_disk_info_inner(target: PathBuf) -> Result<DiskInfo, ()> {
let disks = sysinfo::Disks::new_with_refreshed_list();
// get target disk info
let mut disk_info = DiskInfo {
disk: "".into(),
disk: String::new(),
total: 0,
free: 0,
};
@@ -157,11 +157,11 @@ pub async fn get_disk_info_inner(target: PathBuf) -> Result<DiskInfo, ()> {
let mut longest_match = 0;
for disk in disks.list() {
let mount_point = disk.mount_point().to_str().unwrap();
if target.starts_with(mount_point) && mount_point.split("/").count() > longest_match {
if target.starts_with(mount_point) && mount_point.split('/').count() > longest_match {
disk_info.disk = mount_point.into();
disk_info.total = disk.total_space();
disk_info.free = disk.available_space();
longest_match = mount_point.split("/").count();
longest_match = mount_point.split('/').count();
}
}
@@ -187,10 +187,10 @@ pub async fn export_to_file(
}
let mut file = file.unwrap();
if let Err(e) = file.write_all(content.as_bytes()).await {
return Err(format!("Write file failed: {}", e));
return Err(format!("Write file failed: {e}"));
}
if let Err(e) = file.flush().await {
return Err(format!("Flush file failed: {}", e));
return Err(format!("Flush file failed: {e}"));
}
Ok(())
}
@@ -211,10 +211,10 @@ pub async fn open_log_folder(state: state_type!()) -> Result<(), String> {
pub async fn open_live(
state: state_type!(),
platform: String,
room_id: u64,
room_id: i64,
live_id: String,
) -> Result<(), String> {
log::info!("Open player window: {} {}", room_id, live_id);
log::info!("Open player window: {room_id} {live_id}");
#[cfg(feature = "gui")]
{
let platform = PlatformType::from_str(&platform).unwrap();
@@ -225,7 +225,7 @@ pub async fn open_live(
.unwrap();
let builder = tauri::WebviewWindowBuilder::new(
&state.app_handle,
format!("Live:{}:{}", room_id, live_id),
format!("Live:{room_id}:{live_id}"),
tauri::WebviewUrl::App(
format!(
"index_live.html?platform={}&room_id={}&live_id={}",
@@ -253,7 +253,7 @@ pub async fn open_live(
});
if let Err(e) = builder.decorations(true).build() {
log::error!("live window build failed: {}", e);
log::error!("live window build failed: {e}");
}
}
@@ -263,13 +263,13 @@ pub async fn open_live(
#[cfg(feature = "gui")]
#[tauri::command]
pub async fn open_clip(state: state_type!(), video_id: i64) -> Result<(), String> {
log::info!("Open clip window: {}", video_id);
log::info!("Open clip window: {video_id}");
let builder = tauri::WebviewWindowBuilder::new(
&state.app_handle,
format!("Clip:{}", video_id),
tauri::WebviewUrl::App(format!("index_clip.html?id={}", video_id).into()),
format!("Clip:{video_id}"),
tauri::WebviewUrl::App(format!("index_clip.html?id={video_id}").into()),
)
.title(format!("Clip window:{}", video_id))
.title(format!("Clip window:{video_id}"))
.theme(Some(Theme::Light))
.inner_size(1200.0, 800.0)
.effects(WindowEffectsConfig {
@@ -283,7 +283,7 @@ pub async fn open_clip(state: state_type!(), video_id: i64) -> Result<(), String
});
if let Err(e) = builder.decorations(true).build() {
log::error!("clip window build failed: {}", e);
log::error!("clip window build failed: {e}");
}
Ok(())

View File

@@ -95,12 +95,12 @@ async fn copy_file_with_progress(
dest: &Path,
reporter: &ProgressReporter,
) -> Result<(), String> {
let mut source_file = File::open(source).map_err(|e| format!("无法打开源文件: {}", e))?;
let mut dest_file = File::create(dest).map_err(|e| format!("无法创建目标文件: {}", e))?;
let mut source_file = File::open(source).map_err(|e| format!("无法打开源文件: {e}"))?;
let mut dest_file = File::create(dest).map_err(|e| format!("无法创建目标文件: {e}"))?;
let total_size = source_file
.metadata()
.map_err(|e| format!("无法获取文件大小: {}", e))?
.map_err(|e| format!("无法获取文件大小: {e}"))?
.len();
let mut copied = 0u64;
@@ -114,14 +114,14 @@ async fn copy_file_with_progress(
loop {
let bytes_read = source_file
.read(&mut buffer)
.map_err(|e| format!("读取文件失败: {}", e))?;
.map_err(|e| format!("读取文件失败: {e}"))?;
if bytes_read == 0 {
break;
}
dest_file
.write_all(&buffer[..bytes_read])
.map_err(|e| format!("写入文件失败: {}", e))?;
.map_err(|e| format!("写入文件失败: {e}"))?;
copied += bytes_read as u64;
// 计算进度百分比,只在变化时更新
@@ -135,14 +135,14 @@ async fn copy_file_with_progress(
let report_threshold = 1; // 每1%报告一次
if percent != last_reported_percent && (percent % report_threshold == 0 || percent == 100) {
reporter.update(&format!("正在复制视频文件... {}%", percent));
reporter.update(&format!("正在复制视频文件... {percent}%"));
last_reported_percent = percent;
}
}
dest_file
.flush()
.map_err(|e| format!("刷新文件缓冲区失败: {}", e))?;
.map_err(|e| format!("刷新文件缓冲区失败: {e}"))?;
Ok(())
}
@@ -191,7 +191,7 @@ async fn copy_then_convert_strategy(
// 确保临时目录存在
if let Some(parent) = temp_path.parent() {
std::fs::create_dir_all(parent).map_err(|e| format!("创建临时目录失败: {}", e))?;
std::fs::create_dir_all(parent).map_err(|e| format!("创建临时目录失败: {e}"))?;
}
// 第一步:将网络文件复制到本地临时位置(使用优化的缓冲区)
@@ -220,12 +220,12 @@ async fn copy_file_with_network_optimization(
dest: &Path,
reporter: &ProgressReporter,
) -> Result<(), String> {
let mut source_file = File::open(source).map_err(|e| format!("无法打开网络源文件: {}", e))?;
let mut dest_file = File::create(dest).map_err(|e| format!("无法创建本地临时文件: {}", e))?;
let mut source_file = File::open(source).map_err(|e| format!("无法打开网络源文件: {e}"))?;
let mut dest_file = File::create(dest).map_err(|e| format!("无法创建本地临时文件: {e}"))?;
let total_size = source_file
.metadata()
.map_err(|e| format!("无法获取文件大小: {}", e))?
.map_err(|e| format!("无法获取文件大小: {e}"))?
.len();
let mut copied = 0u64;
@@ -249,7 +249,7 @@ async fn copy_file_with_network_optimization(
dest_file
.write_all(&buffer[..bytes_read])
.map_err(|e| format!("写入临时文件失败: {}", e))?;
.map_err(|e| format!("写入临时文件失败: {e}"))?;
copied += bytes_read as u64;
// 计算并报告进度
@@ -272,22 +272,16 @@ async fn copy_file_with_network_optimization(
}
Err(e) => {
consecutive_errors += 1;
log::warn!(
"网络读取错误 (尝试 {}/{}): {}",
consecutive_errors,
MAX_RETRIES,
e
);
log::warn!("网络读取错误 (尝试 {consecutive_errors}/{MAX_RETRIES}): {e}");
if consecutive_errors >= MAX_RETRIES {
return Err(format!("网络文件读取失败,已重试{}次: {}", MAX_RETRIES, e));
return Err(format!("网络文件读取失败,已重试{MAX_RETRIES}次: {e}"));
}
// 等待一小段时间后重试
tokio::time::sleep(tokio::time::Duration::from_millis(1000)).await;
reporter.update(&format!(
"网络连接中断,正在重试... ({}/{})",
consecutive_errors, MAX_RETRIES
"网络连接中断,正在重试... ({consecutive_errors}/{MAX_RETRIES})"
));
}
}
@@ -295,7 +289,7 @@ async fn copy_file_with_network_optimization(
dest_file
.flush()
.map_err(|e| format!("刷新临时文件缓冲区失败: {}", e))?;
.map_err(|e| format!("刷新临时文件缓冲区失败: {e}"))?;
reporter.update("网络文件复制完成");
Ok(())
}
@@ -307,7 +301,7 @@ struct ClipMetadata {
end_time: f64,
clip_source: String,
original_platform: String,
original_room_id: u64,
original_room_id: i64,
}
#[cfg(feature = "gui")]
@@ -341,12 +335,12 @@ pub async fn clip_range(
let emitter = EventEmitter::new(state.progress_manager.get_event_sender());
let reporter = ProgressReporter::new(&emitter, &event_id).await?;
let mut params_without_cover = params.clone();
params_without_cover.cover = "".to_string();
params_without_cover.cover = String::new();
let task = TaskRow {
id: event_id.clone(),
task_type: "clip_range".to_string(),
status: "pending".to_string(),
message: "".to_string(),
message: String::new(),
metadata: json!({
"params": params_without_cover,
})
@@ -359,10 +353,10 @@ pub async fn clip_range(
let clip_result = clip_range_inner(&state, &reporter, params.clone()).await;
if let Err(e) = clip_result {
reporter.finish(false, &format!("切片失败: {}", e)).await;
reporter.finish(false, &format!("切片失败: {e}")).await;
state
.db
.update_task(&event_id, "failed", &format!("切片失败: {}", e), None)
.update_task(&event_id, "failed", &format!("切片失败: {e}"), None)
.await?;
return Err(e);
}
@@ -377,12 +371,12 @@ pub async fn clip_range(
if state.config.read().await.auto_subtitle {
// generate a subtitle task event id
let subtitle_event_id = format!("{}_subtitle", event_id);
let subtitle_event_id = format!("{event_id}_subtitle");
let result = generate_video_subtitle(state.clone(), subtitle_event_id, video.id).await;
if let Ok(subtitle) = result {
let result = update_video_subtitle(state.clone(), video.id, subtitle).await;
if let Err(e) = result {
log::error!("Update video subtitle error: {}", e);
log::error!("Update video subtitle error: {e}");
}
} else {
log::error!("Generate video subtitle error: {}", result.err().unwrap());
@@ -394,7 +388,7 @@ pub async fn clip_range(
events::new_webhook_event(events::CLIP_GENERATED, events::Payload::Clip(video.clone()));
if let Err(e) = state.webhook_poster.post_event(&event).await {
log::error!("Post webhook event error: {}", e);
log::error!("Post webhook event error: {e}");
}
Ok(video)
@@ -449,6 +443,13 @@ async fn clip_range_inner(
.to_str()
.ok_or("Invalid file path")?;
// add video to db
let Ok(size) = i64::try_from(metadata.len()) else {
log::error!(
"Failed to convert metadata length to i64: {}",
metadata.len()
);
return Err("Failed to convert metadata length to i64".to_string());
};
let video = state
.db
.add_video(&VideoRow {
@@ -464,12 +465,15 @@ async fn clip_range_inner(
.to_string(),
file: filename.into(),
note: params.note.clone(),
length: params.range.as_ref().map_or(0.0, |r| r.duration()) as i64,
size: metadata.len() as i64,
bvid: "".into(),
title: "".into(),
desc: "".into(),
tags: "".into(),
length: params
.range
.as_ref()
.map_or(0.0, super::super::ffmpeg::Range::duration) as i64,
size,
bvid: String::new(),
title: String::new(),
desc: String::new(),
tags: String::new(),
area: 0,
platform: params.platform.clone(),
})
@@ -481,7 +485,10 @@ async fn clip_range_inner(
&format!(
"生成了房间 {} 的切片,长度 {}s{}",
params.room_id,
params.range.as_ref().map_or(0.0, |r| r.duration()),
params
.range
.as_ref()
.map_or(0.0, super::super::ffmpeg::Range::duration),
filename
),
)
@@ -510,8 +517,8 @@ async fn clip_range_inner(
pub async fn upload_procedure(
state: state_type!(),
event_id: String,
uid: u64,
room_id: u64,
uid: i64,
room_id: i64,
video_id: i64,
cover: String,
profile: Profile,
@@ -525,7 +532,7 @@ pub async fn upload_procedure(
id: event_id.clone(),
task_type: "upload_procedure".to_string(),
status: "pending".to_string(),
message: "".to_string(),
message: String::new(),
metadata: json!({
"uid": uid,
"room_id": room_id,
@@ -536,7 +543,7 @@ pub async fn upload_procedure(
created_at: Utc::now().to_rfc3339(),
};
state.db.add_task(&task).await?;
log::info!("Create task: {:?}", task);
log::info!("Create task: {task:?}");
match upload_procedure_inner(&state, &reporter, uid, room_id, video_id, cover, profile).await {
Ok(bvid) => {
reporter.finish(true, "投稿成功").await;
@@ -547,10 +554,10 @@ pub async fn upload_procedure(
Ok(bvid)
}
Err(e) => {
reporter.finish(false, &format!("投稿失败: {}", e)).await;
reporter.finish(false, &format!("投稿失败: {e}")).await;
state
.db
.update_task(&event_id, "failed", &format!("投稿失败: {}", e), None)
.update_task(&event_id, "failed", &format!("投稿失败: {e}"), None)
.await?;
Err(e)
}
@@ -560,8 +567,8 @@ pub async fn upload_procedure(
async fn upload_procedure_inner(
state: &state_type!(),
reporter: &ProgressReporter,
uid: u64,
room_id: u64,
uid: i64,
room_id: i64,
video_id: i64,
cover: String,
mut profile: Profile,
@@ -578,7 +585,7 @@ async fn upload_procedure_inner(
match state.client.prepare_video(reporter, &account, path).await {
Ok(video) => {
profile.cover = cover_url.await.unwrap_or("".to_string());
profile.cover = cover_url.await.unwrap_or(String::new());
if let Ok(ret) = state.client.submit_video(&account, &profile, &video).await {
// update video status and details
// 1 means uploaded
@@ -616,9 +623,9 @@ async fn upload_procedure_inner(
}
Err(e) => {
reporter
.finish(false, &format!("Preload video failed: {}", e))
.finish(false, &format!("Preload video failed: {e}"))
.await;
Err(format!("Preload video failed: {}", e))
Err(format!("Preload video failed: {e}"))
}
}
}
@@ -635,7 +642,7 @@ pub async fn get_video(state: state_type!(), id: i64) -> Result<VideoRow, String
}
#[cfg_attr(feature = "gui", tauri::command)]
pub async fn get_videos(state: state_type!(), room_id: u64) -> Result<Vec<VideoRow>, String> {
pub async fn get_videos(state: state_type!(), room_id: i64) -> Result<Vec<VideoRow>, String> {
state
.db
.get_videos(room_id)
@@ -667,7 +674,7 @@ pub async fn delete_video(state: state_type!(), id: i64) -> Result<(), String> {
let event =
events::new_webhook_event(events::CLIP_DELETED, events::Payload::Clip(video.clone()));
if let Err(e) = state.webhook_poster.post_event(&event).await {
log::error!("Post webhook event error: {}", e);
log::error!("Post webhook event error: {e}");
}
// delete video from db
@@ -721,13 +728,13 @@ pub async fn update_video_cover(
.await
.map_err(|e| e.to_string())?;
let cover_file_name = cover_path.file_name().unwrap().to_str().unwrap();
log::debug!("Update video cover: {} {}", id, cover_file_name);
log::debug!("Update video cover: {id} {cover_file_name}");
Ok(state.db.update_video_cover(id, cover_file_name).await?)
}
#[cfg_attr(feature = "gui", tauri::command)]
pub async fn get_video_subtitle(state: state_type!(), id: i64) -> Result<String, String> {
log::debug!("Get video subtitle: {}", id);
log::debug!("Get video subtitle: {id}");
let video = state.db.get_video(id).await?;
let filepath = Path::new(state.config.read().await.output.as_str()).join(&video.file);
let file = Path::new(&filepath);
@@ -735,7 +742,7 @@ pub async fn get_video_subtitle(state: state_type!(), id: i64) -> Result<String,
if let Ok(content) = std::fs::read_to_string(file.with_extension("srt")) {
Ok(content)
} else {
Ok("".to_string())
Ok(String::new())
}
}
@@ -754,7 +761,7 @@ pub async fn generate_video_subtitle(
id: event_id.clone(),
task_type: "generate_video_subtitle".to_string(),
status: "pending".to_string(),
message: "".to_string(),
message: String::new(),
metadata: json!({
"video_id": id,
})
@@ -762,7 +769,7 @@ pub async fn generate_video_subtitle(
created_at: Utc::now().to_rfc3339(),
};
state.db.add_task(&task).await?;
log::info!("Create task: {:?}", task);
log::info!("Create task: {task:?}");
let config = state.config.read().await;
let generator_type = config.subtitle_generator_type.as_str();
let whisper_model = config.whisper_model.clone();
@@ -812,22 +819,19 @@ pub async fn generate_video_subtitle(
.subtitle_content
.iter()
.map(item_to_srt)
.collect::<Vec<String>>()
.join("");
.collect::<String>();
let result = update_video_subtitle(state.clone(), id, subtitle.clone()).await;
if let Err(e) = result {
log::error!("Update video subtitle error: {}", e);
log::error!("Update video subtitle error: {e}");
}
Ok(subtitle)
}
Err(e) => {
reporter
.finish(false, &format!("字幕生成失败: {}", e))
.await;
reporter.finish(false, &format!("字幕生成失败: {e}")).await;
state
.db
.update_task(&event_id, "failed", &format!("字幕生成失败: {}", e), None)
.update_task(&event_id, "failed", &format!("字幕生成失败: {e}"), None)
.await?;
Err(e)
}
@@ -845,14 +849,14 @@ pub async fn update_video_subtitle(
let file = Path::new(&filepath);
let subtitle_path = file.with_extension("srt");
if let Err(e) = std::fs::write(subtitle_path, subtitle) {
log::warn!("Update video subtitle error: {}", e);
log::warn!("Update video subtitle error: {e}");
}
Ok(())
}
#[cfg_attr(feature = "gui", tauri::command)]
pub async fn update_video_note(state: state_type!(), id: i64, note: String) -> Result<(), String> {
log::info!("Update video note: {} -> {}", id, note);
log::info!("Update video note: {id} -> {note}");
let mut video = state.db.get_video(id).await?;
video.note = note;
state.db.update_video(&video).await?;
@@ -875,7 +879,7 @@ pub async fn encode_video_subtitle(
id: event_id.clone(),
task_type: "encode_video_subtitle".to_string(),
status: "pending".to_string(),
message: "".to_string(),
message: String::new(),
metadata: json!({
"video_id": id,
"srt_style": srt_style,
@@ -884,7 +888,7 @@ pub async fn encode_video_subtitle(
created_at: Utc::now().to_rfc3339(),
};
state.db.add_task(&task).await?;
log::info!("Create task: {:?}", task);
log::info!("Create task: {task:?}");
match encode_video_subtitle_inner(&state, &reporter, id, srt_style).await {
Ok(video) => {
reporter.finish(true, "字幕编码完成").await;
@@ -895,12 +899,10 @@ pub async fn encode_video_subtitle(
Ok(video)
}
Err(e) => {
reporter
.finish(false, &format!("字幕编码失败: {}", e))
.await;
reporter.finish(false, &format!("字幕编码失败: {e}")).await;
state
.db
.update_task(&event_id, "failed", &format!("字幕编码失败: {}", e), None)
.update_task(&event_id, "failed", &format!("字幕编码失败: {e}"), None)
.await?;
Err(e)
}
@@ -950,7 +952,7 @@ pub async fn generic_ffmpeg_command(
_state: state_type!(),
args: Vec<String>,
) -> Result<String, String> {
let args_str: Vec<&str> = args.iter().map(|s| s.as_str()).collect();
let args_str: Vec<&str> = args.iter().map(std::string::String::as_str).collect();
ffmpeg::generic_ffmpeg_command(&args_str).await
}
@@ -960,7 +962,7 @@ pub async fn import_external_video(
event_id: String,
file_path: String,
title: String,
room_id: u64,
room_id: i64,
) -> Result<VideoRow, String> {
#[cfg(feature = "gui")]
let emitter = EventEmitter::new(state.app_handle.clone());
@@ -1027,14 +1029,30 @@ pub async fn import_external_video(
match ffmpeg::generate_thumbnail(&final_target_full_path, thumbnail_timestamp).await {
Ok(path) => path.file_name().unwrap().to_str().unwrap().to_string(),
Err(e) => {
log::warn!("生成缩略图失败: {}", e);
"".to_string() // 使用空字符串,前端会显示默认图标
log::warn!("生成缩略图失败: {e}");
String::new() // 使用空字符串,前端会显示默认图标
}
};
// 步骤4: 保存到数据库
reporter.update("正在保存视频信息...");
let Ok(size) = i64::try_from(
final_target_full_path
.metadata()
.map_err(|e| e.to_string())?
.len(),
) else {
log::error!(
"Failed to convert metadata length to i64: {}",
final_target_full_path
.metadata()
.map_err(|e| e.to_string())?
.len()
);
return Err("Failed to convert metadata length to i64".to_string());
};
// 添加到数据库
let video = VideoRow {
id: 0,
@@ -1042,17 +1060,14 @@ pub async fn import_external_video(
platform: "imported".to_string(),
title,
file: target_filename,
note: "".to_string(),
note: String::new(),
length: metadata.duration as i64,
size: final_target_full_path
.metadata()
.map_err(|e| e.to_string())?
.len() as i64,
size,
status: 1, // 导入完成
cover: cover_path,
desc: "".to_string(),
tags: "".to_string(),
bvid: "".to_string(),
desc: String::new(),
tags: String::new(),
bvid: String::new(),
area: 0,
created_at: Utc::now().to_rfc3339(),
};
@@ -1096,7 +1111,7 @@ pub async fn clip_video(
id: event_id.clone(),
task_type: "clip_video".to_string(),
status: "pending".to_string(),
message: "".to_string(),
message: String::new(),
metadata: json!({
"parent_video_id": parent_video_id,
"start_time": start_time,
@@ -1127,10 +1142,10 @@ pub async fn clip_video(
Ok(video)
}
Err(e) => {
reporter.finish(false, &format!("切片失败: {}", e)).await;
reporter.finish(false, &format!("切片失败: {e}")).await;
state
.db
.update_task(&event_id, "failed", &format!("切片失败: {}", e), None)
.update_task(&event_id, "failed", &format!("切片失败: {e}"), None)
.await?;
Err(e)
}
@@ -1208,8 +1223,8 @@ async fn clip_video_inner(
.unwrap()
.to_string(),
Err(e) => {
log::warn!("生成切片缩略图失败: {}", e);
"".to_string() // 使用空字符串,前端会显示默认图标
log::warn!("生成切片缩略图失败: {e}");
String::new() // 使用空字符串,前端会显示默认图标
}
};
@@ -1221,14 +1236,14 @@ async fn clip_video_inner(
platform: "clip".to_string(),
title: clip_title,
file: output_filename,
note: "".to_string(),
note: String::new(),
length: (end_time - start_time) as i64,
size: file_metadata.len() as i64,
size: i64::try_from(file_metadata.len()).map_err(|e| e.to_string())?,
status: 1,
cover: clip_cover_path,
desc: "".to_string(),
tags: "".to_string(),
bvid: "".to_string(),
desc: String::new(),
tags: String::new(),
bvid: String::new(),
area: parent_video.area,
created_at: Local::now().to_rfc3339(),
};
@@ -1250,7 +1265,7 @@ pub async fn get_file_size(file_path: String) -> Result<u64, String> {
let path = Path::new(&file_path);
match std::fs::metadata(path) {
Ok(metadata) => Ok(metadata.len()),
Err(e) => Err(format!("无法获取文件信息: {}", e)),
Err(e) => Err(format!("无法获取文件信息: {e}")),
}
}
@@ -1291,7 +1306,7 @@ pub async fn batch_import_external_videos(
state: state_type!(),
event_id: String,
file_paths: Vec<String>,
room_id: u64,
room_id: i64,
) -> Result<BatchImportResult, String> {
if file_paths.is_empty() {
return Ok(BatchImportResult {
@@ -1326,12 +1341,11 @@ pub async fn batch_import_external_videos(
// 更新批量进度,只显示进度信息
batch_reporter.update(&format!(
"正在导入第{}个,共{}个文件",
current_index, total_files
"正在导入第{current_index}个,共{total_files}个文件"
));
// 为每个文件创建独立的事件ID
let file_event_id = format!("{}_file_{}", event_id, index);
let file_event_id = format!("{event_id}_file_{index}");
// 从文件名生成标题(去掉扩展名)
let title = file_name.clone();
@@ -1352,22 +1366,19 @@ pub async fn batch_import_external_videos(
log::info!("批量导入成功: {} (ID: {})", file_path, video.id);
}
Err(e) => {
let error_msg = format!("导入失败 {}: {}", file_path, e);
let error_msg = format!("导入失败 {file_path}: {e}");
errors.push(error_msg.clone());
failed_imports += 1;
log::error!("批量导入失败: {}", error_msg);
log::error!("批量导入失败: {error_msg}");
}
}
}
// 完成批量导入
let result_msg = if failed_imports == 0 {
format!("批量导入完成:成功导入{}个文件", successful_imports)
format!("批量导入完成:成功导入{successful_imports}个文件")
} else {
format!(
"批量导入完成:成功{}个,失败{}",
successful_imports, failed_imports
)
format!("批量导入完成:成功{successful_imports}个,失败{failed_imports}")
};
batch_reporter
.finish(failed_imports == 0, &result_msg)
@@ -1407,7 +1418,7 @@ pub async fn get_import_progress(
return Ok(Some(serde_json::json!({
"task_id": task.id,
"file_name": metadata.get("file_name").and_then(|v| v.as_str()).unwrap_or("未知文件"),
"file_size": metadata.get("file_size").and_then(|v| v.as_u64()).unwrap_or(0),
"file_size": metadata.get("file_size").and_then(serde_json::Value::as_u64).unwrap_or(0),
"message": task.message,
"status": task.status,
"created_at": task.created_at

View File

@@ -447,7 +447,7 @@ async fn handler_get_recorder_list(
#[serde(rename_all = "camelCase")]
struct AddRecorderRequest {
platform: String,
room_id: u64,
room_id: i64,
extra: String,
}
@@ -465,7 +465,7 @@ async fn handler_add_recorder(
#[serde(rename_all = "camelCase")]
struct RemoveRecorderRequest {
platform: String,
room_id: u64,
room_id: i64,
}
async fn handler_remove_recorder(
@@ -482,7 +482,7 @@ async fn handler_remove_recorder(
#[serde(rename_all = "camelCase")]
struct GetRoomInfoRequest {
platform: String,
room_id: u64,
room_id: i64,
}
async fn handler_get_room_info(
@@ -503,7 +503,7 @@ async fn handler_get_archive_disk_usage(
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct GetArchivesRequest {
room_id: u64,
room_id: i64,
offset: u64,
limit: u64,
}
@@ -519,7 +519,7 @@ async fn handler_get_archives(
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct GetArchiveRequest {
room_id: u64,
room_id: i64,
live_id: String,
}
@@ -535,7 +535,7 @@ async fn handler_get_archive(
#[serde(rename_all = "camelCase")]
struct GetArchiveSubtitleRequest {
platform: String,
room_id: u64,
room_id: i64,
live_id: String,
}
@@ -552,7 +552,7 @@ async fn handler_get_archive_subtitle(
#[serde(rename_all = "camelCase")]
struct GenerateArchiveSubtitleRequest {
platform: String,
room_id: u64,
room_id: i64,
live_id: String,
}
@@ -569,7 +569,7 @@ async fn handler_generate_archive_subtitle(
#[serde(rename_all = "camelCase")]
struct DeleteArchiveRequest {
platform: String,
room_id: u64,
room_id: i64,
live_id: String,
}
@@ -585,7 +585,7 @@ async fn handler_delete_archive(
#[serde(rename_all = "camelCase")]
struct DeleteArchivesRequest {
platform: String,
room_id: u64,
room_id: i64,
live_ids: Vec<String>,
}
@@ -601,7 +601,7 @@ async fn handler_delete_archives(
#[serde(rename_all = "camelCase")]
struct GetDanmuRecordRequest {
platform: String,
room_id: u64,
room_id: i64,
live_id: String,
}
@@ -618,7 +618,7 @@ async fn handler_get_danmu_record(
#[serde(rename_all = "camelCase")]
struct SendDanmakuRequest {
uid: u64,
room_id: u64,
room_id: i64,
message: String,
}
@@ -647,7 +647,7 @@ async fn handler_get_today_record_count(
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct GetRecentRecordRequest {
room_id: u64,
room_id: i64,
offset: u64,
limit: u64,
}
@@ -665,7 +665,7 @@ async fn handler_get_recent_record(
#[serde(rename_all = "camelCase")]
struct SetEnableRequest {
platform: String,
room_id: u64,
room_id: i64,
enabled: bool,
}
@@ -697,7 +697,7 @@ async fn handler_clip_range(
struct UploadProcedureRequest {
event_id: String,
uid: u64,
room_id: u64,
room_id: i64,
video_id: i64,
cover: String,
profile: Profile,
@@ -751,7 +751,7 @@ async fn handler_get_video(
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct GetVideosRequest {
room_id: u64,
room_id: i64,
}
async fn handler_get_videos(
@@ -960,7 +960,7 @@ struct ImportExternalVideoRequest {
event_id: String,
file_path: String,
title: String,
room_id: u64,
room_id: i64,
}
async fn handler_import_external_video(
@@ -1021,7 +1021,7 @@ struct ScanImportedDirectoryResponse {
#[serde(rename_all = "camelCase")]
struct BatchImportInPlaceRequest {
file_paths: Vec<String>,
room_id: u64,
room_id: i64,
}
#[derive(Debug, Serialize, Deserialize)]
@@ -1029,7 +1029,7 @@ struct BatchImportInPlaceRequest {
struct BatchImportExternalVideosRequest {
event_id: String,
file_paths: Vec<String>,
room_id: u64,
room_id: i64,
}
#[derive(Debug, Serialize, Deserialize)]
@@ -1117,7 +1117,7 @@ struct UploadedFileInfo {
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct UploadAndImportRequest {
room_id: u64,
room_id: i64,
}
#[derive(Debug, Serialize, Deserialize)]

View File

@@ -129,67 +129,67 @@ fn get_migrations() -> Vec<Migration> {
Migration {
version: 1,
description: "create_initial_tables",
sql: r#"
sql: r"
CREATE TABLE accounts (uid INTEGER, platform TEXT NOT NULL DEFAULT 'bilibili', name TEXT, avatar TEXT, csrf TEXT, cookies TEXT, created_at TEXT, PRIMARY KEY(uid, platform));
CREATE TABLE recorders (room_id INTEGER PRIMARY KEY, platform TEXT NOT NULL DEFAULT 'bilibili', created_at TEXT);
CREATE TABLE records (live_id TEXT PRIMARY KEY, platform TEXT NOT NULL DEFAULT 'bilibili', room_id INTEGER, title TEXT, length INTEGER, size INTEGER, cover BLOB, created_at TEXT);
CREATE TABLE danmu_statistics (live_id TEXT PRIMARY KEY, room_id INTEGER, value INTEGER, time_point TEXT);
CREATE TABLE messages (id INTEGER PRIMARY KEY AUTOINCREMENT, title TEXT, content TEXT, read INTEGER, created_at TEXT);
CREATE TABLE videos (id INTEGER PRIMARY KEY AUTOINCREMENT, room_id INTEGER, cover TEXT, file TEXT, length INTEGER, size INTEGER, status INTEGER, bvid TEXT, title TEXT, desc TEXT, tags TEXT, area INTEGER, created_at TEXT);
"#,
",
kind: MigrationKind::Up,
},
Migration {
version: 2,
description: "add_auto_start_column",
sql: r#"ALTER TABLE recorders ADD COLUMN auto_start INTEGER NOT NULL DEFAULT 1;"#,
sql: r"ALTER TABLE recorders ADD COLUMN auto_start INTEGER NOT NULL DEFAULT 1;",
kind: MigrationKind::Up,
},
// add platform column to videos table
Migration {
version: 3,
description: "add_platform_column",
sql: r#"ALTER TABLE videos ADD COLUMN platform TEXT;"#,
sql: r"ALTER TABLE videos ADD COLUMN platform TEXT;",
kind: MigrationKind::Up,
},
// add task table to record encode/upload task
Migration {
version: 4,
description: "add_task_table",
sql: r#"CREATE TABLE tasks (id TEXT PRIMARY KEY, type TEXT, status TEXT, message TEXT, metadata TEXT, created_at TEXT);"#,
sql: r"CREATE TABLE tasks (id TEXT PRIMARY KEY, type TEXT, status TEXT, message TEXT, metadata TEXT, created_at TEXT);",
kind: MigrationKind::Up,
},
// add id_str column to support string IDs like Douyin sec_uid while keeping uid for Bilibili compatibility
Migration {
version: 5,
description: "add_id_str_column",
sql: r#"ALTER TABLE accounts ADD COLUMN id_str TEXT;"#,
sql: r"ALTER TABLE accounts ADD COLUMN id_str TEXT;",
kind: MigrationKind::Up,
},
// add extra column to recorders
Migration {
version: 6,
description: "add_extra_column_to_recorders",
sql: r#"ALTER TABLE recorders ADD COLUMN extra TEXT;"#,
sql: r"ALTER TABLE recorders ADD COLUMN extra TEXT;",
kind: MigrationKind::Up,
},
// add indexes
Migration {
version: 7,
description: "add_indexes",
sql: r#"
sql: r"
CREATE INDEX idx_records_live_id ON records (room_id, live_id);
CREATE INDEX idx_records_created_at ON records (room_id, created_at);
CREATE INDEX idx_videos_room_id ON videos (room_id);
CREATE INDEX idx_videos_created_at ON videos (created_at);
"#,
",
kind: MigrationKind::Up,
},
// add note column for video
Migration {
version: 8,
description: "add_note_column_for_video",
sql: r#"ALTER TABLE videos ADD COLUMN note TEXT;"#,
sql: r"ALTER TABLE videos ADD COLUMN note TEXT;",
kind: MigrationKind::Up,
},
]
@@ -362,7 +362,7 @@ async fn setup_app_state(app: &tauri::App) -> Result<State, Box<dyn std::error::
let config_path = app_dirs.config_dir.join("Conf.toml");
let cache_path = app_dirs.cache_dir.join("cache");
let output_path = app_dirs.data_dir.join("output");
log::info!("Loading config from {:?}", config_path);
log::info!("Loading config from {config_path:?}");
let config = match Config::load(&config_path, &cache_path, &output_path) {
Ok(config) => config,
Err(e) => {
@@ -426,11 +426,11 @@ async fn setup_app_state(app: &tauri::App) -> Result<State, Box<dyn std::error::
)
.await
{
log::error!("Error when updating Bilibili account info {}", e);
log::error!("Error when updating Bilibili account info {e}");
}
}
Err(e) => {
log::error!("Get Bilibili user info failed {}", e);
log::error!("Get Bilibili user info failed {e}");
}
}
} else if platform == PlatformType::Douyin {
@@ -455,11 +455,11 @@ async fn setup_app_state(app: &tauri::App) -> Result<State, Box<dyn std::error::
)
.await
{
log::error!("Error when updating Douyin account info {}", e);
log::error!("Error when updating Douyin account info {e}");
}
}
Err(e) => {
log::error!("Get Douyin user info failed {}", e);
log::error!("Get Douyin user info failed {e}");
}
}
}
@@ -469,7 +469,7 @@ async fn setup_app_state(app: &tauri::App) -> Result<State, Box<dyn std::error::
let cache_path = config_clone.read().await.cache.clone();
let output_path = config_clone.read().await.output.clone();
if let Err(e) = try_rebuild_archives(&db_clone, cache_path.clone().into()).await {
log::warn!("Rebuilding archive table failed: {}", e);
log::warn!("Rebuilding archive table failed: {e}");
}
let _ = try_convert_live_covers(&db_clone, cache_path.into()).await;
let _ = try_convert_clip_covers(&db_clone, output_path.into()).await;

View File

@@ -43,13 +43,13 @@ pub async fn try_rebuild_archives(
PlatformType::from_str(room.platform.as_str()).unwrap(),
live_id,
room_id,
&format!("UnknownLive {}", live_id),
&format!("UnknownLive {live_id}"),
None,
Some(&created_at),
)
.await?;
log::info!("rebuild archive {:?}", record);
log::info!("rebuild archive {record:?}");
}
}
}
@@ -64,7 +64,7 @@ pub async fn try_convert_live_covers(
for room in rooms {
let room_id = room.room_id;
let room_cache_path = cache_path.join(format!("{}/{}", room.platform, room_id));
let records = db.get_records(room_id, 0, 999999999).await?;
let records = db.get_records(room_id, 0, 999_999_999).await?;
for record in &records {
let record_path = room_cache_path.join(record.live_id.clone());
let cover = record.cover.clone();

View File

@@ -15,7 +15,7 @@ pub enum Event {
message: String,
},
DanmuReceived {
room: u64,
room: i64,
ts: i64,
content: String,
},

View File

@@ -98,7 +98,7 @@ impl EventEmitter {
Event::DanmuReceived { room, ts, content } => {
self.app_handle
.emit(
&format!("danmu:{}", room),
&format!("danmu:{room}"),
DanmuEntry {
ts: *ts,
content: content.clone(),
@@ -117,7 +117,7 @@ impl ProgressReporter {
pub async fn new(emitter: &EventEmitter, event_id: &str) -> Result<Self, String> {
// if already exists, return
if CANCEL_FLAG_MAP.read().await.get(event_id).is_some() {
log::error!("Task already exists: {}", event_id);
log::error!("Task already exists: {event_id}");
emitter.emit(&Event::ProgressFinished {
id: event_id.to_string(),
success: false,

View File

@@ -40,10 +40,10 @@ use async_trait::async_trait;
#[cfg(feature = "gui")]
use {tauri::AppHandle, tauri_plugin_notification::NotificationExt};
/// A recorder for BiliBili live streams
/// A recorder for `BiliBili` live streams
///
/// This recorder fetches, caches and serves TS entries, currently supporting only StreamType::FMP4.
/// As high-quality streams are accessible only to logged-in users, the use of a BiliClient, which manages cookies, is required.
/// This recorder fetches, caches and serves TS entries, currently supporting only `StreamType::FMP4`.
/// As high-quality streams are accessible only to logged-in users, the use of a `BiliClient`, which manages cookies, is required.
// TODO implement StreamType::TS
#[derive(Clone)]
pub struct BiliRecorder {
@@ -54,7 +54,7 @@ pub struct BiliRecorder {
db: Arc<Database>,
account: AccountRow,
config: Arc<RwLock<Config>>,
room_id: u64,
room_id: i64,
room_info: Arc<RwLock<RoomInfo>>,
user_info: Arc<RwLock<UserInfo>>,
live_status: Arc<RwLock<bool>>,
@@ -81,7 +81,7 @@ pub struct BiliRecorderOptions {
pub app_handle: AppHandle,
pub emitter: EventEmitter,
pub db: Arc<Database>,
pub room_id: u64,
pub room_id: i64,
pub account: AccountRow,
pub config: Arc<RwLock<Config>>,
pub auto_start: bool,
@@ -298,30 +298,27 @@ impl BiliRecorder {
}
let master_manifest = master_manifest.unwrap();
let variant = match master_manifest {
Playlist::MasterPlaylist(playlist) => {
let variants = playlist.variants.clone();
variants
.into_iter()
.filter(|variant| {
if let Some(other_attributes) = &variant.other_attributes {
if let Some(QuotedOrUnquoted::Quoted(bili_display)) =
other_attributes.get("BILI-DISPLAY")
{
bili_display == "原画"
} else {
false
}
let variant = if let Playlist::MasterPlaylist(playlist) = master_manifest {
let variants = playlist.variants.clone();
variants
.into_iter()
.filter(|variant| {
if let Some(other_attributes) = &variant.other_attributes {
if let Some(QuotedOrUnquoted::Quoted(bili_display)) =
other_attributes.get("BILI-DISPLAY")
{
bili_display == "原画"
} else {
false
}
})
.collect::<Vec<_>>()
}
_ => {
log::error!("[{}]Master manifest is not a media playlist", self.room_id);
vec![]
}
} else {
false
}
})
.collect::<Vec<_>>()
} else {
log::error!("[{}]Master manifest is not a media playlist", self.room_id);
vec![]
};
if variant.is_empty() {
@@ -505,7 +502,7 @@ impl BiliRecorder {
});
}
let mut header_url = String::from("");
let mut header_url = String::new();
let re = Regex::new(r"h.*\.m4s").unwrap();
if let Some(captures) = re.captures(&index_content) {
header_url = captures.get(0).unwrap().as_str().to_string();
@@ -525,7 +522,7 @@ impl BiliRecorder {
&self,
header_url: &str,
) -> Result<String, super::errors::RecorderError> {
log::debug!("Get resolution from {}", header_url);
log::debug!("Get resolution from {header_url}");
let resolution = get_video_resolution(header_url)
.await
.map_err(super::errors::RecorderError::FfmpegError)?;
@@ -646,7 +643,7 @@ impl BiliRecorder {
.client
.read()
.await
.download_ts(&full_header_url, &format!("{}/{}", work_dir, file_name))
.download_ts(&full_header_url, &format!("{work_dir}/{file_name}"))
.await
{
Ok(size) => {
@@ -794,7 +791,7 @@ impl BiliRecorder {
match playlist {
Playlist::MasterPlaylist(pl) => {
log::debug!("[{}]Master playlist:\n{:?}", self.room_id, pl)
log::debug!("[{}]Master playlist:\n{:?}", self.room_id, pl);
}
Playlist::MediaPlaylist(pl) => {
let mut new_segment_fetched = false;
@@ -803,12 +800,11 @@ impl BiliRecorder {
.read()
.await
.as_ref()
.map(|store| store.last_sequence)
.unwrap_or(0); // For first-time recording, start from 0
.map_or(0, |store| store.last_sequence); // For first-time recording, start from 0
// Parse BILI-AUX offsets to calculate precise durations for FMP4
let mut segment_offsets = Vec::new();
for ts in pl.segments.iter() {
for ts in &pl.segments {
let mut seg_offset: i64 = 0;
for tag in &ts.unknown_tags {
if tag.tag == "BILI-AUX" {
@@ -865,7 +861,7 @@ impl BiliRecorder {
// encode segment offset into filename
let file_name = ts.uri.split('/').next_back().unwrap_or(&ts.uri);
let ts_length = pl.target_duration as f64;
let ts_length = f64::from(pl.target_duration);
// Calculate precise duration from BILI-AUX offsets for FMP4
let precise_length_from_aux =
@@ -933,7 +929,7 @@ impl BiliRecorder {
match client
.read()
.await
.download_ts(&ts_url, &format!("{}/{}", work_dir, file_name))
.download_ts(&ts_url, &format!("{work_dir}/{file_name}"))
.await
{
Ok(size) => {
@@ -996,7 +992,7 @@ impl BiliRecorder {
aux_duration
} else if current_stream.format != StreamType::FMP4 {
// For regular TS segments, use direct ffprobe
let file_path = format!("{}/{}", work_dir, file_name);
let file_path = format!("{work_dir}/{file_name}");
match crate::ffmpeg::get_segment_duration(std::path::Path::new(
&file_path,
))
@@ -1166,13 +1162,13 @@ impl BiliRecorder {
range = Some(Range {
x: start as f32,
y: end as f32,
})
});
}
entry_store.manifest(true, true, range)
}
/// if fetching live/last stream m3u8, all entries are cached in memory, so it will be much faster than read_dir
/// if fetching live/last stream m3u8, all entries are cached in memory, so it will be much faster than `read_dir`
async fn generate_live_m3u8(&self, start: i64, end: i64) -> String {
let live_status = *self.live_status.read().await;
let range = if start != 0 || end != 0 {
@@ -1274,10 +1270,10 @@ impl super::Recorder for BiliRecorder {
log::debug!("[{}]Stop recorder", self.room_id);
*self.quit.lock().await = true;
if let Some(danmu_task) = self.danmu_task.lock().await.as_mut() {
let _ = danmu_task.abort();
let () = danmu_task.abort();
}
if let Some(record_task) = self.record_task.lock().await.as_mut() {
let _ = record_task.abort();
let () = record_task.abort();
}
log::info!("[{}]Recorder quit.", self.room_id);
}
@@ -1303,7 +1299,7 @@ impl super::Recorder for BiliRecorder {
"#EXT-X-STREAM-INF:BANDWIDTH=1280000,RESOLUTION=1920x1080,CODECS={},DANMU={}\n",
"\"avc1.64001F,mp4a.40.2\"", offset
);
m3u8_content += &format!("playlist.m3u8?start={}&end={}\n", start, end);
m3u8_content += &format!("playlist.m3u8?start={start}&end={end}\n");
m3u8_content
}
@@ -1475,8 +1471,7 @@ impl super::Recorder for BiliRecorder {
.subtitle_content
.iter()
.map(item_to_srt)
.collect::<Vec<String>>()
.join("");
.collect::<String>();
subtitle_file.write_all(subtitle_content.as_bytes()).await?;
log::info!("[{}]Subtitle file written", self.room_id);
// remove tmp file

View File

@@ -39,16 +39,16 @@ struct UploadParams<'a> {
pub struct RoomInfo {
pub live_status: u8,
pub room_cover_url: String,
pub room_id: u64,
pub room_id: i64,
pub room_keyframe_url: String,
pub room_title: String,
pub user_id: u64,
pub user_id: i64,
pub live_start_time: i64,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct UserInfo {
pub user_id: u64,
pub user_id: i64,
pub user_name: String,
pub user_sign: String,
pub user_avatar_url: String,
@@ -68,7 +68,7 @@ pub struct QrStatus {
pub cookies: String,
}
/// BiliClient is thread safe
/// `BiliClient` is thread safe
pub struct BiliClient {
client: Client,
}
@@ -105,7 +105,7 @@ impl BiliStream {
host: host.into(),
path: BiliStream::get_path(base_url),
extra: extra.into(),
expire: BiliStream::get_expire(extra).unwrap_or(600000),
expire: BiliStream::get_expire(extra).unwrap_or(600_000),
}
}
@@ -125,7 +125,7 @@ impl BiliStream {
pub fn get_path(base_url: &str) -> String {
match base_url.rfind('/') {
Some(pos) => base_url[..pos + 1].to_string(),
Some(pos) => base_url[..=pos].to_string(),
None => base_url.to_string(),
}
}
@@ -184,8 +184,7 @@ impl BiliClient {
let res: serde_json::Value = self
.client
.get(format!(
"https://passport.bilibili.com/x/passport-login/web/qrcode/poll?qrcode_key={}",
qrcode_key
"https://passport.bilibili.com/x/passport-login/web/qrcode/poll?qrcode_key={qrcode_key}"
))
.headers(headers)
.send()
@@ -193,7 +192,7 @@ impl BiliClient {
.json()
.await?;
let code: u8 = res["data"]["code"].as_u64().unwrap_or(400) as u8;
let mut cookies: String = "".to_string();
let mut cookies: String = String::new();
if code == 0 {
let url = res["data"]["url"]
.as_str()
@@ -228,7 +227,7 @@ impl BiliClient {
pub async fn get_user_info(
&self,
account: &AccountRow,
user_id: u64,
user_id: i64,
) -> Result<UserInfo, BiliClientError> {
let params: Value = json!({
"mid": user_id.to_string(),
@@ -247,8 +246,7 @@ impl BiliClient {
let resp = self
.client
.get(format!(
"https://api.bilibili.com/x/space/wbi/acc/info?{}",
params
"https://api.bilibili.com/x/space/wbi/acc/info?{params}"
))
.headers(headers)
.send()
@@ -268,7 +266,7 @@ impl BiliClient {
.as_u64()
.ok_or(BiliClientError::InvalidResponseJson { resp: res.clone() })?;
if code != 0 {
log::error!("Get user info failed {}", code);
log::error!("Get user info failed {code}");
return Err(BiliClientError::InvalidMessageCode { code });
}
Ok(UserInfo {
@@ -282,7 +280,7 @@ impl BiliClient {
pub async fn get_room_info(
&self,
account: &AccountRow,
room_id: u64,
room_id: i64,
) -> Result<RoomInfo, BiliClientError> {
let mut headers = self.generate_user_agent_header();
if let Ok(cookies) = account.cookies.parse() {
@@ -293,8 +291,7 @@ impl BiliClient {
let response = self
.client
.get(format!(
"https://api.live.bilibili.com/room/v1/Room/get_info?room_id={}",
room_id
"https://api.live.bilibili.com/room/v1/Room/get_info?room_id={room_id}"
))
.headers(headers)
.send()
@@ -318,7 +315,7 @@ impl BiliClient {
}
let room_id = res["data"]["room_id"]
.as_u64()
.as_i64()
.ok_or(BiliClientError::InvalidValue)?;
let room_title = res["data"]["title"]
.as_str()
@@ -333,7 +330,7 @@ impl BiliClient {
.ok_or(BiliClientError::InvalidValue)?
.to_string();
let user_id = res["data"]["uid"]
.as_u64()
.as_i64()
.ok_or(BiliClientError::InvalidValue)?;
let live_status = res["data"]["live_status"]
.as_u64()
@@ -358,12 +355,12 @@ impl BiliClient {
- 8 * 3600
};
Ok(RoomInfo {
room_id,
room_title,
room_cover_url,
room_keyframe_url,
user_id,
live_status,
room_cover_url,
room_id,
room_keyframe_url,
room_title,
user_id,
live_start_time,
})
}
@@ -450,13 +447,13 @@ impl BiliClient {
.get(1)
.unwrap()
.as_str();
let raw_string = format!("{}{}", img, sub);
let raw_string = format!("{img}{sub}");
let mut encoded = Vec::new();
table.into_iter().for_each(|x| {
for x in table {
if x < raw_string.len() {
encoded.push(raw_string.as_bytes()[x]);
}
});
}
// only keep 32 bytes of encoded
encoded = encoded[0..32].to_vec();
let encoded = String::from_utf8(encoded).unwrap();
@@ -474,12 +471,12 @@ impl BiliClient {
.as_object()
.unwrap()
.keys()
.map(|x| x.to_owned())
.map(std::borrow::ToOwned::to_owned)
.collect::<Vec<String>>();
// sort keys
keys.sort();
let mut params = String::new();
keys.iter().for_each(|x| {
for x in &keys {
params.push_str(x);
params.push('=');
// Value filters !'()* characters
@@ -495,10 +492,10 @@ impl BiliClient {
if x != keys.last().unwrap() {
params.push('&');
}
});
}
// md5 params+encoded
let w_rid = md5::compute(params.to_string() + encoded.as_str());
let params = params + format!("&w_rid={:x}", w_rid).as_str();
let params = params + format!("&w_rid={w_rid:x}").as_str();
Ok(params)
}
@@ -576,7 +573,7 @@ impl BiliClient {
}
read_total += size;
log::debug!("size: {}, total: {}", size, read_total);
log::debug!("size: {size}, total: {read_total}");
if size > 0 && (read_total as u64) < chunk_size {
continue;
}
@@ -638,7 +635,7 @@ impl BiliClient {
}
}
Err(e) => {
log::error!("Upload error: {}", e);
log::error!("Upload error: {e}");
retry_count += 1;
if retry_count < max_retries {
tokio::time::sleep(Duration::from_secs(2u64.pow(retry_count as u32)))
@@ -650,10 +647,7 @@ impl BiliClient {
if !success {
return Err(BiliClientError::UploadError {
err: format!(
"Failed to upload chunk {} after {} retries",
chunk, max_retries
),
err: format!("Failed to upload chunk {chunk} after {max_retries} retries"),
});
}
@@ -718,9 +712,9 @@ impl BiliClient {
) -> Result<profile::Video, BiliClientError> {
log::info!("Start Preparing Video: {}", video_file.to_str().unwrap());
let preupload = self.preupload_video(account, video_file).await?;
log::info!("Preupload Response: {:?}", preupload);
log::info!("Preupload Response: {preupload:?}");
let metaposted = self.post_video_meta(&preupload, video_file).await?;
log::info!("Post Video Meta Response: {:?}", metaposted);
log::info!("Post Video Meta Response: {metaposted:?}");
let uploaded = self
.upload_video(UploadParams {
reporter,
@@ -729,7 +723,7 @@ impl BiliClient {
video_file,
})
.await?;
log::info!("Uploaded: {}", uploaded);
log::info!("Uploaded: {uploaded}");
self.end_upload(&preupload, &metaposted, uploaded).await?;
let filename = Path::new(&metaposted.key)
.file_stem()
@@ -739,7 +733,7 @@ impl BiliClient {
Ok(profile::Video {
title: filename.to_string(),
filename: filename.to_string(),
desc: "".to_string(),
desc: String::new(),
cid: preupload.biz_id,
})
}
@@ -768,7 +762,7 @@ impl BiliClient {
.post(&url)
.headers(headers)
.header("Content-Type", "application/json; charset=UTF-8")
.body(serde_json::ser::to_string(&preprofile).unwrap_or("".to_string()))
.body(serde_json::ser::to_string(&preprofile).unwrap_or_default())
.send()
.await
{
@@ -780,12 +774,12 @@ impl BiliClient {
_ => Err(BiliClientError::InvalidResponse),
}
} else {
log::error!("Parse response failed: {}", json);
log::error!("Parse response failed: {json}");
Err(BiliClientError::InvalidResponse)
}
}
Err(e) => {
log::error!("Send failed {}", e);
log::error!("Send failed {e}");
Err(BiliClientError::InvalidResponse)
}
}
@@ -824,12 +818,12 @@ impl BiliClient {
_ => Err(BiliClientError::InvalidResponse),
}
} else {
log::error!("Parse response failed: {}", json);
log::error!("Parse response failed: {json}");
Err(BiliClientError::InvalidResponse)
}
}
Err(e) => {
log::error!("Send failed {}", e);
log::error!("Send failed {e}");
Err(BiliClientError::InvalidResponse)
}
}
@@ -838,7 +832,7 @@ impl BiliClient {
pub async fn send_danmaku(
&self,
account: &AccountRow,
room_id: u64,
room_id: i64,
message: &str,
) -> Result<(), BiliClientError> {
let url = "https://api.live.bilibili.com/msg/send".to_string();
@@ -856,7 +850,7 @@ impl BiliClient {
("fontsize", "25"),
("room_type", "0"),
("rnd", &format!("{}", chrono::Local::now().timestamp())),
("roomid", &format!("{}", room_id)),
("roomid", &format!("{room_id}")),
("csrf", &account.csrf),
("csrf_token", &account.csrf),
];

View File

@@ -38,7 +38,7 @@ impl DanmuStorage {
let parts: Vec<&str> = line.split(':').collect();
let ts: i64 = parts[0].parse().unwrap();
let content = parts[1].to_string();
preload_cache.push(DanmuEntry { ts, content })
preload_cache.push(DanmuEntry { ts, content });
}
let file = OpenOptions::new()
.append(true)
@@ -61,7 +61,7 @@ impl DanmuStorage {
.file
.write()
.await
.write(format!("{}:{}\n", ts, content).as_bytes())
.write(format!("{ts}:{content}\n").as_bytes())
.await;
}

View File

@@ -46,7 +46,7 @@ pub struct DouyinRecorder {
client: client::DouyinClient,
db: Arc<Database>,
account: AccountRow,
room_id: u64,
room_id: i64,
sec_user_id: String,
room_info: Arc<RwLock<Option<client::DouyinBasicRoomInfo>>>,
stream_url: Arc<RwLock<Option<String>>>,
@@ -67,12 +67,48 @@ pub struct DouyinRecorder {
record_task: Arc<Mutex<Option<JoinHandle<()>>>>,
}
/// Pick the HLS stream url out of the room's raw `stream_data` JSON payload.
///
/// Returns `None` (and logs the failure) when the payload cannot be parsed
/// or when it contains no HLS url.
fn get_best_stream_url(room_info: &client::DouyinBasicRoomInfo) -> Option<String> {
    let stream_data = room_info.stream_data.clone();
    // Deserialize the raw JSON string into the typed stream description.
    match serde_json::from_str::<stream_info::StreamInfo>(&stream_data) {
        Ok(parsed) => {
            let hls = parsed.data.origin.main.hls;
            if hls.is_empty() {
                log::error!("No stream url found in stream_data: {stream_data}");
                None
            } else {
                Some(hls)
            }
        }
        Err(err) => {
            log::error!("Failed to parse stream data: {err} {stream_data}");
            None
        }
    }
}
/// Split a stream URL into its base path (everything up to and including the
/// last `/`) and its query string (from the first `?` to the end, or empty).
///
/// Example: `http://cdn.example.cn/pull-hls/stream-1.m3u8?sign=abc&expire=1`
/// yields base `http://cdn.example.cn/pull-hls/` and query `?sign=abc&expire=1`.
fn parse_stream_url(stream_url: &str) -> (String, String) {
    // No '/' at all: the whole input is treated as the base URL.
    let base_url = match stream_url.rfind('/') {
        Some(idx) => &stream_url[..=idx],
        None => stream_url,
    };
    // No '?': the query portion is empty.
    let query_params = match stream_url.find('?') {
        Some(idx) => &stream_url[idx..],
        None => "",
    };
    (base_url.to_string(), query_params.to_string())
}
impl DouyinRecorder {
#[allow(clippy::too_many_arguments)]
pub async fn new(
#[cfg(not(feature = "headless"))] app_handle: AppHandle,
emitter: EventEmitter,
room_id: u64,
room_id: i64,
sec_user_id: &str,
config: Arc<RwLock<Config>>,
account: &AccountRow,
@@ -201,15 +237,15 @@ impl DouyinRecorder {
if !info.hls_url.is_empty() {
// Only set stream URL, don't create record yet
// Record will be created when first ts download succeeds
let new_stream_url = self.get_best_stream_url(&info).await;
let new_stream_url = get_best_stream_url(&info);
if new_stream_url.is_none() {
log::error!("No stream url found in room_info: {:#?}", info);
log::error!("No stream url found in room_info: {info:#?}");
return false;
}
log::info!("New douyin stream URL: {}", new_stream_url.clone().unwrap());
*self.stream_url.write().await = Some(new_stream_url.unwrap());
*self.danmu_room_id.write().await = info.room_id_str.clone();
(*self.danmu_room_id.write().await).clone_from(&info.room_id_str);
}
true
@@ -232,12 +268,12 @@ impl DouyinRecorder {
.read()
.await
.clone()
.parse::<u64>()
.parse::<i64>()
.unwrap_or(0);
let danmu_stream = DanmuStream::new(ProviderType::Douyin, &cookies, danmu_room_id).await;
if danmu_stream.is_err() {
let err = danmu_stream.err().unwrap();
log::error!("Failed to create danmu stream: {}", err);
log::error!("Failed to create danmu stream: {err}");
return Err(super::errors::RecorderError::DanmuStreamError(err));
}
let danmu_stream = danmu_stream.unwrap();
@@ -289,44 +325,6 @@ impl DouyinRecorder {
)
}
async fn get_best_stream_url(&self, room_info: &client::DouyinBasicRoomInfo) -> Option<String> {
let stream_data = room_info.stream_data.clone();
// parse stream_data into stream_info
let stream_info = serde_json::from_str::<stream_info::StreamInfo>(&stream_data);
if let Ok(stream_info) = stream_info {
// find the best stream url
if stream_info.data.origin.main.hls.is_empty() {
log::error!("No stream url found in stream_data: {}", stream_data);
return None;
}
Some(stream_info.data.origin.main.hls)
} else {
let err = stream_info.unwrap_err();
log::error!("Failed to parse stream data: {} {}", err, stream_data);
None
}
}
fn parse_stream_url(&self, stream_url: &str) -> (String, String) {
// Parse stream URL to extract base URL and query parameters
// Example: http://7167739a741646b4651b6949b2f3eb8e.livehwc3.cn/pull-hls-l26.douyincdn.com/third/stream-693342996808860134_or4.m3u8?sub_m3u8=true&user_session_id=16090eb45ab8a2f042f7c46563936187&major_anchor_level=common&edge_slice=true&expire=67d944ec&sign=47b95cc6e8de20d82f3d404412fa8406
let base_url = stream_url
.rfind('/')
.map(|i| &stream_url[..=i])
.unwrap_or(stream_url)
.to_string();
let query_params = stream_url
.find('?')
.map(|i| &stream_url[i..])
.unwrap_or("")
.to_string();
(base_url, query_params)
}
async fn update_entries(&self) -> Result<u128, RecorderError> {
let task_begin_time = std::time::Instant::now();
@@ -374,14 +372,11 @@ impl DouyinRecorder {
.last_sequence
};
for segment in playlist.segments.iter() {
for segment in &playlist.segments {
let formated_ts_name = segment.uri.clone();
let sequence = extract_sequence_from(&formated_ts_name);
if sequence.is_none() {
log::error!(
"No timestamp extracted from douyin ts name: {}",
formated_ts_name
);
log::error!("No timestamp extracted from douyin ts name: {formated_ts_name}");
continue;
}
@@ -397,7 +392,7 @@ impl DouyinRecorder {
uri.clone()
} else {
// Parse the stream URL to extract base URL and query parameters
let (base_url, query_params) = self.parse_stream_url(&stream_url);
let (base_url, query_params) = parse_stream_url(&stream_url);
// Check if the segment URI already has query parameters
if uri.contains('?') {
@@ -405,7 +400,7 @@ impl DouyinRecorder {
format!("{}{}&{}", base_url, uri, &query_params[1..]) // Remove leading ? from query_params
} else {
// If segment URI has no query params, append m3u8 query params with ?
format!("{}{}{}", base_url, uri, query_params)
format!("{base_url}{uri}{query_params}")
}
};
@@ -416,14 +411,14 @@ impl DouyinRecorder {
let mut work_dir_created = false;
while retry_count < max_retries && !download_success {
let file_name = format!("{}.ts", sequence);
let file_path = format!("{}/{}", work_dir, file_name);
let file_name = format!("{sequence}.ts");
let file_path = format!("{work_dir}/{file_name}");
// If this is the first segment, create work directory before first download attempt
if is_first_segment && !work_dir_created {
// Create work directory only when we're about to download
if let Err(e) = tokio::fs::create_dir_all(&work_dir).await {
log::error!("Failed to create work directory: {}", e);
log::error!("Failed to create work directory: {e}");
return Err(e.into());
}
work_dir_created = true;
@@ -432,7 +427,7 @@ impl DouyinRecorder {
match self.client.download_ts(&ts_url, &file_path).await {
Ok(size) => {
if size == 0 {
log::error!("Download segment failed (empty response): {}", ts_url);
log::error!("Download segment failed (empty response): {ts_url}");
retry_count += 1;
if retry_count < max_retries {
tokio::time::sleep(Duration::from_millis(500)).await;
@@ -469,7 +464,7 @@ impl DouyinRecorder {
)
.await
{
log::error!("Failed to add record: {}", e);
log::error!("Failed to add record: {e}");
}
// Setup entry store
@@ -493,7 +488,7 @@ impl DouyinRecorder {
let live_id = self.live_id.read().await.clone();
let self_clone = self.clone();
*self.danmu_task.lock().await = Some(tokio::spawn(async move {
log::info!("Start fetching danmu for live {}", live_id);
log::info!("Start fetching danmu for live {live_id}");
let _ = self_clone.danmu().await;
}));
@@ -503,7 +498,7 @@ impl DouyinRecorder {
let ts_entry = TsEntry {
url: file_name,
sequence,
length: segment.duration as f64,
length: f64::from(segment.duration),
size,
ts: Utc::now().timestamp_millis(),
is_header: false,
@@ -537,8 +532,7 @@ impl DouyinRecorder {
// If all retries failed, check if it's a 400 error
if e.to_string().contains("400") {
log::error!(
"HTTP 400 error for segment, stream URL may be expired: {}",
ts_url
"HTTP 400 error for segment, stream URL may be expired: {ts_url}"
);
*self.stream_url.write().await = None;
@@ -547,9 +541,7 @@ impl DouyinRecorder {
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await
{
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
"Failed to cleanup empty work directory {work_dir}: {cleanup_err}"
);
}
}
@@ -561,9 +553,7 @@ impl DouyinRecorder {
if is_first_segment && work_dir_created {
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
"Failed to cleanup empty work directory {work_dir}: {cleanup_err}"
);
}
}
@@ -574,24 +564,16 @@ impl DouyinRecorder {
}
if !download_success {
log::error!(
"Failed to download segment after {} retries: {}",
max_retries,
ts_url
);
log::error!("Failed to download segment after {max_retries} retries: {ts_url}");
// Clean up empty directory if first segment failed after all retries
if is_first_segment && work_dir_created {
if let Err(cleanup_err) = tokio::fs::remove_dir_all(&work_dir).await {
log::warn!(
"Failed to cleanup empty work directory {}: {}",
work_dir,
cleanup_err
"Failed to cleanup empty work directory {work_dir}: {cleanup_err}"
);
}
}
continue;
}
}
@@ -626,7 +608,7 @@ impl DouyinRecorder {
)
.await
{
log::error!("Failed to update record: {}", e);
log::error!("Failed to update record: {e}");
}
}
@@ -680,8 +662,10 @@ impl Recorder for DouyinRecorder {
match self_clone.update_entries().await {
Ok(ms) => {
if ms < 1000 {
tokio::time::sleep(Duration::from_millis(1000 - ms as u64))
.await;
tokio::time::sleep(Duration::from_millis(
(1000 - ms).try_into().unwrap(),
))
.await;
}
if ms >= 3000 {
log::warn!(
@@ -724,13 +708,13 @@ impl Recorder for DouyinRecorder {
*self.running.write().await = false;
// stop 3 tasks
if let Some(danmu_task) = self.danmu_task.lock().await.as_mut() {
let _ = danmu_task.abort();
let () = danmu_task.abort();
}
if let Some(danmu_stream_task) = self.danmu_stream_task.lock().await.as_mut() {
let _ = danmu_stream_task.abort();
let () = danmu_stream_task.abort();
}
if let Some(record_task) = self.record_task.lock().await.as_mut() {
let _ = record_task.abort();
let () = record_task.abort();
}
log::info!("Recorder for room {} quit.", self.room_id);
}
@@ -747,7 +731,8 @@ impl Recorder for DouyinRecorder {
self.first_segment_ts(live_id).await / 1000
)
.as_str();
m3u8_content += &format!("playlist.m3u8?start={}&end={}\n", start, end);
use std::fmt::Write;
writeln!(&mut m3u8_content, "playlist.m3u8?start={start}&end={end}").unwrap();
m3u8_content
}
@@ -822,8 +807,7 @@ impl Recorder for DouyinRecorder {
.subtitle_content
.iter()
.map(item_to_srt)
.collect::<Vec<String>>()
.join("");
.collect::<String>();
subtitle_file.write_all(subtitle_content.as_bytes()).await?;
// remove tmp file
@@ -910,7 +894,7 @@ impl Recorder for DouyinRecorder {
live_id,
"danmu.txt"
);
log::debug!("loading danmu cache from {}", cache_file_path);
log::debug!("loading danmu cache from {cache_file_path}");
let storage = DanmuStorage::new(&cache_file_path).await;
if storage.is_none() {
return Ok(Vec::new());

View File

@@ -2,12 +2,12 @@ use crate::{database::account::AccountRow, recorder::user_agent_generator};
use deno_core::JsRuntime;
use deno_core::RuntimeOptions;
use m3u8_rs::{MediaPlaylist, Playlist};
use reqwest::{Client};
use reqwest::Client;
use uuid::Uuid;
use thiserror::Error;
use super::response::DouyinRoomInfoResponse;
use std::{path::Path};
use std::path::Path;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum DouyinClientError {
@@ -57,7 +57,7 @@ fn setup_js_runtime() -> Result<JsRuntime, DouyinClientError> {
deno_core::FastString::from_static(crypto_js),
)
.map_err(|e| {
DouyinClientError::JsRuntimeError(format!("Failed to execute crypto-js: {}", e))
DouyinClientError::JsRuntimeError(format!("Failed to execute crypto-js: {e}"))
})?;
Ok(runtime)
}
@@ -78,11 +78,11 @@ impl DouyinClient {
) -> Result<String, DouyinClientError> {
let mut runtime = setup_js_runtime()?;
// Call the get_wss_url function
let sign_call = format!("generate_a_bogus(\"{}\", \"{}\")", params, user_agent);
let sign_call = format!("generate_a_bogus(\"{params}\", \"{user_agent}\")");
let result = runtime
.execute_script("<sign_call>", deno_core::FastString::from(sign_call))
.map_err(|e| {
DouyinClientError::JsRuntimeError(format!("Failed to execute JavaScript: {}", e))
DouyinClientError::JsRuntimeError(format!("Failed to execute JavaScript: {e}"))
})?;
// Get the result from the V8 runtime
@@ -110,7 +110,7 @@ impl DouyinClient {
pub async fn get_room_info(
&self,
room_id: u64,
room_id: i64,
sec_user_id: &str,
) -> Result<DouyinBasicRoomInfo, DouyinClientError> {
let mut headers = self.generate_user_agent_header();
@@ -119,23 +119,13 @@ impl DouyinClient {
let ms_token = self.generate_ms_token().await;
let user_agent = headers.get("user-agent").unwrap().to_str().unwrap();
let params = format!(
"aid=6383&app_name=douyin_web&live_id=1&device_platform=web&language=zh-CN&enter_from=web_live&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Chrome&browser_version=122.0.0.0&web_rid={}&ms_token={}",
room_id,
ms_token);
let a_bogus = self
.generate_a_bogus(
&params,
user_agent,
)
.await?;
log::debug!("params: {}", params);
log::debug!("user_agent: {}", user_agent);
log::debug!("a_bogus: {}", a_bogus);
"aid=6383&app_name=douyin_web&live_id=1&device_platform=web&language=zh-CN&enter_from=web_live&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Chrome&browser_version=122.0.0.0&web_rid={room_id}&ms_token={ms_token}");
let a_bogus = self.generate_a_bogus(&params, user_agent).await?;
log::debug!("params: {params}");
log::debug!("user_agent: {user_agent}");
log::debug!("a_bogus: {a_bogus}");
let url = format!(
"https://live.douyin.com/webcast/room/web/enter/?aid=6383&app_name=douyin_web&live_id=1&device_platform=web&language=zh-CN&enter_from=web_live&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Chrome&browser_version=122.0.0.0&web_rid={}&ms_token={}&a_bogus={}",
room_id,
ms_token,
a_bogus
"https://live.douyin.com/webcast/room/web/enter/?aid=6383&app_name=douyin_web&live_id=1&device_platform=web&language=zh-CN&enter_from=web_live&cookie_enabled=true&screen_width=1920&screen_height=1080&browser_language=zh-CN&browser_platform=MacIntel&browser_name=Chrome&browser_version=122.0.0.0&web_rid={room_id}&ms_token={ms_token}&a_bogus={a_bogus}"
);
let resp = self.client.get(&url).headers(headers).send().await?;
@@ -175,19 +165,18 @@ impl DouyinClient {
.map(|s| s.live_core_sdk_data.pull_data.stream_data.clone())
.unwrap_or_default(),
});
} else {
log::error!("Failed to parse room info response: {}", text);
return self.get_room_info_h5(room_id, sec_user_id).await;
}
log::error!("Failed to parse room info response: {text}");
return self.get_room_info_h5(room_id, sec_user_id).await;
}
log::error!("Failed to get room info: {}", status);
log::error!("Failed to get room info: {status}");
return self.get_room_info_h5(room_id, sec_user_id).await;
}
pub async fn get_room_info_h5(
&self,
room_id: u64,
room_id: i64,
sec_user_id: &str,
) -> Result<DouyinBasicRoomInfo, DouyinClientError> {
// 参考biliup实现构建完整的URL参数
@@ -207,15 +196,12 @@ impl DouyinClient {
// 构建URL
let query_string = url_params
.iter()
.map(|(k, v)| format!("{}={}", k, v))
.map(|(k, v)| format!("{k}={v}"))
.collect::<Vec<_>>()
.join("&");
let url = format!(
"https://webcast.amemv.com/webcast/room/reflow/info/?{}",
query_string
);
let url = format!("https://webcast.amemv.com/webcast/room/reflow/info/?{query_string}");
log::info!("get_room_info_h5: {}", url);
log::info!("get_room_info_h5: {url}");
let mut headers = self.generate_user_agent_header();
headers.insert("Referer", "https://live.douyin.com/".parse().unwrap());
@@ -275,7 +261,10 @@ impl DouyinClient {
);
// Check if it's an error response
if let Some(status_code) = json_value.get("status_code").and_then(|v| v.as_i64()) {
if let Some(status_code) = json_value
.get("status_code")
.and_then(serde_json::Value::as_i64)
{
if status_code != 0 {
let error_msg = json_value
.get("data")
@@ -287,8 +276,7 @@ impl DouyinClient {
}
return Err(DouyinClientError::Network(format!(
"API returned error status_code: {} - {}",
status_code, error_msg
"API returned error status_code: {status_code} - {error_msg}"
)));
}
}
@@ -305,22 +293,18 @@ impl DouyinClient {
}
return Err(DouyinClientError::Network(format!(
"Failed to parse h5 room info response: {}",
text
)));
} else {
log::error!("Failed to parse h5 room info response: {}", text);
return Err(DouyinClientError::Network(format!(
"Failed to parse h5 room info response: {}",
text
"Failed to parse h5 room info response: {text}"
)));
}
log::error!("Failed to parse h5 room info response: {text}");
return Err(DouyinClientError::Network(format!(
"Failed to parse h5 room info response: {text}"
)));
}
log::error!("Failed to get h5 room info: {}", status);
log::error!("Failed to get h5 room info: {status}");
Err(DouyinClientError::Network(format!(
"Failed to get h5 room info: {} {}",
status, text
"Failed to get h5 room info: {status} {text}"
)))
}
@@ -353,7 +337,7 @@ impl DouyinClient {
avatar_thumb: following.avatar_thumb.clone(),
follow_info: super::response::FollowInfo::default(),
foreign_user: 0,
open_id_str: "".to_string(),
open_id_str: String::new(),
};
return Ok(user);
}
@@ -362,26 +346,25 @@ impl DouyinClient {
// If not found in followings, create a minimal user info from owner_sec_uid
let user = super::response::User {
id_str: "".to_string(), // We don't have the numeric UID
id_str: String::new(), // We don't have the numeric UID
sec_uid: owner_sec_uid.clone(),
nickname: "抖音用户".to_string(), // Default nickname
avatar_thumb: super::response::AvatarThumb { url_list: vec![] },
follow_info: super::response::FollowInfo::default(),
foreign_user: 0,
open_id_str: "".to_string(),
open_id_str: String::new(),
};
return Ok(user);
}
} else {
log::error!("Failed to parse user info response: {}", text);
log::error!("Failed to parse user info response: {text}");
return Err(DouyinClientError::Network(format!(
"Failed to parse user info response: {}",
text
"Failed to parse user info response: {text}"
)));
}
}
log::error!("Failed to get user info: {}", status);
log::error!("Failed to get user info: {status}");
Err(DouyinClientError::Io(std::io::Error::new(
std::io::ErrorKind::NotFound,
@@ -412,7 +395,7 @@ impl DouyinClient {
// #EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=2560000
// http://7167739a741646b4651b6949b2f3eb8e.livehwc3.cn/pull-hls-l26.douyincdn.com/third/stream-693342996808860134_or4.m3u8?sub_m3u8=true&user_session_id=16090eb45ab8a2f042f7c46563936187&major_anchor_level=common&edge_slice=true&expire=67d944ec&sign=47b95cc6e8de20d82f3d404412fa8406
if content.contains("BANDWIDTH") {
log::info!("Master manifest with playlist URL: {}", url);
log::info!("Master manifest with playlist URL: {url}");
let new_url = content.lines().last().unwrap();
return Box::pin(self.get_m3u8_content(new_url)).await;
}
@@ -431,7 +414,7 @@ impl DouyinClient {
if response.status() != reqwest::StatusCode::OK {
let error = response.error_for_status().unwrap_err();
log::error!("HTTP error: {} for URL: {}", error, url);
log::error!("HTTP error: {error} for URL: {url}");
return Err(DouyinClientError::Network(error.to_string()));
}

View File

@@ -31,19 +31,19 @@ impl TsEntry {
url: parts[0].to_string(),
sequence: parts[1]
.parse()
.map_err(|e| format!("Failed to parse sequence: {}", e))?,
.map_err(|e| format!("Failed to parse sequence: {e}"))?,
length: parts[2]
.parse()
.map_err(|e| format!("Failed to parse length: {}", e))?,
.map_err(|e| format!("Failed to parse length: {e}"))?,
size: parts[3]
.parse()
.map_err(|e| format!("Failed to parse size: {}", e))?,
.map_err(|e| format!("Failed to parse size: {e}"))?,
ts: parts[4]
.parse()
.map_err(|e| format!("Failed to parse timestamp: {}", e))?,
.map_err(|e| format!("Failed to parse timestamp: {e}"))?,
is_header: parts[5]
.parse()
.map_err(|e| format!("Failed to parse is_header: {}", e))?,
.map_err(|e| format!("Failed to parse is_header: {e}"))?,
})
}
@@ -51,7 +51,7 @@ impl TsEntry {
pub fn ts_seconds(&self) -> i64 {
// Due to a legacy issue, a Douyin entry's ts is in seconds while a Bilibili entry's ts is in milliseconds.
// This was addressed in 2.5.6, but we still need to support entry.log files generated by earlier versions.
if self.ts > 10000000000 {
if self.ts > 10_000_000_000 {
self.ts / 1000
} else {
self.ts
@@ -60,7 +60,7 @@ impl TsEntry {
pub fn ts_mili(&self) -> i64 {
// if already in ms, return as is
if self.ts > 10000000000 {
if self.ts > 10_000_000_000 {
self.ts
} else {
self.ts * 1000
@@ -72,13 +72,13 @@ impl TsEntry {
.timestamp_opt(self.ts_seconds(), 0)
.unwrap()
.to_rfc3339();
format!("#EXT-X-PROGRAM-DATE-TIME:{}\n", date_str)
format!("#EXT-X-PROGRAM-DATE-TIME:{date_str}\n")
}
/// Convert entry into a segment in HLS manifest.
pub fn to_segment(&self) -> String {
if self.is_header {
return "".into();
return String::new();
}
let mut content = String::new();
@@ -100,7 +100,7 @@ impl Display for TsEntry {
}
}
/// EntryStore is used to management stream segments, which is basicly a simple version of hls manifest,
/// `EntryStore` is used to manage stream segments; it is basically a simple version of an HLS manifest,
/// and of course, provides methods to generate an HLS manifest for the frontend player.
pub struct EntryStore {
// append only log file
@@ -122,7 +122,7 @@ impl EntryStore {
let log_file = OpenOptions::new()
.create(true)
.append(true)
.open(format!("{}/{}", work_dir, ENTRY_FILE_NAME))
.open(format!("{work_dir}/{ENTRY_FILE_NAME}"))
.await
.unwrap();
let mut entry_store = Self {
@@ -143,14 +143,14 @@ impl EntryStore {
let file = OpenOptions::new()
.create(false)
.read(true)
.open(format!("{}/{}", work_dir, ENTRY_FILE_NAME))
.open(format!("{work_dir}/{ENTRY_FILE_NAME}"))
.await
.unwrap();
let mut lines = BufReader::new(file).lines();
while let Some(Ok(line)) = lines.next().await {
let entry = TsEntry::from(&line);
if let Err(e) = entry {
log::error!("Failed to parse entry: {} {}", e, line);
log::error!("Failed to parse entry: {e} {line}");
continue;
}
@@ -177,7 +177,7 @@ impl EntryStore {
}
if let Err(e) = self.log_file.write_all(entry.to_string().as_bytes()).await {
log::error!("Failed to write entry to log file: {}", e);
log::error!("Failed to write entry to log file: {e}");
}
self.log_file.flush().await.unwrap();
@@ -202,12 +202,12 @@ impl EntryStore {
/// Get first timestamp in milliseconds
pub fn first_ts(&self) -> Option<i64> {
self.entries.first().map(|x| x.ts_mili())
self.entries.first().map(TsEntry::ts_mili)
}
/// Get last timestamp in milliseconds
pub fn last_ts(&self) -> Option<i64> {
self.entries.last().map(|x| x.ts_mili())
self.entries.last().map(TsEntry::ts_mili)
}
/// Generate a hls manifest for selected range.

View File

@@ -47,7 +47,7 @@ impl Hash for PlatformType {
#[derive(serde::Deserialize, serde::Serialize, Clone, Debug)]
pub struct RecorderInfo {
pub room_id: u64,
pub room_id: i64,
pub room_info: RoomInfo,
pub user_info: UserInfo,
pub total_length: f64,
@@ -60,7 +60,7 @@ pub struct RecorderInfo {
#[derive(serde::Deserialize, serde::Serialize, Clone, Debug)]
pub struct RoomInfo {
pub room_id: u64,
pub room_id: i64,
pub room_title: String,
pub room_cover: String,
}

View File

@@ -37,8 +37,7 @@ impl UserAgentGenerator {
let webkit_version = webkit_versions.choose(&mut self.rng).unwrap();
format!(
"Mozilla/5.0 ({}) AppleWebKit/{} (KHTML, like Gecko) Chrome/{} Safari/{}",
os, webkit_version, chrome_version, webkit_version
"Mozilla/5.0 ({os}) AppleWebKit/{webkit_version} (KHTML, like Gecko) Chrome/{chrome_version} Safari/{webkit_version}"
)
}
@@ -48,10 +47,7 @@ impl UserAgentGenerator {
let os = self.get_random_os_firefox();
let firefox_version = firefox_versions.choose(&mut self.rng).unwrap();
format!(
"Mozilla/5.0 ({}; rv:{}) Gecko/20100101 Firefox/{}",
os, firefox_version, firefox_version
)
format!("Mozilla/5.0 ({os}; rv:{firefox_version}) Gecko/20100101 Firefox/{firefox_version}")
}
fn generate_safari(&mut self) -> String {
@@ -72,16 +68,14 @@ impl UserAgentGenerator {
.unwrap();
format!(
"Mozilla/5.0 ({} {} like Mac OS X) AppleWebKit/{} (KHTML, like Gecko) Version/{} Mobile/15E148 Safari/{}",
device, ios_version, webkit_version, safari_version, webkit_version
"Mozilla/5.0 ({device} {ios_version} like Mac OS X) AppleWebKit/{webkit_version} (KHTML, like Gecko) Version/{safari_version} Mobile/15E148 Safari/{webkit_version}"
)
} else {
let macos_versions = ["14_1", "13_6", "12_7"];
let macos_version = macos_versions.choose(&mut self.rng).unwrap();
format!(
"Mozilla/5.0 (Macintosh; Intel Mac OS X {}) AppleWebKit/{} (KHTML, like Gecko) Version/{} Safari/{}",
macos_version, webkit_version, safari_version, webkit_version
"Mozilla/5.0 (Macintosh; Intel Mac OS X {macos_version}) AppleWebKit/{webkit_version} (KHTML, like Gecko) Version/{safari_version} Safari/{webkit_version}"
)
}
}
@@ -95,8 +89,7 @@ impl UserAgentGenerator {
let chrome_version = chrome_versions.choose(&mut self.rng).unwrap();
format!(
"Mozilla/5.0 ({}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{} Safari/537.36 Edg/{}",
os, chrome_version, edge_version
"Mozilla/5.0 ({os}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{chrome_version} Safari/537.36 Edg/{edge_version}"
)
}

View File

@@ -41,7 +41,7 @@ pub struct ClipRangeParams {
pub note: String,
pub cover: String,
pub platform: String,
pub room_id: u64,
pub room_id: i64,
pub live_id: String,
pub range: Option<Range>,
/// Encode danmu after clip
@@ -57,7 +57,7 @@ pub enum RecorderEvent {
recorder: RecorderInfo,
},
LiveEnd {
room_id: u64,
room_id: i64,
platform: PlatformType,
recorder: RecorderInfo,
},
@@ -86,9 +86,9 @@ pub struct RecorderManager {
#[derive(Error, Debug)]
pub enum RecorderManagerError {
#[error("Recorder already exists: {room_id}")]
AlreadyExisted { room_id: u64 },
AlreadyExisted { room_id: i64 },
#[error("Recorder not found: {room_id}")]
NotFound { room_id: u64 },
NotFound { room_id: i64 },
#[error("Invalid platform type: {platform}")]
InvalidPlatformType { platform: String },
#[error("Recorder error: {0}")]
@@ -186,27 +186,23 @@ impl RecorderManager {
}
}
async fn handle_live_end(&self, platform: PlatformType, room_id: u64, recorder: &RecorderInfo) {
async fn handle_live_end(&self, platform: PlatformType, room_id: i64, recorder: &RecorderInfo) {
if !self.config.read().await.auto_generate.enabled {
return;
}
let recorder_id = format!("{}:{}", platform.as_str(), room_id);
log::info!("Start auto generate for {}", recorder_id);
log::info!("Start auto generate for {recorder_id}");
let live_id = recorder.current_live_id.clone();
let live_record = self.db.get_record(room_id, &live_id).await;
if live_record.is_err() {
log::error!("Live not found in record: {} {}", room_id, live_id);
log::error!("Live not found in record: {room_id} {live_id}");
return;
}
let recorders = self.recorders.read().await;
let recorder = match recorders.get(&recorder_id) {
Some(recorder) => recorder,
None => {
log::error!("Recorder not found: {}", recorder_id);
return;
}
let Some(recorder) = recorders.get(&recorder_id) else {
log::error!("Recorder not found: {recorder_id}");
return;
};
let live_record = live_record.unwrap();
@@ -214,8 +210,8 @@ impl RecorderManager {
let clip_config = ClipRangeParams {
title: live_record.title,
note: "".into(),
cover: "".into(),
note: String::new(),
cover: String::new(),
platform: live_record.platform.clone(),
room_id,
live_id: live_id.to_string(),
@@ -242,10 +238,17 @@ impl RecorderManager {
let metadata = match std::fs::metadata(&f) {
Ok(metadata) => metadata,
Err(e) => {
log::error!("Failed to detect auto generated clip: {}", e);
log::error!("Failed to detect auto generated clip: {e}");
return;
}
};
let Ok(size) = i64::try_from(metadata.len()) else {
log::error!(
"Failed to convert metadata length to i64: {}",
metadata.len()
);
return;
};
match self
.db
.add_video(&VideoRow {
@@ -253,15 +256,15 @@ impl RecorderManager {
status: 0,
room_id,
created_at: Utc::now().to_rfc3339(),
cover: "".into(),
cover: String::new(),
file: f.file_name().unwrap().to_str().unwrap().to_string(),
note: "".into(),
note: String::new(),
length: live_record.length,
size: metadata.len() as i64,
bvid: "".into(),
title: "".into(),
desc: "".into(),
tags: "".into(),
size,
bvid: String::new(),
title: String::new(),
desc: String::new(),
tags: String::new(),
area: 0,
platform: live_record.platform.clone(),
})
@@ -269,17 +272,17 @@ impl RecorderManager {
{
Ok(_) => {}
Err(e) => {
log::error!("Add auto generate clip record failed: {}", e)
log::error!("Add auto generate clip record failed: {e}");
}
};
}
Err(e) => {
log::error!("Auto generate clip failed: {}", e)
log::error!("Auto generate clip failed: {e}");
}
}
}
pub async fn set_migrating(&self, migrating: bool) {
pub fn set_migrating(&self, migrating: bool) {
self.is_migrating
.store(migrating, std::sync::atomic::Ordering::Relaxed);
}
@@ -353,7 +356,7 @@ impl RecorderManager {
&self,
account: &AccountRow,
platform: PlatformType,
room_id: u64,
room_id: i64,
extra: &str,
auto_start: bool,
) -> Result<(), RecorderManagerError> {
@@ -425,7 +428,7 @@ impl RecorderManager {
pub async fn remove_recorder(
&self,
platform: PlatformType,
room_id: u64,
room_id: i64,
) -> Result<RecorderRow, RecorderManagerError> {
// check recorder exists
let recorder_id = format!("{}:{}", platform.as_str(), room_id);
@@ -437,21 +440,21 @@ impl RecorderManager {
let recorder = self.db.remove_recorder(room_id).await?;
// add to to_remove
log::debug!("Add to to_remove: {}", recorder_id);
log::debug!("Add to to_remove: {recorder_id}");
self.to_remove.write().await.insert(recorder_id.clone());
// stop recorder
log::debug!("Stop recorder: {}", recorder_id);
log::debug!("Stop recorder: {recorder_id}");
if let Some(recorder_ref) = self.recorders.read().await.get(&recorder_id) {
recorder_ref.stop().await;
}
// remove recorder
log::debug!("Remove recorder from manager: {}", recorder_id);
log::debug!("Remove recorder from manager: {recorder_id}");
self.recorders.write().await.remove(&recorder_id);
// remove from to_remove
log::debug!("Remove from to_remove: {}", recorder_id);
log::debug!("Remove from to_remove: {recorder_id}");
self.to_remove.write().await.remove(&recorder_id);
// remove related cache folder
@@ -461,9 +464,9 @@ impl RecorderManager {
platform.as_str(),
room_id
);
log::debug!("Remove cache folder: {}", cache_folder);
log::debug!("Remove cache folder: {cache_folder}");
let _ = tokio::fs::remove_dir_all(cache_folder).await;
log::info!("Recorder {} cache folder removed", room_id);
log::info!("Recorder {room_id} cache folder removed");
Ok(recorder)
}
@@ -477,7 +480,7 @@ impl RecorderManager {
let recorders = self.recorders.read().await;
let recorder_id = format!("{}:{}", params.platform, params.room_id);
if !recorders.contains_key(&recorder_id) {
log::error!("Recorder {} not found", recorder_id);
log::error!("Recorder {recorder_id} not found");
return Err(RecorderManagerError::NotFound {
room_id: params.room_id,
});
@@ -535,7 +538,7 @@ impl RecorderManager {
)
.await
{
log::error!("Failed to generate clip file: {}", e);
log::error!("Failed to generate clip file: {e}");
return Err(RecorderManagerError::ClipError { err: e.to_string() });
}
@@ -565,7 +568,7 @@ impl RecorderManager {
params
.range
.as_ref()
.map_or("None".to_string(), |r| r.to_string()),
.map_or("None".to_string(), std::string::ToString::to_string),
params.local_offset
);
let mut danmus = danmus.unwrap();
@@ -574,10 +577,10 @@ impl RecorderManager {
if let Some(range) = &params.range {
// update entry ts to offset and filter danmus in range
for d in &mut danmus {
d.ts -= (range.start as i64 + params.local_offset) * 1000;
d.ts -= params.local_offset * 1000 + (range.start * 1000.0).round() as i64;
}
if range.duration() > 0.0 {
danmus.retain(|x| x.ts >= 0 && x.ts <= (range.duration() as i64) * 1000);
danmus.retain(|x| x.ts >= 0 && x.ts <= (range.duration() * 1000.0).round() as i64);
}
}
@@ -625,7 +628,7 @@ impl RecorderManager {
pub async fn get_recorder_info(
&self,
platform: PlatformType,
room_id: u64,
room_id: i64,
) -> Option<RecorderInfo> {
let recorder_id = format!("{}:{}", platform.as_str(), room_id);
if let Some(recorder_ref) = self.recorders.read().await.get(&recorder_id) {
@@ -636,22 +639,22 @@ impl RecorderManager {
}
}
pub async fn get_archive_disk_usage(&self) -> Result<u64, RecorderManagerError> {
pub async fn get_archive_disk_usage(&self) -> Result<i64, RecorderManagerError> {
Ok(self.db.get_record_disk_usage().await?)
}
pub async fn get_archives(
&self,
room_id: u64,
offset: u64,
limit: u64,
room_id: i64,
offset: i64,
limit: i64,
) -> Result<Vec<RecordRow>, RecorderManagerError> {
Ok(self.db.get_records(room_id, offset, limit).await?)
}
pub async fn get_archive(
&self,
room_id: u64,
room_id: i64,
live_id: &str,
) -> Result<RecordRow, RecorderManagerError> {
Ok(self.db.get_record(room_id, live_id).await?)
@@ -660,7 +663,7 @@ impl RecorderManager {
pub async fn get_archive_subtitle(
&self,
platform: PlatformType,
room_id: u64,
room_id: i64,
live_id: &str,
) -> Result<String, RecorderManagerError> {
let recorder_id = format!("{}:{}", platform.as_str(), room_id);
@@ -675,7 +678,7 @@ impl RecorderManager {
pub async fn generate_archive_subtitle(
&self,
platform: PlatformType,
room_id: u64,
room_id: i64,
live_id: &str,
) -> Result<String, RecorderManagerError> {
let recorder_id = format!("{}:{}", platform.as_str(), room_id);
@@ -690,10 +693,10 @@ impl RecorderManager {
pub async fn delete_archive(
&self,
platform: PlatformType,
room_id: u64,
room_id: i64,
live_id: &str,
) -> Result<RecordRow, RecorderManagerError> {
log::info!("Deleting {}:{}", room_id, live_id);
log::info!("Deleting {room_id}:{live_id}");
// check if this is still recording
let recorder_id = format!("{}:{}", platform.as_str(), room_id);
if let Some(recorder_ref) = self.recorders.read().await.get(&recorder_id) {
@@ -716,10 +719,10 @@ impl RecorderManager {
pub async fn delete_archives(
&self,
platform: PlatformType,
room_id: u64,
room_id: i64,
live_ids: &[&str],
) -> Result<Vec<RecordRow>, RecorderManagerError> {
log::info!("Deleting archives in batch: {:?}", live_ids);
log::info!("Deleting archives in batch: {live_ids:?}");
let mut to_deletes = Vec::new();
for live_id in live_ids {
let to_delete = self.delete_archive(platform, room_id, live_id).await?;
@@ -731,7 +734,7 @@ impl RecorderManager {
pub async fn get_danmu(
&self,
platform: PlatformType,
room_id: u64,
room_id: i64,
live_id: &str,
) -> Result<Vec<DanmuEntry>, RecorderManagerError> {
let recorder_id = format!("{}:{}", platform.as_str(), room_id);
@@ -749,7 +752,7 @@ impl RecorderManager {
let path_segs: Vec<&str> = path.split('/').collect();
if path_segs.len() != 4 {
log::warn!("Invalid request path: {}", path);
log::warn!("Invalid request path: {path}");
return Err(RecorderManagerError::HLSError {
err: "Invalid hls path".into(),
});
@@ -757,7 +760,7 @@ impl RecorderManager {
// parse recorder type
let platform = path_segs[0];
// parse room id
let room_id = path_segs[1].parse::<u64>().unwrap();
let room_id = path_segs[1].parse::<i64>().unwrap();
// parse live id
let live_id = path_segs[2];
@@ -780,8 +783,7 @@ impl RecorderManager {
params
.iter()
.find(|param| param[0] == "start")
.map(|param| param[1].parse::<i64>().unwrap())
.unwrap_or(0)
.map_or(0, |param| param[1].parse::<i64>().unwrap())
} else {
0
};
@@ -789,15 +791,14 @@ impl RecorderManager {
params
.iter()
.find(|param| param[0] == "end")
.map(|param| param[1].parse::<i64>().unwrap())
.unwrap_or(0)
.map_or(0, |param| param[1].parse::<i64>().unwrap())
} else {
0
};
if path_segs[3] == "playlist.m3u8" {
// get recorder
let recorder_key = format!("{}:{}", platform, room_id);
let recorder_key = format!("{platform}:{room_id}");
let recorders = self.recorders.read().await;
let recorder = recorders.get(&recorder_key);
if recorder.is_none() {
@@ -813,7 +814,7 @@ impl RecorderManager {
Ok(m3u8_content.into())
} else if path_segs[3] == "master.m3u8" {
// get recorder
let recorder_key = format!("{}:{}", platform, room_id);
let recorder_key = format!("{platform}:{room_id}");
let recorders = self.recorders.read().await;
let recorder = recorders.get(&recorder_key);
if recorder.is_none() {
@@ -829,17 +830,17 @@ impl RecorderManager {
// cache files are stored in {cache_dir}/{room_id}/{timestamp}/{ts_file}
let ts_file = format!("{}/{}", cache_path, path.replace("%7C", "|"));
let recorders = self.recorders.read().await;
let recorder_id = format!("{}:{}", platform, room_id);
let recorder_id = format!("{platform}:{room_id}");
let recorder = recorders.get(&recorder_id);
if recorder.is_none() {
log::warn!("Recorder not found: {}", recorder_id);
log::warn!("Recorder not found: {recorder_id}");
return Err(RecorderManagerError::HLSError {
err: "Recorder not found".into(),
});
}
let ts_file_content = tokio::fs::read(&ts_file).await;
if ts_file_content.is_err() {
log::warn!("Segment file not found: {}", ts_file);
log::warn!("Segment file not found: {ts_file}");
return Err(RecorderManagerError::HLSError {
err: "Segment file not found".into(),
});
@@ -849,10 +850,10 @@ impl RecorderManager {
}
}
pub async fn set_enable(&self, platform: PlatformType, room_id: u64, enabled: bool) {
pub async fn set_enable(&self, platform: PlatformType, room_id: i64, enabled: bool) {
// update RecordRow auto_start field
if let Err(e) = self.db.update_recorder(platform, room_id, enabled).await {
log::error!("Failed to update recorder auto_start: {}", e);
log::error!("Failed to update recorder auto_start: {e}");
}
let recorder_id = format!("{}:{}", platform.as_str(), room_id);

View File

@@ -22,7 +22,7 @@ pub async fn new(model: &Path, prompt: &str) -> Result<WhisperCPP, String> {
WhisperContextParameters::default(),
)
.map_err(|e| {
log::error!("Create whisper context failed: {}", e);
log::error!("Create whisper context failed: {e}");
e.to_string()
})?;
@@ -65,7 +65,7 @@ impl SubtitleGenerator for WhisperCPP {
params.set_print_timestamps(false);
params.set_progress_callback_safe(move |p| {
log::info!("Progress: {}%", p);
log::info!("Progress: {p}%");
});
let mut inter_samples = vec![Default::default(); samples.len()];
@@ -88,7 +88,7 @@ impl SubtitleGenerator for WhisperCPP {
reporter.update("生成字幕中");
}
if let Err(e) = state.full(params, &samples[..]) {
log::error!("failed to run model: {}", e);
log::error!("failed to run model: {e}");
return Err(e.to_string());
}
@@ -107,10 +107,7 @@ impl SubtitleGenerator for WhisperCPP {
let milliseconds = ((timestamp - hours * 3600.0 - minutes * 60.0 - seconds)
* 1000.0)
.floor() as u32;
format!(
"{:02}:{:02}:{:02},{:03}",
hours, minutes, seconds, milliseconds
)
format!("{hours:02}:{minutes:02}:{seconds:02},{milliseconds:03}")
};
let line = format!(
@@ -126,12 +123,12 @@ impl SubtitleGenerator for WhisperCPP {
log::info!("Time taken: {} seconds", start_time.elapsed().as_secs_f64());
let subtitle_content = srtparse::from_str(&subtitle)
.map_err(|e| format!("Failed to parse subtitle: {}", e))?;
let subtitle_content =
srtparse::from_str(&subtitle).map_err(|e| format!("Failed to parse subtitle: {e}"))?;
Ok(GenerateResult {
generator_type: SubtitleGeneratorType::Whisper,
subtitle_id: "".to_string(),
subtitle_id: String::new(),
subtitle_content,
})
}
@@ -154,14 +151,14 @@ mod tests {
#[async_trait]
impl ProgressReporterTrait for MockReporter {
fn update(&self, message: &str) {
println!("Mock update: {}", message);
println!("Mock update: {message}");
}
async fn finish(&self, success: bool, message: &str) {
if success {
println!("Mock finish: {}", message);
println!("Mock finish: {message}");
} else {
println!("Mock error: {}", message);
println!("Mock error: {message}");
}
}
}
@@ -189,7 +186,7 @@ mod tests {
.generate_subtitle(Some(&reporter), audio_path, "auto")
.await;
if let Err(e) = result {
println!("Error: {}", e);
println!("Error: {e}");
panic!("Failed to generate subtitle");
}
}

View File

@@ -37,7 +37,7 @@ pub async fn new(
let client = Client::builder()
.timeout(std::time::Duration::from_secs(300)) // 5 minutes timeout
.build()
.map_err(|e| format!("Failed to create HTTP client: {}", e))?;
.map_err(|e| format!("Failed to create HTTP client: {e}"))?;
let api_url = api_url.unwrap_or("https://api.openai.com/v1");
let api_url = api_url.to_string() + "/audio/transcriptions";
@@ -45,8 +45,8 @@ pub async fn new(
Ok(WhisperOnline {
client,
api_url: api_url.to_string(),
api_key: api_key.map(|k| k.to_string()),
prompt: prompt.map(|p| p.to_string()),
api_key: api_key.map(std::string::ToString::to_string),
prompt: prompt.map(std::string::ToString::to_string),
})
}
@@ -67,7 +67,7 @@ impl SubtitleGenerator for WhisperOnline {
}
let audio_data = fs::read(audio_path)
.await
.map_err(|e| format!("Failed to read audio file: {}", e))?;
.map_err(|e| format!("Failed to read audio file: {e}"))?;
// Get file extension for proper MIME type
let file_extension = audio_path
@@ -86,7 +86,7 @@ impl SubtitleGenerator for WhisperOnline {
// Build form data with proper file part
let file_part = reqwest::multipart::Part::bytes(audio_data)
.mime_str(mime_type)
.map_err(|e| format!("Failed to set MIME type: {}", e))?
.map_err(|e| format!("Failed to set MIME type: {e}"))?
.file_name(
audio_path
.file_name()
@@ -111,7 +111,7 @@ impl SubtitleGenerator for WhisperOnline {
let mut req_builder = self.client.post(&self.api_url);
if let Some(api_key) = &self.api_key {
req_builder = req_builder.header("Authorization", format!("Bearer {}", api_key));
req_builder = req_builder.header("Authorization", format!("Bearer {api_key}"));
}
if let Some(reporter) = reporter {
@@ -122,15 +122,14 @@ impl SubtitleGenerator for WhisperOnline {
.multipart(form)
.send()
.await
.map_err(|e| format!("HTTP request failed: {}", e))?;
.map_err(|e| format!("HTTP request failed: {e}"))?;
let status = response.status();
if !status.is_success() {
let error_text = response.text().await.unwrap_or_default();
log::error!("API request failed with status {}: {}", status, error_text);
log::error!("API request failed with status {status}: {error_text}");
return Err(format!(
"API request failed with status {}: {}",
status, error_text
"API request failed with status {status}: {error_text}"
));
}
@@ -138,17 +137,14 @@ impl SubtitleGenerator for WhisperOnline {
let response_text = response
.text()
.await
.map_err(|e| format!("Failed to get response text: {}", e))?;
.map_err(|e| format!("Failed to get response text: {e}"))?;
// Try to parse as JSON
let whisper_response: WhisperResponse =
serde_json::from_str(&response_text).map_err(|e| {
println!("{}", response_text);
log::error!(
"Failed to parse JSON response. Raw response: {}",
response_text
);
format!("Failed to parse response: {}", e)
println!("{response_text}");
log::error!("Failed to parse JSON response. Raw response: {response_text}");
format!("Failed to parse response: {e}")
})?;
// Generate SRT format subtitle
@@ -161,10 +157,7 @@ impl SubtitleGenerator for WhisperOnline {
let milliseconds = ((timestamp - hours * 3600.0 - minutes * 60.0 - seconds)
* 1000.0)
.floor() as u32;
format!(
"{:02}:{:02}:{:02},{:03}",
hours, minutes, seconds, milliseconds
)
format!("{hours:02}:{minutes:02}:{seconds:02},{milliseconds:03}")
};
let line = format!(
@@ -180,12 +173,12 @@ impl SubtitleGenerator for WhisperOnline {
log::info!("Time taken: {} seconds", start_time.elapsed().as_secs_f64());
let subtitle_content = srtparse::from_str(&subtitle)
.map_err(|e| format!("Failed to parse subtitle: {}", e))?;
let subtitle_content =
srtparse::from_str(&subtitle).map_err(|e| format!("Failed to parse subtitle: {e}"))?;
Ok(GenerateResult {
generator_type: SubtitleGeneratorType::WhisperOnline,
subtitle_id: "".to_string(),
subtitle_id: String::new(),
subtitle_content,
})
}
@@ -203,14 +196,14 @@ mod tests {
#[async_trait]
impl ProgressReporterTrait for MockReporter {
fn update(&self, message: &str) {
println!("Mock update: {}", message);
println!("Mock update: {message}");
}
async fn finish(&self, success: bool, message: &str) {
if success {
println!("Mock finish: {}", message);
println!("Mock finish: {message}");
} else {
println!("Mock error: {}", message);
println!("Mock error: {message}");
}
}
}
@@ -240,7 +233,7 @@ mod tests {
"auto",
)
.await;
println!("{:?}", result);
println!("{result:?}");
assert!(result.is_ok());
let result = result.unwrap();
println!("{:?}", result.subtitle_content);

View File

@@ -103,7 +103,7 @@ impl WebhookPoster {
tokio::task::spawn(async move {
let result = self_clone.post_with_retry(&serialized_event).await;
if let Err(e) = result {
log::error!("Post webhook event error: {}", e);
log::error!("Post webhook event error: {e}");
}
});
@@ -132,9 +132,9 @@ impl WebhookPoster {
for attempt in 1..=self.config.read().await.retry_attempts {
match self.send_request(data).await {
Ok(_) => {
Ok(()) => {
if attempt > 1 {
info!("Webhook posted successfully on attempt {}", attempt);
info!("Webhook posted successfully on attempt {attempt}");
}
return Ok(());
}
@@ -168,7 +168,7 @@ impl WebhookPoster {
}
}
log::debug!("Sending webhook request to: {}", webhook_url);
log::debug!("Sending webhook request to: {webhook_url}");
// Set content type to JSON
request = request.header("Content-Type", "application/json");
@@ -225,7 +225,7 @@ pub fn create_webhook_poster(
headers,
..Default::default()
};
log::info!("Creating webhook poster with URL: {}", url);
log::info!("Creating webhook poster with URL: {url}");
WebhookPoster::new(config)
}