diff --git a/apps/desktop/src-tauri/src/infrastructure/ffmpeg.rs b/apps/desktop/src-tauri/src/infrastructure/ffmpeg.rs
index cad86ee..c62c8ba 100644
--- a/apps/desktop/src-tauri/src/infrastructure/ffmpeg.rs
+++ b/apps/desktop/src-tauri/src/infrastructure/ffmpeg.rs
@@ -232,22 +232,32 @@ impl FFmpegService {
             return Err(anyhow!("文件不存在: {}", file_path));
         }
 
+        println!("开始场景检测: {} (阈值: {})", file_path, threshold);
+
         // 首先尝试使用 ffmpeg 的 scene 滤镜
         match Self::detect_scenes_with_ffmpeg(file_path, threshold) {
-            Ok(scenes) if !scenes.is_empty() => return Ok(scenes),
-            Err(e) => {
-                eprintln!("FFmpeg场景检测失败,使用备用方法: {}", e);
+            Ok(scenes) if !scenes.is_empty() => {
+                println!("FFmpeg场景检测成功,发现 {} 个场景: {:?}", scenes.len(), scenes);
+                return Ok(scenes);
+            }
+            Err(e) => {
+                println!("FFmpeg场景检测失败,使用备用方法: {}", e);
+            }
+            Ok(_) => {
+                println!("FFmpeg场景检测返回空结果,使用备用方法");
             }
-            _ => {}
         }
 
         // 如果FFmpeg场景检测失败,使用简单的时间间隔方法
-        Self::detect_scenes_simple(file_path, threshold)
+        let simple_scenes = Self::detect_scenes_simple(file_path, threshold)?;
+        println!("备用场景检测完成,发现 {} 个场景: {:?}", simple_scenes.len(), simple_scenes);
+        Ok(simple_scenes)
     }
 
     /// 使用FFmpeg进行场景检测
     fn detect_scenes_with_ffmpeg(file_path: &str, threshold: f64) -> Result<Vec<f64>> {
-        let output = Command::new("ffmpeg")
+        // 方法1: 使用 scene 滤镜和 showinfo
+        let output1 = Command::new("ffmpeg")
             .args([
                 "-i", file_path,
                 "-vf", &format!("select='gt(scene,{})',showinfo", threshold),
@@ -255,28 +265,68 @@ impl FFmpegService {
                 "-f", "null", "-"
             ])
             .stderr(std::process::Stdio::piped())
+            .output();
+
+        if let Ok(output) = output1 {
+            let stderr_str = String::from_utf8_lossy(&output.stderr);
+            let mut scene_times = Vec::new();
+
+            // 查找 showinfo 输出中的 pts_time 信息
+            for line in stderr_str.lines() {
+                if line.contains("showinfo") && line.contains("pts_time:") {
+                    if let Some(pts_start) = line.find("pts_time:") {
+                        let pts_part = &line[pts_start + 9..];
+                        if let Some(space_pos) = pts_part.find(' ') {
+                            let time_str = &pts_part[..space_pos];
+                            if let Ok(time) = time_str.parse::<f64>() {
+                                scene_times.push(time);
+                            }
+                        }
+                    }
+                }
+            }
+
+            if !scene_times.is_empty() {
+                return Ok(scene_times);
+            }
+        }
+
+        // 方法2: 使用更简单的场景检测方法
+        Self::detect_scenes_alternative(file_path, threshold)
+    }
+
+    /// 替代的场景检测方法
+    fn detect_scenes_alternative(file_path: &str, threshold: f64) -> Result<Vec<f64>> {
+        // 使用 ffprobe 分析视频帧信息
+        let output = Command::new("ffprobe")
+            .args([
+                "-v", "quiet",
+                "-select_streams", "v:0",
+                "-show_entries", "frame=pkt_pts_time,pict_type",
+                "-of", "csv=p=0",
+                file_path
+            ])
             .output()
-            .map_err(|e| anyhow!("执行FFmpeg场景检测失败: {}", e))?;
+            .map_err(|e| anyhow!("执行ffprobe帧分析失败: {}", e))?;
 
         if !output.status.success() {
             let error_msg = String::from_utf8_lossy(&output.stderr);
-            return Err(anyhow!("FFmpeg场景检测命令失败: {}", error_msg));
+            return Err(anyhow!("ffprobe帧分析失败: {}", error_msg));
         }
 
-        // 解析 stderr 中的 showinfo 输出
-        let stderr_str = String::from_utf8_lossy(&output.stderr);
+        let output_str = String::from_utf8_lossy(&output.stdout);
         let mut scene_times = Vec::new();
+        let mut last_i_frame_time = 0.0;
+        let min_scene_duration = 5.0; // 最小场景时长5秒
 
-        // 查找 showinfo 输出中的 pts_time 信息
-        for line in stderr_str.lines() {
-            if line.contains("showinfo") && line.contains("pts_time:") {
-                if let Some(pts_start) = line.find("pts_time:") {
-                    let pts_part = &line[pts_start + 9..];
-                    if let Some(space_pos) = pts_part.find(' ') {
-                        let time_str = &pts_part[..space_pos];
-                        if let Ok(time) = time_str.parse::<f64>() {
-                            scene_times.push(time);
-                        }
+        // 分析I帧(关键帧)作为潜在的场景切换点
+        for line in output_str.lines() {
+            let parts: Vec<&str> = line.split(',').collect();
+            if parts.len() >= 2 {
+                if let (Ok(time), pict_type) = (parts[0].parse::<f64>(), parts[1]) {
+                    if pict_type == "I" && time - last_i_frame_time > min_scene_duration {
+                        scene_times.push(time);
+                        last_i_frame_time = time;
                     }
                 }
             }
         }
@@ -287,7 +337,7 @@ impl FFmpegService {
 
     /// 简单的场景检测方法(备用)
     fn detect_scenes_simple(file_path: &str, threshold: f64) -> Result<Vec<f64>> {
-        // 使用 ffprobe 获取视频时长,然后按固定间隔分割
+        // 使用 ffprobe 获取视频时长,然后按智能间隔分割
         let metadata = Self::extract_metadata(file_path)?;
 
         let duration = match metadata {
@@ -296,18 +346,46 @@ impl FFmpegService {
         };
 
         // 如果视频很短,不需要场景检测
-        if duration < 60.0 {
+        if duration < 30.0 {
             return Ok(Vec::new());
         }
 
-        // 按照阈值相关的间隔创建场景切点
-        let interval = (60.0 / threshold).max(30.0).min(300.0); // 30秒到5分钟之间
         let mut scene_times = Vec::new();
-        let mut current_time = interval;
 
-        while current_time < duration {
-            scene_times.push(current_time);
-            current_time += interval;
+        // 根据视频时长和阈值智能确定切分策略
+        if duration <= 120.0 {
+            // 2分钟以内的视频,按30秒间隔
+            let mut current_time = 30.0;
+            while current_time < duration {
+                scene_times.push(current_time);
+                current_time += 30.0;
+            }
+        } else if duration <= 600.0 {
+            // 10分钟以内的视频,按60秒间隔
+            let mut current_time = 60.0;
+            while current_time < duration {
+                scene_times.push(current_time);
+                current_time += 60.0;
+            }
+        } else {
+            // 长视频,按120秒间隔
+            let mut current_time = 120.0;
+            while current_time < duration {
+                scene_times.push(current_time);
+                current_time += 120.0;
+            }
+        }
+
+        // 如果阈值很低(更敏感),增加更多切点
+        if threshold < 0.2 && duration > 180.0 {
+            let mut additional_times = Vec::new();
+            for &time in &scene_times {
+                if time > 90.0 {
+                    additional_times.push(time - 45.0);
+                }
+            }
+            scene_times.extend(additional_times);
+            scene_times.sort_by(|a, b| a.partial_cmp(b).unwrap());
         }
 
         Ok(scene_times)