// mixvideo-v2/cargos/tvai/examples/video_processing.rs

//! Video processing examples demonstrating all video enhancement features
use tvai::*;
#[tokio::main]
async fn main() -> std::result::Result<(), Box<dyn std::error::Error>> {
    println!("Topaz Video AI Library - Video Processing Examples");

    // Bail out early when no Topaz Video AI installation can be located.
    let topaz_path = match detect_topaz_installation() {
        Some(path) => path,
        None => {
            println!("Topaz Video AI not found. Please install it first.");
            return Ok(());
        }
    };
    println!("Found Topaz Video AI at: {}", topaz_path.display());

    // Build the configuration (GPU acceleration requested) and the processor.
    let config = TvaiConfig::builder()
        .topaz_path(topaz_path)
        .use_gpu(true)
        .build()?;
    let mut processor = TvaiProcessor::new(config)?;
    println!("Processor created successfully");

    // Run each feature demo in sequence.
    demonstrate_video_upscaling(&mut processor).await?;
    demonstrate_frame_interpolation(&mut processor).await?;
    demonstrate_combined_enhancement(&mut processor).await?;
    demonstrate_format_conversion(&mut processor).await?;
    demonstrate_quick_functions().await?;

    println!("All video processing examples completed successfully!");
    Ok(())
}
async fn demonstrate_video_upscaling(processor: &mut TvaiProcessor) -> std::result::Result<(), Box<dyn std::error::Error>> {
println!("\n=== Video Upscaling Demo ===");
// Create upscaling parameters for different scenarios
let scenarios = vec![
("General Purpose", VideoUpscaleParams {
scale_factor: 2.0,
model: UpscaleModel::Iris3,
compression: 0.0,
blend: 0.1,
quality_preset: QualityPreset::HighQuality,
}),
("Old Video Restoration", VideoUpscaleParams::for_old_video()),
("Game Content", VideoUpscaleParams::for_game_content()),
("Animation", VideoUpscaleParams::for_animation()),
("Portrait Video", VideoUpscaleParams::for_portrait()),
];
for (name, params) in scenarios {
println!("Scenario: {}", name);
println!(" Model: {} ({})", params.model.as_str(), params.model.description());
println!(" Scale: {}x", params.scale_factor);
println!(" Compression: {}", params.compression);
println!(" Blend: {}", params.blend);
println!(" Quality: {:?}", params.quality_preset);
// In a real scenario, you would call:
// let result = processor.upscale_video(input, output, params, Some(&progress_callback)).await?;
// println!(" Processing time: {:?}", result.processing_time);
}
Ok(())
}
async fn demonstrate_frame_interpolation(processor: &mut TvaiProcessor) -> std::result::Result<(), Box<dyn std::error::Error>> {
println!("\n=== Frame Interpolation Demo ===");
// Create interpolation parameters for different scenarios
let scenarios = vec![
("Slow Motion (24fps -> 60fps)", InterpolationParams {
input_fps: 24,
multiplier: 2.5,
model: InterpolationModel::Apo8,
target_fps: Some(60),
}),
("Animation Smoothing", InterpolationParams::for_animation(12, 2.0)),
("High Quality Slow Motion", InterpolationParams::for_slow_motion(30, 4.0)),
("Fast Processing", InterpolationParams {
input_fps: 24,
multiplier: 2.0,
model: InterpolationModel::Apf1,
target_fps: None,
}),
];
for (name, params) in scenarios {
let target_fps = params.target_fps.unwrap_or((params.input_fps as f32 * params.multiplier) as u32);
println!("Scenario: {}", name);
println!(" Model: {} ({})", params.model.as_str(), params.model.description());
println!(" Input FPS: {}", params.input_fps);
println!(" Multiplier: {}x", params.multiplier);
println!(" Target FPS: {}", target_fps);
// In a real scenario, you would call:
// let result = processor.interpolate_video(input, output, params, Some(&progress_callback)).await?;
// println!(" Processing time: {:?}", result.processing_time);
}
Ok(())
}
async fn demonstrate_combined_enhancement(processor: &mut TvaiProcessor) -> std::result::Result<(), Box<dyn std::error::Error>> {
println!("\n=== Combined Enhancement Demo ===");
// Create combined enhancement parameters
let scenarios = vec![
("Complete Enhancement", VideoEnhanceParams {
upscale: Some(VideoUpscaleParams {
scale_factor: 2.0,
model: UpscaleModel::Iris3,
compression: 0.0,
blend: 0.1,
quality_preset: QualityPreset::HighQuality,
}),
interpolation: Some(InterpolationParams {
input_fps: 24,
multiplier: 2.0,
model: InterpolationModel::Apo8,
target_fps: Some(48),
}),
}),
("Upscale Only", VideoEnhanceParams {
upscale: Some(VideoUpscaleParams::for_old_video()),
interpolation: None,
}),
("Interpolation Only", VideoEnhanceParams {
upscale: None,
interpolation: Some(InterpolationParams::for_slow_motion(30, 2.0)),
}),
];
for (name, params) in scenarios {
println!("Scenario: {}", name);
if let Some(ref upscale) = params.upscale {
println!(" Upscale: {} @ {}x", upscale.model.as_str(), upscale.scale_factor);
}
if let Some(ref interpolation) = params.interpolation {
let target_fps = interpolation.target_fps.unwrap_or((interpolation.input_fps as f32 * interpolation.multiplier) as u32);
println!(" Interpolation: {} ({}fps -> {}fps)",
interpolation.model.as_str(),
interpolation.input_fps,
target_fps
);
}
// In a real scenario, you would call:
// let result = processor.enhance_video(input, output, params, Some(&progress_callback)).await?;
// println!(" Processing time: {:?}", result.processing_time);
}
Ok(())
}
async fn demonstrate_format_conversion(processor: &mut TvaiProcessor) -> std::result::Result<(), Box<dyn std::error::Error>> {
println!("\n=== Format Conversion Demo ===");
// Demonstrate image sequence to video conversion
println!("Image Sequence to Video:");
println!(" - Collect image files in sequence");
println!(" - Set target FPS (e.g., 24, 30, 60)");
println!(" - Choose quality preset");
println!(" - Generate video file");
// In a real scenario:
// let image_paths = vec![/* image file paths */];
// let result = processor.images_to_video(&image_paths, output_video, 30.0, QualityPreset::HighQuality, Some(&progress_callback)).await?;
println!("\nVideo to Image Sequence:");
println!(" - Extract all frames from video");
println!(" - Choose output format (PNG, JPG, etc.)");
println!(" - Set quality level");
println!(" - Generate numbered image files");
// In a real scenario:
// let image_paths = processor.video_to_images(input_video, output_dir, "png", 95, Some(&progress_callback)).await?;
// println!(" Extracted {} frames", image_paths.len());
Ok(())
}
/// Describes the one-call convenience helpers (no processor needed).
async fn demonstrate_quick_functions() -> std::result::Result<(), Box<dyn std::error::Error>> {
    println!("\n=== Quick Functions Demo ===");

    // Table-driven output: each entry is printed on its own line,
    // in order, exactly as a sequence of println! calls would.
    let notes = [
        "Quick Upscale Video:",
        " - One-line video upscaling",
        " - Automatic Topaz detection",
        " - Default high-quality settings",
        " - Usage: quick_upscale_video(input, output, 2.0).await?",
        "\nAuto Enhance Video:",
        " - Intelligent enhancement detection",
        " - Automatic parameter selection",
        " - Based on video characteristics",
        " - Usage: auto_enhance_video(input, output).await?",
    ];
    for line in notes {
        println!("{}", line);
    }

    // In a real scenario:
    // let result = quick_upscale_video(Path::new("input.mp4"), Path::new("output.mp4"), 2.0).await?;
    // let result = auto_enhance_video(Path::new("input.mp4"), Path::new("enhanced.mp4")).await?;
    Ok(())
}
/// Builds a boxed `ProgressCallback` that prints progress as a whole
/// percentage, prefixed with the given operation name.
///
/// Referenced only from the commented-out example calls above, so it is
/// never invoked at runtime; `#[allow(dead_code)]` suppresses the
/// resulting dead-code warning when building the example.
#[allow(dead_code)]
fn create_progress_callback(operation_name: &str) -> ProgressCallback {
    // Own the name so the returned closure does not borrow the argument.
    let name = operation_name.to_string();
    Box::new(move |progress| {
        // NOTE(review): assumes `progress` is a 0.0..=1.0 fraction — confirm
        // against the ProgressCallback contract in the tvai crate.
        let percentage = (progress * 100.0) as u32;
        println!("{}: {}%", name, percentage);
    })
}