// mxivideo/src-tauri/src/commands/ai_video.rs
// Tauri commands for AI video generation (single and batch) plus an
// environment self-test for the Python toolchain.

use serde::Deserialize;
use std::process::Command;
use crate::python_executor::execute_python_command;
use crate::command_utils::configure_no_window;
/// Parameters for a single AI video generation request, deserialized
/// from the frontend invoke payload.
#[derive(Debug, Deserialize)]
pub struct AIVideoRequest {
    /// Path to the source image on disk.
    pub image_path: String,
    /// Text prompt driving the generation.
    pub prompt: String,
    /// Requested clip duration (passed through to the Python CLI as-is).
    pub duration: String,
    /// Which generation model backend to use.
    pub model_type: String,
    /// Optional explicit output file path; backend picks one if absent.
    pub output_path: Option<String>,
    /// Optional timeout in seconds for the generation job.
    pub timeout: Option<u32>,
}
/// Parameters for a batch AI video generation request, deserialized
/// from the frontend invoke payload.
#[derive(Debug, Deserialize)]
pub struct BatchAIVideoRequest {
    /// Folder containing the source images.
    pub image_folder: String,
    /// One prompt per image; forwarded to Python as a JSON array.
    pub prompts: Vec<String>,
    /// Destination folder for the generated videos.
    pub output_folder: String,
    /// Requested clip duration (passed through to the Python CLI as-is).
    pub duration: String,
    /// Which generation model backend to use.
    pub model_type: String,
    /// Optional timeout in seconds for the whole batch.
    pub timeout: Option<u32>,
}
/// Generates one AI video from an image + prompt by invoking the Python
/// module `python_core.ai_video.video_generator` with `--action single`.
///
/// Returns the Python process output on success, or an error string from
/// the executor on failure.
#[tauri::command]
pub async fn generate_ai_video(app: tauri::AppHandle, request: AIVideoRequest) -> Result<String, String> {
    // Fixed invocation prefix: run the generator module in single mode.
    let mut args: Vec<String> = [
        "-m",
        "python_core.ai_video.video_generator",
        "--action",
        "single",
    ]
    .iter()
    .map(|s| s.to_string())
    .collect();

    // Mandatory flag/value pairs, in the order the CLI expects.
    let required = [
        ("--image", request.image_path),
        ("--prompt", request.prompt),
        ("--duration", request.duration),
        ("--model", request.model_type),
    ];
    for (flag, value) in required {
        args.push(flag.to_string());
        args.push(value);
    }

    // Optional flags are appended only when provided by the caller.
    if let Some(path) = request.output_path {
        args.extend(["--output".to_string(), path]);
    }
    if let Some(secs) = request.timeout {
        args.extend(["--timeout".to_string(), secs.to_string()]);
    }

    execute_python_command(app, &args, None).await
}
/// Generates videos for a whole folder of images by invoking the Python
/// module `python_core.ai_video.video_generator` with `--action batch`.
///
/// The prompt list is serialized to a JSON array and handed to the CLI
/// via `--prompts`. Returns the Python process output on success.
#[tauri::command]
pub async fn batch_generate_ai_videos(app: tauri::AppHandle, request: BatchAIVideoRequest) -> Result<String, String> {
    // Encode prompts as JSON so arbitrary text survives argv transport.
    let prompts_json = serde_json::to_string(&request.prompts)
        .map_err(|e| format!("Failed to serialize prompts: {}", e))?;

    // Fixed invocation prefix: run the generator module in batch mode.
    let mut args: Vec<String> = [
        "-m",
        "python_core.ai_video.video_generator",
        "--action",
        "batch",
    ]
    .iter()
    .map(|s| s.to_string())
    .collect();

    // Mandatory flag/value pairs, in the order the CLI expects.
    let required = [
        ("--folder", request.image_folder),
        ("--prompts", prompts_json),
        ("--output", request.output_folder),
        ("--duration", request.duration),
        ("--model", request.model_type),
    ];
    for (flag, value) in required {
        args.push(flag.to_string());
        args.push(value);
    }

    // Timeout is the only optional flag for batch mode.
    if let Some(secs) = request.timeout {
        args.extend(["--timeout".to_string(), secs.to_string()]);
    }

    execute_python_command(app, &args, None).await
}
/// Probes the local Python installation to verify the AI-video environment:
/// tries candidate interpreters in platform-preference order and checks that
/// `requests` and `PIL` are importable.
///
/// Returns the interpreter's version/import report on the first success.
/// On total failure, the error now includes each candidate's failure reason
/// (previously these details were logged to stdout but dropped from the
/// returned error, making the frontend message undiagnosable).
#[tauri::command]
pub async fn test_ai_video_environment(_app: tauri::AppHandle) -> Result<String, String> {
    println!("Testing AI video environment...");
    // Resolve the project root: when launched from `src-tauri`, step up one
    // level so the Python package is reachable from the working directory.
    let current_dir = std::env::current_dir()
        .map_err(|e| format!("Failed to get current directory: {}", e))?;
    let project_root = if current_dir.ends_with("src-tauri") {
        current_dir.parent().unwrap_or(&current_dir).to_path_buf()
    } else {
        current_dir
    };
    println!("Testing from project root: {:?}", project_root);
    // Candidate interpreter names, most likely first for the platform.
    let python_commands = if cfg!(target_os = "windows") {
        vec!["python", "python3", "py"]
    } else {
        vec!["python3", "python"]
    };
    // Collect why each candidate failed so the final Err is actionable.
    let mut failures: Vec<String> = Vec::new();
    for python_cmd in python_commands {
        println!("Testing Python command: {}", python_cmd);
        let mut cmd = Command::new(python_cmd);
        cmd.current_dir(&project_root)
            .args(&["-c", "import sys; print(f'Python {sys.version}'); import requests; print('requests OK'); import PIL; print('PIL OK')"]);
        // Suppress the console window (Windows-specific helper).
        configure_no_window(&mut cmd);
        match cmd.output() {
            Ok(output) if output.status.success() => {
                let stdout = String::from_utf8_lossy(&output.stdout);
                return Ok(format!("Environment test passed with {}:\n{}", python_cmd, stdout));
            }
            Ok(output) => {
                // Interpreter ran but the import probe failed (e.g. missing package).
                let stderr = String::from_utf8_lossy(&output.stderr);
                println!("Python {} test failed: {}", python_cmd, stderr);
                failures.push(format!("{}: {}", python_cmd, stderr.trim()));
            }
            Err(e) => {
                // Interpreter could not be spawned at all (e.g. not on PATH).
                println!("Failed to execute Python {}: {}", python_cmd, e);
                failures.push(format!("{}: {}", python_cmd, e));
            }
        }
    }
    Err(format!(
        "Failed to find a working Python installation. Attempts: [{}]",
        failures.join("; ")
    ))
}