diff --git a/python_core/ai_video/video_generator.py b/python_core/ai_video/video_generator.py
index 9579338..0c3d615 100644
--- a/python_core/ai_video/video_generator.py
+++ b/python_core/ai_video/video_generator.py
@@ -108,7 +108,6 @@ class VideoGenerator:
 
             if found_path:
                 image_path = found_path
-                logger.info(f"Found image at: {image_path}")
             else:
                 result['msg'] = f'Image file not found: {image_path}. Searched in: {possible_paths}'
                 logger.error(result['msg'])
@@ -118,7 +117,6 @@ class VideoGenerator:
             progress.step("upload", "[1/4] 正在上传图片到云存储...")
             if progress_callback:
                 progress_callback("[1/4] 正在上传图片到云存储...")
-            logger.info(f"Uploading image to cloud storage: {image_path}")
 
             upload_result = self.cloud_storage.upload_file(image_path)
             if not upload_result['status']:
@@ -132,12 +130,10 @@ class VideoGenerator:
             progress.step("upload_success", "[1/4] 图片上传成功")
             if progress_callback:
                 progress_callback("[1/4] 图片上传成功")
-            logger.info(f"Image uploaded successfully: {img_url}")
 
             # Step 2: Submit video generation task
             if progress_callback:
                 progress_callback("[2/4] 正在提交视频生成任务...")
-            logger.info("Submitting video generation task...")
 
             task_result = self.api_client.submit_task(prompt, img_url, duration, model_type)
             if not task_result['status']:
@@ -148,12 +144,10 @@ class VideoGenerator:
             task_id = task_result['data']
             if progress_callback:
                 progress_callback(f"[2/4] 任务提交成功,任务ID: {task_id}")
-            logger.info(f"Task submitted successfully, task ID: {task_id}")
 
             # Step 3: Wait for task completion
             if progress_callback:
                 progress_callback("[3/4] 正在等待视频生成完成...")
-            logger.info("Waiting for video generation to complete...")
 
             completion_result = self.api_client.wait_for_completion(
                 task_id,
@@ -169,13 +163,11 @@ class VideoGenerator:
 
             video_url = completion_result['data']
             result['video_url'] = video_url
-            logger.info(f"Video generated successfully: {video_url}")
 
             # Step 4: Download video if save_path is provided
             if save_path:
                 if progress_callback:
                     progress_callback("[4/4] 正在下载视频到本地...")
-                logger.info("Downloading video to local storage...")
 
                 video_path = self.cloud_storage.download_file(video_url, save_path)
                 if video_path and os.path.exists(video_path):
@@ -184,7 +176,6 @@ class VideoGenerator:
                     result['msg'] = '视频生成并下载成功'
                     if progress_callback:
                         progress_callback(f"[4/4] 视频下载成功: {os.path.basename(video_path)}")
-                    logger.info(f"Video downloaded successfully: {video_path}")
                 else:
                     result['msg'] = '视频下载失败'
                     if progress_callback:
@@ -225,7 +216,8 @@ class VideoGenerator:
                               model_type: str = 'lite',
                               timeout: int = 300,
                               interval: int = 3,
-                              progress_callback: Optional[Callable[[str], None]] = None) -> Dict[str, Any]:
+                              progress_callback: Optional[Callable[[str], None]] = None,
+                              request_id: str = None) -> Dict[str, Any]:
         """
         Batch generate videos from multiple images.
 
@@ -242,8 +234,12 @@ class VideoGenerator:
         Returns:
             Dictionary with batch processing result
         """
+        # Initialize JSON-RPC handlers for batch processing
+        rpc = create_response_handler(request_id)
+        progress = create_progress_reporter()
+
         result = {'status': False, 'success_count': 0, 'failed_count': 0, 'results': [], 'msg': ''}
-
+
         try:
             if progress_callback:
                 progress_callback(f"开始批量处理任务...")
@@ -348,7 +344,16 @@ class VideoGenerator:
         except Exception as e:
             result['msg'] = f'批量处理过程中发生错误: {str(e)}'
             logger.error(result['msg'])
-
+            progress.error(result['msg'])
+            rpc.error(JSONRPCError.GENERATION_FAILED, "Batch processing failed", str(e))
+
+        # Send final batch result via JSON-RPC
+        if result['status']:
+            progress.complete(f"批量处理完成!成功: {result['success_count']}, 失败: {result['failed_count']}")
+            rpc.success(result)
+        else:
+            rpc.error(JSONRPCError.GENERATION_FAILED, result.get('msg', 'Batch processing failed'), result)
+
         return result
 
 
@@ -410,7 +415,8 @@ def main():
         duration=args.duration,
         model_type=args.model,
         timeout=args.timeout,
-        progress_callback=progress_callback
+        progress_callback=progress_callback,
+        request_id="cli_batch_request"
     )
 
     print(json.dumps(result, ensure_ascii=False, indent=2))
diff --git a/src-tauri/src/commands.rs b/src-tauri/src/commands.rs
index f14ca84..2ff5358 100644
--- a/src-tauri/src/commands.rs
+++ b/src-tauri/src/commands.rs
@@ -136,9 +136,9 @@ async fn execute_python_command(app: tauri::AppHandle, args: &[String]) -> Resul
                         println!("Progress: {}", json_str);
                     }
                 } else if json_value.get("result").is_some() || json_value.get("error").is_some() {
-                    // This is a final result or error response
+                    // This is a final result or error response - always update to get the latest
                     final_result = Some(json_str.to_string());
-                    println!("Final JSON-RPC result: {}", json_str);
+                    println!("JSON-RPC result found: {}", json_str);
                 }
             }
         } else if line.trim().starts_with('{') && line.trim().ends_with('}') {
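Note on the JSON-RPC plumbing: the Python hunks above call `create_response_handler`, `create_progress_reporter`, and `JSONRPCError.GENERATION_FAILED` without showing their definitions, while the Rust side (`execute_python_command` in commands.rs) recognizes the final response by a top-level `result` or `error` field in a stdout line. The sketch below is only an assumption of what such a response handler might emit (JSON-RPC 2.0 framing, one object per line over stdout); the class name, fields, and `flush=True` choice are illustrative, not the repository's actual implementation.

```python
import json


# Hypothetical sketch: a response handler whose output matches what commands.rs
# looks for (a JSON line containing either a "result" or an "error" key).
class JSONRPCResponseHandler:
    def __init__(self, request_id=None):
        self.request_id = request_id

    def success(self, result):
        # Final success line; commands.rs stores it in final_result.
        print(json.dumps({"jsonrpc": "2.0", "id": self.request_id, "result": result},
                         ensure_ascii=False), flush=True)

    def error(self, code, message, data=None):
        # Final error line; detected via the top-level "error" key.
        print(json.dumps({"jsonrpc": "2.0", "id": self.request_id,
                          "error": {"code": code, "message": message, "data": data}},
                         ensure_ascii=False), flush=True)


def create_response_handler(request_id=None):
    return JSONRPCResponseHandler(request_id)
```

Because the output is one JSON object per line, the loop in commands.rs can keep overwriting `final_result` as new result/error lines arrive, which is the behavior the updated comment ("always update to get the latest") documents.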