diff --git a/croppa/main.py b/croppa/main.py
index 51dcfe6..c5aada2 100644
--- a/croppa/main.py
+++ b/croppa/main.py
@@ -1843,8 +1843,7 @@ class VideoEditor:
         return None
 
     def _render_with_ffmpeg_pipe(self, output_path: str, start_frame: int, end_frame: int, output_width: int, output_height: int):
-        """Hybrid approach: OpenCV transformations + FFmpeg encoding via temporary file"""
-        temp_raw_file = None
+        """Hybrid approach: OpenCV transformations + FFmpeg encoding via pipe"""
         try:
             # Verify FFmpeg is available
             try:
@@ -1857,9 +1856,32 @@ class VideoEditor:
 
             self.render_progress_queue.put(("progress", "Starting FFmpeg encoder...", 0.0, 0.0))
 
-            # Create temporary raw video file instead of using pipes
-            temp_raw_file = output_path.replace('.mp4', '_temp.raw')
-            print(f"Using temporary file approach: {temp_raw_file}")
+            # Start FFmpeg process to receive frames via pipe
+            ffmpeg_cmd = [
+                'ffmpeg', '-y', '-v', 'quiet',
+                '-f', 'rawvideo',
+                '-vcodec', 'rawvideo',
+                '-s', f'{output_width}x{output_height}',
+                '-pix_fmt', 'bgr24',
+                '-r', str(self.fps),
+                '-i', '-',  # Read from stdin
+                '-c:v', 'libx264',
+                '-preset', 'fast',
+                '-crf', '18',
+                '-pix_fmt', 'yuv420p',
+                output_path
+            ]
+
+            # Start FFmpeg process with Windows-optimized configuration
+            self.ffmpeg_process = subprocess.Popen(
+                ffmpeg_cmd,
+                stdin=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                stdout=subprocess.DEVNULL,  # Discard stdout to prevent hanging
+                bufsize=65536,  # 64KB buffer to prevent Windows pipe overflow
+                universal_newlines=False,  # Binary mode for Windows
+                creationflags=subprocess.CREATE_NO_WINDOW if hasattr(subprocess, 'CREATE_NO_WINDOW') else 0
+            )
 
             # OpenCV for frame reading and transformations
             render_cap = cv2.VideoCapture(str(self.video_path))
@@ -1872,74 +1894,69 @@ class VideoEditor:
 
             self.render_progress_queue.put(("progress", f"Processing {total_frames} frames...", 0.1, 0.0))
 
-            # Write frames to temporary raw file
-            with open(temp_raw_file, 'wb') as raw_file:
-                for i in range(total_frames):
-                    if self.render_cancelled:
-                        render_cap.release()
-                        self.render_progress_queue.put(("cancelled", "Render cancelled", 0.0, 0.0))
-                        return False
+            for i in range(total_frames):
+                if self.render_cancelled:
+                    self.ffmpeg_process.stdin.close()
+                    self.ffmpeg_process.terminate()
+                    self.ffmpeg_process.wait()
+                    render_cap.release()
+                    self.ffmpeg_process = None
+                    self.render_progress_queue.put(("cancelled", "Render cancelled", 0.0, 0.0))
+                    return False
 
-                    ret, frame = render_cap.read()
-                    if not ret:
+                ret, frame = render_cap.read()
+                if not ret:
+                    break
+
+                # Apply transformations with OpenCV
+                processed_frame = self._process_frame_for_render(frame, output_width, output_height)
+                if processed_frame is not None:
+                    # Debug frame dimensions
+                    if i == 0:  # Only print for first frame
+                        print(f"Processed frame dimensions: {processed_frame.shape[1]}x{processed_frame.shape[0]}")
+                        print(f"Expected dimensions: {output_width}x{output_height}")
+
+                    # Write frame to FFmpeg via pipe
+                    try:
+                        self.ffmpeg_process.stdin.write(processed_frame.tobytes())
+                        frames_written += 1
+
+                        # Small delay every 10 frames to prevent Windows pipe overflow
+                        if frames_written % 10 == 0:
+                            time.sleep(0.001)  # 1ms delay
+
+                    except BrokenPipeError:
+                        # FFmpeg process died
+                        print("FFmpeg process died - BrokenPipeError")
                         break
 
-                    # Apply transformations with OpenCV
-                    processed_frame = self._process_frame_for_render(frame, output_width, output_height)
-                    if processed_frame is not None:
-                        # Debug frame dimensions
-                        if i == 0:  # Only print for first frame
-                            print(f"Processed frame dimensions: {processed_frame.shape[1]}x{processed_frame.shape[0]}")
-                            print(f"Expected dimensions: {output_width}x{output_height}")
-
-                        # Write frame to temporary raw file
-                        raw_file.write(processed_frame.tobytes())
-                        frames_written += 1
+                # Update progress with FPS calculation
+                current_time = time.time()
+                progress = 0.1 + (0.8 * (i + 1) / total_frames)
+
+                # Calculate FPS and update progress (throttled)
+                if current_time - last_progress_update > 0.5:
+                    elapsed = current_time - start_time
+                    fps_rate = frames_written / elapsed if elapsed > 0 else 0
+                    self.render_progress_queue.put(("progress", f"Processed {i+1}/{total_frames} frames", progress, fps_rate))
+                    last_progress_update = current_time
 
-                    # Update progress with FPS calculation
-                    current_time = time.time()
-                    progress = 0.1 + (0.8 * (i + 1) / total_frames)
-
-                    # Calculate FPS and update progress (throttled)
-                    if current_time - last_progress_update > 0.5:
-                        elapsed = current_time - start_time
-                        fps_rate = frames_written / elapsed if elapsed > 0 else 0
-                        self.render_progress_queue.put(("progress", f"Processed {i+1}/{total_frames} frames", progress, fps_rate))
-                        last_progress_update = current_time
+            # Close FFmpeg input and wait for completion
+            self.ffmpeg_process.stdin.close()
+            stderr = self.ffmpeg_process.communicate()[1]
+            return_code = self.ffmpeg_process.returncode
+            self.ffmpeg_process = None
 
             render_cap.release()
 
-            # Now encode the raw file with FFmpeg
-            self.render_progress_queue.put(("progress", "Encoding with FFmpeg...", 0.9, 0.0))
-
-            ffmpeg_cmd = [
-                'ffmpeg', '-y', '-v', 'error',
-                '-f', 'rawvideo',
-                '-vcodec', 'rawvideo',
-                '-s', f'{output_width}x{output_height}',
-                '-pix_fmt', 'bgr24',
-                '-r', str(self.fps),
-                '-i', temp_raw_file,
-                '-c:v', 'libx264',
-                '-preset', 'fast',
-                '-crf', '18',
-                '-pix_fmt', 'yuv420p',
-                output_path
-            ]
-
-            # Run FFmpeg encoding
-            result = subprocess.run(ffmpeg_cmd, capture_output=True, text=True)
-            return_code = result.returncode
-            stderr = result.stderr
-
             if return_code == 0:
                 total_time = time.time() - start_time
                 avg_fps = frames_written / total_time if total_time > 0 else 0
                 self.render_progress_queue.put(("complete", f"Rendered {frames_written} frames with FFmpeg", 1.0, avg_fps))
-                print(f"Successfully rendered {frames_written} frames using FFmpeg (avg {avg_fps:.1f} FPS)")
+                print(f"Successfully rendered {frames_written} frames using FFmpeg pipe (avg {avg_fps:.1f} FPS)")
                 return True
             else:
-                error_details = stderr if stderr else "No error details available"
+                error_details = stderr.decode() if stderr else "No error details available"
                 print(f"FFmpeg encoding failed with return code {return_code}")
                 print(f"FFmpeg stderr: {error_details}")
                 self.render_progress_queue.put(("error", f"FFmpeg encoding failed: {error_details}", 1.0, 0.0))
@@ -1947,25 +1964,19 @@ class VideoEditor:
 
         except Exception as e:
             error_msg = str(e)
-            print(f"FFmpeg rendering exception: {error_msg}")
+            print(f"FFmpeg pipe rendering exception: {error_msg}")
             print(f"Exception type: {type(e).__name__}")
 
-            # Handle specific errors
+            # Handle specific Windows pipe errors
            if "Errno 22" in error_msg or "invalid argument" in error_msg.lower():
-                error_msg = "File system error - try using a different output path"
+                error_msg = "Windows pipe error - try using a different output path or restart the application"
+            elif "BrokenPipeError" in error_msg:
+                error_msg = "FFmpeg process terminated unexpectedly - check if FFmpeg is installed correctly"
             elif "FileNotFoundError" in error_msg or "ffmpeg" in error_msg.lower():
                 error_msg = "FFmpeg not found - please install FFmpeg and ensure it's in your PATH"
 
-            self.render_progress_queue.put(("error", f"FFmpeg rendering failed: {error_msg}", 1.0, 0.0))
+            self.render_progress_queue.put(("error", f"FFmpeg pipe rendering failed: {error_msg}", 1.0, 0.0))
             return False
-        finally:
-            # Clean up temporary file
-            if temp_raw_file and os.path.exists(temp_raw_file):
-                try:
-                    os.remove(temp_raw_file)
-                    print(f"Cleaned up temporary file: {temp_raw_file}")
-                except Exception as e:
-                    print(f"Warning: Could not remove temporary file {temp_raw_file}: {e}")
 
     def run(self):
         """Main editor loop"""