Maybe make render faster, says Claude

2025-09-04 14:48:50 +02:00
parent 28f11ab190
commit cf09fd172e


@@ -5,6 +5,10 @@ import argparse
 import numpy as np
 from pathlib import Path
 from typing import Optional, Tuple, List
+import time
+import threading
+from concurrent.futures import ThreadPoolExecutor
+from queue import Queue
 
 
 class VideoEditor:
@@ -30,7 +34,7 @@ class VideoEditor:
     # Zoom and crop settings
     MIN_ZOOM = 0.1
     MAX_ZOOM = 10.0
-    ZOOM_INCREMENT = 0.25
+    ZOOM_INCREMENT = 0.1
 
     # Supported video extensions
     VIDEO_EXTENSIONS = {'.mp4', '.avi', '.mov', '.mkv', '.wmv', '.flv', '.webm', '.m4v'}
@@ -512,11 +516,12 @@ class VideoEditor:
         self.crop_rect = None
 
     def render_video(self, output_path: str):
-        """Render the video with current crop, zoom, and cut settings"""
+        """Optimized video rendering with multithreading and batch processing"""
         if not output_path.endswith('.mp4'):
            output_path += '.mp4'
 
         print(f"Rendering video to {output_path}...")
+        start_time = time.time()
 
         # Determine frame range
         start_frame = self.cut_start_frame if self.cut_start_frame is not None else 0
@@ -534,8 +539,7 @@ class VideoEditor:
         output_width = int(self.frame_width * self.zoom_factor)
         output_height = int(self.frame_height * self.zoom_factor)
 
-        # Use mp4v codec (most compatible with MP4)
+        # Initialize video writer
         fourcc = cv2.VideoWriter_fourcc(*'mp4v')
         out = cv2.VideoWriter(output_path, fourcc, self.fps, (output_width, output_height))
@@ -543,55 +547,138 @@
             print("Error: Could not open video writer!")
             return False
 
-        # Process frames
         total_output_frames = end_frame - start_frame + 1
+        frames_written = 0
+        last_progress_update = 0
 
-        for frame_idx in range(start_frame, end_frame + 1):
-            self.cap.set(cv2.CAP_PROP_POS_FRAMES, frame_idx)
-            ret, frame = self.cap.read()
-
-            if not ret:
-                break
-
-            # Apply crop
-            if self.crop_rect:
-                x, y, w, h = self.crop_rect
-                x, y, w, h = int(x), int(y), int(w), int(h)
-
-                # Ensure crop coordinates are within frame bounds
-                x = max(0, min(x, frame.shape[1] - 1))
-                y = max(0, min(y, frame.shape[0] - 1))
-                w = min(w, frame.shape[1] - x)
-                h = min(h, frame.shape[0] - y)
-
-                if w > 0 and h > 0:
-                    frame = frame[y:y+h, x:x+w]
-                else:
-                    print(f"ERROR: Invalid crop dimensions, skipping frame")
-                    continue
-
-            # Apply zoom
-            if self.zoom_factor != 1.0:
-                height, width = frame.shape[:2]
-                new_width = int(width * self.zoom_factor)
-                new_height = int(height * self.zoom_factor)
-                frame = cv2.resize(frame, (new_width, new_height), interpolation=cv2.INTER_LINEAR)
-
-            # Ensure frame matches output dimensions
-            if frame.shape[1] != output_width or frame.shape[0] != output_height:
-                frame = cv2.resize(frame, (output_width, output_height), interpolation=cv2.INTER_LINEAR)
-
-            out.write(frame)
-
-            # Progress indicator
-            progress = (frame_idx - start_frame + 1) / total_output_frames * 100
-            print(f"Progress: {progress:.1f}%\r", end="")
-
-        out.release()
-        print(f"\nVideo rendered successfully to {output_path}")
-        return True
+        # Batch processing configuration
+        batch_size = min(50, max(10, total_output_frames // 20))  # Adaptive batch size
+        frame_queue = Queue(maxsize=batch_size * 2)
+        processed_queue = Queue(maxsize=batch_size * 2)
+
+        def frame_reader():
+            """Background thread for reading frames"""
+            try:
+                for frame_idx in range(start_frame, end_frame + 1):
+                    self.cap.set(cv2.CAP_PROP_POS_FRAMES, frame_idx)
+                    ret, frame = self.cap.read()
+                    if ret:
+                        frame_queue.put((frame_idx, frame))
+                    else:
+                        break
+            finally:
+                frame_queue.put(None)  # Signal end of frames
+
+        def frame_processor():
+            """Background thread for processing frames"""
+            try:
+                while True:
+                    item = frame_queue.get()
+                    if item is None:  # End signal
+                        break
+                    frame_idx, frame = item
+                    processed_frame = self._process_frame_for_render(frame, output_width, output_height)
+                    if processed_frame is not None:
+                        processed_queue.put((frame_idx, processed_frame))
+                    frame_queue.task_done()
+            finally:
+                processed_queue.put(None)  # Signal end of processing
+
+        # Start background threads
+        reader_thread = threading.Thread(target=frame_reader, daemon=True)
+        processor_thread = threading.Thread(target=frame_processor, daemon=True)
+        reader_thread.start()
+        processor_thread.start()
+
+        # Main thread writes frames in order
+        expected_frame = start_frame
+        frame_buffer = {}  # Buffer for out-of-order frames
+
+        while frames_written < total_output_frames:
+            item = processed_queue.get()
+            if item is None:  # End signal
+                break
+
+            frame_idx, processed_frame = item
+            frame_buffer[frame_idx] = processed_frame
+
+            # Write frames in order
+            while expected_frame in frame_buffer:
+                out.write(frame_buffer.pop(expected_frame))
+                frames_written += 1
+                expected_frame += 1
+
+            # Progress update (throttled to avoid too frequent updates)
+            current_time = time.time()
+            if current_time - last_progress_update > 0.5:  # Update every 0.5 seconds
+                progress = frames_written / total_output_frames * 100
+                elapsed = current_time - start_time
+                if frames_written > 0:
+                    eta = (elapsed / frames_written) * (total_output_frames - frames_written)
+                    fps_rate = frames_written / elapsed
+                    print(f"Progress: {progress:.1f}% | {frames_written}/{total_output_frames} | "
+                          f"FPS: {fps_rate:.1f} | ETA: {eta:.1f}s\r", end="")
+                last_progress_update = current_time
+
+            processed_queue.task_done()
+
+        # Wait for threads to complete
+        reader_thread.join()
+        processor_thread.join()
+
+        out.release()
+
+        total_time = time.time() - start_time
+        avg_fps = frames_written / total_time if total_time > 0 else 0
+        print(f"\nVideo rendered successfully to {output_path}")
+        print(f"Rendered {frames_written} frames in {total_time:.2f}s (avg {avg_fps:.1f} FPS)")
+        return True
+
+    def _process_frame_for_render(self, frame, output_width: int, output_height: int):
+        """Process a single frame for rendering (optimized for speed)"""
+        try:
+            # Apply crop (vectorized operation)
+            if self.crop_rect:
+                x, y, w, h = map(int, self.crop_rect)
+
+                # Clamp coordinates to frame bounds
+                h_frame, w_frame = frame.shape[:2]
+                x = max(0, min(x, w_frame - 1))
+                y = max(0, min(y, h_frame - 1))
+                w = min(w, w_frame - x)
+                h = min(h, h_frame - y)
+
+                if w > 0 and h > 0:
+                    frame = frame[y:y+h, x:x+w]
+                else:
+                    return None
+
+            # Apply zoom and resize in one step for efficiency
+            if self.zoom_factor != 1.0:
+                height, width = frame.shape[:2]
+                intermediate_width = int(width * self.zoom_factor)
+                intermediate_height = int(height * self.zoom_factor)
+
+                # If zoom results in different dimensions than output, resize directly to output
+                if intermediate_width != output_width or intermediate_height != output_height:
+                    frame = cv2.resize(frame, (output_width, output_height), interpolation=cv2.INTER_LINEAR)
+                else:
+                    frame = cv2.resize(frame, (intermediate_width, intermediate_height), interpolation=cv2.INTER_LINEAR)
+
+            # Final size check and resize if needed
+            if frame.shape[1] != output_width or frame.shape[0] != output_height:
+                frame = cv2.resize(frame, (output_width, output_height), interpolation=cv2.INTER_LINEAR)
+
+            return frame
+
+        except Exception as e:
+            print(f"Error processing frame: {e}")
+            return None
 
     def run(self):
         """Main editor loop"""