Compare commits
5 commits: 01ea25168a ... 1a086f9362

Commits in this comparison:
- 1a086f9362
- 8f77960183
- d6af05b6db
- 76245b3adc
- 6559310adc
croppa/main.py (191 changed lines)
@@ -215,25 +215,31 @@ class VideoEditor:
# Restore state values
if 'current_frame' in state:
    self.current_frame = state['current_frame']
    print(f"Loaded current_frame: {self.current_frame}")
if 'crop_rect' in state and state['crop_rect'] is not None:
    self.crop_rect = tuple(state['crop_rect'])
    print(f"DEBUG: Loaded crop_rect: {self.crop_rect}")
    print(f"Loaded crop_rect: {self.crop_rect}")
if 'zoom_factor' in state:
    self.zoom_factor = state['zoom_factor']
    print(f"Loaded zoom_factor: {self.zoom_factor}")
if 'zoom_center' in state and state['zoom_center'] is not None:
    self.zoom_center = tuple(state['zoom_center'])
    print(f"Loaded zoom_center: {self.zoom_center}")
if 'rotation_angle' in state:
    self.rotation_angle = state['rotation_angle']
    print(f"Loaded rotation_angle: {self.rotation_angle}")
if 'brightness' in state:
    self.brightness = state['brightness']
    print(f"Loaded brightness: {self.brightness}")
if 'contrast' in state:
    self.contrast = state['contrast']
    print(f"Loaded contrast: {self.contrast}")
if 'cut_start_frame' in state:
    self.cut_start_frame = state['cut_start_frame']
    print(f"Restored cut_start_frame: {self.cut_start_frame}")
    print(f"Loaded cut_start_frame: {self.cut_start_frame}")
if 'cut_end_frame' in state:
    self.cut_end_frame = state['cut_end_frame']
    print(f"Restored cut_end_frame: {self.cut_end_frame}")
    print(f"Loaded cut_end_frame: {self.cut_end_frame}")

# Validate cut markers against current video length
if self.cut_start_frame is not None and self.cut_start_frame >= self.total_frames:
@@ -250,14 +256,19 @@ class VideoEditor:
    print(f"Markers will be drawn at: Start {start_progress:.4f} ({self.cut_start_frame}/{self.total_frames}), End {end_progress:.4f} ({self.cut_end_frame}/{self.total_frames})")
if 'looping_between_markers' in state:
    self.looping_between_markers = state['looping_between_markers']
    print(f"Loaded looping_between_markers: {self.looping_between_markers}")
if 'display_offset' in state:
    self.display_offset = state['display_offset']
    print(f"Loaded display_offset: {self.display_offset}")
if 'playback_speed' in state:
    self.playback_speed = state['playback_speed']
    print(f"Loaded playback_speed: {self.playback_speed}")
if 'seek_multiplier' in state:
    self.seek_multiplier = state['seek_multiplier']
    print(f"Loaded seek_multiplier: {self.seek_multiplier}")
if 'is_playing' in state:
    self.is_playing = state['is_playing']
    print(f"Loaded is_playing: {self.is_playing}")

# Validate and clamp values
self.current_frame = max(0, min(self.current_frame, getattr(self, 'total_frames', 1) - 1))
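The two hunks above restore saved state one key at a time with a log line per value. A minimal sketch of the same restore-from-dict pattern, assuming a plain `state` dict and an editor object with matching attributes; `restore_state` and its key lists are hypothetical helpers for illustration, not code that exists in croppa, and the diff itself keeps the explicit per-key blocks.

```python
# Illustrative only: collapse the per-key restore blocks into one loop.
TUPLE_KEYS = ("crop_rect", "zoom_center")       # stored as lists, restored as tuples
SCALAR_KEYS = (
    "current_frame", "zoom_factor", "rotation_angle", "brightness",
    "contrast", "cut_start_frame", "cut_end_frame",
    "looping_between_markers", "display_offset", "playback_speed",
    "seek_multiplier", "is_playing",
)

def restore_state(editor, state: dict) -> None:
    """Copy saved values onto the editor, mirroring the per-key blocks above."""
    for key in SCALAR_KEYS + TUPLE_KEYS:
        if key not in state or state[key] is None:
            continue
        value = tuple(state[key]) if key in TUPLE_KEYS else state[key]
        setattr(editor, key, value)
        print(f"Loaded {key}: {value}")
```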
@@ -443,26 +454,7 @@ class VideoEditor:
if self.fps > 60:
    print(" Warning: High framerate video - may impact playback smoothness")

# Reset playback state for new media
self.current_frame = 0
self.is_playing = False if self.is_image_mode else False # Images start paused
self.playback_speed = 1.0
self.seek_multiplier = 1.0
self.current_display_frame = None

# Reset crop, zoom, rotation, brightness/contrast, and cut settings for new media
self.crop_rect = None
self.crop_history = []
self.zoom_factor = 1.0
self.zoom_center = None
self.rotation_angle = 0
self.brightness = 0
self.contrast = 1.0
self.cut_start_frame = None
self.cut_end_frame = None
self.display_offset = [0, 0]

# Try to load saved state for this media file
# Try to load saved state for this media file first
if self.load_state():
    print("Loaded saved state for this media file")
    if self.cut_start_frame is not None:
@@ -471,6 +463,24 @@ class VideoEditor:
            print(f" Cut end frame: {self.cut_end_frame}")
    else:
        print("No saved state found for this media file")
        # Only reset to defaults if no state was loaded
        self.current_frame = 0
        self.is_playing = False if self.is_image_mode else False # Images start paused
        self.playback_speed = 1.0
        self.seek_multiplier = 1.0
        self.crop_rect = None
        self.crop_history = []
        self.zoom_factor = 1.0
        self.zoom_center = None
        self.rotation_angle = 0
        self.brightness = 0
        self.contrast = 1.0
        self.cut_start_frame = None
        self.cut_end_frame = None
        self.display_offset = [0, 0]

    # Always reset these regardless of state
    self.current_display_frame = None

def switch_to_video(self, index: int):
    """Switch to a specific video by index"""
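The reordered logic above now falls back to defaults only when `load_state()` finds nothing. A small sketch of what such a loader could look like, assuming per-media state is kept in a JSON sidecar next to the media file; the `.croppa.json` naming and the `load_state_dict` helper are assumptions for illustration, not croppa's actual storage scheme.

```python
import json
from pathlib import Path
from typing import Optional

def _state_path(video_path: str) -> Path:
    # Assumed sidecar naming scheme; the real project may store state elsewhere.
    return Path(f"{video_path}.croppa.json")

def load_state_dict(video_path: str) -> Optional[dict]:
    """Return the saved state dict, or None so the caller can reset to defaults."""
    path = _state_path(video_path)
    if not path.exists():
        return None
    try:
        return json.loads(path.read_text())
    except (OSError, json.JSONDecodeError):
        # Treat unreadable or corrupt state the same as "no saved state".
        return None
```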
@@ -1626,6 +1636,11 @@ class VideoEditor:
# Send progress update
self.render_progress_queue.put(("progress", "Setting up FFmpeg encoder...", 0.1, 0.0))

# Debug output dimensions
print(f"Output dimensions: {output_width}x{output_height}")
print(f"Zoom factor: {self.zoom_factor}")
print(f"Crop dimensions: {crop_width}x{crop_height}")

# Skip all the OpenCV codec bullshit and go straight to FFmpeg
print("Using FFmpeg for encoding with OpenCV transformations...")
return self._render_with_ffmpeg_pipe(output_path, start_frame, end_frame, output_width, output_height)
@@ -1663,6 +1678,8 @@ class VideoEditor:
    self._handle_overwrite_completion()
elif update_type == "error":
    self.update_progress_bar(progress, text, fps)
    # Also show error as feedback message for better visibility
    self.show_feedback_message(f"ERROR: {text}")
elif update_type == "cancelled":
    self.hide_progress_bar()
    self.show_feedback_message("Render cancelled")
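This hunk extends the handler that drains the render progress queue with explicit `error` and `cancelled` branches. A sketch of such a consumer, assuming a standard `queue.Queue` carrying `(update_type, text, progress, fps)` tuples and a `ui` object exposing the methods named in the diff; the free-standing function is illustrative only.

```python
import queue

def drain_progress(progress_queue: "queue.Queue", ui) -> None:
    # Pull every pending update without blocking the UI loop; the tuple
    # layout mirrors the messages put on the queue in the hunks above.
    while True:
        try:
            update_type, text, progress, fps = progress_queue.get_nowait()
        except queue.Empty:
            break
        if update_type == "progress":
            ui.update_progress_bar(progress, text, fps)
        elif update_type == "error":
            ui.update_progress_bar(progress, text, fps)
            ui.show_feedback_message(f"ERROR: {text}")
        elif update_type == "cancelled":
            ui.hide_progress_bar()
            ui.show_feedback_message("Render cancelled")
```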
@@ -1826,12 +1843,17 @@ class VideoEditor:
    return None

def _render_with_ffmpeg_pipe(self, output_path: str, start_frame: int, end_frame: int, output_width: int, output_height: int):
    """Hybrid approach: OpenCV transformations + FFmpeg encoding via pipe"""
    """Render video with transformations"""
    try:
        self.render_progress_queue.put(("progress", "Starting FFmpeg encoder...", 0.0, 0.0))

        # Start FFmpeg process to receive frames via pipe
        # Use Windows-friendly approach with explicit binary mode
        try:
            subprocess.run(['ffmpeg', '-version'], capture_output=True, check=True)
        except (subprocess.CalledProcessError, FileNotFoundError):
            error_msg = "FFmpeg not found - please install FFmpeg and ensure it's in your PATH"
            print(error_msg)
            self.render_progress_queue.put(("error", error_msg, 1.0, 0.0))
            return False

        self.render_progress_queue.put(("progress", "Starting encoder...", 0.0, 0.0))
        ffmpeg_cmd = [
            'ffmpeg', '-y', '-v', 'quiet',
            '-f', 'rawvideo',
@@ -1847,16 +1869,22 @@ class VideoEditor:
            output_path
        ]

        # Start FFmpeg process with Windows-friendly settings
        import tempfile
        import os

        temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.raw')
        temp_file.close()

        ffmpeg_cmd[ffmpeg_cmd.index('-i') + 1] = temp_file.name
        self.ffmpeg_process = subprocess.Popen(
            ffmpeg_cmd,
            stdin=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0, # Unbuffered for better pipe performance
            universal_newlines=False # Binary mode for Windows
            stdout=subprocess.DEVNULL,
            creationflags=subprocess.CREATE_NO_WINDOW if hasattr(subprocess, 'CREATE_NO_WINDOW') else 0
        )

        self.temp_file_name = temp_file.name

        # OpenCV for frame reading and transformations
        render_cap = cv2.VideoCapture(str(self.video_path))
        render_cap.set(cv2.CAP_PROP_POS_FRAMES, start_frame)

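The command list built above feeds raw frames to FFmpeg, with the input argument later swapped for a temp file name via `ffmpeg_cmd.index('-i') + 1`. A sketch of a complete rawvideo command of that shape; the diff only shows the head and tail of the real list, so the pixel format, size, framerate, and codec flags here are plausible assumptions rather than the exact flags croppa uses.

```python
def build_ffmpeg_cmd(width: int, height: int, fps: float, output_path: str) -> list:
    # Raw BGR frames in, H.264 MP4 out. Flags after '-f rawvideo' are assumed.
    return [
        "ffmpeg", "-y", "-v", "quiet",
        "-f", "rawvideo",
        "-pix_fmt", "bgr24",            # OpenCV frames are packed BGR bytes
        "-s", f"{width}x{height}",
        "-r", str(fps),
        "-i", "-",                       # placeholder; replaced with the temp file name
        "-c:v", "libx264",
        "-pix_fmt", "yuv420p",
        output_path,
    ]
```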
@@ -1866,70 +1894,75 @@ class VideoEditor:
        last_progress_update = 0

        self.render_progress_queue.put(("progress", f"Processing {total_frames} frames...", 0.1, 0.0))
        with open(self.temp_file_name, 'wb') as temp_file:
            for i in range(total_frames):
                if self.render_cancelled:
                    render_cap.release()
                    self.render_progress_queue.put(("cancelled", "Render cancelled", 0.0, 0.0))
                    return False

        for i in range(total_frames):
            if self.render_cancelled:
                self.ffmpeg_process.stdin.close()
                self.ffmpeg_process.terminate()
                self.ffmpeg_process.wait()
                render_cap.release()
                self.ffmpeg_process = None
                self.render_progress_queue.put(("cancelled", "Render cancelled", 0.0, 0.0))
                return False

            ret, frame = render_cap.read()
            if not ret:
                break

            # Apply transformations with OpenCV
            processed_frame = self._process_frame_for_render(frame, output_width, output_height)
            if processed_frame is not None:
                # Write frame to FFmpeg via pipe
                try:
                    self.ffmpeg_process.stdin.write(processed_frame.tobytes())
                    frames_written += 1
                except BrokenPipeError:
                    # FFmpeg process died
                ret, frame = render_cap.read()
                if not ret:
                    break

            # Update progress with FPS calculation
            current_time = time.time()
            progress = 0.1 + (0.8 * (i + 1) / total_frames)

            # Calculate FPS and update progress (throttled)
            if current_time - last_progress_update > 0.5:
                elapsed = current_time - start_time
                fps_rate = frames_written / elapsed if elapsed > 0 else 0
                self.render_progress_queue.put(("progress", f"Processed {i+1}/{total_frames} frames", progress, fps_rate))
                last_progress_update = current_time
                processed_frame = self._process_frame_for_render(frame, output_width, output_height)
                if processed_frame is not None:
                    if i == 0:
                        print(f"Processed frame dimensions: {processed_frame.shape[1]}x{processed_frame.shape[0]}")
                        print(f"Expected dimensions: {output_width}x{output_height}")

                    temp_file.write(processed_frame.tobytes())
                    frames_written += 1

                current_time = time.time()
                progress = 0.1 + (0.8 * (i + 1) / total_frames)

                if current_time - last_progress_update > 0.5:
                    elapsed = current_time - start_time
                    fps_rate = frames_written / elapsed if elapsed > 0 else 0
                    self.render_progress_queue.put(("progress", f"Processed {i+1}/{total_frames} frames", progress, fps_rate))
                    last_progress_update = current_time

        render_cap.release()

        self.render_progress_queue.put(("progress", "Encoding...", 0.9, 0.0))

        # Close FFmpeg input and wait for completion
        self.ffmpeg_process.stdin.close()
        stderr = self.ffmpeg_process.communicate()[1]
        return_code = self.ffmpeg_process.returncode
        self.ffmpeg_process = None

        render_cap.release()

        if hasattr(self, 'temp_file_name') and os.path.exists(self.temp_file_name):
            try:
                os.unlink(self.temp_file_name)
            except OSError:
                pass

        if return_code == 0:
            total_time = time.time() - start_time
            avg_fps = frames_written / total_time if total_time > 0 else 0
            self.render_progress_queue.put(("complete", f"Rendered {frames_written} frames with FFmpeg", 1.0, avg_fps))
            print(f"Successfully rendered {frames_written} frames using FFmpeg pipe (avg {avg_fps:.1f} FPS)")
            self.render_progress_queue.put(("complete", f"Rendered {frames_written} frames", 1.0, avg_fps))
            print(f"Successfully rendered {frames_written} frames (avg {avg_fps:.1f} FPS)")
            return True
        else:
            self.render_progress_queue.put(("error", f"FFmpeg encoding failed: {stderr.decode()}", 1.0, 0.0))
            error_details = stderr.decode() if stderr else "No error details available"
            print(f"Encoding failed with return code {return_code}")
            print(f"Error: {error_details}")
            self.render_progress_queue.put(("error", f"Encoding failed: {error_details}", 1.0, 0.0))
            return False

    except Exception as e:
        error_msg = str(e)
        # Handle specific Windows pipe errors
        if "Errno 22" in error_msg or "invalid argument" in error_msg.lower():
            error_msg = "Windows pipe error - try using a different output path or restart the application"
        elif "BrokenPipeError" in error_msg:
            error_msg = "FFmpeg process terminated unexpectedly - check if FFmpeg is installed correctly"
        print(f"Rendering exception: {error_msg}")
        print(f"Exception type: {type(e).__name__}")

        self.render_progress_queue.put(("error", f"FFmpeg pipe rendering failed: {error_msg}", 1.0, 0.0))
        if "Errno 22" in error_msg or "invalid argument" in error_msg.lower():
            error_msg = "File system error - try using a different output path"
        elif "BrokenPipeError" in error_msg:
            error_msg = "Process terminated unexpectedly"
        elif "FileNotFoundError" in error_msg or "ffmpeg" in error_msg.lower():
            error_msg = "FFmpeg not found - please install FFmpeg and ensure it's in your PATH"

        self.render_progress_queue.put(("error", f"Rendering failed: {error_msg}", 1.0, 0.0))
        return False

def run(self):
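The rewritten loop above buffers processed frames into a raw temp file that FFmpeg then reads, and throttles progress updates to roughly twice per second. A self-contained sketch of that pattern, with the frame processor and progress queue passed in as parameters; the function name and signature are illustrative, not part of croppa.

```python
import time

def write_frames(render_cap, temp_file_name: str, total_frames: int,
                 process_frame, progress_queue) -> int:
    """Stream processed frames into a raw temp file with throttled progress reports."""
    frames_written = 0
    start_time = time.time()
    last_update = 0.0
    with open(temp_file_name, "wb") as temp_file:
        for i in range(total_frames):
            ret, frame = render_cap.read()
            if not ret:
                break
            processed = process_frame(frame)          # e.g. crop/zoom/rotate with OpenCV
            if processed is None:
                continue
            temp_file.write(processed.tobytes())       # raw bytes, no container
            frames_written += 1
            now = time.time()
            if now - last_update > 0.5:                # throttle UI updates
                elapsed = now - start_time
                fps_rate = frames_written / elapsed if elapsed > 0 else 0
                progress_queue.put(("progress",
                                    f"Processed {i + 1}/{total_frames} frames",
                                    0.1 + 0.8 * (i + 1) / total_frames,
                                    fps_rate))
                last_update = now
    return frames_written
```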