From 80fb35ccedc7de869d9b000fd0b42e1e89e4e469 Mon Sep 17 00:00:00 2001
From: PhatPhuckDave
Date: Fri, 26 Sep 2025 14:14:15 +0200
Subject: [PATCH] Implement feature interpolation and gap filling in optical flow tracking

Add methods that interpolate feature positions between frames and fill
gaps in the tracking data using linear interpolation, so features stay
continuous across frames while optical flow tracking is enabled. Debug
messages are printed during interpolation to make the process easier to
follow in the VideoEditor.
---
 croppa/main.py | 89 ++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 89 insertions(+)

diff --git a/croppa/main.py b/croppa/main.py
index 772a0c5..b25ccee 100644
--- a/croppa/main.py
+++ b/croppa/main.py
@@ -1382,6 +1382,7 @@ class VideoEditor:
 
         self._track_with_optical_flow()
 
+        # Store current frame for next optical flow iteration
         if not self.is_image_mode and self.current_display_frame is not None:
             self.previous_frame_for_flow = self.current_display_frame.copy()
 
@@ -1842,19 +1843,24 @@ class VideoEditor:
             # Get previous frame features
             prev_frame_number = self.current_frame - 1
             if prev_frame_number not in self.feature_tracker.features:
+                print(f"DEBUG: No features on previous frame {prev_frame_number} for optical flow")
                 return
 
             prev_features = self.feature_tracker.features[prev_frame_number]
             prev_positions = np.array(prev_features['positions'], dtype=np.float32).reshape(-1, 1, 2)
 
             if len(prev_positions) == 0:
+                print(f"DEBUG: No positions on previous frame {prev_frame_number} for optical flow")
                 return
 
+            print(f"DEBUG: Optical flow tracking from frame {prev_frame_number} to {self.current_frame}")
+
             # Apply transformations to get the display frames
             prev_display_frame = self.apply_crop_zoom_and_rotation(self.previous_frame_for_flow)
             curr_display_frame = self.apply_crop_zoom_and_rotation(self.current_display_frame)
 
             if prev_display_frame is None or curr_display_frame is None:
+                print("DEBUG: Could not get display frames for optical flow")
                 return
 
             # Map previous positions to display frame coordinates
@@ -1876,9 +1882,11 @@ class VideoEditor:
                 display_prev_positions.append([display_x, display_y])
 
             if len(display_prev_positions) == 0:
+                print("DEBUG: No valid display positions for optical flow")
                 return
 
             display_prev_positions = np.array(display_prev_positions, dtype=np.float32).reshape(-1, 1, 2)
+            print(f"DEBUG: Tracking {len(display_prev_positions)} points with optical flow")
 
             # Track using optical flow
             new_points, good_old, status = self.feature_tracker.track_features_optical_flow(
@@ -1886,6 +1894,8 @@ class VideoEditor:
             )
 
             if new_points is not None and len(new_points) > 0:
+                print(f"DEBUG: Optical flow found {len(new_points)} tracked points")
+
                 # Map new positions back to rotated frame coordinates
                 mapped_positions = []
                 for point in new_points.reshape(-1, 2):
@@ -1910,9 +1920,83 @@ class VideoEditor:
                 }
 
                 print(f"Optical flow tracked {len(mapped_positions)} features to frame {self.current_frame}")
+            else:
+                print("DEBUG: Optical flow failed to track any points")
 
         except Exception as e:
             print(f"Error in optical flow tracking: {e}")
+
+
+    def _interpolate_features_between_frames(self, start_frame, end_frame):
+        """Interpolate features between two frames using linear interpolation"""
+        try:
+            if start_frame not in self.feature_tracker.features or end_frame not in self.feature_tracker.features:
+                return
+
+            start_features = self.feature_tracker.features[start_frame]['positions']
+            end_features = self.feature_tracker.features[end_frame]['positions']
+
+            if len(start_features) != len(end_features):
+                print(f"DEBUG: Feature count mismatch between frames {start_frame} and {end_frame}")
+                return
+
+            # Interpolate for all frames between start and end
+            for frame_num in range(start_frame + 1, end_frame):
+                if frame_num in self.feature_tracker.features:
+                    continue  # Skip if already has features
+
+                # Linear interpolation
+                alpha = (frame_num - start_frame) / (end_frame - start_frame)
+                interpolated_positions = []
+
+                for i in range(len(start_features)):
+                    start_x, start_y = start_features[i]
+                    end_x, end_y = end_features[i]
+
+                    interp_x = start_x + alpha * (end_x - start_x)
+                    interp_y = start_y + alpha * (end_y - start_y)
+
+                    interpolated_positions.append((int(interp_x), int(interp_y)))
+
+                # Store interpolated features
+                self.feature_tracker.features[frame_num] = {
+                    'keypoints': [],
+                    'descriptors': np.array([]),
+                    'positions': interpolated_positions
+                }
+
+                print(f"DEBUG: Interpolated {len(interpolated_positions)} features for frame {frame_num}")
+
+        except Exception as e:
+            print(f"Error interpolating features: {e}")
+
+    def _fill_all_gaps_with_interpolation(self):
+        """Fill all gaps between existing features with linear interpolation"""
+        try:
+            if not self.feature_tracker.features:
+                print("DEBUG: No features to interpolate between")
+                return
+
+            # Get all frames with features, sorted
+            frames_with_features = sorted(self.feature_tracker.features.keys())
+            print(f"DEBUG: Frames with features: {frames_with_features}")
+
+            if len(frames_with_features) < 2:
+                print("DEBUG: Need at least 2 frames with features to interpolate")
+                return
+
+            # Fill gaps between each pair of consecutive frames with features
+            for i in range(len(frames_with_features) - 1):
+                start_frame = frames_with_features[i]
+                end_frame = frames_with_features[i + 1]
+
+                print(f"DEBUG: Interpolating between frame {start_frame} and {end_frame}")
+                self._interpolate_features_between_frames(start_frame, end_frame)
+
+            print("DEBUG: Finished filling all gaps with interpolation")
+
+        except Exception as e:
+            print(f"Error filling all gaps: {e}")
 
 
     def apply_rotation(self, frame):
@@ -3951,6 +4035,11 @@ class VideoEditor:
             # Toggle optical flow tracking
             self.optical_flow_enabled = not self.optical_flow_enabled
             print(f"DEBUG: Optical flow toggled to {self.optical_flow_enabled}")
+
+            # If enabling optical flow, fill all gaps between existing features
+            if self.optical_flow_enabled:
+                self._fill_all_gaps_with_interpolation()
+
             self.show_feedback_message(f"Optical flow {'ON' if self.optical_flow_enabled else 'OFF'}")
             self.save_state()
         elif key == ord("t"):
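
For reference, the gap filling introduced above is a per-axis linear blend between matching feature positions, with alpha equal to the frame's progress through the gap. The standalone sketch below illustrates that scheme on a plain dict mapping frame numbers to (x, y) position lists; the dict layout, the helper name fill_gaps_with_interpolation, and the example data are illustrative assumptions, not the VideoEditor or FeatureTracker API.

import numpy as np

def fill_gaps_with_interpolation(features):
    """Linearly interpolate (x, y) feature positions for missing frames.

    `features` maps frame_number -> list of (x, y) tuples. For every pair of
    consecutive annotated frames, the frames in between get positions blended
    by alpha = progress through the gap, pairing features by index as
    _interpolate_features_between_frames does in the patch above.
    """
    annotated = sorted(features.keys())
    for start_frame, end_frame in zip(annotated, annotated[1:]):
        start_pos = np.asarray(features[start_frame], dtype=np.float32)
        end_pos = np.asarray(features[end_frame], dtype=np.float32)
        if len(start_pos) != len(end_pos):
            continue  # cannot pair features across this gap
        for frame in range(start_frame + 1, end_frame):
            if frame in features:
                continue  # frame already has features
            alpha = (frame - start_frame) / (end_frame - start_frame)
            blended = (1.0 - alpha) * start_pos + alpha * end_pos
            features[frame] = [(int(x), int(y)) for x, y in blended]
    return features

# Frames 0 and 4 are annotated; frames 1-3 are filled in.
tracks = {0: [(10, 10), (50, 20)], 4: [(18, 10), (58, 28)]}
print(fill_gaps_with_interpolation(tracks)[2])  # [(14, 10), (54, 24)]

Casting the blended coordinates to int mirrors the patch, which stores integer pixel positions for interpolated frames.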