Optimize template matching in VideoEditor by searching only the cropped region, which is much faster. The tracking logic now checks for a defined crop rectangle first and, if one exists, runs template matching on that smaller region; if no crop is set, the full frame is used, preserving the previous behavior. Debug messages are kept to report match position and confidence.
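
For context, the technique is ordinary OpenCV template matching restricted to the crop window. The sketch below is a hypothetical standalone version, not the editor's actual method: match_in_crop, frame, template, and crop_rect are illustrative names, and cv2.TM_CCOEFF_NORMED plus a 0.7 threshold are assumptions.

import cv2

def match_in_crop(frame, template, crop_rect=None, threshold=0.7):
    """Template-match inside the crop rectangle when one is set, else the full frame."""
    if crop_rect:
        x, y, w, h = crop_rect
        search = frame[y:y + h, x:x + w]  # smaller search area -> faster matchTemplate
        offset_x, offset_y = x, y
    else:
        search = frame                    # no crop defined: keep the old full-frame behavior
        offset_x, offset_y = 0, 0
    th, tw = template.shape[:2]
    if search.size == 0 or search.shape[0] < th or search.shape[1] < tw:
        return None                       # crop empty or smaller than the template
    scores = cv2.matchTemplate(search, template, cv2.TM_CCOEFF_NORMED)
    _, max_val, _, max_loc = cv2.minMaxLoc(scores)
    if max_val < threshold:
        return None
    # Convert the top-left match location to a center point and add the crop offset back
    center_x = max_loc[0] + tw // 2 + offset_x
    center_y = max_loc[1] + th // 2 + offset_y
    return center_x, center_y, max_val

Searching an h-by-w crop instead of the full frame cuts matchTemplate's work roughly in proportion to the area, which is where the speedup comes from.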

2025-09-26 14:33:36 +02:00
parent 048e8ef033
commit e6616ed1b1


@@ -1640,34 +1640,47 @@ class VideoEditor:
                 # First try template matching if enabled (much better than optical flow)
                 if self.template_matching_enabled and self.tracking_template is not None:
                     if self.current_display_frame is not None:
-                        # Use the raw display frame to avoid recursion
-                        raw_frame = self.current_display_frame.copy()
-                        if raw_frame is not None:
-                            # Track template in raw frame
-                            result = self.track_template(raw_frame)
-                            if result:
-                                center_x, center_y, confidence = result
-                                print(f"DEBUG: Template match found at ({center_x}, {center_y}) with confidence {confidence:.2f}")
-                                # Map from raw frame coordinates to rotated frame coordinates
-                                # We need to account for rotation and crop transformations
-                                if self.rotation_angle == 90:
-                                    # 90° clockwise rotation
-                                    rot_x = self.frame_height - center_y
-                                    rot_y = center_x
-                                elif self.rotation_angle == 180:
-                                    # 180° rotation
-                                    rot_x = self.frame_width - center_x
-                                    rot_y = self.frame_height - center_y
-                                elif self.rotation_angle == 270:
-                                    # 270° clockwise rotation
-                                    rot_x = center_y
-                                    rot_y = self.frame_width - center_x
-                                else:
-                                    # No rotation
-                                    rot_x, rot_y = center_x, center_y
-                                return (rot_x, rot_y)
+                        # Use only the cropped region for much faster template matching
+                        if self.crop_rect:
+                            crop_x, crop_y, crop_w, crop_h = self.crop_rect
+                            # Extract only the cropped region
+                            cropped_frame = self.current_display_frame[crop_y:crop_y+crop_h, crop_x:crop_x+crop_w]
+                            if cropped_frame is not None and cropped_frame.size > 0:
+                                # Track template in cropped frame (much faster!)
+                                result = self.track_template(cropped_frame)
+                                if result:
+                                    center_x, center_y, confidence = result
+                                    print(f"DEBUG: Template match found at ({center_x}, {center_y}) with confidence {confidence:.2f}")
+                                    # Map from cropped frame coordinates to rotated frame coordinates
+                                    # Add crop offset back
+                                    rot_x = center_x + crop_x
+                                    rot_y = center_y + crop_y
+                                    return (rot_x, rot_y)
+                        else:
+                            # No crop - use full frame
+                            raw_frame = self.current_display_frame.copy()
+                            if raw_frame is not None:
+                                result = self.track_template(raw_frame)
+                                if result:
+                                    center_x, center_y, confidence = result
+                                    print(f"DEBUG: Template match found at ({center_x}, {center_y}) with confidence {confidence:.2f}")
+                                    # Map from raw frame coordinates to rotated frame coordinates
+                                    if self.rotation_angle == 90:
+                                        rot_x = self.frame_height - center_y
+                                        rot_y = center_x
+                                    elif self.rotation_angle == 180:
+                                        rot_x = self.frame_width - center_x
+                                        rot_y = self.frame_height - center_y
+                                    elif self.rotation_angle == 270:
+                                        rot_x = center_y
+                                        rot_y = self.frame_width - center_x
+                                    else:
+                                        rot_x, rot_y = center_x, center_y
+                                    return (rot_x, rot_y)
         # Fall back to feature tracking if enabled - but use smooth interpolation instead of averaging
         if self.feature_tracker.tracking_enabled:
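
As a side note, the rotation mapping used in the full-frame branch can be sanity-checked in isolation. The following is a minimal sketch, not part of the commit: it restates the formulas with the explicit -1 needed for exact pixel indexing (the committed code maps center coordinates, where a one-pixel offset is negligible) and verifies the 90° case against cv2.rotate.

import cv2
import numpy as np

def map_point_to_rotated(x, y, frame_width, frame_height, rotation_angle):
    """Map a point from the unrotated frame into the clockwise-rotated frame."""
    if rotation_angle == 90:
        return frame_height - 1 - y, x
    if rotation_angle == 180:
        return frame_width - 1 - x, frame_height - 1 - y
    if rotation_angle == 270:
        return y, frame_width - 1 - x
    return x, y

# Mark one pixel, rotate the frame, and confirm the mapped coordinate lands on that pixel.
frame = np.zeros((480, 640), dtype=np.uint8)  # height=480, width=640
x, y = 100, 50
frame[y, x] = 255
rotated = cv2.rotate(frame, cv2.ROTATE_90_CLOCKWISE)
rx, ry = map_point_to_rotated(x, y, frame_width=640, frame_height=480, rotation_angle=90)
assert rotated[ry, rx] == 255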