11""" Top level process to generate the funscript actions by tracking selected features in the video """
22
33import cv2
4- import os
54import copy
65import time
76import math
8- import json
97import funscript_editor .utils .logging as logging
10- import platform
118import threading
12- from numpy .core .fromnumeric import take
13- from numpy .lib .function_base import append
14- from numpy .ma .core import array
15-
16- from playsound import playsound
17- from screeninfo import get_monitors
18- from queue import Queue
19- from pynput .keyboard import Key , Listener
9+
2010from dataclasses import dataclass
2111from PyQt5 import QtCore
22- from matplotlib .figure import Figure
23- from datetime import datetime
2412from scipy .interpolate import interp1d
2513
2614from funscript_editor .algorithms .videotracker import StaticVideoTracker
2715from funscript_editor .data .ffmpegstream import FFmpegStream
2816from funscript_editor .data .funscript import Funscript
29- from funscript_editor .utils .config import HYPERPARAMETER , SETTINGS , PROJECTION , NOTIFICATION_SOUND_FILE
30- from funscript_editor .utils .logging import get_logfiles_paths
31- from funscript_editor .definitions import SETTINGS_CONFIG_FILE , HYPERPARAMETER_CONFIG_FILE
17+ from funscript_editor .utils .config import HYPERPARAMETER , SETTINGS , PROJECTION
3218from funscript_editor .algorithms .scenedetect import SceneDetectFromFile , SceneContentDetector , SceneThresholdDetector
3319from funscript_editor .algorithms .signal import Signal
3420from funscript_editor .ui .opencvui import OpenCV_GUI , OpenCV_GUI_Parameters
@@ -54,10 +40,6 @@ class FunscriptGeneratorParameter:
     additional_points: str = "none"
     raw_output: bool = SETTINGS["raw_output"]
     max_playback_fps: int = max((0, int(SETTINGS['max_playback_fps'])))
-    use_zoom: bool = SETTINGS['use_zoom']
-    zoom_factor: float = max((1.0, float(SETTINGS['zoom_factor'])))
-    preview_scaling: float = float(SETTINGS['preview_scaling'])
-    tracking_lost_time: int = max((0, SETTINGS['tracking_lost_time']))
     scene_detector: str = SETTINGS['scene_detector']
 
     # General Hyperparameter
@@ -91,6 +73,7 @@ def merge_score(item: list, number_of_trackers: int, return_queue: mp.Queue = None):
     max_frame_number = max([len(item[i]) for i in range(number_of_trackers)])
     arr = np.ma.empty((max_frame_number,number_of_trackers))
     arr.mask = True
+    item = np.array(item)
     for tracker_number in range(number_of_trackers):
         arr[:item[tracker_number].shape[0],tracker_number] = item[tracker_number]
     if return_queue is not None:
@@ -123,9 +106,9 @@ def __init__(self,
             'roll': []
         }
 
-        self.opencvui = OpenCV_GUI(OpenCV_GUI_Parameters(
-            video_info = self.video_info,
-            skip_frames = self.params.skip_frames,
+        self.ui = OpenCV_GUI(OpenCV_GUI_Parameters(
+            video_info = self.video_info,
+            skip_frames = self.params.skip_frames,
             end_frame_number = self.params.end_frame
         ))
 
@@ -289,7 +272,7 @@ def scale_score(self, status: str, metric: str = 'y') -> None:
         imgMin = cv2.resize(imgMin, None, fx = scale, fy = scale)
         imgMax = cv2.resize(imgMax, None, fx = scale, fy = scale)
 
-        (desired_min, desired_max) = self.opencvui.min_max_selector(
+        (desired_min, desired_max) = self.ui.min_max_selector(
             image_min = imgMin,
             image_max = imgMax,
             info = status,
@@ -409,14 +392,14 @@ def init_trackers(self, ffmpeg_stream: FFmpegStream) -> tuple:
         first_frame = ffmpeg_stream.read()
         preview_frame = first_frame
         for tracker_number in range(self.params.number_of_trackers):
-            bbox_woman = self.opencvui.bbox_selector(preview_frame, "Select {} Feature #{}".format(self.get_target_name(0), tracker_number + 1))
-            preview_frame = self.opencvui.draw_box_to_image(preview_frame, bbox_woman, color = (255,0,255))
+            bbox_woman = self.ui.bbox_selector(preview_frame, "Select {} Feature #{}".format(self.get_target_name(0), tracker_number + 1))
+            preview_frame = self.ui.draw_box_to_image(preview_frame, bbox_woman, color = (255,0,255))
             if self.params.supervised_tracking:
                 while True:
-                    tracking_areas_woman[tracker_number] = self.opencvui.bbox_selector(preview_frame, "Select the Supervised Tracking Area for the {} Feature #{}".format(self.get_target_name(0), tracker_number + 1))
+                    tracking_areas_woman[tracker_number] = self.ui.bbox_selector(preview_frame, "Select the Supervised Tracking Area for the {} Feature #{}".format(self.get_target_name(0), tracker_number + 1))
                     if StaticVideoTracker.is_bbox_in_tracking_area(bbox_woman, tracking_areas_woman[tracker_number]): break
                     self.logger.error("Invalid supervised tracking area selected")
-                preview_frame = self.opencvui.draw_box_to_image(preview_frame, tracking_areas_woman[tracker_number], color = (0,255,0))
+                preview_frame = self.ui.draw_box_to_image(preview_frame, tracking_areas_woman[tracker_number], color = (0,255,0))
                 trackers_woman[tracker_number] = StaticVideoTracker(first_frame, bbox_woman, self.video_info.fps, supervised_tracking_area = tracking_areas_woman[tracker_number])
             else:
                 trackers_woman[tracker_number] = StaticVideoTracker(first_frame, bbox_woman, self.video_info.fps)
@@ -427,14 +410,14 @@ def init_trackers(self, ffmpeg_stream: FFmpegStream) -> tuple:
             bboxes['Woman'][1][tracker_number] = bbox_woman
 
             if self.params.track_men:
-                bbox_men = self.opencvui.bbox_selector(preview_frame, "Select {} Feature #{}".format(self.get_target_name(1), tracker_number + 1))
-                preview_frame = self.opencvui.draw_box_to_image(preview_frame, bbox_men, color = (255,0,255))
+                bbox_men = self.ui.bbox_selector(preview_frame, "Select {} Feature #{}".format(self.get_target_name(1), tracker_number + 1))
+                preview_frame = self.ui.draw_box_to_image(preview_frame, bbox_men, color = (255,0,255))
                 if self.params.supervised_tracking:
                     while True:
-                        tracking_areas_men[tracker_number] = self.opencvui.bbox_selector(preview_frame, "Select the Supervised Tracking Area for the {} Feature #{}".format(self.get_target_name(1), tracker_number + 1))
+                        tracking_areas_men[tracker_number] = self.ui.bbox_selector(preview_frame, "Select the Supervised Tracking Area for the {} Feature #{}".format(self.get_target_name(1), tracker_number + 1))
                         if StaticVideoTracker.is_bbox_in_tracking_area(bbox_men, tracking_areas_men[tracker_number]): break
                         self.logger.error("Invalid supervised tracking area selected")
-                    preview_frame = self.opencvui.draw_box_to_image(preview_frame, tracking_areas_men[tracker_number], color = (255,0,255))
+                    preview_frame = self.ui.draw_box_to_image(preview_frame, tracking_areas_men[tracker_number], color = (255,0,255))
                     trackers_men[tracker_number] = StaticVideoTracker(first_frame, bbox_men, self.video_info.fps, supervised_tracking_area = tracking_areas_men[tracker_number])
                 else:
                     trackers_men[tracker_number] = StaticVideoTracker(first_frame, bbox_men, self.video_info.fps)
@@ -458,7 +441,7 @@ def tracking(self) -> str:
458441 """
459442 first_frame = FFmpegStream .get_frame (self .params .video_path , self .params .start_frame )
460443
461- projection_config = self .opencvui .get_video_projection_config (first_frame , self .params .projection )
444+ projection_config = self .ui .get_video_projection_config (first_frame , self .params .projection )
462445
463446 video = FFmpegStream (
464447 video_path = self .params .video_path ,
@@ -474,11 +457,6 @@ def tracking(self) -> str:
         else:
             cycle_time_in_ms = 0
 
-        tracking_lost_frames = round(self.video_info.fps * self.params.tracking_lost_time / 1000.0)
-        if tracking_lost_frames > 0 and self.params.number_of_trackers > 1:
-            self.logger.warning("Delayed Tracking Lost is currently not implemented for multiple trackers (The feature will be disabled)")
-            tracking_lost_frames = 0
-
         if self.params.scene_detector.upper() == "CONTENT":
             scene_detector = SceneContentDetector(self.params.start_frame, first_frame, self.params.skip_frames, self.video_info.fps)
         elif self.params.scene_detector.upper() == "THRESHOLD":
@@ -539,7 +517,7 @@ def tracking(self) -> str:
             scene_change_quit_flag = False
             if scene_detector.is_scene_change(frame_num - 1 + self.params.start_frame):
                 self.logger.info("Scene change detected, Pause tracking")
-                key = self.opencvui.preview(
+                key = self.ui.preview(
                     last_frame,
                     frame_num + self.params.start_frame,
                     texte = ["Scene change detected, Press 'space' to continue tracking or press 'q' to finalize tracking"],
@@ -550,7 +528,7 @@ def tracking(self) -> str:
-                    if self.opencvui.was_space_pressed() or key == ord(' '):
+                    if self.ui.was_space_pressed() or key == ord(' '):
                         break
 
-                    if self.opencvui.was_key_pressed('q') or key == ord('q'):
+                    if self.ui.was_key_pressed('q') or key == ord('q'):
                         scene_change_quit_flag = True
                         break
 
@@ -565,16 +543,16 @@ def tracking(self) -> str:
                 delete_last_predictions = (self.params.skip_frames + 1)*2
                 break
 
-            key = self.opencvui.preview(
+            key = self.ui.preview(
                 last_frame,
                 frame_num + self.params.start_frame,
                 texte = ["Press 'q' if the tracking point shifts or a video cut occured"],
                 boxes = boxes_to_draw,
             )
 
-            if self.opencvui.was_key_pressed('q') or key == ord('q'):
+            if self.ui.was_key_pressed('q') or key == ord('q'):
                 status = 'Tracking stopped by user'
-                delete_last_predictions = max((1, int((self.opencvui.get_preview_fps()+1)*0.5*HYPERPARAMETER['user_reaction_time_in_milliseconds']/1000.0)))
+                delete_last_predictions = max((1, int((self.ui.get_preview_fps()+1)*0.5*HYPERPARAMETER['user_reaction_time_in_milliseconds']/1000.0)))
                 break
 
             stop_tracking = False
@@ -614,7 +592,7 @@ def tracking(self) -> str:
             if self.params.track_men:
                 trackers_men[i].stop()
 
-        self.opencvui.show_loading_screen()
+        self.ui.show_loading_screen()
         self.logger.info("Raw tracking data: %d Tracking points for %d seconds of the video", \
             len(bboxes["Woman"]), int(len(bboxes["Woman"])*(self.params.skip_frames + 1)/self.video_info.fps))
         video.stop()
@@ -633,7 +611,7 @@ def finished(self, status: str, success: bool) -> None:
             status (str): a process status/error message
             success (bool): True if funscript was generated else False
         """
-        self.opencvui.close()
+        self.ui.close()
         self.funscriptCompleted.emit(self.funscript, status, success)
 
 
@@ -676,7 +654,6 @@ def get_score_with_offset(self, idx_dict: dict, metric: str) -> list:
         return score
 
 
-
     def determine_change_points(self, metric: str) -> dict:
         """ Determine all change points
 
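
Note on the merge_score() change above: the function pads per-tracker score series of different lengths into one numpy masked array before the scores are combined. Below is a minimal, self-contained sketch of that padding pattern; the sample values are invented, and the final per-frame mean is only an assumption about how the merged array is reduced afterwards (that step is not shown in this commit).

import numpy as np

# Two trackers returned a different number of score samples.
scores = [
    np.array([10.0, 20.0, 30.0]),  # tracker 0: 3 frames
    np.array([12.0, 18.0]),        # tracker 1: 2 frames
]
number_of_trackers = len(scores)
max_frame_number = max(len(s) for s in scores)

# Build a fully masked array and unmask only the positions that get filled,
# like the padded array constructed in merge_score().
arr = np.ma.empty((max_frame_number, number_of_trackers))
arr.mask = True
for tracker_number in range(number_of_trackers):
    arr[:scores[tracker_number].shape[0], tracker_number] = scores[tracker_number]

# Masked (missing) entries are ignored by the reduction, so the last frame
# uses only the tracker that still has data.
merged = arr.mean(axis=1)
print(merged)  # [11.0 19.0 30.0]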