From 80e77fb85668479e59c16d638fde79aee32b7cc8 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 24 Jun 2023 18:16:22 -0500 Subject: [PATCH 01/40] Basic functionality --- frigate/app.py | 7 +- frigate/comms/dispatcher.py | 20 +++ frigate/comms/mqtt.py | 5 + frigate/config.py | 26 +++ frigate/object_processing.py | 49 +++++- frigate/ptz.py | 150 ++++++++++++++++- frigate/ptz_autotrack.py | 269 +++++++++++++++++++++++++++++++ frigate/track/norfair_tracker.py | 29 +++- frigate/types.py | 2 + frigate/video.py | 6 +- 10 files changed, 550 insertions(+), 13 deletions(-) create mode 100644 frigate/ptz_autotrack.py diff --git a/frigate/app.py b/frigate/app.py index 9d85f461ef..79e785a13d 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -117,6 +117,11 @@ def init_config(self) -> None: "improve_contrast_enabled": mp.Value( "i", self.config.cameras[camera_name].motion.improve_contrast ), + "ptz_autotracker_enabled": mp.Value( + "i", + self.config.cameras[camera_name].onvif.autotracking.enabled, + ), + "ptz_moving": mp.Value("i", 0), "motion_threshold": mp.Value( "i", self.config.cameras[camera_name].motion.threshold ), @@ -268,7 +273,7 @@ def init_web_server(self) -> None: ) def init_onvif(self) -> None: - self.onvif_controller = OnvifController(self.config) + self.onvif_controller = OnvifController(self.config, self.camera_metrics) def init_dispatcher(self) -> None: comms: list[Communicator] = [] diff --git a/frigate/comms/dispatcher.py b/frigate/comms/dispatcher.py index b7e9e88586..d6b242937e 100644 --- a/frigate/comms/dispatcher.py +++ b/frigate/comms/dispatcher.py @@ -54,6 +54,7 @@ def __init__( self._camera_settings_handlers: dict[str, Callable] = { "detect": self._on_detect_command, "improve_contrast": self._on_motion_improve_contrast_command, + "ptz_autotracker": self._on_ptz_autotracker_command, "motion": self._on_motion_command, "motion_contour_area": self._on_motion_contour_area_command, "motion_threshold": self._on_motion_threshold_command, @@ -158,6 +159,25 @@ def _on_motion_improve_contrast_command( self.publish(f"{camera_name}/improve_contrast/state", payload, retain=True) + def _on_ptz_autotracker_command(self, camera_name: str, payload: str) -> None: + """Callback for ptz_autotracker topic.""" + ptz_autotracker_settings = self.config.cameras[camera_name].onvif.autotracking + + if payload == "ON": + if not self.camera_metrics[camera_name]["ptz_autotracker_enabled"].value: + logger.info(f"Turning on ptz autotracker for {camera_name}") + self.camera_metrics[camera_name]["ptz_autotracker_enabled"].value = True + ptz_autotracker_settings.enabled = True + elif payload == "OFF": + if self.camera_metrics[camera_name]["ptz_autotracker_enabled"].value: + logger.info(f"Turning off ptz autotracker for {camera_name}") + self.camera_metrics[camera_name][ + "ptz_autotracker_enabled" + ].value = False + ptz_autotracker_settings.enabled = False + + self.publish(f"{camera_name}/ptz_autotracker/state", payload, retain=True) + def _on_motion_contour_area_command(self, camera_name: str, payload: int) -> None: """Callback for motion contour topic.""" try: diff --git a/frigate/comms/mqtt.py b/frigate/comms/mqtt.py index 07799f9dab..8a941d7fa0 100644 --- a/frigate/comms/mqtt.py +++ b/frigate/comms/mqtt.py @@ -64,6 +64,11 @@ def _set_initial_topics(self) -> None: "ON" if camera.motion.improve_contrast else "OFF", # type: ignore[union-attr] retain=True, ) + self.publish( + f"{camera_name}/ptz_autotracker/state", + "ON" if camera.onvif.autotracking.enabled 
else "OFF", # type: ignore[union-attr] + retain=True, + ) self.publish( f"{camera_name}/motion_threshold/state", camera.motion.threshold, # type: ignore[union-attr] diff --git a/frigate/config.py b/frigate/config.py index b71ba1907e..14b15e07ba 100644 --- a/frigate/config.py +++ b/frigate/config.py @@ -127,11 +127,37 @@ def validate_password(cls, v, values): return v +class PtzAutotrackConfig(FrigateBaseModel): + enabled: bool = Field(default=False, title="Enable PTZ auto tracking.") + motion_estimator: bool = Field(default=False, title="Use Norfair motion estimator.") + track: List[str] = Field(default=DEFAULT_TRACKED_OBJECTS, title="Objects to track.") + required_zones: List[str] = Field( + default_factory=list, + title="List of required zones to be entered in order to begin autotracking.", + ) + size_ratio: float = Field( + default=0.5, + title="Target ratio of tracked object to field of view (0.2-0.9).", + ge=0.2, + le=0.9, + ) + return_preset: Optional[str] = Field( + title="Name of camera preset to return to when object tracking is over." + ) + timeout: int = Field( + default=5, title="Seconds to delay before returning to preset." + ) + + class OnvifConfig(FrigateBaseModel): host: str = Field(default="", title="Onvif Host") port: int = Field(default=8000, title="Onvif Port") user: Optional[str] = Field(title="Onvif Username") password: Optional[str] = Field(title="Onvif Password") + autotracking: PtzAutotrackConfig = Field( + default_factory=PtzAutotrackConfig, + title="PTZ auto tracking config.", + ) class RetainModeEnum(str, Enum): diff --git a/frigate/object_processing.py b/frigate/object_processing.py index 6d31c3cddd..3ddb08e977 100644 --- a/frigate/object_processing.py +++ b/frigate/object_processing.py @@ -22,6 +22,7 @@ ) from frigate.const import CLIPS_DIR from frigate.events.maintainer import EventTypeEnum +from frigate.ptz_autotrack import PtzAutoTrackerThread from frigate.util import ( SharedMemoryFrameManager, area, @@ -143,6 +144,7 @@ def compute_score(self): def update(self, current_frame_time, obj_data): thumb_update = False significant_change = False + autotracker_update = False # if the object is not in the current frame, add a 0.0 to the score history if obj_data["frame_time"] != current_frame_time: self.score_history.append(0.0) @@ -237,9 +239,13 @@ def update(self, current_frame_time, obj_data): if self.obj_data["frame_time"] - self.previous["frame_time"] > 60: significant_change = True + # update autotrack every second? or fps? 
+ if self.obj_data["frame_time"] - self.previous["frame_time"] > 1: + autotracker_update = True + self.obj_data.update(obj_data) self.current_zones = current_zones - return (thumb_update, significant_change) + return (thumb_update, significant_change, autotracker_update) def to_dict(self, include_thumbnail: bool = False): (self.thumbnail_data["frame_time"] if self.thumbnail_data is not None else 0.0) @@ -438,7 +444,11 @@ def zone_filtered(obj: TrackedObject, object_config): # Maintains the state of a camera class CameraState: def __init__( - self, name, config: FrigateConfig, frame_manager: SharedMemoryFrameManager + self, + name, + config: FrigateConfig, + frame_manager: SharedMemoryFrameManager, + ptz_autotracker_thread: PtzAutoTrackerThread, ): self.name = name self.config = config @@ -456,6 +466,7 @@ def __init__( self.regions = [] self.previous_frame_id = None self.callbacks = defaultdict(list) + self.ptz_autotracker_thread = ptz_autotracker_thread def get_current_frame(self, draw_options={}): with self.current_frame_lock: @@ -477,6 +488,20 @@ def get_current_frame(self, draw_options={}): thickness = 1 color = (255, 0, 0) + # draw thicker box around ptz autotracked object + if ( + self.ptz_autotracker_thread.ptz_autotracker.tracked_object[ + self.name + ] + is not None + and obj["id"] + == self.ptz_autotracker_thread.ptz_autotracker.tracked_object[ + self.name + ].obj_data["id"] + ): + thickness = 5 + color = self.config.model.colormap[obj["label"]] + # draw the bounding boxes on the frame box = obj["box"] draw_box_with_label( @@ -590,10 +615,14 @@ def update(self, frame_time, current_detections, motion_boxes, regions): for id in updated_ids: updated_obj = tracked_objects[id] - thumb_update, significant_update = updated_obj.update( + thumb_update, significant_update, autotracker_update = updated_obj.update( frame_time, current_detections[id] ) + if autotracker_update: + for c in self.callbacks["autotrack"]: + c(self.name, updated_obj, frame_time) + if thumb_update: # ensure this frame is stored in the cache if ( @@ -749,6 +778,9 @@ def __init__( self.camera_states: dict[str, CameraState] = {} self.frame_manager = SharedMemoryFrameManager() self.last_motion_detected: dict[str, float] = {} + self.ptz_autotracker_thread = PtzAutoTrackerThread( + config, dispatcher.onvif, dispatcher.camera_metrics, self.stop_event + ) def start(camera, obj: TrackedObject, current_frame_time): self.event_queue.put( @@ -775,6 +807,9 @@ def update(camera, obj: TrackedObject, current_frame_time): ) ) + def autotrack(camera, obj: TrackedObject, current_frame_time): + self.ptz_autotracker_thread.ptz_autotracker.autotrack_object(camera, obj) + def end(camera, obj: TrackedObject, current_frame_time): # populate has_snapshot obj.has_snapshot = self.should_save_snapshot(camera, obj) @@ -823,6 +858,7 @@ def end(camera, obj: TrackedObject, current_frame_time): "type": "end", } self.dispatcher.publish("events", json.dumps(message), retain=False) + self.ptz_autotracker_thread.ptz_autotracker.end_object(camera, obj) self.event_queue.put( ( @@ -859,8 +895,11 @@ def object_status(camera, object_name, status): self.dispatcher.publish(f"{camera}/{object_name}", status, retain=False) for camera in self.config.cameras.keys(): - camera_state = CameraState(camera, self.config, self.frame_manager) + camera_state = CameraState( + camera, self.config, self.frame_manager, self.ptz_autotracker_thread + ) camera_state.on("start", start) + camera_state.on("autotrack", autotrack) camera_state.on("update", update) camera_state.on("end", 
end) camera_state.on("snapshot", snapshot) @@ -1002,6 +1041,7 @@ def get_current_frame_time(self, camera) -> int: return self.camera_states[camera].current_frame_time def run(self): + self.ptz_autotracker_thread.start() while not self.stop_event.is_set(): try: ( @@ -1122,4 +1162,5 @@ def run(self): event_id, camera = self.event_processed_queue.get() self.camera_states[camera].finished(event_id) + self.ptz_autotracker_thread.join() logger.info("Exiting object processor...") diff --git a/frigate/ptz.py b/frigate/ptz.py index 385a230bc9..f7971a4551 100644 --- a/frigate/ptz.py +++ b/frigate/ptz.py @@ -4,9 +4,11 @@ import site from enum import Enum +import numpy from onvif import ONVIFCamera, ONVIFError from frigate.config import FrigateConfig +from frigate.types import CameraMetricsTypes logger = logging.getLogger(__name__) @@ -26,8 +28,11 @@ class OnvifCommandEnum(str, Enum): class OnvifController: - def __init__(self, config: FrigateConfig) -> None: + def __init__( + self, config: FrigateConfig, camera_metrics: dict[str, CameraMetricsTypes] + ) -> None: self.cams: dict[str, ONVIFCamera] = {} + self.camera_metrics = camera_metrics for cam_name, cam in config.cameras.items(): if not cam.enabled: @@ -68,12 +73,51 @@ def _init_onvif(self, camera_name: str) -> bool: ptz = onvif.create_ptz_service() request = ptz.create_type("GetConfigurationOptions") request.ConfigurationToken = profile.PTZConfiguration.token + ptz_config = ptz.GetConfigurationOptions(request) + + fov_space_id = next( + ( + i + for i, space in enumerate( + ptz_config.Spaces.RelativePanTiltTranslationSpace + ) + if "TranslationSpaceFov" in space["URI"] + ), + None, + ) - # setup moving request + # setup continuous moving request move_request = ptz.create_type("ContinuousMove") move_request.ProfileToken = profile.token self.cams[camera_name]["move_request"] = move_request + # setup relative moving request for autotracking + move_request = ptz.create_type("RelativeMove") + move_request.ProfileToken = profile.token + if move_request.Translation is None and fov_space_id is not None: + move_request.Translation = ptz.GetStatus( + {"ProfileToken": profile.token} + ).Position + move_request.Translation.PanTilt.space = ptz_config["Spaces"][ + "RelativePanTiltTranslationSpace" + ][fov_space_id]["URI"] + move_request.Translation.Zoom.space = ptz_config["Spaces"][ + "RelativeZoomTranslationSpace" + ][0]["URI"] + if move_request.Speed is None: + move_request.Speed = ptz.GetStatus({"ProfileToken": profile.token}).Position + self.cams[camera_name]["relative_move_request"] = move_request + + # setup relative moving request for autotracking + move_request = ptz.create_type("AbsoluteMove") + move_request.ProfileToken = profile.token + self.cams[camera_name]["absolute_move_request"] = move_request + + # status request for autotracking + status_request = ptz.create_type("GetStatus") + status_request.ProfileToken = profile.token + self.cams[camera_name]["status_request"] = status_request + # setup existing presets try: presets: list[dict] = ptz.GetPresets({"ProfileToken": profile.token}) @@ -94,6 +138,20 @@ def _init_onvif(self, camera_name: str) -> bool: if ptz_config.Spaces and ptz_config.Spaces.ContinuousZoomVelocitySpace: supported_features.append("zoom") + if ptz_config.Spaces and ptz_config.Spaces.RelativePanTiltTranslationSpace: + supported_features.append("pt-r") + + if ptz_config.Spaces and ptz_config.Spaces.RelativeZoomTranslationSpace: + supported_features.append("zoom-r") + + if fov_space_id is not None: + 
supported_features.append("pt-r-fov") + self.cams[camera_name][ + "relative_fov_range" + ] = ptz_config.Spaces.RelativePanTiltTranslationSpace[fov_space_id] + + self.cams[camera_name]["relative_fov_supported"] = fov_space_id is not None + self.cams[camera_name]["features"] = supported_features self.cams[camera_name]["init"] = True @@ -143,12 +201,74 @@ def _move(self, camera_name: str, command: OnvifCommandEnum) -> None: onvif.get_service("ptz").ContinuousMove(move_request) + def _move_relative(self, camera_name: str, pan, tilt, speed) -> None: + if not self.cams[camera_name]["relative_fov_supported"]: + logger.error(f"{camera_name} does not support ONVIF RelativeMove (FOV).") + return + + logger.debug(f"{camera_name} called RelativeMove: pan: {pan} tilt: {tilt}") + self.get_camera_status(camera_name) + + if self.cams[camera_name]["active"]: + logger.warning( + f"{camera_name} is already performing an action, not moving..." + ) + return + + self.cams[camera_name]["active"] = True + self.camera_metrics[camera_name]["ptz_moving"].value = True + onvif: ONVIFCamera = self.cams[camera_name]["onvif"] + move_request = self.cams[camera_name]["relative_move_request"] + + # function takes in -1 to 1 for pan and tilt, interpolate to the values of the camera. + # The onvif spec says this can report as +INF and -INF, so this may need to be modified + pan = numpy.interp( + pan, + [-1, 1], + [ + self.cams[camera_name]["relative_fov_range"]["XRange"]["Min"], + self.cams[camera_name]["relative_fov_range"]["XRange"]["Max"], + ], + ) + tilt = numpy.interp( + tilt, + [-1, 1], + [ + self.cams[camera_name]["relative_fov_range"]["YRange"]["Min"], + self.cams[camera_name]["relative_fov_range"]["YRange"]["Max"], + ], + ) + + move_request.Speed = { + "PanTilt": { + "x": speed, + "y": speed, + }, + "Zoom": 0, + } + + # move pan and tilt separately + move_request.Translation.PanTilt.x = pan + move_request.Translation.PanTilt.y = 0 + move_request.Translation.Zoom.x = 0 + + onvif.get_service("ptz").RelativeMove(move_request) + + move_request.Translation.PanTilt.x = 0 + move_request.Translation.PanTilt.y = tilt + move_request.Translation.Zoom.x = 0 + + onvif.get_service("ptz").RelativeMove(move_request) + + self.cams[camera_name]["active"] = False + def _move_to_preset(self, camera_name: str, preset: str) -> None: if preset not in self.cams[camera_name]["presets"]: logger.error(f"{preset} is not a valid preset for {camera_name}") return self.cams[camera_name]["active"] = True + self.camera_metrics[camera_name]["ptz_moving"].value = True move_request = self.cams[camera_name]["move_request"] onvif: ONVIFCamera = self.cams[camera_name]["onvif"] preset_token = self.cams[camera_name]["presets"][preset] @@ -158,6 +278,7 @@ def _move_to_preset(self, camera_name: str, preset: str) -> None: "PresetToken": preset_token, } ) + self.camera_metrics[camera_name]["ptz_moving"].value = False self.cams[camera_name]["active"] = False def _zoom(self, camera_name: str, command: OnvifCommandEnum) -> None: @@ -216,3 +337,28 @@ def get_camera_info(self, camera_name: str) -> dict[str, any]: "features": self.cams[camera_name]["features"], "presets": list(self.cams[camera_name]["presets"].keys()), } + + def get_camera_status(self, camera_name: str) -> dict[str, any]: + if camera_name not in self.cams.keys(): + logger.error(f"Onvif is not setup for {camera_name}") + return {} + + if not self.cams[camera_name]["init"]: + self._init_onvif(camera_name) + + onvif: ONVIFCamera = self.cams[camera_name]["onvif"] + status_request = 
self.cams[camera_name]["status_request"] + status = onvif.get_service("ptz").GetStatus(status_request) + + self.cams[camera_name]["active"] = status.MoveStatus.PanTilt != "IDLE" + self.camera_metrics[camera_name]["ptz_moving"].value = ( + status.MoveStatus.PanTilt != "IDLE" + ) + + return { + "pan": status.Position.PanTilt.x, + "tilt": status.Position.PanTilt.y, + "zoom": status.Position.Zoom.x, + "pantilt_moving": status.MoveStatus.PanTilt, + "zoom_moving": status.MoveStatus.Zoom, + } diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py new file mode 100644 index 0000000000..5761aa4752 --- /dev/null +++ b/frigate/ptz_autotrack.py @@ -0,0 +1,269 @@ +"""Automatically pan, tilt, and zoom on detected objects via onvif.""" + +import logging +import threading +import time +from multiprocessing.synchronize import Event as MpEvent + +import cv2 +import numpy as np +from norfair.camera_motion import MotionEstimator, TranslationTransformationGetter + +from frigate.config import CameraConfig, FrigateConfig +from frigate.ptz import OnvifController +from frigate.types import CameraMetricsTypes +from frigate.util import SharedMemoryFrameManager, intersection_over_union + +logger = logging.getLogger(__name__) + + +class PtzMotionEstimator: + def __init__(self, config: CameraConfig, ptz_moving) -> None: + self.frame_manager = SharedMemoryFrameManager() + # homography is nice (zooming) but slow, translation is pan/tilt only but fast. + self.norfair_motion_estimator = MotionEstimator( + transformations_getter=TranslationTransformationGetter() + ) + self.camera_config = config + self.coord_transformations = None + self.ptz_moving = ptz_moving + logger.debug(f"Motion estimator init for cam: {config.name}") + + def motion_estimator(self, detections, frame_time, camera_name): + if self.camera_config.onvif.autotracking.enabled and self.ptz_moving.value: + logger.debug(f"Motion estimator running for {camera_name}") + + frame_id = f"{camera_name}{frame_time}" + frame = self.frame_manager.get(frame_id, self.camera_config.frame_shape) + + # mask out detections for better motion estimation + mask = np.ones(frame.shape[:2], frame.dtype) + + detection_boxes = [x[2] for x in detections] + for detection in detection_boxes: + x1, y1, x2, y2 = detection + mask[y1:y2, x1:x2] = 0 + + # merge camera config motion mask with detections. 
Norfair function needs 0,1 mask + mask = np.bitwise_and(mask, self.camera_config.motion.mask).clip(max=1) + + # Norfair estimator function needs color + frame = cv2.cvtColor(frame, cv2.COLOR_GRAY2BGRA) + + self.coord_transformations = self.norfair_motion_estimator.update( + frame, mask + ) + + self.frame_manager.close(frame_id) + + return self.coord_transformations + + return None + + +class PtzAutoTrackerThread(threading.Thread): + def __init__( + self, + config: FrigateConfig, + onvif: OnvifController, + camera_metrics: CameraMetricsTypes, + stop_event: MpEvent, + ) -> None: + threading.Thread.__init__(self) + self.name = "frigate_ptz_autotracker" + self.ptz_autotracker = PtzAutoTracker(config, onvif, camera_metrics) + self.stop_event = stop_event + self.config = config + + def run(self): + while not self.stop_event.is_set(): + for camera_name, cam in self.config.cameras.items(): + if cam.onvif.autotracking.enabled: + self.ptz_autotracker.camera_maintenance(camera_name) + time.sleep(1) + logger.info("Exiting autotracker...") + + +class PtzAutoTracker: + def __init__( + self, + config: FrigateConfig, + onvif: OnvifController, + camera_metrics: CameraMetricsTypes, + ) -> None: + self.config = config + self.onvif = onvif + self.camera_metrics = camera_metrics + self.tracked_object: dict[str, object] = {} + self.tracked_object_previous: dict[str, object] = {} + self.object_types = {} + self.required_zones = {} + + # if cam is set to autotrack, onvif should be set up + for camera_name, cam in self.config.cameras.items(): + if cam.onvif.autotracking.enabled: + logger.debug(f"Autotracker init for cam: {camera_name}") + + self.object_types[camera_name] = cam.onvif.autotracking.track + self.required_zones[camera_name] = cam.onvif.autotracking.required_zones + + self.tracked_object[camera_name] = None + self.tracked_object_previous[camera_name] = None + + if not onvif.cams[camera_name]["init"]: + if not self.onvif._init_onvif(camera_name): + return + if not onvif.cams[camera_name]["relative_fov_supported"]: + cam.onvif.autotracking.enabled = False + self.camera_metrics[camera_name][ + "ptz_autotracker_enabled" + ].value = False + logger.warning( + f"Disabling autotracking for {camera_name}: FOV relative movement not supported" + ) + + def _autotrack_move_ptz(self, camera, obj): + camera_config = self.config.cameras[camera] + + # # frame width and height + camera_width = camera_config.frame_shape[1] + camera_height = camera_config.frame_shape[0] + + # Normalize coordinates. top right of the fov is (1,1). + pan = 0.5 - (obj.obj_data["centroid"][0] / camera_width) + tilt = 0.5 - (obj.obj_data["centroid"][1] / camera_height) + + # Calculate zoom amount + size_ratio = camera_config.onvif.autotracking.size_ratio + int(size_ratio * camera_width) + int(size_ratio * camera_height) + + # ideas: check object velocity for camera speed? 
+ self.onvif._move_relative(camera, -pan, tilt, 1) + + def autotrack_object(self, camera, obj): + camera_config = self.config.cameras[camera] + + # check if ptz is moving + self.onvif.get_camera_status(camera) + + if camera_config.onvif.autotracking.enabled: + # either this is a brand new object that's on our camera, has our label, entered the zone, is not a false positive, and is not initially motionless + # or one we're already tracking, which assumes all those things are already true + if ( + # new object + self.tracked_object[camera] is None + and obj.camera == camera + and obj.obj_data["label"] in self.object_types[camera] + and set(obj.entered_zones) & set(self.required_zones[camera]) + and not obj.previous["false_positive"] + and not obj.false_positive + and self.tracked_object_previous[camera] is None + ): + logger.debug(f"Autotrack: New object: {obj.to_dict()}") + self.tracked_object[camera] = obj + self.tracked_object_previous[camera] = obj + self._autotrack_move_ptz(camera, obj) + + return + + if ( + # already tracking an object + self.tracked_object[camera] is not None + and self.tracked_object_previous[camera] is not None + and obj.obj_data["id"] == self.tracked_object[camera].obj_data["id"] + and obj.obj_data["frame_time"] + != self.tracked_object_previous[camera].obj_data["frame_time"] + ): + # don't move the ptz if we're relatively close to the existing box + # should we use iou or euclidean distance or both? + # distance = math.sqrt((obj.obj_data["centroid"][0] - camera_width/2)**2 + (obj.obj_data["centroid"][1] - obj.camera_height/2)**2) + # if distance <= (self.camera_width * .15) or distance <= (self.camera_height * .15) + if ( + intersection_over_union( + self.tracked_object_previous[camera].obj_data["box"], + obj.obj_data["box"], + ) + < 0.05 + ): + logger.debug( + f"Autotrack: Existing object (do NOT move ptz): {obj.to_dict()}" + ) + return + + logger.debug(f"Autotrack: Existing object (move ptz): {obj.to_dict()}") + self.tracked_object_previous[camera] = obj + self._autotrack_move_ptz(camera, obj) + + return + + if ( + # The tracker lost an object, so let's check the previous object's region and compare it with the incoming object + # If it's within bounds, start tracking that object. + # Should we check region (maybe too broad) or expand the previous object's box a bit and check that? 
+ self.tracked_object[camera] is None + and obj.camera == camera + and obj.obj_data["label"] in self.object_types[camera] + and not obj.previous["false_positive"] + and not obj.false_positive + and obj.obj_data["motionless_count"] == 0 + and self.tracked_object_previous[camera] is not None + ): + if ( + intersection_over_union( + self.tracked_object_previous[camera].obj_data["region"], + obj.obj_data["box"], + ) + < 0.2 + ): + logger.debug(f"Autotrack: Reacquired object: {obj.to_dict()}") + self.tracked_object[camera] = obj + self.tracked_object_previous[camera] = obj + self._autotrack_move_ptz(camera, obj) + + return + + def end_object(self, camera, obj): + if self.config.cameras[camera].onvif.autotracking.enabled: + if ( + self.tracked_object[camera] is not None + and obj.obj_data["id"] == self.tracked_object[camera].obj_data["id"] + ): + logger.debug(f"Autotrack: End object: {obj.to_dict()}") + self.tracked_object[camera] = None + self.onvif.get_camera_status(camera) + + def camera_maintenance(self, camera): + # calls get_camera_status to check/update ptz movement + # returns camera to preset after timeout when tracking is over + autotracker_config = self.config.cameras[camera].onvif.autotracking + + if autotracker_config.enabled: + # regularly update camera status + if self.camera_metrics[camera]["ptz_moving"].value: + self.onvif.get_camera_status(camera) + + # return to preset if tracking is over + if ( + self.tracked_object[camera] is None + and self.tracked_object_previous[camera] is not None + and ( + # might want to use a different timestamp here? + time.time() - self.tracked_object_previous[camera].last_published + > autotracker_config.timeout + ) + and autotracker_config.return_preset + and not self.camera_metrics[camera]["ptz_moving"].value + ): + logger.debug( + f"Autotrack: Time is {time.time()}, returning to preset: {autotracker_config.return_preset}" + ) + self.onvif._move_to_preset( + camera, + autotracker_config.return_preset.lower(), + ) + self.tracked_object_previous[camera] = None + + def disable_autotracking(self, camera): + # need to call this if autotracking is disabled by mqtt?? 
+ self.tracked_object[camera] = None diff --git a/frigate/track/norfair_tracker.py b/frigate/track/norfair_tracker.py index b0c4621b40..48cc029ee3 100644 --- a/frigate/track/norfair_tracker.py +++ b/frigate/track/norfair_tracker.py @@ -5,7 +5,8 @@ from norfair import Detection, Drawable, Tracker, draw_boxes from norfair.drawing.drawer import Drawer -from frigate.config import DetectConfig +from frigate.config import CameraConfig +from frigate.ptz_autotrack import PtzMotionEstimator from frigate.track import ObjectTracker from frigate.util import intersection_over_union @@ -54,12 +55,16 @@ def frigate_distance(detection: Detection, tracked_object) -> float: class NorfairTracker(ObjectTracker): - def __init__(self, config: DetectConfig): + def __init__(self, config: CameraConfig, ptz_autotracker_enabled, ptz_moving): self.tracked_objects = {} self.disappeared = {} self.positions = {} - self.max_disappeared = config.max_disappeared - self.detect_config = config + self.max_disappeared = config.detect.max_disappeared + self.camera_config = config + self.detect_config = config.detect + self.ptz_autotracker_enabled = ptz_autotracker_enabled.value + self.ptz_moving = ptz_moving + self.camera_name = config.name self.track_id_map = {} # TODO: could also initialize a tracker per object class if there # was a good reason to have different distance calculations @@ -69,6 +74,8 @@ def __init__(self, config: DetectConfig): initialization_delay=0, hit_counter_max=self.max_disappeared, ) + if self.ptz_autotracker_enabled: + self.ptz_motion_estimator = PtzMotionEstimator(config, self.ptz_moving) def register(self, track_id, obj): rand_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=6)) @@ -230,7 +237,19 @@ def match_and_update(self, frame_time, detections): ) ) - tracked_objects = self.tracker.update(detections=norfair_detections) + coord_transformations = None + + if ( + self.ptz_autotracker_enabled + and self.camera_config.onvif.autotracking.motion_estimator + ): + coord_transformations = self.ptz_motion_estimator.motion_estimator( + detections, frame_time, self.camera_name + ) + + tracked_objects = self.tracker.update( + detections=norfair_detections, coord_transformations=coord_transformations + ) # update or create new tracks active_ids = [] diff --git a/frigate/types.py b/frigate/types.py index 8c3e546541..29991552f1 100644 --- a/frigate/types.py +++ b/frigate/types.py @@ -16,6 +16,8 @@ class CameraMetricsTypes(TypedDict): frame_queue: Queue motion_enabled: Synchronized improve_contrast_enabled: Synchronized + ptz_autotracker_enabled: Synchronized + ptz_moving: Synchronized motion_threshold: Synchronized motion_contour_area: Synchronized process: Optional[Process] diff --git a/frigate/video.py b/frigate/video.py index c02ad15c48..4dfcbedabd 100755 --- a/frigate/video.py +++ b/frigate/video.py @@ -457,6 +457,8 @@ def receiveSignal(signalNumber, frame): detection_enabled = process_info["detection_enabled"] motion_enabled = process_info["motion_enabled"] improve_contrast_enabled = process_info["improve_contrast_enabled"] + ptz_autotracker_enabled = process_info["ptz_autotracker_enabled"] + ptz_moving = process_info["ptz_moving"] motion_threshold = process_info["motion_threshold"] motion_contour_area = process_info["motion_contour_area"] @@ -476,7 +478,7 @@ def receiveSignal(signalNumber, frame): name, labelmap, detection_queue, result_connection, model_config, stop_event ) - object_tracker = NorfairTracker(config.detect) + object_tracker = NorfairTracker(config, 
ptz_autotracker_enabled, ptz_moving) frame_manager = SharedMemoryFrameManager() @@ -497,6 +499,7 @@ def receiveSignal(signalNumber, frame): detection_enabled, motion_enabled, stop_event, + ptz_moving, ) logger.info(f"{name}: exiting subprocess") @@ -721,6 +724,7 @@ def process_frames( detection_enabled: mp.Value, motion_enabled: mp.Value, stop_event, + ptz_moving: mp.Value, exit_on_empty: bool = False, ): # attribute labels are not tracked and are not assigned regions From 3171801607cbbbe58cda7b637f2a3917d919959a Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Thu, 29 Jun 2023 11:05:14 -0500 Subject: [PATCH 02/40] Threaded motion estimator --- frigate/ptz_autotrack.py | 19 +++++++++++++++++++ frigate/track/norfair_tracker.py | 16 +++++++++++----- frigate/video.py | 4 +++- 3 files changed, 33 insertions(+), 6 deletions(-) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index 5761aa4752..3e3bf29d6c 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -17,6 +17,21 @@ logger = logging.getLogger(__name__) +class PtzMotionEstimatorThread(threading.Thread): + def __init__(self, config: CameraConfig, ptz_moving, stop_event) -> None: + threading.Thread.__init__(self) + self.name = "frigate_ptz_motion_estimator" + self.ptz_moving = ptz_moving + self.config = config + self.stop_event = stop_event + self.ptz_motion_estimator = PtzMotionEstimator(self.config, self.ptz_moving) + + def run(self): + while not self.stop_event.is_set(): + pass + logger.info("Exiting motion estimator...") + + class PtzMotionEstimator: def __init__(self, config: CameraConfig, ptz_moving) -> None: self.frame_manager = SharedMemoryFrameManager() @@ -56,6 +71,10 @@ def motion_estimator(self, detections, frame_time, camera_name): self.frame_manager.close(frame_id) + logger.debug( + f"frame time: {frame_time}, coord_transformations: {vars(self.coord_transformations)}" + ) + return self.coord_transformations return None diff --git a/frigate/track/norfair_tracker.py b/frigate/track/norfair_tracker.py index 48cc029ee3..2683b05d14 100644 --- a/frigate/track/norfair_tracker.py +++ b/frigate/track/norfair_tracker.py @@ -6,7 +6,7 @@ from norfair.drawing.drawer import Drawer from frigate.config import CameraConfig -from frigate.ptz_autotrack import PtzMotionEstimator +from frigate.ptz_autotrack import PtzMotionEstimatorThread from frigate.track import ObjectTracker from frigate.util import intersection_over_union @@ -55,7 +55,9 @@ def frigate_distance(detection: Detection, tracked_object) -> float: class NorfairTracker(ObjectTracker): - def __init__(self, config: CameraConfig, ptz_autotracker_enabled, ptz_moving): + def __init__( + self, config: CameraConfig, ptz_autotracker_enabled, ptz_moving, stop_event + ): self.tracked_objects = {} self.disappeared = {} self.positions = {} @@ -75,7 +77,9 @@ def __init__(self, config: CameraConfig, ptz_autotracker_enabled, ptz_moving): hit_counter_max=self.max_disappeared, ) if self.ptz_autotracker_enabled: - self.ptz_motion_estimator = PtzMotionEstimator(config, self.ptz_moving) + self.ptz_motion_estimator_thread = PtzMotionEstimatorThread( + config, self.ptz_moving, stop_event + ) def register(self, track_id, obj): rand_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=6)) @@ -243,8 +247,10 @@ def match_and_update(self, frame_time, detections): self.ptz_autotracker_enabled and self.camera_config.onvif.autotracking.motion_estimator ): - coord_transformations = 
self.ptz_motion_estimator.motion_estimator( - detections, frame_time, self.camera_name + coord_transformations = ( + self.ptz_motion_estimator_thread.ptz_motion_estimator.motion_estimator( + detections, frame_time, self.camera_name + ) ) tracked_objects = self.tracker.update( diff --git a/frigate/video.py b/frigate/video.py index 4dfcbedabd..2eaa439c2f 100755 --- a/frigate/video.py +++ b/frigate/video.py @@ -478,7 +478,9 @@ def receiveSignal(signalNumber, frame): name, labelmap, detection_queue, result_connection, model_config, stop_event ) - object_tracker = NorfairTracker(config, ptz_autotracker_enabled, ptz_moving) + object_tracker = NorfairTracker( + config, ptz_autotracker_enabled, ptz_moving, stop_event + ) frame_manager = SharedMemoryFrameManager() From f26093dc4a21b4709ea3b5fae42026bad142d9b2 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Thu, 29 Jun 2023 13:20:25 -0500 Subject: [PATCH 03/40] Revert "Threaded motion estimator" This reverts commit 3171801607cbbbe58cda7b637f2a3917d919959a. --- frigate/ptz_autotrack.py | 19 ------------------- frigate/track/norfair_tracker.py | 16 +++++----------- frigate/video.py | 4 +--- 3 files changed, 6 insertions(+), 33 deletions(-) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index 3e3bf29d6c..5761aa4752 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -17,21 +17,6 @@ logger = logging.getLogger(__name__) -class PtzMotionEstimatorThread(threading.Thread): - def __init__(self, config: CameraConfig, ptz_moving, stop_event) -> None: - threading.Thread.__init__(self) - self.name = "frigate_ptz_motion_estimator" - self.ptz_moving = ptz_moving - self.config = config - self.stop_event = stop_event - self.ptz_motion_estimator = PtzMotionEstimator(self.config, self.ptz_moving) - - def run(self): - while not self.stop_event.is_set(): - pass - logger.info("Exiting motion estimator...") - - class PtzMotionEstimator: def __init__(self, config: CameraConfig, ptz_moving) -> None: self.frame_manager = SharedMemoryFrameManager() @@ -71,10 +56,6 @@ def motion_estimator(self, detections, frame_time, camera_name): self.frame_manager.close(frame_id) - logger.debug( - f"frame time: {frame_time}, coord_transformations: {vars(self.coord_transformations)}" - ) - return self.coord_transformations return None diff --git a/frigate/track/norfair_tracker.py b/frigate/track/norfair_tracker.py index 2683b05d14..48cc029ee3 100644 --- a/frigate/track/norfair_tracker.py +++ b/frigate/track/norfair_tracker.py @@ -6,7 +6,7 @@ from norfair.drawing.drawer import Drawer from frigate.config import CameraConfig -from frigate.ptz_autotrack import PtzMotionEstimatorThread +from frigate.ptz_autotrack import PtzMotionEstimator from frigate.track import ObjectTracker from frigate.util import intersection_over_union @@ -55,9 +55,7 @@ def frigate_distance(detection: Detection, tracked_object) -> float: class NorfairTracker(ObjectTracker): - def __init__( - self, config: CameraConfig, ptz_autotracker_enabled, ptz_moving, stop_event - ): + def __init__(self, config: CameraConfig, ptz_autotracker_enabled, ptz_moving): self.tracked_objects = {} self.disappeared = {} self.positions = {} @@ -77,9 +75,7 @@ def __init__( hit_counter_max=self.max_disappeared, ) if self.ptz_autotracker_enabled: - self.ptz_motion_estimator_thread = PtzMotionEstimatorThread( - config, self.ptz_moving, stop_event - ) + self.ptz_motion_estimator = PtzMotionEstimator(config, self.ptz_moving) def register(self, track_id, obj): rand_id 
= "".join(random.choices(string.ascii_lowercase + string.digits, k=6)) @@ -247,10 +243,8 @@ def match_and_update(self, frame_time, detections): self.ptz_autotracker_enabled and self.camera_config.onvif.autotracking.motion_estimator ): - coord_transformations = ( - self.ptz_motion_estimator_thread.ptz_motion_estimator.motion_estimator( - detections, frame_time, self.camera_name - ) + coord_transformations = self.ptz_motion_estimator.motion_estimator( + detections, frame_time, self.camera_name ) tracked_objects = self.tracker.update( diff --git a/frigate/video.py b/frigate/video.py index 2eaa439c2f..4dfcbedabd 100755 --- a/frigate/video.py +++ b/frigate/video.py @@ -478,9 +478,7 @@ def receiveSignal(signalNumber, frame): name, labelmap, detection_queue, result_connection, model_config, stop_event ) - object_tracker = NorfairTracker( - config, ptz_autotracker_enabled, ptz_moving, stop_event - ) + object_tracker = NorfairTracker(config, ptz_autotracker_enabled, ptz_moving) frame_manager = SharedMemoryFrameManager() From db9a408edf88ed0905e28f7ed674bdf86a341f89 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Fri, 30 Jun 2023 12:36:46 -0500 Subject: [PATCH 04/40] Don't detect motion when ptz is moving --- frigate/video.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/frigate/video.py b/frigate/video.py index 4dfcbedabd..2d4e7fd5b2 100755 --- a/frigate/video.py +++ b/frigate/video.py @@ -767,7 +767,11 @@ def process_frames( continue # look for motion if enabled - motion_boxes = motion_detector.detect(frame) if motion_enabled.value else [] + motion_boxes = ( + motion_detector.detect(frame) + if motion_enabled.value or ptz_moving.value + else [] + ) regions = [] consolidated_detections = [] From 1a78230eae0ecfaa42dab1c511ef68e98283b916 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Fri, 30 Jun 2023 12:38:44 -0500 Subject: [PATCH 05/40] fix motion logic --- frigate/video.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frigate/video.py b/frigate/video.py index 2d4e7fd5b2..597e90da93 100755 --- a/frigate/video.py +++ b/frigate/video.py @@ -769,7 +769,7 @@ def process_frames( # look for motion if enabled motion_boxes = ( motion_detector.detect(frame) - if motion_enabled.value or ptz_moving.value + if motion_enabled.value and not ptz_moving.value else [] ) From 27eb2a6088765458039c740a6502afae1e21954a Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Fri, 30 Jun 2023 12:51:50 -0500 Subject: [PATCH 06/40] fix mypy error --- frigate/comms/mqtt.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frigate/comms/mqtt.py b/frigate/comms/mqtt.py index 8a941d7fa0..287232aec3 100644 --- a/frigate/comms/mqtt.py +++ b/frigate/comms/mqtt.py @@ -66,7 +66,7 @@ def _set_initial_topics(self) -> None: ) self.publish( f"{camera_name}/ptz_autotracker/state", - "ON" if camera.onvif.autotracking.enabled else "OFF", # type: ignore[union-attr] + "ON" if camera.onvif.autotracking.enabled else "OFF", retain=True, ) self.publish( From 56d074ec4e453fb669611cff4dee846134e7d725 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 1 Jul 2023 20:26:55 -0500 Subject: [PATCH 07/40] Add threaded queue for movement for slower ptzs --- frigate/ptz_autotrack.py | 94 +++++++++++++++++++++++++++++++++------- 1 file changed, 79 insertions(+), 15 deletions(-) diff --git 
a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index 5761aa4752..c1dd62690a 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -1,6 +1,8 @@ """Automatically pan, tilt, and zoom on detected objects via onvif.""" +import copy import logging +import queue import threading import time from multiprocessing.synchronize import Event as MpEvent @@ -22,7 +24,9 @@ def __init__(self, config: CameraConfig, ptz_moving) -> None: self.frame_manager = SharedMemoryFrameManager() # homography is nice (zooming) but slow, translation is pan/tilt only but fast. self.norfair_motion_estimator = MotionEstimator( - transformations_getter=TranslationTransformationGetter() + transformations_getter=TranslationTransformationGetter(), + min_distance=30, + max_points=500, ) self.camera_config = config self.coord_transformations = None @@ -31,10 +35,16 @@ def __init__(self, config: CameraConfig, ptz_moving) -> None: def motion_estimator(self, detections, frame_time, camera_name): if self.camera_config.onvif.autotracking.enabled and self.ptz_moving.value: - logger.debug(f"Motion estimator running for {camera_name}") + # logger.debug( + # f"Motion estimator running for {camera_name} - frame time: {frame_time}" + # ) frame_id = f"{camera_name}{frame_time}" - frame = self.frame_manager.get(frame_id, self.camera_config.frame_shape) + yuv_frame = self.frame_manager.get( + frame_id, self.camera_config.frame_shape_yuv + ) + + frame = cv2.cvtColor(yuv_frame, cv2.COLOR_YUV2GRAY_I420) # mask out detections for better motion estimation mask = np.ones(frame.shape[:2], frame.dtype) @@ -47,7 +57,7 @@ def motion_estimator(self, detections, frame_time, camera_name): # merge camera config motion mask with detections. Norfair function needs 0,1 mask mask = np.bitwise_and(mask, self.camera_config.motion.mask).clip(max=1) - # Norfair estimator function needs color + # Norfair estimator function needs color so it can convert it right back to gray frame = cv2.cvtColor(frame, cv2.COLOR_GRAY2BGRA) self.coord_transformations = self.norfair_motion_estimator.update( @@ -56,6 +66,10 @@ def motion_estimator(self, detections, frame_time, camera_name): self.frame_manager.close(frame_id) + # logger.debug( + # f"Motion estimator transformation: {self.coord_transformations.rel_to_abs((0,0))}" + # ) + return self.coord_transformations return None @@ -98,6 +112,10 @@ def __init__( self.tracked_object_previous: dict[str, object] = {} self.object_types = {} self.required_zones = {} + self.move_queue = queue.Queue() + self.move_thread = threading.Thread(target=self._process_move_queue) + self.move_thread.daemon = True # Set the thread as a daemon thread + self.move_thread.start() # if cam is set to autotrack, onvif should be set up for camera_name, cam in self.config.cameras.items(): @@ -122,6 +140,42 @@ def __init__( f"Disabling autotracking for {camera_name}: FOV relative movement not supported" ) + def _process_move_queue(self): + while True: + try: + if self.move_queue.qsize() > 1: + # Accumulate values since last moved + pan = 0 + tilt = 0 + + while not self.move_queue.empty(): + camera, queued_pan, queued_tilt = self.move_queue.get() + logger.debug( + f"queue pan: {queued_pan}, queue tilt: {queued_tilt}" + ) + pan += queued_pan + tilt += queued_tilt + else: + move_data = self.move_queue.get() + camera, pan, tilt = move_data + logger.debug(f"removing pan: {pan}, removing tilt: {tilt}") + + logger.debug(f"final pan: {pan}, final tilt: {tilt}") + + self.onvif._move_relative(camera, pan, tilt, 0.1) + + # Wait until the camera 
finishes moving + while self.camera_metrics[camera]["ptz_moving"].value: + pass + + except queue.Empty: + pass + + def enqueue_move(self, camera, pan, tilt): + move_data = (camera, pan, tilt) + logger.debug(f"enqueue pan: {pan}, enqueue tilt: {tilt}") + self.move_queue.put(move_data) + def _autotrack_move_ptz(self, camera, obj): camera_config = self.config.cameras[camera] @@ -139,7 +193,7 @@ def _autotrack_move_ptz(self, camera, obj): int(size_ratio * camera_height) # ideas: check object velocity for camera speed? - self.onvif._move_relative(camera, -pan, tilt, 1) + self.enqueue_move(camera, -pan, tilt) def autotrack_object(self, camera, obj): camera_config = self.config.cameras[camera] @@ -160,9 +214,11 @@ def autotrack_object(self, camera, obj): and not obj.false_positive and self.tracked_object_previous[camera] is None ): - logger.debug(f"Autotrack: New object: {obj.to_dict()}") + logger.debug( + f"Autotrack: New object: {obj.obj_data['id']} {obj.obj_data['box']} {obj.obj_data['frame_time']}" + ) self.tracked_object[camera] = obj - self.tracked_object_previous[camera] = obj + self.tracked_object_previous[camera] = copy.deepcopy(obj) self._autotrack_move_ptz(camera, obj) return @@ -184,15 +240,18 @@ def autotrack_object(self, camera, obj): self.tracked_object_previous[camera].obj_data["box"], obj.obj_data["box"], ) - < 0.05 + > 0.05 ): logger.debug( - f"Autotrack: Existing object (do NOT move ptz): {obj.to_dict()}" + f"Autotrack: Existing object (do NOT move ptz): {obj.obj_data['id']} {obj.obj_data['box']} {obj.obj_data['frame_time']}" ) + self.tracked_object_previous[camera] = copy.deepcopy(obj) return - logger.debug(f"Autotrack: Existing object (move ptz): {obj.to_dict()}") - self.tracked_object_previous[camera] = obj + logger.debug( + f"Autotrack: Existing object (move ptz): {obj.obj_data['id']} {obj.obj_data['box']} {obj.obj_data['frame_time']}" + ) + self.tracked_object_previous[camera] = copy.deepcopy(obj) self._autotrack_move_ptz(camera, obj) return @@ -216,9 +275,11 @@ def autotrack_object(self, camera, obj): ) < 0.2 ): - logger.debug(f"Autotrack: Reacquired object: {obj.to_dict()}") + logger.debug( + f"Autotrack: Reacquired object: {obj.obj_data['id']} {obj.obj_data['box']} {obj.obj_data['frame_time']}" + ) self.tracked_object[camera] = obj - self.tracked_object_previous[camera] = obj + self.tracked_object_previous[camera] = copy.deepcopy(obj) self._autotrack_move_ptz(camera, obj) return @@ -229,7 +290,9 @@ def end_object(self, camera, obj): self.tracked_object[camera] is not None and obj.obj_data["id"] == self.tracked_object[camera].obj_data["id"] ): - logger.debug(f"Autotrack: End object: {obj.to_dict()}") + logger.debug( + f"Autotrack: End object: {obj.obj_data['id']} {obj.obj_data['box']}" + ) self.tracked_object[camera] = None self.onvif.get_camera_status(camera) @@ -249,7 +312,8 @@ def camera_maintenance(self, camera): and self.tracked_object_previous[camera] is not None and ( # might want to use a different timestamp here? 
- time.time() - self.tracked_object_previous[camera].last_published + time.time() + - self.tracked_object_previous[camera].obj_data["frame_time"] > autotracker_config.timeout ) and autotracker_config.return_preset From a5f407dba835a482f23f4fd12460cf21572feae2 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sat, 1 Jul 2023 22:35:06 -0500 Subject: [PATCH 08/40] Move queues per camera --- frigate/ptz_autotrack.py | 34 ++++++++++++++++++++++------------ 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index c1dd62690a..64b11961ea 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -5,6 +5,7 @@ import queue import threading import time +from functools import partial from multiprocessing.synchronize import Event as MpEvent import cv2 @@ -112,10 +113,8 @@ def __init__( self.tracked_object_previous: dict[str, object] = {} self.object_types = {} self.required_zones = {} - self.move_queue = queue.Queue() - self.move_thread = threading.Thread(target=self._process_move_queue) - self.move_thread.daemon = True # Set the thread as a daemon thread - self.move_thread.start() + self.move_queues = {} + self.move_threads = {} # if cam is set to autotrack, onvif should be set up for camera_name, cam in self.config.cameras.items(): @@ -128,6 +127,8 @@ def __init__( self.tracked_object[camera_name] = None self.tracked_object_previous[camera_name] = None + self.move_queues[camera_name] = queue.Queue() + if not onvif.cams[camera_name]["init"]: if not self.onvif._init_onvif(camera_name): return @@ -140,24 +141,33 @@ def __init__( f"Disabling autotracking for {camera_name}: FOV relative movement not supported" ) - def _process_move_queue(self): + return + + # movement thread per camera + self.move_threads[camera_name] = threading.Thread( + target=partial(self._process_move_queue, camera_name) + ) + self.move_threads[camera_name].daemon = True + self.move_threads[camera_name].start() + + def _process_move_queue(self, camera): while True: try: - if self.move_queue.qsize() > 1: + if self.move_queues[camera].qsize() > 1: # Accumulate values since last moved pan = 0 tilt = 0 - while not self.move_queue.empty(): - camera, queued_pan, queued_tilt = self.move_queue.get() + while not self.move_queues[camera].empty(): + queued_pan, queued_tilt = self.move_queues[camera].get() logger.debug( f"queue pan: {queued_pan}, queue tilt: {queued_tilt}" ) pan += queued_pan tilt += queued_tilt else: - move_data = self.move_queue.get() - camera, pan, tilt = move_data + move_data = self.move_queues[camera].get() + pan, tilt = move_data logger.debug(f"removing pan: {pan}, removing tilt: {tilt}") logger.debug(f"final pan: {pan}, final tilt: {tilt}") @@ -172,9 +182,9 @@ def _process_move_queue(self): pass def enqueue_move(self, camera, pan, tilt): - move_data = (camera, pan, tilt) + move_data = (pan, tilt) logger.debug(f"enqueue pan: {pan}, enqueue tilt: {tilt}") - self.move_queue.put(move_data) + self.move_queues[camera].put(move_data) def _autotrack_move_ptz(self, camera, obj): camera_config = self.config.cameras[camera] From 98c161bdde4649c1db42f302e2019178ea0adb11 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 2 Jul 2023 10:52:38 -0500 Subject: [PATCH 09/40] Move autotracker start to app.py --- frigate/app.py | 13 +++++++++++++ frigate/object_processing.py | 14 +++++++------- 2 files changed, 20 insertions(+), 7 deletions(-) diff --git a/frigate/app.py 
b/frigate/app.py index 79e785a13d..99b2cd3e97 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -40,6 +40,7 @@ from frigate.output import output_frames from frigate.plus import PlusApi from frigate.ptz import OnvifController +from frigate.ptz_autotrack import PtzAutoTrackerThread from frigate.record.record import manage_recordings from frigate.stats import StatsEmitter, stats_init from frigate.storage import StorageMaintainer @@ -327,6 +328,15 @@ def start_detectors(self) -> None: detector_config, ) + def start_ptz_autotracker(self) -> None: + self.ptz_autotracker_thread = PtzAutoTrackerThread( + self.config, + self.dispatcher.onvif, + self.dispatcher.camera_metrics, + self.stop_event, + ) + self.ptz_autotracker_thread.start() + def start_detected_frames_processor(self) -> None: self.detected_frames_processor = TrackedObjectProcessor( self.config, @@ -336,6 +346,7 @@ def start_detected_frames_processor(self) -> None: self.event_processed_queue, self.video_output_queue, self.recordings_info_queue, + self.ptz_autotracker_thread, self.stop_event, ) self.detected_frames_processor.start() @@ -488,6 +499,7 @@ def start(self) -> None: sys.exit(1) self.start_detectors() self.start_video_output_processor() + self.start_ptz_autotracker() self.start_detected_frames_processor() self.start_camera_processors() self.start_camera_capture_processes() @@ -531,6 +543,7 @@ def stop(self) -> None: self.dispatcher.stop() self.detected_frames_processor.join() + self.ptz_autotracker_thread.join() self.event_processor.join() self.event_cleanup.join() self.stats_emitter.join() diff --git a/frigate/object_processing.py b/frigate/object_processing.py index 3ddb08e977..a5edc6f1eb 100644 --- a/frigate/object_processing.py +++ b/frigate/object_processing.py @@ -240,7 +240,10 @@ def update(self, current_frame_time, obj_data): significant_change = True # update autotrack every second? or fps? 
- if self.obj_data["frame_time"] - self.previous["frame_time"] > 1: + if ( + self.obj_data["frame_time"] - self.previous["frame_time"] + > 0.5 # / self.camera_config.detect.fps + ): autotracker_update = True self.obj_data.update(obj_data) @@ -619,7 +622,7 @@ def update(self, frame_time, current_detections, motion_boxes, regions): frame_time, current_detections[id] ) - if autotracker_update: + if autotracker_update or significant_update: for c in self.callbacks["autotrack"]: c(self.name, updated_obj, frame_time) @@ -763,6 +766,7 @@ def __init__( event_processed_queue, video_output_queue, recordings_info_queue, + ptz_autotracker_thread, stop_event, ): threading.Thread.__init__(self) @@ -778,9 +782,7 @@ def __init__( self.camera_states: dict[str, CameraState] = {} self.frame_manager = SharedMemoryFrameManager() self.last_motion_detected: dict[str, float] = {} - self.ptz_autotracker_thread = PtzAutoTrackerThread( - config, dispatcher.onvif, dispatcher.camera_metrics, self.stop_event - ) + self.ptz_autotracker_thread = ptz_autotracker_thread def start(camera, obj: TrackedObject, current_frame_time): self.event_queue.put( @@ -1041,7 +1043,6 @@ def get_current_frame_time(self, camera) -> int: return self.camera_states[camera].current_frame_time def run(self): - self.ptz_autotracker_thread.start() while not self.stop_event.is_set(): try: ( @@ -1162,5 +1163,4 @@ def run(self): event_id, camera = self.event_processed_queue.get() self.camera_states[camera].finished(event_id) - self.ptz_autotracker_thread.join() logger.info("Exiting object processor...") From 8f590bf5cc2445f82788da4cc839aa3dc792314c Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 2 Jul 2023 10:53:20 -0500 Subject: [PATCH 10/40] iou value for tracked object --- frigate/ptz_autotrack.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index 64b11961ea..de90fa0268 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -250,7 +250,7 @@ def autotrack_object(self, camera, obj): self.tracked_object_previous[camera].obj_data["box"], obj.obj_data["box"], ) - > 0.05 + > 0.5 ): logger.debug( f"Autotrack: Existing object (do NOT move ptz): {obj.obj_data['id']} {obj.obj_data['box']} {obj.obj_data['frame_time']}" From 3059fd83c96904c912854d376b03ad8ff659f441 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 2 Jul 2023 12:58:07 -0500 Subject: [PATCH 11/40] mqtt callback --- frigate/comms/mqtt.py | 1 + 1 file changed, 1 insertion(+) diff --git a/frigate/comms/mqtt.py b/frigate/comms/mqtt.py index 287232aec3..bd38f4cb6b 100644 --- a/frigate/comms/mqtt.py +++ b/frigate/comms/mqtt.py @@ -151,6 +151,7 @@ def _start(self) -> None: "detect", "motion", "improve_contrast", + "ptz_autotracker", "motion_threshold", "motion_contour_area", ] From f05ca2b9c68c15fdd7a54e6e4f61e8a237dbf40f Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 2 Jul 2023 13:00:12 -0500 Subject: [PATCH 12/40] tracked object should be initially motionless --- frigate/ptz_autotrack.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index de90fa0268..0bdc1a32b4 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -96,6 +96,7 @@ def run(self): if cam.onvif.autotracking.enabled: self.ptz_autotracker.camera_maintenance(camera_name) time.sleep(1) + time.sleep(0.1) 
logger.info("Exiting autotracker...") @@ -223,6 +224,7 @@ def autotrack_object(self, camera, obj): and not obj.previous["false_positive"] and not obj.false_positive and self.tracked_object_previous[camera] is None + and obj.obj_data["motionless_count"] == 0 ): logger.debug( f"Autotrack: New object: {obj.obj_data['id']} {obj.obj_data['box']} {obj.obj_data['frame_time']}" From e883e00c7479553bf18fcf8b582531e20df678d6 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 2 Jul 2023 13:10:17 -0500 Subject: [PATCH 13/40] only draw thicker box if autotracking is enabled --- frigate/object_processing.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frigate/object_processing.py b/frigate/object_processing.py index a5edc6f1eb..ce8ec659d2 100644 --- a/frigate/object_processing.py +++ b/frigate/object_processing.py @@ -493,7 +493,8 @@ def get_current_frame(self, draw_options={}): # draw thicker box around ptz autotracked object if ( - self.ptz_autotracker_thread.ptz_autotracker.tracked_object[ + self.camera_config.onvif.autotracking.enabled + and self.ptz_autotracker_thread.ptz_autotracker.tracked_object[ self.name ] is not None From 98d534918a55f8bba05060ae39585fe69e00d117 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 2 Jul 2023 13:40:16 -0500 Subject: [PATCH 14/40] Init if enabled when initially disabled in config --- frigate/ptz_autotrack.py | 66 +++++++++++++++++++++++----------------- 1 file changed, 38 insertions(+), 28 deletions(-) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index 0bdc1a32b4..c92ea88028 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -116,40 +116,48 @@ def __init__( self.required_zones = {} self.move_queues = {} self.move_threads = {} + self.autotracker_init = {} # if cam is set to autotrack, onvif should be set up for camera_name, cam in self.config.cameras.items(): + self.autotracker_init[camera_name] = False if cam.onvif.autotracking.enabled: - logger.debug(f"Autotracker init for cam: {camera_name}") - - self.object_types[camera_name] = cam.onvif.autotracking.track - self.required_zones[camera_name] = cam.onvif.autotracking.required_zones - - self.tracked_object[camera_name] = None - self.tracked_object_previous[camera_name] = None - - self.move_queues[camera_name] = queue.Queue() - - if not onvif.cams[camera_name]["init"]: - if not self.onvif._init_onvif(camera_name): - return - if not onvif.cams[camera_name]["relative_fov_supported"]: - cam.onvif.autotracking.enabled = False - self.camera_metrics[camera_name][ - "ptz_autotracker_enabled" - ].value = False - logger.warning( - f"Disabling autotracking for {camera_name}: FOV relative movement not supported" - ) + self._autotracker_setup(cam, camera_name) - return + def _autotracker_setup(self, cam, camera_name): + logger.debug(f"Autotracker init for cam: {camera_name}") - # movement thread per camera - self.move_threads[camera_name] = threading.Thread( - target=partial(self._process_move_queue, camera_name) - ) - self.move_threads[camera_name].daemon = True - self.move_threads[camera_name].start() + self.object_types[camera_name] = cam.onvif.autotracking.track + self.required_zones[camera_name] = cam.onvif.autotracking.required_zones + + self.tracked_object[camera_name] = None + self.tracked_object_previous[camera_name] = None + + self.move_queues[camera_name] = queue.Queue() + + if not self.onvif.cams[camera_name]["init"]: + if not 
self.onvif._init_onvif(camera_name): + return + if not self.onvif.cams[camera_name]["relative_fov_supported"]: + cam.onvif.autotracking.enabled = False + self.camera_metrics[camera_name][ + "ptz_autotracker_enabled" + ].value = False + logger.warning( + f"Disabling autotracking for {camera_name}: FOV relative movement not supported" + ) + + return + + # movement thread per camera + if not self.move_threads or not self.move_threads[camera_name]: + self.move_threads[camera_name] = threading.Thread( + target=partial(self._process_move_queue, camera_name) + ) + self.move_threads[camera_name].daemon = True + self.move_threads[camera_name].start() + + self.autotracker_init[camera_name] = True def _process_move_queue(self, camera): while True: @@ -314,6 +322,8 @@ def camera_maintenance(self, camera): autotracker_config = self.config.cameras[camera].onvif.autotracking if autotracker_config.enabled: + if not self.autotracker_init[camera]: + self._autotracker_setup(self.config.cameras[camera], camera) # regularly update camera status if self.camera_metrics[camera]["ptz_moving"].value: self.onvif.get_camera_status(camera) From 6f170358ed2f50159a09ffb23cfcbbe7737b902e Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 2 Jul 2023 14:02:52 -0500 Subject: [PATCH 15/40] Fix init --- frigate/ptz_autotrack.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index c92ea88028..054f7c2c42 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -157,7 +157,7 @@ def _autotracker_setup(self, cam, camera_name): self.move_threads[camera_name].daemon = True self.move_threads[camera_name].start() - self.autotracker_init[camera_name] = True + self.autotracker_init[camera_name] = True def _process_move_queue(self, camera): while True: From 1be67e260682378dadeaddbb5667492bd6ce1279 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 2 Jul 2023 16:08:08 -0500 Subject: [PATCH 16/40] Thread names --- frigate/ptz_autotrack.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index 054f7c2c42..3d5dce392a 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -85,7 +85,7 @@ def __init__( stop_event: MpEvent, ) -> None: threading.Thread.__init__(self) - self.name = "frigate_ptz_autotracker" + self.name = "ptz_autotracker" self.ptz_autotracker = PtzAutoTracker(config, onvif, camera_metrics) self.stop_event = stop_event self.config = config @@ -152,7 +152,8 @@ def _autotracker_setup(self, cam, camera_name): # movement thread per camera if not self.move_threads or not self.move_threads[camera_name]: self.move_threads[camera_name] = threading.Thread( - target=partial(self._process_move_queue, camera_name) + name=f"move_thread_{camera_name}", + target=partial(self._process_move_queue, camera_name), ) self.move_threads[camera_name].daemon = True self.move_threads[camera_name].start() From 59e539375e7b5311d4c905827b969dda44e57c9c Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 2 Jul 2023 16:52:12 -0500 Subject: [PATCH 17/40] Always use motion estimator --- frigate/config.py | 7 ------- frigate/track/norfair_tracker.py | 5 +---- 2 files changed, 1 insertion(+), 11 deletions(-) diff --git a/frigate/config.py b/frigate/config.py index 14b15e07ba..fb35fe29f2 100644 --- a/frigate/config.py +++ b/frigate/config.py @@ 
-129,18 +129,11 @@ def validate_password(cls, v, values): class PtzAutotrackConfig(FrigateBaseModel): enabled: bool = Field(default=False, title="Enable PTZ auto tracking.") - motion_estimator: bool = Field(default=False, title="Use Norfair motion estimator.") track: List[str] = Field(default=DEFAULT_TRACKED_OBJECTS, title="Objects to track.") required_zones: List[str] = Field( default_factory=list, title="List of required zones to be entered in order to begin autotracking.", ) - size_ratio: float = Field( - default=0.5, - title="Target ratio of tracked object to field of view (0.2-0.9).", - ge=0.2, - le=0.9, - ) return_preset: Optional[str] = Field( title="Name of camera preset to return to when object tracking is over." ) diff --git a/frigate/track/norfair_tracker.py b/frigate/track/norfair_tracker.py index 48cc029ee3..c051bf13a5 100644 --- a/frigate/track/norfair_tracker.py +++ b/frigate/track/norfair_tracker.py @@ -239,10 +239,7 @@ def match_and_update(self, frame_time, detections): coord_transformations = None - if ( - self.ptz_autotracker_enabled - and self.camera_config.onvif.autotracking.motion_estimator - ): + if self.ptz_autotracker_enabled: coord_transformations = self.ptz_motion_estimator.motion_estimator( detections, frame_time, self.camera_name ) From 1f56b93824b1401d2984544b8f7cf073b99dfb44 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 2 Jul 2023 18:02:43 -0500 Subject: [PATCH 18/40] docs --- docs/docs/configuration/autotracking.md | 69 +++++++++++++++++++++++++ docs/docs/configuration/cameras.md | 2 + docs/docs/configuration/index.md | 15 ++++++ docs/docs/integrations/mqtt.md | 8 +++ frigate/config.py | 6 +-- 5 files changed, 97 insertions(+), 3 deletions(-) create mode 100644 docs/docs/configuration/autotracking.md diff --git a/docs/docs/configuration/autotracking.md b/docs/docs/configuration/autotracking.md new file mode 100644 index 0000000000..02ec10c74d --- /dev/null +++ b/docs/docs/configuration/autotracking.md @@ -0,0 +1,69 @@ +--- +id: autotracking +title: Autotracking +--- + +An ONVIF-capable camera that supports relative movement within the field of view (FOV) can also be configured to automatically track moving objects and keep them in the center of the frame. + +## Autotracking behavior + +Once Frigate determines that an object is not a false positive and has entered one of the required zones, the autotracker will move the PTZ camera to keep the object centered in the frame until the object either moves out of the frame, the PTZ is not capable of any more movement, or Frigate loses track of it. + +Upon loss of tracking, Frigate will scan the region of the lost object for `timeout` seconds. If an object of the same type is found in that region, Frigate will track that new object. + +When tracking has ended, Frigate will return to the camera preset specified by the `return_preset` configuration entry. + +## Checking ONVIF camera support + +Frigate autotracking functions with PTZ cameras capable of relative movement within the field of view (as specified in the [ONVIF spec](https://www.onvif.org/specs/srv/ptz/ONVIF-PTZ-Service-Spec-v1712.pdf) as `RelativePanTiltTranslationSpace` having a `TranslationSpaceFov` entry). + +Many cheaper PTZs likely don't support this standard. 
To see if your PTZ camera does, you can download and run [this simple Python script](https://gist.github.com/hawkeye217/152a1d4ba80760dac95d46e143d37112), replacing the details on line 4 with your camera's IP address, ONVIF port, username, and password. + +## Configuration + +First, configure the ONVIF parameters for your camera, then specify the object types to track, a required zone the object must enter, and a camera preset name to return to when tracking has ended. Optionally, specify a delay in seconds before Frigate returns the camera to the preset. + +An [ONVIF connection](cameras.md) is required for autotracking to function. + +Note that `autotracking` is disabled by default but can be enabled in the configuration or by MQTT. + +```yaml +cameras: + ptzcamera: + ... + onvif: + # Required: host of the camera being connected to. + host: 0.0.0.0 + # Optional: ONVIF port for device (default: shown below). + port: 8000 + # Optional: username for login. + # NOTE: Some devices require admin to access ONVIF. + user: admin + # Optional: password for login. + password: admin + # Optional: PTZ camera object autotracking. Keeps a moving object in + # the center of the frame by automatically moving the PTZ camera. + autotracking: + # Optional: enable/disable object autotracking. (default: shown below) + enabled: False + # Optional: list of objects to track from labelmap.txt (default: shown below) + track: + - person + # Required: Begin automatically tracking an object when it enters any of the listed zones. + required_zones: + - zone_name + # Required: Name of ONVIF camera preset to return to when tracking is over. + return_preset: preset_name + # Optional: Seconds to delay before returning to preset. (default: shown below) + timeout: 10 +``` + +## Best practices and considerations + +Every PTZ camera is different, so autotracking may not perform ideally in every situation. This experimental feature was initially developed using an EmpireTech/Dahua SD1A404XB-GNR. + +The object tracker in Frigate estimates the motion of the PTZ so that tracked objects are preserved when the camera moves. In most cases (especially for faster moving objects), the default 5 fps is insufficient for the motion estimator to perform accurately. 10 fps is the current recommendation. Higher frame rates will likely not be more performant and will only slow down Frigate and the motion estimator. Adjust your camera to output 10 frames per second and change the `fps` parameter in the [detect configuration](index.md) of your configuration file. + +A fast [detector](detectors.md) is recommended. CPU detectors will not perform well or won't work at all. If Frigate already has trouble keeping track of your object, the autotracker will struggle as well. + +The autotracker queues up motion requests for the tracked object while the PTZ is moving and will move make one longer move when complete. If your PTZ's motor is slow, you may not be able to reliably autotrack fast moving objects. diff --git a/docs/docs/configuration/cameras.md b/docs/docs/configuration/cameras.md index 8f907cb3f6..1804003a5f 100644 --- a/docs/docs/configuration/cameras.md +++ b/docs/docs/configuration/cameras.md @@ -66,3 +66,5 @@ cameras: ``` then PTZ controls will be available in the cameras WebUI. + +An ONVIF-capable camera that supports relative movement within the field of view (FOV) can also be configured to automatically track moving objects and keep them in the center of the frame. For autotracking setup, see the [autotracking](autotracking.md) docs. 
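For readers checking camera compatibility by hand, the following is a minimal sketch of the support probe described in the autotracking docs above. It assumes the `onvif-zeep` package (the library behind `ONVIFCamera` elsewhere in this series); the host, port, and credentials are placeholders, and the exact attribute layout of the response can vary between devices and library versions.

```python
from onvif import ONVIFCamera  # assumes the onvif-zeep package is installed

# Placeholders: use your camera's address, ONVIF port, and credentials.
camera = ONVIFCamera("192.168.1.10", 8000, "admin", "admin")
media = camera.create_media_service()
ptz = camera.create_ptz_service()

profile = media.GetProfiles()[0]

# Ask the PTZ service which translation spaces this profile supports.
request = ptz.create_type("GetConfigurationOptions")
request.ConfigurationToken = profile.PTZConfiguration.token
options = ptz.GetConfigurationOptions(request)

# Autotracking needs FOV-relative pan/tilt, i.e. a RelativePanTiltTranslationSpace
# whose URI contains "TranslationSpaceFov".
spaces = options.Spaces.RelativePanTiltTranslationSpace or []
supported = any("TranslationSpaceFov" in space.URI for space in spaces)
print(f"FOV-relative movement supported: {supported}")
```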
diff --git a/docs/docs/configuration/index.md b/docs/docs/configuration/index.md index ac65a10189..9ca1354196 100644 --- a/docs/docs/configuration/index.md +++ b/docs/docs/configuration/index.md @@ -535,6 +535,21 @@ cameras: user: admin # Optional: password for login. password: admin + # Optional: PTZ camera object autotracking. Keeps a moving object in + # the center of the frame by automatically moving the PTZ camera. + autotracking: + # Optional: enable/disable object autotracking. (default: shown below) + enabled: False + # Optional: list of objects to track from labelmap.txt (default: shown below) + track: + - person + # Required: Begin automatically tracking an object when it enters any of the listed zones. + required_zones: + - zone_name + # Required: Name of ONVIF camera preset to return to when tracking is over. + return_preset: preset_name + # Optional: Seconds to delay before returning to preset. (default: shown below) + timeout: 10 # Optional: Configuration for how to sort the cameras in the Birdseye view. birdseye: diff --git a/docs/docs/integrations/mqtt.md b/docs/docs/integrations/mqtt.md index cde7605590..7fbf3b8e52 100644 --- a/docs/docs/integrations/mqtt.md +++ b/docs/docs/integrations/mqtt.md @@ -180,3 +180,11 @@ Topic to send PTZ commands to camera. | `MOVE_` | send command to continuously move in ``, possible values are [UP, DOWN, LEFT, RIGHT] | | `ZOOM_` | send command to continuously zoom ``, possible values are [IN, OUT] | | `STOP` | send command to stop moving | + +### `frigate//ptz_autotracker/set` + +Topic to turn the PTZ autotracker for a camera on and off. Expected values are `ON` and `OFF`. + +### `frigate//ptz_autotracker/state` + +Topic with current state of the PTZ autotracker for a camera. Published values are `ON` and `OFF`. diff --git a/frigate/config.py b/frigate/config.py index fb35fe29f2..2f23a21060 100644 --- a/frigate/config.py +++ b/frigate/config.py @@ -128,17 +128,17 @@ def validate_password(cls, v, values): class PtzAutotrackConfig(FrigateBaseModel): - enabled: bool = Field(default=False, title="Enable PTZ auto tracking.") + enabled: bool = Field(default=False, title="Enable PTZ object autotracking.") track: List[str] = Field(default=DEFAULT_TRACKED_OBJECTS, title="Objects to track.") required_zones: List[str] = Field( default_factory=list, title="List of required zones to be entered in order to begin autotracking.", ) - return_preset: Optional[str] = Field( + return_preset: str = Field( title="Name of camera preset to return to when object tracking is over." ) timeout: int = Field( - default=5, title="Seconds to delay before returning to preset." + default=10, title="Seconds to delay before returning to preset." 
) From 1d588dcdc08f4f937f3e16ca65e273188a2fdaf3 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 2 Jul 2023 18:07:25 -0500 Subject: [PATCH 19/40] clarify fov support --- docs/docs/configuration/autotracking.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/docs/configuration/autotracking.md b/docs/docs/configuration/autotracking.md index 02ec10c74d..ca1f59daac 100644 --- a/docs/docs/configuration/autotracking.md +++ b/docs/docs/configuration/autotracking.md @@ -17,7 +17,9 @@ When tracking has ended, Frigate will return to the camera preset specified by t Frigate autotracking functions with PTZ cameras capable of relative movement within the field of view (as specified in the [ONVIF spec](https://www.onvif.org/specs/srv/ptz/ONVIF-PTZ-Service-Spec-v1712.pdf) as `RelativePanTiltTranslationSpace` having a `TranslationSpaceFov` entry). -Many cheaper PTZs likely don't support this standard. To see if your PTZ camera does, you can download and run [this simple Python script](https://gist.github.com/hawkeye217/152a1d4ba80760dac95d46e143d37112), replacing the details on line 4 with your camera's IP address, ONVIF port, username, and password. +Many cheaper PTZs likely don't support this standard. Frigate will report an error message in the log and disable autotracking if your PTZ is unsupported. + +Alternatively, you can download and run [this simple Python script](https://gist.github.com/hawkeye217/152a1d4ba80760dac95d46e143d37112), replacing the details on line 4 with your camera's IP address, ONVIF port, username, and password to check your camera. ## Configuration From 8e0d492ad8204aeb8ab8ea66100f8f68c4d25475 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Sun, 2 Jul 2023 19:06:44 -0500 Subject: [PATCH 20/40] remove size ratio --- frigate/ptz_autotrack.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index 3d5dce392a..f1fffc59ad 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -207,11 +207,6 @@ def _autotrack_move_ptz(self, camera, obj): pan = 0.5 - (obj.obj_data["centroid"][0] / camera_width) tilt = 0.5 - (obj.obj_data["centroid"][1] / camera_height) - # Calculate zoom amount - size_ratio = camera_config.onvif.autotracking.size_ratio - int(size_ratio * camera_width) - int(size_ratio * camera_height) - # ideas: check object velocity for camera speed? 
self.enqueue_move(camera, -pan, tilt) @@ -222,8 +217,8 @@ def autotrack_object(self, camera, obj): self.onvif.get_camera_status(camera) if camera_config.onvif.autotracking.enabled: - # either this is a brand new object that's on our camera, has our label, entered the zone, is not a false positive, and is not initially motionless - # or one we're already tracking, which assumes all those things are already true + # either this is a brand new object that's on our camera, has our label, entered the zone, is not a false positive, + # and is not initially motionless - or one we're already tracking, which assumes all those things are already true if ( # new object self.tracked_object[camera] is None From b267cfb520119bc3c7b52ef0e6470815f8fb74fa Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 09:22:21 -0500 Subject: [PATCH 21/40] use mp event instead of value for ptz status --- frigate/app.py | 3 +- frigate/ptz.py | 16 +++--- frigate/ptz_autotrack.py | 91 ++++++++++++++++++-------------- frigate/track/norfair_tracker.py | 6 +-- frigate/types.py | 2 +- frigate/video.py | 10 ++-- 6 files changed, 70 insertions(+), 58 deletions(-) diff --git a/frigate/app.py b/frigate/app.py index 99b2cd3e97..bc1ba7f6f9 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -122,7 +122,7 @@ def init_config(self) -> None: "i", self.config.cameras[camera_name].onvif.autotracking.enabled, ), - "ptz_moving": mp.Value("i", 0), + "ptz_stopped": mp.Event(), "motion_threshold": mp.Value( "i", self.config.cameras[camera_name].motion.threshold ), @@ -137,6 +137,7 @@ def init_config(self) -> None: "capture_process": None, "process": None, } + self.camera_metrics[camera_name]["ptz_stopped"].set() self.record_metrics[camera_name] = { "record_enabled": mp.Value( "i", self.config.cameras[camera_name].record.enabled diff --git a/frigate/ptz.py b/frigate/ptz.py index f7971a4551..253d76ef65 100644 --- a/frigate/ptz.py +++ b/frigate/ptz.py @@ -216,7 +216,7 @@ def _move_relative(self, camera_name: str, pan, tilt, speed) -> None: return self.cams[camera_name]["active"] = True - self.camera_metrics[camera_name]["ptz_moving"].value = True + self.camera_metrics[camera_name]["ptz_stopped"].clear() onvif: ONVIFCamera = self.cams[camera_name]["onvif"] move_request = self.cams[camera_name]["relative_move_request"] @@ -268,7 +268,7 @@ def _move_to_preset(self, camera_name: str, preset: str) -> None: return self.cams[camera_name]["active"] = True - self.camera_metrics[camera_name]["ptz_moving"].value = True + self.camera_metrics[camera_name]["ptz_stopped"].clear() move_request = self.cams[camera_name]["move_request"] onvif: ONVIFCamera = self.cams[camera_name]["onvif"] preset_token = self.cams[camera_name]["presets"][preset] @@ -278,7 +278,7 @@ def _move_to_preset(self, camera_name: str, preset: str) -> None: "PresetToken": preset_token, } ) - self.camera_metrics[camera_name]["ptz_moving"].value = False + self.camera_metrics[camera_name]["ptz_stopped"].set() self.cams[camera_name]["active"] = False def _zoom(self, camera_name: str, command: OnvifCommandEnum) -> None: @@ -350,10 +350,12 @@ def get_camera_status(self, camera_name: str) -> dict[str, any]: status_request = self.cams[camera_name]["status_request"] status = onvif.get_service("ptz").GetStatus(status_request) - self.cams[camera_name]["active"] = status.MoveStatus.PanTilt != "IDLE" - self.camera_metrics[camera_name]["ptz_moving"].value = ( - status.MoveStatus.PanTilt != "IDLE" - ) + if status.MoveStatus.PanTilt == "IDLE" or 
status.MoveStatus.Zoom == "IDLE": + self.cams[camera_name]["active"] = False + self.camera_metrics[camera_name]["ptz_stopped"].set() + else: + self.cams[camera_name]["active"] = True + self.camera_metrics[camera_name]["ptz_stopped"].clear() return { "pan": status.Position.PanTilt.x, diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index f1fffc59ad..9feef0ed63 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -21,7 +21,7 @@ class PtzMotionEstimator: - def __init__(self, config: CameraConfig, ptz_moving) -> None: + def __init__(self, config: CameraConfig, ptz_stopped) -> None: self.frame_manager = SharedMemoryFrameManager() # homography is nice (zooming) but slow, translation is pan/tilt only but fast. self.norfair_motion_estimator = MotionEstimator( @@ -31,11 +31,14 @@ def __init__(self, config: CameraConfig, ptz_moving) -> None: ) self.camera_config = config self.coord_transformations = None - self.ptz_moving = ptz_moving + self.ptz_stopped = ptz_stopped logger.debug(f"Motion estimator init for cam: {config.name}") def motion_estimator(self, detections, frame_time, camera_name): - if self.camera_config.onvif.autotracking.enabled and self.ptz_moving.value: + if ( + self.camera_config.onvif.autotracking.enabled + and not self.ptz_stopped.is_set() + ): # logger.debug( # f"Motion estimator running for {camera_name} - frame time: {frame_time}" # ) @@ -96,6 +99,11 @@ def run(self): if cam.onvif.autotracking.enabled: self.ptz_autotracker.camera_maintenance(camera_name) time.sleep(1) + else: + # disabled dynamically by mqtt + if self.ptz_autotracker.tracked_object.get(camera_name): + self.ptz_autotracker.tracked_object[camera_name] = None + self.ptz_autotracker.tracked_object_previous[camera_name] = None time.sleep(0.1) logger.info("Exiting autotracker...") @@ -169,6 +177,13 @@ def _process_move_queue(self, camera): tilt = 0 while not self.move_queues[camera].empty(): + queued_pan, queued_tilt = self.move_queues[camera].queue[0] + + # If exceeding the movement range, keep it in the queue and move now + if abs(pan + queued_pan) > 1.0 or abs(tilt + queued_tilt) > 1.0: + logger.debug("Pan or tilt value exceeds 1.0") + break + queued_pan, queued_tilt = self.move_queues[camera].get() logger.debug( f"queue pan: {queued_pan}, queue tilt: {queued_tilt}" @@ -182,16 +197,15 @@ def _process_move_queue(self, camera): logger.debug(f"final pan: {pan}, final tilt: {tilt}") - self.onvif._move_relative(camera, pan, tilt, 0.1) + self.onvif._move_relative(camera, pan, tilt, 1) # Wait until the camera finishes moving - while self.camera_metrics[camera]["ptz_moving"].value: - pass + self.camera_metrics[camera]["ptz_stopped"].wait() except queue.Empty: - pass + time.sleep(0.1) - def enqueue_move(self, camera, pan, tilt): + def _enqueue_move(self, camera, pan, tilt): move_data = (pan, tilt) logger.debug(f"enqueue pan: {pan}, enqueue tilt: {tilt}") self.move_queues[camera].put(move_data) @@ -208,7 +222,7 @@ def _autotrack_move_ptz(self, camera, obj): tilt = 0.5 - (obj.obj_data["centroid"][1] / camera_height) # ideas: check object velocity for camera speed? 
- self.enqueue_move(camera, -pan, tilt) + self._enqueue_move(camera, -pan, tilt) def autotrack_object(self, camera, obj): camera_config = self.config.cameras[camera] @@ -317,35 +331,30 @@ def camera_maintenance(self, camera): # returns camera to preset after timeout when tracking is over autotracker_config = self.config.cameras[camera].onvif.autotracking - if autotracker_config.enabled: - if not self.autotracker_init[camera]: - self._autotracker_setup(self.config.cameras[camera], camera) - # regularly update camera status - if self.camera_metrics[camera]["ptz_moving"].value: - self.onvif.get_camera_status(camera) - - # return to preset if tracking is over - if ( - self.tracked_object[camera] is None - and self.tracked_object_previous[camera] is not None - and ( - # might want to use a different timestamp here? - time.time() - - self.tracked_object_previous[camera].obj_data["frame_time"] - > autotracker_config.timeout - ) - and autotracker_config.return_preset - and not self.camera_metrics[camera]["ptz_moving"].value - ): - logger.debug( - f"Autotrack: Time is {time.time()}, returning to preset: {autotracker_config.return_preset}" - ) - self.onvif._move_to_preset( - camera, - autotracker_config.return_preset.lower(), - ) - self.tracked_object_previous[camera] = None - - def disable_autotracking(self, camera): - # need to call this if autotracking is disabled by mqtt?? - self.tracked_object[camera] = None + if not self.autotracker_init[camera]: + self._autotracker_setup(self.config.cameras[camera], camera) + # regularly update camera status + if not self.camera_metrics[camera]["ptz_stopped"].is_set(): + self.onvif.get_camera_status(camera) + + # return to preset if tracking is over + if ( + self.tracked_object[camera] is None + and self.tracked_object_previous[camera] is not None + and ( + # might want to use a different timestamp here? 
+ time.time() + - self.tracked_object_previous[camera].obj_data["frame_time"] + > autotracker_config.timeout + ) + and autotracker_config.return_preset + ): + self.camera_metrics[camera]["ptz_stopped"].wait() + logger.debug( + f"Autotrack: Time is {time.time()}, returning to preset: {autotracker_config.return_preset}" + ) + self.onvif._move_to_preset( + camera, + autotracker_config.return_preset.lower(), + ) + self.tracked_object_previous[camera] = None diff --git a/frigate/track/norfair_tracker.py b/frigate/track/norfair_tracker.py index c051bf13a5..36bb2532f5 100644 --- a/frigate/track/norfair_tracker.py +++ b/frigate/track/norfair_tracker.py @@ -55,7 +55,7 @@ def frigate_distance(detection: Detection, tracked_object) -> float: class NorfairTracker(ObjectTracker): - def __init__(self, config: CameraConfig, ptz_autotracker_enabled, ptz_moving): + def __init__(self, config: CameraConfig, ptz_autotracker_enabled, ptz_stopped): self.tracked_objects = {} self.disappeared = {} self.positions = {} @@ -63,7 +63,7 @@ def __init__(self, config: CameraConfig, ptz_autotracker_enabled, ptz_moving): self.camera_config = config self.detect_config = config.detect self.ptz_autotracker_enabled = ptz_autotracker_enabled.value - self.ptz_moving = ptz_moving + self.ptz_stopped = ptz_stopped self.camera_name = config.name self.track_id_map = {} # TODO: could also initialize a tracker per object class if there @@ -75,7 +75,7 @@ def __init__(self, config: CameraConfig, ptz_autotracker_enabled, ptz_moving): hit_counter_max=self.max_disappeared, ) if self.ptz_autotracker_enabled: - self.ptz_motion_estimator = PtzMotionEstimator(config, self.ptz_moving) + self.ptz_motion_estimator = PtzMotionEstimator(config, self.ptz_stopped) def register(self, track_id, obj): rand_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=6)) diff --git a/frigate/types.py b/frigate/types.py index 29991552f1..7914d0b90d 100644 --- a/frigate/types.py +++ b/frigate/types.py @@ -17,7 +17,7 @@ class CameraMetricsTypes(TypedDict): motion_enabled: Synchronized improve_contrast_enabled: Synchronized ptz_autotracker_enabled: Synchronized - ptz_moving: Synchronized + ptz_stopped: Synchronized motion_threshold: Synchronized motion_contour_area: Synchronized process: Optional[Process] diff --git a/frigate/video.py b/frigate/video.py index 597e90da93..0e30d3b4bc 100755 --- a/frigate/video.py +++ b/frigate/video.py @@ -458,7 +458,7 @@ def receiveSignal(signalNumber, frame): motion_enabled = process_info["motion_enabled"] improve_contrast_enabled = process_info["improve_contrast_enabled"] ptz_autotracker_enabled = process_info["ptz_autotracker_enabled"] - ptz_moving = process_info["ptz_moving"] + ptz_stopped = process_info["ptz_stopped"] motion_threshold = process_info["motion_threshold"] motion_contour_area = process_info["motion_contour_area"] @@ -478,7 +478,7 @@ def receiveSignal(signalNumber, frame): name, labelmap, detection_queue, result_connection, model_config, stop_event ) - object_tracker = NorfairTracker(config, ptz_autotracker_enabled, ptz_moving) + object_tracker = NorfairTracker(config, ptz_autotracker_enabled, ptz_stopped) frame_manager = SharedMemoryFrameManager() @@ -499,7 +499,7 @@ def receiveSignal(signalNumber, frame): detection_enabled, motion_enabled, stop_event, - ptz_moving, + ptz_stopped, ) logger.info(f"{name}: exiting subprocess") @@ -724,7 +724,7 @@ def process_frames( detection_enabled: mp.Value, motion_enabled: mp.Value, stop_event, - ptz_moving: mp.Value, + ptz_stopped: mp.Event, exit_on_empty: bool = 
False, ): # attribute labels are not tracked and are not assigned regions @@ -769,7 +769,7 @@ def process_frames( # look for motion if enabled motion_boxes = ( motion_detector.detect(frame) - if motion_enabled.value and not ptz_moving.value + if motion_enabled.value and ptz_stopped.is_set() else [] ) From 519520460472e5e7f633fb66e68e11912f2d5ca2 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 09:22:59 -0500 Subject: [PATCH 22/40] update autotrack at half fps --- frigate/object_processing.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/frigate/object_processing.py b/frigate/object_processing.py index ce8ec659d2..4849858db8 100644 --- a/frigate/object_processing.py +++ b/frigate/object_processing.py @@ -239,10 +239,9 @@ def update(self, current_frame_time, obj_data): if self.obj_data["frame_time"] - self.previous["frame_time"] > 60: significant_change = True - # update autotrack every second? or fps? - if ( - self.obj_data["frame_time"] - self.previous["frame_time"] - > 0.5 # / self.camera_config.detect.fps + # update autotrack at half fps + if self.obj_data["frame_time"] - self.previous["frame_time"] > ( + 1 / (self.camera_config.detect.fps / 2) ): autotracker_update = True From 39fbe47fc327aa60575d9af8aabdb1a0283fc38e Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 09:51:11 -0500 Subject: [PATCH 23/40] fix merge conflict --- frigate/app.py | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/frigate/app.py b/frigate/app.py index 018363d7cf..0b0544a9af 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -133,7 +133,9 @@ def init_config(self) -> None: "i", self.config.cameras[camera_name].motion.improve_contrast, ), - "ptz_autotracker_enabled": mp.Value( + "ptz_autotracker_enabled": mp.Value( # type: ignore[typeddict-item] + # issue https://github.com/python/typeshed/issues/8799 + # from mypy 0.981 onwards "i", self.config.cameras[camera_name].onvif.autotracking.enabled, ), @@ -166,10 +168,19 @@ def init_config(self) -> None: "capture_process": None, "process": None, } - self.record_metrics[camera_name] = { - "record_enabled": mp.Value( - "i", self.config.cameras[camera_name].record.enabled - ) + self.feature_metrics[camera_name] = { + "audio_enabled": mp.Value( # type: ignore[typeddict-item] + # issue https://github.com/python/typeshed/issues/8799 + # from mypy 0.981 onwards + "i", + self.config.cameras[camera_name].audio.enabled, + ), + "record_enabled": mp.Value( # type: ignore[typeddict-item] + # issue https://github.com/python/typeshed/issues/8799 + # from mypy 0.981 onwards + "i", + self.config.cameras[camera_name].record.enabled, + ), } def set_log_levels(self) -> None: From c690bb85007c3e2bb3479463d66032712812400b Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 10:08:27 -0500 Subject: [PATCH 24/40] fix event type for mypy --- frigate/types.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frigate/types.py b/frigate/types.py index bd5a1fb2ba..eabd26e180 100644 --- a/frigate/types.py +++ b/frigate/types.py @@ -1,5 +1,6 @@ from multiprocessing.context import Process from multiprocessing.sharedctypes import Synchronized +from multiprocessing.synchronize import Event from typing import Optional, TypedDict from faster_fifo import Queue @@ -18,7 +19,7 @@ class CameraMetricsTypes(TypedDict): motion_enabled: 
Synchronized improve_contrast_enabled: Synchronized ptz_autotracker_enabled: Synchronized - ptz_stopped: Synchronized + ptz_stopped: Event motion_threshold: Synchronized motion_contour_area: Synchronized process: Optional[Process] From bef54c537fe3795428be3ae8d7ec38dcd483a07a Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 10:10:43 -0500 Subject: [PATCH 25/40] clean up --- motion_estimator.py | 284 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 284 insertions(+) create mode 100644 motion_estimator.py diff --git a/motion_estimator.py b/motion_estimator.py new file mode 100644 index 0000000000..62949341ef --- /dev/null +++ b/motion_estimator.py @@ -0,0 +1,284 @@ +import argparse +from functools import partial + +import numpy as np +import torch +from norfair import ( + AbsolutePaths, + Detection, + FixedCamera, + Tracker, + Video, + draw_absolute_grid, +) +from norfair.camera_motion import ( + HomographyTransformationGetter, + MotionEstimator, + TranslationTransformationGetter, +) +from norfair.drawing import draw_tracked_objects + + +def yolo_detections_to_norfair_detections(yolo_detections, track_boxes): + norfair_detections = [] + boxes = [] + detections_as_xyxy = yolo_detections.xyxy[0] + for detection_as_xyxy in detections_as_xyxy: + detection_as_xyxy = detection_as_xyxy.cpu().numpy() + bbox = np.array( + [ + [detection_as_xyxy[0].item(), detection_as_xyxy[1].item()], + [detection_as_xyxy[2].item(), detection_as_xyxy[3].item()], + ] + ) + boxes.append(bbox) + if track_boxes: + points = bbox + scores = np.array([detection_as_xyxy[4], detection_as_xyxy[4]]) + else: + points = bbox.mean(axis=0, keepdims=True) + scores = detection_as_xyxy[[4]] + + norfair_detections.append( + Detection(points=points, scores=scores, label=detection_as_xyxy[-1].item()) + ) + + return norfair_detections, boxes + + +def run(): + parser = argparse.ArgumentParser(description="Track objects in a video.") + parser.add_argument("files", type=str, nargs="+", help="Video files to process") + parser.add_argument( + "--model", + type=str, + default="yolov5n", + help="YOLO model to use, possible values are yolov5n, yolov5s, yolov5m, yolov5l, yolov5x", + ) + parser.add_argument( + "--confidence-threshold", + type=float, + help="Confidence threshold of detections", + default=0.15, + ) + parser.add_argument( + "--distance-threshold", + type=float, + default=0.8, + help="Max distance to consider when matching detections and tracked objects", + ) + parser.add_argument( + "--initialization-delay", + type=float, + default=3, + help="Min detections needed to start the tracked object", + ) + parser.add_argument( + "--track-boxes", + dest="track_boxes", + action="store_true", + help="Pass it to track bounding boxes instead of just the centroids", + ) + parser.add_argument( + "--hit-counter-max", + type=int, + default=30, + help="Max iteration the tracked object is kept after when there are no detections", + ) + parser.add_argument( + "--iou-threshold", type=float, help="Iou threshold for detector", default=0.15 + ) + parser.add_argument( + "--image-size", type=int, help="Size of the images for detector", default=480 + ) + parser.add_argument( + "--classes", type=int, nargs="+", default=[0], help="Classes to track" + ) + parser.add_argument( + "--transformation", + default="homography", + help="Type of transformation, possible values are homography, translation, none", + ) + parser.add_argument( + "--max-points", + type=int, + default=500, + help="Max 
points sampled to calculate camera motion", + ) + parser.add_argument( + "--min-distance", + type=float, + default=7, + help="Min distance between points sampled to calculate camera motion", + ) + parser.add_argument( + "--no-mask-detections", + dest="mask_detections", + action="store_false", + default=True, + help="By default we don't sample regions where objects were detected when estimating camera motion. Pass this flag to disable this behavior", + ) + parser.add_argument( + "--save", + dest="save", + action="store_true", + help="Pass this flag to save the video instead of showing the frames", + ) + parser.add_argument( + "--output-name", + default=None, + help="Name of the output file", + ) + parser.add_argument( + "--downsample-ratio", + type=int, + default=1, + help="Downsample ratio when showing frames", + ) + parser.add_argument( + "--fixed-camera-scale", + type=float, + default=0, + help="Scale of the fixed camera, set to 0 to disable. Note that this only works for translation", + ) + parser.add_argument( + "--draw-absolute-grid", + dest="absolute_grid", + action="store_true", + help="Pass this flag to draw absolute grid for reference", + ) + parser.add_argument( + "--draw-objects", + dest="draw_objects", + action="store_true", + help="Pass this flag to draw tracked object as points or as boxes if --track-boxes is used.", + ) + parser.add_argument( + "--draw-paths", + dest="draw_paths", + action="store_true", + help="Pass this flag to draw the paths of the objects (SLOW)", + ) + parser.add_argument( + "--path-history", + type=int, + default=20, + help="Length of the paths", + ) + parser.add_argument( + "--id-size", + type=float, + default=None, + help="Size multiplier of the ids when drawing. Thikness will addapt to size", + ) + parser.add_argument( + "--draw-flow", + dest="draw_flow", + action="store_true", + help="Pass this flag to draw the optical flow of the selected points", + ) + + args = parser.parse_args() + + model = torch.hub.load("ultralytics/yolov5", args.model) + model.conf_threshold = 0 + model.iou_threshold = args.iou_threshold + model.image_size = args.image_size + model.classes = args.classes + + use_fixed_camera = args.fixed_camera_scale > 0 + tracked_objects = [] + # Process Videos + for input_path in args.files: + if args.transformation == "homography": + transformations_getter = HomographyTransformationGetter() + elif args.transformation == "translation": + transformations_getter = TranslationTransformationGetter() + elif args.transformation == "none": + transformations_getter = None + else: + raise ValueError(f"invalid transformation {args.transformation}") + if transformations_getter is not None: + motion_estimator = MotionEstimator( + max_points=args.max_points, + min_distance=args.min_distance, + transformations_getter=transformations_getter, + draw_flow=args.draw_flow, + ) + else: + motion_estimator = None + + if use_fixed_camera: + fixed_camera = FixedCamera(scale=args.fixed_camera_scale) + + if args.draw_paths: + path_drawer = AbsolutePaths(max_history=args.path_history, thickness=2) + + video = Video(input_path=input_path) + show_or_write = ( + video.write + if args.save + else partial(video.show, downsample_ratio=args.downsample_ratio) + ) + + tracker = Tracker( + distance_function="euclidean", + detection_threshold=args.confidence_threshold, + distance_threshold=args.distance_threshold, + initialization_delay=args.initialization_delay, + hit_counter_max=args.hit_counter_max, + ) + for frame in video: + detections = model(frame) + detections, boxes = 
yolo_detections_to_norfair_detections( + detections, args.track_boxes + ) + + mask = None + if args.mask_detections: + # create a mask of ones + mask = np.ones(frame.shape[:2], frame.dtype) + # set to 0 all detections + for b in boxes: + i = b.astype(int) + mask[i[0, 1] : i[1, 1], i[0, 0] : i[1, 0]] = 0 + if args.track_boxes: + for obj in tracked_objects: + i = obj.estimate.astype(int) + mask[i[0, 1] : i[1, 1], i[0, 0] : i[1, 0]] = 0 + + if motion_estimator is None: + coord_transformations = None + else: + coord_transformations = motion_estimator.update(frame, mask) + + tracked_objects = tracker.update( + detections=detections, coord_transformations=coord_transformations + ) + + if args.draw_objects: + draw_tracked_objects( + frame, + tracked_objects, + id_size=args.id_size, + id_thickness=None + if args.id_size is None + else int(args.id_size * 2), + ) + + if args.absolute_grid: + draw_absolute_grid(frame, coord_transformations) + + if args.draw_paths: + frame = path_drawer.draw( + frame, tracked_objects, coord_transform=coord_transformations + ) + + if use_fixed_camera: + frame = fixed_camera.adjust_frame(frame, coord_transformations) + + show_or_write(frame) + + +if __name__ == "__main__": + run() From c840c9c655a632c3392b40eeeaf48530bd49e7ef Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 10:14:04 -0500 Subject: [PATCH 26/40] Clean up --- frigate/ptz_autotrack.py | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index 9feef0ed63..84ea6a3243 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -39,9 +39,9 @@ def motion_estimator(self, detections, frame_time, camera_name): self.camera_config.onvif.autotracking.enabled and not self.ptz_stopped.is_set() ): - # logger.debug( - # f"Motion estimator running for {camera_name} - frame time: {frame_time}" - # ) + logger.debug( + f"Motion estimator running for {camera_name} - frame time: {frame_time}" + ) frame_id = f"{camera_name}{frame_time}" yuv_frame = self.frame_manager.get( @@ -70,9 +70,9 @@ def motion_estimator(self, detections, frame_time, camera_name): self.frame_manager.close(frame_id) - # logger.debug( - # f"Motion estimator transformation: {self.coord_transformations.rel_to_abs((0,0))}" - # ) + logger.debug( + f"Motion estimator transformation: {self.coord_transformations.rel_to_abs((0,0))}" + ) return self.coord_transformations @@ -185,17 +185,12 @@ def _process_move_queue(self, camera): break queued_pan, queued_tilt = self.move_queues[camera].get() - logger.debug( - f"queue pan: {queued_pan}, queue tilt: {queued_tilt}" - ) + pan += queued_pan tilt += queued_tilt else: move_data = self.move_queues[camera].get() pan, tilt = move_data - logger.debug(f"removing pan: {pan}, removing tilt: {tilt}") - - logger.debug(f"final pan: {pan}, final tilt: {tilt}") self.onvif._move_relative(camera, pan, tilt, 1) From c9a9734c73c3baf2fcf8cae88a84ba2f0f800553 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 10:15:14 -0500 Subject: [PATCH 27/40] remove unused code --- motion_estimator.py | 284 -------------------------------------------- 1 file changed, 284 deletions(-) delete mode 100644 motion_estimator.py diff --git a/motion_estimator.py b/motion_estimator.py deleted file mode 100644 index 62949341ef..0000000000 --- a/motion_estimator.py +++ /dev/null @@ -1,284 +0,0 @@ -import argparse -from functools import partial 
- -import numpy as np -import torch -from norfair import ( - AbsolutePaths, - Detection, - FixedCamera, - Tracker, - Video, - draw_absolute_grid, -) -from norfair.camera_motion import ( - HomographyTransformationGetter, - MotionEstimator, - TranslationTransformationGetter, -) -from norfair.drawing import draw_tracked_objects - - -def yolo_detections_to_norfair_detections(yolo_detections, track_boxes): - norfair_detections = [] - boxes = [] - detections_as_xyxy = yolo_detections.xyxy[0] - for detection_as_xyxy in detections_as_xyxy: - detection_as_xyxy = detection_as_xyxy.cpu().numpy() - bbox = np.array( - [ - [detection_as_xyxy[0].item(), detection_as_xyxy[1].item()], - [detection_as_xyxy[2].item(), detection_as_xyxy[3].item()], - ] - ) - boxes.append(bbox) - if track_boxes: - points = bbox - scores = np.array([detection_as_xyxy[4], detection_as_xyxy[4]]) - else: - points = bbox.mean(axis=0, keepdims=True) - scores = detection_as_xyxy[[4]] - - norfair_detections.append( - Detection(points=points, scores=scores, label=detection_as_xyxy[-1].item()) - ) - - return norfair_detections, boxes - - -def run(): - parser = argparse.ArgumentParser(description="Track objects in a video.") - parser.add_argument("files", type=str, nargs="+", help="Video files to process") - parser.add_argument( - "--model", - type=str, - default="yolov5n", - help="YOLO model to use, possible values are yolov5n, yolov5s, yolov5m, yolov5l, yolov5x", - ) - parser.add_argument( - "--confidence-threshold", - type=float, - help="Confidence threshold of detections", - default=0.15, - ) - parser.add_argument( - "--distance-threshold", - type=float, - default=0.8, - help="Max distance to consider when matching detections and tracked objects", - ) - parser.add_argument( - "--initialization-delay", - type=float, - default=3, - help="Min detections needed to start the tracked object", - ) - parser.add_argument( - "--track-boxes", - dest="track_boxes", - action="store_true", - help="Pass it to track bounding boxes instead of just the centroids", - ) - parser.add_argument( - "--hit-counter-max", - type=int, - default=30, - help="Max iteration the tracked object is kept after when there are no detections", - ) - parser.add_argument( - "--iou-threshold", type=float, help="Iou threshold for detector", default=0.15 - ) - parser.add_argument( - "--image-size", type=int, help="Size of the images for detector", default=480 - ) - parser.add_argument( - "--classes", type=int, nargs="+", default=[0], help="Classes to track" - ) - parser.add_argument( - "--transformation", - default="homography", - help="Type of transformation, possible values are homography, translation, none", - ) - parser.add_argument( - "--max-points", - type=int, - default=500, - help="Max points sampled to calculate camera motion", - ) - parser.add_argument( - "--min-distance", - type=float, - default=7, - help="Min distance between points sampled to calculate camera motion", - ) - parser.add_argument( - "--no-mask-detections", - dest="mask_detections", - action="store_false", - default=True, - help="By default we don't sample regions where objects were detected when estimating camera motion. 
Pass this flag to disable this behavior", - ) - parser.add_argument( - "--save", - dest="save", - action="store_true", - help="Pass this flag to save the video instead of showing the frames", - ) - parser.add_argument( - "--output-name", - default=None, - help="Name of the output file", - ) - parser.add_argument( - "--downsample-ratio", - type=int, - default=1, - help="Downsample ratio when showing frames", - ) - parser.add_argument( - "--fixed-camera-scale", - type=float, - default=0, - help="Scale of the fixed camera, set to 0 to disable. Note that this only works for translation", - ) - parser.add_argument( - "--draw-absolute-grid", - dest="absolute_grid", - action="store_true", - help="Pass this flag to draw absolute grid for reference", - ) - parser.add_argument( - "--draw-objects", - dest="draw_objects", - action="store_true", - help="Pass this flag to draw tracked object as points or as boxes if --track-boxes is used.", - ) - parser.add_argument( - "--draw-paths", - dest="draw_paths", - action="store_true", - help="Pass this flag to draw the paths of the objects (SLOW)", - ) - parser.add_argument( - "--path-history", - type=int, - default=20, - help="Length of the paths", - ) - parser.add_argument( - "--id-size", - type=float, - default=None, - help="Size multiplier of the ids when drawing. Thikness will addapt to size", - ) - parser.add_argument( - "--draw-flow", - dest="draw_flow", - action="store_true", - help="Pass this flag to draw the optical flow of the selected points", - ) - - args = parser.parse_args() - - model = torch.hub.load("ultralytics/yolov5", args.model) - model.conf_threshold = 0 - model.iou_threshold = args.iou_threshold - model.image_size = args.image_size - model.classes = args.classes - - use_fixed_camera = args.fixed_camera_scale > 0 - tracked_objects = [] - # Process Videos - for input_path in args.files: - if args.transformation == "homography": - transformations_getter = HomographyTransformationGetter() - elif args.transformation == "translation": - transformations_getter = TranslationTransformationGetter() - elif args.transformation == "none": - transformations_getter = None - else: - raise ValueError(f"invalid transformation {args.transformation}") - if transformations_getter is not None: - motion_estimator = MotionEstimator( - max_points=args.max_points, - min_distance=args.min_distance, - transformations_getter=transformations_getter, - draw_flow=args.draw_flow, - ) - else: - motion_estimator = None - - if use_fixed_camera: - fixed_camera = FixedCamera(scale=args.fixed_camera_scale) - - if args.draw_paths: - path_drawer = AbsolutePaths(max_history=args.path_history, thickness=2) - - video = Video(input_path=input_path) - show_or_write = ( - video.write - if args.save - else partial(video.show, downsample_ratio=args.downsample_ratio) - ) - - tracker = Tracker( - distance_function="euclidean", - detection_threshold=args.confidence_threshold, - distance_threshold=args.distance_threshold, - initialization_delay=args.initialization_delay, - hit_counter_max=args.hit_counter_max, - ) - for frame in video: - detections = model(frame) - detections, boxes = yolo_detections_to_norfair_detections( - detections, args.track_boxes - ) - - mask = None - if args.mask_detections: - # create a mask of ones - mask = np.ones(frame.shape[:2], frame.dtype) - # set to 0 all detections - for b in boxes: - i = b.astype(int) - mask[i[0, 1] : i[1, 1], i[0, 0] : i[1, 0]] = 0 - if args.track_boxes: - for obj in tracked_objects: - i = obj.estimate.astype(int) - mask[i[0, 1] : i[1, 
1], i[0, 0] : i[1, 0]] = 0 - - if motion_estimator is None: - coord_transformations = None - else: - coord_transformations = motion_estimator.update(frame, mask) - - tracked_objects = tracker.update( - detections=detections, coord_transformations=coord_transformations - ) - - if args.draw_objects: - draw_tracked_objects( - frame, - tracked_objects, - id_size=args.id_size, - id_thickness=None - if args.id_size is None - else int(args.id_size * 2), - ) - - if args.absolute_grid: - draw_absolute_grid(frame, coord_transformations) - - if args.draw_paths: - frame = path_drawer.draw( - frame, tracked_objects, coord_transform=coord_transformations - ) - - if use_fixed_camera: - frame = fixed_camera.adjust_frame(frame, coord_transformations) - - show_or_write(frame) - - -if __name__ == "__main__": - run() From 2c29b4f16ca28c889ce48520a0fcf18c71d48403 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 10:23:16 -0500 Subject: [PATCH 28/40] merge conflict fix --- frigate/app.py | 1 + 1 file changed, 1 insertion(+) diff --git a/frigate/app.py b/frigate/app.py index 0b0544a9af..ca2b284da9 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -168,6 +168,7 @@ def init_config(self) -> None: "capture_process": None, "process": None, } + self.camera_metrics[camera_name]["ptz_stopped"].set() self.feature_metrics[camera_name] = { "audio_enabled": mp.Value( # type: ignore[typeddict-item] # issue https://github.com/python/typeshed/issues/8799 From f84f6d12e27a18dc53745482dcf6f4b4da0146b3 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 10:26:47 -0500 Subject: [PATCH 29/40] docs: update link to object_detectors page --- docs/docs/configuration/autotracking.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/configuration/autotracking.md b/docs/docs/configuration/autotracking.md index ca1f59daac..eb9db54284 100644 --- a/docs/docs/configuration/autotracking.md +++ b/docs/docs/configuration/autotracking.md @@ -66,6 +66,6 @@ Every PTZ camera is different, so autotracking may not perform ideally in every The object tracker in Frigate estimates the motion of the PTZ so that tracked objects are preserved when the camera moves. In most cases (especially for faster moving objects), the default 5 fps is insufficient for the motion estimator to perform accurately. 10 fps is the current recommendation. Higher frame rates will likely not be more performant and will only slow down Frigate and the motion estimator. Adjust your camera to output 10 frames per second and change the `fps` parameter in the [detect configuration](index.md) of your configuration file. -A fast [detector](detectors.md) is recommended. CPU detectors will not perform well or won't work at all. If Frigate already has trouble keeping track of your object, the autotracker will struggle as well. +A fast [detector](object_detectors.md) is recommended. CPU detectors will not perform well or won't work at all. If Frigate already has trouble keeping track of your object, the autotracker will struggle as well. The autotracker queues up motion requests for the tracked object while the PTZ is moving and will move make one longer move when complete. If your PTZ's motor is slow, you may not be able to reliably autotrack fast moving objects. 
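As an aside on the relative-move math that `_autotrack_move_ptz` settles on in the patches above, the calculation boils down to normalizing the tracked object's centroid against the frame center. Below is a standalone sketch with a worked example; the frame size and centroid values are made up for illustration, and sign conventions ultimately depend on the camera.

```python
def relative_move_for_centroid(centroid, frame_width, frame_height):
    """How far the object's centroid sits from the frame center,
    expressed as a fraction of the field of view (-0.5 .. 0.5)."""
    pan = 0.5 - (centroid[0] / frame_width)
    tilt = 0.5 - (centroid[1] / frame_height)
    # The autotracker enqueues (-pan, tilt): an object right of center makes
    # `pan` negative here, and the camera should pan right (positive) toward it.
    return -pan, tilt


# Worked example on a 1280x720 detect stream: an object centered at
# (960, 180) sits a quarter-frame right of and a quarter-frame above center.
print(relative_move_for_centroid((960, 180), 1280, 720))
# -> (0.25, 0.25): pan a quarter of the FOV toward the object, and tilt a
#    quarter of the FOV toward the top of the frame.
```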
From fbc6e7decaaf2de5debf13a4971dfb523799d64b Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 10:34:36 -0500 Subject: [PATCH 30/40] Update docs/docs/configuration/autotracking.md Co-authored-by: Nicolas Mowen --- docs/docs/configuration/autotracking.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/configuration/autotracking.md b/docs/docs/configuration/autotracking.md index eb9db54284..b3c99789e7 100644 --- a/docs/docs/configuration/autotracking.md +++ b/docs/docs/configuration/autotracking.md @@ -64,7 +64,7 @@ cameras: Every PTZ camera is different, so autotracking may not perform ideally in every situation. This experimental feature was initially developed using an EmpireTech/Dahua SD1A404XB-GNR. -The object tracker in Frigate estimates the motion of the PTZ so that tracked objects are preserved when the camera moves. In most cases (especially for faster moving objects), the default 5 fps is insufficient for the motion estimator to perform accurately. 10 fps is the current recommendation. Higher frame rates will likely not be more performant and will only slow down Frigate and the motion estimator. Adjust your camera to output 10 frames per second and change the `fps` parameter in the [detect configuration](index.md) of your configuration file. +The object tracker in Frigate estimates the motion of the PTZ so that tracked objects are preserved when the camera moves. In most cases (especially for faster moving objects), the default 5 fps is insufficient for the motion estimator to perform accurately. 10 fps is the current recommendation. Higher frame rates will likely not be more performant and will only slow down Frigate and the motion estimator. Adjust your camera to output at least 10 frames per second and change the `fps` parameter in the [detect configuration](index.md) of your configuration file. A fast [detector](object_detectors.md) is recommended. CPU detectors will not perform well or won't work at all. If Frigate already has trouble keeping track of your object, the autotracker will struggle as well. From 6ee03f1e54db2766c0afb1c2eadf40eeea3f2016 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 10:42:13 -0500 Subject: [PATCH 31/40] clarify wording --- docs/docs/configuration/autotracking.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/configuration/autotracking.md b/docs/docs/configuration/autotracking.md index eb9db54284..2f87d8ceca 100644 --- a/docs/docs/configuration/autotracking.md +++ b/docs/docs/configuration/autotracking.md @@ -68,4 +68,4 @@ The object tracker in Frigate estimates the motion of the PTZ so that tracked ob A fast [detector](object_detectors.md) is recommended. CPU detectors will not perform well or won't work at all. If Frigate already has trouble keeping track of your object, the autotracker will struggle as well. -The autotracker queues up motion requests for the tracked object while the PTZ is moving and will move make one longer move when complete. If your PTZ's motor is slow, you may not be able to reliably autotrack fast moving objects. +The autotracker will add PTZ motion requests to a queue while the motor is moving. Once the motor stops, the events in the queue will be executed together as one large move (rather than incremental moves). If your PTZ's motor is slow, you may not be able to reliably autotrack fast moving objects. 
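The queueing behavior described in the reworded paragraph above (small relative moves accumulating while the motor is busy, then executed together as one larger move) can be shown with a small standalone sketch. The function below mirrors the accumulate-and-cap idea from `_process_move_queue` earlier in the series, but it is only an illustration, not the actual thread.

```python
import queue


def coalesce_moves(move_queue: queue.Queue) -> tuple:
    """Drain queued (pan, tilt) deltas into one combined relative move.

    Deltas keep accumulating until the combined move would leave the ONVIF
    relative range of -1.0..1.0 on either axis; anything beyond that stays
    queued for the next pass, once the motor has stopped again.
    """
    pan, tilt = move_queue.get()  # block until at least one request exists
    while not move_queue.empty():
        next_pan, next_tilt = move_queue.queue[0]  # peek without removing
        if abs(pan + next_pan) > 1.0 or abs(tilt + next_tilt) > 1.0:
            break
        move_queue.get()
        pan += next_pan
        tilt += next_tilt
    return pan, tilt


# Three small nudges queued during one camera move collapse into a single
# larger request of roughly (0.45, 0.15).
q = queue.Queue()
for delta in [(0.2, 0.05), (0.15, 0.05), (0.1, 0.05)]:
    q.put(delta)
print(coalesce_moves(q))
```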
From df59636ff93f374d67696357e6cf5731abda5b6e Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 11:27:02 -0500 Subject: [PATCH 32/40] pass actual instances directly --- frigate/app.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frigate/app.py b/frigate/app.py index ca2b284da9..4455ae919e 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -372,8 +372,8 @@ def start_detectors(self) -> None: def start_ptz_autotracker(self) -> None: self.ptz_autotracker_thread = PtzAutoTrackerThread( self.config, - self.dispatcher.onvif, - self.dispatcher.camera_metrics, + self.onvif_controller, + self.camera_metrics, self.stop_event, ) self.ptz_autotracker_thread.start() From 714a3e28ebdb70b0bb61ce4dcb226c4e6286630c Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 11:34:31 -0500 Subject: [PATCH 33/40] default return preset --- docs/docs/configuration/autotracking.md | 4 ++-- frigate/config.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/docs/docs/configuration/autotracking.md b/docs/docs/configuration/autotracking.md index 73de278203..b1a2654ebf 100644 --- a/docs/docs/configuration/autotracking.md +++ b/docs/docs/configuration/autotracking.md @@ -54,8 +54,8 @@ cameras: # Required: Begin automatically tracking an object when it enters any of the listed zones. required_zones: - zone_name - # Required: Name of ONVIF camera preset to return to when tracking is over. - return_preset: preset_name + # Required: Name of ONVIF camera preset to return to when tracking is over. (default: shown below) + return_preset: home # Optional: Seconds to delay before returning to preset. (default: shown below) timeout: 10 ``` diff --git a/frigate/config.py b/frigate/config.py index dad6788ef5..61840df259 100644 --- a/frigate/config.py +++ b/frigate/config.py @@ -136,7 +136,8 @@ class PtzAutotrackConfig(FrigateBaseModel): title="List of required zones to be entered in order to begin autotracking.", ) return_preset: str = Field( - title="Name of camera preset to return to when object tracking is over." + default="home", + title="Name of camera preset to return to when object tracking is over.", ) timeout: int = Field( default=10, title="Seconds to delay before returning to preset." 
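With `return_preset` now defaulting to `home` and `timeout` to 10 seconds, the maintenance rule these patches describe reduces to a simple condition. Here is a hypothetical, standalone restatement of that check; the function name and stand-in arguments are illustrative only, not the actual `camera_maintenance` method.

```python
import time


def should_return_to_preset(
    tracked_object,
    previous_object,
    last_seen_frame_time: float,
    timeout_s: float = 10,
    return_preset: str = "home",
) -> bool:
    """True once nothing is being tracked, the previous track has been stale
    for longer than the timeout, and a return preset is configured."""
    return (
        tracked_object is None
        and previous_object is not None
        and time.time() - last_seen_frame_time > timeout_s
        and bool(return_preset)
    )


# Example: 15 seconds after the last sighting, with nothing currently tracked,
# the camera would be sent back to its "home" preset.
print(should_return_to_preset(None, object(), time.time() - 15))  # True
```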
From 0feaef0031acee8446996fd09718fb2348d20cfd Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 11:41:20 -0500 Subject: [PATCH 34/40] fix type --- frigate/ptz_autotrack.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index 84ea6a3243..81af28e342 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -84,7 +84,7 @@ def __init__( self, config: FrigateConfig, onvif: OnvifController, - camera_metrics: CameraMetricsTypes, + camera_metrics: dict[str, CameraMetricsTypes], stop_event: MpEvent, ) -> None: threading.Thread.__init__(self) From 4279342cd31a3918f569e545c067ac79366a0eec Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 11:44:48 -0500 Subject: [PATCH 35/40] Error message when onvif init fails --- frigate/ptz_autotrack.py | 1 + 1 file changed, 1 insertion(+) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index 81af28e342..31e945d5e6 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -145,6 +145,7 @@ def _autotracker_setup(self, cam, camera_name): if not self.onvif.cams[camera_name]["init"]: if not self.onvif._init_onvif(camera_name): + logger.warning(f"Unable to initialize onvif for {camera_name}") return if not self.onvif.cams[camera_name]["relative_fov_supported"]: cam.onvif.autotracking.enabled = False From 70e4ee9fea7903db11a3b1174e8104fe3c8477c9 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 11:51:49 -0500 Subject: [PATCH 36/40] disable autotracking if onvif init fails --- frigate/ptz_autotrack.py | 1 + 1 file changed, 1 insertion(+) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index 31e945d5e6..c9d4d332af 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -146,6 +146,7 @@ def _autotracker_setup(self, cam, camera_name): if not self.onvif.cams[camera_name]["init"]: if not self.onvif._init_onvif(camera_name): logger.warning(f"Unable to initialize onvif for {camera_name}") + cam.onvif.autotracking.enabled = False return if not self.onvif.cams[camera_name]["relative_fov_supported"]: cam.onvif.autotracking.enabled = False From 4f2624022a5ea05c3053efc23dab205193922dd5 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 12:05:14 -0500 Subject: [PATCH 37/40] disable autotracking if onvif init fails --- frigate/ptz_autotrack.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/frigate/ptz_autotrack.py b/frigate/ptz_autotrack.py index c9d4d332af..ce1ba8519e 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz_autotrack.py @@ -147,7 +147,12 @@ def _autotracker_setup(self, cam, camera_name): if not self.onvif._init_onvif(camera_name): logger.warning(f"Unable to initialize onvif for {camera_name}") cam.onvif.autotracking.enabled = False + self.camera_metrics[camera_name][ + "ptz_autotracker_enabled" + ].value = False + return + if not self.onvif.cams[camera_name]["relative_fov_supported"]: cam.onvif.autotracking.enabled = False self.camera_metrics[camera_name][ From 3e95661e8afb99abe32c5608a4f130fb9831a86a Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 12:14:15 -0500 Subject: [PATCH 38/40] ptz module --- frigate/app.py | 4 ++-- frigate/comms/dispatcher.py | 2 +- frigate/http.py | 2 +- 
frigate/object_processing.py | 2 +- frigate/{ptz_autotrack.py => ptz/autotrack.py} | 2 +- frigate/{ptz.py => ptz/onvif.py} | 0 frigate/track/norfair_tracker.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) rename frigate/{ptz_autotrack.py => ptz/autotrack.py} (99%) rename frigate/{ptz.py => ptz/onvif.py} (100%) diff --git a/frigate/app.py b/frigate/app.py index 4455ae919e..507986abcf 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -40,8 +40,8 @@ from frigate.object_processing import TrackedObjectProcessor from frigate.output import output_frames from frigate.plus import PlusApi -from frigate.ptz import OnvifController -from frigate.ptz_autotrack import PtzAutoTrackerThread +from frigate.ptz.autotrack import PtzAutoTrackerThread +from frigate.ptz.onvif import OnvifController from frigate.record.record import manage_recordings from frigate.stats import StatsEmitter, stats_init from frigate.storage import StorageMaintainer diff --git a/frigate/comms/dispatcher.py b/frigate/comms/dispatcher.py index 97eb2093de..9babf38a0d 100644 --- a/frigate/comms/dispatcher.py +++ b/frigate/comms/dispatcher.py @@ -5,7 +5,7 @@ from typing import Any, Callable from frigate.config import FrigateConfig -from frigate.ptz import OnvifCommandEnum, OnvifController +from frigate.ptz.onvif import OnvifCommandEnum, OnvifController from frigate.types import CameraMetricsTypes, FeatureMetricsTypes from frigate.util import restart_frigate diff --git a/frigate/http.py b/frigate/http.py index f3632a0cf1..cf653ca4fe 100644 --- a/frigate/http.py +++ b/frigate/http.py @@ -34,7 +34,7 @@ from frigate.models import Event, Recordings, Timeline from frigate.object_processing import TrackedObject from frigate.plus import PlusApi -from frigate.ptz import OnvifController +from frigate.ptz.onvif import OnvifController from frigate.record.export import PlaybackFactorEnum, RecordingExporter from frigate.stats import stats_snapshot from frigate.storage import StorageMaintainer diff --git a/frigate/object_processing.py b/frigate/object_processing.py index f9f8391cfd..827c997042 100644 --- a/frigate/object_processing.py +++ b/frigate/object_processing.py @@ -22,7 +22,7 @@ ) from frigate.const import CLIPS_DIR from frigate.events.maintainer import EventTypeEnum -from frigate.ptz_autotrack import PtzAutoTrackerThread +from frigate.ptz.autotrack import PtzAutoTrackerThread from frigate.util import ( SharedMemoryFrameManager, area, diff --git a/frigate/ptz_autotrack.py b/frigate/ptz/autotrack.py similarity index 99% rename from frigate/ptz_autotrack.py rename to frigate/ptz/autotrack.py index ce1ba8519e..426ba905ea 100644 --- a/frigate/ptz_autotrack.py +++ b/frigate/ptz/autotrack.py @@ -13,7 +13,7 @@ from norfair.camera_motion import MotionEstimator, TranslationTransformationGetter from frigate.config import CameraConfig, FrigateConfig -from frigate.ptz import OnvifController +from frigate.ptz.onvif import OnvifController from frigate.types import CameraMetricsTypes from frigate.util import SharedMemoryFrameManager, intersection_over_union diff --git a/frigate/ptz.py b/frigate/ptz/onvif.py similarity index 100% rename from frigate/ptz.py rename to frigate/ptz/onvif.py diff --git a/frigate/track/norfair_tracker.py b/frigate/track/norfair_tracker.py index 36bb2532f5..b5bf356344 100644 --- a/frigate/track/norfair_tracker.py +++ b/frigate/track/norfair_tracker.py @@ -6,7 +6,7 @@ from norfair.drawing.drawer import Drawer from frigate.config import CameraConfig -from frigate.ptz_autotrack import PtzMotionEstimator +from 
frigate.ptz.autotrack import PtzMotionEstimator from frigate.track import ObjectTracker from frigate.util import intersection_over_union From 1a13cf601ead54030f15ebd7a539a7cb5ddafa8c Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 3 Jul 2023 12:47:44 -0500 Subject: [PATCH 39/40] verify required_zones in config --- frigate/config.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/frigate/config.py b/frigate/config.py index 61840df259..cc7e68adad 100644 --- a/frigate/config.py +++ b/frigate/config.py @@ -912,6 +912,17 @@ def verify_zone_objects_are_tracked(camera_config: CameraConfig) -> None: ) +def verify_autotrack_zones(camera_config: CameraConfig) -> ValueError | None: + """Verify that required_zones are specified when autotracking is enabled.""" + if ( + camera_config.onvif.autotracking.enabled + and not camera_config.onvif.autotracking.required_zones + ): + raise ValueError( + f"Camera {camera_config.name} has autotracking enabled, required_zones must be set to at least one of the camera's zones." + ) + + class FrigateConfig(FrigateBaseModel): mqtt: MqttConfig = Field(title="MQTT Configuration.") database: DatabaseConfig = Field( @@ -1087,6 +1098,7 @@ def runtime_config(self, plus_api: PlusApi = None) -> FrigateConfig: verify_recording_retention(camera_config) verify_recording_segments_setup_with_reasonable_time(camera_config) verify_zone_objects_are_tracked(camera_config) + verify_autotrack_zones(camera_config) if camera_config.rtmp.enabled: logger.warning( From 51357ccf6785aff6a5185ae6726333bc40b9cd22 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Thu, 6 Jul 2023 09:41:32 -0500 Subject: [PATCH 40/40] update util after dev merge --- frigate/ptz/autotrack.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frigate/ptz/autotrack.py b/frigate/ptz/autotrack.py index 426ba905ea..0417af38fb 100644 --- a/frigate/ptz/autotrack.py +++ b/frigate/ptz/autotrack.py @@ -15,7 +15,7 @@ from frigate.config import CameraConfig, FrigateConfig from frigate.ptz.onvif import OnvifController from frigate.types import CameraMetricsTypes -from frigate.util import SharedMemoryFrameManager, intersection_over_union +from frigate.util.image import SharedMemoryFrameManager, intersection_over_union logger = logging.getLogger(__name__)
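The config check added in the `verify_required_zones` patch can be exercised in isolation. The sketch below mirrors the validation logic from the diff but uses `SimpleNamespace` objects as hypothetical stand-ins for `CameraConfig`, so it runs without the rest of Frigate:

```python
from types import SimpleNamespace


def verify_autotrack_zones_sketch(camera_config) -> None:
    """Mirror of the new check: autotracking requires at least one required zone."""
    if (
        camera_config.onvif.autotracking.enabled
        and not camera_config.onvif.autotracking.required_zones
    ):
        raise ValueError(
            f"Camera {camera_config.name} has autotracking enabled, "
            "required_zones must be set to at least one of the camera's zones."
        )


# Hypothetical camera objects standing in for CameraConfig:
ok = SimpleNamespace(
    name="front",
    onvif=SimpleNamespace(
        autotracking=SimpleNamespace(enabled=True, required_zones=["driveway"])
    ),
)
bad = SimpleNamespace(
    name="back",
    onvif=SimpleNamespace(
        autotracking=SimpleNamespace(enabled=True, required_zones=[])
    ),
)

verify_autotrack_zones_sketch(ok)       # passes silently
try:
    verify_autotrack_zones_sketch(bad)  # raises ValueError at config load time
except ValueError as err:
    print(err)
```

Running the check during `runtime_config` means a misconfigured camera is rejected at startup rather than silently never autotracking because no zone can trigger it.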