Skip to content

Commit

Permalink
Merge branch 'dev'
Browse files Browse the repository at this point in the history
  • Loading branch information
lukacu committed Jan 31, 2024
2 parents 4ee7af7 + ce5dda8 commit 7629b27
Show file tree
Hide file tree
Showing 33 changed files with 787 additions and 270 deletions.
14 changes: 7 additions & 7 deletions docs/api/document.rst
Original file line number Diff line number Diff line change
@@ -1,21 +1,21 @@
Document module
Report module
============

.. automodule:: vot.document
.. automodule:: vot.report
:members:

.. automodule:: vot.document.common
.. automodule:: vot.report.common
:members:

HTML document generation
HTML report generation
------------------------

.. automodule:: vot.document
.. automodule:: vot.report
:members:

LaTeX document generation
LaTeX report generation
-------------------------

.. automodule:: vot.document.latex
.. automodule:: vot.report.latex
:members:

19 changes: 14 additions & 5 deletions vot/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,13 +66,24 @@ def check_updates() -> bool:
from attributee import Attributee, Integer, Boolean

class GlobalConfiguration(Attributee):
"""Global configuration object for the toolkit. It is used to store global configuration options.
"""Global configuration object for the toolkit. It is used to store global configuration options. It can be initialized
from environment variables. The following options are supported:
- ``VOT_DEBUG_MODE``: Enables debug mode for the toolkit.
- ``VOT_SEQUENCE_CACHE_SIZE``: Maximum number of sequences to keep in cache.
- ``VOT_RESULTS_BINARY``: Enables binary results format.
- ``VOT_MASK_OPTIMIZE_READ``: Enables mask optimization when reading masks.
- ``VOT_WORKER_POOL_SIZE``: Number of workers to use for parallel processing.
- ``VOT_PERSISTENT_CACHE``: Enables persistent cache for analysis results in workspace.
"""

debug_mode = Boolean(default=False, description="Enables debug mode for the toolkit.")
sequence_cache_size = Integer(default=1000, description="Maximum number of sequences to keep in cache.")
sequence_cache_size = Integer(default=100, description="Maximum number of sequences to keep in cache.")
results_binary = Boolean(default=True, description="Enables binary results format.")
mask_optimize_read = Boolean(default=True, description="Enables mask optimization when reading masks.")
worker_pool_size = Integer(default=1, description="Number of workers to use for parallel processing.")
persistent_cache = Boolean(default=True, description="Enables persistent cache for analysis results in workspace.")

def __init__(self):
"""Initializes the global configuration object. It reads the configuration from environment variables.
Expand All @@ -90,9 +101,7 @@ def __init__(self):

def __repr__(self):
"""Returns a string representation of the global configuration object."""
return "debug_mode={} sequence_cache_size={} results_binary={} mask_optimize_read={}".format(
self.debug_mode, self.sequence_cache_size, self.results_binary, self.mask_optimize_read
)
return " ".join(["{}={}".format(k, getattr(self, k)) for k in self.attributes()])

config = GlobalConfiguration()

Expand Down
2 changes: 2 additions & 0 deletions vot/analysis/accuracy.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,8 @@ def gather_overlaps(trajectory: List[Region], groundtruth: List[Region], burnin:
Returns:
np.ndarray: List of overlaps."""

assert len(trajectory) == len(groundtruth), "Trajectory and groundtruth must have the same length."

overlaps = np.array(calculate_overlaps(trajectory, groundtruth, bounds))
mask = np.ones(len(overlaps), dtype=bool)

Expand Down
8 changes: 4 additions & 4 deletions vot/analysis/longterm.py
Original file line number Diff line number Diff line change
Expand Up @@ -481,8 +481,8 @@ def _title_default(self):

def describe(self):
"""Describes the analysis."""
return Measure("Non-reported Error", "NRE", 0, 1, Sorting.DESCENDING), \
Measure("Drift-rate Error", "DRE", 0, 1, Sorting.DESCENDING), \
return Measure("Non-reported Error", "NRE", 0, 1, Sorting.ASCENDING), \
Measure("Drift-rate Error", "DRE", 0, 1, Sorting.ASCENDING), \
Measure("Absence-detection Quality", "ADQ", 0, 1, Sorting.DESCENDING),

def subcompute(self, experiment: Experiment, tracker: Tracker, sequence: Sequence, dependencies: List[Grid]) -> Tuple[Any]:
Expand Down Expand Up @@ -562,8 +562,8 @@ def dependencies(self):

def describe(self):
"""Describes the analysis."""
return Measure("Non-reported Error", "NRE", 0, 1, Sorting.DESCENDING), \
Measure("Drift-rate Error", "DRE", 0, 1, Sorting.DESCENDING), \
return Measure("Non-reported Error", "NRE", 0, 1, Sorting.ASCENDING), \
Measure("Drift-rate Error", "DRE", 0, 1, Sorting.ASCENDING), \
Measure("Absence-detection Quality", "ADQ", 0, 1, Sorting.DESCENDING),

def compatible(self, experiment: Experiment):
Expand Down
24 changes: 24 additions & 0 deletions vot/analysis/tests.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
""" Unit tests for analysis module. """


import unittest

class Tests(unittest.TestCase):
    """Unit tests for the analysis module."""

    def test_perfect_accuracy(self):
        """A trajectory identical to the groundtruth yields mean overlap of 1.

        Frame 0 is replaced with a Special region (initialization frame);
        presumably gather_overlaps skips it via its burn-in handling — the
        remaining frames overlap the groundtruth perfectly.
        """
        import numpy as np

        from vot.region import Rectangle, Special
        from vot.analysis.accuracy import gather_overlaps

        trajectory = [Rectangle(0, 0, 100, 100)] * 30
        groundtruth = [Rectangle(0, 0, 100, 100)] * 30

        # Mark the first frame as the initialization frame.
        trajectory[0] = Special(1)

        overlaps = gather_overlaps(trajectory, groundtruth)

        # Removed stray debug print; the assertion alone reports failures.
        self.assertEqual(np.mean(overlaps), 1)
20 changes: 10 additions & 10 deletions vot/dataset/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -284,8 +284,7 @@ def append(self, image):

self._images.append(image)

@property
def length(self) -> int:
def __len__(self) -> int:
"""Returns the length of the sequence channel in number of frames
Returns:
Expand Down Expand Up @@ -868,11 +867,12 @@ def groundtruth(self, index=None):
Returns:
Region: Groundtruth region
"""
data = self.__preload()
if len(self.objects()) != 1:
objids = self.objects()

if len(objids) != 1:
raise DatasetException("More than one object in sequence")

id = next(iter(data.objects))
id = next(iter(objids))
return self.object(id, index)

def tags(self, index: int = None) -> List[str]:
Expand Down Expand Up @@ -951,7 +951,7 @@ def __init__(self, name, channels):
Raises:
DatasetException: If images are not provided for all channels
"""
super().__init__(name, None)
super().__init__(name)
self._channels = {c: InMemoryChannel() for c in channels}
self._tags = {}
self._values = {}
Expand Down Expand Up @@ -1014,7 +1014,8 @@ def channels(self) -> List[str]:
List[str]: List of channel names
"""
return self._channels.keys()
print(self._channels.keys())
return set(self._channels.keys())

def frame(self, index : int) -> "Frame":
"""Returns the specified frame. The frame is returned as a Frame object.
Expand Down Expand Up @@ -1133,15 +1134,14 @@ def size(self) -> tuple:
tuple: Sequence size
"""
return self.channel().size

@property

def channels(self) -> list:
"""Returns a list of channel names
Returns:
list: List of channel names
"""
return self._channels.keys()
return set(self._channels.keys())

def download_bundle(url: str, path: str = "."):
"""Downloads a dataset bundle as a ZIP file and decompresses it.
Expand Down
8 changes: 7 additions & 1 deletion vot/dataset/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -319,5 +319,11 @@ def write_sequence(directory: str, sequence: Sequence):
with open(os.path.join(directory, "%s.value" % value), "w") as fp:
fp.write(data)

write_trajectory(os.path.join(directory, "groundtruth.txt"), [f.groundtruth() for f in sequence])
# Write groundtruth in case of single object
if len(sequence.objects()) == 1:
write_trajectory(os.path.join(directory, "groundtruth.txt"), [f.groundtruth() for f in sequence])
else:
for id in sequence.objects():
write_trajectory(os.path.join(directory, "groundtruth_%s.txt" % id), [f.object(id) for f in sequence])

write_properties(os.path.join(directory, "sequence"), metadata)
27 changes: 25 additions & 2 deletions vot/dataset/proxy.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def __len__(self):
Returns:
int: Length of the sequence.
"""
return len(self)
return len(self._source)

def frame(self, index: int) -> Frame:
"""Returns a frame object for the given index. Forwards the request to the source sequence.
Expand Down Expand Up @@ -384,4 +384,27 @@ def groundtruth(self, index: int = None) -> List[Region]:
Args:
index (int): Index of the frame.
"""
return self._source.object(self._id, index)
return self._source.object(self._id, index)

class ObjectsHideFilterSequence(ProxySequence):
    """A proxy sequence that virtually removes specified objects from the sequence.

    Note that the objects are not removed from the underlying sequence; they are
    only hidden when listing objects through this proxy.
    """

    def __init__(self, source: Sequence, ids: Set[str]):
        """Creates an object hide filter proxy sequence.

        Args:
            source (Sequence): Source sequence object
            ids (Set[str]): IDs of the objects that will be hidden in the proxy sequence.
        """
        super().__init__(source)
        self._ids = ids

    def objects(self):
        """Returns the identifiers of the objects that are not hidden.

        NOTE(review): despite the Dict return type documented originally, the
        comprehension below builds a *set* of ids (iterating the source's
        objects() yields ids) — confirm intended return type against
        Sequence.objects before relying on values.

        Returns:
            Set[str]: Identifiers of all non-hidden objects in the sequence.
        """
        objects = self._source.objects()
        return {id for id in objects if id not in self._ids}
28 changes: 22 additions & 6 deletions vot/experiment/transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,11 @@

import os
from abc import abstractmethod
from typing import List
import typing

from PIL import Image

from attributee import Attributee, Integer, Float, Boolean
from attributee import Attributee, Integer, Float, Boolean, String, List

from vot.dataset import Sequence, InMemorySequence
from vot.dataset.proxy import FrameMapSequence
Expand All @@ -28,7 +28,7 @@ def __init__(self, cache: "LocalStorage", **kwargs):
self._cache = cache

@abstractmethod
def __call__(self, sequence: Sequence) -> List[Sequence]:
def __call__(self, sequence: Sequence) -> typing.List[Sequence]:
"""Generate a list of sequences from the given sequence. The generated sequences are stored in the cache if needed.
Args:
Expand All @@ -45,7 +45,7 @@ class SingleObject(Transformer):

trim = Boolean(default=False, description="Trim each generated sequence to a visible subsection for the selected object")

def __call__(self, sequence: Sequence) -> List[Sequence]:
def __call__(self, sequence: Sequence) -> typing.List[Sequence]:
"""Generate a list of sequences from the given sequence.
Args:
Expand All @@ -69,7 +69,7 @@ class Redetection(Transformer):
padding = Float(default=2, val_min=0)
scaling = Float(default=1, val_min=0.1, val_max=10)

def __call__(self, sequence: Sequence) -> List[Sequence]:
def __call__(self, sequence: Sequence) -> typing.List[Sequence]:
"""Generate a list of sequences from the given sequence.
Args:
Expand Down Expand Up @@ -108,5 +108,21 @@ def __call__(self, sequence: Sequence) -> List[Sequence]:
write_sequence(chache_dir, generated)

source = read_sequence(chache_dir)
mapping = [0] * self.initialization + [1] * (len(self) - self.initialization)
mapping = [0] * self.initialization + [1] * (len(source) - self.initialization)
return [FrameMapSequence(source, mapping)]

@transformer_registry.register("ignore")
class IgnoreObjects(Transformer):
    """Transformer that hides objects with certain ids from the sequence."""

    ids = List(String(), default=[], description="List of ids to be ignored")

    def __call__(self, sequence: Sequence) -> typing.List[Sequence]:
        """Generate a list of sequences from the given sequence.

        Wraps the input sequence in a proxy that hides the configured object
        ids; no data is copied or removed from the source.

        Args:
            sequence (Sequence): The sequence to be transformed.

        Returns:
            typing.List[Sequence]: A single-element list with the filtering proxy.
        """
        from vot.dataset.proxy import ObjectsHideFilterSequence

        filtered = ObjectsHideFilterSequence(sequence, self.ids)
        return [filtered]
Loading

0 comments on commit 7629b27

Please sign in to comment.