From eb6816a823da7ba144659eb6b2c5ea823558982b Mon Sep 17 00:00:00 2001 From: Thomas Sedlmayer Date: Wed, 3 Sep 2025 14:50:43 +0200 Subject: [PATCH 01/14] Add mcap support for osi3trace Signed-off-by: Thomas Sedlmayer --- osi3trace/osi_trace.py | 182 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 178 insertions(+), 4 deletions(-) diff --git a/osi3trace/osi_trace.py b/osi3trace/osi_trace.py index 3c05770..d4ae5f2 100644 --- a/osi3trace/osi_trace.py +++ b/osi3trace/osi_trace.py @@ -3,8 +3,14 @@ """ import lzma +from pathlib import Path import struct +from abc import ABC, abstractmethod + +from mcap_protobuf.decoder import DecoderFactory +from mcap.reader import make_reader + from osi3.osi_sensorview_pb2 import SensorView from osi3.osi_sensorviewconfiguration_pb2 import SensorViewConfiguration from osi3.osi_groundtruth_pb2 import GroundTruth @@ -32,8 +38,8 @@ class OSITrace: - """This class can import and decode OSI trace files.""" - + """This class can import and decode OSI single- and multi-channel trace files.""" + @staticmethod def map_message_type(type_name): """Map the type name to the protobuf message type.""" @@ -43,9 +49,120 @@ def map_message_type(type_name): def message_types(): """Message types that OSITrace supports.""" return list(MESSAGES_TYPE.keys()) + + def __init__(self, path=None, type_name="SensorView", cache_messages=False, topic=None): + """ + Initializes the trace reader depending on the trace file format. + + Args: + path (str): The path to the trace file. + type_name (str): The type name of the messages in the trace; check supported message types with `OSITrace.message_types()`. + cache_messages (bool): Whether to cache messages in memory (only applies to single-channel traces). + topic (str): The topic name for multi-channel traces (only applies to multi-channel traces); Using the first available topic if not specified. + """ + self.reader = None + self.path = None + + if path is not None: + self.reader = self._init_reader(Path(path), type_name, cache_messages, topic) + + def _init_reader(self, path, type_name, cache_messages, topic): + if not path.exists(): + raise FileNotFoundError("File not found") + + if path.suffix.lower() == ".mcap": + reader = OSITraceMulti(path, topic) + if reader.get_message_type() != type_name: + raise ValueError(f"Channel message type '{reader.get_message_type()}' does not match expected type '{type_name}'") + return reader + elif path.suffix.lower() in [".osi", ".lzma", ".xz"]: + return OSITraceSingle(str(path), type_name, cache_messages) + else: + raise ValueError(f"Unsupported file format: '{path.suffix}'") + + def from_file(self, path, type_name="SensorView", cache_messages=False, topic=None): + """ + Initializes the trace reader depending on the trace file format. + + Args: + path (str): The path to the trace file. + type_name (str): The type name of the messages in the trace; check supported message types with `OSITrace.message_types()`. + cache_messages (bool): Whether to cache messages in memory (only applies to single-channel traces). + topic (str): The topic name for multi-channel traces (only applies to multi-channel traces); Using the first available topic if not specified. + """ + self.reader = self._init_reader(Path(path), type_name, cache_messages, topic) + + def restart(self, index=None): + """ + Restart the trace reader. + + Note: + Multi-channel traces don't support restarting from a specific index. 
+ """ + return self.reader.restart(index) + + def __iter__(self): + return self.reader.__iter__() + + def close(self): + return self.reader.close() + + def retrieve_offsets(self, limit=None): + if isinstance(self.reader, OSITraceSingle): + return self.reader.retrieve_offsets(limit) + raise NotImplementedError("Offsets are only supported for single-channel traces.") + + def retrieve_message(self, index=None, skip=False): + if isinstance(self.reader, OSITraceSingle): + return self.reader.retrieve_message(index, skip) + raise NotImplementedError("Index-based message retrieval is only supported for single-channel traces.") + + def get_message_by_index(self, index): + if isinstance(self.reader, OSITraceSingle): + return self.reader.get_message_by_index(index) + raise NotImplementedError("Index-based message retrieval is only supported for single-channel traces.") + + def get_messages_in_index_range(self, begin, end): + if isinstance(self.reader, OSITraceSingle): + return self.reader.get_messages_in_index_range(begin, end) + raise NotImplementedError("Index-based message retrieval is only supported for single-channel traces.") + + def get_available_topics(self): + if isinstance(self.reader, OSITraceMulti): + return self.reader.get_available_topics() + raise NotImplementedError("Getting available topics is only supported for multi-channel traces.") + + def get_file_metadata(self): + if isinstance(self.reader, OSITraceMulti): + return self.reader.get_file_metadata() + raise NotImplementedError("Getting file metadata is only supported for multi-channel traces.") + + def get_channel_metadata(self): + if isinstance(self.reader, OSITraceMulti): + return self.reader.get_channel_metadata() + raise NotImplementedError("Getting channel metadata is only supported for multi-channel traces.") + + +class ReaderBase(ABC): + """Common interface for trace readers""" + @abstractmethod + def restart(self, index=None): + pass + + @abstractmethod + def __iter__(self): + pass + + @abstractmethod + def close(self): + pass + +class OSITraceSingle(ReaderBase): + """OSI single-channel trace reader""" + def __init__(self, path=None, type_name="SensorView", cache_messages=False): - self.type = self.map_message_type(type_name) + self.type = OSITrace.map_message_type(type_name) self.file = None self.current_index = None self.message_offsets = None @@ -57,7 +174,7 @@ def __init__(self, path=None, type_name="SensorView", cache_messages=False): def from_file(self, path, type_name="SensorView", cache_messages=False): """Import a trace from a file""" - self.type = self.map_message_type(type_name) + self.type = OSITrace.map_message_type(type_name) if path.lower().endswith((".lzma", ".xz")): self.file = lzma.open(path, "rb") @@ -186,3 +303,60 @@ def close(self): self.read_complete = False self.read_limit = None self.type = None + + +class OSITraceMulti(ReaderBase): + """OSI multi-channel trace reader""" + + def __init__(self, path, topic): + self.path = Path(path) + self._file = open(self.path, "rb") + self.mcap_reader = make_reader(self._file, decoder_factories=[DecoderFactory()]) + self._summary = self.mcap_reader.get_summary() + available_topics = self.get_available_topics() + if topic == None: + topic = available_topics[0] + if topic not in available_topics: + raise ValueError(f"The requested topic '{topic}' is not present in the trace file.") + self.topic = topic + + def restart(self, index=None): + if index != None: + raise NotImplementedError("Restarting from a given index is not supported for multi-channel traces.") + if 
hasattr(self, "_iter"): + del self._iter + + def __iter__(self): + """Stateful iterator over the channel's messages in log time order.""" + if not hasattr(self, "_iter"): + self._iter = self.mcap_reader.iter_decoded_messages(topics=[self.topic]) + for message in self._iter: + yield message.decoded_message + + def close(self): + self._file.close() + + def get_available_topics(self): + return [channel.topic for id, channel in self._summary.channels.items()] + + def get_file_metadata(self): + metadata = [] + for metadata_entry in self.mcap_reader.iter_metadata(): + metadata.append(metadata_entry) + return metadata + + def get_channel_metadata(self): + for id, channel in self._summary.channels.items(): + if channel.topic == self.topic: + return channel.metadata + return None + + def get_message_type(self): + for channel_id, channel in self._summary.channels.items(): + if channel.topic == self.topic: + schema = self._summary.schemas[channel.schema_id] + if schema.name.startswith("osi3."): + return schema.name[len("osi3.") :] + else: + raise ValueError(f"Schema '{schema.name}' is not an 'osi3.' schema.") + return None From adc1cadc0bd81fa3c04236503504cb84d6ba011a Mon Sep 17 00:00:00 2001 From: Thomas Sedlmayer Date: Thu, 4 Sep 2025 10:30:29 +0200 Subject: [PATCH 02/14] Update mcap dependencies Signed-off-by: Thomas Sedlmayer --- poetry.lock | 202 ++++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 2 + 2 files changed, 203 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 822e999..cef8a9d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -106,6 +106,94 @@ files = [ {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] +[[package]] +name = "lz4" +version = "4.4.4" +description = "LZ4 Bindings for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "lz4-4.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f170abb8416c4efca48e76cac2c86c3185efdf841aecbe5c190121c42828ced0"}, + {file = "lz4-4.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d33a5105cd96ebd32c3e78d7ece6123a9d2fb7c18b84dec61f27837d9e0c496c"}, + {file = "lz4-4.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30ebbc5b76b4f0018988825a7e9ce153be4f0d4eba34e6c1f2fcded120573e88"}, + {file = "lz4-4.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc64d6dfa7a89397529b22638939e70d85eaedc1bd68e30a29c78bfb65d4f715"}, + {file = "lz4-4.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a355223a284f42a723c120ce68827de66d5cb872a38732b3d5abbf544fa2fe26"}, + {file = "lz4-4.4.4-cp310-cp310-win32.whl", hash = "sha256:b28228197775b7b5096898851d59ef43ccaf151136f81d9c436bc9ba560bc2ba"}, + {file = "lz4-4.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:45e7c954546de4f85d895aa735989d77f87dd649f503ce1c8a71a151b092ed36"}, + {file = "lz4-4.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:e3fc90f766401684740978cd781d73b9685bd81b5dbf7257542ef9de4612e4d2"}, + {file = "lz4-4.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ddfc7194cd206496c445e9e5b0c47f970ce982c725c87bd22de028884125b68f"}, + {file = "lz4-4.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:714f9298c86f8e7278f1c6af23e509044782fa8220eb0260f8f8f1632f820550"}, + {file = "lz4-4.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8474c91de47733856c6686df3c4aca33753741da7e757979369c2c0d32918ba"}, + 
{file = "lz4-4.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80dd27d7d680ea02c261c226acf1d41de2fd77af4fb2da62b278a9376e380de0"}, + {file = "lz4-4.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b7d6dddfd01b49aedb940fdcaf32f41dc58c926ba35f4e31866aeec2f32f4f4"}, + {file = "lz4-4.4.4-cp311-cp311-win32.whl", hash = "sha256:4134b9fd70ac41954c080b772816bb1afe0c8354ee993015a83430031d686a4c"}, + {file = "lz4-4.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:f5024d3ca2383470f7c4ef4d0ed8eabad0b22b23eeefde1c192cf1a38d5e9f78"}, + {file = "lz4-4.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:6ea715bb3357ea1665f77874cf8f55385ff112553db06f3742d3cdcec08633f7"}, + {file = "lz4-4.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:23ae267494fdd80f0d2a131beff890cf857f1b812ee72dbb96c3204aab725553"}, + {file = "lz4-4.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fff9f3a1ed63d45cb6514bfb8293005dc4141341ce3500abdfeb76124c0b9b2e"}, + {file = "lz4-4.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ea7f07329f85a8eda4d8cf937b87f27f0ac392c6400f18bea2c667c8b7f8ecc"}, + {file = "lz4-4.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ccab8f7f7b82f9fa9fc3b0ba584d353bd5aa818d5821d77d5b9447faad2aaad"}, + {file = "lz4-4.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43e9d48b2daf80e486213128b0763deed35bbb7a59b66d1681e205e1702d735"}, + {file = "lz4-4.4.4-cp312-cp312-win32.whl", hash = "sha256:33e01e18e4561b0381b2c33d58e77ceee850a5067f0ece945064cbaac2176962"}, + {file = "lz4-4.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d21d1a2892a2dcc193163dd13eaadabb2c1b803807a5117d8f8588b22eaf9f12"}, + {file = "lz4-4.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:2f4f2965c98ab254feddf6b5072854a6935adab7bc81412ec4fe238f07b85f62"}, + {file = "lz4-4.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ed6eb9f8deaf25ee4f6fad9625d0955183fdc90c52b6f79a76b7f209af1b6e54"}, + {file = "lz4-4.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:18ae4fe3bafb344dbd09f976d45cbf49c05c34416f2462828f9572c1fa6d5af7"}, + {file = "lz4-4.4.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fd20c5fc1a49d1bbd170836fccf9a338847e73664f8e313dce6ac91b8c1e02"}, + {file = "lz4-4.4.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9cb387c33f014dae4db8cb4ba789c8d2a0a6d045ddff6be13f6c8d9def1d2a6"}, + {file = "lz4-4.4.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0be9f68240231e1e44118a4ebfecd8a5d4184f0bdf5c591c98dd6ade9720afd"}, + {file = "lz4-4.4.4-cp313-cp313-win32.whl", hash = "sha256:e9ec5d45ea43684f87c316542af061ef5febc6a6b322928f059ce1fb289c298a"}, + {file = "lz4-4.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:a760a175b46325b2bb33b1f2bbfb8aa21b48e1b9653e29c10b6834f9bb44ead4"}, + {file = "lz4-4.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:f4c21648d81e0dda38b4720dccc9006ae33b0e9e7ffe88af6bf7d4ec124e2fba"}, + {file = "lz4-4.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd1add57b6fe1f96bed2d529de085e9378a3ac04b86f116d10506f85b68e97fc"}, + {file = "lz4-4.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:900912e8a7cf74b4a2bea18a3594ae0bf1138f99919c20017167b6e05f760aa4"}, + {file = "lz4-4.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:017f8d269a739405a59d68a4d63d23a8df23e3bb2c70aa069b7563af08dfdffb"}, + {file = "lz4-4.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac522788296a9a02a39f620970dea86c38e141e21e51238f1b5e9fa629f8e69"}, + {file = "lz4-4.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b56aa9eef830bf6443acd8c4e18b208a8993dc32e0d6ef4263ecfa6afb3f599"}, + {file = "lz4-4.4.4-cp39-cp39-win32.whl", hash = "sha256:585b42eb37ab16a278c3a917ec23b2beef175aa669f4120142b97aebf90ef775"}, + {file = "lz4-4.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:4ab1537bd3b3bfbafd3c8847e06827129794488304f21945fc2f5b669649d94f"}, + {file = "lz4-4.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:38730927ad51beb42ab8dbc5555270bfbe86167ba734265f88bbd799fced1004"}, + {file = "lz4-4.4.4.tar.gz", hash = "sha256:070fd0627ec4393011251a094e08ed9fdcc78cb4e7ab28f507638eee4e39abda"}, +] + +[package.extras] +docs = ["sphinx (>=1.6.0)", "sphinx_bootstrap_theme"] +flake8 = ["flake8"] +tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] + +[[package]] +name = "mcap" +version = "1.3.0" +description = "MCAP libraries for Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "mcap-1.3.0-py3-none-any.whl", hash = "sha256:6262a68cd5a9ed7f8fe8fa6330412a87949842782ab42a4800cd033d0f38e4cd"}, + {file = "mcap-1.3.0.tar.gz", hash = "sha256:5f0d5826ba3b8418508c1d992bada4c5744073f1b3e6d685579f0aca0389fb2f"}, +] + +[package.dependencies] +lz4 = "*" +zstandard = "*" + +[[package]] +name = "mcap-protobuf-support" +version = "0.5.3" +description = "Protobuf support for the Python MCAP library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "mcap_protobuf_support-0.5.3-py3-none-any.whl", hash = "sha256:2cdfe7082f26f7da1bae8fa91c9196820562d03dccf39d4e4de11e8287f92dcb"}, + {file = "mcap_protobuf_support-0.5.3.tar.gz", hash = "sha256:0545ed005cdc6ac22c5d87a7d8574f0e9adb0c5da7b0f46ae896f5702fc1a250"}, +] + +[package.dependencies] +mcap = ">=0.0.14" +protobuf = ">=4.25" + [[package]] name = "mypy-extensions" version = "1.1.0" @@ -289,7 +377,119 @@ files = [ {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, ] +[[package]] +name = "zstandard" +version = "0.24.0" +description = "Zstandard bindings for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "zstandard-0.24.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:af1394c2c5febc44e0bbf0fc6428263fa928b50d1b1982ce1d870dc793a8e5f4"}, + {file = "zstandard-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e941654cef13a1d53634ec30933722eda11f44f99e1d0bc62bbce3387580d50"}, + {file = "zstandard-0.24.0-cp310-cp310-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:561123d05681197c0e24eb8ab3cfdaf299e2b59c293d19dad96e1610ccd8fbc6"}, + {file = "zstandard-0.24.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0f6d9a146e07458cb41423ca2d783aefe3a3a97fe72838973c13b8f1ecc7343a"}, + {file = "zstandard-0.24.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf02f915fa7934ea5dfc8d96757729c99a8868b7c340b97704795d6413cf5fe6"}, + {file = "zstandard-0.24.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:35f13501a8accf834457d8e40e744568287a215818778bc4d79337af2f3f0d97"}, + {file = 
"zstandard-0.24.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:92be52ca4e6e604f03d5daa079caec9e04ab4cbf6972b995aaebb877d3d24e13"}, + {file = "zstandard-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c9c3cba57f5792532a3df3f895980d47d78eda94b0e5b800651b53e96e0b604"}, + {file = "zstandard-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dd91b0134a32dfcd8be504e8e46de44ad0045a569efc25101f2a12ccd41b5759"}, + {file = "zstandard-0.24.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d6975f2d903bc354916a17b91a7aaac7299603f9ecdb788145060dde6e573a16"}, + {file = "zstandard-0.24.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7ac6e4d727521d86d20ec291a3f4e64a478e8a73eaee80af8f38ec403e77a409"}, + {file = "zstandard-0.24.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:87ae1684bc3c02d5c35884b3726525eda85307073dbefe68c3c779e104a59036"}, + {file = "zstandard-0.24.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:7de5869e616d426b56809be7dc6dba4d37b95b90411ccd3de47f421a42d4d42c"}, + {file = "zstandard-0.24.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:388aad2d693707f4a0f6cc687eb457b33303d6b57ecf212c8ff4468c34426892"}, + {file = "zstandard-0.24.0-cp310-cp310-win32.whl", hash = "sha256:962ea3aecedcc944f8034812e23d7200d52c6e32765b8da396eeb8b8ffca71ce"}, + {file = "zstandard-0.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:869bf13f66b124b13be37dd6e08e4b728948ff9735308694e0b0479119e08ea7"}, + {file = "zstandard-0.24.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:addfc23e3bd5f4b6787b9ca95b2d09a1a67ad5a3c318daaa783ff90b2d3a366e"}, + {file = "zstandard-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b005bcee4be9c3984b355336283afe77b2defa76ed6b89332eced7b6fa68b68"}, + {file = "zstandard-0.24.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:3f96a9130171e01dbb6c3d4d9925d604e2131a97f540e223b88ba45daf56d6fb"}, + {file = "zstandard-0.24.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd0d3d16e63873253bad22b413ec679cf6586e51b5772eb10733899832efec42"}, + {file = "zstandard-0.24.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:b7a8c30d9bf4bd5e4dcfe26900bef0fcd9749acde45cdf0b3c89e2052fda9a13"}, + {file = "zstandard-0.24.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:52cd7d9fa0a115c9446abb79b06a47171b7d916c35c10e0c3aa6f01d57561382"}, + {file = "zstandard-0.24.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a0f6fc2ea6e07e20df48752e7700e02e1892c61f9a6bfbacaf2c5b24d5ad504b"}, + {file = "zstandard-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e46eb6702691b24ddb3e31e88b4a499e31506991db3d3724a85bd1c5fc3cfe4e"}, + {file = "zstandard-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5e3b9310fd7f0d12edc75532cd9a56da6293840c84da90070d692e0bb15f186"}, + {file = "zstandard-0.24.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76cdfe7f920738ea871f035568f82bad3328cbc8d98f1f6988264096b5264efd"}, + {file = "zstandard-0.24.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3f2fe35ec84908dddf0fbf66b35d7c2878dbe349552dd52e005c755d3493d61c"}, + {file = "zstandard-0.24.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:aa705beb74ab116563f4ce784fa94771f230c05d09ab5de9c397793e725bb1db"}, + {file = "zstandard-0.24.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:aadf32c389bb7f02b8ec5c243c38302b92c006da565e120dfcb7bf0378f4f848"}, + {file = "zstandard-0.24.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e40cd0fc734aa1d4bd0e7ad102fd2a1aefa50ce9ef570005ffc2273c5442ddc3"}, + {file = "zstandard-0.24.0-cp311-cp311-win32.whl", hash = "sha256:cda61c46343809ecda43dc620d1333dd7433a25d0a252f2dcc7667f6331c7b61"}, + {file = "zstandard-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:3b95fc06489aa9388400d1aab01a83652bc040c9c087bd732eb214909d7fb0dd"}, + {file = "zstandard-0.24.0-cp311-cp311-win_arm64.whl", hash = "sha256:ad9fd176ff6800a0cf52bcf59c71e5de4fa25bf3ba62b58800e0f84885344d34"}, + {file = "zstandard-0.24.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a2bda8f2790add22773ee7a4e43c90ea05598bffc94c21c40ae0a9000b0133c3"}, + {file = "zstandard-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cc76de75300f65b8eb574d855c12518dc25a075dadb41dd18f6322bda3fe15d5"}, + {file = "zstandard-0.24.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d2b3b4bda1a025b10fe0269369475f420177f2cb06e0f9d32c95b4873c9f80b8"}, + {file = "zstandard-0.24.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b84c6c210684286e504022d11ec294d2b7922d66c823e87575d8b23eba7c81f"}, + {file = "zstandard-0.24.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c59740682a686bf835a1a4d8d0ed1eefe31ac07f1c5a7ed5f2e72cf577692b00"}, + {file = "zstandard-0.24.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6324fde5cf5120fbf6541d5ff3c86011ec056e8d0f915d8e7822926a5377193a"}, + {file = "zstandard-0.24.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:51a86bd963de3f36688553926a84e550d45d7f9745bd1947d79472eca27fcc75"}, + {file = "zstandard-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d82ac87017b734f2fb70ff93818c66f0ad2c3810f61040f077ed38d924e19980"}, + {file = "zstandard-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92ea7855d5bcfb386c34557516c73753435fb2d4a014e2c9343b5f5ba148b5d8"}, + {file = "zstandard-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3adb4b5414febf074800d264ddf69ecade8c658837a83a19e8ab820e924c9933"}, + {file = "zstandard-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6374feaf347e6b83ec13cc5dcfa70076f06d8f7ecd46cc71d58fac798ff08b76"}, + {file = "zstandard-0.24.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:13fc548e214df08d896ee5f29e1f91ee35db14f733fef8eabea8dca6e451d1e2"}, + {file = "zstandard-0.24.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0a416814608610abf5488889c74e43ffa0343ca6cf43957c6b6ec526212422da"}, + {file = "zstandard-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0d66da2649bb0af4471699aeb7a83d6f59ae30236fb9f6b5d20fb618ef6c6777"}, + {file = "zstandard-0.24.0-cp312-cp312-win32.whl", hash = "sha256:ff19efaa33e7f136fe95f9bbcc90ab7fb60648453b03f95d1de3ab6997de0f32"}, + {file = "zstandard-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc05f8a875eb651d1cc62e12a4a0e6afa5cd0cc231381adb830d2e9c196ea895"}, + {file = "zstandard-0.24.0-cp312-cp312-win_arm64.whl", hash = "sha256:b04c94718f7a8ed7cdd01b162b6caa1954b3c9d486f00ecbbd300f149d2b2606"}, + {file = "zstandard-0.24.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e4ebb000c0fe24a6d0f3534b6256844d9dbf042fdf003efe5cf40690cf4e0f3e"}, + {file = "zstandard-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:498f88f5109666c19531f0243a90d2fdd2252839cd6c8cc6e9213a3446670fa8"}, + {file = "zstandard-0.24.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0a9e95ceb180ccd12a8b3437bac7e8a8a089c9094e39522900a8917745542184"}, + {file = "zstandard-0.24.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bcf69e0bcddbf2adcfafc1a7e864edcc204dd8171756d3a8f3340f6f6cc87b7b"}, + {file = "zstandard-0.24.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:10e284748a7e7fbe2815ca62a9d6e84497d34cfdd0143fa9e8e208efa808d7c4"}, + {file = "zstandard-0.24.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:1bda8a85e5b9d5e73af2e61b23609a8cc1598c1b3b2473969912979205a1ff25"}, + {file = "zstandard-0.24.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1b14bc92af065d0534856bf1b30fc48753163ea673da98857ea4932be62079b1"}, + {file = "zstandard-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:b4f20417a4f511c656762b001ec827500cbee54d1810253c6ca2df2c0a307a5f"}, + {file = "zstandard-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:337572a7340e1d92fd7fb5248c8300d0e91071002d92e0b8cabe8d9ae7b58159"}, + {file = "zstandard-0.24.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:df4be1cf6e8f0f2bbe2a3eabfff163ef592c84a40e1a20a8d7db7f27cfe08fc2"}, + {file = "zstandard-0.24.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6885ae4b33aee8835dbdb4249d3dfec09af55e705d74d9b660bfb9da51baaa8b"}, + {file = "zstandard-0.24.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:663848a8bac4fdbba27feea2926049fdf7b55ec545d5b9aea096ef21e7f0b079"}, + {file = "zstandard-0.24.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:05d27c953f2e0a3ecc8edbe91d6827736acc4c04d0479672e0400ccdb23d818c"}, + {file = "zstandard-0.24.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:77b8b7b98893eaf47da03d262816f01f251c2aa059c063ed8a45c50eada123a5"}, + {file = "zstandard-0.24.0-cp313-cp313-win32.whl", hash = "sha256:cf7fbb4e54136e9a03c7ed7691843c4df6d2ecc854a2541f840665f4f2bb2edd"}, + {file = "zstandard-0.24.0-cp313-cp313-win_amd64.whl", hash = "sha256:d64899cc0f33a8f446f1e60bffc21fa88b99f0e8208750d9144ea717610a80ce"}, + {file = "zstandard-0.24.0-cp313-cp313-win_arm64.whl", hash = "sha256:57be3abb4313e0dd625596376bbb607f40059d801d51c1a1da94d7477e63b255"}, + {file = "zstandard-0.24.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b7fa260dd2731afd0dfa47881c30239f422d00faee4b8b341d3e597cface1483"}, + {file = "zstandard-0.24.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e05d66239d14a04b4717998b736a25494372b1b2409339b04bf42aa4663bf251"}, + {file = "zstandard-0.24.0-cp314-cp314-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:622e1e04bd8a085994e02313ba06fbcf4f9ed9a488c6a77a8dbc0692abab6a38"}, + {file = "zstandard-0.24.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:55872e818598319f065e8192ebefecd6ac05f62a43f055ed71884b0a26218f41"}, + {file = "zstandard-0.24.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bb2446a55b3a0fd8aa02aa7194bd64740015464a2daaf160d2025204e1d7c282"}, + {file = "zstandard-0.24.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:2825a3951f945fb2613ded0f517d402b1e5a68e87e0ee65f5bd224a8333a9a46"}, + {file = "zstandard-0.24.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:09887301001e7a81a3618156bc1759e48588de24bddfdd5b7a4364da9a8fbc20"}, + {file = "zstandard-0.24.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:98ca91dc9602cf351497d5600aa66e6d011a38c085a8237b370433fcb53e3409"}, + {file = "zstandard-0.24.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:e69f8e534b4e254f523e2f9d4732cf9c169c327ca1ce0922682aac9a5ee01155"}, + {file = "zstandard-0.24.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:444633b487a711e34f4bccc46a0c5dfbe1aee82c1a511e58cdc16f6bd66f187c"}, + {file = "zstandard-0.24.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f7d3fe9e1483171e9183ffdb1fab07c5fef80a9c3840374a38ec2ab869ebae20"}, + {file = "zstandard-0.24.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:27b6fa72b57824a3f7901fc9cc4ce1c1c834b28f3a43d1d4254c64c8f11149d4"}, + {file = "zstandard-0.24.0-cp314-cp314-win32.whl", hash = "sha256:fdc7a52a4cdaf7293e10813fd6a3abc0c7753660db12a3b864ab1fb5a0c60c16"}, + {file = "zstandard-0.24.0-cp314-cp314-win_amd64.whl", hash = "sha256:656ed895b28c7e42dd5b40dfcea3217cfc166b6b7eef88c3da2f5fc62484035b"}, + {file = "zstandard-0.24.0-cp314-cp314-win_arm64.whl", hash = "sha256:0101f835da7de08375f380192ff75135527e46e3f79bef224e3c49cb640fef6a"}, + {file = "zstandard-0.24.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:52788e7c489069e317fde641de41b757fa0ddc150e06488f153dd5daebac7192"}, + {file = "zstandard-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ec194197e90ca063f5ecb935d6c10063d84208cac5423c07d0f1a09d1c2ea42b"}, + {file = "zstandard-0.24.0-cp39-cp39-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:e91a4e5d62da7cb3f53e04fe254f1aa41009af578801ee6477fe56e7bef74ee2"}, + {file = "zstandard-0.24.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fc67eb15ed573950bc6436a04b3faea6c36c7db98d2db030d48391c6736a0dc"}, + {file = "zstandard-0.24.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f6ae9fc67e636fc0fa9adee39db87dfbdeabfa8420bc0e678a1ac8441e01b22b"}, + {file = "zstandard-0.24.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ab2357353894a5ec084bb8508ff892aa43fb7fe8a69ad310eac58221ee7f72aa"}, + {file = "zstandard-0.24.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1f578fab202f4df67a955145c3e3ca60ccaaaf66c97808545b2625efeecdef10"}, + {file = "zstandard-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c39d2b6161f3c5c5d12e9207ecf1006bb661a647a97a6573656b09aaea3f00ef"}, + {file = "zstandard-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0dc5654586613aebe5405c1ba180e67b3f29e7d98cf3187c79efdcc172f39457"}, + {file = "zstandard-0.24.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b91380aefa9c7ac831b011368daf378d3277e0bdeb6bad9535e21251e26dd55a"}, + {file = "zstandard-0.24.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:010302face38c9a909b8934e3bf6038266d6afc69523f3efa023c5cb5d38271b"}, + {file = "zstandard-0.24.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:3aa3b4344b206941385a425ea25e6dd63e5cb0f535a4b88d56e3f8902086be9e"}, + {file = "zstandard-0.24.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:63d39b161000aeeaa06a1cb77c9806e939bfe460dfd593e4cbf24e6bc717ae94"}, + {file = "zstandard-0.24.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ed8345b504df1cab280af923ef69ec0d7d52f7b22f78ec7982fde7c33a43c4f"}, + {file = "zstandard-0.24.0-cp39-cp39-win32.whl", hash = 
"sha256:1e133a9dd51ac0bcd5fd547ba7da45a58346dbc63def883f999857b0d0c003c4"}, + {file = "zstandard-0.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:8ecd3b1f7a601f79e0cd20c26057d770219c0dc2f572ea07390248da2def79a4"}, + {file = "zstandard-0.24.0.tar.gz", hash = "sha256:fe3198b81c00032326342d973e526803f183f97aa9e9a98e3f897ebafe21178f"}, +] + +[package.extras] +cffi = ["cffi (>=1.17) ; python_version >= \"3.13\" and platform_python_implementation != \"PyPy\""] + [metadata] lock-version = "2.1" python-versions = ">=3.10" -content-hash = "f3992efe3ec9125cff08fa51b4e4b2f1281385b4fe5c5ca40cb3206ec192aeeb" +content-hash = "d8d891dd9c0a16ba9080b4960ccdd5dd1048f27d04f1026e221132d9023df1ab" diff --git a/pyproject.toml b/pyproject.toml index 7292f6f..76108b3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,6 +13,8 @@ dynamic = ["version"] requires-python = ">=3.10" dependencies = [ "protobuf>=6.30.2", + "mcap>=1.2.2", + "mcap-protobuf-support>=0.5.3" ] [project.urls] From c42364442b71e3291d3bc522454289dc5ad3c0c4 Mon Sep 17 00:00:00 2001 From: Thomas Sedlmayer Date: Thu, 4 Sep 2025 15:45:42 +0200 Subject: [PATCH 03/14] Minor changes Signed-off-by: Thomas Sedlmayer --- osi3trace/osi_trace.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/osi3trace/osi_trace.py b/osi3trace/osi_trace.py index d4ae5f2..00ae426 100644 --- a/osi3trace/osi_trace.py +++ b/osi3trace/osi_trace.py @@ -312,6 +312,7 @@ def __init__(self, path, topic): self.path = Path(path) self._file = open(self.path, "rb") self.mcap_reader = make_reader(self._file, decoder_factories=[DecoderFactory()]) + self._iter = None self._summary = self.mcap_reader.get_summary() available_topics = self.get_available_topics() if topic == None: @@ -323,18 +324,22 @@ def __init__(self, path, topic): def restart(self, index=None): if index != None: raise NotImplementedError("Restarting from a given index is not supported for multi-channel traces.") - if hasattr(self, "_iter"): - del self._iter + self._iter = None def __iter__(self): """Stateful iterator over the channel's messages in log time order.""" - if not hasattr(self, "_iter"): + if self._iter is None: self._iter = self.mcap_reader.iter_decoded_messages(topics=[self.topic]) for message in self._iter: yield message.decoded_message def close(self): - self._file.close() + if self._file: + self._file.close() + self._file = None + self.mcap_reader = None + self._summary = None + self._iter = None def get_available_topics(self): return [channel.topic for id, channel in self._summary.channels.items()] From 635b227d956bb4711bf1823f67a1a934ff46655f Mon Sep 17 00:00:00 2001 From: Thomas Sedlmayer Date: Wed, 24 Sep 2025 10:43:21 +0200 Subject: [PATCH 04/14] Minor attribute protection changes Signed-off-by: Thomas Sedlmayer --- osi3trace/osi_trace.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/osi3trace/osi_trace.py b/osi3trace/osi_trace.py index 00ae426..56a1a74 100644 --- a/osi3trace/osi_trace.py +++ b/osi3trace/osi_trace.py @@ -61,7 +61,6 @@ def __init__(self, path=None, type_name="SensorView", cache_messages=False, topi topic (str): The topic name for multi-channel traces (only applies to multi-channel traces); Using the first available topic if not specified. 
""" self.reader = None - self.path = None if path is not None: self.reader = self._init_reader(Path(path), type_name, cache_messages, topic) @@ -145,6 +144,7 @@ def get_channel_metadata(self): class ReaderBase(ABC): """Common interface for trace readers""" + @abstractmethod def restart(self, index=None): pass @@ -309,11 +309,10 @@ class OSITraceMulti(ReaderBase): """OSI multi-channel trace reader""" def __init__(self, path, topic): - self.path = Path(path) - self._file = open(self.path, "rb") - self.mcap_reader = make_reader(self._file, decoder_factories=[DecoderFactory()]) + self._file = open(path, "rb") + self._mcap_reader = make_reader(self._file, decoder_factories=[DecoderFactory()]) self._iter = None - self._summary = self.mcap_reader.get_summary() + self._summary = self._mcap_reader.get_summary() available_topics = self.get_available_topics() if topic == None: topic = available_topics[0] @@ -329,7 +328,7 @@ def restart(self, index=None): def __iter__(self): """Stateful iterator over the channel's messages in log time order.""" if self._iter is None: - self._iter = self.mcap_reader.iter_decoded_messages(topics=[self.topic]) + self._iter = self._mcap_reader.iter_decoded_messages(topics=[self.topic]) for message in self._iter: yield message.decoded_message @@ -337,7 +336,7 @@ def close(self): if self._file: self._file.close() self._file = None - self.mcap_reader = None + self._mcap_reader = None self._summary = None self._iter = None @@ -346,7 +345,7 @@ def get_available_topics(self): def get_file_metadata(self): metadata = [] - for metadata_entry in self.mcap_reader.iter_metadata(): + for metadata_entry in self._mcap_reader.iter_metadata(): metadata.append(metadata_entry) return metadata From 16abb2c928e20283a28e03f822a3047e76a821dc Mon Sep 17 00:00:00 2001 From: Thomas Sedlmayer Date: Wed, 24 Sep 2025 11:16:20 +0200 Subject: [PATCH 05/14] Delegate legacy instance attributes Signed-off-by: Thomas Sedlmayer --- osi3trace/osi_trace.py | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/osi3trace/osi_trace.py b/osi3trace/osi_trace.py index 56a1a74..480f145 100644 --- a/osi3trace/osi_trace.py +++ b/osi3trace/osi_trace.py @@ -50,6 +50,42 @@ def message_types(): """Message types that OSITrace supports.""" return list(MESSAGES_TYPE.keys()) + _legacy_ositrace_attributes = { + "type", + "file", + "current_index", + "message_offsets", + "read_complete", + "message_cache", + } + + def __getattr__(self, name): + """ + This method forwards the getattr call for unsuccessful legacy attribute + name lookups to the reader in case it is an OSITraceSingle instance. + """ + if name in self._legacy_ositrace_attributes and isinstance(self.reader, OSITraceSingle): + return getattr(self.reader, name) + raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'") + + def __setattr__(self, name, value): + """ + This method overwrites the default setter and forwards setattr calls for + legacy attribute names to the reader in case the reader is an + OSITraceSingle instance. Otherwise it uses the default setter. 
+ """ + reader = super().__getattribute__("reader") if "reader" in self.__dict__ else None + if name in self._legacy_ositrace_attributes and isinstance(reader, OSITraceSingle): + setattr(reader, name, value) + else: + super().__setattr__(name, value) + + def __dir__(self): + attrs = super().__dir__() + if isinstance(self.reader, OSITraceSingle): + attrs += list(self._legacy_ositrace_attributes) + return attrs + def __init__(self, path=None, type_name="SensorView", cache_messages=False, topic=None): """ Initializes the trace reader depending on the trace file format. From d5b70dbb21b3074ff0313979e26c7b976572f986 Mon Sep 17 00:00:00 2001 From: "Pierre R. Mai" Date: Tue, 21 Oct 2025 11:14:05 +0200 Subject: [PATCH 06/14] Apply refactoring to unify trace implementations more Signed-off-by: Pierre R. Mai --- osi3trace/osi_trace.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/osi3trace/osi_trace.py b/osi3trace/osi_trace.py index 480f145..f9f0d87 100644 --- a/osi3trace/osi_trace.py +++ b/osi3trace/osi_trace.py @@ -106,12 +106,9 @@ def _init_reader(self, path, type_name, cache_messages, topic): raise FileNotFoundError("File not found") if path.suffix.lower() == ".mcap": - reader = OSITraceMulti(path, topic) - if reader.get_message_type() != type_name: - raise ValueError(f"Channel message type '{reader.get_message_type()}' does not match expected type '{type_name}'") - return reader + return OSITraceMulti(path, type_name, topic) elif path.suffix.lower() in [".osi", ".lzma", ".xz"]: - return OSITraceSingle(str(path), type_name, cache_messages) + return OSITraceSingle(path, type_name, cache_messages) else: raise ValueError(f"Unsupported file format: '{path.suffix}'") @@ -212,7 +209,7 @@ def from_file(self, path, type_name="SensorView", cache_messages=False): """Import a trace from a file""" self.type = OSITrace.map_message_type(type_name) - if path.lower().endswith((".lzma", ".xz")): + if path.suffix.lower() in [".lzma", ".xz"]: self.file = lzma.open(path, "rb") else: self.file = open(path, "rb") @@ -344,16 +341,16 @@ def close(self): class OSITraceMulti(ReaderBase): """OSI multi-channel trace reader""" - def __init__(self, path, topic): + def __init__(self, path, type_name, topic): self._file = open(path, "rb") self._mcap_reader = make_reader(self._file, decoder_factories=[DecoderFactory()]) self._iter = None self._summary = self._mcap_reader.get_summary() - available_topics = self.get_available_topics() + available_topics = self.get_available_topics(type_name) if topic == None: - topic = available_topics[0] + topic = next(iter(available_topics), None) if topic not in available_topics: - raise ValueError(f"The requested topic '{topic}' is not present in the trace file.") + raise ValueError(f"The requested topic '{topic}' is not present in the trace file or is not of type '{type_name}'.") self.topic = topic def restart(self, index=None): @@ -376,8 +373,8 @@ def close(self): self._summary = None self._iter = None - def get_available_topics(self): - return [channel.topic for id, channel in self._summary.channels.items()] + def get_available_topics(self, type_name = None): + return [channel.topic for channel in self._summary.channels.values() if self._channel_is_of_type(channel, type_name)] def get_file_metadata(self): metadata = [] @@ -392,7 +389,7 @@ def get_channel_metadata(self): return None def get_message_type(self): - for channel_id, channel in self._summary.channels.items(): + for channel in self._summary.channels.values(): if channel.topic == 
self.topic: schema = self._summary.schemas[channel.schema_id] if schema.name.startswith("osi3."): @@ -400,3 +397,7 @@ def get_message_type(self): else: raise ValueError(f"Schema '{schema.name}' is not an 'osi3.' schema.") return None + + def _channel_is_of_type(self, channel, type_name): + schema = self._summary.schemas[channel.schema_id] + return type_name is None or schema.name == f"osi3.{type_name}" From e45ee9cfdf117535a2d345f105cec998a9b74b30 Mon Sep 17 00:00:00 2001 From: "Pierre R. Mai" Date: Tue, 21 Oct 2025 12:39:57 +0200 Subject: [PATCH 07/14] Deprecate single-channel only methods, unify new methods Add deprecation for purely single-channel only legacy methods, place new methods as reusable base methods, which can potentially be implemented in the future for single-channel as well. Signed-off-by: Pierre R. Mai --- osi3trace/osi_trace.py | 65 +++++++++++++++++++++++++---------------- poetry.lock | 5 ++-- pyproject.toml | 1 + tests/test_osi_trace.py | 47 +++++++++++++++++++++-------- 4 files changed, 78 insertions(+), 40 deletions(-) diff --git a/osi3trace/osi_trace.py b/osi3trace/osi_trace.py index f9f0d87..4e56370 100644 --- a/osi3trace/osi_trace.py +++ b/osi3trace/osi_trace.py @@ -7,6 +7,7 @@ import struct from abc import ABC, abstractmethod +from typing_extensions import deprecated from mcap_protobuf.decoder import DecoderFactory from mcap.reader import make_reader @@ -139,40 +140,38 @@ def __iter__(self): def close(self): return self.reader.close() + @deprecated("This is a legacy interface only supported for single-channel traces, which will be removed in future versions.") def retrieve_offsets(self, limit=None): if isinstance(self.reader, OSITraceSingle): return self.reader.retrieve_offsets(limit) raise NotImplementedError("Offsets are only supported for single-channel traces.") + @deprecated("This is a legacy interface only supported for single-channel traces, which will be removed in future versions.") def retrieve_message(self, index=None, skip=False): if isinstance(self.reader, OSITraceSingle): return self.reader.retrieve_message(index, skip) raise NotImplementedError("Index-based message retrieval is only supported for single-channel traces.") + @deprecated("This is a legacy interface only supported for single-channel traces, which will be removed in future versions.") def get_message_by_index(self, index): if isinstance(self.reader, OSITraceSingle): return self.reader.get_message_by_index(index) raise NotImplementedError("Index-based message retrieval is only supported for single-channel traces.") + @deprecated("This is a legacy interface only supported for single-channel traces, which will be removed in future versions.") def get_messages_in_index_range(self, begin, end): if isinstance(self.reader, OSITraceSingle): return self.reader.get_messages_in_index_range(begin, end) raise NotImplementedError("Index-based message retrieval is only supported for single-channel traces.") def get_available_topics(self): - if isinstance(self.reader, OSITraceMulti): - return self.reader.get_available_topics() - raise NotImplementedError("Getting available topics is only supported for multi-channel traces.") + return self.reader.get_available_topics() def get_file_metadata(self): - if isinstance(self.reader, OSITraceMulti): - return self.reader.get_file_metadata() - raise NotImplementedError("Getting file metadata is only supported for multi-channel traces.") + return self.reader.get_file_metadata() def get_channel_metadata(self): - if isinstance(self.reader, OSITraceMulti): - return 
self.reader.get_channel_metadata() - raise NotImplementedError("Getting channel metadata is only supported for multi-channel traces.") + return self.reader.get_channel_metadata() class ReaderBase(ABC): @@ -190,6 +189,18 @@ def __iter__(self): def close(self): pass + @abstractmethod + def get_available_topics(self): + pass + + @abstractmethod + def get_file_metadata(self): + pass + + @abstractmethod + def get_channel_metadata(self): + pass + class OSITraceSingle(ReaderBase): """OSI single-channel trace reader""" @@ -203,21 +214,16 @@ def __init__(self, path=None, type_name="SensorView", cache_messages=False): self.message_cache = {} if cache_messages else None self._header_length = 4 if path: - self.from_file(path, type_name, cache_messages) - - def from_file(self, path, type_name="SensorView", cache_messages=False): - """Import a trace from a file""" - self.type = OSITrace.map_message_type(type_name) + self.type = OSITrace.map_message_type(type_name) - if path.suffix.lower() in [".lzma", ".xz"]: - self.file = lzma.open(path, "rb") - else: - self.file = open(path, "rb") - - self.read_complete = False - self.current_index = 0 - self.message_offsets = [0] - self.message_cache = {} if cache_messages else None + if path.suffix.lower() in [".lzma", ".xz"]: + self.file = lzma.open(path, "rb") + else: + self.file = open(path, "rb") + self.read_complete = False + self.current_index = 0 + self.message_offsets = [0] + self.message_cache = {} if cache_messages else None def retrieve_offsets(self, limit=None): """Retrieve the offsets of the messages from the file.""" @@ -337,6 +343,15 @@ def close(self): self.read_limit = None self.type = None + def get_available_topics(self): + raise NotImplementedError("Getting available topics is only supported for multi-channel traces.") + + def get_file_metadata(self): + raise NotImplementedError("Getting file metadata is only supported for multi-channel traces.") + + def get_channel_metadata(self): + raise NotImplementedError("Getting channel metadata is only supported for multi-channel traces.") + class OSITraceMulti(ReaderBase): """OSI multi-channel trace reader""" @@ -387,7 +402,7 @@ def get_channel_metadata(self): if channel.topic == self.topic: return channel.metadata return None - + def get_message_type(self): for channel in self._summary.channels.values(): if channel.topic == self.topic: @@ -397,7 +412,7 @@ def get_message_type(self): else: raise ValueError(f"Schema '{schema.name}' is not an 'osi3.' 
schema.") return None - + def _channel_is_of_type(self, channel, type_name): schema = self._summary.schemas[channel.schema_id] return type_name is None or schema.name == f"osi3.{type_name}" diff --git a/poetry.lock b/poetry.lock index cef8a9d..e135a32 100644 --- a/poetry.lock +++ b/poetry.lock @@ -370,8 +370,7 @@ version = "4.14.1" description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" -groups = ["dev"] -markers = "python_version == \"3.10\"" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"}, {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"}, @@ -492,4 +491,4 @@ cffi = ["cffi (>=1.17) ; python_version >= \"3.13\" and platform_python_implemen [metadata] lock-version = "2.1" python-versions = ">=3.10" -content-hash = "d8d891dd9c0a16ba9080b4960ccdd5dd1048f27d04f1026e221132d9023df1ab" +content-hash = "2fb49d217c020e5dacb4a9ccbd6e9e833deaf265dc7279ef98ad6106dcf90d95" diff --git a/pyproject.toml b/pyproject.toml index 76108b3..211cd1b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,6 +12,7 @@ readme = "README.md" dynamic = ["version"] requires-python = ">=3.10" dependencies = [ + "typing-extensions~=4.9", "protobuf>=6.30.2", "mcap>=1.2.2", "mcap-protobuf-support>=0.5.3" diff --git a/tests/test_osi_trace.py b/tests/test_osi_trace.py index 9d74f5a..c508cde 100644 --- a/tests/test_osi_trace.py +++ b/tests/test_osi_trace.py @@ -1,6 +1,7 @@ import os import tempfile import unittest +import warnings from osi3trace.osi_trace import OSITrace from osi3.osi_sensorview_pb2 import SensorView @@ -30,7 +31,9 @@ def test_osi_trace_sv(self): self.assertIsInstance(message, SensorView) f.write(str(message)) - self.assertEqual(len(trace.retrieve_offsets()), 10) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(len(trace.retrieve_offsets()), 10) trace.close() self.assertTrue(os.path.exists(path_output)) @@ -47,7 +50,9 @@ def test_osi_trace_svc(self): self.assertIsInstance(message, SensorViewConfiguration) f.write(str(message)) - self.assertEqual(len(trace.retrieve_offsets()), 1) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(len(trace.retrieve_offsets()), 1) trace.close() self.assertTrue(os.path.exists(path_output)) @@ -64,7 +69,9 @@ def test_osi_trace_gt(self): self.assertIsInstance(message, GroundTruth) f.write(str(message)) - self.assertEqual(len(trace.retrieve_offsets()), 10) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(len(trace.retrieve_offsets()), 10) trace.close() self.assertTrue(os.path.exists(path_output)) @@ -81,7 +88,9 @@ def test_osi_trace_hvd(self): self.assertIsInstance(message, HostVehicleData) f.write(str(message)) - self.assertEqual(len(trace.retrieve_offsets()), 10) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(len(trace.retrieve_offsets()), 10) trace.close() self.assertTrue(os.path.exists(path_output)) @@ -98,7 +107,9 @@ def test_osi_trace_sd(self): self.assertIsInstance(message, SensorData) f.write(str(message)) - self.assertEqual(len(trace.retrieve_offsets()), 10) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(len(trace.retrieve_offsets()), 10) trace.close() self.assertTrue(os.path.exists(path_output)) @@ -115,7 +126,9 @@ def 
test_osi_trace_tc(self): self.assertIsInstance(message, TrafficCommand) f.write(str(message)) - self.assertEqual(len(trace.retrieve_offsets()), 10) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(len(trace.retrieve_offsets()), 10) trace.close() self.assertTrue(os.path.exists(path_output)) @@ -132,7 +145,9 @@ def test_osi_trace_tcu(self): self.assertIsInstance(message, TrafficCommandUpdate) f.write(str(message)) - self.assertEqual(len(trace.retrieve_offsets()), 10) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(len(trace.retrieve_offsets()), 10) trace.close() self.assertTrue(os.path.exists(path_output)) @@ -149,7 +164,9 @@ def test_osi_trace_tu(self): self.assertIsInstance(message, TrafficUpdate) f.write(str(message)) - self.assertEqual(len(trace.retrieve_offsets()), 10) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(len(trace.retrieve_offsets()), 10) trace.close() self.assertTrue(os.path.exists(path_output)) @@ -166,7 +183,9 @@ def test_osi_trace_mr(self): self.assertIsInstance(message, MotionRequest) f.write(str(message)) - self.assertEqual(len(trace.retrieve_offsets()), 10) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(len(trace.retrieve_offsets()), 10) trace.close() self.assertTrue(os.path.exists(path_output)) @@ -183,7 +202,9 @@ def test_osi_trace_su(self): self.assertIsInstance(message, StreamingUpdate) f.write(str(message)) - self.assertEqual(len(trace.retrieve_offsets()), 10) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + self.assertEqual(len(trace.retrieve_offsets()), 10) trace.close() self.assertTrue(os.path.exists(path_output)) @@ -195,8 +216,10 @@ def test_osi_trace_offsets_robustness(self): trace = OSITrace(path_input) # Test whether the function can handle be run multiple times safely - offsets = trace.retrieve_offsets(None) - offsets2 = trace.retrieve_offsets(None) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + offsets = trace.retrieve_offsets(None) + offsets2 = trace.retrieve_offsets(None) trace.close() self.assertEqual(len(offsets), 10) From 2a86e4d1d75c185cb5a1bba450b0cb76e57cb1df Mon Sep 17 00:00:00 2001 From: "Pierre R. Mai" Date: Tue, 28 Oct 2025 15:58:42 +0100 Subject: [PATCH 08/14] Make implementation classes private Signed-off-by: Pierre R. Mai --- osi3trace/osi_trace.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/osi3trace/osi_trace.py b/osi3trace/osi_trace.py index 4e56370..30e2f8e 100644 --- a/osi3trace/osi_trace.py +++ b/osi3trace/osi_trace.py @@ -63,9 +63,9 @@ def message_types(): def __getattr__(self, name): """ This method forwards the getattr call for unsuccessful legacy attribute - name lookups to the reader in case it is an OSITraceSingle instance. + name lookups to the reader in case it is an _OSITraceSingle instance. """ - if name in self._legacy_ositrace_attributes and isinstance(self.reader, OSITraceSingle): + if name in self._legacy_ositrace_attributes and isinstance(self.reader, _OSITraceSingle): return getattr(self.reader, name) raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'") @@ -73,17 +73,17 @@ def __setattr__(self, name, value): """ This method overwrites the default setter and forwards setattr calls for legacy attribute names to the reader in case the reader is an - OSITraceSingle instance. Otherwise it uses the default setter. + _OSITraceSingle instance. 
Otherwise it uses the default setter. """ reader = super().__getattribute__("reader") if "reader" in self.__dict__ else None - if name in self._legacy_ositrace_attributes and isinstance(reader, OSITraceSingle): + if name in self._legacy_ositrace_attributes and isinstance(reader, _OSITraceSingle): setattr(reader, name, value) else: super().__setattr__(name, value) def __dir__(self): attrs = super().__dir__() - if isinstance(self.reader, OSITraceSingle): + if isinstance(self.reader, _OSITraceSingle): attrs += list(self._legacy_ositrace_attributes) return attrs @@ -107,9 +107,9 @@ def _init_reader(self, path, type_name, cache_messages, topic): raise FileNotFoundError("File not found") if path.suffix.lower() == ".mcap": - return OSITraceMulti(path, type_name, topic) + return _OSITraceMulti(path, type_name, topic) elif path.suffix.lower() in [".osi", ".lzma", ".xz"]: - return OSITraceSingle(path, type_name, cache_messages) + return _OSITraceSingle(path, type_name, cache_messages) else: raise ValueError(f"Unsupported file format: '{path.suffix}'") @@ -142,25 +142,25 @@ def close(self): @deprecated("This is a legacy interface only supported for single-channel traces, which will be removed in future versions.") def retrieve_offsets(self, limit=None): - if isinstance(self.reader, OSITraceSingle): + if isinstance(self.reader, _OSITraceSingle): return self.reader.retrieve_offsets(limit) raise NotImplementedError("Offsets are only supported for single-channel traces.") @deprecated("This is a legacy interface only supported for single-channel traces, which will be removed in future versions.") def retrieve_message(self, index=None, skip=False): - if isinstance(self.reader, OSITraceSingle): + if isinstance(self.reader, _OSITraceSingle): return self.reader.retrieve_message(index, skip) raise NotImplementedError("Index-based message retrieval is only supported for single-channel traces.") @deprecated("This is a legacy interface only supported for single-channel traces, which will be removed in future versions.") def get_message_by_index(self, index): - if isinstance(self.reader, OSITraceSingle): + if isinstance(self.reader, _OSITraceSingle): return self.reader.get_message_by_index(index) raise NotImplementedError("Index-based message retrieval is only supported for single-channel traces.") @deprecated("This is a legacy interface only supported for single-channel traces, which will be removed in future versions.") def get_messages_in_index_range(self, begin, end): - if isinstance(self.reader, OSITraceSingle): + if isinstance(self.reader, _OSITraceSingle): return self.reader.get_messages_in_index_range(begin, end) raise NotImplementedError("Index-based message retrieval is only supported for single-channel traces.") @@ -174,7 +174,7 @@ def get_channel_metadata(self): return self.reader.get_channel_metadata() -class ReaderBase(ABC): +class _ReaderBase(ABC): """Common interface for trace readers""" @abstractmethod @@ -202,7 +202,7 @@ def get_channel_metadata(self): pass -class OSITraceSingle(ReaderBase): +class _OSITraceSingle(_ReaderBase): """OSI single-channel trace reader""" def __init__(self, path=None, type_name="SensorView", cache_messages=False): @@ -353,7 +353,7 @@ def get_channel_metadata(self): raise NotImplementedError("Getting channel metadata is only supported for multi-channel traces.") -class OSITraceMulti(ReaderBase): +class _OSITraceMulti(_ReaderBase): """OSI multi-channel trace reader""" def __init__(self, path, type_name, topic): From 6fa580f1b33730a70203d7e7d88fd75be86788d5 Mon Sep 17 
00:00:00 2001 From: Thomas Sedlmayer Date: Mon, 3 Nov 2025 14:45:38 +0100 Subject: [PATCH 09/14] Use static parsing Signed-off-by: Thomas Sedlmayer --- osi3trace/osi_trace.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/osi3trace/osi_trace.py b/osi3trace/osi_trace.py index 30e2f8e..eacd0d9 100644 --- a/osi3trace/osi_trace.py +++ b/osi3trace/osi_trace.py @@ -358,7 +358,7 @@ class _OSITraceMulti(_ReaderBase): def __init__(self, path, type_name, topic): self._file = open(path, "rb") - self._mcap_reader = make_reader(self._file, decoder_factories=[DecoderFactory()]) + self._mcap_reader = make_reader(self._file) self._iter = None self._summary = self._mcap_reader.get_summary() available_topics = self.get_available_topics(type_name) @@ -376,9 +376,14 @@ def restart(self, index=None): def __iter__(self): """Stateful iterator over the channel's messages in log time order.""" if self._iter is None: - self._iter = self._mcap_reader.iter_decoded_messages(topics=[self.topic]) - for message in self._iter: - yield message.decoded_message + self._iter = self._mcap_reader.iter_messages(topics=[self.topic]) + + message_class = OSITrace.map_message_type(self.get_message_type()) + + for _, _, message in self._iter: + msg = message_class() + msg.ParseFromString(message.data) + yield msg def close(self): if self._file: From 9ae1254f38c2a956a05488da9246b4b5bd40ec69 Mon Sep 17 00:00:00 2001 From: Thomas Sedlmayer Date: Mon, 3 Nov 2025 14:56:09 +0100 Subject: [PATCH 10/14] Add mcap tests Signed-off-by: Thomas Sedlmayer --- tests/test_osi_trace_multi.py | 839 ++++++++++++++++++++++++++++++++++ 1 file changed, 839 insertions(+) create mode 100644 tests/test_osi_trace_multi.py diff --git a/tests/test_osi_trace_multi.py b/tests/test_osi_trace_multi.py new file mode 100644 index 0000000..1bc2392 --- /dev/null +++ b/tests/test_osi_trace_multi.py @@ -0,0 +1,839 @@ +import os +import tempfile +import unittest +import warnings + +from osi3trace.osi_trace import OSITrace +from osi3.osi_sensorview_pb2 import SensorView +from osi3.osi_sensorviewconfiguration_pb2 import SensorViewConfiguration +from osi3.osi_groundtruth_pb2 import GroundTruth +from osi3.osi_hostvehicledata_pb2 import HostVehicleData +from osi3.osi_sensordata_pb2 import SensorData +from osi3.osi_trafficcommand_pb2 import TrafficCommand +from osi3.osi_trafficcommandupdate_pb2 import TrafficCommandUpdate +from osi3.osi_trafficupdate_pb2 import TrafficUpdate +from osi3.osi_motionrequest_pb2 import MotionRequest +from osi3.osi_streamingupdate_pb2 import StreamingUpdate + +from mcap.writer import Writer +from mcap.well_known import MessageEncoding + +from google.protobuf.descriptor import FileDescriptor +from google.protobuf.descriptor_pb2 import FileDescriptorSet + + +class TestOSITraceMulti(unittest.TestCase): + def test_osi_trace_sv(self): + with tempfile.TemporaryDirectory() as tmpdirname: + path_output = os.path.join(tmpdirname, "output_sv.txth") + path_input = os.path.join(tmpdirname, "input_sv.mcap") + create_sample_sv(path_input) + + trace = OSITrace(path_input) + with open(path_output, "wt") as f: + for message in trace: + self.assertIsInstance(message, SensorView) + f.write(str(message)) + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + self.assertEqual(len(trace.retrieve_offsets()), 10) + except NotImplementedError: + pass + trace.close() + + self.assertTrue(os.path.exists(path_output)) + + def test_osi_trace_svc(self): + with tempfile.TemporaryDirectory() as tmpdirname: + path_output = 
os.path.join(tmpdirname, "output_svc.txth") + path_input = os.path.join(tmpdirname, "input_svc.mcap") + create_sample_svc(path_input) + + trace = OSITrace(path_input, "SensorViewConfiguration") + with open(path_output, "wt") as f: + for message in trace: + self.assertIsInstance(message, SensorViewConfiguration) + f.write(str(message)) + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + self.assertEqual(len(trace.retrieve_offsets()), 10) + except NotImplementedError: + pass + trace.close() + + self.assertTrue(os.path.exists(path_output)) + + def test_osi_trace_gt(self): + with tempfile.TemporaryDirectory() as tmpdirname: + path_output = os.path.join(tmpdirname, "output_gt.txth") + path_input = os.path.join(tmpdirname, "input_gt.mcap") + create_sample_gt(path_input) + + trace = OSITrace(path_input, "GroundTruth") + with open(path_output, "wt") as f: + for message in trace: + self.assertIsInstance(message, GroundTruth) + f.write(str(message)) + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + self.assertEqual(len(trace.retrieve_offsets()), 10) + except NotImplementedError: + pass + trace.close() + + self.assertTrue(os.path.exists(path_output)) + + def test_osi_trace_hvd(self): + with tempfile.TemporaryDirectory() as tmpdirname: + path_output = os.path.join(tmpdirname, "output_hvd.txth") + path_input = os.path.join(tmpdirname, "input_hvd.mcap") + create_sample_hvd(path_input) + + trace = OSITrace(path_input, "HostVehicleData") + with open(path_output, "wt") as f: + for message in trace: + self.assertIsInstance(message, HostVehicleData) + f.write(str(message)) + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + self.assertEqual(len(trace.retrieve_offsets()), 10) + except NotImplementedError: + pass + trace.close() + + self.assertTrue(os.path.exists(path_output)) + + def test_osi_trace_sd(self): + with tempfile.TemporaryDirectory() as tmpdirname: + path_output = os.path.join(tmpdirname, "output_sd.txth") + path_input = os.path.join(tmpdirname, "input_sd.mcap") + create_sample_sd(path_input) + + trace = OSITrace(path_input, "SensorData") + with open(path_output, "wt") as f: + for message in trace: + self.assertIsInstance(message, SensorData) + f.write(str(message)) + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + self.assertEqual(len(trace.retrieve_offsets()), 10) + except NotImplementedError: + pass + trace.close() + + self.assertTrue(os.path.exists(path_output)) + + def test_osi_trace_tc(self): + with tempfile.TemporaryDirectory() as tmpdirname: + path_output = os.path.join(tmpdirname, "output_tc.txth") + path_input = os.path.join(tmpdirname, "input_tc.mcap") + create_sample_tc(path_input) + + trace = OSITrace(path_input, "TrafficCommand") + with open(path_output, "wt") as f: + for message in trace: + self.assertIsInstance(message, TrafficCommand) + f.write(str(message)) + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + self.assertEqual(len(trace.retrieve_offsets()), 10) + except NotImplementedError: + pass + trace.close() + + self.assertTrue(os.path.exists(path_output)) + + def test_osi_trace_tcu(self): + with tempfile.TemporaryDirectory() as tmpdirname: + path_output = os.path.join(tmpdirname, "output_tcu.txth") + path_input = os.path.join(tmpdirname, "input_tcu.mcap") + create_sample_tcu(path_input) + + trace = OSITrace(path_input, "TrafficCommandUpdate") + with open(path_output, "wt") as f: + for message in trace: + self.assertIsInstance(message, 
TrafficCommandUpdate) + f.write(str(message)) + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + self.assertEqual(len(trace.retrieve_offsets()), 10) + except NotImplementedError: + pass + trace.close() + + self.assertTrue(os.path.exists(path_output)) + + def test_osi_trace_tu(self): + with tempfile.TemporaryDirectory() as tmpdirname: + path_output = os.path.join(tmpdirname, "output_tu.txth") + path_input = os.path.join(tmpdirname, "input_tu.mcap") + create_sample_tu(path_input) + + trace = OSITrace(path_input, "TrafficUpdate") + with open(path_output, "wt") as f: + for message in trace: + self.assertIsInstance(message, TrafficUpdate) + f.write(str(message)) + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + self.assertEqual(len(trace.retrieve_offsets()), 10) + except NotImplementedError: + pass + trace.close() + + self.assertTrue(os.path.exists(path_output)) + + def test_osi_trace_mr(self): + with tempfile.TemporaryDirectory() as tmpdirname: + path_output = os.path.join(tmpdirname, "output_mr.txth") + path_input = os.path.join(tmpdirname, "input_mr.mcap") + create_sample_mr(path_input) + + trace = OSITrace(path_input, "MotionRequest") + with open(path_output, "wt") as f: + for message in trace: + self.assertIsInstance(message, MotionRequest) + f.write(str(message)) + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + self.assertEqual(len(trace.retrieve_offsets()), 10) + except NotImplementedError: + pass + trace.close() + + self.assertTrue(os.path.exists(path_output)) + + def test_osi_trace_su(self): + with tempfile.TemporaryDirectory() as tmpdirname: + path_output = os.path.join(tmpdirname, "output_su.txth") + path_input = os.path.join(tmpdirname, "input_su.mcap") + create_sample_su(path_input) + + trace = OSITrace(path_input, "StreamingUpdate") + with open(path_output, "wt") as f: + for message in trace: + self.assertIsInstance(message, StreamingUpdate) + f.write(str(message)) + + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + self.assertEqual(len(trace.retrieve_offsets()), 10) + except NotImplementedError: + pass + trace.close() + + self.assertTrue(os.path.exists(path_output)) + + +def build_file_descriptor_set(message_class) -> FileDescriptorSet: + file_descriptor_set = FileDescriptorSet() + seen_dependencies = set() + + def append_file_descriptor(file_descriptor: FileDescriptor): + for dep in file_descriptor.dependencies: + if dep.name not in seen_dependencies: + seen_dependencies.add(dep.name) + append_file_descriptor(dep) + file_descriptor.CopyToProto(file_descriptor_set.file.add()) + + append_file_descriptor(message_class.DESCRIPTOR.file) + return file_descriptor_set + + +def create_sample_sv(path): + mcap_writer = Writer(output=str(path),) + file_descriptor_set = build_file_descriptor_set(SensorView) + schema_id = mcap_writer.register_schema( + name="osi3.SensorView", + encoding=MessageEncoding.Protobuf, + data=file_descriptor_set.SerializeToString(), + ) + topic_name = "SensorViewTopic" + channel_id = mcap_writer.register_channel( + topic=topic_name, + message_encoding=MessageEncoding.Protobuf, + schema_id=schema_id, + metadata={}, + ) + mcap_writer.start(library=f"osi-python mcap tests") + + sensorview = SensorView() + + sensorview.version.version_major = 3 + sensorview.version.version_minor = 0 + sensorview.version.version_patch = 0 + + sensorview.timestamp.seconds = 0 + sensorview.timestamp.nanos = 0 + + sensorview.sensor_id.value = 42 + + 
sensorview.host_vehicle_id.value = 114 + + sv_ground_truth = sensorview.global_ground_truth + sv_ground_truth.version.version_major = 3 + sv_ground_truth.version.version_minor = 0 + sv_ground_truth.version.version_patch = 0 + + sv_ground_truth.timestamp.seconds = 0 + sv_ground_truth.timestamp.nanos = 0 + + sv_ground_truth.host_vehicle_id.value = 114 + + moving_object = sv_ground_truth.moving_object.add() + moving_object.id.value = 114 + + # Generate 10 OSI messages for 9 seconds + for i in range(10): + # Increment the time + sensorview.timestamp.seconds += 1 + sensorview.timestamp.nanos += 100000 + + sv_ground_truth.timestamp.seconds += 1 + sv_ground_truth.timestamp.nanos += 100000 + + moving_object.vehicle_classification.type = 2 + + moving_object.base.dimension.length = 5 + moving_object.base.dimension.width = 2 + moving_object.base.dimension.height = 1 + + moving_object.base.position.x = 0.0 + i + moving_object.base.position.y = 0.0 + moving_object.base.position.z = 0.0 + + moving_object.base.orientation.roll = 0.0 + moving_object.base.orientation.pitch = 0.0 + moving_object.base.orientation.yaw = 0.0 + + time = sensorview.timestamp.seconds + sensorview.timestamp.nanos / 1e9 + mcap_writer.add_message( + channel_id=channel_id, + log_time=int(time*1000000000), + data=sensorview.SerializeToString(), + publish_time=int(time*1000000000), + ) + + mcap_writer.finish() + + +def create_sample_svc(path): + mcap_writer = Writer(output=str(path),) + file_descriptor_set = build_file_descriptor_set(SensorViewConfiguration) + schema_id = mcap_writer.register_schema( + name="osi3.SensorViewConfiguration", + encoding=MessageEncoding.Protobuf, + data=file_descriptor_set.SerializeToString(), + ) + topic_name = "SensorViewConfigurationTopic" + channel_id = mcap_writer.register_channel( + topic=topic_name, + message_encoding=MessageEncoding.Protobuf, + schema_id=schema_id, + metadata={}, + ) + mcap_writer.start(library=f"osi-python mcap tests") + + sensorviewconfig = SensorViewConfiguration() + + sensorviewconfig.version.version_major = 3 + sensorviewconfig.version.version_minor = 0 + sensorviewconfig.version.version_patch = 0 + + sensorviewconfig.sensor_id.value = 42 + + sensorviewconfig.mounting_position.position.x = 0.8 + sensorviewconfig.mounting_position.position.y = 1.0 + sensorviewconfig.mounting_position.position.z = 0.5 + + sensorviewconfig.mounting_position.orientation.roll = 0.10 + sensorviewconfig.mounting_position.orientation.pitch = 0.15 + sensorviewconfig.mounting_position.orientation.yaw = 0.25 + + time = 0 + mcap_writer.add_message( + channel_id=channel_id, + log_time=int(time*1000000000), + data=sensorviewconfig.SerializeToString(), + publish_time=int(time*1000000000), + ) + + mcap_writer.finish() + + +def create_sample_gt(path): + mcap_writer = Writer(output=str(path),) + file_descriptor_set = build_file_descriptor_set(GroundTruth) + schema_id = mcap_writer.register_schema( + name="osi3.GroundTruth", + encoding=MessageEncoding.Protobuf, + data=file_descriptor_set.SerializeToString(), + ) + topic_name = "GroundTruthTopic" + channel_id = mcap_writer.register_channel( + topic=topic_name, + message_encoding=MessageEncoding.Protobuf, + schema_id=schema_id, + metadata={}, + ) + mcap_writer.start(library=f"osi-python mcap tests") + + ground_truth = GroundTruth() + + ground_truth.version.version_major = 3 + ground_truth.version.version_minor = 0 + ground_truth.version.version_patch = 0 + + ground_truth.timestamp.seconds = 0 + ground_truth.timestamp.nanos = 0 + + moving_object = 
ground_truth.moving_object.add() + moving_object.id.value = 114 + + # Generate 10 OSI messages for 9 seconds + for i in range(10): + # Increment the time + ground_truth.timestamp.seconds += 1 + ground_truth.timestamp.nanos += 100000 + + moving_object.vehicle_classification.type = 2 + + moving_object.base.dimension.length = 5 + moving_object.base.dimension.width = 2 + moving_object.base.dimension.height = 1 + + moving_object.base.position.x = 0.0 + i + moving_object.base.position.y = 0.0 + moving_object.base.position.z = 0.0 + + moving_object.base.orientation.roll = 0.0 + moving_object.base.orientation.pitch = 0.0 + moving_object.base.orientation.yaw = 0.0 + + time = ground_truth.timestamp.seconds + ground_truth.timestamp.nanos / 1e9 + mcap_writer.add_message( + channel_id=channel_id, + log_time=int(time*1000000000), + data=ground_truth.SerializeToString(), + publish_time=int(time*1000000000), + ) + + mcap_writer.finish() + + +def create_sample_hvd(path): + mcap_writer = Writer(output=str(path),) + file_descriptor_set = build_file_descriptor_set(HostVehicleData) + schema_id = mcap_writer.register_schema( + name="osi3.HostVehicleData", + encoding=MessageEncoding.Protobuf, + data=file_descriptor_set.SerializeToString(), + ) + topic_name = "HostVehicleDataTopic" + channel_id = mcap_writer.register_channel( + topic=topic_name, + message_encoding=MessageEncoding.Protobuf, + schema_id=schema_id, + metadata={}, + ) + mcap_writer.start(library=f"osi-python mcap tests") + + hostvehicledata = HostVehicleData() + + hostvehicledata.version.version_major = 3 + hostvehicledata.version.version_minor = 0 + hostvehicledata.version.version_patch = 0 + + hostvehicledata.timestamp.seconds = 0 + hostvehicledata.timestamp.nanos = 0 + + hostvehicledata.host_vehicle_id.value = 114 + + # Generate 10 OSI messages for 9 seconds + for i in range(10): + # Increment the time + hostvehicledata.timestamp.seconds += 1 + hostvehicledata.timestamp.nanos += 100000 + + hostvehicledata.location.dimension.length = 5 + hostvehicledata.location.dimension.width = 2 + hostvehicledata.location.dimension.height = 1 + + hostvehicledata.location.position.x = 0.0 + i + hostvehicledata.location.position.y = 0.0 + hostvehicledata.location.position.z = 0.0 + + hostvehicledata.location.orientation.roll = 0.0 + hostvehicledata.location.orientation.pitch = 0.0 + hostvehicledata.location.orientation.yaw = 0.0 + + time = hostvehicledata.timestamp.seconds + hostvehicledata.timestamp.nanos / 1e9 + mcap_writer.add_message( + channel_id=channel_id, + log_time=int(time*1000000000), + data=hostvehicledata.SerializeToString(), + publish_time=int(time*1000000000), + ) + + mcap_writer.finish() + + +def create_sample_sd(path): + mcap_writer = Writer(output=str(path),) + file_descriptor_set = build_file_descriptor_set(SensorData) + schema_id = mcap_writer.register_schema( + name="osi3.SensorData", + encoding=MessageEncoding.Protobuf, + data=file_descriptor_set.SerializeToString(), + ) + topic_name = "SensorDataTopic" + channel_id = mcap_writer.register_channel( + topic=topic_name, + message_encoding=MessageEncoding.Protobuf, + schema_id=schema_id, + metadata={}, + ) + mcap_writer.start(library=f"osi-python mcap tests") + + sensordata = SensorData() + + sensordata.version.version_major = 3 + sensordata.version.version_minor = 0 + sensordata.version.version_patch = 0 + + sensordata.timestamp.seconds = 0 + sensordata.timestamp.nanos = 0 + + sensordata.sensor_id.value = 42 + + moving_object = sensordata.moving_object.add() + 
moving_object.header.tracking_id.value = 1 + gt_id = moving_object.header.ground_truth_id.add() + gt_id.value = 114 + + # Generate 10 OSI messages for 9 seconds + for i in range(10): + # Increment the time + sensordata.timestamp.seconds += 1 + sensordata.timestamp.nanos += 100000 + + moving_object.base.dimension.length = 5 + moving_object.base.dimension.width = 2 + moving_object.base.dimension.height = 1 + + moving_object.base.position.x = 0.0 + i + moving_object.base.position.y = 0.0 + moving_object.base.position.z = 0.0 + + moving_object.base.orientation.roll = 0.0 + moving_object.base.orientation.pitch = 0.0 + moving_object.base.orientation.yaw = 0.0 + + time = sensordata.timestamp.seconds + sensordata.timestamp.nanos / 1e9 + mcap_writer.add_message( + channel_id=channel_id, + log_time=int(time*1000000000), + data=sensordata.SerializeToString(), + publish_time=int(time*1000000000), + ) + + mcap_writer.finish() + + +def create_sample_tc(path): + mcap_writer = Writer(output=str(path),) + file_descriptor_set = build_file_descriptor_set(TrafficCommand) + schema_id = mcap_writer.register_schema( + name="osi3.TrafficCommand", + encoding=MessageEncoding.Protobuf, + data=file_descriptor_set.SerializeToString(), + ) + topic_name = "TrafficCommandTopic" + channel_id = mcap_writer.register_channel( + topic=topic_name, + message_encoding=MessageEncoding.Protobuf, + schema_id=schema_id, + metadata={}, + ) + mcap_writer.start(library=f"osi-python mcap tests") + + trafficcommand = TrafficCommand() + + trafficcommand.version.version_major = 3 + trafficcommand.version.version_minor = 0 + trafficcommand.version.version_patch = 0 + + trafficcommand.timestamp.seconds = 0 + trafficcommand.timestamp.nanos = 0 + + trafficcommand.traffic_participant_id.value = 114 + + action = trafficcommand.action.add() + + # Generate 10 OSI messages for 9 seconds + for i in range(10): + # Increment the time + trafficcommand.timestamp.seconds += 1 + trafficcommand.timestamp.nanos += 100000 + + action.speed_action.action_header.action_id.value = 1000 + i + + action.speed_action.absolute_target_speed = 10.0 + 0.5 * i + + time = trafficcommand.timestamp.seconds + trafficcommand.timestamp.nanos / 1e9 + mcap_writer.add_message( + channel_id=channel_id, + log_time=int(time*1000000000), + data=trafficcommand.SerializeToString(), + publish_time=int(time*1000000000), + ) + + mcap_writer.finish() + + +def create_sample_tcu(path): + mcap_writer = Writer(output=str(path),) + file_descriptor_set = build_file_descriptor_set(TrafficCommandUpdate) + schema_id = mcap_writer.register_schema( + name="osi3.TrafficCommandUpdate", + encoding=MessageEncoding.Protobuf, + data=file_descriptor_set.SerializeToString(), + ) + topic_name = "TrafficCommandUpdateTopic" + channel_id = mcap_writer.register_channel( + topic=topic_name, + message_encoding=MessageEncoding.Protobuf, + schema_id=schema_id, + metadata={}, + ) + mcap_writer.start(library=f"osi-python mcap tests") + + trafficcommandupdate = TrafficCommandUpdate() + + trafficcommandupdate.version.version_major = 3 + trafficcommandupdate.version.version_minor = 0 + trafficcommandupdate.version.version_patch = 0 + + trafficcommandupdate.timestamp.seconds = 0 + trafficcommandupdate.timestamp.nanos = 0 + + trafficcommandupdate.traffic_participant_id.value = 114 + + action = trafficcommandupdate.dismissed_action.add() + + # Generate 10 OSI messages for 9 seconds + for i in range(10): + # Increment the time + trafficcommandupdate.timestamp.seconds += 1 + trafficcommandupdate.timestamp.nanos += 100000 + + 
action.dismissed_action_id.value = 1000 + i + action.failure_reason = "Cannot complete!" + + time = trafficcommandupdate.timestamp.seconds + trafficcommandupdate.timestamp.nanos / 1e9 + mcap_writer.add_message( + channel_id=channel_id, + log_time=int(time*1000000000), + data=trafficcommandupdate.SerializeToString(), + publish_time=int(time*1000000000), + ) + + mcap_writer.finish() + + +def create_sample_tu(path): + mcap_writer = Writer(output=str(path),) + file_descriptor_set = build_file_descriptor_set(TrafficUpdate) + schema_id = mcap_writer.register_schema( + name="osi3.TrafficUpdate", + encoding=MessageEncoding.Protobuf, + data=file_descriptor_set.SerializeToString(), + ) + topic_name = "TrafficUpdateTopic" + channel_id = mcap_writer.register_channel( + topic=topic_name, + message_encoding=MessageEncoding.Protobuf, + schema_id=schema_id, + metadata={}, + ) + mcap_writer.start(library=f"osi-python mcap tests") + + trafficupdate = TrafficUpdate() + + trafficupdate.version.version_major = 3 + trafficupdate.version.version_minor = 0 + trafficupdate.version.version_patch = 0 + + trafficupdate.timestamp.seconds = 0 + trafficupdate.timestamp.nanos = 0 + + moving_object = trafficupdate.update.add() + moving_object.id.value = 114 + + # Generate 10 OSI messages for 9 seconds + for i in range(10): + # Increment the time + trafficupdate.timestamp.seconds += 1 + trafficupdate.timestamp.nanos += 100000 + + moving_object.vehicle_classification.type = 2 + + moving_object.base.dimension.length = 5 + moving_object.base.dimension.width = 2 + moving_object.base.dimension.height = 1 + + moving_object.base.position.x = 0.0 + i + moving_object.base.position.y = 0.0 + moving_object.base.position.z = 0.0 + + moving_object.base.orientation.roll = 0.0 + moving_object.base.orientation.pitch = 0.0 + moving_object.base.orientation.yaw = 0.0 + + time = trafficupdate.timestamp.seconds + trafficupdate.timestamp.nanos / 1e9 + mcap_writer.add_message( + channel_id=channel_id, + log_time=int(time*1000000000), + data=trafficupdate.SerializeToString(), + publish_time=int(time*1000000000), + ) + + mcap_writer.finish() + + +def create_sample_mr(path): + mcap_writer = Writer(output=str(path),) + file_descriptor_set = build_file_descriptor_set(MotionRequest) + schema_id = mcap_writer.register_schema( + name="osi3.MotionRequest", + encoding=MessageEncoding.Protobuf, + data=file_descriptor_set.SerializeToString(), + ) + topic_name = "MotionRequestTopic" + channel_id = mcap_writer.register_channel( + topic=topic_name, + message_encoding=MessageEncoding.Protobuf, + schema_id=schema_id, + metadata={}, + ) + mcap_writer.start(library=f"osi-python mcap tests") + + motionrequest = MotionRequest() + + motionrequest.version.version_major = 3 + motionrequest.version.version_minor = 0 + motionrequest.version.version_patch = 0 + + motionrequest.timestamp.seconds = 0 + motionrequest.timestamp.nanos = 0 + + desired_state = motionrequest.desired_state + + desired_state.timestamp.seconds = 0 + desired_state.timestamp.nanos = 0 + + # Generate 10 OSI messages for 9 seconds + for i in range(10): + # Increment the time + motionrequest.timestamp.seconds += 1 + motionrequest.timestamp.nanos += 100000 + + desired_state.timestamp.seconds += 1 + desired_state.timestamp.nanos += 100000 + + desired_state.position.x = 0.0 + i + desired_state.position.y = 0.0 + desired_state.position.z = 0.0 + + desired_state.orientation.roll = 0.0 + desired_state.orientation.pitch = 0.0 + desired_state.orientation.yaw = 0.10 + + time = motionrequest.timestamp.seconds + 
motionrequest.timestamp.nanos / 1e9 + mcap_writer.add_message( + channel_id=channel_id, + log_time=int(time*1000000000), + data=motionrequest.SerializeToString(), + publish_time=int(time*1000000000), + ) + + mcap_writer.finish() + + +def create_sample_su(path): + mcap_writer = Writer(output=str(path),) + file_descriptor_set = build_file_descriptor_set(StreamingUpdate) + schema_id = mcap_writer.register_schema( + name="osi3.StreamingUpdate", + encoding=MessageEncoding.Protobuf, + data=file_descriptor_set.SerializeToString(), + ) + topic_name = "StreamingUpdateTopic" + channel_id = mcap_writer.register_channel( + topic=topic_name, + message_encoding=MessageEncoding.Protobuf, + schema_id=schema_id, + metadata={}, + ) + mcap_writer.start(library=f"osi-python mcap tests") + + streamingupdate = StreamingUpdate() + + streamingupdate.version.version_major = 3 + streamingupdate.version.version_minor = 0 + streamingupdate.version.version_patch = 0 + + streamingupdate.timestamp.seconds = 0 + streamingupdate.timestamp.nanos = 0 + + moving_object = streamingupdate.moving_object_update.add() + moving_object.id.value = 114 + + # Generate 10 OSI messages for 9 seconds + for i in range(10): + # Increment the time + streamingupdate.timestamp.seconds += 1 + streamingupdate.timestamp.nanos += 100000 + + moving_object.vehicle_classification.type = 2 + + moving_object.base.dimension.length = 5 + moving_object.base.dimension.width = 2 + moving_object.base.dimension.height = 1 + + moving_object.base.position.x = 0.0 + i + moving_object.base.position.y = 0.0 + moving_object.base.position.z = 0.0 + + moving_object.base.orientation.roll = 0.0 + moving_object.base.orientation.pitch = 0.0 + moving_object.base.orientation.yaw = 0.0 + + time = streamingupdate.timestamp.seconds + streamingupdate.timestamp.nanos / 1e9 + mcap_writer.add_message( + channel_id=channel_id, + log_time=int(time*1000000000), + data=streamingupdate.SerializeToString(), + publish_time=int(time*1000000000), + ) + + mcap_writer.finish() From 49cc9d2345851525310a438ef00468be99d1ffa7 Mon Sep 17 00:00:00 2001 From: "Pierre R. Mai" Date: Tue, 4 Nov 2025 11:08:50 +0100 Subject: [PATCH 11/14] Add meta-data handling, refactor multi tests Signed-off-by: Pierre R. 
Mai --- tests/test_osi_trace_multi.py | 261 ++++++++++++---------------------- 1 file changed, 90 insertions(+), 171 deletions(-) diff --git a/tests/test_osi_trace_multi.py b/tests/test_osi_trace_multi.py index 1bc2392..3e6f930 100644 --- a/tests/test_osi_trace_multi.py +++ b/tests/test_osi_trace_multi.py @@ -4,6 +4,7 @@ import warnings from osi3trace.osi_trace import OSITrace +from osi3 import __version__ as osi3_version from osi3.osi_sensorview_pb2 import SensorView from osi3.osi_sensorviewconfiguration_pb2 import SensorViewConfiguration from osi3.osi_groundtruth_pb2 import GroundTruth @@ -18,6 +19,7 @@ from mcap.writer import Writer from mcap.well_known import MessageEncoding +from google.protobuf import __version__ as google_protobuf_version from google.protobuf.descriptor import FileDescriptor from google.protobuf.descriptor_pb2 import FileDescriptorSet @@ -245,36 +247,63 @@ def test_osi_trace_su(self): def build_file_descriptor_set(message_class) -> FileDescriptorSet: - file_descriptor_set = FileDescriptorSet() - seen_dependencies = set() + file_descriptor_set = FileDescriptorSet() + seen_dependencies = set() - def append_file_descriptor(file_descriptor: FileDescriptor): - for dep in file_descriptor.dependencies: - if dep.name not in seen_dependencies: - seen_dependencies.add(dep.name) - append_file_descriptor(dep) - file_descriptor.CopyToProto(file_descriptor_set.file.add()) + def append_file_descriptor(file_descriptor: FileDescriptor): + for dep in file_descriptor.dependencies: + if dep.name not in seen_dependencies: + seen_dependencies.add(dep.name) + append_file_descriptor(dep) + file_descriptor.CopyToProto(file_descriptor_set.file.add()) - append_file_descriptor(message_class.DESCRIPTOR.file) - return file_descriptor_set + append_file_descriptor(message_class.DESCRIPTOR.file) + return file_descriptor_set -def create_sample_sv(path): - mcap_writer = Writer(output=str(path),) - file_descriptor_set = build_file_descriptor_set(SensorView) +def prepare_mcap_writer(path): + mcap_writer = Writer( + output=str(path), + ) + + mcap_writer.start(library=f"osi-python mcap test suite") + mcap_writer.add_metadata( + name="net.asam.osi.trace", + data={ + "version": osi3_version, + "min_osi_version": osi3_version, + "max_osi_version": osi3_version, + "min_protobuf_version": google_protobuf_version, + "max_protobuf_version": google_protobuf_version, + "description": "Test trace created by osi-python test suite", + }, + ) + return mcap_writer + + +def add_channel(mcap_writer, message_class, topic_name): + file_descriptor_set = build_file_descriptor_set(message_class) schema_id = mcap_writer.register_schema( - name="osi3.SensorView", + name=f"osi3.{message_class.__name__}", encoding=MessageEncoding.Protobuf, data=file_descriptor_set.SerializeToString(), ) - topic_name = "SensorViewTopic" channel_id = mcap_writer.register_channel( topic=topic_name, message_encoding=MessageEncoding.Protobuf, schema_id=schema_id, - metadata={}, + metadata={ + "net.asam.osi.trace.channel.osi_version": osi3_version, + "net.asam.osi.trace.channel.protobuf_version": google_protobuf_version, + "net.asam.osi.trace.channel.description": f"Channel for OSI message type {message_class.__name__}", + }, ) - mcap_writer.start(library=f"osi-python mcap tests") + return channel_id + + +def create_sample_sv(path): + mcap_writer = prepare_mcap_writer(path) + channel_id = add_channel(mcap_writer, SensorView, "SensorViewTopic") sensorview = SensorView() @@ -328,30 +357,19 @@ def create_sample_sv(path): time = 
sensorview.timestamp.seconds + sensorview.timestamp.nanos / 1e9 mcap_writer.add_message( channel_id=channel_id, - log_time=int(time*1000000000), + log_time=int(time * 1000000000), data=sensorview.SerializeToString(), - publish_time=int(time*1000000000), + publish_time=int(time * 1000000000), ) mcap_writer.finish() def create_sample_svc(path): - mcap_writer = Writer(output=str(path),) - file_descriptor_set = build_file_descriptor_set(SensorViewConfiguration) - schema_id = mcap_writer.register_schema( - name="osi3.SensorViewConfiguration", - encoding=MessageEncoding.Protobuf, - data=file_descriptor_set.SerializeToString(), + mcap_writer = prepare_mcap_writer(path) + channel_id = add_channel( + mcap_writer, SensorViewConfiguration, "SensorViewConfigurationTopic" ) - topic_name = "SensorViewConfigurationTopic" - channel_id = mcap_writer.register_channel( - topic=topic_name, - message_encoding=MessageEncoding.Protobuf, - schema_id=schema_id, - metadata={}, - ) - mcap_writer.start(library=f"osi-python mcap tests") sensorviewconfig = SensorViewConfiguration() @@ -372,30 +390,17 @@ def create_sample_svc(path): time = 0 mcap_writer.add_message( channel_id=channel_id, - log_time=int(time*1000000000), + log_time=int(time * 1000000000), data=sensorviewconfig.SerializeToString(), - publish_time=int(time*1000000000), + publish_time=int(time * 1000000000), ) mcap_writer.finish() def create_sample_gt(path): - mcap_writer = Writer(output=str(path),) - file_descriptor_set = build_file_descriptor_set(GroundTruth) - schema_id = mcap_writer.register_schema( - name="osi3.GroundTruth", - encoding=MessageEncoding.Protobuf, - data=file_descriptor_set.SerializeToString(), - ) - topic_name = "GroundTruthTopic" - channel_id = mcap_writer.register_channel( - topic=topic_name, - message_encoding=MessageEncoding.Protobuf, - schema_id=schema_id, - metadata={}, - ) - mcap_writer.start(library=f"osi-python mcap tests") + mcap_writer = prepare_mcap_writer(path) + channel_id = add_channel(mcap_writer, GroundTruth, "GroundTruthTopic") ground_truth = GroundTruth() @@ -432,30 +437,17 @@ def create_sample_gt(path): time = ground_truth.timestamp.seconds + ground_truth.timestamp.nanos / 1e9 mcap_writer.add_message( channel_id=channel_id, - log_time=int(time*1000000000), + log_time=int(time * 1000000000), data=ground_truth.SerializeToString(), - publish_time=int(time*1000000000), + publish_time=int(time * 1000000000), ) mcap_writer.finish() def create_sample_hvd(path): - mcap_writer = Writer(output=str(path),) - file_descriptor_set = build_file_descriptor_set(HostVehicleData) - schema_id = mcap_writer.register_schema( - name="osi3.HostVehicleData", - encoding=MessageEncoding.Protobuf, - data=file_descriptor_set.SerializeToString(), - ) - topic_name = "HostVehicleDataTopic" - channel_id = mcap_writer.register_channel( - topic=topic_name, - message_encoding=MessageEncoding.Protobuf, - schema_id=schema_id, - metadata={}, - ) - mcap_writer.start(library=f"osi-python mcap tests") + mcap_writer = prepare_mcap_writer(path) + channel_id = add_channel(mcap_writer, HostVehicleData, "HostVehicleDataTopic") hostvehicledata = HostVehicleData() @@ -489,30 +481,17 @@ def create_sample_hvd(path): time = hostvehicledata.timestamp.seconds + hostvehicledata.timestamp.nanos / 1e9 mcap_writer.add_message( channel_id=channel_id, - log_time=int(time*1000000000), + log_time=int(time * 1000000000), data=hostvehicledata.SerializeToString(), - publish_time=int(time*1000000000), + publish_time=int(time * 1000000000), ) mcap_writer.finish() def 
create_sample_sd(path): - mcap_writer = Writer(output=str(path),) - file_descriptor_set = build_file_descriptor_set(SensorData) - schema_id = mcap_writer.register_schema( - name="osi3.SensorData", - encoding=MessageEncoding.Protobuf, - data=file_descriptor_set.SerializeToString(), - ) - topic_name = "SensorDataTopic" - channel_id = mcap_writer.register_channel( - topic=topic_name, - message_encoding=MessageEncoding.Protobuf, - schema_id=schema_id, - metadata={}, - ) - mcap_writer.start(library=f"osi-python mcap tests") + mcap_writer = prepare_mcap_writer(path) + channel_id = add_channel(mcap_writer, SensorData, "SensorDataTopic") sensordata = SensorData() @@ -551,30 +530,17 @@ def create_sample_sd(path): time = sensordata.timestamp.seconds + sensordata.timestamp.nanos / 1e9 mcap_writer.add_message( channel_id=channel_id, - log_time=int(time*1000000000), + log_time=int(time * 1000000000), data=sensordata.SerializeToString(), - publish_time=int(time*1000000000), + publish_time=int(time * 1000000000), ) mcap_writer.finish() def create_sample_tc(path): - mcap_writer = Writer(output=str(path),) - file_descriptor_set = build_file_descriptor_set(TrafficCommand) - schema_id = mcap_writer.register_schema( - name="osi3.TrafficCommand", - encoding=MessageEncoding.Protobuf, - data=file_descriptor_set.SerializeToString(), - ) - topic_name = "TrafficCommandTopic" - channel_id = mcap_writer.register_channel( - topic=topic_name, - message_encoding=MessageEncoding.Protobuf, - schema_id=schema_id, - metadata={}, - ) - mcap_writer.start(library=f"osi-python mcap tests") + mcap_writer = prepare_mcap_writer(path) + channel_id = add_channel(mcap_writer, TrafficCommand, "TrafficCommandTopic") trafficcommand = TrafficCommand() @@ -602,30 +568,19 @@ def create_sample_tc(path): time = trafficcommand.timestamp.seconds + trafficcommand.timestamp.nanos / 1e9 mcap_writer.add_message( channel_id=channel_id, - log_time=int(time*1000000000), + log_time=int(time * 1000000000), data=trafficcommand.SerializeToString(), - publish_time=int(time*1000000000), + publish_time=int(time * 1000000000), ) mcap_writer.finish() def create_sample_tcu(path): - mcap_writer = Writer(output=str(path),) - file_descriptor_set = build_file_descriptor_set(TrafficCommandUpdate) - schema_id = mcap_writer.register_schema( - name="osi3.TrafficCommandUpdate", - encoding=MessageEncoding.Protobuf, - data=file_descriptor_set.SerializeToString(), + mcap_writer = prepare_mcap_writer(path) + channel_id = add_channel( + mcap_writer, TrafficCommandUpdate, "TrafficCommandUpdateTopic" ) - topic_name = "TrafficCommandUpdateTopic" - channel_id = mcap_writer.register_channel( - topic=topic_name, - message_encoding=MessageEncoding.Protobuf, - schema_id=schema_id, - metadata={}, - ) - mcap_writer.start(library=f"osi-python mcap tests") trafficcommandupdate = TrafficCommandUpdate() @@ -649,33 +604,23 @@ def create_sample_tcu(path): action.dismissed_action_id.value = 1000 + i action.failure_reason = "Cannot complete!" 
- time = trafficcommandupdate.timestamp.seconds + trafficcommandupdate.timestamp.nanos / 1e9 + time = ( + trafficcommandupdate.timestamp.seconds + + trafficcommandupdate.timestamp.nanos / 1e9 + ) mcap_writer.add_message( channel_id=channel_id, - log_time=int(time*1000000000), + log_time=int(time * 1000000000), data=trafficcommandupdate.SerializeToString(), - publish_time=int(time*1000000000), + publish_time=int(time * 1000000000), ) mcap_writer.finish() def create_sample_tu(path): - mcap_writer = Writer(output=str(path),) - file_descriptor_set = build_file_descriptor_set(TrafficUpdate) - schema_id = mcap_writer.register_schema( - name="osi3.TrafficUpdate", - encoding=MessageEncoding.Protobuf, - data=file_descriptor_set.SerializeToString(), - ) - topic_name = "TrafficUpdateTopic" - channel_id = mcap_writer.register_channel( - topic=topic_name, - message_encoding=MessageEncoding.Protobuf, - schema_id=schema_id, - metadata={}, - ) - mcap_writer.start(library=f"osi-python mcap tests") + mcap_writer = prepare_mcap_writer(path) + channel_id = add_channel(mcap_writer, TrafficUpdate, "TrafficUpdateTopic") trafficupdate = TrafficUpdate() @@ -712,30 +657,17 @@ def create_sample_tu(path): time = trafficupdate.timestamp.seconds + trafficupdate.timestamp.nanos / 1e9 mcap_writer.add_message( channel_id=channel_id, - log_time=int(time*1000000000), + log_time=int(time * 1000000000), data=trafficupdate.SerializeToString(), - publish_time=int(time*1000000000), + publish_time=int(time * 1000000000), ) mcap_writer.finish() def create_sample_mr(path): - mcap_writer = Writer(output=str(path),) - file_descriptor_set = build_file_descriptor_set(MotionRequest) - schema_id = mcap_writer.register_schema( - name="osi3.MotionRequest", - encoding=MessageEncoding.Protobuf, - data=file_descriptor_set.SerializeToString(), - ) - topic_name = "MotionRequestTopic" - channel_id = mcap_writer.register_channel( - topic=topic_name, - message_encoding=MessageEncoding.Protobuf, - schema_id=schema_id, - metadata={}, - ) - mcap_writer.start(library=f"osi-python mcap tests") + mcap_writer = prepare_mcap_writer(path) + channel_id = add_channel(mcap_writer, MotionRequest, "MotionRequestTopic") motionrequest = MotionRequest() @@ -771,30 +703,17 @@ def create_sample_mr(path): time = motionrequest.timestamp.seconds + motionrequest.timestamp.nanos / 1e9 mcap_writer.add_message( channel_id=channel_id, - log_time=int(time*1000000000), + log_time=int(time * 1000000000), data=motionrequest.SerializeToString(), - publish_time=int(time*1000000000), + publish_time=int(time * 1000000000), ) mcap_writer.finish() def create_sample_su(path): - mcap_writer = Writer(output=str(path),) - file_descriptor_set = build_file_descriptor_set(StreamingUpdate) - schema_id = mcap_writer.register_schema( - name="osi3.StreamingUpdate", - encoding=MessageEncoding.Protobuf, - data=file_descriptor_set.SerializeToString(), - ) - topic_name = "StreamingUpdateTopic" - channel_id = mcap_writer.register_channel( - topic=topic_name, - message_encoding=MessageEncoding.Protobuf, - schema_id=schema_id, - metadata={}, - ) - mcap_writer.start(library=f"osi-python mcap tests") + mcap_writer = prepare_mcap_writer(path) + channel_id = add_channel(mcap_writer, StreamingUpdate, "StreamingUpdateTopic") streamingupdate = StreamingUpdate() @@ -831,9 +750,9 @@ def create_sample_su(path): time = streamingupdate.timestamp.seconds + streamingupdate.timestamp.nanos / 1e9 mcap_writer.add_message( channel_id=channel_id, - log_time=int(time*1000000000), + log_time=int(time * 1000000000), 
data=streamingupdate.SerializeToString(), - publish_time=int(time*1000000000), + publish_time=int(time * 1000000000), ) mcap_writer.finish() From 37cb4d1024054dd2fcb6ca145b71c48e819920a9 Mon Sep 17 00:00:00 2001 From: "Pierre R. Mai" Date: Tue, 4 Nov 2025 11:26:37 +0100 Subject: [PATCH 12/14] Adjust length checking code to multi trace Signed-off-by: Pierre R. Mai --- tests/test_osi_trace_multi.py | 72 ++++++++++++++++++++--------------- 1 file changed, 41 insertions(+), 31 deletions(-) diff --git a/tests/test_osi_trace_multi.py b/tests/test_osi_trace_multi.py index 3e6f930..9254c7f 100644 --- a/tests/test_osi_trace_multi.py +++ b/tests/test_osi_trace_multi.py @@ -33,16 +33,17 @@ def test_osi_trace_sv(self): trace = OSITrace(path_input) with open(path_output, "wt") as f: + count = 0 for message in trace: self.assertIsInstance(message, SensorView) + count += 1 f.write(str(message)) + self.assertEqual(count, 10) with warnings.catch_warnings(): warnings.simplefilter("ignore") - try: + with self.assertRaises(NotImplementedError): self.assertEqual(len(trace.retrieve_offsets()), 10) - except NotImplementedError: - pass trace.close() self.assertTrue(os.path.exists(path_output)) @@ -55,16 +56,17 @@ def test_osi_trace_svc(self): trace = OSITrace(path_input, "SensorViewConfiguration") with open(path_output, "wt") as f: + count = 0 for message in trace: self.assertIsInstance(message, SensorViewConfiguration) + count += 1 f.write(str(message)) + self.assertEqual(count, 1) with warnings.catch_warnings(): warnings.simplefilter("ignore") - try: - self.assertEqual(len(trace.retrieve_offsets()), 10) - except NotImplementedError: - pass + with self.assertRaises(NotImplementedError): + self.assertEqual(len(trace.retrieve_offsets()), 1) trace.close() self.assertTrue(os.path.exists(path_output)) @@ -77,16 +79,17 @@ def test_osi_trace_gt(self): trace = OSITrace(path_input, "GroundTruth") with open(path_output, "wt") as f: + count = 0 for message in trace: self.assertIsInstance(message, GroundTruth) + count += 1 f.write(str(message)) + self.assertEqual(count, 10) with warnings.catch_warnings(): warnings.simplefilter("ignore") - try: + with self.assertRaises(NotImplementedError): self.assertEqual(len(trace.retrieve_offsets()), 10) - except NotImplementedError: - pass trace.close() self.assertTrue(os.path.exists(path_output)) @@ -99,16 +102,17 @@ def test_osi_trace_hvd(self): trace = OSITrace(path_input, "HostVehicleData") with open(path_output, "wt") as f: + count = 0 for message in trace: self.assertIsInstance(message, HostVehicleData) + count += 1 f.write(str(message)) + self.assertEqual(count, 10) with warnings.catch_warnings(): warnings.simplefilter("ignore") - try: + with self.assertRaises(NotImplementedError): self.assertEqual(len(trace.retrieve_offsets()), 10) - except NotImplementedError: - pass trace.close() self.assertTrue(os.path.exists(path_output)) @@ -121,16 +125,17 @@ def test_osi_trace_sd(self): trace = OSITrace(path_input, "SensorData") with open(path_output, "wt") as f: + count = 0 for message in trace: self.assertIsInstance(message, SensorData) + count += 1 f.write(str(message)) + self.assertEqual(count, 10) with warnings.catch_warnings(): warnings.simplefilter("ignore") - try: + with self.assertRaises(NotImplementedError): self.assertEqual(len(trace.retrieve_offsets()), 10) - except NotImplementedError: - pass trace.close() self.assertTrue(os.path.exists(path_output)) @@ -143,16 +148,17 @@ def test_osi_trace_tc(self): trace = OSITrace(path_input, "TrafficCommand") with open(path_output, "wt") 
as f: + count = 0 for message in trace: self.assertIsInstance(message, TrafficCommand) + count += 1 f.write(str(message)) + self.assertEqual(count, 10) with warnings.catch_warnings(): warnings.simplefilter("ignore") - try: + with self.assertRaises(NotImplementedError): self.assertEqual(len(trace.retrieve_offsets()), 10) - except NotImplementedError: - pass trace.close() self.assertTrue(os.path.exists(path_output)) @@ -165,16 +171,17 @@ def test_osi_trace_tcu(self): trace = OSITrace(path_input, "TrafficCommandUpdate") with open(path_output, "wt") as f: + count = 0 for message in trace: self.assertIsInstance(message, TrafficCommandUpdate) + count += 1 f.write(str(message)) + self.assertEqual(count, 10) with warnings.catch_warnings(): warnings.simplefilter("ignore") - try: + with self.assertRaises(NotImplementedError): self.assertEqual(len(trace.retrieve_offsets()), 10) - except NotImplementedError: - pass trace.close() self.assertTrue(os.path.exists(path_output)) @@ -187,16 +194,17 @@ def test_osi_trace_tu(self): trace = OSITrace(path_input, "TrafficUpdate") with open(path_output, "wt") as f: + count = 0 for message in trace: self.assertIsInstance(message, TrafficUpdate) + count += 1 f.write(str(message)) + self.assertEqual(count, 10) with warnings.catch_warnings(): warnings.simplefilter("ignore") - try: + with self.assertRaises(NotImplementedError): self.assertEqual(len(trace.retrieve_offsets()), 10) - except NotImplementedError: - pass trace.close() self.assertTrue(os.path.exists(path_output)) @@ -209,16 +217,17 @@ def test_osi_trace_mr(self): trace = OSITrace(path_input, "MotionRequest") with open(path_output, "wt") as f: + count = 0 for message in trace: self.assertIsInstance(message, MotionRequest) + count += 1 f.write(str(message)) + self.assertEqual(count, 10) with warnings.catch_warnings(): warnings.simplefilter("ignore") - try: + with self.assertRaises(NotImplementedError): self.assertEqual(len(trace.retrieve_offsets()), 10) - except NotImplementedError: - pass trace.close() self.assertTrue(os.path.exists(path_output)) @@ -231,16 +240,17 @@ def test_osi_trace_su(self): trace = OSITrace(path_input, "StreamingUpdate") with open(path_output, "wt") as f: + count = 0 for message in trace: self.assertIsInstance(message, StreamingUpdate) + count += 1 f.write(str(message)) + self.assertEqual(count, 10) with warnings.catch_warnings(): warnings.simplefilter("ignore") - try: + with self.assertRaises(NotImplementedError): self.assertEqual(len(trace.retrieve_offsets()), 10) - except NotImplementedError: - pass trace.close() self.assertTrue(os.path.exists(path_output)) From 958381475951de7a2f591fac8375589be5d77f34 Mon Sep 17 00:00:00 2001 From: "Pierre R. Mai" Date: Tue, 4 Nov 2025 11:41:17 +0100 Subject: [PATCH 13/14] Add multi-channel test case with type checks Signed-off-by: Pierre R. 
Mai --- tests/test_osi_trace_multi.py | 157 ++++++++++++++++++++++++++++++++++ 1 file changed, 157 insertions(+) diff --git a/tests/test_osi_trace_multi.py b/tests/test_osi_trace_multi.py index 9254c7f..de67b8d 100644 --- a/tests/test_osi_trace_multi.py +++ b/tests/test_osi_trace_multi.py @@ -255,6 +255,59 @@ def test_osi_trace_su(self): self.assertTrue(os.path.exists(path_output)) + def test_osi_trace_sv_and_sd(self): + with tempfile.TemporaryDirectory() as tmpdirname: + path_output1 = os.path.join(tmpdirname, "output_svsd1.txth") + path_output2 = os.path.join(tmpdirname, "output_svsd2.txth") + path_output3 = os.path.join(tmpdirname, "output_svsd3.txth") + path_input = os.path.join(tmpdirname, "input_svsd.mcap") + create_sample_svsd(path_input) + + # Select channel via type name + trace = OSITrace(path_input, "SensorView") + with open(path_output1, "wt") as f: + count = 0 + for message in trace: + self.assertIsInstance(message, SensorView) + count += 1 + f.write(str(message)) + self.assertEqual(count, 10) + trace.close() + + # Select channel via type name + trace = OSITrace(path_input, "SensorData") + with open(path_output2, "wt") as f: + count = 0 + for message in trace: + self.assertIsInstance(message, SensorData) + count += 1 + f.write(str(message)) + self.assertEqual(count, 10) + trace.close() + + # Select channel via channel name and type name + trace = OSITrace(path_input, "SensorData", False, "SensorDataTopic") + with open(path_output3, "wt") as f: + count = 0 + for message in trace: + self.assertIsInstance(message, SensorData) + count += 1 + f.write(str(message)) + self.assertEqual(count, 10) + trace.close() + + # Mismatched channel name and type name + with self.assertRaises(ValueError): + trace = OSITrace(path_input, "SensorData", False, "SensorViewTopic") + + # Unknown channel name + with self.assertRaises(ValueError): + trace = OSITrace(path_input, "SensorData", False, "UnknownTopic") + + self.assertTrue(os.path.exists(path_output1)) + self.assertTrue(os.path.exists(path_output2)) + self.assertTrue(os.path.exists(path_output3)) + def build_file_descriptor_set(message_class) -> FileDescriptorSet: file_descriptor_set = FileDescriptorSet() @@ -766,3 +819,107 @@ def create_sample_su(path): ) mcap_writer.finish() + + +def create_sample_svsd(path): + mcap_writer = prepare_mcap_writer(path) + channel_sv = add_channel(mcap_writer, SensorView, "SensorViewTopic") + channel_sd = add_channel(mcap_writer, SensorData, "SensorDataTopic") + + sensorview = SensorView() + + sensorview.version.version_major = 3 + sensorview.version.version_minor = 0 + sensorview.version.version_patch = 0 + + sensorview.timestamp.seconds = 0 + sensorview.timestamp.nanos = 0 + + sensorview.sensor_id.value = 42 + + sensorview.host_vehicle_id.value = 114 + + sv_ground_truth = sensorview.global_ground_truth + sv_ground_truth.version.version_major = 3 + sv_ground_truth.version.version_minor = 0 + sv_ground_truth.version.version_patch = 0 + + sv_ground_truth.timestamp.seconds = 0 + sv_ground_truth.timestamp.nanos = 0 + + sv_ground_truth.host_vehicle_id.value = 114 + + moving_object = sv_ground_truth.moving_object.add() + moving_object.id.value = 114 + + sensordata = SensorData() + + sensordata.version.version_major = 3 + sensordata.version.version_minor = 0 + sensordata.version.version_patch = 0 + + sensordata.timestamp.seconds = 0 + sensordata.timestamp.nanos = 0 + + sensordata.sensor_id.value = 42 + + sdmoving_object = sensordata.moving_object.add() + sdmoving_object.header.tracking_id.value = 1 + gt_id = 
sdmoving_object.header.ground_truth_id.add() + gt_id.value = 114 + + # Generate 10 OSI messages for 9 seconds + for i in range(10): + # Increment the time + sensorview.timestamp.seconds += 1 + sensorview.timestamp.nanos += 100000 + + sv_ground_truth.timestamp.seconds += 1 + sv_ground_truth.timestamp.nanos += 100000 + + sensordata.timestamp.seconds += 1 + sensordata.timestamp.nanos += 100000 + + # SensorView moving object + moving_object.vehicle_classification.type = 2 + + moving_object.base.dimension.length = 5 + moving_object.base.dimension.width = 2 + moving_object.base.dimension.height = 1 + + moving_object.base.position.x = 0.0 + i + moving_object.base.position.y = 0.0 + moving_object.base.position.z = 0.0 + + moving_object.base.orientation.roll = 0.0 + moving_object.base.orientation.pitch = 0.0 + moving_object.base.orientation.yaw = 0.0 + + # SensorData moving object + sdmoving_object.base.dimension.length = 5 + sdmoving_object.base.dimension.width = 2 + sdmoving_object.base.dimension.height = 1 + + sdmoving_object.base.position.x = 0.0 + i + sdmoving_object.base.position.y = 0.0 + sdmoving_object.base.position.z = 0.0 + + sdmoving_object.base.orientation.roll = 0.0 + sdmoving_object.base.orientation.pitch = 0.0 + sdmoving_object.base.orientation.yaw = 0.0 + + time = sensorview.timestamp.seconds + sensorview.timestamp.nanos / 1e9 + mcap_writer.add_message( + channel_id=channel_sv, + log_time=int(time * 1000000000), + data=sensorview.SerializeToString(), + publish_time=int(time * 1000000000), + ) + mcap_writer.add_message( + channel_id=channel_sd, + log_time=int(time * 1000000000), + data=sensordata.SerializeToString(), + publish_time=int(time * 1000000000), + ) + + mcap_writer.finish() From 7ee607f20625941b165795faa04ad12ca98ac47f Mon Sep 17 00:00:00 2001 From: "Pierre R. Mai" Date: Tue, 4 Nov 2025 11:42:57 +0100 Subject: [PATCH 14/14] Reformat with black settings Signed-off-by: Pierre R. Mai --- osi3trace/osi_trace.py | 118 ++++++++++++++++++++++++++++------------- 1 file changed, 81 insertions(+), 37 deletions(-) diff --git a/osi3trace/osi_trace.py b/osi3trace/osi_trace.py index eacd0d9..a3ed619 100644 --- a/osi3trace/osi_trace.py +++ b/osi3trace/osi_trace.py @@ -40,7 +40,7 @@ class OSITrace: """This class can import and decode OSI single- and multi-channel trace files.""" - + @staticmethod def map_message_type(type_name): """Map the type name to the protobuf message type.""" @@ -50,7 +50,7 @@ def map_message_type(type_name): def message_types(): """Message types that OSITrace supports.""" return list(MESSAGES_TYPE.keys()) - + _legacy_ositrace_attributes = { "type", "file", @@ -65,18 +65,26 @@ def __getattr__(self, name): This method forwards the getattr call for unsuccessful legacy attribute name lookups to the reader in case it is an _OSITraceSingle instance. """ - if name in self._legacy_ositrace_attributes and isinstance(self.reader, _OSITraceSingle): + if name in self._legacy_ositrace_attributes and isinstance( + self.reader, _OSITraceSingle + ): return getattr(self.reader, name) - raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'") - + raise AttributeError( + f"'{type(self).__name__}' object has no attribute '{name}'" + ) + def __setattr__(self, name, value): """ This method overwrites the default setter and forwards setattr calls for legacy attribute names to the reader in case the reader is an _OSITraceSingle instance. Otherwise it uses the default setter. 
""" - reader = super().__getattribute__("reader") if "reader" in self.__dict__ else None - if name in self._legacy_ositrace_attributes and isinstance(reader, _OSITraceSingle): + reader = ( + super().__getattribute__("reader") if "reader" in self.__dict__ else None + ) + if name in self._legacy_ositrace_attributes and isinstance( + reader, _OSITraceSingle + ): setattr(reader, name, value) else: super().__setattr__(name, value) @@ -86,11 +94,13 @@ def __dir__(self): if isinstance(self.reader, _OSITraceSingle): attrs += list(self._legacy_ositrace_attributes) return attrs - - def __init__(self, path=None, type_name="SensorView", cache_messages=False, topic=None): + + def __init__( + self, path=None, type_name="SensorView", cache_messages=False, topic=None + ): """ Initializes the trace reader depending on the trace file format. - + Args: path (str): The path to the trace file. type_name (str): The type name of the messages in the trace; check supported message types with `OSITrace.message_types()`. @@ -100,8 +110,10 @@ def __init__(self, path=None, type_name="SensorView", cache_messages=False, topi self.reader = None if path is not None: - self.reader = self._init_reader(Path(path), type_name, cache_messages, topic) - + self.reader = self._init_reader( + Path(path), type_name, cache_messages, topic + ) + def _init_reader(self, path, type_name, cache_messages, topic): if not path.exists(): raise FileNotFoundError("File not found") @@ -116,7 +128,7 @@ def _init_reader(self, path, type_name, cache_messages, topic): def from_file(self, path, type_name="SensorView", cache_messages=False, topic=None): """ Initializes the trace reader depending on the trace file format. - + Args: path (str): The path to the trace file. type_name (str): The type name of the messages in the trace; check supported message types with `OSITrace.message_types()`. @@ -140,47 +152,63 @@ def __iter__(self): def close(self): return self.reader.close() - @deprecated("This is a legacy interface only supported for single-channel traces, which will be removed in future versions.") + @deprecated( + "This is a legacy interface only supported for single-channel traces, which will be removed in future versions." + ) def retrieve_offsets(self, limit=None): if isinstance(self.reader, _OSITraceSingle): return self.reader.retrieve_offsets(limit) - raise NotImplementedError("Offsets are only supported for single-channel traces.") + raise NotImplementedError( + "Offsets are only supported for single-channel traces." + ) - @deprecated("This is a legacy interface only supported for single-channel traces, which will be removed in future versions.") + @deprecated( + "This is a legacy interface only supported for single-channel traces, which will be removed in future versions." + ) def retrieve_message(self, index=None, skip=False): if isinstance(self.reader, _OSITraceSingle): return self.reader.retrieve_message(index, skip) - raise NotImplementedError("Index-based message retrieval is only supported for single-channel traces.") + raise NotImplementedError( + "Index-based message retrieval is only supported for single-channel traces." + ) - @deprecated("This is a legacy interface only supported for single-channel traces, which will be removed in future versions.") + @deprecated( + "This is a legacy interface only supported for single-channel traces, which will be removed in future versions." 
+ ) def get_message_by_index(self, index): if isinstance(self.reader, _OSITraceSingle): return self.reader.get_message_by_index(index) - raise NotImplementedError("Index-based message retrieval is only supported for single-channel traces.") + raise NotImplementedError( + "Index-based message retrieval is only supported for single-channel traces." + ) - @deprecated("This is a legacy interface only supported for single-channel traces, which will be removed in future versions.") + @deprecated( + "This is a legacy interface only supported for single-channel traces, which will be removed in future versions." + ) def get_messages_in_index_range(self, begin, end): if isinstance(self.reader, _OSITraceSingle): return self.reader.get_messages_in_index_range(begin, end) - raise NotImplementedError("Index-based message retrieval is only supported for single-channel traces.") + raise NotImplementedError( + "Index-based message retrieval is only supported for single-channel traces." + ) def get_available_topics(self): return self.reader.get_available_topics() - + def get_file_metadata(self): return self.reader.get_file_metadata() - + def get_channel_metadata(self): return self.reader.get_channel_metadata() class _ReaderBase(ABC): """Common interface for trace readers""" - + @abstractmethod def restart(self, index=None): pass - + @abstractmethod def __iter__(self): pass @@ -204,7 +232,7 @@ def get_channel_metadata(self): class _OSITraceSingle(_ReaderBase): """OSI single-channel trace reader""" - + def __init__(self, path=None, type_name="SensorView", cache_messages=False): self.type = OSITrace.map_message_type(type_name) self.file = None @@ -344,13 +372,19 @@ def close(self): self.type = None def get_available_topics(self): - raise NotImplementedError("Getting available topics is only supported for multi-channel traces.") + raise NotImplementedError( + "Getting available topics is only supported for multi-channel traces." + ) def get_file_metadata(self): - raise NotImplementedError("Getting file metadata is only supported for multi-channel traces.") + raise NotImplementedError( + "Getting file metadata is only supported for multi-channel traces." + ) def get_channel_metadata(self): - raise NotImplementedError("Getting channel metadata is only supported for multi-channel traces.") + raise NotImplementedError( + "Getting channel metadata is only supported for multi-channel traces." + ) class _OSITraceMulti(_ReaderBase): @@ -365,12 +399,16 @@ def __init__(self, path, type_name, topic): if topic == None: topic = next(iter(available_topics), None) if topic not in available_topics: - raise ValueError(f"The requested topic '{topic}' is not present in the trace file or is not of type '{type_name}'.") + raise ValueError( + f"The requested topic '{topic}' is not present in the trace file or is not of type '{type_name}'." + ) self.topic = topic - + def restart(self, index=None): if index != None: - raise NotImplementedError("Restarting from a given index is not supported for multi-channel traces.") + raise NotImplementedError( + "Restarting from a given index is not supported for multi-channel traces." 
+ ) self._iter = None def __iter__(self): @@ -384,7 +422,7 @@ def __iter__(self): msg = message_class() msg.ParseFromString(message.data) yield msg - + def close(self): if self._file: self._file.close() @@ -393,8 +431,12 @@ def close(self): self._summary = None self._iter = None - def get_available_topics(self, type_name = None): - return [channel.topic for channel in self._summary.channels.values() if self._channel_is_of_type(channel, type_name)] + def get_available_topics(self, type_name=None): + return [ + channel.topic + for channel in self._summary.channels.values() + if self._channel_is_of_type(channel, type_name) + ] def get_file_metadata(self): metadata = [] @@ -415,9 +457,11 @@ def get_message_type(self): if schema.name.startswith("osi3."): return schema.name[len("osi3.") :] else: - raise ValueError(f"Schema '{schema.name}' is not an 'osi3.' schema.") + raise ValueError( + f"Schema '{schema.name}' is not an 'osi3.' schema." + ) return None def _channel_is_of_type(self, channel, type_name): - schema = self._summary.schemas[channel.schema_id] + schema = self._summary.schemas[channel.schema_id] return type_name is None or schema.name == f"osi3.{type_name}"
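
For reference, reading one channel of a multi-channel trace with the interface added in this series could look like the following minimal sketch. The trace path is illustrative only (e.g. a file produced by the MCAP test writer above), and topic selection falls back to the first channel of the requested message type when no topic is given:

from osi3trace.osi_trace import OSITrace

# Illustrative path; any OSI MCAP trace containing a SensorView channel would work.
trace = OSITrace("sv_sample.mcap", type_name="SensorView")

print("Topics:", trace.get_available_topics())
print("File metadata:", trace.get_file_metadata())
print("Channel metadata:", trace.get_channel_metadata())

# Iteration yields decoded osi3.SensorView messages from the selected topic.
for sensor_view in trace:
    print(sensor_view.timestamp.seconds, sensor_view.timestamp.nanos)

trace.close()

Existing single-channel traces keep using the same OSITrace entry point (including the legacy attribute forwarding shown above), so current callers remain unaffected while multi-channel files are dispatched to the multi-channel reader.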