Skip to content

Commit

Permalink
Merge branch 'development' of https://github.com/danielhrisca/asammdf
Browse files Browse the repository at this point in the history
…into fixes
  • Loading branch information
FillBk committed Dec 12, 2023
2 parents b217a8a + 312bd1b commit 1c86954
Show file tree
Hide file tree
Showing 29 changed files with 372 additions and 247 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
repos:
- repo: https://github.com/ambv/black
rev: 23.10.0
rev: 23.11.0
hooks:
- id: black
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: v0.1.2
rev: v0.1.6
hooks:
- id: ruff
4 changes: 3 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,17 @@ test-requires = "pytest"
test-command = "pytest {project}/test"
build-frontend = "build"
archs = ["auto64"] # only build for 64bit architectures
skip = "pp* *_ppc64le *_s390x" # skip pypy and irrelevant architectures
skip = "pp* *_ppc64le *-musllinux* *_s390x" # skip pypy and irrelevant architectures

[tool.ruff]
select = [
"B", # flake8-bugbear
"F", # pyflakes
"UP", # pyupgrade
"I", # isort
]
ignore = [
"B007", # unused-loop-control-variable
"F401", # unused-import
"F841", # unused-variable
]
Expand Down
1 change: 1 addition & 0 deletions run_black_and_ruff.bat
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
pip install -U black ruff && ^
ruff check --fix ./src && ^
ruff check --fix ./test && ^
ruff check --fix ./setup.py && ^
black --config pyproject.toml . && ^
black --config pyproject.toml asammdf.spec && ^
Expand Down
2 changes: 1 addition & 1 deletion src/asammdf/blocks/bus_logging_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,7 @@ def extract_signal(
if bit_count not in (8, 16, 32, 64):
vals = as_non_byte_sized_signed_int(vals, bit_count)
else:
vals = vals.view(f'{">" if big_endian else "<"}i{std_size}')
vals = vals.view(f"i{std_size}")

if not raw:
vals = apply_conversion(vals, signal, ignore_value2text_conversion)
Expand Down
4 changes: 2 additions & 2 deletions src/asammdf/blocks/mdf_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,12 +71,12 @@ def _validate_channel_selection(
try:
grp = self.groups[gp_nr]
except IndexError:
raise MdfException("Group index out of range")
raise MdfException("Group index out of range") from None

try:
grp.channels[ch_nr]
except IndexError:
raise MdfException(f"Channel index out of range: {(name, group, index)}")
raise MdfException(f"Channel index out of range: {(name, group, index)}") from None
else:
if name not in self.channels_db:
raise MdfException(f'Channel "{name}" not found')
Expand Down
6 changes: 4 additions & 2 deletions src/asammdf/blocks/mdf_v3.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,6 +176,9 @@ def __init__(
if not kwargs.get("__internal__", False):
raise MdfException("Always use the MDF class; do not use the class MDF3 directly")

# bind cache to instance to avoid memory leaks
self.determine_max_vlsd_sample_size = lru_cache(maxsize=1024 * 1024)(self._determine_max_vlsd_sample_size)

self._kwargs = kwargs
self._password = kwargs.get("password", None)
self.original_name = kwargs["original_name"]
Expand Down Expand Up @@ -3889,8 +3892,7 @@ def _yield_selected_signals(
def reload_header(self):
self.header = HeaderBlock(address=0x40, stream=self._file)

@lru_cache(maxsize=1024 * 1024)
def determine_max_vlsd_sample_size(self, group, index):
def _determine_max_vlsd_sample_size(self, group, index):
return 0


Expand Down
68 changes: 36 additions & 32 deletions src/asammdf/blocks/mdf_v4.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
from canmatrix.canmatrix import CanMatrix
from lz4.frame import compress as lz_compress
from lz4.frame import decompress as lz_decompress
import numpy as np
from numpy import (
arange,
argwhere,
Expand Down Expand Up @@ -65,7 +66,6 @@
where,
zeros,
)
import numpy as np
from numpy.core.defchararray import decode, encode
from numpy.core.records import fromarrays, fromstring
from numpy.typing import NDArray
Expand Down Expand Up @@ -267,6 +267,10 @@ def __init__(
if not kwargs.get("__internal__", False):
raise MdfException("Always use the MDF class; do not use the class MDF4 directly")

# bind cache to instance to avoid memory leaks
self.determine_max_vlsd_sample_size = lru_cache(maxsize=1024 * 1024)(self._determine_max_vlsd_sample_size)
self.extract_attachment = lru_cache(maxsize=128)(self._extract_attachment)

self._kwargs = kwargs
self.original_name = kwargs["original_name"]
self.groups = []
Expand Down Expand Up @@ -1677,10 +1681,12 @@ def _get_data_blocks_info(
) -> Iterator[DataBlockInfo]:
mapped = mapped or not is_file_like(stream)

if record_size:
_32_MB = 32 * 1024 * 1024 // record_size * record_size
if record_size > 32 * 1024 * 1024:
READ_CHUNK_SIZE = record_size
elif record_size:
READ_CHUNK_SIZE = 32 * 1024 * 1024 // record_size * record_size
else:
_32_MB = 32 * 1024 * 1024
READ_CHUNK_SIZE = 32 * 1024 * 1024

if mapped:
if address:
Expand All @@ -1694,19 +1700,19 @@ def _get_data_blocks_info(

                    # split the DTBLOCK into chunks of up to 32MB
while True:
if size > _32_MB:
total_size -= _32_MB
size -= _32_MB
if size > READ_CHUNK_SIZE:
total_size -= READ_CHUNK_SIZE
size -= READ_CHUNK_SIZE

yield DataBlockInfo(
address=address,
block_type=v4c.DT_BLOCK,
original_size=_32_MB,
compressed_size=_32_MB,
original_size=READ_CHUNK_SIZE,
compressed_size=READ_CHUNK_SIZE,
param=0,
block_limit=None,
)
address += _32_MB
address += READ_CHUNK_SIZE
else:
if total_size < size:
block_limit = total_size
Expand Down Expand Up @@ -1768,19 +1774,19 @@ def _get_data_blocks_info(

                                # split the DTBLOCK into chunks of up to 32MB
while True:
if size > _32_MB:
total_size -= _32_MB
size -= _32_MB
if size > READ_CHUNK_SIZE:
total_size -= READ_CHUNK_SIZE
size -= READ_CHUNK_SIZE

yield DataBlockInfo(
address=addr,
block_type=v4c.DT_BLOCK,
original_size=_32_MB,
compressed_size=_32_MB,
original_size=READ_CHUNK_SIZE,
compressed_size=READ_CHUNK_SIZE,
param=0,
block_limit=None,
)
addr += _32_MB
addr += READ_CHUNK_SIZE
else:
if total_size < size:
block_limit = total_size
Expand Down Expand Up @@ -1977,19 +1983,19 @@ def _get_data_blocks_info(

                    # split the DTBLOCK into chunks of up to 32MB
while True:
if size > _32_MB:
total_size -= _32_MB
size -= _32_MB
if size > READ_CHUNK_SIZE:
total_size -= READ_CHUNK_SIZE
size -= READ_CHUNK_SIZE

yield DataBlockInfo(
address=address,
block_type=v4c.DT_BLOCK,
original_size=_32_MB,
compressed_size=_32_MB,
original_size=READ_CHUNK_SIZE,
compressed_size=READ_CHUNK_SIZE,
param=0,
block_limit=None,
)
address += _32_MB
address += READ_CHUNK_SIZE
else:
if total_size < size:
block_limit = total_size
Expand Down Expand Up @@ -2054,19 +2060,19 @@ def _get_data_blocks_info(

                                # split the DTBLOCK into chunks of up to 32MB
while True:
if size > _32_MB:
total_size -= _32_MB
size -= _32_MB
if size > READ_CHUNK_SIZE:
total_size -= READ_CHUNK_SIZE
size -= READ_CHUNK_SIZE

yield DataBlockInfo(
address=addr,
block_type=v4c.DT_BLOCK,
original_size=_32_MB,
compressed_size=_32_MB,
original_size=READ_CHUNK_SIZE,
compressed_size=READ_CHUNK_SIZE,
param=0,
block_limit=None,
)
addr += _32_MB
addr += READ_CHUNK_SIZE
else:
if total_size < size:
block_limit = total_size
Expand Down Expand Up @@ -6180,8 +6186,7 @@ def close(self) -> None:
self._dbc_cache.clear()
self.virtual_groups.clear()

@lru_cache(maxsize=128)
def extract_attachment(
def _extract_attachment(
self,
index: int | None = None,
password: str | bytes | None = None,
Expand Down Expand Up @@ -7765,8 +7770,7 @@ def _get_not_byte_aligned_data(self, data: bytes, group: Group, ch_nr: int) -> N
else:
return vals

@lru_cache(maxsize=1024 * 1024)
def determine_max_vlsd_sample_size(self, group, index):
def _determine_max_vlsd_sample_size(self, group, index):
group_index = group
channel_index = index
group = self.groups[group]
Expand Down
4 changes: 2 additions & 2 deletions src/asammdf/blocks/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,8 +88,8 @@ def detect(text: bytes) -> DetectDict:

from canmatrix.canmatrix import CanMatrix, matrix_class
import canmatrix.formats
from numpy import arange, bool_, dtype, interp, where
import numpy as np
from numpy import arange, bool_, dtype, interp, where
from numpy.typing import NDArray
from pandas import Series

Expand Down Expand Up @@ -2378,7 +2378,7 @@ def load_lab(file):
return {name: channels for name, channels in sections.items() if channels if name != "SETTINGS"}


class SignalFlags(IntFlag):
class SignalFlags:
no_flags = 0x0
user_defined_comment = 0x1
user_defined_conversion = 0x2
Expand Down
9 changes: 7 additions & 2 deletions src/asammdf/blocks/v4_blocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
import time
from traceback import format_exc
from typing import Any, TYPE_CHECKING
from xml.dom import minidom
import xml.etree.ElementTree as ET

import dateutil.tz
Expand Down Expand Up @@ -2324,7 +2325,7 @@ def __init__(self, **kwargs) -> None:
if self.id != b"##CC":
message = f'Expected "##CC" block @{hex(address)} but found "{self.id}"'
logger.exception(message)
raise MdfException(message)
raise MdfException(message) from None

block = stream.read(self.block_len - COMMON_SIZE)

Expand Down Expand Up @@ -5596,12 +5597,16 @@ def common_properties_to_xml(root, common_properties):

common_properties_to_xml(common, self._common_properties)

return (
comment_xml = (
ET.tostring(root, encoding="utf8", method="xml")
.replace(b"<?xml version='1.0' encoding='utf8'?>\n", b"")
.decode("utf-8")
)

comment_xml = minidom.parseString(comment_xml).toprettyxml(indent=" ")

return "\n".join(comment_xml.splitlines()[1:])

@comment.setter
def comment(self, string):
self._common_properties.clear()
Expand Down
3 changes: 2 additions & 1 deletion src/asammdf/gui/asammdfgui.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,13 +35,14 @@ def main(measurements=None):
monkey_patch_pyqtgraph()
parser = _cmd_line_parser()
args = parser.parse_args(sys.argv[1:])

app = pyqtgraph.mkQApp()
app.setOrganizationName("py-asammdf")
app.setOrganizationDomain("py-asammdf")
app.setApplicationName("py-asammdf")
set_app_user_model_id("py-asammdf")

_main_window = MainWindow(args.measurements)
_main_window = MainWindow(measurements or args.measurements)
app.setStyle(QtWidgets.QStyleFactory.create("Fusion"))

app.exec()
Expand Down
4 changes: 4 additions & 0 deletions src/asammdf/gui/dialogs/advanced_search_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,3 +40,7 @@ def keyPressEvent(self, event):
root = self.invisibleRootItem()
for item in selected_items:
(item.parent() or root).removeChild(item)

event.accept()
else:
super().keyPressEvent(event)
Loading

0 comments on commit 1c86954

Please sign in to comment.