Commit 004be5f

Merge 6a97ec7 into 8e223d9
zariiii9003 committed Jan 12, 2024
2 parents 8e223d9 + 6a97ec7 commit 004be5f
Showing 13 changed files with 71 additions and 83 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -1,10 +1,10 @@
 repos:
   - repo: https://github.com/ambv/black
-    rev: 23.12.0
+    rev: 23.12.1
     hooks:
       - id: black
   - repo: https://github.com/charliermarsh/ruff-pre-commit
     # Ruff version.
-    rev: v0.1.7
+    rev: v0.1.12
     hooks:
       - id: ruff
7 changes: 7 additions & 0 deletions pyproject.toml
@@ -29,11 +29,18 @@ select = [
     "UP",   # pyupgrade
     "I",    # isort
     "PIE",  # flake8-pie
+    "PL",   # pylint
     "RUF",  # Ruff-specific rules
 ]
 ignore = [
     "B007",    # unused-loop-control-variable
     "F841",    # unused-variable
+    "PLC0414", # useless-import-alias
+    "PLR09",   # too-many-this, too-many-that
+    "PLR2004", # magic-value-comparison
+    "PLR5501", # collapsible-else-if
+    "PLW0603", # global-statement
+    "PLW2901", # redefined-loop-name
     "RUF012",  # mutable-class-default
     "RUF015",  # unnecessary-iterable-allocation-for-first-element
 ]
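For reference, a minimal sketch (hypothetical code, not from this repository) of what two of the newly ignored pylint-family rules would otherwise flag:

```python
# PLR2004 (magic-value-comparison): ruff would ask for a named constant
# instead of the bare literal 18.
def is_adult(age: int) -> bool:
    return age >= 18


# PLW2901 (redefined-loop-name): the loop variable is deliberately
# reassigned inside the loop body.
for line in ["a ", "b "]:
    line = line.strip()
    print(line)
```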
2 changes: 1 addition & 1 deletion src/asammdf/blocks/mdf_v3.py
@@ -517,7 +517,7 @@ def _prepare_record(self, group: Group) -> list:

         record = []

-        for idx, new_ch in enumerate(channels):
+        for new_ch in channels:
             start_offset = new_ch.start_offset
             try:
                 additional_byte_offset = new_ch.additional_byte_offset
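This hunk shows a pattern repeated throughout the commit: `enumerate` is dropped wherever the loop index goes unused. A standalone illustration with hypothetical names:

```python
channels = ["ch_a", "ch_b"]

# Before: idx is never read, so enumerate is noise.
for idx, ch in enumerate(channels):
    print(ch)

# After: iterate directly.
for ch in channels:
    print(ch)
```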
51 changes: 21 additions & 30 deletions src/asammdf/blocks/mdf_v4.py
@@ -5693,15 +5693,13 @@ def extend(self, index: int, signals: list[tuple[NDArray[Any], NDArray[Any] | No
                     if size % 2:
                         size += 1
                         elem = elem + b"\0"
-                    data.append(UINT32_p(size))
-                    data.append(elem)
+                    data.extend((UINT32_p(size), elem))
                     off += size + 4
             else:
                 for elem in signal:
                     offsets.append(off)
                     size = len(elem)
-                    data.append(UINT32_p(size))
-                    data.append(elem)
+                    data.extend((UINT32_p(size), elem))
                     off += size + 4

         offsets = array(offsets, dtype=uint64)
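Several files in this commit collapse paired `append` calls into a single `extend` over a tuple. A minimal sketch of the equivalence, with hypothetical byte values standing in for the packed size and element above:

```python
data: list[bytes] = []
size_bytes, elem = b"\x08\x00\x00\x00", b"payload!"

# Two appends...
data.append(size_bytes)
data.append(elem)

# ...produce the same list as one extend over a tuple.
data2: list[bytes] = []
data2.extend((size_bytes, elem))
assert data == data2
```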
@@ -6074,7 +6072,6 @@ def attach(
             """
         else:
             hash_sum_encrypted = hash_sum
-            comment = comment

         if hash_sum_encrypted in self._attachments_cache:
             return self._attachments_cache[hash_sum]
@@ -6593,7 +6590,7 @@ def get(
         if name is None:
             name = channel.name

-        unit = conversion and conversion.unit or channel.unit
+        unit = conversion.unit if conversion else channel.unit

         comment = channel.comment

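The `and ... or ...` idiom replaced here misbehaves when the middle operand is falsy; the ternary only falls back when `conversion` itself is missing. A standalone sketch with hypothetical classes:

```python
class Conversion:
    unit = ""  # a valid but falsy unit


conversion = Conversion()
channel_unit = "V"

# Old idiom: "" is falsy, so it silently falls through to channel_unit.
unit_old = conversion and conversion.unit or channel_unit
# New form: only falls back when conversion is None.
unit_new = conversion.unit if conversion else channel_unit

assert unit_old == "V"
assert unit_new == ""
```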
@@ -7621,22 +7618,16 @@ def _get_scalar(
             )
             vals = vals.view(types)

-            arrays = []
-            arrays.append(vals["ms"])
-            # bit 6 and 7 of minutes are reserved
-            arrays.append(vals["min"] & 0x3F)
-            # only firt 4 bits of hour are used
-            arrays.append(vals["hour"] & 0xF)
-            # the first 4 bits are the day number
-            arrays.append(vals["day"] & 0xF)
-            # bit 6 and 7 of month are reserved
-            arrays.append(vals["month"] & 0x3F)
-            # bit 7 of year is reserved
-            arrays.append(vals["year"] & 0x7F)
-            # add summer or standard time information for hour
-            arrays.append((vals["hour"] & 0x80) >> 7)
-            # add day of week information
-            arrays.append((vals["day"] & 0xF0) >> 4)
+            arrays = [
+                vals["ms"],
+                vals["min"] & 0x3F,  # bit 6 and 7 of minutes are reserved
+                vals["hour"] & 0xF,  # only first 4 bits of hour are used
+                vals["day"] & 0xF,  # the first 4 bits are the day number
+                vals["month"] & 0x3F,  # bit 6 and 7 of month are reserved
+                vals["year"] & 0x7F,  # bit 7 of year is reserved
+                (vals["hour"] & 0x80) >> 7,  # add summer or standard time information for hour
+                (vals["day"] & 0xF0) >> 4,  # add day of week information
+            ]

             names = [
                 "ms",
@@ -7989,7 +7980,7 @@ def _yield_selected_signals(
                 if group_index == index:
                     master_index = idx

-        encodings = {group_index: [None] for groups_index in groups}
+        encodings = {group_index: [None] for group_index in groups}

         self._set_temporary_master(None)
         idx = 0
@@ -10330,7 +10321,7 @@ def _process_can_logging(self, group_index: int, grp: Group) -> None:

         dbc = None

-        for i, channel in enumerate(channels):
+        for channel in channels:
             if channel.name == "CAN_DataFrame":
                 attachment_addr = channel.attachment

@@ -10357,7 +10348,7 @@ def _process_can_logging(self, group_index: int, grp: Group) -> None:
                 self._prepare_record(group)
                 data = self._load_data(group, record_offset=0, record_count=1)

-                for fragment_index, fragment in enumerate(data):
+                for fragment in data:
                     self._set_temporary_master(None)
                     self._set_temporary_master(self.get_master(group_index, data=fragment))

@@ -10397,7 +10388,7 @@ def _process_can_logging(self, group_index: int, grp: Group) -> None:
                 self._prepare_record(group)
                 data = self._load_data(group, optimize_read=False)

-                for fragment_index, fragment in enumerate(data):
+                for fragment in data:
                     self._set_temporary_master(None)
                     self._set_temporary_master(self.get_master(group_index, data=fragment))

@@ -10453,7 +10444,7 @@ def _process_can_logging(self, group_index: int, grp: Group) -> None:
                 self._prepare_record(group)
                 data = self._load_data(group, optimize_read=False)

-                for fragment_index, fragment in enumerate(data):
+                for fragment in data:
                     self._set_temporary_master(None)
                     self._set_temporary_master(self.get_master(group_index, data=fragment))

@@ -10582,7 +10573,7 @@ def _process_lin_logging(self, group_index: int, grp: Group) -> None:

         dbc = None

-        for i, channel in enumerate(channels):
+        for channel in channels:
             if channel.name == "LIN_Frame":
                 attachment_addr = channel.attachment
                 if attachment_addr is not None:
@@ -10611,7 +10602,7 @@ def _process_lin_logging(self, group_index: int, grp: Group) -> None:
                 self._prepare_record(group)
                 data = self._load_data(group, optimize_read=False)

-                for fragment_index, fragment in enumerate(data):
+                for fragment in data:
                     self._set_temporary_master(None)
                     self._set_temporary_master(self.get_master(group_index, data=fragment))

@@ -10644,7 +10635,7 @@ def _process_lin_logging(self, group_index: int, grp: Group) -> None:
                 self._prepare_record(group)
                 data = self._load_data(group, optimize_read=False)

-                for fragment_index, fragment in enumerate(data):
+                for fragment in data:
                     self._set_temporary_master(None)
                     self._set_temporary_master(self.get_master(group_index, data=fragment))

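Among the mechanical cleanups above, the `_yield_selected_signals` hunk is a genuine bug fix: the comprehension variable was misspelled as `groups_index`, so every key was the enclosing scope's `group_index` and the dict collapsed to a single entry. A standalone sketch of the failure mode:

```python
group_index = 99  # leaked in from the enclosing scope
groups = [0, 1, 2]

# Old: groups_index is never used, so the key never varies.
buggy = {group_index: [None] for groups_index in groups}
assert buggy == {99: [None]}

# Fixed: the comprehension binds group_index itself.
fixed = {group_index: [None] for group_index in groups}
assert fixed == {0: [None], 1: [None], 2: [None]}
```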
8 changes: 5 additions & 3 deletions src/asammdf/blocks/utils.py
@@ -664,17 +664,17 @@ def fmt_to_datatype_v3(fmt: dtype[Any], shape: tuple[int, ...], array: bool = Fa
             size *= dim
     else:
         if kind == "u":
-            if byteorder in "<":
+            if byteorder == "<":
                 data_type = v3c.DATA_TYPE_UNSIGNED_INTEL
             else:
                 data_type = v3c.DATA_TYPE_UNSIGNED_MOTOROLA
         elif kind == "i":
-            if byteorder in "<":
+            if byteorder == "<":
                 data_type = v3c.DATA_TYPE_SIGNED_INTEL
             else:
                 data_type = v3c.DATA_TYPE_SIGNED_MOTOROLA
         elif kind == "f":
-            if byteorder in "<":
+            if byteorder == "<":
                 if size == 32:
                     data_type = v3c.DATA_TYPE_FLOAT
                 else:
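The `in "<"` → `== "<"` change is not just cosmetic: membership against a one-character string also matches the empty string, so equality states the intent exactly. A quick check:

```python
# Membership in a one-character string is a near-miss for equality:
assert "<" in "<"
assert "" in "<"  # surprising: the empty string always matches
assert "=" not in "<"

# Equality cannot be accidentally true for "":
assert "<" == "<"
assert not ("" == "<")
```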
@@ -1184,6 +1184,7 @@ def cut_video_stream(stream: bytes, start: float, end: float, fmt: str) -> bytes
                 f"{out_file}",
             ],
             capture_output=True,
+            check=False,
         )
     except FileNotFoundError:
         result = stream
@@ -1214,6 +1215,7 @@ def get_video_stream_duration(stream: bytes) -> float | None:
                 f"{in_file}",
             ],
             capture_output=True,
+            check=False,
         )
         result = float(result.stdout)
     except FileNotFoundError:
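Passing `check=False` explicitly matches `subprocess.run`'s default, so behavior is unchanged; it documents that a non-zero exit from the external video tool is tolerated and satisfies the lint rule that wants `check` spelled out. A minimal sketch of the two modes:

```python
import subprocess

# check=False (the default): a non-zero exit is not an error;
# the caller inspects returncode/stdout itself.
result = subprocess.run(["false"], capture_output=True, check=False)
assert result.returncode != 0

# check=True would raise instead:
try:
    subprocess.run(["false"], capture_output=True, check=True)
except subprocess.CalledProcessError as exc:
    print(f"command failed with exit code {exc.returncode}")
```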
3 changes: 1 addition & 2 deletions src/asammdf/blocks/v2_v3_blocks.py
@@ -1329,8 +1329,7 @@ def metadata(self, indent: str = "") -> str:
             nr = self.ref_param_nr
             new_keys = []
             for i in range(nr):
-                new_keys.append(f"param_val_{i}")
-                new_keys.append(f"text_{i}")
+                new_keys.extend((f"param_val_{i}", f"text_{i}"))
             keys += tuple(new_keys)

         elif conv in (v23c.CONVERSION_TYPE_POLY, v23c.CONVERSION_TYPE_RAT):
8 changes: 4 additions & 4 deletions src/asammdf/gui/dialogs/advanced_search.py
@@ -174,7 +174,7 @@ def search_text_changed(self):
                     (group_index, channel_index): {
                         "names": [ch.name],
                         "comment": extract_xml_comment(ch.comment).strip(),
-                        "unit": ch.conversion and ch.conversion.unit or ch.unit,
+                        "unit": ch.conversion.unit if ch.conversion else ch.unit,
                         "source_name": cg_source.name,
                         "source_path": cg_source.path,
                     }
@@ -198,7 +198,7 @@ def search_text_changed(self):
                         matches[entry] = {
                             "names": [target],
                             "comment": extract_xml_comment(ch.comment).strip(),
-                            "unit": ch.conversion and ch.conversion.unit or ch.unit,
+                            "unit": ch.conversion.unit if ch.conversion else ch.unit,
                             "source_name": source.name if source else "",
                             "source_path": source.path if source else "",
                         }
@@ -216,7 +216,7 @@ def search_text_changed(self):
                         matches[entry] = {
                             "names": [ch.name],
                             "comment": extract_xml_comment(ch.comment).strip(),
-                            "unit": ch.conversion and ch.conversion.unit or ch.unit,
+                            "unit": ch.conversion.unit if ch.conversion else ch.unit,
                             "source_name": source.name if source else "",
                             "source_path": source.path if source else "",
                         }
@@ -238,7 +238,7 @@ def search_text_changed(self):
                         matches[entry] = {
                             "names": [],
                             "comment": extract_xml_comment(ch.comment).strip(),
-                            "unit": ch.conversion and ch.conversion.unit or ch.unit,
+                            "unit": ch.conversion.unit if ch.conversion else ch.unit,
                             "source_name": source.name if source else "",
                             "source_path": source.path if source else "",
                         }
3 changes: 1 addition & 2 deletions src/asammdf/gui/dialogs/bus_database_manager.py
@@ -71,8 +71,7 @@ def store(self):

         dbs = []
         for bus, database in databases["CAN"]:
-            dbs.append(bus)
-            dbs.append(database)
+            dbs.extend((bus, database))

         self._settings.setValue("can_databases", dbs)

3 changes: 1 addition & 2 deletions src/asammdf/gui/utils.py
@@ -1009,8 +1009,7 @@ def value_as_bin(value, dtype):

     nibles = []
     for byte in byte_string:
-        nibles.append(f"{byte >> 4:04b}")
-        nibles.append(f"{byte & 0xf:04b}")
+        nibles.extend((f"{byte >> 4:04b}", f"{byte & 0xf:04b}"))

     return ".".join(nibles)

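As an aside on the format spec used in `value_as_bin`, a standalone check of the high/low nibble split:

```python
byte = 0xA7
high = f"{byte >> 4:04b}"  # top four bits    -> "1010"
low = f"{byte & 0xF:04b}"  # bottom four bits -> "0111"
assert ".".join((high, low)) == "1010.0111"
```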
22 changes: 14 additions & 8 deletions src/asammdf/gui/widgets/plot.py
@@ -11,8 +11,8 @@

 import numpy as np
 import pyqtgraph as pg
-from pyqtgraph import Qt
 import pyqtgraph.functions as fn
+import pyqtgraph.Qt as Qt
 from PySide6 import QtCore, QtGui, QtWidgets

 PLOT_BUFFER_SIZE = 4000
Expand Down Expand Up @@ -133,15 +133,17 @@ def cached_mkPen_factory(*args, **kargs):


def simple_min(a, b):
if b != b:
if b != b: # noqa: PLR0124
# b is NaN
return a
if a <= b:
return a
return b


def simple_max(a, b):
if b != b:
if b != b: # noqa: PLR0124
# b is NaN
return a
if a <= b:
return b
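`b != b` is the classic self-comparison test for NaN (IEEE 754 defines NaN as unequal to everything, including itself), which is exactly what ruff's PLR0124 flags, hence the `noqa` markers. A standalone check:

```python
import math

nan = float("nan")
assert nan != nan       # only NaN compares unequal to itself
assert math.isnan(nan)  # the more explicit spelling

# simple_min above therefore returns its first argument when b is NaN:
# simple_min(1.0, nan) -> 1.0
```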
@@ -2009,7 +2011,7 @@ def add_new_items(tree, root, items, items_pool):
                         item.NameColumn,
                         QtCore.Qt.CheckState.Checked if info["enabled"] else QtCore.Qt.CheckState.Unchecked,
                     )
-                    if "disabled" in info and info["disabled"]:
+                    if info.get("disabled", False):
                         item.set_disabled(info["disabled"])

             self.channel_selection.blockSignals(False)
@@ -4457,9 +4459,11 @@ def keyPressEvent(self, event):
                         else:
                             min_, max_ = 0, 1

-                        if min_ != min_:
+                        if min_ != min_:  # noqa: PLR0124
+                            # min_ is NaN
                             min_ = 0
-                        if max_ != max_:
+                        if max_ != max_:  # noqa: PLR0124
+                            # max_ is NaN
                             max_ = 1

                         signal.y_range = min_, max_
@@ -4500,9 +4504,11 @@ def keyPressEvent(self, event):
                     else:
                         min_, max_ = 0, 1

-                    if min_ != min_:
+                    if min_ != min_:  # noqa: PLR0124
+                        # min_ is NaN
                         min_ = 0
-                    if max_ != max_:
+                    if max_ != max_:  # noqa: PLR0124
+                        # max is NaN
                         max_ = 1

                     signal.y_range = min_, max_