[pre-commit.ci] pre-commit autoupdate #32

Open · wants to merge 2 commits into base: main
14 changes: 7 additions & 7 deletions .pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0
rev: v4.6.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
@@ -11,32 +11,32 @@ repos:
- id: requirements-txt-fixer

- repo: https://github.com/asottile/setup-cfg-fmt
rev: v2.2.0
rev: v2.5.0
hooks:
- id: setup-cfg-fmt

- repo: https://github.com/asottile/reorder_python_imports
rev: v3.9.0
- repo: https://github.com/asottile/reorder-python-imports
rev: v3.13.0
hooks:
- id: reorder-python-imports
exclude: ^(pre_commit/resources/|testing/resources/python3_hooks_repo/)
args: [ --py38-plus, --add-import, 'from __future__ import annotations' ]

- repo: https://github.com/asottile/pyupgrade
rev: v3.2.2
rev: v3.17.0
hooks:
- id: pyupgrade
args: [ --py38-plus ]

- repo: https://github.com/psf/black
rev: 22.10.0
rev: 24.8.0
hooks:
- id: black
args: [ --safe ]
additional_dependencies: [ 'click==8.0.4' ]

- repo: https://github.com/PyCQA/flake8
rev: 5.0.4
rev: 7.1.1
hooks:
- id: flake8

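The hunks above are the usual output of `pre-commit autoupdate`: every pinned `rev:` moves to the latest tag of its hook repository, and the `reorder_python_imports` URL is updated to the repository's current hyphenated name. A minimal sketch for reproducing the same update locally, assuming pre-commit is installed in the environment:

```python
import subprocess

# Reproduce the pre-commit.ci update locally: bump every `rev:` in
# .pre-commit-config.yaml to the latest tag, then run all hooks so any
# resulting reformatting lands in the same change set.
subprocess.run(["pre-commit", "autoupdate"], check=True)
# `run --all-files` exits non-zero when hooks modify files, so don't raise.
subprocess.run(["pre-commit", "run", "--all-files"], check=False)
```
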
2 changes: 1 addition & 1 deletion docs/guide/tutorial.rst
@@ -1,2 +1,2 @@
Tutorial
========
========
10 changes: 5 additions & 5 deletions requirements_dev.txt
@@ -1,8 +1,8 @@
black==22.10.0
flake8>=3.9.2
tox>=3.24.3
pytest>=6.2.5
pytest-cov>=2.12.1
mypy>=0.910
black==22.10.0
pre-commit>=2.1.0
openpyxl>=3.0.10
pre-commit>=2.1.0
pytest>=6.2.5
pytest-cov>=2.12.1
tox>=3.24.3
4 changes: 3 additions & 1 deletion setup.py
@@ -1,4 +1,6 @@
from __future__ import annotations

from setuptools import setup

if __name__ == "__main__":
if __name__ == '__main__':
setup()
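The added `from __future__ import annotations` matches the `--add-import 'from __future__ import annotations'` argument configured for reorder-python-imports in the hook config above. A small, self-contained illustration of what the future import buys on the Python 3.8 baseline implied by `--py38-plus` (the function below is an example, not project code):

```python
from __future__ import annotations


def scale(values: list[float], factor: float) -> list[float]:
    # With postponed evaluation of annotations (PEP 563) the annotation
    # `list[float]` is never evaluated at runtime, so this runs on
    # Python 3.8 even though subscripting the builtin `list` only became
    # legal at runtime in Python 3.9.
    return [v * factor for v in values]


print(scale([1.0, 2.0], 2.5))  # [2.5, 5.0]
```
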
4 changes: 1 addition & 3 deletions src/femto/laserpath.py
@@ -374,9 +374,7 @@ def curvature_radius(self) -> npt.NDArray[np.float32]:
norm_r1 = np.linalg.norm(r1, axis=1)

# Return the local curvature radius, where cannot divide by 0 return inf
return np.divide(
norm_r1**3, norm_cross, out=np.full_like(norm_r1, fill_value=np.inf), where=~(norm_cross == 0)
)
return np.divide(norm_r1**3, norm_cross, out=np.full_like(norm_r1, fill_value=np.inf), where=~(norm_cross == 0))

@property
def cmd_rate(self) -> npt.NDArray[np.float32]:
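The `curvature_radius` hunk is a formatting-only reflow of the `np.divide` call; the logic is unchanged. As a standalone sketch of the trick (the input arrays are invented): `out=` pre-fills the result with `inf` and `where=` skips the zero denominators, so straight segments report an infinite curvature radius instead of raising a divide-by-zero warning.

```python
import numpy as np

norm_r1 = np.array([1.0, 2.0, 3.0], dtype=np.float32)
norm_cross = np.array([0.5, 0.0, 1.5], dtype=np.float32)

# Divide norm_r1**3 by norm_cross, leaving the pre-filled inf untouched
# wherever the denominator is exactly zero (no warning, no NaN).
radius = np.divide(
    norm_r1**3,
    norm_cross,
    out=np.full_like(norm_r1, fill_value=np.inf),
    where=~(norm_cross == 0),
)
print(radius)  # [ 2. inf 18.]
```
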
14 changes: 7 additions & 7 deletions src/femto/pgmcompiler.py
@@ -59,9 +59,9 @@ def __post_init__(self) -> None:
self._loaded_files: list[str] = []
self._dvars: list[str] = []

self.fwarp: Callable[
[npt.NDArray[np.float32], npt.NDArray[np.float32]], npt.NDArray[np.float32]
] = self.warp_management(self.warp_flag)
self.fwarp: Callable[[npt.NDArray[np.float32], npt.NDArray[np.float32]], npt.NDArray[np.float32]] = (
self.warp_management(self.warp_flag)
)

# Set rotation angle in radians for matrix rotations
if self.rotation_angle:
@@ -444,7 +444,7 @@ def go_init(self) -> None:
self.move_to([-2, 0, 0])

@contextlib.contextmanager
def axis_rotation(self, angle: float | None = None) -> Generator[PGMCompiler, None, None]:
def axis_rotation(self, angle: float | None = None) -> Generator[PGMCompiler]:
"""Aerotech axis rotation (G84).

Context manager for the G84 command. The user can specify the angle (in degree) of the axis rotation.
@@ -465,7 +465,7 @@ def axis_rotation(self, angle: float | None = None) -> Generator[PGMCompiler, No
self._exit_axis_rotation()

@contextlib.contextmanager
def for_loop(self, var: str, num: int) -> Generator[PGMCompiler, None, None]:
def for_loop(self, var: str, num: int) -> Generator[PGMCompiler]:
"""Foor loop instruction.

Context manager that manages a ``FOR`` loops in a G-Code file.
@@ -502,7 +502,7 @@ def for_loop(self, var: str, num: int) -> Generator[PGMCompiler, None, None]:
self._total_dwell_time += int(num - 1) * (self._total_dwell_time - _temp_dt)

@contextlib.contextmanager
def repeat(self, num: int) -> Generator[PGMCompiler, None, None]:
def repeat(self, num: int) -> Generator[PGMCompiler]:
"""Repeat loop instruction.

Context manager that manages a ``REPEAT`` loops in a G-Code file.
@@ -745,7 +745,7 @@ def write(self, points: npt.NDArray[np.float32]) -> None:

# Convert points if G-Code commands
args = [self._format_args(x, y, z, f) for (x, y, z, f) in zip(x_gc, y_gc, z_gc, f_gc)]
for (arg, s) in itertools.zip_longest(args, s_gc):
for arg, s in itertools.zip_longest(args, s_gc):
if s == 0 and self._shutter_on is True:
self.instruction('\n')
self.dwell(self.short_pause)
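The return annotations here shrink from `Generator[PGMCompiler, None, None]` to `Generator[PGMCompiler]`, relying on the type-parameter defaults that Python 3.13 and recent typeshed stubs give `collections.abc.Generator` (the send and return types default to `None`). A minimal sketch of the same context-manager pattern, with a stand-in class rather than the real `PGMCompiler`:

```python
from __future__ import annotations

import contextlib
from collections.abc import Generator


class Compiler:
    def __init__(self) -> None:
        self.angle: float | None = None

    @contextlib.contextmanager
    def axis_rotation(self, angle: float | None = None) -> Generator[Compiler]:
        # Equivalent to Generator[Compiler, None, None]; recent type
        # checkers fill in the defaulted send/return parameters.
        self.angle = angle
        try:
            yield self
        finally:
            # Cleanup mirrors the _exit_axis_rotation() step above.
            self.angle = None


with Compiler().axis_rotation(2.5) as c:
    print(c.angle)  # 2.5
```

On older type checkers or stub versions the three-parameter spelling is still required.
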
80 changes: 37 additions & 43 deletions src/femto/spreadsheet.py
@@ -1,34 +1,31 @@
from __future__ import annotations

import time

from xlsxwriter import Workbook

from dataclasses import dataclass
from pathlib import Path

from femto import __file__ as fpath
import numpy as np

from types import TracebackType
from femto.waveguide import Waveguide
from femto.marker import Marker
from femto.helpers import flatten
from femto.writer import WaveguideWriter, MarkerWriter

from typing import Any
from typing import cast
from typing import Union
from typing import Any
import nptyping as nptyp

from dataclasses import dataclass

import femto.device
import nptyping as nptyp
import numpy as np
from femto import __file__ as fpath
from femto.helpers import flatten
from femto.marker import Marker
from femto.waveguide import Waveguide
from femto.writer import MarkerWriter
from femto.writer import WaveguideWriter
from xlsxwriter import Workbook


def generate_all_cols_data() -> nptyp.NDArray[
Any,
nptyp.Structure["tagname: Str, fullname: Str, unit: Str, width: Int, format: Str"],
]:
def generate_all_cols_data() -> (
nptyp.NDArray[
Any,
nptyp.Structure[tagname:Str, fullname:Str, unit:Str, width:Int, format:Str],
]
):
"""
Create the available columns array from a file.

@@ -113,7 +110,7 @@ class Parameter:
"""Class that handles preamble parameters."""

n: str # Full name
v: str = "" # Value
v: str = '' # Value
loc: tuple[int, int] = (0, 0) # Location (1-indexing)
sz: tuple[int, int] = (0, 0) # Size (for merged cells)
fmt: str = 'parval' # Format
@@ -135,10 +132,10 @@ class Spreadsheet:
"""Class representing the spreadsheet with all entities to fabricate."""

device: femto.device.Device | None = None
columns_names: str = ""
book_name: str | Path = "my_fabrication.xlsx"
sheet_name: str = "Fabrication"
font_name: str = "DejaVu Sans Mono"
columns_names: str = ''
book_name: str | Path = 'my_fabrication.xlsx'
sheet_name: str = 'Fabrication'
font_name: str = 'DejaVu Sans Mono'
font_size: int = 11
suppr_redd_cols: bool = True
static_preamble: bool = False
@@ -203,12 +200,10 @@ def __post_init__(
self.columns_names = scn
self.suppr_redd_cols = True
print(
(
'Columns_names not given in spreadsheet initialization.'
f' Will proceed with standard columns names \'{scn}\' '
'and activate the suppr_redd_cols flag to deal with '
'reddundant columns.'
)
'Columns_names not given in spreadsheet initialization.'
f' Will proceed with standard columns names \'{scn}\' '
'and activate the suppr_redd_cols flag to deal with '
'reddundant columns.'
)

if 'name' not in self.columns_names:
@@ -370,7 +365,7 @@ def _write_saints_list(self, column: int = 156) -> None:
None.

"""
with open(Path(fpath).parent / 'utils' / 'saints_data.txt', 'r') as f:
with open(Path(fpath).parent / 'utils' / 'saints_data.txt') as f:
for i in range(367):
s = f.readline().strip('\n')
# print(f'writing day {i}\t{s}')
@@ -412,9 +407,7 @@ def _dtype(self, tagname):

return dt

def _get_structure_list(
self, str_list: list[Union[Waveguide, Marker]] | None = None
) -> list[Union[Waveguide, Marker]]:
def _get_structure_list(self, str_list: list[Waveguide | Marker] | None = None) -> list[Waveguide | Marker]:

assert isinstance(self.device, femto.device.Device)

@@ -546,7 +539,7 @@ def _build_struct_list(
ignored_fields.append(t)
continue

if np.all(table_lines[t] == table_lines[t][0]) and suppr_redd_cols and table_lines[t][0] != "":
if np.all(table_lines[t] == table_lines[t][0]) and suppr_redd_cols and table_lines[t][0] != '':
# eliminate reddundancies if explicitly requested
ignored_fields.append(t)

@@ -570,10 +563,8 @@ def _build_struct_list(
if ignored_fields and verbose:
fields_left_out = ', '.join(ignored_fields)
print(
(
f'For all entities, the fields {fields_left_out} were not '
'defined, so they will not be shown as table columns.'
)
f'For all entities, the fields {fields_left_out} were not '
'defined, so they will not be shown as table columns.'
)

self.keep = keep
@@ -656,9 +647,12 @@ def _fill_spreadsheet(self):
for i, sdata in enumerate(self.struct_data):

sdata = [
s
if (isinstance(s, (np.int64, np.float64)) and s < 1e5) or (not isinstance(s, (np.int64, np.float64)))
else ''
(
s
if (isinstance(s, (np.int64, np.float64)) and s < 1e5)
or (not isinstance(s, (np.int64, np.float64)))
else ''
)
for s in sdata
]

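Most of this file's diff is mechanical: imports sorted one per line, string quotes normalised, `Union[Waveguide, Marker]` rewritten as `Waveguide | Marker`, the redundant `'r'` mode dropped from `open()`, and the long conditional inside `_fill_spreadsheet`'s list comprehension wrapped in its own parentheses. The filtering logic is unchanged; a reduced sketch of what that comprehension does, with invented sample data:

```python
import numpy as np

row = [np.int64(3), np.float64(2.5e6), "wg01", np.float64(0.03)]

# Keep strings as they are, keep numeric values below 1e5, and blank out
# any numpy int/float of 1e5 or more.
cleaned = [
    (
        s
        if (isinstance(s, (np.int64, np.float64)) and s < 1e5)
        or (not isinstance(s, (np.int64, np.float64)))
        else ''
    )
    for s in row
]
print(cleaned)  # the 2.5e6 entry is blanked, everything else is kept
```
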
4 changes: 2 additions & 2 deletions src/femto/trench.py
@@ -261,7 +261,7 @@ def zigzag(self, poly: geometry.Polygon) -> npt.NDArray[np.float32]:
coords.extend(line.coords)
return np.array(coords).T

def toolpath(self) -> Generator[npt.NDArray[np.float32], None, None]:
def toolpath(self) -> Generator[npt.NDArray[np.float32]]:
"""Toolpath generator.

The function takes a polygon and computes the filling toolpath.
@@ -736,7 +736,7 @@ def main() -> None:
# b = T._trench_list[0]
# b = T.trenchbed[0]
for tr in T._trench_list:
for (x, y) in tr.toolpath():
for x, y in tr.toolpath():
plt.plot(x, y)

plt.axis('equal')
4 changes: 2 additions & 2 deletions src/femto/utils/spreadsheet_columns.txt
@@ -7,9 +7,9 @@ speed, Speed, mm/s, 6, 0.0
scan, Scans, , 5, 0
depth, Depth, mm, 8, 0.000
radius, R, mm, 5, 0.00
int_dist, Cdist, um, 8, 0.0000
int_dist, Cdist, um, 8, 0.0000
wl, λ, um, 8, 0.000
int_length, Clength, mm, 8, 0.000
int_length, Clength, mm, 8, 0.000
arm_length, Alength, mm, 8, 0.000
yin, Yin, mm, 8, 0.0000
yout, Yout, mm, 8, 0.0000
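The two pairs of visually identical `int_dist` and `int_length` rows are a whitespace-only fix, most likely trailing spaces stripped by the trailing-whitespace hook. For context, each row of this file defines one spreadsheet column as `tagname, fullname, unit, width, format`; a hypothetical way to load it into the kind of structured array that `generate_all_cols_data` returns (the path and string widths are assumptions):

```python
import numpy as np

# Hypothetical sketch: read the column definitions into a structured
# array with one field per comma-separated entry.
cols = np.genfromtxt(
    "src/femto/utils/spreadsheet_columns.txt",
    delimiter=",",
    dtype=[
        ("tagname", "U20"),
        ("fullname", "U20"),
        ("unit", "U10"),
        ("width", int),
        ("format", "U10"),
    ],
    autostrip=True,  # drop the padding spaces around each field
)
print(cols["tagname"])
print(cols["width"])
```
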
4 changes: 1 addition & 3 deletions src/femto/waveguide.py
@@ -408,9 +408,7 @@ def sin_bridge(

x_sin = np.linspace(self._x[-1], self._x[-1] + dx, num)
tmp_cos = np.cos(omega_y * np.pi / dx * (x_sin - self._x[-1]))
y_sin = self._y[-1] + 0.5 * dy * (
1 - np.sqrt((1 + flat_peaks**2) / (1 + flat_peaks**2 * tmp_cos**2)) * tmp_cos
)
y_sin = self._y[-1] + 0.5 * dy * (1 - np.sqrt((1 + flat_peaks**2) / (1 + flat_peaks**2 * tmp_cos**2)) * tmp_cos)
z_sin = self._z[-1] + 0.5 * dzb * (1 - np.cos(omega_z * np.pi / dx * (x_sin - self._x[-1])))
f_sin = np.repeat(f, num)
s_sin = np.repeat(shutter, num)
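This is another formatting-only reflow: the flat-peaks sine expression in `sin_bridge` is folded onto a single line. A quick numerical check of that expression, with invented parameters: `flat_peaks = 0` makes the square-root factor equal to 1 and recovers the plain raised-cosine bridge, while larger values flatten the crests of the sinusoid.

```python
import numpy as np

dx, dy, omega_y = 1.0, 0.04, 1.0
x_sin = np.linspace(0.0, dx, 5)
tmp_cos = np.cos(omega_y * np.pi / dx * x_sin)

for flat_peaks in (0.0, 3.0):
    y = 0.5 * dy * (1 - np.sqrt((1 + flat_peaks**2) / (1 + flat_peaks**2 * tmp_cos**2)) * tmp_cos)
    print(flat_peaks, np.round(y, 4))
```
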
2 changes: 1 addition & 1 deletion src/femto/writer.py
@@ -535,7 +535,7 @@ def export_array2d(
]

gcode_instr = []
for (line, dec) in itertools.zip_longest(instr, listcast(forced_deceleration)):
for line, dec in itertools.zip_longest(instr, listcast(forced_deceleration)):
if bool(dec):
gcode_instr.append(f'G9 G1 {line}\n')
else:
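The only change in `export_array2d` is the pyupgrade-style removal of the parentheses around the loop target. As a minimal sketch of the pattern itself (instruction strings and flags invented): `zip_longest` pads the shorter iterable with `None`, so every G-Code line is emitted even when fewer deceleration flags than lines are supplied.

```python
import itertools

instr = ["X1.000 Y0.000 F10.0", "X2.000 Y0.000 F10.0", "X3.000 Y0.000 F10.0"]
forced_deceleration = [True]  # deliberately shorter than instr

gcode_instr = []
for line, dec in itertools.zip_longest(instr, forced_deceleration):
    # bool(None) is False, so unpadded lines get the plain G1 command.
    if bool(dec):
        gcode_instr.append(f"G9 G1 {line}\n")
    else:
        gcode_instr.append(f"G1 {line}\n")

print("".join(gcode_instr))
```
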
2 changes: 1 addition & 1 deletion tests/device_test.py
@@ -53,7 +53,7 @@ def list_wg() -> list[Waveguide]:
def list_mk() -> list[Marker]:
PARAM_MK = dotdict(scan=1, speed=2, speed_pos=5, speed_closed=5, depth=0.000, lx=1, ly=1)
markers = []
for (x, y) in zip(range(4, 8), range(3, 7)):
for x, y in zip(range(4, 8), range(3, 7)):
m = Marker(**PARAM_MK)
m.cross([x, y])
markers.append(m)
11 changes: 6 additions & 5 deletions tests/helpers_test.py
@@ -1,9 +1,10 @@
from __future__ import annotations

import pathlib

import numpy as np
import pytest
import yaml
import pathlib
from femto.helpers import almost_equal
from femto.helpers import dotdict
from femto.helpers import flatten
@@ -296,7 +297,7 @@ def test_load_param_empty_default():
PARAM_GC = dict(filename='UPP8.pgm', laser='PHAROS', aerotech_angle=0.0, rotation_angle=0.0)
p_dicts = {'DEFAULT': {}, 'wg': PARAM_WG, 'mk': PARAM_MK, 'gc': PARAM_GC}

with open('test.yaml', "w") as f:
with open('test.yaml', 'w') as f:
yaml.dump(p_dicts, f, sort_keys=False)

pw, pm, pg = load_parameters('test.yaml')
@@ -340,7 +341,7 @@ def test_load_param_no_default():
)
p_dicts = {'wg': PARAM_WG, 'mk': PARAM_MK, 'gc': PARAM_GC}

with open('test.yaml', "w") as f:
with open('test.yaml', 'w') as f:
yaml.dump(p_dicts, f, sort_keys=False)

pw, pm, pg = load_parameters('test.yaml')
@@ -380,7 +381,7 @@ def test_load_param_pathlib():
p_dicts = {'wg': PARAM_WG, 'mk': PARAM_MK, 'gc': PARAM_GC}

fp = pathlib.Path('test.yaml')
with open(fp, "w") as f:
with open(fp, 'w') as f:
yaml.dump(p_dicts, f, sort_keys=False)

pw, pm, pg = load_parameters(fp)
@@ -393,7 +394,7 @@ def test_load_param_pathlib():
def test_load_param_empty():
p_dicts = {}
fp = pathlib.Path('test.yaml')
with open(fp, "w") as f:
with open(fp, 'w') as f:
yaml.dump(p_dicts, f, sort_keys=False)

assert load_parameters(fp) == []
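The test edits are quote normalisation plus the import-order fix at the top of the file; the fixture pattern itself is untouched. A self-contained sketch of the dump-and-reload round trip these tests build on (`load_parameters` is the project helper under test and is not reimplemented here; the parameter values below are invented):

```python
import pathlib

import yaml

# Write a parameters file the same way the tests do, then read it back.
p_dicts = {"wg": {"speed": 20}, "mk": {"depth": 0.0}, "gc": {"filename": "UPP8.pgm"}}
fp = pathlib.Path("test.yaml")
with open(fp, "w") as f:
    yaml.dump(p_dicts, f, sort_keys=False)

with open(fp) as f:
    assert yaml.safe_load(f) == p_dicts
```
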