Skip to content

Commit

Permalink
Fix all ruff PT011 (not checking error message when testing exceptions) (#698)
Browse files Browse the repository at this point in the history

* bump ruff pre-commit to v0.1.15

* ruff auto fixes

* fix all ruff PT011 and unignore that rule
  • Loading branch information
janosh committed Jan 30, 2024
1 parent bfd9b0c commit 23b3c4b
Show file tree
Hide file tree
Showing 12 changed files with 78 additions and 85 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ default_language_version:
python: python3
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.1.13
rev: v0.1.15
hooks:
- id: ruff
args: [--fix]
Expand Down
44 changes: 21 additions & 23 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -148,37 +148,35 @@ exclude_lines = [
target-version = "py39"
select = ["ALL"]
ignore = [
"ARG002", # unused method argument
"COM812", # trailing comma missing
"PD011", # pandas-use-of-dot-values
"PERF203", # try-except-in-loop
"PLR", # pylint-refactor
"PT004", # pytest-missing-fixture-name-underscore
"PT006", # pytest-parametrize-names-wrong-type
"RUF013", # implicit-optional
# TODO remove PT011, pytest.raises() should always check err msg
"ANN002",
"ANN003",
"ANN101", # missing self type annotation
"ANN101", # missing self type annotation
"ANN102",
"ANN401",
"ARG002", # unused method argument
"BLE001",
"C408", # Unnecessary (dict/list/tuple) call - remove call
"C901", # function too complex
"DTZ", # datetime-tz-now
"EM", # exception message must not use f-string literal
"ERA001", # found commented out code
"C408", # Unnecessary (dict/list/tuple) call - remove call
"C901", # function too complex
"COM812", # trailing comma missing
"DTZ", # datetime-tz-now
"EM", # exception message must not use f-string literal
"ERA001", # found commented out code
"FBT001",
"FBT002",
"FIX002",
"G004", # logging uses fstring
"PT011", # pytest-raises-too-broad
"PT013", # pytest-incorrect-pytest-import
"PTH", # prefer Pathlib to os.path
"S324", # use of insecure hash function
"SLF", # private member accessed outside class
"TD", # TODOs
"TRY003", # long message outside exception class
"G004", # logging uses fstring
"PD011", # pandas-use-of-dot-values
"PERF203", # try-except-in-loop
"PLR", # pylint-refactor
"PT004", # pytest-missing-fixture-name-underscore
"PT006", # pytest-parametrize-names-wrong-type
"PT013", # pytest-incorrect-pytest-import
"PTH", # prefer Pathlib to os.path
"RUF013", # implicit-optional
"S324", # use of insecure hash function
"SLF", # private member accessed outside class
"TD", # TODOs
"TRY003", # long message outside exception class
]
pydocstyle.convention = "numpy"
isort.known-first-party = ["atomate2"]
Expand Down
21 changes: 12 additions & 9 deletions src/atomate2/common/schemas/defects.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import logging
from collections.abc import Sequence
from itertools import starmap
from typing import Any, Callable, Optional, Union

import numpy as np
Expand Down Expand Up @@ -237,7 +238,7 @@ def from_task_outputs(
UUID of relaxed calculation in charge state (q2).
"""

def get_ent(
def get_cs_entry(
struct: Structure,
energy: float,
dir_name: str,
Expand All @@ -249,14 +250,16 @@ def get_ent(
data={"dir_name": dir_name, "uuid": uuid},
)

entries1 = [
get_ent(s, e, d, u)
for s, e, d, u in zip(structures1, energies1, static_dirs1, static_uuids1)
]
entries2 = [
get_ent(s, e, d, u)
for s, e, d, u in zip(structures2, energies2, static_dirs2, static_uuids2)
]
entries1 = list(
starmap(
get_cs_entry, zip(structures1, energies1, static_dirs1, static_uuids1)
)
)
entries2 = list(
starmap(
get_cs_entry, zip(structures2, energies2, static_dirs2, static_uuids2)
)
)

return cls.from_entries(entries1, entries2, relaxed_uuid1, relaxed_uuid2)

Expand Down
16 changes: 8 additions & 8 deletions src/atomate2/cp2k/sets/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,14 +106,14 @@ def write_input(
}
inputs.update(self.optional_files)

for k, v in inputs.items():
fn = v.get("filename")
obj = v.get("object")
if v is not None and (overwrite or not (directory / k).exists()):
with zopen(directory / fn, "wt") as f:
f.write(str(obj))
elif not overwrite and (directory / fn).exists():
raise FileExistsError(f"{directory / fn} already exists.")
for key, val in inputs.items():
filename = val.get("filename")
obj = val.get("object")
if val is not None and (overwrite or not (directory / key).exists()):
with zopen(directory / filename, "wt") as file:
file.write(str(obj))
elif not overwrite and (directory / filename).exists():
raise FileExistsError(f"{directory / filename} already exists.")

@staticmethod
def from_directory(
Expand Down
22 changes: 10 additions & 12 deletions src/atomate2/forcefields/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,18 +114,16 @@ def save(self, filename: str | PathLike) -> None:
-------
None
"""
with open(filename, "wb") as f:
pickle.dump(
{
"energy": self.energies,
"forces": self.forces,
"stresses": self.stresses,
"atom_positions": self.atom_positions,
"cell": self.cells,
"atomic_number": self.atoms.get_atomic_numbers(),
},
f,
)
traj_dict = {
"energy": self.energies,
"forces": self.forces,
"stresses": self.stresses,
"atom_positions": self.atom_positions,
"cell": self.cells,
"atomic_number": self.atoms.get_atomic_numbers(),
}
with open(filename, "wb") as file:
pickle.dump(traj_dict, file)


class Relaxer:
Expand Down
31 changes: 13 additions & 18 deletions src/atomate2/lobster/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -766,10 +766,7 @@ def from_directory(

# Do automatic bonding analysis with LobsterPy
condensed_bonding_analysis = None
sb_icobi = None
sb_icohp = None
sb_icoop = None
describe = None
sb_icobi = sb_icohp = sb_icoop = describe = None
struct = Structure.from_file(structure_path)

# will perform two condensed bonding analysis computations
Expand Down Expand Up @@ -1066,7 +1063,7 @@ def from_directory(
are_coops=False,
are_cobis=False,
)
doc.__setattr__("cohp_data", cohp_obj)
doc.cohp_data = cohp_obj

if coopcar_path.exists() and doc.coop_data is None:
coop_obj = CompleteCohp.from_file(
Expand All @@ -1076,7 +1073,7 @@ def from_directory(
are_coops=True,
are_cobis=False,
)
doc.__setattr__("coop_data", coop_obj)
doc.coop_data = coop_obj

if cobicar_path.exists() and doc.cobi_data is None:
cobi_obj = CompleteCohp.from_file(
Expand All @@ -1086,7 +1083,7 @@ def from_directory(
are_coops=False,
are_cobis=True,
)
doc.__setattr__("cobi_data", cobi_obj)
doc.cobi_data = cobi_obj
with gzip.open(
computational_data_json_save_dir, "wt", encoding="UTF-8"
) as file:
Expand All @@ -1099,7 +1096,7 @@ def from_directory(
# objects and other data json compatible dict format
data = {
attribute: jsanitize(
doc.__getattribute__(attribute),
getattr(doc, attribute),
allow_bson=False,
strict=True,
enum_values=True,
Expand All @@ -1113,9 +1110,9 @@ def from_directory(

# Again unset the cohp, cobi and coop data fields if not desired in the DB
if not add_coxxcar_to_task_document:
doc.__setattr__("cohp_data", None)
doc.__setattr__("coop_data", None)
doc.__setattr__("cobi_data", None)
doc.cohp_data = None
doc.coop_data = None
doc.cobi_data = None

return doc.model_copy(update=additional_fields)

Expand Down Expand Up @@ -1316,9 +1313,9 @@ def read_saved_json(
dict
Returns a dictionary with lobster task json data corresponding to query.
"""
with gzip.open(filename, "rb") as f:
with gzip.open(filename, "rb") as file:
lobster_data = {}
objects = ijson.items(f, "item", use_float=True)
objects = ijson.items(file, "item", use_float=True)
for obj in objects:
if query is None:
for field, data in obj.items():
Expand All @@ -1339,11 +1336,9 @@ def read_saved_json(
lobster_data[query_key] = MontyDecoder().process_decoded(value)
elif "lobsterpy_data" in query_key:
for field in lobster_data[query_key].__fields__:
lobster_data[query_key].__setattr__(
field,
MontyDecoder().process_decoded(
lobster_data[query_key].__getattribute__(field)
),
val = MontyDecoder().process_decoded(
getattr(lobster_data[query_key], field)
)
setattr(lobster_data[query_key], field, val)

return lobster_data
2 changes: 1 addition & 1 deletion src/atomate2/utils/file_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -411,7 +411,7 @@ def gzip(
path.unlink()
else:
ssh = self.get_ssh(host)
_, stdout, _ = ssh.exec_command(f"gzip -f {path!s}")
_, _stdout, _ = ssh.exec_command(f"gzip -f {path!s}")

def gunzip(
self,
Expand Down
6 changes: 2 additions & 4 deletions src/atomate2/vasp/flows/mp.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,9 +232,7 @@ class MPVaspLobsterMaker(VaspLobsterMaker):
"""

name: str = "lobster"
relax_maker: BaseVaspMaker | None = field(
default_factory=lambda: MPGGADoubleRelaxMaker()
)
relax_maker: BaseVaspMaker | None = field(default_factory=MPGGADoubleRelaxMaker)
lobster_static_maker: BaseVaspMaker = field(
default_factory=lambda: MPGGAStaticMaker(
input_set_generator=MPGGAStaticSetGenerator(
Expand All @@ -254,7 +252,7 @@ class MPVaspLobsterMaker(VaspLobsterMaker):
)
)
)
lobster_maker: LobsterMaker | None = field(default_factory=lambda: LobsterMaker())
lobster_maker: LobsterMaker | None = field(default_factory=LobsterMaker)
delete_wavecars: bool = True
address_min_basis: str | None = None
address_max_basis: str | None = None
2 changes: 1 addition & 1 deletion src/atomate2/vasp/sets/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ def write_input(
# write POSCAR with more significant figures
file.write(val.get_str(significant_figures=16))
else:
file.write(val.__str__())
file.write(str(val))
elif not overwrite and (directory / key).exists():
raise FileExistsError(f"{directory / key} already exists.")

Expand Down
7 changes: 3 additions & 4 deletions tests/common/schemas/test_cclib.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,12 +48,11 @@ def test_cclib_taskdoc(test_dir):

# Now we will try two possible extensions, but we will make sure that
# it fails because the newest log file (.txt) is not valid
with open(p / "test.txt", "w") as f:
f.write("I am a dummy log file")
with pytest.raises(Exception) as e:
with open(p / "test.txt", "w") as file:
file.write("I am a dummy log file")
with pytest.raises(ValueError, match="Could not parse"):
doc = TaskDocument.from_logfile(p, [".log", ".txt"]).dict()
os.remove(p / "test.txt")
assert "Could not parse" in str(e.value)

# Test a population analysis
doc = TaskDocument.from_logfile(p, "psi_test.out", analysis="MBO").dict()
Expand Down
3 changes: 1 addition & 2 deletions tests/forcefields/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,8 +93,7 @@ def test_relaxer(si_structure, test_dir, tmp_dir, optimizer, traj_file):
]

if optimizer is None:
# None is invalid, should raise ValueError
with pytest.raises(ValueError):
with pytest.raises(ValueError, match="Optimizer cannot be None"):
Relaxer(calculator=LennardJones(), optimizer=optimizer)
return

Expand Down
7 changes: 5 additions & 2 deletions tests/vasp/flows/test_phonons.py
Original file line number Diff line number Diff line change
Expand Up @@ -755,7 +755,10 @@ def test_phonon_wf_only_displacements_kpath_raises_no_cell_change(
# automatically use fake VASP and write POTCAR.spec during the test
mock_vasp(ref_paths, fake_run_vasp_kwargs)

with pytest.raises(ValueError):
with pytest.raises(
ValueError,
match="can only use other kpath schemes with the primitive standard structure",
):
PhononMaker(
min_length=3.0,
bulk_relax_maker=None,
Expand Down Expand Up @@ -785,7 +788,7 @@ def test_phonon_wf_only_displacements_kpath_raises(mock_vasp, clean_dir, kpath_s

# automatically use fake VASP and write POTCAR.spec during the test
mock_vasp(ref_paths, fake_run_vasp_kwargs)
with pytest.raises(ValueError):
with pytest.raises(ValueError, match="can only use other kpath schemes with the"):
PhononMaker(
min_length=3.0,
bulk_relax_maker=None,
Expand Down

0 comments on commit 23b3c4b

Please sign in to comment.