From 74d05fbbdb470450df575a4f03b992d098fa2761 Mon Sep 17 00:00:00 2001 From: Severin Magel Date: Sun, 3 May 2026 14:29:20 -0400 Subject: [PATCH 1/2] updating the dependencies for linting --- .pre-commit-config.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 450ab6556..c768cd1dc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,13 +1,13 @@ exclude: '^(versioneer.py|src/graphnet/_version.py|docs/)' repos: - repo: https://github.com/psf/black - rev: 24.10.0 + rev: 26.3.1 hooks: - id: black language_version: python3 args: [--config=black.toml] - repo: https://github.com/pycqa/flake8 - rev: 7.1.1 + rev: 7.3.0 hooks: - id: flake8 args: ["--config=.flake8", "--show-source", "--statistics"] @@ -22,13 +22,13 @@ repos: - id: pydocstyle language_version: python3 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.13.0 + rev: v1.20.2 hooks: - id: mypy args: [--follow-imports=silent, --disallow-untyped-defs, --disallow-incomplete-defs, --disallow-untyped-calls] language_version: python3 - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v6.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer From 258b6bd6d05ed86fd7c46c7a611c08fcec6afa6f Mon Sep 17 00:00:00 2001 From: Severin Magel Date: Sun, 3 May 2026 14:52:20 -0400 Subject: [PATCH 2/2] pre-commit run --all --- .github/workflows/build.yml | 8 ++++---- docker/gnn-benchmarking/apply.py | 1 - examples/01_icetray/01_convert_i3_files.py | 6 ++---- examples/01_icetray/02_compare_sqlite_and_parquet.py | 6 ++---- examples/01_icetray/03_i3_deployer_example.py | 6 ++---- .../04_i3_module_in_native_icetray_example.py | 6 ++---- examples/01_icetray/05_convert_i3_files_advanced.py | 6 ++---- examples/02_data/01_read_dataset.py | 6 ++---- examples/02_data/02_plot_feature_distributions.py | 6 ++---- examples/02_data/03_convert_parquet_to_sqlite.py | 6 ++---- 
examples/02_data/04_ensemble_dataset.py | 6 ++---- examples/03_weights/01_fit_uniform_weights.py | 6 ++---- examples/03_weights/02_fit_bjoern_low_weights.py | 6 ++---- examples/04_training/01_train_dynedge.py | 6 ++---- examples/04_training/02_train_tito_model.py | 6 ++---- examples/04_training/03_train_dynedge_from_config.py | 6 ++---- .../04_train_multiclassifier_from_configs.py | 6 ++---- examples/04_training/05_train_RNN_TITO.py | 6 ++---- examples/04_training/06_train_icemix_model.py | 6 ++---- examples/04_training/07_train_normalizing_flow.py | 6 ++---- examples/04_training/08_train_grit_model.py | 6 ++---- examples/05_liquido/01_convert_h5.py | 6 ++---- examples/06_prometheus/01_convert_prometheus.py | 6 ++---- examples/07_km3net/01_convert_km3net.py | 6 ++---- src/graphnet/data/datamodule.py | 2 +- src/graphnet/data/dataset/parquet/parquet_dataset.py | 12 +++++++++--- .../data/extractors/icecube/i3genericextractor.py | 1 - .../data/extractors/icecube/utilities/collections.py | 2 +- .../data/extractors/icecube/utilities/types.py | 2 -- src/graphnet/data/readers/i3reader.py | 1 - src/graphnet/data/readers/km3netreader.py | 1 - src/graphnet/deployment/deployer.py | 12 ++++-------- .../data_representation/data_representation.py | 6 ++---- .../models/data_representation/graphs/nodes/nodes.py | 12 ++++-------- src/graphnet/models/detector/detector.py | 6 ++---- src/graphnet/utilities/argparse.py | 1 - src/graphnet/utilities/config/base_config.py | 1 - src/graphnet/utilities/config/dataset_config.py | 4 ++-- src/graphnet/utilities/config/model_config.py | 4 ++-- src/graphnet/utilities/filesys.py | 6 ++---- src/graphnet/utilities/logging.py | 1 - tests/data/test_datamodule.py | 10 +++++----- tests/training/test_dataloader_utilities.py | 2 +- tests/utilities/test_dataset_config.py | 1 - 44 files changed, 83 insertions(+), 145 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 242e71bc2..bca538be5 100644 --- 
a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -75,7 +75,7 @@ jobs: sudo apt update --fix-missing --yes sudo apt upgrade --yes sudo apt-get install --yes git - sudo apt-get clean + sudo apt-get clean - uses: actions/checkout@v4 @@ -88,9 +88,9 @@ jobs: python --version pip show setuptools rm -rf ~/.cache/pip - + - name: Print available disk space before graphnet install - run: | + run: | df -h - name: Upgrade packages in virtual environment shell: bash @@ -128,7 +128,7 @@ jobs: pip show torch-scatter pip show jammy_flows - name: Print available disk space after graphnet install - run: | + run: | df -h - name: Run unit tests and generate coverage report shell: bash diff --git a/docker/gnn-benchmarking/apply.py b/docker/gnn-benchmarking/apply.py index 0b6a9d76e..b69ef3047 100644 --- a/docker/gnn-benchmarking/apply.py +++ b/docker/gnn-benchmarking/apply.py @@ -15,7 +15,6 @@ from graphnet.data.constants import FEATURES from graphnet.constants import PRETRAINED_MODEL_DIR - # Constants MODEL_NAME = "total_neutrino_energy" BASE_PATH = f"{PRETRAINED_MODEL_DIR}/icecube/upgrade/QUESO" diff --git a/examples/01_icetray/01_convert_i3_files.py b/examples/01_icetray/01_convert_i3_files.py index a9a69e159..ba7d6a968 100644 --- a/examples/01_icetray/01_convert_i3_files.py +++ b/examples/01_icetray/01_convert_i3_files.py @@ -160,11 +160,9 @@ def main_icecube_upgrade( if not has_icecube_package(): Logger(log_folder=None).error(ERROR_MESSAGE_MISSING_ICETRAY) else: - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Convert I3 files to an intermediate format. 
-""" - ) +""") parser.add_argument( "backend", diff --git a/examples/01_icetray/02_compare_sqlite_and_parquet.py b/examples/01_icetray/02_compare_sqlite_and_parquet.py index d3874c5f2..188231a79 100644 --- a/examples/01_icetray/02_compare_sqlite_and_parquet.py +++ b/examples/01_icetray/02_compare_sqlite_and_parquet.py @@ -88,12 +88,10 @@ def load_data() -> None: else: # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Convert I3 files to both SQLite and Parquet formats, and see that the results agree. -""" - ) +""") args, unknown = parser.parse_known_args() diff --git a/examples/01_icetray/03_i3_deployer_example.py b/examples/01_icetray/03_i3_deployer_example.py index bd2de9f43..1d92f0065 100644 --- a/examples/01_icetray/03_i3_deployer_example.py +++ b/examples/01_icetray/03_i3_deployer_example.py @@ -87,11 +87,9 @@ def main() -> None: else: # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Use GraphNeTI3Modules to deploy trained model with GraphNeTI3Deployer. -""" - ) +""") args, unknown = parser.parse_known_args() diff --git a/examples/01_icetray/04_i3_module_in_native_icetray_example.py b/examples/01_icetray/04_i3_module_in_native_icetray_example.py index 09dd17ae3..f2986da2d 100644 --- a/examples/01_icetray/04_i3_module_in_native_icetray_example.py +++ b/examples/01_icetray/04_i3_module_in_native_icetray_example.py @@ -119,11 +119,9 @@ def main() -> None: else: # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Use GraphNeTI3Modules to deploy trained model in native IceTray. 
-""" - ) +""") args, unknown = parser.parse_known_args() diff --git a/examples/01_icetray/05_convert_i3_files_advanced.py b/examples/01_icetray/05_convert_i3_files_advanced.py index 4d6f3b6f6..d8d71876e 100644 --- a/examples/01_icetray/05_convert_i3_files_advanced.py +++ b/examples/01_icetray/05_convert_i3_files_advanced.py @@ -187,11 +187,9 @@ def main( Logger(log_folder=None).error(ERROR_MESSAGE_MISSING_ICETRAY) else: # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Convert I3 files to an intermediate format. -""" - ) +""") parser.add_argument("--merge", type=bool, default=True) parser.add_argument("--remove", type=bool, default=True) diff --git a/examples/02_data/01_read_dataset.py b/examples/02_data/01_read_dataset.py index 0f7bb0868..3f5429893 100644 --- a/examples/02_data/01_read_dataset.py +++ b/examples/02_data/01_read_dataset.py @@ -107,11 +107,9 @@ def main(backend: str) -> None: if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Read a few events from data in an intermediate format. -""" - ) +""") parser.add_argument( "backend", diff --git a/examples/02_data/02_plot_feature_distributions.py b/examples/02_data/02_plot_feature_distributions.py index ac08ae9fb..7f0dd64d2 100644 --- a/examples/02_data/02_plot_feature_distributions.py +++ b/examples/02_data/02_plot_feature_distributions.py @@ -60,11 +60,9 @@ def main() -> None: if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Plot feature distributions in dataset. 
-""" - ) +""") args, unknown = parser.parse_known_args() diff --git a/examples/02_data/03_convert_parquet_to_sqlite.py b/examples/02_data/03_convert_parquet_to_sqlite.py index 0a82aa744..058fedcf3 100644 --- a/examples/02_data/03_convert_parquet_to_sqlite.py +++ b/examples/02_data/03_convert_parquet_to_sqlite.py @@ -37,11 +37,9 @@ def main(parquet_path: str, tables: List[str]) -> None: if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Convert Parquet files to SQLite database. -""" - ) +""") parser.add_argument( "--parquet-path", diff --git a/examples/02_data/04_ensemble_dataset.py b/examples/02_data/04_ensemble_dataset.py index 80ae7ac72..75e44541b 100644 --- a/examples/02_data/04_ensemble_dataset.py +++ b/examples/02_data/04_ensemble_dataset.py @@ -80,10 +80,8 @@ def main() -> None: if __name__ == "__main__": - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Combine multiple Datasets using EnsembleDataset. -""" - ) +""") args, unknown = parser.parse_known_args() main() diff --git a/examples/03_weights/01_fit_uniform_weights.py b/examples/03_weights/01_fit_uniform_weights.py index e2f68487a..3df177b75 100644 --- a/examples/03_weights/01_fit_uniform_weights.py +++ b/examples/03_weights/01_fit_uniform_weights.py @@ -31,11 +31,9 @@ def main() -> None: if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Fit per-event weights to make the truth-level zenith distribution uniform. 
-""" - ) +""") args, unknown = parser.parse_known_args() diff --git a/examples/03_weights/02_fit_bjoern_low_weights.py b/examples/03_weights/02_fit_bjoern_low_weights.py index 4d54e8c7d..361d91d56 100644 --- a/examples/03_weights/02_fit_bjoern_low_weights.py +++ b/examples/03_weights/02_fit_bjoern_low_weights.py @@ -39,11 +39,9 @@ def main() -> None: if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Fit per-event weights according to the `BjoernLow` weight fitter. -""" - ) +""") args, unknown = parser.parse_known_args() diff --git a/examples/04_training/01_train_dynedge.py b/examples/04_training/01_train_dynedge.py index 3f1e41f80..76df46756 100644 --- a/examples/04_training/01_train_dynedge.py +++ b/examples/04_training/01_train_dynedge.py @@ -196,11 +196,9 @@ def main( if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Train GNN model without the use of config files. -""" - ) +""") parser.add_argument( "--path", diff --git a/examples/04_training/02_train_tito_model.py b/examples/04_training/02_train_tito_model.py index cd94ffa04..4dfac2f49 100644 --- a/examples/04_training/02_train_tito_model.py +++ b/examples/04_training/02_train_tito_model.py @@ -194,11 +194,9 @@ def main( if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Train GNN model without the use of config files. 
-""" - ) +""") parser.add_argument( "--path", diff --git a/examples/04_training/03_train_dynedge_from_config.py b/examples/04_training/03_train_dynedge_from_config.py index adb3757af..85535fe41 100644 --- a/examples/04_training/03_train_dynedge_from_config.py +++ b/examples/04_training/03_train_dynedge_from_config.py @@ -118,11 +118,9 @@ def main( if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Train GNN model. -""" - ) +""") parser.with_standard_arguments( "dataset-config", diff --git a/examples/04_training/04_train_multiclassifier_from_configs.py b/examples/04_training/04_train_multiclassifier_from_configs.py index 876fa191c..f7571247b 100644 --- a/examples/04_training/04_train_multiclassifier_from_configs.py +++ b/examples/04_training/04_train_multiclassifier_from_configs.py @@ -144,11 +144,9 @@ def main( if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Train GNN classification model. - """ - ) + """) parser.with_standard_arguments( ( diff --git a/examples/04_training/05_train_RNN_TITO.py b/examples/04_training/05_train_RNN_TITO.py index 1287e2193..948d4a3c8 100644 --- a/examples/04_training/05_train_RNN_TITO.py +++ b/examples/04_training/05_train_RNN_TITO.py @@ -216,11 +216,9 @@ def main( if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Train GNN model without the use of config files. 
-""" - ) +""") parser.add_argument( "--path", diff --git a/examples/04_training/06_train_icemix_model.py b/examples/04_training/06_train_icemix_model.py index b31a86487..b76c91aa4 100644 --- a/examples/04_training/06_train_icemix_model.py +++ b/examples/04_training/06_train_icemix_model.py @@ -222,11 +222,9 @@ def main( if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Train GNN model without the use of config files. -""" - ) +""") parser.add_argument( "--path", diff --git a/examples/04_training/07_train_normalizing_flow.py b/examples/04_training/07_train_normalizing_flow.py index 27be937c6..b78b34607 100644 --- a/examples/04_training/07_train_normalizing_flow.py +++ b/examples/04_training/07_train_normalizing_flow.py @@ -166,11 +166,9 @@ def main( if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Train conditional NormalizingFlow without the use of config files. -""" - ) +""") parser.add_argument( "--path", diff --git a/examples/04_training/08_train_grit_model.py b/examples/04_training/08_train_grit_model.py index a7a2c2992..24a5394a2 100644 --- a/examples/04_training/08_train_grit_model.py +++ b/examples/04_training/08_train_grit_model.py @@ -181,11 +181,9 @@ def main( if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Train GNN model without the use of config files. 
-""" - ) +""") parser.add_argument( "--path", diff --git a/examples/05_liquido/01_convert_h5.py b/examples/05_liquido/01_convert_h5.py index eaa5610fd..f32675994 100644 --- a/examples/05_liquido/01_convert_h5.py +++ b/examples/05_liquido/01_convert_h5.py @@ -39,11 +39,9 @@ def main(backend: str) -> None: if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Convert h5 files from LiquidO to an intermediate format. - """ - ) + """) parser.add_argument("backend", choices=["sqlite", "parquet"]) diff --git a/examples/06_prometheus/01_convert_prometheus.py b/examples/06_prometheus/01_convert_prometheus.py index 63b071372..23d536a29 100644 --- a/examples/06_prometheus/01_convert_prometheus.py +++ b/examples/06_prometheus/01_convert_prometheus.py @@ -42,11 +42,9 @@ def main(backend: str) -> None: if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Convert parquet files from Prometheus to an intermediate format. - """ - ) + """) parser.add_argument("backend", choices=["sqlite", "parquet"]) diff --git a/examples/07_km3net/01_convert_km3net.py b/examples/07_km3net/01_convert_km3net.py index a3a53b49d..1a21c937c 100644 --- a/examples/07_km3net/01_convert_km3net.py +++ b/examples/07_km3net/01_convert_km3net.py @@ -71,11 +71,9 @@ def main(backend: str, triggered: str, HNL: str, OUTPUT_DIR: str) -> None: if __name__ == "__main__": # Parse command-line arguments - parser = ArgumentParser( - description=""" + parser = ArgumentParser(description=""" Convert root files from KM3NeT to an sqlite or parquet. 
- """ - ) + """) parser.add_argument( "backend", diff --git a/src/graphnet/data/datamodule.py b/src/graphnet/data/datamodule.py index 48a916684..6267431f6 100644 --- a/src/graphnet/data/datamodule.py +++ b/src/graphnet/data/datamodule.py @@ -575,7 +575,7 @@ def _create_dataset( # Construct single dataset dataset = self._create_single_dataset( selection=selection, - path=self._dataset_args["path"], # type:ignore + path=self._dataset_args["path"], # type: ignore ) return dataset diff --git a/src/graphnet/data/dataset/parquet/parquet_dataset.py b/src/graphnet/data/dataset/parquet/parquet_dataset.py index 6329505de..cd889ee95 100644 --- a/src/graphnet/data/dataset/parquet/parquet_dataset.py +++ b/src/graphnet/data/dataset/parquet/parquet_dataset.py @@ -258,14 +258,20 @@ def query_table( # type: ignore else: file_idx = [bisect_right(self._chunk_cumsum, sequential_index)] - file_indices = [self._indices[idx] for idx in file_idx] + raw_file_indices = [self._indices[idx] for idx in file_idx] + file_indices: List[int] = [] + for entry in raw_file_indices: + if isinstance(entry, list): + file_indices.extend(entry) + else: + file_indices.append(int(entry)) arrays = [] - for file_idx in file_indices: + for fidx in file_indices: array = self._query_table( table=table, columns=columns, - file_idx=file_idx, + file_idx=fidx, sequential_index=sequential_index, selection=selection, ) diff --git a/src/graphnet/data/extractors/icecube/i3genericextractor.py b/src/graphnet/data/extractors/icecube/i3genericextractor.py index f59edfef2..13a8bda15 100644 --- a/src/graphnet/data/extractors/icecube/i3genericextractor.py +++ b/src/graphnet/data/extractors/icecube/i3genericextractor.py @@ -15,7 +15,6 @@ from graphnet.utilities.imports import has_icecube_package - if has_icecube_package() or TYPE_CHECKING: from icecube import ( dataclasses, diff --git a/src/graphnet/data/extractors/icecube/utilities/collections.py b/src/graphnet/data/extractors/icecube/utilities/collections.py index 
a5af5edfd..ac5a7ef1e 100644 --- a/src/graphnet/data/extractors/icecube/utilities/collections.py +++ b/src/graphnet/data/extractors/icecube/utilities/collections.py @@ -66,7 +66,7 @@ def serialise(obj: Union[Dict, Any]) -> Union[Dict, Any]: def transpose_list_of_dicts( - array: List[Dict[str, Any]] + array: List[Dict[str, Any]], ) -> Dict[str, List[Any]]: """Transpose a list of dicts to a dict of lists.""" if len(array) == 0: diff --git a/src/graphnet/data/extractors/icecube/utilities/types.py b/src/graphnet/data/extractors/icecube/utilities/types.py index 32ecae0ff..a5b427fe2 100644 --- a/src/graphnet/data/extractors/icecube/utilities/types.py +++ b/src/graphnet/data/extractors/icecube/utilities/types.py @@ -14,7 +14,6 @@ from graphnet.utilities.imports import has_icecube_package from graphnet.utilities.logging import Logger - if has_icecube_package(): from icecube import ( icetray, @@ -58,7 +57,6 @@ def break_cyclic_recursion(fn: Callable) -> Callable: @wraps(fn) def wrapper(obj: Any) -> Any: - global BEING_EVALUATED try: hash_ = (hash(fn), hash(obj)) if hash_ in BEING_EVALUATED: diff --git a/src/graphnet/data/readers/i3reader.py b/src/graphnet/data/readers/i3reader.py index 3716bc42b..c765f8978 100644 --- a/src/graphnet/data/readers/i3reader.py +++ b/src/graphnet/data/readers/i3reader.py @@ -12,7 +12,6 @@ from graphnet.utilities.filesys import find_i3_files from .graphnet_file_reader import GraphNeTFileReader - if has_icecube_package(): from icecube import icetray, dataio # pyright: reportMissingImports=false diff --git a/src/graphnet/data/readers/km3netreader.py b/src/graphnet/data/readers/km3netreader.py index faaea9265..0625f9d59 100644 --- a/src/graphnet/data/readers/km3netreader.py +++ b/src/graphnet/data/readers/km3netreader.py @@ -14,7 +14,6 @@ KM3NeTHNLRecoExtractor, ) - # km3net specific imports if has_km3net_package() or TYPE_CHECKING: import km3io as ki # pyright: reportMissingImports=false diff --git a/src/graphnet/deployment/deployer.py 
b/src/graphnet/deployment/deployer.py index 2c57c45d9..5d4c606b7 100644 --- a/src/graphnet/deployment/deployer.py +++ b/src/graphnet/deployment/deployer.py @@ -121,12 +121,8 @@ def run( assert len(settings) == self._n_workers - self.info( - f"""processing {len(input_files)} files \n - using {self._n_workers} workers""" - ) + self.info(f"""processing {len(input_files)} files \n + using {self._n_workers} workers""") self._launch_jobs(settings) - self.info( - f"""Processing {len(input_files)} files was completed in \n - {time.time() - start_time} seconds using {self._n_workers} cores.""" - ) + self.info(f"""Processing {len(input_files)} files was completed in \n + {time.time() - start_time} seconds using {self._n_workers} cores.""") diff --git a/src/graphnet/models/data_representation/data_representation.py b/src/graphnet/models/data_representation/data_representation.py index c6df0a0a8..09cd43293 100644 --- a/src/graphnet/models/data_representation/data_representation.py +++ b/src/graphnet/models/data_representation/data_representation.py @@ -300,11 +300,9 @@ def _validate_input( def _perturb_input(self, input_features: np.ndarray) -> np.ndarray: if isinstance(self._perturbation_dict, dict): - self.warning_once( - f"""Will randomly perturb + self.warning_once(f"""Will randomly perturb {list(self._perturbation_dict.keys())} - using stds {self._perturbation_dict.values()}""" # noqa - ) + using stds {self._perturbation_dict.values()}""") # noqa perturbed_features = self.rng.normal( loc=input_features[:, self._perturbation_cols], scale=np.array( diff --git a/src/graphnet/models/data_representation/graphs/nodes/nodes.py b/src/graphnet/models/data_representation/graphs/nodes/nodes.py index 064073bdd..8fa4b74cb 100644 --- a/src/graphnet/models/data_representation/graphs/nodes/nodes.py +++ b/src/graphnet/models/data_representation/graphs/nodes/nodes.py @@ -55,13 +55,11 @@ def _output_feature_names(self) -> List[str]: try: self._hidden_output_feature_names except 
AttributeError as e: - self.error( - f"""{self.__class__.__name__} was instantiated without + self.error(f"""{self.__class__.__name__} was instantiated without `input_feature_names` and it was not set prior to this forward call. If you are using this class outside a `GraphDefinition`, please instatiate - with `input_feature_names`.""" - ) # noqa + with `input_feature_names`.""") # noqa raise e return self._hidden_output_feature_names @@ -212,12 +210,10 @@ def _construct_nodes(self, x: torch.Tensor) -> torch.Tensor: cluster_class.add_counts() array = cluster_class.clustered_x else: - self.error( - f"""{self.__class__.__name__} was not instatiated with + self.error(f"""{self.__class__.__name__} was not instatiated with `input_feature_names` and has not been set later. Please instantiate this class with `input_feature_names` - if you're using it outside `GraphDefinition`.""" - ) # noqa + if you're using it outside `GraphDefinition`.""") # noqa raise AttributeError return torch.tensor(array) diff --git a/src/graphnet/models/detector/detector.py b/src/graphnet/models/detector/detector.py index 0c86663ba..6e9d8dcf9 100644 --- a/src/graphnet/models/detector/detector.py +++ b/src/graphnet/models/detector/detector.py @@ -46,10 +46,8 @@ def geometry_table(self) -> pd.DataFrame: try: assert hasattr(self, "geometry_table_path") except AssertionError as e: - self.error( - f"""{self.__class__.__name__} does not have class - variable `geometry_table_path` set.""" - ) + self.error(f"""{self.__class__.__name__} does not have class + variable `geometry_table_path` set.""") raise e self._geometry_table = pd.read_parquet(self.geometry_table_path) return self._geometry_table diff --git a/src/graphnet/utilities/argparse.py b/src/graphnet/utilities/argparse.py index eed402482..f8825e789 100644 --- a/src/graphnet/utilities/argparse.py +++ b/src/graphnet/utilities/argparse.py @@ -6,7 +6,6 @@ from graphnet.constants import CONFIG_DIR - ASCII_LOGO = r""" _____ __ _ __ ______ / ___/______ 
____ / / / |/ /_/_ __/ diff --git a/src/graphnet/utilities/config/base_config.py b/src/graphnet/utilities/config/base_config.py index 0bc826ad7..af314918e 100644 --- a/src/graphnet/utilities/config/base_config.py +++ b/src/graphnet/utilities/config/base_config.py @@ -8,7 +8,6 @@ from pydantic import BaseModel import ruamel.yaml as yaml - CONFIG_FILES_SUFFIXES = (".yml", ".yaml") diff --git a/src/graphnet/utilities/config/dataset_config.py b/src/graphnet/utilities/config/dataset_config.py index 9c519d795..f5f876488 100644 --- a/src/graphnet/utilities/config/dataset_config.py +++ b/src/graphnet/utilities/config/dataset_config.py @@ -195,7 +195,7 @@ def save_dataset_config(init_fn: Callable) -> Callable: ) def _replace_model_instance_with_config( - obj: Union["Model", Any] + obj: Union["Model", Any], ) -> Union[ModelConfig, Any]: """Replace `Model` instances in `obj` with their `ModelConfig`.""" from graphnet.models import Model @@ -236,7 +236,7 @@ def __call__(cls: Any, *args: Any, **kwargs: Any) -> object: """Catch object after construction and save config.""" def _replace_model_instance_with_config( - obj: Union["Model", Any] + obj: Union["Model", Any], ) -> Union[ModelConfig, Any]: """Replace `Model` instances in `obj` with their `ModelConfig`.""" from graphnet.models import Model diff --git a/src/graphnet/utilities/config/model_config.py b/src/graphnet/utilities/config/model_config.py index b56962f66..f95a35a3f 100644 --- a/src/graphnet/utilities/config/model_config.py +++ b/src/graphnet/utilities/config/model_config.py @@ -282,7 +282,7 @@ def save_model_config(init_fn: Callable) -> Callable: ) def _replace_model_instance_with_config( - obj: Union["Model", Any] + obj: Union["Model", Any], ) -> Union[ModelConfig, Any]: """Replace `Model` instances in `obj` with their `ModelConfig`.""" from graphnet.models import Model @@ -322,7 +322,7 @@ def __call__(cls: Any, *args: Any, **kwargs: Any) -> object: """Catch object construction and save config after `__init__`.""" 
def _replace_model_instance_with_config( - obj: Union["Model", Any] + obj: Union["Model", Any], ) -> Union[ModelConfig, Any]: """Replace `Model` instances in `obj` with their `ModelConfig`.""" from graphnet.models import Model diff --git a/src/graphnet/utilities/filesys.py b/src/graphnet/utilities/filesys.py index 726d8f185..9201c8bed 100644 --- a/src/graphnet/utilities/filesys.py +++ b/src/graphnet/utilities/filesys.py @@ -105,9 +105,7 @@ def find_i3_files( return i3_files, gcd_files else: if any([os.path.isdir(input) for input in inputs]): - raise ValueError( - "Inputs contains a mix of files and directories \ - which is not supported." - ) + raise ValueError("Inputs contains a mix of files and directories \ + which is not supported.") else: raise ValueError("Some inputs are not valid directories or files.") diff --git a/src/graphnet/utilities/logging.py b/src/graphnet/utilities/logging.py index 575da94a0..7952cf3b8 100644 --- a/src/graphnet/utilities/logging.py +++ b/src/graphnet/utilities/logging.py @@ -226,7 +226,6 @@ def debug(self, msg: str, *args: Any, **kwargs: Any) -> None: def warning_once(self, msg: str) -> None: """Print `msg` as warning exactly once.""" - global WARNINGS if msg in WARNINGS: return diff --git a/tests/data/test_datamodule.py b/tests/data/test_datamodule.py index dd2e03abf..f35784644 100644 --- a/tests/data/test_datamodule.py +++ b/tests/data/test_datamodule.py @@ -123,7 +123,7 @@ def test_save_selection(selection: List[int], file_path: str) -> None: "dataset_ref", [SQLiteDataset, ParquetDataset], indirect=True ) def test_single_dataset_without_selections( - dataset_setup: Tuple[Any, Dict[str, Any], Dict[str, int]] + dataset_setup: Tuple[Any, Dict[str, Any], Dict[str, int]], ) -> None: """Verify GraphNeTDataModule behavior when no test selection is provided. 
@@ -163,7 +163,7 @@ def test_single_dataset_without_selections( "dataset_ref", [SQLiteDataset, ParquetDataset], indirect=True ) def test_single_dataset_with_selections( - dataset_setup: Tuple[Any, Dict[str, Any], Dict[str, int]] + dataset_setup: Tuple[Any, Dict[str, Any], Dict[str, int]], ) -> None: """Test that selection functionality of DataModule behaves as expected. @@ -226,7 +226,7 @@ def test_single_dataset_with_selections( "dataset_ref", [SQLiteDataset, ParquetDataset], indirect=True ) def test_dataloader_args( - dataset_setup: Tuple[Any, Dict[str, Any], Dict[str, int]] + dataset_setup: Tuple[Any, Dict[str, Any], Dict[str, int]], ) -> None: """Test that arguments to dataloaders are propagated correctly. @@ -267,7 +267,7 @@ def test_dataloader_args( "dataset_ref", [SQLiteDataset, ParquetDataset], indirect=True ) def test_ensemble_dataset_without_selections( - dataset_setup: Tuple[Any, Dict[str, Any], Dict[str, int]] + dataset_setup: Tuple[Any, Dict[str, Any], Dict[str, int]], ) -> None: """Test ensemble dataset functionality without selections. @@ -306,7 +306,7 @@ def test_ensemble_dataset_without_selections( @pytest.mark.parametrize("dataset_ref", [SQLiteDataset, ParquetDataset]) def test_ensemble_dataset_with_selections( - dataset_setup: Tuple[Any, Dict[str, Any], Dict[str, int]] + dataset_setup: Tuple[Any, Dict[str, Any], Dict[str, int]], ) -> None: """Test ensemble dataset functionality with selections. 
diff --git a/tests/training/test_dataloader_utilities.py b/tests/training/test_dataloader_utilities.py index d5b9d55df..10b6761c5 100644 --- a/tests/training/test_dataloader_utilities.py +++ b/tests/training/test_dataloader_utilities.py @@ -55,7 +55,7 @@ def test_none_selection() -> None: ) def test_array_selection(selection: Tuple[int]) -> None: """Test agreement of the two ways to calculate this loss.""" - (train_dataloader, test_dataloader) = make_train_validation_dataloader( + train_dataloader, test_dataloader = make_train_validation_dataloader( db=TEST_SQLITE_DATA, graph_definition=graph_definition, selection=list(selection), diff --git a/tests/utilities/test_dataset_config.py b/tests/utilities/test_dataset_config.py index b3d6aec11..e98e08ac6 100644 --- a/tests/utilities/test_dataset_config.py +++ b/tests/utilities/test_dataset_config.py @@ -20,7 +20,6 @@ from graphnet.models.detector.icecube import IceCubeDeepCore from graphnet.models.graphs.nodes import NodesAsPulses - CONFIG_PATHS = { "parquet": "/tmp/test_dataset_parquet.yml", "sqlite": "/tmp/test_dataset_sqlite.yml",