From 53e8796ceda985e11173e86333b3131b9d1f630d Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 9 Nov 2023 11:12:25 -0800 Subject: [PATCH 01/41] initial bare exception fix --- py4DSTEM/braggvectors/diskdetection.py | 2 +- py4DSTEM/braggvectors/diskdetection_aiml.py | 42 ++++++++++++------- .../braggvectors/diskdetection_aiml_cuda.py | 10 +++-- py4DSTEM/braggvectors/diskdetection_cuda.py | 3 +- py4DSTEM/datacube/virtualimage.py | 3 +- py4DSTEM/io/filereaders/read_K2.py | 3 +- py4DSTEM/io/legacy/read_utils.py | 3 +- .../process/classification/featurization.py | 2 +- py4DSTEM/process/polar/polar_peaks.py | 11 ++--- py4DSTEM/process/utils/multicorr.py | 3 +- py4DSTEM/utils/configuration_checker.py | 5 ++- 11 files changed, 53 insertions(+), 34 deletions(-) diff --git a/py4DSTEM/braggvectors/diskdetection.py b/py4DSTEM/braggvectors/diskdetection.py index e23b10a15..59a8a55e7 100644 --- a/py4DSTEM/braggvectors/diskdetection.py +++ b/py4DSTEM/braggvectors/diskdetection.py @@ -221,7 +221,7 @@ def find_Bragg_disks( mode = "dp" elif data.ndim == 3: mode = "dp_stack" - except: + except Exception: er = f"entry {data} for `data` could not be parsed" raise Exception(er) diff --git a/py4DSTEM/braggvectors/diskdetection_aiml.py b/py4DSTEM/braggvectors/diskdetection_aiml.py index 67df18074..4a59fd59e 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml.py @@ -103,12 +103,12 @@ def find_Bragg_disks_aiml_single_DP( """ try: import crystal4D - except: - raise ImportError("Import Error: Please install crystal4D before proceeding") + except ModuleNotFoundError: + raise ModuleNotFoundError("Import Error: Please install crystal4D before proceeding") try: import tensorflow as tf - except: - raise ImportError( + except ModuleNotFoundError: + raise ModuleNotFoundError( "Please install tensorflow before proceeding - please check " + "https://www.tensorflow.org/install" + "for more information" @@ -256,8 +256,8 @@ def 
find_Bragg_disks_aiml_selected( try: import crystal4D - except: - raise ImportError("Import Error: Please install crystal4D before proceeding") + except ModuleNotFoundError: + raise ModuleNotFoundError("Import Error: Please install crystal4D before proceeding") assert len(Rx) == len(Ry) peaks = [] @@ -433,8 +433,8 @@ def find_Bragg_disks_aiml_serial( try: import crystal4D - except: - raise ImportError("Import Error: Please install crystal4D before proceeding") + except ModuleNotFoundError: + raise ModuleNotFoundError("Import Error: Please install crystal4D before proceeding") # Make the peaks PointListArray # dtype = [('qx',float),('qy',float),('intensity',float)] @@ -643,8 +643,8 @@ def find_Bragg_disks_aiml( """ try: import crystal4D - except: - raise ImportError("Please install crystal4D before proceeding") + except ModuleNotFoundError: + raise ModuleNotFoundError("Please install crystal4D before proceeding") def _parse_distributed(distributed): import os @@ -840,7 +840,8 @@ def _integrate_disks(DP, maxima_x, maxima_y, maxima_int, int_window_radius=1): disks.append(np.average(disk)) try: disks = disks / max(disks) - except: + # TODO work out what exception would go here + except Exception: pass return (maxima_x, maxima_y, disks) @@ -878,8 +879,8 @@ def _get_latest_model(model_path=None): try: import tensorflow as tf - except: - raise ImportError( + except ModuleNotFoundError: + raise ModuleNotFoundError( "Please install tensorflow before proceeding - please check " + "https://www.tensorflow.org/install" + "for more information" @@ -891,8 +892,12 @@ def _get_latest_model(model_path=None): if model_path is None: try: os.mkdir("./tmp") - except: + except FileExistsError: pass + except Exception as e: + # TODO work out if I want to pass or raise + pass + # raise e # download the json file with the meta data download_file_from_google_drive("FCU-Net", "./tmp/model_metadata.json") with open("./tmp/model_metadata.json") as f: @@ -905,9 +910,13 @@ def 
_get_latest_model(model_path=None): with open("./tmp/model_metadata_old.json") as f_old: metaold = json.load(f_old) file_id_old = metaold["file_id"] - except: + # TODO Double check this is correct Error + except FileNotFoundError: + file_id_old = file_id + except Exception: file_id_old = file_id + if os.path.exists(file_path) and file_id == file_id_old: print( "Latest model weight is already available in the local system. Loading the model... \n" @@ -921,7 +930,8 @@ def _get_latest_model(model_path=None): download_file_from_google_drive(file_id, filename) try: shutil.unpack_archive(filename, "./tmp", format="zip") - except: + # TODO Work work what specific exception + except Exception: pass model_path = file_path os.rename("./tmp/model_metadata.json", "./tmp/model_metadata_old.json") diff --git a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py index c5f89b9fd..bbe0d37d4 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py @@ -22,8 +22,8 @@ try: import tensorflow as tf -except: - raise ImportError( +except ModuleNotFoundError: + raise ModuleNotFoundError( "Please install tensorflow before proceeding - please check " + "https://www.tensorflow.org/install" + "for more information" @@ -637,7 +637,8 @@ def upsampled_correlation_cp(imageCorr, upsampleFactor, xyShift): ) dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1]) dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0]) - except: + # TODO I think this is just the Index Error + except IndexError or Exception: dx, dy = ( 0, 0, @@ -733,6 +734,7 @@ def _integrate_disks_cp(DP, maxima_x, maxima_y, maxima_int, int_window_radius=1) disks.append(np.average(disk)) try: disks = disks / max(disks) - except: + # TODO work out what exception to use + except Exception: pass return (maxima_x, maxima_y, disks) diff --git a/py4DSTEM/braggvectors/diskdetection_cuda.py 
b/py4DSTEM/braggvectors/diskdetection_cuda.py index 4bbb7f488..55e782028 100644 --- a/py4DSTEM/braggvectors/diskdetection_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_cuda.py @@ -618,7 +618,8 @@ def upsampled_correlation(imageCorr, upsampleFactor, xyShift): dy = (icc[1, 2] - icc[1, 0]) / ( 4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0] ) - except: + # TODO Work out what exception to use + except IndexError or Exception: dx, dy = ( 0, 0, diff --git a/py4DSTEM/datacube/virtualimage.py b/py4DSTEM/datacube/virtualimage.py index 50a297914..51415e9fb 100644 --- a/py4DSTEM/datacube/virtualimage.py +++ b/py4DSTEM/datacube/virtualimage.py @@ -414,7 +414,8 @@ def position_detector( try: image = self.tree(k) break - except: + # TODO check what error is produced when passing nonexistant key to tree + except KeyError or AttributeError: pass if image is None: image = self[0, 0] diff --git a/py4DSTEM/io/filereaders/read_K2.py b/py4DSTEM/io/filereaders/read_K2.py index 61405a437..d316a5e7b 100644 --- a/py4DSTEM/io/filereaders/read_K2.py +++ b/py4DSTEM/io/filereaders/read_K2.py @@ -124,7 +124,8 @@ def __init__( # this may be wrong for binned data... in which case the reader doesn't work anyway! 
Q_Nx = gtg.allTags[".SI Image Tags.Acquisition.Parameters.Detector.height"] Q_Ny = gtg.allTags[".SI Image Tags.Acquisition.Parameters.Detector.width"] - except: + # TODO check this is the correct error type + except ValueError: print("Warning: diffraction pattern shape not detected!") print("Assuming 1920x1792 as the diffraction pattern size!") Q_Nx = 1792 diff --git a/py4DSTEM/io/legacy/read_utils.py b/py4DSTEM/io/legacy/read_utils.py index 7cd48cde7..27ee5cb7a 100644 --- a/py4DSTEM/io/legacy/read_utils.py +++ b/py4DSTEM/io/legacy/read_utils.py @@ -100,7 +100,8 @@ def get_N_dataobjects(filepath, topgroup="4DSTEM_experiment"): N_pla = len(f[topgroup]["data/pointlistarrays"].keys()) try: N_coords = len(f[topgroup]["data/coordinates"].keys()) - except: + # TODO work out what exception will be raised + except ValueError or AttributeError or Exception: N_coords = 0 N_do = N_dc + N_cdc + N_ds + N_rs + N_pl + N_pla + N_coords return N_dc, N_cdc, N_ds, N_rs, N_pl, N_pla, N_coords, N_do diff --git a/py4DSTEM/process/classification/featurization.py b/py4DSTEM/process/classification/featurization.py index 38b4e1412..26a63a62d 100644 --- a/py4DSTEM/process/classification/featurization.py +++ b/py4DSTEM/process/classification/featurization.py @@ -477,7 +477,7 @@ def get_class_DPs(self, datacube, method, thresh): datacube.data.shape[2], datacube.data.shape[3], ) - except: + except ValueError: raise ValueError( "Datacube must have same R_Nx and R_Ny dimensions as Featurization instance." 
) diff --git a/py4DSTEM/process/polar/polar_peaks.py b/py4DSTEM/process/polar/polar_peaks.py index be9ae989e..7cc453544 100644 --- a/py4DSTEM/process/polar/polar_peaks.py +++ b/py4DSTEM/process/polar/polar_peaks.py @@ -602,8 +602,8 @@ def refine_peaks_local( self.peaks[rx, ry]["qr"][a0] = p0[2] / q_step self.peaks[rx, ry]["sigma_annular"][a0] = p0[3] / t_step self.peaks[rx, ry]["sigma_radial"][a0] = p0[4] / q_step - - except: + # TODO work out what error is raised + except Exception: pass else: @@ -643,8 +643,8 @@ def refine_peaks_local( self.peaks[rx, ry]["qr"][a0] = p0[2] / q_step self.peaks[rx, ry]["sigma_annular"][a0] = p0[3] / t_step self.peaks[rx, ry]["sigma_radial"][a0] = p0[4] / q_step - - except: + # TODO work out what exception is raised + except Exception: pass @@ -1044,7 +1044,8 @@ def fit_image(basis, *coefs): ), name="peaks_polar", ) - except: + # TODO work out what exception is raised + except Exception: # if fitting has failed, we will still output the last iteration # TODO - add a flag for unconverged fits coefs_peaks = np.reshape(coefs_all[(3 * num_rings + 3) :], (5, num_peaks)).T diff --git a/py4DSTEM/process/utils/multicorr.py b/py4DSTEM/process/utils/multicorr.py index bc07390bb..6760407a6 100644 --- a/py4DSTEM/process/utils/multicorr.py +++ b/py4DSTEM/process/utils/multicorr.py @@ -99,7 +99,8 @@ def upsampled_correlation(imageCorr, upsampleFactor, xyShift, device="cpu"): ) dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1]) dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0]) - except: + # TODO work out what error is raised + except IndexError or Exception: dx, dy = ( 0, 0, diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index 26b0b89d5..c1bb98c47 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -304,7 +304,7 @@ def import_tester(m: str) -> bool: # try and import the module try: 
importlib.import_module(m) - except: + except ModuleNotFoundError: state = False return state @@ -391,7 +391,8 @@ def check_cupy_gpu(gratuitously_verbose: bool, **kwargs): try: d = cp.cuda.Device(i) hasattr(d, "attributes") - except: + # TODO work out what error is raised + except AttributeError or Exception: num_gpus_detected = i break From fbbc23219d25c03550be6ea1d27f23785cf169ae Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 9 Nov 2023 11:36:48 -0800 Subject: [PATCH 02/41] correcting multiple exception handling --- py4DSTEM/braggvectors/diskdetection_aiml_cuda.py | 2 +- py4DSTEM/braggvectors/diskdetection_cuda.py | 2 +- py4DSTEM/datacube/virtualimage.py | 2 +- py4DSTEM/io/legacy/read_utils.py | 2 +- py4DSTEM/process/utils/multicorr.py | 2 +- py4DSTEM/utils/configuration_checker.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py index bbe0d37d4..7b3687f56 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py @@ -638,7 +638,7 @@ def upsampled_correlation_cp(imageCorr, upsampleFactor, xyShift): dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1]) dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0]) # TODO I think this is just the Index Error - except IndexError or Exception: + except (IndexError, Exception): dx, dy = ( 0, 0, diff --git a/py4DSTEM/braggvectors/diskdetection_cuda.py b/py4DSTEM/braggvectors/diskdetection_cuda.py index 55e782028..2c44a63a8 100644 --- a/py4DSTEM/braggvectors/diskdetection_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_cuda.py @@ -619,7 +619,7 @@ def upsampled_correlation(imageCorr, upsampleFactor, xyShift): 4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0] ) # TODO Work out what exception to use - except IndexError or Exception: + except (IndexError, Exception): dx, dy = ( 0, 0, diff --git 
a/py4DSTEM/datacube/virtualimage.py b/py4DSTEM/datacube/virtualimage.py index 51415e9fb..f930b6697 100644 --- a/py4DSTEM/datacube/virtualimage.py +++ b/py4DSTEM/datacube/virtualimage.py @@ -415,7 +415,7 @@ def position_detector( image = self.tree(k) break # TODO check what error is produced when passing nonexistant key to tree - except KeyError or AttributeError: + except (KeyError, AttributeError): pass if image is None: image = self[0, 0] diff --git a/py4DSTEM/io/legacy/read_utils.py b/py4DSTEM/io/legacy/read_utils.py index 27ee5cb7a..a08e7b61d 100644 --- a/py4DSTEM/io/legacy/read_utils.py +++ b/py4DSTEM/io/legacy/read_utils.py @@ -101,7 +101,7 @@ def get_N_dataobjects(filepath, topgroup="4DSTEM_experiment"): try: N_coords = len(f[topgroup]["data/coordinates"].keys()) # TODO work out what exception will be raised - except ValueError or AttributeError or Exception: + except (ValueError, AttributeError, Exception): N_coords = 0 N_do = N_dc + N_cdc + N_ds + N_rs + N_pl + N_pla + N_coords return N_dc, N_cdc, N_ds, N_rs, N_pl, N_pla, N_coords, N_do diff --git a/py4DSTEM/process/utils/multicorr.py b/py4DSTEM/process/utils/multicorr.py index 6760407a6..70b11ad5c 100644 --- a/py4DSTEM/process/utils/multicorr.py +++ b/py4DSTEM/process/utils/multicorr.py @@ -100,7 +100,7 @@ def upsampled_correlation(imageCorr, upsampleFactor, xyShift, device="cpu"): dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1]) dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0]) # TODO work out what error is raised - except IndexError or Exception: + except (IndexError, Exception): dx, dy = ( 0, 0, diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index c1bb98c47..df6f037df 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -392,7 +392,7 @@ def check_cupy_gpu(gratuitously_verbose: bool, **kwargs): d = cp.cuda.Device(i) hasattr(d, "attributes") # TODO 
work out what error is raised - except AttributeError or Exception: + except (AttributeError, Exception): num_gpus_detected = i break From 22124612ed4f1e6aa2c2b438a09d263654c3a430 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 9 Nov 2023 20:31:23 -0800 Subject: [PATCH 03/41] Changing to Exception if unsure --- py4DSTEM/braggvectors/diskdetection_aiml_cuda.py | 6 +++--- py4DSTEM/braggvectors/diskdetection_cuda.py | 4 ++-- py4DSTEM/process/utils/multicorr.py | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py index 7b3687f56..316c18ee9 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py @@ -637,8 +637,8 @@ def upsampled_correlation_cp(imageCorr, upsampleFactor, xyShift): ) dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1]) dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0]) - # TODO I think this is just the Index Error - except (IndexError, Exception): + # TODO I think this is just the IndexError + except Exception: dx, dy = ( 0, 0, @@ -734,7 +734,7 @@ def _integrate_disks_cp(DP, maxima_x, maxima_y, maxima_int, int_window_radius=1) disks.append(np.average(disk)) try: disks = disks / max(disks) - # TODO work out what exception to use + # TODO work out what exception to use, ZeroDivisionError except Exception: pass return (maxima_x, maxima_y, disks) diff --git a/py4DSTEM/braggvectors/diskdetection_cuda.py b/py4DSTEM/braggvectors/diskdetection_cuda.py index 2c44a63a8..305e4558e 100644 --- a/py4DSTEM/braggvectors/diskdetection_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_cuda.py @@ -618,8 +618,8 @@ def upsampled_correlation(imageCorr, upsampleFactor, xyShift): dy = (icc[1, 2] - icc[1, 0]) / ( 4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0] ) - # TODO Work out what exception to use - except (IndexError, Exception): + # TODO 
Work out what exception to use IndexError + except Exception: dx, dy = ( 0, 0, diff --git a/py4DSTEM/process/utils/multicorr.py b/py4DSTEM/process/utils/multicorr.py index 70b11ad5c..a875d3b71 100644 --- a/py4DSTEM/process/utils/multicorr.py +++ b/py4DSTEM/process/utils/multicorr.py @@ -99,8 +99,8 @@ def upsampled_correlation(imageCorr, upsampleFactor, xyShift, device="cpu"): ) dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1]) dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0]) - # TODO work out what error is raised - except (IndexError, Exception): + # TODO work out what error is raised IndexError + except Exception: dx, dy = ( 0, 0, From 51f078fd525fd1c597064025d32fb6a64e6fed27 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 9 Nov 2023 20:35:32 -0800 Subject: [PATCH 04/41] changing to assertion error --- py4DSTEM/datacube/virtualimage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py4DSTEM/datacube/virtualimage.py b/py4DSTEM/datacube/virtualimage.py index f930b6697..fc3cd496d 100644 --- a/py4DSTEM/datacube/virtualimage.py +++ b/py4DSTEM/datacube/virtualimage.py @@ -415,7 +415,7 @@ def position_detector( image = self.tree(k) break # TODO check what error is produced when passing nonexistant key to tree - except (KeyError, AttributeError): + except AssertionError: pass if image is None: image = self[0, 0] From db597468d014de6b847d36148fde6a1bbd2b4d1a Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 9 Nov 2023 20:43:35 -0800 Subject: [PATCH 05/41] removing TODO message --- py4DSTEM/datacube/virtualimage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py4DSTEM/datacube/virtualimage.py b/py4DSTEM/datacube/virtualimage.py index fc3cd496d..f45fade33 100644 --- a/py4DSTEM/datacube/virtualimage.py +++ b/py4DSTEM/datacube/virtualimage.py @@ -414,7 +414,7 @@ def position_detector( try: image = self.tree(k) break - # TODO check what error is produced when passing 
nonexistant key to tree + # KeyError, AttributeError except AssertionError: pass if image is None: From be609b38f064b5c13b4bfb0041109dc0c8ebc513 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 9 Nov 2023 20:46:02 -0800 Subject: [PATCH 06/41] Changing to a Exception --- py4DSTEM/io/legacy/read_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/py4DSTEM/io/legacy/read_utils.py b/py4DSTEM/io/legacy/read_utils.py index a08e7b61d..2a51026f3 100644 --- a/py4DSTEM/io/legacy/read_utils.py +++ b/py4DSTEM/io/legacy/read_utils.py @@ -100,8 +100,8 @@ def get_N_dataobjects(filepath, topgroup="4DSTEM_experiment"): N_pla = len(f[topgroup]["data/pointlistarrays"].keys()) try: N_coords = len(f[topgroup]["data/coordinates"].keys()) - # TODO work out what exception will be raised - except (ValueError, AttributeError, Exception): + # TODO work out what exception will be raised ValueError, AttributeError + except Exception: N_coords = 0 N_do = N_dc + N_cdc + N_ds + N_rs + N_pl + N_pla + N_coords return N_dc, N_cdc, N_ds, N_rs, N_pl, N_pla, N_coords, N_do From 08957ff8135806777fd2868f47aa5e07118939ca Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 9 Nov 2023 20:47:54 -0800 Subject: [PATCH 07/41] changing from ValueError to Exception --- py4DSTEM/process/classification/featurization.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/py4DSTEM/process/classification/featurization.py b/py4DSTEM/process/classification/featurization.py index 26a63a62d..1f7d493ef 100644 --- a/py4DSTEM/process/classification/featurization.py +++ b/py4DSTEM/process/classification/featurization.py @@ -477,7 +477,8 @@ def get_class_DPs(self, datacube, method, thresh): datacube.data.shape[2], datacube.data.shape[3], ) - except ValueError: + # TODO Work out if its a ValueError + except Exception: raise ValueError( "Datacube must have same R_Nx and R_Ny dimensions as Featurization instance." 
) From e6b6ade36fcdd2f5f684deb5b52c9af53802d13c Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 9 Nov 2023 21:06:56 -0800 Subject: [PATCH 08/41] black --- py4DSTEM/braggvectors/diskdetection_aiml.py | 17 +++++++++++------ .../braggvectors/diskdetection_aiml_cuda.py | 2 +- py4DSTEM/process/polar/polar_peaks.py | 4 ++-- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/py4DSTEM/braggvectors/diskdetection_aiml.py b/py4DSTEM/braggvectors/diskdetection_aiml.py index 4a59fd59e..36fc2858e 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml.py @@ -104,7 +104,9 @@ def find_Bragg_disks_aiml_single_DP( try: import crystal4D except ModuleNotFoundError: - raise ModuleNotFoundError("Import Error: Please install crystal4D before proceeding") + raise ModuleNotFoundError( + "Import Error: Please install crystal4D before proceeding" + ) try: import tensorflow as tf except ModuleNotFoundError: @@ -257,7 +259,9 @@ def find_Bragg_disks_aiml_selected( try: import crystal4D except ModuleNotFoundError: - raise ModuleNotFoundError("Import Error: Please install crystal4D before proceeding") + raise ModuleNotFoundError( + "Import Error: Please install crystal4D before proceeding" + ) assert len(Rx) == len(Ry) peaks = [] @@ -434,7 +438,9 @@ def find_Bragg_disks_aiml_serial( try: import crystal4D except ModuleNotFoundError: - raise ModuleNotFoundError("Import Error: Please install crystal4D before proceeding") + raise ModuleNotFoundError( + "Import Error: Please install crystal4D before proceeding" + ) # Make the peaks PointListArray # dtype = [('qx',float),('qy',float),('intensity',float)] @@ -895,7 +901,7 @@ def _get_latest_model(model_path=None): except FileExistsError: pass except Exception as e: - # TODO work out if I want to pass or raise + # TODO work out if I want to pass or raise pass # raise e # download the json file with the meta data @@ -910,13 +916,12 @@ def _get_latest_model(model_path=None): with 
open("./tmp/model_metadata_old.json") as f_old: metaold = json.load(f_old) file_id_old = metaold["file_id"] - # TODO Double check this is correct Error + # TODO Double check this is correct Error except FileNotFoundError: file_id_old = file_id except Exception: file_id_old = file_id - if os.path.exists(file_path) and file_id == file_id_old: print( "Latest model weight is already available in the local system. Loading the model... \n" diff --git a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py index 316c18ee9..c00240092 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py @@ -637,7 +637,7 @@ def upsampled_correlation_cp(imageCorr, upsampleFactor, xyShift): ) dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1]) dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0]) - # TODO I think this is just the IndexError + # TODO I think this is just the IndexError except Exception: dx, dy = ( 0, diff --git a/py4DSTEM/process/polar/polar_peaks.py b/py4DSTEM/process/polar/polar_peaks.py index 7cc453544..8ee0db12c 100644 --- a/py4DSTEM/process/polar/polar_peaks.py +++ b/py4DSTEM/process/polar/polar_peaks.py @@ -602,7 +602,7 @@ def refine_peaks_local( self.peaks[rx, ry]["qr"][a0] = p0[2] / q_step self.peaks[rx, ry]["sigma_annular"][a0] = p0[3] / t_step self.peaks[rx, ry]["sigma_radial"][a0] = p0[4] / q_step - # TODO work out what error is raised + # TODO work out what error is raised except Exception: pass @@ -643,7 +643,7 @@ def refine_peaks_local( self.peaks[rx, ry]["qr"][a0] = p0[2] / q_step self.peaks[rx, ry]["sigma_annular"][a0] = p0[3] / t_step self.peaks[rx, ry]["sigma_radial"][a0] = p0[4] / q_step - # TODO work out what exception is raised + # TODO work out what exception is raised except Exception: pass From 857715aa28eb576c36d3ddbda9a66674d9a1b7dc Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 9 Nov 
2023 22:49:25 -0800 Subject: [PATCH 09/41] ruff C408 --- py4DSTEM/process/wholepatternfit/wpf_viz.py | 4 ++-- py4DSTEM/visualize/overlay.py | 22 ++++++++++----------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/py4DSTEM/process/wholepatternfit/wpf_viz.py b/py4DSTEM/process/wholepatternfit/wpf_viz.py index 436ae40a2..ac32e88f7 100644 --- a/py4DSTEM/process/wholepatternfit/wpf_viz.py +++ b/py4DSTEM/process/wholepatternfit/wpf_viz.py @@ -17,7 +17,7 @@ def show_model_grid(self, x=None, **plot_kwargs): cols = int(np.ceil(np.sqrt(N))) rows = (N + 1) // cols - kwargs = dict(constrained_layout=True) + kwargs = {"constrained_layout": True} kwargs.update(plot_kwargs) fig, ax = plt.subplots(rows, cols, **kwargs) @@ -213,7 +213,7 @@ def show_lattice_points( def show_fit_metrics(self, returnfig=False, **subplots_kwargs): assert hasattr(self, "fit_metrics"), "Please run fitting first!" - kwargs = dict(figsize=(14, 12), constrained_layout=True) + kwargs = {"figsize": (14, 12), "constrained_layout": True} kwargs.update(subplots_kwargs) fig, ax = plt.subplots(2, 2, **kwargs) im = ax[0, 0].matshow(self.fit_metrics["cost"].data, norm=mpl_c.LogNorm()) diff --git a/py4DSTEM/visualize/overlay.py b/py4DSTEM/visualize/overlay.py index 32baff443..1cefe8ac0 100644 --- a/py4DSTEM/visualize/overlay.py +++ b/py4DSTEM/visualize/overlay.py @@ -61,7 +61,7 @@ def add_rectangles(ax, d): kws = [ k for k in d.keys() if k not in ("lims", "color", "fill", "alpha", "linewidth") ] - kwargs = dict() + kwargs = {} for k in kws: kwargs[k] = d[k] @@ -145,7 +145,7 @@ def add_circles(ax, d): for k in d.keys() if k not in ("center", "R", "color", "fill", "alpha", "linewidth") ] - kwargs = dict() + kwargs = {} for k in kws: kwargs[k] = d[k] @@ -247,7 +247,7 @@ def add_annuli(ax, d): for k in d.keys() if k not in ("center", "radii", "color", "fill", "alpha", "linewidth") ] - kwargs = dict() + kwargs = {} for k in kws: kwargs[k] = d[k] @@ -387,7 +387,7 @@ def add_ellipses(ax, d): "linestyle", ) 
] - kwargs = dict() + kwargs = {} for k in kws: kwargs[k] = d[k] @@ -470,7 +470,7 @@ def add_points(ax, d): for k in d.keys() if k not in ("x", "y", "s", "scale", "pointcolor", "alpha", "open_circles") ] - kwargs = dict() + kwargs = {} for k in kws: kwargs[k] = d[k] @@ -521,7 +521,7 @@ def add_pointlabels(ax, d): kws = [ k for k in d.keys() if k not in ("x", "y", "size", "color", "alpha", "labels") ] - kwargs = dict() + kwargs = {} for k in kws: kwargs[k] = d[k] @@ -649,7 +649,7 @@ def add_vector(ax, d): "labelcolor", ) ] - kwargs = dict() + kwargs = {} for k in kws: kwargs[k] = d[k] @@ -701,7 +701,7 @@ def add_grid_overlay(ax, d): for k in d.keys() if k not in ("x0", "y0", "xL", "yL", "color", "alpha", "linewidth") ] - kwargs = dict() + kwargs = {} for k in kws: kwargs[k] = d[k] @@ -798,7 +798,7 @@ def add_scalebar(ax, d): "ticks", ) ] - kwargs = dict() + kwargs = {} for k in kws: kwargs[k] = d[k] @@ -937,7 +937,7 @@ def add_cartesian_grid(ax, d): "alpha", ) ] - kwargs = dict() + kwargs = {} for k in kws: kwargs[k] = d[k] @@ -1103,7 +1103,7 @@ def add_polarelliptical_grid(ax, d): "alpha", ) ] - kwargs = dict() + kwargs = {} for k in kws: kwargs[k] = d[k] From 78921915035858d53615e094102de1372e379de5 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 9 Nov 2023 23:42:10 -0800 Subject: [PATCH 10/41] ruff --select C419 --- py4DSTEM/braggvectors/threshold.py | 4 +- py4DSTEM/data/calibration.py | 10 +-- py4DSTEM/io/legacy/legacy12/read_v0_12.py | 4 +- py4DSTEM/io/legacy/legacy12/read_v0_5.py | 4 +- py4DSTEM/io/legacy/legacy12/read_v0_6.py | 4 +- py4DSTEM/io/legacy/legacy12/read_v0_7.py | 4 +- py4DSTEM/io/legacy/legacy12/read_v0_9.py | 4 +- .../io/legacy/legacy13/v13_emd_classes/io.py | 2 +- py4DSTEM/io/legacy/read_utils.py | 2 +- py4DSTEM/process/phase/utils.py | 16 ++--- py4DSTEM/process/wholepatternfit/wpf.py | 2 +- py4DSTEM/visualize/overlay.py | 66 +++++++++---------- py4DSTEM/visualize/show_extention.py | 2 +- py4DSTEM/visualize/vis_special.py | 4 +- 14 files 
changed, 62 insertions(+), 66 deletions(-) diff --git a/py4DSTEM/braggvectors/threshold.py b/py4DSTEM/braggvectors/threshold.py index c13b0a665..28aeb6e6a 100644 --- a/py4DSTEM/braggvectors/threshold.py +++ b/py4DSTEM/braggvectors/threshold.py @@ -26,7 +26,7 @@ def threshold_Braggpeaks( pattern """ assert all( - [item in pointlistarray.dtype.fields for item in ["qx", "qy", "intensity"]] + item in pointlistarray.dtype.fields for item in ["qx", "qy", "intensity"] ), "pointlistarray must include the coordinates 'qx', 'qy', and 'intensity'." for Rx, Ry in tqdmnd( pointlistarray.shape[0], @@ -113,7 +113,7 @@ def universal_threshold( assert isinstance(pointlistarray, PointListArray) assert metric in ("maximum", "average", "median", "manual") assert all( - [item in pointlistarray.dtype.fields for item in ["qx", "qy", "intensity"]] + item in pointlistarray.dtype.fields for item in ["qx", "qy", "intensity"] ), "pointlistarray must include the coordinates 'qx', 'qy', and 'intensity'." _pointlistarray = pointlistarray.copy() if name is None: diff --git a/py4DSTEM/data/calibration.py b/py4DSTEM/data/calibration.py index ffdbfa410..cd9a7c9aa 100644 --- a/py4DSTEM/data/calibration.py +++ b/py4DSTEM/data/calibration.py @@ -505,7 +505,7 @@ def get_origin(self, rx=None, ry=None): qx0 = self._get_value("qx0", rx, ry) qy0 = self._get_value("qy0", rx, ry) ans = (qx0, qy0) - if any([x is None for x in ans]): + if any(x is None for x in ans): ans = None return ans @@ -518,7 +518,7 @@ def get_origin_shift(self, rx=None, ry=None): qx0 = self._get_value("qx0_shift", rx, ry) qy0 = self._get_value("qy0_shift", rx, ry) ans = (qx0, qy0) - if any([x is None for x in ans]): + if any(x is None for x in ans): ans = None return ans @@ -540,7 +540,7 @@ def get_origin_meas(self, rx=None, ry=None): qx0 = self._get_value("qx0_meas", rx, ry) qy0 = self._get_value("qy0_meas", rx, ry) ans = (qx0, qy0) - if any([x is None for x in ans]): + if any(x is None for x in ans): ans = None return ans @@ -615,7 
+615,7 @@ def get_ellipse(self, rx=None, ry=None): b = self.get_b(rx, ry) theta = self.get_theta(rx, ry) ans = (a, b, theta) - if any([x is None for x in ans]): + if any(x is None for x in ans): ans = None return ans @@ -778,7 +778,7 @@ def get_probe_param(self): qx0 = self._get_value("qx0") qy0 = self._get_value("qy0") ans = (probe_semiangle, qx0, qy0) - if any([x is None for x in ans]): + if any(x is None for x in ans): ans = None return ans diff --git a/py4DSTEM/io/legacy/legacy12/read_v0_12.py b/py4DSTEM/io/legacy/legacy12/read_v0_12.py index 44aa86b6a..4f028f698 100644 --- a/py4DSTEM/io/legacy/legacy12/read_v0_12.py +++ b/py4DSTEM/io/legacy/legacy12/read_v0_12.py @@ -101,7 +101,7 @@ def read_v0_12(fp, **kwargs): ), "Error: data must be specified with strings or integers only." if not isinstance(data_id, (int, np.int_, str)): assert all( - [isinstance(d, (int, np.int_, str)) for d in data_id] + isinstance(d, (int, np.int_, str)) for d in data_id ), "Error: data must be specified with strings or integers only." # Parse optional arguments @@ -250,7 +250,7 @@ def get_data_from_str(filepath, tg, data_id, mem="RAM", binfactor=1, bindtype=No def get_data_from_list(filepath, tg, data_id, mem="RAM", binfactor=1, bindtype=None): """Accepts a filepath to a valid py4DSTEM file and a list or tuple specifying data, and returns the data.""" assert isinstance(data_id, (list, tuple)) - assert all([isinstance(d, (int, np.int_, str)) for d in data_id]) + assert all(isinstance(d, (int, np.int_, str)) for d in data_id) data = [] for el in data_id: if isinstance(el, (int, np.int_)): diff --git a/py4DSTEM/io/legacy/legacy12/read_v0_5.py b/py4DSTEM/io/legacy/legacy12/read_v0_5.py index de7108b02..7a7e2141c 100644 --- a/py4DSTEM/io/legacy/legacy12/read_v0_5.py +++ b/py4DSTEM/io/legacy/legacy12/read_v0_5.py @@ -97,7 +97,7 @@ def read_v0_5(fp, **kwargs): ), "Error: data must be specified with strings or integers only." 
if not isinstance(data_id, (int, str)): assert all( - [isinstance(d, (int, str)) for d in data_id] + isinstance(d, (int, str)) for d in data_id ), "Error: data must be specified with strings or integers only." # Parse optional arguments @@ -248,7 +248,7 @@ def get_data_from_str(fp, tg, data_id, mem="RAM", binfactor=1, bindtype=None): def get_data_from_list(fp, tg, data_id, mem="RAM", binfactor=1, bindtype=None): """Accepts a fp to a valid py4DSTEM file and a list or tuple specifying data, and returns the data.""" assert isinstance(data_id, (list, tuple)) - assert all([isinstance(d, (int, str)) for d in data_id]) + assert all(isinstance(d, (int, str)) for d in data_id) data = [] for el in data_id: if isinstance(el, int): diff --git a/py4DSTEM/io/legacy/legacy12/read_v0_6.py b/py4DSTEM/io/legacy/legacy12/read_v0_6.py index f746548ca..40c80c82b 100644 --- a/py4DSTEM/io/legacy/legacy12/read_v0_6.py +++ b/py4DSTEM/io/legacy/legacy12/read_v0_6.py @@ -97,7 +97,7 @@ def read_v0_6(fp, **kwargs): ), "Error: data must be specified with strings or integers only." if not isinstance(data_id, (int, str)): assert all( - [isinstance(d, (int, str)) for d in data_id] + isinstance(d, (int, str)) for d in data_id ), "Error: data must be specified with strings or integers only." 
# Parse optional arguments @@ -248,7 +248,7 @@ def get_data_from_str(fp, tg, data_id, mem="RAM", binfactor=1, bindtype=None): def get_data_from_list(fp, tg, data_id, mem="RAM", binfactor=1, bindtype=None): """Accepts a fp to a valid py4DSTEM file and a list or tuple specifying data, and returns the data.""" assert isinstance(data_id, (list, tuple)) - assert all([isinstance(d, (int, str)) for d in data_id]) + assert all(isinstance(d, (int, str)) for d in data_id) data = [] for el in data_id: if isinstance(el, int): diff --git a/py4DSTEM/io/legacy/legacy12/read_v0_7.py b/py4DSTEM/io/legacy/legacy12/read_v0_7.py index fac779d64..ed4a363cb 100644 --- a/py4DSTEM/io/legacy/legacy12/read_v0_7.py +++ b/py4DSTEM/io/legacy/legacy12/read_v0_7.py @@ -97,7 +97,7 @@ def read_v0_7(fp, **kwargs): ), "Error: data must be specified with strings or integers only." if not isinstance(data_id, (int, str)): assert all( - [isinstance(d, (int, str)) for d in data_id] + isinstance(d, (int, str)) for d in data_id ), "Error: data must be specified with strings or integers only." # Parse optional arguments @@ -248,7 +248,7 @@ def get_data_from_str(fp, tg, data_id, mem="RAM", binfactor=1, bindtype=None): def get_data_from_list(fp, tg, data_id, mem="RAM", binfactor=1, bindtype=None): """Accepts a fp to a valid py4DSTEM file and a list or tuple specifying data, and returns the data.""" assert isinstance(data_id, (list, tuple)) - assert all([isinstance(d, (int, str)) for d in data_id]) + assert all(isinstance(d, (int, str)) for d in data_id) data = [] for el in data_id: if isinstance(el, int): diff --git a/py4DSTEM/io/legacy/legacy12/read_v0_9.py b/py4DSTEM/io/legacy/legacy12/read_v0_9.py index 0cf186ffd..5084e7339 100644 --- a/py4DSTEM/io/legacy/legacy12/read_v0_9.py +++ b/py4DSTEM/io/legacy/legacy12/read_v0_9.py @@ -103,7 +103,7 @@ def read_v0_9(fp, **kwargs): ), "Error: data must be specified with strings or integers only." 
if not isinstance(data_id, (int, np.int_, str)): assert all( - [isinstance(d, (int, np.int_, str)) for d in data_id] + isinstance(d, (int, np.int_, str)) for d in data_id ), "Error: data must be specified with strings or integers only." # Parse optional arguments @@ -255,7 +255,7 @@ def get_data_from_str(filepath, tg, data_id, mem="RAM", binfactor=1, bindtype=No def get_data_from_list(filepath, tg, data_id, mem="RAM", binfactor=1, bindtype=None): """Accepts a filepath to a valid py4DSTEM file and a list or tuple specifying data, and returns the data.""" assert isinstance(data_id, (list, tuple)) - assert all([isinstance(d, (int, np.int_, str)) for d in data_id]) + assert all(isinstance(d, (int, np.int_, str)) for d in data_id) data = [] for el in data_id: if isinstance(el, (int, np.int_)): diff --git a/py4DSTEM/io/legacy/legacy13/v13_emd_classes/io.py b/py4DSTEM/io/legacy/legacy13/v13_emd_classes/io.py index e1b7ab241..294c2498f 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_emd_classes/io.py +++ b/py4DSTEM/io/legacy/legacy13/v13_emd_classes/io.py @@ -160,7 +160,7 @@ def Metadata_to_h5(metadata, group): dset.attrs["type"] = np.string_("tuple") # of tuples - elif any([isinstance(v[i], tuple) for i in range(len(v))]): + elif any(isinstance(v[i], tuple) for i in range(len(v))): dset_grp = grp.create_group(k) dset_grp.attrs["type"] = np.string_("tuple_of_tuples") dset_grp.attrs["length"] = len(v) diff --git a/py4DSTEM/io/legacy/read_utils.py b/py4DSTEM/io/legacy/read_utils.py index 7cd48cde7..fe8cd9819 100644 --- a/py4DSTEM/io/legacy/read_utils.py +++ b/py4DSTEM/io/legacy/read_utils.py @@ -21,7 +21,7 @@ def is_py4DSTEM_version13(filepath): if "emd_group_type" in f[k].attrs: if f[k].attrs["emd_group_type"] == "root": if all( - [x in f[k].attrs for x in ("version_major", "version_minor")] + x in f[k].attrs for x in ("version_major", "version_minor") ): if ( int(f[k].attrs["version_major"]), diff --git a/py4DSTEM/process/phase/utils.py b/py4DSTEM/process/phase/utils.py index 
a1eb54c80..0ae2a16ab 100644 --- a/py4DSTEM/process/phase/utils.py +++ b/py4DSTEM/process/phase/utils.py @@ -294,12 +294,12 @@ def evaluate_chi( alpha = xp.array(alpha) array = xp.zeros(alpha.shape, dtype=np.float32) - if any([p[symbol] != 0.0 for symbol in ("C10", "C12", "phi12")]): + if any(p[symbol] != 0.0 for symbol in ("C10", "C12", "phi12")): array += ( 1 / 2 * alpha2 * (p["C10"] + p["C12"] * xp.cos(2 * (phi - p["phi12"]))) ) - if any([p[symbol] != 0.0 for symbol in ("C21", "phi21", "C23", "phi23")]): + if any(p[symbol] != 0.0 for symbol in ("C21", "phi21", "C23", "phi23")): array += ( 1 / 3 @@ -312,7 +312,7 @@ def evaluate_chi( ) if any( - [p[symbol] != 0.0 for symbol in ("C30", "C32", "phi32", "C34", "phi34")] + p[symbol] != 0.0 for symbol in ("C30", "C32", "phi32", "C34", "phi34") ): array += ( 1 @@ -326,10 +326,8 @@ def evaluate_chi( ) if any( - [ - p[symbol] != 0.0 + p[symbol] != 0.0 for symbol in ("C41", "phi41", "C43", "phi43", "C45", "phi41") - ] ): array += ( 1 @@ -344,10 +342,8 @@ def evaluate_chi( ) if any( - [ - p[symbol] != 0.0 + p[symbol] != 0.0 for symbol in ("C50", "C52", "phi52", "C54", "phi54", "C56", "phi56") - ] ): array += ( 1 @@ -1048,7 +1044,7 @@ def fourier_rotate_real_volume(array, angle, axes=(0, 1), xp=np): if len(axes) != 2: raise ValueError("axes should contain exactly two values") - if not all([float(ax).is_integer() for ax in axes]): + if not all(float(ax).is_integer() for ax in axes): raise ValueError("axes should contain only integer values") if axes[0] < 0: diff --git a/py4DSTEM/process/wholepatternfit/wpf.py b/py4DSTEM/process/wholepatternfit/wpf.py index f206004b4..f964a765e 100644 --- a/py4DSTEM/process/wholepatternfit/wpf.py +++ b/py4DSTEM/process/wholepatternfit/wpf.py @@ -575,7 +575,7 @@ def _finalize_model(self): self.upper_bound = np.array([param.upper_bound for param in unique_params]) self.lower_bound = np.array([param.lower_bound for param in unique_params]) - self.hasJacobian = all([m.hasJacobian for m in 
self.model]) + self.hasJacobian = all(m.hasJacobian for m in self.model) self.nParams = self.x0.shape[0] diff --git a/py4DSTEM/visualize/overlay.py b/py4DSTEM/visualize/overlay.py index 1cefe8ac0..36ed63797 100644 --- a/py4DSTEM/visualize/overlay.py +++ b/py4DSTEM/visualize/overlay.py @@ -23,13 +23,13 @@ def add_rectangles(ax, d): lims = [lims] assert isinstance(lims, list) N = len(lims) - assert all([isinstance(t, tuple) for t in lims]) - assert all([len(t) == 4 for t in lims]) + assert all(isinstance(t, tuple) for t in lims) + assert all(len(t) == 4 for t in lims) # color color = d["color"] if "color" in d.keys() else "r" if isinstance(color, list): assert len(color) == N - assert all([is_color_like(c) for c in color]) + assert all(is_color_like(c) for c in color) else: assert is_color_like(color) color = [color for i in range(N)] @@ -40,7 +40,7 @@ def add_rectangles(ax, d): else: assert isinstance(fill, list) assert len(fill) == N - assert all([isinstance(f, bool) for f in fill]) + assert all(isinstance(f, bool) for f in fill) # alpha alpha = d["alpha"] if "alpha" in d.keys() else 1 if isinstance(alpha, (float, int, np.float64)): @@ -48,7 +48,7 @@ def add_rectangles(ax, d): else: assert isinstance(alpha, list) assert len(alpha) == N - assert all([isinstance(a, (float, int, np.float64)) for a in alpha]) + assert all(isinstance(a, (float, int, np.float64)) for a in alpha) # linewidth linewidth = d["linewidth"] if "linewidth" in d.keys() else 2 if isinstance(linewidth, (float, int, np.float64)): @@ -56,7 +56,7 @@ def add_rectangles(ax, d): else: assert isinstance(linewidth, list) assert len(linewidth) == N - assert all([isinstance(lw, (float, int, np.float64)) for lw in linewidth]) + assert all(isinstance(lw, (float, int, np.float64)) for lw in linewidth) # additional parameters kws = [ k for k in d.keys() if k not in ("lims", "color", "fill", "alpha", "linewidth") @@ -97,8 +97,8 @@ def add_circles(ax, d): center = [center] assert isinstance(center, list) N = 
len(center) - assert all([isinstance(x, tuple) for x in center]) - assert all([len(x) == 2 for x in center]) + assert all(isinstance(x, tuple) for x in center) + assert all(len(x) == 2 for x in center) # radius assert "R" in d.keys() R = d["R"] @@ -106,12 +106,12 @@ def add_circles(ax, d): R = [R for i in range(N)] assert isinstance(R, list) assert len(R) == N - assert all([isinstance(i, Number) for i in R]) + assert all(isinstance(i, Number) for i in R) # color color = d["color"] if "color" in d.keys() else "r" if isinstance(color, list): assert len(color) == N - assert all([is_color_like(c) for c in color]) + assert all(is_color_like(c) for c in color) else: assert is_color_like(color) color = [color for i in range(N)] @@ -122,7 +122,7 @@ def add_circles(ax, d): else: assert isinstance(fill, list) assert len(fill) == N - assert all([isinstance(f, bool) for f in fill]) + assert all(isinstance(f, bool) for f in fill) # alpha alpha = d["alpha"] if "alpha" in d.keys() else 1 if isinstance(alpha, (float, int, np.float64)): @@ -130,7 +130,7 @@ def add_circles(ax, d): else: assert isinstance(alpha, list) assert len(alpha) == N - assert all([isinstance(a, (float, int, np.float64)) for a in alpha]) + assert all(isinstance(a, (float, int, np.float64)) for a in alpha) # linewidth linewidth = d["linewidth"] if "linewidth" in d.keys() else 2 if isinstance(linewidth, (float, int, np.float64)): @@ -138,7 +138,7 @@ def add_circles(ax, d): else: assert isinstance(linewidth, list) assert len(linewidth) == N - assert all([isinstance(lw, (float, int, np.float64)) for lw in linewidth]) + assert all(isinstance(lw, (float, int, np.float64)) for lw in linewidth) # additional parameters kws = [ k @@ -194,8 +194,8 @@ def add_annuli(ax, d): assert len(center) == 2 center = [center] * N # assert(isinstance(center,list)) - assert all([isinstance(x, tuple) for x in center]) - assert all([len(x) == 2 for x in center]) + assert all(isinstance(x, tuple) for x in center) + assert all(len(x) == 2 
for x in center) # radii if isinstance(radii, tuple): assert len(radii) == 2 @@ -203,17 +203,17 @@ def add_annuli(ax, d): ro = [radii[1] for i in range(N)] else: assert isinstance(radii, list) - assert all([isinstance(x, tuple) for x in radii]) + assert all(isinstance(x, tuple) for x in radii) assert len(radii) == N ri = [radii[i][0] for i in range(N)] ro = [radii[i][1] for i in range(N)] - assert all([isinstance(i, Number) for i in ri]) - assert all([isinstance(i, Number) for i in ro]) + assert all(isinstance(i, Number) for i in ri) + assert all(isinstance(i, Number) for i in ro) # color color = d["color"] if "color" in d.keys() else "r" if isinstance(color, list): assert len(color) == N - assert all([is_color_like(c) for c in color]) + assert all(is_color_like(c) for c in color) else: assert is_color_like(color) color = [color for i in range(N)] @@ -224,7 +224,7 @@ def add_annuli(ax, d): else: assert isinstance(fill, list) assert len(fill) == N - assert all([isinstance(f, bool) for f in fill]) + assert all(isinstance(f, bool) for f in fill) # alpha alpha = d["alpha"] if "alpha" in d.keys() else 1 if isinstance(alpha, (float, int, np.float64)): @@ -232,7 +232,7 @@ def add_annuli(ax, d): else: assert isinstance(alpha, list) assert len(alpha) == N - assert all([isinstance(a, (float, int, np.float64)) for a in alpha]) + assert all(isinstance(a, (float, int, np.float64)) for a in alpha) # linewidth linewidth = d["linewidth"] if "linewidth" in d.keys() else 2 if isinstance(linewidth, (float, int, np.float64)): @@ -240,7 +240,7 @@ def add_annuli(ax, d): else: assert isinstance(linewidth, list) assert len(linewidth) == N - assert all([isinstance(lw, (float, int, np.float64)) for lw in linewidth]) + assert all(isinstance(lw, (float, int, np.float64)) for lw in linewidth) # additional parameters kws = [ k @@ -303,7 +303,7 @@ def add_ellipses(ax, d): a = [a] assert isinstance(a, list) N = len(a) - assert all([isinstance(i, Number) for i in a]) + assert all(isinstance(i, 
Number) for i in a) # semiminor axis length assert "b" in d.keys() b = d["b"] @@ -311,7 +311,7 @@ def add_ellipses(ax, d): b = [b] assert isinstance(b, list) assert len(b) == N - assert all([isinstance(i, Number) for i in b]) + assert all(isinstance(i, Number) for i in b) # center assert "center" in d.keys() center = d["center"] @@ -320,8 +320,8 @@ def add_ellipses(ax, d): center = [center for i in range(N)] assert isinstance(center, list) assert len(center) == N - assert all([isinstance(x, tuple) for x in center]) - assert all([len(x) == 2 for x in center]) + assert all(isinstance(x, tuple) for x in center) + assert all(len(x) == 2 for x in center) # theta assert "theta" in d.keys() theta = d["theta"] @@ -329,12 +329,12 @@ def add_ellipses(ax, d): theta = [theta for i in range(N)] assert isinstance(theta, list) assert len(theta) == N - assert all([isinstance(i, Number) for i in theta]) + assert all(isinstance(i, Number) for i in theta) # color color = d["color"] if "color" in d.keys() else "r" if isinstance(color, list): assert len(color) == N - assert all([is_color_like(c) for c in color]) + assert all(is_color_like(c) for c in color) else: assert is_color_like(color) color = [color for i in range(N)] @@ -345,7 +345,7 @@ def add_ellipses(ax, d): else: assert isinstance(fill, list) assert len(fill) == N - assert all([isinstance(f, bool) for f in fill]) + assert all(isinstance(f, bool) for f in fill) # alpha alpha = d["alpha"] if "alpha" in d.keys() else 1 if isinstance(alpha, (float, int, np.float64)): @@ -353,7 +353,7 @@ def add_ellipses(ax, d): else: assert isinstance(alpha, list) assert len(alpha) == N - assert all([isinstance(alp, (float, int, np.float64)) for alp in alpha]) + assert all(isinstance(alp, (float, int, np.float64)) for alp in alpha) # linewidth linewidth = d["linewidth"] if "linewidth" in d.keys() else 2 if isinstance(linewidth, (float, int, np.float64)): @@ -361,7 +361,7 @@ def add_ellipses(ax, d): else: assert isinstance(linewidth, list) assert 
len(linewidth) == N - assert all([isinstance(lw, (float, int, np.float64)) for lw in linewidth]) + assert all(isinstance(lw, (float, int, np.float64)) for lw in linewidth) # linestyle linestyle = d["linestyle"] if "linestyle" in d.keys() else "-" if isinstance(linestyle, (str)): @@ -369,7 +369,7 @@ def add_ellipses(ax, d): else: assert isinstance(linestyle, list) assert len(linestyle) == N - assert all([isinstance(lw, (str)) for lw in linestyle]) + assert all(isinstance(lw, (str)) for lw in linestyle) # additional parameters kws = [ k @@ -454,7 +454,7 @@ def add_points(ax, d): color = d["pointcolor"] if "pointcolor" in d.keys() else "r" if isinstance(color, (list, np.ndarray)): assert len(color) == N - assert all([is_color_like(c) for c in color]) + assert all(is_color_like(c) for c in color) else: assert is_color_like(color) color = [color for i in range(N)] diff --git a/py4DSTEM/visualize/show_extention.py b/py4DSTEM/visualize/show_extention.py index 8fdf522a2..027292ddc 100644 --- a/py4DSTEM/visualize/show_extention.py +++ b/py4DSTEM/visualize/show_extention.py @@ -9,7 +9,7 @@ def _show_grid(**kwargs): # parse grid of images if isinstance(ar[0], list): - assert all([isinstance(ar[i], list) for i in range(len(ar))]) + assert all(isinstance(ar[i], list) for i in range(len(ar))) W = len(ar[0]) H = len(ar) diff --git a/py4DSTEM/visualize/vis_special.py b/py4DSTEM/visualize/vis_special.py index acacb6184..c58c12279 100644 --- a/py4DSTEM/visualize/vis_special.py +++ b/py4DSTEM/visualize/vis_special.py @@ -581,7 +581,7 @@ def show_selected_dps( assert isinstance(datacube, DataCube) N = len(positions) assert all( - [len(x) == 2 for x in positions] + len(x) == 2 for x in positions ), "Improperly formated argument `positions`" if bragg_pos is not None: show_disk_pos = True @@ -604,7 +604,7 @@ def show_selected_dps( H = int(np.ceil(N / W)) else: H, W = HW - assert all([isinstance(x, (int, np.integer)) for x in (H, W)]) + assert all(isinstance(x, (int, np.integer)) for x in 
(H, W)) x = [i[0] for i in positions] y = [i[1] for i in positions] From 4288ca259ca6e1577be5956454cc746b97e7ac87 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 9 Nov 2023 23:54:39 -0800 Subject: [PATCH 11/41] C409 C405 --- py4DSTEM/process/calibration/ellipse.py | 2 +- py4DSTEM/process/classification/braggvectorclassification.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/py4DSTEM/process/calibration/ellipse.py b/py4DSTEM/process/calibration/ellipse.py index 8835aa95b..80a4defc6 100644 --- a/py4DSTEM/process/calibration/ellipse.py +++ b/py4DSTEM/process/calibration/ellipse.py @@ -199,7 +199,7 @@ def fit_ellipse_amorphous_ring(data, center, fitradii, p0=None, mask=None): A, B, C = convert_ellipse_params_r(R, R, 0) # Populate initial parameters - p0_guess = tuple([I0, I1, sigma0, sigma1, sigma2, c_bkgd, x0, y0, A, B, C]) + p0_guess = (I0, I1, sigma0, sigma1, sigma2, c_bkgd, x0, y0, A, B, C) if p0 is None: _p0 = p0_guess else: diff --git a/py4DSTEM/process/classification/braggvectorclassification.py b/py4DSTEM/process/classification/braggvectorclassification.py index d5c2ac0fc..413d3343f 100644 --- a/py4DSTEM/process/classification/braggvectorclassification.py +++ b/py4DSTEM/process/classification/braggvectorclassification.py @@ -856,7 +856,7 @@ def get_initial_classes( seed_new_class = True while seed_new_class: ind1, ind2 = np.unravel_index(np.argmax(n_point_function), (N, N)) - BP_set = set([ind1, ind2]) + BP_set = {ind1, ind2} grow_class = True while grow_class: frequencies = np.zeros(N) @@ -910,7 +910,7 @@ def get_initial_classes( seed_new_class = True while seed_new_class: ind1, ind2, ind3 = np.unravel_index(np.argmax(n_point_function), (N, N, N)) - BP_set = set([ind1, ind2, ind3]) + BP_set = {ind1, ind2, ind3} grow_class = True while grow_class: frequencies = np.zeros(N) From 1a00dd16f86d447e9b8e239374e0dc251bfb9ec1 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 00:06:54 -0800 Subject: [PATCH 12/41] 
trailing commas --- py4DSTEM/braggvectors/diskdetection_aiml.py | 2 +- py4DSTEM/braggvectors/diskdetection_aiml_cuda.py | 2 +- .../braggvectors/diskdetection_parallel_new.py | 4 ++-- py4DSTEM/io/filereaders/read_mib.py | 2 +- py4DSTEM/io/legacy/read_utils.py | 4 +--- py4DSTEM/process/diffraction/crystal_viz.py | 2 +- py4DSTEM/visualize/show.py | 14 +++++++------- py4DSTEM/visualize/vis_RQ.py | 2 +- py4DSTEM/visualize/vis_special.py | 16 ++++++++-------- 9 files changed, 23 insertions(+), 25 deletions(-) diff --git a/py4DSTEM/braggvectors/diskdetection_aiml.py b/py4DSTEM/braggvectors/diskdetection_aiml.py index 67df18074..68c01a851 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml.py @@ -559,7 +559,7 @@ def find_Bragg_disks_aiml( model_path=None, distributed=None, CUDA=True, - **kwargs + **kwargs, ): """ Finds the Bragg disks in all diffraction patterns of datacube by AI/ML method. This method diff --git a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py index c5f89b9fd..98e5e1a8d 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py @@ -265,7 +265,7 @@ def _find_Bragg_disks_aiml_single_DP_CUDA( blocks=None, threads=None, model_path=None, - **kwargs + **kwargs, ): """ Finds the Bragg disks in single DP by AI/ML method. This method utilizes FCU-Net diff --git a/py4DSTEM/braggvectors/diskdetection_parallel_new.py b/py4DSTEM/braggvectors/diskdetection_parallel_new.py index c15e41732..6771cb9af 100644 --- a/py4DSTEM/braggvectors/diskdetection_parallel_new.py +++ b/py4DSTEM/braggvectors/diskdetection_parallel_new.py @@ -100,7 +100,7 @@ def beta_parallel_disk_detection( close_dask_client=False, return_dask_client=True, *args, - **kwargs + **kwargs, ): """ This is not fully validated currently so may not work, please report bugs on the py4DSTEM github page. 
@@ -225,7 +225,7 @@ def beta_parallel_disk_detection( probe_kernel_FT=dask_probe_delayed[0, 0], # probe_kernel_FT=delayed_probe_kernel_FT, *args, - **kwargs + **kwargs, ) # passing through args from earlier or should I use # corrPower=corrPower, # sigma=sigma_gaussianFilter, diff --git a/py4DSTEM/io/filereaders/read_mib.py b/py4DSTEM/io/filereaders/read_mib.py index 079c9d1bd..7456bd594 100644 --- a/py4DSTEM/io/filereaders/read_mib.py +++ b/py4DSTEM/io/filereaders/read_mib.py @@ -14,7 +14,7 @@ def load_mib( reshape=True, flip=True, scan=(256, 256), - **kwargs + **kwargs, ): """ Read a MIB file and return as py4DSTEM DataCube. diff --git a/py4DSTEM/io/legacy/read_utils.py b/py4DSTEM/io/legacy/read_utils.py index fe8cd9819..c11f93b60 100644 --- a/py4DSTEM/io/legacy/read_utils.py +++ b/py4DSTEM/io/legacy/read_utils.py @@ -20,9 +20,7 @@ def is_py4DSTEM_version13(filepath): for k in f.keys(): if "emd_group_type" in f[k].attrs: if f[k].attrs["emd_group_type"] == "root": - if all( - x in f[k].attrs for x in ("version_major", "version_minor") - ): + if all(x in f[k].attrs for x in ("version_major", "version_minor")): if ( int(f[k].attrs["version_major"]), int(f[k].attrs["version_minor"]), diff --git a/py4DSTEM/process/diffraction/crystal_viz.py b/py4DSTEM/process/diffraction/crystal_viz.py index 9f9336155..9c5b0a0f2 100644 --- a/py4DSTEM/process/diffraction/crystal_viz.py +++ b/py4DSTEM/process/diffraction/crystal_viz.py @@ -2093,7 +2093,7 @@ def plot_ring_pattern( figsize=(10, 10), returnfig=False, input_fig_handle=None, - **kwargs + **kwargs, ): """ 2D plot of diffraction rings diff --git a/py4DSTEM/visualize/show.py b/py4DSTEM/visualize/show.py index 00309ec36..ffa3052bb 100644 --- a/py4DSTEM/visualize/show.py +++ b/py4DSTEM/visualize/show.py @@ -77,7 +77,7 @@ def show( title=None, show_fft=False, show_cbar=False, - **kwargs + **kwargs, ): """ General visualization function for 2D arrays. 
@@ -904,7 +904,7 @@ def show_Q( gridlabelsize=12, gridlabelcolor="k", alpha=0.35, - **kwargs + **kwargs, ): """ Shows a diffraction space image with options for several overlays to define the scale, @@ -1144,7 +1144,7 @@ def show_rectangles( alpha=0.25, linewidth=2, returnfig=False, - **kwargs + **kwargs, ): """ Visualization function which plots a 2D array with one or more overlayed rectangles. @@ -1197,7 +1197,7 @@ def show_circles( alpha=0.3, linewidth=2, returnfig=False, - **kwargs + **kwargs, ): """ Visualization function which plots a 2D array with one or more overlayed circles. @@ -1252,7 +1252,7 @@ def show_ellipses( alpha=0.3, linewidth=2, returnfig=False, - **kwargs + **kwargs, ): """ Visualization function which plots a 2D array with one or more overlayed ellipses. @@ -1308,7 +1308,7 @@ def show_annuli( alpha=0.3, linewidth=2, returnfig=False, - **kwargs + **kwargs, ): """ Visualization function which plots a 2D array with one or more overlayed annuli. @@ -1360,7 +1360,7 @@ def show_points( open_circles=False, title=None, returnfig=False, - **kwargs + **kwargs, ): """ Plots a 2D array with one or more points. diff --git a/py4DSTEM/visualize/vis_RQ.py b/py4DSTEM/visualize/vis_RQ.py index 6c2fbff3c..85c0eb042 100644 --- a/py4DSTEM/visualize/vis_RQ.py +++ b/py4DSTEM/visualize/vis_RQ.py @@ -15,7 +15,7 @@ def show_selected_dp( pointsize=50, pointcolor="r", scaling="log", - **kwargs + **kwargs, ): """ """ dp = datacube.data[rx, ry, :, :] diff --git a/py4DSTEM/visualize/vis_special.py b/py4DSTEM/visualize/vis_special.py index c58c12279..84584f368 100644 --- a/py4DSTEM/visualize/vis_special.py +++ b/py4DSTEM/visualize/vis_special.py @@ -31,7 +31,7 @@ def show_elliptical_fit( linewidth_ann=2, linewidth_ell=2, returnfig=False, - **kwargs + **kwargs, ): """ Plots an elliptical curve over its annular fit region. 
@@ -97,7 +97,7 @@ def show_amorphous_ring_fit( ellipse_alpha=0.7, ellipse_lw=2, returnfig=False, - **kwargs + **kwargs, ): """ Display a diffraction pattern with a fit to its amorphous ring, interleaving @@ -225,7 +225,7 @@ def show_qprofile( ticklabelsize=14, grid=True, label=None, - **kwargs + **kwargs, ): """ Plots a diffraction space radial profile. @@ -302,7 +302,7 @@ def show_voronoi( color_lines="w", max_dist=None, returnfig=False, - **kwargs + **kwargs, ): """ words @@ -375,7 +375,7 @@ def show_class_BPs_grid( axsize=(6, 6), titlesize=0, get_bordercolor=None, - **kwargs + **kwargs, ): """ words @@ -430,7 +430,7 @@ def select_point( color_selected="r", size=20, returnfig=False, - **kwargs + **kwargs, ): """ Show enumerated index labels for a set of points, with one selected point highlighted @@ -555,7 +555,7 @@ def show_selected_dps( HW=None, figsize_im=(6, 6), figsize_dp=(4, 4), - **kwargs + **kwargs, ): """ Shows two plots: first, a real space image overlaid with colored dots @@ -718,7 +718,7 @@ def show_complex( pixelunits="pixels", pixelsize=1, returnfig=False, - **kwargs + **kwargs, ): """ Function to plot complex arrays From dfa7546b3c325d768ca1d79bb78b66c9f010472b Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 00:07:11 -0800 Subject: [PATCH 13/41] black --- py4DSTEM/process/phase/utils.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/py4DSTEM/process/phase/utils.py b/py4DSTEM/process/phase/utils.py index 0ae2a16ab..c7e49e5c6 100644 --- a/py4DSTEM/process/phase/utils.py +++ b/py4DSTEM/process/phase/utils.py @@ -311,9 +311,7 @@ def evaluate_chi( ) ) - if any( - p[symbol] != 0.0 for symbol in ("C30", "C32", "phi32", "C34", "phi34") - ): + if any(p[symbol] != 0.0 for symbol in ("C30", "C32", "phi32", "C34", "phi34")): array += ( 1 / 4 @@ -327,7 +325,7 @@ def evaluate_chi( if any( p[symbol] != 0.0 - for symbol in ("C41", "phi41", "C43", "phi43", "C45", "phi41") + for symbol in ("C41", "phi41", "C43", 
"phi43", "C45", "phi41") ): array += ( 1 @@ -343,7 +341,7 @@ def evaluate_chi( if any( p[symbol] != 0.0 - for symbol in ("C50", "C52", "phi52", "C54", "phi54", "C56", "phi56") + for symbol in ("C50", "C52", "phi52", "C54", "phi54", "C56", "phi56") ): array += ( 1 From 79b25590e1df24132660d8bee816b68638ebb329 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 00:44:31 -0800 Subject: [PATCH 14/41] E721 --- py4DSTEM/datacube/datacube.py | 2 +- .../legacy/legacy13/v13_py4dstem_classes/datacube.py | 6 +++--- py4DSTEM/preprocess/preprocess.py | 12 +++++++++--- py4DSTEM/process/diffraction/tdesign.py | 4 +++- py4DSTEM/visualize/vis_grid.py | 2 +- 5 files changed, 17 insertions(+), 9 deletions(-) diff --git a/py4DSTEM/datacube/datacube.py b/py4DSTEM/datacube/datacube.py index 4d87afdd5..62da23e1e 100644 --- a/py4DSTEM/datacube/datacube.py +++ b/py4DSTEM/datacube/datacube.py @@ -653,7 +653,7 @@ def get_probe_size( "dp_mean" in self.treekeys ), "calculate .get_dp_mean() or pass a `dp` arg" DP = self.tree("dp_mean").data - elif type(dp) == str: + elif isinstance(dp, str): assert dp in self.treekeys, f"mode {dp} not found in the tree" DP = self.tree(dp) elif type(dp) == np.ndarray: diff --git a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/datacube.py b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/datacube.py index 422d47bc6..979579d49 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/datacube.py +++ b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/datacube.py @@ -108,7 +108,7 @@ def R_pixel_size(self): @R_pixel_size.setter def R_pixel_size(self, x): - if type(x) is not list: + if not isinstance(x, list): x = [x, x] self.set_dim(0, [0, x[0]]) self.set_dim(1, [0, x[1]]) @@ -120,7 +120,7 @@ def R_pixel_units(self): @R_pixel_units.setter def R_pixel_units(self, x): - if type(x) is not list: + if not isinstance(x, list): x = [x, x] self.dim_units[0] = x[0] self.dim_units[1] = x[1] @@ -133,7 +133,7 @@ def Q_pixel_size(self): 
@Q_pixel_size.setter def Q_pixel_size(self, x): - if type(x) is not list: + if not isinstance(x, list): x = [x, x] self.set_dim(2, [0, x[0]]) self.set_dim(3, [0, x[1]]) diff --git a/py4DSTEM/preprocess/preprocess.py b/py4DSTEM/preprocess/preprocess.py index fb4983622..9febf229a 100644 --- a/py4DSTEM/preprocess/preprocess.py +++ b/py4DSTEM/preprocess/preprocess.py @@ -166,7 +166,9 @@ def bin_data_diffraction(datacube, bin_factor, dtype=None): """ # validate inputs - assert type(bin_factor) is int, f"Error: binning factor {bin_factor} is not an int." + assert isinstance( + bin_factor, int + ), f"Error: binning factor {bin_factor} is not an int." if bin_factor == 1: return datacube if dtype is None: @@ -225,7 +227,9 @@ def bin_data_mmap(datacube, bin_factor, dtype=np.float32): """ # validate inputs - assert type(bin_factor) is int, f"Error: binning factor {bin_factor} is not an int." + assert isinstance( + bin_factor, int + ), f"Error: binning factor {bin_factor} is not an int." if bin_factor == 1: return datacube @@ -268,7 +272,9 @@ def bin_data_real(datacube, bin_factor): Performs diffraction space binning of data by bin_factor. """ # validate inputs - assert type(bin_factor) is int, f"Bin factor {bin_factor} is not an int." + assert isinstance( + bin_factor, int + ), f"Error: binning factor {bin_factor} is not an int." if bin_factor <= 1: return datacube diff --git a/py4DSTEM/process/diffraction/tdesign.py b/py4DSTEM/process/diffraction/tdesign.py index 1a0a81fb6..79edae315 100644 --- a/py4DSTEM/process/diffraction/tdesign.py +++ b/py4DSTEM/process/diffraction/tdesign.py @@ -35,7 +35,9 @@ def tdesign(degree): assert degree <= 21, "Degree must be 21 or less." assert degree >= 1, "Degree should be at least 1." - assert type(degree) is int, "Degree should be an integer." + assert isinstance( + degree, int + ), f"Degree should be an integer, {type(degree)} passed." 
vecs = _tdesigns[degree - 1] diff --git a/py4DSTEM/visualize/vis_grid.py b/py4DSTEM/visualize/vis_grid.py index d24b0b8d8..ef318c021 100644 --- a/py4DSTEM/visualize/vis_grid.py +++ b/py4DSTEM/visualize/vis_grid.py @@ -285,7 +285,7 @@ def show_image_grid( ) except IndexError: ax.axis("off") - if type(title) == str: + if isinstance(title, str): fig.suptitle(title) if suptitle: fig.suptitle(suptitle) From 0b20634251d2c80eabe5db3d79aace7760e374a7 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 01:30:59 -0800 Subject: [PATCH 15/41] F401 - unused imports --- py4DSTEM/braggvectors/braggvectors.py | 2 +- py4DSTEM/braggvectors/diskdetection_aiml.py | 25 ++++++------------- .../braggvectors/diskdetection_aiml_cuda.py | 3 +-- .../diskdetection_parallel_new.py | 10 +------- py4DSTEM/data/calibration.py | 2 -- py4DSTEM/datacube/datacube.py | 2 +- py4DSTEM/io/filereaders/read_arina.py | 1 - py4DSTEM/io/filereaders/read_dm.py | 1 - py4DSTEM/io/legacy/legacy12/read_v0_12.py | 2 +- py4DSTEM/io/legacy/legacy12/read_v0_5.py | 2 -- py4DSTEM/io/legacy/legacy12/read_v0_6.py | 2 -- py4DSTEM/io/legacy/legacy12/read_v0_7.py | 2 -- py4DSTEM/io/legacy/legacy12/read_v0_9.py | 2 +- .../legacy/legacy13/v13_emd_classes/array.py | 1 - .../legacy13/v13_emd_classes/metadata.py | 2 -- .../legacy13/v13_emd_classes/pointlist.py | 2 -- .../v13_emd_classes/pointlistarray.py | 2 -- .../legacy/legacy13/v13_emd_classes/root.py | 3 --- .../v13_py4dstem_classes/braggvectors.py | 2 -- .../legacy13/v13_py4dstem_classes/datacube.py | 1 - .../v13_py4dstem_classes/diffractionslice.py | 1 - .../legacy13/v13_py4dstem_classes/io.py | 1 - .../legacy13/v13_py4dstem_classes/probe.py | 5 ++-- .../legacy13/v13_py4dstem_classes/qpoints.py | 3 +-- .../v13_py4dstem_classes/realslice.py | 1 - .../virtualdiffraction.py | 1 - .../v13_py4dstem_classes/virtualimage.py | 1 - py4DSTEM/io/legacy/legacy13/v13_to_14.py | 1 - py4DSTEM/io/legacy/read_legacy_12.py | 4 +-- py4DSTEM/io/legacy/read_legacy_13.py | 1 - 
py4DSTEM/io/legacy/read_utils.py | 1 - py4DSTEM/preprocess/electroncount.py | 2 -- py4DSTEM/preprocess/preprocess.py | 1 - py4DSTEM/process/calibration/origin.py | 6 ++--- py4DSTEM/process/calibration/qpixelsize.py | 3 --- py4DSTEM/process/calibration/rotation.py | 1 - py4DSTEM/process/diffraction/crystal_bloch.py | 1 - .../process/diffraction/crystal_calibrate.py | 6 ++--- py4DSTEM/process/diffraction/crystal_phase.py | 3 +-- py4DSTEM/process/diffraction/crystal_viz.py | 8 +++--- py4DSTEM/process/diffraction/flowlines.py | 4 +-- py4DSTEM/process/polar/polar_datacube.py | 2 +- py4DSTEM/process/polar/polar_peaks.py | 2 +- py4DSTEM/process/rdf/rdf.py | 2 +- py4DSTEM/process/strain/strain.py | 3 --- py4DSTEM/process/utils/utils.py | 3 +-- py4DSTEM/process/wholepatternfit/wpf_viz.py | 2 -- py4DSTEM/utils/configuration_checker.py | 2 +- py4DSTEM/visualize/vis_RQ.py | 1 - py4DSTEM/visualize/vis_special.py | 4 --- 50 files changed, 33 insertions(+), 112 deletions(-) diff --git a/py4DSTEM/braggvectors/braggvectors.py b/py4DSTEM/braggvectors/braggvectors.py index daaf9816e..333a4b395 100644 --- a/py4DSTEM/braggvectors/braggvectors.py +++ b/py4DSTEM/braggvectors/braggvectors.py @@ -1,7 +1,7 @@ # Defines the BraggVectors class from py4DSTEM.data import Data -from emdfile import Custom, PointListArray, PointList, Metadata +from emdfile import Custom, PointListArray, Metadata from py4DSTEM.braggvectors.braggvector_methods import BraggVectorMethods from os.path import basename import numpy as np diff --git a/py4DSTEM/braggvectors/diskdetection_aiml.py b/py4DSTEM/braggvectors/diskdetection_aiml.py index 68c01a851..2284202e4 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml.py @@ -4,16 +4,14 @@ """ import os -import glob import json import shutil import numpy as np +from importlib.util import find_spec -from scipy.ndimage import gaussian_filter from time import time -from numbers import Number -from emdfile import tqdmnd, 
PointList, PointListArray +from emdfile import tqdmnd from py4DSTEM.braggvectors.braggvectors import BraggVectors from py4DSTEM.data import QPoints from py4DSTEM.process.utils import get_maxima_2D @@ -100,10 +98,8 @@ def find_Bragg_disks_aiml_single_DP( Returns: (PointList): the Bragg peak positions and correlation intensities - """ - try: - import crystal4D - except: + """ + if find_spec("crystal4D") is None: raise ImportError("Import Error: Please install crystal4D before proceeding") try: import tensorflow as tf @@ -254,9 +250,8 @@ def find_Bragg_disks_aiml_selected( correlation intensities at each scan position (Rx,Ry). """ - try: - import crystal4D - except: + if find_spec("crystal4D") is None: + raise ImportError("Import Error: Please install crystal4D before proceeding") assert len(Rx) == len(Ry) @@ -431,9 +426,7 @@ def find_Bragg_disks_aiml_serial( (PointListArray): the Bragg peak positions and correlation intensities """ - try: - import crystal4D - except: + if find_spec("crystal4D") is None: raise ImportError("Import Error: Please install crystal4D before proceeding") # Make the peaks PointListArray @@ -641,9 +634,7 @@ def find_Bragg_disks_aiml( Returns: (PointListArray): the Bragg peak positions and correlation intensities """ - try: - import crystal4D - except: + if find_spec("crystal4D") is None: raise ImportError("Please install crystal4D before proceeding") def _parse_distributed(distributed): diff --git a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py index 98e5e1a8d..d82aefd43 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py @@ -8,8 +8,7 @@ from emdfile import tqdmnd from py4DSTEM.braggvectors.braggvectors import BraggVectors -from emdfile import PointList, PointListArray -from py4DSTEM.data import QPoints +from emdfile import PointList from py4DSTEM.braggvectors.kernels import kernels from 
py4DSTEM.braggvectors.diskdetection_aiml import _get_latest_model diff --git a/py4DSTEM/braggvectors/diskdetection_parallel_new.py b/py4DSTEM/braggvectors/diskdetection_parallel_new.py index 6771cb9af..c5b27a8bc 100644 --- a/py4DSTEM/braggvectors/diskdetection_parallel_new.py +++ b/py4DSTEM/braggvectors/diskdetection_parallel_new.py @@ -1,7 +1,4 @@ import numpy as np -import matplotlib.pyplot as plt -import h5py -import time import dill import dask @@ -9,16 +6,11 @@ import dask.config from dask import delayed from dask.distributed import Client, LocalCluster -from dask.diagnostics import ProgressBar -# import dask.bag as db - -# import distributed from distributed.protocol.serialize import register_serialization_family import distributed -import py4DSTEM -from emdfile import PointListArray, PointList +from emdfile import PointListArray from py4DSTEM.braggvectors.diskdetection import _find_Bragg_disks_single_DP_FK diff --git a/py4DSTEM/data/calibration.py b/py4DSTEM/data/calibration.py index cd9a7c9aa..8281cc8e7 100644 --- a/py4DSTEM/data/calibration.py +++ b/py4DSTEM/data/calibration.py @@ -1,9 +1,7 @@ # Defines the Calibration class, which stores calibration metadata import numpy as np -from numbers import Number from typing import Optional -from warnings import warn from emdfile import Metadata, Root from py4DSTEM.data.propagating_calibration import call_calibrate diff --git a/py4DSTEM/datacube/datacube.py b/py4DSTEM/datacube/datacube.py index 62da23e1e..9306613b8 100644 --- a/py4DSTEM/datacube/datacube.py +++ b/py4DSTEM/datacube/datacube.py @@ -12,7 +12,7 @@ ) from typing import Optional, Union -from emdfile import Array, Metadata, Node, Root, tqdmnd +from emdfile import Array, Metadata, Node, tqdmnd from py4DSTEM.data import Data, Calibration from py4DSTEM.datacube.virtualimage import DataCubeVirtualImager from py4DSTEM.datacube.virtualdiffraction import DataCubeVirtualDiffraction diff --git a/py4DSTEM/io/filereaders/read_arina.py 
b/py4DSTEM/io/filereaders/read_arina.py index 6f7c463d2..71973fbd4 100644 --- a/py4DSTEM/io/filereaders/read_arina.py +++ b/py4DSTEM/io/filereaders/read_arina.py @@ -1,5 +1,4 @@ import h5py -import hdf5plugin import numpy as np from py4DSTEM.datacube import DataCube from py4DSTEM.preprocess.utils import bin2D diff --git a/py4DSTEM/io/filereaders/read_dm.py b/py4DSTEM/io/filereaders/read_dm.py index 617529708..118eff6ec 100644 --- a/py4DSTEM/io/filereaders/read_dm.py +++ b/py4DSTEM/io/filereaders/read_dm.py @@ -1,7 +1,6 @@ # Reads a digital micrograph 4D-STEM dataset import numpy as np -from pathlib import Path from ncempy.io import dm from emdfile import tqdmnd, Array diff --git a/py4DSTEM/io/legacy/legacy12/read_v0_12.py b/py4DSTEM/io/legacy/legacy12/read_v0_12.py index 4f028f698..6622a5a2b 100644 --- a/py4DSTEM/io/legacy/legacy12/read_v0_12.py +++ b/py4DSTEM/io/legacy/legacy12/read_v0_12.py @@ -2,7 +2,7 @@ import h5py import numpy as np -from os.path import splitext, exists +from os.path import exists from py4DSTEM.io.legacy.read_utils import ( is_py4DSTEM_file, get_py4DSTEM_topgroups, diff --git a/py4DSTEM/io/legacy/legacy12/read_v0_5.py b/py4DSTEM/io/legacy/legacy12/read_v0_5.py index 7a7e2141c..3a19c5a4c 100644 --- a/py4DSTEM/io/legacy/legacy12/read_v0_5.py +++ b/py4DSTEM/io/legacy/legacy12/read_v0_5.py @@ -2,12 +2,10 @@ import h5py import numpy as np -from os.path import splitext from py4DSTEM.io.legacy.read_utils import ( is_py4DSTEM_file, get_py4DSTEM_topgroups, get_py4DSTEM_version, - version_is_geq, ) from py4DSTEM.io.legacy.legacy12.read_utils_v0_5 import get_py4DSTEM_dataobject_info from emdfile import PointList, PointListArray diff --git a/py4DSTEM/io/legacy/legacy12/read_v0_6.py b/py4DSTEM/io/legacy/legacy12/read_v0_6.py index 40c80c82b..4f0149473 100644 --- a/py4DSTEM/io/legacy/legacy12/read_v0_6.py +++ b/py4DSTEM/io/legacy/legacy12/read_v0_6.py @@ -2,12 +2,10 @@ import h5py import numpy as np -from os.path import splitext from 
py4DSTEM.io.legacy.read_utils import ( is_py4DSTEM_file, get_py4DSTEM_topgroups, get_py4DSTEM_version, - version_is_geq, ) from py4DSTEM.io.legacy.legacy12.read_utils_v0_6 import get_py4DSTEM_dataobject_info from emdfile import PointList, PointListArray diff --git a/py4DSTEM/io/legacy/legacy12/read_v0_7.py b/py4DSTEM/io/legacy/legacy12/read_v0_7.py index ed4a363cb..d94c55fab 100644 --- a/py4DSTEM/io/legacy/legacy12/read_v0_7.py +++ b/py4DSTEM/io/legacy/legacy12/read_v0_7.py @@ -2,12 +2,10 @@ import h5py import numpy as np -from os.path import splitext from py4DSTEM.io.legacy.read_utils import ( is_py4DSTEM_file, get_py4DSTEM_topgroups, get_py4DSTEM_version, - version_is_geq, ) from py4DSTEM.io.legacy.legacy12.read_utils_v0_7 import get_py4DSTEM_dataobject_info from emdfile import PointList, PointListArray diff --git a/py4DSTEM/io/legacy/legacy12/read_v0_9.py b/py4DSTEM/io/legacy/legacy12/read_v0_9.py index 5084e7339..75bf91233 100644 --- a/py4DSTEM/io/legacy/legacy12/read_v0_9.py +++ b/py4DSTEM/io/legacy/legacy12/read_v0_9.py @@ -2,7 +2,7 @@ import h5py import numpy as np -from os.path import splitext, exists +from os.path import exists from py4DSTEM.io.legacy.read_utils import ( is_py4DSTEM_file, get_py4DSTEM_topgroups, diff --git a/py4DSTEM/io/legacy/legacy13/v13_emd_classes/array.py b/py4DSTEM/io/legacy/legacy13/v13_emd_classes/array.py index a5192ffa6..23d94b77e 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_emd_classes/array.py +++ b/py4DSTEM/io/legacy/legacy13/v13_emd_classes/array.py @@ -3,7 +3,6 @@ from typing import Optional, Union import numpy as np -import h5py from numbers import Number from py4DSTEM.io.legacy.legacy13.v13_emd_classes.tree import Tree diff --git a/py4DSTEM/io/legacy/legacy13/v13_emd_classes/metadata.py b/py4DSTEM/io/legacy/legacy13/v13_emd_classes/metadata.py index d430528e1..e8e756965 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_emd_classes/metadata.py +++ b/py4DSTEM/io/legacy/legacy13/v13_emd_classes/metadata.py @@ -1,7 +1,5 @@ import 
numpy as np -from numbers import Number from typing import Optional -import h5py from py4DSTEM.io.legacy.legacy13.v13_emd_classes.tree import Tree diff --git a/py4DSTEM/io/legacy/legacy13/v13_emd_classes/pointlist.py b/py4DSTEM/io/legacy/legacy13/v13_emd_classes/pointlist.py index c7f0c7fc1..4621211b4 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_emd_classes/pointlist.py +++ b/py4DSTEM/io/legacy/legacy13/v13_emd_classes/pointlist.py @@ -3,8 +3,6 @@ # structured arrays. import numpy as np -import h5py -from copy import copy from typing import Optional from py4DSTEM.io.legacy.legacy13.v13_emd_classes.tree import Tree diff --git a/py4DSTEM/io/legacy/legacy13/v13_emd_classes/pointlistarray.py b/py4DSTEM/io/legacy/legacy13/v13_emd_classes/pointlistarray.py index c246672bd..7e27f21ca 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_emd_classes/pointlistarray.py +++ b/py4DSTEM/io/legacy/legacy13/v13_emd_classes/pointlistarray.py @@ -1,7 +1,5 @@ import numpy as np -from copy import copy from typing import Optional -import h5py from py4DSTEM.io.legacy.legacy13.v13_emd_classes.tree import Tree from py4DSTEM.io.legacy.legacy13.v13_emd_classes.metadata import Metadata diff --git a/py4DSTEM/io/legacy/legacy13/v13_emd_classes/root.py b/py4DSTEM/io/legacy/legacy13/v13_emd_classes/root.py index c5137d9f4..39a36ab5a 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_emd_classes/root.py +++ b/py4DSTEM/io/legacy/legacy13/v13_emd_classes/root.py @@ -1,7 +1,4 @@ -import numpy as np -from numbers import Number from typing import Optional -import h5py from py4DSTEM.io.legacy.legacy13.v13_emd_classes.tree import Tree diff --git a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/braggvectors.py b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/braggvectors.py index 4e51bdebf..91471bd83 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/braggvectors.py +++ b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/braggvectors.py @@ -1,9 +1,7 @@ # Defines the BraggVectors class -from typing import 
Optional, Union import numpy as np -import h5py from py4DSTEM.io.legacy.legacy13.v13_emd_classes import PointListArray from py4DSTEM.io.legacy.legacy13.v13_emd_classes.tree import Tree diff --git a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/datacube.py b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/datacube.py index 979579d49..a341cb3f8 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/datacube.py +++ b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/datacube.py @@ -6,7 +6,6 @@ from typing import Optional, Union import numpy as np -import h5py class DataCube(Array): diff --git a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/diffractionslice.py b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/diffractionslice.py index b32877a4a..60e29af33 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/diffractionslice.py +++ b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/diffractionslice.py @@ -5,7 +5,6 @@ from typing import Optional, Union import numpy as np -import h5py class DiffractionSlice(Array): diff --git a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/io.py b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/io.py index 2556ebe8f..23fd9fee6 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/io.py +++ b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/io.py @@ -1,6 +1,5 @@ # Functions for reading and writing subclasses of the base EMD types -import numpy as np import h5py from os.path import basename diff --git a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/probe.py b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/probe.py index cd1c7d9d9..284616241 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/probe.py +++ b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/probe.py @@ -1,14 +1,13 @@ # Defines the Probe class, which stores vacuum probes # and cross-correlation kernels derived from them -from py4DSTEM.io.legacy.legacy13.v13_emd_classes.array import Array, Metadata +from 
py4DSTEM.io.legacy.legacy13.v13_emd_classes.array import Metadata from py4DSTEM.io.legacy.legacy13.v13_py4dstem_classes.diffractionslice import ( DiffractionSlice, ) -from typing import Optional, Union +from typing import Optional import numpy as np -import h5py class Probe(DiffractionSlice): diff --git a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/qpoints.py b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/qpoints.py index 3429c4c8d..e9fa51723 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/qpoints.py +++ b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/qpoints.py @@ -2,9 +2,8 @@ from py4DSTEM.io.legacy.legacy13.v13_emd_classes.pointlist import PointList -from typing import Optional, Union +from typing import Optional import numpy as np -import h5py class QPoints(PointList): diff --git a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/realslice.py b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/realslice.py index 367401055..a560e613b 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/realslice.py +++ b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/realslice.py @@ -2,7 +2,6 @@ from typing import Optional, Union import numpy as np -import h5py class RealSlice(Array): diff --git a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/virtualdiffraction.py b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/virtualdiffraction.py index 188f1d646..7adc89a8e 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/virtualdiffraction.py +++ b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/virtualdiffraction.py @@ -8,7 +8,6 @@ from typing import Optional, Union import numpy as np -import h5py class VirtualDiffraction(DiffractionSlice): diff --git a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/virtualimage.py b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/virtualimage.py index 4d6c38845..2feb96c62 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/virtualimage.py +++ 
b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/virtualimage.py @@ -6,7 +6,6 @@ from typing import Optional, Union import numpy as np -import h5py class VirtualImage(RealSlice): diff --git a/py4DSTEM/io/legacy/legacy13/v13_to_14.py b/py4DSTEM/io/legacy/legacy13/v13_to_14.py index 650529b22..28e634eef 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_to_14.py +++ b/py4DSTEM/io/legacy/legacy13/v13_to_14.py @@ -34,7 +34,6 @@ Calibration, DiffractionSlice, RealSlice, - QPoints, ) from py4DSTEM.datacube import ( DataCube, diff --git a/py4DSTEM/io/legacy/read_legacy_12.py b/py4DSTEM/io/legacy/read_legacy_12.py index 40bfcfc94..68aafcada 100644 --- a/py4DSTEM/io/legacy/read_legacy_12.py +++ b/py4DSTEM/io/legacy/read_legacy_12.py @@ -1,8 +1,6 @@ # File reader for py4DSTEM files -import h5py -import numpy as np -from os.path import splitext, exists +from os.path import exists from py4DSTEM.io.legacy.read_utils import is_py4DSTEM_file, get_py4DSTEM_topgroups from py4DSTEM.io.legacy.read_utils import get_py4DSTEM_version, version_is_geq from py4DSTEM.io.legacy.legacy12 import ( diff --git a/py4DSTEM/io/legacy/read_legacy_13.py b/py4DSTEM/io/legacy/read_legacy_13.py index 04da1e65a..43f666ebd 100644 --- a/py4DSTEM/io/legacy/read_legacy_13.py +++ b/py4DSTEM/io/legacy/read_legacy_13.py @@ -1,7 +1,6 @@ # File reader for py4DSTEM v13 files import h5py -import numpy as np import warnings from os.path import exists, basename, dirname, join from typing import Optional, Union diff --git a/py4DSTEM/io/legacy/read_utils.py b/py4DSTEM/io/legacy/read_utils.py index c11f93b60..128213a70 100644 --- a/py4DSTEM/io/legacy/read_utils.py +++ b/py4DSTEM/io/legacy/read_utils.py @@ -1,7 +1,6 @@ # Utility functions import h5py -import numpy as np def get_py4DSTEM_topgroups(filepath): diff --git a/py4DSTEM/preprocess/electroncount.py b/py4DSTEM/preprocess/electroncount.py index d3c2edd9a..e54899bdb 100644 --- a/py4DSTEM/preprocess/electroncount.py +++ b/py4DSTEM/preprocess/electroncount.py @@ -144,7 
+144,6 @@ def electron_count_GPU( See electron_count() for additional documentation. """ import torch - import dm assert isinstance(output, str), "output must be a str" assert output in [ @@ -417,7 +416,6 @@ def counted_pointlistarray_to_datacube(counted_pointlistarray, shape, subpixel=F if __name__ == "__main__": - from py4DSTEM.process.preprocess import get_darkreference from py4DSTEM.io import DataCube, save from ncempy.io import dm diff --git a/py4DSTEM/preprocess/preprocess.py b/py4DSTEM/preprocess/preprocess.py index 9febf229a..e70fed06f 100644 --- a/py4DSTEM/preprocess/preprocess.py +++ b/py4DSTEM/preprocess/preprocess.py @@ -11,7 +11,6 @@ import numpy as np from py4DSTEM.preprocess.utils import bin2D, get_shifted_ar from emdfile import tqdmnd -from scipy.ndimage import median_filter ### Editing datacube shape ### diff --git a/py4DSTEM/process/calibration/origin.py b/py4DSTEM/process/calibration/origin.py index 78a90fbef..fe410e777 100644 --- a/py4DSTEM/process/calibration/origin.py +++ b/py4DSTEM/process/calibration/origin.py @@ -1,15 +1,13 @@ # Find the origin of diffraction space -import functools import numpy as np from scipy.ndimage import gaussian_filter -from scipy.optimize import leastsq -from emdfile import tqdmnd, PointListArray +from emdfile import tqdmnd from py4DSTEM.datacube import DataCube from py4DSTEM.process.calibration.probe import get_probe_size from py4DSTEM.process.fit import plane, parabola, bezier_two, fit_2D -from py4DSTEM.process.utils import get_CoM, add_to_2D_array_from_floats, get_maxima_2D +from py4DSTEM.process.utils import get_CoM # diff --git a/py4DSTEM/process/calibration/qpixelsize.py b/py4DSTEM/process/calibration/qpixelsize.py index 2abefd54c..9d0c5eded 100644 --- a/py4DSTEM/process/calibration/qpixelsize.py +++ b/py4DSTEM/process/calibration/qpixelsize.py @@ -2,10 +2,7 @@ import numpy as np from scipy.optimize import leastsq -from typing import Union, Optional -from emdfile import tqdmnd -from py4DSTEM.process.utils 
import get_CoM def get_Q_pixel_size(q_meas, q_known, units="A"): diff --git a/py4DSTEM/process/calibration/rotation.py b/py4DSTEM/process/calibration/rotation.py index aaf8a49ce..eec4b3d4a 100644 --- a/py4DSTEM/process/calibration/rotation.py +++ b/py4DSTEM/process/calibration/rotation.py @@ -1,7 +1,6 @@ # Rotational calibrations import numpy as np -from typing import Optional import matplotlib.pyplot as plt from py4DSTEM import show diff --git a/py4DSTEM/process/diffraction/crystal_bloch.py b/py4DSTEM/process/diffraction/crystal_bloch.py index 6a3c9b1ac..31e7b94f7 100644 --- a/py4DSTEM/process/diffraction/crystal_bloch.py +++ b/py4DSTEM/process/diffraction/crystal_bloch.py @@ -1,4 +1,3 @@ -import warnings import numpy as np import numpy.lib.recfunctions as rfn from scipy import linalg diff --git a/py4DSTEM/process/diffraction/crystal_calibrate.py b/py4DSTEM/process/diffraction/crystal_calibrate.py index c068bf79e..e07005468 100644 --- a/py4DSTEM/process/diffraction/crystal_calibrate.py +++ b/py4DSTEM/process/diffraction/crystal_calibrate.py @@ -1,13 +1,13 @@ import numpy as np -from typing import Union, Optional +from typing import Union from scipy.optimize import curve_fit -from py4DSTEM.process.diffraction.utils import Orientation, calc_1D_profile +from py4DSTEM.process.diffraction.utils import calc_1D_profile try: from pymatgen.symmetry.analyzer import SpacegroupAnalyzer from pymatgen.core.structure import Structure -except ImportError: +except (ImportError, ModuleNotFoundError): pass diff --git a/py4DSTEM/process/diffraction/crystal_phase.py b/py4DSTEM/process/diffraction/crystal_phase.py index 84824fe63..6ab48cde8 100644 --- a/py4DSTEM/process/diffraction/crystal_phase.py +++ b/py4DSTEM/process/diffraction/crystal_phase.py @@ -5,8 +5,7 @@ import matplotlib.pyplot as plt from emdfile import tqdmnd, PointListArray -from py4DSTEM.visualize import show, show_image_grid -from py4DSTEM.process.diffraction.crystal_viz import plot_diffraction_pattern +from 
py4DSTEM.visualize import show_image_grid class Crystal_Phase: diff --git a/py4DSTEM/process/diffraction/crystal_viz.py b/py4DSTEM/process/diffraction/crystal_viz.py index 9c5b0a0f2..3318215ae 100644 --- a/py4DSTEM/process/diffraction/crystal_viz.py +++ b/py4DSTEM/process/diffraction/crystal_viz.py @@ -1,18 +1,16 @@ import matplotlib.pyplot as plt -from matplotlib.figure import Figure -from matplotlib.axes import Axes import matplotlib.tri as mtri -from mpl_toolkits.mplot3d import Axes3D, art3d +from mpl_toolkits.mplot3d import art3d from scipy.signal import medfilt from scipy.ndimage import gaussian_filter from scipy.ndimage.morphology import distance_transform_edt -from skimage.morphology import dilation, erosion +from skimage.morphology import erosion import warnings import numpy as np from typing import Union, Optional -from emdfile import tqdmnd, PointList, PointListArray +from emdfile import tqdmnd, PointList from py4DSTEM.process.diffraction.utils import calc_1D_profile diff --git a/py4DSTEM/process/diffraction/flowlines.py b/py4DSTEM/process/diffraction/flowlines.py index 66904d4f8..fbb01e603 100644 --- a/py4DSTEM/process/diffraction/flowlines.py +++ b/py4DSTEM/process/diffraction/flowlines.py @@ -3,15 +3,13 @@ import numpy as np import matplotlib.pyplot as plt -from matplotlib.figure import Figure -from matplotlib.axes import Axes from scipy.ndimage import gaussian_filter1d from matplotlib.colors import hsv_to_rgb from matplotlib.colors import rgb_to_hsv from matplotlib.colors import ListedColormap -from emdfile import tqdmnd, PointList, PointListArray +from emdfile import tqdmnd, PointListArray def make_orientation_histogram( diff --git a/py4DSTEM/process/polar/polar_datacube.py b/py4DSTEM/process/polar/polar_datacube.py index 56071c534..6ba741953 100644 --- a/py4DSTEM/process/polar/polar_datacube.py +++ b/py4DSTEM/process/polar/polar_datacube.py @@ -1,6 +1,6 @@ import numpy as np from py4DSTEM.datacube import DataCube -from scipy.ndimage import 
binary_opening, binary_closing, gaussian_filter1d +from scipy.ndimage import gaussian_filter1d class PolarDatacube: diff --git a/py4DSTEM/process/polar/polar_peaks.py b/py4DSTEM/process/polar/polar_peaks.py index be9ae989e..a7bb4f847 100644 --- a/py4DSTEM/process/polar/polar_peaks.py +++ b/py4DSTEM/process/polar/polar_peaks.py @@ -4,7 +4,7 @@ from scipy.ndimage import gaussian_filter, gaussian_filter1d from scipy.signal import peak_prominences from skimage.feature import peak_local_max -from scipy.optimize import curve_fit, leastsq +from scipy.optimize import curve_fit import warnings # from emdfile import tqdmnd, PointList, PointListArray diff --git a/py4DSTEM/process/rdf/rdf.py b/py4DSTEM/process/rdf/rdf.py index cee7eeee9..d09c8a35f 100644 --- a/py4DSTEM/process/rdf/rdf.py +++ b/py4DSTEM/process/rdf/rdf.py @@ -4,7 +4,7 @@ import numpy as np from scipy.special import erf -from scipy.fftpack import dst, idst +from scipy.fftpack import dst from py4DSTEM.process.utils import single_atom_scatter diff --git a/py4DSTEM/process/strain/strain.py b/py4DSTEM/process/strain/strain.py index ab8a46a9a..1d2dbf3bc 100644 --- a/py4DSTEM/process/strain/strain.py +++ b/py4DSTEM/process/strain/strain.py @@ -13,7 +13,6 @@ from py4DSTEM.data import Data, RealSlice from py4DSTEM.preprocess.utils import get_maxima_2D from py4DSTEM.process.strain.latticevectors import ( - add_indices_to_braggvectors, fit_lattice_vectors_all_DPs, get_reference_g1g2, get_rotated_strain_map, @@ -25,8 +24,6 @@ add_bragg_index_labels, add_pointlabels, add_vector, - ax_addaxes, - ax_addaxes_QtoR, ) diff --git a/py4DSTEM/process/utils/utils.py b/py4DSTEM/process/utils/utils.py index 4ef2e1d8a..f77578f5a 100644 --- a/py4DSTEM/process/utils/utils.py +++ b/py4DSTEM/process/utils/utils.py @@ -11,8 +11,7 @@ import matplotlib.font_manager as fm from emdfile import tqdmnd -from py4DSTEM.process.utils.multicorr import upsampled_correlation -from py4DSTEM.preprocess.utils import make_Fourier_coords2D +from 
py4DSTEM.preprocess.utils import make_Fourier_coords2D # TODO this is defined later try: from IPython.display import clear_output diff --git a/py4DSTEM/process/wholepatternfit/wpf_viz.py b/py4DSTEM/process/wholepatternfit/wpf_viz.py index ac32e88f7..6ad8fbc20 100644 --- a/py4DSTEM/process/wholepatternfit/wpf_viz.py +++ b/py4DSTEM/process/wholepatternfit/wpf_viz.py @@ -1,9 +1,7 @@ -from typing import Optional import numpy as np import matplotlib.pyplot as plt import matplotlib.colors as mpl_c -from matplotlib.gridspec import GridSpec from py4DSTEM.process.wholepatternfit.wp_models import WPFModelType diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index 26b0b89d5..785b91b97 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -1,11 +1,11 @@ #### this file contains a function/s that will check if various # libaries/compute options are available import importlib -from operator import mod # list of modules we expect/may expect to be installed # as part of a standard py4DSTEM installation # this needs to be the import name e.g. 
import mp_api not mp-api +# TODO use importlib.metadata.requirements to populate modules = [ "crystal4D", "cupy", diff --git a/py4DSTEM/visualize/vis_RQ.py b/py4DSTEM/visualize/vis_RQ.py index 85c0eb042..8d111b2b6 100644 --- a/py4DSTEM/visualize/vis_RQ.py +++ b/py4DSTEM/visualize/vis_RQ.py @@ -1,6 +1,5 @@ import numpy as np import matplotlib.pyplot as plt -from matplotlib.axes import Axes from py4DSTEM.visualize.show import show, show_points diff --git a/py4DSTEM/visualize/vis_special.py b/py4DSTEM/visualize/vis_special.py index 84584f368..689cc451c 100644 --- a/py4DSTEM/visualize/vis_special.py +++ b/py4DSTEM/visualize/vis_special.py @@ -4,18 +4,14 @@ from mpl_toolkits.axes_grid1 import make_axes_locatable from scipy.spatial import Voronoi -from emdfile import PointList from py4DSTEM.visualize import show from py4DSTEM.visualize.overlay import ( add_pointlabels, - add_vector, - add_bragg_index_labels, add_ellipses, add_points, add_scalebar, ) from py4DSTEM.visualize.vis_grid import show_image_grid -from py4DSTEM.visualize.vis_RQ import ax_addaxes, ax_addaxes_QtoR from colorspacious import cspace_convert From d3ccf039328e4c656966e86cac619bab585915a9 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 01:31:50 -0800 Subject: [PATCH 16/41] black --- py4DSTEM/braggvectors/diskdetection_aiml.py | 3 +-- py4DSTEM/process/calibration/qpixelsize.py | 1 - py4DSTEM/process/utils/utils.py | 4 +++- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/py4DSTEM/braggvectors/diskdetection_aiml.py b/py4DSTEM/braggvectors/diskdetection_aiml.py index 2284202e4..099105538 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml.py @@ -98,7 +98,7 @@ def find_Bragg_disks_aiml_single_DP( Returns: (PointList): the Bragg peak positions and correlation intensities - """ + """ if find_spec("crystal4D") is None: raise ImportError("Import Error: Please install crystal4D before proceeding") try: @@ -251,7 +251,6 @@ def 
find_Bragg_disks_aiml_selected( """ if find_spec("crystal4D") is None: - raise ImportError("Import Error: Please install crystal4D before proceeding") assert len(Rx) == len(Ry) diff --git a/py4DSTEM/process/calibration/qpixelsize.py b/py4DSTEM/process/calibration/qpixelsize.py index 9d0c5eded..b98cd8716 100644 --- a/py4DSTEM/process/calibration/qpixelsize.py +++ b/py4DSTEM/process/calibration/qpixelsize.py @@ -4,7 +4,6 @@ from scipy.optimize import leastsq - def get_Q_pixel_size(q_meas, q_known, units="A"): """ Computes the size of the Q-space pixels. diff --git a/py4DSTEM/process/utils/utils.py b/py4DSTEM/process/utils/utils.py index f77578f5a..3da90f48a 100644 --- a/py4DSTEM/process/utils/utils.py +++ b/py4DSTEM/process/utils/utils.py @@ -11,7 +11,9 @@ import matplotlib.font_manager as fm from emdfile import tqdmnd -from py4DSTEM.preprocess.utils import make_Fourier_coords2D # TODO this is defined later +from py4DSTEM.preprocess.utils import ( + make_Fourier_coords2D, +) # TODO this is defined later try: from IPython.display import clear_output From 9022bee00cb5d84094a4620d598160c673b91713 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 01:44:43 -0800 Subject: [PATCH 17/41] more E721 --- .../io/legacy/legacy13/v13_py4dstem_classes/datacube.py | 2 +- .../io/legacy/legacy13/v13_py4dstem_classes/realslice.py | 8 ++++---- py4DSTEM/process/classification/featurization.py | 4 ++-- py4DSTEM/visualize/vis_grid.py | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/datacube.py b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/datacube.py index a341cb3f8..2c9d697b0 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/datacube.py +++ b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/datacube.py @@ -144,7 +144,7 @@ def Q_pixel_units(self): @Q_pixel_units.setter def Q_pixel_units(self, x): - if type(x) is not list: + if not isinstance(x, list): x = [x, x] self.dim_units[2] = x[0] 
self.dim_units[3] = x[1] diff --git a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/realslice.py b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/realslice.py index a560e613b..e4d973fbc 100644 --- a/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/realslice.py +++ b/py4DSTEM/io/legacy/legacy13/v13_py4dstem_classes/realslice.py @@ -29,9 +29,9 @@ def __init__( A new RealSlice instance """ # expand pixel inputs to include 2 dimensions - if type(pixel_size) is not list: + if not isinstance(pixel_size, list): pixel_size = [pixel_size, pixel_size] - if type(pixel_units) is not list: + if not isinstance(pixel_units, list): pixel_units = [pixel_units, pixel_units] # initialize as an Array @@ -62,7 +62,7 @@ def pixel_size(self): @pixel_size.setter def pixel_size(self, x): - if type(x) is not list: + if not isinstance(x, list): x = [x, x] self.set_dim(0, [0, x[0]]) self.set_dim(1, [0, x[1]]) @@ -74,7 +74,7 @@ def pixel_units(self): @pixel_units.setter def pixel_units(self, x): - if type(x) is not list: + if not isinstance(x, list): x = [x, x] self.dim_units[0] = x[0] self.dim_units[1] = x[1] diff --git a/py4DSTEM/process/classification/featurization.py b/py4DSTEM/process/classification/featurization.py index 38b4e1412..3c3ad58c9 100644 --- a/py4DSTEM/process/classification/featurization.py +++ b/py4DSTEM/process/classification/featurization.py @@ -586,7 +586,7 @@ def get_class_ims(self, classification_method): """ class_maps = [] if classification_method == "NMF": - if type(self.W) == list: + if isinstance(self.W, list): for l in range(len(self.W)): small_class_maps = [] for k in range(self.W[l].shape[1]): @@ -599,7 +599,7 @@ def get_class_ims(self, classification_method): class_maps.append(self.W[:, l].reshape(self.R_Nx, self.R_Ny)) class_maps = [class_maps] elif classification_method == "GMM": - if type(self.gmm_labels) == list: + if isinstance(self.gmm_labels, list): for l in range(len(self.gmm_labels)): small_class_maps = [] for k in 
range(np.max(self.gmm_labels[l])): diff --git a/py4DSTEM/visualize/vis_grid.py b/py4DSTEM/visualize/vis_grid.py index ef318c021..209412fd5 100644 --- a/py4DSTEM/visualize/vis_grid.py +++ b/py4DSTEM/visualize/vis_grid.py @@ -205,7 +205,7 @@ def show_image_grid( ax = axs[i, j] N = i * W + j # make titles - if type(title) == list: + if isinstance(title, list): print_title = title[N] else: print_title = None From dd1a74bbfff1b9504268e412e492592322486e42 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 13:29:19 -0800 Subject: [PATCH 18/41] removing old comment --- py4DSTEM/datacube/virtualimage.py | 1 - 1 file changed, 1 deletion(-) diff --git a/py4DSTEM/datacube/virtualimage.py b/py4DSTEM/datacube/virtualimage.py index f45fade33..0b4169d20 100644 --- a/py4DSTEM/datacube/virtualimage.py +++ b/py4DSTEM/datacube/virtualimage.py @@ -414,7 +414,6 @@ def position_detector( try: image = self.tree(k) break - # KeyError, AttributeError except AssertionError: pass if image is None: From 9657c424d71f5b1256fed83c51474ecf670fa36d Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 14:59:12 -0800 Subject: [PATCH 19/41] adding KeyError --- py4DSTEM/io/filereaders/read_K2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py4DSTEM/io/filereaders/read_K2.py b/py4DSTEM/io/filereaders/read_K2.py index d316a5e7b..5673a9d84 100644 --- a/py4DSTEM/io/filereaders/read_K2.py +++ b/py4DSTEM/io/filereaders/read_K2.py @@ -115,7 +115,7 @@ def __init__( try: R_Ny = gtg.allTags[".SI Dimensions.Size Y"] R_Nx = gtg.allTags[".SI Dimensions.Size X"] - except ValueError: + except (ValueError,KeyError): print("Warning: scan shape not detected. Please check/set manually.") R_Nx = self._guess_number_frames() // 32 R_Ny = 1 From dbfa1b27944b8bc6e87e806aa76f4364fa5d4773 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 14:59:58 -0800 Subject: [PATCH 20/41] adding KeyError to correct place ... 
--- py4DSTEM/io/filereaders/read_K2.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/py4DSTEM/io/filereaders/read_K2.py b/py4DSTEM/io/filereaders/read_K2.py index 5673a9d84..0de69d399 100644 --- a/py4DSTEM/io/filereaders/read_K2.py +++ b/py4DSTEM/io/filereaders/read_K2.py @@ -115,7 +115,7 @@ def __init__( try: R_Ny = gtg.allTags[".SI Dimensions.Size Y"] R_Nx = gtg.allTags[".SI Dimensions.Size X"] - except (ValueError,KeyError): + except ValueError: print("Warning: scan shape not detected. Please check/set manually.") R_Nx = self._guess_number_frames() // 32 R_Ny = 1 @@ -125,7 +125,7 @@ def __init__( Q_Nx = gtg.allTags[".SI Image Tags.Acquisition.Parameters.Detector.height"] Q_Ny = gtg.allTags[".SI Image Tags.Acquisition.Parameters.Detector.width"] # TODO check this is the correct error type - except ValueError: + except (ValueError, KeyError): print("Warning: diffraction pattern shape not detected!") print("Assuming 1920x1792 as the diffraction pattern size!") Q_Nx = 1792 From 6b5dfec84caf70d4b2ea81ed9ae4cd60f6cf7968 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 15:01:37 -0800 Subject: [PATCH 21/41] adding hdf5plugin back in --- py4DSTEM/io/filereaders/read_arina.py | 1 + 1 file changed, 1 insertion(+) diff --git a/py4DSTEM/io/filereaders/read_arina.py b/py4DSTEM/io/filereaders/read_arina.py index 71973fbd4..31412f3b6 100644 --- a/py4DSTEM/io/filereaders/read_arina.py +++ b/py4DSTEM/io/filereaders/read_arina.py @@ -1,4 +1,5 @@ import h5py +import hdf5plugin # noqa: F401 import numpy as np from py4DSTEM.datacube import DataCube from py4DSTEM.preprocess.utils import bin2D From 89ad26af08341bb2ba09a982cabd3511cbce30fa Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 15:05:00 -0800 Subject: [PATCH 22/41] Changing to Exception to catch more errors --- py4DSTEM/utils/configuration_checker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py4DSTEM/utils/configuration_checker.py 
b/py4DSTEM/utils/configuration_checker.py index e83ba4c50..a99081f45 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -304,7 +304,7 @@ def import_tester(m: str) -> bool: # try and import the module try: importlib.import_module(m) - except ModuleNotFoundError: + except Exception: state = False return state From ed34a999168629562dcbc7e54c3365aa542cc373 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 15:13:57 -0800 Subject: [PATCH 23/41] changning cupy GPU count method --- py4DSTEM/utils/configuration_checker.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index a99081f45..4d10491ec 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -386,15 +386,8 @@ def check_cupy_gpu(gratuitously_verbose: bool, **kwargs): print(s) # Count how many GPUs Cupy can detect - # probably should change this to a while loop ... 
- for i in range(24): - try: - d = cp.cuda.Device(i) - hasattr(d, "attributes") - # TODO work out what error is raised - except (AttributeError, Exception): - num_gpus_detected = i - break + num_gpus_detected = cp.cuda.runtime.getDeviceCount() + # print how many GPUs were detected, filter for a couple of special conditons if num_gpus_detected == 0: From 527572938677b734d716e47f1e99d773458501d2 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 10 Nov 2023 15:16:56 -0800 Subject: [PATCH 24/41] black --- py4DSTEM/io/filereaders/read_arina.py | 2 +- py4DSTEM/utils/configuration_checker.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/py4DSTEM/io/filereaders/read_arina.py b/py4DSTEM/io/filereaders/read_arina.py index 31412f3b6..b8d8b00ad 100644 --- a/py4DSTEM/io/filereaders/read_arina.py +++ b/py4DSTEM/io/filereaders/read_arina.py @@ -1,5 +1,5 @@ import h5py -import hdf5plugin # noqa: F401 +import hdf5plugin # noqa: F401 import numpy as np from py4DSTEM.datacube import DataCube from py4DSTEM.preprocess.utils import bin2D diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index 4d10491ec..862d74b24 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -388,7 +388,6 @@ def check_cupy_gpu(gratuitously_verbose: bool, **kwargs): # Count how many GPUs Cupy can detect num_gpus_detected = cp.cuda.runtime.getDeviceCount() - # print how many GPUs were detected, filter for a couple of special conditons if num_gpus_detected == 0: s = " Detected no GPUs " From 96c96aed7d42b1ce99dc6b3e6afbdd347a75a875 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Sun, 12 Nov 2023 17:29:48 -0800 Subject: [PATCH 25/41] using importlib to populate requirements --- py4DSTEM/utils/configuration_checker.py | 185 ++++++++++++++---------- 1 file changed, 109 insertions(+), 76 deletions(-) diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py 
index 862d74b24..60e161f39 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -1,61 +1,93 @@ #### this file contains a function/s that will check if various # libaries/compute options are available import importlib - -# list of modules we expect/may expect to be installed -# as part of a standard py4DSTEM installation -# this needs to be the import name e.g. import mp_api not mp-api -# TODO use importlib.metadata.requirements to populate -modules = [ - "crystal4D", - "cupy", - "dask", - "dill", - "distributed", - "gdown", - "h5py", - "ipyparallel", - "jax", - "matplotlib", - "mp_api", - "ncempy", - "numba", - "numpy", - "pymatgen", - "skimage", - "sklearn", - "scipy", - "tensorflow", - "tensorflow-addons", - "tqdm", -] - -# currently this was copy and pasted from setup.py, -# hopefully there's a programatic way to do this. -module_depenencies = { - "base": [ - "numpy", - "scipy", - "h5py", - "ncempy", - "matplotlib", - "skimage", - "sklearn", - "tqdm", - "dill", - "gdown", - "dask", - "distributed", - ], - "ipyparallel": ["ipyparallel", "dill"], - "cuda": ["cupy"], - "acom": ["pymatgen", "mp_api"], - "aiml": ["tensorflow", "tensorflow-addons", "crystal4D"], - "aiml-cuda": ["tensorflow", "tensorflow-addons", "crystal4D", "cupy"], - "numba": ["numba"], +from importlib.metadata import requires +import re + +# need a mapping of pypi/conda names to import names +import_mapping_dict = { + "scikit-image": "skimage", + "scikit-learn": "sklearn", + "scikit-optimize": "skopt", + "mp-api": "mp_api", } +# programatically get all possible requirements in the import name style +def get_modules_list(): + # Get the dependencies from the installed distribution + dependencies = requires("py4DSTEM") + + # Define a regular expression pattern for splitting on '>', '>=', '=' + delimiter_pattern = re.compile(r">=|>|==|<|<=") + + # Extract only the module names without versions + module_names = [ + 
delimiter_pattern.split(dependency.split(";")[0], 1)[0].strip() + for dependency in dependencies + ] + + # translate pypi names to import names e.g. scikit-image->skimage, mp-api->mp_api + for index, module in enumerate(module_names): + if module in import_mapping_dict.keys(): + module_names[index] = import_mapping_dict[module] + + return module_names + + +# programatically get all possible requirements in the import name style, +# split into a dict where optional import names are keys +def get_modules_dict(): + package_name = "py4DSTEM" + # Get the dependencies from the installed distribution + dependencies = requires(package_name) + + # set the dictionary for modules and packages to go into + modules_dict = { + "base": [], + "acom": [], + "aiml": [], + "aiml-cuda": [], + "cuda": [], + "numba": [], + } + # loop over the dependencies + for depend in dependencies: + # all the optional have extra in the name + # if its not there append it to base + if "extra" not in depend: + modules_dict["base"].append(depend) + + # if it has extra + else: + # loop over the keys and check if its in there + for key in modules_dict.keys(): + if key in depend: + modules_dict[key].append(depend) + + # STRIP all the versioning and semi-colons + # Define a regular expression pattern for splitting on '>', '>=', '=' + delimiter_pattern = re.compile(r">=|>|==|<|<=") + for key, val in modules_dict.items(): + # modules_dict[key] = [dependency.split(';')[0].split(' ')[0] for dependency in val] + modules_dict[key] = [ + delimiter_pattern.split(dependency.split(";")[0], 1)[0].strip() + for dependency in val + ] + + # translate pypi names to import names e.g. 
scikit-image->skimage, mp-api->mp_api + for key, val in modules_dict.items(): + for index, module in enumerate(val): + if module in import_mapping_dict.keys(): + val[index] = import_mapping_dict[module] + + return modules_dict + + +# module_depenencies = get_modules_dict() +modules = get_modules_list() + + #### Class and Functions to Create Coloured Strings #### class colours: CEND = "\x1b[0m" @@ -140,6 +172,7 @@ def create_underline(s: str) -> str: ### here I use the term state to define a boolean condition as to whether a libary/module was sucessfully imported/can be used +# get the state of each modules as a dict key-val e.g. "numpy" : True def get_import_states(modules: list = modules) -> dict: """ Check the ability to import modules and store the results as a boolean value. Returns as a dict. @@ -163,16 +196,17 @@ def get_import_states(modules: list = modules) -> dict: return import_states_dict +# Check def get_module_states(state_dict: dict) -> dict: - """_summary_ - - Args: - state_dict (dict): _description_ + """ + given a state dict for all modules e.g. "numpy" : True, + this parses through and checks if all modules required for a state are true - Returns: - dict: _description_ + returns dict "base": True, "ai-ml": False etc. """ + # get the modules_dict + module_depenencies = get_modules_dict() # create an empty dict to put module states into: module_states = {} @@ -190,19 +224,18 @@ def get_module_states(state_dict: dict) -> dict: # check that all the depencies could be imported i.e. 
state == True # and set the state of the module to that - module_states[key] = all(temp_lst) == True + module_states[key] = all(temp_lst) is True return module_states def print_import_states(import_states: dict) -> None: - """_summary_ - - Args: - import_states (dict): _description_ + """ + print with colours if the library could be imported or not + takes dict + "numpy" : True -> prints success + "pymatgen" : Fasle -> prints failure - Returns: - _type_: _description_ """ # m is the name of the import module # state is whether it was importable @@ -223,13 +256,11 @@ def print_import_states(import_states: dict) -> None: def print_module_states(module_states: dict) -> None: - """_summary_ - - Args: - module_states (dict): _description_ - - Returns: - _type_: _description_ + """ + print with colours if all the imports required for module could be imported or not + takes dict + "base" : True -> prints success + "ai-ml" : Fasle -> prints failure """ # Print out the state of all the modules in colour code # key is the name of a py4DSTEM Module @@ -254,9 +285,9 @@ def perfrom_extra_checks( """_summary_ Args: - import_states (dict): _description_ - verbose (bool): _description_ - gratuitously_verbose (bool): _description_ + import_states (dict): dict of modules and if they could be imported or not + verbose (bool): will show module states and all import states + gratuitously_verbose (bool): will run extra checks - Currently only for cupy Returns: _type_: _description_ @@ -324,6 +355,7 @@ def check_module_functionality(state_dict: dict) -> None: # create an empty dict to put module states into: module_states = {} + module_depenencies = get_modules_dict() # key is the name of the module e.g. ACOM # val is a list of its dependencies @@ -338,7 +370,7 @@ def check_module_functionality(state_dict: dict) -> None: # check that all the depencies could be imported i.e. 
state == True # and set the state of the module to that - module_states[key] = all(temp_lst) == True + module_states[key] = all(temp_lst) is True # Print out the state of all the modules in colour code for key, val in module_states.items(): @@ -421,6 +453,7 @@ def check_cupy_gpu(gratuitously_verbose: bool, **kwargs): return None +# TODO add generic version which will print version def print_no_extra_checks(m: str): """ This function prints a warning style message that the module m From 03e9e610594a86496434b75841445a569a948999 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Mon, 13 Nov 2023 16:46:05 -0800 Subject: [PATCH 26/41] changing find_spec to try except --- py4DSTEM/braggvectors/diskdetection_aiml.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/py4DSTEM/braggvectors/diskdetection_aiml.py b/py4DSTEM/braggvectors/diskdetection_aiml.py index cb20189de..481a97418 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml.py @@ -7,7 +7,6 @@ import json import shutil import numpy as np -from importlib.util import find_spec from time import time @@ -99,7 +98,9 @@ def find_Bragg_disks_aiml_single_DP( Returns: (PointList): the Bragg peak positions and correlation intensities """ - if find_spec("crystal4D") is None: + try: + import crystal4D + except ModuleNotFoundError: raise ImportError("Import Error: Please install crystal4D before proceeding") try: import tensorflow as tf @@ -250,7 +251,9 @@ def find_Bragg_disks_aiml_selected( correlation intensities at each scan position (Rx,Ry). 
""" - if find_spec("crystal4D") is None: + try: + import crystal4D + except ModuleNotFoundError: raise ImportError("Import Error: Please install crystal4D before proceeding") assert len(Rx) == len(Ry) @@ -425,7 +428,9 @@ def find_Bragg_disks_aiml_serial( (PointListArray): the Bragg peak positions and correlation intensities """ - if find_spec("crystal4D") is None: + try: + import crystal4D + except ModuleNotFoundError: raise ImportError("Import Error: Please install crystal4D before proceeding") # Make the peaks PointListArray @@ -633,8 +638,10 @@ def find_Bragg_disks_aiml( Returns: (PointListArray): the Bragg peak positions and correlation intensities """ - if find_spec("crystal4D") is None: - raise ImportError("Please install crystal4D before proceeding") + try: + import crystal4D + except ModuleNotFoundError: + raise ImportError("Import Error: Please install crystal4D before proceeding") def _parse_distributed(distributed): import os From 436d1e4135b12a094527d88b10a5ddaff57a5d46 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Mon, 13 Nov 2023 16:53:30 -0800 Subject: [PATCH 27/41] adding suspected KeyError to TODO --- py4DSTEM/io/legacy/read_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py4DSTEM/io/legacy/read_utils.py b/py4DSTEM/io/legacy/read_utils.py index 67ee920d7..afdbe1384 100644 --- a/py4DSTEM/io/legacy/read_utils.py +++ b/py4DSTEM/io/legacy/read_utils.py @@ -97,7 +97,7 @@ def get_N_dataobjects(filepath, topgroup="4DSTEM_experiment"): N_pla = len(f[topgroup]["data/pointlistarrays"].keys()) try: N_coords = len(f[topgroup]["data/coordinates"].keys()) - # TODO work out what exception will be raised ValueError, AttributeError + # TODO work out what exception will be raised ValueError, AttributeError, BS thinks KeyError except Exception: N_coords = 0 N_do = N_dc + N_cdc + N_ds + N_rs + N_pl + N_pla + N_coords From 5f5d89570c2f700a1363a817ca273bdabd514a93 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Wed, 15 Nov 2023 
23:37:46 -0800 Subject: [PATCH 28/41] F811, F523/F524, F841, F741 - Corrections --- .flake8 | 36 +++++++++++++--------- .github/workflows/build-flake.yml | 40 ------------------------- .github/workflows/linter.yml | 17 +++++++---- py4DSTEM/process/diffraction/crystal.py | 33 ++++++++------------ 4 files changed, 45 insertions(+), 81 deletions(-) delete mode 100644 .github/workflows/build-flake.yml diff --git a/.flake8 b/.flake8 index 736dd1bd9..6781ba03b 100644 --- a/.flake8 +++ b/.flake8 @@ -1,15 +1,23 @@ [flake8] -select: - F402, - F404, - F812, - F823, - F831, - F821, - F822, - E112, - E113, - E901, - E902, - E999, - +extend-ignore = + E114, + E115, + E116, + E201, + E202, + E203, + E204, + E231, + E265, + E266, + E303, + E402, + E501, +exclude = + ; __init__.py # totally ignore __init__.py files + setup.py # ignore setup.py file + docs/ +#F401 ignore unused imports in __init__.py files +#F403 ignore unable to detect undefined names from import * +per-file-ignores = + __init__.py:F401,F403 \ No newline at end of file diff --git a/.github/workflows/build-flake.yml b/.github/workflows/build-flake.yml deleted file mode 100644 index 3393b7908..000000000 --- a/.github/workflows/build-flake.yml +++ /dev/null @@ -1,40 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a single version of Python -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: Check module can be imported - -on: - push: - branches: [ "dev" ] - pull_request: - branches: [ "dev" ] - -permissions: - contents: read - -jobs: - build: - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.10 - uses: actions/setup-python@v3 - with: - python-version: "3.10" - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install flake8 pytest - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - name: Lint with flake8 - 
run: | - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - # flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Test that the module imports - run: | - pip install . - python -c "import py4DSTEM; print(py4DSTEM.__version__)" diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml index 3e8071f6f..39fa51f72 100644 --- a/.github/workflows/linter.yml +++ b/.github/workflows/linter.yml @@ -1,4 +1,4 @@ -name: Check for errors with flake8 +name: Lint with super-linter@v5-slim on: push: @@ -17,9 +17,14 @@ jobs: fetch-depth: 0 - name: Lint Code Base - uses: github/super-linter@v4 + uses: super-linter/super-linter/slim@v5 # updated to latest slim as quicker to download env: - VALIDATE_ALL_CODEBASE: false - VALIDATE_PYTHON_FLAKE8: true - DEFAULT_BRANCH: "dev" - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + VALIDATE_ALL_CODEBASE: false # only check changes + VALIDATE_PYTHON_FLAKE8: true # lint with flake8 + DEFAULT_BRANCH: "dev" # set default branch to dev + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # for github things + # FILTER_REGEX_EXCLUDE: .*test/.* # exclude test dirs + FILTER_REGEX_EXCLUDE: .*__init__.py/.* # exclude test dirs + FILTER_REGEX_INCLUDE: .*py4DSTEM/.* # only look for py4DSTEM + LINTER_RULES_PATH: / # set toplevel dir as the path to look for rules + PYTHON_FLAKE8_CONFIG_FILE: .flake8 # set specific config file \ No newline at end of file diff --git a/py4DSTEM/process/diffraction/crystal.py b/py4DSTEM/process/diffraction/crystal.py index fb2911992..ea243404e 100644 --- a/py4DSTEM/process/diffraction/crystal.py +++ b/py4DSTEM/process/diffraction/crystal.py @@ -34,7 +34,6 @@ class Crystal: save_ang_file, symmetry_reduce_directions, orientation_map_to_orix_CrystalMap, - save_ang_file, ) from py4DSTEM.process.diffraction.crystal_viz 
import ( @@ -433,41 +432,31 @@ def from_unitcell_parameters( elif lattice_type == "hexagonal": assert ( len(latt_params) == 2 - ), "2 lattice parametere are expected for hexagonal: a, c, but given {len(latt_params)}".format( - len(latt_params) - ) + ), f"2 lattice parametere are expected for hexagonal: a, c, but given {len(latt_params)}" lattice = mg.core.Lattice.hexagonal(latt_params[0], latt_params[1]) elif lattice_type == "tetragonal": assert ( len(latt_params) == 2 - ), "2 lattice parametere are expected for tetragonal: a, c, but given {len(latt_params)}".format( - len(latt_params) - ) + ), f"2 lattice parametere are expected for tetragonal: a, c, but given {len(latt_params)}" lattice = mg.core.Lattice.tetragonal(latt_params[0], latt_params[1]) elif lattice_type == "orthorhombic": assert ( len(latt_params) == 3 - ), "3 lattice parametere are expected for orthorhombic: a, b, c, but given {len(latt_params)}".format( - len(latt_params) - ) + ), f"3 lattice parametere are expected for orthorhombic: a, b, c, but given {len(latt_params)}" lattice = mg.core.Lattice.orthorhombic( latt_params[0], latt_params[1], latt_params[2] ) elif lattice_type == "monoclinic": assert ( len(latt_params) == 4 - ), "4 lattice parametere are expected for monoclinic: a, b, c, beta, but given {len(latt_params)}".format( - len(latt_params) - ) + ), f"4 lattice parametere are expected for monoclinic: a, b, c, beta, but given {len(latt_params)}" lattice = mg.core.Lattice.monoclinic( latt_params[0], latt_params[1], latt_params[2], latt_params[3] ) else: assert ( len(latt_params) == 6 - ), "all 6 lattice parametere are expected: a, b, c, alpha, beta, gamma, but given {len(latt_params)}".format( - len(latt_params) - ) + ), f"all 6 lattice parametere are expected: a, b, c, alpha, beta, gamma, but given {len(latt_params)}" lattice = mg.core.Lattice.from_parameters( latt_params[0], latt_params[1], @@ -660,7 +649,8 @@ def generate_diffraction_pattern( self.setup_diffraction(300e3) # Tolerance for 
angular tests - tol = 1e-6 + # TODO should this be used somewhere ? + tol = 1e-6 # noqa: F841 # Parse orientation inputs if orientation is not None: @@ -720,9 +710,9 @@ def generate_diffraction_pattern( gy_proj = g_diff[1, keep_int] # Diffracted peak labels - h = hkl[0, keep_int] - k = hkl[1, keep_int] - l = hkl[2, keep_int] + h = hkl[0, keep_int] # noqa: E741 + k = hkl[1, keep_int] # noqa: E741 + l = hkl[2, keep_int] # noqa: E741 # Output as PointList if keep_qz: @@ -816,7 +806,8 @@ def generate_ring_pattern( # check accelerating voltage if hasattr(self, "accel_voltage"): - accelerating_voltage = self.accel_voltage + # TODO should this be used somewhere ? + accelerating_voltage = self.accel_voltage # noqa: F841 else: self.accel_voltage = 300e3 print("Accelerating voltage not set. Assuming 300 keV!") From 98c21d26781eb21767723322093a63d4f57b737a Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 16 Nov 2023 17:02:38 -0800 Subject: [PATCH 29/41] updating to autoscrape optional depends --- py4DSTEM/utils/configuration_checker.py | 32 ++++++++++++++++--------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index 60e161f39..04a362867 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -45,26 +45,36 @@ def get_modules_dict(): # set the dictionary for modules and packages to go into modules_dict = { "base": [], - "acom": [], - "aiml": [], - "aiml-cuda": [], - "cuda": [], - "numba": [], + # "acom": [], + # "aiml": [], + # "aiml-cuda": [], + # "cuda": [], + # "numba": [], } # loop over the dependencies for depend in dependencies: # all the optional have extra in the name # if its not there append it to base if "extra" not in depend: + # String looks like: 'numpy>=1.19' modules_dict["base"].append(depend) - # if it has extra + # if it has extra in the string else: - # loop over the keys and check if its in there - for key 
in modules_dict.keys(): - if key in depend: - modules_dict[key].append(depend) - + # get the name of the optional name + # depend looks like this 'numba>=0.49.1; extra == "numba"' + # grab whatever is in the double quotes i.e. numba + optional_name = re.search(r'"(.*?)"', depend).group(1) + # if the optional name is not in the dict as a key i.e. first requirement of the optional dependency + if optional_name not in modules_dict: + modules_dict[optional_name] = [depend] + # if the optional_name is already in the dict then just append it to the list + else: + modules_dict[optional_name].append(depend) + # # loop over the keys and check if its in there + # for key in modules_dict.keys(): + # if key in depend: + # modules_dict[key].append(depend) # STRIP all the versioning and semi-colons # Define a regular expression pattern for splitting on '>', '>=', '=' delimiter_pattern = re.compile(r">=|>|==|<|<=") From 0f9b096e0e629cf7d341e9cb7225f0b9335409c2 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 16 Nov 2023 17:04:13 -0800 Subject: [PATCH 30/41] fixing typo in docstring --- py4DSTEM/utils/configuration_checker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index 04a362867..b4b514ac0 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -244,7 +244,7 @@ def print_import_states(import_states: dict) -> None: print with colours if the library could be imported or not takes dict "numpy" : True -> prints success - "pymatgen" : Fasle -> prints failure + "pymatgen" : False -> prints failure """ # m is the name of the import module From dc9c63268f5b816b2f4a9facf2154912b55ad648 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 16 Nov 2023 21:44:19 -0800 Subject: [PATCH 31/41] removing unused vars --- py4DSTEM/process/diffraction/crystal.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git 
a/py4DSTEM/process/diffraction/crystal.py b/py4DSTEM/process/diffraction/crystal.py index ea243404e..9d29c69f1 100644 --- a/py4DSTEM/process/diffraction/crystal.py +++ b/py4DSTEM/process/diffraction/crystal.py @@ -648,10 +648,6 @@ def generate_diffraction_pattern( print("Accelerating voltage not set. Assuming 300 keV!") self.setup_diffraction(300e3) - # Tolerance for angular tests - # TODO should this be used somewhere ? - tol = 1e-6 # noqa: F841 - # Parse orientation inputs if orientation is not None: if ind_orientation is None: @@ -805,10 +801,7 @@ def generate_ring_pattern( ) # check accelerating voltage - if hasattr(self, "accel_voltage"): - # TODO should this be used somewhere ? - accelerating_voltage = self.accel_voltage # noqa: F841 - else: + if not hasattr(self, "accel_voltage"): self.accel_voltage = 300e3 print("Accelerating voltage not set. Assuming 300 keV!") From 03986faa4c6ce2cdfd30edb5a2f6b5a9fddf5e2a Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 17 Nov 2023 00:28:48 -0800 Subject: [PATCH 32/41] adding generic version extra tests --- py4DSTEM/utils/configuration_checker.py | 63 +++++++++++++++++++++++-- 1 file changed, 59 insertions(+), 4 deletions(-) diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index b4b514ac0..3ba18b6dd 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -1,8 +1,10 @@ #### this file contains a function/s that will check if various # libaries/compute options are available import importlib -from importlib.metadata import requires +from importlib.metadata import requires, import_module +import importlib import re +from importlib.util import find_spec # need a mapping of pypi/conda names to import names import_mapping_dict = { @@ -289,7 +291,7 @@ def print_module_states(module_states: dict) -> None: return None -def perfrom_extra_checks( +def perform_extra_checks( import_states: dict, verbose: bool, gratuitously_verbose: bool, 
**kwargs ) -> None: """_summary_ @@ -308,6 +310,14 @@ def perfrom_extra_checks( extra_checks_message = create_bold(extra_checks_message) print(f"{extra_checks_message}") # For modules that import run any extra checks + # get all the dependencies + dependencies = requires("py4DSTEM") + # Extract only the module names with versions + depends_with_requirements = [ + dependency.split(";")[0] for dependency in dependencies + ] + # print(depends_with_requirements) + # need to go from for key, val in import_states.items(): if val: # s = create_underline(key.capitalize()) @@ -318,6 +328,10 @@ def perfrom_extra_checks( print(s) func(verbose=verbose, gratuitously_verbose=gratuitously_verbose) else: + s = create_underline(key.capitalize()) + print(s) + # check + generic_versions(key, depends_with_requires=depends_with_requirements) # if gratuitously_verbose print out all modules without checks if gratuitously_verbose: s = create_underline(key.capitalize()) @@ -401,6 +415,45 @@ def check_module_functionality(state_dict: dict) -> None: #### ADDTIONAL CHECKS #### +def generic_versions(module: str, depends_with_requires: list[str]) -> None: + # module will be like numpy, skimage + # depends_with_requires look like: numpy >= 19.0, scikit-image + # get module_translated_name + # mapping scikit-image : skimage + for key, value in import_mapping_dict.items(): + # if skimage == skimage get scikit-image + # print(f"{key = } - {value = } - {module = }") + if module in value: + module_depend_name = key + break + else: + # if cant find mapping set the search name to the same + module_depend_name = module + # print(f"{module_depend_name = }") + # find the requirement + for depend in depends_with_requires: + if module_depend_name in depend: + spec_required = depend + # print(f"{spec_required = }") + # get the version installed + spec_installed = find_spec(module) + if spec_installed is None: + s = f"{module} unable to import - {spec_required} required" + s = create_failure(s) + s = f"{s: 
<80}" + print(s) + + else: + try: + version = importlib.metadata.version(module) + except Exception: + version = "Couldn't test version" + s = f"{module} imported: {version = } - {spec_required} required" + s = create_warning(s) + s = f"{s: <80}" + print(s) + + def check_cupy_gpu(gratuitously_verbose: bool, **kwargs): """ This function performs some additional tests which may be useful in @@ -484,7 +537,9 @@ def print_no_extra_checks(m: str): # dict of extra check functions -funcs_dict = {"cupy": check_cupy_gpu} +funcs_dict = { + "cupy": check_cupy_gpu, +} #### main function used to check the configuration of the installation @@ -529,7 +584,7 @@ def check_config( print_import_states(states_dict) - perfrom_extra_checks( + perform_extra_checks( import_states=states_dict, verbose=verbose, gratuitously_verbose=gratuitously_verbose, From 8e4f7c98e092a98a4747cde33a42833b107f445a Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 17 Nov 2023 00:30:57 -0800 Subject: [PATCH 33/41] removing TODO --- py4DSTEM/utils/configuration_checker.py | 1 - 1 file changed, 1 deletion(-) diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index 3ba18b6dd..8ad6f4c9f 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -516,7 +516,6 @@ def check_cupy_gpu(gratuitously_verbose: bool, **kwargs): return None -# TODO add generic version which will print version def print_no_extra_checks(m: str): """ This function prints a warning style message that the module m From 0a776656c7451e78affce4ec893374bdd321dc03 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 17 Nov 2023 00:36:11 -0800 Subject: [PATCH 34/41] ModuleNotFoundErrors --- py4DSTEM/braggvectors/diskdetection_aiml.py | 16 ++++++++++++---- py4DSTEM/braggvectors/diskdetection_aiml_cuda.py | 2 +- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/py4DSTEM/braggvectors/diskdetection_aiml.py 
b/py4DSTEM/braggvectors/diskdetection_aiml.py index 481a97418..d100cd34c 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml.py @@ -101,7 +101,9 @@ def find_Bragg_disks_aiml_single_DP( try: import crystal4D except ModuleNotFoundError: - raise ImportError("Import Error: Please install crystal4D before proceeding") + raise ModuleNotFoundError( + "Import Error: Please install crystal4D before proceeding" + ) try: import tensorflow as tf except ModuleNotFoundError: @@ -254,7 +256,9 @@ def find_Bragg_disks_aiml_selected( try: import crystal4D except ModuleNotFoundError: - raise ImportError("Import Error: Please install crystal4D before proceeding") + raise ModuleNotFoundError( + "Import Error: Please install crystal4D before proceeding" + ) assert len(Rx) == len(Ry) peaks = [] @@ -431,7 +435,9 @@ def find_Bragg_disks_aiml_serial( try: import crystal4D except ModuleNotFoundError: - raise ImportError("Import Error: Please install crystal4D before proceeding") + raise ModuleNotFoundError( + "Import Error: Please install crystal4D before proceeding" + ) # Make the peaks PointListArray # dtype = [('qx',float),('qy',float),('intensity',float)] @@ -641,7 +647,9 @@ def find_Bragg_disks_aiml( try: import crystal4D except ModuleNotFoundError: - raise ImportError("Import Error: Please install crystal4D before proceeding") + raise ModuleNotFoundError( + "Import Error: Please install crystal4D before proceeding" + ) def _parse_distributed(distributed): import os diff --git a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py index 574fcb3cc..1cc127f58 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py @@ -17,7 +17,7 @@ try: import cupy as cp except ModuleNotFoundError: - raise ImportError("AIML CUDA Requires cupy") + raise ModuleNotFoundError("AIML CUDA Requires cupy") try: import tensorflow as tf From 
92c69427acb2c72d7551711af3011791dec6123d Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 17 Nov 2023 00:46:27 -0800 Subject: [PATCH 35/41] removing TODOS --- py4DSTEM/braggvectors/diskdetection_aiml.py | 10 +++------- py4DSTEM/braggvectors/diskdetection_aiml_cuda.py | 4 ++-- py4DSTEM/io/filereaders/read_K2.py | 3 +-- 3 files changed, 6 insertions(+), 11 deletions(-) diff --git a/py4DSTEM/braggvectors/diskdetection_aiml.py b/py4DSTEM/braggvectors/diskdetection_aiml.py index d100cd34c..53ad89a77 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml.py @@ -845,7 +845,7 @@ def _integrate_disks(DP, maxima_x, maxima_y, maxima_int, int_window_radius=1): disks.append(np.average(disk)) try: disks = disks / max(disks) - # TODO work out what exception would go here + # possibly a ZeroDivideError except Exception: pass return (maxima_x, maxima_y, disks) @@ -900,7 +900,6 @@ def _get_latest_model(model_path=None): except FileExistsError: pass except Exception as e: - # TODO work out if I want to pass or raise pass # raise e # download the json file with the meta data @@ -915,10 +914,8 @@ def _get_latest_model(model_path=None): with open("./tmp/model_metadata_old.json") as f_old: metaold = json.load(f_old) file_id_old = metaold["file_id"] - # TODO Double check this is correct Error - except FileNotFoundError: - file_id_old = file_id - except Exception: + # I think just FileNotFoundError + except (FileNotFoundError, Exception): file_id_old = file_id if os.path.exists(file_path) and file_id == file_id_old: @@ -934,7 +931,6 @@ def _get_latest_model(model_path=None): download_file_from_google_drive(file_id, filename) try: shutil.unpack_archive(filename, "./tmp", format="zip") - # TODO Work work what specific exception except Exception: pass model_path = file_path diff --git a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py index 1cc127f58..26241c4d5 100644 --- 
a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py @@ -636,7 +636,7 @@ def upsampled_correlation_cp(imageCorr, upsampleFactor, xyShift): ) dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1]) dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0]) - # TODO I think this is just the IndexError + # I think this is just the IndexError except Exception: dx, dy = ( 0, @@ -733,7 +733,7 @@ def _integrate_disks_cp(DP, maxima_x, maxima_y, maxima_int, int_window_radius=1) disks.append(np.average(disk)) try: disks = disks / max(disks) - # TODO work out what exception to use, ZeroDivisionError + # Possibly ZeroDivisionError except Exception: pass return (maxima_x, maxima_y, disks) diff --git a/py4DSTEM/io/filereaders/read_K2.py b/py4DSTEM/io/filereaders/read_K2.py index 0de69d399..57d030882 100644 --- a/py4DSTEM/io/filereaders/read_K2.py +++ b/py4DSTEM/io/filereaders/read_K2.py @@ -124,8 +124,7 @@ def __init__( # this may be wrong for binned data... in which case the reader doesn't work anyway! 
Q_Nx = gtg.allTags[".SI Image Tags.Acquisition.Parameters.Detector.height"] Q_Ny = gtg.allTags[".SI Image Tags.Acquisition.Parameters.Detector.width"] - # TODO check this is the correct error type - except (ValueError, KeyError): + except (ValueError, KeyError, AttributeError): print("Warning: diffraction pattern shape not detected!") print("Assuming 1920x1792 as the diffraction pattern size!") Q_Nx = 1792 From c280c8e75acd558cc93628d3d0fc063061806340 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 17 Nov 2023 11:18:23 -0800 Subject: [PATCH 36/41] changing logic for print(Running Extra Checks run) --- py4DSTEM/utils/configuration_checker.py | 29 +++++++++++++++---------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index 8ad6f4c9f..1994d62fc 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -304,12 +304,12 @@ def perform_extra_checks( Returns: _type_: _description_ """ - - # print a output module - extra_checks_message = "Running Extra Checks" - extra_checks_message = create_bold(extra_checks_message) - print(f"{extra_checks_message}") - # For modules that import run any extra checks + if gratuitously_verbose: + # print a output module + extra_checks_message = "Running Extra Checks" + extra_checks_message = create_bold(extra_checks_message) + print(f"{extra_checks_message}") + # For modules that import run any extra checks # get all the dependencies dependencies = requires("py4DSTEM") # Extract only the module names with versions @@ -328,15 +328,22 @@ def perform_extra_checks( print(s) func(verbose=verbose, gratuitously_verbose=gratuitously_verbose) else: - s = create_underline(key.capitalize()) - print(s) - # check - generic_versions(key, depends_with_requires=depends_with_requirements) + # s = create_underline(key.capitalize()) + # print(s) + # # check + # generic_versions(key, 
depends_with_requires=depends_with_requirements) # if gratuitously_verbose print out all modules without checks if gratuitously_verbose: s = create_underline(key.capitalize()) print(s) - print_no_extra_checks(key) + # check + generic_versions( + key, depends_with_requires=depends_with_requirements + ) + + # s = create_underline(key.capitalize()) + # print(s) + # print_no_extra_checks(key) else: pass From f1a9b3a1d759fdda5cf9fa2642dce5151f30662e Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 17 Nov 2023 11:20:37 -0800 Subject: [PATCH 37/41] cleaning up old commented out code --- py4DSTEM/utils/configuration_checker.py | 18 +----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index 1994d62fc..8b315058c 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -45,13 +45,9 @@ def get_modules_dict(): dependencies = requires(package_name) # set the dictionary for modules and packages to go into + # optional dependencies will be added as they are discovered modules_dict = { "base": [], - # "acom": [], - # "aiml": [], - # "aiml-cuda": [], - # "cuda": [], - # "numba": [], } # loop over the dependencies for depend in dependencies: @@ -73,10 +69,6 @@ def get_modules_dict(): # if the optional_name is already in the dict then just append it to the list else: modules_dict[optional_name].append(depend) - # # loop over the keys and check if its in there - # for key in modules_dict.keys(): - # if key in depend: - # modules_dict[key].append(depend) # STRIP all the versioning and semi-colons # Define a regular expression pattern for splitting on '>', '>=', '=' delimiter_pattern = re.compile(r">=|>|==|<|<=") @@ -328,10 +320,6 @@ def perform_extra_checks( print(s) func(verbose=verbose, gratuitously_verbose=gratuitously_verbose) else: - # s = create_underline(key.capitalize()) - # print(s) - # # check - # generic_versions(key, 
depends_with_requires=depends_with_requirements) # if gratuitously_verbose print out all modules without checks if gratuitously_verbose: s = create_underline(key.capitalize()) @@ -340,10 +328,6 @@ def perform_extra_checks( generic_versions( key, depends_with_requires=depends_with_requirements ) - - # s = create_underline(key.capitalize()) - # print(s) - # print_no_extra_checks(key) else: pass From ffa70f9e0722e0e42564d073886d133de7bd5299 Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 17 Nov 2023 13:38:40 -0800 Subject: [PATCH 38/41] remove unused imports --- py4DSTEM/utils/configuration_checker.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index 8b315058c..65aa25a76 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -1,8 +1,7 @@ #### this file contains a function/s that will check if various # libaries/compute options are available import importlib -from importlib.metadata import requires, import_module -import importlib +from importlib.metadata import requires import re from importlib.util import find_spec From 0b10f4800a4f7d118597c63abe9f43ed9632b0bb Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Fri, 17 Nov 2023 16:07:45 -0800 Subject: [PATCH 39/41] fixing import test --- py4DSTEM/utils/configuration_checker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index 65aa25a76..b50a21de2 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -435,7 +435,7 @@ def generic_versions(module: str, depends_with_requires: list[str]) -> None: else: try: - version = importlib.metadata.version(module) + version = importlib.metadata.version(module_depend_name) except Exception: version = "Couldn't test version" s = f"{module} imported: {version = } - 
{spec_required} required" From 19914f67bfeafd64004828535e8b225e193c4e24 Mon Sep 17 00:00:00 2001 From: SE Zeltmann Date: Fri, 8 Mar 2024 17:08:35 -0500 Subject: [PATCH 40/41] missing import --- py4DSTEM/braggvectors/diskdetection_aiml_cuda.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py index 0ac03ebb4..14eb5d83a 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py @@ -8,7 +8,7 @@ from emdfile import tqdmnd from py4DSTEM.braggvectors.braggvectors import BraggVectors -from emdfile import PointList +from emdfile import PointList, PointListArray from py4DSTEM.braggvectors.kernels import kernels from py4DSTEM.braggvectors.diskdetection_aiml import _get_latest_model From 939720852ce7775ba38eba75440445a6e1f11892 Mon Sep 17 00:00:00 2001 From: SE Zeltmann Date: Fri, 8 Mar 2024 17:09:05 -0500 Subject: [PATCH 41/41] other missing import --- py4DSTEM/braggvectors/diskdetection_aiml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py4DSTEM/braggvectors/diskdetection_aiml.py b/py4DSTEM/braggvectors/diskdetection_aiml.py index a90e108f7..d61fcab5f 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml.py @@ -12,7 +12,7 @@ from time import time -from emdfile import tqdmnd +from emdfile import tqdmnd, PointListArray from py4DSTEM.braggvectors.braggvectors import BraggVectors from py4DSTEM.data import QPoints from py4DSTEM.process.utils import get_maxima_2D