34 changes: 17 additions & 17 deletions py4DSTEM/io/legacy/legacy13/v13_emd_classes/io.py
@@ -127,61 +127,61 @@ def Metadata_to_h5(metadata, group):
         # None
         if v is None:
             v = "_None"
-            v = np.string_(v)  # convert to byte string
+            v = np.bytes_(v)  # convert to byte string
             dset = grp.create_dataset(k, data=v)
-            dset.attrs["type"] = np.string_("None")
+            dset.attrs["type"] = np.bytes_("None")
 
         # strings
         elif isinstance(v, str):
-            v = np.string_(v)  # convert to byte string
+            v = np.bytes_(v)  # convert to byte string
             dset = grp.create_dataset(k, data=v)
-            dset.attrs["type"] = np.string_("string")
+            dset.attrs["type"] = np.bytes_("string")
 
         # bools
         elif isinstance(v, bool):
             dset = grp.create_dataset(k, data=v, dtype=bool)
-            dset.attrs["type"] = np.string_("bool")
+            dset.attrs["type"] = np.bytes_("bool")
 
         # numbers
         elif isinstance(v, Number):
             dset = grp.create_dataset(k, data=v, dtype=type(v))
-            dset.attrs["type"] = np.string_("number")
+            dset.attrs["type"] = np.bytes_("number")
 
         # arrays
         elif isinstance(v, np.ndarray):
             dset = grp.create_dataset(k, data=v, dtype=v.dtype)
-            dset.attrs["type"] = np.string_("array")
+            dset.attrs["type"] = np.bytes_("array")
 
         # tuples
         elif isinstance(v, tuple):
             # of numbers
             if isinstance(v[0], Number):
                 dset = grp.create_dataset(k, data=v)
-                dset.attrs["type"] = np.string_("tuple")
+                dset.attrs["type"] = np.bytes_("tuple")
 
             # of tuples
             elif any([isinstance(v[i], tuple) for i in range(len(v))]):
                 dset_grp = grp.create_group(k)
-                dset_grp.attrs["type"] = np.string_("tuple_of_tuples")
+                dset_grp.attrs["type"] = np.bytes_("tuple_of_tuples")
                 dset_grp.attrs["length"] = len(v)
                 for i, x in enumerate(v):
                     dset_grp.create_dataset(str(i), data=x)
 
             # of arrays
             elif isinstance(v[0], np.ndarray):
                 dset_grp = grp.create_group(k)
-                dset_grp.attrs["type"] = np.string_("tuple_of_arrays")
+                dset_grp.attrs["type"] = np.bytes_("tuple_of_arrays")
                 dset_grp.attrs["length"] = len(v)
                 for i, ar in enumerate(v):
                     dset_grp.create_dataset(str(i), data=ar, dtype=ar.dtype)
 
             # of strings
             elif isinstance(v[0], str):
                 dset_grp = grp.create_group(k)
-                dset_grp.attrs["type"] = np.string_("tuple_of_strings")
+                dset_grp.attrs["type"] = np.bytes_("tuple_of_strings")
                 dset_grp.attrs["length"] = len(v)
                 for i, s in enumerate(v):
-                    dset_grp.create_dataset(str(i), data=np.string_(s))
+                    dset_grp.create_dataset(str(i), data=np.bytes_(s))
 
             else:
                 er = f"Metadata only supports writing tuples with numeric and array-like arguments; found type {type(v[0])}"
@@ -192,23 +192,23 @@ def Metadata_to_h5(metadata, group):
             # of numbers
             if isinstance(v[0], Number):
                 dset = grp.create_dataset(k, data=v)
-                dset.attrs["type"] = np.string_("list")
+                dset.attrs["type"] = np.bytes_("list")
 
             # of arrays
             elif isinstance(v[0], np.ndarray):
                 dset_grp = grp.create_group(k)
-                dset_grp.attrs["type"] = np.string_("list_of_arrays")
+                dset_grp.attrs["type"] = np.bytes_("list_of_arrays")
                 dset_grp.attrs["length"] = len(v)
                 for i, ar in enumerate(v):
                     dset_grp.create_dataset(str(i), data=ar, dtype=ar.dtype)
 
             # of strings
             elif isinstance(v[0], str):
                 dset_grp = grp.create_group(k)
-                dset_grp.attrs["type"] = np.string_("list_of_strings")
+                dset_grp.attrs["type"] = np.bytes_("list_of_strings")
                 dset_grp.attrs["length"] = len(v)
                 for i, s in enumerate(v):
-                    dset_grp.create_dataset(str(i), data=np.string_(s))
+                    dset_grp.create_dataset(str(i), data=np.bytes_(s))
 
             else:
                 er = f"Metadata only supports writing lists with numeric and array-like arguments; found type {type(v[0])}"
@@ -490,7 +490,7 @@ def PointList_to_h5(pointlist, group):
     # Add data
     for f, t in zip(pointlist.fields, pointlist.types):
         group_current_field = grp.create_dataset(f, data=pointlist.data[f])
-        group_current_field.attrs.create("dtype", np.string_(t))
+        group_current_field.attrs.create("dtype", np.bytes_(t))
         # group_current_field.create_dataset(
         #     "data",
         #     data = pointlist.data[f]
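All of the replacements in this file track the same NumPy 2.0 change: `np.string_` was a deprecated alias of `np.bytes_` and was removed in 2.0, so the swap is name-only and behavior-preserving. A minimal sketch of the write pattern these branches use, with an illustrative file name and key (not from this PR):

```python
# Sketch of the Metadata write pattern after the rename: store the value as a
# byte string and tag the dataset with a "type" attribute for round-tripping.
# "example.h5", "metadata", and "my_key" are illustrative names.
import h5py
import numpy as np

with h5py.File("example.h5", "w") as f:
    grp = f.create_group("metadata")
    v = np.bytes_("some value")  # np.bytes_ == old np.string_ (removed in NumPy 2.0)
    dset = grp.create_dataset("my_key", data=v)
    dset.attrs["type"] = np.bytes_("string")  # type tag checked when reading back
```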
2 changes: 1 addition & 1 deletion py4DSTEM/process/classification/featurization.py
@@ -967,7 +967,7 @@ def _gmm_single(x, cv, components, num_models, random_seed=None, return_all=True
     gmm_list = []
     gmm_labels = []
     gmm_proba = []
-    lowest_bic = np.infty
+    lowest_bic = np.inf
     bic_temp = 0
     if random_seed is None:
         rng = np.random.RandomState(seed=42)
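`np.infty` was likewise an alias (of `np.inf`) that NumPy 2.0 removed. The fixed line seeds a running-minimum BIC comparison; a minimal sketch of that pattern, with made-up candidate scores:

```python
# Sketch of the lowest-BIC selection pattern: start at np.inf so the first
# candidate always wins the comparison. Model names and scores are made up.
import numpy as np

lowest_bic = np.inf  # np.infty is gone in NumPy 2.0; np.inf is the canonical name
best = None
for name, bic in [("gmm_2", 1523.4), ("gmm_3", 1498.7), ("gmm_4", 1510.2)]:
    if bic < lowest_bic:
        lowest_bic = bic
        best = name
print(best, lowest_bic)  # gmm_3 1498.7
```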
4 changes: 2 additions & 2 deletions py4DSTEM/process/utils/elliptical_coords.py
@@ -320,8 +320,8 @@ def elliptical_resample(
 
     # Get (qx,qy) corresponding to the coordinates distorted by the ellipse
     xr, yr = np.mgrid[0:Nx, 0:Ny]
-    xr0 = xr.astype(np.float_) - qx0
-    yr0 = yr.astype(np.float_) - qy0
+    xr0 = xr.astype(np.float64) - qx0
+    yr0 = yr.astype(np.float64) - qy0
     xr = xr0 * np.cos(-theta) - yr0 * np.sin(-theta)
     yr = xr0 * np.sin(-theta) + yr0 * np.cos(-theta)
     qx = qx0 + xr * np.cos(theta) - yr * (b / a) * np.sin(theta)
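`np.float_` was the NumPy 1.x alias for `np.float64`, also removed in 2.0, so the explicit cast keeps the index grids in double precision with no behavior change. A minimal sketch with illustrative grid sizes and center coordinates (not taken from the PR):

```python
# Sketch: build integer index grids and recenter them as float64, as the fixed
# lines do. Nx, Ny, qx0, qy0 are illustrative values.
import numpy as np

Nx, Ny = 4, 5
qx0, qy0 = 1.5, 2.5
xr, yr = np.mgrid[0:Nx, 0:Ny]      # integer-valued index grids, shape (Nx, Ny)
xr0 = xr.astype(np.float64) - qx0  # np.float64 replaces the removed np.float_
yr0 = yr.astype(np.float64) - qy0
print(xr0.dtype, yr0.dtype)        # float64 float64
```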
4 changes: 2 additions & 2 deletions setup.py
@@ -23,11 +23,11 @@
     keywords="STEM,4DSTEM",
     python_requires=">=3.10",
     install_requires=[
-        "numpy >= 1.19, < 2.0",
+        "numpy >= 1.19",
         "scipy >= 1.5.2",
         "h5py >= 3.2.0",
         "hdf5plugin >= 4.1.3",
-        "ncempy >= 1.8.1, <= 1.11.2",
+        "ncempy >= 1.8.1",
         "matplotlib >= 3.2.2",
         "scikit-image >= 0.17.2",
         "scikit-learn >= 0.23.2, < 1.5",
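With the removed aliases gone from the code, the `numpy < 2.0` ceiling (and the matching `ncempy` pin) can be dropped. A minimal sketch, not from this PR, of a shim for a codebase that must import cleanly under both NumPy 1.x and 2.x during the transition:

```python
# Sketch of a NumPy 1.x/2.x compatibility shim. On 2.x, np.string_ no longer
# exists, so the old name is only referenced on the 1.x branch.
import numpy as np

NUMPY_2 = int(np.__version__.split(".")[0]) >= 2
as_bytes = np.bytes_ if NUMPY_2 else np.string_  # same scalar type either way
print(as_bytes("ok"))  # b'ok'
```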