Commit

Change occurrences of % and format() to f-strings

DimitriPapadopoulos committed Oct 31, 2023
1 parent e771c51 commit 3bb2aeb
Showing 18 changed files with 177 additions and 238 deletions.
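As a quick illustration of the pattern applied throughout this commit, the three formatting styles below produce identical output (a minimal sketch with an invented variable, not taken from the diff):

name = "example"
# %-formatting, str.format(), and an f-string all render the same text;
# the f-string interpolates the expression in place and reads most directly
assert "Blob %s not found" % name == "Blob {} not found".format(name) == f"Blob {name} not found"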
5 changes: 3 additions & 2 deletions docs/release.rst
@@ -30,6 +30,9 @@ Docs
Maintenance
~~~~~~~~~~~

+ * Change occurrences of % and format() to f-strings.
+   By :user:`Dimitri Papadopoulos Orfanos <DimitriPapadopoulos>` :issue:`1423`.

* Change occurrence of ``io.open()`` into ``open()``.
By :user:`Dimitri Papadopoulos Orfanos <DimitriPapadopoulos>` :issue:`1421`.

@@ -42,8 +45,6 @@ Maintenance
* Allow ``black`` code formatter to be run with any Python version.
By :user:`David Stansby <dstansby>` :issue:`1549`.

-
-
.. _release_2.16.1:

2.16.1
8 changes: 4 additions & 4 deletions zarr/_storage/absstore.py
@@ -84,10 +84,10 @@ def __init__(

blob_service_kwargs = blob_service_kwargs or {}
client = ContainerClient(
"https://{}.blob.core.windows.net/".format(account_name),
f"https://{account_name}.blob.core.windows.net/",
container,
credential=account_key,
- **blob_service_kwargs
+ **blob_service_kwargs,
)

self.client = client
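Besides the f-string, this hunk adds a trailing comma after **blob_service_kwargs. In Python 3 a trailing comma is valid after an unpacked mapping in a call site, so later argument additions touch only one line. A sketch with a hypothetical stand-in for ContainerClient:

def connect(url, **options):
    # placeholder for a client constructor, only to show the call syntax
    return url, options

# the trailing comma after the unpacked mapping is accepted in calls
connect(
    "https://example.invalid/",
    **{"timeout": 10},
)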
@@ -141,7 +141,7 @@ def __getitem__(self, key):
try:
return self.client.download_blob(blob_name).readall()
except ResourceNotFoundError:
raise KeyError("Blob %s not found" % blob_name)
raise KeyError(f"Blob {blob_name} not found")

def __setitem__(self, key, value):
value = ensure_bytes(value)
@@ -154,7 +154,7 @@ def __delitem__(self, key):
try:
self.client.delete_blob(self._append_path_to_prefix(key))
except ResourceNotFoundError:
raise KeyError("Blob %s not found" % key)
raise KeyError(f"Blob {key} not found")

def __eq__(self, other):
return (
2 changes: 1 addition & 1 deletion zarr/_storage/store.py
@@ -227,7 +227,7 @@ def _validate_key(self, key: str):
# TODO: Possibly allow key == ".zmetadata" too if we write a
# consolidated metadata spec corresponding to this?
):
raise ValueError("keys starts with unexpected value: `{}`".format(key))
raise ValueError(f"key starts with unexpected value: `{key}`")

if key.endswith("/"):
raise ValueError("keys may not end in /")
2 changes: 1 addition & 1 deletion zarr/_storage/v3.py
@@ -570,7 +570,7 @@ def __init__(self, store: StoreLike, metadata_key=meta_root + "consolidated/.zme
consolidated_format = meta.get("zarr_consolidated_format", None)
if consolidated_format != 1:
raise MetadataError(
"unsupported zarr consolidated metadata format: %s" % consolidated_format
f"unsupported zarr consolidated metadata format: {consolidated_format}"
)

# decode metadata
54 changes: 23 additions & 31 deletions zarr/convenience.py
@@ -258,7 +258,7 @@ def save_group(store: StoreLike, *args, zarr_version=None, path=None, **kwargs):
try:
grp = _create_group(_store, path=path, overwrite=True, zarr_version=zarr_version)
for i, arr in enumerate(args):
k = "arr_{}".format(i)
k = f"arr_{i}"
grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version)
for k, arr in kwargs.items():
grp.create_dataset(k, data=arr, overwrite=True, zarr_version=zarr_version)
@@ -498,7 +498,7 @@ def __init__(self, log):
self.log_file = log
else:
raise TypeError(
"log must be a callable function, file path or " "file-like object, found %r" % log
f"log must be a callable function, file path or file-like object, found {log!r}"
)

def __enter__(self):
@@ -525,9 +525,9 @@ def _log_copy_summary(log, dry_run, n_copied, n_skipped, n_bytes_copied):
message = "dry run: "
else:
message = "all done: "
message += "{:,} copied, {:,} skipped".format(n_copied, n_skipped)
message += f"{n_copied:,} copied, {n_skipped:,} skipped"
if not dry_run:
message += ", {:,} bytes copied".format(n_bytes_copied)
message += f", {n_bytes_copied:,} bytes copied"
log(message)
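The hunk above shows that format specifications survive the conversion: everything after the colon in a replacement field means the same thing inside an f-string. A short sketch with invented values:

n_copied, ratio = 1234, 5678.9
# the "," thousands separator and ".1f" fixed-point spec carry over unchanged
assert "{:,} copied".format(n_copied) == f"{n_copied:,} copied"
assert "%.1f" % ratio == f"{ratio:.1f}"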


@@ -656,9 +656,7 @@ def copy_store(
# check if_exists parameter
valid_if_exists = ["raise", "replace", "skip"]
if if_exists not in valid_if_exists:
- raise ValueError(
- "if_exists must be one of {!r}; found {!r}".format(valid_if_exists, if_exists)
- )
+ raise ValueError(f"if_exists must be one of {valid_if_exists!r}; found {if_exists!r}")

# setup counting variables
n_copied = n_skipped = n_bytes_copied = 0
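The !r conversion seen above also transfers verbatim: in both str.format() and f-strings it applies repr() to the value before substitution. A minimal sketch (values invented, not from the diff):

if_exists = "overwrite"
# !r quotes the string exactly as repr() would
assert f"found {if_exists!r}" == "found {!r}".format(if_exists) == "found 'overwrite'"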
@@ -721,20 +719,20 @@ def copy_store(
if if_exists != "replace":
if dest_key in dest:
if if_exists == "raise":
raise CopyError("key {!r} exists in destination".format(dest_key))
raise CopyError(f"key {dest_key!r} exists in destination")
elif if_exists == "skip":
do_copy = False

# take action
if do_copy:
log("copy {}".format(descr))
log(f"copy {descr}")
if not dry_run:
data = source[source_key]
n_bytes_copied += buffer_size(data)
dest[dest_key] = data
n_copied += 1
else:
log("skip {}".format(descr))
log(f"skip {descr}")
n_skipped += 1

# log a final message with a summary of what happened
@@ -745,7 +743,7 @@

def _check_dest_is_group(dest):
if not hasattr(dest, "create_dataset"):
raise ValueError("dest must be a group, got {!r}".format(dest))
raise ValueError(f"dest must be a group, got {dest!r}")


def copy(
@@ -757,7 +755,7 @@
log=None,
if_exists="raise",
dry_run=False,
- **create_kws
+ **create_kws,
):
"""Copy the `source` array or group into the `dest` group.
@@ -890,7 +888,7 @@ def copy(
without_attrs=without_attrs,
if_exists=if_exists,
dry_run=dry_run,
- **create_kws
+ **create_kws,
)

# log a final message with a summary of what happened
@@ -912,11 +910,9 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
# check if_exists parameter
valid_if_exists = ["raise", "replace", "skip", "skip_initialized"]
if if_exists not in valid_if_exists:
- raise ValueError(
- "if_exists must be one of {!r}; found {!r}".format(valid_if_exists, if_exists)
- )
+ raise ValueError(f"if_exists must be one of {valid_if_exists!r}; found {if_exists!r}")
if dest_h5py and if_exists == "skip_initialized":
raise ValueError("{!r} can only be used when copying to zarr".format(if_exists))
raise ValueError(f"{if_exists!r} can only be used when copying to zarr")

# determine name to copy to
if name is None:
@@ -936,9 +932,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
exists = dest is not None and name in dest
if exists:
if if_exists == "raise":
- raise CopyError(
- "an object {!r} already exists in destination " "{!r}".format(name, dest.name)
- )
+ raise CopyError(f"an object {name!r} already exists in destination {dest.name!r}")
elif if_exists == "skip":
do_copy = False
elif if_exists == "skip_initialized":
@@ -950,7 +944,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
if do_copy:

# log a message about what we're going to do
log("copy {} {} {}".format(source.name, source.shape, source.dtype))
log(f"copy {source.name} {source.shape} {source.dtype}")

if not dry_run:

@@ -1019,7 +1013,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
n_copied += 1

else:
log("skip {} {} {}".format(source.name, source.shape, source.dtype))
log(f"skip {source.name} {source.shape} {source.dtype}")
n_skipped += 1

elif root or not shallow:
@@ -1030,17 +1024,15 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
exists_array = dest is not None and name in dest and hasattr(dest[name], "shape")
if exists_array:
if if_exists == "raise":
- raise CopyError(
- "an array {!r} already exists in destination " "{!r}".format(name, dest.name)
- )
+ raise CopyError(f"an array {name!r} already exists in destination {dest.name!r}")
elif if_exists == "skip":
do_copy = False

# take action
if do_copy:

# log action
log("copy {}".format(source.name))
log(f"copy {source.name}")

if not dry_run:

@@ -1076,7 +1068,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
without_attrs=without_attrs,
if_exists=if_exists,
dry_run=dry_run,
- **create_kws
+ **create_kws,
)
n_copied += c
n_skipped += s
@@ -1085,7 +1077,7 @@ def _copy(log, source, dest, name, root, shallow, without_attrs, if_exists, dry_
n_copied += 1

else:
log("skip {}".format(source.name))
log(f"skip {source.name}")
n_skipped += 1

return n_copied, n_skipped, n_bytes_copied
@@ -1099,7 +1091,7 @@ def copy_all(
log=None,
if_exists="raise",
dry_run=False,
- **create_kws
+ **create_kws,
):
"""Copy all children of the `source` group into the `dest` group.
@@ -1201,7 +1193,7 @@ def copy_all(
without_attrs=without_attrs,
if_exists=if_exists,
dry_run=dry_run,
- **create_kws
+ **create_kws,
)
n_copied += c
n_skipped += s
@@ -1336,7 +1328,7 @@ def open_consolidated(store: StoreLike, metadata_key=".zmetadata", mode="r+", **
store, storage_options=kwargs.get("storage_options"), mode=mode, zarr_version=zarr_version
)
if mode not in {"r", "r+"}:
raise ValueError("invalid mode, expected either 'r' or 'r+'; found {!r}".format(mode))
raise ValueError(f"invalid mode, expected either 'r' or 'r+'; found {mode!r}")

path = kwargs.pop("path", None)
if store._store_version == 2:
20 changes: 10 additions & 10 deletions zarr/core.py
@@ -2448,11 +2448,11 @@ def _encode_chunk(self, chunk):

def __repr__(self):
t = type(self)
r = "<{}.{}".format(t.__module__, t.__name__)
r = f"<{t.__module__}.{t.__name__}"
if self.name:
r += " %r" % self.name
r += " %s" % str(self.shape)
r += " %s" % self.dtype
r += f" {self.name!r}"
r += f" {str(self.shape)}"
r += f" {self.dtype}"
if self._read_only:
r += " read-only"
r += ">"
@@ -2488,11 +2488,11 @@ def info_items(self):

def _info_items_nosync(self):
def typestr(o):
return "{}.{}".format(type(o).__module__, type(o).__name__)
return f"{type(o).__module__}.{type(o).__name__}"

def bytestr(n):
if n > 2**10:
return "{} ({})".format(n, human_readable_size(n))
return f"{n} ({human_readable_size(n)})"
else:
return str(n)

@@ -2503,7 +2503,7 @@ def bytestr(n):
items += [("Name", self.name)]
items += [
("Type", typestr(self)),
("Data type", "%s" % self.dtype),
("Data type", str(self.dtype)),
("Shape", str(self.shape)),
("Chunk shape", str(self.chunks)),
("Order", self.order),
@@ -2513,7 +2513,7 @@ def bytestr(n):
# filters
if self.filters:
for i, f in enumerate(self.filters):
items += [("Filter [%s]" % i, repr(f))]
items += [(f"Filter [{i}]", repr(f))]

# compressor
items += [("Compressor", repr(self.compressor))]
@@ -2530,9 +2530,9 @@ def bytestr(n):
if self.nbytes_stored > 0:
items += [
("No. bytes stored", bytestr(self.nbytes_stored)),
("Storage ratio", "%.1f" % (self.nbytes / self.nbytes_stored)),
("Storage ratio", f"{self.nbytes / self.nbytes_stored:.1f}"),
]
items += [("Chunks initialized", "{}/{}".format(self.nchunks_initialized, self.nchunks))]
items += [("Chunks initialized", f"{self.nchunks_initialized}/{self.nchunks}")]

return items
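One change above goes to str() rather than an f-string: a format string that is nothing but "%s" reduces to a plain str() call. A minimal sketch with an invented value:

value = 3.14
# all three spellings produce the same text
assert "%s" % value == str(value) == f"{value}"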

4 changes: 2 additions & 2 deletions zarr/creation.py
@@ -282,7 +282,7 @@ def _kwargs_compat(compressor, fill_value, kwargs):
compressor = compression

else:
raise ValueError("bad value for compression: %r" % compression)
raise ValueError(f"bad value for compression: {compression!r}")

# handle 'fillvalue'
if "fillvalue" in kwargs:
@@ -292,7 +292,7 @@ def _kwargs_compat(compressor, fill_value, kwargs):

# ignore other keyword arguments
for k in kwargs:
warn("ignoring keyword argument %r" % k)
warn(f"ignoring keyword argument {k!r}")

return compressor, fill_value

4 changes: 1 addition & 3 deletions zarr/errors.py
@@ -67,9 +67,7 @@ def __init__(self):


def err_too_many_indices(selection, shape):
- raise IndexError(
- "too many indices for array; expected {}, got {}".format(len(shape), len(selection))
- )
+ raise IndexError(f"too many indices for array; expected {len(shape)}, got {len(selection)}")


class VindexInvalidSelectionError(_BaseZarrIndexError):
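As the err_too_many_indices conversion shows, arbitrary expressions such as len() calls can sit directly inside f-string braces; they are evaluated in the enclosing scope when the literal runs. A short sketch with invented arguments:

shape, selection = (10, 10), (0, 1, 2)
msg = f"too many indices for array; expected {len(shape)}, got {len(selection)}"
assert msg == "too many indices for array; expected 2, got 3"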
