Bump the actions group with 2 updates #4

Open · wants to merge 2 commits into main
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -25,7 +25,7 @@ jobs:
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- uses: pre-commit/action@v3.0.0
- uses: pre-commit/action@v3.0.1
with:
extra_args: --hook-stage manual --all-files
- name: Run PyLint
@@ -66,4 +66,4 @@ jobs:
--durations=20

- name: Upload coverage report
uses: codecov/codecov-action@v3.1.4
uses: codecov/codecov-action@v4.2.0
87 changes: 62 additions & 25 deletions src/n5py/__init__.py
@@ -18,16 +18,21 @@
import os
import struct
import sys
from typing import Any, Dict, Optional, cast
import warnings
from typing import Any, Optional, cast

import numpy as np
from numcodecs.abc import Codec
from numcodecs.compat import ndarray_copy
from numcodecs.registry import get_codec, register_codec

from zarr.meta import ZARR_FORMAT, json_dumps, json_loads
from zarr.storage import FSStore, NestedDirectoryStore, _prog_ckey, _prog_number, normalize_storage_path
from zarr.storage import (
FSStore,
NestedDirectoryStore,
_prog_ckey,
_prog_number,
normalize_storage_path,
)
from zarr.storage import array_meta_key as zarr_array_meta_key
from zarr.storage import attrs_key as zarr_attrs_key
from zarr.storage import group_meta_key as zarr_group_meta_key
@@ -94,7 +99,9 @@ def __getitem__(self, key: str) -> bytes:
elif key.endswith(zarr_array_meta_key):
key_new = key.replace(zarr_array_meta_key, n5_attrs_key)
top_level = key == zarr_array_meta_key
value = array_metadata_to_zarr(self._load_n5_attrs(key_new), top_level=top_level)
value = array_metadata_to_zarr(
self._load_n5_attrs(key_new), top_level=top_level
)
return json_dumps(value)

elif key.endswith(zarr_attrs_key):
@@ -127,7 +134,9 @@ def __setitem__(self, key: str, value: Any):
key_new = key.replace(zarr_array_meta_key, n5_attrs_key)
top_level = key == zarr_array_meta_key
n5_attrs = self._load_n5_attrs(key_new)
n5_attrs.update(**array_metadata_to_n5(json_loads(value), top_level=top_level))
n5_attrs.update(
**array_metadata_to_n5(json_loads(value), top_level=top_level)
)
value = json_dumps(n5_attrs)

elif key.endswith(zarr_attrs_key):
@@ -138,7 +147,9 @@ def __setitem__(self, key: str, value: Any):

for k in n5_keywords:
if k in zarr_attrs:
warnings.warn(f"Attribute {k} is a reserved N5 keyword", UserWarning)
warnings.warn(
f"Attribute {k} is a reserved N5 keyword", UserWarning
)

# remove previous user attributes
for k in list(n5_attrs.keys()):
@@ -245,7 +256,7 @@ def listdir(self, path: Optional[str] = None):
else:
return children

def _load_n5_attrs(self, path: str) -> Dict[str, Any]:
def _load_n5_attrs(self, path: str) -> dict[str, Any]:
try:
s = super().__getitem__(path)
return json_loads(s)
@@ -379,7 +390,9 @@ def __getitem__(self, key: str) -> bytes:
elif key.endswith(zarr_array_meta_key):
key_new = key.replace(zarr_array_meta_key, self._array_meta_key)
top_level = key == zarr_array_meta_key
value = array_metadata_to_zarr(self._load_n5_attrs(key_new), top_level=top_level)
value = array_metadata_to_zarr(
self._load_n5_attrs(key_new), top_level=top_level
)
return json_dumps(value)

elif key.endswith(zarr_attrs_key):
@@ -412,7 +425,9 @@ def __setitem__(self, key: str, value: Any):
key_new = key.replace(zarr_array_meta_key, self._array_meta_key)
top_level = key == zarr_array_meta_key
n5_attrs = self._load_n5_attrs(key_new)
n5_attrs.update(**array_metadata_to_n5(json_loads(value), top_level=top_level))
n5_attrs.update(
**array_metadata_to_n5(json_loads(value), top_level=top_level)
)

value = json_dumps(n5_attrs)

@@ -424,7 +439,9 @@ def __setitem__(self, key: str, value: Any):

for k in n5_keywords:
if k in zarr_attrs.keys():
warnings.warn(f"Attribute {k} is a reserved N5 keyword", UserWarning)
warnings.warn(
f"Attribute {k} is a reserved N5 keyword", UserWarning
)

# replace previous user attributes
for k in list(n5_attrs.keys()):
@@ -583,7 +600,25 @@ def invert_chunk_coords(key: str):
return key


def group_metadata_to_n5(group_metadata: Dict[str, Any]) -> Dict[str, Any]:
def group_metadata_to_n5(group_metadata: dict[str, Any]) -> dict[str, Any]:
"""Convert group metadata from zarr to N5 format."""
del group_metadata["zarr_format"]
# TODO: This should only exist at the top-level
group_metadata["n5"] = N5_FORMAT
return group_metadata


def group_metadata_to_zarr(group_metadata: Dict[str, Any]) -> Dict[str, Any]:
def group_metadata_to_zarr(group_metadata: dict[str, Any]) -> dict[str, Any]:
"""Convert group metadata from N5 to zarr format."""
# This only exists at the top level
group_metadata.pop("n5", None)
group_metadata["zarr_format"] = ZARR_FORMAT
return group_metadata


def array_metadata_to_n5(array_metadata: Dict[str, Any], top_level=False) -> Dict[str, Any]:
def array_metadata_to_n5(
array_metadata: dict[str, Any], top_level=False
) -> dict[str, Any]:
"""Convert array metadata from zarr to N5 format. If the `top_level` keyword argument is True,
then the `N5` : N5_FORMAT key : value pair will be inserted into the metadata."""

@@ -611,14 +630,19 @@ def array_metadata_to_n5(array_metadata: Dict[str, Any], top_level=False) -> Dic
try:
dtype = np.dtype(array_metadata["dataType"])
except TypeError:
raise TypeError(f"Data type {array_metadata['dataType']} is not supported by N5")
raise TypeError(
f"Data type {array_metadata['dataType']} is not supported by N5"
)

array_metadata["dataType"] = dtype.name
array_metadata["dimensions"] = array_metadata["dimensions"][::-1]
array_metadata["blockSize"] = array_metadata["blockSize"][::-1]

if "fill_value" in array_metadata:
if array_metadata["fill_value"] != 0 and array_metadata["fill_value"] is not None:
if (
array_metadata["fill_value"] != 0
and array_metadata["fill_value"] is not None
):
raise ValueError(
f"""Received fill_value = {array_metadata['fill_value']},
but N5 only supports fill_value = 0"""
@@ -634,7 +658,9 @@ def array_metadata_to_n5(array_metadata: Dict[str, Any], top_level=False) -> Dic

if "filters" in array_metadata:
if array_metadata["filters"] != [] and array_metadata["filters"] is not None:
raise ValueError("Received filters, but N5 storage does not support zarr filters")
raise ValueError(
"Received filters, but N5 storage does not support zarr filters"
)
del array_metadata["filters"]

assert "compression" in array_metadata
@@ -649,8 +675,8 @@ def array_metadata_to_n5(array_metadata: Dict[str, Any], top_level=False) -> Dic


def array_metadata_to_zarr(
array_metadata: Dict[str, Any], top_level: bool = False
) -> Dict[str, Any]:
array_metadata: dict[str, Any], top_level: bool = False
) -> dict[str, Any]:
"""Convert array metadata from N5 to zarr format.
If the `top_level` keyword argument is True, then the `N5` key will be removed from metadata"""
for t, f in zarr_to_n5_keys:
@@ -679,7 +705,7 @@ def array_metadata_to_zarr(
return array_metadata


def attrs_to_zarr(attrs: Dict[str, Any]) -> Dict[str, Any]:
def attrs_to_zarr(attrs: dict[str, Any]) -> dict[str, Any]:
"""Get all zarr attributes from an N5 attributes dictionary (i.e.,
all non-keyword attributes)."""

Expand All @@ -691,7 +717,9 @@ def attrs_to_zarr(attrs: Dict[str, Any]) -> Dict[str, Any]:
return attrs


def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
def compressor_config_to_n5(
compressor_config: Optional[dict[str, Any]],
) -> dict[str, Any]:
if compressor_config is None:
return {"type": "raw"}
else:
Expand Down Expand Up @@ -751,7 +779,9 @@ def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict
return n5_config


def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dict[str, Any]]:
def compressor_config_to_zarr(
compressor_config: dict[str, Any],
) -> Optional[dict[str, Any]]:
codec_id = compressor_config["type"]
zarr_config = {"id": codec_id}

@@ -808,10 +838,16 @@ def __init__(self, dtype, chunk_shape, compressor_config=None, compressor=None):

if compressor:
if compressor_config is not None:
raise ValueError("Only one of compressor_config or compressor should be given.")
raise ValueError(
"Only one of compressor_config or compressor should be given."
)
compressor_config = compressor.get_config()

if compressor_config is None and compressor is None or compressor_config["id"] == "raw":
if (
compressor_config is None
and compressor is None
or compressor_config["id"] == "raw"
):
self.compressor_config = None
self._compressor = None
else:
@@ -884,7 +920,8 @@ def _create_header(chunk):
def _read_header(chunk):
num_dims = struct.unpack(">H", chunk[2:4])[0]
shape = tuple(
struct.unpack(">I", chunk[i : i + 4])[0] for i in range(4, num_dims * 4 + 4, 4)
struct.unpack(">I", chunk[i : i + 4])[0]
for i in range(4, num_dims * 4 + 4, 4)
)[::-1]

len_header = 4 + num_dims * 4
@@ -908,4 +945,4 @@ def _from_big_endian(self, data):
return a.astype(self.dtype)


register_codec(N5ChunkWrapper, N5ChunkWrapper.codec_id)
register_codec(N5ChunkWrapper, N5ChunkWrapper.codec_id)
6 changes: 4 additions & 2 deletions tests/test_n5.py
@@ -23,10 +23,12 @@ def test_make_n5_chunk_wrapper():
dtype,
chunk_shape=chunk_shape,
compressor_config=codec.get_config(),
compressor=codec
compressor=codec,
)

wrapper_a = N5ChunkWrapper(dtype, chunk_shape=chunk_shape, compressor_config=codec.get_config())
wrapper_a = N5ChunkWrapper(
dtype, chunk_shape=chunk_shape, compressor_config=codec.get_config()
)
wrapper_b = N5ChunkWrapper(dtype, chunk_shape=chunk_shape, compressor=codec)
assert wrapper_a == wrapper_b
