Skip to content

Commit

Permalink
Merge pull request #69 from alimanfoo/issue_65
Browse files Browse the repository at this point in the history
Group supports item deletion; resolves #65
  • Loading branch information
alimanfoo committed Sep 9, 2016
2 parents 9a6de6e + a629da2 commit 8c61cbc
Show file tree
Hide file tree
Showing 6 changed files with 65 additions and 50 deletions.
2 changes: 2 additions & 0 deletions docs/release.rst
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
Release notes
=============

* Group objects now support member deletion via ``del`` statement
  (`#65 <https://github.com/alimanfoo/zarr/issues/65>`_)
* Added :class:`zarr.storage.TempStore` class for convenience to provide
storage via a temporary directory
(`#59 <https://github.com/alimanfoo/zarr/issues/59>`_)
Expand Down
17 changes: 6 additions & 11 deletions zarr/creation.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,20 +289,15 @@ def array(data, **kwargs):
data = np.asanyarray(data)

# setup dtype
dtype = kwargs.pop('dtype', None)
if dtype is None:
dtype = data.dtype
kwargs.setdefault('dtype', data.dtype)

# setup shape
shape = data.shape

# setup chunks
chunks = kwargs.pop('chunks', None)
if chunks is None:
_, chunks = _get_shape_chunks(data)
# setup shape and chunks
shape, chunks = _get_shape_chunks(data)
kwargs['shape'] = data.shape
kwargs.setdefault('chunks', chunks)

# instantiate array
z = create(shape=shape, chunks=chunks, dtype=dtype, **kwargs)
z = create(**kwargs)

# fill with data
z[:] = data
Expand Down
61 changes: 26 additions & 35 deletions zarr/hierarchy.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
from collections import Mapping
from collections import MutableMapping


import numpy as np
Expand All @@ -9,7 +9,7 @@
from zarr.attrs import Attributes
from zarr.core import Array
from zarr.storage import contains_array, contains_group, init_group, \
DictStore, DirectoryStore, group_meta_key, attrs_key, listdir
DictStore, DirectoryStore, group_meta_key, attrs_key, listdir, rmdir
from zarr.creation import array, create, empty, zeros, ones, full, \
empty_like, zeros_like, ones_like, full_like
from zarr.util import normalize_storage_path, normalize_shape
Expand All @@ -18,7 +18,7 @@
from zarr.meta import decode_group_metadata


class Group(Mapping):
class Group(MutableMapping):
"""Instantiate a group from an initialized store.
Parameters
Expand Down Expand Up @@ -303,6 +303,20 @@ def __getitem__(self, item):
else:
raise KeyError(item)

def __setitem__(self, item, value):
    """Create an array named *item* containing *value*, replacing any
    existing member with that name."""
    # Delegates to Group.array() with overwrite=True, so assignment
    # clobbers an existing array or group at the same path.
    self.array(item, value, overwrite=True)
def __delitem__(self, item):
    """Delete the member (array or sub-group) named *item*."""
    # Route the mutation through _write_op (presumably to apply any
    # configured synchronizer — confirm against _write_op); the actual
    # removal happens in _delitem_nosync.
    return self._write_op(self._delitem_nosync, item)

def _delitem_nosync(self, item):
    """Remove the array or group at *item* from the store.

    No synchronization is performed here; callers go through
    ``__delitem__`` / ``_write_op``.

    Raises
    ------
    KeyError
        If no array or group exists under *item*.
    """
    member_path = self._item_path(item)
    # Guard clause: only an existing array or group can be deleted.
    found = (contains_array(self._store, member_path) or
             contains_group(self._store, member_path))
    if not found:
        raise KeyError(item)
    rmdir(self._store, member_path)

def group_keys(self):
"""Return an iterator over member names for groups only.
Expand Down Expand Up @@ -494,10 +508,7 @@ def require_groups(self, *names):
"""Convenience method to require multiple groups in a single call."""
return tuple(self.require_group(name) for name in names)

def create_dataset(self, name, data=None, shape=None, chunks=None,
dtype=None, compressor='default', fill_value=0,
order='C', synchronizer=None, filters=None,
overwrite=False, cache_metadata=True, **kwargs):
def create_dataset(self, name, **kwargs):
"""Create an array.
Parameters
Expand Down Expand Up @@ -550,43 +561,23 @@ def create_dataset(self, name, data=None, shape=None, chunks=None,
""" # flake8: noqa

return self._write_op(self._create_dataset_nosync, name, data=data,
shape=shape, chunks=chunks, dtype=dtype,
compressor=compressor, fill_value=fill_value,
order=order, synchronizer=synchronizer,
filters=filters, overwrite=overwrite,
cache_metadata=cache_metadata, **kwargs)
return self._write_op(self._create_dataset_nosync, name, **kwargs)

def _create_dataset_nosync(self, name, data=None, shape=None, chunks=None,
dtype=None, compressor='default',
fill_value=0, order='C', synchronizer=None,
filters=None, overwrite=False,
cache_metadata=True, **kwargs):
def _create_dataset_nosync(self, name, data=None, **kwargs):

path = self._item_path(name)

# determine synchronizer
if synchronizer is None:
synchronizer = self._synchronizer
kwargs.setdefault('synchronizer', self._synchronizer)

# create array
if data is not None:
a = array(data, chunks=chunks, dtype=dtype,
compressor=compressor, fill_value=fill_value,
order=order, synchronizer=synchronizer,
store=self._store, path=path,
chunk_store=self._chunk_store, filters=filters,
overwrite=overwrite, cache_metadata=cache_metadata,
**kwargs)
if data is None:
a = create(store=self._store, path=path,
chunk_store=self._chunk_store, **kwargs)

else:
a = create(shape=shape, chunks=chunks, dtype=dtype,
compressor=compressor, fill_value=fill_value,
order=order, synchronizer=synchronizer,
store=self._store, path=path,
chunk_store=self._chunk_store, filters=filters,
overwrite=overwrite, cache_metadata=cache_metadata,
**kwargs)
a = array(data, store=self._store, path=path,
chunk_store=self._chunk_store, **kwargs)

return a

Expand Down
4 changes: 2 additions & 2 deletions zarr/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -891,8 +891,8 @@ def getsize(self, path=None):
return info.compress_size
except KeyError:
err_path_not_found(path)
else:
return 0
else:
return 0


def migrate_1to2(store):
Expand Down
2 changes: 2 additions & 0 deletions zarr/tests/test_creation.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,8 @@ def test_array():
eq(z.dtype, z2.dtype)
assert_array_equal(z[:], z2[:])

# with chunky array-likes

b = np.arange(1000).reshape(100, 10)
c = MockBcolzArray(b, 10)
z3 = array(c)
Expand Down
29 changes: 27 additions & 2 deletions zarr/tests/test_hierarchy.py
Original file line number Diff line number Diff line change
Expand Up @@ -507,8 +507,33 @@ def test_group_repr(self):

def test_setitem(self):
g = self.create_group()
with assert_raises(TypeError):
g['foo'] = 'bar'
try:
data = np.arange(100)
g['foo'] = data
assert_array_equal(data, g['foo'])
data = np.arange(200)
g['foo'] = data
assert_array_equal(data, g['foo'])
except NotImplementedError:
pass

def test_delitem(self):
    # Group member deletion via the ``del`` statement (issue #65).
    g = self.create_group()
    g.create_group('foo')
    g.create_dataset('bar/baz', shape=100, chunks=10)
    assert 'foo' in g
    assert 'bar' in g
    assert 'bar/baz' in g
    try:
        del g['bar']
        # deleting a nonexistent member must raise KeyError
        with assert_raises(KeyError):
            del g['xxx']
    except NotImplementedError:
        # some store implementations raise NotImplementedError for
        # deletion — tolerated here so the test runs for all stores
        pass
    else:
        # deletion removes the member and its descendants;
        # unrelated siblings are untouched
        assert 'foo' in g
        assert 'bar' not in g
        assert 'bar/baz' not in g

def test_array_creation(self):
grp = self.create_group()
Expand Down

0 comments on commit 8c61cbc

Please sign in to comment.