diff --git a/.travis.yml b/.travis.yml index 3458ab9dc8..2b5f50809c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,8 +15,6 @@ env: - TEST_TARGET=default - TEST_TARGET=default TEST_MINIMAL=true - TEST_TARGET=coding - - TEST_TARGET=example - - TEST_TARGET=doctest git: depth: 10000 @@ -107,7 +105,7 @@ install: script: - if [[ $TEST_TARGET == 'default' ]]; then - python -m iris.tests.runner --default-tests --system-tests --print-failed-images; + python -m unittest discover -v lib/iris/tests/integration/temp_dask; fi - if [[ $TEST_TARGET == 'example' ]]; then python -m iris.tests.runner --example-tests --print-failed-images; diff --git a/conda-requirements.txt b/conda-requirements.txt index 3324c5fbc4..cd89693e57 100644 --- a/conda-requirements.txt +++ b/conda-requirements.txt @@ -10,6 +10,7 @@ numpy pyke udunits2 cf_units +dask # Iris build dependencies setuptools diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py new file mode 100644 index 0000000000..87dcff2ba0 --- /dev/null +++ b/lib/iris/_lazy_data.py @@ -0,0 +1,79 @@ +# (C) British Crown Copyright 2017, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +""" +Routines for lazy data handling. + +To avoid replicating implementation-dependent test and conversion code. 
+ +""" +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +import dask.array as da + + +def is_lazy_data(data): + """ + Return whether the argument is an Iris 'lazy' data array. + + At present, this means simply a Dask array. + We determine this by checking for a "compute" property. + + """ + return hasattr(data, 'compute') + + +def as_concrete_data(data): + """ + Return the actual content of the argument, as a numpy array. + + If lazy, return the realised data, otherwise return the argument unchanged. + + """ + if is_lazy_data(data): + data = data.compute() + return data + + +# A magic value, borrowed from biggus +_MAX_CHUNK_SIZE = 8 * 1024 * 1024 * 2 + + +def as_lazy_data(data): + """ + Return a lazy equivalent of the argument, as a lazy array. + + For an existing dask array, return it unchanged. + Otherwise, return the argument wrapped with dask.array.from_array. + This assumes the underlying object has numpy-array-like properties. + + """ + # + # NOTE: there are still some doubts here about what forms of indexing are + # valid. + # Call an integer, slice, ellipsis or new-axis object a "simple" index, and + # other cases "compound" : a list, tuple, or array of integers. + # ( Except, a length-1 tuple, list or array might count as "simple" ? ) + # If there is at most one compound index, I think we are ok -- i.e. all + # interpretations should deliver the same. + # If there is *more than one* "compound" index there is potential for + # trouble. + # NOTE#2: cube indexing processes the indices, which may also be relevant. 
+ # + if not is_lazy_data(data): + data = da.from_array(data, chunks=_MAX_CHUNK_SIZE) + return data diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 79fb074ea0..c121849dc9 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2016, Met Office +# (C) British Crown Copyright 2010 - 2017, Met Office # # This file is part of Iris. # @@ -46,6 +46,7 @@ import iris.coords import iris._concatenate import iris._constraints +from iris._lazy_data import is_lazy_data, as_lazy_data, as_concrete_data import iris._merge import iris.exceptions import iris.util @@ -713,7 +714,7 @@ def __init__(self, data, standard_name=None, long_name=None, if isinstance(data, six.string_types): raise TypeError('Invalid data type: {!r}.'.format(data)) - if not isinstance(data, (biggus.Array, ma.MaskedArray)): + if not is_lazy_data(data): data = np.asarray(data) self._my_data = data @@ -1606,32 +1607,33 @@ def ndim(self): def lazy_data(self, array=None): """ - Return a :class:`biggus.Array` representing the - multi-dimensional data of the Cube, and optionally provide a - new array of values. + Return a lazy array representing the Cube data. + + Optionally, provide a new lazy array to assign as the cube data. + This must also be a lazy array, according to + :meth:`iris._lazy_data.is_lazy_data`. Accessing this method will never cause the data to be loaded. Similarly, calling methods on, or indexing, the returned Array will not cause the Cube to have loaded data. If the data have already been loaded for the Cube, the returned - Array will be a :class:`biggus.NumpyArrayAdapter` which wraps - the numpy array from `self.data`. + Array will be a lazy array wrapper, generated by a call to + :meth:`iris._lazy_data.as_lazy_data`. Kwargs: - * array (:class:`biggus.Array` or None): + * array (lazy array or None): When this is not None it sets the multi-dimensional data of the cube to the given value. 
Returns: - A :class:`biggus.Array` representing the multi-dimensional - data of the Cube. + A lazy array, representing the Cube data array. """ if array is not None: - if not isinstance(array, biggus.Array): - raise TypeError('new values must be a biggus.Array') + if not is_lazy_data(array): + raise TypeError('new values must be a lazy array') if self.shape != array.shape: # The _ONLY_ data reshape permitted is converting a # 0-dimensional array into a 1-dimensional array of @@ -1643,8 +1645,8 @@ def lazy_data(self, array=None): self._my_data = array else: array = self._my_data - if not isinstance(array, biggus.Array): - array = biggus.NumpyArrayAdapter(array) + if not is_lazy_data(array): + array = as_lazy_data(array) return array @property @@ -1681,9 +1683,9 @@ def data(self): """ data = self._my_data - if not isinstance(data, np.ndarray): + if is_lazy_data(data): try: - data = data.masked_array() + data = as_concrete_data(data) except MemoryError: msg = "Failed to create the cube's data as there was not" \ " enough memory available.\n" \ @@ -1694,7 +1696,8 @@ def data(self): msg = msg.format(self.shape, data.dtype) raise MemoryError(msg) # Unmask the array only if it is filled. - if isinstance(data, np.ndarray) and ma.count_masked(data) == 0: + if (isinstance(data, np.ma.masked_array) and + ma.count_masked(data) == 0): data = data.data # data may be a numeric type, so ensure an np.ndarray is returned self._my_data = np.asanyarray(data) @@ -1715,7 +1718,7 @@ def data(self, value): self._my_data = data def has_lazy_data(self): - return isinstance(self._my_data, biggus.Array) + return is_lazy_data(self._my_data) @property def dim_coords(self): diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index ea921dca20..fbcfe7e3f2 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2016, Met Office +# (C) British Crown Copyright 2010 - 2017, Met Office # # This file is part of Iris. 
# @@ -33,7 +33,6 @@ import struct import warnings -import biggus import cf_units import numpy as np import numpy.ma as ma @@ -44,6 +43,7 @@ import iris.fileformats.rules import iris.fileformats.pp_rules import iris.coord_systems +from iris._lazy_data import is_lazy_data, as_concrete_data, as_lazy_data try: import mo_pack @@ -1286,11 +1286,10 @@ def data(self): """ # Cache the real data on first use - if isinstance(self._data, biggus.Array): - data = self._data.masked_array() - if ma.count_masked(data) == 0: - data = data.data - self._data = data + # N.B. this throws away the original lazy object. + if is_lazy_data(self._data): + # Get the data as a numpy array. + self._data = as_concrete_data(self._data) return self._data @data.setter @@ -1642,12 +1641,8 @@ def __eq__(self, other): for attr in self.__slots__: attrs = [hasattr(self, attr), hasattr(other, attr)] if all(attrs): - self_attr = getattr(self, attr) - other_attr = getattr(other, attr) - if isinstance(self_attr, biggus.NumpyArrayAdapter): - self_attr = self_attr.concrete - if isinstance(other_attr, biggus.NumpyArrayAdapter): - other_attr = other_attr.concrete + self_attr = as_concrete_data(getattr(self, attr)) + other_attr = as_concrete_data(getattr(other, attr)) if not np.all(self_attr == other_attr): result = False break @@ -1866,7 +1861,7 @@ def _interpret_fields(fields): def _create_field_data(field, data_shape, land_mask): """ Modifies a field's ``_data`` attribute either by: - * converting DeferredArrayBytes into a biggus array, + * converting DeferredArrayBytes into a lazy array, * converting LoadedArrayBytes into an actual numpy array. 
""" @@ -1887,7 +1882,7 @@ def _create_field_data(field, data_shape, land_mask): field.raw_lbpack, field.boundary_packing, field.bmdi, land_mask) - field._data = biggus.NumpyArrayAdapter(proxy) + field._data = as_lazy_data(proxy) def _field_gen(filename, read_data_bytes, little_ended=False): diff --git a/lib/iris/tests/integration/temp_dask/__init__.py b/lib/iris/tests/integration/temp_dask/__init__.py new file mode 100644 index 0000000000..50f059effc --- /dev/null +++ b/lib/iris/tests/integration/temp_dask/__init__.py @@ -0,0 +1,26 @@ +# (C) British Crown Copyright 2017, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +""" +Temporary integration tests, specific to replacement of biggus with dask. + +Note: some content here may eventually move into main tests. 
+Keep it here for now, so we can easily test all dask code with : + python -m unittest discover -v lib/iris/tests/integration/temp_dask + +""" +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa diff --git a/lib/iris/tests/integration/temp_dask/test_lazy_utils.py b/lib/iris/tests/integration/temp_dask/test_lazy_utils.py new file mode 100644 index 0000000000..8a774de738 --- /dev/null +++ b/lib/iris/tests/integration/temp_dask/test_lazy_utils.py @@ -0,0 +1,79 @@ +# (C) British Crown Copyright 2017, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +""" +Test lazy data utility functions. + +Note: really belongs in "tests/unit/lazy_data". + +""" +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + + +import numpy as np +import dask.array as da + + +from iris._lazy_data import is_lazy_data, as_lazy_data, as_concrete_data + + +class MixinLazyTestData(object): + def setUp(self): + # Create test real and dask arrays. 
+ self.real_array = np.arange(24).reshape((2, 3, 4)) + self.lazy_values = np.arange(30).reshape((2, 5, 3)) + self.lazy_array = da.from_array(self.lazy_values, 1e6) + + +class Test_is_lazy_data(MixinLazyTestData, tests.IrisTest): + def test_lazy(self): + self.assertTrue(is_lazy_data(self.lazy_array)) + + def test_real(self): + self.assertFalse(is_lazy_data(self.real_array)) + + +class Test_as_lazy_data(MixinLazyTestData, tests.IrisTest): + def test_lazy(self): + result = as_lazy_data(self.lazy_array) + self.assertTrue(is_lazy_data(result)) + self.assertIs(result, self.lazy_array) + + def test_real(self): + result = as_lazy_data(self.real_array) + self.assertTrue(is_lazy_data(result)) + self.assertArrayAllClose(as_concrete_data(result), self.real_array) + + +class Test_as_concrete_data(MixinLazyTestData, tests.IrisTest): + def test_lazy(self): + result = as_concrete_data(self.lazy_array) + self.assertFalse(is_lazy_data(result)) + self.assertArrayAllClose(result, self.lazy_values) + + def test_real(self): + result = as_concrete_data(self.real_array) + self.assertFalse(is_lazy_data(result)) + self.assertIs(result, self.real_array) + + +if __name__ == '__main__': + tests.main() diff --git a/lib/iris/tests/integration/temp_dask/test_pp_lazy.py b/lib/iris/tests/integration/temp_dask/test_pp_lazy.py new file mode 100644 index 0000000000..c7428018dd --- /dev/null +++ b/lib/iris/tests/integration/temp_dask/test_pp_lazy.py @@ -0,0 +1,132 @@ +# (C) British Crown Copyright 2017, Met Office +# +# This file is part of Iris. +# +# Iris is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Iris is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with Iris. If not, see . +""" +Test lazy data handling in :mod:`iris.fileformats.pp`. + +Note: probably belongs in "tests/unit/fileformats/pp", if a separate test is +actually required. + +""" +from __future__ import (absolute_import, division, print_function) +from six.moves import (filter, input, map, range, zip) # noqa + +# Import iris.tests first so that some things can be initialised before +# importing anything else. +import iris.tests as tests + +from dask.array.core import Array as DaskArray +import numpy as np + +import iris + + +class MixinLazyCubeLoad(object): + def setUp(self): + path = tests.get_data_path(('PP', 'aPPglob1', 'global.pp')) + self.cube, = iris.load_raw(path) + # This is the same as iris.tests.stock.global_pp(), but avoids the + # merge, which is presently not working. + + +@tests.skip_data +class TestLazyCubeLoad(MixinLazyCubeLoad, tests.IrisTest): + def setUp(self): + path = tests.get_data_path(('PP', 'aPPglob1', 'global.pp')) + self.cube, = iris.load_raw(path) + # This is the same as iris.tests.stock.global_pp(), but avoids the + # merge, which is presently not working. + + def test_load(self): + # Check that a simple load results in a cube with a lazy data array. + cube = self.cube + raw_data = cube._my_data + # It has loaded as a dask array. + self.assertIsInstance(raw_data, DaskArray) + + def test_data(self): + # Check that .data returns a realised array with the expected values. + cube = self.cube + raw_data = cube._my_data + data = cube.data + # "normal" .data is a numpy array. + self.assertIsInstance(data, np.ndarray) + # values match the lazy original. + self.assertArrayAllClose(data, raw_data.compute()) + + +@tests.skip_data +class Test_has_lazy_data(MixinLazyCubeLoad, tests.IrisTest): + def test(self): + # Check result before and after touching the data. 
+ cube = self.cube + # normal load yields lazy data. + self.assertTrue(cube.has_lazy_data()) + # touch data. + cube.data + # cube has real data after .data access. + self.assertFalse(cube.has_lazy_data()) + + +@tests.skip_data +class Test_lazy_data(MixinLazyCubeLoad, tests.IrisTest): + def test__before_and_after_realise(self): + # Check return values from cube.lazy_data(). + cube = self.cube + raw_data = cube._my_data + self.assertIsInstance(raw_data, DaskArray) + # before touching .data, lazy_data() returns the original raw data. + lazy_before = cube.lazy_data() + self.assertIs(lazy_before, raw_data) + # touch data. + cube.data + # after touching .data, lazy_data() is not the original raw data, but + # it computes the same result. + lazy_after = cube.lazy_data() + self.assertIsInstance(lazy_after, DaskArray) + self.assertIsNot(lazy_after, lazy_before) + self.assertArrayAllClose(lazy_after.compute(), + lazy_before.compute()) + + def test__newdata(self): + # Check cube.lazy_data(). + cube = self.cube + raw_data = cube._my_data + real_data = raw_data.compute() + # set new lazy value. + cube.lazy_data(raw_data + 100.0) + # check that results are as expected. + self.assertArrayAllClose(cube.lazy_data().compute(), + real_data + 100.0) + + def test__newdata_fail_bad_shape(self): + # Check cube.lazy_data() with bad shape. + cube = self.cube + raw_data = cube.lazy_data() + msg = 'cube data with shape \(73, 96\), got \(72, 96\)' + with self.assertRaisesRegexp(ValueError, msg): + cube.lazy_data(raw_data[1:]) + + def test__newdata_fail_not_lazy(self): + # Check cube.lazy_data() with non-lazy argument. 
+ cube = self.cube + raw_data = cube.lazy_data() + with self.assertRaisesRegexp(TypeError, 'must be a lazy array'): + cube.lazy_data(np.zeros(raw_data.shape)) + + +if __name__ == '__main__': + tests.main() diff --git a/lib/iris/util.py b/lib/iris/util.py index ebb6bfa746..16f6cdb87c 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -1,4 +1,4 @@ -# (C) British Crown Copyright 2010 - 2016, Met Office +# (C) British Crown Copyright 2010 - 2017, Met Office # # This file is part of Iris. # diff --git a/minimal-conda-requirements.txt b/minimal-conda-requirements.txt index 5299e438e9..a87c787ec9 100644 --- a/minimal-conda-requirements.txt +++ b/minimal-conda-requirements.txt @@ -10,6 +10,7 @@ numpy pyke udunits2 cf_units +dask # Iris build dependencies setuptools