Make sure use_threads parameter can travel to legacy code
Don't expose it at the API level as a named parameter, but pass it through to the
legacy data-loading code path nevertheless.
Kirill888 committed Jan 20, 2019
1 parent 3c07b1f commit 288abc9
Showing 1 changed file with 12 additions and 3 deletions.
datacube/api/core.py — 12 additions & 3 deletions

@@ -266,6 +266,12 @@ def load(self, product=None, measurements=None, output_crs=None, resolution=None
             raise DeprecationWarning("the `stack` keyword argument is not supported anymore, "
                                      "please apply `xarray.Dataset.to_array()` to the result instead")
 
+        # TODO: get rid of this block when removing legacy load support
+        legacy_args = {}
+        use_threads = query.pop('use_threads', None)
+        if use_threads is not None:
+            legacy_args['use_threads'] = use_threads
+
         observations = datasets or self.find_datasets(product=product, like=like, ensure_location=True, **query)
         if not observations:
             return xarray.Dataset()
@@ -284,7 +290,8 @@ def load(self, product=None, measurements=None, output_crs=None, resolution=None
                                 measurement_dicts,
                                 resampling=resampling,
                                 fuse_func=fuse_func,
-                                dask_chunks=dask_chunks)
+                                dask_chunks=dask_chunks,
+                                **legacy_args)
 
         return apply_aliases(result, datacube_product, measurements)
@@ -433,7 +440,8 @@ def _xr_load(sources, geobox, measurements,
     @staticmethod
     def load_data(sources, geobox, measurements, resampling=None,
-                  fuse_func=None, dask_chunks=None, skip_broken_datasets=False):
+                  fuse_func=None, dask_chunks=None, skip_broken_datasets=False,
+                  **extra):
         """
         Load data from :meth:`group_datasets` into an :class:`xarray.Dataset`.
@@ -502,7 +510,8 @@ def with_fuser(m, fuser, default=None):
             from . import _legacy
             return _legacy.load_data(sources, geobox, measurements,
                                      dask_chunks=dask_chunks,
-                                     skip_broken_datasets=skip_broken_datasets)
+                                     skip_broken_datasets=skip_broken_datasets,
+                                     **extra)
 
         if dask_chunks is not None:
             return Datacube._dask_load(sources, geobox, measurements, dask_chunks,
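For context, the parameter travels like this: `use_threads` is not declared in
`Datacube.load()`'s signature, so it arrives via the `**query` kwargs, is popped out
before the query is executed, and is forwarded to the legacy loader through
`**legacy_args` and `**extra`. A minimal caller-side sketch, assuming the standard
datacube API (the product name and extents here are illustrative, not from this commit):

    import datacube

    dc = datacube.Datacube()

    # `use_threads` is not a named parameter of load(); it lands in **query,
    # is popped out by the new block above, and travels to the legacy
    # data-loading path via **legacy_args -> **extra.
    data = dc.load(product='ls8_nbar_albers',   # illustrative product name
                   x=(149.0, 149.2), y=(-35.4, -35.2),
                   use_threads=True)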
