Resampling not working with SLSTR ancillary datasets #1702

Closed
simonrp84 opened this issue May 30, 2021 · 1 comment

Describe the bug
An SLSTR dataset contains both channel data and ancillary data, such as the viewing geometry. When a scene is resampled, this viewing geometry is not resampled correctly.

To Reproduce

import numpy as np
from glob import glob

from satpy import Scene

infiles = glob('/my_slstr_dir/timeslot.SEN3/*.nc')
scn = Scene(infiles, reader='slstr_l1b')

scn.load(['S1', 'satellite_azimuth_angle'])
scn2 = scn.resample('laea_bb', resampler='bilinear')
data = np.array(scn2['satellite_azimuth_angle'])

Expected behavior
The azimuth angles on the resampled grid should be stored in the data variable.
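
A minimal check of that expectation might look like the following sketch (assuming the scn2 object from the reproduction above; the exact target-area shape depends on the laea_bb area definition and is not given here):

resampled = scn2['satellite_azimuth_angle']
data = resampled.values  # should compute without raising an IndexError
# The resampled array should match the shape of the target area definition.
assert data.shape == resampled.attrs['area'].shape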

Actual results

---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
<ipython-input-17-2c88e77567dd> in <module>
----> 1 np.array(scn4['satellite_azimuth_angle'])

~/miniconda3/lib/python3.8/site-packages/xarray/core/common.py in __array__(self, dtype)
    139 
    140     def __array__(self: Any, dtype: DTypeLike = None) -> np.ndarray:
--> 141         return np.asarray(self.values, dtype=dtype)
    142 
    143     def __repr__(self) -> str:

~/miniconda3/lib/python3.8/site-packages/xarray/core/dataarray.py in values(self)
    632     def values(self) -> np.ndarray:
    633         """The array's data as a numpy.ndarray"""
--> 634         return self.variable.values
    635 
    636     @values.setter

~/miniconda3/lib/python3.8/site-packages/xarray/core/variable.py in values(self)
    518     def values(self):
    519         """The variable's data as a numpy.ndarray"""
--> 520         return _as_array_or_item(self._data)
    521 
    522     @values.setter

~/miniconda3/lib/python3.8/site-packages/xarray/core/variable.py in _as_array_or_item(data)
    260     TODO: remove this (replace with np.asarray) once these issues are fixed
    261     """
--> 262     data = data.get() if isinstance(data, cupy_array_type) else np.asarray(data)
    263     if data.ndim == 0:
    264         if data.dtype.kind == "M":

~/miniconda3/lib/python3.8/site-packages/numpy/core/_asarray.py in asarray(a, dtype, order, like)
    100         return _asarray_with_like(a, dtype=dtype, order=order, like=like)
    101 
--> 102     return array(a, dtype, copy=False, order=order)
    103 
    104 

~/miniconda3/lib/python3.8/site-packages/dask/array/core.py in __array__(self, dtype, **kwargs)
   1500 
   1501     def __array__(self, dtype=None, **kwargs):
-> 1502         x = self.compute()
   1503         if dtype and x.dtype != dtype:
   1504             x = x.astype(dtype)

~/miniconda3/lib/python3.8/site-packages/dask/base.py in compute(self, **kwargs)
    283         dask.base.compute
    284         """
--> 285         (result,) = compute(self, traverse=False, **kwargs)
    286         return result
    287 

~/miniconda3/lib/python3.8/site-packages/dask/base.py in compute(*args, **kwargs)
    565         postcomputes.append(x.__dask_postcompute__())
    566 
--> 567     results = schedule(dsk, keys, **kwargs)
    568     return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
    569 

~/miniconda3/lib/python3.8/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, pool, **kwargs)
     77             pool = MultiprocessingPoolExecutor(pool)
     78 
---> 79     results = get_async(
     80         pool.submit,
     81         pool._max_workers,

~/miniconda3/lib/python3.8/site-packages/dask/local.py in get_async(submit, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, chunksize, **kwargs)
    512                             _execute_task(task, data)  # Re-execute locally
    513                         else:
--> 514                             raise_exception(exc, tb)
    515                     res, worker_id = loads(res_info)
    516                     state["cache"][key] = res

~/miniconda3/lib/python3.8/site-packages/dask/local.py in reraise(exc, tb)
    323     if exc.__traceback__ is not tb:
    324         raise exc.with_traceback(tb)
--> 325     raise exc
    326 
    327 

~/miniconda3/lib/python3.8/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
    221     try:
    222         task, data = loads(task_info)
--> 223         result = _execute_task(task, data)
    224         id = get_id()
    225         result = dumps((result, id))

~/miniconda3/lib/python3.8/site-packages/dask/core.py in _execute_task(arg, cache, dsk)
    119         # temporaries by their reference count and can execute certain
    120         # operations in-place.
--> 121         return func(*(_execute_task(a, cache) for a in args))
    122     elif not ishashable(arg):
    123         return arg

~/miniconda3/lib/python3.8/site-packages/pyresample/bilinear/xarr.py in _delayed_slice_data(self, slicer, data, fill_value)
    157     @delayed(nout=4)
    158     def _delayed_slice_data(self, slicer, data, fill_value):
--> 159         return slicer(data.values, self.slices_x, self.slices_y, self.mask_slices, fill_value)
    160 
    161     def _get_target_proj_vectors(self):

~/miniconda3/lib/python3.8/site-packages/pyresample/bilinear/_base.py in _slice2d(values, sl_x, sl_y, mask, fill_value)
    626 def _slice2d(values, sl_x, sl_y, mask, fill_value):
    627     # Slice 2D data
--> 628     arr = values[(sl_y, sl_x)]
    629     arr[(mask, )] = fill_value
    630     return arr[:, 0], arr[:, 1], arr[:, 2], arr[:, 3]

IndexError: index 1200 is out of bounds for axis 0 with size 1200

simonrp84 commented May 30, 2021

Note:
I've also tried the ewa resampler, which crashes Python (I'm not sure why), and the nearest and gradient_search resamplers, which both give the error below (the presumed calls are sketched after the traceback):

~/miniconda3/lib/python3.8/site-packages/dask/array/core.py in normalize_chunks(chunks, shape, limit, dtype, previous_chunks)
   2803             for c, s in zip(map(sum, chunks), shape)
   2804         ):
-> 2805             raise ValueError(
   2806                 "Chunks do not add up to shape. "
   2807                 "Got chunks=%s, shape=%s" % (chunks, shape)

ValueError: Chunks do not add up to shape. Got chunks=((512, 512, 176), (512, 512, 476)), shape=(2400, 3000)
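
For completeness, the presumed calls for those resamplers would be along these lines (a sketch; the exact invocations are not shown in this report and reuse the scn object from the reproduction above):

scn2 = scn.resample('laea_bb', resampler='ewa')              # crashes Python
scn2 = scn.resample('laea_bb', resampler='nearest')          # raises the ValueError above
scn2 = scn.resample('laea_bb', resampler='gradient_search')  # raises the ValueError above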
