Merge pull request #621 from astrofrog/fix-slice-mask
Fix bug that caused errors when slicing a cube with a mask set to None
astrofrog committed Apr 9, 2020
2 parents dc281f6 + 6a5b036 commit 3aa15a1
Showing 13 changed files with 52 additions and 2 deletions.
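For context, a minimal sketch (not part of the commit itself) of the scenario being fixed, mirroring the test_mask_none regression test added below: a SpectralCube constructed without an explicit mask has cube.mask set to None, and slicing such a cube previously failed because __getitem__ indexed the mask unconditionally.

# Reproduction sketch, mirroring the new test_mask_none regression test below.
# A cube built without an explicit mask has cube.mask == None; before this fix,
# __getitem__ evaluated self.mask[view] unconditionally, so slicing failed.
import numpy as np
from astropy import units as u
from astropy.wcs import WCS
from spectral_cube import SpectralCube

data = np.arange(24).reshape((2, 3, 4))

wcs = WCS(naxis=3)
wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN', 'VELO-HEL']

cube = SpectralCube(data * u.Jy / u.beam, wcs=wcs)

print(cube[0, :, :])   # 2D slice; errored before this fix when mask was None
print(cube[:, 0, 0])   # 1D cut along the leading axis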
1 change: 1 addition & 0 deletions CHANGES.rst
@@ -5,6 +5,7 @@
  run a script to generate test files. #598
- Refactor package infrastructure to no longer use astropy-helpers. #599
- Switch to using unified I/O infrastructure from Astropy. #600
- Bugfix: fix slicing of cubes with mask set to None. #621
- Refactor CASA I/O to use dask to access the array/mask data directly
  and to use only Python and Numpy to access image metadata. CASA images
  can now be read without CASA installed. #607, #609, #613
4 changes: 2 additions & 2 deletions spectral_cube/spectral_cube.py
@@ -1223,7 +1223,7 @@ def __getitem__(self, view):
copy=False,
unit=self.unit,
spectral_unit=self._spectral_unit,
- mask=self.mask[view],
+ mask=self.mask[view] if self.mask is not None else None,
meta=meta,
**bmarg
)
@@ -1239,7 +1239,7 @@ def __getitem__(self, view):
header['CUNIT3'] = self._spectral_unit.to_string(format='FITS')

return Slice(value=self.filled_data[view],
- mask=self.mask[view],
+ mask=self.mask[view] if self.mask is not None else None,
wcs=newwcs,
copy=False,
unit=self.unit,
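Both hunks above apply the same guard: index the mask only when one is set, and otherwise propagate None. A small standalone illustration of the pattern (illustrative only, not library code):

# Guard pattern used above: index an optional mask only when present,
# otherwise pass None through unchanged.
def sliced_mask(mask, view):
    return mask[view] if mask is not None else None

print(sliced_mask(None, slice(0, 2)))          # -> None, no error
print(sliced_mask([10, 20, 30], slice(0, 2)))  # -> [10, 20]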
Binary file not shown.
Binary file not shown.
4 changes: 4 additions & 0 deletions spectral_cube/tests/data/nomask.image/logtable/table.info
@@ -0,0 +1,4 @@
Type = Log message
SubType =

Repository for software-generated logging messages
Binary file not shown.
Binary file added spectral_cube/tests/data/nomask.image/table.dat
Binary file not shown.
Binary file added spectral_cube/tests/data/nomask.image/table.f0
Binary file not shown.
Binary file added spectral_cube/tests/data/nomask.image/table.f0_TSM0
Binary file not shown.
3 changes: 3 additions & 0 deletions spectral_cube/tests/data/nomask.image/table.info
@@ -0,0 +1,3 @@
Type = Image
SubType =

Binary file added spectral_cube/tests/data/nomask.image/table.lock
Binary file not shown.
25 changes: 25 additions & 0 deletions spectral_cube/tests/test_casafuncs.py
@@ -101,6 +101,31 @@ def test_casa_read_basic(memmap, bigendian):
    assert_quantity_allclose(cube.unmasked_data[0, 1, 2], 1 * u.Jy / u.beam)


def test_casa_read_basic_nomask():

    # Make sure things work well if there is no mask in the data

    cube = SpectralCube.read(os.path.join(DATA, 'nomask.image'))
    assert cube.shape == (3, 4, 5)
    assert_allclose(cube.wcs.pixel_to_world_values(1, 2, 3),
                    [2.406271e+01, 2.993521e+01, 1.421911e+09])

    # Carry out an operation to make sure the underlying data array works

    cube.moment0()

    # Slice the dataset

    assert_quantity_allclose(cube.unmasked_data[0, 0, :],
                             [1, 1, 1, 1, 1] * u.Jy / u.beam)
    assert_quantity_allclose(cube.unmasked_data[0, 1, 2], 1 * u.Jy / u.beam)

    # Slice the cube

    assert_quantity_allclose(cube[:, 0, 0],
                             [1, 1, 1] * u.Jy / u.beam)


@pytest.mark.skipif(not CASA_INSTALLED, reason='CASA tests must be run in a CASA environment.')
@pytest.mark.parametrize('filename', ('data_adv', 'data_advs', 'data_sdav',
                                      'data_vad', 'data_vsad'),
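Roughly how a user would exercise the code path covered by test_casa_read_basic_nomask: reading a CASA image that contains no mask and then operating on and slicing the cube. This is a sketch; the path below assumes the nomask.image test data added in this pull request is present on disk.

# Usage sketch for the no-mask CASA image scenario (path is an assumption).
from spectral_cube import SpectralCube

cube = SpectralCube.read('spectral_cube/tests/data/nomask.image')
print(cube.shape)        # (3, 4, 5) for the bundled test image
print(cube.moment0())    # operations on the underlying data still work
print(cube[:, 0, 0])     # slicing no longer fails when cube.mask is None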
17 changes: 17 additions & 0 deletions spectral_cube/tests/test_spectral_cube.py
@@ -2207,3 +2207,20 @@ def test_varyres_mask(data_vda_beams):
        assert mcube[:5].unmasked_beams == cube[:5].beams
    except ValueError:
        assert np.all(mcube[:5].unmasked_beams == cube[:5].beams)


def test_mask_none():

    # Regression test for issues that occur when mask is None

    data = np.arange(24).reshape((2, 3, 4))

    wcs = WCS(naxis=3)
    wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN', 'VELO-HEL']

    cube = SpectralCube(data * u.Jy / u.beam, wcs=wcs)

    assert_quantity_allclose(cube[0, :, :],
                             [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11]] * u.Jy / u.beam)
    assert_quantity_allclose(cube[:, 0, 0],
                             [0, 12] * u.Jy / u.beam)

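One way to run just the two new tests from a Python session (a sketch, equivalent to invoking pytest on these node IDs from the repository root):

# Run only the tests added in this pull request (assumes pytest is installed
# and the working directory is the repository root).
import pytest

pytest.main([
    "-q",
    "spectral_cube/tests/test_spectral_cube.py::test_mask_none",
    "spectral_cube/tests/test_casafuncs.py::test_casa_read_basic_nomask",
])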