diff --git a/doc/source/enhancements.rst b/doc/source/enhancements.rst
index 142be49531..d8178e2d23 100644
--- a/doc/source/enhancements.rst
+++ b/doc/source/enhancements.rst
@@ -27,7 +27,7 @@ on both ends of the scale, but these can be overridden with
       method: !!python/name:satpy.enhancements.stretch
       kwargs:
         stretch: linear
-        cutoffs: [0.003, 0.005]
+        cutoffs: (0.003, 0.005)
 
 .. note::
 
diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 1a89ec7583..49ecab429f 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -46,10 +46,12 @@ class IncompatibleAreas(Exception):
     """Error raised upon compositing things of different shapes."""
 
 
+
 class IncompatibleTimes(Exception):
     """Error raised upon compositing things from different times."""
 
 
+
 def check_times(projectables):
     """Check that *projectables* have compatible times."""
     times = []
@@ -72,6 +74,7 @@ def check_times(projectables):
         # Is there a more gracious way to handle this ?
         if np.max(times) - np.min(times) > np.timedelta64(1, 's'):
             raise IncompatibleTimes
+
         mid_time = (np.max(times) - np.min(times)) / 2 + np.min(times)
         return mid_time
 
@@ -154,8 +157,7 @@ def apply_modifier_info(self, origin, destination):
     def match_data_arrays(self, data_arrays):
         """Match data arrays so that they can be used together in a composite."""
         self.check_geolocation(data_arrays)
-        new_arrays = self.drop_coordinates(data_arrays)
-        return list(xr.unify_chunks(*new_arrays))
+        return self.drop_coordinates(data_arrays)
 
     def drop_coordinates(self, data_arrays):
         """Drop neglible non-dimensional coordinates."""
diff --git a/satpy/etc/readers/modis_l1b.yaml b/satpy/etc/readers/modis_l1b.yaml
index 7bb694dd7c..10a5c25770 100644
--- a/satpy/etc/readers/modis_l1b.yaml
+++ b/satpy/etc/readers/modis_l1b.yaml
@@ -438,52 +438,28 @@ datasets:
   solar_zenith_angle:
     name: solar_zenith_angle
     sensor: modis
-    resolution:
-      1000:
-        file_type: [hdf_eos_geo, hdf_eos_data_1000m]
-      500:
-        file_type: [hdf_eos_geo]
-      250:
-        file_type: [hdf_eos_geo]
+    resolution: [1000, 500, 250]
     coordinates: [longitude, latitude]
     file_type: [hdf_eos_geo, hdf_eos_data_1000m]
 
   solar_azimuth_angle:
     name: solar_azimuth_angle
     sensor: modis
-    resolution:
-      1000:
-        file_type: [hdf_eos_geo, hdf_eos_data_1000m]
-      500:
-        file_type: [hdf_eos_geo]
-      250:
-        file_type: [hdf_eos_geo]
+    resolution: [1000, 500, 250]
     coordinates: [longitude, latitude]
     file_type: [hdf_eos_geo, hdf_eos_data_1000m]
 
   satellite_zenith_angle:
     name: satellite_zenith_angle
     sensor: modis
-    resolution:
-      1000:
-        file_type: [hdf_eos_geo, hdf_eos_data_1000m]
-      500:
-        file_type: [hdf_eos_geo]
-      250:
-        file_type: [hdf_eos_geo]
+    resolution: [1000, 500, 250]
     coordinates: [longitude, latitude]
     file_type: [hdf_eos_geo, hdf_eos_data_1000m]
 
   satellite_azimuth_angle:
     name: satellite_azimuth_angle
    sensor: modis
-    resolution:
-      1000:
-        file_type: [hdf_eos_geo, hdf_eos_data_1000m]
-      500:
-        file_type: [hdf_eos_geo]
-      250:
-        file_type: [hdf_eos_geo]
+    resolution: [1000, 500, 250]
     coordinates: [longitude, latitude]
     file_type: [hdf_eos_geo, hdf_eos_data_1000m]
 
diff --git a/satpy/modifiers/_crefl.py b/satpy/modifiers/_crefl.py
index ffbb187220..72f3a719ff 100644
--- a/satpy/modifiers/_crefl.py
+++ b/satpy/modifiers/_crefl.py
@@ -149,20 +149,20 @@ def _read_fill_value_from_hdf4(var, dtype):
         return np.iinfo(dtype).min
 
     def _get_data_and_angles(self, datasets, optional_datasets):
-        vis, angles = self._extract_angle_data_arrays(datasets, optional_datasets)
+        angles = self._extract_angle_data_arrays(datasets, optional_datasets)
         angles = [xr.DataArray(dask_arr, dims=('y', 'x')) for dask_arr in angles]
-        return [vis] + angles
+        return [datasets[0]] + angles
 
     def _extract_angle_data_arrays(self, datasets, optional_datasets):
         all_datasets = datasets + optional_datasets
         if len(all_datasets) == 1:
             vis = self.match_data_arrays(datasets)[0]
-            return vis, self.get_angles(vis)
+            return self.get_angles(vis)
         if len(all_datasets) == 5:
             vis, *angles = self.match_data_arrays(
                 datasets + optional_datasets)
             # get the dask array underneath
-            return vis, [data_arr.data for data_arr in angles]
+            return [data_arr.data for data_arr in angles]
         raise ValueError("Not sure how to handle provided dependencies. "
                          "Either all 4 angles must be provided or none of "
                          "of them.")
diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py
index 4020f88669..7dd93073fe 100644
--- a/satpy/readers/hdfeos_base.py
+++ b/satpy/readers/hdfeos_base.py
@@ -27,7 +27,7 @@ from pyhdf.error import HDF4Error
 from pyhdf.SD import SD
 
-from satpy import CHUNK_SIZE, DataID
+from satpy import CHUNK_SIZE
 from satpy.readers.file_handlers import BaseFileHandler
 
 logger = logging.getLogger(__name__)
 
@@ -120,12 +120,6 @@ def read_mda(attribute):
                 current_dict[key] = val
         return mda
 
-    @property
-    def metadata_platform_name(self):
-        """Platform name from the internal file metadata."""
-        return self.metadata['INVENTORYMETADATA']['ASSOCIATEDPLATFORMINSTRUMENTSENSOR'][
-            'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER']['ASSOCIATEDPLATFORMSHORTNAME']['VALUE']
-
     @property
     def start_time(self):
         """Get the start time of the dataset."""
@@ -171,33 +165,11 @@ def load_dataset(self, dataset_name):
         scale_factor = data.attrs.get('scale_factor')
         if scale_factor is not None:
-            data = data * np.float32(scale_factor)
+            data = data * scale_factor
 
         data = data.where(good_mask, new_fill)
         return data
 
-    def _add_satpy_metadata(self, data_id: DataID, data_arr: xr.DataArray):
-        """Add metadata that is specific to Satpy."""
-        new_attrs = {
-            'platform_name': 'EOS-' + self.metadata_platform_name,
-            'sensor': 'modis',
-        }
-
-        res = data_id["resolution"]
-        rps = self._resolution_to_rows_per_scan(res)
-        new_attrs["rows_per_scan"] = rps
-
-        data_arr.attrs.update(new_attrs)
-
-    def _resolution_to_rows_per_scan(self, resolution: int) -> int:
-        known_rps = {
-            5000: 2,
-            1000: 10,
-            500: 20,
-            250: 40,
-        }
-        return known_rps.get(resolution, 10)
-
 
 class HDFEOSGeoReader(HDFEOSBaseFileReader):
     """Handler for the geographical datasets."""
 
@@ -321,6 +293,5 @@ def get_dataset(self, dataset_keys, dataset_info):
         for key in ('standard_name', 'units'):
             if key in dataset_info:
                 data.attrs[key] = dataset_info[key]
-        self._add_satpy_metadata(dataset_keys, data)
 
         return data
diff --git a/satpy/readers/modis_l1b.py b/satpy/readers/modis_l1b.py
index 46b00b05b6..a4e888c612 100644
--- a/satpy/readers/modis_l1b.py
+++ b/satpy/readers/modis_l1b.py
@@ -81,6 +81,12 @@ def get_dataset(self, key, info):
                           'EV_500_RefSB'],
                     250: ['EV_250_RefSB']}
 
+        platform_name = self.metadata['INVENTORYMETADATA']['ASSOCIATEDPLATFORMINSTRUMENTSENSOR'][
+            'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER']['ASSOCIATEDPLATFORMSHORTNAME']['VALUE']
+
+        info.update({'platform_name': 'EOS-' + platform_name})
+        info.update({'sensor': 'modis'})
+
         if self.resolution != key['resolution']:
             return
 
@@ -175,7 +181,6 @@ def get_dataset(self, key, info):
 
         # satscene[band].area = geometry.SwathDefinition(
         #     lons=satscene[band].area.lons[indices, :],
         #     lats=satscene[band].area.lats[indices, :])
-        self._add_satpy_metadata(key, projectable)
         return projectable
 
diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py
index 7dc336c501..cf8c1914ce 100644
--- a/satpy/readers/modis_l2.py
+++ b/satpy/readers/modis_l2.py
@@ -163,7 +163,6 @@ def get_dataset(self, dataset_id, dataset_info):
         else:
             dataset = self.load_dataset(dataset_name_in_file)
 
-        self._add_satpy_metadata(dataset_id, dataset)
         return dataset
 
diff --git a/satpy/tests/ab011ed1-62c7-4693-b3e6-48898e2c303b.nc b/satpy/tests/ab011ed1-62c7-4693-b3e6-48898e2c303b.nc
new file mode 100644
index 0000000000..934ceca5c9
Binary files /dev/null and b/satpy/tests/ab011ed1-62c7-4693-b3e6-48898e2c303b.nc differ
diff --git a/satpy/tests/b71971c3-1792-490e-a4cb-3c5e6e8abf36.nc b/satpy/tests/b71971c3-1792-490e-a4cb-3c5e6e8abf36.nc
new file mode 100644
index 0000000000..9a07454973
Binary files /dev/null and b/satpy/tests/b71971c3-1792-490e-a4cb-3c5e6e8abf36.nc differ
diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py
index 1781cea627..571d8c5a29 100644
--- a/satpy/tests/modifier_tests/test_crefl.py
+++ b/satpy/tests/modifier_tests/test_crefl.py
@@ -16,7 +16,6 @@
 import unittest
 from unittest import mock
 from contextlib import contextmanager
-from datetime import datetime
 
 import numpy as np
 import pytest
@@ -121,20 +120,6 @@ def test_get_angles(self, get_satpos):
         self.assertEqual(args[6], 0)
 
 
-def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units='degrees', calibration=None):
-    return xr.DataArray(data, dims=('y', 'x'),
-                        attrs={
-                            'start_orbit': 1708, 'end_orbit': 1708, 'wavelength': wavelength,
-                            'modifiers': None, 'calibration': calibration,
-                            'resolution': 371, 'name': name,
-                            'standard_name': standard_name, 'platform_name': 'Suomi-NPP',
-                            'polarization': None, 'sensor': 'viirs', 'units': units,
-                            'start_time': datetime(2012, 2, 25, 18, 1, 24, 570942),
-                            'end_time': datetime(2012, 2, 25, 18, 11, 21, 175760), 'area': area,
-                            'ancillary_variables': []
-                        })
-
-
 class TestReflectanceCorrectorModifier:
     """Test the CREFL modifier."""
 
@@ -150,11 +135,11 @@ def data_area_ref_corrector():
             cols, rows,
             (-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679))
 
-        data = np.zeros((rows, cols)) + 25
-        data[3, :] += 25
-        data[4:, :] += 50
-        data = da.from_array(data, chunks=100)
-        return area, data
+        dnb = np.zeros((rows, cols)) + 25
+        dnb[3, :] += 25
+        dnb[4:, :] += 50
+        dnb = da.from_array(dnb, chunks=100)
+        return area, dnb
 
     def test_reflectance_corrector_abi(self):
         """Test ReflectanceCorrector modifier with ABI data."""
@@ -237,6 +222,7 @@ def test_reflectance_corrector_abi(self):
         ])
     def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds):
         """Test ReflectanceCorrector modifier with VIIRS data."""
+        import datetime
         from satpy.modifiers._crefl import ReflectanceCorrector
         from satpy.tests.utils import make_dsq
 
@@ -271,14 +257,28 @@ def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds):
             make_dsq(name='solar_azimuth_angle'),
             make_dsq(name='solar_zenith_angle')]
 
-        area, data = self.data_area_ref_corrector()
-        c01 = _make_viirs_xarray(data, area, 'I01', 'toa_bidirectional_reflectance',
-                                 wavelength=(0.6, 0.64, 0.68), units='%',
-                                 calibration='reflectance')
-        c02 = _make_viirs_xarray(data, area, 'satellite_azimuth_angle', 'sensor_azimuth_angle')
-        c03 = _make_viirs_xarray(data, area, 'satellite_zenith_angle', 'sensor_zenith_angle')
-        c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle')
-        c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle')
+        area, dnb = self.data_area_ref_corrector()
+
+        def make_xarray(name, standard_name, wavelength=None, units='degrees', calibration=None):
+            return xr.DataArray(dnb, dims=('y', 'x'),
+                                attrs={
+                                    'start_orbit': 1708, 'end_orbit': 1708, 'wavelength': wavelength, 'level': None,
+                                    'modifiers': None, 'calibration': calibration,
+                                    'resolution': 371, 'name': name,
+                                    'standard_name': standard_name, 'platform_name': 'Suomi-NPP',
+                                    'polarization': None, 'sensor': 'viirs', 'units': units,
+                                    'start_time': datetime.datetime(2012, 2, 25, 18, 1, 24, 570942),
+                                    'end_time': datetime.datetime(2012, 2, 25, 18, 11, 21, 175760), 'area': area,
+                                    'ancillary_variables': []
+                                })
+
+        c01 = make_xarray('I01', 'toa_bidirectional_reflectance',
+                          wavelength=(0.6, 0.64, 0.68), units='%',
+                          calibration='reflectance')
+        c02 = make_xarray('satellite_azimuth_angle', 'sensor_azimuth_angle')
+        c03 = make_xarray('satellite_zenith_angle', 'sensor_zenith_angle')
+        c04 = make_xarray('solar_azimuth_angle', 'solar_azimuth_angle')
+        c05 = make_xarray('solar_zenith_angle', 'solar_zenith_angle')
 
         with dem_mock_cm(tmpdir, url):
             res = ref_cor([c01], [c02, c03, c04, c05])
@@ -294,8 +294,8 @@ def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds):
         assert res.attrs['platform_name'] == 'Suomi-NPP'
         assert res.attrs['sensor'] == 'viirs'
         assert res.attrs['units'] == '%'
-        assert res.attrs['start_time'] == datetime(2012, 2, 25, 18, 1, 24, 570942)
-        assert res.attrs['end_time'] == datetime(2012, 2, 25, 18, 11, 21, 175760)
+        assert res.attrs['start_time'] == datetime.datetime(2012, 2, 25, 18, 1, 24, 570942)
+        assert res.attrs['end_time'] == datetime.datetime(2012, 2, 25, 18, 11, 21, 175760)
         assert res.attrs['area'] == area
         assert res.attrs['ancillary_variables'] == []
         data = res.values
@@ -306,6 +306,7 @@ def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds):
 
     def test_reflectance_corrector_modis(self):
         """Test ReflectanceCorrector modifier with MODIS data."""
+        import datetime
         from satpy.modifiers._crefl import ReflectanceCorrector
         from satpy.tests.utils import make_dsq
         sataa_did = make_dsq(name='satellite_azimuth_angle')
@@ -331,21 +332,22 @@ def test_reflectance_corrector_modis(self):
 
         area, dnb = self.data_area_ref_corrector()
 
-        def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1000):
+        def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1000,
+                        file_type='hdf_eos_geo'):
             return xr.DataArray(dnb, dims=('y', 'x'),
                                 attrs={
                                     'wavelength': wavelength, 'level': None, 'modifiers': modifiers,
-                                    'calibration': calibration, 'resolution': resolution,
+                                    'calibration': calibration, 'resolution': resolution, 'file_type': file_type,
                                     'name': name, 'coordinates': ['longitude', 'latitude'],
                                     'platform_name': 'EOS-Aqua', 'polarization': None, 'sensor': 'modis',
-                                    'units': '%', 'start_time': datetime(2012, 8, 13, 18, 46, 1, 439838),
-                                    'end_time': datetime(2012, 8, 13, 18, 57, 47, 746296), 'area': area,
+                                    'units': '%', 'start_time': datetime.datetime(2012, 8, 13, 18, 46, 1, 439838),
+                                    'end_time': datetime.datetime(2012, 8, 13, 18, 57, 47, 746296), 'area': area,
                                     'ancillary_variables': []
                                 })
 
         c01 = make_xarray('1', 'reflectance', wavelength=(0.62, 0.645, 0.67), modifiers='sunz_corrected',
-                          resolution=500)
+                          resolution=500, file_type='hdf_eos_data_500m')
         c02 = make_xarray('satellite_azimuth_angle', None)
         c03 = make_xarray('satellite_zenith_angle', None)
         c04 = make_xarray('solar_azimuth_angle', None)
@@ -358,12 +360,13 @@ def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1
         assert res.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl',)
         assert res.attrs['calibration'] == 'reflectance'
         assert res.attrs['resolution'] == 500
+        assert res.attrs['file_type'] == 'hdf_eos_data_500m'
         assert res.attrs['name'] == '1'
         assert res.attrs['platform_name'] == 'EOS-Aqua'
         assert res.attrs['sensor'] == 'modis'
         assert res.attrs['units'] == '%'
-        assert res.attrs['start_time'] == datetime(2012, 8, 13, 18, 46, 1, 439838)
-        assert res.attrs['end_time'] == datetime(2012, 8, 13, 18, 57, 47, 746296)
+        assert res.attrs['start_time'] == datetime.datetime(2012, 8, 13, 18, 46, 1, 439838)
+        assert res.attrs['end_time'] == datetime.datetime(2012, 8, 13, 18, 57, 47, 746296)
         assert res.attrs['area'] == area
         assert res.attrs['ancillary_variables'] == []
         data = res.values
@@ -380,54 +383,3 @@ def test_reflectance_corrector_bad_prereqs(self):
         pytest.raises(ValueError, ref_cor, [1], [2, 3, 4])
         pytest.raises(ValueError, ref_cor, [1, 2, 3, 4], [])
         pytest.raises(ValueError, ref_cor, [], [1, 2, 3, 4])
-
-    @pytest.mark.parametrize(
-        'url,dem_mock_cm,dem_sds',
-        [
-            (None, mock_cmgdem, "average elevation"),
-            ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"),
-            ("tbase.hdf", mock_tbase, "Elevation"),
-        ])
-    def test_reflectance_corrector_different_chunks(self, tmpdir, url, dem_mock_cm, dem_sds):
-        """Test that the modifier works with different chunk sizes for inputs.
-
-        The modifier uses dask's "map_blocks". If the input chunks aren't the
-        same an error is raised.
-
-        """
-        from satpy.modifiers._crefl import ReflectanceCorrector
-        from satpy.tests.utils import make_dsq
-
-        ref_cor = ReflectanceCorrector(
-            optional_prerequisites=[
-                make_dsq(name='satellite_azimuth_angle'),
-                make_dsq(name='satellite_zenith_angle'),
-                make_dsq(name='solar_azimuth_angle'),
-                make_dsq(name='solar_zenith_angle')
-            ],
-            name='I01',
-            prerequisites=[],
-            wavelength=(0.6, 0.64, 0.68),
-            resolution=371,
-            calibration='reflectance',
-            modifiers=('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband'),
-            sensor='viirs',
-            url=url,
-            dem_sds=dem_sds,
-        )
-
-        area, data = self.data_area_ref_corrector()
-        c01 = _make_viirs_xarray(data, area, 'I01', 'toa_bidirectional_reflectance',
-                                 wavelength=(0.6, 0.64, 0.68), units='%',
-                                 calibration='reflectance')
-        c02 = _make_viirs_xarray(data, area, 'satellite_azimuth_angle', 'sensor_azimuth_angle')
-        c02.data = c02.data.rechunk((1, -1))
-        c03 = _make_viirs_xarray(data, area, 'satellite_zenith_angle', 'sensor_zenith_angle')
-        c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle')
-        c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle')
-
-        with dem_mock_cm(tmpdir, url):
-            res = ref_cor([c01], [c02, c03, c04, c05])
-
-        # make sure it can actually compute
-        res.compute()
diff --git a/satpy/tests/reader_tests/test_modis_l1b.py b/satpy/tests/reader_tests/test_modis_l1b.py
deleted file mode 100644
index 65bb5a5ea7..0000000000
--- a/satpy/tests/reader_tests/test_modis_l1b.py
+++ /dev/null
@@ -1,321 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2021 Satpy developers
-#
-# This file is part of satpy.
-#
-# satpy is free software: you can redistribute it and/or modify it under the
-# terms of the GNU General Public License as published by the Free Software
-# Foundation, either version 3 of the License, or (at your option) any later
-# version.
-#
-# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
-# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
-# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Unit tests for MODIS L1b HDF reader."""
-
-import os
-import unittest
-
-import numpy as np
-
-from pyhdf.SD import SD, SDC
-
-from satpy import available_readers, Scene
-
-# Mock MODIS HDF4 file
-SCAN_WIDTH = 406
-SCAN_LEN = 270
-SCALE_FACTOR = 1
-TEST_LAT = np.repeat(np.linspace(35., 45., SCAN_WIDTH)[:, None], SCAN_LEN, 1)
-TEST_LAT *= np.linspace(0.9, 1.1, SCAN_LEN)
-TEST_LON = np.repeat(np.linspace(-45., -35., SCAN_LEN)[None, :], SCAN_WIDTH, 0)
-TEST_LON *= np.linspace(0.9, 1.1, SCAN_WIDTH)[:, None]
-TEST_SATZ = (np.repeat(abs(np.linspace(-65.2, 65.4, SCAN_LEN))[None, :], SCAN_WIDTH, 0) * 100).astype(np.int16)
-TEST_DATA = {
-    'Latitude': {'data': TEST_LAT.astype(np.float32),
-                 'type': SDC.FLOAT32,
-                 'fill_value': -999,
-                 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}},
-    'Longitude': {'data': TEST_LON.astype(np.float32),
-                  'type': SDC.FLOAT32,
-                  'fill_value': -999,
-                  'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}},
-    'EV_1KM_RefSB': {
-        'data': np.zeros((15, 5*SCAN_WIDTH, 5*SCAN_LEN+4), dtype=np.uint16),
-        'type': SDC.UINT16,
-        'fill_value': 0,
-        'attrs': {
-            'dim_labels': ['Band_1KM_RefSB:MODIS_SWATH_Type_L1B',
-                           '10*nscans:MODIS_SWATH_Type_L1B',
-                           'Max_EV_frames:MODIS_SWATH_Type_L1B'],
-            'valid_range': (0, 32767),
-            'reflectance_scales': (1,) * 15,
-            'reflectance_offsets': (0,) * 15,
-            'band_names': '8,9,10,11,12,13lo,13hi,14lo,14hi,15,16,17,18,19,26',
-        },
-    },
-    'EV_1KM_RefSB_Uncert_Indexes': {
-        'data': np.zeros((15, 5*SCAN_WIDTH, 5*SCAN_LEN+4), dtype=np.uint8),
-        'type': SDC.UINT8,
-        'fill_value': 255,
-        'attrs': {
-            'dim_labels': ['Band_1KM_RefSB:MODIS_SWATH_Type_L1B',
-                           '10*nscans:MODIS_SWATH_Type_L1B',
-                           'Max_EV_frames:MODIS_SWATH_Type_L1B'],
-        },
-    },
-    'EV_500_Aggr1km_RefSB': {
-        'data': np.zeros((5, 5*SCAN_WIDTH, 5*SCAN_LEN+4), dtype=np.uint16),
-        'type': SDC.UINT16,
-        'fill_value': 0,
-        'attrs': {
-            'dim_labels': ['Band_500M:MODIS_SWATH_Type_L1B',
-                           '10*nscans:MODIS_SWATH_Type_L1B',
-                           'Max_EV_frames:MODIS_SWATH_Type_L1B'],
-            'valid_range': (0, 32767),
-            'reflectance_scales': (1,) * 5,
-            'reflectance_offsets': (0,) * 5,
-            'band_names': '3,4,5,6,7',
-        },
-    },
-    'EV_500_Aggr1km_RefSB_Uncert_Indexes': {
-        'data': np.zeros((5, 5*SCAN_WIDTH, 5*SCAN_LEN+4), dtype=np.uint8),
-        'type': SDC.UINT8,
-        'fill_value': 255,
-        'attrs': {
-            'dim_labels': ['Band_500M:MODIS_SWATH_Type_L1B',
-                           '10*nscans:MODIS_SWATH_Type_L1B',
-                           'Max_EV_frames:MODIS_SWATH_Type_L1B'],
-        },
-    },
-    'EV_250_Aggr1km_RefSB': {
-        'data': np.zeros((2, 5*SCAN_WIDTH, 5*SCAN_LEN+4), dtype=np.uint16),
-        'type': SDC.UINT16,
-        'fill_value': 0,
-        'attrs': {
-            'dim_labels': ['Band_250M:MODIS_SWATH_Type_L1B',
-                           '10*nscans:MODIS_SWATH_Type_L1B',
-                           'Max_EV_frames:MODIS_SWATH_Type_L1B'],
-            'valid_range': (0, 32767),
-            'reflectance_scales': (1,) * 2,
-            'reflectance_offsets': (0,) * 2,
-            'band_names': '1,2',
-        },
-    },
-    'EV_250_Aggr1km_RefSB_Uncert_Indexes': {
-        'data': np.zeros((2, 5*SCAN_WIDTH, 5*SCAN_LEN+4), dtype=np.uint8),
-        'type': SDC.UINT8,
-        'fill_value': 255,
-        'attrs': {
-            'dim_labels': ['Band_250M:MODIS_SWATH_Type_L1B',
-                           '10*nscans:MODIS_SWATH_Type_L1B',
-                           'Max_EV_frames:MODIS_SWATH_Type_L1B'],
-        },
-    },
-    'EV_1KM_Emmissive': {
-        'data': np.zeros((16, 5*SCAN_WIDTH, 5*SCAN_LEN+4), dtype=np.uint16),
-        'type': SDC.UINT16,
-        'fill_value': 0,
-        'attrs': {
-            'dim_labels': ['Band_1KM_Emissive:MODIS_SWATH_Type_L1B',
-                           '10*nscans:MODIS_SWATH_Type_L1B',
-                           'Max_EV_frames:MODIS_SWATH_Type_L1B'],
-            'valid_range': (0, 32767),
-            'band_names': '20,21,22,23,24,25,27,28,29,30,31,32,33,34,35,36',
-        },
-    },
-    'EV_1KM_Emissive_Uncert_Indexes': {
-        'data': np.zeros((16, 5*SCAN_WIDTH, 5*SCAN_LEN+4), dtype=np.uint8),
-        'type': SDC.UINT8,
-        'fill_value': 255,
-        'attrs': {
-            'dim_labels': ['Band_1KM_Emissive:MODIS_SWATH_Type_L1B',
-                           '10*nscans:MODIS_SWATH_Type_L1B',
-                           'Max_EV_frames:MODIS_SWATH_Type_L1B'],
-        },
-    },
-    'SensorZenith': {'data': TEST_SATZ,
-                     'type': SDC.INT16,
-                     'fill_value': -32767,
-                     'attrs': {'dim_labels': ['2*nscans:MODIS_SWATH_Type_L1B', '1KM_geo_dim:MODIS_SWATH_Type_L1B'],
-                               'scale_factor': 0.01}},
-    'SensorAzimuth': {'data': TEST_SATZ,
-                      'type': SDC.INT16,
-                      'fill_value': -32767,
-                      'attrs': {'dim_labels': ['2*nscans:MODIS_SWATH_Type_L1B', '1KM_geo_dim:MODIS_SWATH_Type_L1B'],
-                                'scale_factor': 0.01}},
-}
-
-
-def generate_file_name():
-    """Generate a file name that follows MODIS 35 L2 convention in a temporary directory."""
-    import tempfile
-    from datetime import datetime
-
-    file_name = 'MOD021km_A{0:%y%j_%H%M%S}_{0:%Y%j%H%M%S}.hdf'.format(
-        datetime.now()
-    )
-
-    base_dir = tempfile.mkdtemp()
-    file_name = os.path.join(base_dir, file_name)
-    return base_dir, file_name
-
-
-def create_test_data():
-    """Create a fake MODIS L1b HDF4 file with headers."""
-    from datetime import datetime, timedelta
-
-    base_dir, file_name = generate_file_name()
-    h = SD(file_name, SDC.WRITE | SDC.CREATE)
-    # Set hdf file attributes
-    beginning_date = datetime.now()
-    ending_date = beginning_date + timedelta(minutes=5)
-    core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \
-                           "GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n" \
-                           "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n"\
-                           "NUM_VAL = 1\nVALUE = \"{}\"\n"\
-                           "END_OBJECT = RANGEBEGINNINGTIME\n\nOBJECT = RANGEENDINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n"\
-                           "END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = \"{}\"\n" \
-                           "END_OBJECT = RANGEENDINGTIME\nEND_GROUP = RANGEDATETIME".format(
-        beginning_date.strftime("%Y-%m-%d"),
-        beginning_date.strftime("%H:%M:%S.%f"),
-        ending_date.strftime("%Y-%m-%d"),
-        ending_date.strftime("%H:%M:%S.%f")
-    )
-    inst_metadata = "GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n"\
-                    "OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\nCLASS = \"1\"\n\n" \
-                    "OBJECT = ASSOCIATEDSENSORSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \
-                    "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDSENSORSHORTNAME\n\n" \
-                    "OBJECT = ASSOCIATEDPLATFORMSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \
-                    "VALUE = \"Terra\"\nEND_OBJECT = ASSOCIATEDPLATFORMSHORTNAME\n\n" \
-                    "OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \
-                    "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\n\n" \
-                    "END_OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\n\n" \
-                    "END_GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n"
-    collection_metadata = "GROUP = COLLECTIONDESCRIPTIONCLASS\n\nOBJECT = SHORTNAME\nNUM_VAL = 1\n"\
-                          "VALUE = \"MOD021KM\"\nEND_OBJECT = SHORTNAME\n\n"\
-                          "OBJECT = VERSIONID\nNUM_VAL = 1\nVALUE = 6\nEND_OBJECT = VERSIONID\n\n"\
-                          "END_GROUP = COLLECTIONDESCRIPTIONCLASS\n\n"
-    core_metadata_header += "\n\n" + inst_metadata + collection_metadata
-    struct_metadata_header = "GROUP=SwathStructure\n"\
-                             "GROUP=SWATH_1\n"\
-                             "GROUP=DimensionMap\n"\
-                             "OBJECT=DimensionMap_2\n"\
-                             "GeoDimension=\"2*nscans\"\n"\
-                             "END_OBJECT=DimensionMap_2\n"\
-                             "END_GROUP=DimensionMap\n"\
-                             "END_GROUP=SWATH_1\n"\
-                             "END_GROUP=SwathStructure\nEND"
-    archive_metadata_header = "GROUP = ARCHIVEDMETADATA\nEND_GROUP = ARCHIVEDMETADATA\nEND"
-    setattr(h, 'CoreMetadata.0', core_metadata_header)  # noqa
-    setattr(h, 'StructMetadata.0', struct_metadata_header)  # noqa
-    setattr(h, 'ArchiveMetadata.0', archive_metadata_header)  # noqa
-
-    # Fill datasets
-    for dataset in TEST_DATA:
-        v = h.create(dataset, TEST_DATA[dataset]['type'], TEST_DATA[dataset]['data'].shape)
-        v[:] = TEST_DATA[dataset]['data']
-        dim_count = 0
-        for dimension_name in TEST_DATA[dataset]['attrs']['dim_labels']:
-            v.dim(dim_count).setname(dimension_name)
-            dim_count += 1
-        v.setfillvalue(TEST_DATA[dataset]['fill_value'])
-        v.scale_factor = TEST_DATA[dataset]['attrs'].get('scale_factor', SCALE_FACTOR)
-        for attr_key, attr_val in TEST_DATA[dataset]['attrs'].items():
-            if attr_key == 'dim_labels':
-                continue
-            setattr(v, attr_key, attr_val)
-    h.end()
-    return base_dir, file_name
-
-
-class TestModisL1b(unittest.TestCase):
-    """Test MODIS L1b reader."""
-
-    def setUp(self):
-        """Create fake HDF4 MODIS file."""
-        self.base_dir, self.file_name = create_test_data()
-
-    def tearDown(self):
-        """Remove the temporary directory created for the test."""
-        try:
-            import shutil
-            shutil.rmtree(self.base_dir, ignore_errors=True)
-        except OSError:
-            pass
-
-    @staticmethod
-    def _check_shared_metadata(data_arr):
-        assert data_arr.attrs["sensor"] == "modis"
-        assert data_arr.attrs["platform_name"] == "EOS-Terra"
-        assert "rows_per_scan" in data_arr.attrs
-        assert isinstance(data_arr.attrs["rows_per_scan"], int)
-        assert data_arr.attrs['reader'] == 'modis_l1b'
-
-    def test_available_reader(self):
-        """Test that MODIS L1b reader is available."""
-        self.assertIn('modis_l1b', available_readers())
-
-    def test_scene_available_datasets(self):
-        """Test that datasets are available."""
-        scene = Scene(reader='modis_l1b', filenames=[self.file_name])
-        available_datasets = scene.all_dataset_names()
-        assert len(available_datasets) > 0
-        self.assertIn('longitude', available_datasets)
-        self.assertIn('latitude', available_datasets)
-        for chan_num in list(range(1, 13)) + ['13lo', '13hi', '14lo', '14hi'] + list(range(15, 37)):
-            self.assertIn(str(chan_num), available_datasets)
-
-    def test_load_longitude_latitude(self):
-        """Test that longitude and latitude datasets are loaded correctly."""
-        from satpy.tests.utils import make_dataid
-
-        def test_func(dname, x, y):
-            if dname == 'longitude':
-                # assert less
-                np.testing.assert_array_less(x, y)
-            else:
-                # assert greater
-                # np.testing.assert_equal(x > y, True)
-                np.testing.assert_array_less(y, x)
-
-        scene = Scene(reader='modis_l1b', filenames=[self.file_name])
-        for dataset_name in ['longitude', 'latitude']:
-            # Default resolution should be the interpolated 1km
-            scene.load([dataset_name])
-            longitude_1km_id = make_dataid(name=dataset_name, resolution=1000)
-            longitude_1km = scene[longitude_1km_id]
-            self.assertEqual(longitude_1km.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4))
-            test_func(dataset_name, longitude_1km.values, 0)
-            self._check_shared_metadata(longitude_1km)
-
-            # Specify original 5km scale
-            scene.load([dataset_name], resolution=5000)
-            longitude_5km_id = make_dataid(name=dataset_name, resolution=5000)
-            longitude_5km = scene[longitude_5km_id]
-            self.assertEqual(longitude_5km.shape, TEST_DATA[dataset_name.capitalize()]['data'].shape)
-            test_func(dataset_name, longitude_5km.values, 0)
-            self._check_shared_metadata(longitude_5km)
-
-    def test_load_sat_zenith_angle(self):
-        """Test loading satellite zenith angle band."""
-        scene = Scene(reader='modis_l1b', filenames=[self.file_name])
-        dataset_name = 'satellite_zenith_angle'
-        scene.load([dataset_name])
-        dataset = scene[dataset_name]
-        self.assertEqual(dataset.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4))
-        assert dataset.attrs['resolution'] == 1000
-        self._check_shared_metadata(dataset)
-
-    def test_load_vis(self):
-        """Test loading visible band."""
-        scene = Scene(reader='modis_l1b', filenames=[self.file_name])
-        dataset_name = '1'
-        scene.load([dataset_name])
-        dataset = scene[dataset_name]
-        self.assertEqual(dataset.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4))
-        self._check_shared_metadata(dataset)
diff --git a/satpy/tests/reader_tests/test_modis_l2.py b/satpy/tests/reader_tests/test_modis_l2.py
index 876ddaf8d5..d3064f7f92 100644
--- a/satpy/tests/reader_tests/test_modis_l2.py
+++ b/satpy/tests/reader_tests/test_modis_l2.py
@@ -103,17 +103,6 @@ def create_test_data():
         ending_date.strftime("%Y-%m-%d"),
         ending_date.strftime("%H:%M:%S.%f")
     )
-    inst_metadata = "GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n"\
-                    "OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\nCLASS = \"1\"\n\n" \
-                    "OBJECT = ASSOCIATEDSENSORSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \
-                    "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDSENSORSHORTNAME\n\n" \
-                    "OBJECT = ASSOCIATEDPLATFORMSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \
-                    "VALUE = \"Terra\"\nEND_OBJECT = ASSOCIATEDPLATFORMSHORTNAME\n\n" \
-                    "OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \
-                    "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\n\n" \
-                    "END_OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\n\n" \
-                    "END_GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n"
-    core_metadata_header += "\n\n" + inst_metadata
     struct_metadata_header = "GROUP=SwathStructure\n"\
                              "GROUP=SWATH_1\n"\
                              "GROUP=DimensionMap\n"\
@@ -157,14 +146,6 @@ def tearDown(self):
         except OSError:
             pass
 
-    @staticmethod
-    def _check_shared_metadata(data_arr):
-        assert data_arr.attrs["sensor"] == "modis"
-        assert data_arr.attrs["platform_name"] == "EOS-Terra"
-        assert "rows_per_scan" in data_arr.attrs
-        assert isinstance(data_arr.attrs["rows_per_scan"], int)
-        assert data_arr.attrs['reader'] == 'modis_l2'
-
     def test_available_reader(self):
         """Test that MODIS L2 reader is available."""
         self.assertIn('modis_l2', available_readers())
@@ -199,15 +180,12 @@ def test_func(dname, x, y):
             longitude_1km = scene[longitude_1km_id]
             self.assertEqual(longitude_1km.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4))
             test_func(dataset_name, longitude_1km.values, 0)
-            self._check_shared_metadata(longitude_1km)
-
             # Specify original 5km scale
             scene.load([dataset_name], resolution=5000)
             longitude_5km_id = make_dataid(name=dataset_name, resolution=5000)
             longitude_5km = scene[longitude_5km_id]
             self.assertEqual(longitude_5km.shape, TEST_DATA[dataset_name.capitalize()]['data'].shape)
             test_func(dataset_name, longitude_5km.values, 0)
-            self._check_shared_metadata(longitude_5km)
 
     def test_load_quality_assurance(self):
         """Test loading quality assurance."""
@@ -219,7 +197,6 @@ def test_load_quality_assurance(self):
         self.assertIn(quality_assurance_id, scene)
         quality_assurance = scene[quality_assurance_id]
         self.assertEqual(quality_assurance.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4))
-        self._check_shared_metadata(quality_assurance)
 
     def test_load_1000m_cloud_mask_dataset(self):
         """Test loading 1000m cloud mask."""
@@ -231,7 +208,6 @@ def test_load_1000m_cloud_mask_dataset(self):
         self.assertIn(cloud_mask_id, scene)
         cloud_mask = scene[cloud_mask_id]
         self.assertEqual(cloud_mask.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4))
-        self._check_shared_metadata(cloud_mask)
 
     def test_load_250m_cloud_mask_dataset(self):
         """Test loading 250m cloud mask."""
@@ -243,4 +219,3 @@ def test_load_250m_cloud_mask_dataset(self):
         self.assertIn(cloud_mask_id, scene)
         cloud_mask = scene[cloud_mask_id]
         self.assertEqual(cloud_mask.shape, (4*5*SCAN_WIDTH, 4*(5*SCAN_LEN+4)))
-        self._check_shared_metadata(cloud_mask)
diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py
index 67a0a8426b..67e8071fac 100644
--- a/satpy/tests/test_composites.py
+++ b/satpy/tests/test_composites.py
@@ -51,7 +51,7 @@ def test_single_ds(self):
         ds1 = self._get_test_ds()
         comp = CompositeBase('test_comp')
         ret_datasets = comp.match_data_arrays((ds1,))
-        assert ret_datasets[0].identical(ds1)
+        self.assertIs(ret_datasets[0], ds1)
 
     def test_mult_ds_area(self):
         """Test multiple datasets successfully pass."""
@@ -60,8 +60,8 @@ def test_mult_ds_area(self):
         ds2 = self._get_test_ds()
         comp = CompositeBase('test_comp')
         ret_datasets = comp.match_data_arrays((ds1, ds2))
-        assert ret_datasets[0].identical(ds1)
-        assert ret_datasets[1].identical(ds2)
+        self.assertIs(ret_datasets[0], ds1)
+        self.assertIs(ret_datasets[1], ds2)
 
     def test_mult_ds_no_area(self):
         """Test that all datasets must have an area attribute."""
@@ -96,8 +96,8 @@ def test_mult_ds_diff_dims(self):
         ds2 = self._get_test_ds(shape=(3, 100, 50), dims=('bands', 'x', 'y'))
         comp = CompositeBase('test_comp')
         ret_datasets = comp.match_data_arrays((ds1, ds2))
-        assert ret_datasets[0].identical(ds1)
-        assert ret_datasets[1].identical(ds2)
+        self.assertIs(ret_datasets[0], ds1)
+        self.assertIs(ret_datasets[1], ds2)
 
     def test_mult_ds_diff_size(self):
         """Test that datasets with different sizes fail."""
diff --git a/satpy/tests/test_file_vii_base_nc.nc4dbaf523-1225-11ec-9dc0-0a1f71143ab9.nc b/satpy/tests/test_file_vii_base_nc.nc4dbaf523-1225-11ec-9dc0-0a1f71143ab9.nc
new file mode 100644
index 0000000000..a8357c13d0
Binary files /dev/null and b/satpy/tests/test_file_vii_base_nc.nc4dbaf523-1225-11ec-9dc0-0a1f71143ab9.nc differ
diff --git a/satpy/tests/test_file_vii_base_nc.nc4dcb3eba-1225-11ec-90b9-0a1f71143ab9.nc b/satpy/tests/test_file_vii_base_nc.nc4dcb3eba-1225-11ec-90b9-0a1f71143ab9.nc
new file mode 100644
index 0000000000..a8357c13d0
Binary files /dev/null and b/satpy/tests/test_file_vii_base_nc.nc4dcb3eba-1225-11ec-90b9-0a1f71143ab9.nc differ
diff --git a/satpy/tests/test_file_vii_base_nc.nce37252fa-1229-11ec-a839-0a1f71143ab9.nc b/satpy/tests/test_file_vii_base_nc.nce37252fa-1229-11ec-a839-0a1f71143ab9.nc
new file mode 100644
index 0000000000..4196aad0e3
Binary files /dev/null and b/satpy/tests/test_file_vii_base_nc.nce37252fa-1229-11ec-a839-0a1f71143ab9.nc differ
diff --git a/satpy/tests/test_file_vii_base_nc.nce3831063-1229-11ec-b972-0a1f71143ab9.nc b/satpy/tests/test_file_vii_base_nc.nce3831063-1229-11ec-b972-0a1f71143ab9.nc
new file mode 100644
index 0000000000..4196aad0e3
Binary files /dev/null and b/satpy/tests/test_file_vii_base_nc.nce3831063-1229-11ec-b972-0a1f71143ab9.nc differ
diff --git a/satpy/tests/test_file_vii_l1b_nc.nc4de127a8-1225-11ec-9bc4-0a1f71143ab9.nc b/satpy/tests/test_file_vii_l1b_nc.nc4de127a8-1225-11ec-9bc4-0a1f71143ab9.nc
new file mode 100644
index 0000000000..c78066f6ea
Binary files /dev/null and b/satpy/tests/test_file_vii_l1b_nc.nc4de127a8-1225-11ec-9bc4-0a1f71143ab9.nc differ
diff --git a/satpy/tests/test_file_vii_l1b_nc.nce39a0c29-1229-11ec-9912-0a1f71143ab9.nc b/satpy/tests/test_file_vii_l1b_nc.nce39a0c29-1229-11ec-9912-0a1f71143ab9.nc
new file mode 100644
index 0000000000..fbb39f2014
Binary files /dev/null and b/satpy/tests/test_file_vii_l1b_nc.nce39a0c29-1229-11ec-9912-0a1f71143ab9.nc differ
diff --git a/satpy/tests/test_file_vii_l2_nc.nc4ded0774-1225-11ec-bf73-0a1f71143ab9.nc b/satpy/tests/test_file_vii_l2_nc.nc4ded0774-1225-11ec-bf73-0a1f71143ab9.nc
new file mode 100644
index 0000000000..e851894b6e
Binary files /dev/null and b/satpy/tests/test_file_vii_l2_nc.nc4ded0774-1225-11ec-bf73-0a1f71143ab9.nc differ
diff --git a/satpy/tests/test_file_vii_l2_nc.nce3a65fc0-1229-11ec-ba9a-0a1f71143ab9.nc b/satpy/tests/test_file_vii_l2_nc.nce3a65fc0-1229-11ec-ba9a-0a1f71143ab9.nc
new file mode 100644
index 0000000000..408b43d50c
Binary files /dev/null and b/satpy/tests/test_file_vii_l2_nc.nce3a65fc0-1229-11ec-ba9a-0a1f71143ab9.nc differ
diff --git a/satpy/tests/testingcfwriter2021253112603-viirs-mband-20201007075915-20201007080744.nc b/satpy/tests/testingcfwriter2021253112603-viirs-mband-20201007075915-20201007080744.nc
new file mode 100644
index 0000000000..14afab34c5
Binary files /dev/null and b/satpy/tests/testingcfwriter2021253112603-viirs-mband-20201007075915-20201007080744.nc differ
diff --git a/satpy/tests/testingcfwriter22021253105311-viirs-mband-20201007075915-20201007080744.nc b/satpy/tests/testingcfwriter22021253105311-viirs-mband-20201007075915-20201007080744.nc
new file mode 100644
index 0000000000..cabad65bf6
Binary files /dev/null and b/satpy/tests/testingcfwriter22021253105311-viirs-mband-20201007075915-20201007080744.nc differ
diff --git a/satpy/tests/testingcfwriter22021253112603-viirs-mband-20201007075915-20201007080744.nc b/satpy/tests/testingcfwriter22021253112603-viirs-mband-20201007075915-20201007080744.nc
new file mode 100644
index 0000000000..6181e8ab2a
Binary files /dev/null and b/satpy/tests/testingcfwriter22021253112603-viirs-mband-20201007075915-20201007080744.nc differ
diff --git a/satpy/tests/testingcfwriter32021253105311-viirs-mband-20201007075915-20201007080744.nc b/satpy/tests/testingcfwriter32021253105311-viirs-mband-20201007075915-20201007080744.nc
new file mode 100644
index 0000000000..8ef01d51e7
Binary files /dev/null and b/satpy/tests/testingcfwriter32021253105311-viirs-mband-20201007075915-20201007080744.nc differ
diff --git a/satpy/tests/testingcfwriter32021253112603-viirs-mband-20201007075915-20201007080744.nc b/satpy/tests/testingcfwriter32021253112603-viirs-mband-20201007075915-20201007080744.nc
new file mode 100644
index 0000000000..6b84a4afc4
Binary files /dev/null and b/satpy/tests/testingcfwriter32021253112603-viirs-mband-20201007075915-20201007080744.nc differ
diff --git a/satpy/tests/testingcfwriter42021253105310-viirs-mband-20201007075915-20201007080744.nc b/satpy/tests/testingcfwriter42021253105310-viirs-mband-20201007075915-20201007080744.nc
new file mode 100644
index 0000000000..8539470a83
Binary files /dev/null and b/satpy/tests/testingcfwriter42021253105310-viirs-mband-20201007075915-20201007080744.nc differ
diff --git a/satpy/tests/testingcfwriter42021253112602-viirs-mband-20201007075915-20201007080744.nc b/satpy/tests/testingcfwriter42021253112602-viirs-mband-20201007075915-20201007080744.nc
new file mode 100644
index 0000000000..0b10e4d84a
Binary files /dev/null and b/satpy/tests/testingcfwriter42021253112602-viirs-mband-20201007075915-20201007080744.nc differ
diff --git a/satpy/tests/writer_tests/test_ninjotiff.py b/satpy/tests/writer_tests/test_ninjotiff.py
index ed9db0f8d4..76fc42638c 100644
--- a/satpy/tests/writer_tests/test_ninjotiff.py
+++ b/satpy/tests/writer_tests/test_ninjotiff.py
@@ -128,17 +128,3 @@ def test_convert_units_other(self):
         ds_in = sc["rain_rate"]
         with pytest.raises(NotImplementedError):
             convert_units(ds_in, "millimeter/hour", "m/s")
-
-    @mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset')
-    @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image')
-    @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff)
-    def test_P_image_is_uint8(self, iwsi, save_dataset):
-        """Test that a P-mode image is converted to uint8s."""
-        nt = pyninjotiff_mock.ninjotiff
-        nt.reset_mock()
-        from satpy.writers.ninjotiff import NinjoTIFFWriter
-        ntw = NinjoTIFFWriter()
-        dataset = xr.DataArray([1, 2, 3]).astype(int)
-        img = FakeImage(dataset, 'P')
-        ntw.save_image(img, filename='bla.tif', compute=False)
-        assert nt.save.mock_calls[0][1][0].data.dtype == np.uint8
diff --git a/satpy/writers/ninjotiff.py b/satpy/writers/ninjotiff.py
index 033a25859d..54756d8340 100644
--- a/satpy/writers/ninjotiff.py
+++ b/satpy/writers/ninjotiff.py
@@ -173,8 +173,6 @@ def save_image(self, img, filename=None, compute=True, **kwargs):  # floating_po
             raise NotImplementedError(
                 "Don't know how to handle non-scale/offset-based enhancements yet."
             )
-        if img.mode.startswith("P"):
-            img.data = img.data.astype(np.uint8)
         return nt.save(img, filename, data_is_scaled_01=True, compute=compute, **kwargs)
 
     def save_dataset(