diff --git a/satpy/composites/viirs.py b/satpy/composites/viirs.py
index d48dfa950b..85f2b72add 100644
--- a/satpy/composites/viirs.py
+++ b/satpy/composites/viirs.py
@@ -633,9 +633,9 @@ def local_histogram_equalization(data, mask_to_equalize, valid_data_mask=None, n
total_cols = data.shape[1]
tile_size = int((local_radius_px * 2.0) + 1.0)
row_tiles = int(total_rows / tile_size) if (
- total_rows % tile_size is 0) else int(total_rows / tile_size) + 1
+ (total_rows % tile_size) == 0) else int(total_rows / tile_size) + 1
col_tiles = int(total_cols / tile_size) if (
- total_cols % tile_size is 0) else int(total_cols / tile_size) + 1
+ (total_cols % tile_size) == 0) else int(total_cols / tile_size) + 1
# an array of our distribution functions for equalization
all_cumulative_dist_functions = [[]]
@@ -1003,7 +1003,7 @@ def _linear_normalization_from_0to1(
"""
LOG.debug(message)
- if theoretical_min is not 0:
+ if theoretical_min != 0:
data[mask] = data[mask] - theoretical_min
theoretical_max = theoretical_max - theoretical_min
data[mask] = data[mask] / theoretical_max
diff --git a/satpy/etc/readers/viirs_sdr.yaml b/satpy/etc/readers/viirs_sdr.yaml
index eddbe0fcd8..4acffe8a0a 100644
--- a/satpy/etc/readers/viirs_sdr.yaml
+++ b/satpy/etc/readers/viirs_sdr.yaml
@@ -9,50 +9,57 @@ datasets:
i_lon:
name: i_longitude
resolution: 371
- file_type: gitco
- file_key: All_Data/{file_group}_All/Longitude
+ file_type: generic_file
+ dataset_groups: [GITCO, GIMGO]
+ file_key: All_Data/{dataset_group}_All/Longitude
units: "degrees"
standard_name: longitude
i_lat:
name: i_latitude
resolution: 371
- file_type: gitco
- file_key: All_Data/{file_group}_All/Latitude
+ file_type: generic_file
+ dataset_groups: [GITCO, GIMGO]
+ file_key: All_Data/{dataset_group}_All/Latitude
units: "degrees"
standard_name: latitude
m_lon:
name: m_longitude
resolution: 742
- file_type: gmtco
- file_key: All_Data/{file_group}_All/Longitude
+ file_type: generic_file
+ dataset_groups: [GMTCO, GMODO]
+ file_key: All_Data/{dataset_group}_All/Longitude
units: "degrees"
standard_name: longitude
m_lat:
name: m_latitude
resolution: 742
- file_type: gmtco
- file_key: All_Data/{file_group}_All/Latitude
+ file_type: generic_file
+ dataset_groups: [GMTCO, GMODO]
+ file_key: All_Data/{dataset_group}_All/Latitude
units: "degrees"
standard_name: latitude
dnb_lon:
name: dnb_longitude
resolution: 743
- file_type: gdnbo
- file_key: All_Data/{file_group}_All/Longitude
+ file_type: generic_file
+ dataset_groups: [GDNBO]
+ file_key: All_Data/{dataset_group}_All/Longitude
units: "degrees"
standard_name: longitude
dnb_lat:
name: dnb_latitude
resolution: 743
- file_type: gdnbo
- file_key: All_Data/{file_group}_All/Latitude
+ file_type: generic_file
+ dataset_groups: [GDNBO]
+ file_key: All_Data/{dataset_group}_All/Latitude
units: "degrees"
standard_name: latitude
I01:
name: I01
wavelength: [0.600, 0.640, 0.680]
modifiers: [sunz_corrected_iband]
- file_type: svi01
+ dataset_groups: [SVI01]
+ file_type: generic_file
resolution: 371
coordinates: [i_longitude, i_latitude]
calibration:
@@ -66,7 +73,8 @@ datasets:
name: I02
wavelength: [0.845, 0.865, 0.884]
modifiers: [sunz_corrected_iband]
- file_type: svi02
+ dataset_groups: [SVI02]
+ file_type: generic_file
resolution: 371
coordinates: [i_longitude, i_latitude]
calibration:
@@ -80,7 +88,8 @@ datasets:
name: I03
wavelength: [1.580, 1.610, 1.640]
modifiers: [sunz_corrected_iband]
- file_type: svi03
+ dataset_groups: [SVI03]
+ file_type: generic_file
resolution: 371
coordinates: [i_longitude, i_latitude]
calibration:
@@ -93,7 +102,8 @@ datasets:
I04:
name: I04
wavelength: [3.580, 3.740, 3.900]
- file_type: svi04
+ file_type: generic_file
+ dataset_groups: [SVI04]
resolution: 371
coordinates: [i_longitude, i_latitude]
calibration:
@@ -106,7 +116,8 @@ datasets:
I05:
name: I05
wavelength: [10.500, 11.450, 12.300]
- file_type: svi05
+ dataset_groups: [SVI05]
+ file_type: generic_file
resolution: 371
coordinates: [i_longitude, i_latitude]
calibration:
@@ -120,7 +131,8 @@ datasets:
name: M01
wavelength: [0.402, 0.412, 0.422]
modifiers: [sunz_corrected]
- file_type: svm01
+ dataset_groups: [SVM01]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -134,7 +146,8 @@ datasets:
name: M02
wavelength: [0.436, 0.445, 0.454]
modifiers: [sunz_corrected]
- file_type: svm02
+ dataset_groups: [SVM02]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -148,7 +161,8 @@ datasets:
name: M03
wavelength: [0.478, 0.488, 0.498]
modifiers: [sunz_corrected]
- file_type: svm03
+ dataset_groups: [SVM03]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -162,7 +176,8 @@ datasets:
name: M04
wavelength: [0.545, 0.555, 0.565]
modifiers: [sunz_corrected]
- file_type: svm04
+ dataset_groups: [SVM04]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -176,7 +191,8 @@ datasets:
name: M05
wavelength: [0.662, 0.672, 0.682]
modifiers: [sunz_corrected]
- file_type: svm05
+ dataset_groups: [SVM05]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -190,7 +206,8 @@ datasets:
name: M06
wavelength: [0.739, 0.746, 0.754]
modifiers: [sunz_corrected]
- file_type: svm06
+ dataset_groups: [SVM06]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -204,7 +221,8 @@ datasets:
name: M07
wavelength: [0.846, 0.865, 0.885]
modifiers: [sunz_corrected]
- file_type: svm07
+ dataset_groups: [SVM07]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -218,7 +236,8 @@ datasets:
name: M08
wavelength: [1.230, 1.240, 1.250]
modifiers: [sunz_corrected]
- file_type: svm08
+ dataset_groups: [SVM08]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -232,7 +251,8 @@ datasets:
name: M09
wavelength: [1.371, 1.378, 1.386]
modifiers: [sunz_corrected]
- file_type: svm09
+ dataset_groups: [SVM09]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -246,7 +266,8 @@ datasets:
name: M10
wavelength: [1.580, 1.610, 1.640]
modifiers: [sunz_corrected]
- file_type: svm10
+ dataset_groups: [SVM10]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -260,7 +281,8 @@ datasets:
name: M11
wavelength: [2.225, 2.250, 2.275]
modifiers: [sunz_corrected]
- file_type: svm11
+ dataset_groups: [SVM11]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -273,7 +295,8 @@ datasets:
M12:
name: M12
wavelength: [3.610, 3.700, 3.790]
- file_type: svm12
+ dataset_groups: [SVM12]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -286,7 +309,8 @@ datasets:
M13:
name: M13
wavelength: [3.973, 4.050, 4.128]
- file_type: svm13
+ dataset_groups: [SVM13]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -299,7 +323,8 @@ datasets:
M14:
name: M14
wavelength: [8.400, 8.550, 8.700]
- file_type: svm14
+ dataset_groups: [SVM14]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -312,7 +337,8 @@ datasets:
M15:
name: M15
wavelength: [10.263, 10.763, 11.263]
- file_type: svm15
+ dataset_groups: [SVM15]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -325,7 +351,8 @@ datasets:
M16:
name: M16
wavelength: [11.538, 12.013, 12.489]
- file_type: svm16
+ dataset_groups: [SVM16]
+ file_type: generic_file
resolution: 742
coordinates: [m_longitude, m_latitude]
calibration:
@@ -342,64 +369,72 @@ datasets:
resolution: 371
coordinates: [i_longitude, i_latitude]
units: degrees
- file_type: [gitco, gimgo]
- file_key: 'All_Data/{file_group}_All/SolarZenithAngle'
+ file_type: generic_file
+ dataset_groups: [GITCO, GIMGO]
+ file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle'
I_SOLA:
name: solar_azimuth_angle
standard_name: solar_azimuth_angle
resolution: 371
coordinates: [i_longitude, i_latitude]
units: degrees
- file_type: [gitco, gimgo]
- file_key: 'All_Data/{file_group}_All/SolarAzimuthAngle'
+ file_type: generic_file
+ dataset_groups: [GITCO, GIMGO]
+ file_key: 'All_Data/{dataset_group}_All/SolarAzimuthAngle'
I_SENZ:
name: satellite_zenith_angle
standard_name: sensor_zenith_angle
resolution: 371
coordinates: [i_longitude, i_latitude]
units: degrees
- file_type: [gitco, gimgo]
- file_key: 'All_Data/{file_group}_All/SatelliteZenithAngle'
+ file_type: generic_file
+ dataset_groups: [GITCO, GIMGO]
+ file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle'
I_SENA:
name: satellite_azimuth_angle
standard_name: sensor_azimuth_angle
resolution: 371
coordinates: [i_longitude, i_latitude]
units: degrees
- file_type: [gitco, gimgo]
- file_key: 'All_Data/{file_group}_All/SatelliteAzimuthAngle'
+ file_type: generic_file
+ dataset_groups: [GITCO, GIMGO]
+ file_key: 'All_Data/{dataset_group}_All/SatelliteAzimuthAngle'
M_SOLZ:
name: solar_zenith_angle
standard_name: solar_zenith_angle
resolution: 742
coordinates: [m_longitude, m_latitude]
units: degrees
- file_type: [gmtco, gmodo]
- file_key: 'All_Data/{file_group}_All/SolarZenithAngle'
+ file_type: generic_file
+ dataset_groups: [GMTCO, GMODO]
+ file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle'
M_SOLA:
name: solar_azimuth_angle
standard_name: solar_azimuth_angle
resolution: 742
coordinates: [m_longitude, m_latitude]
units: degrees
- file_type: [gmtco, gmodo]
- file_key: 'All_Data/{file_group}_All/SolarAzimuthAngle'
+ file_type: generic_file
+ dataset_groups: [GMTCO, GMODO]
+ file_key: 'All_Data/{dataset_group}_All/SolarAzimuthAngle'
M_SENZ:
name: satellite_zenith_angle
standard_name: sensor_zenith_angle
resolution: 742
coordinates: [m_longitude, m_latitude]
units: degrees
- file_type: [gmtco, gmodo]
- file_key: 'All_Data/{file_group}_All/SatelliteZenithAngle'
+ file_type: generic_file
+ dataset_groups: [GMTCO, GMODO]
+ file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle'
M_SENA:
name: satellite_azimuth_angle
standard_name: sensor_azimuth_angle
resolution: 742
coordinates: [m_longitude, m_latitude]
units: degrees
- file_type: [gmtco, gmodo]
- file_key: 'All_Data/{file_group}_All/SatelliteAzimuthAngle'
+ file_type: generic_file
+ dataset_groups: [GMTCO, GMODO]
+ file_key: 'All_Data/{dataset_group}_All/SatelliteAzimuthAngle'
DNB:
name: DNB
wavelength: [0.500, 0.700, 0.900]
@@ -410,148 +445,54 @@ datasets:
standard_name: toa_outgoing_radiance_per_unit_wavelength
units: W m-2 sr-1
file_units: W cm-2 sr-1
- file_type: vdnbo
+ dataset_groups: [SVDNB]
+ file_type: generic_file
DNB_SZA:
name: dnb_solar_zenith_angle
standard_name: solar_zenith_angle
resolution: 743
coordinates: [dnb_longitude, dnb_latitude]
- file_type: gdnbo
- file_key: 'All_Data/{file_group}_All/SolarZenithAngle'
+ file_type: generic_file
+ dataset_groups: [GDNBO]
+ file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle'
DNB_LZA:
name: dnb_lunar_zenith_angle
standard_name: lunar_zenith_angle
resolution: 743
coordinates: [dnb_longitude, dnb_latitude]
- file_type: gdnbo
- file_key: 'All_Data/{file_group}_All/LunarZenithAngle'
+ file_type: generic_file
+ dataset_groups: [GDNBO]
+ file_key: 'All_Data/{dataset_group}_All/LunarZenithAngle'
DNB_SENZ:
name: dnb_satellite_zenith_angle
standard_name: sensor_zenith_angle
resolution: 743
coordinates: [dnb_longitude, dnb_latitude]
units: degrees
- file_type: gdnbo
- file_key: 'All_Data/{file_group}_All/SatelliteZenithAngle'
+ file_type: generic_file
+ dataset_groups: [GDNBO]
+ file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle'
DNB_SENA:
name: dnb_satellite_azimuth_angle
standard_name: sensor_azimuth_angle
resolution: 743
coordinates: [dnb_longitude, dnb_latitude]
units: degrees
- file_type: gdnbo
- file_key: 'All_Data/{file_group}_All/SatelliteAzimuthAngle'
+ file_type: generic_file
+ dataset_groups: [GDNBO]
+ file_key: 'All_Data/{dataset_group}_All/SatelliteAzimuthAngle'
dnb_moon_illumination_fraction:
name: dnb_moon_illumination_fraction
- file_type: gdnbo
- file_key: 'All_Data/{file_group}_All/MoonIllumFraction'
+ file_type: generic_file
+ dataset_groups: [GDNBO]
+ file_key: 'All_Data/{dataset_group}_All/MoonIllumFraction'
file_types:
- gitco:
+ generic_file:
file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['GITCO_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-IMG-GEO-TC
- gimgo:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['GIMGO_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-IMG-GEO
- gmtco:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['GMTCO_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-MOD-GEO-TC
- gmodo:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['GMODO_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-MOD-GEO
- gdnbo:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['GDNBO_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-DNB-GEO
- svi01:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVI01_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-I1-SDR
- svi02:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVI02_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-I2-SDR
- svi03:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVI03_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-I3-SDR
- svi04:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVI04_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-I4-SDR
- svi05:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVI05_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-I5-SDR
- svm01:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM01_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M1-SDR
- svm02:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM02_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M2-SDR
- svm03:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM03_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M3-SDR
- svm04:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM04_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M4-SDR
- svm05:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM05_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M5-SDR
- svm06:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM06_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M6-SDR
- svm07:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM07_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M7-SDR
- svm08:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM08_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M8-SDR
- svm09:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM09_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M9-SDR
- svm10:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM10_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M10-SDR
- svm11:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM11_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M11-SDR
- svm12:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM12_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M12-SDR
- svm13:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM13_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M13-SDR
- svm14:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM14_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M14-SDR
- svm15:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM15_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M15-SDR
- svm16:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVM16_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-M16-SDR
- vdnbo:
- file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler
- file_patterns: ['SVDNB_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
- file_group: VIIRS-DNB-SDR
+ file_patterns: ['{datasets}_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
+ # Example filenames
+ # GMODO-SVM01-SVM02-SVM03-SVM04-SVM05-SVM06-SVM07-SVM08-SVM09-SVM10-SVM11-SVM12-SVM13-SVM14-SVM15-SVM16_j01_d20190304_t1103049_e1108449_b06684_c20190304213641984108_nobc_ops.h5
+ # GMTCO_j01_d20190304_t1103049_e1108449_b06684_c20190304150845549693_nobc_ops.h5
+ # GDNBO-SVDNB_j01_d20190304_t1057236_e1103036_b06684_c20190304213641088765_nobc_ops.h5
+ # SVM15_npp_d20150311_t1126366_e1128008_b17451_c20150311113344455225_cspp_dev.h5
diff --git a/satpy/multiscene.py b/satpy/multiscene.py
index db35f7dc7e..1b94fb770c 100644
--- a/satpy/multiscene.py
+++ b/satpy/multiscene.py
@@ -40,12 +40,6 @@
# python 2
from Queue import Queue
-try:
- from itertools import zip_longest
-except ImportError:
- # python 2.7
- from itertools import izip_longest as zip_longest
-
try:
import imageio
except ImportError:
diff --git a/satpy/readers/viirs_sdr.py b/satpy/readers/viirs_sdr.py
index 3f508abd60..ed5fc9385f 100644
--- a/satpy/readers/viirs_sdr.py
+++ b/satpy/readers/viirs_sdr.py
@@ -27,15 +27,23 @@
# satpy. If not, see .
"""Interface to VIIRS SDR format
+**********************************
+
+This reader implements support for VIIRS SDR files as produced by CSPP and CLASS.
+It is comprised of two parts:
+
+ - A subclass of the YAMLFileReader class to allow handling all the files
+ - A filehandler class to implement the actual reading
Format documentation:
-http://npp.gsfc.nasa.gov/science/sciencedocuments/082012/474-00001-03_CDFCBVolIII_RevC.pdf
+
+ - http://npp.gsfc.nasa.gov/science/sciencedocuments/082012/474-00001-03_CDFCBVolIII_RevC.pdf
"""
import logging
-import os.path
from datetime import datetime, timedelta
from glob import glob
+import os.path
import numpy as np
import dask.array as da
@@ -84,11 +92,46 @@ def _get_invalid_info(granule_data):
return msg
+DATASET_KEYS = {'GDNBO': 'VIIRS-DNB-GEO',
+ 'SVDNB': 'VIIRS-DNB-SDR',
+ 'GITCO': 'VIIRS-IMG-GEO-TC',
+ 'GIMGO': 'VIIRS-IMG-GEO',
+ 'SVI01': 'VIIRS-I1-SDR',
+ 'SVI02': 'VIIRS-I2-SDR',
+ 'SVI03': 'VIIRS-I3-SDR',
+ 'SVI04': 'VIIRS-I4-SDR',
+ 'SVI05': 'VIIRS-I5-SDR',
+ 'GMTCO': 'VIIRS-MOD-GEO-TC',
+ 'GMODO': 'VIIRS-MOD-GEO',
+ 'SVM01': 'VIIRS-M1-SDR',
+ 'SVM02': 'VIIRS-M2-SDR',
+ 'SVM03': 'VIIRS-M3-SDR',
+ 'SVM04': 'VIIRS-M4-SDR',
+ 'SVM05': 'VIIRS-M5-SDR',
+ 'SVM06': 'VIIRS-M6-SDR',
+ 'SVM07': 'VIIRS-M7-SDR',
+ 'SVM08': 'VIIRS-M8-SDR',
+ 'SVM09': 'VIIRS-M9-SDR',
+ 'SVM10': 'VIIRS-M10-SDR',
+ 'SVM11': 'VIIRS-M11-SDR',
+ 'SVM12': 'VIIRS-M12-SDR',
+ 'SVM13': 'VIIRS-M13-SDR',
+ 'SVM14': 'VIIRS-M14-SDR',
+ 'SVM15': 'VIIRS-M15-SDR',
+ 'SVM16': 'VIIRS-M16-SDR',
+ }
+
+
class VIIRSSDRFileHandler(HDF5FileHandler):
"""VIIRS HDF5 File Reader
"""
+ def __init__(self, filename, filename_info, filetype_info, use_tc=None, **kwargs):
+ self.datasets = filename_info['datasets'].split('-')
+ self.use_tc = use_tc
+ super(VIIRSSDRFileHandler, self).__init__(filename, filename_info, filetype_info)
+
def __getitem__(self, item):
if '*' in item:
# this is an aggregated field that can't easily be loaded, need to
@@ -125,30 +168,34 @@ def _parse_datetime(self, datestr, timestr):
@property
def start_time(self):
- default_start_date = 'Data_Products/{file_group}/{file_group}_Aggr/attr/AggregateBeginningDate'
- default_start_time = 'Data_Products/{file_group}/{file_group}_Aggr/attr/AggregateBeginningTime'
- date_var_path = self.filetype_info.get('start_date', default_start_date).format(**self.filetype_info)
- time_var_path = self.filetype_info.get('start_time', default_start_time).format(**self.filetype_info)
+ dataset_group = DATASET_KEYS[self.datasets[0]]
+ default_start_date = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateBeginningDate'
+ default_start_time = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateBeginningTime'
+ date_var_path = self.filetype_info.get('start_date', default_start_date).format(dataset_group=dataset_group)
+ time_var_path = self.filetype_info.get('start_time', default_start_time).format(dataset_group=dataset_group)
return self._parse_datetime(self[date_var_path], self[time_var_path])
@property
def end_time(self):
- default_end_date = 'Data_Products/{file_group}/{file_group}_Aggr/attr/AggregateEndingDate'
- default_end_time = 'Data_Products/{file_group}/{file_group}_Aggr/attr/AggregateEndingTime'
- date_var_path = self.filetype_info.get('end_date', default_end_date).format(**self.filetype_info)
- time_var_path = self.filetype_info.get('end_time', default_end_time).format(**self.filetype_info)
+ dataset_group = DATASET_KEYS[self.datasets[0]]
+ default_end_date = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateEndingDate'
+ default_end_time = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateEndingTime'
+ date_var_path = self.filetype_info.get('end_date', default_end_date).format(dataset_group=dataset_group)
+ time_var_path = self.filetype_info.get('end_time', default_end_time).format(dataset_group=dataset_group)
return self._parse_datetime(self[date_var_path], self[time_var_path])
@property
def start_orbit_number(self):
- default = 'Data_Products/{file_group}/{file_group}_Aggr/attr/AggregateBeginningOrbitNumber'
- start_orbit_path = self.filetype_info.get('start_orbit', default).format(**self.filetype_info)
+ dataset_group = DATASET_KEYS[self.datasets[0]]
+ default = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateBeginningOrbitNumber'
+ start_orbit_path = self.filetype_info.get('start_orbit', default).format(dataset_group=dataset_group)
return int(self[start_orbit_path])
@property
def end_orbit_number(self):
- default = 'Data_Products/{file_group}/{file_group}_Aggr/attr/AggregateEndingOrbitNumber'
- end_orbit_path = self.filetype_info.get('end_orbit', default).format(**self.filetype_info)
+ dataset_group = DATASET_KEYS[self.datasets[0]]
+ default = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateEndingOrbitNumber'
+ end_orbit_path = self.filetype_info.get('end_orbit', default).format(dataset_group=dataset_group)
return int(self[end_orbit_path])
@property
@@ -165,9 +212,10 @@ def platform_name(self):
@property
def sensor_name(self):
- default = 'Data_Products/{file_group}/attr/Instrument_Short_Name'
+ dataset_group = DATASET_KEYS[self.datasets[0]]
+ default = 'Data_Products/{dataset_group}/attr/Instrument_Short_Name'
sensor_path = self.filetype_info.get(
- 'sensor_name', default).format(**self.filetype_info)
+ 'sensor_name', default).format(dataset_group=dataset_group)
return self[sensor_path].lower()
def get_file_units(self, dataset_id, ds_info):
@@ -225,34 +273,75 @@ def adjust_scaling_factors(self, factors, file_units, output_units):
return factors
def _generate_file_key(self, ds_id, ds_info, factors=False):
- var_path = ds_info.get('file_key', 'All_Data/{file_group}_All/{calibration}')
+ var_path = ds_info.get('file_key', 'All_Data/{dataset_group}_All/{calibration}')
calibration = {
'radiance': 'Radiance',
'reflectance': 'Reflectance',
'brightness_temperature': 'BrightnessTemperature',
}.get(ds_id.calibration)
- var_path = var_path.format(calibration=calibration, **self.filetype_info)
+ var_path = var_path.format(calibration=calibration, dataset_group=DATASET_KEYS[ds_info['dataset_group']])
+ if ds_id.name in ['dnb_longitude', 'dnb_latitude']:
+ if self.use_tc is True:
+ return var_path + '_TC'
+ elif self.use_tc is None and var_path + '_TC' in self.file_content:
+ return var_path + '_TC'
return var_path
- def get_shape(self, ds_id, ds_info):
- var_path = self._generate_file_key(ds_id, ds_info)
- return self[var_path + "/shape"]
+ @staticmethod
+ def expand_single_values(var, scans):
+        """Expand a single-valued variable to full scan lengths."""
+ if scans.size == 1:
+ return var
+ else:
+ expanded = np.repeat(var, scans)
+ expanded.attrs = var.attrs
+ expanded.rename({expanded.dims[0]: 'y'})
+ return expanded
+
+ def concatenate_dataset(self, dataset_group, var_path):
+ if 'I' in dataset_group:
+ scan_size = 32
+ else:
+ scan_size = 16
+ scans_path = 'All_Data/{dataset_group}_All/NumberOfScans'
+ scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group])
+ start_scan = 0
+ data_chunks = []
+ scans = self[scans_path]
+ variable = self[var_path]
+ # check if these are single per-granule value
+ if variable.size != scans.size:
+ for gscans in scans.values:
+ data_chunks.append(self[var_path].isel(y=slice(start_scan, start_scan + gscans * scan_size)))
+ start_scan += scan_size * 48
+ return xr.concat(data_chunks, 'y')
+ else:
+ return self.expand_single_values(variable, scans)
- def get_dataset(self, dataset_id, ds_info):
- var_path = self._generate_file_key(dataset_id, ds_info)
- factor_var_path = ds_info.get("factors_key", var_path + "Factors")
- data = self[var_path]
+ def mask_fill_values(self, data, ds_info):
is_floating = np.issubdtype(data.dtype, np.floating)
if is_floating:
# If the data is a float then we mask everything <= -999.0
fill_max = float(ds_info.pop("fill_max_float", -999.0))
- data = data.where(data > fill_max)
+ return data.where(data > fill_max)
else:
# If the data is an integer then we mask everything >= fill_min_int
fill_min = int(ds_info.pop("fill_min_int", 65528))
- data = data.where(data < fill_min)
+ return data.where(data < fill_min)
+ def get_dataset(self, dataset_id, ds_info):
+ dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets]
+ if not dataset_group:
+ return
+ else:
+ dataset_group = dataset_group[0]
+ ds_info['dataset_group'] = dataset_group
+ var_path = self._generate_file_key(dataset_id, ds_info)
+ factor_var_path = ds_info.get("factors_key", var_path + "Factors")
+
+ data = self.concatenate_dataset(dataset_group, var_path)
+ data = self.mask_fill_values(data, ds_info)
factors = self.get(factor_var_path)
if factors is None:
LOG.debug("No scaling factors found for %s", dataset_id)
@@ -279,48 +368,105 @@ def get_dataset(self, dataset_id, ds_info):
def get_bounding_box(self):
"""Get the bounding box of this file."""
- path = 'Data_Products/{file_group}/{file_group}_Gran_0/attr/'
- prefix = path.format(**self.filetype_info)
+ from pyproj import Geod
+ geod = Geod(ellps='WGS84')
+ dataset_group = DATASET_KEYS[self.datasets[0]]
+ idx = 0
+ lons_ring = None
+ lats_ring = None
+ while True:
+ path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{idx}/attr/'
+ prefix = path.format(dataset_group=dataset_group, idx=idx)
+ try:
+ lats = self.file_content[prefix + 'G-Ring_Latitude']
+ lons = self.file_content[prefix + 'G-Ring_Longitude']
+ if lons_ring is None:
+ lons_ring = lons
+ lats_ring = lats
+ else:
+ prev_lon = lons_ring[0]
+ prev_lat = lats_ring[0]
+ dists = list(geod.inv(lon, lat, prev_lon, prev_lat)[2] for lon, lat in zip(lons, lats))
+ first_idx = np.argmin(dists)
+ if first_idx == 2 and len(lons) == 8:
+ lons_ring = np.hstack((lons[:3], lons_ring[:-2], lons[4:]))
+ lats_ring = np.hstack((lats[:3], lats_ring[:-2], lats[4:]))
+ else:
+ raise NotImplementedError("Don't know how to handle G-Rings of length %d" % len(lons))
+
+ except KeyError:
+ break
+ idx += 1
+
+ return lons_ring, lats_ring
- lats = self.file_content[prefix + 'G-Ring_Latitude']
- lons = self.file_content[prefix + 'G-Ring_Longitude']
- return lons.ravel(), lats.ravel()
+def split_desired_other(fhs, req_geo, rem_geo):
+ """Split the provided filehandlers *fhs* into desired filehandlers and others."""
+ desired = []
+ other = []
+ for fh in fhs:
+ if req_geo in fh.datasets:
+ desired.append(fh)
+ elif rem_geo in fh.datasets:
+ other.append(fh)
+ return desired, other
class VIIRSSDRReader(FileYAMLReader):
"""Custom file reader for finding VIIRS SDR geolocation at runtime."""
- def __init__(self, config_files, use_tc=True, **kwargs):
+ def __init__(self, config_files, use_tc=None, **kwargs):
"""Initialize file reader and adjust geolocation preferences.
Args:
config_files (iterable): yaml config files passed to base class
- use_tc (boolean): If `True` (default) use the terrain corrected
- file types specified in the config files. If
- `False`, switch all terrain corrected file types
- to non-TC file types. If `None`
+ use_tc (boolean): If `True` use the terrain corrected
+ files. If `False`, switch to non-TC files. If
+ `None` (default), use TC if available, non-TC otherwise.
"""
super(VIIRSSDRReader, self).__init__(config_files, **kwargs)
- for ds_info in self.ids.values():
- ft = ds_info.get('file_type')
- if ft == 'gmtco':
- nontc = 'gmodo'
- elif ft == 'gitco':
- nontc = 'gimgo'
- else:
- continue
+ self.use_tc = use_tc
- if use_tc is None:
- # we want both TC and non-TC
- ds_info['file_type'] = [ds_info['file_type'], nontc]
- elif not use_tc:
- # we want only non-TC
- ds_info['file_type'] = nontc
+ def filter_filenames_by_info(self, filename_items):
+ """Filter out file using metadata from the filenames.
+
+ This sorts out the different lon and lat datasets depending on whether
+ TC is desired or not.
+ """
+ filename_items = list(filename_items)
+ geo_keep = []
+ geo_del = []
+ for filename, filename_info in filename_items:
+ filename_info['datasets'] = datasets = filename_info['datasets'].split('-')
+ if ('GITCO' in datasets) or ('GMTCO' in datasets):
+ if self.use_tc is False:
+ geo_del.append(filename)
+ else:
+ geo_keep.append(filename)
+ elif ('GIMGO' in datasets) or ('GMODO' in datasets):
+ if self.use_tc is True:
+ geo_del.append(filename)
+ else:
+ geo_keep.append(filename)
+ if geo_keep:
+ fdict = dict(filename_items)
+ for to_del in geo_del:
+ for dataset in ['GITCO', 'GMTCO', 'GIMGO', 'GMODO']:
+ try:
+ fdict[to_del]['datasets'].remove(dataset)
+ except ValueError:
+ pass
+ if not fdict[to_del]['datasets']:
+ del fdict[to_del]
+ filename_items = fdict.items()
+ for filename, filename_info in filename_items:
+ filename_info['datasets'] = '-'.join(filename_info['datasets'])
+ return super(VIIRSSDRReader, self).filter_filenames_by_info(filename_items)
def _load_from_geo_ref(self, dsid):
- """Load filenames from the N_GEO_Ref attribute of a dataset's file"""
+ """Load filenames from the N_GEO_Ref attribute of a dataset's file."""
file_handlers = self._get_file_handlers(dsid)
if not file_handlers:
return None
@@ -348,6 +494,54 @@ def _load_from_geo_ref(self, dsid):
return fns
+ def _get_req_rem_geo(self, ds_info):
+ """Find out which geolocation files are needed."""
+ if ds_info['dataset_groups'][0].startswith('GM'):
+ if self.use_tc is False:
+ req_geo = 'GMODO'
+ rem_geo = 'GMTCO'
+ else:
+ req_geo = 'GMTCO'
+ rem_geo = 'GMODO'
+ elif ds_info['dataset_groups'][0].startswith('GI'):
+ if self.use_tc is False:
+ req_geo = 'GIMGO'
+ rem_geo = 'GITCO'
+ else:
+ req_geo = 'GITCO'
+ rem_geo = 'GIMGO'
+ else:
+ raise ValueError('Unknown dataset group %s' % ds_info['dataset_groups'][0])
+ return req_geo, rem_geo
+
+ def get_right_geo_fhs(self, dsid, fhs):
+ """Find the right geographical file handlers for given dataset ID *dsid*."""
+ ds_info = self.ids[dsid]
+ req_geo, rem_geo = self._get_req_rem_geo(ds_info)
+ desired, other = split_desired_other(fhs, req_geo, rem_geo)
+ if desired:
+ try:
+ ds_info['dataset_groups'].remove(rem_geo)
+ except ValueError:
+ pass
+ return desired
+ else:
+ return other
+
+ def _get_file_handlers(self, dsid):
+ """Get the file handler to load this dataset."""
+ ds_info = self.ids[dsid]
+
+ fhs = [fh for fh in self.file_handlers['generic_file']
+ if set(fh.datasets) & set(ds_info['dataset_groups'])]
+ if not fhs:
+ LOG.warning("Required file type '%s' not found or loaded for "
+ "'%s'", ds_info['file_type'], dsid.name)
+ else:
+ if len(set(ds_info['dataset_groups']) & set(['GITCO', 'GIMGO', 'GMTCO', 'GMODO'])) > 1:
+ fhs = self.get_right_geo_fhs(dsid, fhs)
+ return fhs
+
def _get_coordinates_for_dataset_key(self, dsid):
"""Get the coordinate dataset keys for `dsid`.
@@ -356,15 +550,26 @@ def _get_coordinates_for_dataset_key(self, dsid):
"""
coords = super(VIIRSSDRReader, self)._get_coordinates_for_dataset_key(dsid)
for c_id in coords:
- c_file_type = self.ids[c_id]['file_type']
- if self._preferred_filetype(c_file_type):
- # coordinate has its file type loaded already
+ c_info = self.ids[c_id] # c_info['dataset_groups'] should be a list of 2 elements
+ self._get_file_handlers(c_id)
+ if len(c_info['dataset_groups']) == 1: # filtering already done
+ continue
+ try:
+ req_geo, rem_geo = self._get_req_rem_geo(c_info)
+ except ValueError: # DNB
continue
# check the dataset file for the geolocation filename
geo_filenames = self._load_from_geo_ref(dsid)
if not geo_filenames:
- continue
+ c_info['dataset_groups'] = [rem_geo]
+ else:
+ # concatenate all values
+ new_fhs = sum(self.create_filehandlers(geo_filenames).values(), [])
+ desired, other = split_desired_other(new_fhs, req_geo, rem_geo)
+ if desired:
+ c_info['dataset_groups'].remove(rem_geo)
+ else:
+ c_info['dataset_groups'].remove(req_geo)
- self.create_filehandlers(geo_filenames)
return coords
diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py
index 7a35d57fe0..dbbb0d2195 100644
--- a/satpy/readers/yaml_reader.py
+++ b/satpy/readers/yaml_reader.py
@@ -503,7 +503,7 @@ def create_filehandlers(self, filenames, fh_kwargs=None):
self.info.setdefault('filenames', []).extend(filenames)
filename_set = set(filenames)
-
+ created_fhs = {}
# load files that we know about by creating the file handlers
for filetype, filetype_info in self.sorted_filetype_items():
filehandlers = self.new_filehandlers_for_filetype(filetype_info,
@@ -512,8 +512,9 @@ def create_filehandlers(self, filenames, fh_kwargs=None):
filename_set -= set([fhd.filename for fhd in filehandlers])
if filehandlers:
+ created_fhs[filetype] = filehandlers
self.file_handlers[filetype] = sorted(
- filehandlers,
+ self.file_handlers.get(filetype, []) + filehandlers,
key=lambda fhd: (fhd.start_time, fhd.filename))
# update existing dataset IDs with information from the file handler
@@ -521,6 +522,7 @@ def create_filehandlers(self, filenames, fh_kwargs=None):
# load any additional dataset IDs determined dynamically from the file
self.add_ds_ids_from_files()
+ return created_fhs
def update_ds_ids_from_file_handlers(self):
"""Update DatasetIDs with information from loaded files.
diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py
index 6da8f2c873..d5e4e7bbea 100644
--- a/satpy/tests/reader_tests/test_ahi_hsd.py
+++ b/satpy/tests/reader_tests/test_ahi_hsd.py
@@ -236,6 +236,7 @@ def test_read_band(self, calibrate, *mocks):
ref_mask = np.logical_not(get_geostationary_mask(self.fh.area).compute())
self.assertTrue(np.all(mask == ref_mask))
+
def suite():
"""The test suite for test_scene."""
loader = unittest.TestLoader()
diff --git a/satpy/tests/reader_tests/test_goes_imager_nc.py b/satpy/tests/reader_tests/test_goes_imager_nc.py
index 7fd866c7b6..a665054c0d 100644
--- a/satpy/tests/reader_tests/test_goes_imager_nc.py
+++ b/satpy/tests/reader_tests/test_goes_imager_nc.py
@@ -1,5 +1,24 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2018 Pytroll Developers
+
+# Author(s):
+
+#
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+ # along with this program.  If not, see <http://www.gnu.org/licenses/>.
import datetime
-import logging
import sys
diff --git a/satpy/tests/reader_tests/test_viirs_sdr.py b/satpy/tests/reader_tests/test_viirs_sdr.py
index 2426fcdf12..c5f9004f4b 100644
--- a/satpy/tests/reader_tests/test_viirs_sdr.py
+++ b/satpy/tests/reader_tests/test_viirs_sdr.py
@@ -28,91 +28,131 @@
DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
+DATASET_KEYS = {'GDNBO': 'VIIRS-DNB-GEO',
+ 'SVDNB': 'VIIRS-DNB-SDR',
+ 'GITCO': 'VIIRS-IMG-GEO-TC',
+ 'GIMGO': 'VIIRS-IMG-GEO',
+ 'SVI01': 'VIIRS-I1-SDR',
+ 'SVI02': 'VIIRS-I2-SDR',
+ 'SVI03': 'VIIRS-I3-SDR',
+ 'SVI04': 'VIIRS-I4-SDR',
+ 'SVI05': 'VIIRS-I5-SDR',
+ 'GMTCO': 'VIIRS-MOD-GEO-TC',
+ 'GMODO': 'VIIRS-MOD-GEO',
+ 'SVM01': 'VIIRS-M1-SDR',
+ 'SVM02': 'VIIRS-M2-SDR',
+ 'SVM03': 'VIIRS-M3-SDR',
+ 'SVM04': 'VIIRS-M4-SDR',
+ 'SVM05': 'VIIRS-M5-SDR',
+ 'SVM06': 'VIIRS-M6-SDR',
+ 'SVM07': 'VIIRS-M7-SDR',
+ 'SVM08': 'VIIRS-M8-SDR',
+ 'SVM09': 'VIIRS-M9-SDR',
+ 'SVM10': 'VIIRS-M10-SDR',
+ 'SVM11': 'VIIRS-M11-SDR',
+ 'SVM12': 'VIIRS-M12-SDR',
+ 'SVM13': 'VIIRS-M13-SDR',
+ 'SVM14': 'VIIRS-M14-SDR',
+ 'SVM15': 'VIIRS-M15-SDR',
+ 'SVM16': 'VIIRS-M16-SDR',
+ }
+
class FakeHDF5FileHandler2(FakeHDF5FileHandler):
"""Swap-in HDF5 File Handler"""
+
+ def __init__(self, filename, filename_info, filetype_info, use_tc=None):
+ super(FakeHDF5FileHandler2, self).__init__(filename, filename_info, filetype_info)
+ self.datasets = filename_info['datasets'].split('-')
+ self.use_tc = use_tc
+
def get_test_content(self, filename, filename_info, filetype_info):
"""Mimic reader input file content"""
start_time = filename_info['start_time']
end_time = filename_info['end_time'].replace(year=start_time.year,
month=start_time.month,
day=start_time.day)
-
- prefix1 = 'Data_Products/{file_group}'.format(**filetype_info)
- prefix2 = '{prefix}/{file_group}_Aggr'.format(prefix=prefix1, **filetype_info)
- prefix3 = 'All_Data/{file_group}_All'.format(**filetype_info)
- begin_date = start_time.strftime('%Y%m%d')
- begin_time = start_time.strftime('%H%M%S.%fZ')
- ending_date = end_time.strftime('%Y%m%d')
- ending_time = end_time.strftime('%H%M%S.%fZ')
- if filename[:3] == 'SVI':
- geo_prefix = 'GIMGO'
- elif filename[:3] == 'SVM':
- geo_prefix = 'GMODO'
- else:
- geo_prefix = None
- file_content = {
- "{prefix2}/attr/AggregateBeginningDate": begin_date,
- "{prefix2}/attr/AggregateBeginningTime": begin_time,
- "{prefix2}/attr/AggregateEndingDate": ending_date,
- "{prefix2}/attr/AggregateEndingTime": ending_time,
- "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]),
- "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]),
- "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info['orbit']),
- "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info['orbit']),
- "{prefix1}/attr/Instrument_Short_Name": "VIIRS",
- "/attr/Platform_Short_Name": "NPP",
- }
- if geo_prefix:
- file_content['/attr/N_GEO_Ref'] = geo_prefix + filename[5:]
- for k, v in list(file_content.items()):
- file_content[k.format(prefix1=prefix1, prefix2=prefix2)] = v
-
- if filename[:3] in ['SVM', 'SVI', 'SVD']:
- if filename[2:5] in ['M{:02d}'.format(x) for x in range(12)] + ['I01', 'I02', 'I03']:
- keys = ['Radiance', 'Reflectance']
- elif filename[2:5] in ['M{:02d}'.format(x) for x in range(12, 17)] + ['I04', 'I05']:
- keys = ['Radiance', 'BrightnessTemperature']
+ final_content = {}
+ for dataset in self.datasets:
+ dataset_group = DATASET_KEYS[dataset]
+ prefix1 = 'Data_Products/{dataset_group}'.format(dataset_group=dataset_group)
+ prefix2 = '{prefix}/{dataset_group}_Aggr'.format(prefix=prefix1, dataset_group=dataset_group)
+ prefix3 = 'All_Data/{dataset_group}_All'.format(dataset_group=dataset_group)
+ begin_date = start_time.strftime('%Y%m%d')
+ begin_time = start_time.strftime('%H%M%S.%fZ')
+ ending_date = end_time.strftime('%Y%m%d')
+ ending_time = end_time.strftime('%H%M%S.%fZ')
+ if filename[:3] == 'SVI':
+ geo_prefix = 'GIMGO'
+ elif filename[:3] == 'SVM':
+ geo_prefix = 'GMODO'
else:
- # DNB
- keys = ['Radiance']
-
- for k in keys:
- k = prefix3 + "/" + k
- file_content[k] = DEFAULT_FILE_DATA.copy()
- file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
- file_content[k + "Factors"] = DEFAULT_FILE_FACTORS.copy()
- elif filename[0] == 'G':
- if filename[:5] in ['GMODO', 'GIMGO']:
- lon_data = np.linspace(15, 55, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
- lat_data = np.linspace(55, 75, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
- else:
- lon_data = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
- lat_data = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
-
- for k in ["Latitude"]:
- k = prefix3 + "/" + k
- file_content[k] = lat_data
- file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0)
- file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
- for k in ["Longitude"]:
- k = prefix3 + "/" + k
- file_content[k] = lon_data
- file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0)
- file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
+ geo_prefix = None
+ file_content = {
+ "{prefix3}/NumberOfScans": np.array([48]),
+ "{prefix2}/attr/AggregateBeginningDate": begin_date,
+ "{prefix2}/attr/AggregateBeginningTime": begin_time,
+ "{prefix2}/attr/AggregateEndingDate": ending_date,
+ "{prefix2}/attr/AggregateEndingTime": ending_time,
+ "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]),
+ "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]),
+ "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info['orbit']),
+ "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info['orbit']),
+ "{prefix1}/attr/Instrument_Short_Name": "VIIRS",
+ "/attr/Platform_Short_Name": "NPP",
+ }
+ if geo_prefix:
+ file_content['/attr/N_GEO_Ref'] = geo_prefix + filename[5:]
+ for k, v in list(file_content.items()):
+ file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v
+
+ if filename[:3] in ['SVM', 'SVI', 'SVD']:
+ if filename[2:5] in ['M{:02d}'.format(x) for x in range(12)] + ['I01', 'I02', 'I03']:
+ keys = ['Radiance', 'Reflectance']
+ elif filename[2:5] in ['M{:02d}'.format(x) for x in range(12, 17)] + ['I04', 'I05']:
+ keys = ['Radiance', 'BrightnessTemperature']
+ else:
+ # DNB
+ keys = ['Radiance']
+
+ for k in keys:
+ k = prefix3 + "/" + k
+ file_content[k] = DEFAULT_FILE_DATA.copy()
+ file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
+ file_content[k + "Factors"] = DEFAULT_FILE_FACTORS.copy()
+ elif filename[0] == 'G':
+ if filename[:5] in ['GMODO', 'GIMGO']:
+ lon_data = np.linspace(15, 55, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+ lat_data = np.linspace(55, 75, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+ else:
+ lon_data = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+ lat_data = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+
+ for k in ["Latitude"]:
+ k = prefix3 + "/" + k
+ file_content[k] = lat_data
+ file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0)
+ file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
+ for k in ["Longitude"]:
+ k = prefix3 + "/" + k
+ file_content[k] = lon_data
+ file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0)
+ file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
+
+ final_content.update(file_content)
# convert to xarrays
from xarray import DataArray
import dask.array as da
- for key, val in file_content.items():
+ for key, val in final_content.items():
if isinstance(val, np.ndarray):
val = da.from_array(val, chunks=val.shape)
if val.ndim > 1:
- file_content[key] = DataArray(val, dims=('y', 'x'))
+ final_content[key] = DataArray(val, dims=('y', 'x'))
else:
- file_content[key] = DataArray(val)
+ final_content[key] = DataArray(val)
- return file_content
+ return final_content
class TestVIIRSSDRReader(unittest.TestCase):
@@ -351,7 +391,7 @@ def test_load_all_m_reflectances_use_nontc(self):
self.assertEqual(d.attrs['area'].lats.min(), 55)
def test_load_all_m_reflectances_use_nontc2(self):
- """Load all M band reflectances but use non-TC geolocation (use_tc=None)"""
+ """Load all M band reflectances but use non-TC geolocation because TC isn't available"""
from satpy.readers import load_reader
r = load_reader(self.reader_configs, use_tc=None)
loadables = r.select_files_from_pathnames([
@@ -496,11 +536,215 @@ def test_load_i_no_files(self):
self.assertEqual(len(ds), 0)
+class FakeHDF5FileHandlerAggr(FakeHDF5FileHandler):
+ """Swap-in HDF5 File Handler"""
+
+ def __init__(self, filename, filename_info, filetype_info, use_tc=None):
+ super(FakeHDF5FileHandlerAggr, self).__init__(filename, filename_info, filetype_info)
+ self.datasets = filename_info['datasets'].split('-')
+ self.use_tc = use_tc
+
+ def get_test_content(self, filename, filename_info, filetype_info):
+ """Mimic reader input file content"""
+ start_time = filename_info['start_time']
+ end_time = filename_info['end_time'].replace(year=start_time.year,
+ month=start_time.month,
+ day=start_time.day)
+ final_content = {}
+ for dataset in self.datasets:
+ dataset_group = DATASET_KEYS[dataset]
+ prefix1 = 'Data_Products/{dataset_group}'.format(dataset_group=dataset_group)
+ prefix2 = '{prefix}/{dataset_group}_Aggr'.format(prefix=prefix1, dataset_group=dataset_group)
+ prefix3 = 'All_Data/{dataset_group}_All'.format(dataset_group=dataset_group)
+ begin_date = start_time.strftime('%Y%m%d')
+ begin_time = start_time.strftime('%H%M%S.%fZ')
+ ending_date = end_time.strftime('%Y%m%d')
+ ending_time = end_time.strftime('%H%M%S.%fZ')
+ if filename[:3] == 'SVI':
+ geo_prefix = 'GIMGO'
+ elif filename[:3] == 'SVM':
+ geo_prefix = 'GMODO'
+ else:
+ geo_prefix = None
+ file_content = {
+ "{prefix3}/NumberOfScans": np.array([48, 48, 48, 48]),
+ "{prefix2}/attr/AggregateBeginningDate": begin_date,
+ "{prefix2}/attr/AggregateBeginningTime": begin_time,
+ "{prefix2}/attr/AggregateEndingDate": ending_date,
+ "{prefix2}/attr/AggregateEndingTime": ending_time,
+ "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]),
+ "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]),
+ "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info['orbit']),
+ "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info['orbit']),
+ "{prefix1}/attr/Instrument_Short_Name": "VIIRS",
+ "/attr/Platform_Short_Name": "NPP",
+ }
+
+ lats_lists = [
+ np.array(
+ [
+ 67.969505, 65.545685, 63.103046, 61.853905, 55.169273,
+ 57.062447, 58.86063, 66.495514
+ ],
+ dtype=np.float32),
+ np.array(
+ [
+ 72.74879, 70.2493, 67.84738, 66.49691, 58.77254,
+ 60.465942, 62.11525, 71.08249
+ ],
+ dtype=np.float32),
+ np.array(
+ [
+ 77.393425, 74.977875, 72.62976, 71.083435, 62.036346,
+ 63.465122, 64.78075, 75.36842
+ ],
+ dtype=np.float32),
+ np.array(
+ [
+ 81.67615, 79.49934, 77.278656, 75.369415, 64.72178,
+ 65.78417, 66.66166, 79.00025
+ ],
+ dtype=np.float32)
+ ]
+ lons_lists = [
+ np.array(
+ [
+ 50.51393, 49.566296, 48.865967, 18.96082, -4.0238385,
+ -7.05221, -10.405702, 14.638646
+ ],
+ dtype=np.float32),
+ np.array(
+ [
+ 53.52594, 51.685738, 50.439102, 14.629087, -10.247547,
+ -13.951393, -18.256989, 8.36572
+ ],
+ dtype=np.float32),
+ np.array(
+ [
+ 59.386833, 55.770416, 53.38952, 8.353765, -18.062435,
+ -22.608992, -27.867302, -1.3537619
+ ],
+ dtype=np.float32),
+ np.array(
+ [
+ 72.50243, 64.17125, 59.15234, -1.3654504, -27.620953,
+ -33.091743, -39.28113, -17.749891
+ ],
+ dtype=np.float32)
+ ]
+
+ for granule in range(4):
+ prefix_gran = '{prefix}/{dataset_group}_Gran_{idx}'.format(prefix=prefix1,
+ dataset_group=dataset_group,
+ idx=granule)
+ file_content[prefix_gran + '/attr/G-Ring_Longitude'] = lons_lists[granule]
+ file_content[prefix_gran + '/attr/G-Ring_Latitude'] = lats_lists[granule]
+ if geo_prefix:
+ file_content['/attr/N_GEO_Ref'] = geo_prefix + filename[5:]
+ for k, v in list(file_content.items()):
+ file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v
+
+ if filename[:3] in ['SVM', 'SVI', 'SVD']:
+ if filename[2:5] in ['M{:02d}'.format(x) for x in range(12)] + ['I01', 'I02', 'I03']:
+ keys = ['Radiance', 'Reflectance']
+ elif filename[2:5] in ['M{:02d}'.format(x) for x in range(12, 17)] + ['I04', 'I05']:
+ keys = ['Radiance', 'BrightnessTemperature']
+ else:
+ # DNB
+ keys = ['Radiance']
+
+ for k in keys:
+ k = prefix3 + "/" + k
+ file_content[k] = DEFAULT_FILE_DATA.copy()
+ file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
+ file_content[k + "Factors"] = DEFAULT_FILE_FACTORS.copy()
+ elif filename[0] == 'G':
+ if filename[:5] in ['GMODO', 'GIMGO']:
+ lon_data = np.linspace(15, 55, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+ lat_data = np.linspace(55, 75, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+ else:
+ lon_data = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+ lat_data = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+
+ for k in ["Latitude"]:
+ k = prefix3 + "/" + k
+ file_content[k] = lat_data
+ file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0)
+ file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
+ for k in ["Longitude"]:
+ k = prefix3 + "/" + k
+ file_content[k] = lon_data
+ file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0)
+ file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
+
+ final_content.update(file_content)
+
+ # convert to xarrays
+ from xarray import DataArray
+ import dask.array as da
+ for key, val in final_content.items():
+ if isinstance(val, np.ndarray):
+ val = da.from_array(val, chunks=val.shape)
+ if val.ndim > 1:
+ final_content[key] = DataArray(val, dims=('y', 'x'))
+ else:
+ final_content[key] = DataArray(val)
+
+ return final_content
+
+
+class TestAggrVIIRSSDRReader(unittest.TestCase):
+ """Test VIIRS SDR Reader"""
+ yaml_file = "viirs_sdr.yaml"
+
+ def setUp(self):
+ """Wrap HDF5 file handler with our own fake handler"""
+ from satpy.config import config_search_paths
+ from satpy.readers.viirs_sdr import VIIRSSDRFileHandler
+ self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
+ # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
+ self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandlerAggr,))
+ self.fake_handler = self.p.start()
+ self.p.is_local = True
+
+ def tearDown(self):
+ """Stop wrapping the HDF5 file handler"""
+ self.p.stop()
+
+ def test_bounding_box(self):
+ """Test bounding box."""
+ from satpy.readers import load_reader
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames([
+ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
+ ])
+ r.create_filehandlers(loadables)
+ # expected bounding box merged from the four granules' G-rings
+ expected_lons = [
+ 72.50243, 64.17125, 59.15234, 59.386833, 55.770416, 53.38952, 53.52594, 51.685738, 50.439102, 50.51393,
+ 49.566296, 48.865967, 18.96082, -4.0238385, -7.05221, -10.247547, -13.951393, -18.062435, -22.608992,
+ -27.620953, -33.091743, -39.28113, -17.749891
+ ]
+ expected_lats = [
+ 81.67615, 79.49934, 77.278656, 77.393425, 74.977875, 72.62976, 72.74879, 70.2493, 67.84738, 67.969505,
+ 65.545685, 63.103046, 61.853905, 55.169273, 57.062447, 58.77254, 60.465942, 62.036346, 63.465122,
+ 64.72178, 65.78417, 66.66166, 79.00025
+ ]
+ lons, lats = r.file_handlers['generic_file'][0].get_bounding_box()
+ np.testing.assert_allclose(lons, expected_lons)
+ np.testing.assert_allclose(lats, expected_lats)
+
+
def suite():
"""The test suite for test_viirs_sdr.
"""
loader = unittest.TestLoader()
mysuite = unittest.TestSuite()
mysuite.addTest(loader.loadTestsFromTestCase(TestVIIRSSDRReader))
+ mysuite.addTest(loader.loadTestsFromTestCase(TestAggrVIIRSSDRReader))
return mysuite
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/satpy/writers/geotiff.py b/satpy/writers/geotiff.py
index ec8fb64a17..1d2a04d7c8 100644
--- a/satpy/writers/geotiff.py
+++ b/satpy/writers/geotiff.py
@@ -154,7 +154,7 @@ def _delayed_create(create_opts, datasets, area, start_time, tags):
# Create raster GeoTransform based on upper left corner and pixel
# resolution ... if not overwritten by argument geotransform.
- if "area" is None:
+ if area is None:
LOG.warning("No 'area' metadata found in image")
else:
self._gdal_write_geo(dst_ds, area)