+    Reader for EUMETSAT EPS-SG Visible/Infrared Imager Level 1B Radiance files in NetCDF4 format.
+  # EUMETSAT EPS-SG Visible/Infrared Imager Level 1B Radiance files in NetCDF4 format
chan_solar_index: 1 + wavelength: [0.545, 0.555, 0.565] + + vii_668: + name: vii_668 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_668 + coordinates: [lon_pixels, lat_pixels] + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_solar_index: 2 + wavelength: [0.658, 0.668, 0.678] + + vii_752: + name: vii_752 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_752 + coordinates: [lon_pixels, lat_pixels] + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_solar_index: 3 + wavelength: [0.7465, 0.7515, 0.7565] + + vii_763: + name: vii_763 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_763 + coordinates: [lon_pixels, lat_pixels] + calibration: [reflectance, radiance] + chan_solar_index: 4 + wavelength: [0.75695, 0.7627, 0.76845] + + vii_865: + name: vii_865 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_865 + coordinates: [lon_pixels, lat_pixels] + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_solar_index: 5 + wavelength: [0.855, 0.865, 0.875] + + vii_914: + name: vii_914 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_914 + coordinates: [lon_pixels, lat_pixels] + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_solar_index: 6 + wavelength: [0.904, 0.914, 0.924] + + vii_1240: + name: vii_1240 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_1240 + coordinates: [lon_pixels, lat_pixels] + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: 
toa_outgoing_radiance_per_unit_wavelength + chan_solar_index: 7 + wavelength: [1.230, 1.240, 1.250] + + vii_1375: + name: vii_1375 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_1375 + coordinates: [lon_pixels, lat_pixels] + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_solar_index: 8 + wavelength: [1.355, 1.375, 1.395] + + vii_1630: + name: vii_1630 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_1630 + coordinates: [lon_pixels, lat_pixels] + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_solar_index: 9 + wavelength: [1.620, 1.630, 1.640] + + vii_2250: + name: vii_2250 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_2250 + coordinates: [lon_pixels, lat_pixels] + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_solar_index: 10 + wavelength: [2.225, 2.250, 2.275] + + vii_3740: + name: vii_3740 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_3740 + coordinates: [lon_pixels, lat_pixels] + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_thermal_index: 0 + wavelength: [3.650, 3.740, 3.830] + + vii_3959: + name: vii_3959 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_3959 + coordinates: [lon_pixels, lat_pixels] + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_thermal_index: 1 + wavelength: [3.929, 3.959, 3.989] + + vii_4050: + name: vii_4050 + file_type: nc_vii_l1b_rad + file_key: 
data/measurement_data/vii_4050 + coordinates: [lon_pixels, lat_pixels] + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_thermal_index: 2 + wavelength: [4.020, 4.050, 4.080] + + vii_6725: + name: vii_6725 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_6725 + coordinates: [lon_pixels, lat_pixels] + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_thermal_index: 3 + wavelength: [6.540, 6.725, 6.910] + + vii_7325: + name: vii_7325 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_7325 + coordinates: [lon_pixels, lat_pixels] + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_thermal_index: 4 + wavelength: [7.180, 7.325, 7.470] + + vii_8540: + name: vii_8540 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_8540 + coordinates: [lon_pixels, lat_pixels] + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_thermal_index: 5 + wavelength: [8.395, 8.540, 8.685] + + vii_10690: + name: vii_10690 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_10690 + coordinates: [lon_pixels, lat_pixels] + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_thermal_index: 6 + wavelength: [10.440, 10.690, 10.940] + + vii_12020: + name: vii_12020 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_12020 + coordinates: [lon_pixels, lat_pixels] + calibration: + brightness_temperature: + standard_name: 
toa_brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_thermal_index: 7 + wavelength: [11.770, 12.020, 12.270] + + vii_13345: + name: vii_13345 + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/vii_13345 + coordinates: [lon_pixels, lat_pixels] + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + chan_thermal_index: 8 + wavelength: [13.190, 13.345, 13.500] + + # --- Geometric data --- + # TODO Geometric data on tie points are kept for test purposes + solar_zenith_tie_points: + name: solar_zenith_tie_points + standard_name: solar_zenith_angle + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/solar_zenith + coordinates: [lon_tie_points, lat_tie_points] + + solar_azimuth_tie_points: + name: solar_azimuth_tie_points + standard_name: solar_azimuth_angle + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/solar_azimuth + coordinates: [lon_tie_points, lat_tie_points] + + observation_zenith_tie_points: + name: observation_zenith_tie_points + standard_name: sensor_zenith_angle + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/observation_zenith + coordinates: [lon_tie_points, lat_tie_points] + + observation_azimuth_tie_points: + name: observation_azimuth_tie_points + standard_name: sensor_azimuth_angle + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/observation_azimuth + coordinates: [lon_tie_points, lat_tie_points] + + solar_zenith: + name: solar_zenith + standard_name: solar_zenith_angle + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/solar_zenith + interpolate: True + coordinates: [lon_pixels, lat_pixels] + + solar_azimuth: + name: solar_azimuth + standard_name: solar_azimuth_angle + file_type: nc_vii_l1b_rad + file_key: data/measurement_data/solar_azimuth + interpolate: True + coordinates: [lon_pixels, lat_pixels] + + 
+  delta_lon_E_dem:
+    name: delta_lon_E_dem
+    file_type: nc_vii_l1b_rad
+    file_key: data/measurement_data/delta_lon_E_dem
+    coordinates: [lon_pixels, lat_pixels]
+    standard_name: parallax_delta_longitude
+ +"""EUMETSAT EPS-SG Visible/Infrared Imager (VII) readers base class.""" + +import logging + +from datetime import datetime + +from satpy.readers.netcdf_utils import NetCDF4FileHandler +from satpy.readers.vii_utils import SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR +from geotiepoints.viiinterpolator import tie_points_interpolation, tie_points_geo_interpolation + +logger = logging.getLogger(__name__) + + +class ViiNCBaseFileHandler(NetCDF4FileHandler): + """Base reader class for VII products in netCDF format.""" + + def __init__(self, filename, filename_info, filetype_info, orthorect=False): + """Prepare the class for dataset reading.""" + super().__init__(filename, filename_info, filetype_info, auto_maskandscale=True) + + # Saves the orthorectification flag + self.orthorect = orthorect and filetype_info.get('orthorect', True) + + # Saves the interpolation flag + self.interpolate = filetype_info.get('interpolate', True) + + try: + longitude = self[filetype_info['cached_longitude']] + latitude = self[filetype_info['cached_latitude']] + + if self.interpolate: + self.longitude, self.latitude = self._perform_geo_interpolation(longitude, latitude) + else: + self.longitude, self.latitude = longitude, latitude + + except KeyError: + logger.warning("Cached longitude and/or latitude datasets are not correctly defined in YAML file") + self.longitude, self.latitude = None, None + + def get_dataset(self, dataset_id, dataset_info): + """Get dataset using file_key in dataset_info.""" + var_key = dataset_info['file_key'] + logger.debug('Reading in file to get dataset with key %s.', var_key) + + if var_key == 'cached_longitude' and self.longitude is not None: + variable = self.longitude.copy() + elif var_key == 'cached_latitude' and self.latitude is not None: + variable = self.latitude.copy() + else: + try: + variable = self[var_key] + except KeyError: + logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key) + return None + + # If the dataset is 
+            DataArray: array containing the interpolated values, all the original metadata
+            and the updated dimension names.
+        """Perform the interpolation of geographic coordinates from tie points to pixel points.
+
+        Args:
+            longitude: xarray DataArray containing the longitude dataset to interpolate.
+            latitude: xarray DataArray containing the latitude dataset to interpolate.
+
+        Returns:
+            tuple of arrays containing the interpolated values, all the original metadata
+            and the updated dimension names.
self.filename_info['sensing_start_time'], + 'filename_end_time': self.filename_info['sensing_end_time'], + 'platform_name': self.spacecraft_name, + } + + # Add a "quality_group" item to the dictionary with all the variables and attributes + # which are found in the 'quality' group of the VII product + quality_group = self['quality'] + quality_dict = {} + for key in quality_group: + # Add the values (as Numpy array) of each variable in the group where possible + try: + quality_dict[key] = quality_group[key].values + except ValueError: + quality_dict[key] = None + # Add the attributes of the quality group + quality_dict.update(quality_group.attrs) + + attributes['quality_group'] = quality_dict + + return attributes + + @property + def start_time(self): + """Get observation start time.""" + return datetime.strptime(self['/attr/sensing_start_time_utc'], '%Y%m%d%H%M%S.%f') + + @property + def end_time(self): + """Get observation end time.""" + return datetime.strptime(self['/attr/sensing_end_time_utc'], '%Y%m%d%H%M%S.%f') + + @property + def spacecraft_name(self): + """Return spacecraft name.""" + return self['/attr/spacecraft'] + + @property + def sensor(self): + """Return sensor.""" + return self['/attr/instrument'] + + @property + def ssp_lon(self): + """Return subsatellite point longitude.""" + # This parameter is not applicable to VII + return None diff --git a/satpy/readers/vii_l1b_nc.py b/satpy/readers/vii_l1b_nc.py new file mode 100644 index 0000000000..6cecaa5b75 --- /dev/null +++ b/satpy/readers/vii_l1b_nc.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) 2019 Satpy developers +# +# satpy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# satpy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with satpy. If not, see . + +"""EUMETSAT EPS-SG Visible/Infrared Imager (VII) Level 1B products reader.""" + +import logging +import numpy as np + +from satpy.readers.vii_base_nc import ViiNCBaseFileHandler +from satpy.readers.vii_utils import C1, C2, MEAN_EARTH_RADIUS + +logger = logging.getLogger(__name__) + + +class ViiL1bNCFileHandler(ViiNCBaseFileHandler): + """Reader class for VII L1B products in netCDF format.""" + + def __init__(self, filename, filename_info, filetype_info, **kwargs): + """Read the calibration data and prepare the class for dataset reading.""" + super().__init__(filename, filename_info, filetype_info, **kwargs) + + # Read the variables which are required for the calibration + self._bt_conversion_a = self['data/calibration_data/bt_conversion_a'].values + self._bt_conversion_b = self['data/calibration_data/bt_conversion_b'].values + self._channel_cw_thermal = self['data/calibration_data/channel_cw_thermal'].values + self._integrated_solar_irradiance = self['data/calibration_data/integrated_solar_irradiance'].values + # Computes the angle factor for reflectance calibration as inverse of cosine of solar zenith angle + # (the values in the product file are on tie points and in degrees, + # therefore interpolation and conversion to radians are required) + solar_zenith_angle = self['data/measurement_data/solar_zenith'] + solar_zenith_angle_on_pixels = self._perform_interpolation(solar_zenith_angle) + solar_zenith_angle_on_pixels_radians = np.radians(solar_zenith_angle_on_pixels) + self.angle_factor = 1.0 / (np.cos(solar_zenith_angle_on_pixels_radians)) + + def _perform_calibration(self, variable, dataset_info): + """Perform the 
calibration. + + Args: + variable: xarray DataArray containing the dataset to calibrate. + dataset_info: dictionary of information about the dataset. + + Returns: + DataArray: array containing the calibrated values and all the original metadata. + + """ + calibration_name = dataset_info['calibration'] + if calibration_name == 'brightness_temperature': + # Extract the values of calibration coefficients for the current channel + chan_index = dataset_info['chan_thermal_index'] + cw = self._channel_cw_thermal[chan_index] + a = self._bt_conversion_a[chan_index] + b = self._bt_conversion_b[chan_index] + # Perform the calibration + calibrated_variable = self._calibrate_bt(variable, cw, a, b) + calibrated_variable.attrs = variable.attrs + elif calibration_name == 'reflectance': + # Extract the values of calibration coefficients for the current channel + chan_index = dataset_info['chan_solar_index'] + isi = self._integrated_solar_irradiance[chan_index] + # Perform the calibration + calibrated_variable = self._calibrate_refl(variable, self.angle_factor, isi) + calibrated_variable.attrs = variable.attrs + elif calibration_name == 'radiance': + calibrated_variable = variable + else: + raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info['name'])) + + return calibrated_variable + + def _perform_orthorectification(self, variable, orthorect_data_name): + """Perform the orthorectification. + + Args: + variable: xarray DataArray containing the dataset to correct for orthorectification. + orthorect_data_name: name of the orthorectification correction data in the product. + + Returns: + DataArray: array containing the corrected values and all the original metadata. 
+ + """ + try: + orthorect_data = self[orthorect_data_name] + # Convert the orthorectification delta values from meters to degrees + # based on the simplified formula using mean Earth radius + variable += np.degrees(orthorect_data / MEAN_EARTH_RADIUS) + except KeyError: + logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name) + return variable + + @staticmethod + def _calibrate_bt(radiance, cw, a, b): + """Perform the calibration to brightness temperature. + + Args: + radiance: numpy ndarray containing the radiance values. + cw: center wavelength [μm]. + a: temperature coefficient [-]. + b: temperature coefficient [K]. + + Returns: + numpy ndarray: array containing the calibrated brightness temperature values. + + """ + log_expr = np.log(1.0 + C1 / ((cw ** 5) * radiance)) + bt_values = b + (a * C2 / (cw * log_expr)) + return bt_values + + @staticmethod + def _calibrate_refl(radiance, angle_factor, isi): + """Perform the calibration to reflectance. + + Args: + radiance: numpy ndarray containing the radiance values. + angle_factor: numpy ndarray containing the inverse of cosine of solar zenith angle [-]. + isi: integrated solar irradiance [W/(m2 * μm)]. + + Returns: + numpy ndarray: array containing the calibrated reflectance values. + + """ + refl_values = (np.pi / isi) * angle_factor * radiance + return refl_values diff --git a/satpy/readers/vii_utils.py b/satpy/readers/vii_utils.py new file mode 100644 index 0000000000..a493f7b9be --- /dev/null +++ b/satpy/readers/vii_utils.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) 2019 Satpy developers +# +# satpy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# satpy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with satpy. If not, see . + +"""Utilities for the management of VII products.""" + + +# PLANCK COEFFICIENTS FOR CALIBRATION AS DEFINED BY EUMETSAT +C1 = 1.191062e+8 # [W/m2·sr-1·µm4] +C2 = 1.4387863e+4 # [K·µm] + +# CONSTANTS DEFINING THE TIE POINTS +TIE_POINTS_FACTOR = 8 # Sub-sampling factor of tie points wrt pixel points +SCAN_ALT_TIE_POINTS = 4 # Number of tie points along the satellite track for each scan + +# MEAN EARTH RADIUS AS DEFINED BY IUGG +MEAN_EARTH_RADIUS = 6371008.7714 # [m] diff --git a/satpy/tests/reader_tests/test_vii_base_nc.py b/satpy/tests/reader_tests/test_vii_base_nc.py new file mode 100644 index 0000000000..e13a4597ab --- /dev/null +++ b/satpy/tests/reader_tests/test_vii_base_nc.py @@ -0,0 +1,407 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) 2019 Satpy developers +# +# satpy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# satpy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with satpy. If not, see . 
+ +"""The vii_base_nc reader tests package.""" + +import os +import numpy as np +import xarray as xr +import datetime +from netCDF4 import Dataset + +from satpy.readers.vii_base_nc import ViiNCBaseFileHandler, SCAN_ALT_TIE_POINTS, \ + TIE_POINTS_FACTOR + +import unittest + +try: + from unittest import mock +except ImportError: + import mock + +TEST_FILE = 'test_file_vii_base_nc.nc' + + +class TestViiNCBaseFileHandler(unittest.TestCase): + """Test the ViiNCBaseFileHandler reader.""" + + @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_geo_interpolation') + def setUp(self, pgi_): + """Set up the test.""" + # Easiest way to test the reader is to create a test netCDF file on the fly + with Dataset(TEST_FILE, 'w') as nc: + # Add global attributes + nc.sensing_start_time_utc = "20170920173040.888" + nc.sensing_end_time_utc = "20170920174117.555" + nc.spacecraft = "test_spacecraft" + nc.instrument = "test_instrument" + + # Create data group + g1 = nc.createGroup('data') + + # Add dimensions to data group + g1.createDimension('num_pixels', 10) + g1.createDimension('num_lines', 100) + + # Create data/measurement_data group + g1_1 = g1.createGroup('measurement_data') + + # Add dimensions to data/measurement_data group + g1_1.createDimension('num_tie_points_act', 10) + g1_1.createDimension('num_tie_points_alt', 100) + + # Add variables to data/measurement_data group + tpw = g1_1.createVariable('tpw', np.float32, dimensions=('num_pixels', 'num_lines')) + tpw[:] = 1. + tpw.test_attr = 'attr' + lon = g1_1.createVariable('longitude', + np.float32, + dimensions=('num_tie_points_act', 'num_tie_points_alt')) + lon[:] = 100. + lat = g1_1.createVariable('latitude', + np.float32, + dimensions=('num_tie_points_act', 'num_tie_points_alt')) + lat[:] = 10. 
+ + # Create quality group + g2 = nc.createGroup('quality') + + # Add dimensions to quality group + g2.createDimension('gap_items', 2) + + # Add variables to quality group + var = g2.createVariable('duration_of_product', np.double, dimensions=()) + var[:] = 1.0 + var = g2.createVariable('duration_of_data_present', np.double, dimensions=()) + var[:] = 2.0 + var = g2.createVariable('duration_of_data_missing', np.double, dimensions=()) + var[:] = 3.0 + var = g2.createVariable('duration_of_data_degraded', np.double, dimensions=()) + var[:] = 4.0 + var = g2.createVariable('gap_start_time_utc', np.double, dimensions=('gap_items',)) + var[:] = [5.0, 6.0] + var = g2.createVariable('gap_end_time_utc', np.double, dimensions=('gap_items',)) + var[:] = [7.0, 8.0] + + # Create longitude and latitude "interpolated" arrays + interp_longitude = xr.DataArray(np.ones((10, 100))) + interp_latitude = xr.DataArray(np.ones((10, 100)) * 2.) + pgi_.return_value = (interp_longitude, interp_latitude) + + # Filename info valid for all readers + filename_info = { + 'creation_time': datetime.datetime(year=2017, month=9, day=22, + hour=22, minute=40, second=10), + 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + hour=12, minute=30, second=30), + 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + hour=18, minute=30, second=50) + } + + # Create a reader + self.reader = ViiNCBaseFileHandler( + filename=TEST_FILE, + filename_info=filename_info, + filetype_info={ + 'cached_longitude': 'data/measurement_data/longitude', + 'cached_latitude': 'data/measurement_data/latitude' + } + ) + + # Create a second reader where orthorectification and interpolation are inhibited + # by means of the filetype_info flags + self.reader_2 = ViiNCBaseFileHandler( + filename=TEST_FILE, + filename_info=filename_info, + filetype_info={ + 'cached_longitude': 'data/measurement_data/longitude', + 'cached_latitude': 'data/measurement_data/latitude', + 'interpolate': False, + 'orthorect': 
False + }, + orthorect=True + ) + + # Create a third reader without defining cached latitude and longitude + # by means of the filetype_info flags + self.reader_3 = ViiNCBaseFileHandler( + filename=TEST_FILE, + filename_info=filename_info, + filetype_info={}, + orthorect=True + ) + + def tearDown(self): + """Remove the previously created test file.""" + os.remove(TEST_FILE) + + def test_file_reading(self): + """Test the file product reading.""" + # Checks that the basic functionalities are correctly executed + expected_start_time = datetime.datetime(year=2017, month=9, day=20, + hour=17, minute=30, second=40, microsecond=888000) + self.assertEqual(self.reader.start_time, expected_start_time) + + expected_end_time = datetime.datetime(year=2017, month=9, day=20, + hour=17, minute=41, second=17, microsecond=555000) + self.assertEqual(self.reader.end_time, expected_end_time) + + self.assertEqual(self.reader.spacecraft_name, "test_spacecraft") + self.assertEqual(self.reader.sensor, "test_instrument") + self.assertEqual(self.reader.ssp_lon, None) + + # Checks that the global attributes are correctly read + expected_global_attributes = { + 'filename': TEST_FILE, + 'start_time': expected_start_time, + 'end_time': expected_end_time, + 'spacecraft_name': "test_spacecraft", + 'ssp_lon': None, + 'sensor': "test_instrument", + 'filename_start_time': datetime.datetime(year=2017, month=9, day=20, + hour=12, minute=30, second=30), + 'filename_end_time': datetime.datetime(year=2017, month=9, day=20, + hour=18, minute=30, second=50), + 'platform_name': "test_spacecraft", + 'quality_group': { + 'duration_of_product': 1., + 'duration_of_data_present': 2., + 'duration_of_data_missing': 3., + 'duration_of_data_degraded': 4., + 'gap_start_time_utc': (5., 6.), + 'gap_end_time_utc': (7., 8.) 
+            }
+        }
+
+        global_attributes = self.reader._get_global_attributes()
+        # Since the global_attributes dictionary contains numpy arrays,
+        # it is not possible to perform a simple equality test
+        # Must iterate on all keys to confirm that the dictionaries are equal
+        self.assertEqual(global_attributes.keys(), expected_global_attributes.keys())
+        for key in expected_global_attributes:
+            if key not in ['quality_group']:
+                # Equality check must be valid for both iterable and not iterable elements
+                try:
+                    equal = all(global_attributes[key] == expected_global_attributes[key])
+                except (TypeError, ValueError):
+                    equal = global_attributes[key] == expected_global_attributes[key]
+                self.assertTrue(equal)
+            else:
+                self.assertEqual(global_attributes[key].keys(), expected_global_attributes[key].keys())
+                for inner_key in global_attributes[key]:
+                    # Equality check must be valid for both iterable and not iterable elements
+                    try:
+                        equal = all(global_attributes[key][inner_key] == expected_global_attributes[key][inner_key])
+                    except (TypeError, ValueError):
+                        equal = global_attributes[key][inner_key] == expected_global_attributes[key][inner_key]
+                    self.assertTrue(equal)
+
+    @mock.patch('satpy.readers.vii_base_nc.tie_points_interpolation')
+    @mock.patch('satpy.readers.vii_base_nc.tie_points_geo_interpolation')
+    def test_functions(self, tpgi_, tpi_):
+        """Test the functions."""
+        with self.assertRaises(NotImplementedError):
+            self.reader._perform_orthorectification(mock.Mock(), mock.Mock())
+
+        with self.assertRaises(NotImplementedError):
+            self.reader._perform_calibration(mock.Mock(), mock.Mock())
+
+        # Checks that the _perform_interpolation function is correctly executed
+        variable = xr.DataArray(
+            dims=('x', 'y'),
+            name='test_name',
+            attrs={
+                'key_1': 'value_1',
+                'key_2': 'value_2'
+            },
+            data=np.zeros((10, 100)),
+        )
+        tpi_.return_value = [xr.DataArray(
+            dims=('num_tie_points_act', 'num_tie_points_alt'),
+            data=np.ones((10, 100))
+        )]
+
+        return_value = 
self.reader._perform_interpolation(variable) + + tpi_.assert_called_with([variable], SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) + self.assertTrue(np.allclose(return_value, np.ones((10, 100)))) + self.assertEqual(return_value.attrs, {'key_1': 'value_1', 'key_2': 'value_2'}) + self.assertEqual(return_value.name, 'test_name') + self.assertEqual(return_value.dims, ('num_pixels', 'num_lines')) + + # Checks that the _perform_geo_interpolation function is correctly executed + variable_lon = xr.DataArray( + dims=('x', 'y'), + name='test_lon', + attrs={ + 'key_1': 'value_lon_1', + 'key_2': 'value_lon_2' + }, + data=np.zeros((10, 100)) + ) + variable_lat = xr.DataArray( + dims=('x', 'y'), + name='test_lat', + attrs={ + 'key_1': 'value_lat_1', + 'key_2': 'value_lat_2' + }, + data=np.ones((10, 100)) * 2. + ) + + tpgi_.return_value = ( + xr.DataArray( + dims=('num_tie_points_act', 'num_tie_points_alt'), + data=np.ones((10, 100)) + ), + xr.DataArray( + dims=('num_tie_points_act', 'num_tie_points_alt'), + data=6 * np.ones((10, 100)) + ) + ) + + return_lon, return_lat = self.reader._perform_geo_interpolation(variable_lon, variable_lat) + + tpgi_.assert_called_with(variable_lon, variable_lat, SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) + + self.assertTrue(np.allclose(return_lon, np.ones((10, 100)))) + self.assertEqual(return_lon.attrs, {'key_1': 'value_lon_1', 'key_2': 'value_lon_2'}) + self.assertEqual(return_lon.name, 'test_lon') + self.assertEqual(return_lon.dims, ('num_pixels', 'num_lines')) + + self.assertTrue(np.allclose(return_lat, 6 * np.ones((10, 100)))) + self.assertEqual(return_lat.attrs, {'key_1': 'value_lat_1', 'key_2': 'value_lat_2'}) + self.assertEqual(return_lat.name, 'test_lat') + self.assertEqual(return_lat.dims, ('num_pixels', 'num_lines')) + + # Checks that the _perform_wrapping function is correctly executed + variable = xr.DataArray( + dims=('x', 'y'), + name='test_name', + attrs={ + 'key_1': 'value_1', + 'key_2': 'value_2' + }, + data=np.ones((10, 100)) + ) + 
variable.values[0, :] = 361. + variable.values[1, :] = -359. + return_variable = self.reader._perform_wrapping(variable) + self.assertEqual(return_variable.attrs['key_1'], 'value_1') + self.assertEqual(return_variable.dims, ('x', 'y')) + self.assertTrue(np.allclose(return_variable.values, np.ones((10, 100)))) + + @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_calibration') + @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_interpolation') + @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_wrapping') + @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_orthorectification') + def test_dataset(self, po_, pw_, pi_, pc_): + """Test the execution of the get_dataset function.""" + # Checks the correct execution of the get_dataset function with a valid file_key + variable = self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', + 'calibration': None}) + pc_.assert_not_called() + pi_.assert_not_called() + pw_.assert_not_called() + po_.assert_not_called() + + self.assertTrue(np.allclose(variable.values, np.ones((10, 100)))) + self.assertEqual(variable.dims, ('num_pixels', 'num_lines')) + self.assertEqual(variable.attrs['test_attr'], 'attr') + self.assertEqual(variable.attrs['units'], None) + + # Checks the correct execution of the get_dataset function with a valid file_key + # and required calibration, interpolation and wrapping + self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', + 'calibration': 'reflectance', + 'interpolate': True, + 'standard_name': 'longitude'}) + pc_.assert_called() + pi_.assert_called() + pw_.assert_called() + po_.assert_not_called() + + # Checks the correct execution of the get_dataset function with a valid file_key + # and required orthorectification + self.reader.orthorect = True + self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', + 'calibration': None, + 'orthorect_data': 'test_orthorect_data'}) + 
po_.assert_called() + + # Checks the correct execution of the get_dataset function with an invalid file_key + invalid_dataset = self.reader.get_dataset(None, {'file_key': 'test_invalid', 'calibration': None}) + # Checks that the function returns None + self.assertEqual(invalid_dataset, None) + + pc_.reset_mock() + pi_.reset_mock() + pw_.reset_mock() + po_.reset_mock() + + # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key + longitude = self.reader.get_dataset(None, {'file_key': 'cached_longitude', + 'calibration': 'reflectance', + 'interpolate': True}) + pc_.assert_not_called() + pi_.assert_not_called() + self.assertEqual(longitude[0, 0], 1.) + + # Checks the correct execution of the get_dataset function with a 'cached_latitude' file_key + latitude = self.reader.get_dataset(None, {'file_key': 'cached_latitude', + 'calibration': None}) + self.assertEqual(latitude[0, 0], 2.) + + # Repeats some check with the reader where orthorectification and interpolation are inhibited + # by means of the filetype_info flags + + pc_.reset_mock() + pi_.reset_mock() + pw_.reset_mock() + po_.reset_mock() + + # Checks the correct execution of the get_dataset function with a valid file_key + # and required calibration, interpolation and wrapping + self.reader_2.get_dataset(None, {'file_key': 'data/measurement_data/tpw', + 'calibration': 'reflectance', + 'interpolate': True, + 'standard_name': 'longitude'}) + pc_.assert_called() + pi_.assert_not_called() + pw_.assert_called() + po_.assert_not_called() + + # Checks the correct execution of the get_dataset function with a valid file_key + # and required orthorectification + self.reader_2.get_dataset(None, {'file_key': 'data/measurement_data/tpw', + 'calibration': None, + 'orthorect_data': 'test_orthorect_data'}) + po_.assert_not_called() + + # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key + longitude = self.reader_2.get_dataset(None, {'file_key': 
'cached_longitude', + 'calibration': None}) + self.assertEqual(longitude[0, 0], 100.) + + # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key + # in a reader without defined longitude + longitude = self.reader_3.get_dataset(None, {'file_key': 'cached_longitude', + 'calibration': 'reflectance', + 'interpolate': True}) + # Checks that the function returns None + self.assertEqual(longitude, None) diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py new file mode 100644 index 0000000000..4a80fe319c --- /dev/null +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) 2019 Satpy developers +# +# satpy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# satpy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with satpy. If not, see . 
+ +"""The vii_l1b_nc reader tests package.""" + +import os + +import numpy as np +import xarray as xr +import dask.array as da +import datetime +from netCDF4 import Dataset + +from satpy.readers.vii_l1b_nc import ViiL1bNCFileHandler +from satpy.readers.vii_utils import MEAN_EARTH_RADIUS + +import unittest + +TEST_FILE = 'test_file_vii_l1b_nc.nc' + + +class TestViiL1bNCFileHandler(unittest.TestCase): + """Test the ViiL1bNCFileHandler reader.""" + + def setUp(self): + """Set up the test.""" + # Easiest way to test the reader is to create a test netCDF file on the fly + with Dataset(TEST_FILE, 'w') as nc: + # Create data group + g1 = nc.createGroup('data') + + # Add dimensions to data group + g1.createDimension('num_chan_solar', 11) + g1.createDimension('num_chan_thermal', 9) + g1.createDimension('num_pixels', 72) + g1.createDimension('num_lines', 600) + + # Create calibration_data group + g1_1 = g1.createGroup('calibration_data') + + # Add variables to data/calibration_data group + bt_a = g1_1.createVariable('bt_conversion_a', np.float32, dimensions=('num_chan_thermal',)) + bt_a[:] = np.arange(9) + bt_b = g1_1.createVariable('bt_conversion_b', np.float32, dimensions=('num_chan_thermal',)) + bt_b[:] = np.arange(9) + cw = g1_1.createVariable('channel_cw_thermal', np.float32, dimensions=('num_chan_thermal',)) + cw[:] = np.arange(9) + isi = g1_1.createVariable('integrated_solar_irradiance', np.float32, dimensions=('num_chan_solar',)) + isi[:] = np.arange(11) + + # Create measurement_data group + g1_2 = g1.createGroup('measurement_data') + + # Add dimensions to data/measurement_data group + g1_2.createDimension('num_tie_points_act', 10) + g1_2.createDimension('num_tie_points_alt', 100) + + # Add variables to data/measurement_data group + sza = g1_2.createVariable('solar_zenith', np.float32, + dimensions=('num_tie_points_act', 'num_tie_points_alt')) + sza[:] = 25.0 + delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_pixels', 'num_lines')) + 
delta_lat[:] = 1.0 + + self.reader = ViiL1bNCFileHandler( + filename=TEST_FILE, + filename_info={ + 'creation_time': datetime.datetime(year=2017, month=9, day=22, + hour=22, minute=40, second=10), + 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + hour=12, minute=30, second=30), + 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + hour=18, minute=30, second=50) + }, + filetype_info={} + ) + + def tearDown(self): + """Remove the previously created test file.""" + os.remove(TEST_FILE) + + def test_calibration_functions(self): + """Test the calibration functions.""" + radiance = np.array([[1.0, 2.0, 5.0], [7.0, 10.0, 20.0]]) + + cw = 13.0 + a = 3.0 + b = 100.0 + bt = self.reader._calibrate_bt(radiance, cw, a, b) + expected_bt = np.array([[675.04993213, 753.10301462, 894.93149648], + [963.20401882, 1048.95086402, 1270.95546218]]) + self.assertTrue(np.allclose(bt, expected_bt)) + + angle_factor = 0.4 + isi = 2.0 + refl = self.reader._calibrate_refl(radiance, angle_factor, isi) + expected_refl = np.array([[0.628318531, 1.256637061, 3.141592654], + [4.398229715, 6.283185307, 12.56637061]]) + self.assertTrue(np.allclose(refl, expected_refl)) + + def test_functions(self): + """Test the functions.""" + # Checks that the _perform_orthorectification function is correctly executed + variable = xr.DataArray( + dims=('num_pixels', 'num_lines'), + name='test_name', + attrs={ + 'key_1': 'value_1', + 'key_2': 'value_2' + }, + data=da.from_array(np.ones((72, 600))) + ) + + orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') + expected_values = np.degrees(np.ones((72, 600)) / MEAN_EARTH_RADIUS) + np.ones((72, 600)) + self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) + + # Checks that the _perform_calibration function is correctly executed in all cases + # radiance calibration: return value is simply a copy of the variable + return_variable = 
self.reader._perform_calibration(variable, {'calibration': 'radiance'}) + self.assertTrue(np.all(return_variable == variable)) + + # invalid calibration: raises a ValueError + with self.assertRaises(ValueError): + self.reader._perform_calibration(variable, + {'calibration': 'invalid', 'name': 'test'}) + + # brightness_temperature calibration: checks that the return value is correct + calibrated_variable = self.reader._perform_calibration(variable, + {'calibration': 'brightness_temperature', + 'chan_thermal_index': 3}) + expected_values = np.ones((72, 600)) * 1101.103383 + self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) + + # reflectance calibration: checks that the return value is correct + calibrated_variable = self.reader._perform_calibration(variable, + {'calibration': 'reflectance', 'chan_solar_index': 2}) + expected_values = np.ones((72, 600)) * 1.733181982 + self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) diff --git a/satpy/tests/reader_tests/test_vii_utils.py b/satpy/tests/reader_tests/test_vii_utils.py new file mode 100644 index 0000000000..ab38868ba6 --- /dev/null +++ b/satpy/tests/reader_tests/test_vii_utils.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) 2019 Satpy developers +# +# satpy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# satpy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with satpy. If not, see . 
+ +"""The vii_utils reader tests package.""" + +import satpy.readers.vii_utils + +import unittest + + +# Constants to be tested +C1 = 1.191062e+8 +C2 = 1.4387863e+4 +TIE_POINTS_FACTOR = 8 +SCAN_ALT_TIE_POINTS = 4 +MEAN_EARTH_RADIUS = 6371008.7714 + + +class TestViiUtils(unittest.TestCase): + """Test the vii_utils module.""" + + def test_constants(self): + """Test the constant values.""" + # Test the value of the constants + self.assertEqual(satpy.readers.vii_utils.C1, C1) + self.assertEqual(satpy.readers.vii_utils.C2, C2) + self.assertEqual(satpy.readers.vii_utils.TIE_POINTS_FACTOR, TIE_POINTS_FACTOR) + self.assertEqual(satpy.readers.vii_utils.SCAN_ALT_TIE_POINTS, SCAN_ALT_TIE_POINTS) + self.assertEqual(satpy.readers.vii_utils.MEAN_EARTH_RADIUS, MEAN_EARTH_RADIUS) From e825f31bc57c30c6fc4e1d89b438ad8048eed702 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Tue, 26 May 2020 10:53:56 +0000 Subject: [PATCH 2/8] Update netCDF utils. --- satpy/readers/netcdf_utils.py | 20 +++++++++++++------ satpy/tests/reader_tests/test_netcdf_utils.py | 17 ++++++---------- 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/satpy/readers/netcdf_utils.py b/satpy/readers/netcdf_utils.py index 4b07d613dc..d0eb5d6af9 100644 --- a/satpy/readers/netcdf_utils.py +++ b/satpy/readers/netcdf_utils.py @@ -78,6 +78,7 @@ class NetCDF4FileHandler(BaseFileHandler): xarray_kwargs (dict): Addition arguments to `xarray.open_dataset` cache_var_size (int): Cache variables smaller than this size. cache_handle (bool): Keep files open for lifetime of filehandler. 
+ """ file_handle = None @@ -85,6 +86,7 @@ class NetCDF4FileHandler(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=False, xarray_kwargs=None, cache_var_size=0, cache_handle=False): + """Initialize object.""" super(NetCDF4FileHandler, self).__init__( filename, filename_info, filetype_info) self.file_content = {} @@ -119,6 +121,7 @@ def __init__(self, filename, filename_info, filetype_info, self._xarray_kwargs.setdefault('mask_and_scale', self.auto_maskandscale) def __del__(self): + """Delete object.""" if self.file_handle is not None: try: self.file_handle.close() @@ -143,13 +146,15 @@ def collect_metadata(self, name, obj): # Look through each subgroup base_name = name + "/" if name else "" for group_name, group_obj in obj.groups.items(): - self.collect_metadata(base_name + group_name, group_obj) + full_group_name = base_name + group_name + self.file_content[full_group_name] = group_obj + self._collect_attrs(full_group_name, group_obj) + self.collect_metadata(full_group_name, group_obj) for var_name, var_obj in obj.variables.items(): var_name = base_name + var_name self.file_content[var_name] = var_obj self.file_content[var_name + "/dtype"] = var_obj.dtype self.file_content[var_name + "/shape"] = var_obj.shape - self.file_content[var_name + "/dimensions"] = var_obj.dimensions self._collect_attrs(var_name, var_obj) self._collect_attrs(name, obj) @@ -172,6 +177,7 @@ def collect_cache_vars(self, cache_vars, obj): Args: cache_vars (List[str]): Names of data variables to be cached. obj (netCDF4.Dataset): Dataset object from which to read them. 
+ """ for var_name in cache_vars: v = self.file_content[var_name] @@ -196,6 +202,11 @@ def __getitem__(self, key): val = self._get_var_from_filehandle(group, key) else: val = self._get_var_from_xr(group, key) + elif isinstance(val, netCDF4.Group): + # Full groups are conveniently read with xr even if file_handle is available + with xr.open_dataset(self.filename, group=key, + **self._xarray_kwargs) as nc: + val = nc return val def _get_var_from_xr(self, group, key): @@ -217,10 +228,7 @@ def _get_var_from_xr(self, group, key): def _get_var_from_filehandle(self, group, key): # Not getting coordinates as this is more work, therefore more # overhead, and those are not used downstream. - if group is None: - g = self.file_handle - else: - g = self.file_handle[group] + g = self.file_handle[group] v = g[key] x = xr.DataArray( da.from_array(v), dims=v.dimensions, attrs=v.__dict__, diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index 0d0c7ac0c5..93a6f0d6ad 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -54,7 +54,6 @@ def get_test_content(self, filename, filename_info, filetype_info): - '/attr/global_attr' - 'dataset/attr/global_attr' - 'dataset/shape' - - 'dataset/dimensions' - '/dimension/my_dim' """ @@ -123,11 +122,14 @@ def test_all_basic(self): for ds in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'): self.assertEqual(file_handler[ds].dtype, np.float32 if ds.endswith('f') else np.int32) self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) - self.assertEqual(file_handler[ds + '/dimensions'], ("rows", "cols")) self.assertEqual(file_handler[ds + '/attr/test_attr_str'], 'test_string') self.assertEqual(file_handler[ds + '/attr/test_attr_int'], 0) self.assertEqual(file_handler[ds + '/attr/test_attr_float'], 1.2) + test_group = file_handler['test_group'] + self.assertTupleEqual(test_group['ds1_i'].shape, (10, 100)) + 
self.assertTupleEqual(test_group['ds1_i'].dims, ('rows', 'cols')) + self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') self.assertEqual(file_handler['/attr/test_attr_int'], 0) @@ -143,8 +145,7 @@ def test_all_basic(self): self.assertEqual(file_handler["ds2_sc"], 42) def test_caching(self): - """Test that caching works as intended. - """ + """Test that caching works as intended.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler h = NetCDF4FileHandler("test.nc", {}, {}, cache_var_size=1000, cache_handle=True) @@ -157,17 +158,11 @@ def test_caching(self): np.testing.assert_array_equal(h["ds2_s"], np.arange(10)) np.testing.assert_array_equal(h["test_group/ds1_i"], np.arange(10 * 100).reshape((10, 100))) - # check that root variables can still be read from cached file object, - # even if not cached themselves - np.testing.assert_array_equal( - h["ds2_f"], - np.arange(10. * 100).reshape((10, 100))) h.__del__() self.assertFalse(h.file_handle.isopen()) def test_filenotfound(self): - """Test that error is raised when file not found - """ + """Test that error is raised when file not found.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler with self.assertRaises(IOError): From a971dec3df21cbdebaac7c709542fcadda752fcc Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Thu, 28 May 2020 11:23:35 +0000 Subject: [PATCH 3/8] Remove README.md. --- README.md | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 README.md diff --git a/README.md b/README.md deleted file mode 100644 index 29a531b016..0000000000 --- a/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# epssg-satpy - From 90383747fbe3e93cdb4b77f191ec28a4b173bcc9 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Thu, 28 May 2020 11:30:21 +0000 Subject: [PATCH 4/8] Add VII L1b-reader to index.rst. 
--- doc/source/index.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/source/index.rst b/doc/source/index.rst index 747e51953f..bfea532eb7 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -243,6 +243,9 @@ the base Satpy installation. * - OMPS EDR data in HDF5 format - `omps_edr` - Beta + * - EPS-SG VII Level 1b in NetCDF4 format + - `vii_l1b_nc` + - Beta Indices and tables ================== From eea890b312a9b19f7c44b08f4b0ea9905a3a92aa Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Thu, 4 Jun 2020 14:24:01 +0000 Subject: [PATCH 5/8] Rebase VII L1b reader to new Satpy master branch. --- doc/source/examples/fci_l1c_natural_color.rst | 41 +++++++++++ doc/source/examples/index.rst | 44 ++++++++++++ doc/source/index.rst | 4 +- satpy/dataset.py | 72 +++++++++++++++---- satpy/readers/netcdf_utils.py | 8 ++- satpy/readers/vii_base_nc.py | 28 +++----- satpy/readers/vii_l1b_nc.py | 2 +- satpy/readers/vii_utils.py | 2 +- satpy/tests/reader_tests/test_netcdf_utils.py | 9 ++- satpy/tests/reader_tests/test_vii_base_nc.py | 31 ++------ satpy/tests/reader_tests/test_vii_l1b_nc.py | 2 +- satpy/tests/reader_tests/test_vii_utils.py | 2 +- satpy/tests/test_dataset.py | 36 ++++++++++ 13 files changed, 213 insertions(+), 68 deletions(-) create mode 100644 doc/source/examples/fci_l1c_natural_color.rst create mode 100644 doc/source/examples/index.rst diff --git a/doc/source/examples/fci_l1c_natural_color.rst b/doc/source/examples/fci_l1c_natural_color.rst new file mode 100644 index 0000000000..f7e81ce8b3 --- /dev/null +++ b/doc/source/examples/fci_l1c_natural_color.rst @@ -0,0 +1,41 @@ +MTG FCI - Natural Color Example +=============================== + +Satpy includes a reader for the Meteosat Third Generation (MTG) FCI Level 1c +data. The following Python code snippet shows an example on how to use Satpy +to generate a Natural Color RGB composite over the European area. + +.. warning:: + + This example is currently a work in progress. 
Some of the below code may + not work with the currently released version of Satpy. Additional updates + to this example will be coming soon. + +.. code-block:: python + + from satpy.scene import Scene + from satpy import find_files_and_readers + + # define path to FCI test data folder + path_to_data = 'your/path/to/FCI/data/folder/' + + # find files and assign the FCI reader + files = find_files_and_readers(base_dir=path_to_data, reader='fci_l1c_fdhsi') + + # create an FCI scene from the selected files + scn = Scene(filenames=files) + + # print available dataset names for this scene (e.g. 'vis_04', 'vis_05','ir_38',...) + print(scn.available_dataset_names()) + + # print available composite names for this scene (e.g. 'natural_color', 'airmass', 'convection',...) + print(scn.available_composite_names()) + + # load the datasets/composites of interest + scn.load(['natural_color','vis_04']) + + # resample the scene to a specified area (e.g. "eurol1" for Europe in 1km resolution) + scn_resampled = scn.resample("eurol", resampler='nearest', radius_of_influence=5000) + + # save the resampled dataset/composite to disk + scn_resampled.save_dataset("natural_color", filename='./fci_natural_color_resampled.png') diff --git a/doc/source/examples/index.rst b/doc/source/examples/index.rst new file mode 100644 index 0000000000..b77d49ddb7 --- /dev/null +++ b/doc/source/examples/index.rst @@ -0,0 +1,44 @@ +Examples +======== + +Satpy examples are available as Jupyter Notebooks on the +`pytroll-examples `_ +git repository. Some examples are described in further detail as separate pages +in this documentation. They include python code, PNG images, and descriptions of +what the example is doing. Below is a list of some of the examples and a brief +summary. Additional examples can be found at the repository mentioned above or +as explanations in the various sections of this documentation. + +.. toctree:: + :hidden: + :maxdepth: 1 + + fci_l1c_natural_color + +.. 
list-table:: + :header-rows: 1 + + * - Name + - Description + * - `Quickstart with MSG data `_ + - Satpy quickstart for loading and processing satellite data, with MSG data in this examples + * - `Cartopy Plot `_ + - Plot a single VIIRS SDR granule using Cartopy and matplotlib + * - `Himawari-8 AHI True Color `_ + - Generate and resample a rayleigh corrected true color RGB from Himawari-8 AHI data + * - `Sentinel-3 OLCI True Color `_ + - Reading OLCI data from Sentinel 3 with Pytroll/Satpy + * - `Sentinel 2 MSI true color `_ + - Reading MSI data from Sentinel 2 with Pytroll/Satpy + * - `Suomi-NPP VIIRS SDR True Color `_ + - Generate a rayleigh corrected true color RGB from VIIRS I- and M-bands + * - `Aqua/Terra MODIS True Color `_ + - Generate and resample a rayleigh corrected true color RGB from MODIS + * - `Sentinel 1 SAR-C False Color `_ + - Generate a false color composite RGB from SAR-C polarized datasets + * - `Level 2 EARS-NWC cloud products `_ + - Reading Level 2 EARS-NWC cloud products + * - `Level 2 MAIA cloud products `_ + - Reading Level 2 MAIA cloud products + * - :doc:`Meteosat Third Generation FCI Natural Color RGB ` + - Generate Natural Color RGB from Meteosat Third Generation (MTG) FCI Level 1c data diff --git a/doc/source/index.rst b/doc/source/index.rst index bfea532eb7..f3a083ab06 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -52,7 +52,7 @@ the base Satpy installation. overview install data_download - examples + examples/index quickstart readers composites @@ -243,7 +243,7 @@ the base Satpy installation. * - OMPS EDR data in HDF5 format - `omps_edr` - Beta - * - EPS-SG VII Level 1b in NetCDF4 format + * - VII Level 1b in NetCDF4 format - `vii_l1b_nc` - Beta diff --git a/satpy/dataset.py b/satpy/dataset.py index db1055d385..77c1434836 100644 --- a/satpy/dataset.py +++ b/satpy/dataset.py @@ -17,14 +17,12 @@ # satpy. If not, see . 
"""Dataset objects.""" -import sys import logging import numbers from collections import namedtuple +from collections.abc import Collection from datetime import datetime -import numpy as np - logger = logging.getLogger(__name__) @@ -62,11 +60,13 @@ def average_datetimes(dt_list): def combine_metadata(*metadata_objects, **kwargs): """Combine the metadata of two or more Datasets. - If any keys are not equal or do not exist in all provided dictionaries - then they are not included in the returned dictionary. - By default any keys with the word 'time' in them and consisting - of datetime objects will be averaged. This is to handle cases where - data were observed at almost the same time but not exactly. + If the values corresponding to any keys are not equal or do not + exist in all provided dictionaries then they are not included in + the returned dictionary. By default any keys with the word 'time' + in them and consisting of datetime objects will be averaged. This + is to handle cases where data were observed at almost the same time + but not exactly. In the interest of time, arrays are compared by + object identity rather than by their contents. 
Args: *metadata_objects: MetadataObject or dict objects to combine @@ -98,18 +98,57 @@ def combine_metadata(*metadata_objects, **kwargs): shared_info = {} for k in shared_keys: values = [nfo[k] for nfo in info_dicts] - any_arrays = any([isinstance(val, np.ndarray) for val in values]) - if any_arrays: - if all(np.all(val == values[0]) for val in values[1:]): + if _share_metadata_key(k, values, average_times): + if 'time' in k and isinstance(values[0], datetime) and average_times: + shared_info[k] = average_datetimes(values) + else: shared_info[k] = values[0] - elif 'time' in k and isinstance(values[0], datetime) and average_times: - shared_info[k] = average_datetimes(values) - elif all(val == values[0] for val in values[1:]): - shared_info[k] = values[0] return shared_info +def _share_metadata_key(k, values, average_times): + """Combine metadata. Helper for combine_metadata, decide if key is shared.""" + any_arrays = any([hasattr(val, "__array__") for val in values]) + # in the real world, the `ancillary_variables` attribute may be + # List[xarray.DataArray], this means our values are now + # List[List[xarray.DataArray]]. + # note that this list_of_arrays check is also true for any + # higher-dimensional ndarray, but we only use this check after we have + # checked any_arrays so this false positive should have no impact + list_of_arrays = any( + [isinstance(val, Collection) and len(val) > 0 and + all([hasattr(subval, "__array__") + for subval in val]) + for val in values]) + if any_arrays: + return _share_metadata_key_array(values) + elif list_of_arrays: + return _share_metadata_key_list_arrays(values) + elif 'time' in k and isinstance(values[0], datetime) and average_times: + return True + elif all(val == values[0] for val in values[1:]): + return True + return False + + +def _share_metadata_key_array(values): + """Combine metadata. 
Helper for combine_metadata, check object identity in list of arrays.""" + for val in values[1:]: + if val is not values[0]: + return False + return True + + +def _share_metadata_key_list_arrays(values): + """Combine metadata. Helper for combine_metadata, check object identity in list of list of arrays.""" + for val in values[1:]: + for arr, ref in zip(val, values[0]): + if arr is not ref: + return False + return True + + DATASET_KEYS = ("name", "wavelength", "resolution", "polarization", "calibration", "level", "modifiers") DatasetID = namedtuple("DatasetID", " ".join(DATASET_KEYS)) @@ -158,6 +197,7 @@ class DatasetID(DatasetID): other modifications have been performed on this Dataset (ex. 'sunz_corrected', 'rayleigh_corrected', etc). `None` or empty tuple if not applicable. + """ def __new__(cls, *args, **kwargs): @@ -175,6 +215,7 @@ def name_match(a, b): Args: a (str): DatasetID.name or other string b (str): DatasetID.name or other string + """ return a == b @@ -185,6 +226,7 @@ def wavelength_match(a, b): Args: a (tuple or scalar): (min wl, nominal wl, max wl) or scalar wl b (tuple or scalar): (min wl, nominal wl, max wl) or scalar wl + """ if type(a) == (type(b) or isinstance(a, numbers.Number) and diff --git a/satpy/readers/netcdf_utils.py b/satpy/readers/netcdf_utils.py index d0eb5d6af9..2a18d4c26b 100644 --- a/satpy/readers/netcdf_utils.py +++ b/satpy/readers/netcdf_utils.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2016-2017 Satpy developers +# Copyright (c) 2016-2020 Satpy developers # # This file is part of satpy. 
# @@ -155,6 +155,7 @@ def collect_metadata(self, name, obj): self.file_content[var_name] = var_obj self.file_content[var_name + "/dtype"] = var_obj.dtype self.file_content[var_name + "/shape"] = var_obj.shape + self.file_content[var_name + "/dimensions"] = var_obj.dimensions self._collect_attrs(var_name, var_obj) self._collect_attrs(name, obj) @@ -228,7 +229,10 @@ def _get_var_from_xr(self, group, key): def _get_var_from_filehandle(self, group, key): # Not getting coordinates as this is more work, therefore more # overhead, and those are not used downstream. - g = self.file_handle[group] + if group is None: + g = self.file_handle + else: + g = self.file_handle[group] v = g[key] x = xr.DataArray( da.from_array(v), dims=v.dimensions, attrs=v.__dict__, diff --git a/satpy/readers/vii_base_nc.py b/satpy/readers/vii_base_nc.py index a2a08cecee..986d7c1608 100644 --- a/satpy/readers/vii_base_nc.py +++ b/satpy/readers/vii_base_nc.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # -# Copyright (c) 2019 Satpy developers +# Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -30,7 +30,15 @@ class ViiNCBaseFileHandler(NetCDF4FileHandler): - """Base reader class for VII products in netCDF format.""" + """Base reader class for VII products in netCDF format. + + Args: + filename (str): File to read + filename_info (dict): Dictionary with filename information + filetype_info (dict): Dictionary with filetype information + orthorect (bool): activates the orthorectification correction where available + + """ def __init__(self, filename, filename_info, filetype_info, orthorect=False): """Prepare the class for dataset reading.""" @@ -88,7 +96,7 @@ def get_dataset(self, dataset_id, dataset_info): # If the dataset contains a longitude, change it to the interval [0., 360.) 
as natively in the product # since the unwrapping performed during the interpolation might have created values outside this range if dataset_info.get('standard_name', None) == 'longitude': - variable = self._perform_wrapping(variable) + variable %= 360. # Manage the attributes of the dataset variable.attrs.setdefault('units', None) @@ -98,20 +106,6 @@ def get_dataset(self, dataset_id, dataset_info): return variable - @staticmethod - def _perform_wrapping(variable): - """Wrap the values to the interval [0, 360.) . - - Args: - variable: xarray DataArray containing the dataset to wrap. - - Returns: - DataArray: array containing the wrapped values and all the original metadata - - """ - variable %= 360. - return variable - @staticmethod def _perform_interpolation(variable): """Perform the interpolation from tie points to pixel points. diff --git a/satpy/readers/vii_l1b_nc.py b/satpy/readers/vii_l1b_nc.py index 6cecaa5b75..8095119238 100644 --- a/satpy/readers/vii_l1b_nc.py +++ b/satpy/readers/vii_l1b_nc.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # -# Copyright (c) 2019 Satpy developers +# Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by diff --git a/satpy/readers/vii_utils.py b/satpy/readers/vii_utils.py index a493f7b9be..154519bb4d 100644 --- a/satpy/readers/vii_utils.py +++ b/satpy/readers/vii_utils.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # -# Copyright (c) 2019 Satpy developers +# Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index 93a6f0d6ad..53db5f6efd 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ 
-1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2017-2018 Satpy developers +# Copyright (c) 2017-2020 Satpy developers # # This file is part of satpy. # @@ -54,6 +54,7 @@ def get_test_content(self, filename, filename_info, filetype_info): - '/attr/global_attr' - 'dataset/attr/global_attr' - 'dataset/shape' + - 'dataset/dimensions' - '/dimension/my_dim' """ @@ -122,6 +123,7 @@ def test_all_basic(self): for ds in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'): self.assertEqual(file_handler[ds].dtype, np.float32 if ds.endswith('f') else np.int32) self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) + self.assertEqual(file_handler[ds + '/dimensions'], ("rows", "cols")) self.assertEqual(file_handler[ds + '/attr/test_attr_str'], 'test_string') self.assertEqual(file_handler[ds + '/attr/test_attr_int'], 0) self.assertEqual(file_handler[ds + '/attr/test_attr_float'], 1.2) @@ -158,6 +160,11 @@ def test_caching(self): np.testing.assert_array_equal(h["ds2_s"], np.arange(10)) np.testing.assert_array_equal(h["test_group/ds1_i"], np.arange(10 * 100).reshape((10, 100))) + # check that root variables can still be read from cached file object, + # even if not cached themselves + np.testing.assert_array_equal( + h["ds2_f"], + np.arange(10. 
* 100).reshape((10, 100))) h.__del__() self.assertFalse(h.file_handle.isopen()) diff --git a/satpy/tests/reader_tests/test_vii_base_nc.py b/satpy/tests/reader_tests/test_vii_base_nc.py index e13a4597ab..7796e04439 100644 --- a/satpy/tests/reader_tests/test_vii_base_nc.py +++ b/satpy/tests/reader_tests/test_vii_base_nc.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # -# Copyright (c) 2019 Satpy developers +# Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -289,35 +289,16 @@ def test_functions(self, tpgi_, tpi_): self.assertEqual(return_lat.name, 'test_lat') self.assertEqual(return_lat.dims, ('num_pixels', 'num_lines')) - # Checks that the _perform_wrapping function is correctly executed - variable = xr.DataArray( - dims=('x', 'y'), - name='test_name', - attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' - }, - data=np.ones((10, 100)) - ) - variable.values[0, :] = 361. - variable.values[1, :] = -359. 
- return_variable = self.reader._perform_wrapping(variable) - self.assertEqual(return_variable.attrs['key_1'], 'value_1') - self.assertEqual(return_variable.dims, ('x', 'y')) - self.assertTrue(np.allclose(return_variable.values, np.ones((10, 100)))) - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_calibration') @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_interpolation') - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_wrapping') @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_orthorectification') - def test_dataset(self, po_, pw_, pi_, pc_): + def test_dataset(self, po_, pi_, pc_): """Test the execution of the get_dataset function.""" # Checks the correct execution of the get_dataset function with a valid file_key variable = self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', 'calibration': None}) pc_.assert_not_called() pi_.assert_not_called() - pw_.assert_not_called() po_.assert_not_called() self.assertTrue(np.allclose(variable.values, np.ones((10, 100)))) @@ -326,14 +307,13 @@ def test_dataset(self, po_, pw_, pi_, pc_): self.assertEqual(variable.attrs['units'], None) # Checks the correct execution of the get_dataset function with a valid file_key - # and required calibration, interpolation and wrapping + # and required calibration and interpolation self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', 'calibration': 'reflectance', 'interpolate': True, 'standard_name': 'longitude'}) pc_.assert_called() pi_.assert_called() - pw_.assert_called() po_.assert_not_called() # Checks the correct execution of the get_dataset function with a valid file_key @@ -351,7 +331,6 @@ def test_dataset(self, po_, pw_, pi_, pc_): pc_.reset_mock() pi_.reset_mock() - pw_.reset_mock() po_.reset_mock() # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key @@ -372,18 +351,16 @@ def test_dataset(self, po_, pw_, pi_, pc_): 
pc_.reset_mock() pi_.reset_mock() - pw_.reset_mock() po_.reset_mock() # Checks the correct execution of the get_dataset function with a valid file_key - # and required calibration, interpolation and wrapping + # and required calibration and interpolation self.reader_2.get_dataset(None, {'file_key': 'data/measurement_data/tpw', 'calibration': 'reflectance', 'interpolate': True, 'standard_name': 'longitude'}) pc_.assert_called() pi_.assert_not_called() - pw_.assert_called() po_.assert_not_called() # Checks the correct execution of the get_dataset function with a valid file_key diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py index 4a80fe319c..4b404ff299 100644 --- a/satpy/tests/reader_tests/test_vii_l1b_nc.py +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # -# Copyright (c) 2019 Satpy developers +# Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by diff --git a/satpy/tests/reader_tests/test_vii_utils.py b/satpy/tests/reader_tests/test_vii_utils.py index ab38868ba6..e2ce3cc6ac 100644 --- a/satpy/tests/reader_tests/test_vii_utils.py +++ b/satpy/tests/reader_tests/test_vii_utils.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # -# Copyright (c) 2019 Satpy developers +# Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index f72bb147e5..82bd360dc2 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -84,3 +84,39 @@ def test_combine_times(self): ret = combine_metadata(*dts, average_times=False) # times are not equal so don't include it in the final result self.assertNotIn('start_time', ret) + + def 
test_combine_arrays(self): + """Test the combine_metadata with arrays.""" + from satpy.dataset import combine_metadata + from numpy import arange, ones + from xarray import DataArray + dts = [ + {"quality": (arange(25) % 2).reshape(5, 5).astype("?")}, + {"quality": (arange(1, 26) % 3).reshape(5, 5).astype("?")}, + {"quality": ones((5, 5,), "?")}, + ] + assert "quality" not in combine_metadata(*dts) + dts2 = [{"quality": DataArray(d["quality"])} for d in dts] + assert "quality" not in combine_metadata(*dts2) + # the ancillary_variables attribute is actually a list of data arrays + dts3 = [{"quality": [d["quality"]]} for d in dts] + assert "quality" not in combine_metadata(*dts3) + # check cases with repeated arrays + dts4 = [ + {"quality": dts[0]["quality"]}, + {"quality": dts[0]["quality"]}, + ] + assert "quality" in combine_metadata(*dts4) + dts5 = [ + {"quality": dts3[0]["quality"]}, + {"quality": dts3[0]["quality"]}, + ] + assert "quality" in combine_metadata(*dts5) + # check with other types + dts6 = [ + DataArray(arange(5), attrs=dts[0]), + DataArray(arange(5), attrs=dts[0]), + DataArray(arange(5), attrs=dts[1]), + object() + ] + assert "quality" not in combine_metadata(*dts6) From f43cb0c6d7b36c3cd2e19a12a392284a674a1e53 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Fri, 5 Jun 2020 14:45:40 +0000 Subject: [PATCH 6/8] Fix unittests for permission error. --- satpy/tests/reader_tests/test_vii_base_nc.py | 6 +++++- satpy/tests/reader_tests/test_vii_l1b_nc.py | 6 +++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_vii_base_nc.py b/satpy/tests/reader_tests/test_vii_base_nc.py index 7796e04439..64fae3cb3d 100644 --- a/satpy/tests/reader_tests/test_vii_base_nc.py +++ b/satpy/tests/reader_tests/test_vii_base_nc.py @@ -148,7 +148,11 @@ def setUp(self, pgi_): def tearDown(self): """Remove the previously created test file.""" - os.remove(TEST_FILE) + # Catch Windows PermissionError for removing the created test file. 
+ try: + os.remove(TEST_FILE) + except OSError: + pass def test_file_reading(self): """Test the file product reading.""" diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py index 4b404ff299..e5fc629b0a 100644 --- a/satpy/tests/reader_tests/test_vii_l1b_nc.py +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -92,7 +92,11 @@ def setUp(self): def tearDown(self): """Remove the previously created test file.""" - os.remove(TEST_FILE) + # Catch Windows PermissionError for removing the created test file. + try: + os.remove(TEST_FILE) + except OSError: + pass def test_calibration_functions(self): """Test the calibration functions.""" From 4bc91c964291775d00a8c3647c11d712a4aa08e9 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Mon, 8 Jun 2020 11:21:45 +0000 Subject: [PATCH 7/8] Fix VII L1b unittests. --- CONTRIBUTING.rst | 149 +------------------ README.rst | 76 +--------- satpy/tests/reader_tests/test_vii_base_nc.py | 16 +- satpy/tests/reader_tests/test_vii_l1b_nc.py | 10 +- 4 files changed, 19 insertions(+), 232 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 5e661b0c80..ac9338fc25 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -1,148 +1 @@ -================= -How to contribute -================= - -Thank you for considering contributing to Satpy! Satpy's development team -is made up of volunteers so any help we can get is very appreciated. - -Contributions from users are what keep this community going. We welcome -any contributions including bug reports, documentation fixes or updates, -bug fixes, and feature requests. By contributing to Satpy you are providing -code that everyone can use and benefit from. - -The following guidelines will describe how the Satpy project structures -its code contributions from discussion to code to package release. - -For more information on contributing to open source projects see -`GitHub's Guide `_. - -What can I do? 
-============== - -- Make sure you have a `GitHub account `_. -- Submit a ticket for your issue, assuming one does not already exist. -- If you're uncomfortable using Git/GitHub, see - `Learn Git Branching `_ or other - online tutorials. -- If you are uncomfortable contributing to an open source project see: - - * `How to Contribute to an Open Source Project on GitHub `_ - video series - * Aaron Meurer's `Git Workflow `_ - * `How to Contribute to Open Source `_ - -- See what `issues `_ already - exist. Issues marked - `good first issue `_ - or `help wanted `_ - can be good issues to start with. -- Read the :doc:`index` for more details on contributing code. -- `Fork `_ the repository on - GitHub and install the package in development mode. -- Update the Satpy documentation to make it clearer and more detailed. -- Contribute code to either fix a bug or add functionality and submit a - `Pull Request `_. -- Make an example Jupyter Notebook and add it to the - `available examples `_. - -What if I break something? -========================== - -Not possible. If something breaks because of your contribution it was our -fault. When you submit your changes to be merged as a GitHub -`Pull Request `_ -they will be automatically tested and checked against coding style rules. -Before they are merged they are reviewed by at least one maintainer of the -Satpy project. If anything needs updating, we'll let you know. - -What is expected? -================= - -You can expect the Satpy maintainers to help you. We are all volunteers, -have jobs, and occasionally go on vacations. We will try our best to answer -your questions as soon as possible. We will try our best to understand your -use case and add the features you need. Although we strive to make -Satpy useful for everyone there may be some feature requests that we can't -allow if they would require breaking existing features. Other features may -be best for a different package, PyTroll or otherwise. 
Regardless, we will -help you find the best place for your feature and to make it possible to do -what you want. - -We, the Satpy maintainers, expect you to be patient, understanding, and -respectful of both developers and users. Satpy can only be successful if -everyone in the community feels welcome. We also expect you to put in as -much work as you expect out of us. There is no dedicated PyTroll or Satpy -support team, so there may be times when you need to do most of the work -to solve your problem (trying different test cases, environments, etc). - -Being respectful includes following the style of the existing code for any -code submissions. Please follow -`PEP8 `_ style guidelines and -limit lines of code to 80 characters whenever possible and when it doesn't -hurt readability. Satpy follows -`Google Style Docstrings `_ -for all code API documentation. When in doubt use the existing code as a -guide for how coding should be done. - -.. _dev_help: - -How do I get help? -================== - -The Satpy developers (and all other PyTroll package developers) monitor the: - -- `Mailing List `_ -- `Slack chat `_ (get an `invitation `_) -- `GitHub issues `_ - -How do I submit my changes? -=========================== - -Any contributions should start with some form of communication (see above) to -let the Satpy maintainers know how you plan to help. The larger the -contribution the more important direct communication is so everyone can avoid -duplicate code and wasted time. -After talking to the Satpy developers any additional work like code or -documentation changes can be provided as a GitHub -`Pull Request `_. - -To make sure that your code complies with the pytroll python standard, you can -run the `flake8 `_ linter on your changes -before you submit them, or even better install a pre-commit hook that runs the -style check for you. 
To this aim, we provide a configuration file for the -`pre-commit `_ tool, that you can install with eg:: - - pip install pre-commit - pre-commit install - -running from your base satpy directory. This will automatically check code style for every commit. - -Code of Conduct -=============== - -Satpy follows the same code of conduct as the PyTroll project. For reference -it is copied to this repository in -`CODE_OF_CONDUCT.md `_. - -As stated in the PyTroll home page, this code of conduct applies to the -project space (GitHub) as well as the public space online and offline when -an individual is representing the project or the community. Online examples -of this include the PyTroll Slack team, mailing list, and the PyTroll twitter -account. This code of conduct also applies to in-person situations like -PyTroll Contributor Weeks (PCW), conference meet-ups, or any other time when -the project is being represented. - -Any violations of this code of conduct will be handled by the core maintainers -of the project including David Hoese, Martin Raspaud, and Adam Dybbroe. -If you wish to report one of the maintainers for a violation and are -not comfortable with them seeing it, please contact one or more of the other -maintainers to report the violation. Responses to violations will be -determined by the maintainers and may include one or more of the following: - -- Verbal warning -- Ask for public apology -- Temporary or permanent ban from in-person events -- Temporary or permanent ban from online communication (Slack, mailing list, etc) - -For details see the official -`code of conduct document `_. +../../../CONTRIBUTING.rst \ No newline at end of file diff --git a/README.rst b/README.rst index b2b8fe0a06..92cacd2853 100644 --- a/README.rst +++ b/README.rst @@ -1,75 +1 @@ -Satpy -===== - -.. image:: https://travis-ci.org/pytroll/satpy.svg?branch=master - :target: https://travis-ci.org/pytroll/satpy - -.. 
image:: https://ci.appveyor.com/api/projects/status/a82tm59hldt4ycor/branch/master?svg=true - :target: https://ci.appveyor.com/project/pytroll/satpy/branch/master - -.. image:: https://coveralls.io/repos/github/pytroll/satpy/badge.svg?branch=master - :target: https://coveralls.io/github/pytroll/satpy?branch=master - -.. image:: https://badge.fury.io/py/satpy.svg - :target: https://badge.fury.io/py/satpy - -.. image:: https://zenodo.org/badge/51397392.svg - :target: https://zenodo.org/badge/latestdoi/51397392 - - -The Satpy package is a python library for reading and manipulating -meteorological remote sensing data and writing it to various image and -data file formats. Satpy comes with the ability to make various RGB -composites directly from satellite instrument channel data or higher level -processing output. The -`pyresample `_ package is used -to resample data to different uniform areas or grids. - -The documentation is available at -http://satpy.readthedocs.org/. - -Installation ------------- - -Satpy can be installed from PyPI with pip: - -.. code-block:: bash - - pip install satpy - - -It is also available from `conda-forge` for conda installations: - -.. code-block:: bash - - conda install -c conda-forge satpy - -Code of Conduct ---------------- - -Satpy follows the same code of conduct as the PyTroll project. For reference -it is copied to this repository in CODE_OF_CONDUCT.md_. - -As stated in the PyTroll home page, this code of conduct applies to the -project space (GitHub) as well as the public space online and offline when -an individual is representing the project or the community. Online examples -of this include the PyTroll Slack team, mailing list, and the PyTroll twitter -account. This code of conduct also applies to in-person situations like -PyTroll Contributor Weeks (PCW), conference meet-ups, or any other time when -the project is being represented. 
- -Any violations of this code of conduct will be handled by the core maintainers -of the project including David Hoese, Martin Raspaud, and Adam Dybbroe. -If you wish to report one of the maintainers for a violation and are -not comfortable with them seeing it, please contact one or more of the other -maintainers to report the violation. Responses to violations will be -determined by the maintainers and may include one or more of the following: - -- Verbal warning -- Ask for public apology -- Temporary or permanent ban from in-person events -- Temporary or permanent ban from online communication (Slack, mailing list, etc) - -For details see the official CODE_OF_CONDUCT.md_. - -.. _CODE_OF_CONDUCT.md: ./CODE_OF_CONDUCT.md +README.rst \ No newline at end of file diff --git a/satpy/tests/reader_tests/test_vii_base_nc.py b/satpy/tests/reader_tests/test_vii_base_nc.py index 64fae3cb3d..93974bf1c8 100644 --- a/satpy/tests/reader_tests/test_vii_base_nc.py +++ b/satpy/tests/reader_tests/test_vii_base_nc.py @@ -23,6 +23,7 @@ import xarray as xr import datetime from netCDF4 import Dataset +import uuid from satpy.readers.vii_base_nc import ViiNCBaseFileHandler, SCAN_ALT_TIE_POINTS, \ TIE_POINTS_FACTOR @@ -44,7 +45,10 @@ class TestViiNCBaseFileHandler(unittest.TestCase): def setUp(self, pgi_): """Set up the test.""" # Easiest way to test the reader is to create a test netCDF file on the fly - with Dataset(TEST_FILE, 'w') as nc: + # uses a UUID to avoid permission conflicts during execution of tests in parallel + self.test_file_name = TEST_FILE + str(uuid.uuid1()) + + with Dataset(self.test_file_name, 'w') as nc: # Add global attributes nc.sensing_start_time_utc = "20170920173040.888" nc.sensing_end_time_utc = "20170920174117.555" @@ -115,7 +119,7 @@ def setUp(self, pgi_): # Create a reader self.reader = ViiNCBaseFileHandler( - filename=TEST_FILE, + filename=self.test_file_name, filename_info=filename_info, filetype_info={ 'cached_longitude': 'data/measurement_data/longitude', 
@@ -126,7 +130,7 @@ def setUp(self, pgi_): # Create a second reader where orthorectification and interpolation are inhibited # by means of the filetype_info flags self.reader_2 = ViiNCBaseFileHandler( - filename=TEST_FILE, + filename=self.test_file_name, filename_info=filename_info, filetype_info={ 'cached_longitude': 'data/measurement_data/longitude', @@ -140,7 +144,7 @@ def setUp(self, pgi_): # Create a third reader without defining cached latitude and longitude # by means of the filetype_info flags self.reader_3 = ViiNCBaseFileHandler( - filename=TEST_FILE, + filename=self.test_file_name, filename_info=filename_info, filetype_info={}, orthorect=True @@ -150,7 +154,7 @@ def tearDown(self): """Remove the previously created test file.""" # Catch Windows PermissionError for removing the created test file. try: - os.remove(TEST_FILE) + os.remove(self.test_file_name) except OSError: pass @@ -171,7 +175,7 @@ def test_file_reading(self): # Checks that the global attributes are correctly read expected_global_attributes = { - 'filename': TEST_FILE, + 'filename': self.test_file_name, 'start_time': expected_start_time, 'end_time': expected_end_time, 'spacecraft_name': "test_spacecraft", diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py index e5fc629b0a..a534c07e54 100644 --- a/satpy/tests/reader_tests/test_vii_l1b_nc.py +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -25,6 +25,7 @@ import dask.array as da import datetime from netCDF4 import Dataset +import uuid from satpy.readers.vii_l1b_nc import ViiL1bNCFileHandler from satpy.readers.vii_utils import MEAN_EARTH_RADIUS @@ -40,7 +41,10 @@ class TestViiL1bNCFileHandler(unittest.TestCase): def setUp(self): """Set up the test.""" # Easiest way to test the reader is to create a test netCDF file on the fly - with Dataset(TEST_FILE, 'w') as nc: + # uses a UUID to avoid permission conflicts during execution of tests in parallel + self.test_file_name = TEST_FILE + 
str(uuid.uuid1()) + + with Dataset(self.test_file_name, 'w') as nc: # Create data group g1 = nc.createGroup('data') @@ -78,7 +82,7 @@ def setUp(self): delta_lat[:] = 1.0 self.reader = ViiL1bNCFileHandler( - filename=TEST_FILE, + filename=self.test_file_name, filename_info={ 'creation_time': datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), @@ -94,7 +98,7 @@ def tearDown(self): """Remove the previously created test file.""" # Catch Windows PermissionError for removing the created test file. try: - os.remove(TEST_FILE) + os.remove(self.test_file_name) except OSError: pass From a0aff963bef667757e775d39f1b69bc648aa030f Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Mon, 8 Jun 2020 11:50:16 +0000 Subject: [PATCH 8/8] Add accidentally removed files. --- CONTRIBUTING.rst | 149 ++++++++++++++++++++++++++++++++++++++++++++++- README.rst | 76 +++++++++++++++++++++++- 2 files changed, 223 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index ac9338fc25..5e661b0c80 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -1 +1,148 @@ -../../../CONTRIBUTING.rst \ No newline at end of file +================= +How to contribute +================= + +Thank you for considering contributing to Satpy! Satpy's development team +is made up of volunteers so any help we can get is very appreciated. + +Contributions from users are what keep this community going. We welcome +any contributions including bug reports, documentation fixes or updates, +bug fixes, and feature requests. By contributing to Satpy you are providing +code that everyone can use and benefit from. + +The following guidelines will describe how the Satpy project structures +its code contributions from discussion to code to package release. + +For more information on contributing to open source projects see +`GitHub's Guide `_. + +What can I do? +============== + +- Make sure you have a `GitHub account `_. 
+- Submit a ticket for your issue, assuming one does not already exist. +- If you're uncomfortable using Git/GitHub, see + `Learn Git Branching `_ or other + online tutorials. +- If you are uncomfortable contributing to an open source project see: + + * `How to Contribute to an Open Source Project on GitHub `_ + video series + * Aaron Meurer's `Git Workflow `_ + * `How to Contribute to Open Source `_ + +- See what `issues `_ already + exist. Issues marked + `good first issue `_ + or `help wanted `_ + can be good issues to start with. +- Read the :doc:`index` for more details on contributing code. +- `Fork `_ the repository on + GitHub and install the package in development mode. +- Update the Satpy documentation to make it clearer and more detailed. +- Contribute code to either fix a bug or add functionality and submit a + `Pull Request `_. +- Make an example Jupyter Notebook and add it to the + `available examples `_. + +What if I break something? +========================== + +Not possible. If something breaks because of your contribution it was our +fault. When you submit your changes to be merged as a GitHub +`Pull Request `_ +they will be automatically tested and checked against coding style rules. +Before they are merged they are reviewed by at least one maintainer of the +Satpy project. If anything needs updating, we'll let you know. + +What is expected? +================= + +You can expect the Satpy maintainers to help you. We are all volunteers, +have jobs, and occasionally go on vacations. We will try our best to answer +your questions as soon as possible. We will try our best to understand your +use case and add the features you need. Although we strive to make +Satpy useful for everyone there may be some feature requests that we can't +allow if they would require breaking existing features. Other features may +be best for a different package, PyTroll or otherwise. 
Regardless, we will +help you find the best place for your feature and to make it possible to do +what you want. + +We, the Satpy maintainers, expect you to be patient, understanding, and +respectful of both developers and users. Satpy can only be successful if +everyone in the community feels welcome. We also expect you to put in as +much work as you expect out of us. There is no dedicated PyTroll or Satpy +support team, so there may be times when you need to do most of the work +to solve your problem (trying different test cases, environments, etc). + +Being respectful includes following the style of the existing code for any +code submissions. Please follow +`PEP8 `_ style guidelines and +limit lines of code to 80 characters whenever possible and when it doesn't +hurt readability. Satpy follows +`Google Style Docstrings `_ +for all code API documentation. When in doubt use the existing code as a +guide for how coding should be done. + +.. _dev_help: + +How do I get help? +================== + +The Satpy developers (and all other PyTroll package developers) monitor the: + +- `Mailing List `_ +- `Slack chat `_ (get an `invitation `_) +- `GitHub issues `_ + +How do I submit my changes? +=========================== + +Any contributions should start with some form of communication (see above) to +let the Satpy maintainers know how you plan to help. The larger the +contribution the more important direct communication is so everyone can avoid +duplicate code and wasted time. +After talking to the Satpy developers any additional work like code or +documentation changes can be provided as a GitHub +`Pull Request `_. + +To make sure that your code complies with the pytroll python standard, you can +run the `flake8 `_ linter on your changes +before you submit them, or even better install a pre-commit hook that runs the +style check for you. 
To this aim, we provide a configuration file for the
+`pre-commit `_ tool, which you can install with e.g.::
+
+  pip install pre-commit
+  pre-commit install
+
+running from your base satpy directory. This will automatically check code style for every commit.
+
+Code of Conduct
+===============
+
+Satpy follows the same code of conduct as the PyTroll project. For reference
+it is copied to this repository in
+`CODE_OF_CONDUCT.md `_.
+
+As stated in the PyTroll home page, this code of conduct applies to the
+project space (GitHub) as well as the public space online and offline when
+an individual is representing the project or the community. Online examples
+of this include the PyTroll Slack team, mailing list, and the PyTroll twitter
+account. This code of conduct also applies to in-person situations like
+PyTroll Contributor Weeks (PCW), conference meet-ups, or any other time when
+the project is being represented.
+
+Any violations of this code of conduct will be handled by the core maintainers
+of the project including David Hoese, Martin Raspaud, and Adam Dybbroe.
+If you wish to report one of the maintainers for a violation and are
+not comfortable with them seeing it, please contact one or more of the other
+maintainers to report the violation. Responses to violations will be
+determined by the maintainers and may include one or more of the following:
+
+- Verbal warning
+- Ask for public apology
+- Temporary or permanent ban from in-person events
+- Temporary or permanent ban from online communication (Slack, mailing list, etc)
+
+For details see the official
+`code of conduct document `_. diff --git a/README.rst b/README.rst index 92cacd2853..b2b8fe0a06 100644 --- a/README.rst +++ b/README.rst @@ -1 +1,75 @@ -README.rst \ No newline at end of file +Satpy +===== +
+.. image:: https://travis-ci.org/pytroll/satpy.svg?branch=master
+   :target: https://travis-ci.org/pytroll/satpy
+
+.. 
image:: https://ci.appveyor.com/api/projects/status/a82tm59hldt4ycor/branch/master?svg=true + :target: https://ci.appveyor.com/project/pytroll/satpy/branch/master + +.. image:: https://coveralls.io/repos/github/pytroll/satpy/badge.svg?branch=master + :target: https://coveralls.io/github/pytroll/satpy?branch=master + +.. image:: https://badge.fury.io/py/satpy.svg + :target: https://badge.fury.io/py/satpy + +.. image:: https://zenodo.org/badge/51397392.svg + :target: https://zenodo.org/badge/latestdoi/51397392 + + +The Satpy package is a python library for reading and manipulating +meteorological remote sensing data and writing it to various image and +data file formats. Satpy comes with the ability to make various RGB +composites directly from satellite instrument channel data or higher level +processing output. The +`pyresample `_ package is used +to resample data to different uniform areas or grids. + +The documentation is available at +http://satpy.readthedocs.org/. + +Installation +------------ + +Satpy can be installed from PyPI with pip: + +.. code-block:: bash + + pip install satpy + + +It is also available from `conda-forge` for conda installations: + +.. code-block:: bash + + conda install -c conda-forge satpy + +Code of Conduct +--------------- + +Satpy follows the same code of conduct as the PyTroll project. For reference +it is copied to this repository in CODE_OF_CONDUCT.md_. + +As stated in the PyTroll home page, this code of conduct applies to the +project space (GitHub) as well as the public space online and offline when +an individual is representing the project or the community. Online examples +of this include the PyTroll Slack team, mailing list, and the PyTroll twitter +account. This code of conduct also applies to in-person situations like +PyTroll Contributor Weeks (PCW), conference meet-ups, or any other time when +the project is being represented. 
+
+Any violations of this code of conduct will be handled by the core maintainers
+of the project including David Hoese, Martin Raspaud, and Adam Dybbroe.
+If you wish to report one of the maintainers for a violation and are
+not comfortable with them seeing it, please contact one or more of the other
+maintainers to report the violation. Responses to violations will be
+determined by the maintainers and may include one or more of the following:
+
+- Verbal warning
+- Ask for public apology
+- Temporary or permanent ban from in-person events
+- Temporary or permanent ban from online communication (Slack, mailing list, etc.)
+
+For details see the official CODE_OF_CONDUCT.md_.
+
+.. _CODE_OF_CONDUCT.md: ./CODE_OF_CONDUCT.md