From 64eac438bd1b6053111cdbd1276b4949b8aeb49d Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Wed, 28 Nov 2018 11:58:33 +0000 Subject: [PATCH 01/23] Add HRV area extent definition for a ROI file. --- satpy/readers/native_msg.py | 175 ++++++++++++++++++++++-------------- 1 file changed, 110 insertions(+), 65 deletions(-) diff --git a/satpy/readers/native_msg.py b/satpy/readers/native_msg.py index ae68c13dd3..eae7ec573e 100644 --- a/satpy/readers/native_msg.py +++ b/satpy/readers/native_msg.py @@ -66,6 +66,10 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info) self.platform_name = None + # to be (re)set in read_header in order to keep track whether + # we're dealing with a file with an area of interest + self.roi = False + # The available channels are only known after the header # has been read, after that we know what the indices are for each channel self.header = {} @@ -80,6 +84,9 @@ def __init__(self, filename, filename_info, filetype_info): self.trailer = {} self._read_trailer() + data15hd = self.header['15_DATA_HEADER']['ImageDescription']['PlannedCoverageHRV'] + trail = self.trailer['15TRAILER']['ImageProductionStats']['ActualL15CoverageHRV'] + @property def start_time(self): return self.header['15_DATA_HEADER']['ImageAcquisition'][ @@ -116,7 +123,9 @@ def get_lrec(cols): return lrec - visir_rec = get_lrec(int(self.mda['number_of_columns']*1.25)) + # each pixel is 10-bits -> one line of data has 25% more bytes + # than the number of columns suggest (10/8 = 1.25) + visir_rec = get_lrec(int(self.mda['number_of_columns'] * 1.25)) number_of_lowres_channels = len( [s for s in self._channel_list if not s == 'HRV']) drec = [('visir', (visir_rec, number_of_lowres_channels))] @@ -174,51 +183,47 @@ def _read_header(self): 'h': 35785831.00, 'ssp_longitude': ssp_lon} - west = int(sec15hd['WestColumnSelectedRectangle']['Value']) + north = int(sec15hd['NorthLineSelectedRectangle']['Value']) east = int(sec15hd['EastColumnSelectedRectangle']['Value']) - ncols_hrv_hdr = int(sec15hd['NumberColumnsHRV']['Value']) - # We suspect the UMARF will pad out any ROI colums that - # arent divisible by 4 so here we work out how many pixels have - # been added to the column. 
- x = ((west - east + 1) * (10.0 / 8) % 1) - y = int((1 - x) * 4) - - if y < 4: - # column has been padded with y pixels - cols_visir = int((west - east + 1 + y) * 1.25) - else: - # no padding has occurred - cols_visir = int((west - east + 1) * 1.25) + south = int(sec15hd['SouthLineSelectedRectangle']['Value']) + west = int(sec15hd['WestColumnSelectedRectangle']['Value']) + + # check if the file has less rows or columns than + # the maximum, if so it is an area of interest file + # columns and rows start at 1 -> 3712 - 1 = 3711 + if (north - south < 3711) or (west - east < 3711): + self.roi = True + + # If the number of columns in the file is not divisible by 4, + # UMARF will add extra columns to the file + modulo = (west - east + 1) % 4 + padding = 0 + if modulo > 0: + padding = 4 - modulo + cols_visir = west - east + 1 + padding + + # Check the VISIR calculated column dimension against + # the header information + cols_visir_hdr = int(sec15hd['NumberColumnsVISIR']['Value']) + if cols_visir_hdr != cols_visir: + logger.warning( + "Number of VISIR columns from the header is incorrect!") + logger.warning("Header: %d", cols_visir_hdr) + logger.warning("Calculated: = %d", cols_visir) - # HRV Channel - check if an ROI + # HRV Channel - check if the area is reduced in east west + # direction as this affects the number of columns in the file + cols_hrv_hdr = int(sec15hd['NumberColumnsHRV']['Value']) if (west - east) < 3711: - cols_hrv = int(np.ceil(ncols_hrv_hdr * 10.0 / 8)) # 6960 + cols_hrv = cols_hrv_hdr else: - cols_hrv = int(np.ceil(5568 * 10.0 / 8)) # 6960 + cols_hrv = int(cols_hrv_hdr / 2) - # self.mda should represent the 16bit dimensions not 10bit + # self.mda represents the 16bit dimensions not 10bit self.mda['number_of_lines'] = int(sec15hd['NumberLinesVISIR']['Value']) - self.mda['number_of_columns'] = int(cols_visir / 1.25) + self.mda['number_of_columns'] = cols_visir self.mda['hrv_number_of_lines'] = int(sec15hd["NumberLinesHRV"]['Value']) - self.mda['hrv_number_of_columns'] = int(cols_hrv / 1.25) - - # Check the calculated row,column dimensions against the header information: - ncols = self.mda['number_of_columns'] - ncols_hdr = int(sec15hd['NumberLinesVISIR']['Value']) - - if ncols != ncols_hdr: - logger.warning( - "Number of VISIR columns from header and derived from data are not consistent!") - logger.warning("Number of columns read from header = %d", ncols_hdr) - logger.warning("Number of columns calculated from data = %d", ncols) - - ncols_hrv = self.mda['hrv_number_of_columns'] - - if ncols_hrv != ncols_hrv_hdr: - logger.warning( - "Number of HRV columns from header and derived from data are not consistent!") - logger.warning("Number of columns read from header = %d", ncols_hrv_hdr) - logger.warning("Number of columns calculated from data = %d", ncols_hrv) + self.mda['hrv_number_of_columns'] = cols_hrv def _read_trailer(self): @@ -270,43 +275,49 @@ def get_area_extent(self, dsid): data15hd = self.header['15_DATA_HEADER'] sec15hd = self.header['15_SECONDARY_PRODUCT_HEADER'] - if dsid.name != 'HRV': + # check for Earth model as this affects the north-south and + # west-east offsets + # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description + earth_model = data15hd['GeometricProcessing']['EarthModel'][ + 'TypeOfEarthModel'] + if earth_model not in [1,2]: + raise NotImplementedError( + 'Unrecognised Earth model: {}'.format(earth_model) + ) + else: + # initialize offset assuming no correction needs to be done + ns_offset = 0 + we_offset = 0 + + # Calculations assume grid 
origin is south-east corner + # section 7.2.4 of MSG Level 1.5 Image Data Format Description + origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} + grid_origin = data15hd['ImageDescription'][ + "ReferenceGridVIS_IR"]["GridOrigin"] + if grid_origin != 2: + raise NotImplementedError( + 'Grid origin not supported number: {}, {} corner' + .format(grid_origin, origins[grid_origin]) + ) - # following calculations assume grid origin is south-east corner - # section 7.2.4 of MSG Level 1.5 Image Data Format Description - origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} - grid_origin = data15hd['ImageDescription'][ - "ReferenceGridVIS_IR"]["GridOrigin"] - if grid_origin != 2: - raise NotImplementedError( - 'Grid origin not supported number: {}, {} corner' - .format(grid_origin, origins[grid_origin]) - ) + if dsid.name != 'HRV': center_point = 3712/2 - north = int(sec15hd["NorthLineSelectedRectangle"]['Value']) + north = int(sec15hd['NorthLineSelectedRectangle']['Value']) east = int(sec15hd['EastColumnSelectedRectangle']['Value']) west = int(sec15hd['WestColumnSelectedRectangle']['Value']) - south = int(sec15hd["SouthLineSelectedRectangle"]['Value']) + south = int(sec15hd['SouthLineSelectedRectangle']['Value']) column_step = data15hd['ImageDescription'][ - "ReferenceGridVIS_IR"]["ColumnDirGridStep"] * 1000.0 + 'ReferenceGridVIS_IR']['ColumnDirGridStep'] * 1000.0 line_step = data15hd['ImageDescription'][ - "ReferenceGridVIS_IR"]["LineDirGridStep"] * 1000.0 + 'ReferenceGridVIS_IR']['LineDirGridStep'] * 1000.0 + # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description - earth_model = data15hd['GeometricProcessing']['EarthModel'][ - 'TypeOfEarthModel'] - if earth_model == 2: - ns_offset = 0 # north +ve - we_offset = 0 # west +ve - elif earth_model == 1: + if earth_model == 1: ns_offset = -0.5 # north +ve we_offset = 0.5 # west +ve - else: - raise NotImplementedError( - 'unrecognised earth model: {}'.format(earth_model) - ) # section 3.1.5 of MSG Level 1.5 Image Data Format Description ll_c = (center_point - west - 0.5 + we_offset) * column_step @@ -318,7 +329,41 @@ def get_area_extent(self, dsid): else: - raise NotImplementedError('HRV not supported!') + # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description + if earth_model == 1: + ns_offset = -1.5 # north +ve + we_offset = 1.5 # west +ve + + if self.roi: + center_point = 11136/2 + + north = int(sec15hd['NorthLineSelectedRectangle']['Value']) * 3 + east = int(sec15hd['EastColumnSelectedRectangle']['Value']) * 3 + west = int(sec15hd['WestColumnSelectedRectangle']['Value']) * 3 + south = int(sec15hd['SouthLineSelectedRectangle']['Value']) * 3 + + column_step = data15hd['ImageDescription'][ + 'ReferenceGridHRV']['ColumnDirGridStep'] * 1000.0 + line_step = data15hd['ImageDescription'][ + 'ReferenceGridHRV']['LineDirGridStep'] * 1000.0 + + # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description + if earth_model == 1: + ns_offset = -1.5 # north +ve + we_offset = 1.5 # west +ve + + # section 3.1.5 of MSG Level 1.5 Image Data Format Description + ll_c = (center_point - west - 0.5 + we_offset) * column_step + ll_l = (south - center_point - 0.5 + ns_offset) * line_step + ur_c = (center_point - east + 0.5 + we_offset) * column_step + ur_l = (north - center_point + 0.5 + ns_offset) * line_step + + area_extent = (ll_c, ll_l, ur_c, ur_l) + + else: + raise NotImplementedError('HRV not supported!') + + return area_extent From d5e1e22df64d142375886a32c74169324544a29e Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Wed, 28 Nov 2018 12:02:14 +0000 
Subject: [PATCH 02/23] Add HRV area extent definition for a ROI file. Clean-up code. --- satpy/readers/native_msg.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/satpy/readers/native_msg.py b/satpy/readers/native_msg.py index eae7ec573e..a1cf70800e 100644 --- a/satpy/readers/native_msg.py +++ b/satpy/readers/native_msg.py @@ -84,9 +84,6 @@ def __init__(self, filename, filename_info, filetype_info): self.trailer = {} self._read_trailer() - data15hd = self.header['15_DATA_HEADER']['ImageDescription']['PlannedCoverageHRV'] - trail = self.trailer['15TRAILER']['ImageProductionStats']['ActualL15CoverageHRV'] - @property def start_time(self): return self.header['15_DATA_HEADER']['ImageAcquisition'][ From a80e5d96fa359cffca9708627448463b38c29101 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Wed, 28 Nov 2018 12:12:59 +0000 Subject: [PATCH 03/23] Fix flake8 & pep8 errors. --- satpy/readers/native_msg.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/satpy/readers/native_msg.py b/satpy/readers/native_msg.py index a1cf70800e..59dbddb1ca 100644 --- a/satpy/readers/native_msg.py +++ b/satpy/readers/native_msg.py @@ -277,7 +277,7 @@ def get_area_extent(self, dsid): # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description earth_model = data15hd['GeometricProcessing']['EarthModel'][ 'TypeOfEarthModel'] - if earth_model not in [1,2]: + if earth_model not in [1, 2]: raise NotImplementedError( 'Unrecognised Earth model: {}'.format(earth_model) ) @@ -360,8 +360,6 @@ def get_area_extent(self, dsid): else: raise NotImplementedError('HRV not supported!') - - return area_extent def get_dataset(self, dsid, info, From d7400e570433ed1f6b8d8b54dc8c86ea65280114 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Fri, 30 Nov 2018 12:26:17 +0000 Subject: [PATCH 04/23] Add unittests for area extent calculations. Restructure code for clarity. --- satpy/readers/native_msg.py | 195 ++++++++--------- satpy/tests/reader_tests/test_native_msg.py | 224 +++++++++++++++++--- 2 files changed, 285 insertions(+), 134 deletions(-) diff --git a/satpy/readers/native_msg.py b/satpy/readers/native_msg.py index 59dbddb1ca..7a3f91dfb7 100644 --- a/satpy/readers/native_msg.py +++ b/satpy/readers/native_msg.py @@ -64,24 +64,18 @@ def __init__(self, filename, filename_info, filetype_info): super(NativeMSGFileHandler, self).__init__(filename, filename_info, filetype_info) - self.platform_name = None - # to be (re)set in read_header in order to keep track whether - # we're dealing with a file with an area of interest - self.roi = False - - # The available channels are only known after the header - # has been read, after that we know what the indices are for each channel + # Declare required variables. + # Assume a full disk file, reset in _read_header if otherwise. 
self.header = {} - self.available_channels = {} self.mda = {} - self._read_header() + self.mda['is_full_disk'] = True + self.trailer = {} - # Prepare dask-array + # Read header, prepare dask-array, read trailer + # Available channels are known only after the header has been read + self._read_header() self.dask_array = da.from_array(self._get_memmap(), chunks=(CHUNK_SIZE,)) - - # Read trailer - self.trailer = {} self._read_trailer() @property @@ -94,6 +88,17 @@ def end_time(self): return self.header['15_DATA_HEADER']['ImageAcquisition'][ 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] + @staticmethod + def _calculate_area_extent(center_point, north, east, south, west, + we_offset, ns_offset, column_step, line_step): + + ll_c = (center_point - west - 0.5 + we_offset) * column_step + ll_l = (south - center_point - 0.5 + ns_offset) * line_step + ur_c = (center_point - east + 0.5 + we_offset) * column_step + ur_l = (north - center_point + 0.5 + ns_offset) * line_step + + return (ll_c, ll_l, ur_c, ur_l) + def _get_data_dtype(self): """Get the dtype of the file based on the actual available channels""" @@ -123,11 +128,11 @@ def get_lrec(cols): # each pixel is 10-bits -> one line of data has 25% more bytes # than the number of columns suggest (10/8 = 1.25) visir_rec = get_lrec(int(self.mda['number_of_columns'] * 1.25)) - number_of_lowres_channels = len( - [s for s in self._channel_list if not s == 'HRV']) - drec = [('visir', (visir_rec, number_of_lowres_channels))] + number_of_visir_channels = len( + [s for s in self.mda['channel_list'] if not s == 'HRV']) + drec = [('visir', (visir_rec, number_of_visir_channels))] - if self.available_channels['HRV']: + if self.mda['available_channels']['HRV']: hrv_rec = get_lrec(int(self.mda['hrv_number_of_columns'] * 1.25)) drec.append(('hrv', (hrv_rec, 3))) @@ -157,13 +162,13 @@ def _read_header(self): sec15hd = self.header['15_SECONDARY_PRODUCT_HEADER'] # Set the list of available channels: - self.available_channels = get_available_channels(self.header) - self._channel_list = [i for i in CHANNEL_NAMES.values() - if self.available_channels[i]] + self.mda['available_channels'] = get_available_channels(self.header) + self.mda['channel_list'] = [i for i in CHANNEL_NAMES.values() + if self.mda['available_channels'][i]] - self.platform_id = data15hd[ + self.mda['platform_id'] = data15hd[ 'SatelliteStatus']['SatelliteDefinition']['SatelliteId'] - self.platform_name = "Meteosat-" + SATNUM[self.platform_id] + self.mda['platform_name'] = "Meteosat-" + SATNUM[self.mda['platform_id']] equator_radius = data15hd['GeometricProcessing'][ 'EarthModel']['EquatorialRadius'] * 1000. 
@@ -185,19 +190,22 @@ def _read_header(self): south = int(sec15hd['SouthLineSelectedRectangle']['Value']) west = int(sec15hd['WestColumnSelectedRectangle']['Value']) + ncolumns = west - east + 1 + nrows = north - south + 1 + # check if the file has less rows or columns than # the maximum, if so it is an area of interest file # columns and rows start at 1 -> 3712 - 1 = 3711 - if (north - south < 3711) or (west - east < 3711): - self.roi = True + if (nrows < 3712) or (ncolumns < 3712): + self.mda['is_full_disk'] = False # If the number of columns in the file is not divisible by 4, # UMARF will add extra columns to the file - modulo = (west - east + 1) % 4 + modulo = ncolumns % 4 padding = 0 if modulo > 0: padding = 4 - modulo - cols_visir = west - east + 1 + padding + cols_visir = ncolumns + padding # Check the VISIR calculated column dimension against # the header information @@ -211,7 +219,7 @@ def _read_header(self): # HRV Channel - check if the area is reduced in east west # direction as this affects the number of columns in the file cols_hrv_hdr = int(sec15hd['NumberColumnsHRV']['Value']) - if (west - east) < 3711: + if ncolumns < 3712: cols_hrv = cols_hrv_hdr else: cols_hrv = int(cols_hrv_hdr / 2) @@ -277,108 +285,89 @@ def get_area_extent(self, dsid): # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description earth_model = data15hd['GeometricProcessing']['EarthModel'][ 'TypeOfEarthModel'] - if earth_model not in [1, 2]: + if earth_model == 2: + ns_offset = 0 + we_offset = 0 + elif earth_model == 1: + ns_offset = -0.5 + we_offset = 0.5 + if dsid.name == 'HRV': + ns_offset = -1.5 + we_offset = 1.5 + else: raise NotImplementedError( 'Unrecognised Earth model: {}'.format(earth_model) ) - else: - # initialize offset assuming no correction needs to be done - ns_offset = 0 - we_offset = 0 # Calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format Description + if dsid.name == 'HRV': + reference_grid = 'ReferenceGridHRV' + else: + reference_grid = 'ReferenceGridVIS_IR' + origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} grid_origin = data15hd['ImageDescription'][ - "ReferenceGridVIS_IR"]["GridOrigin"] + reference_grid]['GridOrigin'] if grid_origin != 2: - raise NotImplementedError( - 'Grid origin not supported number: {}, {} corner' - .format(grid_origin, origins[grid_origin]) + msg = 'Grid origin not supported number: {}, {} corner'.format( + grid_origin, origins[grid_origin] ) - - if dsid.name != 'HRV': - - center_point = 3712/2 - - north = int(sec15hd['NorthLineSelectedRectangle']['Value']) - east = int(sec15hd['EastColumnSelectedRectangle']['Value']) - west = int(sec15hd['WestColumnSelectedRectangle']['Value']) - south = int(sec15hd['SouthLineSelectedRectangle']['Value']) - - column_step = data15hd['ImageDescription'][ - 'ReferenceGridVIS_IR']['ColumnDirGridStep'] * 1000.0 - line_step = data15hd['ImageDescription'][ - 'ReferenceGridVIS_IR']['LineDirGridStep'] * 1000.0 - - # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description - if earth_model == 1: - ns_offset = -0.5 # north +ve - we_offset = 0.5 # west +ve - - # section 3.1.5 of MSG Level 1.5 Image Data Format Description - ll_c = (center_point - west - 0.5 + we_offset) * column_step - ll_l = (south - center_point - 0.5 + ns_offset) * line_step - ur_c = (center_point - east + 0.5 + we_offset) * column_step - ur_l = (north - center_point + 0.5 + ns_offset) * line_step - - area_extent = (ll_c, ll_l, ur_c, ur_l) - + raise NotImplementedError(msg) + + # When dealing with HRV channel 
and full disk, area extent is + # in two pieces + if (dsid.name == 'HRV') and self.mda['is_full_disk']: + # TODO: Implement HRV full disk area_extent + # NotImplementedError does not catch this, it must + # be used elsewhere already + msg = 'HRV full disk area extent not implemented.' + raise RuntimeError(msg) + + # Otherwise area extent is in one piece, corner points are + # the same as for VISIR channels, HRV channel is having + # three times the amount of columns and rows else: - # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description - if earth_model == 1: - ns_offset = -1.5 # north +ve - we_offset = 1.5 # west +ve - - if self.roi: + if dsid.name == 'HRV': center_point = 11136/2 + coeff = 3 + else: + center_point = 3712/2 + coeff = 1 - north = int(sec15hd['NorthLineSelectedRectangle']['Value']) * 3 - east = int(sec15hd['EastColumnSelectedRectangle']['Value']) * 3 - west = int(sec15hd['WestColumnSelectedRectangle']['Value']) * 3 - south = int(sec15hd['SouthLineSelectedRectangle']['Value']) * 3 - - column_step = data15hd['ImageDescription'][ - 'ReferenceGridHRV']['ColumnDirGridStep'] * 1000.0 - line_step = data15hd['ImageDescription'][ - 'ReferenceGridHRV']['LineDirGridStep'] * 1000.0 - - # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description - if earth_model == 1: - ns_offset = -1.5 # north +ve - we_offset = 1.5 # west +ve - - # section 3.1.5 of MSG Level 1.5 Image Data Format Description - ll_c = (center_point - west - 0.5 + we_offset) * column_step - ll_l = (south - center_point - 0.5 + ns_offset) * line_step - ur_c = (center_point - east + 0.5 + we_offset) * column_step - ur_l = (north - center_point + 0.5 + ns_offset) * line_step + north = coeff * int(sec15hd['NorthLineSelectedRectangle']['Value']) + east = coeff * int(sec15hd['EastColumnSelectedRectangle']['Value']) + west = coeff * int(sec15hd['WestColumnSelectedRectangle']['Value']) + south = coeff * int(sec15hd['SouthLineSelectedRectangle']['Value']) - area_extent = (ll_c, ll_l, ur_c, ur_l) + column_step = data15hd['ImageDescription'][ + reference_grid]['ColumnDirGridStep'] * 1000.0 + line_step = data15hd['ImageDescription'][ + reference_grid]['LineDirGridStep'] * 1000.0 - else: - raise NotImplementedError('HRV not supported!') + area_extent = self._calculate_area_extent( + center_point, north, east, + south, west, we_offset, + ns_offset, column_step, line_step + ) return area_extent def get_dataset(self, dsid, info, xslice=slice(None), yslice=slice(None)): - channel = dsid.name - channel_list = self._channel_list - - if channel not in channel_list: - raise KeyError('Channel % s not available in the file' % channel) - elif channel not in ['HRV']: + if dsid.name not in self.mda['channel_list']: + raise KeyError('Channel % s not available in the file' % dsid.name) + elif dsid.name not in ['HRV']: shape = (self.mda['number_of_lines'], self.mda['number_of_columns']) # Check if there is only 1 channel in the list as a change # is needed in the arrray assignment ie channl id is not present - if len(channel_list) == 1: + if len(self.mda['channel_list']) == 1: raw = self.dask_array['visir']['line_data'] else: - i = channel_list.index(channel) + i = self.mda['channel_list'].index(dsid.name) raw = self.dask_array['visir']['line_data'][:, i, :] data = dec10216(raw.flatten()) @@ -416,7 +405,7 @@ def get_dataset(self, dsid, info, dataset.attrs['units'] = info['units'] dataset.attrs['wavelength'] = info['wavelength'] dataset.attrs['standard_name'] = info['standard_name'] - dataset.attrs['platform_name'] = self.platform_name 
+ dataset.attrs['platform_name'] = self.mda['platform_name'] dataset.attrs['sensor'] = 'seviri' return dataset @@ -460,7 +449,7 @@ def calibrate(self, data, dsid): res = self._convert_to_radiance(data, gain, offset) if calibration == 'reflectance': - solar_irradiance = CALIB[self.platform_id][channel]["F"] + solar_irradiance = CALIB[self.mda['platform_id']][channel]["F"] res = self._vis_calibrate(res, solar_irradiance) elif calibration == 'brightness_temperature': diff --git a/satpy/tests/reader_tests/test_native_msg.py b/satpy/tests/reader_tests/test_native_msg.py index 9626b5c7e1..abe50c885a 100644 --- a/satpy/tests/reader_tests/test_native_msg.py +++ b/satpy/tests/reader_tests/test_native_msg.py @@ -21,7 +21,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -"""Unittesting the native msg reader +"""Unittesting the Native SEVIRI reader. """ import sys @@ -31,6 +31,7 @@ NativeMSGFileHandler, get_available_channels, ) +from satpy.dataset import DatasetID if sys.version_info < (2, 7): import unittest2 as unittest @@ -63,10 +64,75 @@ TEST3_HEADER_CHNLIST = {SEC15HDR: {IDS: {}}} TEST3_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XXXXXXXXXXXX' +TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK = { + 'earth_model': 1, + 'dsid': DatasetID(name='VIS006'), + 'is_full_disk': True, + 'expected_area_extent': ( + -5568748.275756836, -5568748.275756836, + 5568748.275756836, 5568748.275756836 + ) +} + +TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI = { + 'earth_model': 1, + 'dsid': DatasetID(name='VIS006'), + 'is_full_disk': False, + 'expected_area_extent': ( + -2205296.3268756866, -333044.75140571594, + 5337717.231988907, 5154692.638874054 + ) +} + +TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK = None + +TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI = { + 'earth_model': 1, + 'dsid': DatasetID(name='HRV'), + 'is_full_disk': False, + 'expected_area_extent': ( + -2204296.1049079895, -332044.6038246155, + 5336716.885566711, 5153692.299723625 + ) +} + +TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK = { + 'earth_model': 2, + 'dsid': DatasetID(name='VIS006'), + 'is_full_disk': True, + 'expected_area_extent': ( + -5570248.477339745, -5567248.074173927, + 5567248.074173927, 5570248.477339745 + ) +} + +TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK = None + +TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI = { + 'earth_model': 2, + 'dsid': DatasetID(name='VIS006'), + 'is_full_disk': False, + 'expected_area_extent': ( + -2206796.5284585953, -331544.5498228073, + 5336217.030405998, 5156192.840456963 + ) +} + +TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI = { + 'earth_model': 2, + 'dsid': DatasetID(name='HRV'), + 'is_full_disk': False, + 'expected_area_extent': ( + -2205796.3064312935, -330544.4023013115, + 5335216.684043407, 5155192.501246929 + ) +} + # This should preferably be put in a helper-module # Fixme! def assertNumpyArraysEqual(self, other): + if self.shape != other.shape: raise AssertionError("Shapes don't match") if not np.allclose(self, other): @@ -113,64 +179,159 @@ class TestNativeMSGAreaExtent(unittest.TestCase): inspecting the output of geoferenced imagery. """ @staticmethod - def get_mock_file_handler(earth_model): + def create_test_header(earth_model, dsid, is_full_disk): """ Mocked NativeMSGFileHandler with sufficient attributes for NativeMSGFileHandler.get_area_extent to be able to execute. 
""" + + if dsid.name == 'HRV': + reference_grid = 'ReferenceGridHRV' + column_dir_grid_step = 1.0001343488693237 + line_dir_grid_step = 1.0001343488693237 + else: + reference_grid = 'ReferenceGridVIS_IR' + column_dir_grid_step = 3.0004031658172607 + line_dir_grid_step = 3.0004031658172607 + + if is_full_disk: + north = 3712 + east = 1 + west = 3712 + south = 1 + n_visir_cols = 3712 + n_visir_lines = 3712 + else: + north = 3574 + east = 78 + west = 2591 + south = 1746 + n_visir_cols = 2516 + n_visir_lines = north - south + 1 + header = { '15_DATA_HEADER': { 'ImageDescription': { - 'ReferenceGridVIS_IR': { - 'ColumnDirGridStep': 3.0004032, - 'LineDirGridStep': 3.0004032, + reference_grid: { + 'ColumnDirGridStep': column_dir_grid_step, + 'LineDirGridStep': line_dir_grid_step, 'GridOrigin': 2, # south-east corner + }, + 'ProjectionDescription': { + 'LongitudeOfSSP': 0.0 } }, 'GeometricProcessing': { - 'EarthModel': {'TypeOfEarthModel': earth_model} + 'EarthModel': { + 'TypeOfEarthModel': earth_model, + 'EquatorialRadius': 6378169.0, + 'NorthPolarRadius': 6356583.800000001, + 'SouthPolarRadius': 6356583.800000001, + } + }, + 'SatelliteStatus': { + 'SatelliteDefinition': { + 'SatelliteId': 324 + } } }, '15_SECONDARY_PRODUCT_HEADER': { - 'NorthLineSelectedRectangle': {'Value': 3712}, - 'EastColumnSelectedRectangle': {'Value': 1}, - 'WestColumnSelectedRectangle': {'Value': 3712}, - 'SouthLineSelectedRectangle': {'Value': 1}, + 'NorthLineSelectedRectangle': {'Value': north}, + 'EastColumnSelectedRectangle': {'Value': east}, + 'WestColumnSelectedRectangle': {'Value': west}, + 'SouthLineSelectedRectangle': {'Value': south}, + 'SelectedBandIDs': {'Value': 'xxxxxxxxxxxx'}, + 'NumberColumnsVISIR': {'Value': n_visir_cols}, + 'NumberLinesVISIR': {'Value': n_visir_lines}, + 'NumberColumnsHRV': {'Value': 11136}, + 'NumberLinesHRV': {'Value': 11136}, } } - return mock.Mock(header=header) + + return header + + def prepare_area_extents(self, test_dict): + + earth_model = test_dict['earth_model'] + dsid = test_dict['dsid'] + is_full_disk = test_dict['is_full_disk'] + header = self.create_test_header(earth_model, dsid, is_full_disk) + + expected_area_extent = ( + np.array(test_dict['expected_area_extent']) + ) + + with mock.patch('satpy.readers.native_msg.np.fromfile') as fromfile: + fromfile.return_value = header + with mock.patch('satpy.readers.native_msg.recarray2dict') as recarray2dict: + recarray2dict.side_effect = (lambda x: x) + with mock.patch('satpy.readers.native_msg.NativeMSGFileHandler._get_memmap') as _get_memmap: + _get_memmap.return_value = np.arange(3) + with mock.patch('satpy.readers.native_msg.NativeMSGFileHandler._read_trailer'): + + fh = NativeMSGFileHandler(None, {}, None) + fh.header = header + calc_area_extent = np.array( + fh.get_area_extent(dsid) + ) + + return (calc_area_extent, expected_area_extent) def setUp(self): pass - def test_earthmodel1(self): - """TypeOfEarthModel = 1, need to offset by 0.5 pixels""" - calc_area_extent = NativeMSGFileHandler.get_area_extent( - self.get_mock_file_handler(earth_model=1), - mock.Mock(name='VIS006') # mocked dsid (not 'HRV') + # Earth model 1 tests + def test_earthmodel1_visir_fulldisk(self): + + calculated, expected = self.prepare_area_extents( + TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK ) - expected_area_extent = ( - -5568748.275756836, -5568748.275756836, - 5568748.275756836, 5568748.275756836 + assertNumpyArraysEqual(calculated, expected) + + def test_earthmodel1_hrv_fulldisk(self): + # Not implemented + pass + + def 
test_earthmodel1_visir_roi(self): + + calculated, expected = self.prepare_area_extents( + TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI ) - assertNumpyArraysEqual( - np.array(calc_area_extent), np.array(expected_area_extent) + assertNumpyArraysEqual(calculated, expected) + + def test_earthmodel1_hrv_roi(self): + + calculated, expected = self.prepare_area_extents( + TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI ) + assertNumpyArraysEqual(calculated, expected) - def test_earthmodel2(self): - """TypeOfEarthModel = 2, do not offset""" - calc_area_extent = NativeMSGFileHandler.get_area_extent( - self.get_mock_file_handler(earth_model=2), - mock.Mock(name='VIS006') # mocked dsid (not 'HRV') + # Earth model 2 tests + def test_earthmodel2_visir_fulldisk(self): + + calculated, expected = self.prepare_area_extents( + TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK ) - expected_area_extent = ( - -5570248.477339745, -5567248.074173927, - 5567248.074173927, 5570248.477339745 + assertNumpyArraysEqual(calculated, expected) + + def test_earthmodel2_hrv_fulldisk(self): + # Not implemented + pass + + def test_earthmodel2_visir_roi(self): + + calculated, expected = self.prepare_area_extents( + TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI ) - assertNumpyArraysEqual( - np.array(calc_area_extent), np.array(expected_area_extent) + assertNumpyArraysEqual(calculated, expected) + + def test_earthmodel2_hrv_roi(self): + + calculated, expected = self.prepare_area_extents( + TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI ) + assertNumpyArraysEqual(calculated, expected) def tearDown(self): pass @@ -182,6 +343,7 @@ def suite(): loader = unittest.TestLoader() mysuite = unittest.TestSuite() mysuite.addTest(loader.loadTestsFromTestCase(TestNativeMSGFileHandler)) + mysuite.addTest(loader.loadTestsFromTestCase(TestNativeMSGAreaExtent)) return mysuite From 93b35d3f82a50cec4442ab0b99a2e789389f0d8d Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Mon, 1 Apr 2019 10:48:48 +0000 Subject: [PATCH 05/23] Add Vaisala GLD360-reader. --- satpy/etc/readers/gld360.yaml | 43 +++++++++++++++++++++++ satpy/readers/gld360.py | 64 +++++++++++++++++++++++++++++++++++ 2 files changed, 107 insertions(+) create mode 100644 satpy/etc/readers/gld360.yaml create mode 100644 satpy/readers/gld360.py diff --git a/satpy/etc/readers/gld360.yaml b/satpy/etc/readers/gld360.yaml new file mode 100644 index 0000000000..4e55c6b7e4 --- /dev/null +++ b/satpy/etc/readers/gld360.yaml @@ -0,0 +1,43 @@ +reader: + description: Vaisala Global Lightning Dataset 360 reader + name: gld360 + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader '' + sensors: [vaisala] + default_datasets: + default_channels: [power] + +file_types: + gld360: + file_reader: !!python/name:satpy.readers.gld360.GLD360FileHandler '' + file_patterns: ['flashes_{start_time:%Y%m%d}.txt'] + +datasets: + datetime: + name: datetime + sensor: vaisala + resolution: 2000 + file_type: gld360 + + latitude: + name: latitude + sensor: vaisala + resolution: 2000 + file_type: gld360 + standard_name: latitude + + longitude: + name: longitude + sensor: vaisala + resolution: 2000 + file_type: gld360 + standard_name: longitude + + power: + name: power + sensor: vaisala + resolution: 2000 + file_type: gld360 + coordinates: + - longitude + - latitude + standard_name: power diff --git a/satpy/readers/gld360.py b/satpy/readers/gld360.py new file mode 100644 index 0000000000..1fab750acb --- /dev/null +++ b/satpy/readers/gld360.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2016. 
+# +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + +"""Reader for Vaisala Global Lightning Dataset 360 products +""" + +import logging +import pandas as pd +import dask.array as da +import xarray as xr + +from satpy import CHUNK_SIZE +from satpy.readers.file_handlers import BaseFileHandler + +logger = logging.getLogger(__name__) + + +class GLD360FileHandler(BaseFileHandler): + """Vaisala Global Lightning Dataset Reader.""" + + def __init__(self, filename, filename_info, filetype_info): + super(GLD360FileHandler, self).__init__(filename, filename_info, filetype_info) + + names = ['date', 'time', 'lat', 'lon', 'power', 'unit'] + dtypes = {'date': 'str', 'time': 'str', 'lat': 'float', + 'lon': 'float', 'power': 'float', 'units': 'str'} + parse_dates = {'datetime': ['date', 'time']} + + self.data = pd.read_csv(filename, delim_whitespace=True, header=None, + names=names, dtype=dtypes, parse_dates=parse_dates) + + @property + def start_time(self): + return self.data['datetime'].iloc[0] + + @property + def end_time(self): + # import ipdb; ipdb.set_trace() + return self.data['datetime'].iloc[-1] + + def get_dataset(self, dataset_id, dataset_info): + """Load a dataset + """ + xarr = xr.DataArray(da.from_array(self.data[dataset_id.name], + chunks=CHUNK_SIZE), dims=["y"]) + xarr.attrs.update(dataset_info) + + return xarr From 72b161021371b22c6fb564acb5ee9735c3ffaee3 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Wed, 3 Apr 2019 07:53:04 +0000 Subject: [PATCH 06/23] Fix for reading latitudes and longitudes. 
--- satpy/etc/readers/gld360.yaml | 38 +++++++++++++++++------------------ satpy/readers/gld360.py | 6 +++--- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/satpy/etc/readers/gld360.yaml b/satpy/etc/readers/gld360.yaml index 4e55c6b7e4..6b264580fc 100644 --- a/satpy/etc/readers/gld360.yaml +++ b/satpy/etc/readers/gld360.yaml @@ -3,8 +3,8 @@ reader: name: gld360 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader '' sensors: [vaisala] - default_datasets: - default_channels: [power] + #default_datasets: + #default_channels: [power] file_types: gld360: @@ -13,10 +13,10 @@ file_types: datasets: datetime: - name: datetime - sensor: vaisala - resolution: 2000 - file_type: gld360 + name: datetime + sensor: vaisala + resolution: 2000 + file_type: gld360 latitude: name: latitude @@ -26,18 +26,18 @@ datasets: standard_name: latitude longitude: - name: longitude - sensor: vaisala - resolution: 2000 - file_type: gld360 - standard_name: longitude + name: longitude + sensor: vaisala + resolution: 2000 + file_type: gld360 + standard_name: longitude power: - name: power - sensor: vaisala - resolution: 2000 - file_type: gld360 - coordinates: - - longitude - - latitude - standard_name: power + name: power + sensor: vaisala + resolution: 2000 + file_type: gld360 + coordinates: + - longitude + - latitude + standard_name: power diff --git a/satpy/readers/gld360.py b/satpy/readers/gld360.py index 1fab750acb..40a7d31192 100644 --- a/satpy/readers/gld360.py +++ b/satpy/readers/gld360.py @@ -37,9 +37,9 @@ class GLD360FileHandler(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): super(GLD360FileHandler, self).__init__(filename, filename_info, filetype_info) - names = ['date', 'time', 'lat', 'lon', 'power', 'unit'] - dtypes = {'date': 'str', 'time': 'str', 'lat': 'float', - 'lon': 'float', 'power': 'float', 'units': 'str'} + names = ['date', 'time', 'latitude', 'longitude', 'power', 'unit'] + dtypes = {'date': 'str', 'time': 'str', 'latitude': 'float', + 'longitude': 'float', 'power': 'float', 'units': 'str'} parse_dates = {'datetime': ['date', 'time']} self.data = pd.read_csv(filename, delim_whitespace=True, header=None, From 5888ef09395c6f7b787cf9f523bb8d8fc1c70d26 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Wed, 3 Apr 2019 12:13:52 +0000 Subject: [PATCH 07/23] Rename variables and classes. 
--- .../{gld360.yaml => vaisala_gld360.yaml} | 27 +++++++++---------- .../readers/{gld360.py => vaisala_gld360.py} | 8 +++--- 2 files changed, 16 insertions(+), 19 deletions(-) rename satpy/etc/readers/{gld360.yaml => vaisala_gld360.yaml} (57%) rename satpy/readers/{gld360.py => vaisala_gld360.py} (87%) diff --git a/satpy/etc/readers/gld360.yaml b/satpy/etc/readers/vaisala_gld360.yaml similarity index 57% rename from satpy/etc/readers/gld360.yaml rename to satpy/etc/readers/vaisala_gld360.yaml index 6b264580fc..cc37a71d1a 100644 --- a/satpy/etc/readers/gld360.yaml +++ b/satpy/etc/readers/vaisala_gld360.yaml @@ -1,43 +1,40 @@ reader: description: Vaisala Global Lightning Dataset 360 reader - name: gld360 + name: vaisala_gld360 reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader '' - sensors: [vaisala] - #default_datasets: - #default_channels: [power] + sensors: [vaisala_gld360] file_types: - gld360: - file_reader: !!python/name:satpy.readers.gld360.GLD360FileHandler '' + vaisala_gld360: + file_reader: !!python/name:satpy.readers.vaisala_gld360.VaisalaGLD360FileHandler '' file_patterns: ['flashes_{start_time:%Y%m%d}.txt'] datasets: datetime: name: datetime - sensor: vaisala + sensor: vaisala_gld360 resolution: 2000 - file_type: gld360 + file_type: vaisala_gld360 latitude: name: latitude - sensor: vaisala + sensor: vaisala_gld360 resolution: 2000 - file_type: gld360 + file_type: vaisala_gld360 standard_name: latitude longitude: name: longitude - sensor: vaisala + sensor: vaisala_gld360 resolution: 2000 - file_type: gld360 + file_type: vaisala_gld360 standard_name: longitude power: name: power - sensor: vaisala + sensor: vaisala_gld360 resolution: 2000 - file_type: gld360 + file_type: vaisala_gld360 coordinates: - longitude - latitude - standard_name: power diff --git a/satpy/readers/gld360.py b/satpy/readers/vaisala_gld360.py similarity index 87% rename from satpy/readers/gld360.py rename to satpy/readers/vaisala_gld360.py index 40a7d31192..be8e5d2cd5 100644 --- a/satpy/readers/gld360.py +++ b/satpy/readers/vaisala_gld360.py @@ -31,15 +31,15 @@ logger = logging.getLogger(__name__) -class GLD360FileHandler(BaseFileHandler): +class VaisalaGLD360FileHandler(BaseFileHandler): """Vaisala Global Lightning Dataset Reader.""" def __init__(self, filename, filename_info, filetype_info): - super(GLD360FileHandler, self).__init__(filename, filename_info, filetype_info) + super(VaisalaGLD360FileHandler, self).__init__(filename, filename_info, filetype_info) names = ['date', 'time', 'latitude', 'longitude', 'power', 'unit'] - dtypes = {'date': 'str', 'time': 'str', 'latitude': 'float', - 'longitude': 'float', 'power': 'float', 'units': 'str'} + types = ['str', 'str', 'float', 'float', 'float', 'str'] + dtypes = dict(zip(names, types)) parse_dates = {'datetime': ['date', 'time']} self.data = pd.read_csv(filename, delim_whitespace=True, header=None, From c7132793a840522f4f80f77b99a42ded138a689b Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Fri, 5 Apr 2019 09:21:27 +0000 Subject: [PATCH 08/23] Add documentation and reference. Clean-up code. 
Update AUTHORS.md --- AUTHORS.md | 2 +- satpy/etc/readers/vaisala_gld360.yaml | 2 +- satpy/readers/vaisala_gld360.py | 20 +++++++++++++++----- 3 files changed, 17 insertions(+), 7 deletions(-) diff --git a/AUTHORS.md b/AUTHORS.md index f2a0363d32..2af1c44fee 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -30,6 +30,6 @@ The following people have made contributions to this project: - [praerien (praerien)](https://github.com/praerien) - [ralphk11 (ralphk11)](https://github.com/ralphk11) - [roquetp (roquetp)](https://github.com/roquetp) -- [sjoro (sjoro)](https://github.com/sjoro) +- [Sauli Joro (sjoro)](https://github.com/sjoro) - Guido della Bruna - meteoswiss - Marco Sassi - meteoswiss diff --git a/satpy/etc/readers/vaisala_gld360.yaml b/satpy/etc/readers/vaisala_gld360.yaml index cc37a71d1a..d08c00cb30 100644 --- a/satpy/etc/readers/vaisala_gld360.yaml +++ b/satpy/etc/readers/vaisala_gld360.yaml @@ -6,7 +6,7 @@ reader: file_types: vaisala_gld360: - file_reader: !!python/name:satpy.readers.vaisala_gld360.VaisalaGLD360FileHandler '' + file_reader: !!python/name:satpy.readers.vaisala_gld360.VaisalaGLD360TextFileHandler '' file_patterns: ['flashes_{start_time:%Y%m%d}.txt'] datasets: diff --git a/satpy/readers/vaisala_gld360.py b/satpy/readers/vaisala_gld360.py index be8e5d2cd5..ab7339579f 100644 --- a/satpy/readers/vaisala_gld360.py +++ b/satpy/readers/vaisala_gld360.py @@ -17,7 +17,17 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . -"""Reader for Vaisala Global Lightning Dataset 360 products +"""Vaisala Global Lightning Dataset 360 reader + +Vaisala Global Lightning Dataset GLD360 is data as a service +that provides real-time lightning data for accurate and early +detection and tracking of severe weather. The data provided i +s generated by a Vaisala owned and operated world-wide lightning +detection sensor network. + +References: +- [GLD360] https://www.vaisala.com/en/products/data-subscriptions-and-reports/data-sets/gld360 + """ import logging @@ -31,11 +41,12 @@ logger = logging.getLogger(__name__) -class VaisalaGLD360FileHandler(BaseFileHandler): - """Vaisala Global Lightning Dataset Reader.""" +class VaisalaGLD360TextFileHandler(BaseFileHandler): + """ASCII reader for Vaisala GDL360 data. + """ def __init__(self, filename, filename_info, filetype_info): - super(VaisalaGLD360FileHandler, self).__init__(filename, filename_info, filetype_info) + super(VaisalaGLD360TextFileHandler, self).__init__(filename, filename_info, filetype_info) names = ['date', 'time', 'latitude', 'longitude', 'power', 'unit'] types = ['str', 'str', 'float', 'float', 'float', 'str'] @@ -51,7 +62,6 @@ def start_time(self): @property def end_time(self): - # import ipdb; ipdb.set_trace() return self.data['datetime'].iloc[-1] def get_dataset(self, dataset_id, dataset_info): From 84d4cce03b70f1eb1fe4124f367d9833ee8f34c8 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Mon, 13 May 2019 18:37:27 +0000 Subject: [PATCH 09/23] Add unittest for filehandler. 
--- satpy/tests/reader_tests/__init__.py | 3 +- .../tests/reader_tests/test_vaisala_gld360.py | 89 +++++++++++++++++++ 2 files changed, 91 insertions(+), 1 deletion(-) create mode 100644 satpy/tests/reader_tests/test_vaisala_gld360.py diff --git a/satpy/tests/reader_tests/__init__.py b/satpy/tests/reader_tests/__init__.py index a4db9605ad..a616de617c 100644 --- a/satpy/tests/reader_tests/__init__.py +++ b/satpy/tests/reader_tests/__init__.py @@ -38,7 +38,7 @@ test_nc_slstr, test_olci_nc, test_viirs_edr_flood, test_nwcsaf_nc, test_seviri_l1b_hrit, test_sar_c_safe, - test_modis_l1b) + test_modis_l1b, test_vaisala_gld360) if sys.version_info < (2, 7): @@ -82,5 +82,6 @@ def suite(): mysuite.addTests(test_seviri_l1b_hrit.suite()) mysuite.addTests(test_sar_c_safe.suite()) mysuite.addTests(test_modis_l1b.suite()) + mysuite.addTests(test_vaisala_gld360.suite()) return mysuite diff --git a/satpy/tests/reader_tests/test_vaisala_gld360.py b/satpy/tests/reader_tests/test_vaisala_gld360.py new file mode 100644 index 0000000000..afd20febea --- /dev/null +++ b/satpy/tests/reader_tests/test_vaisala_gld360.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2017-2018 PyTroll Community + +# Author(s): + +# Adam.Dybbroe +# Sauli Joro + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +"""Unittesting the Vaisala GLD360 reader. +""" + +import sys +from io import StringIO + +import numpy as np + +from satpy.readers.vaisala_gld360 import ( + VaisalaGLD360TextFileHandler +) +from satpy.dataset import DatasetID + +if sys.version_info < (2, 7): + import unittest2 as unittest +else: + import unittest + +try: + from unittest import mock +except ImportError: + import mock + + +EXPECTED_POWER = np.array([ 12.3, 13.2, -31. ]) + +class TestVaisalaGLD360TextFileHandler(unittest.TestCase): + + """Test the VaisalaGLD360TextFileHandler.""" + + def test_vaisala_gld360(self): + + expected = EXPECTED_POWER + + filename = StringIO( + '2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n' + '2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n' + '2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA' + ) + filename_info = {} + filetype_info = {} + + self.handler = VaisalaGLD360TextFileHandler( + filename, {}, {} + ) + + filename.close() + dataset_id = DatasetID('power') + dataset_info = {} + result = self.handler.get_dataset(dataset_id, dataset_info).values + + # import ipdb; ipdb.set_trace() + np.testing.assert_allclose(result, expected, rtol=1e-05) + +def suite(): + """The test suite for test_scene. + """ + loader = unittest.TestLoader() + mysuite = unittest.TestSuite() + mysuite.addTest(loader.loadTestsFromTestCase(TestVaisalaGLD360TextFileHandler)) + return mysuite + + +if __name__ == "__main__": + # So you can run tests from this module individually. + unittest.main() From 28097846e16cbc34335d6dd8e024cf4f29a3ecc8 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Mon, 13 May 2019 19:24:20 +0000 Subject: [PATCH 10/23] Fix lint errors. 
--- satpy/tests/reader_tests/test_vaisala_gld360.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_vaisala_gld360.py b/satpy/tests/reader_tests/test_vaisala_gld360.py index db5d2e699c..dee87aad99 100644 --- a/satpy/tests/reader_tests/test_vaisala_gld360.py +++ b/satpy/tests/reader_tests/test_vaisala_gld360.py @@ -42,6 +42,7 @@ EXPECTED_POWER = np.array([12.3, 13.2, -31.]) + class TestVaisalaGLD360TextFileHandler(unittest.TestCase): """Test the VaisalaGLD360TextFileHandler.""" @@ -50,7 +51,7 @@ def test_vaisala_gld360(self): expected = EXPECTED_POWER - filename = StringIO( + filename = StringIO( '2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n' '2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n' '2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA' @@ -59,7 +60,7 @@ def test_vaisala_gld360(self): filetype_info = {} self.handler = VaisalaGLD360TextFileHandler( - filename, {}, {} + filename, filename_info, filetype_info ) filename.close() @@ -67,9 +68,9 @@ def test_vaisala_gld360(self): dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values - # import ipdb; ipdb.set_trace() np.testing.assert_allclose(result, expected, rtol=1e-05) + def suite(): """The test suite for test_scene. """ From 6aafdf68c0799c1b0c42201e5e3d56d187768bd9 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Mon, 13 May 2019 20:09:42 +0000 Subject: [PATCH 11/23] Fix for seviri_l1b_native-reader. Modify vaisala_gld360-test to use unicode. --- satpy/readers/seviri_l1b_native.py | 2 +- satpy/tests/reader_tests/test_vaisala_gld360.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 5a350cd86f..5d11afcb85 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -172,7 +172,7 @@ def _read_header(self): self.mda['channel_list'] = [i for i in CHANNEL_NAMES.values() if self.mda['available_channels'][i]] - self.mda['platform_id'] = data15hd[ + self.platform_id = data15hd[ 'SatelliteStatus']['SatelliteDefinition']['SatelliteId'] self.mda['platform_name'] = "Meteosat-" + SATNUM[self.platform_id] diff --git a/satpy/tests/reader_tests/test_vaisala_gld360.py b/satpy/tests/reader_tests/test_vaisala_gld360.py index dee87aad99..4adbae9458 100644 --- a/satpy/tests/reader_tests/test_vaisala_gld360.py +++ b/satpy/tests/reader_tests/test_vaisala_gld360.py @@ -52,7 +52,7 @@ def test_vaisala_gld360(self): expected = EXPECTED_POWER filename = StringIO( - '2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n' + u'2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n' '2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n' '2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA' ) From 2bddb107ab3bf1f94228da8b62e30071525fe93c Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Tue, 14 May 2019 16:49:00 +0000 Subject: [PATCH 12/23] Fix seviri_l1b_native reflectance calibration. 
--- satpy/readers/seviri_l1b_native.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 5d11afcb85..7620c38da1 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -453,7 +453,7 @@ def calibrate(self, data, dsid): res = self._convert_to_radiance(data, gain, offset) if calibration == 'reflectance': - solar_irradiance = CALIB[self.mda['platform_id']][channel]["F"] + solar_irradiance = CALIB[self.platform_id][channel]["F"] res = self._vis_calibrate(res, solar_irradiance) elif calibration == 'brightness_temperature': From a125fb76a493892095bd4dddc7926686e4a4c121 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Thu, 16 May 2019 20:50:34 +0000 Subject: [PATCH 13/23] Add units to datasets. --- satpy/etc/readers/vaisala_gld360.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/satpy/etc/readers/vaisala_gld360.yaml b/satpy/etc/readers/vaisala_gld360.yaml index d08c00cb30..9eec3b070c 100644 --- a/satpy/etc/readers/vaisala_gld360.yaml +++ b/satpy/etc/readers/vaisala_gld360.yaml @@ -22,6 +22,7 @@ datasets: resolution: 2000 file_type: vaisala_gld360 standard_name: latitude + units: degree longitude: name: longitude @@ -29,6 +30,7 @@ datasets: resolution: 2000 file_type: vaisala_gld360 standard_name: longitude + units: degree power: name: power @@ -38,3 +40,4 @@ datasets: coordinates: - longitude - latitude + units: kA From 88d4f76dffff27ec55657d5dc268e2bfb1a83795 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Thu, 16 May 2019 21:09:52 +0000 Subject: [PATCH 14/23] Update copyright text and fix docstring typos. --- satpy/readers/vaisala_gld360.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/satpy/readers/vaisala_gld360.py b/satpy/readers/vaisala_gld360.py index ab7339579f..58b418e111 100644 --- a/satpy/readers/vaisala_gld360.py +++ b/satpy/readers/vaisala_gld360.py @@ -1,28 +1,28 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2016. +# Copyright (c) 2019 Satpy developers # # -# This file is part of satpy. +# This file is part of Satpy. # -# satpy is free software: you can redistribute it and/or modify it under the +# Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with -# satpy. If not, see . +# Satpy. If not, see . """Vaisala Global Lightning Dataset 360 reader Vaisala Global Lightning Dataset GLD360 is data as a service that provides real-time lightning data for accurate and early -detection and tracking of severe weather. The data provided i -s generated by a Vaisala owned and operated world-wide lightning +detection and tracking of severe weather. The data provided is +generated by a Vaisala owned and operated world-wide lightning detection sensor network. 
References: From 89eca78948e5c6756ec92597b88df92e991b240c Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Thu, 16 May 2019 21:15:27 +0000 Subject: [PATCH 15/23] Add time, latitude, and longitude as y-coordinates to power dataset. --- satpy/readers/vaisala_gld360.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/satpy/readers/vaisala_gld360.py b/satpy/readers/vaisala_gld360.py index 58b418e111..f4afed9e17 100644 --- a/satpy/readers/vaisala_gld360.py +++ b/satpy/readers/vaisala_gld360.py @@ -69,6 +69,12 @@ def get_dataset(self, dataset_id, dataset_info): """ xarr = xr.DataArray(da.from_array(self.data[dataset_id.name], chunks=CHUNK_SIZE), dims=["y"]) + + # Add time, longitude, and latitude as non-dimensional y-coordinates + xarr['time'] = ('y', self.data['time']) + xarr['longitude'] = ('y', self.data['longitude']) + xarr['latitude'] = ('y', self.data['latitude']) + xarr.attrs.update(dataset_info) return xarr From 5d06a3aba88955f7e81559abec73d4be7360600c Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Thu, 16 May 2019 21:21:46 +0000 Subject: [PATCH 16/23] Update copyright and clean-up code for VaisalaGLD360 unittest. --- .../tests/reader_tests/test_vaisala_gld360.py | 52 +++++++------------ 1 file changed, 20 insertions(+), 32 deletions(-) diff --git a/satpy/tests/reader_tests/test_vaisala_gld360.py b/satpy/tests/reader_tests/test_vaisala_gld360.py index 4adbae9458..355a3a1719 100644 --- a/satpy/tests/reader_tests/test_vaisala_gld360.py +++ b/satpy/tests/reader_tests/test_vaisala_gld360.py @@ -1,25 +1,21 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- - -# Copyright (c) 2017-2018 PyTroll Community - -# Author(s): - -# Adam.Dybbroe -# Sauli Joro - -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. - -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . +# Copyright (c) 2019 Satpy developers. +# +# +# This file is part of Satpy. +# +# Satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# Satpy. If not, see . """Unittesting the Vaisala GLD360 reader. 
""" @@ -29,18 +25,10 @@ import numpy as np -from satpy.readers.vaisala_gld360 import ( - VaisalaGLD360TextFileHandler -) +from satpy.readers.vaisala_gld360 import VaisalaGLD360TextFileHandler from satpy.dataset import DatasetID -if sys.version_info < (2, 7): - import unittest2 as unittest -else: - import unittest - - -EXPECTED_POWER = np.array([12.3, 13.2, -31.]) +import unittest class TestVaisalaGLD360TextFileHandler(unittest.TestCase): @@ -49,7 +37,7 @@ class TestVaisalaGLD360TextFileHandler(unittest.TestCase): def test_vaisala_gld360(self): - expected = EXPECTED_POWER + expected = np.array([12.3, 13.2, -31.]) filename = StringIO( u'2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n' @@ -72,7 +60,7 @@ def test_vaisala_gld360(self): def suite(): - """The test suite for test_scene. + """The test suite for test_vaisala_gld360. """ loader = unittest.TestLoader() mysuite = unittest.TestSuite() From 8970d2aeb6c597d2abd44bdba48dffa3e607a715 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Thu, 16 May 2019 21:25:40 +0000 Subject: [PATCH 17/23] Fix for time y-coordinate. --- satpy/readers/vaisala_gld360.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/vaisala_gld360.py b/satpy/readers/vaisala_gld360.py index f4afed9e17..9f974f0cc3 100644 --- a/satpy/readers/vaisala_gld360.py +++ b/satpy/readers/vaisala_gld360.py @@ -71,7 +71,7 @@ def get_dataset(self, dataset_id, dataset_info): chunks=CHUNK_SIZE), dims=["y"]) # Add time, longitude, and latitude as non-dimensional y-coordinates - xarr['time'] = ('y', self.data['time']) + xarr['time'] = ('y', self.data['datetime']) xarr['longitude'] = ('y', self.data['longitude']) xarr['latitude'] = ('y', self.data['latitude']) From 1328780cb5f7dde4c7f7b27965bd7738d46811b7 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Thu, 16 May 2019 21:28:41 +0000 Subject: [PATCH 18/23] Make stickler happy. --- satpy/tests/reader_tests/test_vaisala_gld360.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_vaisala_gld360.py b/satpy/tests/reader_tests/test_vaisala_gld360.py index 355a3a1719..88117919b5 100644 --- a/satpy/tests/reader_tests/test_vaisala_gld360.py +++ b/satpy/tests/reader_tests/test_vaisala_gld360.py @@ -20,7 +20,6 @@ """Unittesting the Vaisala GLD360 reader. """ -import sys from io import StringIO import numpy as np From cf2742c751786b1fcf05c6f4cc52e05b134ecadd Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Fri, 17 May 2019 15:55:38 +0000 Subject: [PATCH 19/23] Add a check for unit uniformity, fix units to CF convention, fix docstrings. 
From cf2742c751786b1fcf05c6f4cc52e05b134ecadd Mon Sep 17 00:00:00 2001
From: Sauli Joro
Date: Fri, 17 May 2019 15:55:38 +0000
Subject: [PATCH 19/23] Add a check for unit uniformity, fix units to CF convention, fix docstrings.

---
 satpy/etc/readers/vaisala_gld360.yaml           |  8 ++++----
 satpy/readers/vaisala_gld360.py                 | 11 +++++++----
 satpy/tests/reader_tests/test_vaisala_gld360.py |  3 +--
 3 files changed, 12 insertions(+), 10 deletions(-)

diff --git a/satpy/etc/readers/vaisala_gld360.yaml b/satpy/etc/readers/vaisala_gld360.yaml
index 9eec3b070c..0219f810d5 100644
--- a/satpy/etc/readers/vaisala_gld360.yaml
+++ b/satpy/etc/readers/vaisala_gld360.yaml
@@ -10,8 +10,8 @@ file_types:
     file_patterns: ['flashes_{start_time:%Y%m%d}.txt']
 
 datasets:
-  datetime:
-    name: datetime
+  time:
+    name: time
     sensor: vaisala_gld360
     resolution: 2000
     file_type: vaisala_gld360
@@ -22,7 +22,7 @@ datasets:
     resolution: 2000
     file_type: vaisala_gld360
     standard_name: latitude
-    units: degree
+    units: degree_north
 
   longitude:
     name: longitude
@@ -30,7 +30,7 @@ datasets:
     resolution: 2000
     file_type: vaisala_gld360
     standard_name: longitude
-    units: degree
+    units: degree_east
 
   power:
     name: power
diff --git a/satpy/readers/vaisala_gld360.py b/satpy/readers/vaisala_gld360.py
index 9f974f0cc3..2b9377b3f6 100644
--- a/satpy/readers/vaisala_gld360.py
+++ b/satpy/readers/vaisala_gld360.py
@@ -42,8 +42,7 @@
 
 
 class VaisalaGLD360TextFileHandler(BaseFileHandler):
-    """ASCII reader for Vaisala GDL360 data.
-    """
+    """ASCII reader for Vaisala GDL360 data."""
 
     def __init__(self, filename, filename_info, filetype_info):
         super(VaisalaGLD360TextFileHandler, self).__init__(filename, filename_info, filetype_info)
@@ -51,11 +50,16 @@ def __init__(self, filename, filename_info, filetype_info):
         names = ['date', 'time', 'latitude', 'longitude', 'power', 'unit']
         types = ['str', 'str', 'float', 'float', 'float', 'str']
         dtypes = dict(zip(names, types))
+        # Combine 'date' and 'time' into a datetime object
        parse_dates = {'datetime': ['date', 'time']}
 
         self.data = pd.read_csv(filename, delim_whitespace=True, header=None,
                                 names=names, dtype=dtypes, parse_dates=parse_dates)
 
+        # Check unit uniformity in input file
+        if not (self.data.unit == 'kA').all():
+            raise ValueError('Inconsistent units found in file!')
+
     @property
     def start_time(self):
         return self.data['datetime'].iloc[0]
@@ -65,8 +69,7 @@ def end_time(self):
         return self.data['datetime'].iloc[-1]
 
     def get_dataset(self, dataset_id, dataset_info):
-        """Load a dataset
-        """
+        """Load a dataset."""
         xarr = xr.DataArray(da.from_array(self.data[dataset_id.name],
                                           chunks=CHUNK_SIZE), dims=["y"])
 
diff --git a/satpy/tests/reader_tests/test_vaisala_gld360.py b/satpy/tests/reader_tests/test_vaisala_gld360.py
index 88117919b5..a3d3c0e2d7 100644
--- a/satpy/tests/reader_tests/test_vaisala_gld360.py
+++ b/satpy/tests/reader_tests/test_vaisala_gld360.py
@@ -59,8 +59,7 @@ def test_vaisala_gld360(self):
 
 
 def suite():
-    """The test suite for test_vaisala_gld360.
-    """
+    """The test suite for test_vaisala_gld360."""
     loader = unittest.TestLoader()
     mysuite = unittest.TestSuite()
     mysuite.addTest(loader.loadTestsFromTestCase(TestVaisalaGLD360TextFileHandler))
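
PATCH 19 guards against mixed units in the free-text 'unit' column by requiring every row to read 'kA' before the data are used; the next patch then replaces the hard-coded 'kA' with the value declared in the reader YAML. The check itself is a plain pandas boolean reduction, sketched below on an invented two-row frame (the 'MA' row is fabricated just to trigger the error path):

    import pandas as pd

    # Invented frame: the second row deliberately carries a different unit.
    data = pd.DataFrame({'power': [12.3, 13.2], 'unit': ['kA', 'MA']})

    expected_unit = 'kA'  # hard-coded in PATCH 19, taken from the YAML in PATCH 20
    if not (data.unit == expected_unit).all():
        # The reader raises ValueError here; printing keeps this sketch non-fatal.
        print('Inconsistent units found in file!')
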
From b6b6094824aad5412e305de19fe49a7b46687bd8 Mon Sep 17 00:00:00 2001
From: Sauli Joro
Date: Fri, 17 May 2019 16:58:52 +0000
Subject: [PATCH 20/23] Check input units against reader config.

---
 satpy/readers/vaisala_gld360.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/satpy/readers/vaisala_gld360.py b/satpy/readers/vaisala_gld360.py
index 2b9377b3f6..3371243f5d 100644
--- a/satpy/readers/vaisala_gld360.py
+++ b/satpy/readers/vaisala_gld360.py
@@ -56,10 +56,6 @@ def __init__(self, filename, filename_info, filetype_info):
         self.data = pd.read_csv(filename, delim_whitespace=True, header=None,
                                 names=names, dtype=dtypes, parse_dates=parse_dates)
 
-        # Check unit uniformity in input file
-        if not (self.data.unit == 'kA').all():
-            raise ValueError('Inconsistent units found in file!')
-
     @property
     def start_time(self):
         return self.data['datetime'].iloc[0]
@@ -78,6 +74,11 @@ def get_dataset(self, dataset_id, dataset_info):
         xarr['longitude'] = ('y', self.data['longitude'])
         xarr['latitude'] = ('y', self.data['latitude'])
 
+        if dataset_id.name == 'power':
+            # Check that units in the file match the unit specified in the 
+            # reader yaml-file
+            if not (self.data.unit == dataset_info['units']).all():
+                raise ValueError('Inconsistent units found in file!')
         xarr.attrs.update(dataset_info)
         return xarr

From dbb8c3cfe28105ff6fe0f556fb9bfd6da76414ce Mon Sep 17 00:00:00 2001
From: Sauli Joro
Date: Fri, 17 May 2019 17:03:17 +0000
Subject: [PATCH 21/23] Fix unittest.

---
 satpy/tests/reader_tests/test_vaisala_gld360.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/tests/reader_tests/test_vaisala_gld360.py b/satpy/tests/reader_tests/test_vaisala_gld360.py
index a3d3c0e2d7..d75da5cc2e 100644
--- a/satpy/tests/reader_tests/test_vaisala_gld360.py
+++ b/satpy/tests/reader_tests/test_vaisala_gld360.py
@@ -52,7 +52,7 @@ def test_vaisala_gld360(self):
         filename.close()
 
         dataset_id = DatasetID('power')
-        dataset_info = {}
+        dataset_info = {'units': 'kA'}
         result = self.handler.get_dataset(dataset_id, dataset_info).values
 
         np.testing.assert_allclose(result, expected, rtol=1e-05)

From 269dd212b0638f22d8104cd92304a24379e1d02d Mon Sep 17 00:00:00 2001
From: Sauli Joro
Date: Fri, 17 May 2019 18:04:04 +0000
Subject: [PATCH 22/23] Fix stickler complaints.

---
 satpy/readers/vaisala_gld360.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/readers/vaisala_gld360.py b/satpy/readers/vaisala_gld360.py
index 3371243f5d..e8525182d3 100644
--- a/satpy/readers/vaisala_gld360.py
+++ b/satpy/readers/vaisala_gld360.py
@@ -75,7 +75,7 @@ def get_dataset(self, dataset_id, dataset_info):
         xarr['latitude'] = ('y', self.data['latitude'])
 
         if dataset_id.name == 'power':
-            # Check that units in the file match the unit specified in the 
+            # Check that units in the file match the unit specified in the
             # reader yaml-file
             if not (self.data.unit == dataset_info['units']).all():
                 raise ValueError('Inconsistent units found in file!')

From 6a1c66502f9f4a4bf2e14bc706e8ffa22eb04f76 Mon Sep 17 00:00:00 2001
From: Sauli Joro
Date: Thu, 23 May 2019 07:13:16 +0000
Subject: [PATCH 23/23] Add Vaisala GLD360 to reader-list in docs.

---
 doc/source/index.rst | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/doc/source/index.rst b/doc/source/index.rst
index 52dda505d4..47aa768fab 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -199,6 +199,10 @@ the base Satpy installation.
     * - MERSI-2 L1B data in HDF5 format
       - `mersi2_l1b`
       - Beta
+    * - Vaisala Global Lightning Dataset GLD360 data in ASCII format
+      - `vaisala_gld360`
+      - Beta
+
 
 Indices and tables
 ==================
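
With the reader documented above, the lightning data can be loaded through the normal Scene interface. A usage sketch, assuming a local file named to match the reader's 'flashes_{start_time:%Y%m%d}.txt' pattern (the filename below is made up):

    from satpy import Scene

    # 'flashes_20170620.txt' is a hypothetical file matching the reader's pattern.
    scn = Scene(filenames=['flashes_20170620.txt'], reader='vaisala_gld360')
    scn.load(['power'])

    power = scn['power']
    print(power.coords['time'].values[:3])      # per-flash timestamps
    print(power.coords['latitude'].values[:3])  # per-flash latitudes
    print(power.attrs['units'])                 # 'kA', as declared in the YAML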