From 06be8ea6714234d73e939bc2e270f37f25e5c060 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 4 Apr 2016 09:14:41 +0200 Subject: [PATCH 1/2] Fix viirs_compact to remove files even when crashing Signed-off-by: Martin Raspaud --- mpop/satin/viirs_compact.py | 319 ++++++++++++++++++------------------ 1 file changed, 161 insertions(+), 158 deletions(-) diff --git a/mpop/satin/viirs_compact.py b/mpop/satin/viirs_compact.py index e61d7202..92905b9b 100644 --- a/mpop/satin/viirs_compact.py +++ b/mpop/satin/viirs_compact.py @@ -53,173 +53,176 @@ def load(satscene, *args, **kwargs): files_to_load = [] files_to_delete = [] - filename = kwargs.get("filename") - logger.debug("reading %s", str(filename)) - if filename is not None: - if isinstance(filename, (list, set, tuple)): - files = filename - else: - files = [filename] - files_to_load = [] - for filename in files: - pathname, ext = os.path.splitext(filename) - if ext == ".bz2": - zipfile = bz2.BZ2File(filename) - newname = os.path.join("/tmp", os.path.basename(pathname)) - if not os.path.exists(newname): - with open(newname, "wb") as fp_: - fp_.write(zipfile.read()) - zipfile.close() - files_to_load.append(newname) - files_to_delete.append(newname) + try: + filename = kwargs.get("filename") + logger.debug("reading %s", str(filename)) + if filename is not None: + if isinstance(filename, (list, set, tuple)): + files = filename else: - files_to_load.append(filename) - else: - time_start, time_end = kwargs.get("time_interval", - (satscene.time_slot, None)) - - conf = ConfigParser() - conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg")) - options = {} - for option, value in conf.items(satscene.instrument_name + "-level2", - raw=True): - options[option] = value + files = [filename] + files_to_load = [] + for filename in files: + pathname, ext = os.path.splitext(filename) + if ext == ".bz2": + zipfile = bz2.BZ2File(filename) + newname = os.path.join("/tmp", os.path.basename(pathname)) + if not os.path.exists(newname): + with open(newname, "wb") as fp_: + fp_.write(zipfile.read()) + zipfile.close() + files_to_load.append(newname) + files_to_delete.append(newname) + else: + files_to_load.append(filename) + else: + time_start, time_end = kwargs.get("time_interval", + (satscene.time_slot, None)) - template = os.path.join(options["dir"], options["filename"]) + conf = ConfigParser() + conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg")) + options = {} + for option, value in conf.items(satscene.instrument_name + "-level2", + raw=True): + options[option] = value - second = timedelta(seconds=1) - files_to_load = [] + template = os.path.join(options["dir"], options["filename"]) - if time_end is not None: - time = time_start - second * 85 + second = timedelta(seconds=1) files_to_load = [] - while time <= time_end: - fname = time.strftime(template) - flist = glob.glob(fname) + + if time_end is not None: + time = time_start - second * 85 + files_to_load = [] + while time <= time_end: + fname = time.strftime(template) + flist = glob.glob(fname) + try: + files_to_load.append(flist[0]) + time += second * 80 + except IndexError: + pass + time += second + + else: + files_to_load = glob.glob(time_start.strftime(template)) + + chan_dict = {"M01": "M1", + "M02": "M2", + "M03": "M3", + "M04": "M4", + "M05": "M5", + "M06": "M6", + "M07": "M7", + "M08": "M8", + "M09": "M9", + "M10": "M10", + "M11": "M11", + "M12": "M12", + "M13": "M13", + "M14": "M14", + "M15": "M15", + "M16": "M16", + "DNB": "DNB"} + + channels = [(chn, chan_dict[chn]) + for 
chn in satscene.channels_to_load + if chn in chan_dict] + try: + channels_to_load, chans = zip(*channels) + except ValueError: + return + + m_chans = [] + dnb_chan = [] + for chn in chans: + if chn.startswith('M'): + m_chans.append(chn) + elif chn.startswith('DNB'): + dnb_chan.append(chn) + else: + raise ValueError("Reading of channel %s not implemented", chn) + + m_datas = [] + m_lonlats = [] + dnb_datas = [] + dnb_lonlats = [] + + for fname in files_to_load: + is_dnb = os.path.basename(fname).startswith('SVDNBC') + logger.debug("Reading %s", fname) + if is_dnb: + if tables: + h5f = tables.open_file(fname, "r") + else: + logger.warning("DNB data could not be read from %s, " + "PyTables not available.", fname) + continue + else: + h5f = h5py.File(fname, "r") + if m_chans and not is_dnb: try: - files_to_load.append(flist[0]) - time += second * 80 - except IndexError: + arr, m_units = read_m(h5f, m_chans) + m_datas.append(arr) + m_lonlats.append(navigate_m(h5f, m_chans[0])) + except KeyError: pass - time += second - - else: - files_to_load = glob.glob(time_start.strftime(template)) - - chan_dict = {"M01": "M1", - "M02": "M2", - "M03": "M3", - "M04": "M4", - "M05": "M5", - "M06": "M6", - "M07": "M7", - "M08": "M8", - "M09": "M9", - "M10": "M10", - "M11": "M11", - "M12": "M12", - "M13": "M13", - "M14": "M14", - "M15": "M15", - "M16": "M16", - "DNB": "DNB"} - - channels = [(chn, chan_dict[chn]) - for chn in satscene.channels_to_load - if chn in chan_dict] - try: - channels_to_load, chans = zip(*channels) - except ValueError: - return - - m_chans = [] - dnb_chan = [] - for chn in chans: - if chn.startswith('M'): - m_chans.append(chn) - elif chn.startswith('DNB'): - dnb_chan.append(chn) + if dnb_chan and is_dnb and tables: + try: + arr, dnb_units = read_dnb(h5f) + dnb_datas.append(arr) + dnb_lonlats.append(navigate_dnb(h5f)) + except KeyError: + pass + h5f.close() + + if len(m_lonlats) > 0: + m_lons = np.ma.vstack([lonlat[0] for lonlat in m_lonlats]) + m_lats = np.ma.vstack([lonlat[1] for lonlat in m_lonlats]) + if len(dnb_lonlats) > 0: + dnb_lons = np.ma.vstack([lonlat[0] for lonlat in dnb_lonlats]) + dnb_lats = np.ma.vstack([lonlat[1] for lonlat in dnb_lonlats]) + + m_i = 0 + dnb_i = 0 + for chn in channels_to_load: + if m_datas and chn.startswith('M'): + m_data = np.ma.vstack([dat[m_i] for dat in m_datas]) + satscene[chn] = m_data + satscene[chn].info["units"] = m_units[m_i] + m_i += 1 + if dnb_datas and chn.startswith('DNB'): + dnb_data = np.ma.vstack([dat[dnb_i] for dat in dnb_datas]) + satscene[chn] = dnb_data + satscene[chn].info["units"] = dnb_units[dnb_i] + dnb_i += 1 + + if m_datas: + m_area_def = SwathDefinition(np.ma.masked_where(m_data.mask, m_lons), + np.ma.masked_where(m_data.mask, m_lats)) else: - raise ValueError("Reading of channel %s not implemented", chn) - - m_datas = [] - m_lonlats = [] - dnb_datas = [] - dnb_lonlats = [] - - for fname in files_to_load: - logger.debug("Reading %s", fname) - if 'SVDNBC' in fname: - if tables: - h5f = tables.open_file(fname, "r") - else: - logger.warning("DNB data could not be read from %s, " - "PyTables not available.", fname) - continue + logger.warning("No M channel data available.") + + if dnb_datas: + dnb_area_def = SwathDefinition(np.ma.masked_where(dnb_data.mask, + dnb_lons), + np.ma.masked_where(dnb_data.mask, + dnb_lats)) else: - h5f = h5py.File(fname, "r") - if m_chans and "SVDNBC" not in os.path.split(fname)[-1]: - try: - arr, m_units = read_m(h5f, m_chans) - m_datas.append(arr) - m_lonlats.append(navigate_m(h5f, m_chans[0])) - 
except KeyError: - pass - if dnb_chan and "SVDNBC" in os.path.split(fname)[-1]: - try: - arr, dnb_units = read_dnb(h5f) - dnb_datas.append(arr) - dnb_lonlats.append(navigate_dnb(h5f)) - except KeyError: - pass - h5f.close() - - if m_lonlats: - m_lons = np.ma.vstack([lonlat[0] for lonlat in m_lonlats]) - m_lats = np.ma.vstack([lonlat[1] for lonlat in m_lonlats]) - if dnb_lonlats: - dnb_lons = np.ma.vstack([lonlat[0] for lonlat in dnb_lonlats]) - dnb_lats = np.ma.vstack([lonlat[1] for lonlat in dnb_lonlats]) - - m_i = 0 - dnb_i = 0 - for chn in channels_to_load: - if m_datas and chn.startswith('M'): - m_data = np.ma.vstack([dat[m_i] for dat in m_datas]) - satscene[chn] = m_data - satscene[chn].info["units"] = m_units[m_i] - m_i += 1 - if dnb_datas and chn.startswith('DNB'): - dnb_data = np.ma.vstack([dat[dnb_i] for dat in dnb_datas]) - satscene[chn] = dnb_data - satscene[chn].info["units"] = dnb_units[dnb_i] - dnb_i += 1 - - if m_datas: - m_area_def = SwathDefinition(np.ma.masked_where(m_data.mask, m_lons), - np.ma.masked_where(m_data.mask, m_lats)) - else: - logger.warning("No M channel data available.") - - if dnb_datas: - dnb_area_def = SwathDefinition(np.ma.masked_where(dnb_data.mask, - dnb_lons), - np.ma.masked_where(dnb_data.mask, - dnb_lats)) - else: - logger.warning("No DNB data available.") - - for chn in channels_to_load: - if "DNB" not in chn and m_datas: - satscene[chn].area = m_area_def - - if dnb_datas: - for chn in dnb_chan: - satscene[chn].area = dnb_area_def - - for fname in files_to_delete: - if os.path.exists(fname): - os.remove(fname) + logger.warning("No DNB data available.") + + for chn in channels_to_load: + if "DNB" not in chn and m_datas: + satscene[chn].area = m_area_def + + if dnb_datas: + for chn in dnb_chan: + satscene[chn].area = dnb_area_def + + finally: + for fname in files_to_delete: + if os.path.exists(fname): + os.remove(fname) def read_m(h5f, channels, calibrate=1): From 6fc8e4087f752e91097f6c94a2d2eb40b1d8df07 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 11 Apr 2016 13:06:32 +0200 Subject: [PATCH 2/2] Keep track of sun corrected channels for eg ears viirs Signed-off-by: Martin Raspaud --- mpop/channel.py | 5 +++++ mpop/instruments/viirs.py | 42 ++++++++++++++++++++++++++++++--------- mpop/satin/viirs_sdr.py | 3 +++ 3 files changed, 41 insertions(+), 9 deletions(-) diff --git a/mpop/channel.py b/mpop/channel.py index 6c0952ab..849e38ec 100644 --- a/mpop/channel.py +++ b/mpop/channel.py @@ -376,6 +376,10 @@ def sunzen_corr(self, time_slot, lonlats=None, limit=80., mode='cos', also stored to the info dictionary of the originating channel. ''' + if self.info.get('sun_zen_correction_applied'): + LOG.debug("Sun zenith correction already applied, skipping") + return self + import mpop.tools try: @@ -423,6 +427,7 @@ def sunzen_corr(self, time_slot, lonlats=None, limit=80., mode='cos', LOG.debug("cos_limit = %f", cos_limit) # Mask out data where the sun elevation is below a threshold: new_ch.data = np.ma.masked_where(cos_zen < cos_limit, new_ch.data, copy=False) + new_ch.info["sun_zen_correction_applied"] = True return new_ch # Arithmetic operations on channels. 
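
Reviewer note (not part of the patch): the `sun_zen_correction_applied` flag added to `sunzen_corr` above makes the correction idempotent, which matters for EARS VIIRS data that arrives already sun-zenith corrected. A minimal sketch of the guard pattern, assuming only a channel-like object with an `info` dict and masked-array `data`; the class name `Chan` and the plain cosine normalisation are illustrative, not mpop's actual implementation:

    import numpy as np


    class Chan(object):
        """Tiny stand-in for mpop.channel.Channel, for illustration only."""

        def __init__(self, data, info=None):
            self.data = np.ma.asanyarray(data)
            self.info = dict(info or {})

        def sunzen_corr(self, cos_zen, limit=80.):
            """Divide by cos(sun zenith), clipped at `limit` degrees."""
            # Already corrected (e.g. flagged by the reader): return unchanged.
            if self.info.get("sun_zen_correction_applied"):
                return self
            cos_limit = np.cos(np.radians(limit))
            new_ch = Chan(self.data / np.maximum(cos_zen, cos_limit), self.info)
            # Record the correction so a second call becomes a no-op.
            new_ch.info["sun_zen_correction_applied"] = True
            return new_ch

Calling `sunzen_corr` a second time on the returned channel then simply hands it back unchanged, which is what the viirs_sdr.py change below relies on when it tags the reflective bands as pre-corrected.
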
diff --git a/mpop/instruments/viirs.py b/mpop/instruments/viirs.py index edc5acec..e831f588 100644 --- a/mpop/instruments/viirs.py +++ b/mpop/instruments/viirs.py @@ -81,12 +81,33 @@ def overview(self, stretch='linear', gamma=1.6): overview.prerequisites = set(['M05', 'M07', 'M15']) def overview_sun(self, stretch='linear', gamma=1.6): - """Make an Overview RGB image composite from VIIRS - channels. Sun-zenith correction is implicit for VIIRS + """Make an overview RGB image composite normalising with cosine to the + sun zenith angle. """ - return self.overview(stretch=stretch, gamma=gamma) + self.check_channels('M05', 'M07', 'M15') + + lonlats = self['M15'].area.get_lonlats() + + red = self['M05'].sunzen_corr(self.time_slot, lonlats, limit=88., + sunmask=95).data + green = self['M07'].sunzen_corr(self.time_slot, lonlats, limit=88., + sunmask=95).data + blue = -self['M15'].data + + img = geo_image.GeoImage((red, green, blue), + self.area, + self.time_slot, + fill_value=(0, 0, 0), + mode="RGB") + + if stretch: + img.enhance(stretch=stretch) + if gamma: + img.enhance(gamma=gamma) + + return img - overview_sun.prerequisites = overview.prerequisites + overview_sun.prerequisites = set(['M05', 'M07', 'M15']) def hr_overview(self): """Make a high resolution Overview RGB image composite @@ -596,11 +617,14 @@ def snow_age(self): self.check_channels('M07', 'M08', 'M09', 'M10', 'M11') coeff = 255. / 160. - m07 = self['M07'].data * coeff - m08 = self['M08'].data * coeff - m09 = self['M09'].data * coeff - m10 = self['M10'].data * coeff - m11 = self['M11'].data * coeff + + lonlats = self['M11'].area.get_lonlats() + + m07 = self['M07'].sunzen_corr(self.time_slot, lonlats, limit=88., sunmask=95).data * coeff + m08 = self['M08'].sunzen_corr(self.time_slot, lonlats, limit=88., sunmask=95).data * coeff + m09 = self['M09'].sunzen_corr(self.time_slot, lonlats, limit=88., sunmask=95).data * coeff + m10 = self['M10'].sunzen_corr(self.time_slot, lonlats, limit=88., sunmask=95).data * coeff + m11 = self['M11'].sunzen_corr(self.time_slot, lonlats, limit=88., sunmask=95).data * coeff refcu = m11 - m10 refcu[refcu < 0] = 0 diff --git a/mpop/satin/viirs_sdr.py b/mpop/satin/viirs_sdr.py index 58b664dd..f153bc1b 100644 --- a/mpop/satin/viirs_sdr.py +++ b/mpop/satin/viirs_sdr.py @@ -740,6 +740,9 @@ def load(self, satscene, calibrate=1, time_interval=None, satscene[chn].info['band_id'] = band.band_id satscene[chn].info['start_time'] = band.begin_time satscene[chn].info['end_time'] = band.end_time + if chn in ['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', + 'I01', 'I02', 'I03']: + satscene[chn].info['sun_zen_correction_applied'] = True # We assume the same geolocation should apply to all M-bands! # ...and the same to all I-bands:
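
A closing note on the first patch: wrapping the body of `load()` in try/finally guarantees that temporary files created while decompressing .bz2 inputs are removed even if reading raises. A minimal standalone sketch of that pattern under the same assumptions; `load_granules` and the injected `read_granule` callable are hypothetical names, not part of mpop:

    import bz2
    import os


    def load_granules(filenames, read_granule, workdir="/tmp"):
        """Read granule files, transparently decompressing .bz2 inputs.

        `read_granule` is any callable taking a plain-file path; temporary
        decompressed copies are removed even if it raises.
        """
        files_to_delete = []
        results = []
        try:
            for filename in filenames:
                pathname, ext = os.path.splitext(filename)
                if ext == ".bz2":
                    newname = os.path.join(workdir, os.path.basename(pathname))
                    if not os.path.exists(newname):
                        zipped = bz2.BZ2File(filename)
                        try:
                            with open(newname, "wb") as out:
                                out.write(zipped.read())
                        finally:
                            zipped.close()
                    files_to_delete.append(newname)
                    filename = newname
                results.append(read_granule(filename))
            return results
        finally:
            # Mirrors the patch: cleanup runs whether or not reading succeeded.
            for fname in files_to_delete:
                if os.path.exists(fname):
                    os.remove(fname)
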