Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for CMIC product from PPSv2021 #1992

Merged
merged 7 commits into from Feb 18, 2022
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
24 changes: 12 additions & 12 deletions satpy/etc/composites/visir.yaml
Expand Up @@ -337,43 +337,43 @@ composites:
# Cloud top phase from the CMIC (formerly CPP) product, shown with its palette.
cloud_top_phase:
  compositor: !!python/name:satpy.composites.PaletteCompositor
  prerequisites:
    - cmic_phase
    - cmic_phase_pal
  standard_name: cloud_top_phase

# Cloud drop effective radius from CMIC (formerly CPP), colorized with its palette.
cloud_drop_effective_radius:
  compositor: !!python/name:satpy.composites.ColorizeCompositor
  prerequisites:
    - cmic_reff
    - cmic_reff_pal
  standard_name: cloud_drop_effective_radius

# Cloud optical thickness from CMIC (formerly CPP), colorized with its palette.
cloud_optical_thickness:
  compositor: !!python/name:satpy.composites.ColorizeCompositor
  prerequisites:
    - cmic_cot
    - cmic_cot_pal
  standard_name: cloud_optical_thickness

# Cloud water path from CMIC (formerly CPP), colorized with its palette.
cloud_water_path:
  compositor: !!python/name:satpy.composites.ColorizeCompositor
  prerequisites:
    - cmic_cwp
    - cmic_cwp_pal
  standard_name: cloud_water_path

# Ice water path from CMIC (formerly CPP), colorized with its palette.
ice_water_path:
  compositor: !!python/name:satpy.composites.ColorizeCompositor
  prerequisites:
    - cmic_iwp
    - cmic_iwp_pal
  standard_name: ice_water_path

# Liquid water path from CMIC (formerly CPP), colorized with its palette.
liquid_water_path:
  compositor: !!python/name:satpy.composites.ColorizeCompositor
  prerequisites:
    - cmic_lwp
    - cmic_lwp_pal
  standard_name: liquid_water_path

night_microphysics:
Expand Down
120 changes: 77 additions & 43 deletions satpy/etc/readers/nwcsaf-pps_nc.yaml
Expand Up @@ -35,8 +35,12 @@ file_types:
# Legacy CPP product files (PPS <= 2018); variable names in file carry a "cpp_" prefix.
nc_nwcsaf_cpp:
  file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF
  file_patterns: ['S_NWC_CPP_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc']
  file_key_prefix: cpp_


# CMIC product files from PPSv2021 (was called CPP in PPS <= 2018).
nc_nwcsaf_cmic:
  file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF
  file_patterns: ['S_NWC_CMIC_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc']
  file_key_prefix: cmic_

datasets:

Expand Down Expand Up @@ -214,63 +218,93 @@ datasets:
file_type: nc_nwcsaf_ctth


# ---- CPP products ------------
# ---- CMIC products (Was CPP in PPS<=2018)------------

# Cloud phase: read from the "phase" variable in both CPP and CMIC files.
cmic_phase:
  name: cmic_phase
  file_key: phase
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
  coordinates: [lon, lat]

# Palette for the phase product; also reachable via the legacy cpp_ name.
cmic_phase_pal:
  name: [cmic_phase_pal, cpp_phase_pal]
  file_key: phase_pal
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]

# Cloud particle effective radius ("reff" variable in file).
cmic_reff:
  name: cmic_reff
  file_key: reff
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
  coordinates: [lon, lat]

# Palette for reff; scale/offset are taken from the "reff" variable.
cmic_reff_pal:
  name: [cmic_reff_pal, cpp_reff_pal]
  file_key: reff_pal
  scale_offset_dataset: reff
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]

# Cloud optical thickness ("cot" variable in file).
cmic_cot:
  name: cmic_cot
  file_key: cot
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
  coordinates: [lon, lat]

# Palette for cot; scale/offset are taken from the "cot" variable.
cmic_cot_pal:
  name: [cmic_cot_pal, cpp_cot_pal]
  file_key: cot_pal
  scale_offset_dataset: cot
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]

# Cloud water path ("cwp" variable in file).
cmic_cwp:
  name: cmic_cwp
  file_key: cwp
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
  coordinates: [lon, lat]

# Palette for cwp; scale/offset are taken from the "cwp" variable.
cmic_cwp_pal:
  name: [cmic_cwp_pal, cpp_cwp_pal]
  file_key: cwp_pal
  scale_offset_dataset: cwp
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]

# Ice water path ("iwp" variable in file).
cmic_iwp:
  name: cmic_iwp
  file_key: iwp
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
  coordinates: [lon, lat]

# Palette for iwp; scale/offset are taken from the "iwp" variable.
cmic_iwp_pal:
  name: [cmic_iwp_pal, cpp_iwp_pal]
  file_key: iwp_pal
  scale_offset_dataset: iwp
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]

# Liquid water path ("lwp" variable in file).
cmic_lwp:
  name: cmic_lwp
  file_key: lwp
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
  coordinates: [lon, lat]

# Palette for lwp; scale/offset are taken from the "lwp" variable.
cmic_lwp_pal:
  name: [cmic_lwp_pal, cpp_lwp_pal]
  file_key: lwp_pal
  scale_offset_dataset: lwp
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]

# Per-pixel processing status flags; legacy cpp_ alias kept for old configs.
cmic_status_flag:
  name: [cmic_status_flag, cpp_status_flag]
  file_key: status_flag
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
  coordinates: [lon, lat]

# Processing conditions dataset; legacy cpp_ alias kept for old configs.
cmic_conditions:
  name: [cmic_conditions, cpp_conditions]
  file_key: conditions
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
  coordinates: [lon, lat]

# Per-pixel quality dataset; legacy cpp_ alias kept for old configs.
cmic_quality:
  name: [cmic_quality, cpp_quality]
  file_key: quality
  file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
  coordinates: [lon, lat]
70 changes: 38 additions & 32 deletions satpy/readers/nwcsaf_nc.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Satpy developers
# Copyright (c) 2017-2022 Satpy developers
#
# This file is part of satpy.
#
Expand All @@ -25,6 +25,7 @@
import logging
import os
from datetime import datetime
from functools import lru_cache

import dask.array as da
import numpy as np
Expand Down Expand Up @@ -89,6 +90,7 @@ def __init__(self, filename, filename_info, filetype_info):
self.pps = False
self.platform_name = None
self.sensor = None
self.file_key_prefix = filetype_info.get("file_key_prefix", "")

try:
# NWCSAF/Geo:
Expand Down Expand Up @@ -130,22 +132,29 @@ def get_dataset(self, dsid, info):
logger.debug('Get the data set from cache: %s.', dsid_name)
return self.cache[dsid_name]
if dsid_name in ['lon', 'lat'] and dsid_name not in self.nc:
dsid_name = dsid_name + '_reduced'
# Get full resolution lon,lat from the reduced (tie points) grid
lon, lat = self.upsample_geolocation()
if dsid_name == "lon":
return lon
else:
return lat

logger.debug('Reading %s.', dsid_name)
variable = self.nc[dsid_name]
file_key = self._get_filekey(dsid_name, info)
variable = self.nc[file_key]
variable = self.remove_timedim(variable)
variable = self.scale_dataset(dsid, variable, info)

if dsid_name.endswith('_reduced'):
# Get full resolution lon,lat from the reduced (tie points) grid
self.upsample_geolocation(dsid, info)

return self.cache[dsid['name']]
variable = self.scale_dataset(variable, info)

return variable

def scale_dataset(self, dsid, variable, info):
def _get_filekey(self, dsid_name, info):
    """Return the name of the netCDF variable holding this dataset.

    Uses the ``file_key`` from the dataset definition *info*, prepended
    with the file type's ``file_key_prefix`` (e.g. ``cmic_``); falls back
    to the dataset name itself when no ``file_key`` is given.
    """
    try:
        file_key = self.file_key_prefix + info["file_key"]
    except KeyError:
        # No explicit file_key configured: the dataset name is the variable name.
        file_key = dsid_name
    return file_key

def scale_dataset(self, variable, info):
"""Scale the data set, applying the attributes from the netCDF file.

The scale and offset attributes will then be removed from the resulting variable.
Expand Down Expand Up @@ -183,7 +192,7 @@ def scale_dataset(self, dsid, variable, info):

if 'standard_name' in info:
variable.attrs.setdefault('standard_name', info['standard_name'])
variable = self._adjust_variable_for_legacy_software(variable, dsid)
variable = self._adjust_variable_for_legacy_software(variable)

return variable

Expand All @@ -207,13 +216,14 @@ def _mask_variable(variable):
return variable

def _prepare_variable_for_palette(self, variable, info):
if 'scale_offset_dataset' in info:
so_dataset = self.nc[info['scale_offset_dataset']]
scale = so_dataset.attrs['scale_factor']
offset = so_dataset.attrs['add_offset']
else:
try:
so_dataset = self.nc[self.file_key_prefix + info['scale_offset_dataset']]
except KeyError:
scale = 1
offset = 0
else:
scale = so_dataset.attrs['scale_factor']
offset = so_dataset.attrs['add_offset']
variable.attrs['palette_meanings'] = [int(val)
for val in variable.attrs['palette_meanings'].split()]
if variable.attrs['palette_meanings'][0] == 1:
Expand All @@ -225,29 +235,26 @@ def _prepare_variable_for_palette(self, variable, info):
variable = variable[idx]
return variable

def _adjust_variable_for_legacy_software(self, variable):
    """Apply workarounds needed for files from legacy processing software.

    For PPS v2014 files, the valid range of the cloud top altitude does not
    match its palette, and the palette itself carries the nodata colour
    (black) as its first entry; both are corrected here.  The variable is
    identified by its CF attributes rather than by dataset name, so the fix
    applies regardless of which configured name was requested.
    """
    if (self.sw_version == 'NWC/PPS version v2014' and
            variable.attrs.get('standard_name') == 'cloud_top_altitude'):
        # pps 2014 valid range and palette don't match
        variable.attrs['valid_range'] = (0., 9000.)
    if (self.sw_version == 'NWC/PPS version v2014' and
            variable.attrs.get('long_name') == 'RGB Palette for ctth_alti'):
        # pps 2014 palette has the nodata color (black) first
        variable = variable[1:, :]
    return variable

def upsample_geolocation(self, dsid, info):
@lru_cache(maxsize=1)
def upsample_geolocation(self):
"""Upsample the geolocation (lon,lat) from the tiepoint grid."""
from geotiepoints import SatelliteInterpolator

# Read the fields needed:
col_indices = self.nc['nx_reduced'].values
row_indices = self.nc['ny_reduced'].values
lat_reduced = self.scale_dataset(dsid, self.nc['lat_reduced'], info)
lon_reduced = self.scale_dataset(dsid, self.nc['lon_reduced'], info)
lat_reduced = self.scale_dataset(self.nc['lat_reduced'], {})
lon_reduced = self.scale_dataset(self.nc['lon_reduced'], {})

shape = (self.nc['y'].shape[0], self.nc['x'].shape[0])
cols_full = np.arange(shape[1])
Expand All @@ -259,10 +266,9 @@ def upsample_geolocation(self, dsid, info):
(rows_full, cols_full))

lons, lats = satint.interpolate()
self.cache['lon'] = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=['y', 'x'])
self.cache['lat'] = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=['y', 'x'])

return
lon = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=['y', 'x'])
lat = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=['y', 'x'])
return lon, lat

def get_area_def(self, dsid):
"""Get the area definition of the datasets in the file.
Expand Down