diff --git a/mpop/afgl.dat b/mpop/afgl.dat new file mode 100644 index 00000000..cdc2158d --- /dev/null +++ b/mpop/afgl.dat @@ -0,0 +1,60 @@ +# different temperature profiles +# (1) AFGL atmospheric constituent profile. U.S. standard atmosphere 1976. ( AFGL-TR-86-0110) +# (2) AFGL atmospheric constituent profile. tropical. ( AFGL-TR-86-0110) +# (3) AFGL atmospheric constituent profile. midlatitude summer. ( AFGL-TR-86-0110) +# (4) AFGL atmospheric constituent profile. midlatitude winter. ( AFGL-TR-86-0110) +# (5) AFGL atmospheric constituent profile. subarctic summer. ( AFGL-TR-86-0110) +# (6) AFGL atmospheric constituent profile. subarctic winter. ( AFGL-TR-86-0110) +# +# (1) us stand (2) tropic (3) MS (4) MW (5) SS (6) SW +# z(km) T(K) T(K) T(K) T(K) T(K) T(K) + 120.000 360.000 380.000 380.000 333.000 380.000 333.000 + 115.000 300.000 299.700 316.800 293.000 322.700 288.500 + 110.000 240.000 241.600 262.400 259.500 270.100 252.600 + 105.000 208.800 212.000 222.200 237.100 226.000 234.000 + 100.000 195.100 190.700 190.500 218.600 190.400 218.500 + 95.000 188.400 184.300 178.300 208.300 176.800 211.000 + 90.000 186.900 177.000 165.000 199.500 161.600 202.300 + 85.000 188.900 177.100 165.100 199.800 161.700 213.100 + 80.000 198.600 184.800 174.100 210.100 170.600 223.900 + 75.000 208.400 201.800 196.100 220.400 193.600 234.700 + 70.000 219.600 218.900 218.100 230.700 216.600 245.400 + 65.000 233.300 236.000 240.100 240.900 239.700 248.400 + 60.000 247.000 253.100 257.100 250.800 262.700 250.900 + 55.000 260.800 263.400 269.300 260.600 274.000 259.100 + 50.000 270.700 270.200 275.700 265.700 277.200 259.300 + 47.500 270.600 269.600 275.200 265.100 276.200 253.200 + 45.000 264.200 264.800 269.900 258.500 273.600 247.000 + 42.500 257.300 259.400 263.700 250.800 269.500 240.800 + 40.000 250.400 254.000 257.500 243.200 262.100 234.700 + 37.500 243.435 248.500 251.300 235.500 254.600 228.500 + 35.000 236.500 243.100 245.200 227.900 247.200 222.300 + 32.500 229.588 237.700 239.000 220.400 240.000 218.500 + 30.000 226.500 232.300 233.700 217.400 235.100 216.000 + 27.500 224.000 227.000 228.450 215.500 231.000 213.600 + 25.000 221.600 221.400 225.100 215.200 228.100 211.200 + 24.000 220.600 219.200 223.900 215.200 226.600 211.800 + 23.000 219.600 217.000 222.800 215.200 225.200 212.400 + 22.000 218.600 214.600 221.600 215.200 225.200 213.000 + 21.000 217.600 210.700 220.400 215.200 225.200 213.600 + 20.000 216.700 206.700 219.200 215.200 225.200 214.200 + 19.000 216.700 202.700 217.900 215.200 225.200 214.800 + 18.000 216.700 198.800 216.800 215.700 225.200 215.400 + 17.000 216.700 194.800 215.700 216.200 225.200 216.000 + 16.000 216.700 197.000 215.700 216.700 225.200 216.600 + 15.000 216.700 203.700 215.700 217.200 225.200 217.200 + 14.000 216.700 210.300 215.700 217.700 225.200 217.200 + 13.000 216.700 217.000 215.800 218.200 225.200 217.200 + 12.000 216.700 223.600 222.300 218.700 225.200 217.200 + 11.000 216.800 230.100 228.800 219.200 225.200 217.200 + 10.000 223.300 237.000 235.300 219.700 225.200 217.200 + 9.000 229.700 243.600 241.700 225.700 232.200 217.200 + 8.000 236.200 250.300 248.200 231.700 239.200 220.600 + 7.000 242.700 257.000 254.700 237.700 246.100 227.300 + 6.000 249.200 263.600 261.200 243.700 253.100 234.100 + 5.000 255.700 270.300 267.200 249.700 260.100 240.900 + 4.000 262.200 277.000 273.200 255.700 265.500 247.700 + 3.000 268.700 283.700 279.200 261.700 270.900 252.700 + 2.000 275.200 287.700 285.200 265.200 276.300 255.900 + 1.000 281.700 293.700 289.700 268.700 281.700 
259.100
+    0.000  288.200  299.700  294.200  272.200  287.200  257.200
\ No newline at end of file
diff --git a/mpop/channel.py b/mpop/channel.py
index 849e38ec..eca02f13 100644
--- a/mpop/channel.py
+++ b/mpop/channel.py
@@ -314,6 +314,8 @@ def project(self, coverage_instance):
                           calibration_unit=self.unit)
             res.area = coverage_instance.out_area
             res.info = self.info
+            if hasattr(self, 'palette'):    # UH, new
+                res.palette = self.palette  # UH, new
             if self.is_loaded():
                 LOG.info("Projecting channel %s (%fμm)..."
                          % (self.name, self.wavelength_range[1]))
@@ -430,6 +432,320 @@ def sunzen_corr(self, time_slot, lonlats=None, limit=80., mode='cos',
         new_ch.info["sun_zen_correction_applied"] = True
         return new_ch
 
+    def get_viewing_geometry(self, orbital, time_slot, altitude=None):
+        '''Calculate the azimuth and elevation angles under which the satellite is
+        seen by an observer at the position of each pixel of the current area.
+        inputs:
+          orbital    an orbital object defined by the TLE file
+                     (see pyorbital.orbital.Orbital or mpop/scene.py get_oribtal)
+          time_slot  time object specifying the observation time
+          altitude   optional: altitude of the observer above the earth ellipsoid
+        outputs:
+          azi        azimuth viewing angle in degrees (south is 0, counting clockwise)
+          ele        elevation viewing angle in degrees (zenith is 90, horizon is 0)
+        '''
+
+        try:
+            from pyorbital.orbital import Orbital
+        except ImportError:
+            LOG.warning("Could not load pyorbital.orbital.Orbital")
+            return None
+
+        try:
+            from pyorbital import tlefile
+        except ImportError:
+            LOG.warning("Could not load pyorbital.tlefile")
+            return None
+
+        (lons, lats) = self.area.get_lonlats()
+        # Calculate observer azimuth and elevation
+        if altitude is None:
+            altitude = np.zeros(lons.shape)
+        azi, ele = orbital.get_observer_look(time_slot, lons, lats, altitude)
+
+        return (azi, ele)
+
+    @staticmethod  # helper, does not use 'self'
+    def vinc_vect(phi, lembda, alpha, s, f=None, a=None, degree=True):
+        """Vincenty's direct formula.
+
+        Returns the latitude and longitude of the projected point and the reverse
+        azimuth, given a reference point and a distance and azimuth to project.
+        Latitudes, longitudes and azimuths are in degrees or radians, depending
+        on the *degree* flag.
+
+        Keyword arguments:
+            phi       Latitude in degrees/radians
+            lembda    Longitude in degrees/radians
+            alpha     Geodetic azimuth in degrees/radians
+            s         Ellipsoidal distance in meters
+            f         WGS84 parameter
+            a         WGS84 parameter
+            degree    Boolean, whether in/out values are in degrees or radians.
+ Default is in degree + + Returns: + (phiout, lembdaout, alphaout ) as a tuple + + """ + if degree: + phi = np.deg2rad(phi) + lembda = np.deg2rad(lembda) + alpha = np.deg2rad(alpha) + + if f is None: + f = 1/298.257223563 + if a is None: + a = 6378137 + + two_pi = 2.0*np.pi + + if isinstance(alpha, np.ndarray): + alpha[alpha < 0.0] += two_pi + alpha[alpha > two_pi] -= two_pi + + else: + if alpha < 0.0: + alpha = alpha + two_pi + if (alpha > two_pi): + alpha = alpha - two_pi + """ + alphama = np.ma.masked_less_equal(alphama, two_pi) + alpha = alphama - two_pi + alpha.mask = np.ma.nomask + logger.debug(alpha) + """ + b = a * (1.0 - f) + + tan_u1 = (1-f) * np.tan(phi) + u_1 = np.arctan(tan_u1) + sigma1 = np.arctan2(tan_u1, np.cos(alpha)) + + sinalpha = np.cos(u_1) * np.sin(alpha) + cosalpha_sq = 1.0 - sinalpha * sinalpha + + u_2 = cosalpha_sq * (a * a - b * b) / (b * b) + aa_ = 1.0 + (u_2 / 16384) * (4096 + u_2 * (-768 + u_2 * + (320 - 175 * u_2))) + bb_ = (u_2 / 1024) * (256 + u_2 * (-128 + u_2 * (74 - 47 * u_2))) + + # Starting with the approximation + sigma = (s / (b * aa_)) + last_sigma = 2.0 * sigma + 2.0 # something impossible + + # Iterate the following three equations + # until there is no significant change in sigma + + # two_sigma_m , delta_sigma + + def iter_sigma(sigma, last_sigma, sigma1, s, b, aa_, bb_): + while (abs((last_sigma - sigma) / sigma) > 1.0e-9): + two_sigma_m = 2 * sigma1 + sigma + + delta_sigma = (bb_ * np.sin(sigma) * + (np.cos(two_sigma_m) + (bb_/4) * + (np.cos(sigma) * + (-1 + 2 * np.power(np.cos(two_sigma_m), 2) - + (bb_ / 6) * np.cos(two_sigma_m) * + (-3 + 4 * np.power(np.sin(sigma), 2)) * + (-3 + 4 * np.power(np.cos(two_sigma_m), 2)))))) + last_sigma = sigma + sigma = (s / (b * aa_)) + delta_sigma + + return(sigma, two_sigma_m) + + # Check for array inputs + arraybool = [isinstance(ele, np.ndarray) for ele in (sigma, last_sigma, + sigma1)] + logger.debug("Sigma Arrays?: " + str(arraybool)) + if all(arraybool): + viter_sigma = np.vectorize(iter_sigma) + sigma, two_sigma_m = viter_sigma(sigma, last_sigma, sigma1, s, b, aa_, + bb_) + + else: + sigma, two_sigma_m = iter_sigma(sigma, last_sigma, sigma1, s, b, aa_, + bb_) + + phiout = np.arctan2((np.sin(u_1) * np.cos(sigma) + + np.cos(u_1) * np.sin(sigma) * np.cos(alpha)), + ((1 - f) * np.sqrt(np.power(sinalpha, 2) + + pow(np.sin(u_1) * + np.sin(sigma) - + np.cos(u_1) * + np.cos(sigma) * + np.cos(alpha), 2)))) + + deltalembda = np.arctan2((np.sin(sigma) * np.sin(alpha)), + (np.cos(u_1) * np.cos(sigma) - + np.sin(u_1) * np.sin(sigma) * np.cos(alpha))) + + cc_ = (f/16) * cosalpha_sq * (4 + f * (4 - 3 * cosalpha_sq)) + + omega = (deltalembda - (1 - cc_) * f * sinalpha * + (sigma + cc_ * np.sin(sigma) * (np.cos(two_sigma_m) + cc_ * + np.cos(sigma) * + (-1 + 2 * + np.power(np.cos(two_sigma_m), + 2))))) + + lembdaout = lembda + omega + + alphaout = np.arctan2(sinalpha, (-np.sin(u_1) * np.sin(sigma) + + np.cos(u_1) * np.cos(sigma) * + np.cos(alpha))) + + alphaout = alphaout + two_pi / 2.0 + + if isinstance(alphaout, np.ndarray): + alphaout[alphaout < 0.0] += two_pi + alphaout[alphaout > two_pi] -= two_pi + + else: + if alphaout < 0.0: + alphaout = alphaout + two_pi + if (alphaout > two_pi): + alphaout = alphaout - two_pi + + if degree: + phiout = np.rad2deg(phiout) + lembdaout = np.rad2deg(lembdaout) + alphaout = np.rad2deg(alphaout) + + return(phiout, lembdaout, alphaout) + + + def parallax_corr(self, cth=None, time_slot=None, orbital=None, azi=None, ele=None, fill="False"): + '''Perform the parallax correction for 
the channel at
+        *time_slot* (datetime.datetime() object), assuming the cloud top height cth
+        and the viewing geometry given by the satellite orbital "orbital", and return the
+        corrected channel.
+        Authors: Ulrich Hamann (MeteoSwiss), Thomas Leppelt (DWD)
+        Example calls:
+          * calling this function (using orbital and time_slot)
+              orbital = data.get_oribtal()
+              data["VIS006"].parallax_corr(cth=data["CTTH"].height, time_slot=data.time_slot, orbital=orbital)
+          * calling this function (using viewing geometry)
+              orbital = data.get_oribtal()
+              (azi, ele) = data["VIS006"].get_viewing_geometry(orbital, time_slot)
+              data["VIS006"].parallax_corr(cth=data["CTTH"].height, azi=azi, ele=ele)
+        Optional input:
+          cth        the cloud top height
+                     (or the altitude of the object that should be shifted).
+                     cth must have the same size and projection as the channel
+
+          orbital    an orbital object defined by the TLE file
+                     (see pyorbital.orbital.Orbital or mpop/scene.py get_oribtal)
+          azi        azimuth viewing angle in degrees (south is 0, counting clockwise)
+                     e.g. as given by self.get_viewing_geometry
+          ele        elevation viewing angle in degrees (zenith is 90, horizon is 0)
+                     e.g. as given by self.get_viewing_geometry
+          fill       specifies the interpolation method used to fill the gaps
+                     (basically areas behind the cloud that cannot be observed by the satellite instrument)
+                     "False" (default): no interpolation, gaps are np.nan values and the mask is set accordingly
+                     "nearest": fill gaps with the nearest neighbour
+                     "bilinear": use scipy.interpolate.griddata with linear interpolation
+                                 to fill the gaps
+
+        output:
+          parallax corrected channel
+                     the content of the channel will be parallax corrected.
+                     The name of the new channel will be
+                     *original_chan.name+'_PC'*, e.g. "IR_108_PC". This name is
+                     also stored in the info dictionary of the originating channel.
+        '''
+
+        # get time_slot from info, if present
+        if time_slot is None:
+            if "time" in self.info.keys():
+                time_slot = self.info["time"]
+
+        if azi is None or ele is None:
+            if time_slot is None or orbital is None:
+                print "*** Error in parallax_corr (mpop/channel.py)"
+                print "    parallax_corr needs either time_slot and orbital"
+                print "    data[\"IR_108\"].parallax_corr(data[\"CTTH\"].height, time_slot=data.time_slot, orbital=orbital)"
+                print "    or the azimuth and elevation angle"
+                print "    data[\"IR_108\"].parallax_corr(data[\"CTTH\"].height, azi=azi, ele=ele)"
+                quit()
+            else:
+                print ("... calculate viewing geometry (orbit and time are given)")
+                (azi, ele) = self.get_viewing_geometry(orbital, time_slot)
+        else:
+            print ("... azimuth and elevation angle given")
+
+        # mask the cloud top height
+        cth_ = np.ma.masked_where(cth < 0, cth, copy=False)
+
+        # Elevation displacement
+        dz = cth_ / np.tan(np.deg2rad(ele))
+
+        # Create the new channel (by copying) and initialize the data with NaN values
+        new_ch = copy.deepcopy(self)
+        new_ch.data[:,:] = np.nan
+
+        # Set the name
+        new_ch.name += '_PC'
+
+        # Add information about the corrected version to the original channel
+        self.info["parallax_corrected"] = self.name + '_PC'
+
+        # get projection coordinates in meters
+        (proj_x, proj_y) = self.area.get_proj_coords()
+
+        print "... calculate parallax shift"
+        # shift pixels according to the parallax correction
+        proj_x_pc = proj_x - np.sin(np.deg2rad(azi)) * dz   # shift West-East in m   # ??? sign correct ???
+ proj_y_pc = proj_y + np.cos(np.deg2rad(azi)) * dz # shift North-South in m + + # get indices for the pixels for the original position + (y,x) = self.area.get_xy_from_proj_coords(proj_x, proj_y) + # comment: might be done more efficient with meshgrid + # >>> x = np.arange(-5.01, 5.01, 0.25) + # >>> y = np.arange(-5.01, 5.01, 0.25) + # >>> xx, yy = np.meshgrid(x, y) + # get indices for the pixels at the parallax corrected position + (y_pc,x_pc) = self.area.get_xy_from_proj_coords(proj_x_pc, proj_y_pc) + + # copy cloud free satellite pixels (surface observations) + ind = np.where(cth_.mask == True) + new_ch.data[x[ind],y[ind]] = self.data[x[ind],y[ind]] + + print "... copy data to parallax corrected position" + # copy cloudy pixel with new position modified with parallax shift + ind = np.where(x_pc.mask == False) + new_ch.data[x_pc[ind],y_pc[ind]] = self.data[x[ind],y[ind]] + + # Mask out data gaps (areas behind the clouds) + new_ch.data = np.ma.masked_where(np.isnan(new_ch.data), new_ch.data, copy=False) + + if fill.lower()=="false": + return new_ch + elif fill=="nearest": + print "*** fill missing values with nearest neighbour" + from scipy.ndimage import distance_transform_edt + invalid = np.isnan(new_ch.data) + ind = distance_transform_edt(invalid, return_distances=False, return_indices=True) + new_ch.data = new_ch.data[tuple(ind)] + elif fill=="bilinear": + # this function does not interpolate at the outer boundaries + from scipy.interpolate import griddata + ind = np.where(new_ch.data.mask == False) + points = np.transpose(np.append([y[ind]], [x[ind]], axis=0)) + values = new_ch.data[ind] + new_ch.data = griddata(points, values, (y, x), method='linear') + + # fill the remaining pixels with nearest neighbour + from scipy.ndimage import distance_transform_edt + invalid = np.isnan(new_ch.data) + ind = distance_transform_edt(invalid, return_distances=False, return_indices=True) + new_ch.data = new_ch.data[tuple(ind)] + else: + print "*** Error in parallax_corr (channel.py)" + print " unknown gap fill method ", fill + quit() + + return new_ch + + # Arithmetic operations on channels. 
 def __pow__(self, other):
diff --git a/mpop/imageo/HRWimage.py b/mpop/imageo/HRWimage.py
new file mode 100644
index 00000000..511e72e5
--- /dev/null
+++ b/mpop/imageo/HRWimage.py
@@ -0,0 +1,680 @@
+import matplotlib as mpl  # this HAS TO BE one of the very first lines (before any other matplotlib functions are imported)
+mpl.use('Agg')            # this HAS TO BE one of the very first lines (before any other matplotlib functions are imported)
+from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
+from matplotlib.figure import Figure
+import matplotlib.pyplot as plt
+from matplotlib import rcParams
+from PIL import Image as PIL_Image
+from TRTimage import fig2data, fig2img
+from pylab import text as pylab_text
+from numpy import sin, cos, radians, where, nonzero, transpose, arange, append, meshgrid, mgrid, empty, isnan, nan, percentile
+from numpy import sum as np_sum
+from scipy.interpolate import griddata  # interp2d
+from matplotlib.patches import Rectangle
+from matplotlib.colors import Normalize
+
+def prepare_figure(obj_area):
+
+    # create new figure
+    #fig = Figure()  # old version, does not work for the stream plot
+
+    ## Turn interactive plotting off
+    #plt.ioff()
+    fig = plt.figure()  # needs a DISPLAY environment variable (simulated here with mpl.use('Agg'))
+
+    # define size of image
+    nx = obj_area.x_size
+    ny = obj_area.y_size
+    # canvas figure
+    canvas = FigureCanvas(fig)
+    # get dots per inch of the screen
+    DPI = fig.get_dpi()
+    # print "DPI", DPI
+    fig.set_size_inches(nx/float(DPI), ny/float(DPI))
+    # set fonts to bold
+    plt.rc('font', weight='bold')
+    # get axis object
+    ax = fig.add_subplot(111, aspect='equal')
+    ## eliminate margins totally
+    fig.subplots_adjust(left=0.0, right=1.0, bottom=0.0, top=1.0, wspace=0, hspace=0)
+    # set limits of the axis
+    ax.set_xlim(0, nx)
+    ax.set_ylim(0, ny)
+    # set transparent background
+    fig.patch.set_alpha(0.0)           # transparent outside of diagram
+    ax.set_axis_bgcolor([1, 0, 0, 0])  # transparent color inside diagram
+
+    return fig, ax
+
+
+def HRWimage( HRW_data, obj_area, hrw_channels=None, min_correlation=None, cloud_type=None, style='barbs', \
+              barb_length=None, color_mode='channel', pivot='middle', legend=True, legend_loc=3):
+
+    """Create a PIL image from high resolution wind data
+       required input:
+         HRW_data        [HRW_class]: HRW_class instance containing the data, see mpop/satin/nwcsaf_hrw_hdf.py
+         obj_area        [area_class]: instance of the area class, returned by area_def
+       optional input:
+         hrw_channels    [string array]: the channels that are used to derive the HRW vectors,
+                                         e.g. hrw_channels=['HRV','WV_073']
+         min_correlation [int]: minimum correlation of the tracking; arrows below this value are not shown
+         cloud_type      [int array]: cloud types of the wind vectors, e.g. cloud_type=[8,10,11]
+         style           [string]: different styles of plotting
+                                   style='barbs' or style='5min_displacement' or style='15min_displacement'
+         color_mode      [string]: choose the color of the wind symbols, possible choices:
+                                   color_mode='channel'     -> one color per SEVIRI channel used to derive HRW
+                                   color_mode='pressure'    -> colorcoded cloud top pressure
+                                   color_mode='temperature' -> colorcoded cloud top temperature
+                                   color_mode='cloud_type'  -> NWC-SAF cloud types
+                                   color_mode='correlation'   80 ... 100
+                                   color_mode='conf_nwp'      70 ... 100
+                                   color_mode='conf_no_nwp'   70 ... 100
+         pivot           [string]: position of the barb, e.g.
pivot='middle' == center of barb at origin + legend [True or False] : show legend or not + legend_loc [string or int]: location of the legend + upper right 1 + upper left 2 + lower left 3 + lower right 4 + right 5 + center left 6 + center right 7 + lower center 8 + upper center 9 + center 10 + best + """ + + #if min_correlation != None: + # print " filter for min_correlation = ", min_correlation + # inds = where(HRW_data.correlation > min_correlation) + # HRW_data = HRW_data.subset(inds) + + print "... create HRWimage, color_mode = ", color_mode + + # get a empty figure with transparent background, no axis and no margins outside the diagram + fig, ax = prepare_figure(obj_area) + + # define arrow properties + head_width = 0.006 * min(obj_area.x_size,obj_area.x_size) + head_length = 2 * head_width + m_per_s_to_knots = 1.944 + + #barb_length = 0.008 * min(obj_area.x_size,obj_area.x_size) + + if barb_length == None: + n_winds = len(HRW_data.wind_id) + if n_winds < 300: + barb_length = 5.68 + elif n_winds < 500: + barb_length = 5.43 + elif n_winds < 700: + barb_length = 5.18 + elif n_winds < 900: + barb_length = 4.68 + else: + barb_length = 4.00 + print "barb_length", barb_length + + if color_mode == 'channel': + classes = ('HRV', 'VIS008 ', 'WV_062 ', 'WV_073 ', 'IR_120 ') + colors = ['mediumorchid', 'red', 'limegreen', 'darkgreen', 'darkturquoise'] + elif color_mode == 'pressure': + classes = ['<200hPa', '200-300hPa','300-400hPa','400-500hPa','500-600hPa','600-700hPa','700-800hPa', '800-900hPa','>900hPa'] + colors = ['darksalmon', 'red' ,'darkorange','yellow' ,'lime' ,'seagreen', 'deepskyblue','blue', 'mediumorchid'] + classes = tuple(['CTP '+cl for cl in classes]) + elif color_mode == 'cloud_type' or color_mode == 'cloudtype': + classes=['non-processed','cloud free land', 'cloud free sea', 'land snow', 'sea ice',\ + 'very low cum.', 'very low', 'low cum.', 'low', 'med cum.', 'med', 'high cum.', 'high', 'very high cum.', 'very high', \ + 'sem. thin', 'sem. med.', 'sem. thick', 'sem. above', 'broken', 'undefined'] + + colors = empty( (len(classes),3), dtype=int ) + colors[ 0,:] = [100, 100, 100] + colors[ 1,:] = [ 0, 120, 0] + colors[ 2,:] = [ 0, 0, 0] + colors[ 3,:] = [250, 190, 250] + colors[ 4,:] = [220, 160, 220] + colors[ 5,:] = [255, 150, 0] + colors[ 6,:] = [255, 100, 0] + colors[ 7,:] = [255, 220, 0] + colors[ 8,:] = [255, 180, 0] + colors[ 9,:] = [255, 255, 140] + colors[10,:] = [240, 240, 0] + colors[11,:] = [250, 240, 200] + colors[12,:] = [215, 215, 150] + colors[13,:] = [255, 255, 255] + colors[14,:] = [230, 230, 230] + colors[15,:] = [ 0, 80, 215] + colors[16,:] = [ 0, 180, 230] + colors[17,:] = [ 0, 240, 240] + colors[18,:] = [ 90, 200, 160] + colors[19,:] = [200, 0, 200] + colors[20,:] = [ 95, 60, 30] + colors = colors/255. 
+ elif color_mode in ['correlation','conf_nwp','conf_no_nwp']: + classes = ['<70', '<75', '<80', '<85', '<90', '<95', '>95' ] + colors = ['indigo', 'darkred', 'red','darkorange','gold', 'lime', 'green'] + classes = tuple([color_mode+' '+cl for cl in classes]) + else: + print "*** Error in HRW_streamplot (mpop/imageo/HRWimage.py)" + print " unknown color_mode" + quit() + + for wid in range(len(HRW_data.wind_id)): + + if color_mode == 'channel': + + if HRW_data.channel[wid].find('HRV') != -1: # HRV + barbcolor = colors[0] + elif HRW_data.channel[wid].find('VIS008') != -1: # 0.8 micro m + barbcolor = colors[1] + elif HRW_data.channel[wid].find('WV_062') != -1: # 6.2 micro m + barbcolor = colors[2] + elif HRW_data.channel[wid].find('WV_073') != -1: # 7.3 micro m + barbcolor = colors[3] + elif HRW_data.channel[wid].find('IR_120') != -1: # 12.0 micro m + barbcolor = colors[4] + + elif color_mode == 'pressure': + + if HRW_data.pressure[wid] < 20000: + barbcolor = colors[0] + elif HRW_data.pressure[wid] < 30000: + barbcolor = colors[1] + elif HRW_data.pressure[wid] < 40000: + barbcolor = colors[2] + elif HRW_data.pressure[wid] < 50000: + barbcolor = colors[3] + elif HRW_data.pressure[wid] < 60000: + barbcolor = colors[4] + elif HRW_data.pressure[wid] < 70000: + barbcolor = colors[5] + elif HRW_data.pressure[wid] < 80000: + barbcolor = colors[6] + elif HRW_data.pressure[wid] < 90000: + barbcolor = colors[7] + else: + barbcolor = colors[8] + + elif color_mode == 'cloud_type' or color_mode == 'cloudtype': + + barbcolor = list(colors[HRW_data.cloud_type[wid], :]) + + elif color_mode in ['correlation','conf_nwp','conf_no_nwp']: + if color_mode == 'correlation': + cdata = HRW_data.correlation + elif color_mode == 'conf_nwp': + cdata = HRW_data.conf_nwp + elif color_mode == 'conf_no_nwp': + cdata = HRW_data.conf_no_nwp + + if cdata[wid] < 70: + barbcolor = colors[0] + elif cdata[wid] < 75: + barbcolor = colors[1] + elif cdata[wid] < 80: + barbcolor = colors[2] + elif cdata[wid] < 85: + barbcolor = colors[3] + elif cdata[wid] < 90: + barbcolor = colors[4] + elif cdata[wid] < 95: + barbcolor = colors[5] + else: + barbcolor = colors[6] + else: + print "*** Error in HRW_streamplot (mpop/imageo/HRWimage.py)" + print " unknown color_mode" + quit() + + x0, y0 = obj_area.get_xy_from_lonlat( HRW_data.lon[wid], HRW_data.lat[wid], outside_error=False) #, return_int=True + + u = HRW_data.wind_speed[wid] * -1 * sin(radians(HRW_data.wind_direction[wid])) + v = HRW_data.wind_speed[wid] * -1 * cos(radians(HRW_data.wind_direction[wid])) + + #print '%6s %3d %10.7f %10.7f %7.2f %7.1f %8.1f %10s' % (HRW_data.channel[wid], HRW_data.wind_id[wid], \ + # HRW_data.lon[wid], HRW_data.lat[wid], \ + # HRW_data.wind_speed[wid]*m_per_s_to_knots, \ + # HRW_data.wind_direction[wid], HRW_data.pressure[wid], barbcolor) + + + if style == 'barbs': + u = HRW_data.wind_speed[wid] * -1 * sin(radians(HRW_data.wind_direction[wid])) * m_per_s_to_knots + v = HRW_data.wind_speed[wid] * -1 * cos(radians(HRW_data.wind_direction[wid])) * m_per_s_to_knots + ax.barbs(x0, obj_area.y_size - y0, u * m_per_s_to_knots, v * m_per_s_to_knots, length = barb_length, pivot='middle', barbcolor=barbcolor) + + elif style == '5min_displacement' or style == '15min_displacement': + if style == '5min_displacement': + t_in_s = 5*60 + elif style == '15min_displacement': + t_in_s = 15*60 + dx = u * t_in_s / obj_area.pixel_size_x + dy = v * t_in_s / obj_area.pixel_size_y + ax.arrow(x0, y0, dx, dy, head_width = head_width, head_length = head_length, fc=barbcolor, 
ec=barbcolor) + + if legend: + + rcParams['legend.handlelength'] = 0 + rcParams['legend.numpoints'] = 1 + + # create blank rectangle + rec = Rectangle((0, 0), 0, 0, fc="w", fill=False, edgecolor='none', linewidth=0) + + ## *fontsize*: [size in points | 'xx-small' | 'x-small' | 'small' | + ## 'medium' | 'large' | 'x-large' | 'xx-large'] + + + alpha=1.0 + bbox={'facecolor':'white', 'alpha':alpha, 'pad':10} + + print "... add legend: color is a function of ", color_mode + + recs = empty( len(classes), dtype=object) + recs[:] = rec + + #if color_mode == 'pressure': + # recs = [rec, rec, rec, rec, rec, rec, rec, rec, rec] + #if color_mode == 'channel': + # recs = [rec, rec, rec, rec, rec] + #if color_mode in ['correlation','conf_nwp','conf_no_nwp']: + # recs = [rec, rec, rec, rec, rec, rec, rec] + + size=12 + if color_mode=='cloud_type': + size=10 + + leg = ax.legend(recs, classes, loc=legend_loc, prop={'size':size}) + + for color,text in zip(colors,leg.get_texts()): + text.set_color(color) + + + return fig2img ( fig ) + + +# ---------------------------------------------------------------------------------------------------------- +# ---------------------------------------------------------------------------------------------------------- +# ---------------------------------------------------------------------------------------------------------- + +def HRWstreamplot( u2d, v2d, obj_area, interpol_method, color_mode='speed', density=(3,3), linewidth_mode="scaled", linewidth_max=2.5, \ + min_correlation=None, legend=True, legend_loc=3, vmax=None, colorbar=True, fontcolor='w'): + + """ Create a streamplot image in PIL format of a 2d wind field + color_mode [string]: choose color of the stream lines, + color_mode='speed' -> wind speed + color_mode='u' -> u-wind component + color_mode='v' -> v-wind component + density [2 int tuple] density of stream lines, default density = (4,4) + linewidth_mode [string] "scaled" to color_mode data + "const" always linewith_max + """ + + + ## get a empty figure with transparent background, no axis and no margins outside the diagram + fig, ax = prepare_figure(obj_area) + #print dir(ax) + + # check if there is there have been enough observation (or if data is not a number array) + if isnan(np_sum(u2d)): + print "... there are not enough observations" + ax.text(0.95, 0.01, 'currently not enough observations', + verticalalignment='bottom', horizontalalignment='right', + transform=ax.transAxes, color='red', fontsize=15) + else: + print "there is enough data, interpolation method: ", interpol_method + + # create grid for the wind data + [nx, ny] = u2d.shape # for ccs4 this is (640, 710) + Y, X = mgrid[nx-1:-1:-1, 0:ny] # watch out for Y->nx and X->ny + #print "X.shape ", Y.shape + #print Y[:,0] + + print " calculate color data ", color_mode + if color_mode == 'speed': + from numpy import sqrt + cdata = sqrt(u2d*u2d + v2d*v2d) + elif color_mode == 'u': + cdata = u2d + elif color_mode == 'v': + cdata = v2d + else: + print "*** Error in HRW_streamplot (mpop/imageo/HRWimage.py)" + print " unknown color_mode" + quit() + + print " calculate linewidth ", linewidth_mode + if linewidth_mode == "const": + linewidth = linewidth_max + elif linewidth_mode == "scaled": + if vmax != None: + linewidth = 1 + linewidth_max*(cdata) / vmax + else: + linewidth = 1 + linewidth_max*(cdata) / cdata.max() + else: + print "*** Error in HRW_streamplot (mpop/imageo/HRWimage.py)" + print " unknown linewidth_mode" + quit() + + print "... 
data_max =", cdata.max() ,", vmax=", vmax + + if vmax != None: + norm = Normalize(vmin=0, vmax=vmax) + else: + norm = Normalize(vmin=0, vmax=cdata.max()) + + #optional arguments of streamplot + # density=1, linewidth=None, color=None, + # cmap=None, norm=None, arrowsize=1, arrowstyle='-|>', + # minlength=0.1, transform=None, zorder=1, start_points=None, INTEGRATOR='RK4',density=(10,10) + plt.streamplot(X, Y, u2d, v2d, color=cdata, linewidth=linewidth, cmap=plt.cm.rainbow, density=density, norm=norm) + + if colorbar: + colorbar_ax = fig.add_axes([0.9, 0.1, 0.05, 0.8]) + cbar = plt.colorbar(cax=colorbar_ax) + plt.setp(plt.getp(cbar.ax.axes, 'yticklabels'), color=fontcolor) + xlabel = cbar.ax.set_xlabel('m/s', weight='bold') #get the title property handler + plt.setp(xlabel, color=fontcolor) + + #plt.savefig("test_streamplot.png") + + # add information about interpolation method + ax.text(0.95, 0.01, interpol_method, + verticalalignment='bottom', horizontalalignment='right', + transform=ax.transAxes, color='green', fontsize=15) + + return fig2img ( fig ) + + +# ---------------------------------------------------------------------------------------------------------- +# ---------------------------------------------------------------------------------------------------------- +# ---------------------------------------------------------------------------------------------------------- + + +def fill_with_closest_pixel(data, invalid=None): + """ + Replace the value of invalid 'data' cells (indicated by 'invalid') + by the value of the nearest valid data cell + + Input: + data: numpy array of any dimension + invalid: a binary array of same shape as 'data'. True cells set where data + value should be replaced. + If None (default), use: invalid = np.isnan(data) + + Output: + Return a filled array. + """ + + from numpy import isnan + from scipy.ndimage import distance_transform_edt + + if invalid is None: invalid = isnan(data) + + ind = distance_transform_edt(invalid, return_distances=False, return_indices=True) + + return data[tuple(ind)] + +# ---------------------------------------------------------------------------------------------------------- +# ---------------------------------------------------------------------------------------------------------- +# ---------------------------------------------------------------------------------------------------------- +def HRWscatterplot( HRW_data, title='', hrw_channels=None, min_correlation=None, cloud_type=None, color_mode='direction'): + + ## get a empty figure with transparent background, no axis and no margins outside the diagram + # fig = plt.figure() + import pylab + fig = pylab.figure() + ax = plt.subplot(111) + ax.set_yscale("log", nonposx='clip') + plt.scatter(HRW_data.wind_speed, HRW_data.pressure/100, s=5, c=HRW_data.wind_direction, alpha=0.5, edgecolor='none') + pylab.title(title) + pylab.ylim([1000,100]) + plt.yticks([1000,900,800,700,600,500,400,300,200,100], ['1000','900','800','700','600','500','400','300','200','100'], rotation='horizontal') + + p = percentile(HRW_data.wind_speed, 95) + vmax = (round(p/10)+1)*10 + print "... 
vmax:", vmax + + plt.plot([0,vmax], [680,680], color='g') + plt.plot([0,vmax], [440,440], color='b') + + pylab.xlim([0,vmax]) + ax.set_xlabel('HRW [m/s]') + ax.set_ylabel('p [hPa]') + + cbar = plt.colorbar() + cbar.ax.set_ylabel('wind direction') + + return fig2img ( fig ) + +# ---------------------------------------------------------------------------------------------------------- +# ---------------------------------------------------------------------------------------------------------- +# ---------------------------------------------------------------------------------------------------------- + +def HRW_2dfield( HRW_data, obj_area, interpol_method=None, hrw_channels=None, min_correlation=None, level=''): + + print "... calculate 2d wind field (HRW_2dfield)" + + if min_correlation != None: + print " filter for min_correlation = ", min_correlation + inds = where(HRW_data.correlation > min_correlation) + HRW_data.subset(inds) + + xx, yy = obj_area.get_xy_from_lonlat( HRW_data.lon, HRW_data.lat, outside_error=False, return_int=False) #, return_int=True + + yy = obj_area.y_size - yy + + uu = - HRW_data.wind_speed * sin(radians(HRW_data.wind_direction)) + vv = - HRW_data.wind_speed * cos(radians(HRW_data.wind_direction)) + + # get rid of all vectors outside of the field + index = nonzero(xx) + xx = xx[index] + yy = yy[index] + uu = uu[index] + vv = vv[index] + + points = transpose(append([xx], [yy], axis=0)) + #print type(uu), uu.shape + #print type(points), points.shape + #print points[0], yy[0], xx[0] + #print uu[0] + + nx = obj_area.x_size + ny = obj_area.y_size + + x2 = arange(nx) + y2 = (ny-1) - arange(ny) + + grid_x, grid_y = meshgrid(x2, y2) + + if interpol_method == None: + # we need at least 2 winds to interpolate + if uu.size < 4: + print "*** Warning, not wnough wind data available, n_winds = ", uu.size + fake = empty(grid_x.shape) + fake[:,:] = nan + HRW_data.interpol_method = None + return fake, fake + + elif uu.size < 50: + interpol_method = "RBF" + + else: + interpol_method = "linear + nearest" + + #interpol_method = "nearest" + #interpol_method = "cubic + nearest" # might cause unrealistic overshoots + #interpol_method = "kriging" + #interpol_method = "..." + + print "... min windspeed (org data): ", HRW_data.wind_speed.min() + print "... max windspeed (org data): ", HRW_data.wind_speed.max() + + for i_iteration in [0,1]: + + if interpol_method == "nearest": + + print '... fill with nearest neighbour' + # griddata, see http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.interpolate.griddata.html + grid_u1x = griddata(points, uu, (grid_x, grid_y), method='nearest') + grid_v1x = griddata(points, vv, (grid_x, grid_y), method='nearest') + + elif interpol_method == "RBF": + + print '... inter- and extrapolation using radial basis functions' + # https://www.youtube.com/watch?v=_cJLVhdj0j4 + print "... start Rbf" + from scipy.interpolate import Rbf + # rbfu = Rbf(xx, yy, uu, epsilon=0.1) # + rbfu = Rbf(xx, yy, uu, epsilon=0.2) + grid_u1x = rbfu(grid_x, grid_y) + rbfv = Rbf(xx, yy, vv, epsilon=0.1) # + grid_v1x = rbfv(grid_x, grid_y) + print "... finish Rbf" + # !very! slow for a large number of observations + + elif interpol_method == "linear + nearest" or interpol_method == "cubic + nearest": + + if interpol_method == "linear + nearest": + print '... 
calculate linear interpolation' + # griddata, see http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.interpolate.griddata.html + grid_u1 = griddata(points, uu, (grid_x, grid_y), method='linear') + grid_v1 = griddata(points, vv, (grid_x, grid_y), method='linear') + elif interpol_method == "cubic + nearest": + # smoother, but can cause unrealistic overshoots + print '... calculate cubic interpolation' + grid_u1 = griddata(points, uu, (grid_x, grid_y), method='cubic') + grid_v1 = griddata(points, vv, (grid_x, grid_y), method='cubic') + else: + print "*** Error in mpop/imageo/HRWimage.py" + print " unknown interpolation method: ", interpol_method + quit() + + if 1==1: + # use faster function to extrapolate with closest neighbour + print "... fill outside area with closest value" + grid_u1x = fill_with_closest_pixel(grid_u1, invalid=None) + grid_v1x = fill_with_closest_pixel(grid_v1, invalid=None) + else: + # use griddata to extrapolate with closest neighbour + points2 = transpose(append([grid_x.flatten()], [grid_y.flatten()], axis=0)) + print type(grid_x.flatten()), grid_x.flatten().shape + print type(points2), points2.shape + mask = ~isnan(grid_v1.flatten()) + inds = where(mask)[0] + grid_u1x = griddata(points2[inds], grid_u1.flatten()[inds], (grid_x, grid_y), method='nearest') + grid_v1x = griddata(points2[inds], grid_v1.flatten()[inds], (grid_x, grid_y), method='nearest') + + if 1==0: + # add othermost points as additional data + y_add = [0, 0, ny-1, ny-1] + x_add = [0, nx-1, 0, nx-1] + for (i,j) in zip(x_add,y_add): + uu = append(uu, grid_u0[i,j]) + vv = append(vv, grid_v0[i,j]) + xx = append(xx, x_add) + yy = append(yy, y_add) + points = transpose(append([yy], [xx], axis=0)) + + print 'calc extent1' + grid_u1e = griddata(points, uu, (grid_x, grid_y), method='linear') + grid_v1e = griddata(points, vv, (grid_x, grid_y), method='linear') + + else: + print "*** Error in mpop/imageo/HRWimage.py" + print " unknown interpol_method", interpol_method + quit() + + ##http://docs.scipy.org/doc/scipy/reference/tutorial/interpolate.html + ##http://stackoverflow.com/questions/3526514/problem-with-2d-interpolation-in-scipy-non-rectangular-grid + #print "SmoothBivariateSpline:" + #from scipy.interpolate import SmoothBivariateSpline + #fitu = SmoothBivariateSpline( xx, yy, uu, s=1000) # , kx=3, ky=3, s = smooth * z2sum m + #from numpy import empty + #grid_u_SBP = empty(grid_x.shape) + #for i in range(0,nx-1): # starting upper right going down + # for j in range(0,ny-1): # starting lower right going right + # #print i,j + # grid_u_SBP[j,i] = fitu(j,i) + + #grid_u_SBP = np.array([k.predict([x,y]) for x,y in zip(np.ravel(grid_x), np.ravel(grid_y))]) + #grid_u_SBP = grid_u_SBP.reshape(grid_x.shape) + + ##print x2 + ##print y2 + #grid_u_SBP = fitu(x2,y2) + ##print "grid_u_SBP.shape", grid_u_SBP.shape + ###print grid_u_SBP + #print "End SmoothBivariateSpline:" + + #print "bisplrep:" + #from scipy import interpolate + #tck = interpolate.bisplrep(xx, yy, uu) + #grid_u_BSR = interpolate.bisplev(grid_x[:,0], grid_y[0,:], tck) + #print grid_u_BSR.shape + #print "bisplrep" + #print "grid_v1x.shape", grid_v1x.shape + + extent=(0,nx,0,ny) + origin='lower' + origin='upper' + origin=None + + # show different stages of 2d inter- and extra-polation + if 1==0: + print 'make matplotlib.pyplot' + import matplotlib.pyplot as plt + vmin=-10 + vmax=10 + fig = plt.figure() + plt.subplot(221) + plt.title('u '+interpol_method) + plt.plot(points[:,0], ny-1-points[:,1], 'k.', ms=1) + plt.imshow(grid_u1x, vmin=vmin, 
vmax=vmax) #, extent=extent + #plt.colorbar() + plt.subplot(222) + plt.title('v '+interpol_method) + plt.plot(points[:,0], ny-1-points[:,1], 'k.', ms=1) + plt.imshow(grid_v1x, origin=origin, vmin=vmin, vmax=vmax) #, extent=extent + #plt.colorbar() + + # standard calculation for comparison + print '... calculate linear interpolation' + grid_u1 = griddata(points, uu, (grid_x, grid_y), method='linear') + grid_v1 = griddata(points, vv, (grid_x, grid_y), method='linear') + grid_u1xx = fill_with_closest_pixel(grid_u1, invalid=None) + grid_v1xx = fill_with_closest_pixel(grid_v1, invalid=None) + + plt.subplot(223) + plt.title('U Linear+Nearest') + plt.plot(points[:,0], ny-1-points[:,1], 'k.', ms=1) + plt.imshow(grid_u1xx, origin=origin, vmin=vmin, vmax=vmax) #, extent=extent + #plt.colorbar() + plt.subplot(224) + plt.title('V Linear+Nearest') + plt.plot(points[:,0], ny-1-points[:,1], 'k.', ms=1) + #plt.title('Cubic') + plt.imshow(grid_v1xx, origin=origin, vmin=vmin, vmax=vmax) #, extent=extent + #plt.colorbar() + plt.gcf().set_size_inches(6, 6) + #plt.show() # does not work with AGG + tmpfile="test_hrw"+level+".png" + fig.savefig(tmpfile) + print "display "+tmpfile+" &" + + + if grid_u1x.min() < -150 or grid_v1x.min() < -150 or grid_u1x.max() > 150 or grid_v1x.max() > 150: + print "*** Warning, numerical instability detected, interpolation method: ", interpol_method + print " min u windspeed (u 2dimensional): ", grid_u1x.min() + print " min v windspeed (v 2dimensional): ", grid_v1x.min() + print " max u windspeed (u 2dimensional): ", grid_u1x.max() + print " max v windspeed (v 2dimensional): ", grid_v1x.max() + interpol_method = "glinear + nearest" + print "... try another interpolation method: ", interpol_method + else: + # (hopefully) numerical stable interpolation, exit the interpolation loop + break + + HRW_data.interpol_method = interpol_method + + return grid_u1x, grid_v1x diff --git a/mpop/imageo/TRTimage.py b/mpop/imageo/TRTimage.py new file mode 100644 index 00000000..10f3e807 --- /dev/null +++ b/mpop/imageo/TRTimage.py @@ -0,0 +1,131 @@ +from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas +from matplotlib.figure import Figure +#import matplotlib as mpl +#mpl.use('Agg') +#from pylab import figure +from pylab import rand +import matplotlib.pyplot as plt +from matplotlib.patches import Ellipse +from uuid import uuid4 +import subprocess +from PIL import Image as PIL_Image +import numpy + +def fig2data ( fig ): + """ + @brief Convert a Matplotlib figure to a 4D numpy array with RGBA channels and return it + @param fig a matplotlib figure + @return a numpy 3D array of RGBA values + """ + # draw the renderer + fig.canvas.draw ( ) + + # Get the RGBA buffer from the figure + w,h = fig.canvas.get_width_height() + buf = numpy.fromstring ( fig.canvas.tostring_argb(), dtype=numpy.uint8 ) + buf.shape = ( w, h, 4 ) + + # canvas.tostring_argb give pixmap in ARGB mode. 
Roll the ALPHA channel to have it in RGBA mode + buf = numpy.roll ( buf, 3, axis = 2 ) + return buf + +def fig2img ( fig ): + """ + @brief Convert a Matplotlib figure to a PIL Image in RGBA format and return it + @param fig a matplotlib figure + @return a Python Imaging Library ( PIL ) image + """ + # put the figure pixmap into a numpy array + buf = fig2data ( fig ) + w, h, d = buf.shape + return PIL_Image.frombytes( "RGBA", ( w ,h ), buf.tostring( ) ) + + +def TRTimage( TRTcell_IDs, TRTcells, obj_area, minRank=8, alpha_max=1.0, plot_vel=True): + + # define size of image + nx = obj_area.x_size + ny = obj_area.y_size + + # create new figure + fig = Figure() + # canvas figure + canvas = FigureCanvas(fig) + # get dots per inch of the screen + DPI = fig.get_dpi() + # print "DPI", DPI + fig.set_size_inches(nx/float(DPI),ny/float(DPI)) + # get axis object + ax = fig.add_subplot(111, aspect='equal') + ## eliminates margins totally + fig.subplots_adjust(left=0.0,right=1.0,bottom=0.0,top=1.0, wspace=0, hspace=0) + #plt.subplots_adjust(left=0, right=1, bottom=0, top=1, wspace=0, hspace=0) # does only work with x11 display + # set limits of the axis + ax.set_xlim(0, nx) + ax.set_ylim(0, ny) + # set transparent backgroud + fig.patch.set_alpha(0.0) # transparent outside of diagram + ax.set_axis_bgcolor([1,0,0,0]) # transparent color inside diagram + + # define arrow properties + head_width = 0.006 * min(obj_area.x_size,obj_area.x_size) + head_length = 2 * head_width + + pixel_size_x_km = 0.001 * obj_area.pixel_size_x + pixel_size_y_km = 0.001 * obj_area.pixel_size_y + + for cell in TRTcell_IDs: + + if TRTcells[cell].RANKr > minRank: + + (x0,y0) = obj_area.get_xy_from_lonlat(TRTcells[cell].lon, TRTcells[cell].lat, outside_error=False, return_int=False) + y0 = (obj_area.y_size-1)-y0 + # print (x0,y0) + + vx = TRTcells[cell].vel_x + vy = TRTcells[cell].vel_y + + # !!!scaling of width and height is not correct, that is on map projection, but not on the ground!!! + e = Ellipse( xy = (x0, y0), \ + width = 2*TRTcells[cell].ell_S / pixel_size_x_km, \ + height = 2*TRTcells[cell].ell_L / pixel_size_y_km, \ + angle = -TRTcells[cell].angle ) + + ax.add_artist(e) + e.set_clip_box(ax.bbox) + + if TRTcells[cell].RANKr <= 12: + cell_color="white" + alpha = (alpha_max-0.2) / 12. * TRTcells[cell].RANKr + elif TRTcells[cell].RANKr <= 15: + cell_color="white" + alpha = alpha_max + elif TRTcells[cell].RANKr <= 25: + cell_color="green" + alpha = alpha_max + elif TRTcells[cell].RANKr <= 35: + cell_color="yellow" + alpha = alpha_max + else: + cell_color="red" + alpha = alpha_max + # print "cell ID: %s, cell rank: %2d, cell_color:%7s, alpha = %4.1f" % (cell, TRTcells[cell].RANKr, cell_color, alpha) + e.set_alpha(alpha) # transparency: 0.0 transparent, 1 total visible + e.set_facecolor(cell_color) # "white" or [1,1,1] + + if plot_vel: + ax.arrow(x0, y0, vx, vy, head_width = head_width, head_length = head_length, fc=cell_color, ec=cell_color) + + if 1==1: + # print " !!! convert fig to image by function fig2img !!!" 
+ ### this would avoid saving into a file, but it fills the transparent areas with "white" + PIL_image = fig2img ( fig ) + else: + tmp_file = '/tmp/TRT_'+str(uuid4())+'.png' + # print tmp_file + plt.savefig(tmp_file, dpi=DPI, transparent=True) #, bbox_inches='tight' + # subprocess.call("display "+tmp_file+" &", shell=True) + PIL_image = PIL_Image.open(tmp_file) + subprocess.call("rm "+tmp_file+" &", shell=True) + + return PIL_image diff --git a/mpop/imageo/palettes.py b/mpop/imageo/palettes.py index 5e04603e..3205299e 100644 --- a/mpop/imageo/palettes.py +++ b/mpop/imageo/palettes.py @@ -35,11 +35,11 @@ def tv_legend(): """Palette for TV. """ legend = [] - legend.append((0, 0, 0)) # Unprocessed: Black - legend.append((0, 120, 0)) # Land - legend.append((0, 0, 215)) # Sea: Blue - legend.append((0, 120, 0)) # Land (Snow on land) - legend.append((0, 0, 215)) # Sea: Blue (Snow/Ice on sea) + legend.append(( 0, 0, 0)) # Unprocessed: Black + legend.append(( 0, 120, 0)) # Land + legend.append(( 0, 0, 215)) # Sea: Blue + legend.append(( 0, 120, 0)) # Land (Snow on land) + legend.append(( 0, 0, 215)) # Sea: Blue (Snow/Ice on sea) for i in range(5, 256): # All other pixel values are grey according to IR temp. @@ -52,9 +52,9 @@ def vv_legend(): """Palette for Swedish road authorities (Vägverket). """ legend = [] - legend.append((0, 0, 0)) # Unprocessed: Black - legend.append((0, 120, 0)) # Land - legend.append((0, 0, 215)) # Sea: Blue + legend.append(( 0, 0, 0)) # Unprocessed: Black + legend.append(( 0, 120, 0)) # Land + legend.append(( 0, 0, 215)) # Sea: Blue # Cloud type values 5 to 8: legend.append((255, 150, 0)) # Very low cumuliform legend.append((255, 100, 0)) # Very low @@ -67,6 +67,16 @@ def vv_legend(): return convert_palette(legend) +def cloud_phase(): + """Palette for cloud phase. + """ + legend = [] + legend.append(( 0, 0, 0)) # Unprocessed: Black + legend.append(( 0, 0, 215)) # Water Clouds: Blue + legend.append(( 240, 240, 240)) # Ice Clouds: Almost White + legend.append(( 120, 120, 0)) # Uncertain Phase: ? + + return convert_palette(legend) def cms_modified(): """Palette for regular cloud classification. @@ -79,8 +89,8 @@ def nwcsaf_cloudtype(): """ legend = [] legend.append((100, 100, 100)) # Unprocessed: Grey - legend.append((0, 120, 0)) - legend.append((0, 0, 0)) # Sea: Black + legend.append(( 0, 120, 0)) + legend.append(( 0, 0, 0)) # Sea: Black legend.append((250, 190, 250)) # Snow legend.append((220, 160, 220)) # Sea-ice @@ -95,12 +105,12 @@ def nwcsaf_cloudtype(): legend.append((255, 255, 255)) # Very high cumuliform legend.append((230, 230, 230)) # Very high - legend.append((0, 80, 215)) # Semi-transparent thin - legend.append((0, 180, 230)) # Semi-transparent medium - legend.append((0, 240, 240)) # Semi-transparent thick - legend.append((90, 200, 160)) # Semi-transparent above + legend.append(( 0, 80, 215)) # Semi-transparent thin + legend.append(( 0, 180, 230)) # Semi-transparent medium + legend.append(( 0, 240, 240)) # Semi-transparent thick + legend.append(( 90, 200, 160)) # Semi-transparent above legend.append((200, 0, 200)) # Broken - legend.append((95, 60, 30)) # Undefined: Brown + legend.append(( 95, 60, 30)) # Undefined: Brown return convert_palette(legend) @@ -109,7 +119,7 @@ def ctth_height(): """CTTH height palette. 
""" legend = [] - legend.append((0, 0, 0)) + legend.append(( 0, 0, 0)) legend.append((255, 0, 216)) # 0 meters legend.append((126, 0, 43)) legend.append((153, 20, 47)) @@ -121,11 +131,11 @@ def ctth_height(): legend.append((216, 255, 0)) legend.append((178, 255, 0)) legend.append((153, 255, 0)) - legend.append((0, 255, 0)) - legend.append((0, 140, 48)) - legend.append((0, 178, 255)) - legend.append((0, 216, 255)) - legend.append((0, 255, 255)) + legend.append(( 0, 255, 0)) + legend.append(( 0, 140, 48)) + legend.append(( 0, 178, 255)) + legend.append(( 0, 216, 255)) + legend.append(( 0, 255, 255)) legend.append((238, 214, 210)) legend.append((239, 239, 223)) legend.append((255, 255, 255)) # 10,000 meters @@ -141,46 +151,46 @@ def ctth_height_pps(): Identical to the one found in the hdf5 files. """ legend = [] - legend.append((255, 0, 216)) # 0 meters - legend.append((255, 0, 216)) # 0 meters - legend.append((255, 0, 216)) # 0 meters - legend.append((126, 0, 43)) - legend.append((126, 0, 43)) - legend.append((153, 20, 47)) - legend.append((153, 20, 47)) - legend.append((153, 20, 47)) - legend.append((178, 51, 0)) - legend.append((178, 51, 0)) - legend.append((255, 76, 0)) - legend.append((255, 76, 0)) - legend.append((255, 76, 0)) - legend.append((255, 102, 0)) - legend.append((255, 102, 0)) - legend.append((255, 164, 0)) - legend.append((255, 164, 0)) - legend.append((255, 164, 0)) - legend.append((255, 216, 0)) - legend.append((255, 216, 0)) - legend.append((216, 255, 0)) - legend.append((216, 255, 0)) - legend.append((178, 255, 0)) - legend.append((178, 255, 0)) - legend.append((178, 255, 0)) - legend.append((153, 255, 0)) - legend.append((153, 255, 0)) - legend.append((0, 255, 0)) - legend.append((0, 255, 0)) - legend.append((0, 255, 0)) - legend.append((0, 140, 48)) - legend.append((0, 140, 48)) - legend.append((0, 178, 255)) - legend.append((0, 178, 255)) - legend.append((0, 178, 255)) - legend.append((0, 216, 255)) - legend.append((0, 216, 255)) - legend.append((0, 255, 255)) - legend.append((0, 255, 255)) - legend.append((0, 255, 255)) + legend.append((255, 0, 216)) # 0 meters + legend.append((255, 0, 216)) # 0 meters + legend.append((255, 0, 216)) # 0 meters + legend.append((126, 0, 43)) + legend.append((126, 0, 43)) + legend.append((153, 20, 47)) + legend.append((153, 20, 47)) + legend.append((153, 20, 47)) + legend.append((178, 51, 0)) + legend.append((178, 51, 0)) + legend.append((255, 76, 0)) + legend.append((255, 76, 0)) + legend.append((255, 76, 0)) + legend.append((255, 102, 0)) + legend.append((255, 102, 0)) + legend.append((255, 164, 0)) + legend.append((255, 164, 0)) + legend.append((255, 164, 0)) + legend.append((255, 216, 0)) + legend.append((255, 216, 0)) + legend.append((216, 255, 0)) + legend.append((216, 255, 0)) + legend.append((178, 255, 0)) + legend.append((178, 255, 0)) + legend.append((178, 255, 0)) + legend.append((153, 255, 0)) + legend.append((153, 255, 0)) + legend.append(( 0, 255, 0)) + legend.append(( 0, 255, 0)) + legend.append(( 0, 255, 0)) + legend.append(( 0, 140, 48)) + legend.append(( 0, 140, 48)) + legend.append(( 0, 178, 255)) + legend.append(( 0, 178, 255)) + legend.append(( 0, 178, 255)) + legend.append(( 0, 216, 255)) + legend.append(( 0, 216, 255)) + legend.append(( 0, 255, 255)) + legend.append(( 0, 255, 255)) + legend.append(( 0, 255, 255)) legend.append((238, 214, 210)) legend.append((238, 214, 210)) legend.append((239, 239, 223)) @@ -280,3 +290,44 @@ def convert_palette(palette): i[1] / 255.0, i[2] / 255.0)) return new_palette + +def 
convert_palette2colormap(palette): + """Convert palette from [0,255] range to [0,1]. + """ + from trollimage.colormap import Colormap + j=0 + n_pal = len(palette)-1 + values=[] + colors=[] + + red = [r for (r, g, b) in palette] + green = [g for (r, g, b) in palette] + blue = [b for (r, g, b) in palette] + + max_pal = max( max(red), max(blue), max(green) ) + if max_pal <= 1.0: + # print "palette already normalized" + denom = 1.0 + else: + # print "palette normalized to 255" + denom = 255.0 + + for i in palette: + values.append( (n_pal-j) / float(n_pal) ) + colors.append((i[0] / denom, i[1] / denom, i[2] / denom)) + j=j+1 + # reverse order to the entries + values=values[::-1] + colors=colors[::-1] + + #for i in palette: + # values.append( j / float(n_pal)) + # colors.append((i[0] / 255.0, i[1] / 255.0, i[2] / 255.0)) + # j=j+1 + + # attention: + # Colormap(values, colors) uses the second input option of Colormap + # values has to be a list (not a tuple) and + # colors has to be the corresponding list of color tuples + + return Colormap(values, colors) diff --git a/mpop/instruments/seviri.py b/mpop/instruments/seviri.py index 016e78fc..f651d599 100644 --- a/mpop/instruments/seviri.py +++ b/mpop/instruments/seviri.py @@ -300,11 +300,12 @@ def snow(self): snow.prerequisites = refl39_chan.prerequisites | set( [0.8, 1.63, 3.75]) - def day_microphysics(self, wintertime=False): + def day_microphysics(self, wintertime=False, fill_value=None): """Make a 'Day Microphysics' RGB as suggested in the MSG interpretation guide (rgbpart04.ppt). It is kind of special as it requires the derivation of the daytime component of the mixed Terrestrial/Solar 3.9 micron channel. Furthermore the sun zenith angle is used. + for black backgroup specify: fill_value=(0,0,0) """ self.refl39_chan() @@ -335,7 +336,7 @@ def day_microphysics(self, wintertime=False): self.area, self.time_slot, crange=crange, - fill_value=None, mode="RGB") + fill_value=fill_value, mode="RGB") if wintertime: img.gamma((1.0, 1.5, 1.0)) else: diff --git a/mpop/satin/msg_seviri_hdf.py b/mpop/satin/msg_seviri_hdf.py new file mode 100755 index 00000000..8b99cfd0 --- /dev/null +++ b/mpop/satin/msg_seviri_hdf.py @@ -0,0 +1,265 @@ +"""Loader for MSG, netcdf format. +""" +from ConfigParser import ConfigParser +from mpop import CONFIG_PATH +import os +import numpy.ma as ma +from numpy import array as np_array +from numpy import nan as np_nan +from glob import glob +from mpop.projector import get_area_def +import datetime + +try: + import h5py +except ImportError: + print "... module h5py needs to be installed" + quit() + +from mipp.xrit.MSG import _Calibrator + +import logging +LOG = logging.getLogger(__name__) +#from mpop.utils import debug_on +#debug_on() + +SatelliteIds = { '08': 321, # Meteosat 8 + '8': 321, # Meteosat 8 + '09': 322, # Meteosat 9 + '9': 322, # Meteosat 9 + '10': 323, # Meteosat 10 + '11': 324 } # Meteosat 11 + +channel_numbers = {"VIS006": 1, + "VIS008": 2, + "IR_016": 3, + "IR_039": 4, + "WV_062": 5, + "WV_073": 6, + "IR_087": 7, + "IR_097": 8, + "IR_108": 9, + "IR_120": 10, + "IR_134": 11, + "HRV": 12} + +dict_channel= {'VIS006':'Channel 01','VIS008':'Channel 02','IR_016':'Channel 03','IR_039':'Channel 04','WV_062':'Channel 05','WV_073':'Channel 06',\ + 'IR_087':'Channel 07','IR_097':'Channel 08','IR_108':'Channel 09','IR_120':'Channel 10','IR_134':'Channel 11','HRV':'Channel 12'} + + +def load(satscene, calibrate=True, area_extent=None, **kwargs): + """Load MSG SEVIRI data from hdf5 format. 
+ """ + + # Read config file content + conf = ConfigParser() + conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg")) + values = {"orbit": satscene.orbit, + "satname": satscene.satname, + "number": satscene.number, + "instrument": satscene.instrument_name, + "satellite": satscene.fullname + } + + LOG.info("assume seviri-level4") + print "... assume seviri-level4" + + satscene.add_to_history("hdf5 data read by mpop/msg_seviri_hdf.py") + + + if "reader_level" in kwargs.keys(): + reader_level = kwargs["reader_level"] + else: + reader_level = "seviri-level4" + + if "RSS" in kwargs.keys(): + if kwargs["RSS"]: + dt_end = 4 + else: + dt_end = 12 + else: + from my_msg_module import check_RSS + RSS = check_RSS(satscene.number, satscene.time_slot) + if RSS == None: + print "*** Error in mpop/satin/msg_seviri_hdf.py" + print " satellite MSG", satscene.number ," is not active yet" + quit() + else: + if RSS: + dt_end = 4 + else: + dt_end = 12 + + print "... hdf file name is specified by observation end time" + print " assume ", dt_end, " min between start and end time of observation" + + # end of scan time 4 min after start + end_time = satscene.time_slot + datetime.timedelta(minutes=dt_end) + + filename = os.path.join( end_time.strftime(conf.get(reader_level, "dir", raw=True)), + end_time.strftime(conf.get(reader_level, "filename", raw=True)) % values ) + + print "... search for file: ", filename + filenames=glob(str(filename)) + if len(filenames) == 0: + print "*** Error, no file found" + return # just return without exit the program + elif len(filenames) > 1: + print "*** Warning, more than 1 datafile found: ", filenames + filename = filenames[0] + print("... read data from %s" % str(filename)) + + # read data from hdf5 file + data_folder='U-MARF/MSG/Level1.5/' + + # Load data from hdf file + with h5py.File(filename,'r') as hf: + + subset_info=hf.get(data_folder+'METADATA/SUBSET') + for i in range(subset_info.len()): + #print subset_info[i]['EntryName'], subset_info[i]['Value'] + if subset_info[i]['EntryName'] == "VIS_IRSouthLineSelectedRectangle": + VIS_IRSouthLine = int(subset_info[i]['Value']) + if subset_info[i]['EntryName'] == "VIS_IRNorthLineSelectedRectangle": + VIS_IRNorthLine = int(subset_info[i]['Value']) + if subset_info[i]['EntryName'] == "VIS_IREastColumnSelectedRectangle": + VIS_IREastColumn = int(subset_info[i]['Value']) + if subset_info[i]['EntryName'] == "VIS_IRWestColumnSelectedRectangle": + VIS_IRWestColumn = int(subset_info[i]['Value']) + if subset_info[i]['EntryName'] == "HRVLowerNorthLineSelectedRectangle": + HRVLowerNorthLine = int(subset_info[i]['Value']) + if subset_info[i]['EntryName'] == "HRVLowerSouthLineSelectedRectangle": + HRVLowerSouthLine = int(subset_info[i]['Value']) + if subset_info[i]['EntryName'] == "HRVLowerEastColumnSelectedRectangle": + HRVLowerEastColumn = int(subset_info[i]['Value']) + if subset_info[i]['EntryName'] == "HRVLowerWestColumnSelectedRectangle": + HRVLowerWestColumn = int(subset_info[i]['Value']) + if subset_info[i]['EntryName'] == "HRVUpperSouthLineSelectedRectangle": + HRVUpperSouthLine = int(subset_info[i]['Value']) # 0 + if subset_info[i]['EntryName'] == "HRVUpperNorthLineSelectedRectangle": + HRVUpperNorthLine = int(subset_info[i]['Value']) # 0 + if subset_info[i]['EntryName'] == "HRVUpperEastColumnSelectedRectangle": + HRVUpperEastColumn = int(subset_info[i]['Value']) # 0 + if subset_info[i]['EntryName'] == "HRVUpperWestColumnSelectedRectangle": + HRVUpperWestColumn = int(subset_info[i]['Value']) # 0 + + 
sat_status=hf.get(data_folder+'METADATA/HEADER/SatelliteStatus/SatelliteStatus_DESCR') + for i in range(subset_info.len()): + if sat_status[i]['EntryName']=="SatelliteDefinition-NominalLongitude": + sat_lon = sat_status[i]['Value'] + break + + #print 'VIS_IRSouthLine', VIS_IRSouthLine + #print 'VIS_IRNorthLine', VIS_IRNorthLine + #print 'VIS_IREastColumn', VIS_IREastColumn + #print 'VIS_IRWestColumn', VIS_IRWestColumn + #print 'sat_longitude', sat_lon, type(sat_lon), 'GEOS<'+'{:+06.1f}'.format(sat_lon)+'>' + + if 1 == 0: + # works only if all pixels are on the disk + from msg_pixcoord2area import msg_pixcoord2area + print "VIS_IRNorthLine, VIS_IRWestColumn, VIS_IRSouthLine, VIS_IREastColumn: ", VIS_IRNorthLine, VIS_IRWestColumn, VIS_IRSouthLine, VIS_IREastColumn + area_def = msg_pixcoord2area ( VIS_IRNorthLine, VIS_IRWestColumn, VIS_IRSouthLine, VIS_IREastColumn, "vis", sat_lon ) + else: + # works also for pixels outside of the disk + pname = 'GEOS<'+'{:+06.1f}'.format(sat_lon)+'>' # "GEOS<+009.5>" + proj = {'proj': 'geos', 'a': '6378169.0', 'b': '6356583.8', 'h': '35785831.0', 'lon_0': str(sat_lon)} + aex=(-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612) + + # define full disk projection + from pyresample.geometry import AreaDefinition + full_disk_def = AreaDefinition('full_disk', + 'full_disk', + pname, + proj, + 3712, + 3712, + aex ) + + # define name and calculate area for sub-demain + area_name= 'MSG_'+'{:04d}'.format(VIS_IRNorthLine)+'_'+'{:04d}'.format(VIS_IRWestColumn)+'_'+'{:04d}'.format(VIS_IRSouthLine)+'_'+'{:04d}'.format(VIS_IREastColumn) + aex = full_disk_def.get_area_extent_for_subset(3712-VIS_IRSouthLine,3712-VIS_IRWestColumn,3712-VIS_IRNorthLine,3712-VIS_IREastColumn) + + area_def = AreaDefinition(area_name, + area_name, + pname, + proj, + (VIS_IRWestColumn-VIS_IREastColumn)+1, + (VIS_IRNorthLine-VIS_IRSouthLine)+1, + aex ) + + #print area_def + #print "REGION:", area_def.area_id, "{" + #print "\tNAME:\t", area_def.name + #print "\tPCS_ID:\t", area_def.proj_id + #print ("\tPCS_DEF:\tproj="+area_def.proj_dict['proj']+", lon_0=" + area_def.proj_dict['lon_0'] + ", a="+area_def.proj_dict['a']+", b="+area_def.proj_dict['b']+", h="+area_def.proj_dict['h']) + #print "\tXSIZE:\t", area_def.x_size + #print "\tYSIZE:\t", area_def.y_size + #print "\tAREA_EXTENT:\t", area_def.area_extent + #print "};" + + # copy area to satscene + satscene.area = area_def + + # write information used by mipp.xrit.MSG._Calibrator in a fake header file + hdr = dict() + + # satellite ID number + hdr["SatelliteDefinition"] = dict() + hdr["SatelliteDefinition"]["SatelliteId"] = SatelliteIds[satscene.number] + + # processing + hdr["Level 1_5 ImageProduction"] = dict() + hdr["Level 1_5 ImageProduction"]["PlannedChanProcessing"] = np_array([2,2,2,2,2,2,2,2,2,2,2,2], int) + + # calibration factors + Level15ImageCalibration = hf.get(data_folder+'METADATA/HEADER/RadiometricProcessing/Level15ImageCalibration_ARRAY') + hdr["Level1_5ImageCalibration"] = dict() + + for chn_name in channel_numbers.keys(): + chn_nb = channel_numbers[chn_name]-1 + hdr["Level1_5ImageCalibration"][chn_nb] = dict() + #print chn_name, chn_nb, Level15ImageCalibration[chn_nb]['Cal_Slope'], Level15ImageCalibration[chn_nb]['Cal_Offset'] + hdr["Level1_5ImageCalibration"][chn_nb]['Cal_Slope'] = Level15ImageCalibration[chn_nb]['Cal_Slope'] + hdr["Level1_5ImageCalibration"][chn_nb]['Cal_Offset'] = Level15ImageCalibration[chn_nb]['Cal_Offset'] + + # loop over channels to load + for chn_name in 
satscene.channels_to_load: + + dataset_name = data_folder+'DATA/'+dict_channel[chn_name]+'/IMAGE_DATA' + if dataset_name in hf: + data_tmp = hf.get(data_folder+'DATA/'+dict_channel[chn_name]+'/IMAGE_DATA') + + LOG.info('hdr["SatelliteDefinition"]["SatelliteId"]: '+str(hdr["SatelliteDefinition"]["SatelliteId"])) + #LOG.info('hdr["Level 1_5 ImageProduction"]["PlannedChanProcessing"]', hdr["Level 1_5 ImageProduction"]["PlannedChanProcessing"]) + chn_nb = channel_numbers[chn_name]-1 + LOG.info('hdr["Level1_5ImageCalibration"][chn_nb]["Cal_Slope"]: '+str(hdr["Level1_5ImageCalibration"][chn_nb]["Cal_Slope"])) + LOG.info('hdr["Level1_5ImageCalibration"][chn_nb]["Cal_Offset"]: '+str(hdr["Level1_5ImageCalibration"][chn_nb]["Cal_Offset"])) + + if calibrate: + #Calibrator = _Calibrator(hdr, chn_name) + bits_per_pixel = 10 ### !!! I have no idea if this is correct !!! + Calibrator = _Calibrator(hdr, chn_name, bits_per_pixel) ## changed call in mipp/xrit/MSG.py + data, calibration_unit = Calibrator (data_tmp, calibrate=1) + else: + data = data_tmp + calibration_unit = "counts" + + LOG.info(chn_name+ " min/max: "+str(data.min())+","+str(data.max())+" "+calibration_unit ) + + satscene[chn_name] = ma.asarray(data) + + satscene[chn_name].info['units'] = calibration_unit + satscene[chn_name].info['satname'] = satscene.satname + satscene[chn_name].info['satnumber'] = satscene.number + satscene[chn_name].info['instrument_name'] = satscene.instrument_name + satscene[chn_name].info['time'] = satscene.time_slot + satscene[chn_name].info['is_calibrated'] = True + + else: + print "*** Warning, no data for channel "+ chn_name+ " in file "+ filename + data = np_nan + calibration_unit = "" + LOG.info("*** Warning, no data for channel "+ chn_name+" in file "+filename) + # do not append the channel chn_name + diff --git a/mpop/satin/nwcsaf_hrw_hdf.py b/mpop/satin/nwcsaf_hrw_hdf.py new file mode 100644 index 00000000..d6a22ca7 --- /dev/null +++ b/mpop/satin/nwcsaf_hrw_hdf.py @@ -0,0 +1,355 @@ +"""Loader for MSG, nwcsaf high resolution hdf5 format. +""" +from ConfigParser import ConfigParser +from mpop import CONFIG_PATH +import os +from numpy import array as np_array +from numpy import empty as np_empty +from numpy import append as np_append +from numpy import dtype as np_dtype +from numpy import append as np_append +from numpy import where as np_where +from numpy import in1d as np_in1d +from numpy import logical_and as np_logical_and +from glob import glob +from mpop.projector import get_area_def +import datetime + +from copy import deepcopy + +try: + import h5py +except ImportError: + print "... 
module h5py needs to be installed" + quit() + +from mipp.xrit.MSG import _Calibrator + +import logging +LOG = logging.getLogger(__name__) +#from mpop.utils import debug_on +#debug_on() + +GP_IDs = { 321: '08', # Meteosat 8 + 322: '09', # Meteosat 9 + 323: '10', # Meteosat 10 + 324: '11' } # Meteosat 11 + +dict_channel = {'CHANNEL00':'HRV', 'CHANNEL01':'VIS006','CHANNEL02':'VIS008','CHANNEL03':'IR_016','CHANNEL04':'IR_039','CHANNEL05':'WV_062',\ + 'CHANNEL06':'WV_073','CHANNEL07':'IR_087','CHANNEL08':'IR_097','CHANNEL09':'IR_108','CHANNEL10':'IR_120','CHANNEL11':'IR_134'} + + +# class definition of a high resolution wind data +class HRW_class: + + def __init__(self): + # see http://docs.scipy.org/doc/numpy/reference/generated/numpy.dtype.html + self.date = None # datetime of the observation + self.detailed = None # False-> basic, True -> detailed + self.channel = np_array([], dtype='|S6') + self.wind_id = np_array([], dtype=int) + self.prev_wind_id = np_array([], dtype=int) + self.segment_X = np_array([], dtype='f') + self.segment_Y = np_array([], dtype='f') + self.t_corr_method = np_array([], dtype=int) + self.lon = np_array([], dtype='f') # 6.3862 [longitude in degree E] + self.lat = np_array([], dtype='f') # 46.8823 [latitude in degree N] + self.dlon = np_array([], dtype='f') # -0.011 [longitude in degree E] + self.dlat = np_array([], dtype='f') # 0.01 [latitude in degree N] + self.pressure = np_array([], dtype='f') # 64200.0 [p in Pa] + self.wind_speed = np_array([], dtype='f') # 3.1 [v in m/s] + self.wind_direction = np_array([], dtype='f') # 313.0 [v_dir in deg] + self.temperature = np_array([], dtype='f') # 272.4 [T in K] + self.conf_nwp = np_array([], dtype='f') + self.conf_no_nwp = np_array([], dtype='f') + self.t_type = np_array([], dtype=int) + self.t_level_method = np_array([], dtype=int) + self.t_winds = np_array([], dtype=int) + self.t_corr_test = np_array([], dtype=int) + self.applied_QI = np_array([], dtype=int) + self.NWP_wind_levels = np_array([], dtype=int) + self.num_prev_winds = np_array([], dtype=int) + self.orographic_index= np_array([], dtype=int) + self.cloud_type = np_array([], dtype=int) + self.wind_channel = np_array([], dtype=int) + self.correlation = np_array([], dtype=int) + self.pressure_error = np_array([], dtype='f') + + # ---------------- add two data sets e.g. time steps --------------------- + def __add__(self, HRW_class2): + + HRW_new = HRW_class() + + HRW_new.date = self.date # !!! does not make sense !!! + HRW_new.detailed = self.detailed # !!! does not make sense !!! 
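+        # Note: date and detailed are simply taken from the first operand, so
+        # adding only makes sense for two data sets of the same kind (both
+        # basic or both detailed), e.g. consecutive time steps of one product.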
+ HRW_new.channel = np_append(self.channel, HRW_class2.channel) + HRW_new.wind_id = np_append(self.wind_id, HRW_class2.wind_id) + HRW_new.prev_wind_id = np_append(self.prev_wind_id, HRW_class2.prev_wind_id) + HRW_new.segment_X = np_append(self.segment_X, HRW_class2.segment_X) + HRW_new.segment_Y = np_append(self.segment_Y, HRW_class2.segment_Y) + HRW_new.t_corr_method = np_append(self.t_corr_method, HRW_class2.t_corr_method) + HRW_new.lon = np_append(self.lon, HRW_class2.lon) + HRW_new.lat = np_append(self.lat, HRW_class2.lat) + HRW_new.dlon = np_append(self.dlon, HRW_class2.dlon) + HRW_new.dlat = np_append(self.dlat, HRW_class2.dlat) + HRW_new.pressure = np_append(self.pressure, HRW_class2.pressure) + HRW_new.wind_speed = np_append(self.wind_speed, HRW_class2.wind_speed) + HRW_new.wind_direction = np_append(self.wind_direction, HRW_class2.wind_direction) + HRW_new.temperature = np_append(self.temperature, HRW_class2.temperature) + HRW_new.conf_nwp = np_append(self.conf_nwp, HRW_class2.conf_nwp) + HRW_new.conf_no_nwp = np_append(self.conf_no_nwp, HRW_class2.conf_no_nwp) + HRW_new.t_type = np_append(self.t_type, HRW_class2.t_type) + HRW_new.t_level_method = np_append(self.t_level_method, HRW_class2.t_level_method) + HRW_new.t_winds = np_append(self.t_winds, HRW_class2.t_winds) + HRW_new.t_corr_test = np_append(self.t_corr_test, HRW_class2.t_corr_test) + HRW_new.applied_QI = np_append(self.applied_QI, HRW_class2.applied_QI) + HRW_new.NWP_wind_levels = np_append(self.NWP_wind_levels, HRW_class2.NWP_wind_levels) + HRW_new.num_prev_winds = np_append(self.num_prev_winds, HRW_class2.num_prev_winds) + HRW_new.orographic_index= np_append(self.orographic_index,HRW_class2.orographic_index) + HRW_new.cloud_type = np_append(self.cloud_type, HRW_class2.cloud_type) + HRW_new.wind_channel = np_append(self.wind_channel, HRW_class2.wind_channel) + HRW_new.correlation = np_append(self.correlation, HRW_class2.correlation) + HRW_new.pressure_error = np_append(self.pressure_error, HRW_class2.pressure_error) + + return HRW_new + + # ---------------- filter for certain criterias --------------------- + def filter(self, **kwargs): + + # if empty than return self (already empty) + if self.channel.size == 0: + return self + + HRW_new = deepcopy(self) + + for key_filter in ['min_correlation', 'min_conf_nwp', 'min_conf_no_nwp', 'cloud_type', 'level']: + if key_filter in kwargs.keys(): + + # if argument given is None or all keyword than skip this filter + if kwargs[key_filter] == None or kwargs[key_filter] == 'all' or kwargs[key_filter] == 'ALL' or kwargs[key_filter] == 'A': + continue + + n1 = str(HRW_new.channel.size) + + if key_filter == 'min_correlation': + inds = np_where(HRW_new.correlation > kwargs[key_filter]) + elif key_filter == 'min_conf_nwp': + inds = np_where(HRW_new.conf_nwp > kwargs[key_filter]) + elif key_filter == 'min_conf_no_nwp': + inds = np_where(HRW_new.conf_no_nwp > kwargs[key_filter]) + elif key_filter == 'cloud_type': + mask = np_in1d(HRW_new.cloud_type, kwargs[key_filter]) + inds = np_where(mask)[0] + elif key_filter == 'level': + if kwargs[key_filter] == 'H': # high level: < 440hPa like in the ISCCP + inds = np_where(HRW_new.pressure < 44000 ) + elif kwargs[key_filter] == 'M': # mid level: 440hPa ... 
680hPa like in the ISCCP + inds = np_where( np_logical_and(44000 < HRW_new.pressure, HRW_new.pressure < 68000) ) + elif kwargs[key_filter] == 'L': # low level: > 680hPa like in the ISCCP + inds = np_where(68000 < HRW_new.pressure) + + HRW_new.subset(inds) + print " filter for "+key_filter+" = ", kwargs[key_filter],' ('+n1+'->'+str(HRW_new.channel.size)+')' + + return HRW_new + + # ---------------- reduce HRW_dataset to the given indices inds --------------------- + def subset(self, inds): + + self.channel = self.channel [inds] + self.wind_id = self.wind_id [inds] + self.prev_wind_id = self.prev_wind_id [inds] + self.segment_X = self.segment_X [inds] + self.segment_Y = self.segment_Y [inds] + self.t_corr_method = self.t_corr_method [inds] + self.lon = self.lon [inds] + self.lat = self.lat [inds] + self.dlon = self.dlon [inds] + self.dlat = self.dlat [inds] + self.pressure = self.pressure [inds] + self.wind_speed = self.wind_speed [inds] + self.wind_direction = self.wind_direction [inds] + self.temperature = self.temperature [inds] + self.conf_nwp = self.conf_nwp [inds] + self.conf_no_nwp = self.conf_no_nwp [inds] + self.t_type = self.t_type [inds] + self.t_level_method = self.t_level_method [inds] + self.t_winds = self.t_winds [inds] + self.t_corr_test = self.t_corr_test [inds] + self.applied_QI = self.applied_QI [inds] + self.NWP_wind_levels = self.NWP_wind_levels [inds] + self.num_prev_winds = self.num_prev_winds [inds] + self.orographic_index = self.orographic_index[inds] + self.cloud_type = self.cloud_type [inds] + self.wind_channel = self.wind_channel [inds] + self.correlation = self.correlation [inds] + self.pressure_error = self.pressure_error [inds] + + return self + + +def load(satscene, calibrate=True, area_extent=None, read_basic_or_detailed='both', **kwargs): + """Load MSG SEVIRI High Resolution Wind (HRW) data from hdf5 format. + """ + + # Read config file content + conf = ConfigParser() + conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg")) + values = {"orbit": satscene.orbit, + "satname": satscene.satname, + "number": satscene.number, + "instrument": satscene.instrument_name, + "satellite": satscene.fullname + } + + LOG.info("assume seviri-level5") + print "... assume seviri-level5" + + satscene.add_to_history("hdf5 data read by mpop/nwcsaf_hrw_hdf.py") + + # end of scan time 4 min after start + end_time = satscene.time_slot + datetime.timedelta(minutes=4) + + # area !!! satscene.area + + filename = os.path.join( satscene.time_slot.strftime(conf.get("seviri-level5", "dir", raw=True)), + satscene.time_slot.strftime(conf.get("seviri-level5", "filename", raw=True)) % values ) + + # define classes before we search for files (in order to return empty class if no file is found) + HRW_basic = HRW_class() + HRW_basic.detailed = False + HRW_basic.date = satscene.time_slot + HRW_detailed = HRW_class() + HRW_detailed.detailed = True + HRW_detailed.date = satscene.time_slot + + print "... search for file: ", filename + filenames=glob(str(filename)) + + if len(filenames) != 0: + + if len(filenames) > 1: + print "*** Warning, more than 1 datafile found: ", filenames + + filename = filenames[0] + print("... 
read data from %s" % str(filename)) + + # create an instant of the HRW_class + m_per_s_to_knots = 1.944 + + ## limit channels to read + #hrw_channels=['HRV'] + # limit basic or detailed or both + #read_basic_or_detailed='detailed' + #read_basic_or_detailed='basic' + + + with h5py.File(filename,'r') as hf: + + #print hf.attrs.keys() + #print hf.attrs.values() + + region_name = hf.attrs['REGION_NAME'].replace("_", "") + print "... read HRW data for region ", region_name + LOG.info("... read HRW data for region "+region_name) + sat_ID = GP_IDs[int(hf.attrs["GP_SC_ID"])] + print "... derived from Meteosat ", sat_ID + LOG.info("... derived from Meteosat "+sat_ID) + + # print('List of arrays in this file: \n', hf.keys()), len(hf.keys()) + + if len(hf.keys()) == 0: + print "*** Warning, empty file ", filename + print "" + else: + for key in hf.keys(): + + if key[4:9] == "BASIC": + if 'read_basic_or_detailed' in locals(): + if read_basic_or_detailed.lower() == "detailed": + continue + HRW_data = HRW_basic # shallow copy + elif key[4:12] == "DETAILED": + if 'read_basic_or_detailed' in locals(): + if read_basic_or_detailed.lower() == "basic": + continue + HRW_data = HRW_detailed # shallow copy + + hrw_chn = dict_channel[key[len(key)-9:]] + + if 'hrw_channels' in locals(): + if hrw_channels != None: + if hrw_chn not in hrw_channels: + print "... "+hrw_chn+" is not in hrw_channels", hrw_channels + print " skip reading this channel" + continue + + # read all data + channel = hf.get(key) + # print '... read wind vectors of channel ', channel.name, hrw_chn + # print " i lon lat speed[kn] dir pressure" + #for i in range(channel.len()): + # print '%3d %10.7f %10.7f %7.2f %7.1f %8.1f' % (channel[i]['wind_id'], channel[i]['lon'], channel[i]['lat'], \ + # channel[i]['wind_speed']*m_per_s_to_knots, \ + # channel[i]['wind_direction'], channel[i]['pressure']) + # create string array with channel names + channel_chararray = np_empty(channel.len(), dtype='|S6') + channel_chararray[:] = hrw_chn + + HRW_data.channel = np_append(HRW_data.channel , channel_chararray ) + HRW_data.wind_id = np_append(HRW_data.wind_id , channel[:]['wind_id'] ) + HRW_data.prev_wind_id = np_append(HRW_data.prev_wind_id , channel[:]['prev_wind_id'] ) + HRW_data.segment_X = np_append(HRW_data.segment_X , channel[:]['segment_X'] ) + HRW_data.segment_Y = np_append(HRW_data.segment_Y , channel[:]['segment_Y'] ) + HRW_data.t_corr_method = np_append(HRW_data.t_corr_method , channel[:]['t_corr_method'] ) + HRW_data.lon = np_append(HRW_data.lon , channel[:]['lon'] ) + HRW_data.lat = np_append(HRW_data.lat , channel[:]['lat'] ) + HRW_data.dlon = np_append(HRW_data.dlon , channel[:]['dlon'] ) + HRW_data.dlat = np_append(HRW_data.dlat , channel[:]['dlat'] ) + HRW_data.pressure = np_append(HRW_data.pressure , channel[:]['pressure'] ) + HRW_data.wind_speed = np_append(HRW_data.wind_speed , channel[:]['wind_speed'] ) + HRW_data.wind_direction = np_append(HRW_data.wind_direction , channel[:]['wind_direction'] ) + HRW_data.temperature = np_append(HRW_data.temperature , channel[:]['temperature'] ) + HRW_data.conf_nwp = np_append(HRW_data.conf_nwp , channel[:]['conf_nwp'] ) + HRW_data.conf_no_nwp = np_append(HRW_data.conf_no_nwp , channel[:]['conf_no_nwp'] ) + HRW_data.t_type = np_append(HRW_data.t_type , channel[:]['t_type'] ) + HRW_data.t_level_method = np_append(HRW_data.t_level_method , channel[:]['t_level_method'] ) + HRW_data.t_winds = np_append(HRW_data.t_winds , channel[:]['t_winds'] ) + HRW_data.t_corr_test = np_append(HRW_data.t_corr_test , 
channel[:]['t_corr_test'] ) + HRW_data.applied_QI = np_append(HRW_data.applied_QI , channel[:]['applied_QI'] ) + HRW_data.NWP_wind_levels = np_append(HRW_data.NWP_wind_levels , channel[:]['NWP_wind_levels'] ) + HRW_data.num_prev_winds = np_append(HRW_data.num_prev_winds , channel[:]['num_prev_winds'] ) + HRW_data.orographic_index = np_append(HRW_data.orographic_index, channel[:]['orographic_index'] ) + HRW_data.cloud_type = np_append(HRW_data.cloud_type , channel[:]['cloud_type'] ) + HRW_data.wind_channel = np_append(HRW_data.wind_channel , channel[:]['wind_channel'] ) + HRW_data.correlation = np_append(HRW_data.correlation , channel[:]['correlation'] ) + HRW_data.pressure_error = np_append(HRW_data.pressure_error , channel[:]['pressure_error'] ) + + # sort according to wind_id + inds = HRW_data.wind_id.argsort() + HRW_data.subset(inds) # changes HRW_data itself + + # sorting without conversion to numpy arrays + #[e for (wid,pwid) in sorted(zip(HRW_data.wind_id,HRW_data.prev_wind_id))] + + else: + print "*** Error, no file found" + print "" + sat_ID = "no file" + # but we continue the program in order to add an empty channel below + + + ## filter data according to the given optional arguments + #n1 = str(HRW_data.channel.size) + #HRW_data = HRW_data.filter(**kwargs) + #print " apply filters "+' ('+n1+'->'+str(HRW_data.channel.size)+')' + + chn_name="HRW" + satscene[chn_name].HRW_basic = HRW_basic.filter(**kwargs) # returns new object (deepcopied and filtered) + satscene[chn_name].HRW_detailed = HRW_detailed.filter(**kwargs) # returns new object (deepcopied and filtered) + satscene[chn_name].info['units'] = 'm/s' + satscene[chn_name].info['satname'] = 'meteosat' + satscene[chn_name].info['satnumber'] = sat_ID + satscene[chn_name].info['instrument_name'] = 'seviri' + satscene[chn_name].info['time'] = satscene.time_slot + satscene[chn_name].info['is_calibrated'] = True diff --git a/mpop/satin/nwcsaf_msg.py b/mpop/satin/nwcsaf_msg.py new file mode 100644 index 00000000..1e71802b --- /dev/null +++ b/mpop/satin/nwcsaf_msg.py @@ -0,0 +1,3086 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2010, 2012, 2014. + +# SMHI, +# Folkborgsvägen 1, +# Norrköping, +# Sweden + +# Author(s): + +# Martin Raspaud +# Marco Sassi for CRR, PC (partly), SPhR, PCPh, CRPh +# Jörg Asmus for CRR, PC (partly), SPhR, PCPh, CRPH +# Ulrich Hamann for CMa, bugfix SPhR.cape, 1st version generic class MsgNwcsafClass + + +# This file is part of mpop. + +# mpop is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. + +# mpop is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License along with +# mpop. If not, see . + +"""Plugin for reading NWCSAF MSG products hdf files. 
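+
+Products handled by the classes below: CloudMask (CMa), CloudType (CT),
+CTTH, CRR, PC (precipitating clouds), SPhR, PCPh and CRPh.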
+""" +import ConfigParser +import os.path +from mpop import CONFIG_PATH +import mpop.channel +import numpy as np +import pyresample.utils + +import glob +from mpop.utils import get_logger +from mpop.projector import get_area_def +from os.path import basename + +LOG = get_logger('satin/nwcsaf_msg') +COMPRESS_LVL = 6 + + +def pcs_def_from_region(region): + items = region.proj_dict.items() + return ' '.join([t[0] + '=' + t[1] for t in items]) + + +def _get_area_extent(cfac, lfac, coff, loff, numcols, numlines): + """Get the area extent from msg parameters. + """ + + # h = 35785831.0, see area_def.cfg + + xur = (numcols - coff) * 2 ** 16 / (cfac * 1.0) + xur = np.deg2rad(xur) * 35785831.0 + xll = (-1 - coff) * 2 ** 16 / (cfac * 1.0) + xll = np.deg2rad(xll) * 35785831.0 + xres = (xur - xll) / numcols + xur, xll = xur - xres / 2, xll + xres / 2 + yll = (numlines - loff) * 2 ** 16 / (-lfac * 1.0) + yll = np.deg2rad(yll) * 35785831.0 + yur = (-1 - loff) * 2 ** 16 / (-lfac * 1.0) + yur = np.deg2rad(yur) * 35785831.0 + yres = (yur - yll) / numlines + yll, yur = yll + yres / 2, yur - yres / 2 + print "msg_hdf _get_area_extent: xll, yll, xur, yur = ", xll, yll, xur, yur + return xll, yll, xur, yur + + +def get_area_extent(filename): + """Get the area extent of the data in *filename*. + """ + import h5py + h5f = h5py.File(filename, 'r') + print "msg_hdf get_area_extent: CFAC, LFAC, COFF, LOFF, NC, NL = ", h5f.attrs["CFAC"], h5f.attrs["LFAC"], h5f.attrs["COFF"], h5f.attrs["LOFF"], h5f.attrs["NC"], h5f.attrs["NL"] + aex = _get_area_extent(h5f.attrs["CFAC"], + h5f.attrs["LFAC"], + h5f.attrs["COFF"], + h5f.attrs["LOFF"], + h5f.attrs["NC"], + h5f.attrs["NL"]) + h5f.close() + return aex + + +def _get_palette(h5f, dsname): + try: + p = h5f[dsname].attrs['PALETTE'] + return h5f[p].value + except KeyError: + return None + +# ------------------------------------------------------------------ + +class MsgCloudMaskData(object): + + """NWCSAF/MSG Cloud Mask data layer + """ + + def __init__(self): + self.data = None + self.scaling_factor = 1 + self.offset = 0 + self.num_of_lines = 0 + self.num_of_columns = 0 + self.product = "" + self.id = "" + +class MsgCloudMask(mpop.channel.GenericChannel): + + """NWCSAF/MSG Cloud Mask data structure as retrieved from HDF5 + file. Resolution sets the nominal resolution of the data. + """ + + def __init__(self, resolution=None): + mpop.channel.GenericChannel.__init__(self, "CloudMask") + self.filled = False + self.name = "CloudMask" + self.resolution = resolution + self.package = "" + self.saf = "" + self.product_name = "" + self.num_of_columns = 0 + self.num_of_lines = 0 + self.projection_name = "" + self.pcs_def = "" + self.xscale = 0 + self.yscale = 0 + self.ll_lon = 0.0 + self.ll_lat = 0.0 + self.ur_lon = 0.0 + self.ur_lat = 0.0 + self.region_name = "" + self.cfac = 0 + self.lfac = 0 + self.coff = 0 + self.loff = 0 + self.nb_param = 0 + self.gp_sc_id = 0 + self.image_acquisition_time = 0 + self.spectral_channel_id = 0 + self.nominal_product_time = 0 + self.sgs_product_quality = 0 + self.sgs_product_completeness = 0 + self.product_algorithm_version = "" + self.CMa = None + self.CMa_DUST = None + self.CMa_QUALITY = None + self.CMa_VOLCANIC = None + self.shape = None + self.satid = "" + self.qc_straylight = -1 + + def __str__(self): + return ("'%s: shape %s, resolution %sm'" % + (self.name, + self.CMa.shape, + self.resolution)) + + def is_loaded(self): + """Tells if the channel contains loaded data. 
+ """ + return self.filled + + def read(self, filename, calibrate=True): + """Reader for the NWCSAF/MSG cloudtype. Use *filename* to read data. + """ + import h5py + + self.CMa = MsgCloudMaskData() + self.CMa_DUST = MsgCloudMaskData() + self.CMa_QUALITY = MsgCloudMaskData() + self.CMa_VOLCANIC = MsgCloudMaskData() + + h5f = h5py.File(filename, 'r') + # pylint: disable-msg=W0212 + self.package = h5f.attrs["PACKAGE"] + self.saf = h5f.attrs["SAF"] + self.product_name = h5f.attrs["PRODUCT_NAME"] + self.num_of_columns = h5f.attrs["NC"] + self.num_of_lines = h5f.attrs["NL"] + self.projection_name = h5f.attrs["PROJECTION_NAME"] + self.region_name = h5f.attrs["REGION_NAME"] + self.cfac = h5f.attrs["CFAC"] + self.lfac = h5f.attrs["LFAC"] + self.coff = h5f.attrs["COFF"] + self.loff = h5f.attrs["LOFF"] + self.nb_param = h5f.attrs["NB_PARAMETERS"] + self.gp_sc_id = h5f.attrs["GP_SC_ID"] + self.image_acquisition_time = h5f.attrs["IMAGE_ACQUISITION_TIME"] + self.spectral_channel_id = h5f.attrs["SPECTRAL_CHANNEL_ID"] + self.nominal_product_time = h5f.attrs["NOMINAL_PRODUCT_TIME"] + self.sgs_product_quality = h5f.attrs["SGS_PRODUCT_QUALITY"] + self.sgs_product_completeness = h5f.attrs["SGS_PRODUCT_COMPLETENESS"] + self.product_algorithm_version = h5f.attrs["PRODUCT_ALGORITHM_VERSION"] + # pylint: enable-msg=W0212 + # ------------------------ + + # The cloud mask data + print "... read cloud mask data" + h5d = h5f['CMa'] + self.CMa.data = h5d[:, :] + self.CMa.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.CMa.offset = h5d.attrs["OFFSET"] + self.CMa.num_of_lines = h5d.attrs["N_LINES"] + self.CMa.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.CMa.num_of_lines, + self.CMa.num_of_columns) + self.CMa.product = h5d.attrs["PRODUCT"] + self.CMa.id = h5d.attrs["ID"] + self.CMa_palette = _get_palette(h5f, 'CMa') + # ------------------------ + + # The cloud mask dust data + print "... read cloud mask dust data" + h5d = h5f['CMa_DUST'] + self.CMa_DUST.data = h5d[:, :] + self.CMa_DUST.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.CMa_DUST.offset = h5d.attrs["OFFSET"] + self.CMa_DUST.num_of_lines = h5d.attrs["N_LINES"] + self.CMa_DUST.num_of_columns = h5d.attrs["N_COLS"] + self.CMa_DUST.product = h5d.attrs["PRODUCT"] + self.CMa_DUST.id = h5d.attrs["ID"] + self.CMa_DUST_palette = _get_palette(h5f, 'CMa_DUST') + # ------------------------ + + # The cloud mask quality + print "... read cloud mask quality" + h5d = h5f['CMa_QUALITY'] + self.CMa_QUALITY.data = h5d[:, :] + self.CMa_QUALITY.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.CMa_QUALITY.offset = h5d.attrs["OFFSET"] + self.CMa_QUALITY.num_of_lines = h5d.attrs["N_LINES"] + self.CMa_QUALITY.num_of_columns = h5d.attrs["N_COLS"] + self.CMa_QUALITY.product = h5d.attrs["PRODUCT"] + self.CMa_QUALITY.id = h5d.attrs["ID"] + # no palette for QUALITY + # ------------------------ + + h5d = h5f['CMa_VOLCANIC'] + print "... 
read volcanic dust mask" + self.CMa_VOLCANIC.data = h5d[:, :] + self.CMa_VOLCANIC.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.CMa_VOLCANIC.offset = h5d.attrs["OFFSET"] + self.CMa_VOLCANIC.num_of_lines = h5d.attrs["N_LINES"] + self.CMa_VOLCANIC.num_of_columns = h5d.attrs["N_COLS"] + self.CMa_VOLCANIC.product = h5d.attrs["PRODUCT"] + self.CMa_VOLCANIC.id = h5d.attrs["ID"] + self.CMa_VOLCANIC_palette = _get_palette(h5f, 'CMa_VOLCANIC') + # ------------------------ + + h5f.close() + + self.CMa = self.CMa.data + self.CMa_DUST = self.CMa_DUST.data + self.CMa_QUALITY = self.CMa_QUALITY.data + self.CMa_VOLCANIC = self.CMa_VOLCANIC.data + + self.area = get_area_from_file(filename) + + self.filled = True + + def save(self, filename): + """Save the current cloudtype object to hdf *filename*, in pps format. + """ + import h5py + cma = self.convert2pps() + LOG.info("Saving CMa hdf file...") + cma.save(filename) + h5f = h5py.File(filename, mode="a") + h5f.attrs["straylight_contaminated"] = self.qc_straylight + h5f.close() + LOG.info("Saving CMa hdf file done !") + + def project(self, coverage): + """Remaps the NWCSAF/MSG Cloud Type to cartographic map-projection on + area give by a pre-registered area-id. Faster version of msg_remap! + """ + LOG.info("Projecting channel %s..." % (self.name)) + + region = coverage.out_area + dest_area = region.area_id + + retv = MsgCloudMask() + + retv.name = self.name + retv.package = self.package + retv.saf = self.saf + retv.product_name = self.product_name + retv.region_name = dest_area + retv.cfac = self.cfac + retv.lfac = self.lfac + retv.coff = self.coff + retv.loff = self.loff + retv.nb_param = self.nb_param + retv.gp_sc_id = self.gp_sc_id + retv.image_acquisition_time = self.image_acquisition_time + retv.spectral_channel_id = self.spectral_channel_id + retv.nominal_product_time = self.nominal_product_time + retv.sgs_product_quality = self.sgs_product_quality + retv.sgs_product_completeness = self.sgs_product_completeness + retv.product_algorithm_version = self.product_algorithm_version + + retv.CMa = coverage.project_array(self.CMa) + retv.CMa_DUST = coverage.project_array(self.CMa_DUST) + retv.CMa_QUALITY = coverage.project_array(self.CMa_QUALITY) + retv.CMa_VOLCANIC = coverage.project_array(self.CMa_VOLCANIC) + + retv.qc_straylight = self.qc_straylight + retv.region_name = dest_area + retv.area = region + retv.projection_name = region.proj_id + + retv.pcs_def = pcs_def_from_region(region) + + retv.num_of_columns = region.x_size + retv.num_of_lines = region.y_size + retv.xscale = region.pixel_size_x + retv.yscale = region.pixel_size_y + + import pyproj + prj = pyproj.Proj(region.proj4_string) + aex = region.area_extent + lonur, latur = prj(aex[2], aex[3], inverse=True) + lonll, latll = prj(aex[0], aex[1], inverse=True) + retv.ll_lon = lonll + retv.ll_lat = latll + retv.ur_lon = lonur + retv.ur_lat = latur + + self.shape = region.shape + + retv.filled = True + retv.resolution = self.resolution + + return retv + + def convert2pps(self): + """Converts the NWCSAF/MSG Cloud Mask to the PPS format, + in order to have consistency in output format between PPS and MSG. 
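+        (Relies on the PPS `epshdf` module and the PpsCloudMask/pps_luts
+        helpers, which must be available in the environment.)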
+ """ + import epshdf + retv = PpsCloudMask() + retv.region = epshdf.SafRegion() + retv.region.xsize = self.num_of_columns + retv.region.ysize = self.num_of_lines + retv.region.id = self.region_name + retv.region.pcs_id = self.projection_name + + retv.region.pcs_def = pcs_def_from_region(self.area) + retv.region.area_extent = self.area.area_extent + retv.satellite_id = self.satid + + retv.CMa_lut = pps_luts('CMa') + retv.CMa_DUST_lut = pps_luts('CMa_DUST') + retv.CMa_VOLCANIC_lut = pps_luts('CMa_VOLCANIC') + + retv.CMa_des = "MSG SEVIRI Cloud Mask" + retv.CMa_DUST_des = 'MSG SEVIRI Cloud Mask DUST' + retv.CMa_QUALITY_des = 'MSG SEVIRI bitwise quality/processing flags' + retv.CMa_VOLCANIC_des = 'MSG SEVIRI Cloud Mask VOLCANIC' + + retv.CMa = self.CMa.astype('B') + retv.CMa_DUST = self.CMa_DUST.astype('B') + retv.CMa_QUALITY = self.CMa_QUALITY.astype('B') + retv.CMa_VOLCANIC = self.CMa_VOLCANIC.astype('B') + + return retv + + def convert2nordrad(self): + return NordRadCType(self) + +#----------------------------------------------------------------------- + +# ------------------------------------------------------------------ + +class MsgNwcsafData(object): + + """NWCSAF/MSG data layer + """ + + def __init__(self): + self.data = None + self.scaling_factor = 1 + self.offset = 0 + self.num_of_lines = 0 + self.num_of_columns = 0 + self.product = "" + self.id = "" + +class MsgNwcsafClass(mpop.channel.GenericChannel): + + """NWCSAF/MSG data structure as retrieved from HDF5 + file. Resolution sets the nominal resolution of the data. + """ + + def __init__(self, product, resolution=None): + mpop.channel.GenericChannel.__init__(self, product) + self.filled = False + self.name = product + self.var_names = None + self.resolution = resolution + self.package = "" + self.saf = "" + self.product_name = "" + self.num_of_columns = 0 + self.num_of_lines = 0 + self.projection_name = "" + self.pcs_def = "" + self.xscale = 0 + self.yscale = 0 + self.ll_lon = 0.0 + self.ll_lat = 0.0 + self.ur_lon = 0.0 + self.ur_lat = 0.0 + self.region_name = "" + self.cfac = 0 + self.lfac = 0 + self.coff = 0 + self.loff = 0 + self.nb_param = 0 + self.gp_sc_id = 0 + self.image_acquisition_time = 0 + self.spectral_channel_id = 0 + self.nominal_product_time = 0 + self.sgs_product_quality = 0 + self.sgs_product_completeness = 0 + self.product_algorithm_version = "" + + if product == "CloudMask": + self.CMa = None + self.CMa_DUST = None + self.CMa_QUALITY = None + self.CMa_VOLCANIC = None + elif product == "CT": + self.CT = None + self.CT_PHASE = None + self.CT_QUALITY = None + elif product == "CTTH": + self.CTTH_TEMPER = None + self.CTTH_HEIGHT = None + self.CTTH_PRESS = None + self.CTTH_EFFECT = None + self.CTTH_QUALITY = None + elif product == "CRR": + self.crr = None + self.crr_accum = None + self.crr_intensity = None + self.crr_quality = None + self.processing_flags = None + elif product == "PC": + self.probability_1 = None + self.processing_flags = None + elif product == "SPhR": + self.sphr_bl = None + self.sphr_cape = None + self.sphr_diffbl = None + self.sphr_diffhl = None + self.sphr_diffki = None + self.sphr_diffli = None + self.sphr_diffml = None + self.sphr_diffshw = None + self.sphr_difftpw = None + self.sphr_hl = None + self.sphr_ki = None + self.sphr_li = None + self.sphr_ml = None + self.sphr_quality = None + self.sphr_sflag = None + self.sphr_shw = None + self.sphr_tpw = None + elif product == "PCPh": + self.pcph_pc = MNone + self.pcph_quality = None + self.pcph_dataflag = None + self.processing_flags = None + elif 
product =="CRPh": + self.crph_crr = None + self.crph_accum = None + self.crph_iqf = None + self.crph_quality = None + self.crph_dataflag = None + self.processing_flags = None + else: + print "*** ERROR in MsgNWCSAF (nwcsaf_msg.py)" + print " unknown NWCSAF product: ", product + quit() + + self.shape = None + self.satid = "" + self.qc_straylight = -1 + + def __str__(self): + return ("'%s: shape %s, resolution %sm'" % + (self.name, + self.shape, + self.resolution)) + + def is_loaded(self): + """Tells if the channel contains loaded data. + """ + return self.filled + + def read(self, filename, calibrate=True): + """Reader for the NWCSAF/MSG cloudtype. Use *filename* to read data. + """ + import h5py + + if self.name == "CTTH": + self.var_names = ('CTTH_TEMPER', 'CTTH_HEIGHT', 'CTTH_PRESS', 'CTTH_EFFECT', 'CTTH_QUALITY') + elif self.name == "CloudType": + self.var_names = ('CT', 'CT_PHASE', 'CT_QUALITY') + elif self.name == "CloudMask": + self.var_names = ('CMa', 'CMa_DUST', 'CMa_QUALITY', 'CMa_VOLCANIC') + elif self.name == "SPhR": + self.var_names = ('SPhR_BL','SPhR_CAPE','SPhR_HL','SPhR_KI','SPhR_LI','SPhR_ML','SPhR_QUALITY','SPhR_SHW','SPhR_TPW') + else: + print "*** ERROR in MsgNWCSAF read (nwcsaf_msg.py)" + print " unknown NWCSAF product: ", product + quit() + + h5f = h5py.File(filename, 'r') + # pylint: disable-msg=W0212 + self.package = h5f.attrs["PACKAGE"] + self.saf = h5f.attrs["SAF"] + self.product_name = h5f.attrs["PRODUCT_NAME"] + self.num_of_columns = h5f.attrs["NC"] + self.num_of_lines = h5f.attrs["NL"] + self.projection_name = h5f.attrs["PROJECTION_NAME"] + self.region_name = h5f.attrs["REGION_NAME"] + self.cfac = h5f.attrs["CFAC"] + self.lfac = h5f.attrs["LFAC"] + self.coff = h5f.attrs["COFF"] + self.loff = h5f.attrs["LOFF"] + self.nb_param = h5f.attrs["NB_PARAMETERS"] + self.gp_sc_id = h5f.attrs["GP_SC_ID"] + self.image_acquisition_time = h5f.attrs["IMAGE_ACQUISITION_TIME"] + self.spectral_channel_id = h5f.attrs["SPECTRAL_CHANNEL_ID"] + self.nominal_product_time = h5f.attrs["NOMINAL_PRODUCT_TIME"] + self.sgs_product_quality = h5f.attrs["SGS_PRODUCT_QUALITY"] + self.sgs_product_completeness = h5f.attrs["SGS_PRODUCT_COMPLETENESS"] + self.product_algorithm_version = h5f.attrs["PRODUCT_ALGORITHM_VERSION"] + # pylint: enable-msg=W0212 + # ------------------------ + + for var_name in self.var_names: + print "... read hdf5 variable ", var_name + h5d = h5f[var_name] + var1=MsgNwcsafData() + var1.data = h5d[:, :] + var1.scaling_factor = h5d.attrs["SCALING_FACTOR"] + var1.offset = h5d.attrs["OFFSET"] + var1.num_of_lines = h5d.attrs["N_LINES"] + var1.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (var1.num_of_lines, + var1.num_of_columns) + var1.product = h5d.attrs["PRODUCT"] + var1.id = h5d.attrs["ID"] + + # copy temporal var1 to self.var_name + if calibrate: + print "... apply scaling_factor", var1.scaling_factor, " and offset ", var1.offset + setattr(self, var_name, var1.data*var1.scaling_factor + +var1.offset ) + else: + setattr(self, var_name, var1.data) + + # !!! is there a check needed, if the palette exists? !!! + # read 'product'_palette and copy it to self.'product'_palette + setattr(self, var_name+"_palette", _get_palette(h5f, var_name) ) + + h5f.close() + + self.area = get_area_from_file(filename) + + self.filled = True + + def save(self, filename): + """Save the current cloudtype object to hdf *filename*, in pps format. 
+ """ + import h5py + cma = self.convert2pps() + LOG.info("Saving NWCSAF data as hdf file...") + cma.save(filename) + h5f = h5py.File(filename, mode="a") + h5f.attrs["straylight_contaminated"] = self.qc_straylight + h5f.close() + LOG.info("Saving NWCSAF data hdf file done !") + + def project(self, coverage): + """Remaps the NWCSAF/MSG Cloud Type to cartographic map-projection on + area give by a pre-registered area-id. Faster version of msg_remap! + """ + LOG.info("Projecting channel %s..." % (self.name)) + + region = coverage.out_area + dest_area = region.area_id + + retv = MsgNwcsafClass(self.name) + + retv.name = self.name + retv.package = self.package + retv.saf = self.saf + retv.product_name = self.product_name + retv.region_name = dest_area + retv.cfac = self.cfac + retv.lfac = self.lfac + retv.coff = self.coff + retv.loff = self.loff + retv.nb_param = self.nb_param + retv.gp_sc_id = self.gp_sc_id + retv.image_acquisition_time = self.image_acquisition_time + retv.spectral_channel_id = self.spectral_channel_id + retv.nominal_product_time = self.nominal_product_time + retv.sgs_product_quality = self.sgs_product_quality + retv.sgs_product_completeness = self.sgs_product_completeness + retv.product_algorithm_version = self.product_algorithm_version + + # loop for reprojecting data, e.g. retv.CMa = coverage.project_array(self.CMa) + for var_name in self.var_names: + setattr(retv, var_name, coverage.project_array(getattr(self, var_name))) + # !!! BUG !!! copy palette is missing + + retv.qc_straylight = self.qc_straylight + retv.region_name = dest_area + retv.area = region + retv.projection_name = region.proj_id + + retv.pcs_def = pcs_def_from_region(region) + + retv.num_of_columns = region.x_size + retv.num_of_lines = region.y_size + retv.xscale = region.pixel_size_x + retv.yscale = region.pixel_size_y + + import pyproj + prj = pyproj.Proj(region.proj4_string) + aex = region.area_extent + lonur, latur = prj(aex[2], aex[3], inverse=True) + lonll, latll = prj(aex[0], aex[1], inverse=True) + retv.ll_lon = lonll + retv.ll_lat = latll + retv.ur_lon = lonur + retv.ur_lat = latur + + self.shape = region.shape + + retv.filled = True + retv.resolution = self.resolution + + return retv + + def convert2pps(self): + """Converts the NWCSAF/MSG data set to the PPS format, + in order to have consistency in output format between PPS and MSG. + """ + import epshdf + retv = PpsCloudMask() + retv.region = epshdf.SafRegion() + retv.region.xsize = self.num_of_columns + retv.region.ysize = self.num_of_lines + retv.region.id = self.region_name + retv.region.pcs_id = self.projection_name + + retv.region.pcs_def = pcs_def_from_region(self.area) + retv.region.area_extent = self.area.area_extent + retv.satellite_id = self.satid + + # !!! UH: THIS PART IS TO BE DONE BY SOMEBODY WHO USES PPS !!! + # loop for intersting variables + for var_name in self.var_names: + # get look-up tables, e.g. retv.CMa_lut = pps_luts('CMa') + setattr( retv, var_name+"_lut", pps_luts(var_name) ) + # get describing strings, e.g. retv.CMa_des = "MSG SEVIRI Cloud Mask" + setattr( retv, var_name+"_des", pps_description(var_name) ) + + # if not processing flag, get astype, e.g. 
retv.cloudtype = self.cloudtype.astype('B') + if var_name.find("QUALITY") != -1 and var_name.find("flag") != -1: + setattr( retv, var_name, getattr(self, var_name).astype('B') ) + elif var_name=="CT_QUALITY" or var_name=="qualityflag": + retv.qualityflag = ctype_procflags2pps(self.processing_flags) + elif var_name=="CTTH_QUALITY" or var_name=="processingflag": + retv.processingflag = ctth_procflags2pps(self.processing_flags) + elif var_name=="CMa_QUALITY" or var_name=="QUALITY": + print "*** WARNING, no conversion for CMA and SPhR products flags yet!" + # !!! UH: THIS PART IS TO BE DONE BY SOMEBODY WHO USES PPS !!! + + return retv + + def convert2nordrad(self): + return NordRadCType(self) + +#----------------------------------------------------------------------- + +class MsgCloudTypeData(object): + + """NWCSAF/MSG Cloud Type data layer + """ + + def __init__(self): + self.data = None + self.scaling_factor = 1 + self.offset = 0 + self.num_of_lines = 0 + self.num_of_columns = 0 + self.product = "" + self.id = "" + + +class MsgCloudType(mpop.channel.GenericChannel): + + """NWCSAF/MSG Cloud Type data structure as retrieved from HDF5 + file. Resolution sets the nominal resolution of the data. + """ + + def __init__(self): + mpop.channel.GenericChannel.__init__(self, "CloudType") + self.filled = False + self.name = "CloudType" + self.package = "" + self.saf = "" + self.product_name = "" + self.num_of_columns = 0 + self.num_of_lines = 0 + self.projection_name = "" + self.pcs_def = "" + self.xscale = 0 + self.yscale = 0 + self.ll_lon = 0.0 + self.ll_lat = 0.0 + self.ur_lon = 0.0 + self.ur_lat = 0.0 + self.region_name = "" + self.cfac = 0 + self.lfac = 0 + self.coff = 0 + self.loff = 0 + self.nb_param = 0 + self.gp_sc_id = 0 + self.image_acquisition_time = 0 + self.spectral_channel_id = 0 + self.nominal_product_time = 0 + self.sgs_product_quality = 0 + self.sgs_product_completeness = 0 + self.product_algorithm_version = "" + self.cloudtype = None + self.processing_flags = None + self.cloudphase = None + self.shape = None + self.satid = "" + self.qc_straylight = -1 + self.cloudtype_palette = None + self.cloudphase_palette = None + + def __str__(self): + return ("'%s: shape %s, resolution %sm'" % + (self.name, + self.cloudtype.shape, + self.resolution)) + + def is_loaded(self): + """Tells if the channel contains loaded data. + """ + return self.filled + +# ------------------------------------------------------------------ + def read(self, filename, calibrate=True): + """Reader for the NWCSAF/MSG cloudtype. Use *filename* to read data. 
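+        The CT colour palette is read from the file and normalized to the
+        [0, 1] range (division by 255) before being stored in
+        self.cloudtype_palette.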
+ """ + import h5py + + self.cloudtype = MsgCloudTypeData() + self.processing_flags = MsgCloudTypeData() + self.cloudphase = MsgCloudTypeData() + + LOG.debug("Filename = <" + str(filename) + ">") + h5f = h5py.File(filename, 'r') + # pylint: disable-msg=W0212 + self.package = h5f.attrs["PACKAGE"] + self.saf = h5f.attrs["SAF"] + self.product_name = h5f.attrs["PRODUCT_NAME"] + self.num_of_columns = h5f.attrs["NC"] + self.num_of_lines = h5f.attrs["NL"] + self.projection_name = h5f.attrs["PROJECTION_NAME"] + self.region_name = h5f.attrs["REGION_NAME"] + self.cfac = h5f.attrs["CFAC"] + self.lfac = h5f.attrs["LFAC"] + self.coff = h5f.attrs["COFF"] + self.loff = h5f.attrs["LOFF"] + self.nb_param = h5f.attrs["NB_PARAMETERS"] + self.gp_sc_id = h5f.attrs["GP_SC_ID"] + self.image_acquisition_time = h5f.attrs["IMAGE_ACQUISITION_TIME"] + self.spectral_channel_id = h5f.attrs["SPECTRAL_CHANNEL_ID"] + self.nominal_product_time = h5f.attrs["NOMINAL_PRODUCT_TIME"] + self.sgs_product_quality = h5f.attrs["SGS_PRODUCT_QUALITY"] + self.sgs_product_completeness = h5f.attrs["SGS_PRODUCT_COMPLETENESS"] + self.product_algorithm_version = h5f.attrs["PRODUCT_ALGORITHM_VERSION"] + # pylint: enable-msg=W0212 + # ------------------------ + + # The cloudtype data + h5d = h5f['CT'] + self.cloudtype.data = h5d[:, :] + self.cloudtype.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.cloudtype.offset = h5d.attrs["OFFSET"] + self.cloudtype.num_of_lines = h5d.attrs["N_LINES"] + self.cloudtype.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.cloudtype.num_of_lines, + self.cloudtype.num_of_columns) + self.cloudtype.product = h5d.attrs["PRODUCT"] + self.cloudtype.id = h5d.attrs["ID"] + self.cloudtype_palette = _get_palette(h5f, 'CT') / 255.0 + # ------------------------ + + # The cloud phase data + h5d = h5f['CT_PHASE'] + self.cloudphase.data = h5d[:, :] + self.cloudphase.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.cloudphase.offset = h5d.attrs["OFFSET"] + self.cloudphase.num_of_lines = h5d.attrs["N_LINES"] + self.cloudphase.num_of_columns = h5d.attrs["N_COLS"] + self.cloudphase.product = h5d.attrs["PRODUCT"] + self.cloudphase.id = h5d.attrs["ID"] + self.cloudphase_palette = _get_palette(h5f, 'CT_PHASE') + + # ------------------------ + + # The cloudtype processing/quality flags + h5d = h5f['CT_QUALITY'] + self.processing_flags.data = h5d[:, :] + self.processing_flags.scaling_factor = \ + h5d.attrs["SCALING_FACTOR"] + self.processing_flags.offset = h5d.attrs["OFFSET"] + self.processing_flags.num_of_lines = h5d.attrs["N_LINES"] + self.processing_flags.num_of_columns = h5d.attrs["N_COLS"] + self.processing_flags.product = h5d.attrs["PRODUCT"] + self.processing_flags.id = h5d.attrs["ID"] + # ------------------------ + + h5f.close() + + self.cloudtype = self.cloudtype.data + self.cloudphase = self.cloudphase.data + self.processing_flags = self.processing_flags.data + + self.area = get_area_from_file(filename) + + self.filled = True + + def save(self, filename): + """Save the current cloudtype object to hdf *filename*, in pps format. + """ + import h5py + ctype = self.convert2pps() + LOG.info("Saving CType hdf file...") + ctype.save(filename) + h5f = h5py.File(filename, mode="a") + h5f.attrs["straylight_contaminated"] = self.qc_straylight + h5f.close() + LOG.info("Saving CType hdf file done !") + + def project(self, coverage): + """Remaps the NWCSAF/MSG Cloud Type to cartographic map-projection on + area give by a pre-registered area-id. Faster version of msg_remap! + """ + LOG.info("Projecting channel %s..." 
% (self.name)) + + region = coverage.out_area + dest_area = region.area_id + + retv = MsgCloudType() + + retv.name = self.name + retv.package = self.package + retv.saf = self.saf + retv.product_name = self.product_name + retv.region_name = dest_area + retv.cfac = self.cfac + retv.lfac = self.lfac + retv.coff = self.coff + retv.loff = self.loff + retv.nb_param = self.nb_param + retv.gp_sc_id = self.gp_sc_id + retv.image_acquisition_time = self.image_acquisition_time + retv.spectral_channel_id = self.spectral_channel_id + retv.nominal_product_time = self.nominal_product_time + retv.sgs_product_quality = self.sgs_product_quality + retv.sgs_product_completeness = self.sgs_product_completeness + retv.product_algorithm_version = self.product_algorithm_version + + retv.cloudtype = coverage.project_array(self.cloudtype) + retv.cloudtype_palette = self.cloudtype_palette + + retv.cloudphase = coverage.project_array(self.cloudphase) + retv.cloudphase_palette = self.cloudphase_palette + + retv.processing_flags = \ + coverage.project_array(self.processing_flags) + + retv.qc_straylight = self.qc_straylight + retv.region_name = dest_area + retv.area = region + retv.projection_name = region.proj_id + + retv.pcs_def = pcs_def_from_region(region) + + retv.num_of_columns = region.x_size + retv.num_of_lines = region.y_size + retv.xscale = region.pixel_size_x + retv.yscale = region.pixel_size_y + + import pyproj + prj = pyproj.Proj(region.proj4_string) + aex = region.area_extent + lonur, latur = prj(aex[2], aex[3], inverse=True) + lonll, latll = prj(aex[0], aex[1], inverse=True) + retv.ll_lon = lonll + retv.ll_lat = latll + retv.ur_lon = lonur + retv.ur_lat = latur + + self.shape = region.shape + + retv.filled = True + retv.resolution = self.resolution + + return retv + +# is it necessary? + +# def convert2nordrad(self): +# return NordRadCType(self) + + +class MsgCTTHData(object): + + """CTTH data object. + """ + + def __init__(self): + self.data = None + self.scaling_factor = 1 + self.offset = 0 + self.num_of_lines = 0 + self.num_of_columns = 0 + self.product = "" + self.id = "" + + +class MsgCTTH(mpop.channel.GenericChannel): + + """CTTH channel. + """ + + def __init__(self, resolution=None): + mpop.channel.GenericChannel.__init__(self, "CTTH") + self.filled = False + self.name = "CTTH" + self.resolution = resolution + self.package = "" + self.saf = "" + self.product_name = "" + self.num_of_columns = 0 + self.num_of_lines = 0 + self.projection_name = "" + self.region_name = "" + self.cfac = 0 + self.lfac = 0 + self.coff = 0 + self.loff = 0 + self.nb_param = 0 + self.gp_sc_id = 0 + self.image_acquisition_time = 0 + self.spectral_channel_id = 0 + self.nominal_product_time = 0 + self.sgs_product_quality = 0 + self.sgs_product_completeness = 0 + self.product_algorithm_version = "" + self.cloudiness = None # Effective cloudiness + self.processing_flags = None + self.height = None + self.temperature = None + self.pressure = None + self.satid = "" + + def __str__(self): + return ("'%s: shape %s, resolution %sm'" % + (self.name, + self.shape, + self.resolution)) + + def is_loaded(self): + """Tells if the channel contains loaded data. 
+ """ + return self.filled + + def read(self, filename, calibrate=True): + import h5py + + self.cloudiness = MsgCTTHData() # Effective cloudiness + self.temperature = MsgCTTHData() + self.height = MsgCTTHData() + self.pressure = MsgCTTHData() + self.processing_flags = MsgCTTHData() + + h5f = h5py.File(filename, 'r') + + # The header + # pylint: disable-msg=W0212 + self.package = h5f.attrs["PACKAGE"] + self.saf = h5f.attrs["SAF"] + self.product_name = h5f.attrs["PRODUCT_NAME"] + self.num_of_columns = h5f.attrs["NC"] + self.num_of_lines = h5f.attrs["NL"] + self.projection_name = h5f.attrs["PROJECTION_NAME"] + self.region_name = h5f.attrs["REGION_NAME"] + self.cfac = h5f.attrs["CFAC"] + self.lfac = h5f.attrs["LFAC"] + self.coff = h5f.attrs["COFF"] + self.loff = h5f.attrs["LOFF"] + self.nb_param = h5f.attrs["NB_PARAMETERS"] + self.gp_sc_id = h5f.attrs["GP_SC_ID"] + self.image_acquisition_time = h5f.attrs["IMAGE_ACQUISITION_TIME"] + self.spectral_channel_id = h5f.attrs["SPECTRAL_CHANNEL_ID"] + self.nominal_product_time = h5f.attrs["NOMINAL_PRODUCT_TIME"] + self.sgs_product_quality = h5f.attrs["SGS_PRODUCT_QUALITY"] + self.sgs_product_completeness = h5f.attrs["SGS_PRODUCT_COMPLETENESS"] + self.product_algorithm_version = h5f.attrs["PRODUCT_ALGORITHM_VERSION"] + # pylint: enable-msg=W0212 + # ------------------------ + + # The CTTH cloudiness data + h5d = h5f['CTTH_EFFECT'] + self.cloudiness.data = h5d[:, :] + self.cloudiness.scaling_factor = \ + h5d.attrs["SCALING_FACTOR"] + self.cloudiness.offset = h5d.attrs["OFFSET"] + self.cloudiness.num_of_lines = h5d.attrs["N_LINES"] + self.cloudiness.num_of_columns = h5d.attrs["N_COLS"] + self.cloudiness.product = h5d.attrs["PRODUCT"] + self.cloudiness.id = h5d.attrs["ID"] + +# self.cloudiness.data = np.ma.masked_equal(self.cloudiness.data, 255) +# self.cloudiness.data = np.ma.masked_equal(self.cloudiness.data, 0) + self.cloudiness_palette = _get_palette(h5f, 'CTTH_EFFECT') / 255.0 + + # ------------------------ + + # The CTTH temperature data + h5d = h5f['CTTH_TEMPER'] + self.temperature.data = h5d[:, :] + self.temperature.scaling_factor = \ + h5d.attrs["SCALING_FACTOR"] + self.temperature.offset = h5d.attrs["OFFSET"] + self.temperature.num_of_lines = h5d.attrs["N_LINES"] + self.shape = (self.temperature.num_of_lines, + self.temperature.num_of_columns) + self.temperature.num_of_columns = h5d.attrs["N_COLS"] + self.temperature.product = h5d.attrs["PRODUCT"] + self.temperature.id = h5d.attrs["ID"] + +# self.temperature.data = np.ma.masked_equal(self.temperature.data, 0) + if calibrate: + self.temperature = (self.temperature.data * + self.temperature.scaling_factor + + self.temperature.offset) + else: + self.temperature = self.temperature.data + self.temperature_palette = _get_palette(h5f, 'CTTH_TEMPER') / 255.0 + + # ------------------------ + + # The CTTH pressure data + h5d = h5f['CTTH_PRESS'] + self.pressure.data = h5d[:, :] + self.pressure.scaling_factor = \ + h5d.attrs["SCALING_FACTOR"] + self.pressure.offset = h5d.attrs["OFFSET"] + self.pressure.num_of_lines = h5d.attrs["N_LINES"] + self.pressure.num_of_columns = h5d.attrs["N_COLS"] + self.pressure.product = h5d.attrs["PRODUCT"] + self.pressure.id = h5d.attrs["ID"] + +# self.pressure.data = np.ma.masked_equal(self.pressure.data, 255) +# self.pressure.data = np.ma.masked_equal(self.pressure.data, 0) + if calibrate: + self.pressure = (self.pressure.data * + self.pressure.scaling_factor + + self.pressure.offset) + else: + self.pressure = self.pressure.data + self.pressure_palette = _get_palette(h5f, 
'CTTH_PRESS') / 255.0 + + # ------------------------ + + # The CTTH height data + h5d = h5f['CTTH_HEIGHT'] + self.height.data = h5d[:, :] + self.height.scaling_factor = \ + h5d.attrs["SCALING_FACTOR"] + self.height.offset = h5d.attrs["OFFSET"] + self.height.num_of_lines = h5d.attrs["N_LINES"] + self.height.num_of_columns = h5d.attrs["N_COLS"] + self.height.product = h5d.attrs["PRODUCT"] + self.height.id = h5d.attrs["ID"] + +# self.height.data = np.ma.masked_equal(self.height.data, 255) +# self.height.data = np.ma.masked_equal(self.height.data, 0) + if calibrate: + self.height = (self.height.data * + self.height.scaling_factor + + self.height.offset) + else: + self.height = self.height.data + self.height_palette = _get_palette(h5f, 'CTTH_HEIGHT') / 255.0 + + # ------------------------ + + # The CTTH processing/quality flags + h5d = h5f['CTTH_QUALITY'] + self.processing_flags.data = h5d[:, :] + self.processing_flags.scaling_factor = \ + h5d.attrs["SCALING_FACTOR"] + self.processing_flags.offset = h5d.attrs["OFFSET"] + self.processing_flags.num_of_lines = \ + h5d.attrs["N_LINES"] + self.processing_flags.num_of_columns = \ + h5d.attrs["N_COLS"] + self.processing_flags.product = h5d.attrs["PRODUCT"] + self.processing_flags.id = h5d.attrs["ID"] + + self.processing_flags = \ + np.ma.masked_equal(self.processing_flags.data, 0) + + h5f.close() + + self.shape = self.height.shape + + self.area = get_area_from_file(filename) + + self.filled = True + + def save(self, filename): + """Save the current CTTH channel to HDF5 format. + """ + ctth = self.convert2pps() + LOG.info("Saving CTTH hdf file...") + ctth.save(filename) + LOG.info("Saving CTTH hdf file done !") + + def project(self, coverage): + """Project the current CTTH channel along the *coverage* + """ + dest_area = coverage.out_area + dest_area_id = dest_area.area_id + + retv = MsgCTTH() + + retv.temperature = coverage.project_array(self.temperature) + retv.height = coverage.project_array(self.height) + retv.pressure = coverage.project_array(self.pressure) + #retv.cloudiness = coverage.project_array(self.cloudiness) + retv.processing_flags = \ + coverage.project_array(self.processing_flags) + + retv.height_palette = self.height_palette + retv.pressure_palette = self.pressure_palette + retv.temperature_palette = self.temperature_palette + + retv.area = dest_area + retv.region_name = dest_area_id + retv.projection_name = dest_area.proj_id + retv.num_of_columns = dest_area.x_size + retv.num_of_lines = dest_area.y_size + + retv.shape = dest_area.shape + + retv.name = self.name + retv.resolution = self.resolution + retv.filled = True + + return retv + +# ---------------------------------------- + + +class MsgPCData(object): + + """NWCSAF/MSG Precipitating Clouds data layer + """ + + def __init__(self): + self.data = None + self.scaling_factor = 1 + self.offset = 0 + self.num_of_lines = 0 + self.num_of_columns = 0 + self.product = "" + self.id = "" + + +class MsgPC(mpop.channel.GenericChannel): + + """NWCSAF/MSG Precipitating Clouds data structure as retrieved from HDF5 + file. Resolution sets the nominal resolution of the data. 
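+
+    The reader fills two datasets from the HDF5 file: PC_PROB1 (precipitation
+    probability, optionally calibrated with its scaling factor and offset)
+    and PC_QUALITY (processing/quality flags).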
+ """ + + def __init__(self): + mpop.channel.GenericChannel.__init__(self, "PC") + self.filled = False + self.name = "PC" + self.package = "" + self.saf = "" + self.product_name = "" + self.num_of_columns = 0 + self.num_of_lines = 0 + self.projection_name = "" + self.pcs_def = "" + self.xscale = 0 + self.yscale = 0 + self.ll_lon = 0.0 + self.ll_lat = 0.0 + self.ur_lon = 0.0 + self.ur_lat = 0.0 + self.region_name = "" + self.cfac = 0 + self.lfac = 0 + self.coff = 0 + self.loff = 0 + self.nb_param = 0 + self.gp_sc_id = 0 + self.image_acquisition_time = 0 + self.spectral_channel_id = 0 + self.nominal_product_time = 0 + self.sgs_product_quality = 0 + self.sgs_product_completeness = 0 + self.product_algorithm_version = "" + self.probability_1 = None + self.processing_flags = None + self.shape = None + self.satid = "" + self.qc_straylight = -1 + + def __str__(self): + return ("'%s: shape %s, resolution %sm'" % + (self.name, + self.probability_1.shape, + self.resolution)) + + def is_loaded(self): + """Tells if the channel contains loaded data. + """ + return self.filled + +# ------------------------------------------------------------------ + def read(self, filename, calibrate=True): + """Reader for the NWCSAF/MSG precipitating clouds. Use *filename* to read data. + """ + import h5py + + self.probability_1 = MsgPCData() + self.processing_flags = MsgPCData() + + h5f = h5py.File(filename, 'r') + # pylint: disable-msg=W0212 + self.package = h5f.attrs["PACKAGE"] + self.saf = h5f.attrs["SAF"] + self.product_name = h5f.attrs["PRODUCT_NAME"] + self.num_of_columns = h5f.attrs["NC"] + self.num_of_lines = h5f.attrs["NL"] + self.projection_name = h5f.attrs["PROJECTION_NAME"] + self.region_name = h5f.attrs["REGION_NAME"] + self.cfac = h5f.attrs["CFAC"] + self.lfac = h5f.attrs["LFAC"] + self.coff = h5f.attrs["COFF"] + self.loff = h5f.attrs["LOFF"] + self.nb_param = h5f.attrs["NB_PARAMETERS"] + self.gp_sc_id = h5f.attrs["GP_SC_ID"] + self.image_acquisition_time = h5f.attrs["IMAGE_ACQUISITION_TIME"] + self.spectral_channel_id = h5f.attrs["SPECTRAL_CHANNEL_ID"] + self.nominal_product_time = h5f.attrs["NOMINAL_PRODUCT_TIME"] + self.sgs_product_quality = h5f.attrs["SGS_PRODUCT_QUALITY"] + self.sgs_product_completeness = h5f.attrs["SGS_PRODUCT_COMPLETENESS"] + self.product_algorithm_version = h5f.attrs["PRODUCT_ALGORITHM_VERSION"] + # pylint: enable-msg=W0212 + # ------------------------ + + # The precipitating clouds data + h5d = h5f['PC_PROB1'] + self.probability_1.data = h5d[:, :] + self.probability_1.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.probability_1.offset = h5d.attrs["OFFSET"] + self.probability_1.num_of_lines = h5d.attrs["N_LINES"] + self.probability_1.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.probability_1.num_of_lines, + self.probability_1.num_of_columns) + self.probability_1.product = h5d.attrs["PRODUCT"] + self.probability_1.id = h5d.attrs["ID"] + if calibrate: + self.probability_1 = (self.probability_1.data * + self.probability_1.scaling_factor + + self.probability_1.offset) + else: + self.probability_1 = self.probability_1.data + self.probability_1_palette = _get_palette(h5f, 'PC_PROB1') / 255.0 + + # ------------------------ + + # The PC processing/quality flags + h5d = h5f['PC_QUALITY'] + self.processing_flags.data = h5d[:, :] + self.processing_flags.scaling_factor = \ + h5d.attrs["SCALING_FACTOR"] + self.processing_flags.offset = h5d.attrs["OFFSET"] + self.processing_flags.num_of_lines = h5d.attrs["N_LINES"] + self.processing_flags.num_of_columns = h5d.attrs["N_COLS"] 
+ self.processing_flags.product = h5d.attrs["PRODUCT"] + self.processing_flags.id = h5d.attrs["ID"] + self.processing_flags = np.ma.masked_equal( + self.processing_flags.data, 0) + + # ------------------------ + h5f.close() + + self.area = get_area_from_file(filename) + + self.filled = True + + + def project(self, coverage): + """Project the current PC channel along the *coverage* + """ + dest_area = coverage.out_area + dest_area_id = dest_area.area_id + + retv = MsgPC() + + retv.probability_1 = coverage.project_array(self.probability_1) + retv.processing_flags = \ + coverage.project_array(self.processing_flags) + + retv.probability_1_palette = self.probability_1_palette + + retv.area = dest_area + retv.region_name = dest_area_id + retv.projection_name = dest_area.proj_id + retv.num_of_columns = dest_area.x_size + retv.num_of_lines = dest_area.y_size + + retv.shape = dest_area.shape + + retv.name = self.name + retv.resolution = self.resolution + retv.filled = True + + return retv + + +# ------------------------------------------------------------------ + + +def get_bit_from_flags(arr, nbit): + """I don't know what this function does. + """ + res = np.bitwise_and(np.right_shift(arr, nbit), 1) + return res.astype('b') + +# NEU Anfang NEW Beginn PyTroll-Workshop Kopenhagen 2014 + +class MsgCRRData(object): + + """NWCSAF/MSG Convective Rain Rate data layer + """ + + def __init__(self): + self.data = None + self.scaling_factor = 1 + self.offset = 0 + self.num_of_lines = 0 + self.num_of_columns = 0 + self.product = "" + self.id = "" + + +class MsgCRR(mpop.channel.GenericChannel): + + """NWCSAF/MSG Convective Rain Rate data structure as retrieved from HDF5 + file. Resolution sets the nominal resolution of the data. + """ + + def __init__(self): + mpop.channel.GenericChannel.__init__(self, "CRR") + self.filled = False + self.name = "CRR" +# self.resolution = resolution + self.package = "" + self.saf = "" + self.product_name = "" + self.num_of_columns = 0 + self.num_of_lines = 0 + self.projection_name = "" + self.pcs_def = "" + self.xscale = 0 + self.yscale = 0 + self.ll_lon = 0.0 + self.ll_lat = 0.0 + self.ur_lon = 0.0 + self.ur_lat = 0.0 + self.region_name = "" + self.cfac = 0 + self.lfac = 0 + self.coff = 0 + self.loff = 0 + self.nb_param = 0 + self.gp_sc_id = 0 + self.image_acquisition_time = 0 + self.spectral_channel_id = 0 + self.nominal_product_time = 0 + self.sgs_product_quality = 0 + self.sgs_product_completeness = 0 + self.product_algorithm_version = "" + self.crr = None + self.crr_accum = None + self.crr_intensity = None + self.crr_quality = None + self.crr_dataflag = None + self.processing_flags = None + self.shape = None + self.satid = "" + self.qc_straylight = -1 + self.crr_palette = None + self.crr_accum_palette = None + self.crr_intensity_palette = None + self.crr_quality_palette = None + + def __str__(self): + return ("'%s: shape %s, resolution %sm'" % + (self.name, + self.crr.shape, + self.resolution)) + + def is_loaded(self): + """Tells if the channel contains loaded data. + """ + return self.filled + +# ------------------------------------------------------------------ + def read(self, filename, calibrate=True): + """Reader for the . Use *filename* to read data. 
+ """ + import h5py + + self.crr = MsgCRRData() + self.crr_accum = MsgCRRData() + self.crr_intensity = MsgCRRData() + self.crr_quality = MsgCRRData() + self.processing_flags = MsgCRRData() + + LOG.debug("Filename = <" + str(filename) + ">") + h5f = h5py.File(filename, 'r') + # pylint: disable-msg=W0212 + self.package = h5f.attrs["PACKAGE"] + self.saf = h5f.attrs["SAF"] + self.product_name = h5f.attrs["PRODUCT_NAME"] + self.num_of_columns = h5f.attrs["NC"] + self.num_of_lines = h5f.attrs["NL"] + self.projection_name = h5f.attrs["PROJECTION_NAME"] + self.region_name = h5f.attrs["REGION_NAME"] + self.cfac = h5f.attrs["CFAC"] + self.lfac = h5f.attrs["LFAC"] + self.coff = h5f.attrs["COFF"] + self.loff = h5f.attrs["LOFF"] + self.nb_param = h5f.attrs["NB_PARAMETERS"] + self.gp_sc_id = h5f.attrs["GP_SC_ID"] + self.image_acquisition_time = h5f.attrs["IMAGE_ACQUISITION_TIME"] + self.spectral_channel_id = h5f.attrs["SPECTRAL_CHANNEL_ID"] + self.nominal_product_time = h5f.attrs["NOMINAL_PRODUCT_TIME"] + self.sgs_product_quality = h5f.attrs["SGS_PRODUCT_QUALITY"] + self.sgs_product_completeness = h5f.attrs["SGS_PRODUCT_COMPLETENESS"] + self.product_algorithm_version = h5f.attrs["PRODUCT_ALGORITHM_VERSION"] + # pylint: enable-msg=W0212 + # ------------------------ + + # The CRR data + h5d = h5f['CRR'] + self.crr.data = h5d[:, :] + self.crr.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.crr.offset = h5d.attrs["OFFSET"] + self.crr.num_of_lines = h5d.attrs["N_LINES"] + self.crr.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.crr.num_of_lines, + self.crr.num_of_columns) + self.crr.product = h5d.attrs["PRODUCT"] + self.crr.id = h5d.attrs["ID"] + if calibrate: + self.crr = (self.crr.data * + self.crr.scaling_factor + + self.crr.offset) + else: + self.crr = self.crr.data + self.crr_palette = _get_palette(h5f, 'CRR') / 255.0 + + # ------------------------ + + # The CRR ACCUM data + h5d = h5f['CRR_ACCUM'] + self.crr_accum.data = h5d[:, :] + self.crr_accum.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.crr_accum.offset = h5d.attrs["OFFSET"] + self.crr_accum.num_of_lines = h5d.attrs["N_LINES"] + self.crr_accum.num_of_columns = h5d.attrs["N_COLS"] + self.crr_accum.product = h5d.attrs["PRODUCT"] + self.crr_accum.id = h5d.attrs["ID"] + if calibrate: + self.crr_accum = (self.crr_accum.data * + self.crr_accum.scaling_factor + + self.crr_accum.offset) + else: + self.crr_accum = self.crr_accum.data + self.crr_accum_palette = _get_palette(h5f, 'CRR_ACCUM') / 255.0 + + # ------------------------ + + # The CRR Intensity data + h5d = h5f['CRR_INTENSITY'] + self.crr_intensity.data = h5d[:, :] + self.crr_intensity.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.crr_intensity.offset = h5d.attrs["OFFSET"] + self.crr_intensity.num_of_lines = h5d.attrs["N_LINES"] + self.crr_intensity.num_of_columns = h5d.attrs["N_COLS"] + self.crr_intensity.product = h5d.attrs["PRODUCT"] + self.crr_intensity.id = h5d.attrs["ID"] + if calibrate: + self.crr_intensity = (self.crr_intensity.data * + self.crr_intensity.scaling_factor + + self.crr_intensity.offset) + else: + self.crr_intensity = self.crr_intensity.data + self.crr_intensity_palette = _get_palette(h5f, 'CRR_INTENSITY') / 255.0 + + # ------------------------ + + # The CRR quality data + h5d = h5f['CRR_QUALITY'] + self.crr_quality.data = h5d[:, :] + self.crr_quality.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.crr_quality.offset = h5d.attrs["OFFSET"] + self.crr_quality.num_of_lines = h5d.attrs["N_LINES"] + self.crr_quality.num_of_columns = h5d.attrs["N_COLS"] 
+ self.crr_quality.product = h5d.attrs["PRODUCT"] + self.crr_quality.id = h5d.attrs["ID"] + if calibrate: + self.crr_quality = (self.crr_quality.data * + self.crr_quality.scaling_factor + + self.crr_quality.offset) + else: + self.crr_quality = self.crr_quality.data + self.crr_quality_palette = _get_palette(h5f, 'CRR_QUALITY') + # ------------------------ + + h5f.close() + + #self.crr = self.crr.data + #self.crr_accum = self.crr_accum.data + #self.crr_intensity = self.crr_intensity.data + #self.crr_quality = self.crr_quality.data + #self.processing_flags = self.processing_flags.data + + self.area = get_area_from_file(filename) + + self.filled = True + + def save(self, filename): + """Save the current cloudtype object to hdf *filename*, in pps format. + """ + import h5py + ctype = self.convert2pps() + LOG.info("Saving CRR hdf file...") + ctype.save(filename) + h5f = h5py.File(filename, mode="a") + h5f.attrs["straylight_contaminated"] = self.qc_straylight + h5f.close() + LOG.info("Saving CRR hdf file done !") + + def project(self, coverage): + """Remaps the NWCSAF/MSG CRR to cartographic map-projection on + area give by a pre-registered area-id. Faster version of msg_remap! + """ + LOG.info("Projecting channel %s..." % (self.name)) + + region = coverage.out_area + dest_area = region.area_id + + retv = MsgCRR() + + retv.name = self.name + retv.package = self.package + retv.saf = self.saf + retv.product_name = self.product_name + retv.region_name = dest_area + retv.cfac = self.cfac + retv.lfac = self.lfac + retv.coff = self.coff + retv.loff = self.loff + retv.nb_param = self.nb_param + retv.gp_sc_id = self.gp_sc_id + retv.image_acquisition_time = self.image_acquisition_time + retv.spectral_channel_id = self.spectral_channel_id + retv.nominal_product_time = self.nominal_product_time + retv.sgs_product_quality = self.sgs_product_quality + retv.sgs_product_completeness = self.sgs_product_completeness + retv.product_algorithm_version = self.product_algorithm_version + + retv.crr = coverage.project_array(self.crr) + retv.crr_palette = self.crr_palette + + retv.crr_accum = coverage.project_array(self.crr_accum) + retv.crr_accum_palette = self.crr_accum_palette + + retv.crr_intensity = coverage.project_array(self.crr_intensity) + retv.crr_intensity_palette = self.crr_intensity_palette + + retv.crr_quality = coverage.project_array(self.crr_quality) + retv.crr_quality_palette = self.crr_quality_palette + + #retv.processing_flags = \ + # coverage.project_array(self.processing_flags) + + retv.qc_straylight = self.qc_straylight + retv.region_name = dest_area + retv.area = region + retv.projection_name = region.proj_id + + retv.pcs_def = pcs_def_from_region(region) + + retv.num_of_columns = region.x_size + retv.num_of_lines = region.y_size + retv.xscale = region.pixel_size_x + retv.yscale = region.pixel_size_y + + import pyproj + prj = pyproj.Proj(region.proj4_string) + aex = region.area_extent + lonur, latur = prj(aex[2], aex[3], inverse=True) + lonll, latll = prj(aex[0], aex[1], inverse=True) + retv.ll_lon = lonll + retv.ll_lat = latll + retv.ur_lon = lonur + retv.ur_lat = latur + + self.shape = region.shape + + retv.filled = True + retv.resolution = self.resolution + + return retv + + +# def convert2nordrad(self): +# return NordRadCType(self) + +class MsgSPhRData(object): + + """NWCSAF/MSG Convective Rain Rate data layer + """ + + def __init__(self): + self.data = None + self.scaling_factor = 1 + self.offset = 0 + self.num_of_lines = 0 + self.num_of_columns = 0 + self.product = "" + self.id = "" + + 
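The project() methods in these readers all repeat the same corner-coordinate computation: the target area extent is given in projection metres, and an inverse proj transformation recovers geographic longitude/latitude for the lower-left and upper-right corners. A minimal standalone sketch of that step follows; corner_lonlats and its argument names are illustrative and not part of the patch:

import pyproj

def corner_lonlats(proj4_string, area_extent):
    """Return (ll_lon, ll_lat, ur_lon, ur_lat) for a projected area extent."""
    prj = pyproj.Proj(proj4_string)
    xll, yll, xur, yur = area_extent  # projection coordinates in metres
    ll_lon, ll_lat = prj(xll, yll, inverse=True)  # lower-left corner
    ur_lon, ur_lat = prj(xur, yur, inverse=True)  # upper-right corner
    return ll_lon, ll_lat, ur_lon, ur_lat

Called as corner_lonlats(region.proj4_string, region.area_extent), this yields the same values that are assigned to retv.ll_lon, retv.ll_lat, retv.ur_lon and retv.ur_lat in the project() methods.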
+class MsgSPhR(mpop.channel.GenericChannel): + + """NWCSAF/MSG SPhR data structure as retrieved from HDF5 + file. Resolution sets the nominal resolution of the data. + Palette now missing + """ + + def __init__(self): + mpop.channel.GenericChannel.__init__(self, "SPhR") + self.filled = False + self.name = "SPhR" +# self.resolution = resolution + self.package = "" + self.saf = "" + self.product_name = "" + self.num_of_columns = 0 + self.num_of_lines = 0 + self.projection_name = "" + self.pcs_def = "" + self.xscale = 0 + self.yscale = 0 + self.ll_lon = 0.0 + self.ll_lat = 0.0 + self.ur_lon = 0.0 + self.ur_lat = 0.0 + self.region_name = "" + self.cfac = 0 + self.lfac = 0 + self.coff = 0 + self.loff = 0 + self.nb_param = 0 + self.gp_sc_id = 0 + self.image_acquisition_time = 0 + self.spectral_channel_id = 0 + self.nominal_product_time = 0 + self.sgs_product_quality = 0 + self.sgs_product_completeness = 0 + self.product_algorithm_version = "" + self.sphr = None + self.sphr_bl = None + self.sphr_cape = None + self.sphr_diffbl = None + self.sphr_diffhl = None + self.sphr_diffki = None + self.sphr_diffli = None + self.sphr_diffml = None + self.sphr_diffshw = None + self.sphr_difftpw = None + self.sphr_hl = None + self.sphr_ki = None + self.sphr_li = None + self.sphr_ml = None + self.sphr_quality = None + self.sphr_sflag = None + self.sphr_shw = None + self.sphr_tpw = None + self.processing_flags = None + self.shape = None + self.satid = "" + self.qc_straylight = -1 + self.sphr = None + self.sphr_bl_palette = None + self.sphr_cape_palette = None + self.sphr_diffbl_palette = None + self.sphr_diffhl_palette = None + self.sphr_diffki_palette = None + self.sphr_diffli_palette = None + self.sphr_diffml_palette = None + self.sphr_diffshw_palette = None + self.sphr_difftpw_palette = None + self.sphr_hl_palette = None + self.sphr_ki_palette = None + self.sphr_li_palette = None + self.sphr_ml_palette = None + self.sphr_quality_palette = None + self.sphr_sflag_palette = None + self.sphr_shw_palette = None + self.sphr_tpw_palette = None + + def __str__(self): + return ("'%s: shape %s, resolution %sm'" % + (self.name, + self.sphr_bl.shape, + self.resolution)) + + def is_loaded(self): + """Tells if the channel contains loaded data. + """ + return self.filled + +# ------------------------------------------------------------------ + def read(self, filename, calibrate=True): + """Reader for the . Use *filename* to read data. + """ + import h5py + +# Erste Zeile notwendig? 
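        # Calibration convention for the SPhR_* datasets read below, as
        # implemented in this reader (not taken from NWCSAF documentation):
        # when calibrate=True a boolean mask keeps raw counts in the nominally
        # valid range (8 <= raw <= 128 for most fields, raw > 128 for CAPE,
        # raw != 0 for QUALITY), and the result is
        #     physical = mask * (raw * SCALING_FACTOR + OFFSET)
        # so pixels outside the valid range end up as 0 rather than as masked
        # values.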
+ self.sphr = MsgSPhRData() + self.sphr_bl = MsgSPhRData() + self.sphr_cape = MsgSPhRData() + self.sphr_diffbl = MsgSPhRData() + self.sphr_diffhl = MsgSPhRData() + self.sphr_diffki = MsgSPhRData() + self.sphr_diffli = MsgSPhRData() + self.sphr_diffml = MsgSPhRData() + self.sphr_diffshw = MsgSPhRData() + self.sphr_difftpw = MsgSPhRData() + self.sphr_hl = MsgSPhRData() + self.sphr_ki = MsgSPhRData() + self.sphr_li = MsgSPhRData() + self.sphr_ml = MsgSPhRData() + self.sphr_quality = MsgSPhRData() + self.sphr_sflag = MsgSPhRData() + self.sphr_shw = MsgSPhRData() + self.sphr_tpw = MsgSPhRData() + + self.processing_flags = MsgSPhRData() + + LOG.debug("Filename = <" + str(filename) + ">") + h5f = h5py.File(filename, 'r') + # pylint: disable-msg=W0212 + self.package = h5f.attrs["PACKAGE"] + self.saf = h5f.attrs["SAF"] + self.product_name = h5f.attrs["PRODUCT_NAME"] + self.num_of_columns = h5f.attrs["NC"] + self.num_of_lines = h5f.attrs["NL"] + self.projection_name = h5f.attrs["PROJECTION_NAME"] + self.region_name = h5f.attrs["REGION_NAME"] + self.cfac = h5f.attrs["CFAC"] + self.lfac = h5f.attrs["LFAC"] + self.coff = h5f.attrs["COFF"] + self.loff = h5f.attrs["LOFF"] + self.nb_param = h5f.attrs["NB_PARAMETERS"] + self.gp_sc_id = h5f.attrs["GP_SC_ID"] + self.image_acquisition_time = h5f.attrs["IMAGE_ACQUISITION_TIME"] + self.spectral_channel_id = h5f.attrs["SPECTRAL_CHANNEL_ID"] + self.nominal_product_time = h5f.attrs["NOMINAL_PRODUCT_TIME"] + self.sgs_product_quality = h5f.attrs["SGS_PRODUCT_QUALITY"] + self.sgs_product_completeness = h5f.attrs["SGS_PRODUCT_COMPLETENESS"] + self.product_algorithm_version = h5f.attrs["PRODUCT_ALGORITHM_VERSION"] + # pylint: enable-msg=W0212 + # ------------------------ + + # The SPhR BL data + h5d = h5f['SPhR_BL'] + self.sphr_bl.data = h5d[:, :] + self.sphr_bl.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_bl.offset = h5d.attrs["OFFSET"] + self.sphr_bl.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_bl.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_bl.num_of_lines, + self.sphr_bl.num_of_columns) + self.sphr_bl.product = h5d.attrs["PRODUCT"] + self.sphr_bl.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_bl.data ) * ( self.sphr_bl.data <= 128 ) + # apply scaling factor and offset + self.sphr_bl = mask * (self.sphr_bl.data * + self.sphr_bl.scaling_factor + + self.sphr_bl.offset) + else: + self.sphr_bl = self.sphr_bl.data + self.sphr_bl_palette = _get_palette(h5f, 'SPhR_BL') / 255.0 + + # The SPhR Cape data + h5d = h5f['SPhR_CAPE'] + self.sphr_cape.data = h5d[:, :] + self.sphr_cape.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_cape.offset = h5d.attrs["OFFSET"] + self.sphr_cape.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_cape.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_cape.num_of_lines, + self.sphr_cape.num_of_columns) + self.sphr_cape.product = h5d.attrs["PRODUCT"] + + self.sphr_cape.id = h5d.attrs["ID"] + if calibrate: + mask = ( 128 < self.sphr_cape.data ) + # apply scaling factor and offset + self.sphr_cape = mask * (self.sphr_cape.data * + self.sphr_cape.scaling_factor + + self.sphr_cape.offset) + else: + self.sphr_cape = self.sphr_cape.data + #self.sphr_cape_palette = _get_palette(h5f, 'SPhR_CAPE') / 255.0 + + # The SPhR DIFFBL data + h5d = h5f['SPhR_DIFFBL'] + self.sphr_diffbl.data = h5d[:, :] + self.sphr_diffbl.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_diffbl.offset = h5d.attrs["OFFSET"] + self.sphr_diffbl.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_diffbl.num_of_columns 
= h5d.attrs["N_COLS"] + self.shape = (self.sphr_diffbl.num_of_lines, + self.sphr_diffbl.num_of_columns) + self.sphr_diffbl.product = h5d.attrs["PRODUCT"] + self.sphr_diffbl.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_diffbl.data ) * ( self.sphr_diffbl.data <= 128 ) + # apply scaling factor and offset + self.sphr_diffbl = mask * (self.sphr_diffbl.data * + self.sphr_diffbl.scaling_factor + + self.sphr_diffbl.offset) + else: + self.sphr_diffbl = self.sphr_diffbl.data + self.sphr_diffbl_palette = _get_palette(h5f, 'SPhR_DIFFBL') / 255.0 + + # The SPhR DIFFHL data + h5d = h5f['SPhR_DIFFHL'] + self.sphr_diffhl.data = h5d[:, :] + self.sphr_diffhl.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_diffhl.offset = h5d.attrs["OFFSET"] + self.sphr_diffhl.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_diffhl.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_diffhl.num_of_lines, + self.sphr_diffhl.num_of_columns) + self.sphr_diffhl.product = h5d.attrs["PRODUCT"] + self.sphr_diffhl.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_diffhl.data ) * ( self.sphr_diffhl.data <= 128 ) + # apply scaling factor and offset + self.sphr_diffhl = mask * (self.sphr_diffhl.data * + self.sphr_diffhl.scaling_factor + + self.sphr_diffhl.offset) + else: + self.sphr_diffhl = self.sphr_diffhl.data + self.sphr_diffhl_palette = _get_palette(h5f, 'SPhR_DIFFHL') / 255.0 + + # The SPhR DIFFKI data + h5d = h5f['SPhR_DIFFKI'] + self.sphr_diffki.data = h5d[:, :] + self.sphr_diffki.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_diffki.offset = h5d.attrs["OFFSET"] + self.sphr_diffki.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_diffki.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_diffki.num_of_lines, + self.sphr_diffki.num_of_columns) + self.sphr_diffki.product = h5d.attrs["PRODUCT"] + self.sphr_diffki.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_diffki.data ) * ( self.sphr_diffki.data <= 128 ) + # apply scaling factor and offset + self.sphr_diffki = mask * (self.sphr_diffki.data * + self.sphr_diffki.scaling_factor + + self.sphr_diffki.offset) + else: + self.sphr_diffki = self.sphr_diffki.data + self.sphr_diffki_palette = _get_palette(h5f, 'SPhR_DIFFKI') / 255.0 + + # The SPhR DIFFLI data + h5d = h5f['SPhR_DIFFLI'] + self.sphr_diffli.data = h5d[:, :] + self.sphr_diffli.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_diffli.offset = h5d.attrs["OFFSET"] + self.sphr_diffli.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_diffli.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_diffli.num_of_lines, + self.sphr_diffli.num_of_columns) + self.sphr_diffli.product = h5d.attrs["PRODUCT"] + self.sphr_diffli.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_diffli.data ) * ( self.sphr_diffli.data <= 128 ) + # apply scaling factor and offset + self.sphr_diffli = mask * (self.sphr_diffli.data * + self.sphr_diffli.scaling_factor + + self.sphr_diffli.offset) + else: + self.sphr_diffli= self.sphr_diffli.data + self.sphr_diffli_palette = _get_palette(h5f, 'SPhR_DIFFLI') / 255.0 + + # The SPhR DIFFML data + h5d = h5f['SPhR_DIFFML'] + self.sphr_diffml.data = h5d[:, :] + self.sphr_diffml.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_diffml.offset = h5d.attrs["OFFSET"] + self.sphr_diffml.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_diffml.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_diffml.num_of_lines, + self.sphr_diffml.num_of_columns) + self.sphr_diffml.product = h5d.attrs["PRODUCT"] + 
self.sphr_diffml.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_diffml.data ) * ( self.sphr_diffml.data <= 128 ) + # apply scaling factor and offset + self.sphr_diffml = mask * (self.sphr_diffml.data * + self.sphr_diffml.scaling_factor + + self.sphr_diffml.offset) + else: + self.sphr_diffml = self.sphr_diffml.data + self.sphr_diffml_palette = _get_palette(h5f, 'SPhR_DIFFML') / 255.0 + + # The SPhR DIFFSHW data + h5d = h5f['SPhR_DIFFSHW'] + self.sphr_diffshw.data = h5d[:, :] + self.sphr_diffshw.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_diffshw.offset = h5d.attrs["OFFSET"] + self.sphr_diffshw.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_diffshw.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_diffshw.num_of_lines, + self.sphr_diffshw.num_of_columns) + self.sphr_diffshw.product = h5d.attrs["PRODUCT"] + self.sphr_diffshw.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_diffshw.data ) * ( self.sphr_diffshw.data <= 128 ) + # apply scaling factor and offset + self.sphr_diffshw = mask * (self.sphr_diffshw.data * + self.sphr_diffshw.scaling_factor + + self.sphr_diffshw.offset) + else: + self.sphr_diffshw = self.sphr_diffshw.data + self.sphr_diffshw_palette = _get_palette(h5f, 'SPhR_DIFFSHW') / 255.0 + + # The SPhR DIFFTPW data + h5d = h5f['SPhR_DIFFTPW'] + self.sphr_difftpw.data = h5d[:, :] + self.sphr_difftpw.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_difftpw.offset = h5d.attrs["OFFSET"] + self.sphr_difftpw.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_difftpw.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_difftpw.num_of_lines, + self.sphr_difftpw.num_of_columns) + self.sphr_difftpw.product = h5d.attrs["PRODUCT"] + self.sphr_difftpw.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_difftpw.data ) * ( self.sphr_difftpw.data <= 128 ) + # apply scaling factor and offset + self.sphr_difftpw = mask * (self.sphr_difftpw.data * + self.sphr_difftpw.scaling_factor + + self.sphr_difftpw.offset) + else: + self.sphr_difftpw = self.sphr_difftpw.data + self.sphr_difftpw_palette = _get_palette(h5f, 'SPhR_DIFFTPW') / 255.0 + + # The SPhR HL data + h5d = h5f['SPhR_HL'] + self.sphr_hl.data = h5d[:, :] + self.sphr_hl.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_hl.offset = h5d.attrs["OFFSET"] + self.sphr_hl.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_hl.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_hl.num_of_lines, + self.sphr_hl.num_of_columns) + self.sphr_hl.product = h5d.attrs["PRODUCT"] + self.sphr_hl.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_hl.data ) * ( self.sphr_hl.data <= 128 ) + # apply scaling factor and offset + self.sphr_hl = mask * (self.sphr_hl.data * + self.sphr_hl.scaling_factor + + self.sphr_hl.offset) + else: + self.sphr_hl = self.sphr_hl.data + self.sphr_hl_palette = _get_palette(h5f, 'SPhR_HL') / 255.0 + + # The SPhR KI data + h5d = h5f['SPhR_KI'] + self.sphr_ki.data = h5d[:, :] + self.sphr_ki.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_ki.offset = h5d.attrs["OFFSET"] + self.sphr_ki.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_ki.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_ki.num_of_lines, + self.sphr_ki.num_of_columns) + self.sphr_ki.product = h5d.attrs["PRODUCT"] + self.sphr_ki.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_ki.data ) * ( self.sphr_ki.data <= 128 ) + # apply scaling factor and offset + self.sphr_ki = mask * (self.sphr_ki.data * + self.sphr_ki.scaling_factor + + 
self.sphr_ki.offset) + else: + self.sphr_ki = self.sphr_ki.data + self.sphr_ki_palette = _get_palette(h5f, 'SPhR_KI') / 255.0 + + # The SPhR LI data + h5d = h5f['SPhR_LI'] + self.sphr_li.data = h5d[:, :] + self.sphr_li.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_li.offset = h5d.attrs["OFFSET"] + self.sphr_li.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_li.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_li.num_of_lines, + self.sphr_li.num_of_columns) + self.sphr_li.product = h5d.attrs["PRODUCT"] + self.sphr_li.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_li.data ) * ( self.sphr_li.data <= 128 ) + # apply scaling factor and offset + self.sphr_li = mask * (self.sphr_li.data * + self.sphr_li.scaling_factor + + self.sphr_li.offset) + else: + self.sphr_li = self.sphr_li.data + self.sphr_li_palette = _get_palette(h5f, 'SPhR_LI') / 255.0 + + # The SPhR ML data + h5d = h5f['SPhR_ML'] + self.sphr_ml.data = h5d[:, :] + self.sphr_ml.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_ml.offset = h5d.attrs["OFFSET"] + self.sphr_ml.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_ml.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_ml.num_of_lines, + self.sphr_ml.num_of_columns) + self.sphr_ml.product = h5d.attrs["PRODUCT"] + self.sphr_ml.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_ml.data ) * ( self.sphr_ml.data <= 128 ) + # apply scaling factor and offset + self.sphr_ml = mask * (self.sphr_ml.data * + self.sphr_ml.scaling_factor + + self.sphr_ml.offset) + else: + self.sphr_ml = self.sphr_ml.data + self.sphr_ml_palette = _get_palette(h5f, 'SPhR_ML') / 255.0 + + # The SPhR QUALITY data + h5d = h5f['SPhR_QUALITY'] + self.sphr_quality.data = h5d[:, :] + self.sphr_quality.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_quality.offset = h5d.attrs["OFFSET"] + self.sphr_quality.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_quality.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_quality.num_of_lines, + self.sphr_quality.num_of_columns) + self.sphr_quality.product = h5d.attrs["PRODUCT"] + self.sphr_quality.id = h5d.attrs["ID"] + if calibrate: + mask = (self.sphr_quality.data != 0 ) + # apply scaling factor and offset + self.sphr_quality = mask * (self.sphr_quality.data * + self.sphr_quality.scaling_factor + + self.sphr_quality.offset) + else: + self.sphr_quality = self.sphr_quality.data + + # The SPhR SFLAG data + h5d = h5f['SPhR_SFLAG'] + self.sphr_sflag.data = h5d[:, :] + self.sphr_sflag.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_sflag.offset = h5d.attrs["OFFSET"] + self.sphr_sflag.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_sflag.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_sflag.num_of_lines, + self.sphr_sflag.num_of_columns) + self.sphr_sflag.product = h5d.attrs["PRODUCT"] + self.sphr_sflag.id = h5d.attrs["ID"] + self.sphr_sflag = self.sphr_sflag.data + + # The SPhR SHW data + h5d = h5f['SPhR_SHW'] + self.sphr_shw.data = h5d[:, :] + self.sphr_shw.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_shw.offset = h5d.attrs["OFFSET"] + self.sphr_shw.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_shw.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_shw.num_of_lines, + self.sphr_shw.num_of_columns) + self.sphr_shw.product = h5d.attrs["PRODUCT"] + self.sphr_shw.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_shw.data ) * ( self.sphr_shw.data <= 128 ) + # apply scaling factor and offset + self.sphr_shw = mask * (self.sphr_shw.data * + 
self.sphr_shw.scaling_factor + + self.sphr_shw.offset) + else: + self.sphr_shw = self.sphr_shw.data + self.sphr_shw_palette = _get_palette(h5f, 'SPhR_SHW') / 255.0 + + # The SPhR TPW data + h5d = h5f['SPhR_TPW'] + self.sphr_tpw.data = h5d[:, :] + self.sphr_tpw.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.sphr_tpw.offset = h5d.attrs["OFFSET"] + self.sphr_tpw.num_of_lines = h5d.attrs["N_LINES"] + self.sphr_tpw.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.sphr_tpw.num_of_lines, + self.sphr_tpw.num_of_columns) + self.sphr_tpw.product = h5d.attrs["PRODUCT"] + self.sphr_tpw.id = h5d.attrs["ID"] + if calibrate: + mask = ( 8 <= self.sphr_tpw.data ) * ( self.sphr_tpw.data <= 128 ) + # apply scaling factor and offset + self.sphr_tpw = mask * (self.sphr_tpw.data * + self.sphr_tpw.scaling_factor + + self.sphr_tpw.offset) + print self.sphr_tpw.min(), self.sphr_tpw.max() + else: + self.sphr_tpw = self.sphr_tpw.data + + self.sphr_tpw_palette = _get_palette(h5f, 'SPhR_TPW') / 255.0 + + # ------------------------ + + h5f.close() + + #self.sphr = self.sphr.data + #self.sphr_bl = self.sphr_bl.data + #self.sphr_cape = self.sphr_cape.data + #self.sphr_diffbl = self.sphr_diffbl.data + #self.sphr_diffhl = self.sphr_diffhl.data + #self.sphr_diffki = self.sphr_diffki.data + #self.sphr_diffli = self.sphr_diffli.data + #self.sphr_diffml = self.sphr_diffml.data + #self.sphr_diffshw = self.sphr_diffshw.data + #self.sphr_difftpw = self.sphr_difftpw.data + #self.sphr_hl = self.sphr_hl.data + #self.sphr_ki = self.sphr_ki.data + #self.sphr_li = self.sphr_li.data + #self.sphr_ml = self.sphr_ml.data + #self.sphr_quality = self.sphr_quality.data + #self.sphr_sflag = self.sphr_sflag.data + #self.sphr_shw = self.sphr_shw.data + #self.sphr_tpw = self.sphr_tpw.data + + + self.processing_flags = self.processing_flags.data + + self.area = get_area_from_file(filename) + + self.filled = True + + def project(self, coverage): + """Remaps the NWCSAF/MSG CRR to cartographic map-projection on + area give by a pre-registered area-id. Faster version of msg_remap! + """ + LOG.info("Projecting channel %s..." 
% (self.name)) + + region = coverage.out_area + dest_area = region.area_id + + retv = MsgSPhR() + + retv.name = self.name + retv.package = self.package + retv.saf = self.saf + retv.product_name = self.product_name + retv.region_name = dest_area + retv.cfac = self.cfac + retv.lfac = self.lfac + retv.coff = self.coff + retv.loff = self.loff + retv.nb_param = self.nb_param + retv.gp_sc_id = self.gp_sc_id + retv.image_acquisition_time = self.image_acquisition_time + retv.spectral_channel_id = self.spectral_channel_id + retv.nominal_product_time = self.nominal_product_time + retv.sgs_product_quality = self.sgs_product_quality + retv.sgs_product_completeness = self.sgs_product_completeness + retv.product_algorithm_version = self.product_algorithm_version + + retv.sphr_bl = coverage.project_array(self.sphr_bl) + retv.sphr_bl_palette = self.sphr_bl_palette + retv.sphr_ml = coverage.project_array(self.sphr_ml) + retv.sphr_ml_palette = self.sphr_ml_palette + retv.sphr_hl = coverage.project_array(self.sphr_hl) + retv.sphr_hl_palette = self.sphr_hl_palette + retv.sphr_ki = coverage.project_array(self.sphr_ki) + retv.sphr_ki_palette = self.sphr_ki_palette + retv.sphr_li = coverage.project_array(self.sphr_li) + retv.sphr_li_palette = self.sphr_li_palette + retv.sphr_tpw = coverage.project_array(self.sphr_tpw) + retv.sphr_tpw_palette = self.sphr_tpw_palette + retv.sphr_cape = coverage.project_array(self.sphr_cape) + # no sphr_cape_palette + retv.sphr_quality = coverage.project_array(self.sphr_quality) + # no sphr_quality_palette + retv.sphr_sflag = coverage.project_array(self.sphr_sflag) + # no sphr_sflag_palette + retv.sphr_shw = coverage.project_array(self.sphr_shw) + retv.sphr_shw_palette = self.sphr_shw_palette + retv.sphr_diffbl = coverage.project_array(self.sphr_diffbl) + retv.sphr_diffbl_palette = self.sphr_diffbl_palette + retv.sphr_diffml = coverage.project_array(self.sphr_diffml) + retv.sphr_diffml_palette = self.sphr_diffml_palette + retv.sphr_diffhl = coverage.project_array(self.sphr_diffhl) + retv.sphr_diffhl_palette = self.sphr_diffhl_palette + retv.sphr_diffki = coverage.project_array(self.sphr_diffki) + retv.sphr_diffki_palette = self.sphr_diffki_palette + retv.sphr_diffli = coverage.project_array(self.sphr_diffli) + retv.sphr_diffli_palette = self.sphr_diffli_palette + retv.sphr_difftpw = coverage.project_array(self.sphr_difftpw) + retv.sphr_difftpw_palette = self.sphr_difftpw_palette + retv.sphr_diffshw = coverage.project_array(self.sphr_diffshw) + retv.sphr_diffshw_palette = self.sphr_diffshw_palette + + + +# retv.processing_flags = \ +# coverage.project_array(self.processing_flags) + + retv.qc_straylight = self.qc_straylight + retv.region_name = dest_area + retv.area = region + retv.projection_name = region.proj_id + + retv.pcs_def = pcs_def_from_region(region) + + retv.num_of_columns = region.x_size + retv.num_of_lines = region.y_size + retv.xscale = region.pixel_size_x + retv.yscale = region.pixel_size_y + + import pyproj + prj = pyproj.Proj(region.proj4_string) + aex = region.area_extent + lonur, latur = prj(aex[2], aex[3], inverse=True) + lonll, latll = prj(aex[0], aex[1], inverse=True) + retv.ll_lon = lonll + retv.ll_lat = latll + retv.ur_lon = lonur + retv.ur_lat = latur + + self.shape = region.shape + + retv.filled = True + retv.resolution = self.resolution + + return retv + + + +class MsgPCPhData(object): + + """NWCSAF/MSG PCPh data layer + """ + + def __init__(self): + self.data = None + self.scaling_factor = 1 + self.offset = 0 + self.num_of_lines = 0 + self.num_of_columns = 
0 + self.product = "" + self.id = "" + + +class MsgPCPh(mpop.channel.GenericChannel): + + """NWCSAF/MSG PCPh data structure as retrieved from HDF5 + file. Resolution sets the nominal resolution of the data. + Palette now missing + """ + + def __init__(self): + mpop.channel.GenericChannel.__init__(self, "PCPh") + self.filled = False + self.name = "PCPh" +# self.resolution = resolution + self.package = "" + self.saf = "" + self.product_name = "" + self.num_of_columns = 0 + self.num_of_lines = 0 + self.projection_name = "" + self.pcs_def = "" + self.xscale = 0 + self.yscale = 0 + self.ll_lon = 0.0 + self.ll_lat = 0.0 + self.ur_lon = 0.0 + self.ur_lat = 0.0 + self.region_name = "" + self.cfac = 0 + self.lfac = 0 + self.coff = 0 + self.loff = 0 + self.nb_param = 0 + self.gp_sc_id = 0 + self.image_acquisition_time = 0 + self.spectral_channel_id = 0 + self.nominal_product_time = 0 + self.sgs_product_quality = 0 + self.sgs_product_completeness = 0 + self.product_algorithm_version = "" + self.pcph = None + self.pcph_pc = None + self.pcph_quality = None + self.pcph_dataflag = None + self.processing_flags = None + self.shape = None + self.satid = "" + self.qc_straylight = -1 + self.pcph = None + self.pcph_pc_palette = None + self.pcph_quality_palette = None + self.pcph_sflag_palette = None + + def __str__(self): + return ("'%s: shape %s, resolution %sm'" % + (self.name, + self.pcph_pc.shape, + self.resolution)) + + def is_loaded(self): + """Tells if the channel contains loaded data. + """ + return self.filled + +# ------------------------------------------------------------------ + def read(self, filename, calibrate=True): + """Reader for the . Use *filename* to read data. + """ + import h5py + +# Erste Zeile notwendig? + self.pcph = MsgPCPhData() + self.pcph_pc = MsgPCPhData() + self.pcph_quality = MsgPCPhData() + self.pcph_dataflag = MsgPCPhData() + + self.processing_flags = MsgPCPhData() + + LOG.debug("Filename = <" + str(filename) + ">") + h5f = h5py.File(filename, 'r') + # pylint: disable-msg=W0212 + self.package = h5f.attrs["PACKAGE"] + self.saf = h5f.attrs["SAF"] + self.product_name = h5f.attrs["PRODUCT_NAME"] + self.num_of_columns = h5f.attrs["NC"] + self.num_of_lines = h5f.attrs["NL"] + self.projection_name = h5f.attrs["PROJECTION_NAME"] + self.region_name = h5f.attrs["REGION_NAME"] + self.cfac = h5f.attrs["CFAC"] + self.lfac = h5f.attrs["LFAC"] + self.coff = h5f.attrs["COFF"] + self.loff = h5f.attrs["LOFF"] + self.nb_param = h5f.attrs["NB_PARAMETERS"] + self.gp_sc_id = h5f.attrs["GP_SC_ID"] + self.image_acquisition_time = h5f.attrs["IMAGE_ACQUISITION_TIME"] + self.spectral_channel_id = h5f.attrs["SPECTRAL_CHANNEL_ID"] + self.nominal_product_time = h5f.attrs["NOMINAL_PRODUCT_TIME"] + self.sgs_product_quality = h5f.attrs["SGS_PRODUCT_QUALITY"] + self.sgs_product_completeness = h5f.attrs["SGS_PRODUCT_COMPLETENESS"] + self.product_algorithm_version = h5f.attrs["PRODUCT_ALGORITHM_VERSION"] + # pylint: enable-msg=W0212 + # ------------------------ + + # The PPh PC data + h5d = h5f['PCPh_PC'] + self.pcph_pc.data = h5d[:, :] + self.pcph_pc.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.pcph_pc.offset = h5d.attrs["OFFSET"] + self.pcph_pc.num_of_lines = h5d.attrs["N_LINES"] + self.pcph_pc.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.pcph_pc.num_of_lines, + self.pcph_pc.num_of_columns) + self.pcph_pc.product = h5d.attrs["PRODUCT"] + + self.pcph_pc.id = h5d.attrs["ID"] + if calibrate: + self.pcph_pc = (self.pcph_pc.data * + self.pcph_pc.scaling_factor + + self.pcph_pc.offset) + else: 
+ self.pcph_pc = self.pcph_pc.data + self.pcph_pc_palette = _get_palette(h5f, 'PCPh_PC') / 255.0 + + # The PPh QUALITY data + h5d = h5f['PCPh_QUALITY'] + self.pcph_quality.data = h5d[:, :] + self.pcph_quality.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.pcph_quality.offset = h5d.attrs["OFFSET"] + self.pcph_quality.num_of_lines = h5d.attrs["N_LINES"] + self.pcph_quality.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.pcph_quality.num_of_lines, + self.pcph_quality.num_of_columns) + self.pcph_quality.product = h5d.attrs["PRODUCT"] + self.pcph_quality.id = h5d.attrs["ID"] + + # The PPh DATA FLAG data + h5d = h5f['PCPh_DATAFLAG'] + self.pcph_dataflag.data = h5d[:, :] + self.pcph_dataflag.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.pcph_dataflag.offset = h5d.attrs["OFFSET"] + self.pcph_dataflag.num_of_lines = h5d.attrs["N_LINES"] + self.pcph_dataflag.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.pcph_dataflag.num_of_lines, + self.pcph_dataflag.num_of_columns) + self.pcph_dataflag.product = h5d.attrs["PRODUCT"] + self.pcph_dataflag.id = h5d.attrs["ID"] + + # ------------------------ + + h5f.close() + + self.processing_flags = self.processing_flags.data + + self.area = get_area_from_file(filename) + + self.filled = True + + + def project(self, coverage): + """Remaps the NWCSAF/MSG PCPh to cartographic map-projection on + area give by a pre-registered area-id. Faster version of msg_remap! + """ + LOG.info("Projecting channel %s..." % (self.name)) + + region = coverage.out_area + dest_area = region.area_id + + retv = MsgPCPh() + + retv.name = self.name + retv.package = self.package + retv.saf = self.saf + retv.product_name = self.product_name + retv.region_name = dest_area + retv.cfac = self.cfac + retv.lfac = self.lfac + retv.coff = self.coff + retv.loff = self.loff + retv.nb_param = self.nb_param + retv.gp_sc_id = self.gp_sc_id + retv.image_acquisition_time = self.image_acquisition_time + retv.spectral_channel_id = self.spectral_channel_id + retv.nominal_product_time = self.nominal_product_time + retv.sgs_product_quality = self.sgs_product_quality + retv.sgs_product_completeness = self.sgs_product_completeness + retv.product_algorithm_version = self.product_algorithm_version + + retv.pcph_pc = coverage.project_array(self.pcph_pc) + retv.pcph_pc_palette = self.pcph_pc_palette + + #retv.processing_flags = \ + # coverage.project_array(self.processing_flags) + + retv.qc_straylight = self.qc_straylight + retv.region_name = dest_area + retv.area = region + retv.projection_name = region.proj_id + + retv.pcs_def = pcs_def_from_region(region) + + retv.num_of_columns = region.x_size + retv.num_of_lines = region.y_size + retv.xscale = region.pixel_size_x + retv.yscale = region.pixel_size_y + + import pyproj + prj = pyproj.Proj(region.proj4_string) + aex = region.area_extent + lonur, latur = prj(aex[2], aex[3], inverse=True) + lonll, latll = prj(aex[0], aex[1], inverse=True) + retv.ll_lon = lonll + retv.ll_lat = latll + retv.ur_lon = lonur + retv.ur_lat = latur + + self.shape = region.shape + + retv.filled = True + retv.resolution = self.resolution + + return retv + + +class MsgCRPhData(object): + + """NWCSAF/MSG CRPh layer + """ + + def __init__(self): + self.data = None + self.scaling_factor = 1 + self.offset = 0 + self.num_of_lines = 0 + self.num_of_columns = 0 + self.product = "" + self.id = "" + + +class MsgCRPh(mpop.channel.GenericChannel): + + """NWCSAF/MSG CRPh data structure as retrieved from HDF5 + file. Resolution sets the nominal resolution of the data. 
+ Palette now missing + """ + + def __init__(self): + mpop.channel.GenericChannel.__init__(self, "CRPh") + self.filled = False + self.name = "CRPh" +# self.resolution = resolution + self.package = "" + self.saf = "" + self.product_name = "" + self.num_of_columns = 0 + self.num_of_lines = 0 + self.projection_name = "" + self.pcs_def = "" + self.xscale = 0 + self.yscale = 0 + self.ll_lon = 0.0 + self.ll_lat = 0.0 + self.ur_lon = 0.0 + self.ur_lat = 0.0 + self.region_name = "" + self.cfac = 0 + self.lfac = 0 + self.coff = 0 + self.loff = 0 + self.nb_param = 0 + self.gp_sc_id = 0 + self.image_acquisition_time = 0 + self.spectral_channel_id = 0 + self.nominal_product_time = 0 + self.sgs_product_quality = 0 + self.sgs_product_completeness = 0 + self.product_algorithm_version = "" + self.crph = None + self.crph_crr = None + self.crph_accum = None + self.crph_IQF = None + self.crph_quality = None + self.crph_dataflag = None + self.processing_flags = None + self.shape = None + self.satid = "" + self.qc_straylight = -1 + self.crph = None + self.crph_pc_palette = None + self.crph_quality_palette = None + self.crph_sflag_palette = None + + def __str__(self): + return ("'%s: shape %s, resolution %sm'" % + (self.name, + self.crph_crr.shape, + self.resolution)) + + def is_loaded(self): + """Tells if the channel contains loaded data. + """ + return self.filled + +# ------------------------------------------------------------------ + def read(self, filename, calibrate=True): + """Reader for the . Use *filename* to read data. + """ + import h5py + +# Erste Zeile notwendig? + self.crph = MsgCRPhData() + self.crph_crr = MsgCRPhData() + self.crph_accum = MsgCRPhData() + self.crph_iqf = MsgCRPhData() + self.crph_quality = MsgCRPhData() + self.crph_dataflag = MsgCRPhData() + + self.processing_flags = MsgCRPhData() + + LOG.debug("Filename = <" + str(filename) + ">") + h5f = h5py.File(filename, 'r') + # pylint: disable-msg=W0212 + self.package = h5f.attrs["PACKAGE"] + self.saf = h5f.attrs["SAF"] + self.product_name = h5f.attrs["PRODUCT_NAME"] + self.num_of_columns = h5f.attrs["NC"] + self.num_of_lines = h5f.attrs["NL"] + self.projection_name = h5f.attrs["PROJECTION_NAME"] + self.region_name = h5f.attrs["REGION_NAME"] + self.cfac = h5f.attrs["CFAC"] + self.lfac = h5f.attrs["LFAC"] + self.coff = h5f.attrs["COFF"] + self.loff = h5f.attrs["LOFF"] + self.nb_param = h5f.attrs["NB_PARAMETERS"] + self.gp_sc_id = h5f.attrs["GP_SC_ID"] + self.image_acquisition_time = h5f.attrs["IMAGE_ACQUISITION_TIME"] + self.spectral_channel_id = h5f.attrs["SPECTRAL_CHANNEL_ID"] + self.nominal_product_time = h5f.attrs["NOMINAL_PRODUCT_TIME"] + self.sgs_product_quality = h5f.attrs["SGS_PRODUCT_QUALITY"] + self.sgs_product_completeness = h5f.attrs["SGS_PRODUCT_COMPLETENESS"] + self.product_algorithm_version = h5f.attrs["PRODUCT_ALGORITHM_VERSION"] + # pylint: enable-msg=W0212 + # ------------------------ + + # The CRPh CRR data + h5d = h5f['CRPh_CRR'] + self.crph_crr.data = h5d[:, :] + self.crph_crr.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.crph_crr.offset = h5d.attrs["OFFSET"] + self.crph_crr.num_of_lines = h5d.attrs["N_LINES"] + self.crph_crr.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.crph_crr.num_of_lines, + self.crph_crr.num_of_columns) + self.crph_crr.product = h5d.attrs["PRODUCT"] + self.crph_crr.id = h5d.attrs["ID"] + if calibrate: + self.crph_crr = (self.crph_crr.data * + self.crph_crr.scaling_factor + + self.crph_crr.offset) + else: + self.crph_crr = self.crph_crr.data + self.crph_crr_palette = 
_get_palette(h5f, 'CRPh_CRR') / 255.0 + + # The CRPh ACCUM data + h5d = h5f['CRPh_ACUM'] + self.crph_accum.data = h5d[:, :] + self.crph_accum.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.crph_accum.offset = h5d.attrs["OFFSET"] + self.crph_accum.num_of_lines = h5d.attrs["N_LINES"] + self.crph_accum.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.crph_accum.num_of_lines, + self.crph_accum.num_of_columns) + self.crph_accum.product = h5d.attrs["PRODUCT"] + self.crph_accum.id = h5d.attrs["ID"] + if calibrate: + self.crph_accum = (self.crph_accum.data * + self.crph_accum.scaling_factor + + self.crph_accum.offset) + else: + self.crph_accum = self.crph_accum.data + self.crph_accum_palette = _get_palette(h5f, 'CRPh_ACUM') / 255.0 + + + # The CRPH IQF data + h5d = h5f['CRPh_IQF'] + self.crph_iqf.data = h5d[:, :] + self.crph_iqf.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.crph_iqf.offset = h5d.attrs["OFFSET"] + self.crph_iqf.num_of_lines = h5d.attrs["N_LINES"] + self.crph_iqf.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.crph_iqf.num_of_lines, + self.crph_iqf.num_of_columns) + self.crph_iqf.product = h5d.attrs["PRODUCT"] + self.crph_iqf.id = h5d.attrs["ID"] + + # The CRPh QUALITY data + h5d = h5f['CRPh_QUALITY'] + self.crph_quality.data = h5d[:, :] + self.crph_quality.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.crph_quality.offset = h5d.attrs["OFFSET"] + self.crph_quality.num_of_lines = h5d.attrs["N_LINES"] + self.crph_quality.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.crph_quality.num_of_lines, + self.crph_quality.num_of_columns) + self.crph_quality.product = h5d.attrs["PRODUCT"] + self.crph_quality.id = h5d.attrs["ID"] + + # The CRPh DATA FLAG data + h5d = h5f['CRPh_DATAFLAG'] + self.crph_dataflag.data = h5d[:, :] + self.crph_dataflag.scaling_factor = h5d.attrs["SCALING_FACTOR"] + self.crph_dataflag.offset = h5d.attrs["OFFSET"] + self.crph_dataflag.num_of_lines = h5d.attrs["N_LINES"] + self.crph_dataflag.num_of_columns = h5d.attrs["N_COLS"] + self.shape = (self.crph_dataflag.num_of_lines, + self.crph_dataflag.num_of_columns) + self.crph_dataflag.product = h5d.attrs["PRODUCT"] + self.crph_dataflag.id = h5d.attrs["ID"] + + # ------------------------ + + h5f.close() + + self.processing_flags = self.processing_flags.data + + self.area = get_area_from_file(filename) + + self.filled = True + + def project(self, coverage): + """Remaps the NWCSAF/MSG CRPh to cartographic map-projection on + area give by a pre-registered area-id. Faster version of msg_remap! + """ + LOG.info("Projecting channel %s..." 
% (self.name)) + + region = coverage.out_area + dest_area = region.area_id + + retv = MsgCRPh() + + retv.name = self.name + retv.package = self.package + retv.saf = self.saf + retv.product_name = self.product_name + retv.region_name = dest_area + retv.cfac = self.cfac + retv.lfac = self.lfac + retv.coff = self.coff + retv.loff = self.loff + retv.nb_param = self.nb_param + retv.gp_sc_id = self.gp_sc_id + retv.image_acquisition_time = self.image_acquisition_time + retv.spectral_channel_id = self.spectral_channel_id + retv.nominal_product_time = self.nominal_product_time + retv.sgs_product_quality = self.sgs_product_quality + retv.sgs_product_completeness = self.sgs_product_completeness + retv.product_algorithm_version = self.product_algorithm_version + + retv.crph_crr = coverage.project_array(self.crph_crr) + retv.crph_crr_palette = self.crph_crr_palette + retv.crph_accum = coverage.project_array(self.crph_accum) + retv.crph_accum_palette = self.crph_accum_palette +# retv.processing_flags = \ +# coverage.project_array(self.processing_flags) + + retv.qc_straylight = self.qc_straylight + retv.region_name = dest_area + retv.area = region + retv.projection_name = region.proj_id + + retv.pcs_def = pcs_def_from_region(region) + + retv.num_of_columns = region.x_size + retv.num_of_lines = region.y_size + retv.xscale = region.pixel_size_x + retv.yscale = region.pixel_size_y + + import pyproj + prj = pyproj.Proj(region.proj4_string) + aex = region.area_extent + lonur, latur = prj(aex[2], aex[3], inverse=True) + lonll, latll = prj(aex[0], aex[1], inverse=True) + retv.ll_lon = lonll + retv.ll_lat = latll + retv.ur_lon = lonur + retv.ur_lat = latur + + self.shape = region.shape + + retv.filled = True + retv.resolution = self.resolution + + return retv + +""" NEU ENDE """ + +MSG_PGE_EXTENTIONS = ["PLAX.CTTH.0.h5", "PLAX.CLIM.0.h5", "h5"] + + +def get_best_product(filename, area_extent): + """Get the best of the available products for the *filename* template. + """ + + for ext in MSG_PGE_EXTENTIONS: + match_str = filename + "." + ext + LOG.debug("glob-string for filename: " + str(match_str)) + flist = glob.glob(match_str) + if len(flist) == 0: + LOG.warning("No matching %s.%s input MSG file." + % (filename, ext)) + else: + # File found: + if area_extent is None: + LOG.warning("Didn't specify an area, taking " + flist[0]) + return flist[0] + for fname in flist: + aex = get_area_extent(fname) + #import pdb + # pdb.set_trace() + if np.all(np.max(np.abs(np.array(aex) - + np.array(area_extent))) < 1000): + LOG.info("MSG file found: %s" % fname) + return fname + LOG.info("Did not find any MSG file for specified area") + + +def get_best_products(filename, area_extent): + """Get the best of the available products for the *filename* template. + """ + + filenames = [] + + for ext in MSG_PGE_EXTENTIONS: + match_str = filename + "." + ext + LOG.debug('Match string = ' + str(match_str)) + flist = glob.glob(match_str) + if len(flist) == 0: + LOG.warning("No matching %s.%s input MSG file." 
+ % (filename, ext)) + else: + # File found: + if area_extent is None: + LOG.warning("Didn't specify an area, taking " + flist[0]) + filenames.append(flist[0]) + else: + found = False + for fname in flist: + aex = get_area_extent(fname) + if np.all(np.max(np.abs(np.array(aex) - + np.array(area_extent))) < 1000): + found = True + LOG.info("MSG file found: %s" % fname) + filenames.append(fname) + if not found: + LOG.info( + "Did not find any MSG file for specified area") + LOG.debug("Sorted filenames: %s", str(sorted(filenames))) + return sorted(filenames) + + +def get_area_from_file(filename): + """Get the area from the h5 file. + """ + from pyresample.geometry import AreaDefinition + import h5py + + aex = get_area_extent(filename) + h5f = h5py.File(filename, 'r') + pname = h5f.attrs["PROJECTION_NAME"] + proj = {} + if pname.startswith("GEOS"): + proj["proj"] = "geos" + proj["a"] = "6378169.0" + proj["b"] = "6356583.8" + proj["h"] = "35785831.0" + proj["lon_0"] = str(float(pname.split("<")[1][:-1])) + else: + raise NotImplementedError("Only geos projection supported yet.") + + #h5f.attrs["REGION_NAME"] # alps + #pname # GEOS<+009.5> + #proj # {'a': '6378169.0', 'h': '35785831.0', 'b': '6356583.8', 'lon_0': '9.5', 'proj': 'geos'} + #int(h5f.attrs["NC"]) # 349 + #int(h5f.attrs["NL"]) # 151 + #aex # (-613578.17189778585, 4094060.208733994, 433553.97518292483, 4547101.2335793395) + + area_def = AreaDefinition(h5f.attrs["REGION_NAME"], + h5f.attrs["REGION_NAME"], + pname, + proj, + int(h5f.attrs["NC"]), + int(h5f.attrs["NL"]), + aex) + h5f.close() + return area_def + + +def load(scene, **kwargs): + """Load data into the *channels*. *Channels* is a list or a tuple + containing channels we will load data into. If None, all channels are + loaded. + """ + + print "*** read NWC-SAF data with nwcsaf_msg.py", scene.channels_to_load + + area_extent = kwargs.get("area_extent") + calibrate = kwargs.get("calibrate", True) + + conf = ConfigParser.ConfigParser() + conf.read(os.path.join(CONFIG_PATH, scene.fullname + ".cfg")) + directory = conf.get(scene.instrument_name + "-level3", "dir", raw=True) + filename_raw = conf.get(scene.instrument_name + "-level3", "filename", raw=True) + pathname = os.path.join(directory, filename_raw) + + LOG.debug("Inside load: " + str(scene.channels_to_load)) + + if "CloudMask" in scene.channels_to_load: + filename_wildcards = (scene.time_slot.strftime(pathname) + % {"number": "01", + "product": "CMa__"}) + filename = get_best_product(filename_wildcards, area_extent) + if filename != None: + ct_chan = MsgCloudMask() + ct_chan.read(filename,calibrate) + ct_chan.satid = (scene.satname.capitalize() + + str(scene.sat_nr()).rjust(2)) + ct_chan.resolution = ct_chan.area.pixel_size_x + scene.channels.append(ct_chan) + + if "CloudType" in scene.channels_to_load: + filename_wildcards = (scene.time_slot.strftime(pathname) + % {"number": "02", + "product": "CT___"}) + filenames = get_best_products(filename_wildcards, area_extent) + if len(filenames) > 0: + filename = filenames[-1] + else: + LOG.info("Did not find any MSG file for specified area") + return + ct_chan = MsgCloudType() + ct_chan.read(filenames[-1]) + LOG.debug("Uncorrected file: %s", filename) + ct_chan.name = "CloudType" + ct_chan.satid = (scene.satname.capitalize() + + str(scene.sat_nr()).rjust(2)) + ct_chan.resolution = ct_chan.area.pixel_size_x + scene.channels.append(ct_chan) + + if "CloudType_plax" in scene.channels_to_load: + filename_wildcards = (scene.time_slot.strftime(pathname) + % {"number": "02", + "product": 
"CT___"}) + filenames = get_best_products(filename_wildcards, area_extent) + if len(filenames) > 0: + filename = filenames[0] + else: + LOG.info("Did not find any MSG file for specified area") + return + ct_chan_plax = MsgCloudType() + if filename != None: + LOG.debug("Parallax corrected file: %s", filename) + ct_chan_plax.read(filename) + ct_chan_plax.name = "CloudType_plax" + ct_chan_plax.satid = (scene.satname.capitalize() + + str(scene.sat_nr()).rjust(2)) + ct_chan_plax.resolution = ct_chan_plax.area.pixel_size_x + scene.channels.append(ct_chan_plax) + + print "*** hallo world***" + + if "CTTH" in scene.channels_to_load: + filename_wildcards = (scene.time_slot.strftime(pathname) + % {"number": "03", + "product": "CTTH_"}) + filename = get_best_product(filename_wildcards, area_extent) + if filename != None: + ct_chan = MsgCTTH() + ct_chan.read(filename,calibrate) + print "CCC", scene.sat_nr() + ct_chan.satid = (scene.satname[0:8].capitalize() + + str(scene.sat_nr()).rjust(2)) + print "bullshit (nwcsat_msg.py) ", ct_chan.satid # "Meteosat 9" + ct_chan.resolution = ct_chan.area.pixel_size_x + scene.channels.append(ct_chan) + + if "CRR" in scene.channels_to_load: + filename_wildcards = (scene.time_slot.strftime(pathname) + % {"number": "05", + "product": "CRR__"}) + filename = get_best_product(filename_wildcards, area_extent) + if filename != None: + ct_chan = MsgCRR() + ct_chan.read(filename,calibrate) + ct_chan.name = "CRR_" # !!!!! changed as we create another channel named 'CRR' when transforming the format + ct_chan.satid = (scene.satname.capitalize() + + str(scene.sat_nr()).rjust(2)) + ct_chan.resolution = ct_chan.area.pixel_size_x + scene.channels.append(ct_chan) + + if "PC" in scene.channels_to_load: + filename_wildcards = (scene.time_slot.strftime(pathname) + % {"number": "04", + "product": "PC___"}) + filename = get_best_product(filename_wildcards, area_extent) + if filename != None: + ct_chan = MsgPC() + ct_chan.read(filename,calibrate) + ct_chan.name = "PC" + ct_chan.satid = (scene.satname.capitalize() + + str(scene.sat_nr()).rjust(2)) + ct_chan.resolution = ct_chan.area.pixel_size_x + scene.channels.append(ct_chan) + + if "SPhR" in scene.channels_to_load: + filename_wildcards = (scene.time_slot.strftime(pathname) + % {"number": "13", + "product": "SPhR_"}) + filename = get_best_product(filename_wildcards, area_extent) + if filename != None: + ct_chan = MsgSPhR() + ct_chan.read(filename,calibrate) + ct_chan.name = "SPhR" + ct_chan.satid = (scene.satname.capitalize() + + str(scene.sat_nr()).rjust(2)) + ct_chan.resolution = ct_chan.area.pixel_size_x + scene.channels.append(ct_chan) + + if "PCPh" in scene.channels_to_load: + filename_wildcards = (scene.time_slot.strftime(pathname) + % {"number": "14", + "product": "PCPh_"}) + filename = get_best_product(filename_wildcards, area_extent) + if filename != None: + ct_chan = MsgPCPh() + ct_chan.read(filename,calibrate) + ct_chan.name = "PCPh_" + ct_chan.satid = (scene.satname.capitalize() + + str(scene.sat_nr()).rjust(2)) + ct_chan.resolution = ct_chan.area.pixel_size_x + scene.channels.append(ct_chan) + + if "CRPh" in scene.channels_to_load: + filename_wildcards = (scene.time_slot.strftime(pathname) + % {"number": "14", + "product": "CRPh_"}) + filename = get_best_product(filename_wildcards, area_extent) + if filename != None: + ct_chan = MsgCRPh() + ct_chan.read(filename,calibrate) + ct_chan.name = "CRPh_" + ct_chan.satid = (scene.satname.capitalize() + + str(scene.sat_nr()).rjust(2)) + ct_chan.resolution = ct_chan.area.pixel_size_x 
+            scene.channels.append(ct_chan)
+
+    if 'filename' in locals() and filename is not None:
+        if len(filename) > 12:
+            sat_nr = int(basename(filename)[10:11]) + 7
+            if int(scene.sat_nr()) != int(sat_nr):
+                print "*** Warning, changing Meteosat number to " + str(sat_nr) + \
+                      " (input: " + str(scene.sat_nr()) + ")"
+                # update the Meteosat number stored in the scene
+                scene.number = str(sat_nr)
+
+    LOG.info("Loading channels done.")
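Note on the product loading above: each block expands the configured pathname with strftime and then fills in the PGE number and the product string before globbing with get_best_product()/get_best_products(). A minimal sketch of the second step, with a purely hypothetical template (the real pathname comes from the satellite configuration file and also contains strftime date/time directives):

    # hypothetical template; scene.time_slot.strftime() has already expanded the
    # date/time directives, only the %(...)s placeholders are left
    pathname_after_strftime = "SAFNWC_MSG%(number)s_%(product)s_201607051200.h5"

    filename_wildcards = pathname_after_strftime % {"number": "02",
                                                    "product": "CT___"}
    print filename_wildcards        # SAFNWC_MSG02_CT____201607051200.h5

get_best_product() then matches this pattern against the files available for the requested area_extent.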
diff --git a/mpop/scene.py b/mpop/scene.py
index 9d993ef1..97467087 100644
--- a/mpop/scene.py
+++ b/mpop/scene.py
@@ -83,6 +83,14 @@ def fullname(self):
         """
         return self.variant + self.satname + self.number
 
+    def sat_nr(self, string=False):
+        import re
+        sat_nr = re.findall(r'\d+', self.fullname)[0]
+        if string:
+            return sat_nr
+        else:
+            return int(sat_nr)
+
     @classmethod
     def remove_attribute(cls, name):
         """Remove an attribute from the class.
@@ -461,6 +469,12 @@ def load(self, channels=None, load_again=False, area_extent=None, **kwargs):
         if len(self.channels_to_load) == 0:
             return
 
+        if kwargs.get("reader_level") is not None:
+            LOG.debug("Using explicit definition of reader level: " + kwargs["reader_level"])
+            if kwargs["reader_level"] != level:
+                continue
+
         LOG.debug("Looking for sources in section " + level)
         reader_name = conf.get(level, 'format')
         try:
@@ -550,6 +564,178 @@ def loaded_channels(self):
         """
         return set([chan for chan in self.channels if chan.is_loaded()])
 
+    def get_orbital(self):
+        from pyorbital.orbital import Orbital
+        from pyorbital.tlefile import get_norad_line
+
+        sat_line = get_norad_line(self.satname, self.number)
+        self.orbital = Orbital(sat_line)
+        return self.orbital
+
+    def estimate_cth(self, cth_atm="best", time_slot=None):
+        """
+        General purpose
+        ===============
+        Estimation of the cloud top height using the 10.8 micron channel.
+        Limitations: this is the most simple approach, a plain fit of the
+        IR_108 brightness temperature to a temperature profile:
+        * no correction for water vapour or any other trace gas
+        * no viewing angle dependency
+        * no correction for semi-transparent clouds
+        * no special treatment of temperature inversions
+
+        Example call
+        ============
+        data.estimate_cth(cth_atm="best")
+
+        Input arguments
+        ===============
+        cth_atm    temperature profile used to estimate the cloud top height;
+                   possible choices (see estimate_cth in mpop/tools.py):
+                   "standard", "tropics", "midlatitude summer", "midlatitude winter",
+                   "subarctic summer", "subarctic winter"
+                   -> the corresponding AFGL temperature profile is used
+                   "best"
+                   -> choose among "tropics", "midlatitude summer/winter" and
+                      "subarctic summer/winter" according to the central (lon, lat)
+                      of the scene and the observation time
+        time_slot  current observation time as a datetime.datetime object;
+                   can be omitted, in which case self.time_slot is used
+        """
+
+        print "*** Simple estimation of cloud top height with the IR_108 channel"
+
+        # check that IR_108 is loaded
+        loaded_channels = [chn.name for chn in self.loaded_channels()]
+        if "IR_108" not in loaded_channels:
+            print "*** Error in estimate_cth (mpop/scene.py)"
+            print "    IR_108 is required to estimate the CTH, but it is not loaded"
+            quit()
+        else:
+            ir108 = self["IR_108"].data
+
+        # choose the atmospheric temperature profile
+        if cth_atm.lower() == "best":
+            # get the central lon/lat coordinates of the scene
+            (yc, xc) = ir108.shape
+            (lon, lat) = self.area.get_lonlat(yc / 2, xc / 2)
+
+            if time_slot is None:
+                if hasattr(self, 'time_slot'):
+                    time_slot = self.time_slot
+                else:
+                    print "*** Error in estimate_cth (mpop/scene.py)"
+                    print "    cth_atm=\"best\" also requires the time_slot information"
+                    quit()
+
+            # automatic choice of the temperature profile
+            doy = time_slot.timetuple().tm_yday
+            print "... automatic choice of temperature profile: lon=", lon, " lat=", lat, ", time=", str(time_slot), ", doy=", doy
+            if abs(lat) <= 30.0:
+                cth_atm = "tropics"
+            elif 80 <= doy <= 264:
+                # northern hemisphere summer
+                if lat < -60.0:
+                    cth_atm = "subarctic winter"
+                elif -60.0 <= lat < -30.0:
+                    cth_atm = "midlatitude winter"
+                elif 30.0 < lat <= 60.0:
+                    cth_atm = "midlatitude summer"
+                elif 60.0 < lat:
+                    cth_atm = "subarctic summer"
+            else:
+                # northern hemisphere winter
+                if lat < -60.0:
+                    cth_atm = "subarctic summer"
+                elif -60.0 <= lat < -30.0:
+                    cth_atm = "midlatitude summer"
+                elif 30.0 < lat <= 60.0:
+                    cth_atm = "midlatitude winter"
+                elif 60.0 < lat:
+                    cth_atm = "subarctic winter"
+            print "    choosing the temperature profile for", cth_atm
+
+        # estimate the cloud top height by searching the first fit of ir108
+        # with the temperature profile
+        from mpop.tools import estimate_cth
+        cth = estimate_cth(ir108, cth_atm=cth_atm)
+
+        # create a new channel named "CTH"
+        self.channels.append(Channel(name="CTH",
+                                     wavelength_range=[0., 0., 0.],
+                                     resolution=self["IR_108"].resolution,
+                                     data=cth,
+                                     calibration_unit="m"))
+
+        # copy additional information from IR_108
+        self["CTH"].info = self["IR_108"].info
+        self["CTH"].info['units'] = 'm'
+        self["CTH"].area = self["IR_108"].area
+        self["CTH"].area_id = self["IR_108"].area_id
+        self["CTH"].area_def = self["IR_108"].area_def
+        self["CTH"].resolution = self["IR_108"].resolution
+
+        return cth
+
+    def parallax_corr(self, fill="False", estimate_cth=False, cth_atm='best', replace=False):
+        """
+        Perform the CTH parallax correction for all loaded channels.
+        """
+
+        loaded_channels = [chn.name for chn in self.loaded_channels()]
+        if len(loaded_channels) == 0:
+            return
+
+        # loop over the channels and check if one is a normal radiance channel
+        # that has the method to calculate the viewing geometry
+        for chn in self.loaded_channels():
+            if hasattr(chn, 'get_viewing_geometry'):
+                # calculate the viewing geometry of the SEVIRI sensor
+                print "... calculate viewing geometry using ", chn.name
+                (azi, ele) = chn.get_viewing_geometry(self.get_orbital(), self.time_slot)
+                break
+
+        # choose the best way to get the CTH for the parallax correction
+        if not estimate_cth:
+            if "CTTH" in loaded_channels:
+                # make a copy of the CTH, as it might get replaced by its parallax corrected version
+                cth = copy.deepcopy(self["CTTH"].height)
+            else:
+                print "*** Error in parallax_corr (mpop/scene.py)"
+                print "    parallax correction needs some cloud top height information"
+                print "    please load the NWC-SAF CTTH product (recommended) or"
+                print "    activate the option data.parallax_corr(estimate_cth=True)"
+                quit()
+        else:
+            if "IR_108" in loaded_channels:
+                # try to estimate the CTH with IR_108
+                self.estimate_cth(cth_atm=cth_atm)
+                cth = self["CTH"].data
+            else:
+                print "*** Error in parallax_corr (mpop/scene.py)"
+                print "    parallax correction needs some cloud top height information"
+                print "    you asked for the CTH to be estimated from IR_108,"
+                print "    but this channel is not loaded"
+                quit()
+
+        # perform the parallax correction for each loaded channel
+        for chn in self.loaded_channels():
+            if hasattr(chn, 'parallax_corr'):
+                print "... perform parallax correction for ", chn.name
+                if replace:
+                    chn_name_PC = chn.name
+                    print "    replace channel ", chn_name_PC
+                else:
+                    chn_name_PC = chn.name + "_PC"
+                    print "    create channel ", chn_name_PC
+
+                # take care of the parallax correction
+                self[chn_name_PC] = chn.parallax_corr(cth=cth, azi=azi, ele=ele, fill=fill)
+            else:
+                LOG.warning("Channel " + str(chn.name) + " has no attribute parallax_corr, "
+                            "thus the parallax effect won't be corrected.")
+
+        return self
+
     def project(self, dest_area, channels=None, precompute=False,
                 mode=None, radius=None, nprocs=1):
         """Make a copy of the current snapshot projected onto the
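For reference, a short usage sketch of the new Scene-level methods added above; the satellite id, channel list and time slot are placeholders, and the example assumes an NWC-SAF CTTH reader is configured for the scene:

    from datetime import datetime
    from mpop.satellites import GeostationaryFactory

    time_slot = datetime(2016, 7, 5, 12, 0)          # hypothetical time slot
    data = GeostationaryFactory.create_scene("meteosat", "09", "seviri", time_slot)
    data.load(["IR_108", "CTTH"])

    # simple CTH estimate from the 10.8 um brightness temperature; "best"
    # picks the AFGL profile from the scene centre coordinates and the date
    data.estimate_cth(cth_atm="best")

    # parallax-correct all loaded channels using the CTTH heights; corrected
    # copies are stored as e.g. "IR_108_PC" unless replace=True is given
    data.parallax_corr()

With estimate_cth=True, parallax_corr() falls back to the CTH estimated from IR_108 instead of the NWC-SAF CTTH product.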
assume ", cth_atm, " atmosphere for temperature profile" + + if cth_atm.lower()=="standard" or cth_atm.lower()=="s": + z, T = loadtxt(afgl_file, usecols=(0, 1), unpack=True, comments="#") + elif cth_atm.lower()=="tropics" or cth_atm.lower()=="t": + z, T = loadtxt(afgl_file, usecols=(0, 2), unpack=True, comments="#") + elif cth_atm.lower()=="midlatitude summer" or cth_atm.lower()=="ms": + z, T = loadtxt(afgl_file, usecols=(0, 3), unpack=True, comments="#") + elif cth_atm.lower()=="midlatitude winter" or cth_atm.lower()=="ws": + z, T = loadtxt(afgl_file, usecols=(0, 4), unpack=True, comments="#") + elif cth_atm.lower()=="subarctic summer" or cth_atm.lower()=="ss": + z, T = loadtxt(afgl_file, usecols=(0, 5), unpack=True, comments="#") + elif cth_atm.lower()=="subarctic winter" or cth_atm.lower()=="ss": + z, T = loadtxt(afgl_file, usecols=(0, 6), unpack=True, comments="#") + else: + print "*** Error in estimate_cth (mpop/tools.py)" + print "unknown temperature profiel for CTH estimation: cth_atm = ", cth_atm + quit() + + height = zeros(IR_108.shape) + # warmer than lowest level -> clear sky + height[where(IR_108 > T[-1])] = -1. + print " z0(km) z1(km) T0(K) T1(K) number of pixels" + print "------------------------------------------------------" + for i in range(z.size)[::-1]: + + # search for temperatures between layer i-1 and i + ind = np.where( logical_and( T[i-1]< IR_108, IR_108 < T[i]) ) + # interpolate CTH according to ir108 temperature + height[ind] = z[i] + (IR_108[ind]-T[i])/(T[i-1]-T[i]) * (z[i-1]-z[i]) + # verbose output + print " {0:8.1f} {1:8.1f} {2:8.1f} {3:8.1f} {4:8d}".format(z[i], z[i-1], T[i], T[i-1], len(ind[0])) + + # if temperature increases above 8km -> tropopause detected + if z[i]>=8. and T[i] <= T[i-1]: + # no cloud above tropopose + break + # no cloud heights above 20km + if z[i]>=20.: + break + + # if height is still 0 -> cloud colder than tropopause -> cth == tropopause height + height[np.where( height == 0 )] = z[i] + + else: + + Htropo=11.0 # km + # this is an assumption it should be optimized + # by making it dependent on region and season. + # It might be good to include the ITC in the + # region of interest, that would make a fixed Htropo + # value more reliable. + Tmin = np.amin(IR_108) + # for Tmin it might be better to use the 5th or 10th percentile + # else overshoting tops induces further uncertainties + # in the calculation of the cloud height. + # However numpy provides weird results for 5th percentile. + # Hence, for the working version the minima is used + + print "... assume tropopause height ", Htropo, ", tropopause temperature ", Tmin, "K (", Tmin-273.16, "deg C)" + print " and constant temperature gradient 6.5 K/km" + + height = -(IR_108 - Tmin)/6.5 + Htropo + # calculation of the height, the temperature gradient + # 6.5 K/km is an assumption + # derived from USS and MPI standard profiles. It + # has to be improved as well + + # convert to masked array + # convert form km to meter + height = np.ma.masked_where(height <= 0, height, copy=False) * 1000. 
+
+    # debugging helper (disabled): quick-look of the estimated CTH with trollimage
+    if False:
+        from trollimage.image import Image as trollimage
+        from trollimage.colormap import rainbow
+        from copy import deepcopy
+        # cloud top height
+        prop = height
+        min_data = prop.min()
+        max_data = prop.max()
+        print "    estimated CTH(meter) (min/max):", min_data, max_data
+        min_data = 0
+        max_data = 12000
+        colormap = deepcopy(rainbow)
+        colormap.set_range(min_data, max_data)
+        img = trollimage(prop, mode="L")  # , fill_value=[0, 0, 0]
+        img.colorize(colormap)
+        img.show()
+
+    # return the cloud top height in meter
+    return height
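As a worked example of the layer interpolation in estimate_cth above, with the two bracketing levels read from the U.S. standard atmosphere column of afgl.dat (223.3 K at 10 km, 216.8 K at 11 km):

    # pixel with a 10.8 micron brightness temperature of 220.0 K
    T_bt = 220.0
    z0, T0 = 10.0, 223.3        # lower level
    z1, T1 = 11.0, 216.8        # upper level

    cth_km = z0 + (T_bt - T0) / (T1 - T0) * (z1 - z0)
    print cth_km                # ~10.5 km, i.e. ~10500 m after the unit conversion

    # the "tropopause" variant instead assumes an 11 km tropopause and a constant
    # lapse rate of 6.5 K/km; with an illustrative Tmin of 216.7 K it gives a similar value
    cth_tropo_km = -(T_bt - 216.7) / 6.5 + 11.0     # ~10.5 km

These numbers only illustrate the two formulas; the function itself works on the full masked IR_108 array and masks clear-sky pixels.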