Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

adding more products to the initial geocat frontend and changing the …

…variable handling to use patterns; also added a bunch of constants and wired up a first try at the back end for one test domain/satellite/instrument
  • Loading branch information...
commit 99c56ca6d5b5e416a6f9aa72f9e8805b627e583f 1 parent 03a4071
@evas-ssec evas-ssec authored
View
16 py/polar2grid/polar2grid/awips/awips_grids.conf
@@ -47,8 +47,20 @@ terra, modis, mod06_nav, cloud_top_temperature, none, btemp,
terra, modis, geo_nav, ndvi, none, contiguous_index, 211e, 7348, NDVI, SSEC, MODIS, grid211e.ncml, SSEC_AWIPS_MODIS_EAST_1KM_NDVI_%Y%m%d_%H%M.7348
terra, modis, mod07_nav, total_precipitable_water, none, distance, 211e, 7360, TPW, SSEC, MODIS, grid211e.ncml, SSEC_AWIPS_MODIS_EAST_4KM_TPW_%Y%m%d_%H%M.7360
-# TODO, for testing
-aqua, modis, ifr, fog, fog, 203, 0000, IFR Fog, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_IFRPROB_%Y%m%d_%H%M.0000
+# TODO, these are temporary entries for testing the geocat products; this is only a small subset of the grids/satellites/instruments that will need to be configured
+aqua, modis, ifr, fog, percent, 203, 9814, IFR Fog, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_IFRPROB_%Y%m%d_%H%M.9814
+aqua, modis, lifr, fog, percent, 203, 9714, LIFR Fog, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_LIFRPROB_%Y%m%d_%H%M.9714
+aqua, modis, mvfr, fog, percent, 203, 9804, MVFR Fog, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_MVFRPROB_%Y%m%d_%H%M.9804
+aqua, modis, cloud_thickness, none, distance, 203, 9806, CLD T, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_GM06_%Y%m%d_%H%M.9806
+aqua, modis, cloud_phase, none, category, 203, 9724, CLD P, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_CPHASE_%Y%m%d_%H%M.9724
+aqua, modis, ash_height, none, distance, 203, 9801, ASH H, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_1KM_GM02_%Y%m%d_%H%M.9801
+aqua, modis, ash_mass_loading, none, mass_loading, 203, 9800, ASH M, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_1KM_GM01_%Y%m%d_%H%M.9800
+aqua, modis, ash_effective_radius, none, distance, 203, 9802, ASH E, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_1KM_GM03_%Y%m%d_%H%M.9802
+aqua, modis, ash_btd_11_12, none, btemp, 203, 9816, ASH BTD, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_1KM_BTD1112_%Y%m%d_%H%M.9816
+aqua, modis, ash_11, none, btemp, 203, 9820, ASH 11um, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_1KM_IRWBT_%Y%m%d_%H%M.9820
+aqua, modis, ash_visible, none, reflectance, 203, 9818, ASH VIS, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_1KM_VISREF_%Y%m%d_%H%M.9818
+aqua, modis, so2_loading, none, mass_loading, 203, 9809, SO2 L, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_1KM_SO2LOADING_%Y%m%d_%H%M.9809
+aqua, modis, so2_mask, none, category, 203, 9808, SO2 M, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_1KM_SO2MASK_%Y%m%d_%H%M.9808
# TODO, this is missing a lot of information and I'm not planning to do the other versions of these lines until I've got that information
#terra, modis, ice_surface_temperature, none, btemp, 211e, 0000, IST, SSEC, MODIS, grid211e.ncml, SSEC_AWIPS_MODIS_EAST_IST_%Y%m%d_%H%M.0000
View
6 py/polar2grid/polar2grid/geocat2awips.py
@@ -256,11 +256,11 @@ def run_geocat2awips(filepaths,
temp_processes.start()
processes_to_wait_for.append(temp_processes)
else:
- stat = _process_data_sets([filepath], filename **kwargs)
+ stat = _process_data_sets([filepath], filename, **kwargs)
exit_status = exit_status or stat
except StandardError:
- log.error("Could not process file %s" % filepath)
- exit_status = exit_status or len(1) # TODO, not right
+ log.error("Could not process file %s" % filepath, exc_info=True)
+ exit_status = exit_status or 1 # TODO, not right
log.debug("Waiting for subprocesses")
# look through our processes and wait for any processes we saved to wait for
View
24 py/polar2grid_core/polar2grid/core/constants.py
@@ -63,9 +63,13 @@
DEFAULT_FILL_VALUE = -999.0
# Satellites
-SAT_NPP = "npp"
-SAT_TERRA = "terra"
-SAT_AQUA = "aqua"
+SAT_NPP = "npp"
+SAT_TERRA = "terra"
+SAT_AQUA = "aqua"
+SAT_GOES13 = "goes13"
+SAT_GOES15 = "goes15"
+SAT_METEO9 = "meteosat9"
+SAT_MTSAT2 = "mtsat2"
# Instruments
INST_VIIRS = "viirs"
@@ -91,6 +95,19 @@
BKIND_NDVI = "ndvi"
BKIND_TPW = "total_precipitable_water"
BKIND_IFR = "ifr"
+BKIND_LIFR = "lifr"
+BKIND_MVFR = "mvfr"
+BKIND_CLDT = "cloud_thickness"
+BKIND_CLDP = "cloud_phase"
+BKIND_ASHH = "ash_height"
+BKIND_ASHM = "ash_mass_loading"
+BKIND_ASHE = "ash_effective_radius"
+BKIND_ASHB = "ash_btd_11_12"
+BKIND_ASH11 = "ash_11"
+BKIND_SO2L = "so2_loading"
+BKIND_SO2M = "so2_mask"
+BKIND_ASHV = "ash_visible"
+
# Band Identifier
BID_01 = "01"
@@ -128,6 +145,7 @@
DKIND_DISTANCE = "distance" # this is meant to be a distance in the sense of mm, cm, meters, km, or miles
DKIND_PERCENT = "percent"
DKIND_C_INDEX = "contiguous_index" # this represents some abstract ranging index with meaningfully contiguous values (not discrete categories)
+DKIND_M_LOAD = "mass_loading" # this represents an amount (mass usually) over an area
SET_DKINDS = set([
DKIND_RADIANCE,
View
16 py/polar2grid_core/polar2grid/core/rescale_configs/rescale.8bit.conf
@@ -66,8 +66,20 @@ terra, modis, geo_nav, ndvi, none, contiguous_ind
terra, modis, mod06_nav, cloud_top_temperature, none, btemp, btemp_lin, 300.26, 173.16, 10, 250
terra, modis, mod07_nav, total_precipitable_water, none, distance, linear, 30.0, 40.0
-# TODO, temp
-aqua, modis, ifr, fog, fog, linear, 2.55, 0.0
+# TODO, these are temporary entries for testing the geocat products
+aqua, modis, ifr, fog, percent, linear, 2.55, 0.0
+aqua, modis, lifr, fog, percent, linear, 2.55, 0.0
+aqua, modis, mvfr, fog, percent, linear, 2.55, 0.0
+aqua, modis, cloud_thickness, none, distance, linear, 0.17, 0.0
+aqua, modis, cloud_phase, none, category, linear, 30.0, 0.0
+aqua, modis, ash_height, none, distance, linear, 12.75, 0.0
+aqua, modis, ash_mass_loading, none, mass_loading, linear, 5.1, 0.0
+aqua, modis, ash_effective_radius, none, distance, linear, 15.875, 0.0
+aqua, modis, ash_btd_11_12, none, btemp, linear, 25.5, 127.5
+aqua, modis, ash_11, none, btemp, raw
+aqua, modis, ash_visible, none, reflectance, sqrt, 100.0, 2.55
+aqua, modis, so2_loading, none, mass_loading, raw
+aqua, modis, so2_mask, none, category, raw
# TODO, this is guesswork
terra, modis, geo_nav, inversion_strength, none, btemp, btemp_c, 242.0, 660, 2, 418, 1
View
161 py/polar2grid_modis/polar2grid/modis/geocat_guidebook.py
@@ -41,45 +41,137 @@
DEFAULT_SCALE_FACTOR_NAME = 'scale_factor'
DEFAULT_SCALE_METHOD_NAME = 'scaling_method'
-# TODO, there are notes in the table suggesting this will need a part specific to the instrument (like the "goesr" part)
-# TODO, if so, I should find out how that's formatted and generate the different prefixes programatically
-IFR_FOG_PROB_VAR_NAME = 'goesr_fog_nooptprop_IFR_fog_probability'
+# TODO, these patterns are guesses, need to get confirmation from Corey or Justin
+# fog variable patterns
+IFR_FOG_PROB_VAR_PATTERN = r'.*?_IFR_fog_probability'
+LIFR_FOG_PROB_VAR_PATTERN = r'.*?_LIFR_fog_probability'
+MVFR_FOG_PROB_VAR_PATTERN = r'.*?_MVFR_fog_probability'
+# cloud variable patterns
+CLOUD_THICKNESS_VAR_PATTERN = r'.*?_fog_depth'
+CLOUD_PHASE_VAR_PATTERN = r'.*?_cloud_phase'
+# ash variable patterns
+ASH_HEIGHT_VAR_PATTERN = r'.*?_ash_top_height'
+ASH_MASS_LOADING_VAR_PATTERN = r'.*?_ash_mass_loading'
+ASH_EFF_RADIUS_VAR_PATTERN = r'.*?_ash_effective_radius'
+ASH_BTD_11_12_UM_VAR_PATTERN = r'btd1112' # TODO, will this pattern work?
+ASH_11_UM_VAR_PATTERN = r'channel_14_brightness_temperature' # TODO, will this pattern work?
+ASH_VISIBLE_VAR_PATTERN = r'channel_2_reflectance' # TODO, will this pattern work?
+# so2 variable patterns
+SO2_LOADING_VAR_PATTERN = r'.*?_So2_Loading'
+SO2_MASK_VAR_PATTERN = r'.*?_so2_mask'
+
# this is true for the 1km data, FUTURE: when we get to other kinds, this will need to be more sophisticated
MODIS_ROWS_PER_SCAN = 10
-# TODO, additional values for other cases, this should cover Aqua and Terra, but we also expect Goes-12, Goes-15, SNPP (VIIRS), and Meteosat-9 (SEVIRI)
+# TODO, need additional values for other cases: this should cover Aqua and Terra, but we also expect Goes-12, Goes-15, SNPP (VIIRS), Meteosat-9 (SEVIRI), and MTSAT-2
# a regular expression that will match geocat files
GEOCAT_FILE_PATTERN = r'geocatL2\..*?\.\d\d\d\d\d\d\d\.\d\d\d\d\d\d\.hdf'
# a mapping between regular expressions to match files and their band_kind and band_id contents
FILE_CONTENTS_GUIDE = {
- GEOCAT_FILE_PATTERN: {
- BKIND_IFR: [BID_FOG],
- },
+ GEOCAT_FILE_PATTERN: {
+ BKIND_IFR: [BID_FOG],
+ BKIND_LIFR: [BID_FOG],
+ BKIND_MVFR: [BID_FOG],
+
+ BKIND_CLDT: [NOT_APPLICABLE],
+ BKIND_CLDP: [NOT_APPLICABLE],
+
+ BKIND_ASHH: [NOT_APPLICABLE],
+ BKIND_ASHM: [NOT_APPLICABLE],
+ BKIND_ASHE: [NOT_APPLICABLE],
+ BKIND_ASHB: [NOT_APPLICABLE], # has no attrs
+ BKIND_ASH11: [NOT_APPLICABLE], # has no attrs
+ BKIND_ASHV: [NOT_APPLICABLE], # has no attrs
+
+ BKIND_SO2L: [NOT_APPLICABLE],
+ BKIND_SO2M: [NOT_APPLICABLE],
+ },
}
# a mapping between bands and their fill value attribute names
FILL_VALUE_ATTR_NAMES = \
{
- (BKIND_IFR, BID_FOG): DEFAULT_FILL_VALUE_NAME,
+ (BKIND_IFR, BID_FOG): DEFAULT_FILL_VALUE_NAME,
+ (BKIND_LIFR, BID_FOG): DEFAULT_FILL_VALUE_NAME,
+ (BKIND_MVFR, BID_FOG): DEFAULT_FILL_VALUE_NAME,
+
+ (BKIND_CLDT, NOT_APPLICABLE): DEFAULT_FILL_VALUE_NAME,
+ (BKIND_CLDP, NOT_APPLICABLE): DEFAULT_FILL_VALUE_NAME,
+
+ (BKIND_ASHH, NOT_APPLICABLE): DEFAULT_FILL_VALUE_NAME,
+ (BKIND_ASHM, NOT_APPLICABLE): DEFAULT_FILL_VALUE_NAME,
+ (BKIND_ASHE, NOT_APPLICABLE): DEFAULT_FILL_VALUE_NAME,
+ (BKIND_ASHB, NOT_APPLICABLE): None,
+ (BKIND_ASH11, NOT_APPLICABLE): None,
+ (BKIND_ASHV, NOT_APPLICABLE): None,
+
+ (BKIND_SO2L, NOT_APPLICABLE): DEFAULT_FILL_VALUE_NAME,
+ (BKIND_SO2M, NOT_APPLICABLE): DEFAULT_FILL_VALUE_NAME,
+
}
# a mapping between the bands and their data kinds (in the file)
DATA_KINDS = {
- (BKIND_IFR, BID_FOG): DKIND_FOG,
+ (BKIND_IFR, BID_FOG): DKIND_PERCENT,
+ (BKIND_LIFR, BID_FOG): DKIND_PERCENT,
+ (BKIND_MVFR, BID_FOG): DKIND_PERCENT,
+
+ (BKIND_CLDT, NOT_APPLICABLE): DKIND_DISTANCE,
+ (BKIND_CLDP, NOT_APPLICABLE): DKIND_CATEGORY,
+
+ (BKIND_ASHH, NOT_APPLICABLE): DKIND_DISTANCE,
+ (BKIND_ASHM, NOT_APPLICABLE): DKIND_M_LOAD,
+ (BKIND_ASHE, NOT_APPLICABLE): DKIND_DISTANCE,
+ (BKIND_ASHB, NOT_APPLICABLE): DKIND_BTEMP, # this is technically brightness temp difference, is using this type ok?
+ (BKIND_ASH11, NOT_APPLICABLE): DKIND_BTEMP,
+ (BKIND_ASHV, NOT_APPLICABLE): DKIND_REFLECTANCE,
+
+ (BKIND_SO2L, NOT_APPLICABLE): DKIND_M_LOAD,
+ (BKIND_SO2M, NOT_APPLICABLE): DKIND_CATEGORY,
}
# a mapping between the bands and the variable names used in the files to hold them
-VAR_NAMES = {
- (BKIND_IFR, BID_FOG): IFR_FOG_PROB_VAR_NAME,
- }
+VAR_PATTERN = {
+ (BKIND_IFR, BID_FOG): IFR_FOG_PROB_VAR_PATTERN,
+ (BKIND_LIFR, BID_FOG): LIFR_FOG_PROB_VAR_PATTERN,
+ (BKIND_MVFR, BID_FOG): MVFR_FOG_PROB_VAR_PATTERN,
+
+ (BKIND_CLDT, NOT_APPLICABLE): CLOUD_THICKNESS_VAR_PATTERN,
+ (BKIND_CLDP, NOT_APPLICABLE): CLOUD_PHASE_VAR_PATTERN,
+
+ (BKIND_ASHH, NOT_APPLICABLE): ASH_HEIGHT_VAR_PATTERN,
+ (BKIND_ASHM, NOT_APPLICABLE): ASH_MASS_LOADING_VAR_PATTERN,
+ (BKIND_ASHE, NOT_APPLICABLE): ASH_EFF_RADIUS_VAR_PATTERN,
+ (BKIND_ASHB, NOT_APPLICABLE): ASH_BTD_11_12_UM_VAR_PATTERN,
+ (BKIND_ASH11, NOT_APPLICABLE): ASH_11_UM_VAR_PATTERN,
+ (BKIND_ASHV, NOT_APPLICABLE): ASH_VISIBLE_VAR_PATTERN,
+
+ (BKIND_SO2L, NOT_APPLICABLE): SO2_LOADING_VAR_PATTERN,
+ (BKIND_SO2M, NOT_APPLICABLE): SO2_MASK_VAR_PATTERN,
+ }
# a mapping between bands and the names of their scale and offset attributes
RESCALING_ATTRS = \
- {
- (BKIND_IFR, BID_FOG): (DEFAULT_SCALE_FACTOR_NAME, DEFAULT_ADD_OFFSET_NAME, DEFAULT_SCALE_METHOD_NAME),
- }
+ {
+ (BKIND_IFR, BID_FOG): (DEFAULT_SCALE_FACTOR_NAME, DEFAULT_ADD_OFFSET_NAME, DEFAULT_SCALE_METHOD_NAME),
+ (BKIND_LIFR, BID_FOG): (DEFAULT_SCALE_FACTOR_NAME, DEFAULT_ADD_OFFSET_NAME, DEFAULT_SCALE_METHOD_NAME),
+ (BKIND_MVFR, BID_FOG): (DEFAULT_SCALE_FACTOR_NAME, DEFAULT_ADD_OFFSET_NAME, DEFAULT_SCALE_METHOD_NAME),
+
+ (BKIND_CLDT, NOT_APPLICABLE): (DEFAULT_SCALE_FACTOR_NAME, DEFAULT_ADD_OFFSET_NAME, DEFAULT_SCALE_METHOD_NAME),
+ (BKIND_CLDP, NOT_APPLICABLE): (DEFAULT_SCALE_FACTOR_NAME, DEFAULT_ADD_OFFSET_NAME, DEFAULT_SCALE_METHOD_NAME),
+
+ (BKIND_ASHH, NOT_APPLICABLE): (DEFAULT_SCALE_FACTOR_NAME, DEFAULT_ADD_OFFSET_NAME, DEFAULT_SCALE_METHOD_NAME),
+ (BKIND_ASHM, NOT_APPLICABLE): (DEFAULT_SCALE_FACTOR_NAME, DEFAULT_ADD_OFFSET_NAME, DEFAULT_SCALE_METHOD_NAME),
+ (BKIND_ASHE, NOT_APPLICABLE): (DEFAULT_SCALE_FACTOR_NAME, DEFAULT_ADD_OFFSET_NAME, DEFAULT_SCALE_METHOD_NAME),
+ (BKIND_ASHB, NOT_APPLICABLE): (None, None, None),
+ (BKIND_ASH11, NOT_APPLICABLE): (None, None, None),
+ (BKIND_ASHV, NOT_APPLICABLE): (None, None, None),
+
+ (BKIND_SO2L, NOT_APPLICABLE): (DEFAULT_SCALE_FACTOR_NAME, DEFAULT_ADD_OFFSET_NAME, DEFAULT_SCALE_METHOD_NAME),
+ (BKIND_SO2M, NOT_APPLICABLE): (DEFAULT_SCALE_FACTOR_NAME, DEFAULT_ADD_OFFSET_NAME, DEFAULT_SCALE_METHOD_NAME),
+ }
def parse_datetime_from_filename (file_name_string) :
"""parse the given file_name_string and create an appropriate datetime object
@@ -93,7 +185,7 @@ def parse_datetime_from_filename (file_name_string) :
if file_name_string.startswith('geocatL2') :
temp = file_name_string.split('.')
datetime_to_return = datetime.strptime(temp[2] + temp[3], "%Y%j%H%M%S")
- # TODO, I need to confirm that this is the right format for the date
+ # I confirmed with Corey that this is the correct date format
return datetime_to_return
@@ -104,14 +196,31 @@ def get_satellite_from_filename (data_file_name_string) :
"""
satellite_to_return = None
+ instrument_to_return = None
if data_file_name_string.find("Aqua") >= 0 :
- satellite_to_return = SAT_AQUA
+ satellite_to_return = SAT_AQUA
+ instrument_to_return = INST_MODIS
elif data_file_name_string.find("Terra") >= 0 :
- satellite_to_return = SAT_TERRA
- # TODO, there are other types to process, but I need info on how the names will be structured
+ satellite_to_return = SAT_TERRA
+ instrument_to_return = INST_MODIS
+ elif data_file_name_string.find("GOES-13") >= 0 :
+ satellite_to_return = SAT_GOES13
+ # TODO, what instrument name to use here?
+ elif data_file_name_string.find("GOES-15") >= 0 :
+ satellite_to_return = SAT_GOES15
+ # TODO, what instrument name to use here?
+ elif data_file_name_string.find("SNPP") >= 0 :
+ satellite_to_return = SAT_NPP
+ instrument_to_return = INST_VIIRS
+ elif data_file_name_string.find("Meteosat-9") >= 0 :
+ satellite_to_return = SAT_METEO9
+ # TODO, what instrument name to use here?
+ elif data_file_name_string.find("MTSAT-2") >= 0 :
+ satellite_to_return = SAT_MTSAT2
+ # TODO, what instrument name to use here?
- return satellite_to_return
+ return satellite_to_return, instrument_to_return
# TODO, once this is mature, move it into its own module so it can be a utility function
def unscale_data (data, fill_value, scaling_method, scale_factor=None, add_offset=None) :
@@ -128,34 +237,32 @@ def unscale_data (data, fill_value, scaling_method, scale_factor=None, add_offse
if a scale factor or add offset is given as None (or not given) it will not be applied
"""
- to_return = None
+ to_return = data
# figure out which scaling method to use
if scaling_method == SCALING_METHOD_NO_SCALING :
LOG.debug("No scaling required, using existing data.")
- to_return = data
elif scaling_method == SCALING_METHOD_LINEAR :
LOG.debug("Unscaling Geocat data using linear scaling method.")
- to_return = data
not_fill_mask = to_return != fill_value
# if we found a scale use it to scale the data
- if scale_factor is not None :
+    if (scale_factor is not None) and (scale_factor != 1.0) :
to_return[not_fill_mask] *= scale_factor
# if we have an offset use it to offset the data
- if add_offset is not None :
+    if (add_offset is not None) and (add_offset != 0.0) :
to_return[not_fill_mask] += add_offset
elif scaling_method == SCALING_METHOD_LOGARITHM :
- LOG.warn("Unscaling Geocat data using a logarithm method is not yet supported. Unable to unscale data.")
+ LOG.warn("Unscaling Geocat data using a logarithm method is not yet supported. Using raw scaled data.")
elif scaling_method == SCALING_METHOD_SQUARE_ROOT :
- LOG.warn("Unscaling Geocat data using a square root method is not yet supported. Unable to unscale data.")
+ LOG.warn("Unscaling Geocat data using a square root method is not yet supported. Using raw scaled data.")
return to_return
View
68 py/polar2grid_modis/polar2grid/modis/geocat_to_swath.py
@@ -93,15 +93,18 @@ def _load_meta_data (file_objects) :
raise ValueError("One file was expected for processing in _load_meta_data_and_image_data and more were given.")
file_object = file_objects[0]
+ # based on the file name, figure out which satellite and instrument we have
+ temp_sat, temp_inst = geocat_guidebook.get_satellite_from_filename(file_object.file_name)
+
# set up the base dictionaries
meta_data = {
- "sat": geocat_guidebook.get_satellite_from_filename(file_object.file_name),
- "instrument": INST_MODIS, # TODO, this is not a given, once more are wired in this will need to be properly selected
+ "sat": temp_sat,
+ "instrument": temp_inst,
"start_time": geocat_guidebook.parse_datetime_from_filename(file_object.file_name),
"bands" : { },
"rows_per_scan": geocat_guidebook.MODIS_ROWS_PER_SCAN, # TODO, not general?
- # TO FILL IN LATER
+ # these will be filled in later in the process
"lon_fill_value": None,
"lat_fill_value": None,
"fbf_lat": None,
@@ -228,7 +231,7 @@ def _load_data_to_flat_file (file_objects, descriptive_string, variable_name,
if missing_attribute_name is not None :
#print ("attributes: " + str(temp_var_object.attributes()))
- temp_fill_value = temp_var_object.attributes()[missing_attribute_name]
+ temp_fill_value = temp_var_object.attributes().get(missing_attribute_name, fill_value_default)
else :
temp_fill_value = fill_value_default
# if we already have a fill value and it's not the same as the one we just loaded, fix our data
@@ -241,15 +244,15 @@ def _load_data_to_flat_file (file_objects, descriptive_string, variable_name,
# if there's scaling information load it
scale_value = None
if scale_name is not None :
- scale_value = temp_var_object.attributes()[scale_name]
+ scale_value = temp_var_object.attributes().get(scale_name, None)
scale_value = float(scale_value) if scale_value is not None else scale_value
offset_value = None
if offset_name is not None :
- offset_value = temp_var_object.attributes()[offset_name]
+ offset_value = temp_var_object.attributes().get(offset_name, None)
offset_value = float(offset_value) if offset_value is not None else offset_value
scaling_method = geocat_guidebook.SCALING_METHOD_LINEAR
if scale_method_name is not None :
- scaling_method = temp_var_object.attributes()[scale_method_name]
+ scaling_method = temp_var_object.attributes().get(scale_method_name, None)
scaling_method = int(scaling_method) if scaling_method is not None else scaling_method
log.debug("Using scale method " + str(scaling_method) + " scale value " + str(scale_value) + " and offset value " + str(offset_value))
@@ -281,6 +284,39 @@ def _load_data_to_flat_file (file_objects, descriptive_string, variable_name,
return temp_file_name, stats
+def _get_var_name_from_pattern (variable_pattern, file_to_search) :
+ """
+ find the variable name that matches the given pattern and return it
+
+ # TODO, this may be a bad assumption, check example data
+ Note: It's expected that only one variable will match the given pattern.
+ If more than one variable matches, an exception will be raised.
+ """
+
+ variables_list = sorted(file_to_search.datasets().keys())
+
+ toReturn = None
+
+ # look through the variables and find one that matches the pattern
+ for variable_name in variables_list :
+ # does this variable match the pattern?
+ if re.match(variable_pattern, variable_name) is not None :
+
+ # if we already had a match, then there are multiple variables
+ # that could match our pattern, which is bad
+ if toReturn is not None :
+                message = ("Multiple matches for pattern " + str (variable_pattern)
+ + " found (" + str(toReturn) + ", " + str(variable_name)
+ + ") unable to determine which variable to use.")
+ log.warn(message)
+ raise ValueError (message)
+
+ # we found a match, hang on to it
+ toReturn = variable_name
+ log.debug ("Matched variable pattern " + str(variable_pattern) + " with name: " + str(variable_name))
+
+ return toReturn
+
def _load_image_data (meta_data_to_update, cut_bad=False) :
"""
load image data into binary flat files based on the meta data provided
@@ -289,11 +325,19 @@ def _load_image_data (meta_data_to_update, cut_bad=False) :
# process each of the band kind / id sets
for band_kind, band_id in meta_data_to_update["bands"] :
+ file_temp = [meta_data_to_update["bands"][(band_kind, band_id)]["file_obj"].file_object]
+ var_name_temp = _get_var_name_from_pattern (geocat_guidebook.VAR_PATTERN[(band_kind, band_id)], file_temp[0])
+
+ # if we couldn't find a variable, log something and move on
+ if var_name_temp is None :
+ log.debug ("Could not find a variable matching pattern " + str(geocat_guidebook.VAR_PATTERN[(band_kind, band_id)]))
+ continue
+
# load the data into a flat file
(scale_name, offset_name, scaling_method) = geocat_guidebook.RESCALING_ATTRS[(band_kind, band_id)]
- temp_image_file_name, image_stats = _load_data_to_flat_file ([meta_data_to_update["bands"][(band_kind, band_id)]["file_obj"].file_object],
+ temp_image_file_name, image_stats = _load_data_to_flat_file (file_temp,
str(band_kind) + str(band_id),
- geocat_guidebook.VAR_NAMES[(band_kind, band_id)],
+ var_name_temp,
missing_attribute_name=geocat_guidebook.FILL_VALUE_ATTR_NAMES[(band_kind, band_id)],
scale_method_name=scaling_method, scale_name=scale_name, offset_name=offset_name)
@@ -313,6 +357,7 @@ def _load_image_data (meta_data_to_update, cut_bad=False) :
meta_data_to_update["bands"][(band_kind, band_id)]["swath_rows"] = rows
meta_data_to_update["bands"][(band_kind, band_id)]["swath_cols"] = cols
meta_data_to_update["bands"][(band_kind, band_id)]["swath_scans"] = rows / geocat_guidebook.MODIS_ROWS_PER_SCAN
+ # TODO, the actual variable name may be needed later to discriminate some processing steps, store this at some point?
if rows != meta_data_to_update["swath_rows"] or cols != meta_data_to_update["swath_cols"]:
msg = ("Expected %d rows and %d cols, but band %s %s had %d rows and %d cols"
@@ -339,7 +384,8 @@ def get_swaths(ifilepaths, cut_bad=False, nav_uid=None):
TODO, for now this doesn't do anything!
"""
- # TODO, for now this method only handles one file, eventually it will need to handle more
+ # TODO, for now this method only handles one file, for geocat it may not ever handle more than one
+    # TODO, the interface was originally intended to allow multiple granules to be concatenated
if len(ifilepaths) != 1 :
raise ValueError("One file was expected for processing in get_swaths and more were given.")
@@ -373,9 +419,11 @@ def make_swaths(self, filepaths, **kwargs):
try:
temp_meta_data = get_swaths([temp_filepath], **kwargs)
+
temp_bands = { } if "bands" not in meta_data else meta_data["bands"]
meta_data.update(temp_meta_data)
meta_data["bands"].update(temp_bands)
+
except StandardError:
log.error("Swath creation failed")
log.debug("Swath creation error:", exc_info=1)
Please sign in to comment.
Something went wrong with that request. Please try again.