Skip to content
Browse files

more geocat stuff

  • Loading branch information...
1 parent 0f8d9ee commit 28ec84c36602898809e785a88fd86c7cf4f7f85d @evas-ssec evas-ssec committed May 17, 2013
View
17 py/polar2grid/polar2grid/awips/awips_grids.conf
@@ -48,11 +48,18 @@ terra, modis, geo_nav, ndvi, none, contiguous_ind
terra, modis, mod07_nav, total_precipitable_water, none, distance, 211e, 7360, TPW, SSEC, MODIS, grid211e.ncml, SSEC_AWIPS_MODIS_EAST_4KM_TPW_%Y%m%d_%H%M.7360
# TODO, these are temporary entries for testing the geocat products; this is only a small subset of the grids/satellites/instruments that will need to be configured
-aqua, modis, geo_nav, ifr, fog, percent, 203, 9814, IFR Fog, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_IFRPROB_%Y%m%d_%H%M.9814
-aqua, modis, geo_nav, lifr, fog, percent, 203, 9714, LIFR Fog, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_LIFRPROB_%Y%m%d_%H%M.9714
-aqua, modis, geo_nav, mvfr, fog, percent, 203, 9804, LIFR Fog, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_MVFRPROB_%Y%m%d_%H%M.9804
-aqua, modis, geo_nav, cloud_thickness, none, distance, 203, 9806, CLD T, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_GM06_%Y%m%d_%H%M.9806
-aqua, modis, geo_nav, cloud_phase, none, category, 203, 9724, CLD P, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_CPHASE_%Y%m%d_%H%M.9724
+aqua, modis, geo_nav, ifr, fog, percent, 207, 9814, IFR Fog, SSEC, AQUA-MODIS, grid207_2km.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_IFRPROB_%Y%m%d_%H%M.9814
+aqua, modis, geo_nav, lifr, fog, percent, 207, 9714, LIFR Fog, SSEC, AQUA-MODIS, grid207_2km.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_LIFRPROB_%Y%m%d_%H%M.9714
+aqua, modis, geo_nav, mvfr, fog, percent, 207, 9804, MVFR Fog, SSEC, AQUA-MODIS, grid207_2km.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_MVFRPROB_%Y%m%d_%H%M.9804
+aqua, modis, geo_nav, cloud_thickness, none, distance, 207, 9806, CLD T, SSEC, AQUA-MODIS, grid207_2km.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_GM06_%Y%m%d_%H%M.9806
+aqua, modis, geo_nav, cloud_phase, none, category, 207, 9724, CLD P, SSEC, AQUA-MODIS, grid207_2km.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_CPHASE_%Y%m%d_%H%M.9724
+
+#aqua, modis, geo_nav, ifr, fog, percent, 203, 9814, IFR Fog, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_IFRPROB_%Y%m%d_%H%M.9814
+#aqua, modis, geo_nav, lifr, fog, percent, 203, 9714, LIFR Fog, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_LIFRPROB_%Y%m%d_%H%M.9714
+#aqua, modis, geo_nav, mvfr, fog, percent, 203, 9804, MVFR Fog, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_MVFRPROB_%Y%m%d_%H%M.9804
+#aqua, modis, geo_nav, cloud_thickness, none, distance, 203, 9806, CLD T, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_GM06_%Y%m%d_%H%M.9806
+#aqua, modis, geo_nav, cloud_phase, none, category, 203, 9724, CLD P, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_2KM_CPHASE_%Y%m%d_%H%M.9724
+
aqua, modis, geo_nav, ash_height, none, distance, 203, 9801, ASH H, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_1KM_GM02_%Y%m%d_%H%M.9801
aqua, modis, geo_nav, ash_mass_loading, none, mass_loading, 203, 9800, ASH M, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_1KM_GM01_%Y%m%d_%H%M.9800
aqua, modis, geo_nav, ash_effective_radius, none, distance, 203, 9802, ASH E, SSEC, AQUA-MODIS, grid203.ncml, SSEC_AWIPS_GEOCAT-MOD_ALAS_1KM_GM03_%Y%m%d_%H%M.9802
View
262 py/polar2grid/polar2grid/geocat2awips.py
@@ -13,39 +13,27 @@
"""
__docformat__ = "restructuredtext en"
-from polar2grid.core import Workspace
-from polar2grid.core.constants import *
+from polar2grid.core import Workspace
+from polar2grid.core.constants import *
from polar2grid.core.glue_utils import *
-from .grids.grids import create_grid_jobs, Cartographer
-from polar2grid.modis import FILE_CONTENTS_GUIDE
-
-from polar2grid.modis import Geo_Frontend
-
-from .remap import remap_bands
-from .awips import Backend
+from polar2grid.core.time_utils import utc_now
+from .grids.grids import create_grid_jobs, Cartographer
+from polar2grid.modis import FILE_CONTENTS_GUIDE
+from polar2grid.modis import Geo_Frontend
+from .awips import Backend
+import remap
import os
import sys
import re
import logging
-from multiprocessing import Process
-import numpy
-from glob import glob
-from collections import defaultdict
-log = logging.getLogger(__name__)
-LOG_FN = os.environ.get("GEOCAT2AWIPS_LOG", "./geocat2awips.log")
-
-def exc_handler(exc_type, exc_value, traceback):
- """An execption handler/hook that will only be called if an exception
- isn't called. This will save us from print tracebacks or unrecognizable
- errors to the user's console.
+from multiprocessing import Process
+from datetime import datetime
- Note, however, that this doesn't effect code in a separate process as the
- exception never gets raised in the parent.
- """
- logging.getLogger(__name__).error(exc_value)
- logging.getLogger('traceback').error(exc_value, exc_info=(exc_type,exc_value,traceback))
+log = logging.getLogger(__name__)
+GLUE_NAME = "geocat2awips"
+LOG_FN = os.environ.get("GEOCAT2AWIPS_LOG", None) # None interpreted in main
# TODO this is currently being used in a slipshod manner, move to the newer p2g pattern of deleting files
def clean_up_files():
@@ -69,22 +57,29 @@ def clean_up_files():
def process_data_sets(filepaths,
nav_uid,
fornav_D=None, fornav_d=None,
+ grid_configs=None,
+ fornav_m=True, #
forced_grid=None,
forced_gpd=None, forced_nc=None,
num_procs=1,
rescale_config=None,
- backend_config=None
+ backend_config=None,
) :
"""Process all the files provided from start to finish,
from filename to AWIPS NC file.
Note: all files provided are expected to share a navigation source.
"""
+ log.debug("Processing %s navigation set" % (nav_uid,))
status_to_return = STATUS_SUCCESS
+ # Handle parameters
+ grid_configs = grid_configs or tuple() # needs to be a tuple for use
+
# create the front and backend objects
# these calls should load any configuration files needed
+ cartographer = Cartographer(*grid_configs)
frontend_object = Geo_Frontend()
backend_object = Backend(rescale_config=rescale_config, backend_config=backend_config)
@@ -119,7 +114,7 @@ def process_data_sets(filepaths,
try:
log.info("Determining what grids the data fits in...")
grid_jobs = create_grid_jobs(sat, instrument, nav_uid, band_info,
- backend_object, Cartographer(), # TODO, this is a stopgap, ultimately will need a proper Cartographer object to reuse
+ backend_object, cartographer,
fbf_lat=flatbinaryfilename_lat,
fbf_lon=flatbinaryfilename_lon,
forced_grids=forced_grid,
@@ -131,15 +126,17 @@ def process_data_sets(filepaths,
status_to_return |= STATUS_GDETER_FAIL
return status_to_return
-
### Remap the data
try:
- remapped_jobs = remap_bands(sat, instrument, nav_uid,
- flatbinaryfilename_lon, flatbinaryfilename_lat, grid_jobs,
- num_procs=num_procs, fornav_d=fornav_d, fornav_D=fornav_D,
- lat_fill_value=lat_fill,
- lon_fill_value=lon_fill,
- )
+ remapped_jobs = remap.remap_bands(sat, instrument, nav_uid,
+ flatbinaryfilename_lon,
+ flatbinaryfilename_lat, grid_jobs,
+ num_procs=num_procs,
+ fornav_d=fornav_d, fornav_D=fornav_D,
+ lat_fill_value=lat_fill,
+ lon_fill_value=lon_fill,
+ do_single_sample=fornav_m,
+ )
except StandardError:
log.debug("Remapping Error:", exc_info=1)
log.error("Remapping data failed")
@@ -195,8 +192,8 @@ def process_data_sets(filepaths,
return status_to_return
def _process_data_sets(*args, **kwargs):
- """Wrapper function around `process_data_sets` so that it can called
- properly from `run_geocat2awips`, where the exitcode is the actual
+ """Wrapper function around `process_data_sets` so that it can be
+ called properly from `run_glue`, where the exitcode is the actual
returned value from `process_data_sets`.
This function also checks for exceptions other than the ones already
@@ -220,11 +217,10 @@ def _process_data_sets(*args, **kwargs):
sys.exit(-1)
-def run_geocat2awips(filepaths,
- multiprocess=True,
- **kwargs):
- """Go through the motions of converting
- a Geocat hdf file into a AWIPS NetCDF file.
+def run_glue(filepaths,
+ multiprocess=True,
+ **kwargs):
+    """Convert a Geocat hdf file into an AWIPS NetCDF file.
1. geocat_guidebook.py : Info on what's in the files
2. geocat_to_swath.py : Code to load the data
@@ -294,89 +290,149 @@ def main():
# Logging related
parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0,
help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)')
-
- # Multiprocessing related
- parser.add_argument('--sp', dest='single_process', default=False, action='store_true',
- help="Processing is sequential instead of one process per navigation group")
- parser.add_argument('--num-procs', dest="num_procs", default=1,
- help="Specify number of processes that can be used to run ll2cr/fornav calls in parallel")
-
- # Input related
- parser.add_argument('-f', dest='get_files', default=False, action="store_true",
- help="Specify that hdf files are listed, not a directory")
- parser.add_argument('data_files', nargs="+",
- help="Data directory where satellite data is stored or list of data filenames if '-f' is specified")
+ parser.add_argument('-l', '--log', dest="log_fn", default=None,
+ help="""specify the log filename, default
+<gluescript>_%%Y%%m%%d_%%H%%M%%S. Date information is provided from data filename
+through strftime. Current time if no files.""")
+ parser.add_argument('--debug', dest="debug_mode", default=False,
+ action='store_true',
+ help="Enter debug mode. Keeping intermediate files.")
+
+ # general behavior
+ #parser.add_argument('-h', '--help', dest='help', default=False, action='store_true',
+ # help="Print help describing how to use this command line tool")
# Remapping and grid related
- parser.add_argument('-D', dest='fornav_D', default=5,
+ parser.add_argument('--fornav-D', dest='fornav_D', default=10,
help="Specify the -D option for fornav")
- parser.add_argument('-d', dest='fornav_d', default=1,
+ parser.add_argument('--fornav-d', dest='fornav_d', default=1,
help="Specify the -d option for fornav")
- parser.add_argument('-g', '--grids', dest='forced_grids', nargs="+", default="all",
+ parser.add_argument('--fornav-m', dest='fornav_m', default=False, action='store_true',
+ help="Specify the -m option for fornav")
+ parser.add_argument('--grid-configs', dest='grid_configs', nargs="+", default=tuple(),
+ help="Specify additional grid configuration files ('grids.conf' for built-ins)")
+ parser.add_argument('-g', '--grids', dest='forced_grids', nargs="+", default=["all"],
help="Force remapping to only some grids, defaults to 'all', use 'all' for determination")
parser.add_argument('--gpd', dest='forced_gpd', default=None,
help="Specify a different gpd file to use")
- # Backend related
- parser.add_argument('--rescale-config', dest='rescale_config', default=None,
- help="specify alternate rescale configuration file")
+ # multiprocess related
+ parser.add_argument('--sp', dest='single_process', default=False, action='store_true',
+ help="Processing is sequential instead of one process per navigation group")
+ parser.add_argument('--num-procs', dest="num_procs", default=1,
+ help="Specify number of processes that can be used to run ll2cr/fornav calls in parallel")
+
+ # Backend Specific
+ parser.add_argument('--nc', dest='forced_nc', default=None,
+ help="Specify a different ncml file to use")
parser.add_argument('--backend-config', dest='backend_config', default=None,
help="specify alternate backend configuration file")
+ parser.add_argument('--rescale-config', dest='rescale_config', default=None,
+ help="specify alternate rescale configuration file")
- # Output file related
- parser.add_argument('-k', '--keep', dest='remove_prev', default=True, action='store_true',
- help="Don't delete any files that were previously made (WARNING: processing may not run successfully)")
- parser.add_argument('--nc', dest='forced_nc', default=None,
- help="Specify a different nc file to use")
+ # Input related
+ group = parser.add_mutually_exclusive_group(required=True)
+ group.add_argument('-f', dest='data_files', nargs="+",
+ help="List of one or more hdf files")
+ group.add_argument('-d', dest='data_dir', nargs="?",
+ help="Data directory to look for input data files")
+ group.add_argument('-R', dest='remove_prev', default=False, action='store_true',
+ help="Delete any files that may conflict with future processing. Processing is not done with this flag.")
args = parser.parse_args()
-
+
+ # if they asked for help, print that and stop now
+ #if args.help :
+ # parser.print_help()
+ # sys.exit(0)
+
+ # Figure out what the log should be named
+ log_fn = args.log_fn
+ if args.remove_prev:
+ # They didn't need to specify a filename
+ if log_fn is None :
+ log_fn = GLUE_NAME + "_removal.log"
+ file_start_time = utc_now()
+ else:
+ # Get input files and the first filename for the logging datetime
+ if args.data_files:
+ hdf_files = args.data_files[:]
+ elif args.data_dir:
+ base_dir = os.path.abspath(os.path.expanduser(args.data_dir))
+ hdf_files = [ os.path.join(base_dir,x) for x in os.listdir(base_dir) ]
+ else:
+ # Should never get here because argparse mexc group
+ log.error("Wrong number of arguments")
+ parser.print_help()
+ return -1
+
+ # Handle the user using a '~' for their home directory
+ hdf_files = [ os.path.realpath(os.path.expanduser(x)) for x in sorted(hdf_files) ]
+ for hdf_file in hdf_files:
+ if not os.path.exists(hdf_file):
+ print "ERROR: File '%s' doesn't exist" % (hdf_file,)
+ return -1
+
+ # Get the date of the first file if provided
+ file_start_time = sorted(Geo_Frontend.parse_datetimes_from_filepaths(hdf_files))[0]
+
+ # Determine the log filename
+ if log_fn is None :
+ log_fn = GLUE_NAME + "_%Y%m%d_%H%M%S.log"
+ log_fn = datetime.strftime(file_start_time, log_fn)
+
levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
- setup_logging(log_filename=LOG_FN, console_level=levels[min(3, args.verbosity)])
+ setup_logging(log_filename=log_fn, console_level=levels[min(3, args.verbosity)])
# Don't set this up until after you have setup logging
- sys.excepthook = exc_handler
+ sys.excepthook = create_exc_handler(GLUE_NAME)
+
+ # Remove previous intermediate and product files
+ if args.remove_prev:
+ log.info("Removing any possible conflicting files")
+ remove_file_patterns(
+ Geo_Frontend.removable_file_patterns,
+ remap.removable_file_patterns,
+ Backend.removable_file_patterns
+ )
+ return 0
fornav_D = int(args.fornav_D)
fornav_d = int(args.fornav_d)
+ fornav_m = args.fornav_m
num_procs = int(args.num_procs)
forced_grids = args.forced_grids
- if forced_grids == 'all': forced_grids = None
- if args.forced_gpd is not None and not os.path.exists(args.forced_gpd):
- log.error("Specified gpd file does not exist '%s'" % args.forced_gpd)
- return -1
- if args.forced_nc is not None and not os.path.exists(args.forced_nc):
- log.error("Specified nc file does not exist '%s'" % args.forced_nc)
- return -1
-
- if "help" in args.data_files:
- parser.print_help()
- sys.exit(0)
- elif "remove" in args.data_files:
- log.debug("Removing previous products")
- clean_up_files()
- sys.exit(0)
-
- if args.get_files:
- hdf_files = args.data_files[:]
- elif len(args.data_files) == 1:
- base_dir = os.path.abspath(os.path.expanduser(args[0]))
- hdf_files = [ os.path.join(base_dir,x) for x in os.listdir(base_dir) if x.endswith(".hdf") ]
- else:
- log.error("Wrong number of arguments")
- parser.print_help()
- return -1
-
- if args.remove_prev:
- log.debug("Removing any previous files")
- clean_up_files()
-
- stat = run_geocat2awips(hdf_files, fornav_D=fornav_D, fornav_d=fornav_d,
- forced_gpd=args.forced_gpd, forced_nc=args.forced_nc,
- forced_grid=forced_grids,
- rescale_config=args.rescale_config,
- backend_config=args.backend_config,
- multiprocess=not args.single_process, num_procs=num_procs)
+ # Assumes 'all' doesn't appear in the list twice
+ if 'all' in forced_grids :
+ forced_grids[forced_grids.index('all')] = None
+ if args.forced_gpd is not None:
+ args.forced_gpd = os.path.realpath(os.path.expanduser(args.forced_gpd))
+ if not os.path.exists(args.forced_gpd):
+ log.error("Specified gpd file does not exist '%s'" % args.forced_gpd)
+ return -1
+ if args.forced_nc is not None:
+ args.forced_nc = os.path.realpath(os.path.expanduser(args.forced_nc))
+ if not os.path.exists(args.forced_nc):
+ log.error("Specified nc file does not exist '%s'" % args.forced_nc)
+ return -1
+
+ stat = run_glue(hdf_files,
+ fornav_D=fornav_D, fornav_d=fornav_d, fornav_m=fornav_m,
+ grid_configs=args.grid_configs, #
+ forced_grid=forced_grids, forced_gpd=args.forced_gpd, forced_nc=args.forced_nc,
+ #create_pseudo=args.create_pseudo, # geocat2awips doesn't make pseudo bands
+ multiprocess=not args.single_process, num_procs=num_procs,
+ rescale_config=args.rescale_config, backend_config=args.backend_config
+ )
+ log.debug("Processing returned status code: %d" % stat)
+
+ # Remove intermediate files (not the backend)
+ if not stat and not args.debug_mode:
+ log.info("Removing intermediate products")
+ remove_file_patterns(
+ Geo_Frontend.removable_file_patterns,
+ remap.removable_file_patterns
+ )
return stat
View
1 py/polar2grid/polar2grid/grids/grids.conf
@@ -9,6 +9,7 @@
dwd_germany, gpd, griddwd_germany.gpd, -2.000, 56.000, 25.000, 56.000, 25.000, 40.000, -2.000, 40.000
australia, gpd, australia.gpd, 105.000, 5.000, 175.000, 5.000, 175.000, -65.000, 105.000, -65.000
australia2, gpd, australia2.gpd, 105.000, 5.000, 175.000, 5.000, 175.000, -65.000, 105.000, -65.000
+207, gpd, grid207_2km.gpd, 153.690, 63.975, -93.690, 63.975, -124.359, 42.085, -175.641, 42.085
# PROJ.4 Grids
# proj4 grids may have None for sizes, origins, or pixel sizes to be considered 'dynamic'
# pixel size or grid size must be specified
View
4 py/polar2grid/polar2grid/modis2awips.py
@@ -290,12 +290,16 @@ def main(argv = sys.argv[1:]):
parser.add_argument('--debug', dest="debug_mode", default=False,
action='store_true',
help="Enter debug mode. Keeping intermediate files.")
+
+ # Remapping and grid related
parser.add_argument('--fornav-D', dest='fornav_D', default=10,
help="Specify the -D option for fornav")
parser.add_argument('--fornav-d', dest='fornav_d', default=1,
help="Specify the -d option for fornav")
parser.add_argument('--fornav-m', dest='fornav_m', default=False, action='store_true',
help="Specify the -m option for fornav")
+
+ # multiprocess related
parser.add_argument('--sp', dest='single_process', default=False, action='store_true',
help="Processing is sequential instead of one process per navigation group")
parser.add_argument('--num-procs', dest="num_procs", default=1,
View
19 py/polar2grid_modis/polar2grid/modis/geocat_guidebook.py
@@ -60,14 +60,24 @@
SO2_LOADING_VAR_PATTERN = r'.*?_So2_Loading'
SO2_MASK_VAR_PATTERN = r'.*?_so2_mask'
-
# this is true for the 1km data, FUTURE: when we get to other kinds, this will need to be more sophisticated
MODIS_ROWS_PER_SCAN = 10
# TODO, need additional values for other cases: this should cover Aqua and Terra, but we also expect Goes-12, Goes-15, SNPP (VIIRS), Meteosat-9 (SEVIRI), and MTSAT-2
+# more general rows per scan dictionary
+ROWS_PER_SCAN = {
+ (SAT_AQUA, INST_MODIS) : MODIS_ROWS_PER_SCAN,
+ (SAT_TERRA, INST_MODIS) : MODIS_ROWS_PER_SCAN,
+ }
+
# a regular expression that will match geocat files
GEOCAT_FILE_PATTERN = r'geocatL2\..*?\.\d\d\d\d\d\d\d\.\d\d\d\d\d\d\.hdf'
+# not sure if this will work this way in the long run
+GEO_FILE_GROUPING = {
+ GEO_NAV_UID: [GEOCAT_FILE_PATTERN],
+ }
+
# a mapping between regular expressions to match files and their band_kind and band_id contents
FILE_CONTENTS_GUIDE = {
GEOCAT_FILE_PATTERN: {
@@ -85,11 +95,8 @@
BKIND_ASH11: [NOT_APPLICABLE], # has no attrs
BKIND_ASHV: [NOT_APPLICABLE], # has no attrs
- # TODO, right now when these aren't present in a file it will include
- # TODO, them in the meta data anyway and cause weird errors to appear
- # TODO, to fix this I need to change how it handles variable loading, I think
-# BKIND_SO2L: [NOT_APPLICABLE],
-# BKIND_SO2M: [NOT_APPLICABLE],
+ BKIND_SO2L: [NOT_APPLICABLE], # not present in current test files
+ BKIND_SO2M: [NOT_APPLICABLE], # not present in current test files
},
}
View
28 py/polar2grid_modis/polar2grid/modis/geocat_to_swath.py
@@ -95,14 +95,16 @@ def _load_meta_data (file_objects) :
# based on the file name, figure out which satellite and instrument we have
temp_sat, temp_inst = geocat_guidebook.get_satellite_from_filename(file_object.file_name)
+ # also figure out the rows per scan
+ temp_rows_per_scan = geocat_guidebook.ROWS_PER_SCAN[(temp_sat, temp_inst)]
# set up the base dictionaries
meta_data = {
"sat": temp_sat,
"instrument": temp_inst,
"start_time": geocat_guidebook.parse_datetime_from_filename(file_object.file_name),
"bands" : { },
- "rows_per_scan": geocat_guidebook.MODIS_ROWS_PER_SCAN, # TODO, not general?
+ "rows_per_scan": temp_rows_per_scan,
# these will be filled in later in the process
"lon_fill_value": None,
@@ -124,13 +126,13 @@ def _load_meta_data (file_objects) :
data_kind_const = geocat_guidebook.DATA_KINDS[(band_kind, band_number)]
- # TODO, when there are multiple files, this will algorithm will need to change
+ # TODO, when there are multiple files, this algorithm will need to change
meta_data["bands"][(band_kind, band_number)] = {
"data_kind": data_kind_const,
"remap_data_as": data_kind_const,
"kind": band_kind,
"band": band_number,
- "rows_per_scan": geocat_guidebook.MODIS_ROWS_PER_SCAN, # TODO not a long term solution
+ "rows_per_scan": temp_rows_per_scan,
# TO FILL IN LATER
"fill_value": None,
@@ -175,8 +177,8 @@ def _load_geonav_data (meta_data_to_update, file_info_objects, nav_uid=None, cut
# rename the flat file to a more descriptive name
shape_temp = lat_stats["shape"]
suffix = '.real4.' + '.'.join(str(x) for x in reversed(shape_temp))
- new_lat_file_name = "latitude_" + suffix # TODO what to use? + str(nav_uid) + suffix
- new_lon_file_name = "longitude_" + suffix # TODO what to use? + str(nav_uid) + suffix
+ new_lat_file_name = "latitude_" + str(nav_uid) + "_" + suffix
+ new_lon_file_name = "longitude_" + str(nav_uid) + "_" + suffix
os.rename(lat_temp_file_name, new_lat_file_name)
os.rename(lon_temp_file_name, new_lon_file_name)
@@ -188,7 +190,7 @@ def _load_geonav_data (meta_data_to_update, file_info_objects, nav_uid=None, cut
meta_data_to_update["fbf_lon"] = new_lon_file_name
meta_data_to_update["swath_rows"] = rows
meta_data_to_update["swath_cols"] = cols
- meta_data_to_update["swath_scans"] = rows / geocat_guidebook.MODIS_ROWS_PER_SCAN # TODO, not a long term solution
+ meta_data_to_update["swath_scans"] = rows / meta_data_to_update["rows_per_scan"]
meta_data_to_update["nav_set_uid"] = nav_uid
def _load_data_to_flat_file (file_objects, descriptive_string, variable_name,
@@ -320,6 +322,9 @@ def _load_image_data (meta_data_to_update, cut_bad=False) :
load image data into binary flat files based on the meta data provided
"""
+ # keep a list of things we need to remove from the dictionary
+ keys_to_remove = [ ]
+
# process each of the band kind / id sets
for band_kind, band_id in meta_data_to_update["bands"] :
@@ -328,8 +333,9 @@ def _load_image_data (meta_data_to_update, cut_bad=False) :
# if we couldn't find a variable, log something and move on
if var_name_temp is None :
- log.debug ("Could not find a variable matching pattern " + str(geocat_guidebook.VAR_PATTERN[(band_kind, band_id)]))
-
+ log.debug ("Could not find a variable matching pattern: " + str(geocat_guidebook.VAR_PATTERN[(band_kind, band_id)]))
+ log.debug ("Skipping the associated band (" + str(band_kind) + ", " + str(band_id) + ") for this file.")
+ keys_to_remove.append((band_kind, band_id))
continue
# load the data into a flat file
@@ -355,7 +361,7 @@ def _load_image_data (meta_data_to_update, cut_bad=False) :
meta_data_to_update["bands"][(band_kind, band_id)]["fbf_img"] = new_img_file_name
meta_data_to_update["bands"][(band_kind, band_id)]["swath_rows"] = rows
meta_data_to_update["bands"][(band_kind, band_id)]["swath_cols"] = cols
- meta_data_to_update["bands"][(band_kind, band_id)]["swath_scans"] = rows / geocat_guidebook.MODIS_ROWS_PER_SCAN
+ meta_data_to_update["bands"][(band_kind, band_id)]["swath_scans"] = rows / meta_data_to_update["rows_per_scan"]
# TODO, the actual variable name may be needed later to discriminate some processing steps, store this at some point?
log.debug (str((band_kind, band_id)) + " has fill value: " + str(meta_data_to_update["bands"][(band_kind, band_id)]["fill_value"]))
@@ -365,6 +371,10 @@ def _load_image_data (meta_data_to_update, cut_bad=False) :
% (meta_data_to_update["swath_rows"], meta_data_to_update["swath_cols"], band_kind, band_id, rows, cols))
log.error(msg)
raise ValueError(msg)
+
+ # remove the bands that we couldn't process
+ for band_kind, band_id in keys_to_remove :
+ del meta_data_to_update["bands"][(band_kind, band_id)]
def get_swaths(ifilepaths, cut_bad=False, nav_uid=None):
"""Takes geocat hdf files and creates flat binary files for the information

0 comments on commit 28ec84c

Please sign in to comment.
Something went wrong with that request. Please try again.