Merge branch 'viirs_crefl'

Added all CREFL features. Updated crefl frontend to use the updated
VIIRS frontend boundary function (fill value).

Conflicts:
	py/polar2grid_viirs/polar2grid/viirs/viirs_guidebook.py
commit 152364703efb54d6a2c1d6c29ee078c3aadc9b42 (2 parents: 5bc4b80 + 14d8ba6)
Authored by @davidh-ssec
5 py/Makefile
@@ -34,14 +34,15 @@ DIST_DIR ?= ./dist
CORE_PKG_DIR = polar2grid_core
VIIRS_PKG_DIR = polar2grid_viirs
MODIS_PKG_DIR = polar2grid_modis
+CREFL_PKG_DIR = polar2grid_crefl
MAIN_PKG_DIR = polar2grid
# Make sure target names are just the dir name with a suffix
# See targets for substitution
-ALL_PKG_DIRS = $(CORE_PKG_DIR) $(VIIRS_PKG_DIR) $(MODIS_PKG_DIR) $(MAIN_PKG_DIR)
+ALL_PKG_DIRS = $(CORE_PKG_DIR) $(VIIRS_PKG_DIR) $(MODIS_PKG_DIR) $(CREFL_PKG_DIR) $(MAIN_PKG_DIR)
ALL_PKG_SDIST = $(ALL_PKG_DIRS:=_sdist)
#ALL_PKG_SDIST = polar2grid_core_sdist polar2grid_viirs_sdist polar2grid_sdist
-ALL_PKG_DEV = polar2grid_core_dev polar2grid_viirs_dev polar2grid_modis_dev polar2grid_dev
+ALL_PKG_DEV = polar2grid_core_dev polar2grid_viirs_dev polar2grid_modis_dev polar2grid_crefl_dev polar2grid_dev
#DOC_DIR = /home/davidh/test_doc
DOC_DIR = /var/apache/www/htdocs/software/polar2grid
523 py/polar2grid/polar2grid/crefl2gtiff.py
@@ -0,0 +1,523 @@
+#!/usr/bin/env python
+# encoding: utf-8
+"""Script that uses the `polar2grid` toolbox of modules to take corrected
+reflectance (.hdf) files and create a properly scaled geotiff file. Has the
+ability to make RGB geotiffs as well.
+
+:author: David Hoese (davidh)
+:contact: david.hoese@ssec.wisc.edu
+:organization: Space Science and Engineering Center (SSEC)
+:copyright: Copyright (c) 2013 University of Wisconsin SSEC. All rights reserved.
+:date: April 2013
+:license: GNU GPLv3
+
+Copyright (C) 2013 Space Science and Engineering Center (SSEC),
+ University of Wisconsin-Madison.
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+This file is part of the polar2grid software package. Polar2grid takes
+satellite observation data, remaps it, and writes it to a file format for
+input into another program.
+Documentation: http://www.ssec.wisc.edu/software/polar2grid/
+
+ Written by David Hoese April 2013
+ University of Wisconsin-Madison
+ Space Science and Engineering Center
+ 1225 West Dayton Street
+ Madison, WI 53706
+ david.hoese@ssec.wisc.edu
+
+"""
+__docformat__ = "restructuredtext en"
+
+from polar2grid.core import Workspace
+from polar2grid.core.glue_utils import setup_logging,create_exc_handler,remove_file_patterns
+from polar2grid.core.time_utils import utc_now
+from polar2grid.core.constants import *
+from .grids.grids import create_grid_jobs, Cartographer
+from polar2grid.crefl import Frontend
+import remap
+from .gtiff_backend import Backend
+from polar2grid.core.dtype import str_to_dtype
+import numpy
+
+import os
+import sys
+import logging
+from multiprocessing import Process
+from datetime import datetime
+
+log = logging.getLogger(__name__)
+GLUE_NAME = "crefl2gtiff"
+LOG_FN = os.environ.get("CREFL2GTIFF_LOG", None) # None interpreted in main
+
+def process_data_sets(nav_set_dict,
+ fornav_D=None, fornav_d=None,
+ grid_configs=None,
+ forced_grid=None,
+ num_procs=1,
+ data_type=None,
+ output_pattern=None,
+ rescale_config=None,
+ inc_by_one=False,
+ create_true_color=True
+ ):
+ """Process all the files provided from start to finish,
+ from filename to Geotiff file.
+ """
+ log.debug("Processing navigation sets: %s" % (", ".join(nav_set_dict.keys()),))
+ status_to_return = STATUS_SUCCESS
+
+ # Handle parameters
+ grid_configs = grid_configs or tuple() # needs to be a tuple for use
+
+ # Declare polar2grid components
+ nav_set_jobs = {}
+ cart = Cartographer(*grid_configs)
+ frontend = Frontend()
+ backend = Backend(
+ data_type = data_type,
+ output_pattern = output_pattern,
+ rescale_config = rescale_config,
+ inc_by_one = inc_by_one
+ )
+
+ # Extract Swaths
+ log.info("Extracting swaths...")
+ for nav_set_uid,filepaths_dict in nav_set_dict.items():
+ if nav_set_uid == GEO_250M_NAV_UID:
+ bands_desired = [BID_01]
+ elif nav_set_uid == IBAND_NAV_UID:
+ bands_desired = [BID_08]
+ else:
+ bands_desired = [BID_01, BID_04, BID_03]
+
+ try:
+ meta_data = frontend.make_swaths(
+ nav_set_uid,
+ filepaths_dict,
+ bands_desired = bands_desired
+ )
+ nav_set_jobs[nav_set_uid] = {}
+ nav_set_jobs[nav_set_uid]["meta_data"] = meta_data
+ except StandardError:
+ log.error("Swath creation failed")
+ log.debug("Swath creation error:", exc_info=1)
+ status_to_return |= STATUS_FRONTEND_FAIL
+ return status_to_return
+
+ if nav_set_uid in nav_set_jobs and len(nav_set_jobs[nav_set_uid]["meta_data"]["bands"]) == 0:
+ log.error("No more bands to process for nav set %s, removing..." % (nav_set_uid,))
+ del nav_set_jobs[nav_set_uid]
+ status_to_return = status_to_return or STATUS_UNKNOWN_FAIL
+
+ # Determine grid
+ for nav_set_uid,nav_set_job in nav_set_jobs.items():
+ meta_data = nav_set_job["meta_data"]
+ bbox = None
+ fbf_lat_to_use = meta_data["fbf_lat"]
+ fbf_lon_to_use = meta_data["fbf_lon"]
+ if "lon_west" in meta_data:
+ bbox = (
+ meta_data["lon_west"], meta_data["lat_north"],
+ meta_data["lon_east"], meta_data["lat_south"]
+ )
+ fbf_lat_to_use = None
+ fbf_lon_to_use = None
+ try:
+ log.info("Determining what grids the data fits in...")
+ grid_jobs = create_grid_jobs(meta_data["sat"], meta_data["instrument"], nav_set_uid,
+ meta_data["bands"],
+ backend, cart,
+ forced_grids=forced_grid,
+ bbox = bbox, fbf_lat=fbf_lat_to_use, fbf_lon=fbf_lon_to_use)
+ nav_set_job["grid_jobs"] = grid_jobs
+ except StandardError:
+ log.debug("Grid Determination error:", exc_info=1)
+ log.error("Determining data's grids failed")
+ status_to_return |= STATUS_GDETER_FAIL
+ return status_to_return
+
+ ### Remap the data
+ for nav_set_uid,nav_set_job in nav_set_jobs.items():
+ meta_data = nav_set_job["meta_data"]
+ try:
+ remapped_jobs = remap.remap_bands(meta_data["sat"], meta_data["instrument"], nav_set_uid,
+ meta_data["fbf_lon"], meta_data["fbf_lat"], nav_set_job["grid_jobs"],
+ num_procs=num_procs, fornav_d=fornav_d, fornav_D=fornav_D,
+ lat_fill_value=meta_data.get("lat_fill_value", None),
+ lon_fill_value=meta_data.get("lon_fill_value", None),
+ lat_south=meta_data.get("lat_south", None),
+ lat_north=meta_data.get("lat_north", None),
+ lon_west=meta_data.get("lon_west", None),
+ lon_east=meta_data.get("lon_east", None)
+ )
+ nav_set_job["remapped_jobs"] = remapped_jobs
+ except StandardError:
+ log.debug("Remapping Error:", exc_info=1)
+ log.error("Remapping data failed")
+ status_to_return |= STATUS_REMAP_FAIL
+ return status_to_return
+
+ ### Create combined bands ###
+ # Sharpen any true color images (may overwrite remapped images)
+ from polar2grid.core.sharpen import rgb_ratio_sharpen
+ W = Workspace('.')
+ if create_true_color and MBAND_NAV_UID in nav_set_jobs:
+ # We can only make true color images when bands are available in the same grid
+ if IBAND_NAV_UID in nav_set_jobs:
+ true_color_grids = list(set(nav_set_jobs[MBAND_NAV_UID]["remapped_jobs"].keys()).intersection(nav_set_jobs[IBAND_NAV_UID]["remapped_jobs"].keys()))
+ else:
+ true_color_grids = nav_set_jobs[MBAND_NAV_UID]["remapped_jobs"].keys()
+
+ for grid_name in true_color_grids:
+ mgrid_dict = nav_set_jobs[MBAND_NAV_UID]["remapped_jobs"][grid_name]
+ if (BKIND_CREFL,BID_01) not in mgrid_dict or \
+ (BKIND_CREFL,BID_04) not in mgrid_dict or \
+ (BKIND_CREFL,BID_03) not in mgrid_dict:
+ log.info("Some of the bands needed for true color images are not present")
+ continue
+
+ log.info("Creating true color image for grid %s" % (grid_name,))
+
+ # Get the data to sharpen
+ red_hi = None
+ if IBAND_NAV_UID in nav_set_jobs and (BKIND_CREFL,BID_08) in nav_set_jobs[IBAND_NAV_UID]["remapped_jobs"][grid_name]:
+ igrid_dict = nav_set_jobs[IBAND_NAV_UID]["remapped_jobs"][grid_name]
+ red_hi = getattr(W, igrid_dict[(BKIND_CREFL,BID_08)]["fbf_remapped"].split('.')[0])
+
+ red_lo = getattr(W, mgrid_dict[(BKIND_CREFL,BID_01)]["fbf_remapped"].split('.')[0])
+ green = getattr(W, mgrid_dict[(BKIND_CREFL,BID_04)]["fbf_remapped"].split('.')[0])
+ blue = getattr(W, mgrid_dict[(BKIND_CREFL,BID_03)]["fbf_remapped"].split('.')[0])
+
+ # Sharpen the data
+ if red_hi is not None:
+ log.info("Performing true color image sharpening")
+ try:
+ sharp_red,sharp_green,sharp_blue = rgb_ratio_sharpen(red_hi, red_lo, green, blue)
+ except StandardError:
+ log.error("Could not sharpen nav. set %s for grid %s, will attempt an unsharpened image" % (MBAND_NAV_UID,grid_name))
+ log.debug("Debug information:", exc_info=True)
+ sharp_red,sharp_green,sharp_blue = red_lo,green,blue
+ status_to_return |= STATUS_BACKEND_FAIL
+ else:
+ log.info("Missing some bands needed for sharpening, can't perform true color image sharpening for nav. set '%s'" % (MBAND_NAV_UID,))
+ sharp_red,sharp_green,sharp_blue = red_lo,green,blue
+
+ # Add the true color band to jobs to process
+ try:
+ true_color_data = numpy.array([sharp_red,sharp_green,sharp_blue])
+ true_color_stem = "result_%s_%s_%s_%s" % (MBAND_NAV_UID, BKIND_TCOLOR_CREFL, NOT_APPLICABLE, grid_name)
+ true_color_fn = true_color_stem + ".real4." + ".".join( str(x) for x in true_color_data.shape[::-1] )
+ true_color_data.tofile(true_color_fn)
+ true_color_info = mgrid_dict[(BKIND_CREFL,BID_01)].copy()
+ true_color_info["fbf_remapped"] = true_color_fn
+ true_color_info["kind"] = BKIND_TCOLOR_CREFL
+ true_color_info["band_id"] = NOT_APPLICABLE
+ true_color_info["data_kind"] = DKIND_TCOLOR_CREFL
+ mgrid_dict[(BKIND_TCOLOR_CREFL,NOT_APPLICABLE)] = true_color_info
+ except StandardError:
+ log.error("Could not create a true color image for nav. set %s for grid" % (MBAND_NAV_UID,grid_name))
+ log.debug("Debug information:", exc_info=True)
+ status_to_return |= STATUS_BACKEND_FAIL
+ del sharp_red,sharp_green,sharp_blue,red_lo,green,blue,red_hi
+
+ if create_true_color and GEO_NAV_UID in nav_set_jobs:
+ # XXX: Need to incorporate sharpening if possible for MODIS
+ # We can only make true color images when bands are available in the same grid
+ if GEO_250M_NAV_UID in nav_set_jobs:
+ true_color_grids = list(set(nav_set_jobs[GEO_NAV_UID]["remapped_jobs"].keys()).intersection(nav_set_jobs[GEO_250M_NAV_UID]["remapped_jobs"].keys()))
+ else:
+ true_color_grids = nav_set_jobs[GEO_NAV_UID]["remapped_jobs"].keys()
+ for grid_name in true_color_grids:
+ dict_1000m = nav_set_jobs[GEO_NAV_UID]["remapped_jobs"][grid_name]
+ if (BKIND_CREFL,BID_01) not in dict_1000m or \
+ (BKIND_CREFL,BID_04) not in dict_1000m or \
+ (BKIND_CREFL,BID_03) not in dict_1000m:
+ log.info("Some of the bands needed for true color images are not present")
+ continue
+
+ log.info("Creating true color image for grid %s" % (grid_name,))
+
+ # Get the data
+ red_hi = None
+ if GEO_250M_NAV_UID in nav_set_jobs and (BKIND_CREFL,BID_01) in nav_set_jobs[GEO_250M_NAV_UID]["remapped_jobs"][grid_name]:
+ dict_250m = nav_set_jobs[GEO_250M_NAV_UID]["remapped_jobs"][grid_name]
+ red_hi = getattr(W, dict_250m[(BKIND_CREFL,BID_01)]["fbf_remapped"].split('.')[0])
+
+ red = getattr(W, dict_1000m[(BKIND_CREFL,BID_01)]["fbf_remapped"].split(".")[0])
+ green = getattr(W, dict_1000m[(BKIND_CREFL,BID_04)]["fbf_remapped"].split(".")[0])
+ blue = getattr(W, dict_1000m[(BKIND_CREFL,BID_03)]["fbf_remapped"].split(".")[0])
+
+ # Sharpen the data if possible
+ if red_hi is not None:
+ log.info("Performing true color image sharpening")
+ try:
+ sharp_red,sharp_green,sharp_blue = rgb_ratio_sharpen(red_hi, red, green, blue)
+ except StandardError:
+ log.error("Could not sharpen nav. set %s for grid %s, will attempt an unsharpened image" % (GEO_NAV_UID,grid_name))
+ log.debug("Debug information:", exc_info=True)
+ sharp_red,sharp_green,sharp_blue = red,green,blue
+ status_to_return |= STATUS_BACKEND_FAIL
+ else:
+ log.info("Missing some bands needed for sharpening, can't perform true color image sharpening for nav. set '%s'" % (GEO_NAV_UID,))
+ sharp_red,sharp_green,sharp_blue = red,green,blue
+
+ # Add the true color band to jobs to process
+ try:
+ true_color_data = numpy.array([sharp_red, sharp_green, sharp_blue])
+ true_color_stem = "result_%s_%s_%s_%s" % (GEO_NAV_UID, BKIND_TCOLOR_CREFL, NOT_APPLICABLE, grid_name)
+ true_color_fn = true_color_stem + ".real4." + ".".join( str(x) for x in true_color_data.shape[::-1] )
+ true_color_data.tofile(true_color_fn)
+ true_color_info= dict_1000m[(BKIND_CREFL,BID_01)].copy()
+ true_color_info["fbf_remapped"] = true_color_fn
+ true_color_info["kind"] = BKIND_TCOLOR_CREFL
+ true_color_info["band_id"] = NOT_APPLICABLE
+ true_color_info["data_kind"] = DKIND_TCOLOR_CREFL
+ dict_1000m[(BKIND_TCOLOR_CREFL,NOT_APPLICABLE)] = true_color_info
+ except StandardError:
+ log.error("Could not create a true color image for nav. set %s for grid" % (GEO_NAV_UID,grid_name))
+ log.debug("Debug information:", exc_info=True)
+ status_to_return |= STATUS_BACKEND_FAIL
+ del sharp_red,sharp_green,sharp_blue,red,green,blue,red_hi
+
+ ### BACKEND ###
+ W = Workspace('.')
+ for nav_set_uid,nav_set_job in nav_set_jobs.items():
+ meta_data = nav_set_job["meta_data"]
+ remapped_jobs = nav_set_job["remapped_jobs"]
+ for grid_name,grid_dict in remapped_jobs.items():
+ for (band_kind, band_id),band_dict in grid_dict.items():
+ log.info("Running geotiff backend for %s%s band grid %s" % (band_kind, band_id, grid_name))
+ try:
+ # Get the data from the flat binary file
+ data = getattr(W, band_dict["fbf_remapped"].split(".")[0]).copy()
+
+ # Call the backend
+ backend.create_product(
+ meta_data["sat"],
+ meta_data["instrument"],
+ nav_set_uid,
+ band_kind,
+ band_id,
+ band_dict["data_kind"],
+ data,
+ start_time=meta_data["start_time"],
+ grid_name=grid_name,
+ proj4_str=band_dict["proj4_str"],
+ grid_origin_x=band_dict["grid_origin_x"],
+ grid_origin_y=band_dict["grid_origin_y"],
+ pixel_size_x=band_dict["pixel_size_x"],
+ pixel_size_y=band_dict["pixel_size_y"],
+ fill_value=band_dict.get("fill_value", None)
+ )
+ except StandardError:
+ log.error("Error in the Geotiff backend for %s%s in grid %s" % (band_kind, band_id, grid_name))
+ log.debug("Geotiff backend error:", exc_info=1)
+ del remapped_jobs[grid_name][(band_kind, band_id)]
+
+ if len(remapped_jobs[grid_name]) == 0:
+ log.error("All backend jobs for grid %s failed" % (grid_name,))
+ del remapped_jobs[grid_name]
+
+ if len(remapped_jobs) == 0:
+ log.warning("Geotiff backend failed for all grids for bands %r" % (meta_data["bands"].keys(),))
+ status_to_return |= STATUS_BACKEND_FAIL
+
+ log.info("Processing of bands %r is complete" % (meta_data["bands"].keys(),))
+
+ return status_to_return
+
+def _process_data_sets(*args, **kwargs):
+ """Wrapper function around `process_data_sets` so that it can called
+ properly from `run_glue`, where the exitcode is the actual
+ returned value from `process_data_sets`.
+
+ This function also checks for exceptions other than the ones already
+ checked for in `process_data_sets` so that they are
+ recorded properly.
+ """
+ try:
+ stat = process_data_sets(*args, **kwargs)
+ sys.exit(stat)
+ except MemoryError:
+ log.error("%s ran out of memory, check log file for more info" % (GLUE_NAME,))
+ log.debug("Memory error:", exc_info=1)
+ except OSError:
+ log.error("%s had a OS error, check log file for more info" % (GLUE_NAME,))
+ log.debug("OS error:", exc_info=1)
+ except StandardError:
+ log.error("%s had an unexpected error, check log file for more info" % (GLUE_NAME,))
+ log.debug("Unexpected/Uncaught error:", exc_info=1)
+ except KeyboardInterrupt:
+ log.info("%s was cancelled by a keyboard interrupt" % (GLUE_NAME,))
+
+ sys.exit(-1)
+
+def run_glue(filepaths, **kwargs):
+ """Separate input files into groups that share navigation files data.
+
+ Call the processing function in separate process or same process depending
+ on value of `multiprocess` keyword.
+ """
+ # Rewrite/force parameters to specific format
+ filepaths = [ os.path.abspath(os.path.expanduser(x)) for x in sorted(filepaths) ]
+
+ # sort our file paths based on their navigation
+ nav_file_type_sets = Frontend.sort_files_by_nav_uid(filepaths)
+
+ exit_status = STATUS_SUCCESS
+ try:
+ stat = process_data_sets(nav_file_type_sets, **kwargs)
+ exit_status = exit_status or stat
+ except StandardError:
+ log.error("Could not process files")
+ exit_status = exit_status or STATUS_UNKNOWN_FAIL
+
+ return exit_status
+
+def main(argv = sys.argv[1:]):
+ import argparse
+ description = """
+ Create CREFL swaths, remap them to a grid, and place that remapped data
+ into a GeoTiff.
+ """
+ parser = argparse.ArgumentParser(description=description)
+
+ parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0,
+ help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)')
+ parser.add_argument('-l', '--log', dest="log_fn", default=None,
+ help="""specify the log filename, default
+<gluescript>_%%Y%%m%%d_%%H%%M%%S. Date information is taken from the first data
+filename and formatted through strftime; the current time is used if no files are given.""")
+ parser.add_argument('--debug', dest="debug_mode", default=False,
+ action='store_true',
+ help="Enter debug mode. Keeping intermediate files.")
+ parser.add_argument('--fornav-D', dest='fornav_D', default=40,
+ help="Specify the -D option for fornav")
+ parser.add_argument('--fornav-d', dest='fornav_d', default=2,
+ help="Specify the -d option for fornav")
+ parser.add_argument('--num-procs', dest="num_procs", default=1,
+ help="Specify number of processes that can be used to run ll2cr/fornav calls in parallel")
+
+ # Remapping/Grids
+ parser.add_argument('--grid-configs', dest='grid_configs', nargs="+", default=tuple(),
+ help="Specify additional grid configuration files ('grids.conf' for built-ins)")
+ parser.add_argument('-g', '--grids', dest='forced_grids', nargs="+", default=["wgs84_fit"],
+ help="Force remapping to only some grids, defaults to 'wgs84_fit', use 'all' for determination")
+
+ # Backend Specific
+ parser.add_argument('--dtype', dest="data_type", type=str_to_dtype, default=None,
+ help="specify the data type for the backend to output")
+ # pattern needs double formatting %% because argparse runs dict formatting on it
+ parser.add_argument('-p', '--pattern', dest="output_pattern", default=None,
+ help="specify an alternative product filename pattern (ex. '%%(sat)s_%%(instrument)s_%%(kind)s_%%(band)s_%%(start_time)s')")
+ parser.add_argument('--dont_inc', dest="inc_by_one", default=True, action="store_false",
+ help="tell rescaler to not increment by one to scaled data can have a 0 fill value (ex. 0-254 -> 1-255 with 0 being fill)")
+ parser.add_argument('--rescale-config', dest='rescale_config', default=None,
+ help="specify alternate rescale configuration file")
+
+ # Input related
+ group = parser.add_mutually_exclusive_group(required=True)
+ group.add_argument('-f', dest='data_files', nargs="+",
+ help="List of one or more hdf files")
+ group.add_argument('-d', dest='data_dir', nargs="?",
+ help="Data directory to look for input data files")
+ group.add_argument('-R', dest='remove_prev', default=False, action='store_true',
+ help="Delete any files that may conflict with future processing. Processing is not done with this flag.")
+
+ args = parser.parse_args(args=argv)
+
+ # Figure out what the log should be named
+ log_fn = args.log_fn
+ if args.remove_prev:
+ # They didn't need to specify a filename
+ if log_fn is None: log_fn = GLUE_NAME + "_removal.log"
+ file_start_time = utc_now()
+ else:
+ # Get input files and the first filename for the logging datetime
+ if args.data_files:
+ hdf_files = args.data_files[:]
+ elif args.data_dir:
+ base_dir = os.path.abspath(os.path.expanduser(args.data_dir))
+ hdf_files = [ os.path.join(base_dir,x) for x in os.listdir(base_dir) ]
+ else:
+ # Should never get here because of the argparse mutually exclusive group
+ log.error("Wrong number of arguments")
+ parser.print_help()
+ return -1
+
+ # Handle the user using a '~' for their home directory
+ hdf_files = [ os.path.realpath(os.path.expanduser(x)) for x in sorted(hdf_files) ]
+ for hdf_file in hdf_files:
+ if not os.path.exists(hdf_file):
+ print "ERROR: File '%s' doesn't exist" % (hdf_file,)
+ return -1
+
+ # Get the date of the first file if provided
+ file_start_time = sorted(Frontend.parse_datetimes_from_filepaths(hdf_files))[0]
+
+ # Determine the log filename
+ if log_fn is None: log_fn = GLUE_NAME + "_%Y%m%d_%H%M%S.log"
+ log_fn = datetime.strftime(file_start_time, log_fn)
+
+ levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
+ setup_logging(console_level=levels[min(3, args.verbosity)], log_filename=log_fn)
+
+ # Don't set this up until after you have set up logging
+ sys.excepthook = create_exc_handler(GLUE_NAME)
+
+ # Remove previous intermediate and product files
+ if args.remove_prev:
+ log.info("Removing any possible conflicting files")
+ remove_file_patterns(
+ Frontend.removable_file_patterns,
+ remap.removable_file_patterns,
+ Backend.removable_file_patterns
+ )
+ return 0
+
+ fornav_D = int(args.fornav_D)
+ fornav_d = int(args.fornav_d)
+ num_procs = int(args.num_procs)
+ forced_grids = args.forced_grids
+ # Assumes 'all' doesn't appear in the list twice
+ if 'all' in forced_grids: forced_grids[forced_grids.index('all')] = None
+
+ stat = run_glue(hdf_files,
+ fornav_D=fornav_D, fornav_d=fornav_d,
+ grid_configs=args.grid_configs,
+ forced_grid=forced_grids,
+ data_type=args.data_type,
+ num_procs=num_procs,
+ rescale_config=args.rescale_config,
+ output_pattern=args.output_pattern,
+ inc_by_one=args.inc_by_one,
+ )
+ log.debug("Processing returned status code: %d" % stat)
+
+ # Remove intermediate files (not the backend)
+ if not stat and not args.debug_mode:
+ log.info("Removing intermediate products")
+ remove_file_patterns(
+ Frontend.removable_file_patterns,
+ remap.removable_file_patterns
+ )
+
+ return stat
+
+if __name__ == "__main__":
+ sys.exit(main())
+
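A note on the flat binary file (FBF) naming used throughout this script: arrays are written with `tofile` as `<stem>.real4.<dims>`, where the dimensions are appended fastest-varying first (`shape[::-1]`), and read back through the `Workspace` helper. A minimal sketch of reading such a file back with plain numpy, assuming the naming convention shown above and a stem containing no dots (the commented filename is hypothetical):

    import numpy

    def read_fbf(filename):
        # Convention from the script above: stem.real4.<dim0>.<dim1>[...]
        parts = filename.split(".")
        dims = tuple(int(x) for x in parts[2:])[::-1]  # dims were written reversed
        return numpy.fromfile(filename, dtype=numpy.float32).reshape(dims)

    # hypothetical true color result: 3 bands of 768x3200 pixels
    # arr = read_fbf("result_m_nav_true_color_crefl_none_wgs84_fit.real4.3200.768.3")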
18 py/polar2grid/polar2grid/grids/grids.conf
@@ -12,11 +12,13 @@ australia2, gpd, australia2.gpd, 105.000, 5.000, 175.000,
# PROJ.4 Grids
# proj4 grids may have None for sizes, origins, or pixel sizes to be considered 'dynamic'
# pixel size or grid size must be specified
-# grid_name, proj4, proj4_str, width, height, pixel_size_x, pixel_size_y, origin_x, origin_y
-p4_211e, proj4, +proj=lcc +datum=NAD83 +ellps=GRS80 +lat_1=25 +lon_0=-95 +no_defs, 5120, 5120, 1015.9, -1015.9, -1956254.806724622, 4364276.201489102
-lcc_fit, proj4, +proj=lcc +datum=WGS84 +ellps=WGS84 +lat_0=25 +lon_0=-95, None, None, 1000, -1000, None, None
-lcc_fit_hr, proj4, +proj=lcc +datum=WGS84 +ellps=WGS84 +lat_0=25 +lon_0=-95, None, None, 400, -400, None, None
-wgs84_fit, proj4, +proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs, None, None, 0.0001, -0.0001, None, None
-polar_canada, proj4, +proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lat_ts=45.0 +lon_0=-150, None, None, 1000, -1000, None, None
-polar_north_pacific, proj4, +proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lat_ts=45.0 +lon_0=-170, None, None, 400, -400, None, None
-polar_south_pacific, proj4, +proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=-90 +lat_ts=-45.0 +lon_0=-170, None, None, 400, -400, None, None
+# grid_name, proj4, proj4_str, width, height, pixel_size_x, pixel_size_y, origin_x, origin_y
+p4_211e, proj4, +proj=lcc +datum=NAD83 +ellps=GRS80 +lat_0=25 +lat_1=25 +lon_0=-95 +units=m +no_defs, 5120, 5120, 1015.9, -1015.9, -1956254.806724622, 4364276.201489102
+p4_211e_hi, proj4, +proj=lcc +datum=NAD83 +ellps=GRS80 +lat_0=25 +lat_1=25 +lon_0=-95 +units=m +no_defs, 10000, 10000, 500, -500, -1956254.806724622, 4364276.201489102
+lcc_fit, proj4, +proj=lcc +datum=WGS84 +ellps=WGS84 +lat_0=25 +lat_1=25 +lon_0=-95 +units=m +no_defs, None, None, 1000, -1000, None, None
+lcc_fit_hr, proj4, +proj=lcc +datum=WGS84 +ellps=WGS84 +lat_0=25 +lat_1=25 +lon_0=-95 +units=m +no_defs, None, None, 400, -400, None, None
+wgs84_fit, proj4, +proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs, None, None, 0.0001, -0.0001, None, None
+eqc_fit, proj4, +proj=eqc +datum=WGS84 +ellps=WGS84 +lat_ts=0 +lon_0=-100 +units=m +no_defs, None, None, 250, -250, None, None
+polar_canada, proj4, +proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lat_ts=45.0 +lon_0=-150 +units=m, None, None, 1000, -1000, None, None
+polar_north_pacific, proj4, +proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lat_ts=45.0 +lon_0=-170 +units=m, None, None, 400, -400, None, None
+polar_south_pacific, proj4, +proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=-90 +lat_ts=-45.0 +lon_0=-170 +units=m, None, None, 400, -400, None, None
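The header comment documents the field order for PROJ.4 grid rows, with `None` marking values left for the remapping code to determine dynamically. A rough sketch of reading one such row under that layout (this parser is illustrative; the real one lives in the grids module):

    def parse_proj4_grid_line(line):
        # grid_name, proj4, proj4_str, width, height, pixel_size_x, pixel_size_y, origin_x, origin_y
        fields = [f.strip() for f in line.split(",")]
        opt = lambda v: None if v == "None" else float(v)
        return {
            "grid_name": fields[0],
            "proj4_str": fields[2],
            "width": opt(fields[3]), "height": opt(fields[4]),
            "pixel_size_x": opt(fields[5]), "pixel_size_y": opt(fields[6]),
            "origin_x": opt(fields[7]), "origin_y": opt(fields[8]),
        }

    parse_proj4_grid_line("wgs84_fit, proj4, +proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs, None, None, 0.0001, -0.0001, None, None")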
38 py/polar2grid/polar2grid/ll2cr.py
@@ -171,7 +171,7 @@ def ll2cr(lon_arr, lat_arr, proj4_str,
log.debug("Data west longitude: %f" % swath_lon_west)
swath_lon_east = lon_arr[ good_mask & (lon_arr > 0) ].min()
log.debug("Data east longitude: %f" % swath_lon_east)
- stadles_180 = True
+ stradles_180 = True
### Find out if we stradle the anti-meridian of the projection ###
# Get the projections origin in lon/lat space
@@ -196,28 +196,30 @@ def ll2cr(lon_arr, lat_arr, proj4_str,
data_grid_west += proj_circum if data_grid_west < 0 else 0
data_grid_east += proj_circum if data_grid_east < 0 else 0
- # see if the grids bounds stadle the anti-meridian
+ # see if the grids bounds stradle the anti-meridian
stradles_anti = False
if (data_grid_west < proj_anti_x) and (data_grid_east > proj_anti_x):
log.debug("Data crosses the projections anti-meridian")
stradles_anti = True
- normal_lon_east = swath_lon_east if swath_lon_east > swath_lon_west else swath_lon_east + 360
- lon_range = numpy.linspace(swath_lon_west, normal_lon_east, 15)
- lat_range = numpy.linspace(swath_lat_north, swath_lat_south, 15)
-
- ### Create a matrix of projection points to find the bounding box of the grid ###
- # Test the left, right, ul2br diagonal, ur2bl diagonal, top, bottom
- # If we are over the north pole then handle strong possibility of data in all longitudes
- if swath_lat_north >= 89.5: bottom_lon_range = numpy.linspace(-180, 180, 15)
- else: bottom_lon_range = lon_range
- # If we are over the north pole then handle strong possibility of data in all longitudes
- if swath_lat_south <= -89.5: top_lon_range = numpy.linspace(-180, 180, 15)
- else: top_lon_range = lon_range
-
- lon_test = numpy.array([ [swath_lon_west]*15, [swath_lon_east]*15, lon_range, lon_range, top_lon_range, bottom_lon_range ])
- lat_test = numpy.array([ lat_range, lat_range, lat_range, lat_range[::-1], [swath_lat_north]*15, [swath_lat_south]*15 ])
- x_test,y_test = _transform_array(tformer, lon_test, lat_test, proj_circum, stradles_anti=stradles_anti)
+ # If we are doing anything dynamic, then we need to get the bounding box of the data in *grid* space
+ if grid_origin_x is None or grid_width is None or pixel_size_x is None:
+ normal_lon_east = swath_lon_east if swath_lon_east > swath_lon_west else swath_lon_east + 360
+ lon_range = numpy.linspace(swath_lon_west, normal_lon_east, 15)
+ lat_range = numpy.linspace(swath_lat_north, swath_lat_south, 15)
+
+ ### Create a matrix of projection points to find the bounding box of the grid ###
+ # Test the left, right, ul2br diagonal, ur2bl diagonal, top, bottom
+ # If we are over the north pole then handle strong possibility of data in all longitudes
+ if swath_lat_north >= 89.5: bottom_lon_range = numpy.linspace(-180, 180, 15)
+ else: bottom_lon_range = lon_range
+ # If we are over the south pole then handle strong possibility of data in all longitudes
+ if swath_lat_south <= -89.5: top_lon_range = numpy.linspace(-180, 180, 15)
+ else: top_lon_range = lon_range
+
+ lon_test = numpy.array([ [swath_lon_west]*15, [swath_lon_east]*15, lon_range, lon_range, top_lon_range, bottom_lon_range ])
+ lat_test = numpy.array([ lat_range, lat_range, lat_range, lat_range[::-1], [swath_lat_north]*15, [swath_lat_south]*15 ])
+ x_test,y_test = _transform_array(tformer, lon_test, lat_test, proj_circum, stradles_anti=stradles_anti)
# Calculate the best corners of the data
if grid_width is None or pixel_size_x is None:
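The change above makes the edge-sampling bounding box computation conditional: it is only needed when the grid is dynamic (missing origin, size, or pixel size). The idea behind the sampling itself: project 15 points along each swath edge and both diagonals, then take the extremes of the projected coordinates as the grid bounding box. A simplified sketch of the same idea using pyproj, ignoring the anti-meridian handling that `_transform_array` performs (the function name and signature here are illustrative):

    import numpy
    from pyproj import Proj

    def approx_grid_bbox(proj4_str, lon_w, lon_e, lat_n, lat_s, n=15):
        p = Proj(proj4_str)
        lons = numpy.linspace(lon_w, lon_e, n)
        lats = numpy.linspace(lat_n, lat_s, n)
        # left edge, right edge, both diagonals, top edge, bottom edge
        lon_test = numpy.array([[lon_w] * n, [lon_e] * n, lons, lons, lons, lons])
        lat_test = numpy.array([lats, lats, lats, lats[::-1], [lat_n] * n, [lat_s] * n])
        x, y = p(lon_test, lat_test)
        return x.min(), y.min(), x.max(), y.max()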
11 py/polar2grid_core/polar2grid/core/constants.py
@@ -90,6 +90,8 @@
BKIND_ICON = "ice_concentration"
BKIND_NDVI = "ndvi"
BKIND_TPW = "total_precipitable_water"
+BKIND_CREFL = "crefl"
+BKIND_TCOLOR_CREFL = "true_color_crefl"
# Band Identifier
BID_01 = "01"
@@ -127,6 +129,8 @@
DKIND_DISTANCE = "distance" # this is meant to be a distance in the sense of mm, cm, meters, km, or miles
DKIND_PERCENT = "percent"
DKIND_C_INDEX = "contiguous_index" # this represents some abstract ranging index with meaningfully contiguous values (not discrete categories)
+DKIND_CREFL = "corrected_reflectance"
+DKIND_TCOLOR_CREFL = "true_color_crefl"
SET_DKINDS = set([
DKIND_RADIANCE,
@@ -137,7 +141,9 @@
DKIND_ANGLE,
DKIND_DISTANCE,
DKIND_PERCENT,
- DKIND_C_INDEX
+ DKIND_C_INDEX,
+ DKIND_CREFL,
+ DKIND_TCOLOR_CREFL
])
# Data types (int,float,#bits,etc.)
@@ -156,7 +162,8 @@
MBAND_NAV_UID = "m_nav" # the M band navigation group
IBAND_NAV_UID = "i_nav" # the I band navigation group
DNB_NAV_UID = "dnb_nav" # the Day/Night band navigation group
-GEO_NAV_UID = "geo_nav" # the geo navigation group
+GEO_NAV_UID = "geo_1000m_nav" # the 1000m geo navigation group
+GEO_500M_NAV_UID = "geo_500m_nav" # the 500m geo navigation group
GEO_250M_NAV_UID = "geo_250m_nav" # the 250m navigation group
MOD06_NAV_UID = "mod06_nav" # the mod06 navigation group
MOD07_NAV_UID = "mod07_nav" # the mod07 navigation group
33 py/polar2grid_core/polar2grid/core/rescale.py
@@ -116,6 +116,38 @@ def sqrt_scale(img, inner_mult, outer_mult, fill_in=DEFAULT_FILL_IN, fill_out=DE
img[mask] = fill_out
return img
+pw_255_lookup_table = numpy.array([ 0, 3, 7, 10, 14, 18, 21, 25, 28, 32, 36, 39, 43,
+ 46, 50, 54, 57, 61, 64, 68, 72, 75, 79, 82, 86, 90,
+ 91, 93, 95, 96, 98, 100, 101, 103, 105, 106, 108, 110, 111,
+ 113, 115, 116, 118, 120, 121, 123, 125, 126, 128, 130, 131, 133,
+ 135, 136, 138, 140, 140, 141, 142, 143, 143, 144, 145, 146, 147,
+ 147, 148, 149, 150, 150, 151, 152, 153, 154, 154, 155, 156, 157,
+ 157, 158, 159, 160, 161, 161, 162, 163, 164, 164, 165, 166, 167,
+ 168, 168, 169, 170, 171, 171, 172, 173, 174, 175, 175, 176, 176,
+ 177, 177, 178, 178, 179, 179, 180, 180, 181, 181, 182, 182, 183,
+ 183, 184, 184, 185, 185, 186, 186, 187, 187, 188, 188, 189, 189,
+ 190, 191, 191, 192, 192, 193, 193, 194, 194, 195, 195, 196, 196,
+ 197, 197, 198, 198, 199, 199, 200, 200, 201, 201, 202, 202, 203,
+ 203, 204, 204, 205, 205, 206, 207, 207, 208, 208, 209, 209, 210,
+ 210, 211, 211, 212, 212, 213, 213, 214, 214, 215, 215, 216, 216,
+ 217, 217, 218, 218, 219, 219, 220, 220, 221, 221, 222, 223, 223,
+ 224, 224, 225, 225, 226, 226, 227, 227, 228, 228, 229, 229, 230,
+ 230, 231, 231, 232, 232, 233, 233, 234, 234, 235, 235, 236, 236,
+ 237, 237, 238, 239, 239, 240, 240, 241, 241, 242, 242, 243, 243,
+ 244, 244, 245, 245, 246, 246, 247, 247, 248, 248, 249, 249, 250,
+ 250, 251, 251, 252, 252, 253, 253, 254, 255], dtype=numpy.int32)
+
+lookup_tables = [pw_255_lookup_table]
+
+def lookup_scale(img, m, b, table_idx=0, fill_in=DEFAULT_FILL_IN, fill_out=DEFAULT_FILL_OUT):
+ log.debug("Running 'lookup_scale'...")
+ lut = lookup_tables[table_idx]
+ img = linear_scale(img, m, b, fill_in=fill_in, fill_out=fill_out)
+ numpy.clip(img, 0, lut.shape[0]-1, out=img)
+ img = img.astype(numpy.uint32)
+ return lut[img]
+
+
def bt_scale_c(img, threshold, high_max, high_mult, low_max, low_mult, fill_in=DEFAULT_FILL_IN, fill_out=DEFAULT_FILL_OUT):
"""
this is a version of the brightness temperature scaling that is intended to work for data in celsius
@@ -299,6 +331,7 @@ def default_config_dir(self):
'ndvi' : ndvi_scale,
'distance' : passive_scale, # TODO, this is wrong... but we'll sort it out later?
'percent' : passive_scale, # TODO, this is wrong, find out what it should be
+ 'lookup' : lookup_scale,
}
@property
def known_rescale_kinds(self):
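The new `lookup_scale` chains three steps: `linear_scale` with slope `m` and intercept `b` maps input values onto lookup-table index space, `numpy.clip` bounds the indices to the table, and fancy indexing applies the table. A toy illustration of that index-then-lookup pattern with a small hypothetical table, ignoring the fill-value handling that `linear_scale` performs (m and b below are chosen so the crefl range -0.01..1.1 spans the toy table, mirroring the crefl configs):

    import numpy

    toy_lut = numpy.array([0, 10, 20, 40, 80, 160, 255], dtype=numpy.int32)

    def toy_lookup_scale(img, m, b):
        idx = img * m + b                               # linear scale onto index space
        idx = numpy.clip(idx, 0, toy_lut.shape[0] - 1)  # stay inside the table
        return toy_lut[idx.astype(numpy.uint32)]        # apply the lookup table

    data = numpy.array([-0.01, 0.2, 0.5, 2.0])
    m = 6 / 1.11      # map -0.01..1.1 onto indices 0..6
    b = 0.01 * m
    print(toy_lookup_scale(data, m, b))  # [0 10 20 255]; out-of-range input clips to the top entry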
16 py/polar2grid_core/polar2grid/core/rescale_configs/rescale.16bit.conf
@@ -28,3 +28,19 @@ npp, viirs, m_nav, m, 13, btemp, btemp, 242.0,
npp, viirs, m_nav, m, 14, btemp, btemp, 242.0, 169620, 514, 107426, 257
npp, viirs, m_nav, m, 15, btemp, btemp, 242.0, 169620, 514, 107426, 257
npp, viirs, m_nav, m, 16, btemp, btemp, 242.0, 169620, 514, 107426, 257
+# crefl (applied over range -0.01 to 1.1)
+npp, viirs, m_nav, crefl, 01, corrected_reflectance, lookup, 59040.54, 590.4054
+npp, viirs, m_nav, crefl, 03, corrected_reflectance, lookup, 59040.54, 590.4054
+npp, viirs, m_nav, crefl, 04, corrected_reflectance, lookup, 59040.54, 590.4054
+npp, viirs, i_nav, crefl, 08, corrected_reflectance, lookup, 59040.54, 590.4054
+npp, viirs, m_nav, true_color_crefl, none, true_color_crefl, lookup, 59040.54, 590.4054
+aqua, modis, geo_1000m_nav,crefl, 01, corrected_reflectance, lookup, 59040.54, 590.4054
+aqua, modis, geo_250m_nav, crefl, 01, corrected_reflectance, lookup, 59040.54, 590.4054
+aqua, modis, geo_1000m_nav,crefl, 03, corrected_reflectance, lookup, 59040.54, 590.4054
+aqua, modis, geo_1000m_nav,crefl, 04, corrected_reflectance, lookup, 59040.54, 590.4054
+aqua, modis, geo_1000m_nav,true_color_crefl, none, true_color_crefl, lookup, 59040.54, 590.4054
+terra, modis, geo_1000m_nav,crefl, 01, corrected_reflectance, lookup, 59040.54, 590.4054
+terra, modis, geo_250m_nav, crefl, 01, corrected_reflectance, lookup, 59040.54, 590.4054
+terra, modis, geo_1000m_nav,crefl, 03, corrected_reflectance, lookup, 59040.54, 590.4054
+terra, modis, geo_1000m_nav,crefl, 04, corrected_reflectance, lookup, 59040.54, 590.4054
+terra, modis, geo_1000m_nav,true_color_crefl, none, true_color_crefl, lookup, 59040.54, 590.4054
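The `m, b` pairs in these crefl rows are consistent with linearly mapping the stated input range -0.01..1.1 onto the full 16-bit output range before the lookup: m = 65535 / 1.11 ≈ 59040.54 and b = 0.01 * m ≈ 590.4054, so -0.01 lands on 0 and 1.1 lands on 65535 (the 8-bit file's 229.73 / 2.2973 follows the same pattern with 255). This reading is inferred from the numbers and the range comment above, not documented in the file itself; a quick check:

    m = 65535 / 1.11                        # 59040.5405...
    b = 0.01 * m                            # 590.4054...
    assert abs(-0.01 * m + b) < 1e-9        # range minimum -> 0
    assert abs(1.1 * m + b - 65535) < 1e-6  # range maximum -> 65535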
16 py/polar2grid_core/polar2grid/core/rescale_configs/rescale.8bit.conf
@@ -71,3 +71,19 @@ terra, modis, geo_nav, inversion_strength, none, btemp,
terra, modis, geo_nav, inversion_depth, none, distance, raw
terra, modis, geo_nav, ice_concentration, none, percent, raw
+# crefl (applied over range -0.01 to 1.1)
+npp, viirs, m_nav, crefl, 01, corrected_reflectance, lookup, 229.73, 2.2973
+npp, viirs, m_nav, crefl, 03, corrected_reflectance, lookup, 229.73, 2.2973
+npp, viirs, m_nav, crefl, 04, corrected_reflectance, lookup, 229.73, 2.2973
+npp, viirs, i_nav, crefl, 08, corrected_reflectance, lookup, 229.73, 2.2973
+npp, viirs, m_nav, true_color_crefl, none, true_color_crefl, lookup, 229.73, 2.2973
+aqua, modis, geo_1000m_nav,crefl, 01, corrected_reflectance, lookup, 229.83, 2.2893
+aqua, modis, geo_250m_nav, crefl, 01, corrected_reflectance, lookup, 229.83, 2.2893
+aqua, modis, geo_1000m_nav,crefl, 03, corrected_reflectance, lookup, 229.83, 2.2893
+aqua, modis, geo_1000m_nav,crefl, 04, corrected_reflectance, lookup, 229.83, 2.2893
+aqua, modis, geo_1000m_nav,true_color_crefl, none, true_color_crefl, lookup, 229.83, 2.2983
+terra, modis, geo_1000m_nav,crefl, 01, corrected_reflectance, lookup, 229.83, 2.2893
+terra, modis, geo_250m_nav, crefl, 01, corrected_reflectance, lookup, 229.83, 2.2893
+terra, modis, geo_1000m_nav,crefl, 03, corrected_reflectance, lookup, 229.83, 2.2893
+terra, modis, geo_1000m_nav,crefl, 04, corrected_reflectance, lookup, 229.83, 2.2893
+terra, modis, geo_1000m_nav,true_color_crefl, none, true_color_crefl, lookup, 229.83, 2.2983
16 py/polar2grid_core/polar2grid/core/rescale_configs/rescale_inc.16bit.conf
@@ -28,3 +28,19 @@ npp, viirs, m_nav, m, 13, btemp, btemp, 242.0,
npp, viirs, m_nav, m, 14, btemp, btemp, 242.0, 169620, 514, 107422.854, 256.987
npp, viirs, m_nav, m, 15, btemp, btemp, 242.0, 169620, 514, 107422.854, 256.987
npp, viirs, m_nav, m, 16, btemp, btemp, 242.0, 169620, 514, 107422.854, 256.987
+# crefl (applied over range -0.01 to 1.1)
+npp, viirs, m_nav, crefl, 01, corrected_reflectance, lookup, 59039.64, 590.3964
+npp, viirs, m_nav, crefl, 03, corrected_reflectance, lookup, 59039.64, 590.3964
+npp, viirs, m_nav, crefl, 04, corrected_reflectance, lookup, 59039.64, 590.3964
+npp, viirs, i_nav, crefl, 08, corrected_reflectance, lookup, 59039.64, 590.3964
+npp, viirs, m_nav, true_color_crefl, none, true_color_crefl, lookup, 59039.64, 590.3964
+aqua, modis, geo_1000m_nav,crefl, 01, corrected_reflectance, lookup, 59039.64, 590.3964
+aqua, modis, geo_250m_nav, crefl, 01, corrected_reflectance, lookup, 59039.64, 590.3964
+aqua, modis, geo_1000m_nav,crefl, 03, corrected_reflectance, lookup, 59039.64, 590.3964
+aqua, modis, geo_1000m_nav,crefl, 04, corrected_reflectance, lookup, 59039.64, 590.3964
+aqua, modis, geo_1000m_nav,true_color_crefl, none, true_color_crefl, lookup, 59039.64, 590.3964
+terra, modis, geo_1000m_nav,crefl, 01, corrected_reflectance, lookup, 59039.64, 590.3964
+terra, modis, geo_250m_nav, crefl, 01, corrected_reflectance, lookup, 59039.64, 590.3964
+terra, modis, geo_1000m_nav,crefl, 03, corrected_reflectance, lookup, 59039.64, 590.3964
+terra, modis, geo_1000m_nav,crefl, 04, corrected_reflectance, lookup, 59039.64, 590.3964
+terra, modis, geo_1000m_nav,true_color_crefl, none, true_color_crefl, lookup, 59039.64, 590.3964
16 py/polar2grid_core/polar2grid/core/rescale_configs/rescale_inc.8bit.conf
@@ -28,3 +28,19 @@ npp, viirs, m_nav, m, 13, btemp, btemp, 242.0,660
npp, viirs, m_nav, m, 14, btemp, btemp, 242.0,660,2,414.854,0.987
npp, viirs, m_nav, m, 15, btemp, btemp, 242.0,660,2,414.854,0.987
npp, viirs, m_nav, m, 16, btemp, btemp, 242.0,660,2,414.854,0.987
+# crefl (applied over range -0.01 to 1.1)
+npp, viirs, m_nav, crefl, 01, corrected_reflectance, lookup, 228.83, 2.2883
+npp, viirs, m_nav, crefl, 03, corrected_reflectance, lookup, 228.83, 2.2883
+npp, viirs, m_nav, crefl, 04, corrected_reflectance, lookup, 228.83, 2.2883
+npp, viirs, i_nav, crefl, 08, corrected_reflectance, lookup, 228.83, 2.2883
+npp, viirs, m_nav, true_color_crefl, none, true_color_crefl, lookup, 228.83, 2.2883
+aqua, modis, geo_1000m_nav,crefl, 01, corrected_reflectance, lookup, 228.83, 2.2883
+aqua, modis, geo_250m_nav, crefl, 01, corrected_reflectance, lookup, 228.83, 2.2883
+aqua, modis, geo_1000m_nav,crefl, 03, corrected_reflectance, lookup, 228.83, 2.2883
+aqua, modis, geo_1000m_nav,crefl, 04, corrected_reflectance, lookup, 228.83, 2.2883
+aqua, modis, geo_1000m_nav,true_color_crefl, none, true_color_crefl, lookup, 228.83, 2.2883
+terra, modis, geo_1000m_nav,crefl, 01, corrected_reflectance, lookup, 228.83, 2.2883
+terra, modis, geo_250m_nav, crefl, 01, corrected_reflectance, lookup, 228.83, 2.2883
+terra, modis, geo_1000m_nav,crefl, 03, corrected_reflectance, lookup, 228.83, 2.2883
+terra, modis, geo_1000m_nav,crefl, 04, corrected_reflectance, lookup, 228.83, 2.2883
+terra, modis, geo_1000m_nav,true_color_crefl, none, true_color_crefl, lookup, 228.83, 2.2883
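The inc variants appear to reserve 0 for fill by scaling into one fewer level and letting the increment-by-one step (see the `--dont_inc` help text in crefl2gtiff.py) shift the data up: m = 254 / 1.11 ≈ 228.83 and b = 0.01 * m ≈ 2.2883, so the scaled range 0..254 becomes 1..255 after the increment. Again an inference from the numbers; the 16-bit inc file's 59039.64 = 65534 / 1.11 matches the same pattern:

    m = 254 / 1.11     # 228.8288... -> rounded to 228.83 in the rows above
    b = 0.01 * m       # 2.2883...
    # after increment-by-one, outputs occupy 1..255 with 0 left as the fill value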
79 py/polar2grid_core/polar2grid/core/sharpen.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# encoding: utf-8
+"""Various image sharpening functions.
+
+:author: David Hoese (davidh)
+:contact: david.hoese@ssec.wisc.edu
+:organization: Space Science and Engineering Center (SSEC)
+:copyright: Copyright (c) 2013 University of Wisconsin SSEC. All rights reserved.
+:date: April 2013
+:license: GNU GPLv3
+
+Copyright (C) 2013 Space Science and Engineering Center (SSEC),
+ University of Wisconsin-Madison.
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+This file is part of the polar2grid software package. Polar2grid takes
+satellite observation data, remaps it, and writes it to a file format for
+input into another program.
+Documentation: http://www.ssec.wisc.edu/software/polar2grid/
+
+ Written by David Hoese April 2013
+ University of Wisconsin-Madison
+ Space Science and Engineering Center
+ 1225 West Dayton Street
+ Madison, WI 53706
+ david.hoese@ssec.wisc.edu
+
+"""
+__docformat__ = "restructuredtext en"
+
+from .fbf import Workspace
+import numpy
+
+import os
+import sys
+
+def rgb_ratio_sharpen(band1_hires, band1_lores, band2, band3, fill_value=None):
+ """Sharpen 2 remapped bands by using the ratio of a
+ a third band in high resolution and low resolution remapped versions.
+ All bands must be remapped to the same grid.
+ """
+ if fill_value is not None:
+ fill_mask = (band1_hires == fill_value) | (band1_lores == fill_value) | (band2 == fill_value) | (band3 == fill_value)
+
+ ratio = band1_hires / band1_lores
+ sharp2 = ratio * band2
+ sharp3 = ratio * band3
+
+ # Make sure fill values are respected
+ if fill_value is not None:
+ sharp2[ fill_mask ] = fill_value
+ sharp3[ fill_mask ] = fill_value
+
+ return band1_hires,sharp2,sharp3
+
+def rgb_ratio_sharpen_fbf(band1_hires_fn, band1_lores_fn, band2_fn, band3_fn, fill_value=None, workspace_dir='.'):
+ """Simple wrapper of `rgb_ratio_sharpen` to load binary files from their
+ filenames.
+ """
+ W = Workspace(workspace_dir)
+ band1_hires = getattr(W, band1_hires_fn.split(".")[0])
+ band1_lores = getattr(W, band1_lores_fn.split(".")[0])
+ band2 = getattr(W, band2_fn.split(".")[0])
+ band3 = getattr(W, band3_fn.split(".")[0])
+ return rgb_ratio_sharpen(band1_hires, band1_lores, band2, band3, fill_value=fill_value)
+
+
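A small usage sketch of `rgb_ratio_sharpen` with made-up arrays, showing how the high/low resolution ratio of band 1 scales the other two bands:

    import numpy
    from polar2grid.core.sharpen import rgb_ratio_sharpen

    red_hi = numpy.array([0.50, 0.60])  # band 1 at high resolution
    red_lo = numpy.array([0.25, 0.30])  # band 1 at low resolution -> ratio 2.0
    green = numpy.array([0.10, 0.20])
    blue = numpy.array([0.05, 0.10])

    r, g, b = rgb_ratio_sharpen(red_hi, red_lo, green, blue)
    # r is red_hi unchanged; g == [0.2, 0.4]; b == [0.1, 0.2]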
17 py/polar2grid_crefl/polar2grid/__init__.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# encoding: utf-8
+"""polar2grid is a python package that provides utilities and scripts to
+convert polar orbiting satellite data and remap it into a known grid.
+
+:author: David Hoese (davidh)
+:author: Ray Garcia (rayg)
+:author: Eva Schiffer (evas)
+:contact: david.hoese@ssec.wisc.edu
+:organization: Space Science and Engineering Center (SSEC)
+:copyright: Copyright (c) 2012 University of Wisconsin SSEC. All rights reserved.
+:date: Jan 2012
+:license: GNU GPLv3
+:revision: $Id$
+"""
+__docformat__ = "restructuredtext en"
+__import__('pkg_resources').declare_namespace(__name__)
8 py/polar2grid_crefl/polar2grid/crefl/__init__.py
@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+# encoding: utf-8
+"""Polar2grid frontend for corrected reflectance (crefl) products created
+for VIIRS and MODIS data.
+"""
+__docformat__ = "restructuredtext en"
+
+from .crefl2swath import Frontend
362 py/polar2grid_crefl/polar2grid/crefl/crefl2swath.py
@@ -0,0 +1,362 @@
+#!/usr/bin/env python
+# encoding: utf-8
+"""
+Read one or more contiguous in-order HDF4 VIIRS or MODIS corrected reflectance product
+granules and aggregate them into one swath per band. Return a dictionary of meta data.
+Write out Swath binary files used by other polar2grid components.
+
+:author: David Hoese (davidh)
+:contact: david.hoese@ssec.wisc.edu
+:organization: Space Science and Engineering Center (SSEC)
+:copyright: Copyright (c) 2012 University of Wisconsin SSEC. All rights reserved.
+:date: Nov 2012
+:license: GNU GPLv3
+:revision: $Id$
+"""
+__docformat__ = "restructuredtext en"
+
+from polar2grid.core import roles
+from polar2grid.core.constants import *
+from polar2grid.core.fbf import file_appender,check_stem
+from polar2grid.viirs import viirs_guidebook
+from polar2grid.modis.modis_geo_interp_250 import interpolate_geolocation
+from pyhdf import SD
+import h5py
+from . import guidebook
+import numpy
+
+import os
+import sys
+import logging
+from pprint import pprint
+
+log = logging.getLogger(__name__)
+
+# XXX: Temporary - should be replaced with call to VIIRS Frontend when possible
+def load_geo_data(nav_set_uid, geo_filepath, fill_value=DEFAULT_FILL_VALUE, dtype=numpy.float32):
+ """Place holder for when VIIRS frontend can be rewritten to support
+ third-party users.
+ """
+ if nav_set_uid == IBAND_NAV_UID or nav_set_uid == MBAND_NAV_UID:
+ file_info = viirs_guidebook.geo_info(geo_filepath)
+ h = h5py.File(file_info["geo_path"], mode='r')
+ lon_array = viirs_guidebook.load_geo_variable(h, file_info, viirs_guidebook.K_LONGITUDE, dtype=dtype, required=True)
+ lon_array[ viirs_guidebook.get_geo_variable_fill_mask(lon_array, viirs_guidebook.K_LONGITUDE) ] = fill_value
+ lat_array = viirs_guidebook.load_geo_variable(h, file_info, viirs_guidebook.K_LATITUDE, dtype=dtype, required=True)
+ lat_array[ viirs_guidebook.get_geo_variable_fill_mask(lat_array, viirs_guidebook.K_LATITUDE) ] = fill_value
+ west_bound,east_bound,north_bound,south_bound = viirs_guidebook.read_geo_bbox_coordinates(h, file_info, fill_value=fill_value)
+ elif nav_set_uid == GEO_NAV_UID or nav_set_uid == GEO_250M_NAV_UID:
+ # XXX: Interpolation does not handle 500M navigation yet
+ h = SD.SD(geo_filepath, SD.SDC.READ)
+ lon_ds = h.select("Longitude")
+ lon_array = lon_ds[:].astype(dtype)
+ lon_fill_value = lon_ds.attributes()["_FillValue"]
+ lon_mask = lon_array == lon_fill_value
+
+ lat_ds = h.select("Latitude")
+ lat_array = lat_ds[:].astype(dtype)
+ lat_fill_value = lat_ds.attributes()["_FillValue"]
+ lat_mask = lat_array == lat_fill_value
+
+ # Interpolate to the proper resolution
+ if nav_set_uid == GEO_250M_NAV_UID:
+ log.info("Interpolating 250m navigation from 1km")
+ lon_array = interpolate_geolocation(lon_array)
+ lat_array = interpolate_geolocation(lat_array)
+
+ lon_array[ lon_mask ] = fill_value
+ lat_array[ lat_mask ] = fill_value
+ west_bound = 0
+ east_bound = 0
+ north_bound = 0
+ south_bound = 0
+ else:
+ log.error("Don't know how to get navigation data for nav set: %s" % nav_set_uid)
+ raise ValueError("Don't know how to get navigation data for nav set: %s" % nav_set_uid)
+
+ return lon_array,lat_array,west_bound,east_bound,north_bound,south_bound
+
+class Frontend(roles.FrontendRole):
+ removable_file_patterns = [
+ ".latitude_*_",
+ ".longitude_*_*",
+ ".image_*_*_*_*",
+ "image_*_*.real4.*.*",
+ "latitude_*.real4.*.*",
+ "longitude_*.real4.*.*"
+ ]
+
+ def __init__(self):
+ pass
+
+ @classmethod
+ def parse_datetimes_from_filepaths(cls, filepaths):
+ return guidebook.parse_datetimes_from_filepaths(filepaths)
+
+ @classmethod
+ def sort_files_by_nav_uid(cls, filepaths):
+ return guidebook.sort_files_by_nav_uid(filepaths)
+
+ def load_band_data(self, meta_data, data_filepath, nav_set_uid, band_kind, band_id, fill_value=DEFAULT_FILL_VALUE):
+ """Read the specified band from the file in the meta data dictionary
+ provided and return a numpy array of the scaled and properly filled data.
+
+ Only operates on one band.
+ """
+ band_info = meta_data["bands"][(band_kind, band_id)]
+
+ # Get a file object for the file containing this band's information
+ h = SD.SD(data_filepath, mode=SD.SDC.READ)
+
+ # Get the name of the attributes and variables in the file
+ dataset_name = band_info[guidebook.K_DATASET]
+ fill_attr_name = band_info.get(guidebook.K_FILL_VALUE, None)
+ scale_factor_attr_name = band_info.get(guidebook.K_SCALE_FACTOR, None)
+ scale_offset_attr_name = band_info.get(guidebook.K_SCALE_OFFSET, None)
+ units_attr_name = band_info.get(guidebook.K_UNITS, None) # Unused
+
+ # Get the band data from the file
+ ds_object = h.select(dataset_name)
+ band_data = ds_object.get().astype(numpy.float32)
+
+ attrs = ds_object.attributes()
+ if fill_attr_name:
+ input_fill_value = attrs[fill_attr_name]
+ input_fill_value = 14000 # FIXME
+ fill_mask = band_data >= input_fill_value
+ else:
+ fill_mask = numpy.zeros_like(band_data).astype(numpy.bool)
+
+ if isinstance(scale_factor_attr_name, float):
+ scale_factor = scale_factor_attr_name
+ elif scale_factor_attr_name is not None:
+ scale_factor = attrs[scale_factor_attr_name]
+
+ if isinstance(scale_offset_attr_name, float):
+ scale_offset = scale_offset_attr_name
+ elif scale_offset_attr_name is not None:
+ scale_offset = attrs[scale_offset_attr_name]
+
+ # Scale the data
+ if scale_factor_attr_name is not None and scale_offset_attr_name is not None:
+ band_data = band_data * scale_factor + scale_offset
+ band_data[fill_mask] = fill_value
+
+ return band_data
+
+ def write_band_fbf(self, meta_data, nav_set_uid, band_kind, band_id, fill_value=DEFAULT_FILL_VALUE):
+ """Writes a flat binary file for an entire swath of data.
+ """
+ # Create a temporary flat binary file (will be renamed later)
+ spid = "%d" % os.getpid()
+ stem = "image_%s_%s_%s" % (nav_set_uid, band_kind, band_id)
+ check_stem(stem)
+ fbf_tmp_filename = "." + stem + "_" + spid
+ image_fbf_file = file(fbf_tmp_filename, 'wb')
+ image_fbf_appender = file_appender(image_fbf_file, dtype=numpy.float32)
+
+ # Iterate over each file that contains data that makes up this band
+ for data_filepath in meta_data["bands"][(band_kind,band_id)]["data_filepaths"]:
+ # Load the individual array for this band
+ band_array = self.load_band_data(meta_data, data_filepath, nav_set_uid, band_kind, band_id, fill_value=fill_value)
+ # Add the data to the FBF
+ image_fbf_appender.append(band_array)
+
+ # Rename flat binary files to proper fbf names
+ suffix = '.real4.' + '.'.join(str(x) for x in reversed(image_fbf_appender.shape))
+ fbf_image_filename = stem + suffix
+ os.rename(fbf_tmp_filename, fbf_image_filename)
+
+ # Add this new information to the band information
+ band_info = meta_data["bands"][(band_kind,band_id)]
+ band_info["fbf_img"] = fbf_image_filename
+ band_info["fill_value"] = fill_value
+ band_info["swath_rows"] = image_fbf_appender.shape[0]
+ band_info["swath_cols"] = image_fbf_appender.shape[1]
+ band_info["swath_scans"] = band_info["swath_cols"] / band_info["rows_per_scan"]
+
+ # Add info to the main meta data dictionary if it's not there already
+ if "swath_rows" not in meta_data: meta_data["swath_rows"] = band_info["swath_rows"]
+ if "swath_cols" not in meta_data: meta_data["swath_cols"] = band_info["swath_cols"]
+ if "swath_scans" not in meta_data: meta_data["swath_scans"] = band_info["swath_scans"]
+
+ def write_geo_fbf(self, meta_data, nav_set_uid, all_geo_filepaths, fill_value=DEFAULT_FILL_VALUE):
+ # Create a temporary flat binary file (will be renamed later)
+ spid = "%d" % os.getpid()
+ lat_stem = "latitude_%s" % (nav_set_uid)
+ lon_stem = "longitude_%s" % (nav_set_uid)
+ check_stem(lat_stem)
+ check_stem(lon_stem)
+ fbf_lat_tmp_fn = "." + lat_stem + "_" + spid
+ fbf_lon_tmp_fn = "." + lon_stem + "_" + spid
+ lat_fbf_file = file(fbf_lat_tmp_fn, 'wb')
+ lon_fbf_file = file(fbf_lon_tmp_fn, 'wb')
+ lat_fbf_appender = file_appender(lat_fbf_file, dtype=numpy.float32)
+ lon_fbf_appender = file_appender(lon_fbf_file, dtype=numpy.float32)
+
+ # Iterate over each navigation file that makes up the swath
+ wests,easts,norths,souths = [],[],[],[]
+ for geo_filepath in all_geo_filepaths:
+ # Get longitude,latitude arrays from navigation files
+ # Get the bounding coordinates
+ lon_array,lat_array,wbound,ebound,nbound,sbound = load_geo_data(nav_set_uid, geo_filepath, fill_value=fill_value)
+ # Add the data to the FBFs
+ lon_fbf_appender.append(lon_array)
+ lat_fbf_appender.append(lat_array)
+ # Add it to a list of all of the bounds for every file
+ wests.append(wbound)
+ easts.append(ebound)
+ norths.append(nbound)
+ souths.append(sbound)
+
+ # Find the overall bounding box from the individual bounding box for each file
+ if nav_set_uid == IBAND_NAV_UID or nav_set_uid == MBAND_NAV_UID:
+ overall_wbound,overall_ebound,overall_nbound,overall_sbound = viirs_guidebook.calculate_bbox_bounds(wests, easts, norths, souths)
+ meta_data["lon_west"] = overall_wbound
+ meta_data["lon_east"] = overall_ebound
+ meta_data["lat_north"] = overall_nbound
+ meta_data["lat_south"] = overall_sbound
+
+ # Rename flat binary files to proper fbf names
+ suffix = '.real4.' + '.'.join(str(x) for x in reversed(lat_fbf_appender.shape))
+ fbf_lat_fn = lat_stem + suffix
+ fbf_lon_fn = lon_stem + suffix
+ os.rename(fbf_lat_tmp_fn, fbf_lat_fn)
+ os.rename(fbf_lon_tmp_fn, fbf_lon_fn)
+
+ # Add this new information to the meta data dictionary
+ meta_data["lat_fill_value"] = fill_value
+ meta_data["lon_fill_value"] = fill_value
+ meta_data["fbf_lat"] = fbf_lat_fn
+ meta_data["fbf_lon"] = fbf_lon_fn
+
+ # Check to make sure navigation is the same size as the other data
+ if lat_fbf_appender.shape != lon_fbf_appender.shape or \
+ lat_fbf_appender.shape != (meta_data["swath_rows"],meta_data["swath_cols"]):
+ log.error("Navigation data was not the same size as the band data")
+ raise ValueError("Navigation data was not the same size as the band data")
+
+ def extract_band_info(self, meta_data, file_info, nav_set_uid, band_id):
+ """Update the meta_data dictionary to include relevant information
+ about the band tied to the band_id provided.
+ """
+ band_kind = file_info["kind"]
+ if (band_kind, band_id) not in meta_data["bands"]:
+ meta_data["bands"][(band_kind, band_id)] = band_info = {}
+ else:
+ band_info = meta_data["bands"][(band_kind, band_id)]
+
+ # Fill in global information required by the frontend interface
+ if "sat" not in meta_data: meta_data["sat"] = file_info["sat"]
+ if "instrument" not in meta_data: meta_data["instrument"] = file_info["instrument"]
+ if "start_time" not in meta_data or file_info["start_time"] < meta_data["start_time"]:
+ meta_data["start_time"] = file_info["start_time"]
+
+ # Fill in information required by the frontend interface
+ band_info["kind"] = band_kind
+ band_info["band_id"] = band_id
+ band_info["data_kind"] = file_info["data_kind"]
+ band_info["remap_data_as"] = file_info["data_kind"]
+ band_info["rows_per_scan"] = file_info["rows_per_scan"]
+ meta_data["rows_per_scan"] = band_info["rows_per_scan"]
+
+ # Fill in information required to load the data later in the frontend
+ if "data_filepaths" not in band_info:
+ band_info["data_filepaths"] = [ file_info["data_filepath"] ]
+ else:
+ band_info["data_filepaths"].append(file_info["data_filepath"])
+
+ band_info[guidebook.K_DATASET] = file_info[guidebook.K_DATASETS][file_info["bands"].index(band_id)]
+ band_info[guidebook.K_FILL_VALUE] = file_info.get(guidebook.K_FILL_VALUE, None)
+ band_info[guidebook.K_SCALE_FACTOR] = file_info.get(guidebook.K_SCALE_FACTOR, None)
+ band_info[guidebook.K_SCALE_OFFSET] = file_info.get(guidebook.K_SCALE_OFFSET, None)
+ band_info[guidebook.K_UNITS] = file_info.get(guidebook.K_UNITS, None)
+
+ def make_swaths(self, nav_set_uid, filepaths_dict, cut_bad=False, fill_value=DEFAULT_FILL_VALUE,
+ bands_desired=[ BID_08, BID_01, BID_04, BID_03 ]
+ ):
+ """Create binary swath files and return a python dictionary
+ of meta data.
+ """
+ # Store the information for each file that we look at
+ all_nav_filepaths = set()
+ meta_data = {
+ "nav_set_uid" : nav_set_uid,
+ "bands" : {},
+ }
+
+ # Get meta data for all data files
+ for file_pattern,filepath_list in filepaths_dict.items():
+ # Sort the file list alphabetically (in place)
+ filepath_list.sort()
+
+ for filepath in filepath_list:
+ log.info("Loading meta data from %s" % filepath)
+ # Get meta data for this filepath
+ file_info = guidebook.get_file_meta(nav_set_uid, file_pattern, filepath)
+ # Store the navigation information for later
+ all_nav_filepaths.add(file_info["geo_filepath"])
+
+ # Extract each band that is in this file
+ # Data is currently organized in a "per-file" structure, this
+ # makes it a "per-band" structure
+ for band_id in file_info["bands"]:
+ # Does the user want this band?
+ if band_id in bands_desired:
+ self.extract_band_info(meta_data, file_info, nav_set_uid, band_id)
+
+ # Sort the navigation file list
+ all_nav_filepaths = sorted(all_nav_filepaths)
+
+ # Create image data flat binary files
+ for band_kind,band_id in meta_data["bands"].keys():
+ log.info("Writing swath binary files for band kind %s band %s" % (band_kind,band_id))
+ self.write_band_fbf(meta_data, nav_set_uid, band_kind, band_id, fill_value=fill_value)
+
+ log.info("Writing navigation binary files for nav. set %s" % nav_set_uid)
+ # Create navigation flat binary files
+ self.write_geo_fbf(meta_data, nav_set_uid, all_nav_filepaths, fill_value=fill_value)
+
+ return meta_data
+
+def main():
+ from argparse import ArgumentParser
+ from polar2grid.core.glue_utils import remove_file_patterns
+ import json
+ description = """Run crefl frontend on files provided, writing JSON-ified
+ meta data dictionary to stdout.
+ """
+ parser = ArgumentParser(description=description)
+ parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0,
+ help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)')
+ parser.add_argument('-R', dest='remove_prev', default=False, action='store_true',
+ help="Delete any files that may conflict with future processing. Processing is not done with this flag.")
+ parser.add_argument('crefl_files', nargs="*",
+ help="crefl output files for one contiguous swath/pass")
+
+ args = parser.parse_args()
+
+ levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
+ logging.basicConfig(level=levels[min(3, args.verbosity)])
+
+ if not args.remove_prev and not args.crefl_files:
+ log.error("You must either specify '-R' to remove conflicting files or provide files to process")
+ return -1
+
+ if args.remove_prev:
+ log.info("Removing any possible conflicting files")
+ remove_file_patterns(
+ Frontend.removable_file_patterns
+ )
+ return 0
+
+ nav_set_dict = Frontend.sort_files_by_nav_uid(args.crefl_files)
+ for nav_set_uid,filepaths_dict in nav_set_dict.items():
+ frontend = Frontend()
+ meta_data = frontend.make_swaths(nav_set_uid, filepaths_dict)
+ pprint(meta_data)
+ #print json.dumps(meta_data)
+
+if __name__ == "__main__":
+ sys.exit(main())
+
View
535 py/polar2grid_crefl/polar2grid/crefl/guidebook.py
@@ -0,0 +1,535 @@
+#!/usr/bin/env python
+# encoding: utf-8
+"""
+Read one or more contiguous in-order HDF4 VIIRS or MODIS corrected reflectance product
+granules and aggregate them into one swath per band. Return a dictionary of meta data.
+Write out Swath binary files used by other polar2grid components.
+
+:author: David Hoese (davidh)
+:contact: david.hoese@ssec.wisc.edu
+:organization: Space Science and Engineering Center (SSEC)
+:copyright: Copyright (c) 2012 University of Wisconsin SSEC. All rights reserved.
+:date: Nov 2012
+:license: GNU GPLv3
+:revision: $Id$
+"""
+__docformat__ = "restructuredtext en"
+
+from polar2grid.core.constants import *
+from polar2grid.core.time_utils import UTC
+from pyhdf import SD,error as hdf_error
+import numpy
+
+import os
+import sys
+import re
+import logging
+from datetime import datetime
+from glob import glob
+
+log = logging.getLogger(__name__)
+
+# Instance of the UTC timezone
+UTC = UTC()
+
+# Dimension constants
+DIMNAME_LINES_750 = "lines_750m"
+DIMNAME_SAMPLES_750 = "samples_750m"
+DIMNAME_LINES_350 = "lines_350"
+DIMNAME_SAMPLES_350 = "samples_350"
+DIM_LINES_750 = 768
+DIM_SAMPLES_750 = 3200
+DIM_LINES_350 = 1536
+DIM_SAMPLES_350 = 6400
+
+# Pixel size constants to determine how to interpolate later
+PIXEL_SIZE_750 = 750
+PIXEL_SIZE_350 = 350
+
+# Constants for dataset names in crefl output files
+DS_CR_01 = "CorrRefl_01"
+DS_CR_02 = "CorrRefl_02"
+DS_CR_03 = "CorrRefl_03"
+DS_CR_04 = "CorrRefl_04"
+DS_CR_05 = "CorrRefl_05"
+DS_CR_06 = "CorrRefl_06"
+DS_CR_07 = "CorrRefl_07"
+DS_CR_08 = "CorrRefl_08"
+DS_CR_09 = "CorrRefl_09"
+DS_CR_10 = "CorrRefl_10"
+
+# Constants for navigation dataset names in crefl output files
+DS_LONGITUDE = "Longitude"
+DS_LATITUDE = "Latitude"
+
+# Keys in hdf4 attributes
+K_FILL_VALUE = "FillValueKey"
+K_SCALE_FACTOR = "ScaleFactorKey"
+K_SCALE_OFFSET = "ScaleOffsetKey"
+K_UNITS = "UnitsKey"
+K_DATASETS = "DatasetsKey"
+K_DATASET = "DatasetKey"
+K_LONGITUDE = "LongitudeKey"
+K_LATITUDE = "LatitudeKey"
+K_COLUMNS_750 = "Columns750Key"
+K_COLUMNS_350 = "Columns350Key"
+K_ROWS_750 = "Rows750Key"
+K_ROWS_350 = "Rows350Key"
+K_GEO_PATTERN = "GeoGlobPatternKey"
+
+
+"""
+CorrRefl_01 | M5 Corrected reflectances
+CorrRefl_02 | M7 Corrected reflectances
+CorrRefl_03 | M3 Corrected reflectances
+CorrRefl_04 | M4 Corrected reflectances
+CorrRefl_05 | M8 Corrected reflectances
+CorrRefl_06 | M10 Corrected reflectances
+CorrRefl_07 | M11 Corrected reflectances
+CorrRefl_08 | I1 Corrected reflectances
+CorrRefl_09 | I2 Corrected reflectances
+CorrRefl_10 | I3 Corrected reflectances
+"""
+
+def _convert_npp_datetimes(date_str, start_str):
+ # the last digit in the 7 digit time is tenths of a second
+ # we turn it into microseconds
+ start_us = int(start_str[-1]) * 100000
+
+ # Parse out the datetime, making sure to add the microseconds and set the timezone to UTC
+ start_time = datetime.strptime(date_str + "_" + start_str[:-1], "%Y%m%d_%H%M%S").replace(tzinfo=UTC, microsecond=start_us)
+
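+    # For example, date_str="20130311" and start_str="1916582" parse to
+    # datetime(2013, 3, 11, 19, 16, 58, 200000, tzinfo=UTC)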
+ return start_time
+
+def _convert_modis_datetimes(date_str, start_str):
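+    # For example, date_str="13070" (day 70 of 2013) and start_str="1916"
+    # parse to the naive datetime(2013, 3, 11, 19, 16)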
+ return datetime.strptime(date_str + "_" + start_str, "%y%j_%H%M")
+
+# Regular expression file patterns used later
+IBAND_REGEX = r'CREFLI_(?P<sat>[A-Za-z0-9]+)_d(?P<date_str>\d+)_t(?P<start_str>\d+)_e(?P<end_str>\d+).hdf'
+MBAND_REGEX = r'CREFLM_(?P<sat>[A-Za-z0-9]+)_d(?P<date_str>\d+)_t(?P<start_str>\d+)_e(?P<end_str>\d+).hdf'
+MODIS_1000M_REGEX = r'(?P<sat>[at])1.(?P<date_str>\d+).(?P<start_str>\d+).crefl.1000m.hdf'
+MODIS_500M_REGEX = r'(?P<sat>[at])1.(?P<date_str>\d+).(?P<start_str>\d+).crefl.500m.hdf'
+MODIS_250M_REGEX = r'(?P<sat>[at])1.(?P<date_str>\d+).(?P<start_str>\d+).crefl.250m.hdf'
+
+NAV_SET_USES = {
+ IBAND_NAV_UID : [ IBAND_REGEX ],
+ MBAND_NAV_UID : [ MBAND_REGEX ],
+ GEO_NAV_UID : [ MODIS_1000M_REGEX ],
+ GEO_500M_NAV_UID : [ MODIS_500M_REGEX ],
+ GEO_250M_NAV_UID : [ MODIS_250M_REGEX ],
+ }
+
+# Regular expressions for files we understand and some information that we know based on which one matches
+FILE_REGEX = {
+ IBAND_REGEX : {
+ K_FILL_VALUE : "_FillValue",
+ K_SCALE_FACTOR : "scale_factor",
+ K_SCALE_OFFSET : "add_offset",
+ K_UNITS : "units",
+ K_DATASETS : [ DS_CR_08, DS_CR_09, DS_CR_10 ],
+ K_GEO_PATTERN : "GITCO_%(sat)s_d%(date_str)s_t%(start_str)s_e%(end_str)s_b*_c*_*.h5",
+ "resolution" : 500,
+ "instrument" : INST_VIIRS,
+ "rows_per_scan" : 32,
+ "date_convert_func" : _convert_npp_datetimes,
+ },
+ MBAND_REGEX : {
+ K_FILL_VALUE : "_FillValue",
+ K_SCALE_FACTOR : "scale_factor",
+ K_SCALE_OFFSET : "add_offset",
+ K_UNITS : "units",
+ K_DATASETS : [ DS_CR_01, DS_CR_02, DS_CR_03, DS_CR_04, DS_CR_05, DS_CR_06, DS_CR_07 ],
+ K_GEO_PATTERN : "GMTCO_%(sat)s_d%(date_str)s_t%(start_str)s_e%(end_str)s_b*_c*_*.h5",
+ "resolution" : 1000,
+ "instrument" : INST_VIIRS,
+ "rows_per_scan" : 16,
+ "date_convert_func" : _convert_npp_datetimes,
+ },
+ MODIS_1000M_REGEX : {
+ K_FILL_VALUE : "_FillValue",
+ K_SCALE_FACTOR : 0.0001, # Floats are interpreted as constant scaling factors
+ K_SCALE_OFFSET : 0.0,
+ K_UNITS : None,
+ K_DATASETS : [ DS_CR_01, DS_CR_02, DS_CR_03, DS_CR_04, DS_CR_05, DS_CR_06, DS_CR_07 ],
+ K_GEO_PATTERN : "%(sat)s1.%(date_str)s.%(start_str)s.geo.hdf",
+ "resolution" : 1000,
+ "instrument" : INST_MODIS,
+ "rows_per_scan" : 10,
+ "date_convert_func" : _convert_modis_datetimes,
+ },
+ MODIS_500M_REGEX : {
+ K_FILL_VALUE : "_FillValue",
+ K_SCALE_FACTOR : 0.0001,
+ K_SCALE_OFFSET : 0.0,
+ K_UNITS : None,
+ K_DATASETS : [ DS_CR_01, DS_CR_02, DS_CR_03, DS_CR_04, DS_CR_05, DS_CR_06, DS_CR_07 ],
+ K_GEO_PATTERN : "%(sat)s1.%(date_str)s.%(start_str)s.geo.hdf",
+ "resolution" : 500,
+ "instrument" : INST_MODIS,
+ "rows_per_scan" : 20,
+ "date_convert_func" : _convert_modis_datetimes,
+ },
+ MODIS_250M_REGEX : {
+ K_FILL_VALUE : "_FillValue",
+ K_SCALE_FACTOR : 0.0001,
+ K_SCALE_OFFSET : 0.0,
+ K_UNITS : None,
+ K_DATASETS : [ DS_CR_01, DS_CR_02, DS_CR_03, DS_CR_04, DS_CR_05, DS_CR_06, DS_CR_07 ],
+ K_GEO_PATTERN : "%(sat)s1.%(date_str)s.%(start_str)s.geo.hdf",
+ "resolution" : 250,
+ "instrument" : INST_MODIS,
+ "rows_per_scan" : 40,
+ "date_convert_func" : _convert_modis_datetimes,
+ },
+ }
+
+# Satellites that we know how to handle
+# XXX: JPSS satellites will need to be added to this
+SATELLITES = {
+ "npp" : SAT_NPP,
+ "a" : SAT_AQUA,
+ "t" : SAT_TERRA,
+ }
+
+DATASET2BID = {
+ DS_CR_01 : BID_01,
+ DS_CR_02 : BID_02,
+ DS_CR_03 : BID_03,
+ DS_CR_04 : BID_04,
+ DS_CR_05 : BID_05,
+ DS_CR_06 : BID_06,
+ DS_CR_07 : BID_07,
+ DS_CR_08 : BID_08,
+ DS_CR_09 : BID_09,
+ DS_CR_10 : BID_10,
+ }
+
+def _safe_glob(pat, num_allowed=1):
+ glob_results = glob(pat)
+ if len(glob_results) < num_allowed:
+ log.error("Could not find enough files matching file pattern: '%s'" % (pat,))
+ raise ValueError("Could not find enough files matching file pattern: '%s'" % (pat,))
+ if len(glob_results) > num_allowed:
+ log.error("Too many files found matching file pattern: '%s'" % (pat,))
+ raise ValueError("Too many files found matching file pattern: '%s'" % (pat,))
+
+ if num_allowed == 1: return glob_results[0]
+ return glob_results
+
+def parse_datetimes_from_filepaths(filepaths):
+ """Provide a list of datetime objects for each understood file provided.
+
+ >>> assert( not parse_datetimes_from_filepaths([]) )
+ >>> assert( not parse_datetimes_from_filepaths([ "fake1.txt", "fake2.txt" ]) )
+ >>> dts = parse_datetimes_from_filepaths([
+ ... "CREFLI_npp_d20130311_t1916582_e1918223.hdf",
+ ... "CREFLI_npp_d20130311_t1918236_e1919477.hdf",
+ ... "CREFLI_npp_d20130311_t1919490_e1921131.hdf",
+ ... "CREFLI_npp_d20130311_t1921144_e1922385.hdf",
+ ... "CREFLI_npp_d20130311_t1922398_e1924039.hdf",
+ ... "CREFLI_npp_d20130311_t1924052_e1925294.hdf",
+ ... "CREFLI_npp_d20130311_t1925306_e1926548.hdf",
+ ... "CREFLI_npp_d20130311_t1926560_e1928202.hdf",
+ ... "CREFLI_npp_d20130311_t1928214_e1929456.hdf",
+ ... "fake3.txt",
+ ... "CREFLM_npp_d20130311_t1916582_e1918223.hdf",
+ ... "CREFLM_npp_d20130311_t1918236_e1919477.hdf",
+ ... "CREFLM_npp_d20130311_t1919490_e1921131.hdf",
+ ... "CREFLM_npp_d20130311_t1921144_e1922385.hdf",
+ ... "CREFLM_npp_d20130311_t1922398_e1924039.hdf",
+ ... "CREFLM_npp_d20130311_t1924052_e1925294.hdf",
+ ... "CREFLM_npp_d20130311_t1925306_e1926548.hdf",
+ ... "CREFLM_npp_d20130311_t1926560_e1928202.hdf",
+ ... "CREFLM_npp_d20130311_t1928214_e1929456.hdf"
+ ... ])
+ >>> assert( len(dts) == 18 )
+ """
+ file_dates = []
+ for fn in [ os.path.split(fp)[1] for fp in filepaths ]:
+ matched_pattern_obj = None
+ matched_pattern = None
+ for file_pattern in FILE_REGEX:
+ matched_pattern_obj = re.match(file_pattern, fn)
+ if not matched_pattern_obj:
+ continue
+ else:
+ matched_pattern = file_pattern
+ break
+
+ if not matched_pattern_obj:
+ # ignore any files we don't understand
+ continue
+
+ filename_info = matched_pattern_obj.groupdict()
+ file_dates.append(FILE_REGEX[matched_pattern]["date_convert_func"](filename_info["date_str"], filename_info["start_str"]))
+
+ return file_dates
+
+def sort_files_by_nav_uid(filepaths):
+ """Sort the provided filepaths into a dictionary structure keyed by
+ `nav_set_uid` and file pattern.
+
+ >>> assert( not sort_files_by_nav_uid([]) )
+ >>> assert( not sort_files_by_nav_uid([ "fake1.txt", "fake2.txt" ]) )
+ >>> filepaths_dict = sort_files_by_nav_uid([
+ ... "CREFLI_npp_d20130311_t1916582_e1918223.hdf",
+ ... "CREFLI_npp_d20130311_t1918236_e1919477.hdf",
+ ... "CREFLI_npp_d20130311_t1919490_e1921131.hdf",
+ ... "CREFLI_npp_d20130311_t1921144_e1922385.hdf",
+ ... "CREFLI_npp_d20130311_t1922398_e1924039.hdf",
+ ... "CREFLI_npp_d20130311_t1924052_e1925294.hdf",
+ ... "CREFLI_npp_d20130311_t1925306_e1926548.hdf",
+ ... "CREFLI_npp_d20130311_t1926560_e1928202.hdf",
+ ... "CREFLI_npp_d20130311_t1928214_e1929456.hdf",
+ ... "fake3.txt",
+ ... "CREFLM_npp_d20130311_t1916582_e1918223.hdf",
+ ... "CREFLM_npp_d20130311_t1918236_e1919477.hdf",
+ ... "CREFLM_npp_d20130311_t1919490_e1921131.hdf",
+ ... "CREFLM_npp_d20130311_t1921144_e1922385.hdf",
+ ... "CREFLM_npp_d20130311_t1922398_e1924039.hdf",
+ ... "CREFLM_npp_d20130311_t1924052_e1925294.hdf",
+ ... "CREFLM_npp_d20130311_t1925306_e1926548.hdf",
+ ... "CREFLM_npp_d20130311_t1926560_e1928202.hdf",
+ ... "CREFLM_npp_d20130311_t1928214_e1929456.hdf"
+ ... ])
+ >>> assert( len(filepaths_dict) == 2 )
+ >>> assert( len( filepaths_dict[MBAND_NAV_UID][MBAND_REGEX]) == 9 )
+ >>> assert( len( filepaths_dict[IBAND_NAV_UID][IBAND_REGEX]) == 9 )
+ """
+ filepaths_dict = { nav_set : { file_pattern : [] for file_pattern in NAV_SET_USES[nav_set] } for nav_set in NAV_SET_USES }
+
+ for nav_set,file_pattern_dict in filepaths_dict.items():
+ for file_pattern,file_pattern_match_list in file_pattern_dict.items():
+ for fp in filepaths:
+ fn = os.path.split(fp)[1]
+ if re.match(file_pattern, fn):
+ file_pattern_match_list.append(fp)
+
+ if not file_pattern_match_list:
+ # remove any file patterns that aren't used
+ del file_pattern_dict[file_pattern]
+
+ if not file_pattern_dict:
+ # remove any empty nav sets
+ del filepaths_dict[nav_set]
+
+ return filepaths_dict
+
+def get_file_meta(nav_set_uid, file_pattern, filepath):
+ """Gets all the meta data information for one specific
+ file. It is up to the user to separate a file's information into
+ separate bands.
+
+ Assumes the file exists.
+
+ >>> file_info = get_file_meta(IBAND_NAV_UID, IBAND_REGEX, "CREFLI_npp_d20130311_t1916582_e1918223.hdf")
+ Traceback (most recent call last):
+ ...
+    ValueError: Could not find enough files matching file pattern: 'GITCO_npp_d20130311_t1916582_e1918223_b*_c*_*.h5'
+ """
+ base_path,filename = os.path.split(filepath)
+ m = re.match(file_pattern, filename)
+ if not m:
+ log.error("Filename '%s' does not match the pattern provided '%s'" % (filename, file_pattern))
+ raise ValueError("Filename '%s' does not match the pattern provided '%s'" % (filename, file_pattern))
+
+ file_info = {}
+ file_info.update(m.groupdict())
+ file_info.update(FILE_REGEX[file_pattern])
+ file_info["data_dir"] = base_path
+ file_info["data_filepath"] = filepath
+
+ # Get start time
+ _convert_datetimes = file_info["date_convert_func"]
+ file_info["start_time"] = _convert_datetimes(file_info["date_str"], file_info["start_str"])
+ if "end_str" in file_info:
+ file_info["end_time"] = _convert_datetimes(file_info["date_str"], file_info["end_str"])
+
+ # Geo file information
+    # MUST be done before the satellite constant lookup below so the glob
+    # pattern is built with the original satellite string from the filename
+ file_info["geo_filename_pat"] = file_info[K_GEO_PATTERN] % file_info
+ file_info["geo_filepath"] = _safe_glob(os.path.join(base_path, file_info["geo_filename_pat"]))
+
+ # Get the constant value for whatever satellite we have
+ file_info["sat"] = SATELLITES[file_info["sat"]]
+ # XXX: If band kind is different this will have to be taken from a dictionary
+ file_info["kind"] = BKIND_CREFL
+ file_info["data_kind"] = DKIND_CREFL
+
+ file_info["bands"] = [ DATASET2BID[ds] for ds in file_info[K_DATASETS] ]
+
+ return file_info
+
+def get_data_from_dataset(ds,
+ data_fill_value,
+ scale_factor,
+ scale_offset,
+ fill_value=DEFAULT_FILL_VALUE):
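+    """Return the dataset's data as a float32 array with
+    ``data * scale_factor + scale_offset`` applied and fill pixels set to
+    ``fill_value`` (or to the file's own fill value if ``fill_value`` is None).
+    """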
+ # get data
+ data = ds.get().astype(numpy.float32)
+
+ # unscale data
+ fill_mask = data == data_fill_value
+ numpy.multiply(data, scale_factor, out=data)
+ numpy.add(data, scale_offset, out=data)
+ if fill_value is None: fill_value = data_fill_value
+ data[fill_mask] = fill_value
+
+ return data
+
+def get_attr_from_dataset(ds, file_info):
+ ds_info = ds.attributes()
+
+ # fill value
+ data_fill_value = ds_info[file_info[K_FILL_VALUE]]
+
+    # scale factor (a float is a constant factor, a string is the name of a
+    # dataset attribute to read; see FILE_REGEX above)
+    scale_factor = file_info[K_SCALE_FACTOR]
+    if isinstance(scale_factor, basestring):
+        scale_factor = ds_info[scale_factor]
+
+    # scale offset (same convention as the scale factor)
+    scale_offset = file_info[K_SCALE_OFFSET]
+    if isinstance(scale_offset, basestring):
+        scale_offset = ds_info[scale_offset]
+
+ # units (not used)
+
+ return data_fill_value,scale_factor,scale_offset
+
+
+def read_data_file(file_info, fill_value=DEFAULT_FILL_VALUE):
+ """Read information from the HDF4 file specified by ``data_filepath``
+ in the ``file_info`` passed as the first argument.
+
+    Any numeric data returned uses the optional ``fill_value`` keyword for
+    invalid data points or data that could not be found/calculated. The
+    default is the :ref:`DEFAULT_FILL_VALUE <default_fill_value>` constant.
+    If ``fill_value`` is ``None``, the fill value is the same as in the
+    crefl file.
+
+ The following keys are required in the ``file_info`` dictionary (all-caps
+ means a constant defined in this file):
+
+ - data_filepath:
+ The filepath of the crefl file to operate on
+ - K_DATASETS:
+        A list of dataset names to get info for (not including navigation)
+ - K_FILL_VALUE:
+ Attribute name in the datasets for the fill_value of the data
+ - K_SCALE_FACTOR:
+ Attribute name in the datasets for the scale_factor of the data
+ - K_SCALE_OFFSET:
+ Attribute name in the datasets for the scale_offset of the data
+ - kind:
+ The kind of the band in the data (I, M, etc.)
+
+ This function fills in file_info with more data, primarily the ``bands``
+ key whose value is a dictionary of dictionaries holding 'per dataset'
+ information that can be used by other polar2grid components.
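+
+    A minimal sketch of the intended call chain (the filename here is
+    illustrative)::
+
+        finfo = get_file_meta(IBAND_NAV_UID, IBAND_REGEX,
+                              "CREFLI_npp_d20130311_t1916582_e1918223.hdf")
+        finfo = read_data_file(finfo)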
+ """
+ # Open the file to get additional information
+ h = SD.SD(file_info["data_filepath"], SD.SDC.READ)
+
+ # Pull band information
+ file_info["bands"] = {}
+ for ds_name in file_info[K_DATASETS]:
+ try:
+ ds = h.select(ds_name)
+ except hdf_error.HDF4Error:
+ msg = "Data set '%s' does not exist in '%s'" % (ds_name,file_info["data_filepath"])
+ log.error(msg)
+ raise ValueError(msg)
+
+ # Get attributes
+ data_fill_value,scale_factor,scale_offset = get_attr_from_dataset(
+ ds, file_info
+ )
+
+ # Get the data
+ if fill_value is None:
+ fill_value = data_fill_value
+ data = get_data_from_dataset(ds,
+ data_fill_value,
+ scale_factor,
+ scale_offset,
+ fill_value=fill_value
+ )
+
+ # write information to the file information
+        if data.shape[0] == DIM_LINES_750 and data.shape[1] == DIM_SAMPLES_750:
+            res = PIXEL_SIZE_750
+        elif data.shape[0] == DIM_LINES_350 and data.shape[1] == DIM_SAMPLES_350:
+            res = PIXEL_SIZE_350
+ else:
+ log.error("Don't know how to handle data shape '%r'" % (data.shape,))
+ raise ValueError("Don't know how to handle data shape '%r'" % (data.shape,))
+
+ file_info["bands"][(file_info["kind"],DATASET2BID[ds_name])] = {
+ "data" : data,
+ "data_kind" : DATASET2DKIND[ds_name],
+ "remap_data_as" : DATASET2DKIND[ds_name],
+ "kind" : file_info["kind"],
+ "band" : DATASET2BID[ds_name],
+ "rows_per_scan" : DATASET2RPS[ds_name],
+ "fill_value" : fill_value,
+ "pixel_size" : res
+ }
+
+ # Get Navigation data
+    lat_ds = h.select(DS_LATITUDE)
+ lat_fv,lat_factor,lat_offset = get_attr_from_dataset(lat_ds, file_info)
+ file_info["latitude"] = get_data_from_dataset(
+ lat_ds, lat_fv, lat_factor, lat_offset)
+ lat_shape = file_info["latitude"].shape
+
+    lon_ds = h.select(DS_LONGITUDE)
+ lon_fv,lon_factor,lon_offset = get_attr_from_dataset(lon_ds, file_info)
+ file_info["longitude"] = get_data_from_dataset(
+ lon_ds, lon_fv, lon_factor, lon_offset)
+ lon_shape = file_info["longitude"].shape
+ if lat_shape != lon_shape:
+ log.error("Latitude and longitude data are a different shape")
+ raise ValueError("Latitude and longitude data are a different shape")
+
+ if lat_shape[0] == DIM_LINES_750 and lat_shape[1] == DIM_SAMPLES_750:
+ file_info["pixel_size"] = PIXEL_SIZE_750
+ elif lat_shape[0] == DIM_LINES_350 and lat_shape[1] == DIM_SAMPLES_350:
+ file_info["pixel_size"] = PIXEL_SIZE_350
+ else:
+ log.error("Don't know how to handle nav shape '%r'" % (lat_shape,))
+ raise ValueError("Don't know how to handle nav shape '%r'" % (lat_shape,))
+
+ return file_info
+
+def main():
+ from argparse import ArgumentParser
+ description = """Simple command line interface to test what information
+ the guidebook returns for provided files.
+ """
+ parser = ArgumentParser(description=description)
+ parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0,
+ help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG')
+ parser.add_argument('--doctest', dest="run_doctests", action="store_true", default=False,
+ help="Run doctests")
+ parser.add_argument("data_files", nargs="*",
+ help="1 or more crefl product files")
+ args = parser.parse_args()
+
+ levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
+ logging.basicConfig(level = levels[min(3, args.verbosity)])
+
+ if args.run_doctests:
+ import doctest
+ return doctest.testmod()
+
+ if not args.data_files:
+ print "No files were provided"
+ return 0
+
+ from pprint import pprint
+    for nav_set_uid,file_pattern_dict in sort_files_by_nav_uid(args.data_files).items():
+        for file_pattern,filepath_list in file_pattern_dict.items():
+            for fp in filepath_list:
+                print "### %s ###" % fp
+                pprint(get_file_meta(nav_set_uid, file_pattern, fp))
+
+if __name__ == "__main__":
+ sys.exit(main())
+
View
24 py/polar2grid_crefl/setup.py
@@ -0,0 +1,24 @@
+#!python
+from setuptools import setup, find_packages
+
+classifiers = ""
+version = '1.0.0'
+
+setup(
+ name='polar2grid.crefl',
+ version=version,
+ description="Library and scripts to aggregate VIIRS or MODIS corrected reflectance products (crefl) and get associated metadata",
+ classifiers=filter(None, classifiers.split("\n")),
+ keywords='',
+ author='David Hoese, SSEC',
+ author_email='david.hoese@ssec.wisc.edu',
+ url='',
+ packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
+ namespace_packages=["polar2grid"],
+ include_package_data=True,
+ zip_safe=True,
+ install_requires=['numpy', 'matplotlib', 'pyhdf', 'h5py', 'polar2grid.core', 'polar2grid.viirs'],
+ dependency_links = ['http://larch.ssec.wisc.edu/cgi-bin/repos.cgi'],
+ entry_points = {'console_scripts' : [ ]}
+)
+
View
135 py/polar2grid_viirs/polar2grid/viirs/viirs_guidebook.py
@@ -697,6 +697,63 @@ def geo_info(fn):
LOG.warning('unable to find %s in guidebook' % filename)
return finfo
+def read_geo_bbox_coordinates(hp, finfo, fill_value=DEFAULT_FILL_VALUE):
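+    """Read the West/East/North/South bounding coordinates from the
+    geolocation file, collecting one set of bounds per granule for
+    aggregate files (``Gran_0``, ``Gran_1``, ...) and reducing them with
+    ``calculate_bbox_bounds``.
+    """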
+ wbound_path = finfo[K_WEST_COORD]
+ ebound_path = finfo[K_EAST_COORD]
+ nbound_path = finfo[K_NORTH_COORD]
+ sbound_path = finfo[K_SOUTH_COORD]
+
+ # Get the bounding box coordinates (special cased for aggregate files)
+ wests,easts,norths,souths = [],[],[],[]
+ w_h5v = h5path(hp, wbound_path, finfo["geo_path"], required=False)
+ e_h5v = h5path(hp, ebound_path, finfo["geo_path"], required=False)
+ n_h5v = h5path(hp, nbound_path, finfo["geo_path"], required=False)
+ s_h5v = h5path(hp, sbound_path, finfo["geo_path"], required=False)
+ count = 0
+ while w_h5v is not None:
+ # Get the data and add it to the list of bounds
+ wests.append( w_h5v[0] )
+ easts.append( e_h5v[0] )
+ norths.append( n_h5v[0] )
+ souths.append( s_h5v[0] )
+
+ # Update the h5paths for other granules if they exist (aggregate files)
+ count += 1
+ wbound_path_tmp = wbound_path.replace("Gran_0", "Gran_%d" % count)
+ ebound_path_tmp = ebound_path.replace("Gran_0", "Gran_%d" % count)
+ nbound_path_tmp = nbound_path.replace("Gran_0", "Gran_%d" % count)
+ sbound_path_tmp = sbound_path.replace("Gran_0", "Gran_%d" % count)
+ w_h5v = h5path(hp, wbound_path_tmp, finfo["geo_path"], required=False)
+ e_h5v = h5path(hp, ebound_path_tmp, finfo["geo_path"], required=False)
+ n_h5v = h5path(hp, nbound_path_tmp, finfo["geo_path"], required=False)
+ s_h5v = h5path(hp, sbound_path_tmp, finfo["geo_path"], required=False)
+
+ # Check that we got some bounds information
+ if len(wests) == 0:
+ log.error("Could not find any bounding coordinates: '%s'" % (ebound_path,))
+ raise ValueError("Could not find any bounding coordinates: '%s'" % (ebound_path,))
+
+ # Find the actual bounding box of these values
+ wbound,ebound,nbound,sbound = calculate_bbox_bounds(wests, easts, norths, souths, fill_value=fill_value)
+
+ return wbound,ebound,nbound,sbound
+
+def load_geo_variable(hp, finfo, variable_key, dtype=np.float32, required=False):
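+    """Load a 2D geolocation variable from the open HDF5 file ``hp`` using
+    the dataset path stored at ``finfo[variable_key]``. Returns None when
+    the variable is missing and not required.
+    """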
+ var_path = finfo[variable_key]
+ h5v = h5path(hp, var_path, finfo["geo_path"], required=required)
+ if h5v is None:
+ LOG.debug("Variable '%s' was not found in '%s'" % (var_path, finfo["geo_path"]))
+ data = None
+ else:
+ data = h5v[:,:]
+ data = data.astype(dtype)
+
+ return data
+
+def get_geo_variable_fill_mask(data_array, variable_key):
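+    """Return a boolean mask of missing/fill pixels in ``data_array`` using
+    the test registered for ``variable_key`` in ``MISSING_GUIDE``, or None
+    if no test is registered.
+    """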
+    mask = MISSING_GUIDE[variable_key][1](data_array) if variable_key in MISSING_GUIDE else None
+ return mask
+
def read_geo_info(finfo, fill_value=-999, dtype=np.float32):
"""Will fill in finfo with the data that is assumed to
be necessary for future processing.
@@ -728,31 +785,23 @@ def read_geo_info(finfo, fill_value=-999, dtype=np.float32):
hp = h5.File(finfo["geo_path"], 'r')
- lat_var_path = finfo[K_LATITUDE]
- lon_var_path = finfo[K_LONGITUDE]
st_var_path = finfo[K_STARTTIME]
- sza_var_path = finfo[K_SOLARZENITH]
- lza_var_path = finfo[K_LUNARZENITH]
mia_var_path = finfo[K_MOONILLUM]
lat_gring_path = finfo[K_LAT_G_RING]
lon_gring_path = finfo[K_LON_G_RING]
- wbound_path = finfo[K_WEST_COORD]
- ebound_path = finfo[K_EAST_COORD]
- nbound_path = finfo[K_NORTH_COORD]
- sbound_path = finfo[K_SOUTH_COORD]
# Get latitude data
- h5v = h5path(hp, lat_var_path, finfo["geo_path"], required=True)
- lat_data = h5v[:,:]
- lat_data = lat_data.astype(dtype)
- del h5v
+ lat_data = load_geo_variable(hp, finfo, K_LATITUDE, dtype=dtype, required=True)
# Get longitude data
- h5v = h5path(hp, lon_var_path, finfo["geo_path"], required=True)
- lon_data = h5v[:,:]
- lon_data = lon_data.astype(dtype)
- del h5v
+ lon_data = load_geo_variable(hp, finfo, K_LONGITUDE, dtype=dtype, required=True)
+ # Get solar zenith angle
+ sza_data = load_geo_variable(hp, finfo, K_SOLARZENITH)
+
+ # Get the lunar zenith angle
+ lza_data = load_geo_variable(hp, finfo, K_LUNARZENITH)
+
# Get start time
h5v = h5path(hp, st_var_path, finfo["geo_path"], required=True)
start_time = _st_to_datetime(h5v[0])
@@ -765,52 +814,8 @@ def read_geo_info(finfo, fill_value=-999, dtype=np.float32):
swath_polygon = make_polygon_tuple(lon_gring, lat_gring)
# Get the bounding box coordinates (special cased for aggregate files)
- wests,easts,norths,souths = [],[],[],[]
- w_h5v = h5path(hp, wbound_path, finfo["geo_path"], required=False)
- e_h5v = h5path(hp, ebound_path, finfo["geo_path"], required=False)
- n_h5v = h5path(hp, nbound_path, finfo["geo_path"], required=False)
- s_h5v = h5path(hp, sbound_path, finfo["geo_path"], required=False)
- count = 0
- while w_h5v is not None:
- # Get the data and add it to the list of bounds
- wests.append( w_h5v[0] )
- easts.append( e_h5v[0] )
- norths.append( n_h5v[0] )
- souths.append( s_h5v[0] )
-
- # Update the h5paths for other granules if they exist (aggregate files)
- count += 1
- wbound_path_tmp = wbound_path.replace("Gran_0", "Gran_%d" % count)
- ebound_path_tmp = ebound_path.replace("Gran_0", "Gran_%d" % count)
- nbound_path_tmp = nbound_path.replace("Gran_0", "Gran_%d" % count)
- sbound_path_tmp = sbound_path.replace("Gran_0", "Gran_%d" % count)
- w_h5v = h5path(hp, wbound_path_tmp, finfo["geo_path"], required=False)
- e_h5v = h5path(hp, ebound_path_tmp, finfo["geo_path"], required=False)
- n_h5v = h5path(hp, nbound_path_tmp, finfo["geo_path"], required=False)
- s_h5v = h5path(hp, sbound_path_tmp, finfo["geo_path"], required=False)
+ wbound,ebound,nbound,sbound = read_geo_bbox_coordinates(hp, finfo, fill_value=fill_value)
- # Check that we got some bounds information
- if len(wests) == 0:
- log.error("Could not find any bounding coordinates: '%s'" % (ebound_path,))
- raise ValueError("Could not find any bounding coordinates: '%s'" % (ebound_path,))
-
- # Find the actual bounding box of these values
- wbound,ebound,nbound,sbound = calculate_bbox_bounds(wests, easts, norths, souths, fill_value=fill_value)
-
- # Get solar zenith angle
- h5v = h5path(hp, sza_var_path, finfo["geo_path"], required=True)
- sza_data = h5v[:,:]
- sza_data = sza_data.astype(dtype)
-
- # Get the lunar zenith angle
- h5v = h5path(hp, lza_var_path, finfo["geo_path"], required=False)
- if h5v is None:
- LOG.debug("Variable '%s' was not found in '%s'" % (lza_var_path, finfo["geo_path"]))
- lza_data = None
- else:
- lza_data = h5v[:,:]
- lza_data = lza_data.astype(dtype)
-
# Get the moon illumination information
h5v = h5path(hp, mia_var_path, finfo["geo_path"], required=False)
if h5v is None:
@@ -821,17 +826,17 @@ def read_geo_info(finfo, fill_value=-999, dtype=np.float32):
LOG.debug("moon illumination fraction: " + str(moon_illum))
# Calculate latitude mask
- lat_mask = MISSING_GUIDE[K_LATITUDE][1](lat_data) if K_LATITUDE in MISSING_GUIDE else None
+ lat_mask = get_geo_variable_fill_mask(lat_data, K_LATITUDE)
# Calculate longitude mask
- lon_mask = MISSING_GUIDE[K_LONGITUDE][1](lon_data) if K_LONGITUDE in MISSING_GUIDE else None
+ lon_mask = get_geo_variable_fill_mask(lon_data, K_LONGITUDE)
# Calculate solar zenith angle mask
- sza_mask = MISSING_GUIDE[K_SOLARZENITH][1](sza_data) if K_SOLARZENITH in MISSING_GUIDE else None
+ sza_mask = get_geo_variable_fill_mask(sza_data, K_SOLARZENITH)
# Calculate the lunar zenith angle mask
if lza_data is not None:
- lza_mask = MISSING_GUIDE[K_LUNARZENITH][1](lza_data) if K_LUNARZENITH in MISSING_GUIDE else None
+ lza_mask = get_geo_variable_fill_mask(lza_data, K_LUNARZENITH)
# Mask off bad data
# NOTE: There could still be missing image data to account for
View
2  py/polar2grid_viirs/polar2grid/viirs/viirs_imager_to_swath.py
@@ -391,7 +391,7 @@ def process_geo(meta_data, geo_data, fill_value=DEFAULT_FILL_VALUE, cut_bad=Fals
os.rename(moonname, fbf_moon)
# Calculate the actual bounds of the entire swath
- lon_west,lon_east,lat_north,lat_south = calculate_bbox_bounds(wests, easts, norths, souths)
+ lon_west,lon_east,lat_north,lat_south = calculate_bbox_bounds(wests, easts, norths, souths, fill_value=fill_value)
if lon_west == fill_value or lon_east == fill_value or \
lat_north == fill_value or lat_south == fill_value:
log.error("No valid bounding coordinates could be found, not enough valid geolocation data")
View
41 swbundle/crefl2gtiff.sh
@@ -0,0 +1,41 @@
+#!/usr/bin/env bash
+### CREFL2GTIFF WRAPPER ###
+#
+# Copyright (C) 2013 Space Science and Engineering Center (SSEC),
+# University of Wisconsin-Madison.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# This file is part of the polar2grid software package. Polar2grid takes
+# satellite observation data, remaps it, and writes it to a file format for
+# input into another program.
+# Documentation: http://www.ssec.wisc.edu/software/polar2grid/
+#
+# Written by David Hoese January 2013
+# University of Wisconsin-Madison
+# Space Science and Engineering Center
+# 1225 West Dayton Street
+# Madison, WI 53706
+# david.hoese@ssec.wisc.edu
+
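+# Usage sketch (the input filename below is illustrative); all arguments
+# are passed straight through to the polar2grid.crefl2gtiff module:
+#   ./crefl2gtiff.sh CREFLM_npp_d20130311_t1916582_e1918223.hdf
+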
+if [ -z "$POLAR2GRID_HOME" ]; then
+ export POLAR2GRID_HOME="$( cd -P "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )"
+fi
+
+# Setup necessary environments
+source "$POLAR2GRID_HOME"/bin/polar2grid_env.sh
+
+# Call the python module to do the processing, passing all arguments
+"$POLAR2GRID_HOME"/ShellB3/bin/python -m polar2grid.crefl2gtiff -vv "$@"
+