
Commit

Partial update, non-functioning
zkwurst committed Aug 1, 2017
1 parent 56aa141 commit 85e21a9
Showing 1 changed file with 126 additions and 45 deletions.
171 changes: 126 additions & 45 deletions r.in.usgs/r.in.usgs.py
@@ -1,16 +1,16 @@
#!/usr/bin/env python
#-*- coding: utf-8 -*-

#MODULE: r.in.usgsned
#MODULE: r.in.usgs
#
#AUTHOR: Zechariah Krautwurst
#
#MENTORS: Anna Petrasova
# Vaclav Petras
#
#PURPOSE: Download user-requested tiles from USGS NED database.
#PURPOSE: Download user-requested products from USGS database.
#
#VERSION: [STABLE] r.in.usgsned
#VERSION: [DEV] r.in.usgs
#
#COPYRIGHT: (C) 2017 Zechariah Krautwurst and the GRASS Development Team
#
@@ -19,9 +19,9 @@
# for details.

#%module
#% description: Download USGS NED data
#% description: Download USGS data
#% keyword: raster
#% keyword: NED
#% keyword: USGS
#%end

#%flag
@@ -33,7 +33,7 @@
#%option
#% key: product
#% required: yes
#% options: 'vectorcmb', 'nhd', 'nbdmi', 'gnis', 'nsd', 'ned', 'naip', 'ustopo', 'woodland', 'hro', 'nlcd', 'smallscale', 'histtopo', 'nedsrc', 'ntd', 'nbd'
#% options: vectorcmb, nhd, nbdmi, gnis, nsd, ned, naip, ustopo, woodland, hro, nlcd, smallscale, histtopo, nedsrc, ntd, nbd
#% answer: ned
#% label: Select USGS Data Product
#% description: Choose which available USGS datasets to query
@@ -43,7 +43,7 @@
#%option
#% key: dataset
#% required: yes
#% options: 'NHDPlus High Resolution (NHDPlus HR) Beta', 'National Hydrography Dataset (NHD)', 'Watershed Boundary Dataset (WBD)', '5 meter DEM (Alaska only)', '2 arc-second DEM - Alaska', '1/3 arc-second DEM', '1 meter DEM', '1/9 arc-second DEM', 'Contours (1:24,000-scale)', '1 arc-second DEM', 'US Topo Current', 'US Topo Non-Current', 'National Land Cover Database (NLCD) - 2006', 'National Land Cover Database (NLCD) - 2011', 'National Land Cover Database (NLCD) - 2001', 'Hydrography (Small-scale)', 'Contours (Small-scale)', 'Transportation (Small-scale)', 'Elevation (Small-scale)', 'Land Cover (Small-scale)', 'Orthoimagery (Small-scale)', 'Structures (Small-scale)', 'Boundaries (Small-scale)', 'Lidar Point Cloud (LPC)', 'DEM Source (OPR)', 'Ifsar Orthorectified Radar Image (ORI)', 'Ifsar Digital Surface Model (DSM)', 'USFS Roads', 'National Transportation Dataset'
#% options: NHDPlus High Resolution (NHDPlus HR) Beta, National Hydrography Dataset (NHD), Watershed Boundary Dataset (WBD), 5 meter DEM (Alaska only), 2 arc-second DEM - Alaska, 1/3 arc-second DEM, 1 meter DEM, 1/9 arc-second DEM, Contours (1:24,000-scale), 1 arc-second DEM, US Topo Current, US Topo Non-Current, National Land Cover Database (NLCD) - 2006, National Land Cover Database (NLCD) - 2011, National Land Cover Database (NLCD) - 2001, Hydrography (Small-scale), Contours (Small-scale), Transportation (Small-scale), Elevation (Small-scale), Land Cover (Small-scale), Orthoimagery (Small-scale), Structures (Small-scale), Boundaries (Small-scale), Lidar Point Cloud (LPC), DEM Source (OPR), Ifsar Orthorectified Radar Image (ORI), Ifsar Digital Surface Model (DSM), USFS Roads, National Transportation Dataset
#% answer: ned
#% label: Select USGS Data Product
#% description: Choose which available USGS datasets to query
@@ -52,9 +52,9 @@


#%option
#% key: resolution
#% key: extent
#% required: yes
#% options: 'HU-4 Subregion', 'State', 'HU-8 Subbasin', 'National', 'HU-2 Region', 'Varies', '1 x 1 degree', '10000 x 10000 meter', '15 x 15 minute', '7.5 x 7.5 minute', '3 x 3 degree', 'North America', 'Contiguous US'
#% options: HU-4 Subregion, State, HU-8 Subbasin, National, HU-2 Region, Varies, 1 x 1 degree, 10000 x 10000 meter, 15 x 15 minute, 7.5 x 7.5 minute, 3 x 3 degree, North America, Contiguous US
#% label: USGS dataset resolution
#% description: Available dataset resolutions
#% guisection: USGS Data Selection
@@ -112,74 +112,155 @@
def main():
# Set GRASS GUI options and flags to python variables
gui_product = options['product']
gui_resolution = options['resolution']
gui_dataset = options['dataset']
gui_extent = options['extent']
gui_output_layer = options['output']
gui_resampling_method = options['resampling_method']
gui_i_flag = flags['i']
global gui_k_flag
gui_k_flag = flags['k']
work_dir = options['output_directory']

############################################
# # Hard-coded data dictionary for NED parameters
# usgs_product_dict = {
# "ned":
# {"product": "National Elevation Dataset (NED)",
# # defined resolution in degrees, meters, and feet
# "dataset": {"1 arc-second": (1. / 3600, 30, 100),
# "1/3 arc-second": (1. / 3600 / 3, 10, 30),
# "1/9 arc-second": (1. / 3600 / 9, 3, 10)
# },
# "extent":
# "format": "IMG",
# "srs": "wgs84",
# "srs_proj4": "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
# "interpolation": "bilinear"}
# }
## gui_product:
## {"dataset": gui_dataset,
## "extent": {gui_extent
## "format": gui_format,
#
## }}
##
#
#
#
#########################################

# NOTE: Working on getting data tags to function as API URL input.
# Tags are read into a list; still need to figure out how to associate
# the appropriate tag with the GUI-selected extent (one possible
# approach is sketched after the datasets request below).

# Data dictionary generator

dict_TNM_API_URL = "https://viewer.nationalmap.gov/tnmaccess/api/datasets?"
dict_TNM_API_GET = urllib2.urlopen(dict_TNM_API_URL, timeout=12)
dict_returnJSON = json.load(dict_TNM_API_GET)
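
    # A minimal sketch of one way to resolve the NOTE above: walk the parsed
    # datasets JSON and return the sbDatasetTag whose "extentsFormats" keys
    # contain the GUI-selected extent. The function name and its use here are
    # illustrative assumptions only; nothing in this module calls it yet.
    def guess_tag_for_extent(datasets_json, product_id, wanted_extent):
        for product in datasets_json:
            if str(product["internalId"]) != product_id:
                continue
            for tag_info in product["tags"].values():
                if wanted_extent in tag_info["extentsFormats"]:
                    return tag_info["sbDatasetTag"]
        return None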

# Hard-coded data dictionary for NED parameters
USGS_product_dict = {
"ned":
{"dataset": "National Elevation Dataset (NED)",
"format": "IMG",
# defined resolution in degrees, meters, and feet
"extent": {"1 arc-second": (1. / 3600, 30, 100),
"1/3 arc-second": (1. / 3600 / 3, 10, 30),
"1/9 arc-second": (1. / 3600 / 9, 3, 10)
},
"srs": "wgs84",
"srs_proj4": "+proj=longlat +ellps=GRS80 +datum=NAD83 +nodefs",
"interpolation": "bilinear"}
}
# gui_product:
# {"dataset": gui_dataset,
# "extent": {gui_extent
# "format": gui_format,
# }}
#

usgs_dict = {}
for product in dict_returnJSON:
prod_title = str(product["sbDatasetTag"])
prod_id = str(product["internalId"])
prod_tags = product['tags']
usgs_dict[prod_id] = {"product": prod_title}
usgs_dict[prod_id]["dataset"] = {}
for tag in prod_tags:
usgs_dict[prod_id]["dataset"][tag] = {}
prod_extents = prod_tags[tag]["extentsFormats"]
usgs_dict[prod_id]["dataset"][tag]["extents"] = {}
prod_data_tag = prod_tags[tag]["sbDatasetTag"]
usgs_dict[prod_id]["dataset"][tag]["sbDatasetTag"] = prod_data_tag
for prod_extent in prod_extents:
usgs_dict[prod_id]["dataset"][tag]["extents"][prod_extent] = {}
prod_formats = prod_tags[tag]["extentsFormats"][str(prod_extent)]
usgs_dict[prod_id]["dataset"][tag]["extents"][prod_extent]["formats"] = []
usgs_dict[prod_id]["dataset"][tag]["extents"][prod_extent]["formats"] = prod_formats

usgs_product_dict = {}
for p in usgs_dict:
product_title = usgs_dict[p]['product']
product_datasets = []
product_extents = []
product_formats = []
product_srs = []
product_proj4 = []
for ds in usgs_dict[p]['dataset']:
product_datasets.append(str(ds))
for e in usgs_dict[p]['dataset'][ds]['extents']:
if e not in product_extents:
product_extents.append(str(e))
for f in usgs_dict[p]['dataset'][ds]['extents'][e]['formats']:
if f not in product_formats:
product_formats.append(str(f))
usgs_product_dict[p] = {
'product': product_title,
'dataset': product_datasets,
'extent': product_extents,
'format': product_formats,
}
if p == 'ned':
usgs_product_dict[p]['dataset'] = {"1 arc-second DEM": (1. / 3600, 30, 100),
"1/3 arc-second DEM": (1. / 3600 / 3, 10, 30),
"1/9 arc-second DEM": (1. / 3600 / 9, 3, 10)}
usgs_product_dict[p]['srs'] = 'wgs84'
usgs_product_dict[p]['srs_proj4'] = "+proj=longlat +ellps=GRS80 +datum=NAD83 +no_defs"
usgs_product_dict[p]['interpolation'] = 'bilinear'

##########################################################

# Dynamic variables called from USGS data dict
nav_string = USGS_product_dict[gui_product]
product_title = nav_string["dataset"]
nav_string = usgs_product_dict[gui_product]
product_title = nav_string["product"]
product_format = nav_string["format"]
product_resolution = nav_string["extent"][gui_resolution]
product_SRS = nav_string["srs"]
product_PROJ4 = nav_string["srs_proj4"]
product_dataset = nav_string["dataset"][gui_dataset]
product_tag = usgs_dict[gui_product]["dataset"][gui_dataset]["sbDatasetTag"]
product_srs = nav_string["srs"]
product_proj4 = nav_string["srs_proj4"]

# current units
proj = gscript.parse_command('g.proj', flags='g')
if gscript.locn_is_latlong():
product_resolution = product_resolution[0]
product_dataset = product_dataset[0]
elif float(proj['meters']) == 1:
product_resolution = product_resolution[1]
product_dataset = product_dataset[1]
else:
# we assume feet
product_resolution = product_resolution[2]
product_dataset = product_dataset[2]
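
    # Worked example: with gui_dataset = "1/3 arc-second DEM" the tuple above
    # is (1. / 3600 / 3, 10, 30), so a lat/long location resolves to roughly
    # 0.0000926 degrees, a metric location to 10 (meters), and anything else
    # to 30 (feet).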

if gui_resampling_method == 'default':
gui_resampling_method = nav_string['interpolation']
gscript.verbose(_("The default resampling method for product {product} is {res}").format(product=gui_product,
res=gui_resampling_method))
res=nav_string['interpolation']))

# Get coordinates for current GRASS computational region and convert to USGS SRS
gregion = gscript.region()
min_coords = gscript.read_command('m.proj', coordinates=(gregion['w'], gregion['s']),
proj_out=product_PROJ4, separator='comma',
proj_out=product_proj4, separator='comma',
flags='d')
max_coords = gscript.read_command('m.proj', coordinates=(gregion['e'], gregion['n']),
proj_out=product_PROJ4, separator='comma',
proj_out=product_proj4, separator='comma',
flags='d')
min_list = min_coords.split(',')[:2]
max_list = max_coords.split(',')[:2]
list_bbox = min_list + max_list
str_bbox = ",".join((str(coord) for coord in list_bbox))
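
    # str_bbox takes the form "min_x,min_y,max_x,max_y" in the product SRS;
    # for a small lat/long region it would look something like
    # "-78.77,35.72,-78.60,35.81" (illustrative values only).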

# Format variables for TNM API call
gui_prod_str = product_title + " " + gui_resolution

gui_prod_str = str(product_tag)

# gui_prod_str = product_title + " " + gui_extent
datasets = urllib.quote_plus(gui_prod_str)
prod_format = urllib.quote_plus(product_format)
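
    # These quoted strings are presumably combined further down (outside this
    # hunk) into a TNM Access products request, roughly of the form
    # ".../tnmaccess/api/products?datasets=<datasets>&bbox=<str_bbox>&prodFormats=<prod_format>";
    # the exact parameter names are an assumption here, since only the
    # datasets endpoint URL appears in this diff.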

@@ -287,7 +368,7 @@ def down_list():
)
data_info = '\n'.join(data_info).format(size=total_size_str,
count=tile_download_count,
srs=product_SRS,
srs=product_srs,
tile=tile_titles_info)

if gui_i_flag:
@@ -380,7 +461,7 @@ def down_list():
gscript.info(in_info)
try:
gscript.run_command('r.import', input=t, output=LT_layer_name,
resolution='value', resolution_value=product_resolution,
resolution='value', resolution_value=product_dataset,
extent="region", resample=gui_resampling_method)
if not gui_k_flag:
cleanup_list.append(t)
@@ -392,7 +473,7 @@
try:
gscript.use_temp_region()
# set the resolution
gscript.run_command('g.region', res=product_resolution, flags='a')
gscript.run_command('g.region', res=product_dataset, flags='a')
gscript.run_command('r.patch', input=patch_names,
output=gui_output_layer)
gscript.del_temp_region()
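
Based on the option keys visible in this diff (product, dataset, extent, output, output_directory, resampling_method, plus the i and k flags), a call to the finished module from a GRASS Python session might look like the sketch below. The values are illustrative and untested, since this commit is marked as non-functioning.

import grass.script as gscript

# Hypothetical invocation of r.in.usgs once the module is working;
# parameter values here are examples only.
gscript.run_command('r.in.usgs',
                    product='ned',
                    dataset='1/3 arc-second DEM',
                    extent='1 x 1 degree',
                    output='ned_dem',
                    output_directory='/tmp/usgs_cache',
                    resampling_method='default',
                    flags='k')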
