Permalink
Browse files

Moved WarpElev.sh functionality into GetDataset.py.

Only one shell-out to gdalwarp remaining!
Added support for NED 1/9-second files where available.
Increased range of acceptable metadata.
Added support for TGZ files and changed preference to them over ZIP.
Added partial support for NLCD 1992 and 2006.
Added partial support for new image types.
Changed processAOI to processAOI2 as per USGS documentation.
Updated documentation.
Fixed file permissions.
  • Loading branch information...
1 parent f6e309b commit f5288a93d3b77c14fe069c5d62a62dc0ccf6b7e3 John M Twilley committed Aug 22, 2011
Showing with 147 additions and 173 deletions.
  1. +87 −6 GetDataset.py
  2. +2 −6 README
  3. +2 −7 TODO
  4. +0 −143 WarpElev.sh
  5. +56 −11 dataset.py
  6. 0 invdisttree.py
  7. 0 terrain.py
  8. 0 tree.py
View
@@ -7,14 +7,25 @@
import sys
import os
import shutil
+import zipfile
+import tarfile
import argparse
from lxml import etree
from time import sleep
-from dataset import landcoverIDs, elevationIDs
+from dataset import landcoverIDs, elevationIDs, decodeLayerID, getSRS
+from tempfile import NamedTemporaryFile
#import logging
#logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
+# dataset-specific images
+# NB: not multi-file friendly
+landcoverimage = ""
+elevationimage = ""
+
+# sadness
+zipfileBroken = True
+
def cleanDatasetDir(args):
"Clean up the dataset directory."
datasetName = args.region
@@ -154,22 +165,26 @@ def checkDownloadOptions(productIDs):
for pair in products[4].split(','):
(v, k) = pair.split('-')
metadataformats[k] = v
- # I want GeoTIFF, HTML and ZIP here
- # should use list in order of preference like landcoverIDs etc
+ # I want GeoTIFF, HTML and TGZ here
if u'GeoTIFF' in outputformats:
layerID += outputformats['GeoTIFF']
else:
print "oh no GeoTIFF not available"
return -1
+ # I do not use metadata so I don't care!
if u'HTML' in metadataformats:
layerID += metadataformats['HTML']
else:
print "oh no HTML not available"
return -1
- if u'ZIP' in compressionformats:
+ # prefer TGZ to ZIP
+ # consider preferences like landcoverIDs
+ if u'TGZ' in compressionformats:
+ layerID += compressionformats['TGZ']
+ elif u'ZIP' in compressionformats:
layerID += compressionformats['ZIP']
else:
- print "oh no ZIP not available"
+ print "no compression formats available"
return -1
layerIDs.append([layerID, xmin, xmax, ymin, ymax])
@@ -188,7 +203,7 @@ def requestValidation(layerIDs):
(tag, xmin, xmax, ymin, ymax) = layerID
xmlString = "<REQUEST_SERVICE_INPUT><AOI_GEOMETRY><EXTENT><TOP>%f</TOP><BOTTOM>%f</BOTTOM><LEFT>%f</LEFT><RIGHT>%f</RIGHT></EXTENT><SPATIALREFERENCE_WKID/></AOI_GEOMETRY><LAYER_INFORMATION><LAYER_IDS>%s</LAYER_IDS></LAYER_INFORMATION><CHUNK_SIZE>%d</CHUNK_SIZE><JSON></JSON></REQUEST_SERVICE_INPUT>" % (ymax, ymin, xmin, xmax, tag, 250)
- response = clientRequest.service.processAOI(xmlString)
+ response = clientRequest.service.processAOI2(xmlString)
print "Requested URLs for layer ID %s..." % tag
@@ -307,6 +322,69 @@ def downloadFile(layerID, downloadURL, datasetDir):
endPos = result.find("</ns:return>")
status = result[startPos:endPos]
def warpelev(datasetDir):
    """Extract downloaded archive files and warp the elevation image
    into the landcover image's spatial reference system.

    datasetDir -- dataset directory containing one subdirectory per layer ID

    Returns -1 on any error, None on success.  Side effect: sets the
    module-level elevationimage and landcoverimage paths.
    """
    import subprocess

    # Publish the discovered image paths at module level (declared at top
    # of file).  NB: needs fixing when supporting multiple images!
    global elevationimage, landcoverimage
    # Reset here: without these assignments the names would be unbound
    # function-locals and the "gotta have one of each" checks below would
    # raise UnboundLocalError whenever a product type was never seen.
    elevationimage = ""
    landcoverimage = ""

    # One subdirectory per layer ID.
    layerIDs = [name for name in os.listdir(datasetDir)
                if os.path.isdir(os.path.join(datasetDir, name))]
    for layerID in layerIDs:
        (pType, iType, mType, cType) = decodeLayerID(layerID)
        filesuffix = cType.lower()
        layersubdir = os.path.join(datasetDir, layerID)
        compfiles = [name for name in os.listdir(layersubdir)
                     if (os.path.isfile(os.path.join(layersubdir, name)) and
                         name.endswith(filesuffix))]
        for compfile in compfiles:
            (compbase, compext) = os.path.splitext(compfile)
            fullfile = os.path.join(layersubdir, compfile)
            datasubdir = os.path.join(layersubdir, compbase)
            # Image path inside the archive, relative to layersubdir.
            compimage = os.path.join(compbase, "%s.%s" % (compbase, iType))
            # Start from a clean extraction directory.
            if os.path.exists(datasubdir):
                shutil.rmtree(datasubdir)
            os.makedirs(datasubdir)
            # NOTE(review): assumes cType is always "TGZ" or "ZIP" here
            # (decodeLayerID presumably guarantees this) -- an unknown
            # value would leave cFile unbound.  TODO confirm.
            if (zipfileBroken == False):
                if (cType == "TGZ"):
                    cFile = tarfile.open(fullfile)
                elif (cType == "ZIP"):
                    cFile = zipfile.ZipFile(fullfile)
                cFile.extract(compimage, layersubdir)
                cFile.close()
            else:
                if (cType == "TGZ"):
                    cFile = tarfile.open(fullfile)
                    cFile.extract(compimage, layersubdir)
                elif (cType == "ZIP"):
                    # Workaround: these ZIP members are stored with
                    # backslash-separated names, so extract under the
                    # literal name and rename into place.
                    omfgcompimage = "\\".join([compbase, "%s.%s" % (compbase, iType)])
                    os.mkdir(os.path.dirname(os.path.join(datasubdir, compimage)))
                    cFile = zipfile.ZipFile(fullfile)
                    cFile.extract(omfgcompimage, datasubdir)
                    os.rename(os.path.join(datasubdir, omfgcompimage),
                              os.path.join(layersubdir, compimage))
                cFile.close()
            # tag what we found
            # NB: needs fixing when supporting multiple images!
            if (pType == "elevation"):
                elevationimage = os.path.join(layersubdir, compimage)
            elif (pType == "landcover"):
                landcoverimage = os.path.join(layersubdir, compimage)
            else:
                print("Product type %s not yet supported!" % pType)
                return -1

    # gotta have one of each
    # NB: needs fixing when supporting multiple images!
    if (elevationimage == ""):
        print("Elevation image not found!")
        return -1
    if (landcoverimage == ""):
        print("Landcover image not found!")
        return -1

    # Preserve the unwarped original alongside the warped result.
    elevationimageorig = "%s-orig" % elevationimage
    shutil.copyfile(elevationimage, elevationimageorig)
    # gdalwarp reads the target SRS from a projection file; write the
    # landcover image's SRS into a temporary file for it.
    prffd = NamedTemporaryFile(delete=False)
    prfname = prffd.name
    prffd.write(getSRS(landcoverimage))
    prffd.close()
    # Warp into a fresh file and rename it over the original, as the old
    # WarpElev.sh did -- gdalwarp updates, rather than replaces, a
    # destination file that already exists.  An argument list (not a
    # shell string) keeps paths with spaces and metacharacters safe.
    elevationimagenew = "%s-new" % elevationimage
    retval = subprocess.call(['gdalwarp', '-t_srs', prfname, '-r', 'cubic',
                              elevationimageorig, elevationimagenew])
    os.remove(prfname)
    if (retval != 0):
        print("gdalwarp failed with return code %d!" % retval)
        return -1
    os.rename(elevationimagenew, elevationimage)

def main(argv):
"The main routine."
@@ -346,6 +424,9 @@ def main(argv):
for layerID in downloadURLs.keys():
for downloadURL in downloadURLs[layerID]:
downloadFile(layerID, downloadURL, datasetDir)
+
+ # extract files and warp elevation files
+ warpelev(datasetDir)
if __name__ == '__main__':
sys.exit(main(sys.argv))
View
8 README
@@ -18,7 +18,6 @@ Features include:
This project includes one small dataset for testing. It was generated with the following commands:
./GetDataset.py --region BlockIsland --ymax 41.2378 --ymin 41.1415 --xmin -71.6202 --xmax -71.5332
- ./WarpElev.sh --region BlockIsland
The default spawn point is at the highest point in the dataset. The safe house is back! Perhaps someday there'll be a chest in there with everything the explorer needs?
@@ -29,13 +28,10 @@ Next, here's what to do!
1. Download the dataset from the USGS.
jmt@belle:~/TopoMC$ ./GetDataset.py --region Provincetown --ymax 42.0901 --ymin 42.0091 --xmin -70.2611 --xmax -70.1100
-2. Extract the data files and warp the elevation file.
- jmt@belle:~/TopoMC$ ./WarpElev.sh --region Provincetown
-
-3. Build the arrays.
+2. Build the arrays.
jmt@belle:~/TopoMC$ ./BuildArrays.py --region Provincetown
-4. Build a new world!
+3. Build a new world!
jmt@belle:~/TopoMC$ ./BuildWorld.py --region Provincetown
Both BuildArrays.py and BuildWorld.py have lots of options for the curious.
View
9 TODO
@@ -8,24 +8,19 @@ Test and drivers for all functions
- maybe even comments!
Convert most print output to logging statements
- boy this is spammy
- - especially GetDatabase.py!
+ - especially GetDataset.py!
Schematics and equipping the player
- in progress
- don't forget the bed in the safe house!
Support multiple USGS chunks per dataset
- - mcarray.py needs to be redesigned as does GetDatabase.py
+ - mcarray.py needs to be redesigned as does GetDataset.py
Normal stuff underground!
- ore done, what about caves?
- I bet someone's done it better -- look for add-a-dungeon code!
Full multiprocessing support
- everything works except turning chunks into worlds
- test this to make sure
-Clean up WarpElev.sh
- - only unzip the files we want
Clean up terrain.py
- move terrain types into abstract routines to support more land cover types
-Support NED 1/9 arc seconds
- - is this anything more than downloading?
- - multiple chunks come first!
Put buildings in developed areas
- need to do this without interfering with roads in the future?
View
@@ -1,143 +0,0 @@
-#!/bin/bash
-
-# warpelev.sh - 2011Jan24 - mathuin@gmail.com
-
-# This script warps the elevation file for a particular region
-# to match the land cover file.
-
-# example: ./warpelev.sh BlockIsland
-
-function usage() {
- echo "Usage: $0 --region [region]"
- exit
-}
-
-if [ $# -eq 2 ] && [ "$1" = "--region" ]; then
- region=$2
-else
- usage
-fi
-
-function processLayerID() {
- origstring=$1
- layerid=`echo $origstring | sed -e "s/\(...\)\(..\)\(.\)\(.\)/\1 \2 \3 \4/"`
- set -- $layerid
- productid=$1
- if [ "$productid" = "L01" ]; then
- foundl01=$origstring
- elif [ "$productid" = "ND3" ]; then
- foundnd3=$origstring
- elif [ "$productid" = "NED" ]; then
- foundned=$origstring
- else
- echo "Unknown product ID found!"
- exit -1
- fi
- imagetype=$2
- if [ "$imagetype" = "02" ]; then
- imagesuffix=tif
- else
- echo "Unknown image format found!"
- exit -1
- fi
- metatype=$3
- if [ "$metatype" = "H" ]; then
- foundhtml=true
- elif [ "$metatype" = "T" ]; then
- foundtxt=true
- elif [ "$metatype" = "X" ]; then
- foundxml=true
- else
- echo "Unknown metadata format found!"
- exit -1
- fi
- compresstype=$4
- if [ "$compresstype" = "Z" ]; then
- extractcmd=unzip
- compresssuffix=zip
- elif [ "$compresstype" = "T" ]; then
- extractcmd="tar xf"
- compresssuffix=tgz
- else
- echo "Unknown compression format found!"
- exit -1
- fi
-}
-
-regiondir="Datasets/$region"
-if [ -d $regiondir ]; then
- regionfilelist=`ls $regiondir | xargs echo`
-else
- echo "There is no region directory for $region."
- exit -1
-fi
-
-# extract the images
-for regionfile in $regionfilelist; do
- fullname=$regiondir/$regionfile
- if [ -d $fullname ]; then
-
- processLayerID $regionfile
- # now do something
- datafiles=`ls $fullname`
- for datafile in $datafiles; do
- if [ "${datafile#*.}" = "$compresssuffix" ]; then
- basename=`basename $datafile .$compresssuffix`
- # FIXME: cannot unzip single file?!
- #`(cd $fullname && unzip $datafile $basename/$basename.$imagesuffix)`
- (cd $fullname && unzip -oqq $datafile)
- # and now we have the imagename
- imagename=$fullname/$basename/$basename.$imagesuffix
- if [ ! -f $imagename ]; then
- echo "Image name $imagename not found!"
- exit -1
- fi
- else
- echo "$datafile is not of the correct type"
- fi
- done
- else
- echo "$file found but ignored"
- fi
-done
-
-# did we find what we needed
-if [ "x${foundl01}" = "x" ]; then
- echo "No landcover directory found!"
- exit -1
-else
- foundlc=$foundl01
-fi
-if [ "x${foundnd3}" = "x" ]; then
- if [ "x${foundned}" = "x" ]; then
- echo "No elevation directory found!"
- exit -1
- else
- foundelev=$foundned
- fi
-else
- foundelev=$foundnd3
-fi
-
-# foundlc and foundelev have the layerids
-processLayerID $foundlc
-# FIXME: not multi-directory safe!
-lcimage=`find $regiondir/$foundlc/ -name "*.$imagesuffix" -print`
-processLayerID $foundelev
-# FIXME: not multi-directory safe!
-elevimage=`find $regiondir/$foundelev/ -name "*.$imagesuffix" -print`
-elevorig=${elevimage}-orig
-elevnew=${elevimage}-new
-# archive the original
-if [ ! -f ${elevorig} ]; then
- cp ${elevimage} ${elevorig}
-fi
-# populate PRF with SRS
-# FIXME: use known value for SRS
-gdalinfo ${lcimage} | sed -e "1,/Coordinate System is:/d" -e "/Origin =/,\$d" | xargs echo > /tmp/crazy.prf
-
-# now warp the elevation image
-gdalwarp -t_srs /tmp/crazy.prf -r cubic ${elevorig} ${elevnew} && mv ${elevnew} ${elevimage}
-
-# clean up
-rm /tmp/crazy.prf
Oops, something went wrong.

0 comments on commit f5288a9

Please sign in to comment.