From 26282e0a466139185d8e1c694d003fd939511599 Mon Sep 17 00:00:00 2001 From: Forrest Williams Date: Thu, 4 Jan 2024 09:21:09 -0600 Subject: [PATCH 01/10] remove unused functionality and tests --- hyp3lib/apply_wb_mask.py | 110 --- hyp3lib/area2point.py | 34 - hyp3lib/asf_geometry.py | 933 ------------------- hyp3lib/asf_time_series.py | 452 --------- hyp3lib/copy_metadata.py | 42 - hyp3lib/createAmp.py | 33 - hyp3lib/cutGeotiffsByLine.py | 94 -- hyp3lib/dem2isce.py | 125 --- hyp3lib/draw_polygon_on_raster.py | 247 ----- hyp3lib/enh_lee_filter.py | 77 -- hyp3lib/etc/__init__.py | 0 hyp3lib/etc/config/get_dem.cfg | 5 - hyp3lib/etc/lut/change.lut | 4 - hyp3lib/etc/lut/glacier_tracking.lut | 256 ----- hyp3lib/extendDateline.py | 74 -- hyp3lib/file_subroutines.py | 89 -- hyp3lib/geotiff_lut.py | 93 -- hyp3lib/getBursts.py | 23 - hyp3lib/getDemFor.py | 69 -- hyp3lib/getSubSwath.py | 257 ----- hyp3lib/get_bb_from_shape.py | 40 - hyp3lib/get_bounding.py | 153 --- hyp3lib/get_dem.py | 507 ---------- hyp3lib/ingest_S1_granule.py | 63 -- hyp3lib/iscegeo2geotif.py | 133 --- hyp3lib/makeChangeBrowse.py | 166 ---- hyp3lib/makeColorPhase.py | 412 -------- hyp3lib/makeKml.py | 74 -- hyp3lib/make_arc_thumb.py | 42 - hyp3lib/make_cogs.py | 65 -- hyp3lib/offset_xml.py | 106 --- hyp3lib/par_s1_slc_single.py | 95 -- hyp3lib/ps2dem.py | 166 ---- hyp3lib/rasterMask.py | 72 -- hyp3lib/raster_boundary2shape.py | 90 -- hyp3lib/rtc2color.py | 225 ----- hyp3lib/rtc2colordiff.py | 235 ----- hyp3lib/simplify_shapefile.py | 116 --- hyp3lib/subset_geotiff_shape.py | 166 ---- hyp3lib/system.py | 58 -- hyp3lib/tileList2shape.py | 58 -- hyp3lib/verify_opod.py | 54 -- tests/data/test_ned13_dem.tif | Bin 4459 -> 0 bytes tests/data/test_srtmgl1_antimeridian_dem.tif | Bin 6427 -> 0 bytes tests/test_entrypoints.py | 142 --- tests/test_getSubSwath.py | 6 - tests/test_get_bounding.py | 80 -- tests/test_get_dem.py | 129 --- tests/test_make_cogs.py | 33 - tests/test_system.py | 96 -- 50 files changed, 6599 deletions(-) delete mode 100755 hyp3lib/apply_wb_mask.py delete mode 100644 hyp3lib/area2point.py delete mode 100644 hyp3lib/asf_geometry.py delete mode 100644 hyp3lib/asf_time_series.py delete mode 100755 hyp3lib/copy_metadata.py delete mode 100755 hyp3lib/createAmp.py delete mode 100755 hyp3lib/cutGeotiffsByLine.py delete mode 100755 hyp3lib/dem2isce.py delete mode 100755 hyp3lib/draw_polygon_on_raster.py delete mode 100755 hyp3lib/enh_lee_filter.py delete mode 100644 hyp3lib/etc/__init__.py delete mode 100644 hyp3lib/etc/config/get_dem.cfg delete mode 100644 hyp3lib/etc/lut/change.lut delete mode 100644 hyp3lib/etc/lut/glacier_tracking.lut delete mode 100755 hyp3lib/extendDateline.py delete mode 100644 hyp3lib/file_subroutines.py delete mode 100755 hyp3lib/geotiff_lut.py delete mode 100644 hyp3lib/getBursts.py delete mode 100755 hyp3lib/getDemFor.py delete mode 100644 hyp3lib/getSubSwath.py delete mode 100644 hyp3lib/get_bb_from_shape.py delete mode 100755 hyp3lib/get_bounding.py delete mode 100755 hyp3lib/get_dem.py delete mode 100644 hyp3lib/ingest_S1_granule.py delete mode 100755 hyp3lib/iscegeo2geotif.py delete mode 100755 hyp3lib/makeChangeBrowse.py delete mode 100755 hyp3lib/makeColorPhase.py delete mode 100755 hyp3lib/makeKml.py delete mode 100755 hyp3lib/make_arc_thumb.py delete mode 100755 hyp3lib/make_cogs.py delete mode 100755 hyp3lib/offset_xml.py delete mode 100755 hyp3lib/par_s1_slc_single.py delete mode 100755 hyp3lib/ps2dem.py delete mode 100755 hyp3lib/rasterMask.py delete mode 100755 
hyp3lib/raster_boundary2shape.py delete mode 100755 hyp3lib/rtc2color.py delete mode 100755 hyp3lib/rtc2colordiff.py delete mode 100755 hyp3lib/simplify_shapefile.py delete mode 100755 hyp3lib/subset_geotiff_shape.py delete mode 100644 hyp3lib/system.py delete mode 100755 hyp3lib/tileList2shape.py delete mode 100755 hyp3lib/verify_opod.py delete mode 100644 tests/data/test_ned13_dem.tif delete mode 100644 tests/data/test_srtmgl1_antimeridian_dem.tif delete mode 100644 tests/test_getSubSwath.py delete mode 100644 tests/test_get_bounding.py delete mode 100644 tests/test_get_dem.py delete mode 100644 tests/test_make_cogs.py delete mode 100644 tests/test_system.py diff --git a/hyp3lib/apply_wb_mask.py b/hyp3lib/apply_wb_mask.py deleted file mode 100755 index 0144c084..00000000 --- a/hyp3lib/apply_wb_mask.py +++ /dev/null @@ -1,110 +0,0 @@ -"""Create a water body mask wherein all water is 0 and land is 1""" - -import argparse -import logging -import os -from tempfile import NamedTemporaryFile - -from osgeo import gdal, ogr - - -def get_water_mask(upper_left, lower_right, res, gcs=True, mask_value=1): - mask_location = '/vsicurl/https://asf-dem-west.s3.amazonaws.com/WATER_MASK' - - xmin, ymax = upper_left - xmax, ymin = lower_right - - if gcs: - shpfile = f'{mask_location}/GSHHG/GSHHS_f_L1.shp' - src_ds = ogr.Open(shpfile) - src_lyr = src_ds.GetLayer() - - logging.info("Using xmin, xmax {} {}, ymin, ymax {} {}".format(xmin, xmax, ymin, ymax)) - - ncols = int((xmax - xmin) / res + 0.5) - nrows = int((ymax - ymin) / res + 0.5) - - logging.info("Creating water body mask of size {} x {} (lxs) using {}".format(nrows, ncols, shpfile)) - - geotransform = (xmin, res, 0, ymax, 0, -res) - dst_ds = gdal.GetDriverByName('MEM').Create('', ncols, nrows, 1, gdal.GDT_Byte) - dst_rb = dst_ds.GetRasterBand(1) - dst_rb.Fill(0) - dst_rb.SetNoDataValue(0) - dst_ds.SetGeoTransform(geotransform) - - _ = gdal.RasterizeLayer(dst_ds, [mask_value], src_lyr) - dst_ds.FlushCache() - mask = dst_ds.GetRasterBand(1).ReadAsArray() - del dst_ds - - else: - if ymin > 0: - mask_file = f'{mask_location}/Antimeridian_UTM1N_WaterMask1.tif' - else: - mask_file = f'{mask_location}/Antimeridian_UTM1S_WaterMask1.tif' - - coords = [xmin, ymax, xmax, ymin] - with NamedTemporaryFile() as tmpfile: - gdal.Translate(tmpfile.name, mask_file, projWin=coords, xRes=res, yRes=res) - srs_ds = gdal.Open(tmpfile.name) - mask = srs_ds.GetRasterBand(1).ReadAsArray() - del srs_ds - - return mask - - -def apply_wb_mask(tiffile, outfile, maskval=0, gcs=True, band=1): - """ - Given a tiffile input, create outfile, filling in all water areas with the - maskval. 
- """ - - logging.info(f"Using mask value of {maskval}") - tif_info = gdal.Info(tiffile, format='json') - upper_left = tif_info['cornerCoordinates']['upperLeft'] - lower_right = tif_info['cornerCoordinates']['lowerRight'] - - src_ds = gdal.Open(tiffile) - data = src_ds.GetRasterBand(band).ReadAsArray() - proj = src_ds.GetProjection() - trans = src_ds.GetGeoTransform() - del src_ds - - logging.info("Applying water body mask") - mask = get_water_mask(upper_left, lower_right, trans[1], gcs=gcs) - data[mask == 0] = maskval - - dst_ds = gdal.GetDriverByName('GTiff').Create( - outfile, data.shape[1], data.shape[0], band, gdal.GDT_Float32 - ) - dst_ds.SetProjection(proj) - dst_ds.SetGeoTransform(trans) - dst_ds.GetRasterBand(1).WriteArray(data) - dst_ds.GetRasterBand(1).SetNoDataValue(maskval) - del dst_ds - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('tiffile', help='Name of tif file to mask') - parser.add_argument('outfile', help='Name of output masked file') - parser.add_argument('-m', '--maskval', help='Mask value to apply; default 0', type=float, default=0) - args = parser.parse_args() - - log_file = "apply_wb_mask_{}_log.txt".format(os.getpid()) - logging.basicConfig(filename=log_file, format='%(asctime)s - %(levelname)s - %(message)s', - datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG) - logging.getLogger().addHandler(logging.StreamHandler()) - logging.info("Starting run") - - apply_wb_mask(args.tiffile, args.outfile, maskval=args.maskval) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/area2point.py b/hyp3lib/area2point.py deleted file mode 100644 index bd9ed8fe..00000000 --- a/hyp3lib/area2point.py +++ /dev/null @@ -1,34 +0,0 @@ -import glob -import os -from hyp3lib import saa_func_lib as saa -from osgeo import gdal - - -# Gamma program data2geotiff shifts the corner coordinates -# by 1/2 a pixel. This routine shifts them back. It also -# sets the geotiff metadata to say Point. 
-def fix_geotiff_locations(dir="PRODUCT"): - back = os.getcwd() - os.chdir(dir) - for myfile in glob.glob("*.tif"): - x1,y1,t1,p1,data = saa.read_gdal_file(saa.open_gdal_file(myfile)) - easting = t1[0] - resx = t1[1] - rotx = t1[2] - northing = t1[3] - roty = t1[4] - resy = t1[5] - easting = easting + resx/2.0 - northing = northing + resy/2.0 - t1 = [easting, resx, rotx, northing, roty, resy] - tmpfile = "tmp_tiff_{}.tif".format(os.getpid()) - if "dem" in myfile or "DEM" in myfile: - saa.write_gdal_file(tmpfile,t1,p1,data) - elif "ls_map" in myfile or "LS" in myfile: - saa.write_gdal_file_byte(tmpfile,t1,p1,data) - else: - saa.write_gdal_file_float(tmpfile,t1,p1,data,nodata=0) - gdal.Translate(myfile,tmpfile,metadataOptions=['AREA_OR_POINT=Point'],noData="0") - os.remove(tmpfile) - os.chdir(back) - diff --git a/hyp3lib/asf_geometry.py b/hyp3lib/asf_geometry.py deleted file mode 100644 index fff90443..00000000 --- a/hyp3lib/asf_geometry.py +++ /dev/null @@ -1,933 +0,0 @@ -import csv -import os - -import numpy as np -from osgeo import gdal, ogr, osr -from osgeo.gdalconst import GA_ReadOnly -from scipy import ndimage - -from hyp3lib import GeometryError -from hyp3lib.saa_func_lib import get_zone - - -# Determine the boundary polygon of a GeoTIFF file -def geotiff2polygon_ext(geotiff): - - raster = gdal.Open(geotiff) - proj = osr.SpatialReference() - proj.ImportFromWkt(raster.GetProjectionRef()) - gt = raster.GetGeoTransform() - originX = gt[0] - originY = gt[3] - pixelWidth = gt[1] - pixelHeight = gt[5] - cols = raster.RasterXSize - rows = raster.RasterYSize - polygon = ogr.Geometry(ogr.wkbPolygon) - ring = ogr.Geometry(ogr.wkbLinearRing) - ring.AddPoint_2D(originX, originY) - ring.AddPoint_2D(originX + cols*pixelWidth, originY) - ring.AddPoint_2D(originX + cols*pixelWidth, originY + rows*pixelHeight) - ring.AddPoint_2D(originX, originY + rows*pixelHeight) - ring.AddPoint_2D(originX, originY) - polygon.AddGeometry(ring) - ring = None - raster = None - - return (polygon, proj) - - -def geotiff2polygon(geotiff): - - (polygon, proj) = geotiff2polygon_ext(geotiff) - return polygon - - -def geotiff2boundary_mask(inGeotiff, tsEPSG, threshold, use_closing=True): - - inRaster = gdal.Open(inGeotiff) - proj = osr.SpatialReference() - proj.ImportFromWkt(inRaster.GetProjectionRef()) - if proj.GetAttrValue('AUTHORITY', 0) == 'EPSG': - epsg = int(proj.GetAttrValue('AUTHORITY', 1)) - - if tsEPSG != 0 and epsg != tsEPSG: - print('Reprojecting ...') - inRaster = reproject2grid(inRaster, tsEPSG) - proj.ImportFromWkt(inRaster.GetProjectionRef()) - if proj.GetAttrValue('AUTHORITY', 0) == 'EPSG': - epsg = int(proj.GetAttrValue('AUTHORITY', 1)) - - geoTrans = inRaster.GetGeoTransform() - inBand = inRaster.GetRasterBand(1) - noDataValue = inBand.GetNoDataValue() - data = inBand.ReadAsArray() - minValue = np.min(data) - - ### Check for black fill - if minValue > 0: - data /= data - colFirst = 0 - rowFirst = 0 - else: - data[np.isnan(data)==True] = noDataValue - if threshold is not None: - print('Applying threshold ({0}) ...'.format(threshold)) - data[data<float(threshold)] = noDataValue - data[data>noDataValue] = 1 - if use_closing: - data = ndimage.binary_closing(data, iterations=10, - structure=np.ones((3,3))).astype(data.dtype) - inRaster = None - - (data, colFirst, rowFirst, geoTrans) = cut_blackfill(data, geoTrans) - - return (data, colFirst, rowFirst, geoTrans, proj) - - -def reproject2grid(inRaster, tsEPSG, xRes = None ): - - # Read basic metadata - geoTrans = inRaster.GetGeoTransform() - proj = osr.SpatialReference() - proj.ImportFromEPSG(tsEPSG) - - # 
Define warping options - rasterFormat = 'VRT' - if xRes is None: - xRes = geoTrans[1] - yRes = xRes - resampleAlg = gdal.GRA_Bilinear - options = ['COMPRESS=DEFLATE'] - - outRaster = gdal.Warp('', inRaster, format=rasterFormat, dstSRS=proj, - targetAlignedPixels=True, xRes=xRes, yRes=yRes, resampleAlg=resampleAlg, - options=options) - inRaster = None - - return outRaster - - -def cut_blackfill(data, geoTrans): - - originX = geoTrans[0] - originY = geoTrans[3] - pixelSize = geoTrans[1] - colProfile = list(data.max(axis=1)) - rows = colProfile.count(1) - rowFirst = colProfile.index(1) - rowProfile = list(data.max(axis=0)) - cols = rowProfile.count(1) - colFirst = rowProfile.index(1) - originX += colFirst*pixelSize - originY -= rowFirst*pixelSize - data = data[rowFirst:rows+rowFirst,colFirst:cols+colFirst] - geoTrans = (originX, pixelSize, 0, originY, 0, -pixelSize) - - return (data, colFirst, rowFirst, geoTrans) - - -def geotiff_overlap(firstFile, secondFile, method): - - # Check map projections - raster = gdal.Open(firstFile) - proj = raster.GetProjection() - gt = raster.GetGeoTransform() - pixelSize = gt[1] - raster = None - - # Extract boundary polygons - firstPolygon = geotiff2polygon(firstFile) - secondPolygon = geotiff2polygon(secondFile) - - if method == 'intersection': - overlap = firstPolygon.Intersection(secondPolygon) - elif method == 'union': - overlap = firstPolygon.Union(secondPolygon) - - return (firstPolygon, secondPolygon, overlap, proj, pixelSize) - - -def overlap_indices(polygon, boundary, pixelSize): - - polyEnv = polygon.GetEnvelope() - boundEnv = boundary.GetEnvelope() - xOff = int((boundEnv[0] - polyEnv[0]) / pixelSize) - yOff = int((polyEnv[3] - boundEnv[3]) / pixelSize) - xCount = int((boundEnv[1] - boundEnv[0]) / pixelSize) - yCount = int((boundEnv[3] - boundEnv[2]) / pixelSize) - - return (xOff, yOff, xCount, yCount) - - -# Extract geometry from shapefile -def shape2geometry(shapeFile, field): - - name = [] - fields = [] - driver = ogr.GetDriverByName('ESRI Shapefile') - shape = driver.Open(shapeFile, 0) - multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) - layer = shape.GetLayer() - spatialRef = layer.GetSpatialRef() - layerDef = layer.GetLayerDefn() - for i in range(layerDef.GetFieldCount()): - fields.append(layerDef.GetFieldDefn(i).GetName()) - if field not in fields: - return (None, None, None) - for feature in layer: - geometry = feature.GetGeometryRef() - count = geometry.GetGeometryCount() - if geometry.GetGeometryName() == 'MULTIPOLYGON': - for i in range(0, count): - polygon = geometry.GetGeometryRef(i) - multipolygon.AddGeometry(polygon) - name.append(feature.GetField(field)) - else: - multipolygon.AddGeometry(geometry) - name.append(feature.GetField(field)) - shape.Destroy() - - return (multipolygon, spatialRef, name) - - -def shape2geometry_ext(shapeFile): - - values = [] - fields = [] - driver = ogr.GetDriverByName('ESRI Shapefile') - shape = driver.Open(shapeFile, 0) - layer = shape.GetLayer() - spatialRef = layer.GetSpatialRef() - layerDef = layer.GetLayerDefn() - featureCount = layerDef.GetFieldCount() - for ii in range(featureCount): - field = {} - field['name'] = layerDef.GetFieldDefn(ii).GetName() - field['type'] = layerDef.GetFieldDefn(ii).GetType() - if field['type'] == ogr.OFTString: - field['width'] = layerDef.GetFieldDefn(ii).GetWidth() - fields.append(field) - for feature in layer: - multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) - geometry = feature.GetGeometryRef() - count = geometry.GetGeometryCount() - if geometry.GetGeometryName() == 
'MULTIPOLYGON': - for i in range(0, count): - polygon = geometry.GetGeometryRef(i) - multipolygon.AddGeometry(polygon) - else: - multipolygon.AddGeometry(geometry) - value = {} - for field in fields: - value[field['name']] = feature.GetField(field['name']) - value['geometry'] = multipolygon - values.append(value) - shape.Destroy() - - return (fields, values, spatialRef) - - -# Save geometry with fields to shapefile -def geometry2shape(fields, values, spatialRef, merge, shapeFile): - - driver = ogr.GetDriverByName('ESRI Shapefile') - if os.path.exists(shapeFile): - driver.DeleteDataSource(shapeFile) - outShape = driver.CreateDataSource(shapeFile) - outLayer = outShape.CreateLayer('layer', srs=spatialRef) - for field in fields: - fieldDefinition = ogr.FieldDefn(field['name'], field['type']) - if field['type'] == ogr.OFTString: - fieldDefinition.SetWidth(field['width']) - elif field['type'] == ogr.OFTReal: - fieldDefinition.SetWidth(24) - fieldDefinition.SetPrecision(8) - outLayer.CreateField(fieldDefinition) - featureDefinition = outLayer.GetLayerDefn() - if merge == True: - combine = ogr.Geometry(ogr.wkbMultiPolygon) - for value in values: - combine = combine.Union(value['geometry']) - outFeature = ogr.Feature(featureDefinition) - for field in fields: - name = field['name'] - outFeature.SetField(name, 'multipolygon') - outFeature.SetGeometry(combine) - outLayer.CreateFeature(outFeature) - outFeature.Destroy() - else: - for value in values: - outFeature = ogr.Feature(featureDefinition) - for field in fields: - name = field['name'] - outFeature.SetField(name, value[name]) - outFeature.SetGeometry(value['geometry']) - outLayer.CreateFeature(outFeature) - outFeature.Destroy() - outShape.Destroy() - - -# Save data with fields to shapefile -def data_geometry2shape_ext(data, fields, values, spatialRef, geoTrans, - classes, threshold, background, shapeFile): - - # Check input - if threshold is not None: - threshold = float(threshold) - if background is not None: - background = int(background) - - # Buffer data - (rows, cols) = data.shape - pixelSize = geoTrans[1] - originX = geoTrans[0] - 10*pixelSize - originY = geoTrans[3] + 10*pixelSize - geoTrans = (originX, pixelSize, 0, originY, 0, -pixelSize) - mask = np.zeros((rows+20, cols+20), dtype=np.float32) - mask[10:rows+10,10:cols+10] = data - data = mask - - # Save in memory - (rows, cols) = data.shape - data = data.astype(np.byte) - gdalDriver = gdal.GetDriverByName('Mem') - outRaster = gdalDriver.Create('value', cols, rows, 1, gdal.GDT_Byte) - outRaster.SetGeoTransform(geoTrans) - outRaster.SetProjection(spatialRef.ExportToWkt()) - outBand = outRaster.GetRasterBand(1) - outBand.WriteArray(data) - - # Write data to shapefile - driver = ogr.GetDriverByName('ESRI Shapefile') - if os.path.exists(shapeFile): - driver.DeleteDataSource(shapeFile) - outShape = driver.CreateDataSource(shapeFile) - outLayer = outShape.CreateLayer('polygon', srs=spatialRef) - outField = ogr.FieldDefn('value', ogr.OFTInteger) - outLayer.CreateField(outField) - gdal.Polygonize(outBand, None, outLayer, 0, [], callback=None) - for field in fields: - fieldDefinition = ogr.FieldDefn(field['name'], field['type']) - if field['type'] == ogr.OFTString: - fieldDefinition.SetWidth(field['width']) - outLayer.CreateField(fieldDefinition) - fieldDefinition = ogr.FieldDefn('area', ogr.OFTReal) - fieldDefinition.SetWidth(16) - fieldDefinition.SetPrecision(3) - outLayer.CreateField(fieldDefinition) - fieldDefinition = ogr.FieldDefn('centroid', ogr.OFTString) - fieldDefinition.SetWidth(50) - 
outLayer.CreateField(fieldDefinition) - if classes: - fieldDefinition = ogr.FieldDefn('size', ogr.OFTString) - fieldDefinition.SetWidth(25) - outLayer.CreateField(fieldDefinition) - _ = outLayer.GetLayerDefn() - for outFeature in outLayer: - for value in values: - for field in fields: - name = field['name'] - outFeature.SetField(name, value[name]) - cValue = outFeature.GetField('value') - fill = False - if cValue == 0: - fill = True - if background is not None and cValue == background: - fill = True - geometry = outFeature.GetGeometryRef() - area = float(geometry.GetArea()) - outFeature.SetField('area', area) - if classes: - for ii in range(len(classes)): - if area > classes[ii]['minimum'] and area < classes[ii]['maximum']: - outFeature.SetField('size',classes[ii]['class']) - centroid = geometry.Centroid().ExportToWkt() - outFeature.SetField('centroid', centroid) - if fill == False and area > threshold: - outLayer.SetFeature(outFeature) - else: - outLayer.DeleteFeature(outFeature.GetFID()) - outShape.Destroy() - - -def data_geometry2shape(data, fields, values, spatialRef, geoTrans, shapeFile): - - return data_geometry2shape_ext(data, fields, values, spatialRef, geoTrans, - None, 0, None, shapeFile) - - -def geotiff2data(inGeotiff): - - inRaster = gdal.Open(inGeotiff) - proj = osr.SpatialReference() - proj.ImportFromWkt(inRaster.GetProjectionRef()) - if proj.GetAttrValue('AUTHORITY', 0) == 'EPSG': - epsg = int(proj.GetAttrValue('AUTHORITY', 1)) - geoTrans = inRaster.GetGeoTransform() - inBand = inRaster.GetRasterBand(1) - noData = inBand.GetNoDataValue() - data = inBand.ReadAsArray() - if data.dtype == np.uint8: - dtype = 'BYTE' - elif data.dtype == np.float32: - dtype = 'FLOAT' - elif data.dtype == np.float64: - dtype = 'DOUBLE' - - return (data, geoTrans, proj, epsg, dtype, noData) - - -def data2geotiff(data, geoTrans, proj, dtype, noData, outFile): - - (rows, cols) = data.shape - gdalDriver = gdal.GetDriverByName('GTiff') - if dtype == 'BYTE': - outRaster = gdalDriver.Create(outFile, cols, rows, 1, gdal.GDT_Byte, - ['COMPRESS=DEFLATE']) - elif dtype == 'FLOAT': - outRaster = gdalDriver.Create(outFile, cols, rows, 1, gdal.GDT_Float32, - ['COMPRESS=DEFLATE']) - outRaster.SetGeoTransform(geoTrans) - outRaster.SetProjection(proj.ExportToWkt()) - outBand = outRaster.GetRasterBand(1) - outBand.SetNoDataValue(noData) - outBand.WriteArray(data) - outRaster = None - - -# Save raster information (fields, values) to CSV file -def raster2csv(fields, values, csvFile): - - header = [] - for field in fields: - header.append(field['name']) - line = [] - for value in values: - for field in fields: - name = field['name'] - line.append(value[name]) - - with open(csvFile, 'wb') as outF: - writer = csv.writer(outF, delimiter=';') - writer.writerow(header) - writer.writerow(line) - - -# Combine all geometries in a list -def union_geometries(geometries): - - combine = ogr.Geometry(ogr.wkbMultiPolygon) - for geometry in geometries: - combine = combine.Union(geometry) - - return combine - - -def spatial_query(source, reference, function): - - # Extract information from tiles and boundary shapefiles - (geoTile, spatialRef, nameTile) = shape2geometry(reference, 'tile') - if geoTile is None: - raise GeometryError(f'Could not extract information (tile) out of shapefile {reference}') - (boundary, spatialRef, granule) = shape2geometry(source, 'granule') - if boundary is None: - raise GeometryError(f'Could not extract information (granule) out of shapefile {source}') - - # Perform the spatial analysis - i = 0 - tile = 
[] - multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) - for geo in geoTile: - for bound in boundary: - if function == 'intersects': - intersection = bound.Intersection(geo) - if intersection.GetGeometryName() == 'POLYGON': - if nameTile[i] not in tile: - tile.append(nameTile[i]) - multipolygon.AddGeometry(geo) - i = i + 1 - - return (multipolygon, tile) - - -# Converted geometry from projected to geographic -def geometry_proj2geo(inMultipolygon, inSpatialRef): - - outSpatialRef = osr.SpatialReference() - outSpatialRef.ImportFromEPSG(4326) - coordTrans = osr.CoordinateTransformation(inSpatialRef, outSpatialRef) - outMultipolygon = ogr.Geometry(ogr.wkbMultiPolygon) - for polygon in inMultipolygon: - if inSpatialRef != outSpatialRef: - polygon.Transform(coordTrans) - outMultipolygon.AddGeometry(polygon) - - return (outMultipolygon, outSpatialRef) - -# Convert corner points from geographic to UTM projection -def geometry_geo2proj(lat_max,lat_min,lon_max,lon_min): - - zone = get_zone(lon_min,lon_max) - if (lat_min+lat_max)/2 > 0: - proj = ('326%02d' % int(zone)) - else: - proj = ('327%02d' % int(zone)) - - inSpatialRef = osr.SpatialReference() - inSpatialRef.ImportFromEPSG(4326) - outSpatialRef = osr.SpatialReference() - outSpatialRef.ImportFromEPSG(int(proj)) - coordTrans = osr.CoordinateTransformation(inSpatialRef,outSpatialRef) - - x1, y1, h = coordTrans.TransformPoint(lon_max, lat_min) - x2, y2, h = coordTrans.TransformPoint(lon_min, lat_min) - x3, y3, h = coordTrans.TransformPoint(lon_max, lat_max) - x4, y4, h = coordTrans.TransformPoint(lon_min, lat_max) - - y_min = min(y1,y2,y3,y4) - y_max = max(y1,y2,y3,y4) - x_min = min(x1,x2,x3,x4) - x_max = max(x1,x2,x3,x4) - - # false_easting = outSpatialRef.GetProjParm(osr.SRS_PP_FALSE_EASTING) - false_northing = outSpatialRef.GetProjParm(osr.SRS_PP_FALSE_NORTHING) - - return zone, false_northing, y_min, y_max, x_min, x_max - - -def reproject_corners(corners, posting, inEPSG, outEPSG): - - # Reproject coordinates - inProj = osr.SpatialReference() - inProj.ImportFromEPSG(inEPSG) - outProj = osr.SpatialReference() - outProj.ImportFromEPSG(outEPSG) - transform = osr.CoordinateTransformation(inProj, outProj) - corners.Transform(transform) - - # Get extent and round to even coordinates - (minX, maxX, minY, maxY) = corners.GetEnvelope() - #posting = inGT[1] - minX = np.ceil(minX/posting)*posting - minY = np.ceil(minY/posting)*posting - maxX = np.ceil(maxX/posting)*posting - maxY = np.ceil(maxY/posting)*posting - - # Add points to multiPoint - corners = ogr.Geometry(ogr.wkbMultiPoint) - ul = ogr.Geometry(ogr.wkbPoint) - ul.AddPoint(minX, maxY) - corners.AddGeometry(ul) - ll = ogr.Geometry(ogr.wkbPoint) - ll.AddPoint(minX, minY) - corners.AddGeometry(ll) - ur = ogr.Geometry(ogr.wkbPoint) - ur.AddPoint(maxX, maxY) - corners.AddGeometry(ur) - lr = ogr.Geometry(ogr.wkbPoint) - lr.AddPoint(maxX, minY) - corners.AddGeometry(lr) - - return corners - - -def reproject_extent(minX, maxX, minY, maxY, posting, inEPSG, outEPSG): - - # Add points to multiPoint - corners = ogr.Geometry(ogr.wkbMultiPoint) - ul = ogr.Geometry(ogr.wkbPoint) - ul.AddPoint(minX, maxY) - corners.AddGeometry(ul) - ll = ogr.Geometry(ogr.wkbPoint) - ll.AddPoint(minX, minY) - corners.AddGeometry(ll) - ur = ogr.Geometry(ogr.wkbPoint) - ur.AddPoint(maxX, maxY) - corners.AddGeometry(ur) - lr = ogr.Geometry(ogr.wkbPoint) - lr.AddPoint(maxX, minY) - corners.AddGeometry(lr) - - # Re-project corners - reproject_corners(corners, posting, inEPSG, outEPSG) - - # Extract min/max values - return 
corners.GetEnvelope() - - -def raster_meta(rasterFile): - - raster = gdal.Open(rasterFile) - spatialRef = osr.SpatialReference() - spatialRef.ImportFromWkt(raster.GetProjectionRef()) - gt = raster.GetGeoTransform() - shape = [ raster.RasterYSize, raster.RasterXSize ] - pixel = raster.GetMetadataItem('AREA_OR_POINT') - raster = None - - return (spatialRef, gt, shape, pixel) - - -def overlapMask(meta, maskShape, invert, outFile): - - ### Extract metadata - posting = meta['pixelSize'] - # proj = meta['proj'] - imageEPSG = meta['epsg'] - multiBoundary = meta['boundary'] - dataRows = meta['rows'] - dataCols = meta['cols'] - geoEPSG = 4326 - - ### Extract mask polygon - ogrDriver = ogr.GetDriverByName('ESRI Shapefile') - inShape = ogrDriver.Open(maskShape) - outLayer = inShape.GetLayer() - outProj = outLayer.GetSpatialRef() - outEPSG = int(outProj.GetAttrValue('AUTHORITY', 1)) - if geoEPSG != outEPSG: - raise GeometryError(f'Expecting mask file with EPSG code: {geoEPSG}') - - ### Define re-projection from geographic to UTM - inProj = osr.SpatialReference() - inProj.ImportFromEPSG(4326) - outProj = osr.SpatialReference() - outProj.ImportFromEPSG(imageEPSG) - transform = osr.CoordinateTransformation(inProj, outProj) - - ### Loop through features - for boundary in multiBoundary: - for feature in outLayer: - outMultipolygon = ogr.Geometry(ogr.wkbMultiPolygon) - inMultiPolygon = feature.GetGeometryRef() - for polygon in inMultiPolygon: - overlap = boundary.Intersection(polygon) - if 'POLYGON' in overlap.ExportToWkt(): - overlap.Transform(transform) - outMultipolygon.AddGeometry(overlap) - - ### Save intersection polygon in memory - spatialRef = osr.SpatialReference() - spatialRef.ImportFromEPSG(imageEPSG) - memDriver = ogr.GetDriverByName('Memory') - outVector = memDriver.CreateDataSource('mem') - outLayer = outVector.CreateLayer('', spatialRef, ogr.wkbMultiPolygon) - outLayer.CreateField(ogr.FieldDefn('id', ogr.OFTInteger)) - definition = outLayer.GetLayerDefn() - outFeature = ogr.Feature(definition) - outFeature.SetField('id', 0) - geometry = ogr.CreateGeometryFromWkb(outMultipolygon.ExportToWkb()) - outFeature.SetGeometry(geometry) - outLayer.CreateFeature(outFeature) - outFeature = None - - ### Calculate extent - (aoiMinX, aoiMaxX, aoiMinY, aoiMaxY) = outLayer.GetExtent() - aoiLines = int(np.rint((aoiMaxY - aoiMinY)/posting)) - aoiSamples = int(np.rint((aoiMaxX - aoiMinX)/posting)) - maskGeoTrans = (aoiMinX, posting, 0, aoiMaxY, 0, -posting) - - ### Rasterize mask polygon - gdalDriver = gdal.GetDriverByName('MEM') - outRaster = gdalDriver.Create('', aoiSamples, aoiLines, 1, gdal.GDT_Float32) - outRaster.SetGeoTransform((aoiMinX, posting, 0, aoiMaxY, 0, -posting)) - outRaster.SetProjection(outProj.ExportToWkt()) - outBand = outRaster.GetRasterBand(1) - outBand.SetNoDataValue(0) - outBand.FlushCache() - gdal.RasterizeLayer(outRaster, [1], outLayer, burn_values=[1]) - mask = outRaster.GetRasterBand(1).ReadAsArray() - outVector = None - outRaster = None - - ### Invert mask (if requested) - if invert == True: - mask = 1.0 - mask - - ### Final adjustments - mask = mask[:dataRows,:dataCols] - mask[mask==0] = np.nan - - return (mask, maskGeoTrans) - - -def apply_mask(data, dataGeoTrans, mask, maskGeoTrans): - - (dataRows, dataCols) = data.shape - dataOriginX = dataGeoTrans[0] - dataOriginY = dataGeoTrans[3] - # dataPixelSize = dataGeoTrans[1] - (maskRows, maskCols) = mask.shape - maskOriginX = maskGeoTrans[0] - maskOriginY = maskGeoTrans[3] - maskPixelSize = maskGeoTrans[1] - offsetX = 
int(np.rint((maskOriginX - dataOriginX)/maskPixelSize)) - offsetY = int(np.rint((dataOriginY - maskOriginY)/maskPixelSize)) - data = data[offsetY:maskRows+offsetY,offsetX:maskCols+offsetX] - data *= mask - - return data - - -def geotiff2boundary_ext(inGeotiff, maskFile, geographic): - - # Extract metadata - (spatialRef, gt, shape, pixel) = raster_meta(inGeotiff) - epsg = int(spatialRef.GetAttrValue('AUTHORITY', 1)) - (data, colFirst, rowsFirst, geoTrans, proj) = \ - geotiff2boundary_mask(inGeotiff, epsg, None) - (rows, cols) = data.shape - - # Save in mask file (if defined) - if maskFile is not None: - gdalDriver = gdal.GetDriverByName('GTiff') - outRaster = gdalDriver.Create(maskFile, rows, cols, 1, gdal.GDT_Byte) - outRaster.SetGeoTransform(geoTrans) - outRaster.SetProjection(proj.ExportToWkt()) - outBand = outRaster.GetRasterBand(1) - outBand.WriteArray(data) - outRaster = None - - # Save in memory - gdalDriver = gdal.GetDriverByName('Mem') - outRaster = gdalDriver.Create('out', rows, cols, 1, gdal.GDT_Byte) - outRaster.SetGeoTransform(geoTrans) - outRaster.SetProjection(proj.ExportToWkt()) - outBand = outRaster.GetRasterBand(1) - outBand.WriteArray(data) - data = None - - # Polygonize the raster image - inBand = outRaster.GetRasterBand(1) - ogrDriver = ogr.GetDriverByName('Memory') - outVector = ogrDriver.CreateDataSource('out') - outLayer = outVector.CreateLayer('boundary', srs=proj) - fieldDefinition = ogr.FieldDefn('ID', ogr.OFTInteger) - outLayer.CreateField(fieldDefinition) - gdal.Polygonize(inBand, inBand, outLayer, 0, [], None) - outRaster = None - - # Extract geometry from layer - inSpatialRef = outLayer.GetSpatialRef() - multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) - for outFeature in outLayer: - geometry = outFeature.GetGeometryRef() - multipolygon.AddGeometry(geometry) - outFeature = None - outLayer = None - - # Convert geometry from projected to geographic coordinates (if requested) - if geographic == True: - (multipolygon, outSpatialRef) = \ - geometry_proj2geo(multipolygon, inSpatialRef) - return (multipolygon, outSpatialRef) - else: - return (multipolygon, inSpatialRef) - - -def geotiff2boundary(inGeotiff, maskFile): - - return geotiff2boundary_ext(inGeotiff, maskFile, False) - - -def geotiff2boundary_geo(inGeotiff, maskFile): - - return geotiff2boundary_ext(inGeotiff, maskFile, True) - - -# Get polygon for a tile -def get_tile_geometry(tile, step): - - # Extract corners - xmin = int(tile[1:3]) - ymin = int(tile[4:7]) - if tile[0] == 'S': - xmin = -xmin - if tile[3] == 'W': - ymin = -ymin - xmax = xmin + step - ymax = ymin + step - - # Create geometry - ring = ogr.Geometry(ogr.wkbLinearRing) - ring.AddPoint_2D(ymax, xmin) - ring.AddPoint_2D(ymax, xmax) - ring.AddPoint_2D(ymin, xmax) - ring.AddPoint_2D(ymin, xmin) - ring.AddPoint_2D(ymax, xmin) - polygon = ogr.Geometry(ogr.wkbPolygon) - polygon.AddGeometry(ring) - - return polygon - - -# Get tile names -def get_tile_names(minLat, maxLat, minLon, maxLon, step): - - tiles = [] - for i in range(minLon, maxLon, step): - for k in range(minLat, maxLat, step): - eastwest = 'W' if i<0 else 'E' - northsouth = 'S' if k<0 else 'N' - tile = ('%s%02d%s%03d' % (northsouth, abs(k), eastwest, abs(i))) - tiles.append(tile) - return tiles - - -# Get tiles extent -def get_tiles_extent(tiles, step): - - minLat = 90 - maxLat = -90 - minLon = 180 - maxLon = -180 - for tile in tiles: - xmin = int(tile[1:3]) - ymin = int(tile[4:7]) - if tile[0] == 'S': - xmin = -xmin - if tile[3] == 'W': - ymin = -ymin - if xmin < minLat: - minLat = xmin - 
if xmin > maxLat: - maxLat = xmin - if ymin < minLon: - minLon = ymin - if ymin > maxLon: - maxLon = ymin - maxLat += step - maxLon += step - - return (minLat, maxLat, minLon, maxLon) - - -# Generate a global tile shapefile -def generate_tile_shape(shapeFile, minLat, maxLat, minLon, maxLon, step): - - # General setup for shapefile - driver = ogr.GetDriverByName('ESRI Shapefile') - if os.path.exists(shapeFile): - driver.DeleteDataSource(shapeFile) - shapeData = driver.CreateDataSource(shapeFile) - - # Define layer and attributes - spatialReference = osr.SpatialReference() - spatialReference.ImportFromEPSG(4326) - layer = shapeData.CreateLayer(shapeFile, spatialReference, ogr.wkbPolygon) - fieldname = ogr.FieldDefn('tile', ogr.OFTString) - fieldname.SetWidth(10) - layer.CreateField(fieldname) - - # Going through the tiles - tiles = get_tile_names(minLat, maxLat, minLon, maxLon, step) - for tile in tiles: - geometry = get_tile_geometry(tile, step) - tileGeometry = geometry.ExportToWkt() - feature = ogr.Feature(layer.GetLayerDefn()) - feature.SetField('tile', tile) - - # Define geometry as polygon - geom = ogr.CreateGeometryFromWkt(tileGeometry) - if geom: - feature.SetGeometry(geom) - layer.CreateFeature(feature) - feature.Destroy() - - shapeData.Destroy() - - -# Generate a shapefile from a CSV list file -def list2shape(csvFile, shapeFile): - - # Set up shapefile attributes - fields = [] - field = {} - values = [] - field['name'] = 'granule' - field['type'] = ogr.OFTString - field['width'] = 254 - fields.append(field) - - files = [line.strip() for line in open(csvFile)] - for file in files: - data = gdal.Open(file, GA_ReadOnly) - if data is not None and data.GetDriver().LongName == 'GeoTIFF': - - print('Reading %s ...' % file) - # Generate GeoTIFF boundary geometry - data = None - (geometry, spatialRef) = geotiff2boundary(file, None) - - # Simplify the geometry - only works with GDAL 1.8.0 - #geometry = geometry.Simplify(float(tolerance)) - - # Add granule name and geometry - base = os.path.basename(file) - granule = os.path.splitext(base)[0] - value = {} - value['granule'] = granule - value['geometry'] = geometry - values.append(value) - - # Write geometry to shapefile - merge = False - geometry2shape(fields, values, spatialRef, merge, shapeFile) - - -# Determine the tiles for an area of interest -def aoi2tiles(aoiGeometry): - - # Determine the bounding box - envelope = aoiGeometry.GetEnvelope() - west = int(envelope[0] - 0.5) - east = int(envelope[1] + 1.5) - south = int(envelope[2] - 0.5) - north = int(envelope[3] + 1.5) - - # Walk through the potential tiles and add the required on to the geometry - tiles = [] - multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) - for i in range(west, east): - for k in range(south, north): - eastwest = 'W' if i<0 else 'E' - northsouth = 'S' if k<0 else 'N' - tile = ('%s%02d%s%03d' % (northsouth, abs(k), eastwest, abs(i))) - polygon = get_tile_geometry(tile, 1) - intersection = polygon.Intersection(aoiGeometry) - if intersection is not None: - multipolygon.AddGeometry(polygon) - tiles.append(tile) - - return (tiles, multipolygon) - -def get_latlon_extent(filename): - src = gdal.Open(filename) - ulx, xres, xskew, uly, yskew, yres = src.GetGeoTransform() - lrx = ulx + (src.RasterXSize * xres) - lry = uly + (src.RasterYSize * yres) - - source = osr.SpatialReference() - source.ImportFromWkt(src.GetProjection()) - - target = osr.SpatialReference() - target.ImportFromEPSG(4326) - - transform = osr.CoordinateTransformation(source, target) - - lon1, lat1, h = 
transform.TransformPoint(ulx, uly) - lon2, lat2, h = transform.TransformPoint(lrx, uly) - lon3, lat3, h = transform.TransformPoint(ulx, lry) - lon4, lat4, h = transform.TransformPoint(lrx, lry) - - lat_min = min(lat1,lat2,lat3,lat4) - lat_max = max(lat1,lat2,lat3,lat4) - lon_min = min(lon1,lon2,lon3,lon4) - lon_max = max(lon1,lon2,lon3,lon4) - - return lat_min, lat_max, lon_min, lon_max - diff --git a/hyp3lib/asf_time_series.py b/hyp3lib/asf_time_series.py deleted file mode 100644 index 0f6f2db0..00000000 --- a/hyp3lib/asf_time_series.py +++ /dev/null @@ -1,452 +0,0 @@ -import os -from datetime import datetime, timedelta - -import netCDF4 as nc -import numpy as np -import statsmodels.api as sm -from osgeo import gdal, ogr, osr -from scipy import ndimage -from scipy.interpolate import interp1d -from statsmodels.tsa.seasonal import seasonal_decompose - -from hyp3lib import GeometryError -from hyp3lib.asf_geometry import geometry_proj2geo, raster_meta - -tolerance = 0.00005 - - -def initializeNetcdf(ncFile, meta): - - dataset = nc.Dataset(ncFile, 'w', format='NETCDF4') - - ### Define global attributes - dataset.Conventions = ('CF-1.7') - dataset.institution = meta['institution'] - dataset.title = meta['title'] - dataset.source = meta['source'] - dataset.comment = meta['comment'] - dataset.reference = meta['reference'] - timestamp = datetime.utcnow().isoformat() + 'Z' - dataset.history = ('{0}: netCDF file created'.format(timestamp)) - dataset.featureType = ('timeSeries') - - ### Create dimensions - dataset.createDimension('xgrid', meta['cols']) - dataset.createDimension('ygrid', meta['rows']) - dataset.createDimension('time', None) - dataset.createDimension('nchar', 100) - - ### Create variables - time, coordinates, values - ## time - time = dataset.createVariable('time', np.float32, ('time',)) - time.axis = ('T') - time.long_name = ('serial date') - time.standard_name = ('time') - time.units = ('seconds since {0}'.format(meta['refTime'])) - time.calendar = 'gregorian' - time.fill_value = 0 - time.reference = ('center time of image') - - ## map projection - projSpatialRef = osr.SpatialReference() - projSpatialRef.ImportFromEPSG(int(meta['epsg'])) - wkt = projSpatialRef.ExportToWkt() - projection = dataset.createVariable('Transverse_Mercator', 'S1') - projection.grid_mapping_name = ('transverse_mercator') - projection.crs_wkt = wkt - projection.scale_factor_at_centeral_meridian = \ - projSpatialRef.GetProjParm(osr.SRS_PP_SCALE_FACTOR) - projection.longitude_of_central_meridian = \ - projSpatialRef.GetProjParm(osr.SRS_PP_CENTRAL_MERIDIAN) - projection.latitude_of_projection_origin = \ - projSpatialRef.GetProjParm(osr.SRS_PP_LATITUDE_OF_ORIGIN) - projection.false_easting = \ - projSpatialRef.GetProjParm(osr.SRS_PP_FALSE_EASTING) - projection.false_northing = \ - projSpatialRef.GetProjParm(osr.SRS_PP_FALSE_NORTHING) - projection.projection_x_coordinate = ('xgrid') - projection.projection_y_coordinate = ('ygrid') - projection.units = ('meters') - - ## coordinate: x grid - xgrid = dataset.createVariable('xgrid', np.float32, ('xgrid')) - xgrid.axis = ('X') - xgrid.long_name = ('projection_grid_y_center') - xgrid.standard_name = ('projection_y_coordinate') - xgrid.units = ('meters') - xgrid.fill_value = np.nan - - ## coordinate: y grid - ygrid = dataset.createVariable('ygrid', np.float32, ('ygrid')) - ygrid.axis = ('Y') - ygrid.long_name = ('projection_grid_x_center') - ygrid.standard_name = ('projection_x_coordinate') - ygrid.units = ('meters') - ygrid.fill_value = np.nan - - ## image - image = 
dataset.createVariable('image', np.float32, \ - ('time', 'ygrid', 'xgrid'), zlib=True) - image.long_name = meta['imgLongName'] - image.units = meta['imgUnits'] - image.fill_value = meta['imgNoData'] - - ## name - name = dataset.createVariable('granule', 'S1', ('time', 'nchar')) - name.long_name = 'name of the granule' - - ### Fill in coordinates - xCoordinate = np.arange(meta['minX'], meta['maxX'], meta['pixelSize']) - xgrid[:] = xCoordinate - yCoordinate = np.arange(meta['maxY'], meta['minY'], -meta['pixelSize']) - ygrid[:] = yCoordinate - - dataset.close() - - -def extractNetcdfTime(ncFile, csvFile): - - outF = open(csvFile, 'w') - timeSeries = nc.Dataset(ncFile, 'r') - timeRef = timeSeries.variables['time'].getncattr('units')[14:] - timeRef = datetime.strptime(timeRef, '%Y-%m-%d %H:%M:%S') - time = timeSeries.variables['time'][:].tolist() - for t in time: - timestamp = timeRef + timedelta(seconds=t) - outF.write('%s\n' % timestamp.isoformat()) - outF.close() - - -def nc2meta(ncFile): - - dataset = nc.Dataset(ncFile, 'r') - - meta = {} - - ### Global attributes - meta['conventions'] = dataset.Conventions - meta['institution'] = dataset.institution - meta['title'] = dataset.title - meta['source'] = dataset.source - meta['comment'] = dataset.comment - meta['reference'] = dataset.reference - meta['history'] = dataset.history - - ### Coordinates - xGrid = dataset.variables['xgrid'] - (meta['cols'],) = xGrid.shape - meta['pixelSize'] = xGrid[1] - xGrid[0] - meta['minX'] = np.min(xGrid) - meta['maxX'] = np.max(xGrid) + meta['pixelSize'] - yGrid = dataset.variables['ygrid'] - (meta['rows'],) = yGrid.shape - meta['minY'] = np.min(yGrid) - meta['pixelSize'] - meta['maxY'] = np.max(yGrid) - - ### Time reference - time = dataset.variables['time'] - (meta['timeCount'],) = time.shape - meta['refTime'] = time.units[14:] - - ### Map projection: EPSG - proj = dataset.variables['Transverse_Mercator'] - projSpatialRef = osr.SpatialReference() - projSpatialRef.ImportFromWkt(proj.crs_wkt) - meta['epsg'] = int(projSpatialRef.GetAttrValue('AUTHORITY', 1)) - - ### Image metadata - image = dataset.variables['image'] - meta['imgLongName'] = image.long_name - meta['imgUnits'] = image.units - meta['imgNoData'] = image.fill_value - - dataset.close() - - return meta - - -def addImage2netcdf(image, ncFile, granule, imgTime): - - dataset = nc.Dataset(ncFile, 'a') - - ### Updating time - time = dataset.variables['time'] - name = dataset.variables['granule'] - data = dataset.variables['image'] - numGranules = time.shape[0] - time[numGranules] = nc.date2num(imgTime, units=time.units, - calendar=time.calendar) - name[numGranules] = nc.stringtochar(np.array(granule, 'S100')) - data[numGranules,:,:] = image - - dataset.close() - - -def filter_change(image, kernelSize, iterations): - - (cols, rows) = image.shape - positiveChange = np.zeros((rows,cols), dtype=np.uint8) - negativeChange = np.zeros((rows,cols), dtype=np.uint8) - noChange = np.zeros((rows,cols), dtype=np.uint8) - for ii in range(int(cols)): - for kk in range(int(rows)): - if image[ii,kk] == 1: - negativeChange[ii,kk] = 1 - elif image[ii,kk] == 2: - noChange = 1 - elif image[ii,kk] == 3: - positiveChange[ii,kk] = 1 - image = None - positiveChange = ndimage.binary_opening(positiveChange, - iterations=iterations, structure=np.ones(kernelSize)).astype(np.uint8) - negativeChange = ndimage.binary_opening(negativeChange, - iterations=iterations, structure=np.ones(kernelSize)).astype(np.uint8) - change = np.full((rows,cols), 2, dtype=np.uint8) - for ii in 
range(int(cols)): - for kk in range(int(rows)): - if negativeChange[ii,kk] == 1: - change[ii,kk] = 1 - elif positiveChange[ii,kk] == 1: - change[ii,kk] = 3 - change *= noChange - - return change - - -def vector_meta(vectorFile): - - vector = ogr.Open(vectorFile) - layer = vector.GetLayer() - layerDefinition = layer.GetLayerDefn() - fieldCount = layerDefinition.GetFieldCount() - fields = [] - for ii in range(fieldCount): - field = {} - field['name'] = layerDefinition.GetFieldDefn(ii).GetName() - field['type'] = layerDefinition.GetFieldDefn(ii).GetType() - field['width'] = layerDefinition.GetFieldDefn(ii).GetWidth() - field['precision'] = layerDefinition.GetFieldDefn(ii).GetPrecision() - fields.append(field) - proj = layer.GetSpatialRef() - extent = layer.GetExtent() - features = [] - featureCount = layer.GetFeatureCount() - for kk in range(featureCount): - value = {} - feature = layer.GetFeature(kk) - for ii in range(fieldCount): - if fields[ii]['type'] == ogr.OFTInteger: - value[fields[ii]['name']] = int(feature.GetField(ii)) - elif fields[ii]['type'] == ogr.OFTReal: - value[fields[ii]['name']] = float(feature.GetField(ii)) - else: - value[fields[ii]['name']] = feature.GetField(ii) - value['geometry'] = feature.GetGeometryRef().ExportToWkt() - features.append(value) - - return (fields, proj, extent, features) - - -def raster_metadata(input): - - # Set up shapefile attributes - fields = [] - field = {} - values = [] - field['name'] = 'granule' - field['type'] = ogr.OFTString - field['width'] = 254 - fields.append(field) - field = {} - field['name'] = 'epsg' - field['type'] = ogr.OFTInteger - fields.append(field) - field = {} - field['name'] = 'originX' - field['type'] = ogr.OFTReal - fields.append(field) - field = {} - field['name'] = 'originY' - field['type'] = ogr.OFTReal - fields.append(field) - field = {} - field['name'] = 'pixSize' - field['type'] = ogr.OFTReal - fields.append(field) - field = {} - field['name'] = 'cols' - field['type'] = ogr.OFTInteger - fields.append(field) - field = {} - field['name'] = 'rows' - field['type'] = ogr.OFTInteger - fields.append(field) - field = {} - field['name'] = 'pixel' - field['type'] = ogr.OFTString - field['width'] = 8 - fields.append(field) - - # Extract other raster image metadata - (outSpatialRef, outGt, outShape, outPixel) = raster_meta(input) - if outSpatialRef.GetAttrValue('AUTHORITY', 0) == 'EPSG': - epsg = int(outSpatialRef.GetAttrValue('AUTHORITY', 1)) - - # Add granule name and geometry - base = os.path.basename(input) - granule = os.path.splitext(base)[0] - value = {} - value['granule'] = granule - value['epsg'] = epsg - value['originX'] = outGt[0] - value['originY'] = outGt[3] - value['pixSize'] = outGt[1] - value['cols'] = outShape[1] - value['rows'] = outShape[0] - value['pixel'] = outPixel - values.append(value) - - return (fields, values, outSpatialRef) - - -def netcdf2boundary_mask(ncFile, geographic): - - ### Extract metadata - meta = nc2meta(ncFile) - cols = meta['cols'] - rows = meta['rows'] - proj = osr.SpatialReference() - proj.ImportFromEPSG(meta['epsg']) - geoTrans = \ - (meta['minX'], meta['pixelSize'], 0, meta['maxY'], 0, -meta['pixelSize']) - - ### Reading time series - dataset = nc.Dataset(ncFile, 'r') - image = dataset.variables['image'][:] - dataset.close() - - ### Save in memory - data = image[0,:,:]/image[0,:,:] - image = None - gdalDriver = gdal.GetDriverByName('Mem') - outRaster = gdalDriver.Create('out', rows, cols, 1, gdal.GDT_Byte) - outRaster.SetGeoTransform(geoTrans) - 
outRaster.SetProjection(proj.ExportToWkt()) - outBand = outRaster.GetRasterBand(1) - outBand.WriteArray(data) - inBand = None - data = None - - ### Polygonize the raster image - inBand = outRaster.GetRasterBand(1) - ogrDriver = ogr.GetDriverByName('Memory') - outVector = ogrDriver.CreateDataSource('out') - outLayer = outVector.CreateLayer('boundary', srs=proj) - fieldDefinition = ogr.FieldDefn('ID', ogr.OFTInteger) - outLayer.CreateField(fieldDefinition) - gdal.Polygonize(inBand, inBand, outLayer, 0, [], None) - outRaster = None - - ### Extract geometry from layer - inSpatialRef = outLayer.GetSpatialRef() - multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) - for outFeature in outLayer: - geometry = outFeature.GetGeometryRef() - multipolygon.AddGeometry(geometry) - outFeature = None - outLayer = None - - ### Convert geometry from projected to geographic coordinates (if requested) - if geographic == True: - (multipolygon, outSpatialRef) = \ - geometry_proj2geo(multipolygon, inSpatialRef) - return (multipolygon, outSpatialRef) - else: - return (multipolygon, inSpatialRef) - - -def time_series_slice(ncFile, x, y, typeXY): - - timeSeries = nc.Dataset(ncFile, 'r') - - ### Extract information for variables: image, time, granule - timeRef = timeSeries.variables['time'].getncattr('units')[14:] - timeRef = datetime.strptime(timeRef, '%Y-%m-%d %H:%M:%S') - time = timeSeries.variables['time'][:].tolist() - timestamp = [] - for t in time: - timestamp.append(timeRef + timedelta(seconds=t)) - xGrid = timeSeries.variables['xgrid'][:] - yGrid = timeSeries.variables['ygrid'][:] - granules = timeSeries.variables['granule'] - granule = nc.chartostring(granules[:]) - data = timeSeries.variables['image'] - # numGranules = len(time) - - ### Define geo transformation and map proejction - # originX = xGrid[0] - # originY = yGrid[0] - pixelSize = xGrid[1] - xGrid[0] - # gt = (originX, pixelSize, 0, originY, 0, -pixelSize) - var = timeSeries.variables.keys() - if 'Transverse_Mercator' in var: - wkt = timeSeries.variables['Transverse_Mercator'].getncattr('crs_wkt') - else: - raise GeometryError('Could not find map projection information!') - - ### Work out line/sample from various input types - if typeXY == 'pixel': - sample = x - line = y - elif typeXY == 'latlon': - inProj = osr.SpatialReference() - inProj.ImportFromEPSG(4326) - outProj = osr.SpatialReference() - outProj.ImportFromWkt(wkt) - transform = osr.CoordinateTransformation(inProj, outProj) - coord = ogr.Geometry(ogr.wkbPoint) - coord.AddPoint(x,y) - coord.Transform(transform) - coordX = np.rint(coord.GetX()/pixelSize)*pixelSize - coordY = np.rint(coord.GetY()/pixelSize)*pixelSize - sample = xGrid.tolist().index(coordX) - line = yGrid.tolist().index(coordY) - elif typeXY == 'mapXY': - sample = xGrid.tolist().index(x) - line = yGrid.tolist().index(y) - value = data[:,sample,line] - - ### Work on time series - ## Fill in gaps by interpolation - startDate = timestamp[0].date() - stopDate = timestamp[len(timestamp)-1].date() - refDates = np.arange(startDate, stopDate + timedelta(days=12), 12).tolist() - datestamp = [] - for t in time: - datestamp.append((timeRef + timedelta(seconds=t)).date()) - missingDates = list(set(refDates) - set(datestamp)) - f = interp1d(time, value) - missingTime = [] - for missingDate in missingDates: - missingTime.append((missingDate - timeRef.date()).total_seconds()) - missingValues = f(missingTime) - allValues = [] - refType = [] - for ii in range(len(refDates)): - if refDates[ii] in missingDates: - index = 
missingDates.index(refDates[ii]) - allValues.append(missingValues[index]) - refType.append('interpolated') - else: - index = datestamp.index(refDates[ii]) - allValues.append(value[index]) - refType.append('acquired') - allValues = np.asarray(allValues) - - ## Smoothing the time line with localized regression (LOESS) - lowess = sm.nonparametric.lowess - smooth = lowess(allValues, np.arange(len(allValues)), frac=0.08, it=0)[:,1] - - sd = seasonal_decompose(x=smooth, model='additive', freq=4) - - return (granule, refDates, refType, smooth, sd) diff --git a/hyp3lib/copy_metadata.py b/hyp3lib/copy_metadata.py deleted file mode 100755 index 246be8ed..00000000 --- a/hyp3lib/copy_metadata.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Copy metadata from one tif to another""" -import os -import argparse -from hyp3lib import saa_func_lib as saa -from osgeo import gdal - - -def copy_metadata(infile, outfile): - ds = saa.open_gdal_file(infile) - md = ds.GetMetadata() - print(md) - - # ds = saa.open_gdal_file(outfile) - # ds.SetMetadata(md) - - # outfile2 = "tmp_outfile.tif" - # gdal.Translate(outfile2,outfile, metadataOptions = md) - # shutil.move(outfile2,outfile) - - ds = saa.open_gdal_file(outfile) - for item in md: - ds1 = gdal.Translate('',ds,format='MEM',metadataOptions = ['{}={}'.format(item,md[item])]) - ds = ds1 - gdal.Translate(outfile,ds1) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument("infile", help="Input tif filename") - parser.add_argument("outfile", help="Output tif filename") - args = parser.parse_args() - - copy_metadata(args.infile, args.outfile) - - -if __name__ == "__main__": - main() diff --git a/hyp3lib/createAmp.py b/hyp3lib/createAmp.py deleted file mode 100755 index 21b286c1..00000000 --- a/hyp3lib/createAmp.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Convert Geotiff Power to Amplitude""" -from hyp3lib import saa_func_lib as saa -import numpy as np -import argparse -import os - - -def createAmp(fi,nodata=None): - (x,y,trans,proj,data) = saa.read_gdal_file(saa.open_gdal_file(fi)) - ampdata = np.sqrt(data) - outfile = fi.replace('.tif','_amp.tif') - saa.write_gdal_file_float(outfile,trans,proj,ampdata,nodata=nodata) - return outfile - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument("infile", nargs="+", help="Input tif filename(s)") - parser.add_argument("-n", "--nodata", type=float, help="Set nodata value") - args = parser.parse_args() - - infiles = args.infile - for fi in infiles: - createAmp(fi, args.nodata) - - -if __name__ == "__main__": - main() diff --git a/hyp3lib/cutGeotiffsByLine.py b/hyp3lib/cutGeotiffsByLine.py deleted file mode 100755 index d57548c8..00000000 --- a/hyp3lib/cutGeotiffsByLine.py +++ /dev/null @@ -1,94 +0,0 @@ -"""Clip a bunch of geotiffs to the same area""" -import argparse -import os -from osgeo import gdal -from hyp3lib import saa_func_lib as saa -import numpy as np - - -def getOrigins(files): - - ul = np.zeros((2,len(files))) - lr = np.zeros((2,len(files))) - - for i in range(len(files)): - x,y,trans,proj = saa.read_gdal_file_geo(saa.open_gdal_file(files[i])) - ul[0,i] = trans[0] - lr[0,i] = trans[0] + x*trans[1] - ul[1,i] = trans[3] - lr[1,i] = trans[3] + y*trans[5] - - return ul,lr,trans[1],trans[5] - - -def copyOrigins(files,all_coords,all_pixsize): - - ul = np.zeros((2,len(files))) - lr = np.zeros((2,len(files))) - - for i in 
range(len(files)): - coords = all_coords[i] - ul[0,i] = coords[0] - lr[0,i] = coords[2] - ul[1,i] = coords[1] - lr[1,i] = coords[3] - - if i == 0: - xres = all_pixsize[i] - yres = all_pixsize[i] - - return ul,lr,xres,yres - - -def cutGeotiffsByLine(files,all_coords=None,all_pixsize=None): - - if all_coords is None: - ul,lr,xres,yres = getOrigins(files) - else: - ul,lr,xres,yres = copyOrigins(files,all_coords,all_pixsize) - - diff_ul = np.zeros((2,len(files))) - - diff_ul[0] = (max(ul[0])-ul[0])/xres - diff_ul[1] = -1*(min(ul[1])-ul[1])/(-1*yres) - - print("Difference list:") - print(diff_ul) - - lrx = min(lr[0]) - lry = max(lr[1]) - lenx = (lrx-max(ul[0])) / xres - leny = -1*(lry-min(ul[1])) / (-1*yres) - if leny < 0: - leny = abs(leny) - diff_ul[1] = diff_ul[1] * -1 - print("Size of output images {} x {}".format(lenx, leny)) - - outfiles = [] - for i in range(len(files)): - outfile = files[i].replace(".tif","_cut.tif") - if all_coords is not None: - outfile = os.path.basename(outfile) - print("Processing file {} to create file {}".format(files[i], outfile)) - gdal.Translate(outfile,files[i],srcWin=[diff_ul[0,i],diff_ul[1,i],lenx,leny],noData=0) - outfiles.append(outfile) - - return(outfiles) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument("infiles", nargs='+', - help="Geotiff files to clip; output will be have _clip appended to the file name") - args = parser.parse_args() - - cutGeotiffsByLine(args.infiles) - - -if __name__ == "__main__": - main() diff --git a/hyp3lib/dem2isce.py b/hyp3lib/dem2isce.py deleted file mode 100755 index 10b620b4..00000000 --- a/hyp3lib/dem2isce.py +++ /dev/null @@ -1,125 +0,0 @@ -"""generates an XML file for a DEM for ISCE processing""" - -import argparse -import os - -import lxml.etree as et -from osgeo import osr, gdal - - -def dem2isce(demFile, hdrFile, xmlFile): - - # Read metadata from the DEM - raster = gdal.Open(demFile, gdal.GA_ReadOnly) - if raster is None: - raise FileNotFoundError(f'Unable to open DEM file {demFile} !') - print('Converting %s file (%s) ...' 
% (raster.GetDriver().ShortName, demFile)) - gt = raster.GetGeoTransform() - band = raster.GetRasterBand(1) - data_type = gdal.GetDataTypeName(band.DataType) - proj = osr.SpatialReference() - proj.ImportFromWkt(raster.GetProjectionRef()) - datum = proj.GetAttrValue('datum') - lines = [line.rstrip() for line in open(hdrFile)] - - # Build XML tree - isce = et.Element('component', name='DEM') - element_property = et.SubElement(isce, 'property', name='BYTE_ORDER') - if 'byte order = 0' in lines: - et.SubElement(element_property, 'value').text = 'l' - else: - et.SubElement(element_property, 'value').text = 'b' - element_property = et.SubElement(isce, 'property', name='ACCESS_MODE') - et.SubElement(element_property, 'value').text = 'read' - element_property = et.SubElement(isce, 'property', name='REFERENCE') - if datum == 'WGS_1984': - et.SubElement(element_property, 'value').text = 'WGS84' - elif datum == 'North_American_Datum_1983': - et.SubElement(element_property, 'value').text = 'NAD83' - element_property = et.SubElement(isce, 'property', name='DATA_TYPE') - if data_type == 'Int16': - et.SubElement(element_property, 'value').text = 'SHORT' - elif data_type == 'Float32': - et.SubElement(element_property, 'value').text = 'FLOAT' - element_property = et.SubElement(isce, 'property', name='SCHEME') - if 'interleave = bsq' in lines: - et.SubElement(element_property, 'value').text = 'BSQ' - elif 'interleave = bil' in lines: - et.SubElement(element_property, 'value').text = 'BIL' - elif 'interleave = bip' in lines: - et.SubElement(element_property, 'value').text = 'BIP' - element_property = et.SubElement(isce, 'property', name='IMAGE_TYPE') - et.SubElement(element_property, 'value').text = 'dem' - element_property = et.SubElement(isce, 'property', name='FILE_NAME') - et.SubElement(element_property, 'value').text = os.path.abspath(demFile) - element_property = et.SubElement(isce, 'property', name='WIDTH') - et.SubElement(element_property, 'value').text = str(raster.RasterXSize) - element_property = et.SubElement(isce, 'property', name='LENGTH') - et.SubElement(element_property, 'value').text = str(raster.RasterYSize) - element_property = et.SubElement(isce, 'property', name='NUMBER_BANDS') - et.SubElement(element_property, 'value').text = str(raster.RasterCount) - element_property = et.SubElement(isce, 'property', name='FIRST_LATITUDE') - et.SubElement(element_property, 'value').text = str(gt[3]) - element_property = et.SubElement(isce, 'property', name='FIRST_LONGITUDE') - et.SubElement(element_property, 'value').text = str(gt[0]) - element_property = et.SubElement(isce, 'property', name='DELTA_LATITUDE') - et.SubElement(element_property, 'value').text = str(gt[5]) - element_property = et.SubElement(isce, 'property', name='DELTA_LONGITUDE') - et.SubElement(element_property, 'value').text = str(gt[1]) - component = et.SubElement(isce, 'component', name='Coordinate1') - et.SubElement(component, 'factorymodule').text = 'isceobj.Image' - et.SubElement(component, 'factoryname').text = 'createCoordinate' - et.SubElement(component, 'doc').text = 'First coordinate of a 2D image (width).' - element_property = et.SubElement(component, 'property', name='startingValue') - et.SubElement(element_property, 'value').text = str(gt[0]) - et.SubElement(element_property, 'doc').text = 'Starting value of the coordinate.' 
- et.SubElement(element_property, 'units').text = 'degree' - element_property = et.SubElement(component, 'property', name='delta') - et.SubElement(element_property, 'value').text = str(gt[1]) - et.SubElement(element_property, 'doc').text = 'Coordinate quantization.' - element_property = et.SubElement(component, 'property', name='size') - et.SubElement(element_property, 'value').text = str(raster.RasterXSize) - et.SubElement(element_property, 'doc').text = 'Coordinate size.' - component = et.SubElement(isce, 'component', name='Coordinate2') - et.SubElement(component, 'factorymodule').text = 'isceobj.Image' - et.SubElement(component, 'factoryname').text = 'createCoordinate' - et.SubElement(component, 'doc').text = 'Second coordinate of a 2D image (length).' - element_property = et.SubElement(component, 'property', name='startingValue') - et.SubElement(element_property, 'value').text = str(gt[3]) - et.SubElement(element_property, 'doc').text = 'Starting value of the coordinate.' - et.SubElement(element_property, 'units').text = 'degree' - element_property = et.SubElement(component, 'property', name='delta') - et.SubElement(element_property, 'value').text = str(gt[5]) - et.SubElement(element_property, 'doc').text = 'Coordinate quantization.' - element_property = et.SubElement(component, 'property', name='size') - et.SubElement(element_property, 'value').text = str(raster.RasterYSize) - et.SubElement(element_property, 'doc').text = 'Coordinate size.' - - # Write the tree structure to file - with open(xmlFile, 'wb') as outF: - outF.write(et.tostring(isce, encoding='UTF-8', xml_declaration=True, - pretty_print=True)) - outF.close() - lines = None - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('dem', metavar='', - help='name of DEM file, assumed to be in ENVI format') - parser.add_argument('hdr', metavar='', - help='name of the ENVI header file') - parser.add_argument('xml', metavar='', - help='name of XML file') - args = parser.parse_args() - - dem2isce(args.dem, args.hdr, args.xml) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/draw_polygon_on_raster.py b/hyp3lib/draw_polygon_on_raster.py deleted file mode 100755 index 87747ce6..00000000 --- a/hyp3lib/draw_polygon_on_raster.py +++ /dev/null @@ -1,247 +0,0 @@ -"""Draws a polygon from a shapefile onto a raster image""" -import argparse -import shutil -import os - -import lxml.etree as et -from imageio import imread -from osgeo import gdal, ogr, osr -import matplotlib as mpl -mpl.use('Agg') -import matplotlib.pyplot as mplt -import matplotlib.lines as mlines - - -def write_worldfile(gt, worldFile): - - world = open(worldFile, 'w') - world.write('%.10f\n' % float(gt[1])) - world.write('%.10f\n' % float(gt[2])) - world.write('%.10f\n' % float(gt[4])) - world.write('%.10f\n' % float(gt[5])) - world.write('%.10f\n' % (float(gt[0])+float(gt[1])/2.0)) - world.write('%.10f\n' % (float(gt[3])+float(gt[5])/2.0)) - world.close() - - -def write_aux_file(spatialRef, auxFile): - - aux = et.Element('PAMDataset') - et.SubElement(aux, 'SRS').text = spatialRef.ExportToWkt() - meta = et.SubElement(aux, 'Metadata') - et.SubElement(meta, 'MDI', {'key':'AREA_OR_POINT'}).text = 'Area' - et.SubElement(meta, 'MDI', {'key':'TIFFTAG_RESOLUTIONUNIT'}).text = \ - '1 (unitless)' - et.SubElement(meta, 'MDI', {'key':'TIFFTAG_XRESOLUTION'}).text = '1' - et.SubElement(meta, 'MDI', {'key':'TIFFTAG_YRESOLUTION'}).text = '1' - band = et.SubElement(aux, 
'PAMRasterBand', {'band':'1'})
-    et.SubElement(band, 'NoDataValue').text = '0.00000000000000E+00'
-    domain = et.SubElement(band, 'Metadata', {'domain':'IMAGE_STRUCTURE'})
-    et.SubElement(domain, 'MDI', {'key':'COMPRESSION'}).text = 'JPEG'
-    with open(auxFile, 'wb') as outF:
-        outF.write(et.tostring(aux, pretty_print=True))
-
-
-def get_projected_vector_geometry(shapeFile, rasterSpatialRef):
-
-    driver = ogr.GetDriverByName('ESRI Shapefile')
-    shape = driver.Open(shapeFile, 0)
-    vectorMultipolygon = ogr.Geometry(ogr.wkbMultiPolygon)
-    layer = shape.GetLayer()
-    vectorSpatialRef = layer.GetSpatialRef()
-    if vectorSpatialRef != rasterSpatialRef:
-        coordTrans = osr.CoordinateTransformation(vectorSpatialRef, rasterSpatialRef)
-    for feature in layer:
-        geometry = feature.GetGeometryRef()
-        count = geometry.GetGeometryCount()
-        if geometry.GetGeometryName() == 'MULTIPOLYGON':
-            for i in range(count):
-                polygon = geometry.GetGeometryRef(i)
-                if vectorSpatialRef != rasterSpatialRef:
-                    polygon.Transform(coordTrans)
-                vectorMultipolygon.AddGeometry(polygon)
-        else:
-            if vectorSpatialRef != rasterSpatialRef:
-                geometry.Transform(coordTrans)
-            vectorMultipolygon.AddGeometry(geometry)
-    shape.Destroy()
-
-    return vectorMultipolygon
-
-
-def gcs2poly_geometry(gcsPolygon, rasterSpatialRef):
-
-    vectorMultipolygon = ogr.Geometry(ogr.wkbMultiPolygon)
-    vectorSpatialRef = osr.SpatialReference()
-    vectorSpatialRef.ImportFromEPSG(4326)
-    coordTrans = osr.CoordinateTransformation(vectorSpatialRef, rasterSpatialRef)
-    geometry = ogr.CreateGeometryFromWkt(gcsPolygon)
-    count = geometry.GetGeometryCount()
-    if geometry.GetGeometryName() == 'MULTIPOLYGON':
-        for i in range(count):
-            polygon = geometry.GetGeometryRef(i)
-            polygon.Transform(coordTrans)
-            vectorMultipolygon.AddGeometry(polygon)
-    else:
-        geometry.Transform(coordTrans)
-        vectorMultipolygon.AddGeometry(geometry)
-
-    return vectorMultipolygon
-
-
-def get_raster_spatial_reference(rasterFile):
-
-    gdal.UseExceptions()
-    gdal.PushErrorHandler('CPLQuietErrorHandler')
-    inRaster = gdal.Open(rasterFile)
-    rasterProj = inRaster.GetProjection()
-    rasterSpatialRef = osr.SpatialReference(wkt = rasterProj)
-
-    return rasterSpatialRef
-
-
-def intersect_raster_with_polygon(rasterFile, vectorPolygon):
-
-    gdal.UseExceptions()
-    gdal.PushErrorHandler('CPLQuietErrorHandler')
-    inRaster = gdal.Open(rasterFile)
-    gt = inRaster.GetGeoTransform()
-    originX = gt[0]
-    originY = gt[3]
-    cols = inRaster.RasterXSize
-    rows = inRaster.RasterYSize
-    ulX = originX
-    ulY = originY
-    urX = originX + gt[1]*cols
-    urY = originY
-    lrX = originX + gt[1]*cols + gt[2]*rows
-    lrY = originY + gt[4]*cols + gt[5]*rows
-    llX = originX
-    llY = originY + gt[4]*cols + gt[5]*rows
-    geometry = ('MULTIPOLYGON ((( %f %f, %f %f, %f %f, %f %f, %f %f )))' %
-                (ulX, ulY, urX, urY, lrX, lrY, llX, llY, ulX, ulY))
-    rasterPolygon = ogr.CreateGeometryFromWkt(geometry)
-    intersection = rasterPolygon.Intersection(vectorPolygon)
-
-    return intersection
-
-
-def proj2pixel(x, y, inverse_geo_transform):
-
-    px, py = gdal.ApplyGeoTransform(inverse_geo_transform, x, y)
-    px = int(px) - 1
-    py = int(py) - 1
-
-    return (px, abs(py))
-
-
-def draw_polygon_on_raster(inRasterFile, polygon, color, outRasterFile):
-
-    # Extract information from raster image
-    gdal.UseExceptions()
-    gdal.PushErrorHandler('CPLQuietErrorHandler')
-    inRaster = gdal.Open(inRasterFile)
-    fileFormat = inRaster.GetDriver().LongName
-    rasterProj = inRaster.GetProjection()
-    rasterSpatialRef = osr.SpatialReference(wkt = rasterProj)
-    cols = inRaster.RasterXSize
-    rows = inRaster.RasterYSize
-    geo_transform = inRaster.GetGeoTransform()
-    inverse_geo_transform = gdal.InvGeoTransform(geo_transform)
-    line = []
-    sample = []
-    for k in range(0, polygon.GetGeometryCount()):
-        geometry = polygon.GetGeometryRef(k)
-        for i in range(0, geometry.GetPointCount()):
-            point = geometry.GetPoint(i)
-            (px, py) = proj2pixel(point[0], point[1], inverse_geo_transform)
-            line.append(py)
-            sample.append(px)
-
-    # Draw polygon
-    figRows = float(rows)/100.0
-    figCols = float(cols)/100.0
-    fig = mplt.figure(figsize=(figCols, figRows), dpi=100, frameon=False)
-    image = imread(inRasterFile)
-    mplt.imshow(image, interpolation='none')
-    mplt.axis('off')
-    mplt.subplots_adjust(left=0, bottom=0, right=1, top=1, hspace=0, wspace=0)
-    sub = mplt.subplot(111)
-    polyline = mlines.Line2D(sample, line, linewidth=2.0, color=color)
-    sub.add_line(polyline)
-    with open(outRasterFile, 'wb') as outfile:
-        fig.canvas.print_jpg(outfile)
-
-    # Create extra GEO files
-    if fileFormat == 'JPEG JFIF':
-        write_worldfile(geo_transform, outRasterFile.replace('.jpg', '.wld'))
-        write_aux_file(rasterSpatialRef, outRasterFile + '.aux.xml')
-
-
-def draw_polygon_from_shape_on_raster(inRaster, shapeFile, polyColor, outRaster):
-
-    # Assign colors
-    color = {}
-    color['blue'] = 'b'
-    color['green'] = 'g'
-    color['red'] = 'r'
-    color['cyan'] = 'c'
-    color['magenta'] = 'm'
-    color['yellow'] = 'y'
-    color['black'] = 'k'
-    color['white'] = 'w'
-
-    # Extracting intersection of raster image and shapefile geometry
-    rasterSpatialRef = get_raster_spatial_reference(inRaster)
-    vectorPolygon = get_projected_vector_geometry(shapeFile, rasterSpatialRef)
-    polygon = intersect_raster_with_polygon(inRaster, vectorPolygon)
-
-    # Draw polygon on geocoded image
-    if polygon:
-        draw_polygon_on_raster(inRaster, polygon, color[polyColor], outRaster)
-    else:
-        shutil.copy(inRaster,outRaster)
-def draw_polygon_from_gcs_polygon_on_raster(inRaster, gcsPolygon, polyColor,
-                                            outRaster):
-
-    # Assign colors
-    color = {}
-    color['blue'] = 'b'
-    color['green'] = 'g'
-    color['red'] = 'r'
-    color['cyan'] = 'c'
-    color['magenta'] = 'm'
-    color['yellow'] = 'y'
-    color['black'] = 'k'
-    color['white'] = 'w'
-
-    # Extracting intersection of raster image and shapefile geometry
-    rasterSpatialRef = get_raster_spatial_reference(inRaster)
-    vectorPolygon = gcs2poly_geometry(gcsPolygon, rasterSpatialRef)
-    polygon = intersect_raster_with_polygon(inRaster, vectorPolygon)
-
-    # Draw polygon on geocoded image
-    draw_polygon_on_raster(inRaster, polygon, color[polyColor], outRaster)
-
-
-def main():
-    """Main entrypoint"""
-
-    parser = argparse.ArgumentParser(
-        prog=os.path.basename(__file__),
-        description=__doc__,
-    )
-    parser.add_argument('inRaster', help='name of the input raster file')
-    parser.add_argument('shape', help='name of the polygon shapefile to be drawn')
-    parser.add_argument('color', help='color of the polygon')
-    parser.add_argument('outRaster', help='name of the output raster file')
-    args = parser.parse_args()
-
-    draw_polygon_from_shape_on_raster(
-        args.inRaster, args.shape, args.color, args.outRaster
-    )
-
-
-if __name__ == '__main__':
-    main()
diff --git a/hyp3lib/enh_lee_filter.py b/hyp3lib/enh_lee_filter.py
deleted file mode 100755
index 30528ebc..00000000
--- a/hyp3lib/enh_lee_filter.py
+++ /dev/null
@@ -1,77 +0,0 @@
-"""Apply an enhanced Lee filter to a geotiff image"""
-from hyp3lib import saa_func_lib as saa
-import numpy as np
-import os
-import argparse
-from scipy.ndimage.filters import
uniform_filter - - -def enh_lee(looks,size,dampening_factor,img): - - Cu = np.sqrt(1/looks) - Cmax = np.sqrt(1+2/looks) - - Im = uniform_filter(img, (size, size)) - diff = img - Im - sqdiff = diff*diff - mean_diff = uniform_filter(sqdiff, (size, size)) - mean_diff[mean_diff<0] = 0 - - S = np.sqrt(mean_diff) - Ic = img - - mask = np.zeros(Im.shape,np.uint8) - mask[Im==0] = 1 - Ci = S/Im - Ci[mask] = 0 - - W = np.exp(-1.0*dampening_factor*(Ci-Cu)/(Cmax-Ci)) - W[np.isnan(W)]=0 - - mask1 = np.zeros(Im.shape,np.uint8) - mask1[Ci<=Cu] = 1 - T1 = Im * mask1 - - mask2 = np.zeros(Im.shape,np.uint8) - mask2[Ci>=Cmax] = 1 - T2 = Ic * mask2 - - mask1 = np.logical_not(mask1) - mask2 = np.logical_not(mask2) - mask3 = np.logical_and(mask1,mask2) - T3 = mask3 * (Im*W + Ic*(1-W)) - - R = T1 + T2 + T3 - - R[np.isnan(R)] = 0 - R[np.abs(R)<0.00001] = 0 - - return(R) - - -def enhanced_lee(infile,outfile,looks,size,dampening): - - x,y,trans,proj,img = saa.read_gdal_file(saa.open_gdal_file(infile)) - img2 = enh_lee(looks,size,dampening,img) - saa.write_gdal_file_float(outfile,trans,proj,img2) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument("infile",help="Geotiff file to smooth") - parser.add_argument("outfile",help="Output smoothed geotiff file") - parser.add_argument("looks",help="Looks to use",type=float) - parser.add_argument("size",help="Kernel size to use",type=float) - parser.add_argument("dampening",help="Dampening factor",type=float) - args = parser.parse_args() - - enhanced_lee(args.infile,args.outfile,args.looks,args.size,args.dampening) - - -if __name__ == "__main__": - main() diff --git a/hyp3lib/etc/__init__.py b/hyp3lib/etc/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/hyp3lib/etc/config/get_dem.cfg b/hyp3lib/etc/config/get_dem.cfg deleted file mode 100644 index f31bca74..00000000 --- a/hyp3lib/etc/config/get_dem.cfg +++ /dev/null @@ -1,5 +0,0 @@ -NED13 https://asf-dem-west.s3.amazonaws.com/NED13/ 4269 -SRTMGL1 https://asf-dem-west.s3.amazonaws.com/SRTMGL1/ 4326 -NED1 https://asf-dem-west.s3.amazonaws.com/NED1/ 4269 -NED2 https://asf-dem-west.s3.amazonaws.com/NED2/ 4269 -SRTMGL3 https://asf-dem-west.s3.amazonaws.com/SRTMGL3/ 4326 diff --git a/hyp3lib/etc/lut/change.lut b/hyp3lib/etc/lut/change.lut deleted file mode 100644 index a7941d0f..00000000 --- a/hyp3lib/etc/lut/change.lut +++ /dev/null @@ -1,4 +0,0 @@ -0,0,0 -255,0,0 -0,0,0 -0,0,255 diff --git a/hyp3lib/etc/lut/glacier_tracking.lut b/hyp3lib/etc/lut/glacier_tracking.lut deleted file mode 100644 index c9fb5a43..00000000 --- a/hyp3lib/etc/lut/glacier_tracking.lut +++ /dev/null @@ -1,256 +0,0 @@ -0,0,0 -2,3,254 -4,6,253 -5,8,252 -7,11,251 -9,14,250 -11,17,249 -13,20,248 -14,23,247 -16,25,246 -18,28,245 -20,31,244 -22,34,243 -23,37,242 -25,39,241 -27,42,240 -29,45,239 -31,48,238 -33,50,237 -34,53,236 -36,56,235 -38,59,234 -40,61,233 -42,64,232 -43,67,231 -45,70,230 -47,72,229 -49,75,228 -51,78,227 -52,80,226 -54,83,225 -56,86,224 -58,88,223 -60,91,222 -61,93,221 -63,96,220 -65,98,219 -67,101,218 -69,104,217 -70,106,216 -72,109,215 -74,111,214 -76,114,213 -78,116,212 -80,118,211 -81,121,210 -83,123,209 -85,126,208 -87,128,207 -89,130,206 -90,133,205 -92,135,204 -94,137,203 -96,139,202 -98,142,201 -99,144,200 -101,146,199 -103,148,198 -105,150,197 -107,153,196 -108,155,195 -110,157,194 -112,159,193 -114,161,192 -116,163,191 -117,165,190 -119,167,189 -121,169,188 -123,171,187 -125,172,186 -126,174,185 
-128,176,184 -130,178,183 -132,180,182 -134,181,181 -136,183,180 -137,185,179 -139,186,178 -141,188,177 -143,190,176 -145,191,175 -146,193,174 -148,194,173 -150,196,172 -152,197,171 -154,199,170 -155,200,169 -157,202,168 -159,203,167 -161,204,166 -163,205,165 -164,207,164 -166,208,163 -168,209,162 -170,210,161 -172,211,160 -173,212,159 -175,213,158 -177,215,157 -179,215,156 -181,216,155 -183,217,154 -184,218,153 -186,219,152 -188,220,151 -190,221,150 -192,221,149 -193,222,148 -195,223,147 -197,224,146 -199,224,145 -201,225,144 -202,225,143 -204,226,142 -206,226,141 -208,227,140 -210,227,139 -211,228,138 -213,228,137 -215,228,136 -217,229,135 -219,229,134 -220,229,133 -222,229,132 -224,229,131 -226,229,130 -228,229,129 -230,229,128 -230,229,127 -230,229,126 -230,229,125 -231,229,124 -231,229,123 -232,229,122 -232,229,121 -232,229,120 -233,228,119 -233,228,118 -234,228,117 -234,227,116 -234,227,115 -235,226,114 -235,226,113 -236,225,112 -236,225,111 -236,224,110 -237,224,109 -237,223,108 -238,222,107 -238,221,106 -238,221,105 -239,220,104 -239,219,103 -240,218,102 -240,217,101 -240,216,100 -241,215,99 -241,215,98 -242,213,97 -242,212,96 -242,211,95 -243,210,94 -243,209,93 -244,208,92 -244,207,91 -244,205,90 -245,204,89 -245,203,88 -246,202,87 -246,200,86 -247,199,85 -247,197,84 -247,196,83 -248,194,82 -248,193,81 -249,191,80 -249,190,79 -249,188,78 -250,186,77 -250,185,76 -251,183,75 -251,181,74 -251,180,73 -252,178,72 -252,176,71 -253,174,70 -253,172,69 -253,171,68 -254,169,67 -254,167,66 -255,165,65 -255,163,64 -255,161,63 -255,159,62 -255,157,61 -255,155,60 -255,153,59 -255,150,58 -255,148,57 -255,146,56 -255,144,55 -255,142,54 -255,139,53 -255,137,52 -255,135,51 -255,133,50 -255,130,49 -255,128,48 -255,126,47 -255,123,46 -255,121,45 -255,118,44 -255,116,43 -255,114,42 -255,111,41 -255,109,40 -255,106,39 -255,104,38 -255,101,37 -255,98,36 -255,96,35 -255,93,34 -255,91,33 -255,88,32 -255,86,31 -255,83,30 -255,80,29 -255,78,28 -255,75,27 -255,72,26 -255,70,25 -255,67,24 -255,64,23 -255,61,22 -255,59,21 -255,56,20 -255,53,19 -255,50,18 -255,48,17 -255,45,16 -255,42,15 -255,39,14 -255,37,13 -255,34,12 -255,31,11 -255,28,10 -255,25,9 -255,23,8 -255,20,7 -255,17,6 -255,14,5 -255,11,4 -255,8,3 -255,6,2 -255,3,1 -255,0,0 diff --git a/hyp3lib/extendDateline.py b/hyp3lib/extendDateline.py deleted file mode 100755 index 53dac091..00000000 --- a/hyp3lib/extendDateline.py +++ /dev/null @@ -1,74 +0,0 @@ -"""Extend the coverage next to the dateline""" -import argparse -import os -from osgeo import ogr - - -def extendDateline(inFile, outFile, degrees): - - driver = ogr.GetDriverByName("ESRI Shapefile") - if os.path.exists(outFile): - driver.DeleteDataSource(outFile) - inData = driver.Open(inFile, 0) - outData = driver.CreateDataSource(outFile) - inLayer = inData.GetLayer() - spatialRef = inLayer.GetSpatialRef() - outLayer = outData.CreateLayer('', geom_type=ogr.wkbPolygon, - srs=spatialRef) - featureDefinition = outLayer.GetLayerDefn() - fieldDefinition = ogr.FieldDefn('tile', ogr.OFTString) - fieldDefinition.SetWidth(100) - outLayer.CreateField(fieldDefinition) - for inFeature in inLayer: - inPolygon = inFeature.GetGeometryRef() - outRing = ogr.Geometry(ogr.wkbLinearRing) - outExtraRing = ogr.Geometry(ogr.wkbLinearRing) - extra = False - for inRing in inPolygon: - numPoints = inRing.GetPointCount() - for ii in range(numPoints): - point = inRing.GetPoint(ii) - minLon = -179.999 + degrees - if point[0] <= minLon: - extra = True - outRing.AddPoint_2D(point[0], point[1]) - 
outExtraRing.AddPoint_2D(point[0]+360.0, point[1]) - tile = inFeature.GetField('tile') - outPolygon = ogr.Geometry(ogr.wkbPolygon) - outPolygon.AddGeometry(outRing) - outFeature = ogr.Feature(featureDefinition) - outFeature.SetField('tile', tile) - outFeature.SetGeometry(outPolygon) - outLayer.CreateFeature(outFeature) - outFeature = None - if extra == True: - outPolygon = ogr.Geometry(ogr.wkbPolygon) - outPolygon.AddGeometry(outExtraRing) - outFeature = ogr.Feature(featureDefinition) - outFeature.SetField('tile', tile) - outFeature.SetGeometry(outPolygon) - outLayer.CreateFeature(outFeature) - outFeature = None - outData = None - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('inShape', metavar='', - help='name of the input shapefile') - parser.add_argument('outShape', metavar='', - help='name of the output shapefile') - parser.add_argument('degrees', metavar='', - help='number of degrees to extend dateline') - args = parser.parse_args() - - extendDateline(args.inShape, args.outShape, float(args.degrees)) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/file_subroutines.py b/hyp3lib/file_subroutines.py deleted file mode 100644 index f890e084..00000000 --- a/hyp3lib/file_subroutines.py +++ /dev/null @@ -1,89 +0,0 @@ -import errno -import glob -import os -import re -import zipfile - -from hyp3lib.execute import execute - - -def prepare_files(csv_file): - """Download granules and unzip granules - - Given a CSV file of granule names, download the granules and unzip them, - removing the zip files as we go. Note: This will unzip and REMOVE ALL ZIP - FILES in the current directory. - """ - cmd = "get_asf.py %s" % csv_file - execute(cmd) - os.rmdir("download") - for myfile in os.listdir("."): - if ".zip" in myfile: - try: - zip_ref = zipfile.ZipFile(myfile, 'r') - zip_ref.extractall(".") - zip_ref.close() - except: - print("Unable to unzip file {}".format(myfile)) - else: - print("WARNING: {} not recognized as a zip file".format(myfile)) - - -def get_file_list(): - """ - Return a list of file names and file dates, including all SAFE - directories, found in the current directory, sorted by date. - """ - files = [] - filenames = [] - filedates = [] - - # Set up the list of files to process - i = 0 - for myfile in os.listdir("."): - if ".SAFE" in myfile and os.path.isdir(myfile): - t = re.split('_+', myfile) - m = [myfile, t[4][0:15]] - files.append(m) - i += 1 - - print('Found %s files to process' % i) - files.sort(key=lambda row: row[1]) - print(files) - - for i in range(len(files)): - filenames.append(files[i][0]) - filedates.append(files[i][1]) - - return filenames, filedates - - -def get_dem_tile_list(): - - tile_list = None - for myfile in glob.glob("DEM/*.tif"): - tile = os.path.basename(myfile) - if tile_list: - tile_list = tile_list + ", " + tile - else: - tile_list = tile - - if tile_list: - print("Found DEM tile list of {}".format(tile_list)) - return tile_list - else: - print("Warning: no DEM tile list created") - return None - - -def mkdir_p(path): - """ - Make parent directories as needed and no error if existing. Works like `mkdir -p`. 
- """ - try: - os.makedirs(path) - except OSError as exc: # Python >2.5 - if exc.errno == errno.EEXIST and os.path.isdir(path): - pass - else: - raise diff --git a/hyp3lib/geotiff_lut.py b/hyp3lib/geotiff_lut.py deleted file mode 100755 index 030154a2..00000000 --- a/hyp3lib/geotiff_lut.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Applies a LUT to a GeoTIFF""" - -import argparse -import os - -import numpy as np -from osgeo import gdal, osr - - -def geotiff_lut(geotiff, lutFile, outFile): - - # Suppress GDAL warnings - gdal.UseExceptions() - gdal.PushErrorHandler('CPLQuietErrorHandler') - - # Read GeoTIFF and normalize it (if needed) - inRaster = gdal.Open(geotiff) - cols = inRaster.RasterXSize - rows = inRaster.RasterYSize - geotransform = inRaster.GetGeoTransform() - originX = geotransform[0] - originY = geotransform[3] - pixelWidth = geotransform[1] - pixelHeight = geotransform[5] - data = inRaster.GetRasterBand(1).ReadAsArray() - dataType = gdal.GetDataTypeName(inRaster.GetRasterBand(1).DataType) - if dataType == 'Byte': - index = data - else: - data[np.isnan(data)] = 0.0 - data = data.astype(np.float32) - mean = np.mean(data) - std_dev = np.std(data) - minValue = max(np.min(data), mean-2.0*std_dev) - maxValue = min(np.max(data), mean+2.0*std_dev) - data -= minValue - data /= maxValue - data[data>1.0] = 1.0 - data = data*255.0 + 0.5 - index = data.astype(np.uint8) - data = None - - # Read look up table - lut = np.genfromtxt(lutFile, delimiter = ',', dtype = int) - redLut = lut[:, 0] - greenLut = lut[:, 1] - blueLut = lut[:, 2] - - # Apply look up table - red = np.zeros((rows, cols), dtype = np.uint8) - green = np.zeros((rows, cols), dtype = np.uint8) - blue = np.zeros((rows, cols), dtype = np.uint8) - red = redLut[index] - green = greenLut[index] - blue = blueLut[index] - - # Write RGB GeoTIFF image - driver = gdal.GetDriverByName('GTiff') - outRaster = driver.Create(outFile, cols, rows, 3, gdal.GDT_Byte, - ['COMPRESS=LZW']) - outRaster.SetGeoTransform((originX, pixelWidth, 0, originY, 0, pixelHeight)) - outRasterSRS = osr.SpatialReference() - outRasterSRS.ImportFromWkt(inRaster.GetProjectionRef()) - outRaster.SetProjection(outRasterSRS.ExportToWkt()) - outBand = outRaster.GetRasterBand(1) - outBand.WriteArray(red) - outBand = outRaster.GetRasterBand(2) - outBand.WriteArray(green) - outBand = outRaster.GetRasterBand(3) - outBand.WriteArray(blue) - outRaster = None - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('geotiff', help='name of GeoTIFF file (input)') - parser.add_argument('lut', help='name of look up table file to apply (input)') - parser.add_argument('output', help='name of output file (output)') - args = parser.parse_args() - - if not os.path.exists(args.geotiff): - parser.error(f'GeoTIFF file {args.geotiff} does not exist!') - - geotiff_lut(args.geotiff, args.lut, args.output) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/getBursts.py b/hyp3lib/getBursts.py deleted file mode 100644 index 63e655be..00000000 --- a/hyp3lib/getBursts.py +++ /dev/null @@ -1,23 +0,0 @@ -import os -from lxml import etree -import logging - -def getBursts(mydir,make_tab_flag=True): - logging.info("Determining number of bursts") - back = os.getcwd() - burst_tab = "%s_burst_tab" % mydir[17:25] - if make_tab_flag: - f1 = open(burst_tab,"w") - os.chdir(os.path.join(mydir,"annotation")) - for name in ['001.xml','002.xml','003.xml']: - for myfile in os.listdir("."): - if name in 
myfile: - root = etree.parse(myfile) - for count in root.iter('burstList'): - total_bursts=int(count.attrib['count']) - f1.write("1 {}\n".format(total_bursts)) - f1.close() - os.chdir(back) - return burst_tab - - diff --git a/hyp3lib/getDemFor.py b/hyp3lib/getDemFor.py deleted file mode 100755 index 68c907bc..00000000 --- a/hyp3lib/getDemFor.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Get a DEM file for a given sentinel1 SAFE file""" - -import argparse -import logging -import os -import shutil - -from osgeo import gdal - -from hyp3lib.get_dem import get_dem -from hyp3lib.execute import execute -from hyp3lib.getSubSwath import get_bounding_box_file -from hyp3lib.saa_func_lib import get_utm_proj - - -def getDemFile(infile, outfile: str, use_opentopo=False, in_utm=True, post=None, dem_name=None): - lat_max, lat_min, lon_max, lon_min = get_bounding_box_file(infile) - - if use_opentopo: - demtype = None - url = f'http://opentopo.sdsc.edu/otr/getdem' \ - f'?demtype=SRTMGL1&west={lon_min}&south={lat_min}&east={lon_max}&north={lat_max}&outputFormat=GTiff' - execute(f'wget -O {outfile} "{url}"') - - if in_utm: - proj = get_utm_proj(lon_min, lon_max, lat_min, lat_max) - tmpdem = 'tmpdem_getDemFile_utm.tif' - gdal.Warp(tmpdem, outfile, dstSRS=proj, resampleAlg='cubic') - shutil.move(tmpdem, outfile) - else: - dem_type = 'utm' if in_utm else 'latlon' - demtype = get_dem( - lon_min, lat_min, lon_max, lat_max, outfile, post=post, dem_name=dem_name, dem_type=dem_type - ) - if not os.path.isfile(outfile): - logging.error(f'Unable to find output file {outfile}') - - return outfile, demtype - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument("SAFEfile", help="S1 SAFE file") - parser.add_argument("outfile", help="Name of output geotiff DEM file") - parser.add_argument("-o", "--opentopo", action="store_true", help="Use opentopo instead of get_dem") - parser.add_argument("-l", "--latlon", action="store_false", - help="Create DEM in lat,lon space - dangerous option for polar imagery") - parser.add_argument("-d", "--dem", help="Only use the specified DEM type") - parser.add_argument("-p", "--post", help="Posting for creating DEM", type=float) - args = parser.parse_args() - - log_file = f'getDemFor_{os.getpid()}.log' - logging.basicConfig(filename=log_file, format='%(asctime)s - %(levelname)s - %(message)s', - datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG) - logging.getLogger().addHandler(logging.StreamHandler()) - logging.info('Starting run') - - outfile, demtype = getDemFile(args.SAFEfile, args.outfile, use_opentopo=args.opentopo, - in_utm=args.latlon, post=args.post, dem_name=args.dem) - logging.info(f'Wrote DEM file {outfile}') - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/getSubSwath.py b/hyp3lib/getSubSwath.py deleted file mode 100644 index b17fb1b5..00000000 --- a/hyp3lib/getSubSwath.py +++ /dev/null @@ -1,257 +0,0 @@ -import os -from osgeo import ogr -import glob -from lxml import etree - -def get_bounding_box_file(safeFile): - mydir = "%s/annotation" % safeFile - myxml = "" - name = "" - - # Get corners from first and last swath - name = "001.xml" - for myfile in os.listdir(mydir): - if name in myfile: - myxml = "%s/annotation/%s" % (safeFile,myfile) - (lat1_max,lat1_min,lon1_max,lon1_min) = get_bounding_box(myxml) - - name = "003.xml" - for myfile in os.listdir(mydir): - if name in myfile: - myxml = "%s/annotation/%s" % (safeFile,myfile) - (lat2_max,lat2_min,lon2_max,lon2_min) = 
get_bounding_box(myxml) - - if ((lon1_max-lon2_max)>180) or ((lon1_min-lon2_min)>180): - if lon1_max < 0: - lon1_max += 360 - if lon1_min < 0: - lon1_min += 360 - if lon2_max < 0: - lon2_max += 360 - if lon2_min < 0: - lon2_min += 360 - - lat_max = max(lat1_max,lat1_min,lat2_max,lat2_min) - lat_min = min(lat1_max,lat1_min,lat2_max,lat2_min) - lon_max = max(lon1_max,lon1_min,lon2_max,lon2_min) - lon_min = min(lon1_max,lon1_min,lon2_max,lon2_min) - - if (lon_min <= -177 and lon_max>177): - lat_max = lat_max - 0.15 - lat_min = lat_min + 0.15 - lon_max = lon_max - 0.15 - lon_min = lon_min + 0.15 - else: - lat_max = lat_max + 0.15 - lat_min = lat_min - 0.15 - lon_max = lon_max + 0.15 - lon_min = lon_min - 0.15 - - return lat_max,lat_min,lon_max,lon_min - - -def get_bounding_box(myxml): - lon_max = -180 - lon_min = 360 - lat_max = -90 - lat_min = 90 - lon = [] - root = etree.parse(myxml) - for coord in root.iter('latitude'): - lat_max = max(float(coord.text),lat_max) - lat_min = min(float(coord.text),lat_min) - for coord in root.iter('longitude'): - lon.append(float(coord.text)) - lon_max = max(lon) - lon_min = min(lon) - diff = lon_max - lon_min - if diff > 180: - for ii in range(len(lon)): - if lon[ii] < 0: - lon[ii] += 360 - lon_min = min(lon) - lon_max = max(lon) - - return lat_max,lat_min,lon_max,lon_min - - -############################################################################### -# selectSubswath -# -# Purpose: Figure out the best subswath a given bounding box lies in. -# Returns: 1-3 for a valid subswath or 0 if not a valid overlap -# -############################################################################### -def SelectSubswath(safeFile,lon_min,lat_min,lon_max,lat_max): - - os.chdir(safeFile) - os.chdir("annotation") - for myfile in os.listdir("."): - if "001.xml" in myfile: - (lat_max1,lat_min1,lon_max1,lon_min1) = get_bounding_box(myfile) - if "002.xml" in myfile: - (lat_max2,lat_min2,lon_max2,lon_min2) = get_bounding_box(myfile) - if "003.xml" in myfile: - (lat_max3,lat_min3,lon_max3,lon_min3) = get_bounding_box(myfile) - os.chdir("../../") - - wkt1 = "POLYGON ((%s %s, %s %s, %s %s, %s %s, %s %s))" % (lat_min,lon_min,lat_max,lon_min,lat_max,lon_max,lat_min,lon_max,lat_min,lon_min) - wkt2 = "POLYGON ((%s %s, %s %s, %s %s, %s %s, %s %s))" % (lat_min1,lon_min1,lat_max1,lon_min1,lat_max1,lon_max1,lat_min1,lon_max1,lat_min1,lon_min1) - wkt3 = "POLYGON ((%s %s, %s %s, %s %s, %s %s, %s %s))" % (lat_min2,lon_min2,lat_max2,lon_min2,lat_max2,lon_max2,lat_min2,lon_max2,lat_min2,lon_min2) - wkt4 = "POLYGON ((%s %s, %s %s, %s %s, %s %s, %s %s))" % (lat_min3,lon_min3,lat_max3,lon_min3,lat_max3,lon_max3,lat_min3,lon_max3,lat_min3,lon_min3) - - poly0 = ogr.CreateGeometryFromWkt(wkt1) - poly1 = ogr.CreateGeometryFromWkt(wkt2) - poly2 = ogr.CreateGeometryFromWkt(wkt3) - poly3 = ogr.CreateGeometryFromWkt(wkt4) - - intersect1 = poly0.Intersection(poly1) - area1 = intersect1.GetArea() - - intersect2 = poly0.Intersection(poly2) - area2 = intersect2.GetArea() - - intersect3 = poly0.Intersection(poly3) - area3 = intersect3.GetArea() - - ss = 0 - if (area1 > area2): - if (area1 > area3): - ss = 1 - i = intersect1 - else: - ss = 3 - i = intersect3 - else: - if (area2 > area3): - ss = 2 - i = intersect2 - else: - if (area3 > 0): - ss = 3 - i = intersect3 - - return ss, i.GetEnvelope() - - -############################################################################### -# get_real_cc -# -# Purpose: Get the actual corner coordinates of a Sentinel1 xml file -# Returns: lists of lat, lon for each 
corner -# : -# pt1---------------pt4 -# / / -# / / -# / / -# / / -# / / -# / / -# pt2---------------pt3 -# -############################################################################### - -def get_real_cc(myxml): - - lats = [] - lons = [] - - root = etree.parse(myxml) - for i in root.iter('numberOfSamples'): - ns = int(i.text) - - for i in root.iter('numberOfLines'): - nl = int(i.text) - - for i in root.iter('geolocationGridPoint'): - line = int(i[2].text) - samp = int(i[3].text) - if samp==0 and line==0: - lats.append(i[4].text) - lons.append(i[5].text) - - for i in root.iter('geolocationGridPoint'): - line = int(i[2].text) - samp = int(i[3].text) - # the last line is sometimes nl, sometimes nl-1 - if samp==0 and (abs(line-nl) <= 1): - lats.append(i[4].text) - lons.append(i[5].text) - - for i in root.iter('geolocationGridPoint'): - line = int(i[2].text) - samp = int(i[3].text) - # the last line is sometimes nl, sometimes nl-1 - if samp==ns-1 and (abs(line-nl) <= 1): - lats.append(i[4].text) - lons.append(i[5].text) - - for i in root.iter('geolocationGridPoint'): - line = int(i[2].text) - samp = int(i[3].text) - if samp==ns-1 and line==0: - lats.append(i[4].text) - lons.append(i[5].text) - - if len(lats) != 4 or len(lons) != 4: - print("ERROR: Unable to find corner points!") - exit(1) - return lats, lons - -############################################################################### -# SelectAllSubswaths -# -# Purpose: Find all subswaths that overlap with the given bounding box -# Returns: List of subswath numbers and bounding boxes of the intersections -# -############################################################################### - -def SelectAllSubswaths(safeFile,lon_min,lat_min,lon_max,lat_max): - - # Get the real corner coordinates of each subswath - fi = glob.glob("%s/annotation/*001.xml" % safeFile)[0] - lats1, lons1 = get_real_cc(fi) - - fi = glob.glob("%s/annotation/*002.xml" % safeFile)[0] - lats2, lons2 = get_real_cc(fi) - - fi = glob.glob("%s/annotation/*003.xml" % safeFile)[0] - lats3, lons3 = get_real_cc(fi) - - # Create polygons - wkt1 = "POLYGON ((%s %s, %s %s, %s %s, %s %s, %s %s))" % (lat_min,lon_min,lat_max,lon_min,lat_max,lon_max,lat_min,lon_max,lat_min,lon_min) - wkt2 = "POLYGON ((%s %s, %s %s, %s %s, %s %s, %s %s))" % (lats1[0],lons1[0],lats1[1],lons1[1],lats1[2],lons1[2],lats1[3],lons1[3],lats1[0],lons1[0]) - wkt3 = "POLYGON ((%s %s, %s %s, %s %s, %s %s, %s %s))" % (lats2[0],lons2[0],lats2[1],lons2[1],lats2[2],lons2[2],lats2[3],lons2[3],lats2[0],lons2[0]) - wkt4 = "POLYGON ((%s %s, %s %s, %s %s, %s %s, %s %s))" % (lats3[0],lons3[0],lats3[1],lons3[1],lats3[2],lons3[2],lats3[3],lons3[3],lats3[0],lons3[0]) - - poly0 = ogr.CreateGeometryFromWkt(wkt1) - poly1 = ogr.CreateGeometryFromWkt(wkt2) - poly2 = ogr.CreateGeometryFromWkt(wkt3) - poly3 = ogr.CreateGeometryFromWkt(wkt4) - - # Calculate intersections - intersect1 = poly0.Intersection(poly1) - area1 = intersect1.GetArea() - - intersect2 = poly0.Intersection(poly2) - area2 = intersect2.GetArea() - - intersect3 = poly0.Intersection(poly3) - area3 = intersect3.GetArea() - - ss = [] - polygon = [] - - if area1 > 0.0: - ss.append(1) - polygon.append(intersect1.GetEnvelope()) - - if area2 > 0.0: - ss.append(2) - polygon.append(intersect2.GetEnvelope()) - - if area3 > 0.0: - ss.append(3) - polygon.append(intersect3.GetEnvelope()) - - return ss, polygon diff --git a/hyp3lib/get_bb_from_shape.py b/hyp3lib/get_bb_from_shape.py deleted file mode 100644 index c5fb0d40..00000000 --- a/hyp3lib/get_bb_from_shape.py +++ 
/dev/null @@ -1,40 +0,0 @@ -from osgeo import ogr - -def get_bb_from_shape(shapeFile): - - # Extract boundary from shapefile - driver = ogr.GetDriverByName('ESRI Shapefile') - shape = driver.Open(shapeFile, 0) - vectorMultipolygon = ogr.Geometry(ogr.wkbMultiPolygon) - layer = shape.GetLayer() - # vectorSpatialRef = layer.GetSpatialRef() - - # Reproject polygon if necessary - # if vectorSpatialRef != rasterSpatialRef: - # print('Need to re-project vector polygon') - # coordTrans = osr.CoordinateTransformation(vectorSpatialRef, rasterSpatialRef) - - for feature in layer: - geometry = feature.GetGeometryRef() - count = geometry.GetGeometryCount() - if geometry.GetGeometryName() == 'MULTIPOLYGON': - for i in range(count): - polygon = geometry.GetGeometryRef(i) -# if vectorSpatialRef != rasterSpatialRef: -# polygon.Transform(coordTrans) - vectorMultipolygon.AddGeometry(polygon) - else: -# if vectorSpatialRef != rasterSpatialRef: -# geometry.Transform(coordTrans) - vectorMultipolygon.AddGeometry(geometry) - shape.Destroy() - - envelope = vectorMultipolygon.GetEnvelope() - minX = envelope[0] - minY = envelope[2] - maxX = envelope[1] - maxY = envelope[3] - - print(minX, minY, maxX, maxY) - - return(minX,minY,maxX,maxY) diff --git a/hyp3lib/get_bounding.py b/hyp3lib/get_bounding.py deleted file mode 100755 index a5c301e8..00000000 --- a/hyp3lib/get_bounding.py +++ /dev/null @@ -1,153 +0,0 @@ -"""Get the lat/lon min/max values given a .SAFE directory""" -import re -import os -import argparse - - -def get_granule_bounding(granule_path): - annotation_xml_paths = get_annotation_xmls_paths(granule_path) - - annotation_xmls = read_files(annotation_xml_paths) - - bounds = [ - get_values_from(xml_contents) for xml_contents in annotation_xmls - ] - - return get_granule_extrema(bounds) - - -def get_annotation_xmls_paths(granule_path): - annotation_dir = os.path.join(granule_path, 'annotation') - - annotation_folder_paths = [ - os.path.join(annotation_dir, f) - for f in os.listdir(annotation_dir) if is_xml_file(f) - ] - - return annotation_folder_paths - - -def is_xml_file(f): - return re.match('.*\.xml', f) - - -def read_files(paths): - file_contents = [] - for path in paths: - with open(path, 'r') as f: - contents = f.read() - - file_contents.append(contents) - - return file_contents - - -def get_bounding(annotation_xml): - lats, lons = get_values_from(annotation_xml) - - return get_extrema(lats, lons) - - -def get_extrema(lats, lons): - return { - "lat": get_extrema_from(lats), - "lon": get_extrema_from(lons) - } - - -def get_values_from(annotation_xml): - lats = numbers_between('latitude', annotation_xml) - lons = numbers_between('longitude', annotation_xml) - - return ( - lats, - lons - ) - - -def numbers_between(tag, annotation_xml): - numbers_in_tags = "<{tag}>(.*?)<\/{tag}>".format( - tag=tag - ) - - matchs = re.findall( - numbers_in_tags, - annotation_xml - ) - - return convert_matches_to_floats(matchs) - - -def get_extrema_from(vals): - return { - "max": max(vals), - "min": min(vals) - } - - -def convert_matches_to_floats(matchs): - return [ - convert_to_float(match) for match in matchs - ] - - -def convert_to_float(match): - """ - float conversion expects 1.0e-01 - xml is formatted like -1.0e+01 - """ - modifier = 1 - - if '-' in match: - modifier = -1 - match = match.replace('-', '') - - return modifier * float(match) - - -def get_granule_extrema(swath_bounds): - granule_lats, granule_lons = [], [] - - for bound in swath_bounds: - lats, lons = bound - - granule_lats += lats - granule_lons += lons 
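# A minimal usage sketch for get_granule_bounding() above (the SAFE path is
# illustrative, not a file shipped with this repository):
#
#     extrema = get_granule_bounding('S1A_IW_SLC_example.SAFE')
#     lat_min = extrema['lat']['min']
#     lon_max = extrema['lon']['max']
#
# get_extrema() nests its results, so the returned shape is
# {'lat': {'min': ..., 'max': ...}, 'lon': {'min': ..., 'max': ...}}.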
- - return get_extrema(granule_lats, granule_lons) - - -def nice_printout(granule_path, extrema): - print("from granule: {}\n".format(granule_path)) - - print("lat:") - print(" min: {}".format(extrema['lat']['min'])) - print(" max: {}".format(extrema['lat']['max'])) - - print("lon:") - print(" min: {}".format(extrema['lon']['min'])) - print(" max: {}".format(extrema['lon']['max'])) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument( - 'granule_safe_path', - help='relative path to a *.SAFE directory containing the annotation xml files' - ) - args = parser.parse_args() - - granule_path = args.granule_safe_path - - extrema = get_granule_bounding(granule_path) - - nice_printout(granule_path, extrema) - - -if __name__ == "__main__": - main() diff --git a/hyp3lib/get_dem.py b/hyp3lib/get_dem.py deleted file mode 100755 index 4dbc4ab7..00000000 --- a/hyp3lib/get_dem.py +++ /dev/null @@ -1,507 +0,0 @@ -"""Get a DEM file in .tif format from the ASF DEM heap""" - -import argparse -import logging -import math -import multiprocessing as mp -import os -import shutil -import sys -from pathlib import Path - -import lxml.etree as et -import numpy as np -from osgeo import gdal -from osgeo import ogr -from osgeo import osr -from pyproj import Transformer - -import hyp3lib.etc -from hyp3lib import DemError -from hyp3lib import dem2isce -from hyp3lib import saa_func_lib as saa -from hyp3lib.asf_geometry import raster_meta -from hyp3lib.fetch import download_file - - -def reproject_wkt(wkt, in_epsg, out_epsg): - source = osr.SpatialReference() - source.ImportFromEPSG(in_epsg) - - target = osr.SpatialReference() - target.ImportFromEPSG(out_epsg) - - transform = osr.CoordinateTransformation(source, target) - - geom = ogr.CreateGeometryFromWkt(wkt) - geom.Transform(transform) - - return geom.ExportToWkt() - - -def get_dem_list(): - try: - config_file = Path.home() / '.hyp3' / 'get_dem.cfg' - with open(config_file) as f: - config_content = f.readlines() - except FileNotFoundError: - config_file = Path(hyp3lib.etc.__file__).parent / 'config' / 'get_dem.cfg' - with open(config_file) as f: - config_content = f.readlines() - - dem_list = [] - for line in config_content: - name, location, epsg = line.split() - shape_file = os.path.join(location, 'coverage', f'{name.lower()}_coverage.shp') - if shape_file.startswith('http'): - shape_file = '/vsicurl/' + shape_file - dem = { - 'name': name, - 'location': location, - 'epsg': int(epsg), - 'coverage': shape_file, - } - dem_list.append(dem) - return dem_list - - -def get_best_dem(y_min, y_max, x_min, x_max, dem_name=None): - dem_list = get_dem_list() - if dem_name: - dem_list = [dem for dem in dem_list if dem['name'] == dem_name] - - scene_wkt = f'POLYGON (({x_min} {y_min}, {x_max} {y_min}, {x_max} {y_max}, {x_min} {y_max}, {x_min} {y_min}))' - - best_pct = 0 - best_name = '' - best_epsg = '' - best_tile_list = [] - best_poly_list = [] - driver = ogr.GetDriverByName('ESRI Shapefile') - for dem in dem_list: - if dem['epsg'] != 4326: - logging.info(f"Reprojecting corners into projection {dem['epsg']}") - proj_wkt = reproject_wkt(scene_wkt, 4326, dem['epsg']) - else: - proj_wkt = scene_wkt - poly = ogr.CreateGeometryFromWkt(proj_wkt) - - dataset = driver.Open(dem['coverage'], 0) - layer = dataset.GetLayer() - - coverage = 0 - tile_list = [] - poly_list = [] - while True: - feature = layer.GetNextFeature() - if not feature: - break - - intersection = 
feature.geometry().Intersection(poly)
-            area = intersection.GetArea()
-            if area > 0:
-                coverage += area
-                tile_list.append(feature['tile'])
-                poly_list.append(feature.geometry().ExportToWkt())
-
-        total_area = poly.GetArea()
-        pct = coverage / total_area
-        logging.info(f"Totals: {dem['name']} {coverage} {total_area} {pct}")
-
-        if best_pct == 0 or pct > best_pct + 0.05:
-            best_pct = pct
-            best_name = dem['name']
-            best_tile_list = tile_list
-            best_epsg = dem['epsg']
-            best_poly_list = poly_list
-        if pct >= 0.99:
-            break
-
-    if best_pct < 0.20:
-        raise DemError('Unable to find a DEM file for that area')
-
-    logging.info(f'Best DEM: {best_name}')
-    logging.info(f'Tile List: {best_tile_list}')
-    return best_name, best_epsg, best_tile_list, best_poly_list
-
-
-def get_tile_for(args):
-    dem_name, tile_name = args
-    output_dir = 'DEM'
-
-    dem_list = get_dem_list()
-    for dem in dem_list:
-        if dem['name'] == dem_name:
-            source_file = os.path.join(dem['location'], tile_name) + '.tif'
-
-            if source_file.startswith('http'):
-                download_file(source_file, directory=output_dir)
-            else:
-                shutil.copy(source_file, output_dir)
-
-
-def write_vrt(dem_proj, nodata, tile_list, poly_list, out_file):
-    # Get dimensions and pixel size from first DEM in tile list
-    dem_file = os.path.join('DEM', f'{tile_list[0]}.tif')
-    spatial_ref, gt, shape, pixel = raster_meta(dem_file)
-    rows, cols = shape
-    pix_size = gt[1]
-
-    # Determine coverage
-    min_lon = 360
-    max_lon = -180
-    min_lat = 90
-    max_lat = -90
-    for poly in poly_list:
-        polygon = ogr.CreateGeometryFromWkt(poly)
-        envelope = polygon.GetEnvelope()
-        if envelope[0] < min_lon:
-            min_lon = envelope[0]
-        if envelope[1] > max_lon:
-            max_lon = envelope[1]
-        if envelope[2] < min_lat:
-            min_lat = envelope[2]
-        if envelope[3] > max_lat:
-            max_lat = envelope[3]
-
-    raster_x_size = np.int(np.rint((max_lon - min_lon) / pix_size)) + 1
-    raster_y_size = np.int(np.rint((max_lat - min_lat) / pix_size)) + 1
-
-    # Determine offsets
-    offset_x = []
-    offset_y = []
-    for poly in poly_list:
-        polygon = ogr.CreateGeometryFromWkt(poly)
-        envelope = polygon.GetEnvelope()
-        offset_x.append(np.int(np.rint((envelope[0] - min_lon) / pix_size)))
-        offset_y.append(np.int(np.rint((max_lat - envelope[3]) / pix_size)))
-
-    # Generate XML structure
-    vrt = et.Element('VRTDataset', rasterXSize=str(raster_x_size),
-                     rasterYSize=str(raster_y_size))
-    srs = osr.SpatialReference()
-    srs.ImportFromEPSG(dem_proj)
-    et.SubElement(vrt, 'SRS').text = srs.ExportToWkt()
-    geo_trans = f'{min_lon:.16f}, {pix_size:.16f}, 0.0, {max_lat:.16f}, 0.0, {-pix_size:.16f}'
-    et.SubElement(vrt, 'GeoTransform').text = geo_trans
-    bands = et.SubElement(vrt, 'VRTRasterBand', dataType='Float32', band='1')
-    et.SubElement(bands, 'NoDataValue').text = '-32768'
-    et.SubElement(bands, 'ColorInterp').text = 'Gray'
-    tile_count = len(tile_list)
-    for ii in range(tile_count):
-        source = et.SubElement(bands, 'ComplexSource')
-        dem_file = os.path.join('DEM', f'{tile_list[ii]}.tif')
-        et.SubElement(source, 'SourceFilename', relativeToVRT='1').text = \
-            dem_file
-        et.SubElement(source, 'SourceBand').text = '1'
-        properties = et.SubElement(source, 'SourceProperties')
-        properties.set('RasterXSize', str(cols))
-        properties.set('RasterYSize', str(rows))
-        properties.set('DataType', 'Float32')
-        properties.set('BlockXSize', str(cols))
-        properties.set('BlockYSize', '1')
-        src = et.SubElement(source, 'SrcRect')
-        src.set('xOff', '0')
-        src.set('yOff', '0')
-        src.set('xSize', str(cols))
-        src.set('ySize', str(rows))
-        dst =
et.SubElement(source, 'DstRect') - dst.set('xOff', str(offset_x[ii])) - dst.set('yOff', str(offset_y[ii])) - dst.set('xSize', str(cols)) - dst.set('ySize', str(rows)) - et.SubElement(source, 'NODATA').text = f"{nodata}" - - # Write VRT file - with open(out_file, 'wb') as outF: - outF.write(et.tostring(vrt, xml_declaration=False, encoding='utf-8', - pretty_print=True)) - - -def get_dem(x_min, y_min, x_max, y_max, outfile, post=None, processes=1, dem_name=None, leave=False, dem_type='utm'): - if post is not None: - logging.info(f"Snapping to grid at posting of {post} meters") - - if y_min < -90 or y_max > 90: - raise ValueError(f"Please use latitude in range (-90, 90) ({y_min}, {y_max})") - - if x_min > x_max: - logging.warning("WARNING: minimum easting > maximum easting - swapping") - (x_min, x_max) = (x_max, x_min) - - if y_min > y_max: - logging.warning("WARNING: minimum northing > maximum northing - swapping") - (y_min, y_max) = (y_max, y_min) - - # Figure out which DEM and get the tile list - (demname, demproj, tile_list, poly_list) = get_best_dem(y_min, y_max, x_min, x_max, dem_name=dem_name) - demproj = int(demproj) - logging.info(f"demproj is {demproj}") - - # Add buffer for REMA - if 'REMA' in demname or 'GIMP' in demname: - x_min -= 4 - x_max += 4 - if 'EU_DEM' in demname: - y_min -= 2 - y_max += 2 - - # Copy the files into a dem directory - if not os.path.isdir("DEM"): - os.mkdir("DEM") - - # Download tiles in parallel - logging.info("Fetching DEM tiles to local storage") - p = mp.Pool(processes=processes) - p.map( - get_tile_for, - [(demname, fi) for fi in tile_list] - ) - p.close() - p.join() - - # os.system("gdalbuildvrt temp.vrt DEM/*.tif") - if "SRTMGL" in demname: - nodata = -32768 - elif "GIMP" in demname: - nodata = None - elif "REMA" in demname: - nodata = 0 - elif "NED" in demname or "EU_DEM_V11" in demname: - nodata = -3.4028234663852886e+38 - else: - raise DemError(f'Unable to determine NoData value for DEM {demname}') - - write_vrt(demproj, nodata, tile_list, poly_list, 'temp.vrt') - - # - # Set the output projection to either NPS, SPS, or UTM - # - if demproj == 3413: # North Polar Stereo - outproj = 'EPSG:3413' - outproj_num = 3413 - elif demproj == 3031: # South Polar Stereo - outproj = 'EPSG:3031' - outproj_num = 3031 - else: - lon = (x_max + x_min) / 2 - zone = math.floor((lon + 180) / 6 + 1) - if zone > 60: - zone -= 60 - if (y_min + y_max) / 2 > 0: - outproj = ('EPSG:326%02d' % int(zone)) - outproj_num = int("326%02d" % int(zone)) - else: - outproj = ('EPSG:327%02d' % int(zone)) - outproj_num = int("327%02d" % int(zone)) - - tmpdem = "xxyyzz_img.tif" - tmpdem2 = "aabbcc_img.tif" - tmpproj = "lmnopqr_img.tif" - if os.path.isfile(tmpdem): - logging.info(f"Removing old file {tmpdem}") - os.remove(tmpdem) - if os.path.isfile(tmpproj): - logging.info("Removing old file projected dem file") - os.remove(tmpproj) - - pixsize = 30.0 - gcssize = 0.00027777777778 - - if demname == "SRTMGL3": - pixsize = 90. - gcssize *= 3 - if demname == "NED2": - pixsize = 60. 
-        gcssize *= 2
-
-    logging.info("Creating initial raster file")
-    logging.info(f" tmpdem {tmpdem}")
-    logging.info(f" pixsize {pixsize}")
-    logging.info(f" bounds: x_min {x_min}; y_min {y_min}; x_max {x_max}; y_max {y_max}")
-
-    # xform bounds to projection of the DEM
-    if demproj != 4326:
-        transformer = Transformer.from_crs('epsg:4326', f'epsg:{demproj}')
-        t_x, t_y = transformer.transform([x_min, x_max], [y_min, y_max])
-        x_min, x_max = sorted(t_x)
-        y_min, y_max = sorted(t_y)
-        logging.info(f" transformed bounds: x_min {x_min}; y_min {y_min}; x_max {x_max}; y_max {y_max}")
-
-    if demproj == 4269 or demproj == 4326:
-        res = gcssize
-    else:
-        res = pixsize
-    gdal.Warp(tmpdem, "temp.vrt", xRes=res, yRes=res, outputBounds=[x_min, y_min, x_max, y_max],
-              resampleAlg="cubic", dstNodata=-32767)
-
-    # If DEM is from NED collection, then it will have a NAD83 ellipse -
-    # need to convert to WGS84
-    # Also, need to convert from pixel as area to pixel as point
-    if "NED" in demname:
-        logging.info("Converting to WGS84")
-        gdal.Warp("temp_dem_wgs84.tif", tmpdem, dstSRS="EPSG:4326")
-        logging.info("Converting to pixel as point")
-        x1, y1, t1, p1, data = \
-            saa.read_gdal_file(saa.open_gdal_file("temp_dem_wgs84.tif"))
-        lon = t1[0]
-        resx = t1[1]
-        rotx = t1[2]
-        lat = t1[3]
-        roty = t1[4]
-        resy = t1[5]
-        lon = lon + resx / 2.0
-        lat = lat + resy / 2.0
-        t1 = [lon, resx, rotx, lat, roty, resy]
-        saa.write_gdal_file_float(tmpdem, t1, p1, data)
-        if not leave:
-            os.remove("temp_dem_wgs84.tif")
-
-    clean_dem(tmpdem, tmpdem2)
-    shutil.move(tmpdem2, tmpdem)
-    gdal.Translate(tmpdem2, tmpdem, metadataOptions=['AREA_OR_POINT=Point'])
-    shutil.move(tmpdem2, tmpdem)
-
-    # Reproject the DEM file into UTM space
-    if demproj != outproj_num:
-        logging.info(f"Translating raster file to projected coordinates ({outproj})")
-        gdal.Warp(tmpproj, tmpdem, dstSRS=outproj, xRes=pixsize, yRes=pixsize, resampleAlg="cubic",
-                  srcNodata=-32767, dstNodata=-32767)
-        infile = tmpproj
-    else:
-        infile = tmpdem
-
-    report_min(infile)
-
-    # Snap to posting grid
-    if post:
-        snap_to_grid(post, pixsize, infile, outfile)
-    else:
-        shutil.copy(infile, outfile)
-
-    report_min(outfile)
-
-    # Clean up intermediate files
-    if not leave:
-        if os.path.isfile(tmpdem):
-            logging.info(f"Removing temp file {tmpdem}")
-            os.remove(tmpdem)
-        if os.path.isfile(tmpproj):
-            logging.info(f"Removing temp file {tmpproj}")
-            os.remove(tmpproj)
-
-    logging.info("Successful Completion!")
-    if dem_type.lower() == 'utm':
-        return demname
-
-    elif dem_type.lower() == 'latlon':
-        pixsize = 0.000277777777778
-        gdal.Warp(
-            "temp_dem.tif", outfile, dstSRS="EPSG:4326", xRes=pixsize, yRes=pixsize, resampleAlg="cubic",
-            dstNodata=-32767
-        )
-        shutil.move("temp_dem.tif", outfile)
-
-    elif dem_type.lower() == 'isce':
-        pixsize = 0.000277777777778
-        gdal.Warp("temp_dem.tif", outfile, format="ENVI", dstSRS="EPSG:4326", xRes=pixsize, yRes=pixsize,
-                  resampleAlg="cubic", dstNodata=-32767)
-        shutil.move("temp_dem.tif", outfile)
-        hdr_name = os.path.splitext(outfile)[0] + ".hdr"
-        dem2isce.dem2isce(outfile, hdr_name, f'{outfile}.xml')
-
-    else:
-        raise NotImplementedError(f'Cannot get DEM for unknown type {dem_type}')
-
-    return demname
-
-
-def report_min(in_dem):
-    (x, y, trans, proj, data) = saa.read_gdal_file(saa.open_gdal_file(in_dem))
-    logging.debug(f"DEM file {in_dem} minimum is {np.min(data)}")
-
-
-def clean_dem(in_dem, out_dem):
-    (x, y, trans, proj, data) = saa.read_gdal_file(saa.open_gdal_file(in_dem))
-    logging.info("Replacing values less than -1000 with -32767")
zero") - data[data <= -1000] = -32767 - logging.info(f"DEM Maximum value: {np.max(data)}") - logging.info(f"DEM minimum value: {np.min(data)}") - - if data.dtype == np.float32: - saa.write_gdal_file_float(out_dem, trans, proj, data.astype(np.float32)) - elif data.dtype == np.uint16: - saa.write_gdal_file(out_dem, trans, proj, data) - else: - logging.error(f"ERROR: Unknown DEM data type {data.dtype}") - sys.exit(1) - - -def snap_to_grid(post, pixsize, infile, outfile): - if post: - logging.info(f"Snapping file to grid at {post} meters") - coords = gdal.Info(infile, format='json')['cornerCoordinates'] - - easts = np.array([c[0] for c in coords.values()]) - norths = np.array([c[1] for c in coords.values()]) - - bounds = [np.floor(easts / post).min() * post, - np.floor(norths / post).min() * post, - np.ceil(easts / post).max() * post, - np.ceil(norths / post).max() * post] - logging.info(f'New coordinate bounds: {bounds}') - - gdal.Warp(outfile, infile, xRes=pixsize, yRes=pixsize, outputBounds=bounds, resampleAlg="cubic", - dstNodata=-32767) - else: - logging.info("Copying DEM to output file name") - shutil.copy(infile, outfile) - - -def positive_int(value): - ivalue = int(value) - if ivalue <= 0: - raise argparse.ArgumentTypeError(f"{value} is an invalid positive int value") - return ivalue - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument("x_min", help="minimum longitude/easting", type=float) - parser.add_argument("y_min", help="minimum latitude/northing", type=float) - parser.add_argument("x_max", help="maximum longitude/easting", type=float) - parser.add_argument("y_max", help="maximum latitude/northing", type=float) - parser.add_argument("outfile", help="output DEM name") - parser.add_argument("-p", "--posting", type=float, help="Snap DEM to align with grid at given posting") - parser.add_argument("-d", "--dem", help="Type of DEM to use") - parser.add_argument("-t", "--threads", type=positive_int, default=1, - help="Num of threads to use for downloading DEM tiles") - parser.add_argument("-l", "--latlon", action='store_true', - help="Create output in GCS coordinates (default is native DEM projection)") - parser.add_argument("-k", "--keep", action='store_true', help="Keep intermediate DEM results") - args = parser.parse_args() - - log_file = f"get_dem_{os.getpid()}.log" - logging.basicConfig(filename=log_file, format='%(asctime)s - %(levelname)s - %(message)s', - datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG) - logging.getLogger().addHandler(logging.StreamHandler()) - logging.info("Starting run") - - if args.latlon: - dem_type = 'latlon' - else: - dem_type = 'utm' - - get_dem( - args.x_min, args.y_min, args.x_max, args.y_max, args.outfile, - post=args.posting, leave=args.keep, processes=args.threads, dem_name=args.dem, dem_type=dem_type - ) - - -if __name__ == "__main__": - main() diff --git a/hyp3lib/ingest_S1_granule.py b/hyp3lib/ingest_S1_granule.py deleted file mode 100644 index 57684009..00000000 --- a/hyp3lib/ingest_S1_granule.py +++ /dev/null @@ -1,63 +0,0 @@ -import logging -import os -import shutil - -from hyp3lib import OrbitDownloadError -from hyp3lib.SLC_copy_S1_fullSW import SLC_copy_S1_fullSW -from hyp3lib.execute import execute -from hyp3lib.getBursts import getBursts -from hyp3lib.get_orb import downloadSentinelOrbitFile -from hyp3lib.par_s1_slc_single import par_s1_slc_single - - -def ingest_S1_granule(safe_dir: str, pol: str, looks: int, out_file: str, 
orbit_file: str = None): - """Pre-process S1 imagery into GAMMA format - - Args: - safe_dir: Sentinel-1 SAFE directory location - pol: polarization (e.g., 'vv') - looks: the number of looks to take - out_file: file name of the output GAMMA formatted imagery - orbit_file: Orbit file to use (will download a matching orbit file if None) - """ - pol = pol.lower() - granule_type = safe_dir[7:10] - - # Ingest the granule into gamma format - if granule_type == 'GRD': - cmd = f'par_S1_GRD {safe_dir}/*/*{pol}*.tiff {safe_dir}/*/*{pol}*.xml {safe_dir}/*/*/calibration-*{pol}*.xml ' \ - f'{safe_dir}/*/*/noise-*{pol}*.xml {pol}.grd.par {pol}.grd' - execute(cmd, uselogging=True) - - # Ingest the precision state vectors - try: - if orbit_file is None: - logging.info('Trying to get orbit file information from file {}'.format(safe_dir)) - orbit_file, _ = downloadSentinelOrbitFile(safe_dir) - logging.debug('Applying precision orbit information') - execute(f'S1_OPOD_vec {pol}.grd.par {orbit_file}', uselogging=True) - except OrbitDownloadError: - logging.warning('Unable to fetch precision state vectors... continuing') - - if looks > 1.0: - cmd = f'multi_look_MLI {pol}.grd {pol}.grd.par {out_file} {out_file}.par {looks} {looks} - - - 1' - execute(cmd, uselogging=True) - else: - shutil.copy(f'{pol}.grd', out_file) - shutil.copy(f'{pol}.grd.par', f'{out_file}.par') - - else: - # Ingest SLC data files into gamma format - par_s1_slc_single(safe_dir, pol, orbit_file=orbit_file) - date = safe_dir[17:25] - burst_tab = getBursts(safe_dir, make_tab_flag=True) - shutil.copy(burst_tab, date) - - # Mosaic the swaths together and copy SLCs over - back = os.getcwd() - os.chdir(date) - SLC_copy_S1_fullSW('../', date, 'SLC_TAB', burst_tab, mode=2, raml=looks * 5, azml=looks) - os.chdir(back) - - shutil.move(f'{date}.mli', out_file) - shutil.move(f'{date}.mli.par', f'{out_file}.par') diff --git a/hyp3lib/iscegeo2geotif.py b/hyp3lib/iscegeo2geotif.py deleted file mode 100755 index d012a66e..00000000 --- a/hyp3lib/iscegeo2geotif.py +++ /dev/null @@ -1,133 +0,0 @@ -"""Convert ISCE outputs into geotiff, browse, and kmz files""" -import os -import zipfile -import shutil -from lxml import etree -from osgeo import gdal -from hyp3lib.execute import execute -import argparse - - -def fixKmlName(inKML,inName): - """ - The kmlfile created by mdx.py contains the wrong png file name. - This won't work. So, we change the text to be the new name. - """ - tree = etree.parse(inKML) - rt = tree.getroot() - rt[0][0][3][0].text = inName - with open(inKML, 'wb') as of: - of.write(b'\n') - tree.write(of,pretty_print=True) - - -def makeKMZ(infile,outfile): - - kmlfile = infile + ".kml" - kmzfile = outfile + ".kmz" - pngfile = infile + ".png" - outpng = outfile + ".png" - lrgfile = outfile + "_large.png" - - # Create the colorized kml file and png image - cmd = "mdx.py {0} -kml {1}".format(infile,kmlfile) - execute(cmd) - - #fix the name in the kml file!!! 
- fixKmlName(kmlfile,lrgfile) - - # scale the PNG image to browse size - gdal.Translate("temp.png",pngfile,format="PNG",width=0,height=1024) - gdal.Translate("tmpl.png",pngfile,format="PNG",width=0,height=2048) - - shutil.move("temp.png",pngfile) - shutil.move("tmpl.png",lrgfile) - - # finally, zip the kmz up - with zipfile.ZipFile(kmzfile,'w') as myzip: - myzip.write(kmlfile) - myzip.write(lrgfile) - shutil.move(pngfile,outpng) - - -def create_browse(oldname,pngname,auxname,gcsname,proj,height): - """Create a browse image""" - # Use the gcsfile's aux.xml information - gdal.Translate(pngname,gcsname,format="PNG",height=height) - shutil.move(auxname,"gcs.aux.xml") - - # Use the GMT5SAR provided PNG file - gdal.Translate(pngname,oldname,format="PNG",height=height) - shutil.move("gcs.aux.xml",auxname) - - # Reproject the PNG file into UTM coordinates - gdal.Warp("tmp.vrt",pngname,format="vrt",dstSRS=proj,resampleAlg="cubic",dstNodata=0) - gdal.Translate(pngname,"tmp.vrt",format="PNG") - os.remove("tmp.vrt") - - -def convert_files(s1aFlag,proj=None,res=30): - - makeKMZ("filt_topophase.unw.geo","unw") - shutil.move("unw.kmz","colorized_unw.kmz") - makeKMZ("filt_topophase.flat.geo","col") - shutil.move("col.kmz","color.kmz") - - gcsname = "tmp_gcs.tif" - - # Create the phase image - if proj is None: - gdal.Translate("phase.tif","filt_topophase.unw.geo",bandList=[2],creationOptions = ['COMPRESS=PACKBITS']) - shutil.copy("phase.tif",gcsname) - else: - print("Creating tmp.tif") - gdal.Translate("tmp.tif","filt_topophase.unw.geo.vrt",bandList=[2],creationOptions = ['COMPRESS=PACKBITS']) - print("phase.tif") - gdal.Warp("phase.tif","tmp.tif",dstSRS=proj,xRes=res,yRes=res,resampleAlg="cubic",dstNodata=0,creationOptions=['COMPRESS=LZW']) - print("mv tmp.tif {}".format(gcsname)) - shutil.copy("tmp.tif",gcsname) -# os.remove("tmp.tif") - - print("Creating browse image colorized_unw.png") - create_browse("unw.png","colorized_unw.png","colorized_unw.png.aux.xml",gcsname,proj,1024) - create_browse("unw.png","colorized_unw_large.png","colorized_unw_large.png.aux.xml",gcsname,proj,2048) - - print("Creating browse image color.png") - create_browse("col.png","color.png","color.png.aux.xml",gcsname,proj,1024) - print("Creating browse image color_large.png") - create_browse("col.png","color_large.png","color_large.png.aux.xml",gcsname,proj,2048) - - - # Create the amplitude image - if proj is None: - gdal.Translate("amp.tif","filt_topophase.unw.geo",bandList=[1],creationOptions = ['COMPRESS=PACKBITS']) - else: - gdal.Translate("tmp.tif","filt_topophase.unw.geo.vrt",bandList=[1],creationOptions = ['COMPRESS=PACKBITS']) - gdal.Warp("amp.tif","tmp.tif",dstSRS=proj,xRes=res,yRes=res,resampleAlg="cubic",dstNodata=0,creationOptions = ['COMPRESS=LZW']) - os.remove("tmp.tif") - - # Create the coherence image - if proj is None: - gdal.Translate("coherence.tif","phsig.cor.geo",creationOptions = ['COMPRESS=PACKBITS']) - else: - gdal.Translate("tmp.tif","phsig.cor.geo.vrt",creationOptions = ['COMPRESS=PACKBITS']) - gdal.Warp("coherence.tif","tmp.tif",dstSRS=proj,xRes=res,yRes=res,resampleAlg="cubic",dstNodata=0,creationOptions = ['COMPRESS=LZW']) - os.remove("tmp.tif") - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument("-p","--proj",help="Projection code to convert to") - parser.add_argument("-r","--res",type=float,help="Resolution for projection") - args = parser.parse_args() - -
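    # convert_files() takes s1aFlag as its first parameter but never reads it in
    # the function body; the call below always passes True.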
convert_files(True,proj=args.proj,res=args.res) - - -if __name__ == "__main__": - main() diff --git a/hyp3lib/makeChangeBrowse.py b/hyp3lib/makeChangeBrowse.py deleted file mode 100755 index 15c5eba9..00000000 --- a/hyp3lib/makeChangeBrowse.py +++ /dev/null @@ -1,166 +0,0 @@ -"""Creates browse images for classified change detection geotiffs""" - -import argparse -import os - -import numpy as np -from osgeo import gdal - -import hyp3lib.saa_func_lib as saa -from hyp3lib.makeAsfBrowse import makeAsfBrowse - -MAX_CLASSES = 10 - - -def makeChangeBrowse(geotiff,type="MSCD"): - - # read in the data - x,y,trans,proj,data = saa.read_gdal_file(saa.open_gdal_file(geotiff)) - - red = np.zeros(data.shape,dtype=np.uint8) - blue = np.zeros(data.shape,dtype=np.uint8) - green = np.zeros(data.shape,dtype=np.uint8) - newData = np.zeros(data.shape,dtype=np.uint8) - - if type == "SACD": - - # - # Make the greyscale image - # - lut = [0,64,0,192] - - # - # Make the color images - # - red_lut = [0,255, 0, 1] - green_lut = [0, 1, 0, 1] - blue_lut = [0, 1, 0,255] - - for i in range(y): - for j in range(x): - newData[i,j] = lut[data[i,j]] - red[i,j] = red_lut[data[i,j]] - green[i,j] = green_lut[data[i,j]] - blue[i,j] = blue_lut[data[i,j]] - - else: - - # - # get data median and histogram - # - median = np.median(data) - bins = np.zeros(MAX_CLASSES,dtype=np.int8) - for i in range(MAX_CLASSES): - bins[i] = i - hist = np.histogram(data,bins=bins) - - # - # count the number of classes present in histogram and set class number, - # making the median class 0, keeping all zeros as class 0, and mapping all others - # with histogram values to a linear sequence 1, 2, 3, ... - # - class_cnt = 0 - next_class = 1 - classifications = np.zeros(MAX_CLASSES,dtype=np.int8) - classifications[:] = -1 - classifications[0] = 0 - for i in range(0,len(hist[0])): - if hist[0][i] != 0: - class_cnt = class_cnt + 1 - if hist[1][i] == 0: - # we have zeros in the image - need to be handled as background - classifications[i] = 0 - elif i == median: - classifications[i] = 0 - else: - classifications[i] = next_class - next_class = next_class + 1 - - # - # Make LUT to map classifications to greyscale values - # Start at 64, increment by 192/(#classes-2) to get - # sequences like {64,255}, {64,160,255}, {64,128,192,255}, etc, - # always leaving the median class and zero pixels as zero valued - # - lut = np.zeros(class_cnt,dtype=np.uint8) - if class_cnt == 1: - print("ERROR: Only found one class") - exit(1) - if (class_cnt == 2): - lut[0] = 0 - lut[1] = 255 - else: - val = 64 - inc = 192/(class_cnt-2) - for i in range(class_cnt): - if i != median and hist[1][i] != 0: - lut[classifications[i]] = int(val) - val = val + inc - if val > 255: - val = int(255) - - # - # Use the look up table to set the values in newData array - # - newData = lut[classifications[data]] - - # - # Create the color version of the data - # Here, we use the same classifications as - # an index into a color look up table. 
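    # e.g. for data values {0, 3, 5} with median 3: classifications maps
    # 0 -> 0 (background), 3 -> 0 (median class), 5 -> 1, so a pixel of value 5
    # is drawn with (red_lut[1], green_lut[1], blue_lut[1]) = (255, 1, 1), i.e. red.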
- # - red_lut = [1,255, 1, 1,255,255, 1,128,128, 1] - green_lut = [1, 1, 1,255,128, 1,255,255, 1,128] - blue_lut = [1, 1,255, 1, 1,128,128, 1,255,255] - - for i in range(y): - for j in range(x): - k = classifications[data[i,j]] - red[i,j] = red_lut[k] - blue[i,j] = blue_lut[k] - green[i,j] = green_lut[k] - - # - # Write out the greyscale png files - # - outName = geotiff.replace(".tif","_byte.tif") - pngName = geotiff.replace(".tif","_byte_full.png") - saa.write_gdal_file_byte(outName,trans,proj,newData.astype(np.byte)) - gdal.Translate(pngName,outName,format="PNG",outputType=gdal.GDT_Byte,scaleParams=[[0,255]],noData="0 0 0") - os.remove(outName) - - # - # Write out the RGB tif - # - outName = geotiff.replace(".tif","_rgb.tif") - pngName = geotiff.replace(".tif","_rgb_full.png") - saa.write_gdal_file_rgb(outName,trans,proj,red,green,blue) - gdal.Translate(pngName,outName,format="PNG",outputType=gdal.GDT_Byte,scaleParams=[[0,255]],noData="0 0 0") - - # - # Make the ASF standard browse and kmz images - # - tmpName = geotiff.replace(".tif","_rgb") - makeAsfBrowse(outName,tmpName,use_nn=True) - os.remove(outName) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('geotiff', help='name of GeoTIFF file (input)') - parser.add_argument('type', help='type of input file (MSCD or SACD)') - args = parser.parse_args() - - if not os.path.exists(args.geotiff): - parser.error(f'GeoTIFF file {args.geotiff} does not exist!') - - makeChangeBrowse(args.geotiff, type=args.type) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/makeColorPhase.py b/hyp3lib/makeColorPhase.py deleted file mode 100755 index 377aec2d..00000000 --- a/hyp3lib/makeColorPhase.py +++ /dev/null @@ -1,412 +0,0 @@ -"""Create a colorized phase file from a phase geotiff""" -import os -import math -import numpy as np -import argparse -from hyp3lib import saa_func_lib as saa -import colorsys -from osgeo import gdal -from hyp3lib.cutGeotiffs import cutFiles - -def get2sigmacutoffs(fi): - (x,y,trans,proj,data) = saa.read_gdal_file(saa.open_gdal_file(fi)) - top = np.percentile(data,98) - data[data>top]=top - stddev = np.std(data) - mean = np.mean(data) - lo = mean - 2*stddev - hi = mean + 2*stddev - return lo,hi - -def createAmp(fi): - (x,y,trans,proj,data) = saa.read_gdal_file(saa.open_gdal_file(fi)) - ampdata = np.sqrt(data) - outfile = fi.replace('.tif','-amp.tif') - print(outfile) - saa.write_gdal_file_float(outfile,trans,proj,ampdata) - return outfile - -def makeColorPhase(inFile,rateReduction=1,shift=0,ampFile=None,scale=0,table='CMY'): - - samples = 1024 - - pinf = float('+inf') - ninf = float('-inf') - # fnan = float('nan') - - mod2pi = False - if table=='CMY': - mod2pi = True - R, G, B = makeCycleColor(samples) - elif table=='RYB' : - R, G, B = makeContinuousColor(samples) - elif table=='RWB': - R, G, B = makeRWBColor(samples) - else: - print("ERROR: Unknown color table: {}".format(table)) - exit(1) - - # - # Read in the phase data - # - x,y,trans,proj= saa.read_gdal_file_geo(saa.open_gdal_file(inFile)) - - # If data is too big, resize it - if x > 4096 or y > 4096: - phaseTmp = "{}_small.tif".format(os.path.basename(inFile.replace(".tif",""))) - gdal.Translate(phaseTmp,inFile,height=4096) - x,y,trans,proj,data = saa.read_gdal_file(saa.open_gdal_file(phaseTmp)) - print("Created small tif of size {} x {}".format(x, y)) - else: - x,y,trans,proj,data= saa.read_gdal_file(saa.open_gdal_file(inFile)) - print("Using full 
size tif of size {} x {}".format(x, y)) - phaseTmp = inFile - - # Make a black mask for use after colorization - mask = np.ones(data.shape,dtype=np.uint8) - mask[data[:]==0] = 0 - - # Scale to 0 .. samples-1 - data[:] = data[:] + shift - if mod2pi == True: - data[:] = data[:] % (2*rateReduction*np.pi) - const = samples / (2*rateReduction*np.pi) - data[:] = data[:] * const - else: - - mask = np.ones(data.shape,dtype=np.uint8) - mask[data==pinf] = 0 - mask[data==ninf] = 0 - mask[np.isnan(data)] = 0 - data[mask==0]=0 - - -# mini = np.min(data) -# maxi = np.max(data) - - mini = np.percentile(data,2) - maxi = np.percentile(data,98) - data[data<mini] = mini - data[data>maxi] = maxi - - - data[:] = (data[:] - mini) / (maxi - mini) - data[:] = data * float(samples) - - print(np.max(data)) - print(np.min(data)) - - hist = np.histogram(data) - print(hist[1]) - print(hist[0]) - - data[data==samples]=samples-1 - - # Convert to integer for indexing - idata = np.zeros(data.shape,dtype=np.uint16) - idata[:] = data[:] - - # Make the red, green, and blue versions - red = np.zeros(data.shape,dtype=np.uint8) - green = np.zeros(data.shape,dtype=np.uint8) - blue = np.zeros(data.shape,dtype=np.uint8) - - red = R[idata[:]] - green = G[idata[:]] - blue = B[idata[:]] - - # Apply the black mask - red[mask==0] = 0 - green[mask==0] = 0 - blue[mask==0] = 0 - - if ampFile is None: - # Write out the RGB phase image - fileName = inFile.replace(".tif","_rgb.tif") - saa.write_gdal_file_rgb(fileName,trans,proj,red,green,blue) - - # If we have amplitude, use that - else: - # Make the red, green, and blue floating point versions - redf = np.zeros(data.shape) - greenf = np.zeros(data.shape) - bluef = np.zeros(data.shape) - - # Scale from 0 .. 1 - redf[::] = red[::]/255.0 - greenf[::] = green[::]/255.0 - bluef[::] = blue[::]/255.0 - - # Read in the amplitude data - x1,y1,trans1,proj1 = saa.read_gdal_file_geo(saa.open_gdal_file(ampFile)) - - # If too large, resize the data - if x1 > 4096 or y1 > 4096: - ampTmp = "{}_small.tif".format(os.path.basename(ampFile.replace(".tif",""))) - gdal.Translate(ampTmp,ampFile,height=y,width=x) - x1,y1,trans1,proj1,amp = saa.read_gdal_file(saa.open_gdal_file(ampTmp)) - else: - x1,y1,trans1,proj1,amp = saa.read_gdal_file(saa.open_gdal_file(ampFile)) - ampTmp = ampFile - - if (x != x1) or (y != y1): - cutFiles([phaseTmp,ampTmp]) -# if phaseTmp != inFile: -# os.remove(phaseTmp) - phaseTmp = phaseTmp.replace(".tif","_clip.tif") - x,y,trans,proj,data = saa.read_gdal_file(saa.open_gdal_file(phaseTmp)) - -# if ampTmp != ampFile: -# os.remove(ampTmp) - ampTmp = ampTmp.replace(".tif","_clip.tif") - x1,y1,trans1,proj1,amp = saa.read_gdal_file(saa.open_gdal_file(ampTmp)) - - print("Data shape is {}".format(data.shape)) - print("Amp shape is {}".format(amp.shape)) - - # Make a black mask for use after colorization - mask = np.ones(amp.shape,dtype=np.uint8) - mask[amp==pinf] = 0 - mask[amp==ninf] = 0 - mask[np.isnan(amp)] = 0 - amp[mask==0]=0 - - ave = np.mean(amp) - print("Mean of amp data is {}".format(ave)) - amp[mask==0]=ave - - print("AMP HISTOGRAM:") - hist = np.histogram(amp) - print(hist[1]) - print(hist[0]) - - ave = np.mean(amp) - print("Amp average is {}".format(ave)) - print("Amp median is {}".format(np.median(amp))) - print("Amp stddev is {}".format(np.std(amp))) - - # Rescale amplitude to 2-sigma byte range, otherwise may be all dark - amp2File = createAmp(ampTmp) - myrange = get2sigmacutoffs(amp2File) - newFile = "tmp.tif" - 
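    # The translate below linearly maps [mean - 2*stddev, mean + 2*stddev] of the
    # amplitude (top 2% already clipped in get2sigmacutoffs) onto 0..255; about
    # 95% of pixels fall in that interval, so a few bright scatterers cannot
    # crush the contrast of the byte-scaled image.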
gdal.Translate(newFile,amp2File,outputType=gdal.GDT_Byte,scaleParams=[myrange],resampleAlg="average") - x,y,trans,proj,amp = saa.read_gdal_file(saa.open_gdal_file(newFile)) -# if ampTmp != ampFile: -# os.remove(ampTmp) -# os.remove(amp2File) -# os.remove(newFile) - - print("2-sigma AMP HISTOGRAM:") - hist = np.histogram(amp) - print(hist[1]) - print(hist[0]) - - # Scale amplitude from 0.0 to 1.0 - ampf = np.zeros(data.shape) - ampf = amp / 255.0 - ampf = ampf + float(scale) - ampf[ampf>1.0]=1.0 - - print("SCALED AMP HISTOGRAM:") - hist = np.histogram(ampf) - print(hist[1]) - print(hist[0]) - - # Perform color transformation - h = np.zeros(data.shape) - l = np.zeros(data.shape) - s = np.zeros(data.shape) - - for j in range(x): - for i in range(y): - h[i,j],l[i,j],s[i,j] = colorsys.rgb_to_hls(redf[i,j],greenf[i,j],bluef[i,j]) - - print("LIGHTNESS HISTOGRAM:") - hist = np.histogram(l) - print(hist[1]) - print(hist[0]) - - l = l * ampf - - print("NEW LIGHTNESS HISTOGRAM:") - hist = np.histogram(l) - print(hist[1]) - print(hist[0]) - - for j in range(x): - for i in range(y): - redf[i,j],greenf[i,j],bluef[i,j] = colorsys.hls_to_rgb(h[i,j],l[i,j],s[i,j]) - - red = redf * 255 - green = greenf * 255 - blue = bluef * 255 - - print("TRANSFORMED RED HISTOGRAM:") - hist = np.histogram(red) - print(hist[1]) - print(hist[0]) - - # Apply mask - red[mask==0]=0 - green[mask==0]=0 - blue[mask==0]=0 - - # Write out the RGB phase image - fileName = inFile.replace(".tif","_amp_rgb.tif") - saa.write_gdal_file_rgb(fileName,trans,proj,red,green,blue) - - -# -# This code makes an image to show off the color table -# -# rainbow_red = np.zeros((1024,1024),np.uint8) -# rainbow_green = np.zeros((1024,1024),np.uint8) -# rainbow_blue = np.zeros((1024,1024),np.uint8) -# for i in range(1024): -# for j in range(1024): -# idx = int((float(i)/1024.0)*samples) -# rainbow_red[i,j] = R[idx] -# rainbow_green[i,j] = G[idx] -# rainbow_blue[i,j] = B[idx] -# saa.write_gdal_file_rgb("rainbow.tif",trans,proj,rainbow_red,rainbow_green,rainbow_blue) - - return(fileName) - - -def makeContinuousColor(samples): - - # - # Make the color LUT - # - R = np.zeros(samples,np.uint8) - G = np.zeros(samples,np.uint8) - B = np.zeros(samples,np.uint8) - - # Going from Red to Yellow - for i in range (1,samples/3): - val = i * math.pi / (samples/3) - R[i] = 255 - G[i] = 128 + math.sin(val+3*math.pi/2)*128 - B[i] = 0 - - # Going from Yellow to Green - for i in range(samples/3,2*samples/3): - val = i*math.pi/(samples/3) - R[i] = 128 + math.sin(val+3*math.pi/2)*128 - G[i] = 255 - B[i] = 0 - - # Going from Green to Blue - for i in range(2*samples/3,samples): - val = i*math.pi/(samples/3) - R[i] = 0 - G[i] = 128 + math.sin(val+math.pi/2)*128 - B[i] = 128 + math.sin(val+3*math.pi/2)*128 - - R[0] = 255 - R[samples/3] = 255 - G[2*samples/3] = 255 - B[samples-1] = 255 - - R = R[::-1] - G = G[::-1] - B = B[::-1] - - return R, G, B - -def makeRWBColor(samples): - - # - # Make the color LUT - # - R = np.zeros(samples,np.uint8) - G = np.zeros(samples,np.uint8) - B = np.zeros(samples,np.uint8) - - # Going from Blue to White - for i in range(samples/2): - val = i * math.pi / (samples/2) - R[i] = 128 + math.sin(val+3*math.pi/2)*128 - G[i] = 128 + math.sin(val+3*math.pi/2)*128 - B[i] = 255 - - # Going from White to Red - for i in range(samples/2,samples): - val = i*math.pi/(samples/2) - R[i] = 255 - G[i] = 128 + math.sin(val+3*math.pi/2)*128 - B[i] = 128 + math.sin(val+3*math.pi/2)*128 - - - R[samples/2] = 255 - G[samples/2] = 255 - B[samples/2] = 255 - return R, G, B 
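These LUT builders lean on Python 2 integer division (samples/3, samples/2) to compute array indices; under Python 3 the same expressions produce floats and the indexing raises TypeError. A minimal Python 3-safe sketch of one piecewise sine ramp, where build_ramp is an illustrative helper name rather than part of hyp3lib, with an explicit clip in place of the hole-fixing the originals patch up afterwards:

    import numpy as np

    def build_ramp(samples: int) -> np.ndarray:
        # One third of a CMY-style table: ramp 0 -> 255 over samples // 3 entries.
        third = samples // 3          # floor division; the Python 2 code wrote samples/3
        ramp = np.zeros(samples, dtype=np.uint8)
        i = np.arange(1, third)
        val = i * np.pi / third
        # Clip before casting: near val ~ pi the sine term reaches 256, which the
        # original code papers over afterwards ("Fix holes in color scheme").
        ramp[1:third] = np.clip(128 + np.sin(val + 3 * np.pi / 2) * 128, 0, 255).astype(np.uint8)
        return ramp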
- -def makeCycleColor(samples): - - # - # Make the color LUT - # - R = np.zeros(samples,np.uint8) - G = np.zeros(samples,np.uint8) - B = np.zeros(samples,np.uint8) - - # Going from Yellow to Cyan - for i in range (1,samples/3): - val = i * math.pi / (samples/3) - G[i] = 255 - R[i] = 128 + math.sin(val+math.pi/2)*128 - B[i] = 128 + math.sin(val+3*math.pi/2)*128 - - # Going from Cyan to Magenta - for i in range(samples/3,2*samples/3): - val = i*math.pi/(samples/3) - B[i] = 255 - R[i] = 128 + math.sin(val+math.pi/2)*128 - G[i] = 128 + math.sin(val+3*math.pi/2)*128 - - # Going from Magenta to Yellow - for i in range(2*samples/3,samples): - val = i*math.pi/(samples/3) - R[i] = 255 - B[i] = 128 + math.sin(val+math.pi/2)*128 - G[i] = 128 + math.sin(val+3*math.pi/2)*128 - - # Fix holes in color scheme - G[samples/3] = 255 - B[2*samples/3] = 255 - G[samples-1] = 255 - - return R, G, B - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('geotiff', help='name of GeoTIFF phase file (input)') - parser.add_argument('-a',help='Amplitude image to use for intensity') - parser.add_argument('-c',type=float,help='Scale the amplitude by this value (0-1)',default=0.0) - parser.add_argument('-r',type=float,help='Reduction factor for phase rate',default=1) - parser.add_argument('-s',type=float,help='Color cycle shift value (0..2pi)',default=0) - parser.add_argument('-t',choices=['CMY','RYB','RWB'],help='Name of color table to use (default CMY)',default='CMY') - args = parser.parse_args() - - if not os.path.exists(args.geotiff): - print('ERROR: GeoTIFF file (%s) does not exist!' % args.geotiff) - exit(1) - - if args.a is not None: - if not os.path.exists(args.a): - print('ERROR: Amplitude file (%s) does not exist!' 
% args.a) - exit(1) - - makeColorPhase(args.geotiff,ampFile=args.a,rateReduction=args.r,shift=args.s,scale=args.c,table=args.t) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/makeKml.py b/hyp3lib/makeKml.py deleted file mode 100755 index 180a8aee..00000000 --- a/hyp3lib/makeKml.py +++ /dev/null @@ -1,74 +0,0 @@ -"""Create a KML file from a geotiff and a png""" - -import argparse -import os -import zipfile - -import lxml.etree as et -from osgeo import gdal - - -def makeKML(geotiff,pngFile): - - # Extract information from GeoTIFF - raster = gdal.Open(geotiff) - - # Extract metadata from GeoTIFF to fill into the KML - gt = raster.GetGeoTransform() - coordStr = ('%.4f,%.4f %.4f,%.4f %.4f,%.4f %.4f,%.4f' % - (gt[0], gt[3]+raster.RasterYSize*gt[5], gt[0]+raster.RasterXSize*gt[1], - gt[3]+raster.RasterYSize*gt[5], gt[0]+raster.RasterXSize*gt[1], gt[3], - gt[0], gt[3])) - - # Take care of namespaces - prefix = {} - gx = '{http://www.google.com/kml/ext/2.2}' - prefix['gx'] = gx - ns_gx = {'gx' : 'http://www.google.com/kml/ext/2.2'} - ns_main = { None : 'http://www.opengis.net/kml/2.2'} - ns = dict(list(ns_main.items()) + list(ns_gx.items())) - - # Fill in the tree structure - kmlFile = pngFile.replace('.png','.kml') - kml = et.Element('kml', nsmap=ns) - overlay = et.SubElement(kml, 'GroundOverlay') - et.SubElement(overlay, 'name').text = \ - os.path.basename(kmlFile).replace('.kml', '') + ' overlay' - icon = et.SubElement(overlay, 'Icon') - et.SubElement(icon, 'href').text = pngFile - et.SubElement(icon, 'viewBoundScale').text = '0.75' - latLonQuad = et.SubElement(overlay, '{0}LatLonQuad'.format(gx)) - et.SubElement(latLonQuad, 'coordinates').text = coordStr - with open(kmlFile, 'wb') as outF: - outF.write(et.tostring(kml, xml_declaration=True, encoding='utf-8', - pretty_print=True)) - - # Zip PNG and KML together - zipFile = kmlFile.replace('.kml', '.kmz') - zip = zipfile.ZipFile(zipFile, 'w', zipfile.ZIP_DEFLATED) - zip.write(kmlFile) - zip.write(pngFile) - zip.close() - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('geotiff', help='name of GeoTIFF file (input)') - parser.add_argument('pngFile', help='name of PNG file (input)') - args = parser.parse_args() - - if not os.path.exists(args.geotiff): - parser.error(f'GeoTIFF file {args.geotiff} does not exist!') - if not os.path.exists(args.pngFile): - parser.error(f'PNG file {args.pngFile} does not exist!') - - makeKML(args.geotiff, args.pngFile) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/make_arc_thumb.py b/hyp3lib/make_arc_thumb.py deleted file mode 100755 index 4ad4a272..00000000 --- a/hyp3lib/make_arc_thumb.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Creates an arcgis compatible thumbnail""" -import argparse -from PIL import Image -import base64 -import os - - -def pngtothumb(pngfile): - - # Modify the png to a jpg thumbnail, then encode to base64 - rgb_im = Image.open(pngfile).convert('RGB') - x, y = rgb_im.size - if x>y: - width = 200 - length = 200 * y/x - else: - length = 200 - width = 200 * x/y - - size = length,width - _ = rgb_im.thumbnail(size) - _ = rgb_im.save('tmp_thumb.jpg') - encoded = base64.b64encode(open(r'tmp_thumb.jpg', "rb").read()) - os.remove("tmp_thumb.jpg") - return(encoded) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('input',help='Name of input PNG file') - args = 
parser.parse_args() - - pngtothumb(args.input) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/make_cogs.py b/hyp3lib/make_cogs.py deleted file mode 100755 index 8b53d5ec..00000000 --- a/hyp3lib/make_cogs.py +++ /dev/null @@ -1,65 +0,0 @@ -"""Creates a Cloud Optimized GeoTIFF from the input GeoTIFF(s)""" - -import argparse -import logging -import os -import shutil -import sys -from glob import glob -from tempfile import NamedTemporaryFile - -from osgeo import gdal - -from hyp3lib.execute import execute - - -def cogify_dir(directory: str, file_pattern: str = '*.tif'): - """ - Convert all found GeoTIFF files to a Cloud Optimized GeoTIFF inplace - Args: - directory: directory to search through - file_pattern: the pattern for finding GeoTIFFs - """ - path_expression = os.path.join(directory, file_pattern) - logging.info(f'Converting files to COGs for {path_expression}') - for filename in glob(path_expression): - cogify_file(filename) - - -def cogify_file(filename: str): - """ - Convert a GeoTIFF to a Cloud Optimized GeoTIFF inplace - - Args: - filename: GeoTIFF file to convert - """ - logging.info(f'Converting {filename} to COG') - execute(f'gdaladdo -r average {filename} 2 4 8 16', uselogging=True) - creation_options = ['TILED=YES', 'COMPRESS=DEFLATE', 'NUM_THREADS=ALL_CPUS', 'COPY_SRC_OVERVIEWS=YES'] - with NamedTemporaryFile() as temp_file: - shutil.copy(filename, temp_file.name) - gdal.Translate(filename, temp_file.name, format='GTiff', creationOptions=creation_options) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('geotiffs', nargs='+', help='name of GeoTIFF file(s)') - args = parser.parse_args() - - out = logging.StreamHandler(stream=sys.stdout) - out.addFilter(lambda record: record.levelno <= logging.INFO) - err = logging.StreamHandler() - err.setLevel(logging.WARNING) - logging.basicConfig(format='%(message)s', level=logging.INFO, handlers=(out, err)) - - for geotiff_file in args.geotiffs: - cogify_file(geotiff_file) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/offset_xml.py b/hyp3lib/offset_xml.py deleted file mode 100755 index 5e274201..00000000 --- a/hyp3lib/offset_xml.py +++ /dev/null @@ -1,106 +0,0 @@ -"""Extracts offset information from ISO XML files""" -import argparse -import os - -from lxml import etree as et - -ns_gmd = {'gmd': 'http://www.isotc211.org/2005/gmd'} -ns_gmd_new = {'ns': 'http://www.isotc211.org/2005/gmd'} -ns_gmi = {'gmi': 'http://www.isotc211.org/2005/gmi'} -ns_gco = {'gco': 'http://www.isotc211.org/2005/gco'} -ns_xs = {'xs': 'http://www.isotc211.org/2005/gmx'} -ns_eos = {'eos': 'http://earthdata.nasa.gov/schema/eos'} -ns_xlink = {'xlink': 'http://www.w3.org/1999/xlink'} -ns_gml = {'gml': 'http://www.opengis.net/gml/3.2'} -ns_gmx = {'gmx': 'http://www.isotc211.org/2005/gmx'} -ns = dict( - list(ns_gmd.items()) + - list(ns_gmi.items()) + - list(ns_gco.items()) + - list(ns_xs.items()) + - list(ns_eos.items()) + - list(ns_xlink.items()) + - list(ns_gml.items()) + - list(ns_gmx.items()) -) - - -def offset_xml(listFile, csvFile): - parser = et.XMLParser(remove_blank_text=True) - - lines = [line.rstrip() for line in open(listFile)] - with open(csvFile, 'w') as fp: - fp.write('rtc,granule,west,east,north,south,coregistration,rangeOffset,' - 'azimuthOffset\n') - for line in lines: - print('Reading {0} ...'.format(line)) - meta = et.parse(line, parser) - param = ('/gmd:DS_Series/gmd:composedOf/gmd:DS_DataSet/gmd:has[1]/' - 
'gmi:MI_Metadata/gmd:fileIdentifier/gco:CharacterString') - granule = meta.xpath(param, namespaces=ns)[0].text - - param = ('/gmd:DS_Series/gmd:composedOf/gmd:DS_DataSet/gmd:has[1]/' - 'gmi:MI_Metadata/gmd:identificationInfo/gmd:MD_DataIdentification/' - 'gmd:extent/gmd:EX_Extent/gmd:geographicElement/' - 'gmd:EX_GeographicBoundingBox/gmd:westBoundLongitude/gco:Decimal') - westBound = float(meta.xpath(param, namespaces=ns)[0].text) - - param = ('/gmd:DS_Series/gmd:composedOf/gmd:DS_DataSet/gmd:has[1]/' - 'gmi:MI_Metadata/gmd:identificationInfo/gmd:MD_DataIdentification/' - 'gmd:extent/gmd:EX_Extent/gmd:geographicElement/' - 'gmd:EX_GeographicBoundingBox/gmd:eastBoundLongitude/gco:Decimal') - eastBound = float(meta.xpath(param, namespaces=ns)[0].text) - - param = ('/gmd:DS_Series/gmd:composedOf/gmd:DS_DataSet/gmd:has[1]/' - 'gmi:MI_Metadata/gmd:identificationInfo/gmd:MD_DataIdentification/' - 'gmd:extent/gmd:EX_Extent/gmd:geographicElement/' - 'gmd:EX_GeographicBoundingBox/gmd:southBoundLatitude/gco:Decimal') - southBound = float(meta.xpath(param, namespaces=ns)[0].text) - - param = ('/gmd:DS_Series/gmd:composedOf/gmd:DS_DataSet/gmd:has[1]/' - 'gmi:MI_Metadata/gmd:identificationInfo/gmd:MD_DataIdentification/' - 'gmd:extent/gmd:EX_Extent/gmd:geographicElement/' - 'gmd:EX_GeographicBoundingBox/gmd:northBoundLatitude/gco:Decimal') - northBound = float(meta.xpath(param, namespaces=ns)[0].text) - - param = ('/gmd:DS_Series/gmd:composedOf/gmd:DS_DataSet/gmd:has[1]/' - 'gmi:MI_Metadata/gmd:dataQualityInfo/gmd:DQ_DataQuality/gmd:report[3]/' - 'gmd:DQ_QuantitativeAttributeAccuracy/gmd:result/' - 'gmd:DQ_QuantitativeResult/gmd:value/gco:Record/gco:CharacterString') - coregistration = meta.xpath(param, namespaces=ns)[0].text - - param = ('/gmd:DS_Series/gmd:composedOf/gmd:DS_DataSet/gmd:has[1]/' - 'gmi:MI_Metadata/gmd:dataQualityInfo/gmd:DQ_DataQuality/gmd:report[4]/' - 'gmd:DQ_QuantitativeAttributeAccuracy/gmd:result/' - 'gmd:DQ_QuantitativeResult/gmd:value/gco:Record/gco:Real') - rangeOffset = float(meta.xpath(param, namespaces=ns)[0].text) - - param = ('/gmd:DS_Series/gmd:composedOf/gmd:DS_DataSet/gmd:has[1]/' - 'gmi:MI_Metadata/gmd:dataQualityInfo/gmd:DQ_DataQuality/gmd:report[5]/' - 'gmd:DQ_QuantitativeAttributeAccuracy/gmd:result/' - 'gmd:DQ_QuantitativeResult/gmd:value/gco:Record/gco:Real') - azimuthOffset = float(meta.xpath(param, namespaces=ns)[0].text) - - fp.write( - '{0},{1},{2},{3},{4},{5},{6},{7},{8}\n'.format( - line[:-8], granule[:-8], westBound, eastBound, northBound, southBound, coregistration, rangeOffset, - azimuthOffset) - ) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('list', help='XML list') - parser.add_argument('csv', help='CSV output file') - args = parser.parse_args() - - offset_xml(args.list, args.csv) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/par_s1_slc_single.py b/hyp3lib/par_s1_slc_single.py deleted file mode 100755 index 0b973fc5..00000000 --- a/hyp3lib/par_s1_slc_single.py +++ /dev/null @@ -1,95 +0,0 @@ -import glob -import logging -import os - -from hyp3lib import OrbitDownloadError -from hyp3lib.execute import execute -from hyp3lib.getParameter import getParameter -from hyp3lib.get_orb import downloadSentinelOrbitFile - - -def make_cmd(swath, acquisition_date, out_dir, pol=None): - """Assemble the par_S1_SLC gamma commands - - Args: - swath: Swath to process - acquisition_date: The acquisition date of the SLC imagery - out_dir: 
Where to output the GAMMA formatted files - pol: polarization (e.g., 'vv') - """ - if pol is None: - m = glob.glob(f'measurement/s1*-iw{swath}*')[0] - n = glob.glob(f'annotation/s1*-iw{swath}*')[0] - o = glob.glob(f'annotation/calibration/calibration-s1*-iw{swath}*')[0] - p = glob.glob(f'annotation/calibration/noise-s1*-iw{swath}*')[0] - else: - m = glob.glob(f'measurement/s1*-iw{swath}*{pol}*')[0] - n = glob.glob(f'annotation/s1*-iw{swath}*{pol}*')[0] - o = glob.glob(f'annotation/calibration/calibration-s1*-iw{swath}*{pol}*')[0] - p = glob.glob(f'annotation/calibration/noise-s1*-iw{swath}*{pol}*')[0] - - cmd = f'par_S1_SLC {m} {n} {o} {p} {out_dir}/{acquisition_date}_00{swath}.slc.par ' \ - f'{out_dir}/{acquisition_date}_00{swath}.slc {out_dir}/{acquisition_date}_00{swath}.tops_par' - - return cmd - - -def par_s1_slc_single(safe_dir, pol='vv', orbit_file=None): - """Pre-process S1 SLC imagery into GAMMA format SLCs - - Args: - safe_dir: Sentinel-1 SAFE directory location - pol: polarization (e.g., 'vv') - orbit_file: Orbit file to use (will download a matching orbit file if None) - """ - wrk = os.getcwd() - pol = pol.lower() - - logging.info(f'Processing directory {safe_dir}') - image_type = safe_dir[13:16] - logging.info(f'Found image type {image_type}') - - datelong = safe_dir.split('_')[5] - acquisition_date = (safe_dir.split('_')[5].split('T'))[0] - path = os.path.join(wrk, acquisition_date) - if not os.path.exists(path): - os.mkdir(path) - - logging.info(f'SAFE directory is {safe_dir}') - logging.info(f'Long date is {datelong}') - logging.info(f'Acquisition date is {acquisition_date}') - - os.chdir(safe_dir) - - for swath in range(1, 4): - cmd = make_cmd(swath, acquisition_date, path, pol=pol) - execute(cmd, uselogging=True) - - os.chdir(path) - - # Ingest the precision state vectors - try: - if orbit_file is None: - logging.info(f'Trying to get orbit file information from file {safe_dir}') - orbit_file, _ = downloadSentinelOrbitFile(safe_dir) - logging.info('Applying precision orbit information') - execute(f'S1_OPOD_vec {acquisition_date}_001.slc.par {orbit_file}', uselogging=True) - execute(f'S1_OPOD_vec {acquisition_date}_002.slc.par {orbit_file}', uselogging=True) - execute(f'S1_OPOD_vec {acquisition_date}_003.slc.par {orbit_file}', uselogging=True) - except OrbitDownloadError: - logging.warning('Unable to fetch precision state vectors... 
continuing') - - slc = glob.glob('*_00*.slc') - slc.sort() - par = glob.glob('*_00*.slc.par') - par.sort() - top = glob.glob('*_00*.tops_par') - top.sort() - with open(os.path.join(path, 'SLC_TAB'), 'w') as f: - for i in range(len(slc)): - f.write(f'{slc[i]} {par[i]} {top[i]}\n') - - # Make a raster version of swath 3 - width = getParameter(f'{acquisition_date}_003.slc.par', 'range_samples') - execute(f"rasSLC {acquisition_date}_003.slc {width} 1 0 50 10") - os.chdir(wrk) diff --git a/hyp3lib/ps2dem.py b/hyp3lib/ps2dem.py deleted file mode 100755 index 1471ccf8..00000000 --- a/hyp3lib/ps2dem.py +++ /dev/null @@ -1,166 +0,0 @@ -"""Convert a polar stereo GeoTIFF DEM into GAMMA's internal format""" - -import argparse -import logging -import os -import warnings -from pathlib import Path -from typing import Union - -import numpy as np -from osgeo import gdal, osr - -import hyp3lib.saa_func_lib as saa -from hyp3lib.execute import execute -from hyp3lib.system import gamma_version - - -def ps2dem(in_dem: Union[str, Path], out_dem: str, dem_par: str): - """ - Convert a polar stereo GeoTIFF DEM into GAMMA's internal format - - Args: - in_dem: Polar stereographic DEM in GeoTIFF to be converted - out_dem: Name of the output DEM in GAMMA's internal format - dem_par: Name of the output DEM parameter file - """ - dem_par_in = "dem_par.in" - basename = os.path.basename(in_dem) - - logging.info("PS DEM in GEOTIFF format: {}".format(in_dem)) - logging.info("output DEM: {}".format(out_dem)) - logging.info("output DEM parameter file: {}".format(dem_par)) - - xsize, ysize, trans, proj, data = saa.read_gdal_file(saa.open_gdal_file(in_dem)) - - east = trans[0] - north = trans[3] - pix_east = trans[1] - pix_north = trans[5] - - src_ds = gdal.Open(in_dem) - - prj = src_ds.GetProjection() - - srs = osr.SpatialReference(wkt=prj) - - lat_of_origin = srs.GetProjParm("latitude_of_origin") - logging.info("latitude of origin {}".format(lat_of_origin)) - - central_meridian = srs.GetProjParm("central_meridian") - logging.info("central_meridian {}".format(central_meridian)) - - false_easting = srs.GetProjParm('false_easting') - logging.info("false_easting {}".format(false_easting)) - - false_northing = srs.GetProjParm('false_northing') - logging.info("false_northing {}".format(false_northing)) - - string = src_ds.GetMetadata() - pixasarea = string["AREA_OR_POINT"] - if "AREA" in pixasarea: - logging.info("Pixel as Area! 
Updating corner coordinates to pixel as point") - logging.info("pixel upper northing (m): {} easting (m): {}".format(north, east)) - east = east + pix_east / 2.0 - north = north + pix_north / 2.0 - logging.info("Update pixel upper northing (m): {} easting (m): {}".format(north, east)) - - gamma_ver = gamma_version() - if gamma_ver.startswith('2017'): - warnings.warn('GAMMA versions prior to 2019 will not be supported in hyp3lib 2.0+', - DeprecationWarning, stacklevel=2) - with open(dem_par_in, "w") as f: - f.write("PS\n") - f.write("WGS84\n") - f.write("1\n") - f.write(f"{lat_of_origin}\n") - f.write(f"{central_meridian}\n") - f.write(f"{basename}\n") - f.write("REAL*4\n") - f.write("0\n") - f.write("1\n") - f.write(f"{np.abs(xsize)}\n") - f.write(f"{np.abs(ysize)}\n") - f.write(f"{pix_north} {pix_east}\n") - f.write(f"{north} {east}\n") - else: - with open(dem_par_in, "w") as f: - f.write("PS\n") - f.write("WGS84\n") - f.write("1\n") - f.write("other\n") - f.write(f"{srs.GetAttrValue('PROJECTION')}\n") - f.write("0\n") - f.write(f"{false_easting}\n") - f.write(f"{false_northing}\n") - f.write("1\n") - f.write(f"{central_meridian}\n") - f.write(f"{lat_of_origin}\n") - f.write(f"{basename}\n") - f.write("REAL*4\n") - f.write("0\n") - f.write("1\n") - f.write(f"{np.abs(xsize)}\n") - f.write(f"{np.abs(ysize)}\n") - f.write(f"{pix_north} {pix_east}\n") - f.write(f"{north} {east}\n") - - if os.path.isfile(dem_par): - os.remove(dem_par) - execute("create_dem_par {} < {}".format(dem_par, dem_par_in)) - os.remove(dem_par_in) - - # Since 0 is the invalid pixel sentinel for gamma software, - # Replace 0 with 1, because zero in a DEM is assumed valid - # Then, replace anything <= -32767 with 0 - # (but first remove NANs) - srcband = src_ds.GetRasterBand(1) - no_data = srcband.GetNoDataValue() - - data[np.isnan(data)] = 0.0001 - data[data == 0] = 1 - data[data <= no_data] = 0 - - # Convert to ENVI (binary) format - if data.dtype == np.float32: - fdata = data - else: - # Convert to floating point - fdata = data.astype(np.float32) - fdata = fdata.byteswap() - - tmptif = "temporary_dem_file.tif" - saa.write_gdal_file_float(tmptif, trans, proj, fdata) - gdal.Translate(out_dem, tmptif, format="ENVI") - os.remove(tmptif) - os.remove(out_dem + ".aux.xml") - filename, file_extension = os.path.splitext(out_dem) - os.remove(out_dem.replace(file_extension, ".hdr")) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('ps_dem', help='name of GeoTIFF file (input)') - parser.add_argument('dem', help='DEM data (output)') - parser.add_argument('dempar', help='Gamma DEM parameter file (output)') - - log_file = "{}_{}_log.txt".format("ps2dem", os.getpid()) - logging.basicConfig(filename=log_file, format='%(asctime)s - %(levelname)s - %(message)s', - datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG) - logging.getLogger().addHandler(logging.StreamHandler()) - logging.info("Starting run") - args = parser.parse_args() - - if not os.path.exists(args.ps_dem): - parser.error(f'GeoTIFF file {args.ps_dem} does not exist!') - - ps2dem(args.ps_dem, args.dem, args.dempar) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/rasterMask.py b/hyp3lib/rasterMask.py deleted file mode 100755 index d19e4440..00000000 --- a/hyp3lib/rasterMask.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Generate an AOI mask and apply it""" -import argparse -import os -import numpy as np -from osgeo import gdal -from hyp3lib.asf_geometry 
import geotiff2data, data2geotiff -from hyp3lib.asf_time_series import vector_meta - - -def applyRasterMask(inFile, maskFile, outFile): - - (data, dataGeoTrans, dataProj, dataEPSG, dataDtype, dataNoData) = \ - geotiff2data(inFile) - (mask, maskGeoTrans, maskProj, maskEPSG, maskDtype, maskNoData) = \ - geotiff2data(maskFile) - - data = data.astype(np.float32) - mask = mask.astype(np.float32) - # (dataRows, dataCols) = data.shape - dataOriginX = dataGeoTrans[0] - dataOriginY = dataGeoTrans[3] - # dataPixelSize = dataGeoTrans[1] - (maskRows, maskCols) = mask.shape - maskOriginX = maskGeoTrans[0] - maskOriginY = maskGeoTrans[3] - maskPixelSize = maskGeoTrans[1] - offsetX = int(np.rint((maskOriginX - dataOriginX)/maskPixelSize)) - offsetY = int(np.rint((dataOriginY - maskOriginY)/maskPixelSize)) - data = data[offsetY:maskRows+offsetY,offsetX:maskCols+offsetX] - data *= mask - - data2geotiff(data, dataGeoTrans, dataProj, 'FLOAT', np.nan, outFile) - - -def rasterMask(inFile, maskFile, aoiFile, maskAoiFile, outFile): - - ### Extract relevant metadata from AOI shapefile - (fields, proj, extent, features) = vector_meta(aoiFile) - pixelSize = features[0]['pixSize'] - epsg = features[0]['epsg'] - proj = ('EPSG:{0}'.format(epsg)) - coords = (extent[0], extent[2], extent[1], extent[3]) - - ### Generate raster mask - gdal.Warp(maskAoiFile, maskFile, format='GTiff', dstSRS=proj, xRes=pixelSize, - yRes=pixelSize, resampleAlg='cubic', outputBounds=coords, - outputType=gdal.GDT_Byte, creationOptions=['COMPRESS=LZW']) - - ### Apply raster mask to image - applyRasterMask(inFile, maskAoiFile, outFile) - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('inFile', help='name of the file to be masked') - parser.add_argument('maskFile', help='name of the external mask file') - parser.add_argument('aoiFile', help='name of the AOI polygon file') - parser.add_argument('maskAoiFile', help='name of the AOI mask file') - parser.add_argument('outFile', help='name of the masked file') - args = parser.parse_args() - - rasterMask( - args.inFile, args.maskFile, args.aoiFile, args.maskAoiFile, args.outFile - ) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/raster_boundary2shape.py b/hyp3lib/raster_boundary2shape.py deleted file mode 100755 index 08dfc21d..00000000 --- a/hyp3lib/raster_boundary2shape.py +++ /dev/null @@ -1,90 +0,0 @@ -"""generates boundary shapefile from GeoTIFF file""" - -import argparse -import os - -from scipy import ndimage - -from hyp3lib.asf_geometry import geotiff2boundary_mask, data_geometry2shape -from hyp3lib.asf_time_series import raster_metadata - - -def raster_boundary2shape(inFile, threshold, outShapeFile, use_closing=True, fill_holes = False, - pixel_shift=False): - # Extract raster image metadata - print('Extracting raster information ...') - (fields, values, spatialRef) = raster_metadata(inFile) - - print("Initial origin {x},{y}".format(x=values[0]['originX'],y=values[0]['originY'])) - - if spatialRef.GetAttrValue('AUTHORITY', 0) == 'EPSG': - epsg = int(spatialRef.GetAttrValue('AUTHORITY', 1)) - # Generate GeoTIFF boundary geometry - print('Extracting boundary geometry ...') - (data, colFirst, rowFirst, geoTrans, proj) = \ - geotiff2boundary_mask(inFile, epsg, threshold,use_closing=use_closing) - (rows, cols) = data.shape - - print("After geotiff2boundary_mask origin {x},{y}".format(x=geoTrans[0],y=geoTrans[3])) - - if fill_holes: - data = 
ndimage.binary_fill_holes(data).astype(bool) - -# if pixel_shift: - if values[0]['pixel']: - minx = geoTrans[0] - maxy = geoTrans[3] - # maxx = geoTrans[0] + cols*geoTrans[1] - # miny = geoTrans[3] + rows*geoTrans[5] - - # compute the pixel-aligned bounding box (larger than the feature's bbox) - left = minx - (geoTrans[1]/2) - top = maxy - (geoTrans[5]/2) - - values[0]['originX'] = left - values[0]['originY'] = top - - print("After pixel_shift origin {x},{y}".format(x=values[0]['originX'],y=values[0]['originY'])) - - values[0]['rows'] = rows - values[0]['cols'] = cols - - # Write boundary to shapefile - print('Writing boundary to shapefile ...') - data_geometry2shape(data, fields, values, spatialRef, geoTrans, outShapeFile) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('input', metavar='', - help='name of the GeoTIFF file') - parser.add_argument('-threshold', metavar='', action='store', - default=None, help='threshold value for what is considered blackfill') - parser.add_argument('shape', metavar='',help='name of the shapefile') - - parser.add_argument('--fill_holes', default=False, action="store_true", help='Turn on hole filling') - - parser.add_argument('--pixel_shift', default=False, - action="store_true", help='apply pixel shift') - - parser.add_argument('--no_closing', - default=True,action='store_false', - help='Switch to turn off closing operation') - - args = parser.parse_args() - - if not os.path.exists(args.input): - parser.error(f'GeoTIFF file {args.input} does not exist!') - - raster_boundary2shape( - args.input, args.threshold, args.shape, args.no_closing, args.fill_holes, args.pixel_shift - ) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/rtc2color.py b/hyp3lib/rtc2color.py deleted file mode 100755 index c2bbf1ed..00000000 --- a/hyp3lib/rtc2color.py +++ /dev/null @@ -1,225 +0,0 @@ -"""RGB decomposition of a dual-pol RTC - -The RGB decomposition enhances RTC dual-pol data for visual interpretation. 
It -decomposes the co-pol and cross-pol signal into these color channels: - red: simple bounce (polarized) with some volume scattering - green: volume (depolarized) scattering - blue: simple bounce with very low volume scattering - -In the case where the volume to simple scattering ratio is larger than expected -for typical vegetation, such as in glaciated areas or some forest types, a teal -color (green + blue) can be used -""" - -import argparse -import logging -import os -import sys -from pathlib import Path -from typing import Union - -import numpy as np -from osgeo import gdal, osr - - -def cleanup_threshold(amp=False, cleanup=False) -> float: - """Determine the appropriate cleanup threshold value to use in amp or power - - Args: - amp: input TIF is in amplitude and not power - cleanup: Cleanup artifacts using a -48 db power threshold - - Returns: - clean_threshold: the cleaning threshold to use in amp or power - """ - if amp and cleanup: - clean_threshold = pow(10.0, -24.0 / 10.0) # db to amp - elif cleanup: - clean_threshold = pow(10.0, -48.0 / 10.0) # db to power - else: - clean_threshold = 0.0 - - return clean_threshold - - -def prepare_geotif_data(geotiff_handle: gdal.Dataset, rows: int, cols: int, amp=False, cleanup=False) -> np.ndarray: - """Load in and clean the GeoTIFF for calculating the color thresholds - - Args: - geotiff_handle: gdal Dataset for the GeoTIFF to prepare - rows: number of data rows to read in - cols: number of data columns to read in - amp: input TIF is in amplitude and not power - cleanup: Cleanup artifacts using a -48 db power threshold - - Returns: - data: A numpy array containing the prepared GeoTIFF data - """ - - data = np.nan_to_num(geotiff_handle.GetRasterBand(1).ReadAsArray()[:rows, :cols]) - - threshold = cleanup_threshold(amp, cleanup) - data[data < threshold] = 0.0 - - if amp: # to power - data *= data - - return data - - -def calculate_color_channel(copol_data: np.ndarray, crosspol_data: np.ndarray, threshold: float, - scale_factor: float, color: str): - """Calculate color channel values for the RGB decomposition of copol and crosspol data - - Args: - copol_data: copol data - crosspol_data: crosspol data - threshold: decomposition threshold value in db - scale_factor: scale data by this factor - color: the color channel to calculate - - Returns: - color_channel: color channel data - """ - - power_threshold = pow(10.0, threshold / 10.0) # db to power - below_threshold_mask = crosspol_data < power_threshold - - # I don't know what 'zp' is... 
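    # A plausible reading of the term computed below: arctan(sqrt(copol - crosspol))
    # compresses the surface-minus-volume power difference into [0, pi/2), and the
    # 2/pi factor normalizes it to [0, 1); zp then serves as a small additive
    # brightness term for pixels whose cross-pol power sits below the threshold.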
- zp = np.arctan(np.sqrt(np.clip(copol_data - crosspol_data, 0, None))) * 2.0 / np.pi - zp[~below_threshold_mask] = 0 - - if color == 'red': - z_constant = 1.0 - color_term = 2.0 * np.sqrt(np.clip(copol_data - 3.0 * crosspol_data, 0, None)) - color_term[below_threshold_mask] = 0.0 - - elif color == 'green': - z_constant = 2.0 - color_term = 3.0 * np.sqrt(crosspol_data) - color_term[below_threshold_mask] = 0.0 - - elif color == 'blue': - z_constant = 5.0 - color_term = np.zeros(copol_data.shape) - - elif color == 'teal': - z_constant = 5.0 - color_term = 2.0 * np.sqrt(np.clip(3.0 * crosspol_data - copol_data, 0, None)) - - else: - raise ValueError(f'Unknown color {color}, pick red, green, blue, or teal') - - # Find all our no data and bad data pixels - # NOTE: we're using crosspol here because it will typically have the most bad - # data and we want the same mask applied to all 3 channels (otherwise, we'll - # accidentally be changing colors from intended) - invalid_crosspol_mask = ~(crosspol_data > 0) - - color_channel = 1.0 + (color_term + z_constant * zp) * scale_factor - color_channel[invalid_crosspol_mask] = 0 - - return color_channel - - -def rtc2color(copol_tif: Union[str, Path], crosspol_tif: Union[str, Path], threshold: float, out_tif: Union[str, Path], - cleanup=False, teal=False, amp=False, real=False): - """RGB decomposition of a dual-pol RTC - - Args: - copol_tif: The co-pol RTC GeoTIF - crosspol_tif: The cross-pol RTC GeoTIF - threshold: Decomposition threshold value in db - out_tif: The output color GeoTIFF file name - cleanup: Cleanup artifacts using a -48 db power threshold - teal: Combine green and blue channels because the volume to simple scattering ratio is high - amp: input TIFs are in amplitude and not power - real: Output real (floating point) values instead of RGB scaled (0--255) ints - """ - - # Suppress GDAL warnings but raise python exceptions - # https://gis.stackexchange.com/a/91393 - gdal.UseExceptions() - gdal.PushErrorHandler('CPLQuietErrorHandler') - - copol_handle = gdal.Open(copol_tif) - crosspol_handle = gdal.Open(crosspol_tif) - - rows = min(copol_handle.RasterYSize, crosspol_handle.RasterYSize) - cols = min(copol_handle.RasterXSize, crosspol_handle.RasterXSize) - - geotransform = copol_handle.GetGeoTransform() - projection_reference = copol_handle.GetProjectionRef() - - copol_data = prepare_geotif_data(copol_handle, rows, cols, amp=amp, cleanup=cleanup) - crosspol_data = prepare_geotif_data(crosspol_handle, rows, cols, amp=amp, cleanup=cleanup) - - copol_handle = None # How to close because gdal is weird - crosspol_handle = None # How to close because gdal is weird - - driver = gdal.GetDriverByName('GTiff') - out_type = gdal.GDT_Float32 if real else gdal.GDT_Byte - out_raster = driver.Create(out_tif, cols, rows, 3, out_type, ['COMPRESS=LZW']) - out_raster.SetGeoTransform((geotransform[0], geotransform[1], 0, geotransform[3], 0, geotransform[5])) - out_raster_srs = osr.SpatialReference() - out_raster_srs.ImportFromWkt(projection_reference) - out_raster.SetProjection(out_raster_srs.ExportToWkt()) - - logging.info('Calculating color decomposition components') - - # used scale the results to fit inside RGB 1-255 (ints), with 0 for no/bad data - scale_factor = 1.0 if real else 254.0 - no_data_value = 0 - - bands = { - 1: 'red', - 2: 'green', - 3: 'teal' if teal else 'blue', - } - - for band_number, color in bands.items(): - logging.info(f'Calculate {color} channel and save in GeoTIFF') - band_data = calculate_color_channel( - copol_data, crosspol_data, 
threshold=threshold, scale_factor=scale_factor, color=color - ) - out_band = out_raster.GetRasterBand(band_number) - out_band.WriteArray(band_data) - out_band.SetNoDataValue(no_data_value) - del band_data - - out_raster = None # How to close because gdal is weird - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter, - ) - parser.add_argument('copol', help='the co-pol RTC GeoTIF') - parser.add_argument('crosspol', help='the cross-pol GeoTIF') - parser.add_argument('threshold', type=float, help='decomposition threshold value in dB') - parser.add_argument('geotiff', help='the output color GeoTIFF file name') - parser.add_argument('-c', '-cleanup', '--cleanup', action='store_true', - help='cleanup artifacts using a -48 db power threshold') - parser.add_argument('-t', '-teal', '--teal', action='store_true', - help='combine green and blue channels because the volume to simple scattering ratio is high') - parser.add_argument('-a', '-amp', '--amp', action='store_true', help='input is amplitude, not powerscale') - parser.add_argument('-r', '-real', '--real', action='store_true', - help='output real (floating point) values instead of RGB scaled (0--255) ints') - args = parser.parse_args() - - out = logging.StreamHandler(stream=sys.stdout) - out.addFilter(lambda record: record.levelno <= logging.INFO) - err = logging.StreamHandler() - err.setLevel(logging.WARNING) - logging.basicConfig(format='%(message)s', level=logging.INFO, handlers=(out, err)) - - rtc2color(args.copol, args.crosspol, args.threshold, args.geotiff, - args.cleanup, args.teal, args.amp, args.real) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/rtc2colordiff.py b/hyp3lib/rtc2colordiff.py deleted file mode 100755 index 5d8bd5ac..00000000 --- a/hyp3lib/rtc2colordiff.py +++ /dev/null @@ -1,235 +0,0 @@ -"""Generates pre-event and post-event RTCs to a color difference GeoTIFF""" -import os -import argparse -import datetime -from osgeo import gdal, osr -from hyp3lib.asf_geometry import geotiff2polygon, overlap_indices, geotiff_overlap -from hyp3lib.rtc2color import rtc2color -from hyp3lib.execute import execute - - -class FileException(Exception): - """File does not exist""" - - -def check_pixelsize(preFullpol, postFullpol): - - pre = gdal.Open(preFullpol) - gt = pre.GetGeoTransform() - prePixelsize = gt[1] - pre = None - post = gdal.Open(postFullpol) - gt = post.GetGeoTransform() - postPixelsize = gt[1] - if prePixelsize != postPixelsize: - error = ('Pixel sizes in pre-event (%f) and post-event (%f) images differ.' 
% \ - (prePixelsize, postPixelsize)) - raise ValueError(error) - - -# This function assumes that we are looking at UTM projected images -def check_projection(tmpDir, preFullpol, preCrosspol, postFullpol, postCrosspol): - - pre = gdal.Open(preFullpol) - gt = pre.GetGeoTransform() - pixelSize = gt[1] - preSpatialRef = osr.SpatialReference() - preSpatialRef.ImportFromWkt(pre.GetProjectionRef()) - post = gdal.Open(postFullpol) - postSpatialRef = osr.SpatialReference() - postSpatialRef.ImportFromWkt(post.GetProjectionRef()) - geoSpatialRef = osr.SpatialReference() - geoSpatialRef.ImportFromEPSG(4326) - preCoordTrans = osr.CoordinateTransformation(preSpatialRef, geoSpatialRef) - postCoordTrans = osr.CoordinateTransformation(postSpatialRef, geoSpatialRef) - prePoly = geotiff2polygon(preFullpol) - prePoly.Transform(preCoordTrans) - preCentroid = prePoly.Centroid() - preUtm = int((preCentroid.GetX() + 180.0)/6.0 + 1.0) - postPoly = geotiff2polygon(postFullpol) - postPoly.Transform(postCoordTrans) - postCentroid = postPoly.Centroid() - postUtm = int((postCentroid.GetX() + 180.0)/6.0 + 1.0) - overlap = prePoly.Intersection(postPoly) - overlapCentroid = overlap.Centroid() - overlapUtm = int((overlapCentroid.GetX() + 180.0)/6.0 + 1.0) - - if preUtm != postUtm: - print('Pre- and post-event images in different UTM zones.') - if preUtm == overlapUtm: - print('Reprojecting post-event images to UTM zone %d' % overlapUtm) - if postCentroid.GetY() > 0.0: - proj = ('EPSG:326{0}'.format(preUtm)) - else: - proj = ('EPSG:327{0}'.format(preUtm)) - fullpol = os.path.join(tmpDir, 'postFullpol.tif') - cmd = ("gdalwarp -r bilinear -tr %f %f -t_srs %s %s %s" % \ - (pixelSize, pixelSize, proj, postFullpol, fullpol)) - execute(cmd) - postFullpol = fullpol - crosspol = os.path.join(tmpDir, 'postCrosspol.tif') - cmd = ("gdalwarp -r bilinear -tr %f %f -t_srs %s %s %s" % \ - (pixelSize, pixelSize, proj, postCrosspol, crosspol)) - execute(cmd) - postCrosspol = crosspol - if postUtm == overlapUtm: - print('Reprojecting post-event images to UTM zone %d' % overlapUtm) - if postCentroid.GetY() > 0.0: - proj = ('EPSG:326{0}'.format(postUtm)) - else: - proj = ('EPSG:327{0}'.format(postUtm)) - fullpol = os.path.join(tmpDir, 'preFullpol.tif') - execute("gdalwarp -r bilinear -tr %f %f -t_srs %s %s %s" % \ - (pixelSize, pixelSize, proj, preFullpol, fullpol)) - preFullpol = fullpol - crosspol = os.path.join(tmpDir, 'preCrosspol.tif') - execute("gdalwarp -r bilinear -tr %f %f -t_srs %s %s %s" % \ - (pixelSize, pixelSize, proj, preCrosspol, crosspol)) - preCrosspol = crosspol - - return (preFullpol, preCrosspol, postFullpol, postCrosspol) - - -def make_tmp_dir(path, prefix): - # Generate the temporary directory in location defined in the configuration - # file states. As general failover method generate a temporary directory in - # the current directory - - tmpStr = prefix + '_' + datetime.datetime.utcnow().isoformat() - if path: - tmpDir = os.path.join(path, tmpStr) - else: - tmpDir = tmpStr - os.makedirs(tmpDir) - - return tmpDir - - -def rtc2colordiff(preFullpol, preCrosspol, postFullpol, postCrosspol, threshold, - geotiff, teal, amp): - - print('Converting RTC dual-pol data to color GeoTIFF') - - # Check whether the input files actually exist - if not os.path.exists(preFullpol): - error = ('Pre-event RTC full-pol file (%s) does not exist!' % preFullpol) - raise FileException(error) - - if not os.path.exists(preCrosspol): - error = ('Pre-event RTC cross-pol file (%s) does not exist!' 
% preCrosspol) - raise FileException(error) - - if not os.path.exists(postFullpol): - error = ('Post-event RTC full-pol file (%s) does not exist!' % postFullpol) - raise FileException(error) - - if not os.path.exists(postCrosspol): - error = ('Post-event RTC cross-pol file (%s) does not exist!' % - postCrosspol) - raise FileException(error) - - # Generating a temporary directory - dirName = os.path.dirname(os.path.abspath(geotiff)) - tmpDir = make_tmp_dir(dirName, 'color') - - # Check pixel sizes of pre- and post-event image - check_pixelsize(preFullpol, postFullpol) - - # Reproject files if necessary - (preFullpol, preCrosspol, postFullpol, postCrosspol) = \ - check_projection(tmpDir, preFullpol, preCrosspol, postFullpol, postCrosspol) - - # Determine common overlap of pre- and post-event files - (prePolygon, postPolygon, overlap, proj, pixelSize) = \ - geotiff_overlap(preFullpol, postFullpol, 'intersection') - (xPreOff, yPreOff, xPreCount, yPreCount) = \ - overlap_indices(prePolygon, overlap, pixelSize) - (xPostOff, yPostOff, xPostCount, yPostCount) = \ - overlap_indices(postPolygon, overlap, pixelSize) - - # Calculating pre- and post-event RGB images - colorPreFile = os.path.join(tmpDir, 'preColor.tif') - rtc2color(preFullpol, preCrosspol, threshold, colorPreFile, amp=amp, real=True) - colorPostFile = os.path.join(tmpDir, 'postColor.tif') - rtc2color(postFullpol, postCrosspol, threshold, colorPostFile, amp=amp, real=True) - - # Read input parameter - colorPreImg = gdal.Open(colorPreFile) - colorPostImg = gdal.Open(colorPostFile) - cols = xPreCount - rows = yPreCount - geotransform = colorPreImg.GetGeoTransform() - originX = geotransform[0] - originY = geotransform[3] - pixelWidth = geotransform[1] - pixelHeight = geotransform[5] - - # Read color images - print('Reading pre-color image (%s)' % os.path.basename(colorPreFile)) - preGreen = colorPreImg.GetRasterBand(2).ReadAsArray() - print('Reading post-color image (%s)' % os.path.basename(colorPostFile)) - postRed = colorPostImg.GetRasterBand(1).ReadAsArray() - postGreen = colorPostImg.GetRasterBand(2).ReadAsArray() - - # Calculate color difference image - print('Calculating color difference') - xPreEnd = xPreOff + xPreCount - yPreEnd = yPreOff + yPreCount - xPostEnd = xPostOff + xPostCount - yPostEnd = yPostOff + yPostCount - preGreen = preGreen[yPreOff:yPreEnd, xPreOff:xPreEnd] - postRed = postRed[yPostOff:yPostEnd, xPostOff:xPostEnd] - postGreen = postGreen[yPostOff:yPostEnd, xPostOff:xPostEnd] - preMask = (preGreen > 0).astype(int) - postMask = (postGreen > 0).astype(int) - mask = preMask*postMask - red = postRed*255*mask - green = postGreen*255*mask - blue = 5.0*(postGreen - preGreen)*255*mask - - # Write output GeoTIFF - print('Writing color difference image to GeoTIFF (%s)' % geotiff) - driver = gdal.GetDriverByName('GTiff') - outRaster = driver.Create(geotiff, cols, rows, 3, gdal.GDT_Byte, - ['COMPRESS=LZW']) - outRaster.SetGeoTransform((originX, pixelWidth, 0, originY, 0, pixelHeight)) - outRasterSRS = osr.SpatialReference() - outRasterSRS.ImportFromWkt(colorPreImg.GetProjectionRef()) - outRaster.SetProjection(outRasterSRS.ExportToWkt()) - outBand = outRaster.GetRasterBand(1) - outBand.WriteArray(red) - outBand = outRaster.GetRasterBand(2) - outBand.WriteArray(green) - outBand = outRaster.GetRasterBand(3) - outBand.WriteArray(blue) - outRaster = None - - # Cleanup intermediate files - os.remove(os.path.join(tmpDir, colorPreFile)) - os.remove(os.path.join(tmpDir, colorPostFile)) - os.rmdir(tmpDir) - - -def main(): - """Main 
entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('preFullpol', help='name of the pre-event full-pol RTC file (input)') - parser.add_argument('preCrosspol', help='name of the pre-event cross-pol RTC (input)') - parser.add_argument('postFullpol', help='name of the post-event full-pol RTC file (input)') - parser.add_argument('postCrosspol', help='name of the post-event cross-pol RTC file (input)') - parser.add_argument('threshold', help='threshold value in dB (input)') - parser.add_argument('geotiff', help='name of color difference GeoTIFF file (output)') - parser.add_argument('-teal', action='store_true', help='extend the blue band with teal') - parser.add_argument('-amp', action='store_true', help='input is amplitude, not powerscale') - args = parser.parse_args() - - rtc2colordiff(args.preFullpol, args.preCrosspol, args.postFullpol, - args.postCrosspol, args.threshold, args.geotiff, args.teal, args.amp) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/simplify_shapefile.py b/hyp3lib/simplify_shapefile.py deleted file mode 100755 index 6520694c..00000000 --- a/hyp3lib/simplify_shapefile.py +++ /dev/null @@ -1,116 +0,0 @@ -"""Simplify complicated shapefiles""" - -import argparse -import glob -import json -import logging -import os -import shutil - -import requests -import shapefile -from osgeo import osr, ogr - - -def wkt2shape(wkt,output_file): - - layer_name = os.path.splitext(os.path.basename(output_file))[0] - - spatialref = osr.SpatialReference() # Set the spatial ref. - spatialref.SetWellKnownGeogCS('WGS84') # WGS84 aka ESPG:4326 - - driver = ogr.GetDriverByName("ESRI Shapefile") - dstfile = driver.CreateDataSource(output_file) # Your output file - - # Please note that it will fail if a file with the same name already exists - dstlayer = dstfile.CreateLayer(layer_name, spatialref, geom_type=ogr.wkbMultiPolygon) - - # Add the other attribute fields needed with the following schema : - #fielddef = ogr.FieldDefn("ID", ogr.OFTInteger) - #fielddef.SetWidth(10) - #dstlayer.CreateField(fielddef) - - poly = ogr.CreateGeometryFromWkt(wkt) - feature = ogr.Feature(dstlayer.GetLayerDefn()) - feature.SetGeometry(poly) - #feature.SetField("ID", "shape") # A field with an unique id. - dstlayer.CreateFeature(feature) - feature.Destroy() - dstfile.Destroy() - - -def simplify_shapefile(inshp,outshp): - if not os.path.isfile(inshp): - raise FileNotFoundError(f"{inshp} does not exist") - sf = shapefile.Reader(inshp) - shapes = sf.shapes() - scnt = len(shapes) - print("Found {} shapes in input file".format(scnt)) - pcnt = 0 - for x in range(scnt): - pcnt += len(shapes[x].points) - - if pcnt > 300: - logging.info("Shapefile is too large ({} points) - reducing to fewer than 300 points".format(pcnt)) - - # read the shape file - files = {'files': open('{}'.format(inshp), 'rb')} - - # post a request for simplification service - try: - response = requests.post('https://api.daac.asf.alaska.edu/services/utils/files_to_wkt', files=files) - except requests.RequestException: - logging.error("ERROR: service unavaible - it may be that your shapefile is too large. Reduce to under 300 points") - - if not response.status_code == requests.codes.ok: - response.raise_for_status("Response error: it may be that your shapefile is too large. 
Reduce to under 300 points") - - results = json.loads(response.text) -# logging.info(json.dumps(results, sort_keys=True, indent=4)) - - if "error" in results.keys(): - logging.error("ERROR: {}".format(results['error']['report'])) - exit(1) - - if "repairs" in results.keys(): - logging.info("Repairs") - for x in range(0,len(results['repairs'])): - logging.info(" {}: {}".format(x,results['repairs'][x]['report'])) - - if "wkt" in results.keys(): - # logging.info("{}".format(results['wkt']['wrapped'])) - wkt = ("{}".format(results['wkt']['wrapped'])) - logging.info("Creating new shape file {}".format(outshp)) - wkt2shape(wkt,outshp) - else: - logging.info("Shapefile has {} points; using as is".format(pcnt)) - inbase = os.path.splitext(inshp)[0] - outbase = os.path.splitext(outshp)[0] - for myfile in glob.glob("{}.*".format(inbase)): - newExt = os.path.splitext(myfile)[1] - newName = outbase + newExt - shutil.copy(myfile,newName) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument("infile",help="input shapefile") - parser.add_argument("outfile",help="output shapefile") - args = parser.parse_args() - - logFile = "simplify_shapefile.log" - logging.basicConfig(filename=logFile, format='%(asctime)s - %(levelname)s - %(message)s', - datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG) - logging.getLogger().addHandler(logging.StreamHandler()) - logging.info("Starting run") - - simplify_shapefile(args.infile, args.outfile) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/subset_geotiff_shape.py b/hyp3lib/subset_geotiff_shape.py deleted file mode 100755 index f6b02ce9..00000000 --- a/hyp3lib/subset_geotiff_shape.py +++ /dev/null @@ -1,166 +0,0 @@ -"""Subsets a GeoTIFF file using an AOI from a shapefile""" - -import argparse -import os -import shutil - -import numpy as np -from osgeo import gdal, ogr, osr - - -def point_within_polygon(x, y, polygon): - - ring = polygon.GetGeometryRef(0) - nPoints = ring.GetPointCount() - inside = False - - if nPoints>0: - p1x, p1y, p1z = ring.GetPoint(0) - for i in range(nPoints + 1): - p2x, p2y, p2z = ring.GetPoint(i % nPoints) - if y > min(p1y, p2y): - if y <= max(p1y, p2y): - if x <= max(p1x, p2x): - if p1y != p2y: - xInt = (y - p1y)*(p2x - p1x)/(p2y -p1y) + p1x - if p1x == p2x or x < xInt: - inside = not inside - p1x, p1y = p2x, p2y - - return inside - - -def subset_geotiff_shape(inGeoTIFF, shapeFile, outGeoTIFF): - - print('Subsetting GeoTIFF file (%s) using an AOI from a shapefile (%s)' % - (inGeoTIFF, shapeFile)) - - # Suppress GDAL warnings - gdal.UseExceptions() - gdal.PushErrorHandler('CPLQuietErrorHandler') - - # Read input GeoTIFF parameters and generate boundary polygon - inRaster = gdal.Open(inGeoTIFF) - gt = inRaster.GetGeoTransform() - originX = gt[0] - originY = gt[3] - pixelWidth = gt[1] - pixelHeight = gt[5] - cols = inRaster.RasterXSize - rows = inRaster.RasterYSize - dataType = inRaster.GetRasterBand(1).DataType - rasterProj = inRaster.GetProjection() - rasterSpatialRef = osr.SpatialReference(wkt = rasterProj) - ulX = originX - ulY = originY - urX = originX + gt[1]*cols - urY = originY - lrX = originX + gt[1]*cols + gt[2]*rows - lrY = originY + gt[4]*cols + gt[5]*rows - llX = originX - llY = originY + gt[4]*cols + gt[5]*rows - geometry = ('MULTIPOLYGON ((( %f %f, %f %f, %f %f, %f %f, %f %f )))' % - (ulX, ulY, urX, urY, lrX, lrY, llX, llY, ulX, ulY)) - rasterPolygon = ogr.CreateGeometryFromWkt(geometry) - - # Extract boundary 
from shapefile and reproject polygon if necessary - driver = ogr.GetDriverByName('ESRI Shapefile') - shape = driver.Open(shapeFile, 0) - vectorMultipolygon = ogr.Geometry(ogr.wkbMultiPolygon) - layer = shape.GetLayer() - vectorSpatialRef = layer.GetSpatialRef() - if vectorSpatialRef != rasterSpatialRef: - print('Need to re-project vector polygon') - coordTrans = osr.CoordinateTransformation(vectorSpatialRef, rasterSpatialRef) - for feature in layer: - geometry = feature.GetGeometryRef() - count = geometry.GetGeometryCount() - if geometry.GetGeometryName() == 'MULTIPOLYGON': - for i in range(count): - polygon = geometry.GetGeometryRef(i) - if vectorSpatialRef != rasterSpatialRef: - polygon.Transform(coordTrans) - vectorMultipolygon.AddGeometry(polygon) - else: - if vectorSpatialRef != rasterSpatialRef: - geometry.Transform(coordTrans) - vectorMultipolygon.AddGeometry(geometry) - shape.Destroy() - - # Intersect polygons and determine subset parameters - intersection = rasterPolygon.Intersection(vectorMultipolygon) - if intersection is None or intersection.GetGeometryCount() == 0: - print('Image does not intersect with vector AOI') - shutil.copy(inGeoTIFF,outGeoTIFF) - return - - envelope = intersection.GetEnvelope() - minX = envelope[0] - minY = envelope[2] - maxX = envelope[1] - maxY = envelope[3] - startX = int((minX - originX) / pixelWidth) - startY = int((maxY - originY) / pixelHeight) - if startX < 0: - startX = 0 - if startY < 0: - startY = 0 - originX = minX - originY = maxY - cols = abs(int((maxX - minX) / pixelWidth)) - rows = abs(int((maxY - minY) / pixelHeight)) - endX = startX + cols - endY = startY + rows - - # Write output GeoTIFF with subsetted image - driver = gdal.GetDriverByName('GTiff') - numBands = inRaster.RasterCount - outRaster = driver.Create(outGeoTIFF, cols, rows, numBands, dataType, - ['COMPRESS=LZW']) - outRaster.SetGeoTransform((originX, pixelWidth, 0, originY, 0, pixelHeight)) - outRasterSRS = osr.SpatialReference() - outRasterSRS.ImportFromWkt(inRaster.GetProjectionRef()) - outRaster.SetProjection(outRasterSRS.ExportToWkt()) - for i in range(numBands): - noDataValueActual = inRaster.GetRasterBand(i+1).GetNoDataValue() - noDataValue = noDataValueActual - if noDataValueActual is None: noDataValue = 0 - inRasterData = np.array(inRaster.GetRasterBand(i+1).ReadAsArray()) - outRasterData = inRasterData[startY:endY, startX:endX] - for y in range(rows): - for x in range(cols): - pointX = originX + x*pixelWidth - pointY = originY + y*pixelHeight - if not point_within_polygon(pointX, pointY, intersection): - outRasterData[y, x] = noDataValue - outBand = outRaster.GetRasterBand(i+1) - if noDataValueActual is not None: - outBand.SetNoDataValue(noDataValue) - outBand.WriteArray(outRasterData) - outBand.FlushCache() - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('inGeoTIFF', help='name of the full size GeoTIFF file (input)') - parser.add_argument('shapeFile', help='name of the shapefile (input)') - parser.add_argument('outGeoTIFF', help='name of the subsetted GeoTIFF file (output)') - args = parser.parse_args() - - if not os.path.exists(args.inGeoTIFF): - parser.error(f'GeoTIFF file {args.inGeoTIFF} does not exist!') - - if not os.path.exists(args.shapeFile): - parser.error(f'Shapefile {args.shapeFile} does not exist!') - - subset_geotiff_shape(args.inGeoTIFF, args.shapeFile, args.outGeoTIFF) - - -if __name__ == '__main__': - main() - diff --git 
a/hyp3lib/system.py b/hyp3lib/system.py deleted file mode 100644 index 502ffe64..00000000 --- a/hyp3lib/system.py +++ /dev/null @@ -1,58 +0,0 @@ -"""Utilities for probing the processing system""" - -import datetime -import logging -import os -import subprocess - - -def gamma_version(): - """Probe the system to find the version of GAMMA installed, if possible""" - gamma_ver = os.getenv('GAMMA_VERSION') - if gamma_ver is None: - try: - gamma_home = os.environ['GAMMA_HOME'] - except KeyError: - logging.error('No GAMMA_VERSION or GAMMA_HOME environment variables defined! GAMMA is not installed.') - raise - - try: - with open(f"{gamma_home}/ASF_Gamma_version.txt") as f: - gamma_ver = f.readlines()[-1].strip() - except IOError: - logging.warning( - f"No GAMMA_VERSION environment variable or ASF_Gamma_version.txt " - f"file found in GAMMA_HOME:\n {os.getenv('GAMMA_HOME')}\n" - f"Attempting to parse GAMMA version from its install directory" - ) - gamma_ver = os.path.basename(gamma_home).split('-')[-1] - try: - datetime.datetime.strptime(gamma_ver, '%Y%m%d') - except ValueError: - logging.warning(f'GAMMA version {gamma_ver} does not conform to the expected YYYYMMDD format') - - return gamma_ver - - -def isce_version(): - """Probe the system to find the version of ISCE installed, if possible""" - # NOTE: ISCE does not consistently provide version numbers. For example, the - # self reported version of ISCE with the conda install of ISCE 2.4.1 - # is 2.3 (import isce; isce.__version__). - try: - import isce - except ImportError: - logging.error('ISCE is not installed.') - raise - - # prefer the conda reported version number; requires shell for active conda env - version = subprocess.check_output('conda list | grep isce | awk \'{print $2}\'', shell=True, text=True) - if version: - return version.strip() - - try: - version = isce.__version__ - return version - except AttributeError: - logging.warning('ISCE does not have a version attribute.') - return None diff --git a/hyp3lib/tileList2shape.py b/hyp3lib/tileList2shape.py deleted file mode 100755 index c50cb712..00000000 --- a/hyp3lib/tileList2shape.py +++ /dev/null @@ -1,58 +0,0 @@ -"""generates a shapefile from a list of tile files""" - -import argparse -import os - -from osgeo import ogr, osr - -from hyp3lib.asf_geometry import geotiff2polygon, geometry2shape - - -def tileList2shape(listFile, shapeFile): - - # Set up shapefile attributes - fields = [] - field = {} - values = [] - field['name'] = 'tile' - field['type'] = ogr.OFTString - field['width'] = 100 - fields.append(field) - - files = [line.strip() for line in open(listFile)] - for fileName in files: - print('Reading %s ...' 
% fileName) - polygon = geotiff2polygon(fileName) - tile = os.path.splitext(os.path.basename(fileName))[0] - value = {} - value['tile'] = tile - value['geometry'] = polygon - values.append(value) - spatialRef = osr.SpatialReference() - spatialRef.ImportFromEPSG(4326) - - # Write geometry to shapefiles - geometry2shape(fields, values, spatialRef, False, shapeFile) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('file_list', - help='name of the tiles file list') - parser.add_argument('shape_file', - help='name of the shapefile') - args = parser.parse_args() - - if not os.path.exists(args.file_list): - parser.error(f'GeoTIFF file {args.file_list} does not exist!') - - tileList2shape(args.file_list, args.shape_file) - - -if __name__ == '__main__': - main() diff --git a/hyp3lib/verify_opod.py b/hyp3lib/verify_opod.py deleted file mode 100755 index 38e1d660..00000000 --- a/hyp3lib/verify_opod.py +++ /dev/null @@ -1,54 +0,0 @@ -"""Read OPOD State Vector""" - -import argparse -import logging -import os - -from lxml import etree - - -def verify_opod(fi): - logging.info("Verifying state vector file") - root = etree.parse(fi) - check = 0 - for item in root.iter('File_Description'): - if "Orbit File" not in item.text: - raise ValueError("Not an orbit file!") - else: - logging.info("...Found orbit file") - check += 1 - for item in root.iter('File_Type'): - if "AUX_POEORB" not in item.text and "AUX_PREORB" not in item.text and "AUX_RESORB" not in item.text: - raise ValueError("Unknown file type!") - else: - logging.info("...Found file type {}".format(item.text)) - check += 1 - - if not check: - raise ValueError("Not a valid state vector file: {}".format(fi)) - - else: - logging.info("State vector file verified") - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument("OPODfile", help="S1 OPOD file") - args = parser.parse_args() - - log_file = "OPOD_{}.log".format(os.getpid()) - logging.basicConfig(filename=log_file, format='%(asctime)s - %(levelname)s - %(message)s', - datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.DEBUG) - logging.getLogger().addHandler(logging.StreamHandler()) - logging.info("Starting run") - - verify_opod(args.OPODfile) - - -if __name__ == '__main__': - main() diff --git a/tests/data/test_ned13_dem.tif b/tests/data/test_ned13_dem.tif deleted file mode 100644 index 3411b758b2948cafd71e51b091618f098a69f9eb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4459 zcmc(fc{r8Z+s8Myd7dK~qRfKf@ z1C`RGfkdU0C{c&h$r1J2oZjnq{&@d?*K^(1_j|2%KkKvZ^{i+4`Z};wSS*$fizTPP zl9M@eS;+nH<7Hm{hqsh@g&%$-Pw8J?SB^dUZj@}m@L&B#Km5o%#y@g;wOK5)f93f9 z@CvdVt#}RAh}O#@EX0ocK*}5ljZF0=I!OpVtxC5 zy=|}TdVIlWT9_T=5%-sci?Q z=Q)gUF^`6LS^}%w3&Aw~95_xq4I3V=gqvwEi5EeKiE%rx5H&?T#0+f(@bxr@Y-S}a zJ8=e%>)(SEzn36a`35?Jd!cXLAcPtYLB+H|uxcBGhVnru@fd)PTm4Y8`~!@8-UA(j z-7sfg*C_Y+gGMm0eGC=N^)R-#UY6Sk-3rx!*Y?44&Kx*qG84Meg22sU7r1QM3*E7q zz+)A`v|pQ`U_w8X6n=&9--ki6Q4SwT<&jZPz<6~<9-3#ohh;J!vL=or6%ztiu+#BG%z)GCITo;1XLxe2Bc z1|pZzus7l<6eSOX_cujcVx)|`Bo*w4P{rFCYH0FW9eo$8W6*LHd`u~0ovjkSPmsgv zwZou1_;r-~gg*$^bcR4n_!T<6zrwu+7H)edj~SWr_LMf2MXM9KDBrGy<0CYXT&Ibuc3ODPR}JS4 zv9arlJdS+d`&Ei~{*V$LxTb`7SP|EIDx)w&4gU^TM=x~^bUdSi9>23O_O2Q_59wmS zX)O$DQNgKxs@St!6JyJCvD()Fub(u+tb#F^lxKqF-bVPUPYb`OY2qJ|x_IJeLrmVN 
zhpN%)xIR~9l-#yX9oD?nelI0%Qt%o`9^>FT8J>1-(i!~+o8lP-Gh7oq7TtI0(ZT3_TBF?K&IY);(hz@ZG(xjbBYa?Ggza5pa9f-ShHe>yZz)qWd|`(9^UU$* z&z7jn9go3zR@l>RiB?6HxM-CnZaHXk|o;DHpjaA7MQlm0#8O;AR(N9AuH{XS7wjr z(;YC?%mIxJ9FgPWg!<@!_m54)&T&>q){MpeY-4OXtBsK+3b@v_A65qSgPhwiJUqt2 zH`)rQ&S&H6$L!I13%6L{A!jReUt))U%yPsQdporDwnnh9!Yk<$QGSmL@^?+fz(6;= zan1uvHoK!;ggZWy`NAkyj5M@Go<0cp66S(Hh3|1Jbp-uVcqA;qg=j{5voo$#zvk2+JF)6nx%yig6|+{9D>wDHT)1|h;?>m zc&Nw*^EbGlQj`afAQGu9Z27ML1lgfCRp@x5&i+CRc%b^%cPNEpS-*GnlHV z;#ngDza&PGF-%TuRJ4ECzt02zi8p2@c9H@&}3HjO) za842haoHyz*Xb&_ysC%zw>Mz>o@&?~TL;b`??81j1M2(oU|-m7;M&ZC8IM$8O)!UO zt5p?0eyuFxv>X-jHf0KLw0bgcKkX1bNt`O4-@HOR!qwbYCOR4sC-OLOR=n`9B0^*8 zSoq@S2CMar;M@`wV9#Fz>-BTto%AF`4`jfFzBJgTk^}FY^Wd*fS>P~hBRokp1-?o; z(XP=e{yimARH62l(AIQg@r_t0$;2bdWWf-%cB&V){4?|G(>^6x2ZeVj*D(K_K70A+k~51D7w>aHZ#JN zpUh@veDh?4)pnu+m3Z-%H3edwXN}@o8+pP~xmmo?A%;llDkZ88B@o>Bbi!Y$nn>BG z04tuFf{vORoNlTnrX=YSmSPqiZQn~5XcNMh6YdLlhBOLyL?;VLr?H~_sY^uP2Hy(h z4|ND?>#kBG+;aatv}jiu{WYLUICI-s(Qyw|5jCfjK9!s)BEDIO@3|+67dUShcg%GZ zKeaw3-jjP(ocqF?u=YqKtXx8fO)E@@L1TaMm$mkE*OC>&*6rrPc?(VoT5oWK*$Ib) zjS(n}U2tEpBT*u($xs&QyjBs7aGNv~>4oR(@q8k@ zobJ!J_t0RxFNHH3D)%vW=VdZmIszC|)g??CCNnkxXBo4|V@%44gN*K@2FCSKCgbYw zPbYrMq{zgvlsRgV2N%ilc=-Q28U=ScA%We zjycYFDMvHr8G6j&2X?giDmf~|U?w@-!X|smc9X70Oh}DHH@;%$R{n$+bNPJ>efcBY z#ncR?9L310P}p6|*&jNhv8*dwd>kI=PsBw@^a!Huci

d}RV zw~D}^Cwb1)dtj2LQJg>bQ}GTVgmbkPn!vTt;R(DMU^7{K*0ci z!0R&qR*;n6H@}E)yQP39ufB@3Yu`+k%~GUZW$dGrwAWH$*AG(@6?>@pE*B_UTA9}R zU{9x=vZsW@g_QPDJ-W`YNt*69iBj14jY^e_rH15}QrpjFQ(hf9qg;pVtyPZpf5wlw78_kW6!`CdChxspy%n z$v2ILl&fGK6?I`Y6`5c}xq2?3Cb%x3*6b-Er#8D$g_3Y8kNbeMTAD~jUSCV~jQ1Sf z=j6PD5~FuNN!ERPB)M|`gQWJ|-b(ND&6Ux0QjW8H2=~{*Mko0w(ZGFC6hn3iFf3 zMg&Od;smMSLANwVTc0e-c_g*@*_Ry3NgL&6e_!F{7gg-tJ*~-mT7?>CuSE?f)9V~3 zB(6uI^Lk?Cl4C3%@c||7uP)oTRSnmG_g7VwK8`zO|BBPd`>DM4jZY{6l(RH|fWUt4nj7-G$I2?a3mJyI))#Ev z%N5GJ5$>N-HFr^4T3I{pE!!2kr>r#FzMQQXSDt+Hm-3LNC*|SX-zpCHc23m7Ig#N5%8C%MEMBl~ueiEuF^u%QI<- zuUG$leXoXN_MYUjH)XebkCb0Zl&iofQ3dH3R}m3eQ1R;4m5QuO4=ci$I~4)a^A!&b sHdj0&o|I21&GfR!EA}3|?Z#O@<{&3_V1TnsV~WrB+;E?fdjCJ-KiMsbc>n+a diff --git a/tests/data/test_srtmgl1_antimeridian_dem.tif b/tests/data/test_srtmgl1_antimeridian_dem.tif deleted file mode 100644 index 04cb1d8a384859d31412790b1dee853b5ad08ec2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6427 zcmeI#d9;n?9tZG!4jm~pBIG)9>6$Z76&gTG7i;+)tQItY;~SPb$l-?tv)Fu?}7i@-v@=$t`|8}$L}e6=sQmpO{Gpx`%dda zbspa-G`K)2URQDVkOHYJuUmL|Se{h2*CKxZ%wd_SlInKqyC!F*@~Qi%5C5Fo=wO|^ z9{%KLRfjrR=efS9e&%)B#)MV5?XXAaAkDxAvFx{TDw zGN}Ubx?oV=1Fz-Fw7;-*cHNrURkPc+X_4LUrk>rhtF<0*@Zx|}s&b9$b!yj1rT*Bz z^#9oz8*+2bq!QJrMLo_ThYM*&OWJTLow%CrTu(3ha61FIo1xstC?4T)#`84Kk@O^Y zZEj9SdNG72c!_`W3ETOdlh)5+)Utl_~Fqefa zVL2;V&1Xc9u9JNrqj`aae8g7v5q&8~^yOl@ll0|T^*mOwiC-uv>lxIhIakt`5lmz@ z%UQ>6^1F|c)SwBM(2kD89Jq!X=uMJ6#(x+|_Q%vQ_RkR6r;%h|piZ){Qh!WjU&m&) zvy}Ji?1CWHsCP zgHzn+KRB1x^dO#x(Zq8;{0i&?`r zWXe#K%H+_V8@Zc!HeMon@hKbG%D3#|7yclR`_0b@oI-I*QI^Pl7PV-=d0a?yB71wT zAj#fS-G@6E$Ph*_iZP63BGI4I<*my3Tt+YM;YnujHvi#!vScmJSv28tZe}QBnaNT< z=Le3Fs|2y{#M*K>v6l=bo|%~}Py zdviOHeK7a(Adm1Qk^NbwGLt#XXEDoI$%jOLa^>#G%?xE6GkJ%#>>{76C8^GZbfOQ# znZRsf&0WtPj+3<%HHp2qGqJXeU?Q)wf(`sc0rzqSwMpjvMe5dEO3eFiTu(1y4TI(Cy^)>70Y z_UL3Ux=;Nyb6CMfex{(TXHb`m=*k_8WHR%3pRf3h6WvEVAF&?9bI_ezNX9%vZ%wjGJSi*AN=VLx+16%lpUHroDMD}AT#Hkb~n{rg7I<<-U^Rzz9=3Uma zhd8@rlkCw~t8e20o?$Mr7j0%QC&(J-mIlQB9DDKr9_D#spNaJ~*3?)_V*QBcrV(w4 zc@c9VdJtp(BGWl?>{qz9nq=%Zs*|zbquxtOt}Kq{WKQF6l%YITs7ZYq(S+u-q61gZ zl^#TY;!L-JpAPjU&UAHYK^JakB$IiA57@#!PLef!FS^$IL5yJvi-zo+b8@*gv)s&&w(9qXMxOv?azr$$zUlJ^zE=k6|oNk>sDHp2tGo z;yoh&8rHIj?R>``ej{^}HJU=4LNT%_Met= z9j-_IpV&uw{=%}BAjw}@U4wcwwZyD;*lI{#(G_eoGx#Lq}kIi)7#W;&5#Cj2Hh`= zEX66!nH-tFiEHWkJA2=a>$#cR7|0MtFp4omf8uX|^!XC&%aMJFzm3!T^0W8Rm%me= zONg^;oLA$F8hu$z>_a=rD^m%olS^mf*}0Erh-V}Ev4wd4BY%eb%SS;Xf6V_ru^z^AP=<;`{<@q)%>T%L z3CaAA{MYek;~)9QGnpyOAj!W(y_^sDlrPxGHg>X`y+nV`k~#LHq%X1V#@|MNHeVXZ zePmyzxt8?hheLgdGg&R7FIRI1vA)Kd8vDq{d`--g)2K@Hp$(UF71wYhy@_YwUkqkA zBYA`;m`L;?nFq7guk$8LS;2>_A?d|d^|$Qe7ycm6=7T+nz9gFKMMruwoC&;2GG~(S P{q=gdXMgqLa4-G~vA7-% diff --git a/tests/test_entrypoints.py b/tests/test_entrypoints.py index a44f31b2..17ca0248 100644 --- a/tests/test_entrypoints.py +++ b/tests/test_entrypoints.py @@ -11,183 +11,41 @@ def test_gc_map_mod_script(script_runner): assert ret.success -def test_apply_wb_mask(script_runner): - ret = script_runner.run('apply_wb_mask.py', '-h') - assert ret.success - - def test_byteSigmaScale(script_runner): ret = script_runner.run('byteSigmaScale.py', '-h') assert ret.success -def test_copy_metadata(script_runner): - ret = script_runner.run('copy_metadata.py', '-h') - assert ret.success - - -def test_createAmp(script_runner): - ret = script_runner.run('createAmp.py', '-h') - assert ret.success - - -def test_cutGeotiffsByLine(script_runner): - ret = script_runner.run('cutGeotiffsByLine.py', '-h') - assert ret.success - - def test_cutGeotiffs(script_runner): ret = 
script_runner.run('cutGeotiffs.py', '-h') assert ret.success -def test_draw_polygon_on_raster(script_runner): - ret = script_runner.run('draw_polygon_on_raster.py', '-h') - assert ret.success - - -def test_dem2isce(script_runner): - ret = script_runner.run('dem2isce.py', '-h') - assert ret.success - - -def test_enh_lee_filter(script_runner): - ret = script_runner.run('enh_lee_filter.py', '-h') - assert ret.success - - -def test_extendDateline(script_runner): - ret = script_runner.run('extendDateline.py', '-h') - assert ret.success - - -def test_geotiff_lut(script_runner): - ret = script_runner.run('geotiff_lut.py', '-h') - assert ret.success - - -def test_get_bounding(script_runner): - ret = script_runner.run('get_bounding.py', '-h') - assert ret.success - - -def test_getDemFor(script_runner): - ret = script_runner.run('getDemFor.py', '-h') - assert ret.success - - def test_get_asf(script_runner): ret = script_runner.run('get_asf.py', '-h') assert ret.success -def test_get_dem(script_runner): - ret = script_runner.run('get_dem.py', '-h') - assert ret.success - - def test_get_orb(script_runner): ret = script_runner.run('get_orb.py', '-h') assert ret.success -def test_iscegeo2geotif(script_runner): - ret = script_runner.run('iscegeo2geotif.py', '-h') - assert ret.success - - -def test_make_arc_thumb(script_runner): - ret = script_runner.run('make_arc_thumb.py', '-h') - assert ret.success - - def test_makeAsfBrowse(script_runner): ret = script_runner.run('makeAsfBrowse.py', '-h') assert ret.success -def test_makeChangeBrowse(script_runner): - ret = script_runner.run('makeChangeBrowse.py', '-h') - assert ret.success - - -def test_make_cogs(script_runner): - ret = script_runner.run('make_cogs.py', '-h') - assert ret.success - - -def test_makeColorPhase(script_runner): - ret = script_runner.run('makeColorPhase.py', '-h') - assert ret.success - - -def test_makeKml(script_runner): - ret = script_runner.run('makeKml.py', '-h') - assert ret.success - - -def test_offset_xml(script_runner): - ret = script_runner.run('offset_xml.py', '-h') - assert ret.success - - -def test_ps2dem(script_runner): - ret = script_runner.run('ps2dem.py', '-h') - assert ret.success - - -def test_raster_boundary2shape(script_runner): - ret = script_runner.run('raster_boundary2shape.py', '-h') - assert ret.success - - -def test_rasterMask(script_runner): - ret = script_runner.run('rasterMask.py', '-h') - assert ret.success - - def test_resample_geotiff(script_runner): ret = script_runner.run('resample_geotiff.py', '-h') assert ret.success -def test_rtc2colordiff(script_runner): - ret = script_runner.run('rtc2colordiff.py', '-h') - assert ret.success - - -def test_rtc2color(script_runner): - ret = script_runner.run('rtc2color.py', '-h') - assert ret.success - - -def test_simplify_shapefile(script_runner): - ret = script_runner.run('simplify_shapefile.py', '-h') - assert ret.success - - def test_SLC_copy_S1_fullSW(script_runner): ret = script_runner.run('SLC_copy_S1_fullSW.py', '-h') assert ret.success -def test_subset_geotiff_shape(script_runner): - ret = script_runner.run('subset_geotiff_shape.py', '-h') - assert ret.success - - -def test_tileList2shape(script_runner): - ret = script_runner.run('tileList2shape.py', '-h') - assert ret.success - - def test_utm2dem(script_runner): ret = script_runner.run('utm2dem.py', '-h') assert ret.success - - -def test_verify_opod(script_runner): - ret = script_runner.run('verify_opod.py', '-h') - assert ret.success - - diff --git a/tests/test_getSubSwath.py b/tests/test_getSubSwath.py 
deleted file mode 100644 index a2b599fa..00000000 --- a/tests/test_getSubSwath.py +++ /dev/null @@ -1,6 +0,0 @@ -from hyp3lib.getSubSwath import get_bounding_box_file - - -def test_get_bounding_box_file(safe_data): - expected = (39.22924837602602, 36.96964287499973, -111.0340780928982, -114.513182762547) - assert get_bounding_box_file(safe_data) == expected diff --git a/tests/test_get_bounding.py b/tests/test_get_bounding.py deleted file mode 100644 index 4aab6321..00000000 --- a/tests/test_get_bounding.py +++ /dev/null @@ -1,80 +0,0 @@ -import os -import numpy as np - -from hyp3lib import get_bounding - - -def test_simple_box(): - box_xml = '1.0e+00\n' \ - '1.0e+00\n' \ - '\n' \ - '0.0e+00\n' \ - '0.0e+00\n' - - truth = (0., 1., 0., 1.) # lat_min, lat_max, lon_min, lon_max - - bound = get_bounding.get_bounding(box_xml) - - test = (bound['lat']['min'], bound['lat']['max'], - bound['lon']['min'], bound['lon']['max']) - - assert np.allclose(truth, test, atol=1e-4) - - -def test_box_with_negatives(): - box_xml = '-1.0e+00\n' \ - '-1.0e+00\n' \ - '\n' \ - '-0.3e+00\n' \ - '-0.3e+00\n' - - truth = (-1., -0.3, -1., -0.3) # lat_min, lat_max, lon_min, lon_max - - bound = get_bounding.get_bounding(box_xml) - - test = (bound['lat']['min'], bound['lat']['max'], - bound['lon']['min'], bound['lon']['max']) - - assert np.allclose(truth, test, atol=1e-4) - - -def test_annotation_file_sample(): - box_xml = '3.788855146424307e+01\n' \ - '-1.119597934146415e+02\n' \ - '\n' \ - '3.791710313604354e+01\n' \ - '-1.121557443533816e+02\n' - - truth = (37.88855, 37.91710, -112.15574, -111.95979) # lat_min, lat_max, lon_min, lon_max - - bound = get_bounding.get_bounding(box_xml) - - test = (bound['lat']['min'], bound['lat']['max'], - bound['lon']['min'], bound['lon']['max']) - - assert np.allclose(truth, test, atol=1e-4) - - -def test_annotation_file(safe_data): - with open(os.path.join(safe_data, 'annotation', 'test-swath-001.xml')) as f: - box_xml = f.read() - - truth = (37.11964, 38.78037, -112.54442, -111.184078) # lat_min, lat_max, lon_min, lon_max - - bound = get_bounding.get_bounding(box_xml) - - test = (bound['lat']['min'], bound['lat']['max'], - bound['lon']['min'], bound['lon']['max']) - - assert np.allclose(truth, test, atol=1e-4) - - -def test_granule_bounding(safe_data): - truth = (37.11964, 39.07924, -114.36318, -111.184078) # lat_min, lat_max, lon_min, lon_max - - bound = get_bounding.get_granule_bounding(safe_data) - - test = (bound['lat']['min'], bound['lat']['max'], - bound['lon']['min'], bound['lon']['max']) - - assert np.allclose(truth, test, atol=1e-4) diff --git a/tests/test_get_dem.py b/tests/test_get_dem.py deleted file mode 100644 index 3fbccd72..00000000 --- a/tests/test_get_dem.py +++ /dev/null @@ -1,129 +0,0 @@ -from filecmp import cmp -from os import chdir - -import pytest - -from hyp3lib import DemError -from hyp3lib.get_dem import get_dem, get_best_dem - - -def test_get_best_dem_no_coverage(): - # atlantic ocean, south of western africa - with pytest.raises(DemError): - get_best_dem(y_min=0, y_max=1, x_min=0, x_max=1) - - -def test_get_best_dem_ned13(): - # utah, western united states - name, projection, tile_list, wkt_list = get_best_dem(y_min=38.2, y_max=38.8, x_min=-110.8, x_max=-110.2) - assert name == 'NED13' - assert projection == 4269 - assert tile_list == ['n39w111'] - assert wkt_list == [ - 'POLYGON ((-111.000556 39.000556,-109.999444888884 39.000556,-109.999444888884 37.9994448888845,' - '-111.000556 37.9994448888845,-111.000556 39.000556))', - ] - - -def 
test_get_best_dem_srtmgl1(): - # democratic republic of the congo, southern africa - name, projection, tile_list, wkt_list = get_best_dem(y_min=-6.8, y_max=-6.2, x_min=27.2, x_max=27.8) - assert name == 'SRTMGL1' - assert projection == 4326 - assert tile_list == ['S07E027'] - assert wkt_list == [ - 'POLYGON ((26.999861 -5.999861,28.0001387777858 -5.999861,28.0001387777858 -7.00013877778578,' - '26.999861 -7.00013877778578,26.999861 -5.999861))', - ] - - -def test_get_best_dem_ned2(): - # alaska - name, projection, tile_list, wkt_list = get_best_dem(y_min=67.9, y_max=68.1, x_min=-155.6, x_max=-155.4) - assert name == 'NED2' - assert projection == 4269 - assert tile_list == ['n68w156', 'n69w156'] - assert wkt_list == [ - 'POLYGON ((-156.003333 68.003333,-154.996666333325 68.003333,-154.996666333325 66.9966663333253,' - '-156.003333 66.9966663333253,-156.003333 68.003333))', - 'POLYGON ((-156.003333 69.003333,-154.996666333325 69.003333,-154.996666333325 67.9966663333253,' - '-156.003333 67.9966663333253,-156.003333 69.003333))', - ] - - -def test_get_best_dem_specify_dem(): - # utah, western united states, has both NED13 and SRTMGL1 coverage - name, projection, tile_list, wkt_list = get_best_dem(y_min=38.2, y_max=38.8, x_min=-110.8, x_max=-110.2, dem_name='SRTMGL1') - assert name == 'SRTMGL1' - - # democratic republic of the congo, southern africa, has SRTMGL1 coverage but not NED13 coverage - with pytest.raises(DemError): - get_best_dem(y_min=-6.8, y_max=-6.2, x_min=27.2, x_max=27.8, dem_name='NED13') - - -def test_get_best_dem_just_missing_coverage_threshold(): - # northern russia - with pytest.raises(DemError): - get_best_dem(y_min=59.976, y_max=60.1, x_min=99.5, x_max=100.5) - - -def test_get_best_dem_just_passing_coverage_threshold(): - # northern russia - name, projection, tile_list, wkt_list = get_best_dem(y_min=59.975, y_max=60.1, x_min=99.5, x_max=100.5) - assert name == 'SRTMGL1' - assert projection == 4326 - assert tile_list == ['N59E099', 'N59E100'] - assert wkt_list == [ - 'POLYGON ((98.999861 60.000139,100.000138777786 60.000139,100.000138777786 58.9998612222142,' - '98.999861 58.9998612222142,98.999861 60.000139))', - 'POLYGON ((99.999861 60.000139,101.000138777786 60.000139,101.000138777786 58.9998612222142,' - '99.999861 58.9998612222142,99.999861 60.000139))', - ] - - -def test_get_best_dem_antimeridian(): - # aleutian islands - name, projection, tile_list, wkt_list = get_best_dem(y_min=51.3, y_max=51.7, x_min=-179.5, x_max=179.5) - assert name == 'SRTMGL1' - assert projection == 4326 - assert len(tile_list) == 241 - for tile in tile_list: - assert tile.startswith('N51') - assert len(wkt_list) == 241 - - -def test_get_best_dem_antimeridian_shifted(): - # aleutian islands - name, projection, tile_list, wkt_list = get_best_dem(y_min=51.3, y_max=51.7, x_min=179.5, x_max=180.5) - assert name == 'SRTMGL1' - assert projection == 4326 - assert tile_list == ['N51E179', 'N51W180'] - assert wkt_list == [ - 'POLYGON ((178.999861 52.000139,180.000138777786 52.000139,180.000138777786 50.9998612222142,' - '178.999861 50.9998612222142,178.999861 52.000139))', - 'POLYGON ((179.999861 52.000139,181.000138777786 52.000139,181.000138777786 50.9998612222142,' - '179.999861 50.9998612222142,179.999861 52.000139))', - ] - - -def test_get_dem_no_coverage(): - with pytest.raises(DemError): - get_dem(y_min=0, y_max=1, x_min=0, x_max=1, outfile='dem.tif', post=30.0) - - -def test_get_dem_ned13(tmp_path, test_data_folder): - chdir(tmp_path) - output_file = tmp_path / 'dem.tif' - name = 
get_dem(y_min=37.99, y_max=37.999, x_min=-123.02, x_max=-123.01, outfile=str(output_file), post=30.0) - assert name == 'NED13' - assert output_file.exists() - assert cmp(output_file, test_data_folder / 'test_ned13_dem.tif') - - -def test_get_dem_srtmgl1_antimeridian(tmp_path, test_data_folder): - chdir(tmp_path) - output_file = tmp_path / 'dem.tif' - name = get_dem(y_min=-18.415, y_max=-18.41, x_min=179.99, x_max=180.01, outfile=str(output_file), post=30.0) - assert name == 'SRTMGL1' - assert output_file.exists() - assert cmp(output_file, test_data_folder / 'test_srtmgl1_antimeridian_dem.tif') diff --git a/tests/test_make_cogs.py b/tests/test_make_cogs.py deleted file mode 100644 index abfdb3d9..00000000 --- a/tests/test_make_cogs.py +++ /dev/null @@ -1,33 +0,0 @@ -import os -import shutil - -from osgeo_utils.samples.validate_cloud_optimized_geotiff import validate - -from hyp3lib.make_cogs import cogify_dir, cogify_file - - -def _is_cog(filename): - warnings, errors, details = validate(filename, full_check=True) - return errors == [] - - -def test_make_cog(geotiff): - assert not _is_cog(geotiff) - cogify_file(geotiff) - assert _is_cog(geotiff) - - -def test_cogify_dir(geotiff): - base_dir = os.path.dirname(geotiff) - copy_names = [os.path.join(base_dir, '1.tif'), os.path.join(base_dir, '2.tif')] - - for name in copy_names: - shutil.copy(geotiff, name) - - # Only cogify our copied files - cogify_dir(base_dir, file_pattern='?.tif') - - for name in copy_names: - assert _is_cog(name) - - assert not _is_cog(geotiff) diff --git a/tests/test_system.py b/tests/test_system.py deleted file mode 100644 index f8d09926..00000000 --- a/tests/test_system.py +++ /dev/null @@ -1,96 +0,0 @@ -import logging -import os - -import pytest - -from hyp3lib import system - - -def test_gamma_version_var(): - gamma_version = '20200131' - os.environ['GAMMA_VERSION'] = gamma_version - - result = system.gamma_version() - os.environ.pop('GAMMA_VERSION') - - assert gamma_version == result - - -def test_bad_gamma_version_var(caplog): - with caplog.at_level(logging.WARNING): - gamma_version = '20202233' - os.environ['GAMMA_VERSION'] = gamma_version - - _ = system.gamma_version() - os.environ.pop('GAMMA_VERSION') - - assert 'does not conform to the expected YYYYMMDD format' in caplog.text - - -def test_no_gamma_home_var(): - with pytest.raises(KeyError): - os.environ.pop('GAMMA_VERSION', None) - os.environ.pop('GAMMA_HOME', None) - - _ = system.gamma_version() - - -def test_asf_gamma_version(tmp_path): - os.environ.pop('GAMMA_VERSION', None) - os.environ['GAMMA_HOME'] = str(tmp_path.resolve()) - - gamma_version = '20170707' - asf = tmp_path / 'ASF_Gamma_version.txt' - asf.write_text(gamma_version) - - result = system.gamma_version() - os.environ.pop('GAMMA_HOME') - - assert gamma_version == result - - -def test_bad_asf_gamma_version(caplog, tmp_path): - with caplog.at_level(logging.WARNING): - os.environ.pop('GAMMA_VERSION', None) - os.environ['GAMMA_HOME'] = str(tmp_path.resolve()) - - gamma_version = '20170732' - asf = tmp_path / 'ASF_Gamma_version.txt' - asf.write_text(gamma_version) - - _ = system.gamma_version() - os.environ.pop('GAMMA_HOME') - - assert 'does not conform to the expected YYYYMMDD format' in caplog.text - - -def test_gamma_direcory_parse(caplog, tmp_path): - with caplog.at_level(logging.WARNING): - os.environ.pop('GAMMA_VERSION', None) - - gamma_version = '20170707' - gamma_home = tmp_path / f'GAMMA_SOFTWARE-{gamma_version}' - gamma_home.mkdir() - os.environ['GAMMA_HOME'] = str(gamma_home.resolve()) - - 
result = system.gamma_version()
-        os.environ.pop('GAMMA_HOME')
-
-        assert 'No GAMMA_VERSION environment variable or ASF_Gamma_version.txt ' in caplog.text
-        assert gamma_version == result
-
-
-def test_bad_gamma_direcory_parse(caplog, tmp_path):
-    with caplog.at_level(logging.WARNING):
-        os.environ.pop('GAMMA_VERSION', None)
-
-        gamma_version = '20170732'
-        gamma_home = tmp_path / f'GAMMA_SOFTWARE-{gamma_version}'
-        gamma_home.mkdir()
-        os.environ['GAMMA_HOME'] = str(gamma_home.resolve())
-
-        _ = system.gamma_version()
-        os.environ.pop('GAMMA_HOME')
-
-        assert 'No GAMMA_VERSION environment variable or ASF_Gamma_version.txt ' in caplog.text
-        assert 'does not conform to the expected YYYYMMDD format' in caplog.text

From d675a591ae3ff87ae9eef25bbcbbb39bfa5a5fe4 Mon Sep 17 00:00:00 2001
From: Forrest Williams
Date: Thu, 4 Jan 2024 09:29:14 -0600
Subject: [PATCH 02/10] update changelog

---
 CHANGELOG.md | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 63d74e77..ead371e6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
 and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [3.0.0]
+
+### Removed
+* All hyp3lib functionality not currently being used by one of these GitHub orgs: ASFHyP3, asfadmin, ASFOpenSARLab, access-cloud-based-insar, dbekaert. For a full list of the deleted files, see [here](https://github.com/ASFHyP3/hyp3-lib/pull/286).
+
 ## [2.0.2]
 
 ### Fixed
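Both this patch and the next one edit the package's `console_scripts` table, where each entry installs a shell command that simply imports and calls the named function. As background, a minimal sketch of the mechanism, using a hypothetical package `mytools` (none of these names are from hyp3lib):

# Hypothetical setup.py: after `pip install .`, a command named
# hello.py appears on PATH and invokes mytools.hello:main().
from setuptools import setup

setup(
    name='mytools',
    version='0.1.0',
    packages=['mytools'],
    entry_points={'console_scripts': [
        # '<command> = <importable.module>:<function>'
        'hello.py = mytools.hello:main',
    ]},
)

Deleting an entry from this table only removes the installed command; the modules themselves were deleted in patch 01.

From 7fc8edb8e23d063ea1124005ffaad40c6deef589 Mon Sep 17 00:00:00 2001
From: Forrest Williams
Date: Thu, 4 Jan 2024 09:34:45 -0600
Subject: [PATCH 03/10] update setup.py entrypoints

---
 setup.py | 28 ----------------------------
 1 file changed, 28 deletions(-)

diff --git a/setup.py b/setup.py
index 7383cd7c..468763ba 100644
--- a/setup.py
+++ b/setup.py
@@ -73,42 +73,14 @@
     scripts=['scripts/GC_map_mod'],
 
     entry_points={'console_scripts': [
-        'apply_wb_mask.py = hyp3lib.apply_wb_mask:main',
         'byteSigmaScale.py = hyp3lib.byteSigmaScale:main',
-        'copy_metadata.py = hyp3lib.copy_metadata:main',
-        'createAmp.py = hyp3lib.createAmp:main',
-        'cutGeotiffsByLine.py = hyp3lib.cutGeotiffsByLine:main',
         'cutGeotiffs.py = hyp3lib.cutGeotiffs:main',
-        'draw_polygon_on_raster.py = hyp3lib.draw_polygon_on_raster:main',
-        'dem2isce.py = hyp3lib.dem2isce:main',
-        'enh_lee_filter.py = hyp3lib.enh_lee_filter:main',
-        'extendDateline.py = hyp3lib.extendDateline:main',
-        'geotiff_lut.py = hyp3lib.geotiff_lut:main',
-        'get_bounding.py = hyp3lib.get_bounding:main',
-        'getDemFor.py = hyp3lib.getDemFor:main',
         'get_asf.py = hyp3lib.get_asf:main',
-        'get_dem.py = hyp3lib.get_dem:main',
         'get_orb.py = hyp3lib.get_orb:main',
-        'iscegeo2geotif.py = hyp3lib.iscegeo2geotif:main',
-        'make_arc_thumb.py = hyp3lib.make_arc_thumb:main',
         'makeAsfBrowse.py = hyp3lib.makeAsfBrowse:main',
-        'makeChangeBrowse.py = hyp3lib.makeChangeBrowse:main',
-        'make_cogs.py = hyp3lib.make_cogs:main',
-        'makeColorPhase.py = hyp3lib.makeColorPhase:main',
-        'makeKml.py = hyp3lib.makeKml:main',
-        'offset_xml.py = hyp3lib.offset_xml:main',
-        'ps2dem.py = hyp3lib.ps2dem:main',
-        'raster_boundary2shape.py = hyp3lib.raster_boundary2shape:main',
-        'rasterMask.py = hyp3lib.rasterMask:main',
         'resample_geotiff.py = hyp3lib.resample_geotiff:main',
-        'rtc2colordiff.py = hyp3lib.rtc2colordiff:main',
-        'rtc2color.py = hyp3lib.rtc2color:main',
-        'simplify_shapefile.py = 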
hyp3lib.simplify_shapefile:main', 'SLC_copy_S1_fullSW.py = hyp3lib.SLC_copy_S1_fullSW:main', - 'subset_geotiff_shape.py = hyp3lib.subset_geotiff_shape:main', - 'tileList2shape.py = hyp3lib.tileList2shape:main', 'utm2dem.py = hyp3lib.utm2dem:main', - 'verify_opod.py = hyp3lib.verify_opod:main', ] }, From fe1b40f36216d2825933d3fa42a14835a7e50601 Mon Sep 17 00:00:00 2001 From: Forrest Williams Date: Thu, 4 Jan 2024 15:51:40 -0600 Subject: [PATCH 04/10] readd missed modules --- setup.py | 4 ++++ tests/test_entrypoints.py | 20 ++++++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/setup.py b/setup.py index 468763ba..f3794671 100644 --- a/setup.py +++ b/setup.py @@ -74,11 +74,15 @@ entry_points={'console_scripts': [ 'byteSigmaScale.py = hyp3lib.byteSigmaScale:main', + 'createAmp.py = hyp3lib.createAmp:main', 'cutGeotiffs.py = hyp3lib.cutGeotiffs:main', 'get_asf.py = hyp3lib.get_asf:main', 'get_orb.py = hyp3lib.get_orb:main', 'makeAsfBrowse.py = hyp3lib.makeAsfBrowse:main', + 'make_cogs.py = hyp3lib.make_cogs:main', + 'raster_boundary2shape.py = hyp3lib.raster_boundary2shape:main', 'resample_geotiff.py = hyp3lib.resample_geotiff:main', + 'rtc2color.py = hyp3lib.rtc2color:main', 'SLC_copy_S1_fullSW.py = hyp3lib.SLC_copy_S1_fullSW:main', 'utm2dem.py = hyp3lib.utm2dem:main', ] diff --git a/tests/test_entrypoints.py b/tests/test_entrypoints.py index 17ca0248..1d903df6 100644 --- a/tests/test_entrypoints.py +++ b/tests/test_entrypoints.py @@ -16,6 +16,11 @@ def test_byteSigmaScale(script_runner): assert ret.success +def test_createAmp(script_runner): + ret = script_runner.run('createAmp.py', '-h') + assert ret.success + + def test_cutGeotiffs(script_runner): ret = script_runner.run('cutGeotiffs.py', '-h') assert ret.success @@ -36,11 +41,26 @@ def test_makeAsfBrowse(script_runner): assert ret.success +def test_make_cogs(script_runner): + ret = script_runner.run('make_cogs.py', '-h') + assert ret.success + + +def test_raster_boundary2shape(script_runner): + ret = script_runner.run('raster_boundary2shape.py', '-h') + assert ret.success + + def test_resample_geotiff(script_runner): ret = script_runner.run('resample_geotiff.py', '-h') assert ret.success +def test_rtc2color(script_runner): + ret = script_runner.run('rtc2color.py', '-h') + assert ret.success + + def test_SLC_copy_S1_fullSW(script_runner): ret = script_runner.run('SLC_copy_S1_fullSW.py', '-h') assert ret.success From 770873dd00271c6ea156bb15f1852c2365a2e61b Mon Sep 17 00:00:00 2001 From: Forrest Williams Date: Thu, 4 Jan 2024 15:55:31 -0600 Subject: [PATCH 05/10] readd missed modules 2 --- hyp3lib/asf_geometry.py | 933 +++++++++++++++++++++++++++++++ hyp3lib/asf_time_series.py | 452 +++++++++++++++ hyp3lib/createAmp.py | 33 ++ hyp3lib/make_cogs.py | 65 +++ hyp3lib/raster_boundary2shape.py | 90 +++ hyp3lib/rtc2color.py | 225 ++++++++ hyp3lib/system.py | 58 ++ tests/test_make_cogs.py | 33 ++ tests/test_system.py | 96 ++++ 9 files changed, 1985 insertions(+) create mode 100644 hyp3lib/asf_geometry.py create mode 100644 hyp3lib/asf_time_series.py create mode 100755 hyp3lib/createAmp.py create mode 100755 hyp3lib/make_cogs.py create mode 100755 hyp3lib/raster_boundary2shape.py create mode 100755 hyp3lib/rtc2color.py create mode 100644 hyp3lib/system.py create mode 100644 tests/test_make_cogs.py create mode 100644 tests/test_system.py diff --git a/hyp3lib/asf_geometry.py b/hyp3lib/asf_geometry.py new file mode 100644 index 00000000..fff90443 --- /dev/null +++ b/hyp3lib/asf_geometry.py @@ -0,0 +1,933 @@ +import csv +import os + +import 
numpy as np
+from osgeo import gdal, ogr, osr
+from osgeo.gdalconst import GA_ReadOnly
+from scipy import ndimage
+
+from hyp3lib import GeometryError
+from hyp3lib.saa_func_lib import get_zone
+
+
+# Determine the boundary polygon of a GeoTIFF file
+def geotiff2polygon_ext(geotiff):
+
+    raster = gdal.Open(geotiff)
+    proj = osr.SpatialReference()
+    proj.ImportFromWkt(raster.GetProjectionRef())
+    gt = raster.GetGeoTransform()
+    originX = gt[0]
+    originY = gt[3]
+    pixelWidth = gt[1]
+    pixelHeight = gt[5]
+    cols = raster.RasterXSize
+    rows = raster.RasterYSize
+    polygon = ogr.Geometry(ogr.wkbPolygon)
+    ring = ogr.Geometry(ogr.wkbLinearRing)
+    ring.AddPoint_2D(originX, originY)
+    ring.AddPoint_2D(originX + cols*pixelWidth, originY)
+    ring.AddPoint_2D(originX + cols*pixelWidth, originY + rows*pixelHeight)
+    ring.AddPoint_2D(originX, originY + rows*pixelHeight)
+    ring.AddPoint_2D(originX, originY)
+    polygon.AddGeometry(ring)
+    ring = None
+    raster = None
+
+    return (polygon, proj)
+
+
+def geotiff2polygon(geotiff):
+
+    (polygon, proj) = geotiff2polygon_ext(geotiff)
+    return polygon
+
+
+def geotiff2boundary_mask(inGeotiff, tsEPSG, threshold, use_closing=True):
+
+    inRaster = gdal.Open(inGeotiff)
+    proj = osr.SpatialReference()
+    proj.ImportFromWkt(inRaster.GetProjectionRef())
+    if proj.GetAttrValue('AUTHORITY', 0) == 'EPSG':
+        epsg = int(proj.GetAttrValue('AUTHORITY', 1))
+
+    if tsEPSG != 0 and epsg != tsEPSG:
+        print('Reprojecting ...')
+        inRaster = reproject2grid(inRaster, tsEPSG)
+        proj.ImportFromWkt(inRaster.GetProjectionRef())
+        if proj.GetAttrValue('AUTHORITY', 0) == 'EPSG':
+            epsg = int(proj.GetAttrValue('AUTHORITY', 1))
+
+    geoTrans = inRaster.GetGeoTransform()
+    inBand = inRaster.GetRasterBand(1)
+    noDataValue = inBand.GetNoDataValue()
+    data = inBand.ReadAsArray()
+    minValue = np.min(data)
+
+    ### Check for black fill
+    if minValue > 0:
+        data /= data
+        colFirst = 0
+        rowFirst = 0
+    else:
+        data[np.isnan(data)==True] = noDataValue
+        if threshold is not None:
+            print('Applying threshold ({0}) ...'.format(threshold))
+            data[data<float(threshold)] = noDataValue
+        data[data>noDataValue] = 1
+        if use_closing:
+            data = ndimage.binary_closing(data, iterations=10,
+                                          structure=np.ones((3,3))).astype(data.dtype)
+    inRaster = None
+
+    (data, colFirst, rowFirst, geoTrans) = cut_blackfill(data, geoTrans)
+
+    return (data, colFirst, rowFirst, geoTrans, proj)
+
+
+def reproject2grid(inRaster, tsEPSG, xRes = None ):
+
+    # Read basic metadata
+    geoTrans = inRaster.GetGeoTransform()
+    proj = osr.SpatialReference()
+    proj.ImportFromEPSG(tsEPSG)
+
+    # Define warping options
+    rasterFormat = 'VRT'
+    if xRes is None:
+        xRes = geoTrans[1]
+    yRes = xRes
+    resampleAlg = gdal.GRA_Bilinear
+    options = ['COMPRESS=DEFLATE']
+
+    outRaster = gdal.Warp('', inRaster, format=rasterFormat, dstSRS=proj,
+                          targetAlignedPixels=True, xRes=xRes, yRes=yRes, resampleAlg=resampleAlg,
+                          options=options)
+    inRaster = None
+
+    return outRaster
+
+
+def cut_blackfill(data, geoTrans):
+
+    originX = geoTrans[0]
+    originY = geoTrans[3]
+    pixelSize = geoTrans[1]
+    colProfile = list(data.max(axis=1))
+    rows = colProfile.count(1)
+    rowFirst = colProfile.index(1)
+    rowProfile = list(data.max(axis=0))
+    cols = rowProfile.count(1)
+    colFirst = rowProfile.index(1)
+    originX += colFirst*pixelSize
+    originY -= rowFirst*pixelSize
+    data = data[rowFirst:rows+rowFirst,colFirst:cols+colFirst]
+    geoTrans = (originX, pixelSize, 0, originY, 0, -pixelSize)
+
+    return (data, colFirst, rowFirst, geoTrans)
+
+
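Everything in this module leans on the GDAL geotransform convention, where a six-element tuple maps pixel indices to map coordinates. As a point of reference, a minimal self-contained sketch of that mapping (not part of the patch; the geotransform and raster size below are hypothetical):

# A GDAL geotransform gt maps pixel (col, row) to map coordinates:
#   x = gt[0] + col*gt[1] + row*gt[2]
#   y = gt[3] + col*gt[4] + row*gt[5]
# which is how geotiff2polygon_ext above walks the raster corners.

def pixel_to_map(gt, col, row):
    """Map a (col, row) pixel index to (x, y) map coordinates."""
    x = gt[0] + col * gt[1] + row * gt[2]
    y = gt[3] + col * gt[4] + row * gt[5]
    return x, y

gt = (400000.0, 30.0, 0.0, 6100000.0, 0.0, -30.0)  # hypothetical 30 m UTM grid
cols, rows = 100, 80
assert pixel_to_map(gt, 0, 0) == (400000.0, 6100000.0)        # upper-left corner
assert pixel_to_map(gt, cols, rows) == (403000.0, 6097600.0)  # lower-right corner

+def geotiff_overlap(firstFile, secondFile, method):
+
+    # Check map projections
+    raster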
= gdal.Open(firstFile) + proj = raster.GetProjection() + gt = raster.GetGeoTransform() + pixelSize = gt[1] + raster = None + + # Extract boundary polygons + firstPolygon = geotiff2polygon(firstFile) + secondPolygon = geotiff2polygon(secondFile) + + if method == 'intersection': + overlap = firstPolygon.Intersection(secondPolygon) + elif method == 'union': + overlap = firstPolygon.Union(secondPolygon) + + return (firstPolygon, secondPolygon, overlap, proj, pixelSize) + + +def overlap_indices(polygon, boundary, pixelSize): + + polyEnv = polygon.GetEnvelope() + boundEnv = boundary.GetEnvelope() + xOff = int((boundEnv[0] - polyEnv[0]) / pixelSize) + yOff = int((polyEnv[3] - boundEnv[3]) / pixelSize) + xCount = int((boundEnv[1] - boundEnv[0]) / pixelSize) + yCount = int((boundEnv[3] - boundEnv[2]) / pixelSize) + + return (xOff, yOff, xCount, yCount) + + +# Extract geometry from shapefile +def shape2geometry(shapeFile, field): + + name = [] + fields = [] + driver = ogr.GetDriverByName('ESRI Shapefile') + shape = driver.Open(shapeFile, 0) + multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) + layer = shape.GetLayer() + spatialRef = layer.GetSpatialRef() + layerDef = layer.GetLayerDefn() + for i in range(layerDef.GetFieldCount()): + fields.append(layerDef.GetFieldDefn(i).GetName()) + if field not in fields: + return (None, None, None) + for feature in layer: + geometry = feature.GetGeometryRef() + count = geometry.GetGeometryCount() + if geometry.GetGeometryName() == 'MULTIPOLYGON': + for i in range(0, count): + polygon = geometry.GetGeometryRef(i) + multipolygon.AddGeometry(polygon) + name.append(feature.GetField(field)) + else: + multipolygon.AddGeometry(geometry) + name.append(feature.GetField(field)) + shape.Destroy() + + return (multipolygon, spatialRef, name) + + +def shape2geometry_ext(shapeFile): + + values = [] + fields = [] + driver = ogr.GetDriverByName('ESRI Shapefile') + shape = driver.Open(shapeFile, 0) + layer = shape.GetLayer() + spatialRef = layer.GetSpatialRef() + layerDef = layer.GetLayerDefn() + featureCount = layerDef.GetFieldCount() + for ii in range(featureCount): + field = {} + field['name'] = layerDef.GetFieldDefn(ii).GetName() + field['type'] = layerDef.GetFieldDefn(ii).GetType() + if field['type'] == ogr.OFTString: + field['width'] = layerDef.GetFieldDefn(ii).GetWidth() + fields.append(field) + for feature in layer: + multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) + geometry = feature.GetGeometryRef() + count = geometry.GetGeometryCount() + if geometry.GetGeometryName() == 'MULTIPOLYGON': + for i in range(0, count): + polygon = geometry.GetGeometryRef(i) + multipolygon.AddGeometry(polygon) + else: + multipolygon.AddGeometry(geometry) + value = {} + for field in fields: + value[field['name']] = feature.GetField(field['name']) + value['geometry'] = multipolygon + values.append(value) + shape.Destroy() + + return (fields, values, spatialRef) + + +# Save geometry with fields to shapefile +def geometry2shape(fields, values, spatialRef, merge, shapeFile): + + driver = ogr.GetDriverByName('ESRI Shapefile') + if os.path.exists(shapeFile): + driver.DeleteDataSource(shapeFile) + outShape = driver.CreateDataSource(shapeFile) + outLayer = outShape.CreateLayer('layer', srs=spatialRef) + for field in fields: + fieldDefinition = ogr.FieldDefn(field['name'], field['type']) + if field['type'] == ogr.OFTString: + fieldDefinition.SetWidth(field['width']) + elif field['type'] == ogr.OFTReal: + fieldDefinition.SetWidth(24) + fieldDefinition.SetPrecision(8) + 
outLayer.CreateField(fieldDefinition) + featureDefinition = outLayer.GetLayerDefn() + if merge == True: + combine = ogr.Geometry(ogr.wkbMultiPolygon) + for value in values: + combine = combine.Union(value['geometry']) + outFeature = ogr.Feature(featureDefinition) + for field in fields: + name = field['name'] + outFeature.SetField(name, 'multipolygon') + outFeature.SetGeometry(combine) + outLayer.CreateFeature(outFeature) + outFeature.Destroy() + else: + for value in values: + outFeature = ogr.Feature(featureDefinition) + for field in fields: + name = field['name'] + outFeature.SetField(name, value[name]) + outFeature.SetGeometry(value['geometry']) + outLayer.CreateFeature(outFeature) + outFeature.Destroy() + outShape.Destroy() + + +# Save data with fields to shapefile +def data_geometry2shape_ext(data, fields, values, spatialRef, geoTrans, + classes, threshold, background, shapeFile): + + # Check input + if threshold is not None: + threshold = float(threshold) + if background is not None: + background = int(background) + + # Buffer data + (rows, cols) = data.shape + pixelSize = geoTrans[1] + originX = geoTrans[0] - 10*pixelSize + originY = geoTrans[3] + 10*pixelSize + geoTrans = (originX, pixelSize, 0, originY, 0, -pixelSize) + mask = np.zeros((rows+20, cols+20), dtype=np.float32) + mask[10:rows+10,10:cols+10] = data + data = mask + + # Save in memory + (rows, cols) = data.shape + data = data.astype(np.byte) + gdalDriver = gdal.GetDriverByName('Mem') + outRaster = gdalDriver.Create('value', cols, rows, 1, gdal.GDT_Byte) + outRaster.SetGeoTransform(geoTrans) + outRaster.SetProjection(spatialRef.ExportToWkt()) + outBand = outRaster.GetRasterBand(1) + outBand.WriteArray(data) + + # Write data to shapefile + driver = ogr.GetDriverByName('ESRI Shapefile') + if os.path.exists(shapeFile): + driver.DeleteDataSource(shapeFile) + outShape = driver.CreateDataSource(shapeFile) + outLayer = outShape.CreateLayer('polygon', srs=spatialRef) + outField = ogr.FieldDefn('value', ogr.OFTInteger) + outLayer.CreateField(outField) + gdal.Polygonize(outBand, None, outLayer, 0, [], callback=None) + for field in fields: + fieldDefinition = ogr.FieldDefn(field['name'], field['type']) + if field['type'] == ogr.OFTString: + fieldDefinition.SetWidth(field['width']) + outLayer.CreateField(fieldDefinition) + fieldDefinition = ogr.FieldDefn('area', ogr.OFTReal) + fieldDefinition.SetWidth(16) + fieldDefinition.SetPrecision(3) + outLayer.CreateField(fieldDefinition) + fieldDefinition = ogr.FieldDefn('centroid', ogr.OFTString) + fieldDefinition.SetWidth(50) + outLayer.CreateField(fieldDefinition) + if classes: + fieldDefinition = ogr.FieldDefn('size', ogr.OFTString) + fieldDefinition.SetWidth(25) + outLayer.CreateField(fieldDefinition) + _ = outLayer.GetLayerDefn() + for outFeature in outLayer: + for value in values: + for field in fields: + name = field['name'] + outFeature.SetField(name, value[name]) + cValue = outFeature.GetField('value') + fill = False + if cValue == 0: + fill = True + if background is not None and cValue == background: + fill = True + geometry = outFeature.GetGeometryRef() + area = float(geometry.GetArea()) + outFeature.SetField('area', area) + if classes: + for ii in range(len(classes)): + if area > classes[ii]['minimum'] and area < classes[ii]['maximum']: + outFeature.SetField('size',classes[ii]['class']) + centroid = geometry.Centroid().ExportToWkt() + outFeature.SetField('centroid', centroid) + if fill == False and area > threshold: + outLayer.SetFeature(outFeature) + else: + 
outLayer.DeleteFeature(outFeature.GetFID()) + outShape.Destroy() + + +def data_geometry2shape(data, fields, values, spatialRef, geoTrans, shapeFile): + + return data_geometry2shape_ext(data, fields, values, spatialRef, geoTrans, + None, 0, None, shapeFile) + + +def geotiff2data(inGeotiff): + + inRaster = gdal.Open(inGeotiff) + proj = osr.SpatialReference() + proj.ImportFromWkt(inRaster.GetProjectionRef()) + if proj.GetAttrValue('AUTHORITY', 0) == 'EPSG': + epsg = int(proj.GetAttrValue('AUTHORITY', 1)) + geoTrans = inRaster.GetGeoTransform() + inBand = inRaster.GetRasterBand(1) + noData = inBand.GetNoDataValue() + data = inBand.ReadAsArray() + if data.dtype == np.uint8: + dtype = 'BYTE' + elif data.dtype == np.float32: + dtype = 'FLOAT' + elif data.dtype == np.float64: + dtype = 'DOUBLE' + + return (data, geoTrans, proj, epsg, dtype, noData) + + +def data2geotiff(data, geoTrans, proj, dtype, noData, outFile): + + (rows, cols) = data.shape + gdalDriver = gdal.GetDriverByName('GTiff') + if dtype == 'BYTE': + outRaster = gdalDriver.Create(outFile, cols, rows, 1, gdal.GDT_Byte, + ['COMPRESS=DEFLATE']) + elif dtype == 'FLOAT': + outRaster = gdalDriver.Create(outFile, cols, rows, 1, gdal.GDT_Float32, + ['COMPRESS=DEFLATE']) + outRaster.SetGeoTransform(geoTrans) + outRaster.SetProjection(proj.ExportToWkt()) + outBand = outRaster.GetRasterBand(1) + outBand.SetNoDataValue(noData) + outBand.WriteArray(data) + outRaster = None + + +# Save raster information (fields, values) to CSV file +def raster2csv(fields, values, csvFile): + + header = [] + for field in fields: + header.append(field['name']) + line = [] + for value in values: + for field in fields: + name = field['name'] + line.append(value[name]) + + with open(csvFile, 'wb') as outF: + writer = csv.writer(outF, delimiter=';') + writer.writerow(header) + writer.writerow(line) + + +# Combine all geometries in a list +def union_geometries(geometries): + + combine = ogr.Geometry(ogr.wkbMultiPolygon) + for geometry in geometries: + combine = combine.Union(geometry) + + return combine + + +def spatial_query(source, reference, function): + + # Extract information from tiles and boundary shapefiles + (geoTile, spatialRef, nameTile) = shape2geometry(reference, 'tile') + if geoTile is None: + raise GeometryError(f'Could not extract information (tile) out of shapefile {reference}') + (boundary, spatialRef, granule) = shape2geometry(source, 'granule') + if boundary is None: + raise GeometryError(f'Could not extract information (granule) out of shapefile {source}') + + # Perform the spatial analysis + i = 0 + tile = [] + multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) + for geo in geoTile: + for bound in boundary: + if function == 'intersects': + intersection = bound.Intersection(geo) + if intersection.GetGeometryName() == 'POLYGON': + if nameTile[i] not in tile: + tile.append(nameTile[i]) + multipolygon.AddGeometry(geo) + i = i + 1 + + return (multipolygon, tile) + + +# Converted geometry from projected to geographic +def geometry_proj2geo(inMultipolygon, inSpatialRef): + + outSpatialRef = osr.SpatialReference() + outSpatialRef.ImportFromEPSG(4326) + coordTrans = osr.CoordinateTransformation(inSpatialRef, outSpatialRef) + outMultipolygon = ogr.Geometry(ogr.wkbMultiPolygon) + for polygon in inMultipolygon: + if inSpatialRef != outSpatialRef: + polygon.Transform(coordTrans) + outMultipolygon.AddGeometry(polygon) + + return (outMultipolygon, outSpatialRef) + +# Convert corner points from geographic to UTM projection +def 
geometry_geo2proj(lat_max,lat_min,lon_max,lon_min):
+
+    zone = get_zone(lon_min,lon_max)
+    if (lat_min+lat_max)/2 > 0:
+        proj = ('326%02d' % int(zone))
+    else:
+        proj = ('327%02d' % int(zone))
+
+    inSpatialRef = osr.SpatialReference()
+    inSpatialRef.ImportFromEPSG(4326)
+    outSpatialRef = osr.SpatialReference()
+    outSpatialRef.ImportFromEPSG(int(proj))
+    coordTrans = osr.CoordinateTransformation(inSpatialRef,outSpatialRef)
+
+    x1, y1, h = coordTrans.TransformPoint(lon_max, lat_min)
+    x2, y2, h = coordTrans.TransformPoint(lon_min, lat_min)
+    x3, y3, h = coordTrans.TransformPoint(lon_max, lat_max)
+    x4, y4, h = coordTrans.TransformPoint(lon_min, lat_max)
+
+    y_min = min(y1,y2,y3,y4)
+    y_max = max(y1,y2,y3,y4)
+    x_min = min(x1,x2,x3,x4)
+    x_max = max(x1,x2,x3,x4)
+
+    # false_easting = outSpatialRef.GetProjParm(osr.SRS_PP_FALSE_EASTING)
+    false_northing = outSpatialRef.GetProjParm(osr.SRS_PP_FALSE_NORTHING)
+
+    return zone, false_northing, y_min, y_max, x_min, x_max
+
+
+def reproject_corners(corners, posting, inEPSG, outEPSG):
+
+    # Reproject coordinates
+    inProj = osr.SpatialReference()
+    inProj.ImportFromEPSG(inEPSG)
+    outProj = osr.SpatialReference()
+    outProj.ImportFromEPSG(outEPSG)
+    transform = osr.CoordinateTransformation(inProj, outProj)
+    corners.Transform(transform)
+
+    # Get extent and round to even coordinates
+    (minX, maxX, minY, maxY) = corners.GetEnvelope()
+    #posting = inGT[1]
+    minX = np.ceil(minX/posting)*posting
+    minY = np.ceil(minY/posting)*posting
+    maxX = np.ceil(maxX/posting)*posting
+    maxY = np.ceil(maxY/posting)*posting
+
+    # Add points to multiPoint
+    corners = ogr.Geometry(ogr.wkbMultiPoint)
+    ul = ogr.Geometry(ogr.wkbPoint)
+    ul.AddPoint(minX, maxY)
+    corners.AddGeometry(ul)
+    ll = ogr.Geometry(ogr.wkbPoint)
+    ll.AddPoint(minX, minY)
+    corners.AddGeometry(ll)
+    ur = ogr.Geometry(ogr.wkbPoint)
+    ur.AddPoint(maxX, maxY)
+    corners.AddGeometry(ur)
+    lr = ogr.Geometry(ogr.wkbPoint)
+    lr.AddPoint(maxX, minY)
+    corners.AddGeometry(lr)
+
+    return corners
+
+
+def reproject_extent(minX, maxX, minY, maxY, posting, inEPSG, outEPSG):
+
+    # Add points to multiPoint
+    corners = ogr.Geometry(ogr.wkbMultiPoint)
+    ul = ogr.Geometry(ogr.wkbPoint)
+    ul.AddPoint(minX, maxY)
+    corners.AddGeometry(ul)
+    ll = ogr.Geometry(ogr.wkbPoint)
+    ll.AddPoint(minX, minY)
+    corners.AddGeometry(ll)
+    ur = ogr.Geometry(ogr.wkbPoint)
+    ur.AddPoint(maxX, maxY)
+    corners.AddGeometry(ur)
+    lr = ogr.Geometry(ogr.wkbPoint)
+    lr.AddPoint(maxX, minY)
+    corners.AddGeometry(lr)
+
+    # Re-project corners, keeping the rounded result
+    corners = reproject_corners(corners, posting, inEPSG, outEPSG)
+
+    # Extract min/max values
+    return corners.GetEnvelope()
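The `'326%02d'`/`'327%02d'` strings in geometry_geo2proj above assemble standard WGS84/UTM EPSG codes: 326xx for northern-hemisphere zones and 327xx for southern, where xx is the 6-degree zone number derived from longitude. A small self-contained sketch of that arithmetic (illustrative only; utm_epsg is not a hyp3lib function):

def utm_epsg(lat, lon):
    """Return the WGS84/UTM EPSG code for the zone containing (lat, lon)."""
    zone = int((lon + 180.0) / 6.0 + 1.0)  # 6-degree zones; zone 1 starts at 180 W
    return (32600 if lat >= 0 else 32700) + zone

assert utm_epsg(64.8, -147.7) == 32606  # interior Alaska -> UTM zone 6N
assert utm_epsg(-6.5, 27.5) == 32735    # southern DR Congo -> UTM zone 35S

The same zone formula appears in the deleted rtc2colordiff.py earlier in this series, where check_projection derives a zone from each image's centroid longitude.

+def raster_meta(rasterFile):
+
+    raster = gdal.Open(rasterFile)
+    spatialRef = osr.SpatialReference()
+    spatialRef.ImportFromWkt(raster.GetProjectionRef())
+    gt = raster.GetGeoTransform()
+    shape = [ raster.RasterYSize, raster.RasterXSize ]
+    pixel = raster.GetMetadataItem('AREA_OR_POINT')
+    raster = None
+
+    return (spatialRef, gt, shape, pixel)
+
+
+def overlapMask(meta, maskShape, invert, outFile):
+
+    ### Extract metadata
+    posting = meta['pixelSize']
+    # proj = meta['proj']
+    imageEPSG = meta['epsg']
+    multiBoundary = meta['boundary']
+    dataRows = meta['rows']
+    dataCols = meta['cols']
+    geoEPSG = 4326
+
+    ### Extract mask polygon
+    ogrDriver = ogr.GetDriverByName('ESRI Shapefile')
+    inShape = ogrDriver.Open(maskShape)
+    outLayer = inShape.GetLayer()
+    outProj = outLayer.GetSpatialRef()
+    outEPSG = int(outProj.GetAttrValue('AUTHORITY', 1))
+    if geoEPSG != outEPSG:
+        raise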
GeometryError(f'Expecting mask file with EPSG code: {geoEPSG}') + + ### Define re-projection from geographic to UTM + inProj = osr.SpatialReference() + inProj.ImportFromEPSG(4326) + outProj = osr.SpatialReference() + outProj.ImportFromEPSG(imageEPSG) + transform = osr.CoordinateTransformation(inProj, outProj) + + ### Loop through features + for boundary in multiBoundary: + for feature in outLayer: + outMultipolygon = ogr.Geometry(ogr.wkbMultiPolygon) + inMultiPolygon = feature.GetGeometryRef() + for polygon in inMultiPolygon: + overlap = boundary.Intersection(polygon) + if 'POLYGON' in overlap.ExportToWkt(): + overlap.Transform(transform) + outMultipolygon.AddGeometry(overlap) + + ### Save intersection polygon in memory + spatialRef = osr.SpatialReference() + spatialRef.ImportFromEPSG(imageEPSG) + memDriver = ogr.GetDriverByName('Memory') + outVector = memDriver.CreateDataSource('mem') + outLayer = outVector.CreateLayer('', spatialRef, ogr.wkbMultiPolygon) + outLayer.CreateField(ogr.FieldDefn('id', ogr.OFTInteger)) + definition = outLayer.GetLayerDefn() + outFeature = ogr.Feature(definition) + outFeature.SetField('id', 0) + geometry = ogr.CreateGeometryFromWkb(outMultipolygon.ExportToWkb()) + outFeature.SetGeometry(geometry) + outLayer.CreateFeature(outFeature) + outFeature = None + + ### Calculate extent + (aoiMinX, aoiMaxX, aoiMinY, aoiMaxY) = outLayer.GetExtent() + aoiLines = int(np.rint((aoiMaxY - aoiMinY)/posting)) + aoiSamples = int(np.rint((aoiMaxX - aoiMinX)/posting)) + maskGeoTrans = (aoiMinX, posting, 0, aoiMaxY, 0, -posting) + + ### Rasterize mask polygon + gdalDriver = gdal.GetDriverByName('MEM') + outRaster = gdalDriver.Create('', aoiSamples, aoiLines, 1, gdal.GDT_Float32) + outRaster.SetGeoTransform((aoiMinX, posting, 0, aoiMaxY, 0, -posting)) + outRaster.SetProjection(outProj.ExportToWkt()) + outBand = outRaster.GetRasterBand(1) + outBand.SetNoDataValue(0) + outBand.FlushCache() + gdal.RasterizeLayer(outRaster, [1], outLayer, burn_values=[1]) + mask = outRaster.GetRasterBand(1).ReadAsArray() + outVector = None + outRaster = None + + ### Invert mask (if requested) + if invert == True: + mask = 1.0 - mask + + ### Final adjustments + mask = mask[:dataRows,:dataCols] + mask[mask==0] = np.nan + + return (mask, maskGeoTrans) + + +def apply_mask(data, dataGeoTrans, mask, maskGeoTrans): + + (dataRows, dataCols) = data.shape + dataOriginX = dataGeoTrans[0] + dataOriginY = dataGeoTrans[3] + # dataPixelSize = dataGeoTrans[1] + (maskRows, maskCols) = mask.shape + maskOriginX = maskGeoTrans[0] + maskOriginY = maskGeoTrans[3] + maskPixelSize = maskGeoTrans[1] + offsetX = int(np.rint((maskOriginX - dataOriginX)/maskPixelSize)) + offsetY = int(np.rint((dataOriginY - maskOriginY)/maskPixelSize)) + data = data[offsetY:maskRows+offsetY,offsetX:maskCols+offsetX] + data *= mask + + return data + + +def geotiff2boundary_ext(inGeotiff, maskFile, geographic): + + # Extract metadata + (spatialRef, gt, shape, pixel) = raster_meta(inGeotiff) + epsg = int(spatialRef.GetAttrValue('AUTHORITY', 1)) + (data, colFirst, rowsFirst, geoTrans, proj) = \ + geotiff2boundary_mask(inGeotiff, epsg, None) + (rows, cols) = data.shape + + # Save in mask file (if defined) + if maskFile is not None: + gdalDriver = gdal.GetDriverByName('GTiff') + outRaster = gdalDriver.Create(maskFile, rows, cols, 1, gdal.GDT_Byte) + outRaster.SetGeoTransform(geoTrans) + outRaster.SetProjection(proj.ExportToWkt()) + outBand = outRaster.GetRasterBand(1) + outBand.WriteArray(data) + outRaster = None + + # Save in memory + gdalDriver = 
gdal.GetDriverByName('Mem') + outRaster = gdalDriver.Create('out', rows, cols, 1, gdal.GDT_Byte) + outRaster.SetGeoTransform(geoTrans) + outRaster.SetProjection(proj.ExportToWkt()) + outBand = outRaster.GetRasterBand(1) + outBand.WriteArray(data) + data = None + + # Polygonize the raster image + inBand = outRaster.GetRasterBand(1) + ogrDriver = ogr.GetDriverByName('Memory') + outVector = ogrDriver.CreateDataSource('out') + outLayer = outVector.CreateLayer('boundary', srs=proj) + fieldDefinition = ogr.FieldDefn('ID', ogr.OFTInteger) + outLayer.CreateField(fieldDefinition) + gdal.Polygonize(inBand, inBand, outLayer, 0, [], None) + outRaster = None + + # Extract geometry from layer + inSpatialRef = outLayer.GetSpatialRef() + multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) + for outFeature in outLayer: + geometry = outFeature.GetGeometryRef() + multipolygon.AddGeometry(geometry) + outFeature = None + outLayer = None + + # Convert geometry from projected to geographic coordinates (if requested) + if geographic == True: + (multipolygon, outSpatialRef) = \ + geometry_proj2geo(multipolygon, inSpatialRef) + return (multipolygon, outSpatialRef) + else: + return (multipolygon, inSpatialRef) + + +def geotiff2boundary(inGeotiff, maskFile): + + return geotiff2boundary_ext(inGeotiff, maskFile, False) + + +def geotiff2boundary_geo(inGeotiff, maskFile): + + return geotiff2boundary_ext(inGeotiff, maskFile, True) + + +# Get polygon for a tile +def get_tile_geometry(tile, step): + + # Extract corners + xmin = int(tile[1:3]) + ymin = int(tile[4:7]) + if tile[0] == 'S': + xmin = -xmin + if tile[3] == 'W': + ymin = -ymin + xmax = xmin + step + ymax = ymin + step + + # Create geometry + ring = ogr.Geometry(ogr.wkbLinearRing) + ring.AddPoint_2D(ymax, xmin) + ring.AddPoint_2D(ymax, xmax) + ring.AddPoint_2D(ymin, xmax) + ring.AddPoint_2D(ymin, xmin) + ring.AddPoint_2D(ymax, xmin) + polygon = ogr.Geometry(ogr.wkbPolygon) + polygon.AddGeometry(ring) + + return polygon + + +# Get tile names +def get_tile_names(minLat, maxLat, minLon, maxLon, step): + + tiles = [] + for i in range(minLon, maxLon, step): + for k in range(minLat, maxLat, step): + eastwest = 'W' if i<0 else 'E' + northsouth = 'S' if k<0 else 'N' + tile = ('%s%02d%s%03d' % (northsouth, abs(k), eastwest, abs(i))) + tiles.append(tile) + return tiles + + +# Get tiles extent +def get_tiles_extent(tiles, step): + + minLat = 90 + maxLat = -90 + minLon = 180 + maxLon = -180 + for tile in tiles: + xmin = int(tile[1:3]) + ymin = int(tile[4:7]) + if tile[0] == 'S': + xmin = -xmin + if tile[3] == 'W': + ymin = -ymin + if xmin < minLat: + minLat = xmin + if xmin > maxLat: + maxLat = xmin + if ymin < minLon: + minLon = ymin + if ymin > maxLon: + maxLon = ymin + maxLat += step + maxLon += step + + return (minLat, maxLat, minLon, maxLon) + + +# Generate a global tile shapefile +def generate_tile_shape(shapeFile, minLat, maxLat, minLon, maxLon, step): + + # General setup for shapefile + driver = ogr.GetDriverByName('ESRI Shapefile') + if os.path.exists(shapeFile): + driver.DeleteDataSource(shapeFile) + shapeData = driver.CreateDataSource(shapeFile) + + # Define layer and attributes + spatialReference = osr.SpatialReference() + spatialReference.ImportFromEPSG(4326) + layer = shapeData.CreateLayer(shapeFile, spatialReference, ogr.wkbPolygon) + fieldname = ogr.FieldDefn('tile', ogr.OFTString) + fieldname.SetWidth(10) + layer.CreateField(fieldname) + + # Going through the tiles + tiles = get_tile_names(minLat, maxLat, minLon, maxLon, step) + for tile in tiles: + geometry = 
get_tile_geometry(tile, step) + tileGeometry = geometry.ExportToWkt() + feature = ogr.Feature(layer.GetLayerDefn()) + feature.SetField('tile', tile) + + # Define geometry as polygon + geom = ogr.CreateGeometryFromWkt(tileGeometry) + if geom: + feature.SetGeometry(geom) + layer.CreateFeature(feature) + feature.Destroy() + + shapeData.Destroy() + + +# Generate a shapefile from a CSV list file +def list2shape(csvFile, shapeFile): + + # Set up shapefile attributes + fields = [] + field = {} + values = [] + field['name'] = 'granule' + field['type'] = ogr.OFTString + field['width'] = 254 + fields.append(field) + + files = [line.strip() for line in open(csvFile)] + for file in files: + data = gdal.Open(file, GA_ReadOnly) + if data is not None and data.GetDriver().LongName == 'GeoTIFF': + + print('Reading %s ...' % file) + # Generate GeoTIFF boundary geometry + data = None + (geometry, spatialRef) = geotiff2boundary(file, None) + + # Simplify the geometry - only works with GDAL 1.8.0 + #geometry = geometry.Simplify(float(tolerance)) + + # Add granule name and geometry + base = os.path.basename(file) + granule = os.path.splitext(base)[0] + value = {} + value['granule'] = granule + value['geometry'] = geometry + values.append(value) + + # Write geometry to shapefile + merge = False + geometry2shape(fields, values, spatialRef, merge, shapeFile) + + +# Determine the tiles for an area of interest +def aoi2tiles(aoiGeometry): + + # Determine the bounding box + envelope = aoiGeometry.GetEnvelope() + west = int(envelope[0] - 0.5) + east = int(envelope[1] + 1.5) + south = int(envelope[2] - 0.5) + north = int(envelope[3] + 1.5) + + # Walk through the potential tiles and add the required on to the geometry + tiles = [] + multipolygon = ogr.Geometry(ogr.wkbMultiPolygon) + for i in range(west, east): + for k in range(south, north): + eastwest = 'W' if i<0 else 'E' + northsouth = 'S' if k<0 else 'N' + tile = ('%s%02d%s%03d' % (northsouth, abs(k), eastwest, abs(i))) + polygon = get_tile_geometry(tile, 1) + intersection = polygon.Intersection(aoiGeometry) + if intersection is not None: + multipolygon.AddGeometry(polygon) + tiles.append(tile) + + return (tiles, multipolygon) + +def get_latlon_extent(filename): + src = gdal.Open(filename) + ulx, xres, xskew, uly, yskew, yres = src.GetGeoTransform() + lrx = ulx + (src.RasterXSize * xres) + lry = uly + (src.RasterYSize * yres) + + source = osr.SpatialReference() + source.ImportFromWkt(src.GetProjection()) + + target = osr.SpatialReference() + target.ImportFromEPSG(4326) + + transform = osr.CoordinateTransformation(source, target) + + lon1, lat1, h = transform.TransformPoint(ulx, uly) + lon2, lat2, h = transform.TransformPoint(lrx, uly) + lon3, lat3, h = transform.TransformPoint(ulx, lry) + lon4, lat4, h = transform.TransformPoint(lrx, lry) + + lat_min = min(lat1,lat2,lat3,lat4) + lat_max = max(lat1,lat2,lat3,lat4) + lon_min = min(lon1,lon2,lon3,lon4) + lon_max = max(lon1,lon2,lon3,lon4) + + return lat_min, lat_max, lon_min, lon_max + diff --git a/hyp3lib/asf_time_series.py b/hyp3lib/asf_time_series.py new file mode 100644 index 00000000..0f6f2db0 --- /dev/null +++ b/hyp3lib/asf_time_series.py @@ -0,0 +1,452 @@ +import os +from datetime import datetime, timedelta + +import netCDF4 as nc +import numpy as np +import statsmodels.api as sm +from osgeo import gdal, ogr, osr +from scipy import ndimage +from scipy.interpolate import interp1d +from statsmodels.tsa.seasonal import seasonal_decompose + +from hyp3lib import GeometryError +from hyp3lib.asf_geometry import 
geometry_proj2geo, raster_meta
+
+tolerance = 0.00005
+
+
+def initializeNetcdf(ncFile, meta):
+
+    dataset = nc.Dataset(ncFile, 'w', format='NETCDF4')
+
+    ### Define global attributes
+    dataset.Conventions = ('CF-1.7')
+    dataset.institution = meta['institution']
+    dataset.title = meta['title']
+    dataset.source = meta['source']
+    dataset.comment = meta['comment']
+    dataset.reference = meta['reference']
+    timestamp = datetime.utcnow().isoformat() + 'Z'
+    dataset.history = ('{0}: netCDF file created'.format(timestamp))
+    dataset.featureType = ('timeSeries')
+
+    ### Create dimensions
+    dataset.createDimension('xgrid', meta['cols'])
+    dataset.createDimension('ygrid', meta['rows'])
+    dataset.createDimension('time', None)
+    dataset.createDimension('nchar', 100)
+
+    ### Create variables - time, coordinates, values
+    ## time
+    time = dataset.createVariable('time', np.float32, ('time',))
+    time.axis = ('T')
+    time.long_name = ('serial date')
+    time.standard_name = ('time')
+    time.units = ('seconds since {0}'.format(meta['refTime']))
+    time.calendar = 'gregorian'
+    time.fill_value = 0
+    time.reference = ('center time of image')
+
+    ## map projection
+    projSpatialRef = osr.SpatialReference()
+    projSpatialRef.ImportFromEPSG(int(meta['epsg']))
+    wkt = projSpatialRef.ExportToWkt()
+    projection = dataset.createVariable('Transverse_Mercator', 'S1')
+    projection.grid_mapping_name = ('transverse_mercator')
+    projection.crs_wkt = wkt
+    projection.scale_factor_at_central_meridian = \
+        projSpatialRef.GetProjParm(osr.SRS_PP_SCALE_FACTOR)
+    projection.longitude_of_central_meridian = \
+        projSpatialRef.GetProjParm(osr.SRS_PP_CENTRAL_MERIDIAN)
+    projection.latitude_of_projection_origin = \
+        projSpatialRef.GetProjParm(osr.SRS_PP_LATITUDE_OF_ORIGIN)
+    projection.false_easting = \
+        projSpatialRef.GetProjParm(osr.SRS_PP_FALSE_EASTING)
+    projection.false_northing = \
+        projSpatialRef.GetProjParm(osr.SRS_PP_FALSE_NORTHING)
+    projection.projection_x_coordinate = ('xgrid')
+    projection.projection_y_coordinate = ('ygrid')
+    projection.units = ('meters')
+
+    ## coordinate: x grid
+    xgrid = dataset.createVariable('xgrid', np.float32, ('xgrid'))
+    xgrid.axis = ('X')
+    xgrid.long_name = ('projection_grid_x_center')
+    xgrid.standard_name = ('projection_x_coordinate')
+    xgrid.units = ('meters')
+    xgrid.fill_value = np.nan
+
+    ## coordinate: y grid
+    ygrid = dataset.createVariable('ygrid', np.float32, ('ygrid'))
+    ygrid.axis = ('Y')
+    ygrid.long_name = ('projection_grid_y_center')
+    ygrid.standard_name = ('projection_y_coordinate')
+    ygrid.units = ('meters')
+    ygrid.fill_value = np.nan
+
+    ## image
+    image = dataset.createVariable('image', np.float32, \
+        ('time', 'ygrid', 'xgrid'), zlib=True)
+    image.long_name = meta['imgLongName']
+    image.units = meta['imgUnits']
+    image.fill_value = meta['imgNoData']
+
+    ## name
+    name = dataset.createVariable('granule', 'S1', ('time', 'nchar'))
+    name.long_name = 'name of the granule'
+
+    ### Fill in coordinates
+    xCoordinate = np.arange(meta['minX'], meta['maxX'], meta['pixelSize'])
+    xgrid[:] = xCoordinate
+    yCoordinate = np.arange(meta['maxY'], meta['minY'], -meta['pixelSize'])
+    ygrid[:] = yCoordinate
+
+    dataset.close()
+
+
+def extractNetcdfTime(ncFile, csvFile):
+
+    outF = open(csvFile, 'w')
+    timeSeries = nc.Dataset(ncFile, 'r')
+    timeRef = timeSeries.variables['time'].getncattr('units')[14:]
+    timeRef = datetime.strptime(timeRef, '%Y-%m-%d %H:%M:%S')
+    time = timeSeries.variables['time'][:].tolist()
+    for t in time:
+        timestamp = timeRef + timedelta(seconds=t)
+        outF.write('%s\n' % timestamp.isoformat())
+    outF.close()
+
+
+def nc2meta(ncFile):
+
+    dataset = nc.Dataset(ncFile, 'r')
+
+    meta = {}
+
+    ### Global attributes
+    meta['conventions'] = dataset.Conventions
+    meta['institution'] = dataset.institution
+    meta['title'] = dataset.title
+    meta['source'] = dataset.source
+    meta['comment'] = dataset.comment
+    meta['reference'] = dataset.reference
+    meta['history'] = dataset.history
+
+    ### Coordinates
+    xGrid = dataset.variables['xgrid']
+    (meta['cols'],) = xGrid.shape
+    meta['pixelSize'] = xGrid[1] - xGrid[0]
+    meta['minX'] = np.min(xGrid)
+    meta['maxX'] = np.max(xGrid) + meta['pixelSize']
+    yGrid = dataset.variables['ygrid']
+    (meta['rows'],) = yGrid.shape
+    meta['minY'] = np.min(yGrid) - meta['pixelSize']
+    meta['maxY'] = np.max(yGrid)
+
+    ### Time reference
+    time = dataset.variables['time']
+    (meta['timeCount'],) = time.shape
+    meta['refTime'] = time.units[14:]
+
+    ### Map projection: EPSG
+    proj = dataset.variables['Transverse_Mercator']
+    projSpatialRef = osr.SpatialReference()
+    projSpatialRef.ImportFromWkt(proj.crs_wkt)
+    meta['epsg'] = int(projSpatialRef.GetAttrValue('AUTHORITY', 1))
+
+    ### Image metadata
+    image = dataset.variables['image']
+    meta['imgLongName'] = image.long_name
+    meta['imgUnits'] = image.units
+    meta['imgNoData'] = image.fill_value
+
+    dataset.close()
+
+    return meta
+
+
+def addImage2netcdf(image, ncFile, granule, imgTime):
+
+    dataset = nc.Dataset(ncFile, 'a')
+
+    ### Updating time
+    time = dataset.variables['time']
+    name = dataset.variables['granule']
+    data = dataset.variables['image']
+    numGranules = time.shape[0]
+    time[numGranules] = nc.date2num(imgTime, units=time.units,
+        calendar=time.calendar)
+    name[numGranules] = nc.stringtochar(np.array(granule, 'S100'))
+    data[numGranules,:,:] = image
+
+    dataset.close()
+
+
+def filter_change(image, kernelSize, iterations):
+
+    (rows, cols) = image.shape
+    positiveChange = np.zeros((rows,cols), dtype=np.uint8)
+    negativeChange = np.zeros((rows,cols), dtype=np.uint8)
+    noChange = np.zeros((rows,cols), dtype=np.uint8)
+    for ii in range(int(rows)):
+        for kk in range(int(cols)):
+            if image[ii,kk] == 1:
+                negativeChange[ii,kk] = 1
+            elif image[ii,kk] == 2:
+                noChange[ii,kk] = 1
+            elif image[ii,kk] == 3:
+                positiveChange[ii,kk] = 1
+    image = None
+    positiveChange = ndimage.binary_opening(positiveChange,
+        iterations=iterations, structure=np.ones(kernelSize)).astype(np.uint8)
+    negativeChange = ndimage.binary_opening(negativeChange,
+        iterations=iterations, structure=np.ones(kernelSize)).astype(np.uint8)
+    change = np.full((rows,cols), 2, dtype=np.uint8)
+    for ii in range(int(rows)):
+        for kk in range(int(cols)):
+            if negativeChange[ii,kk] == 1:
+                change[ii,kk] = 1
+            elif positiveChange[ii,kk] == 1:
+                change[ii,kk] = 3
+    change *= noChange
+
+    return change
+
+
+def vector_meta(vectorFile):
+
+    vector = ogr.Open(vectorFile)
+    layer = vector.GetLayer()
+    layerDefinition = layer.GetLayerDefn()
+    fieldCount = layerDefinition.GetFieldCount()
+    fields = []
+    for ii in range(fieldCount):
+        field = {}
+        field['name'] = layerDefinition.GetFieldDefn(ii).GetName()
+        field['type'] = layerDefinition.GetFieldDefn(ii).GetType()
+        field['width'] = layerDefinition.GetFieldDefn(ii).GetWidth()
+        field['precision'] = layerDefinition.GetFieldDefn(ii).GetPrecision()
+        fields.append(field)
+    proj = layer.GetSpatialRef()
+    extent = layer.GetExtent()
+    features = []
+    featureCount = layer.GetFeatureCount()
+    for kk in range(featureCount):
+        value = {}
+        feature = layer.GetFeature(kk)
+        for ii in range(fieldCount):
+            if fields[ii]['type'] == ogr.OFTInteger:
+                value[fields[ii]['name']] = int(feature.GetField(ii))
+            elif fields[ii]['type'] == ogr.OFTReal:
+                value[fields[ii]['name']] = float(feature.GetField(ii))
+            else:
+                value[fields[ii]['name']] = feature.GetField(ii)
+        value['geometry'] = feature.GetGeometryRef().ExportToWkt()
+        features.append(value)
+
+    return (fields, proj, extent, features)
+
+
+def raster_metadata(input):
+
+    # Set up shapefile attributes
+    fields = []
+    field = {}
+    values = []
+    field['name'] = 'granule'
+    field['type'] = ogr.OFTString
+    field['width'] = 254
+    fields.append(field)
+    field = {}
+    field['name'] = 'epsg'
+    field['type'] = ogr.OFTInteger
+    fields.append(field)
+    field = {}
+    field['name'] = 'originX'
+    field['type'] = ogr.OFTReal
+    fields.append(field)
+    field = {}
+    field['name'] = 'originY'
+    field['type'] = ogr.OFTReal
+    fields.append(field)
+    field = {}
+    field['name'] = 'pixSize'
+    field['type'] = ogr.OFTReal
+    fields.append(field)
+    field = {}
+    field['name'] = 'cols'
+    field['type'] = ogr.OFTInteger
+    fields.append(field)
+    field = {}
+    field['name'] = 'rows'
+    field['type'] = ogr.OFTInteger
+    fields.append(field)
+    field = {}
+    field['name'] = 'pixel'
+    field['type'] = ogr.OFTString
+    field['width'] = 8
+    fields.append(field)
+
+    # Extract other raster image metadata
+    (outSpatialRef, outGt, outShape, outPixel) = raster_meta(input)
+    if outSpatialRef.GetAttrValue('AUTHORITY', 0) == 'EPSG':
+        epsg = int(outSpatialRef.GetAttrValue('AUTHORITY', 1))
+
+    # Add granule name and geometry
+    base = os.path.basename(input)
+    granule = os.path.splitext(base)[0]
+    value = {}
+    value['granule'] = granule
+    value['epsg'] = epsg
+    value['originX'] = outGt[0]
+    value['originY'] = outGt[3]
+    value['pixSize'] = outGt[1]
+    value['cols'] = outShape[1]
+    value['rows'] = outShape[0]
+    value['pixel'] = outPixel
+    values.append(value)
+
+    return (fields, values, outSpatialRef)
+
+
+def netcdf2boundary_mask(ncFile, geographic):
+
+    ### Extract metadata
+    meta = nc2meta(ncFile)
+    cols = meta['cols']
+    rows = meta['rows']
+    proj = osr.SpatialReference()
+    proj.ImportFromEPSG(meta['epsg'])
+    geoTrans = \
+        (meta['minX'], meta['pixelSize'], 0, meta['maxY'], 0, -meta['pixelSize'])
+
+    ### Reading time series
+    dataset = nc.Dataset(ncFile, 'r')
+    image = dataset.variables['image'][:]
+    dataset.close()
+
+    ### Save in memory
+    data = image[0,:,:]/image[0,:,:]
+    image = None
+    gdalDriver = gdal.GetDriverByName('Mem')
+    # GDAL's Create takes (name, xsize, ysize, ...), so columns come first
+    outRaster = gdalDriver.Create('out', cols, rows, 1, gdal.GDT_Byte)
+    outRaster.SetGeoTransform(geoTrans)
+    outRaster.SetProjection(proj.ExportToWkt())
+    outBand = outRaster.GetRasterBand(1)
+    outBand.WriteArray(data)
+    inBand = None
+    data = None
+
+    ### Polygonize the raster image
+    inBand = outRaster.GetRasterBand(1)
+    ogrDriver = ogr.GetDriverByName('Memory')
+    outVector = ogrDriver.CreateDataSource('out')
+    outLayer = outVector.CreateLayer('boundary', srs=proj)
+    fieldDefinition = ogr.FieldDefn('ID', ogr.OFTInteger)
+    outLayer.CreateField(fieldDefinition)
+    gdal.Polygonize(inBand, inBand, outLayer, 0, [], None)
+    outRaster = None
+
+    ### Extract geometry from layer
+    inSpatialRef = outLayer.GetSpatialRef()
+    multipolygon = ogr.Geometry(ogr.wkbMultiPolygon)
+    for outFeature in outLayer:
+        geometry = outFeature.GetGeometryRef()
+        multipolygon.AddGeometry(geometry)
+        outFeature = None
+    outLayer = None
+
+    ### Convert geometry from projected to geographic coordinates (if requested)
+    if geographic == True:
+        (multipolygon, outSpatialRef) = \
+            geometry_proj2geo(multipolygon, inSpatialRef)
+        return (multipolygon, outSpatialRef)
+    else:
+        return (multipolygon, inSpatialRef)
+
+
+def time_series_slice(ncFile, x, y, typeXY):
+
+    timeSeries = nc.Dataset(ncFile, 'r')
+
+    ### Extract information for variables: image, time, granule
+    timeRef = timeSeries.variables['time'].getncattr('units')[14:]
+    timeRef = datetime.strptime(timeRef, '%Y-%m-%d %H:%M:%S')
+    time = timeSeries.variables['time'][:].tolist()
+    timestamp = []
+    for t in time:
+        timestamp.append(timeRef + timedelta(seconds=t))
+    xGrid = timeSeries.variables['xgrid'][:]
+    yGrid = timeSeries.variables['ygrid'][:]
+    granules = timeSeries.variables['granule']
+    granule = nc.chartostring(granules[:])
+    data = timeSeries.variables['image']
+    # numGranules = len(time)
+
+    ### Define geo transformation and map projection
+    # originX = xGrid[0]
+    # originY = yGrid[0]
+    pixelSize = xGrid[1] - xGrid[0]
+    # gt = (originX, pixelSize, 0, originY, 0, -pixelSize)
+    var = timeSeries.variables.keys()
+    if 'Transverse_Mercator' in var:
+        wkt = timeSeries.variables['Transverse_Mercator'].getncattr('crs_wkt')
+    else:
+        raise GeometryError('Could not find map projection information!')
+
+    ### Work out line/sample from various input types
+    if typeXY == 'pixel':
+        sample = x
+        line = y
+    elif typeXY == 'latlon':
+        inProj = osr.SpatialReference()
+        inProj.ImportFromEPSG(4326)
+        outProj = osr.SpatialReference()
+        outProj.ImportFromWkt(wkt)
+        transform = osr.CoordinateTransformation(inProj, outProj)
+        coord = ogr.Geometry(ogr.wkbPoint)
+        coord.AddPoint(x,y)
+        coord.Transform(transform)
+        coordX = np.rint(coord.GetX()/pixelSize)*pixelSize
+        coordY = np.rint(coord.GetY()/pixelSize)*pixelSize
+        sample = xGrid.tolist().index(coordX)
+        line = yGrid.tolist().index(coordY)
+    elif typeXY == 'mapXY':
+        sample = xGrid.tolist().index(x)
+        line = yGrid.tolist().index(y)
+    # the image variable is dimensioned (time, ygrid, xgrid), so index line first
+    value = data[:,line,sample]
+
+    ### Work on time series
+    ## Fill in gaps by interpolation
+    startDate = timestamp[0].date()
+    stopDate = timestamp[len(timestamp)-1].date()
+    refDates = np.arange(startDate, stopDate + timedelta(days=12), 12).tolist()
+    datestamp = []
+    for t in time:
+        datestamp.append((timeRef + timedelta(seconds=t)).date())
+    missingDates = list(set(refDates) - set(datestamp))
+    f = interp1d(time, value)
+    missingTime = []
+    for missingDate in missingDates:
+        missingTime.append((missingDate - timeRef.date()).total_seconds())
+    missingValues = f(missingTime)
+    allValues = []
+    refType = []
+    for ii in range(len(refDates)):
+        if refDates[ii] in missingDates:
+            index = missingDates.index(refDates[ii])
+            allValues.append(missingValues[index])
+            refType.append('interpolated')
+        else:
+            index = datestamp.index(refDates[ii])
+            allValues.append(value[index])
+            refType.append('acquired')
+    allValues = np.asarray(allValues)
+
+    ## Smoothing the timeline with localized regression (LOESS)
+    lowess = sm.nonparametric.lowess
+    smooth = lowess(allValues, np.arange(len(allValues)), frac=0.08, it=0)[:,1]
+
+    sd = seasonal_decompose(x=smooth, model='additive', freq=4)
+
+    return (granule, refDates, refType, smooth, sd)
diff --git a/hyp3lib/createAmp.py b/hyp3lib/createAmp.py
new file mode 100755
index 00000000..21b286c1
--- /dev/null
+++ b/hyp3lib/createAmp.py
@@ -0,0 +1,33 @@
+"""Convert GeoTIFF power to amplitude"""
+from hyp3lib import saa_func_lib as saa
+import numpy as np
+import argparse
+import os
+
+
+def createAmp(fi,nodata=None):
+    (x,y,trans,proj,data) = saa.read_gdal_file(saa.open_gdal_file(fi))
+    # amplitude is the square root of power
+    ampdata = np.sqrt(data)
+    outfile = fi.replace('.tif','_amp.tif')
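+    # saa.write_gdal_file_float (a hyp3lib.saa_func_lib helper not shown in this
+    # patch) presumably writes ampdata as a float32 GeoTIFF with the source image's
+    # geotransform and projection, plus the optional nodata value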
+ saa.write_gdal_file_float(outfile,trans,proj,ampdata,nodata=nodata) + return outfile + + +def main(): + """Main entrypoint""" + + parser = argparse.ArgumentParser( + prog=os.path.basename(__file__), + description=__doc__, + ) + parser.add_argument("infile", nargs="+", help="Input tif filename(s)") + parser.add_argument("-n", "--nodata", type=float, help="Set nodata value") + args = parser.parse_args() + + infiles = args.infile + for fi in infiles: + createAmp(fi, args.nodata) + + +if __name__ == "__main__": + main() diff --git a/hyp3lib/make_cogs.py b/hyp3lib/make_cogs.py new file mode 100755 index 00000000..8b53d5ec --- /dev/null +++ b/hyp3lib/make_cogs.py @@ -0,0 +1,65 @@ +"""Creates a Cloud Optimized GeoTIFF from the input GeoTIFF(s)""" + +import argparse +import logging +import os +import shutil +import sys +from glob import glob +from tempfile import NamedTemporaryFile + +from osgeo import gdal + +from hyp3lib.execute import execute + + +def cogify_dir(directory: str, file_pattern: str = '*.tif'): + """ + Convert all found GeoTIFF files to a Cloud Optimized GeoTIFF inplace + Args: + directory: directory to search through + file_pattern: the pattern for finding GeoTIFFs + """ + path_expression = os.path.join(directory, file_pattern) + logging.info(f'Converting files to COGs for {path_expression}') + for filename in glob(path_expression): + cogify_file(filename) + + +def cogify_file(filename: str): + """ + Convert a GeoTIFF to a Cloud Optimized GeoTIFF inplace + + Args: + filename: GeoTIFF file to convert + """ + logging.info(f'Converting {filename} to COG') + execute(f'gdaladdo -r average {filename} 2 4 8 16', uselogging=True) + creation_options = ['TILED=YES', 'COMPRESS=DEFLATE', 'NUM_THREADS=ALL_CPUS', 'COPY_SRC_OVERVIEWS=YES'] + with NamedTemporaryFile() as temp_file: + shutil.copy(filename, temp_file.name) + gdal.Translate(filename, temp_file.name, format='GTiff', creationOptions=creation_options) + + +def main(): + """Main entrypoint""" + + parser = argparse.ArgumentParser( + prog=os.path.basename(__file__), + description=__doc__, + ) + parser.add_argument('geotiffs', nargs='+', help='name of GeoTIFF file(s)') + args = parser.parse_args() + + out = logging.StreamHandler(stream=sys.stdout) + out.addFilter(lambda record: record.levelno <= logging.INFO) + err = logging.StreamHandler() + err.setLevel(logging.WARNING) + logging.basicConfig(format='%(message)s', level=logging.INFO, handlers=(out, err)) + + for geotiff_file in args.geotiffs: + cogify_file(geotiff_file) + + +if __name__ == '__main__': + main() diff --git a/hyp3lib/raster_boundary2shape.py b/hyp3lib/raster_boundary2shape.py new file mode 100755 index 00000000..08dfc21d --- /dev/null +++ b/hyp3lib/raster_boundary2shape.py @@ -0,0 +1,90 @@ +"""generates boundary shapefile from GeoTIFF file""" + +import argparse +import os + +from scipy import ndimage + +from hyp3lib.asf_geometry import geotiff2boundary_mask, data_geometry2shape +from hyp3lib.asf_time_series import raster_metadata + + +def raster_boundary2shape(inFile, threshold, outShapeFile, use_closing=True, fill_holes = False, + pixel_shift=False): + # Extract raster image metadata + print('Extracting raster information ...') + (fields, values, spatialRef) = raster_metadata(inFile) + + print("Initial origin {x},{y}".format(x=values[0]['originX'],y=values[0]['originY'])) + + if spatialRef.GetAttrValue('AUTHORITY', 0) == 'EPSG': + epsg = int(spatialRef.GetAttrValue('AUTHORITY', 1)) + # Generate GeoTIFF boundary geometry + print('Extracting boundary geometry ...') + (data, 
colFirst, rowFirst, geoTrans, proj) = \
+        geotiff2boundary_mask(inFile, epsg, threshold, use_closing=use_closing)
+    (rows, cols) = data.shape
+
+    print("After geotiff2boundary_mask origin {x},{y}".format(x=geoTrans[0],y=geoTrans[3]))
+
+    if fill_holes:
+        data = ndimage.binary_fill_holes(data).astype(bool)
+
+# if pixel_shift:
+    if values[0]['pixel']:
+        minx = geoTrans[0]
+        maxy = geoTrans[3]
+        # maxx = geoTrans[0] + cols*geoTrans[1]
+        # miny = geoTrans[3] + rows*geoTrans[5]
+
+        # compute the pixel-aligned bounding box (larger than the feature's bbox)
+        left = minx - (geoTrans[1]/2)
+        top = maxy - (geoTrans[5]/2)
+
+        values[0]['originX'] = left
+        values[0]['originY'] = top
+
+        print("After pixel_shift origin {x},{y}".format(x=values[0]['originX'],y=values[0]['originY']))
+
+    values[0]['rows'] = rows
+    values[0]['cols'] = cols
+
+    # Write boundary to shapefile
+    print('Writing boundary to shapefile ...')
+    data_geometry2shape(data, fields, values, spatialRef, geoTrans, outShapeFile)
+
+
+def main():
+    """Main entrypoint"""
+
+    parser = argparse.ArgumentParser(
+        prog=os.path.basename(__file__),
+        description=__doc__,
+    )
+    parser.add_argument('input', metavar='',
+                        help='name of the GeoTIFF file')
+    parser.add_argument('-threshold', metavar='', action='store',
+                        default=None, help='threshold value for what is considered blackfill')
+    parser.add_argument('shape', metavar='', help='name of the shapefile')
+
+    parser.add_argument('--fill_holes', default=False, action="store_true", help='Turn on hole filling')
+
+    parser.add_argument('--pixel_shift', default=False,
+                        action="store_true", help='apply pixel shift')
+
+    parser.add_argument('--no_closing',
+                        default=True, action='store_false',
+                        help='Switch to turn off closing operation')
+
+    args = parser.parse_args()
+
+    if not os.path.exists(args.input):
+        parser.error(f'GeoTIFF file {args.input} does not exist!')
+
+    raster_boundary2shape(
+        args.input, args.threshold, args.shape, args.no_closing, args.fill_holes, args.pixel_shift
+    )
+
+
+if __name__ == '__main__':
+    main()
diff --git a/hyp3lib/rtc2color.py b/hyp3lib/rtc2color.py
new file mode 100755
index 00000000..c2bbf1ed
--- /dev/null
+++ b/hyp3lib/rtc2color.py
@@ -0,0 +1,225 @@
+"""RGB decomposition of a dual-pol RTC
+
+The RGB decomposition enhances RTC dual-pol data for visual interpretation.
It +decomposes the co-pol and cross-pol signal into these color channels: + red: simple bounce (polarized) with some volume scattering + green: volume (depolarized) scattering + blue: simple bounce with very low volume scattering + +In the case where the volume to simple scattering ratio is larger than expected +for typical vegetation, such as in glaciated areas or some forest types, a teal +color (green + blue) can be used +""" + +import argparse +import logging +import os +import sys +from pathlib import Path +from typing import Union + +import numpy as np +from osgeo import gdal, osr + + +def cleanup_threshold(amp=False, cleanup=False) -> float: + """Determine the appropriate cleanup threshold value to use in amp or power + + Args: + amp: input TIF is in amplitude and not power + cleanup: Cleanup artifacts using a -48 db power threshold + + Returns: + clean_threshold: the cleaning threshold to use in amp or power + """ + if amp and cleanup: + clean_threshold = pow(10.0, -24.0 / 10.0) # db to amp + elif cleanup: + clean_threshold = pow(10.0, -48.0 / 10.0) # db to power + else: + clean_threshold = 0.0 + + return clean_threshold + + +def prepare_geotif_data(geotiff_handle: gdal.Dataset, rows: int, cols: int, amp=False, cleanup=False) -> np.ndarray: + """Load in and clean the GeoTIFF for calculating the color thresholds + + Args: + geotiff_handle: gdal Dataset for the GeoTIFF to prepare + rows: number of data rows to read in + cols: number of data columns to read in + amp: input TIF is in amplitude and not power + cleanup: Cleanup artifacts using a -48 db power threshold + + Returns: + data: A numpy array containing the prepared GeoTIFF data + """ + + data = np.nan_to_num(geotiff_handle.GetRasterBand(1).ReadAsArray()[:rows, :cols]) + + threshold = cleanup_threshold(amp, cleanup) + data[data < threshold] = 0.0 + + if amp: # to power + data *= data + + return data + + +def calculate_color_channel(copol_data: np.ndarray, crosspol_data: np.ndarray, threshold: float, + scale_factor: float, color: str): + """Calculate color channel values for the RGB decomposition of copol and crosspol data + + Args: + copol_data: copol data + crosspol_data: crosspol data + threshold: decomposition threshold value in db + scale_factor: scale data by this factor + color: the color channel to calculate + + Returns: + color_channel: color channel data + """ + + power_threshold = pow(10.0, threshold / 10.0) # db to power + below_threshold_mask = crosspol_data < power_threshold + + # I don't know what 'zp' is... 
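+    # Reading the expression below: zp takes the co-pol minus cross-pol power
+    # difference, clips negatives, and maps it through sqrt/arctan onto roughly
+    # [0, 1) -- plausibly a normalized measure of how strongly polarized the
+    # return is, though this reading is an interpretation, not documented intent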
+ zp = np.arctan(np.sqrt(np.clip(copol_data - crosspol_data, 0, None))) * 2.0 / np.pi + zp[~below_threshold_mask] = 0 + + if color == 'red': + z_constant = 1.0 + color_term = 2.0 * np.sqrt(np.clip(copol_data - 3.0 * crosspol_data, 0, None)) + color_term[below_threshold_mask] = 0.0 + + elif color == 'green': + z_constant = 2.0 + color_term = 3.0 * np.sqrt(crosspol_data) + color_term[below_threshold_mask] = 0.0 + + elif color == 'blue': + z_constant = 5.0 + color_term = np.zeros(copol_data.shape) + + elif color == 'teal': + z_constant = 5.0 + color_term = 2.0 * np.sqrt(np.clip(3.0 * crosspol_data - copol_data, 0, None)) + + else: + raise ValueError(f'Unknown color {color}, pick red, green, blue, or teal') + + # Find all our no data and bad data pixels + # NOTE: we're using crosspol here because it will typically have the most bad + # data and we want the same mask applied to all 3 channels (otherwise, we'll + # accidentally be changing colors from intended) + invalid_crosspol_mask = ~(crosspol_data > 0) + + color_channel = 1.0 + (color_term + z_constant * zp) * scale_factor + color_channel[invalid_crosspol_mask] = 0 + + return color_channel + + +def rtc2color(copol_tif: Union[str, Path], crosspol_tif: Union[str, Path], threshold: float, out_tif: Union[str, Path], + cleanup=False, teal=False, amp=False, real=False): + """RGB decomposition of a dual-pol RTC + + Args: + copol_tif: The co-pol RTC GeoTIF + crosspol_tif: The cross-pol RTC GeoTIF + threshold: Decomposition threshold value in db + out_tif: The output color GeoTIFF file name + cleanup: Cleanup artifacts using a -48 db power threshold + teal: Combine green and blue channels because the volume to simple scattering ratio is high + amp: input TIFs are in amplitude and not power + real: Output real (floating point) values instead of RGB scaled (0--255) ints + """ + + # Suppress GDAL warnings but raise python exceptions + # https://gis.stackexchange.com/a/91393 + gdal.UseExceptions() + gdal.PushErrorHandler('CPLQuietErrorHandler') + + copol_handle = gdal.Open(copol_tif) + crosspol_handle = gdal.Open(crosspol_tif) + + rows = min(copol_handle.RasterYSize, crosspol_handle.RasterYSize) + cols = min(copol_handle.RasterXSize, crosspol_handle.RasterXSize) + + geotransform = copol_handle.GetGeoTransform() + projection_reference = copol_handle.GetProjectionRef() + + copol_data = prepare_geotif_data(copol_handle, rows, cols, amp=amp, cleanup=cleanup) + crosspol_data = prepare_geotif_data(crosspol_handle, rows, cols, amp=amp, cleanup=cleanup) + + copol_handle = None # How to close because gdal is weird + crosspol_handle = None # How to close because gdal is weird + + driver = gdal.GetDriverByName('GTiff') + out_type = gdal.GDT_Float32 if real else gdal.GDT_Byte + out_raster = driver.Create(out_tif, cols, rows, 3, out_type, ['COMPRESS=LZW']) + out_raster.SetGeoTransform((geotransform[0], geotransform[1], 0, geotransform[3], 0, geotransform[5])) + out_raster_srs = osr.SpatialReference() + out_raster_srs.ImportFromWkt(projection_reference) + out_raster.SetProjection(out_raster_srs.ExportToWkt()) + + logging.info('Calculating color decomposition components') + + # used scale the results to fit inside RGB 1-255 (ints), with 0 for no/bad data + scale_factor = 1.0 if real else 254.0 + no_data_value = 0 + + bands = { + 1: 'red', + 2: 'green', + 3: 'teal' if teal else 'blue', + } + + for band_number, color in bands.items(): + logging.info(f'Calculate {color} channel and save in GeoTIFF') + band_data = calculate_color_channel( + copol_data, crosspol_data, 
threshold=threshold, scale_factor=scale_factor, color=color + ) + out_band = out_raster.GetRasterBand(band_number) + out_band.WriteArray(band_data) + out_band.SetNoDataValue(no_data_value) + del band_data + + out_raster = None # How to close because gdal is weird + + +def main(): + """Main entrypoint""" + + parser = argparse.ArgumentParser( + prog=os.path.basename(__file__), + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument('copol', help='the co-pol RTC GeoTIF') + parser.add_argument('crosspol', help='the cross-pol GeoTIF') + parser.add_argument('threshold', type=float, help='decomposition threshold value in dB') + parser.add_argument('geotiff', help='the output color GeoTIFF file name') + parser.add_argument('-c', '-cleanup', '--cleanup', action='store_true', + help='cleanup artifacts using a -48 db power threshold') + parser.add_argument('-t', '-teal', '--teal', action='store_true', + help='combine green and blue channels because the volume to simple scattering ratio is high') + parser.add_argument('-a', '-amp', '--amp', action='store_true', help='input is amplitude, not powerscale') + parser.add_argument('-r', '-real', '--real', action='store_true', + help='output real (floating point) values instead of RGB scaled (0--255) ints') + args = parser.parse_args() + + out = logging.StreamHandler(stream=sys.stdout) + out.addFilter(lambda record: record.levelno <= logging.INFO) + err = logging.StreamHandler() + err.setLevel(logging.WARNING) + logging.basicConfig(format='%(message)s', level=logging.INFO, handlers=(out, err)) + + rtc2color(args.copol, args.crosspol, args.threshold, args.geotiff, + args.cleanup, args.teal, args.amp, args.real) + + +if __name__ == '__main__': + main() diff --git a/hyp3lib/system.py b/hyp3lib/system.py new file mode 100644 index 00000000..502ffe64 --- /dev/null +++ b/hyp3lib/system.py @@ -0,0 +1,58 @@ +"""Utilities for probing the processing system""" + +import datetime +import logging +import os +import subprocess + + +def gamma_version(): + """Probe the system to find the version of GAMMA installed, if possible""" + gamma_ver = os.getenv('GAMMA_VERSION') + if gamma_ver is None: + try: + gamma_home = os.environ['GAMMA_HOME'] + except KeyError: + logging.error('No GAMMA_VERSION or GAMMA_HOME environment variables defined! GAMMA is not installed.') + raise + + try: + with open(f"{gamma_home}/ASF_Gamma_version.txt") as f: + gamma_ver = f.readlines()[-1].strip() + except IOError: + logging.warning( + f"No GAMMA_VERSION environment variable or ASF_Gamma_version.txt " + f"file found in GAMMA_HOME:\n {os.getenv('GAMMA_HOME')}\n" + f"Attempting to parse GAMMA version from its install directory" + ) + gamma_ver = os.path.basename(gamma_home).split('-')[-1] + try: + datetime.datetime.strptime(gamma_ver, '%Y%m%d') + except ValueError: + logging.warning(f'GAMMA version {gamma_ver} does not conform to the expected YYYYMMDD format') + + return gamma_ver + + +def isce_version(): + """Probe the system to find the version of ISCE installed, if possible""" + # NOTE: ISCE does not consistently provide version numbers. For example, the + # self reported version of ISCE with the conda install of ISCE 2.4.1 + # is 2.3 (import isce; isce.__version__). 
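+    # Given that caveat, the checks below try several sources in order: confirm
+    # isce imports at all, prefer the version conda reports for the installed
+    # package, and fall back to isce.__version__ only if the conda lookup is empty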
+ try: + import isce + except ImportError: + logging.error('ISCE is not installed.') + raise + + # prefer the conda reported version number; requires shell for active conda env + version = subprocess.check_output('conda list | grep isce | awk \'{print $2}\'', shell=True, text=True) + if version: + return version.strip() + + try: + version = isce.__version__ + return version + except AttributeError: + logging.warning('ISCE does not have a version attribute.') + return None diff --git a/tests/test_make_cogs.py b/tests/test_make_cogs.py new file mode 100644 index 00000000..abfdb3d9 --- /dev/null +++ b/tests/test_make_cogs.py @@ -0,0 +1,33 @@ +import os +import shutil + +from osgeo_utils.samples.validate_cloud_optimized_geotiff import validate + +from hyp3lib.make_cogs import cogify_dir, cogify_file + + +def _is_cog(filename): + warnings, errors, details = validate(filename, full_check=True) + return errors == [] + + +def test_make_cog(geotiff): + assert not _is_cog(geotiff) + cogify_file(geotiff) + assert _is_cog(geotiff) + + +def test_cogify_dir(geotiff): + base_dir = os.path.dirname(geotiff) + copy_names = [os.path.join(base_dir, '1.tif'), os.path.join(base_dir, '2.tif')] + + for name in copy_names: + shutil.copy(geotiff, name) + + # Only cogify our copied files + cogify_dir(base_dir, file_pattern='?.tif') + + for name in copy_names: + assert _is_cog(name) + + assert not _is_cog(geotiff) diff --git a/tests/test_system.py b/tests/test_system.py new file mode 100644 index 00000000..f8d09926 --- /dev/null +++ b/tests/test_system.py @@ -0,0 +1,96 @@ +import logging +import os + +import pytest + +from hyp3lib import system + + +def test_gamma_version_var(): + gamma_version = '20200131' + os.environ['GAMMA_VERSION'] = gamma_version + + result = system.gamma_version() + os.environ.pop('GAMMA_VERSION') + + assert gamma_version == result + + +def test_bad_gamma_version_var(caplog): + with caplog.at_level(logging.WARNING): + gamma_version = '20202233' + os.environ['GAMMA_VERSION'] = gamma_version + + _ = system.gamma_version() + os.environ.pop('GAMMA_VERSION') + + assert 'does not conform to the expected YYYYMMDD format' in caplog.text + + +def test_no_gamma_home_var(): + with pytest.raises(KeyError): + os.environ.pop('GAMMA_VERSION', None) + os.environ.pop('GAMMA_HOME', None) + + _ = system.gamma_version() + + +def test_asf_gamma_version(tmp_path): + os.environ.pop('GAMMA_VERSION', None) + os.environ['GAMMA_HOME'] = str(tmp_path.resolve()) + + gamma_version = '20170707' + asf = tmp_path / 'ASF_Gamma_version.txt' + asf.write_text(gamma_version) + + result = system.gamma_version() + os.environ.pop('GAMMA_HOME') + + assert gamma_version == result + + +def test_bad_asf_gamma_version(caplog, tmp_path): + with caplog.at_level(logging.WARNING): + os.environ.pop('GAMMA_VERSION', None) + os.environ['GAMMA_HOME'] = str(tmp_path.resolve()) + + gamma_version = '20170732' + asf = tmp_path / 'ASF_Gamma_version.txt' + asf.write_text(gamma_version) + + _ = system.gamma_version() + os.environ.pop('GAMMA_HOME') + + assert 'does not conform to the expected YYYYMMDD format' in caplog.text + + +def test_gamma_direcory_parse(caplog, tmp_path): + with caplog.at_level(logging.WARNING): + os.environ.pop('GAMMA_VERSION', None) + + gamma_version = '20170707' + gamma_home = tmp_path / f'GAMMA_SOFTWARE-{gamma_version}' + gamma_home.mkdir() + os.environ['GAMMA_HOME'] = str(gamma_home.resolve()) + + result = system.gamma_version() + os.environ.pop('GAMMA_HOME') + + assert 'No GAMMA_VERSION environment variable or 
ASF_Gamma_version.txt ' in caplog.text + assert gamma_version == result + + +def test_bad_gamma_direcory_parse(caplog, tmp_path): + with caplog.at_level(logging.WARNING): + os.environ.pop('GAMMA_VERSION', None) + + gamma_version = '20170732' + gamma_home = tmp_path / f'GAMMA_SOFTWARE-{gamma_version}' + gamma_home.mkdir() + os.environ['GAMMA_HOME'] = str(gamma_home.resolve()) + + _ = system.gamma_version() + os.environ.pop('GAMMA_HOME') + + assert 'No GAMMA_VERSION environment variable or ASF_Gamma_version.txt ' in caplog.text + assert 'does not conform to the expected YYYYMMDD format' in caplog.text From 51774ee9555ee7722d84977f737ec02892961a43 Mon Sep 17 00:00:00 2001 From: Forrest Williams Date: Thu, 4 Jan 2024 15:59:33 -0600 Subject: [PATCH 06/10] remove functionality only used in scratch repositories --- hyp3lib/cutGeotiffs.py | 135 -------------------------------------- hyp3lib/utm2dem.py | 133 ------------------------------------- setup.py | 2 - tests/test_entrypoints.py | 10 --- 4 files changed, 280 deletions(-) delete mode 100755 hyp3lib/cutGeotiffs.py delete mode 100755 hyp3lib/utm2dem.py diff --git a/hyp3lib/cutGeotiffs.py b/hyp3lib/cutGeotiffs.py deleted file mode 100755 index fac5b19b..00000000 --- a/hyp3lib/cutGeotiffs.py +++ /dev/null @@ -1,135 +0,0 @@ -"""Clip a bunch of geotiffs to the same area""" -from hyp3lib import saa_func_lib as saa -import re -import os -import argparse -from osgeo import gdal - - -def getPixSize(fi): - (x1,y1,t1,p1) = saa.read_gdal_file_geo(saa.open_gdal_file(fi)) - return (t1[1]) - - -def getCorners(fi): - (x1,y1,t1,p1) = saa.read_gdal_file_geo(saa.open_gdal_file(fi)) - ullon1 = t1[0] - ullat1 = t1[3] - lrlon1 = t1[0] + x1*t1[1] - lrlat1 = t1[3] + y1*t1[5] - return (ullon1,ullat1,lrlon1,lrlat1) - - -def getOverlap(coords,fi): - (x1,y1,t1,p1) = saa.read_gdal_file_geo(saa.open_gdal_file(fi)) - - ullon1 = t1[0] - ullat1 = t1[3] - lrlon1 = t1[0] + x1*t1[1] - lrlat1 = t1[3] + y1*t1[5] - - ullon2 = coords[0] - ullat2 = coords[1] - lrlon2 = coords[2] - lrlat2 = coords[3] - - ullat = min(ullat1,ullat2) - ullon = max(ullon1,ullon2) - lrlat = max(lrlat1,lrlat2) - lrlon = min(lrlon1,lrlon2) - - return (ullon,ullat,lrlon,lrlat) - - -def cutFiles(arg): - - if len(arg) == 1: - print("Nothing to do!!! Exiting...") - return(0) - - file1 = arg[0] - - # Open file1, get projection and pixsize - dst1 = gdal.Open(file1) - p1 = dst1.GetProjection() - - # Find the largest pixel size of all scenes - pixSize = getPixSize(arg[0]) - for x in range(len(arg) - 1): - tmp = getPixSize(arg[x + 1]) - pixSize = max(pixSize, tmp) - - # Make sure that UTM projections match - ptr = p1.find("UTM zone ") - if ptr != -1: - (zone1,hemi) = [t(s) for t,s in zip((int,str), re.search("(\d+)(.)",p1[ptr:]).groups())] - for x in range(len(arg)-1): - file2 = arg[x+1] - - # Open up file2, get projection - dst2 = gdal.Open(file2) - p2 = dst2.GetProjection() - - # Cut the UTM zone out of projection2 - ptr = p2.find("UTM zone ") - zone2 = re.search("(\d+)",p2[ptr:]).groups() - zone2 = int(zone2[0]) - - if zone1 != zone2: - print("Projections don't match... 
Reprojecting %s" % file2) - if hemi == "N": - proj = ('EPSG:326%02d' % int(zone1)) - else: - proj = ('EPSG:327%02d' % int(zone1)) - print(" reprojecting post image") - print(" proj is %s" % proj) - name = file2.replace(".tif","_reproj.tif") - gdal.Warp(name,file2,dstSRS=proj,xRes=pixSize,yRes=pixSize) - arg[x+1] = name - - # Find the overlap between all scenes - coords = getCorners(arg[0]) - for x in range (len(arg)-1): - coords = getOverlap(coords,arg[x+1]) - - # Check to make sure there was some overlap - print("Clipping coordinates: {}".format(coords)) - diff1 = (coords[2] - coords[0]) / pixSize - diff2 = (coords[3] - coords[1]) / pixSize * -1.0 - print("Found overlap size of {}x{}".format(int(diff1), int(diff2))) - if diff1 < 1 or diff2 < 1: - print("ERROR: There was no overlap between scenes") - exit(1) - # Finally, clip all scenes to the overlap region at the largest pixel size - lst = list(coords) - tmp = lst[3] - lst[3] = lst[1] - lst[1] = tmp - coords = tuple(lst) - print("Pixsize : x = {} y = {}".format(pixSize,-1*pixSize)) - for x in range (len(arg)): - file1 = arg[x] - file1_new = file1.replace('.tif','_clip.tif') - print(" clipping file {} to create file {}".format(file1, file1_new)) - # dst_d1 = gdal.Translate(file1_new,file1,projWin=coords,xRes=pixSize,yRes=pixSize,creationOptions = ['COMPRESS=LZW']) - gdal.Warp(file1_new,file1,outputBounds=coords,xRes=pixSize,yRes=-1*pixSize,creationOptions = ['COMPRESS=LZW']) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument( - "infiles", nargs='+', - help="Geotiff files to clip; output will be have _clip appended to the file name" - ) - args = parser.parse_args() - - cutFiles(args.infiles) - - -if __name__ == "__main__": - main() diff --git a/hyp3lib/utm2dem.py b/hyp3lib/utm2dem.py deleted file mode 100755 index 04a7ca18..00000000 --- a/hyp3lib/utm2dem.py +++ /dev/null @@ -1,133 +0,0 @@ -"""Convert a geotiff DEM into GAMMA internal format""" - -import argparse -import os - -import numpy as np -from osgeo import gdal, osr, gdalconst - -import hyp3lib.saa_func_lib as saa -from hyp3lib.execute import execute - - -def utm2dem(inDem,outDem,demPar,dataType="float"): - demParIn = "dem_par.in" - dataType = dataType.lower() - basename = os.path.basename(inDem) - logname = basename + "_utm_dem.log" - log = open(logname,"w") - - print("UTM DEM in GEOTIFF format: {}".format(inDem)) - print("output DEM: {}".format(outDem)) - print("output DEM parameter file: {}".format(demPar)) - print("log file: {}".format(logname)) - - (x,y,trans,proj,data) = saa.read_gdal_file(saa.open_gdal_file(inDem)) - - xsize = x - ysize = y - east = trans[0] - north = trans[3] - pix_east = trans[1] - pix_north = trans[5] - - ds=gdal.Open(inDem) - prj=ds.GetProjection() - s = prj.split("[") - for t in s: - if "false_northing" in t: - u = t.split('"') - v = u[2].split(",") - w = v[1].split("]") - false_north = w[0] - print("found false_north {}".format(false_north)) - - srs=osr.SpatialReference(wkt=prj) - string = srs.GetAttrValue('projcs') - t = string.split(" ") - zone = t[5] - print("Found zone string {} of length {}".format(zone, len(zone))) - - if len(zone) == 3: - zone = zone[0:2] - else: - zone = zone[0] - print("found zone {}".format(zone)) - - src = gdal.Open(inDem, gdalconst.GA_ReadOnly) - string = src.GetMetadata() - pixasarea = string["AREA_OR_POINT"] - if "AREA" in pixasarea: - print("Pixel as Area! 
Updating corner coordinates to pixel as point") - print("pixel upper northing (m): {} easting (m): {}".format(north, east)) - east = east + pix_east / 2.0 - north = north + pix_north / 2.0 - print("Update pixel upper northing (m): {} easting (m): {}".format(north, east)) - - pix_size = pix_east - print("approximate DEM latitude pixel spacing (m): {}".format(pix_size)) - - # Create the input file for create_dem_par - f = open(demParIn,"w") - f.write("UTM\n") - f.write("WGS84\n") - f.write("1\n") - f.write("{}\n".format(zone)) - f.write("{}\n".format(false_north)) - f.write("{}\n".format(basename)) - if "float" in dataType: - f.write("REAL*4\n") - elif "int16" in dataType: - f.write("INTEGER*2\n") - f.write("0.0\n") - f.write("1.0\n") - f.write("{}\n".format(xsize)) - f.write("{}\n".format(ysize)) - f.write("{} {}\n".format(pix_north,pix_east)) - f.write("{} {}\n".format(north,east)) - f.close() - - # Create a new dem par file - if os.path.isfile(demPar): - os.remove(demPar) - execute("create_dem_par {} < {}".format(demPar,demParIn),logfile=log) - - # Replace 0 with 1; Replace anything <= -32767 with 0; byteswap - data[data==0] = 1 - data[data<=-32767] = 0 - data = data.byteswap() - - # Convert to ENVI (binary) format - tmptif = "temporary_dem_file.tif" - if "float" in dataType: - saa.write_gdal_file_float(tmptif,trans,proj,data.astype(np.float32)) - elif "int16" in dataType: - saa.write_gdal_file(tmptif,trans,proj,data) - gdal.Translate(outDem,tmptif,format="ENVI") - os.remove(tmptif) - os.remove(outDem + ".aux.xml") - filename, file_extension = os.path.splitext(outDem) - os.remove(outDem.replace(file_extension,".hdr")) - - -def main(): - """Main entrypoint""" - - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description=__doc__, - ) - parser.add_argument('utm_dem', help='name of GeoTIFF file (input)') - parser.add_argument('dem', help='DEM data (output)') - parser.add_argument('dempar', help='Gamma DEM parameter file (output)') - parser.add_argument('-t', '--dataType', help='Desired output data type (float or int16)', default='float') - args = parser.parse_args() - - if not os.path.exists(args.utm_dem): - parser.error(f'GeoTIFF file {args.utm_dem} does not exist!') - - utm2dem(args.utm_dem, args.dem, args.dempar, args.dataType) - - -if __name__ == '__main__': - main() diff --git a/setup.py b/setup.py index f3794671..57d06a07 100644 --- a/setup.py +++ b/setup.py @@ -75,7 +75,6 @@ entry_points={'console_scripts': [ 'byteSigmaScale.py = hyp3lib.byteSigmaScale:main', 'createAmp.py = hyp3lib.createAmp:main', - 'cutGeotiffs.py = hyp3lib.cutGeotiffs:main', 'get_asf.py = hyp3lib.get_asf:main', 'get_orb.py = hyp3lib.get_orb:main', 'makeAsfBrowse.py = hyp3lib.makeAsfBrowse:main', @@ -84,7 +83,6 @@ 'resample_geotiff.py = hyp3lib.resample_geotiff:main', 'rtc2color.py = hyp3lib.rtc2color:main', 'SLC_copy_S1_fullSW.py = hyp3lib.SLC_copy_S1_fullSW:main', - 'utm2dem.py = hyp3lib.utm2dem:main', ] }, diff --git a/tests/test_entrypoints.py b/tests/test_entrypoints.py index 1d903df6..6ec4035f 100644 --- a/tests/test_entrypoints.py +++ b/tests/test_entrypoints.py @@ -21,11 +21,6 @@ def test_createAmp(script_runner): assert ret.success -def test_cutGeotiffs(script_runner): - ret = script_runner.run('cutGeotiffs.py', '-h') - assert ret.success - - def test_get_asf(script_runner): ret = script_runner.run('get_asf.py', '-h') assert ret.success @@ -64,8 +59,3 @@ def test_rtc2color(script_runner): def test_SLC_copy_S1_fullSW(script_runner): ret = script_runner.run('SLC_copy_S1_fullSW.py', 
'-h') assert ret.success - - -def test_utm2dem(script_runner): - ret = script_runner.run('utm2dem.py', '-h') - assert ret.success From defa789bd5344153f5648f3bef18b7a6992debbf Mon Sep 17 00:00:00 2001 From: Forrest Williams Date: Fri, 5 Jan 2024 07:15:36 -0600 Subject: [PATCH 07/10] remove get_asf --- setup.py | 1 - tests/test_entrypoints.py | 5 ----- 2 files changed, 6 deletions(-) diff --git a/setup.py b/setup.py index 57d06a07..e37d15ae 100644 --- a/setup.py +++ b/setup.py @@ -75,7 +75,6 @@ entry_points={'console_scripts': [ 'byteSigmaScale.py = hyp3lib.byteSigmaScale:main', 'createAmp.py = hyp3lib.createAmp:main', - 'get_asf.py = hyp3lib.get_asf:main', 'get_orb.py = hyp3lib.get_orb:main', 'makeAsfBrowse.py = hyp3lib.makeAsfBrowse:main', 'make_cogs.py = hyp3lib.make_cogs:main', diff --git a/tests/test_entrypoints.py b/tests/test_entrypoints.py index 6ec4035f..c9d1de7e 100644 --- a/tests/test_entrypoints.py +++ b/tests/test_entrypoints.py @@ -21,11 +21,6 @@ def test_createAmp(script_runner): assert ret.success -def test_get_asf(script_runner): - ret = script_runner.run('get_asf.py', '-h') - assert ret.success - - def test_get_orb(script_runner): ret = script_runner.run('get_orb.py', '-h') assert ret.success From a751daf157f4970af898b3117f1725aab3c9fba6 Mon Sep 17 00:00:00 2001 From: Forrest Williams Date: Fri, 5 Jan 2024 07:17:39 -0600 Subject: [PATCH 08/10] remove metadata --- hyp3lib/metadata.py | 31 ------------------------------- 1 file changed, 31 deletions(-) delete mode 100644 hyp3lib/metadata.py diff --git a/hyp3lib/metadata.py b/hyp3lib/metadata.py deleted file mode 100644 index c6b47bf6..00000000 --- a/hyp3lib/metadata.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Utilities for metadata manipulation""" - -import os -import re -from datetime import datetime -from pathlib import Path -from typing import Union - -from hyp3lib import GranuleError - - -def add_esa_citation(granule: str, directory: Union[str, Path]): - """Add an ESA citation file for S1 Granules - - Args: - granule: The name of the granule - directory: The directory to add the citation file to - """ - - if not granule.startswith('S1'): - raise GranuleError(f'ESA citation only valid for S1 granules, not: {granule}') - - current_year = datetime.now().year - try: - timestamp = re.search(r'\d{8}T\d{6}', granule)[0] - aq_year = datetime.strptime(timestamp, '%Y%m%dT%H%M%S').year - except (TypeError, ValueError): - raise GranuleError(f'Unable to determine acquisition year from: {granule}') - - with open(os.path.join(directory, 'ESA_citation.txt'), 'w') as f: - f.write(f'ASF DAAC {current_year}, contains modified Copernicus Sentinel data {aq_year}, processed by ESA.\n') From e3d732fbb7943fabd942c4cce8cf3284db1d3ffd Mon Sep 17 00:00:00 2001 From: Forrest Williams Date: Fri, 5 Jan 2024 07:19:40 -0600 Subject: [PATCH 09/10] remove metadata test --- tests/data/test_metadata.py | 31 ------------------------------- 1 file changed, 31 deletions(-) delete mode 100644 tests/data/test_metadata.py diff --git a/tests/data/test_metadata.py b/tests/data/test_metadata.py deleted file mode 100644 index e4538a55..00000000 --- a/tests/data/test_metadata.py +++ /dev/null @@ -1,31 +0,0 @@ -import pytest - -from hyp3lib import GranuleError, metadata - - -def test_add_esa_citation(tmp_path): - granule = 'S1B_IW_GRDH_1SDV_20191005T151525_20191005T151554_018342_0228D3_656F' - - metadata.add_esa_citation(granule, tmp_path) - - assert (tmp_path / 'ESA_citation.txt').is_file() - - -def test_add_esa_citation_bad_granule_type(tmp_path): - granule = 'boogers' - 
-    with pytest.raises(GranuleError) as execinfo:
-        metadata.add_esa_citation(granule, tmp_path)
-
-    assert 'ESA citation only valid for S1 granules' in str(execinfo.value)
-
-
-def test_add_esa_citation_bad_granule_time(tmp_path):
-    granule = 'S1B_IW_GRDH_1SDV_boogers'
-
-    with pytest.raises(GranuleError) as execinfo:
-        metadata.add_esa_citation(granule, tmp_path)
-
-    print(execinfo.value)
-
-    assert 'Unable to determine acquisition year' in str(execinfo.value)

From 3c20a509477b2f93b179777b9035e9534669e500 Mon Sep 17 00:00:00 2001
From: Forrest Williams
Date: Fri, 5 Jan 2024 07:39:05 -0600
Subject: [PATCH 10/10] re-add par_s1_slc_single

---
 hyp3lib/par_s1_slc_single.py | 95 ++++++++++++++++++++++++++++++++++++
 1 file changed, 95 insertions(+)
 create mode 100755 hyp3lib/par_s1_slc_single.py

diff --git a/hyp3lib/par_s1_slc_single.py b/hyp3lib/par_s1_slc_single.py
new file mode 100755
index 00000000..0b973fc5
--- /dev/null
+++ b/hyp3lib/par_s1_slc_single.py
@@ -0,0 +1,95 @@
+import glob
+import logging
+import os
+
+from hyp3lib import OrbitDownloadError
+from hyp3lib.execute import execute
+from hyp3lib.getParameter import getParameter
+from hyp3lib.get_orb import downloadSentinelOrbitFile
+
+
+def make_cmd(swath, acquisition_date, out_dir, pol=None):
+    """Assemble the par_S1_SLC GAMMA command
+
+    Args:
+        swath: Swath to process
+        acquisition_date: The acquisition date of the SLC imagery
+        out_dir: Where to output the GAMMA formatted files
+        pol: polarization (e.g., 'vv')
+    """
+    if pol is None:
+        m = glob.glob(f'measurement/s1*-iw{swath}*')[0]
+        n = glob.glob(f'annotation/s1*-iw{swath}*')[0]
+        o = glob.glob(f'annotation/calibration/calibration-s1*-iw{swath}*')[0]
+        p = glob.glob(f'annotation/calibration/noise-s1*-iw{swath}*')[0]
+    else:
+        m = glob.glob(f'measurement/s1*-iw{swath}*{pol}*')[0]
+        n = glob.glob(f'annotation/s1*-iw{swath}*{pol}*')[0]
+        o = glob.glob(f'annotation/calibration/calibration-s1*-iw{swath}*{pol}*')[0]
+        p = glob.glob(f'annotation/calibration/noise-s1*-iw{swath}*{pol}*')[0]
+
+    cmd = f'par_S1_SLC {m} {n} {o} {p} {out_dir}/{acquisition_date}_00{swath}.slc.par ' \
+          f'{out_dir}/{acquisition_date}_00{swath}.slc {out_dir}/{acquisition_date}_00{swath}.tops_par'
+
+    return cmd
+
+
+def par_s1_slc_single(safe_dir, pol='vv', orbit_file=None):
+    """Pre-process S1 SLC imagery into GAMMA format SLCs
+
+    Args:
+        safe_dir: Sentinel-1 SAFE directory location
+        pol: polarization (e.g., 'vv')
+        orbit_file: Orbit file to use (will download a matching orbit file if None)
+    """
+    wrk = os.getcwd()
+    pol = pol.lower()
+
+    logging.info(f'Processing directory {safe_dir}')
+    image_type = safe_dir[13:16]
+    logging.info(f'Found image type {image_type}')
+
+    datelong = safe_dir.split('_')[5]
+    acquisition_date = datelong.split('T')[0]
+    path = os.path.join(wrk, acquisition_date)
+    if not os.path.exists(path):
+        os.mkdir(path)
+
+    logging.info(f'SAFE directory is {safe_dir}')
+    logging.info(f'Long date is {datelong}')
+    logging.info(f'Acquisition date is {acquisition_date}')
+
+    os.chdir(safe_dir)
+
+    for swath in range(1, 4):
+        cmd = make_cmd(swath, acquisition_date, path, pol=pol)
+        execute(cmd, uselogging=True)
+
+    os.chdir(path)
+
+    # Ingest the precision state vectors
+    try:
+        if orbit_file is None:
+            logging.info(f'Trying to get orbit file information from file {safe_dir}')
+            orbit_file, _ = downloadSentinelOrbitFile(safe_dir)
+        logging.info('Applying precision orbit information')
+        execute(f'S1_OPOD_vec {acquisition_date}_001.slc.par {orbit_file}', uselogging=True)
+        execute(f'S1_OPOD_vec {acquisition_date}_002.slc.par {orbit_file}', uselogging=True)
+        execute(f'S1_OPOD_vec {acquisition_date}_003.slc.par {orbit_file}', uselogging=True)
+    except OrbitDownloadError:
+        logging.warning('Unable to fetch precision state vectors... continuing')
+
+    slc = glob.glob('*_00*.slc')
+    slc.sort()
+    par = glob.glob('*_00*.slc.par')
+    par.sort()
+    top = glob.glob('*_00*.tops_par')
+    top.sort()
+    with open(os.path.join(path, 'SLC_TAB'), 'w') as f:
+        for i in range(len(slc)):
+            f.write(f'{slc[i]} {par[i]} {top[i]}\n')
+
+    # Make a raster version of swath 3
+    width = getParameter(f'{acquisition_date}_003.slc.par', 'range_samples')
+    execute(f'rasSLC {acquisition_date}_003.slc {width} 1 0 50 10')
+    os.chdir(wrk)
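
For reference, a minimal usage sketch of the re-added module (the granule name
below is a made-up placeholder, and the GAMMA tools par_S1_SLC, S1_OPOD_vec,
and rasSLC are assumed to be on PATH):

    from hyp3lib.par_s1_slc_single import par_s1_slc_single

    # Hypothetical granule name; any unzipped Sentinel-1 IW SLC SAFE directory
    # under the current working directory should work.
    safe_dir = 'S1A_IW_SLC__1SDV_20240101T000000_20240101T000027_051954_062E15_A1B2.SAFE'

    # Writes GAMMA-format .slc/.slc.par/.tops_par files and an SLC_TAB into a
    # directory named for the acquisition date (here, 20240101), downloading a
    # matching precision orbit file when orbit_file is None.
    par_s1_slc_single(safe_dir, pol='vv', orbit_file=None)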