Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Init of basic snap (BEAM-DIMAP) ingest functionality #101

Merged
merged 7 commits into from
Jun 19, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 31 additions & 22 deletions mintpy/load_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,36 +52,42 @@
auto_path.gammaAutoPath)

TEMPLATE = """template:
########## 1. Load Data (--load to exit after this step)
########## 1. Load Data
## auto - automatic path pattern for Univ of Miami file structure
## load_data.py -H to check more details and example inputs.
mintpy.load.processor = auto #[isce,roipac,gamma,], auto for isce
## compression to save disk usage for ifgramStack.h5 file:
## no - save 0% disk usage, fast [default]
## lzf - save ~57% disk usage, relative slow
## gzip - save ~62% disk usage, very slow [not recommended]
mintpy.load.processor = auto #[isce,roipac,gamma,snap], auto for isce
mintpy.load.updateMode = auto #[yes / no], auto for yes, skip re-loading if HDF5 files are complete
mintpy.load.compression = auto #[gzip / lzf / no], auto for no [recommended].
##---------for ISCE only:
mintpy.load.compression = auto #[gzip / lzf / no], auto for no.
##---------metadata (for ISCE and SNAP):
## ./master/IW1.xml for ISCE/topsStack
## ./masterShelve/data.dat for ISCE/stripmapStack
## date1_date2_unw_tc.dim for SNAP
mintpy.load.metaFile = auto #[path2metadata_file]
mintpy.load.baselineDir = auto #[path2baseline_dir]
##---------baseline directory (for ISCE):
mintpy.load.baselineDir = auto #[path2baseline_dir], i.e.: ./baselines
##---------interferogram datasets:
mintpy.load.unwFile = auto #[path2unw_file]
mintpy.load.corFile = auto #[path2cor_file]
mintpy.load.connCompFile = auto #[path2conn_file]
mintpy.load.intFile = auto #[path2int_file]
mintpy.load.ionoFile = auto #[path2iono_file]
mintpy.load.connCompFile = auto #[path2conn_file], optional
mintpy.load.intFile = auto #[path2int_file], optional
mintpy.load.ionoFile = auto #[path2iono_file], optional
##---------geometry datasets:
mintpy.load.demFile = auto #[path2hgt_file]
mintpy.load.lookupYFile = auto #[path2lat_file]]
mintpy.load.lookupXFile = auto #[path2lon_file]
mintpy.load.incAngleFile = auto #[path2los_file]
mintpy.load.azAngleFile = auto #[path2los_file]
mintpy.load.shadowMaskFile = auto #[path2shadow_file]
mintpy.load.waterMaskFile = auto #[path2water_mask_file]
mintpy.load.bperpFile = auto #[path2bperp_file]

## 1.1 Subset (optional)
mintpy.load.lookupYFile = auto #[path2lat_file], not required for geocoded data
mintpy.load.lookupXFile = auto #[path2lon_file], not required for geocoded data
mintpy.load.incAngleFile = auto #[path2los_file], optional
mintpy.load.azAngleFile = auto #[path2los_file], optional
mintpy.load.shadowMaskFile = auto #[path2shadow_file], optional
mintpy.load.waterMaskFile = auto #[path2water_mask_file], optional
mintpy.load.bperpFile = auto #[path2bperp_file], optional
##---------subset (optional):
## if both yx and lalo are specified, use lalo option unless a) no lookup file AND b) dataset is in radar coord
mintpy.subset.lalo = auto #[31.5:32.5,130.5:131.0 / no], auto for no
mintpy.subset.yx = auto #[1800:2000,700:800 / no], auto for no
mintpy.subset.tightBox = auto #[yes / no], auto for yes, tight bounding box for files in geo coord
mintpy.subset.yx = auto #[1800:2000,700:800 / no], auto for no
mintpy.subset.lalo = auto #[31.5:32.5,130.5:131.0 / no], auto for no
"""

NOTE = """NOTE:
Expand Down Expand Up @@ -111,7 +117,7 @@ def create_parser():
parser.add_argument('--project', type=str, dest='PROJECT_NAME',
help='project name of dataset for INSARMAPS Web Viewer')
parser.add_argument('--processor', type=str, dest='processor',
choices={'isce', 'roipac', 'gamma', 'doris', 'gmtsar'},
choices={'isce', 'roipac', 'gamma', 'doris', 'gmtsar', 'snap'},
help='InSAR processor/software of the file', default='isce')
parser.add_argument('--enforce', '-f', dest='updateMode', action='store_false',
help='Disable the update mode, or skip checking dataset already loaded.')
Expand Down Expand Up @@ -343,6 +349,9 @@ def read_inps_dict2geometry_dict_object(inpsDict):
elif inpsDict['processor'] in ['roipac', 'gamma']:
datasetName2templateKey.pop('latitude')
datasetName2templateKey.pop('longitude')
elif inpsDict['processor'] in ['snap']:
# check again when there is a SNAP product in radar coordinates
pass
else:
print('Un-recognized InSAR processor: {}'.format(inpsDict['processor']))

Expand Down Expand Up @@ -452,7 +461,7 @@ def prepare_metadata(inpsDict):
print('-'*50)
print('prepare metadata files for {} products'.format(processor))

if processor in ['gamma', 'roipac']:
if processor in ['gamma', 'roipac', 'snap']:
for key in [i for i in inpsDict.keys() if (i.startswith('mintpy.load.') and i.endswith('File'))]:
if len(glob.glob(str(inpsDict[key]))) > 0:
cmd = '{} {}'.format(script_name, inpsDict[key])
Expand Down
286 changes: 286 additions & 0 deletions mintpy/prep_snap.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,286 @@
#!/usr/bin/env python3
############################################################
# Program is part of MintPy #
# Copyright(c) 2019, Andre Theron, Zhang Yunjun #
# Author: Andre Theron (andretheronsa@gmail.com), Jun 2019 #
############################################################

import os
import argparse
import datetime
import math
from mintpy.utils import readfile, writefile, utils as ut


EXAMPLE = """example:
prep_snap.py ./20190101_20190112/20190101_20190112_filt_tc.dim
prep_snap.py ./*/*filt_tc*.dim
"""

DESCRIPTION = """
For each interferogram, coherence or unwrapped .dim product this script will prepare .rsc
metadata files for MintPy based on the .dim metadata file.

The SNAP .dim file should contain all the required sensor / baseline metadata needed.
The baseline metadata gets written during snap back-geocoding (co-registration).
prep_snap is run separately for unw/ifg/cor files so it needs separate .dim/.data products
with only the relevant band in each product. Use Band Subset > save BEAM-DIMAP file.

The file name should be yyyymmdd_yyyymmdd_type_tc.dim where type can be filt/unw/coh.

The DEM should be prepared by adding an elevation file to a coregistered product -
then extract the elevation band only. Use Band Subset > save BEAM-DIMAP file

Currently only works for geocoded (terrain correction step in SNAP) interferograms.

Metadata extraction from .dim was built around products generated via the following
workflow:
- Read
- Slice assembly (if required)
- Apply orbit file
- Split product (extract relevant polarisation and subswath swaths)
  + Following is done per subswath [IW1, IW2, IW3]
  - Back geocoding
  - Enhanced spectral diversity (if more than one burst)
  - Interferogram generation
  - TOPSAR Deburst
  - Topophase removal
  - Goldstein phase filtering
- Merge subswaths (if more than one swath was done)
- Add elevation
- Coherence
- Subset geometry and all separate bands (elevation, ifg, coh) (by geometry only possible after working with bursts)
- Snaphu export for ifg
- SNAPHU phase unwrapping
- Snaphu import
- Terrain correct (all products)
"""

def create_parser():
    """Build and return the command line argument parser for prep_snap.py."""
    parser = argparse.ArgumentParser(description='Prepare attributes file for SNAP products.\n'+DESCRIPTION,
                                     formatter_class=argparse.RawTextHelpFormatter,
                                     epilog=EXAMPLE)

    parser.add_argument('file', nargs='+', help='SNAP dim file(s)')
    # typo fix in help text: "Diabled auto" -> "Disabled automatically"
    parser.add_argument('--no-parallel', dest='parallel', action='store_false', default=True,
                        help='Disable parallel processing. Disabled automatically for 1 input file.')
    return parser


def cmd_line_parse(iargs=None):
    """Parse command line arguments and validate the input file extension.

    Parameters: iargs : list of str, command line arguments (None to use sys.argv)
    Returns:    inps  : argparse.Namespace, with inps.file expanded to absolute paths
    Raises:     ValueError if the first input file is not a .dim file
    """
    inps = create_parser().parse_args(args=iargs)
    inps.file = ut.get_file_list(inps.file, abspath=True)

    # only BEAM-DIMAP metadata files are supported
    supported_exts = ['.dim']
    file_ext = os.path.splitext(inps.file[0])[1]
    if file_ext not in supported_exts:
        raise ValueError('unsupported input file extension: {}'
                         '\nsupported file extensions: {}'.format(file_ext, supported_exts))
    # Check that input dim is interferogram
    return inps

def get_ellpsoid_local_radius(xyz):
    """Calculate satellite height and ellipsoid local radius from an orbital state vector.

    Parameters: xyz : tuple of 3 float, ECEF position of the satellite [m]
    Returns:    height       : float, satellite height above the WGS84 ellipsoid [m]
                earth_radius : float, local ellipsoid radius at the sub-satellite point [m]
    Reference:  isce2.isceobj.Planet
    """
    # WGS84 constants - from ISCE2.isceobj.Planet.AstronomicalHandbook
    a = 6378137.000       # semi-major axis [m]
    e2 = 0.0066943799901  # eccentricity squared
    a2 = a**2
    e4 = e2**2

    # Closed-form ECEF -> geodetic conversion
    # (simplified from ISCE2.isceobj.Planet.xyz_to_llh; longitude is not needed here)
    xy2 = xyz[0]**2 + xyz[1]**2
    p = (xy2)/a2
    q = (1.-e2)*xyz[2]**2/a2
    r = (p+q-e4)/6.
    s = e4*p*q/(4.*r**3)
    t = (1.+s+math.sqrt(s*(2.+s)))**(1./3.)
    u = r*(1.+t+1./t)
    v = math.sqrt(u**2+e4*q)
    w = e2*(u+v-q)/(2.*v)
    k = math.sqrt(u+v+w**2)-w
    D = k*math.sqrt(xy2)/(k+e2)
    lat = math.atan2(xyz[2], D)                               # geodetic latitude [rad]
    height = (k+e2-1.)*math.sqrt(D**2+xyz[2]**2)/k            # height above ellipsoid [m]

    # Local radius at the geocentric latitude
    # (code from ISCE2.isceobj.Ellipsoid.localRadius)
    b = a * (1.0-e2)**0.5
    latg = math.atan(math.tan(lat)*a**2/b**2)
    arg = math.cos(latg)**2/a**2 + math.sin(latg)**2/b**2
    earth_radius = 1.0/math.sqrt(arg)

    return height, earth_radius


# Parse .dim file
def utm_dim_to_rsc(fname):
    '''Read and extract attributes from a SNAP .dim file and create mintpy .rsc attributes.

    Inputs:
        fname: SNAP .dim interferogram/coherence/unwrapped filepath
    Outputs:
        atr : dict, Attributes dictionary
    '''

    # Read XML and extract required vals - using basic file reader
    # xmltree or minidom might be better but this works
    with open(fname) as f:
        lines = f.readlines()
    # use lists so that elements where there are more than one, only the first mention can be extracted -
    # Usually the first mention (list item 0) is the final subsetted/geocoded product metadata
    bp, azimuth_looks, range_looks, az_pixel, rg_pixel, dates, x, y, z = [], [], [], [], [], [], [], [], []
    for line in lines:
        if "Perp Baseline" in line:
            bp.append(line.split(">")[1].split("<")[0])
        if "Master: " in line:
            dates.append(line.split(": ")[1].split('">')[0])
        if "radar_frequency" in line:
            # wavelength [m] = speed of light / radar frequency (frequency read in MHz)
            rf = 299.792458 / float(line.split(">")[1].split("<")[0])
        if "NROWS" in line:
            length = int(line.split(">")[1].split("<")[0])
        if "NCOLS" in line:
            width = int(line.split(">")[1].split("<")[0])
        if "DATA_TYPE" in line:
            data_type = line.split(">")[1].split("<")[0]
        if "IMAGE_TO_MODEL_TRANSFORM" in line:
            # 6-element affine geotransform: pixel sizes and upper-left map coordinates
            transform = str(line.split(">")[1].split("<")[0]).split(",")
        if "antenna_pointing" in line:
            # NOTE(review): antenna_side stays undefined for left-looking sensors - confirm
            looking = line.split(">")[1].split("<")[0]
            if looking == "right":
                antenna_side = "-1"
        if "PRODUCT_SCENE_RASTER_START_TIME" in line:
            first_time = line.split(">")[1].split("<")[0]
        if "PRODUCT_SCENE_RASTER_STOP_TIME" in line:
            last_time = line.split(">")[1].split("<")[0]
        if "first_near_lat" in line:
            first_near_lat = line.split(">")[1].split("<")[0]
        if "first_near_long" in line:
            first_near_long = line.split(">")[1].split("<")[0]
        if "first_far_lat" in line:
            first_far_lat = line.split(">")[1].split("<")[0]
        if "first_far_long" in line:
            first_far_long = line.split(">")[1].split("<")[0]
        if "last_near_lat" in line:
            last_near_lat = line.split(">")[1].split("<")[0]
        if "last_near_long" in line:
            last_near_long = line.split(">")[1].split("<")[0]
        if "last_far_lat" in line:
            last_far_lat = line.split(">")[1].split("<")[0]
        if "last_far_long" in line:
            last_far_long = line.split(">")[1].split("<")[0]
        if "ASCENDING or DESCENDING" in line:
            direction = line.split(">")[1].split("<")[0]
        if "azimuth_looks" in line:
            azimuth_looks.append(line.split(">")[1].split("<")[0])
        if "range_looks" in line:
            range_looks.append(line.split(">")[1].split("<")[0])
        if "pulse_repetition_frequency" in line:
            prf = line.split(">")[1].split("<")[0]
        if "slant_range_to_first_pixel" in line:
            starting_range = line.split(">")[1].split("<")[0]
        if "Satellite mission" in line:
            platform = line.split(">")[1].split("<")[0]
        if "platformHeading" in line:
            heading = line.split(">")[1].split("<")[0]
        if "Azimuth sample spacing" in line:
            az_pixel.append(line.split(">")[1].split("<")[0])
        if "Range sample spacing" in line:
            rg_pixel.append(line.split(">")[1].split("<")[0])
        if "incidenceAngleMidSwath" in line:
            inc_angle_mid = line.split(">")[1].split("<")[0]
        if '"x"' in line:
            x.append(line.split(">")[1].split("<")[0])
        if '"y"' in line:
            y.append(line.split(">")[1].split("<")[0])
        if '"z"' in line:
            z.append(line.split(">")[1].split("<")[0])

    # Do datetime things needed after the loop
    # Calculate the center of the scene as floating point seconds of day
    first_time_dt = datetime.datetime.strptime(first_time, '%d-%b-%Y %H:%M:%S.%f')
    last_time_dt = datetime.datetime.strptime(last_time, '%d-%b-%Y %H:%M:%S.%f')
    center_time_dt = first_time_dt + ((last_time_dt - first_time_dt) / 2)
    center_time_s = float(datetime.timedelta(
        hours=center_time_dt.hour,
        minutes=center_time_dt.minute,
        seconds=center_time_dt.second,
        microseconds=center_time_dt.microsecond).total_seconds())
    # Extract master slave date in yymmdd format
    master_dt = datetime.datetime.strptime(dates[0], "%d%b%Y")
    master = master_dt.strftime("%Y%m%d")[2:8]
    slave_dt = datetime.datetime.strptime(dates[1], "%d%b%Y")
    slave = slave_dt.strftime("%Y%m%d")[2:8]

    xyz = (float(x[0]), float(y[0]), float(z[0]))  # Using first state vector for simplicity
    height, earth_radius = get_ellpsoid_local_radius(xyz)

    # Calculate range/azimuth pixel sizes
    # BUGFIX: incidenceAngleMidSwath is stored in degrees in the .dim file;
    # convert to radians before taking the sine
    range_pixel_size = float(rg_pixel[0]) * math.sin(math.radians(float(inc_angle_mid)))
    azimuth_pixel_size = float(az_pixel[0])*((earth_radius + height)/earth_radius)

    # Add values to dict
    atr = {}
    atr['PROCESSOR'] = 'snap'
    atr['FILE_TYPE'] = os.path.basename(fname).split('_')[-2]  # yyyymmdd_yyyymmdd_type_tc.dim
    atr["WAVELENGTH"] = rf
    # NOTE(review): bp[1] (second mention), not bp[0], is used here - confirm against SNAP output
    atr["P_BASELINE_TOP_HDR"], atr["P_BASELINE_BOTTOM_HDR"] = bp[1], bp[1]
    atr["DATE12"] = str(master) + "-" + str(slave)
    atr["WIDTH"] = width
    atr["LENGTH"], atr["FILE_LENGTH"] = length, length
    atr["DATA_TYPE"] = data_type
    atr["X_STEP"], atr["Y_STEP"] = transform[0], transform[3]
    atr["X_FIRST"], atr["Y_FIRST"] = transform[4], transform[5]
    atr["ANTENNA_SIDE"] = antenna_side
    atr["CENTER_LINE_UTC"] = center_time_s
    atr["LAT_REF1"], atr["LONG_REF1"] = first_near_lat, first_near_long
    atr["LAT_REF2"], atr["LONG_REF2"] = first_far_lat, first_far_long
    atr["LAT_REF3"], atr["LONG_REF3"] = last_near_lat, last_near_long
    atr["LAT_REF4"], atr["LONG_REF4"] = last_far_lat, last_far_long
    atr["ORBIT_DIRECTION"] = direction
    atr["ALOOKS"], atr["RLOOKS"] = int(float(azimuth_looks[0])), int(float(range_looks[0]))
    atr["PRF"] = prf
    atr["STARTING_RANGE"] = starting_range
    atr["PLATFORM"] = platform
    atr["HEADING"] = heading
    atr["EARTH_RADIUS"] = earth_radius
    atr["HEIGHT"] = height
    atr["RANGE_PIXEL_SIZE"] = range_pixel_size
    atr["AZIMUTH_PIXEL_SIZE"] = azimuth_pixel_size

    return atr

# Write to rsc file
def write_rsc(fname, atr):
    """Write the attribute dict into a ROI_PAC style .rsc file next to fname.

    Skips the write when the existing .rsc already contains every attribute,
    so repeated runs do not touch the file.
    Parameters: fname : str, path of the SNAP data file
                atr   : dict, attributes to write
    Returns:    basic_rsc_file : str, path of the output .rsc file
    """
    basic_rsc_file = fname+'.rsc'
    # read any pre-existing metadata; treat unreadable/missing file as empty
    # (narrowed from a bare `except:` which would also swallow KeyboardInterrupt)
    try:
        atr_orig = readfile.read_roipac_rsc(basic_rsc_file)
    except Exception:
        atr_orig = dict()
    if not set(atr.items()).issubset(set(atr_orig.items())):
        # new attributes take precedence over the old ones
        atr_out = {**atr_orig, **atr}
        print('Extracting metadata from {} into {} '.format(os.path.basename(fname),
                                                            os.path.basename(basic_rsc_file)))
        writefile.write_roipac_rsc(atr_out, out_file=basic_rsc_file)
    return basic_rsc_file

##################################################################################################
def main(iargs=None):
    """Entry point: generate a .rsc metadata file for every input SNAP .dim product."""
    # NOTE(review): the --no-parallel flag is parsed but not used here - confirm intent
    inps = cmd_line_parse(iargs)
    for fname in inps.file:
        print("Running prep_snap for file: {}".format(fname))
        write_rsc(fname, utm_dim_to_rsc(fname))


##################################################################################################
if __name__ == "__main__":
    main()