if __name__ == "__main__":
    import platform, sys, os
    import xarray as xr
    import numpy as np
    import pandas as pd
    from dask.distributed import Client
    import warnings
    warnings.filterwarnings('ignore')
    # plotting modules
    import hvplot.pandas  # noqa
    import hvplot.xarray  # noqa
    import holoviews as hv
    from IPython.display import Image
    from pygmtsar import SBAS
    import rasterio
    import rioxarray
    import zipfile
    import glob

    # processing parameters
    ORBIT = 'D'
    WORKDIR = 'sen1Proc'
    DATADIR = 'sen1_extracted'
    DEMFILE = None
    BASEDAYS = 100
    BASEMETERS = 150

    # start a local dask cluster for parallel processing
    client = Client()
    print(client)

    sen1_list = glob.glob(DATADIR + '/*.zip')

    # loop through the zip files and extract only the subswath 1 VV SLC data
    # (kept commented out: DATADIR already holds the extracted scenes)
    #for zip_file in sen1_list:
    #    try:
    #        filename = os.path.basename(zip_file).split('.')[0] + '.SAFE'
    #        sen_type = os.path.basename(zip_file).split('_')[0]
    #        print(filename)
    #        print(sen_type.lower())
    #        subswath = 1
    #        with zipfile.ZipFile(zip_file, 'r') as file:
    #            name = f'{sen_type.lower()}-iw{subswath}-slc-vv'
    #            tiff = os.path.join(filename, 'measurement', name)
    #            xml = os.path.join(filename, 'annotation', name)
    #            for i in file.infolist():
    #                if not i.is_dir() and i.filename.startswith(tiff):
    #                    file.extract(i, path=DATADIR)
    #                    print(i)
    #            for i in file.infolist():
    #                if not i.is_dir() and i.filename.startswith(xml):
    #                    file.extract(i, path=DATADIR)
    #                    print(i)
    #    except Exception:
    #        print('Error occurred while unzipping: {}'.format(zip_file))

    # initialize the SBAS object; the master scene is auto-selected
    #MASTER = '2018-02-15'
    #sbas = SBAS(DATADIR, DEMFILE, basedir=WORKDIR).set_master(MASTER)
    sbas = SBAS(DATADIR, DEMFILE, basedir=WORKDIR)
    sbas.to_dataframe()
    sbas.download_orbits()
    sbas.download_dem(backend='GMT', resolution_meters=90)
    sbas.stack_parallel(n_jobs=6)
    baseline_pairs = sbas.baseline_pairs(days=BASEDAYS, meters=BASEMETERS)
    sbas.topo_ra_parallel()
    topo_ra = sbas.get_topo_ra()
    pairs = baseline_pairs[['ref_date', 'rep_date']]
    print(pairs)

    # SRTM3 DEM resolution is 3 arcsec (~90 m), but decimate to 60 m
    # to follow the 512x512 NetCDF chunk size
    decimator = sbas.pixel_decimator(resolution_meters=60, debug=True)
    # default parameters: wavelength=200, psize=32, func=None (no postprocessing)
    sbas.intf_parallel(pairs, wavelength=400, func=decimator, n_jobs=6)
    # n_jobs limited to 1 by a bug in GMTSAR
    sbas.merge_parallel(pairs, n_jobs=1)
    sbas.to_dataframe()
    sbas.geocode_parallel(pairs)
    phasefilts = sbas.open_grids(pairs, 'phasefilt')
    phasefilts_ll = sbas.open_grids(pairs, 'phasefilt', geocode=True)
    corrs = sbas.open_grids(pairs, 'corr')

    # unwrap interferograms
    CORRLIMIT = 0.075
    print('UNWRAP INTERFEROGRAMS')
    # fill NODATA gaps for valid pixels only
    interpolator = lambda corr, unwrap: sbas.nearest_grid(unwrap).where(corr >= 0)
    sbas.unwrap_parallel(pairs, threshold=CORRLIMIT, func=interpolator, n_jobs=6)
    unwraps = sbas.open_grids(pairs, 'unwrap', n_jobs=6)
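    # A minimal quality-control sketch, not part of the original workflow:
    # report mean coherence per interferogram pair so weak pairs can be spotted
    # before the detrend/SBAS inversion. It assumes 'corrs' is an xarray
    # DataArray with a 'pair' dimension, as returned by sbas.open_grids() above.
    spatial_dims = [d for d in corrs.dims if d != 'pair']
    mean_corr = corrs.mean(dim=spatial_dims)
    print('mean coherence per pair:')
    print(mean_corr.to_pandas().sort_values())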
    # detrend the unwrapped phase and calculate displacement (mm)
    print('DETREND AND CALCULATE DISPLACEMENT')
    sbas.detrend_parallel(pairs, wavelength=12000, n_jobs=6)
    sbas.sbas_parallel(pairs, n_jobs=6)
    unwraps_detrend = sbas.open_grids(pairs, 'detrend', n_jobs=6)
    dates = np.unique(pairs.values.flatten() if isinstance(pairs, pd.DataFrame) else pairs.flatten())
    # interpolate for the valid coherence area only
    valid_area = corrs.min('pair')
    def interp(da):
        return sbas.nearest_grid(da).where(~np.isnan(valid_area))
    # use the wrapper "interp" instead of "sbas.nearest_grid" in the post-processing functions list below
    disps = sbas.open_grids(dates, 'disp', func=[sbas.los_displacement_mm, interp], n_jobs=6)

    print('CREATE OUTPUT')
    disps_ll = sbas.open_grids(dates, 'disp', func=[sbas.los_displacement_mm, interp], geocode=True, n_jobs=6)
    # clean the 1st zero-filled displacement map for better visualization
    disps[0] = np.nan
    # calculate cumulative displacement like GMTSAR SBAS does
    disps_cum = disps.cumsum('date')
    print(type(disps_cum))
    # clean the 1st zero-filled displacement map for better visualization
    disps_cum[0] = np.nan
    # geocode the subsets on the full interferogram grid and crop the valid area only
    disps_cum_ll = sbas.cropna(sbas.intf_ra2ll(disps_cum))

    #displacement_filename = f'los_disp_mm_iw{SUBSWATH}_vv_{ORBIT}.nc'
    displacement_filename = 'brumadhino_cum_disp_full.nc'
    print('SAVING OUTPUT...')
    # build the NetCDF write task lazily, then compute it on the dask cluster
    write_job = disps_cum_ll.to_netcdf(displacement_filename, engine=sbas.engine, compute=False)
    write_job.compute()

    # reopen the NetCDF, attach CRS and spatial metadata, and export a GeoTIFF
    disps_netcdf = xr.open_dataset(displacement_filename)
    disps_netcdf.rio.write_crs(
        4326,
        inplace=True,
    ).rio.set_spatial_dims(
        x_dim="lon",
        y_dim="lat",
        inplace=True,
    ).rio.write_coordinate_system(inplace=True)
    disps_netcdf["__xarray_dataarray_variable__"].rio.to_raster('brumadhino_cum_disp_full.tif')
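    # A hedged extension sketch, not part of the original script: export one
    # GeoTIFF per acquisition date from the cumulative-displacement stack.
    # It assumes the 'date' dimension and the default variable name written by
    # to_netcdf() above; the per-date output file names are illustrative.
    cum_disp = disps_netcdf["__xarray_dataarray_variable__"]
    for i, date in enumerate(cum_disp.date.values):
        if i == 0:
            continue  # skip the first, NaN-filled layer
        datestr = pd.to_datetime(date).strftime('%Y%m%d')
        layer = cum_disp.isel(date=i).rio.set_spatial_dims(x_dim="lon", y_dim="lat")
        layer.rio.to_raster(f'brumadhino_cum_disp_{datestr}.tif')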