├── Filt.py ├── README.md ├── ariaTS.py ├── cleanup.py ├── coupledGreens.py ├── downLook.py ├── errorLog.txt ├── fitSine.py ├── gac_correction.py ├── invertRates.py ├── makeGamma0_SLC.py ├── makeIfg.py ├── makeMap.py ├── makeVRT.py ├── noiseModel.py ├── noiseModel_run.py ├── ps.py ├── rate_uncertainty.py ├── refDef.py ├── runAll.py ├── runSnaphu.py ├── setup_PyPS.py ├── setup_PyPS_ALOS.py ├── setup_UAVSAR.py ├── smartLookSLC.py ├── structure_function.py ├── util.py └── weeding.py /Filt.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Mon Jul 6 15:08:09 2020 5 | Filter incoherent areas into nan values for better unwrapping 6 | @author: kdm95 7 | """ 8 | 9 | import numpy as np 10 | import astropy 11 | import isceobj 12 | from matplotlib import pyplot as plt 13 | import cv2 14 | import os 15 | from astropy.io import fits 16 | from astropy.utils.data import get_pkg_data_filename 17 | from astropy.convolution import Gaussian2DKernel 18 | from scipy.signal import convolve as scipy_convolve 19 | from astropy.convolution import convolve 20 | 21 | 22 | gamThresh = .45 23 | connCompCompleteness = 0.9 24 | mincor = .5 25 | 26 | 27 | ps = np.load('./ps.npy',allow_pickle=True).all() 28 | 29 | gam = np.load('Npy/gam.npy') 30 | msk = np.load('Npy/msk.npy') 31 | corAvgMap = np.load('Npy/cor.npy') 32 | connSum = np.load('Npy/connSum.npy') 33 | 34 | 35 | # Make masks based on 4 criteria 36 | gamMsk = np.ones(gam.shape) 37 | gamMsk[gam < gamThresh] = 0 # [lines 38-69 lost to extraction: the remaining masks, the Gaussian2DKernel `kernel` used below, and the loop over ps.pairs that opens each wrapped ifg (ifgimg) and splits it into ifg_real/ifg_imag] 70 | msk2[np.where((gam>gamthresh) & (cor>0.35))] = 1 71 | # convert low gamma areas to nans 72 | ifg_real[msk2==0] = np.nan 73 | ifg_imag[msk2==0] = np.nan 74 | 75 | # Do the filtering 76 | 77 | 78 | # astropy's convolution replaces the NaN pixels with a kernel-weighted 79 | # interpolation from their neighbors 80 | astropy_conv_r = convolve(ifg_real, kernel) 81 | astropy_conv_i = convolve(ifg_imag, kernel) 82 | 83 | #Now add back in the good data 84 | astropy_conv_r[msk2==1] = ifg_real[msk2==1] 85 | astropy_conv_i[msk2==1] = ifg_imag[msk2==1] 86 | 87 | ifg_filt = astropy_conv_i*1j + astropy_conv_r 88 | ifg_filt[np.isnan(ifg_filt)] = 0 89 | 90 | out1 = ifgimg.copy('read') 91 | out1.filename = ps.intdir +'/' + pair + '/filt.int' 92 | out1.dump(out1.filename + '.xml') # Write out xml 93 | ifg_filt.tofile(out1.filename) # Write file out 94 | out1.renderHdr() 95 | out1.renderVRT() -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | No longer maintained. It is recommended to use Dolphin and MintPy instead. 2 | 3 | PyPS (Python Persistent Scatterer) scripts for processing InSAR time series. 4 | Author: Kyle Murray 5 | 6 | When using these scripts please cite: 7 | Murray, K. D., & Lohman, R. B. (2018). Short-lived pause in Central California subsidence after heavy winter precipitation of 2017. Science Advances, 4(8), eaar8144.
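These scripts share a parameter file, ps.npy, written by the setup scripts. A minimal sketch of how any of them is driven (attribute names such as dates, nxl, and nyl follow their use in the scripts below):

import numpy as np
ps = np.load('./ps.npy', allow_pickle=True).all()   # namespace of stack parameters
print(ps.dates[:3], ps.nxl, ps.nyl)                 # acquisition dates and looked grid size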
8 | -------------------------------------------------------------------------------- /ariaTS.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Fri Sep 2 13:03:45 2022 5 | 6 | @author: km 7 | """ 8 | import numpy as np 9 | import glob 10 | import os 11 | from datetime import date 12 | import isce.components.isceobj as isceobj 13 | import matplotlib.pyplot as plt 14 | import makeMap 15 | import cartopy.crs as ccrs 16 | from mroipac.looks.Looks import Looks 17 | from scipy.interpolate import griddata 18 | import cv2 19 | from scipy import signal 20 | import localParams 21 | import util 22 | import netCDF4 as nc 23 | import PyPS2.util 24 | from PyPS2.util import show 25 | 26 | fn = '/d/S1-GUNW-A-R-064-tops-20220606_20220513-015117-37215N_35340N-PP-8858-v2_0_4.nc' 27 | ds = nc.Dataset(fn) 28 | print(ds.__dict__) 29 | for var in ds.variables.values(): 30 | print(var) 31 | 32 | unw = ds['science']['grids']['data']['unwrappedPhase'] 33 | show(unw) 34 | -------------------------------------------------------------------------------- /cleanup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Thu Aug 11 12:14:22 2022 5 | 6 | To add more dates later, we need to keep the last six dates (at least for the 7 | safe files) as well as the reference date (ps.reference_date) 8 | 9 | 10 | To add more data to the stack you need the following: 11 | SLCS 12 | orbits 13 | DEM 14 | coreg_secondarys 15 | geom_reference 16 | secondarys 17 | reference 18 | 19 | Delete: 20 | run_files 21 | 22 | 23 | @author: km 24 | """ 25 | 26 | import os 27 | import time 28 | import glob 29 | import numpy as np 30 | 31 | ps = np.load('./ps.npy',allow_pickle=True).all() 32 | 33 | # It will keep the last six dates in secondarys and coreg_secondarys 34 | delList = ['SLCS', 35 | 'coarse_interferograms', 36 | 'coreg_secondarys', 37 | 'secondarys', 38 | 'ESD', 39 | 'misreg', 40 | 'orbits'] 41 | 42 | if False: 43 | coregSLCS = 'merged/SLC/2*/*.full' 44 | os.system('rm -r ' + coregSLCS) 45 | 46 | print('WARNING: About to delete: ')
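# A sketch of the retention rule implemented below (the newest six dates plus
# ps.reference_date survive; everything older is removed), assuming the date
# directories are named YYYYMMDD:
#   dates  = sorted(d.split('/')[-1] for d in glob.glob('./coreg_secondarys/*'))
#   keep   = set(dates[-6:]) | {ps.reference_date}
#   delete = [d for d in dates if d not in keep]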
47 | print(delList) 48 | kk = 5 49 | for ii in range(kk): 50 | print('\r' + str(kk), end=' ') 51 | kk-=1 52 | time.sleep(1) 53 | 54 | 55 | dates = [] 56 | datesFN = glob.glob('./coreg_secondarys/*') 57 | for d in datesFN: 58 | dates.append(d.split('/')[2]) 59 | dates.sort() 60 | 61 | print('keeping dates: ') 62 | print(dates[-6:]) 63 | 64 | 65 | if 'coreg_secondarys' in delList: 66 | print('removing coreg_secondarys') 67 | for d in dates[:-6]: 68 | if d != ps.reference_date: #Don't delete the reference date 69 | # print('removing ./coreg_secondarys/' + d) 70 | os.system('rm -r ./coreg_secondarys/' + d + '/*') 71 | 72 | if 'secondarys' in delList: 73 | print('removing secondarys') 74 | for d in dates[:-6]: 75 | if d != ps.reference_date: #Don't delete the reference date 76 | # print('removing ./secondarys/' + d) 77 | os.system('rm -r ./secondarys/' + d + '/*') 78 | 79 | if 'SLCS' in delList: 80 | print('removing safe files') 81 | safeList = glob.glob(ps.slc_dirname + '*zip') # make a list of all the safe files 82 | for ii,d in enumerate(dates[:-6]): # Loop through the dates 83 | for safe in safeList: # Delete all safe files with that date in the name 84 | if d in safe: 85 | if d != ps.reference_date: #Don't delete the reference date 86 | # print('removing ' + safe) 87 | os.remove(safe) 88 | 89 | if 'orbits' in delList: 90 | print('removing orbits') 91 | orbList = glob.glob('./orbits/*EOF') # make a list of all the orbit files 92 | for ii,d in enumerate(dates[:-6]): # Loop through the dates 93 | for orb in orbList: # Delete all orbit files with that date in the name 94 | if d in orb: 95 | if d != ps.reference_date: #Don't delete the reference date 96 | # print('removing ' + orb) 97 | os.remove(orb) 98 | 99 | # Remove the full res ifgs 100 | os.system('rm ./merged/interferograms/*/fine*') 101 | 102 | #os.system('rm ./merged/SLCS/*.full ./merged/SLCS/*.xml ./merged/SLCS/*.hdr ./merged/SLCS/*crop') 103 | os.system('rm merged/SLC/2*/fine_diff*') # delete unneeded files from makeGamma0_slc.py 104 | 105 | -------------------------------------------------------------------------------- /coupledGreens.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Mon Nov 11 09:39:00 2019 5 | Make greens functions G matrix for coupled inversions 6 | @author: kdm95 7 | """ 8 | import numpy as np 9 | 10 | # Evaluate the three Greens functions q, p, w: 11 | def get_radius(X,P): 12 | #GET_RADIUS Returns dx, dy, and radial distances between points in X and P 13 | # X: rows contain the 2-dimensional coordinates in X 14 | # P: rows contain the 2-dimensional coordinates in P 15 | 16 | if P.shape[0] == 2: 17 | n = X.shape[0] # n is number of points in X 18 | m = 1 # m is number of points in P 19 | dx = np.zeros((m,n)) 20 | dy = np.zeros((m,n)) 21 | 22 | for k in np.arange(0,m): 23 | dx[k,:] = np.array([X[:,0] - np.ones((n,)) * P[0]]) 24 | dy[k,:] = np.array([X[:,1] - np.ones((n,)) * P[1]]) 25 | else: 26 | n = X.shape[0] # n is number of points in X 27 | m = P.shape[0] # m is number of points in P 28 | dx = np.zeros((m,n)) 29 | dy = np.zeros((m,n)) 30 | for k in np.arange(0,m): # loop over all m points in P 31 | dx[k,:] = np.array([X[:,0] - np.ones((n,)) * P[k,0]]) 32 | dy[k,:] = np.array([X[:,1] - np.ones((n,)) * P[k,1]]) 33 | r = np.sqrt((dx**2 + dy**2)) 34 | return r.astype(np.float32),dx.astype(np.float32),dy.astype(np.float32) 35 | 36 | #r,dx,dy = get_radius(np.array([xi,yi]).T,np.array([xi,yi]).T) 37 | 38 | def get_qpw(X, P, dr,
nu): 39 | # Compute the Green's functions for all (X;P) combinations 40 | 41 | r,dx,dy = get_radius(X, P); 42 | logr = np.log(r+dr); # Add fudge dr term 43 | r2 = np.square((r+dr),dtype=np.float32) 44 | q = np.multiply((3 - nu), logr,dtype=np.float32) + np.divide((1+nu)*(dy**2),r2,dtype=np.float32) 45 | p = np.multiply((3 - nu), logr,dtype=np.float32) + np.divide((1+nu)*(dx**2),r2,dtype=np.float32) 46 | w = np.multiply((-(1+nu)*dx), (np.divide(dy,r2)),dtype=np.float32) 47 | return q,p,w 48 | 49 | 50 | def get_subpqw(X1,Y1,dim,nu,dr): 51 | # Now we're going to do the inversion in a moving window 52 | ii,jj=0,0 53 | X = X1[ii:ii+dim,jj:jj+dim] 54 | Y = Y1[ii:ii+dim,jj:jj+dim] 55 | xi,yi=X.ravel(),Y.ravel() 56 | n1 = len(xi) 57 | n2 = int(2*n1) 58 | # Evaluate the three Greens functions q, p, w: 59 | Xin = np.stack((xi, yi),axis=1) 60 | Pin = np.stack((xi, yi),axis=1) 61 | q, p, w = get_qpw(Xin, Pin, dr, nu) 62 | return X,Y,q,p,w,n1,n2 63 | 64 | def make_Glos(q,p,w,LosPath='/data/kdm95/WW/LOS.npy'): 65 | # this one doesn't solve for the Z component 66 | los1,los2,los3,los4 = np.load(LosPath) 67 | # Make design matrix Glos1 68 | col1 = los1[0]*q + los1[1]*w 69 | col2 = los1[0]*w + los1[1]*p 70 | # col3 = np.eye((col1.shape[0]))*los1[2] 71 | Glos1 = np.concatenate((col1,col2),axis=1) 72 | 73 | col1 = los2[0]*q + los2[1]*w 74 | col2 = los2[0]*w + los2[1]*p 75 | # col3 = np.eye((col1.shape[0]))*los2[2] 76 | Glos2 = np.concatenate((col1,col2),axis=1) 77 | 78 | col1 = los3[0]*q + los3[1]*w 79 | col2 = los3[0]*w + los3[1]*p 80 | # col3 = np.eye((col1.shape[0]))*los3[2] 81 | Glos3 = np.concatenate((col1,col2),axis=1) 82 | 83 | col1 = los4[0]*q + los4[1]*w 84 | col2 = los4[0]*w + los4[1]*p 85 | # col3 = np.eye((col1.shape[0]))*los4[2] 86 | Glos4 = np.concatenate((col1,col2),axis=1) 87 | return Glos1,Glos2,Glos3,Glos4 88 | 89 | def make_GlosZ(q,p,w,LosPath='/data/kdm95/WW/LOS.npy'): 90 | 91 | los1,los2,los3,los4 = np.load(LosPath) 92 | los5 = np.load('lUAV.npy') 93 | 94 | col1 = los1[0]*q + los1[1]*w 95 | col2 = los1[0]*w + los1[1]*p 96 | col3 = np.ones((col1.shape[0],1))*los1[2] 97 | 98 | Glos1 = np.concatenate((col1,col2,col3),axis=1) 99 | 100 | col1 = los2[0]*q + los2[1]*w 101 | col2 = los2[0]*w + los2[1]*p 102 | col3 = np.ones((col1.shape[0],1))*los2[2] 103 | Glos2 = np.concatenate((col1,col2,col3),axis=1) 104 | 105 | col1 = los3[0]*q + los3[1]*w 106 | col2 = los3[0]*w + los3[1]*p 107 | col3 = np.ones((col1.shape[0],1))*los3[2] 108 | Glos3 = np.concatenate((col1,col2,col3),axis=1) 109 | 110 | col1 = los4[0]*q + los4[1]*w 111 | col2 = los4[0]*w + los4[1]*p 112 | col3 = np.ones((col1.shape[0],1))*los4[2] 113 | Glos4 = np.concatenate((col1,col2,col3),axis=1) 114 | 115 | col1 = los5[0]*q + los5[1]*w 116 | col2 = los5[0]*w + los5[1]*p 117 | col3 = np.ones((col1.shape[0],1))*los5[2] 118 | GlosUAV = np.concatenate((col1,col2,col3),axis=1) 119 | 120 | return Glos1,Glos2,Glos3,Glos4,GlosUAV 121 | 122 | def make_GlosZ2(q,p,w,LosPath='/data/kdm95/WW/LOS.npy'): 123 | # this is if you're doing the inversion as a single step rather than windowed 124 | # unlike make_Glos, this one does solve for the Z component (identity block scaled by the LOS z term) 125 | los1,los2,los3,los4 = np.load(LosPath) 126 | # Make design matrix Glos1 127 | col1 = los1[0]*q + los1[1]*w 128 | col2 = los1[0]*w + los1[1]*p 129 | col3 = np.eye((col1.shape[0]))*los1[2] 130 | Glos1 = np.concatenate((col1,col2,col3),axis=1) 131 | 132 | col1 = los2[0]*q + los2[1]*w 133 | col2 = los2[0]*w + los2[1]*p 134 | col3 = np.eye((col1.shape[0]))*los2[2] 135 | Glos2 = np.concatenate((col1,col2,col3),axis=1)
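# Hedged usage sketch for the Green's-function machinery above (grid size and
# nu here are illustrative values, not ones used in this repo):
#   xv = np.linspace(0, 10, 4)
#   X, Y = np.meshgrid(xv, xv)
#   pts = np.stack((X.ravel(), Y.ravel()), axis=1)
#   q, p, w = get_qpw(pts, pts, dr=0.1, nu=0.25)   # each is (16, 16)
# q, p, w couple east/north surface motion between points; the make_Glos*
# functions in this module project them into each track's line of sight to build G.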
136 | 137 | col1 = los3[0]*q + los3[1]*w 138 | col2 = los3[0]*w + los3[1]*p 139 | col3 = np.eye((col1.shape[0]))*los3[2] 140 | Glos3 = np.concatenate((col1,col2,col3),axis=1) 141 | 142 | col1 = los4[0]*q + los4[1]*w 143 | col2 = los4[0]*w + los4[1]*p 144 | col3 = np.eye((col1.shape[0]))*los4[2] 145 | Glos4 = np.concatenate((col1,col2,col3),axis=1) 146 | return Glos1,Glos2,Glos3,Glos4 147 | 148 | 149 | def make_greens(Glos1,Glos2,Glos3,Glos4,alphax,alphay): 150 | e = np.eye(Glos1.shape[1]) 151 | dime = len(e) 152 | e[0:int(dime/2),0:int(dime/2)] = e[0:int(dime/2),0:int(dime/2)]*alphax 153 | e[int(dime/2):,int(dime/2):] = e[int(dime/2):,int(dime/2):]*alphay 154 | e[:,-1] = 0 # This is so we don't damp Z 155 | GlosAll = np.concatenate((Glos1,Glos2,Glos3,Glos4),axis=0) 156 | GlosAlle = np.concatenate((Glos1,Glos2,Glos3,Glos4,e),axis=0) 157 | Gg = np.dot( np.linalg.inv( np.dot( GlosAlle.T,GlosAlle )), GlosAll.T ) 158 | return GlosAll,GlosAlle,Gg 159 | 160 | def make_greensWhole(Glos1,Glos2,Glos3,Glos4,alphax,alphay): 161 | e = np.eye(Glos1.shape[1]) 162 | dime = len(e) 163 | e[0:int(dime/3),0:int(dime/3)] = e[0:int(dime/3),0:int(dime/3)]*alphax 164 | e[int(dime/3):int(dime*(2/3)),int(dime/3):int(dime*(2/3))] =e[int(dime/3):int(dime*(2/3)),int(dime/3):int(dime*(2/3))]*alphay 165 | e[int(dime*(2/3)):,int(dime*(2/3)):] =e[int(dime*(2/3)):,int(dime*(2/3)):]*0 166 | GlosAll = np.concatenate((Glos1,Glos2,Glos3,Glos4),axis=0) 167 | GlosAlle = np.concatenate((Glos1,Glos2,Glos3,Glos4,e),axis=0) 168 | Gg = np.dot( np.linalg.inv( np.dot( GlosAlle.T,GlosAlle )), GlosAll.T ) 169 | return GlosAll,GlosAlle,Gg 170 | 171 | 172 | def make_greens2(Glos1,Glos2,alphax,alphay): 173 | e = np.eye(Glos1.shape[1]) 174 | dime = len(e) 175 | e[0:int(dime/2),0:int(dime/2)] = e[0:int(dime/2),0:int(dime/2)]*alphax 176 | e[int(dime/2):,int(dime/2):] = e[int(dime/2):,int(dime/2):]*alphay 177 | e[:,-1] = 0 # This is so we don't damp Z 178 | GlosAll = np.concatenate((Glos1,Glos2),axis=0) 179 | GlosAlle = np.concatenate((Glos1,Glos2,e),axis=0) 180 | Gg = np.dot( np.linalg.inv( np.dot( GlosAlle.T,GlosAlle )), GlosAll.T ) 181 | return GlosAll,GlosAlle,Gg 182 | 183 | def make_greens3(Glos1,Glos2,Glos3,alphax,alphay): 184 | e = np.eye(Glos1.shape[1]) 185 | dime = len(e) 186 | e[0:int(dime/2),0:int(dime/2)] = e[0:int(dime/2),0:int(dime/2)]*alphax 187 | e[int(dime/2):,int(dime/2):] = e[int(dime/2):,int(dime/2):]*alphay 188 | e[:,-1] = 0 # This is so we don't damp Z 189 | GlosAll = np.concatenate((Glos1,Glos2,Glos3),axis=0) 190 | GlosAlle = np.concatenate((Glos1,Glos2,Glos3,e),axis=0) 191 | Gg = np.dot( np.linalg.inv( np.dot( GlosAlle.T,GlosAlle )), GlosAll.T ) 192 | return GlosAll,GlosAlle,Gg 193 | 194 | def make_greens5(Glos1,Glos2,Glos3,Glos4,GlosUAV,alphax,alphay): 195 | e = np.eye(Glos1.shape[1]) 196 | dime = len(e) 197 | e[0:int(dime/2),0:int(dime/2)] = e[0:int(dime/2),0:int(dime/2)]*alphax 198 | e[int(dime/2):,int(dime/2):] = e[int(dime/2):,int(dime/2):]*alphay 199 | e[:,-1] = 0 # This is so we don't damp Z 200 | GlosAll = np.concatenate((Glos1,Glos2,Glos3,Glos4,GlosUAV),axis=0) 201 | GlosAlle = np.concatenate((Glos1,Glos2,Glos3,Glos4,GlosUAV,e),axis=0) 202 | Gg = np.dot( np.linalg.inv( np.dot( GlosAlle.T,GlosAlle )), GlosAll.T ) 203 | return GlosAll,GlosAlle,Gg -------------------------------------------------------------------------------- /downLook.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Fri Nov 18 
10:25:53 2022 5 | 6 | @author: km 7 | """ 8 | 9 | import numpy as np 10 | import os 11 | import glob 12 | import datetime 13 | import isceobj 14 | from osgeo import gdal 15 | import argparse 16 | from PyPS2 import util 17 | from matplotlib import pyplot as plt 18 | import unwrap_fringe as uf 19 | import argparse 20 | import scipy.spatial 21 | from datetime import date 22 | import FilterAndCoherence 23 | import looks 24 | 25 | 26 | filterFlag = True 27 | unwrap = False # Usually better to leave False and use runSnaphu.py for more options and outputs 28 | filterStrength = '.6' 29 | fixImage = False #Do this in case you renamed any of the directories or moved the SLCs since they were made 30 | 31 | ps = np.load('./ps.npy',allow_pickle=True).all() 32 | 33 | 34 | # For unwrapping 35 | inps = argparse.Namespace() 36 | inps.method = 'snaphu' 37 | inps.xmlFile = None 38 | # For downlooking 39 | inps.rglooks = ps.rlks 40 | inps.azlooks = ps.alks 41 | 42 | gam = np.ones((ps.nyl,ps.nxl)) 43 | np.save('./Npy/gam.npy',gam) 44 | # Make the ifgs 45 | if not os.path.isdir(ps.intdir): 46 | os.mkdir(ps.intdir) 47 | 48 | 49 | for pair in ps.pairs2: 50 | pairDir = ps.intdir + '/' + pair 51 | if not os.path.isdir(pairDir): 52 | os.mkdir(pairDir) 53 | if not os.path.isfile(pairDir + '/fine.int'): 54 | ifgOutName = pairDir + '/fine.int' 55 | ifgLkName = pairDir + '/fine_lk.int' 56 | print('making ' + pair) 57 | d1 = pair.split('_')[0] 58 | d2 = pair.split('_')[1] 59 | 60 | if ps.crop: 61 | fn_slc1 = ps.slcdir +'/'+ d1 + '/' + d1 + '.slc.full.crop.vrt' 62 | fn_slc2 = ps.slcdir +'/'+ d2 + '/' + d2 + '.slc.full.crop.vrt' 63 | else: 64 | fn_slc1 = ps.slcdir +'/'+ d1 + '/' + d1 + '.slc.full.vrt' 65 | fn_slc2 = ps.slcdir +'/'+ d2 + '/' + d2 + '.slc.full.vrt' 66 | 67 | ds1 = gdal.Open(fn_slc1) 68 | ds2 = gdal.Open(fn_slc2) 69 | 70 | slc1 = ds1.GetVirtualMemArray() 71 | slc2 = ds2.GetVirtualMemArray() 72 | 73 | ifg = np.multiply(slc1,np.conj(slc2)) 74 | 75 | out = isceobj.createImage() # Copy the interferogram image from before 76 | out.dataType = 'CFLOAT' 77 | out.filename = ifgOutName 78 | out.width = ifg.shape[1] 79 | out.length = ifg.shape[0] 80 | out.dump(out.filename + '.xml') # Write out xml 81 | fid=open(out.filename,"wb+") 82 | fid.write(ifg) 83 | out.renderHdr() 84 | out.renderVRT() 85 | fid.close() 86 | 87 | # Downlook 88 | inps.infile = ifgOutName 89 | inps.outfile = ifgLkName 90 | looks.main(inps) 91 | 92 | # Filter and coherence 93 | corname = pairDir + '/filt_lk.cor' 94 | offilt = pairDir + '/filt_lk.int' 95 | if not os.path.isfile(corname): 96 | print('\n making ' + pair) 97 | FilterAndCoherence.runFilter(ifgLkName,offilt,float(filterStrength)) 98 | FilterAndCoherence.estCoherence(offilt, corname) 99 | else: 100 | print(pair + ' fine.int already exists') 101 | 102 | -------------------------------------------------------------------------------- /errorLog.txt: -------------------------------------------------------------------------------- 1 | Error log 2 | 3 | ImportError: libstdc++.so.6: version `GLIBCXX_3.4.30' 4 | 5 | -------------------------------------------------------------------------------- /fitSine.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Thu Feb 28 18:22:43 2019 5 | 6 | @author: kdm95 7 | """ 8 | 9 | from pylab import * 10 | import numpy as np 11 | from math import atan2 12 | def fitSine(tList,yList,period): 13 | ''' 14 | period in days 15 | time list in days 16 | returns 
17 | phase in degrees 18 | ''' 19 | b = np.asarray(yList) 20 | # b = b.T 21 | 22 | rows = [ [sin(1/period*2*pi*t), cos(1/period*2*pi*t), 1,t] for t in tList] 23 | A = matrix(rows) 24 | (w,residuals,rank,sing_vals) = lstsq(A,b) 25 | phase = np.arctan2(w[1,:],w[0,:])*180/pi 26 | amplitude = np.sqrt((np.square(w[0,:]) + np.square(w[1,:]))) 27 | bias = w[2,:] 28 | slope = w[3,:] 29 | return (phase,amplitude,bias,slope) 30 | 31 | 32 | def fitSine1d(tList,yList,period): 33 | ''' 34 | period in days 35 | time list in days 36 | returns 37 | phase in degrees 38 | ''' 39 | b = matrix(yList).T 40 | 41 | 42 | rows = [ [sin(1/period*2*pi*t), cos(1/period*2*pi*t), 1,t] for t in tList] 43 | A = matrix(rows) 44 | (w,residuals,rank,sing_vals) = lstsq(A,b) 45 | phase = atan2(w[1,0],w[0,0])*180/pi 46 | amplitude = norm([w[0,0],w[1,0]],2) 47 | bias = w[2,0] 48 | slope = w[3,0] 49 | return (phase,amplitude,bias,slope,A,w) 50 | 51 | 52 | if __name__=='__main__': 53 | import random 54 | 55 | tList = arange(0.0,1.0,0.001) 56 | tSamples = arange(0.0,1.0,0.05) 57 | random.seed(0.0) 58 | phase = 65 59 | amplitude = 3 60 | bias = -0.3 61 | frequency = 4 62 | yList = amplitude*sin(tList*frequency*2*pi+phase*pi/180.0)+bias 63 | ySamples = amplitude*sin(tSamples*frequency*2*pi+phase*pi/180.0)+bias 64 | yMeasured = [y+random.normalvariate(0,2) for y in ySamples] 65 | #print yList 66 | # the data are 1-D, so use fitSine1d, and note it expects a period (1/frequency) 67 | (phaseEst,amplitudeEst,biasEst,slopeEst,A,w) = fitSine1d(tSamples,yMeasured,1.0/frequency) 68 | print ('Phase estimate = %f, Amplitude estimate = %f, Bias estimate = %f' % (phaseEst,amplitudeEst,biasEst)) 69 | 70 | yEst = amplitudeEst*sin(tList*frequency*2*pi+phaseEst*pi/180.0)+biasEst 71 | 72 | figure(1) 73 | plot(tList,yList,'b') 74 | plot(tSamples,yMeasured,'+r',markersize=12,markeredgewidth=2) 75 | plot(tList,yEst,'-g') 76 | xlabel('seconds') 77 | legend(['True value','Measured values','Estimated value']) 78 | grid(True) 79 | show() 80 | -------------------------------------------------------------------------------- /gac_correction.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Fri Aug 31 12:37:39 2018 5 | 6 | @author: kdm95 7 | """ 8 | # Global Imports 9 | import numpy as np 10 | import isceobj 11 | from osgeo import gdal 12 | import pickle 13 | import os 14 | import cv2 15 | from matplotlib import pyplot as plt 16 | import scipy.spatial.qhull as qhull 17 | from scipy.interpolate import griddata 18 | import util 19 | import structure_function # used for the structure-function comparison at the end of this script 20 | import requests 21 | import pandas as pd 22 | # Local Imports 23 | #from APS_tools import ifg 24 | 25 | # Load stuff 26 | params = np.load('params.npy',allow_pickle=True).item() 27 | locals().update(params) 28 | geom = np.load('geom.npy',allow_pickle=True).item() 29 | locals().update(geom) 30 | 31 | dataDir = workdir + '/GACOS/' 32 | 33 | do_wrapped =2 # 1 applies the correction to wrapped data, 0 to unwrapped data; any other value skips both branches.
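# The correction below converts GACOS zenith total delay (ztd, metres) to a
# slant (line-of-sight) delay and then to interferometric phase. Sketch of the
# relations used later in this script (inc must be in radians; apply
# np.deg2rad first if it is stored in degrees):
#   slant = ztd / np.cos(inc)                  # metres, zenith to LOS
#   phs   = 4 * np.pi / lam * slant            # radians of two-way path delay
#   ifg_corrected = ifg * np.exp(-1j * phs)    # wrapped case; unwrapped just subtracts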
34 | minlat=np.floor(lat_ifg.min()) 35 | maxlat=np.ceil(lat_ifg.max()) 36 | minlon=np.floor(lon_ifg.min()) 37 | maxlon=np.ceil(lon_ifg.max()) 38 | 39 | path = os.getcwd().split('/')[-2] 40 | frame= os.getcwd().split('/')[-1] 41 | 42 | def getTime(path,frame): 43 | 44 | ''' 45 | Figure out what time the acquisition was 46 | ''' 47 | start='2020-05-01T00:00:00Z' 48 | end='2021-06-01T00:00:00Z' 49 | asfUrl = 'https://api.daac.asf.alaska.edu/services/search/param?platform=SENTINEL-1&processinglevel=SLC&output=CSV' 50 | call = asfUrl + '&relativeOrbit=' + path + '&frame=' + frame + '&start=' + start + '&end=' + end 51 | # Here we'll make a request to ASF API and then save the output info to .CSV file 52 | if not os.path.isfile('out.csv'): 53 | r =requests.get(call,timeout=100) 54 | with open('out.csv','w') as j: 55 | j.write(r.text) 56 | # Open the CSV file and get the URL and File names 57 | hour = pd.read_csv('out.csv')["Start Time"][0][11:13] 58 | minute = pd.read_csv('out.csv')["Start Time"][0][14:16] 59 | return int(hour),int(minute) 60 | 61 | hour,minute = getTime(path,frame) 62 | timee =hour+round(minute/60,2) 63 | 64 | # email = 'murray8@hawaii.edu' 65 | email = 'bXVycmF5OEBoYXdhaWkuZWR1' # base64-encoded address, as the GACOS API expects 66 | #Request gacos from API? 67 | url = 'http://www.gacos.net/result.php?flag=MQ==&email='+email+'&' 68 | boundURL = 'S='+str(minlat)+'&N='+str(maxlat)+'&W='+str(minlon)+'&E='+str(maxlon)+'&' 69 | timeURL = 'time_of_day='+str(timee)+'&type=2&date=' 70 | 71 | dstr = dates[21] 72 | for d in dates[22:]: 73 | dstr+='-' 74 | dstr+=d 75 | 76 | requestURL = url + boundURL + timeURL + dstr 77 | # r =requests.get(requestURL,timeout=100) 78 | 79 | 80 | print(requestURL) 81 | 82 | 83 | 84 | # Read one of the rsc files 85 | fname = dataDir + dates[0] +'.ztd.tif' 86 | data = gdal.Open(fname, gdal.GA_ReadOnly) 87 | nxg = int(data.RasterXSize) 88 | nyg = int(data.RasterYSize ) 89 | geoTransform = data.GetGeoTransform() 90 | lon_gac_min = geoTransform[0] 91 | lat_gac_max = geoTransform[3] 92 | lon_gac_max = lon_gac_min + geoTransform[1] * nxg 93 | lat_gac_min = lat_gac_max + geoTransform[5] * nyg 94 | print([lon_gac_min, lat_gac_max, lon_gac_max, lat_gac_min]) 95 | 96 | 97 | gac_lon_vec = np.linspace(lon_gac_min, lon_gac_max, int(nxg)) 98 | gac_lat_vec = np.linspace(lat_gac_min, lat_gac_max, int(nyg)) 99 | 100 | gac_lat,gac_lon = np.meshgrid(gac_lat_vec, gac_lon_vec, sparse=False, indexing='ij') 101 | gac_lat = np.flipud(gac_lat) 102 | 103 | #Aminlat = 35.9 104 | #Amaxlat = 36.8 105 | #Aminlon = -96.97 106 | #Amaxlon = -96.0 107 | #gid1,gid2 = np.where((gac_lon>Aminlon) & (gac_lon<Amaxlon) & (gac_lat>Aminlat) & (gac_lat<Amaxlat)) 108 | #iid1,iid2 = np.where((lon_ifg>Aminlon) & (lon_ifg<Amaxlon) & (lat_ifg>Aminlat) & (lat_ifg<Amaxlat)) # [a few lines lost to extraction here, including the opening of the gac_stack.npy guard below] 115 | if not os.path.isfile(dataDir + 'gac_stack.npy'): 116 | 117 | def interp_weights(xy, uv, d=2): 118 | # Delaunay triangulation-based barycentric interpolation weights 119 | tri = qhull.Delaunay(xy) 120 | simplex = tri.find_simplex(uv) 121 | vertices = np.take(tri.simplices, simplex, axis=0) 122 | temp = np.take(tri.transform, vertices, axis=0) 123 | delta = uv - temp[:, d] 124 | bary = np.einsum('njk,nk->nj', temp[:, :d, :], delta) 125 | return vertices, np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True))) 126 | 127 | def interpolate(values, vtx, wts): 128 | return np.einsum('nj,nj->n', np.take(values, vtx), wts) 129 | 130 | # Get weights for interpolation (this avoids redundant operations in loop) 131 | vtx, wts = interp_weights(np.asarray((gac_lon.flatten(),gac_lat.flatten())).T, np.asarray((lon_ifg.flatten(),lat_ifg.flatten())).T) 132 | 133 | gac_stack = list() 134 | # Loop through and grid each gacos image 135 | for ii in np.arange(0,nd): 136 | print('gridding gacos to ' + pairs[ii]) 137 | date1 = dates[ii]; 138 | date2 =dates[ii+1] 139 | gf1 = dataDir + date1 + '.ztd.tif' 140 | gf2 = dataDir + date2 + '.ztd.tif' 141 | gactmp1 = gdal.Open(gf1, gdal.GA_ReadOnly) 142 | gac1 = gactmp1.GetRasterBand(1).ReadAsArray() 143 | gactmp2 = gdal.Open(gf2, gdal.GA_ReadOnly) 144 | gac2 = 
gactmp2.GetRasterBand(1).ReadAsArray() 145 | gac = gac2-gac1 146 | gac = np.asarray(gac, dtype=np.float32) 147 | 148 | gac_grid2 =griddata((gac_lon.flatten(),gac_lat.flatten()),gac2.flatten(), (lon_ifg,lat_ifg), method='linear') 149 | gac_grid=interpolate(gac, vtx, wts) 150 | gac_grid[gac_grid==0]=np.nan 151 | gac_grid = np.reshape(gac_grid,lon_ifg.shape) 152 | gac_grid=np.asarray(gac_grid,dtype=np.float32) 153 | gac_grid-=np.nanmean(gac_grid) 154 | gac_stack.append(gac_grid) 155 | 156 | np.save(dataDir + 'gac_stack.npy', gac_stack) 157 | gac_coords = [];gac_coords.append(lon_ifg);gac_coords.append(lat_ifg) 158 | np.save(dataDir + 'gac_coords.npy', gac_coords) 159 | 160 | else: 161 | print(dataDir + 'gac_stack.npy already exists. Loading it...') 162 | gac_stack = np.load(dataDir + 'gac_stack.npy') 163 | 164 | 165 | if do_wrapped==1: 166 | # Load ifg and correct for wrapped data 167 | gamma_thresh = .2 168 | rx=2 169 | ry=2 170 | gausx = np.exp( np.divide( -np.square(np.arange(-rx,rx)), np.square(rx))); 171 | gausy = np.exp( np.divide( -np.square(np.arange(-ry,ry)), np.square(ry))); 172 | gaus = gausx[:, np.newaxis] * gausy[np.newaxis, :] 173 | gaus = gaus-gaus.min() 174 | gaus = gaus/np.sum(gaus.flatten()) 175 | for ii,pair in enumerate(pairs): 176 | phs_c = (np.zeros((nyl,nxl))*1j).astype(np.complex64) 177 | f = intdir + pair + '/fine_lk.int' 178 | Image = isceobj.createIntImage() 179 | Image.load(f + '.xml') 180 | phs_ifg = Image.memMap()[ymin:ymax,:,0] 181 | Image.finalizeImage() 182 | gac = ((gac_stack[ii])/np.cos(los_ifg)) #meters 183 | gac_complex = np.exp( ((gac *np.pi *4)/lam)*1j ) # complex number 184 | phs_c[ymin:ymax,:] = phs_ifg * np.conj(gac_complex) 185 | phs_c[np.isnan(phs_c)]=0 186 | phs_c = np.asarray(phs_c,dtype=np.complex64) 187 | 188 | 189 | # DO ps interp________________________________________________________ 190 | rea_lk = np.real(phs_c) 191 | ima_lk = np.imag(phs_c) 192 | # Mask ones where the data is good 193 | mask = np.ones(rea_lk.shape) 194 | mask[np.where(gamma0_lk < gamma_thresh)]=0 195 | 196 | # Smooth everything into zero space 197 | mask_f = cv2.filter2D(mask,-1, gaus) 198 | rea_f = cv2.filter2D(rea_lk,-1, gaus) 199 | ima_f = cv2.filter2D(ima_lk,-1, gaus) 200 | 201 | # Divide by mask. 
This is how we handle nan values 202 | rea = rea_f/mask_f 203 | ima = ima_f/mask_f 204 | rea += rea_lk 205 | ima += ima_lk 206 | phs_lk = (rea+(1j*ima)).astype(np.complex64) 207 | phs_lk[np.isnan(phs_lk)]=0 208 | #_____________________________________________________________ 209 | 210 | out = Image.clone() # Copy the interferogram image from before 211 | out.filename = intdir + pair + '/fine_lk_gac.int' 212 | out.dump( intdir + pair + '/fine_lk_gac.int.xml') # Write out xml 213 | phs_lk.tofile(out.filename) # Write file out 214 | out.finalizeImage() 215 | 216 | 217 | if do_wrapped==0: 218 | # Load ifg and correct 219 | for ii,pair in enumerate(pairs): 220 | phs_c = np.zeros((nyl,nxl)) 221 | f = intdir + pair + '/fine_lk.unw' 222 | Image = isceobj.createIntImage() 223 | Image.load(f + '.xml') 224 | phs_ifg = Image.memMap()[ymin:ymax,:,0] 225 | phs_ifg = phs_ifg.copy().astype(np.float32)*lam/(4*np.pi)*100 226 | phs_ifg[phs_ifg==0]=np.nan 227 | Image.finalizeImage() 228 | plt.imshow(phs_ifg) 229 | # phs_ifg-=np.nanmedian(phs_ifg) 230 | gac = ((gac_stack[ii]*100)/np.cos(np.deg2rad(los_ifg))) 231 | # gac-=np.nanmedian(gac) 232 | phs_c[ymin:ymax,:] = phs_ifg-gac 233 | phs_c-=np.nanmedian(phs_c) 234 | phs_c[np.isnan(phs_c)]=0 235 | phs_c = np.asarray(phs_c,dtype=np.float32) 236 | out = Image.clone() # Copy the interferogram image from before 237 | out.filename = intdir + pair + '/fine_lk_gac.unw' 238 | out.dump( intdir + pair + '/fine_lk_gac.unw.xml') # Write out xml 239 | phs_c.tofile(out.filename) # Write file out 240 | out.finalizeImage() 241 | 242 | 243 | hgt = hgt_ifg 244 | 245 | # ymin2=84 246 | # ymax2=2575 247 | # xmin=35 248 | # xmax=6320 249 | # crop_mask = np.zeros(hgt.shape) 250 | # crop_mask[ymin2:ymax2,xmin:xmax] =1 251 | 252 | # Load example 253 | # Load phs 254 | idx = 31 255 | pair =pairs[idx] 256 | f = intdir + pair + '/fine_lk.unw' 257 | Image = isceobj.createImage() 258 | Image.load(f + '.xml') 259 | phs_ifg = Image.memMap()[ymin:ymax,:,0] 260 | phs_ifg = phs_ifg.copy().astype(np.float32)*lam/(4*np.pi)*100 261 | Image.finalizeImage() 262 | # phs_ifg*=crop_mask 263 | phs_ifg[phs_ifg==0]=np.nan 264 | phs_ifg[np.where( (hgt<.1) ) ]=np.nan # masks the water 265 | phs_ifg-=np.nanmedian(phs_ifg) 266 | 267 | # Load phs corrected 268 | f = intdir + pair + '/fine_lk_gac.unw' 269 | Image = isceobj.createImage() 270 | Image.load(f + '.xml') 271 | phs_c = Image.memMap()[ymin:ymax,:,0] 272 | phs_c = phs_c.copy().astype(np.float32) 273 | # phs_c*=crop_mask 274 | phs_c[phs_c==0]=np.nan 275 | phs_c[np.where( (hgt<.1) ) ]=np.nan # masks the water 276 | 277 | # Load gac model 278 | gac_mod = (gac_stack[idx]*100)/np.cos(np.deg2rad(los_ifg)) 279 | # gac_mod*=crop_mask 280 | gac_mod[gac_mod==0]=np.nan 281 | gac_mod[np.where( (hgt<.1) ) ]=np.nan # masks the water 282 | gac_mod-=np.nanmedian(gac_mod) 283 | 284 | # Plot example gacos model and ifg 285 | vmin,vmax=-5,5 286 | fig = plt.figure(figsize=(8,10)) 287 | ax = fig.add_subplot(4,1,1);plt.imshow(phs_ifg,vmin=vmin,vmax=vmax) 288 | ax.set_title('ifg') 289 | ax = fig.add_subplot(4,1,2);plt.imshow(gac_mod,vmin=vmin,vmax=vmax) 290 | ax.set_title('model') 291 | ax = fig.add_subplot(4,1,3);plt.imshow(phs_c,vmin=vmin,vmax=vmax) 292 | ax.set_title('corrected ifg') 293 | 294 | pad=0 295 | 296 | 297 | mapImg(phs_ifg, lon_ifg, lat_ifg, vmin, vmax, pad,10, 'Original IFG (cm)') 298 | mapImg(gac_mod, lon_ifg, lat_ifg, vmin, vmax, pad,10, 'GACOS model (cm)') 299 | mapImg(phs_c, lon_ifg, lat_ifg, vmin, vmax, pad,10, 'Corrected IFG (cm)') 300 | 301 | # 
plt.savefig(workdir + 'Figs/GACOS_correction.png',transparent=True,dpi=300 ) 302 | 303 | ## Load example 304 | ## Load phs 305 | #idx = 31 306 | #pair =pairs[idx] 307 | #f = intdir + pair + '/fine_lk.unw' 308 | #Image = isceobj.createImage() 309 | #Image.load(f + '.xml') 310 | #phs_ifg = Image.memMap()[:,:,0] 311 | #phs_ifg = phs_ifg.copy().astype(np.float32)*lam/(4*np.pi)*100 312 | #Image.finalizeImage() 313 | #phs_ifg*=crop_mask 314 | #phs_ifg[phs_ifg==0]=np.nan 315 | #phs_ifg[np.where( (hgt<.1) ) ]=np.nan # masks the water 316 | #phs_ifg-=np.nanmean(phs_ifg) 317 | # 318 | ## Load phs corrected 319 | #f = intdir + pair + '/fine_lk_gac.int' 320 | #Image = isceobj.createImage() 321 | #Image.load(f + '.xml') 322 | #phs_c = Image.memMap()[:,:,0] 323 | #phs_c = phs_c.copy().astype(np.float32) 324 | #phs_c*=crop_mask 325 | #phs_c[phs_c==0]=np.nan 326 | #phs_c[np.where( (hgt<.1) ) ]=np.nan # masks the water 327 | 328 | _,_,_,_,phs_ifg_S_bins, phs_ifg_S_bins_std, phs_ifg_dist_bins,_ = structure_function.struct_fun(phs_ifg,250,400,0) 329 | phs_ci = 1.96*(np.divide(phs_ifg_S_bins_std,np.sqrt( 250))) 330 | 331 | _,_,_,_,gac_ifg_S_bins, gac_ifg_S_bins_std, gac_ifg_dist_bins,_ = structure_function.struct_fun(phs_c,250,400,0) 332 | gac_ci = 1.96*(np.divide(gac_ifg_S_bins_std,np.sqrt( 250))) 333 | 334 | plt.figure() 335 | plt.plot(phs_ifg_dist_bins[:26],phs_ifg_S_bins[:26]) 336 | plt.fill_between(phs_ifg_dist_bins[:26],phs_ifg_S_bins[:26]-phs_ci[:26], phs_ifg_S_bins[:26]+phs_ci[:26], 337 | alpha=0.3, linewidth=1, linestyle='dashed', antialiased=True) 338 | plt.plot(gac_ifg_dist_bins[:26],gac_ifg_S_bins[:26]) 339 | plt.fill_between(gac_ifg_dist_bins[:26],gac_ifg_S_bins[:26]-gac_ci[:26], gac_ifg_S_bins[:26]+gac_ci[:26], 340 | alpha=0.3, linewidth=1, linestyle='dashed', antialiased=True) 341 | plt.title('Structure Function ' + pair) 342 | plt.ylabel('RMS of phase difference between pixels (cm)') 343 | plt.xlabel('Distance between pixels (km)') 344 | plt.legend(['Original IFG','Corrected IFG'],loc='upper left') 345 | # plt.savefig(workdir + 'Figs/GACOS_structfun.svg',transparent=True,dpi=100 ) 346 | 347 | -------------------------------------------------------------------------------- /invertRates.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Fri Aug 3 16:45:20 2018 5 | 6 | @author: kdm95 7 | """ 8 | 9 | import numpy as np 10 | import isceobj 11 | from matplotlib import pyplot as plt 12 | from skimage.measure import profile_line as pl 13 | import fitSine 14 | 15 | #******************************** 16 | # Set these parameters 17 | #******************************** 18 | #seasonal = False 19 | #mcov_flag = True 20 | #water_elevation = -103 21 | #******************************** 22 | 23 | def invertRates(data,dn,seasonals=False,mcov_flag=False,water_elevation=-103,uncertainties=None): 24 | ''' data is a stack of inverted displacements, shape=(len(dn), ny*nx) 25 | you can invert for a sinusoidal fit at each pixel with seasonals = True 26 | mcov_flag is the model covariance and works only with seasonals=False for now.
27 | Water elevation is usually not zero (relative to wgs84 ellipsoid). Pass uncertainties (per-epoch standard deviations, same shape as data) to weight the fit.''' 28 | 29 | ps = np.load('./ps.npy',allow_pickle=True).all() 30 | 31 | data = data.astype(np.float32).reshape((len(dn),-1)) 32 | dn0 = dn -dn[0] 33 | period = 365.25 34 | rate_uncertainty = [] 35 | 36 | if seasonals: 37 | if mcov_flag: 38 | mcov_flag=False 39 | print('model covariance only works with seasonals=False for now') 40 | 41 | # Invert for seasonal plus long term rates 42 | phases,amplitudes,biases,slopes = fitSine.fitSine(dn0,data,period) 43 | rates = np.reshape(slopes,(ps.nyl,ps.nxl)).astype(np.float32)*365 44 | amps = np.reshape(amplitudes,(ps.nyl,ps.nxl)).astype(np.float32) 45 | return rates, amps 46 | 47 | elif uncertainties is not None: # (a plain truth test raises on numpy arrays) 48 | G = np.vstack([dn0, np.ones((len(dn0),1)).flatten()]).T 49 | mod = [] 50 | rates = [] 51 | resstd = [] 52 | for ii in range(data.shape[1]): 53 | W = np.diag(1/uncertainties[:,ii]) 54 | Gw = np.dot(W,G) 55 | dw = np.dot(W,data[:,ii]) 56 | mod.append(np.dot( np.linalg.inv(np.dot(Gw.T,Gw)), np.dot(Gw.T,dw))) 57 | rates.append( mod[ii][0] *365 ) # cm/yr 58 | #offs = np.reshape(mod[1,:],(ps.nyl, ps.nxl)) 59 | synth = np.dot(G,mod[ii]); 60 | res = (data[:,ii]-synth) 61 | rs = np.std(res,axis=0) 62 | resstd.append(rs) 63 | 64 | rates = np.reshape(np.asarray(rates),(ps.nyl,ps.nxl)) 65 | resstd = np.reshape(np.asarray(resstd),(ps.nyl,ps.nxl)) 66 | 67 | return rates,resstd 68 | 69 | else: 70 | G = np.vstack([dn0, np.ones((len(dn0),1)).flatten()]).T 71 | Gg = np.dot( np.linalg.inv(np.dot(G.T,G)), G.T) 72 | mod = np.dot(Gg, data) 73 | rates = np.reshape(mod[0,:],(ps.nyl,ps.nxl))*365 # cm/yr 74 | #offs = np.reshape(mod[1,:],(ps.nyl, ps.nxl)) 75 | synth = np.dot(G,mod); 76 | res = (data-synth)#*lam/(4*np.pi)*100 # cm 77 | 78 | 79 | resstd = np.std(res,axis=0) 80 | resstd = np.reshape(resstd,(ps.nyl, ps.nxl)) 81 | 82 | if mcov_flag: 83 | for ii in np.arange(0,len(data[0,:])): 84 | co=np.cov(data[:,ii]); 85 | mcov=np.diag(np.dot(Gg,np.dot(co,Gg.T))); 86 | rate_uncertainty.append(1.96*mcov[0]**.5) 87 | rate_uncertainty = np.asarray(rate_uncertainty,dtype=np.float32) 88 | rate_uncertainty = np.reshape(rate_uncertainty,(ps.nyl,ps.nxl)) 89 | rate_uncertainty= rate_uncertainty*365 #cm/yr 90 | 91 | return rates,resstd#,worst,worstVal 92 | 93 | 94 | 95 | ## Save rates 96 | #fname = tsdir + '/rates_flat.unw' 97 | #out = isceobj.createIntImage() # Copy the interferogram image from before 98 | #out.dataType = 'FLOAT' 99 | #out.filename = fname 100 | #out.width = ps.nxl 101 | #out.length = ps.nyl 102 | #out.dump(fname + '.xml') # Write out xml 103 | #rates.tofile(out.filename) # Write file out 104 | #out.renderHdr() 105 | #out.renderVRT() 106 | # 107 | ## GEOCODE 108 | ##cmd = 'geocodeIsce.py -f ' + tsdir + 'rates_flat.unw -d ' + workdir + 'DEM/demLat_N33_N35_Lon_W119_W116.dem -m ' + workdir + 'master/ -s ' + workdir + 'pawns/20150514 -r ' + str(rlks) + ' -a ' + str(alks) + ' -b "'" 33 35 -118 -116"'" ' 109 | ##os.system(cmd) 110 | 111 | -------------------------------------------------------------------------------- /makeGamma0_SLC.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Tue Jul 31 15:39:22 2018 5 | calcGamma and smart downlooking 6 | 7 | Saves fine_diff.int files in respective ifg directories used to make the gamma0 8 | file (they can be deleted after it is made) 9 | 10 | Saves a file called gamma0 in the tsdir (TS directory) 11 | Saves downlooked ifgs in the respective ifg
directories. 12 | 13 | @author: kdm95 14 | """ 15 | 16 | import numpy as np 17 | import isce.components.isceobj as isceobj 18 | from matplotlib import pyplot as plt 19 | import cv2 20 | import os 21 | import time 22 | import util 23 | 24 | #from mroipac.filter.Filter import Filter 25 | 26 | doPS = True # Setting this to False will make gamma0 all ones. 27 | 28 | if doPS: 29 | 30 | overwrite =False 31 | plot = True 32 | 33 | ps = np.load('./ps.npy',allow_pickle=True).all() 34 | 35 | # Make the gaussian filter we'll convolve with ifg 36 | rx = 8 37 | rx2 = np.floor(rx*3) 38 | gausx = np.exp( np.divide( -np.square(np.arange(-rx2,rx2)), np.square(rx))); 39 | gausy = np.exp( np.divide( -np.square(np.arange(-rx2,rx2)), np.square(rx))); 40 | gaus = gausx[:, np.newaxis] * gausy[np.newaxis, :] 41 | gaus -= gaus.min() 42 | gaus /= np.sum(gaus.flatten()) 43 | del(rx,rx2,gausx,gausy) 44 | # get slc example image (extract from an xml file) 45 | f = ps.slcdir +'/'+ ps.dates[0] + '/' + ps.dates[0] + '.slc.full' 46 | slcImage = isceobj.createSlcImage() 47 | slcImage.load(f + '.xml') 48 | intimg = isceobj.createIntImage() 49 | intimg.width = slcImage.width 50 | intimg.length = slcImage.length 51 | 52 | 53 | # Find SLCs with zero size or that don't exist. And find what size they should be. 54 | fSizes = [] 55 | for ii,d in enumerate(ps.dates[:-1]): 56 | if os.path.isfile(ps.slcdir + '/' + d + '/' + d + '.slc.full'): 57 | if os.path.getsize(ps.slcdir + '/' + d + '/' + d + '.slc.full')==0: 58 | print('WARNING: ' + ps.slcdir + '/' + d + '/.slc.full. File size too small. May be corrupt.' ) 59 | else: 60 | fSizes.append(os.path.getsize(ps.slcdir + '/' + d + '/' + d + '.slc.full')) 61 | else: 62 | print(d + '/.slc.full does not exist') 63 | medSize = np.nanmedian(fSizes) 64 | 65 | # Flag SLCs whose size differs from the median expected size. 66 | for ii,d in enumerate(ps.dates[:-1]): 67 | if os.path.isfile(ps.slcdir + '/' + d + '/' + d + '.slc.full'): 68 | if os.path.getsize(ps.slcdir + '/' + d + '/' + d + '.slc.full')!=medSize: 69 | # os.system('rm -r ' + ps.slcdir + '/' + d ) 70 | print('WARNING: ' + ps.slcdir + '/' + d + '/.slc.full. File size not right. May be corrupt.' ) 71 | else: 72 | print(d + '/.slc.full does not exist') 73 | 74 | # Find fine_diff files that are too small and delete them.
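# (The size comparison in the loop below was lost in extraction; a plausible,
# hedged form of the check, with an assumed median-based threshold and a
# hypothetical fineDiffFile shorthand for the per-date path, is:
#   diffSizes = [os.path.getsize(f) for f in glob.glob(ps.slcdir + '/*/fine_diff.int')]
#   if os.path.getsize(fineDiffFile) < np.nanmedian(diffSizes):
#       os.remove(fineDiffFile)
# glob would need to be imported for this sketch.)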
75 | for ii,d in enumerate(ps.dates[:-1]): 76 | if os.path.isfile(ps.slcdir + '/' + d + '/fine_diff.int'): 77 | if os.path.getsize(ps.slcdir + '/' + d + '/fine_diff.int') < ... # [lost to extraction: the size threshold and deletion, the remainder of makeGamma0_SLC.py, and makeMap.py up to line 168; makeMap.py resumes below mid-way through a map-plotting helper, inside a region test (ending '>0:') that selects which fault shapefile to draw] 169 | # reader = shpreader.Reader("/d/MapData/gem-global-active-faults/shapefile/gem_active_faults.shp") 170 | reader = shpreader.Reader("/d/MapData/EARS/kivu.shp") 171 | 172 | else: 173 | # reader2 = shpreader.Reader("/d/faults/CFM/traces/shp/CFM5.3_traces.shp") 174 | reader = shpreader.Reader("/d/faults/Shapefile/QFaults.shp") 175 | 176 | shape_feature = ShapelyFeature(reader.geometries(), ccrs.PlateCarree(), edgecolor='r', facecolor='none',linewidth=.5,zorder=5,alpha=0.8) 177 | ax.add_feature(shape_feature) 178 | # shape_feature2 = ShapelyFeature(reader2.geometries(), ccrs.PlateCarree(), edgecolor='b', facecolor='none',linewidth=.5,zorder=5,alpha=0.8) 179 | # ax.add_feature(shape_feature2) 180 | 181 | # scale_bar(ax, location, length, metres_per_unit=1000, unit_name='km', 182 | # tol=0.01, angle=0, color='black', linewidth=5, text_offset=0.01, 183 | # ha='center', va='bottom', plot_kwargs=None, text_kwargs=None) 184 | if scalebar: 185 | scale_bar(ax, (.1,.1), scalebar,linewidth=0.5) 186 | 187 | 188 | plt.title(title) 189 | plt.show() 190 | return data_crs 191 | 192 | 193 | 194 | 195 | 196 | 197 | def _axes_to_lonlat(ax, coords): 198 | """(lon, lat) from axes coordinates.""" 199 | display = ax.transAxes.transform(coords) 200 | data = ax.transData.inverted().transform(display) 201 | lonlat = ccrs.PlateCarree().transform_point(*data, ax.projection) 202 | 203 | return lonlat 204 | 205 | 206 | def _upper_bound(start, direction, distance, dist_func): 207 | """A point farther than distance from start, in the given direction. 208 | 209 | It doesn't matter which coordinate system start is given in, as long 210 | as dist_func takes points in that coordinate system. 211 | 212 | Args: 213 | start: Starting point for the line. 214 | direction: Nonzero (2, 1)-shaped array, a direction vector. 215 | distance: Positive distance to go past. 216 | dist_func: A two-argument function which returns distance. 217 | 218 | Returns: 219 | Coordinates of a point (a (2, 1)-shaped NumPy array). 220 | """ 221 | if distance <= 0: 222 | raise ValueError(f"Minimum distance is not positive: {distance}") 223 | 224 | if np.linalg.norm(direction) == 0: 225 | raise ValueError("Direction vector must not be zero.") 226 | 227 | # Exponential search until the distance between start and end is 228 | # greater than the given limit. 229 | length = 0.1 230 | end = start + length * direction 231 | 232 | while dist_func(start, end) < distance: 233 | length *= 2 234 | end = start + length * direction 235 | 236 | return end 237 | 238 | 239 | def _distance_along_line(start, end, distance, dist_func, tol): 240 | """Point at a distance from start on the segment from start to end. 241 | 242 | It doesn't matter which coordinate system start is given in, as long 243 | as dist_func takes points in that coordinate system. 244 | 245 | Args: 246 | start: Starting point for the line. 247 | end: Outer bound on point's location. 248 | distance: Positive distance to travel. 249 | dist_func: Two-argument function which returns distance. 250 | tol: Relative error in distance to allow. 251 | 252 | Returns: 253 | Coordinates of a point (a (2, 1)-shaped NumPy array).
254 | """ 255 | initial_distance = dist_func(start, end) 256 | if initial_distance < distance: 257 | raise ValueError(f"End is closer to start ({initial_distance}) than " 258 | f"given distance ({distance}).") 259 | 260 | if tol <= 0: 261 | raise ValueError(f"Tolerance is not positive: {tol}") 262 | 263 | # Binary search for a point at the given distance. 264 | left = start 265 | right = end 266 | 267 | while not np.isclose(dist_func(start, right), distance, rtol=tol): 268 | midpoint = (left + right) / 2 269 | 270 | # If midpoint is too close, search in second half. 271 | if dist_func(start, midpoint) < distance: 272 | left = midpoint 273 | # Otherwise the midpoint is too far, so search in first half. 274 | else: 275 | right = midpoint 276 | 277 | return right 278 | 279 | 280 | def _point_along_line(ax, start, distance, angle=0, tol=0.01): 281 | """Point at a given distance from start at a given angle. 282 | 283 | Args: 284 | ax: CartoPy axes. 285 | start: Starting point for the line in axes coordinates. 286 | distance: Positive physical distance to travel. 287 | angle: Anti-clockwise angle for the bar, in radians. Default: 0 288 | tol: Relative error in distance to allow. Default: 0.01 289 | 290 | Returns: 291 | Coordinates of a point (a (2, 1)-shaped NumPy array). 292 | """ 293 | # Direction vector of the line in axes coordinates. 294 | direction = np.array([np.cos(angle), np.sin(angle)]) 295 | 296 | geodesic = cgeo.Geodesic() 297 | 298 | # Physical distance between points. 299 | def dist_func(a_axes, b_axes): 300 | a_phys = _axes_to_lonlat(ax, a_axes) 301 | b_phys = _axes_to_lonlat(ax, b_axes) 302 | 303 | # Geodesic().inverse returns a NumPy MemoryView like [[distance, 304 | # start azimuth, end azimuth]]. 305 | return geodesic.inverse(a_phys, b_phys).base[0, 0] 306 | 307 | end = _upper_bound(start, direction, distance, dist_func) 308 | 309 | return _distance_along_line(start, end, distance, dist_func, tol) 310 | 311 | 312 | def scale_bar(ax, location, length, metres_per_unit=1000, unit_name='km', 313 | tol=0.01, angle=0, color='black', linewidth=5, text_offset=0.01, 314 | ha='center', va='bottom', plot_kwargs=None, text_kwargs=None, 315 | **kwargs): 316 | """Add a scale bar to CartoPy axes. 317 | 318 | For angles between 0 and 90 the text and line may be plotted at 319 | slightly different angles for unknown reasons. To work around this, 320 | override the 'rotation' keyword argument with text_kwargs. 321 | 322 | Args: 323 | ax: CartoPy axes. 324 | location: Position of left-side of bar in axes coordinates. 325 | length: Geodesic length of the scale bar. 326 | metres_per_unit: Number of metres in the given unit. Default: 1000 327 | unit_name: Name of the given unit. Default: 'km' 328 | tol: Allowed relative error in length of bar. Default: 0.01 329 | angle: Anti-clockwise rotation of the bar. 330 | color: Color of the bar and text. Default: 'black' 331 | linewidth: Same argument as for plot. 332 | text_offset: Perpendicular offset for text in axes coordinates. 333 | Default: 0.005 334 | ha: Horizontal alignment. Default: 'center' 335 | va: Vertical alignment. Default: 'bottom' 336 | **plot_kwargs: Keyword arguments for plot, overridden by **kwargs. 337 | **text_kwargs: Keyword arguments for text, overridden by **kwargs. 338 | **kwargs: Keyword arguments for both plot and text. 339 | """ 340 | # Setup kwargs, update plot_kwargs and text_kwargs. 
341 | if plot_kwargs is None: 342 | plot_kwargs = {} 343 | if text_kwargs is None: 344 | text_kwargs = {} 345 | 346 | plot_kwargs = {'linewidth': linewidth, 'color': color, **plot_kwargs, 347 | **kwargs} 348 | text_kwargs = {'ha': ha, 'va': va, 'rotation': angle, 'color': color, 349 | **text_kwargs, **kwargs} 350 | 351 | # Convert all units and types. 352 | location = np.asarray(location) # For vector addition. 353 | length_metres = length * metres_per_unit 354 | angle_rad = angle * np.pi / 180 355 | 356 | # End-point of bar. 357 | end = _point_along_line(ax, location, length_metres, angle=angle_rad,tol=tol) 358 | 359 | from matplotlib import patheffects 360 | buffer = [patheffects.withStroke(linewidth=1, foreground="w")] 361 | # Coordinates are currently in axes coordinates, so use transAxes to 362 | # put into data coordinates. *zip(a, b) produces a list of x-coords, 363 | # then a list of y-coords. 364 | ax.plot(*zip(location, end), transform=ax.transAxes, linewidth=linewidth, color='black', alpha=.6) 365 | 366 | # Push text away from bar in the perpendicular direction. 367 | midpoint = (location + end) / 2 368 | offset = text_offset * np.array([-np.sin(angle_rad), np.cos(angle_rad)]) 369 | text_location = midpoint + offset 370 | 371 | # 'rotation' keyword argument is in text_kwargs. 372 | ax.text(*text_location, f"{length} {unit_name}", rotation_mode='anchor', 373 | transform=ax.transAxes, **text_kwargs, path_effects=buffer) 374 | 375 | -------------------------------------------------------------------------------- /makeVRT.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | ''' 4 | Modified from Fringe: tops2vrt.py 5 | Takes a bounding box and makes vrt files for the slcs and geom files 6 | so that we only do the analysis on the area of interest. 7 | 8 | 9 | ''' 10 | import numpy as np 11 | import os 12 | import glob 13 | import datetime 14 | from osgeo import gdal 15 | import argparse 16 | 17 | 18 | # ds = gdal.Open('/d/HI/S1/Asc/Oahu/Fringe/Sequential/compressedSlc/20171220/20171220.slc.vrt') 19 | # slc = ds.GetVirtualMemArray() 20 | 21 | ps = np.load('./ps.npy',allow_pickle=True).all() # needed for ps.workdir below 22 | 23 | if not os.path.isdir(ps.workdir + '/vrts'): 24 | os.mkdir(ps.workdir + '/vrts') 25 | os.mkdir(ps.workdir + '/vrts/slcs') 26 | os.mkdir(ps.workdir + '/vrts/geom') 27 | 28 | 29 | 30 | def radarGeometryTransformer(latfile, lonfile, epsg=4326): 31 | ''' 32 | Create a coordinate transformer to convert map coordinates to radar image line/pixels. 33 | ''' 34 | 35 | driver = gdal.GetDriverByName('VRT') 36 | inds = gdal.OpenShared(latfile, gdal.GA_ReadOnly) 37 | tempds = driver.Create('', inds.RasterXSize, inds.RasterYSize, 0) 38 | inds = None 39 | 40 | tempds.SetMetadata({'SRS' : 'EPSG:{0}'.format(epsg), 41 | 'X_DATASET': lonfile, 42 | 'X_BAND' : '1', 43 | 'Y_DATASET': latfile, 44 | 'Y_BAND' : '1', 45 | 'PIXEL_OFFSET' : '0', 46 | 'LINE_OFFSET' : '0', 47 | 'PIXEL_STEP' : '1', 48 | 'LINE_STEP' : '1'}, 'GEOLOCATION') 49 | 50 | trans = gdal.Transformer( tempds, None, ['METHOD=GEOLOC_ARRAY']) 51 | 52 | return trans 53 | 54 | def lonlat2pixeline(lonFile, latFile, lon, lat): 55 | 56 | trans = radarGeometryTransformer(latFile, lonFile) 57 | 58 | ###Check our location of interest 59 | success, location = trans.TransformPoint(1, lon, lat, 0.)
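# GDAL's Transformer.TransformPoint(bDstToSrc, x, y, z): with bDstToSrc=1 the
# georeferenced (lon, lat) is mapped back to image space, so `location` comes
# out as (pixel, line, z) and `success` is nonzero when the point is covered
# by the geolocation arrays.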
60 | if not success: 61 | print('Location outside the geolocation array range') 62 | 63 | return location 64 | 65 | 66 | def getLinePixelBbox(geobbox, latFile, lonFile): 67 | 68 | south,north, west, east = geobbox 69 | 70 | se = lonlat2pixeline(lonFile, latFile, east, south) 71 | nw = lonlat2pixeline(lonFile, latFile, west, north) 72 | 73 | ymin = int(np.round(np.min([se[1], nw[1]]))) 74 | ymax = int(np.round(np.max([se[1], nw[1]]))) 75 | 76 | xmin = int(np.round(np.min([se[0], nw[0]]))) 77 | xmax = int(np.round(np.max([se[0], nw[0]]))) 78 | 79 | print("x min-max: ", xmin, xmax) 80 | print("y min-max: ", ymin, ymax) 81 | 82 | return ymin, ymax, xmin, xmax 83 | 84 | 85 | def runMain(inps): 86 | ##Parse command line 87 | inps = argparse.Namespace() 88 | inps.indir = '../merged/' 89 | inps.stackdir= 'coreg_stack' 90 | inps.geomdir = 'geometry' 91 | inps.outdir = 'slcs' # output directory for the per-date .vrt files 92 | inps.geobbox = None # optional [S, N, W, E] geographic subset 93 | inps.bbox = None # optional [ymin, ymax, xmin, xmax] line/pixel subset 94 | 95 | 96 | ###Get ann list and slc list 97 | slclist = glob.glob(os.path.join(inps.indir,'SLC','*','*.slc.full')) 98 | num_slc = len(slclist) 99 | 100 | print('number of SLCs discovered: ', num_slc) 101 | print('we assume that the SLCs and the vrt files are sorted in the same order') 102 | 103 | slclist.sort() 104 | 105 | 106 | ###Read the first ann file to get some basic things like dimensions 107 | ###Just walk through each of them and create a separate VRT first 108 | if not os.path.exists(inps.outdir): 109 | print('creating directory: {0}'.format(inps.outdir)) 110 | os.makedirs(inps.outdir) 111 | else: 112 | print('directory "{0}" already exists.'.format(inps.outdir)) 113 | 114 | data = [] 115 | dates = [] 116 | 117 | width = None 118 | height = None 119 | 120 | print('write vrt file for each SLC ...') 121 | for ind, slc in enumerate(slclist): 122 | 123 | ###Parse the vrt file information.
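# Each SLC below is exposed through a VRTRawRasterBand (see vrttmpl): the
# .slc.full file is raw complex float32, so each pixel is 8 bytes (hence
# PixelOffset 8 and LineOffset 8*width), and GDAL can read the binary in
# place without any conversion or copy.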
122 | metadata = {} 123 | width = None 124 | height = None 125 | path = None 126 | 127 | ds = gdal.Open(slc , gdal.GA_ReadOnly) 128 | width = ds.RasterXSize 129 | height = ds.RasterYSize 130 | ds = None 131 | 132 | metadata['WAVELENGTH'] = 0.05546576 133 | metadata['ACQUISITION_TIME'] = os.path.basename(os.path.dirname(slc)) 134 | 135 | path = os.path.abspath(slc) 136 | 137 | tag = metadata['ACQUISITION_TIME'] 138 | 139 | vrttmpl='''<VRTDataset rasterXSize="{width}" rasterYSize="{height}"> 140 | <VRTRasterBand dataType="CFloat32" band="1" subClass="VRTRawRasterBand"> 141 | <SourceFilename relativeToVRT="0">{PATH}</SourceFilename> 142 | <ImageOffset>0</ImageOffset> 143 | <PixelOffset>8</PixelOffset> 144 | <LineOffset>{linewidth}</LineOffset> 145 | <ByteOrder>LSB</ByteOrder> 146 | </VRTRasterBand> 147 | </VRTDataset>''' 148 | 149 | 150 | # outname = datetime.datetime.strptime(tag.upper(), '%d-%b-%Y %H:%M:%S UTC').strftime('%Y%m%d') 151 | 152 | outname = metadata['ACQUISITION_TIME'] 153 | out_file = os.path.join(inps.outdir, '{0}.vrt'.format(outname)) 154 | print('{} / {}: {}'.format(ind+1, num_slc, out_file)) 155 | with open(out_file, 'w') as fid: 156 | fid.write( vrttmpl.format(width=width, 157 | height=height, 158 | PATH=path, 159 | linewidth=8*width)) 160 | 161 | data.append(metadata) 162 | dates.append(outname) 163 | 164 | 165 | ####Set up single stack file 166 | if os.path.exists( inps.stackdir): 167 | print('stack directory: {0} already exists'.format(inps.stackdir)) 168 | else: 169 | print('creating stack directory: {0}'.format(inps.stackdir)) 170 | os.makedirs(inps.stackdir) 171 | 172 | latFile = os.path.join(inps.indir, "geom_reference", "lat.rdr.full.vrt") 173 | lonFile = os.path.join(inps.indir, "geom_reference", "lon.rdr.full.vrt") 174 | 175 | # setting up a subset of the stack 176 | if inps.geobbox: 177 | # if the bounding box in geo-coordinate is given, this has priority 178 | print("finding bbox based on geo coordinates of {} ...".format(inps.geobbox)) 179 | ymin, ymax, xmin, xmax = getLinePixelBbox(inps.geobbox, latFile, lonFile) 180 | 181 | elif inps.bbox: 182 | # if bbox in geo not given then look for line-pixel bbox 183 | print("using the input bbox based on line and pixel subset") 184 | ymin, ymax, xmin, xmax = inps.bbox 185 | 186 | else: 187 | # if no bbox provided, then take the full size 188 | ymin, ymax, xmin, xmax = [0 , height, 0 , width] 189 | 190 | xsize = xmax - xmin 191 | ysize = ymax - ymin 192 | 193 | slcs_base_file = os.path.join(inps.stackdir, 'slcs_base.vrt') 194 | print('write vrt file for stack directory') 195 | with open(slcs_base_file, 'w') as fid: 196 | fid.write( '<VRTDataset rasterXSize="{xsize}" rasterYSize="{ysize}">\n'.format(xsize=xsize, ysize=ysize)) 197 | 198 | for ind, (date, meta) in enumerate( zip(dates, data)): 199 | outstr = '''    <VRTRasterBand dataType="CFloat32" band="{index}"> 200 | <SimpleSource> 201 | <SourceFilename>{path}</SourceFilename> 202 | <SourceBand>1</SourceBand> 203 | <SourceProperties RasterXSize="{width}" RasterYSize="{height}" DataType="CFloat32"/> 204 | <SrcRect xOff="{xmin}" yOff="{ymin}" xSize="{xsize}" ySize="{ysize}"/> 205 | <DstRect xOff="0" yOff="0" xSize="{xsize}" ySize="{ysize}"/> 206 | </SimpleSource> 207 | <Metadata domain="slc"> 208 | <MDI key="Date">{date}</MDI> 209 | <MDI key="Wavelength">{wvl}</MDI> 210 | <MDI key="AcquisitionTime">{acq}</MDI> 211 | </Metadata> 212 | </VRTRasterBand>\n'''.format(width=width, height=height, 213 | xmin=xmin, ymin=ymin, 214 | xsize=xsize, ysize=ysize, 215 | date=date, acq=meta['ACQUISITION_TIME'], 216 | wvl = meta['WAVELENGTH'], index=ind+1, 217 | path = os.path.abspath( os.path.join(inps.outdir, date+'.vrt'))) 218 | fid.write(outstr) 219 | 220 | fid.write('</VRTDataset>') 221 | 222 | ####Set up latitude, longitude and height files 223 | 224 | if os.path.exists( inps.geomdir): 225 | print('directory {0} already exists.'.format(inps.geomdir)) 226 | else: 227 | print('creating geometry directory: {0}'.format(inps.geomdir)) 228 | os.makedirs( inps.geomdir) 229 | 230 | 231 | vrttmpl='''<VRTDataset rasterXSize="{xsize}" rasterYSize="{ysize}"> 232 | <VRTRasterBand dataType="Float64" band="1"> 233 | <SimpleSource> 234 | <SourceFilename>{PATH}</SourceFilename> 235 | <SourceBand>1</SourceBand> 236 | <SourceProperties RasterXSize="{width}" RasterYSize="{height}" DataType="Float64"/> 237 | <SrcRect xOff="{xmin}" yOff="{ymin}" xSize="{xsize}" ySize="{ysize}"/> 238 | <DstRect xOff="0" yOff="0" xSize="{xsize}" ySize="{ysize}"/> 239 | </SimpleSource> 240 | </VRTRasterBand> 241 | </VRTDataset>''' 242 | 243 | print('write vrt file for geometry dataset') 244 | layers = ['lat','lon','hgt','los','shadowMask','incLocal'] 245 | for ind, val in enumerate(layers): 246 | with open( os.path.join(inps.geomdir, val+'.vrt'), 'w') as fid: 247 | fid.write( vrttmpl.format( xsize = xsize,
ysize = ysize, 248 | xmin = xmin, ymin = ymin, 249 | width = width, 250 | height = height, 251 | PATH = os.path.abspath( os.path.join(inps.indir, 'geom_reference', val+'.rdr.full.vrt')), 252 | linewidth = width * 8)) 253 | 254 | -------------------------------------------------------------------------------- /noiseModel.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Input: 5 | data time series 6 | dates 7 | 8 | Output: 9 | linear model: rate, intercept 10 | 11 | 12 | @author: km 13 | """ 14 | 15 | import numpy as np 16 | from scipy import signal,optimize 17 | from scipy.linalg import lstsq 18 | import os 19 | import h5py 20 | from matplotlib import pyplot as plt 21 | from scipy.special import gamma 22 | from scipy.interpolate import interp1d 23 | import scipy 24 | from datetime import date 25 | from sklearn.linear_model import LinearRegression 26 | import math 27 | import argparse 28 | 29 | def interpolate_timeseries(time_vec, data_ts, fs): 30 | """ 31 | Interpolates a time series with unevenly sampled data to an evenly sampled time series. 32 | Adds white noise to interpolated values 33 | 34 | Args: 35 | time (np.ndarray): A 1D array of time values. 36 | data (np.ndarray): A 1D array of data values corresponding to the time values. 37 | freq (float): Sampling interval for time series data. 38 | 39 | Returns: 40 | np.ndarray: A 1D array of interpolated data values. 41 | """ 42 | # Determine the start and end times of the time series 43 | start_time = np.min(time_vec) 44 | end_time = np.max(time_vec) 45 | # Generate the evenly spaced time values 46 | interp_time = np.arange(start_time, end_time, fs) 47 | # Interpolate the data values 48 | f = interp1d(time_vec, data_ts, kind='linear') 49 | interp_data = f(interp_time) 50 | 51 | mask = np.in1d(interp_time,time_vec) 52 | indices = np.where(~mask)[0] 53 | 54 | # Figure out data residual standard deviation so we can add white noise. 
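    # [Editor's note] Rationale: plain linear interpolation would insert strongly
    # correlated samples at the gap epochs, which damps high-frequency power and
    # biases the PSD fit in get_uncertainties() toward colored noise. Drawing the
    # fill-in noise from the detrended residual scatter (next two lines) keeps the
    # interpolated points statistically similar to the observed ones.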
55 |     _,_,_,residuals,_ = linear(data_ts,time_vec)
56 |     interp_data[indices] += np.random.normal(0, np.nanstd(residuals), size=len(indices))
57 | 
58 |     return interp_data,interp_time
59 | 
60 | 
61 | def linear(ts,time):
62 |     # Fit a linear function
63 |     A = np.vstack([time, np.ones((len(time), 1)).flatten()]).T
64 |     Aa = np.dot(np.linalg.inv(np.dot(A.T, A)), A.T)
65 |     mod = np.dot(Aa, ts)
66 |     synth = np.dot(A, mod)
67 |     residuals = (ts-synth) # *lam/(4*np.pi)*100 # cm
68 |     C_model_white = np.var(residuals) * np.linalg.inv( np.dot( A.T,A) )
69 |     return mod, A, synth, residuals, C_model_white
70 | 
71 | 
72 | def sineDesign(t,period=365.25):
73 |     '''
74 |     Input time vector and desired period
75 |     Output design matrix for lsq inversion
76 |     '''
77 |     rows = [np.sin(1/period*2*np.pi*t), np.cos(1/period*2*np.pi*t), np.ones(len(t)),t]
78 |     A = np.asarray(rows)
79 |     return A.T
80 | 
81 | 
82 | def fitSine(A,y):
83 |     (mod,residuals,rank,sing_vals) = lstsq(A,y)
84 |     return mod
85 | 
86 | 
87 | def makeSine(mod, time_vec, period=365.25):
88 |     phase = np.arctan2(mod[1],mod[0])  # radians; a*sin(wt) + b*cos(wt) = A*sin(wt + atan2(b,a)), and np.sin below expects radians
89 |     amplitude = np.linalg.norm([mod[0],mod[1]],2)
90 |     bias = mod[2]
91 |     slope = mod[3]
92 |     sineSynth = time_vec*slope + amplitude*np.sin((2*np.pi/period)*(time_vec) + phase) + bias  # period matches the sineDesign default
93 | 
94 | 
95 | 
96 | 
97 |     return sineSynth
98 | 
99 | 
100 | # Define the power law model
101 | def power_law(f,fs, sig_pl2, k):
102 |     P_0 = (2*(2*np.pi)**k * sig_pl2) / fs**(1-(-k/2))
103 |     return (P_0 / f**k)
104 | 
105 | 
106 | def getPLcov(ts,k,dT):
107 |     '''
108 |     Colored noise uncertainties
109 |     From Langbein 2004
110 | 
111 |     Inputs:
112 |         ts: time series data values
113 |         k: spectral index
114 |         dT: sampling interval
115 |     Outputs:
116 |         E: colored noise covariance matrix
117 | 
118 |     '''
119 |     N = len(ts)
120 |     k+=1e-10 # add a tiny number to stabilize in case k=0
121 |     gs=[]
122 |     iterate = 100 # the gamma/factorial terms overflow float64 beyond ni ~ 170 (on this computer)
123 |     # what happens to psi as you increase k?
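    # [Editor's note] The gamma/factorial ratio in the loop below overflows float64
    # for large ni (math.factorial grows past float range near ni ~ 170), which is
    # why `iterate` is capped at 100. A hedged, overflow-free alternative builds the
    # same coefficients psi_n = gamma(n + k/2) / (n! * gamma(k/2)) by the recursion
    # psi_0 = 1, psi_n = psi_{n-1} * (n - 1 + k/2) / n:
    #
    #   g = 1.0
    #   gs = [g]
    #   for ni in range(1, N):       # one coefficient per lag, no hard cap needed
    #       g *= (ni - 1 + k/2) / ni
    #       gs.append(g)
    #
    # This matches the closed form term by term and fills the full Toeplitz vector.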
124 | for ni in range(iterate): 125 | g = gamma(ni-(-k/2)) / (np.math.factorial(ni) * gamma(k/2)) 126 | gs.append(g) 127 | 128 | gs = np.asarray(gs) 129 | gs[np.isnan(gs)] = 0.0 130 | gs=gs.T 131 | g_vec = gs[-1] * np.ones((N,)) 132 | g_vec[0:len(gs)] = gs 133 | H = scipy.linalg.toeplitz(g_vec) 134 | H *= np.tri(*H.shape) 135 | H = H* dT**(-k/4) 136 | E = np.dot(H,H.T) 137 | E[E<1e-9]=0 138 | # plt.figure();plt.imshow(E);plt.show() 139 | return E 140 | 141 | 142 | def bootstrap_linreg(time_vec, data_ts, num_bootstraps=1000): 143 | # Initialize arrays to store bootstrap estimates of slope and intercept 144 | bootstrap_slopes = np.zeros(num_bootstraps) 145 | bootstrap_intercepts = np.zeros(num_bootstraps) 146 | # Fit the original data to a linear regression model 147 | # linreg = LinearRegression().fit(x.reshape(-1, 1), y) 148 | # Generate bootstrap samples and fit each one to a linear regression model 149 | for i in range(num_bootstraps): 150 | # Generate a bootstrap sample by randomly selecting with replacement from the original data 151 | bootstrap_indices = np.random.choice(len(time_vec), len(time_vec), replace=True) 152 | bootstrap_x = time_vec[bootstrap_indices] 153 | bootstrap_y = data_ts[bootstrap_indices] 154 | # Fit the bootstrap sample to a linear regression model 155 | # bootstrap_linreg = LinearRegression().fit(bootstrap_x.reshape(-1, 1), bootstrap_y) 156 | mod, A,synth, residuals,_ = linear(bootstrap_y,bootstrap_x) 157 | # Store the slope and intercept estimates from the bootstrap 158 | bootstrap_slopes[i] = mod[0] 159 | bootstrap_intercepts[i] = mod[1] 160 | 161 | # Calculate the mean and standard deviation of the bootstrap estimates of slope and intercept 162 | mean_slope = np.mean(bootstrap_slopes) 163 | std_slope = np.std(bootstrap_slopes) 164 | mean_intercept = np.mean(bootstrap_intercepts) 165 | std_intercept = np.std(bootstrap_intercepts) 166 | 167 | # Calculate the 95% confidence intervals for the slope and intercept estimates 168 | ci_slope = np.percentile(bootstrap_slopes, [2.5, 97.5]) 169 | ci_intercept = np.percentile(bootstrap_intercepts, [2.5, 97.5]) 170 | 171 | # Return the original linear regression model, along with the bootstrap estimates and confidence intervals 172 | return mean_slope, std_slope, ci_slope, mean_intercept, std_intercept, ci_intercept 173 | 174 | 175 | def get_uncertainties(residuals,A,fs,plot=False): 176 | ''' 177 | Parameters 178 | ---------- 179 | residuals : 180 | A : Design matrix for inverse problem 181 | fs : sampling freq 182 | plot : True or falst 183 | 184 | Returns 185 | ------- 186 | m_uncertainty_white : 187 | m_uncertainty_color : 188 | rmse : 189 | ''' 190 | rmse = np.sqrt(np.mean(residuals**2)) 191 | # compute the power spectral density using Welch's method 192 | f, psd = signal.welch(residuals) 193 | # Call least_squares with method='lm' to use LAPACK implementation 194 | init_guess = [.01,1.5] 195 | res = optimize.least_squares(lambda params: power_law(f[1:],fs, *params) - psd[1:], init_guess, method='lm') 196 | # output of res.x = [sig_pl2, k, sig_wh2] 197 | 198 | if plot: 199 | # plot the power spectral density and the fitted model 200 | plt.figure() 201 | plt.semilogx(f, psd,'.', label='Power Spectral Density') 202 | plt.semilogx(f, power_law(f,fs, *res.x), 'r--', label='Fitted Model') 203 | plt.xlabel('Frequency (log scale)') 204 | plt.ylabel('Power Spectral Density') 205 | plt.title('Power Spectral Density of Time Series') 206 | plt.legend() 207 | plt.show() 208 | 209 | sig_pl2 = res.x[0] # Variance of powerlaw noise 210 
| spectral_index = res.x[1] # Spectral index for powerlaw noise model (k) 211 | 212 | dT = 1 # sampling interval 213 | Epl = getPLcov(residuals,spectral_index,dT) 214 | 215 | I = np.eye(len(residuals)) 216 | C = np.var(residuals)*I + sig_pl2*Epl 217 | C_model_white = np.var(residuals) * np.linalg.inv( np.dot( A.T,A) ) # same as np.linalg.inv( np.dot( A.T,np.dot(np.linalg.inv(sig_wh2*I),A) ) ) 218 | C_model_color = np.linalg.inv( np.dot( A.T,np.dot(np.linalg.inv(C),A) ) ) 219 | 220 | m_uncertainty_white = 1.96*np.sqrt(np.diag(C_model_white)) 221 | m_uncertainty_color = 1.96*np.sqrt(np.diag(C_model_color)) 222 | 223 | return m_uncertainty_white, m_uncertainty_color, rmse, spectral_index, sig_pl2*Epl 224 | 225 | -------------------------------------------------------------------------------- /noiseModel_run.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Wed Apr 5 16:06:21 2023 5 | 6 | @author: km 7 | """ 8 | 9 | from PyPS2 import noiseModel as nm 10 | import numpy as np 11 | import os 12 | import h5py 13 | from matplotlib import pyplot as plt 14 | from PyPS2 import util 15 | 16 | 17 | periodic = True 18 | fs = 6 19 | plt.close('all') 20 | workDir = os.getcwd() 21 | mintDir = './MintPy/' 22 | 23 | ps = np.load('./ps.npy', allow_pickle=True).all() 24 | 25 | # ifgramStack.h5 26 | filename = mintDir + 'inputs/ifgramStack.h5' 27 | ds_ifgramStack = h5py.File(filename,'r+') 28 | pairs = np.asarray(ds_ifgramStack['date']) 29 | ds_ifgramStack.close() 30 | 31 | filename = mintDir + 'timeseries.h5' 32 | ds = h5py.File(filename, 'r+') 33 | timeseries = ds['timeseries'] 34 | dates = np.asarray(ds['date']) 35 | ts = np.asarray(ds['timeseries'][:, 1400, 1125]) 36 | ds.close() 37 | 38 | 39 | ts_interp,time_interp = nm.interpolate_timeseries(ps.dn0, ts, fs) 40 | # Get decimal years for plotting time series 41 | dec_year = [] 42 | yr0 = ps.dates[0][0:4] 43 | dec_year_interp=[] 44 | for dn in time_interp: 45 | yr = np.floor(dn/365) + int(yr0) 46 | doy = dn%365 47 | dec_year_interp.append(float(yr) + (doy/365.25)) 48 | dec_year_interp = np.asarray(dec_year_interp,dtype=np.float32) 49 | 50 | 51 | # Fit the data to a linear regression model using bootstrapping 52 | mean_slope, std_slope, ci_slope, mean_intercept, std_intercept, ci_intercept = nm.bootstrap_linreg(time_interp, ts_interp,1000) 53 | synth_line = mean_slope*time_interp + mean_intercept 54 | ts_detrend = ts_interp - synth_line 55 | 56 | freq_cos, freq_sin, amplitude_cos, amplitude_sin, phase_shift_cos, phase_shift_sin = util.fitSine1d(dec_year_interp, ts_detrend) 57 | synth_sine = amplitude_cos * np.cos(2 * np.pi * freq_cos * dec_year_interp + phase_shift_cos) + amplitude_sin * np.sin(2 * np.pi * freq_sin * dec_year_interp + phase_shift_sin) 58 | 59 | 60 | freq_cos = 1 61 | freq_sin = 2 62 | amplitude_cos = np.mean(abs(ts_detrend)) 63 | amplitude_sin = np.mean(abs(ts_detrend))/2 64 | phase_shift_cos = 0.0 65 | phase_shift_sin = 0.0 66 | synth_sine_0 = amplitude_cos * np.cos(2 * np.pi * freq_cos * dec_year_interp + phase_shift_cos) + amplitude_sin * np.sin(2 * np.pi * freq_sin * dec_year_interp + phase_shift_sin) 67 | 68 | 69 | plt.figure() 70 | plt.plot(dec_year_interp,ts_detrend,'.') 71 | plt.plot(dec_year_interp,synth_sine,label='sin model') 72 | plt.plot(dec_year_interp,synth_sine_0,label='sin 0') 73 | plt.legend() 74 | 75 | 76 | 77 | 78 | plt.plot(dec_year_interp,ts_interp,'.') 79 | plt.plot(dec_year_interp,synth_line,'.') 80 | 
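# [Editor's note] Running this script top-to-bottom: `synth` (plotted a few lines
# below) is only defined by the periodic/linear fits further down, and the
# f_oneway test needs both `residuals` (linear fit) and `residuals_s` (sine fit),
# so both fits must exist. A minimal guard using this repo's own noiseModel API,
# assuming you want both models:
#
#   mod_lin, A_lin, synth_lin, residuals, _ = nm.linear(ts_interp, time_interp)
#   A_sin = nm.sineDesign(time_interp, 365.25)
#   mod_sin = nm.fitSine(A_sin, ts_interp)
#   residuals_s = ts_interp - nm.makeSine(mod_sin, time_interp)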
plt.plot(dec_year_interp,ts_detrend,'.') 81 | 82 | # plt.plot(ps.dn0,ts,'.') 83 | plt.plot(dec_year_interp,synth) 84 | 85 | plt.plot(dec_year_interp,synth_sine) 86 | 87 | # # Generate a sequence of random steps 88 | # steps = np.random.randn(len(ts_interp)) 89 | # # Construct the random walk series 90 | # ts_interp = np.cumsum(steps) 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | if periodic: 100 | # Periodic Fit 101 | period = 365.25 102 | A = nm.sineDesign(time_interp,period) 103 | # Fit the sinusoid with slope and intercept 104 | mod = nm.fitSine(A,ts_interp)#(phase,amplitude,bias,slope) 105 | synth = nm.makeSine(mod,time_interp) 106 | residuals_s = ts_interp-synth 107 | else: 108 | # Linear Fit 109 | mod, A,synth, residuals,C_model_white = nm.linear(ts_interp,time_interp) 110 | 111 | 112 | from scipy.stats import f_oneway 113 | f_statistic, p_value = f_oneway(residuals, residuals_s) 114 | 115 | print("F-Statistic:", f_statistic) 116 | print("p-value:", p_value) 117 | alpha = 0.05 # significance level 118 | 119 | if p_value < alpha: 120 | print("The models are significantly different. Model 2 provides a better representation.") 121 | else: 122 | print("There is no significant difference between the models.") 123 | 124 | 125 | 126 | plt.figure() 127 | plt.plot(dec_year_interp,np.cumsum(abs(residuals)),label='lsq') 128 | plt.plot(dec_year_interp,np.cumsum(abs(residuals_s)),label='sine') 129 | plt.legend() 130 | 131 | m_uncertainty_white, m_uncertainty_color, rmse, spectral_index,sig = nm.get_uncertainties(residuals,A,fs,plot=True) 132 | 133 | mod2 = [0.0021011/365, 1.35800236e-02] 134 | mle_upper = np.dot(A,mod+ mod2) 135 | mle_lower = np.dot(A,mod- mod2) 136 | 137 | synth_upper_wh = np.dot(A, mod+m_uncertainty_white) 138 | synth_lower_wh = np.dot(A, mod-m_uncertainty_white) 139 | synth_upper_pl = np.dot(A, mod+m_uncertainty_color) 140 | synth_lower_pl = np.dot(A, mod-m_uncertainty_color) 141 | 142 | 143 | 144 | plt.figure() 145 | plt.plot(dec_year_interp, ts_interp, '.',color='gray') 146 | plt.plot(dec_year_interp, synth,'black') 147 | plt.plot(dec_year_interp, synth_lower_wh,'g') 148 | plt.plot(dec_year_interp, synth_upper_wh,'g') 149 | plt.plot(dec_year_interp, synth_lower_pl,'--',color='purple') 150 | plt.plot(dec_year_interp, synth_upper_pl,'--',color='purple') 151 | plt.plot(dec_year_interp, mle_upper,'--',color='red') 152 | plt.plot(dec_year_interp, mle_lower,'--',color='red') 153 | 154 | plt.legend(['Data','mean rate','white','white','Powerlaw','powerlaw','mle']) 155 | plt.show() 156 | 157 | print('Boot strapped Rate uncertainty: ', str(np.round(100000*1.96*std_slope,3))) 158 | print('White noise Rate uncertainty: ', str(np.round(100000*m_uncertainty_white[0],3))) 159 | print('Colored noise Rate uncertainty: ', str(np.round(100000*m_uncertainty_color[0],3))) 160 | print('spectral index: ', str(np.round(spectral_index,3))) 161 | print('rate: ', str(np.round(mod[0],7))) 162 | 163 | 164 | 165 | 166 | import numpy as np 167 | from scipy.optimize import minimize 168 | 169 | def doMLE(C,r): 170 | N = len(r) 171 | ln_det_C = np.log(np.linalg.det(C)) 172 | rtCr = np.dot(r.T, np.dot(np.linalg.inv(C), r)) 173 | Nln2pi = N * np.log(2 * np.pi) 174 | mle = -0.5 * (ln_det_C + rtCr + Nln2pi) 175 | return np.log(mle) 176 | 177 | r=residuals 178 | # define initial guess for C 179 | C_guess = np.eye(len(r)) 180 | C = C_guess 181 | # mle = doMLE(r,C_guess) 182 | # set constraints for C to be positive-definite 183 | # bounds = [(0, None) for _ in range(len(r)**2)] 184 | r.reshape(1,-1) 185 | # optimize 
the negative log-likelihood function with L-BFGS-B
186 | result = minimize(doMLE, C_guess,args=(r,), method='L-BFGS-B')
187 | # get the optimized value of C
188 | C_opt = result.x.reshape((len(r), len(r)))
189 | 
190 | 
191 | def doMLE(C_1d, r):
192 |     C = C_1d.reshape(len(r),len(r)) # Reshape C_1d into a square matrix
193 |     C = np.dot(C.T, C) # Ensure positive semi-definite
194 |     C += np.eye(len(r)) * 1e-6 # Add small positive value to the diagonal elements
195 |     ln_det_C = np.log(np.linalg.det(C))
196 |     rtCr = np.dot(r.T, np.dot(np.linalg.inv(C), r))
197 |     Nln2pi = len(r) * np.log(2 * np.pi)  # N was undefined inside this function; use len(r)
198 |     mle = -0.5 * (ln_det_C + rtCr + Nln2pi)
199 |     return -mle # Minimize the negative log likelihood (mle is already a scalar; log(mle) of a negative value returned nan)
200 | 
201 | 
202 | C_guess = np.eye(len(r)).reshape(1,-1) # Initial guess for C_1d
203 | result = minimize(doMLE, C_guess, args=(r,)) # solve for C
204 | 
205 | estimated_C_1d = result.x
206 | estimated_C = estimated_C_1d.reshape(len(r),len(r))
207 | estimated_C = np.dot(estimated_C.T, estimated_C) # Ensure positive semi-definite
208 | 
209 | 
210 | 
211 | N = len(residuals)
212 | U = np.linalg.cholesky(C_model_white)
213 | ln_det_C = 0.0
214 | for i in range(0,N):  # was range(0,1), which only used the first diagonal element
215 |     ln_det_C += np.log(U[i,i])  # log-determinant via Cholesky: ln|C| = 2*sum(ln U_ii)
216 | ln_det_C *= 2.0
217 | 
218 | # ln_det_C = np.log(np.linalg.det(C_model_white))  # direct determinant; under/overflows for large N, hence the Cholesky route above
219 | # sigma_eta =
220 | # logL = -0.5 * (N*np.log(2*np.pi) + ln_det_C + 2.0*(N)*np.log(sigma_eta) + N)  # needs sigma_eta, which is not defined yet
221 | 
222 | #____________________________________________
223 | # Try using Hectorp functions (requires hectorp to be installed and imported; t, H, x, F are placeholders)
224 | # t =
225 | # [theta,C_theta,ln_det_C,sigma_eta] = compute_leastsquares(t,H,x,F,samenoise=False)
226 | 
-------------------------------------------------------------------------------- /ps.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Tue Aug 16 11:32:18 2022
5 | 
6 | Persistent Scatterer type approach
7 | 
8 | @author: km
9 | """
10 | 
11 | import numpy as np
12 | import isce.components.isceobj as isceobj
13 | from matplotlib import pyplot as plt
14 | import cv2
15 | import os
16 | import timeit
17 | import time
18 | import glob as glob
19 | import util
20 | from util import show
21 | import skimage
22 | minGam = .65
23 | 
24 | ps = np.load('./ps.npy',allow_pickle=True).all()
25 | 
26 | # Load the gamma0 file
27 | f = ps.tsdir + '/gamma0.int'
28 | intImage = isceobj.createIntImage()
29 | intImage.load(f + '.xml')
30 | gamma0= intImage.memMap()[:,:,0]
31 | gamma0=gamma0.copy() # mmap is readonly, so we need to copy it.
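# [Editor's note] Two caveats for the masking and stacking below:
# (1) msk follows the numpy.ma convention: 1 (True) = pixel excluded
#     (gamma0 <= minGam), 0 = pixel kept, so it can be passed directly as a
#     masked_array mask.
# (2) Further down, np.tile(mskCropCube,(1,stack.shape[0])) does not yield a
#     (ndates, ny, nx) mask, and the griddata demo references `samp`, which is
#     never defined. A hedged sketch of the likely intent:
#
#       stackMA = np.ma.masked_array(stack, mask=np.broadcast_to(mskCrop, stack.shape))
#       samp = stackMA[10].filled(np.nan)  # one masked ifg, NaN where excluded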
32 | gamma0[np.isnan(gamma0)] = 0 33 | 34 | msk = np.ones(gamma0.shape) 35 | msk[gamma0>minGam] = 0 36 | show(msk) 37 | 38 | stack = [] 39 | ymin,ymax,xmin,xmax = 1200,2300,500,2000 40 | mskCrop = msk[ymin:ymax,xmin:xmax] 41 | for ii in range(len(ps.dates)-1): 42 | # load SLCS and make an ifg 43 | d=ps.dates[ii] 44 | d2 = ps.dates[ii+1] 45 | if ps.crop: 46 | 47 | f = ps.slcdir +'/'+ d + '/' + d + '.slc.full.crop' 48 | else: 49 | f = ps.slcdir +'/'+ d + '/' + d + '.slc.full' 50 | slcImage1 = isceobj.createSlcImage() 51 | slcImage1.load(f + '.xml') 52 | 53 | if ps.crop: 54 | f = ps.slcdir +'/'+ d2 + '/' + d2 + '.slc.full.crop' 55 | else: 56 | f = ps.slcdir +'/'+ d2 + '/' + d2 + '.slc.full' 57 | slcImage2 = isceobj.createSlcImage() 58 | slcImage2.load(f + '.xml') 59 | 60 | ifg = np.multiply(slcImage1.memMap()[ymin:ymax,xmin:xmax,0],np.conj(slcImage2.memMap()[ymin:ymax,xmin:xmax,0])) 61 | ifgMA = np.ma.masked_array(ifg,mask=mskCrop) 62 | stack.append(ifgMA) 63 | 64 | stack = np.asarray(stack) 65 | mskCropCube = np.expand_dims(mskCrop,2) 66 | 67 | # Convert to masked array by extending the dimension of the mask and repeating 68 | stackMA = np.ma.masked_array(stack,mask=np.tile(mskCropCube,(1,stack.shape[0]))) 69 | 70 | 71 | start_time=time.time() 72 | samp_unw = skimage.restoration.unwrap_phase(np.angle(stack)) 73 | totalTime = time.time()-start_time 74 | print('It took ' + str(np.round(totalTime,3)) + ' seconds.') 75 | show(samp_unw[10,:,:]) 76 | 77 | from scipy.interpolate import griddata 78 | 79 | 80 | 81 | 82 | xx = np.arange(0,samp.shape[1]) 83 | yy = np.arange(0,samp.shape[0]) 84 | XX,YY = np.meshgrid(xx,yy) 85 | ids = np.where(~np.isnan(samp)) 86 | reaGrid = griddata((XX[ids],YY[ids]),np.real(samp[ids]), (XX,YY), method='cubic') 87 | imaGrid = griddata((XX[ids],YY[ids]),np.imag(samp[ids]), (XX,YY), method='cubic') 88 | ifgGrid = cpx = imaGrid*1j + reaGrid 89 | 90 | plt.figure();plt.imshow(np.angle(ifgGrid));plt.title('PS interpolated') 91 | 92 | -------------------------------------------------------------------------------- /rate_uncertainty.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Tue Mar 21 13:13:50 2023 4 | 5 | @author: km 6 | """ 7 | 8 | from scipy import signal,optimize 9 | from scipy.signal import freqz 10 | from scipy.signal import periodogram 11 | import os 12 | import isceobj 13 | import h5py 14 | import numpy as np 15 | from matplotlib import pyplot as plt 16 | from scipy.optimize import minimize 17 | from sklearn.gaussian_process import GaussianProcessRegressor 18 | from sklearn.gaussian_process.kernels import RBF 19 | from sklearn.gaussian_process.kernels import ConstantKernel as C 20 | from scipy.special import gamma 21 | import scipy 22 | import fitSine 23 | 24 | workDir = os.getcwd() 25 | mintDir = './MintPy/' 26 | 27 | ps = np.load('./ps.npy', allow_pickle=True).all() 28 | 29 | 30 | # ifgramStack.h5 31 | filename = mintDir + 'inputs/ifgramStack.h5' 32 | ds_ifgramStack = h5py.File(filename,'r+') 33 | pairs = np.asarray(ds_ifgramStack['date']) 34 | ts1 = np.asarray(ds_ifgramStack['unwrapPhase'][:, 900, 2795]) 35 | ts2 = np.asarray(ds_ifgramStack['unwrapPhase'][:, 960, 2795]) 36 | plt.figure() 37 | plt.plot(ts1,label='1') 38 | plt.plot(ts2,label='2') 39 | plt.legend() 40 | plt.figure() 41 | plt.plot(ts1-ts2,'.');plt.show() 42 | coh = np.asarray(ds_ifgramStack['coherence'][:, 1000, 2000]) 43 | np.pdf 44 | ds_ifgramStack.close() 45 | 46 | ds = h5py.File(filename, 'r+') 47 | dates = 
np.asarray(ds['date']) 48 | 49 | filename = mintDir + 'timeseries.h5' 50 | ds = h5py.File(filename, 'r+') 51 | timeseries = ds['timeseries'] 52 | dates = np.asarray(ds['date']) 53 | ts = np.asarray(ds['timeseries'][:, 1400, 1125]) 54 | ds.close() 55 | # plt.figure();plt.imshow(timeseries[5,:,:],'magma') 56 | plt.figure();plt.plot(ts) 57 | 58 | # Fit a linear function 59 | G = np.vstack([ps.dn0, np.ones((len(ps.dn0), 1)).flatten()]).T 60 | Gg = np.dot(np.linalg.inv(np.dot(G.T, G)), G.T) 61 | mod = np.dot(Gg, ts) 62 | rate = mod[0]*365 # cm/yr 63 | #offs = np.reshape(mod[1,:],(ps.nyl, ps.nxl)) 64 | synth = np.dot(G, mod) 65 | res = (ts-synth) # *lam/(4*np.pi)*100 # cm 66 | 67 | # Find the uncertainty 68 | co = np.cov(ts) 69 | # mcov=np.diag(np.dot(Gg,np.dot(co,Gg.T))) 70 | mcov = co * np.diag(np.linalg.inv(np.dot(G.T, G))) 71 | # mcov = np.inv( np.dot( np.dot())) 72 | 73 | m_uncertainty = 1.96*mcov**.5 74 | synthlow = np.dot(G, mod-m_uncertainty) 75 | synthhigh = np.dot(G, mod+m_uncertainty) 76 | 77 | 78 | 79 | # Fit a periodic funcition 80 | period = 365.25 81 | phase, amp, bias, slope = fitSine.fitSine1d(ps.dn0, ts, period) 82 | sinesynth = ps.dn0*slope + amp*np.sin((2*np.pi/365)*(ps.dn0) + phase) + bias 83 | plt.figure() 84 | plt.plot(ps.dec_year, ts, '.') 85 | plt.plot(ps.dec_year, synth) 86 | plt.plot(ps.dec_year, synthlow) 87 | plt.plot(ps.dec_year, synthhigh) 88 | plt.plot(ps.dec_year, sinesynth) 89 | 90 | 91 | 92 | 93 | # Colored noise uncertainties 94 | # From Langbein 2004 95 | d = ts # Data time series 96 | A= np.vstack([ps.dn0, np.ones((len(ps.dn0), 1)).flatten()]).T # Design matrix 97 | n = 1.33 # Spectral index 98 | N = len(d) 99 | 100 | gs=[] 101 | iterate = 150 102 | I = np.eye(len(ts)) 103 | # what happens to psi as you increase n? 104 | for ni in range(iterate): 105 | # g = gamma(I + n/2)/(gamma(n/2) * np.math.factorial(ni)) 106 | 107 | g = gamma(ni-(-n/2)) / (np.math.factorial(ni) * gamma(n/2)) 108 | # no longer computable after n=170 (on this computer) 109 | gs.append(g) 110 | plt.figure();plt.plot(range(len(gs)),gs,'.');plt.ylabel('g');plt.xlabel('N') 111 | plt.title('k=-1; psi goes to 0 inf as');plt.show() 112 | gs = np.asarray(gs) 113 | 114 | gs[np.isnan(gs)] = 0.0 115 | 116 | g_vec = gs[-1] * np.ones((N,)) 117 | g_vec[0:len(gs)] = gs 118 | H = scipy.linalg.toeplitz(g_vec) 119 | H *= np.tri(*H.shape) 120 | plt.figure();plt.imshow(H);plt.title('Transformation matrix H') 121 | plt.show() 122 | 123 | gamma(ni-(n/2)) / (np.math.factorial(ni) * gamma(-n/2)) 124 | 125 | 126 | dT =1# 6 * 24*60*60# 6 days (in seconds) sampling interval (we'll use days) !!!! might need to interpolate TS????? 
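# [Editor's note] The Welch PSD fit below assumes an evenly sampled series;
# Sentinel-1 epochs are roughly 6- or 12-day but with gaps, so strictly the data
# should be resampled onto a regular grid first (noiseModel.interpolate_timeseries
# does this in the companion script). A minimal plain-numpy resampling sketch:
#
#   t_even = np.arange(ps.dn0.min(), ps.dn0.max(), dT)  # dT in days
#   ts_even = np.interp(t_even, ps.dn0, ts)             # linear resampling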
127 | fs = 1/dT # sampling frequency in Hz 128 | H = H* dT**(-n/4) 129 | E = np.dot(H,H.T) 130 | plt.figure();plt.imshow(E);plt.title('E') 131 | plt.show() 132 | 133 | 134 | sig_pl2 = 1e-4 135 | # (11) Amplitude of the power law noise in the freq domain 136 | P_0 = (2*(2*np.pi)**-n * sig_pl2) / fs**(1-(n/2)) 137 | 138 | # compute the power spectral density using Welch's method 139 | f, psd = signal.welch(ts) 140 | 141 | # (7) Power law noise is described in the frequency domain: 142 | P_f = P_0/f**n 143 | plt.figure() 144 | plt.semilogx(f, P_f, label='Power Spectral Density of power law model') 145 | plt.semilogx(f, psd, label='Power Spectral Density of time series') 146 | 147 | plt.xlabel('Frequency (log scale)') 148 | plt.ylabel('Power Spectral Density') 149 | plt.title('Power Spectral Density of Time Series') 150 | plt.legend() 151 | plt.show() 152 | 153 | 154 | 155 | 156 | # Define the power law model 157 | def power_law(f, sig_pl2, n,y0): 158 | P_0 = (2*(2*np.pi)**n * sig_pl2) / fs**(1-(-n/2)) 159 | return (P_0 / f**n) + y0 160 | 161 | # fit the model to the power spectral density 162 | 163 | popt, pcov = optimize.curve_fit(power_law, f[1:], psd[1:]) 164 | ''' output of popt = [sig_pl2, n, sig_wh2] ''' 165 | 166 | # Call least_squares with method='lm' to use LAPACK implementation 167 | init_guess = popt 168 | res = optimize.least_squares(lambda params: power_law(f[1:], *params) - psd[1:], init_guess, method='lm') 169 | 170 | # plot the power spectral density and the fitted model 171 | plt.figure() 172 | plt.semilogx(f[1:], psd[1:],'.', label='Power Spectral Density') 173 | plt.semilogx(f, power_law(f, *res.x), 'r--', label='Fitted Model') 174 | plt.xlabel('Frequency (log scale)') 175 | plt.ylabel('Power Spectral Density') 176 | plt.title('Power Spectral Density of Time Series') 177 | plt.legend() 178 | plt.show() 179 | 180 | sig_wh2 = popt[2] # Variance of white noise 181 | sig_pl2 = popt[0] # Variance of white noise 182 | 183 | 184 | C = np.nanvar(ts)*I + sig_pl2*E 185 | C_model_white = np.var(ts) * np.linalg.inv( np.dot( A.T,A) ) # same as np.linalg.inv( np.dot( A.T,np.dot(np.linalg.inv(sig_wh2*I),A) ) ) 186 | C_model_color = np.linalg.inv( np.dot( A.T,np.dot(np.linalg.inv(C),A) ) ) 187 | 188 | m_uncertainty_white = m_uncertainty # Defined above during inversion 189 | m_uncertainty_color = 1.96*np.sqrt(C_model_color) 190 | 191 | 192 | # rate_upper_wh = rate + m_uncertainty_white[0,0] 193 | # rate_lower_wh = rate - m_uncertainty_white[0,0] 194 | 195 | synth = np.dot(A, mod) 196 | synth_upper_wh = np.dot(A, mod+m_uncertainty_white) 197 | synth_lower_wh = np.dot(A, mod-m_uncertainty_white) 198 | 199 | synth_upper_pl = np.dot(A, mod+np.diag(m_uncertainty_color)) 200 | synth_lower_pl = np.dot(A, mod-np.diag(m_uncertainty_color)) 201 | 202 | 203 | plt.figure() 204 | plt.plot(ps.dec_year, ts, '.',color='black') 205 | plt.plot(ps.dec_year, synth,'black') 206 | plt.plot(ps.dec_year, synth_lower_wh,'g') 207 | plt.plot(ps.dec_year, synth_upper_wh,'g') 208 | plt.plot(ps.dec_year, synth_lower_pl,'--',color='gray') 209 | plt.plot(ps.dec_year, synth_upper_pl,'--',color='gray') 210 | plt.legend(['Data','mean rate','white','white','Powerlaw','powerlaw']) 211 | plt.show() 212 | 213 | 214 | 215 | standardError = np.sqrt(np.diag(sig_x2)) 216 | from scipy.stats import t 217 | t_value = t.ppf(0.975, N - 3) 218 | ci = t_value * standardError 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 | 237 | 238 | # # Code from Williams 2003 239 | # from 
scipy.special import gamma 240 | # import scipy 241 | # x= ts # time series of data 242 | # n = len(x) # number of data 243 | # k = 0 # Spectral index for colored noise model 244 | 245 | # # Form the covariance matrix for colored noise 246 | # psis=[] 247 | # iterate = 150 248 | # # what happens to psi as you increase n? 249 | # for ni in range(iterate): 250 | # psi_n = gamma(ni-(k/2)) / (np.math.factorial(ni) * gamma(-k/2)) 251 | # # no longer computable after n=170 (on this computer) 252 | # psis.append(psi_n) 253 | # plt.figure();plt.plot(range(len(psis)),psis,'.');plt.ylabel('psi');plt.xlabel('n') 254 | # plt.title('k=-1; psi goes to 0 inf as');plt.show() 255 | # psis = np.asarray(psis) 256 | 257 | # psis[np.isnan(psis)] = 0.0 258 | 259 | # psi_vec = psis[-1] * np.ones((n,)) 260 | # psi_vec[0:len(psis)] = psis 261 | # T = scipy.linalg.toeplitz(psi_vec) 262 | # T *= np.tri(*T.shape) 263 | # plt.figure();plt.imshow(T);plt.title('Transformation matrix T') 264 | 265 | # # Scale T to ensure that the power spectra for k will cross at the consistent freq given sampling interval 266 | # Tdel = 6 * 24*60*60# 6 days (in seconds) sampling interval (we'll use days) !!!! might need to interpolate TS????? 267 | # T = T* Tdel**(-k/4) 268 | # J_k = np.dot(T,T.T) 269 | # plt.figure();plt.imshow(J_k);plt.title('J') 270 | # plt.show() 271 | 272 | # # Equation for the power spectrum (10) 273 | # D_k = 2*(2*np.pi)**k * (24*60*60*365.25)**(k/2) 274 | # b_k = 1 #??? noise amplitude 275 | # fs = 1/Tdel # sampling frequency in Hz 276 | # f = 1 #??? frequency array? 277 | # powerAmp = ((D_k * b_k**2)/(fs**(k/2 + 1))) * f**k #this is for frequency domain. function of f? 278 | 279 | # f_0 = fs**.5 / (2* np.pi* np.sqrt(24*60*60*365.25)) 280 | 281 | 282 | # # uncertainty in slope for any colored noise source (25) 283 | # beta = -(k/2) - 2 284 | # gam = -3-k 285 | # P = np.array([-.0237,-.3881,-2.661,-9.8529,-21.0922,-25.1638,-11.4275,10.7839,20.3377,11.9942]) 286 | # N = len(P) 287 | # nu_vec = np.zeros((N)) 288 | 289 | # for ii in range(len(P)): 290 | # nu_vec[ii] = (P[ii]*k**(N-ii)) 291 | # nu = np.sum(nu_vec) 292 | # sig_r2 = b_k**2 * nu * Tdel**(beta) * n**(gam) 293 | 294 | 295 | 296 | 297 | # # Gaussian processes regression 298 | # # Define the kernel function 299 | # lengthScale = 5 300 | # kernel = C(1.0, (1e-3, 1e3)) * RBF(lengthScale, (1, 365)) 301 | # data_var = 1.96*.01 # np.sqrt(np.var(ts)) 302 | # noise = data_var*np.ones_like(ts) 303 | # # Create a Gaussian Process regressor object 304 | # gp = GaussianProcessRegressor( 305 | # kernel=kernel, alpha=noise**2, n_restarts_optimizer=10) 306 | # gp.fit(ps.dn0.reshape(-1, 1), ts.reshape(-1, 1)) 307 | # # Make predictions for some test data 308 | # X_test = np.linspace(ps.dn0[0], ps.dn0[-1], 500).reshape(-1, 1) 309 | # x_dec = (X_test/365) + ps.dec_year[0] 310 | # y_pred, sigma = gp.predict(X_test, return_std=True) 311 | # # Plot the results 312 | # plt.figure(figsize=(10, 5)) 313 | # plt.plot(ps.dec_year, ts, 'ko', label='Observations') 314 | # plt.plot(x_dec, y_pred, 'b-', label='Prediction') 315 | # # plt.fill_between(X_test[:, 0], y_pred[:, 0] - sigma, y_pred[:, 0] + sigma, 316 | # # alpha=0.5, color='lightblue', label='Uncertainty') 317 | # plt.fill_between(x_dec.ravel(), y_pred.ravel() - sigma, y_pred.ravel() + sigma, 318 | # alpha=0.5, color='lightblue', label='Uncertainty') 319 | # plt.xlabel('Time') 320 | # plt.ylabel('Value') 321 | # plt.legend() 322 | # plt.show() 323 | 324 | 325 | 326 | # Fit a weighted least squares linear function to the ts 
array 327 | # coh_subset = np.zeros(len(ps.pairs)) 328 | # for i in range(len(ps.pairs)): 329 | # p = ps.pairs[i] 330 | # if p in 331 | # index = ps.pairs2.index(p) 332 | # coh_subset[i] = coh[index] 333 | 334 | # corStack = [] 335 | # for p in ps.pairs: 336 | # cor_file = '/' + p + '/fine_lk.cor' 337 | # corImage = isceobj.createIntImage() 338 | # corImage.load(cor_file + '.xml') 339 | # cor = corImage.memMap()[:,:,0] 340 | # cor = cor.copy() 341 | # # cor[np.isnan(gam)] = np.nan 342 | # corStack.append(cor) 343 | # corStack = np.asarray(corStack,dtype=np.float32)[:,:,:] 344 | # # make the G matrix 345 | # G = np.vstack([ps.dn0, np.ones((len(ps.dn0), 1)).flatten()]).T 346 | # # make the weights matrix based on the inverse of coherence coh 347 | # W = np.diag(1/coh) 348 | # # make the weighted G matrix 349 | # Gw = np.dot(np.linalg.inv(np.dot(np.dot(G.T, W), G)), np.dot(G.T, W)) 350 | # # make the weighted ts array 351 | # ts_w = ts/coh 352 | # # make the weighted model 353 | # mod_w = np.dot(Gw, ts_w) 354 | # # make the weighted synthetic time series 355 | # synth_w = np.dot(G, mod_w) 356 | # # make the weighted residuals 357 | # res_w = (ts_w-synth_w) # *lam/(4*np.pi)*100 # cm 358 | # # Get the norm of the residuals 359 | # resnorm = np.linalg.norm(res, axis=0) 360 | # s = resnorm/np.sqrt(len(res)) # from page 39 of Aster Parameter Estimation and Inverse Problems 361 | # resstd = np.std(res, axis=0) 362 | # # Do a chi-squared test 363 | # import scipy.stats as stats 364 | # chi2 = np.sum(res**2/s) 365 | # p = 1 - stats.chi2.cdf(chi2, len(res)-2) 366 | # # Plot the time series with error bars using s for each point 367 | # plt.figure() 368 | # plt.plot(ps.dec_year, ts, '.') 369 | # plt.errorbar(ps.dec_year, ts, yerr=s, fmt='o', color='black', ecolor='lightgray', elinewidth=1, capsize=3, capthick=1) 370 | # plt.plot(ps.dec_year, synth) 371 | # plt.xlabel('Time') 372 | # plt.ylabel('Value') 373 | # plt.title('Time series with error bars') 374 | # plt.show() 375 | -------------------------------------------------------------------------------- /refDef.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Mon Aug 13 12:35:54 2018 5 | 6 | @author: kdm95 7 | """ 8 | 9 | import numpy as np 10 | import isceobj 11 | from matplotlib import pyplot as plt 12 | import scipy.signal as signal 13 | from PyPS2 import invertRates,makeMap,util 14 | 15 | # xRef=40;yRef=900;disconnected=True;plotStuff=True;doTimeFilt=False;removePlane=False;order=1;skip=1;offset=False;phsElev=True;startID=0;stopID=-1;geoCode=False 16 | def refDef(xRef=False,yRef=False,disconnected=True, plotStuff=True, order=1, skip=5, doTimeFilt=False,offset=False,phsElev=False, startID=0,stopID=-1,geoCode=False): 17 | ''' 18 | Loads the unwrapped interferograms, does the sbas-like inversion, converts to 19 | cm, inverts to find the rates. 20 | input: 21 | xRef: reference pixel x-coordinate (radar coordinates) 22 | yRef: reference pixel y-coordinate (radar coordinates) 23 | If you don't give values for reference, it will guess the best one. 24 | disconnected: True or False. Make True if there are islands in the image. (Disconnected components) 25 | plotStuff: True or False if you want to make plot outputs or just run and save the rates.npy file. 26 | order: order of 2d polynomial to remove. 
(0 is none, 1 is ramp, 2 is quadratic) 27 | skip: redundant pairs (1 is a simple sequential chain) 28 | do timefilt: apply a temporal filter (this doesn't change result of long term rates. recommended to keep False) 29 | offset: apply an offset (not recommended) 30 | phsElev: remove a phs-elevation dependence 31 | minCor: sets ifgs with median corrlation < minCor to to zero 32 | startID/stopID: start and stop ids for the dates for the rate estimation (it still returns alld with all dates) 33 | outputs: 34 | rates.npy 35 | ''' 36 | 37 | ps = np.load('./ps.npy',allow_pickle=True).all() 38 | msk = np.load('Npy/msk.npy') 39 | cor = np.load('Npy/cor.npy') 40 | 41 | 42 | pairs = list() 43 | for ii,d in enumerate(ps.dates[0:-1]): 44 | for jj in np.arange(1,skip+1): 45 | if ii+jj < len(ps.dates): 46 | pairs.append(ps.dates[ii] + '_' + ps.dates[ii+jj]) 47 | 48 | # Now make pairs2 49 | # pairs2Overlap = 5 50 | # pairs = list() 51 | # # pairs2.append(dates[ii] + '_' + dates[0]) 52 | # for ii,d in enumerate(ps.dates[0:-1]): 53 | # for jj in np.arange(4,pairs2Overlap+1): 54 | # if ii+jj < len(ps.dates): 55 | # pairs.append(ps.dates[ii] + '_' + ps.dates[ii+jj]) 56 | 57 | # order = 2 # Use 0 if you don't want to remove long wavelength function 58 | 59 | if not xRef: 60 | win=80 61 | Q = np.ones((win,win)) 62 | corF = signal.convolve2d(cor,Q, mode='same')/(win**2) 63 | yRef,xRef = np.where(corF==np.nanmax(corF)); yRef=yRef[0];xRef=xRef[0] 64 | 65 | print([xRef,yRef]) 66 | 67 | if plotStuff: 68 | plt.close('all') 69 | fig,ax = plt.subplots(1,3,figsize=(16,4)) 70 | ax[0].imshow(msk);ax[0].set_title('Mask') 71 | ax[1].imshow(cor);ax[1].set_title('Avg Correlation') 72 | ax[2].imshow(corF);ax[2].set_title('Filtered Avg Correlation') 73 | ax[2].scatter(xRef,yRef,color='red') 74 | 75 | 76 | 77 | stack = [] 78 | for ii,p in enumerate(pairs): 79 | unw_file = ps.intdir + '/' + p + '/filt.unw' 80 | unwImage = isceobj.createIntImage() 81 | unwImage.load(unw_file + '.xml') 82 | unw = unwImage.memMap()[:,:,0].copy() 83 | unw = unw - unw[yRef,xRef] 84 | stack.append(unw) 85 | stack = np.asarray(stack,dtype=np.float32) 86 | 87 | 88 | 89 | # SBAS Inversion to get displacement at each date 90 | # Make G matrix for dates inversion 91 | G = np.zeros((len(pairs)+1,len(ps.dates)))# extra row of zeros to make first date zero for reference 92 | for ii,pair in enumerate(pairs): 93 | a = ps.dates.index(pair[0:8]) 94 | b = ps.dates.index(pair[9:17]) 95 | G[ii,a] = 1 96 | G[ii,b] = -1 97 | G[-1,0]=1 98 | 99 | Gg = np.dot( np.linalg.inv(np.dot(G.T,G)), G.T) 100 | 101 | # Do dates inversion 102 | alld=np.zeros((len(ps.dec_year),ps.nxl*ps.nyl)) 103 | for ii in np.arange(0,ps.nyl-1): #iterate through rows 104 | tmp = np.zeros((len(pairs)+1,ps.nxl)) 105 | for jj,pair in enumerate(pairs): #loop through each ifg and append to alld 106 | tmp[jj,:] = stack[jj,ii,:] 107 | alld[:,ii*ps.nxl:ps.nxl*ii+ps.nxl] = np.dot(Gg, tmp) 108 | del(tmp) 109 | 110 | if doTimeFilt: 111 | alldFilt = util.tsFilt(alld, ps.dec_year, N=5, desiredPeriod = 1) 112 | 113 | alld = np.reshape(alld,(len(ps.dates),ps.nyl,ps.nxl)) 114 | 115 | 116 | for ii in range(alld.shape[0]): 117 | if order > 0: 118 | alld[ii,:,:] -= util.fitLong(alld[ii,:,:], order,msk) 119 | if phsElev: 120 | alld[ii,:,:] -= util.phaseElev(alld[ii,:,:], ps.hgt_ifg,msk,0,ps.nyl,0,ps.nxl) 121 | 122 | 123 | # # CONVERT TO CM 124 | alld=alld*ps.lam/(4*np.pi)*100 125 | 126 | stacksum = -np.nansum(stack,axis=0) 127 | 128 | rates,resstd = 
invertRates.invertRates(alld[startID:stopID,:,:],ps.dn[startID:stopID], seasonals=False,mcov_flag=False,water_elevation=ps.seaLevel) 129 | rates = np.asarray(rates,dtype=np.float32) 130 | resstd = np.asarray(resstd,dtype=np.float32) 131 | ratesMasked = rates.copy() 132 | ratesMasked[msk==0] = np.nan 133 | # plt.figure();plt.plot(ps.dec_year,alld[:,897,46]) 134 | 135 | # if disconnected: 136 | # #remove mean from each disconnected region 137 | # minPix = 1000 138 | # labels = util.getConCom(msk,minPix) 139 | # if plotStuff: 140 | # fig,ax = plt.subplots(2,1,figsize=(5,6)) 141 | # ax[0].imshow(msk);ax[0].set_title('mask') 142 | # ax[1].imshow(labels);ax[1].set_title('connected regions') 143 | # for ii in range(int(labels.max())): 144 | # if len(rates[labels==ii+1]) < minPix: 145 | # rates[labels==ii+1] = np.nan # mask out small islands of data 146 | # msk[labels==ii+1] = 0 147 | # else: 148 | # rates[labels==ii+1]-=np.nanmean(rates[labels==ii+1]) 149 | 150 | 151 | if geoCode: 152 | ratesGeo = util.geocodeKM(rates,method='linear') 153 | ratesGeo[np.isnan(ratesGeo)] = 0 154 | np.save('./TS/rates.geo.npy',ratesGeo) 155 | 156 | # ratesStdGeo = util.geocodeKM(resstd,method='linear') 157 | # ratesStdGeo[np.isnan(ratesStdGeo)] = 0 158 | # np.save('./TS/ratesStd.geo.npy',ratesStdGeo) 159 | 160 | mskGeo = util.geocodeKM(msk,method='nearest') 161 | # mskGeo[np.isnan(mskGeo)] = 0 162 | # mskGeo[mskGeo<.6] = 0 163 | # mskGeo[mskGeo>0] = 1 164 | np.save('./TS/msk.geo.npy',mskGeo) 165 | 166 | 167 | # gamthresh = .5 168 | # rates[msk == 0 ]=np.nan 169 | # resstd[msk == 0 ]=np.nan 170 | # stacksum[msk == 0 ]=np.nan 171 | 172 | 173 | if offset: 174 | rates=rates+offset 175 | 176 | ssvmin = stacksum[~np.isnan(stacksum)].min() 177 | ssvmax = stacksum[~np.isnan(stacksum)].max() 178 | 179 | vmin,vmax = -3,3 180 | pad=0 181 | 182 | 183 | if plotStuff: 184 | import cartopy.crs as ccrs 185 | 186 | bg = 'World_Imagery' 187 | zoomLevel = 13 188 | title = 'Rates (cm/yr)' 189 | vmin,vmax = -10,10 190 | makeMap.mapBackground(bg, ps.minlon, ps.maxlon, ps.minlat, ps.maxlat, zoomLevel, title) 191 | plt.imshow(ratesGeo,transform=ccrs.PlateCarree(),vmin=vmin,vmax=vmax,extent=[ps.minlon, ps.maxlon, ps.minlat, ps.maxlat],zorder=2) 192 | makeMap.mapImg(rates,ps.lon_ifg,ps.lat_ifg,vmin,vmax,pad,10,'rates (cm/yr)',plotFaults=True) 193 | makeMap.mapImg(stacksum,ps.lon_ifg,ps.lat_ifg,ssvmin, ssvmax, pad, 10, 'Stack sum (cm)', plotFaults=True) 194 | fig,ax = plt.subplots(2,1,figsize=(6,8)) 195 | ax[0].imshow(rates,vmin=vmin,vmax=vmax);ax[0].set_title('rates (cm/yr)') 196 | ax[1].imshow(stacksum,vmin=ssvmin,vmax=ssvmax); ax[1].set_title('stack sum') 197 | 198 | 199 | np.save('rates.npy',rates) 200 | np.save('resstd.npy',resstd) 201 | 202 | return rates,alld 203 | 204 | if __name__ == "__main__": 205 | refDef(xRef=False,yRef=False,disconnected=True, plotStuff=True, order=1, skip=1, doTimeFilt=False,offset=False,phsElev=False, startID=0,stopID=-1,geoCode=False) -------------------------------------------------------------------------------- /runAll.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Wed Jul 20 14:20:50 2022 5 | 6 | Run all the scripts 7 | 8 | @author: km 9 | """ 10 | 11 | from PyPS2 import setup_PyPS 12 | 13 | setup_PyPS.main(plot=True,doDownlook=True,replace=False) 14 | -------------------------------------------------------------------------------- /runSnaphu.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Wed Aug 1 15:01:24 2018 5 | run snaphu 6 | @author: kdm95 7 | """ 8 | 9 | import numpy as np 10 | import isce.components.isceobj as isceobj 11 | import os 12 | import glob 13 | from time import sleep 14 | 15 | ps = np.load('./ps.npy',allow_pickle=True).all() 16 | 17 | geocode = False 18 | nproc='16' 19 | ntilerow='2' 20 | ntilecol='2' 21 | rowovrlp='250' 22 | colovrlp='250' 23 | maxComps = 20 24 | initMethod = 'MCF' 25 | defoMax = 0 26 | 27 | ps.intdir = ps.workdir + '/Fringe2/PS_DS/sequential1' 28 | 29 | fSizes = [] 30 | for ii,p in enumerate(ps.pairs): 31 | if os.path.isfile(ps.intdir + '/' + p + '/' + 'filt_lk.int'): 32 | if os.path.getsize(ps.intdir + '/' + p + '/' + 'fine_lk.int')==0: 33 | print('WARNING: ' + ps.intdir + '/' + p + ' File size too small. May be corrupt.' ) 34 | # os.system('rm -r ' + ps.intdir + '/' + p ) 35 | 36 | else: 37 | fSizes.append(os.path.getsize(ps.intdir + '/' + p + '/' + 'fine_lk.int')) 38 | # os.system('rm -r ' + ps.intdir + '/' + p ) 39 | else: 40 | print(p + '/' + 'filt_lk.int does not exist') 41 | medSize = np.nanmedian(fSizes) 42 | sleep(5) 43 | 44 | 45 | 46 | for ii in range(len(ps.pairs)): 47 | pair = ps.pairs[ii] 48 | infile = ps.intdir+ '/' + pair+'/filt_lk2.int' 49 | corfile = ps.intdir+ '/' + pair+'/filt_lk2.cor' 50 | outfile = ps.intdir+ '/' + pair+'/filt_lk2.unw' 51 | conncompOut = ps.intdir+ '/' + pair+'/filt_lk2.unw.conncomp' 52 | waterMask = ps.mergeddir + '/geom_reference/waterMask_lk.rdr' 53 | if not os.path.isfile(outfile): 54 | print('unwrapping ' + pair) 55 | os.system('rm snaphu_tiles*') 56 | # The command line way doesn't work right now, so we'll use the config file 57 | # cmd = '/home/insar/BIN/LIN/snaphu ' + infile + ' ' + str(nxl) + ' -o ' + outfile + ' --mcf ' + ' -s --tile 30 30 80 80 --dumpall --nproc ' + nproc 58 | # os.system(cmd) 59 | 60 | # Write out the xml file for the unwrapped ifg 61 | out1 = isceobj.createIntImage() # Copy the interferogram image from before 62 | out1.scheme = 'BIP' #'BIP'/ 'BIL' / 'BSQ' 63 | out1.dataType = 'FLOAT' 64 | out1.filename = outfile 65 | out1.width = ps.nxl 66 | out1.length = ps.nyl 67 | out1.dump(outfile + '.xml') # Write out xml 68 | out1.renderHdr() 69 | out1.renderVRT() 70 | out1.finalizeImage() 71 | 72 | intImage = isceobj.createIntImage() 73 | intImage.load(infile + '.xml') 74 | nxl2= intImage.width 75 | # intImage.close() 76 | 77 | # Write xml for conncomp files 78 | out = isceobj.createImage() # Copy the interferogram image from before 79 | out.accessMode = 'READ' 80 | out.byteOrder = 'l' 81 | out.dataType = 'BYTE' 82 | out.family = 'image' 83 | out.filename = conncompOut 84 | out.bands = 1 85 | out.scheme = 'BIL' #'BIP'/ 'BIL' / 'BSQ' 86 | out.width = ps.nxl 87 | out.length = ps.nyl 88 | out.dump(conncompOut + '.xml') # Write out xml 89 | out.renderHdr() 90 | out.renderVRT() 91 | out.finalizeImage() 92 | 93 | # Write out a config file 94 | config_file_name = ps.intdir + '/' + pair + '/snaphu.conf' 95 | f = ps.intdir + '/' + pair + '/snaphu_config' 96 | conf=list() 97 | conf.append('# Input \n') 98 | conf.append('INFILE ' + infile + '\n') 99 | conf.append('# Input file line length \n') 100 | conf.append('LINELENGTH ' + str(nxl2) + '\n') 101 | conf.append('MAXNCOMPS ' + str(maxComps) + '\n') 102 | conf.append('INITMETHOD ' + str(initMethod) + '\n') 103 | conf.append('DEFOMAX_CYCLE ' + str(defoMax) + '\n') 104 | conf.append(' 
\n') 105 | conf.append('# Output file name \n') 106 | conf.append('OUTFILE ' + outfile + '\n') 107 | conf.append(' \n') 108 | conf.append('# Correlation file name \n') 109 | conf.append('CORRFILE ' + corfile + '\n') 110 | conf.append('BYTEMASKFILE ' + waterMask + '\n') 111 | 112 | conf.append(' \n') 113 | conf.append('# Statistical-cost mode (TOPO, DEFO, SMOOTH, or NOSTATCOSTS) \n') 114 | conf.append('STATCOSTMODE SMOOTH \n') 115 | conf.append(' \n') 116 | conf.append('INFILEFORMAT COMPLEX_DATA \n') 117 | conf.append('#UNWRAPPEDINFILEFORMAT COMPLEX_DATA \n') 118 | conf.append('OUTFILEFORMAT FLOAT_DATA \n') 119 | conf.append('CORRFILEFORMAT FLOAT_DATA \n') 120 | conf.append(' \n') 121 | conf.append('NTILEROW ' + ntilerow + '\n') 122 | conf.append('NTILECOL ' + ntilecol + '\n') 123 | conf.append('# Maximum number of child processes to start for parallel tile \n') 124 | conf.append('# unwrapping. \n') 125 | conf.append('NPROC ' + nproc + '\n') 126 | conf.append('ROWOVRLP ' + rowovrlp + ' \n') 127 | conf.append('COLOVRLP ' + colovrlp + ' \n') 128 | conf.append('RMTMPTILE TRUE \n') 129 | with open(config_file_name,'w') as f: 130 | [f.writelines(c) for c in conf] 131 | if ntilerow=='1' and ntilecol=='1': # Only use the -S flag if we are using tiles 132 | command = 'snaphu --mcf -g ' + conncompOut + ' -f ' + config_file_name 133 | else: 134 | command = 'snaphu --mcf -g ' + conncompOut + ' -S -f ' + config_file_name 135 | os.system(command) 136 | else: 137 | print(outfile + ' already exists.') 138 | 139 | 140 | if geocode==True: 141 | ''' 142 | this only works for noncropped imagery 143 | ''' 144 | setupParams = np.load('setupParams.npy',allow_pickle=True).item() 145 | DEM = setupParams['DEM'] 146 | bounds = setupParams['bounds'] 147 | mstr = ps.workdir + '/master' 148 | 149 | for pair in ps.pairs: 150 | file = ps.intdir + '/' + pair + '/filt.unw' 151 | if pair==ps.pairs[0]: 152 | pwn=mstr 153 | else: 154 | pwn = ps.workdir 155 | 156 | command = 'geocodeIsce.py -a ' + str(ps.alks) + ' -r ' +str(ps.rlks) + ' -d ' + DEM + ' -m ' + mstr + ' -f ' +file + ' -b ' + bounds + ' -s ' + mstr 157 | os.system(command) -------------------------------------------------------------------------------- /setup_PyPS.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Tue Jul 31 15:20:01 2018 5 | Make dates and pairs dictionaries 6 | 7 | @author: kdm95 8 | """ 9 | 10 | import numpy as np 11 | import glob 12 | import os 13 | from datetime import date 14 | import isce.components.isceobj as isceobj 15 | import matplotlib.pyplot as plt 16 | import makeMap 17 | import cartopy.crs as ccrs 18 | from mroipac.looks.Looks import Looks 19 | from scipy.interpolate import griddata 20 | import cv2 21 | from scipy import signal 22 | import localParams 23 | import util 24 | 25 | 26 | from osgeo import gdal 27 | 28 | 29 | def main(plot=False,doDownlook=True,replace=False): 30 | # plot=True;doDownlook=True;replace=True 31 | 32 | ps = localParams.getLocalParams() 33 | plt.close('all') 34 | 35 | if replace: 36 | os.system('rm ./merged/geom_reference/*crop*') 37 | os.system('rm ./merged/geom_reference/*lk*') 38 | os.system('rm ./merged/SLC/*/*crop*') 39 | 40 | 41 | # Make directories 42 | if not os.path.isdir(ps.tsdir): 43 | os.mkdir(ps.tsdir) 44 | if not os.path.isdir('Npy'): 45 | os.mkdir('Npy') 46 | if not os.path.isdir(ps.workdir + '/Figs'): 47 | os.mkdir(ps.workdir + '/Figs') 48 | 49 | 50 | geomList = glob.glob(ps.mergeddir 
+ '/geom_reference/*full') 51 | slcList = glob.glob(ps.slcdir + '/*/*full') 52 | blList = glob.glob(ps.mergeddir + '/baselines/????????/????????') 53 | # if doDownlook: 54 | # for fname in slcList: 55 | # os.system('fixImageXml.py -i ' + fname + ' -f') 56 | # for fname in geomList: 57 | # os.system('fixImageXml.py -i ' + fname + ' -f') 58 | # for fname in blList: 59 | # os.system('fixImageXml.py -i ' + fname + ' -f') 60 | 61 | 62 | if ps.ifgMode: 63 | pairs1 = [] 64 | pairs2 = [] 65 | pairs = [] 66 | flist = glob.glob(ps.intdir + '/2*_2*') 67 | [pairs.append(f[-17:]) for f in flist] 68 | [pairs1.append(f[-17:-9]) for f in flist] 69 | [pairs2.append(f[-8:]) for f in flist] 70 | pairs.sort();pairs1.sort();pairs2.sort() 71 | dates = np.unique(np.vstack((pairs1,pairs2))) 72 | else: 73 | flist = glob.glob(ps.slcdir + '/2*') 74 | # Convert pairs to dates 75 | dates = [] 76 | for f in flist: 77 | dates.append(f[-8:]) 78 | dates.sort() 79 | 80 | pairs = [] 81 | for ii,d in enumerate(dates[0:-1]): 82 | pairs.append(dates[ii] + '_' + dates[ii+1]) 83 | 84 | # Now make pairs2 85 | pairs2 = [] 86 | for ii,d in enumerate(dates[0:-1]): 87 | for jj in np.arange(1,ps.skip+1): 88 | if ii+jj < len(dates): 89 | pairs2.append(dates[ii] + '_' + dates[ii+jj]) 90 | 91 | 92 | dn = [] 93 | dec_year = [] 94 | for d in dates: 95 | yr = d[0:4] 96 | mo = d[4:6] 97 | day = d[6:8] 98 | dt = date.toordinal(date(int(yr), int(mo), int(day))) 99 | dn.append(dt) 100 | d0 = date.toordinal(date(int(yr), 1, 1)) 101 | doy = np.asarray(dt)-d0+1 102 | dec_year.append(float(yr) + (doy/365.25)) 103 | dn = np.asarray(dn) 104 | dn0 = dn-dn[0] # make relative to first date 105 | 106 | 107 | 108 | nd = len(pairs) 109 | # rename geometry files to add 'full' 110 | if os.path.isfile('merged/geom_reference/hgt.rdr'): 111 | os.system('mv merged/geom_reference/hgt.rdr merged/geom_reference/hgt.rdr.full') 112 | os.system('mv merged/geom_reference/lat.rdr merged/geom_reference/lat.rdr.full') 113 | os.system('mv merged/geom_reference/lon.rdr merged/geom_reference/lon.rdr.full') 114 | os.system('mv merged/geom_reference/incLocal.rdr merged/geom_reference/incLocal.rdr.full') 115 | os.system('mv merged/geom_reference/los.rdr merged/geom_reference/los.rdr.full') 116 | os.system('mv merged/geom_reference/shadowMask.rdr merged/geom_reference/shadowMask.rdr.full') 117 | else: 118 | print('rdr files have already been renamed to full') 119 | 120 | # Get width and length 121 | f_lon = ps.mergeddir + '/geom_reference/lon.rdr.full' 122 | gImage = isceobj.createIntImage() 123 | gImage.load(f_lon + '.xml') 124 | nyf = gImage.length 125 | nxf = gImage.width 126 | 127 | 128 | if ps.crop: 129 | ny = ps.cropymax-ps.cropymin 130 | nx = ps.cropxmax-ps.cropxmin 131 | 132 | else: 133 | ny = gImage.length 134 | nx = gImage.width 135 | ps.cropxmin=0 136 | ps.cropxmax=nx 137 | ps.cropymin=0 138 | ps.cropymax=ny 139 | 140 | 141 | if ps.crop: 142 | for d in dates: 143 | infile = ps.slcdir + '/' + d + '/' + d + '.slc.full' 144 | if not os.path.isfile(infile+'.crop'): 145 | imgi = isceobj.createSlcImage() 146 | imgi.load(infile+'.xml') 147 | if f in ['los','incLocal']: 148 | imgi.scheme = 'BSQ' 149 | # Rearrange axes order from small to big 150 | slcIm = util.orderAxes(imgi.memMap(),nx,ny) 151 | slcIm = slcIm[:,ps.cropymin:ps.cropymax,ps.cropxmin:ps.cropxmax] 152 | 153 | imgo = imgi.clone() 154 | imgo.filename = infile+'.crop' 155 | imgo.width = ps.cropxmax-ps.cropxmin 156 | imgo.length = ps.cropymax-ps.cropymin 157 | imgo.dump(imgo.filename+'.xml') 158 | 
slcIm.tofile(imgo.filename) 159 | imgo.finalizeImage() 160 | del(slcIm) 161 | 162 | 163 | 164 | file_list = list(['lat','lon','hgt','los','shadowMask','incLocal']) 165 | if ps.crop: 166 | for f in file_list: 167 | infile = ps.mergeddir + '/geom_reference/' + f + '.rdr.full' 168 | if not os.path.isfile(infile+'.crop'): 169 | imgi = isceobj.createImage() 170 | imgi.load(infile+'.xml') 171 | if f in ['los','incLocal']: 172 | imgi.scheme = 'BSQ' 173 | # print(imgi.memMap().shape) 174 | # Rearrange axes order from small to big 175 | geomIm = util.orderAxes(imgi.memMap(),nx,ny) 176 | geomIm = geomIm[:,ps.cropymin:ps.cropymax,ps.cropxmin:ps.cropxmax] 177 | # geomIm = geomIm[:,ymin:ymax,xmin:xmax] 178 | imgo = imgi.clone() 179 | imgo.filename = infile+'.crop' 180 | imgo.width = nx #ps.cropxmax-ps.cropxmin 181 | imgo.length =ny #ps.cropymax-ps.cropymin 182 | imgo.dump(imgo.filename+'.xml') 183 | geomIm.tofile(imgo.filename) 184 | imgo.finalizeImage() 185 | del(geomIm) 186 | 187 | if doDownlook: 188 | def downLook(infile, outfile,alks,rlks): 189 | inImage = isceobj.createImage() 190 | inImage.load(infile + '.xml') 191 | inImage.filename = infile 192 | lkObj = Looks() 193 | lkObj.setDownLooks(alks) 194 | lkObj.setAcrossLooks(rlks) 195 | lkObj.setInputImage(inImage) 196 | lkObj.setOutputFilename(outfile) 197 | lkObj.looks() 198 | for f in file_list: 199 | if ps.crop: 200 | infile = ps.mergeddir + '/geom_reference/' + f + '.rdr.full.crop' 201 | else: 202 | infile = ps.mergeddir + '/geom_reference/' + f + '.rdr.full' 203 | 204 | outfile = ps.mergeddir + '/geom_reference/' + f + '_lk.rdr' 205 | 206 | if not os.path.isfile(outfile): 207 | print('downlooking ' + f) 208 | downLook(infile, outfile,ps.alks,ps.rlks) 209 | else: 210 | print(outfile + ' already exists') 211 | 212 | nxl = nx//ps.rlks 213 | nyl = ny//ps.alks 214 | 215 | # Get bounding coordinates (Frame) 216 | f_lon_lk = ps.mergeddir + '/geom_reference/lon_lk.rdr' 217 | f_lat_lk = ps.mergeddir + '/geom_reference/lat_lk.rdr' 218 | f_hgt_lk = ps.mergeddir + '/geom_reference/hgt_lk.rdr' 219 | f_los_lk = ps.mergeddir + '/geom_reference/los_lk.rdr' 220 | f_shm_lk = ps.mergeddir + '/geom_reference/shadowMask_lk.rdr' 221 | f_inc_lk = ps.mergeddir + '/geom_reference/incLocal_lk.rdr' 222 | 223 | 224 | # LON -------------- 225 | Image = isceobj.createImage() 226 | Image.load(f_lon_lk + '.xml') 227 | lon_ifg = util.orderAxes(Image.memMap(),nxl,nyl)[0,:,:] 228 | lon_ifg = lon_ifg.copy().astype(np.float32) 229 | lon_ifg[lon_ifg==0]=np.nan 230 | Image.finalizeImage() 231 | 232 | 233 | # LAT -------------- 234 | Image = isceobj.createImage() 235 | Image.load(f_lat_lk + '.xml') 236 | lat_ifg =util.orderAxes(Image.memMap(),nxl,nyl)[0,:,:] 237 | lat_ifg = lat_ifg.copy().astype(np.float32) 238 | lat_ifg[lat_ifg==0]=np.nan 239 | Image.finalizeImage() 240 | 241 | 242 | # HGT -------------- 243 | Image = isceobj.createImage() 244 | Image.load(f_hgt_lk + '.xml') 245 | hgt_ifg = util.orderAxes(Image.memMap(),nxl,nyl)[0,:,:] 246 | hgt_ifg = hgt_ifg.copy().astype(np.float32) 247 | hgt_ifg[hgt_ifg==-500]=np.nan 248 | Image.finalizeImage() 249 | 250 | # LOS -------------- 251 | Image = isceobj.createImage() 252 | Image.load(f_los_lk + '.xml') 253 | # Image.bands=2 254 | # Image.scheme='BIP' 255 | los_ifg = util.orderAxes(Image.memMap(),nxl,nyl)[0,:,:] 256 | los_ifg = los_ifg.copy() 257 | util.show(los_ifg) 258 | az_ifg = util.orderAxes(Image.memMap(),nxl,nyl)[1,:,:] 259 | az_ifg = az_ifg.copy() 260 | Image.finalizeImage() 261 | 262 | # Write out a new los file 263 | 
losOutname = ps.mergeddir + '/geom_reference/los2_lk.rdr' 264 | fidc=open(losOutname,"wb") 265 | fidc.write(los_ifg) 266 | #write out an xml file for it 267 | out = isceobj.createIntImage() # Copy the interferogram image from before 268 | out.dataType = 'FLOAT' 269 | out.bands = 1 270 | out.filename = losOutname 271 | out.width = nxl 272 | out.length = nyl 273 | out.dump(losOutname + '.xml') # Write out xml 274 | out.renderHdr() 275 | out.renderVRT() 276 | 277 | 278 | # Write out a new az file 279 | azOutname = ps.mergeddir + '/geom_reference/az_lk.rdr' 280 | fidc=open(azOutname,"wb") 281 | fidc.write(az_ifg) 282 | #write out an xml file for it 283 | out = isceobj.createIntImage() # Copy the interferogram image from before 284 | out.dataType = 'FLOAT' 285 | out.bands = 1 286 | out.filename = azOutname 287 | out.width = nxl 288 | out.length = nyl 289 | out.dump(azOutname + '.xml') # Write out xml 290 | out.renderHdr() 291 | out.renderVRT() 292 | 293 | # if you want to save these to geom 294 | los_ifg = los_ifg.copy().astype(np.float32) 295 | los_ifg[los_ifg==0]=np.nan 296 | az_ifg = az_ifg.copy().astype(np.float32) 297 | az_ifg[az_ifg==0]=np.nan 298 | 299 | Image = isceobj.createImage() 300 | Image.load(f_shm_lk + '.xml') 301 | Image.bands=1 302 | shm_ifg = util.orderAxes(Image.memMap(),nxl,nyl)[0,:,:] 303 | shm_ifg = shm_ifg.copy().astype(np.float32) 304 | shm_ifg[np.isnan(hgt_ifg)]=np.nan 305 | Image.finalizeImage() 306 | 307 | Image = isceobj.createImage() 308 | Image.load(f_inc_lk + '.xml') 309 | # Image.bands=2 310 | # Image.scheme='BSQ' 311 | # inc_ifg1 = Image.memMap()[0,:,:] # relative to the local plane of the ground 312 | inc_ifg = util.orderAxes(Image.memMap(),nxl,nyl)[1,:,:]# relative to surface normal vector (this is the one we want I think) 313 | inc_ifg = inc_ifg.copy() 314 | 315 | # Write out a new inc file 316 | incOutname = ps.mergeddir + '/geom_reference/inc_lk.rdr' 317 | fidc=open(incOutname,"wb") 318 | fidc.write(inc_ifg) 319 | #write out an xml file for it 320 | out = isceobj.createIntImage() # Copy the interferogram image from before 321 | out.dataType = 'FLOAT' 322 | out.bands = 1 323 | out.filename = incOutname 324 | out.width = nxl 325 | out.length = nyl 326 | out.dump(incOutname + '.xml') # Write out xml 327 | out.renderHdr() 328 | out.renderVRT() 329 | 330 | inc_ifg = inc_ifg.copy().astype(np.float32) 331 | inc_ifg[inc_ifg==0]=np.nan 332 | Image.finalizeImage() 333 | 334 | 335 | # Get rid of edge artifacts from downlooking 336 | Q = np.array([[0,0,0],[0,1,0],[0,0,0]]) 337 | lon_ifg = signal.convolve2d(lon_ifg,Q, mode='same') 338 | lat_ifg = signal.convolve2d(lat_ifg,Q, mode='same') 339 | hgt_ifg = signal.convolve2d(hgt_ifg,Q, mode='same') 340 | los_ifg = signal.convolve2d(los_ifg,Q, mode='same') 341 | shm_ifg = signal.convolve2d(shm_ifg,Q, mode='same') 342 | inc_ifg = signal.convolve2d(inc_ifg,Q, mode='same') 343 | 344 | 345 | 346 | 347 | # outputfilename = ps.mergeddir + '/geom_reference/waterMask_lk.rdr.crop' 348 | # util.getWaterMask(ps.dem, lon_ifg, lat_ifg, outputfilename) 349 | 350 | 351 | if plot: 352 | cmap = 'Spectral_r' 353 | fig,ax = plt.subplots(3,2,figsize=(9,9)) 354 | ax[0,0].imshow(lon_ifg,cmap=cmap);ax[0,0].set_title('lon_ifg') 355 | ax[0,1].imshow(lat_ifg,cmap=cmap);ax[0,1].set_title('lat_ifg') 356 | ax[1,0].imshow(hgt_ifg,cmap=cmap);ax[1,0].set_title('hgt_ifg') 357 | ax[1,1].imshow(los_ifg,cmap=cmap);ax[1,1].set_title('los_ifg') 358 | ax[2,0].imshow(shm_ifg,cmap=cmap);ax[2,0].set_title('shm_ifg') 359 | 
351 | if plot:
352 |     cmap = 'Spectral_r'
353 |     fig,ax = plt.subplots(3,2,figsize=(9,9))
354 |     ax[0,0].imshow(lon_ifg,cmap=cmap);ax[0,0].set_title('lon_ifg')
355 |     ax[0,1].imshow(lat_ifg,cmap=cmap);ax[0,1].set_title('lat_ifg')
356 |     ax[1,0].imshow(hgt_ifg,cmap=cmap);ax[1,0].set_title('hgt_ifg')
357 |     ax[1,1].imshow(los_ifg,cmap=cmap);ax[1,1].set_title('los_ifg')
358 |     ax[2,0].imshow(shm_ifg,cmap=cmap);ax[2,0].set_title('shm_ifg')
359 |     ax[2,1].imshow(inc_ifg,cmap=cmap);ax[2,1].set_title('inc_ifg')
360 |     plt.savefig(ps.workdir + '/Figs/geom.svg',transparent=True,dpi=100)
361 | 
362 | # Figure out where the nan values begin and end so we can crop them later if we want.
363 | for l in np.arange(0,nyl):
364 |     ll = lon_ifg[l,:]
365 |     if not np.isnan(ll.max()):
366 |         break
367 | 
368 | for p in np.arange(l+1,nyl):
369 |     ll = lon_ifg[p,:]
370 |     if np.isnan(ll.max()):
371 |         break
372 | l+=1
373 | ymin = l+1
374 | ymax = p-1
375 | xmin = 0
376 | xmax = nxl
377 | ul = (lon_ifg[l+1,1],lat_ifg[l+1,1])
378 | ur = (lon_ifg[l+1,-2],lat_ifg[l+1,-2])
379 | ll = (lon_ifg[p-2,1],lat_ifg[p-2,1])
380 | lr = (lon_ifg[p-2,-2],lat_ifg[p-2,-2])
381 | lon_bounds = np.array([ul[0],ur[0],ur[0],lr[0],lr[0],ll[0],ll[0],ul[0]])
382 | lat_bounds = np.array([ul[1],ur[1],ur[1],lr[1],lr[1],ll[1],ll[1],ul[1]])
383 | 
384 | # Now extrapolate the geom edges out so we can map non-rectangular images
385 | xx,yy = np.meshgrid(np.arange(0,nxl), np.arange(0,nyl))
386 | xxValid = xx[~np.isnan(lon_ifg)].astype(np.float32)
387 | yyValid = yy[~np.isnan(lon_ifg)].astype(np.float32)
388 | lonValid = lon_ifg[~np.isnan(lon_ifg)].astype(np.float32)
390 | lonI = griddata((xxValid,yyValid), lonValid , (xx,yy), method='nearest')
391 | xxValid = xx[~np.isnan(lat_ifg)].astype(np.float32)
392 | yyValid = yy[~np.isnan(lat_ifg)].astype(np.float32)
394 | latValid = lat_ifg[~np.isnan(lat_ifg)].astype(np.float32)
395 | latI = griddata((xxValid,yyValid), latValid , (xx,yy), method='nearest')
396 | minlat = latI.min()
397 | maxlat = latI.max()
398 | minlon = lonI.min()
399 | maxlon = lonI.max()
400 | 
401 | 
402 | # if plot:
403 | #     zoomLevel=8
404 | #     bg = 'World_Shaded_Relief'
405 | #     pad=2
406 | #     title = 'Footprint'
407 | #     makeMap.mapBackground(bg, minlon, maxlon, minlat, maxlat, pad, zoomLevel, title)
408 | #     plt.plot(lon_bounds,lat_bounds,linewidth=2,color='red',zorder=10,transform=ccrs.PlateCarree())
409 | #     plt.rc('font',size=14)
410 | #     plt.savefig(ps.workdir + '/Figs/areamap.svg',transparent=True,dpi=100 )
411 | 
412 | 
413 | 
414 | 
415 | 
416 | # f = './DEM/swbdLat_N19_N24_Lon_W162_W158.wbd'
417 | # intImage = isceobj.createIntImage()
418 | # intImage.load(f + '.xml')
419 | # wm= intImage.memMap()
420 | # wm=wm.copy() # mmap is readonly, so we need to copy it.
421 | 422 | # wm[wm==0] = 1 423 | # wm[wm==-1] = 0 424 | # wm=np.asarray(wm,dtype=np.float32) 425 | # util.writeISCEimg(wm,f,1,wm.shape[1],wm.shape[0],'Float') 426 | 427 | # intImage.dump(intImage.filename + '.xml') # Write out xml 428 | # wm.tofile(f) # Write file out 429 | 430 | # f_lon = ps.mergeddir + '/geom_reference/lon.rdr.full' 431 | # f_lat = ps.mergeddir + '/geom_reference/lat.rdr.full' 432 | # import createWaterMask 433 | # createWaterMask.geo2radar('./DEM/swbdLat_N19_N24_Lon_W162_W158.wbd','wm',f_lon,f_lat) 434 | 435 | # util.getWaterMask(ps.dem, f_lon, f_lat, 'waterMask.byte') 436 | 437 | 438 | ps.lon_ifg = lonI 439 | ps.lat_ifg = latI 440 | ps.hgt_ifg = hgt_ifg 441 | ps.los_ifg = los_ifg 442 | ps.shm_ifg = shm_ifg 443 | ps.inc_ifg = inc_ifg 444 | 445 | ps.pairs = pairs 446 | ps.dates = dates 447 | ps.pairs = pairs 448 | ps.pairs2 = pairs2 449 | ps.dec_year = dec_year 450 | ps.dn = dn 451 | ps.dn0 = dn0 452 | ps.nd = nd 453 | 454 | ps.minlon = lon_ifg.min() 455 | ps.maxlon = lon_ifg.max() 456 | ps.minlat = lat_ifg.min() 457 | ps.maxlat = lat_ifg.max() 458 | 459 | ps.ny = ny 460 | ps.nx = nx 461 | ps.nxl = nxl 462 | ps.nyl = nyl 463 | ps.nxf = nxf 464 | ps.nyf = nyf 465 | ps.lon_bounds = lon_bounds 466 | ps.lat_bounds = lat_bounds 467 | ps.ymin = ymin 468 | ps.ymax = ymax 469 | ps.xmin = xmin 470 | ps.xmax = xmax 471 | 472 | # Save the namespace 473 | np.save('ps.npy',ps) 474 | 475 | if __name__ == '__main__': 476 | main(plot=True,doDownlook=True,replace=True) -------------------------------------------------------------------------------- /setup_PyPS_ALOS.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Tue Jul 31 15:20:01 2018 5 | Make dates and pairs dictionaries 6 | 7 | @author: kdm95 8 | """ 9 | import numpy as np 10 | import glob 11 | import os 12 | from datetime import date 13 | import isceobj 14 | import matplotlib.pyplot as plt 15 | import makeMap 16 | from mroipac.looks.Looks import Looks 17 | 18 | #<><><><><><><><><><><><>Set these variables><><><><><><><><><><><><><><>< 19 | # Define area of interest 20 | #bbox = list([35.8, 36.9, -120.3, -118.7]) #minlat,maxlat,minlon,maxlon 21 | #maxlat = bbox[0]; minlat = bbox[1]; minlon = bbox[2]; maxlon = bbox[3] 22 | workdir = os.getcwd() # Use current directory as working directory 23 | # working directory (should be where merged is) 24 | skip = 2 25 | alks = int(6) # number of looks in azimuth 26 | rlks = int(3) # number of looks in range 27 | ifg_mode = False 28 | #<><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>< 29 | 30 | 31 | lam = 0.23 # 0.056 for c-band 32 | mergeddir=workdir + '/merged' 33 | intdir = mergeddir + '/interferograms' 34 | tsdir = workdir + '/TS' 35 | slcdir = mergeddir + '/SLC' 36 | # Make directories 37 | if not os.path.isdir(tsdir): 38 | os.mkdir(tsdir) 39 | if not os.path.isdir(workdir + '/Figs'): 40 | os.mkdir(workdir + '/Figs') 41 | 42 | if ifg_mode: 43 | pairs1=list() 44 | pairs2=list() 45 | pairs = list() 46 | flist = glob.glob(intdir + '/2*_2*') 47 | [pairs.append(f[-17:]) for f in flist] 48 | [pairs1.append(f[-17:-9]) for f in flist] 49 | [pairs2.append(f[-8:]) for f in flist] 50 | pairs.sort();pairs1.sort();pairs2.sort() 51 | dates = np.unique(np.vstack((pairs1,pairs2))) 52 | else: 53 | flist = glob.glob(slcdir + '/2*') 54 | # Convert pairs to dates 55 | dates = list() 56 | for f in flist: 57 | dates.append(f[-8:]) 58 | dates.sort() 59 | #dates = 
np.unique(np.asarray(dates,dtype = str))
60 | pairs1 = list()
61 | pairs2 = list()
62 | pairs = list()
63 | for ii,d in enumerate(dates):
64 |     for jj in np.arange(1,skip+1):
65 |         try:
66 |             pairs.append(dates[ii] + '_' + dates[ii+jj])
67 |         except:
68 |             pass
69 | nd = len(pairs)
70 | 
71 | dn = list()
72 | dec_year = list()
73 | for d in dates:
74 |     yr = d[0:4]
75 |     mo = d[4:6]
76 |     day = d[6:8]
77 |     dt = date.toordinal(date(int(yr), int(mo), int(day)))
78 |     dn.append(dt)
79 |     d0 = date.toordinal(date(int(yr), 1, 1))
80 |     doy = np.asarray(dt)-d0+1
81 |     dec_year.append(float(yr) + (doy/365.25))
82 | dn = np.asarray(dn)
83 | dn0 = dn-dn[0] # make relative to first date
84 | 
85 | # rename geometry files to remove the '.full' suffix
86 | os.system('mv merged/geom_master/hgt.rdr.full merged/geom_master/hgt.rdr')
87 | os.system('mv merged/geom_master/lat.rdr.full merged/geom_master/lat.rdr')
88 | os.system('mv merged/geom_master/lon.rdr.full merged/geom_master/lon.rdr')
89 | os.system('mv merged/geom_master/incLocal.rdr.full merged/geom_master/incLocal.rdr')
90 | os.system('mv merged/geom_master/los.rdr.full merged/geom_master/los.rdr')
91 | os.system('mv merged/geom_master/shadowMask.rdr.full merged/geom_master/shadowMask.rdr')
92 | 
93 | # Get width and length
95 | f_lon = mergeddir + '/geom_master/lon.rdr'
96 | gImage = isceobj.createIntImage()
97 | gImage.load(f_lon + '.xml')
98 | ny = gImage.length
99 | nx = gImage.width
100 | nxl = int(np.floor(nx/rlks))
101 | nyl = int(np.floor(ny/alks))
102 | 
103 | def downLook(infile, outfile,alks,rlks):
104 |     inImage = isceobj.createImage()
105 |     inImage.load(infile + '.xml')
106 |     inImage.filename = infile
107 | 
108 |     lkObj = Looks()
109 |     lkObj.setDownLooks(alks)
110 |     lkObj.setAcrossLooks(rlks)
111 |     lkObj.setInputImage(inImage)
112 |     lkObj.setOutputFilename(outfile)
113 |     lkObj.looks()
114 | 
115 | file_list = list(['lat','lon','hgt','los']) #,'incLocal','shadowMask'
116 | for f in file_list:
117 |     infile = mergeddir + '/geom_master/' + f + '.rdr'
118 |     outfile = mergeddir + '/geom_master/' + f + '_lk.rdr'
119 |     downLook(infile, outfile,alks,rlks)
120 | 
121 | # Get bounding coordinates (Frame)
122 | f_lon_lk = mergeddir + '/geom_master/lon_lk.rdr'
123 | f_lat_lk = mergeddir + '/geom_master/lat_lk.rdr'
124 | f_hgt_lk = mergeddir + '/geom_master/hgt_lk.rdr'
125 | 
126 | Image = isceobj.createImage()
127 | Image.load(f_lon_lk + '.xml')
128 | lon_ifg = Image.memMap()[:,:,0]
129 | lon_ifg = lon_ifg.copy().astype(np.float32)
130 | lon_ifg[lon_ifg==0] = np.nan
131 | Image.finalizeImage()
132 | 
133 | Image = isceobj.createImage()
134 | Image.load(f_lat_lk + '.xml')
135 | lat_ifg = Image.memMap()[:,:,0]
136 | lat_ifg = lat_ifg.copy().astype(np.float32)
137 | lat_ifg[lat_ifg==0] = np.nan
138 | Image.finalizeImage()
139 | 
140 | Image = isceobj.createImage()
141 | Image.load(f_hgt_lk + '.xml')
142 | hgt_ifg = Image.memMap()[:,:,0]
143 | hgt_ifg = hgt_ifg.copy().astype(np.float32)
144 | hgt_ifg[hgt_ifg==0] = np.nan
145 | Image.finalizeImage()
146 | 
147 | geom = {}
148 | geom['lon_ifg'] = lon_ifg
149 | geom['lat_ifg'] = lat_ifg
150 | geom['hgt_ifg'] = hgt_ifg
151 | np.save('geom.npy',geom)
152 | 
153 | for l in np.arange(0,nyl):
154 |     ll = lon_ifg[l,:]
155 |     if not np.isnan(ll.max()):
156 |         break
157 | 
158 | for p in np.arange(l+1,nyl):
159 |     ll = lon_ifg[p,:]
160 |     if np.isnan(ll.max()):
161 |         break
162 | l+=1
163 | 
164 | ymin = l+1
165 | ymax = p-1
166 | xmin = 0
167 | xmax = nxl
168 | 
169 | ul = (lon_ifg[l+1,1],lat_ifg[l+1,1])
170 | ur = (lon_ifg[l+1,-2],lat_ifg[l+1,-2])
171 | ll = (lon_ifg[p-2,1],lat_ifg[p-2,1])
172 | lr = (lon_ifg[p-2,-2],lat_ifg[p-2,-2])
173 | 
174 | lon_bounds = np.array([ul[0],ur[0],ur[0],lr[0],lr[0],ll[0],ll[0],ul[0]])
175 | lat_bounds = np.array([ul[1],ur[1],ur[1],lr[1],lr[1],ll[1],ll[1],ul[1]])
176 | 
177 | 
178 | pad = 2
179 | import cartopy.crs as ccrs
180 | makeMap.mapBackground('World_Shaded_Relief',lon_bounds.min(),lon_bounds.max(),lat_bounds.min(),lat_bounds.max(),pad,7,'example',borders=False)
181 | plt.plot(lon_bounds,lat_bounds,linewidth=2,color='red',zorder=10,transform=ccrs.PlateCarree())
182 | plt.rc('font',size=14)
183 | plt.savefig(workdir + '/Figs/areamap.svg',transparent=True,dpi=100)
184 | 
185 | mergeddir = workdir + '/merged'
186 | intdir = mergeddir + '/interferograms'
187 | tsdir = workdir + '/TS'
188 | 
189 | plt.imshow(hgt_ifg)
190 | 
191 | # Save arrays and variables to a dictionary 'params'
192 | params = dict()
193 | params['pairs'] = pairs
194 | params['dates'] = dates
195 | params['dec_year'] = dec_year
196 | params['dn'] = dn
197 | params['dn0'] = dn0
198 | params['nd'] = nd
199 | params['lam'] = lam
200 | params['workdir'] = workdir
201 | params['intdir'] = intdir
202 | params['tsdir'] = tsdir
203 | params['ny'] = ny
204 | params['nx'] = nx
205 | params['nxl'] = nxl
206 | params['nyl'] = nyl
207 | params['lon_bounds'] = lon_bounds
208 | params['lat_bounds'] = lat_bounds
209 | params['ymin'] = ymin
210 | params['ymax'] = ymax
211 | params['xmin'] = xmin
212 | params['xmax'] = xmax
213 | params['alks'] = alks
214 | params['rlks'] = rlks
215 | params['mergeddir'] = mergeddir
216 | params['slcdir'] = slcdir
217 | 
218 | # Save the dictionary
219 | np.save('params.npy',params)
220 | 
221 | # To load the dictionary later, do this:
222 | # params = np.load('params.npy').item()
223 | # locals().update(params) # loads all variables from the dict into the local scope
224 | 
-------------------------------------------------------------------------------- /setup_UAVSAR.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Fri May 17 16:55:27 2019
5 | 
6 | Set up a UAVSAR stack
7 | 1. Download the files; they have to be a 'stack' of SLCs. https://uavsar.jpl.nasa.gov/cgi-bin/data.pl
8 | 2. Put them in the working directory (we'll move them later)
9 | 3. Set the number of looks below
10 | 4. Open a '.ann' file, figure out what the SLC dimensions are, and define ny and nx below
11 | 5. Run this script and all the other ones should work as they do for Sentinel-1.
12 | 
13 | @author: kdm95
14 | """
15 | 
16 | import numpy as np
17 | from matplotlib import pyplot as plt
18 | import glob
19 | import os
20 | from datetime import date
21 | import isceobj
22 | import makeMap
23 | from mroipac.looks.Looks import Looks
24 | 
25 | 
26 | workdir = os.getcwd() # Use current directory as working directory
27 | # working directory (should be where merged is)
28 | skip = 1
29 | alks = int(4) # number of looks in azimuth
30 | rlks = int(4) # number of looks in range
31 | seaLevel = -200
32 | #1x4 looks
33 | nx = 4950
34 | ny = 6601
35 | 
36 | ifg_mode = False
37 | nxl = int(np.floor(nx/rlks))
38 | nyl = int(np.floor(ny/alks))
39 | 
40 | 
41 | 
42 | lam = .23 # 0.056 for C-band, 0.23 for L-band
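# The pairing scheme used here (and in setup_PyPS_ALOS.py above) links every
# date to its next `skip` successors. A hypothetical stand-alone sketch of the
# same logic (the name make_pairs is illustrative, not part of this codebase):
#
# def make_pairs(dates, skip=1):
#     pairs = []
#     for ii in range(len(dates)):
#         for jj in range(1, skip + 1):
#             if ii + jj < len(dates):
#                 pairs.append(dates[ii] + '_' + dates[ii + jj])
#     return pairs
#
# make_pairs(['20190101','20190113','20190125'], skip=2) returns
# ['20190101_20190113', '20190101_20190125', '20190113_20190125']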
43 | mergeddir=workdir + '/merged' 44 | intdir = mergeddir + '/interferograms' 45 | tsdir = workdir + '/TS' 46 | slcdir = mergeddir + '/SLC' 47 | 48 | 49 | # Make directories and move files there 50 | if not os.path.isdir(mergeddir): 51 | os.mkdir(mergeddir) 52 | if not os.path.isdir(mergeddir+'/geom_master'): 53 | os.mkdir(mergeddir+'/geom_master') 54 | os.system('mv *.lkv merged/geom_master/') 55 | os.system('mv *.llh merged/geom_master/') 56 | os.system('mv *.dop merged/geom_master/') 57 | if not os.path.isdir(intdir): 58 | os.mkdir(intdir) 59 | if not os.path.isdir(slcdir): 60 | os.mkdir(slcdir) 61 | os.system('mv *.slc merged/SLC/') 62 | os.system('mv *.ann merged/SLC/') 63 | if not os.path.isdir(tsdir): 64 | os.mkdir(tsdir) 65 | if not os.path.isdir(workdir + '/Figs'): 66 | os.mkdir(workdir + '/Figs') 67 | 68 | llhFile = glob.glob(mergeddir + '/geom_master/*.llh')[0] 69 | lkvFile = glob.glob(mergeddir + '/geom_master/*.lkv')[0] 70 | 71 | slcFileList = glob.glob(slcdir + '/*.slc') 72 | 73 | dates = [] 74 | dn = [] 75 | dec_year = [] 76 | for f in slcFileList: 77 | f1 = f[-60:] 78 | yr = '20' + f1[30:32] 79 | mo = f1[32:34] 80 | da = f1[34:36] 81 | dates.append(yr+mo+da) 82 | dt = date.toordinal(date(int(yr), int(mo), int(da))) 83 | dn.append(dt) 84 | d0 = date.toordinal(date(int(yr), 1, 1)) 85 | doy = np.asarray(dt)-d0+1 86 | dec_year.append(float(yr) + (doy/365.25)) 87 | if not os.path.isdir(slcdir + '/' + dates[-1]): 88 | os.mkdir(slcdir + '/' + dates[-1]) 89 | slc = np.fromfile(f,dtype=np.complex64) 90 | slc = np.reshape(slc,(ny,nx)) 91 | im = isceobj.createImage()# Copy the interferogram image from before 92 | im.filename = slcdir + '/' + dates[-1] + '/' + dates[-1] + '.slc.full' 93 | im.width = nx 94 | im.length = ny 95 | im.dataType = 'CFLOAT' 96 | im.dump(im.filename + '.xml') # Write out xml 97 | slc.tofile(im.filename) # Write file out 98 | im.finalizeImage() 99 | 100 | dates.sort() 101 | dates = np.asarray(dates) 102 | dn = np.asarray(dn) 103 | dn0 = dn-dn[0] # make relative to first date 104 | 105 | pairs1=list() 106 | pairs2=list() 107 | pairs = list() 108 | for ii,d in enumerate(dates): 109 | for jj in np.arange(1,skip+1): 110 | try: 111 | pairs.append(dates[ii] + '_' + dates[ii+jj]) 112 | except: 113 | pass 114 | 115 | # Get geom info 116 | llh = np.fromfile(llhFile,dtype=np.float32) 117 | lkv = np.fromfile(lkvFile,dtype=np.float32) 118 | 119 | lids = np.arange(0,len(lkv),3) 120 | kids = np.arange(1,len(lkv),3) 121 | vids = np.arange(2,len(lkv),3) 122 | l = lkv[lids].reshape(ny,nx) 123 | k = lkv[kids].reshape(ny,nx) 124 | v = lkv[vids].reshape(ny,nx) 125 | 126 | plt.figure();plt.imshow(l) 127 | plt.figure();plt.imshow(k) 128 | plt.figure();plt.imshow(v) 129 | 130 | 131 | laIds = np.arange(0,len(llh),3) 132 | loIds = np.arange(1,len(llh),3) 133 | hgIds = np.arange(2,len(llh),3) 134 | lon_ifg = llh[loIds] 135 | lat_ifg = llh[laIds] 136 | hgt_ifg = llh[hgIds] 137 | lon_ifg = np.reshape(lon_ifg,(ny,nx)) 138 | lat_ifg = np.reshape(lat_ifg,(ny,nx)) 139 | hgt_ifg = np.reshape(hgt_ifg,(ny,nx)) 140 | 141 | # write out the geom files 142 | im = isceobj.createImage()# Copy the interferogram image from before 143 | im.filename = mergeddir + '/geom_master/lon.rdr.full' 144 | im.width = nx 145 | im.length = ny 146 | im.dataType = 'FLOAT' 147 | im.dump(im.filename + '.xml') # Write out xml 148 | lon_ifg.tofile(im.filename) # Write file out 149 | im.finalizeImage() 150 | 151 | # write out the geom files 152 | im = isceobj.createImage()# Copy the interferogram image from before 153 | 
im.filename = mergeddir + '/geom_master/lat.rdr.full' 154 | im.width = nx 155 | im.length = ny 156 | im.dataType = 'FLOAT' 157 | im.dump(im.filename + '.xml') # Write out xml 158 | lat_ifg.tofile(im.filename) # Write file out 159 | im.finalizeImage() 160 | 161 | # write out the geom files 162 | im = isceobj.createImage()# Copy the interferogram image from before 163 | im.filename = mergeddir + '/geom_master/hgt.rdr.full' 164 | im.width = nx 165 | im.length = ny 166 | im.dataType = 'FLOAT' 167 | im.dump(im.filename + '.xml') # Write out xml 168 | hgt_ifg.tofile(im.filename) # Write file out 169 | im.finalizeImage() 170 | 171 | im = isceobj.createImage()# Copy the interferogram image from before 172 | im.filename = mergeddir + '/geom_master/losE.rdr.full' 173 | im.width = nx 174 | im.length = ny 175 | im.dataType = 'FLOAT' 176 | im.dump(im.filename + '.xml') # Write out xml 177 | l.tofile(im.filename) # Write file out 178 | im.finalizeImage() 179 | 180 | im = isceobj.createImage()# Copy the interferogram image from before 181 | im.filename = mergeddir + '/geom_master/losN.rdr.full' 182 | im.width = nx 183 | im.length = ny 184 | im.dataType = 'FLOAT' 185 | im.dump(im.filename + '.xml') # Write out xml 186 | k.tofile(im.filename) # Write file out 187 | im.finalizeImage() 188 | 189 | im = isceobj.createImage()# Copy the interferogram image from before 190 | im.filename = mergeddir + '/geom_master/losU.rdr.full' 191 | im.width = nx 192 | im.length = ny 193 | im.dataType = 'FLOAT' 194 | im.dump(im.filename + '.xml') # Write out xml 195 | v.tofile(im.filename) # Write file out 196 | im.finalizeImage() 197 | 198 | # Downlook geom files 199 | def downLook(infile, outfile,alks,rlks): 200 | inImage = isceobj.createImage() 201 | inImage.load(infile + '.xml') 202 | inImage.filename = infile 203 | 204 | lkObj = Looks() 205 | lkObj.setDownLooks(alks) 206 | lkObj.setAcrossLooks(rlks) 207 | lkObj.setInputImage(inImage) 208 | lkObj.setOutputFilename(outfile) 209 | lkObj.looks() 210 | 211 | file_list = list(['lat','lon','hgt','losE','losN','losU'])#,'incLocal','shadowMask']) 212 | for f in file_list: 213 | infile = mergeddir + '/geom_master/' + f + '.rdr.full' 214 | outfile = mergeddir + '/geom_master/' + f + '_lk.rdr' 215 | downLook(infile, outfile,alks,rlks) 216 | 217 | 218 | # Get bounding coordinates (Frame) 219 | f_lon_lk = mergeddir + '/geom_master/lon_lk.rdr' 220 | f_lat_lk = mergeddir + '/geom_master/lat_lk.rdr' 221 | f_hgt_lk = mergeddir + '/geom_master/hgt_lk.rdr' 222 | f_losE_lk = mergeddir + '/geom_master/losE_lk.rdr' 223 | f_losN_lk = mergeddir + '/geom_master/losN_lk.rdr' 224 | f_losU_lk = mergeddir + '/geom_master/losU_lk.rdr' 225 | 226 | 227 | Image = isceobj.createImage() 228 | Image.load(f_lon_lk + '.xml') 229 | lon_ifg = Image.memMap()[:,:,0] 230 | lon_ifg = lon_ifg.copy().astype(np.float32) 231 | lon_ifg[lon_ifg==0]=np.nan 232 | Image.finalizeImage() 233 | 234 | Image = isceobj.createImage() 235 | Image.load(f_lat_lk + '.xml') 236 | lat_ifg = Image.memMap()[:,:,0] 237 | lat_ifg = lat_ifg.copy().astype(np.float32) 238 | lat_ifg[lat_ifg==0]=np.nan 239 | Image.finalizeImage() 240 | 241 | Image = isceobj.createImage() 242 | Image.load(f_hgt_lk + '.xml') 243 | hgt_ifg = Image.memMap()[:,:,0] 244 | hgt_ifg = hgt_ifg.copy().astype(np.float32) 245 | hgt_ifg[hgt_ifg==0]=np.nan 246 | Image.finalizeImage() 247 | 248 | Image = isceobj.createImage() 249 | Image.load(f_losE_lk + '.xml') 250 | losE_ifg = Image.memMap()[:,:,0] 251 | losE_ifg = losE_ifg.copy().astype(np.float32) 252 | 
losE_ifg[losE_ifg==0] = np.nan
253 | Image.finalizeImage()
254 | 
255 | Image = isceobj.createImage()
256 | Image.load(f_losN_lk + '.xml')
257 | losN_ifg = Image.memMap()[:,:,0]
258 | losN_ifg = losN_ifg.copy().astype(np.float32)
259 | losN_ifg[losN_ifg==0] = np.nan
260 | Image.finalizeImage()
261 | 
262 | Image = isceobj.createImage()
263 | Image.load(f_losU_lk + '.xml')
264 | losU_ifg = Image.memMap()[:,:,0]
265 | losU_ifg = losU_ifg.copy().astype(np.float32)
266 | losU_ifg[losU_ifg==0] = np.nan
267 | Image.finalizeImage()
268 | 
269 | geom = {}
270 | geom['lon_ifg'] = lon_ifg
271 | geom['lat_ifg'] = lat_ifg
272 | geom['hgt_ifg'] = hgt_ifg
273 | geom['losE_ifg'] = losE_ifg
274 | geom['losN_ifg'] = losN_ifg
275 | geom['losU_ifg'] = losU_ifg
276 | 
277 | np.save('geom.npy',geom)
278 | 
279 | for l in np.arange(0,nyl):
280 |     ll = lon_ifg[l,:]
281 |     if not np.isnan(ll.max()):
282 |         break
283 | 
284 | for p in np.arange(l+1,nyl):
285 |     ll = lon_ifg[p,:]
286 |     if np.isnan(ll.max()):
287 |         break
288 | l+=1
289 | 
290 | ymin = l+1
291 | ymax = p-1
292 | xmin = 0
293 | xmax = nxl
294 | 
295 | ul = (lon_ifg[l+1,1],lat_ifg[l+1,1])
296 | ur = (lon_ifg[l+1,-2],lat_ifg[l+1,-2])
297 | ll = (lon_ifg[p-2,1],lat_ifg[p-2,1])
298 | lr = (lon_ifg[p-2,-2],lat_ifg[p-2,-2])
299 | 
300 | lon_bounds = np.array([ul[0],ur[0],ur[0],lr[0],lr[0],ll[0],ll[0],ul[0]])
301 | lat_bounds = np.array([ul[1],ur[1],ur[1],lr[1],lr[1],ll[1],ll[1],ul[1]])
302 | 
303 | 
304 | pad = .5
305 | import cartopy.crs as ccrs
306 | makeMap.mapBackground('World_Shaded_Relief',lon_bounds.min(),lon_bounds.max(),lat_bounds.min(),lat_bounds.max(),pad,7,'example',borders=False)
307 | plt.plot(lon_bounds,lat_bounds,linewidth=2,color='red',zorder=10,transform=ccrs.PlateCarree())
308 | plt.rc('font',size=14)
309 | plt.savefig(workdir + '/Figs/areamap.svg',transparent=True,dpi=100)
310 | 
311 | 
312 | params = dict()
313 | params['pairs'] = pairs
314 | params['dates'] = dates
315 | params['dec_year'] = dec_year
316 | params['dn'] = dn
317 | params['dn0'] = dn0
318 | params['seaLevel'] = seaLevel
319 | nd = len(pairs)
320 | params['nd'] = nd
321 | params['lam'] = lam
322 | params['workdir'] = workdir
323 | params['intdir'] = intdir
324 | params['tsdir'] = tsdir
325 | params['ny'] = ny
326 | params['nx'] = nx
327 | params['nxl'] = nxl
328 | params['nyl'] = nyl
329 | params['lon_bounds'] = lon_bounds
330 | params['lat_bounds'] = lat_bounds
331 | params['ymin'] = ymin
332 | params['ymax'] = ymax
333 | params['xmin'] = xmin
334 | params['xmax'] = xmax
335 | params['alks'] = alks
336 | params['rlks'] = rlks
337 | params['mergeddir'] = mergeddir
338 | params['slcdir'] = slcdir
339 | 
340 | # Save the dictionary
341 | np.save('params.npy',params)
342 | 
-------------------------------------------------------------------------------- /smartLookSLC.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Thu Aug 9 10:16:54 2018
5 | 
6 | Distributed scatterer type approach
7 | 
8 | Downlooking, filtering, and coherence.
9 | Saves downlooked ifgs in the respective ifg directories.
10 | 
11 | Note:
12 | when it makes ifgs, it subtracts the secondary from the reference, so
13 | negative values are going away from the satellite (e.g., subsidence) and
14 | positive values are going towards the satellite (e.g., uplift).
15 | 
16 | @author: kdm95
17 | """
18 | 
19 | import isce.components.iscesys.Component.ProductManager as pm
20 | 
21 | 
22 | 
23 | import numpy as np
24 | import isce.components.isceobj as isceobj
25 | from matplotlib import pyplot as plt
26 | import cv2
27 | import os
28 | import timeit
29 | import glob
30 | import util
31 | from util import show
32 | import FilterAndCoherence
33 | 
34 | 
35 | filterFlag = True
36 | unwrap = False # Usually better to leave False and use runSnaphu.py for more options and outputs
37 | filterStrength = '.3'
38 | fixImage = False # Do this in case you renamed any of the directories or moved the SLCs since they were made
39 | nblocks = 1
40 | seaLevel = -10
41 | 
42 | ps = np.load('./ps.npy',allow_pickle=True).all()
43 | 
44 | # Create the averaging window and downlooking index vectors
45 | win1 = np.ones((ps.alks,ps.rlks))
46 | win = win1/sum(win1.flatten())
47 | win = np.asarray(win,dtype=np.float32)
48 | rangevec = np.arange(0,ps.nxl) * ps.rlks
49 | azvec = np.arange(0,ps.nyl) * ps.alks
50 | yy,xx = np.meshgrid(azvec,rangevec,sparse=False, indexing='ij')
51 | y = yy.flatten()
52 | x = xx.flatten()
53 | del(xx,yy)
54 | 
55 | # for p in pairs:
56 | #     os.system('rm -r ' + intdir + '/' + p)
57 | 
58 | if fixImage:
59 |     slcList = glob.glob(ps.slcdir + '/*/*full')
60 |     for fname in slcList:
61 |         os.system('fixImageXml.py -i ' + fname + ' -f')
62 | 
63 | # Load the gamma0 file
64 | # f = ps.tsdir + '/gamma0.int'
65 | # intImage = isceobj.createIntImage()
66 | # intImage.load(f + '.xml')
67 | # gamma0 = intImage.memMap()
68 | gamma0 = np.ones((ps.ny,ps.nx))
69 | 
70 | gamma0 = gamma0.copy() # mmap is readonly, so we need to copy it.
71 | gamma0[np.isnan(gamma0)] = 0
72 | 
73 | 
74 | 
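# How the downlooking below works: cv2.filter2D with the normalized
# (alks x rlks) box kernel `win` replaces each pixel with its local mean, and
# indexing with the (y, x) vectors built above then keeps one sample per look
# window. A hypothetical numpy-only sketch of (roughly) the same operation,
# assuming the array dimensions are exact multiples of the look factors
# (illustrative only, not used by this script):
#
# def downlook(a, alks, rlks):
#     ny, nx = a.shape[0] // alks, a.shape[1] // rlks
#     blocks = a[:ny*alks, :nx*rlks].reshape(ny, alks, nx, rlks)
#     return blocks.mean(axis=(1, 3))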
75 | if not os.path.isfile('Npy/gam.npy'):
76 |     # Perform smart_looks first on gamma0
77 |     # gam = gamma0.copy() # mmap is readonly, so we need to copy it.
78 |     # gam[np.where(gam==0)] = np.nan
79 |     gam = cv2.filter2D(gamma0,-1, win)
80 |     gam = np.reshape(gam[y,x],(ps.nyl,ps.nxl))
81 |     gam[np.isnan(gam)] = 0
82 |     # Save gamma0 file
83 |     out = isceobj.createIntImage()
84 |     out.dataType = 'FLOAT'
85 |     out.filename = ps.tsdir + '/gamma0_lk.int'
86 |     out.width = ps.nxl
87 |     out.length = ps.nyl
88 |     out.dump(out.filename + '.xml') # Write out xml
89 |     gam.tofile(out.filename) # Write file out
90 |     out.renderHdr()
91 |     out.renderVRT()
92 |     # gam[geom['hgt_ifg'] < seaLevel] = 0
93 |     np.save('Npy/gam.npy',gam)
94 |     # del(gam)
95 | else:
96 |     print('gam.npy already exists')
97 | 
98 | 
99 | if not os.path.isdir(ps.intdir):
100 |     os.system('mkdir ' + ps.intdir)
101 | 
102 | # gam_filt = cv2.filter2D(gamma0,-1, win)
103 | 
104 | gamma0 = gamma0**2 # used below to weight the ifg pixels before filtering
105 | rangevec = np.arange(0,ps.nxl) * ps.rlks
106 | idl = int(np.floor(ps.nyl/nblocks)) # looked rows per block
107 | idy = int(np.floor(ps.ny/nblocks)) # full-resolution rows per block
108 | azvec = np.arange(0,idl) * ps.alks
109 | yy,xx = np.meshgrid(azvec,rangevec,sparse=False, indexing='ij')
110 | y = yy.flatten()
111 | x = xx.flatten()
112 | 
115 | for pair in ps.pairs2: # loop through each ifg and save it to its directory
116 |     if not os.path.isdir(ps.intdir + '/' + pair):
117 |         os.system('mkdir ' + ps.intdir + '/' + pair)
118 |     if not os.path.isfile(ps.intdir + '/' + pair + '/fine_lk.int'):
119 |         print('working on ' + pair)
120 | 
121 |         starttime = timeit.default_timer()
122 |         # Open a file to write the downlooked ifg into
123 |         out = isceobj.createImage()
124 |         out.dataType = 'CFLOAT'
125 |         out.filename = ps.intdir + '/' + pair + '/fine_lk.int'
126 |         out.width = ps.nxl
127 |         out.length = ps.nyl
128 |         out.dump(out.filename + '.xml') # Write out xml
129 |         fid = open(out.filename,"ab+")
130 | 
131 |         # # open a cor file too
132 |         # outc = isceobj.createImage()
133 |         # outc.dataType = 'FLOAT'
134 |         # outc.filename = ps.intdir + '/' + pair + '/cor_lk.r4'
135 |         # outc.width = ps.nxl
136 |         # outc.length = ps.nyl
137 |         # outc.dump(outc.filename + '.xml') # Write out xml
138 |         # fidc = open(outc.filename,"ab+")
139 | 
140 | 
141 |         # break it into blocks
142 |         for kk in np.arange(0,nblocks):
143 | 
144 |             start = int(kk*idy)
145 |             stop = start+idy+1
146 | 
147 |             d2 = pair[9:]
148 |             d = pair[0:8]
149 | 
150 |             if ps.crop:
151 |                 f1 = ps.slcdir +'/'+ d + '/' + d + '.slc.full.crop'
152 |             else:
153 |                 f1 = ps.slcdir +'/'+ d + '/' + d + '.slc.full'
154 |             slcImage = isceobj.createSlcImage()
155 |             slcImage.load(f1 + '.xml')
156 |             slc1 = slcImage.memMap()[:,:,0][start:stop,:]
157 | 
158 |             if ps.crop:
159 |                 f2 = ps.slcdir +'/'+ d2 + '/' + d2 + '.slc.full.crop'
160 |             else:
161 |                 f2 = ps.slcdir +'/'+ d2 + '/' + d2 + '.slc.full'
162 |             slcImage = isceobj.createSlcImage()
163 |             slcImage.load(f2 + '.xml')
164 |             slc2 = slcImage.memMap()[:,:,0][start:stop,:]
165 |             ifg = np.multiply(slc1,np.conj(slc2))
166 | 
167 |             cohFile = ps.intdir + '/' + pair + '/coh.coh'
168 |             FilterAndCoherence.estCpxCoherence(f1, f2, cohFile, alks=1, rlks=1)
169 | 
170 |             # del(slc1,slc2)
171 | 
172 |             ifg_real = np.real(ifg) * gamma0[start:stop,:]
173 |             ifg_imag = np.imag(ifg) * gamma0[start:stop,:]
174 | 
175 |             # del(ifg)
176 | 
177 |             ifg_real_filt = cv2.filter2D(ifg_real,-1, win)
178 |             ifg_imag_filt = cv2.filter2D(ifg_imag,-1, win)
179 |             rea_lk = np.reshape((ifg_real_filt)[y,x],(idl,ps.nxl))
180 |             ima_lk = np.reshape((ifg_imag_filt)[y,x],(idl,ps.nxl))
181 | 
182 |             # 
del(ifg_imag_filt,ifg_imag,ifg_real_filt,ifg_real) 183 | 184 | cpx = ima_lk*1j + rea_lk 185 | cpx[np.isnan(cpx)] = 0 186 | fid.write(cpx) 187 | 188 | # slc1_F = cv2.filter2D(abs(slc1**2),-1, win) # using win instead of win1 because cpx is an average, not a sum 189 | # slc2_F = cv2.filter2D(abs(slc2**2),-1, win) 190 | # denom = np.sqrt(slc1_F[y,x]) * np.sqrt(slc2_F[y,x]) 191 | # cor=(abs(cpx.ravel())/denom).reshape(ps.nyl,ps.nxl) 192 | # fidc.write(cor) 193 | 194 | out.renderHdr() 195 | out.renderVRT() 196 | # outc.renderHdr() 197 | # outc.renderVRT() 198 | fid.close() 199 | # fidc.close() 200 | # for pair in ps.pairs2: #loop through each ifg and save to 201 | if filterFlag: 202 | name = ps.intdir + '/' + pair + '/fine_lk.int' 203 | corname = ps.intdir + '/' + pair + '/cor.r4' 204 | offilt = ps.intdir + '/' + pair + '/fine_lk_filt.int' 205 | # FilterAndCoherence.runFilter(name,offilt,float(filterStrength)) 206 | FilterAndCoherence.estCoherence(name, corname) 207 | if unwrap: 208 | unwName = ps.intdir+ '/' + pair + '/filt.unw' 209 | util.unwrap_snaphu(name,corname,unwName,ps.nyl,ps.nxl) 210 | 211 | 212 | 213 | # a = 1+0j # angle: 0 214 | # b = 0+1j # angle: +90 215 | # c=np.multiply(a,np.conj(b)) # a - b = c 216 | # d =np.angle(c) # a - b = -90 217 | # If LOS shortened between a and b, then the difference should be positive (uplift) 218 | # If LOS lengthened, then the difference should be negative (subsidence) 219 | # For ifgs, MintPY uses the opposite: ("positive value represents motion away from the satellite."), 220 | # but for the MintPy time series they use positive is uplift. 221 | # from osgeo import gdal 222 | # p1='20211129_20211205' 223 | # p2 = '20211205_20211211' # This has the flood 224 | # ds1 = gdal.Open(ps.intdir+'/'+p1+'/coh.coh.vrt') 225 | # preflood = ds1.GetVirtualMemArray() 226 | # ds2 = gdal.Open(ps.intdir+'/'+p2+'/coh.coh.vrt') 227 | # flood = ds2.GetVirtualMemArray() 228 | 229 | # diff = preflood[1,:,:]-flood[1,:,:] 230 | # plt.figure() 231 | # plt.imshow(diff[2000:4000,10000:20000],vmin=.1,vmax=.4) -------------------------------------------------------------------------------- /structure_function.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Wed May 30 09:28:10 2018 5 | @author: kdm95 6 | args: 7 | ifg_data: 2D vector with unwrapped phase values or other pixel values 8 | ny, nx: dimensions of image 9 | tot: Total number of differences to make 10 | lengthscale: Maximum distance between pixels being differenced 11 | plot_flag: 0 or 1 (off or on) to make plots or not. 12 | binwidth: Bindwidth for differencing distances 13 | fun: function to fit to the variogram. Can be 'spherical' or 'exp'. 
14 | """ 15 | 16 | # Global imports 17 | import numpy as np 18 | import matplotlib.pyplot as plt 19 | import scipy 20 | # Local Imports 21 | 22 | 23 | def struct_fun(data, ny,nx, tot=600, lengthscale=600, plot_flag=0, binwidth=20, fun=None): 24 | ''' 25 | Main function to calculate structure function from a unwrapped ifg matrix (data) 26 | 27 | ''' 28 | 29 | # ny,nx = lon_ifg.shape 30 | 31 | xx = np.arange(0,nx);yy=np.arange(0,ny) 32 | X,Y = np.meshgrid(xx,yy, sparse=False, indexing='ij') 33 | 34 | xd,yd = np.meshgrid([0,1,2,5,10,15,20,25,35,(lengthscale-binwidth),lengthscale],[-lengthscale, (-lengthscale+binwidth),-35,-25,-20,-15,-10,-5,-4,-2,-1,0,1,2,4,5,10,15,20,25,35,(lengthscale-binwidth),lengthscale], sparse=False, indexing='ij') #dense sampling near origin 35 | 36 | tx =np.floor(np.random.randint(1,lengthscale,size=tot)) 37 | ty =np.floor(np.random.randint(1,lengthscale,size=tot)) 38 | ty[::2] = -ty[::2] # Make half of points negative; start stop step 39 | q=np.matrix([tx,ty]).T 40 | 41 | # Remove duplicates 42 | jnk,ids = np.unique(q,axis=0,return_index=True) 43 | tx = tx[ids] 44 | tx = np.asarray([*map(int, tx)]) 45 | ty = ty[ids] 46 | ty = np.asarray([*map(int, ty)]) 47 | 48 | #***add on dense grid from above; 49 | tx = np.append(tx, xd.flatten()) 50 | ty = np.append(ty, yd.flatten()) 51 | 52 | #***remove duplicates 53 | # a=np.array((tx,ty)) 54 | # ix = np.unique(a,return_index=True, axis=1); 55 | # tx = tx[ix[1]]; 56 | # ty = ty[ix[1]]; 57 | 58 | aty = abs(ty) # used for the negative offsets 59 | S = np.empty([len(tx)]) 60 | # S2 = np.empty([len(tx)]) 61 | allnxy = np.empty([len(tx)]) 62 | iters = np.arange(0,len(tx)) 63 | 64 | for ii in iters: 65 | i=int(ii) 66 | if ty[ii] >= 0: 67 | A = data[1 : ny-ty[ii] , tx[ii] : nx-1 ] 68 | B = data[ty[i] : ny-1 , 1 : nx-tx[i] ]; 69 | else: 70 | A = data[aty[ii] : ny-1 , tx[ii] : nx-1] 71 | B = data[1 : ny-aty[ii] , 1 : nx-tx[ii]] 72 | 73 | C = A-B # All differences 74 | C2 = np.square(C) 75 | 76 | S[ii] = np.nanmean(C2) 77 | # S2[ii] = np.nanstd(C2) 78 | 79 | allnxy[ii] = len(C2); 80 | dists = np.sqrt(np.square(tx) + np.square(ty)) 81 | 82 | # S[np.isnan(S)]=0 83 | bins = np.arange(0,dists.max(),binwidth,dtype=int) 84 | S_bins=list() 85 | # S2_bins=list() 86 | Ws = list() 87 | dist_bins=list() 88 | for ii,bin_min in enumerate(bins): 89 | bin_ids = np.where((dists< (bin_min+binwidth)) & (dists>bin_min)) 90 | w = allnxy[bin_ids] #these are the weights for the weighted average 91 | if len(w)==0: 92 | S_bins.append(np.nan) 93 | # S2_bins.append(np.nan) 94 | dist_bins.append(np.nan) 95 | elif len(w)==1: 96 | S_bins.append(S[bin_ids[0]]) 97 | # S2_bins.append(S2[bin_ids[0]]) 98 | dist_bins.append(np.nan) 99 | else: 100 | S_bins.append(np.average(S[bin_ids],axis=0,weights=w)) 101 | # S2_bins.append(np.average(S2[bin_ids],axis=0,weights=w)) 102 | Ws.append(len(w)) 103 | dist_bins.append(np.nanmean(dists[bin_ids])) 104 | 105 | if plot_flag: 106 | fig = plt.figure(figsize=(14,10)) 107 | # Plot IFG 108 | ax = fig.add_subplot(221) 109 | ax.set_title("Image") 110 | cf = plt.imshow(data) 111 | #cmap=plt.cm.Spectral.reversed() 112 | plt.colorbar(cf) 113 | 114 | ax = fig.add_subplot(222) 115 | ax.set_title("sqrt(S) vs. position") 116 | cf = plt.scatter(tx,ty,c=np.sqrt(S)) 117 | plt.scatter(-tx,-ty,c=np.sqrt(S)) 118 | plt.ylabel('north') 119 | plt.xlabel('east') 120 | plt.colorbar(cf) 121 | 122 | ax = fig.add_subplot(212) 123 | ax.set_title("S vs. 
distance, colored by num points")
124 |         cf = plt.scatter(dists[1:],np.sqrt(S[1:]),c=allnxy[1:])
125 |         plt.ylabel('sqrt(S), units of cm')
126 |         plt.xlabel('distance (km)')
127 |         plt.colorbar(cf)
128 |         plt.show()
129 | 
130 | 
131 |     # Fit a log function to the binned data
132 |     # S_bins = np.asarray(S_bins)
133 |     # S_bins[np.where(np.isnan(S_bins))] = 0
134 |     xd = np.asarray(dist_bins)
135 |     oh = np.asarray(S_bins,dtype=np.float32)/2
136 |     # oh[np.isnan(oh)] = 0
137 |     yd = np.sqrt(oh)
138 |     # yd_std = np.sqrt(S2_bins)
139 |     yd[np.isnan(yd)] = 0
140 |     # yd_std[np.isnan(yd_std)] = 0
141 | 
142 | 
143 |     # Fit a logarithmic curve to the structure function
144 |     # y = a*log(b*x) + c
145 |     if fun=='exp':
146 |         def fit_log(x,a,b,c):
147 |             '''
148 |             Logarithmic model of the semivariogram
149 |             '''
150 |             return a*np.log(b*x)+c
151 | 
152 |         popt, pcov = scipy.optimize.curve_fit(fit_log,xd,yd)
153 |         sf_fit = fit_log(xd, *popt)
154 | 
155 | 
156 |     elif fun=='spherical':
157 |         def spherical(x, a, b ):
158 |             '''
159 |             Spherical model of the semivariogram
160 |             '''
161 |             return b*( 1.5*x/a - 0.5*(x/a)**3.0 )
162 | 
163 |         popt, pcov = scipy.optimize.curve_fit(spherical,xd,yd)
164 |         sf_fit = spherical(xd, *popt)
165 | 
166 |     else:
167 |         print('No function specified. Can be spherical or exp.')
168 |         sf_fit = 0
169 | 
170 |     S2 = 0
171 |     yd_std = 0
172 |     return np.sqrt(S/2), S2, dists, allnxy, yd, yd_std, xd, sf_fit
-------------------------------------------------------------------------------- /weeding.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Fri Nov 20 14:39:57 2020
5 | 
6 | This script checks for any dates in the time series that are abnormally
7 | noisy. This is based on:
8 | 
9 |  - variance of each unwrapped ifg (usually dominated by atmosphere)
10 |  - correlation of each ifg (could be related to ground surface properties)
11 | 
12 | This also writes a file called msk.npy, which is based on gamma0 and average correlation.
13 | 
14 | @author: km
15 | """
16 | 
17 | import numpy as np
18 | import isceobj
19 | from matplotlib import pyplot as plt
20 | from scipy.interpolate import griddata
21 | import glob
22 | import os
23 | from datetime import date
24 | from PyPS2 import util,makeMap
25 | from scipy import signal
26 | 
27 | 
28 | 
29 | def weeding(mincor=0.7,gamThresh=0.7,varMax=.05,connCompCompleteness=0.9,plotStuff=False,makeChanges=False,pairs2Overlap=1,overwriteGeo=False):
30 |     '''
31 |     plotStuff = True
32 |     makeChanges = False
33 |     mincor = .5
34 |     gamThresh = .5
35 |     varMax = .05
36 |     overwriteGeo = False
37 |     pairs2Overlap = 1
38 |     connCompCompleteness = 0.9 # at least 90% of the ifgs must have a connected component at the given pixel, or it is masked to nan
39 |     '''
40 |     plt.close('all')
41 | 
42 | 
43 |     ps = np.load('./ps.npy',allow_pickle=True).all()
44 |     gam = np.load('Npy/gam.npy')
45 | 
46 | 
47 |     if overwriteGeo:
48 |         gamGeo = util.geocodeKM(gam)
49 |         gamGeo[np.isnan(gamGeo)] = 0
50 |         np.save('./TS/gam.geo.npy',gamGeo)
51 | 
52 | 
53 |     gamFlat = gam.flatten()
54 | 
55 |     X,Y = np.meshgrid(range(ps.nxl),range(ps.nyl))
56 | 
57 |     pairs3 = list()
58 |     for ii,d in enumerate(ps.dates[0:-1]):
59 |         for jj in np.arange(1,pairs2Overlap+1):
60 |             if ii+jj < len(ps.dates):
61 |                 pairs3.append(ps.dates[ii] + '_' + ps.dates[ii+jj])
62 | 
63 | 
64 | 
65 | 
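    # The three stacking loops below all follow the same ISCE read pattern:
    # load the .xml metadata, memory-map the raster, slice out a band, and
    # copy it, since memMap() returns a read-only view. A hypothetical helper
    # that captures the idiom (illustrative only; the loops below inline it):
    #
    # def readBand(fname, band=0):
    #     img = isceobj.createIntImage()
    #     img.load(fname + '.xml')
    #     data = img.memMap()[:,:,band].copy()
    #     img.finalizeImage()
    #     return data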
66 |     stack = []
67 |     for p in pairs3:
68 |         unw_file = ps.intdir + '/' + p + '/filt.unw'
69 |         unwImage = isceobj.createIntImage()
70 |         unwImage.load(unw_file + '.xml')
71 |         unw = unwImage.memMap()[:,:,0] #- unwImage.memMap()[ymin:ymax,xmin:xmax,0][r,c]
72 |         unw = unw.copy()
73 |         # unw[np.isnan(gam)] = np.nan
74 |         stack.append(unw)
75 |     stack = np.asarray(stack,dtype=np.float32)
76 | 
77 |     # stackTimeMean = np.nanmean(stack,axis=0)
78 |     # stackTimeVar = np.nanvar(stack,axis=0)
79 |     # plt.figure();plt.imshow(stackTimeMean);plt.title('stack time mean')
80 |     # plt.figure();plt.imshow(stackTimeVar);plt.title('stack time var')
81 | 
82 |     corStack = []
83 |     for p in pairs3:
84 |         cor_file = ps.intdir + '/' + p + '/fine_lk.cor'
85 |         corImage = isceobj.createIntImage()
86 |         corImage.load(cor_file + '.xml')
87 |         cor = corImage.memMap()[:,:,0]
88 |         cor = cor.copy()
89 |         # cor[np.isnan(gam)] = np.nan
90 |         corStack.append(cor)
91 |     corStack = np.asarray(corStack,dtype=np.float32)
92 | 
93 |     connStack = []
94 |     for p in pairs3:
95 |         conn_file = ps.intdir + '/' + p + '/filt.unw.conncomp'
96 |         connImage = isceobj.createIntImage()
97 |         connImage.load(conn_file + '.xml')
98 |         conn = connImage.memMap()[:,0,:]
99 |         # conn = conn.copy()
100 |         connStack.append(conn)
101 |     connStack = np.asarray(connStack,dtype=np.float32)
102 | 
103 | 
104 |     # average cor value for each pair
105 |     corAvg = []
106 |     ifgVar = []
107 |     for ii in np.arange(0,len(pairs3)):
108 |         corAvg.append(np.nanmedian(corStack[ii,:,:]))
109 |         iv = stack[ii,:,:]
110 |         iv[np.isnan(corStack[ii,:,:])] = np.nan
111 |         ifgVar.append(np.nanvar(iv))
112 | 
113 |     corAvg = np.asarray(corAvg,dtype=np.float32)
114 |     ifgVar = np.asarray(ifgVar,dtype=np.float32)
115 | 
116 | 
117 | 
118 | 
119 |     corAvgMap = np.nanmean(corStack,axis=0)
120 |     corVar = np.nanvar(corStack,axis=0)
121 | 
122 | 
123 |     np.save('Npy/cor.npy',corAvgMap)
124 |     np.save('Npy/corVar.npy',corVar)
125 | 
126 |     if overwriteGeo:
127 |         corGeo = util.geocodeKM(corAvgMap)
128 |         corGeo[np.isnan(corGeo)] = 0
129 |         np.save('./TS/cor.geo.npy',corGeo)
130 | 
131 |         corVarGeo = util.geocodeKM(corVar)
132 |         corVarGeo[np.isnan(corVarGeo)] = 0
133 |         np.save('./TS/corVar.geo.npy',corVarGeo)
134 | 
135 | 
136 |     # Find the bad dates
137 |     # gamThresh = np.nanmedian(gam) - 2*np.nanstd(gam)
138 |     medianCorStack = np.nanmedian(corAvgMap)
139 |     print('\nThe median correlation for the entire stack is ' + str(round(medianCorStack,2)))
140 | 
141 |     corThresh = np.nanmedian(corAvgMap) - np.nanstd(corAvgMap)
142 |     ifgVarThresh = np.nanmedian(ifgVar) + np.nanstd(ifgVar)
143 |     # badPairs = np.where((corAvg<corThresh) | (ifgVar>ifgVarThresh))[0]
144 |     badPairs = np.where(corAvg<corThresh)[0]
145 | 
146 |     # A date is flagged as bad only if it appears in more than one bad pair
147 |     badDatesAll = []
148 |     for bp in badPairs:
149 |         badDatesAll.append(pairs3[bp][0:8])
150 |         badDatesAll.append(pairs3[bp][9:])
151 |     badDates = []
152 |     for b in np.unique(badDatesAll):
153 |         if badDatesAll.count(b) > 1:
154 |             badDates.append(b)
155 | 
156 |     badDates = np.unique(badDates)
157 | 
158 | 
159 | 
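    # The threshold above is a one-sigma outlier rule: corThresh is the median
    # of the mean-correlation map minus one standard deviation, and any pair
    # whose median correlation (corAvg) falls below it is flagged (the
    # commented-out alternative also flags pairs whose phase variance sits one
    # sigma above the median). For example, if the map statistics give
    # corThresh ~ 0.5, a pair with corAvg = 0.2 is flagged while a pair with
    # corAvg = 0.6 is kept.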
160 |     # This loop looks at the ifgs associated with each date and finds the minimum ifg variance for each date.
161 |     # This is a way to find which dates are noisy, as opposed to individual ifgs.
162 |     dateVar = []
163 |     dateCor = []
164 |     for ii in np.arange(0,len(ps.dn)):
165 |         dt = ps.dates[ii]
166 |         # first find all of the ifgs that have that date. This is generalized in case there are redundant pairs.
167 |         dtPairs = []
168 |         for jj,p in enumerate(pairs3):
169 |             if p[0:8] == dt or p[9:] == dt:
170 |                 dtPairs.append(jj)
171 |         pVars = []
172 |         pAvgs = []
173 |         for kk in dtPairs:
174 |             iv = stack[kk,:,:]
175 |             iv[np.isnan(corStack[kk,:,:])] = np.nan
176 |             pVars.append(np.nanvar(iv))
177 |             pAvgs.append(np.nanmedian(corStack[kk,:,:]))
178 | 
179 | 
180 |         dateVar.append(np.nanmin(pVars))
181 |         dateCor.append(np.nanmax(pAvgs))
182 |     dateVar = np.asarray(dateVar,dtype=np.float32)
183 |     dateCor = np.asarray(dateCor,dtype=np.float32)
184 | 
185 |     connSum = np.sum(connStack,axis=0)
186 |     np.save('Npy/connSum.npy',connSum)
187 | 
188 | 
189 |     # Make masks based on 4 criteria
190 |     gamMsk = np.ones(gam.shape)
191 |     gamMsk[gam<gamThresh] = 0
192 |     connMsk = np.ones(gam.shape)
193 |     connMsk[connSum < round(connCompCompleteness*ps.nd)] = 0
194 |     corMsk = np.ones(gam.shape)
195 |     corMsk[corAvgMap<mincor] = 0
196 |     varMsk = np.ones(gam.shape)
197 |     varMsk[corVar>varMax] = 0
198 | 
199 |     # Make the final msk
200 |     msk = np.ones(gam.shape)
201 |     msk[gamMsk==0] = 0
202 |     msk[connMsk==0] = 0
203 |     msk[corMsk==0] = 0
204 |     msk[varMsk==0] = 0
205 | 
206 |     mskSum = gamMsk+connMsk+corMsk+varMsk
207 | 
208 |     np.save('Npy/msk.npy',msk)
209 | 
210 |     if overwriteGeo:
211 |         gamGeo = util.geocodeKM(gam,method='linear')
212 |         np.save('./TS/gam.geo.npy',gamGeo)
213 | 
214 |         mskGeo = util.geocodeKM(msk,method='nearest')
215 |         np.save('./TS/msk.geo.npy',mskGeo)
216 | 
217 |     # connSumGeo = util.geocodeKM(connSum,method='linear')
218 |     # np.save('./TS/connSum.geo.npy',connSumGeo)
219 |     # mskGeo = np.ones(gamGeo.shape)
220 |     # mskGeo[connSumGeo < round(connCompCompleteness*ps.nd)] = 0
221 |     # mskGeo[gamGeo<gamThresh] = 0
222 | 
269 |     # if pairs2Overlap > 1:
270 |     #     pairs2Overlap -= 1
271 |     #     print('Cor too low. Rerunning with lower skip. Skip= ' + str(pairs2Overlap))
272 |     #     weeding(mincor=mincor,gamThresh=gamThresh,plotStuff=False,makeChanges=False,pairs2Overlap=pairs2Overlap)
273 | 
[y/n]: ") 276 | 277 | if val =='y': 278 | print('ok, moved directories, and reassigned param variables...') 279 | print('rerun smartLooks.py and runsnaphu.py') 280 | 281 | if not os.path.isdir('backup'): 282 | os.system('mkdir backup') 283 | os.system('cp ./params.npy backup/') 284 | 285 | for b in badDates: 286 | os.system('mv ' + ps.slcdir + '/' + b + ' ' + ps.slcdir + '/_' + b) 287 | 288 | 289 | datesNew = ps.dates[ps.dates!=badDates] 290 | 291 | 292 | # Redefine dates, pairs 293 | skip = 1 294 | dat = [] 295 | for f in flist: 296 | dat.append(f[-8:]) 297 | dat.sort() 298 | pairs1=[] 299 | pairs2=[] 300 | pairs =[] 301 | for ii,d in enumerate(dat): 302 | for jj in np.arange(1,skip+1): 303 | try: 304 | pairs.append(dat[ii] + '_' + dat[ii+jj]) 305 | except: 306 | pass 307 | 308 | dn2 = list() 309 | dec_year = list() 310 | for d in dat: 311 | yr = d[0:4] 312 | mo = d[4:6] 313 | day = d[6:8] 314 | dt = date.toordinal(date(int(yr), int(mo), int(day))) 315 | dn2.append(dt) 316 | d0 = date.toordinal(date(int(yr), 1, 1)) 317 | doy = np.asarray(dt)-d0+1 318 | dec_year.append(float(yr) + (doy/365.25)) 319 | dn2 = np.asarray(dn2) 320 | dn20 = dn2-dn2[0] # make relative to first date 321 | 322 | 323 | # Save arrays and variables to a dictionary 'params' 324 | ps.dates = dat 325 | ps.pairs = pairs 326 | ps.dec_year = dec_year 327 | ps.dn = dn2 328 | ps.dn0 = d0 329 | 330 | np.save('params.npy',params) 331 | if plotStuff: 332 | plt.show() 333 | 334 | 335 | if __name__ == '__main__': 336 | plotStuff = True 337 | makeChanges = False 338 | mincor = .5 339 | gamThresh = .5 340 | varMax = .06 341 | pairs2Overlap=1 342 | overwriteGeo=True 343 | connCompCompleteness = .9 344 | weeding(mincor,gamThresh,varMax,connCompCompleteness,plotStuff,makeChanges,pairs2Overlap,overwriteGeo) 345 | --------------------------------------------------------------------------------