├── modules ├── ping │ ├── ping_batch.sh │ └── ping.sh ├── gdal_merge │ └── gdal_merge.sh ├── unstable_coh_metric │ ├── UCM-batch.sh │ └── unstable_coh_metric.sh ├── __module_template__ │ └── __module_template__.sh ├── timeseries_xy │ └── timeseries_xy.sh ├── grid_difference │ └── grid_difference.sh ├── displacement │ └── displacement.sh ├── harmonize_grids │ └── harmonize_grids.sh ├── crop │ └── crop.sh ├── detrend │ └── detrend.sh ├── sgp_identification │ └── sgp_identification.sh ├── gacos_correction │ └── PP-gacos-correction.sh ├── preview_files │ └── preview_files.sh ├── summary_pdf │ └── PP-summary-pdf.sh ├── prep_arctic_dem │ └── add_single_col.sh └── mask_unwrapping_errors │ └── mask_unwrapping_errors.sh ├── lib ├── combination ├── .gitignore ├── s1-orbit-download.sh ├── palettes │ ├── con_comp_01.cpt │ ├── diverging_blue_white_red.cpt │ ├── diverging_blue_yellow_red.cpt │ ├── diverging_blue_red_dark.cpt │ ├── diverging_brown_green.cpt │ ├── diverging_red_yellow_green.cpt │ └── diverging_red_yellow_blue.cpt ├── meta4-to-filelist.py ├── PP-extract.sh ├── GMTSAR-mods │ ├── single-swath-unwrap-geocode.csh │ ├── snaphu_OSARIS.csh │ ├── merge_unwrap_geocode.sh │ └── geocode_OSARIS.csh ├── process-stack.sh ├── crop-dem.sh ├── sbas.sh ├── PP-multiswath.sh ├── mask-grdseries.sh ├── z_min_max.sh ├── PP-stack.sh ├── steps_boundaries.sh ├── InSAR │ └── intf.sh ├── harmonize_grids.sh ├── process-multi-swath.sh ├── min_grd_extent.sh ├── process-MUG.sh ├── PP-MUG.sh ├── PP-SBAS.sh ├── unwrapping-sum.sh ├── s1-file-download.sh ├── difference.sh └── check-queue.sh ├── doc ├── contributors.md └── CHANGELOG.md ├── templates ├── filelist.template ├── module-config │ ├── displacement.config.template │ ├── __module_template__.config.template │ ├── unstable_coh_metric.config_template │ ├── timeseries_xy.config.template │ ├── harmonize_grids.config.template │ ├── statistics.config.template │ ├── ping.config_template │ ├── detrend.config.template │ ├── 
sgp_identification.config.template │ ├── prep_arctic_dem.config_template │ ├── crop.config.template │ ├── mask_unwrapping_errors.config.template │ ├── gacos_correction.config.template │ └── summary_pdf.config.template ├── login.credentials ├── grid_difference.config.template └── GMTSAR-config.template ├── .gitignore └── tools ├── dem-from-tifs.sh └── shift_longitude_coords.sh /modules/ping/ping_batch.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | echo "PONG!" 4 | -------------------------------------------------------------------------------- /lib/combination: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cryotools/osaris/HEAD/lib/combination -------------------------------------------------------------------------------- /lib/.gitignore: -------------------------------------------------------------------------------- 1 | /MANIFEST 2 | /OSquery-result.xml 3 | /failed_MD5_check_list.txt 4 | /filelist-golubin.txt 5 | /product_list 6 | /products-list.csv 7 | -------------------------------------------------------------------------------- /doc/contributors.md: -------------------------------------------------------------------------------- 1 | The original author of OSARIS is [David Loibl](https://github.com/David-Loibl). 2 | The following people have contributed in some way or another to OSARIS. 
3 | 4 | - [Bodo Bookhagen](https://github.com/BodoBookhagen) 5 | - [Laila](https://github.com/dedetmix) 6 | - [Ziyadin Cakir](https://github.com/ziyadin) -------------------------------------------------------------------------------- /templates/filelist.template: -------------------------------------------------------------------------------- 1 | x 090e58f8-7e69-4256-b127-9de5588130ff x S1A_IW_SLC__1SDV_20170217T125851_20170217T125918_015324_0191F4_666E 2 | x d9d33681-ad91-4a57-9bd6-02a5b8238b81 x S1A_IW_SLC__1SSV_20161113T125855_20161113T125922_013924_0166A2_6DB8 3 | x 67fbce35-f044-4593-84d9-d700ca64ead4 x S1A_IW_SLC__1SSV_20160130T125846_20160130T125912_009724_00E32D_642B 4 | -------------------------------------------------------------------------------- /templates/module-config/displacement.config.template: -------------------------------------------------------------------------------- 1 | ###################################################################### 2 | # 3 | # Template configuration for the 'Displacement' module 4 | # 5 | # Copy to 'config' folder, rename to 'displacement.config' 6 | # and fit to your machine's configuration. 
7 | # 8 | ###################################################################### 9 | 10 | unwrapped_intf_PATH=( "$output_PATH/Interf-unwrpd" ) 11 | # Paths to unwrapped intergferogram files 12 | -------------------------------------------------------------------------------- /templates/login.credentials: -------------------------------------------------------------------------------- 1 | 2 | # Login credentials for your ESA DHuS account 3 | # Required if downloading from ESA's DHuS platform is enabled 4 | # Register a new account at https://scihub.copernicus.eu/dhus/#/self-registration 5 | 6 | ESA_username="_your_username_" 7 | ESA_password="_your_password_" 8 | 9 | 10 | # Login credentials for your ASF/EarthData account 11 | # Required if downloading via ASF is enabled 12 | # Register a new account at https://urs.earthdata.nasa.gov/users/new 13 | 14 | ASF_username="_your_username_" 15 | ASF_password="_your_password_" 16 | -------------------------------------------------------------------------------- /lib/s1-orbit-download.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | echo; echo "Updating S1 Orbits"; echo 4 | 5 | # wget -nH -l1 --no-parent --no-check-certificate -nc --reject-regex '\?' -r -nd -R *.txt,*.html* -P $1 https://s1qc.asf.alaska.edu/aux_poeorb/ 6 | 7 | if [ "$2" -ge 1 ]; then 8 | last_page=$2 9 | else 10 | last_page=1 11 | fi 12 | 13 | for page in `seq 1 $last_page`; do 14 | wget -nH -l1 --no-parent --no-check-certificate -nc --reject-regex '\?' 
-r -nd -R *.txt,*.html* -P $1 https://qc.sentinel1.eo.esa.int/aux_poeorb/?page=$page 15 | done 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /templates/module-config/__module_template__.config.template: -------------------------------------------------------------------------------- 1 | ################################################################## 2 | # 3 | # Template configuration for the OSARIS module template. 4 | # 5 | # Only of interest when you are developing a new module. 6 | # 7 | # Add all variables that should be user-defined here. 8 | # 9 | ################################################################# 10 | 11 | example_var="Foo bar" 12 | # Put a brief description of the variable here ... 13 | 14 | example_array=( piff paff bam ) 15 | # Put a brief description of the array here ... -------------------------------------------------------------------------------- /templates/module-config/unstable_coh_metric.config_template: -------------------------------------------------------------------------------- 1 | ################################################################## 2 | # 3 | # Template configuration for the Unstable Coherence Metric module 4 | # 5 | # Copy to 'config' folder, rename to 'unstable_coh_metric.config' 6 | # and fit to your setup and needs. 7 | # 8 | ################################################################# 9 | 10 | high_corr_threshold=0.4 11 | # Threshold value for high coherence 12 | # A value > 0.8 may be adequate when focusing on buildings 13 | # (e.g. earthquake damages), 0.4 is reasonable to detect 14 | # general land surface changes. 
15 | 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | # Compiled source # 7 | # Logs and databases # 8 | # OS generated files # 9 | # Packages # 10 | # git has its own built in compression methods 11 | # it's better to unpack these files and commit the raw source 12 | ############ 13 | ################### 14 | ###################### 15 | ###################### 16 | *.7z 17 | *.class 18 | *.com 19 | *.dll 20 | *.dmg 21 | *.exe 22 | *.gz 23 | *.iso 24 | *.jar 25 | *.log 26 | *.o 27 | *.rar 28 | *.so 29 | *.sql 30 | *.sqlite 31 | *.tar 32 | *.zip 33 | *~ 34 | .DS_Store 35 | .DS_Store? 36 | .Spotlight-V100 37 | .Trashes 38 | ._* 39 | Thumbs.db 40 | config.txt 41 | ehthumbs.db 42 | filelist.txt 43 | 44 | **/config/ -------------------------------------------------------------------------------- /lib/palettes/con_comp_01.cpt: -------------------------------------------------------------------------------- 1 | # GMT palette Set3_11.cpt 2 | # 3 | # This product includes color specifications and designs 4 | # developed by Cynthia Brewer (http://colorbrewer.org/). 
5 | # 6 | # Converted to the cpt format by J.J.Green 7 | # Qualitative palette with 11 colours 8 | # 9 | # COLOR_MODEL = RGB 10 | 0.00 141 211 199 1.00 141 211 199 11 | 1.00 255 255 179 2.00 255 255 179 12 | 2.00 190 186 218 3.00 190 186 218 13 | 3.00 251 128 114 4.00 251 128 114 14 | 4.00 128 177 211 5.00 128 177 211 15 | 5.00 253 180 098 6.00 253 180 098 16 | 6.00 179 222 105 7.00 179 222 105 17 | 7.00 252 205 229 8.00 252 205 229 18 | 8.00 217 217 217 9.00 217 217 217 19 | 9.00 188 128 189 10.00 188 128 189 20 | 10.00 204 235 197 11.00 204 235 197 21 | -------------------------------------------------------------------------------- /templates/module-config/timeseries_xy.config.template: -------------------------------------------------------------------------------- 1 | ################################################################## 2 | # 3 | # Template configuration for the 'Timeseries xy' module 4 | # 5 | # Copy to 'config' folder, rename to 'ping.config' and fit to 6 | # your machine's configuration. 7 | # 8 | ################################################################# 9 | 10 | TS_coordinates=( 11.11,22.22 ) 11 | # Provide sampling locations as a series of lat,lon pairs 12 | # in decimal degrees seqparated by spaces, e.g. 13 | # 73.98989,42.1707 74.48420,42.85183 14 | 15 | TS_gridfiles=( corr_ll.grd ) 16 | # Name of grid files to sample 17 | 18 | TS_input_PATH="$output_PATH/Pairs-forward/F3" 19 | # Path to directory which contains the directories in which the 20 | # grid files are located. Typically, this will be sth. 
like 21 | # "$output_PATH/Pairs-forward/F3" 22 | -------------------------------------------------------------------------------- /templates/module-config/harmonize_grids.config.template: -------------------------------------------------------------------------------- 1 | ###################################################################### 2 | # 3 | # Template configuration for the 'Harmonize grids' module 4 | # 5 | # Copy to 'config' folder, rename to 'harmonize_grids.config' 6 | # and fit to your machine's configuration. 7 | # 8 | # Activate the module in the main config file using the postprocessing 9 | # hook. 10 | # 11 | ###################################################################### 12 | 13 | grid_input_PATH=( "$output_PATH/Displacement" ) #"$output_PATH/GACOS-corrected" "$output_PATH/Displacement" 14 | # Path(s) to grid files 15 | # Multiple paths can be provided, seperated by spaces. 16 | 17 | # ref_point_xy_coords="" 18 | # Optional: XY coordinates of reference point in decimal degrees. 19 | # Format: longitude/latidue 20 | # Will override the default usage of 'SGP Identification' module result. 21 | -------------------------------------------------------------------------------- /templates/module-config/statistics.config.template: -------------------------------------------------------------------------------- 1 | ################################################################## 2 | # 3 | # Template configuration for the statistics module 4 | # 5 | # Copy to 'config' folder, rename to 'grid_difference.config' 6 | # and fit to your setup and needs. 7 | # 8 | ################################################################# 9 | 10 | stats_input_filenames=( "*.grd" ) 11 | # Filename(s) of files to crop. 12 | # You may provide multiple file names, seperated by spaces. 13 | # Wildcards are allowed, e.g. *.grd or corr* 14 | # Input files shall reside in sub-directories of input directory (s. 
below) 15 | 16 | stats_input_PATH="$output_PATH/Coherences" 17 | # Path to where the input files are located 18 | 19 | stats_subdirs=0 20 | # 0 -> Files are located directly in the dir specified in crop_input_PATH 21 | # 1 -> Scan through direct subdirs of crop_input_PATH for files 22 | -------------------------------------------------------------------------------- /templates/module-config/ping.config_template: -------------------------------------------------------------------------------- 1 | ################################################################## 2 | # 3 | # Template configuration for the ping module 4 | # 5 | # Copy to 'config' folder, rename to 'ping.config' and fit to 6 | # your machine's configuration. 7 | # 8 | ################################################################# 9 | 10 | 11 | ping_count=10 12 | # Number of ping jobs to send to the queue. 13 | # Number of jobs times ntasks should amount ~ to the number of cores 14 | # required in subsequent processing steps. 15 | 16 | slurm_ntasks=30 17 | # Number of cores used for parallel processing. 18 | # Make this large enough to avoid uneccessarily many ping jobs. 19 | 20 | slurm_account=my_account 21 | # SLURM account name 22 | 23 | slurm_qos=my_jobtype 24 | # Job type 25 | 26 | slurm_partition=partition_name 27 | # Partition used for computing. 28 | # Optional in most cases, comment out when not needed 29 | 30 | -------------------------------------------------------------------------------- /templates/grid_difference.config.template: -------------------------------------------------------------------------------- 1 | ################################################################## 2 | # 3 | # Template configuration for the grid_difference module 4 | # 5 | # Copy to 'config' folder, rename to 'grid_difference.config' 6 | # and fit to your setup and needs. 
7 | # 8 | ################################################################# 9 | 10 | grddiff_input_filenames=( display_amp_ll.grd ) 11 | # Files to process, e.g. 'display_amp_ll.grd' for amplitudes. 12 | # You may provide multiple file names, seperated by spaces. 13 | # Input files shall reside in sub-directories of input directory (s. below) 14 | 15 | grddiff_input_PATH="$output_PATH/Pairs-forward" 16 | # Path where the input data resides. 17 | # You may provide a full path or use the $output_PATH variable, e.g. 18 | # Default: $output_PATH/Pairs-forward 19 | 20 | grddiff_palette="$OSARIS_PATH/lib/palettes/corr_diff_brown_green.cpt" 21 | # Define a CPT palette to use for for styling the results. 22 | 23 | -------------------------------------------------------------------------------- /templates/module-config/detrend.config.template: -------------------------------------------------------------------------------- 1 | ################################################################## 2 | # 3 | # Template configuration for the 'Detrend' module 4 | # 5 | # Copy to 'config' folder, rename to 'detrend.config' 6 | # and fit to your machine's configuration. 7 | # 8 | ################################################################# 9 | 10 | RT_grid_input_PATH="$output_PATH/Interf-unwrpd" 11 | # Path to directory where the input grid files are located, e.g. 12 | # $output_PATH/Interf-unwrpd 13 | # $output_PATH/GACOS-corrected 14 | # $output_PATH/Displacement 15 | 16 | RT_model="10+r" 17 | # The model used for trend removal. Default: 10+r 18 | # Options: 19 | # 3 -> bilinear 20 | # 6 -> quadratic 21 | # 10 -> bicubic 22 | # +r may be added to each of the models to perform a iterative robust fitting. 23 | # More info: http://gmt.soest.hawaii.edu/doc/5.4.4/grdtrend.html 24 | 25 | RT_safe_trend_files=1 26 | # Save calculated trend surface to a subdirectory. 
-------------------------------------------------------------------------------- /templates/module-config/sgp_identification.config.template: -------------------------------------------------------------------------------- 1 | ################################################################## 2 | # 3 | # Template configuration for the 'Stable Ground Point Identification' module 4 | # 5 | # Copy to 'config' folder, rename to 'sgp_identification.config', 6 | # and fit to your setup and needs. 7 | # 8 | ################################################################# 9 | 10 | sgpi_threshold="0.2" 11 | # Minimum coherence value. 12 | # If unwrapping is acitve, psi_theshold should be >= the unwrapping 13 | # threshold as provided in the GMTSAR config file. 14 | 15 | sgpi_input_PATH="$output_PATH/Coherences" 16 | # Path where the input data resides. 17 | # You may provide a full path or use the $output_PATH variable 18 | # Default: $output_PATH/Coherences 19 | 20 | sgpi_region="11.1/22.2/33.3/44.4" 21 | # Limit the analysis to a subregion instead of whole scene extents (optional). 22 | # Format: lon_min/lon_max/lat_min/lat_max (each in decimal degrees). 
23 | 24 | -------------------------------------------------------------------------------- /lib/meta4-to-filelist.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Convert .meta4 XML-files to file list compatible with dhusget.sh 4 | 5 | import sys 6 | import xml.etree.ElementTree as ET 7 | # import math as math 8 | # from scipy.io.numpyio import fwrite, fread 9 | # import os 10 | # import numpy as np 11 | # import pylab as py 12 | 13 | if (len(sys.argv) > 1): 14 | 15 | input_file = open(sys.argv[1],'rb') 16 | output_file = sys.argv[2] 17 | 18 | tree = ET.parse(input_file) # '/data/scratch/loibldav/GSP/Input/Meta4/Bishkek-Golubin-2017-03-01.meta4' 19 | root = tree.getroot() 20 | 21 | # print root.tag 22 | # print root.attrib 23 | 24 | with open(output_file, "w") as text_file: 25 | for child in root: 26 | # print(child.attrib['name'][:-4]) 27 | # print(child[1].text[53:-9]) 28 | text_file.write(" x {0}".format(child[1].text[53:-9])) 29 | text_file.write(" x {0}".format(child.attrib['name'][:-4])) 30 | text_file.write("\n") 31 | 32 | else: 33 | print "Usage: meta4-to-filelist.py [input file (.meta4)] [output file]" 34 | 35 | 36 | -------------------------------------------------------------------------------- /templates/module-config/prep_arctic_dem.config_template: -------------------------------------------------------------------------------- 1 | ###################################################################### 2 | # 3 | # Configuration of the prep_arctic_dem module for OSARIS 4 | # 5 | # Activate the module in the main config file using the preprocessing 6 | # hook. Alternatively, run in standalone mode from the module folder. 7 | # 8 | # Further information on ArcticDEM and a shapefile with tile numbers 9 | # are available at https://www.pgc.umn.edu/data/arcticdem/ 10 | # 11 | ###################################################################### 12 | 13 | # Arctic DEM tiles to process. 
14 | # Make sure to define a rectangle without missing tiles! 15 | row_min=1 16 | row_max=2 17 | 18 | col_min=1 19 | col_max=2 20 | 21 | # Optional: additional single cols to be added to the DEM 22 | single_col=3_1 # Referring to the second and third character of name string 23 | 24 | 25 | 26 | # Output grid resolution in meters (original ArcticDEM is 5 m) 27 | scale_factor=50 28 | 29 | # Path to folder where ArcticDEM tiles are located 30 | input_mosaic_PATH=/path/to/ArcticDEM/mosaic/v2.0 31 | 32 | # Output folder. Will be created if not exists. 33 | dem_output_PATH=/your/output_folder -------------------------------------------------------------------------------- /templates/module-config/crop.config.template: -------------------------------------------------------------------------------- 1 | ################################################################## 2 | # 3 | # Template configuration for the 'Crop' module 4 | # 5 | # Copy to 'config' folder, rename to 'crop.config' 6 | # and fit to your demands. 7 | # 8 | ################################################################# 9 | 10 | crop_input_PATH="$output_PATH/Coherences" 11 | # Path to where the input files are located 12 | 13 | crop_subdirs=0 14 | # 0 -> Files are located directly in the dir specified in crop_input_PATH 15 | # 1 -> Scan through direct subdirs of crop_input_PATH for files 16 | 17 | crop_input_filenames=( "*.grd" ) 18 | # Filename(s) of files to crop. 19 | # Wildcards are allowed but must be escaped, e.g. "*.grd" 20 | 21 | crop_regions=( 12.34/34.56/56.78/78.90 ) 22 | # Extent(s) to crop the image to in decimal degrees 23 | # Format: lon_min/lon_max/lat_min/lat_max 24 | # You may enter multiple regions separated by spaces. 25 | 26 | crop_region_labels=( Crop_region_label ) 27 | # Labels for regions. 28 | # Will also be used for directory names, so avoid special chars. 29 | # When using multiple regions, make sure to provide the same number 30 | # of labels, separated by spaces. 
31 | -------------------------------------------------------------------------------- /templates/module-config/mask_unwrapping_errors.config.template: -------------------------------------------------------------------------------- 1 | ################################################################## 2 | # 3 | # Template configuration for the 'Mask unwrapping errors' module 4 | # 5 | # Copy to 'config' folder, rename to 'mask_unwrapping_errors.config' 6 | # and fit to your machine's configuration. 7 | # 8 | ################################################################# 9 | 10 | MUE_input_PATH="$output_PATH/GACOS-corrected" 11 | # Path to directory where the (forward) unwrapped interferograms are located 12 | # Default "$output_PATH/Interf_unwrpd" 13 | # Alternatively you may use: 14 | # "$output_PATH/Harmonized-grids/some_directory" if you applied time series harmonization (highly recommended), or 15 | # "$output_PATH/GACOS-correction" if the gacos correction was applied previously 16 | 17 | MUE_fwdrev_sums_PATH="$output_PATH/Unwrapping-sums" 18 | # Path to reverse unwrapping sum results. 19 | # Make sure to activate reverse interferogram processing in the main config file! 20 | # Default: $output_PATH/Unwrapping-sums 21 | 22 | MUE_threshold="0.1" 23 | # The threshold of allowed difference between fwd and rev unwr. intfs. 24 | # All pixels with values greater +threshold and less than -threshold will be masked. 
25 | # Default: 0.1 26 | -------------------------------------------------------------------------------- /lib/palettes/diverging_blue_white_red.cpt: -------------------------------------------------------------------------------- 1 | -2 28/50/107 -1.9 28/50/107 2 | -1.9 30/61/124 -1.8 30/61/124 3 | -1.8 33/72/141 -1.7 33/72/141 4 | -1.7 35/85/161 -1.6 35/85/161 5 | -1.6 47/105/177 -1.5 47/105/177 6 | -1.5 59/123/189 -1.4 59/123/189 7 | -1.4 74/145/204 -1.3 74/145/204 8 | -1.3 90/166/217 -1.2 90/166/217 9 | -1.2 107/180/225 -1.1 107/180/225 10 | -1.1 128/196/234 -1 128/196/234 11 | -1 145/209/242 -0.9 145/209/242 12 | -0.9 167/222/247 -0.8 167/222/247 13 | -0.8 181/227/248 -0.7 181/227/248 14 | -0.7 196/232/248 -0.6 196/232/248 15 | -0.6 215/239/249 -0.5 215/239/249 16 | -0.5 225/243/251 -0.4 225/243/251 17 | -0.4 235/247/252 -0.3 235/247/252 18 | -0.3 247/252/254 -0.2 247/252/254 19 | -0.2 white -0.1 white 20 | -0.1 white 0 white 21 | 0 white 0.1 white 22 | 0.1 white 0.2 white 23 | 0.2 255/252/238 0.3 255/252/238 24 | 0.3 254/246/209 0.4 254/246/209 25 | 0.4 253/241/184 0.5 253/241/184 26 | 0.5 252/235/155 0.6 252/235/155 27 | 0.6 253/218/124 0.7 253/218/124 28 | 0.7 253/203/98 0.8 253/203/98 29 | 0.8 254/188/72 0.9 254/188/72 30 | 0.9 253/167/49 1 253/167/49 31 | 1 251/150/47 1.1 251/150/47 32 | 1.1 248/129/44 1.2 248/129/44 33 | 1.2 246/112/42 1.3 246/112/42 34 | 1.3 238/90/41 1.4 238/90/41 35 | 1.4 228/69/41 1.5 228/69/41 36 | 1.5 220/50/40 1.6 220/50/40 37 | 1.6 208/31/39 1.7 208/31/39 38 | 1.7 189/28/35 1.8 189/28/35 39 | 1.8 173/25/31 1.9 173/25/31 40 | 1.9 157/23/28 2 157/23/28 41 | B black 42 | F white 43 | N red 44 | -------------------------------------------------------------------------------- /lib/palettes/diverging_blue_yellow_red.cpt: -------------------------------------------------------------------------------- 1 | -2 5/40/89 -1.9 5/40/89 2 | -1.9 6/51/106 -1.8 6/51/106 3 | -1.8 7/62/124 -1.7 7/62/124 4 | -1.7 8/75/144 -1.6 8/75/144 5 | -1.6 
15/89/161 -1.5 15/89/161 6 | -1.5 28/104/171 -1.4 28/104/171 7 | -1.4 44/121/182 -1.3 44/121/182 8 | -1.3 57/136/192 -1.2 57/136/192 9 | -1.2 71/150/201 -1.1 71/150/201 10 | -1.1 82/160/205 -1 82/160/205 11 | -1 92/169/208 -0.9 92/169/208 12 | -0.9 103/179/212 -0.8 103/179/212 13 | -0.8 115/187/216 -0.7 115/187/216 14 | -0.7 129/196/220 -0.6 129/196/220 15 | -0.6 146/206/224 -0.5 146/206/224 16 | -0.5 160/214/228 -0.4 160/214/228 17 | -0.4 176/223/233 -0.3 176/223/233 18 | -0.3 189/230/240 -0.2 189/230/240 19 | -0.2 200/235/245 -0.1 200/235/245 20 | -0.1 211/241/251 0 211/241/251 21 | 0 255/252/190 0.1 255/252/190 22 | 0.1 255/246/171 0.2 255/246/171 23 | 0.2 255/241/151 0.3 255/241/151 24 | 0.3 255/234/128 0.4 255/234/128 25 | 0.4 254/226/109 0.5 254/226/109 26 | 0.5 251/219/98 0.6 251/219/98 27 | 0.6 247/210/85 0.7 247/210/85 28 | 0.7 244/202/74 0.8 244/202/74 29 | 0.8 241/188/62 0.9 241/188/62 30 | 0.9 237/167/49 1 237/167/49 31 | 1 235/150/38 1.1 235/150/38 32 | 1.1 231/129/25 1.2 231/129/25 33 | 1.2 227/114/18 1.3 227/114/18 34 | 1.3 220/103/13 1.4 220/103/13 35 | 1.4 212/89/8 1.5 212/89/8 36 | 1.5 205/78/3 1.6 205/78/3 37 | 1.6 193/67/3 1.7 193/67/3 38 | 1.7 175/58/9 1.8 175/58/9 39 | 1.8 160/52/14 1.9 160/52/14 40 | 1.9 145/45/19 2 145/45/19 41 | B black 42 | F white 43 | N red 44 | -------------------------------------------------------------------------------- /modules/gdal_merge/gdal_merge.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # OSARIS module to merge grids using GDAL. 6 | # 7 | # Primarily intended for overlapping grids, e.g. to merge interferograms 8 | # from neighboring S1 slices. 9 | # 10 | # David Loibl, 2018 11 | # 12 | ##################################################################### 13 | 14 | start=`date +%s` 15 | 16 | if [ ! 
-f "$OSARIS_PATH/config/gdal_merge.config" ]; then 17 | echo 18 | echo "Cannot open $OSARIS_PATH/config/gdal_merge.config. Please provide a valid config file in the OSARIS config folder." 19 | echo 20 | exit 2 21 | else 22 | 23 | echo; echo "Merging files with GDAL" 24 | 25 | source $OSARIS_PATH/config/gdal_merge.config 26 | 27 | gdal_translate -a_srs EPSG:4326 -co interleave=pixel -a_ullr -180.0 90.0 180.0 -90.0 0.jpg ONE.tif 28 | gdal_translate -a_srs EPSG:4326 -co interleave=pixel -a_ullr -90.0 90.0 90.0 -90.0 0.jpg TWO.tif 29 | 30 | gdalbuildvrt -input_file_list my_list.txt doq_index.vrt 31 | 32 | gdal_translate ONETWO.vrt ONETWO.tif 33 | 34 | 35 | echo; echo 36 | echo "Cleaning up" 37 | rm -r temp 38 | rm merged_dem.grd 39 | echo; echo 40 | 41 | end=`date +%s` 42 | 43 | runtime=$((end-start)) 44 | 45 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($runtime/86400)) $(($runtime%86400/3600)) $(($runtime%3600/60)) $(($runtime%60)) 46 | echo 47 | 48 | 49 | fi 50 | 51 | 52 | -------------------------------------------------------------------------------- /templates/module-config/gacos_correction.config.template: -------------------------------------------------------------------------------- 1 | ################################################################## 2 | # 3 | # Template configuration for the 'GACOS correction' module 4 | # 5 | # Copy to 'config' folder, rename to 'gacos_correction.config' 6 | # and fit to your machine's configuration. 7 | # 8 | ################################################################# 9 | 10 | gacos_PATH="/path/to/GACOS-data" 11 | # Path to directory where the GACOS files are located 12 | # GACOS files can be obtained via http://ceg-research.ncl.ac.uk/v2/gacos/ 13 | 14 | unwrp_intf_PATH="$output_PATH/Interf-unwrpd" 15 | # Path to unwrapped interferograms 16 | # Default: $output_PATH/Interf-unwrpd 17 | 18 | referene_point="SGPI" 19 | # The 'stable ground' reference point to which data will be harmonized. 
20 | # Options: 21 | # Coordinates -> Manually define reference point (dec. degrees, format longitude/latitude) 22 | # SGPI -> Write 'SGPI' to obtain coordinates from 'SGP Identification' result file. 23 | 24 | harmonize_input_grids=1 25 | # Harmonize input unwrapped interferograms relative to 'stable ground point' before 26 | # applying the GACOS correction. Required if the dataset was not harmonized in a prior step. 27 | # 0 -> Deactivate, no harmonization 28 | # 1 -> Activate harmonization, will use the reference defined in the 'reference_point' variable (default) 29 | 30 | gacos_extent="11.11/22.22/33.33/44.44" 31 | # The extent of the map window in decimal degrees 32 | # Format: lon_min/lon_max/lat_min/lat_max 33 | 34 | 35 | -------------------------------------------------------------------------------- /lib/PP-extract.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | echo 4 | echo "- - - - - - - - - - - - - - - - - -" 5 | echo "SLURM job EXTRACT started" 6 | echo "- - - - - - - - - - - - - - - - - -" 7 | echo 8 | 9 | 10 | if [ ! $# -eq 5 ]; then 11 | echo 12 | echo "Wrong parameter count, exiting." 13 | echo "Usage: PP-extract file target_path output_path polarization" 14 | echo 15 | exit 1 16 | elif [ ! -f "$1/$2" ]; then 17 | echo 18 | echo "Cannot open $1. Please provide a valid zipped Sentinel1 file. Exiting." 19 | echo 20 | exit 2 21 | else 22 | extract_start=`date +%s` 23 | 24 | # $OSARIS_PATH/lib/PP-extract.sh $input_PATH $S1_archive $work_PATH/orig $output_PATH $polarization 25 | input_PATH=$1 26 | S1_archive=$2 27 | S1_output_PATH=$3 28 | output_PATH=$4 29 | polarization=$5 30 | 31 | echo "Extracting file $S1_archive from directory $input_PATH to $S1_output_PATH ..." 
32 | if [ "$polarization" = "vv" ]; then 33 | pol_exclude="-x *-vh-*" 34 | elif [ "$polarization" = "vh" ]; then 35 | pol_exclude="-x *-vv-*" 36 | elif [ "$polarization" = "both" ]; then 37 | pol_exclude="" 38 | else 39 | pol_exclude="-x *-vh-*" 40 | fi 41 | 42 | unzip $input_PATH/$S1_archive $pol_exclude -d $S1_output_PATH 43 | 44 | extract_end=`date +%s` 45 | 46 | extract_runtime=$((extract_end-extract_start)) 47 | echo "$2 $SLURM_JOB_ID $extract_runtime" >> $output_PATH/Reports/PP-extract-stats.tmp 48 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($extract_runtime/86400)) $(($extract_runtime%86400/3600)) $(($extract_runtime%3600/60)) $(($extract_runtime%60)) 49 | 50 | fi 51 | 52 | -------------------------------------------------------------------------------- /lib/palettes/diverging_blue_red_dark.cpt: -------------------------------------------------------------------------------- 1 | -2 10.75/0/61.5 -1.9 10.75/0/61.5 2 | -1.9 18/0/82.5 -1.8 18/0/82.5 3 | -1.8 24/0/103.5 -1.7 24/0/103.5 4 | -1.7 26.875/0/124.5 -1.6 26.875/0/124.5 5 | -1.6 26.875/0/145.5 -1.5 26.875/0/145.5 6 | -1.5 24.625/0/166.5 -1.4 24.625/0/166.5 7 | -1.4 22.375/0/187.5 -1.3 22.375/0/187.5 8 | -1.3 17.625/0/208.5 -1.2 17.625/0/208.5 9 | -1.2 10.125/0/229.5 -1.1 10.125/0/229.5 10 | -1.1 4.25/4.125/248 -1 4.25/4.125/248 11 | -1 17.75/28.875/254 -0.9 17.75/28.875/254 12 | -0.9 42.5/59.875/255 -0.8 42.5/59.875/255 13 | -0.8 68.375/91.375/255 -0.7 68.375/91.375/255 14 | -0.7 93.125/121.5/255 -0.6 93.125/121.5/255 15 | -0.6 117.88/148.5/255 -0.5 117.88/148.5/255 16 | -0.5 142.62/173.62/255 -0.4 142.62/173.62/255 17 | -0.4 168.5/195.37/255 -0.3 168.5/195.37/255 18 | -0.3 195.12/214.25/255 -0.2 195.12/214.25/255 19 | -0.2 219.88/233.75/255 -0.1 219.88/233.75/255 20 | -0.1 243.38/248.25/255 0 243.38/248.25/255 21 | 0 255/248.25/243.38 0.1 255/248.25/243.38 22 | 0.1 255/233.75/219.87 0.2 255/233.75/219.87 23 | 0.2 255/214.25/195.12 0.3 255/214.25/195.12 24 | 0.3 255/195.37/168.5 0.4 
255/195.37/168.5 25 | 0.4 255/173.62/142.62 0.5 255/173.62/142.62 26 | 0.5 255/148.5/117.88 0.6 255/148.5/117.88 27 | 0.6 255/121.5/93.125 0.7 255/121.5/93.125 28 | 0.7 255/91.375/68.375 0.8 255/91.375/68.375 29 | 0.8 255/59.875/42.5 0.9 255/59.875/42.5 30 | 0.9 254/28.875/17.75 1 254/28.875/17.75 31 | 1 248/4.125/4.25 1.1 248/4.125/4.25 32 | 1.1 229.5/0/10.125 1.2 229.5/0/10.125 33 | 1.2 208.5/0/17.625 1.3 208.5/0/17.625 34 | 1.3 187.5/0/22.375 1.4 187.5/0/22.375 35 | 1.4 166.5/0/24.625 1.5 166.5/0/24.625 36 | 1.5 145.5/0/26.875 1.6 145.5/0/26.875 37 | 1.6 124.5/0/26.875 1.7 124.5/0/26.875 38 | 1.7 103.5/0/24 1.8 103.5/0/24 39 | 1.8 82.5/0/18 1.9 82.5/0/18 40 | 1.9 62.75/0/10.75 2 62.75/0/10.75 41 | B black 42 | F white 43 | N red 44 | -------------------------------------------------------------------------------- /modules/ping/ping.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # Send minimal jobs to Slurm queue to wake up sleeping nodes 6 | # 7 | # Requires a file 'ping.config' in the OSARIS config folder containing 8 | # the Slurm configuration. Get startet by copying the config_template 9 | # file from the templates folder and fit it to your setup. 10 | # 11 | # David Loibl, 2017 12 | # 13 | ##################################################################### 14 | 15 | module_name="ping" 16 | 17 | 18 | if [ -z $module_config_PATH ]; then 19 | echo "Parameter module_config_PATH not set in main config file. Setting to default:" 20 | echo " $OSARIS_PATH/config" 21 | module_config_PATH="$OSARIS_PATH/config" 22 | elif [[ "$module_config_PATH" != /* ]] && [[ "$module_config_PATH" != "$OSARIS_PATH"* ]]; then 23 | module_config_PATH="${OSARIS_PATH}/config/${module_config_PATH}" 24 | fi 25 | 26 | if [ ! -d "$module_config_PATH" ]; then 27 | echo "ERROR: $module_config_PATH is not a valid directory. 
Check parameter module_config_PATH in main config file. Exiting ..." 28 | exit 2 29 | fi 30 | 31 | if [ ! -f "${module_config_PATH}/${module_name}.config" ]; then 32 | echo 33 | echo "Cannot open ${module_name}.config in ${module_config_PATH}. Please provide a valid config file." 34 | echo 35 | else 36 | 37 | # Include the config file 38 | source ${module_config_PATH}/${module_name}.config 39 | 40 | i=0 41 | while [ $i -lt $ping_count ]; do 42 | sbatch \ 43 | --output=/dev/null \ 44 | --error=/dev/null \ 45 | --workdir=$input_PATH \ 46 | --job-name=ping \ 47 | --qos=$slurm_qos \ 48 | --account=$slurm_account \ 49 | --partition=$slurm_partition \ 50 | --mail-type=NONE \ 51 | $OSARIS_PATH/modules/ping/ping_batch.sh 52 | ((i++)) 53 | done 54 | fi 55 | -------------------------------------------------------------------------------- /lib/palettes/diverging_brown_green.cpt: -------------------------------------------------------------------------------- 1 | -2 84/48/5 -1.9 84/48/5 2 | -1.9 84/48/5 -1.8 84/48/5 3 | -1.8 94.5/54/6.125 -1.7 94.5/54/6.125 4 | -1.7 109.9/62.8/7.775 -1.6 109.9/62.8/7.775 5 | -1.6 125.3/72.075/8.95 -1.5 125.3/72.075/8.95 6 | -1.5 140.65/81.6/10.45 -1.4 140.65/81.6/10.45 7 | -1.4 154.95/94.8/20.35 -1.3 154.95/94.8/20.35 8 | -1.3 169.13/108/30.125 -1.2 169.13/108/30.125 9 | -1.2 182.88/121.2/39.475 -1.1 182.88/121.2/39.475 10 | -1.1 194.6/136.42/54 -1 194.6/136.42/54 11 | -1 203.4/154.58/76 -0.9 203.4/154.58/76 12 | -0.9 212.2/172.4/98 -0.8 212.2/172.4/98 13 | -0.8 221/190/120 -0.7 221/190/120 14 | -0.7 228.1/202.08/139.88 -0.6 228.1/202.08/139.88 15 | -0.6 234.7/212.53/159.13 -0.5 234.7/212.53/159.13 16 | -0.5 240.78/222.98/178.38 -0.4 240.78/222.98/178.38 17 | -0.4 246/232.45/196.88 -0.3 246/232.45/196.88 18 | -0.3 246/235.75/210.63 -0.2 246/235.75/210.63 19 | -0.2 245.82/239.23/224.38 -0.1 245.82/239.23/224.38 20 | -0.1 245.28/243.07/238.13 0 245.28/243.07/238.13 21 | 0 238.68/243.62/242.8 0.1 238.68/243.62/242.8 22 | 0.1 226.02/240.88/238.4 
0.2 226.02/240.88/238.4 23 | 0.2 213.38/237.75/234 0.3 213.38/237.75/234 24 | 0.3 200.72/234.45/229.6 0.4 200.72/234.45/229.6 25 | 0.4 181.9/227.35/220.45 0.5 181.9/227.35/220.45 26 | 0.5 162.13/219.62/210.55 0.6 162.13/219.62/210.55 27 | 0.6 142.87/211.37/200.65 0.7 142.87/211.37/200.65 28 | 0.7 123.25/201.63/189.88 0.8 123.25/201.63/189.88 29 | 0.8 102.35/186.77/176.12 0.9 102.35/186.77/176.12 30 | 0.9 81.675/171.92/162.38 1 81.675/171.92/162.38 31 | 1 61.325/157.08/148.62 1.1 61.325/157.08/148.62 32 | 1.1 44.55/142.87/134.87 1.2 44.55/142.87/134.87 33 | 1.2 30.25/129.12/121.12 1.3 30.25/129.12/121.12 34 | 1.3 15.95/115.8/107.8 1.4 15.95/115.8/107.8 35 | 1.4 1.65/102.6/94.6 1.5 1.65/102.6/94.6 36 | 1.5 0.475/90.975/81.925 1.6 0.475/90.975/81.925 37 | 1.6 0/79.425/69.275 1.7 0/79.425/69.275 38 | 1.7 0/67.875/56.625 1.8 0/67.875/56.625 39 | 1.8 0/60/48 1.9 0/60/48 40 | 1.9 0/60/48 2 0/60/48 41 | B black 42 | F black 43 | N black 44 | -------------------------------------------------------------------------------- /lib/palettes/diverging_red_yellow_green.cpt: -------------------------------------------------------------------------------- 1 | -2 165/0/38 -1.9 165/0/38 2 | -1.9 165/0/38 -1.8 165/0/38 3 | -1.8 174.38/9/38.375 -1.7 174.38/9/38.375 4 | -1.7 188.12/22.2/38.925 -1.6 188.12/22.2/38.925 5 | -1.6 201.88/35.4/39 -1.5 201.88/35.4/39 6 | -1.5 215.38/48.75/39.35 -1.4 215.38/48.75/39.35 7 | -1.4 223.63/65.25/47.05 -1.3 223.63/65.25/47.05 8 | -1.3 231.75/81.875/54.75 -1.2 231.75/81.875/54.75 9 | -1.2 239.45/98.925/62.45 -1.1 239.45/98.925/62.45 10 | -1.1 245.12/116.42/70.375 -1 245.12/116.42/70.375 11 | -1 247.88/134.58/78.625 -0.9 247.88/134.58/78.625 12 | -0.9 250.3/152.4/86.875 -0.8 250.3/152.4/86.875 13 | -0.8 252.5/170/95.125 -0.7 252.5/170/95.125 14 | -0.7 253.43/184.63/105.92 -0.6 253.43/184.63/105.92 15 | -0.6 253.97/198.38/117.48 -0.5 253.97/198.38/117.48 16 | -0.5 254/212.12/129.03 -0.4 254/212.12/129.03 17 | -0.4 254/225.2/140.95 -0.3 254/225.2/140.95 
18 | -0.3 254/234/155.25 -0.2 254/234/155.25 19 | -0.2 254.18/242.63/169.55 -0.1 254.18/242.63/169.55 20 | -0.1 254.72/250.88/183.85 0 254.72/250.88/183.85 21 | 0 249.78/252.8/183.85 0.1 249.78/252.8/183.85 22 | 0.1 239.32/248.4/169.55 0.2 239.32/248.4/169.55 23 | 0.2 228.88/244/155.25 0.3 228.88/244/155.25 24 | 0.3 218.42/239.6/140.95 0.4 218.42/239.6/140.95 25 | 0.4 205.12/233.78/130.92 0.5 205.12/233.78/130.92 26 | 0.5 191.35/227.72/121.6 0.6 191.35/227.72/121.6 27 | 0.6 177.05/221.67/112.8 0.7 177.05/221.67/112.8 28 | 0.7 162/215.25/105.5 0.8 162/215.25/105.5 29 | 0.8 144.4/207.55/103.3 0.9 144.4/207.55/103.3 30 | 0.9 126.8/199.85/101.33 1 126.8/199.85/101.33 31 | 1 109.2/192.15/99.675 1.1 109.2/192.15/99.675 32 | 1.1 89.65/183.15/96.075 1.2 89.65/183.15/96.075 33 | 1.2 68.75/173.25/91.125 1.3 68.75/173.25/91.125 34 | 1.3 47.85/162.92/85.75 1.4 47.85/162.92/85.75 35 | 1.4 26.95/152.47/80.25 1.5 26.95/152.47/80.25 36 | 1.5 19.175/139.4/73.7 1.6 19.175/139.4/73.7 37 | 1.6 12.025/126.2/67.025 1.7 12.025/126.2/67.025 38 | 1.7 4.875/113/59.875 1.8 4.875/113/59.875 39 | 1.8 0/104/55 1.9 0/104/55 40 | 1.9 0/104/55 2 0/104/55 41 | B black 42 | F black 43 | N black 44 | -------------------------------------------------------------------------------- /tools/dem-from-tifs.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -u # Disable usage of unset variables. 4 | set -e # Exit when scripts return a non-true value. 5 | 6 | if [ ! $# -eq 2 ]; then 7 | echo 8 | echo "Usage: dem-from-tifs.sh " 9 | echo 10 | echo " Path to Digital Elevation Data in GeoTiff format." 11 | echo " No other files should be located in this directory." 12 | echo " Path where the output dem.grd file will be written." 13 | echo " The directory will be created if it does not exist." 14 | echo 15 | echo 16 | echo "Merge all GeoTiff files in to a single DEM called dem.grd which can be used as topo file by OSARIS." 
17 | echo 18 | echo "Requires GDAL and Python." 19 | echo "Input GeoTiff files should be in WGS84 projection." 20 | echo 21 | elif [ ! -d $1 ]; then 22 | echo 23 | echo "Error: $1 is not a valid directory." 24 | echo 25 | else 26 | input_PATH="$1" 27 | output_PATH="$2" 28 | 29 | mkdir -p $output_PATH 30 | 31 | cd $input_PATH 32 | dem_files=($( ls *.tif )) 33 | 34 | if [ ${#dem_files} -eq 0 ]; then 35 | echo "No .tif files found in $input_PATH. Exiting." 36 | exit 37 | else 38 | echo; echo "Merging ${#dem_files[@]} tiles ..." 39 | echo "${dem_files[@]}" 40 | gdal_merge.py ${dem_files[@]} -o merged_dem.tif 41 | gmt grdconvert merged_dem.tif $output_PATH/dem-raw.grd -V 42 | grid_mode=$( gmt grdinfo $output_PATH/dem-raw.grd | grep 'Pixel node registration' | awk '{print $6}' ) 43 | grid_mode=${grid_mode:1} 44 | if [ "$grid_mode" == "Cartesian" ]; then 45 | echo; echo "Converting cartesian to geographic grid" 46 | grd_extent=$( gmt grdinfo -I- $output_PATH/dem-raw.grd | awk 'NR==1' ) 47 | # grd_centermedian= 48 | gmt grdproject $output_PATH/dem-raw.grd $grd_extent -Jm1:1 -I -G$output_PATH/dem.grd -V 49 | fi 50 | 51 | rm merged_dem.tif 52 | fi 53 | 54 | fi 55 | -------------------------------------------------------------------------------- /lib/palettes/diverging_red_yellow_blue.cpt: -------------------------------------------------------------------------------- 1 | -2 158/1/66 -1.9 158/1/66 2 | -1.9 158/1/66 -1.8 158/1/66 3 | -1.8 168.5/12.625/68.25 -1.7 168.5/12.625/68.25 4 | -1.7 183.9/29.675/71.55 -1.6 183.9/29.675/71.55 5 | -1.6 198.82/46.25/75.325 -1.5 198.82/46.25/75.325 6 | -1.5 213.38/62.6/78.85 -1.4 213.38/62.6/78.85 7 | -1.4 221.63/75.8/75.55 -1.3 221.63/75.8/75.55 8 | -1.3 230/88.875/72.25 -1.2 230/88.875/72.25 9 | -1.2 238.8/101.52/68.95 -1.1 238.8/101.52/68.95 10 | -1.1 245.12/116.42/70.375 -1 245.12/116.42/70.375 11 | -1 247.88/134.58/78.625 -0.9 247.88/134.58/78.625 12 | -0.9 250.3/152.4/86.875 -0.8 250.3/152.4/86.875 13 | -0.8 252.5/170/95.125 -0.7 
252.5/170/95.125 14 | -0.7 253.43/184.63/105.92 -0.6 253.43/184.63/105.92 15 | -0.6 253.97/198.38/117.48 -0.5 253.97/198.38/117.48 16 | -0.5 254/212.12/129.03 -0.4 254/212.12/129.03 17 | -0.4 254/225.2/140.95 -0.3 254/225.2/140.95 18 | -0.3 254/234/155.25 -0.2 254/234/155.25 19 | -0.2 254.18/242.63/169.55 -0.1 254.18/242.63/169.55 20 | -0.1 254.72/250.88/183.85 0 254.72/250.88/183.85 21 | 0 251.42/253.62/185.78 0.1 251.42/253.62/185.78 22 | 0.1 244.27/250.88/175.32 0.2 244.27/250.88/175.32 23 | 0.2 237.5/248.12/164.5 0.3 237.5/248.12/164.5 24 | 0.3 230.9/245.38/153.5 0.4 230.9/245.38/153.5 25 | 0.4 215.75/239.3/154.85 0.5 215.75/239.3/154.85 26 | 0.5 199.28/232.7/158.15 0.6 199.28/232.7/158.15 27 | 0.6 183.32/226.1/161.45 0.7 183.32/226.1/161.45 28 | 0.7 166.63/219.38/164 0.8 166.63/219.38/164 29 | 0.8 147.37/212.22/164 0.9 147.37/212.22/164 30 | 0.9 128.35/204.85/164.22 1 128.35/204.85/164.22 31 | 1 109.65/197.15/164.78 1.1 109.65/197.15/164.78 32 | 1.1 93.55/184.57/168.9 1.2 93.55/184.57/168.9 33 | 1.2 79.25/168.62/175.5 1.3 79.25/168.62/175.5 34 | 1.3 64.95/152.67/182.1 1.4 64.95/152.67/182.1 35 | 1.4 50.65/136.72/188.7 1.5 50.65/136.72/188.7 36 | 1.5 61.55/121.3/182.18 1.6 61.55/121.3/182.18 37 | 1.6 73.65/105.82/174.95 1.7 73.65/105.82/174.95 38 | 1.7 85.75/89.875/167.25 1.8 85.75/89.875/167.25 39 | 1.8 94/79/162 1.9 94/79/162 40 | 1.9 94/79/162 2 94/79/162 41 | B black 42 | F black 43 | N black 44 | -------------------------------------------------------------------------------- /lib/GMTSAR-mods/single-swath-unwrap-geocode.csh: -------------------------------------------------------------------------------- 1 | 2 | ################################ 3 | # 5 - start from unwrap phase # 4 | ################################ 5 | 6 | if ($stage <= 5 ) then 7 | if ($threshold_snaphu != 0 ) then 8 | cd intf 9 | set ref_id = `grep SC_clock_start ../SLC/$master.PRM | awk '{printf("%d",int($3))}' ` 10 | set rep_id = `grep SC_clock_start ../SLC/$slave.PRM | awk 
'{printf("%d",int($3))}' ` 11 | cd $ref_id"_"$rep_id 12 | if ((! $?region_cut) || ($region_cut == "")) then 13 | set region_cut = `grdinfo phase.grd -I- | cut -c3-20` 14 | endif 15 | 16 | # 17 | # landmask 18 | # 19 | if ($switch_land == 1) then 20 | cd ../../topo 21 | if (! -f landmask_ra.grd) then 22 | landmask.csh $region_cut 23 | endif 24 | cd ../intf 25 | cd $ref_id"_"$rep_id 26 | ln -s ../../topo/landmask_ra.grd . 27 | endif 28 | 29 | echo " " 30 | echo "SNAPHU.CSH - START" 31 | echo "threshold_snaphu: $threshold_snaphu" 32 | 33 | $OSARIS_PATH/lib/GMTSAR-mods/snaphu_OSARIS.csh $threshold_snaphu $defomax $region_cut 34 | 35 | echo "SNAPHU.CSH - END" 36 | cd ../.. 37 | else 38 | echo "" 39 | echo "SKIP UNWRAP PHASE" 40 | endif 41 | endif 42 | 43 | ########################### 44 | # 6 - start from geocode # 45 | ########################### 46 | 47 | if ($stage <= 6) then 48 | cd intf 49 | set ref_id = `grep SC_clock_start ../SLC/$master.PRM | awk '{printf("%d",int($3))}' ` 50 | set rep_id = `grep SC_clock_start ../SLC/$slave.PRM | awk '{printf("%d",int($3))}' ` 51 | cd $ref_id"_"$rep_id 52 | echo " " 53 | echo "GEOCODE.CSH - START" 54 | rm raln.grd ralt.grd 55 | if ($topo_phase == 1) then 56 | rm trans.dat 57 | ln -s ../../topo/trans.dat . 58 | echo "threshold_geocode: $threshold_geocode" 59 | $OSARIS_PATH/lib/GMTSAR-mods/geocode_OSARIS.csh $threshold_geocode 60 | else 61 | echo "topo_ra is needed to geocode" 62 | exit 1 63 | endif 64 | echo "GEOCODE.CSH - END" 65 | cd ../.. 
66 | endif 67 | 68 | # end 69 | 70 | -------------------------------------------------------------------------------- /modules/unstable_coh_metric/UCM-batch.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | start=`date +%s` 4 | 5 | echo; echo "Starting UCM processing ..."; echo 6 | 7 | UCM_work_PATH=$1 8 | UCM_output_PATH=$2 9 | corr_file=$3 10 | high_corr_file=$4 11 | high_corr_threshold=$5 12 | boundary_box=$6 13 | 14 | cd $UCM_work_PATH/input 15 | 16 | 17 | echo "Input variables:" 18 | echo "UCM_work_PATH: $UCM_work_PATH" 19 | echo "UCM_output_PATH: $UCM_output_PATH" 20 | echo "corr_file: $corr_file" 21 | echo "high_corr_file: $high_corr_file" 22 | echo "high_corr_threshold: $high_corr_threshold" 23 | echo "boundary_box: $boundary_box" 24 | 25 | 26 | 27 | echo; echo "Grdinfo high corr file:" 28 | gmt grdinfo $high_corr_file 29 | 30 | echo "Extracting high coherence areas (threshold: $high_corr_threshold)" 31 | gmt grdclip $high_corr_file -GHC_$high_corr_file -R$boundary_box -V -Sb$high_corr_threshold/NaN; 32 | 33 | echo "Now working on:"; echo "Corr file: $corr_file"; echo "High corr file: $high_corr_file" 34 | echo "Cutting files to same extent ..." 35 | 36 | gmt grdcut $corr_file -G$UCM_work_PATH/cut_files/$corr_file -R$boundary_box -V 37 | gmt grdcut HC_$high_corr_file -G$UCM_work_PATH/cut_files/HC_$high_corr_file -R$boundary_box -V 38 | # cut2same_extent 39 | 40 | 41 | echo; echo "Processing Unstable Coherence Metric ..." 
42 | cd $UCM_work_PATH/cut_files 43 | UCM_file="${high_corr_file:0:8}-${high_corr_file:10:8}---${corr_file:0:8}-${corr_file:10:8}-UCM.grd" 44 | echo "gmt grdmath $high_corr_file $corr_file SUB -V1 = $work_PATH/UCM/temp/$UCM_file" 45 | gmt grdmath HC_$high_corr_file $corr_file SUB -V1 = $UCM_work_PATH/temp/$UCM_file 46 | 47 | cd $UCM_work_PATH/temp 48 | echo "gmt grdclip $UCM_file -G$output_PATH/UCM/$UCM_file -Sb0/NaN" 49 | gmt grdclip $UCM_file -G$UCM_output_PATH/$UCM_file -Sb0/NaN 50 | echo; echo 51 | 52 | if [ -f $UCM_output_PATH/$UCM_file ]; then status_UCM=1; else status_UCM=0; fi 53 | 54 | end=`date +%s` 55 | runtime=$((end-start)) 56 | 57 | echo "${high_corr_file:7:8}-${high_corr_file:30:8} ${corr_file:7:8}-${corr_file:30:8} $SLURM_JOB_ID $runtime $status_UCM" >> $output_PATH/Reports/PP-UCM-stats.tmp 58 | 59 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($runtime/86400)) $(($runtime%86400/3600)) $(($runtime%3600/60)) $(($runtime%60)) 60 | -------------------------------------------------------------------------------- /lib/process-stack.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ################################################################# 4 | # 5 | # Process a stack of S1 scenes. 6 | # 7 | # Scenes and orbits must be available in data_swath[nr].in files 8 | # created by prepare_data.sh. Processing of coherence and inter- 9 | # ferograms are conducted in individual SLURM jobs for each 10 | # scene pair. 11 | # 12 | # Usage: process_stack.sh [config file] 13 | # 14 | ################################################################ 15 | 16 | if [ $# -eq 0 ]; then 17 | echo 18 | echo "Usage: process_stack.sh config_file [supermaster]" 19 | echo 20 | elif [ ! -f $1 ]; then 21 | echo 22 | echo "Cannot open $1. Please provide a valid config file." 
23 | echo 24 | else 25 | 26 | echo 27 | echo "- - - - - - - - - - - - - - - - - - - -" 28 | echo " Starting STACK processing ..." 29 | echo "- - - - - - - - - - - - - - - - - - - -" 30 | echo 31 | 32 | config_file=$1 33 | source $config_file 34 | echo "Config file: $config_file" 35 | 36 | OSARIS_PATH=$( pwd ) 37 | echo "GSP directory: $OSARIS_PATH" 38 | 39 | work_PATH=$base_PATH/$prefix/Processing 40 | # Path to working directory 41 | 42 | output_PATH=$base_PATH/$prefix/Output 43 | # Path to directory where all output will be written 44 | 45 | log_PATH=$base_PATH/$prefix/Log 46 | # Path to directory where the log files will be written 47 | 48 | mkdir -p $work_PATH/Stack 49 | 50 | cd $work_PATH/raw 51 | 52 | echo "SWATH2PROC: $SAR_sensor" 53 | printf '%s\n' "${swath_to_process[@]}" 54 | echo $swath_to_process 55 | 56 | for swath in ${swaths_to_process[@]}; do 57 | 58 | echo "SWATH: $swath" 59 | echo - - - - - - - - - - - - - - - - 60 | echo "Launching SLURM batch jobs" 61 | echo 62 | echo "Processing logs will be written to $log_PATH" 63 | echo "Use tail -f [logfile] to monitor the SLURM tasks" 64 | echo 65 | 66 | slurm_jobname="$slurm_jobname_prefix-stack" 67 | 68 | sbatch \ 69 | --ntasks=10 \ 70 | --output=$log_PATH/GSP-%j-stack \ 71 | --error=$log_PATH/GSP-%j-stack \ 72 | --workdir=$work_PATH \ 73 | --job-name=$slurm_jobname \ 74 | --qos=$slurm_qos \ 75 | --account=$slurm_account \ 76 | --partition=$slurm_partition \ 77 | --mail-type=$slurm_mailtype \ 78 | $OSARIS_PATH/lib/PP-stack.sh \ 79 | data_swath$swath.in \ 80 | $config_file \ 81 | $OSARIS_PATH/$gmtsar_config_file \ 82 | $OSARIS_PATH 83 | 84 | 85 | done 86 | fi 87 | 88 | -------------------------------------------------------------------------------- /doc/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | Changelog 2 | All notable changes to this OSARIS will be documented in this file. 
3 | 4 | The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) 5 | and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 6 | 7 | 8 | ## [0.7.2] - 2019-02-07 9 | ### Added 10 | - Parallel processing option for Summary PDF Module 11 | - Improved burst handling, now also stripping unused bursts in single slice configurations 12 | 13 | ### Bugs fixed 14 | - Reporting in GACOS correction module 15 | - File downloads 16 | 17 | 18 | ## [0.7.1] - 2019-01-28 19 | ### Bugs fixed 20 | - File downloads 21 | 22 | 23 | ## [0.7.0] - 2019-01-25 24 | ### Added 25 | - Functionality to merge multiple swaths 26 | - Cutting of output files to an area of interest defined by boundary box coordinates in the config file 27 | - Module 'GACOS Correction' to handle atmospheric disturbances 28 | - Module 'Detrend' to remove large-scale trends 29 | - Module 'Preview Files' generating PNGs and KMLs 30 | - Tool to shift coordinates from 0/360 to -180/180 notation 31 | - ASF as alternative provider for S1 orbits and scenes 32 | 33 | ### Changed 34 | - Output directory structure: now containing only one directory per dataset without sub-directories 35 | - Output file naming: now consistently beginning with scene dates in the format YYYYMMDD--YYYYMMDD 36 | - Renamed module 'Create Summary PDF' to 'Summary PDF' and modified to new directory structure and file names 37 | - Renamed module 'Homogenize Intfs' to 'Harmonize Grids' and made it much more flexible 38 | - Renamed module 'Simple PSI' to 'Stable Ground Point Identification' 39 | - Moved login credentials to separate file (from main configuration file) 40 | 41 | 42 | ## [0.6.0] - 2018-04-18 43 | ### Added 44 | - Module "Create PDF Summary" to generate a visual overview of key processing results 45 | - Module "Crop" to crop grid files to an extent given by coordinates 46 | - Module "Statistics" generating statistics for grid files 47 | - SNAPHU connected components as output file 48 | - 
Directory "tools" for supplementary routines to be configured and run manually. 49 | - Tool "pyStatisticPlots" to create box and whisker plots from output of the Statistics module 50 | - Template for new modules 51 | - Options to skip processing steps 52 | 53 | ### Changed 54 | - lib/z_min_max.sh -> Parameter order 55 | - File names of output files and directories consitently use master/slave dates first in YYYYMMDD format 56 | - Module Simple PSI configuration to use only a sub-region 57 | - Fixed hardcoded path bug in p2p script 58 | - Fixed bugs in reverse pairs processing functionality 59 | 60 | 61 | ### Removed 62 | - Directory /lib/inlcude. Scripts were moved to lib and modified to be called directly instead of being inlcuded. 63 | -------------------------------------------------------------------------------- /modules/__module_template__/__module_template__.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # Template for new OSARIS modules. 6 | # 7 | # Put module description here. If applicable, include infos on input 8 | # and output data. 9 | # 10 | # Make sure that this file, the module directory, the related 11 | # config file all have the same basename. In the first line below, 12 | # replace __module_template__ with this name. 13 | # Do not use any special characters in your module name. 
14 | # 15 | # You may use the following PATH variables: 16 | # $OSARIS_PATH -> OSARIS' program directory 17 | # $work_PATH -> Processing directory of a run 18 | # $output_PATH -> Output directory of a run 19 | # $log_PATH -> Log file directory of a run 20 | # $topo_PATH -> Directory with dem.grd used by GMTSAR 21 | # $oribts_PATH -> Directory containing the orbit files (NB: variable name is spelled 'oribts' upstream — verify before renaming) 22 | # 23 | # 24 | # Author, year 25 | # 26 | ##################################################################### 27 | 28 | module_name="_choose_a_module_name_" 29 | 30 | if [ -z $module_config_PATH ]; then 31 | echo "Parameter module_config_PATH not set in main config file. Setting to default:" 32 | echo "  $OSARIS_PATH/config" 33 | module_config_PATH="$OSARIS_PATH/config" 34 | elif [[ "$module_config_PATH" != /* ]] && [[ "$module_config_PATH" != "$OSARIS_PATH"* ]]; then 35 | module_config_PATH="${OSARIS_PATH}/config/${module_config_PATH}" 36 | fi 37 | 38 | if [ ! -d "$module_config_PATH" ]; then 39 | echo "ERROR: $module_config_PATH is not a valid directory. Check parameter module_config_PATH in main config file. Exiting ..." 40 | exit 2 41 | fi 42 | 43 | if [ ! -f "${module_config_PATH}/${module_name}.config" ]; then 44 | echo 45 | echo "Cannot open ${module_name}.config in ${module_config_PATH}. Please provide a valid config file." 46 | echo 47 | else 48 | # Start runtime timer 49 | module_start=`date +%s` 50 | 51 | # Include the config file 52 | source ${module_config_PATH}/${module_name}.config 53 | 54 | 55 | 56 | ############################ 57 | # Module actions start here 58 | 59 | echo "Hello, I am the OSARIS module ${module_name}." 60 | echo; echo "Variable example_var is set to $example_var ..." 
61 | 62 | # Module actions end here 63 | ########################### 64 | 65 | 66 | 67 | # Stop runtime timer and print runtime 68 | module_end=`date +%s` 69 | module_runtime=$((module_end-module_start)) 70 | 71 | echo 72 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n\n' \ 73 | $(($module_runtime/86400)) \ 74 | $(($module_runtime%86400/3600)) \ 75 | $(($module_runtime%3600/60)) \ 76 | $(($module_runtime%60)) 77 | echo 78 | fi 79 | -------------------------------------------------------------------------------- /tools/shift_longitude_coords.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # Helper tool to change grid longitude notation from 0/360 (OSARIS 6 | # and GMTSAR default) to -180/180. 7 | # 8 | # David Loibl, 2018 9 | # 10 | ##################################################################### 11 | 12 | 13 | if [ $# -lt 1 ]; then 14 | echo 15 | echo "Helper tool to change grid longitude notation from 0/360 (OSARIS " 16 | echo "and GMTSAR default) to -180/180." 17 | echo 18 | echo "Usage: shift_longitude_coords.sh input_path [output_path]" 19 | echo 20 | echo " Recalculate longitude notation of all grid files in input directory " 21 | echo " to -180/180. If an output directory is specified, results will be " 22 | echo " saved there, otherwise the input files will be overwritten." 23 | echo 24 | echo " Arguments: " 25 | echo " Input path -> The directory containing grid files" 26 | echo " Output path (opt.) -> Output grids will be written here" 27 | echo 28 | echo " Output:" 29 | echo " Series of .grd files with -180/180° longitude notation."; echo 30 | 31 | else 32 | long180_start=`date +%s` 33 | echo; echo "Changing longitudes to -180/180 value range ..." 34 | 35 | # Read attributes and setup environment 36 | grid_input_PATH=$1 37 | if [ ! 
-z $2 ]; then 38 | long180_output_PATH=$2 39 | else 40 | long180_output_PATH=$grid_input_PATH 41 | fi 42 | 43 | mkdir -p $long180_output_PATH 44 | 45 | if [ ! -d "$grid_input_PATH" ]; then 46 | echo; echo "ERROR: Directory $grid_input_PATH does not exist. Exiting ..." 47 | else 48 | 49 | cd $grid_input_PATH 50 | 51 | grid_files=($( ls *.grd )) 52 | if [ ${#grid_files} -eq 0 ]; then 53 | echo "No grid files found in ${grid_input_PATH}. Exiting ..." 54 | else 55 | echo "Found ${#grid_files} in $grid_input_PATH"; echo 56 | for grid_file in ${grid_files[@]}; do 57 | coords=$( gmt grdinfo -I- $grid_file ) 58 | coords=${coords:2} 59 | coord_array=( ${coords//\// } ) 60 | new_lon_min=$( echo "${coord_array[0]} - 360" | bc -l ) 61 | new_lon_max=$( echo "${coord_array[1]} - 360" | bc -l ) 62 | echo "New longitude coordinate range for $grid_file is" 63 | echo "$new_lon_min - $new_lon_max"; echo 64 | gmt grdedit $grid_file -R${new_lon_min}/${new_lon_max}/${coord_array[2]}/${coord_array[3]} -G${long180_output_PATH}/$grid_file -V 65 | done 66 | fi 67 | fi 68 | 69 | 70 | long180_end=`date +%s` 71 | 72 | long180_runtime=$((long180_end-long180_start)) 73 | 74 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($long180_runtime/86400)) $(($long180_runtime%86400/3600)) $(($long180_runtime%3600/60)) $(($long180_runtime%60)) 75 | echo 76 | 77 | 78 | fi 79 | -------------------------------------------------------------------------------- /modules/timeseries_xy/timeseries_xy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # Sample a series of grid values through a stack for a given xy 6 | # cooridnate. 7 | # 8 | # Requires a file 'timeseries_xy.config' in the OSARIS config folder. 9 | # Get startet by copying the config_template file from the templates 10 | # folder and fit it to your setup. 
11 | # 12 | David Loibl, 2018 13 | # 14 | ##################################################################### 15 | 16 | module_name="timeseries_xy" 17 | 18 | if [ -z $module_config_PATH ]; then 19 | echo "Parameter module_config_PATH not set in main config file. Setting to default:" 20 | echo "  $OSARIS_PATH/config" 21 | module_config_PATH="$OSARIS_PATH/config" 22 | elif [[ "$module_config_PATH" != /* ]] && [[ "$module_config_PATH" != "$OSARIS_PATH"* ]]; then 23 | module_config_PATH="${OSARIS_PATH}/config/${module_config_PATH}" 24 | fi 25 | 26 | if [ ! -d "$module_config_PATH" ]; then 27 | echo "ERROR: $module_config_PATH is not a valid directory. Check parameter module_config_PATH in main config file. Exiting ..." 28 | exit 2 29 | fi 30 | 31 | if [ ! -f "${module_config_PATH}/${module_name}.config" ]; then 32 | echo 33 | echo "Cannot open ${module_name}.config in ${module_config_PATH}. Please provide a valid config file." 34 | echo 35 | else 36 | # Start runtime timer 37 | TS_start_time=`date +%s` 38 | 39 | # Include the config file 40 | source ${module_config_PATH}/${module_name}.config 41 | 42 | 43 | mkdir -p $work_PATH/timeseries_xy 44 | cd $work_PATH/timeseries_xy 45 | 46 | for coordset in ${TS_coordinates[@]}; do 47 | echo $coordset >> TS_sample_locations.xy 48 | done 49 | 50 | 51 | cd $TS_input_PATH 52 | 53 | folders=($( ls -d */ )) 54 | for folder in "${folders[@]}"; do 55 | folder=${folder::-1} 56 | 57 | for grdfile in ${TS_gridfiles[@]}; do 58 | values=$( gmt grdtrack $work_PATH/timeseries_xy/TS_sample_locations.xy -G$folder/$grdfile ) 59 | # echo "Gridfile: $grdfile" 60 | # echo "Values: $values" 61 | # echo "Gridfile: $grdfile, Values: $values" >> $output_PATH/timeseries_string_${grdfile}.csv 62 | # echo "${grdfile:10:8},${grdfile:33:8},${value:16}" >> $output_PATH/timeseries_string_${grdfile}.csv 63 | while read value; do  # BUGFIX: was 'for read value; do' — invalid bash; pairs with 'done <<< $values' 64 | csv_string=$( echo $value | awk '{ print $1,$2,$3 }') 65 | echo "$folder,$csv_string" >> 
$output_PATH/timeseries_${grdfile}.csv 66 | done <<< $values 67 | done 68 | done 69 | 70 | 71 | 72 | TS_end_time=`date +%s` 73 | TS_runtime=$((TS_end_time - TS_start_time)) 74 | 75 | printf 'Elapsed wall clock time:\t %02dd %02dh:%02dm:%02ds\n' $(($TS_runtime/86400)) $(($TS_runtime%86400/3600)) $(($TS_runtime%3600/60)) $(($TS_runtime%60)) >> $output_PATH/Reports/timeseries_xy.report 76 | 77 | 78 | fi 79 | -------------------------------------------------------------------------------- /lib/crop-dem.sh: -------------------------------------------------------------------------------- 1 | # Extract from Ziyadins script 2 | # make_dem_gmtsar.csh 3 | 4 | if ($PS == "SM" ) then 5 | # koordinates of subswath 6 | set lo = `grep longitude $workdir/SLC/$master/*00[$swath].xml | awk -F'[<|>]' '{print $3}' | gmtinfo -C | awk '{printf "%2.3f %2.3f\n", $1-0.1,$2+0.1}'` 7 | set la = `grep latitude $workdir/SLC/$master/*00[$swath].xml | awk -F'[<|>]' '{print $3}' | gmtinfo -C | awk '{printf "%2.3f %2.3f\n", $1-0.1,$2+0.1}'` 8 | set r = $lo[1]/$lo[2]/$la[1]/$la[2] 9 | # coordinates of all images 10 | else 11 | set lo = `grep longitude $workdir/SLC/*/*00[$swath].xml | awk -F'[<|>]' '{print $3}' | gmtinfo -C | awk '{printf "%2.3f %2.3f\n", $1-0.1,$2+0.1}'` 12 | set la = `grep latitude $workdir/SLC/*/*00[$swath].xml | awk -F'[<|>]' '{print $3}' | gmtinfo -C | awk '{printf "%2.3f %2.3f\n", $1-0.1,$2+0.1}'` 13 | set r = $lo[1]/$lo[2]/$la[1]/$la[2] 14 | endif 15 | else if ($sensor == "TSX") then 16 | set lo = `grep lon $workdir/SLC/$workdir/SLC/$master/*.xml | awk -F'[<|>]' '{print $3}' | gmtinfo -C | awk '{printf "%2.2f %2.2f\n", $1-0.2,$2+0.2}'` 17 | set la = `grep lat $workdir/SLC/$workdir/SLC/$master/*.xml | awk -F'[<|>]' '{print $3}' | gmtinfo -C | awk '{printf "%2.2f %2.2f\n", $1-0.2,$2+0.2}'` 18 | set r = $lo[1]/$lo[2]/$la[1]/$la[2] 19 | else 20 | echo "No dem file can be made for other sensors" 21 | set PPID = `ps -ef | awk -v pid="$$" '{if ($2 == pid) {print $3}}'` 22 | kill 
$PPID 23 | exit 1 24 | endif 25 | 26 | echo "" 27 | echo " region covering the frames = -R$r" 28 | echo "" 29 | 30 | cd $workdir/topo 31 | # 32 | # check if it is already done! 33 | # 34 | if (! -e dem.grd) then 35 | echo "cropping dem for swath $swath" 36 | echo "" 37 | grdcut $dem_file -R$r -G${workdir}/topo/cut.grd 38 | grdclip -Sb-1000/NaN ${workdir}/topo/cut.grd -G${workdir}/topo/dem.grd 39 | if ($sim == 1) then 40 | goto makesim 41 | endif 42 | endif 43 | 44 | # check if the DEM is ok. if yes, skip cropping a new dem 45 | if ( `grdinfo dem.grd -C |awk '{printf "%2.3f %2.3f %2.3f %2.3f\n", $2,$3,$4,$5}' | awk '{if ($1 > '$lo[1]' || $2 < '$lo[2]'|| $3 > '$la[1]'|| $4 < '$la[1]' ) print 1;else print 0 }'` == 1 ) then 46 | echo "cropping dem for swath $swath" 47 | echo "" 48 | grdcut $dem_file -R$r -G${workdir}/topo/cut.grd 49 | grdclip -Sb-1000/NaN ${workdir}/topo/cut.grd -G${workdir}/topo/dem.grd 50 | # it is a new swath. so previous simulation files should be removed 51 | if (-e topo_ra.grd ) then 52 | rm -f topo_ra.grd trans.dat 53 | endif 54 | if ($sim == 1) then 55 | goto makesim 56 | endif 57 | else 58 | # dem file is correct 59 | echo " dem file is already made" 60 | echo "" 61 | # do dem simulation if requested 62 | if ($sim == 1) then 63 | if (! -e topo_ra.grd) then 64 | if (-e trans.dat ) then 65 | rm -f trans.* 66 | endif 67 | goto makesim 68 | else 69 | echo " topo_ra is already made" 70 | endif 71 | endif 72 | endif 73 | -------------------------------------------------------------------------------- /lib/sbas.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ################################################################# 4 | # 5 | # Preparation of SAR data sets. 6 | # Find matching orbits and write data.in files for each swath. 
7 | # 8 | # Usage: prepare_data.sh config_file 9 | # 10 | ################################################################ 11 | 12 | 13 | 14 | if [ $# -ne 1 ]; then 15 | echo 16 | echo "Usage: prepare_data.sh config_file" 17 | echo 18 | # elif [ ! -f $1 ]; then 19 | # echo 20 | # echo "Cannot open $1. Please provide a valid config file." 21 | # echo 22 | else 23 | 24 | echo 25 | echo "- - - - - - - - - - - - - - - - - - - -" 26 | echo " Starting data preparation ..." 27 | echo "- - - - - - - - - - - - - - - - - - - -" 28 | echo 29 | 30 | 31 | 32 | OSARIS_PATH="/home/loibldav/Git/osaris" 33 | work_PATH="/data/scratch/loibldav/GSP/Dhaka-DSC-vh/Processing" 34 | output_PATH="/data/scratch/loibldav/GSP/Dhaka-DSC-vh/Output" 35 | 36 | # ls /home/user/area/Sentinel-1/ascending/ -1 | sed -e 's/\.zip$//' > data_asc.txt 37 | # ls /home/user/area/Sentinel-1/ascending/ | awk '{print substr($0,18,8)}' > date_asc.txt 38 | # paste -d\ data_asc.txt date_asc.txt > data_asc_grub.txt 39 | 40 | # Date file 41 | date_file=$output_PATH/Reports/input_dates.list 42 | 43 | # Temporal baseline threshold 44 | temporal=100 45 | 46 | # Perpendicular baseline threshold 47 | perpendicular=100 48 | 49 | cd $work_PATH 50 | 51 | $OSARIS_PATH/lib/combination $date_file 52 | 53 | 54 | rm -f temp_bperp_combination.txt intf.in 55 | shopt -s extglob 56 | IFS=" " 57 | while read master slave 58 | do 59 | 60 | #calculate perpendicular baseline from combination 61 | dir=$(pwd) 62 | # cd $raw 63 | echo "$work_PATH/${master}--${slave}/F1/intf/" 64 | cd $work_PATH/${master}--${slave}/F1/intf/ 65 | PRM_files=($( ls *PRM )) 66 | 67 | master=${PRM_files[0]} 68 | slave=${PRM_files[1]} 69 | 70 | echo "master: $master" 71 | echo "slave: $slave" 72 | 73 | # SAT_baseline *$master*_ALL*.PRM *$slave*_ALL*.PRM > tmp 74 | # BPR=$(grep B_perpendicular tmp | awk '{print $3}') 75 | # #BPR2=$(echo "scale=0; $BPR" | bc) 76 | # BPR2=${BPR%.*} 77 | # rm -f tmp 78 | 79 | # cd $dir 80 | 81 | # #calculate temporal baseline from
combination 82 | # master_ts=$(date -d "$master" '+%s') 83 | # slave_ts=$(date -d "$slave" '+%s') 84 | # temporal=$(echo "scale=0; ( $slave_ts - $master_ts)/(60*60*24)" | bc) 85 | 86 | # #make parameter baseline 87 | # if [ "$temporal" -lt $2 ] 88 | # then 89 | # if [ "$BPR2" -gt -$3 ] && [ "$BPR2" -lt $3 ] 90 | # then 91 | # echo $master $slave $temporal $BPR >> temp_bperp_combination.txt 92 | # echo "S1A"$master"_ALL_F2:S1A"$slave"_ALL_F2" >> intf.in 93 | # fi 94 | # fi 95 | 96 | done < $work_PATH/result_combination.txt 97 | 98 | 99 | 100 | 101 | 102 | fi 103 | -------------------------------------------------------------------------------- /modules/grid_difference/grid_difference.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # OSARIS module to calculate difference between to files in a stack 6 | # 7 | # Provide a valid config file named 'grid_difference.config' in the config 8 | # directory; a template is provided in templates/module_config/ 9 | # 10 | # Requires processed GMTSAR result files (e.g., corr_ll.grd) as input. 11 | # 12 | # Output files will be written to $output_PATH/Grid-difference 13 | # 14 | # David Loibl, 2018 15 | # 16 | ##################################################################### 17 | 18 | 19 | if [ ! -f "$OSARIS_PATH/config/grid_difference.config" ]; then 20 | echo 21 | echo "Cannot open grid_difference.config in the OSARIS config folder. Please provide a valid config file." 22 | echo 23 | else 24 | 25 | source $OSARIS_PATH/config/grid_difference.config 26 | echo 27 | echo - - - - - - - - - - - - - - - - 28 | echo Calculating grid difference 29 | echo 30 | 31 | mkdir -p $output_PATH/Grid-difference 32 | slurm_jobname="$slurm_jobname_prefix-griddiff" 33 | 34 | echo "Filenames: ${grddiff_input_filenames[@]}" 35 | 36 | echo 37 | if [ ! 
-z $grddiff_input_filenames ]; then 38 | for grddiff_input_filename in "${grddiff_input_filenames[@]}"; do 39 | echo "Preparing batch jobs for $grddiff_input_filename ..." 40 | 41 | for swath in ${swaths_to_process[@]}; do 42 | if [ ! -z $grddiff_input_PATH ] && [ -d $grddiff_input_PATH ]; then 43 | echo "Input path set to $grddiff_input_PATH" 44 | else 45 | echo "No valid input path provided, using default" 46 | echo "$output_PATH/Pairs-forward" 47 | grddiff_input_PATH="$output_PATH/Pairs-forward/" 48 | fi 49 | 50 | cd $grddiff_input_PATH 51 | 52 | folders=($( ls -d *-F$swath/ )) 53 | 54 | for folder in "${folders[@]}"; do 55 | folder=${folder::-1} 56 | echo "Adding $grddiff_input_filename from $folder ..." 57 | 58 | if [ ! -z ${folder_1} ]; then 59 | folder_2=$folder_1 60 | folder_1=$folder 61 | 62 | grddiff_output_filename="${folder_1:0:8}--${folder_2:0:8}-F${swath}-${grddiff_input_filename::-4}-grddiff" 63 | 64 | sbatch \ 65 | --ntasks=1 \ 66 | --output=$log_PATH/OSS-GrdDiff-%j-out \ 67 | --error=$log_PATH/OSS-GrdDiff-%j-out \ 68 | --workdir=$work_PATH \ 69 | --job-name=$slurm_jobname \ 70 | --qos=$slurm_qos \ 71 | --account=$slurm_account \ 72 | --mail-type=$slurm_mailtype \ 73 | $OSARIS_PATH/lib/difference.sh "$grddiff_input_PATH/$folder_2/$grddiff_input_filename" "$grddiff_input_PATH/$folder_1/$grddiff_input_filename" "$output_PATH/Grid-difference" "$grddiff_output_filename" 0 2>&1 >>$logfile 74 | 75 | else 76 | folder_1=$folder 77 | fi 78 | done 79 | done 80 | done 81 | else 82 | echo "Variable grddiff_input_filenames not set in grid_difference.config, aborting ..." 83 | fi 84 | 85 | $OSARIS_PATH/lib/check-queue.sh $slurm_jobname 1 86 | 87 | 88 | if [ $clean_up -gt 0 ]; then 89 | echo 90 | echo - - - - - - - - - - - - - - - - 91 | echo Cleaning up a bit ... 92 | echo "Deleting processing folder ..." 
93 | rm -rf $output_PATH/Grid-difference/Temp-* 94 | fi 95 | 96 | 97 | fi 98 | -------------------------------------------------------------------------------- /modules/displacement/displacement.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # OSARIS module to calculate LOS displacement from unwrapped interferograms 6 | # 7 | # Input: 8 | # - Path to directory containing the unwrapped interferograms 9 | # 10 | # David Loibl, 2018 11 | # 12 | ##################################################################### 13 | 14 | module_name="displacement" 15 | 16 | if [ -z $module_config_PATH ]; then 17 | echo "Parameter module_config_PATH not set in main config file. Setting to default:" 18 | echo " $OSARIS_PATH/config" 19 | module_config_PATH="$OSARIS_PATH/config" 20 | elif [[ "$module_config_PATH" != /* ]] && [[ "$module_config_PATH" != "$OSARIS_PATH"* ]]; then 21 | module_config_PATH="${OSARIS_PATH}/config/${module_config_PATH}" 22 | fi 23 | 24 | if [ ! -d "$module_config_PATH" ]; then 25 | echo "ERROR: $module_config_PATH is not a valid directory. Check parameter module_config_PATH in main config file. Exiting ..." 26 | exit 2 27 | fi 28 | 29 | if [ ! -f "${module_config_PATH}/${module_name}.config" ]; then 30 | echo 31 | echo "Cannot open ${module_name}.config in ${module_config_PATH}. Please provide a valid config file." 32 | echo 33 | else 34 | # Start runtime timer 35 | displ_start=`date +%s` 36 | 37 | # Include the config file 38 | source ${module_config_PATH}/${module_name}.config 39 | 40 | 41 | 42 | ############################ 43 | # Module actions start here 44 | 45 | echo "Starting the Dislpacement module ..." 
46 | 47 | if [ -z "$unwrapped_intf_PATH" ]; then 48 | echo "No path to unwrapped interferograms specified in ${module_name}.config" 49 | echo "Trying default path $output_PATH/Pairs-forward/Interferograms-unwrapped ..." 50 | unwrapped_intf_PATH="$output_PATH/Pairs-forward/Interferograms-unwrapped" 51 | fi 52 | 53 | if [ ! -d "$unwrapped_intf_PATH" ]; then 54 | echo; echo "ERROR: Directory $unwrapped_intf_PATH does not exist. Exiting ..." 55 | else 56 | 57 | displ_output_PATH="$output_PATH/Displacement" 58 | mkdir -p $displ_output_PATH 59 | 60 | # TODO: read from .PRM files in Processing/raw 61 | radar_wavelength="0.554658" 62 | 63 | cd $unwrapped_intf_PATH 64 | 65 | unwrapped_intf_files=($( ls *.grd )) 66 | for unwrapped_intf_file in ${unwrapped_intf_files[@]}; do 67 | 68 | # gmt grdmath ${unwrapped_intf_PATH}/${unwrapped_intf_file} $radar_wavelength MUL -79.58 MUL = $displ_output_PATH/${unwrapped_intf_file::-4}-losdispl.grd -V 69 | 70 | gmt grdmath ${unwrapped_intf_PATH}/${unwrapped_intf_file} $radar_wavelength MUL 4 DIV PI DIV -100 MUL = $displ_output_PATH/${unwrapped_intf_file::-4}-losdispl.grd -V 71 | 72 | gmt grdedit -D//"mm"/1///"$PWD:t LOS displacement"/"equals negative range" $displ_output_PATH/${unwrapped_intf_file::-4}-losdispl.grd 73 | 74 | done 75 | 76 | fi 77 | 78 | 79 | 80 | # Module actions end here 81 | ########################### 82 | 83 | 84 | 85 | # Stop runtime timer and print runtime 86 | displ_end=`date +%s` 87 | displ_runtime=$((displ_end-displ_start)) 88 | 89 | echo 90 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n\n' \ 91 | $(($displ_runtime/86400)) \ 92 | $(($displ_runtime%86400/3600)) \ 93 | $(($displ_runtime%3600/60)) \ 94 | $(($displ_runtime%60)) 95 | echo 96 | fi 97 | -------------------------------------------------------------------------------- /lib/PP-multiswath.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | start=`date +%s` 4 | 5 | echo; echo 
"Starting multiswath processing ..." 6 | 7 | s1_pair=$1 8 | config_file=$2 9 | gmtsar_config_file=$3 10 | OSARIS_PATH=$4 11 | direction=$5 12 | 13 | echo "Reading configuration file $config_file" 14 | if [ ${config_file:0:2} = "./" ]; then 15 | config_file=$OSARIS_PATH/${config_file:2:${#config_file}} 16 | fi 17 | 18 | source $config_file 19 | 20 | 21 | work_PATH=$base_PATH/$prefix/Processing 22 | # Path to working directory 23 | 24 | output_PATH=$base_PATH/$prefix/Output 25 | # Path to directory where all output will be written 26 | 27 | log_PATH=$base_PATH/$prefix/Output/Log 28 | # Path to directory where the log files will be written 29 | 30 | cd $work_PATH/$s1_pair 31 | 32 | echo 33 | echo "- - - - - - - - - - - - - - - - - - - - " 34 | echo "Starting merge_unwrap_geocode ..." 35 | echo 36 | echo "Current path: $( pwd )" 37 | echo 38 | echo 39 | 40 | $OSARIS_PATH/lib/InSAR/merge_unwrap_geocode.sh \ 41 | $work_PATH/merge-files/${s1_pair}.list \ 42 | $OSARIS_PATH/$gmtsar_config_file \ 43 | $work_PATH/boundary-box.xyz 44 | 45 | source $OSARIS_PATH/$gmtsar_config_file 46 | 47 | echo; echo "Checking results and moving to files to Output directory ..."; echo 48 | 49 | if [ ! 
"$direction" == "reverse" ]; then 50 | mkdir -p $output_PATH/Amplitudes 51 | cp ./display_amp_ll.grd $output_PATH/Amplitudes/${s1_pair}-amplitude.grd 52 | if [ -f "$output_PATH/Amplitudes/${s1_pair}-amplitude.grd" ]; then status_amp=1; else status_amp=0; fi 53 | 54 | mkdir -p $output_PATH/Conn-comps 55 | cp ./con_comp_ll.grd $output_PATH/Conn-comps/${s1_pair}-conn_comp.grd 56 | if [ -f "$output_PATH/Conn-comps/${s1_pair}-conn_comp.grd" ]; then status_ccp=1; else status_ccp=0; fi 57 | 58 | mkdir -p $output_PATH/Coherences 59 | cp ./corr_ll.grd $output_PATH/Coherences/${s1_pair}-coherence.grd 60 | if [ -f "$output_PATH/Coherences/${s1_pair}-coherence.grd" ]; then status_coh=1; else status_coh=0; fi 61 | 62 | mkdir -p $output_PATH/Interferograms 63 | cp ./phasefilt_ll.grd $output_PATH/Interferograms/${s1_pair}-interferogram.grd 64 | if [ -f "$output_PATH/Interferograms/${s1_pair}-interferogram.grd" ]; then status_pha=1; else status_pha=0; fi 65 | 66 | # unwrapping_active=`grep threshold_snaphu $OSARIS_PATH/$gmtsar_config_file | awk '{ print $3 }'` 67 | 68 | if (( $(echo "$threshold_snaphu > 0" | bc -l ) )); then 69 | mkdir -p $output_PATH/Interf-unwrpd 70 | cp ./unwrap_ll.grd $output_PATH/Interf-unwrpd/${s1_pair}-interf_unwrpd.grd 71 | if [ -f "$output_PATH/Interf-unwrpd/${s1_pair}-interf_unwrpd.grd" ]; then status_unw=1; else status_unw=0; fi 72 | else 73 | status_unw=2 74 | fi 75 | else 76 | mkdir -p $output_PATH/Interf-unwrpd-rev 77 | cp ./unwrap_mask_ll.grd $output_PATH/Interf-unwrpd-rev/${s1_pair}-interf_unwrpd.grd 78 | if [ -f "$output_PATH/Interf-unwrpd-rev/${s1_pair}-interf_unwrpd.grd" ]; then status_unw=1; else status_unw=0; fi 79 | fi 80 | 81 | 82 | 83 | 84 | 85 | end=`date +%s` 86 | runtime=$((end-start)) 87 | 88 | echo; echo "Writing report ..."; echo 89 | 90 | echo "${s1_pair:0:8} ${s1_pair:10:8} $SLURM_JOB_ID $runtime $status_amp $status_coh $status_ccp $status_pha $status_unw $status_los" >> $output_PATH/Reports/PP-pairs-stats.tmp 91 | 92 | printf 
'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($runtime/86400)) $(($runtime%86400/3600)) $(($runtime%3600/60)) $(($runtime%60)) 93 | 94 | 95 | -------------------------------------------------------------------------------- /templates/module-config/summary_pdf.config.template: -------------------------------------------------------------------------------- 1 | ################################################################## 2 | # 3 | # Template configuration for the 'Create PDF Summary' module 4 | # 5 | # Copy to 'config' folder, rename to 'create_pdf_summary.config' 6 | # and fit to your machine's configuration. 7 | # 8 | ################################################################# 9 | 10 | page_orientation=1 11 | # 1: portrait 12 | # 2: landscape 13 | 14 | images_per_page=5 15 | # Number of image rows to be displayed on each page of the summary PDF. 16 | # Default is 5 17 | 18 | resolution=300 19 | # Image resolution in dpi. 20 | # Default is 300 (print quality). 21 | 22 | activate_PP=0 23 | # Activate Slurm-based parallel processing. 24 | # Caution: This requires ImageMagick to be installed on the processing nodes. 
25 | 26 | 27 | # Datasets to display 28 | 29 | DIRECTORY_1="$output_PATH/Interf-amplitudes" 30 | LABEL_1="Interferogram amplitude" 31 | HIST_EQ_1=1 32 | CPT_1="gray" 33 | RANGE_1="0/0.0015/0.0001" 34 | SHOW_SUPPL_1=1 35 | 36 | DIRECTORY_2="$output_PATH/Coherences" 37 | LABEL_2="Coherence" 38 | HIST_EQ_2=0 39 | CPT_2="jet" 40 | RANGE_2="0/1/0.1" 41 | SHOW_SUPPL_2=0 42 | 43 | DIRECTORY_3="$output_PATH/Conn-comps" 44 | LABEL_3="Connected Components" 45 | HIST_EQ_3=0 46 | CPT_3="gray" 47 | RANGE_3="auto" 48 | SHOW_SUPPL_3=0 49 | 50 | DIRECTORY_4="$output_PATH/Interf-unwrpd" 51 | LABEL_4="Unwrapped Interferogram" 52 | HIST_EQ_4=0 53 | CPT_4="cyclic" 54 | RANGE_4="-20/20/2" 55 | SHOW_SUPPL_4=0 56 | 57 | # In case harmonized displacements were generated, these are a much nicer dataset to display here: 58 | # DIRECTORY_4="$output_PATH/Harmonized-grids/Displacement" 59 | # LABEL_4="LOS Displacement" 60 | # HIST_EQ_4=0 61 | # CPT_4="cyclic" 62 | # RANGE_4="-49/40/5" 63 | # SHOW_SUPPL_4=0 64 | 65 | 66 | # All default GMT cpt files can be used 67 | # Range must be provided as "minimum_value/maximum_value/step" 68 | dem_cpt="#376a4e,#fae394,#8a5117,#7c7772,#ffffff" 69 | # dem_range="" 70 | 71 | AOI_REGION="11.22/22.22/33.33/44.44" 72 | # The extent of the detail maps in decimal degrees 73 | # Optional. If not set, the coordinates defined in the main config file will be used 74 | # Format: lon_min/lon_max/lat_min/lat_max 75 | 76 | 77 | overview_dem="/path/to/overview/dem.grd" 78 | # A DEM of the wider study region. 79 | # If not set, the DEM used by GMTSAR will be used. 
80 | 81 | OVERVIEW_REGION="11/22/33/44" 82 | # The extent of the map window in decimal degrees 83 | # Format: lon_min/lon_max/lat_min/lat_max 84 | 85 | # Supplementary vector data for map output 86 | # Shapefile and GMT files are valid as input 87 | reference_polygon="/path/to/reference_polygon.gmt" 88 | aux_polygon_1="/path/to/another_polygon.gmt" 89 | aux_polygon_2="/path/to/another_polygon2.shp" 90 | aux_line_1="/path/to/some_lines.shp" 91 | aux_line_2="/path/to/more_lines.shp" 92 | aux_point_1="/path/to/here_are_points.shp" 93 | aux_point_2="/path/to/more_points.grd" 94 | 95 | # Styling for suppl. vector data sets in GMT syntax. 96 | # More info: http://gmt.soest.hawaii.edu/doc/5.4.3/psxy.html 97 | reference_polygon_style="-Wthin,red" 98 | aux_polygon_1_style="-Wthinnest,lightblue -Glightblue" 99 | aux_polygon_2_style="-Wthinnest,lightred -Glightred" 100 | aux_line_1_style="-Wthick,dotted,white" 101 | aux_line_2_style="-Wthinnest,black" 102 | aux_point_1_style="-Sc0.5c -Wred -Gorange" 103 | aux_point_2_style="-Sd0.5c -Wblack -Gwhite" 104 | 105 | 106 | 107 | -------------------------------------------------------------------------------- /modules/harmonize_grids/harmonize_grids.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # OSARIS module to harmonize a series of grid files relative to a 6 | # reference point. 7 | # 8 | # Shift all frid files in a directory by their offset to a 9 | # 'stable ground point' (by default the result from the 'SGP Identification' 10 | # module. Most commonly used to harmonize a time series of unwrapped 11 | # intereferograms or LoS displacement grids. 12 | # 13 | # Input: 14 | # - path(s) to one or more directories containing grid files 15 | # - xy coordinates of stable ground point (default SGPI result) 16 | # 17 | # Output: 18 | # - harmonized series of .grd files. 
19 | # 20 | # 21 | # David Loibl, 2018 22 | # 23 | ##################################################################### 24 | 25 | 26 | module_name="harmonize_grids" 27 | 28 | if [ -z $module_config_PATH ]; then 29 | echo "Parameter module_config_PATH not set in main config file. Setting to default:" 30 | echo " $OSARIS_PATH/config" 31 | module_config_PATH="$OSARIS_PATH/config" 32 | elif [[ "$module_config_PATH" != /* ]] && [[ "$module_config_PATH" != "$OSARIS_PATH"* ]]; then 33 | module_config_PATH="${OSARIS_PATH}/config/${module_config_PATH}" 34 | fi 35 | 36 | if [ ! -d "$module_config_PATH" ]; then 37 | echo "ERROR: $module_config_PATH is not a valid directory. Check parameter module_config_PATH in main config file. Exiting ..." 38 | exit 2 39 | fi 40 | 41 | if [ ! -f "${module_config_PATH}/${module_name}.config" ]; then 42 | echo 43 | echo "Cannot open ${module_name}.config in ${module_config_PATH}. Please provide a valid config file." 44 | echo 45 | else 46 | # Start runtime timer 47 | HG_start=`date +%s` 48 | 49 | # Include the config file 50 | source ${module_config_PATH}/${module_name}.config 51 | 52 | 53 | ############################ 54 | # Module actions start here 55 | 56 | echo; echo "Harmonizing grids to reference point ..." 57 | 58 | HG_output_PATH="$output_PATH/Harmonized-grids" 59 | HG_work_PATH="$work_PATH/Harmonized-grids" 60 | 61 | mkdir -p $HG_output_PATH 62 | mkdir -p $HG_work_PATH 63 | 64 | if [ ! 
-z "$ref_point_xy_coords" ]; then 65 | echo "Reference point is set to $ref_point_xy_coords" 66 | ref_point_array=(${ref_point_xy_coords//\// }) 67 | ref_point_lon=${ref_point_array[0]} 68 | ref_point_lat=${ref_point_array[1]} 69 | elif [ -f $output_PATH/SGPI/sgp-coords.xy ]; then 70 | # cat $output_PATH/SGPI/sgp_coords.xy > $HG_work_PATH/ref_point.xy 71 | ref_point_lon=($( cat $output_PATH/SGPI/sgp-coords.xy | awk '{ print $1 }' )) 72 | ref_point_lat=($( cat $output_PATH/SGPI/sgp-coords.xy | awk '{ print $2 }' )) 73 | else 74 | echo "ERROR: No reference point coordinates found in harmonize_grids.config or sgp_coords.xy." 75 | echo "Exiting module 'Harmonize Grids'." 76 | fi 77 | 78 | for grid_dir in ${grid_input_PATH[@]}; do 79 | if [ ! -d "$grid_dir" ]; then 80 | echo; echo "ERROR: Directory $grid_dir does not exist. Skipping ..." 81 | else 82 | grid_dir_basename=$( basename "$grid_dir" ) 83 | mkdir -p ${HG_output_PATH}/${grid_dir_basename} 84 | 85 | $OSARIS_PATH/lib/harmonize_grids.sh "$grid_dir" "${ref_point_lon}/${ref_point_lat}" "${HG_output_PATH}/${grid_dir_basename}" 86 | 87 | fi 88 | done 89 | 90 | HG_end=`date +%s` 91 | 92 | HG_runtime=$((HG_end-HG_start)) 93 | 94 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($HG_runtime/86400)) $(($HG_runtime%86400/3600)) $(($HG_runtime%3600/60)) $(($HG_runtime%60)) 95 | echo 96 | 97 | 98 | fi 99 | -------------------------------------------------------------------------------- /lib/mask-grdseries.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # Harmonize a series of grid files relative to a reference point. 6 | # 7 | # David Loibl, 2018 8 | # 9 | ##################################################################### 10 | 11 | 12 | if [ $# -lt 3 ]; then 13 | echo 14 | echo "Harmonize a series of grid files relative to a reference point." 
15 | echo 16 | echo "Usage: harmonize_grids.sh input_path reference_point output_path" 17 | echo 18 | echo " Shift all grid files in input directory by their offset relative to a " 19 | echo " 'stable ground point'. Most commonly used to harmonize a time series of " 20 | echo " unwrapped intereferograms or LoS displacement grids." 21 | echo 22 | echo " Arguments: " 23 | echo " Input path -> The directory containing grid files" 24 | echo " Reference point -> Coordinates of reference point in decimal degrees using the format" 25 | echo " Longitude/Latitude, e.g. 165.1/-12.5" 26 | echo " Output path -> Output grids will be written here" 27 | echo 28 | echo " Output:" 29 | echo " Harmonized series of .grd files."; echo 30 | 31 | else 32 | HG_start=`date +%s` 33 | echo; echo "Harmonizing grids to reference point ..." 34 | 35 | # Read attributes and setup environment 36 | grid_input_PATH=$1 37 | ref_point_xy_coords=$2 38 | HG_output_PATH=$3 39 | HG_work_PATH="$work_PATH/Harmonize-Grids" 40 | 41 | echo "Reference point is set to $ref_point_xy_coords" 42 | ref_point_array=(${ref_point_xy_coords//\// }) 43 | echo "${ref_point_array[0]} ${ref_point_array[1]}" > $HG_work_PATH/ref_point.xy 44 | 45 | mkdir -p $HG_output_PATH 46 | mkdir -p $HG_work_PATH 47 | 48 | if [ ! -d "$grid_input_PATH" ]; then 49 | echo; echo "ERROR: Directory $grid_input_PATH does not exist. Skipping ..." 50 | else 51 | 52 | cd $grid_input_PATH 53 | 54 | grid_input_PATH_basename=$( basename "$PWD" ) 55 | mkdir -p ${HG_output_PATH}/${grid_input_PATH_basename} 56 | 57 | grid_files=($( ls *.grd )) 58 | for grid_file in ${grid_files[@]}; do 59 | 60 | # Get xy coordinates of 'stable ground point' from file and check the value the raster set has at this location. 
61 | gmt grdtrack $HG_work_PATH/ref_point.xy -G${grid_input_PATH}/${grid_file} >> $HG_work_PATH/${grid_input_PATH_basename}_ref_point_vals.xyz 62 | ref_point_grid_trk=$( gmt grdtrack ${HG_work_PATH}/ref_point.xy -G${grid_input_PATH}/${grid_file} ) 63 | 64 | if [ ! -z ${ref_point_grid_trk+x} ]; then 65 | ref_point_grid_val=$( echo "$ref_point_grid_trk" | awk '{ print $3 }') 66 | # if [ $debug -gt 1 ]; then echo "Stable ground diff ${grid_input_PATH}/${grid_file}: $ref_point_grid_val"; fi 67 | else 68 | echo "GMT grdtrack for stable ground yielded no result for ${grid_input_PATH}/${grid_file}. Skipping" 69 | fi 70 | 71 | if [ ! -z ${ref_point_grid_val+x} ]; then 72 | # Shift input grid so that the 'stable ground value' is zero 73 | gmt grdmath ${grid_input_PATH}/${grid_file} $ref_point_grid_val SUB = $HG_output_PATH/${grid_file::-4}-harmonized.grd -V 74 | else 75 | echo "Unwrap difference calculation for stable ground point failed in ${folder}. Skipping ..." 76 | fi 77 | done 78 | 79 | fi 80 | 81 | 82 | HG_end=`date +%s` 83 | 84 | HG_runtime=$((HG_end-HG_start)) 85 | 86 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($HG_runtime/86400)) $(($HG_runtime%86400/3600)) $(($HG_runtime%3600/60)) $(($HG_runtime%60)) 87 | echo 88 | 89 | 90 | fi 91 | -------------------------------------------------------------------------------- /lib/z_min_max.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # Helper tool to identify minimum and maximum values of grid files. 6 | # Option 1: Analyse all files in a given directory. 7 | # Option 2: Analyse all files with given file name in a series of directories. 8 | # 9 | # Input: 10 | # - Path where files reside in subdirs, e.g. Output/Pairs-forward/ 11 | # - File name, e.g. 
corr_ll.grd (optional, will acitvate multi-directory mode) 12 | # 13 | # Output: 14 | # - String z_min z_max z_min_file z_max_file 15 | # 16 | # 17 | # David Loibl, 2018 18 | # 19 | ##################################################################### 20 | 21 | 22 | if [ $# -lt 1 ]; then 23 | echo 24 | echo "Usage: z_min_max.sh path [file_name]" 25 | echo 26 | else 27 | mmz_PATH=$1 28 | 29 | cd $mmz_PATH 30 | 31 | mmz_count=1 32 | 33 | if [ $# -eq 1 ]; then 34 | # Analyse files in directory 35 | 36 | mmz_files=($( ls *.grd )) 37 | for mmz_file in "${mmz_files[@]}"; do 38 | if [ -f "$mmz_file" ]; then 39 | 40 | # Find min and max z values for a grd file. 41 | 42 | current_file=$mmz_PATH/$mmz_file 43 | 44 | current_z_min=$( gmt grdinfo $current_file | grep z_min | awk '{ print $3}' ) 45 | current_z_max=$( gmt grdinfo $current_file | grep z_min | awk '{ print $5}' ) 46 | 47 | if [ "$mmz_count" -eq 1 ]; then 48 | # First round, set min and max values to values from file 49 | z_min=$current_z_min 50 | z_max=$current_z_max 51 | z_min_file=$current_file 52 | z_max_file=$current_file 53 | else 54 | # Iteration, check if min/max from file are smaller/larger than previous ... 55 | if [ $( echo "$current_z_min < $z_min" | bc -l ) -eq 1 ]; then 56 | z_min=$current_z_min 57 | z_min_file=$current_file 58 | fi 59 | 60 | if [ $( echo "$current_z_max > $z_max" | bc -l ) -eq 1 ]; then 61 | z_max=$current_z_max 62 | z_max_file=$current_file 63 | fi 64 | fi 65 | 66 | mmz_count=$((mmz_count+1)) 67 | 68 | fi 69 | done 70 | else 71 | # Analyse files of given name in subdirectories 72 | 73 | mmz_file=$2 74 | swath=$3 75 | 76 | 77 | if [ -z $swath ]; then 78 | mmz_folders=($( ls -d */ )) 79 | else 80 | mmz_folders=($( ls -d *-F$swath/ )) 81 | fi 82 | 83 | for folder in "${mmz_folders[@]}"; do 84 | folder=${folder::-1} 85 | if [ -f "$folder/$mmz_file" ]; then 86 | 87 | # Find min and max z values for a grd file. 
88 | 89 | current_file=$mmz_PATH/$folder/$mmz_file 90 | 91 | current_z_min=$( gmt grdinfo $current_file | grep z_min | awk '{ print $3}' ) 92 | current_z_max=$( gmt grdinfo $current_file | grep z_min | awk '{ print $5}' ) 93 | 94 | if [ "$mmz_count" -eq 1 ]; then 95 | # First round, set min and max values to values from file 96 | z_min=$current_z_min 97 | z_max=$current_z_max 98 | z_min_file=$current_file 99 | z_max_file=$current_file 100 | else 101 | # Iteration, check if min/max from file are smaller/larger than previous ... 102 | if [ $( echo "$current_z_min < $z_min" | bc -l ) -eq 1 ]; then 103 | z_min=$current_z_min 104 | z_min_file=$current_file 105 | fi 106 | 107 | if [ $( echo "$current_z_max > $z_max" | bc -l ) -eq 1 ]; then 108 | z_max=$current_z_max 109 | z_max_file=$current_file 110 | fi 111 | fi 112 | 113 | mmz_count=$((mmz_count+1)) 114 | 115 | fi 116 | done 117 | fi 118 | 119 | echo "$z_min $z_max $z_min_file $z_max_file" 120 | 121 | fi 122 | -------------------------------------------------------------------------------- /modules/crop/crop.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # OSARIS module to crop grid files. 6 | # 7 | # David Loibl, 2018 8 | # 9 | ##################################################################### 10 | 11 | module_name="crop" 12 | 13 | if [ -z $module_config_PATH ]; then 14 | echo "Parameter module_config_PATH not set in main config file. Setting to default:" 15 | echo " $OSARIS_PATH/config" 16 | module_config_PATH="$OSARIS_PATH/config" 17 | elif [[ "$module_config_PATH" != /* ]] && [[ "$module_config_PATH" != "$OSARIS_PATH"* ]]; then 18 | module_config_PATH="${OSARIS_PATH}/config/${module_config_PATH}" 19 | fi 20 | 21 | if [ ! -d "$module_config_PATH" ]; then 22 | echo "ERROR: $module_config_PATH is not a valid directory. 
Check parameter module_config_PATH in main config file. Exiting ..." 23 | exit 2 24 | fi 25 | 26 | if [ ! -f "${module_config_PATH}/${module_name}.config" ]; then 27 | echo 28 | echo "Cannot open ${module_name}.config in ${module_config_PATH}. Please provide a valid config file." 29 | echo 30 | else 31 | # Start runtime timer 32 | module_start=`date +%s` 33 | 34 | # Include the config file 35 | source ${module_config_PATH}/${module_name}.config 36 | 37 | crop_output_PATH=$output_PATH/Crop 38 | mkdir -p $crop_output_PATH 39 | 40 | for crop_region_label in ${crop_region_labels[@]}; do 41 | mkdir -p $crop_output_PATH/$crop_region_label 42 | done 43 | 44 | if [ -d $crop_input_PATH ]; then 45 | 46 | cd $crop_input_PATH 47 | 48 | if [ "$crop_subdirs" -eq 0 ]; then 49 | 50 | # List and crop all files of specified 51 | 52 | crop_files=($( ls $crop_input_filenames )) 53 | crop_counter=0 54 | 55 | for crop_file in ${crop_files[@]}; do 56 | crop_region_counter=0 57 | for crop_region in ${crop_region_labels[@]}; do 58 | gmt grdcut $crop_file \ 59 | -G$crop_output_PATH/$crop_region/${crop_file::-4}-crop.grd \ 60 | -R${crop_regions[$crop_region_counter]} -V 61 | ((crop_region_counter++)) 62 | done 63 | ((crop_counter++)) 64 | done 65 | else 66 | folders=($( ls -d */ )) 67 | crop_counter=0 68 | for folder in "${folders[@]}"; do 69 | folder=${folder::-1} 70 | cd $folder 71 | 72 | echo "Now working in directory $folder ..." 73 | 74 | crop_files=($( ls $crop_input_filenames )) 75 | 76 | for crop_file in ${crop_files[@]}; do 77 | echo " Cropping $crop_file ..." 
78 | crop_region_counter=0 79 | for crop_region in ${crop_region_labels[@]}; do 80 | echo "gmt grdcut $crop_file -G$crop_output_PATH/$crop_region/${folder}-${crop_file::-4}-crop.grd -R${crop_regions[$crop_region_counter]} -V" 81 | gmt grdcut $crop_file \ 82 | -G$crop_output_PATH/$crop_region/${folder}-${crop_file::-4}-crop.grd \ 83 | -R${crop_regions[$crop_region_counter]} -V 84 | ((crop_region_counter++)) 85 | done 86 | ((crop_counter++)) 87 | done 88 | 89 | cd .. 90 | 91 | done 92 | fi 93 | else 94 | echo; echo "Error: $crop_input_PATH does not exist." 95 | echo "Variable crop_input_PATH must be set to a valid directory in crop.config. Exiting crop module."; echo 96 | fi 97 | 98 | 99 | 100 | 101 | # Stop runtime timer and print runtime 102 | module_end=`date +%s` 103 | module_runtime=$((module_end-module_start)) 104 | 105 | echo 106 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n\n' \ 107 | $(($module_runtime/86400)) \ 108 | $(($module_runtime%86400/3600)) \ 109 | $(($module_runtime%3600/60)) \ 110 | $(($module_runtime%60)) 111 | echo 112 | fi 113 | -------------------------------------------------------------------------------- /lib/PP-stack.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | start=`date +%s` 4 | echo "SLURM stack processing started" 5 | 6 | data_in=$1 7 | config_file=$2 8 | gmtsar_config_file=$3 9 | GSP_directory=$4 10 | 11 | 12 | if [ ${config_file:0:2} = "./" ]; then 13 | config_file=$GSP_directory/${config_file:2:${#config_file}} 14 | fi 15 | 16 | folder="Stack" 17 | 18 | echo "Reading configuration file $config_file" 19 | 20 | source $config_file 21 | 22 | work_PATH=$base_PATH/$prefix/Processing 23 | # Path to working directory 24 | 25 | output_PATH=$base_PATH/$prefix/Output 26 | # Path to directory where all output will be written 27 | 28 | log_PATH=$base_PATH/$prefix/Output/Log 29 | # Path to directory where the log files will be written 30 | 31 | 32 | 33 | 34 | 
#mkdir -pv $work_PATH/$folder/$job_ID/F$swath/raw
#mkdir -pv $work_PATH/$folder/$job_ID/F$swath/topo
#cd $work_PATH/$folder/$job_ID/F$swath/topo; ln -sf $topo_PATH/dem.grd .;

cd $work_PATH/raw

echo
echo "- - - - - - - - - - - - - - - - - - - - "
echo "Starting align_tops.csh with options:"
echo "Scene 1: $previous_scene"
echo "Orbit 1: $previous_orbit"
echo "Scene 2: $current_scene"
echo "Orbit 2: $current_orbit"
echo
echo "Current path: $( pwd )"
echo "align_tops.csh $previous_scene $previous_orbit $current_scene $current_orbit dem.grd"
echo
echo

# Preprocess the stack in two passes with GMTSAR's ESD batch script:
# mode 1 writes baseline_table.dat, mode 2 aligns the images.
# preproc_batch_tops.csh $work_PATH/raw/$data_in $work_PATH/raw/dem.grd 1
preproc_batch_tops_esd.csh $work_PATH/raw/$data_in $work_PATH/raw/dem.grd 1
cp $work_PATH/raw/baseline_table.dat $work_PATH/intf_all/
preproc_batch_tops_esd.csh $work_PATH/raw/$data_in $work_PATH/raw/dem.grd 2

# Hard-coded interferogram pair list for one specific test stack.
# NOTE(review): "S1A20160217_ALL_F3:S1A20160312_ALL_F3" appears twice;
# the last entry was probably meant to be a different pair (e.g. one
# involving S1A20160405) -- confirm intended pairing before fixing.
echo "S1A20160124_ALL_F3:S1A20160217_ALL_F3" > intf.in
echo "S1A20160124_ALL_F3:S1A20160312_ALL_F3" >> intf.in
echo "S1A20160124_ALL_F3:S1A20160405_ALL_F3" >> intf.in
echo "S1A20160217_ALL_F3:S1A20160312_ALL_F3" >> intf.in
echo "S1A20160217_ALL_F3:S1A20160312_ALL_F3" >> intf.in

cd ..

#$gmtsar_config_file

#align_tops.csh $previous_scene $previous_orbit $current_scene $current_orbit dem.grd

#cd $work_PATH/$folder/$job_ID/F$swath/raw/
#ln -sf $work_PATH/raw/$job_ID-aligned/*F$swath* .

#cd $work_PATH/$folder/$job_ID/F$swath/

echo
echo "- - - - - - - - - - - - - - - - - - - - "
echo "Starting intfs_tops with options:"
echo "./raw/intf.in"
echo "/home/loibldav/Scripts/gmtsar-sentinel-processing-chain/config/GMTSAR-golubin.config"

# NOTE(review): hard-coded, user-specific GMTSAR config path below.
# The script receives $gmtsar_config_file as argument 3 but does not
# use it here -- this looks like leftover development code; verify
# before running outside the original author's environment.
intf_tops.csh $work_PATH/raw/intf.in /home/loibldav/Scripts/gmtsar-sentinel-processing-chain/config/GMTSAR-golubin.config

# p2p_S1A_TOPS.csh
#$GSP_directory/lib/GMTSAR-mods/p2p_S1PPC.csh \
#    S1A${previous_scene:15:8}_${previous_scene:24:6}_F$swath \
#    S1A${current_scene:15:8}_${current_scene:24:6}_F$swath \
#    $GSP_directory/$gmtsar_config_file

#cd $work_PATH/$folder/$job_ID/F$swath/intf/
#intf_dir=($( ls ))

#output_intf_dir=$output_PATH/$folder/S1A${previous_scene:15:8}_${previous_scene:24:6}_F$swath"---"S1A${current_scene:15:8}_${current_scene:24:6}_F$swath

#mkdir -pv $output_intf_dir

#cp ./$intf_dir/*.grd $output_intf_dir
#cp ./$intf_dir/*.png $output_intf_dir
#cp ./$intf_dir/*.kml $output_intf_dir
#cp ./$intf_dir/*.ps $output_intf_dir
#cp ./$intf_dir/*.cpt $output_intf_dir
#cp ./$intf_dir/*.conf $output_intf_dir

# Stop the timer started at the top of the script and report runtime.
end=`date +%s`

runtime=$((end-start))

printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($runtime/86400)) $(($runtime%86400/3600)) $(($runtime%3600/60)) $(($runtime%60))
6 | # 7 | # Input: Min and max values, center at zero (optional) 8 | # Output: string "lower_boundary/upper_boundary/step" 9 | # 10 | # David Loibl, 2018 11 | # 12 | ##################################################################### 13 | 14 | if [ $# -lt 2 ]; then 15 | echo 16 | echo "Usage: steps_boundaries.sh min_value max_value [center_zero]" 17 | echo 18 | else 19 | min=$1 20 | max=$2 21 | if [ $# -eq 3 ]; then 22 | center_zero=$3 23 | else 24 | center_zero=0 25 | fi 26 | 27 | diff=$( echo "$max - $min" | bc ) 28 | if [ $( echo "$diff > 5000" | bc ) -eq 1 ]; then 29 | step=500 30 | elif [ $( echo "$diff > 2000" | bc ) -eq 1 ]; then 31 | step=200 32 | elif [ $( echo "$diff > 1000" | bc ) -eq 1 ]; then 33 | step=100 34 | elif [ $( echo "$diff > 500" | bc ) -eq 1 ]; then 35 | step=50 36 | elif [ $( echo "$diff > 200" | bc ) -eq 1 ]; then 37 | step=20 38 | elif [ $( echo "$diff > 100" | bc ) -eq 1 ]; then 39 | step=10 40 | elif [ $( echo "$diff > 50" | bc ) -eq 1 ]; then 41 | step=5 42 | elif [ $( echo "$diff > 20" | bc ) -eq 1 ]; then 43 | step=2 44 | elif [ $( echo "$diff > 10" | bc ) -eq 1 ]; then 45 | step=1 46 | elif [ $( echo "$diff > 5" | bc ) -eq 1 ]; then 47 | step="0.5" 48 | elif [ $( echo "$diff > 2" | bc ) -eq 1 ]; then 49 | step="0.2" 50 | elif [ $( echo "$diff > 1" | bc ) -eq 1 ]; then 51 | step="0.1" 52 | elif [ $( echo "$diff > 0.5" | bc ) -eq 1 ]; then 53 | step="0.05" 54 | elif [ $( echo "$diff > 0.2" | bc ) -eq 1 ]; then 55 | step="0.02" 56 | elif [ $( echo "$diff > 0.1" | bc ) -eq 1 ]; then 57 | step="0.01" 58 | elif [ $( echo "$diff > 0.05" | bc ) -eq 1 ]; then 59 | step="0.005" 60 | elif [ $( echo "$diff > 0.02" | bc ) -eq 1 ]; then 61 | step="0.002" 62 | elif [ $( echo "$diff > 0.01" | bc ) -eq 1 ]; then 63 | step="0.001" 64 | elif [ $( echo "$diff > 0.005" | bc ) -eq 1 ]; then 65 | step="0.0005" 66 | elif [ $( echo "$diff > 0.002" | bc ) -eq 1 ]; then 67 | step="0.0002" 68 | elif [ $( echo "$diff > 0.001" | bc ) -eq 1 ]; then 69 | 
step="0.0001" 70 | elif [ $( echo "$diff > 0.0005" | bc ) -eq 1 ]; then 71 | step="0.00005" 72 | elif [ $( echo "$diff > 0.0002" | bc ) -eq 1 ]; then 73 | step="0.00002" 74 | elif [ $( echo "$diff > 0.0001" | bc ) -eq 1 ]; then 75 | step="0.00001" 76 | elif [ $( echo "$diff > 0.00005" | bc ) -eq 1 ]; then 77 | step="0.000005" 78 | elif [ $( echo "$diff > 0.00002" | bc ) -eq 1 ]; then 79 | step="0.000002" 80 | else 81 | step="0.000001" 82 | fi 83 | min_remainder=$( echo "${min} % $step" | bc ) 84 | lower_boundary=$( echo "${min} - $min_remainder" | bc ) 85 | max_remainder=$( echo "${max} % $step" | bc ) 86 | upper_boundary=$( echo "${max} - ${max_remainder} + $step" | bc ) 87 | 88 | if [ ! -z $center_zero ]; then 89 | if [ "$center_zero" -eq "1" ]; then 90 | if [ $( echo "$lower_boundary < 0" | bc ) -eq 1 ] && [ $( echo "$upper_boundary > 0" | bc ) -eq 1 ]; then 91 | lower_boundary_pos=$( echo "$lower_boundary * -1" | bc ) 92 | if [ $( echo "$lower_boundary_pos < $upper_boundary" | bc ) -eq 1 ]; then 93 | lower_boundary=$( echo "$upper_boundary * -1" | bc ) 94 | else 95 | upper_boundary=$( echo "$lower_boundary * -1" | bc ) 96 | fi 97 | fi 98 | fi 99 | fi 100 | 101 | # Remove tailing zeros 102 | # lower_boundary="${lower_boundary/%$( echo $lower_boundary | grep -oP 0*$ )}" 103 | # upper_boundary="${upper_boundary/%$( echo $upper_boundary | grep -oP 0*$ )}" 104 | echo "$lower_boundary/$upper_boundary/$step" 105 | fi 106 | -------------------------------------------------------------------------------- /modules/detrend/detrend.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # OSARIS modules to remove trends from series of grid data. 
#
#
# David Loibl, 2018
#
#####################################################################

module_name="detrend"

# Resolve the module configuration directory: default to
# $OSARIS_PATH/config; expand non-absolute paths relative to it.
# $OSARIS_PATH and $module_config_PATH come from the main OSARIS
# configuration sourced by the caller.
if [ -z "$module_config_PATH" ]; then
    echo "Parameter module_config_PATH not set in main config file. Setting to default:"
    echo " $OSARIS_PATH/config"
    module_config_PATH="$OSARIS_PATH/config"
elif [[ "$module_config_PATH" != /* ]] && [[ "$module_config_PATH" != "$OSARIS_PATH"* ]]; then
    module_config_PATH="${OSARIS_PATH}/config/${module_config_PATH}"
fi

if [ ! -d "$module_config_PATH" ]; then
    echo "ERROR: $module_config_PATH is not a valid directory. Check parameter module_config_PATH in main config file. Exiting ..."
    exit 2
fi

if [ ! -f "${module_config_PATH}/${module_name}.config" ]; then
    echo
    echo "Cannot open ${module_name}.config in ${module_config_PATH}. Please provide a valid config file."
    echo
else
    # Start runtime timer
    module_start=$( date +%s )

    # Include the config file (defines RT_grid_input_PATH, RT_model,
    # RT_safe_trend_files, ...)
    source "${module_config_PATH}/${module_name}.config"


    ############################
    # Module actions start here

    echo; echo "Starting Detrend module ..."; echo

    # $work_PATH and $output_PATH are provided by the main OSARIS config.
    RT_work_PATH="$work_PATH/Detrend"
    RT_output_PATH="$output_PATH/Detrend"

    # Validate input directory and collect the grid files to process.
    if [ ! -d "$RT_grid_input_PATH" ]; then
        echo; echo "ERROR: Directory $RT_grid_input_PATH does not exist. Exiting ..."
        check_input=0
    else
        cd "$RT_grid_input_PATH"
        grid_files=($( ls *.grd ))
        if [ ${#grid_files[@]} -gt 0 ]; then
            echo "Found ${#grid_files[@]} grid files in ${RT_grid_input_PATH}."
            check_input=1
        else
            echo; echo "ERROR: No grid files found in $RT_grid_input_PATH. Exiting ..."
            check_input=0
        fi
    fi

    # Default trend model: 10 model parameters (bicubic) with iterative
    # (robust) fitting. The original checked this twice; once suffices.
    if [ -z "$RT_model" ]; then
        echo "Parameter RT_model not set in config/${module_name}.config ..."
        echo "Setting RT_model to 10+r (bicubic + iterative processing)"
        RT_model="10+r"
    fi

    # Default: do not keep the fitted trend surfaces. Guarding against an
    # unset value prevents '[ -eq ]' syntax errors below.
    RT_safe_trend_files=${RT_safe_trend_files:-0}

    if [ "$check_input" -eq 1 ]; then
        echo "Input data looks good, initializing trend removal ..."
        mkdir -p "$RT_work_PATH"
        mkdir -p "$RT_output_PATH"

        if [ "$RT_safe_trend_files" -eq 1 ]; then
            mkdir -p "$RT_output_PATH/Trend-surfaces"
        fi

        for grid_file in ${grid_files[@]}; do
            echo "Detrending $grid_file ..."
            # Optionally export the fitted trend surface alongside the
            # detrended grid (grdtrend -T).
            if [ "$RT_safe_trend_files" -eq 1 ]; then
                trend_export="-T${RT_output_PATH}/Trend-surfaces/${grid_file::-4}-trend.grd"
            else
                trend_export=""
            fi

            # Remove trend: -N selects the model, -D writes the residual
            # (detrended) grid.
            gmt grdtrend "${RT_grid_input_PATH}/${grid_file}" -N$RT_model -D${RT_output_PATH}/${grid_file::-4}-detrend.grd $trend_export -V
        done
    fi

    # Module actions end here
    ###########################


    # Stop runtime timer and print runtime
    module_end=$( date +%s )
    module_runtime=$((module_end-module_start))

    echo
    printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n\n' \
	   $(($module_runtime/86400)) \
	   $(($module_runtime%86400/3600)) \
	   $(($module_runtime%3600/60)) \
	   $(($module_runtime%60))
    echo
fi
# Usage: intf.sh master_scene slave_scene GMTSAR_config_file OSARIS_PATH boundary_box.xyz
#
################################################################

# alias rm 'rm -f'
# unset noclobber

if [ $# -lt 4 ]; then
    echo; echo "Usage: intf.sh master_scene slave_scene OSARIS_config_file OSARIS_PATH boundary_box.xyz"; echo
    echo "Example: intf.sh S1A20150526_F1 S1A20150607_F1 config.tsx.slc.txt home/user/osaris /workpath/boundary_box.xyz"; echo; echo
    exit 1
fi

# Check if files exist

OSARIS_PATH=$4

# Each scene needs its PRM/LED/SLC triplet in raw/. Exit non-zero so
# callers can detect the failure (a bare 'exit' returned status 0).
if [ ! -f "raw/$1.PRM" ] || [ ! -f "raw/$1.LED" ] || [ ! -f "raw/$1.SLC" ]; then
    echo " Missing input file raw/$1"; exit 1
fi

if [ ! -f "raw/$2.PRM" ] || [ ! -f "raw/$2.LED" ] || [ ! -f "raw/$2.SLC" ]; then
    echo " Missing input file raw/$2"; exit 1
fi

if [ ! -f "$3" ]; then
    echo " Missing config file: $3"
    exit 1
fi


# Read parameters from config file
source "$3"

# Check if vars are set, set to default values if not
if [ -z "$proc_stage" ]; then proc_stage=1; fi

# Vars for interferometric processing
if [ -z "$earth_radius" ]; then earth_radius=0; fi
if [ -z "$topo_phase" ]; then topo_phase=1; fi
if [ -z "$shift_topo" ]; then shift_topo=0; fi
if [ -z "$switch_master" ]; then switch_master=0; fi

if [ -z "$filter_wavelength" ]; then filter_wavelength=100; fi
if [ -z "$dec_factor" ]; then dec_factor=0; fi
if [ -z "$threshold_snaphu" ]; then threshold_snaphu=0.1; fi
if [ -z "$threshold_geocode" ]; then threshold_geocode=0; fi
if [ -z "$region_cut" ]; then region_cut=0; fi
if [ -z "$switch_land" ]; then switch_land=0; fi
if [ -z "$defomax" ]; then defomax=0; fi

# Read scenes
master=$1
slave=$2

# switch_master=1 treats the slave as reference and the master as repeat
# (phase = repeat - reference).
if [ "$switch_master" -eq 1 ]; then
    ref=$slave
    rep=$master
else
    ref=$master
    rep=$slave
fi


# INTERFEROMETRIC PROCESSING

if [ "$proc_stage" -le 4 ]; then
    echo; echo "Interferometric processing ..."; echo
    # Clean up
    cleanup.csh intf

    # Make and filter interferograms

    cd intf/

    # set ref_id = `grep SC_clock_start ../raw/$master.PRM | awk '{printf("%d",int($3))}' `
    # set rep_id = `grep SC_clock_start ../raw/$slave.PRM | awk '{printf("%d",int($3))}' `
    # mkdir $ref_id"_"$rep_id
    # cd $ref_id"_"$rep_id

    # Stage SLC/PRM/LED files for both scenes in the intf directory.
    # ln -s ../raw/$ref.LED .
    # ln -s ../raw/$rep.LED .
    ln -s ../SLC/$ref.SLC .
    ln -s ../SLC/$rep.SLC .
    cp ../SLC/$ref.PRM .
    cp ../SLC/$rep.PRM .
    cp ../raw/$ref.LED .
    cp ../raw/$rep.LED .

    # Subtract the topographic phase when requested, using the shifted
    # topo grid if shift_topo is active.
    if [ "$topo_phase" -eq 1 ]; then
        if [ "$shift_topo" -eq 1 ]; then
            ln -s ../topo/topo_shift.grd .
            intf.csh $ref.PRM $rep.PRM -topo topo_shift.grd
            # filter.csh $ref.PRM $rep.PRM $filter $dec_factor
        else
            ln -s ../topo/topo_ra.grd .
            intf.csh $ref.PRM $rep.PRM -topo topo_ra.grd
            # filter.csh $ref.PRM $rep.PRM $filter $dec_factor
        fi
    else
        intf.csh $ref.PRM $rep.PRM
        # filter.csh $ref.PRM $rep.PRM $filter $dec_factor
    fi
    # echo "Executing filter.csh $ref.PRM $rep.PRM $filter_wavelength $dec_factor "
    # filter.csh ${ref}.PRM ${rep}.PRM $filter_wavelength $dec_factor
    # cp -u *gauss* ../../
    cd ..
else
    echo; echo "Skipping interferometric processing (proc_stage set to ${proc_stage})"; echo
fi
7 | # the parameter name, "=" sign, parameter value should be separated by space " ". 8 | # leave the parameter value blank if using default value. 9 | 10 | # the namestem of the master image 11 | # REQUIRED FOR STACK PROCESSING 12 | # master_image = S1A20160124_ALL_F3 13 | 14 | ##################### 15 | # processing stage # 16 | ##################### 17 | # 1 - start from preprocess 18 | # 2 - start from align SLC images 19 | # 3 - start from make topo_ra 20 | # 4 - start from make and filter interferograms 21 | # 5 - start from unwrap phase 22 | # 6 - start from geocode 23 | proc_stage = 1 24 | 25 | ################################## 26 | # parameters for preprocess # 27 | # - pre_proc.csh # 28 | ################################## 29 | # num of patches 30 | num_patches = 31 | 32 | # earth radius 33 | earth_radius = 34 | 35 | # near_range 36 | near_range = 37 | 38 | # Doppler centroid 39 | fd1 = 40 | 41 | ################################################ 42 | # parameters for focus and align SLC images # 43 | # - align.csh # 44 | ################################################ 45 | # 46 | ##################################### 47 | # parameters for make topo_ra # 48 | # - dem2topo_ra.csh # 49 | ##################################### 50 | # subtract topo_ra from the phase 51 | # (1 -- yes; 0 -- no) 52 | topo_phase = 1 53 | # if above parameter = 1 then one should have put dem.grd in topo/ 54 | 55 | # topo_ra shift (1 -- yes; 0 -- no) 56 | shift_topo = 0 57 | 58 | #################################################### 59 | # parameters for make and filter interferograms # 60 | # - intf.csh # 61 | # - filter.csh # 62 | #################################################### 63 | # switch the master and slave when doing intf. 
64 | # put "1" if assume master as repeat and slave as reference 65 | # put "0" if assume master as reference and slave as repeat [Default] 66 | # phase = repeat phase - reference phase 67 | switch_master = 0 68 | 69 | # set the filter wavelength in meters (default is 200m) 70 | # this is the wavelength where the filter has a gain of 0.5 71 | # the images will be sampled at 1/4 wavelength or smaller 72 | filter_wavelength = 200 73 | 74 | # decimation of images 75 | # decimation control the size of the amplitude and phase images. It is either 1 or 2. 76 | # Set the decimation to be 1 if you want higher resolution images. 77 | # Set the decimation to be 2 if you want images with smaller file size. 78 | # 79 | dec_factor = 1 80 | # 81 | # 82 | ##################################### 83 | # parameters for unwrap phase # 84 | # - snaphu.csh # 85 | ##################################### 86 | # correlation threshold for snaphu.csh (0~1) 87 | # set it to be 0 to skip unwrapping. 88 | threshold_snaphu = 0.2 89 | # Default 0.1 90 | 91 | # region to unwrap in radar coordinates (leave it blank if unwrap the whole region) 92 | # example 300/5900/0/25000 (xmin/xmax/ymin/ymax) 93 | region_cut = 94 | 95 | # mask the wet region (Lakes/Oceans) before unwrapping (1 -- yes; else -- no) 96 | switch_land = 1 97 | 98 | # 99 | # Allow phase discontinuity in unrapped phase. This is needed for interferograms having sharp phase jumps. 
100 | # defo_max = 0 - used for smooth unwrapped phase such as interseismic deformation 101 | # defo_max = 65 - will allow a phase jump of 65 cycles or 1.82 m of deformation at C-band 102 | # 103 | defomax = 50 104 | 105 | ##################################### 106 | # parameters for geocode # 107 | # - geocode.csh # 108 | ##################################### 109 | # correlation threshold for geocode.csh (0~1) 110 | threshold_geocode = .10 111 | 112 | -------------------------------------------------------------------------------- /lib/harmonize_grids.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # Harmonize a series of grid files relative to a reference point. 6 | # 7 | # David Loibl, 2018 8 | # 9 | ##################################################################### 10 | 11 | 12 | if [ $# -lt 3 ]; then 13 | echo 14 | echo "Harmonize a series of grid files relative to a reference point." 15 | echo 16 | echo "Usage: harmonize_grids.sh input_path reference_point output_path" 17 | echo 18 | echo " Shift all grid files in input directory by their offset relative to a " 19 | echo " 'stable ground point'. Most commonly used to harmonize a time series of " 20 | echo " unwrapped intereferograms or LoS displacement grids." 21 | echo 22 | echo " Arguments: " 23 | echo " Input path -> The directory containing grid files" 24 | echo " Reference point -> Coordinates of reference point in decimal degrees using the format" 25 | echo " Longitude/Latitude, e.g. 165.1/-12.5" 26 | echo " Alternatively, set to 'median' to harmonize grids to their respective medians" 27 | echo " Output path -> Output grids will be written here" 28 | echo 29 | echo " Output:" 30 | echo " Harmonized series of .grd files."; echo 31 | 32 | else 33 | HG_start=`date +%s` 34 | echo; echo "Harmonizing grids to reference point ..." 
35 | 36 | # Read attributes and setup environment 37 | grid_input_PATH=$1 38 | ref_point_xy_coords=$2 39 | HG_output_PATH=$3 40 | HG_work_PATH="$work_PATH/Harmonize-Grids" 41 | 42 | mkdir -p $HG_output_PATH 43 | mkdir -p $HG_work_PATH 44 | 45 | if [ "$ref_point_xy_coords" == "median" ]; then 46 | echo "Harmonizing grids to their respective medians." 47 | else 48 | echo "Reference point is set to $ref_point_xy_coords" 49 | ref_point_array=(${ref_point_xy_coords//\// }) 50 | echo "${ref_point_array[0]} ${ref_point_array[1]}" > $HG_work_PATH/ref_point.xy 51 | fi 52 | 53 | 54 | if [ ! -d "$grid_input_PATH" ]; then 55 | echo; echo "ERROR: Directory $grid_input_PATH does not exist. Skipping ..." 56 | else 57 | 58 | cd $grid_input_PATH 59 | 60 | grid_input_PATH_basename=$( basename "$PWD" ) 61 | mkdir -p ${HG_output_PATH}/${grid_input_PATH_basename} 62 | 63 | grid_files=($( ls *.grd )) 64 | for grid_file in ${grid_files[@]}; do 65 | 66 | if [ "$ref_point_xy_coords" == "median" ]; then 67 | # Obtain median value of grid 68 | ref_point_grid_val=$( gmt grdinfo -L1 $grid_file | grep median | awk '{print $3}' ) 69 | else 70 | # Get xy coordinates of 'stable ground point' from file and check the value the raster set has at this location. 71 | gmt grdtrack $HG_work_PATH/ref_point.xy -G${grid_input_PATH}/${grid_file} >> $HG_work_PATH/${grid_input_PATH_basename}_ref_point_vals.xyz 72 | ref_point_grid_trk=$( gmt grdtrack ${HG_work_PATH}/ref_point.xy -G${grid_input_PATH}/${grid_file} ) 73 | 74 | if [ ! -z ${ref_point_grid_trk+x} ]; then 75 | ref_point_grid_val=$( echo "$ref_point_grid_trk" | awk '{ print $3 }') 76 | # if [ $debug -gt 1 ]; then echo "Stable ground diff ${grid_input_PATH}/${grid_file}: $ref_point_grid_val"; fi 77 | else 78 | echo "GMT grdtrack for stable ground yielded no result for ${grid_input_PATH}/${grid_file}. Skipping" 79 | fi 80 | fi 81 | 82 | if [ ! 
-z ${ref_point_grid_val+x} ]; then 83 | # Shift input grid so that the 'stable ground value' is zero 84 | gmt grdmath ${grid_input_PATH}/${grid_file} $ref_point_grid_val SUB = $HG_output_PATH/${grid_file::-4}-harmonized.grd -V 85 | else 86 | echo "Unwrap difference calculation for stable ground point failed in ${folder}. Skipping ..." 87 | fi 88 | done 89 | 90 | fi 91 | 92 | 93 | HG_end=`date +%s` 94 | 95 | HG_runtime=$((HG_end-HG_start)) 96 | 97 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($HG_runtime/86400)) $(($HG_runtime%86400/3600)) $(($HG_runtime%3600/60)) $(($HG_runtime%60)) 98 | echo 99 | 100 | 101 | fi 102 | -------------------------------------------------------------------------------- /lib/process-multi-swath.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ################################################################# 4 | # 5 | # Merge, unwrap and geocode multiple swaths 6 | # 7 | # 8 | # Usage: process-multi-swath.sh config_file 9 | # 10 | # 11 | ################################################################ 12 | 13 | if [ $# -eq 0 ]; then 14 | echo 15 | echo "Usage: process-multi-swath.sh config_file" 16 | echo 17 | elif [ ! -f $1 ]; then 18 | echo 19 | echo "Cannot open $1. Please provide a valid config file." 20 | echo 21 | else 22 | 23 | echo 24 | echo "- - - - - - - - - - - - - - - - - - - -" 25 | echo " Starting multi-swath processing ..." 
echo "- - - - - - - - - - - - - - - - - - - -"
echo

config_file=$1
source $config_file
echo "Config file: $config_file"

# NOTE(review): assumes the script is invoked from the OSARIS root
# directory -- confirm against the caller.
OSARIS_PATH=$( pwd )

work_PATH=$base_PATH/$prefix/Processing
# Path to working directory

output_PATH=$base_PATH/$prefix/Output
# Path to directory where all output will be written

log_PATH=$base_PATH/$prefix/Log
# Path to directory where the log files will be written



cd $work_PATH
mkdir -p merge-files

# Scene-pair directories (names contain "20", i.e. the acquisition dates).
s1_pairs=($( ls -d *20*/ ))
if [ -f $work_PATH/pairs-forward.list ]; then pairs_forward=($( cat $work_PATH/pairs-forward.list )); fi
if [ -f $work_PATH/pairs-reverse.list ]; then pairs_reverse=($( cat $work_PATH/pairs-reverse.list )); fi


slurm_jobname="$slurm_jobname_prefix-MSP"

for s1_pair in ${s1_pairs[@]}; do
    # Strip the trailing slash left by 'ls -d'.
    s1_pair=${s1_pair::-1}
    echo "Working on $s1_pair"

    # Determine chronological direction of the pair.
    # NOTE(review): if a pair is in neither list, $direction stays unset
    # (or keeps the previous iteration's value) and is still passed to
    # PP-multiswath.sh below -- verify this is intended.
    if [[ " ${pairs_forward[@]} " =~ " ${s1_pair} " ]]; then
        direction="forward"
        if [ $debug -ge 1 ]; then echo "Forward scene pair"; fi
    elif [[ " ${pairs_reverse[@]} " =~ " ${s1_pair} " ]]; then
        direction="reverse"
        if [ $debug -ge 1 ]; then echo "Reverse scene pair"; fi
    else
        if [ $debug -ge 1 ]; then echo "Scene pair found in neither forward nor reverse list. Strange."; fi
    fi

    ln -s $work_PATH/topo/dem.grd $work_PATH/$s1_pair

    # Pair directory layout: YYYYMMDD--YYYYMMDD
    master_date=${s1_pair:0:8}
    slave_date=${s1_pair:10:8}

    mkdir -p $work_PATH/${s1_pair}/merged

    #for swath in ${swaths_to_process[@]}; do
    #	 s1_code=$( ls -d */ | head -n1 )
    #	 s1_code=${s1_code::-1}
    #	 cd $s1_code

    cd $work_PATH/${s1_pair}
    swath_dirs=$( ls -d *F* )

    if [ $debug -ge 1 ]; then
	echo; echo "Swath directories for ${s1_pair}: ${swath_dirs[@]}"
    fi

    # Build the merge list: one line per swath, "<intf dir>:<master PRM>:<slave PRM>".
    # NOTE(review): unlike the sibling process-MUG.sh, there is no
    # 'cd $work_PATH/${s1_pair}' after entering $swath_dir/intf, so with
    # more than one swath the second 'cd $swath_dir/intf' resolves from
    # inside the first swath's intf directory -- confirm whether this
    # only ever runs with a single swath.
    for swath_dir in ${swath_dirs[@]}; do
	if [ -d $swath_dir/intf ]; then
	    cd $swath_dir/intf
	    master_PRM=$( ls *${master_date}*.PRM )
	    slave_PRM=$( ls *${slave_date}*.PRM )
	    echo "$swath_dir/intf/:${master_PRM}:${slave_PRM}" >> $work_PATH/merge-files/${s1_pair}.list #${s1_code}/
	fi
    done

    # Setup preferred and alternative partition configuration
    slurm_partition_pref=$slurm_partition
    slurm_ntasks_pref=$slurm_ntasks

    if [ ! -z $slurm_partition_alt ] && [ ! -z $slurm_ntasks_alt ]; then
	# Check for available cores on the preferred slurm partition.
	# sinfo '%C' prints "allocated/idle/other/total"; the awk chain
	# extracts the second (idle) field -- TODO confirm field meaning.
	sleep 2
	cores_available=$( sinfo -o "%P %C" | grep $slurm_partition | awk '{ print $2 }' | awk 'BEGIN { FS="/?[ \t]*"; } { print $2 }' )
	echo "Cores available on partition ${slurm_partition}: $cores_available"
	if [ "$cores_available" -lt "$slurm_ntasks" ]; then
	    slurm_partition_pref=$slurm_partition_alt
	    slurm_ntasks_pref=$slurm_ntasks_alt
	fi
    fi

    # Submit one merge/unwrap/geocode job per scene pair.
    # NOTE(review): '--workdir' was renamed '--chdir' in newer Slurm
    # releases -- confirm against the target cluster's Slurm version.
    sbatch \
	--ntasks=$slurm_ntasks_pref \
	--output=$log_PATH/PP-multiswath-%j-out \
	--error=$log_PATH/PP-multiswath-%j-out \
	--workdir=$work_PATH \
	--job-name=$slurm_jobname \
	--qos=$slurm_qos \
	--account=$slurm_account \
	--partition=$slurm_partition_pref \
	--mail-type=$slurm_mailtype \
	$OSARIS_PATH/lib/PP-multiswath.sh \
	$s1_pair \
	$config_file \
	$OSARIS_PATH/$gmtsar_config_file \
	$OSARIS_PATH \
	$direction

    # step 3: launch PP jobs
    cd $work_PATH
done



fi
if [ ! -d "$1" ]; then
    echo
    echo "Usage: min_grd_extent.sh path"
    echo
else
    min_ext_PATH=$1

    cd "$min_ext_PATH"

    min_ext_count=1

    min_ext_files=($( ls *.grd ))

    for min_ext_file in "${min_ext_files[@]}"; do
        if [ "$min_ext_count" -eq 1 ]; then
            # First file: nothing to compare against yet.
            min_ext_prev_file=$min_ext_file
        else
            # Compare the current file with the previous one and keep the
            # intersection (common coverage) of both extents.
            file_1=$min_ext_PATH/$min_ext_prev_file
            file_2=$min_ext_PATH/$min_ext_file

            # gmt grdinfo -I- prints "-Rxmin/xmax/ymin/ymax"; strip "-R".
            file_1_extent=$( gmt grdinfo -I- "$file_1" ); file_1_extent=${file_1_extent:2}
            file_2_extent=$( gmt grdinfo -I- "$file_2" ); file_2_extent=${file_2_extent:2}

            # Split into arrays: [0]=xmin [1]=xmax [2]=ymin [3]=ymax
            IFS='/' read -r -a file_1_coord_array <<< "$file_1_extent"
            IFS='/' read -r -a file_2_coord_array <<< "$file_2_extent"

            # Intersection of the two extents: larger of the minima,
            # smaller of the maxima. Floating-point comparison via bc.
            if [ "$( bc <<< "${file_1_coord_array[0]} > ${file_2_coord_array[0]}" )" -eq 1 ]; then
                xmin_local=${file_1_coord_array[0]}
            else
                xmin_local=${file_2_coord_array[0]}
            fi
            if [ "$( bc <<< "${file_1_coord_array[1]} < ${file_2_coord_array[1]}" )" -eq 1 ]; then
                xmax_local=${file_1_coord_array[1]}
            else
                xmax_local=${file_2_coord_array[1]}
            fi
            if [ "$( bc <<< "${file_1_coord_array[2]} > ${file_2_coord_array[2]}" )" -eq 1 ]; then
                ymin_local=${file_1_coord_array[2]}
            else
                ymin_local=${file_2_coord_array[2]}
            fi
            if [ "$( bc <<< "${file_1_coord_array[3]} < ${file_2_coord_array[3]}" )" -eq 1 ]; then
                ymax_local=${file_1_coord_array[3]}
            else
                ymax_local=${file_2_coord_array[3]}
            fi

            if [ "$min_ext_count" -eq 2 ]; then
                # First comparison initializes the running extent.
                xmin=$xmin_local
                xmax=$xmax_local
                ymin=$ymin_local
                ymax=$ymax_local
            else
                # Keep shrinking the common extent. Zero values are
                # treated as invalid and ignored (as in the original).
                if (( $( bc -l <<< "$xmin < $xmin_local" ) )) && (( $( bc -l <<< "$xmin_local != 0" ) )); then
                    xmin=$xmin_local
                fi
                if (( $( bc -l <<< "$xmax > $xmax_local" ) )) && (( $( bc -l <<< "$xmax_local != 0" ) )); then
                    xmax=$xmax_local
                fi
                if (( $( bc -l <<< "$ymin < $ymin_local" ) )) && (( $( bc -l <<< "$ymin_local != 0" ) )); then
                    ymin=$ymin_local
                fi
                if (( $( bc -l <<< "$ymax > $ymax_local" ) )) && (( $( bc -l <<< "$ymax_local != 0" ) )); then
                    ymax=$ymax_local
                fi
            fi

        fi
        min_ext_prev_file=$min_ext_file
        min_ext_count=$((min_ext_count+1))
    done


    # Result: boundary box of the common coverage.
    echo "$xmin/$xmax/$ymin/$ymax"

fi
unwrap and geocode multiple swaths 6 | # 7 | # 8 | # Usage: process-multi-swath.sh config_file 9 | # 10 | # 11 | ################################################################ 12 | 13 | if [ $# -eq 0 ]; then 14 | echo 15 | echo "Usage: process-multi-swath.sh config_file" 16 | echo 17 | elif [ ! -f $1 ]; then 18 | echo 19 | echo "Cannot open $1. Please provide a valid config file." 20 | echo 21 | else 22 | 23 | echo 24 | echo "- - - - - - - - - - - - - - - - - - - -" 25 | echo " Starting multi-swath processing ..." 26 | echo "- - - - - - - - - - - - - - - - - - - -" 27 | echo 28 | 29 | config_file=$1 30 | source $config_file 31 | echo "Config file: $config_file" 32 | 33 | OSARIS_PATH=$( pwd ) 34 | 35 | work_PATH=$base_PATH/$prefix/Processing 36 | # Path to working directory 37 | 38 | output_PATH=$base_PATH/$prefix/Output 39 | # Path to directory where all output will be written 40 | 41 | log_PATH=$base_PATH/$prefix/Log 42 | # Path to directory where the log files will be written 43 | 44 | 45 | 46 | cd $work_PATH 47 | rm -rf merge-files 48 | mkdir -p merge-files 49 | 50 | s1_pairs=($( ls -d *20*/ )) 51 | if [ -f $work_PATH/pairs-forward.list ]; then pairs_forward=($( cat $work_PATH/pairs-forward.list )); fi 52 | if [ -f $work_PATH/pairs-reverse.list ]; then pairs_reverse=($( cat $work_PATH/pairs-reverse.list )); fi 53 | 54 | 55 | slurm_jobname="$slurm_jobname_prefix-MUG" 56 | 57 | for s1_pair in ${s1_pairs[@]}; do 58 | s1_pair=${s1_pair::-1} 59 | echo; echo "Preparing Slurm batch jobs for $s1_pair" 60 | 61 | if [[ " ${pairs_forward[@]} " =~ " ${s1_pair} " ]]; then 62 | direction="forward" 63 | if [ $debug -ge 1 ]; then echo "Forward scene pair"; fi 64 | elif [[ " ${pairs_reverse[@]} " =~ " ${s1_pair} " ]]; then 65 | direction="reverse" 66 | if [ $debug -ge 1 ]; then echo "Reverse scene pair"; fi 67 | else 68 | if [ $debug -ge 1 ]; then echo "Scene pair found in neither forward nor reverse list. 
Strange."; fi 69 | fi 70 | 71 | ln -s $work_PATH/topo/dem.grd $work_PATH/$s1_pair 72 | 73 | master_date=${s1_pair:0:8} 74 | slave_date=${s1_pair:10:8} 75 | 76 | mkdir -p $work_PATH/${s1_pair}/merged 77 | 78 | #for swath in ${swaths_to_process[@]}; do 79 | # s1_code=$( ls -d */ | head -n1 ) 80 | # s1_code=${s1_code::-1} 81 | # cd $s1_code 82 | 83 | cd $work_PATH/${s1_pair} 84 | swath_dirs=($( ls -d *F* )) 85 | 86 | if [ $debug -ge 1 ]; then 87 | echo "Swath directories found for ${s1_pair}: ${swath_dirs[@]}" 88 | fi 89 | 90 | for swath_dir in ${swath_dirs[@]}; do 91 | if [ -d $swath_dir/intf ]; then 92 | cd $swath_dir/intf 93 | master_PRM=$( ls *${master_date}*.PRM ) 94 | slave_PRM=$( ls *${slave_date}*.PRM ) 95 | echo "${swath_dir}/intf/:${master_PRM}:${slave_PRM}" >> $work_PATH/merge-files/${s1_pair}.list #${s1_code}/ 96 | cd $work_PATH/${s1_pair} 97 | fi 98 | done 99 | 100 | # Setup preferred and alternative partition configuration 101 | slurm_partition_pref=$slurm_partition 102 | slurm_ntasks_pref=$slurm_ntasks 103 | 104 | if [ ! -z $slurm_partition_alt ] && [ ! -z $slurm_ntasks_alt ]; then 105 | # Check for available cores on the preferred slurm partition. 
106 | sleep 2 107 | cores_available=$( sinfo -o "%P %C" | grep $slurm_partition | awk '{ print $2 }' | awk 'BEGIN { FS="/?[ \t]*"; } { print $2 }' ) 108 | echo "Cores available on partition ${slurm_partition}: $cores_available" 109 | if [ "$cores_available" -lt "$slurm_ntasks" ]; then 110 | slurm_partition_pref=$slurm_partition_alt 111 | slurm_ntasks_pref=$slurm_ntasks_alt 112 | fi 113 | fi 114 | 115 | sbatch \ 116 | --ntasks=$slurm_ntasks_pref \ 117 | --output=$log_PATH/PP-MUG-%j-out \ 118 | --error=$log_PATH/PP-MUG-%j-out \ 119 | --workdir=$work_PATH \ 120 | --job-name=$slurm_jobname \ 121 | --qos=$slurm_qos \ 122 | --account=$slurm_account \ 123 | --partition=$slurm_partition_pref \ 124 | --mail-type=$slurm_mailtype \ 125 | $OSARIS_PATH/lib/PP-MUG.sh \ 126 | $s1_pair \ 127 | $config_file \ 128 | $OSARIS_PATH/$gmtsar_config_file \ 129 | $OSARIS_PATH \ 130 | $direction 131 | 132 | # step 3: launch PP jobs 133 | cd $work_PATH 134 | done 135 | 136 | 137 | 138 | fi 139 | 140 | -------------------------------------------------------------------------------- /lib/GMTSAR-mods/snaphu_OSARIS.csh: -------------------------------------------------------------------------------- 1 | #!/bin/csh -f 2 | # $Id$ 3 | # 4 | # 5 | alias rm 'rm -f' 6 | unset noclobber 7 | # 8 | if ($#argv < 2) then 9 | errormessage: 10 | echo "" 11 | echo "snaphu.csh [GMT5SAR] - Unwrap the phase" 12 | echo " " 13 | echo "Usage: snaphu.csh correlation_threshold maximum_discontinuity [///]" 14 | echo "" 15 | echo " correlation is reset to zero when < threshold" 16 | echo " maximum_discontinuity enables phase jumps for earthquake ruptures, etc." 17 | echo " set maximum_discontinuity = 0 for continuous phase such as interseismic " 18 | echo "" 19 | echo "Example: snaphu.csh .12 40 1000/3000/24000/27000" 20 | echo "" 21 | echo "Reference:" 22 | echo "Chen C. W. and H. A. 
#!/bin/csh -f
# $Id$
#
# OSARIS-modified version of GMTSAR's snaphu.csh phase-unwrapping
# wrapper. In addition to the unwrapped phase it exports snaphu's
# connected components (con_comp.out -> con_comp.grd) via the -g
# option, which the stock script does not do.
#
# Arguments:
#   $1  correlation threshold (coherence below this is masked out)
#   $2  maximum discontinuity (DEFOMAX_CYCLE; 0 = continuous phase)
#   $3  optional region cut rng0/rngf/azi0/azif
#
alias rm 'rm -f'
unset noclobber
#
if ($#argv < 2) then
errormessage:
  echo ""
  echo "snaphu.csh [GMT5SAR] - Unwrap the phase"
  echo " "
  echo "Usage: snaphu.csh correlation_threshold maximum_discontinuity [///]"
  echo ""
  echo " correlation is reset to zero when < threshold"
  echo " maximum_discontinuity enables phase jumps for earthquake ruptures, etc."
  echo " set maximum_discontinuity = 0 for continuous phase such as interseismic "
  echo ""
  echo "Example: snaphu.csh .12 40 1000/3000/24000/27000"
  echo ""
  echo "Reference:"
  echo "Chen C. W. and H. A. Zebker, Network approaches to two-dimensional phase unwrapping: intractability and two new algorithms, Journal of the Optical Society of America A, vol. 17, pp. 401-414 (2000)."
  exit 1
endif
#
# prepare the files adding the correlation mask
#
if ($#argv == 3 ) then
  # Region cut given: work on patches of the input grids.
  gmt grdcut mask.grd -R$3 -Gmask_patch.grd
  gmt grdcut corr.grd -R$3 -Gcorr_patch.grd
  gmt grdcut phasefilt.grd -R$3 -Gphase_patch.grd
else
  # No cut: the *_patch.grd names are just links to the full grids.
  ln -s mask.grd mask_patch.grd
  ln -s corr.grd corr_patch.grd
  ln -s phasefilt.grd phase_patch.grd
endif
#
# create landmask
#
if (-e landmask_ra.grd) then
  if ($#argv == 3 ) then
    gmt grdsample landmask_ra.grd -R$3 `gmt grdinfo -I phase_patch.grd` -Glandmask_ra_patch.grd
  else
    gmt grdsample landmask_ra.grd `gmt grdinfo -I phase_patch.grd` -Glandmask_ra_patch.grd
  endif
  # Zero out the phase over water (landmask is 1 on land).
  gmt grdmath phase_patch.grd landmask_ra_patch.grd MUL = phase_patch.grd -V
endif
#
# user defined mask
#
if (-e mask_def.grd) then
  if ($#argv == 3 ) then
    gmt grdcut mask_def.grd -R$3 -Gmask_def_patch.grd
  else
    cp mask_def.grd mask_def_patch.grd
  endif
  gmt grdmath corr_patch.grd mask_def_patch.grd MUL = corr_patch.grd -V
endif

# Build the unwrapping mask: 1 where coherence >= threshold ($1), NaN
# elsewhere, combined with the amplitude mask.
gmt grdmath corr_patch.grd $1 GE 0 NAN mask_patch.grd MUL = mask2_patch.grd
# Replace NaNs in the coherence with 0 and clip values to <= 1
# (presumably guarding snaphu against invalid correlation input --
# NOTE(review): confirm XOR-with-0 semantics against the GMT grdmath docs).
gmt grdmath corr_patch.grd 0. XOR 1. MIN = corr_patch.grd
gmt grdmath mask2_patch.grd corr_patch.grd MUL = corr_tmp.grd
# Dump phase and coherence as raw binary streams for snaphu.
gmt grd2xyz phase_patch.grd -ZTLf -N0 > phase.in
gmt grd2xyz corr_tmp.grd -ZTLf -N0 > corr.in
#
# run snaphu
#
set sharedir = `gmtsar_sharedir.csh`
echo "unwrapping phase with snaphu - higher threshold for faster unwrapping "

if ($2 == 0) then
  # Continuous deformation: smooth (-s) mode, stock brief config.
  snaphu phase.in `gmt grdinfo -C phase_patch.grd | cut -f 10` -f $sharedir/snaphu/config/snaphu.conf.brief -g con_comp.out -c corr.in -o unwrap.out -v -s
else
  # Allow phase jumps: patch DEFOMAX_CYCLE into a local config and run
  # in deformation (-d) mode.
  sed "s/.*DEFOMAX_CYCLE.*/DEFOMAX_CYCLE $2/g" $sharedir/snaphu/config/snaphu.conf.brief > snaphu.conf.brief
  snaphu phase.in `gmt grdinfo -C phase_patch.grd | cut -f 10` -f snaphu.conf.brief -c corr.in -g con_comp.out -o unwrap.out -v -d
endif
#
# convert to grd
#
gmt xyz2grd con_comp.out -ZTLu -r `gmt grdinfo -I- phase_patch.grd` `gmt grdinfo -I phase_patch.grd` -Gcon_comp.grd
gmt xyz2grd unwrap.out -ZTLf -r `gmt grdinfo -I- phase_patch.grd` `gmt grdinfo -I phase_patch.grd` -Gtmp.grd
# Re-apply the unwrapping mask to the unwrapped phase.
gmt grdmath tmp.grd mask2_patch.grd MUL = tmp.grd
#
# detrend the unwrapped if DEFOMAX = 0 for interseismic
#
if ($2 == 0) then
  gmt grdtrend tmp.grd -N3r -Dunwrap.grd
else
  mv tmp.grd unwrap.grd
endif
#
# landmask
if (-e landmask_ra.grd) then
  gmt grdmath unwrap.grd landmask_ra_patch.grd MUL = tmp.grd -V
  mv tmp.grd unwrap.grd
endif
#
# user defined mask
#
if (-e mask_def.grd) then
  gmt grdmath unwrap.grd mask_def_patch.grd MUL = tmp.grd -V
  mv tmp.grd unwrap.grd
endif
#
# plot the unwrapped phase
#
gmt grdgradient unwrap.grd -Nt.9 -A0. -Gunwrap_grad.grd
set tmp = `gmt grdinfo -C -L2 unwrap.grd`
# Color range: mean +/- 2 standard deviations.
set limitU = `echo $tmp | awk '{printf("%5.1f", $12+$13*2)}'`
set limitL = `echo $tmp | awk '{printf("%5.1f", $12-$13*2)}'`
set std = `echo $tmp | awk '{printf("%5.1f", $13)}'`
gmt makecpt -Cseis -I -Z -T"$limitL"/"$limitU"/1 -D > unwrap.cpt
set boundR = `gmt grdinfo unwrap.grd -C | awk '{print ($3-$2)/4}'`
set boundA = `gmt grdinfo unwrap.grd -C | awk '{print ($5-$4)/4}'`
gmt grdimage unwrap.grd -Iunwrap_grad.grd -Cunwrap.cpt -JX6.5i -B"$boundR":Range:/"$boundA":Azimuth:WSen -X1.3i -Y3i -P -K > unwrap.ps
gmt psscale -D3.3/-1.5/5/0.2h -Cunwrap.cpt -B"$std":"unwrapped phase, rad": -O -E >> unwrap.ps
#
# clean up
#
# NOTE: rm is aliased to 'rm -f' above, so listing files that may not
# exist (tmp2.grd, wrap.grd, mask3.*) is harmless.
rm tmp.grd corr_tmp.grd unwrap.out tmp2.grd unwrap_grad.grd
rm phase.in corr.in
#
# cleanup more
#
rm wrap.grd corr_patch.grd phase_patch.grd mask_patch.grd mask3.grd mask3.out
#
#!/usr/bin/env bash

######################################################################
#
# Slurm batch job: merge the swaths of one Sentinel-1 scene pair,
# unwrap and geocode the merged interferogram, then copy the requested
# result grids to the Output directory and append a status line to the
# pairs report.
#
# Arguments:
#   1  scene pair id (<masterdate>--<slavedate>)
#   2  OSARIS config file
#   3  GMTSAR config file
#   4  OSARIS installation path
#   5  processing direction ("forward" or "reverse")
#
######################################################################

start=`date +%s`

echo; echo "Starting multiswath processing ..."

s1_pair=$1
config_file=$2
gmtsar_config_file=$3
OSARIS_PATH=$4
direction=$5

echo "Reading configuration file $config_file"
if [ "${config_file:0:2}" = "./" ]; then
    # Turn a relative config path into an absolute one.
    config_file=$OSARIS_PATH/${config_file:2:${#config_file}}
fi

source $config_file


work_PATH=$base_PATH/$prefix/Processing
# Path to working directory

output_PATH=$base_PATH/$prefix/Output
# Path to directory where all output will be written

log_PATH=$base_PATH/$prefix/Output/Log
# Path to directory where the log files will be written

master_date=${s1_pair:0:8}
slave_date=${s1_pair:10:8}

cd $work_PATH/$s1_pair

echo
echo "- - - - - - - - - - - - - - - - - - - - "
echo "Starting merge_unwrap_geocode ..."
echo
echo "Current path: $( pwd )"
echo
echo

$OSARIS_PATH/lib/InSAR/merge-unwrap-geocode.sh \
    $work_PATH/merge-files/${s1_pair}.list \
    $config_file \
    $work_PATH/proc-params/boundary-box.xyz


echo; echo "Checking results and moving to files to Output directory ..."; echo

# Report status codes: 1 = ok, 0 = failed, 2 = not requested/skipped.
# Initialize all of them up front so the report line below always has a
# complete, fixed set of fields -- previously status_los was never set
# anywhere, and in the reverse branch most status variables stayed
# unset, silently shifting the report columns.
status_amp=2; status_iga=2; status_gnt=2; status_ccp=2
status_coh=2; status_pha=2; status_unw=2; status_los=2

if [ ! "$direction" == "reverse" ]; then
    if [ $proc_amplitudes -eq 1 ]; then
	mkdir -p $output_PATH/Amplitudes
	cp -n ./merged/amp1_db_ll.grd $output_PATH/Amplitudes/${master_date}-amplitude-db.grd
	cp -n ./merged/amp2_db_ll.grd $output_PATH/Amplitudes/${slave_date}-amplitude-db.grd
	if [ -f "$output_PATH/Amplitudes/${master_date}-amplitude-db.grd" ] && [ -f "$output_PATH/Amplitudes/${slave_date}-amplitude-db.grd" ]; then status_amp=1; else status_amp=0; fi
    fi

    if [ $proc_amplit_ifg -eq 1 ]; then
	mkdir -p $output_PATH/Interf-amplitudes
	cp ./merged/display_amp_ll.grd $output_PATH/Interf-amplitudes/${s1_pair}-ifgamp.grd
	# Fix: check the file that is actually written (-ifgamp.grd).
	# The old check looked for '-amplitude.grd', which never exists,
	# so status_iga was always reported as failed.
	if [ -f "$output_PATH/Interf-amplitudes/${s1_pair}-ifgamp.grd" ]; then status_iga=1; else status_iga=0; fi
    fi

    if [ $proc_ifg_grdnts -eq 1 ]; then
	mkdir -p $output_PATH/Interf-gradients
	cp ./merged/xphase_ll.grd $output_PATH/Interf-gradients/${s1_pair}-xphase.grd
	cp ./merged/yphase_ll.grd $output_PATH/Interf-gradients/${s1_pair}-yphase.grd
	if [ -f "$output_PATH/Interf-gradients/${s1_pair}-xphase.grd" ] && [ -f "$output_PATH/Interf-gradients/${s1_pair}-yphase.grd" ]; then status_gnt=1; else status_gnt=0; fi
    fi


    if [ $proc_ifg_concmp -eq 1 ]; then
	mkdir -p $output_PATH/Conn-comps
	cp ./merged/con_comp_ll.grd $output_PATH/Conn-comps/${s1_pair}-conn_comp.grd
	if [ -f "$output_PATH/Conn-comps/${s1_pair}-conn_comp.grd" ]; then status_ccp=1; else status_ccp=0; fi
    fi

    if [ $proc_coherences -eq 1 ]; then
	mkdir -p $output_PATH/Coherences
	cp ./merged/corr_ll.grd $output_PATH/Coherences/${s1_pair}-coherence.grd
	if [ -f "$output_PATH/Coherences/${s1_pair}-coherence.grd" ]; then status_coh=1; else status_coh=0; fi
    fi

    if [ $proc_ifg_filtrd -eq 1 ]; then
	mkdir -p $output_PATH/Interferograms
	cp ./merged/phasefilt_mask_ll.grd $output_PATH/Interferograms/${s1_pair}-interferogram.grd
	if [ -f "$output_PATH/Interferograms/${s1_pair}-interferogram.grd" ]; then status_pha=1; else status_pha=0; fi
    fi

    if [ $proc_ifg_unwrpd -eq 1 ]; then
	mkdir -p $output_PATH/Interf-unwrpd
	cp ./merged/unwrap_mask_ll.grd $output_PATH/Interf-unwrpd/${s1_pair}-interf_unwrpd.grd
	if [ -f "$output_PATH/Interf-unwrpd/${s1_pair}-interf_unwrpd.grd" ]; then status_unw=1; else status_unw=0; fi
    fi
else
    # Reverse pairs only produce the reversed unwrapped interferogram.
    if [ $proc_ifg_revers -eq 1 ]; then
	mkdir -p $output_PATH/Interf-unwrpd-rev
	cp ./merged/unwrap_mask_ll.grd $output_PATH/Interf-unwrpd-rev/${s1_pair}-interf_unwrpd.grd
	if [ -f "$output_PATH/Interf-unwrpd-rev/${s1_pair}-interf_unwrpd.grd" ]; then status_unw=1; else status_unw=0; fi
    fi
fi


end=`date +%s`
runtime=$((end-start))

echo; echo "Writing report ..."; echo

echo "${s1_pair:0:8} ${s1_pair:10:8} $SLURM_JOB_ID $runtime $status_amp $status_coh $status_ccp $status_pha $status_unw $status_los" >> $output_PATH/Reports/PP-pairs-stats.tmp

printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($runtime/86400)) $(($runtime%86400/3600)) $(($runtime%3600/60)) $(($runtime%60))
#!/usr/bin/env bash

######################################################################
#
# OSARIS module for 'Stable Ground Point Identification' (SGPI).
#
# Calculates the sum and arithmetic mean of a time series of coherence
# files.
#
# Provide a valid config file named 'sgp_identification.config' in the
# config directory; a template is provided in templates/module_config/
#
# Requires processed GMTSAR coherence files (corr_ll.grd) as input.
#
# Output files will be written to $output_PATH/SGPI:
# - sgp-coords.xy           -> Coordinates of max. coherence in the stack.
#                              Input file for other modules, including
#                              'Harmonize Grids' and 'GACOS Correction'
# - coherence-sum.grd       -> Sum of coherences from stack (grid)
# - coherence-arithmean.grd -> Arith. mean of coherences (grid)
#
# David Loibl, 2018
#
######################################################################

module_name="sgp_identification"


if [ -z $module_config_PATH ]; then
    echo "Parameter module_config_PATH not set in main config file. Setting to default:"
    echo "  $OSARIS_PATH/config"
    module_config_PATH="$OSARIS_PATH/config"
elif [[ "$module_config_PATH" != /* ]] && [[ "$module_config_PATH" != "$OSARIS_PATH"* ]]; then
    # Relative path given -> anchor it in the OSARIS config directory.
    module_config_PATH="${OSARIS_PATH}/config/${module_config_PATH}"
fi

if [ ! -d "$module_config_PATH" ]; then
    echo "ERROR: $module_config_PATH is not a valid directory. Check parameter module_config_PATH in main config file. Exiting ..."
    exit 2
fi

if [ ! -f "${module_config_PATH}/${module_name}.config" ]; then
    echo
    echo "Cannot open ${module_name}.config in ${module_config_PATH}. Please provide a valid config file."
    echo
else
    # Start runtime timer.
    # Fix: the runtime calculation at the bottom reads 'module_start';
    # the previous variable name 'gacos_start' (copy-paste leftover from
    # the GACOS module) left module_start unset and produced a bogus
    # runtime equal to the raw end timestamp.
    module_start=`date +%s`

    # Include the config file
    source ${module_config_PATH}/${module_name}.config




    ############################
    # Module actions start here

    echo; echo "Stable Ground Point Identification"; echo; echo

    sgpi_output_PATH="$output_PATH/SGPI"
    sgpi_work_PATH="$work_PATH/SGPI"

    mkdir -p $sgpi_output_PATH
    mkdir -p $sgpi_work_PATH/cut


    cd $sgpi_input_PATH


    # Handle boundary box
    if [ -z $sgpi_region ]; then
	echo "Obtaining minimum boundary box from files in $sgpi_input_PATH ..."
	boundary_box=$( $OSARIS_PATH/lib/min_grd_extent.sh $sgpi_input_PATH )
    else
	echo "Boundary box set to $sgpi_region ..."
	boundary_box=$sgpi_region
    fi


    # Cut input files to boundary box extent and set coherence values
    # below the threshold to NaN.
    coh_files=($( ls *.grd ))
    for coh_file in ${coh_files[@]}; do
	gmt grdcut $coh_file -G$sgpi_work_PATH/cut/${coh_file::-4}-cut.grd -R$boundary_box -V
	gmt grdclip $sgpi_work_PATH/cut/${coh_file::-4}-cut.grd -G$sgpi_work_PATH/cut/${coh_file::-4}-cut-thres.grd -Sb${sgpi_threshold}/NaN -V
    done


    # Calculate sum of coherence from all files
    cd $sgpi_work_PATH/cut
    rm *-cut.grd
    cut_files=($(ls *.grd))
    cut_files_count=0
    for cut_file in "${cut_files[@]}"; do
	if [ "$cut_files_count" -eq 0 ]; then
	    # First grid seeds the running sum.
	    if [ $debug -gt 1 ]; then echo "First file $cut_file"; fi
	    cp $cut_file $sgpi_work_PATH/coherence-sum.grd
	else
	    if [ $debug -gt 0 ]; then echo "Adding coherence from $cut_file ..."; fi
	    gmt grdmath $cut_file $sgpi_work_PATH/coherence-sum.grd ADD -V = $sgpi_work_PATH/coherence-sum.grd
	fi

	cut_files_count=$((cut_files_count+1))
    done

    # Calculate the arithmetic mean of all coherence files
    gmt grdmath $sgpi_work_PATH/coherence-sum.grd $cut_files_count DIV -V = $sgpi_output_PATH/coherence-arithmean.grd
    cp $sgpi_work_PATH/coherence-sum.grd $sgpi_output_PATH/coherence-sum.grd

    # Write coords of max coherence points to file for further processing ..
    gmt grdinfo -M -V $sgpi_work_PATH/coherence-sum.grd | grep z_max | awk '{ print $16,$19 }' > $sgpi_output_PATH/sgp-coords.xy


    if [ $clean_up -gt 0 ]; then
	echo; echo
	echo "Cleaning up"
	rm -r $sgpi_work_PATH/cut
	echo; echo
    fi

    # Stop runtime timer and print runtime
    module_end=`date +%s`
    module_runtime=$((module_end-module_start))

    echo
    printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n\n' \
	$(($module_runtime/86400)) \
	$(($module_runtime%86400/3600)) \
	$(($module_runtime%3600/60)) \
	$(($module_runtime%60))
    echo
fi
#!/bin/csh -f
# $Id$
#
# NOTE: this is a csh script despite the .sh file name; the shebang
# above was previously '#/bin/csh -f' (missing '!'), which made direct
# execution fall back to the calling shell and fail on csh syntax.
#
# Xiaohua(Eric) XU, July 7, 2016
#
# Script for merging 3 subswaths TOPS interferograms and then unwrap and geocode.
#
if ($#argv != 2) then
  echo ""
  echo "Usage: merge_unwrap_geocode_tops.csh inputfile config_file"
  echo ""
  echo "Note: Inputfiles should be as following:"
  echo ""
  echo " Swath1_Path:Swath1_master.PRM:Swath1_repeat.PRM"
  echo " Swath2_Path:Swath2_master.PRM:Swath2_repeat.PRM"
  echo " Swath3_Path:Swath3_master.PRM:Swath3_repeat.PRM"
  echo " (Use the repeat PRM which contains the shift information.)"
  echo " e.g. ../F1/intf/2015016_2015030/:S1A20151012_134357_F1.PRM"
  echo ""
  echo " Make sure under each path, the processed phasefilt.grd, corr.grd and mask.grd exist."
  echo " Also make sure the dem.grd is linked. "
  echo ""
  echo " config_file is the same one used for processing."
  echo ""
  echo "Example: merge_unwrap_geocode_tops.csh filelist batch.config"
  echo ""
  exit 1
endif

if (-f tmp_phaselist) rm tmp_phaselist
if (-f tmp_corrlist) rm tmp_corrlist
if (-f tmp_masklist) rm tmp_masklist

if (! -f dem.grd ) then
  echo "Please link dem.grd to current folder"
  exit 1
endif

# (A duplicate, unused 'set region_cut' that used to sit here was
# removed; region_cut is read together with the other parameters below.)

# Creating inputfiles for merging: for each swath, align the repeat
# shift information (rshift, first_sample) into a tmp.PRM copy of the
# master PRM, then list phase/corr/mask grids for merge_swath.
foreach line (`awk '{print $0}' $1`)
  set now_dir = `pwd`
  set pth = `echo $line | awk -F: '{print $1}'`
  set prm = `echo $line | awk -F: '{print $2}'`
  set prm2 = `echo $line | awk -F: '{print $3}'`
  cd $pth
  set rshift = `grep rshift $prm2 | tail -1 | awk '{print $3}'`
  set fs1 = `grep first_sample $prm | awk '{print $3}'`
  set fs2 = `grep first_sample $prm2 | awk '{print $3}'`
  cp $prm tmp.PRM
  if ($fs2 > $fs1) then
    update_PRM.csh tmp.PRM first_sample $fs2
  endif
  update_PRM.csh tmp.PRM rshift $rshift
  cd $now_dir

  echo $pth"tmp.PRM:"$pth"phasefilt.grd" >> tmp_phaselist
  echo $pth"tmp.PRM:"$pth"corr.grd" >> tmp_corrlist
  echo $pth"tmp.PRM:"$pth"mask.grd" >> tmp_masklist
end

set pth = `awk -F: 'NR==1 {print $1}' $1`
set stem = `awk -F: 'NR==1 {print $2}' $1 | awk -F"." '{print $1}'`

echo ""
echo "Merging START"
merge_swath tmp_phaselist phasefilt.grd $stem
merge_swath tmp_corrlist corr.grd
merge_swath tmp_masklist mask.grd
echo "Merging END"
echo ""

# This step is essential, cut the DEM so it can run faster.
if (! -f trans.dat) then
  set led = `grep led_file $pth$stem".PRM" | awk '{print $3}'`
  cp $pth$led .
  echo "Recomputing the projection LUT..."
  gmt grd2xyz --FORMAT_FLOAT_OUT=%lf dem.grd -s | SAT_llt2rat $stem".PRM" 1 -bod > trans.dat
endif

# Read in parameters
set threshold_snaphu = `grep threshold_snaphu $2 | awk '{print $3}'`
set threshold_geocode = `grep threshold_geocode $2 | awk '{print $3}'`
set region_cut = `grep region_cut $2 | awk '{print $3}'`
set switch_land = `grep switch_land $2 | awk '{print $3}'`
set defomax = `grep defomax $2 | awk '{print $3}'`

# Unwrapping
if ($region_cut == "") then
  set region_cut = `gmt grdinfo phasefilt.grd -I- | cut -c3-20`
endif
if ($threshold_snaphu != 0 ) then
  if ($switch_land == 1) then
    if (! -f landmask_ra.grd) then
      landmask.csh $region_cut
    endif
  endif

  echo ""
  echo "SNAPHU.CSH - START"
  echo "threshold_snaphu: $threshold_snaphu"
  snaphu.csh $threshold_snaphu $defomax $region_cut
  echo "SNAPHU.CSH - END"
else
  echo ""
  echo "SKIP UNWRAP PHASE"
endif

# Geocoding
if ($threshold_geocode != 0) then
  echo ""
  echo "GEOCODE-START"
  proj_ra2ll.csh trans.dat phasefilt.grd phasefilt_ll.grd
  proj_ra2ll.csh trans.dat corr.grd corr_ll.grd
  gmt makecpt -T-3.15/3.15/0.05 -Z > phase.cpt
  set BT = `gmt grdinfo -C corr.grd | awk '{print $7}'`
  gmt makecpt -Cgray -T0/$BT/0.05 -Z > corr.cpt
  grd2kml.csh phasefilt_ll phase.cpt
  grd2kml.csh corr_ll corr.cpt

  if (-f unwrap.grd) then
    gmt grdmath unwrap.grd mask.grd MUL = unwrap_mask.grd
    proj_ra2ll.csh trans.dat unwrap.grd unwrap_ll.grd
    proj_ra2ll.csh trans.dat unwrap_mask.grd unwrap_mask_ll.grd
    set BT = `gmt grdinfo -C unwrap.grd | awk '{print $7}'`
    set BL = `gmt grdinfo -C unwrap.grd | awk '{print $6}'`
    gmt makecpt -T$BL/$BT/0.5 -Z > unwrap.cpt
    grd2kml.csh unwrap_mask_ll unwrap.cpt
    grd2kml.csh unwrap_ll unwrap.cpt
  endif

  echo "GEOCODE END"
endif

rm tmp_phaselist tmp_corrlist tmp_masklist *.eps *.bb
#!/usr/bin/env bash

######################################################################
#
# Identify regions in which a coherence drop from relatively high
# values occurred.
#
# Requires a file 'unstable_coh_metric.config' in the OSARIS config
# folder containing the Slurm configuration. Get started by copying
# the config_template file from the templates folder and fit it to
# your setup.
#
# David Loibl, 2018
#
######################################################################

module_name="unstable_coh_metric"

if [ -z $module_config_PATH ]; then
    echo "Parameter module_config_PATH not set in main config file. Setting to default:"
    echo "  $OSARIS_PATH/config"
    module_config_PATH="$OSARIS_PATH/config"
elif [[ "$module_config_PATH" != /* ]] && [[ "$module_config_PATH" != "$OSARIS_PATH"* ]]; then
    # Relative path given -> anchor it in the OSARIS config directory.
    module_config_PATH="${OSARIS_PATH}/config/${module_config_PATH}"
fi

if [ ! -d "$module_config_PATH" ]; then
    echo "ERROR: $module_config_PATH is not a valid directory. Check parameter module_config_PATH in main config file. Exiting ..."
    exit 2
fi

if [ ! -f "${module_config_PATH}/${module_name}.config" ]; then
    echo
    echo "Cannot open ${module_name}.config in ${module_config_PATH}. Please provide a valid config file."
    echo
else
    # Start runtime timer
    UCM_start_time=`date +%s`

    # Include the config file
    source ${module_config_PATH}/${module_name}.config


    rm -rf $work_PATH/UCM

    mkdir -p $work_PATH/UCM/cut_files
    mkdir -p $work_PATH/UCM/temp
    mkdir -p $output_PATH/UCM/
    mkdir -p $work_PATH/UCM/input

    # Fix: use a module-local variable for the coherence input folder.
    # The previous code reassigned the global 'base_PATH' from the main
    # config, clobbering it for anything sourced later in this shell.
    ucm_input_PATH=$output_PATH/Coherences
    cd $ucm_input_PATH

    coh_files=($( ls *.grd ))

    for coh_file in "${coh_files[@]}"; do
	ln -s $ucm_input_PATH/$coh_file $work_PATH/UCM/input/$coh_file
    done

    count=0

    # Obtain minimum boundary box for coherence files
    min_bb=$( $OSARIS_PATH/lib/min_grd_extent.sh $ucm_input_PATH )
    echo "Minimum boundary box: $min_bb"

    # Launch one Slurm job per consecutive pair of coherence grids.
    for coh_file in "${coh_files[@]}"; do
	if [ "$count" -gt "0" ]; then
	    prev_coh_file=${coh_files[$( bc <<< $count-1 )]}

	    slurm_jobname="$slurm_jobname_prefix-UCM"

	    sbatch \
		--output=$log_PATH/UCM-%j.log \
		--error=$log_PATH/UCM-%j.log \
		--workdir=$input_PATH \
		--job-name=$slurm_jobname \
		--qos=$slurm_qos \
		--account=$slurm_account \
		--partition=$slurm_partition \
		--mail-type=$slurm_mailtype \
		$OSARIS_PATH/modules/unstable_coh_metric/UCM-batch.sh \
		$work_PATH/UCM \
		$output_PATH/UCM \
		$coh_file \
		$prev_coh_file \
		$high_corr_threshold \
		$min_bb

	fi
	((count++))
    done


    # Wait for all UCM jobs to finish.
    $OSARIS_PATH/lib/check-queue.sh $slurm_jobname 2 0

    if [ $clean_up -gt 0 ]; then
	echo; echo "Cleaning up ..."
	rm -rf $work_PATH/UCM/temp/ $work_PATH/UCM/HC_*
	echo
    fi


    sort $output_PATH/Reports/PP-UCM-stats.tmp > $output_PATH/Reports/PP-UCM-stats.list

    printf "\n OSARIS UCM module processing report \n \n" > $output_PATH/Reports/PP-UCM.report
    # Fix: this line was missing its redirect and printed the job count
    # to stdout instead of appending it to the report file.
    printf "Total number of pair jobs executed:\t $(cat $output_PATH/Reports/PP-UCM-stats.list | wc -l) \n \n" >> $output_PATH/Reports/PP-UCM.report

    PP_total_runtime=0
    while read -r PP_job; do
	printf "Slurm job ID:\t\t $(echo $PP_job | awk '{ print $3}') \n" >> $output_PATH/Reports/PP-UCM.report
	scene_1_date=$(echo $PP_job | awk '{ print $1 }')
	scene_2_date=$(echo $PP_job | awk '{ print $2 }')
	printf "Scene dates:\t $scene_1_date $scene_2_date \n" >> $output_PATH/Reports/PP-UCM.report

	if [ ! "$(echo $PP_job | awk '{ print $5 }')" -eq 1 ]; then
	    printf " Status UCM:\t failed \n" >> $output_PATH/Reports/PP-UCM.report
	else
	    printf " Status UCM:\t ok \n" >> $output_PATH/Reports/PP-UCM.report
	fi

	PP_runtime=$(echo $PP_job | awk '{ print $4}')
	printf ' Processing time:\t %02dd %02dh:%02dm:%02ds\n' $(($PP_runtime/86400)) $(($PP_runtime%86400/3600)) $(($PP_runtime%3600/60)) $(($PP_runtime%60)) >> $output_PATH/Reports/PP-UCM.report
	PP_total_runtime=$((PP_total_runtime + PP_runtime))
	printf "\n \n" >> $output_PATH/Reports/PP-UCM.report
    done < "$output_PATH/Reports/PP-UCM-stats.list"

    rm $output_PATH/Reports/PP-UCM-stats.list $output_PATH/Reports/PP-UCM-stats.tmp

    printf '\n\nTotal processing time:\t %02dd %02dh:%02dm:%02ds\n' $(($PP_total_runtime/86400)) $(($PP_total_runtime%86400/3600)) $(($PP_total_runtime%3600/60)) $(($PP_total_runtime%60)) >> $output_PATH/Reports/PP-UCM.report


    UCM_end_time=`date +%s`
    UCM_runtime=$((UCM_end_time - UCM_start_time))

    printf 'Elapsed wall clock time:\t %02dd %02dh:%02dm:%02ds\n' $(($UCM_runtime/86400)) $(($UCM_runtime%86400/3600)) $(($UCM_runtime%3600/60)) $(($UCM_runtime%60)) >> $output_PATH/Reports/PP-UCM.report



fi
#!/usr/bin/env bash
#
# Slurm batch job: apply a GACOS atmospheric-delay correction to one
# unwrapped interferogram.
#
# Arguments:
#   1  GACOS working directory
#   2  GACOS output directory
#   3  directory holding the input interferograms
#   4  interferogram file name (starts with <date1>--<date2>...)

start=`date +%s`

echo; echo "Starting GACOS correction processing ..."; echo

GACOS_work_PATH=$1
GACOS_output_PATH=$2
GACOS_intf_input_PATH=$3
intf=$4


# NOTE(review): here the *second* date in the pair name is treated as
# master and the first as slave -- the opposite of PP-MUG.sh, where
# master=${s1_pair:0:8}. Confirm the intended pair-name ordering.
master_date="${intf:10:8}"
slave_date="${intf:0:8}"
master_grd="$GACOS_work_PATH/GACOS_files/${master_date}.grd"
slave_grd="$GACOS_work_PATH/GACOS_files/${slave_date}.grd"


# Step 1: Time Differencing
# Difference the two GACOS zenith-delay grids for the pair's dates.
echo; echo "Conducting time differencing of GACOS scenes ..."
zpddm_file="$GACOS_work_PATH/${slave_date}-${master_date}.grd"
gmt grdmath $master_grd $slave_grd SUB = "$zpddm_file" -V


# Step 2: Space Differencing
# Subtract the delay value at the reference point so the correction is
# zero there (reference point read from ref_point.xy).
echo; echo "Conducting space differencing of GACOS scenes ..."
szpddm_file="$GACOS_work_PATH/${slave_date}-${master_date}-sd.grd"
zpddm_ps_value=$( gmt grdtrack $GACOS_work_PATH/ref_point.xy -G$zpddm_file )
zpddm_ps_value=$( echo "$zpddm_ps_value" | awk '{ print $3 }' )
echo; echo " PS value: $zpddm_ps_value"
gmt grdmath $zpddm_file $zpddm_ps_value SUB = $szpddm_file -V

# Step 3: Apply the correction
echo; echo "Applying GACOS correction to interferogram"
echo "$GACOS_intf_input_PATH/$intf"

# Cut GACOS diff file and phase file to same extent

file_1="$szpddm_file"
file_2="$GACOS_intf_input_PATH/$intf"

# grdinfo -I- prints '-Rxmin/xmax/ymin/ymax'; strip the leading '-R'.
file_1_extent=$( gmt grdinfo -I- $file_1 ); file_1_extent=${file_1_extent:2}
file_2_extent=$( gmt grdinfo -I- $file_2 ); file_2_extent=${file_2_extent:2}

file_1_coord_string=$( echo $file_1_extent | tr "/" "\n")
file_2_coord_string=$( echo $file_2_extent | tr "/" "\n")

echo; echo "File 1 coordinate string: "
echo "$file_1_coord_string"
echo; echo "File 2 coordinate string: "
echo "$file_2_coord_string"
echo

# Create arrays of coordinates for each dataset
# (index 0..3 = xmin, xmax, ymin, ymax)
counter=0
for coord in $file_1_coord_string; do
    file_1_coord_array[$counter]=$coord
    counter=$((counter+1))
done

counter=0
for coord in $file_2_coord_string; do
    file_2_coord_array[$counter]=$coord
    counter=$((counter+1))
done


# Determine overal max and min values for both datasets.
# This computes the *intersection* of the two extents: the larger of
# the two xmin/ymin values and the smaller of the two xmax/ymax values.

# NOTE(review): 'remainder' is computed but never used afterwards.
remainder=$( expr $counter % 2 )

counter=0
while [ $counter -lt 4 ]; do
    if [ $counter -eq 0 ]; then
	# Determining xmin (take the larger of the two xmins)
	if [ $( echo "${file_1_coord_array[$counter]} > ${file_2_coord_array[$counter]}" | bc -l ) -eq 0 ]; then
	    xmin=${file_2_coord_array[$counter]}
	else
	    xmin=${file_1_coord_array[$counter]}
	fi
    elif [ $counter -eq 1 ]; then
	# Determining xmax (take the smaller of the two xmaxs)
	if [ $( echo "${file_1_coord_array[$counter]} < ${file_2_coord_array[$counter]}" | bc -l ) -eq 0 ]; then
	    xmax=${file_2_coord_array[$counter]}
	else
	    xmax=${file_1_coord_array[$counter]}
	fi
    elif [ $counter -eq 2 ]; then
	# Determining ymin (take the larger of the two ymins)
	if [ $( echo "${file_1_coord_array[$counter]} > ${file_2_coord_array[$counter]}" | bc -l ) -eq 0 ]; then
	    ymin=${file_2_coord_array[$counter]}
	else
	    ymin=${file_1_coord_array[$counter]}
	fi
    elif [ $counter -eq 3 ]; then
	# Determining ymax (take the smaller of the two ymaxs)
	if [ $( echo "${file_1_coord_array[$counter]} < ${file_2_coord_array[$counter]}" | bc -l ) -eq 0 ]; then
	    ymax=${file_2_coord_array[$counter]}
	else
	    ymax=${file_1_coord_array[$counter]}
	fi
    fi

    counter=$((counter+1))
done

# Check and correct for longitudes > 180°
#if [ $( echo "$xmin > 180" | bc -l ) -eq 1 ]; then xmin=$( echo "$xmin - 360" | bc -l ); fi
#if [ $( echo "$xmax > 180" | bc -l ) -eq 1 ]; then xmax=$( echo "$xmax - 360" | bc -l ); fi

echo; echo " The common minimum boundary box for the files"
echo "  - $szpddm_file and"
echo "  - $GACOS_intf_input_PATH/$intf"
echo "  is $xmin/$xmax/$ymin/$ymax"

# Resample the GACOS difference grid onto the interferogram's grid
# increment within the common boundary box ...
echo; echo "gmt grdsample $szpddm_file -G${szpddm_file::-4}-cut.grd -R$xmin/$xmax/$ymin/$ymax `gmt grdinfo -I $GACOS_intf_input_PATH/$intf` -V"
gmt grdsample $szpddm_file -G${szpddm_file::-4}-cut.grd -R$xmin/$xmax/$ymin/$ymax `gmt grdinfo -I $GACOS_intf_input_PATH/$intf` -V

# ... and resample the interferogram onto exactly the cut GACOS grid.
echo; echo "gmt grdsample $GACOS_intf_input_PATH/$intf -G$GACOS_work_PATH/cut_intfs/$intf `gmt grdinfo -I- ${szpddm_file::-4}-cut.grd` `gmt grdinfo -I ${szpddm_file::-4}-cut.grd` -V"
gmt grdsample $GACOS_intf_input_PATH/$intf -G$GACOS_work_PATH/cut_intfs/$intf \
    `gmt grdinfo -I- ${szpddm_file::-4}-cut.grd` \
    `gmt grdinfo -I ${szpddm_file::-4}-cut.grd` -V


# Subtract the atmospheric delay difference from the interferogram.
corrected_phase_file="$GACOS_output_PATH/${slave_date}--${master_date}-gacoscorr.grd"
gmt grdmath $GACOS_work_PATH/cut_intfs/$intf ${szpddm_file::-4}-cut.grd SUB = $corrected_phase_file -V



if [ -f $corrected_phase_file ]; then status_GACOS=1; else status_GACOS=0; fi

end=`date +%s`
runtime=$((end-start))

# NOTE(review): output_PATH is not set anywhere in this script --
# presumably it is inherited from the submitting shell via Slurm's
# environment export. Confirm; otherwise this writes to /Reports/... .
echo "${intf:0:8} ${intf:10:8} $SLURM_JOB_ID $runtime $status_GACOS" >> $output_PATH/Reports/PP-GACOS-stats.tmp

printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($runtime/86400)) $(($runtime%86400/3600)) $(($runtime%3600/60)) $(($runtime%60))
38 | echo 39 | else 40 | 41 | echo 42 | echo "- - - - - - - - - - - - - - - - - - - -" 43 | echo " Starting SBAS processing ..." 44 | echo "- - - - - - - - - - - - - - - - - - - -" 45 | echo 46 | 47 | config_file=$1 48 | source $config_file 49 | echo "Config file: $config_file" 50 | 51 | OSARIS_PATH=$( pwd ) 52 | echo "GSP directory: $OSARIS_PATH" 53 | 54 | work_PATH=$base_PATH/$prefix/Processing 55 | # Path to working directory 56 | 57 | output_PATH=$base_PATH/$prefix/Output 58 | # Path to directory where all output will be written 59 | 60 | log_PATH=$base_PATH/$prefix/Output/Log 61 | # Path to directory where the log files will be written 62 | 63 | 64 | 65 | mode=$2 66 | echo "mode -->" $mode 67 | 68 | if [ $mode -eq 1 ]; then 69 | 70 | cd $work_PATH 71 | rm -rf SBAS 72 | mkdir SBAS 73 | 74 | cd $work_PATH/intf_all 75 | folders=($( ls -d */ )) 76 | for folder in "${folders[@]}"; do 77 | master=${folder:0:7} 78 | slave=${folder:8:7} 79 | 80 | cd $work_PATH/intf_all/"$master"_"$slave" 81 | 82 | echo 83 | echo "Now working on folder:" 84 | pwd 85 | echo 86 | 87 | # shopt -s extglob 88 | # IFS=" " 89 | 90 | 91 | #rm unwrap.grd 92 | 93 | #crop corr.grd to match with unwrap.grd 94 | # region=$(grep region_cut ../../batch_tops.config | awk '{print $3}') 95 | # gmt grdcut corr.grd -Gcorr_crop.grd -R$region -V 96 | 97 | #ls *.PRM > tmp2 98 | #master_prm=$(head -n 1 tmp2) 99 | #slave_prm=$(head -n 2 tmp2 | tail -n 1) 100 | 101 | #echo $master_prm $slave_prm > tmp 102 | 103 | PRM_filelist=($(ls -v *.PRM)) 104 | echo 105 | echo "PRM files: " 106 | echo "${PRM_filelist[0]} - ${PRM_filelist[1]}" 107 | echo 108 | 109 | for PRM_file in "${PRM_filelist[@]}"; do 110 | 111 | 112 | PRM_id=$(grep SC_clock_start $PRM_file | awk '{printf("%d",int($3))}') 113 | 114 | if [ "$PRM_id" == "$master" ]; then 115 | master_PRM=$PRM_file 116 | echo "Found master PRM file for id $PRM_id: $master_PRM" 117 | elif [ "$PRM_id" == "$slave" ]; then 118 | slave_PRM=$PRM_file 119 | echo "Found slave PRM file
for id $PRM_id: $slave_PRM" 120 | else 121 | echo "Warning: no fitting PRM file found for $PRM_id!" 122 | fi 123 | #cp -a $PRM_file ../intf_all/${PRM_id}.PRM 124 | #cp $PRM_file ../intf_all/$PRM_id.PRM 125 | done 126 | 127 | SAT_baseline $work_PATH/intf_all/"$master"_"$slave"/$master_PRM $work_PATH/intf_all/"$master"_"$slave"/$slave_PRM > tmp 128 | 129 | BPL=$(grep B_perpendicular tmp | awk '{print $3}') 130 | # rm tmp* 131 | 132 | #make intf.tab file 133 | cd $work_PATH/SBAS 134 | echo $work_PATH/intf_all/"$master"_"$slave"/unwrap.grd $work_PATH/intf_all/"$master"_"$slave"/corr.grd $master $slave $BPL >> intf.tab 135 | ln -s $work_PATH/intf_all/"$master"_"$slave"/unwrap.grd . 136 | done 137 | 138 | cd $work_PATH/SBAS 139 | #make scene.tab file 140 | awk '{print int($2),$3}' $work_PATH/intf_all/baseline_table.dat >> scene.tab 141 | fi 142 | 143 | if [ $mode -eq 2 ]; then 144 | cd $work_PATH/SBAS 145 | xdim=$(gmt grdinfo -C unwrap.grd | awk '{print $10}') 146 | ydim=$(gmt grdinfo -C unwrap.grd | awk '{print $11}') 147 | n_int=$(wc -l < intf.tab) 148 | n_scn=$(wc -l < scene.tab) 149 | #run SBAS 150 | sbas intf.tab scene.tab $n_int $n_scn $xdim $ydim -smooth 1.0 -wavelength 0.0554658 -incidence 30 -range 800184.946186 -rms -dem 151 | 152 | # project the velocity to Geocooridnates 153 | # 154 | ln -s ../topo/trans.dat . 
155 | proj_ra2ll.csh trans.dat vel.grd vel_ll.grd 156 | gmt grd2cpt vel_ll.grd -T= -Z -Cjet > vel_ll.cpt 157 | grd2kml.csh vel_ll vel_ll.cpt 158 | 159 | # view disp.grd 160 | rm *.jpg *.ps disp.tab 161 | ls disp_0* > disp.tab 162 | 163 | shopt -s extglob 164 | IFS=" " 165 | while read disp; 166 | do 167 | gambar="$disp".ps 168 | gmt grdimage $disp -Cvel_ll.cpt -JX6i -Bx1000 -By250 -BWeSn -P -K > $gambar 169 | gmt psscale -D1.3c/-1.2c/5c/0.2h -Cvel_ll.cpt -B30:"LOS displacement, mm":/:"range decrease": -P -J -R -O -X4 -Y20 >> $gambar 170 | 171 | ps2raster $gambar -Tj -E100 172 | #echo $disp 173 | done < disp.tab 174 | 175 | fi 176 | fi 177 | -------------------------------------------------------------------------------- /lib/unwrapping-sum.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | 4 | ################################################################# 5 | # 6 | # Calculate the sum of fwd and rev unwrapped interferograms. 7 | # 8 | # Usage: unwrapping-sum.sh file1 file2 output_directory [output_filename] 9 | # 10 | # Both input files must be in GRD format. 11 | # Output filename needs only to be set when both input files have 12 | # the same name (e.g. multiple unwrap_mask_ll.grd files). 13 | # 14 | # David Loibl, 2018 15 | # 16 | ################################################################ 17 | 18 | 19 | if [ $# -lt 3 ]; then 20 | echo 21 | echo "Usage: unwrapping-sum.sh file1 file2 output_directory [output_filename]" 22 | echo 23 | else 24 | 25 | 26 | # Check whether .grd files were provided 27 | if [ ! -f $1 ]; then 28 | echo 29 | echo "ERROR: Cannot open $1. Please provide file." 30 | echo 31 | exit 1 32 | else 33 | 34 | if [ ! "${1##*.}" = "grd" ]; then 35 | echo 36 | echo "ERROR: difference calculation requires .grd files as input." 37 | echo 38 | exit 1 39 | else 40 | file_1=$1 41 | fi 42 | fi 43 | 44 | if [ ! -f $2 ]; then 45 | echo 46 | echo "ERROR: Cannot open $2.
Please provide file." 47 | echo 48 | exit 1 49 | else 50 | 51 | if [ ! "${2##*.}" = "grd" ]; then 52 | echo 53 | echo "ERROR: difference calculation requirers .grd files as input." 54 | echo 55 | exit 1 56 | else 57 | file_2=$2 58 | fi 59 | fi 60 | 61 | filename_1=$(basename $file_1 .grd)-1 62 | filename_2=$(basename $file_2 .grd)-2 63 | output_PATH=$3 64 | 65 | if [ $# -eq 4 ]; then 66 | diff_filename=$4 67 | else 68 | diff_filename="diff-$filename_2--$filename_1" 69 | fi 70 | 71 | mkdir -p $output_PATH 72 | mkdir -p $output_PATH/Temp 73 | 74 | file_1_extent=$( gmt grdinfo -I- $file_1 ); file_1_extent=${file_1_extent:2} 75 | file_2_extent=$( gmt grdinfo -I- $file_2 ); file_2_extent=${file_2_extent:2} 76 | 77 | echo $file_1_extent 78 | echo $file_2_extent 79 | 80 | file_1_coord_string=$( echo $file_1_extent | tr "/" "\n") 81 | file_2_coord_string=$( echo $file_2_extent | tr "/" "\n") 82 | 83 | # Create arrays of coordinates for each dataset 84 | counter=0 85 | for coord in $file_1_coord_string; do 86 | file_1_coord_array[$counter]=$coord 87 | counter=$((counter+1)) 88 | done 89 | 90 | counter=0 91 | for coord in $file_2_coord_string; do 92 | file_2_coord_array[$counter]=$coord 93 | counter=$((counter+1)) 94 | done 95 | 96 | # Determine overal max and min values for both datasets 97 | # echo ${file_1_coord_array[1]} 98 | # echo ${file_2_coord_array[1]} 99 | 100 | # remainder=$( expr $counter % 2 ) 101 | 102 | 103 | counter=0 104 | while [ $counter -lt 4 ]; do 105 | if [ $counter -eq 0 ]; then 106 | echo "Determining xmin" 107 | if [ $( bc <<< "${file_1_coord_array[$counter]} > ${file_2_coord_array[$counter]}" ) -eq 0 ]; then 108 | echo "file 1 has smaller xmin value" 109 | echo "Adding ${file_1_coord_array[$counter]}" 110 | echo 111 | xmin=${file_2_coord_array[$counter]} 112 | else 113 | echo "file 2 has smaller xmin value" 114 | echo "Adding ${file_2_coord_array[$counter]}" 115 | echo 116 | xmin=${file_1_coord_array[$counter]} 117 | fi 118 | elif [ $counter -eq 1 
]; then 119 | echo "Determining xmax" 120 | if [ $( bc <<< "${file_1_coord_array[$counter]} < ${file_2_coord_array[$counter]}" ) -eq 0 ]; then 121 | echo "file 1 has higher xmax value" 122 | echo "Adding file_2: ${file_1_coord_array[$counter]}" 123 | echo 124 | xmax=${file_2_coord_array[$counter]} 125 | else 126 | echo "file 2 has higher xmax value" 127 | echo "Adding file_1: ${file_2_coord_array[$counter]}" 128 | echo 129 | xmax=${file_1_coord_array[$counter]} 130 | fi 131 | elif [ $counter -eq 2 ]; then 132 | echo "Determining ymin" 133 | if [ $( bc <<< "${file_1_coord_array[$counter]} > ${file_2_coord_array[$counter]}" ) -eq 0 ]; then 134 | echo "file 1 has smaller ymin value" 135 | echo "Adding file_2: ${file_1_coord_array[$counter]}" 136 | echo 137 | ymin=${file_2_coord_array[$counter]} 138 | else 139 | echo "file 2 has smaller ymin value" 140 | echo "Adding file_1: ${file_2_coord_array[$counter]}" 141 | echo 142 | ymin=${file_1_coord_array[$counter]} 143 | fi 144 | elif [ $counter -eq 3 ]; then 145 | echo "Determining ymax" 146 | if [ $( bc <<< "${file_1_coord_array[$counter]} < ${file_2_coord_array[$counter]}" ) -eq 0 ]; then 147 | echo "file 1 has max value" 148 | echo "Adding file_2: ${file_1_coord_array[$counter]}" 149 | echo 150 | ymax=${file_2_coord_array[$counter]} 151 | else 152 | echo "file 2 has max value" 153 | echo "Adding file_1: ${file_2_coord_array[$counter]}" 154 | echo 155 | ymax=${file_1_coord_array[$counter]} 156 | fi 157 | fi 158 | 159 | counter=$((counter+1)) 160 | done 161 | 162 | # echo "xmin: $xmin" 163 | # echo "xmax: $xmax" 164 | # echo "ymin: $ymin" 165 | # echo "ymax: $ymax" 166 | 167 | 168 | cut_filename_1="$filename_1-cut.grd" 169 | cut_filename_2="$filename_2-cut.grd" 170 | 171 | 172 | cd $output_PATH 173 | 174 | gmt grdcut $file_1 -GTemp/$cut_filename_1 -R$xmin/$xmax/$ymin/$ymax -V 175 | gmt grdcut $file_2 -GTemp/$cut_filename_2 -R$xmin/$xmax/$ymin/$ymax -V 176 | 177 | cd Temp 178 | gmt grdmath $cut_filename_2 $cut_filename_1 
ADD = $output_PATH/${diff_filename}.grd -V 179 | 180 | cd .. 181 | rm -r Temp 182 | 183 | fi 184 | -------------------------------------------------------------------------------- /lib/s1-file-download.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | echo 4 | echo "- - - - - - - - - - - - - - - - - - - -" 5 | echo "Starting Sentinel-1 scene download ..." 6 | echo "- - - - - - - - - - - - - - - - - - - -" 7 | echo 8 | 9 | echo "Area of interest boundary coordinates:" 10 | echo "Longitude 1: $lon_1" 11 | echo "Latitude 1: $lat_1" 12 | echo "Longitude 2: $lon_2" 13 | echo "Latitude 2: $lat_2" 14 | 15 | 16 | # Check if all required data is available 17 | 18 | if [ -z "$lon_1" ] || [ -z "$lat_1" ] || [ -z "$lon_2" ] || [ -z "$lat_2" ]; then 19 | echo; echo "ERROR: Missing area of interest coordinates:" 20 | echo "Please check configuration in ${config_file}." 21 | aoi_ok=0 22 | else 23 | # TODO: Add checks for value ranges (-180 to 180, -90 to 90) 24 | # TODO: Check if values are valid float numbers 25 | echo "Boundary coordinates accepted" 26 | aoi_ok=1 27 | fi 28 | 29 | if [ "$scene_provider" = "ESA" ]; then 30 | echo; echo "Data provider set to ESA's DHuS API" 31 | 32 | if [ -z "$ESA_username" ] || [ -z "$ESA_password" ]; then 33 | echo; echo "ERROR: Missing ESA login credentials." 34 | echo "Please review your login credentials file." 35 | login_ok=0 36 | else 37 | echo "Found ESA login credentials" 38 | login_ok=1 39 | fi 40 | elif [ "$scene_provider" = "ASF" ]; then 41 | echo; echo "Data provider set to ASF EarthData API" 42 | 43 | if [ -z "$ASF_username" ] || [ -z "$ASF_password" ]; then 44 | echo; echo "ERROR: Missing ASF login credentials." 45 | echo "Please review your login credentials file." 
46 | login_ok=0 47 | else 48 | echo "Found ASF login credentials" 49 | login_ok=1 50 | fi 51 | fi 52 | 53 | 54 | # If everything is ok, start the download 55 | 56 | if [ "$aoi_ok" -eq 1 ] && [ "$login_ok" -eq 1 ]; then 57 | if [ "$scene_provider" = "ESA" ]; then 58 | dhusget_config="-u $ESA_username -p $ESA_password" 59 | 60 | area_of_interest="${lon_1},${lat_1}:${lon_2},${lat_2}" 61 | 62 | if [ ! -z "$download_option" ]; then dhusget_config="$dhusget_config -o $download_option"; else dhusget_config="$dhusget_config -o product"; fi 63 | if [ ! -z "$mission" ]; then dhusget_config="$dhusget_config -m $mission"; else dhusget_config="$dhusget_config -m Sentinel-1"; fi 64 | if [ ! -z "$instrument" ]; then dhusget_config="$dhusget_config -i $instrument"; fi # else dhusget_config="$dhusget_config -i SAR"; fi 65 | if [ ! -z "$sensing_period_start" ]; then dhusget_config="$dhusget_config -S $sensing_period_start"; fi 66 | if [ ! -z "$sensing_period_end" ]; then dhusget_config="$dhusget_config -E $sensing_period_end"; fi 67 | if [ ! -z "$ingestion_period_start" ]; then dhusget_config="$dhusget_config -s $ingestion_period_start"; fi 68 | if [ ! -z "$ingestion_period_end" ]; then dhusget_config="$dhusget_config -e $ingestion_period_end"; fi 69 | if [ ! -z "$area_of_interest" ]; then dhusget_config="$dhusget_config -c $area_of_interest"; fi 70 | if [ ! -z "$relative_orbit" ]; then dhusget_config="$dhusget_config -F relativeorbitnumber:$relative_orbit"; fi 71 | # if [ ! -z "$search_string" ]; then dhusget_config="$dhusget_config -F $search_string"; fi 72 | if [ ! -z "$product_type" ]; then dhusget_config="$dhusget_config -T $product_type"; else dhusget_config="$dhusget_config -T SLC"; fi 73 | #if [ ! -z "$info_file_destination" ]; then dhusget_config="$dhusget_config -q $info_file_destination -C $info_file_destination" ; fi 74 | if [ ! 
-z "$max_results_per_page" ]; then dhusget_config="$dhusget_config -l $max_results_per_page"; else dhusget_config="$dhusget_config -l 100"; fi 75 | if [ ! -z "$concurrent_downloads" ]; then dhusget_config="$dhusget_config -n $concurrent_downloads"; else dhusget_config="$dhusget_config -n 2"; fi 76 | 77 | # dhusget_config="$dhusget_config -q $input_PATH -C $input_PATH" 78 | dhusget_config="$dhusget_config -O $input_PATH" 79 | 80 | echo 81 | echo "DHuSget configuration:" 82 | echo $dhusget_config 83 | echo 84 | 85 | cd $OSARIS_PATH/lib/ext/dhusget/ 86 | ./dhusget.sh $dhusget_config 87 | 88 | elif [ "$scene_provider" = "ASF" ]; then 89 | echo "Downloading from ASF" 90 | 91 | cd $input_PATH 92 | ASF_call="https://api.daac.asf.alaska.edu/services/search/param?" 93 | ASF_call="${ASF_call}polygon=${lon_1},${lat_1},${lon_1},${lat_2},${lon_2},${lat_2},${lon_2},${lat_1},${lon_1},${lat_1}" 94 | ASF_call="${ASF_call}&platform=Sentinel-1A,Sentinel-1B" 95 | # polygon=-57.1,-17.83,-57.1,-18.8,-56.6,-18.8,-56.6,-17.83,-57.1,-17.83\ 96 | # &platform=Sentinel-1A,Sentinel-1B\ 97 | # &start=2017-10-01T00:00:00UTC\&end=2019-01-01T00:00:00UTC\&processingLevel=SLC\&relativeOrbit=141\&maxResults=10\&output=csv 98 | 99 | if [ ! -z "$sensing_period_start" ]; then ASF_call="${ASF_call}&start=${sensing_period_start::-5}UTC"; fi 100 | if [ ! -z "$sensing_period_end" ]; then ASF_call="${ASF_call}&end=${sensing_period_end::-5}UTC"; fi 101 | # if [ ! -z "$ingestion_period_start" ]; then ASF_call="${ASF_call}&processingDate=${ingestion_period_start::-5}UTC"; fi 102 | # if [ ! -z "$ingestion_period_end" ]; then ASF_call="${ASF_call}&____=${ingestion_period_end}UTC"; fi 103 | 104 | ASF_call="${ASF_call}&processingLevel=SLC" 105 | if [ ! 
-z "$relative_orbit" ]; then ASF_call="${ASF_call}&relativeOrbit=${relative_orbit}"; fi 106 | ASF_call="${ASF_call}&output=csv" 107 | 108 | if [ $debug -ge 1 ]; then 109 | echo; echo "ASF call:" 110 | echo "$ASF_call" 111 | fi 112 | echo $ASF_call | xargs curl > asf.csv 113 | 114 | ASF_files=($( cat asf.csv | awk -F"," '{if (NR>1) print $27}' | awk -F'"' '{print $2}' )) 115 | 116 | if [ $debug -ge 1 ]; then 117 | echo; echo "Files to download:" 118 | for ASF_file in ${ASF_files[@]}; do 119 | echo "${ASF_file}"; echo 120 | done 121 | fi 122 | 123 | for ASF_file in ${ASF_files[@]}; do 124 | wget --http-user=$ASF_username --http-password=$ASF_password -nc $ASF_file 125 | done 126 | 127 | fi 128 | 129 | else 130 | echo "Skipping S1 scene download ..."; echo 131 | fi 132 | 133 | -------------------------------------------------------------------------------- /modules/preview_files/preview_files.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # OSARIS module to create preview files for grid time series in 6 | # specified directories. 7 | # 8 | # You may use the following PATH variables: 9 | # $OSARIS_PATH -> OSARIS' program directory 10 | # $work_PATH -> Processing directory of a run 11 | # $output_PATH -> Output dircetory of a run 12 | # $log_PATH -> Log file directory of a run 13 | # $topo_PATH -> Directory with dem.grd used by GMTSAR 14 | # $oribts_PATH -> Directory containing the oribt files 15 | # 16 | # 17 | # David Loibl, 2018 18 | # 19 | ##################################################################### 20 | 21 | module_name="preview_files" 22 | 23 | 24 | if [ -z $module_config_PATH ]; then 25 | echo "Parameter module_config_PATH not set in main config file. 
Setting to default:" 26 | echo " $OSARIS_PATH/config" 27 | module_config_PATH="$OSARIS_PATH/config" 28 | elif [[ "$module_config_PATH" != /* ]] && [[ "$module_config_PATH" != "$OSARIS_PATH"* ]]; then 29 | module_config_PATH="${OSARIS_PATH}/config/${module_config_PATH}" 30 | fi 31 | 32 | if [ ! -d "$module_config_PATH" ]; then 33 | echo "ERROR: $module_config_PATH is not a valid directory. Check parameter module_config_PATH in main config file. Exiting ..." 34 | exit 2 35 | fi 36 | 37 | if [ ! -f "${module_config_PATH}/${module_name}.config" ]; then 38 | echo 39 | echo "Cannot open ${module_name}.config in ${module_config_PATH}. Please provide a valid config file." 40 | echo 41 | else 42 | # Start runtime timer 43 | module_start=`date +%s` 44 | 45 | # Include the config file 46 | source ${module_config_PATH}/${module_name}.config 47 | 48 | 49 | 50 | 51 | ############################ 52 | # Module actions start here 53 | 54 | 55 | 56 | 57 | # 58 | # now image for google earth 59 | # 60 | echo "geocode.csh" 61 | echo "make the KML files for Google Earth" 62 | grd2kml.csh display_amp_ll display_amp.cpt 63 | grd2kml.csh corr_ll corr.cpt 64 | grd2kml.csh phase_mask_ll phase.cpt 65 | grd2kml.csh phasefilt_mask_ll phase.cpt 66 | #ln -s phasefilt_mask_ll.grd phase_mask_ll_bw.grd 67 | #grd2kml.csh phase_mask_ll_bw phase_bw.cpt 68 | #rm phase_mask_ll_bw.grd 69 | if [ -e xphase_mask_ll.grd ]; then 70 | grd2kml.csh xphase_mask_ll phase_grad.cpt 71 | grd2kml.csh yphase_mask_ll phase_grad.cpt 72 | fi 73 | if [ -e unwrap_mask_ll.grd ]; then 74 | grd2kml.csh unwrap_mask_ll unwrap.cpt 75 | fi 76 | if [ -e phasefilt_mask_ll.grd ]; then 77 | grd2kml.csh phasefilt_mask_ll phase.cpt 78 | fi 79 | if [ -e unwrap_mask_ll.grd ]; then 80 | 81 | ####### 82 | # Obsolete, now included in displacement module ... 
83 | ####### 84 | # # constant is negative to make LOS = -1 * range change 85 | # # constant is (1000 mm) / (4 * pi) 86 | # gmt grdmath unwrap_mask_ll.grd $wavel MUL -79.58 MUL = los_ll.grd 87 | 88 | gmt grdedit -D//"mm"/1///"$PWD:t LOS displacement"/"equals negative range" los_ll.grd 89 | 90 | grd2kml.csh los_ll los.cpt 91 | fi 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | # Module actions end here 101 | ########################### 102 | 103 | 104 | 105 | # Stop runtime timer and print runtime 106 | module_end=`date +%s` 107 | module_runtime=$((module_end-module_start)) 108 | 109 | echo 110 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n\n' \ 111 | $(($module_runtime/86400)) \ 112 | $(($module_runtime%86400/3600)) \ 113 | $(($module_runtime%3600/60)) \ 114 | $(($module_runtime%60)) 115 | echo 116 | fi 117 | 118 | 119 | 120 | # # 121 | # # look at the masked phase 122 | # # 123 | # set boundR = `gmt grdinfo display_amp.grd -C | awk '{print ($3-$2)/4}'` 124 | # set boundA = `gmt grdinfo display_amp.grd -C | awk '{print ($5-$4)/4}'` 125 | # gmt grdimage phase_mask.grd -JX6.5i -Cphase.cpt -B"$boundR":Range:/"$boundA":Azimuth:WSen -X1.3i -Y3i -P -K > phase_mask.ps 126 | # gmt psscale -D3.3/-1.5/5/0.2h -Cphase.cpt -B1.57:"phase, rad": -O >> phase_mask.ps 127 | # if [ -e xphase_mask.grd ]; then 128 | # gmt grdimage xphase_mask.grd -JX8i -Cphase_grad.cpt -X.2i -Y.5i -P > xphase_mask.ps 129 | # gmt grdimage yphase_mask.grd -JX8i -Cphase_grad.cpt -X.2i -Y.5i -P > yphase_mask.ps 130 | # fi 131 | # if [ -e unwrap_mask.grd ] then 132 | # gmt grdimage unwrap_mask.grd -JX6.5i -B"$boundR":Range:/"$boundA":Azimuth:WSen -Cunwrap.cpt -X1.3i -Y3i -P -K > unwrap_mask.ps 133 | # std=`gmt grdinfo -C -L2 unwrap_mask.grd | awk '{printf("%5.1f", $13)}'` 134 | # gmt psscale -D3.3/-1.5/5/0.2h -Cunwrap.cpt -B"$std":"unwrapped phase, rad": -O -E >> unwrap_mask.ps 135 | # fi 136 | # if [ -e phasefilt_mask.grd]; then 137 | # gmt grdimage phasefilt_mask.grd -JX6.5i 
-B"$boundR":Range:/"$boundA":Azimuth:WSen -Cphase.cpt -X1.3i -Y3i -P -K > phasefilt_mask.ps 138 | # gmt psscale -D3.3/-1.5/5/0.2h -Cphase.cpt -B1.57:"phase, rad": -O >> phasefilt_mask.ps 139 | # fi 140 | # # line-of-sight displacement 141 | # if [ -e unwrap_mask.grd]; then 142 | 143 | # ####### 144 | # # Obsolete, now included in displacement module ... 145 | # ####### 146 | # # wavel=`grep wavelength *.PRM | awk '{print($3)}' | head -1 ` 147 | # # gmt grdmath unwrap_mask.grd $wavel MUL -79.58 MUL = los.grd 148 | # ####### 149 | 150 | # gmt grdgradient los.grd -Nt.9 -A0. -Glos_grad.grd 151 | # tmp=`gmt grdinfo -C -L2 los.grd` 152 | # limitU=`echo $tmp | awk '{printf("%5.1f", $12+$13*2)}'` 153 | # limitL=`echo $tmp | awk '{printf("%5.1f", $12-$13*2)}'` 154 | # std=`echo $tmp | awk '{printf("%5.1f", $13)}'` 155 | # gmt makecpt -Cpolar -Z -T"$limitL"/"$limitU"/1 -D > los.cpt 156 | # gmt grdimage los.grd -Ilos_grad.grd -Clos.cpt -B"$boundR":Range:/"$boundA":Azimuth:WSen -JX6.5i -X1.3i -Y3i -P -K > los.ps 157 | # gmt psscale -D3.3/-1.5/4/0.2h -Clos.cpt -B"$std":"LOS displacement, mm":/:"range decrease": -O -E >> los.ps 158 | # fi 159 | -------------------------------------------------------------------------------- /lib/difference.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | 4 | ################################################################# 5 | # 6 | # Calculate difference between two GRD datasets. 7 | # 8 | # Usage: difference.sh file1 file2 output_directory output_filename [create_png+kml] 9 | # 10 | # Both input files must be in GRD format. 
11 | # Set create_png+kml to a .cpt file to process PNG and KML files 12 | # 13 | ################################################################ 14 | 15 | 16 | if [ $# -lt 4 ]; then 17 | echo 18 | echo "Usage: difference.sh file1 file2 output_directory output_filename [create_png+kml]" 19 | echo 20 | else 21 | 22 | # Check whether .grd files where provided 23 | if [ ! -f $1 ]; then 24 | echo 25 | echo "ERROR: Cannot open $1. Please provide file." 26 | echo 27 | exit 1 28 | else 29 | 30 | if [ ! "${1##*.}" = "grd" ]; then 31 | echo 32 | echo "ERROR: difference calculation requirers .grd files as input." 33 | echo 34 | exit 1 35 | else 36 | file_1=$1 37 | fi 38 | fi 39 | 40 | if [ ! -f $2 ]; then 41 | echo 42 | echo "ERROR: Cannot open $2. Please provide file." 43 | echo 44 | exit 1 45 | else 46 | 47 | if [ ! "${2##*.}" = "grd" ]; then 48 | echo 49 | echo "ERROR: difference calculation requirers .grd files as input." 50 | echo 51 | exit 1 52 | else 53 | file_2=$2 54 | fi 55 | fi 56 | 57 | filename_1=$(basename $file_1 .grd)-1 58 | filename_2=$(basename $file_2 .grd)-2 59 | output_PATH=$3 60 | 61 | diff_filename=$4 62 | 63 | supercode=$(date +%s)-$(( RANDOM % 10000 )) 64 | tempdir_PATH=$output_PATH/Temp-$supercode 65 | 66 | mkdir -p $output_PATH 67 | mkdir -p $tempdir_PATH 68 | 69 | file_1_extent=$( gmt grdinfo -I- $file_1 ); file_1_extent=${file_1_extent:2} 70 | file_2_extent=$( gmt grdinfo -I- $file_2 ); file_2_extent=${file_2_extent:2} 71 | 72 | echo $file_1_extent 73 | echo $file_2_extent 74 | 75 | file_1_coord_string=$( echo $file_1_extent | tr "/" "\n") 76 | file_2_coord_string=$( echo $file_2_extent | tr "/" "\n") 77 | 78 | # Create arrays of coordinates for each dataset 79 | counter=0 80 | for coord in $file_1_coord_string; do 81 | file_1_coord_array[$counter]=$coord 82 | counter=$((counter+1)) 83 | done 84 | 85 | counter=0 86 | for coord in $file_2_coord_string; do 87 | file_2_coord_array[$counter]=$coord 88 | counter=$((counter+1)) 89 | done 90 | 91 | # 
Determine overal max and min values for both datasets 92 | # echo ${file_1_coord_array[1]} 93 | # echo ${file_2_coord_array[1]} 94 | 95 | remainder=$( expr $counter % 2 ) 96 | 97 | 98 | 99 | counter=0 100 | while [ $counter -lt 4 ]; do 101 | if [ $counter -eq 0 ]; then 102 | echo "Determining xmin" 103 | if [ $( bc <<< "${file_1_coord_array[$counter]} > ${file_2_coord_array[$counter]}" ) -eq 0 ]; then 104 | echo "file 1 has smaller xmin value" 105 | echo "Adding ${file_1_coord_array[$counter]}" 106 | echo 107 | xmin=${file_2_coord_array[$counter]} 108 | else 109 | echo "file 2 has smaller xmin value" 110 | echo "Adding ${file_2_coord_array[$counter]}" 111 | echo 112 | xmin=${file_1_coord_array[$counter]} 113 | fi 114 | elif [ $counter -eq 1 ]; then 115 | echo "Determining xmax" 116 | if [ $( bc <<< "${file_1_coord_array[$counter]} < ${file_2_coord_array[$counter]}" ) -eq 0 ]; then 117 | echo "file 1 has higher xmax value" 118 | echo "Adding file_2: ${file_1_coord_array[$counter]}" 119 | echo 120 | xmax=${file_2_coord_array[$counter]} 121 | else 122 | echo "file 2 has higher xmax value" 123 | echo "Adding file_1: ${file_2_coord_array[$counter]}" 124 | echo 125 | xmax=${file_1_coord_array[$counter]} 126 | fi 127 | elif [ $counter -eq 2 ]; then 128 | echo "Determining ymin" 129 | if [ $( bc <<< "${file_1_coord_array[$counter]} > ${file_2_coord_array[$counter]}" ) -eq 0 ]; then 130 | echo "file 1 has smaller ymin value" 131 | echo "Adding file_2: ${file_1_coord_array[$counter]}" 132 | echo 133 | ymin=${file_2_coord_array[$counter]} 134 | else 135 | echo "file 2 has smaller ymin value" 136 | echo "Adding file_1: ${file_2_coord_array[$counter]}" 137 | echo 138 | ymin=${file_1_coord_array[$counter]} 139 | fi 140 | elif [ $counter -eq 3 ]; then 141 | echo "Determining ymax" 142 | if [ $( bc <<< "${file_1_coord_array[$counter]} < ${file_2_coord_array[$counter]}" ) -eq 0 ]; then 143 | echo "file 1 has max value" 144 | echo "Adding file_2: ${file_1_coord_array[$counter]}" 145 
| echo 146 | ymax=${file_2_coord_array[$counter]} 147 | else 148 | echo "file 2 has max value" 149 | echo "Adding file_1: ${file_2_coord_array[$counter]}" 150 | echo 151 | ymax=${file_1_coord_array[$counter]} 152 | fi 153 | fi 154 | 155 | counter=$((counter+1)) 156 | done 157 | 158 | cut_filename_1="$filename_1-cut.grd" 159 | cut_filename_2="$filename_2-cut.grd" 160 | 161 | 162 | cd $output_PATH 163 | 164 | gmt grdcut $file_1 -G$tempdir_PATH/$cut_filename_1 -R$xmin/$xmax/$ymin/$ymax -V 165 | gmt grdcut $file_2 -G$tempdir_PATH/$cut_filename_2 -R$xmin/$xmax/$ymin/$ymax -V 166 | 167 | cd $tempdir_PATH 168 | gmt grdmath $cut_filename_2 $cut_filename_1 SUB = $diff_filename.grd -V 169 | 170 | 171 | if [ -f $5 ]; then 172 | # cd $output_PATH/Coherence-diffs 173 | echo; echo "Generating PNG and KML files ..."; echo 174 | mkdir -p $output_PATH/PNG+KML 175 | DX=$( gmt grdinfo $diff_filename.grd -C | cut -f8 ) 176 | DPI=$( gmt gmtmath -Q $DX INV RINT = ) 177 | gmt grdimage $diff_filename.grd \ 178 | -C$5 \ 179 | -Jx1id -P -Y2i -X2i -Q -V > $diff_filename.ps 180 | gmt psconvert $diff_filename.ps \ 181 | -W+k+t"$diff_filename" -E$DPI -TG -P -S -V -F$diff_filename.png 182 | # rm -f $diff_filename.ps grad.grd ps2raster* psconvert* 183 | mv *.kml $output_PATH/PNG+KML; mv *.png $output_PATH/PNG+KML 184 | else 185 | echo "Skipping generation of PNG and KML files." 186 | fi 187 | 188 | mv ${diff_filename}.grd $output_PATH 189 | cd .. 190 | 191 | # rm -r $tempdir_PATH 192 | 193 | 194 | fi 195 | -------------------------------------------------------------------------------- /lib/GMTSAR-mods/geocode_OSARIS.csh: -------------------------------------------------------------------------------- 1 | #!/bin/csh -f 2 | # $Id$ 3 | # 4 | # D. 
Sandwell FEB 10 2010 5 | # Kurt Feigl 20150811 add annotation to grd files 6 | # 7 | alias rm 'rm -f' 8 | unset noclobber 9 | # 10 | if ($#argv < 1) then 11 | errormessage: 12 | echo "" 13 | echo "Usage: geocode.csh correlation_threshold region_cut cut_to_aoi" 14 | echo "" 15 | echo " phase is masked when correlation is less than correlation_threshold" 16 | echo "" 17 | echo "Example: geocode.csh .12" 18 | echo "" 19 | exit 1 20 | endif 21 | # 22 | # first mask the phase and phase gradient using the correlation 23 | # 24 | 25 | set cut_to_aoi = $3 26 | 27 | gmt grdmath corr.grd $1 GE 0 NAN mask.grd MUL = mask2.grd -V 28 | gmt grdmath phase.grd mask2.grd MUL = phase_mask.grd 29 | if (-e xphase.grd) then 30 | gmt grdmath xphase.grd mask2.grd MUL = xphase_mask.grd 31 | gmt grdmath yphase.grd mask2.grd MUL = yphase_mask.grd 32 | endif 33 | if (-e unwrap.grd) then 34 | gmt grdcut mask2.grd `gmt grdinfo unwrap.grd -I-` -Gmask3.grd 35 | gmt grdmath unwrap.grd mask3.grd MUL = unwrap_mask.grd 36 | endif 37 | if (-e phasefilt.grd) then 38 | gmt grdmath phasefilt.grd mask2.grd MUL = phasefilt_mask.grd 39 | endif 40 | 41 | 42 | if (-e $2) then 43 | set lon_1 = `awk 'NR==1{ print $1 }' $2` 44 | set lon_2 = `awk 'NR==2{ print $1 }' $2` 45 | set lat_1 = `awk 'NR==1{ print $2 }' $2` 46 | set lat_2 = `awk 'NR==2{ print $2 }' $2` 47 | if (`echo "$lon_1 > $lon_2" | bc -l` == 1) then 48 | set lon_max = $lon_1 49 | set lon_min = $lon_2 50 | else 51 | set lon_max = $lon_2 52 | set lon_min = $lon_1 53 | endif 54 | if (`echo "$lat_1 > $lat_2" | bc -l` == 1) then 55 | set lat_max = $lat_1 56 | set lat_min = $lat_2 57 | else 58 | set lat_max = $lat_2 59 | set lat_min = $lat_1 60 | endif 61 | 62 | set cut_coords = $lon_min"/"$lon_max"/"$lat_min"/"$lat_max 63 | endif 64 | 65 | # 66 | # now reproject the phase to lon/lat space 67 | # 68 | echo "geocode.csh" 69 | echo "project correlation, phase, unwrapped and amplitude back to lon lat coordinates" 70 | set maker = $0:t 71 | set today = `date` 
72 | set remarked = `echo by $USER on $today with $maker` 73 | echo remarked is $remarked 74 | 75 | echo; echo "Projecting coherence to geographic coordinates" 76 | proj_ra2ll.csh trans.dat corr.grd corr_ll.grd 77 | if ($cut_to_aoi == 1) then 78 | gmt grdcut corr_ll.grd -Gcorr_ll.grd -R$cut_coords -V 79 | endif 80 | gmt grdedit -D//"dimensionless"/1///"$PWD:t geocoded correlation"/"$remarked" corr_ll.grd 81 | 82 | # proj_ra2ll.csh trans.dat phase.grd phase_ll.grd 83 | # gmt grdedit -D//"radians"/1///"$PWD:t wrapped phase"/"$remarked" phase_ll.grd 84 | 85 | echo; echo "Projecting filtered phase to geographic coordinates" 86 | proj_ra2ll.csh trans.dat phasefilt.grd phasefilt_ll.grd 87 | if ($cut_to_aoi == 1) then 88 | gmt grdcut phasefilt_ll.grd -Gphasefilt_ll.grd -R$cut_coords -V 89 | endif 90 | gmt grdedit -D//"radians"/1///"$PWD:t wrapped phase after filtering"/"$remarked" phasefilt_ll.grd 91 | 92 | echo; echo "Projecting masked phase to geographic coordinates" 93 | proj_ra2ll.csh trans.dat phase_mask.grd phase_mask_ll.grd 94 | if ($cut_to_aoi == 1) then 95 | gmt grdcut phase_mask_ll.grd -Gphase_mask_ll.grd -R$cut_coords -V 96 | endif 97 | gmt grdedit -D//"radians"/1///"$PWD:t wrapped phase after masking"/"$remarked" phase_mask_ll.grd 98 | 99 | echo; echo "Projecting amplitude to geographic coordinates" 100 | proj_ra2ll.csh trans.dat display_amp.grd display_amp_ll.grd 101 | if ($cut_to_aoi == 1) then 102 | gmt grdcut display_amp_ll.grd -Gdisplay_amp_ll.grd -R$cut_coords -V 103 | endif 104 | gmt grdedit -D//"dimensionless"/1///"$PWD:t amplitude"/"$remarked" display_amp_ll.grd 105 | 106 | if (-e xphase_mask.grd) then 107 | echo; echo "Projecting masked xphase to geographic coordinates" 108 | proj_ra2ll.csh trans.dat xphase_mask.grd xphase_mask_ll.grd 109 | if ($cut_to_aoi == 1) then 110 | gmt grdcut xphase_mask_ll.grd -Gxphase_mask_ll.grd -R$cut_coords -V 111 | endif 112 | gmt grdedit -D//"radians"/1///"$PWD:t xphase"/"$remarked" xphase_mask_ll.grd 113 | echo; echo 
"Projecting masked yphase to geographic coordinates"
114 | proj_ra2ll.csh trans.dat yphase_mask.grd yphase_mask_ll.grd
115 | if ($cut_to_aoi == 1) then
116 | gmt grdcut yphase_mask_ll.grd -Gyphase_mask_ll.grd -R$cut_coords -V
117 | endif
118 | gmt grdedit -D//"radians"/1///"$PWD:t yphase"/"$remarked" yphase_mask_ll.grd
119 | endif
120 | 
# Fix: the remark strings below used a literal "PWD:t ..." (missing '$'), unlike
# every earlier stanza which correctly embeds "$PWD:t ..." in the grid metadata.
121 | if (-e unwrap_mask.grd) then
122 | echo; echo "Projecting masked unwrapped phase to geographic coordinates"
123 | proj_ra2ll.csh trans.dat unwrap_mask.grd unwrap_mask_ll.grd
124 | if ($cut_to_aoi == 1) then
125 | gmt grdcut unwrap_mask_ll.grd -Gunwrap_mask_ll.grd -R$cut_coords -V
126 | endif
127 | gmt grdedit -D//"radians"/1///"$PWD:t unwrapped, masked phase"/"$remarked" unwrap_mask_ll.grd
128 | endif
129 | 
130 | if (-e unwrap.grd) then
131 | echo; echo "Projecting unwrapped phase to geographic coordinates"
132 | proj_ra2ll.csh trans.dat unwrap.grd unwrap_ll.grd
133 | if ($cut_to_aoi == 1) then
134 | gmt grdcut unwrap_ll.grd -Gunwrap_ll.grd -R$cut_coords -V
135 | endif
136 | gmt grdedit -D//"radians"/1///"$PWD:t unwrapped phase"/"$remarked" unwrap_ll.grd
137 | endif
138 | 
139 | if (-e phasefilt_mask.grd) then
140 | echo; echo "Projecting filtered masked phase to geographic coordinates"
141 | proj_ra2ll.csh trans.dat phasefilt_mask.grd phasefilt_mask_ll.grd
142 | if ($cut_to_aoi == 1) then
143 | gmt grdcut phasefilt_mask_ll.grd -Gphasefilt_mask_ll.grd -R$cut_coords -V
144 | endif
145 | gmt grdedit -D//"phase in radians"/1///"$PWD:t wrapped phase masked filtered"/"$remarked" phasefilt_mask_ll.grd
146 | endif
147 | 
148 | if (-e con_comp.grd) then
149 | echo; echo "Projecting Snaphu connected components to geographic coordinates"
150 | proj_ra2ll.csh trans.dat con_comp.grd con_comp_ll.grd
151 | if ($cut_to_aoi == 1) then
152 | gmt grdcut con_comp_ll.grd -Gcon_comp_ll.grd -R$cut_coords -V
153 | endif
154 | gmt grdedit -D//"dimensionless"/1///"$PWD:t connected components"/"$remarked" con_comp_ll.grd
155 | endif
156 | 
157 | 
-------------------------------------------------------------------------------- /lib/check-queue.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | convertsecs() { 4 | ((h=${1}/3600)) 5 | ((m=(${1}%3600)/60)) 6 | ((s=${1}%60)) 7 | printf "%02d:%02d:%02d\n" $h $m $s 8 | } 9 | 10 | start_monitor=`date +%s` 11 | 12 | if [ $# -eq 1 ]; then 13 | name=$1 14 | duration=10 15 | display_gfx=0 16 | 17 | elif [ $# -eq 2 ]; then 18 | name=$1 19 | duration=$2 20 | display_gfx=0 21 | 22 | elif [ $# -eq 3 ]; then 23 | name=$1 24 | duration=$2 25 | display_gfx=$3 26 | 27 | else 28 | echo "Warning: No parameters provided to identify job in SLURM queue." 29 | name=$( whoami | cut -c1-8 ) 30 | echo "Trying by username $name" 31 | duration=60 32 | fi 33 | 34 | # jq=$( squeue -o "%.35j %.7i %.8u %.2t %.10M %.6D %R " | awk -F_ 'NR > 1 {printf"%s\n", $1}' | awk 'NR==1{ print $1}' ) 35 | 36 | echo 37 | echo "Waiting for SLURM jobs to finish. This may take a while." 38 | echo "Use the 'squeue' command to see the Slurm queue." 39 | echo "Use 'tail -f ' to monitor processing logs in the Log directory." 40 | echo 41 | 42 | if [ $display_gfx -eq 1 ]; then 43 | case "$(( ( RANDOM % 4 ) + 1 ))" in 44 | "1") 45 | echo 46 | echo 47 | echo " //\\" 48 | echo " V \\" 49 | echo " \\ \\_" 50 | echo " \\,'.\`-." 51 | echo " |\\ \\\`. \`. " 52 | echo " ( \ \`. \`-. _,.-:\\" 53 | echo " \ \ \`. \`-._ __..--' ,-';/" 54 | echo " \\ \`. \`-. \`-..___..---' _.--' ,'/" 55 | echo " \`. \`. \`-._ __..--' ,' /" 56 | echo " \`. \`-_ \`\`--..'' _.-' ,'" 57 | echo " \`-_ \`-.___ __,--' ,'" 58 | echo " \`-.__ \`----\"\"\" __.-'" 59 | echo " \`--..____..--'" 60 | echo 61 | ;; 62 | "2") 63 | echo 64 | echo 65 | echo " //" 66 | echo " //" 67 | echo " //" 68 | echo " //" 69 | echo " _______||" 70 | echo " ,-''' ||\`-." 
71 | echo " ( || )" 72 | echo " |\`-..._______,..-'|" 73 | echo " | || |" 74 | echo " | _______|| |" 75 | echo " |,-'''_ _ ~ ||\`-.|" 76 | echo " | ~ / \`-.\ ,-\' ~|" 77 | echo " |\`-...___/___,..-'|" 78 | echo " | \`-./-'_ \/_| |" 79 | echo " | -' ~~ || -.|" 80 | echo " ( ~ ~ ~~ )" 81 | echo " \`-..._______,..-'" 82 | echo 83 | ;; 84 | 85 | "3") 86 | echo 87 | echo 88 | echo 89 | echo " ___ ___ ___ ___ ___.---------------." 90 | echo " .'\__\'\__\'\__\'\__\'\__,\` . ____ ___ \ " 91 | echo " |\/ __\/ __\/ __\/ __\/ _:\ |\`. \ \___ \ " 92 | echo " \\\\'\__\'\__\'\__\'\__\'\_\`.__|\"\"\`. \ \___ \ " 93 | echo " \\\\/ __\/ __\/ __\/ __\/ _: \ " 94 | echo " \\\\'\__\'\__\'\__\ \__\'\_;-----------------\`" 95 | echo " \\\\/ \/ \/ \/ \/ : |" 96 | echo " \|______________________;________________| " 97 | echo 98 | ;; 99 | "4") 100 | echo 101 | echo 102 | echo " (" 103 | echo " ) ( " 104 | echo " ___...(-------)-....___ " 105 | echo " .-\"\" ) ( \"\"-. " 106 | echo " .-'``'|-._ ) _.-| " 107 | echo " / .--.| \`\"\"---...........---\"\"\` | " 108 | echo " / / | | " 109 | echo " | | | | " 110 | echo " \ \ | | " 111 | echo " \`\ \`\ | | " 112 | echo " \`\ \`| | " 113 | echo " _/ /\\ / " 114 | echo " (__/ \\ / " 115 | echo " _..---\"\"\` \\ /\`\"\"---.._ " 116 | echo " .-' \\ / '-. " 117 | echo " : \`-.__ __.-' : " 118 | echo " : ) \"\"---...---\"\" ( : " 119 | echo " '._ \`\"--...___...--\"\` _.' " 120 | echo " \\\"\"--..__ __..--\"\"/ " 121 | echo " '._ \"\"\"----.....______.....----\"\"\" _.' " 122 | echo " \`\"\"--..,,_____ _____,,..--\"\"\` " 123 | echo " \`\"\"\"----\"\"\"\` " 124 | echo 125 | ;; 126 | esac 127 | fi 128 | 129 | while true; do 130 | nq=$( squeue -o "%.35j %.7i %.8u %.2t %.10M %.6D %R " | grep $name | wc -l ) 131 | npr=$( squeue -o "%.35j %.7i %.8u %.2t %.10M %.6D %R " | grep $name | awk 'NR==1{print $1}' ) 132 | 133 | if [ $nq -eq 0 ]; then 134 | echo 135 | echo "SLURM jobs finished. Continuing with next processing step." 
136 | echo 137 | break 138 | else 139 | #squeue_status=$(squeue -n $name) 140 | #echo $squeue_status 141 | #echo 142 | 143 | time_now=`date +%s` 144 | runtime_seconds=$((time_now-start_monitor)) 145 | runtime_formated=$(convertsecs $runtime_seconds) 146 | echo "Running for $runtime_formated and still $nq jobs left in the queue." 147 | 148 | 149 | # squeue -o "%.20j %.7i %.8u %.2t %.10M %.6D %R " 150 | 151 | sleep ${duration}s 152 | 153 | #nlines=$( echo $squeue_status | wc -l) #Calculate number of lines for the output previously printed 154 | #for (( i=0; i <= $(($LINES)); i++ ));do #For each line printed as a result of "timedatectl" 155 | tput cuu1 #Move cursor up by one line 156 | tput el #Clear the line 157 | #done 158 | fi 159 | done 160 | -------------------------------------------------------------------------------- /modules/summary_pdf/PP-summary-pdf.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | start=`date +%s` 4 | 5 | config_file=$1 6 | work_PATH=$2 7 | pair_id=$3 8 | 9 | echo "Config file: $config_file" 10 | echo "Work path: $work_PATH" 11 | echo "Pair ID: $pair_id" 12 | 13 | source $config_file 14 | 15 | # Convert dataset configuration to arrays 16 | labels=( "$LABEL_1" "$LABEL_2" "$LABEL_3" "$LABEL_4" ) 17 | directories=( "$DIRECTORY_1" "$DIRECTORY_2" "$DIRECTORY_3" "$DIRECTORY_4" ) 18 | histeqs=( "$HIST_EQ_1" "$HIST_EQ_2" "$HIST_EQ_3" "$HIST_EQ_4" ) 19 | cpts=( $CPT_1 $CPT_2 $CPT_3 $CPT_4 ) 20 | ranges=( $RANGE_1 $RANGE_2 $RANGE_3 $RANGE_4 ) 21 | show_suppls=( $SHOW_SUPPL_1 $SHOW_SUPPL_2 $SHOW_SUPPL_3 $SHOW_SUPPL_4 ) 22 | 23 | dem_grd_hs="$work_PATH/Summary/hillshade.grd" 24 | CPDFS_dem="$work_PATH/Summary/CPDFS_dem.grd" 25 | CPDFS_dem_HS="$work_PATH/Summary/CPDFS_dem_HS.grd" 26 | 27 | ps_base="$work_PATH/Summary/${pair_id}-grd" 28 | histeq_base="$work_PATH/Summary/${pair_id}-hiq" 29 | pdf_merged="$work_PATH/Summary/${pair_id}-combined.pdf" 30 | 
pdf_merged_ROT90=${pdf_merged::-4}_rot90.png 31 | 32 | 33 | # Set GMT parameters 34 | gmt gmtset MAP_FRAME_PEN 3 35 | gmt gmtset MAP_FRAME_WIDTH 0.1 36 | gmt gmtset MAP_FRAME_TYPE plain 37 | gmt gmtset FONT_TITLE Helvetica-Bold 38 | gmt gmtset FONT_LABEL Helvetica-Bold 14p 39 | gmt gmtset PS_PAGE_ORIENTATION landscape 40 | gmt gmtset PS_MEDIA A4 41 | gmt gmtset FORMAT_GEO_MAP D 42 | gmt gmtset MAP_DEGREE_SYMBOL degree 43 | gmt gmtset PROJ_LENGTH_UNIT cm 44 | 45 | 46 | 47 | if [ ! -f "$pdf_merged_ROT90" ]; then 48 | 49 | GRD_FAIL=( 0 0 0 0 ) 50 | 51 | for counter in 0 1 2 3; do 52 | echo "Preparing ${labels[$counter]} ..." 53 | echo "Searching for files in ${directories[$counter]}" 54 | cpt_files[$counter]="$work_PATH/Summary/grd_${counter}_color.cpt" 55 | cd ${directories[$counter]} 56 | ls_result=$( ls ${pair_id}*.grd ) 57 | echo "Found file $ls_result" 58 | if [ -f $ls_result ]; then 59 | GRD[$counter]="${directories[$counter]}/$ls_result" 60 | echo "${labels[$counter]} file found: ${GRD[$counter]}" 61 | 62 | echo "histeqs $counter : ${histeqs[$counter]}" 63 | 64 | if [ ${histeqs[$counter]} -eq "1" ]; then 65 | if [ ! -f ${histeq_base}-$counter.grd ]; then 66 | echo; echo "Calculate histogram equalization for ${GRD[$counter]}" 67 | gmt grdhisteq ${GRD[$counter]} -G${histeq_base}-$counter.grd -N -V 68 | gmt grd2cpt -E15 ${histeq_base}-$counter.grd -C${cpts[$counter]} -V > $work_PATH/Summary/grd_${counter}.cpt 69 | else 70 | echo; echo "${labels[$counter]} histogram exists, skipping ..."; echo 71 | fi 72 | fi 73 | else 74 | echo "No ${labels[$counter]} file found" 75 | GRD_FAIL[$counter]=1 76 | GRD_MESSAGE[$counter]="No ${labels[$counter]} file" 77 | fi 78 | done 79 | 80 | 81 | 82 | cd $work_PATH/Summary 83 | 84 | SCALE=18 85 | XSTEPS=0.5 86 | YSTEPS=0.5 87 | 88 | if [ ! -e $pdf_merged ]; then 89 | for counter in 0 1 2 3; do 90 | if [ ! 
-e ${ps_base}-$counter.ps ]; then
91 | echo; echo "Creating ${labels[$counter]} in ${ps_base}-$counter.ps"
92 | TITLE="${labels[$counter]} ${master_date}"  # fix: was literal "{master_date}" (missing '$'), cf. line 140
93 | if [ ! "${GRD_FAIL[$counter]}" -eq 1 ]; then
94 | if [ ${histeqs[$counter]} -eq 1 ]; then
95 | echo; echo "${labels[$counter]}: ${histeq_base}-$counter.grd"; echo
96 | gmt grdimage ${histeq_base}-$counter.grd \
97 | -C$work_PATH/Summary/grd_${counter}.cpt -R$AOI_REGION -JM$SCALE -B+t"$TITLE" -Q \
98 | -Bx$XSTEPS -By$YSTEPS -V -K -Yc -Xc > ${ps_base}-$counter.ps
99 | else
100 | gmt grdimage ${GRD[$counter]} \
101 | -C${cpt_files[$counter]} -R$AOI_REGION -JM$SCALE -B+t"$TITLE" -Q \
102 | -Bx$XSTEPS -By$YSTEPS -V -K -Yc -Xc > ${ps_base}-$counter.ps
103 | fi
104 | 
105 | if [ ${show_suppls[$counter]} -eq 1 ]; then
106 | for vector_file in ${vector_files[@]}; do
107 | style_name=${vector_file}_style
108 | vector_style=$( echo "${!style_name}" | tr -d "'" )
109 | gmt psxy $vector_style -JM$SCALE -R$AOI_REGION ${!vector_file::-4}.gmt -O -K -V >> ${ps_base}-$counter.ps
110 | done
111 | fi
112 | else
113 | gmt grdimage $CPDFS_dem_HS -C#ffffff,#eeeeee \
114 | -R$AOI_REGION -JM$SCALE -B+t"${GRD_MESSAGE[$counter]}" -Q -Bx$XSTEPS -By$YSTEPS -V -K -Yc -Xc > ${ps_base}-$counter.ps
115 | 
116 | 
117 | if [ $page_orientation -eq 1 ]; then
118 | convert -density $resolution -fill red -pointsize 18 -gravity center \
119 | -trim -verbose label:"${GRD_MESSAGE[$counter]}" \
120 | ${ps_base}-$counter.ps -quality 100 ${ps_base}-$counter.ps
121 | else
122 | convert -rotate 90 -density $resolution -fill red -pointsize 18 -gravity center \
123 | -trim -verbose label:"${GRD_MESSAGE[$counter]}" \
124 | ${ps_base}-$counter.ps -quality 100 ${ps_base}-$counter.ps
125 | fi
126 | fi
127 | 
128 | if [ $page_orientation -eq 1 ]; then
129 | convert -verbose -density $resolution -trim ${ps_base}-$counter.ps -quality 100 ${ps_base}-$counter.png
130 | else
131 | convert -verbose -rotate 90 -density $resolution -trim ${ps_base}-$counter.ps -quality 100 
${ps_base}-$counter.png
132 | fi
133 | else
134 | echo; echo "${labels[$counter]} in ${ps_base}-$counter.ps exists, skipping ..."
135 | fi
136 | done
137 | 
138 | 
139 | echo "Merging PS into $pdf_merged_ROT90"
140 | take_diff=$(( ($(date --date="$slave_date" +%s) - $(date --date="$master_date" +%s) )/(60*60*24) ))
141 | if [ "$page_orientation" -eq 1 ]; then
142 | montage ${ps_base}-0.png ${ps_base}-1.png ${ps_base}-2.png ${ps_base}-3.png \
143 | -rotate 90 -geometry +100+150 -density $resolution -title "${pair_id} (${take_diff} days)" \
144 | -quality 100 -tile 4x1 -mode concatenate -verbose $pdf_merged_ROT90
145 | else
146 | montage -tile 1x4 -geometry +20+30 \
147 | ${ps_base}-0.png ${ps_base}-1.png ${ps_base}-2.png ${ps_base}-3.png \
148 | -title "${pair_id} (${take_diff} days)" \
149 | -density $resolution -quality 100 -mode concatenate -verbose $pdf_merged_ROT90
150 | fi
151 | 
152 | 
153 | if [ "$clean_up" -ge 1 ]; then
154 | rm ${ps_base}-*.ps  # fix: was "${ps_base}_*.ps" — files are created as "${ps_base}-$counter.ps", so cleanup never matched
155 | rm ${ps_base}-*.png
156 | fi
157 | 
158 | if [ "${histeq_base}-$counter.grd" != "$CPDFS_dem_HS" ]; then
159 | rm ${histeq_base}-*.grd
160 | fi
161 | fi
162 | else
163 | echo "File $pdf_merged_ROT90 exists, skipping ..."
164 | fi 165 | 166 | 167 | if [ -f $pdf_merged_ROT90 ]; then status_SUMMARY=1; else status_SUMMARY=0; fi 168 | 169 | end=`date +%s` 170 | runtime=$((end-start)) 171 | 172 | echo "${pair_id:0:8} ${pair_id:10:8} $SLURM_JOB_ID $runtime $status_SUMMARY" >> $output_PATH/Reports/PP-SUMMARY-stats.tmp 173 | 174 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($runtime/86400)) $(($runtime%86400/3600)) $(($runtime%3600/60)) $(($runtime%60)) 175 | 176 | 177 | -------------------------------------------------------------------------------- /modules/prep_arctic_dem/add_single_col.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ###################################################################### 4 | # 5 | # Module to prepare ArcitcDEM data for OSARIS 6 | # 7 | # Activate the module in the main config file using the preprocessing 8 | # hook. Alternatively, run in standalone mode from the module folder. 9 | # Usage: prep_arctic_dem.sh path_to_config_file 10 | # 11 | # The config file must be located within the 'config' folder of OSARIS. 12 | # Get started by copying the template configuration file from the module 13 | # folder to the OSARIS config folder and fit it to your needs. 14 | # 15 | # Further information on ArcticDEM and a shapefile with tile numbers 16 | # are available at https://www.pgc.umn.edu/data/arcticdem/ 17 | # 18 | # David Loibl, 2017 19 | # 20 | ##################################################################### 21 | 22 | start=`date +%s` 23 | 24 | function batch_untar { 25 | if [ $# -lt 3 ]; then 26 | echo "Wrong parameter count for batch untar." 
27 | exit 4 28 | fi 29 | 30 | i=$1 31 | j=$2 32 | dem_output_PATH=$3 33 | 34 | dirname=${i}_$j 35 | echo $dirname 36 | if [ -d "$dirname" ]; then 37 | echo "Processing data in directory ${i}_$j" 38 | cd $dirname 39 | for tar_file in $( ls *.tar* ); do 40 | case "$tar_file" in 41 | *.gz | *.tgz ) 42 | # it's gzipped 43 | tar -xzvf $tar_file -C $dem_output_PATH/temp/ --wildcards --no-anchored '*dem.tif' 44 | ;; 45 | *) 46 | tar -xvf $tar_file -C $dem_output_PATH/temp/ --wildcards --no-anchored '*dem.tif' 47 | # it's not 48 | ;; 49 | esac 50 | done 51 | cd .. 52 | else 53 | echo "Directory ${i}_$j does not exist. Skipping ..." 54 | fi 55 | } 56 | 57 | if [ $# -eq 0 ]; then 58 | echo 59 | echo "Usage: prep_artic_dem.sh path_to_config_file" 60 | echo 61 | exit 1 62 | elif [ ! -f "$1/prep_arctic_dem.config" ]; then 63 | echo 64 | echo "Cannot open $1/prep_arctic_dem.config. Please provide a valid config file in the OSARIS config folder." 65 | echo 66 | exit 2 67 | else 68 | 69 | echo; echo "Processing ArcticDEM" 70 | config_PATH=$1 71 | 72 | source $config_PATH/prep_arctic_dem.config 73 | 74 | # Test if configuration is valid 75 | if [ ! -d $input_mosaic_PATH ]; then 76 | echo "Input folder $input_MOSAIC not found. Exiting." 77 | exit 3 78 | fi 79 | 80 | rm -r $dem_output_PATH/temp; mkdir -p $dem_output_PATH/temp 81 | 82 | cd $dem_output_PATH 83 | if [ -f "merged_dem.tif" ]; then 84 | echo "File 'merged_dem.tif' already exists. Overwrite? (y/n)" 85 | read delete_file 86 | if [ "$delete_file" == "y" ]; then 87 | rm merged_dem.tif 88 | echo "Overwriting 'merged_dem.tif' ..." 89 | elif [ "$delete_file" == "n" ]; then 90 | echo "Please rename/move merged_dem.tif and restart the script." 91 | exit 1 92 | else 93 | echo "Choose 'y' (yes) or 'n' (no). It's not this hard, is it?" 
94 | exit 1
95 | fi
96 | fi
97 | 
98 | 
99 | 
100 | cd $input_mosaic_PATH
101 | 
102 | i=$row_min
103 | while [ "$i" -le "$row_max" ]; do
104 | j=$col_min
105 | while [ "$j" -le "$col_max" ]; do
106 | # untar files to target folder
107 | 
108 | batch_untar $i $j $dem_output_PATH
109 | 
110 | j=$(( $j + 1 ))
111 | done
112 | 
113 | # Process single_cols if defined in config
114 | if [ -n "${single_col+x}" ]; then  # fix: was '-z ${single_col+x}', which fired only when single_col was UNSET — opposite of the comment above
115 | col2add=${single_col:0:2}
116 | batch_untar $i $col2add $dem_output_PATH
117 | fi
118 | 
119 | i=$(( $i +1 ))
120 | done
121 | 
122 | cd $dem_output_PATH/temp
123 | k=0
124 | 
125 | i=$row_min
126 | prev_row=-1
127 | while [ "$i" -le "$row_max" ]; do
128 | j=$col_min
129 | while [ "$j" -le "$col_max" ]; do
130 | # Check if a new row was started. If true, append previous row to merged DEM.
131 | if [ "$i" -gt "$prev_row" ]; then
132 | if [ ! -f "../merged_dem.grd" ]; then
133 | echo; echo "Row $prev_row completed."; echo
134 | cp merged_dem_row${prev_row}.grd ../merged_dem.grd
135 | else
136 | echo; echo "Row $prev_row completed. Merging to DEM ..."; echo
137 | gmt grdpaste merged_dem_row${prev_row}.grd ../merged_dem.grd -G../merged_dem.grd -V
138 | fi
139 | fi
140 | 
141 | echo "Current row: $i"
142 | echo "Current col: $j"
143 | gmt grdsample ${i}_${j}_1_1_5m_v2.0_reg_dem.tif -I${scale_factor} -G${i}_${j}_1_1_dem3.grd -V
144 | gmt grdsample ${i}_${j}_1_2_5m_v2.0_reg_dem.tif -I${scale_factor} -G${i}_${j}_1_2_dem3.grd -V
145 | gmt grdsample ${i}_${j}_2_1_5m_v2.0_reg_dem.tif -I${scale_factor} -G${i}_${j}_2_1_dem3.grd -V
146 | gmt grdsample ${i}_${j}_2_2_5m_v2.0_reg_dem.tif -I${scale_factor} -G${i}_${j}_2_2_dem3.grd -V
147 | gmt grdpaste ${i}_${j}_1_1_dem3.grd ${i}_${j}_1_2_dem3.grd -Gtemp_merge_1.grd -V
148 | gmt grdpaste ${i}_${j}_2_1_dem3.grd ${i}_${j}_2_2_dem3.grd -Gtemp_merge_2.grd -V
149 | if [ ! 
-f "merged_dem_row${i}.grd" ]; then
150 | gmt grdpaste temp_merge_1.grd temp_merge_2.grd -Gmerged_dem_row${i}.grd -V
151 | else
152 | gmt grdpaste temp_merge_1.grd temp_merge_2.grd -Gtemp_merge_${i}_${j}.grd -V
153 | gmt grdpaste temp_merge_${i}_${j}.grd merged_dem_row${i}.grd -Gmerged_dem_row${i}.grd -V
154 | fi
155 | 
156 | # rm temp_merge_*
157 | prev_row=$i
158 | 
159 | j=$(( $j + 1 ))
160 | done
161 | 
162 | if [ -n "${single_col+x}" ]; then  # fix: was '-z ${single_col+x}', which ran this branch only when single_col was UNSET
163 | 
164 | col2add=${single_col:0:2}
165 | line=${single_col:3:1}
166 | 
167 | gmt grdsample ${i}_${col2add}_${line}_1_5m_v2.0_reg_dem.tif -I${scale_factor} -G${i}_${col2add}_${line}_1_dem3.grd -V
168 | gmt grdsample ${i}_${col2add}_${line}_2_5m_v2.0_reg_dem.tif -I${scale_factor} -G${i}_${col2add}_${line}_2_dem3.grd -V
169 | gmt grdpaste ${i}_${col2add}_${line}_1_dem3.grd ${i}_${col2add}_${line}_2_dem3.grd -Gtemp_merge_singlecol.grd -V
170 | 
171 | gmt grdpaste temp_merge_singlecol.grd merged_dem_singlecol.grd -Gmerged_dem_singlecol.grd -V
172 | fi
173 | 
174 | 
175 | if [ "$i" -eq "$row_max" ]; then
176 | # Last row reached, put everything together ...
177 | 
178 | 
179 | echo; echo "Row $i completed. Merging to DEM ..."; echo
180 | gmt grdpaste merged_dem_row${row_max}.grd ../merged_dem.grd -G../merged_dem.grd -V
181 | 
182 | if [ -n "${single_col+x}" ]; then  # fix: was '-z ${single_col+x}' — same inverted test as line 162
183 | # If single_col active, merge that, too ...
184 | 
185 | gmt grdpaste merged_dem_singlecol.grd ../merged_dem.grd -G../merged_dem.grd -V
186 | fi
187 | 
188 | cd .. 
189 | gmt grdproject merged_dem.grd -I -Js-45/90/70/1:1 -C -F -Gdem.grd -V
190 | 
191 | echo; echo
192 | echo "Cleaning up"
193 | rm -r temp
194 | rm merged_dem.grd
195 | echo; echo
196 | 
197 | end=`date +%s`
198 | 
199 | runtime=$((end-start))
200 | 
201 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n' $(($runtime/86400)) $(($runtime%86400/3600)) $(($runtime%3600/60)) $(($runtime%60))
202 | echo
203 | 
204 | exit 0  # fix: was 'exit 1' — this is the success path (runs right after the "Processing finished" report)
205 | else
206 | i=$(( $i +1 ))
207 | fi
208 | done
209 | 
210 | fi
211 | 
212 | 
213 | 
--------------------------------------------------------------------------------
/modules/mask_unwrapping_errors/mask_unwrapping_errors.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | 
3 | ######################################################################
4 | #
5 | # Mask unwrapping errors based on comparison of forward to reverse
6 | # pairs of unwrapped interferograms.
7 | #
8 | # Requires activation of reverse interferogram processing in the main
9 | # configuration file.
10 | #
11 | # David Loibl, 2018
12 | #
13 | #####################################################################
14 | 
15 | module_name="mask_unwrapping_errors"
16 | 
17 | if [ -z $module_config_PATH ]; then
18 | echo "Parameter module_config_PATH not set in main config file. Setting to default:"
19 | echo " $OSARIS_PATH/config"
20 | module_config_PATH="$OSARIS_PATH/config"
21 | elif [[ "$module_config_PATH" != /* ]] && [[ "$module_config_PATH" != "$OSARIS_PATH"* ]]; then
22 | module_config_PATH="${OSARIS_PATH}/config/${module_config_PATH}"
23 | fi
24 | 
25 | if [ ! -d "$module_config_PATH" ]; then
26 | echo "ERROR: $module_config_PATH is not a valid directory. Check parameter module_config_PATH in main config file. Exiting ..."
27 | exit 2
28 | fi
29 | 
30 | if [ ! -f "${module_config_PATH}/${module_name}.config" ]; then
31 | echo
32 | echo "Cannot open ${module_name}.config in ${module_config_PATH}. 
Please provide a valid config file." 33 | echo 34 | else 35 | # Start runtime timer 36 | module_start=`date +%s` 37 | 38 | # Include the config file 39 | source ${module_config_PATH}/${module_name}.config 40 | 41 | 42 | # Read attributes and setup environment 43 | 44 | MUE_work_PATH="$work_PATH/Mask-unwrapping-errors" 45 | MUE_output_PATH="$output_PATH/Masked-unwrapping-errors" 46 | 47 | mkdir -p $MUE_output_PATH 48 | mkdir -p $MUE_work_PATH 49 | 50 | echo; echo "PATHES:" 51 | echo "MUE_input_PATH: $MUE_input_PATH" 52 | echo "MUE_fwdrev_sums_PATH: $MUE_fwdrev_sums_PATH" 53 | echo "MUE_work_PATH: $MUE_work_PATH" 54 | echo "MUE_output_PATH: $MUE_output_PATH" 55 | 56 | echo; echo "MUE_threshold: $MUE_threshold" 57 | echo 58 | 59 | cd "$MUE_fwdrev_sums_PATH" 60 | 61 | unwrp_sums_grds=($( ls *.grd )) 62 | 63 | for grd_file in ${unwrp_sums_grds[@]}; do 64 | 65 | echo; echo "Now working on $grd_file .." 66 | # Cut unwrapped interferogram and masked fwd-rev-unwrapping sum to same extent 67 | 68 | cd "$MUE_input_PATH" 69 | echo "Searching for: ${grd_file:0:8}--${grd_file:10:8}*.grd" 70 | echo "in directory $MUE_input_PATH" 71 | echo; echo "FWD-REV PATH: $MUE_fwdrev_sums_PATH" 72 | 73 | input_match=$( ls ${grd_file:0:8}--${grd_file:10:8}*.grd ) 74 | 75 | if [ ! -f "$input_match" ]; then 76 | echo "No matching unwrapped interferogram found for ${grd_file}. Skipping ..." 
77 | else 78 | cd "$MUE_fwdrev_sums_PATH" 79 | # $MUE_threshold 80 | 81 | # Create the mask basing on thresholds of deviation in fwd-rev unwrapping sums 82 | gmt grdmath ${MUE_fwdrev_sums_PATH}/${grd_file} -$MUE_threshold LT 1 NAN = ${MUE_work_PATH}/${grd_file::-4}-min-masked.grd -V 83 | gmt grdmath ${MUE_fwdrev_sums_PATH}/${grd_file} $MUE_threshold GT 1 NAN = ${MUE_work_PATH}/${grd_file::-4}-max-masked.grd -V 84 | # gmt grdmath ${MUE_work_PATH}/${grd_file::-4}-min-masked.grd GT 1 NAN = ${MUE_work_PATH}/${grd_file::-4}-masked.grd -V 85 | gmt grdmath ${MUE_work_PATH}/${grd_file::-4}-min-masked.grd ${MUE_work_PATH}/${grd_file::-4}-max-masked.grd ADD = ${MUE_work_PATH}/${grd_file::-4}-masked.grd -V 86 | 87 | file_1="${MUE_work_PATH}/${grd_file::-4}-masked.grd" 88 | file_2="$MUE_input_PATH/$input_match" 89 | 90 | file_1_extent=$( gmt grdinfo -I- $file_1 ); file_1_extent=${file_1_extent:2} 91 | file_2_extent=$( gmt grdinfo -I- $file_2 ); file_2_extent=${file_2_extent:2} 92 | 93 | file_1_coord_string=$( echo $file_1_extent | tr "/" "\n") 94 | file_2_coord_string=$( echo $file_2_extent | tr "/" "\n") 95 | 96 | echo; echo "File 1 coordinate string: " 97 | echo "$file_1_coord_string" 98 | echo; echo "File 2 coordinate string: " 99 | echo "$file_2_coord_string" 100 | 101 | # Create arrays of coordinates for each dataset 102 | counter=0 103 | for coord in $file_1_coord_string; do 104 | file_1_coord_array[$counter]=$coord 105 | counter=$((counter+1)) 106 | done 107 | 108 | counter=0 109 | for coord in $file_2_coord_string; do 110 | file_2_coord_array[$counter]=$coord 111 | counter=$((counter+1)) 112 | done 113 | 114 | 115 | # Determine overal max and min values for both datasets 116 | 117 | remainder=$( expr $counter % 2 ) 118 | 119 | counter=0 120 | while [ $counter -lt 4 ]; do 121 | if [ $counter -eq 0 ]; then 122 | # Determining xmin 123 | if [ $( echo "${file_1_coord_array[$counter]} > ${file_2_coord_array[$counter]}" | bc -l ) -eq 0 ]; then 124 | 
xmin=${file_2_coord_array[$counter]} 125 | else 126 | xmin=${file_1_coord_array[$counter]} 127 | fi 128 | elif [ $counter -eq 1 ]; then 129 | # Determining xmax 130 | if [ $( echo "${file_1_coord_array[$counter]} < ${file_2_coord_array[$counter]}" | bc -l ) -eq 0 ]; then 131 | xmax=${file_2_coord_array[$counter]} 132 | else 133 | xmax=${file_1_coord_array[$counter]} 134 | fi 135 | elif [ $counter -eq 2 ]; then 136 | # Determining ymin 137 | if [ $( echo "${file_1_coord_array[$counter]} > ${file_2_coord_array[$counter]}" | bc -l ) -eq 0 ]; then 138 | ymin=${file_2_coord_array[$counter]} 139 | else 140 | ymin=${file_1_coord_array[$counter]} 141 | fi 142 | elif [ $counter -eq 3 ]; then 143 | # Determining ymax 144 | if [ $( echo "${file_1_coord_array[$counter]} < ${file_2_coord_array[$counter]}" | bc -l ) -eq 0 ]; then 145 | ymax=${file_2_coord_array[$counter]} 146 | else 147 | ymax=${file_1_coord_array[$counter]} 148 | fi 149 | fi 150 | 151 | counter=$((counter+1)) 152 | done 153 | 154 | echo "Minimum boundary box coordinates (xmin/xmax/ymin/ymax):" 155 | echo "$xmin/$xmax/$ymin/$ymax" 156 | 157 | gmt grdsample ${MUE_input_PATH}/${input_match} -G${MUE_work_PATH}/${input_match::-4}-cut.grd -R$xmin/$xmax/$ymin/$ymax `gmt grdinfo -I ${MUE_work_PATH}/${grd_file::-4}-masked.grd` -V 158 | gmt grdsample ${MUE_work_PATH}/${grd_file::-4}-masked.grd -G${MUE_work_PATH}/${grd_file::-4}-cut.grd -R$xmin/$xmax/$ymin/$ymax `gmt grdinfo -I ${MUE_work_PATH}/${input_match::-4}-cut.grd` -V 159 | gmt grdmath ${MUE_work_PATH}/${input_match::-4}-cut.grd ${MUE_work_PATH}/${grd_file::-4}-cut.grd ADD = ${MUE_output_PATH}/${input_match::-4}-masked.grd -V 160 | fi 161 | 162 | done 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | # Stop runtime timer and print runtime 172 | module_end=`date +%s` 173 | module_runtime=$((module_end-module_start)) 174 | 175 | echo 176 | printf 'Processing finished in %02dd %02dh:%02dm:%02ds\n\n' \ 177 | $(($module_runtime/86400)) \ 178 | 
$(($module_runtime%86400/3600)) \ 179 | $(($module_runtime%3600/60)) \ 180 | $(($module_runtime%60)) 181 | echo 182 | fi 183 | --------------------------------------------------------------------------------