├── LICENSE
├── README.md
├── docs
└── images
│ ├── PyInSAR_logo1.png
│ ├── PyInSAR_logo2.png
│ ├── PyInSAR_logo3.png
│ ├── PyInSAR_logos.pdf
│ └── pyinsar_logo315x83.png
├── pyinsar
├── __init__.py
├── data_import
│ ├── __init__.py
│ ├── import_georaster.py
│ ├── import_raster.py
│ ├── import_srcmod.py
│ ├── import_utils.py
│ ├── sentinel.py
│ └── uavsar.py
├── docs
│ └── pyinsar_doxygen.pdf
├── dox.cfg
├── fix_latex.sh
├── get_file_list.sh
├── output
│ ├── __init__.py
│ ├── export_georaster.py
│ ├── plot_3d_vectors.py
│ └── plot_raster.py
├── processing
│ ├── __init__.py
│ ├── corrections
│ │ ├── __init__.py
│ │ ├── topography.py
│ │ └── troposphere.py
│ ├── data_fetcher
│ │ ├── __init__.py
│ │ ├── gdal.py
│ │ ├── hdf_retriever.py
│ │ └── okada.py
│ ├── deformation
│ │ ├── __init__.py
│ │ ├── elastic_halfspace
│ │ │ ├── __init__.py
│ │ │ ├── fault.py
│ │ │ ├── mogi.py
│ │ │ ├── okada.py
│ │ │ ├── pipe.py
│ │ │ └── surface_load.py
│ │ └── inversion
│ │ │ ├── __init__.py
│ │ │ ├── inversion.py
│ │ │ └── quadtree.py
│ ├── discovery
│ │ ├── __init__.py
│ │ ├── classify_cnn.py
│ │ ├── coherence.py
│ │ ├── coregister.py
│ │ ├── deburst.py
│ │ ├── deformation_to_phase.py
│ │ ├── flat_earth.py
│ │ ├── fusion
│ │ │ └── srtm_fusion.py
│ │ ├── interferogram.py
│ │ ├── los_deformation.py
│ │ ├── mask.py
│ │ ├── project.py
│ │ ├── rotate_squares.py
│ │ ├── shown_cnn_classes.py
│ │ ├── srtm_egm96_wgs84.py
│ │ ├── temporal_decorrelation.py
│ │ ├── train_cnn.py
│ │ └── wrap_phase.py
│ ├── geography
│ │ ├── __init__.py
│ │ ├── coordinates.py
│ │ ├── geodesy.py
│ │ └── geomorphometry.py
│ ├── instruments
│ │ ├── __init__.py
│ │ └── sentinel.py
│ ├── isce
│ │ ├── __init__.py
│ │ └── input_file.py
│ ├── machine_learning
│ │ ├── __init__.py
│ │ ├── geostatistics
│ │ │ ├── __init__.py
│ │ │ ├── direct_sampling.py
│ │ │ ├── geostatistics_utils.py
│ │ │ ├── sequential_gaussian_simulation.py
│ │ │ └── variogram.py
│ │ └── neural_networks
│ │ │ ├── __init__.py
│ │ │ └── anomaly_identification.py
│ └── utilities
│ │ ├── __init__.py
│ │ ├── ann.py
│ │ ├── deformations.py
│ │ ├── file_utils.py
│ │ ├── generic.py
│ │ ├── insar_simulator_utils.py
│ │ └── machine_learning.py
└── run_doxygen.sh
└── setup.py
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 MITeaps
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | - Data Tools for Interferometric Synthetic-Aperture Radar Satellite Data
6 |
7 | ### Introduction
8 |
9 | PyINSAR contains many tools for working with InSAR.
10 |
11 | ### Install
12 | PyInSAR requires OpenCV. To install OpenCV with anaconda use:
13 | ```
14 | conda install opencv
15 | ```
16 |
17 | PyInSAR can then be installed via pip:
18 | ```
19 | pip install pyinsar
20 | ```
21 |
--------------------------------------------------------------------------------
/docs/images/PyInSAR_logo1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MITeaps/pyinsar/4d22e3ef90ef842d6b390074a8b5deedc7658a2b/docs/images/PyInSAR_logo1.png
--------------------------------------------------------------------------------
/docs/images/PyInSAR_logo2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MITeaps/pyinsar/4d22e3ef90ef842d6b390074a8b5deedc7658a2b/docs/images/PyInSAR_logo2.png
--------------------------------------------------------------------------------
/docs/images/PyInSAR_logo3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MITeaps/pyinsar/4d22e3ef90ef842d6b390074a8b5deedc7658a2b/docs/images/PyInSAR_logo3.png
--------------------------------------------------------------------------------
/docs/images/PyInSAR_logos.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MITeaps/pyinsar/4d22e3ef90ef842d6b390074a8b5deedc7658a2b/docs/images/PyInSAR_logos.pdf
--------------------------------------------------------------------------------
/docs/images/pyinsar_logo315x83.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MITeaps/pyinsar/4d22e3ef90ef842d6b390074a8b5deedc7658a2b/docs/images/pyinsar_logo315x83.png
--------------------------------------------------------------------------------
/pyinsar/__init__.py:
--------------------------------------------------------------------------------
# Subpackages re-exported by `from pyinsar import *`
__all__ = ["data_import", "processing", "output"]
--------------------------------------------------------------------------------
/pyinsar/data_import/__init__.py:
--------------------------------------------------------------------------------
# Modules re-exported by `from pyinsar.data_import import *`
# NOTE(review): import_srcmod.py exists in this package but is not listed here —
# confirm whether its exclusion is intentional (it pulls in extra dependencies
# such as scipy and skdaccess at import time)
__all__ = ["import_georaster", "import_raster", "import_utils", "sentinel", "uavsar"]
--------------------------------------------------------------------------------
/pyinsar/data_import/import_georaster.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Guillaume Rongier
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | import numpy as np
26 |
27 | from osgeo import gdal
28 | from gdalconst import GA_ReadOnly, GA_Update
29 |
def open_georaster(georaster_path, read_only = True):
    '''
    Open a georaster file through GDAL

    @param georaster_path: Location of the georaster
    @param read_only: Determine if the georaster can be modified

    @return The georaster as a GDAL data set (None if it could not be opened)
    '''
    # Map the boolean flag onto GDAL's access-mode constants
    access_mode = GA_Update if read_only == False else GA_ReadOnly
    dataset = gdal.Open(georaster_path, access_mode)
    if dataset is None:
        print('Could not open file')

    return dataset
47 |
def get_georaster_array(gdal_georaster, remove_ndv = True, as_float = True):
    '''
    Get a NumPy array from a georaster opened with GDAL

    @param gdal_georaster: A georaster opened with GDAL
    @param remove_ndv: Replace the no-data value as mentioned in the label by np.nan
    @param as_float: Transform the array to a float array

    @return The array, with per-band scale and offset applied
    '''
    assert gdal_georaster is not None, 'No georaster available'

    number_of_bands = gdal_georaster.RasterCount
    georaster_array = gdal_georaster.ReadAsArray()
    if as_float:
        # np.float was removed in NumPy 1.20; the builtin float is the
        # documented replacement and yields the same float64 dtype
        georaster_array = georaster_array.astype(float)
    for i_band in range(number_of_bands):
        # GDAL band indices are 1-based
        georaster_band = gdal_georaster.GetRasterBand(i_band + 1)
        no_data_value = georaster_band.GetNoDataValue()
        if no_data_value is not None and remove_ndv:
            # NOTE(review): assigning np.nan requires a float array — with
            # as_float=False and an integer raster this will raise; confirm
            # whether that combination is ever used
            if number_of_bands > 1:
                georaster_array[i_band, :, :][georaster_array[i_band, :, :] == no_data_value] = np.nan
            else:
                georaster_array[georaster_array == no_data_value] = np.nan
        # Apply the band's linear transform; missing metadata means identity
        scale = georaster_band.GetScale()
        if scale is None:
            scale = 1.
        offset = georaster_band.GetOffset()
        if offset is None:
            offset = 0.
        if number_of_bands > 1:
            georaster_array[i_band, :, :] = georaster_array[i_band, :, :]*scale + offset
        else:
            georaster_array = georaster_array*scale + offset

    return georaster_array
84 |
def get_georaster_extent(gdal_georaster):
    '''
    Compute the spatial extent of a georaster opened with GDAL

    @param gdal_georaster: A georaster opened with GDAL

    @return The georaster extent as (xmin, xmax, ymin, ymax)
    '''
    assert gdal_georaster is not None, 'No georaster available'

    n_cols = gdal_georaster.RasterXSize
    n_rows = gdal_georaster.RasterYSize
    # GDAL geotransform: (x origin, x col step, x row step,
    #                     y origin, y col step, y row step)
    x_origin, dx_col, dx_row, y_origin, dy_col, dy_row = gdal_georaster.GetGeoTransform()
    xmin = x_origin
    ymax = y_origin
    # Project the far corner through the full affine transform so that
    # rotated geotransforms are handled as well
    xmax = xmin + dx_col*n_cols + dx_row*n_rows
    ymin = ymax + dy_col*n_cols + dy_row*n_rows

    return (xmin, xmax, ymin, ymax)
104 |
def print_georaster_info(gdal_georaster):
    '''
    Print basic information about a GDAL georaster: driver, size,
    projection, and geotransform origin/pixel size

    @param gdal_georaster: A georaster opened with GDAL
    '''
    assert gdal_georaster is not None, 'No georaster available'

    driver = gdal_georaster.GetDriver()
    print('Driver: ', driver.ShortName, '/', driver.LongName)
    print('Size of the cube is ', gdal_georaster.RasterXSize, 'x', gdal_georaster.RasterYSize, 'x', gdal_georaster.RasterCount)
    print('Projection is ', gdal_georaster.GetProjection())
    transform = gdal_georaster.GetGeoTransform()
    if transform is not None:
        print('Origin = (', transform[0], ',', transform[3], ')')
        print('Pixel Size = (', transform[1], ',', transform[5], ')')
--------------------------------------------------------------------------------
/pyinsar/data_import/import_raster.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Guillaume Rongier
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | import os.path
26 | import numpy as np
27 | from urllib.request import urlopen
28 |
29 | ################################################################################
30 | # Import GACOS runs
31 | ################################################################################
32 |
def read_rsc_header_file(file_path):
    '''
    Read the rsc header file from GACOS data

    @param file_path: The path to the file

    @return A dictionary containing the header's information
    '''
    header = {}
    with open(file_path) as rsc_file:
        for raw_line in rsc_file:
            fields = raw_line.rstrip('\n').split(' ')
            # First field is the key, last field the value; numeric values
            # are stored as floats, everything else stays a string
            try:
                header[fields[0]] = float(fields[-1])
            except ValueError:
                header[fields[0]] = fields[-1]

    return header
54 |
def open_gacos_tropospheric_delays(tropodelay_header_path):
    '''
    Open a tropospheric delay map computed by the Generic Atmospheric Correction Online Service for InSAR (GACOS)

    @param tropodelay_header_path: Path to the header file (.ztd.rsc or .dztd.rsc)

    @return A NumPy array containing the tropospheric delay in meters
            and a tuple containing the extent of the array
    '''
    split_tropodelay_header_path = tropodelay_header_path.split('.')
    assert (split_tropodelay_header_path[-1] == 'rsc'
            and (split_tropodelay_header_path[-2] == 'ztd'
                 or split_tropodelay_header_path[-2] == 'dztd')), 'Incorrect input format, must be .ztd.rsc or .dztd.rsc'
    # Bug fix: the assert messages referenced undefined names (header_path,
    # file_path), which raised NameError instead of the intended AssertionError
    assert os.path.exists(tropodelay_header_path) == True, "Header %r doesn't exist" % tropodelay_header_path
    # The binary data file is the header path without the trailing .rsc
    tropodelay_file_path = '.'.join(split_tropodelay_header_path[0:-1])
    assert os.path.exists(tropodelay_file_path) == True, "File %r doesn't exist" % tropodelay_file_path

    header_dict = read_rsc_header_file(tropodelay_header_path)

    # Raw little-endian float32 grid of FILE_LENGTH rows by WIDTH columns
    tropodelay_array = np.fromfile(tropodelay_file_path, dtype = np.float32)
    tropodelay_array = tropodelay_array.reshape((int(header_dict['FILE_LENGTH']),
                                                 int(header_dict['WIDTH'])))
    # Extent is expanded by half a pixel on each side so that pixel centers
    # fall on the header's first/last coordinates (Y_STEP is negative)
    tropodelay_extent = (header_dict['X_FIRST'] - 0.5*header_dict['X_STEP'],
                         header_dict['X_FIRST'] + header_dict['X_STEP']*(header_dict['WIDTH'] - 1) + 0.5*header_dict['X_STEP'],
                         header_dict['Y_FIRST'] + header_dict['Y_STEP']*(header_dict['FILE_LENGTH'] - 1) + 0.5*header_dict['Y_STEP'],
                         header_dict['Y_FIRST'] - 0.5*header_dict['Y_STEP'])

    return tropodelay_array, tropodelay_extent
83 |
84 | ################################################################################
85 | # Import SGEMS files
86 | ################################################################################
87 |
def open_sgems_file(file_location):
    '''
    Open an SGEMS file containing one or several variables in an array

    @param file_location: The location of the file

    @return A NumPy array with one leading axis per variable
    '''
    with open(file_location) as sgems_file:

        # First line: grid dimensions (reversed into array order);
        # second line: number of variables
        header_lines = [next(sgems_file).rstrip('\n') for _ in range(2)]
        array_shape = [int(dim) for dim in reversed(header_lines[0].split(' '))]
        number_var = int(header_lines[1])
        # One variable name per line follows the header
        var_names = [next(sgems_file).rstrip('\n') for _ in range(number_var)]

        # Each remaining line holds one value per variable for one grid cell,
        # in C (row-major) order
        var_array = np.empty([number_var] + array_shape)
        for flat_index, data_line in enumerate(sgems_file):
            cell_index = np.unravel_index(flat_index, array_shape)
            cell_values = [value for value in data_line.rstrip('\n').split(' ') if value]
            for var_index in range(number_var):
                var_array[var_index][cell_index] = float(cell_values[var_index])

        return var_array
114 |
def open_sgems_file_from_url(file_url):
    '''
    Open an SGEMS file containing one or several variables in an array from the file's URL

    @param file_url: The URL of the file

    @return A NumPy array with one leading axis per variable
    '''
    with urlopen(file_url) as sgems_url:
        # Download everything at once and iterate over the decoded lines
        sgems_file = iter([line.decode('utf-8').rstrip('\n\r') for line in sgems_url.readlines()])

        # First line: grid dimensions (reversed into array order);
        # second line: number of variables
        header_lines = [next(sgems_file) for _ in range(2)]
        array_shape = [int(dim) for dim in reversed(header_lines[0].split(' '))]
        number_var = int(header_lines[1])
        # One variable name per line follows the header
        var_names = [next(sgems_file) for _ in range(number_var)]

        # Each remaining line holds one value per variable for one grid cell,
        # in C (row-major) order
        var_array = np.empty([number_var] + array_shape)
        for flat_index, data_line in enumerate(sgems_file):
            cell_index = np.unravel_index(flat_index, array_shape)
            cell_values = [value for value in data_line.split(' ') if value]
            for var_index in range(number_var):
                var_array[var_index][cell_index] = float(cell_values[var_index])

        return var_array
142 |
--------------------------------------------------------------------------------
/pyinsar/data_import/import_srcmod.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Cody Rude
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | # Package imports
26 | from pyinsar.processing.deformation.elastic_halfspace.fault import Fault
27 | from pyinsar.processing.utilities.generic import translate, rotate
28 |
29 | # Standard library imports
30 | import re
31 |
32 | # 3rd party imports
33 | from scipy.io import loadmat
34 | from scipy.interpolate import SmoothBivariateSpline
35 | import numpy as np
36 |
37 | from skdaccess.utilities.image_util import SplineGeolocation
38 |
def read_srcmod_data(srcmod_data, dtype=np.float64, skip_sanity_check=False):
    '''
    *** In Development *** Generate faults of okada sources from src mod mat files.

    Note: Only single segment models with a single time window are currently supported

    @param srcmod_data: src mod data read in from the .mat file
    @param dtype: Data type to use
    @param skip_sanity_check: Skip checks to ensure data was interpreted properly (Used for debugging)

    @return List of faults objects, list of slips, list of rakes
    '''

    # Number of segments
    num_segments = int(srcmod_data['invSEGM'])

    if num_segments != 1:
        raise NotImplementedError('Only single segment srcmods are currently supported')


    # lat = data['geoLAT'].ravel()
    # lon = data['geoLON'].ravel()

    # x = data['geoX'].ravel()
    # y = data['geoY'].ravel()

    # lat_spline = SmoothBivariateSpline(y, x, lat, s=0)
    # lon_spline = SmoothBivariateSpline(y, x, lon, s=0)

    # x_spline = SmoothBivariateSpline(lat, lon, x, s=0)
    # y_spline = SmoothBivariateSpline(lat, lon, y, s=0)

    # geolocation = SplineGeolocation(lat_spline=lat_spline,
    #                                 lon_spline=lon_spline,
    #                                 x_spline=x_spline,
    #                                 y_spline=y_spline)


    # NOTE(review): lat/lon are read but never used below — possibly left over
    # from the commented-out geolocation code above
    lat = srcmod_data['geoLAT']
    lon = srcmod_data['geoLON']

    # Size of the subfaults
    Dz, Dx = srcmod_data['invDzDx']

    # Number of the subfault
    Nz, Nx = srcmod_data['invNzNx'].astype(int)

    # Number of time windows
    num_time_windows = int(srcmod_data['invNoTW'])

    # Read a per-subfault quantity, trying three layouts in order:
    # per-time-window columns (prefixTW1..TWn), a single alternate column,
    # or a scalar fallback value broadcast over all subfaults.
    # NOTE(review): the zeros allocation uses the closed-over num_time_windows
    # rather than the num_windows parameter — identical at the only call sites,
    # but confirm before reusing this helper elsewhere
    def get_data_from_time_windows(data, prefix, alternate_column, num_windows, Nx, Nz, alternate_value=None):
        if prefix + 'TW1' in data.keys():
            new_data = np.zeros([num_time_windows, Nz, Nx], dtype=dtype)

            # fliplr+flipud reorders the grid to match the fault's cell
            # ordering used below — TODO confirm against SRCMOD conventions
            for num in range(1, num_windows+1):
                new_data[num-1,:,:] = np.fliplr(np.flipud(data[prefix+'TW' + str(num)]))

        elif alternate_column in data.keys():
            new_data = np.fliplr(np.flipud(data[alternate_column]))
            # Add a leading singleton time-window axis for a uniform shape
            new_data = new_data.reshape(1, *new_data.shape)

        else:
            # Scalar fallback broadcast over every subfault
            new_data = np.ones([1, Nz, Nx], dtype=dtype)
            new_data[0,:,:] = data[alternate_value]

        return new_data


    # Slip divided by 100 — presumably a cm-to-m conversion; TODO confirm units
    fault_slip = get_data_from_time_windows(srcmod_data, 'slip', 'slipSPL', num_time_windows, Nx, Nz) / 100
    fault_rake = np.deg2rad(get_data_from_time_windows(srcmod_data, 'rake', 'RAKE', num_time_windows, Nx, Nz, 'srcARake'))




    # Location of the hypocenter in the along strike/down dip
    # coordinate system
    center_x, center_z = srcmod_data['srcHypXZ']

    # Distance from the surface to the top of the fault
    z_top = srcmod_data['srcZ2top']

    # Width and length of fault
    width, length = srcmod_data['srcDimWL']

    # Fault properties (converted from degrees to radians)
    fault_strike = np.deg2rad(srcmod_data['srcAStke'])
    fault_dip = np.deg2rad(srcmod_data['srcDipAn'])

    ### Generate fault ###
    # Hypocenter vector in the coordinate system where the centroid of
    # the fault is at 0,0,0
    hypx_col_vec = np.array([[center_z - width / 2],
                             [ center_x - length/2],
                             [                   0]], dtype=dtype)

    # SRCMOD's defintion of the centroid (center of cell in x, top in
    # z)
    # This used for comparing with the positions in the srcmod file
    compare_hypx_col_vec = np.array([[-Dz/2],
                                     [    0],
                                     [    0]], dtype=dtype)

    # Rotate the fault to put in a new coordinate system (x->EW,
    # y->NS): first rotate by the dip, then by the negative strike
    hypo_center_coords = rotate(rotate(hypx_col_vec, 0, fault_dip, 0, dtype=dtype), -fault_strike, 0, 0, dtype=dtype)

    # Rotating the srcmod's definition of a centroid
    compare_center_coords = rotate(rotate(compare_hypx_col_vec, 0, fault_dip, 0, dtype=dtype), -fault_strike, 0, 0, dtype=dtype)

    # Determine the centroid of the fault
    fault_center = -hypo_center_coords
    fault_center[2] = np.sin(fault_dip)*width/2 + z_top
    # Scaled by 1000 — presumably km to m; TODO confirm units used by Fault
    fault_center *= 1000

    # Create fault
    fault = Fault(*fault_center, length*1000, width*1000,
                  fault_strike, fault_dip, Nx, Nz, dtype=dtype)

    # Determine the centroids of each subfault that srcmod uses
    compare_with_provided_centers = translate(fault.cell_centroids[:,::-1]/1000, *compare_center_coords)


    # Sanity check: the reconstructed subfault centroids must match the
    # geoX/geoY/geoZ positions stored in the srcmod file (to 1e-3 tolerance)
    if not skip_sanity_check and not np.allclose(compare_with_provided_centers, np.stack([ srcmod_data['geoX'].ravel(),
                                                                                           srcmod_data['geoY'].ravel(),
                                                                                          -srcmod_data['geoZ'].ravel()]), atol=1e-3):

        raise RuntimeError("Unable to recreate srcmod's centroids!")

    ### Finished generating fault ###

    return [fault], [fault_slip], [fault_rake]
170 |
--------------------------------------------------------------------------------
/pyinsar/data_import/import_utils.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Guillaume Rongier
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | import shutil
26 | from atomicwrites import atomic_write
27 | import requests
28 | import os.path
29 |
def download_file(url, folder_path, username = None, password = None, filename = None):
    '''
    Download a file from a URL

    @param url: The URL where the file is
    @param folder_path: Path to the folder where the downloaded file will be stored
    @param username: Username for authentication, if needed
    @param password: Password for authentication, if needed
    @param filename: Change the filename, if needed

    @return The file path if the download was successful or the file already
            exists, None otherwise
    '''
    if filename is None:
        # Default to the last component of the URL
        filename = url.split('/')[-1]
    # Make sure exactly one separator joins the folder and the filename
    if folder_path[-1] != '/' and filename[0] != '/':
        folder_path += '/'
    file_path = folder_path + filename

    # Guard clause: skip the download entirely if the file is already there
    if os.path.exists(file_path):
        print('File', filename, 'already exists in', folder_path)
        return file_path

    with requests.Session() as session:
        try:
            r = session.get(url, auth = (username, password), stream = True)
            r.raise_for_status()
            # Atomic write avoids leaving a partial file behind on failure
            with atomic_write(file_path, mode = 'wb') as data_file:
                shutil.copyfileobj(r.raw, data_file, 1024*1024*10)
            return file_path
        except requests.exceptions.HTTPError as errh:
            print("http error:", errh)
        except requests.exceptions.ConnectionError as errc:
            print("error connecting:", errc)
        except requests.exceptions.Timeout as errt:
            print("timeout error:", errt)
        except requests.exceptions.RequestException as err:
            print("error:", err)
--------------------------------------------------------------------------------
/pyinsar/data_import/sentinel.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Cody Rude, Guillaume Rongier
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | # Standard library imports
26 | import requests
27 | from getpass import getpass
28 | from glob import glob
29 | import xml.etree.ElementTree as ET
30 |
31 | # 3rd party imports
32 | import pandas as pd
33 | import geopandas as gpd
34 | from lxml import html
35 |
36 | from pyinsar.data_import.import_utils import *
37 |
def parse_satellite_data(in_satellite_file):
    '''
    Parse Sentinel satellite orbit data

    @param in_satellite_file: Satellite orbit filename

    @return DataFrame of orbit information, one row per orbit state vector
    '''
    orbit_tree = ET.parse(in_satellite_file)

    columns = ['TAI', 'UTC', 'UT1','Absolute_Orbit', 'X', 'Y', 'Z', 'VX', 'VY', 'VZ', 'Quality']
    # Timestamps are prefixed with a 4-character tag (e.g. "UTC="), hence the slice
    parse_time = lambda text: pd.to_datetime(text[4:])
    parsers = [parse_time, parse_time, parse_time, int] + [float]*6 + [lambda text: text]

    rows = []
    for state_vector in orbit_tree.findall('Data_Block/List_of_OSVs/OSV'):
        rows.append([parse(state_vector.find(column).text)
                     for column, parse in zip(columns, parsers)])

    return pd.DataFrame(rows, columns=columns)
61 |
def get_url_precise_orbit(product_name):
    '''
    Get the URL of the precise orbit corresponding to a given Sentinel-1 product
    Modified from https://github.com/scottyhq/dinoSARaws/blob/48f68b0c49b26a91b501bc6d3fb1b2eb4c6c3918/bin/prep_topsApp_aws.py

    @param product_name: Name of the Sentinel-1 product

    @return The URL if the precise orbit file exists, None otherwise
    '''
    satellite = product_name[:3]
    acquisition_date = product_name[17:25]
    # Incomplete inventory: https://s1qc.asf.alaska.edu/aux_poeorb/files.txt
    base_url = 'https://s1qc.asf.alaska.edu/aux_poeorb'
    response = requests.get(base_url)
    listing = html.fromstring(response.content)
    orbit_names = listing.xpath('//a/@href')
    # Keep only the files of the right satellite (S1A or S1B)
    orbit_table = gpd.pd.DataFrame(dict(orbit=orbit_names))
    satellite_orbits = orbit_table[orbit_table.orbit.str.startswith(satellite)].copy()
    # Precise orbit files start the day before the acquisition date
    day_before = gpd.pd.to_datetime(acquisition_date) - gpd.pd.to_timedelta(1, unit = 'd')
    day_before = day_before.strftime('%Y%m%d')
    # Match on the start time embedded in the orbit file name
    satellite_orbits.loc[:, 'start_time'] = satellite_orbits.orbit.str[42:50]
    matches = satellite_orbits.loc[satellite_orbits.start_time == day_before, 'orbit'].values

    if len(matches) == 0:
        print('No Sentinel-1 precise orbit for the product', product_name)
        return None
    return f'{base_url}/{matches[0]}'
91 |
def download_precise_orbits(product_folder, orbit_folder, username, password):
    '''
    Download the precise orbits for all the Sentinel-1 products in a folder

    @param product_folder: The folder where the Sentinel-1 products are
    @param orbit_folder: The folder where to put the orbit files
    @param username: The username for authentication on Earthdata
    @param password: The password for authentication on Earthdata

    @return The paths of the orbit files; an entry is None when a file
            could not be downloaded
    '''
    orbit_paths = []
    for product_path in glob(product_folder + '/*.zip'):
        # Product name is the archive's base name without its extension
        product_name = product_path.split('/')[-1].split('.')[0]
        orbit_url = get_url_precise_orbit(product_name)
        if orbit_url is not None:
            orbit_paths.append(download_file(orbit_url,
                                             orbit_folder,
                                             username,
                                             password))

    return orbit_paths
115 |
def download_products(product_names,
                      product_folder,
                      base_url = 'https://datapool.asf.alaska.edu/SLC',
                      use_vertex = True,
                      username = None,
                      password = None):
    '''
    Download Sentinel-1 products in a folder

    @param product_names: List of Sentinel-1 product names
    @param product_folder: The folder where to put the product files
    @param base_url: Base url from where to download the files (default is from
                     the Alaska Satellite Facility)
    @param use_vertex: True if the base url is that of the Alaska Satellite Facility
    @param username: The username for authentication on Earthdata
    @param password: The password for authentication on Earthdata

    @return The paths of the downloaded product files (an entry reflects
            whatever download_file returns when a file could not be downloaded)
    '''
    if base_url[-1] != '/':
        base_url += '/'

    product_paths = []
    for i, product_name in enumerate(product_names):
        print('Downloading file ', i + 1, '/', len(product_names), sep = '', end = '\r')
        # The ASF Vertex data pool sorts products into per-satellite
        # subdirectories (SA/ for Sentinel-1A, SB/ for Sentinel-1B)
        satellite = ''
        if use_vertex:
            if product_name[0:3] == 'S1B':
                satellite = 'SB/'
            elif product_name[0:3] == 'S1A':
                satellite = 'SA/'
        product_url = base_url + satellite + product_name + '.zip'
        product_paths.append(download_file(product_url,
                                           product_folder,
                                           username,
                                           password))
    # Clear the progress line before printing the final message
    print('\033[K\033[K\033[K\033[K\033[K\033[K\033[KDownload over')

    return product_paths
154 |
--------------------------------------------------------------------------------
/pyinsar/data_import/uavsar.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2017 Massachusetts Institute of Technology
3 | #
4 | # Author: Cody Rude
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | import re
26 | from collections import OrderedDict
27 |
def read_uavsar_metadata(in_file):
    '''
    Parse UAVSAR metadata

    @param in_file: String of Metadata filename or binary file object
                    (file should end in .ann)

    @return OrderedDict of metadata
    '''
    # Accept either a filename or an already-open binary file object
    if isinstance(in_file, str):
        with open(in_file, 'r') as info_file:
            data_info = info_file.readlines()
    else:
        data_info = [line.decode() for line in in_file.readlines()]

    data_info = [line.strip() for line in data_info]

    # Convert a string to an int when possible, otherwise to a float
    def str_to_number(in_string):
        try:
            return int(in_string)
        except ValueError:
            return float(in_string)

    meta_data_dict = OrderedDict()
    for line in data_info:
        # Only work on lines that aren't commented out (';' starts a comment)
        if re.match(r'^[^;]', line) is not None:
            # Get the data type ('&' is text)
            data_type = re.search(r'\s+\((.*)\)\s+=', line).group(1)
            # Remove data type from line
            tmp = re.sub(r'\s+\(.*\)\s+=', ' =', line)

            # Split line into key, value
            split_list = tmp.split('=', maxsplit=1)

            # Remove any trailing comments and strip whitespace
            split_list[1] = re.search(r'[^;]*', split_list[1]).group().strip()
            split_list[0] = split_list[0].strip()

            # If data type is not a string, parse it as a float or int
            if data_type != '&':
                # Check if value is N/A
                if split_list[1] == 'N/A':
                    split_list[1] = float('nan')

                # 'Reskew Doppler Near Mid Far' holds three numbers, so it is
                # expanded into three separate entries
                elif split_list[0] == 'Reskew Doppler Near Mid Far':
                    split_list[0] = 'Reskew Doppler Near'

                    second_split = split_list[1].split()
                    split_list[1] = str_to_number(second_split[0])

                    meta_data_dict['Reskew Doppler Mid'] = str_to_number(second_split[1])
                    meta_data_dict['Reskew Doppler Far'] = str_to_number(second_split[2])

                # Parse value to an int or float
                else:
                    split_list[1] = str_to_number(split_list[1])
            # Add key, value pair to dictionary
            meta_data_dict[split_list[0]] = split_list[1]

    return meta_data_dict
102 |
--------------------------------------------------------------------------------
/pyinsar/docs/pyinsar_doxygen.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MITeaps/pyinsar/4d22e3ef90ef842d6b390074a8b5deedc7658a2b/pyinsar/docs/pyinsar_doxygen.pdf
--------------------------------------------------------------------------------
/pyinsar/fix_latex.sh:
--------------------------------------------------------------------------------
# Post-process the doxygen-generated LaTeX files in the current directory by
# stripping escaped module-name prefixes (doxygen renders "module." as
# "\+module." and each "_" as "\+\_\+").  Requires GNU sed installed as
# `gsed` (the usual name on macOS).
gsed -ri 's/\\\+coregister\.//g' *.tex
gsed -ri 's/\\\+coherence\.//g' *.tex
# NOTE(review): was "inteferogram", which matches nothing -- the module is
# interferogram.py, so its prefix could never be stripped.
gsed -ri 's/\\\+interferogram\.//g' *.tex
gsed -ri 's/\\\+base\.//g' *.tex
gsed -ri 's/\\\+classify\\\+\\_\\\+cnn\.//g' *.tex
gsed -ri 's/\\\+flat\\\+\\_\\\+earth\.//g' *.tex
gsed -ri 's/\\\+los\\\+\\_\\\+deformation\.//g' *.tex
gsed -ri 's/\\\+wrap\\\+\\_\\\+phase\.//g' *.tex
gsed -ri 's/\\\+temporal\\\+\\_\\\+decorrelation\.//g' *.tex
gsed -ri 's/\\\+train\\\+\\_\\\+cnn\.//g' *.tex
gsed -ri 's/\\\+rotate\\\+\\_\\\+squares\.//g' *.tex
gsed -ri 's/\\\+deformation\\\+\\_\\\+to\\\+\\_\\\+phase\.//g' *.tex
# NOTE(review): the source tree has shown_cnn_classes.py, but this pattern
# strips "show_cnn_classes." -- confirm which name doxygen actually emits.
gsed -ri 's/\\\+show\\\+\\_\\\+cnn\\\+\\_\\\+classes\.//g' *.tex
--------------------------------------------------------------------------------
/pyinsar/get_file_list.sh:
--------------------------------------------------------------------------------
# Recursively list every Python source file in the tree, skipping
# package __init__ files.
find . -iname '*.py' ! -iname '*__init__*'
--------------------------------------------------------------------------------
/pyinsar/output/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["export_georaster", "plot_raster"]
--------------------------------------------------------------------------------
/pyinsar/output/export_georaster.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Guillaume Rongier
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | from osgeo import gdal
26 |
def create_georaster_from_array(georaster_array,
                                geotransform,
                                projection,
                                file_type = 'MEM',
                                file_path = '',
                                data_type = gdal.GDT_Float64,
                                no_data_value = -99999.,
                                scale = 1.,
                                offset = 0.,
                                options = None):
    '''
    Create a GDAL georaster from a Numpy array

    @param georaster_array: The Numpy array (2D for a single band, or 3D with
                            the band as the first axis)
    @param geotransform: The extent and cell spacing of the georaster
    @param projection: The projection of the georaster
    @param file_type: Type to save the file (default is memory)
    @param file_path: Where to store the new georaster (default is memory)
    @param data_type: Data type of the georaster
    @param no_data_value: No data value for the georaster
    @param scale: Scaling factor for the georaster
    @param offset: Offset factor for the georaster
    @param options: List of options for compression (default is no options)

    @return The GDAL georaster
    '''
    # Avoid a mutable default argument; None stands in for "no options"
    if options is None:
        options = []

    # A 2D array is a single band; a 3D array is interpreted as (band, y, x)
    if georaster_array.ndim >= 3:
        georaster_x_size = georaster_array.shape[2]
        georaster_y_size = georaster_array.shape[1]
        number_of_bands = georaster_array.shape[0]
    else:
        georaster_x_size = georaster_array.shape[1]
        georaster_y_size = georaster_array.shape[0]
        number_of_bands = 1

    driver = gdal.GetDriverByName(file_type)
    new_georaster = driver.Create(file_path,
                                  georaster_x_size,
                                  georaster_y_size,
                                  number_of_bands,
                                  data_type,
                                  options = options)
    new_georaster.SetGeoTransform(geotransform)
    new_georaster.SetProjection(projection)

    for band_number in range(1, number_of_bands + 1):
        new_georaster_band = new_georaster.GetRasterBand(band_number)
        new_georaster_band.SetNoDataValue(no_data_value)
        new_georaster_band.SetScale(scale)
        new_georaster_band.SetOffset(offset)
        # Fill the georaster band, otherwise no data values are not set in the new georaster
        new_georaster_band.Fill(no_data_value)

        if georaster_array.ndim >= 3:
            new_georaster_band.WriteArray(georaster_array[band_number - 1, :, :])
        else:
            new_georaster_band.WriteArray(georaster_array)

    return new_georaster
--------------------------------------------------------------------------------
/pyinsar/output/plot_3d_vectors.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 |
def plot_3d_vectors(col_vectors, axes_settings = None, **kwargs):
    """
    Plot 3d vectors as three 2d projections (xy, xz, yz)

    @param col_vectors: 2d array of 3d column vectors (3 rows, one column per vector)
    @param axes_settings: Dictionary of settings passed to the matplotlib
                          axis set method for each projection
    @param **kwargs: Additional args to pass to matplotlib scatter function
    """
    fig, axes = plt.subplots(2, 2)
    # The fourth (lower-right) axis is unused; hide it
    axes[1][1].axis('square')
    axes[1][1].axis('off')
    axes = [axes[0][0], axes[0][1], axes[1][0]]

    # Component pairs and their labels for each 2d projection
    axes_indices = [(0, 1), (0, 2), (1, 2)]
    axes_labels = [('x', 'y'), ('x', 'z'), ('y', 'z')]

    for (index1, index2), (xlabel, ylabel), ax in zip(axes_indices, axes_labels, axes):
        ax.scatter(col_vectors[index1, :], col_vectors[index2, :], **kwargs)
        ax.set_xlabel(xlabel)
        ax.set_ylabel(ylabel)

        if axes_settings is not None:
            # Axes.set accepts only keyword arguments; the original passed the
            # dictionary positionally, which raises a TypeError
            ax.set(**axes_settings)

        ax.axis('square')

    plt.tight_layout(w_pad=-8)
    plt.show()
34 |
--------------------------------------------------------------------------------
/pyinsar/processing/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["corrections", "deformation", "geography", "isce", "machine_learning", "instruments", "utilities"]
--------------------------------------------------------------------------------
/pyinsar/processing/corrections/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["anomaly_identification", "topography", "troposphere"]
--------------------------------------------------------------------------------
/pyinsar/processing/corrections/topography.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from geodesy import wgs84
3 |
def ellipsoidal_earth_slant_ranges(azimuth_time, latlon, orbit_interp,
                                   start_x, end_x, start_y, end_y):
    '''
    Compute slant ranges assuming no topography (every pixel lies on the
    WGS84 ellipsoid at height 0)

    @param azimuth_time: Pandas time series data containing the time of each azimuth line
    @param latlon: Function to compute latitude and longitude for each pixel coordinate
    @param latlon: Function computing (latitude, longitude) from (y, x) pixel coordinates
    @param orbit_interp: Function to compute satellite positions at given times
    @param start_x: Starting x pixel
    @param end_x: Ending x pixel
    @param start_y: Starting y pixel
    @param end_y: Ending y pixel

    @return Tuple of (slant range distances as a (lines, samples) array,
            satellite positions as cartesian row vectors, one per line)
    '''

    # Element-wise conversion from geodetic (lat, lon, height) to cartesian
    geo_to_cart = np.vectorize(wgs84.geodesic_to_cartesian)

    x,y = np.meshgrid(np.arange(start_x, end_x), np.arange(start_y, end_y))

    lat, lon = latlon(y,x)

    lines = lat.shape[0]
    samples = lat.shape[1]

    # One timestamp per azimuth line of the requested window
    dates = azimuth_time[start_y:end_y]

    sat_positions = np.stack(orbit_interp(dates).T, axis=1)

    # Ground positions on the ellipsoid (height 0)
    flat_earth_positions = np.stack(geo_to_cart(lat.ravel(), lon.ravel(), 0), axis=1)

    # Repeat each satellite position once per sample of its line, then take
    # the vector from each ground position to its satellite position
    distance_vectors = np.repeat(sat_positions,samples, axis=0) - flat_earth_positions

    return np.linalg.norm(distance_vectors, axis=1).reshape(lines, samples), sat_positions
38 |
--------------------------------------------------------------------------------
/pyinsar/processing/corrections/troposphere.py:
--------------------------------------------------------------------------------
import numpy as np
import pandas as pd
from scipy.integrate import quad
from scipy.interpolate import CubicSpline, UnivariateSpline
3 |
4 | # These functions are all under development
5 |
def vapor_pressure(T):
    '''
    Under development

    Saturation water vapor pressure from temperature (Magnus-type formula;
    constants presumably expect T in kelvins -- TODO confirm units)

    @param T: Temperature, scalar or Numpy array
    '''
    celsius = T - 273.15
    return 6.1037 * np.exp(17.641 * celsius / (T - 29.88))
11 |
def N(P, T, RH, k1=77.6, k2=23.3, k3=3.75E5):
    '''
    Under development

    Atmospheric refractivity from pressure, temperature, and relative humidity
    '''
    # Hydrostatic (dry) term
    dry_term = k1 * (P / T)
    # Both wet terms share the water vapor partial pressure
    e = vapor_pressure(T)
    wet_term = k2 * (e * RH / T) + k3 * (e * RH / (T ** 2))
    return dry_term + wet_term
17 |
18 |
def N_h(h, P, T, RH, k1=77.6, k2=23.3, k3=3.75E5):
    '''
    Under development

    Refractivity at height h, given interpolated vertical profiles of
    pressure, temperature, and relative humidity
    '''
    # Evaluate each vertical profile at the requested height
    pressure_at_h = P(h)
    temperature_at_h = T(h)
    humidity_at_h = RH(h)
    return N(pressure_at_h, temperature_at_h, humidity_at_h, k1, k2, k3)
24 |
25 |
def compute_delays(h, P, T, RH):
    '''
    Under development

    Compute zenith tropospheric delays by integrating refractivity from each
    height up to a 9 km atmosphere top

    @param h: Heights of the atmospheric samples
    @param P: Pressures at those heights
    @param T: Temperatures (degrees Celsius) at those heights
    @param RH: Relative humidities at those heights (may contain nulls)

    @return Pandas series of delays indexed by height (0 to 9000 m, 100 m steps)
    '''
    # Interpolate the vertical profiles; temperature is converted to kelvins
    pressure = CubicSpline(h, P, bc_type='natural', extrapolate=True)
    temperature = CubicSpline(h, T+273.15, bc_type='natural', extrapolate=True)
    # Relative humidity can have missing values, so drop them and fit a smoothing spline
    rh_index = ~pd.isnull(RH)
    relative_humidity = UnivariateSpline(h[rh_index],RH[rh_index], k=3)

    # Delay at height x: refractivity integrated from x to the 9 km top
    d_tropo = lambda x: 1e-6 * quad(N_h, x, 9e3, args=(pressure, temperature,relative_humidity))[0]

    x = np.arange(0,9000,100)
    # np.float was removed from NumPy (>= 1.24); the builtin float is the same dtype
    return pd.Series(np.fromiter(map(d_tropo,x),dtype=float,count=len(x)), index=x)
39 |
--------------------------------------------------------------------------------
/pyinsar/processing/data_fetcher/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MITeaps/pyinsar/4d22e3ef90ef842d6b390074a8b5deedc7658a2b/pyinsar/processing/data_fetcher/__init__.py
--------------------------------------------------------------------------------
/pyinsar/processing/data_fetcher/gdal.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Standard library imports
28 | from collections import OrderedDict
29 |
30 | # Pyinsar imports
31 | from pyinsar.processing.utilities.generic import proj4StringToDictionary
32 | from pyinsar.data_import import import_georaster
33 |
34 | # 3rd party imports
35 | from osgeo import gdal, osr
36 | from skdaccess.framework.data_class import DataFetcherBase, ImageWrapper
37 |
38 |
class DataFetcher(DataFetcherBase):
    """
    Data fetcher for loading images compatible with GDAL
    """

    def __init__(self, ap_paramList, verbose=False):
        """
        Initialize GDAL data fetcher

        @param ap_paramList[filename_list]: AutoList of filenames of ISCE interferograms
        @param ap_paramList[label_list]: AutoList of strings containing names for the interferograms
        @param verbose: Print extra information
        """
        super(DataFetcher, self).__init__(ap_paramList, verbose)


    def output(self):
        """
        Load GDAL data

        @return Image data wrapper
        """
        filenames = self.ap_paramList[0]()
        labels = self.ap_paramList[1]()

        data_dict = OrderedDict()
        meta_dict = OrderedDict()

        for label, filename in zip(labels, filenames):
            dataset = import_georaster.open_georaster(filename)

            data_dict[label] = dataset.ReadAsArray()

            # Keep both the raw WKT and its proj4 form in the metadata
            wkt = dataset.GetProjection()
            spatial_reference = osr.SpatialReference()
            spatial_reference.ImportFromWkt(wkt)

            metadata = OrderedDict()
            metadata['WKT'] = wkt
            metadata['GeoTransform'] = dataset.GetGeoTransform()
            metadata['proj4params'] = proj4StringToDictionary(spatial_reference.ExportToProj4())
            meta_dict[label] = metadata

        return ImageWrapper(data_dict, meta_data = meta_dict)
82 |
--------------------------------------------------------------------------------
/pyinsar/processing/data_fetcher/hdf_retriever.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Standard library imports
28 | from collections import OrderedDict
29 |
30 | # Pyinsar imports
31 | from pyinsar.processing.utilities.machine_learning import DataRetriever
32 | from pyinsar.processing.utilities.generic import subarray_slice
33 |
34 | # Scikit Data Access imports
35 | from skdaccess.framework.data_class import DataFetcherBase, ImageWrapper
36 |
37 | # 3rd party imports
38 | import numpy as np
39 |
class DataFetcher(DataFetcherBase):
    """
    Data fetcher for retrieving hdf image data made for training in convolutional neural networks
    """

    def __init__(self, filename_list, label_list, size, dtype, num_chunks, index):
        """
        Initialize TrainCNN item

        @param filename_list: List of hdf retriever files
        @param label_list: Label for each file
        @param size: Image shape
        @param dtype: Data type to return
        @param num_chunks: Number of chunks to read in at a time. This is necessary due
                           to a performance issue with h5py
        @param index: 2d index array of items to serve; its first dimension
                      must be divisible by num_chunks, and column 0 is exposed
                      as each chunk's 'Labels' metadata
        """
        self._filename_list = filename_list
        self._label_list = label_list
        self._size = size
        self._dtype = dtype
        self._num_chunks = num_chunks
        self._index = index

        # Chunk currently served by output(); advanced by perturb()
        self._current_chunk = 0

        super(DataFetcher, self).__init__()

        assert self._index.shape[0] % self._num_chunks == 0, "Number of training items must be divisible by number of chunks"
        self._items_per_chunk = self._index.shape[0] // self._num_chunks

        # NOTE(review): these three values are computed but never stored or
        # used; presumably kept for the side effect of opening/validating the
        # files -- confirm before removing
        data_retriever = DataRetriever(self._filename_list, self._label_list, self._size, self._dtype)
        num_images = data_retriever.get_num_images()
        num_labels = len(self._label_list)

    def perturb(self):
        """
        Advance to the next chunk, wrapping around after the last one
        """
        self._current_chunk = (self._current_chunk + 1) % self._num_chunks

    def output(self):
        """
        Retrieve the images of the current chunk

        @return Image wrapper holding the chunk's images and its metadata
        """

        data_retriever = DataRetriever(self._filename_list, self._label_list, self._size, self._dtype)

        data = OrderedDict()
        metadata = OrderedDict()

        data_label = 'chunk_' + str(self._current_chunk)

        # Rows of the index belonging to the current chunk
        data_slice = subarray_slice(self._current_chunk, self._items_per_chunk)

        data[data_label] = data_retriever.get_images(self._index[data_slice,:])
        metadata[data_label] = OrderedDict()
        metadata[data_label]['Num_Chunks'] = self._num_chunks
        metadata[data_label]['Current_Chunk'] = self._current_chunk
        # Column 0 of the index holds each item's label
        metadata[data_label]['Labels'] = self._index[data_slice, 0]

        return ImageWrapper(data, -1, metadata)


    def randomizeIndex(self):
        """
        Shuffle training index
        """
        np.random.shuffle(self._index)
105 |
--------------------------------------------------------------------------------
/pyinsar/processing/data_fetcher/okada.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 |
28 | from collections import OrderedDict
29 |
30 | from skdaccess.framework.data_class import DataFetcherBase, ImageWrapper
31 | from pyinsar.processing.deformation.elastic_halfspace.okada import compute_okada_displacement
32 |
class DataFetcher(DataFetcherBase):
    """
    Generates data from an Okada model
    """
    def __init__(self, ap_paramList, xx_array, yy_array, verbose=False):
        """
        Initialize Okada DataFetcher

        @param ap_paramList[fault_centroid_x]: x centroid
        @param ap_paramList[fault_centroid_y]: y centroid
        @param ap_paramList[fault_centroid_depth]: Fault depth
        @param ap_paramList[fault_strike]: Fault strike
        @param ap_paramList[fault_dip]: Fault dip
        @param ap_paramList[fault_length]: Fault Length
        @param ap_paramList[fault_width]: Fault width
        @param ap_paramList[fault_rake]: Fault rake
        @param ap_paramList[fault_slip]: Fault slip
        @param ap_paramList[fault_open]: Fault open
        @param ap_paramList[poisson_ratio]: Poisson ratio
        @param xx_array: Array of x coordinates
        @param yy_array: Array of y coordinates
        @param verbose: Print out extra information
        """
        self._xx_array = xx_array
        self._yy_array = yy_array

        super(DataFetcher, self).__init__(ap_paramList, verbose)


    def output(self):
        """
        Output deformation in an image wrapper

        @return Deformation in an Image wrapper
        """
        # Okada parameters, in the order they appear in ap_paramList
        parameter_names = (
            'fault_centroid_x',
            'fault_centroid_y',
            'fault_centroid_depth',
            'fault_strike',
            'fault_dip',
            'fault_length',
            'fault_width',
            'fault_rake',
            'fault_slip',
            'fault_open',
            'poisson_ratio',
        )

        # Evaluate each auto-parameter in declaration order
        kwargs = OrderedDict()
        for position, name in enumerate(parameter_names):
            kwargs[name] = self.ap_paramList[position]()

        deformation = compute_okada_displacement(**kwargs,
                                                 x_array = self._xx_array,
                                                 y_array = self._yy_array)

        data_dict = OrderedDict()
        metadata_dict = OrderedDict()
        data_dict['deformation'] = deformation
        # The evaluated parameters double as the result's metadata
        metadata_dict['deformation'] = kwargs

        return ImageWrapper(data_dict, meta_data = metadata_dict)

    def multirun_enabled(self):
        """
        This data fetcher is multirun enabled

        @return True
        """
        return True
111 |
--------------------------------------------------------------------------------
/pyinsar/processing/deformation/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["elastic_halfspace", "inversion"]
--------------------------------------------------------------------------------
/pyinsar/processing/deformation/elastic_halfspace/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["mogi", "okada", "pipe", "surface_load"]
--------------------------------------------------------------------------------
/pyinsar/processing/deformation/elastic_halfspace/fault.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Cody Rude
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | from pyinsar.processing.utilities.generic import rotate, translate
26 | from .okada import compute_okada_displacement
27 |
28 | from tqdm import trange
29 | import numpy as np
30 |
31 |
class Fault(object):
    '''
    *** In Development *** Model a fault as a collection of small okada faults
    '''
    def __init__(self, x_center, y_center, depth, length, width, strike, dip, num_elements_length, num_elements_width,
                 poisson_ratio = 0.25, dtype=np.float32):
        '''
        Initialize Fault object

        @param x_center: x centroid of fault
        @param y_center: y centroid of fault
        @param depth: Depth to centroid of fault
        @param length: Length of fault (along strike)
        @param width: Width of fault (along dip)
        @param strike: Angle from north of the fault direction
        @param dip: Dip angle
        @param num_elements_length: Number of elements in the length direction
        @param num_elements_width: Number of elements in the width direction
        @param poisson_ratio: Poisson ratio
        @param dtype: Data type to use in calculations
        '''
        self.x_center = x_center
        self.y_center = y_center
        self.depth = depth
        self.length = length
        self.width = width
        self.strike = strike
        self.dip = dip
        # Bug fix: this was hard coded to 0.25, silently ignoring the
        # poisson_ratio argument
        self.poisson_ratio = poisson_ratio
        self._dtype = dtype

        # Size of each rectangular element
        self.cell_width = self.width / num_elements_width
        self.cell_length = self.length / num_elements_length

        # Element centroids in the fault-plane coordinate system
        # (x along strike, z along dip, fault centered at the origin)
        cell_x_coords = np.linspace(self.cell_length/2 - self.length/2, self.length/2 - self.cell_length/2, num_elements_length, dtype=dtype)
        cell_z_coords = np.linspace(self.cell_width/2 - self.width/2, self.width/2 - self.cell_width/2, num_elements_width, dtype=dtype)

        cell_x_centroids, cell_z_centroids = np.meshgrid(cell_x_coords, cell_z_coords)

        cell_centroids = np.zeros([3, cell_x_centroids.size], dtype=dtype)
        cell_centroids[0,:] = cell_x_centroids.ravel()
        cell_centroids[2,:] = cell_z_centroids.ravel()

        # Save unrotated centers for making slip matrices
        self.unrotated_x = cell_x_centroids
        self.unrotated_y = cell_z_centroids

        # Rotate the fault plane to the requested dip and strike...
        x_angle = np.pi/2 - self.dip
        z_angle = -self.strike - np.pi/2

        cell_centroids = rotate(cell_centroids, 0, 0, x_angle, dtype=dtype)
        cell_centroids = rotate(cell_centroids, z_angle, 0, 0, dtype=dtype)

        # ...and move it to its position (z is negative downwards)
        cell_centroids = translate(cell_centroids, x_center, y_center, -depth)

        self.cell_centroids = cell_centroids


    def generateDeformation(self, slip, rake, x_coords, y_coords, simple=True):
        '''
        Generate surface deformations from fault

        @param slip: 2d array of slip with size (num_elements_width, num_elements_length),
                     or a 3d array stacking several such slip distributions
        @param rake: Array of rake values with the same shape as slip
        @param x_coords: 2d array of x coordinates
        @param y_coords: 2d array of y coordinates
        @param simple: If multiple slips per cell are given, combine them into a
                       single equivalent slip/rake before computing the deformation

        @return Surface deformations at the specified coordinates
        '''

        assert slip.shape == rake.shape, 'slip and rake must have same shape'
        assert x_coords.shape == y_coords.shape, 'x_coords and y_coords must have same shape'

        def compute_deformation(deformation, slip_ravel, rake_ravel, verbose=True):
            # Accumulate the Okada solution of every fault element into
            # deformation (modified in place)
            if verbose:
                my_range = trange(len(slip_ravel))
            else:
                my_range = range(len(slip_ravel))

            for index in my_range:
                x_center = self.cell_centroids[0,index]
                y_center = self.cell_centroids[1,index]
                depth = -self.cell_centroids[2,index]
                slip_value = slip_ravel[index]
                rake_value = rake_ravel[index]

                deformation[:,:,:] = deformation + compute_okada_displacement(fault_centroid_x = x_center,
                                                                              fault_centroid_y = y_center,
                                                                              fault_centroid_depth = depth,
                                                                              fault_strike = self.strike,
                                                                              fault_dip = self.dip,
                                                                              fault_length = self.cell_length,
                                                                              fault_width = self.cell_width,
                                                                              fault_rake = rake_value,
                                                                              poisson_ratio = self.poisson_ratio,
                                                                              fault_open = 0,
                                                                              xx_array = x_coords,
                                                                              yy_array = y_coords,
                                                                              fault_slip = slip_value)

        deformation = np.zeros([3,*x_coords.shape], dtype=self._dtype)

        if slip.ndim == 3 and simple:
            # Combine the stacked slip distributions into a single equivalent
            # slip/rake by summing the slip vectors per cell
            x_components = np.cos(rake) * slip
            y_components = np.sin(rake) * slip

            combined_x_components = np.sum(x_components, axis=0)
            combined_y_components = np.sum(y_components, axis=0)

            slip = np.sqrt(combined_x_components**2 + combined_y_components**2)
            rake = np.arctan2(combined_y_components, combined_x_components)

        if slip.ndim == 2:
            compute_deformation(deformation, slip.ravel(), rake.ravel(), verbose=True)

        elif slip.ndim == 3:
            for i in trange(slip.shape[0]):
                compute_deformation(deformation, slip[i,:,:].ravel(), rake[i,:,:].ravel(), verbose=False)

        return deformation
162 |
--------------------------------------------------------------------------------
/pyinsar/processing/deformation/elastic_halfspace/mogi.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Guillaume Rongier
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | import numpy as np
26 |
def compute_mogi_source_displacement(source_x,
                                     source_y,
                                     source_depth,
                                     source_radius,
                                     poisson_ratio,
                                     pressurization,
                                     shear_modulus,
                                     xx_array,
                                     yy_array):
    '''
    Compute the surface displacements for a Mogi source, i.e., a spheroidal
    pressure source

    @param source_x: x coordinate for the source's center
    @param source_y: y coordinate for the source's center
    @param source_depth: Depth of the source's center
    @param source_radius: Source's radius
    @param poisson_ratio: Poisson's ratio
    @param pressurization: Change of pressure applied to the source
    @param shear_modulus: Shear modulus
    @param xx_array: x coordinate for the domain within a 2D array
    @param yy_array: y coordinate for the domain within a 2D array

    @return The surface displacement field as a (3, ny, nx) array
    '''
    # Horizontal offsets from the source axis
    x_offset = xx_array - source_x
    y_offset = yy_array - source_y

    # Cube of the distance from the source's center to each surface point
    distance_cubed = np.sqrt(x_offset**2 + y_offset**2 + source_depth**2)**3

    # Point-source strength derived from the cavity's volume change
    change_in_volume = (np.pi*pressurization*source_radius**3)/shear_modulus
    strength = (1 - poisson_ratio)*change_in_volume/(np.pi)

    # Displacements point radially away from the source's axis
    displacement_array = np.zeros((3, xx_array.shape[0], xx_array.shape[1]))
    displacement_array[0] = strength*x_offset/distance_cubed
    displacement_array[1] = strength*y_offset/distance_cubed
    displacement_array[2] = strength*source_depth/distance_cubed

    return displacement_array
--------------------------------------------------------------------------------
/pyinsar/processing/deformation/elastic_halfspace/pipe.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Guillaume Rongier
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | import numpy as np
26 |
def compute_closed_pipe_displacement(closed_pipe_x,
                                     closed_pipe_y,
                                     closed_pipe_depth_1,
                                     closed_pipe_depth_2,
                                     closed_pipe_radius,
                                     poisson_ratio,
                                     pressurization,
                                     shear_modulus,
                                     xx_array,
                                     yy_array):
    '''
    Compute the surface displacements for a closed pipe

    @param closed_pipe_x: x coordinate for the pipe's center
    @param closed_pipe_y: y coordinate for the pipe's center
    @param closed_pipe_depth_1: Pipe's top depth
    @param closed_pipe_depth_2: Pipe's bottom depth
    @param closed_pipe_radius: Pipe's radius
    @param poisson_ratio: Poisson's ratio
    @param pressurization: Change of pressure applied to the pipe
    @param shear_modulus: Shear modulus
    @param xx_array: x coordinate for the domain within a 2D array
    @param yy_array: y coordinate for the domain within a 2D array

    @return The surface displacement field as a (3, ny, nx) array
    '''
    # Horizontal offsets from the pipe axis: the horizontal displacements must
    # point radially away from the pipe, so they are built from these offsets.
    # (Bug fix: the absolute coordinates xx_array/yy_array were used before,
    # which is only correct for a pipe centered on the origin; compare with
    # compute_mogi_source_displacement.)
    x_offset = xx_array - closed_pipe_x
    y_offset = yy_array - closed_pipe_y

    horizontal_distance_squared_array = x_offset**2 + y_offset**2
    closed_pipe_distance_1_array = np.sqrt(horizontal_distance_squared_array + closed_pipe_depth_1**2)
    closed_pipe_distance_2_array = np.sqrt(horizontal_distance_squared_array + closed_pipe_depth_2**2)

    displacement_array = np.zeros((3, xx_array.shape[0], xx_array.shape[1]))

    b = (pressurization*closed_pipe_radius**2)/(4*shear_modulus)

    # Radial factor shared by both horizontal components
    radial_term = b*((closed_pipe_depth_1**3)/(closed_pipe_distance_1_array**3)
                     + 2*closed_pipe_depth_1*(5*poisson_ratio - 3)/closed_pipe_distance_1_array
                     + ((5*closed_pipe_depth_2**3)*(1 - 2*poisson_ratio)
                        - 2*closed_pipe_depth_2*horizontal_distance_squared_array*(5*poisson_ratio - 3))/(closed_pipe_distance_2_array**3))

    # NOTE: undefined (division by zero) exactly on the pipe axis, as in the
    # original formulation
    displacement_array[0,:,:] = radial_term*x_offset/horizontal_distance_squared_array
    displacement_array[1,:,:] = radial_term*y_offset/horizontal_distance_squared_array
    displacement_array[2,:,:] = -b*((closed_pipe_depth_1**2)/(closed_pipe_distance_1_array**3)
                                    + 2*(5*poisson_ratio - 2)/closed_pipe_distance_1_array
                                    + ((3 - 10*poisson_ratio)*closed_pipe_depth_2**2
                                       + 2*(5*poisson_ratio - 2)*horizontal_distance_squared_array)/(closed_pipe_distance_2_array**3))

    return displacement_array
73 |
def compute_open_pipe_displacement(open_pipe_x,
                                   open_pipe_y,
                                   open_pipe_depth_0,
                                   open_pipe_depth_1,
                                   open_pipe_depth_2,
                                   open_pipe_radius,
                                   poisson_ratio,
                                   pressurization,
                                   shear_modulus,
                                   xx_array,
                                   yy_array):
    '''
    Compute the surface displacements for an open pipe

    @param open_pipe_x: x coordinate for the pipe's center
    @param open_pipe_y: y coordinate for the pipe's center
    @param open_pipe_depth_0: Pipe's top depth with minimal pressurization
    @param open_pipe_depth_1: Pipe's top depth with maximal pressurization
    @param open_pipe_depth_2: Pipe's bottom depth
    @param open_pipe_radius: Pipe's radius
    @param poisson_ratio: Poisson's ratio
    @param pressurization: Change of pressure applied to the pipe
    @param shear_modulus: Shear modulus
    @param xx_array: x coordinate for the domain within a 2D array
    @param yy_array: y coordinate for the domain within a 2D array

    @return The surface displacement field as a (3, ny, nx) array
    '''
    # Horizontal offsets from the pipe axis: the horizontal displacements must
    # point radially away from the pipe, so they are built from these offsets.
    # (Bug fix: the absolute coordinates xx_array/yy_array were used before,
    # which is only correct for a pipe centered on the origin; compare with
    # compute_mogi_source_displacement.)
    x_offset = xx_array - open_pipe_x
    y_offset = yy_array - open_pipe_y

    horizontal_distance_squared_array = x_offset**2 + y_offset**2
    open_pipe_distance_0_array = np.sqrt(horizontal_distance_squared_array + open_pipe_depth_0**2)
    open_pipe_distance_1_array = np.sqrt(horizontal_distance_squared_array + open_pipe_depth_1**2)
    open_pipe_distance_2_array = np.sqrt(horizontal_distance_squared_array + open_pipe_depth_2**2)

    b = open_pipe_radius*pressurization/shear_modulus
    half_b_radius = b*open_pipe_radius/2.

    displacement_array = np.zeros((3, xx_array.shape[0], xx_array.shape[1]))

    # Contribution of the fully pressurized section between open_pipe_depth_1
    # and open_pipe_depth_2
    radial_term = half_b_radius*((open_pipe_depth_1**3)/(open_pipe_distance_1_array**3)
                                 - 2*open_pipe_depth_1*(1 + poisson_ratio)/open_pipe_distance_1_array
                                 + ((open_pipe_depth_2**3)*(1 + 2*poisson_ratio)
                                    + 2*open_pipe_depth_2*horizontal_distance_squared_array*(1 + poisson_ratio))/(open_pipe_distance_2_array**3))
    # NOTE: undefined (division by zero) exactly on the pipe axis, as in the
    # original formulation
    displacement_array[0,:,:] = radial_term*x_offset/horizontal_distance_squared_array
    displacement_array[1,:,:] = radial_term*y_offset/horizontal_distance_squared_array
    displacement_array[2,:,:] = -half_b_radius*((open_pipe_depth_1**2)/(open_pipe_distance_1_array**3)
                                                - 2*poisson_ratio/open_pipe_distance_1_array
                                                + (-open_pipe_depth_2**2 + 2*(open_pipe_distance_2_array**2)*poisson_ratio)/(open_pipe_distance_2_array**3))

    # Contribution of the partially pressurized section between
    # open_pipe_depth_0 and open_pipe_depth_1
    radial_term = half_b_radius*(-(open_pipe_depth_0**3)/(open_pipe_distance_0_array**3)
                                 + 2*poisson_ratio/open_pipe_distance_0_array
                                 + (open_pipe_depth_1**2 - 2*(open_pipe_depth_1**2 + horizontal_distance_squared_array)*poisson_ratio)/(open_pipe_distance_1_array**3))
    displacement_array[0,:,:] += radial_term*x_offset/open_pipe_depth_1
    displacement_array[1,:,:] += radial_term*y_offset/open_pipe_depth_1
    displacement_array[2,:,:] += -half_b_radius*((open_pipe_depth_0**3)/(open_pipe_distance_0_array**3)
                                                 - (open_pipe_depth_1**3)/(open_pipe_distance_1_array**3)
                                                 - 2*poisson_ratio/open_pipe_distance_1_array
                                                 + open_pipe_depth_1*(2*poisson_ratio - 1)/open_pipe_distance_1_array
                                                 + open_pipe_depth_0*(1 - 2*poisson_ratio)/open_pipe_distance_0_array
                                                 + (2*poisson_ratio - 1)*np.log(open_pipe_depth_0 + open_pipe_distance_0_array)
                                                 - (2*poisson_ratio - 1)*np.log(open_pipe_depth_1 + open_pipe_distance_1_array))*(1/open_pipe_depth_1)

    return displacement_array
138 |
--------------------------------------------------------------------------------
/pyinsar/processing/deformation/elastic_halfspace/surface_load.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Guillaume Rongier
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | import numpy as np
26 | from scipy.special import hyp2f1
27 |
def compute_uniform_disk_load_displacement(disk_x,
                                           disk_y,
                                           disk_radius,
                                           poisson_ratio,
                                           pressure,
                                           shear_modulus,
                                           xx_array,
                                           yy_array):
    '''
    Compute the surface displacements for a uniform disk load

    @param disk_x: x coordinate for the disk's center
    @param disk_y: y coordinate for the disk's center
    @param disk_radius: Disk's radius
    @param poisson_ratio: Poisson's ratio
    @param pressure: Pressure applied by the disk
    @param shear_modulus: Shear modulus
    @param xx_array: x coordinate for the domain within a 2D array
    @param yy_array: y coordinate for the domain within a 2D array

    @return The surface displacement field as a (3, ny, nx) array
    '''
    # Horizontal offsets from the disk center: the horizontal displacements
    # must point radially with respect to the disk axis.
    # (Bug fix: the absolute coordinates xx_array/yy_array were used before,
    # which is only correct for a disk centered on the origin; compare with
    # compute_mogi_source_displacement.)
    x_offset = xx_array - disk_x
    y_offset = yy_array - disk_y
    horizontal_distance_array = np.sqrt(x_offset**2 + y_offset**2)

    inside = horizontal_distance_array <= disk_radius
    outside = ~inside

    displacement_array = np.zeros((3, xx_array.shape[0], xx_array.shape[1]))

    # Displacements beneath the disk
    constant_term = -pressure*(1 - 2*poisson_ratio)/(4*shear_modulus*disk_radius)
    displacement_array[0,:,:][inside] = constant_term*horizontal_distance_array[inside]*x_offset[inside]
    displacement_array[1,:,:][inside] = constant_term*horizontal_distance_array[inside]*y_offset[inside]
    displacement_array[2,:,:][inside] = constant_term*(4*(disk_radius**2)*(1 - poisson_ratio)*hyp2f1(1/2., -1/2., 1., (horizontal_distance_array[inside]**2)/disk_radius**2))/(1 - 2*poisson_ratio)

    # Displacements outside the disk
    constant_term = -pressure*(disk_radius**2)*(1 - 2*poisson_ratio)/(4*shear_modulus)
    displacement_array[0,:,:][outside] = constant_term*x_offset[outside]/horizontal_distance_array[outside]**2
    displacement_array[1,:,:][outside] = constant_term*y_offset[outside]/horizontal_distance_array[outside]**2
    displacement_array[2,:,:][outside] = constant_term*(2*(1 - poisson_ratio)*hyp2f1(1/2., 1/2., 2., (disk_radius**2)/horizontal_distance_array[outside]**2))/((1 - 2*poisson_ratio)*horizontal_distance_array[outside])

    return displacement_array
--------------------------------------------------------------------------------
/pyinsar/processing/deformation/inversion/__init__.py:
--------------------------------------------------------------------------------
# Submodules exported when using `from ... import *` on this package
__all__ = ["quadtree", "inversion"]
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/__init__.py:
--------------------------------------------------------------------------------
1 | from .deformation_to_phase import DeformationToPhase
2 | from .temporal_decorrelation import TemporalDecorrelation
3 | from .coherence import Coherence
4 | from .interferogram import Interferogram
5 | from .wrap_phase import WrapPhase
6 | from .train_cnn import TrainCNN
7 | from .classify_cnn import ClassifyCNN
8 | from .deburst import Deburst
9 | from .coregister import Coregister
10 | from .shown_cnn_classes import ShowCNNClasses
11 | from .flat_earth import FlatEarth
12 | from .project import Project
13 | from .mask import Mask
14 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/classify_cnn.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Standard library imports
28 | from collections import OrderedDict
29 |
30 | # Pyinsar imports
31 | from pyinsar.processing.utilities import ann
32 |
33 | # Scikit Discovery imports
34 | from skdiscovery.data_structure.framework.base import PipelineItem
35 | from skdiscovery.utilities.patterns.image_tools import divideIntoSquares
36 |
37 |
38 |
39 | # 3rd party imports
40 | import numpy as np
41 |
42 |
class ClassifyCNN(PipelineItem):
    """ Classify images using a previously trained CNN """

    def __init__(self, str_description, cnn_network_dir, batch_size=2000, config=None, compare_labels=False,
                 stride = None, size=None):
        """
        Initialize ClassifyCNN item

        @param str_description: String describing item
        @param cnn_network_dir: String containing the directory where the CNN is stored
        @param batch_size: Batch size to use when classifying with Tensorflow
        @param config: Additional session configuration dictionary
        @param compare_labels: Compare measured labels with labels stored in metadata
        @param stride: Distance between images if it is necessary to cut the image into tiles
        @param size: Size of images to feed into CNN
        """
        # stride and size must be supplied together, since both control tiling
        assert (stride is None) == (size is None), \
            'Either both or neither stride and size should be None'

        self.cnn_network_dir = cnn_network_dir
        self.batch_size = batch_size
        self.config = config
        self.compare_labels = compare_labels
        self.stride = stride
        self.size = size

        super(ClassifyCNN, self).__init__(str_description, [])

    def process(self, obj_data):
        """
        Classify data using a CNN using data in Image wrapper

        @param obj_data: Image wrapper
        """
        results = OrderedDict()
        for label, data in obj_data.getIterator():
            results[label] = OrderedDict()

            # Optionally cut the image into (possibly overlapping) square tiles
            if self.stride is not None:
                extents, processed_data = divideIntoSquares(data, self.size, self.stride)
            else:
                processed_data = data
                extents = np.array([[0, data.shape[-2], 0, data.shape[-1]]])

            labels = ann.classify(image_data = processed_data,
                                  model_dir = self.cnn_network_dir,
                                  batch_size = self.batch_size,
                                  config = self.config)

            results[label]['labels'] = labels
            results[label]['extents'] = extents

            # Optionally score the predictions against labels stored in the
            # wrapper's metadata
            if self.compare_labels:
                given_labels = obj_data.info(label)['Labels']

                fraction_correct = np.count_nonzero(given_labels == labels) / len(labels)

                results[label]['fraction_correct'] = fraction_correct

        obj_data.addResult(self.str_description, results)
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/coherence.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2017 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Standard library imports
28 | from collections import OrderedDict
29 |
30 | # scikit discovery imports
31 | from skdiscovery.data_structure.framework.base import PipelineItem
32 |
33 | # Pyinsar imports
34 | from pyinsar.processing.utilities.generic import coherence
35 |
36 | # Scikit data access imports
37 | from skdaccess.utilities.support import progress_bar
38 |
39 |
class Coherence(PipelineItem):
    ''' Calculate coherence between single-look complex SAR images '''

    def __init__(self, str_description, window, pairing='neighbor', use_progress_bar = False):
        '''
        Initialize coherence pipeline item

        @param str_description: Short string describing item
        @param window: Tuple indicating the y and x window size
        @param pairing: How to pair slc images. "neighbor" computes
                        coherence between neighboring images

        @param use_progress_bar: Display progress using a progress bar
        '''

        self.window = window
        self.pairing = pairing
        self.use_progress_bar = use_progress_bar


        super(Coherence, self).__init__(str_description,[])


    def process(self, obj_data):
        '''
        Compute the coherence between pairs of SLC images

        @param obj_data: Data wrapper
        '''

        results_dict = OrderedDict()

        if self.pairing == 'neighbor':
            first_image_it = obj_data.getIterator()
            second_image_it = obj_data.getIterator()

            # Offset the second iterator so each image is paired with its successor
            next(second_image_it)

            for (label1, image1), (label2, image2) in progress_bar(zip(first_image_it, second_image_it),
                                                                   total = len(obj_data)-1,
                                                                   enabled = self.use_progress_bar):

                # The result is stored under the label of the later image
                results_dict[label2] = coherence(image1, image2, self.window)

        else:
            # Previously an unsupported pairing silently produced empty results
            raise NotImplementedError('Unknown pairing: {}'.format(self.pairing))

        obj_data.addResult(self.str_description, results_dict)
86 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/coregister.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Standard library imports
28 | from collections import OrderedDict
29 |
30 | # scikit discovery imports
31 | from skdiscovery.data_structure.framework.base import PipelineItem
32 |
33 | # pyinsar imports
34 | from pyinsar.processing.instruments.sentinel import SentinelRamp, get_valid_lines, select_valid_lines, transform_slc, retrieve_azimuth_time
35 | from pyinsar.processing.utilities.generic import keypoints_align, scale_image
36 |
37 |
38 | # 3rd party imports
39 | import imreg_dft as ird
40 | import numpy as np
41 |
42 | class Coregister(PipelineItem):
43 | """
44 | *** In Devolopment *** Pipeline item to coregister images
45 | """
46 |
    def __init__(self, str_description, ap_paramList, image_limits = None, num_iterations=3):
        """
        Initialize Coregister pipeline item

        @param str_description: String describing item
        @param ap_paramList[reg_type]: Registration method (currently supports
                                       'imreg_translation', 'imreg_affine' and
                                       'keypoints')
        @param image_limits: Limits of image to use when comparing for coregistration
        @param num_iterations: Number of iterations (Only used with 'imreg_translation')
        """




        self._image_limits = image_limits
        self._num_iterations = num_iterations
        super(Coregister, self).__init__(str_description, ap_paramList)
65 |
66 | def process(self, obj_data):
67 | """
68 | Coregister images
69 |
70 | @param obj_data: Image data wrapper
71 | """
72 |
73 | reg_type = self.ap_paramList[0]()
74 |
75 | master_burst_list = None
76 | for label, data in obj_data.getIterator():
77 | if master_burst_list == None:
78 | master_burst_list = select_valid_lines(data, obj_data.info(label)['Tree'], cut=False)
79 | master_valid_lines = get_valid_lines(obj_data.info(label)['Tree'], per_burst=True)
80 | else:
81 |
82 | burst_valid_lines = get_valid_lines(obj_data.info(label)['Tree'], per_burst=True)
83 | valid_lines = [np.logical_and(master_lines, burst_lines)
84 | for master_lines, burst_lines in
85 | zip(master_valid_lines, burst_valid_lines)]
86 |
87 | burst_list = select_valid_lines(data, obj_data.info(label)['Tree'], cut=False)
88 | lines_per_burst = int(obj_data.info(label)['Tree'].find('swathTiming/linesPerBurst').text)
89 | samples_per_burst = int(obj_data.info(label)['Tree'].find('swathTiming/samplesPerBurst').text)
90 | lines, samples = np.meshgrid(np.arange(lines_per_burst), np.arange(samples_per_burst), indexing = 'ij')
91 | ramp = SentinelRamp(obj_data.info(label))
92 |
93 | for index, (burst_lines, burst) in enumerate(zip(valid_lines, burst_list)):
94 |
95 | start_valid_line = np.argmax(burst_lines)
96 | end_valid_line = lines_per_burst - np.argmax(burst_lines[::-1])
97 |
98 | if self._image_limits == None:
99 | line_slice = slice(start_valid_line, end_valid_line)
100 | sample_slice = slice(0, samples_per_burst)
101 |
102 | elif self._image_limits[index] != None:
103 | line_slice = self._image_limits[index][0]
104 | sample_slice = self._image_limits[index][1]
105 |
106 | if line_slice.start == None or \
107 | line_slice.start < start_valid_line:
108 | line_slice_start = start_valid_line
109 |
110 | else:
111 | line_slice_start = line_slice.start
112 |
113 | if line_slice.stop == None or \
114 | line_slice.stop > end_valid_line:
115 | line_slice_stop = end_valid_line
116 |
117 | else:
118 | line_slice_stop = line_slice.stop
119 |
120 | line_slice = slice(line_slice_start, line_slice_stop)
121 |
122 |
123 | else:
124 | continue
125 |
126 | master_burst = master_burst_list[index][line_slice, sample_slice]
127 |
128 | burst = burst[line_slice, sample_slice]
129 | deramp = -ramp(lines[line_slice, sample_slice], samples[line_slice,sample_slice], index)
130 |
131 |
132 | if reg_type == 'imreg_translation':
133 |
134 | for i in range(self._num_iterations):
135 |
136 | shift = ird.translation(np.abs(master_burst),
137 | np.abs(burst))
138 |
139 | transform_matrix = np.array([[1, 0, shift['tvec'][1]],
140 | [0, 1, shift['tvec'][0]]])
141 |
142 | burst, deramp = transform_slc(burst, deramp, transform_matrix)
143 |
144 | elif reg_type == 'imreg_affine':
145 |
146 | shift = ird.similarity(np.abs(master_burst), np.abs(burst), numiter=self._num_iterations)
147 |
148 | im_angle = np.deg2rad(shift['angle'])
149 | im_scale = shift['scale']
150 | im_tl = shift['tvec']
151 |
152 | transform_matrix = np.array([[im_scale*np.cos(im_angle), -im_scale*np.sin(im_angle), im_tl[1]],
153 | [im_scale*np.sin(im_angle), im_scale*np.cos(im_angle), im_tl[0]]], dtype=np.float32)
154 | burst = transform_slc(burst, deramp, transform_matrix)[0]
155 |
156 | if index != 0:
157 | pass
158 |
159 | elif reg_type == 'keypoints':
160 | transform_matrix = keypoints_align(scale_image(np.abs(master_burst)), scale_image(np.abs(burst)))
161 | burst = transform_slc(burst, deramp, transform_matrix)[0]
162 |
163 |
164 | if line_slice.start == None:
165 | line_start = 0
166 | elif line_slice.start < 0:
167 | line_start = lines_per_burst + line_slice.start
168 | else:
169 | line_start = line_slice.start
170 |
171 | if line_slice.stop == None:
172 | line_end = lines_per_burst
173 | elif line_slice.stop < 0:
174 | line_end = lines_per_burst + line_slice.stop
175 | else:
176 | line_end = line_slice.stop
177 |
178 | full_data_slice = slice(lines_per_burst*index + line_start, lines_per_burst*(index) + line_end)
179 |
180 | data[full_data_slice,sample_slice] = burst
181 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/deburst.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2017 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Standard library imports
28 | from collections import OrderedDict
29 |
30 | # scikit discovery imports
31 | from skdiscovery.data_structure.framework.base import PipelineItem
32 |
33 | # scikit data access imports
34 | from skdaccess.utilities.support import progress_bar
35 | from skdaccess.utilities.image_util import SplineLatLon
36 |
37 | # Pyinsar imports
38 | from pyinsar.processing.instruments.sentinel import retrieve_azimuth_time, read_geolocation, update_geolocation_lines
39 |
40 | # 3rd party imports
41 | from scipy.interpolate import SmoothBivariateSpline
42 |
43 |
class Deburst(PipelineItem):
    ''' Debursts Sentinel-1 TOPSAR data '''


    def __init__(self, str_description, cut_on_master=True):
        """
        Initialize Deburst item

        @param str_description: String description of item
        @param cut_on_master: Use the master burst cut on slave
        """
        # Bug fix: this was previously hard coded to True, silently
        # ignoring the cut_on_master argument
        self._cut_on_master = cut_on_master

        super(Deburst, self).__init__(str_description)

    def process(self, obj_data):
        '''
        Preprocesses sentinel 1 data

        @param obj_data: Data wrapper
        '''

        for index, (label, image) in enumerate(obj_data.getIterator()):

            tree = obj_data.info(label)['Tree']

            azimuth_time, line_index, split_indicies = retrieve_azimuth_time(tree)

            if self._cut_on_master and index==0:
                # First image is the master; remember its line selection
                master_azimuth_time = azimuth_time
                master_line_index = line_index
                master_split_indicies = split_indicies

            elif self._cut_on_master:
                # Reuse the master's line selection on slave images
                line_index = master_line_index

            obj_data.info(label)['Azimuth Time'] = azimuth_time[line_index].reset_index(drop=True)
            obj_data.info(label)['Split Indicies'] = split_indicies
            obj_data.info(label)['Line Index'] = line_index


            geo_info = read_geolocation(tree)

            updated_lines = update_geolocation_lines(tree, azimuth_time[line_index], geo_info)

            # Bilinear (kx=ky=1) splines mapping (line, sample) -> lat/lon
            lat_spline = SmoothBivariateSpline(updated_lines,
                                               geo_info['Samples'],
                                               geo_info['Latitudes'], kx=1, ky=1)

            lon_spline = SmoothBivariateSpline(updated_lines,
                                               geo_info['Samples'],
                                               geo_info['Longitudes'], kx=1, ky=1)


            obj_data.info(label)['Geolocation'] = SplineLatLon(lat_spline, lon_spline)

            # Keep only the selected (deburst) lines of the image
            obj_data.updateData(label, image[line_index,:])
101 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/deformation_to_phase.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Scikit Discovery imports
28 | from skdiscovery.data_structure.framework.base import PipelineItem
29 |
30 | # Pyinsar imports
31 | from pyinsar.processing.utilities import insar_simulator_utils
32 |
33 | # Standard library imports
34 | from collections import OrderedDict
35 |
36 | # 3rd party imports
37 | import numpy as np
38 |
class DeformationToPhase(PipelineItem):
    '''
    Convert deformation to phase
    '''

    def __init__(self, str_description, ap_paramList, xx, yy):
        '''
        Initialize Deformation to Phase pipeline item

        @param str_description: String description of item
        @param ap_paramList[track_angle] = Auto param of the track angle
        @param ap_paramList[min_ground_range_1] = Auto param of min_ground_range_1
        @param ap_paramList[height_1] = Auto param of height_1
        @param ap_paramList[is_right_looking] = Auto param of is_right_looking (boolean)
        @param ap_paramList[wavelength] = Auto param of the wavelength for converting deformation to phase
        @param ap_paramList[k] = Auto param of k
        @param xx = x coordinates
        @param yy = y coordinates
        '''
        self._xx = xx
        self._yy = yy

        super(DeformationToPhase, self).__init__(str_description, ap_paramList)

    def process(self, obj_data):
        """
        Convert deformations in a data wrapper to phases

        @param obj_data: Image data wrapper
        """

        # Evaluate the auto params once, in their declared order
        (track_angle, min_ground_range_1, height_1,
         is_right_looking, wavelength, k) = [param() for param in self.ap_paramList[:6]]

        for label, deformation in obj_data.getIterator():
            interferogram_phase = insar_simulator_utils.generate_interferogram_from_deformation(
                track_angle = track_angle,
                min_ground_range_1 = min_ground_range_1,
                height_1 = height_1,
                is_right_looking = is_right_looking,
                wavelength = wavelength,
                k = k,
                deformation = deformation,
                xx = self._xx,
                yy = self._yy)

            obj_data.updateData(label, interferogram_phase)
90 |
91 |
92 |
93 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/flat_earth.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Standard library imports
28 | from collections import OrderedDict
29 |
30 | # scikit discovery imports
31 | from skdiscovery.data_structure.framework.base import PipelineItem
32 |
33 | # pyinsar imports
34 | from pyinsar.processing.corrections.topography import ellipsoidal_earth_slant_ranges
35 | from pyinsar.processing.utilities.generic import OrbitInterpolation
36 | from pyinsar.processing.utilities.insar_simulator_utils import wrap
37 |
38 | # 3rd party imports
39 | from geodesy import wgs84
40 | import numpy as np
41 |
class FlatEarth(PipelineItem):
    """
    *** In Development *** Remove flat Earth contribution from interferogram
    """

    def __init__(self, str_description, x_range = None, y_range = None, k = 2,
                 remove_topography = False, save_correction=False):
        """
        Initialize Flat Earth item

        @param str_description: String describing item
        @param x_range: x pixel range to process (None for entire range)
        @param y_range: y pixel range to process (None for entire range)
        @param k: Number of satellite or aircraft passes used to generate the interferogram (1 or 2)
        @param remove_topography: Not implemented
        @param save_correction: Save the image used to correct the interferogram
        """
        self._x_range = x_range
        self._y_range = y_range
        self._save_correction = save_correction
        self.k = k

        super(FlatEarth, self).__init__(str_description)

    def process(self, obj_data):
        """
        Remove flat earth contribution

        @param obj_data: Input image data wrapper
        """
        if self._save_correction:
            flat_earth_dict = OrderedDict()

        for label, data in obj_data.getIterator():

            latlon = obj_data.info(label)['Geolocation']
            wavelength = obj_data.info(label)['Wavelength']

            image_names = ['image1', 'image2']

            # Default to the full image when no pixel range was given
            if self._x_range is None:
                x_start = 0
                x_end = data.shape[1]

            else:
                x_start = self._x_range[0]
                x_end = self._x_range[1]

            if self._y_range is None:
                y_start = 0
                y_end = data.shape[0]

            else:
                y_start = self._y_range[0]
                y_end = self._y_range[1]

            slant_range_dict = OrderedDict()

            # Slant ranges to an ellipsoidal Earth for each of the two passes
            for image_name in image_names:
                orbit_interp = OrbitInterpolation(obj_data.info(label)[image_name]['Orbit'])
                az_time = obj_data.info(label)[image_name]['Azimuth Time']

                slant_range_dict[image_name] = ellipsoidal_earth_slant_ranges(az_time, latlon, orbit_interp,
                                                                              x_start, x_end, y_start, y_end)[0]

            # Flat-earth phase from the slant-range difference between passes
            flat_earth_interferogram = wrap(-2 * np.pi * self.k *(slant_range_dict[image_names[0]] - slant_range_dict[image_names[1]])/wavelength)
            if self._save_correction:
                flat_earth_dict[label] = flat_earth_interferogram

            # Subtract the correction in place and rewrap the phase
            data[y_start:y_end,x_start:x_end] = wrap(data[y_start:y_end,x_start:x_end] - flat_earth_interferogram)

        if self._save_correction:
            obj_data.addResult(self.str_description, flat_earth_dict)
117 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/fusion/srtm_fusion.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # PyInSAR imports
28 | import pyinsar.processing.utilities.generic as generic_tools
29 | from pyinsar.processing.geography.coordinates import reproject_georaster
30 | from pyinsar.processing.discovery.srtm_egm96_wgs84 import SRTM_Transform
31 |
32 | # Scikit discovery imports
33 | from skdiscovery.data_structure.framework.base import PipelineItem
34 | from skdiscovery.data_structure.framework.discoverypipeline import DiscoveryPipeline
35 | from skdiscovery.data_structure.framework.stagecontainers import *
36 | from skdiscovery.data_structure.generic.accumulators import DataAccumulator
37 |
38 |
39 | # Scikit data access imports
40 | from skdaccess.utilities import srtm_util
41 | from skdaccess.geo.srtm.cache import DataFetcher as SRTMDF
42 |
43 |
44 | # 3rd part imports
45 | from osgeo import osr, gdal_array, gdal
46 | import numpy as np
47 |
class SRTM_Fusion(PipelineItem):
    """
    Find appropriate elevation data from SRTM

    Puts the elevation data as another layer in the image.
    Must have WKT and GeoTransform information available in metadata.
    """

    def __init__(self, str_description, username, password, convert_to_wgs84=False, **kwargs):
        """
        Initialize SRTM Fusion object

        @param str_description: String describing item
        @param username: Earth data username
        @param password: Earth data password
        @param convert_to_wgs84: Convert heights from EGM96 geoid to WGS84 ellipsoid
        @param kwargs: additional keyword arguments are given to the SRTM data fetcher
        """

        self.convert_to_wgs84 = convert_to_wgs84
        # Credentials are folded into the keyword arguments passed to the fetcher
        self.kwargs = kwargs
        self.kwargs['username'] = username
        self.kwargs['password'] = password

        super(SRTM_Fusion, self).__init__(str_description)

    def process(self, obj_data):
        """
        Add SRTM layer to image data

        @param obj_data: Image Data Wrapper
        """
        for label, data in obj_data.getIterator():

            # Projection information for the current image
            geotransform = obj_data.info(label)['GeoTransform']
            wkt = obj_data.info(label)['WKT']

            lon_lat_extents = generic_tools.get_lonlat_bounds(data.shape, wkt, geotransform)

            # Shape of a single layer (3d data has a leading channel axis)
            if data.ndim == 3:
                shape = data.shape[1:]
            else:
                shape = data.shape

            extents = generic_tools.get_image_extents(geotransform, shape)

            # Indexing below implies lon_lat_extents is
            # (min_lon, max_lon, min_lat, max_lat)
            min_lat = lon_lat_extents[2]
            max_lat = lon_lat_extents[3]
            min_lon = lon_lat_extents[0]
            max_lon = lon_lat_extents[1]

            # SRTM tiles covering the image's bounding box
            srtm_lat_lon = srtm_util.getSRTMLatLon(min_lat,
                                                   max_lat,
                                                   min_lon,
                                                   max_lon)

            srtmdf = SRTMDF(*srtm_lat_lon, **self.kwargs)

            if self.convert_to_wgs84:
                # Run a small discovery pipeline that converts the SRTM heights
                # from the EGM96 geoid to the WGS84 ellipsoid
                fl_transform = SRTM_Transform('SRTM_Transform')
                sc_transform = StageContainer(fl_transform)

                acc_data = DataAccumulator("Data", save_wrapper=True)
                sc_data = StageContainer(acc_data)

                pipe = DiscoveryPipeline(srtmdf, [sc_transform, sc_data])
                pipe.run()

                my_dw = pipe.getResults(0)['Data']

            else:
                # No conversion requested: fetch the data wrapper directly
                my_dw = srtmdf.output()


            srtm_info = list(my_dw.info().values())[0]

            srtm_data, srtm_extents, srtm_geotransform = srtm_util.getSRTMData(my_dw, min_lat, max_lat, min_lon, max_lon)

            gdal_srtm_ds = generic_tools.get_gdal_dataset(srtm_data, srtm_info['WKT'], srtm_geotransform)

            gdal_dtype = generic_tools.get_gdal_dtype(data.dtype)

            # Reproject the SRTM raster onto the image's grid and pixel size
            transformed_ds = reproject_georaster(gdal_srtm_ds, (geotransform[1], np.abs(geotransform[5])), new_projection_wkt=wkt,
                                                 no_data_value = np.nan, new_extent = extents, data_type=gdal_dtype)

            transformed_data = transformed_ds.ReadAsArray()

            # Append the elevation data as an extra leading-axis layer
            if data.ndim == 3:
                new_data = np.concatenate((data, transformed_data.reshape(1, *transformed_data.shape)))

            else:
                new_data = np.stack((data, transformed_data))

            obj_data.updateData(label, new_data)
143 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/interferogram.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Scikit Discovery imports
28 | from skdiscovery.data_structure.framework.base import PipelineItem
29 |
30 | # Standard library imports
31 | from collections import OrderedDict
32 |
33 | # 3rd party imports
34 | from more_itertools import pairwise
35 | import numpy as np
36 |
class Interferogram(PipelineItem):
    ''' Create Interferogram from SLC data'''

    def __init__(self, str_description, pairing='neighbor'):
        """
        Initialize Interferogram item

        @param str_description: String describing item
        @param pairing: How to pair SLC images. Currently only 'neighbor' is accepted
        """
        self._pairing = pairing

        super(Interferogram, self).__init__(str_description)

    def process(self, obj_data):
        """
        Create interferograms from SLC images in an image wrapper

        @param obj_data: Image wrapper containing SLC images

        @raises NotImplementedError: If the pairing method is not supported
        """

        data_dict = OrderedDict()
        metadata_dict = OrderedDict()

        if self._pairing == 'neighbor':
            data_iterator = pairwise(obj_data.getIterator())
        else:
            # Previously an unknown pairing fell through and crashed later
            # with a confusing NameError on data_iterator; fail fast instead
            raise NotImplementedError('Pairing "{}" is not supported'.format(self._pairing))

        # Geolocation metadata is always taken from the first (master) image
        master_label = next(obj_data.getIterator())[0]

        for (label1, image1), (label2, image2) in data_iterator:
            new_label = label1 + ' ' + label2
            # Interferogram phase: angle of image1 * conj(image2)
            data_dict[new_label] = np.angle(image1 * np.conj(image2))
            metadata_dict[new_label] = OrderedDict()
            metadata_dict[new_label]['image1'] = obj_data.info(label1)
            metadata_dict[new_label]['image2'] = obj_data.info(label2)
            if 'Geolocation' in obj_data.info(master_label):
                metadata_dict[new_label]['Geolocation'] = obj_data.info(master_label)['Geolocation']
            metadata_dict[new_label]['Wavelength'] = obj_data.info(label1)['Wavelength']

        obj_data.data = data_dict
        obj_data.meta_data = metadata_dict
79 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/los_deformation.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Scikit Discovery imports
28 | from skdiscovery.data_structure.framework.base import PipelineItem
29 |
30 | # Pyinsar imports
31 | from pyinsar.processing.utilities.insar_simulator_utils import change_in_range_to_phase, phase_to_change_in_range
32 |
33 | # Standard library imports
34 | from collections import OrderedDict
35 |
36 | # 3rd party imports
37 | import numpy as np
38 |
39 |
40 |
class LOS_Deformation_Phase(PipelineItem):
    """
    Converts between LOS deformation and phase

    *** In Development ***
    """

    def __init__(self, str_description, wavelength, k=2, convert_target='los', channel_index = None):
        """
        Initialize LOS Deformation Phase item

        @param str_description: String describing item
        @param wavelength: Radar wavelength
        @param k: Number of radar passes
        @param convert_target: Convert to 'los' or 'phase'
        @param channel_index: Which channel index to use (None if there is no channel axis)
        """

        self.wavelength = wavelength
        # Bug fix: k was previously never stored, making process() fail on self.k
        self.k = k
        self.convert_target = convert_target
        self.channel_index = channel_index

        super(LOS_Deformation_Phase, self).__init__(str_description)

    def process(self, obj_data):
        """
        Convert between LOS deformation and phase

        @param obj_data: Image Wrapper

        @raises RuntimeError: If convert_target is neither 'los' nor 'phase'
        """

        # NOTE(review): mapping kept from the original code; confirm that target
        # 'los' really should apply change_in_range_to_phase and not the inverse
        if self.convert_target == 'los':
            convert_function = change_in_range_to_phase
        elif self.convert_target == 'phase':
            convert_function = phase_to_change_in_range
        else:
            raise RuntimeError('Conversion target "{}" not understood'.format(self.convert_target))

        for label, data in obj_data.getIterator():

            # Bug fix: these previously referenced the bare name channel_index,
            # which raised a NameError; the stored attribute is used instead
            if self.channel_index is None:
                data = convert_function(data, self.wavelength, self.k)

            else:
                data[self.channel_index, ...] = convert_function(data[self.channel_index, ...], self.wavelength, self.k)

            obj_data.updateData(label, data)
88 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/mask.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Standard library imports
28 | import math
29 |
30 | # Pyinsar imports
31 | from pyinsar.output.export_georaster import create_georaster_from_array
32 | from pyinsar.processing.geography.coordinates import reproject_georaster
33 | from pyinsar.processing.utilities.generic import get_image_extents
34 |
35 | # skdiscovery imports
36 | from skdiscovery.data_structure.framework.base import PipelineItem
37 |
38 | # 3rd party imports
39 | from osgeo import gdal
40 |
class Mask(PipelineItem):
    """
    Pipeline item used for masking images
    """

    def __init__(self, str_description, mask, mask_value=math.nan, geotransform=None, wkt=None):
        """
        Initialize Mask item

        If geotransform and wkt are provided, then mask
        will be transformed before being applied

        @param str_description: String describing item
        @param mask: Array of zeros and ones with the same shape as the input images (1 for mask, 0 for no mask)
        @param mask_value: Value to set the masked values to
        @param geotransform: Geotransform of mask
        @param wkt: String of the well known text describing projection
        """

        # Either both projection parameters are supplied, or neither
        have_geotransform = geotransform is not None
        have_wkt = wkt is not None

        if have_geotransform != have_wkt:
            raise RuntimeError('Must supply both geotransform and wkt or neither of them')

        # Only reproject the mask when projection information was given
        self._apply_transform = have_geotransform and have_wkt

        self.mask = mask
        self.mask_value = mask_value
        self._geotransform = geotransform
        self._wkt = wkt


        super(Mask, self).__init__(str_description)


    def process(self, obj_data):
        """
        Mask images

        @param obj_data: Image data wrapper
        """
        for label, data in obj_data.getIterator():

            if not self._apply_transform:
                mask = self.mask

            else:
                # Rasterize the mask and reproject it onto this image's grid
                mask_ds = create_georaster_from_array(georaster_array = self.mask,
                                                      geotransform = self._geotransform,
                                                      projection = self._wkt,
                                                      data_type = gdal.GDT_Int16,
                                                      no_data_value=-1)

                target_geotransform = obj_data.info(label)['GeoTransform']
                target_wkt = obj_data.info(label)['WKT']
                target_extent = get_image_extents(target_geotransform, data.shape)

                reprojected_ds = reproject_georaster(mask_ds,
                                                     (target_geotransform[1], abs(target_geotransform[5])),
                                                     new_projection_wkt=target_wkt,
                                                     no_data_value = -1,
                                                     new_extent = target_extent,
                                                     interpolation_method=gdal.GRA_NearestNeighbour,
                                                     data_type=gdal.GDT_Int16)

                mask = reprojected_ds.ReadAsArray().astype(bool)

            # Overwrite masked pixels in place
            data[mask] = self.mask_value
110 |
111 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/project.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Pyinsar imports
28 | from pyinsar.processing.utilities.generic import project_insar_data, AffineGlobalCoords, get_gdal_dtype
29 |
30 | # Scikit data access imports
31 | from skdaccess.utilities.image_util import AffineGlobalCoords
32 |
33 | # Scikit discovery imports
34 | from skdiscovery.data_structure.framework.base import PipelineItem
35 |
36 | # 3rd party imports
37 | from osgeo import gdal, osr, gdal_array
38 |
39 |
40 |
class Project(PipelineItem):
    """
    *** In Development *** Pipeline item to project and image
    """

    def __init__(self, str_description, target_projection='tm', center_coords = 'all'):
        """
        Initialize TransformImage item

        @param str_description: String describing item
        @param target_projection: Target projection (currently unused)
        @param center_coords: What to use for the central coordinates for the projection
                              'all': Use each images center coordinates for it's central projection coordinates
                              'first': Use the center of the first image

        """

        self._target_projecton = target_projection
        self.center_coords = center_coords

        super(Project, self).__init__(str_description)


    def _get_center_coords(self, wkt, geotransform, data_shape):
        """
        Determine the WGS84 coordinates of the center of an image

        @param wkt: Well known text describing the image projection
        @param geotransform: Geotransform of the image
        @param data_shape: Shape of the image data (2d or 3d)

        @return Center coordinates of the image in WGS84
        """

        target_srs = osr.SpatialReference()
        target_srs.ImportFromEPSG(4326)

        source_srs = osr.SpatialReference()
        source_srs.ImportFromWkt(wkt)

        affine = AffineGlobalCoords(geotransform)

        # For an image stack the spatial dimensions are the last two
        if len(data_shape) == 3:
            y_size, x_size = data_shape[1], data_shape[2]
        else:
            y_size, x_size = data_shape[0], data_shape[1]

        to_wgs84 = osr.CreateCoordinateTransformation(source_srs, target_srs)
        proj_y, proj_x = affine.getProjectedYX(y_size/2, x_size/2)

        return to_wgs84.TransformPoint(proj_x, proj_y)[:2]


    def process(self, obj_data):
        """
        Project data in an image wrapper

        @param obj_data: Image wrapper
        """

        for index, (label, data) in enumerate(obj_data.getIterator()):

            wkt = obj_data.info(label)['WKT']
            geotransform = obj_data.info(label)['GeoTransform']

            # 'all': recompute the projection center for every image;
            # 'first': compute it only once, from the first image
            mode = self.center_coords.lower()
            if mode == 'all' or (mode == 'first' and index == 0):
                center_lon, center_lat = self._get_center_coords(wkt, geotransform, data.shape)

            # Wrap the array in an in-memory GDAL dataset with its georeferencing
            ds = gdal_array.OpenNumPyArray(data)
            ds.SetGeoTransform(geotransform)
            ds.SetProjection(obj_data.info(label)['WKT'])

            reprojected_ds = project_insar_data(ds, center_lon, center_lat,
                                                data_type = get_gdal_dtype(data.dtype))

            obj_data.updateData(label, reprojected_ds.ReadAsArray())
            obj_data.info(label)['WKT'] = reprojected_ds.GetProjection()
            obj_data.info(label)['GeoTransform'] = reprojected_ds.GetGeoTransform()
119 |
120 |
121 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/rotate_squares.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 |
28 | from collections import OrderedDict
29 |
30 | from skdiscovery.data_structure.framework.base import PipelineItem
31 | from skdiscovery.utilities.patterns.image_tools import generateSquaresAroundPoly
32 |
33 | import numpy as np
34 | import scipy as sp
35 |
36 | from pyinsar.processing.utilities import insar_simulator_utils
37 |
def rotateSquare(image, square, angle, order):
    '''
    Rotate a subsection of an image defined by a shapely square

    @param image: Full image containing subsection to be rotated
    @param square: Shapely square
    @param angle: Angle of rotation
    @param order: Order of spline interpolation

    @return Rotated subsection of the image, cropped to the square's size
    '''
    # np.int was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # int is the documented replacement
    x_start, y_start, x_end, y_end = np.rint(square.bounds).astype(int)
    size = x_end - x_start

    # Extract a window larger than the square so no data is lost at the
    # corners when the rotated result is cropped back to the original size
    half_size = int(np.ceil(size/2.0))
    x_slice = slice(x_start - half_size, x_end + half_size)
    y_slice = slice(y_start - half_size, y_end + half_size)

    # 'import scipy as sp' alone does not guarantee the ndimage submodule
    # is loaded; import it explicitly before use
    import scipy.ndimage

    rotated_image = sp.ndimage.rotate(image[y_slice, x_slice], angle, cval=np.nan, reshape=True, order=order)
    return insar_simulator_utils.crop_array_from_center(rotated_image, (size, size))
57 |
58 |
class RotateSquares(PipelineItem):
    '''
    Generate new images by rotating subsections of data defined by Shapely squares
    '''

    def __init__(self, str_description, ap_paramList, square_result_name, angles, clean=True):
        '''
        Initialize RotateSquares object

        @param str_description: String describing class
        @param ap_paramList[SplineOrder]: Spline order used in interpolation
        @param square_result_name: Name of pipeline item that contains the Shapely squares
        @param angles: Angles used when rotating squares
        @param clean: Remove any squares that contain NaN's
        '''
        self._angles = angles
        self._square_result_name = square_result_name
        self._clean = clean

        super(RotateSquares, self).__init__(str_description, ap_paramList)


    def process(self, obj_data):
        '''
        Generate rotated images based on Shapely squares

        @param obj_data: Image data wrapper
        '''

        rotated_data = OrderedDict()
        rotated_metadata = OrderedDict()

        square_dict = obj_data.getResults()[self._square_result_name]
        spline_order = self.ap_paramList[0]()

        for label, data in obj_data.getIterator():

            # The square index advances once per square, across all angles
            for square_index, square in enumerate(square_dict[label]):
                for angle in self._angles:
                    rotated = rotateSquare(data, square, angle, spline_order)

                    # Optionally discard rotated squares containing NaN's
                    if self._clean and np.count_nonzero(np.isnan(rotated)) != 0:
                        continue

                    new_label = 'Image {}, Square {:d}, rotated {:f} degrees'.format(label, square_index, angle)

                    rotated_data[new_label] = rotated

                    entry = OrderedDict()
                    entry['angle'] = angle
                    entry['square'] = square
                    entry['original_image'] = label
                    rotated_metadata[new_label] = entry

        obj_data.update(rotated_data)
        obj_data.updateMetadata(rotated_metadata)
121 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/shown_cnn_classes.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Pyinsar imports
28 | from pyinsar.processing.utilities.generic import generateMatplotlibRectangle
29 |
30 | # Scikit Discovery imports
31 | from skdiscovery.data_structure.framework.base import PipelineItem
32 |
33 | # 3rd party imports
34 | import matplotlib.pyplot as plt
35 | import matplotlib as mpl
36 | import numpy as np
37 |
38 |
39 |
class ShowCNNClasses(PipelineItem):
    """
    Display CNN classifications on segments of an image
    """

    def __init__(self, str_description, class_name, colors):
        """
        Initialize ShowCNNClasses item

        @param str_description: String name of item
        @param class_name: Name of classes
        @param colors: List of colors containing a color for each class
        """
        self.class_name = class_name
        self.colors = colors

        super(ShowCNNClasses, self).__init__(str_description)

    def process(self, obj_data):
        """
        Show the images with classifications

        @param obj_data: Image data wrapper
        """
        for data_label, data in obj_data.getIterator():

            classification = obj_data.getResults()[self.class_name][data_label]
            extents = classification['extents']
            labels = classification['labels']
            possible_labels = np.unique(labels)

            if len(self.colors) < len(possible_labels):
                raise RuntimeError('Not enough colors specified')

            fig = plt.figure()
            ax = plt.axes()
            ax.imshow(data)

            # Outline every segment with the color assigned to its class
            for class_label, color in zip(possible_labels, self.colors):
                rectangles = [generateMatplotlibRectangle(extent)
                              for extent in extents[labels == class_label]]
                patch_collection = mpl.collections.PatchCollection(rectangles,
                                                                   edgecolor=color,
                                                                   facecolor='none',
                                                                   alpha = 0.5)
                ax.add_collection(patch_collection)

            plt.show()
            plt.close()
83 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/srtm_egm96_wgs84.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Pyinsar imports
28 | from pyinsar.processing.utilities.generic import get_gdal_dtype
29 | from pyinsar.processing.geography.coordinates import georaster_vertical_datum_shift
30 |
31 | # Scikit data access imports
32 | from skdaccess.utilities.image_util import AffineGlobalCoords
33 | from skdaccess.generic.file.cache import DataFetcher as FILEDF
34 | from skdaccess.framework.param_class import *
35 |
36 | # Scikit discovery imports
37 | from skdiscovery.data_structure.framework.base import PipelineItem
38 |
39 | # 3rd party imports
40 | from osgeo import gdal, osr, gdal_array
41 |
42 |
43 |
class SRTM_Transform(PipelineItem):
    """
    *** In Development *** Pipeline item to transform heights from
    SRTM from the EGM96 geoid to the WGS84 ellipsoid
    """

    def __init__(self, str_description):
        """
        Initialize SRTM_Transform item

        @param str_description: String describing item
        """
        super(SRTM_Transform, self).__init__(str_description)


    def process(self, obj_data):
        """
        Project data in an image wrapper

        @param obj_data: Image wrapper
        """

        # Retrieve the EGM96 geoid grid file used for the vertical datum shift
        egm_fetcher = FILEDF([AutoList(['http://download.osgeo.org/proj/vdatum/egm96_15/egm96_15.gtx'])])
        egm_wrapper = egm_fetcher.output()
        egm_url, egm_filename = next(egm_wrapper.getIterator())

        for label, data in obj_data.getIterator():

            geotransform = obj_data.info(label)['GeoTransform']

            # Wrap the array in an in-memory GDAL dataset with its georeferencing
            ds = gdal_array.OpenNumPyArray(data)
            ds.SetGeoTransform(geotransform)
            ds.SetProjection(obj_data.info(label)['WKT'])

            # Computed for parity with other pipeline items (currently unused here)
            gdal_dtype = get_gdal_dtype(data.dtype)

            shifted_ds = georaster_vertical_datum_shift(
                georaster = ds,
                old_datum_proj4 = '+proj=longlat +datum=WGS84 +no_defs +geoidgrids=' + egm_filename,
                new_datum_proj4 = '+proj=longlat +datum=WGS84 +no_defs'
            )

            obj_data.updateData(label, shifted_ds.ReadAsArray())
            # NOTE: the WKT and GeoTransform metadata are left unchanged by this item
92 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/temporal_decorrelation.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Scikit Discovery imports
28 | from skdiscovery.data_structure.framework.base import PipelineItem
29 |
30 | # Pyinsar imports
31 | from pyinsar.processing.utilities import insar_simulator_utils
32 | from pyinsar.processing.machine_learning.geostatistics.sequential_gaussian_simulation import run_sgs, compute_averaged_cumulative_distribution_from_array
33 |
34 |
35 | # Standard library imports
36 | from collections import OrderedDict
37 | import random
38 |
39 | # 3rd party imports
40 | import numpy as np
41 | import scipy as sp
42 |
class TemporalDecorrelation(PipelineItem):
    '''
    Pipeline item to add temporal decorrelation to some phase
    '''

    def __init__(self, str_description, ap_paramList, grid_yx_spacing, wavelength, seed=None, save_noise=False):
        '''
        Initialize Temporal Decorrelation pipeline item

        @param str_description: String description of item
        @param ap_paramList[vario_models] = Auto list of SGS models
        @param ap_paramList[vario_sills] = Auto list of SGS sills
        @param ap_paramList[vario_azimuth] = Auto param of SGS azimuth
        @param ap_paramList[vario_ranges] = Auto list of SGS ranges
        @param ap_paramList[max_num_data] = Auto param of the max size of the neighborhood
        @param ap_paramList[decorrelation_mean] = Auto param of the decorrelation mean in the same units as the wavelength
        @param ap_paramList[decorrelation_std] = Auto param of decorrelation standard deviation in the same units as the wavelength
        @param grid_yx_spacing: The y,x grid spacing
        @param wavelength: Wavelength for converting to phase (from path length)
        @param seed: Seed to use when generating noise
        @param save_noise: Boolean indicating whether or not to save a copy of the noise in the results
        '''
        self._grid_yx_spacing = grid_yx_spacing
        self._seed = seed
        self._wavelength = wavelength
        self._save_noise = save_noise

        super(TemporalDecorrelation, self).__init__(str_description, ap_paramList)

    def process(self, obj_data):
        """
        Add temporal decorrelation to a phase image

        @param obj_data: Image data wrapper
        """

        # Unpack the auto parameters controlling the SGS noise model
        vario_models       = self.ap_paramList[0]()
        vario_sills        = self.ap_paramList[1]()
        vario_azimuth      = self.ap_paramList[2]()
        vario_ranges       = self.ap_paramList[3]()
        max_num_data       = self.ap_paramList[4]()
        decorrelation_mean = self.ap_paramList[5]()
        decorrelation_std  = self.ap_paramList[6]()

        noise_results = OrderedDict() if self._save_noise else None

        for label, data in obj_data.getIterator():

            # Placeholder grid of missing values for the simulation
            simulation_grid = np.full_like(data, -99999)

            # run_sgs requires an integer seed; draw one from the system
            # entropy source when none was supplied
            if self._seed is not None:
                seed = self._seed
            else:
                seed = random.SystemRandom().randint(0, 2**32 - 1)

            raw_temporal_decorrelation = run_sgs(simulation_grid,
                                                 self._grid_yx_spacing,
                                                 vario_models,
                                                 vario_sills,
                                                 vario_azimuth,
                                                 vario_ranges,
                                                 max_number_data = max_num_data,
                                                 seed = seed)

            # Map the simulated field through a Laplace distribution to obtain
            # path-length changes, then convert those to phase
            cumulative_frequency = compute_averaged_cumulative_distribution_from_array(raw_temporal_decorrelation[0])

            path_changes = sp.stats.laplace.ppf(cumulative_frequency,
                                                loc = decorrelation_mean,
                                                scale = decorrelation_std)

            phase_noise = insar_simulator_utils.change_in_range_to_phase(path_changes, wavelength = self._wavelength)

            if noise_results is not None:
                noise_results[label] = phase_noise

            obj_data.updateData(label, data + phase_noise)

        if noise_results is not None:
            obj_data.addResult(self.str_description, noise_results)
132 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/train_cnn.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Standard library imports
28 | from collections import OrderedDict
29 |
30 | # Pyinsar imports
31 | from pyinsar.processing.utilities import ann
32 |
33 | # Scikit Discovery imports
34 | from skdiscovery.data_structure.framework.base import PipelineItem
35 |
36 |
37 | # 3rd party imports
38 | import numpy as np
39 |
40 |
class TrainCNN(PipelineItem):
    """ Train a CNN """

    def __init__(self, str_description, cnn_network_dir, batch_size, config=None):
        """
        Initialize TrainCNN item

        @param str_description: String describing item
        @param cnn_network_dir: String containing the directory where the CNN is stored
        @param batch_size: Batch size to use when training data
        @param config: Dictionary of extra options to use with the tensorflow session
        """
        self.cnn_network_dir = cnn_network_dir
        self.batch_size = batch_size
        self.config = config

        super(TrainCNN, self).__init__(str_description, [])

    def process(self, obj_data):
        """
        Training CNN using data in Image wrapper

        @param obj_data: Image wrapper
        """

        for label, data in obj_data.getIterator():

            # Each image carries its training labels in its metadata
            ann.train(image_data = data,
                      image_labels = obj_data.info(label)['Labels'],
                      model_dir = self.cnn_network_dir,
                      batch_size = self.batch_size,
                      num_epochs = 1,
                      shuffle = False,
                      config = self.config)
77 |
--------------------------------------------------------------------------------
/pyinsar/processing/discovery/wrap_phase.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Standard library imports
28 | from collections import OrderedDict
29 |
30 | # scikit discovery imports
31 | from skdiscovery.data_structure.framework.base import PipelineItem
32 |
33 | # pyinsar imports
34 | from pyinsar.processing.utilities.insar_simulator_utils import wrap
35 |
class WrapPhase(PipelineItem):
    """ Pipeline Item that wraps phase """

    def process(self, obj_data):
        """
        Wrap phase of images

        @param obj_data: Image data wrapper
        """
        # Replace each image with its wrapped-phase equivalent
        for image_label, image in obj_data.getIterator():
            obj_data.updateData(image_label, wrap(image))
49 |
--------------------------------------------------------------------------------
/pyinsar/processing/geography/__init__.py:
--------------------------------------------------------------------------------
# Public submodules of the geography package
__all__ = ["coordinates", "geodesy", "geomorphometry"]
--------------------------------------------------------------------------------
/pyinsar/processing/geography/geomorphometry.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Guillaume Rongier
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | import math
26 | import numpy as np
27 |
28 | from numba import jit
29 |
@jit(nopython = True)
def add_symmetric_border(array, border_size = 1):
    '''
    Add a symmetric border to a 2D array

    The border mirrors the values nearest each edge (the edge row/column
    is repeated first), similar to numpy.pad with mode='symmetric'

    @param array: The array
    @param border_size: The size of the border

    @return The expanded array
    '''
    assert len(array.shape) == 2, "The array must be two-dimensional"
    assert border_size > 0, "The border size must be strictly higher than 0"

    # Allocate the enlarged array and copy the original into its center;
    # NaN marks cells not yet filled
    bordered_array = np.full((array.shape[0] + 2*border_size,
                              array.shape[1] + 2*border_size),
                             np.nan)
    bordered_array[border_size:-border_size, border_size:-border_size] = array

    # Mirror the rightmost and leftmost columns of the original array
    # (reversed slices produce the reflection)
    bordered_array[border_size:-border_size, -border_size:] = array[:, -1:-border_size - 1:-1]
    bordered_array[border_size:-border_size, border_size - 1::-1] = array[:, 0:border_size]
    # Mirror the top and bottom rows from the already-filled rows of the
    # bordered array so the corners are populated as well
    bordered_array[0:border_size, :] = bordered_array[2*border_size - 1:border_size - 1:-1, :]
    bordered_array[-border_size:, :] = bordered_array[-border_size - 1:-2*border_size - 1:-1, :]

    return bordered_array
54 |
@jit(nopython = True)
def compute_gradient_at_cell(array, j, i, grid_yx_spacing, axis = 1):
    '''
    Compute Horn's gradient for a given cell of an array

    Uses the 3x3 Horn stencil: the difference between the two neighboring
    lines of cells along the chosen axis, with the central pair weighted twice.

    @param array: The array
    @param j: The index of the cell along the y axis
    @param i: The index of the cell along the x axis
    @param grid_yx_spacing: The cell size, which is considered fixed for the entire array
    @param axis: the axis along which the gradient is computed (0: y; 1: x)

    @return The gradient value for the cell
    '''
    assert len(array.shape) == 2 and len(grid_yx_spacing) == 2, "The array must be two-dimensional"
    assert 0 <= j < array.shape[0] and 0 <= i < array.shape[1], "The cell is outside the array"
    assert axis == 0 or axis == 1, "Invalid axis"

    if axis == 0:
        # Weighted difference between the row below (j + 1) and above (j - 1)
        weighted_difference = ((array[j + 1, i + 1] - array[j - 1, i + 1]) +
                               2*(array[j + 1, i] - array[j - 1, i]) +
                               (array[j + 1, i - 1] - array[j - 1, i - 1]))
        spacing = grid_yx_spacing[0]
    else:
        # Weighted difference between the column right (i + 1) and left (i - 1)
        weighted_difference = ((array[j + 1, i + 1] - array[j + 1, i - 1]) +
                               2*(array[j, i + 1] - array[j, i - 1]) +
                               (array[j - 1, i + 1] - array[j - 1, i - 1]))
        spacing = grid_yx_spacing[1]

    # Normalize by the total stencil weight (8) times the cell spacing
    return weighted_difference/(8.*spacing)
89 |
@jit(nopython = True)
def compute_horne_slope(array,
                        grid_yx_spacing):
    '''
    Compute Horn's slope of a 2D array with a fixed cell size

    @param array: The array
    @param grid_yx_spacing: The cell size, which is considered fixed for the entire array

    @return The slope (in degree), same shape as the input array
    '''
    assert len(array.shape) == 2 and len(grid_yx_spacing) == 2, "The array must be two-dimensional"

    # Pad with a one-cell mirrored border so the 3x3 gradient stencil can be
    # evaluated at the edges of the original array
    array = add_symmetric_border(array)

    slope_array = np.full(array.shape, np.nan)

    # Iterate over the interior only; the border cells exist solely to feed
    # the stencil
    for j in range(1, slope_array.shape[0] - 1):
        for i in range(1, slope_array.shape[1] - 1):
            dx = compute_gradient_at_cell(array, j, i,
                                          grid_yx_spacing)
            dy = compute_gradient_at_cell(array, j, i,
                                          grid_yx_spacing, 0)
            # Slope = arctan of the gradient magnitude, converted to degrees
            slope_array[j, i] = math.atan(math.sqrt(dx*dx + dy*dy))*180./math.pi

    # Drop the padded border before returning
    return slope_array[1:-1, 1:-1]
--------------------------------------------------------------------------------
/pyinsar/processing/instruments/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MITeaps/pyinsar/4d22e3ef90ef842d6b390074a8b5deedc7658a2b/pyinsar/processing/instruments/__init__.py
--------------------------------------------------------------------------------
/pyinsar/processing/isce/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["input_file"]
--------------------------------------------------------------------------------
/pyinsar/processing/isce/input_file.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Guillaume Rongier
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | import os
26 |
def create_product_xml(xml_path,
                       product_path,
                       product_type = 'master',
                       product_output_path = None,
                       product_orbit_path = None,
                       product_auxiliary_data_path = None,
                       do_add = True):
    '''
    Create the xml file defining a Sentinel-1 product for processing with ISCE

    @param xml_path: Path to the xml file
    @param product_path: Path to the Sentinel-1 product
    @param product_type: Master or slave product
    @param product_output_path: Path for the processing outputs of this product
    @param product_orbit_path: Path to the folder containing orbit files
    @param product_auxiliary_data_path: Path to the folder containing auxiliary data
    @param do_add: True if the component is added to an already existing xml file, false otherwise
    '''
    # NOTE(review): the write() calls below only emit values and whitespace;
    # the XML element tags appear to be missing from the string literals
    # (possibly stripped when this file was exported) -- compare against the
    # upstream repository before relying on the generated file.
    # NOTE(review): product_type is accepted but never used in this body.
    mode = 'w'
    prefix = ''
    # Appending: open in append mode and indent this component one level deeper
    if do_add == True:
        mode = 'a'
        prefix = '    '
    with open(xml_path, mode) as xml_file:
        xml_file.write(prefix + '''\n''')
        xml_file.write(prefix + '''    ["''' + product_path + '''"]\n''')
        # Optional sub-elements are only written when a value was supplied
        if product_auxiliary_data_path is not None:
            xml_file.write(prefix + '''    ''' + product_auxiliary_data_path + '''\n''')
        if product_orbit_path is not None:
            xml_file.write(prefix + '''    ''' + product_orbit_path + '''\n''')
        if product_output_path is not None:
            xml_file.write(prefix + '''    ''' + product_output_path + '''\n''')
        xml_file.write(prefix + '''\n''')
60 |
def create_topsApp_xml(xml_folder_path,
                       master_path,
                       slave_path,
                       master_output_path = None,
                       slave_output_path = None,
                       master_orbit_path = None,
                       slave_orbit_path = None,
                       master_auxiliary_data_path = None,
                       slave_auxiliary_data_path = None,
                       do_unwrap = True,
                       unwrapper_name = 'snaphu_mcf',
                       xml_filename = 'topsApp.xml'):
    '''
    Create the topsApp.xml file for processing Sentinel-1 data with ISCE

    @param xml_folder_path: Path to the folder that will contain the xml file
    @param master_path: Path to the master Sentinel-1 product
    @param slave_path: Path to the slave Sentinel-1 product
    @param master_output_path: Path for the processing outputs of the master product
    @param slave_output_path: Path for the processing outputs of the slave product
    @param master_orbit_path: Path to the folder containing orbit files for the master product
    @param slave_orbit_path: Path to the folder containing orbit files for the slave product
    @param master_auxiliary_data_path: Path to the folder containing auxiliary data for the master product
    @param slave_auxiliary_data_path: Path to the folder containing auxiliary data for the slave product
    @param do_unwrap: True to unwrap the created interferogram, false otherwise
    @param unwrapper_name: Name of the unwrapper when do_unwrap is true
    @param xml_filename: Name of the topsApp.xml file to create

    @return The path to the created topsApp.xml file
    '''
    # NOTE(review): as in create_product_xml, the XML element tags appear to
    # be missing from the write() string literals -- likely stripped when this
    # file was exported; verify against the upstream repository.
    # Build the full file path, tolerating a missing trailing separator
    xml_path = xml_folder_path
    if xml_folder_path[-1] != '/':
        xml_path += '/'
    xml_path += xml_filename
    with open(xml_path, 'w') as xml_file:
        xml_file.write('''\n''')
        xml_file.write('''\n''')
        xml_file.write('''    \n''')
        xml_file.write('''    SENTINEL1\n''')
        if do_unwrap == True:
            xml_file.write('''    True\n''')
            xml_file.write('''    ''' + unwrapper_name + '''\n''')
    # The master and slave components are appended (do_add defaults to True)
    # to the file created above
    create_product_xml(xml_path,
                       master_path,
                       product_type = 'master',
                       product_output_path = master_output_path,
                       product_orbit_path = master_orbit_path,
                       product_auxiliary_data_path = master_auxiliary_data_path)
    create_product_xml(xml_path,
                       slave_path,
                       product_type = 'slave',
                       product_output_path = slave_output_path,
                       product_orbit_path = slave_orbit_path,
                       product_auxiliary_data_path = slave_auxiliary_data_path)
    # Close the document
    with open(xml_path, 'a') as xml_file:
        xml_file.write('''    \n''')
        xml_file.write('''\n''')

    return xml_path
120 |
def prepare_topsApps(product_paths,
                     result_folder_path,
                     orbit_path = None,
                     auxiliary_data_path = None,
                     do_unwrap = True,
                     unwrapper_name = 'snaphu_mcf',
                     do_all_successive_pairs = True):
    '''
    Create a Pair_X folder for each successive pair X of Sentinel-1 product in product_paths,
    and create a topsApp.xml to process that pair with ISCE

    @param product_paths: List of paths to the Sentinel-1 products
    @param result_folder_path: Directory where the Pair_X folders will be created
    @param orbit_path: Path to the folder containing orbit files
    @param auxiliary_data_path: Path to the folder containing auxiliary data
    @param do_unwrap: True to unwrap the created interferogram, false otherwise
    @param unwrapper_name: Name of the unwrapper when do_unwrap is true
    @param do_all_successive_pairs: True to pair every consecutive product
                                    ((0, 1), (1, 2), (2, 3), ...); False to
                                    pair products in non-overlapping pairs
                                    ((0, 1), (2, 3), ...)

    @return The list of paths to the created topsApp.xml files
    '''
    topsApp_paths = []
    # A step of 2 skips the overlapping pairs, e.g. (1, 2) between (0, 1)
    # and (2, 3)
    step = 1 if do_all_successive_pairs else 2
    pair_index = 1
    for i in range(0, len(product_paths) - 1, step):
        # os.path.join works whether or not result_folder_path ends with a
        # separator (plain concatenation produced e.g. 'resultsPair_1' when
        # the trailing '/' was missing)
        result_directory = os.path.join(result_folder_path, 'Pair_' + str(pair_index))
        os.makedirs(result_directory, exist_ok = True)
        topsApp_paths.append(create_topsApp_xml(result_directory,
                                                product_paths[i],
                                                product_paths[i + 1],
                                                master_output_path = 'master',
                                                slave_output_path = 'slave',
                                                master_orbit_path = orbit_path,
                                                slave_orbit_path = orbit_path,
                                                master_auxiliary_data_path = auxiliary_data_path,
                                                slave_auxiliary_data_path = auxiliary_data_path,
                                                do_unwrap = do_unwrap,
                                                unwrapper_name = unwrapper_name))
        pair_index += 1

    return topsApp_paths
--------------------------------------------------------------------------------
/pyinsar/processing/machine_learning/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["geostatistics", "neural_networks"]
--------------------------------------------------------------------------------
/pyinsar/processing/machine_learning/geostatistics/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["direct_sampling", "geostatistics_utils", "sequential_gaussian_simulation", "variogram"]
--------------------------------------------------------------------------------
/pyinsar/processing/machine_learning/geostatistics/geostatistics_utils.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Guillaume Rongier
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | from enum import Enum
26 | import numpy as np
27 |
28 | from numba import jit
29 |
class VariableType(Enum):
    '''Kind of variable handled by the geostatistical routines'''
    # Categorical variable
    DISCRETE = 0
    # Real-valued variable
    CONTINUOUS = 1
33 |
class PathType(Enum):
    '''Kind of path used to visit the cells of a grid'''
    # Cells visited in index order
    LINEAR = 0
    # Cells visited in random order
    RANDOM = 1
37 |
@jit(nopython = True, nogil = True)
def unflatten_index(flattened_index, array_shape):
    '''
    Unflatten an index for a 2D array

    @param flattened_index: The flattened index (i.e., a single integer)
    @param array_shape: The shape of the array for the two dimensions (j, i)

    @return The 2D index (j, i)
    '''
    # Row: number of complete rows before the index, wrapped to the height
    row_index = int((flattened_index/array_shape[1])%array_shape[0])
    # Column: remainder within the row
    column_index = int(flattened_index%array_shape[1])
    return (row_index, column_index)
51 |
def standardize(x, axis = None):
    '''
    Reduce and center a float or array

    Subtracts the (NaN-ignoring) mean and divides by the (NaN-ignoring)
    standard deviation.

    @param x: The float or array
    @param axis: The axis or axes along which the standardization is done.

    @return A float or array
    '''
    center = np.nanmean(x, axis = axis, keepdims = True)
    scale = np.nanstd(x, axis = axis, keepdims = True)
    return (x - center)/scale
66 |
def normalize(x):
    '''
    Rescale a float or array linearly to the range [0, 1] (min-max normalization)

    The previous docstring ("Reduce and center") was copied from standardize
    and did not describe this function.

    @param x: The float or array

    @return A float or array
    '''
    # NaNs are ignored when locating the extrema
    x_min = np.nanmin(x)
    x_max = np.nanmax(x)

    return (x - x_min)/(x_max - x_min)
--------------------------------------------------------------------------------
/pyinsar/processing/machine_learning/neural_networks/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["anomaly_identification"]
--------------------------------------------------------------------------------
/pyinsar/processing/utilities/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["insar_simulator_utils"]
--------------------------------------------------------------------------------
/pyinsar/processing/utilities/ann.py:
--------------------------------------------------------------------------------
1 | # Standard library imports
2 | from collections import OrderedDict
3 | import os
4 |
5 | # 3rd party imports
6 | import tensorflow as tf
7 | import numpy as np
8 |
9 |
def per_channel_standardization(input_tensor, name=None):
    """
    Standardize each channel of each image to zero mean and unit variance

    The standard deviation is clamped from below by 1/sqrt(num_pixels), the
    same protection tf.image.per_image_standardization applies, to avoid
    division by a near-zero deviation on flat images.

    @param input_tensor: Batch of images with shape (batch, height, width, channels)
    @param name: Optional name for the output op
    @return Standardized tensor of the same shape
    """
    channel_mean, channel_variance = tf.nn.moments(input_tensor, axes=(1,2), keep_dims=True)
    channel_stddev = tf.sqrt(channel_variance)
    num_pixels = tf.cast(tf.reduce_prod(input_tensor.shape[1:3]), input_tensor.dtype)
    channel_stddev = tf.maximum(channel_stddev, 1.0/tf.sqrt(num_pixels))

    return tf.divide(input_tensor - channel_mean, channel_stddev, name=name)
17 |
18 |
def buildCNN(image_height, image_width, model_dir, config=None, num_bands = 1,
             conv_filters = [40,20], conv_kernels = [[9,9],[5,5]],
             optimizer = tf.train.GradientDescentOptimizer(0.01)):
    """
    Build a convolutional neural network and save its initialized graph to disk

    The graph is constructed, its key tensors/ops are registered in named
    collections (so restoreGraph can look them up later), variables are
    initialized, and a first checkpoint is written to model_dir.

    @param image_height: Height of image in pixels
    @param image_width: Width of image in pixels
    @param model_dir: Directory to save network too
    @param config: Config to pass to tf.Session
    @param num_bands: Number of channels in image
    @param conv_filters: Number of convolution filters for each layer
    @param conv_kernels: Kernel sizes for each layer
    @param optimizer: Optimizer to use
    """
    # NOTE(review): conv_filters/conv_kernels are mutable default arguments
    # and the default optimizer is constructed once at import time; pass
    # explicit arguments to avoid shared state across calls.
    graph = tf.Graph()

    with tf.Session(graph=graph, config=config) as session:
        # Boolean switch between training and inference behavior (used by the
        # batch-normalization layers below); defaults to inference
        training = tf.placeholder_with_default(False, shape=(), name='Training')

        with tf.variable_scope('IO'):
            data_input = tf.placeholder(tf.float32, shape=(None, image_height, image_width, num_bands), name = 'Input')
            output = tf.placeholder(tf.int64, shape=(None), name='Output')

        with tf.name_scope('Clean'):
            # Per-channel standardization of the input batch
            normalize = per_channel_standardization(data_input, name='Normalize')

        with tf.name_scope('Convolution_Layers'):
            # Track the spatial size as 'valid' convolutions and 2x2 pooling
            # shrink it, so the flatten below uses the right length
            new_image_height = image_height
            new_image_width = image_width


            prev_layer = normalize
            for conv_index, (filters, kernel) in enumerate(zip(conv_filters, conv_kernels)):
                conv_layer = tf.layers.conv2d(prev_layer,
                                              filters=filters,
                                              kernel_size=kernel,
                                              padding='valid',
                                              activation=tf.nn.relu,
                                              name = 'Convolution_' + str(conv_index))
                new_image_height = length_after_valid_window(new_image_height, kernel[0], 1)
                new_image_width = length_after_valid_window(new_image_width, kernel[1], 1)

                pool = tf.layers.max_pooling2d(conv_layer,
                                               pool_size=[2,2],
                                               strides=2,
                                               name = 'Max_Pool_' + str(conv_index))

                prev_layer = pool
                new_image_height = length_after_valid_window(new_image_height, 2, 2)
                new_image_width = length_after_valid_window(new_image_width, 2, 2)


            # Flatten the final feature maps for the dense layers
            pool_flat = tf.reshape(prev_layer,
                                   [-1, new_image_height*new_image_width*conv_filters[-1]],
                                   name='Reshape')

        with tf.name_scope('Fully_Connected_Layers'):
            # Two dense layers, each followed by batch norm (gated on the
            # 'training' placeholder) and a ReLU
            dense1 = tf.layers.dense(inputs=pool_flat, units=1000, name='Dense_1')
            batch_norm1 = tf.layers.batch_normalization( dense1, training=training, momentum=0.9)
            activate1 = tf.nn.relu(batch_norm1, name='Activate_1')

            dense2 = tf.layers.dense(inputs=activate1, units=100, name='Dense_2')
            batch_norm2 = tf.layers.batch_normalization(dense2, training=training, momentum=0.9)
            activate2 = tf.nn.relu(batch_norm2, name='Activate_2')

            # Two output classes
            logits = tf.layers.dense(inputs=activate2, units=2, name='Logits')

        # update_norm_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)

        with tf.name_scope('Loss'):
            entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=output, name='Entropy')
            loss = tf.reduce_mean(entropy, name='Loss')

        with tf.name_scope('Train'):
            gradient = optimizer
            global_step = tf.train.get_or_create_global_step()
            minimize = gradient.minimize(loss, global_step = global_step, name='Minimize')

        with tf.name_scope('Evaluate'):
            # Fraction of the batch whose top-1 prediction matches the label
            correct_responses = tf.nn.in_top_k(logits, output, 1, name='Correct_Responses')
            evaluate = tf.reduce_mean(tf.cast(correct_responses, tf.float32), name='Evaluate')

        with tf.name_scope('Initialization'):
            initializer = tf.global_variables_initializer()


        # Register the handles restoreGraph() retrieves after import_meta_graph.
        # Note: the 'train' collection holds the boolean training placeholder,
        # not a training op.
        graph.add_to_collection('fit', minimize)
        graph.add_to_collection('input', data_input)
        graph.add_to_collection('output', output)
        graph.add_to_collection('train', training)
        graph.add_to_collection('global_step', global_step)
        graph.add_to_collection('initializer', initializer)
        graph.add_to_collection('evaluate', evaluate)
        graph.add_to_collection('logits', logits)

        # Initialize variables and write the first checkpoint
        initializer.run()
        saver = tf.train.Saver(name='Saver', save_relative_paths=True)
        saver.save(session, os.path.join(model_dir,'network'))
119 |
120 |
def train(image_data, image_labels, model_dir,
          batch_size, num_epochs, max_batches=None,
          status_line_rate = 50, target='', shuffle=True,
          config=None):
    """
    Train neural network

    Restores the latest checkpoint from model_dir, runs mini-batch training,
    and saves a new checkpoint at the end.

    @param image_data: Image data to train (shape [:,image_width, image_height])
    @param image_labels: Corresponding labels
    @param model_dir: Directory where network is stored
    @param batch_size: Batch size
    @param num_epochs: Number of epochs
    @param max_batches: Max number of patches (Typically used for testing)
    @param status_line_rate: Number of batches between outputting training information
    @param target: Unused
    @param shuffle: Whether or not to shuffle the training data
    @param config: Config to pass to tf.Session
    """

    # Leftover samples that do not fill a whole batch are dropped
    num_batches = image_data.shape[0] // batch_size

    if max_batches != None:
        num_batches = min(max_batches, num_batches)

    model_filename = os.path.join(model_dir, 'network')
    graph, op_dict, model_checkpoint = restoreGraph(model_dir)

    # 'train' is the boolean training-mode placeholder registered by buildCNN
    # (not a training op); feeding it True below enables batch-norm training
    train_op = op_dict['train']
    input_placeholder = op_dict['input']
    output_placeholder = op_dict['output']
    global_step = op_dict['global_step']
    # 'fit' bundles the minimize op with the batch-norm update ops
    fit_op =op_dict['fit']
    evaluate = op_dict['evaluate']
    saver = op_dict['saver']


    with tf.Session(graph=graph, config=config) as session:
        saver.restore(session, model_checkpoint)

        for epoch in range(num_epochs):

            # Reshuffle each epoch, keeping data/label pairing intact
            if shuffle:
                image_data, image_labels = shuffleTrainingData(image_data, image_labels)

            for index in range(num_batches):
                run_id = tf.train.global_step(session, global_step)
                batch_slice = slice(index*batch_size, (index+1)*batch_size)
                train_data = reshape_images(image_data[batch_slice])
                train_labels = image_labels[batch_slice]
                batch_dict = {input_placeholder : train_data, output_placeholder: train_labels}
                if train_op != None:
                    # Switch batch normalization into training mode
                    batch_dict[train_op] = True

                session.run(fit_op, feed_dict=batch_dict)

                # Periodically report the accuracy on the current batch
                # (evaluated in inference mode: no training feed here)
                if (run_id+1) % status_line_rate == 0:
                    accuracy = evaluate.eval(feed_dict={input_placeholder : train_data, output_placeholder: train_labels})
                    print('Batch accuracy after global step ', str(run_id).zfill(6), ": ",
                          '{:04.2f}'.format(accuracy), sep='')

        # Checkpoint once after all epochs, tagged with the last batch's step
        saver.save(session, model_filename, run_id)
182 |
def classify(image_data, model_dir, batch_size=2000, config=None):
    """
    Classify data

    Restores the latest checkpoint from model_dir and runs the network's
    logits over the data in batches.

    @param image_data: Input data
    @param model_dir: Directory where network is stored
    @param batch_size: Batch size to use for classifying data
    @param config: Config to pass on to tf.Session

    @return Predicted labels for input data
    """

    graph, op_dict, model_checkpoint = restoreGraph(model_dir)

    input_placeholder = op_dict['input']
    logits = op_dict['logits']
    saver = op_dict['saver']

    with tf.Session(graph=graph, config=config) as session:
        # NOTE(review): this fresh Saver immediately shadows the one fetched
        # from op_dict above -- presumably redundant; confirm it is intentional
        saver = tf.train.Saver()
        saver.restore(session, model_checkpoint)

        results = []
        num_images = image_data.shape[0]
        # Ceiling division so the final partial batch is included
        num_batches = np.ceil(num_images / batch_size).astype('int')

        for index in range(num_batches):
            slice_index = slice(index*batch_size, min((index+1)*batch_size, num_images))
            batched_data = {input_placeholder: reshape_images(image_data[slice_index])}

            # Predicted label = index of the largest logit
            results.append(np.argmax(logits.eval(batched_data), axis=1))

        return np.concatenate(results)
216 |
217 |
def reshape_images(images):
    """
    Reshape input array of images to match Tensorflow's expected layout

    @param images: Input image with dimensions of (image index, height, width) or (image_index, channel, height, width)
    @return images with (image_index, height, width, channel)
    """
    num_dimensions = images.ndim

    if num_dimensions == 3:
        # Grayscale stack: append a trailing channel axis of size one
        return images[..., np.newaxis]

    if num_dimensions == 4:
        # Channels-first layout: move the channel axis to the end
        return np.moveaxis(images, 1, 3)

    raise RuntimeError('Can only handle 3 or 4 dimension arrays')
233 |
def length_after_valid_window(length, window, stride):
    """
    Length of dimension after convolving using the padding type 'valid' or using max pooling

    @param length: Initial length
    @param window: Size of convolution window
    @param stride: Stride used
    @return New size after using convolution with 'valid' padding type or from max pooling
    """
    # Number of positions where the window fits entirely inside the input
    num_valid_positions = length - window + 1
    # A partial final stride still yields one output element, hence the ceiling
    return np.ceil(num_valid_positions / stride).astype('int')
244 |
def shuffleTrainingData(data, labels):
    """
    Shuffles data

    Data and labels are reordered with the same permutation, so their
    pairing is preserved.

    @param data: Input data
    @param labels: Input labels
    """
    shuffled_order = np.random.permutation(data.shape[0])

    return data[shuffled_order], labels[shuffled_order]
256 |
def restoreGraph(model_dir):
    """
    Restore a network

    Imports the metagraph of the latest checkpoint in model_dir into a fresh
    graph and looks up the handles that buildCNN registered in named
    collections.

    @param model_dir: Directory containing network
    @return graph, operation dictionary, and checkpoint
    """

    graph = tf.Graph()

    op_dict = OrderedDict()

    with graph.as_default():
        # Locate the newest checkpoint and load its graph definition
        model_checkpoint = tf.train.latest_checkpoint(model_dir)
        saver = tf.train.import_meta_graph(model_checkpoint + '.meta', clear_devices=True)

        # These collections were populated by buildCNN; note that 'train'
        # holds the boolean training-mode placeholder, not a training op
        op_dict['train'] = graph.get_collection('train')[0]
        op_dict['input'] = graph.get_collection('input')[0]
        op_dict['output'] = graph.get_collection('output')[0]
        op_dict['global_step'] = graph.get_collection('global_step')[0]
        # 'fit' bundles the minimize op with any pending update ops
        # (e.g. batch-norm statistics)
        op_dict['fit'] = graph.get_collection('fit') + graph.get_collection(tf.GraphKeys.UPDATE_OPS)
        op_dict['evaluate'] = graph.get_collection('evaluate')[0]
        op_dict['logits'] = graph.get_collection('logits')[0]
        op_dict['saver'] = saver

    return graph, op_dict, model_checkpoint
283 |
--------------------------------------------------------------------------------
/pyinsar/processing/utilities/deformations.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from skimage.filters import threshold_li
3 |
def calc_bounding_box(image):
    """
    Calculate bounding box of an object in an image

    @param image: Input image
    @return Extent of deformation in image (x_start, x_end, y_start, y_end)
    """

    # Binarize the image with Li's threshold
    binary_image = np.where(image < threshold_li(image), 0, 1)

    # A column/row contains part of the object iff its maximum is 1
    object_in_column = np.max(binary_image, axis=0)
    object_in_row = np.max(binary_image, axis=1)

    # argmax on the raw/reversed indicator gives the first/last occupied index
    x_start = np.argmax(object_in_column)
    x_end = len(object_in_column) - np.argmax(object_in_column[::-1]) - 1

    y_start = np.argmax(object_in_row)
    y_end = len(object_in_row) - np.argmax(object_in_row[::-1]) - 1

    return (x_start, x_end, y_start, y_end)
23 |
24 |
def determine_deformation_bounding_box(deformations):
    """
    Determine bounds around a deformation

    @param deformations: Input deformations
    @return Bounding box large enough to include deformation in all directions (x_start, x_end, y_start, y_end)
    """
    # Bounding box of each of the three deformation components (absolute value)
    component_boxes = np.stack([calc_bounding_box(np.abs(deformations[component, :, :]))
                                for component in range(3)])

    # Union of the per-component boxes
    return (np.min(component_boxes[:, 0]), np.max(component_boxes[:, 1]),
            np.min(component_boxes[:, 2]), np.max(component_boxes[:, 3]))
34 |
35 |
36 |
def determine_x_y_bounds(deformations, x_array, y_array, offset=5000):
    """
    Determine the x and y positions that bound a deformation

    @param deformations: Input deformations
    @param x_array: X coordinates
    @param y_array: Y coordinates
    @param offset: Extra padding around measured bounds
    @return Bounds in units of x_array and y_array with padding (x_start, x_end, y_start, y_end)
    """
    x_start_index, x_end_index, y_start_index, y_end_index = determine_deformation_bounding_box(deformations)

    # Look up coordinates along the first row/column of the coordinate grids
    x_start = x_array[0, x_start_index]
    x_end = x_array[0, x_end_index]
    y_start = y_array[y_start_index, 0]
    y_end = y_array[y_end_index, 0]

    return x_start - offset, x_end + offset, y_start - offset, y_end + offset
52 |
--------------------------------------------------------------------------------
/pyinsar/processing/utilities/file_utils.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Authors: Cody Rude
5 | # This software is part of the NSF DIBBS Project "An Infrastructure for
6 | # Computer Aided Discovery in Geoscience" (PI: V. Pankratius) and
7 | # NASA AIST Project "Computer-Aided Discovery of Earth Surface
8 | # Deformation Phenomena" (PI: V. Pankratius)
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy
11 | # of this software and associated documentation files (the "Software"), to deal
12 | # in the Software without restriction, including without limitation the rights
13 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14 | # copies of the Software, and to permit persons to whom the Software is
15 | # furnished to do so, subject to the following conditions:
16 | #
17 | # The above copyright notice and this permission notice shall be included in
18 | # all copies or substantial portions of the Software.
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 | # THE SOFTWARE.
26 |
27 | # Standard library imports
28 | from collections import OrderedDict
29 |
30 | # PyInSAR imports
31 | from pyinsar.processing.data_fetcher.hdf_retriever import DataFetcher
32 | from pyinsar.processing.utilities.machine_learning import DataRetriever
33 | from pyinsar.processing.utilities.generic import subarray_slice
34 |
35 | # Third party imports
36 | import numpy as np
37 |
def build_data_fetchers(filename_list, label_list, size, dtype, num_training_data_per_label, num_validation_data_per_label, num_testing_data_per_label, num_training_chunks, num_validation_chunks = 1, num_testing_chunks=1):
    """
    Build training, validation, and testing HDF Retriever Data Fetchers

    @param filename_list: List of HDF file names
    @param label_list: List of labels for each HDF file
    @param size: Image size, passed through to DataRetriever/DataFetcher
    @param dtype: Image data type, passed through to DataRetriever/DataFetcher
    @param num_training_data_per_label: Number of training data for each label
    @param num_validation_data_per_label: Number of validation data for each label
    @param num_testing_data_per_label: Number of testing data for each label
    @param num_training_chunks: Break the training data into this many chunks
    @param num_validation_chunks: Break the validation data into this many chunks
    @param num_testing_chunks: Break the testing data into this many chunks

    @return OrderedDict of HDF Retriever Data Fetchers keyed by 'training',
            'validation', and 'testing'
    """

    data_retriever = DataRetriever(filename_list, label_list, size, dtype)

    num_labels = len(label_list)

    # Each index array holds (label, image index) pairs, one row per datum.
    # Note: np.int was removed in NumPy 1.24; use the builtin int instead.
    training_index = np.zeros((num_labels * num_training_data_per_label, 2), dtype = int)
    validation_index = np.zeros((num_labels * num_validation_data_per_label, 2), dtype = int)
    testing_index = np.zeros((num_labels * num_testing_data_per_label, 2), dtype = int)

    def add_indices(index_array, image_index, label_index, label, num_items):
        """
        Add indices to an index array

        @param index_array: Array being used as the index
        @param image_index: Indices of images to insert into the index array
        @param label_index: Index of the current label
        @param label: Label of the current image index
        @param num_items: Number of rows reserved per label in the index array
        """
        my_slice = subarray_slice(label_index, num_items)
        index_array[my_slice, 0] = label
        index_array[my_slice, 1] = image_index

    def image_index_slice(num_skip_items, num_items):
        """
        Select num_items from an array after skipping num_skip_items

        @param num_skip_items: Number of items to skip
        @param num_items: Number of items to select
        @return slice that starts after num_skip_items and is num_items long
        """
        return slice(num_skip_items, num_items + num_skip_items)

    num_images = data_retriever.get_num_images()

    for label_index, label in enumerate(label_list):
        # Shuffle the available images for this label before splitting them
        # into disjoint training / validation / testing sets
        image_index = np.arange(num_images[label_index, 1])
        np.random.shuffle(image_index)

        training_image_slice = image_index_slice(0, num_training_data_per_label)
        validation_image_slice = image_index_slice(num_training_data_per_label, num_validation_data_per_label)
        testing_image_slice = image_index_slice(num_training_data_per_label + num_validation_data_per_label,
                                                num_testing_data_per_label)

        add_indices(training_index,
                    image_index[training_image_slice],
                    label_index,
                    label,
                    num_training_data_per_label)

        add_indices(validation_index,
                    image_index[validation_image_slice],
                    label_index,
                    label,
                    num_validation_data_per_label)

        add_indices(testing_index,
                    image_index[testing_image_slice],
                    label_index,
                    label,
                    num_testing_data_per_label)

    # Interleave labels in the training set by shuffling its rows
    np.random.shuffle(training_index)

    data_fetcher_dict = OrderedDict()

    data_fetcher_names = ['training', 'validation', 'testing']
    data_fetcher_indices = [training_index, validation_index, testing_index]
    num_chunks_list = [num_training_chunks, num_validation_chunks, num_testing_chunks]

    for index, fetcher_name, num_chunks in zip(data_fetcher_indices, data_fetcher_names, num_chunks_list):
        data_fetcher_dict[fetcher_name] = DataFetcher(filename_list, label_list, size, dtype, num_chunks, index)

    return data_fetcher_dict
129 |
--------------------------------------------------------------------------------
/pyinsar/processing/utilities/insar_simulator_utils.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright (c) 2018 Massachusetts Institute of Technology
3 | #
4 | # Author: Guillaume Rongier
5 | # This software has been created in projects supported by the US National
6 | # Science Foundation and NASA (PI: Pankratius)
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be included in
16 | # all copies or substantial portions of the Software.
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | # THE SOFTWARE.
24 |
25 | import numpy as np
26 | from skimage.filters import threshold_li
27 |
def wrap(x, to_2pi = False):
    '''
    Wrap a float or an array to a principal phase interval

    @param x: The float or array (in radians)
    @param to_2pi: If True, wrap to [0, 2pi) instead of [-pi, pi)

    @return The wrapped value(s) in radians
    '''
    # Truth-test the flag directly instead of comparing with == True
    if to_2pi:
        return np.mod(x, 2.*np.pi)
    return np.mod(x + np.pi, 2.*np.pi) - np.pi
40 |
def compute_los_vector(rad_incidence_angle, rad_los_azimuth):
    '''
    Compute the line-of-sight vector in Cartesian coordinates from spherical
    coordinates, considering that the vector is from the ground to the satellite

    @param rad_incidence_angle: The incidence angle of the satellite in radian
                                (scalar, list, or array)
    @param rad_los_azimuth: The azimuth of the satellite direction in radian
                            (scalar, or array broadcastable to the incidence angle)

    @return The line-of-sight vector in Cartesian coordinates, with the three
            components stacked along the first axis
    '''
    # Generalization: accept plain floats/lists (original required arrays
    # because it read .shape directly)
    rad_incidence_angle = np.asarray(rad_incidence_angle)
    rad_los_azimuth = np.asarray(rad_los_azimuth)

    # Hoist the shared sine of the incidence angle
    sin_incidence = np.sin(rad_incidence_angle)

    los_vector = np.empty([3] + list(rad_incidence_angle.shape))
    los_vector[0] = sin_incidence*np.sin(rad_los_azimuth)
    los_vector[1] = sin_incidence*np.cos(rad_los_azimuth)
    los_vector[2] = np.cos(rad_incidence_angle)

    return los_vector
57 |
def crop_array_from_center(array, crop_shape):
    '''
    Crop an array to a given shape, centered on the array's center

    @param array: The array
    @param crop_shape: Desired output size along each leading axis

    @return The cropped array (a view of the input)
    '''
    slices = []
    for axis, crop_size in enumerate(crop_shape):
        start = array.shape[axis]//2 - crop_size//2
        slices.append(slice(start, start + crop_size))

    # Index with a tuple: indexing with a *list* of slices is deprecated
    # and raises an error in modern NumPy
    return array[tuple(slices)]
74 |
75 |
def mask_deformation(deformation, threshold_function = None):
    '''
    Mask a deformation stack using a per-band threshold function

    A pixel is replaced by NaN (in every band) only when its absolute value
    falls below the threshold in all bands.

    @param deformation: Deformation to mask, with bands along the first axis
    @param threshold_function: Function mapping an image to a scalar threshold;
                               defaults to skimage's threshold_li
    @return Masked copy of the deformation
    '''
    # Late-bind the default so the callable can be swapped without touching
    # the function object's defaults
    if threshold_function is None:
        threshold_function = threshold_li

    # np.bool was removed in NumPy 1.24; use the builtin bool
    mask = np.zeros_like(deformation, dtype=bool)
    for band in range(deformation.shape[0]):
        magnitude = np.abs(deformation[band,:,:])
        thresh = threshold_function(magnitude)
        mask[band, magnitude < thresh] = True

    # Keep a pixel unless every band is below its threshold
    mask = np.all(mask, axis=0)

    deformation_masked = deformation.copy()
    deformation_masked[:,mask] = np.nan

    return deformation_masked
97 |
98 |
def calc_bounding_box(image, threshold_function = threshold_li):
    '''
    Calculate the bounding box around the bright contents of an image

    @param image: Input image
    @param threshold_function: Function used to compute the threshold value
                               (default: Li's threshold)
    @return Extents of a bounding box around the above-threshold pixels
            (x_min, x_max, y_min, y_max)
    '''
    # Binarize: 1 where the image reaches the threshold, 0 elsewhere
    threshold = threshold_function(image)
    binary_image = np.where(image < threshold, 0, 1)

    return retrieve_bounds(binary_image)
111 |
112 |
def retrieve_bounds(thresh_image):
    """
    Retrieve the bounds of a thresholded image

    @param thresh_image: Image filled with ones for valid and zeros for invalid
    @return: Extents of a rectangle around valid data (x_start, x_end, y_start, y_end)
    """
    def _axis_bounds(profile):
        # First index of the maximum and one past its last occurrence
        start = np.argmax(profile)
        end = len(profile) - np.argmax(profile[::-1])
        return start, end

    # Collapse each axis to its per-column / per-row maxima, then locate
    # the first and last positions where valid data appears
    x_start, x_end = _axis_bounds(np.max(thresh_image, axis=0))
    y_start, y_end = _axis_bounds(np.max(thresh_image, axis=1))

    return x_start, x_end, y_start, y_end
129 |
130 |
def crop_nans(image):
    """
    Shrink an image by trimming NaN borders

    @param image: Input image
    @return: Image cropped to the rectangle around its non-NaN data
    """
    # Valid pixels are those that are not NaN
    valid_mask = ~np.isnan(image)

    x_start, x_end, y_start, y_end = retrieve_bounds(valid_mask)

    return image[y_start:y_end, x_start:x_end]
143 |
144 |
def determine_deformation_bounding_box(deformations, largest_box=True, **kwargs):
    '''
    Calculate the extent of the deformation in image coordinates

    @param deformations: Input deformations, with bands along the first axis
    @param largest_box: If True, return a box that encompasses the selected
                        values of every band (union); otherwise return the
                        intersection of the per-band boxes
    @param kwargs: Any additional keyword arguments passed to calc_bounding_box

    @return Extents of the deformations (x_min, x_max, y_min, y_max)
    '''
    # One bounding box per band; generalized from the previous hard-coded
    # first three bands so any number of bands is supported
    num_bands = deformations.shape[0]
    bounds = np.stack([calc_bounding_box(np.abs(deformations[i,:,:]), **kwargs)
                       for i in range(num_bands)])

    if largest_box:
        return np.min(bounds[:,0]), np.max(bounds[:,1]), np.min(bounds[:,2]), np.max(bounds[:,3])
    else:
        return np.max(bounds[:,0]), np.min(bounds[:,1]), np.max(bounds[:,2]), np.min(bounds[:,3])
160 |
161 |
def determine_x_y_bounds(deformations, x_array, y_array, offset=5000, **kwargs):
    '''
    Determine the x and y coordinates of the extent of the deformation

    @param deformations: Input deformations
    @param x_array: x coordinates
    @param y_array: y coordinates
    @param offset: Size to extend the extents of the box
    @param kwargs: Any additional keyword arguments passed to
                   determine_deformation_bounding_box

    @return Extents of the deformation plus the offset (x_min, x_max, y_min, y_max)
    '''

    box = determine_deformation_bounding_box(deformations, **kwargs)

    # Fancy-index the coordinate grids at the two bounding-box corners
    # NOTE(review): the upper bounds are exclusive indices and could equal the
    # array size — confirm they cannot index out of range for your inputs
    x_start, x_end = x_array[box[2:], box[:2]]
    y_start, y_end = y_array[box[2:], box[:2]]

    # y coordinates may decrease with row index; keep start <= end
    if y_start > y_end:
        y_start, y_end = y_end, y_start

    return x_start - offset, x_end + offset, y_start - offset, y_end + offset
186 |
187 |
def generate_interferogram_from_deformation(track_angle,
                                            min_ground_range_1,
                                            height_1,
                                            is_right_looking,
                                            wavelength,
                                            k,
                                            deformation,
                                            xx, yy,
                                            projected_topography=None,
                                            min_ground_range_2 = None,
                                            height_2 = None):
    '''
    Generate an interferogram from deformations

    The phase is obtained from the change in slant range between two passes:
    per-pass ground ranges and satellite heights are combined with the
    deformation to form slant ranges, and their difference is converted to
    phase with change_in_range_to_phase.

    @param track_angle: Satellite track angle (used directly in np.cos/np.sin,
                        so it is interpreted as radians)
    @param min_ground_range_1: Minimum ground range to deformations for first pass
    @param height_1: Height of satellite for first pass
    @param is_right_looking: The satellite is looking to the right
    @param wavelength: Wavelength of the signal
    @param k: Number of passes (1 or 2). NOTE(review): this argument is not
              used anywhere in the body — change_in_range_to_phase is called
              with its default number of passes; confirm this is intended
    @param deformation: Map of deformation; bands 0 and 1 are treated as
        horizontal components and band 2 as vertical (it is subtracted from
        the satellite height) — axis ordering (e.g. east/north) not
        verifiable here, TODO confirm
    @param xx: x coordinates of deformation
    @param yy: y coordinates of deformation
    @param projected_topography: Elevation data
    @param min_ground_range_2: Minimum ground range to deformations for second
                               pass (defaults to the first pass value)
    @param height_2: Height of satellite for second pass (defaults to the
                     first pass value)

    @return Interferogram phase due to the deformations
    '''

    rad_track_angle = track_angle

    # Distance of each pixel across the satellite track
    cross_track_distance = xx * np.cos(rad_track_angle) - yy * np.sin(rad_track_angle)

    if is_right_looking:
        phi = 2 * np.pi - track_angle
        cross_track_distance *= -1.

    else:
        phi = np.pi - track_angle

    # Horizontal deformation projected onto the cross-track (ground range) direction
    cross_track_deformation = deformation[0,:,:].astype(np.float64) * np.cos(phi) + deformation[1,:,:].astype(np.float64) * np.sin(phi)

    # Second-pass geometry defaults to the first pass
    if height_2 is None:
        height_2 = height_1

    if min_ground_range_2 is None:
        min_ground_range_2 = min_ground_range_1

    # Use height above the terrain when topography is available
    if projected_topography is not None:
        corrected_height_1 = height_1 - projected_topography
        corrected_height_2 = height_2 - projected_topography
    else:
        corrected_height_1 = height_1
        corrected_height_2 = height_2

    # Vertical deformation raises the ground, shrinking the second-pass height
    corrected_height_2 -= deformation[2,:,:].astype(np.float64)

    # Shift so the nearest pixel sits at zero cross-track distance
    cross_track_distance -= cross_track_distance.min()

    ground_range_1 = cross_track_distance + min_ground_range_1
    ground_range_2 = cross_track_distance + min_ground_range_2 + cross_track_deformation

    # Slant ranges from right-triangle (height, ground range) geometry
    slant_range_1 = np.sqrt(corrected_height_1**2 + ground_range_1**2)
    slant_range_2 = np.sqrt(corrected_height_2**2 + ground_range_2**2)

    phase = change_in_range_to_phase(slant_range_2 - slant_range_1, wavelength)

    return phase
257 |
def old_generate_interferogram_from_deformation(track_angle,
                                                min_ground_range,
                                                height,
                                                is_right_looking,
                                                wavelength,
                                                k,
                                                deformation,
                                                xx, yy,
                                                projected_topography=None):
    '''
    Generate an interferogram from deformations (older implementation)

    Unlike generate_interferogram_from_deformation, this version projects the
    deformation onto per-pixel look vectors and converts the line-of-sight
    deformation directly to phase.

    @param track_angle: Satellite track angle (used directly in np.cos/np.sin,
                        so it is interpreted as radians)
    @param min_ground_range: Minimum ground range to deformations
    @param height: Height of satellite
    @param is_right_looking: The satellite is looking to the right
    @param wavelength: Wavelength of the signal
    @param k: number of passes (1 or 2)
    @param deformation: Map of deformation, one band per Cartesian component
    @param xx: x coordinates of deformation
    @param yy: y coordinates of deformation
    @param projected_topography: Elevation data

    @return Interferogram phase due to the deformations

    NOTE(review): the phase here is +2*pi*k*los/wavelength, while
    change_in_range_to_phase uses a negative sign — confirm the intended sign
    convention before comparing results between the two implementations.
    '''

    rad_track_angle = track_angle

    # Distance of each pixel across the satellite track
    cross_track_distance = xx * np.cos(rad_track_angle) - yy * np.sin(rad_track_angle)

    if is_right_looking:
        phi = 2 * np.pi - track_angle

        cross_track_distance *= -1.

    else:
        phi = np.pi - track_angle

    # Use height above the terrain when topography is available
    if projected_topography is not None:
        heights = height - projected_topography
    else:
        heights = height

    # Shift so the nearest pixel sits at zero cross-track distance
    cross_track_distance -= cross_track_distance.min()

    ground_range = cross_track_distance + min_ground_range

    rad_look_angle = np.arctan2(ground_range, heights)

    theta = np.pi - rad_look_angle

    # Per-pixel unit look vectors in Cartesian coordinates
    x = np.sin(theta) * np.cos(phi)
    y = np.sin(theta) * np.sin(phi)
    z = np.cos(theta)

    look_vectors = np.stack([x, y, z])

    # Project the deformation onto the look vectors (dot product per pixel)
    los_deformation = np.sum(look_vectors * deformation, axis=0)

    phase = 2. * np.pi * k * los_deformation / wavelength

    return phase
320 |
321 |
def change_in_range_to_phase(los_deformation, wavelength, k=2):
    '''
    Convert a change in line-of-sight range to an interferometric phase

    @param los_deformation: Change in distance along the line of sight
    @param wavelength: Wavelength of the radar
    @param k: Number of passes

    @return Phase corresponding to the change in range
    '''
    two_pi = 2. * np.pi
    return -two_pi * k * los_deformation / wavelength
333 |
def phase_to_change_in_range(phase, wavelength, k=2):
    '''
    Convert an interferometric phase back to a change in line-of-sight range

    @param phase: Input phase
    @param wavelength: Wavelength of the radar
    @param k: Number of passes

    @return Change in range along the line of sight
    '''
    denominator = 2 * np.pi * k
    return -phase * wavelength / denominator
345 |
--------------------------------------------------------------------------------
/pyinsar/run_doxygen.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Regenerate the doxygen documentation and build the LaTeX output.
# Run from the repository root (expects dox.cfg and fix_latex.sh there).

# Abort on any failure instead of cascading errors into later steps
set -euo pipefail

# -f: do not fail when latex/ does not exist yet (e.g. first run)
rm -rf latex/
doxygen dox.cfg &> out.log
cp fix_latex.sh latex
cd latex
./fix_latex.sh
make &> out.log
cd ..
8 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
# Removed dead commented-out distutils import; merged the setuptools imports
from setuptools import setup, find_packages

package_name = 'pyinsar'

# Discover all packages under the repository root
package_list = find_packages()

# Use the README as the long description shown on PyPI
with open("README.md", 'r', encoding='utf-8') as rfile:
    readme = rfile.read()

setup(name = package_name,
      version = '0.0.5post1',
      packages = package_list,

      install_requires = [
          'numpy',
          'scikit-dataaccess',
          'scikit-discovery',
          'pandas',
          'scipy',
          'numba',
          'statsmodels',
          'geodesy',
          'GDAL',
          'matplotlib',
          'ipywidgets',
          'atomicwrites',
          'requests',
          'setuptools'
      ],

      description = 'Package of InSAR utilities',
      author = 'MITHAGI',
      author_email='skdaccess@mit.edu',
      classifiers=[
          'Topic :: Scientific/Engineering',
          'Intended Audience :: Science/Research',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 3 :: Only'
      ],

      package_data={'pyinsar': ['license/LICENSE',
                                'docs/pyinsar_doxygen.pdf']},

      python_requires='>=3.6',

      long_description = readme,
      long_description_content_type='text/markdown'
      )
52 |
--------------------------------------------------------------------------------