├── tests ├── linting │ └── .gitkeep ├── testdata │ ├── logfiles │ │ └── .gitkeep │ └── config_test.json ├── CI_docker │ ├── build_sarvey_testsuite_image.sh │ └── context │ │ ├── entrypoint.sh │ │ └── sarvey_ci.docker ├── __init__.py ├── test_config.py └── test_ifg_network.py ├── docs ├── authors.rst ├── history.rst ├── readme.rst ├── contributing.rst ├── _static │ └── custom.css ├── requirements.txt ├── index.rst ├── demo_datasets.rst ├── Makefile ├── PULL_REQUEST_TEMPLATE.md ├── make.bat ├── visualization.rst ├── demo │ ├── demo_masjed_dam.rst │ ├── demo_masjed_dam_fast_track.rst │ └── demo_masjed_dam_detailed_guide.rst ├── usage.rst ├── preparation.rst ├── installation.rst └── conf.py ├── .gitattributes ├── MANIFEST.in ├── .editorconfig ├── setup.cfg ├── AUTHORS.rst ├── .coveragerc ├── tox.ini ├── .readthedocs.yaml ├── environment.yml ├── sarvey ├── version.py ├── __init__.py ├── geolocation.py ├── console.py ├── triangulation.py ├── osm_utils.py ├── coherence.py ├── sarvey_osm.py ├── densification.py ├── filtering.py ├── sarvey_export.py ├── sarvey_mti.py └── preparation.py ├── HISTORY.rst ├── .gitignore ├── LICENSE ├── Makefile ├── setup.py ├── CONTRIBUTING.rst ├── README.rst └── .github └── workflows └── ci.yml /tests/linting/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/testdata/logfiles/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../AUTHORS.rst 2 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. 
include:: ../HISTORY.rst 2 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/_static/custom.css: -------------------------------------------------------------------------------- 1 | .wy-nav-content { 2 | max-width: 1200px !important; 3 | } 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # *.h5 filter=lfs diff=lfs merge=lfs -text 2 | # *.png filter=lfs diff=lfs merge=lfs -text 3 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx>=4.1.1 2 | sphinx-argparse 3 | sphinx-autodoc-typehints 4 | sphinxcontrib-jquery 5 | sphinx_rtd_theme 6 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS.rst 2 | include CONTRIBUTING.rst 3 | include HISTORY.rst 4 | include LICENSE 5 | include README.rst 6 | 7 | recursive-exclude tests * 8 | recursive-exclude * __pycache__ 9 | recursive-exclude * *.py[co] 10 | 11 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif 12 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # 
http://editorconfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | indent_style = space 7 | indent_size = 4 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | charset = utf-8 11 | end_of_line = lf 12 | 13 | [*.bat] 14 | indent_style = tab 15 | end_of_line = crlf 16 | 17 | [LICENSE] 18 | insert_final_newline = false 19 | 20 | [Makefile] 21 | indent_style = tab 22 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 1.0.0 3 | commit = True 4 | tag = True 5 | 6 | [bumpversion:file:setup.py] 7 | search = version='{current_version}' 8 | replace = version='{new_version}' 9 | 10 | [bumpversion:file:sarvey/__init__.py] 11 | search = __version__ = '{current_version}' 12 | replace = __version__ = '{new_version}' 13 | 14 | [bdist_wheel] 15 | universal = 1 16 | 17 | [flake8] 18 | exclude = docs 19 | 20 | [aliases] 21 | # Define setup.py command aliases here 22 | test = pytest 23 | 24 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | ==================== 2 | SARvey documentation 3 | ==================== 4 | 5 | .. toctree:: 6 | :maxdepth: 2 7 | :caption: Contents: 8 | 9 | readme 10 | Source code repository 11 | installation 12 | usage 13 | preparation 14 | processing 15 | visualization 16 | demo_datasets 17 | modules 18 | contributing 19 | authors 20 | history 21 | 22 | Indices and tables 23 | ================== 24 | * :ref:`genindex` 25 | * :ref:`modindex` 26 | * :ref:`search` 27 | -------------------------------------------------------------------------------- /docs/demo_datasets.rst: -------------------------------------------------------------------------------- 1 | .. 
_example_datasets: 2 | 3 | ============= 4 | Demo Datasets 5 | ============= 6 | 7 | Several demo datasets are available to help you learn how to perform SARvey processing effectively. 8 | 9 | .. note:: 10 | The demo datasets and instructions provided serve as a practical guide for using SARvey. They do not cover all the software details or offer the best processing strategies for every specific dataset. 11 | 12 | .. toctree:: 13 | :maxdepth: 1 14 | :caption: Demo Datasets: 15 | 16 | demo/demo_masjed_dam.rst 17 | 18 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Credits 3 | ======= 4 | 5 | Development Lead 6 | ---------------- 7 | 8 | * Andreas Piter 9 | 10 | Contributors 11 | ------------ 12 | 13 | * Mahmud H. Haghighi 14 | * Daniela Rabe 15 | * Romulo Goncalves 16 | * Johannes Knoch 17 | * Erik Rivas 18 | 19 | Project management and funding 20 | ------------------------------ 21 | 22 | * Alison Beamish 23 | * Mahdi Motagh 24 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = python -msphinx 7 | SPHINXPROJ = sarvey 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | # .coveragerc to control coverage.py 2 | [run] 3 | branch = False 4 | concurrency = multiprocessing 5 | parallel = True 6 | omit = */site-packages/*,*/tests/*,*/.eggs/* 7 | 8 | [report] 9 | show_missing = True 10 | 11 | # Regexes for lines to exclude from consideration 12 | exclude_lines = 13 | # Have to re-enable the standard pragma 14 | pragma: no cover 15 | 16 | # Don't complain if tests don't hit defensive assertion code: 17 | raise AssertionError 18 | raise NotImplementedError 19 | 20 | # Don't complain if non-runnable code isn't run: 21 | if 0: 22 | if __name__ == .__main__.: 23 | 24 | ignore_errors = True 25 | 26 | [html] 27 | directory = htmlcov 28 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [pydocstyle] 2 | ignore = D213,D203,D213,D212,D413,D409 3 | 4 | [tox] 5 | envlist = py35, py36, py37, py38, flake8 6 | 7 | [travis] 8 | python = 9 | 3.8: py38 10 | 3.7: py37 11 | 3.6: py36 12 | 3.5: py35 13 | 14 | [testenv:flake8] 15 | basepython = python 16 | deps = flake8 17 | commands = flake8 sarvey tests 18 | 19 | [testenv] 20 | setenv = 21 | PYTHONPATH = {toxinidir} 22 | deps = 23 | -r{toxinidir}/requirements_dev.txt 24 | ; If you want to make tox run the tests with the same versions, create a 25 | ; requirements.txt with the pinned versions and uncomment the following line: 26 | ; -r{toxinidir}/requirements.txt 27 | commands = 28 | pip install -U pip 29 | pytest --basetemp={envtmpdir} 30 | 31 | -------------------------------------------------------------------------------- /tests/CI_docker/build_sarvey_testsuite_image.sh: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | # Any subsequent(*) commands which fail will cause the shell script to exit immediately 4 | 5 | context_dir="./context" 6 | dockerfile="sarvey_ci.docker" 7 | python_script=' 8 | version = {} 9 | with open("../../sarvey/version.py") as version_file: 10 | exec(version_file.read(), version) 11 | print(version["__version__"]) 12 | ' 13 | version=`python -c "$python_script"` 14 | tag="sarvey_ci:$version" 15 | 16 | echo "#### Build runner docker image" 17 | if [[ "$(docker images ${tag} | grep ${tag} 2> /dev/null)" != "" ]]; then 18 | docker rmi ${tag} 19 | fi 20 | DOCKER_BUILDKIT=1 docker build ${context_dir} \ 21 | --no-cache \ 22 | -f ${context_dir}/${dockerfile} \ 23 | -m 20G \ 24 | -t ${tag} 25 | ls 26 | -------------------------------------------------------------------------------- /docs/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ### Description 2 | 3 | Add a detailed description of the problem and the solution you implemented. 4 | Include any relevant information that will help the reviewer understand the context of your changes. 5 | 6 | --- 7 | 8 | ### Related issues or discussions 9 | Fixes # 10 | 11 | --- 12 | 13 | ### Type of change 14 | 15 | - [ ] Bug fix 16 | - [ ] New feature 17 | - [ ] Documentation update 18 | 19 | --- 20 | 21 | ### Checklist 22 | 23 | - [ ] Make test. 24 | - [ ] Make lint. 25 | - [ ] Remove obsolete print statements. 26 | - [ ] Update History.rst. 27 | - [ ] Update the documentation. 28 | - [ ] Update the dependencies for installation. 29 | 30 | --- 31 | 32 | ### Screenshots (if applicable) 33 | 34 | Add screenshots to help explain your changes. 
35 | 36 | -------------------------------------------------------------------------------- /tests/CI_docker/context/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | # Check for necessary environment variables 5 | if [ -z "$RUNNER_TOKEN" ] || [ -z "$RUNNER_REPO_URL" ]; then 6 | echo "Error: RUNNER_TOKEN and RUNNER_REPO_URL environment variables must be set." 7 | exit 1 8 | fi 9 | 10 | # Configure the runner if it hasn’t been configured already 11 | if [ ! -f ".runner" ]; then 12 | echo "Configuring the GitHub Actions runner..." 13 | ./config.sh --unattended \ 14 | --replace \ 15 | --labels self-hosted \ 16 | --url "$RUNNER_REPO_URL" \ 17 | --token "$RUNNER_TOKEN" \ 18 | --name "${RUNNER_NAME:-docker-runner}" || exit 1 19 | fi 20 | 21 | # Trap SIGTERM and deregister the runner on shutdown if needed 22 | trap './config.sh remove --token "$RUNNER_TOKEN"; exit 0' SIGTERM 23 | 24 | echo "Starting the GitHub Actions runner..." 25 | exec ./run.sh 26 | 27 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=python -msphinx 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=sarvey 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The Sphinx module was not found. Make sure you have Sphinx installed, 20 | echo.then set the SPHINXBUILD environment variable to point to the full 21 | echo.path of the 'sphinx-build' executable. Alternatively you may add the 22 | echo.Sphinx directory to PATH. 23 | echo. 
24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the OS, Python version and other tools you might need 9 | build: 10 | os: ubuntu-24.04 11 | tools: 12 | python: "3.13" 13 | # You can also specify other tool versions: 14 | # nodejs: "23" 15 | # rust: "1.82" 16 | # golang: "1.23" 17 | 18 | # Build documentation in the "docs/" directory with Sphinx 19 | sphinx: 20 | configuration: docs/conf.py 21 | 22 | # Optionally build your docs in additional formats such as PDF and ePub 23 | # formats: 24 | # - pdf 25 | # - epub 26 | 27 | # Optional but recommended, declare the Python requirements required 28 | # to build your documentation 29 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 30 | python: 31 | install: 32 | - requirements: docs/requirements.txt 33 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: sarvey 2 | 3 | channels: &id1 4 | - conda-forge 5 | 6 | dependencies: 7 | - python=3.10 8 | - setuptools 9 | - cython 10 | - pyproj 11 | - h5py 12 | - numpy 13 | - isce2 14 | - scipy 15 | - pip 16 | - gcc_linux-64 17 | - matplotlib 18 | - overpy 19 | - numba 20 | - mintpy 21 | - git 22 | - shapely 23 | - geopandas 24 | - gstools 25 | - pydantic 26 | - json5 27 | # setup requirements 28 | - 
tox 29 | # test requirements 30 | - pytest 31 | - pytest-cov 32 | - urlchecker 33 | # lint requirements 34 | - flake8 35 | - pillow 36 | - pycodestyle 37 | - pydocstyle 38 | - pylint 39 | # doc requirements 40 | - sphinx>=4.1.1 41 | - sphinx-argparse 42 | - sphinx-autodoc-typehints 43 | - sphinxcontrib-jquery 44 | - sphinx_rtd_theme 45 | # deployment requirements 46 | - twine 47 | 48 | - pip: 49 | - kamui[extra] 50 | - pytest-reporter-html1 51 | - cmcrameri 52 | -------------------------------------------------------------------------------- /docs/visualization.rst: -------------------------------------------------------------------------------- 1 | .. _visualization: 2 | 3 | Visualization 4 | ============= 5 | 6 | 7 | Amplitude image and interferograms 8 | ---------------------------------- 9 | 10 | Even before starting the SARvey processing, you might want to check the amplitude images and interferograms to get to know the study area and the displacement pattern. 11 | This can be done using the command line script `sarvey_plot` which requires the path to the SLC stack as input. 12 | With argument `-i`, it does not require any processing results or configuration file from SARvey. 13 | 14 | .. code-block:: bash 15 | 16 | sarvey_plot inputs/slcStack -i 17 | 18 | 19 | In the baseline plot (perpendicular and temporal baseline), you can click on a dot corresponding to an image to plot the respective amplitude image. 20 | By switching to the interferogram plot, you can visualize the interferograms from two acquisitions by clicking with LEFT and RIGHT mouse buttons on the two images. 21 | 22 | 23 | 24 | .. image:: https://seafile.projekt.uni-hannover.de/f/7226352542e84cbe893e/?dl=1 25 | :width: 600 26 | :align: center 27 | :alt: sarvey_plot amplitude image 28 | 29 | Figure 1: Baseline plot (left) and amplitude image of the study area (right). 30 | 31 | 32 | .. 
image:: https://seafile.projekt.uni-hannover.de/f/66bc74da23c94947a186/?dl=1 33 | :width: 600 34 | :align: center 35 | :alt: sarvey_plot interferogram 36 | 37 | Figure 2: Baseline plot (right) and interferogram of the study area. 38 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 21 | # 22 | # This program is distributed in the hope that it will be useful, but WITHOUT 23 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 24 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 25 | # details. 
26 | # 27 | # You should have received a copy of the GNU Lesser General Public License along 28 | # with this program. If not, see . 29 | 30 | """Unit test package for SARvey.""" 31 | -------------------------------------------------------------------------------- /sarvey/version.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 21 | # 22 | # This program is distributed in the hope that it will be useful, but WITHOUT 23 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 24 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 25 | # details. 26 | # 27 | # You should have received a copy of the GNU Lesser General Public License along 28 | # with this program. If not, see . 
29 | 30 | """Version module for SARvey.""" 31 | 32 | __version__ = '1.2.2' 33 | __versiondate__ = '2025-07-09_01' 34 | __versionalias__ = 'Strawberry Pie' 35 | -------------------------------------------------------------------------------- /sarvey/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 21 | # 22 | # This program is distributed in the hope that it will be useful, but WITHOUT 23 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 24 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 25 | # details. 26 | # 27 | # You should have received a copy of the GNU Lesser General Public License along 28 | # with this program. If not, see . 
29 | 30 | """Top-level package for SARvey.""" 31 | 32 | __author__ = """Andreas Piter""" 33 | __email__ = 'piter@ipi.uni-hannover.de' 34 | 35 | from .version import __version__ 36 | 37 | __all__ = [ 38 | '__version__' 39 | ] 40 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | History 3 | ======= 4 | 5 | 6 | Future minor version (release soon) 7 | ----------------------------------- 8 | 9 | * Add version flag to command line interfaces. 10 | * enhance logging 11 | 12 | 1.2.2 (2025-07-09) 13 | ------------------ 14 | 15 | * Migrate from Pydantic v1 to v2 and update environment requirements. 16 | * Show all arcs in spatial network plot in step 1. 17 | 18 | 1.2.1 (2025-07-08) 19 | ------------------ 20 | 21 | * Fix integer overflow occurring in large datasets. 22 | * Enhance proper resource cleanup in multiprocessing. 23 | * Update CI docker builder. 24 | * Update runner to test installation. 25 | * Update documentation with new instruction for installation including pip. 26 | * Fix numerical problems when computing grid size. 27 | 28 | 1.2.0 (2025-02-19) 29 | ------------------ 30 | 31 | * Create the background map and coordinates file each run of step 0. 32 | * Visualize time series of neighbouring points in sarvey_plot -t. 33 | * Ensure that specified grid size is bigger than study area. 34 | * Update runner. 35 | * Visualize amplitude images and interferograms interactively with sarvey_plot -i. 36 | 37 | 1.1.0 (2024-11-06) 38 | ------------------ 39 | 40 | * Use Scientific colour maps from Crameri. 41 | 42 | 1.0.0 (2024-08-12) Strawberry Pie 43 | --------------------------------- 44 | 45 | * First release version on github. 46 | * Change name of files for second-order points from coh* to p2_coh*. 47 | * Check existence of intermediate results before continuing processing. 48 | * Improve parameter names in config. 
49 | * Combine all general settings into one section in config. 50 | * Allow adding user comments in config.json file. 51 | * Improve documentation. 52 | * Adapt CI from gitlab to github. 53 | * Mask mean amplitude to avoid zero division warning in log10. 54 | * Set logging level to debug for log file. 55 | -------------------------------------------------------------------------------- /tests/testdata/config_test.json: -------------------------------------------------------------------------------- 1 | { 2 | "general": { 3 | "input_path": "tests/testdata/inputs", 4 | "output_path": "tests/testdata/output", 5 | "num_cores": 50, 6 | "num_patches": 1, 7 | "apply_temporal_unwrapping": true, 8 | "spatial_unwrapping_method": "puma", 9 | "logging_level": "INFO", 10 | "logfile_path": "tests/testdata/logfiles/" 11 | }, 12 | "phase_linking": { 13 | "use_phase_linking_results": false, 14 | "inverted_path": "tests/testdata/inverted", 15 | "num_siblings": 20, 16 | "mask_phase_linking_file": null, 17 | "use_ps": false, 18 | "mask_ps_file": "tests/testdata/maskPS.h5" 19 | }, 20 | "preparation": { 21 | "start_date": null, 22 | "end_date": null, 23 | "ifg_network_type": "star", 24 | "num_ifgs": 3, 25 | "max_tbase": 100, 26 | "filter_window_size": 9 27 | }, 28 | "consistency_check": { 29 | "coherence_p1": 0.9, 30 | "grid_size": 200, 31 | "mask_p1_file": null, 32 | "num_nearest_neighbours": 30, 33 | "max_arc_length": null, 34 | "velocity_bound": 0.1, 35 | "dem_error_bound": 100.0, 36 | "num_optimization_samples": 100, 37 | "arc_unwrapping_coherence": 0.6, 38 | "min_num_arc": 3 39 | }, 40 | "unwrapping": { 41 | "use_arcs_from_temporal_unwrapping": true 42 | }, 43 | "filtering": { 44 | "coherence_p2": 0.9, 45 | "apply_aps_filtering": true, 46 | "interpolation_method": "kriging", 47 | "grid_size": 1000, 48 | "mask_p2_file": null, 49 | "use_moving_points": true, 50 | "max_temporal_autocorrelation": 0.3 51 | }, 52 | "densification": { 53 | "num_connections_to_p1": 5, 54 | 
"max_distance_to_p1": 2000, 55 | "velocity_bound": 0.15, 56 | "dem_error_bound": 100.0, 57 | "num_optimization_samples": 100, 58 | "arc_unwrapping_coherence": 0.5 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # .h5 files from sarvey inputs and outputs 10 | *.h5 11 | 12 | # Distribution / packaging 13 | .Python 14 | env/ 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | report.xml 53 | report.html 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | target/ 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # pyenv 80 | .python-version 81 | 82 | # celery beat schedule file 83 | celerybeat-schedule 84 | 85 | # SageMath parsed files 86 | *.sage.py 87 | 88 | # dotenv 89 | .env 90 | 91 | # virtualenv 92 | .venv 93 | venv/ 94 | ENV/ 95 | 96 | # Spyder project settings 97 | .spyderproject 98 | .spyproject 99 | 100 | # Rope project 
settings 101 | .ropeproject 102 | 103 | # mkdocs documentation 104 | /site 105 | 106 | # mypy 107 | .mypy_cache/ 108 | 109 | # IDE settings 110 | .vscode/ 111 | 112 | # IntelliJ Idea family of suites 113 | .idea 114 | *.iml 115 | ## File-based project format: 116 | *.ipr 117 | *.iws 118 | ## mpeltonen/sbt-idea plugin 119 | .idea_modules/ 120 | -------------------------------------------------------------------------------- /tests/CI_docker/context/sarvey_ci.docker: -------------------------------------------------------------------------------- 1 | FROM condaforge/miniforge3:latest 2 | 3 | # Set Mamba root prefix 4 | ENV MAMBA_ROOT_PREFIX="/opt/conda" 5 | 6 | # update base environment 7 | RUN --mount=type=cache,target=/opt/conda/pkgs \ 8 | mamba update --all -y && \ 9 | conda clean -afy 10 | 11 | ARG DEBIAN_FRONTEND=noninteractive 12 | 13 | RUN mkdir actions-runner; cd actions-runner && \ 14 | apt-get update && apt-get install -y curl gfortran build-essential openssh-client 15 | 16 | WORKDIR /actions-runner 17 | 18 | RUN curl -o actions-runner-linux-x64.tar.gz -L https://github.com/actions/runner/releases/download/v2.322.0/actions-runner-linux-x64-2.322.0.tar.gz&& \ 19 | tar xzf ./actions-runner-linux-x64.tar.gz && \ 20 | ./bin/installdependencies.sh && \ 21 | useradd -m runneruser && \ 22 | chown -R runneruser:runneruser /actions-runner 23 | 24 | # install some needed packages 25 | RUN --mount=type=cache,target=/opt/conda/pkgs \ 26 | mamba install -y bzip2 fish gcc gdb git ipython make nano pip tree wget unzip 27 | 28 | # use bash shell instead of sh shell for all docker commands 29 | SHELL ["/bin/bash", "-c"] 30 | 31 | 32 | # Create ci_env environment with pip installed 33 | RUN --mount=type=cache,target=/opt/conda/pkgs \ 34 | conda create -n ci_env python=3.10 pip -y && \ 35 | conda clean -afy 36 | 37 | # Install additional packages using mamba and pip 38 | RUN --mount=type=cache,target=/opt/conda/pkgs \ 39 | conda install -n ci_env conda-forge::pysolid -y && \ 
40 | conda install -n ci_env conda-forge::gdal && \ 41 | conda run -n ci_env pip install git+https://github.com/insarlab/MiaplPy.git && \ 42 | # TODO: replace the following with the main branch 43 | #conda run -n ci_env pip install git+https://github.com/luhipi/sarvey.git@main && \ 44 | conda run -n ci_env pip install git+https://github.com/mahmud1/sarvey.git@update-runner2 && \ 45 | conda run -n ci_env pip install sarvey[dev] && \ 46 | conda run -n ci_env pip install sphinx_rtd_theme && \ 47 | conda clean -afy 48 | 49 | COPY ../entrypoint.sh . 50 | RUN chown runneruser:runneruser entrypoint.sh 51 | RUN chmod +x entrypoint.sh 52 | 53 | USER runneruser 54 | RUN chmod +x /actions-runner/run.sh 55 | 56 | ENTRYPOINT ["./entrypoint.sh"] 57 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | `SARvey` is distributed under the GNU General Public License, version 3 (GPLv3). 2 | 3 | Exceptions are listed in the following: 4 | 5 | * This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 6 | implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 7 | cite [BOYKOV04] in any resulting publication if you use this code for research purposes. 8 | This requirement extends to SARvey. 9 | 10 | BOYKOV04 11 | An Experimental Comparison of Min-Cut/Max-Flow Algorithms for Energy Minimization in Vision. 12 | Yuri Boykov and Vladimir Kolmogorov. 
In IEEE Transactions on Pattern Analysis and Machine 13 | Intelligence (PAMI), September 2004 14 | 15 | GNU GENERAL PUBLIC LICENSE 16 | Version 3, 29 June 2007 17 | 18 | SARvey 19 | Copyright (C) 2021-2025 Andreas Piter 20 | 21 | This program is free software: you can redistribute it and/or modify 22 | it under the terms of the GNU General Public License as published by 23 | the Free Software Foundation, either version 3 of the License, or 24 | (at your option) any later version. 25 | 26 | This program is distributed in the hope that it will be useful, 27 | but WITHOUT ANY WARRANTY; without even the implied warranty of 28 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 29 | GNU General Public License for more details. 30 | 31 | You should have received a copy of the GNU General Public License 32 | along with this program. If not, see . 33 | 34 | Also add information on how to contact you by electronic and paper mail. 35 | 36 | You should also get your employer (if you work as a programmer) or school, 37 | if any, to sign a "copyright disclaimer" for the program, if necessary. 38 | For more information on this, and how to apply and follow the GNU GPL, see 39 | . 40 | 41 | The GNU General Public License does not permit incorporating your program 42 | into proprietary programs. If your program is a subroutine library, you 43 | may consider it more useful to permit linking proprietary applications with 44 | the library. If this is what you want to do, use the GNU Lesser General 45 | Public License instead of this License. But first, please read 46 | . 47 | 48 | -------------------------------------------------------------------------------- /docs/demo/demo_masjed_dam.rst: -------------------------------------------------------------------------------- 1 | .. 
_demo_masjed_dam: 2 | 3 | 4 | Masjed Soleyman dam 5 | ------------------- 6 | 7 | This tutorial focuses on measuring the post-construction settlement of the `Masjed Soleyman Dam `_, a rock-fill dam on the Karun river, opened in 2002. Previous investigations using GNSS and high-resolution TerraSAR-X data, as detailed in `Emadali et al., 2017 `_, have shown post-construction settlement of the dam. TerraSAR-X data indicates that the dam undergoes a maximum deformation rate of 13 cm/year in the radar line-of-sight. 8 | 9 | 10 | Dataset 11 | ^^^^^^^ 12 | 13 | The dataset used in this tutorial is a **Sentinel-1** stack of 100 images. The details are provided in the table below. 14 | 15 | 16 | +------------------------+-------------------------------------+ 17 | | Number of SLC images | 100 | 18 | +------------------------+-------------------------------------+ 19 | | Start date | 2015-01-05 | 20 | +------------------------+-------------------------------------+ 21 | | End date | 2018-09-04 | 22 | +------------------------+-------------------------------------+ 23 | | Sensor | Sentinel-1 | 24 | +------------------------+-------------------------------------+ 25 | | Orbit direction | Descending | 26 | +------------------------+-------------------------------------+ 27 | | InSAR processor | GAMMA | 28 | +------------------------+-------------------------------------+ 29 | 30 | 31 | There are two tutorials for this demo dataset: one with a comprehensive description for beginners, and one with minimal description for advanced users. 32 | 33 | .. toctree:: 34 | :maxdepth: 1 35 | :caption: Tutorials: 36 | 37 | demo_masjed_dam_detailed_guide.rst 38 | demo_masjed_dam_fast_track.rst 39 | 40 | 41 | Literature 42 | ^^^^^^^^^^ 43 | 44 | * Emadali L, Motagh M, Haghighi, MH (2017). Characterizing post-construction settlement of the Masjed-Soleyman embankment dam, Southwest Iran, using TerraSAR-X SpotLight radar imagery. 
Engineering Structures 143:261-273, DOI 10.1016/j.engstruct.2017.04.009. `Link to paper. `_ 45 | -------------------------------------------------------------------------------- /tests/test_config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 21 | # 22 | # This program is distributed in the hope that it will be useful, but WITHOUT 23 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 24 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 25 | # details. 26 | # 27 | # You should have received a copy of the GNU Lesser General Public License along 28 | # with this program. If not, see . 
29 | 30 | 31 | """Tests for `SARvey config`.""" 32 | 33 | import os 34 | import unittest 35 | 36 | from sarvey.config import loadConfiguration 37 | 38 | 39 | class TestConfig(unittest.TestCase): 40 | root_path = None 41 | config_file = None 42 | configuration = None 43 | output_data_path = None 44 | 45 | @classmethod 46 | def setUp(cls) -> None: 47 | """Define the Class method SetUp.""" 48 | cls.root_path = "./" 49 | if os.path.basename(os.getcwd()) == "tests": 50 | cls.root_path = "../" 51 | 52 | cls.config_file = os.path.abspath(f"{cls.root_path}tests/testdata/config_test.json") 53 | 54 | def testDataDirectories(self): 55 | """Test configuration for the data directories settings.""" 56 | loadConfiguration(path=self.config_file) 57 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | .. _usage: 2 | 3 | ===== 4 | Usage 5 | ===== 6 | 7 | .. image:: https://seafile.projekt.uni-hannover.de/f/39209355cabc4607bf0a/?dl=1 8 | :alt: SARvey workflow 9 | :width: 600px 10 | :align: center 11 | 12 | Processing workflow for using the SARvey software to derive displacement time series. The minimal required processing 13 | steps and datasets are depicted in grey. All other steps are optional. 14 | 15 | 16 | 17 | Command-line tools 18 | ------------------ 19 | 20 | The following command-line tools are available and can be run directly in the terminal. 21 | 22 | `sarvey` 23 | A tool to derive displacements from the SLC stack with Multi-Temporal InSAR (MTI). 24 | A detailed description of the processing steps is given `here `_. 25 | 26 | `sarvey_plot` 27 | A tool to plot the results from `sarvey` processing. 28 | 29 | `sarvey_export` 30 | A tool to export the results from `sarvey` processing to shapefile or geopackage. 
31 | 32 | `sarvey_mask` 33 | A tool to create a mask from shapefile containing the area of interest, which can be used in `sarvey` processing. 34 | The tool reads from an input file, which is a shapefile or geopackage containing the geographic data. 35 | It supports both 'LineString' and 'Polygon' geometries. 36 | The tool first gets the spatial extent of the geographic data and searches the location of the polygon/line nodes in the image coordinates of the radar image. 37 | A buffer around the polygon/line is created specified by a width in pixel. 38 | The buffer is then used to create the mask. 39 | 40 | Here is an example of how to use the `sarvey_mask` tool: 41 | 42 | .. code-block:: bash 43 | 44 | sarvey_mask --input_file my_shapefile.shp --geom_file ./inputs/geometryRadar.h5 --out_file_name my_mask.h5 --width 5 45 | 46 | 47 | 48 | `sarvey_osm` 49 | A tool to download OpenStreetMap data for the area of interest specified by the spatial extent of the SLC stack. 50 | The tool first gets the spatial extent of the SAR image from the geometry file. 51 | It then uses this spatial extent to download the OpenStreetMap data for the corresponding area. 52 | The download of railway tracks, highways and bridges is supported. 53 | After downloading the data, the tool saves it to a shapefile. 54 | 55 | After downloading the OpenStreetMap data with `sarvey_osm`, you can use the `sarvey_mask` tool to create a mask from the shapefile. 56 | 57 | Here is an example of how to use the `sarvey_osm` tool: 58 | 59 | .. 
code-block:: bash 60 | 61 | sarvey_osm --geom ./geometryRadar.h5 --railway # download railway 62 | sarvey_osm --geom ./geometryRadar.h5 --highway # download highway 63 | sarvey_osm --geom ./geometryRadar.h5 --railway --bridge # download railway bridge 64 | sarvey_osm --geom ./geometryRadar.h5 --railway -o mask_railway.shp # specify output path 65 | 66 | 67 | Help and Version Information 68 | ----------------------------- 69 | 70 | To get help on how to use the command-line tools, use the `-h` or `--help` option. For example, run: 71 | 72 | .. code-block:: bash 73 | 74 | sarvey -h 75 | 76 | This will display detailed information about the available options and usage. 77 | 78 | To check the version of the installed SARvey software, use the `--version` option. For example, run: 79 | 80 | .. code-block:: bash 81 | 82 | sarvey --version 83 | 84 | 85 | Usage of the Python API 86 | ----------------------- 87 | 88 | To use SARvey in a project: 89 | 90 | .. code-block:: python 91 | 92 | import sarvey 93 | 94 | -------------------------------------------------------------------------------- /docs/demo/demo_masjed_dam_fast_track.rst: -------------------------------------------------------------------------------- 1 | .. _demo_masjed_dam_fast_track: 2 | 3 | Fast Track Guide for Masjed Soleyman Dam 4 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 5 | 6 | If you are an advanced user, you can proceed with this fast track tutorial. If you prefer a more detailed, step-by-step guide, please refer to the :ref:`detailed guide ` for this example. 7 | 8 | .. note:: 9 | 10 | These instructions are based on SARvey version 1.0.0 (Strawberry Pie). Newer versions may differ slightly. 11 | 12 | 13 | Download the Data 14 | """"""""""""""""" 15 | 16 | In this tutorial, a processed stack of data is provided. If you wish to generate data for other areas, please refer to the :ref:`preparation` section. 17 | 18 | .. 
code-block:: bash 19 | 20 | wget https://zenodo.org/records/12189041/files/SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip 21 | unzip SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip 22 | cd SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018 23 | 24 | 25 | Activate SARvey environment 26 | """"""""""""""""""""""""""" 27 | 28 | .. code-block:: bash 29 | 30 | conda activate sarvey 31 | 32 | 33 | Create a Config File 34 | """""""""""""""""""" 35 | 36 | .. code-block:: bash 37 | 38 | sarvey -f config.json 0 0 -g 39 | 40 | Specify parameters in the config file. Set a reasonable value for **num_cores**. 41 | 42 | Run **SARvey** 43 | """""""""""""" 44 | 45 | You can run each step individually or a range of steps by specifying the first and last step. 46 | 47 | .. code-block:: bash 48 | 49 | sarvey -f config.json 0 4 50 | 51 | Check Outputs 52 | """"""""""""" 53 | 54 | First, check the output snapshots in the `outputs/pics` directory. You can also use **`sarvey_plot`** to plot various products to assess the quality of the results and decide how to adjust parameters. Modify the parameters in the config file and rerun the corresponding steps of `sarvey` to improve the results. For instance, changing **`coherence_p2`** from 0.8 to 0.7 and rerunning steps 3 and 4 can increase the density of the final set of points. However, be cautious that reducing the value too much may include noisy points of low quality in the analysis, potentially leading to poor final results. You can check the details of all parameters using the -p flag in `sarvey` and decide how to tune them. For more explanations, please refer to :ref:`processing` 55 | 56 | 57 | 58 | Plot Time Series Results 59 | """""""""""""""""""""""" 60 | 61 | The final products, including the time series, are stored in the coh\*\*_ts.h5 file. The file is named based on the coherence_p2 parameter you used. Plot the time series using the following command: 62 | 63 | .. 
code-block:: bash 64 | 65 | sarvey_plot outputs/p2_coh80_ts.h5 -t 66 | 67 | You can visualize velocity and DEM error estimation of second-order points. You can also visualize amplitude, DEM, or temporal coherence as the background. Right-click on any point to see its time series. 68 | 69 | .. description of time series options to be added. 70 | 71 | 72 | 73 | 74 | Export to GIS Format 75 | """""""""""""""""""" 76 | 77 | Export the data to Shapefiles using the following command. 78 | 79 | 80 | .. code-block:: bash 81 | 82 | sarvey_export outputs/p2_coh80_ts.h5 -o outputs/shp/p2_coh80_ts.shp 83 | 84 | You can visualize the data in any GIS software. If you use QGIS, you can use the `InSAR Explorer `_ plugin to draw the time series. 85 | 86 | 87 | 88 | Validate Your Results 89 | """"""""""""""""""""" 90 | 91 | You can download a copy of the final SARvey products from `this link `_. Use these files to compare your results and ensure everything worked correctly. 92 | 93 | -------------------------------------------------------------------------------- /tests/test_ifg_network.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 
11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 21 | # 22 | # This program is distributed in the hope that it will be useful, but WITHOUT 23 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 24 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 25 | # details. 26 | # 27 | # You should have received a copy of the GNU Lesser General Public License along 28 | # with this program. If not, see . 29 | 30 | """Tests for SARvey `ifg_network` functions.""" 31 | 32 | import os 33 | import unittest 34 | from datetime import datetime 35 | 36 | import numpy as np 37 | 38 | from sarvey.ifg_network import SmallBaselineNetwork 39 | 40 | 41 | class TestUtils(unittest.TestCase): 42 | root_path = None 43 | config_file = None 44 | configuration = None 45 | output_data_path = None 46 | 47 | @classmethod 48 | def setUp(cls) -> None: 49 | """Define the Class method SetUp.""" 50 | # define class variables, create temporary output dir etc. 
here 51 | cls.root_path = "./" 52 | if os.path.basename(os.getcwd()) == "tests": 53 | cls.root_path = "../" 54 | 55 | @classmethod 56 | def tearDown(cls) -> None: 57 | """Define the Class method tearDown.""" 58 | # delete testfolder or do some other cleanup here 59 | 60 | def testConfigure_ok(self): 61 | """Test the expected output.""" 62 | # Input: 63 | pbase = np.array([0, 0, 0, 0]) # not important for this test 64 | dates = [datetime(2023, 8, 17), datetime(2023, 8, 17), datetime(2023, 8, 17), datetime(2023, 8, 17)] 65 | 66 | tbase = np.array([0, 6, 12, 18]) 67 | ifg_net_obj = SmallBaselineNetwork() 68 | ifg_net_obj.configure(pbase=pbase, tbase=tbase, num_link=2, max_tbase=12, dates=dates) 69 | ifg_list = np.array([(0, 1), (0, 2), (1, 2), (1, 3), (2, 3)]) 70 | assert (ifg_net_obj.ifg_list == ifg_list).all() 71 | 72 | tbase = np.array([0, 12, 18, 312]) 73 | ifg_net_obj = SmallBaselineNetwork() 74 | ifg_net_obj.configure(pbase=pbase, tbase=tbase, num_link=3, max_tbase=5, dates=dates) 75 | ifg_list = np.array([(0, 1), (1, 2), (2, 3)]) 76 | assert (ifg_net_obj.ifg_list == ifg_list).all() 77 | 78 | tbase = np.array([0, 12, 18, 312]) 79 | ifg_net_obj = SmallBaselineNetwork() 80 | ifg_net_obj.configure(pbase=pbase, tbase=tbase, num_link=3, max_tbase=20, dates=dates) 81 | ifg_list = np.array([(0, 1), (0, 2), (1, 2), (2, 3)]) 82 | assert (ifg_net_obj.ifg_list == ifg_list).all() 83 | 84 | # def testConfigure_err(self): 85 | # """Test for expected Errors.""" 86 | # 87 | -------------------------------------------------------------------------------- /sarvey/geolocation.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 
4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 21 | # 22 | # This program is distributed in the hope that it will be useful, but WITHOUT 23 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 24 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 25 | # details. 26 | # 27 | # You should have received a copy of the GNU Lesser General Public License along 28 | # with this program. If not, see . 29 | 30 | """Module for correcting the geolocation of the scatterers.""" 31 | import logging 32 | from os.path import join 33 | import numpy as np 34 | 35 | from miaplpy.objects.slcStack import slcStack 36 | 37 | from sarvey.objects import Points 38 | 39 | 40 | def getHeading(input_path: str, logger: logging.Logger): 41 | """ 42 | Read heading angle from slcStack.h5. 43 | 44 | Parameters 45 | ---------- 46 | input_path: str 47 | Path to directory containing 'slcStack.h5' and 'geometryRadar.h5'. 
48 | logger: Logger 49 | Logger handle 50 | 51 | Returns 52 | ------- 53 | heading_angle: float 54 | heading angle of the satellite in radians 55 | for ascending ~ -12*pi/180 56 | for descending ~ 190*pi/180 57 | """ 58 | # get heading from slcStack.h5 59 | slc_stack_file = join(input_path, 'slcStack.h5') 60 | slc_stack_obj = slcStack(slc_stack_file) 61 | try: 62 | meta_dict = slc_stack_obj.get_metadata() 63 | lower_case_meta_dict = {k.lower(): v for k, v in meta_dict.items()} 64 | 65 | heading_angle = float(lower_case_meta_dict["heading"]) 66 | logger.info(msg=f"Heading_angle of satellite: {heading_angle} deg") 67 | heading_angle = np.deg2rad(heading_angle) 68 | except Exception as exc: 69 | logger.error(f'Failed to retrieve heading angle from {slc_stack_file}: {exc}') 70 | raise Exception 71 | return heading_angle 72 | 73 | 74 | def calculateGeolocationCorrection(*, path_geom: str, point_obj: Points, demerr: np.array, logger: logging.Logger): 75 | """ 76 | Calculate geolocation correction. 77 | 78 | Parameters 79 | ---------- 80 | path_geom: str 81 | Path to directory containing 'slcStack.h5' or 'geometryRadar.h5'. 82 | point_obj: Points 83 | Point object with incidence angle for points 84 | demerr: np.array 85 | Array of dem error per pixel 86 | logger: Logger 87 | Logger handle 88 | 89 | Returns 90 | ------- 91 | coord_correction: np.array 92 | array of geolocation corrections, two columns [x_correction, y_correction] per point. 
93 | """ 94 | heading_angle = getHeading(input_path=path_geom, logger=logger) 95 | 96 | coord_correction = np.zeros_like(point_obj.coord_xy, dtype=float) 97 | coord_correction[:, 0] = demerr * np.cos(heading_angle) / np.tan(point_obj.loc_inc) 98 | coord_correction[:, 1] = -demerr * np.sin(heading_angle) / np.tan(point_obj.loc_inc) 99 | 100 | return coord_correction 101 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean clean-test clean-pyc clean-build docs help pytest 2 | .DEFAULT_GOAL := help 3 | 4 | define BROWSER_PYSCRIPT 5 | import os, webbrowser, sys 6 | 7 | from urllib.request import pathname2url 8 | 9 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) 10 | endef 11 | export BROWSER_PYSCRIPT 12 | 13 | define PRINT_HELP_PYSCRIPT 14 | import re, sys 15 | 16 | for line in sys.stdin: 17 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) 18 | if match: 19 | target, help = match.groups() 20 | print("%-20s %s" % (target, help)) 21 | endef 22 | export PRINT_HELP_PYSCRIPT 23 | 24 | BROWSER := python -c "$$BROWSER_PYSCRIPT" 25 | 26 | help: 27 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) 28 | 29 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts 30 | 31 | clean-build: ## remove build artifacts 32 | rm -fr build/ 33 | rm -fr dist/ 34 | rm -fr .eggs/ 35 | find . -name '*.egg-info' -exec rm -fr {} + 36 | find . -name '*.egg' -exec rm -f {} + 37 | 38 | clean-pyc: ## remove Python file artifacts 39 | find . -name '*.pyc' -exec rm -f {} + 40 | find . -name '*.pyo' -exec rm -f {} + 41 | find . -name '*~' -exec rm -f {} + 42 | find . 
-name '__pycache__' -exec rm -fr {} + 43 | 44 | clean-test: ## remove test and coverage artifacts 45 | rm -fr .tox/ 46 | rm -f .coverage 47 | rm -fr .coverage.* 48 | rm -fr htmlcov/ 49 | rm -fr report.html 50 | rm -fr report.xml 51 | rm -fr coverage.xml 52 | rm -fr .pytest_cache 53 | 54 | lint: ## check style with flake8 55 | flake8 --max-line-length=120 sarvey tests > ./tests/linting/flake8.log || \ 56 | (cat ./tests/linting/flake8.log && exit 1) 57 | pycodestyle sarvey --exclude="*.ipynb,*.ipynb*" --max-line-length=120 > ./tests/linting/pycodestyle.log || \ 58 | (cat ./tests/linting/pycodestyle.log && exit 1) 59 | pydocstyle sarvey > ./tests/linting/pydocstyle.log || \ 60 | (cat ./tests/linting/pydocstyle.log && exit 1) 61 | 62 | urlcheck: ## check for dead URLs 63 | urlchecker check . --file-types .py,.rst,.md,.json 64 | 65 | test: ## run tests quickly with the default Python 66 | pytest 67 | 68 | test-all: ## run tests on every Python version with tox 69 | tox 70 | 71 | coverage: ## check code coverage quickly with the default Python 72 | coverage erase 73 | coverage run --source sarvey -m pytest 74 | coverage combine # must be called in order to make coverage work in multiprocessing 75 | coverage report -m 76 | coverage html 77 | $(BROWSER) htmlcov/index.html 78 | 79 | pytest: clean-test ## Runs pytest with coverage and creates coverage and test report 80 | ## - puts the coverage results in the folder 'htmlcov' 81 | ## - generates cobertura 'coverage.xml' (needed to show coverage in GitLab MR changes) 82 | ## - generates 'report.html' based on pytest-reporter-html1 83 | ## - generates JUnit 'report.xml' to show the test report as a new tab in a GitLab MR 84 | ## NOTE: additional options pytest and coverage (plugin pytest-cov) are defined in .pytest.ini and .coveragerc 85 | pytest \ 86 | --verbosity=3 \ 87 | --color=yes \ 88 | --tb=short \ 89 | --cov=sarvey \ 90 | --cov-report html:htmlcov \ 91 | --cov-report term-missing \ 92 | --cov-report xml:coverage.xml \ 
93 | --template=html1/index.html \ 94 | --report=report.html \ 95 | --junitxml report.xml \ 96 | tests 97 | 98 | docs: ## generate Sphinx HTML documentation, including API docs 99 | rm -f docs/sarvey.rst 100 | rm -f docs/modules.rst 101 | sphinx-apidoc sarvey -o docs/ --private --doc-project 'Python API reference' 102 | $(MAKE) -C docs clean 103 | $(MAKE) -C docs html 104 | $(BROWSER) docs/_build/html/index.html 105 | 106 | servedocs: docs ## compile the docs watching for changes 107 | watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . 108 | 109 | release: dist ## package and upload a release 110 | twine upload dist/* 111 | 112 | dist: clean ## builds source and wheel package 113 | python setup.py sdist 114 | python setup.py bdist_wheel 115 | ls -l dist 116 | 117 | install: clean ## install the package to the active Python's site-packages 118 | python setup.py install 119 | 120 | gitlab_CI_docker: ## Build a docker image for CI use within gitlab 121 | cd ./tests/CI_docker/; bash ./build_sarvey_testsuite_image.sh 122 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 
11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 21 | # 22 | # This program is distributed in the hope that it will be useful, but WITHOUT 23 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 24 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 25 | # details. 26 | # 27 | # You should have received a copy of the GNU Lesser General Public License along 28 | # with this program. If not, see . 
29 | 30 | """The setup script.""" 31 | 32 | 33 | from setuptools import setup, find_packages 34 | 35 | with open('README.rst') as readme_file: 36 | readme = readme_file.read() 37 | 38 | with open('HISTORY.rst') as history_file: 39 | history = history_file.read() 40 | 41 | version = {} 42 | with open("sarvey/version.py") as version_file: 43 | exec(version_file.read(), version) 44 | 45 | req = [ 46 | "cython", "numpy<=1.26", "pyproj", "matplotlib", "numba", "scipy", 47 | "mintpy", "h5py", "overpy", "gstools", "shapely", "pandas", "geopandas", "pymaxflow", 48 | "pillow", "importlib_resources", "kamui", "json5", "cmcrameri", 'pydantic', 49 | "miaplpy @ git+https://github.com/insarlab/MiaplPy.git" 50 | ] 51 | 52 | req_setup = [] 53 | 54 | req_test = ['pytest>=3', 'pytest-cov', 'pytest-reporter-html1', 'urlchecker'] 55 | 56 | req_doc = [ 57 | 'sphinx>=4.1.1', 58 | 'sphinx-argparse', 59 | 'sphinx-autodoc-typehints', 60 | 'sphinx_rtd_theme' 61 | ] 62 | 63 | req_lint = ['flake8', 'pycodestyle', 'pydocstyle'] 64 | 65 | req_dev = ['twine'] + req_setup + req_test + req_doc + req_lint 66 | 67 | extra_req = ["gdal"] 68 | 69 | setup( 70 | author="Andreas Piter", 71 | author_email='piter@ipi.uni-hannover.de', 72 | python_requires='>=3.7', 73 | classifiers=[ 74 | 'Development Status :: Release Candidate', 75 | 'Intended Audience :: Researchers', 76 | 'None', 77 | 'Natural Language :: English', 78 | 'Programming Language :: Python :: 3', 79 | 'Programming Language :: Python :: 3.7', 80 | 'Programming Language :: Python :: 3.8', 81 | 'Programming Language :: Python :: 3.9', 82 | 'Programming Language :: Python :: 3.10' 83 | ], 84 | description="InSAR time series analysis software for SAR4Infra project", 85 | entry_points={ 86 | 'console_scripts': [ 87 | 'sarvey=sarvey.sarvey_mti:main', 88 | 'sarvey_plot=sarvey.sarvey_plot:main', 89 | 'sarvey_export=sarvey.sarvey_export:main', 90 | 'sarvey_mask=sarvey.sarvey_mask:main', 91 | 'sarvey_osm=sarvey.sarvey_osm:main', 92 | ], 93 | }, 94 | 
extras_require={ 95 | "doc": req_doc, 96 | "test": req_test, 97 | "lint": req_lint, 98 | "dev": req_dev 99 | }, 100 | install_requires=req, 101 | license="GPLv3", 102 | include_package_data=True, 103 | keywords='sarvey', 104 | long_description=readme, 105 | name='sarvey', 106 | packages=find_packages(include=['sarvey', 'sarvey.*']), 107 | setup_requires=req_setup, 108 | test_suite='tests', 109 | tests_require=req_test, 110 | url='https://github.com/luhipi/sarvey', 111 | version=version['__version__'], 112 | zip_safe=False, 113 | ) 114 | -------------------------------------------------------------------------------- /sarvey/console.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 
# with this program. If not, see <https://www.gnu.org/licenses/>.
63 | """ 64 | shift = " " 65 | logger.info(msg=shift + "{:>35} {:>15} {:>10}".format("Parameter", "value", "default")) 66 | logger.info(msg=shift + "{:>35} {:>15} {:>10}".format("_________", "_____", "_______")) 67 | 68 | for key in config_section.keys(): 69 | default = config_section_default[key] 70 | default = "None" if default is None else default 71 | default = "True" if default is True else default 72 | default = "False" if default is False else default 73 | 74 | value = config_section[key] 75 | value = "None" if value is None else value 76 | value = "True" if value is True else value 77 | value = "False" if value is False else value 78 | if default == value: 79 | logger.info(msg=shift + "{:>35} {:>15} {:>10}".format(key, value, default)) 80 | else: 81 | logger.info(msg=shift + "{:>35} {:>15} <--- {:>10}".format(key, value, default)) 82 | 83 | logger.info(msg="") 84 | 85 | 86 | def showLogoSARvey(*, logger: Logger, step: str): 87 | """ShowLogoSARvey. 88 | 89 | Parameters 90 | ---------- 91 | logger: Logger 92 | logging handler 93 | step: str 94 | Name of the step or script which is shown on the logo. 95 | """ 96 | # generate_from: http://patorjk.com/software/taag/ - font: Big, style: default 97 | # and https://textik.com/ 98 | logger.info(msg=f"SARvey version: {version.__version__} - {version.__versionalias__}, {version.__versiondate__}, " 99 | f"Run: {step}") 100 | new_logo = rf""" . _____ _____ 101 | +------ / \ ------ / ____| /\ | __ \ 102 | | / / | (___ / \ | |__) |_ _____ _ _ 103 | | / / \___ \ / /\ \ | _ /\ \ / / _ \ | | | 104 | | /\\ / / ____) / ____ \| | \ \ \ V / __/ |_| | 105 | | / \\/ / |_____/_/ \_\_| \_\ \_/ \___|\__, | 106 | | / \ / __/ | 107 | | \ / / v{version.__version__:<5} - {version.__versionalias__:<18} |___/ 108 | \\ / /... {version.__versiondate__:<20} | 109 | / \\/ / :... | 110 | / / / :... {step: <20} | 111 | / / / :... | 112 | / / _______ :... _________| 113 | \/ \______ :... 
____________/ | 114 | +-------------------- \________:___/ --------------------+ 115 | """ 116 | print(new_logo) 117 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Contributing 5 | ============ 6 | 7 | Contributions are welcome, and they are greatly appreciated! Every little bit 8 | helps, and credit will always be given. 9 | 10 | You can contribute in many ways: 11 | 12 | Types of Contributions 13 | ---------------------- 14 | 15 | Report Bugs 16 | ~~~~~~~~~~~ 17 | 18 | Report bugs at https://github.com/luhipi/sarvey/issues. 19 | 20 | If you are reporting a bug, please include: 21 | 22 | * Your operating system name and version. 23 | * Any details about your local setup that might be helpful in troubleshooting. 24 | * Detailed steps to reproduce the bug. 25 | 26 | Fix Bugs 27 | ~~~~~~~~ 28 | 29 | Look through the Github issues for bugs. Anything tagged with "bug" and "help 30 | wanted" is open to whoever wants to implement it. 31 | 32 | Implement Features 33 | ~~~~~~~~~~~~~~~~~~ 34 | 35 | Look through the Github issues for features. Anything tagged with "enhancement" 36 | and "help wanted" is open to whoever wants to implement it. 37 | 38 | Write Documentation 39 | ~~~~~~~~~~~~~~~~~~~ 40 | 41 | SARvey could always use more documentation, whether as part of the 42 | official SARvey docs, in docstrings, or even on the web in blog posts, 43 | articles, and such. 44 | 45 | Submit Feedback 46 | ~~~~~~~~~~~~~~~ 47 | 48 | The best way to send feedback is to file an issue at https://github.com/luhipi/sarvey/issues. 49 | 50 | If you are proposing a feature: 51 | 52 | * Explain in detail how it would work. 53 | * Keep the scope as narrow as possible, to make it easier to implement. 
$ git clone https://github.com/your_name_here/sarvey.git
# with this program. If not, see <https://www.gnu.org/licenses/>.
The pull request should work for Python 3.7, 3.8, 3.9 and 3.10. Check
The output includes the following directory structure that is later used as additional input in SARvey processing if the config file is modified to include DS analysis.
Please refer to Mintpy_ for more instructions on how to subset the data.
code-block:: bash 172 | 173 | generate_mask.py results_dir/temporal_coherence.h5 -o mask.h5 --roipoly # draw polygon on top of the temporal coherence image 174 | 175 | Follow the instructions in the terminal: 176 | 177 | Select points in the figure by enclosing them within a polygon. 178 | Press the 'esc' key to start a new polygon. 179 | Try hold to left key to move a single vertex. 180 | After complete the selection, close the figure/window to continue. 181 | 182 | 183 | 184 | .. _MiaplPy: https://github.com/insarlab/MiaplPy 185 | .. _ISCE: https://github.com/isce-framework/isce2 186 | .. _SNAP: https://step.esa.int/main/toolboxes/snap 187 | .. _MintPy: https://github.com/insarlab/MintPy 188 | 189 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. _installation: 2 | 3 | ============ 4 | Installation 5 | ============ 6 | 7 | SARvey is a cross-platform python-based software and can be installed on 8 | * `Linux`_ 9 | * `MacOS ARM (Apple Silicon M2)`_ 10 | * `Windows using WSL`_ 11 | 12 | 13 | Linux 14 | ----- 15 | 16 | On Linux, SARvey can be installed `Using Mamba (recommended)`_ or `Using Anaconda or Miniconda`_ or `Using Pip`_. 17 | 18 | Using Mamba (recommended) 19 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 20 | 21 | Using mamba_ (latest version recommended), **SARvey** is installed as follows: 22 | 23 | 24 | 1. Clone the SARvey source code and install SARvey and all dependencies from the environment.yml file: 25 | 26 | .. code-block:: bash 27 | 28 | git clone https://github.com/luhipi/sarvey.git 29 | cd sarvey 30 | 31 | 32 | 2. Create virtual environment for **SARvey** (optional but recommended): 33 | 34 | .. 
If you are a developer, install the development requirements using the following command.
1.1 Open `conda-env.yml` in an editor of your choice and comment out the line `isce2`. Alternatively, you can run the following command:
SARvey has been tested with Python 3.7+, i.e., it should be fully compatible with all Python versions from 3.7 onwards.
_Conda for Mac: https://docs.conda.io/projects/conda/en/latest/user-guide/install/macos.html 213 | .. _WSL: https://learn.microsoft.com/en-us/windows/wsl/ 214 | .. _MiaplPy: https://github.com/insarlab/MiaplPy 215 | 216 | -------------------------------------------------------------------------------- /sarvey/triangulation.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 21 | # 22 | # This program is distributed in the hope that it will be useful, but WITHOUT 23 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 24 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 25 | # details. 
# with this program. If not, see <https://www.gnu.org/licenses/>.
Creating distance matrix between all {num_points} points...") 69 | logger.debug(f"[Min, Max] of x coordinates: " 70 | f"[{np.min(coord_utmxy[:, 0]):.2f}/{np.max(coord_utmxy[:, 0]):.2f}]") 71 | logger.debug(f"[Min, Max] of y coordinates: " 72 | f"[{np.min(coord_utmxy[:, 1]):.2f}/{np.max(coord_utmxy[:, 1]):.2f}]") 73 | self.dist_mat = distance_matrix(coord_utmxy, coord_utmxy) 74 | # todo: check out alternatives: 75 | # scipy.spatial.KDTree.sparse_distance_matrix 76 | else: # if only global delaunay shall be computed without memory issues 77 | logger.debug("No Map coordinates given. No distance matrix calculated.") 78 | self.dist_mat = None 79 | 80 | def getArcsFromAdjMat(self): 81 | """Convert the adjacency matrix into a list of arcs. 82 | 83 | Returns 84 | ------- 85 | arcs: np.ndarray 86 | List of arcs with indices of the start and end point. 87 | """ 88 | self.logger.debug(f"Extracting arcs from adjacency matrix with size {self.adj_mat.shape}...") 89 | a = self.adj_mat.copy() 90 | # copy entries from lower to upper triangular matrix 91 | b = (a + a.T) 92 | # remove entries from diagonal and lower part of matrix 93 | arc_tmp = [[i, b.indices[b.indptr[i]:b.indptr[i + 1]]] for i in range(b.shape[0])] 94 | arc_tmp = [[s, e_list[np.where(e_list < s)[0]]] for s, e_list in arc_tmp] 95 | 96 | arcs = list() 97 | for s, e_list in arc_tmp: 98 | for e in e_list: 99 | arcs.append([s, e]) 100 | arcs = np.array(arcs) 101 | return arcs 102 | 103 | def removeLongArcs(self, *, max_dist: float): 104 | """Remove arcs from network which are longer than given threshold. 
105 | 106 | Parameter 107 | --------- 108 | max_dist: float 109 | distance threshold on arc length in [m] 110 | """ 111 | mask = self.dist_mat > max_dist 112 | self.logger.debug(f"Removing {np.sum(mask)} arcs with distance longer that {max_dist}.") 113 | self.adj_mat[mask] = False 114 | 115 | def isConnected(self): 116 | """Check if the network is connected.""" 117 | n_components = connected_components(csgraph=csr_matrix(self.adj_mat), directed=False, return_labels=False) 118 | if n_components == 1: 119 | return True 120 | else: 121 | return False 122 | 123 | def triangulateGlobal(self): 124 | """Connect the points with a GLOBAL delaunay triangulation.""" 125 | self.logger.debug("Triangulating points with global delaunay...") 126 | 127 | network = Delaunay(points=self.coord_xy) 128 | self.logger.debug(f"Number of triangles in Delaunay triangulation: {network.simplices.shape[0]}") 129 | for p1, p2, p3 in network.simplices: 130 | self.adj_mat[p1, p2] = True 131 | self.adj_mat[p1, p3] = True 132 | self.adj_mat[p2, p3] = True 133 | 134 | def triangulateKnn(self, *, k: int): 135 | """Connect points to the k-nearest neighbours.""" 136 | self.logger.debug(f"Triangulating points with {k}-nearest neighbours....") 137 | num_points = self.coord_xy.shape[0] 138 | prog_bar = ptime.progressBar(maxValue=num_points) 139 | start_time = time.time() 140 | count = 0 141 | tree = KDTree(data=self.coord_xy) 142 | 143 | if k > num_points: 144 | self.logger.debug(f"{k} k > {num_points} number of points. 
Connect all points with each other.") 145 | k = num_points 146 | for p1 in range(num_points): 147 | idx = tree.query(self.coord_xy[p1, :], k)[1] 148 | self.adj_mat[p1, idx] = True 149 | count += 1 150 | prog_bar.update(value=count + 1, every=np.int16(num_points / (num_points / 5)), 151 | suffix='{}/{} points triangulated'.format(count + 1, num_points + 1)) 152 | prog_bar.close() 153 | m, s = divmod(time.time() - start_time, 60) 154 | self.logger.debug(f"time used for knn triangulation: {m:02.0f} mins {s:02.1f} secs.") 155 | -------------------------------------------------------------------------------- /sarvey/osm_utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 
# with this program. If not, see <https://www.gnu.org/licenses/>.
Otherwise, change to max and min values of lat and lon 68 | ll_bbox = [np.nanmin(lat), np.nanmin(lon)] 69 | ur_bbox = [np.nanmax(lat), np.nanmax(lon)] 70 | 71 | img_ext = [ 72 | Point(lon[0, 0], lat[0, 0]), 73 | Point(lon[-1, 0], lat[-1, 0]), 74 | Point(lon[-1, -1], lat[-1, -1]), 75 | Point(lon[0, -1], lat[0, -1]) 76 | ] 77 | return ll_bbox, ur_bbox, img_ext, coord, atr 78 | 79 | 80 | def runOsmQuery(*, ll_corner_wgs: np.ndarray, ur_corner_wgs: np.ndarray, type_list: list, 81 | logger: Logger) -> overpy.Result: 82 | """Query OSM database for transport infrastructure within the spatial extent of the radar image. 83 | 84 | Parameters 85 | ---------- 86 | ll_corner_wgs: np.ndarray 87 | coordinates of the lower-left corner of the radar image in WGS84 coordinates. 88 | ur_corner_wgs: np.ndarray 89 | coordinates of the upper-right corner of the radar image in WGS84 coordinates. 90 | type_list: list 91 | List of street types that shall be queried at the OSM database. 92 | logger: Logger 93 | Logging handler. 94 | 95 | Returns 96 | ------- 97 | result: overpy.Result 98 | results of the overpy query to OSM database. 
99 | """ 100 | # Initialize overpass connection 101 | api = overpy.Overpass() 102 | 103 | # Request data from API 104 | logger.info(msg='querying OSM database for infra types...') 105 | # query_cmd = "way({},{},{},{}) [""highway=motorway_link""]; (._;>;); out body;" 106 | 107 | query_cmd = "[bbox: {},{},{},{}];(" 108 | for infra_type in type_list: 109 | logger.info(msg='\t - {}'.format(infra_type)) 110 | if infra_type == 'rail': 111 | query_cmd += "way[railway={}];".format(infra_type) 112 | else: 113 | query_cmd += "way[highway={}];".format(infra_type) 114 | 115 | query_cmd += ");(._; >;); out body;" # skel 116 | 117 | cmd = query_cmd.format(ll_corner_wgs[0], ll_corner_wgs[1], 118 | ur_corner_wgs[0], ur_corner_wgs[1]) 119 | logger.info(msg="\n" + cmd + "\n") 120 | result = api.query(cmd) 121 | 122 | if len(result.ways) == 0: 123 | logger.error(msg='Empty OSM query results. No roads or railway tracks found.') 124 | raise ValueError 125 | 126 | logger.info(msg='...done.') 127 | return result 128 | 129 | 130 | def runOsmQueryBridge(*, ll_corner_wgs: np.ndarray, ur_corner_wgs: np.ndarray, bridge_highway: bool, 131 | bridge_railway: bool, logger: Logger) -> overpy.Result: 132 | """Query OSM database for bridges of transport infrastructure within the spatial extent of the radar image. 133 | 134 | Parameters 135 | ---------- 136 | ll_corner_wgs: np.ndarray 137 | coordinates of the lower-left corner of the radar image in WGS84 coordinates. 138 | ur_corner_wgs: np.ndarray 139 | coordinates of the upper-right corner of the radar image in WGS84 coordinates. 140 | bridge_highway: bool 141 | Set true to query highway bridges. 142 | bridge_railway: bool 143 | Set true to query railway bridges. 144 | logger: Logger 145 | Logging handler. 146 | 147 | Returns 148 | ------- 149 | result: overpy.Result 150 | results of the overpy query to OSM database. 
151 | """ 152 | # Initialize overpass connection 153 | api = overpy.Overpass() 154 | 155 | # Request data from API 156 | logger.info(msg='querying OSM database for infra types...') 157 | # query_cmd = "way({},{},{},{}) [""highway=motorway_link""]; (._;>;); out body;" 158 | 159 | query_cmd = "[bbox: {},{},{},{}];(" 160 | 161 | if bridge_highway: 162 | logger.info(msg='\t - bridge_highway') 163 | query_cmd += 'way[highway~"^(motorway|motorway_link|trunk|trunk_link)$"][bridge];' 164 | 165 | if bridge_railway: 166 | logger.info(msg='\t - bridge_railway') 167 | query_cmd += 'way[railway=rail][bridge];' 168 | 169 | if (bridge_highway is False) & (bridge_railway is False): 170 | logger.info(msg='\t - all bridges') 171 | query_cmd += 'way[bridge];' 172 | 173 | query_cmd += ");(._; >;); out body;" # skel 174 | 175 | cmd = query_cmd.format(ll_corner_wgs[0], ll_corner_wgs[1], 176 | ur_corner_wgs[0], ur_corner_wgs[1]) 177 | logger.info(msg="\n" + cmd + "\n") 178 | result = api.query(cmd) 179 | 180 | if len(result.ways) == 0: 181 | logger.error(msg='Empty OSM query results. No bridges found.') 182 | raise ValueError 183 | 184 | logger.info(msg='...done.') 185 | return result 186 | -------------------------------------------------------------------------------- /sarvey/coherence.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
# This requirement extends to SARvey.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see .

"""Coherence module for SARvey."""
import multiprocessing
import time
import numpy as np
from numba import jit
from scipy.signal import convolve2d
from logging import Logger
from miaplpy.objects.slcStack import slcStack
from sarvey.objects import BaseStack
from sarvey.utils import convertBboxToBlock


def computeIfgsAndTemporalCoherence(*, path_temp_coh: str, path_ifgs: str, path_slc: str, ifg_array: np.ndarray,
                                    time_mask: np.ndarray, wdw_size: int, num_boxes: int, box_list: list,
                                    num_cores: int, logger: Logger):
    """ComputeIfgsAndTemporalCoherence.

    Compute the interferograms and temporal coherence from the SLC stack for a given set of (spatial) patches.

    Parameters
    ----------
    path_temp_coh : str
        Path to the temporal coherence stack. The data will be stored in this file during processing.
    path_ifgs : str
        Path to the interferograms stack. The data will be stored in this file during processing.
    path_slc : str
        Path to the SLC stack. The data will be read from this file.
    ifg_array : np.ndarray
        Array containing the indices of the reference and secondary images which are used to compute the interferograms.
    time_mask : np.ndarray
        Binary mask indicating the selected images from the SLC stack.
    wdw_size : int
        Size of the filter window. Has to be odd.
    num_boxes : int
        Number of patches to enable reading and processing of larger SLC stacks.
    box_list : list
        List containing the indices of each patch.
    num_cores : int
        Number of cores for parallel processing.
    logger : Logger
        Logger object.

    Returns
    -------
    mean_amp_img : np.ndarray
        Mean amplitude image.
    """
    start_time = time.time()
    # averaging kernel; centre pixel is zeroed so a pixel is compared against its neighbours only
    filter_kernel = np.ones((wdw_size, wdw_size), dtype=np.float64)
    filter_kernel[wdw_size // 2, wdw_size // 2] = 0

    slc_stack_obj = slcStack(path_slc)
    slc_stack_obj.open()
    temp_coh_obj = BaseStack(file=path_temp_coh, logger=logger)
    ifg_stack_obj = BaseStack(file=path_ifgs, logger=logger)

    mean_amp_img = np.zeros((slc_stack_obj.length, slc_stack_obj.width), dtype=np.float32)
    num_ifgs = ifg_array.shape[0]

    # process the stack patch by patch to bound memory usage
    for idx in range(num_boxes):
        bbox = box_list[idx]
        block2d = convertBboxToBlock(bbox=bbox)

        # read slc
        slc = slc_stack_obj.read(datasetName='slc', box=bbox, print_msg=False)
        slc = slc[time_mask, :, :]

        mean_amp = np.mean(np.abs(slc), axis=0)
        mean_amp[mean_amp == 0] = np.nan  # avoid log10(0) warnings; zeros become NaN in the output
        mean_amp_img[bbox[1]:bbox[3], bbox[0]:bbox[2]] = np.log10(mean_amp)

        # compute ifgs
        ifgs = computeIfgs(slc=slc, ifg_array=ifg_array)
        ifg_stack_obj.writeToFileBlock(data=ifgs, dataset_name="ifgs", block=block2d, print_msg=False)
        del slc

        # filter ifgs
avg_neighbours = np.zeros_like(ifgs) 108 | if num_cores == 1: 109 | for i in range(num_ifgs): 110 | avg_neighbours[:, :, i] = convolve2d(in1=ifgs[:, :, i], in2=filter_kernel, mode='same', boundary="symm") 111 | else: 112 | 113 | args = [( 114 | idx, 115 | ifgs[:, :, idx], 116 | filter_kernel) for idx in range(num_ifgs)] 117 | 118 | with multiprocessing.Pool(processes=num_cores) as pool: 119 | results = pool.map(func=launchConvolve2d, iterable=args) 120 | 121 | # retrieve results 122 | for j, avg_neigh in results: 123 | avg_neighbours[:, :, j] = avg_neigh 124 | del results, args, avg_neigh 125 | 126 | # compute temporal coherence 127 | residual_phase = np.angle(ifgs * np.conjugate(avg_neighbours)) 128 | del ifgs, avg_neighbours 129 | temp_coh = np.abs(np.mean(np.exp(1j * residual_phase), axis=2)) 130 | temp_coh_obj.writeToFileBlock(data=temp_coh, dataset_name="temp_coh", block=block2d, print_msg=False) 131 | del residual_phase, temp_coh 132 | logger.info(msg="Patches processed:\t {}/{}".format(idx + 1, num_boxes)) 133 | 134 | m, s = divmod(time.time() - start_time, 60) 135 | logger.debug(msg='\ntime used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s)) 136 | return mean_amp_img 137 | 138 | 139 | @jit(nopython=True) 140 | def computeIfgs(*, slc: np.ndarray, ifg_array: np.ndarray): 141 | """ComputeIfgs. 142 | 143 | Parameters 144 | ---------- 145 | slc : np.ndarray 146 | SLC stack. 147 | ifg_array : np.ndarray 148 | Array containing the indices of the reference and secondary images which are used to compute the interferograms. 149 | 150 | Returns 151 | ------- 152 | ifgs : np.ndarray 153 | Interferograms. 154 | """ 155 | t, length, width = slc.shape 156 | num_ifgs = ifg_array.shape[0] 157 | ifgs = np.zeros((length, width, num_ifgs), dtype=np.complex64) 158 | 159 | c = 0 160 | for i, j in ifg_array: 161 | ifgs[:, :, c] = slc[i, :, :] * np.conjugate(slc[j, :, :]) 162 | c += 1 163 | return ifgs 164 | 165 | 166 | def launchConvolve2d(args: tuple): 167 | """LaunchConvolve2d. 

    Parameters
    ----------
    args : tuple
        Tuple containing the arguments for the convolution.
        Tuple contains:

        idx : int
            Index of the processed interferogram.
        ifg : np.ndarray
            Interferogram.
        filter_kernel : np.ndarray
            Filter kernel.

    Returns
    -------
    idx : int
        Index of the processed interferogram.
    avg_neighbours : np.ndarray
        Low-pass filtered phase derived as average of neighbours.
    """
    # single-tuple argument so the function can be used with multiprocessing.Pool.map
    (idx, ifg, filter_kernel) = args
    avg_neighbours = convolve2d(in1=ifg, in2=filter_kernel, mode='same', boundary="symm")
    return idx, avg_neighbours
--------------------------------------------------------------------------------
/sarvey/sarvey_osm.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
#
# Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
#
# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
# implementation by Vladimir Kolmogorov.
# It is also licensed under the GPL, but it REQUIRES that you
# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
# This requirement extends to SARvey.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see .

"""Download openstreetmap data for area of interest."""
import argparse
import logging
import os
import sys
import time
from os.path import join
import geopandas as gpd
from shapely import ops, Point
import matplotlib

from sarvey import version
from sarvey.osm_utils import runOsmQueryBridge, runOsmQuery, getSpatialExtend

# switching the backend can fail on headless systems; continue with the default backend then
try:
    matplotlib.use('TkAgg')
except ImportError as e:
    print(e)


EXAMPLE = """Example:
    sarvey_osm --geom ./geometryRadar.h5 --railway # download railway
    sarvey_osm --geom ./geometryRadar.h5 --highway # download highway
    sarvey_osm --geom ./geometryRadar.h5 --railway --bridge # download railway bridge
    sarvey_osm --geom ./geometryRadar.h5 --railway -o mask_railway.shp # specify output path
"""


def create_parser():
    """Create the command line argument parser for the sarvey_osm tool."""
    parser = argparse.ArgumentParser(
        description='Download transport infrastructure information from openstreetmap and store as shp-file.',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog=EXAMPLE)

    parser.add_argument('-w', '--work_dir', dest='work_dir', default=None,
                        help='absolute path to working directory\n' +
                             '(default: current directory).')

    parser.add_argument('--geom', dest='geom_file', default=None,
                        help='path to existing geometryRadar.h5 file')

    parser.add_argument('--railway', dest='railway', action="store_true", default=False,
                        help='Set true to query railways.')

    parser.add_argument('--highway', dest='highway', action="store_true", default=False,
                        help='Set true to query highways.')

    parser.add_argument('--bridge', dest='bridge', action="store_true", default=False,
                        help='Set true to mask bridges.\n' +
                             'If --railway or --highway set true, only railway/highway bridges are queried.')

    parser.add_argument('-o', dest='out_file_name', default='osm_infra.shp',
                        help="name of output file. (default: 'osm_infra.shp')")

    parser.add_argument('--version', action='version',
                        version=f"SARvey version {version.__version__} - {version.__versionalias__}, "
                                f"{version.__versiondate__}")

    return parser


def downloadOSM(*, railway: bool, highway: bool, bridge: bool,
                work_dir: str, out_file_name: str, logger: logging.Logger, geom_file: str):
    """Download openstreetmap data and store to file.

    Parameters
    ----------
    railway: bool
        download railway data.
    highway: bool
        download highway data.
    bridge: bool
        download bridge data.
    work_dir: str
        working directory.
    out_file_name: str
        output file name.
    logger: logging.Logger
        logger.
    geom_file: str
        path to geometryRadar.h5 file.
112 | """ 113 | logger.info(msg="Start creating mask file based on openstreetmap data.") 114 | 115 | # get bounding box 116 | ll_bbox, ur_bbox, img_ext, coord, atr = getSpatialExtend(geom_file=geom_file, logger=logger) 117 | 118 | # store image extend 119 | gdf = gpd.GeoDataFrame({"geometry": gpd.geoseries.GeoSeries(img_ext)}) 120 | gdf = gdf.dissolve().convex_hull 121 | gdf.to_file(join(work_dir, "img_extend.gpkg")) 122 | 123 | # store bounding box 124 | bbox_points = [ 125 | Point(ll_bbox[1], ll_bbox[0]), 126 | Point(ur_bbox[1], ll_bbox[0]), 127 | Point(ur_bbox[1], ur_bbox[0]), 128 | Point(ll_bbox[1], ur_bbox[0]) 129 | ] 130 | 131 | gdf = gpd.GeoDataFrame({"geometry": gpd.geoseries.GeoSeries(bbox_points)}) 132 | gdf = gdf.dissolve().convex_hull 133 | gdf.to_file(join(work_dir, "bounding_box.gpkg")) 134 | 135 | if (not railway) & (not highway) & (not bridge): 136 | logger.error(msg="No infrastructure type was specified.") 137 | return 138 | 139 | if bridge: 140 | # get requested OSM layer 141 | query_result = runOsmQueryBridge( 142 | ll_corner_wgs=ll_bbox, ur_corner_wgs=ur_bbox, 143 | bridge_highway=highway, bridge_railway=railway, 144 | logger=logger 145 | ) 146 | else: 147 | type_list = list() 148 | if railway: 149 | type_list += ["rail"] 150 | if highway: 151 | type_list += ["motorway", "motorway_link", "trunk", "trunk_link"] 152 | 153 | # get requested OSM layer 154 | query_result = runOsmQuery(ll_corner_wgs=ll_bbox, ur_corner_wgs=ur_bbox, 155 | type_list=type_list, logger=logger) 156 | 157 | multi_line_list = list() 158 | for way in query_result.ways: 159 | if "area" in way.tags: 160 | if way.tags["area"] == "yes": 161 | logger.info('Area flag is true') 162 | continue 163 | else: 164 | # keep coordinates in lat/lon. It will be needed in masking step. 
165 | coord = [[float(way.nodes[i].lon), float(way.nodes[i].lat)] for i in range(len(way.nodes))] 166 | multi_line_list.append(coord) 167 | 168 | # Merge all road segments 169 | merged_road = list(ops.linemerge(multi_line_list).geoms) 170 | gdf = gpd.GeoDataFrame({"geometry": gpd.GeoSeries(merged_road)}) 171 | # gdf = gdf.set_crs(crs=utm_crs) # set appropriate CRS 172 | # todo: add attributes if required 173 | 174 | # todo: check ending of output file name 175 | gdf.to_file(join(work_dir, out_file_name)) 176 | logger.info(msg="OSM download finished.") 177 | 178 | 179 | def main(iargs=None): 180 | """Download openstreetmap data and store to file.""" 181 | # check input 182 | parser = create_parser() 183 | inps = parser.parse_args(args=iargs) 184 | 185 | # initiate logger 186 | logging_level = logging.getLevelName('DEBUG') 187 | 188 | log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') 189 | logger = logging.getLogger(__name__) 190 | 191 | current_datetime = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) 192 | log_filename = f"sarvey_osm_{current_datetime}.log" 193 | if not os.path.exists(os.path.join(os.getcwd(), "logfiles")): 194 | os.mkdir(os.path.join(os.getcwd(), "logfiles")) 195 | file_handler = logging.FileHandler(filename=os.path.join(os.getcwd(), "logfiles", log_filename)) 196 | file_handler.setFormatter(log_format) 197 | logger.addHandler(file_handler) 198 | 199 | console_handler = logging.StreamHandler(sys.stdout) 200 | console_handler.setFormatter(log_format) 201 | logger.addHandler(console_handler) 202 | logger.setLevel(logging_level) 203 | 204 | if inps.work_dir is None: 205 | work_dir = os.getcwd() 206 | else: 207 | work_dir = inps.work_dir 208 | if not os.path.exists(path=work_dir): 209 | logger.info(msg='create output folder: ' + work_dir) 210 | os.mkdir(path=work_dir) 211 | logger.info(msg='working directory: {}'.format(work_dir)) 212 | 213 | downloadOSM( 214 | railway=inps.railway, 215 | highway=inps.highway, 216 | 
bridge=inps.bridge, 217 | work_dir=work_dir, 218 | out_file_name=inps.out_file_name, 219 | logger=logger, 220 | geom_file=inps.geom_file 221 | ) 222 | 223 | 224 | if __name__ == '__main__': 225 | main() 226 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ======================== 2 | SARvey - survey with SAR 3 | ======================== 4 | 5 | Open-source InSAR time series analysis software developed within the project SAR4Infra. 6 | **SARvey** aims to analyze InSAR displacement time series for engineering applications. 7 | 8 | 9 | 10 | Documentation 11 | ------------- 12 | The documentation with installation instructions, processing steps, and examples with a demo dataset can be found at: 13 | https://luhipi.github.io/sarvey/main 14 | 15 | Discussion 16 | ---------- 17 | 18 | Use the Q&A in discussion_ to ask questions and get help from the community. 19 | Report bugs and request features in the issue_ tracker. 20 | 21 | 22 | Status 23 | ------ 24 | 25 | .. image:: https://github.com/luhipi/sarvey/actions/workflows/ci.yml/badge.svg 26 | :target: https://github.com/luhipi/sarvey/actions 27 | :alt: Pipelines 28 | .. image:: https://img.shields.io/static/v1?label=Documentation&message=GitHub%20Pages&color=blue 29 | :target: https://luhipi.github.io/sarvey/main 30 | :alt: Documentation 31 | .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.12544130.svg 32 | :target: https://doi.org/10.5281/zenodo.12544130 33 | :alt: DOI 34 | 35 | License 36 | ------- 37 | 38 | **SARvey** is distributed under the GNU General Public License, version 3 (GPLv3). 39 | 40 | The following exceptions applies: 41 | 42 | This package uses PyMaxFlow. The core of PyMaxflows library is the C++ implementation by Vladimir Kolmogorov. 
It is also licensed under the GPL, but it REQUIRES that you cite [BOYKOV04] in any resulting publication if you use this code for research purposes.
This requirement extends to **SARvey**.

Please check out the details of the license `here `_.

How to cite
-----------

If you use **SARvey** in your research, please cite the following.

1. The paper describing the methodology:

Piter A, Haghshenas Haghighi M, Motagh M (2024). Challenges and Opportunities of Sentinel-1 InSAR for Transport Infrastructure Monitoring. PFG – Journal of Photogrammetry, Remote Sensing and Geoinformation Science, 92, 609-627. `Link to paper `_.

2. The software itself. Please specify the version you use:

Piter A, Haghshenas Haghighi M, FERN.Lab, Motagh M (2024). SARvey - survey with SAR [version]. Zenodo. https://doi.org/10.5281/zenodo.12544130

3. If you use the PUMA method for unwrapping in your research, please cite the following publication as indicated in the license:

Boykov Y, Kolmogorov V (2004). An experimental comparison of min-cut/max-flow algorithms for energy minimization in vision. IEEE Transactions on Pattern Analysis and Machine Intelligence 26(9):1124–1137, DOI 10.1109/TPAMI.2004.60. `Link to paper `_.


Processing overview
-------------------


.. image:: https://seafile.projekt.uni-hannover.de/f/006f702937cd4e618bcb/?dl=1
   :width: 600
   :align: center
   :alt: SARvey workflow

Processing workflow for using the SARvey software to derive displacement time series.
SARvey builds upon and uses functions of the two InSAR research software packages MintPy_ (multilook SBAS processing) and MiaplPy_ (Phase linking for single-look DS processing).



SARvey is a command-line-based software.
The major steps for running SARvey are the following: 80 | 81 | * **Installation** 82 | 83 | SARvey is a cross-platform python-based software and can be installed on Linux and MacOS. On Windows, SARvey is tested on Windows Subsystem for Linux (WSL_) version 2. 84 | Details of installation can be found in `installation instruction`_. 85 | 86 | 87 | * **Preprocessing** 88 | 89 | The software requires a coregistered stack of SLC and the related geometry information in the MiaplPy_ data format. 90 | The coregistered stack of SLC can be created using an InSAR processor. Currently MiaplPy_ only supports ISCE_. Support for GAMMA and SNAP_ is planned for future. 91 | After creating the coregistered stack of SLC, run the "load_data" step from Miaplpy_ to create the "inputs" directory which contains "slcStack.h5" and "geometryRadar.h5". 92 | Details are explained in the preparation_ section 93 | 94 | 95 | * **Time series analysis** 96 | 97 | Time series analysis is performed using `sarvey`. It consists of 5 steps (steps 0 to 4). The details of each step are explained in `processing steps`_. The processing parameters are handled in a json config file. Visualization and export are handled by `sarvey_plot` and `sarvey_export` packages. Below are the major steps: 98 | 99 | * Go to your working directory: 100 | 101 | .. code-block:: bash 102 | 103 | cd path/to/working_dir/ 104 | 105 | * Create a default config file using **"-g"** flag: 106 | 107 | .. code-block:: bash 108 | 109 | sarvey -f config.json 0 4 -g 110 | 111 | * Modify **config.json** to change path to "inputs" directory. Modify other parameters as desired. 112 | 113 | * Run all processing steps (steps 0 to 4): 114 | 115 | .. code-block:: bash 116 | 117 | sarvey -f config.json 0 4 118 | 119 | Different processing steps are explained in `processing`_ section. 120 | 121 | * Plot the resulting displacement time series: 122 | 123 | .. 
code-block:: bash 124 | 125 | sarvey_plot outputs/p2_coh80_ts.h5 -t 126 | 127 | * Export the results as Shapefiles_: 128 | 129 | .. code-block:: bash 130 | 131 | sarvey_export outputs/p2_coh80_ts.h5 -o outputs/shp/p2_coh80.shp 132 | 133 | 134 | Feature overview 135 | ---------------- 136 | 137 | **SARvey** has three main components for processing, visualization, and exporting data. 138 | 139 | * `sarvey` performs time series analysis. 140 | * `sarvey_plot` plots the outputs. 141 | * `sarvey_export` exports InSAR time series results from to GIS data formats. The GIS data format can be visualized for example in QGIS_. 142 | 143 | It also has two components that facilitate transport infrastructure monitoring. 144 | 145 | * `sarvey_mask` creates mask from Shapefiles, e.g. for transport infrastructures. 146 | * `sarvey_osm` downloads transport infrastructure information from OSM_ and store as Shapefiles. 147 | 148 | You can run each component in the command line with "-h" argument for more information about the usage. For example: 149 | 150 | .. code-block:: bash 151 | 152 | sarvey -h 153 | 154 | 155 | 156 | **SARvey** supports two processing schemes: 157 | 158 | * `Two-step unwrapping`_ with atmospheric correction (default). 159 | 160 | * `One-step unwrapping`_ for a small area. 161 | 162 | History / Changelog 163 | ------------------- 164 | 165 | You can find the protocol of recent changes in the **SARvey** package 166 | `history`_. 167 | 168 | We follow the principle of semantic versioning. 169 | The version number is structured as follows: MAJOR.MINOR.PATCH. 170 | You can find a description of the versioning scheme `here `__. 171 | 172 | Credits 173 | ------- 174 | 175 | This software was developed within the project SAR4Infra (2020-2024) with funds of the German Federal Ministry for Digital and Transport. 
176 | The project consortium consists of 177 | the `Institute of Photogrammetry and GeoInformation`_ at Leibniz University Hannover, 178 | `FERN.Lab`_ (innovation and technology transfer lab of the GFZ German Research Centre for Geosciences, Potsdam), 179 | `Landesamt fuer Vermessung und Geoinformation Schleswig-Holstein`_, 180 | and `Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein`_. 181 | The scientific and methodological development was carried out by Andreas Piter (piter@ipi.uni-hannover.de), supervised by Mahmud H. Haghighi (mahmud@ipi.uni-hannover.de) and Mahdi Motagh (motagh@gfz-potsdam.de). 182 | The `FERN.Lab`_ (fernlab@gfz-potsdam.de) contributed to the development, documentation, continuous integration, and testing of the package. 183 | 184 | 185 | This package was created with Cookiecutter_ and the `fernlab/cookiecutter-python-package`_ project template. 186 | 187 | 188 | .. _Cookiecutter: https://github.com/audreyr/cookiecutter 189 | .. _`fernlab/cookiecutter-python-package`: https://git.gfz-potsdam.de/fernlab/products/cookiecutters/cookiecutter-python-package 190 | .. _processing: https://luhipi.github.io/sarvey/main/processing.html 191 | .. _`processing steps`: https://luhipi.github.io/sarvey/main/processing.html#processing-steps-for-two-step-unwrapping-workflow 192 | .. _preparation: https://luhipi.github.io/sarvey/main/preparation.html 193 | .. _`Two-step unwrapping`: https://luhipi.github.io/sarvey/main/processing.html#processing-steps-for-two-step-unwrapping-workflow 194 | .. _`One-step unwrapping`: https://luhipi.github.io/sarvey/main/processing.html#processing-steps-for-one-step-unwrapping-workflow 195 | .. _`installation instruction`: https://luhipi.github.io/sarvey/main/installation.html 196 | .. _`history`: https://luhipi.github.io/sarvey/main/history.html 197 | .. _MiaplPy: https://github.com/insarlab/MiaplPy 198 | .. _MintPy: https://github.com/insarlab/MintPy 199 | .. _ISCE: https://github.com/isce-framework/isce2 200 | .. 
_SNAP: https://step.esa.int/main/toolboxes/snap 201 | .. _Shapefiles: https://doc.arcgis.com/en/arcgis-online/reference/shapefiles.htm 202 | .. _QGIS: https://qgis.org/en/site/ 203 | .. _`InSAR Explorer`: https://luhipi.github.io/insar-explorer/ 204 | .. _OSM: https://www.openstreetmap.org/ 205 | .. _WSL: https://learn.microsoft.com/en-us/windows/wsl/ 206 | .. _FERN.Lab: https://fernlab.gfz-potsdam.de/ 207 | .. _`Institute of Photogrammetry and GeoInformation`: https://www.ipi.uni-hannover.de/en/ 208 | .. _`Landesamt fuer Vermessung und Geoinformation Schleswig-Holstein`: https://www.schleswig-holstein.de/DE/landesregierung/ministerien-behoerden/LVERMGEOSH/lvermgeosh_node.html 209 | .. _`Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein`: https://www.schleswig-holstein.de/DE/Landesregierung/LBVSH/lbvsh_node.html 210 | .. _discussion: https://github.com/luhipi/sarvey/discussions 211 | .. _issue: https://github.com/luhipi/sarvey/issues 212 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | - 'v?*b' 8 | - 'v?*beta' 9 | push: 10 | branches: 11 | - main 12 | release: 13 | types: 14 | - published 15 | 16 | permissions: 17 | contents: write 18 | 19 | env: 20 | SKIP_DEPLOY: true 21 | SKIP_INSTALL: false 22 | SKIP_TEST: false 23 | 24 | jobs: 25 | test_sarvey: 26 | runs-on: self-hosted 27 | steps: 28 | - name: Checkout code 29 | uses: actions/checkout@v4 30 | 31 | - name: Set up Python 32 | uses: actions/setup-python@v5 33 | with: 34 | python-version: '3.x' 35 | 36 | - name: Run tests 37 | if: env.SKIP_TEST == 'false' 38 | run: | 39 | source /opt/conda/etc/profile.d/conda.sh 40 | conda init bash 41 | source ~/.bashrc 42 | conda activate ci_env 43 | rm -rf tests/testdata 44 | # wget -nv -c -O testdata.zip 
https://seafile.projekt.uni-hannover.de/f/4b3be399dffa488e98db/?dl=1 45 | wget -nv -c -O testdata.zip https://seafile.projekt.uni-hannover.de/f/104b499f6f7e4360877d/?dl=1 46 | unzip testdata.zip 47 | mkdir -p test 48 | mv testdata tests/ 49 | mamba list 50 | make pytest 51 | shell: bash 52 | 53 | - name: create docs 54 | if: env.SKIP_DEPLOY == 'false' 55 | run: | 56 | conda init bash 57 | source ~/.bashrc 58 | source activate ci_env 59 | 60 | # replace documentation address for tags befro make docs 61 | IFS='/' read -r OWNER REPO <<< "${GITHUB_REPOSITORY}" 62 | URL_IO="https://${OWNER}.github.io/${REPO}" 63 | URL="https://github.com/${OWNER}/${REPO}" 64 | echo "Repository URL: $URL" 65 | echo "Repository Documentation URL: $URL" 66 | 67 | if [[ "${GITHUB_REF}" == refs/tags/* ]]; then 68 | TAG_NAME=${GITHUB_REF##*/} 69 | DEFAULT_URL="${URL_IO}/main" 70 | NEW_URL="${URL_IO}/tags/${TAG_NAME}" 71 | sed -i "s|$DEFAULT_URL|$NEW_URL|g" README.rst 72 | fi 73 | 74 | # update new badge URL based on the branch or tag 75 | if [[ "${GITHUB_REF}" == refs/tags/* ]]; then 76 | TAG_NAME=${GITHUB_REF##*/} 77 | BADGE_URL="${URL}/actions/workflows/ci.yml/badge.svg?branch=$TAG_NAME" 78 | else 79 | BADGE_URL="${URL}/actions/workflows/ci.yml/badge.svg?branch=main" 80 | fi 81 | echo "Badge URL: $BADGE_URL" 82 | BADGE_DEFAULT_URL="${URL}/actions/workflows/ci.yml/badge.svg" 83 | sed -i "s|${BADGE_DEFAULT_URL}|$BADGE_URL|g" README.rst 84 | 85 | echo "README.rst content" 86 | cat README.rst 87 | 88 | make docs 89 | shell: bash 90 | 91 | - name: Upload coverage report 92 | if: env.SKIP_DEPLOY == 'false' 93 | uses: actions/upload-artifact@v4 94 | with: 95 | name: coverage-report 96 | path: htmlcov/ 97 | 98 | - name: Upload report.html 99 | if: env.SKIP_DEPLOY == 'false' 100 | uses: actions/upload-artifact@v4 101 | with: 102 | name: test-report 103 | path: report.html 104 | 105 | - name: Upload docs 106 | if: env.SKIP_DEPLOY == 'false' 107 | uses: actions/upload-artifact@v4 108 | with: 109 | 
name: docs 110 | path: docs/_build/html/ 111 | 112 | - name: Upload cobertura coverage report 113 | if: env.SKIP_DEPLOY == 'false' 114 | uses: actions/upload-artifact@v4 115 | with: 116 | name: cobertura-coverage 117 | path: coverage.xml 118 | 119 | - name: Upload junit report 120 | if: env.SKIP_DEPLOY == 'false' 121 | uses: actions/upload-artifact@v4 122 | with: 123 | name: junit-report 124 | path: report.xml 125 | 126 | test_styles: 127 | runs-on: self-hosted 128 | needs: test_sarvey 129 | steps: 130 | - name: Checkout code 131 | uses: actions/checkout@v4 132 | 133 | - name: Set up Python 134 | uses: actions/setup-python@v5 135 | with: 136 | python-version: '3.x' 137 | 138 | - name: Install dependencies 139 | if: env.SKIP_TEST == 'false' 140 | run: | 141 | source /opt/conda/etc/profile.d/conda.sh 142 | conda init bash 143 | source ~/.bashrc 144 | conda activate ci_env 145 | make lint 146 | shell: bash 147 | 148 | - name: Upload flake8 log 149 | if: env.SKIP_TEST == 'false' 150 | uses: actions/upload-artifact@v4 151 | with: 152 | name: flake8-log 153 | path: tests/linting/flake8.log 154 | 155 | - name: Upload pycodestyle log 156 | if: env.SKIP_TEST == 'false' 157 | uses: actions/upload-artifact@v4 158 | with: 159 | name: pycodestyle-log 160 | path: tests/linting/pycodestyle.log 161 | 162 | - name: Upload pydocstyle log 163 | if: env.SKIP_TEST == 'false' 164 | uses: actions/upload-artifact@v4 165 | with: 166 | name: pydocstyle-log 167 | path: tests/linting/pydocstyle.log 168 | 169 | test_urls: 170 | runs-on: self-hosted 171 | needs: test_sarvey 172 | steps: 173 | - name: Checkout code 174 | uses: actions/checkout@v4 175 | 176 | - name: Set up Python 177 | uses: actions/setup-python@v5 178 | with: 179 | python-version: '3.x' 180 | 181 | - name: Install dependencies 182 | if: env.SKIP_TEST == 'false' 183 | run: | 184 | source /opt/conda/etc/profile.d/conda.sh 185 | conda init bash 186 | source ~/.bashrc 187 | source activate ci_env 188 | make urlcheck 189 | shell: 
bash 190 | 191 | test_sarvey_install: 192 | runs-on: self-hosted 193 | steps: 194 | - name: Checkout code 195 | uses: actions/checkout@v4 196 | 197 | - name: Set up Python 198 | uses: actions/setup-python@v5 199 | with: 200 | python-version: '3.x' 201 | 202 | - name: Install dependencies 203 | if: env.SKIP_INSTALL == 'false' 204 | run: | 205 | source /opt/conda/etc/profile.d/conda.sh 206 | conda activate base 207 | 208 | conda create -n sarvey_testinstall python=3.10 pip -y 209 | conda activate sarvey_testinstall 210 | conda install conda-forge::pysolid -y 211 | conda install conda-forge::gdal 212 | pip install git+https://github.com/insarlab/MiaplPy.git 213 | 214 | # get current branch for installation 215 | IFS='/' read -r OWNER REPO <<< "${GITHUB_REPOSITORY}" 216 | 217 | URL="https://github.com/${OWNER}/${REPO}" 218 | URL_IO="https://${OWNER}.github.io/${REPO}" 219 | 220 | echo "Repository URL: ${URL}" 221 | echo "Repository Documentation URL: ${URL_IO}" 222 | 223 | if [[ "$GITHUB_REF" == refs/tags/* ]]; then 224 | current_ref="${GITHUB_REF##*/}" 225 | elif [[ "$GITHUB_REF" == refs/heads/* ]]; then 226 | current_ref="${GITHUB_REF#refs/heads/}" 227 | elif [[ "$GITHUB_REF" == refs/pull/* ]]; then 228 | current_ref="${GITHUB_HEAD_REF}" 229 | else 230 | current_ref="${GITHUB_REF}" 231 | fi 232 | 233 | echo "Current branch/tag: ${URL}.git@$current_ref" 234 | pip install git+${URL}.git@$current_ref 235 | pip install sarvey[dev] 236 | 237 | OUTPUT=$(pip check) || { echo "$OUTPUT"; true; } 238 | conda list 239 | python -c "import sarvey; print(sarvey)" 240 | shell: bash 241 | 242 | # deploy_pages: 243 | # runs-on: self-hosted 244 | 245 | # needs: 246 | # - test_sarvey 247 | # - test_urls 248 | # - test_styles 249 | # if: github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/') 250 | # steps: 251 | # - name: Checkout code 252 | # uses: actions/checkout@v4 253 | 254 | # - name: Set up Python 255 | # uses: actions/setup-python@v5 256 | # with: 257 |
python-version: '3.x' 258 | 259 | # - name: Download docs 260 | # uses: actions/download-artifact@v4 261 | # with: 262 | # name: docs 263 | # path: docs/_build/html/ 264 | 265 | # - name: Download coverage report 266 | # uses: actions/download-artifact@v4 267 | # with: 268 | # name: coverage-report 269 | # path: htmlcov/ 270 | 271 | # - name: Download report.html 272 | # uses: actions/download-artifact@v4 273 | # with: 274 | # name: test-report 275 | 276 | # - name: Deploy to GitHub Pages 277 | # # trigger if merged into the main branch || published new tag 278 | # if: env.SKIP_DEPLOY == 'false' && github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) 279 | # run: | 280 | # rm -rf public 281 | 282 | # git clone --branch gh-pages https://github.com/${{ github.repository }} public 283 | 284 | # if [[ "${GITHUB_REF}" == refs/tags/* ]]; then 285 | # TAG_NAME=${GITHUB_REF##*/} 286 | # echo "Deploying to GitHub Pages for version tag: $TAG_NAME" 287 | # DOCS_PATH=public/tags/$TAG_NAME 288 | # else 289 | # echo "Deploying to GitHub Pages for main branch" 290 | # DOCS_PATH=public/main 291 | # fi 292 | 293 | # rm -rf $DOCS_PATH 294 | # mkdir -p $DOCS_PATH/docs 295 | # mkdir -p $DOCS_PATH/images 296 | # mkdir -p $DOCS_PATH/coverage 297 | # mkdir -p $DOCS_PATH/test_reports 298 | 299 | # cp -r docs/_build/html/* $DOCS_PATH/docs 300 | # cp -r htmlcov/* $DOCS_PATH/coverage/ 301 | # cp report.html $DOCS_PATH/test_reports/ 302 | 303 | # ls -al $DOCS_PATH 304 | # ls -al $DOCS_PATH/docs 305 | # ls -al $DOCS_PATH/coverage 306 | # ls -al $DOCS_PATH/test_reports 307 | 308 | # shell: bash 309 | 310 | # - name: Upload to GitHub Pages 311 | # if: env.SKIP_DEPLOY == 'false' 312 | # uses: peaceiris/actions-gh-pages@v4 313 | # with: 314 | # github_token: ${{ secrets.GITHUB_TOKEN }} 315 | # publish_dir: ./public 316 | -------------------------------------------------------------------------------- /docs/conf.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # sarvey documentation build configuration file, created by 5 | # sphinx-quickstart on Fri Jun 9 13:47:02 2017. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another 17 | # directory, add these directories to sys.path here. If the directory is 18 | # relative to the documentation root, use os.path.abspath to make it 19 | # absolute, like shown here. 20 | # 21 | import os 22 | import sys 23 | sys.path.insert(0, os.path.abspath('..')) 24 | 25 | import sarvey 26 | 27 | # -- General configuration --------------------------------------------- 28 | 29 | # If your documentation needs a minimal Sphinx version, state it here. 30 | # needs_sphinx = '1.0' 31 | 32 | # Add any Sphinx extension module names here, as strings. They can be 33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 34 | extensions = [ 35 | 'sphinx.ext.autodoc', 36 | 'sphinx.ext.githubpages', 37 | 'sphinx.ext.viewcode', 38 | 'sphinx.ext.todo', 39 | # 'sphinxarg.ext', 40 | 'sphinx_autodoc_typehints', 41 | 'sphinx.ext.intersphinx' 42 | ] 43 | 44 | # Add any paths that contain templates here, relative to this directory. 45 | templates_path = ['_templates'] 46 | 47 | # The suffix(es) of source filenames. 48 | # You can specify multiple suffix as a list of string: 49 | # source_suffix = ['.rst', '.md'] 50 | source_suffix = '.rst' 51 | 52 | # The encoding of source files. 53 | # source_encoding = 'utf-8-sig' 54 | 55 | # The master toctree document. 
56 | master_doc = 'index' 57 | 58 | # General information about the project. 59 | project = 'SARvey' 60 | copyright = "2024, IPI - Leibniz Universitaet Hannover" 61 | author = "Andreas Piter" 62 | 63 | # The version info for the project you're documenting, acts as replacement 64 | # for |version| and |release|, also used in various other places throughout 65 | # the built documents. 66 | # 67 | # The short X.Y version. 68 | version = sarvey.__version__ 69 | # The full version, including alpha/beta/rc tags. 70 | release = sarvey.__version__ 71 | 72 | # The language for content autogenerated by Sphinx. Refer to documentation 73 | # for a list of supported languages. 74 | # 75 | # This is also used if you do content translation via gettext catalogs. 76 | # Usually you set "language" from the command line for these cases. 77 | language = "en" 78 | 79 | # There are two options for replacing |today|: either, you set today to 80 | # some non-false value, then it is used: 81 | # today = '' 82 | # Else, today_fmt is used as the format for a strftime call. 83 | # today_fmt = '%B %d, %Y' 84 | 85 | # List of patterns, relative to source directory, that match files and 86 | # directories to ignore when looking for source files. 87 | # This patterns also effect to html_static_path and html_extra_path 88 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 89 | 90 | # The reST default role (used for this markup: `text`) to use for all 91 | # documents. 92 | # default_role = None 93 | 94 | # If true, '()' will be appended to :func: etc. cross-reference text. 95 | # add_function_parentheses = True 96 | 97 | # If true, the current module name will be prepended to all description 98 | # unit titles (such as .. function::). 99 | # add_module_names = True 100 | 101 | # If true, sectionauthor and moduleauthor directives will be shown in the 102 | # output. They are ignored by default. 
103 | # show_authors = False 104 | 105 | # The name of the Pygments (syntax highlighting) style to use. 106 | pygments_style = 'sphinx' 107 | 108 | # A list of ignored prefixes for module index sorting. 109 | # modindex_common_prefix = [] 110 | 111 | # If true, keep warnings as "system message" paragraphs in the built 112 | # documents. 113 | # keep_warnings = False 114 | 115 | # Define how to document class docstrings 116 | # '__init__' documents only the __init__ methods, 'class' documents only the class methods and 'both' documents both 117 | autoclass_content = 'both' 118 | 119 | # If true, `todo` and `todoList` produce output, else they produce nothing. 120 | todo_include_todos = True 121 | 122 | 123 | # Apply custom sphinx styles (e.g., increase content width of generated docs) 124 | def setup(app): 125 | app.add_css_file('custom.css') 126 | 127 | 128 | # Add mappings for intersphinx extension (allows to link to the API reference of other sphinx documentations) 129 | intersphinx_mapping = { 130 | 'python': ('https://docs.python.org/3', None), 131 | } 132 | 133 | 134 | # -- Options for HTML output ------------------------------------------- 135 | 136 | # The theme to use for HTML and HTML Help pages. See the documentation for 137 | # a list of builtin themes. 138 | # html_theme = 'default' 139 | html_theme = 'sphinx_rtd_theme' 140 | 141 | # Theme options are theme-specific and customize the look and feel of a 142 | # theme further. For a list of options available for each theme, see the 143 | # documentation. 
144 | # html_theme_options = { 145 | # 'canonical_url': '', 146 | # 'analytics_id': '', 147 | # 'logo_only': False, 148 | # 'display_version': True, 149 | # 'prev_next_buttons_location': 'bottom', 150 | # 'style_external_links': False, 151 | # # Toc options 152 | # 'collapse_navigation': True, 153 | # 'sticky_navigation': True, 154 | # 'navigation_depth': 4, 155 | # 'includehidden': True, 156 | # 'titles_only': False, 157 | # # 'set_type_checking_flag': True # option of sphinx_autodoc_typehints extension 158 | # } 159 | 160 | # Add any paths that contain custom themes here, relative to this directory. 161 | # html_theme_path = [] 162 | 163 | # The name for this set of Sphinx documents. If None, it defaults to 164 | # " v documentation". 165 | # html_title = None 166 | 167 | # A shorter title for the navigation bar. Default is the same as 168 | # html_title. 169 | # html_short_title = None 170 | 171 | # The name of an image file (relative to this directory) to place at the 172 | # top of the sidebar. 173 | # html_logo = None 174 | 175 | # The name of an image file (within the static path) to use as favicon 176 | # of the docs. This file should be a Windows icon file (.ico) being 177 | # 16x16 or 32x32 pixels large. 178 | # html_favicon = None 179 | 180 | # Add any paths that contain custom static files (such as style sheets) 181 | # here, relative to this directory. They are copied after the builtin 182 | # static files, so a file named "default.css" will overwrite the builtin 183 | # "default.css". 184 | html_static_path = ['_static'] 185 | 186 | # If not '', a 'Last updated on:' timestamp is inserted at every page 187 | # bottom, using the given strftime format. 188 | # html_last_updated_fmt = '%b %d, %Y' 189 | 190 | # If true, SmartyPants will be used to convert quotes and dashes to 191 | # typographically correct entities. 192 | # html_use_smartypants = True 193 | 194 | # Custom sidebar templates, maps document names to template names. 
195 | # html_sidebars = {} 196 | 197 | # Additional templates that should be rendered to pages, maps page names 198 | # to template names. 199 | # html_additional_pages = {} 200 | 201 | # If false, no module index is generated. 202 | # html_domain_indices = True 203 | 204 | # If false, no index is generated. 205 | # html_use_index = True 206 | 207 | # If true, the index is split into individual pages for each letter. 208 | # html_split_index = False 209 | 210 | # If true, links to the reST sources are added to the pages. 211 | # html_show_sourcelink = True 212 | 213 | # If true, "Created using Sphinx" is shown in the HTML footer. 214 | # Default is True. 215 | # html_show_sphinx = True 216 | 217 | # If true, "(C) Copyright ..." is shown in the HTML footer. 218 | # Default is True. 219 | # html_show_copyright = True 220 | 221 | # If true, an OpenSearch description file will be output, and all pages 222 | # will contain a tag referring to it. The value of this option 223 | # must be the base URL from which the finished HTML is served. 224 | # html_use_opensearch = '' 225 | 226 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 227 | # html_file_suffix = None 228 | 229 | # Output file base name for HTML help builder. 230 | htmlhelp_basename = 'sarveydoc' 231 | 232 | 233 | # -- Options for LaTeX output ------------------------------------------ 234 | 235 | latex_elements = { 236 | # The paper size ('letterpaper' or 'a4paper'). 237 | # 'papersize': 'letterpaper', 238 | 239 | # The font size ('10pt', '11pt' or '12pt'). 240 | # 'pointsize': '10pt', 241 | 242 | # Additional stuff for the LaTeX preamble. 243 | # 'preamble': '', 244 | 245 | # Latex figure (float) alignment 246 | # 'figure_align': 'htbp', 247 | } 248 | 249 | # Grouping the document tree into LaTeX files. List of tuples 250 | # (source start file, target name, title, author, documentclass 251 | # [howto, manual, or own class]). 
252 | latex_documents = [ 253 | (master_doc, 'sarvey.tex', 254 | 'SARvey Documentation', 255 | author, 'manual'), 256 | ] 257 | 258 | # The name of an image file (relative to this directory) to place at 259 | # the top of the title page. 260 | # latex_logo = None 261 | 262 | # For "manual" documents, if this is true, then toplevel headings 263 | # are parts, not chapters. 264 | # latex_use_parts = False 265 | 266 | # If true, show page references after internal links. 267 | # latex_show_pagerefs = False 268 | 269 | # If true, show URL addresses after external links. 270 | # latex_show_urls = False 271 | 272 | # Documents to append as an appendix to all manuals. 273 | # latex_appendices = [] 274 | 275 | # If false, no module index is generated. 276 | # latex_domain_indices = True 277 | 278 | 279 | # -- Options for manual page output ------------------------------------ 280 | 281 | # One entry per manual page. List of tuples 282 | # (source start file, name, description, authors, manual section). 283 | man_pages = [ 284 | (master_doc, 'sarvey', 285 | 'SARvey Documentation', 286 | [author], 1) 287 | ] 288 | 289 | # If true, show URL addresses after external links. 290 | # man_show_urls = False 291 | 292 | 293 | # -- Options for Texinfo output ---------------------------------------- 294 | 295 | # Grouping the document tree into Texinfo files. List of tuples 296 | # (source start file, target name, title, author, 297 | # dir menu entry, description, category) 298 | texinfo_documents = [ 299 | (master_doc, 'sarvey', 300 | 'SARvey Documentation', 301 | author, 302 | 'sarvey', 303 | 'One line description of project.', 304 | 'Miscellaneous'), 305 | ] 306 | 307 | # Documents to append as an appendix to all manuals. 308 | # texinfo_appendices = [] 309 | 310 | # If false, no module index is generated. 311 | # texinfo_domain_indices = True 312 | 313 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 
314 | # texinfo_show_urls = 'footnote' 315 | 316 | # If true, do not generate a @detailmenu in the "Top" node's menu. 317 | # texinfo_no_detailmenu = False 318 | -------------------------------------------------------------------------------- /sarvey/densification.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 21 | # 22 | # This program is distributed in the hope that it will be useful, but WITHOUT 23 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 24 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 25 | # details. 26 | # 27 | # You should have received a copy of the GNU Lesser General Public License along 28 | # with this program. If not, see . 
29 | 30 | """Densification module for SARvey.""" 31 | import time 32 | import multiprocessing 33 | import numpy as np 34 | from scipy.spatial import KDTree 35 | from logging import Logger 36 | 37 | from mintpy.utils import ptime 38 | 39 | from sarvey.unwrapping import oneDimSearchTemporalCoherence 40 | from sarvey.objects import Points 41 | import sarvey.utils as ut 42 | 43 | 44 | def densificationInitializer(tree_p1: KDTree, point2_obj: Points, demod_phase1: np.ndarray): 45 | """DensificationInitializer. 46 | 47 | Sets values to global variables for parallel processing. 48 | 49 | Parameters 50 | ---------- 51 | tree_p1 : KDTree 52 | KDTree of the first-order network 53 | point2_obj : Points 54 | Points object with second-order points 55 | demod_phase1 : np.ndarray 56 | demodulated phase of the first-order network 57 | """ 58 | global global_tree_p1 59 | global global_point2_obj 60 | global global_demod_phase1 61 | 62 | global_tree_p1 = tree_p1 63 | global_point2_obj = point2_obj 64 | global_demod_phase1 = demod_phase1 65 | 66 | 67 | def launchDensifyNetworkConsistencyCheck(args: tuple): 68 | """LaunchDensifyNetworkConsistencyCheck. 69 | 70 | Launches the densification of the network with second-order points inside parallel processing. 
71 | 72 | Parameters 73 | ---------- 74 | args : tuple 75 | Tuple with the following parameters: 76 | 77 | idx_range : np.ndarray 78 | Array with the indices of the second-order points 79 | num_points : int 80 | Number of second-order points 81 | num_conn_p1 : int 82 | Number of nearest points in the first-order network 83 | max_dist_p1 : float 84 | Maximum allowed distance to the nearest points in the first-order network 85 | velocity_bound : float 86 | Bound for the velocity estimate in temporal unwrapping 87 | demerr_bound : float 88 | Bound for the DEM error estimate in temporal unwrapping 89 | num_samples : int 90 | Number of samples for the search of the optimal parameters 91 | 92 | Returns 93 | ------- 94 | idx_range : np.ndarray 95 | Array with the indices of the second-order points 96 | demerr_p2 : np.ndarray 97 | DEM error array of the second-order points 98 | vel_p2 : np.ndarray 99 | Velocity array of the second-order points 100 | gamma_p2 : np.ndarray 101 | Estimated temporal coherence array of the second-order points resulting from temporal unwrapping 102 | """ 103 | (idx_range, num_points, num_conn_p1, max_dist_p1, velocity_bound, demerr_bound, num_samples) = args 104 | 105 | counter = 0 106 | prog_bar = ptime.progressBar(maxValue=num_points) 107 | 108 | # initialize output 109 | demerr_p2 = np.zeros((num_points,), dtype=np.float32) 110 | vel_p2 = np.zeros((num_points,), dtype=np.float32) 111 | gamma_p2 = np.zeros((num_points,), dtype=np.float32) 112 | 113 | design_mat = np.zeros((global_point2_obj.ifg_net_obj.num_ifgs, 2), dtype=np.float32) 114 | 115 | demerr_range = np.linspace(-demerr_bound, demerr_bound, num_samples) 116 | vel_range = np.linspace(-velocity_bound, velocity_bound, num_samples) 117 | 118 | factor = 4 * np.pi / global_point2_obj.wavelength 119 | 120 | for idx in range(num_points): 121 | p2 = idx_range[idx] 122 | # nearest points in p1 123 | dist, nearest_p1 = global_tree_p1.query([global_point2_obj.coord_utm[p2, 0], 124 | 
global_point2_obj.coord_utm[p2, 1]], k=num_conn_p1) 125 | mask = (dist < max_dist_p1) & (dist != 0) 126 | mask[:3] = True # ensure that always at least the three closest points are used 127 | nearest_p1 = nearest_p1[mask] 128 | 129 | # compute arc observations to nearest points 130 | arc_phase_p1 = np.angle(np.exp(1j * global_point2_obj.phase[p2, :]) * 131 | np.conjugate(np.exp(1j * global_demod_phase1[nearest_p1, :]))) 132 | 133 | design_mat[:, 0] = (factor * global_point2_obj.ifg_net_obj.pbase_ifg 134 | / (global_point2_obj.slant_range[p2] * np.sin(global_point2_obj.loc_inc[p2]))) 135 | design_mat[:, 1] = factor * global_point2_obj.ifg_net_obj.tbase_ifg 136 | 137 | demerr_p2[idx], vel_p2[idx], gamma_p2[idx] = oneDimSearchTemporalCoherence( 138 | demerr_range=demerr_range, 139 | vel_range=vel_range, 140 | obs_phase=arc_phase_p1, 141 | design_mat=design_mat 142 | ) 143 | 144 | prog_bar.update(counter + 1, every=np.int16(200), 145 | suffix='{}/{} points'.format(counter + 1, num_points)) 146 | counter += 1 147 | 148 | return idx_range, demerr_p2, vel_p2, gamma_p2 149 | 150 | 151 | def densifyNetwork(*, point1_obj: Points, vel_p1: np.ndarray, demerr_p1: np.ndarray, point2_obj: Points, 152 | num_conn_p1: int, max_dist_p1: float, velocity_bound: float, demerr_bound: float, 153 | num_samples: int, num_cores: int = 1, logger: Logger): 154 | """DensifyNetwork. 155 | 156 | Densifies the network with second-order points by connecting the second-order points to the closest points in the 157 | first-order network. 
158 | 159 | Parameters 160 | ---------- 161 | point1_obj : Points 162 | Points object with first-order points 163 | vel_p1 : np.ndarray 164 | Velocity array of the first-order points 165 | demerr_p1 : np.ndarray 166 | DEM error array of the first-order points 167 | point2_obj : Points 168 | Points object with second-order points 169 | num_conn_p1 : int 170 | Number of nearest points in the first-order network 171 | max_dist_p1 : float 172 | Maximum allowed distance to the nearest points in the first-order network 173 | velocity_bound : float 174 | Bound for the velocity estimate in temporal unwrapping 175 | demerr_bound : float 176 | Bound for the DEM error estimate in temporal unwrapping 177 | num_samples : int 178 | Number of samples for the search of the optimal parameters 179 | num_cores : int 180 | Number of cores for parallel processing (default: 1) 181 | logger : Logger 182 | Logger object 183 | 184 | Returns 185 | ------- 186 | demerr_p2 : np.ndarray 187 | DEM error array of the second-order points 188 | vel_p2 : np.ndarray 189 | Velocity array of the second-order points 190 | gamma_p2 : np.ndarray 191 | Estimated temporal coherence array of the second-order points resulting from temporal unwrapping 192 | """ 193 | msg = "#" * 10 194 | msg += " DENSIFICATION WITH SECOND-ORDER POINTS " 195 | msg += "#" * 10 196 | logger.info(msg=msg) 197 | start_time = time.time() 198 | 199 | # find the closest points from first-order network 200 | tree_p1 = KDTree(data=point1_obj.coord_utm) 201 | 202 | # remove parameters from wrapped phase 203 | pred_phase_demerr, pred_phase_vel = ut.predictPhase( 204 | obj=point1_obj, 205 | vel=vel_p1, demerr=demerr_p1, 206 | ifg_space=True, logger=logger 207 | ) 208 | pred_phase = pred_phase_demerr + pred_phase_vel 209 | 210 | # Note: for small baselines it does not make a difference if re-wrapping the phase difference or not. 211 | # However, for long baselines (like in the star network) it does make a difference. 
Leijen (2014) does not re-wrap 212 | # the arc double differences to be able to test the ambiguities. Kampes (2006) does re-wrap, but is testing based 213 | # on the estimated parameters. Hence, it doesn't make a difference for him. Not re-wrapping can be a starting point 214 | # for triangle-based temporal unwrapping. 215 | # demod_phase1 = np.angle(np.exp(1j * point1_obj.phase) * np.conjugate(np.exp(1j * pred_phase))) # re-wrapping 216 | demod_phase1 = point1_obj.phase - pred_phase # not re-wrapping 217 | 218 | # initialize output 219 | init_args = (tree_p1, point2_obj, demod_phase1) 220 | 221 | if num_cores == 1: 222 | densificationInitializer(tree_p1=tree_p1, point2_obj=point2_obj, demod_phase1=demod_phase1) 223 | args = (np.arange(point2_obj.num_points), point2_obj.num_points, num_conn_p1, max_dist_p1, 224 | velocity_bound, demerr_bound, num_samples) 225 | idx_range, demerr_p2, vel_p2, gamma_p2 = launchDensifyNetworkConsistencyCheck(args) 226 | else: 227 | with multiprocessing.Pool(num_cores, initializer=densificationInitializer, initargs=init_args) as pool: 228 | logger.info(msg="start parallel processing with {} cores.".format(num_cores)) 229 | num_cores = point2_obj.num_points if num_cores > point2_obj.num_points else num_cores 230 | # avoids having less samples than cores 231 | idx = ut.splitDatasetForParallelProcessing(num_samples=point2_obj.num_points, num_cores=num_cores) 232 | args = [( 233 | idx_range, 234 | idx_range.shape[0], 235 | num_conn_p1, 236 | max_dist_p1, 237 | velocity_bound, 238 | demerr_bound, 239 | num_samples 240 | ) for idx_range in idx] 241 | 242 | results = pool.map_async(launchDensifyNetworkConsistencyCheck, args, chunksize=1) 243 | while True: 244 | time.sleep(5) 245 | if results.ready(): 246 | results = results.get() 247 | break 248 | 249 | demerr_p2 = np.zeros((point2_obj.num_points,), dtype=np.float32) 250 | vel_p2 = np.zeros((point2_obj.num_points,), dtype=np.float32) 251 | gamma_p2 = np.zeros((point2_obj.num_points,), 
dtype=np.float32) 252 | 253 | # retrieve results 254 | for i, demerr_i, vel_i, gamma_i in results: 255 | demerr_p2[i] = demerr_i 256 | vel_p2[i] = vel_i 257 | gamma_p2[i] = gamma_i 258 | 259 | m, s = divmod(time.time() - start_time, 60) 260 | logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s)) 261 | 262 | # combine p1 and p2 parameters and bring them in correct order using point_id 263 | sort_idx = np.argsort(np.append(point1_obj.point_id, point2_obj.point_id)) 264 | demerr_p2 = np.append(demerr_p1, demerr_p2) # add dem error of p1 pixels 265 | vel_p2 = np.append(vel_p1, vel_p2) 266 | gamma_p2 = np.append(np.ones_like(point1_obj.point_id), gamma_p2) # add gamma=1 for p1 pixels 267 | 268 | demerr_p2 = demerr_p2[sort_idx] 269 | vel_p2 = vel_p2[sort_idx] 270 | gamma_p2 = gamma_p2[sort_idx] 271 | return demerr_p2, vel_p2, gamma_p2 272 | -------------------------------------------------------------------------------- /docs/demo/demo_masjed_dam_detailed_guide.rst: -------------------------------------------------------------------------------- 1 | .. _demo_masjed_dam_detailed_guide: 2 | 3 | Detailed Guide for Masjed Soleyman Dam 4 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 5 | 6 | This tutorial provides a comprehensive guide to SARvey processing. If you are an advanced user, you can proceed directly to the :ref:`fast track for advanced users `. 7 | 8 | .. note:: 9 | 10 | This instruction is based on SARvey version 1.0.0 (Strawberry Pie). Newer versions may differ slightly. 11 | 12 | Step 1: Before Running SARvey 13 | """"""""""""""""""""""""""""" 14 | 15 | Step 1.1: Download the Data 16 | """"""""""""""""""""""""""" 17 | 18 | Download the data by running the following command in the console: 19 | 20 | .. code-block:: bash 21 | 22 | wget https://zenodo.org/records/12189041/files/SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip 23 | 24 | 25 | Unzip the downloaded file and change the directory. 26 | 27 | ..
code-block:: bash 28 | 29 | unzip SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip 30 | cd SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018 31 | 32 | 33 | Check the downloaded data using `info.py` and `view.py`. For example: 34 | 35 | .. code-block:: bash 36 | 37 | info.py inputs/slcStack.h5 38 | 39 | .. code-block:: bash 40 | 41 | view.py inputs/geometryRadar.h5 42 | 43 | 44 | Step 1.2: Activate SARvey and Change Directory 45 | """"""""""""""""""""""""""""""""""""""""""""""" 46 | 47 | If you have not installed SARvey, refer to the `installation instructions `_. Activate the SARvey environment: 48 | 49 | .. code-block:: bash 50 | 51 | conda activate sarvey 52 | 53 | Ensure SARvey can be called from the console. 54 | 55 | .. code-block:: bash 56 | 57 | sarvey -h 58 | 59 | If you see the following command, it indicates that SARvey cannot be called. Ensure it is installed correctly and the conda environment is activated. 60 | 61 | .. code-block:: none 62 | 63 | command not found: sarvey 64 | 65 | Step 1.3: Create a Config File 66 | """""""""""""""""""""""""""""" 67 | 68 | Create a config file, which is a JSON file containing the parameters for `sarvey`. The config file can be created using the following command: 69 | 70 | .. code-block:: bash 71 | 72 | sarvey -f config.json 0 0 -g 73 | 74 | Note: The above command only generates a configuration file. Although step 0 is specified, it will not be executed. 75 | 76 | Step 1.4: Modify the config.json File 77 | """"""""""""""""""""""""""""""""""""" 78 | 79 | 1.4.1. Open the config.json file and check the parameters. The first parameters to specify in the config file are **input_path** and **output_path**. For this example dataset, the `slcStack.h5` and `geometryRadar.h5` files are in the `inputs/` directory, which is the default value in the config file. Therefore, you do not need to change it. The **output_path** should be `outputs/` for this example. 80 | 81 | .. 
code-block:: json 82 | 83 | { 84 | "general": { 85 | "input_path": "inputs/", 86 | "output_path": "outputs/" 87 | } 88 | // other parameters 89 | } 90 | 91 | 1.4.2. Specify the **num_cores**. You can check the number of cores on your computer using the following commands. 92 | 93 | In Linux, run: 94 | 95 | .. code-block:: bash 96 | 97 | nproc --all 98 | 99 | In MacOS, run: 100 | 101 | .. code-block:: bash 102 | 103 | sysctl -n hw.ncpu 104 | 105 | It is a good practice to specify a number lower than the number of available cores in the config file. 106 | 107 | .. code-block:: json 108 | 109 | { 110 | // other parameters 111 | "general": { 112 | "num_cores": 5, 113 | // other parameters 114 | }, 115 | //other parameters 116 | } 117 | 118 | 119 | 120 | Step 2: Running SARvey 121 | """""""""""""""""""""" 122 | 123 | SARvey consists of five steps as detailed in :ref:`processing`. You can run all steps by specifying starting step `0` and ending step `4`. In this tutorial, however, we will run the steps separately as follows. 124 | 125 | When running `sarvey`, if it finishes normally, you will see a message like the following in the command line: 126 | 127 | .. code-block:: none 128 | 129 | 2024-06-19 11:05:10,305 - INFO - MTI finished normally. 130 | 131 | .. note:: 132 | If you encounter an error, first read all the prompts in the console and carefully track all error and warning messages. If the issue is not clear from the console messages, check the log files stored in the directory specified in the config file. If the error persists and you need assistance, sharing the corresponding log file will help. 133 | 134 | 135 | Step 2.0: Run Step 0 of SARvey: Preparation 136 | ''''''''''''''''''''''''''''''''''''''''''' 137 | 138 | The first step creates an interferogram network and calculates the temporal coherence for all pixels. Run the following command: 139 | 140 | .. 
code-block:: bash 141 | 142 | sarvey -f config.json 0 0 143 | 144 | In the command line, you will see a list of parameters used by SARvey to run step 0. All parameters that have been changed from the default are indicated: 145 | 146 | .. code-block:: none 147 | 148 | ... 149 | 2024-06-19 11:04:28,137 - INFO - Parameter value default 150 | 2024-06-19 11:04:28,137 - INFO - _________ _____ _______ 151 | 2024-06-19 11:04:28,138 - INFO - num_cores 5 <--- 50 152 | 2024-06-19 11:04:28,138 - INFO - num_patches 1 1 153 | 2024-06-19 11:04:28,138 - INFO - apply_temporal_unwrapping True True 154 | 2024-06-19 11:04:28,138 - INFO - spatial_unwrapping_method puma puma 155 | 2024-06-19 11:04:28,138 - INFO - 156 | 2024-06-19 11:04:28,138 - INFO - --------------------------------------------------------------------------------- 157 | 2024-06-19 11:04:28,138 - INFO - STEP 0: PREPARATION 158 | 2024-06-19 11:04:28,138 - INFO - --------------------------------------------------------------------------------- 159 | 2024-06-19 11:04:28,138 - INFO - Parameter value default 160 | 2024-06-19 11:04:28,139 - INFO - _________ _____ _______ 161 | 2024-06-19 11:04:28,139 - INFO - start_date None None 162 | 2024-06-19 11:04:28,139 - INFO - end_date None None 163 | 2024-06-19 11:04:28,139 - INFO - ifg_network_type sb <--- delaunay 164 | 2024-06-19 11:04:28,139 - INFO - num_ifgs 3 3 165 | 2024-06-19 11:04:28,139 - INFO - max_tbase 100 100 166 | 2024-06-19 11:04:28,139 - INFO - filter_window_size 9 9 167 | ... 168 | 169 | After running this step, a `sbas` directory is created. Inside this directory, you can find the following files: 170 | 171 | .. 
code-block:: none 172 | 173 | outputs/ 174 | ├── temporal_coherence.h5 175 | ├── ifg_stack.h5 176 | ├── ifg_network.h5 177 | ├── coordinates_utm.h5 178 | ├── config.json 179 | ├── background_map.h5 180 | └── pic/ 181 | ├── step_0_temporal_phase_coherence.png 182 | ├── step_0_interferogram_network.png 183 | └── step_0_amplitude_image.png 184 | 185 | 186 | Check the PNG files inside the `outputs/pic` directory and ensure the amplitude image, interferogram network, and temporal coherence look fine. If you are not satisfied with the interferogram network, you can modify the corresponding parameters in the `config.json` file and run step 0 again. 187 | 188 | Use the following command to plot the interferograms: 189 | 190 | .. code-block:: bash 191 | 192 | sarvey_plot outputs/ifg_stack.h5 -i 193 | 194 | This command creates the interferograms as PNG files in the following directory: 195 | 196 | .. code-block:: none 197 | 198 | outputs/ 199 | └── pic/ 200 | └── ifgs/ 201 | ├── 0_ifg.png 202 | ├── 1_ifg.png 203 | └── ... 204 | 205 | Check the interferograms one by one and ensure they look reasonable. In various interferograms, there are fringes associated with deformation approximately at ranges 100-200, azimuth 40-60. 206 | 207 | 208 | Step 2.1: Run Step 1 of SARvey 209 | '''''''''''''''''''''''''''''' 210 | 211 | .. code-block:: bash 212 | 213 | sarvey -f config.json 1 1 214 | 215 | Outputs of this step are: 216 | 217 | .. code-block:: none 218 | 219 | outputs/ 220 | ├── point_network.h5 221 | ├── p1_ifg_wr.h5 222 | ├── point_network_parameter.h5 223 | └── pic/ 224 | ├── selected_pixels_temp_coh_0.8.png 225 | ├── step_1_mask_p1.png 226 | ├── step_1_arc_coherence.png 227 | ├── step_1_arc_coherence_reduced.png 228 | ├── step_1_rmse_vel_0th_iter.png 229 | └── step_1_rmse_dem_error_0th_iter.png 230 | 231 | 232 | Step 2.2: Run Step 2 of SARvey 233 | '''''''''''''''''''''''''''''' 234 | 235 | .. 
code-block:: bash 236 | 237 | sarvey -f config.json 2 2 238 | 239 | 240 | Outputs of this step are: 241 | 242 | .. code-block:: none 243 | 244 | outputs/ 245 | ├── p1_ifg_unw.h5 246 | ├── p1_ts.h5 247 | └── pic/ 248 | ├── step_2_estimation_dem_error.png 249 | └── step_2_estimation_velocity.png 250 | 251 | Step 2.3: Run Step 3 of SARvey 252 | '''''''''''''''''''''''''''''' 253 | 254 | .. code-block:: bash 255 | 256 | sarvey -f config.json 3 3 257 | 258 | 259 | Outputs of this step are: 260 | 261 | .. code-block:: none 262 | 263 | outputs/ 264 | ├── p2_coh80_ifg_wr.h5 265 | ├── p2_coh80_aps.h5 266 | ├── p1_aps.h5 267 | ├── p1_ts_filt.h5 268 | └── pic/ 269 | ├── step_3_temporal_autocorrelation.png 270 | ├── step_3_stable_points.png 271 | ├── selected_pixels_temp_coh_0.8.png 272 | └── step_3_mask_p2_coh80.png 273 | 274 | 275 | Step 2.4: Run Step 4 of SARvey 276 | '''''''''''''''''''''''''''''' 277 | 278 | .. code-block:: bash 279 | 280 | sarvey -f config.json 4 4 281 | 282 | .. outputs directory structure to be added 283 | 284 | 285 | The results of step 4 of SARvey, including the time series, are stored in the `p2_coh80_ts.h5` file. The file is named based on the `coherence_p2` parameter in the config.json file. 286 | 287 | 288 | Step 3: Plot Time Series Results 289 | """""""""""""""""""""""""""""""" 290 | 291 | Check the instruction on how to use the `sarvey_plot`. 292 | 293 | .. code-block:: bash 294 | 295 | sarvey_plot -h 296 | 297 | 298 | Plot the time series using the following command. Flag `-t` indicates that you want to plot the time series. 299 | 300 | .. code-block:: bash 301 | 302 | sarvey_plot outputs/p2_coh80_ts.h5 -t 303 | 304 | 305 | You can visualize velocity and DEM error estimation of second-order points. You can also visualize amplitude, DEM, or temporal coherence as the background. Right-click on any point to see its time series. As you will see in the plot, the density of measurement points on the dam is relatively low. 
In the next section, you will learn how to modify the config file to increase the density of points. 306 | 307 | 308 | Step 4: Modify Config File and Rerun SARvey 309 | """"""""""""""""""""""""""""""""""""""""""" 310 | 311 | Modify the config.json file and change **coherence_p2** from 0.8 to 0.7. 312 | 313 | Run steps 3 and 4 using the following command: 314 | 315 | .. code-block:: bash 316 | 317 | sarvey -f config.json 3 4 318 | 319 | 320 | A new file `p2_coh70_ts.h5` is created. You can now visualize this file that has a higher point density. 321 | 322 | .. code-block:: bash 323 | 324 | sarvey_plot outputs/p2_coh70_ts.h5 -t 325 | 326 | 327 | .. note:: 328 | Be cautious that reducing the value of **coherence_p2** too much may include noisy points of low quality in the analysis, potentially leading to poor final results. 329 | 330 | You should carefully read the :ref:`processing` documentation to understand the meaning of each parameter and carefully choose reasonable values. You should also check the details of all parameters using the -p flag in `sarvey` and decide how to tune them. 331 | 332 | .. code-block:: bash 333 | 334 | sarvey -f config.json 0 0 -p 335 | 336 | 337 | Step 5: Export to GIS Format 338 | """""""""""""""""""""""""""" 339 | 340 | Export the data to Shapefiles using the following command: 341 | 342 | .. code-block:: bash 343 | 344 | sarvey_export outputs/p2_coh70_ts.h5 -o outputs/shp/p2_coh70_ts.shp 345 | 346 | You can open the exported data in any GIS software. If you use QGIS, you can use the `InSAR Explorer `_ plugin to draw the time series. 347 | 348 | 349 | Step 6: Validate Your Results 350 | """"""""""""""""""""""""""""" 351 | 352 | You can download a copy of the final SARvey products from `this link `_. Use these files to compare your results and ensure everything worked correctly. 
353 | 354 | -------------------------------------------------------------------------------- /sarvey/filtering.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 21 | # 22 | # This program is distributed in the hope that it will be useful, but WITHOUT 23 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 24 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 25 | # details. 26 | # 27 | # You should have received a copy of the GNU Lesser General Public License along 28 | # with this program. If not, see . 
29 | 30 | """Filtering module for SARvey.""" 31 | import time 32 | import multiprocessing 33 | import matplotlib.pyplot as plt 34 | import numpy as np 35 | from scipy.interpolate import griddata 36 | import gstools as gs 37 | from logging import Logger 38 | 39 | from mintpy.utils import ptime 40 | 41 | import sarvey.utils as ut 42 | 43 | 44 | def launchSpatialFiltering(parameters: tuple): 45 | """Launch_spatial_filtering. 46 | 47 | Launches the spatial filtering to estimate the atmospheric phase screen with low-pass filtering. 48 | 49 | Parameters 50 | ---------- 51 | parameters: tuple 52 | Tuple containing the following parameters: 53 | 54 | idx_range: np.ndarray 55 | range of indices for the time series 56 | num_time: int 57 | number of time steps 58 | residuals: np.ndarray 59 | residual phase (size: num_points x num_ifgs) 60 | coord_utm1: np.ndarray 61 | coordinates in UTM of the first-order points for which the residuals are given (size: num_points_p1 x 2) 62 | coord_utm2: np.ndarray 63 | coordinates in UTM of the new points which shall be interpolated (size: num_points_p2 x 2) 64 | bins: np.ndarray 65 | bin edges for the variogram 66 | bool_plot: bool 67 | boolean flag to plot intermediate results 68 | logger: Logger 69 | Logging handler 70 | 71 | Returns 72 | ------- 73 | idx_range: np.ndarray 74 | range of indices for the time series 75 | aps1: np.ndarray 76 | atmospheric phase screen for the known points (size: num_points_p1 x num_ifgs) 77 | aps2: np.ndarray 78 | atmospheric phase screen for the new points (size: num_points_p2 x num_ifgs) 79 | """ 80 | # Unpack the parameters 81 | (idx_range, num_time, residuals, coord_utm1, coord_utm2, bins, bool_plot, logger) = parameters 82 | 83 | x = coord_utm1[:, 1] 84 | y = coord_utm1[:, 0] 85 | x_new = coord_utm2[:, 1] 86 | y_new = coord_utm2[:, 0] 87 | 88 | aps1 = np.zeros((coord_utm1.shape[0], num_time), dtype=np.float32) 89 | aps2 = np.zeros((coord_utm2.shape[0], num_time), dtype=np.float32) 90 | 91 | prog_bar = 
ptime.progressBar(maxValue=num_time) 92 | 93 | for i in range(num_time): 94 | field = residuals[:, i].astype(np.float32) 95 | 96 | # 1) estimate the variogram of the field 97 | bin_center, vario = gs.vario_estimate(pos=[x, y], field=field, bin_edges=bins) 98 | 99 | # 2) fit model to empirical variogram 100 | model = gs.Stable(dim=2) 101 | try: 102 | model.fit_variogram(x_data=bin_center, y_data=vario, nugget=True, max_eval=1500) 103 | except RuntimeError as err: 104 | logger.error(msg="\nIMAGE {}: Not able to fit variogram! {}".format(idx_range[i], err)) 105 | if bool_plot: 106 | fig, ax = plt.subplots(2, figsize=[10, 5]) 107 | sca1 = ax[0].scatter(x, y, c=field) 108 | plt.colorbar(sca1, ax=ax[0], pad=0.03, shrink=0.5) 109 | ax[0].set_title("Not able to fit variogram! - PS1 residuals") 110 | ax[1].scatter(bin_center, vario) 111 | ax[1].set_xlabel("distance in [m]") 112 | ax[1].set_ylabel("semi-variogram") 113 | plt.close(fig) 114 | prog_bar.update(value=i + 1, every=1, suffix='{}/{} images'.format(i + 1, num_time)) 115 | continue 116 | 117 | # 3) estimate parameters of kriging 118 | sk = gs.krige.Simple( 119 | model=model, 120 | cond_pos=[x, y], 121 | cond_val=field, 122 | ) 123 | 124 | # 4) evaluate the kriging model at ORIGINAL locations 125 | fld_sk, _ = sk((x, y), return_var=True) 126 | aps1[:, i] = fld_sk 127 | 128 | # 5) evaluate the kriging model at NEW locations 129 | fld_sk_new, var_sk_new = sk((x_new, y_new), return_var=True) 130 | aps2[:, i] = fld_sk_new 131 | 132 | prog_bar.update(value=i + 1, every=1, suffix='{}/{} images'.format(i + 1, num_time)) 133 | 134 | # 5) show results 135 | if bool_plot: 136 | min_val = np.min(field) 137 | max_val = np.max(field) 138 | 139 | fig, ax = plt.subplots(2, 2, figsize=[10, 5]) 140 | 141 | cur_ax = ax[0, 0] 142 | sca1 = cur_ax.scatter(x, y, c=field, vmin=min_val, vmax=max_val) 143 | plt.colorbar(sca1, ax=cur_ax, pad=0.03, shrink=0.5) 144 | cur_ax.set_title("PS1 residuals") 145 | 146 | cur_ax = ax[0, 1] 147 | cur_ax = 
model.plot(x_max=bin_center[-1], ax=cur_ax) 148 | cur_ax.scatter(bin_center, vario) 149 | cur_ax.set_xlabel("distance in [m]") 150 | cur_ax.set_ylabel("semi-variogram") 151 | 152 | if coord_utm2 is not None: 153 | cur_ax = ax[1, 0] 154 | sca2 = cur_ax.scatter(x_new, y_new, c=fld_sk_new, vmin=min_val, vmax=max_val) 155 | plt.colorbar(sca2, ax=cur_ax, pad=0.03, shrink=0.5) 156 | cur_ax.set_title("PS2 prediction of atmospheric effect") 157 | 158 | cur_ax = ax[0, 1] 159 | sca4 = cur_ax.scatter(x_new, y_new, c=var_sk_new) 160 | plt.colorbar(sca4, ax=cur_ax, pad=0.03, shrink=0.5) 161 | cur_ax.set_title("Variance of predicted atmospheric effect") 162 | 163 | plt.close(fig) 164 | 165 | return idx_range, aps1, aps2 166 | 167 | 168 | def estimateAtmosphericPhaseScreen(*, residuals: np.ndarray, coord_utm1: np.ndarray, coord_utm2: np.ndarray, 169 | num_cores: int = 1, bool_plot: bool = False, 170 | logger: Logger) -> tuple[np.ndarray, np.ndarray]: 171 | """Estimate_atmospheric_phase_screen. 172 | 173 | Estimates the atmospheric phase screen from a stack of phase time series for a sparse set of points. 174 | Kriging is used to estimate the spatial dependence and to interpolate the phase screen over a set of new points. 
def estimateAtmosphericPhaseScreen(*, residuals: np.ndarray, coord_utm1: np.ndarray, coord_utm2: np.ndarray,
                                   num_cores: int = 1, bool_plot: bool = False,
                                   logger: Logger) -> tuple[np.ndarray, np.ndarray]:
    """Estimate_atmospheric_phase_screen.

    Estimates the atmospheric phase screen from a stack of phase time series for a sparse set of
    points. Simple kriging captures the spatial dependence of the residuals and interpolates the
    phase screen onto a second set of points.

    Parameters
    ----------
    residuals: np.ndarray
        residual phase (size: num_points1 x num_images)
    coord_utm1: np.ndarray
        coordinates in UTM of the points for which the residuals are given (size: num_points1 x 2)
    coord_utm2: np.ndarray
        coordinates in UTM of the new points which shall be interpolated (size: num_points2 x 2)
    num_cores: int
        Number of cores
    bool_plot: bool
        boolean flag to plot intermediate results (default: False)
    logger: Logger
        Logging handler

    Returns
    -------
    aps1: np.ndarray
        atmospheric phase screen for the known points (size: num_points1 x num_images)
    aps2: np.ndarray
        atmospheric phase screen for the new points (size: num_points2 x num_images)
    """
    logger.info(msg="#" * 10 + " ESTIMATE ATMOSPHERIC PHASE SCREEN (KRIGING) " + "#" * 10)

    t_start = time.time()

    num_points1, num_time = residuals.shape  # num_time can be either num_ifgs or num_images
    num_points2 = coord_utm2.shape[0]

    # variogram bins are derived once from the geometry of the known points and shared by all workers
    bins = gs.variogram.standard_bins(pos=(coord_utm1[:, 1], coord_utm1[:, 0]),
                                      dim=2, latlon=False, mesh_type='unstructured', bin_no=30, max_dist=None)

    if num_cores == 1:
        task = (np.arange(0, num_time), num_time, residuals, coord_utm1, coord_utm2, bins, bool_plot, logger)
        _, aps1, aps2 = launchSpatialFiltering(parameters=task)
    else:
        logger.info(msg=f"start parallel processing with {num_cores} cores.")

        aps1 = np.zeros((num_points1, num_time), dtype=np.float32)
        aps2 = np.zeros((num_points2, num_time), dtype=np.float32)

        # never spawn more workers than there are images to process
        num_cores = min(num_cores, num_time)
        chunks = ut.splitDatasetForParallelProcessing(num_samples=num_time, num_cores=num_cores)

        tasks = []
        for chunk in chunks:
            tasks.append((chunk, chunk.shape[0], residuals[:, chunk],
                          coord_utm1, coord_utm2, bins, False, logger))

        with multiprocessing.Pool(processes=num_cores) as pool:
            results = pool.map(func=launchSpatialFiltering, iterable=tasks)

        # merge the per-chunk results back into the full output arrays
        for chunk_idx, aps1_chunk, aps2_chunk in results:
            aps1[:, chunk_idx] = aps1_chunk
            aps2[:, chunk_idx] = aps2_chunk

    m, s = divmod(time.time() - t_start, 60)
    logger.debug(msg=f'time used: {m:02.0f} mins {s:02.1f} secs.\n')

    return aps1, aps2
def simpleInterpolation(*, residuals: np.ndarray, coord_utm1: np.ndarray, coord_utm2: np.ndarray,
                        interp_method: str = "linear"):
    """SimpleInterpolation.

    Simple interpolation of atmospheric phase screen using scipy's griddata function with options "linear" or "cubic".
    For pixels outside the convex hull of the input points, the nearest neighbor is used.

    Parameters
    ----------
    residuals: np.ndarray
        residual phase (size: num_points_p1 x num_images)
    coord_utm1: np.ndarray
        coordinates in UTM of the points for which the residuals are given (size: num_points_p1 x 2)
    coord_utm2: np.ndarray
        coordinates in UTM of the new points which shall be interpolated (size: num_points_p2 x 2)
    interp_method: str
        interpolation method (default: "linear"; options: "linear", "cubic")

    Returns
    -------
    aps1: np.ndarray
        atmospheric phase screen for the known points (size: num_points_p1 x num_images)
    aps2: np.ndarray
        atmospheric phase screen for the new points (size: num_points_p2 x num_images)
    """
    num_points2 = coord_utm2.shape[0]
    num_images = residuals.shape[1]

    aps1 = np.zeros_like(residuals, dtype=np.float32)
    aps2 = np.zeros((num_points2, num_images), dtype=np.float32)
    for i in range(num_images):
        # evaluating the interpolant at the input locations reproduces the residuals at the known points
        aps1[:, i] = griddata(coord_utm1, residuals[:, i], coord_utm1, method=interp_method)
        aps2[:, i] = griddata(coord_utm1, residuals[:, i], coord_utm2, method=interp_method)
        # interpolation with 'linear' or 'cubic' yields nan values for pixels that need to be extrapolated
        # (outside the convex hull of coord_utm1). Fill those pixels with 'nearest' neighbor interpolation.
        # (fix: the comment previously referred to a non-existent 'knn' method.)
        mask_extrapolate = np.isnan(aps2[:, i])
        aps2[mask_extrapolate, i] = griddata(
            coord_utm1,
            residuals[:, i],
            coord_utm2[mask_extrapolate, :],
            method='nearest'
        )

    return aps1, aps2
def exportDataToGisFormat(*, file_path: str, output_path: str, input_path: str,
                          correct_geolocation: bool = False, no_timeseries: bool = False, logger: Logger):
    """Export data to GIS format (shp or gpkg).

    Parameters
    ----------
    file_path: str
        Path to the input file.
    output_path: str
        Path for writing output file.
    input_path: str
        Path to slcStack.h5 and geometryRadar.h5.
    correct_geolocation: bool
        Correct geolocation or not
    no_timeseries: bool
        Export time series data or not
    logger: Logger
        Logger handle.
    """
    point_obj = Points(file_path=file_path, logger=logger)

    point_obj.open(input_path=input_path)

    # todo: add corrected height to output
    # todo: add option to mask the output to e.g. linear infrastructures or other AOI

    vel, demerr, _, coherence, omega, _ = ut.estimateParameters(obj=point_obj, ifg_space=False)

    stc = ut.spatiotemporalConsistency(coord_utm=point_obj.coord_utm, phase=point_obj.phase,
                                       wavelength=point_obj.wavelength)

    point_obj.phase *= point_obj.wavelength / (4 * np.pi)  # in [m]

    # extract displacement: remove the topographic phase contribution caused by the DEM error
    defo_ts = np.zeros_like(point_obj.phase, dtype=np.float32)
    for i in range(point_obj.num_points):
        phase_topo = (point_obj.ifg_net_obj.pbase / (point_obj.slant_range[i] * np.sin(point_obj.loc_inc[i])) *
                      demerr[i])
        defo_ts[i, :] = point_obj.phase[i, :] - phase_topo

    # convert from meters to millimeters (fix: comment previously claimed "transform into meters")
    defo_ts *= 1000  # in [mm]

    utm_crs_list = query_utm_crs_info(
        datum_name="WGS 84",
        area_of_interest=AreaOfInterest(
            west_lon_degree=point_obj.coord_lalo[:, 1].min(),
            south_lat_degree=point_obj.coord_lalo[:, 0].min(),
            east_lon_degree=point_obj.coord_lalo[:, 1].max(),
            north_lat_degree=point_obj.coord_lalo[:, 0].max(),
        ),
        contains=True
    )

    utm_epsg = utm_crs_list[0].code

    dates = ["D{}".format(date).replace("-", "") for date in point_obj.ifg_net_obj.dates]

    dates = dates[:point_obj.phase.shape[1]]  # remove dates which were not processed

    if no_timeseries:
        df_points = pd.DataFrame({})
    else:
        df_points = pd.DataFrame({date: () for date in dates})

    if correct_geolocation:
        logger.info("Calculate geolocation correction.")
        coord_correction = calculateGeolocationCorrection(path_geom=input_path,
                                                          point_obj=point_obj,
                                                          demerr=demerr,
                                                          logger=logger)
        coord_correction_norm = np.linalg.norm(coord_correction, axis=1)
        max_error_index = np.argmax(coord_correction_norm)
        logger.info(f"Maximum geolocation correction: {coord_correction_norm[max_error_index]:.1f} m "
                    f"corresponding to {demerr[max_error_index]:.1f} m DEM correction")
    else:
        coord_correction = 0
        logger.info("geolocation correction skipped.")

    # fix: use "+" instead of in-place "+=" so that point_obj.coord_utm is not mutated as a side effect
    coord_utm = point_obj.coord_utm + coord_correction
    df_points['coord'] = coord_utm.tolist()
    df_points['coord'] = df_points['coord'].apply(Point)
    df_points.insert(0, 'point_id', point_obj.point_id.tolist())
    df_points.insert(1, 'velocity', vel * 1000)  # in [mm]
    df_points.insert(2, 'coherence', coherence)
    df_points.insert(3, 'omega', omega)
    df_points.insert(4, 'st_consistency', stc * 1000)  # in [mm]
    df_points.insert(5, 'dem_error', demerr)  # in [m]
    df_points.insert(6, 'dem', point_obj.height)  # in [m]

    # shapefile attribute names are limited to 10 characters; truncate all column names accordingly
    df_points.columns = [col[:10] for col in df_points.columns]

    if not no_timeseries:
        for i, date in enumerate(dates):
            df_points[date] = defo_ts[:, i]

    gdf_points = gpd.GeoDataFrame(df_points, geometry='coord')
    gdf_points = gdf_points.set_crs(CRS.from_epsg(utm_epsg))
    logger.info(msg="write to file.")
    gdf_points.to_file(output_path)
def createParser():
    """Create the command line argument parser for sarvey_export."""
    parser = argparse.ArgumentParser(
        description="Export InSAR time series results from '.h5' to GIS data formats.",
        formatter_class=argparse.RawTextHelpFormatter,
        epilog="""EXAMPLE:
sarvey_export outputs/p2_coh50_ts.h5 -o outputs/shp/p2_coh50.shp # export time series to shapefile
sarvey_export outputs/p2_coh50_ts.h5 -o outputs/shp/p2_coh50.gpkg # export time series to geopackage
sarvey_export outputs/p2_coh90_ts.h5 -o outputs/shp/p2_coh90.shp -g # apply geolocation correction
sarvey_export outputs/p2_coh90_ts.h5 -o outputs/shp/p2_coh90.shp -g -t # skip time series data
""")

    # positional argument: the '.h5' file to export
    parser.add_argument('file_path', type=str, help='Path to input file.')

    parser.add_argument("-o", "--output_path", dest="output_path", type=str, default="",
                        help="Path to output file. If empty, the name of the input file will be used.")

    parser.add_argument("-l", "--log_dir", dest="log_dir", type=str, metavar="FILE", required=False,
                        default="logfiles/", help="Logfile directory (default: 'logfiles/')")

    parser.add_argument('-w', '--workdir', dest="workdir", default=None,
                        help='Working directory (default: current directory).')

    parser.add_argument('-g', '--correct_geo', dest="correct_geolocation", action="store_true", default=False,
                        help='Correct Geolocation (default: False).')

    parser.add_argument('-t', '--no-time-series', dest="no_timeseries", action="store_true", default=False,
                        help='Do not export time series (default: False).')

    parser.add_argument('--version', action='version',
                        version=f"SARvey version {version.__version__} - {version.__versionalias__}, "
                                f"{version.__versiondate__}")

    return parser
def main(iargs=None):
    """Run Main.

    Parameters
    ----------
    iargs: list
        command line arguments (default: None, i.e. sys.argv is parsed)
    """
    parser = createParser()
    args = parser.parse_args(iargs)

    # set up logging to console and to a timestamped logfile
    log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    logger = logging.getLogger(__name__)

    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(log_format)
    logger.addHandler(console_handler)
    logging_level = logging.getLevelName("INFO")
    logger.setLevel(logging_level)

    if args.workdir is None:
        args.workdir = os.path.abspath(os.path.curdir)
    else:
        logger.info(msg="Working directory: {}".format(args.workdir))

    args.log_dir = join(args.workdir, args.log_dir)
    current_datetime = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
    log_filename = f"sarvey_export_{current_datetime}.log"

    # fix: makedirs with exist_ok creates missing parent directories and avoids the
    # check-then-create race of "if not exists: mkdir"
    os.makedirs(args.log_dir, exist_ok=True)
    file_handler = logging.FileHandler(filename=join(args.log_dir, log_filename))
    file_handler.setFormatter(log_format)
    logger.addHandler(file_handler)

    showLogoSARvey(logger=logger, step="Export results")

    # read config file to retrieve location of inputs
    config_file_path = os.path.abspath(join(args.workdir, dirname(args.file_path), "config.json"))

    if not os.path.exists(config_file_path):
        # check if any config file is available in upper directory (backward compatibility)
        parent_dir = join(dirname(config_file_path), "..")
        # fix: os.listdir returns bare names; they must be joined with parent_dir before
        # isfile/abspath, otherwise they were resolved against the current working directory
        files = np.array([os.path.abspath(join(parent_dir, f)) for f in os.listdir(parent_dir)
                          if os.path.isfile(join(parent_dir, f))])
        potential_configs = np.array([(basename(f).split(".")[-1] == "json") and ("config" in basename(f))
                                      for f in files])
        # fix: "not potential_configs.any()" also handles an empty directory, where the previous
        # boolean-mask indexing raised an IndexError on a float-dtype empty array
        if not potential_configs.any():
            raise FileNotFoundError(f"Backup configuration file not found: {config_file_path}!")
        else:
            logger.warning(msg=f"Backup configuration file not found: {config_file_path}!")
            logger.warning(msg=f"Other configuration files automatically detected: {files[potential_configs]}!")
            logger.warning(msg=f"Automatically selected configuration file: {files[potential_configs][0]}!")
            config_file_path = files[potential_configs][0]

    config = loadConfiguration(path=config_file_path)

    # derive output directory, file name and format from the command line arguments
    if args.output_path == "":
        output_dir = args.workdir
        # NOTE(review): assumes the input file name contains a dot (e.g. "p2_coh80_ts.h5")
        output_fname = basename(args.file_path).split(".")[-2]
        output_format = "shp"
        args.output_path = join(output_dir, output_fname + "." + output_format)
    else:
        output_dir = join(args.workdir, dirname(args.output_path))
        output_fname = basename(args.output_path)
        name_splitted = output_fname.split(".")
        if len(name_splitted) == 1:
            args.output_path = join(output_dir, output_fname + ".shp")  # use shapefile as default format
        elif len(name_splitted) == 2:
            output_format = name_splitted[-1]  # use specified format
            if (output_format != "shp") and (output_format != "gpkg"):
                logger.error(msg=f"Output format not supported: {output_format}!")
                raise ValueError
            logger.info(msg=f"Detected output format: {output_format}.")
            args.output_path = join(output_dir, output_fname)
        else:
            logger.error(msg=f"Output format was not recognized! {output_fname}")
            raise ValueError

    logger.info(msg=f"Output file: {args.output_path}")

    # specify geolocation status
    logger.info(msg=f"Correct geolocation: {args.correct_geolocation}")

    # specify time series flag
    logger.info(msg=f"Export time series data: {not args.no_timeseries}")

    os.makedirs(output_dir, exist_ok=True)

    exportDataToGisFormat(file_path=args.file_path, output_path=args.output_path,
                          input_path=config.general.input_path,
                          correct_geolocation=args.correct_geolocation, no_timeseries=args.no_timeseries,
                          logger=logger)


if __name__ == '__main__':
    main()
# It is also licensed under the GPL, but it REQUIRES that you
# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
# This requirement extends to SARvey.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <https://www.gnu.org/licenses/>.

"""MTI module for SARvey."""

import argparse
import os
import shutil
from os.path import join

import json5
import matplotlib
import sys
import logging
import time
from logging import Logger
from pydantic import TypeAdapter

from sarvey import version
from sarvey.console import printStep, printCurrentConfig, showLogoSARvey
from sarvey.processing import Processing
from sarvey.config import Config, loadConfiguration
from sarvey.utils import checkIfRequiredFilesExist

try:
    matplotlib.use('QtAgg')
except ImportError as e:
    print(e)

EXAMPLE = """Example:
sarvey -f config.json 0 0 -g # create default config file with the name config.json and exit
sarvey -f config.json 0 0 # run only preparation step
sarvey -f config.json 0 4 # run all processing steps

sarvey -f config.json 0 0 -p # print explanation of the configuration parameters to console
"""

# Mapping from processing step number to step name (used for console output).
STEP_DICT = {
    0: "PREPARATION",
    1: "CONSISTENCY CHECK",
    2: "UNWRAPPING",
    3: "FILTERING",
    4: "DENSIFICATION",
}


def run(*, config: Config, args: argparse.Namespace, logger: Logger):
    """Run the specified processing steps.

    Parameters
    ----------
    config: Config
        object of configuration class.
    args: argparse.Namespace
        command line input arguments
    logger: Logger
        Logging handler.
    """
    showLogoSARvey(logger=logger, step="MTInSAR")

    start_time = time.time()

    steps = range(args.start, args.stop + 1)

    config_default_dict = generateTemplateFromConfigModel()

    proc_obj = Processing(path=config.general.output_path, config=config, logger=logger)

    printCurrentConfig(config_section=config.general.model_dump(),
                       config_section_default=config_default_dict["general"],
                       logger=logger)

    if config.phase_linking.use_phase_linking_results:
        printCurrentConfig(config_section=config.phase_linking.model_dump(),
                           config_section_default=config_default_dict["phase_linking"],
                           logger=logger)

    if 0 in steps:
        start_time_step = time.time()
        printStep(step=0, step_dict=STEP_DICT, logger=logger)
        printCurrentConfig(config_section=config.preparation.model_dump(),
                           config_section_default=config_default_dict["preparation"],
                           logger=logger)
        proc_obj.runPreparation()
        m, s = divmod(time.time() - start_time_step, 60)
        logger.info(f"Finished step 0 PREPARATION normally in {m:02.0f} mins {s:02.1f} secs.")
    # The prerequisite lists are extended unconditionally so that a run starting at a
    # later step still checks for the products of all earlier steps.
    required_files = ["background_map.h5", "coordinates_utm.h5", "ifg_network.h5", "ifg_stack.h5",
                      "temporal_coherence.h5"]

    if 1 in steps:
        start_time_step = time.time()
        checkIfRequiredFilesExist(
            path_to_files=config.general.output_path,
            required_files=required_files,
            logger=logger
        )
        printStep(step=1, step_dict=STEP_DICT, logger=logger)
        printCurrentConfig(config_section=config.consistency_check.model_dump(),
                           config_section_default=config_default_dict["consistency_check"],
                           logger=logger)
        proc_obj.runConsistencyCheck()
        m, s = divmod(time.time() - start_time_step, 60)
        logger.info(f"Finished step 1 CONSISTENCY CHECK normally in {m:02.0f} mins {s:02.1f} secs.")
    required_files.extend(["point_network.h5", "point_network_parameter.h5", "p1_ifg_wr.h5"])

    if 2 in steps:
        start_time_step = time.time()
        checkIfRequiredFilesExist(
            path_to_files=config.general.output_path,
            required_files=required_files,
            logger=logger
        )
        printStep(step=2, step_dict=STEP_DICT, logger=logger)
        printCurrentConfig(config_section=config.unwrapping.model_dump(),
                           config_section_default=config_default_dict["unwrapping"],
                           logger=logger)
        if proc_obj.config.general.apply_temporal_unwrapping:
            proc_obj.runUnwrappingTimeAndSpace()
        else:
            proc_obj.runUnwrappingSpace()
        m, s = divmod(time.time() - start_time_step, 60)
        logger.info(f"Finished step 2 UNWRAPPING normally in {m:02.0f} mins {s:02.1f} secs.")
    required_files.extend(["p1_ifg_unw.h5", "p1_ts.h5"])

    if 3 in steps:
        start_time_step = time.time()
        checkIfRequiredFilesExist(
            path_to_files=config.general.output_path,
            required_files=required_files,
            logger=logger
        )
        printStep(step=3, step_dict=STEP_DICT, logger=logger)
        printCurrentConfig(config_section=config.filtering.model_dump(),
                           config_section_default=config_default_dict["filtering"],
                           logger=logger)
        proc_obj.runFiltering()
        m, s = divmod(time.time() - start_time_step, 60)
        logger.info(f"Finished step 3 FILTERING normally in {m:02.0f} mins {s:02.1f} secs.")
    # filenames of the second-order products encode the coherence threshold in percent
    coh_value = int(config.filtering.coherence_p2 * 100)
    required_files.extend(["p1_aps.h5", f"p2_coh{coh_value}_ifg_wr.h5", f"p2_coh{coh_value}_aps.h5"])

    if 4 in steps:
        start_time_step = time.time()
        checkIfRequiredFilesExist(
            path_to_files=config.general.output_path,
            required_files=required_files,
            logger=logger
        )
        printStep(step=4, step_dict=STEP_DICT, logger=logger)
        printCurrentConfig(config_section=config.densification.model_dump(),
                           config_section_default=config_default_dict["densification"],
                           logger=logger)
        if proc_obj.config.general.apply_temporal_unwrapping:
            proc_obj.runDensificationTimeAndSpace()
        else:
            proc_obj.runDensificationSpace()
        m, s = divmod(time.time() - start_time_step, 60)
        logger.info(f"Finished step 4 DENSIFICATION normally in {m:02.0f} mins {s:02.1f} secs.")

    m, s = divmod(time.time() - start_time, 60)
    # fix: original message read "normally in in"
    logger.info(f"SARvey MTI finished normally in {m:02.0f} mins {s:02.1f} secs.")
    # close log-file to avoid problems with deleting the files
    if logger.hasHandlers():
        for handler in logger.handlers[:]:
            logger.removeHandler(handler)
            handler.flush()
            handler.close()


def generateTemplateFromConfigModel():
    """Create a nested dictionary with the default configuration.

    Returns
    -------
    top_level_dict: dict
        {section name: {parameter name: default value}} derived from the pydantic
        Config model.
    """
    top_level_dict = {}

    for sec_name, field in Config.model_fields.items():
        sec_cls = field.annotation
        # FieldInfo.default is already None when no default is declared, so the
        # original `if default is not None ... else None` collapses to a direct read.
        top_level_dict[sec_name] = {
            subsec_name: subsec_def.default
            for subsec_name, subsec_def in sec_cls.model_fields.items()
        }

    return top_level_dict


def createParser():
    """Create the command line argument parser for the sarvey MTI workflow.

    Returns
    -------
    parser: argparse.ArgumentParser
        configured parser with positional start/stop steps and optional arguments.
    """
    parser = argparse.ArgumentParser(
        description='Multitemporal InSAR processing workflow\n\n' +
                    'Run the following steps:\n' +
                    '0 - preparation\n' +
                    '1 - consistency check\n' +
                    '2 - spatial unwrapping\n' +
                    '3 - filtering\n' +
                    '4 - densification',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog=EXAMPLE)

    parser.add_argument('start', choices={0, 1, 2, 3, 4}, type=int,
                        help='Start of processing')

    parser.add_argument('stop', choices={0, 1, 2, 3, 4}, type=int,
                        help='Stop of processing')

    parser.add_argument("-f", "--filepath", type=str, required=True, metavar="FILE",
                        help="Path to the config.json file.")

    parser.add_argument("-g", "--generate_config", action="store_true", default=False, dest="generate_config",
                        help="Write default configuration to file specified by '-f'.")

    parser.add_argument("-p", "--print_config_explanation", action="store_true", default=False,
                        dest="print_config_explanation",
                        help="Prints exhaustive explanations about configuration to console.")

    parser.add_argument('-w', '--workdir', default=None, dest="workdir",
                        help='Working directory (default: current directory).')

    parser.add_argument('--version', action='version',
                        version=f"SARvey version {version.__version__} - {version.__versionalias__}, "
                                f"{version.__versiondate__}")

    return parser


def main(iargs=None):
    """Run the sarvey MTI command line entry point.

    Parses the command line arguments, sets up console and file logging, loads the
    configuration and dispatches the selected processing steps to :func:`run`.

    Parameters
    ----------
    iargs: list
        command line arguments (default: None, i.e. read from sys.argv).
    """
    parser = createParser()
    args = parser.parse_args(iargs)

    # initiate logger
    logging_level = logging.getLevelName('DEBUG')  # set a default value before until level is read from config

    log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    logger = logging.getLogger(__name__)

    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(log_format)
    logger.addHandler(console_handler)
    logger.setLevel(logging_level)

    if args.generate_config:
        logger.info(msg=f"Write default config to file: {args.filepath}.")
        default_config_dict = generateTemplateFromConfigModel()
        with open(args.filepath, "w") as f:
            f.write(json5.dumps(default_config_dict, indent=4))
        return 0

    if args.print_config_explanation:
        top_level_schema = TypeAdapter(Config).json_schema()
        print(json5.dumps(top_level_schema, indent=2))
        return 0

    if args.stop < args.start:
        msg = f"Selected Start step ({args.start}) must be less than or equal to Stop step ({args.stop}). Exiting!"
        logger.error(msg)
        raise ValueError(msg)

    if args.workdir is None:
        args.workdir = os.path.abspath(os.path.curdir)
    logger.info(f"Working directory: {args.workdir}")

    config_file_path = os.path.abspath(join(args.workdir, args.filepath))

    config = loadConfiguration(path=config_file_path)

    current_datetime = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
    log_filename = f"sarvey_{current_datetime}.log"
    logpath = config.general.logfile_path
    # makedirs (instead of mkdir) so that a nested logfile_path also works
    if not os.path.exists(logpath):
        os.makedirs(logpath)
    file_handler = logging.FileHandler(filename=join(logpath, log_filename))
    file_handler.setFormatter(log_format)
    file_logging_level = logging.getLevelName("DEBUG")
    file_handler.setLevel(file_logging_level)
    logger.addHandler(file_handler)

    logging_level = logging.getLevelName(config.general.logging_level)
    console_handler.setLevel(logging_level)

    config.general.output_path = os.path.abspath(join(args.workdir, config.general.output_path))
    if config.consistency_check.mask_p1_file is not None:
        config.consistency_check.mask_p1_file = os.path.abspath(
            join(args.workdir, config.consistency_check.mask_p1_file))
    if config.filtering.mask_p2_file is not None:
        config.filtering.mask_p2_file = os.path.abspath(
            join(args.workdir, config.filtering.mask_p2_file))

    # create all necessary directories (including intermediate ones)
    if not os.path.exists(config.general.output_path):
        os.makedirs(config.general.output_path)
    if not os.path.exists(join(config.general.output_path, "pic")):
        os.mkdir(join(config.general.output_path, "pic"))

    # copy config file to output directory to ensure that there is always a backup config file with latest parameters
    shutil.copy2(src=config_file_path, dst=join(config.general.output_path, "config.json"))

    run(config=config, args=args, logger=logger)

| if __name__ == '__main__': 336 | main() 337 | -------------------------------------------------------------------------------- /sarvey/preparation.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 4 | # 5 | # Copyright (C) 2021-2025 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 6 | # 7 | # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 8 | # of the SAR4Infra project with funds of the German Federal Ministry for Digital and 9 | # Transport and contributions from Landesamt fuer Vermessung und Geoinformation 10 | # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 11 | # 12 | # This program is free software: you can redistribute it and/or modify it under 13 | # the terms of the GNU General Public License as published by the Free Software 14 | # Foundation, either version 3 of the License, or (at your option) any later 15 | # version. 16 | # 17 | # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ 18 | # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 19 | # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 20 | # This requirement extends to SARvey. 21 | # 22 | # This program is distributed in the hope that it will be useful, but WITHOUT 23 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 24 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 25 | # details. 26 | # 27 | # You should have received a copy of the GNU Lesser General Public License along 28 | # with this program. If not, see . 
29 | 30 | """Preparation module for SARvey.""" 31 | import datetime 32 | import matplotlib.pyplot as plt 33 | import numpy as np 34 | from logging import Logger 35 | from os.path import join 36 | 37 | import mintpy.utils.readfile as readfile 38 | 39 | from sarvey import viewer 40 | import sarvey.utils as ut 41 | from sarvey.objects import CoordinatesUTM, AmplitudeImage, BaseStack, Points 42 | from sarvey.triangulation import PointNetworkTriangulation 43 | 44 | 45 | def createTimeMaskFromDates(*, start_date: str, stop_date: str, date_list: list, logger: Logger): 46 | """Create a mask with selected dates within given time frame. 47 | 48 | Parameters 49 | ---------- 50 | start_date: str 51 | Start date. 52 | stop_date: str 53 | Stop date. 54 | date_list: list 55 | all avaiable dates in the slcStack.h5. 56 | logger: Logger 57 | Logging handler. 58 | 59 | Returns 60 | ------- 61 | time_mask: np.ndarray 62 | mask with True for selected dates. 63 | num_slc: int 64 | number of selected images. 65 | result_date_list: list 66 | list of selected dates. 67 | """ 68 | time_mask = np.ones((len(date_list)), dtype=np.bool_) 69 | date_list = [datetime.date(year=int(d[:4]), month=int(d[4:6]), day=int(d[6:])) for d in date_list] 70 | 71 | if (start_date is None) and (stop_date is None): 72 | num_slc = time_mask.shape[0] 73 | result_date_list = [date.isoformat() for date in date_list] 74 | logger.debug( 75 | f"Use all {num_slc} images in SLC stack. Time frame: {result_date_list[0]} - {result_date_list[-1]}") 76 | return time_mask, num_slc, result_date_list 77 | 78 | if start_date is None: 79 | start_date = min(date_list) 80 | else: 81 | start_date = datetime.date.fromisoformat(start_date) 82 | 83 | if stop_date is None: 84 | stop_date = max(date_list) 85 | else: 86 | stop_date = datetime.date.fromisoformat(stop_date) 87 | 88 | if start_date >= stop_date: 89 | msg = (f"Invalid date range: Start Date ({start_date}) must be earlier than Stop Date ({stop_date}). 
" 90 | f"Please correct the config file and try again. Exiting!") 91 | logger.error(msg) 92 | raise ValueError(msg) 93 | 94 | if stop_date < min(date_list): 95 | msg = (f"Invalid Stop Date: Specified Stop Date ({stop_date}) must be later than the first image date" 96 | f" ({min(date_list)}). Please update the Stop Date in the config file and try again. Exiting!") 97 | logger.error(msg) 98 | raise ValueError(msg) 99 | 100 | if start_date > max(date_list): 101 | msg = ( 102 | f"Invalid Start Date: Specified Start Date ({start_date}) must be earlier than the last image date " 103 | f"({max(date_list)}). Please update the Start Date in the configuration file and try again. Exiting!") 104 | logger.error(msg) 105 | raise ValueError(msg) 106 | 107 | shift = " " 108 | logger.debug(f"{shift}{'----------------------':>24}") 109 | logger.debug(f"{shift}| {'Date':>10} | {'Selected':>10} |") 110 | logger.debug(f"{shift}| {'----------':>10} | {'----------':>10} |") 111 | 112 | result_date_list = list() 113 | for i, date in enumerate(date_list): 114 | if (date < start_date) or (date > stop_date): 115 | time_mask[i] = False 116 | else: 117 | result_date_list.append(date.isoformat()) 118 | val = " x " if time_mask[i] else " " 119 | logger.debug(f"{shift}| {date.isoformat():>10} | {val:>10} |") 120 | 121 | logger.debug(f"{shift}| {'----------':>10} | {'----------':>10} |") 122 | 123 | num_slc = time_mask[time_mask].shape[0] 124 | total_num_slc = time_mask.shape[0] 125 | logger.debug(f"Selected {num_slc} out of {total_num_slc} acquisitions in time frame: " 126 | f"{start_date.isoformat()} to {stop_date.isoformat()}") 127 | 128 | return time_mask, num_slc, result_date_list 129 | 130 | 131 | def readSlcFromMiaplpy(*, path: str, box: tuple = None, logger: Logger) -> np.ndarray: 132 | """Read SLC data from phase-linking results of Miaplpy. 133 | 134 | Parameters 135 | ---------- 136 | path: str 137 | Path to the phase_series.h5 file. 138 | box: tuple 139 | Bounding Box to read from. 
140 | logger: Logger 141 | Logging handler. 142 | 143 | Returns 144 | ------- 145 | slc: np.ndarray 146 | slc stack created from phase-linking results. 147 | """ 148 | logger.info("reading phase from MiaplPy results...") 149 | phase = readfile.read(path, datasetName='phase', box=box)[0] 150 | 151 | logger.info("reading amplitude from MiaplPy results...") 152 | amp = readfile.read(path, datasetName='amplitude', box=box)[0] 153 | 154 | logger.info("combining phase and amplitude to slc...") 155 | slc = amp * np.exp(phase * 1j) 156 | return slc 157 | 158 | 159 | def readCoherenceFromMiaplpy(*, path: str, box: tuple = None, logger: Logger) -> tuple[np.ndarray, dict]: 160 | """Read the coherence image from phase-linking of MiaplPy. 161 | 162 | Parameters 163 | ---------- 164 | path: str 165 | Path to phase_series.h5 file. 166 | box: tuple 167 | Bounding Box to read from. 168 | logger: Logger 169 | Logging handler. 170 | 171 | Returns 172 | ------- 173 | temp_coh: np.ndarray 174 | temporal coherence image from phase-linking results of MiaplPy. 175 | """ 176 | logger.info("reading quality from MiaplPy results...") 177 | temp_coh = readfile.read(path, datasetName='temporalCoherence', box=box)[0][1, :, :] 178 | return temp_coh 179 | 180 | 181 | def selectPixels(*, path: str, selection_method: str, thrsh: float, 182 | grid_size: int = None, bool_plot: bool = False, logger: Logger): 183 | """Select pixels based on temporal coherence. 184 | 185 | Parameters 186 | ---------- 187 | path: str 188 | Path to the directory with the temporal_coherence.h5 file. 189 | selection_method: str 190 | Pixel selection method. Currently, only "temp_coh" is implemented. 191 | thrsh: float 192 | Threshold for pixel selection. 193 | grid_size: int 194 | Grid size for sparse pixel selection. 195 | bool_plot: bool 196 | Plot the selected pixels. 197 | logger: Logger 198 | Logging handler. 199 | 200 | Returns 201 | ------- 202 | cand_mask: np.ndarray 203 | Mask with selected pixels. 
204 | """ 205 | quality = None 206 | grid_min_val = None 207 | cand_mask = None 208 | unit = None 209 | cmap = None 210 | # compute candidates 211 | if selection_method == "temp_coh": 212 | tcoh_file = join(path, "temporal_coherence.h5") 213 | logger.debug(f"Reading temporal coherence file: {tcoh_file}") 214 | temp_coh_obj = BaseStack(file=tcoh_file, logger=logger) 215 | quality = temp_coh_obj.read(dataset_name="temp_coh") 216 | logger.debug(f"[Min, Max] of all temporal coherence pixels: [{np.min(quality):.2f}, {np.max(quality):.2f}].)") 217 | logger.debug(f"[Min, Max] of all temporal coherence pixels excluding invalid values: " 218 | f"[{np.nanmin(quality):.2f}, {np.nanmax(quality):.2f}].)") 219 | cand_mask = quality >= thrsh 220 | grid_min_val = False 221 | unit = "Temporal\nCoherence [ ]" 222 | cmap = "lajolla" 223 | 224 | if selection_method == "miaplpy": 225 | error_msg = "This part is not developed yet. MiaplPy data is read in another way." 226 | logger.error(error_msg) 227 | raise NotImplementedError(error_msg) 228 | # pl_coherence = readCoherenceFromMiaplpy(path=join(path, 'inverted', 'phase_series.h5'), box=None, 229 | # logger=logger) 230 | # cand_mask = pl_coherence >= thrsh 231 | # quality = pl_coherence 232 | # grid_min_val = False 233 | # unit = "Phase-Linking\nCoherence [ ]" 234 | # cmap = "lajolla" 235 | 236 | logger.debug( 237 | f"Number of selected pixels using {thrsh:.2f} temporal coherence threshold: {np.sum(cand_mask)}") 238 | if grid_size is not None: # -> sparse pixel selection 239 | logger.debug(f"Select sparse pixels using grid size {grid_size} m.") 240 | coord_utm_file = join(path, "coordinates_utm.h5") 241 | logger.debug(f"Reading coordinates from file: {coord_utm_file}") 242 | coord_utm_obj = CoordinatesUTM(file_path=coord_utm_file, logger=logger) 243 | coord_utm_obj.open() 244 | box_list = ut.createSpatialGrid(coord_utm_img=coord_utm_obj.coord_utm, 245 | length=coord_utm_obj.coord_utm.shape[1], 246 | 
width=coord_utm_obj.coord_utm.shape[2], 247 | grid_size=grid_size, 248 | logger=logger)[0] 249 | logger.debug(f"Number of grid boxes for sparse pixel selection: {len(box_list)}.") 250 | cand_mask_sparse = ut.selectBestPointsInGrid(box_list=box_list, quality=quality, sel_min=grid_min_val) 251 | cand_mask &= cand_mask_sparse 252 | logger.debug(f"Number of selected sparse pixels: {np.sum(cand_mask)}") 253 | min_map_coord = np.min(coord_utm_obj.coord_utm[..., cand_mask], axis=1) 254 | max_map_coord = np.max(coord_utm_obj.coord_utm[..., cand_mask], axis=1) 255 | logger.debug( 256 | f"[Min, Max] of map coordinates of selected points along first axis: " 257 | f"[{min_map_coord[0]:.1f}, {max_map_coord[0]:.1f}].") 258 | logger.debug( 259 | f"[Min, Max] of map coordinates of selected points along second axis: " 260 | f"[{min_map_coord[1]:.1f}, {max_map_coord[1]:.1f}].") 261 | 262 | if bool_plot: 263 | logger.debug("Plotting selected pixels...") 264 | coord_xy = np.array(np.where(cand_mask)).transpose() 265 | bmap_obj = AmplitudeImage(file_path=join(path, "background_map.h5")) 266 | viewer.plotScatter(value=quality[cand_mask], coord=coord_xy, bmap_obj=bmap_obj, ttl="Selected pixels", 267 | unit=unit, s=2, cmap=cmap, vmin=0, vmax=1, logger=logger) 268 | # if grid_size is not None: 269 | # psViewer.plotGridFromBoxList(box_list, ax=ax, edgecolor="k", linewidth=0.2) 270 | plt.tight_layout() 271 | plt.gcf().savefig(join(path, "pic", "selected_pixels_{}_{}.png".format(selection_method, thrsh)), 272 | dpi=300) 273 | plt.close(plt.gcf()) 274 | 275 | return cand_mask 276 | 277 | 278 | def createArcsBetweenPoints(*, point_obj: Points, knn: int = None, max_arc_length: float = np.inf, 279 | logger: Logger) -> np.ndarray: 280 | """Create a spatial network of arcs to triangulate the points. 281 | 282 | All points are triangulated with a Delaunay triangulation. If knn is given, the triangulation is done with the k 283 | nearest neighbors. Too long arcs are removed from the network. 
If, afterward, the network is not connected, a 284 | delaunay triangulation is performed again to ensure connectivity in the network. 285 | 286 | Parameters 287 | ---------- 288 | point_obj: Points 289 | Point object. 290 | knn: int 291 | Number of nearest neighbors to consider (default: None). 292 | max_arc_length: float 293 | Maximum length of an arc. Longer arcs will be removed. Default: np.inf. 294 | logger: Logger 295 | Logging handler. 296 | 297 | Returns 298 | ------- 299 | arcs: np.ndarray 300 | Arcs of the triangulation containing the indices of the points for each arc. 301 | """ 302 | logger.debug(f"Triangulating {point_obj.coord_xy.shape[0]} points...") 303 | triang_obj = PointNetworkTriangulation(coord_xy=point_obj.coord_xy, coord_utmxy=point_obj.coord_utm, logger=logger) 304 | 305 | if knn is not None: 306 | triang_obj.triangulateKnn(k=knn) 307 | 308 | triang_obj.triangulateGlobal() 309 | 310 | ut_mask = np.triu(triang_obj.dist_mat, k=1) != 0 311 | logger.debug(f"Triangulation arc lengths - " 312 | f"Min: {np.min(triang_obj.dist_mat[ut_mask]):.0f} m, " 313 | f"Max: {np.max(triang_obj.dist_mat[ut_mask]):.0f} m, " 314 | f"Mean: {np.mean(triang_obj.dist_mat[ut_mask]):.0f} m.") 315 | 316 | triang_obj.removeLongArcs(max_dist=max_arc_length) 317 | 318 | logger.debug(f"Triangulation arc lengths after long arc removal - " 319 | f"Min: {np.min(triang_obj.dist_mat[ut_mask]):.0f} m, " 320 | f"Max: {np.max(triang_obj.dist_mat[ut_mask]):.0f} m, " 321 | f"Mean: {np.mean(triang_obj.dist_mat[ut_mask]):.0f} m.") 322 | 323 | if not triang_obj.isConnected(): 324 | logger.debug("Network is not connected. 
Triangulating again with global delaunay...") 325 | triang_obj.triangulateGlobal() 326 | 327 | logger.info("Retrieve arcs from adjacency matrix.") 328 | arcs = triang_obj.getArcsFromAdjMat() 329 | logger.debug(f"Final number of arcs: {arcs.shape[0]}.") 330 | 331 | ut_mask = np.triu(triang_obj.dist_mat, k=1) != 0 332 | logger.debug(f"Final triangulation arc lengths - " 333 | f"Min: {np.min(triang_obj.dist_mat[ut_mask]):.0f} m, " 334 | f"Max: {np.max(triang_obj.dist_mat[ut_mask]):.0f} m, " 335 | f"Mean: {np.mean(triang_obj.dist_mat[ut_mask]):.0f} m.") 336 | 337 | return arcs 338 | --------------------------------------------------------------------------------