├── .coveragerc
├── .github
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── dependabot.yml
│   ├── scripts
│   │   ├── apply_license_header.py
│   │   ├── generate_pip_deps_from_conda.py
│   │   ├── generate_yml_env_fixed_py.py
│   │   └── license-header.txt
│   └── workflows
│       ├── pip-checks.yml
│       ├── pre-commit.yml
│       ├── python-publish.yml
│       └── python-tests.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .readthedocs.yaml
├── .relint.yml
├── AUTHORS.md
├── CODE-OF-CONDUCT.md
├── CONTRIBUTING.md
├── GOVERNANCE.md
├── HOW_TO_RELEASE.md
├── LICENSE
├── Makefile
├── NOTICE
├── README.md
├── binder
│   ├── apt.txt
│   ├── environment.yml
│   ├── extra-environment.yml
│   └── postBuild
├── dev-environment.yml
├── doc
│   ├── Makefile
│   ├── make.bat
│   └── source
│       ├── _static
│       │   ├── cnes_logo.svg
│       │   ├── cnes_logo_dark.svg
│       │   ├── css
│       │   │   └── custom.css
│       │   ├── nasa_logo.svg
│       │   ├── snsf_logo.svg
│       │   ├── snsf_logo_dark.svg
│       │   ├── xdem_logo.svg
│       │   ├── xdem_logo_dark.svg
│       │   ├── xdem_logo_only.svg
│       │   └── xdem_logo_only_dark.svg
│       ├── _templates
│       │   ├── module.md
│       │   └── module.rst
│       ├── about_xdem.md
│       ├── accuracy_precision.md
│       ├── api.md
│       ├── authors.md
│       ├── biascorr.md
│       ├── cheatsheet.md
│       ├── citation.md
│       ├── code
│       │   ├── comparison_plot_local_hypsometric_interpolation.py
│       │   ├── comparison_plot_regional_hypsometric_interpolation.py
│       │   ├── comparison_plot_spatial_interpolation.py
│       │   ├── intricacies_datatypes.py
│       │   ├── robust_mean_std.py
│       │   ├── robust_vario.py
│       │   ├── spatialstats_heterosc_slope.py
│       │   ├── spatialstats_standardizing.py
│       │   ├── spatialstats_stationarity_assumption.py
│       │   └── spatialstats_variogram_covariance.py
│       ├── conf.py
│       ├── config.md
│       ├── coregistration.md
│       ├── credits.md
│       ├── dem_class.md
│       ├── ecosystem.md
│       ├── elevation_intricacies.md
│       ├── elevation_objects.md
│       ├── elevation_point_cloud.md
│       ├── funding.md
│       ├── gapfill.md
│       ├── guides.md
│       ├── history.md
│       ├── how_to_install.md
│       ├── imgs
│       │   ├── accuracy_precision_dem.png
│       │   ├── precision_accuracy.png
│       │   └── stable_terrain_diagram.png
│       ├── index.md
│       ├── license.md
│       ├── mission.md
│       ├── publis.md
│       ├── quick_start.md
│       ├── release_notes.md
│       ├── robust_estimators.md
│       ├── spatial_stats.md
│       ├── sphinxext.py
│       ├── static_surfaces.md
│       ├── terrain.md
│       ├── uncertainty.md
│       └── vertical_ref.md
├── environment.yml
├── examples
│   ├── advanced
│   │   ├── README.rst
│   │   ├── plot_blockwise_coreg.py
│   │   ├── plot_demcollection.py
│   │   ├── plot_deramp.py
│   │   ├── plot_heterosc_estimation_modelling.py
│   │   ├── plot_norm_regional_hypso.py
│   │   ├── plot_slope_methods.py
│   │   ├── plot_standardization.py
│   │   └── plot_variogram_estimation_modelling.py
│   └── basic
│       ├── README.rst
│       ├── plot_dem_subtraction.py
│       ├── plot_icp_coregistration.py
│       ├── plot_infer_heterosc.py
│       ├── plot_infer_spatial_correlation.py
│       ├── plot_logging_configuration.py
│       ├── plot_nuth_kaab.py
│       ├── plot_spatial_error_propagation.py
│       └── plot_terrain_attributes.py
├── mypy.ini
├── pyproject.toml
├── requirements.txt
├── setup.cfg
├── setup.py
├── tests
│   ├── conftest.py
│   ├── test_coreg
│   │   ├── __init__.py
│   │   ├── test_affine.py
│   │   ├── test_base.py
│   │   ├── test_biascorr.py
│   │   ├── test_blockwise.py
│   │   ├── test_filters.py
│   │   └── test_workflows.py
│   ├── test_ddem.py
│   ├── test_dem.py
│   ├── test_demcollection.py
│   ├── test_doc.py
│   ├── test_examples.py
│   ├── test_filters.py
│   ├── test_fit.py
│   ├── test_misc.py
│   ├── test_spatialstats.py
│   ├── test_terrain.py
│   ├── test_vcrs.py
│   └── test_volume.py
└── xdem
    ├── __init__.py
    ├── _typing.py
    ├── coreg
    │   ├── __init__.py
    │   ├── affine.py
    │   ├── base.py
    │   ├── biascorr.py
    │   ├── blockwise.py
    │   ├── filters.py
    │   └── workflows.py
    ├── ddem.py
    ├── dem.py
    ├── demcollection.py
    ├── examples.py
    ├── filters.py
    ├── fit.py
    ├── misc.py
    ├── spatialstats.py
    ├── terrain.py
    ├── vcrs.py
    └── volume.py

/.coveragerc:
--------------------------------------------------------------------------------
[report]
exclude_lines =
    pragma: not covered
    @overload
    except ImportError
--------------------------------------------------------------------------------

/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------

- [ ] Resolves #xxx,
- [ ] Tests added, otherwise issue #xxx opened,
- [ ] Fully documented, including `api/*.md` for new API,
- [ ] New optional dependencies or Python version support added to both `dev-environment.yml` and `setup.cfg`,
- [ ] If contributor workflow (test, doc, linting) or Python version support changed, update `CONTRIBUTING.md`.
--------------------------------------------------------------------------------

/.github/dependabot.yml:
--------------------------------------------------------------------------------
version: 2
updates:
  # Maintain dependencies for GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      # Check for updates to GitHub Actions every week
      interval: "weekly"
--------------------------------------------------------------------------------

/.github/scripts/apply_license_header.py:
--------------------------------------------------------------------------------
import os

# Path to the license header
HEADER_FILE = os.path.join(os.path.dirname(__file__), "license-header.txt")

# Read license header
with open(HEADER_FILE) as file:
    license_header = file.read()


# Add license header to a file
def add_license_header(file_path, header):
    with open(file_path) as f:
        content = f.read()

    # Check if the header is already there
    if content.startswith(header):
        return

    # If not, add it
    with open(file_path, "w") as f:
        f.write(header + "\n" + content)
    print(f"Header added to {file_path}")


# Check the header in every file in root_dir
def apply_license_header_to_all_py_files(root_dir):
    for subdir, _, files in os.walk(root_dir):
        for file in files:
            if file.endswith(".py"):
                file_path = os.path.join(subdir, file)
                add_license_header(file_path, license_header)


# Source directory
PROJECT_SRC = "xdem"

# Add header to every source file
apply_license_header_to_all_py_files(PROJECT_SRC)
--------------------------------------------------------------------------------
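A note on the script above: the `startswith` check makes it idempotent, so re-running it never duplicates headers. A minimal, self-contained sketch of that same pattern on a throwaway file (illustration only, not part of the repository):

```python
import pathlib
import tempfile

header = "# Copyright (c) 2024 xDEM developers\n"

# Mimic add_license_header() on a throwaway module
tmp = pathlib.Path(tempfile.mkdtemp()) / "module.py"
tmp.write_text("print('hello')\n")

for _ in range(2):  # running twice must add the header only once
    content = tmp.read_text()
    if not content.startswith(header):
        tmp.write_text(header + "\n" + content)

assert tmp.read_text().count(header.strip()) == 1
```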
/.github/scripts/generate_pip_deps_from_conda.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
"""
(Copied from pandas: https://github.com/pandas-dev/pandas/blob/main/scripts/generate_pip_deps_from_conda.py)
Convert the conda environment.yml to the pip requirements.txt,
or check that they have the same packages (for the CI)

Usage:

    Generate `requirements.txt`
    $ python scripts/generate_pip_deps_from_conda.py

    Compare and fail (exit status != 0) if `requirements.txt` has not been
    generated with this script:
    $ python scripts/generate_pip_deps_from_conda.py --compare
"""
import argparse
import pathlib
import re
import sys

if sys.version_info >= (3, 11):
    import tomllib
else:
    import tomli as tomllib
import yaml

EXCLUDE = {"python"}
REMAP_VERSION = {"tzdata": "2022.1"}
RENAME = {}


def conda_package_to_pip(package: str):
    """
    Convert a conda package to its pip equivalent.

    In most cases they are the same, those are the exceptions:
    - Packages that should be excluded (in `EXCLUDE`)
    - Packages that should be renamed (in `RENAME`)
    - A package requiring a specific version, in conda is defined with a single
      equal (e.g. ``pandas=1.0``) and in pip with two (e.g. ``pandas==1.0``)
    """
    package = re.sub("(?<=[^<>])=", "==", package).strip()
    print(package)

    for compare in ("<=", ">=", "=="):
        if compare in package:
            pkg, version = package.split(compare)
            if pkg in EXCLUDE:
                return
            if pkg in REMAP_VERSION:
                return "".join((pkg, compare, REMAP_VERSION[pkg]))
            if pkg in RENAME:
                return "".join((RENAME[pkg], compare, version))

    if package in EXCLUDE:
        return

    if package in RENAME:
        return RENAME[package]

    return package


def generate_pip_from_conda(conda_path: pathlib.Path, pip_path: pathlib.Path, compare: bool = False) -> bool:
    """
    Generate the pip dependencies file from the conda file, or compare that
    they are synchronized (``compare=True``).

    Parameters
    ----------
    conda_path : pathlib.Path
        Path to the conda file with dependencies (e.g. `environment.yml`).
    pip_path : pathlib.Path
        Path to the pip file with dependencies (e.g. `requirements.txt`).
    compare : bool, default False
        Whether to generate the pip file (``False``) or to compare if the
        pip file has been generated with this script and the last version
        of the conda file (``True``).

    Returns
    -------
    bool
        True if the comparison fails, False otherwise
    """
    with conda_path.open() as file:
        deps = yaml.safe_load(file)["dependencies"]

    pip_deps = []
    for dep in deps:
        if isinstance(dep, str):
            conda_dep = conda_package_to_pip(dep)
            if conda_dep:
                pip_deps.append(conda_dep)
        elif isinstance(dep, dict) and len(dep) == 1 and "pip" in dep:
            # If pulled directly from GitHub (temporary CI passing),
            # such as git+https://github.com/GlacioHack/geoutils.git,
            # rename to the package repo name
            dep_pips = dep["pip"]
            for dep_pip in dep_pips:
                if "+" in dep_pip and dep_pip.split("+")[0] == "git":
                    dep_pip = dep_pip.split("/")[-1].split(".git")[0]
                pip_deps.append(dep_pip)
        else:
            raise ValueError(f"Unexpected dependency {dep}")

    header = (
        f"# This file is auto-generated from {conda_path.name}, do not modify.\n"
        "# See that file for comments about the need/usage of each dependency.\n\n"
    )
    pip_content = header + "\n".join(pip_deps) + "\n"

    # Add setuptools to requirements.txt
    # with open(pathlib.Path(conda_path.parent, "pyproject.toml"), "rb") as fd:
    #     meta = tomllib.load(fd)
    # for requirement in meta["build-system"]["requires"]:
    #     if "setuptools" in requirement:
    #         pip_content += requirement
    #         pip_content += "\n"

    if compare:
        with pip_path.open() as file:
            return pip_content != file.read()

    with pip_path.open("w") as file:
        file.write(pip_content)
    return False


if __name__ == "__main__":
    argparser = argparse.ArgumentParser(description="convert (or compare) conda file to pip")
    argparser.add_argument(
        "--compare",
        action="store_true",
        help="compare whether the two files are equivalent",
    )
    args = argparser.parse_args()

    conda_fname = "environment.yml"
    pip_fname = "requirements.txt"
    repo_path = pathlib.Path(__file__).parent.parent.parent.absolute()
    res = generate_pip_from_conda(
        pathlib.Path(repo_path, conda_fname),
        pathlib.Path(repo_path, pip_fname),
        compare=args.compare,
    )
    if res:
        msg = f"`{pip_fname}` has to be generated with `{__file__}` after `{conda_fname}` is modified.\n"
        sys.stderr.write(msg)
        sys.exit(res)
--------------------------------------------------------------------------------
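For reference, given the rules above, `conda_package_to_pip` behaves as follows on typical `environment.yml` entries (illustrative assertions assuming the function is imported, not part of the script itself):

```python
# Single '=' pins are converted to pip's '==':
assert conda_package_to_pip("pandas=1.0") == "pandas==1.0"
# Range pins pass through unchanged:
assert conda_package_to_pip("numpy>=1.22") == "numpy>=1.22"
# Entries in EXCLUDE (here, "python") are dropped:
assert conda_package_to_pip("python=3.10") is None
```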
/.github/scripts/generate_yml_env_fixed_py.py:
--------------------------------------------------------------------------------
from __future__ import annotations

import argparse

import yaml  # type: ignore


def environment_yml_nopy(fn_env: str, py_version: str, add_deps: list[str] = None) -> None:
    """
    Generate temporary environment-py3.XX.yml files forcing python versions for setup of continuous integration.

    :param fn_env: Filename path to environment.yml
    :param py_version: Python version to force.
    :param add_deps: Additional dependencies to solve for directly (for instance graphviz fails with mamba update).
    """

    # Load the yml as dictionary
    yaml_env = yaml.safe_load(open(fn_env))
    conda_dep_env = list(yaml_env["dependencies"])

    # Force python version
    conda_dep_env_forced_py = ["python=" + py_version if "python" in dep else dep for dep in conda_dep_env]

    # Optionally, add other dependencies
    if add_deps is not None:
        conda_dep_env_forced_py.extend(add_deps)

    # Copy back to new yaml dict
    yaml_out = yaml_env.copy()
    yaml_out["dependencies"] = conda_dep_env_forced_py

    with open("environment-ci-py" + py_version + ".yml", "w") as outfile:
        yaml.dump(yaml_out, outfile, default_flow_style=False)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate environment files for CI with fixed python versions.")
    parser.add_argument("fn_env", metavar="fn_env", type=str, help="Path to the generic environment file.")
    parser.add_argument(
        "--pyv",
        dest="py_version",
        default="3.9",
        type=str,
        help="Python version to force.",
    )
    parser.add_argument(
        "--add",
        dest="add_deps",
        default=None,
        type=str,
        help="Comma-separated list of dependencies to add.",
    )
    args = parser.parse_args()
    # Only split the additional dependencies if some were passed (the default is None)
    add_deps = args.add_deps.split(",") if args.add_deps is not None else None
    environment_yml_nopy(fn_env=args.fn_env, py_version=args.py_version, add_deps=add_deps)
--------------------------------------------------------------------------------
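For reference, the CI workflow below (python-tests.yml) invokes this script as follows, writing `environment-ci-py3.11.yml` next to the generic file:

```bash
python .github/scripts/generate_yml_env_fixed_py.py --pyv 3.11 --add "graphviz,pytransform3d" "environment.yml"
```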
/.github/scripts/license-header.txt:
--------------------------------------------------------------------------------
# Copyright (c) 2024 xDEM developers
#
# This file is part of the xDEM project:
# https://github.com/glaciohack/xdem
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
--------------------------------------------------------------------------------

/.github/workflows/pip-checks.yml:
--------------------------------------------------------------------------------
# This workflow checks that pip installation works to import the package (tests are in python-tests.yml)

name: pip-install

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  test:
    name: ${{ matrix.os }}, python ${{ matrix.python-version }}
    runs-on: ${{ matrix.os }}

    strategy:
      matrix:
        os: ["ubuntu-latest", "macos-latest", "windows-latest"]
        python-version: ["3.10", "3.11", "3.12"]

    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      # Use pip install
      - name: Install project
        run: |
          python -m pip install . -vv

      # Check import works
      - name: Check import works with base environment
        run: python -c "import xdem"
--------------------------------------------------------------------------------

/.github/workflows/pre-commit.yml:
--------------------------------------------------------------------------------
name: Linting and formatting (pre-commit)

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  pre-commit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
      - uses: pre-commit/action@v3.0.1
--------------------------------------------------------------------------------

/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
# This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries

name: Upload package to PyPI

on:
  release:
    types: [created]

  workflow_dispatch:
    inputs:
      reason:
        description: 'Reason for manual trigger'
        required: true
        default: 'testing'

jobs:
  deploy:

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install setuptools setuptools_scm wheel twine
      - name: Build and publish
        env:
          TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
          TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
        # Build package, test pip install works, then upload to PyPI with twine
        run: |
          python setup.py sdist bdist_wheel
          pip install dist/*.tar.gz
          twine upload dist/*
--------------------------------------------------------------------------------

/.github/workflows/python-tests.yml:
--------------------------------------------------------------------------------
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: build

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  test:
    name: ${{ matrix.os }}, python ${{ matrix.python-version }}
    runs-on: ${{ matrix.os }}

    strategy:
      matrix:
        os: ["ubuntu-latest", "macos-latest"]
        python-version: ["3.10", "3.11", "3.12"]

    # Run all shells using bash (including Windows)
    defaults:
      run:
        shell: bash -l {0}

    steps:
      - uses: actions/checkout@v4

      # We initiate the environment empty, and check if a key for this environment doesn't already exist in the cache
      - name: Initiate empty environment
        uses: conda-incubator/setup-miniconda@v3
        with:
          miniforge-version: latest
          auto-update-conda: true
          use-mamba: true
          mamba-version: "2.0.5"
          channel-priority: strict
          activate-environment: xdem-dev
          python-version:

      - name: Get month for resetting cache
        id: get-date
        run: echo "cache_date=$(/bin/date -u '+%Y%m')" >> $GITHUB_ENV
        shell: bash

      - name: Cache conda env
        uses: actions/cache@v4
        with:
          path: ${{ env.CONDA }}/envs
          key: conda-${{ matrix.os }}-${{ matrix.python-version }}-${{ env.cache_date }}-${{ hashFiles('dev-environment.yml') }}-${{ env.CACHE_NUMBER }}
        env:
          CACHE_NUMBER: 0  # Increase this value to reset cache if environment.yml has not changed
        id: cache

      # The trick below is necessary because the generic environment file does not specify a Python version, and ONLY
      # "conda env update" CAN BE USED WITH CACHING, which upgrades the Python version when using the base environment
      # (we add "graphviz" from dev-environment to solve all dependencies at once, as graphviz relies on image
      # processing packages very much like geo-packages; not a problem for docs, dev installs where all is done at once)
      - name: Install base environment with a fixed Python version
        if: steps.cache.outputs.cache-hit != 'true'
        run: |
          mamba install pyyaml python=${{ matrix.python-version }}
          python .github/scripts/generate_yml_env_fixed_py.py --pyv ${{ matrix.python-version }} --add "graphviz,pytransform3d" "environment.yml"
          mamba env update -n xdem-dev -f environment-ci-py${{ matrix.python-version }}.yml

      - name: Install project
        run: pip install -e . --no-dependencies

      # This step allows us to check the "import xdem" with the base environment provided to users, before adding
      # development-specific dependencies by differencing the env and dev-env yml files
      - name: Check import works with base environment
        run: |
          # We unset the PROJ_DATA environment variable to make PROJ work on Windows
          unset PROJ_DATA
          python -c "import xdem"

      # This time, the trick below is necessary because: 1/ "conda update" does not support a file -f as argument
      # and also 2/ "conda env update" does not support --freeze-installed or --no-update-deps
      - name: Update environment with development packages if cache does not exist
        if: steps.cache.outputs.cache-hit != 'true'
        run: |
          # We unset the PROJ_DATA environment variable to make PROJ work on Windows
          unset PROJ_DATA
          pkgs_conda_dev=`python -c "import xdem.misc; xdem.misc.diff_environment_yml('environment.yml', 'dev-environment.yml', 'conda')"`
          pkgs_pip_dev=`python -c "import xdem.misc; xdem.misc.diff_environment_yml('environment.yml', 'dev-environment.yml', 'pip')"`
          mamba install $pkgs_conda_dev --freeze-installed
          if [[ "$pkgs_pip_dev" != "None" ]]; then
            pip install $pkgs_pip_dev
          fi

      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

      - name: Setup pip dependencies
        run: pip install pytest-cov coveralls coveragepy-lcov 'coverage<7'

      - name: Print conda environment (for debugging)
        run: |
          conda info
          conda list

      - name: Test with pytest
        run: |
          # We unset the PROJ_DATA environment variable to make PROJ work on Windows
          unset PROJ_DATA
          pytest -ra --cov=xdem/

      # We can skip the conversion step once this PR of pytest is merged: https://github.com/pytest-dev/pytest-cov/pull/536
      # and replace pytest argument by --cov-report=lcov
      - name: Converting coverage to LCOV format
        run: coveragepy-lcov --data_file_path .coverage --output_file_path coverage.info

      - name: Upload coverage to Coveralls
        uses: coverallsapp/github-action@v2
        with:
          github-token: ${{ secrets.github_token }}
          flag-name: run-${{ join(matrix.*, '-') }}
          path-to-lcov: coverage.info
          parallel: true

  finish:
    needs: test
    runs-on: ubuntu-latest
    steps:
      - name: Upload to Coveralls finished
        uses: coverallsapp/github-action@v2
        with:
          github-token: ${{ secrets.github_token }}
          parallel-finished: true
--------------------------------------------------------------------------------

/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
doc/_build/
doc/build/
doc/source/api/
doc/.buildinfo
doc/.doctrees

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
.vim/

# Spyder project settings
.spyderproject
.spyproject

# PyCharm project setting
.idea

# VS code setting
.vscode/
!.vscode/settings.json
!.vscode/launch.json

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# Version file
xdem/_version.py

# Example data downloaded/produced during tests
examples/data/
tests/test_data/

doc/source/basic_examples/
doc/source/advanced_examples/
doc/source/gen_modules/
doc/source/sg_execution_times.rst
examples/basic/temp.tif
examples/advanced/aligned_dem.tif

# Directory where myst_nb executes jupyter code and cache
doc/jupyter_execute/
doc/.jupyter_cache/
--------------------------------------------------------------------------------

/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
ci:
  autofix_prs: false
  autoupdate_schedule: quarterly
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: check-yaml
      - id: end-of-file-fixer
        exclude: \.txt$
      - id: trailing-whitespace  # Remove trailing whitespaces
      - id: check-merge-conflict

  # Fix common spelling mistakes
  - repo: https://github.com/codespell-project/codespell
    rev: v2.3.0
    hooks:
      - id: codespell
        args: [
          '--ignore-words-list', 'nd,alos,inout,theses',
          '--ignore-regex', '\bhist\b',
          '--'
        ]
        types_or: [python, rst, markdown]
        files: ^(xdem|doc|tests)/

  # Replace relative imports (e.g. 'from . import georaster' -> 'from geoutils import georaster')
  - repo: https://github.com/MarcoGorelli/absolufy-imports
    rev: v0.3.1
    hooks:
      - id: absolufy-imports

  # Format the code aggressively using black
  - repo: https://github.com/psf/black
    rev: 24.10.0
    hooks:
      - id: black
        args: [--line-length=120]

  # Lint the code using flake8
  - repo: https://github.com/pycqa/flake8
    rev: 7.1.1
    hooks:
      - id: flake8
        # More than one argument in the second list, so need to pass arguments as below (and -- to finish)
        args: [
          '--max-line-length', '120',  # we can write dicts however we want
          '--extend-ignore', 'E203,C408,B028',  # flake8 disagrees with black, so this should be ignored.
          '--'
        ]
        additional_dependencies:
          - flake8-comprehensions
          - flake8-bugbear
        files: ^(xdem|tests)

  # Lint the code using mypy
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: v1.13.0
    hooks:
      - id: mypy
        args: [
          --config-file=mypy.ini,
          --strict,
          --implicit-optional,
          --ignore-missing-imports,  # Don't warn about stubs since pre-commit runs in a limited env
          --allow-untyped-calls,  # Dynamic function/method calls are okay. Untyped function definitions are not okay.
          --show-error-codes,
          --no-warn-unused-ignores,  # Do not warn about unused 'type: ignore' comments
          --disable-error-code=attr-defined,  # "Module has no attribute 'XXX'" occurs because of the pre-commit env.
          --disable-error-code=name-defined,  # "Name 'XXX' is not defined" occurs because of the pre-commit env.
          --disable-error-code=var-annotated,
          --disable-error-code=no-any-return
        ]
        additional_dependencies: [tokenize-rt, numpy==2]
        files: ^(xdem|tests|doc/code)

  # Sort imports using isort
  - repo: https://github.com/PyCQA/isort
    rev: 5.13.2
    hooks:
      - id: isort
        args: ["--profile", "black"]

  # Automatically upgrade syntax to a minimum version
  - repo: https://github.com/asottile/pyupgrade
    rev: v3.19.0
    hooks:
      - id: pyupgrade
        args: [--py37-plus]

  # Various formattings
  - repo: https://github.com/pre-commit/pygrep-hooks
    rev: v1.10.0
    hooks:
      # Single backticks should apparently not be used
      - id: rst-backticks
      # Check that all directives end with double colon
      - id: rst-directive-colons
        types: [text]
        types_or: [python, rst]
      # Inline code should not touch normal text
      - id: rst-inline-touching-normal
        types: [text]
        types_or: [python, rst]
      # Eval should never be used (can do arbitrary code execution)
      - id: python-no-eval
      # Enforce the use of type annotations instead of docstring type comments
      - id: python-use-type-annotations

  # Add custom regex lints (see .relint.yml)
  - repo: https://github.com/codingjoe/relint
    rev: 3.3.1
    hooks:
      - id: relint
  - repo: local
    hooks:
      # Generate pip's requirements.txt from conda's environment.yml to ensure consistency
      - id: pip-to-conda
        name: Generate pip dependency from conda
        language: python
        entry: .github/scripts/generate_pip_deps_from_conda.py
        files: ^(environment.yml|requirements.txt)$
        pass_filenames: false
        additional_dependencies: [tomli, pyyaml]

#  # Add license header to the source files
#  - repo: local
#    hooks:
#      - id: add-license-header
#        name: Add License Header
#        entry: python .github/scripts/apply_license_header.py
#        language: python
#        files: \.py$
--------------------------------------------------------------------------------

/.readthedocs.yaml:
--------------------------------------------------------------------------------
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

build:
  os: "ubuntu-20.04"
  tools:
    python: "mambaforge-4.10"

# Build documentation in the doc/ directory with Sphinx
sphinx:
  configuration: doc/source/conf.py
  fail_on_warning: false

# Build the doc in offline formats
formats:
  - pdf
  - htmlzip

conda:
  environment: dev-environment.yml
--------------------------------------------------------------------------------

/.relint.yml:
--------------------------------------------------------------------------------
- name: Type hint in docstring
  pattern: ':[r]?type '
  filePattern: .*\.py
--------------------------------------------------------------------------------
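To illustrate the rule above (and the related `python-use-type-annotations` hook in `.pre-commit-config.yaml`), here is a minimal sketch of the docstring style that gets flagged versus the annotated style expected instead (hypothetical function names):

```python
import numpy as np


def slope_docstring_typed(dem):
    """
    Compute slope.

    :type dem: np.ndarray  <- ':type ' docstring hint, flagged by the relint rule
    """


def slope_annotated(dem: np.ndarray) -> np.ndarray:
    """Compute slope, with the type given as an annotation instead."""
    return dem  # placeholder body for the sketch
```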
/AUTHORS.md:
--------------------------------------------------------------------------------
# Credits

---
© 2024 **xDEM developers**.

**xDEM** is licensed under the permissive Apache 2.0 license (see LICENSE file).

All contributors listed in this document are part of the **xDEM developers**, and their
contributions are subject to the project's copyright under the terms of the
[Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0).

This file keeps track of authors' contributions.

## Maintainers / Steering Committee

---

- **Romain Hugonnet** [@rhugonnet](https://github.com/rhugonnet)
- **Amaury Dehecq** [@adehecq](https://github.com/adehecq)
- **Erik Schytt Mannerfelt** [@erikmannerfelt](https://github.com/erikmannerfelt)
- **Emmanuel Dubois** [@duboise-cnes](https://github.com/duboise-cnes)
- **Valentine Bellet** [@Valentine-Bellet](https://github.com/valentine-bellet)
- **Alice de Bardonnèche-Richard** [@adebardo](https://github.com/adebardo)


## Development Lead

---

- **Romain Hugonnet** [@rhugonnet](https://github.com/rhugonnet)
- **Amaury Dehecq** [@adehecq](https://github.com/adehecq)
- **Erik Schytt Mannerfelt** [@erikmannerfelt](https://github.com/erikmannerfelt)


## Contributors

---

- **Friedrich Knuth** [@friedrichknuth](https://github.com/friedrichknuth)
- **Andrew Tedstone** [@atedstone](https://github.com/atedstone)
- **Zhihao LIU** [@liuh886](https://github.com/liuh886)
- **Diego Cusicanqui** [@cusicand](https://github.com/cusicand)
- **Alessandro Gentilini** [@alessandro-gentilini](https://github.com/alessandro-gentilini)
- **Ferdinand Schenck** [@fnands](https://github.com/fnands)
- **Johannes Landmann** [@jlandmann](https://github.com/jlandmann)
- **Valentin Schaffner** [@vschaffn](https://github.com/vschaffn)
- **Bob McNabb** [@iamdonovan](https://github.com/iamdonovan)
- **Enrico Mattea** [@MatteaE](https://github.com/MatteaE)
- **Amelie Froessl** [@ameliefroessl](https://github.com/ameliefroessl)
- **Simon Gascoin** [@sgascoin](https://github.com/sgascoin)
- **Clara Quinto** [@quinto-clara](https://github.com/quinto-clara)

## Original Developers/Designers/Supporters

---

- **Romain Hugonnet** [@rhugonnet](https://github.com/rhugonnet)
- **Amaury Dehecq** [@adehecq](https://github.com/adehecq)
- **Erik Schytt Mannerfelt** [@erikmannerfelt](https://github.com/erikmannerfelt)
--------------------------------------------------------------------------------

/CODE-OF-CONDUCT.md:
--------------------------------------------------------------------------------
# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, caste, color, religion, or sexual
identity and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall
  community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or advances of
  any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address,
  without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the responsible community leaders; please refer to the "Steering
Committee" section in [AUTHORS.md](AUTHORS.md).

All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series of
actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or permanent
ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within the
community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.1, available at
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].

Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].

For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
[https://www.contributor-covenant.org/translations][translations].

[homepage]: https://www.contributor-covenant.org
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations
--------------------------------------------------------------------------------

/CONTRIBUTING.md:
--------------------------------------------------------------------------------
# How to contribute

We welcome new contributions to xDEM, which is still very much in expansion!
Below is a step-by-step guide to contributing to xDEM, ensuring tests are passing and the documentation is updated.

## Overview: making a contribution

The technical steps to contributing to xDEM are:

1. Fork `GlacioHack/xdem` and clone your fork repository locally,
2. Set up the development environment **(see section "Setup" below)**,
3. Create a branch for the new feature or bug fix,
4. Make your changes,
5. Add or modify related tests in `tests/` **(see section "Tests" below)**,
6. Add or modify related documentation in `doc/` **(see section "Documentation" below)**,
7. Commit your changes,
8. Run `pre-commit` separately if not installed as git hook **(see section "Linting" below)**,
9. Push to your fork,
10. Open a pull request from GitHub to discuss and eventually merge.

## Development environment

xDEM currently supports Python versions 3.10 to 3.12 (see `dev-environment.yml` for detailed dependencies), which are
tested in a continuous integration (CI) workflow running on GitHub Actions.

When you open a PR on xDEM, a single linting action and 9 test actions will automatically start, corresponding to all
supported Python versions (3.10, 3.11 and 3.12) and OS (Ubuntu, Mac, Windows). The coverage change of the tests will also
be reported by CoverAlls.

### Setup

#### With `mamba`
Clone the git repo and create a `mamba` environment (see how to install `mamba` in the [mamba documentation](https://mamba.readthedocs.io/en/latest/)):

```bash
git clone https://github.com/GlacioHack/xdem.git
cd xdem
mamba env create -f dev-environment.yml  # Add '-n custom_name' if you want.
mamba activate xdem-dev  # Or any other name specified above
```

#### With `pip`
```bash
git clone https://github.com/GlacioHack/xdem.git
cd xdem
make install
```

Please note: pip installation is currently only possible under python3.10.

### Tests

At least one test per feature (in the associated `tests/test_*.py` file) should be included in the PR, using `pytest` (see existing tests for examples).
The structure of test modules and functions in `tests/` largely mirrors that of the package modules and functions in `xdem/`.

To run the entire test suite, run `pytest` from the root of the repository:
```bash
pytest
```

Running `pytest` will trigger a script that automatically downloads test data from [https://github.com/GlacioHack/xdem-data](https://github.com/GlacioHack/xdem-data) used to run all tests.

RichDEM should only be used for testing purposes within the xDEM project. The functionality of xDEM must not depend on RichDEM.
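For illustration, a minimal sketch of such a mirrored test (hypothetical test name and assertion; the example-data name follows the xdem documentation), which would live in `tests/test_terrain.py` for a feature of `xdem/terrain.py`:

```python
import numpy as np

import xdem


def test_slope_is_bounded() -> None:
    """Slope of the example DEM should stay between 0 and 90 degrees."""
    dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
    slope = xdem.terrain.slope(dem)
    assert np.all((slope.data >= 0) & (slope.data <= 90))
```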
### Documentation

If your changes need to be reflected in the documentation, update the related pages located in `doc/source/`. The documentation is written in MyST markdown syntax, similar to GitHub's default Markdown (see [MyST-NB](https://myst-nb.readthedocs.io/en/latest/authoring/text-notebooks.html) for details).

To ensure that the documentation builds properly after your changes, if you are on Linux, you can run `pytest tests/test_doc.py`, which is equivalent to directly calling `sphinx-build source/ build/html/` from the `doc/` folder. On Windows and Mac, building the documentation is not maintained, so you can wait until the PR is opened for it to be checked on Linux by the CI.

### Formatting and linting

Install and run `pre-commit` from the root of the repository (such as with `mamba install pre-commit`, see [pre-commit documentation](https://pre-commit.com/) for details),
which will use `.pre-commit-config.yaml` to verify spelling errors, import sorting, type checking, formatting and linting:

```bash
pre-commit run --all
```

You can then commit and push those changes.
Optionally, `pre-commit` can be installed as a git hook to ensure checks have to pass before committing.

### Final steps

That's it! If the tests and documentation are passing, or if you need help to make those work, you can open a PR.

We'll receive word of your PR as soon as it is opened, and should follow up shortly to discuss the changes, and eventually give approval to merge. Thank you so much for contributing!

### Rights

The license (see LICENSE) applies to all contributions.
--------------------------------------------------------------------------------

/GOVERNANCE.md:
--------------------------------------------------------------------------------
# Governance Policy

This document provides the governance policy for the Project. Maintainers agree to this policy and to abide by all Project policies,
including the [code of conduct](./CODE-OF-CONDUCT.md), and by adding their name to the [AUTHORS.md file](./AUTHORS.md).

## 1. Roles.

This project may include the following roles. Additional roles may be adopted and documented by the Project.

**1.1. Maintainers**. Maintainers are responsible for organizing activities around developing, maintaining, and updating
the Project. Maintainers are also responsible for determining consensus. This Project may add or remove Maintainers with
the approval of the current Maintainers.

**1.2. Contributors**. Contributors are those that have made contributions to the Project.

## 2. Decisions.

**2.1. Consensus-Based Decision Making**. Projects make decisions through consensus of the Maintainers. While explicit
agreement of all Maintainers is preferred, it is not required for consensus. Rather, the Maintainers will determine
consensus based on their good faith consideration of a number of factors, including the dominant view of the
Contributors and nature of support and objections. The Maintainers will document evidence of consensus in accordance
with these requirements.

**2.2. Appeal Process**. Decisions may be appealed by opening an issue and that appeal will be considered by the
Maintainers in good faith, who will respond in writing within a reasonable time. If the Maintainers deny the appeal,
the appeal may be brought before the Organization Steering Committee, who will also respond in writing in a reasonable
time.

## 3. How We Work.

**3.1. Openness**. Participation is open to anyone who is directly and materially affected by the activity in question.
There shall be no undue financial barriers to participation.

**3.2. Balance**. The development process should balance the interests of Contributors and other stakeholders.
Contributors from diverse interest categories shall be sought with the objective of achieving balance.

**3.3. Coordination and Harmonization**. Good faith efforts shall be made to resolve potential conflicts or
incompatibility between releases in this Project.

**3.4. Consideration of Views and Objections**. Prompt consideration shall be given to the written views and
objections of all Contributors.

**3.5. Written procedures**. This governance document and other materials documenting this project's development
process shall be available to any interested person.

## 4. No Confidentiality.

Information disclosed in connection with any Project activity, including but not limited to meetings, contributions,
and submissions, is not confidential, regardless of any markings or statements to the contrary.

## 5. Trademarks.

Any names, trademarks, logos, or goodwill developed by and associated with the Project (the "Marks") are controlled by
the Organization. Maintainers may only use these Marks in accordance with the Organization's trademark policy. If a
Maintainer resigns or is removed, any rights the Maintainer may have in the Marks revert to the Organization.

## 6. Amendments.

Amendments to this governance policy may be made by affirmative vote of 2/3 of all Maintainers, with approval by the
Organization's Steering Committee.

---
Part of MVG-0.1-beta.
Made with love by GitHub. Licensed under the [CC-BY 4.0 License](https://creativecommons.org/licenses/by/4.0/).
--------------------------------------------------------------------------------

/HOW_TO_RELEASE.md:
--------------------------------------------------------------------------------
# How to issue an xDEM release

## GitHub and PyPI

### The easy way

1. Change the version number in `setup.cfg`. It can easily be done from GitHub directly without a PR. The version number is important for PyPI as it will determine the file name of the wheel. A name can [never be reused](https://pypi.org/help/#file-name-reuse), even if a file or project has been deleted.

2. Follow the steps to [create a new release](https://docs.github.com/en/repositories/releasing-projects-on-github/managing-releases-in-a-repository) on GitHub.
   Use the same release number and tag as in `setup.cfg`.

An automatic GitHub action will start to push and publish the new release to PyPI.

**Note**: A tag and a release can easily be deleted if you make a mistake, but if the release is pushed to PyPI with a new version number, it will not be possible to re-use the same version number anymore.

**In short, if you mess up a release by forgetting to change the version number**:

- PyPI will block the upload, so the GitHub action fails. All is fine.
- You can now edit the version number on the main branch.
- Before releasing, you need to delete **both** the tag and the release of the previous release. If you release with the same tag without deletion, it will ignore your commit changing the version number, and PyPI will block the upload again. You're stuck in a circle.

### The hard way

1. Go to your local main repository (not the fork) and ensure your main branch is synced:
       git checkout main
       git pull
2. Look over whats-new.rst and the docs. Make sure "What's New" is complete
   (check the date!) and add a brief summary note describing the release at the
   top.
3. If you have any doubts, run the full test suite one final time!
       pytest --run-slow --mpl .
4. Increment the version number "FULLVERSION" in setup.py for PyPI and conda.
5. On the main branch, commit the release in git:
       git commit -a -m 'Release v1.X.Y'
6. Tag the release:
       git tag -a v1.X.Y -m 'v1.X.Y'
7. Build source and binary wheels for pypi:
       git clean -xdf  # this deletes all uncommitted changes!
       python setup.py bdist_wheel sdist
8. Use twine to register and upload the release on pypi. Be careful, you can't
   take this back!
       twine upload dist/xdem-1.X.Y*
   You will need to be listed as a package owner at
   https://pypi.python.org/pypi/xdem for this to work.
9. Push your changes to main:
       git push origin main
       git push origin --tags
10. Update the stable branch (used by ReadTheDocs) and switch back to main:
        git checkout stable
        git rebase main
        git push origin stable
        git checkout main
    It's OK to force push to 'stable' if necessary.
    We also update the stable branch with `git cherry-pick` for documentation-only
    fixes that apply to the current released version.
11. Add a section for the next release (v.X.(Y+1)) to doc/whats-new.rst.
12. Commit your changes and push to main again:
        git commit -a -m 'Revert to dev version'
        git push origin main
    You're done pushing to main!
13. Issue the release on GitHub. Click on "Draft a new release" at
    https://github.com/GlacioHack/xdem/releases. Type in the version number, but
    don't bother to describe it -- we maintain that on the docs instead.
14. Update the docs. Log in to https://readthedocs.org/projects/xdem/versions/
    and switch your new release tag (at the bottom) from "Inactive" to "Active".
    It should now build automatically.
15. Issue the release announcement!

## Conda-forge

Conda-forge distributions work by having a "feedstock" version of the package, containing instructions on how to bundle it for conda.
The xDEM feedstock is available at [https://github.com/conda-forge/xdem-feedstock](https://github.com/conda-forge/xdem-feedstock), and only accessible to maintainers.

### If the conda-forge bot works

To update the conda-forge distribution of xDEM, very few steps should have to be performed manually. If the conda bot works, a PR will be opened at [https://github.com/conda-forge/xdem-feedstock](https://github.com/conda-forge/xdem-feedstock) within a day of publishing a new GitHub release.
Assuming the dependencies have not changed, only two lines will be changed in the `meta.yaml` file of the feedstock: (i) the new version number and (ii) the new sha256 checksum for the GitHub-released package. Those will be updated automatically by the bot.

However, if the dependencies or license need to be updated, this has to be done manually. Then, add the bot branch as a remote branch and push the dependency changes to `meta.yaml` (see additional info from the conda bot for the license).

### If the conda-forge bot does not work

In this case, the PR has to be opened manually, and the new version number and new sha256 checksum have to be updated manually as well.

The most straightforward way to obtain the new sha256 checksum is to run `conda-build` (see below) with the old checksum, which will fail, and then to copy the new hash from the "SHA256 mismatch: ..." error that arises!

First, the xdem-feedstock repo has to be forked on GitHub.
Then, follow these steps for `NEW_VERSION` (substitute with the actual version name):
```bash

>>> conda install conda-build

>>> git clone https://github.com/your_username/xdem-feedstock  # or git pull (and make sure the fork is up to date with the upstream repo) if the repo is already cloned

>>> cd xdem-feedstock/recipe

# Update meta.yaml:
# {% set version = "NEW_VERSION" %}
# sha256: NEW_SHA256

>>> conda-build .  # This is to validate that the build process works, but is technically optional.

>>> git add -u && git commit -m "Updated version to NEW_VERSION"  # Or whatever you want to tell us :)

>>> git push -u origin main
```

An alternative solution to get the sha256sum is to run `sha256sum` on the release file downloaded from GitHub.
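For instance, with NEW_VERSION substituted and assuming the feedstock points at the GitHub source tarball (hypothetical tag name, following the v-prefixed tag convention above):

```bash
curl -L -o xdem-NEW_VERSION.tar.gz "https://github.com/GlacioHack/xdem/archive/refs/tags/vNEW_VERSION.tar.gz"
sha256sum xdem-NEW_VERSION.tar.gz
```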
112 | Note that you have to be a maintainer or have the PR be okayed by a maintainer for the CI action to run. 113 | If this works, the PR can be merged, and the conda-forge version will be updated within a few hours! 114 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # Autodocumented Makefile for xDEM 2 | # see: https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html 3 | # Dependencies : python3 venv 4 | 5 | ############### GLOBAL VARIABLES ###################### 6 | .DEFAULT_GOAL := help 7 | SHELL := /bin/bash 8 | 9 | # Virtualenv directory name (can be overridden) 10 | ifndef VENV 11 | VENV = "venv" 12 | endif 13 | 14 | # Python global variables definition 15 | PYTHON_VERSION_MIN = 3.10 16 | # Set PYTHON if not defined in command line 17 | # Example: PYTHON="python3.10" make venv to use python 3.10 for the venv 18 | # By default the default python3 of the system. 19 | ifndef PYTHON 20 | PYTHON = "python3" 21 | endif 22 | PYTHON_CMD=$(shell command -v $(PYTHON)) 23 | 24 | PYTHON_VERSION_CUR=$(shell $(PYTHON_CMD) -c 'import sys; print("%d.%d"% sys.version_info[0:2])') 25 | PYTHON_VERSION_OK=$(shell $(PYTHON_CMD) -c 'import sys; cur_ver = sys.version_info[0:2]; min_ver = tuple(map(int, "$(PYTHON_VERSION_MIN)".split("."))); print(int(cur_ver >= min_ver))') 26 | 27 | ############### Check python version supported ############ 28 | 29 | ifeq (, $(PYTHON_CMD)) 30 | $(error "PYTHON_CMD=$(PYTHON_CMD) not found in $(PATH)") 31 | endif 32 | 33 | ifeq ($(PYTHON_VERSION_OK), 0) 34 | $(error "Requires Python version >= $(PYTHON_VERSION_MIN). Current version is $(PYTHON_VERSION_CUR)") 35 | endif 36 | 37 | ################ MAKE Targets ###################### 38 | 39 | help: ## Show this help 40 | @echo " XDEM MAKE HELP" 41 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' 42 | 43 | .PHONY: venv 44 | venv: ## Create a virtual environment in 'venv' directory if it doesn't exist 45 | @test -d ${VENV} || $(PYTHON_CMD) -m venv ${VENV} 46 | @touch ${VENV}/bin/activate 47 | @${VENV}/bin/python -m pip install --upgrade wheel setuptools pip 48 | 49 | 50 | .PHONY: install 51 | install: venv ## Install xDEM for development (depends on venv) 52 | @test -f ${VENV}/bin/xdem || echo "Installing xdem in development mode" 53 | @test -f ${VENV}/bin/xdem || ${VENV}/bin/pip install -e .[dev] 54 | @test -f .git/hooks/pre-commit || echo "Installing pre-commit hooks" 55 | @test -f .git/hooks/pre-commit || ${VENV}/bin/pre-commit install -t pre-commit 56 | @test -f .git/hooks/pre-push || ${VENV}/bin/pre-commit install -t pre-push 57 | @echo "xDEM installed in development mode in virtualenv ${VENV}" 58 | @echo "To use: source ${VENV}/bin/activate; xdem -h" 59 | 60 | 61 | .PHONY: tests 62 | tests: ## run tests 63 | @${VENV}/bin/pytest 64 | 65 | ## Clean section 66 | 67 | .PHONY: clean 68 | clean: clean-venv clean-build clean-pyc clean-precommit ## Clean all 69 | 70 | .PHONY: clean-venv 71 | clean-venv: ## Clean the virtual environment 72 | @echo "+ $@" 73 | @rm -rf ${VENV} 74 | 75 | .PHONY: clean-build 76 | clean-build: ## Remove build artifacts 77 | @echo "+ $@" 78 | @rm -rf build/ dist/ .eggs/ 79 | @find . -name '*.egg-info' -exec rm -rf {} + 80 | @find . 
-name '*.egg' -exec rm -f {} + 81 | 82 | .PHONY: clean-precommit 83 | clean-precommit: ## Remove pre-commit hooks from .git/hooks 84 | @rm -f .git/hooks/pre-commit 85 | @rm -f .git/hooks/pre-push 86 | 87 | .PHONY: clean-pyc 88 | clean-pyc: ## Remove Python cache and artifacts 89 | @echo "+ $@" 90 | @find . -type f -name "*.py[co]" -exec rm -rf {} + 91 | @find . -type d -name "__pycache__" -exec rm -rf {} + 92 | @find . -name '*~' -exec rm -rf {} + 93 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2024 xDEM developers. 2 | 3 | This file is part of xDEM (see https://github.com/GlacioHack/xdem). 4 | 5 | Licensed under the Apache License, Version 2.0 (the "License"); 6 | you may not use this file except in compliance with the License. 7 | You may obtain a copy of the License at 8 | 9 | http://www.apache.org/licenses/LICENSE-2.0 10 | 11 | Unless required by applicable law or agreed to in writing, software 12 | distributed under the License is distributed on an "AS IS" BASIS, 13 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | See the License for the specific language governing permissions and 15 | limitations under the License. 16 | 17 | xDEM software is distributed under the Apache Software License (ASL) v2.0, see 18 | LICENSE file or http://www.apache.org/licenses/LICENSE-2.0 for details. 19 | 20 | Python: programming language that lets you work quickly and integrate systems more effectively. 21 | Copyright (c) 2001-2023 Python Software Foundation. All Rights reserved. 22 | Website: http://python.org/ 23 | License: Python Software License. 24 | 25 | NumPy: The fundamental package for scientific computing with Python. 26 | Copyright (c) 2005-2024, NumPy Developers. 27 | Website: https://numpy.org/ 28 | License: BSD 3-Clause. 29 | 30 | Matplotlib: Comprehensive library for creating static, animated, and interactive visualizations in Python. 31 | Copyright (C) 2001-2023 Matplotlib Development Team. 32 | Website: https://matplotlib.org/ 33 | License: Matplotlib only uses BSD compatible code, and its license is based on the PSF license. 34 | 35 | SciPy: Open-source software for mathematics, science, and engineering. 36 | Copyright (c) 2001-2002 Enthought, Inc. All rights reserved. 37 | Copyright (c) 2003-2019 SciPy Developers. All rights reserved. 38 | Website: https://www.scipy.org/scipylib/ 39 | License: BSD 3-Clause. 40 | 41 | GeoUtils: Libraries and command-line utilities for geospatial data processing/analysis in Python. 42 | Copyright (c) 2020 Amaury Dehecq, Andrew Tedstone. 43 | Website: https://github.com/GlacioHack/geoutils 44 | License: MIT License. 45 | 46 | Rasterio: Access to geospatial raster data. 47 | Copyright (c) 2016, MapBox. All rights reserved. 48 | Website: https://github.com/mapbox/rasterio 49 | License: BSD 3-Clause. 50 | 51 | GeoPandas: Python tools for geographic data. 52 | Copyright (c) 2013-2022, GeoPandas developers. 53 | Website: https://geopandas.org/ 54 | License: BSD 3-Clause. 55 | 56 | pyogrio: Fast read/write access to OGR-compatible vector formats. 57 | Copyright (c) 2020-2024 Brendan C. Ward and pyogrio contributors. 58 | Website: https://github.com/geopandas/pyogrio 59 | License: MIT. 60 | 61 | pandas: Data analysis and manipulation library for Python. 62 | Copyright (c) 2008-2011, AQR Capital Management, LLC, Lambda Foundry, Inc. and PyData Development Team. 
63 | Copyright (c) 2011-2024, Open source contributors. 64 | Website: https://pandas.pydata.org/ 65 | License: BSD 3-Clause. 66 | 67 | scikit-learn: Machine learning library for Python. 68 | Copyright (c) 2007-2023, scikit-learn Developers. 69 | Website: https://scikit-learn.org/ 70 | License: BSD 3-Clause. 71 | 72 | Numba: A Just-in-Time Compiler for Python that accelerates numerical functions. 73 | Copyright (c) 2012-2023 Anaconda, Inc. 74 | Website: https://numba.pydata.org/ 75 | License: BSD 2-Clause. 76 | 77 | scikit-image: Image processing in Python. 78 | Copyright (c) 2009-2022 the scikit-image team. 79 | Website: https://scikit-image.org/ 80 | License: BSD 3-Clause. 81 | 82 | scikit-gstat: A geostatistics toolbox for Python. 83 | Copyright (c) 2017 Mirko Mälicke. 84 | Website: https://github.com/mmaelicke/scikit-gstat 85 | License: MIT License. 86 | 87 | affine: Matrix transformations for geospatial coordinates. 88 | Copyright (c) 2014-2023, Sean Gillies. 89 | Website: https://github.com/sgillies/affine 90 | License: BSD 3-Clause. 91 | 92 | Shapely: Manipulation and analysis of geometric objects. 93 | Copyright (c) 2007, Sean C. Gillies. 2019, Casper van der Wel. 2007-2022, Shapely Contributors. 94 | Website: https://shapely.readthedocs.io/ 95 | License: BSD 3-Clause. 96 | 97 | pyproj: Python interface to PROJ (cartographic projections and transformations library). 98 | Copyright (c) 2006-2018, Jeffrey Whitaker. 99 | Copyright (c) 2019-2024, Open source contributors. 100 | Website: https://pyproj4.github.io/pyproj/stable/ 101 | License: MIT License. 102 | 103 | pytransform3d: 3D transformations for Python. 104 | Copyright (c) 2014-2023, Alexander Fabisch, and pytransform3d contributors. 105 | Website: https://github.com/rock-learning/pytransform3d 106 | License: BSD 3-Clause. 107 | 108 | tqdm: A fast, extensible progress bar for Python and CLI applications. 109 | Copyright (c) MIT 2013 Noam Yorav-Raphael, original author. 110 | Copyright (c) MPL-2.0 2015-2024 Casper da Costa-Luis. 111 | Website: https://github.com/tqdm/tqdm 112 | License: MPL-2.0 and MIT License. 113 | 114 | yaml (PyYAML): Python bindings for YAML, a human-readable data serialization language. 115 | Copyright (c) 2006-2023, PyYAML contributors. 116 | Website: https://pyyaml.org/ 117 | License: MIT License. 118 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # xDEM: robust analysis of DEMs in Python. 
2 | 3 | [![Documentation Status](https://readthedocs.org/projects/xdem/badge/?version=latest)](https://xdem.readthedocs.io/en/latest/?badge=latest) 4 | [![build](https://github.com/GlacioHack/xdem/actions/workflows/python-tests.yml/badge.svg)](https://github.com/GlacioHack/xdem/actions/workflows/python-tests.yml) 5 | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/xdem.svg)](https://anaconda.org/conda-forge/xdem) 6 | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/xdem.svg)](https://anaconda.org/conda-forge/xdem) 7 | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/xdem.svg)](https://anaconda.org/conda-forge/xdem) 8 | [![PyPI version](https://badge.fury.io/py/xdem.svg)](https://badge.fury.io/py/xdem) 9 | [![Coverage Status](https://coveralls.io/repos/github/GlacioHack/xdem/badge.svg?branch=main)](https://coveralls.io/github/GlacioHack/xdem?branch=main) 10 | 11 | [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/GlacioHack/xdem/main) 12 | [![Pre-Commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit) 13 | [![Formatted with black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/python/black) 14 | [![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/) 15 | [![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/) 16 | 17 | **xDEM** is an open source project to develop a core Python package for the analysis of digital elevation models (DEMs). 18 | 19 | It aims at **providing modular and robust tools for the most common analyses needed with DEMs**, including both geospatial 20 | operations specific to DEMs and a wide range of 3D alignment and correction methods from published, peer-reviewed studies. 21 | The core manipulation of DEMs (e.g., vertical alignment, terrain analysis) is **conveniently centered around a `DEM` class** (that, notably, re-implements all tools 22 | of [gdalDEM](https://gdal.org/programs/gdaldem.html)). More complex pipelines (e.g., 3D rigid coregistration, bias corrections, filtering) are **built around 23 | modular `Coreg` and `BiasCorr` classes that easily interface with each other**. Finally, xDEM includes advanced 24 | uncertainty analysis tools based on spatial statistics of [SciKit-GStat](https://scikit-gstat.readthedocs.io/en/latest/). 25 | 26 | Additionally, xDEM inherits many convenient functionalities from [GeoUtils](https://github.com/GlacioHack/geoutils) such as 27 | **implicit loading**, **numerical interfacing** and **convenient object-based geospatial methods** to easily perform 28 | the most common higher-level tasks needed by geospatial users (e.g., reprojection, cropping, vector masking). Through [GeoUtils](https://github.com/GlacioHack/geoutils), xDEM 29 | relies on [Rasterio](https://github.com/rasterio/rasterio), [GeoPandas](https://github.com/geopandas/geopandas) and [Pyproj](https://github.com/pyproj4/pyproj) 30 | for georeferenced calculations, and on [NumPy](https://github.com/numpy/numpy) and [Xarray](https://github.com/pydata/xarray) for numerical analysis. It allows easy access to 31 | the functionalities of these packages through interfacing or composition, and quick inter-operability through object conversion.
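As a minimal sketch of what this interface looks like in practice — a hypothetical snippet reusing the Longyearbyen example files shipped with xDEM, as in its documentation gallery:

```python
import xdem

# Load the example DEMs shipped with xDEM
ref_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
tba_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem"))

# Terrain analysis: derive a slope map from the reference DEM
slope = xdem.terrain.slope(ref_dem)

# Coregistration: align the second DEM to the reference using Nuth and Kääb (2011)
aligned_dem = xdem.coreg.NuthKaab().fit_and_apply(ref_dem, tba_dem)

# Elevation change: a simple object-based DEM difference
dh = ref_dem - aligned_dem
```

All objects above stay georeferenced, so the GeoUtils-inherited methods mentioned above (reprojection, cropping, masking) apply to them directly.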
32 | 33 | If you are looking for an accessible Python package to write the Python equivalent of your [GDAL](https://gdal.org/) command lines, or of your 34 | [QGIS](https://www.qgis.org/en/site/) analysis pipeline **without a steep learning curve** on Python GIS syntax, xDEM is perfect for you! For more advanced 35 | users, xDEM also aims at being efficient and scalable by supporting lazy loading and parallel computing (ongoing). 36 | 37 | ## Documentation 38 | 39 | For a quick start, full feature description or search through the API, see xDEM's documentation at: https://xdem.readthedocs.io. 40 | 41 | ## Installation 42 | 43 | ### With `mamba` 44 | 45 | ```bash 46 | mamba install -c conda-forge xdem 47 | ``` 48 | See [mamba's documentation](https://mamba.readthedocs.io/en/latest/) to install `mamba`, which will solve your environment much faster than `conda`. 49 | 50 | ### With `pip` 51 | 52 | ```bash 53 | pip install xdem 54 | ``` 55 | 56 | ## Citing methods implemented in the package 57 | 58 | When using a method implemented in xDEM, please **cite both the package and the related study**: 59 | 60 | Citing xDEM: [![Zenodo](https://zenodo.org/badge/doi/10.5281/zenodo.4809697.svg)](https://zenodo.org/doi/10.5281/zenodo.4809697) 61 | 62 | Citing the related study: 63 | 64 | - **Coregistration**: 65 | - Horizontal shift from aspect/slope relationship of *[Nuth and Kääb (2011)](https://doi.org/10.5194/tc-5-271-2011)*, 66 | - Iterative closest point (ICP) of *[Besl and McKay (1992)](http://dx.doi.org/10.1109/34.121791)*, 67 | - **Bias correction**: 68 | - Along-track multi-sinusoidal noise by basin-hopping of *[Girod et al. (2017)](https://doi.org/10.3390/rs9070704)*, 69 | - **Uncertainty analysis**: 70 | - Heteroscedasticity and multi-range correlations from stable terrain of *[Hugonnet et al. (2022)](https://doi.org/10.1109/JSTARS.2022.3188922)*, 71 | - **Terrain attributes**: 72 | - Slope, aspect and hillshade of either *[Horn (1981)](http://dx.doi.org/10.1109/PROC.1981.11918)* or *[Zevenbergen and Thorne (1987)](http://dx.doi.org/10.1002/esp.3290120107)*, 73 | - Profile, plan and maximum curvature of *[Zevenbergen and Thorne (1987)](http://dx.doi.org/10.1002/esp.3290120107)*, 74 | - Topographic position index of *[Weiss (2001)](http://www.jennessent.com/downloads/TPI-poster-TNC_18x22.pdf)*, 75 | - Terrain ruggedness index of either *[Riley et al. (1999)](http://download.osgeo.org/qgis/doc/reference-docs/Terrain_Ruggedness_Index.pdf)* or *[Wilson et al. (2007)](http://dx.doi.org/10.1080/01490410701295962)*, 76 | - Roughness of *[Dartnell (2000)](http://dx.doi.org/10.14358/PERS.70.9.1081)*, 77 | - Rugosity of *[Jenness (2004)](https://doi.org/10.2193/0091-7648(2004)032[0829:CLSAFD]2.0.CO;2)*, 78 | - Fractal roughness of *[Taud and Parrot (2005)](https://doi.org/10.4000/geomorphologie.622)*. 79 | 80 | ## Contributing 81 | 82 | We welcome new contributions, and will happily help you integrate your own DEM routines into xDEM! 83 | 84 | After discussing a new feature or bug fix in an issue, you can open a PR to xDEM with the following steps (a command-line sketch follows below): 85 | 86 | 1. Fork the repository, make a feature branch and push changes. 87 | 2. When ready, submit a pull request from the feature branch of your fork to `GlacioHack/xdem:main`. 88 | 3. The PR will be reviewed by at least one maintainer, discussed, then merged. 89 | 90 | More details on [our contributing page](CONTRIBUTING.md).
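A sketch of what steps 1–2 can look like on the command line, with `your_username` and the branch name being placeholders:

```bash
# Clone your fork (hypothetical username) and create a feature branch
git clone https://github.com/your_username/xdem
cd xdem
git checkout -b my-feature

# Commit and push your changes to the fork
git commit -am "Describe my feature"
git push -u origin my-feature
# Then open a pull request to GlacioHack/xdem:main on GitHub
```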
91 | -------------------------------------------------------------------------------- /binder/apt.txt: -------------------------------------------------------------------------------- 1 | libgl1-mesa-glx 2 | -------------------------------------------------------------------------------- /binder/environment.yml: -------------------------------------------------------------------------------- 1 | ../environment.yml -------------------------------------------------------------------------------- /binder/extra-environment.yml: -------------------------------------------------------------------------------- 1 | channels: 2 | - conda-forge 3 | dependencies: 4 | - jupytext 5 | - myst-nb 6 | -------------------------------------------------------------------------------- /binder/postBuild: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e # To avoid silent errors 3 | 4 | # ${MAMBA_EXE} env update -p ${NB_PYTHON_PREFIX} --file "environment.yml" 5 | pip install -e . 6 | ${MAMBA_EXE} env update -p ${NB_PYTHON_PREFIX} --file "binder/extra-environment.yml" 7 | wget https://raw.githubusercontent.com/mwouts/jupytext/main/binder/labconfig/default_setting_overrides.json -P ~/.jupyter/labconfig/ # To automatically open Markdown files as notebooks with Jupytext, see https://github.com/mwouts/jupytext 8 | -------------------------------------------------------------------------------- /dev-environment.yml: -------------------------------------------------------------------------------- 1 | name: xdem-dev 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python>=3.10,<3.13 6 | - geopandas>=0.12.0 7 | - numba=0.* 8 | - numpy>=1,<3 9 | - matplotlib=3.* 10 | - pyproj>=3.4,<4 11 | - rasterio>=1.3,<2 12 | - scipy=1.* 13 | - tqdm 14 | - scikit-gstat>=1.0.18,<1.1 15 | - geoutils=0.1.16 16 | - affine 17 | - pandas 18 | - pyogrio 19 | - shapely 20 | 21 | # Development-specific, to mirror manually in setup.cfg [options.extras_require]. 22 | - pip 23 | 24 | # Optional dependencies 25 | - pytransform3d 26 | - pyyaml 27 | - scikit-learn 28 | 29 | # Test dependencies 30 | - pytest 31 | - pytest-xdist 32 | - pyyaml 33 | - flake8 34 | - pylint 35 | 36 | # Doc dependencies 37 | - sphinx 38 | - pydata-sphinx-theme 39 | - sphinx-book-theme>=1.0 40 | - sphinxcontrib-programoutput 41 | - sphinx-design 42 | - sphinx-autodoc-typehints 43 | - sphinx-gallery 44 | - autovizwidget 45 | - graphviz 46 | - myst-nb 47 | - numpydoc 48 | 49 | - pip: 50 | - -e ./ 51 | 52 | # To run CI against latest GeoUtils 53 | # - git+https://github.com/rhugonnet/geoutils.git 54 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | clean: 16 | echo "Removing build files..." 
17 | if [ -d "$(BUILDDIR)" ]; then rm -r "$(BUILDDIR)"; fi 18 | if [ -d "$(SOURCEDIR)/auto_examples" ]; then rm -r "$(SOURCEDIR)/auto_examples"; fi 19 | if [ -d "$(SOURCEDIR)/gen_modules" ]; then rm -r "$(SOURCEDIR)/gen_modules"; fi 20 | 21 | .PHONY: help Makefile 22 | 23 | # Catch-all target: route all unknown targets to Sphinx using the new 24 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 25 | %: Makefile 26 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 27 | -------------------------------------------------------------------------------- /doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /doc/source/_static/css/custom.css: -------------------------------------------------------------------------------- 1 | /* Workaround for wrong dark mode of toggle button: https://github.com/executablebooks/MyST-NB/issues/523 */ 2 | div.cell details.hide > summary { 3 | background-color: var(--pst-color-surface); 4 | } 5 | 6 | div.cell details[open].above-input div.cell_input { 7 | border-top: None; 8 | } 9 | -------------------------------------------------------------------------------- /doc/source/_static/nasa_logo.svg: -------------------------------------------------------------------------------- (SVG markup stripped from this dump; 53 empty numbered lines omitted) -------------------------------------------------------------------------------- /doc/source/_templates/module.md: -------------------------------------------------------------------------------- 1 | {{ fullname }} 2 | {{ underline }} 3 | 4 | ```{eval-rst} 5 | .. automodule:: {{ fullname }} 6 | 7 | .. contents:: Contents 8 | :local: 9 | 10 | {% block functions %} 11 | {% if functions %} 12 | 13 | Functions 14 | ========= 15 | 16 | {% for item in functions %} 17 | 18 | {{item}} 19 | {{ "-" * (item | length) }} 20 | 21 | .. autofunction:: {{ item }} 22 | 23 | .. _sphx_glr_backref_{{fullname}}.{{item}}: 24 | 25 | .. minigallery:: {{fullname}}.{{item}} 26 | :add-heading: 27 | 28 | {%- endfor %} 29 | {% endif %} 30 | {% endblock %} 31 | 32 | {% block classes %} 33 | {% if classes %} 34 | 35 | Classes 36 | ======= 37 | 38 | {% for item in classes %} 39 | 40 | {{item}} 41 | {{ "-" * (item | length) }} 42 | 43 | .. 
autoclass:: {{ item }} 44 | :show-inheritance: 45 | :special-members: __init__ 46 | :members: 47 | 48 | .. _sphx_glr_backref_{{fullname}}.{{item}}: 49 | 50 | .. minigallery:: {{fullname}}.{{item}} 51 | :add-heading: 52 | 53 | {%- endfor %} 54 | {% endif %} 55 | {% endblock %} 56 | 57 | {% block exceptions %} 58 | {% if exceptions %} 59 | 60 | Exceptions 61 | ========== 62 | 63 | .. autosummary:: 64 | {% for item in exceptions %} 65 | {{ item }} 66 | {%- endfor %} 67 | {% endif %} 68 | {% endblock %} 69 | ``` 70 | -------------------------------------------------------------------------------- /doc/source/_templates/module.rst: -------------------------------------------------------------------------------- 1 | {{ fullname }} 2 | {{ underline }} 3 | 4 | .. automodule:: {{ fullname }} 5 | 6 | .. contents:: Contents 7 | :local: 8 | 9 | {% block functions %} 10 | {% if functions %} 11 | 12 | Functions 13 | ========= 14 | 15 | {% for item in functions %} 16 | 17 | {{item}} 18 | {{ "-" * (item | length) }} 19 | 20 | .. autofunction:: {{ item }} 21 | 22 | .. _sphx_glr_backref_{{fullname}}.{{item}}: 23 | 24 | .. minigallery:: {{fullname}}.{{item}} 25 | :add-heading: 26 | 27 | {%- endfor %} 28 | {% endif %} 29 | {% endblock %} 30 | 31 | {% block classes %} 32 | {% if classes %} 33 | 34 | Classes 35 | ======= 36 | 37 | {% for item in classes %} 38 | 39 | {{item}} 40 | {{ "-" * (item | length) }} 41 | 42 | .. autoclass:: {{ item }} 43 | :show-inheritance: 44 | :special-members: __init__ 45 | :members: 46 | 47 | .. _sphx_glr_backref_{{fullname}}.{{item}}: 48 | 49 | .. minigallery:: {{fullname}}.{{item}} 50 | :add-heading: 51 | 52 | {%- endfor %} 53 | {% endif %} 54 | {% endblock %} 55 | 56 | {% block exceptions %} 57 | {% if exceptions %} 58 | 59 | Exceptions 60 | ========== 61 | 62 | .. autosummary:: 63 | {% for item in exceptions %} 64 | {{ item }} 65 | {%- endfor %} 66 | {% endif %} 67 | {% endblock %} 68 | -------------------------------------------------------------------------------- /doc/source/about_xdem.md: -------------------------------------------------------------------------------- 1 | (about-xdem)= 2 | 3 | # About xDEM 4 | 5 | ## What is xDEM? 6 | 7 | xDEM is a Python package for the analysis of elevation data, and in particular that of digital elevation models (DEMs), 8 | with its name standing for _cross-DEM analysis_[^sn1] and echoing its dependency on [Xarray](https://docs.xarray.dev/en/stable/). 9 | 10 | [^sn1]: Several core features of xDEM, in particular coregistration and uncertainty analysis, rely specifically on cross-analysis of elevation data over static surfaces. 11 | 12 | ## Why use xDEM? 13 | 14 | xDEM implements a wide range of high-level operations required for analyzing elevation data in a consistent framework, 15 | tested to ensure the accuracy of these operations. 16 | 17 | It has three main focus points: 18 | 19 | 1. Having an **easy and intuitive interface** based on the principle of least knowledge, 20 | 2. Providing **statistically robust methods** for reliable quantitative analysis, 21 | 3. Allowing **modular user input** to adapt to most applications. 22 | 23 | Although modularity can sometimes hamper performance, we also aim to **preserve scalability** as much as possible[^sn2]. 24 | 25 | [^sn2]: Out-of-memory, parallelizable computations relying on Dask are planned for 2025! 26 | 27 | We take particular care to verify the accuracy of our methods. 
For instance, our terrain attributes, 28 | which have their own modular Python-based implementation, are tested to match exactly 29 | [gdaldem](https://gdal.org/programs/gdaldem.html) (slope, aspect, hillshade, roughness) and 30 | [RichDEM](https://richdem.readthedocs.io/en/latest/) (curvatures). 31 | 32 | ## Who is behind xDEM? 33 | 34 | xDEM was created by a group of researchers with expertise in elevation data analysis for change detection applied to glaciology. 35 | Nowadays, its development is **jointly led by researchers in elevation data analysis** (including funding from NASA and SNSF) **and 36 | engineers from CNES** (French Space Agency). 37 | 38 | Most contributors and users are scientists or industry professionals working in **various fields of Earth observation**. 39 | 40 | 41 | ```{note} 42 | :class: tip 43 | :class: margin 44 | 45 | xDEM is **merging efforts with CNES's [demcompare](https://github.com/CNES/demcompare)** to combine the best of both tools into one! 46 | ``` 47 | 48 | ::::{grid} 49 | :reverse: 50 | 51 | :::{grid-item} 52 | :columns: 4 53 | :child-align: center 54 | 55 | ```{image} ./_static/nasa_logo.svg 56 | :width: 200px 57 | :class: dark-light 58 | ``` 59 | 60 | ::: 61 | 62 | :::{grid-item} 63 | :columns: 4 64 | :child-align: center 65 | 66 | ```{image} ./_static/snsf_logo.svg 67 | :width: 220px 68 | :class: only-light 69 | ``` 70 | 71 | ```{image} ./_static/snsf_logo_dark.svg 72 | :width: 220px 73 | :class: only-dark 74 | ``` 75 | 76 | ::: 77 | 78 | :::{grid-item} 79 | :columns: 4 80 | :child-align: center 81 | 82 | ```{image} ./_static/cnes_logo.svg 83 | :width: 200px 84 | :class: only-light 85 | ``` 86 | 87 | ```{image} ./_static/cnes_logo_dark.svg 88 | :width: 200px 89 | :class: only-dark 90 | ``` 91 | 92 | ::: 93 | 94 | 95 | :::: 96 | 97 | More details about the people behind xDEM, funding sources, and the package's objectives can be found on the **{ref}`credits` pages**. 98 | -------------------------------------------------------------------------------- /doc/source/authors.md: -------------------------------------------------------------------------------- 1 | (authors)= 2 | # Authors 3 | 4 | © 2024 **xDEM developers**. 5 | 6 | **xDEM** is licensed under the permissive Apache 2.0 license (see the [LICENSE file](license.md) or below). 7 | 8 | All contributors listed in this document are part of the **xDEM developers**, and their 9 | contributions are subject to the project's copyright under the terms of the 10 | [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0). 11 | 12 | Please refer to the [AUTHORS file](https://github.com/GlacioHack/xdem/blob/main/AUTHORS.md) for the complete and detailed list of authors and their contributions. 13 | -------------------------------------------------------------------------------- /doc/source/citation.md: -------------------------------------------------------------------------------- 1 | (citation)= 2 | 3 | # Citing and method overview 4 | 5 | When using a method implemented in xDEM, one should cite both the package and the original study behind the method (if there is any)! 6 | 7 | ## Citing xDEM 8 | 9 | To cite the package, use the Zenodo DOI: [![Zenodo](https://zenodo.org/badge/doi/10.5281/zenodo.4809697.svg)](https://zenodo.org/doi/10.5281/zenodo.4809697). 10 | 11 | ## Method overview 12 | 13 | For citation and other purposes, here's an overview of all methods implemented in the package and their references, where they exist. 14 | More details are available on each feature page! 
15 | 16 | ### Terrain attributes 17 | 18 | ```{list-table} 19 | :widths: 1 2 20 | :header-rows: 1 21 | :stub-columns: 1 22 | 23 | * - Method 24 | - Reference 25 | * - Slope, aspect and hillshade 26 | - [Horn (1981)](http://dx.doi.org/10.1109/PROC.1981.11918) or [Zevenbergen and Thorne (1987)](http://dx.doi.org/10.1002/esp.3290120107) 27 | * - Curvatures 28 | - [Zevenbergen and Thorne (1987)](http://dx.doi.org/10.1002/esp.3290120107) 29 | * - Topographic position index 30 | - [Weiss (2001)](http://www.jennessent.com/downloads/TPI-poster-TNC_18x22.pdf) 31 | * - Terrain ruggedness index 32 | - [Riley et al. (1999)](http://download.osgeo.org/qgis/doc/reference-docs/Terrain_Ruggedness_Index.pdf) or [Wilson et al. (2007)](http://dx.doi.org/10.1080/01490410701295962) 33 | * - Roughness 34 | - [Dartnell (2000)](https://environment.sfsu.edu/node/11292) 35 | * - Rugosity 36 | - [Jenness (2004)](https://doi.org/10.2193/0091-7648(2004)032[0829:CLSAFD]2.0.CO;2) 37 | * - Fractal roughness 38 | - [Taud and Parrot (2005)](https://doi.org/10.4000/geomorphologie.622) 39 | ``` 40 | 41 | ### Coregistration 42 | 43 | ```{list-table} 44 | :widths: 1 2 45 | :header-rows: 1 46 | :stub-columns: 1 47 | 48 | * - Method 49 | - Reference 50 | * - Nuth and Kääb 51 | - [Nuth and Kääb (2011)](https://doi.org/10.5194/tc-5-271-2011) 52 | * - Dh minimization 53 | - N/A 54 | * - Least Z-difference 55 | - [Rosenholm and Torlegård (1988)](https://www.asprs.org/wp-content/uploads/pers/1988journal/oct/1988_oct_1385-1389.pdf) 56 | * - Iterative closest point 57 | - [Besl and McKay (1992)](https://doi.org/10.1117/12.57955), [Chen and Medioni (1992)](https://doi.org/10.1016/0262-8856(92)90066-C) 58 | * - Coherent point drift 59 | - [Myronenko and Song (2010)](https://doi.org/10.1109/TPAMI.2010.46) 60 | * - Vertical shift 61 | - N/A 62 | ``` 63 | 64 | ### Bias-correction 65 | 66 | ```{list-table} 67 | :widths: 1 2 68 | :header-rows: 1 69 | :stub-columns: 1 70 | 71 | * - Method 72 | - Reference 73 | * - Deramp 74 | - N/A 75 | * - Directional bias (sinusoids) 76 | - [Girod et al. (2017)](https://doi.org/10.3390/rs9070704) 77 | * - Terrain bias (curvature) 78 | - [Gardelle et al. (2012)](https://doi.org/10.3189/2012JoG11J175) 79 | * - Terrain bias (elevation) 80 | - [Nuth and Kääb (2011)](https://doi.org/10.5194/tc-5-271-2011) 81 | * - Vertical shift 82 | - N/A 83 | ``` 84 | 85 | ### Gap-filling 86 | 87 | ```{list-table} 88 | :widths: 1 2 89 | :header-rows: 1 90 | :stub-columns: 1 91 | 92 | * - Method 93 | - Reference 94 | * - Bilinear 95 | - N/A 96 | * - Local and regional hypsometric 97 | - [Arendt et al. (2002)](https://doi.org/10.1126/science.1072497), [McNabb et al. (2019)](https://tc.copernicus.org/articles/13/895/2019/) 98 | ``` 99 | 100 | 101 | ### Uncertainty analysis 102 | 103 | ```{list-table} 104 | :widths: 1 1 105 | :header-rows: 1 106 | :stub-columns: 1 107 | 108 | * - Method 109 | - Reference 110 | * - R2009 (nested ranges, circular approx.) 111 | - [Rolstad et al. (2009)](http://dx.doi.org/10.3189/002214309789470950) 112 | * - H2022 (heterosc., nested ranges, spatial propag.) 113 | - [Hugonnet et al. 
(2022)](http://dx.doi.org/10.1109/JSTARS.2022.3188922) 114 | ``` 115 | -------------------------------------------------------------------------------- /doc/source/code/comparison_plot_local_hypsometric_interpolation.py: -------------------------------------------------------------------------------- 1 | """Plot an example of local hypsometric interpolation at Scott Turnerbreen, Svalbard.""" 2 | 3 | import geoutils as gu 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | 7 | import xdem 8 | 9 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 10 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 11 | outlines_1990 = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 12 | 13 | ddem = xdem.dDEM(dem_2009 - dem_1990, start_time=np.datetime64("1990-08-01"), end_time=np.datetime64("2009-08-01")) 14 | 15 | ddem.data /= 2009 - 1990 16 | 17 | scott_1990 = outlines_1990.query("NAME == 'Scott Turnerbreen'") 18 | mask = scott_1990.create_mask(ddem) 19 | 20 | ddem_bins = xdem.volume.hypsometric_binning(ddem[mask], dem_2009[mask]) 21 | stds = xdem.volume.hypsometric_binning(ddem[mask], dem_2009[mask], aggregation_function=np.std) 22 | 23 | plt.figure(figsize=(8, 8)) 24 | plt.grid(zorder=0) 25 | plt.plot(ddem_bins["value"], ddem_bins.index.mid, linestyle="--", zorder=1) 26 | 27 | plt.barh( 28 | y=ddem_bins.index.mid, 29 | width=stds["value"], 30 | left=ddem_bins["value"] - stds["value"] / 2, 31 | height=(ddem_bins.index.left - ddem_bins.index.right) * 1, 32 | zorder=2, 33 | edgecolor="black", 34 | ) 35 | for bin in ddem_bins.index: 36 | plt.vlines(ddem_bins.loc[bin, "value"], bin.left, bin.right, color="black", zorder=3) 37 | 38 | plt.xlabel("Elevation change (m / a)") 39 | plt.twiny() 40 | plt.barh( 41 | y=ddem_bins.index.mid, 42 | width=ddem_bins["count"] / ddem_bins["count"].sum(), 43 | left=0, 44 | height=(ddem_bins.index.left - ddem_bins.index.right) * 1, 45 | zorder=2, 46 | alpha=0.2, 47 | ) 48 | plt.xlabel("Normalized area distribution (hypsometry)") 49 | 50 | plt.ylabel("Elevation (m a.s.l.)") 51 | 52 | plt.tight_layout() 53 | plt.show() 54 | -------------------------------------------------------------------------------- /doc/source/code/comparison_plot_regional_hypsometric_interpolation.py: -------------------------------------------------------------------------------- 1 | """Plot an example of regional hypsometric interpolation in central Svalbard.""" 2 | 3 | import geoutils as gu 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | 7 | import xdem 8 | 9 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 10 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 11 | outlines_1990 = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 12 | 13 | ddem = xdem.dDEM(dem_2009 - dem_1990, start_time=np.datetime64("1990-08-01"), end_time=np.datetime64("2009-08-01")) 14 | 15 | ddem.data /= 2009 - 1990 16 | 17 | mask = outlines_1990.create_mask(ddem) 18 | 19 | ddem_bins = xdem.volume.hypsometric_binning(ddem[mask], dem_2009[mask]) 20 | stds = xdem.volume.hypsometric_binning(ddem[mask], dem_2009[mask], aggregation_function=np.std) 21 | 22 | plt.figure(figsize=(8, 8)) 23 | plt.grid(zorder=0) 24 | 25 | 26 | plt.plot(ddem_bins["value"], ddem_bins.index.mid, linestyle="--", zorder=1) 27 | 28 | plt.barh( 29 | y=ddem_bins.index.mid, 30 | width=stds["value"], 31 | left=ddem_bins["value"] - stds["value"] / 2, 32 | height=(ddem_bins.index.left - ddem_bins.index.right) * 1, 33 | zorder=2, 34 
| edgecolor="black", 35 | ) 36 | for bin in ddem_bins.index: 37 | plt.vlines(ddem_bins.loc[bin, "value"], bin.left, bin.right, color="black", zorder=3) 38 | 39 | plt.xlabel("Elevation change (m / a)") 40 | plt.twiny() 41 | plt.barh( 42 | y=ddem_bins.index.mid, 43 | width=ddem_bins["count"] / ddem_bins["count"].sum(), 44 | left=0, 45 | height=(ddem_bins.index.left - ddem_bins.index.right) * 1, 46 | zorder=2, 47 | alpha=0.2, 48 | ) 49 | plt.xlabel("Normalized area distribution (hypsometry)") 50 | plt.ylabel("Elevation (m a.s.l.)") 51 | 52 | plt.tight_layout() 53 | plt.show() 54 | -------------------------------------------------------------------------------- /doc/source/code/comparison_plot_spatial_interpolation.py: -------------------------------------------------------------------------------- 1 | """Plot an example of spatial interpolation of randomly generated errors.""" 2 | 3 | import geoutils as gu 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | 7 | import xdem 8 | 9 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 10 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 11 | outlines_1990 = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 12 | 13 | ddem = xdem.dDEM(dem_2009 - dem_1990, start_time=np.datetime64("1990-08-01"), end_time=np.datetime64("2009-08-01")) 14 | # The example DEMs are void-free, so let's make some random voids. 15 | ddem.data.mask = np.zeros_like(ddem.data, dtype=bool)  # Reset the mask 16 | # Introduce 50000 NaNs randomly throughout the dDEM. 17 | ddem.data.mask.ravel()[np.random.default_rng(42).choice(ddem.data.size, 50000, replace=False)] = True 18 | 19 | ddem.interpolate(method="idw") 20 | 21 | ylim = (300, 100) 22 | xlim = (800, 1050) 23 | 24 | plt.figure(figsize=(8, 5)) 25 | plt.subplot(121) 26 | plt.imshow(ddem.data.squeeze(), cmap="coolwarm_r", vmin=-50, vmax=50) 27 | plt.ylim(ylim) 28 | plt.xlim(xlim) 29 | plt.axis("off") 30 | plt.title("dDEM with random voids") 31 | plt.subplot(122) 32 | plt.imshow(ddem.filled_data.squeeze(), cmap="coolwarm_r", vmin=-50, vmax=50) 33 | plt.ylim(ylim) 34 | plt.xlim(xlim) 35 | plt.axis("off") 36 | plt.title("IDW-interpolated dDEM") 37 | 38 | 39 | plt.tight_layout() 40 | plt.show() 41 | -------------------------------------------------------------------------------- /doc/source/code/intricacies_datatypes.py: -------------------------------------------------------------------------------- 1 | """Plot example of elevation data types for guide page.""" 2 | 3 | import matplotlib 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | 7 | import xdem 8 | 9 | # Open reference DEM and crop to small area 10 | ref_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 11 | ref_dem = ref_dem.crop( 12 | (ref_dem.bounds.left, ref_dem.bounds.bottom, ref_dem.bounds.left + 1000, ref_dem.bounds.bottom + 1000) 13 | ) 14 | 15 | # Get point cloud with 100 points 16 | ref_epc = ref_dem.to_pointcloud(subsample=100, random_state=42) 17 | 18 | f, ax = plt.subplots(2, 2, squeeze=False, sharex=True, sharey=True) 19 | # Plot 1: DEM 20 | ax[0, 0].set_title("DEM") 21 | ref_dem.plot(cmap="terrain", ax=ax[0, 0], vmin=280, vmax=420, cbar_title="Elevation (m)") 22 | plt.gca().set_xticklabels([]) 23 | plt.gca().set_yticklabels([]) 24 | plt.gca().set_aspect("equal") 25 | 26 | # Plot 2: EPC 27 | ax[0, 1].set_title("Elevation\npoint cloud") 28 | point = ref_epc.plot(column="b1", cmap="terrain", ax=ax[0, 1], vmin=280, vmax=420, cbar_title="Elevation (m)") 29 | 
plt.gca().set_xticklabels([]) 30 | plt.gca().set_yticklabels([]) 31 | plt.gca().set_aspect("equal") 32 | 33 | # Plot 3: TIN 34 | ax[1, 1].set_title("Elevation TIN") 35 | triang = matplotlib.tri.Triangulation(ref_epc.geometry.x.values, ref_epc.geometry.y.values) 36 | ax[1, 1].triplot(triang, color="gray", marker=".") 37 | scat = ax[1, 1].scatter( 38 | ref_epc.geometry.x.values, ref_epc.geometry.y.values, c=ref_epc["b1"].values, cmap="terrain", vmin=280, vmax=420 39 | ) 40 | plt.colorbar(mappable=scat, ax=ax[1, 1], label="Elevation (m)", pad=0.02) 41 | ax[1, 1].set_xticklabels([]) 42 | ax[1, 1].set_yticklabels([]) 43 | ax[1, 1].set_aspect("equal") 44 | 45 | # Plot 4: Contour 46 | ax[1, 0].set_title("Elevation contour") 47 | coords = ref_dem.coords(grid=False) 48 | cont = ax[1, 0].contour( 49 | np.flip(coords[0]), coords[1], np.flip(ref_dem.get_nanarray()), levels=15, cmap="terrain", vmin=280, vmax=420 50 | ) 51 | plt.colorbar(mappable=cont, ax=ax[1, 0], label="Elevation (m)", pad=0.02) 52 | ax[1, 0].set_xticklabels([]) 53 | ax[1, 0].set_yticklabels([]) 54 | ax[1, 0].set_aspect("equal") 55 | 56 | plt.suptitle("Types of elevation data") 57 | 58 | plt.tight_layout() 59 | plt.show() 60 | -------------------------------------------------------------------------------- /doc/source/code/robust_mean_std.py: -------------------------------------------------------------------------------- 1 | """Plot example of NMAD/median as robust estimators for guide page.""" 2 | 3 | import geoutils as gu 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | 7 | # Create example distribution 8 | dh_inliers = np.random.default_rng(42).normal(loc=-5, scale=3, size=10**6) 9 | 10 | # Add outliers 11 | dh_outliers = np.concatenate( 12 | ( 13 | np.repeat(-34, 600), 14 | np.repeat(-33, 1800), 15 | np.repeat(-32, 3600), 16 | np.repeat(-31, 8500), 17 | np.repeat(-30, 15000), 18 | np.repeat(-29, 9000), 19 | np.repeat(-28, 3800), 20 | np.repeat(-27, 1900), 21 | np.repeat(-26, 700), 22 | ) 23 | ) 24 | dh_all = np.concatenate((dh_inliers, dh_outliers)) 25 | 26 | # Get traditional and robust statistics on all data 27 | mean_dh = np.nanmean(dh_all) 28 | median_dh = np.nanmedian(dh_all) 29 | 30 | std_dh = np.nanstd(dh_all) 31 | nmad_dh = gu.stats.nmad(dh_all) 32 | 33 | # Get traditional and robust statistics on inlier data 34 | mean_dh_in = np.nanmean(dh_inliers) 35 | median_dh_in = np.nanmedian(dh_inliers) 36 | 37 | std_dh_in = np.nanstd(dh_inliers) 38 | nmad_dh_in = gu.stats.nmad(dh_inliers) 39 | 40 | # Plot 41 | fig, ax = plt.subplots() 42 | h1 = ax.hist(dh_inliers, bins=np.arange(-40, 25), density=False, color="gray", label="Inlier data") 43 | h2 = ax.hist(dh_outliers, bins=np.arange(-40, 25), density=False, color="red", label="Outlier data") 44 | 45 | max_count = max(h1[0]) 46 | ax.vlines(x=[mean_dh_in, median_dh_in], ymin=0, ymax=max_count, colors=["tab:gray", "black"]) 47 | ax.vlines( 48 | x=[mean_dh_in - std_dh_in, mean_dh_in + std_dh_in, median_dh_in - nmad_dh_in, median_dh_in + nmad_dh_in], 49 | ymin=0, 50 | ymax=max_count, 51 | colors=["gray", "gray", "black", "black"], 52 | linestyles="dashed", 53 | ) 54 | 55 | ax.vlines(x=[mean_dh, median_dh], ymin=0, ymax=max_count, colors=["red", "darkred"]) 56 | ax.vlines( 57 | x=[mean_dh - std_dh, mean_dh + std_dh, median_dh - nmad_dh, median_dh + nmad_dh], 58 | ymin=0, 59 | ymax=max_count, 60 | colors=["red", "red", "darkred", "darkred"], 61 | linestyles="dashed", 62 | ) 63 | 64 | ax.set_xlim((-40, 25)) 65 | ax.set_xlabel("Elevation differences (m)") 66 | 
ax.set_ylabel("Count") 67 | 68 | from matplotlib.patches import Rectangle 69 | 70 | handles = [ 71 | Rectangle((0, 0), 1, 1, color=h1[-1][0].get_facecolor(), alpha=1), 72 | Rectangle((0, 0), 1, 1, color=h2[-1][0].get_facecolor(), alpha=1), 73 | ] 74 | labels = ["Inlier data", "Outlier data"] 75 | 76 | data_legend = ax.legend(handles=handles, labels=labels, loc="upper right") 77 | ax.add_artist(data_legend) 78 | 79 | # Legends 80 | p1 = plt.plot([], [], color="red", label=f"Mean: {np.round(mean_dh, 2)} m") 81 | p2 = plt.plot([], [], color="red", linestyle="dashed", label=f"±STD: {np.round(std_dh, 2)} m") 82 | p3 = plt.plot([], [], color="darkred", label=f"Median: {np.round(median_dh, 2)} m") 83 | p4 = plt.plot([], [], color="darkred", linestyle="dashed", label=f"±NMAD: {np.round(nmad_dh, 2)} m") 84 | first_legend = ax.legend(handles=[p[0] for p in [p1, p2, p3, p4]], loc="center right", title="All data") 85 | ax.add_artist(first_legend) 86 | 87 | p1 = plt.plot([], [], color="gray", label=f"Mean: {np.round(mean_dh_in, 2)} m") 88 | p2 = plt.plot([], [], color="gray", linestyle="dashed", label=f"±STD: {np.round(std_dh_in, 2)} m") 89 | p3 = plt.plot([], [], color="black", label=f"Median: {np.round(median_dh_in, 2)} m") 90 | p4 = plt.plot([], [], color="black", linestyle="dashed", label=f"±NMAD: {np.round(nmad_dh_in, 2)} m") 91 | second_legend = ax.legend(handles=[p[0] for p in [p1, p2, p3, p4]], loc="center left", title="Inlier data") 92 | ax.add_artist(second_legend) 93 | 94 | ax.set_title("Effect of outliers on estimating\ncentral tendency and dispersion") 95 | -------------------------------------------------------------------------------- /doc/source/code/robust_vario.py: -------------------------------------------------------------------------------- 1 | """Plot example of Dowd variogram as robust estimator for guide page.""" 2 | 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | from skgstat import OrdinaryKriging, Variogram 6 | 7 | import xdem 8 | 9 | # Inspired by test_variogram in skgstat 10 | # Generate some random but spatially correlated data with a range of ~20 11 | np.random.seed(42) 12 | c = np.random.default_rng(41).random((50, 2)) * 60 13 | np.random.seed(42) 14 | v = np.random.default_rng(42).normal(10, 4, 50) 15 | 16 | V = Variogram(c, v).describe() 17 | V["effective_range"] = 20 18 | OK = OrdinaryKriging(V, coordinates=c, values=v) 19 | 20 | c = np.array(np.meshgrid(np.arange(60), np.arange(60).T)).reshape(2, 60 * 60).T 21 | dh = OK.transform(c) 22 | dh = dh.reshape((60, 60)) 23 | 24 | # Add outliers 25 | dh_outliers = dh.copy() 26 | dh_outliers[0:6, 0:6] = -20 27 | 28 | # Derive empirical variogram for Dowd and Matheron 29 | df_inl_matheron = xdem.spatialstats.sample_empirical_variogram( 30 | dh, estimator="matheron", gsd=1, random_state=42, subsample=2000 31 | ) 32 | df_inl_dowd = xdem.spatialstats.sample_empirical_variogram(dh, estimator="dowd", gsd=1, random_state=42, subsample=2000) 33 | 34 | df_all_matheron = xdem.spatialstats.sample_empirical_variogram( 35 | dh_outliers, estimator="matheron", gsd=1, random_state=42, subsample=2000 36 | ) 37 | df_all_dowd = xdem.spatialstats.sample_empirical_variogram( 38 | dh_outliers, estimator="dowd", gsd=1, random_state=42, subsample=2000 39 | ) 40 | 41 | fig, ax = plt.subplots() 42 | 43 | ax.plot(df_inl_matheron.lags, df_inl_matheron.exp, color="black", marker="x") 44 | ax.plot(df_inl_dowd.lags, df_inl_dowd.exp, color="black", linestyle="dashed", marker="x") 45 | ax.plot(df_all_matheron.lags, df_all_matheron.exp, 
color="red", marker="x") 46 | ax.plot(df_all_dowd.lags, df_all_dowd.exp, color="red", linestyle="dashed", marker="x") 47 | 48 | 49 | p1 = plt.plot([], [], color="darkgrey", label="Matheron", marker="x") 50 | p2 = plt.plot([], [], color="darkgrey", linestyle="dashed", label="Dowd", marker="x") 51 | first_legend = ax.legend(handles=[p[0] for p in [p1, p2]], loc="lower right") 52 | ax.add_artist(first_legend) 53 | 54 | p1 = plt.plot([], [], color="black", label="Inlier data") 55 | p2 = plt.plot([], [], color="red", label="Inlier data + outlier data \n(1% of data replaced by 10 NMAD)") 56 | second_legend = ax.legend(handles=[p[0] for p in [p1, p2]], loc="upper left") 57 | ax.add_artist(second_legend) 58 | 59 | ax.set_xlabel("Spatial lag (m)") 60 | ax.set_ylabel("Variance of elevation changes (m²)") 61 | ax.set_ylim((0, 15)) 62 | ax.set_xlim((0, 40)) 63 | 64 | ax.set_title("Effect of outliers on estimating\nspatial correlation") 65 | -------------------------------------------------------------------------------- /doc/source/code/spatialstats_heterosc_slope.py: -------------------------------------------------------------------------------- 1 | """Code example for spatial statistics""" 2 | 3 | import geoutils as gu 4 | 5 | import xdem 6 | 7 | # Load data 8 | dh = gu.Raster(xdem.examples.get_path("longyearbyen_ddem")) 9 | ref_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 10 | glacier_mask = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 11 | mask = glacier_mask.create_mask(dh) 12 | 13 | # Get slope for non-stationarity 14 | slope = xdem.terrain.get_terrain_attribute(dem=ref_dem, attribute=["slope"]) 15 | 16 | # Keep only stable terrain data 17 | dh.load() 18 | dh.set_mask(mask) 19 | 20 | # Estimate the measurement error by bin of slope, using the NMAD as robust estimator 21 | df_ns = xdem.spatialstats.nd_binning( 22 | dh.data.ravel(), 23 | list_var=[slope.data.ravel()], 24 | list_var_names=["slope"], 25 | statistics=["count", gu.stats.nmad], 26 | list_var_bins=30, 27 | ) 28 | 29 | xdem.spatialstats.plot_1d_binning(df_ns, "slope", "nmad", "Slope (degrees)", "Random elevation error\n($1\\sigma$, m)") 30 | -------------------------------------------------------------------------------- /doc/source/code/spatialstats_standardizing.py: -------------------------------------------------------------------------------- 1 | """Documentation plot illustrating standardization of a distribution""" 2 | 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | 6 | # Example x vector 7 | mu = 15 8 | sig = 5 9 | rng = np.random.default_rng(42) 10 | y = rng.normal(mu, sig, size=300) 11 | 12 | fig, ax1 = plt.subplots(figsize=(8, 3)) 13 | 14 | # Original histogram 15 | ax1.hist(y, color="tab:blue", edgecolor="white", linewidth=0.5, alpha=0.7) 16 | ax1.vlines(mu, ymin=0, ymax=90, color="tab:blue", linestyle="dashed", lw=2) 17 | ax1.vlines([mu - 2 * sig, mu + 2 * sig], ymin=0, ymax=90, colors=["tab:blue", "tab:blue"], linestyles="dotted", lw=2) 18 | ax1.annotate( 19 | "Original\ndata $x$\n$\\mu_{x} = 15$\n$\\sigma_{x} = 5$", 20 | xy=(mu + 0.5, 85), 21 | xytext=(mu + 5, 110), 22 | arrowprops=dict(color="tab:blue", width=0.5, headwidth=8), 23 | color="tab:blue", 24 | fontweight="bold", 25 | ha="left", 26 | ) 27 | ax1.spines["right"].set_visible(False) 28 | ax1.spines["top"].set_visible(False) 29 | ax1.spines["left"].set_visible(False) 30 | ax1.set_yticks([]) 31 | ax1.set_ylim((0, 130)) 32 | 33 | # Standardized histogram 34 | ax1.hist((y - mu) / sig, color="tab:olive", 
edgecolor="white", linewidth=0.5, alpha=0.7) 35 | ax1.vlines(0, ymin=0, ymax=90, color="tab:olive", linestyle="dashed", lw=2) 36 | ax1.vlines([-2, 2], ymin=0, ymax=90, colors=["tab:olive", "tab:olive"], linestyles="dotted", lw=2) 37 | ax1.annotate( 38 | "Standardized\ndata $z$\n$\\mu_{z} = 0$\n$\\sigma_{z} = 1$", 39 | xy=(-0.3, 85), 40 | xytext=(-5, 110), 41 | arrowprops=dict(color="tab:olive", width=0.5, headwidth=8), 42 | color="tab:olive", 43 | fontweight="bold", 44 | ha="left", 45 | ) 46 | ax1.spines["right"].set_visible(False) 47 | ax1.spines["top"].set_visible(False) 48 | ax1.spines["left"].set_visible(False) 49 | ax1.set_yticks([]) 50 | ax1.set_ylim((0, 130)) 51 | 52 | ax1.annotate( 53 | "", 54 | xy=(0, 65), 55 | xytext=(mu, 65), 56 | arrowprops=dict(arrowstyle="-|>", connectionstyle="arc3,rad=0.2", fc="w"), 57 | color="black", 58 | ) 59 | ax1.text( 60 | mu / 2, 61 | 90, 62 | "Standardization:\n$z = \\frac{x - \\mu}{\\sigma}$", 63 | color="black", 64 | ha="center", 65 | fontsize=14, 66 | fontweight="bold", 67 | ) 68 | ax1.plot([], [], color="tab:gray", linestyle="dashed", label="Mean") 69 | ax1.plot([], [], color="tab:gray", linestyle="dotted", label="Standard\ndeviation (2$\\sigma$)") 70 | ax1.legend(loc="center right") 71 | -------------------------------------------------------------------------------- /doc/source/code/spatialstats_stationarity_assumption.py: -------------------------------------------------------------------------------- 1 | """Documentation plot illustrating stationarity of mean and variance""" 2 | 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | 6 | import xdem 7 | 8 | # Example x vector 9 | x = np.linspace(0, 1, 200) 10 | 11 | sig = 0.2 12 | rng = np.random.default_rng(42) 13 | y_rand1 = rng.normal(0, sig, size=len(x)) 14 | y_rand2 = rng.normal(0, sig, size=len(x)) 15 | y_rand3 = rng.normal(0, sig, size=len(x)) 16 | 17 | 18 | y_mean = np.array([0.5 * xval - 0.25 if xval > 0.5 else 0.5 * (1 - xval) - 0.25 for xval in x]) 19 | 20 | fac_y_std = 0.5 + 2 * x 21 | 22 | 23 | fig, (ax1, ax2, ax3) = plt.subplots(ncols=3, figsize=(8, 4)) 24 | 25 | # Stationary mean and variance 26 | ax1.plot(x, y_rand1, color="tab:blue", linewidth=0.5) 27 | ax1.hlines(0, xmin=0, xmax=1, color="black", label="Mean") 28 | ax1.hlines( 29 | [-2 * sig, 2 * sig], 30 | xmin=0, 31 | xmax=1, 32 | colors=["tab:gray", "tab:gray"], 33 | label="Standard deviation", 34 | linestyles="dashed", 35 | ) 36 | ax1.set_xlim((0, 1)) 37 | ax1.set_title("Stationary mean\nStationary variance") 38 | # ax1.legend() 39 | ax1.spines["right"].set_visible(False) 40 | ax1.spines["top"].set_visible(False) 41 | ax1.set_ylim((-1, 1)) 42 | ax1.set_xticks([]) 43 | ax1.set_yticks([]) 44 | ax1.plot(1, 0, ">k", transform=ax1.transAxes, clip_on=False) 45 | ax1.plot(0, 1, "^k", transform=ax1.transAxes, clip_on=False) 46 | 47 | # Non-stationary mean and stationary variance 48 | ax2.plot(x, y_rand2 + y_mean, color="tab:olive", linewidth=0.5) 49 | ax2.plot(x, y_mean, color="black", label="Mean") 50 | ax2.plot(x, y_mean + 2 * sig, color="tab:gray", label="Dispersion (2$\\sigma$)", linestyle="dashed") 51 | ax2.plot(x, y_mean - 2 * sig, color="tab:gray", linestyle="dashed") 52 | ax2.set_xlim((0, 1)) 53 | ax2.set_title("Non-stationary mean\nStationary variance") 54 | ax2.legend(loc="lower center") 55 | ax2.spines["right"].set_visible(False) 56 | ax2.spines["top"].set_visible(False) 57 | ax2.set_xticks([]) 58 | ax2.set_yticks([]) 59 | ax2.set_ylim((-1, 1)) 60 | ax2.plot(1, 0, ">k", transform=ax2.transAxes, clip_on=False) 
61 | ax2.plot(0, 1, "^k", transform=ax2.transAxes, clip_on=False) 62 | 63 | # Stationary mean and non-stationary variance 64 | ax3.plot(x, y_rand3 * fac_y_std, color="tab:orange", linewidth=0.5) 65 | ax3.hlines(0, xmin=0, xmax=1, color="black", label="Mean") 66 | ax3.plot(x, 2 * sig * fac_y_std, color="tab:gray", linestyle="dashed") 67 | ax3.plot(x, -2 * sig * fac_y_std, color="tab:gray", linestyle="dashed") 68 | ax3.set_xlim((0, 1)) 69 | ax3.set_title("Stationary mean\nNon-stationary variance") 70 | # ax1.legend() 71 | ax3.spines["right"].set_visible(False) 72 | ax3.spines["top"].set_visible(False) 73 | ax3.set_xticks([]) 74 | ax3.set_yticks([]) 75 | ax3.set_ylim((-1, 1)) 76 | ax3.plot(1, 0, ">k", transform=ax3.transAxes, clip_on=False) 77 | ax3.plot(0, 1, "^k", transform=ax3.transAxes, clip_on=False) 78 | 79 | plt.tight_layout() 80 | plt.show() 81 | -------------------------------------------------------------------------------- /doc/source/code/spatialstats_variogram_covariance.py: -------------------------------------------------------------------------------- 1 | """Documentation plot illustrating the link between variogram and covariance""" 2 | 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | from skgstat.models import exponential 6 | 7 | # Example of variogram and covariance relationship with an exponential model form 8 | fig, ax = plt.subplots() 9 | x = np.linspace(0, 100, 100) 10 | ax.plot(x, exponential(x, 15, 10), color="tab:blue", linewidth=2) 11 | ax.plot(x, 10 - exponential(x, 15, 10), color="black", linewidth=2) 12 | ax.hlines(10, xmin=0, xmax=100, linestyles="dashed", colors="tab:red") 13 | ax.text(75, exponential(75, 15, 10) - 1, "Semi-variogram $\\gamma(l)$", ha="center", va="top", color="tab:blue") 14 | ax.text( 15 | 75, 16 | 10 - exponential(75, 15, 10) + 1, 17 | "Covariance $C(l) = \\sigma^{2} - \\gamma(l)$", 18 | ha="center", 19 | va="bottom", 20 | color="black", 21 | ) 22 | ax.text(75, 11, "Variance $\\sigma^{2}$", ha="center", va="bottom", color="tab:red") 23 | ax.set_xlim((0, 100)) 24 | ax.set_ylim((0, 12)) 25 | ax.set_xlabel("Spatial lag $l$") 26 | ax.set_ylabel("Variance of elevation differences (m²)") 27 | ax.spines["right"].set_visible(False) 28 | ax.spines["top"].set_visible(False) 29 | plt.tight_layout() 30 | plt.show() 31 | -------------------------------------------------------------------------------- /doc/source/config.md: -------------------------------------------------------------------------------- 1 | --- 2 | file_format: mystnb 3 | jupytext: 4 | formats: md:myst 5 | text_representation: 6 | extension: .md 7 | format_name: myst 8 | kernelspec: 9 | display_name: xdem-env 10 | language: python 11 | name: xdem 12 | --- 13 | # Configuration 14 | 15 | xDEM allows you to configure the **verbosity level** and the **default behaviour of certain operations on elevation data** (such as 16 | resampling method for reprojection, or pixel interpretation) directly at the package level. 17 | 18 | (verbosity)= 19 | ## Verbosity level 20 | 21 | To configure the verbosity level (or logging) for xDEM, you can utilize Python's built-in `logging` module. This module 22 | has five levels of verbosity that are, in ascending order of severity: `DEBUG`, `INFO`, `WARNING`, `ERROR` and `CRITICAL`. 23 | Setting a level prints output from that level and all others of higher severity. Logging also allows you to specify other aspects, 24 | such as the destination of the output (console, file). 
25 | 26 | ```{important} 27 | **The default verbosity level is `WARNING`, implying that `INFO` and `DEBUG` do not get printed**. Use the basic configuration 28 | below to set up an `INFO` level. 29 | ``` 30 | 31 | To specify the verbosity level, set up a logging configuration at the start of your script: 32 | 33 | ```{code-cell} ipython3 34 | import logging 35 | 36 | # Basic configuration to simply print info 37 | logging.basicConfig(level=logging.INFO) 38 | ``` 39 | 40 | Optionally, you can specify the logging date, format, and handlers (destinations). 41 | 42 | ```{code-cell} ipython3 43 | 44 | # More advanced configuration 45 | logging.basicConfig( 46 | level=logging.INFO, 47 | format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', 48 | datefmt='%Y-%m-%d %H:%M:%S', 49 | handlers=[ 50 | logging.FileHandler('app.log'),  # Log messages will be saved to this file 51 | logging.StreamHandler()  # Log messages will also be printed to the console 52 | ]) 53 | ``` 54 | 55 | The above configuration will log messages with a severity level of `INFO` and above, including timestamps, logger names, and 56 | log levels in the output. You can change the logging level as needed. 57 | 58 | 59 | ## Raster–vector–point operations 60 | 61 | To change the configuration at the package level regarding operations for rasters, vectors and points, see 62 | [GeoUtils' configuration](https://geoutils.readthedocs.io/en/stable/config.html). 63 | 64 | For instance, this allows you to define a preferred resampling algorithm used when interpolating and reprojecting 65 | (e.g., bilinear, cubic), or the default behaviour linked to pixel interpretation during point–raster comparison. 66 | These changes will then apply to all your operations in xDEM, such as coregistration. 67 | -------------------------------------------------------------------------------- /doc/source/credits.md: -------------------------------------------------------------------------------- 1 | (credits)= 2 | # Credits and background 3 | 4 | ```{toctree} 5 | :maxdepth: 2 6 | 7 | history 8 | mission 9 | authors 10 | funding 11 | license 12 | ``` 13 | -------------------------------------------------------------------------------- /doc/source/ecosystem.md: -------------------------------------------------------------------------------- 1 | (ecosystem)= 2 | 3 | # Ecosystem 4 | 5 | xDEM is but a single tool among a large landscape of open tools for geospatial elevation analysis! Below is a list of 6 | other **tools that you might find useful to combine with xDEM**, in particular for retrieving elevation data or performing complementary analyses. 7 | 8 | ```{seealso} 9 | Tools listed below only relate to elevation data. To analyze georeferenced rasters, vectors and point cloud data, 10 | check out **xDEM's sister-package [GeoUtils](https://geoutils.readthedocs.io/)**. 11 | ``` 12 | ## Python 13 | 14 | Great Python tools for **pre-processing and retrieving elevation data**: 15 | - [SlideRule](https://slideruleearth.io/) to pre-process and retrieve high-resolution elevation data in the cloud, including in particular [ICESat-2](https://icesat-2.gsfc.nasa.gov/) and [GEDI](https://gedi.umd.edu/), 16 | - [pDEMtools](https://pdemtools.readthedocs.io/en/latest/) to pre-process and retrieve [ArcticDEM](https://www.pgc.umn.edu/data/arcticdem/) and [REMA](https://www.pgc.umn.edu/data/rema/) high-resolution DEMs available in polar regions, 17 | - [icepyx](https://icepyx.readthedocs.io/en/latest/) to retrieve ICESat-2 data. 
18 |
19 | Complementary Python tools to **analyze elevation data** include, for instance:
20 | - [PDAL](https://pdal.io/en/latest/) for working with dense elevation point clouds,
21 | - [demcompare](https://demcompare.readthedocs.io/en/stable/) to compare two DEMs,
22 | - [RichDEM](https://richdem.readthedocs.io/en/latest/) for in-depth terrain analysis, with a large range of methods including many relevant to hydrology.
23 |
24 | ## Julia
25 |
26 | If you are working in Julia, the [Geomorphometry](https://github.com/Deltares/Geomorphometry.jl) package provides a
27 | wide range of terrain analysis methods for elevation data.
28 |
29 | ## R
30 |
31 | If you are working in R, the [MultiscaleDTM](https://ailich.github.io/MultiscaleDTM/) package provides modular tools
32 | for terrain analysis at multiple scales!
33 |
34 | ## Other community resources
35 |
36 | Whether to retrieve data among their wide range of open datasets, or to dive into their other resources, be sure to check out the
37 | amazing [OpenTopography](https://opentopography.org/) and [OpenAltimetry](https://openaltimetry.earthdatacloud.nasa.gov/data/) efforts!
38 | -------------------------------------------------------------------------------- /doc/source/elevation_objects.md: -------------------------------------------------------------------------------- 1 | (elevation-objects)=
2 | # Elevation data objects
3 |
4 | Elevation data objects of xDEM inherit their characteristics from raster and vector objects of
5 | our sister-package [GeoUtils](https://geoutils.readthedocs.io/en/stable/).
6 |
7 | ```{toctree}
8 | :maxdepth: 2
9 |
10 | dem_class
11 | elevation_point_cloud
12 | ```
13 | -------------------------------------------------------------------------------- /doc/source/elevation_point_cloud.md: -------------------------------------------------------------------------------- 1 | ---
2 | file_format: mystnb
3 | jupytext:
4 | formats: md:myst
5 | text_representation:
6 | extension: .md
7 | format_name: myst
8 | kernelspec:
9 | display_name: xdem-env
10 | language: python
11 | name: xdem
12 | ---
13 | (elevation-point-cloud)=
14 |
15 | # The elevation point cloud ({class}`~xdem.EPC`)
16 |
17 | Under construction, planned for 2025.
18 |
19 | However, **elevation point clouds are already supported for coregistration and bias correction** by passing a {class}`geopandas.GeoDataFrame`
20 | associated with an elevation column name argument `z_name` to {func}`~xdem.coreg.Coreg.fit_and_apply`.
21 | -------------------------------------------------------------------------------- /doc/source/funding.md: -------------------------------------------------------------------------------- 1 | (funding)=
2 | # Funding acknowledgments
3 |
4 | Members of the lead development team acknowledge funding from:
5 | - SNSF grant no. 184634, a MeteoSwiss [GCOS](https://gcos.wmo.int/en/home) project on elevation data analysis for glaciology,
6 | - NASA award 80NSSC22K1094, an [STV](https://science.nasa.gov/earth-science/decadal-surveys/decadal-stv/) project on the fusion of elevation data,
7 | - NASA award 80NSSC23K0192, an [ICESat-2](https://icesat-2.gsfc.nasa.gov/) project on the processing of elevation data in the cloud,
8 | - CNES (French Space Agency) award on merging [demcompare](https://github.com/CNES/demcompare) and xDEM while further developing related 3D tools.
9 |
10 |
11 | ::::{grid}
12 | :reverse:
13 |
14 | :::{grid-item}
15 | :columns: 4
16 | :child-align: center
17 |
18 | ```{image} ./_static/nasa_logo.svg
19 | :width: 200px
20 | :class: dark-light
21 | ```
22 |
23 | :::
24 |
25 | :::{grid-item}
26 | :columns: 4
27 | :child-align: center
28 |
29 | ```{image} ./_static/snsf_logo.svg
30 | :width: 220px
31 | :class: only-light
32 | ```
33 |
34 | ```{image} ./_static/snsf_logo_dark.svg
35 | :width: 220px
36 | :class: only-dark
37 | ```
38 |
39 | :::
40 |
41 | :::{grid-item}
42 | :columns: 4
43 | :child-align: center
44 |
45 | ```{image} ./_static/cnes_logo.svg
46 | :width: 200px
47 | :class: only-light
48 | ```
49 |
50 | ```{image} ./_static/cnes_logo_dark.svg
51 | :width: 200px
52 | :class: only-dark
53 | ```
54 |
55 | :::
56 |
57 |
58 | ::::
59 | -------------------------------------------------------------------------------- /doc/source/guides.md: -------------------------------------------------------------------------------- 1 | (guides)=
2 | # Guides to elevated analysis
3 |
4 | This section is a collection of guides gathering background knowledge related to elevation data to help grasp how to best
5 | elevate your analysis!
6 |
7 | ```{toctree}
8 | :maxdepth: 2
9 |
10 | elevation_intricacies
11 | static_surfaces
12 | accuracy_precision
13 | robust_estimators
14 | spatial_stats
15 | ```
16 | -------------------------------------------------------------------------------- /doc/source/history.md: -------------------------------------------------------------------------------- 1 | (history)=
2 | # History
3 |
4 | Below, some more information on the history behind the package.
5 |
6 | ## Creation
7 |
8 | ```{margin}
9 | ¹More on our GlacioHack founder at [adehecq.github.io](https://adehecq.github.io/).
10 | ```
11 |
12 | xDEM was created during the **[GlacioHack](https://github.com/GlacioHack) hackathon**, which was initiated by
13 | Amaury Dehecq¹ and took place online on November 8, 2020.
14 |
15 | ```{margin}
16 | ²Check out [glaciology.ch](https://glaciology.ch) on our founding group of VAW glaciology!
17 | ```
18 |
19 | The initial core development of xDEM was performed by members of the Glaciology group of the Laboratory of Hydraulics, Hydrology and
20 | Glaciology (VAW) at ETH Zürich², with contributions by members of the University of Oslo, the University of Washington, and University
21 | Grenoble Alpes.
22 |
23 | ## Joining effort with **demcompare**
24 |
25 | In 2024, xDEM and [demcompare](https://github.com/CNES/demcompare) joined efforts with the aim of
26 | merging the best of both packages into one, and of jointly continuing the development of new features for
27 | analyzing elevation data with a larger expertise pool.
28 |
29 | [demcompare](https://github.com/CNES/demcompare) is a tool developed by the CNES (French Space Agency) to
30 | support its 3D satellite missions in analyzing elevation data, for instance from stereophotogrammetric DEMs
31 | that can be generated with [CARS](https://github.com/CNES/cars).
32 |
33 | ## Current team
34 |
35 | ```{margin}
36 | ³More on CNES's 3D missions on the [CO3D constellation page](https://cnes.fr/en/projects/co3d).
37 | ```
38 |
39 | The current lead development team includes **researchers in Earth observation and engineers from
40 | [CNES](https://cnes.fr/en)** (French Space Agency). We specialize in elevation data analysis, for application in Earth
41 | science or for operational use for 3D satellite missions³.
42 |
43 | Other volunteer contributors span diverse scientific backgrounds in industry or research. We welcome
44 | any new contributors! See how to contribute on [the dedicated page of our repository](https://github.com/GlacioHack/xdem/blob/main/CONTRIBUTING.md).
45 | -------------------------------------------------------------------------------- /doc/source/how_to_install.md: -------------------------------------------------------------------------------- 1 | (how-to-install)=
2 |
3 | # How to install
4 |
5 | ## Installing with ``mamba`` (recommended)
6 |
7 | ```bash
8 | mamba install -c conda-forge xdem
9 | ```
10 |
11 | ```{tip}
12 | Solving dependencies can take a long time with `conda`; `mamba` significantly speeds up the process. Install it with:
13 |
14 | conda install mamba -n base -c conda-forge
15 |
16 | Once installed, the same commands can be run by simply replacing `conda` with `mamba`. More details available in the [mamba documentation](https://mamba.readthedocs.io/en/latest/).
17 | ```
18 |
19 | ## Installing with ``pip``
20 |
21 | ```bash
22 | pip install xdem
23 | ```
24 |
25 | ```{warning}
26 | Updating packages with `pip` (and sometimes `mamba`) can break your installation. If this happens, re-create an environment from scratch pinning directly all your other dependencies during initial solve (e.g., `mamba create -n xdem-env -c conda-forge xdem myotherpackage==1.0.0`).
27 | ```
28 |
29 | ## Installing for contributors
30 |
31 | ### With ``mamba``
32 |
33 | ```bash
34 | git clone https://github.com/GlacioHack/xdem.git
35 | mamba env create -f xdem/dev-environment.yml
36 | ```
37 |
38 | ### With ``pip``
39 |
40 | Please note: `pip` installation is currently only possible under Python 3.10.
41 |
42 | ```bash
43 | git clone https://github.com/GlacioHack/xdem.git
44 | make install
45 | ```
46 |
47 | After installing, you can check that everything is working by running the tests: `pytest`.
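As an additional minimal sanity check from Python (assuming the package exposes a `__version__` attribute, as most packages do):

```python
import xdem

# Confirm that the package imports and print its version
print(xdem.__version__)
```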
48 | -------------------------------------------------------------------------------- /doc/source/imgs/accuracy_precision_dem.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/197480ef5aa3388a5f55e2ceb120bf7940a08ebc/doc/source/imgs/accuracy_precision_dem.png -------------------------------------------------------------------------------- /doc/source/imgs/precision_accuracy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/197480ef5aa3388a5f55e2ceb120bf7940a08ebc/doc/source/imgs/precision_accuracy.png -------------------------------------------------------------------------------- /doc/source/imgs/stable_terrain_diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/197480ef5aa3388a5f55e2ceb120bf7940a08ebc/doc/source/imgs/stable_terrain_diagram.png -------------------------------------------------------------------------------- /doc/source/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: xDEM 3 | --- 4 | 5 | ::::{grid} 6 | :reverse: 7 | :gutter: 2 1 1 1 8 | :margin: 4 4 1 1 9 | 10 | :::{grid-item} 11 | :columns: 4 12 | 13 | ```{image} ./_static/xdem_logo_only.svg 14 | :width: 300px 15 | :class: only-light 16 | ``` 17 | 18 | ```{image} ./_static/xdem_logo_only_dark.svg 19 | :width: 300px 20 | :class: only-dark 21 | ``` 22 | ::: 23 | 24 | :::{grid-item} 25 | :columns: 8 26 | :class: sd-fs-3 27 | :child-align: center 28 | 29 | xDEM aims at making the analysis of digital elevation models **easy**, **modular** and **robust**. 30 | 31 | :::: 32 | 33 | :::{admonition} Announcement 34 | :class: tip 35 | :class: margin 36 | 37 | xDEM `v0.1` is released, with all core features envisioned at creation 4 years ago 🎉! 38 | 39 | We are **merging efforts with [demcompare](https://github.com/CNES/demcompare)** to combine the best of both tools into one! 40 | 41 | We are working on **adding a ``dem`` Xarray accessor** with native Dask support for 2025. 42 | ::: 43 | 44 | xDEM is **tailored to perform quantitative analysis that implicitly understands the intricacies of elevation data**, 45 | both from a **georeferencing viewpoint** (vertical referencing, nodata values, projection, pixel interpretation) and 46 | a **statistical viewpoint** (outlier robustness, specificities of 3D alignment and error structure). 47 | 48 | It exposes **an intuitive object-based API to foster accessibility**, and strives **to be computationally scalable** 49 | through Dask. 50 | 51 | Additionally, through its sister-package [GeoUtils](https://geoutils.readthedocs.io/en/stable/), xDEM is built on top 52 | of core geospatial packages (Rasterio, GeoPandas, PyProj) and numerical packages (NumPy, Xarray, SciPy) to provide 53 | **consistent higher-level functionalities at the interface of DEMs and elevation point cloud objects**. 54 | 55 | ---------------- 56 | 57 | # Where to start? 58 | 59 | ::::{grid} 1 2 2 3 60 | :gutter: 1 1 1 2 61 | 62 | :::{grid-item-card} {material-regular}`edit_note;2em` About xDEM 63 | :link: about-xdem 64 | :link-type: ref 65 | 66 | Learn more about why we developed xDEM. 
67 |
68 | +++
69 | [Learn more »](about-xdem)
70 | :::
71 |
72 | :::{grid-item-card} {material-regular}`data_exploration;2em` Quick start
73 | :link: quick-start
74 | :link-type: ref
75 |
76 | Run a short example of the package functionalities.
77 |
78 | +++
79 | [Learn more »](quick-start)
80 | :::
81 |
82 | :::{grid-item-card} {material-regular}`preview;2em` Features
83 | :link: dem-class
84 | :link-type: ref
85 |
86 | Dive into the full documentation.
87 |
88 | +++
89 | [Learn more »](dem-class)
90 | :::
91 |
92 | ::::
93 |
94 | ----------------
95 |
96 |
97 | ```{toctree}
98 | :caption: Getting started
99 | :maxdepth: 2
100 |
101 | about_xdem
102 | how_to_install
103 | quick_start
104 | citation
105 | ```
106 |
107 | ```{toctree}
108 | :caption: Features
109 | :maxdepth: 2
110 |
111 | elevation_objects
112 | vertical_ref
113 | terrain
114 | coregistration
115 | biascorr
116 | gapfill
117 | uncertainty
118 | ```
119 |
120 | ```{toctree}
121 | :caption: Resources
122 | :maxdepth: 2
123 |
124 | guides
125 | cheatsheet
126 | ecosystem
127 | ```
128 |
129 | ```{toctree}
130 | :caption: Gallery of examples
131 | :maxdepth: 2
132 |
133 | basic_examples/index.rst
134 | advanced_examples/index.rst
135 | ```
136 |
137 | ```{toctree}
138 | :caption: Reference
139 | :maxdepth: 2
140 |
141 | api
142 | config
143 | release_notes
144 | ```
145 |
146 | ```{toctree}
147 | :caption: Project information
148 | :maxdepth: 2
149 |
150 | publis
151 | credits
152 | ```
153 |
154 | # Indices and tables
155 |
156 | - {ref}`genindex`
157 | - {ref}`modindex`
158 | - {ref}`search`
159 | -------------------------------------------------------------------------------- /doc/source/mission.md: -------------------------------------------------------------------------------- 1 | (mission)=
2 | # Mission
3 |
4 | ```{epigraph}
5 | The core mission of xDEM is to be **easy-to-use**, **modular** and **robust**.
6 |
7 | It also attempts to be as **efficient**, **scalable** and **state-of-the-art** as possible.
8 |
9 | Finally, as an open source package, it aspires to foster **reproducibility** and **open science**.
10 | ```
11 |
12 | In detail, this means:
13 |
14 | - **Ease-of-use:** all basic operations or methods from published works should only require a few lines of code to be performed;
15 |
16 | - **Modularity:** all methods should be fully customizable, to allow both flexibility and inter-comparison;
17 |
18 | - **Robustness:** all methods should be tested within our continuous integration test-suite, to enforce that they always perform as expected;
19 |
20 | ```{note}
21 | :class: margin
22 | **Scalability** is currently being improved towards a first major release ``v1.0``.
23 | ```
24 |
25 | And, additionally:
26 |
27 | - **Efficiency**: all methods should be optimized at the lower-level, to function with the highest performance offered by Python packages;
28 |
29 | - **Scalability**: all methods should support both lazy processing and distributed parallelized processing, to work with high-resolution data on local machines as well as on HPCs;
30 |
31 | - **State-of-the-art**: all methods should be at the cutting edge of remote sensing science, to provide users with the most reliable and up-to-date tools.
32 |
33 | And finally:
34 |
35 | - **Reproducibility:** all code should be version-controlled and release-based, to ensure consistency of dependent
36 | packages and works;
37 |
38 | - **Open-source:** all code should be accessible and reusable to anyone in the community, for transparency and open governance.
39 | -------------------------------------------------------------------------------- /doc/source/publis.md: -------------------------------------------------------------------------------- 1 | (publis)=
2 |
3 | # Use in publications
4 |
5 | Below, a list of publications making use of the xDEM package (that we are aware of!).
6 |
7 | ## Articles
8 |
9 | ### Pre-prints
10 |
11 | - Hartl, L., Schmitt, P., Schuster, L., Helfricht, K., Abermann, J., & Maussion, F. (2024). **Recent observations and glacier modeling point towards near complete glacier loss in western Austria (Ötztal and Stubai mountain range) if 1.5 °C is not met**.
12 | - Liu, Z., Filhol, S., & Treichler, D. (2024). **Retrieving snow depth distribution by downscaling ERA5 Reanalysis with ICESat-2 laser altimetry**. In arXiv [physics.geo-ph]. arXiv.
13 | - Mattea, E., Berthier, E., Dehecq, A., Bolch, T., Bhattacharya, A., Ghuffar, S., Barandun, M., & Hoelzle, M. (2024). **Five decades of Abramov glacier dynamics reconstructed with multi-sensor optical remote sensing**.
14 | - Zhu, Y., Liu, S., Wei, J., Wu, K., Bolch, T., Xu, J., Guo, W., Jiang, Z., Xie, F., Yi, Y., Shangguan, D., Yao, X., & Zhang, Z. (2024). **Glacier-level and gridded mass change in the rivers’ sources in the eastern Tibetan Plateau (ETPR) from 1970s to 2000**.
15 | - Walden, J., Jacquemart, M., Higman, B., Hugonnet, R., Manconi, A., & Farinotti, D. (2024). **A regional analysis of paraglacial landslide activation in southern coastal Alaska**.
16 |
17 | ### 2024
18 |
19 | - Dømgaard, M., Schomacker, A., Isaksson, E., Millan, R., Huiban, F., Dehecq, A., Fleischer, A., Moholdt, G., Andersen, J. K., & Bjørk, A. A. (2024). **Early aerial expedition photos reveal 85 years of glacier growth and stability in East Antarctica**. *Nature Communications*, 15(1), 4466.
20 | - Piermattei, L., Zemp, M., Sommer, C., Brun, F., Braun, M. H., Andreassen, L. M., Belart, J. M. C., Berthier, E., Bhattacharya, A., Boehm Vock, L., Bolch, T., Dehecq, A., Dussaillant, I., Falaschi, D., Florentine, C., Floricioiu, D., Ginzler, C., Guillet, G., Hugonnet, R., … Yang, R. (2024). **Observing glacier elevation changes from spaceborne optical and radar sensors – an inter-comparison experiment using ASTER and TanDEM-X data**. *The Cryosphere*, 18(7), 3195–3230.
21 |
22 | ### 2023
23 |
24 | - Bernat, M., Belart, J. M. C., Berthier, E., Jóhannesson, T., Hugonnet, R., Dehecq, A., Magnússon, E., & Gunnarsson, A. (2023). **Geodetic mass balance of Mýrdalsjökull ice cap, 1999--2021**. *Jökull*, 73(1), 35–53.
25 | - Khadka, N., Shrestha, N., Basnet, K., Manandhar, R., Sharma, S., & Shrestha, B. (2023). **Glacier Area, Mass and Associated Glacial Lake Change in Kawari basin, Western Nepal**. *Jalawaayu*, 3(1), 63–72.
26 | - Brun, F., King, O., Réveillet, M., Amory, C., Planchot, A., Berthier, E., Dehecq, A., Bolch, T., Fourteau, K., Brondex, J., Dumont, M., Mayer, C., Leinss, S., Hugonnet, R., & Wagnon, P. (2023). **Everest South Col Glacier did not thin during the period 1984–2017**. *The Cryosphere*, 17(8), 3251–3268.
27 | - Knuth, F., Shean, D., Bhushan, S., Schwat, E., Alexandrov, O., McNeil, C., Dehecq, A., Florentine, C., & O’Neel, S. (2023). **Historical Structure from Motion (HSfM): Automated processing of historical aerial photographs for long-term topographic change analysis**. *Remote Sensing of Environment*, 285, 113379.
28 | - Schwat, E., Istanbulluoglu, E., Horner-Devine, A., Anderson, S., Knuth, F., & Shean, D. (2023).
**Multi-decadal erosion rates from glacierized watersheds on Mount Baker, Washington, USA, reveal topographic, climatic, and lithologic controls on sediment yields**. *Geomorphology*, 438(108805), 108805.
29 |
30 | ### 2022
31 |
32 | - Farnsworth, W. R., Ingólfsson, Ó., Mannerfelt, E. S., Kalliokoski, M. H., Guðmundsdóttir, E. R., Retelle, M., Allaart, L., Brynjólfsson, S., Furze, M. F. A., Hancock, H. J., Kjær, K. H., Pieńkowski, A. J., & Schomacker, A. (2022). **Vedde Ash constrains Younger Dryas glacier re-advance and rapid glacio-isostatic rebound on Svalbard**. *Quaternary Science Advances*, 5(100041), 100041.
33 | - Mannerfelt, E. S., Dehecq, A., Hugonnet, R., Hodel, E., Huss, M., Bauder, A., & Farinotti, D. (2022). **Halving of Swiss glacier volume since 1931 observed from terrestrial image photogrammetry**. *The Cryosphere*, 16(8), 3249–3268.
34 | - Abad, L., Hölbling, D., Dabiri, Z., & Robson, B. A. (2022). **An open-source-based workflow for DEM generation from Sentinel-1 for landslide volume estimation**. *ISPRS - International Archives of the Photogrammetry Remote Sensing and Spatial Information Sciences*, XLVIII-4/W1-2022, 5–11.
35 | - Hugonnet, R., Brun, F., Berthier, E., Dehecq, A., Mannerfelt, E. S., Eckert, N., & Farinotti, D. (2022). **Uncertainty Analysis of Digital Elevation Models by Spatial Inference From Stable Terrain**. *IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing*, 15, 6456–6472.
36 |
37 | ## Theses
38 |
39 | ### PhD
40 |
41 | - Hugonnet, R. (2022). **Global glacier mass change by spatiotemporal analysis of digital elevation models**,
42 |
43 | ### Master
44 |
45 | - Vlieghe, P-L. (2023). **Revealing the Recent Height Changes of the Great Altesch Glacier Using TanDEM-X DEM Series**,
46 | - Saheed, A. (2023). **Investigation of changes in Briksdalsbreen, western Norway from 1966 - 2020**,
47 | - Liu, Z. (2022). **Snow Depth Retrieval and Downscaling using Satellite Laser Altimetry, Machine Learning, and Climate Reanalysis: A Case Study in Mainland Norway**,
48 | - Bernat, M. (2022). **Geodetic mass balance of Mýrdalsjökull ice cap, 1999−2021: DEM processing and climate analysis**,
49 | -------------------------------------------------------------------------------- /doc/source/quick_start.md: -------------------------------------------------------------------------------- 1 | ---
2 | file_format: mystnb
3 | jupytext:
4 | formats: md:myst
5 | text_representation:
6 | extension: .md
7 | format_name: myst
8 | kernelspec:
9 | display_name: xdem-env
10 | language: python
11 | name: xdem
12 | ---
13 | (quick-start)=
14 |
15 | # Quick start
16 |
17 | Below is a short example showcasing some of the core functionalities of xDEM.
18 | To find an example about a specific functionality, jump directly to {ref}`quick-gallery`.
19 |
20 | ## Short example
21 |
22 | ```{note}
23 | :class: margin
24 | xDEM relies largely on [its sister-package GeoUtils](https://geoutils.readthedocs.io/) for geospatial handling
25 | (reprojection, cropping, raster-vector interface, point interpolation) as well as numerics
26 | (NumPy interface). 🙂
27 | ```
28 |
29 | xDEM revolves around the {class}`~xdem.DEM` class (a subclass of {class}`~geoutils.Raster`), from
30 | which most methods can be called, and the {class}`~xdem.coreg.Coreg` classes, used to build modular coregistration pipelines.
31 | 32 | Below, in a few lines, we load two DEMs and a vector of glacier outlines, crop them to a common extent, 33 | align the DEMs using coregistration, estimate the elevation change, estimate elevation change error using stable 34 | terrain, and finally plot and save the result! 35 | 36 | 37 | ```{code-cell} ipython3 38 | :tags: [remove-cell] 39 | 40 | # To get a good resolution for displayed figures 41 | from matplotlib import pyplot 42 | pyplot.rcParams['figure.dpi'] = 600 43 | pyplot.rcParams['savefig.dpi'] = 600 44 | ``` 45 | 46 | ```{code-cell} ipython3 47 | import xdem 48 | import geoutils as gu 49 | 50 | # Examples files: filenames of two DEMs and some glacier outlines 51 | fn_dem_ref = xdem.examples.get_path("longyearbyen_ref_dem") 52 | fn_dem_tba = xdem.examples.get_path("longyearbyen_tba_dem") 53 | fn_glacier_outlines = xdem.examples.get_path("longyearbyen_glacier_outlines") 54 | 55 | # Print filenames 56 | print(f"DEM 1: {fn_dem_ref}, \nDEM 2: {fn_dem_tba}, \nOutlines: {fn_glacier_outlines}") 57 | ``` 58 | 59 | ```{tip} 60 | :class: margin 61 | Set up your {ref}`verbosity` to manage outputs to the console (or a file) during execution! 62 | ``` 63 | 64 | ```{code-cell} ipython3 65 | # Open files by instantiating DEM and Vector 66 | # (DEMs are loaded lazily = only metadata but not array unless required) 67 | dem_ref = xdem.DEM(fn_dem_ref) 68 | dem_tba = xdem.DEM(fn_dem_tba) 69 | vect_gla = gu.Vector(fn_glacier_outlines) 70 | 71 | # Clip outlines to extent of reference DEM (method from GeoUtils) 72 | vect_gla = vect_gla.crop(dem_ref, clip=True) 73 | 74 | # Create a mask from glacier polygons (method from GeoUtils) 75 | mask_gla = vect_gla.create_mask(dem_ref) 76 | 77 | # We convert the vertical CRS of one DEM to the EGM96 geoid 78 | dem_ref.to_vcrs("EGM96", force_source_vcrs="Ellipsoid") 79 | 80 | # Align the two DEMs with a coregistration: 3D shift + 2nd-order 2D poly 81 | mycoreg = xdem.coreg.NuthKaab() + xdem.coreg.Deramp(poly_order=2) 82 | mycoreg.fit(dem_ref, dem_tba, inlier_mask=~mask_gla) 83 | dem_aligned = mycoreg.apply(dem_tba) 84 | 85 | # Get elevation difference 86 | dh = dem_ref - dem_aligned 87 | 88 | # Derive slope and curvature attributes 89 | slope, maximum_curvature = xdem.terrain.get_terrain_attribute( 90 | dem_ref, attribute=["slope", "maximum_curvature"] 91 | ) 92 | 93 | # Estimate elevation change error from stable terrain as a function of slope and curvature 94 | dh_err = xdem.spatialstats.infer_heteroscedasticity_from_stable( 95 | dh, list_var=[slope, maximum_curvature], unstable_mask=mask_gla 96 | )[0] 97 | 98 | # Plot dh, glacier outlines and its error map 99 | dh.plot(cmap="RdYlBu", cbar_title="Elevation change (m)") 100 | vect_gla.plot(dh, fc='none', ec='k', lw=0.5) 101 | 102 | dh_err.plot(ax="new", vmin=2, vmax=7, cmap="Reds", cbar_title=r"Elevation change error (1$\sigma$, m)") 103 | vect_gla.plot(dh_err, fc='none', ec='k', lw=0.5) 104 | 105 | # Save to file 106 | dh_err.save("dh_error.tif") 107 | ``` 108 | 109 | ```{code-cell} ipython3 110 | :tags: [remove-cell] 111 | import os 112 | os.remove("dh_error.tif") 113 | ``` 114 | 115 | (quick-gallery)= 116 | ## More examples 117 | 118 | To dive into more illustrated code, explore our gallery of examples that is composed of: 119 | - A {ref}`examples-basic` section on simpler routines (terrain attributes, pre-defined coregistration and uncertainty pipelines), 120 | - An {ref}`examples-advanced` section using advanced pipelines (for in-depth coregistration and uncertainty analysis). 
121 |
122 | See also the concatenated list of examples below.
123 |
124 | ```{eval-rst}
125 | .. minigallery:: xdem.DEM
126 | :add-heading: Examples using DEMs
127 | ```
128 | -------------------------------------------------------------------------------- /doc/source/release_notes.md: -------------------------------------------------------------------------------- 1 | # Release notes
2 |
3 | Below, the release notes for all minor versions and our roadmap to a first major version.
4 |
5 | ## 0.1.0
6 |
7 | xDEM version 0.1 is the **first minor release** since the creation of the project in 2020. It is the result of years of work
8 | to consolidate and re-structure features into a mature and stable API to minimize future breaking changes.
9 |
10 | **All the core features drafted at the start of the project are now supported**, and there is a **clear roadmap
11 | towards a first major release 1.0**. This minor release also adds many tests and significantly improves the documentation
12 | from the early-development state of the package.
13 |
14 | The re-structuring created some breaking changes, though minor.
15 |
16 | See details below, including **a guide to help migrate code from early-development versions**.
17 |
18 | ### Features
19 |
20 | xDEM now gathers the following core features:
21 | - **Elevation data objects** core to quantitative analysis, which are DEMs and elevation point clouds,
22 | - **Vertical referencing** including automatic 3D CRS fetching,
23 | - **Terrain analysis** for many attributes,
24 | - **Coregistration** with the choice of several methods, including modular pipeline building,
25 | - **Bias corrections** for any variable, also modular and supported by pipelines,
26 | - **Uncertainty analysis** based on several robust methods.
27 |
28 | Recent additions include in particular **point-raster support for coregistration**, and the **expansion of
29 | `DEM` class methods** to cover all features of the package, with, for instance, `DEM.coregister_3d()` or `DEM.slope()`.
30 |
31 | ### Guides and other resources
32 |
33 | xDEM integrates **background material on quantitative analysis of elevation data** to help users apply the various methods
34 | of the package. This material includes **several illustrated guide pages**, **a cheatsheet** on how to recognize and correct
35 | typical elevation errors, and more.
36 |
37 | ### Future deprecations
38 |
39 | We have added warnings throughout the documentation and API related to planned deprecations:
40 | - **Gap-filling features specific to glacier applications** will be moved to a separate package,
41 | - **Uncertainty analysis tools related to variography** will change API to rely on SciKit-GStat variogram objects,
42 | - The **dDEM** and **DEMCollection** classes will likely be refactored or removed.
43 |
44 | Changes related to **gap-filling** and **uncertainty analysis** will have deprecation warnings, while the functions
45 | remain available for a few more releases.
46 |
47 | (migrate-early)=
48 | ### Migrate from early versions
49 |
50 | The following changes **might be required to solve breaking changes**, depending on your early-development version:
51 | - Rename `.show()` to `.plot()` for all data objects,
52 | - Rename `.dtypes` to `dtype` for `DEM` objects,
53 | - Operations `.crop()`, `shift()` and `to_vcrs()` are not done in-place by default anymore; replace by `dem = dem.crop()` or `dem.crop(..., inplace=True)` to mirror the old default behaviour,
54 | - Rename `.shift()` to `.translate()` for `DEM` objects,
55 | - Several function arguments are renamed, in particular the `dst_xxx` arguments of `.reproject()` are all renamed to `xxx` (e.g., `dst_crs` to `crs`), as well as the arguments of `Coreg.fit()` renamed from `xxx_dem` to `xxx_elev` to be generic to any elevation data,
56 | - All `BiasCorr1D`, `BiasCorr2D` and `BiasCorrND` classes are removed in favor of a single `BiasCorr` class that implicitly understands the number of dimensions from the length of input `bias_vars`,
57 | - New user warnings are sometimes raised, in particular if some metadata is not properly defined such as `.nodata`. Those should give an indication as to how to silence them.
58 |
59 | Additionally, **some important yet non-breaking changes**:
60 | - The sequential use of `Coreg.fit()` and `Coreg.apply()` to the same `tba_elev` is now discouraged and updated everywhere in the documentation, use `Coreg.fit_and_apply()` or `DEM.coregister_3d()` instead,
61 | - The use of a separate module for terrain attributes such as `xdem.terrain.slope()` is now discouraged, use `DEM.slope()` instead.
62 |
63 | ## Roadmap to 1.0
64 |
65 | Based on recent and ongoing progress, we envision the following roadmap.
66 |
67 | **Releases of 0.2, 0.3, 0.4, etc**, for the following planned (ongoing) additions:
68 | - The **addition of a command-line interface for features such as coregistration**, in the frame of the merging effort with [demcompare](https://github.com/CNES/demcompare),
69 | - The **addition of an elevation point cloud `EPC` data object**, inherited from the ongoing `PointCloud` object of GeoUtils alongside many features at the interface of point and raster,
70 | - The **addition of a Xarray accessor `dem`** mirroring the `DEM` object, to work natively with Xarray objects and add support on out-of-memory Dask operations for most of xDEM's features,
71 | - The **addition of a GeoPandas accessor `epc`** mirroring the `EPC` object, to work natively with GeoPandas objects,
72 | - The **re-structuration of uncertainty analysis features** to rely directly on SciKit-GStat's `Variogram` object.
73 |
74 | **Release of 1.0** once all these additions are fully implemented, and after feedback from the community.
75 | -------------------------------------------------------------------------------- /doc/source/sphinxext.py: -------------------------------------------------------------------------------- 1 | """Functions for documentation configuration only, importable by sphinx"""
2 |
3 |
4 | # To reset resolution setting for each sphinx-gallery example
5 | def reset_mpl(gallery_conf, fname):
6 | # To get a good resolution for displayed figures
7 | from matplotlib import pyplot
8 |
9 | pyplot.rcParams["figure.dpi"] = 400
10 | pyplot.rcParams["savefig.dpi"] = 400
11 |
12 | # Reset logging to default
13 | import logging
14 |
15 | logging.basicConfig(force=True)
16 | -------------------------------------------------------------------------------- /doc/source/static_surfaces.md: -------------------------------------------------------------------------------- 1 | (static-surfaces)=
2 |
3 | # Static surfaces as error proxy
4 |
5 | Below, a short guide explaining the use of static surfaces as an error proxy for quantitative elevation analysis.
6 |
7 | ## The great benefactor of elevation analysis
8 |
9 | Elevation data benefits from an uncommon asset, which is that **large proportions of planetary surface elevations
10 | usually remain virtually unchanged through time** (at least, within decadal time scales). Those static surfaces,
11 | sometimes also referred to as "stable terrain", generally correspond to bare rock or grasslands, and are often isolated by
12 | excluding dynamic surfaces such as glaciers, snow, forests and cities. If small proportions of dynamic surfaces are
13 | left unmasked, they are generally filtered out by robust estimators (see {ref}`robust-estimators`).
14 |
15 | :::{figure} imgs/stable_terrain_diagram.png
16 | :width: 100%
17 |
18 | Source: [Hugonnet et al. (2022)](https://doi.org/10.1109/jstars.2022.3188922).
19 | :::
20 |
21 | ## Use for coregistration and further uncertainty analysis
22 |
23 | Elevation data can rarely be compared to simultaneous acquisitions to assess its sources of error. This is
24 | where **static surfaces come to the rescue, and can act as an error proxy**. By assuming no changes happened on these
25 | surfaces, and that they have the same error structure as other surfaces, it becomes possible to perform
26 | coregistration, bias-correction and further uncertainty analysis!
27 |
28 | Below, we summarize the basic principles of how using static surfaces allows performing coregistration and uncertainty analysis, and the related limitations.
29 |
30 | ### For coregistration and bias-correction (systematic errors)
31 |
32 | **Static surfaces $S$ are key to a coregistration or bias correction transformation $C$** for which it is assumed that,
33 | for two sets of elevation data $h_{1}$ and $h_{2}$, we have:
34 |
35 | $$
36 | (h_{1} - C(h_{2}))_{S} \approx 0
37 | $$
38 |
39 | and aim to find the best transformation $C$ that minimizes this difference.
40 |
41 | The above relation is not generally true for every pixel or footprint, however, due to random errors that
42 | exist in all data. Consequently, we can only write:
43 |
44 | $$
45 | \textrm{mean} (h_{1} - C(h_{2}))_{S \gg r^{2}} \approx 0
46 | $$
47 |
48 | where $r$ is the correlation range of random errors, and $S \gg r^{2}$ assumes that static surfaces cover a domain much
49 | larger than this correlation range. If static surfaces cover too small an area, coregistration will naturally become
50 | less reliable.
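To make this concrete, below is a minimal synthetic sketch (illustrative only, not using xDEM's API) of how a simple vertical-shift correction $C$ brings the mean of elevation differences over static surfaces close to zero:

```python
import numpy as np

rng = np.random.default_rng(42)

# Synthetic elevation differences h1 - h2: a 2 m systematic shift
# plus random errors with a 1 m spread
dh = 2.0 + rng.normal(0, 1, size=(1000, 1000))

# Static-surface mask S, here covering half of the pixels
stable = rng.random((1000, 1000)) < 0.5

# A simple vertical-shift correction C: subtract the mean over S
dh_corrected = dh - np.mean(dh[stable])

# The mean over static surfaces is now approximately zero
print(f"{np.mean(dh_corrected[stable]):.4f}")
```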
51 |
52 | ```{note}
53 | One of the objectives of xDEM is to allow using knowledge of random errors to refine
54 | coregistration for limited static surface areas. Stay tuned!
55 | ```
56 |
57 | ### For further uncertainty analysis (random errors)
58 |
59 | **Static surfaces are also essential for uncertainty analysis aiming to infer the random errors of elevation
60 | data** but, in this case, we have to consider the effect of random errors from both sets of elevation data.
61 |
62 | We first assume that elevation $h_{2}$ is now largely free of systematic errors after performing coregistration and
63 | bias corrections $C$. The analysis of elevation differences $dh$ on static surfaces $S$ will represent the mixed random
64 | errors of the two sets of data, which we can assume are statistically independent (if indeed acquired separately), which yields:
65 |
66 | $$
67 | \sigma_{dh, S} = \sigma_{h_{\textrm{1}} - h_{\textrm{2}}} = \sqrt{\sigma_{h_{\textrm{1}}}^{2} + \sigma_{h_{\textrm{2}}}^{2}}
68 | $$
69 |
70 | where $\sigma$ is the random error at any data point.
71 |
72 | If one set of elevation data is known to be of much higher precision, one can assume that the analysis of differences
73 | will represent only the precision of the rougher DEM. For instance, $\sigma_{h_{1}} = 3 \sigma_{h_{2}}$ implies that about
74 | 95% of $\sigma_{dh}$ comes from $\sigma_{h_{1}}$ in the above equation.
75 |
76 | More generally:
77 |
78 | $$
79 | \sigma_{dh, S} = \sigma_{h_{\textrm{higher precision}} - h_{\textrm{lower precision}}} \approx \sigma_{h_{\textrm{lower precision}}}
80 | $$
81 |
82 | And the same applies to the spatial correlation of these random errors:
83 |
84 | $$
85 | \rho_{dh, S}(d) = \rho_{h_{\textrm{higher precision}} - h_{\textrm{lower precision}}}(d) \approx \rho_{h_{\textrm{lower precision}}}(d)
86 | $$
87 |
88 | where $\rho(d)$ is the spatial correlation, and $d$ is the spatial lag (distance between data points).
89 |
90 | ----------------
91 |
92 | :::{admonition} References and more reading
93 | :class: tip
94 |
95 | Static surfaces can be used as a **proxy for assessing systematic and random errors**, which directly relates to
96 | what is commonly referred to as accuracy and precision of elevation data, detailed in the **next guide page on {ref}`accuracy-precision`**.
97 |
98 | See the **{ref}`spatial-stats` guide page** for more details on spatial statistics applied to uncertainty quantification.
99 |
100 | **References:** [Hugonnet et al. (2022)](https://doi.org/10.1109/jstars.2022.3188922), Uncertainty analysis of digital elevation models by spatial inference from stable terrain.
101 | :::
102 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: xdem
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python>=3.10,<3.13
6 | - geopandas>=0.12.0
7 | - numba=0.*
8 | - numpy>=1,<3
9 | - matplotlib=3.*
10 | - pyproj>=3.4,<4
11 | - rasterio>=1.3,<2
12 | - scipy=1.*
13 | - tqdm
14 | - scikit-gstat>=1.0.18,<1.1
15 | - geoutils=0.1.16
16 | - affine
17 | - pandas
18 | - pyogrio
19 | - shapely
20 | - pip
21 |
22 | # To run CI against latest GeoUtils
23 | # - pip:
24 | # - git+https://github.com/rhugonnet/geoutils.git
25 | -------------------------------------------------------------------------------- /examples/advanced/README.rst: -------------------------------------------------------------------------------- 1 | ..
_examples-advanced: 2 |
3 | Advanced
4 | ========
5 |
6 | Examples for setting up **specific coregistration or bias-correction pipelines**, **comparing terrain methods**,
7 | or **refining an error model for DEM uncertainty analysis**.
8 | -------------------------------------------------------------------------------- /examples/advanced/plot_blockwise_coreg.py: -------------------------------------------------------------------------------- 1 | """
2 | Blockwise coregistration
3 | ========================
4 |
5 | Often, biases are spatially variable, and a "global" shift may not be enough to coregister a DEM properly.
6 | In the :ref:`sphx_glr_basic_examples_plot_nuth_kaab.py` example, we saw that the method improved the alignment significantly, but there were still possibly nonlinear artefacts in the result.
7 | Clearly, nonlinear coregistration approaches are needed.
8 | One solution is :class:`xdem.coreg.BlockwiseCoreg`, a helper to run any ``Coreg`` class over an arbitrarily fine grid, and then "puppet warp" the DEM to fit the reference best.
9 |
10 | The ``BlockwiseCoreg`` class runs in five steps:
11 |
12 | 1. Generate a subdivision grid to divide the DEM in N blocks.
13 | 2. Run the requested coregistration approach in each block.
14 | 3. Extract each result as a source and destination X/Y/Z point.
15 | 4. Interpolate the X/Y/Z point-shifts into three shift-rasters.
16 | 5. Warp the DEM to apply the X/Y/Z shifts.
17 |
18 | """
19 |
20 | import geoutils as gu
21 |
22 | # sphinx_gallery_thumbnail_number = 2
23 | import matplotlib.pyplot as plt
24 | import numpy as np
25 | from geoutils.raster.distributed_computing import MultiprocConfig
26 |
27 | import xdem
28 |
29 | # %%
30 | # We open example files.
31 |
32 | reference_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
33 | dem_to_be_aligned = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem"))
34 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines"))
35 |
36 | # Create a stable ground mask (not glacierized) to mark "inlier data"
37 | inlier_mask = ~glacier_outlines.create_mask(reference_dem)
38 |
39 | plt_extent = [
40 | reference_dem.bounds.left,
41 | reference_dem.bounds.right,
42 | reference_dem.bounds.bottom,
43 | reference_dem.bounds.top,
44 | ]
45 |
46 | # %%
47 | # The DEM to be aligned (a 1990 photogrammetry-derived DEM) has some vertical and horizontal biases that we want to correct, as well as possible nonlinear distortions.
48 | # The product is a mosaic of multiple DEMs, so "seams" may exist in the data.
49 | # These can be visualized by plotting a change map:
50 |
51 | diff_before = reference_dem - dem_to_be_aligned
52 |
53 | diff_before.plot(cmap="RdYlBu", vmin=-10, vmax=10)
54 | plt.show()
55 |
56 | # %%
57 | # Horizontal and vertical shifts can be estimated using :class:`xdem.coreg.NuthKaab`.
58 | # Let's prepare a coregistration class with a tiling configuration.
59 | # BlockwiseCoreg can also be used without mp_config, with the parent_path parameter instead.
60 |
61 | mp_config = MultiprocConfig(chunk_size=500, outfile="aligned_dem.tif")
62 | blockwise = xdem.coreg.BlockwiseCoreg(xdem.coreg.NuthKaab(), mp_config=mp_config)
63 |
64 | # %%
65 | # Coregistration is performed with the ``.fit()`` method.
66 |
67 | blockwise.fit(reference_dem, dem_to_be_aligned, inlier_mask)
68 | blockwise.apply()
69 |
70 | aligned_dem = xdem.DEM("aligned_dem.tif")
71 |
72 |
73 | # %%
74 | # The shifts estimated in each block can be visualized as heatmaps.
75 | # Below, we gather the X, Y and Z translations estimated in each block into matrices,
76 | # and plot them side by side.
77 |
78 | rows, cols, _ = blockwise.shape_tiling_grid
79 |
80 | matrix_x = np.full((rows, cols), np.nan)
81 | matrix_y = np.full((rows, cols), np.nan)
82 | matrix_z = np.full((rows, cols), np.nan)
83 |
84 | for key, value in blockwise.meta["outputs"].items():
85 | row, col = map(int, key.split("_"))
86 | matrix_x[row, col] = value["shift_x"]
87 | matrix_y[row, col] = value["shift_y"]
88 | matrix_z[row, col] = value["shift_z"]
89 |
90 |
91 | def plot_heatmap(matrix, title, cmap, ax):
92 | im = ax.imshow(matrix, cmap=cmap)
93 | for (i, j), val in np.ndenumerate(matrix):
94 | ax.text(j, i, f"{val:.2f}", ha="center", va="center", color="black")
95 | ax.set_title(title)
96 | ax.set_xticks(np.arange(cols))
97 | ax.set_yticks(np.arange(rows))
98 | ax.invert_yaxis()
99 | plt.colorbar(im, ax=ax)
100 |
101 |
102 | fig, axes = plt.subplots(1, 3, figsize=(18, 6))
103 | plot_heatmap(matrix_x, "shifts in X", "Reds", axes[0])
104 | plot_heatmap(matrix_y, "shifts in Y", "Greens", axes[1])
105 | plot_heatmap(matrix_z, "shifts in Z", "Blues", axes[2])
106 |
107 | plt.tight_layout()
108 | plt.show()
109 |
110 | # %%
111 | # Then, the new difference can be plotted to validate that it improved.
112 |
113 | diff_after = reference_dem - aligned_dem
114 |
115 | diff_after.plot(cmap="RdYlBu", vmin=-10, vmax=10)
116 | plt.show()
117 |
118 | # %%
119 | # We can compare the NMAD to validate numerically that there was an improvement:
120 |
121 |
122 | print(f"Error before: {gu.stats.nmad(diff_before[inlier_mask]):.2f} m")
123 | print(f"Error after: {gu.stats.nmad(diff_after[inlier_mask]):.2f} m")
124 | -------------------------------------------------------------------------------- /examples/advanced/plot_demcollection.py: -------------------------------------------------------------------------------- 1 | """
2 | Working with a collection of DEMs
3 | =================================
4 |
5 | .. caution:: This functionality might be removed in future package versions.
6 |
7 | Oftentimes, more than two timestamps (DEMs) are analyzed simultaneously.
8 | One single dDEM only captures one interval, so multiple dDEMs have to be created.
9 | In addition, if multiple masking polygons exist (e.g. glacier outlines from multiple years), these should be accounted for properly.
10 | The :class:`xdem.DEMCollection` is a tool to properly work with multiple timestamps at the same time, and makes calculations of elevation/volume change over multiple years easy.
11 | """
12 |
13 | from datetime import datetime
14 |
15 | import geoutils as gu
16 | import matplotlib.pyplot as plt
17 |
18 | import xdem
19 |
20 | # %%
21 | # **Example data**.
22 | #
23 | # We can load the DEMs as usual, but with the addition that the ``datetime`` argument should be filled.
24 | # Since multiple DEMs are in question, the "time dimension" is what keeps them apart.
25 |
26 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
27 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem"))
28 |
29 |
30 | # %%
31 | # For glacier research (and many other fields), only a subset of the DEMs is usually interesting.
32 | # These parts can be delineated with masks or polygons.
33 | # Here, we have glacier outlines from 1990 and 2009.
34 | outlines = {
35 | datetime(1990, 8, 1): gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")),
36 | datetime(2009, 8, 1): gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines_2010")),
37 | }
38 |
39 | # %%
40 | # To experiment with a longer time-series, we can also fake a 2060 DEM, by simply exaggerating the 1990-2009 change.
41 |
42 | # Fake a 2060 DEM by adding three times the 1990-2009 change to the 2009 DEM
43 | dem_2060 = dem_2009 + (dem_2009 - dem_1990).data * 3
44 |
45 | timestamps = [datetime(1990, 8, 1), datetime(2009, 8, 1), datetime(2060, 8, 1)]
46 |
47 | # %%
48 | # Now, all data are ready to be collected in an :class:`xdem.DEMCollection` object.
49 | # What we have are:
50 | # 1. Three DEMs from 1990, 2009, and 2060 (the last is artificial)
51 | # 2. Two glacier outline timestamps from 1990 and 2009
52 | #
53 |
54 | demcollection = xdem.DEMCollection(
55 | dems=[dem_1990, dem_2009, dem_2060], timestamps=timestamps, outlines=outlines, reference_dem=1
56 | )
57 |
58 | # %%
59 | # We can generate :class:`xdem.dDEM` objects using :func:`xdem.DEMCollection.subtract_dems`.
60 | # In this case, it will generate three dDEMs:
61 | #
62 | # * 1990-2009
63 | # * 2009-2009 (to maintain the ``dems`` and ``ddems`` list length and order)
64 | # * 2060-2009 (note the inverted order; negative change will be positive)
65 |
66 | _ = demcollection.subtract_dems()
67 |
68 | # %%
69 | # These are saved internally, but are also returned as a list.
70 | #
71 | # An elevation or volume change series can automatically be generated from the ``DEMCollection``.
72 | # In this case, we should specify *which* glacier we want the change for, as a regional value may not always be required.
73 | # We can look at the glacier called "Scott Turnerbreen", specified in the "NAME" column of the outline data.
74 | # `See here for the outline filtering syntax `_.
75 |
76 | demcollection.get_cumulative_series(kind="dh", outlines_filter="NAME == 'Scott Turnerbreen'")
77 |
78 | # %%
79 | # And there we have a cumulative dH series of the glacier Scott Turnerbreen on Svalbard!
80 | # The dDEMs can be visualized to give further context.
81 |
82 | extent = [
83 | demcollection.dems[0].bounds.left,
84 | demcollection.dems[0].bounds.right,
85 | demcollection.dems[0].bounds.bottom,
86 | demcollection.dems[0].bounds.top,
87 | ]
88 |
89 | scott_extent = [518600, 523800, 8666600, 8672300]
90 |
91 | for i in range(2):
92 | plt.subplot(1, 2, i + 1)
93 |
94 | if i == 0:
95 | title = "1990 - 2009"
96 | ddem_data = demcollection.ddems[0].data.squeeze()
97 | else:
98 | title = "2009 - 2060"
99 | # The 2009 - 2060 dDEM is inverted since the reference year is 2009
100 | ddem_data = -demcollection.ddems[2].data.squeeze()
101 |
102 | plt.imshow(ddem_data, cmap="RdYlBu", vmin=-50, vmax=50, extent=extent)
103 | plt.xlim(scott_extent[:2])
104 | plt.ylim(scott_extent[2:])
105 | plt.title(title)
106 |
107 | plt.tight_layout()
108 | plt.show()
-------------------------------------------------------------------------------- /examples/advanced/plot_deramp.py: -------------------------------------------------------------------------------- 1 | """
2 | Bias-correction with deramping
3 | ==============================
4 |
5 | Deramping can help correct rotational or doming errors in elevation data.
6 | In xDEM, this approach is implemented through the :class:`xdem.coreg.Deramp` class.
7 |
8 | See also the :ref:`deramp` section in feature pages.
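In short, a low-order two-dimensional polynomial surface f(x, y) is fit to the elevation differences over static surfaces, and then subtracted from the elevation data being corrected.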
9 | """ 10 | 11 | import geoutils as gu 12 | import numpy as np 13 | 14 | import xdem 15 | 16 | # %% 17 | # We open example files. 18 | reference_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 19 | dem_to_be_aligned = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 20 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 21 | 22 | # Create a stable ground mask (not glacierized) to mark "inlier data" 23 | inlier_mask = ~glacier_outlines.create_mask(reference_dem) 24 | 25 | # %% 26 | # We visualize the patterns of error from the elevation differences. 27 | 28 | diff_before = reference_dem - dem_to_be_aligned 29 | diff_before.plot(cmap="RdYlBu", vmin=-10, vmax=10, cbar_title="Elevation differences (m)") 30 | 31 | 32 | # %% 33 | # A 2-D 3rd order polynomial is estimated, and applied to the data: 34 | 35 | deramp = xdem.coreg.Deramp(poly_order=2) 36 | 37 | corrected_dem = deramp.fit_and_apply(reference_dem, dem_to_be_aligned, inlier_mask=inlier_mask) 38 | 39 | # %% 40 | # Then, the new difference can be plotted. 41 | 42 | diff_after = reference_dem - corrected_dem 43 | diff_after.plot(cmap="RdYlBu", vmin=-10, vmax=10, cbar_title="Elevation differences (m)") 44 | 45 | 46 | # %% 47 | # We compare the median and NMAD to validate numerically that there was an improvement (see :ref:`robuststats-meanstd`): 48 | inliers_before = diff_before[inlier_mask] 49 | med_before, nmad_before = np.ma.median(inliers_before), gu.stats.nmad(inliers_before) 50 | 51 | inliers_after = diff_after[inlier_mask] 52 | med_after, nmad_after = np.ma.median(inliers_after), gu.stats.nmad(inliers_after) 53 | 54 | print(f"Error before: median = {med_before:.2f} - NMAD = {nmad_before:.2f} m") 55 | print(f"Error after: median = {med_after:.2f} - NMAD = {nmad_after:.2f} m") 56 | -------------------------------------------------------------------------------- /examples/advanced/plot_norm_regional_hypso.py: -------------------------------------------------------------------------------- 1 | """ 2 | Normalized regional hypsometric interpolation 3 | ============================================= 4 | 5 | .. caution:: This functionality is specific to glaciers, and might be removed in future package versions. 6 | 7 | There are many ways of interpolating gaps in elevation differences. 8 | In the case of glaciers, one very useful fact is that elevation change generally varies with elevation. 9 | This means that if valid pixels exist in a certain elevation bin, their values can be used to fill other pixels in the same approximate elevation. 10 | Filling gaps by elevation is the main basis of "hypsometric interpolation approaches", of which there are many variations of. 11 | 12 | One problem with simple hypsometric approaches is that they may not work for glaciers with different elevation ranges and scales. 13 | Let's say we have two glaciers: one gigantic reaching from 0-1000 m, and one small from 900-1100 m. 14 | Usually in the 2000s, glaciers thin rapidly at the bottom, while they may be neutral or only thin slightly in the top. 15 | If we extrapolate the hypsometric signal of the gigantic glacier to use on the small one, it may seem like the smaller glacier has almost no change whatsoever. 16 | This may be right, or it may be catastrophically wrong! 17 | 18 | Normalized regional hypsometric interpolation solves the scale and elevation range problems in one go. It: 19 | 20 | 1. Calculates a regional signal using the weighted average of each glacier's normalized signal: 21 | 22 | a. 
The glacier's elevation range is scaled from 0-1 to be elevation-independent.
23 | b. The glacier's elevation change is scaled from 0-1 to be magnitude-independent.
24 | c. A weight is assigned by the number of valid pixels (well-covered large glaciers gain a higher weight)
25 |
26 | 2. Re-scales that signal to fit each glacier once determined.
27 |
28 | The consequence is a much more accurate interpolation approach that can be used in a multitude of glacierized settings.
29 | """
30 |
31 | import geoutils as gu
32 |
33 | # sphinx_gallery_thumbnail_number = 2
34 | import matplotlib.pyplot as plt
35 | import numpy as np
36 |
37 | import xdem
38 | import xdem.misc
39 |
40 | # %%
41 | # **Example files**
42 |
43 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
44 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem_coreg"))
45 |
46 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines"))
47 |
48 | # Rasterize the glacier outlines to create an index map.
49 | # Stable ground is 0, the first glacier is 1, the second is 2, etc.
50 | glacier_index_map = glacier_outlines.rasterize(dem_2009)
51 |
52 | plt_extent = [
53 | dem_2009.bounds.left,
54 | dem_2009.bounds.right,
55 | dem_2009.bounds.bottom,
56 | dem_2009.bounds.top,
57 | ]
58 |
59 |
60 | # %%
61 | # To test the method, we can generate a random mask to assign NaNs across the DEM.
62 | # Let's randomly keep 30% of the data and remove the rest.
63 | index_nans = dem_2009.subsample(subsample=0.3, return_indices=True)
64 | mask_nans = dem_2009.copy(new_array=np.ones(dem_2009.shape))
65 | mask_nans[index_nans] = 0
66 |
67 | mask_nans.plot()
68 |
69 | # %%
70 | # The normalized hypsometric signal shows the tendency for elevation change as a function of elevation.
71 | # The magnitude may vary between glaciers, but the shape is generally similar.
72 | # Normalizing by both elevation and elevation change, and then re-scaling the signal to every glacier, ensures that it is as accurate as possible.
73 | # **NOTE**: The hypsometric signal does not need to be generated separately; it will be created by :func:`xdem.volume.norm_regional_hypsometric_interpolation`.
74 | # Generating it first, however, allows us to visualize and validate it.
75 |
76 | ddem = dem_2009 - dem_1990
77 | ddem_voided = np.where(mask_nans.data, np.nan, ddem.data)
78 |
79 | signal = xdem.volume.get_regional_hypsometric_signal(
80 | ddem=ddem_voided,
81 | ref_dem=dem_2009.data,
82 | glacier_index_map=glacier_index_map,
83 | )
84 |
85 | plt.fill_between(signal.index.mid, signal["sigma-1-lower"], signal["sigma-1-upper"], label="Spread (+- 1 sigma)")
86 | plt.plot(signal.index.mid, signal["w_mean"], color="black", label="Weighted mean")
87 | plt.ylabel("Normalized elevation change")
88 | plt.xlabel("Normalized elevation")
89 | plt.legend()
90 | plt.show()
91 |
92 | # %%
93 | # The signal can now be used (or simply estimated again if not provided) to interpolate the DEM.
94 |
95 | ddem_filled = xdem.volume.norm_regional_hypsometric_interpolation(
96 | voided_ddem=ddem_voided, ref_dem=dem_2009, glacier_index_map=glacier_index_map, regional_signal=signal
97 | )
98 |
99 |
100 | plt.imshow(ddem_filled.data, cmap="RdYlBu", vmin=-10, vmax=10, extent=plt_extent)
101 | plt.colorbar()
102 | plt.show()
103 |
104 |
105 | # %%
106 | # We can plot the difference between the actual and the interpolated values, to validate the method.
107 |
108 | difference = (ddem_filled - ddem)[mask_nans.data]
109 | median = np.ma.median(difference)
110 | nmad = gu.stats.nmad(difference)
111 |
112 | plt.title(f"Median: {median:.2f} m, NMAD: {nmad:.2f} m")
113 | plt.hist(difference.data, bins=np.linspace(-15, 15, 100))
114 | plt.show()
115 |
116 | # %%
117 | # As we see, the median is close to zero, while the NMAD varies slightly more.
118 | # This is expected, as the regional signal is good for multiple glaciers at once, but it cannot account for difficult local topography and meteorological conditions.
119 | # It is therefore highly recommended for large regions; just don't zoom in too close!
120 | -------------------------------------------------------------------------------- /examples/advanced/plot_slope_methods.py: -------------------------------------------------------------------------------- 1 | """
2 | Slope and aspect methods
3 | ========================
4 |
5 | Terrain slope and aspect can be estimated using different methods.
6 | Here is an example of how to generate the two with each method, and understand their differences.
7 |
8 | See also the :ref:`terrain-attributes` feature page.
9 |
10 | **References:** `Horn (1981) `_, `Zevenbergen and Thorne (1987) `_.
11 | """
12 |
13 | import matplotlib.pyplot as plt
14 | import numpy as np
15 |
16 | import xdem
17 |
18 | # %%
19 | # We open example data.
20 | dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
21 |
22 |
23 | def plot_attribute(attribute, cmap, label=None, vlim=None):
24 | vlims = {}  # Default when vlim is None (avoids an undefined variable below)
25 | if vlim is not None:
26 | if isinstance(vlim, (int, np.integer, float, np.floating)):
27 | vlims = {"vmin": -vlim, "vmax": vlim}
28 | elif len(vlim) == 2:
29 | vlims = {"vmin": vlim[0], "vmax": vlim[1]}
30 | else:
31 | vlims = {}
32 |
33 | attribute.plot(cmap=cmap, cbar_title=label, **vlims)
34 |
35 | plt.xticks([])
36 | plt.yticks([])
37 | plt.tight_layout()
38 |
39 | plt.show()
40 |
41 |
42 | # %%
43 | # Slope with method of Horn (1981) (GDAL default), based on a refined
44 | # approximation of the gradient (page 18, bottom left, and pages 20-21).
45 |
46 | slope_horn = xdem.terrain.slope(dem)
47 |
48 | plot_attribute(slope_horn, "Reds", "Slope of Horn (1981) (°)")
49 |
50 | # %%
51 | # Slope with method of Zevenbergen and Thorne (1987), Equation 13.
52 |
53 | slope_zevenberg = xdem.terrain.slope(dem, method="ZevenbergThorne")
54 |
55 | plot_attribute(slope_zevenberg, "Reds", "Slope of Zevenberg and Thorne (1987) (°)")
56 |
57 | # %%
58 | # We compute the difference between the slopes computed with each method.
59 |
60 | diff_slope = slope_horn - slope_zevenberg
61 |
62 | plot_attribute(diff_slope, "RdYlBu", "Slope of Horn (1981) minus\n slope of Zevenberg and Thorne (1987) (°)", vlim=3)
63 |
64 | # %%
65 | # The differences are negative, implying that the method of Horn always provides flatter slopes.
66 | # Additionally, they seem to occur in places of high curvatures. We verify this by plotting the maximum curvature.
67 |
68 | maxc = xdem.terrain.maximum_curvature(dem)
69 |
70 | plot_attribute(maxc, "RdYlBu", "Maximum curvature (100 m $^{-1}$)", vlim=2)
71 |
72 | # %%
73 | # We quantify the relationship by computing the median of slope differences in bins of curvatures, and plot the
74 | # result. We define custom bins for curvature, due to its skewed distribution.
75 |
76 | df_bin = xdem.spatialstats.nd_binning(
77 |     values=diff_slope[:],
78 |     list_var=[maxc[:]],
79 |     list_var_names=["maxc"],
80 |     list_var_bins=30,
81 |     statistics=[np.nanmedian, "count"],
82 | )
83 |
84 | xdem.spatialstats.plot_1d_binning(
85 |     df_bin,
86 |     var_name="maxc",
87 |     statistic_name="nanmedian",
88 |     label_var="Maximum absolute curvature (100 m$^{-1}$)",
89 |     label_statistic="Slope of Horn (1981) minus\n " "slope of Zevenbergen and Thorne (1987) (°)",
90 | )
91 |
92 |
93 | # %%
94 | # We perform the same exercise to analyze the differences in terrain aspect. We compute the difference modulo 360°,
95 | # to account for the circularity of aspect.
96 |
97 | aspect_horn = xdem.terrain.aspect(dem)
98 | aspect_zevenberg = xdem.terrain.aspect(dem, method="ZevenbergThorne")
99 |
100 | diff_aspect = aspect_horn - aspect_zevenberg
101 | diff_aspect_mod = np.minimum(diff_aspect % 360, 360 - diff_aspect % 360)
102 |
103 | plot_attribute(
104 |     diff_aspect_mod, "Spectral", "Aspect of Horn (1981) minus\n aspect of Zevenbergen and Thorne (1987) (°)", vlim=[0, 90]
105 | )
106 |
107 | # %%
108 | # As for slope, differences in aspect seem to coincide with high-curvature areas. We also observe large
109 | # differences for areas with nearly flat slopes, owing to the high sensitivity of orientation estimation
110 | # on flat terrain.
111 |
112 | # .. note:: The default aspect for a 0° slope is 180°, as in GDAL.
113 |
-------------------------------------------------------------------------------- /examples/basic/README.rst: --------------------------------------------------------------------------------
1 | .. _examples-basic:
2 |
3 | Basic
4 | =====
5 |
6 | Examples using **terrain methods** and **DEM differences**, as well as
7 | pre-defined **coregistration** and **uncertainty analysis** pipelines.
-------------------------------------------------------------------------------- /examples/basic/plot_dem_subtraction.py: --------------------------------------------------------------------------------
1 | """
2 | DEM differencing
3 | ================
4 |
5 | Subtracting one DEM from another should be easy.
6 |
7 | xDEM allows the use of any operator on :class:`xdem.DEM` objects, such as :func:`+` or :func:`-`, as well as most NumPy functions,
8 | while respecting nodata values and checking that georeferencing is consistent. This functionality is inherited from `GeoUtils' Raster class `_.
9 |
10 | Before DEMs can be compared, they need to be reprojected to the same grid and have the same 3D CRSs. The :func:`~xdem.DEM.reproject` and :func:`~xdem.DEM.to_vcrs` methods are used for this.
11 |
12 | """
13 |
14 | import geoutils as gu
15 |
16 | import xdem
17 |
18 | # %%
19 | # We load two DEMs near Longyearbyen, Svalbard.
20 |
21 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
22 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem_coreg"))
23 |
24 | # %%
25 | # We can print information about the DEMs as a "sanity check".
26 |
27 | dem_2009.info()
28 | dem_1990.info()
29 |
30 | # %%
31 | # In this particular case, the two DEMs are already on the same grid (they have the same bounds, resolution and coordinate system).
32 | # If they are not, we need to reproject one DEM to fit the other using :func:`xdem.DEM.reproject`:
33 |
34 | dem_1990 = dem_1990.reproject(dem_2009)
35 |
36 | # %%
37 | # Oops!
38 | # GeoUtils just warned us that ``dem_1990`` did not need reprojection. We can hide this output with ``silent``.
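# (Hedged sketch, not in the original example: the same call with the warning
# silenced, assuming ``silent`` is accepted as a keyword argument, would be
# ``dem_1990 = dem_1990.reproject(dem_2009, silent=True)``.)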
39 | # By default, :func:`~xdem.DEM.reproject` uses "bilinear" resampling (assuming resampling is needed).
40 | # Other options are detailed at `geoutils.Raster.reproject() `_ and `rasterio.enums.Resampling `_.
41 | #
42 | # We now compute the difference by simply subtracting, passing ``stats=True`` to :func:`xdem.DEM.info` to print statistics.
43 |
44 | ddem = dem_2009 - dem_1990
45 |
46 | ddem.info(stats=True)
47 |
48 | # %%
49 | # It is a new :class:`~xdem.DEM` instance, loaded in memory.
50 | # Let's visualize it, with some glacier outlines.
51 |
52 | # Load the outlines
53 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines"))
54 | glacier_outlines = glacier_outlines.crop(ddem, clip=True)
55 | ddem.plot(cmap="RdYlBu", vmin=-20, vmax=20, cbar_title="Elevation differences (m)")
56 | glacier_outlines.plot(ref_crs=ddem, fc="none", ec="k")
57 |
58 | # %%
59 | # And we save the output to file.
60 |
61 | ddem.save("temp.tif")
62 |
-------------------------------------------------------------------------------- /examples/basic/plot_icp_coregistration.py: --------------------------------------------------------------------------------
1 | """
2 | Iterative closest point coregistration
3 | ======================================
4 |
5 | Iterative closest point (ICP) is a registration method accounting for both rotations and translations.
6 |
7 | It is used primarily to correct rotations, as it generally performs worse than :ref:`nuthkaab` for sub-pixel shifts.
8 | Fortunately, xDEM provides the best of both worlds by allowing a combination of the two methods in a pipeline,
9 | demonstrated below!
10 |
11 | **References**: `Besl and McKay (1992) `_.
12 | """
13 |
14 | # sphinx_gallery_thumbnail_number = 2
15 | import matplotlib.pyplot as plt
16 | import numpy as np
17 |
18 | import xdem
19 |
20 | # %%
21 | # We load a DEM and crop it to a single mountain on Svalbard, called Battfjellet.
22 | # Its aspect varies in every direction, and it is therefore a good candidate for coregistration exercises.
23 | dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
24 |
25 | subset_extent = [523000, 8660000, 529000, 8665000]
26 | dem = dem.crop(subset_extent)
27 |
28 | # %%
29 | # Let's plot a hillshade of the mountain for context.
30 | xdem.terrain.hillshade(dem).plot(cmap="gray")
31 |
32 | # %%
33 | # To try the effects of rotation, we can artificially rotate the DEM using a transformation matrix.
34 | # Here, a rotation of just one degree is attempted.
35 | # But keep in mind: the window is 6 km wide; 1 degree of rotation at the center corresponds to a 52 m vertical difference at the edges!
36 |
37 | rotation = np.deg2rad(1)
38 | rotation_matrix = np.array(
39 |     [
40 |         [np.cos(rotation), 0, np.sin(rotation), 0],
41 |         [0, 1, 0, 0],
42 |         [-np.sin(rotation), 0, np.cos(rotation), 0],
43 |         [0, 0, 0, 1],
44 |     ]
45 | )
46 | centroid = [dem.bounds.left + dem.width / 2, dem.bounds.bottom + dem.height / 2, np.nanmean(dem)]
47 | # This will apply the matrix along the center of the DEM
48 | rotated_dem = xdem.coreg.apply_matrix(dem, matrix=rotation_matrix, centroid=centroid)
49 |
50 | # %%
51 | # We can plot the difference between the original and rotated DEM.
52 | # It is now artificially tilted, dipping from east down to the west.
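# (Hedged sanity check of the "52 m at the edges" figure quoted above: the 6 km
# window has a 3000 m half-width, and 3000 m * tan(1°) is roughly 52.4 m.)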
53 | diff_before = dem - rotated_dem
54 | diff_before.plot(cmap="RdYlBu", vmin=-20, vmax=20, cbar_title="Elevation differences (m)")
55 | plt.show()
56 |
57 | # %%
58 | # As previously mentioned, ``NuthKaab`` works well on the sub-pixel scale but does not handle rotation.
59 | # ``ICP`` works with rotation but lacks sub-pixel accuracy.
60 | # Luckily, these can be combined!
61 | # Any :class:`xdem.coreg.Coreg` subclass can be added to another, making a :class:`xdem.coreg.CoregPipeline`.
62 | # With a pipeline, each step is run sequentially, potentially leading to a better result.
63 | # Let's try all three approaches: ``ICP``, ``NuthKaab`` and ``ICP + NuthKaab``.
64 |
65 | approaches = [
66 |     (xdem.coreg.ICP(), "ICP"),
67 |     (xdem.coreg.NuthKaab(), "NuthKaab"),
68 |     (xdem.coreg.ICP() + xdem.coreg.NuthKaab(), "ICP + NuthKaab"),
69 | ]
70 |
71 |
72 | plt.figure(figsize=(6, 12))
73 |
74 | for i, (approach, name) in enumerate(approaches):
75 |     corrected_dem = approach.fit_and_apply(
76 |         reference_elev=dem,
77 |         to_be_aligned_elev=rotated_dem,
78 |     )
79 |
80 |     diff = dem - corrected_dem
81 |
82 |     ax = plt.subplot(3, 1, i + 1)
83 |     plt.title(name)
84 |     diff.plot(cmap="RdYlBu", vmin=-20, vmax=20, ax=ax, cbar_title="Elevation differences (m)")
85 |
86 | plt.tight_layout()
87 | plt.show()
88 |
89 |
90 | # %%
91 | # The results show what we expected:
92 | #
93 | # - **ICP** alone handled the rotational offset, but left a horizontal offset as it is not sub-pixel accurate (in this case, the resolution is 20 x 20 m).
94 | # - **Nuth and Kääb** barely helped at all, since the offset is purely rotational.
95 | # - **ICP + Nuth and Kääb** first handled the rotation, then fit the reference with sub-pixel accuracy.
96 | #
97 | # The last result is an almost identical raster that was offset but then corrected back to its original position!
98 |
-------------------------------------------------------------------------------- /examples/basic/plot_infer_heterosc.py: --------------------------------------------------------------------------------
1 | """
2 | Elevation error map
3 | ===================
4 |
5 | Digital elevation models have a precision that can vary with terrain and instrument-related variables. Here, we
6 | rely on a non-stationary spatial statistics framework to estimate and model this variability in elevation error,
7 | using terrain slope and maximum curvature as explanatory variables, with stable terrain as an error proxy for moving
8 | terrain.
9 |
10 | **Reference:** `Hugonnet et al. (2022) `_.
11 | """
12 |
13 | import geoutils as gu
14 |
15 | # sphinx_gallery_thumbnail_number = 1
16 | import xdem
17 |
18 | # %%
19 | # We load a difference of DEMs at Longyearbyen, already coregistered using :ref:`nuthkaab` as shown in
20 | # the :ref:`sphx_glr_basic_examples_plot_nuth_kaab.py` example. We also load the reference DEM to derive terrain
21 | # attributes, and the glacier outlines, which here correspond to moving terrain.
22 | dh = xdem.DEM(xdem.examples.get_path("longyearbyen_ddem"))
23 | ref_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
24 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines"))
25 |
26 | # %%
27 | # We derive the terrain slope and maximum curvature from the reference DEM.
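# (Hedged note, not in the original example: each attribute could also be derived
# separately, e.g. ``xdem.terrain.slope(ref_dem)``; the single call below is
# computationally more efficient, as some attributes share intermediate results.)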
28 | slope, maximum_curvature = xdem.terrain.get_terrain_attribute(ref_dem, attribute=["slope", "maximum_curvature"])
29 |
30 | # %%
31 | # Then, we run the pipeline for inference of elevation heteroscedasticity from stable terrain:
32 | errors, df_binning, error_function = xdem.spatialstats.infer_heteroscedasticity_from_stable(
33 |     dvalues=dh, list_var=[slope, maximum_curvature], list_var_names=["slope", "maxc"], unstable_mask=glacier_outlines
34 | )
35 |
36 | # %%
37 | # The first output corresponds to the error map for the DEM (:math:`\pm` 1\ :math:`\sigma` level):
38 | errors.plot(vmin=2, vmax=7, cmap="Reds", cbar_title=r"Elevation error (1$\sigma$, m)")
39 |
40 | # %%
41 | # The second output is the dataframe of 2D binning with slope and maximum curvature:
42 | df_binning
43 |
44 | # %%
45 | # The third output is the 2D binning interpolant, i.e. an error function with the slope and maximum curvature
46 | # (*Note: below we divide the maximum curvature by 100 to convert it to* m\ :sup:`-1` ):
47 | for slope, maxc in [(0, 0), (40, 0), (0, 5), (40, 5)]:
48 |     print(
49 |         "Error for a slope of {:.0f} degrees and"
50 |         " {:.2f} m-1 max. curvature: {:.1f} m".format(slope, maxc / 100, error_function((slope, maxc)))
51 |     )
52 |
53 | # %%
54 | # This pipeline will not always work optimally with default parameters: spread estimates can be affected by skewed
55 | # distributions, the binning by extreme ranges of values, and some DEMs do not have any error variability with terrain (e.g.,
56 | # terrestrial photogrammetry). **To learn how to tune more parameters and use the subfunctions, see the gallery example:**
57 | # :ref:`sphx_glr_advanced_examples_plot_heterosc_estimation_modelling.py`!
58 |
-------------------------------------------------------------------------------- /examples/basic/plot_infer_spatial_correlation.py: --------------------------------------------------------------------------------
1 | """
2 | Spatial correlation of errors
3 | =============================
4 |
5 | Digital elevation models have errors that are spatially correlated due to instrument or processing effects. Here, we
6 | rely on a non-stationary spatial statistics framework to estimate and model spatial correlations in elevation error.
7 | We use a sum of variogram forms to model this correlation, with stable terrain as an error proxy for moving terrain.
8 |
9 | **References:** `Rolstad et al. (2009) `_, `Hugonnet et al. (2022) `_.
10 | """
11 |
12 | import geoutils as gu
13 |
14 | # sphinx_gallery_thumbnail_number = 1
15 | import xdem
16 |
17 | # %%
18 | # We load a difference of DEMs at Longyearbyen, already coregistered using :ref:`nuthkaab` as shown in
19 | # the :ref:`sphx_glr_basic_examples_plot_nuth_kaab.py` example. We also load the glacier outlines, which here correspond to
20 | # moving terrain.
21 | dh = xdem.DEM(xdem.examples.get_path("longyearbyen_ddem"))
22 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines"))
23 |
24 | # %%
25 | # Then, we run the pipeline for inference of the spatial correlation of errors from stable terrain (*Note: we pass a*
26 | # ``random_state`` *argument to ensure a fixed, reproducible random subsampling in this example*). We ask for a fit with
27 | # a Gaussian model for the short range (as it is passed first), and a Spherical model for the long range (as it is passed second):
28 | (
29 |     df_empirical_variogram,
30 |     df_model_params,
31 |     spatial_corr_function,
32 | ) = xdem.spatialstats.infer_spatial_correlation_from_stable(
33 |     dvalues=dh, list_models=["Gaussian", "Spherical"], unstable_mask=glacier_outlines, random_state=42
34 | )
35 |
36 | # %%
37 | # The first output corresponds to the dataframe of the empirical variogram, by default estimated using Dowd's estimator
38 | # and a circular sampling scheme in SciKit-GStat (following Fig. S13 of Hugonnet et al. (2022)). The
39 | # ``lags`` column is the upper bound of the spatial lag bins (the lower bound of the first bin being 0), the ``exp`` column is the
40 | # "experimental" variance value of the variogram in that bin, the ``count`` column the number of pairwise samples, and
41 | # ``err_exp`` the 1-sigma error of the "experimental" variance, if more than one variogram is estimated with the
42 | # ``n_variograms`` parameter.
43 | df_empirical_variogram
44 |
45 | # %%
46 | # The second output is the dataframe of optimized model parameters (``range``, ``sill``, and possibly ``smoothness``)
47 | # for a sum of Gaussian and Spherical models:
48 | df_model_params
49 |
50 | # %%
51 | # The third output is the spatial correlation function with spatial lags, derived from the variogram:
52 | for spatial_lag in [0, 100, 1000, 10000, 30000]:
53 |     print(
54 |         "Errors are correlated at {:.1f}% for a {:,.0f} m spatial lag".format(
55 |             spatial_corr_function(spatial_lag) * 100, spatial_lag
56 |         )
57 |     )
58 |
59 | # %%
60 | # We can plot the empirical variogram and its model on a non-linear X-axis to identify the multi-scale correlations.
61 | xdem.spatialstats.plot_variogram(
62 |     df=df_empirical_variogram,
63 |     list_fit_fun=[xdem.spatialstats.get_variogram_model_func(df_model_params)],
64 |     xlabel="Spatial lag (m)",
65 |     ylabel="Variance of\nelevation differences (m)",
66 |     xscale_range_split=[100, 1000],
67 | )
68 |
69 | # %%
70 | # This pipeline will not always work optimally with default parameters: variogram sampling is more robust with many
71 | # samples but requires long computation times, and the fitting might require multiple attempts with different model
72 | # forms, and possibly bounds and first guesses, to help the least-squares optimization. **To learn how to tune more parameters and use the
73 | # subfunctions, see the gallery example:** :ref:`sphx_glr_advanced_examples_plot_variogram_estimation_modelling.py`!
74 |
-------------------------------------------------------------------------------- /examples/basic/plot_logging_configuration.py: --------------------------------------------------------------------------------
1 | """
2 | Configuring verbosity level
3 | ===========================
4 |
5 | This example demonstrates how to configure the verbosity level, or logging, using a coregistration method.
6 | Logging can be customized to various severity levels, from ``DEBUG`` for detailed diagnostic output, to ``INFO`` for
7 | general updates, ``WARNING`` for potential issues, and ``ERROR`` or ``CRITICAL`` for serious problems.
8 |
9 | Setting the verbosity to a certain severity level prints all outputs from that level and those above. For instance,
10 | level ``INFO`` also prints warning, error and critical messages.
11 |
12 | See also :ref:`config`.
13 |
14 | .. important:: The verbosity level defaults to ``WARNING``, so no ``INFO`` or ``DEBUG`` is printed.
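(For reference, these severity levels correspond to the numeric values 10, 20, 30, 40 and 50 in Python's ``logging`` module.)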
15 | """ 16 | 17 | import logging 18 | 19 | import xdem 20 | 21 | # %% 22 | # We start by configuring the logging level, which can be as simple as specifying we want to print information. 23 | logging.basicConfig(level=logging.INFO) 24 | 25 | # %% 26 | # We can change the configuration even more by specifying the format, date, and multiple destinations for the output. 27 | logging.basicConfig( 28 | level=logging.INFO, # Change this level to DEBUG or WARNING to see different outputs. 29 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", 30 | datefmt="%Y-%m-%d %H:%M:%S", 31 | handlers=[ 32 | logging.FileHandler("../xdem_example.log"), # Save logs to a file 33 | logging.StreamHandler(), # Also print logs to the console 34 | ], 35 | force=True, # To re-set from previous logging 36 | ) 37 | 38 | # %% 39 | # We can now load example files and demonstrate the logging through a functionality, such as coregistration. 40 | reference_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 41 | dem_to_be_aligned = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 42 | coreg = xdem.coreg.NuthKaab() 43 | 44 | # %% 45 | # With the ``INFO`` verbosity level defined above, we can follow the iteration with a detailed format, saved to file. 46 | aligned_dem = coreg.fit_and_apply(reference_dem, dem_to_be_aligned) 47 | 48 | # %% 49 | # With a more severe verbosity level, there is no output. 50 | logging.basicConfig(level=logging.ERROR, force=True) 51 | aligned_dem = coreg.fit_and_apply(reference_dem, dem_to_be_aligned) 52 | -------------------------------------------------------------------------------- /examples/basic/plot_nuth_kaab.py: -------------------------------------------------------------------------------- 1 | """ 2 | Nuth and Kääb coregistration 3 | ============================ 4 | 5 | The Nuth and Kääb coregistration corrects horizontal and vertical shifts, and is especially performant for precise 6 | sub-pixel alignment in areas with varying slope. 7 | In xDEM, this approach is implemented through the :class:`xdem.coreg.NuthKaab` class. 8 | 9 | See also the :ref:`nuthkaab` section in feature pages. 10 | 11 | **Reference:** `Nuth and Kääb (2011) `_. 12 | """ 13 | 14 | import geoutils as gu 15 | import numpy as np 16 | 17 | import xdem 18 | 19 | # %% 20 | # We open example files. 21 | reference_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 22 | dem_to_be_aligned = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 23 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 24 | 25 | # We create a stable ground mask (not glacierized) to mark "inlier data". 26 | inlier_mask = ~glacier_outlines.create_mask(reference_dem) 27 | 28 | # %% 29 | # The DEM to be aligned (a 1990 photogrammetry-derived DEM) has some vertical and horizontal biases that we want to reduce. 30 | # These can be visualized by plotting a change map: 31 | 32 | diff_before = reference_dem - dem_to_be_aligned 33 | diff_before.plot(cmap="RdYlBu", vmin=-10, vmax=10, cbar_title="Elevation change (m)") 34 | 35 | # %% 36 | # Horizontal and vertical shifts can be estimated using :class:`~xdem.coreg.NuthKaab`. 
37 | # The shifts are estimated, then applied to the to-be-aligned elevation data:
38 |
39 | nuth_kaab = xdem.coreg.NuthKaab()
40 | aligned_dem = nuth_kaab.fit_and_apply(reference_dem, dem_to_be_aligned, inlier_mask)
41 |
42 | # %%
43 | # The shifts are stored in the affine metadata output:
44 |
45 | print([nuth_kaab.meta["outputs"]["affine"][s] for s in ["shift_x", "shift_y", "shift_z"]])
46 |
47 | # %%
48 | # Then, the new difference can be plotted to validate that it improved.
49 |
50 | diff_after = reference_dem - aligned_dem
51 | diff_after.plot(cmap="RdYlBu", vmin=-10, vmax=10, cbar_title="Elevation change (m)")
52 |
53 | # %%
54 | # We compare the median and NMAD to validate numerically that there was an improvement (see :ref:`robuststats-meanstd`):
55 | inliers_before = diff_before[inlier_mask]
56 | med_before, nmad_before = np.ma.median(inliers_before), gu.stats.nmad(inliers_before)
57 |
58 | inliers_after = diff_after[inlier_mask]
59 | med_after, nmad_after = np.ma.median(inliers_after), gu.stats.nmad(inliers_after)
60 |
61 | print(f"Error before: median = {med_before:.2f} - NMAD = {nmad_before:.2f} m")
62 | print(f"Error after: median = {med_after:.2f} - NMAD = {nmad_after:.2f} m")
63 |
64 | # %%
65 | # In the plot above, one may notice a positive (blue) tendency toward the east.
66 | # The 1990 DEM is a mosaic, and likely has a "seam" near there.
67 | # :ref:`sphx_glr_advanced_examples_plot_blockwise_coreg.py` tackles this issue, using a nonlinear coregistration approach.
68 |
-------------------------------------------------------------------------------- /examples/basic/plot_spatial_error_propagation.py: --------------------------------------------------------------------------------
1 | """
2 | Spatial propagation of elevation errors
3 | =======================================
4 |
5 | Spatially propagating elevation errors while accounting for heteroscedasticity and spatial correlation is complex. It
6 | requires computing the pairwise correlations between all points of an area of interest (be it for a sum, mean, or
7 | other operation), which is computationally intensive. Here, we rely on published formulations to perform
8 | computationally-efficient spatial propagation for the mean of elevation (or elevation differences) in an area.
9 |
10 | **References:** `Rolstad et al. (2009) `_, `Hugonnet et al. (2022) `_.
11 | """
12 |
13 | import geoutils as gu
14 | import matplotlib.pyplot as plt
15 |
16 | # sphinx_gallery_thumbnail_number = 1
17 | import numpy as np
18 |
19 | import xdem
20 |
21 | # %%
22 | # We load the same data, and perform the same calculations on heteroscedasticity and spatial correlations of errors as
23 | # in the :ref:`sphx_glr_basic_examples_plot_infer_heterosc.py` and :ref:`sphx_glr_basic_examples_plot_infer_spatial_correlation.py`
24 | # examples.
25 |
26 | dh = xdem.DEM(xdem.examples.get_path("longyearbyen_ddem"))
27 | ref_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
28 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines"))
29 | slope, maximum_curvature = xdem.terrain.get_terrain_attribute(ref_dem, attribute=["slope", "maximum_curvature"])
30 | errors, df_binning, error_function = xdem.spatialstats.infer_heteroscedasticity_from_stable(
31 |     dvalues=dh, list_var=[slope, maximum_curvature], list_var_names=["slope", "maxc"], unstable_mask=glacier_outlines
32 | )
33 |
34 | # %%
35 | # We use the error map to standardize the elevation differences before variogram estimation, which is more robust
36 | # as it removes the variance variability due to heteroscedasticity.
37 | zscores = dh / errors
38 | emp_variogram, params_variogram_model, spatial_corr_function = xdem.spatialstats.infer_spatial_correlation_from_stable(
39 |     dvalues=zscores, list_models=["Gaussian", "Spherical"], unstable_mask=glacier_outlines, random_state=42
40 | )
41 |
42 | # %%
43 | # With our estimated heteroscedasticity and spatial correlation, we can now perform the spatial propagation of errors.
44 | # We select two glaciers intersecting this elevation change map in Svalbard. Their standard error
45 | # is best estimated by directly providing the shapefile (Equation 18, Hugonnet et al., 2022).
46 | areas = [
47 |     glacier_outlines.ds[glacier_outlines.ds["NAME"] == "Brombreen"],
48 |     glacier_outlines.ds[glacier_outlines.ds["NAME"] == "Medalsbreen"],
49 | ]
50 | stderr_glaciers = xdem.spatialstats.spatial_error_propagation(
51 |     areas=areas, errors=errors, params_variogram_model=params_variogram_model
52 | )
53 |
54 | for glacier_name, stderr_gla in [("Brombreen", stderr_glaciers[0]), ("Medalsbreen", stderr_glaciers[1])]:
55 |     print(f"The error (1-sigma) in mean elevation change for {glacier_name} is {stderr_gla:.2f} meters.")
56 |
57 | # %%
58 | # When passing a numerical area value, we compute a disk-shaped approximation (Equation 8, Rolstad et al., 2009).
59 | # This approximation is practical for visualizing changes in elevation error when averaging over different area
60 | # sizes, but is less accurate for estimating the standard error of a specific area shape.
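# (Hedged intuition for the curve below, not in the original example: for areas
# well within a correlation range, errors barely average out; once the averaging
# area contains many independent correlation areas, the standard error of the mean
# decreases roughly with the square root of their number.)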
61 | areas = 10 ** np.linspace(1, 12)
62 | stderrs = xdem.spatialstats.spatial_error_propagation(
63 |     areas=areas, errors=errors, params_variogram_model=params_variogram_model
64 | )
65 | plt.plot(areas / 10**6, stderrs)
66 | plt.xlabel("Averaging area (km²)")
67 | plt.ylabel("Standard error (m)")
68 | plt.vlines(
69 |     x=np.pi * params_variogram_model["range"].values[0] ** 2 / 10**6,
70 |     ymin=np.min(stderrs),
71 |     ymax=np.max(stderrs),
72 |     colors="red",
73 |     linestyles="dashed",
74 |     label="Disk area with radius the\n1st correlation range of {:,.0f} meters".format(
75 |         params_variogram_model["range"].values[0]
76 |     ),
77 | )
78 | plt.vlines(
79 |     x=np.pi * params_variogram_model["range"].values[1] ** 2 / 10**6,
80 |     ymin=np.min(stderrs),
81 |     ymax=np.max(stderrs),
82 |     colors="blue",
83 |     linestyles="dashed",
84 |     label="Disk area with radius the\n2nd correlation range of {:,.0f} meters".format(
85 |         params_variogram_model["range"].values[1]
86 |     ),
87 | )
88 | plt.xscale("log")
89 | plt.legend()
90 | plt.show()
91 |
-------------------------------------------------------------------------------- /examples/basic/plot_terrain_attributes.py: --------------------------------------------------------------------------------
1 | """
2 | Terrain attributes
3 | ==================
4 |
5 | Terrain attributes generated from a DEM have a multitude of uses for analytic and visual purposes.
6 | Here is an example of how to generate these products.
7 |
8 | For more information, see the :ref:`terrain-attributes` feature page.
9 |
10 | **References:** `Horn (1981) `_ (slope, aspect, hillshade),
11 | `Zevenbergen and Thorne (1987) `_ (curvature),
12 | `Riley et al. (1999) `_ (terrain
13 | ruggedness index), `Jenness (2004) `_ (rugosity).
14 | """
15 |
16 | # sphinx_gallery_thumbnail_number = 1
17 | import matplotlib.pyplot as plt
18 |
19 | import xdem
20 |
21 | # %%
22 | # We load the example data.
23 |
24 | dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
25 |
26 | # %%
27 | # We generate multiple terrain attributes at once (computationally more efficient, as some depend on each other).
28 |
29 | attributes = xdem.terrain.get_terrain_attribute(
30 |     dem.data,
31 |     resolution=dem.res,
32 |     attribute=["hillshade", "slope", "aspect", "curvature", "terrain_ruggedness_index", "rugosity"],
33 | )
34 |
35 | plt.figure(figsize=(8, 6.5))
36 |
37 | plt_extent = [dem.bounds.left, dem.bounds.right, dem.bounds.bottom, dem.bounds.top]
38 |
39 | cmaps = ["Greys_r", "Reds", "twilight", "RdGy_r", "Purples", "YlOrRd"]
40 | labels = ["Hillshade", "Slope (°)", "Aspect (°)", "Curvature (100 / m)", "Terrain Ruggedness Index", "Rugosity"]
41 | vlims = [(None, None) for i in range(6)]
42 | vlims[3] = [-2, 2]
43 |
44 | for i in range(6):
45 |     plt.subplot(3, 2, i + 1)
46 |     plt.imshow(attributes[i].squeeze(), cmap=cmaps[i], extent=plt_extent, vmin=vlims[i][0], vmax=vlims[i][1])
47 |     cbar = plt.colorbar()
48 |     cbar.set_label(labels[i])
49 |     plt.xticks([])
50 |     plt.yticks([])
51 |
52 | plt.tight_layout()
53 | plt.show()
54 |
-------------------------------------------------------------------------------- /mypy.ini: --------------------------------------------------------------------------------
1 | [mypy]
2 | plugins = numpy.typing.mypy_plugin
3 |
-------------------------------------------------------------------------------- /pyproject.toml: --------------------------------------------------------------------------------
1 | [build-system]
2 | # Minimum requirements for the build system to execute.
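# (Hedged note, not in the original file: the ``version_file`` key used under
# [tool.setuptools_scm] further below requires setuptools_scm >= 8, hence the
# ``setuptools_scm[toml]>=8`` pin in ``requires``; earlier versions used
# ``write_to`` instead.)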
3 | requires = [ 4 | "setuptools>=64", 5 | "setuptools_scm[toml]>=8", 6 | "wheel", 7 | ] 8 | build-backend = "setuptools.build_meta" 9 | 10 | # To write version to file 11 | [tool.setuptools_scm] 12 | version_file = "xdem/_version.py" 13 | fallback_version = "0.0.1" 14 | 15 | [tool.black] 16 | target_version = ['py310'] 17 | 18 | [tool.pytest.ini_options] 19 | addopts = "--doctest-modules -W error::UserWarning" 20 | testpaths = [ 21 | "tests", 22 | "xdem" 23 | ] 24 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # This file is auto-generated from environment.yml, do not modify. 2 | # See that file for comments about the need/usage of each dependency. 3 | 4 | geopandas>=0.12.0 5 | numba==0.* 6 | numpy>=1,<3 7 | matplotlib==3.* 8 | pyproj>=3.4,<4 9 | rasterio>=1.3,<2 10 | scipy==1.* 11 | tqdm 12 | scikit-gstat>=1.0.18,<1.1 13 | geoutils==0.1.16 14 | affine 15 | pandas 16 | pyogrio 17 | shapely 18 | pip 19 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | author = xDEM developers 3 | name = xdem 4 | version = 0.1.4 5 | description = Analysis of digital elevation models (DEMs) 6 | keywords = dem, elevation, geoutils, xarray 7 | long_description = file: README.md 8 | long_description_content_type = text/markdown 9 | license = Apache-2.0 10 | license_files = LICENSE 11 | platform = any 12 | classifiers = 13 | Development Status :: 4 - Beta 14 | Intended Audience :: Developers 15 | Intended Audience :: Science/Research 16 | Natural Language :: English 17 | Operating System :: OS Independent 18 | Topic :: Scientific/Engineering :: GIS 19 | Topic :: Scientific/Engineering :: Image Processing 20 | Topic :: Scientific/Engineering :: Information Analysis 21 | Programming Language :: Python 22 | Programming Language :: Python :: 3.10 23 | Programming Language :: Python :: 3.11 24 | Programming Language :: Python :: 3.12 25 | Programming Language :: Python :: 3 26 | Topic :: Software Development :: Libraries :: Python Modules 27 | Typing :: Typed 28 | url = https://github.com/GlacioHack/xdem 29 | download_url = https://pypi.org/project/xdem/ 30 | 31 | [options] 32 | packages = find: 33 | zip_safe = False # https://mypy.readthedocs.io/en/stable/installed_packages.html 34 | include_package_data = True 35 | python_requires = >=3.10,<3.13 36 | # Avoid pinning dependencies in requirements.txt (which we don't do anyways, and we rely mostly on Conda) 37 | # (https://caremad.io/posts/2013/07/setup-vs-requirement/, https://github.com/pypa/setuptools/issues/1951) 38 | install_requires = file: requirements.txt 39 | 40 | [options.package_data] 41 | xdem = 42 | py.typed 43 | 44 | [options.packages.find] 45 | include = 46 | xdem 47 | xdem.* 48 | 49 | [options.extras_require] 50 | opt = 51 | pytransform3d 52 | noisyopt 53 | scikit-learn 54 | pyyaml 55 | test = 56 | pytest 57 | pytest-xdist 58 | pre-commit 59 | flake8 60 | pylint 61 | scikit-learn 62 | doc = 63 | sphinx 64 | sphinx-book-theme 65 | sphinxcontrib-programoutput 66 | sphinx-design 67 | sphinx-autodoc-typehints 68 | sphinx-gallery 69 | autovizwidget 70 | graphviz 71 | myst-nb 72 | numpydoc 73 | dev = 74 | %(opt)s 75 | %(doc)s 76 | %(test)s 77 | all = 78 | %(dev)s 79 | -------------------------------------------------------------------------------- /setup.py: 
-------------------------------------------------------------------------------- 1 | """This file now only serves for backward-compatibility for routines explicitly calling python setup.py""" 2 | 3 | from setuptools import setup 4 | 5 | setup() 6 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Callable 3 | 4 | import pytest 5 | 6 | from xdem.examples import download_and_extract_tarball 7 | 8 | _TESTDATA_DIRECTORY = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "tests", "test_data")) 9 | 10 | 11 | @pytest.fixture(scope="session") # type: ignore 12 | def get_test_data_path() -> Callable[[str], str]: 13 | def _get_test_data_path(filename: str, overwrite: bool = False) -> str: 14 | """Get file from test_data""" 15 | download_and_extract_tarball(dir="test_data", target_dir=_TESTDATA_DIRECTORY, overwrite=overwrite) 16 | file_path = os.path.join(_TESTDATA_DIRECTORY, filename) 17 | 18 | if not os.path.exists(file_path): 19 | if overwrite: 20 | raise FileNotFoundError(f"The file {filename} was not found in the test_data directory.") 21 | file_path = _get_test_data_path(filename, overwrite=True) 22 | 23 | return file_path 24 | 25 | return _get_test_data_path 26 | -------------------------------------------------------------------------------- /tests/test_coreg/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/197480ef5aa3388a5f55e2ceb120bf7940a08ebc/tests/test_coreg/__init__.py -------------------------------------------------------------------------------- /tests/test_coreg/test_filters.py: -------------------------------------------------------------------------------- 1 | """Functions to test the coregistration filters.""" 2 | -------------------------------------------------------------------------------- /tests/test_ddem.py: -------------------------------------------------------------------------------- 1 | """Functions to test the difference of DEMs tools.""" 2 | 3 | import geoutils as gu 4 | import numpy as np 5 | 6 | import xdem 7 | 8 | 9 | class TestdDEM: 10 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 11 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 12 | outlines_1990 = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 13 | 14 | ddem = xdem.dDEM(dem_2009 - dem_1990, start_time=np.datetime64("1990-08-01"), end_time=np.datetime64("2009-08-01")) 15 | 16 | def test_init(self) -> None: 17 | """Test that the dDEM object was instantiated correctly.""" 18 | assert isinstance(self.ddem, xdem.dDEM) 19 | assert isinstance(self.ddem.data, np.ma.masked_array) 20 | 21 | assert self.ddem.nodata is (self.dem_2009 - self.dem_1990).nodata 22 | 23 | def test_copy(self) -> None: 24 | """Test that copying works as it should.""" 25 | ddem2 = self.ddem.copy() 26 | 27 | assert isinstance(ddem2, xdem.dDEM) 28 | 29 | ddem2.data += 1 30 | 31 | assert not self.ddem.raster_equal(ddem2) 32 | 33 | def test_filled_data(self) -> None: 34 | """Test that the filled_data property points to the right data.""" 35 | ddem2 = self.ddem.copy() 36 | 37 | assert not np.any(np.isnan(ddem2.data)) or np.all(~ddem2.data.mask) 38 | assert ddem2.filled_data is not None 39 | 40 | assert np.count_nonzero(np.isnan(ddem2.data)) == 0 41 | ddem2.data.ravel()[0] = np.nan 42 | 43 | assert 
np.count_nonzero(np.isnan(ddem2.data)) == 1 44 | 45 | assert ddem2.filled_data is None 46 | 47 | ddem2.interpolate(method="idw") 48 | 49 | assert ddem2.fill_method is not None 50 | 51 | def test_regional_hypso(self) -> None: 52 | """Test the regional hypsometric approach.""" 53 | ddem = self.ddem.copy() 54 | ddem.data.mask = np.zeros_like(ddem.data, dtype=bool) 55 | rng = np.random.default_rng(42) 56 | ddem.data.mask.ravel()[rng.choice(ddem.data.size, 50000, replace=False)] = True 57 | assert np.count_nonzero(ddem.data.mask) > 0 58 | 59 | assert ddem.filled_data is None 60 | 61 | ddem.interpolate(method="regional_hypsometric", reference_elevation=self.dem_2009, mask=self.outlines_1990) 62 | 63 | assert ddem._filled_data is not None 64 | assert isinstance(ddem.filled_data, np.ndarray) 65 | 66 | assert ddem.filled_data.shape == ddem.data.shape 67 | 68 | assert np.abs(np.nanmean(self.ddem.data - ddem.filled_data)) < 1 69 | 70 | def test_local_hypso(self) -> None: 71 | """Test the local hypsometric approach.""" 72 | ddem = self.ddem.copy() 73 | scott_1990 = self.outlines_1990.query("NAME == 'Scott Turnerbreen'") 74 | ddem.data.mask = np.zeros_like(ddem.data, dtype=bool) 75 | rng = np.random.default_rng(42) 76 | ddem.data.mask.ravel()[rng.choice(ddem.data.size, 50000, replace=False)] = True 77 | assert np.count_nonzero(ddem.data.mask) > 0 78 | 79 | assert ddem.filled_data is None 80 | 81 | ddem.interpolate(method="local_hypsometric", reference_elevation=self.dem_2009.data, mask=scott_1990) 82 | assert np.abs(np.nanmean(self.ddem.data - ddem.filled_data)) < 1 83 | -------------------------------------------------------------------------------- /tests/test_demcollection.py: -------------------------------------------------------------------------------- 1 | """Functions to test the DEM collection tools.""" 2 | 3 | import datetime 4 | import warnings 5 | 6 | import geoutils as gu 7 | import numpy as np 8 | 9 | import xdem 10 | 11 | 12 | class TestDEMCollection: 13 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 14 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 15 | outlines_1990 = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 16 | outlines_2010 = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines_2010")) 17 | 18 | def test_init(self) -> None: 19 | 20 | timestamps = [datetime.datetime(1990, 8, 1), datetime.datetime(2009, 8, 1), datetime.datetime(2060, 8, 1)] 21 | 22 | scott_1990 = gu.Vector(self.outlines_1990.ds.loc[self.outlines_1990.ds["NAME"] == "Scott Turnerbreen"]) 23 | scott_2010 = gu.Vector(self.outlines_2010.ds.loc[self.outlines_2010.ds["NAME"] == "Scott Turnerbreen"]) 24 | 25 | # Make sure the glacier was bigger in 1990, since this is assumed later. 
26 | assert scott_1990.ds.area.sum() > scott_2010.ds.area.sum() 27 | 28 | mask_2010 = scott_2010.create_mask(self.dem_2009) 29 | 30 | dem_2060 = self.dem_2009.copy() 31 | dem_2060[mask_2010] -= 30 32 | 33 | dems = xdem.DEMCollection( 34 | [self.dem_1990, self.dem_2009, dem_2060], 35 | timestamps=timestamps, 36 | outlines=dict(zip(timestamps[:2], [self.outlines_1990, self.outlines_2010])), 37 | reference_dem=1, 38 | ) 39 | 40 | # Check that the first raster is the oldest one 41 | assert dems.dems[0].data.max() == self.dem_1990.data.max() 42 | assert dems.reference_dem.data.max() == self.dem_2009.data.max() 43 | 44 | dems.subtract_dems(resampling_method="nearest") 45 | 46 | assert np.mean(dems.ddems[0].data) < 0 47 | 48 | scott_filter = "NAME == 'Scott Turnerbreen'" 49 | 50 | dh_series = dems.get_dh_series(outlines_filter=scott_filter) 51 | 52 | # The 1990-2009 area should be the union of those years. The 2009-2060 area should just be the 2010 area. 53 | assert dh_series.iloc[0]["area"] > dh_series.iloc[-1]["area"] 54 | 55 | cumulative_dh = dems.get_cumulative_series(kind="dh", outlines_filter=scott_filter) 56 | cumulative_dv = dems.get_cumulative_series(kind="dv", outlines_filter=scott_filter) 57 | 58 | # Simple check that the cumulative_dh is overall negative. 59 | assert cumulative_dh.iloc[0] > cumulative_dh.iloc[-1] 60 | 61 | # Simple check that the dV number is of a greater magnitude than the dH number. 62 | assert abs(cumulative_dv.iloc[-1]) > abs(cumulative_dh.iloc[-1]) 63 | 64 | rng = np.random.default_rng(42) 65 | # Generate 10000 NaN values randomly in one of the dDEMs 66 | dems.ddems[0].data[ 67 | rng.integers(0, dems.ddems[0].data.shape[0], 100), 68 | rng.integers(0, dems.ddems[0].data.shape[1], 100), 69 | ] = np.nan 70 | # Check that the cumulative_dh function warns for NaNs 71 | with warnings.catch_warnings(): 72 | try: 73 | dems.get_cumulative_series(nans_ok=False) 74 | except UserWarning as exception: 75 | if "NaNs found in dDEM" not in str(exception): 76 | raise exception 77 | 78 | # logging.info(cumulative_dh) 79 | 80 | # raise NotImplementedError 81 | 82 | def test_dem_datetimes(self) -> None: 83 | """Try to create the DEMCollection without the timestamps argument (instead relying on datetime attributes).""" 84 | self.dem_1990.datetime = datetime.datetime(1990, 8, 1) 85 | self.dem_2009.datetime = datetime.datetime(2009, 8, 1) 86 | 87 | dems = xdem.DEMCollection([self.dem_1990, self.dem_2009]) 88 | 89 | assert len(dems.timestamps) > 0 90 | 91 | def test_ddem_interpolation(self) -> None: 92 | """Test that dDEM interpolation works as it should.""" 93 | 94 | # Create a DEMCollection object 95 | dems = xdem.DEMCollection( 96 | [self.dem_2009, self.dem_1990], timestamps=[datetime.datetime(year, 8, 1) for year in (2009, 1990)] 97 | ) 98 | 99 | # Create dDEMs 100 | dems.subtract_dems(resampling_method="nearest") 101 | 102 | # The example data does not have NaNs, so filled_data should exist. 103 | assert dems.ddems[0].filled_data is not None 104 | 105 | # Try to set the filled_data property with an invalid size. 
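# (Hedged aside, not in the original test: a more idiomatic equivalent would be
# ``with pytest.raises(AssertionError, match="differs from the data shape"):``,
# which, unlike the try/except below, would also fail if no exception is raised.)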
106 | try: 107 | dems.ddems[0].filled_data = np.zeros(3) 108 | except AssertionError as exception: 109 | if "differs from the data shape" not in str(exception): 110 | raise exception 111 | 112 | # Generate 10000 NaN values randomly in one of the dDEMs 113 | rng = np.random.default_rng(42) 114 | dems.ddems[0].data[ 115 | rng.integers(0, dems.ddems[0].data.shape[0], 100), 116 | rng.integers(0, dems.ddems[0].data.shape[1], 100), 117 | ] = np.nan 118 | 119 | # Make sure that filled_data is not available anymore, since the data now has nans 120 | assert dems.ddems[0].filled_data is None 121 | 122 | # Interpolate the nans 123 | dems.ddems[0].interpolate(method="idw") 124 | 125 | # Make sure that the filled_data is available again 126 | assert dems.ddems[0].filled_data is not None 127 | -------------------------------------------------------------------------------- /tests/test_doc.py: -------------------------------------------------------------------------------- 1 | """Functions to test the documentation.""" 2 | 3 | import logging 4 | import os 5 | import platform 6 | import shutil 7 | import warnings 8 | 9 | import sphinx.cmd.build 10 | 11 | 12 | class TestDocs: 13 | docs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../", "doc/") 14 | n_threads = os.getenv("N_CPUS") 15 | 16 | def test_example_code(self) -> None: 17 | """Try running each python script in the doc/source/code\ 18 | directory and check that it doesn't raise an error.""" 19 | current_dir = os.getcwd() 20 | os.chdir(os.path.join(self.docs_dir, "source")) 21 | 22 | def run_code(filename: str) -> None: 23 | """Run a python script in one thread.""" 24 | with open(filename) as infile: 25 | # Run everything except plt.show() calls. 26 | with warnings.catch_warnings(): 27 | # When running the code asynchronously, matplotlib complains a bit 28 | ignored_warnings = [ 29 | "Starting a Matplotlib GUI outside of the main thread", 30 | ".*fetching the attribute.*Polygon.*", 31 | ] 32 | # This is a GeoPandas issue 33 | for warning_text in ignored_warnings: 34 | warnings.filterwarnings("ignore", warning_text) 35 | try: 36 | exec(infile.read().replace("plt.show()", "plt.close()")) 37 | except Exception as exception: 38 | if isinstance(exception, DeprecationWarning): 39 | logging.warning(exception) 40 | else: 41 | raise RuntimeError(f"Failed on {filename}") from exception 42 | 43 | filenames = [os.path.join("code", filename) for filename in os.listdir("code/") if filename.endswith(".py")] 44 | 45 | for filename in filenames: 46 | run_code(filename) 47 | """ 48 | with concurrent.futures.ThreadPoolExecutor( 49 | max_workers=int(self.n_threads) if self.n_threads is not None else None 50 | ) as executor: 51 | list(executor.map(run_code, filenames)) 52 | """ 53 | 54 | os.chdir(current_dir) 55 | 56 | def test_build(self) -> None: 57 | """Try building the doc and see if it works.""" 58 | 59 | # Ignore all warnings raised in the documentation 60 | # (some UserWarning are shown on purpose in certain examples, so they shouldn't make the test fail, 61 | # and most other warnings are for Sphinx developers, not meant to be seen by us; or we can check on RTD) 62 | warnings.filterwarnings("ignore") 63 | 64 | # Test only on Linux 65 | if platform.system() == "Linux": 66 | # Remove the build directory if it exists. 
67 | if os.path.isdir(os.path.join(self.docs_dir, "build")): 68 | shutil.rmtree(os.path.join(self.docs_dir, "build")) 69 | 70 | return_code = sphinx.cmd.build.main( 71 | [ 72 | "-j", 73 | "1", 74 | os.path.join(self.docs_dir, "source"), 75 | os.path.join(self.docs_dir, "build", "html"), 76 | ] 77 | ) 78 | 79 | assert return_code == 0 80 | -------------------------------------------------------------------------------- /tests/test_examples.py: -------------------------------------------------------------------------------- 1 | """Functions to test the example data.""" 2 | 3 | from __future__ import annotations 4 | 5 | import geoutils as gu 6 | import numpy as np 7 | import pytest 8 | from geoutils import Raster, Vector 9 | 10 | from xdem import examples 11 | from xdem._typing import NDArrayf 12 | 13 | 14 | def load_examples() -> tuple[Raster, Raster, Vector, Raster]: 15 | """Load example files to try coregistration methods with.""" 16 | 17 | ref_dem = Raster(examples.get_path("longyearbyen_ref_dem")) 18 | tba_dem = Raster(examples.get_path("longyearbyen_tba_dem")) 19 | glacier_mask = Vector(examples.get_path("longyearbyen_glacier_outlines")) 20 | ddem = Raster(examples.get_path("longyearbyen_ddem")) 21 | 22 | return ref_dem, tba_dem, glacier_mask, ddem 23 | 24 | 25 | class TestExamples: 26 | 27 | ref_dem, tba_dem, glacier_mask, ddem = load_examples() 28 | 29 | @pytest.mark.parametrize( 30 | "rst_and_truevals", 31 | [ 32 | (ref_dem, np.array([465.11816, 207.3236, 208.30563, 748.7337, 797.28644], dtype=np.float32)), 33 | (tba_dem, np.array([464.6715, 213.7554, 207.8788, 760.8192, 797.3268], dtype=np.float32)), 34 | ( 35 | ddem, 36 | np.array( 37 | [ 38 | 1.3699341, 39 | -1.6713867, 40 | 0.12953186, 41 | -10.096802, 42 | 2.486206, 43 | ], 44 | dtype=np.float32, 45 | ), 46 | ), 47 | ], 48 | ) # type: ignore 49 | def test_array_content(self, rst_and_truevals: tuple[Raster, NDArrayf]) -> None: 50 | """Let's ensure the data arrays in the examples are always the same by checking randomly some values""" 51 | 52 | rst = rst_and_truevals[0] 53 | truevals = rst_and_truevals[1] 54 | rng = np.random.default_rng(42) 55 | values = rng.choice(rst.data.data.flatten(), size=5, replace=False) 56 | 57 | assert values == pytest.approx(truevals) 58 | 59 | # Note: Following PR #329, no gaps on DEM edges after coregistration 60 | @pytest.mark.parametrize("rst_and_truenodata", [(ref_dem, 0), (tba_dem, 0), (ddem, 0)]) # type: ignore 61 | def test_array_nodata(self, rst_and_truenodata: tuple[Raster, int]) -> None: 62 | """Let's also check that the data arrays have always the same number of not finite values""" 63 | 64 | rst = rst_and_truenodata[0] 65 | truenodata = rst_and_truenodata[1] 66 | mask = gu.raster.get_array_and_mask(rst)[1] 67 | 68 | assert np.sum(mask) == truenodata 69 | -------------------------------------------------------------------------------- /xdem/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 xDEM developers 2 | # 3 | # This file is part of the xDEM project: 4 | # https://github.com/glaciohack/xdem 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 
8 | # 9 | # You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | 19 | from xdem import ( # noqa 20 | coreg, 21 | dem, 22 | examples, 23 | filters, 24 | fit, 25 | spatialstats, 26 | terrain, 27 | volume, 28 | ) 29 | from xdem.ddem import dDEM # noqa 30 | from xdem.dem import DEM # noqa 31 | from xdem.demcollection import DEMCollection # noqa 32 | 33 | try: 34 | from xdem._version import __version__ # noqa 35 | except ImportError: # pragma: no cover 36 | raise ImportError( 37 | "xDEM is not properly installed. If you are " 38 | "running from the source directory, please instead " 39 | "create a new virtual environment (using conda or " 40 | "virtualenv) and then install it in-place by running: " 41 | "pip install -e ." 42 | ) 43 | -------------------------------------------------------------------------------- /xdem/_typing.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 xDEM developers 2 | # 3 | # This file is part of the xDEM project: 4 | # https://github.com/glaciohack/xdem 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # 9 | # You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | 19 | from __future__ import annotations 20 | 21 | import sys 22 | from typing import Any, List, Tuple, Union 23 | 24 | import numpy as np 25 | 26 | # Only for Python >= 3.9 27 | if sys.version_info.minor >= 9: 28 | 29 | from numpy.typing import ( # this syntax works starting on Python 3.9 30 | ArrayLike, 31 | DTypeLike, 32 | NDArray, 33 | ) 34 | 35 | # Simply define here if they exist 36 | DTypeLike = DTypeLike 37 | ArrayLike = ArrayLike 38 | 39 | NDArrayf = NDArray[np.floating[Any]] 40 | NDArrayb = NDArray[np.bool_] 41 | MArrayf = np.ma.masked_array[Any, np.dtype[np.floating[Any]]] 42 | 43 | else: 44 | 45 | # Make an array-like type (since the array-like numpy type only exists in numpy>=1.20) 46 | DTypeLike = Union[str, type, np.dtype] # type: ignore 47 | ArrayLike = Union[np.ndarray, np.ma.masked_array, List[Any], Tuple[Any]] # type: ignore 48 | 49 | NDArrayf = np.ndarray # type: ignore 50 | NDArrayb = np.ndarray # type: ignore 51 | MArrayf = np.ma.masked_array # type: ignore 52 | -------------------------------------------------------------------------------- /xdem/coreg/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 xDEM developers 2 | # 3 | # This file is part of the xDEM project: 4 | # https://github.com/glaciohack/xdem 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 
8 | # 9 | # You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | 19 | """ 20 | DEM coregistration classes and functions, including affine methods, bias corrections (i.e. non-affine) and filters. 21 | """ 22 | 23 | from xdem.coreg.affine import ( # noqa 24 | CPD, 25 | ICP, 26 | LZD, 27 | AffineCoreg, 28 | DhMinimize, 29 | NuthKaab, 30 | VerticalShift, 31 | ) 32 | from xdem.coreg.base import ( # noqa 33 | Coreg, 34 | CoregPipeline, 35 | apply_matrix, 36 | invert_matrix, 37 | matrix_from_translations_rotations, 38 | translations_rotations_from_matrix, 39 | ) 40 | from xdem.coreg.biascorr import BiasCorr, Deramp, DirectionalBias, TerrainBias # noqa 41 | from xdem.coreg.blockwise import BlockwiseCoreg # noqa 42 | from xdem.coreg.workflows import dem_coregistration # noqa 43 | -------------------------------------------------------------------------------- /xdem/coreg/filters.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 xDEM developers 2 | # 3 | # This file is part of the xDEM project: 4 | # https://github.com/glaciohack/xdem 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # 9 | # You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | 19 | """Coregistration filters (coming soon).""" 20 | --------------------------------------------------------------------------------