├── tests ├── test_coreg │ ├── __init__.py │ └── test_filters.py ├── test_workflows │ ├── __init__.py │ └── conftest.py ├── conftest.py ├── test_doc.py ├── test_examples.py ├── test_ddem.py └── test_demcollection.py ├── binder ├── apt.txt ├── environment.yml ├── extra-environment.yml └── postBuild ├── mypy.ini ├── .relint.yml ├── doc ├── source │ ├── imgs │ │ ├── elev_map.png │ │ ├── elev_diff_histo.png │ │ ├── masked_elevation.png │ │ ├── precision_accuracy.png │ │ ├── reference_elev_map.png │ │ ├── terrain_attributes.png │ │ ├── diff_elev_after_coreg.png │ │ ├── accuracy_precision_dem.png │ │ ├── diff_elev_before_coreg.png │ │ ├── stable_terrain_diagram.png │ │ ├── to_be_aligned_elev_map.png │ │ ├── topo_workflow_pipeline.png │ │ └── accuracy_workflow_pipeline.png │ ├── credits.md │ ├── _static │ │ └── css │ │ │ └── custom.css │ ├── elevation_objects.md │ ├── guides.md │ ├── sphinxext.py │ ├── authors.md │ ├── elevation_point_cloud.md │ ├── code │ │ ├── spatialstats_heterosc_slope.py │ │ ├── spatialstats_variogram_covariance.py │ │ ├── comparison_plot_spatial_interpolation.py │ │ ├── comparison_plot_regional_hypsometric_interpolation.py │ │ ├── comparison_plot_local_hypsometric_interpolation.py │ │ ├── intricacies_datatypes.py │ │ ├── spatialstats_standardizing.py │ │ ├── robust_vario.py │ │ ├── spatialstats_stationarity_assumption.py │ │ └── robust_mean_std.py │ ├── _templates │ │ ├── module.rst │ │ └── module.md │ ├── how_to_install.md │ ├── funding.md │ ├── global_cli_information.md │ ├── mission.md │ ├── history.md │ ├── ecosystem.md │ ├── about_xdem.md │ ├── config.md │ ├── index.md │ ├── static_surfaces.md │ ├── release_notes.md │ ├── quick_start.md │ └── citation.md ├── make.bat └── Makefile ├── setup.py ├── .coveragerc ├── examples ├── basic │ ├── README.rst │ ├── plot_logging_configuration.py │ ├── plot_terrain_attributes.py │ ├── plot_dem_subtraction.py │ ├── plot_infer_heterosc.py │ ├── plot_nuth_kaab.py │ ├── plot_infer_spatial_correlation.py │ ├── plot_icp_coregistration.py │ └── plot_spatial_error_propagation.py └── advanced │ ├── README.rst │ ├── plot_deramp.py │ ├── plot_demcollection.py │ ├── plot_slope_methods.py │ ├── plot_blockwise_coreg.py │ └── plot_norm_regional_hypso.py ├── .github ├── dependabot.yml ├── workflows │ ├── pre-commit.yml │ ├── doc-build.yml │ ├── pip-checks.yml │ ├── testpypi-publish.yml │ ├── pypi-publish.yml │ └── python-tests.yml ├── PULL_REQUEST_TEMPLATE.md └── scripts │ ├── license-header.txt │ ├── apply_license_header.py │ ├── generate_yml_env_fixed_py.py │ └── generate_pip_deps_from_conda.py ├── requirements.txt ├── environment.yml ├── .readthedocs.yaml ├── pyproject.toml ├── xdem ├── coreg │ ├── filters.py │ └── __init__.py ├── terrain │ ├── __init__.py │ └── freq.py ├── workflows │ └── __init__.py ├── __init__.py ├── _typing.py └── cli.py ├── dev-environment.yml ├── setup.cfg ├── .gitignore ├── Makefile ├── GOVERNANCE.md ├── AUTHORS.md ├── CONTRIBUTING.md ├── NOTICE └── CODE_OF_CONDUCT.md /tests/test_coreg/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_workflows/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /binder/apt.txt: -------------------------------------------------------------------------------- 1 | libgl1-mesa-glx 2 | 
-------------------------------------------------------------------------------- /binder/environment.yml: -------------------------------------------------------------------------------- 1 | ../environment.yml -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | plugins = numpy.typing.mypy_plugin 3 | -------------------------------------------------------------------------------- /tests/test_coreg/test_filters.py: -------------------------------------------------------------------------------- 1 | """Functions to test the coregistration filters.""" 2 | -------------------------------------------------------------------------------- /.relint.yml: -------------------------------------------------------------------------------- 1 | - name: Type hint in docstring 2 | pattern: ':[r]?type ' 3 | filePattern: .*\.py 4 | -------------------------------------------------------------------------------- /doc/source/imgs/elev_map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/elev_map.png -------------------------------------------------------------------------------- /binder/extra-environment.yml: -------------------------------------------------------------------------------- 1 | channels: 2 | - conda-forge 3 | dependencies: 4 | - jupytext 5 | - myst-nb 6 | -------------------------------------------------------------------------------- /doc/source/imgs/elev_diff_histo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/elev_diff_histo.png -------------------------------------------------------------------------------- /doc/source/imgs/masked_elevation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/masked_elevation.png -------------------------------------------------------------------------------- /doc/source/imgs/precision_accuracy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/precision_accuracy.png -------------------------------------------------------------------------------- /doc/source/imgs/reference_elev_map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/reference_elev_map.png -------------------------------------------------------------------------------- /doc/source/imgs/terrain_attributes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/terrain_attributes.png -------------------------------------------------------------------------------- /doc/source/imgs/diff_elev_after_coreg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/diff_elev_after_coreg.png -------------------------------------------------------------------------------- /doc/source/imgs/accuracy_precision_dem.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/accuracy_precision_dem.png -------------------------------------------------------------------------------- /doc/source/imgs/diff_elev_before_coreg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/diff_elev_before_coreg.png -------------------------------------------------------------------------------- /doc/source/imgs/stable_terrain_diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/stable_terrain_diagram.png -------------------------------------------------------------------------------- /doc/source/imgs/to_be_aligned_elev_map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/to_be_aligned_elev_map.png -------------------------------------------------------------------------------- /doc/source/imgs/topo_workflow_pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/topo_workflow_pipeline.png -------------------------------------------------------------------------------- /doc/source/imgs/accuracy_workflow_pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GlacioHack/xdem/HEAD/doc/source/imgs/accuracy_workflow_pipeline.png -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """This file now only serves for backward-compatibility for routines explicitly calling python setup.py""" 2 | 3 | from setuptools import setup 4 | 5 | setup() 6 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | exclude_lines = 3 | pragma: not covered 4 | @overload 5 | except ImportError 6 | @numba.jit 7 | @jit 8 | @numba.njit 9 | @njit 10 | -------------------------------------------------------------------------------- /doc/source/credits.md: -------------------------------------------------------------------------------- 1 | (credits)= 2 | # Credits and background 3 | 4 | ```{toctree} 5 | :maxdepth: 2 6 | 7 | history 8 | mission 9 | authors 10 | funding 11 | license 12 | ``` 13 | -------------------------------------------------------------------------------- /examples/basic/README.rst: -------------------------------------------------------------------------------- 1 | .. _examples-basic: 2 | 3 | Basic 4 | ===== 5 | 6 | Examples using **terrain methods** and **DEM differences**, as well as 7 | pre-defined **coregistration** and **uncertainty analysis** pipelines. 
8 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Maintain dependencies for GitHub Actions 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | # Check for updates to GitHub Actions every week 8 | interval: "weekly" 9 | -------------------------------------------------------------------------------- /examples/advanced/README.rst: -------------------------------------------------------------------------------- 1 | .. _examples-advanced: 2 | 3 | Advanced 4 | ======== 5 | 6 | Examples for setting up **specific coregistration or bias-correction pipelines**, **comparing terrain methods**, 7 | or **refining an error model for DEM uncertainty analysis**. 8 | -------------------------------------------------------------------------------- /doc/source/_static/css/custom.css: -------------------------------------------------------------------------------- 1 | /* Work around to wrong dark-mode for toggle button: https://github.com/executablebooks/MyST-NB/issues/523 */ 2 | div.cell details.hide > summary { 3 | background-color: var(--pst-color-surface); 4 | } 5 | 6 | div.cell details[open].above-input div.cell_input { 7 | border-top: None; 8 | } 9 | -------------------------------------------------------------------------------- /doc/source/elevation_objects.md: -------------------------------------------------------------------------------- 1 | (elevation-objects)= 2 | # Elevation data objects 3 | 4 | Elevation data objects of xDEM inherit their characteristics from raster and vector objects of 5 | our sister-package [GeoUtils](https://geoutils.readthedocs.io/en/stable/). 6 | 7 | ```{toctree} 8 | :maxdepth: 2 9 | 10 | dem_class 11 | elevation_point_cloud 12 | ``` 13 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yml: -------------------------------------------------------------------------------- 1 | name: Linting and formatting (pre-commit) 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | pre-commit: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v6 14 | - uses: actions/setup-python@v6 15 | - uses: pre-commit/action@v3.0.1 16 | -------------------------------------------------------------------------------- /doc/source/guides.md: -------------------------------------------------------------------------------- 1 | (guides)= 2 | # Guides to elevated analysis 3 | 4 | This section is a collection of guides gathering background knowledge related to elevation data to help grasp how to best 5 | elevate your analysis! 6 | 7 | ```{toctree} 8 | :maxdepth: 2 9 | 10 | elevation_intricacies 11 | static_surfaces 12 | accuracy_precision 13 | robust_estimators 14 | spatial_stats 15 | ``` 16 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # This file is auto-generated from environment.yml, do not modify. 2 | # See that file for comments about the need/usage of each dependency. 
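# (Presumably regenerated via .github/scripts/generate_pip_deps_from_conda.py, the helper script shipped in this repository.)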
3 | 4 | geopandas>=0.12.0 5 | numba==0.* 6 | numpy>=1,<3 7 | matplotlib==3.* 8 | pyproj>=3.4,<4 9 | rasterio>=1.3,<2 10 | scipy==1.* 11 | tqdm 12 | geoutils==0.2.1 13 | affine 14 | pandas 15 | pyogrio 16 | shapely 17 | pip 18 | cerberus 19 | pyyaml 20 | weasyprint 21 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | - [ ] Resolves #xxx, 4 | - [ ] Tests added, otherwise issue #xxx opened, 5 | - [ ] Fully documented, including `api/*.md` for new API, 6 | - [ ] New optional dependencies or Python version support added to both `dev-environment.yml` and `setup.cfg`, 7 | - [ ] If contributor workflow (test, doc, linting) or Python version support changed, update `CONTRIBUTING.md`. 8 | -------------------------------------------------------------------------------- /binder/postBuild: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e # To avoid silent errors 3 | 4 | # ${MAMBA_EXE} env update -p ${NB_PYTHON_PREFIX} --file "environment.yml" 5 | pip install -e . 6 | ${MAMBA_EXE} env update -p ${NB_PYTHON_PREFIX} --file "binder/extra-environment.yml" 7 | wget https://raw.githubusercontent.com/mwouts/jupytext/main/binder/labconfig/default_setting_overrides.json -P ~/.jupyter/labconfig/ # To automatically open Markdown files as notebooks with Jupytext, see https://github.com/mwouts/jupytext 8 | -------------------------------------------------------------------------------- /doc/source/sphinxext.py: -------------------------------------------------------------------------------- 1 | """Functions for documentation configuration only, importable by sphinx""" 2 | 3 | 4 | # To reset resolution setting for each sphinx-gallery example 5 | def reset_mpl(gallery_conf, fname): 6 | # To get a good resolution for displayed figures 7 | from matplotlib import pyplot 8 | 9 | pyplot.rcParams["figure.dpi"] = 400 10 | pyplot.rcParams["savefig.dpi"] = 400 11 | 12 | # Reset logging to default 13 | import logging 14 | 15 | logging.basicConfig(force=True) 16 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: xdem 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python>=3.10,<3.14 6 | - geopandas>=0.12.0 7 | - numba=0.* 8 | - numpy>=1,<3 9 | - matplotlib=3.* 10 | - pyproj>=3.4,<4 11 | - rasterio>=1.3,<2 12 | - scipy=1.* 13 | - tqdm 14 | - geoutils=0.2.1 15 | - affine 16 | - pandas 17 | - pyogrio 18 | - shapely 19 | - pip 20 | - cerberus 21 | - pyyaml 22 | - weasyprint 23 | 24 | # To run CI against the latest GeoUtils 25 | # - pip: 26 | # - git+https://github.com/rhugonnet/geoutils.git 27 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | build: 9 | os: "ubuntu-20.04" 10 | tools: 11 | python: "mambaforge-4.10" 12 | 13 | # Build documentation in the doc/ directory with Sphinx 14 | sphinx: 15 | configuration: doc/source/conf.py 16 | fail_on_warning: false 17 | 18 | # Build the doc in offline formats 19 | formats: 20 | - pdf 21 | - htmlzip 22 | 23 | conda: 
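# The dev environment also bundles the doc dependencies (Sphinx and extensions) needed for this build.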
24 | environment: dev-environment.yml -------------------------------------------------------------------------------- /.github/workflows/doc-build.yml: -------------------------------------------------------------------------------- 1 | name: documentation 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | doc: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v6 14 | - uses: actions/setup-python@v6 15 | with: 16 | python-version: '3.12' 17 | - name: Install project with documentation dependencies 18 | run: python -m pip install .[doc,opt] -vv 19 | - name: Sphinx build 20 | run: | 21 | cd doc 22 | sphinx-build -b html source/ build/ 23 | -------------------------------------------------------------------------------- /doc/source/authors.md: -------------------------------------------------------------------------------- 1 | (authors)= 2 | # Authors 3 | 4 | © 2024 **xDEM developers**. 5 | 6 | **xDEM** is licensed under the permissive Apache 2.0 license (see [LICENSE file](license.md) or below). 7 | 8 | All contributors listed in this document are part of the **xDEM developers**, and their 9 | contributions are subject to the project's copyright under the terms of the 10 | [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0). 11 | 12 | Please refer to the [AUTHORS file](https://github.com/GlacioHack/xdem/blob/main/AUTHORS.md) for the complete and detailed list of authors and their contributions. 13 | -------------------------------------------------------------------------------- /doc/source/elevation_point_cloud.md: -------------------------------------------------------------------------------- 1 | --- 2 | file_format: mystnb 3 | jupytext: 4 | formats: md:myst 5 | text_representation: 6 | extension: .md 7 | format_name: myst 8 | kernelspec: 9 | display_name: xdem-env 10 | language: python 11 | name: xdem 12 | --- 13 | (elevation-point-cloud)= 14 | 15 | # The elevation point cloud ({class}`~xdem.EPC`) 16 | 17 | Under construction, planned for 2025. 18 | 19 | However, **elevation point clouds are already supported for coregistration and bias correction** by passing a {class}`geopandas.GeoDataFrame` 20 | associated with an elevation column name argument `z_name` to {func}`~xdem.coreg.Coreg.fit_and_apply`. 21 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | # Minimum requirements for the build system to execute.
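# (setuptools>=64 notably enables PEP 660 editable installs, i.e. `pip install -e .`.)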
3 | requires = [ 4 | "setuptools>=64", 5 | "setuptools_scm[toml]>=8,<9.0.0", 6 | "wheel", 7 | ] 8 | build-backend = "setuptools.build_meta" 9 | 10 | # To write version to file 11 | [tool.setuptools_scm] 12 | version_file = "xdem/_version.py" 13 | fallback_version = "0.0.1" 14 | # Use no-local-version by default for CI builds 15 | local_scheme = "no-local-version" 16 | 17 | [tool.black] 18 | target_version = ['py310'] 19 | 20 | [tool.pytest.ini_options] 21 | addopts = "--doctest-modules -W error::UserWarning" 22 | testpaths = [ 23 | "tests", 24 | "xdem" 25 | ] 26 | -------------------------------------------------------------------------------- /.github/scripts/license-header.txt: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 xDEM developers 2 | # 3 | # This file is part of the xDEM project: 4 | # https://github.com/glaciohack/xdem 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # 9 | # You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | -------------------------------------------------------------------------------- /xdem/coreg/filters.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 xDEM developers 2 | # 3 | # This file is part of the xDEM project: 4 | # https://github.com/glaciohack/xdem 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # 9 | # You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | 19 | """Coregistration filters (coming soon).""" 20 | -------------------------------------------------------------------------------- /xdem/terrain/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2025 xDEM developers 2 | # 3 | # This file is part of the xDEM project: 4 | # https://github.com/glaciohack/xdem 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # 9 | # You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 
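# The import below re-exports all public terrain functions at the xdem.terrain subpackage level.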
18 | 19 | from xdem.terrain.terrain import * # noqa 20 | -------------------------------------------------------------------------------- /xdem/workflows/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2025 Centre National d'Etudes Spatiales (CNES). 2 | # 3 | # This file is part of the xDEM project: 4 | # https://github.com/glaciohack/xdem 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # 9 | # You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | 19 | from xdem.workflows.accuracy import Accuracy # noqa 20 | from xdem.workflows.topo import Topo # noqa 21 | -------------------------------------------------------------------------------- /doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | clean: 16 | echo "Removing build files..." 17 | if [ -d "$(BUILDDIR)" ]; then rm -r "$(BUILDDIR)"; fi 18 | if [ -d "$(SOURCEDIR)/auto_examples" ]; then rm -r "$(SOURCEDIR)/auto_examples"; fi 19 | if [ -d "$(SOURCEDIR)/gen_modules" ]; then rm -r "$(SOURCEDIR)/gen_modules"; fi 20 | 21 | .PHONY: help Makefile 22 | 23 | # Catch-all target: route all unknown targets to Sphinx using the new 24 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
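# For example, "make html O=-j2" passes the parallel-build flag -j2 through to sphinx-build.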
25 | %: Makefile 26 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 27 | -------------------------------------------------------------------------------- /.github/workflows/pip-checks.yml: -------------------------------------------------------------------------------- 1 | # This workflow checks that pip installation works to import the package (tests are in python-tests.yml) 2 | 3 | name: pip-install 4 | 5 | on: 6 | push: 7 | branches: [ main ] 8 | pull_request: 9 | branches: [ main ] 10 | 11 | jobs: 12 | test: 13 | name: ${{ matrix.os }}, python ${{ matrix.python-version }} 14 | runs-on: ${{ matrix.os }} 15 | 16 | strategy: 17 | matrix: 18 | os: ["ubuntu-latest", "macos-latest", "windows-latest"] 19 | python-version: ["3.10", "3.11", "3.12", "3.13"] 20 | 21 | steps: 22 | - uses: actions/checkout@v6 23 | 24 | - uses: actions/setup-python@v6 25 | with: 26 | python-version: ${{ matrix.python-version }} 27 | 28 | # Use pip install 29 | - name: Install project 30 | run: | 31 | python -m pip install . -vv 32 | 33 | # Check import works 34 | - name: Check import works with base environment 35 | run: python -c "import xdem" 36 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Callable 3 | 4 | import pytest 5 | 6 | from xdem.examples import download_and_extract_tarball 7 | 8 | _TESTDATA_DIRECTORY = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "tests", "test_data")) 9 | 10 | 11 | @pytest.fixture(scope="session") # type: ignore 12 | def get_test_data_path() -> Callable[[str], str]: 13 | def _get_test_data_path(filename: str, overwrite: bool = False) -> str: 14 | """Get file from test_data""" 15 | download_and_extract_tarball(dir="test_data", target_dir=_TESTDATA_DIRECTORY, overwrite=overwrite) 16 | file_path = os.path.join(_TESTDATA_DIRECTORY, filename) 17 | 18 | if not os.path.exists(file_path): 19 | if overwrite: 20 | raise FileNotFoundError(f"The file {filename} was not found in the test_data directory.") 21 | file_path = _get_test_data_path(filename, overwrite=True) 22 | 23 | return file_path 24 | 25 | return _get_test_data_path 26 | -------------------------------------------------------------------------------- /doc/source/code/spatialstats_heterosc_slope.py: -------------------------------------------------------------------------------- 1 | """Code example for spatial statistics""" 2 | 3 | import geoutils as gu 4 | 5 | import xdem 6 | 7 | # Load data 8 | dh = gu.Raster(xdem.examples.get_path("longyearbyen_ddem")) 9 | ref_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 10 | glacier_mask = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 11 | mask = glacier_mask.create_mask(dh) 12 | 13 | # Get slope for non-stationarity 14 | slope = xdem.terrain.get_terrain_attribute(dem=ref_dem, attribute=["slope"]) 15 | 16 | # Keep only stable terrain data 17 | dh.load() 18 | dh.set_mask(mask) 19 | 20 | # Estimate the measurement error by bin of slope, using the NMAD as robust estimator 21 | df_ns = xdem.spatialstats.nd_binning( 22 | dh.data.ravel(), 23 | list_var=[slope.data.ravel()], 24 | list_var_names=["slope"], 25 | statistics=["count", gu.stats.nmad], 26 | list_var_bins=30, 27 | ) 28 | 29 | xdem.spatialstats.plot_1d_binning(df_ns, "slope", "nmad", "Slope (degrees)", "Random elevation error\n($1\\sigma$, m)") 30 | 
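# A possible next step (sketch only, assuming xdem.spatialstats.interp_nd_binning keeps its
# documented signature): interpolate the binned NMAD values into a continuous error function of slope.
err_dh = xdem.spatialstats.interp_nd_binning(df_ns, list_var_names=["slope"], statistic="nmad", min_count=30)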
-------------------------------------------------------------------------------- /.github/scripts/apply_license_header.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | # Path to the license header 4 | HEADER_FILE = os.path.join(os.path.dirname(__file__), "license-header.txt") 5 | 6 | # read license header 7 | with open(HEADER_FILE) as file: 8 | license_header = file.read() 9 | 10 | 11 | # Add license header to a file 12 | def add_license_header(file_path, header): 13 | with open(file_path) as f: 14 | content = f.read() 15 | 16 | # Check if the header is already there 17 | if content.startswith(header): 18 | return 19 | 20 | # If not, add it 21 | with open(file_path, "w") as f: 22 | f.write(header + "\n" + content) 23 | print(f"Header added to {file_path}") 24 | 25 | 26 | # Check the header in every file in root_dir 27 | def apply_license_header_to_all_py_files(root_dir): 28 | for subdir, _, files in os.walk(root_dir): 29 | for file in files: 30 | if file.endswith(".py"): 31 | file_path = os.path.join(subdir, file) 32 | add_license_header(file_path, license_header) 33 | 34 | 35 | # Source directory 36 | PROJECT_SRC = "xdem" 37 | 38 | # Add header to every source files 39 | apply_license_header_to_all_py_files(PROJECT_SRC) 40 | -------------------------------------------------------------------------------- /doc/source/code/spatialstats_variogram_covariance.py: -------------------------------------------------------------------------------- 1 | """Documentation plot illustrating the link between variogram and covariance""" 2 | 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | from skgstat.models import exponential 6 | 7 | # Example of variogram and covariance relationship with an exponential model form 8 | fig, ax = plt.subplots() 9 | x = np.linspace(0, 100, 100) 10 | ax.plot(x, exponential(x, 15, 10), color="tab:blue", linewidth=2) 11 | ax.plot(x, 10 - exponential(x, 15, 10), color="black", linewidth=2) 12 | ax.hlines(10, xmin=0, xmax=100, linestyles="dashed", colors="tab:red") 13 | ax.text(75, exponential(75, 15, 10) - 1, "Semi-variogram $\\gamma(l)$", ha="center", va="top", color="tab:blue") 14 | ax.text( 15 | 75, 16 | 10 - exponential(75, 15, 10) + 1, 17 | "Covariance $C(l) = \\sigma^{2} - \\gamma(l)$", 18 | ha="center", 19 | va="bottom", 20 | color="black", 21 | ) 22 | ax.text(75, 11, "Variance $\\sigma^{2}$", ha="center", va="bottom", color="tab:red") 23 | ax.set_xlim((0, 100)) 24 | ax.set_ylim((0, 12)) 25 | ax.set_xlabel("Spatial lag $l$") 26 | ax.set_ylabel("Variance of elevation differences (m²)") 27 | ax.spines["right"].set_visible(False) 28 | ax.spines["top"].set_visible(False) 29 | plt.tight_layout() 30 | plt.show() 31 | -------------------------------------------------------------------------------- /doc/source/_templates/module.rst: -------------------------------------------------------------------------------- 1 | {{ fullname }} 2 | {{ underline }} 3 | 4 | .. automodule:: {{ fullname }} 5 | 6 | .. contents:: Contents 7 | :local: 8 | 9 | {% block functions %} 10 | {% if functions %} 11 | 12 | Functions 13 | ========= 14 | 15 | {% for item in functions %} 16 | 17 | {{item}} 18 | {{ "-" * (item | length) }} 19 | 20 | .. autofunction:: {{ item }} 21 | 22 | .. _sphx_glr_backref_{{fullname}}.{{item}}: 23 | 24 | .. 
minigallery:: {{fullname}}.{{item}} 25 | :add-heading: 26 | 27 | {%- endfor %} 28 | {% endif %} 29 | {% endblock %} 30 | 31 | {% block classes %} 32 | {% if classes %} 33 | 34 | Classes 35 | ======= 36 | 37 | {% for item in classes %} 38 | 39 | {{item}} 40 | {{ "-" * (item | length) }} 41 | 42 | .. autoclass:: {{ item }} 43 | :show-inheritance: 44 | :special-members: __init__ 45 | :members: 46 | 47 | .. _sphx_glr_backref_{{fullname}}.{{item}}: 48 | 49 | .. minigallery:: {{fullname}}.{{item}} 50 | :add-heading: 51 | 52 | {%- endfor %} 53 | {% endif %} 54 | {% endblock %} 55 | 56 | {% block exceptions %} 57 | {% if exceptions %} 58 | 59 | Exceptions 60 | ========== 61 | 62 | .. autosummary:: 63 | {% for item in exceptions %} 64 | {{ item }} 65 | {%- endfor %} 66 | {% endif %} 67 | {% endblock %} 68 | -------------------------------------------------------------------------------- /dev-environment.yml: -------------------------------------------------------------------------------- 1 | name: xdem-dev 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python>=3.10,<3.14 6 | - geopandas>=0.12.0 7 | - numba=0.* 8 | - numpy>=1,<3 9 | - matplotlib=3.* 10 | - pyproj>=3.4,<4 11 | - rasterio>=1.3,<2 12 | - scipy=1.* 13 | - tqdm 14 | - geoutils=0.2.1 15 | - affine 16 | - pandas 17 | - pyogrio 18 | - shapely 19 | - cerberus 20 | - pyyaml 21 | - weasyprint 22 | 23 | # Development-specific, to mirror manually in setup.cfg [options.extras_require]. 24 | - pip 25 | 26 | # Optional dependencies 27 | - pytransform3d 28 | - scikit-learn 29 | 30 | # Test dependencies 31 | - pytest 32 | - pytest-xdist 33 | - pytest-instafail 34 | - pytest-socket 35 | - pytest-cov 36 | - coveralls 37 | - flake8 38 | - pylint 39 | - pre-commit 40 | 41 | # Doc dependencies 42 | - sphinx 43 | - sphinx-book-theme 44 | - sphinxcontrib-programoutput 45 | - sphinx-design 46 | - sphinx-autodoc-typehints 47 | - sphinx-gallery 48 | - autovizwidget 49 | - graphviz 50 | - myst-nb 51 | - numpydoc 52 | - sphinx-tabs 53 | 54 | - pip: 55 | # Optional dependencies with Pip 56 | # SciKit-GStat temporarily here until Conda version supports Python 3.13 57 | - scikit-gstat>=1.0.18 58 | 59 | - -e ./ 60 | 61 | # To run CI against latest GeoUtils 62 | # - git+https://github.com/rhugonnet/geoutils.git 63 | -------------------------------------------------------------------------------- /doc/source/_templates/module.md: -------------------------------------------------------------------------------- 1 | {{ fullname }} 2 | {{ underline }} 3 | 4 | ```{eval-rst} 5 | .. automodule:: {{ fullname }} 6 | 7 | .. contents:: Contents 8 | :local: 9 | 10 | {% block functions %} 11 | {% if functions %} 12 | 13 | Functions 14 | ========= 15 | 16 | {% for item in functions %} 17 | 18 | {{item}} 19 | {{ "-" * (item | length) }} 20 | 21 | .. autofunction:: {{ item }} 22 | 23 | .. _sphx_glr_backref_{{fullname}}.{{item}}: 24 | 25 | .. minigallery:: {{fullname}}.{{item}} 26 | :add-heading: 27 | 28 | {%- endfor %} 29 | {% endif %} 30 | {% endblock %} 31 | 32 | {% block classes %} 33 | {% if classes %} 34 | 35 | Classes 36 | ======= 37 | 38 | {% for item in classes %} 39 | 40 | {{item}} 41 | {{ "-" * (item | length) }} 42 | 43 | .. autoclass:: {{ item }} 44 | :show-inheritance: 45 | :special-members: __init__ 46 | :members: 47 | 48 | .. _sphx_glr_backref_{{fullname}}.{{item}}: 49 | 50 | .. 
minigallery:: {{fullname}}.{{item}} 51 | :add-heading: 52 | 53 | {%- endfor %} 54 | {% endif %} 55 | {% endblock %} 56 | 57 | {% block exceptions %} 58 | {% if exceptions %} 59 | 60 | Exceptions 61 | ========== 62 | 63 | .. autosummary:: 64 | {% for item in exceptions %} 65 | {{ item }} 66 | {%- endfor %} 67 | {% endif %} 68 | {% endblock %} 69 | ``` 70 | -------------------------------------------------------------------------------- /xdem/coreg/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 xDEM developers 2 | # 3 | # This file is part of the xDEM project: 4 | # https://github.com/glaciohack/xdem 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # 9 | # You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | 19 | """ 20 | DEM coregistration classes and functions, including affine methods, bias corrections (i.e. non-affine) and filters. 21 | """ 22 | 23 | from xdem.coreg.affine import ( # noqa 24 | CPD, 25 | ICP, 26 | LZD, 27 | AffineCoreg, 28 | DhMinimize, 29 | NuthKaab, 30 | VerticalShift, 31 | ) 32 | from xdem.coreg.base import ( # noqa 33 | Coreg, 34 | CoregPipeline, 35 | apply_matrix, 36 | invert_matrix, 37 | matrix_from_translations_rotations, 38 | translations_rotations_from_matrix, 39 | ) 40 | from xdem.coreg.biascorr import BiasCorr, Deramp, DirectionalBias, TerrainBias # noqa 41 | from xdem.coreg.blockwise import BlockwiseCoreg # noqa 42 | -------------------------------------------------------------------------------- /doc/source/code/comparison_plot_spatial_interpolation.py: -------------------------------------------------------------------------------- 1 | """Plot an example of spatial interpolation of randomly generated voids.""" 2 | 3 | import geoutils as gu 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | 7 | import xdem 8 | 9 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 10 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 11 | outlines_1990 = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 12 | 13 | ddem = xdem.dDEM(dem_2009 - dem_1990, start_time=np.datetime64("1990-08-01"), end_time=np.datetime64("2009-08-01")) 14 | # The example DEMs are void-free, so let's make some random voids. 15 | ddem.data.mask = np.zeros_like(ddem.data, dtype=bool) # Reset the mask 16 | # Introduce 50000 voids (masked cells) randomly throughout the dDEM.
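# (The fixed seed of the random generator below keeps the masked cells, and thus the figure, reproducible.)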
17 | ddem.data.mask.ravel()[np.random.default_rng(42).choice(ddem.data.size, 50000, replace=False)] = True 18 | 19 | ddem.interpolate(method="idw") 20 | 21 | ylim = (300, 100) 22 | xlim = (800, 1050) 23 | 24 | plt.figure(figsize=(8, 5)) 25 | plt.subplot(121) 26 | plt.imshow(ddem.data.squeeze(), cmap="coolwarm_r", vmin=-50, vmax=50) 27 | plt.ylim(ylim) 28 | plt.xlim(xlim) 29 | plt.axis("off") 30 | plt.title("dDEM with random voids") 31 | plt.subplot(122) 32 | plt.imshow(ddem.filled_data.squeeze(), cmap="coolwarm_r", vmin=-50, vmax=50) 33 | plt.ylim(ylim) 34 | plt.xlim(xlim) 35 | plt.axis("off") 36 | plt.title("IDW-interpolated dDEM") 37 | 38 | 39 | plt.tight_layout() 40 | plt.show() 41 | -------------------------------------------------------------------------------- /xdem/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 xDEM developers 2 | # 3 | # This file is part of the xDEM project: 4 | # https://github.com/glaciohack/xdem 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # 9 | # You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | 19 | from xdem import ( # noqa 20 | coreg, 21 | dem, 22 | examples, 23 | filters, 24 | fit, 25 | spatialstats, 26 | terrain, 27 | volume, 28 | ) 29 | from xdem.ddem import dDEM # noqa 30 | from xdem.dem import DEM # noqa 31 | from xdem.demcollection import DEMCollection # noqa 32 | 33 | try: 34 | from xdem._version import __version__ # noqa 35 | except ImportError: # pragma: no cover 36 | raise ImportError( 37 | "xDEM is not properly installed. If you are " 38 | "running from the source directory, please instead " 39 | "create a new virtual environment (using conda or " 40 | "virtualenv) and then install it in-place by running: " 41 | "pip install -e ." 42 | ) 43 | -------------------------------------------------------------------------------- /doc/source/how_to_install.md: -------------------------------------------------------------------------------- 1 | (how-to-install)= 2 | 3 | # How to install 4 | 5 | ## Installing with ``mamba`` (recommended) 6 | 7 | ```bash 8 | mamba install -c conda-forge xdem 9 | ``` 10 | 11 | ```{tip} 12 | Solving dependencies can take a long time with `conda`; `mamba` significantly speeds up the process. Install it with: 13 | 14 | conda install mamba -n base -c conda-forge 15 | 16 | Once installed, the same commands can be run by simply replacing `conda` with `mamba`. More details are available in the [mamba documentation](https://mamba.readthedocs.io/en/latest/). 17 | ``` 18 | 19 | ## Installing with ``pip`` 20 | 21 | ```bash 22 | pip install xdem 23 | ``` 24 | 25 | ```{warning} 26 | Updating packages with `pip` (and sometimes `mamba`) can break your installation. If this happens, re-create the environment from scratch, directly pinning all your other dependencies during the initial solve (e.g., `mamba create -n xdem-env -c conda-forge xdem myotherpackage==1.0.0`).
27 | ``` 28 | 29 | ## Installing for contributors 30 | 31 | ### With ``mamba`` 32 | 33 | ```bash 34 | git clone https://github.com/GlacioHack/xdem.git 35 | mamba env create -f xdem/dev-environment.yml 36 | ``` 37 | 38 | ### With ``pip`` 39 | 40 | Please note: pip installation is currently only possible under Python 3.10. 41 | 42 | ```bash 43 | git clone https://github.com/GlacioHack/xdem.git 44 | cd xdem 45 | make install 46 | ``` 47 | 48 | After installing, you can check that everything is working by running the tests: `pytest`. 49 | -------------------------------------------------------------------------------- /doc/source/funding.md: -------------------------------------------------------------------------------- 1 | (funding)= 2 | # Funding acknowledgments 3 | 4 | Members of the lead development team acknowledge funding from: 5 | - SNSF grant no. 184634, a MeteoSwiss [GCOS](https://gcos.wmo.int/en/home) project on elevation data analysis for glaciology, 6 | - NASA award 80NSSC22K1094, an [STV](https://science.nasa.gov/earth-science/decadal-surveys/decadal-stv/) project on the fusion of elevation data, 7 | - NASA award 80NSSC23K0192, an [ICESat-2](https://icesat-2.gsfc.nasa.gov/) project on the processing of elevation data in the cloud, 8 | - CNES (French Space Agency) award on merging [demcompare](https://github.com/CNES/demcompare) and xDEM while further developing related 3D tools. 9 | 10 | 11 | ::::{grid} 12 | :reverse: 13 | 14 | :::{grid-item} 15 | :columns: 4 16 | :child-align: center 17 | 18 | ```{image} ./_static/nasa_logo.svg 19 | :width: 200px 20 | :class: dark-light 21 | ``` 22 | 23 | ::: 24 | 25 | :::{grid-item} 26 | :columns: 4 27 | :child-align: center 28 | 29 | ```{image} ./_static/snsf_logo.svg 30 | :width: 220px 31 | :class: only-light 32 | ``` 33 | 34 | ```{image} ./_static/snsf_logo_dark.svg 35 | :width: 220px 36 | :class: only-dark 37 | ``` 38 | 39 | ::: 40 | 41 | :::{grid-item} 42 | :columns: 4 43 | :child-align: center 44 | 45 | ```{image} ./_static/cnes_logo.svg 46 | :width: 200px 47 | :class: only-light 48 | ``` 49 | 50 | ```{image} ./_static/cnes_logo_dark.svg 51 | :width: 200px 52 | :class: only-dark 53 | ``` 54 | 55 | ::: 56 | 57 | 58 | :::: 59 | -------------------------------------------------------------------------------- /doc/source/global_cli_information.md: -------------------------------------------------------------------------------- 1 | (global_cli_information)= 2 | 3 | # Command line interface (CLI) 4 | 5 | To simplify the use of xDEM and provide a universal tool to analyze one or multiple DEMs, 6 | we have decided to implement a Command Line Interface (CLI). 7 | To support this, we offer a set of workflows that can be easily run using a configuration file. 8 | Users can also create their own workflows and submit them for inclusion. 9 | 10 | ```{note} 11 | **Workflow definition**: a combination of various xDEM and GeoUtils features tailored to a specific application. 12 | ``` 13 | 14 | All workflows follow the same command structure: 15 | ```{code} 16 | xdem workflow_name --config config_file.yaml 17 | ``` 18 | Replace `workflow_name` with either `accuracy` or `topo` (see the description of each workflow for more detail). 19 | 20 | 21 | The YAML configuration file contains at least the required input parameters for the workflow. 22 | xDEM then automatically fills in the rest with suggested settings. Users are free to edit the 23 | configuration file to run only the parts they need.
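For illustration only, a minimal configuration could look like the sketch below (the key names here are hypothetical, as the exact schema depends on the workflow and should be generated with the template command shown next):

```{code}
# Hypothetical keys for illustration, not the actual schema
inputs:
  reference_dem: "path/to/reference_dem.tif"
  dem_to_be_aligned: "path/to/dem_to_be_aligned.tif"
output_dir: "outputs/"
```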
24 | 25 | 26 | To display a template of all available configuration options for the YAML file, use the following command: 27 | 28 | ```{code} 29 | xdem workflow_name --display-template-config 30 | ``` 31 | 32 | ```{note} 33 | At the end of the execution, several output files are saved to disk, including an HTML report 34 | and its corresponding PDF version. 35 | ``` 36 | 37 | ```{toctree} 38 | :caption: Available workflows 39 | :maxdepth: 2 40 | cli_accuracy 41 | cli_topo 42 | ``` 43 | -------------------------------------------------------------------------------- /.github/workflows/testpypi-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package to TestPyPI 2 | 3 | name: Publish to TestPyPI 4 | 5 | on: 6 | workflow_dispatch: 7 | inputs: 8 | reason: 9 | description: 'Publishing alpha versions for testing' 10 | 11 | jobs: 12 | build: 13 | name: Build distribution 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v6 17 | with: 18 | fetch-depth: 0 19 | persist-credentials: false 20 | - name: Set up Python 21 | uses: actions/setup-python@v6 22 | with: 23 | python-version: '3.12' 24 | - name: Install pypa/build 25 | run: python3 -m pip install build --user 26 | - name: Build a binary wheel and a source tarball 27 | run: python3 -m build 28 | - name: Store the distribution packages 29 | uses: actions/upload-artifact@v6 30 | with: 31 | name: python-package-distributions 32 | path: dist/ 33 | 34 | publish-to-testpypi: 35 | name: Publish Python 🐍 distribution 📦 to TestPyPI 36 | needs: 37 | - build 38 | runs-on: ubuntu-latest 39 | 40 | environment: 41 | name: testpypi 42 | url: https://test.pypi.org/p/xdem 43 | 44 | permissions: 45 | id-token: write # IMPORTANT: mandatory for trusted publishing 46 | 47 | steps: 48 | - name: Download all the dists 49 | uses: actions/download-artifact@v7 50 | with: 51 | name: python-package-distributions 52 | path: dist/ 53 | - name: Publish distribution 📦 to TestPyPI 54 | uses: pypa/gh-action-pypi-publish@release/v1 55 | with: 56 | repository-url: https://test.pypi.org/legacy/ 57 | -------------------------------------------------------------------------------- /doc/source/mission.md: -------------------------------------------------------------------------------- 1 | (mission)= 2 | # Mission 3 | 4 | ```{epigraph} 5 | The core mission of xDEM is to be **easy-to-use**, **modular** and **robust**. 6 | 7 | It also attempts to be as **efficient**, **scalable** and **state-of-the-art** as possible. 8 | 9 | Finally, as an open source package, it aspires to foster **reproducibility** and **open science**. 10 | ``` 11 | 12 | In detail, this means: 13 | 14 | - **Ease-of-use:** all basic operations or methods from published works should only require a few lines of code to be performed; 15 | 16 | - **Modularity:** all methods should be fully customizable, to allow both flexibility and inter-comparison; 17 | 18 | - **Robustness:** all methods should be tested within our continuous integration test-suite, to enforce that they always perform as expected; 19 | 20 | ```{note} 21 | :class: margin 22 | **Scalability** is currently being improved towards a first major release ``v1.0``.
23 | ``` 24 | 25 | And, additionally: 26 | 27 | - **Efficiency**: all methods should be optimized at a low level, to function with the highest performance offered by Python packages; 28 | 29 | - **Scalability**: all methods should support both lazy processing and distributed parallelized processing, to work with high-resolution data on local machines as well as on HPCs; 30 | 31 | - **State-of-the-art**: all methods should be at the cutting edge of remote sensing science, to provide users with the most reliable and up-to-date tools. 32 | 33 | And finally: 34 | 35 | - **Reproducibility:** all code should be version-controlled and release-based, to ensure consistency of dependent 36 | packages and published works; 37 | 38 | - **Open-source:** all code should be accessible and reusable to anyone in the community, for transparency and open governance. 39 | -------------------------------------------------------------------------------- /xdem/_typing.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 xDEM developers 2 | # 3 | # This file is part of the xDEM project: 4 | # https://github.com/glaciohack/xdem 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # 9 | # You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | 19 | from __future__ import annotations 20 | 21 | import sys 22 | from typing import Any, List, Tuple, Union 23 | 24 | import numpy as np 25 | 26 | # Only for Python >= 3.9 27 | if sys.version_info >= (3, 9): 28 | 29 | from numpy.typing import ( # this syntax works starting on Python 3.9 30 | ArrayLike, 31 | DTypeLike, 32 | NDArray, 33 | ) 34 | 35 | # Simply define here if they exist 36 | DTypeLike = DTypeLike 37 | ArrayLike = ArrayLike 38 | 39 | NDArrayf = NDArray[np.floating[Any]] 40 | NDArrayb = NDArray[np.bool_] 41 | MArrayf = np.ma.masked_array[Any, np.dtype[np.floating[Any]]] 42 | 43 | else: 44 | 45 | # Make an array-like type (since the array-like numpy type only exists in numpy>=1.20) 46 | DTypeLike = Union[str, type, np.dtype] # type: ignore 47 | ArrayLike = Union[np.ndarray, np.ma.masked_array, List[Any], Tuple[Any]] # type: ignore 48 | 49 | NDArrayf = np.ndarray # type: ignore 50 | NDArrayb = np.ndarray # type: ignore 51 | MArrayf = np.ma.masked_array # type: ignore 52 | -------------------------------------------------------------------------------- /doc/source/code/comparison_plot_regional_hypsometric_interpolation.py: -------------------------------------------------------------------------------- 1 | """Plot an example of regional hypsometric interpolation in central Svalbard.""" 2 | 3 | import geoutils as gu 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | 7 | import xdem 8 | 9 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 10 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 11 | outlines_1990 = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 12 | 13 | ddem = xdem.dDEM(dem_2009 - dem_1990, start_time=np.datetime64("1990-08-01"),
end_time=np.datetime64("2009-08-01")) 14 | 15 | ddem.data /= 2009 - 1990 16 | 17 | mask = outlines_1990.create_mask(ddem) 18 | 19 | ddem_bins = xdem.volume.hypsometric_binning(ddem[mask], dem_2009[mask]) 20 | stds = xdem.volume.hypsometric_binning(ddem[mask], dem_2009[mask], aggregation_function=np.std) 21 | 22 | plt.figure(figsize=(8, 8)) 23 | plt.grid(zorder=0) 24 | 25 | 26 | plt.plot(ddem_bins["value"], ddem_bins.index.mid, linestyle="--", zorder=1) 27 | 28 | plt.barh( 29 | y=ddem_bins.index.mid, 30 | width=stds["value"], 31 | left=ddem_bins["value"] - stds["value"] / 2, 32 | height=(ddem_bins.index.left - ddem_bins.index.right) * 1, 33 | zorder=2, 34 | edgecolor="black", 35 | ) 36 | for bin in ddem_bins.index: 37 | plt.vlines(ddem_bins.loc[bin, "value"], bin.left, bin.right, color="black", zorder=3) 38 | 39 | plt.xlabel("Elevation change (m / a)") 40 | plt.twiny() 41 | plt.barh( 42 | y=ddem_bins.index.mid, 43 | width=ddem_bins["count"] / ddem_bins["count"].sum(), 44 | left=0, 45 | height=(ddem_bins.index.left - ddem_bins.index.right) * 1, 46 | zorder=2, 47 | alpha=0.2, 48 | ) 49 | plt.xlabel("Normalized area distribution (hypsometry)") 50 | plt.ylabel("Elevation (m a.s.l.)") 51 | 52 | plt.tight_layout() 53 | plt.show() 54 | -------------------------------------------------------------------------------- /doc/source/code/comparison_plot_local_hypsometric_interpolation.py: -------------------------------------------------------------------------------- 1 | """Plot an example of local hypsometric interpolation at Scott Turnerbreen, Svalbard.""" 2 | 3 | import geoutils as gu 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | 7 | import xdem 8 | 9 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 10 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 11 | outlines_1990 = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 12 | 13 | ddem = xdem.dDEM(dem_2009 - dem_1990, start_time=np.datetime64("1990-08-01"), end_time=np.datetime64("2009-08-01")) 14 | 15 | ddem.data /= 2009 - 1990 16 | 17 | scott_1990 = outlines_1990.query("NAME == 'Scott Turnerbreen'") 18 | mask = scott_1990.create_mask(ddem) 19 | 20 | ddem_bins = xdem.volume.hypsometric_binning(ddem[mask], dem_2009[mask]) 21 | stds = xdem.volume.hypsometric_binning(ddem[mask], dem_2009[mask], aggregation_function=np.std) 22 | 23 | plt.figure(figsize=(8, 8)) 24 | plt.grid(zorder=0) 25 | plt.plot(ddem_bins["value"], ddem_bins.index.mid, linestyle="--", zorder=1) 26 | 27 | plt.barh( 28 | y=ddem_bins.index.mid, 29 | width=stds["value"], 30 | left=ddem_bins["value"] - stds["value"] / 2, 31 | height=(ddem_bins.index.left - ddem_bins.index.right) * 1, 32 | zorder=2, 33 | edgecolor="black", 34 | ) 35 | for bin in ddem_bins.index: 36 | plt.vlines(ddem_bins.loc[bin, "value"], bin.left, bin.right, color="black", zorder=3) 37 | 38 | plt.xlabel("Elevation change (m / a)") 39 | plt.twiny() 40 | plt.barh( 41 | y=ddem_bins.index.mid, 42 | width=ddem_bins["count"] / ddem_bins["count"].sum(), 43 | left=0, 44 | height=(ddem_bins.index.left - ddem_bins.index.right) * 1, 45 | zorder=2, 46 | alpha=0.2, 47 | ) 48 | plt.xlabel("Normalized area distribution (hypsometry)") 49 | 50 | plt.ylabel("Elevation (m a.s.l.)") 51 | 52 | plt.tight_layout() 53 | plt.show() 54 | -------------------------------------------------------------------------------- /.github/workflows/pypi-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python 
Package when a release is created 2 | # See reference: https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/ 3 | 4 | name: Publish to PyPI 5 | 6 | on: 7 | release: 8 | types: [created] 9 | 10 | workflow_dispatch: 11 | inputs: 12 | reason: 13 | description: 'Reason for manual trigger' 14 | required: true 15 | default: 'testing' 16 | 17 | jobs: 18 | build: 19 | name: Build distribution 20 | runs-on: ubuntu-latest 21 | steps: 22 | - uses: actions/checkout@v6 23 | with: 24 | fetch-depth: 0 25 | persist-credentials: false 26 | - name: Set up Python 27 | uses: actions/setup-python@v6 28 | with: 29 | python-version: '3.12' 30 | - name: Install pypa/build 31 | run: python3 -m pip install build --user 32 | - name: Build a binary wheel and a source tarball 33 | run: python3 -m build 34 | - name: Store the distribution packages 35 | uses: actions/upload-artifact@v6 36 | with: 37 | name: python-package-distributions 38 | path: dist/ 39 | 40 | publish-to-pypi: 41 | name: Publish distribution to PyPI 42 | # if: startsWith(github.ref, 'refs/tags/') # Only publish to PyPI on tag pushes, required depending on trigger at the top 43 | needs: 44 | - build 45 | runs-on: ubuntu-latest 46 | environment: 47 | name: pypi 48 | url: https://pypi.org/p/xdem # Replace with your PyPI project name 49 | permissions: 50 | id-token: write # IMPORTANT: mandatory for trusted publishing 51 | 52 | steps: 53 | - name: Download all the dists 54 | uses: actions/download-artifact@v7 55 | with: 56 | name: python-package-distributions 57 | path: dist/ 58 | - name: Publish distribution to PyPI 59 | uses: pypa/gh-action-pypi-publish@release/v1 60 | -------------------------------------------------------------------------------- /.github/scripts/generate_yml_env_fixed_py.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import argparse 4 | 5 | import yaml # type: ignore 6 | 7 | 8 | def environment_yml_nopy(fn_env: str, py_version: str, add_deps: str | None = None) -> None: 9 | """ 10 | Generate temporary environment-py3.XX.yml files forcing python versions for setup of continuous integration. 11 | 12 | :param fn_env: Filename path to environment.yml. 13 | :param py_version: Python version to force. 14 | :param add_deps: Comma-separated additional dependencies to solve for directly (for instance graphviz fails with mamba update).
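:returns: None, writes an "environment-ci-py<py_version>.yml" file to the current directory.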
15 | """ 16 | 17 | # Load the yml as dictionary 18 | yaml_env = yaml.safe_load(open(fn_env)) 19 | conda_dep_env = list(yaml_env["dependencies"]) 20 | 21 | # Force python version 22 | conda_dep_env_forced_py = ["python=" + py_version if "python" in dep else dep for dep in conda_dep_env] 23 | 24 | # Optionally, add other dependencies 25 | if add_deps is not None: 26 | conda_dep_env_forced_py.extend(add_deps.split(",")) 27 | 28 | # Copy back to new yaml dict 29 | yaml_out = yaml_env.copy() 30 | yaml_out["dependencies"] = conda_dep_env_forced_py 31 | 32 | with open("environment-ci-py" + py_version + ".yml", "w") as outfile: 33 | yaml.dump(yaml_out, outfile, default_flow_style=False) 34 | 35 | 36 | if __name__ == "__main__": 37 | parser = argparse.ArgumentParser(description="Generate environment files for CI with fixed python versions.") 38 | parser.add_argument("fn_env", metavar="fn_env", type=str, help="Path to the generic environment file.") 39 | parser.add_argument( 40 | "--pyv", 41 | dest="py_version", 42 | default="3.9", 43 | type=str, 44 | help="List of Python versions to force.", 45 | ) 46 | parser.add_argument( 47 | "--add", 48 | dest="add_deps", 49 | default=None, 50 | type=str, 51 | help="List of dependencies to add.", 52 | ) 53 | args = parser.parse_args() 54 | environment_yml_nopy(fn_env=args.fn_env, py_version=args.py_version, add_deps=args.add_deps) 55 | -------------------------------------------------------------------------------- /examples/advanced/plot_deramp.py: -------------------------------------------------------------------------------- 1 | """ 2 | Bias-correction with deramping 3 | ============================== 4 | 5 | Deramping can help correct rotational or doming errors in elevation data. 6 | In xDEM, this approach is implemented through the :class:`xdem.coreg.Deramp` class. 7 | 8 | See also the :ref:`deramp` section in feature pages. 9 | """ 10 | 11 | import geoutils as gu 12 | import numpy as np 13 | 14 | import xdem 15 | 16 | # %% 17 | # We open example files. 18 | reference_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 19 | dem_to_be_aligned = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 20 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 21 | 22 | # Create a stable ground mask (not glacierized) to mark "inlier data" 23 | inlier_mask = ~glacier_outlines.create_mask(reference_dem) 24 | 25 | # %% 26 | # We visualize the patterns of error from the elevation differences. 27 | 28 | diff_before = reference_dem - dem_to_be_aligned 29 | diff_before.plot(cmap="RdYlBu", vmin=-10, vmax=10, cbar_title="Elevation differences (m)") 30 | 31 | 32 | # %% 33 | # A 2-D 3rd order polynomial is estimated, and applied to the data: 34 | 35 | deramp = xdem.coreg.Deramp(poly_order=2) 36 | 37 | corrected_dem = deramp.fit_and_apply(reference_dem, dem_to_be_aligned, inlier_mask=inlier_mask) 38 | 39 | # %% 40 | # Then, the new difference can be plotted. 
41 | 42 | diff_after = reference_dem - corrected_dem 43 | diff_after.plot(cmap="RdYlBu", vmin=-10, vmax=10, cbar_title="Elevation differences (m)") 44 | 45 | 46 | # %% 47 | # We compare the median and NMAD to validate numerically that there was an improvement (see :ref:`robuststats-meanstd`): 48 | inliers_before = diff_before[inlier_mask] 49 | med_before, nmad_before = np.ma.median(inliers_before), gu.stats.nmad(inliers_before) 50 | 51 | inliers_after = diff_after[inlier_mask] 52 | med_after, nmad_after = np.ma.median(inliers_after), gu.stats.nmad(inliers_after) 53 | 54 | print(f"Error before: median = {med_before:.2f} - NMAD = {nmad_before:.2f} m") 55 | print(f"Error after: median = {med_after:.2f} - NMAD = {nmad_after:.2f} m") 56 | -------------------------------------------------------------------------------- /doc/source/history.md: -------------------------------------------------------------------------------- 1 | (history)= 2 | # History 3 | 4 | Below is some more information on the history behind the package. 5 | 6 | ## Creation 7 | 8 | ```{margin} 9 | <sup>1</sup>More on our GlacioHack founder at [adehecq.github.io](https://adehecq.github.io/). 10 | ``` 11 | 12 | xDEM was created during the **[GlacioHack](https://github.com/GlacioHack) hackathon**, which was initiated by 13 | Amaury Dehecq<sup>1</sup> and took place online on November 8, 2020. 14 | 15 | ```{margin} 16 | <sup>2</sup>Check out [glaciology.ch](https://glaciology.ch) to learn more about our founding group of VAW glaciology! 17 | ``` 18 | 19 | The initial core development of xDEM was performed by members of the Glaciology group of the Laboratory of Hydraulics, Hydrology and 20 | Glaciology (VAW) at ETH Zürich<sup>2</sup>, with contributions by members of the University of Oslo, the University of Washington, and University 21 | Grenoble Alpes. 22 | 23 | ## Joining effort with **demcompare** 24 | 25 | In 2024, xDEM and [demcompare](https://github.com/CNES/demcompare) joined efforts with the aim of 26 | merging the best of both packages into one, and of jointly continuing the development of new features for 27 | analyzing elevation data with a larger expertise pool. 28 | 29 | [demcompare](https://github.com/CNES/demcompare) is a tool developed by the CNES (French Space Agency) to 30 | support its 3D satellite missions in analyzing elevation data, for instance from stereophotogrammetric DEMs 31 | that can be generated with [CARS](https://github.com/CNES/cars). 32 | 33 | ## Current team 34 | 35 | ```{margin} 36 | <sup>3</sup>More on CNES's 3D missions on the [CO3D constellation page](https://cnes.fr/en/projects/co3d). 37 | ``` 38 | 39 | The current lead development team includes **researchers in Earth observation and engineers from 40 | [CNES](https://cnes.fr/en)** (French Space Agency). We specialize in elevation data analysis, for application in Earth 41 | science or for operational use in 3D satellite missions<sup>3</sup>. 42 | 43 | Other volunteer contributors span diverse scientific backgrounds in industry or research. We welcome 44 | any new contributors! See how to contribute on [the dedicated page of our repository](https://github.com/GlacioHack/xdem/blob/main/CONTRIBUTING.md).
45 | -------------------------------------------------------------------------------- /doc/source/code/intricacies_datatypes.py: -------------------------------------------------------------------------------- 1 | """Plot example of elevation data types for guide page.""" 2 | 3 | import matplotlib 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | 7 | import xdem 8 | 9 | # Open reference DEM and crop to small area 10 | ref_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 11 | ref_dem = ref_dem.crop( 12 | (ref_dem.bounds.left, ref_dem.bounds.bottom, ref_dem.bounds.left + 1000, ref_dem.bounds.bottom + 1000) 13 | ) 14 | 15 | # Get point cloud with 100 points 16 | ref_epc = ref_dem.to_pointcloud(subsample=100, random_state=42) 17 | 18 | f, ax = plt.subplots(2, 2, squeeze=False, sharex=True, sharey=True) 19 | # Plot 1: DEM 20 | ax[0, 0].set_title("DEM") 21 | ref_dem.plot(cmap="terrain", ax=ax[0, 0], vmin=280, vmax=420, cbar_title="Elevation (m)") 22 | plt.gca().set_xticklabels([]) 23 | plt.gca().set_yticklabels([]) 24 | plt.gca().set_aspect("equal") 25 | 26 | # Plot 2: EPC 27 | ax[0, 1].set_title("Elevation\npoint cloud") 28 | point = ref_epc.plot(column="b1", cmap="terrain", ax=ax[0, 1], vmin=280, vmax=420, cbar_title="Elevation (m)") 29 | plt.gca().set_xticklabels([]) 30 | plt.gca().set_yticklabels([]) 31 | plt.gca().set_aspect("equal") 32 | 33 | # Plot 3: TIN 34 | ax[1, 1].set_title("Elevation TIN") 35 | triang = matplotlib.tri.Triangulation(ref_epc.geometry.x.values, ref_epc.geometry.y.values) 36 | ax[1, 1].triplot(triang, color="gray", marker=".") 37 | scat = ax[1, 1].scatter( 38 | ref_epc.geometry.x.values, ref_epc.geometry.y.values, c=ref_epc["b1"].values, cmap="terrain", vmin=280, vmax=420 39 | ) 40 | plt.colorbar(mappable=scat, ax=ax[1, 1], label="Elevation (m)", pad=0.02) 41 | ax[1, 1].set_xticklabels([]) 42 | ax[1, 1].set_yticklabels([]) 43 | ax[1, 1].set_aspect("equal") 44 | 45 | # Plot 4: Contour 46 | ax[1, 0].set_title("Elevation contour") 47 | coords = ref_dem.coords(grid=False) 48 | cont = ax[1, 0].contour( 49 | np.flip(coords[0]), coords[1], np.flip(ref_dem.get_nanarray()), levels=15, cmap="terrain", vmin=280, vmax=420 50 | ) 51 | plt.colorbar(mappable=cont, ax=ax[1, 0], label="Elevation (m)", pad=0.02) 52 | ax[1, 0].set_xticklabels([]) 53 | ax[1, 0].set_yticklabels([]) 54 | ax[1, 0].set_aspect("equal") 55 | 56 | plt.suptitle("Types of elevation data") 57 | 58 | plt.tight_layout() 59 | plt.show() 60 | -------------------------------------------------------------------------------- /doc/source/ecosystem.md: -------------------------------------------------------------------------------- 1 | (ecosystem)= 2 | 3 | # Ecosystem 4 | 5 | xDEM is but a single tool among a large landscape of open tools for geospatial elevation analysis! Below is a list of 6 | other **tools that you might find useful to combine with xDEM**, in particular for retrieving elevation data or to perform complementary analysis. 7 | 8 | ```{seealso} 9 | Tools listed below only relate to elevation data. To analyze georeferenced rasters, vectors and point cloud data, 10 | check out **xDEM's sister-package [GeoUtils](https://geoutils.readthedocs.io/)**. 
11 | ``` 12 | ## Python 13 | 14 | Great Python tools for **pre-processing and retrieving elevation data**: 15 | - [SlideRule](https://slideruleearth.io/) to pre-process and retrieve high-resolution elevation data in the cloud, including in particular [ICESat-2](https://icesat-2.gsfc.nasa.gov/) and [GEDI](https://gedi.umd.edu/), 16 | - [pDEMtools](https://pdemtools.readthedocs.io/en/latest/) to pre-process and retrieve [ArcticDEM](https://www.pgc.umn.edu/data/arcticdem/) and [REMA](https://www.pgc.umn.edu/data/rema/) high-resolution DEMs available in polar regions, 17 | - [icepyx](https://icepyx.readthedocs.io/en/latest/) to retrieve ICESat-2 data. 18 | 19 | Complementary Python tools to **analyze elevation data** are for instance: 20 | - [PDAL](https://pdal.io/en/latest/) for working with dense elevation point clouds, 21 | - [demcompare](https://demcompare.readthedocs.io/en/stable/) to compare two DEMs, 22 | - [RichDEM](https://richdem.readthedocs.io/en/latest/) for in-depth terrain analysis, with a large range of methods including many relevant to hydrology. 23 | 24 | ## Julia 25 | 26 | If you are working in Julia, the [Geomorphometry](https://github.com/Deltares/Geomorphometry.jl) package provides a 27 | wide range of terrain analysis methods for elevation data. 28 | 29 | ## R 30 | 31 | If you are working in R, the [MultiscaleDTM](https://ailich.github.io/MultiscaleDTM/) package provides modular tools 32 | for terrain analysis at multiple scales! 33 | 34 | ## Other community resources 35 | 36 | Whether to retrieve data among their wide range of open datasets, or to dive into their other resources, be sure to check out the 37 | amazing [OpenTopography](https://opentopography.org/) and [OpenAltimetry](https://openaltimetry.earthdatacloud.nasa.gov/data/) efforts! 38 | -------------------------------------------------------------------------------- /examples/basic/plot_logging_configuration.py: -------------------------------------------------------------------------------- 1 | """ 2 | Configuring verbosity level 3 | =========================== 4 | 5 | This example demonstrates how to configure the verbosity level, or logging, using a coregistration method. 6 | Logging can be customized to various severity levels, from ``DEBUG`` for detailed diagnostic output, to ``INFO`` for 7 | general updates, ``WARNING`` for potential issues, and ``ERROR`` or ``CRITICAL`` for serious problems. 8 | 9 | Setting the verbosity to a certain severity level prints all outputs from that level and those above. For instance, 10 | level ``INFO`` also prints warnings, errors and critical messages. 11 | 12 | See also :ref:`config`. 13 | 14 | .. important:: The verbosity level defaults to ``WARNING``, so no ``INFO`` or ``DEBUG`` is printed. 15 | """ 16 | 17 | import logging 18 | 19 | import xdem 20 | 21 | # %% 22 | # We start by configuring the logging level, which can be as simple as specifying we want to print information. 23 | logging.basicConfig(level=logging.INFO) 24 | 25 | # %% 26 | # We can change the configuration even more by specifying the format, date, and multiple destinations for the output. 27 | logging.basicConfig( 28 | level=logging.INFO, # Change this level to DEBUG or WARNING to see different outputs.
29 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", 30 | datefmt="%Y-%m-%d %H:%M:%S", 31 | handlers=[ 32 | logging.FileHandler("../xdem_example.log"), # Save logs to a file 33 | logging.StreamHandler(), # Also print logs to the console 34 | ], 35 | force=True, # To re-set from previous logging 36 | ) 37 | 38 | # %% 39 | # We can now load example files and demonstrate the logging through a functionality, such as coregistration. 40 | reference_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 41 | dem_to_be_aligned = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 42 | coreg = xdem.coreg.NuthKaab() 43 | 44 | # %% 45 | # With the ``INFO`` verbosity level defined above, we can follow the iteration with a detailed format, saved to file. 46 | aligned_dem = coreg.fit_and_apply(reference_dem, dem_to_be_aligned) 47 | 48 | # %% 49 | # With a more severe verbosity level, there is no output. 50 | logging.basicConfig(level=logging.ERROR, force=True) 51 | aligned_dem = coreg.fit_and_apply(reference_dem, dem_to_be_aligned) 52 | -------------------------------------------------------------------------------- /examples/basic/plot_terrain_attributes.py: -------------------------------------------------------------------------------- 1 | """ 2 | Terrain attributes 3 | ================== 4 | 5 | Terrain attributes generated from a DEM have a multitude of uses for analytic and visual purposes. 6 | Here is an example of how to generate these products. 7 | 8 | As a reminder: the slope, aspect, and hillshade are calculated following Horn (1981); the system of curvatures follows that described by Minár et al. (2020)l and the terrain ruggedness index and rugosity follow Riley et al. (1999) and Jenness (2004), respectively. For more information, see the :ref:`terrain-attributes` feature page. 9 | 10 | **References:** `Horn (1981) `_ (slope, aspect, hillshade), 11 | `Minár et al. (2020) <[Minár et al. (2020)](https://doi.org/10.1016/j.earscirev.2020.103414)>`_ (curvature), 12 | `Riley et al. (1999) `_ (terrain 13 | ruggedness index), `Jenness (2004) `_ (rugosity). 14 | """ 15 | 16 | # sphinx_gallery_thumbnail_number = 1 17 | import matplotlib.pyplot as plt 18 | 19 | import xdem 20 | 21 | # %% 22 | # We load the example data. 23 | 24 | dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 25 | 26 | # %% 27 | # We generate multiple terrain attribute at once (more efficient computationally as some depend on each other). 
28 | 29 | attributes = xdem.terrain.get_terrain_attribute( 30 | dem.data, 31 | resolution=dem.res, 32 | attribute=["hillshade", "slope", "aspect", "profile_curvature", "terrain_ruggedness_index", "rugosity"], 33 | ) 34 | 35 | plt.figure(figsize=(8, 6.5)) 36 | 37 | plt_extent = [dem.bounds.left, dem.bounds.right, dem.bounds.bottom, dem.bounds.top] 38 | 39 | cmaps = ["Greys_r", "Reds", "twilight", "RdGy_r", "Purples", "YlOrRd"] 40 | labels = ["Hillshade", "Slope (°)", "Aspect (°)", "Profile Curvature (100 / m)", "Terrain Ruggedness Index", "Rugosity"] 41 | vlims = [(None, None) for i in range(6)] 42 | vlims[3] = [-2, 2] 43 | 44 | for i in range(6): 45 | plt.subplot(3, 2, i + 1) 46 | plt.imshow(attributes[i].squeeze(), cmap=cmaps[i], extent=plt_extent, vmin=vlims[i][0], vmax=vlims[i][1]) 47 | cbar = plt.colorbar() 48 | cbar.set_label(labels[i]) 49 | plt.xticks([]) 50 | plt.yticks([]) 51 | 52 | plt.tight_layout() 53 | plt.show() 54 | -------------------------------------------------------------------------------- /tests/test_doc.py: -------------------------------------------------------------------------------- 1 | """Functions to test the documentation.""" 2 | 3 | import logging 4 | import os 5 | import warnings 6 | 7 | import pytest 8 | 9 | 10 | class TestDocs: 11 | docs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../", "doc/") 12 | n_threads = os.getenv("N_CPUS") 13 | 14 | def test_example_code(self) -> None: 15 | 16 | # Import optional skgstat or skip test 17 | pytest.importorskip("skgstat") 18 | 19 | """Try running each python script in the doc/source/code\ 20 | directory and check that it doesn't raise an error.""" 21 | current_dir = os.getcwd() 22 | os.chdir(os.path.join(self.docs_dir, "source")) 23 | 24 | def run_code(filename: str) -> None: 25 | """Run a python script in one thread.""" 26 | with open(filename) as infile: 27 | # Run everything except plt.show() calls. 
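# (Each script is run with ``exec``, with plt.show() swapped for plt.close() below so figures
# do not block or accumulate during the test run.)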
28 | with warnings.catch_warnings(): 29 | # When running the code asynchronously, matplotlib complains a bit 30 | ignored_warnings = [ 31 | "Starting a Matplotlib GUI outside of the main thread", 32 | ".*fetching the attribute.*Polygon.*", 33 | ] 34 | # This is a GeoPandas issue 35 | for warning_text in ignored_warnings: 36 | warnings.filterwarnings("ignore", warning_text) 37 | try: 38 | exec(infile.read().replace("plt.show()", "plt.close()")) 39 | except Exception as exception: 40 | if isinstance(exception, DeprecationWarning): 41 | logging.warning(exception) 42 | else: 43 | raise RuntimeError(f"Failed on {filename}") from exception 44 | 45 | filenames = [os.path.join("code", filename) for filename in os.listdir("code/") if filename.endswith(".py")] 46 | 47 | for filename in filenames: 48 | run_code(filename) 49 | """ 50 | with concurrent.futures.ThreadPoolExecutor( 51 | max_workers=int(self.n_threads) if self.n_threads is not None else None 52 | ) as executor: 53 | list(executor.map(run_code, filenames)) 54 | """ 55 | 56 | os.chdir(current_dir) 57 | -------------------------------------------------------------------------------- /tests/test_examples.py: -------------------------------------------------------------------------------- 1 | """Functions to test the example data.""" 2 | 3 | from __future__ import annotations 4 | 5 | import geoutils as gu 6 | import numpy as np 7 | import pytest 8 | from geoutils import Raster, Vector 9 | 10 | from xdem import examples 11 | from xdem._typing import NDArrayf 12 | 13 | 14 | def load_examples() -> tuple[Raster, Raster, Vector, Raster]: 15 | """Load example files to try coregistration methods with.""" 16 | 17 | ref_dem = Raster(examples.get_path("longyearbyen_ref_dem")) 18 | tba_dem = Raster(examples.get_path("longyearbyen_tba_dem")) 19 | glacier_mask = Vector(examples.get_path("longyearbyen_glacier_outlines")) 20 | ddem = Raster(examples.get_path("longyearbyen_ddem")) 21 | 22 | return ref_dem, tba_dem, glacier_mask, ddem 23 | 24 | 25 | class TestExamples: 26 | 27 | ref_dem, tba_dem, glacier_mask, ddem = load_examples() 28 | 29 | @pytest.mark.parametrize( 30 | "rst_truevals_abs", 31 | [ 32 | (ref_dem, np.array([465.11816, 207.3236, 208.30563, 748.7337, 797.28644], dtype=np.float32), None), 33 | (tba_dem, np.array([464.6715, 213.7554, 207.8788, 760.8192, 797.3268], dtype=np.float32), None), 34 | (ddem, np.array([1.37, -1.67, 0.13, -10.10, 2.49], dtype=np.float32), 10e-3), 35 | ], 36 | ) # type: ignore 37 | def test_array_content(self, rst_truevals_abs: tuple[Raster, NDArrayf, float]) -> None: 38 | """Let's ensure the data arrays in the examples are always the same by randomly checking some values""" 39 | 40 | rst = rst_truevals_abs[0] 41 | truevals = rst_truevals_abs[1] 42 | atol = rst_truevals_abs[2] 43 | 44 | rng = np.random.default_rng(42) 45 | values = rng.choice(rst.data.data.flatten(), size=5, replace=False) 46 | 47 | assert values == pytest.approx(truevals, abs=atol) 48 | 49 | # Note: Following PR #329, no gaps on DEM edges after coregistration 50 | @pytest.mark.parametrize("rst_and_truenodata", [(ref_dem, 0), (tba_dem, 0), (ddem, 0)]) # type: ignore 51 | def test_array_nodata(self, rst_and_truenodata: tuple[Raster, int]) -> None: 52 | """Let's also check that the data arrays always have the same number of non-finite values""" 53 | 54 | rst = rst_and_truenodata[0] 55 | truenodata = rst_and_truenodata[1] 56 | mask = gu.raster.get_array_and_mask(rst)[1] 57 | 58 | assert np.sum(mask) == truenodata 59 |
-------------------------------------------------------------------------------- /doc/source/code/spatialstats_standardizing.py: -------------------------------------------------------------------------------- 1 | """Documentation plot illustrating standardization of a distribution""" 2 | 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | 6 | # Example x vector 7 | mu = 15 8 | sig = 5 9 | rng = np.random.default_rng(42) 10 | y = rng.normal(mu, sig, size=300) 11 | 12 | fig, ax1 = plt.subplots(figsize=(8, 3)) 13 | 14 | # Original histogram 15 | ax1.hist(y, color="tab:blue", edgecolor="white", linewidth=0.5, alpha=0.7) 16 | ax1.vlines(mu, ymin=0, ymax=90, color="tab:blue", linestyle="dashed", lw=2) 17 | ax1.vlines([mu - 2 * sig, mu + 2 * sig], ymin=0, ymax=90, colors=["tab:blue", "tab:blue"], linestyles="dotted", lw=2) 18 | ax1.annotate( 19 | "Original\ndata $x$\n$\\mu_{x} = 15$\n$\\sigma_{x} = 5$", 20 | xy=(mu + 0.5, 85), 21 | xytext=(mu + 5, 110), 22 | arrowprops=dict(color="tab:blue", width=0.5, headwidth=8), 23 | color="tab:blue", 24 | fontweight="bold", 25 | ha="left", 26 | ) 27 | ax1.spines["right"].set_visible(False) 28 | ax1.spines["top"].set_visible(False) 29 | ax1.spines["left"].set_visible(False) 30 | ax1.set_yticks([]) 31 | ax1.set_ylim((0, 130)) 32 | 33 | # Standardized histogram 34 | ax1.hist((y - mu) / sig, color="tab:olive", edgecolor="white", linewidth=0.5, alpha=0.7) 35 | ax1.vlines(0, ymin=0, ymax=90, color="tab:olive", linestyle="dashed", lw=2) 36 | ax1.vlines([-2, 2], ymin=0, ymax=90, colors=["tab:olive", "tab:olive"], linestyles="dotted", lw=2) 37 | ax1.annotate( 38 | "Standardized\ndata $z$\n$\\mu_{z} = 0$\n$\\sigma_{z} = 1$", 39 | xy=(-0.3, 85), 40 | xytext=(-5, 110), 41 | arrowprops=dict(color="tab:olive", width=0.5, headwidth=8), 42 | color="tab:olive", 43 | fontweight="bold", 44 | ha="left", 45 | ) 46 | ax1.spines["right"].set_visible(False) 47 | ax1.spines["top"].set_visible(False) 48 | ax1.spines["left"].set_visible(False) 49 | ax1.set_yticks([]) 50 | ax1.set_ylim((0, 130)) 51 | 52 | ax1.annotate( 53 | "", 54 | xy=(0, 65), 55 | xytext=(mu, 65), 56 | arrowprops=dict(arrowstyle="-|>", connectionstyle="arc3,rad=0.2", fc="w"), 57 | color="black", 58 | ) 59 | ax1.text( 60 | mu / 2, 61 | 90, 62 | "Standardization:\n$z = \\frac{x - \\mu}{\\sigma}$", 63 | color="black", 64 | ha="center", 65 | fontsize=14, 66 | fontweight="bold", 67 | ) 68 | ax1.plot([], [], color="tab:gray", linestyle="dashed", label="Mean") 69 | ax1.plot([], [], color="tab:gray", linestyle="dotted", label="Standard\ndeviation (2$\\sigma$)") 70 | ax1.legend(loc="center right") 71 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | author = xDEM developers 3 | name = xdem 4 | version = 0.1.7a2 5 | description = Analysis of digital elevation models (DEMs) 6 | keywords = dem, elevation, geoutils, xarray 7 | long_description = file: README.md 8 | long_description_content_type = text/markdown 9 | license = Apache-2.0 10 | license_files = LICENSE 11 | platform = any 12 | classifiers = 13 | Development Status :: 4 - Beta 14 | Intended Audience :: Developers 15 | Intended Audience :: Science/Research 16 | Natural Language :: English 17 | Operating System :: OS Independent 18 | Topic :: Scientific/Engineering :: GIS 19 | Topic :: Scientific/Engineering :: Image Processing 20 | Topic :: Scientific/Engineering :: Information Analysis 21 | Programming Language :: 
Python 22 | Programming Language :: Python :: 3.10 23 | Programming Language :: Python :: 3.11 24 | Programming Language :: Python :: 3.12 25 | Programming Language :: Python :: 3.13 26 | Programming Language :: Python :: 3 27 | Topic :: Software Development :: Libraries :: Python Modules 28 | Typing :: Typed 29 | url = https://github.com/GlacioHack/xdem 30 | download_url = https://pypi.org/project/xdem/ 31 | 32 | [options] 33 | packages = find: 34 | zip_safe = False # https://mypy.readthedocs.io/en/stable/installed_packages.html 35 | include_package_data = True 36 | python_requires = >=3.10,<3.14 37 | # Avoid pinning dependencies in requirements.txt (which we don't do anyways, and we rely mostly on Conda) 38 | # (https://caremad.io/posts/2013/07/setup-vs-requirement/, https://github.com/pypa/setuptools/issues/1951) 39 | install_requires = file: requirements.txt 40 | 41 | [options.package_data] 42 | xdem = 43 | py.typed 44 | 45 | [options.packages.find] 46 | include = 47 | xdem 48 | xdem.* 49 | 50 | [options.extras_require] 51 | opt = 52 | pytransform3d 53 | scikit-learn 54 | scikit-gstat>=1.0.18 55 | test = 56 | pytest 57 | pytest-xdist 58 | pytest-instafail 59 | pytest-socket 60 | pytest-cov 61 | coveralls 62 | flake8 63 | pylint 64 | pre-commit 65 | doc = 66 | sphinx 67 | sphinx-book-theme 68 | sphinxcontrib-programoutput 69 | sphinx-design 70 | sphinx-autodoc-typehints 71 | sphinx-gallery 72 | autovizwidget 73 | graphviz 74 | myst-nb 75 | numpydoc 76 | sphinx-tabs 77 | dev = 78 | %(opt)s 79 | %(doc)s 80 | %(test)s 81 | all = 82 | %(dev)s 83 | 84 | [options.entry_points] 85 | console_scripts = 86 | xdem = xdem.cli:main 87 | -------------------------------------------------------------------------------- /examples/basic/plot_dem_subtraction.py: -------------------------------------------------------------------------------- 1 | """ 2 | DEM differencing 3 | ================ 4 | 5 | Subtracting one DEM from another should be easy. 6 | 7 | xDEM allows using any operator on :class:`xdem.DEM` objects, such as :func:`+` or :func:`-`, as well as most NumPy functions, 8 | while respecting nodata values and checking that georeferencing is consistent. This functionality is inherited from `GeoUtils' Raster class `_. 9 | 10 | Before DEMs can be compared, they need to be reprojected to the same grid and have the same 3D CRSs. The :func:`~xdem.DEM.reproject` and :func:`~xdem.DEM.to_vcrs` methods are used for this. 11 | 12 | """ 13 | 14 | import geoutils as gu 15 | 16 | import xdem 17 | 18 | # %% 19 | # We load two DEMs near Longyearbyen, Svalbard. 20 | 21 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 22 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem_coreg")) 23 | 24 | # %% 25 | # We can print the information about the DEMs for a "sanity check". 26 | 27 | dem_2009.info() 28 | dem_1990.info() 29 | 30 | # %% 31 | # In this particular case, the two DEMs are already on the same grid (they have the same bounds, resolution and coordinate system). 32 | # If they were not, we would need to reproject one DEM to fit the other using :func:`xdem.DEM.reproject`: 33 | 34 | dem_1990 = dem_1990.reproject(dem_2009) 35 | 36 | # %% 37 | # Oops! 38 | # GeoUtils just warned us that ``dem_1990`` did not need reprojection. We can hide this output with ``silent``. 39 | # By default, :func:`~xdem.DEM.reproject` uses "bilinear" resampling (assuming resampling is needed). 40 | # Other options are detailed at `geoutils.Raster.reproject() `_ and `rasterio.enums.Resampling `_.
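# For instance, a specific algorithm can be requested through the ``resampling`` argument; an illustrative
# call (not executed here, and assuming the string form maps to ``rasterio.enums.Resampling``) could be
# ``dem_1990.reproject(dem_2009, resampling="cubic")``.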
41 | # 42 | # We now compute the difference by simply subtracting, passing ``stats=True`` to :func:`xdem.DEM.info` to print statistics. 43 | 44 | ddem = dem_2009 - dem_1990 45 | 46 | ddem.info(stats=True) 47 | 48 | # %% 49 | # It is a new :class:`~xdem.DEM` instance, loaded in memory. 50 | # Let's visualize it, with some glacier outlines. 51 | 52 | # Load the outlines 53 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 54 | glacier_outlines = glacier_outlines.crop(ddem, clip=True) 55 | ddem.plot(cmap="RdYlBu", vmin=-20, vmax=20, cbar_title="Elevation differences (m)") 56 | glacier_outlines.plot(ref_crs=ddem, fc="none", ec="k") 57 | 58 | # %% 59 | # And we save the output to file. 60 | 61 | ddem.to_file("temp.tif") 62 | -------------------------------------------------------------------------------- /doc/source/code/robust_vario.py: -------------------------------------------------------------------------------- 1 | """Plot example of Dowd variogram as robust estimator for guide page.""" 2 | 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | from skgstat import OrdinaryKriging, Variogram 6 | 7 | import xdem 8 | 9 | # Inspired by test_variogram in skgstat 10 | # Generate some random but spatially correlated data with a range of ~20 11 | np.random.seed(42) 12 | c = np.random.default_rng(41).random((50, 2)) * 60 13 | np.random.seed(42) 14 | v = np.random.default_rng(42).normal(10, 4, 50) 15 | 16 | V = Variogram(c, v).describe() 17 | V["effective_range"] = 20 18 | OK = OrdinaryKriging(V, coordinates=c, values=v) 19 | 20 | c = np.array(np.meshgrid(np.arange(60), np.arange(60).T)).reshape(2, 60 * 60).T 21 | dh = OK.transform(c) 22 | dh = dh.reshape((60, 60)) 23 | 24 | # Add outliers 25 | dh_outliers = dh.copy() 26 | dh_outliers[0:6, 0:6] = -20 27 | 28 | # Derive empirical variogram for Dowd and Matheron 29 | df_inl_matheron = xdem.spatialstats.sample_empirical_variogram( 30 | dh, estimator="matheron", gsd=1, random_state=42, subsample=2000 31 | ) 32 | df_inl_dowd = xdem.spatialstats.sample_empirical_variogram(dh, estimator="dowd", gsd=1, random_state=42, subsample=2000) 33 | 34 | df_all_matheron = xdem.spatialstats.sample_empirical_variogram( 35 | dh_outliers, estimator="matheron", gsd=1, random_state=42, subsample=2000 36 | ) 37 | df_all_dowd = xdem.spatialstats.sample_empirical_variogram( 38 | dh_outliers, estimator="dowd", gsd=1, random_state=42, subsample=2000 39 | ) 40 | 41 | fig, ax = plt.subplots() 42 | 43 | ax.plot(df_inl_matheron.lags, df_inl_matheron.exp, color="black", marker="x") 44 | ax.plot(df_inl_dowd.lags, df_inl_dowd.exp, color="black", linestyle="dashed", marker="x") 45 | ax.plot(df_all_matheron.lags, df_all_matheron.exp, color="red", marker="x") 46 | ax.plot(df_all_dowd.lags, df_all_dowd.exp, color="red", linestyle="dashed", marker="x") 47 | 48 | 49 | p1 = plt.plot([], [], color="darkgrey", label="Matheron", marker="x") 50 | p2 = plt.plot([], [], color="darkgrey", linestyle="dashed", label="Dowd", marker="x") 51 | first_legend = ax.legend(handles=[p[0] for p in [p1, p2]], loc="lower right") 52 | ax.add_artist(first_legend) 53 | 54 | p1 = plt.plot([], [], color="black", label="Inlier data") 55 | p2 = plt.plot([], [], color="red", label="Inlier data + outlier data \n(1% of data replaced by 10 NMAD)") 56 | second_legend = ax.legend(handles=[p[0] for p in [p1, p2]], loc="upper left") 57 | ax.add_artist(second_legend) 58 | 59 | ax.set_xlabel("Spatial lag (m)") 60 | ax.set_ylabel("Variance of elevation changes (m²)") 61 |
ax.set_ylim((0, 15)) 62 | ax.set_xlim((0, 40)) 63 | 64 | ax.set_title("Effect of outliers on estimating\nspatial correlation") 65 | -------------------------------------------------------------------------------- /doc/source/code/spatialstats_stationarity_assumption.py: -------------------------------------------------------------------------------- 1 | """Documentation plot illustrating stationarity of mean and variance""" 2 | 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | 6 | import xdem 7 | 8 | # Example x vector 9 | x = np.linspace(0, 1, 200) 10 | 11 | sig = 0.2 12 | rng = np.random.default_rng(42) 13 | y_rand1 = rng.normal(0, sig, size=len(x)) 14 | y_rand2 = rng.normal(0, sig, size=len(x)) 15 | y_rand3 = rng.normal(0, sig, size=len(x)) 16 | 17 | 18 | y_mean = np.array([0.5 * xval - 0.25 if xval > 0.5 else 0.5 * (1 - xval) - 0.25 for xval in x]) 19 | 20 | fac_y_std = 0.5 + 2 * x 21 | 22 | 23 | fig, (ax1, ax2, ax3) = plt.subplots(ncols=3, figsize=(8, 4)) 24 | 25 | # Stationary mean and variance 26 | ax1.plot(x, y_rand1, color="tab:blue", linewidth=0.5) 27 | ax1.hlines(0, xmin=0, xmax=1, color="black", label="Mean") 28 | ax1.hlines( 29 | [-2 * sig, 2 * sig], 30 | xmin=0, 31 | xmax=1, 32 | colors=["tab:gray", "tab:gray"], 33 | label="Standard deviation", 34 | linestyles="dashed", 35 | ) 36 | ax1.set_xlim((0, 1)) 37 | ax1.set_title("Stationary mean\nStationary variance") 38 | # ax1.legend() 39 | ax1.spines["right"].set_visible(False) 40 | ax1.spines["top"].set_visible(False) 41 | ax1.set_ylim((-1, 1)) 42 | ax1.set_xticks([]) 43 | ax1.set_yticks([]) 44 | ax1.plot(1, 0, ">k", transform=ax1.transAxes, clip_on=False) 45 | ax1.plot(0, 1, "^k", transform=ax1.transAxes, clip_on=False) 46 | 47 | # Non-stationary mean and stationary variance 48 | ax2.plot(x, y_rand2 + y_mean, color="tab:olive", linewidth=0.5) 49 | ax2.plot(x, y_mean, color="black", label="Mean") 50 | ax2.plot(x, y_mean + 2 * sig, color="tab:gray", label="Dispersion (2$\\sigma$)", linestyle="dashed") 51 | ax2.plot(x, y_mean - 2 * sig, color="tab:gray", linestyle="dashed") 52 | ax2.set_xlim((0, 1)) 53 | ax2.set_title("Non-stationary mean\nStationary variance") 54 | ax2.legend(loc="lower center") 55 | ax2.spines["right"].set_visible(False) 56 | ax2.spines["top"].set_visible(False) 57 | ax2.set_xticks([]) 58 | ax2.set_yticks([]) 59 | ax2.set_ylim((-1, 1)) 60 | ax2.plot(1, 0, ">k", transform=ax2.transAxes, clip_on=False) 61 | ax2.plot(0, 1, "^k", transform=ax2.transAxes, clip_on=False) 62 | 63 | # Stationary mean and non-stationary variance 64 | ax3.plot(x, y_rand3 * fac_y_std, color="tab:orange", linewidth=0.5) 65 | ax3.hlines(0, xmin=0, xmax=1, color="black", label="Mean") 66 | ax3.plot(x, 2 * sig * fac_y_std, color="tab:gray", linestyle="dashed") 67 | ax3.plot(x, -2 * sig * fac_y_std, color="tab:gray", linestyle="dashed") 68 | ax3.set_xlim((0, 1)) 69 | ax3.set_title("Stationary mean\nNon-stationary variance") 70 | # ax1.legend() 71 | ax3.spines["right"].set_visible(False) 72 | ax3.spines["top"].set_visible(False) 73 | ax3.set_xticks([]) 74 | ax3.set_yticks([]) 75 | ax3.set_ylim((-1, 1)) 76 | ax3.plot(1, 0, ">k", transform=ax3.transAxes, clip_on=False) 77 | ax3.plot(0, 1, "^k", transform=ax3.transAxes, clip_on=False) 78 | 79 | plt.tight_layout() 80 | plt.show() 81 | -------------------------------------------------------------------------------- /examples/basic/plot_infer_heterosc.py: -------------------------------------------------------------------------------- 1 | """ 2 | Elevation error map 3 | =================== 
4 | 5 | Digital elevation models have a precision that can vary with terrain and instrument-related variables. Here, we 6 | rely on a non-stationary spatial statistics framework to estimate and model this variability in elevation error, 7 | using terrain slope and maximum curvature as explanatory variables, with stable terrain as an error proxy for moving 8 | terrain. 9 | 10 | **Reference:** `Hugonnet et al. (2022) `_. 11 | """ 12 | 13 | import geoutils as gu 14 | 15 | # sphinx_gallery_thumbnail_number = 1 16 | import xdem 17 | 18 | # %% 19 | # We load a difference of DEMs at Longyearbyen, already coregistered using :ref:`nuthkaab` as shown in 20 | # the :ref:`sphx_glr_basic_examples_plot_nuth_kaab.py` example. We also load the reference DEM to derive terrain 21 | # attributes and the glacier outlines here corresponding to moving terrain. 22 | dh = xdem.DEM(xdem.examples.get_path("longyearbyen_ddem")) 23 | ref_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 24 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 25 | 26 | # %% 27 | # We derive the terrain slope and max curvature from the reference DEM. 28 | slope, max_curvature = xdem.terrain.get_terrain_attribute(ref_dem, attribute=["slope", "max_curvature"]) 29 | 30 | # %% 31 | # Then, we run the pipeline for inference of elevation heteroscedasticity from stable terrain: 32 | errors, df_binning, error_function = xdem.spatialstats.infer_heteroscedasticity_from_stable( 33 | dvalues=dh, list_var=[slope, max_curvature], list_var_names=["slope", "maxc"], unstable_mask=glacier_outlines 34 | ) 35 | 36 | # %% 37 | # The first output corresponds to the error map for the DEM (:math:`\pm` 1\ :math:`\sigma` level): 38 | errors.plot(vmin=2, vmax=7, cmap="Reds", cbar_title=r"Elevation error (1$\sigma$, m)") 39 | 40 | # %% 41 | # The second output is the dataframe of 2D binning with slope and max curvature: 42 | df_binning 43 | 44 | # %% 45 | # The third output is the 2D binning interpolant, i.e. an error function with the slope and max curvature 46 | # (*Note: below we divide the max curvature by 100 to convert it in* m\ :sup:`-1` ): 47 | for slope, maxc in [(0, 0), (40, 0), (0, 5), (40, 5)]: 48 | print( 49 | "Error for a slope of {:.0f} degrees and" 50 | " {:.2f} m-1 max. curvature: {:.1f} m".format(slope, maxc / 100, error_function((slope, maxc))) 51 | ) 52 | 53 | # %% 54 | # This pipeline will not always work optimally with default parameters: spread estimates can be affected by skewed 55 | # distributions, the binning by extreme range of values, some DEMs do not have any error variability with terrain (e.g., 56 | # terrestrial photogrammetry). **To learn how to tune more parameters and use the subfunctions, see the gallery example:** 57 | # :ref:`sphx_glr_advanced_examples_plot_heterosc_estimation_modelling.py`! 58 | -------------------------------------------------------------------------------- /examples/basic/plot_nuth_kaab.py: -------------------------------------------------------------------------------- 1 | """ 2 | Nuth and Kääb coregistration 3 | ============================ 4 | 5 | The Nuth and Kääb coregistration corrects horizontal and vertical shifts, and is especially performant for precise 6 | sub-pixel alignment in areas with varying slope. 7 | In xDEM, this approach is implemented through the :class:`xdem.coreg.NuthKaab` class. 8 | 9 | See also the :ref:`nuthkaab` section in feature pages. 10 | 11 | **Reference:** `Nuth and Kääb (2011) `_. 
12 | """ 13 | 14 | import geoutils as gu 15 | import numpy as np 16 | 17 | import xdem 18 | 19 | # %% 20 | # We open example files. 21 | reference_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 22 | dem_to_be_aligned = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 23 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 24 | 25 | # We create a stable ground mask (not glacierized) to mark "inlier data". 26 | inlier_mask = ~glacier_outlines.create_mask(reference_dem) 27 | 28 | # %% 29 | # The DEM to be aligned (a 1990 photogrammetry-derived DEM) has some vertical and horizontal biases that we want to reduce. 30 | # These can be visualized by plotting a change map: 31 | 32 | diff_before = reference_dem - dem_to_be_aligned 33 | diff_before.plot(cmap="RdYlBu", vmin=-10, vmax=10, cbar_title="Elevation change (m)") 34 | 35 | # %% 36 | # Horizontal and vertical shifts can be estimated using :class:`~xdem.coreg.NuthKaab`. 37 | # The shifts are estimated then applied to the to-be-aligned elevation data: 38 | 39 | nuth_kaab = xdem.coreg.NuthKaab() 40 | aligned_dem = nuth_kaab.fit_and_apply(reference_dem, dem_to_be_aligned, inlier_mask) 41 | 42 | # %% 43 | # The shifts are stored in the affine metadata output 44 | 45 | print([nuth_kaab.meta["outputs"]["affine"][s] for s in ["shift_x", "shift_y", "shift_z"]]) 46 | 47 | # %% 48 | # Then, the new difference can be plotted to validate that it improved. 49 | 50 | diff_after = reference_dem - aligned_dem 51 | diff_after.plot(cmap="RdYlBu", vmin=-10, vmax=10, cbar_title="Elevation change (m)") 52 | 53 | # %% 54 | # We compare the median and NMAD to validate numerically that there was an improvement (see :ref:`robuststats-meanstd`): 55 | inliers_before = diff_before[inlier_mask] 56 | med_before, nmad_before = np.ma.median(inliers_before), gu.stats.nmad(inliers_before) 57 | 58 | inliers_after = diff_after[inlier_mask] 59 | med_after, nmad_after = np.ma.median(inliers_after), gu.stats.nmad(inliers_after) 60 | 61 | print(f"Error before: median = {med_before:.2f} - NMAD = {nmad_before:.2f} m") 62 | print(f"Error after: median = {med_after:.2f} - NMAD = {nmad_after:.2f} m") 63 | 64 | # %% 65 | # In the plot above, one may notice a positive (blue) tendency toward the east. 66 | # The 1990 DEM is a mosaic, and likely has a "seam" near there. 67 | # :ref:`sphx_glr_advanced_examples_plot_blockwise_coreg.py` tackles this issue, using a nonlinear coregistration approach. 68 | -------------------------------------------------------------------------------- /doc/source/about_xdem.md: -------------------------------------------------------------------------------- 1 | (about-xdem)= 2 | 3 | # About xDEM 4 | 5 | ## What is xDEM? 6 | 7 | xDEM is a Python package for the analysis of elevation data, and in particular that of digital elevation models (DEMs), 8 | with name standing for _cross-DEM analysis_[^sn1] and echoing its dependency on [Xarray](https://docs.xarray.dev/en/stable/). 9 | 10 | [^sn1]: Several core features of xDEM, in particular coregistration and uncertainty analysis, rely specifically on cross-analysis of elevation data over static surfaces. 11 | 12 | ## Why use xDEM? 13 | 14 | xDEM implements a wide range of high-level operations required for analyzing elevation data in a consistent framework 15 | tested to ensure the accuracy of these operations. 16 | 17 | It has three main focus points: 18 | 19 | 1. Having an **easy and intuitive interface** based on the principle of least knowledge, 20 | 2. 
Providing **statistically robust methods** for reliable quantitative analysis, 21 | 3. Allowing **modular user input** to adapt to most applications. 22 | 23 | Although modularity can sometimes hamper performance, we also aim to **preserve scalability** as much as possible[^sn2]. 24 | 25 | [^sn2]: Out-of-memory, parallelizable computations relying on Dask are planned for 2025! 26 | 27 | We take particular care to verify the accuracy of our methods. For instance, our terrain attributes, 28 | which have their own modular Python-based implementation, are tested to match exactly 29 | [gdaldem](https://gdal.org/programs/gdaldem.html) (slope, aspect, hillshade, roughness) and 30 | [RichDEM](https://richdem.readthedocs.io/en/latest/) (curvatures). 31 | 32 | ## Who is behind xDEM? 33 | 34 | xDEM was created by a group of researchers with expertise in elevation data analysis for change detection applied to glaciology. 35 | Nowadays, its development is **jointly led by researchers in elevation data analysis** (including funding from NASA and SNSF) **and 36 | engineers from CNES** (French Space Agency). 37 | 38 | Most contributors and users are scientists or industry professionals working in **various fields of Earth observation**. 39 | 40 | 41 | ```{note} 42 | :class: tip 43 | :class: margin 44 | 45 | xDEM is **merging efforts with CNES's [demcompare](https://github.com/CNES/demcompare)** to combine the best of both tools into one! 46 | ``` 47 | 48 | ::::{grid} 49 | :reverse: 50 | 51 | :::{grid-item} 52 | :columns: 4 53 | :child-align: center 54 | 55 | ```{image} ./_static/nasa_logo.svg 56 | :width: 200px 57 | :class: dark-light 58 | ``` 59 | 60 | ::: 61 | 62 | :::{grid-item} 63 | :columns: 4 64 | :child-align: center 65 | 66 | ```{image} ./_static/snsf_logo.svg 67 | :width: 220px 68 | :class: only-light 69 | ``` 70 | 71 | ```{image} ./_static/snsf_logo_dark.svg 72 | :width: 220px 73 | :class: only-dark 74 | ``` 75 | 76 | ::: 77 | 78 | :::{grid-item} 79 | :columns: 4 80 | :child-align: center 81 | 82 | ```{image} ./_static/cnes_logo.svg 83 | :width: 200px 84 | :class: only-light 85 | ``` 86 | 87 | ```{image} ./_static/cnes_logo_dark.svg 88 | :width: 200px 89 | :class: only-dark 90 | ``` 91 | 92 | ::: 93 | 94 | 95 | :::: 96 | 97 | More details about the people behind xDEM, funding sources, and the package's objectives can be found on the **{ref}`credits` pages**. 98 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | coverage.lcov 47 | .cache 48 | nosetests.xml 49 | coverage.xml 50 | *.cover 51 | *.py,cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | doc/_build/ 74 | doc/build/ 75 | doc/source/api/ 76 | doc/.buildinfo 77 | doc/.doctrees 78 | 79 | # PyBuilder 80 | target/ 81 | 82 | # Jupyter Notebook 83 | .ipynb_checkpoints 84 | 85 | # IPython 86 | profile_default/ 87 | ipython_config.py 88 | 89 | # pyenv 90 | .python-version 91 | 92 | # pipenv 93 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 94 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 95 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 96 | # install all needed dependencies. 97 | #Pipfile.lock 98 | 99 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 100 | __pypackages__/ 101 | 102 | # Celery stuff 103 | celerybeat-schedule 104 | celerybeat.pid 105 | 106 | # SageMath parsed files 107 | *.sage.py 108 | 109 | # Environments 110 | .env 111 | .venv 112 | env/ 113 | venv/ 114 | ENV/ 115 | env.bak/ 116 | venv.bak/ 117 | .vim/ 118 | 119 | # Spyder project settings 120 | .spyderproject 121 | .spyproject 122 | 123 | # PyCharm project setting 124 | .idea 125 | 126 | # VS code setting 127 | .vscode/ 128 | !.vscode/settings.json 129 | !.vscode/launch.json 130 | 131 | # Rope project settings 132 | .ropeproject 133 | 134 | # mkdocs documentation 135 | /site 136 | 137 | # mypy 138 | .mypy_cache/ 139 | .dmypy.json 140 | dmypy.json 141 | 142 | # Pyre type checker 143 | .pyre/ 144 | 145 | # Version file 146 | xdem/_version.py 147 | 148 | # Example data downloaded/produced during tests 149 | examples/data/ 150 | tests/test_data/ 151 | 152 | doc/source/basic_examples/ 153 | doc/source/advanced_examples/ 154 | doc/source/gen_modules/ 155 | doc/source/sg_execution_times.rst 156 | examples/basic/temp.tif 157 | examples/advanced/aligned_dem.tif 158 | 159 | # Directory where myst_nb executes jupyter code and cache 160 | doc/jupyter_execute/ 161 | doc/.jupyter_cache/ 162 | 163 | # Workflows outputs created during documentation build 164 | /outputs 165 | -------------------------------------------------------------------------------- /tests/test_ddem.py: -------------------------------------------------------------------------------- 1 | """Functions to test the difference of DEMs tools.""" 2 | 3 | import geoutils as gu 4 | import numpy as np 5 | 6 | import xdem 7 | 8 | 9 | class TestdDEM: 10 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 11 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")) 12 | outlines_1990 = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 13 | 14 | ddem = xdem.dDEM(dem_2009 - dem_1990, start_time=np.datetime64("1990-08-01"), end_time=np.datetime64("2009-08-01")) 15 | 16 | def test_init(self) -> None: 17 | """Test that the dDEM object was instantiated correctly.""" 18 | assert isinstance(self.ddem, xdem.dDEM) 19 | assert isinstance(self.ddem.data, 
np.ma.masked_array) 20 | 21 | assert self.ddem.nodata is (self.dem_2009 - self.dem_1990).nodata 22 | 23 | def test_copy(self) -> None: 24 | """Test that copying works as it should.""" 25 | ddem2 = self.ddem.copy() 26 | 27 | assert isinstance(ddem2, xdem.dDEM) 28 | 29 | ddem2.data += 1 30 | 31 | assert not self.ddem.raster_equal(ddem2) 32 | 33 | def test_filled_data(self) -> None: 34 | """Test that the filled_data property points to the right data.""" 35 | ddem2 = self.ddem.copy() 36 | 37 | assert not np.any(np.isnan(ddem2.data)) or np.all(~ddem2.data.mask) 38 | assert ddem2.filled_data is not None 39 | 40 | assert np.count_nonzero(np.isnan(ddem2.data)) == 0 41 | ddem2.data.ravel()[0] = np.nan 42 | 43 | assert np.count_nonzero(np.isnan(ddem2.data)) == 1 44 | 45 | assert ddem2.filled_data is None 46 | 47 | ddem2.interpolate(method="idw") 48 | 49 | assert ddem2.fill_method is not None 50 | 51 | def test_regional_hypso(self) -> None: 52 | """Test the regional hypsometric approach.""" 53 | ddem = self.ddem.copy() 54 | ddem.data.mask = np.zeros_like(ddem.data, dtype=bool) 55 | rng = np.random.default_rng(42) 56 | ddem.data.mask.ravel()[rng.choice(ddem.data.size, 50000, replace=False)] = True 57 | assert np.count_nonzero(ddem.data.mask) > 0 58 | 59 | assert ddem.filled_data is None 60 | 61 | ddem.interpolate(method="regional_hypsometric", reference_elevation=self.dem_2009, mask=self.outlines_1990) 62 | 63 | assert ddem._filled_data is not None 64 | assert isinstance(ddem.filled_data, np.ndarray) 65 | 66 | assert ddem.filled_data.shape == ddem.data.shape 67 | 68 | assert np.abs(np.nanmean(self.ddem.data - ddem.filled_data)) < 1 69 | 70 | def test_local_hypso(self) -> None: 71 | """Test the local hypsometric approach.""" 72 | ddem = self.ddem.copy() 73 | scott_1990 = self.outlines_1990.query("NAME == 'Scott Turnerbreen'") 74 | ddem.data.mask = np.zeros_like(ddem.data, dtype=bool) 75 | rng = np.random.default_rng(42) 76 | ddem.data.mask.ravel()[rng.choice(ddem.data.size, 50000, replace=False)] = True 77 | assert np.count_nonzero(ddem.data.mask) > 0 78 | 79 | assert ddem.filled_data is None 80 | 81 | ddem.interpolate(method="local_hypsometric", reference_elevation=self.dem_2009.data, mask=scott_1990) 82 | assert np.abs(np.nanmean(self.ddem.data - ddem.filled_data)) < 1 83 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # Autodocumented Makefile for xDEM 2 | # see: https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html 3 | # Dependencies : python3 venv 4 | 5 | ############### GLOBAL VARIABLES ###################### 6 | .DEFAULT_GOAL := help 7 | SHELL := /bin/bash 8 | 9 | # Virtualenv directory name (can be overridden) 10 | ifndef VENV 11 | VENV = "venv" 12 | endif 13 | 14 | # Python global variables definition 15 | PYTHON_VERSION_MIN = 3.10 16 | # Set PYTHON if not defined in command line 17 | # Example: PYTHON="python3.13" make venv to use python 3.13 for the venv 18 | # By default the default python3 of the system. 
19 | ifndef PYTHON 20 | PYTHON = "python3" 21 | endif 22 | PYTHON_CMD=$(shell command -v $(PYTHON)) 23 | 24 | PYTHON_VERSION_CUR=$(shell $(PYTHON_CMD) -c 'import sys; print("%d.%d"% sys.version_info[0:2])') 25 | PYTHON_VERSION_OK=$(shell $(PYTHON_CMD) -c 'import sys; cur_ver = sys.version_info[0:2]; min_ver = tuple(map(int, "$(PYTHON_VERSION_MIN)".split("."))); print(int(cur_ver >= min_ver))') 26 | 27 | ############### Check python version supported ############ 28 | 29 | ifeq (, $(PYTHON_CMD)) 30 | $(error "PYTHON_CMD=$(PYTHON_CMD) not found in $(PATH)") 31 | endif 32 | 33 | ifeq ($(PYTHON_VERSION_OK), 0) 34 | $(error "Requires Python version >= $(PYTHON_VERSION_MIN). Current version is $(PYTHON_VERSION_CUR)") 35 | endif 36 | 37 | ################ MAKE Targets ###################### 38 | 39 | help: ## Show this help 40 | @echo " XDEM MAKE HELP" 41 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' 42 | 43 | .PHONY: venv 44 | venv: ## Create a virtual environment in 'venv' directory if it doesn't exist 45 | @test -d ${VENV} || $(PYTHON_CMD) -m venv ${VENV} 46 | @touch ${VENV}/bin/activate 47 | @${VENV}/bin/python -m pip install --upgrade wheel setuptools pip 48 | 49 | 50 | .PHONY: install 51 | install: venv ## Install xDEM for development (depends on venv) 52 | @test -f ${VENV}/bin/xdem || echo "Installing xdem in development mode" 53 | @test -f ${VENV}/bin/xdem || ${VENV}/bin/pip install -e .[dev] 54 | @test -f .git/hooks/pre-commit || echo "Installing pre-commit hooks" 55 | @test -f .git/hooks/pre-commit || ${VENV}/bin/pre-commit install -t pre-commit 56 | @test -f .git/hooks/pre-push || ${VENV}/bin/pre-commit install -t pre-push 57 | @echo "xDEM installed in development mode in virtualenv ${VENV}" 58 | @echo "To use: source ${VENV}/bin/activate; xdem -h" 59 | 60 | 61 | .PHONY: tests 62 | tests: ## Run tests 63 | @${VENV}/bin/pytest 64 | 65 | ## Clean section 66 | 67 | .PHONY: clean 68 | clean: clean-venv clean-build clean-pyc clean-precommit ## Clean all 69 | 70 | .PHONY: clean-venv 71 | clean-venv: ## Clean the virtual environment 72 | @echo "+ $@" 73 | @rm -rf ${VENV} 74 | 75 | .PHONY: clean-build 76 | clean-build: ## Remove build artifacts 77 | @echo "+ $@" 78 | @rm -rf build/ dist/ .eggs/ 79 | @find . -name '*.egg-info' -exec rm -rf {} + 80 | @find . -name '*.egg' -exec rm -f {} + 81 | 82 | .PHONY: clean-precommit 83 | clean-precommit: ## Remove pre-commit hooks from .git/hooks 84 | @rm -f .git/hooks/pre-commit 85 | @rm -f .git/hooks/pre-push 86 | 87 | .PHONY: clean-pyc 88 | clean-pyc: ## Remove Python cache and artifacts 89 | @echo "+ $@" 90 | @find . -type f -name "*.py[co]" -exec rm -rf {} + 91 | @find . -type d -name "__pycache__" -exec rm -rf {} + 92 | @find . -name '*~' -exec rm -rf {} + 93 | -------------------------------------------------------------------------------- /doc/source/config.md: -------------------------------------------------------------------------------- 1 | --- 2 | file_format: mystnb 3 | jupytext: 4 | formats: md:myst 5 | text_representation: 6 | extension: .md 7 | format_name: myst 8 | kernelspec: 9 | display_name: xdem-env 10 | language: python 11 | name: xdem 12 | --- 13 | # Configuration 14 | 15 | xDEM allows configuring the **verbosity level** and the **default behaviour of certain operations on elevation data** (such as 16 | the resampling method for reprojection, or pixel interpretation) directly at the package level.
17 | 18 | (verbosity)= 19 | ## Verbosity level 20 | 21 | To configure the verbosity level (or logging) for xDEM, you can utilize Python's built-in `logging` module. This module 22 | has five levels of verbosity that are, in ascending order of severity: `DEBUG`, `INFO`, `WARNING`, `ERROR` and `CRITICAL`. 23 | Setting a level prints output from that level and all others of higher severity. Logging also allows you to specify other aspects, 24 | such as the destination of the output (console, file). 25 | 26 | ```{important} 27 | **The default verbosity level is `WARNING`, implying that `INFO` and `DEBUG` do not get printed**. Use the basic configuration 28 | as below to set up an `INFO` level. 29 | ``` 30 | 31 | To specify the verbosity level, set up a logging configuration at the start of your script: 32 | 33 | ```{code-cell} ipython3 34 | import logging 35 | 36 | # Basic configuration to simply print info 37 | logging.basicConfig(level=logging.INFO) 38 | ``` 39 | 40 | Optionally, you can specify the logging date, format, and handlers (destinations). 41 | 42 | ```{code-cell} ipython3 43 | 44 | # More advanced configuration 45 | logging.basicConfig( 46 | level=logging.INFO, 47 | format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', 48 | datefmt='%Y-%m-%d %H:%M:%S', 49 | handlers=[ 50 | logging.FileHandler('app.log'), # Log messages will be saved to this file 51 | logging.StreamHandler() # Log messages will also be printed to the console 52 | ]) 53 | ``` 54 | 55 | The above configuration will log messages with a severity level of `INFO` and above, including timestamps, logger names, and 56 | log levels in the output. You can change the logging level as needed. 57 | 58 | 59 | ## Raster–vector–point operations 60 | 61 | To change the configuration at the package level regarding operations for rasters, vectors and points, see 62 | [GeoUtils' configuration](https://geoutils.readthedocs.io/en/stable/config.html). 63 | 64 | For instance, this allows defining a preferred resampling algorithm used when interpolating and reprojecting 65 | (e.g., bilinear, cubic), or the default behaviour linked to pixel interpretation during point–raster comparison. 66 | These changes will then apply to all your operations in xDEM, such as coregistration. 67 | 68 | ## Profiling 69 | 70 | GeoUtils has a built-in profiling tool that can be used to provide more insight into the memory and time use of 71 | a function if needed. It can be used, as explained in 72 | [GeoUtils' profiling](https://geoutils.readthedocs.io/en/stable/profiling.html), on any xDEM function with a simple decorator: 73 | 74 | 75 | ```{code-cell} ipython3 76 | from geoutils import profiler 77 | 78 | @profiler.profile("my profiled function name", memprof=True, interval=0.5) # type: ignore 79 | def my_xdem_function(): 80 | ... 81 | ``` 82 | 83 | Currently, some processes are already profiled, with a memory consumption report every 0.05 seconds: 84 | - DEM initialization, 85 | - all terrain attribute computations of {class}`xdem.DEM`, 86 | - all coregistration processing through the {func}`xdem.Coreg.fit_and_apply` and {func}`xdem.DEM.coregister_3d` functions. 87 | -------------------------------------------------------------------------------- /GOVERNANCE.md: -------------------------------------------------------------------------------- 1 | # Governance Policy 2 | 3 | This document provides the governance policy for the Project.
Maintainers agree to this policy, and to abide by all Project policies
4 | including the [code of conduct](./CODE_OF_CONDUCT.md), by adding their name to the [AUTHORS.md file](./AUTHORS.md).
5 | 
6 | ## 1. Roles.
7 | 
8 | This project may include the following roles. Additional roles may be adopted and documented by the Project.
9 | 
10 | **1.1. Maintainers**. Maintainers are responsible for organizing activities around developing, maintaining, and updating
11 | the Project. Maintainers are also responsible for determining consensus. This Project may add or remove Maintainers with
12 | the approval of the current Maintainers. All past Maintainers will be listed as Emeritus maintainers, and may rejoin
13 | at any time.
14 | 
15 | **1.2. Contributors**. Contributors are those that have made contributions to the Project.
16 | 
17 | ## 2. Decisions.
18 | 
19 | **2.1. Consensus-Based Decision Making**. Projects make decisions through consensus of the Maintainers. While explicit
20 | agreement of all Maintainers is preferred, it is not required for consensus. Rather, the Maintainers will determine
21 | consensus based on their good faith consideration of a number of factors, including the dominant view of the
22 | Contributors and nature of support and objections. The Maintainers will document evidence of consensus in accordance
23 | with these requirements.
24 | 
25 | **2.2. Appeal Process**. Decisions may be appealed by opening an issue and that appeal will be considered by the
26 | Maintainers in good faith, who will respond in writing within a reasonable time. If the Maintainers deny the appeal,
27 | the appeal may be brought before the Organization Steering Committee, who will also respond in writing within a reasonable
28 | time.
29 | 
30 | ## 3. How We Work.
31 | 
32 | **3.1. Openness**. Participation is open to anyone who is directly and materially affected by the activity in question.
33 | There shall be no undue financial barriers to participation.
34 | 
35 | **3.2. Balance**. The development process should balance the interests of Contributors and other stakeholders.
36 | Contributors from diverse interest categories shall be sought with the objective of achieving balance.
37 | 
38 | **3.3. Coordination and Harmonization**. Good faith efforts shall be made to resolve potential conflicts or
39 | incompatibility between releases in this Project.
40 | 
41 | **3.4. Consideration of Views and Objections**. Prompt consideration shall be given to the written views and
42 | objections of all Contributors.
43 | 
44 | **3.5. Written procedures**. This governance document and other materials documenting this project's development
45 | process shall be available to any interested person.
46 | 
47 | ## 4. No Confidentiality.
48 | 
49 | Information disclosed in connection with any Project activity, including but not limited to meetings, contributions,
50 | and submissions, is not confidential, regardless of any markings or statements to the contrary.
51 | 
52 | ## 5. Trademarks.
53 | 
54 | Any names, trademarks, logos, or goodwill developed by and associated with the Project (the "Marks") are controlled by
55 | the Organization. Maintainers may only use these Marks in accordance with the Organization's trademark policy. If a
56 | Maintainer resigns or is removed, any rights the Maintainer may have in the Marks revert to the Organization.
57 | 
58 | ## 6. Amendments.
59 | 60 | Amendments to this governance policy may be made by affirmative vote of 2/3 of all Maintainers, with approval by the 61 | Organization's Steering Committee. 62 | 63 | --- 64 | Part of MVG-0.1-beta. 65 | Made with love by GitHub. Licensed under the [CC-BY 4.0 License](https://creativecommons.org/licenses/by/4.0/). 66 | -------------------------------------------------------------------------------- /doc/source/code/robust_mean_std.py: -------------------------------------------------------------------------------- 1 | """Plot example of NMAD/median as robust estimators for guide page.""" 2 | 3 | import geoutils as gu 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | 7 | # Create example distribution 8 | dh_inliers = np.random.default_rng(42).normal(loc=-5, scale=3, size=10**6) 9 | 10 | # Add outliers 11 | dh_outliers = np.concatenate( 12 | ( 13 | np.repeat(-34, 600), 14 | np.repeat(-33, 1800), 15 | np.repeat(-32, 3600), 16 | np.repeat(-31, 8500), 17 | np.repeat(-30, 15000), 18 | np.repeat(-29, 9000), 19 | np.repeat(-28, 3800), 20 | np.repeat(-27, 1900), 21 | np.repeat(-26, 700), 22 | ) 23 | ) 24 | dh_all = np.concatenate((dh_inliers, dh_outliers)) 25 | 26 | # Get traditional and robust statistics on all data 27 | mean_dh = np.nanmean(dh_all) 28 | median_dh = np.nanmedian(dh_all) 29 | 30 | std_dh = np.nanstd(dh_all) 31 | nmad_dh = gu.stats.nmad(dh_all) 32 | 33 | # Get traditional and robust statistics on inlier data 34 | mean_dh_in = np.nanmean(dh_inliers) 35 | median_dh_in = np.nanmedian(dh_inliers) 36 | 37 | std_dh_in = np.nanstd(dh_inliers) 38 | nmad_dh_in = gu.stats.nmad(dh_inliers) 39 | 40 | # Plot 41 | fig, ax = plt.subplots() 42 | h1 = ax.hist(dh_inliers, bins=np.arange(-40, 25), density=False, color="gray", label="Inlier data") 43 | h2 = ax.hist(dh_outliers, bins=np.arange(-40, 25), density=False, color="red", label="Outlier data") 44 | 45 | max_count = max(h1[0]) 46 | ax.vlines(x=[mean_dh_in, median_dh_in], ymin=0, ymax=max_count, colors=["tab:gray", "black"]) 47 | ax.vlines( 48 | x=[mean_dh_in - std_dh_in, mean_dh_in + std_dh_in, median_dh_in - nmad_dh_in, median_dh_in + nmad_dh_in], 49 | ymin=0, 50 | ymax=max_count, 51 | colors=["gray", "gray", "black", "black"], 52 | linestyles="dashed", 53 | ) 54 | 55 | ax.vlines(x=[mean_dh, median_dh], ymin=0, ymax=max_count, colors=["red", "darkred"]) 56 | ax.vlines( 57 | x=[mean_dh - std_dh, mean_dh + std_dh, median_dh - nmad_dh, median_dh + nmad_dh], 58 | ymin=0, 59 | ymax=max_count, 60 | colors=["red", "red", "darkred", "darkred"], 61 | linestyles="dashed", 62 | ) 63 | 64 | ax.set_xlim((-40, 25)) 65 | ax.set_xlabel("Elevation differences (m)") 66 | ax.set_ylabel("Count") 67 | 68 | from matplotlib.patches import Rectangle 69 | 70 | handles = [ 71 | Rectangle((0, 0), 1, 1, color=h1[-1][0].get_facecolor(), alpha=1), 72 | Rectangle((0, 0), 1, 1, color=h2[-1][0].get_facecolor(), alpha=1), 73 | ] 74 | labels = ["Inlier data", "Outlier data"] 75 | 76 | data_legend = ax.legend(handles=handles, labels=labels, loc="upper right") 77 | ax.add_artist(data_legend) 78 | 79 | # Legends 80 | p1 = plt.plot([], [], color="red", label=f"Mean: {np.round(mean_dh, 2)} m") 81 | p2 = plt.plot([], [], color="red", linestyle="dashed", label=f"±STD: {np.round(std_dh, 2)} m") 82 | p3 = plt.plot([], [], color="darkred", label=f"Median: {np.round(median_dh, 2)} m") 83 | p4 = plt.plot([], [], color="darkred", linestyle="dashed", label=f"±NMAD: {np.round(nmad_dh, 2)} m") 84 | first_legend = ax.legend(handles=[p[0] for p in [p1, p2, p3, p4]], 
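                          # Note: a new call to ax.legend() replaces the currently active legend,
                          # so each legend is preserved explicitly with ax.add_artist() below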
loc="center right", title="All data") 85 | ax.add_artist(first_legend) 86 | 87 | p1 = plt.plot([], [], color="gray", label=f"Mean: {np.round(mean_dh_in, 2)} m") 88 | p2 = plt.plot([], [], color="gray", linestyle="dashed", label=f"±STD: {np.round(std_dh_in, 2)} m") 89 | p3 = plt.plot([], [], color="black", label=f"Median: {np.round(median_dh_in, 2)} m") 90 | p4 = plt.plot([], [], color="black", linestyle="dashed", label=f"±NMAD: {np.round(nmad_dh_in, 2)} m") 91 | second_legend = ax.legend(handles=[p[0] for p in [p1, p2, p3, p4]], loc="center left", title="Inlier data") 92 | ax.add_artist(second_legend) 93 | 94 | ax.set_title("Effect of outliers on estimating\ncentral tendency and dispersion") 95 | -------------------------------------------------------------------------------- /doc/source/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: xDEM 3 | --- 4 | 5 | ::::{grid} 6 | :reverse: 7 | :gutter: 2 1 1 1 8 | :margin: 4 4 1 1 9 | 10 | :::{grid-item} 11 | :columns: 4 12 | 13 | ```{image} ./_static/xdem_logo_only.svg 14 | :width: 300px 15 | :class: only-light 16 | ``` 17 | 18 | ```{image} ./_static/xdem_logo_only_dark.svg 19 | :width: 300px 20 | :class: only-dark 21 | ``` 22 | ::: 23 | 24 | :::{grid-item} 25 | :columns: 8 26 | :class: sd-fs-3 27 | :child-align: center 28 | 29 | xDEM aims at making the analysis of digital elevation models **easy**, **modular** and **robust**. 30 | 31 | :::: 32 | 33 | :::{admonition} Announcement 34 | :class: tip 35 | :class: margin 36 | 37 | xDEM `v0.1` is released, with all core features envisioned at creation 4 years ago 🎉! 38 | 39 | We are **merging efforts with [demcompare](https://github.com/CNES/demcompare)** to combine the best of both tools into one! 40 | 41 | We are working on **adding a ``dem`` Xarray accessor** with native Dask support for 2025. 42 | ::: 43 | 44 | xDEM is **tailored to perform quantitative analysis that implicitly understands the intricacies of elevation data**, 45 | both from a **georeferencing viewpoint** (vertical referencing, nodata values, projection, pixel interpretation) and 46 | a **statistical viewpoint** (outlier robustness, specificities of 3D alignment and error structure). 47 | 48 | It exposes **an intuitive object-based API to foster accessibility**, and strives **to be computationally scalable** 49 | through Dask. 50 | 51 | Additionally, through its sister-package [GeoUtils](https://geoutils.readthedocs.io/en/stable/), xDEM is built on top 52 | of core geospatial packages (Rasterio, GeoPandas, PyProj) and numerical packages (NumPy, Xarray, SciPy) to provide 53 | **consistent higher-level functionalities at the interface of DEMs and elevation point cloud objects**. 54 | 55 | ---------------- 56 | 57 | # Where to start? 58 | 59 | ::::{grid} 1 2 2 3 60 | :gutter: 1 1 1 2 61 | 62 | :::{grid-item-card} {material-regular}`edit_note;2em` About xDEM 63 | :link: about-xdem 64 | :link-type: ref 65 | 66 | Learn more about why we developed xDEM. 67 | 68 | +++ 69 | [Learn more »](about-xdem) 70 | ::: 71 | 72 | :::{grid-item-card} {material-regular}`data_exploration;2em` Quick start 73 | :link: quick-start 74 | :link-type: ref 75 | 76 | Run a short example of the package functionalities. 77 | 78 | +++ 79 | [Learn more »](quick-start) 80 | ::: 81 | 82 | :::{grid-item-card} {material-regular}`preview;2em` Features 83 | :link: dem-class 84 | :link-type: ref 85 | 86 | Dive into the full documentation. 
87 | 
88 | +++
89 | [Learn more »](dem-class)
90 | :::
91 | 
92 | ::::
93 | 
94 | ----------------
95 | 
96 | 
97 | ```{toctree}
98 | :caption: Getting started
99 | :maxdepth: 2
100 | 
101 | about_xdem
102 | how_to_install
103 | quick_start
104 | citation
105 | ```
106 | 
107 | ```{toctree}
108 | :caption: Features
109 | :maxdepth: 2
110 | 
111 | elevation_objects
112 | vertical_ref
113 | terrain
114 | coregistration
115 | biascorr
116 | gapfill
117 | uncertainty
118 | ```
119 | 
120 | ```{toctree}
121 | :caption: Resources
122 | :maxdepth: 2
123 | 
124 | guides
125 | cheatsheet
126 | ecosystem
127 | ```
128 | 
129 | ```{toctree}
130 | :caption: Gallery of examples
131 | :maxdepth: 2
132 | 
133 | basic_examples/index.rst
134 | advanced_examples/index.rst
135 | ```
136 | 
137 | ```{toctree}
138 | :caption: Reference
139 | :maxdepth: 2
140 | 
141 | global_cli_information
142 | api
143 | config
144 | release_notes
145 | ```
146 | 
147 | ```{toctree}
148 | :caption: Project information
149 | :maxdepth: 2
150 | 
151 | publis
152 | credits
153 | ```
154 | 
155 | # Indices and tables
156 | 
157 | - {ref}`genindex`
158 | - {ref}`modindex`
159 | - {ref}`search`
160 | 
--------------------------------------------------------------------------------
/examples/basic/plot_infer_spatial_correlation.py:
--------------------------------------------------------------------------------
1 | """
2 | Spatial correlation of errors
3 | =============================
4 | 
5 | Digital elevation models have errors that are spatially correlated due to instrument or processing effects. Here, we
6 | rely on a non-stationary spatial statistics framework to estimate and model spatial correlations in elevation error.
7 | We use a sum of variogram forms to model this correlation, with stable terrain as an error proxy for moving terrain.
8 | 
9 | **References:** `Rolstad et al. (2009) `_, `Hugonnet et al. (2022) `_.
10 | """
11 | 
12 | import geoutils as gu
13 | 
14 | # sphinx_gallery_thumbnail_number = 1
15 | import xdem
16 | 
17 | # %%
18 | # We load a difference of DEMs at Longyearbyen, already coregistered using :ref:`nuthkaab` as shown in
19 | # the :ref:`sphx_glr_basic_examples_plot_nuth_kaab.py` example. We also load the glacier outlines, corresponding here to
20 | # moving terrain.
21 | dh = xdem.DEM(xdem.examples.get_path("longyearbyen_ddem"))
22 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines"))
23 | 
24 | # %%
25 | # Then, we run the pipeline for inference of the spatial correlation of errors from stable terrain (*Note: we pass a*
26 | # ``random_state`` *argument to ensure a fixed, reproducible random subsampling in this example*). We ask for a fit with
27 | # a Gaussian model for short range (as it is passed first), and a Spherical model for long range (as it is passed second):
28 | (
29 |     df_empirical_variogram,
30 |     df_model_params,
31 |     spatial_corr_function,
32 | ) = xdem.spatialstats.infer_spatial_correlation_from_stable(
33 |     dvalues=dh, list_models=["Gaussian", "Spherical"], unstable_mask=glacier_outlines, random_state=42
34 | )
35 | 
36 | # %%
37 | # The first output corresponds to the dataframe of the empirical variogram, by default estimated using Dowd's estimator
38 | # and a circular sampling scheme in SciKit-GStat (following Fig. S13 of Hugonnet et al. (2022)).
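# (Dowd's estimator is a robust, median-based alternative to the classical variance estimator
# of the variogram, which makes the estimation less sensitive to remaining elevation outliers.)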
39 | # The ``lags`` column is the upper bound of the spatial lag bins (the lower bound of the first bin being 0), the ``exp`` column is the
40 | # "experimental" variance value of the variogram in that bin, the ``count`` column the number of pairwise samples, and
41 | # ``err_exp`` the 1-sigma error of the "experimental" variance, if more than one variogram is estimated with the
42 | # ``n_variograms`` parameter.
43 | df_empirical_variogram
44 | 
45 | # %%
46 | # The second output is the dataframe of optimized model parameters (``range``, ``sill``, and possibly ``smoothness``)
47 | # for a sum of Gaussian and Spherical models:
48 | df_model_params
49 | 
50 | # %%
51 | # The third output is the spatial correlation function with spatial lags, derived from the variogram:
52 | for spatial_lag in [0, 100, 1000, 10000, 30000]:
53 |     print(
54 |         "Errors are correlated at {:.1f}% for a {:,.0f} m spatial lag".format(
55 |             spatial_corr_function(spatial_lag) * 100, spatial_lag
56 |         )
57 |     )
58 | 
59 | # %%
60 | # We can plot the empirical variogram and its model on a non-linear X-axis to identify the multi-scale correlations.
61 | xdem.spatialstats.plot_variogram(
62 |     df=df_empirical_variogram,
63 |     list_fit_fun=[xdem.spatialstats.get_variogram_model_func(df_model_params)],
64 |     xlabel="Spatial lag (m)",
65 |     ylabel="Variance of\nelevation differences (m)",
66 |     xscale_range_split=[100, 1000],
67 | )
68 | 
69 | # %%
70 | # This pipeline will not always work optimally with default parameters: variogram sampling is more robust with a lot of
71 | # samples but takes a long time to compute, and the fitting might require multiple tries for model forms, and possibly bounds
72 | # and first guesses, to help the least-squares optimization. **To learn how to tune more parameters and use the
73 | # subfunctions, see the gallery example:** :ref:`sphx_glr_advanced_examples_plot_variogram_estimation_modelling.py`!
74 | 
--------------------------------------------------------------------------------
/AUTHORS.md:
--------------------------------------------------------------------------------
1 | # Credits
2 | 
3 | ---
4 | © 2025 **xDEM developers**.
5 | 
6 | **xDEM** is licensed under the permissive Apache 2.0 license (see LICENSE file).
7 | 
8 | All contributors listed in this document are part of the **xDEM developers**, and their
9 | contributions are subject to the project's copyright under the terms of the
10 | [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0).
11 | 
12 | This file keeps track of authors' contributions.
13 | 14 | ## Maintainers / Steering committee 15 | 16 | --- 17 | 18 | | Full name | GitHub | Affiliation | Email | 19 | |----------------------------------|--------------------------------------------|--------------------------------|-----------------------------------------| 20 | | **Romain Hugonnet** | [@rhugonnet](https://github.com/rhugonnet) | University of Alaska Fairbanks | [📧](mailto:romain.hugonnet@gmail.com) | 21 | | **Amaury Dehecq** | [@adehecq](https://github.com/adehecq) | Université Grenoble Alpes, IRD | N/A | 22 | | **Valentine Bellet** | [@belletva](https://github.com/belletva) | CNES (French Space Agency) | [📧](mailto:valentine.bellet@cnes.fr) | 23 | | **Alice de Bardonnèche-Richard** | [@adebardo](https://github.com/adebardo) | CS Group | [📧](mailto:alice.de-bardonneche-richard@cs-soprasteria.com) | 24 | 25 | ## Emeritus maintainers 26 | 27 | --- 28 | 29 | | Full name | GitHub | Affiliation | Email | 30 | |----------------------------|------|----------------------------|--------------------------------------------------------------| 31 | | **Erik Schytt Mannerfelt** | [@erikmannerfelt](https://github.com/erikmannerfelt) | University of Oslo | N/A | 32 | | **Emmanuel Dubois** | [@duboise-cnes](https://github.com/duboise-cnes) | CNES (French Space Agency) | [📧](mailto:emmanuel.dubois@cnes.fr) | 33 | 34 | ## Contributors 35 | 36 | --- 37 | 38 | - **Valentin Schaffner** [@vschaffn](https://github.com/vschaffn) 39 | - **Tom Chudley** [@trchudley](https://github.com/trchudley) 40 | - **Friedrich Knuth** [@friedrichknuth](https://github.com/friedrichknuth) 41 | - **Marine Bouchet** [@marinebcht](https://github.com/marinebcht) 42 | - **Andrew Tedstone** [@atedstone](https://github.com/atedstone) 43 | - **Zhihao Liu** [@liuh886](https://github.com/liuh886) 44 | - **Diego Cusicanqui** [@cusicand](https://github.com/cusicand) 45 | - **Alessandro Gentilini** [@alessandro-gentilini](https://github.com/alessandro-gentilini) 46 | - **Ferdinand Schenck** [@fnands](https://github.com/fnands) 47 | - **Johannes Landmann** [@jlandmann](https://github.com/jlandmann) 48 | - **Bob McNabb** [@iamdonovan](https://github.com/iamdonovan) 49 | - **Enrico Mattea** [@MatteaE](https://github.com/MatteaE) 50 | - **Amelie Froessl** [@ameliefroessl](https://github.com/ameliefroessl) 51 | - **Simon Gascoin** [@sgascoin](https://github.com/sgascoin) 52 | - **Clara Quinto** [@quinto-clara](https://github.com/quinto-clara) 53 | 54 | ## Original creators 55 | 56 | --- 57 | 58 | - **Romain Hugonnet** [@rhugonnet](https://github.com/rhugonnet) 59 | - **Amaury Dehecq** [@adehecq](https://github.com/adehecq) 60 | - **Erik Schytt Mannerfelt** [@erikmannerfelt](https://github.com/erikmannerfelt) 61 | 62 | ## Federation with Demcompare 63 | 64 | --- 65 | 66 | - **Emmanuel Dubois** [@duboise-cnes](https://github.com/duboise-cnes) 67 | - **Alice de Bardonnèche-Richard** [@adebardo](https://github.com/adebardo) 68 | 69 | All Demcompare authors and creators can be found [here](https://github.com/CNES/demcompare/blob/master/AUTHORS.md). 70 | 71 | ## External contributions 72 | 73 | This project includes code originally part of the [texshade-py](https://github.com/fasiha/texshade-py/tree/main) project. 
74 | 
--------------------------------------------------------------------------------
/examples/basic/plot_icp_coregistration.py:
--------------------------------------------------------------------------------
1 | """
2 | Iterative closest point coregistration
3 | ======================================
4 | 
5 | Iterative closest point (ICP) is a registration method accounting for both rotations and translations.
6 | 
7 | It is used primarily to correct rotations, as it generally performs worse than :ref:`nuthkaab` for sub-pixel shifts.
8 | Fortunately, xDEM provides the best of both worlds by allowing a combination of the two methods in a pipeline,
9 | demonstrated below!
10 | 
11 | **References**: `Besl and McKay (1992) `_.
12 | """
13 | 
14 | # sphinx_gallery_thumbnail_number = 2
15 | import matplotlib.pyplot as plt
16 | import numpy as np
17 | 
18 | import xdem
19 | 
20 | # %%
21 | # We load a DEM and crop it to a single mountain on Svalbard, called Battfjellet.
22 | # Its aspects vary in every direction, making it a good candidate for coregistration exercises.
23 | dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
24 | 
25 | subset_extent = [523000, 8660000, 529000, 8665000]
26 | dem = dem.crop(subset_extent)
27 | 
28 | # %%
29 | # Let's plot a hillshade of the mountain for context.
30 | xdem.terrain.hillshade(dem).plot(cmap="gray")
31 | 
32 | # %%
33 | # To try the effects of rotation, we can artificially rotate the DEM using a transformation matrix.
34 | # Here, a rotation of just one degree is attempted.
35 | # But keep in mind: the window is 6 km wide; 1 degree of rotation at the center corresponds to a 52 m vertical difference at the edges!
36 | 
37 | rotation = np.deg2rad(1)
38 | rotation_matrix = np.array(
39 |     [
40 |         [np.cos(rotation), 0, np.sin(rotation), 0],
41 |         [0, 1, 0, 0],
42 |         [-np.sin(rotation), 0, np.cos(rotation), 0],
43 |         [0, 0, 0, 1],
44 |     ]
45 | )
46 | centroid = [dem.bounds.left + dem.width / 2, dem.bounds.bottom + dem.height / 2, np.nanmean(dem)]
47 | # This will apply the matrix along the center of the DEM
48 | rotated_dem = xdem.coreg.apply_matrix(dem, matrix=rotation_matrix, centroid=centroid)
49 | 
50 | # %%
51 | # We can plot the difference between the original and rotated DEM.
52 | # It is now artificially tilting from east down to the west.
53 | diff_before = dem - rotated_dem
54 | diff_before.plot(cmap="RdYlBu", vmin=-20, vmax=20, cbar_title="Elevation differences (m)")
55 | plt.show()
56 | 
57 | # %%
58 | # As previously mentioned, ``NuthKaab`` works well at the sub-pixel scale but does not handle rotation.
59 | # ``ICP`` works with rotation but lacks sub-pixel accuracy.
60 | # Luckily, these can be combined!
61 | # Any :class:`xdem.coreg.Coreg` subclass can be added with another, making a :class:`xdem.coreg.CoregPipeline`.
62 | # With a pipeline, each step is run sequentially, potentially leading to a better result.
63 | # Let's try all three approaches: ``ICP``, ``NuthKaab`` and ``ICP + NuthKaab``.
64 | 
65 | approaches = [
66 |     (xdem.coreg.ICP(), "ICP"),
67 |     (xdem.coreg.NuthKaab(), "NuthKaab"),
68 |     (xdem.coreg.ICP() + xdem.coreg.NuthKaab(), "ICP + NuthKaab"),
69 | ]
70 | 
71 | 
72 | plt.figure(figsize=(6, 12))
73 | 
74 | for i, (approach, name) in enumerate(approaches):
75 |     corrected_dem = approach.fit_and_apply(
76 |         reference_elev=dem,
77 |         to_be_aligned_elev=rotated_dem,
78 |     )
79 | 
80 |     diff = dem - corrected_dem
81 | 
82 |     ax = plt.subplot(3, 1, i + 1)
83 |     plt.title(name)
84 |     diff.plot(cmap="RdYlBu", vmin=-20, vmax=20, ax=ax, cbar_title="Elevation differences (m)")
85 | 
86 | plt.tight_layout()
87 | plt.show()
88 | 
89 | 
90 | # %%
91 | # The results show what we expected:
92 | #
93 | # - **ICP** alone handled the rotational offset, but left a horizontal offset as it is not sub-pixel accurate (in this case, the resolution is 20 x 20 m).
94 | # - **Nuth and Kääb** barely helped at all, since the offset is purely rotational.
95 | # - **ICP + Nuth and Kääb** first handled the rotation, then fit the reference with sub-pixel accuracy.
96 | #
97 | # The last result is an almost identical raster that was offset but then corrected back to its original position!
98 | 
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to contribute
2 | 
3 | We welcome new contributions to xDEM, which is still very much in expansion!
4 | Below is a guide to contributing to xDEM step by step, ensuring tests are passing and the documentation is updated.
5 | 
6 | ## Overview: making a contribution
7 | 
8 | The technical steps to contributing to xDEM are:
9 | 
10 | 1. Fork `GlacioHack/xdem` and clone your fork repository locally,
11 | 2. Set up the development environment **(see section "Setup" below)**,
12 | 3. Create a branch for the new feature or bug fix,
13 | 4. Make your changes,
14 | 5. Add or modify related tests in `tests/` **(see section "Tests" below)**,
15 | 6. Add or modify related documentation in `doc/` **(see section "Documentation" below)**,
16 | 7. Commit your changes,
17 | 8. Run `pre-commit` separately if not installed as a git hook **(see section "Linting" below)**,
18 | 9. Push to your fork,
19 | 10. Open a pull request from GitHub to discuss and eventually merge.
20 | 
21 | ## Development environment
22 | 
23 | xDEM currently supports Python versions 3.10 to 3.13 (see `dev-environment.yml` for detailed dependencies), which are
24 | tested in a continuous integration (CI) workflow running on GitHub Actions.
25 | 
26 | When you open a PR on xDEM, a single linting action and 9 test actions will automatically start, corresponding to all
27 | supported Python versions (3.10, 3.11, 3.12 and 3.13) and operating systems (Ubuntu, Mac, Windows). The coverage change of the tests will also
28 | be reported by Coveralls.
29 | 
30 | ### Setup
31 | 
32 | #### With `mamba`
33 | Clone the git repo and create a `mamba` environment (see how to install `mamba` in the [mamba documentation](https://mamba.readthedocs.io/en/latest/)):
34 | 
35 | ```bash
36 | git clone https://github.com/GlacioHack/xdem.git
37 | cd xdem
38 | mamba env create -f dev-environment.yml  # Add '-n custom_name' if you want.
39 | mamba activate xdem-dev  # Or any other name specified above
40 | ```
41 | 
42 | #### With `pip`
43 | ```bash
44 | git clone https://github.com/GlacioHack/xdem.git
45 | cd xdem
46 | make install
47 | ```
48 | 
49 | ### Tests
50 | 
51 | At least one test per feature (in the associated `tests/test_*.py` file) should be included in the PR, using `pytest` (see existing tests for examples).
52 | The structure of test modules and functions in `tests/` largely mirrors that of the package modules and functions in `xdem/`.
53 | 
54 | To run the entire test suite, run `pytest` from the root of the repository:
55 | ```bash
56 | pytest
57 | ```
58 | 
59 | Running `pytest` will trigger a script that automatically downloads the test data from [https://github.com/GlacioHack/xdem-data](https://github.com/GlacioHack/xdem-data) used to run all tests.
60 | 
61 | RichDEM should only be used for testing purposes within the xDEM project. The functionality of xDEM must not depend on RichDEM.
62 | 
63 | ### Documentation
64 | 
65 | If your changes need to be reflected in the documentation, update the related pages located in `doc/source/`. The documentation is written in MyST markdown syntax, similar to GitHub's default Markdown (see [MyST-NB](https://myst-nb.readthedocs.io/en/latest/authoring/text-notebooks.html) for details).
66 | 
67 | To ensure that the documentation is building properly after your changes, if you are on Linux, you can run `pytest tests/test_doc.py`, which is equivalent to directly calling `sphinx-build source/ build/html/` from the `doc/` folder. On Windows and Mac, the documentation build is not maintained, so you can wait until the PR is opened for it to be checked on Linux by the CI.
68 | 
69 | ### Formatting and linting
70 | 
71 | Install and run `pre-commit` from the root of the repository (such as with `mamba install pre-commit`, see [pre-commit documentation](https://pre-commit.com/) for details),
72 | which will use `.pre-commit-config.yaml` to check for spelling errors, import sorting, type checking, formatting and linting:
73 | 
74 | ```bash
75 | pre-commit run --all
76 | ```
77 | 
78 | You can then commit and push those changes.
79 | Optionally, `pre-commit` can be installed as a git hook to ensure checks pass before committing.
80 | 
81 | ### Final steps
82 | 
83 | That's it! If the tests and documentation are passing, or if you need help to make those work, you can open a PR.
84 | 
85 | We'll receive word of your PR as soon as it is opened, and should follow up shortly to discuss the changes, and eventually give approval to merge. Thank you so much for contributing!
86 | 
87 | ### Rights
88 | 
89 | The license (see LICENSE) applies to all contributions.
90 | 
--------------------------------------------------------------------------------
/examples/basic/plot_spatial_error_propagation.py:
--------------------------------------------------------------------------------
1 | """
2 | Spatial propagation of elevation errors
3 | =======================================
4 | 
5 | Propagating elevation errors spatially while accounting for heteroscedasticity and spatial correlation is complex. It
6 | requires computing the pairwise correlations between all points of an area of interest (be it for a sum, mean, or
7 | other operation), which is computationally intensive. Here, we rely on published formulations to perform
8 | computationally efficient spatial propagation for the mean of elevation (or elevation differences) in an area.
9 | 
10 | **References:** `Rolstad et al. (2009) `_, `Hugonnet et al. (2022) `_.
11 | """ 12 | 13 | import geoutils as gu 14 | import matplotlib.pyplot as plt 15 | 16 | # sphinx_gallery_thumbnail_number = 1 17 | import numpy as np 18 | 19 | import xdem 20 | 21 | # %% 22 | # We load the same data, and perform the same calculations on heteroscedasticity and spatial correlations of errors as 23 | # in the :ref:`sphx_glr_basic_examples_plot_infer_heterosc.py` and :ref:`sphx_glr_basic_examples_plot_infer_spatial_correlation.py` 24 | # examples. 25 | 26 | dh = xdem.DEM(xdem.examples.get_path("longyearbyen_ddem")) 27 | ref_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) 28 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) 29 | slope, max_curvature = xdem.terrain.get_terrain_attribute(ref_dem, attribute=["slope", "max_curvature"]) 30 | errors, df_binning, error_function = xdem.spatialstats.infer_heteroscedasticity_from_stable( 31 | dvalues=dh, list_var=[slope, max_curvature], list_var_names=["slope", "maxc"], unstable_mask=glacier_outlines 32 | ) 33 | 34 | # %% 35 | # We use the error map to standardize the elevation differences before variogram estimation, which is more robust 36 | # as it removes the variance variability due to heteroscedasticity. 37 | zscores = dh / errors 38 | emp_variogram, params_variogram_model, spatial_corr_function = xdem.spatialstats.infer_spatial_correlation_from_stable( 39 | dvalues=zscores, list_models=["Gaussian", "Spherical"], unstable_mask=glacier_outlines, random_state=42 40 | ) 41 | 42 | # %% 43 | # With our estimated heteroscedasticity and spatial correlation, we can now perform the spatial propagation of errors. 44 | # We select two glaciers intersecting this elevation change map in Svalbard. The best estimation of their standard error 45 | # is done by directly providing the shapefile (Equation 18, Hugonnet et al., 2022). 46 | areas = [ 47 | glacier_outlines.ds[glacier_outlines.ds["NAME"] == "Brombreen"], 48 | glacier_outlines.ds[glacier_outlines.ds["NAME"] == "Medalsbreen"], 49 | ] 50 | stderr_glaciers = xdem.spatialstats.spatial_error_propagation( 51 | areas=areas, errors=errors, params_variogram_model=params_variogram_model 52 | ) 53 | 54 | for glacier_name, stderr_gla in [("Brombreen", stderr_glaciers[0]), ("Medalsbreen", stderr_glaciers[1])]: 55 | print(f"The error (1-sigma) in mean elevation change for {glacier_name} is {stderr_gla:.2f} meters.") 56 | 57 | # %% 58 | # When passing a numerical area value, we compute an approximation with disk shape (Equation 8, Rolstad et al., 2009). 59 | # This approximation is practical to visualize changes in elevation error when averaging over different area 60 | # sizes, but is less accurate to estimate the standard error of a certain area shape. 
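# As a sketch of the assumption made in this approximation: a numerical area A is treated
# as a disk of radius r = sqrt(A / pi), over which the modelled error correlation is integrated.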
61 | areas = 10 ** np.linspace(1, 12)
62 | stderrs = xdem.spatialstats.spatial_error_propagation(
63 |     areas=areas, errors=errors, params_variogram_model=params_variogram_model
64 | )
65 | plt.plot(areas / 10**6, stderrs)
66 | plt.xlabel("Averaging area (km²)")
67 | plt.ylabel("Standard error (m)")
68 | plt.vlines(
69 |     x=np.pi * params_variogram_model["range"].values[0] ** 2 / 10**6,
70 |     ymin=np.min(stderrs),
71 |     ymax=np.max(stderrs),
72 |     colors="red",
73 |     linestyles="dashed",
74 |     label="Disk area with radius the\n1st correlation range of {:,.0f} meters".format(
75 |         params_variogram_model["range"].values[0]
76 |     ),
77 | )
78 | plt.vlines(
79 |     x=np.pi * params_variogram_model["range"].values[1] ** 2 / 10**6,
80 |     ymin=np.min(stderrs),
81 |     ymax=np.max(stderrs),
82 |     colors="blue",
83 |     linestyles="dashed",
84 |     label="Disk area with radius the\n2nd correlation range of {:,.0f} meters".format(
85 |         params_variogram_model["range"].values[1]
86 |     ),
87 | )
88 | plt.xscale("log")
89 | plt.legend()
90 | plt.show()
91 | 
--------------------------------------------------------------------------------
/examples/advanced/plot_demcollection.py:
--------------------------------------------------------------------------------
1 | """
2 | Working with a collection of DEMs
3 | =================================
4 | 
5 | .. caution:: This functionality might be removed in future package versions.
6 | 
7 | Oftentimes, more than two timestamps (DEMs) are analyzed simultaneously.
8 | One single dDEM only captures one interval, so multiple dDEMs have to be created.
9 | In addition, if multiple masking polygons exist (e.g. glacier outlines from multiple years), these should be accounted for properly.
10 | The :class:`xdem.DEMCollection` is a tool to properly work with multiple timestamps at the same time, and makes calculations of elevation/volume change over multiple years easy.
11 | """
12 | 
13 | from datetime import datetime
14 | 
15 | import geoutils as gu
16 | import matplotlib.pyplot as plt
17 | 
18 | import xdem
19 | 
20 | # %%
21 | # **Example data**.
22 | #
23 | # We can load the DEMs as usual, with the addition that each DEM needs an associated timestamp.
24 | # Since multiple DEMs are in question, the "time dimension" is what keeps them apart.
25 | 
26 | dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
27 | dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem"))
28 | 
29 | 
30 | # %%
31 | # For glacier research (and many other fields), only a subset of the DEMs is usually of interest.
32 | # These parts can be delineated with masks or polygons.
33 | # Here, we have glacier outlines from 1990 and 2009.
34 | outlines = {
35 |     datetime(1990, 8, 1): gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")),
36 |     datetime(2009, 8, 1): gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines_2010")),
37 | }
38 | 
39 | # %%
40 | # To experiment with a longer time-series, we can also fake a 2060 DEM, by simply exaggerating the 1990-2009 change.
41 | 
42 | # Fake a 2060 DEM by assuming three times the 1990-2009 change between 2009 and 2060
43 | dem_2060 = dem_2009 + (dem_2009 - dem_1990).data * 3
44 | 
45 | timestamps = [datetime(1990, 8, 1), datetime(2009, 8, 1), datetime(2060, 8, 1)]
46 | 
47 | # %%
48 | # Now, all data are ready to be collected in an :class:`xdem.DEMCollection` object.
49 | # What we have are:
50 | # 1. Three DEMs from 1990, 2009, and 2060 (the last is artificial)
51 | # 2. 
Two glacier outline timestamps from 1990 and 2009
52 | #
53 | 
54 | demcollection = xdem.DEMCollection(
55 |     dems=[dem_1990, dem_2009, dem_2060], timestamps=timestamps, outlines=outlines, reference_dem=1
56 | )
57 | 
58 | # %%
59 | # We can generate :class:`xdem.dDEM` objects using :func:`xdem.DEMCollection.subtract_dems`.
60 | # In this case, it will generate three dDEMs:
61 | #
62 | # * 1990-2009
63 | # * 2009-2009 (to maintain the ``dems`` and ``ddems`` list length and order)
64 | # * 2060-2009 (note the inverted order; negative change will be positive)
65 | 
66 | _ = demcollection.subtract_dems()
67 | 
68 | # %%
69 | # These are saved internally, but are also returned as a list.
70 | #
71 | # An elevation or volume change series can automatically be generated from the ``DEMCollection``.
72 | # In this case, we should specify *which* glacier we want the change for, as a regional value may not always be required.
73 | # We can look at the glacier called "Scott Turnerbreen", specified in the "NAME" column of the outline data.
74 | # `See here for the outline filtering syntax `_.
75 | 
76 | demcollection.get_cumulative_series(kind="dh", outlines_filter="NAME == 'Scott Turnerbreen'")
77 | 
78 | # %%
79 | # And there we have a cumulative dH series of the glacier Scott Turnerbreen on Svalbard!
80 | # The dDEMs can be visualized to give further context.
81 | 
82 | extent = [
83 |     demcollection.dems[0].bounds.left,
84 |     demcollection.dems[0].bounds.right,
85 |     demcollection.dems[0].bounds.bottom,
86 |     demcollection.dems[0].bounds.top,
87 | ]
88 | 
89 | scott_extent = [518600, 523800, 8666600, 8672300]
90 | 
91 | for i in range(2):
92 |     plt.subplot(1, 2, i + 1)
93 | 
94 |     if i == 0:
95 |         title = "1990 - 2009"
96 |         ddem = demcollection.ddems[0].data.squeeze()
97 |     else:
98 |         title = "2009 - 2060"
99 |         # The 2009 - 2060 dDEM is inverted, since the reference year is 2009
100 |         ddem = -demcollection.ddems[2].data.squeeze()
101 |     plt.title(title)
102 |     plt.imshow(ddem, cmap="RdYlBu", vmin=-50, vmax=50, extent=extent)
103 |     plt.xlim(scott_extent[:2])
104 |     plt.ylim(scott_extent[2:])
105 | 
106 | plt.tight_layout()
107 | plt.show()
108 | 
--------------------------------------------------------------------------------
/examples/advanced/plot_slope_methods.py:
--------------------------------------------------------------------------------
1 | """
2 | Slope and aspect methods
3 | ========================
4 | 
5 | Calculating terrain attributes (not only slope and aspect, but also curvatures) requires estimating the
6 | elevation derivatives of the surface. xDEM offers three different ways to
7 | calculate these derivatives, which can lead to slightly different results.
8 | 
9 | Here is an example of how to generate slope and aspect with each method, and understand their differences.
10 | 
11 | See also the :ref:`terrain-attributes` feature page.
12 | 
13 | **References:** `Horn (1981) `_, `Zevenbergen and Thorne (1987) `_, `Florinsky (2009) `_.
14 | """
15 | 
16 | import matplotlib.pyplot as plt
17 | import numpy as np
18 | 
19 | import xdem
20 | 
21 | # %%
22 | # We open example data.
23 | dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
24 | 
25 | 
26 | def plot_attribute(attribute, cmap, label=None, vlim=None):
27 | 
28 |     # Default to no color limits; vlim can be a symmetric bound (scalar) or a (vmin, vmax) pair
29 |     vlims = {}
30 |     if vlim is not None:
31 |         if isinstance(vlim, (int, np.integer, float, np.floating)):
32 |             vlims = {"vmin": -vlim, "vmax": vlim}
33 |         elif len(vlim) == 2:
34 |             vlims = {"vmin": vlim[0], "vmax": vlim[1]}
35 | 
36 |     attribute.plot(cmap=cmap, cbar_title=label, **vlims)
37 | 
38 |     plt.xticks([])
39 |     plt.yticks([])
40 |     plt.tight_layout()
41 | 
42 |     plt.show()
43 | 
44 | 
45 | # %%
46 | # Slope with method of Horn (1981).
47 | 
48 | # %%
49 | # .. note:: This is the GDAL default, based on a refined approximation of the gradient (page 18, bottom left, and pages 20-21).
50 | 
51 | slope_horn = xdem.terrain.slope(dem, surface_fit="Horn")
52 | 
53 | plot_attribute(slope_horn, "Reds", "Slope of Horn (1981) (°)")
54 | 
55 | # %%
56 | # Slope with method of Zevenbergen and Thorne (1987), Equation 13.
57 | 
58 | slope_zevenberg = xdem.terrain.slope(dem, surface_fit="ZevenbergThorne")
59 | 
60 | plot_attribute(slope_zevenberg, "Reds", "Slope of Zevenberg and Thorne (1987) (°)")
61 | 
62 | # %%
63 | # Slope with method of Florinsky (2009).
64 | 
65 | slope_florinsky = xdem.terrain.slope(dem, surface_fit="Florinsky")
66 | 
67 | plot_attribute(slope_florinsky, "Reds", "Slope of Florinsky (2009) (°)")
68 | 
69 | 
70 | # %%
71 | # We can compute the difference between the different slope computations - for instance, here between the Horn and Zevenberg methods.
72 | 
73 | diff_slope = slope_horn - slope_zevenberg
74 | 
75 | plot_attribute(diff_slope, "RdYlBu", "Slope of Horn (1981) minus\n slope of Zevenberg and Thorne (1987) (°)", vlim=3)
76 | 
77 | # %%
78 | # The differences are negative, implying that the method of Horn always provides flatter slopes.
79 | # Additionally, they seem to occur in places of high curvatures. We verify this by plotting the maximal curvature.
80 | 
81 | maxc = xdem.terrain.max_curvature(dem)
82 | 
83 | plot_attribute(maxc, "RdYlBu", "Maximal curvature (100 m $^{-1}$)", vlim=2)
84 | 
85 | # %%
86 | # We quantify the relationship by computing the median of slope differences in bins of curvatures, and plot the
87 | # result. We define custom bins for curvature, due to its skewed distribution.
88 | 
89 | df_bin = xdem.spatialstats.nd_binning(
90 |     values=diff_slope[:],
91 |     list_var=[maxc[:]],
92 |     list_var_names=["maxc"],
93 |     list_var_bins=30,
94 |     statistics=[np.nanmedian, "count"],
95 | )
96 | 
97 | xdem.spatialstats.plot_1d_binning(
98 |     df_bin,
99 |     var_name="maxc",
100 |     statistic_name="nanmedian",
101 |     label_var="Maximal absolute curvature (100 m$^{-1}$)",
102 |     label_statistic="Slope of Horn (1981) minus\n " "slope of Zevenberg and Thorne (1987) (°)",
103 | )
104 | 
105 | 
106 | # %%
107 | # We perform the same exercise to analyze the differences in terrain aspect. We compute the difference modulo 360°,
108 | # to account for the circularity of aspect.
109 | 
110 | aspect_horn = xdem.terrain.aspect(dem)
111 | aspect_zevenberg = xdem.terrain.aspect(dem, method="ZevenbergThorne")
112 | 
113 | diff_aspect = aspect_horn - aspect_zevenberg
114 | diff_aspect_mod = np.minimum(diff_aspect % 360, 360 - diff_aspect % 360)
115 | 
116 | plot_attribute(
117 |     diff_aspect_mod, "Spectral", "Aspect of Horn (1981) minus\n aspect of Zevenberg and Thorne (1987) (°)", vlim=[0, 90]
118 | )
119 | 
120 | # %%
121 | # Same as for slope, differences in aspect seem to coincide with high curvature areas.
We also observe large
122 | # differences for areas with nearly flat slopes, owing to the high sensitivity of orientation estimation
123 | # for flat terrain.
124 | 
125 | # %%
126 | # .. note:: The default aspect for a 0° slope is 180°, as in GDAL.
127 | 
--------------------------------------------------------------------------------
/tests/test_workflows/conftest.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2025 Centre National d'Etudes Spatiales (CNES).
2 | #
3 | # This file is part of the xDEM project:
4 | # https://github.com/glaciohack/xdem
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | #
9 | # You may obtain a copy of the License at
10 | #
11 | #     http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | """
19 | Fixtures for test_workflows
20 | """
21 | # mypy: disable-error-code=no-untyped-def
22 | 
23 | import pytest
24 | 
25 | import xdem
26 | from xdem.workflows import Accuracy
27 | from xdem.workflows.schemas import TERRAIN_ATTRIBUTES_DEFAULT
28 | 
29 | 
30 | @pytest.fixture()
31 | def get_topo_inputs_config():
32 |     """
33 |     Return minimal configuration for inputs in topo
34 |     """
35 |     return {
36 |         "inputs": {
37 |             "reference_elev": {
38 |                 "path_to_elev": xdem.examples.get_path("longyearbyen_tba_dem"),
39 |                 "path_to_mask": xdem.examples.get_path("longyearbyen_glacier_outlines"),
40 |             }
41 |         },
42 |     }
43 | 
44 | 
45 | @pytest.fixture()
46 | def get_dem_config():
47 |     """
48 |     Return minimal configuration for a single DEM input
49 |     """
50 |     return {
51 |         "path_to_elev": xdem.examples.get_path("longyearbyen_ref_dem"),
52 |         "force_source_nodata": -9999,
53 |         "from_vcrs": "Ellipsoid",
54 |         "to_vcrs": "EGM96",
55 |         "path_to_mask": xdem.examples.get_path("longyearbyen_glacier_outlines"),
56 |         "downsample": 1,
57 |     }
58 | 
59 | 
60 | @pytest.fixture()
61 | def get_accuracy_inputs_config():
62 |     """
63 |     Return minimal configuration for inputs in accuracy
64 |     """
65 |     return {
66 |         "inputs": {
67 |             "reference_elev": {
68 |                 "path_to_elev": xdem.examples.get_path("longyearbyen_ref_dem"),
69 |                 "path_to_mask": xdem.examples.get_path("longyearbyen_glacier_outlines"),
70 |             },
71 |             "to_be_aligned_elev": {
72 |                 "path_to_elev": xdem.examples.get_path("longyearbyen_tba_dem"),
73 |                 "path_to_mask": xdem.examples.get_path("longyearbyen_glacier_outlines"),
74 |             },
75 |         },
76 |     }
77 | 
78 | 
79 | @pytest.fixture()
80 | def get_accuracy_object_with_run(tmp_path):
81 |     """
82 |     Generate a classical accuracy object and run its workflow
83 |     """
84 |     user_config = {
85 |         "inputs": {
86 |             "reference_elev": {
87 |                 "path_to_elev": xdem.examples.get_path("longyearbyen_ref_dem"),
88 |                 "path_to_mask": xdem.examples.get_path("longyearbyen_glacier_outlines"),
89 |             },
90 |             "to_be_aligned_elev": {
91 |                 "path_to_elev": xdem.examples.get_path("longyearbyen_tba_dem"),
92 |                 "path_to_mask": xdem.examples.get_path("longyearbyen_glacier_outlines"),
93 |             },
94 |         },
95 |         "outputs": {"path": str(tmp_path)},
96 |     }
97 |     workflows = Accuracy(user_config)
98 |     workflows.run()
99 | 
100 |     return workflows
101 | 
102 | 
103 | @pytest.fixture()
104 | def 
pipeline_topo():
105 |     """
106 |     Return default configuration for pipeline topo_summary
107 |     """
108 |     return {
109 |         "inputs": {
110 |             "reference_elev": {
111 |                 "path_to_elev": xdem.examples.get_path("longyearbyen_tba_dem"),
112 |                 "path_to_mask": xdem.examples.get_path("longyearbyen_glacier_outlines"),
113 |                 "downsample": 1,
114 |             }
115 |         },
116 |         "statistics": [
117 |             "mean",
118 |             "median",
119 |             "max",
120 |             "min",
121 |             "sum",
122 |             "sumofsquares",
123 |             "90thpercentile",
124 |             "le90",
125 |             "nmad",
126 |             "rmse",
127 |             "std",
128 |             "standarddeviation",
129 |             "validcount",
130 |             "totalcount",
131 |             "percentagevalidpoints",
132 |         ],
133 |         "terrain_attributes": TERRAIN_ATTRIBUTES_DEFAULT,
134 |         "outputs": {"path": "outputs", "level": 1},
135 |     }
136 | 
137 | 
138 | @pytest.fixture()
139 | def list_default_terrain_attributes():
140 |     """
141 |     Return default list of terrain attributes
142 |     """
143 |     return TERRAIN_ATTRIBUTES_DEFAULT
144 | 
--------------------------------------------------------------------------------
/.github/workflows/python-tests.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, then run tests and lint for each supported Python version
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 | 
4 | name: build
5 | 
6 | on:
7 |   push:
8 |     branches: [ main ]
9 |   pull_request:
10 |     branches: [ main ]
11 | 
12 | jobs:
13 |   test:
14 |     name: ${{ matrix.os }}, python ${{ matrix.python-version }}, ${{ matrix.dep-level }}
15 |     runs-on: ${{ matrix.os }}
16 | 
17 |     strategy:
18 |       matrix:
19 |         os: ["ubuntu-latest", "macos-latest"]
20 |         python-version: ["3.10", "3.11", "3.12", "3.13"]
21 |         dep-level: ["base", "opt"]
22 | 
23 |     # Run all shells using bash (including Windows)
24 |     defaults:
25 |       run:
26 |         shell: bash -l {0}
27 | 
28 |     steps:
29 |       - uses: actions/checkout@v6
30 | 
31 |       # We initiate the environment empty, and check whether a key for this environment already exists in the cache
32 |       - name: Initiate empty environment
33 |         uses: conda-incubator/setup-miniconda@v3
34 |         with:
35 |           miniforge-version: latest
36 |           auto-update-conda: true
37 |           use-mamba: true
38 |           mamba-version: "2.0.5"
39 |           channel-priority: strict
40 |           activate-environment: xdem-dev
41 |           python-version:
42 | 
43 |       - name: Get month for resetting cache
44 |         id: get-date
45 |         run: echo "cache_date=$(/bin/date -u '+%Y%m')" >> $GITHUB_ENV
46 |         shell: bash
47 | 
48 |       - name: Cache conda env
49 |         uses: actions/cache@v5
50 |         with:
51 |           path: ${{ env.CONDA }}/envs
52 |           key: conda-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.dep-level }}-${{ env.cache_date }}-${{ hashFiles('dev-environment.yml') }}-${{ env.CACHE_NUMBER }}
53 |         env:
54 |           CACHE_NUMBER: 0  # Increase this value to reset cache if environment.yml has not changed
55 |         id: cache
56 | 
57 |       # The trick below is necessary because the generic environment file does not specify a Python version, and ONLY
58 |       # "conda env update" CAN BE USED WITH CACHING, which upgrades the Python version when using the base environment
59 |       # (we add "graphviz" from dev-environment to solve all dependencies at once, as graphviz relies on image
60 |       # processing packages very much like geo-packages; not a problem for doc or dev installs where all is done at once)
61 |       - name: Install base environment with a fixed Python version
62 |         if: steps.cache.outputs.cache-hit != 'true'
63 |         run: |
64 |           mamba install pyyaml python=${{
matrix.python-version }}
65 |           python .github/scripts/generate_yml_env_fixed_py.py --pyv ${{ matrix.python-version }} "environment.yml"
66 |           mamba env update -n xdem-dev -f environment-ci-py${{ matrix.python-version }}.yml
67 | 
68 |       # If/else equivalent to installing the base environment, or base+optional
69 |       - name: Install project environment (base)
70 |         if: ${{ matrix.dep-level == 'base' }}
71 |         run: pip install -e .[test]
72 | 
73 |       - name: Install project environment (optional)
74 |         if: ${{ matrix.dep-level != 'base' }}
75 |         run: pip install -e .[test,${{ matrix.dep-level }}]
76 | 
77 |       # This step allows us to check that "import xdem" works with the base environment provided to users, before adding
78 |       # development-specific dependencies by differencing the env and dev-env yml files
79 |       - name: Check import works with base environment
80 |         run: python -c "import xdem"
81 | 
82 |       - name: Lint with flake8
83 |         run: |
84 |           # stop the build if there are Python syntax errors or undefined names
85 |           flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
86 |           # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
87 |           flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
88 | 
89 |       - name: Print conda environment (for debugging)
90 |         run: |
91 |           conda info
92 |           conda list
93 | 
94 |       - name: Test with pytest
95 |         run: pytest -ra --cov=xdem/ --cov-report=lcov
96 | 
97 |       - name: Upload coverage to Coveralls
98 |         uses: coverallsapp/github-action@v2
99 |         continue-on-error: true
100 |         with:
101 |           github-token: ${{ secrets.github_token }}
102 |           flag-name: run-${{ join(matrix.*, '-') }}
103 |           path-to-lcov: coverage.lcov
104 |           parallel: true
105 | 
106 |   finish:
107 |     needs: test
108 |     runs-on: ubuntu-latest
109 |     steps:
110 |       - name: Upload to Coveralls finished
111 |         uses: coverallsapp/github-action@v2
112 |         with:
113 |           github-token: ${{ secrets.github_token }}
114 |           parallel-finished: true
115 | 
--------------------------------------------------------------------------------
/xdem/cli.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2025 Centre National d'Etudes Spatiales (CNES).
2 | #
3 | # This file is part of the xDEM project:
4 | # https://github.com/glaciohack/xdem
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | #
9 | # You may obtain a copy of the License at
10 | #
11 | #     http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
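"""Command-line interface of xDEM, exposing the "topo" and "accuracy" workflows."""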
18 | 
19 | 
20 | import argparse
21 | import ctypes.util
22 | import logging
23 | import sys
24 | 
25 | import yaml  # type: ignore
26 | 
27 | from xdem.workflows import Accuracy, Topo
28 | from xdem.workflows.schemas import COMPLETE_CONFIG_ACCURACY, COMPLETE_CONFIG_TOPO
29 | 
30 | lib_gobject_name = ctypes.util.find_library("gobject-2.0")
31 | lib_pango_name = ctypes.util.find_library("pango-1.0")
32 | 
33 | if lib_gobject_name and lib_pango_name:
34 |     from weasyprint import HTML
35 | 
36 |     _has_libgobject = True
37 | else:
38 |     _has_libgobject = False
39 | 
40 | 
41 | def main() -> None:
42 |     """
43 |     Main function for the CLI
44 |     """
45 | 
46 |     parser = argparse.ArgumentParser(prog="xdem", description="CLI tool to process DEM workflows")
47 |     parser.add_argument(
48 |         "--log-level",
49 |         default="INFO",
50 |         choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
51 |         help="Set the logging level",
52 |     )
53 |     subparsers = parser.add_subparsers(
54 |         dest="command",
55 |         help="Available workflows as subcommand (see xdem [workflow] -h"
56 |         " for more information on the specific workflow)",
57 |     )
58 | 
59 |     # Subcommand: topo
60 |     topo_parser = subparsers.add_parser(
61 |         "topo",
62 |         help="Run DEM qualification workflow",
63 |         description="Run a DEM information workflow using a YAML configuration file.",
64 |         epilog="Example: xdem topo --config config.yaml",
65 |     )
66 |     topo_group = topo_parser.add_mutually_exclusive_group(required=True)
67 |     topo_group.add_argument(
68 |         "--config",
69 |         help="Path to YAML configuration file",
70 |     )
71 |     topo_group.add_argument("--display_template_config", action="store_true", help="Show configuration template")
72 | 
73 |     # Subcommand: accuracy
74 |     diff_parser = subparsers.add_parser(
75 |         "accuracy",
76 |         help="Run DEM comparison workflow",
77 |         description="Run a DEM comparison workflow using a YAML configuration file.",
78 |         epilog="Example: xdem accuracy --config config.yaml",
79 |     )
80 |     diff_group = diff_parser.add_mutually_exclusive_group(required=True)
81 |     diff_group.add_argument("--config", help="Path to YAML configuration file")
82 |     diff_group.add_argument("--display_template_config", action="store_true", help="Show configuration template")
83 | 
84 |     args = parser.parse_args(args=None if sys.argv[1:] else ["--help"])
85 | 
86 |     # Instantiate the logger
87 |     log_level = getattr(logging, args.log_level.upper(), logging.INFO)
88 |     logging.basicConfig(level=log_level, format="%(asctime)s - %(levelname)s - %(message)s")
89 |     logger = logging.getLogger(__name__)
90 |     # fontTools creates noisy logs
91 |     logging.getLogger("fontTools").setLevel(logging.WARNING)
92 |     logging.getLogger("fontTools").propagate = False
93 | 
94 |     if args.command == "topo":
95 |         if args.display_template_config:
96 |             yaml_string = yaml.dump(COMPLETE_CONFIG_TOPO, sort_keys=False, allow_unicode=True)
97 |             logging.info("\n" + yaml_string)
98 |         elif args.config:
99 |             logger.info("Running topo workflow")
100 |             workflow = Topo(args.config)
101 |             workflow.run()
102 | 
103 |     elif args.command == "accuracy":
104 |         if args.display_template_config:
105 |             yaml_string = yaml.dump(COMPLETE_CONFIG_ACCURACY, sort_keys=False, allow_unicode=True)
106 |             logging.info("\n" + yaml_string)
107 |         elif args.config:
108 |             logger.info("Running accuracy workflow")
109 |             workflow = Accuracy(args.config)  # type: ignore
110 |             workflow.run()
111 | 
112 |     else:
113 |         raise ValueError(f"{args.command} doesn't exist, valid commands are 'accuracy', 'topo'")
114 | 
115 |     if args.config and _has_libgobject:
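        # Converting the HTML report to PDF with WeasyPrint requires the system
        # libraries gobject-2.0 and pango-1.0, whose presence was checked at import time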
116 |         logger.info("Generating HTML and PDF report")
117 |         HTML(workflow.outputs_folder / "report.html").write_pdf(workflow.outputs_folder / "report.pdf")
118 | 
119 |     logger.info("End of execution")
120 | 
121 | 
122 | if __name__ == "__main__":
123 |     main()
124 | 
--------------------------------------------------------------------------------
/examples/advanced/plot_blockwise_coreg.py:
--------------------------------------------------------------------------------
1 | """
2 | Blockwise coregistration
3 | ========================
4 | 
5 | Often, biases are spatially variable, and a "global" shift may not be enough to coregister a DEM properly.
6 | In the :ref:`sphx_glr_basic_examples_plot_nuth_kaab.py` example, we saw that the method improved the alignment significantly, but possibly nonlinear artefacts remained in the result.
7 | Clearly, nonlinear coregistration approaches are needed.
8 | One solution is :class:`xdem.coreg.BlockwiseCoreg`, a helper to run any ``Coreg`` class over an arbitrarily small grid, and then "puppet warp" the DEM to fit the reference best.
9 | 
10 | The ``BlockwiseCoreg`` class runs in five steps:
11 | 
12 | 1. Generate a subdivision grid to divide the DEM in N blocks.
13 | 2. Run the requested coregistration approach in each block.
14 | 3. Extract each result as a source and destination X/Y/Z point.
15 | 4. Interpolate the X/Y/Z point-shifts into three shift-rasters.
16 | 5. Warp the DEM to apply the X/Y/Z shifts.
17 | 
18 | """
19 | 
20 | import geoutils as gu
21 | 
22 | # sphinx_gallery_thumbnail_number = 2
23 | import matplotlib.pyplot as plt
24 | import numpy as np
25 | from geoutils.raster.distributed_computing import MultiprocConfig
26 | 
27 | import xdem
28 | 
29 | # %%
30 | # We open example files.
31 | 
32 | reference_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
33 | dem_to_be_aligned = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem"))
34 | glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines"))
35 | 
36 | # Create a stable ground mask (not glacierized) to mark "inlier data"
37 | inlier_mask = ~glacier_outlines.create_mask(reference_dem)
38 | 
39 | plt_extent = [
40 |     reference_dem.bounds.left,
41 |     reference_dem.bounds.right,
42 |     reference_dem.bounds.bottom,
43 |     reference_dem.bounds.top,
44 | ]
45 | 
46 | # %%
47 | # The DEM to be aligned (a 1990 photogrammetry-derived DEM) has some vertical and horizontal biases that we want to avoid, as well as possible nonlinear distortions.
48 | # The product is a mosaic of multiple DEMs, so "seams" may exist in the data.
49 | # These can be visualized by plotting a change map:
50 | 
51 | diff_before = reference_dem - dem_to_be_aligned
52 | 
53 | diff_before.plot(cmap="RdYlBu", vmin=-10, vmax=10)
54 | plt.show()
55 | 
56 | # %%
57 | # Horizontal and vertical shifts can be estimated using :class:`xdem.coreg.NuthKaab`.
58 | # Let's prepare a coregistration class with a tiling configuration.
59 | # ``BlockwiseCoreg`` can also be used without ``mp_config``, with the ``parent_path`` parameter instead.
60 | 
61 | # Create a configuration without multiprocessing cluster (tasks will be processed sequentially)
62 | mp_config = MultiprocConfig(chunk_size=500, outfile="aligned_dem.tif", cluster=None)
63 | blockwise = xdem.coreg.BlockwiseCoreg(xdem.coreg.NuthKaab(), mp_config=mp_config)
64 | 
65 | # %%
66 | # Coregistration is performed with the ``.fit()`` method, and the shifts are applied with ``.apply()``.
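# The aligned result is written to the ``outfile`` given in ``MultiprocConfig`` (here ``aligned_dem.tif``),
# which is re-opened below.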

blockwise.fit(reference_dem, dem_to_be_aligned, inlier_mask)
blockwise.apply()

aligned_dem = xdem.DEM("aligned_dem.tif")


# %%
# The shifts estimated in each block are stored in the coregistration metadata.
# Below, we extract them and plot the X, Y and Z components as per-block heatmaps;
# horizontal shifts are only present if the coregistration method estimates them.

rows, cols, _ = blockwise.shape_tiling_grid

matrix_x = np.full((rows, cols), np.nan)
matrix_y = np.full((rows, cols), np.nan)
matrix_z = np.full((rows, cols), np.nan)

for key, value in blockwise.meta["outputs"].items():
    row, col = map(int, key.split("_"))
    matrix_x[row, col] = value["shift_x"]
    matrix_y[row, col] = value["shift_y"]
    matrix_z[row, col] = value["shift_z"]


def plot_heatmap(matrix, title, cmap, ax):
    im = ax.imshow(matrix, cmap=cmap)
    for (i, j), val in np.ndenumerate(matrix):
        ax.text(j, i, f"{val:.2f}", ha="center", va="center", color="black")
    ax.set_title(title)
    ax.set_xticks(np.arange(cols))
    ax.set_yticks(np.arange(rows))
    ax.invert_yaxis()
    plt.colorbar(im, ax=ax)


fig, axes = plt.subplots(1, 3, figsize=(18, 6))
plot_heatmap(matrix_x, "shifts in X", "Reds", axes[0])
plot_heatmap(matrix_y, "shifts in Y", "Greens", axes[1])
plot_heatmap(matrix_z, "shifts in Z", "Blues", axes[2])

plt.tight_layout()
plt.show()

# %%
# Then, the new difference can be plotted to validate that it improved.

diff_after = reference_dem - aligned_dem

diff_after.plot(cmap="RdYlBu", vmin=-10, vmax=10)
plt.show()

# %%
# We can compare the NMAD to validate numerically that there was an improvement:


print(f"Error before: {gu.stats.nmad(diff_before[inlier_mask]):.2f} m")
print(f"Error after: {gu.stats.nmad(diff_after[inlier_mask]):.2f} m")
--------------------------------------------------------------------------------
/xdem/terrain/freq.py:
--------------------------------------------------------------------------------
# Copyright (c) 2025 xDEM developers
#
# This file is part of the xDEM project:
# https://github.com/glaciohack/xdem
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Terrain submodule on frequency attributes computed on the full array, such as texture shading."""
from __future__ import annotations

import numpy as np
import scipy.fft as fft

from xdem._typing import NDArrayf

############################
# FREQUENCY-BASED ATTRIBUTES
############################


def _nextprod_fft(n: int) -> int:
    """
    Find the next valid FFT size (power of 2, 3, 5, or 7).

    Based on MATLAB's nextpow2 and optimized for scipy.fft.

    :param n: Input size
    :returns: Next valid FFT size
    """
    if n <= 1:
        return 1

    # For small sizes, use powers of 2
    if n <= 1024:
        return int(2 ** np.ceil(np.log2(n)))

    # For larger sizes, find the smallest m >= n such that m = 2^a * 3^b * 5^c * 7^d
    factors = [2, 3, 5, 7]
    candidate = n

    while True:
        temp = candidate
        for factor in factors:
            while temp % factor == 0:
                temp //= factor
        if temp == 1:
            return candidate
        candidate += 1


def _texture_shading_fft(
    dem: NDArrayf,
    alpha: float | None = 0.8,
) -> NDArrayf:
    """
    Core texture shading implementation using fractional Laplacian operator.

    Based on Leland Brown's texture shading technique from:
    Brown, L. (2010). Texture Shading: A New Technique for Depicting Terrain Relief.
    Workshop on Mountain Cartography, Banff, Canada.

    :param dem: Input DEM array
    :param alpha: Fractional exponent for Laplacian operator (0-2, default 0.8)
    :returns: Texture shaded array
    """
    # Validate inputs
    if alpha is None:
        alpha = 0.8  # Use default value if None
    if not 0 <= alpha <= 2:
        raise ValueError(f"Alpha must be between 0 and 2, got {alpha}")

    # Handle NaN values by creating a mask
    valid_mask = np.isfinite(dem)
    if not np.any(valid_mask):
        return np.full_like(dem, np.nan)

    # Work with a copy to avoid modifying input
    result = dem.copy()

    # Fill NaN values with mean of valid values for processing
    if not np.all(valid_mask):
        result[~valid_mask] = np.nanmean(dem)

    # Get dimensions
    rows, cols = result.shape

    # Determine FFT sizes for optimal performance
    fft_rows = _nextprod_fft(rows)
    fft_cols = _nextprod_fft(cols)

    # Pad the array for FFT
    pad_rows = (fft_rows - rows) // 2
    pad_cols = (fft_cols - cols) // 2

    # Use symmetric padding to reduce edge effects
    result = np.pad(
        result,
        (
            (pad_rows, fft_rows - rows - pad_rows),
            (pad_cols, fft_cols - cols - pad_cols),
        ),
        mode="symmetric",
    )

    # Create frequency domain grids
    fy = fft.fftfreq(fft_rows)[:, None]
    fx = fft.rfftfreq(fft_cols)[None, :]

    # Calculate frequency magnitude (avoiding division by zero)
    freq_magnitude = np.hypot(fx, fy)
    freq_magnitude[0, 0] = 1.0

    # Create fractional Laplacian filter in frequency domain
    # For alpha=2, this corresponds to the standard Laplacian
    # Lower alpha values emphasize low frequencies, higher values high frequencies
    laplacian_filter = freq_magnitude**alpha
    if alpha > 0:
        laplacian_filter[0, 0] = 0.0  # only zero DC when alpha>0

    # Apply FFT
    result = fft.rfft2(result, s=(fft_rows, fft_cols), overwrite_x=True)

    # Apply fractional Laplacian in frequency domain in-place
    result *= laplacian_filter

    # Transform back to spatial domain
    result = fft.irfft2(result, s=(fft_rows, fft_cols), overwrite_x=True)

    # Extract the original size from padded result
    result = result[pad_rows : pad_rows + rows, pad_cols : pad_cols + cols]

    # Restore NaN values where original data was invalid
    result[~valid_mask] = np.nan

    return result
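
# Illustrative usage sketch (not part of the module; all names below are assumptions):
# given a NumPy array of elevations, a texture-shaded relief could be obtained as
#
#     import numpy as np
#     rng = np.random.default_rng(0)
#     dem_arr = 500 + 50 * rng.random((100, 120))  # synthetic elevations
#     shaded = _texture_shading_fft(dem_arr, alpha=0.8)  # same shape as dem_arr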
--------------------------------------------------------------------------------
/doc/source/static_surfaces.md:
--------------------------------------------------------------------------------
(static-surfaces)=

# Static surfaces as error proxy

Below, a short guide explaining the use of static surfaces as an error proxy for quantitative elevation analysis.

## The great benefactor of elevation analysis

Elevation data benefits from an uncommon asset, which is that **large proportions of planetary surface elevations
usually remain virtually unchanged through time** (at least, within decadal time scales). Those static surfaces,
sometimes also referred to as "stable terrain", generally consist of bare rock or grasslands, and are often isolated by
excluding dynamic surfaces such as glaciers, snow, forests and cities. If small proportions of static surfaces are
not masked, they are generally filtered out by robust estimators (see {ref}`robust-estimators`).

:::{figure} imgs/stable_terrain_diagram.png
:width: 100%

Source: [Hugonnet et al. (2022)](https://doi.org/10.1109/jstars.2022.3188922).
:::

## Use for coregistration and further uncertainty analysis

Elevation data can rarely be compared to simultaneous acquisitions to assess its sources of error. This is
where **static surfaces come to the rescue, and can act as an error proxy**. By assuming no changes happened on these
surfaces, and that they have the same error structure as other surfaces, it becomes possible to perform
coregistration, bias-correction and further uncertainty analysis!

Below, we summarize the basic principles of how static surfaces allow us to perform coregistration and uncertainty analysis, and the related limitations.

### For coregistration and bias-correction (systematic errors)

**Static surfaces $S$ are key to a coregistration or bias correction transformation $C$** for which it is assumed that,
for two sets of elevation data $h_{1}$ and $h_{2}$, we have:

$$
(h_{1} - C(h_{2}))_{S} \approx 0
$$

and aim to find the best transformation $C$ to minimize this problem.
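
As a minimal illustration of this idea, here is a sketch using synthetic arrays and a simple
vertical shift as the transformation $C$ (all names below are illustrative):

```python
import numpy as np

rng = np.random.default_rng(42)
h1 = 1000 + rng.normal(0, 2, size=10_000)  # reference elevations on static surfaces
h2 = h1 - 5 + rng.normal(0, 2, size=10_000)  # same terrain, with a 5 m vertical bias

vshift = np.mean(h1 - h2)  # estimate of the vertical transformation C
print(f"{np.mean(h1 - (h2 + vshift)):.3f}")  # ~0 on static surfaces after correction
```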

The above relation is not generally true for every pixel or footprint, however, due to random errors that
exist in all data. Consequently, we can only write:

$$
\textrm{mean} (h_{1} - C(h_{2}))_{S \gg r^{2}} \approx 0
$$

where $r$ is the correlation range of random errors, and $S \gg r^{2}$ assumes that static surfaces cover a domain much
larger than this correlation range. If static surfaces cover too small an area, coregistration will naturally become
less reliable.

```{note}
One of the objectives of xDEM is to allow using knowledge of random errors to refine
coregistration for limited static surface areas, stay tuned!
```

### For further uncertainty analysis (random errors)

**Static surfaces are also essential for uncertainty analysis aiming to infer the random errors of elevation
data** but, in this case, we have to consider the effect of random errors from both sets of elevation data.

We first assume that elevation $h_{2}$ is now largely free of systematic errors after performing coregistration and
bias corrections $C$. The analysis of elevation differences $dh$ on static surfaces $S$ will represent the mixed random
errors of the two sets of data, which we can assume are statistically independent (if indeed acquired separately), which yields:

$$
\sigma_{dh, S} = \sigma_{h_{\textrm{1}} - h_{\textrm{2}}} = \sqrt{\sigma_{h_{\textrm{1}}}^{2} + \sigma_{h_{\textrm{2}}}^{2}}
$$

where $\sigma$ is the random error at any data point.

If one set of elevation data is known to be of much higher precision, one can assume that the analysis of differences
will represent only the precision of the rougher DEM. For instance, $\sigma_{h_{1}} = 3 \sigma_{h_{2}}$ implies that about
95% of $\sigma_{dh}$ comes from $\sigma_{h_{1}}$ in the above equation (since $3/\sqrt{10} \approx 0.95$).
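
A quick numerical check of the above (a sketch, with arbitrary values):

```python
import numpy as np

sigma_h2 = 1.0  # random error of the higher-precision data
sigma_h1 = 3 * sigma_h2  # random error of the lower-precision data
sigma_dh = np.sqrt(sigma_h1**2 + sigma_h2**2)  # ~3.16

print(f"{sigma_h1 / sigma_dh:.2f}")  # ~0.95: sigma_dh is dominated by sigma_h1
```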

More generally:

$$
\sigma_{dh, S} = \sigma_{h_{\textrm{higher precision}} - h_{\textrm{lower precision}}} \approx \sigma_{h_{\textrm{lower precision}}}
$$

And the same applies to the spatial correlation of these random errors:

$$
\rho_{dh, S}(d) = \rho_{h_{\textrm{higher precision}} - h_{\textrm{lower precision}}}(d) \approx \rho_{h_{\textrm{lower precision}}}(d)
$$

where $\rho(d)$ is the spatial correlation, and $d$ is the spatial lag (distance between data points).

----------------

:::{admonition} References and more reading
:class: tip

Static surfaces can be used as a **proxy for assessing systematic and random errors**, which directly relates to
what is commonly referred to as accuracy and precision of elevation data, detailed in the **next guide page on {ref}`accuracy-precision`**.

See the **{ref}`spatial-stats` guide page** for more details on spatial statistics applied to uncertainty quantification.

**References:** [Hugonnet et al. (2022)](https://doi.org/10.1109/jstars.2022.3188922), Uncertainty analysis of digital elevation models by spatial inference from stable terrain.
:::
--------------------------------------------------------------------------------
/doc/source/release_notes.md:
--------------------------------------------------------------------------------
# Release notes

Below, the release notes for all minor versions and our roadmap to a first major version.

## 0.1.0

xDEM version 0.1 is the **first minor release** since the creation of the project in 2020. It is the result of years of work
to consolidate and re-structure features into a mature and stable API that minimizes future breaking changes.

**All the core features drafted at the start of the project are now supported**, and there is a **clear roadmap
towards a first major release 1.0**. This minor release also adds many tests and significantly improves the documentation
from the early-development state of the package.

The re-structuring created some breaking changes, though minor.

See details below, including **a guide to help migrate code from early-development versions**.

### Features

xDEM now gathers the following core features:
- **Elevation data objects** core to quantitative analysis, which are DEMs and elevation point clouds,
- **Vertical referencing** including automatic 3D CRS fetching,
- **Terrain analysis** for many attributes,
- **Coregistration** with the choice of several methods, including modular pipeline building,
- **Bias corrections** for any variable, also modular and supported by pipelines,
- **Uncertainty analysis** based on several robust methods.

Recent additions include in particular **point-raster support for coregistration**, and the **expansion of
`DEM` class methods** to cover all features of the package, with for instance `DEM.coregister_3d()` or `DEM.slope()`.

### Guides and other resources

xDEM integrates **background material on quantitative analysis of elevation data** to help users apply the various methods
of the package. This material includes **several illustrated guide pages**, **a cheatsheet** on how to recognize and correct
typical elevation errors, and more.

### Future deprecations

We have added warnings throughout the documentation and API related to planned deprecations:
- **Gap-filling features specific to glacier applications** will be moved to a separate package,
- **Uncertainty analysis tools related to variography** will change API to rely on SciKit-GStat variogram objects,
- The **dDEM** and **DEMCollection** classes will likely be refactored or removed.

Changes related to **gap-filling** and **uncertainty analysis** will have deprecation warnings, while the functions
remain available during a few more releases.

(migrate-early)=
### Migrate from early versions

The following changes **might be required to solve breaking changes**, depending on your early-development version:
- Rename `.show()` to `.plot()` for all data objects,
- Rename `.dtypes` to `dtype` for `DEM` objects,
- Operations `.crop()`, `shift()` and `to_vcrs()` are no longer done in-place by default; replace by `dem = dem.crop()` or `dem.crop(..., inplace=True)` to mirror the old default behaviour,
- Rename `.shift()` to `.translate()` for `DEM` objects,
- Several function arguments are renamed, in particular `dst_xxx` arguments of `.reproject()` are all renamed to `xxx` e.g. `dst_crs` to `crs`, as well as the arguments of `Coreg.fit()` renamed from `xxx_dem` to `xxx_elev` to be generic to any elevation data,
- All `BiasCorr1D`, `BiasCorr2D` and `BiasCorrND` classes are removed in favor of a single `BiasCorr` class that implicitly understands the number of dimensions from the length of input `bias_vars`,
- New user warnings are sometimes raised, in particular if some metadata is not properly defined such as `.nodata`. Those should give an indication as to how to silence them.

Additionally, **some important yet non-breaking changes** (see the migration sketch below):
- The sequential use of `Coreg.fit()` and `Coreg.apply()` on the same `tba_elev` is now discouraged and updated everywhere in the documentation; use `Coreg.fit_and_apply()` or `DEM.coregister_3d()` instead,
- The use of a separate module for terrain attributes such as `xdem.terrain.slope()` is now discouraged, use `DEM.slope()` instead.
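
For instance, a typical coregistration snippet would migrate as follows (a sketch; variable
names are illustrative):

```python
# Before (early-development versions): fit then apply on the same data
mycoreg = xdem.coreg.NuthKaab()
mycoreg.fit(ref_elev, tba_elev, inlier_mask=inlier_mask)
aligned = mycoreg.apply(tba_elev)

# Since 0.1: a single call avoids re-passing the same elevation data
aligned = xdem.coreg.NuthKaab().fit_and_apply(ref_elev, tba_elev, inlier_mask=inlier_mask)
```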

## Roadmap to 1.0

Based on recent and ongoing progress, we envision the following roadmap.

**Releases of 0.2, 0.3, 0.4, etc.**, for the following planned (ongoing) additions:
- The **addition of a command-line interface for features such as coregistration**, as part of the merging effort with [demcompare](https://github.com/CNES/demcompare),
- The **addition of an elevation point cloud `EPC` data object**, inherited from the ongoing `PointCloud` object of GeoUtils alongside many features at the interface of point and raster,
- The **addition of a Xarray accessor `dem`** mirroring the `DEM` object, to work natively with Xarray objects and add support for out-of-memory Dask operations for most of xDEM's features,
- The **addition of a GeoPandas accessor `epc`** mirroring the `EPC` object, to work natively with GeoPandas objects,
- The **re-structuration of uncertainty analysis features** to rely directly on SciKit-GStat's `Variogram` object.

**Release of 1.0** once all these additions are fully implemented, and after feedback from the community.
--------------------------------------------------------------------------------
/doc/source/quick_start.md:
--------------------------------------------------------------------------------
---
file_format: mystnb
mystnb:
  execution_timeout: 60
jupytext:
  formats: md:myst
  text_representation:
    extension: .md
    format_name: myst
kernelspec:
  display_name: xdem-env
  language: python
  name: xdem
---
(quick-start)=

# Quick start

Below is a short example showcasing some of the core functionalities of xDEM.
To find an example about a specific functionality, jump directly to {ref}`quick-gallery`.

## Command line interface

1. Create a configuration YAML file containing the paths to your elevation files.

For example:

```yaml
inputs:
  reference_elev:
    path_to_elev: "xdem/examples/data/Longyearbyen/data/DEM_2009_ref.tif"
  to_be_aligned_elev:
    path_to_elev: "xdem/examples/data/Longyearbyen/data/DEM_1990.tif"
    path_to_mask: "xdem/examples/data/Longyearbyen/data/glacier_mask/CryoClim_GAO_SJ_1990.shp"
```

2. Execute the command line interface:

```shell
xdem accuracy --config config_file.yaml
```
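
The full set of recognized configuration keys can also be printed as a template with the
`--display_template_config` option of each workflow subcommand (shown here for illustration):

```shell
xdem accuracy --display_template_config
```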

## Short example

```{note}
:class: margin
xDEM relies largely on [its sister-package GeoUtils](https://geoutils.readthedocs.io/) for geospatial handling
(reprojection, cropping, raster-vector interface, point interpolation) as well as numerics
(NumPy interface). 🙂
```

xDEM revolves around the {class}`~xdem.DEM` class (a subclass of {class}`~geoutils.Raster`), from
which most methods can be called, and the {class}`~xdem.coreg.Coreg` classes, used to build modular coregistration pipelines.

Below, in a few lines, we load two DEMs and a vector of glacier outlines, crop them to a common extent,
align the DEMs using coregistration, estimate the elevation change, estimate the elevation change error using stable
terrain, and finally plot and save the result!


```{code-cell} ipython3
:tags: [remove-cell]

# To get a good resolution for displayed figures
from matplotlib import pyplot
pyplot.rcParams['figure.dpi'] = 600
pyplot.rcParams['savefig.dpi'] = 600
```

```{code-cell} ipython3
import xdem
import geoutils as gu

# Examples files: filenames of two DEMs and some glacier outlines
fn_dem_ref = xdem.examples.get_path("longyearbyen_ref_dem")
fn_dem_tba = xdem.examples.get_path("longyearbyen_tba_dem")
fn_glacier_outlines = xdem.examples.get_path("longyearbyen_glacier_outlines")

# Print filenames
print(f"DEM 1: {fn_dem_ref}, \nDEM 2: {fn_dem_tba}, \nOutlines: {fn_glacier_outlines}")
```

```{tip}
:class: margin
Set up your {ref}`verbosity` to manage outputs to the console (or a file) during execution!
```

```{code-cell} ipython3
# Open files by instantiating DEM and Vector
# (DEMs are loaded lazily = only metadata but not array unless required)
dem_ref = xdem.DEM(fn_dem_ref)
dem_tba = xdem.DEM(fn_dem_tba)
vect_gla = gu.Vector(fn_glacier_outlines)

# Clip outlines to extent of reference DEM (method from GeoUtils)
vect_gla = vect_gla.crop(dem_ref, clip=True)

# Create a mask from glacier polygons (method from GeoUtils)
mask_gla = vect_gla.create_mask(dem_ref)

# We convert the vertical CRS of one DEM to the EGM96 geoid
dem_ref.to_vcrs("EGM96", force_source_vcrs="Ellipsoid")

# Align the two DEMs with a coregistration: 3D shift + 2nd-order 2D poly
mycoreg = xdem.coreg.NuthKaab() + xdem.coreg.Deramp(poly_order=2)
mycoreg.fit(dem_ref, dem_tba, inlier_mask=~mask_gla)
dem_aligned = mycoreg.apply(dem_tba)

# Get elevation difference
dh = dem_ref - dem_aligned

# Derive slope and curvature attributes
slope, max_curvature = xdem.terrain.get_terrain_attribute(
    dem_ref, attribute=["slope", "max_curvature"]
)

# Estimate elevation change error from stable terrain as a function of slope and curvature
dh_err = xdem.spatialstats.infer_heteroscedasticity_from_stable(
    dh, list_var=[slope, max_curvature], unstable_mask=mask_gla
)[0]

# Plot dh, glacier outlines and its error map
dh.plot(cmap="RdYlBu", cbar_title="Elevation change (m)")
vect_gla.plot(dh, fc='none', ec='k', lw=0.5)

dh_err.plot(ax="new", vmin=2, vmax=7, cmap="Reds", cbar_title=r"Elevation change error (1$\sigma$, m)")
vect_gla.plot(dh_err, fc='none', ec='k', lw=0.5)

# Save to file
dh_err.to_file("dh_error.tif")
```

```{code-cell} ipython3
:tags: [remove-cell]
import os
os.remove("dh_error.tif")
```

(quick-gallery)=
## More examples

To dive into more illustrated code, explore our gallery of examples, which is composed of:
- A {ref}`examples-basic` section on simpler routines (terrain attributes, pre-defined coregistration and uncertainty pipelines),
- An {ref}`examples-advanced` section using advanced pipelines (for in-depth coregistration and uncertainty analysis).

See also the concatenated list of examples below.

```{eval-rst}
.. minigallery:: xdem.DEM
    :add-heading: Examples using DEMs
```
--------------------------------------------------------------------------------
/tests/test_demcollection.py:
--------------------------------------------------------------------------------
"""Functions to test the DEM collection tools."""

import datetime
import warnings

import geoutils as gu
import numpy as np

import xdem


class TestDEMCollection:
    dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
    dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem"))
    outlines_1990 = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines"))
    outlines_2010 = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines_2010"))

    def test_init(self) -> None:

        timestamps = [datetime.datetime(1990, 8, 1), datetime.datetime(2009, 8, 1), datetime.datetime(2060, 8, 1)]

        scott_1990 = gu.Vector(self.outlines_1990.ds.loc[self.outlines_1990.ds["NAME"] == "Scott Turnerbreen"])
        scott_2010 = gu.Vector(self.outlines_2010.ds.loc[self.outlines_2010.ds["NAME"] == "Scott Turnerbreen"])

        # Make sure the glacier was bigger in 1990, since this is assumed later.
        assert scott_1990.ds.area.sum() > scott_2010.ds.area.sum()

        mask_2010 = scott_2010.create_mask(self.dem_2009)

        dem_2060 = self.dem_2009.copy()
        dem_2060[mask_2010] -= 30

        dems = xdem.DEMCollection(
            [self.dem_1990, self.dem_2009, dem_2060],
            timestamps=timestamps,
            outlines=dict(zip(timestamps[:2], [self.outlines_1990, self.outlines_2010])),
            reference_dem=1,
        )

        # Check that the first raster is the oldest one
        assert dems.dems[0].data.max() == self.dem_1990.data.max()
        assert dems.reference_dem.data.max() == self.dem_2009.data.max()

        dems.subtract_dems(resampling_method="nearest")

        assert np.mean(dems.ddems[0].data) < 0

        scott_filter = "NAME == 'Scott Turnerbreen'"

        dh_series = dems.get_dh_series(outlines_filter=scott_filter)

        # The 1990-2009 area should be the union of those years. The 2009-2060 area should just be the 2010 area.
        assert dh_series.iloc[0]["area"] > dh_series.iloc[-1]["area"]

        cumulative_dh = dems.get_cumulative_series(kind="dh", outlines_filter=scott_filter)
        cumulative_dv = dems.get_cumulative_series(kind="dv", outlines_filter=scott_filter)

        # Simple check that the cumulative_dh is overall negative.
        assert cumulative_dh.iloc[0] > cumulative_dh.iloc[-1]

        # Simple check that the dV number is of a greater magnitude than the dH number.
        assert abs(cumulative_dv.iloc[-1]) > abs(cumulative_dh.iloc[-1])

        rng = np.random.default_rng(42)
        # Generate up to 100 NaN values randomly in one of the dDEMs
        dems.ddems[0].data[
            rng.integers(0, dems.ddems[0].data.shape[0], 100),
            rng.integers(0, dems.ddems[0].data.shape[1], 100),
        ] = np.nan
        # Check that the cumulative_dh function warns for NaNs
        with warnings.catch_warnings():
            # Raise warnings as exceptions so the check below is actually exercised
            warnings.simplefilter("error")
            try:
                dems.get_cumulative_series(nans_ok=False)
            except UserWarning as exception:
                if "NaNs found in dDEM" not in str(exception):
                    raise exception

    def test_dem_datetimes(self) -> None:
        """Try to create the DEMCollection without the timestamps argument (instead relying on datetime attributes)."""
        self.dem_1990.datetime = datetime.datetime(1990, 8, 1)
        self.dem_2009.datetime = datetime.datetime(2009, 8, 1)

        dems = xdem.DEMCollection([self.dem_1990, self.dem_2009])

        assert len(dems.timestamps) > 0

    def test_ddem_interpolation(self) -> None:
        """Test that dDEM interpolation works as it should."""

        # Create a DEMCollection object
        dems = xdem.DEMCollection(
            [self.dem_2009, self.dem_1990], timestamps=[datetime.datetime(year, 8, 1) for year in (2009, 1990)]
        )

        # Create dDEMs
        dems.subtract_dems(resampling_method="nearest")

        # The example data does not have NaNs, so filled_data should exist.
        assert dems.ddems[0].filled_data is not None

        # Try to set the filled_data property with an invalid size.
        try:
            dems.ddems[0].filled_data = np.zeros(3)
        except AssertionError as exception:
            if "differs from the data shape" not in str(exception):
                raise exception

        # Generate up to 100 NaN values randomly in one of the dDEMs
        rng = np.random.default_rng(42)
        dems.ddems[0].data[
            rng.integers(0, dems.ddems[0].data.shape[0], 100),
            rng.integers(0, dems.ddems[0].data.shape[1], 100),
        ] = np.nan

        # Make sure that filled_data is not available anymore, since the data now has nans
        assert dems.ddems[0].filled_data is None

        # Interpolate the nans
        dems.ddems[0].interpolate(method="idw")

        # Make sure that the filled_data is available again
        assert dems.ddems[0].filled_data is not None
--------------------------------------------------------------------------------
/examples/advanced/plot_norm_regional_hypso.py:
--------------------------------------------------------------------------------
"""
Normalized regional hypsometric interpolation
=============================================

.. caution:: This functionality is specific to glaciers, and might be removed in future package versions.

There are many ways of interpolating gaps in elevation differences.
In the case of glaciers, one very useful fact is that elevation change generally varies with elevation.
This means that if valid pixels exist in a certain elevation bin, their values can be used to fill other pixels in the same approximate elevation.
Filling gaps by elevation is the main basis of "hypsometric interpolation approaches", of which there are many variations.

One problem with simple hypsometric approaches is that they may not work for glaciers with different elevation ranges and scales.
Let's say we have two glaciers: one gigantic reaching from 0-1000 m, and one small from 900-1100 m.
Usually in the 2000s, glaciers thin rapidly at the bottom, while they may be neutral or only thin slightly at the top.
If we extrapolate the hypsometric signal of the gigantic glacier to use on the small one, it may seem like the smaller glacier has almost no change whatsoever.
This may be right, or it may be catastrophically wrong!

Normalized regional hypsometric interpolation solves the scale and elevation range problems in one go. It:

1. Calculates a regional signal using the weighted average of each glacier's normalized signal:

    a. The glacier's elevation range is scaled from 0-1 to be elevation-independent.
    b. The glacier's elevation change is scaled from 0-1 to be magnitude-independent.
    c. A weight is assigned by the amount of valid pixels (well-covered large glaciers gain a higher weight).

2. Re-scales that signal to fit each glacier, once the signal is determined.

The consequence is a much more accurate interpolation approach that can be used in a multitude of glacierized settings.
"""
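
# A sketch of the normalization in steps 1a-1b above (illustrative only, not used by
# the example below): for one glacier with elevations z and elevation changes dz,
#
#     z_norm = (z - z.min()) / (z.max() - z.min())  # 0-1, elevation-independent
#     dz_norm = (dz - dz.min()) / (dz.max() - dz.min())  # 0-1, magnitude-independent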

import geoutils as gu

# sphinx_gallery_thumbnail_number = 2
import matplotlib.pyplot as plt
import numpy as np

import xdem
import xdem.misc

# %%
# **Example files**

dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem_coreg"))

glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines"))

# Rasterize the glacier outlines to create an index map.
# Stable ground is 0, the first glacier is 1, the second is 2, etc.
glacier_index_map = glacier_outlines.rasterize(dem_2009)

plt_extent = [
    dem_2009.bounds.left,
    dem_2009.bounds.right,
    dem_2009.bounds.bottom,
    dem_2009.bounds.top,
]


# %%
# To test the method, we can generate a random mask marking the cells to void.
# Let's remove 30% of the data.
index_nans = dem_2009.subsample(subsample=0.3, return_indices=True)
mask_nans = dem_2009.copy(new_array=np.zeros(dem_2009.shape))
mask_nans[index_nans] = 1

mask_nans.plot()

# %%
# The normalized hypsometric signal shows the tendency for elevation change as a function of elevation.
# The magnitude may vary between glaciers, but the shape is generally similar.
# Normalizing by both elevation and elevation change, and then re-scaling the signal to every glacier, ensures that it is as accurate as possible.
# **NOTE**: The hypsometric signal does not need to be generated separately; it will be created by :func:`xdem.volume.norm_regional_hypsometric_interpolation`.
# Generating it first, however, allows us to visualize and validate it.
75 | 76 | ddem = dem_2009 - dem_1990 77 | ddem_voided = np.where(mask_nans.data, np.nan, ddem.data) 78 | 79 | signal = xdem.volume.get_regional_hypsometric_signal( 80 | ddem=ddem_voided, 81 | ref_dem=dem_2009.data, 82 | glacier_index_map=glacier_index_map, 83 | ) 84 | 85 | plt.fill_between(signal.index.mid, signal["sigma-1-lower"], signal["sigma-1-upper"], label="Spread (+- 1 sigma)") 86 | plt.plot(signal.index.mid, signal["w_mean"], color="black", label="Weighted mean") 87 | plt.ylabel("Normalized elevation change") 88 | plt.xlabel("Normalized elevation") 89 | plt.legend() 90 | plt.show() 91 | 92 | # %% 93 | # The signal can now be used (or simply estimated again if not provided) to interpolate the DEM. 94 | 95 | ddem_filled = xdem.volume.norm_regional_hypsometric_interpolation( 96 | voided_ddem=ddem_voided, ref_dem=dem_2009, glacier_index_map=glacier_index_map, regional_signal=signal 97 | ) 98 | 99 | 100 | plt.imshow(ddem_filled.data, cmap="RdYlBu", vmin=-10, vmax=10, extent=plt_extent) 101 | plt.colorbar() 102 | plt.show() 103 | 104 | 105 | # %% 106 | # We can plot the difference between the actual and the interpolated values, to validate the method. 107 | 108 | difference = (ddem_filled - ddem)[mask_nans.data] 109 | median = np.ma.median(difference) 110 | nmad = gu.stats.nmad(difference) 111 | 112 | plt.title(f"Median: {median:.2f} m, NMAD: {nmad:.2f} m") 113 | plt.hist(difference.data, bins=np.linspace(-15, 15, 100)) 114 | plt.show() 115 | 116 | # %% 117 | # As we see, the median is close to zero, while the NMAD varies slightly more. 118 | # This is expected, as the regional signal is good for multiple glaciers at once, but it cannot account for difficult local topography and meteorological conditions. 119 | # It is therefore highly recommended for large regions; just don't zoom in too close! 120 | -------------------------------------------------------------------------------- /.github/scripts/generate_pip_deps_from_conda.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | (Copied from pandas: https://github.com/pandas-dev/pandas/blob/main/scripts/generate_pip_deps_from_conda.py) 4 | Convert the conda environment.yml to the pip requirements-dev.txt, 5 | or check that they have the same packages (for the CI) 6 | 7 | Usage: 8 | 9 | Generate `requirements-dev.txt` 10 | $ python scripts/generate_pip_deps_from_conda.py 11 | 12 | Compare and fail (exit status != 0) if `requirements-dev.txt` has not been 13 | generated with this script: 14 | $ python scripts/generate_pip_deps_from_conda.py --compare 15 | """ 16 | import argparse 17 | import pathlib 18 | import re 19 | import sys 20 | 21 | if sys.version_info >= (3, 11): 22 | import tomllib 23 | else: 24 | import tomli as tomllib 25 | import yaml 26 | 27 | EXCLUDE = {"python"} 28 | REMAP_VERSION = {"tzdata": "2022.1"} 29 | RENAME = {} 30 | 31 | 32 | def conda_package_to_pip(package: str): 33 | """ 34 | Convert a conda package to its pip equivalent. 35 | 36 | In most cases they are the same, those are the exceptions: 37 | - Packages that should be excluded (in `EXCLUDE`) 38 | - Packages that should be renamed (in `RENAME`) 39 | - A package requiring a specific version, in conda is defined with a single 40 | equal (e.g. ``pandas=1.0``) and in pip with two (e.g. 
``pandas==1.0``)
    """
    package = re.sub("(?<=[^<>])=", "==", package).strip()

    for compare in ("<=", ">=", "=="):
        if compare in package:
            pkg, version = package.split(compare)
            if pkg in EXCLUDE:
                return
            if pkg in REMAP_VERSION:
                return "".join((pkg, compare, REMAP_VERSION[pkg]))
            if pkg in RENAME:
                return "".join((RENAME[pkg], compare, version))

    if package in EXCLUDE:
        return

    if package in RENAME:
        return RENAME[package]

    return package


def generate_pip_from_conda(conda_path: pathlib.Path, pip_path: pathlib.Path, compare: bool = False) -> bool:
    """
    Generate the pip dependencies file from the conda file, or compare that
    they are synchronized (``compare=True``).

    Parameters
    ----------
    conda_path : pathlib.Path
        Path to the conda file with dependencies (e.g. `environment.yml`).
    pip_path : pathlib.Path
        Path to the pip file with dependencies (e.g. `requirements-dev.txt`).
    compare : bool, default False
        Whether to generate the pip file (``False``) or to compare if the
        pip file has been generated with this script and the last version
        of the conda file (``True``).

    Returns
    -------
    bool
        True if the comparison fails, False otherwise
    """
    with conda_path.open() as file:
        deps = yaml.safe_load(file)["dependencies"]

    pip_deps = []
    for dep in deps:
        if isinstance(dep, str):
            conda_dep = conda_package_to_pip(dep)
            if conda_dep:
                pip_deps.append(conda_dep)
        elif isinstance(dep, dict) and len(dep) == 1 and "pip" in dep:
            # If pulled directly from GitHub (temporary CI passing),
            # such as git+https://github.com/GlacioHack/geoutils.git,
            # rename to the package repo name
            dep_pips = dep["pip"]
            for dep_pip in dep_pips:
                if "+" in dep_pip and dep_pip.split("+")[0] == "git":
                    dep_pip = dep_pip.split("/")[-1].split(".git")[0]
                pip_deps.append(dep_pip)
        else:
            raise ValueError(f"Unexpected dependency {dep}")

    header = (
        f"# This file is auto-generated from {conda_path.name}, do not modify.\n"
        "# See that file for comments about the need/usage of each dependency.\n\n"
    )
    pip_content = header + "\n".join(pip_deps) + "\n"

    # Add setuptools to requirements-dev.txt
    # with open(pathlib.Path(conda_path.parent, "pyproject.toml"), "rb") as fd:
    #     meta = tomllib.load(fd)
    # for requirement in meta["build-system"]["requires"]:
    #     if "setuptools" in requirement:
    #         pip_content += requirement
    # pip_content += "\n"

    if compare:
        with pip_path.open() as file:
            return pip_content != file.read()

    with pip_path.open("w") as file:
        file.write(pip_content)
    return False


if __name__ == "__main__":
    argparser = argparse.ArgumentParser(description="convert (or compare) conda file to pip")
    argparser.add_argument(
        "--compare",
        action="store_true",
        help="compare whether the two files are equivalent",
    )
    args = argparser.parse_args()

    conda_fname = "environment.yml"
    pip_fname = "requirements.txt"
    repo_path = pathlib.Path(__file__).parent.parent.parent.absolute()
    res = generate_pip_from_conda(
        pathlib.Path(repo_path, conda_fname),
        pathlib.Path(repo_path, pip_fname),
        compare=args.compare,
    )
    if res:
= f"`{pip_fname}` has to be generated with `{__file__}` after " f"`{conda_fname}` is modified.\n" 148 | sys.stderr.write(msg) 149 | sys.exit(res) 150 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2024 xDEM developers. 2 | 3 | This file is part of xDEM (see https://github.com/GlacioHack/xdem). 4 | 5 | Licensed under the Apache License, Version 2.0 (the "License"); 6 | you may not use this file except in compliance with the License. 7 | You may obtain a copy of the License at 8 | 9 | http://www.apache.org/licenses/LICENSE-2.0 10 | 11 | Unless required by applicable law or agreed to in writing, software 12 | distributed under the License is distributed on an "AS IS" BASIS, 13 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | See the License for the specific language governing permissions and 15 | limitations under the License. 16 | 17 | xDEM software is distributed under the Apache Software License (ASL) v2.0, see 18 | LICENSE file or http://www.apache.org/licenses/LICENSE-2.0 for details. 19 | 20 | Python: programming language that lets you work quickly and integrate systems more effectively. 21 | Copyright (c) 2001-2023 Python Software Foundation. All Rights reserved. 22 | Website: http://python.org/ 23 | License: Python Software License. 24 | 25 | NumPy: The fundamental package for scientific computing with Python. 26 | Copyright (c) 2005-2024, NumPy Developers. 27 | Website: https://numpy.org/ 28 | License: BSD 3-Clause. 29 | 30 | Matplotlib: Comprehensive library for creating static, animated, and interactive visualizations in Python. 31 | Copyright (C) 2001-2023 Matplotlib Development Team. 32 | Website: https://matplotlib.org/ 33 | License: Matplotlib only uses BSD compatible code, and its license is based on the PSF license. 34 | 35 | SciPy: Open-source software for mathematics, science, and engineering. 36 | Copyright (c) 2001-2002 Enthought, Inc. All rights reserved. 37 | Copyright (c) 2003-2019 SciPy Developers. All rights reserved. 38 | Website: https://www.scipy.org/scipylib/ 39 | License: BSD 3-Clause. 40 | 41 | Geoutils: Libraries and command-line utilities for geospatial data processing/analysis in Python. 42 | Copyright (c) 2020 Amaury Dehecq, Andrew Tedstone. 43 | Website: https://github.com/GeoUtils/geoutils 44 | License: MIT License. 45 | 46 | Rasterio: Access to geospatial raster data 47 | Copyright (c) 2016, MapBox All rights reserved. 48 | Website: https://github.com/mapbox/rasterio 49 | License: BSD 3-Clause. 50 | 51 | GeoPandas: Python tools for geographic data. 52 | Copyright (c) 2013-2022, GeoPandas developers. 53 | Website: https://geopandas.org/ 54 | License: BSD 3-Clause. 55 | 56 | pyogrio: Fast read/write access to OGR-compatible vector formats. 57 | Copyright (c) 2020-2024 Brendan C. Ward and pyogrio contributors. 58 | Website: https://github.com/geopandas/pyogrio 59 | License: MIT. 60 | 61 | pandas: Data analysis and manipulation library for Python. 62 | Copyright (c) 2008-2011, AQR Capital Management, LLC, Lambda Foundry, Inc. and PyData Development Team. 63 | Copyright (c) 2011-2024, Open source contributors. 64 | Website: https://pandas.pydata.org/ 65 | License: BSD 3-Clause. 66 | 67 | scikit-learn: Machine learning library for Python. 68 | Copyright (c) 2007-2023, scikit-learn Developers. 69 | Website: https://scikit-learn.org/ 70 | License: BSD 3-Clause. 

Numba: A Just-in-Time Compiler for Python that accelerates numerical functions.
Copyright (c) 2012-2023 Anaconda, Inc.
Website: https://numba.pydata.org/
License: BSD 2-Clause.

scikit-image: Image processing in Python.
Copyright (c) 2009-2022 the scikit-image team.
Website: https://scikit-image.org/
License: BSD 3-Clause.

scikit-gstat: A geostatistics toolbox for Python.
Copyright (c) 2017 Mirko Mälicke.
Website: https://github.com/mmaelicke/scikit-gstat
License: MIT License.

affine: Matrix transformations for geospatial coordinates.
Copyright (c) 2014-2023, Sean Gillies.
Website: https://github.com/sgillies/affine
License: BSD 3-Clause.

Shapely: Manipulation and analysis of geometric objects.
Copyright (c) 2007, Sean C. Gillies. 2019, Casper van der Wel. 2007-2022, Shapely Contributors.
Website: https://shapely.readthedocs.io/
License: BSD 3-Clause.

pyproj: Python interface to PROJ (cartographic projections and transformations library).
Copyright (c) 2006-2018, Jeffrey Whitaker.
Copyright (c) 2019-2024, Open source contributors.
Website: https://pyproj4.github.io/pyproj/stable/
License: MIT License.

pytransform3d: 3D transformations for Python.
Copyright (c) 2014-2023, Alexander Fabisch, and pytransform3d contributors.
Website: https://github.com/rock-learning/pytransform3d
License: BSD 3-Clause.

tqdm: A fast, extensible progress bar for Python and CLI applications.
Copyright (c) MIT 2013 Noam Yorav-Raphael, original author.
Copyright (c) MPL-2.0 2015-2024 Casper da Costa-Luis.
Website: https://github.com/tqdm/tqdm
License: MPL-2.0 and MIT License.

yaml (PyYAML): Python bindings for YAML, a human-readable data serialization language.
Copyright (c) 2006-2023, PyYAML contributors.
Website: https://pyyaml.org/
License: CC-BY 2.0.

Texshade: Low-memory approximation to texture-shaded elevation via the fractional-Laplacian operator.
Website: https://github.com/fasiha/texshade-py
License: Unlicense.

WeasyPrint: Smart solution helping web developers to create PDF documents.
Copyright (c) 2011-2021, Simon Sapin and contributors.
Website: https://weasyprint.org/
License: BSD 3-Clause.

cerberus: Cerberus is a lightweight and extensible data validation library for Python.
Copyright (c) 2012-2016 Nicola Iarocci.
Website: https://github.com/pyeve/cerberus
License: ISC License.
--------------------------------------------------------------------------------
/doc/source/citation.md:
--------------------------------------------------------------------------------
(citation)=

# Citing and method overview

When using a method implemented in xDEM, one should cite both the package and the original study behind the method (if there is any)!

## Citing xDEM

To cite the package, use the Zenodo DOI: [![Zenodo](https://zenodo.org/badge/doi/10.5281/zenodo.4809697.svg)](https://zenodo.org/doi/10.5281/zenodo.4809697).

## Method overview

For citation and other purposes, here is an overview of all methods implemented in the package and their references, where they exist.
More details are available on each feature page!
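
For instance, to know what to cite when computing a slope and running a Nuth and Kääb coregistration,
the entries below can be matched to a snippet like this one (a sketch using the example data from
this documentation; the default slope variant is assumed to be Horn (1981) here):

```python
import xdem

dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem"))
slope = dem.slope()  # cite the slope reference below (assumed Horn, 1981, by default)
coreg = xdem.coreg.NuthKaab()  # cite Nuth and Kääb (2011)
```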
15 | 16 | ### Terrain attributes 17 | 18 | References for **curvatures** are detailed further below the other terrain attributes. 19 | 20 | ```{list-table} 21 | :widths: 1 2 22 | :header-rows: 1 23 | :stub-columns: 1 24 | 25 | * - Method 26 | - Reference 27 | * - Partial derivatives of slope 28 | - [Horn (1981)](http://dx.doi.org/10.1109/PROC.1981.11918) or [Zevenbergen and Thorne (1987)](http://dx.doi.org/10.1002/esp.3290120107) 29 | * - Slope, aspect and hillshade 30 | - [Horn (1981)](http://dx.doi.org/10.1109/PROC.1981.11918) or [Zevenbergen and Thorne (1987)](http://dx.doi.org/10.1002/esp.3290120107) 31 | * - Topographic position index 32 | - [Weiss (2001)](http://www.jennessent.com/downloads/TPI-poster-TNC_18x22.pdf) 33 | * - Terrain ruggedness index 34 | - [Riley et al. (1999)](http://download.osgeo.org/qgis/doc/reference-docs/Terrain_Ruggedness_Index.pdf) or [Wilson et al. (2007)](http://dx.doi.org/10.1080/01490410701295962) 35 | * - Roughness 36 | - [Dartnell (2000)](https://environment.sfsu.edu/node/11292) 37 | * - Rugosity 38 | - [Jenness (2004)]() 39 | * - Fractal roughness 40 | - [Taud and Parrot (2005)](https://doi.org/10.4000/geomorphologie.622) 41 | * - Texture shading 42 | - [Brown (2010)](https://mountaincartography.icaci.org/activities/workshops/banff_canada/papers/brown.pdf) and [Allmendinger and Karabinos (2023)](https://doi.org/10.1130/GES02531.1) 43 | ``` 44 | 45 | **Curvatures** follow the recommended system of [Minár et al. (2020)](https://doi.org/10.1016/j.earscirev.2020.103414). Where no direct DOI can be linked, consult this paper for the full citation. 46 | There are two ways of defining curvatures: either _geometric_ (curvatures can be defined by the radius of a circle), or _directional derivative_ (curvatures can be understood as directional derivatives of the elevation field). 47 | xDEM defaults to the geometric method. 48 | 49 | ```{list-table} 50 | :widths: 1 1 1 51 | :header-rows: 1 52 | :stub-columns: 1 53 | * - Method 54 | - Geometric 55 | - Directional derivative 56 | * - Profile curvature 57 | - Krcho (1973) and Evans (1979) 58 | - [Zevenbergen and Thorne (1987)](http://dx.doi.org/10.1002/esp.3290120107) 59 | * - Tangential curvature 60 | - Krcho (1983) 61 | - [Zevenbergen and Thorne (1987)](http://dx.doi.org/10.1002/esp.3290120107) 62 | * - Planform curvature 63 | - Sobolevsky (1932) 64 | - Sobolevsky (1932) 65 | * - Flowline curvature 66 | - [Minár et al. 
(2020)](https://doi.org/10.1016/j.earscirev.2020.103414) 67 | - Shary (1991) 68 | * - Maximal/Maximum curvature 69 | - [Shary (1995)](https://doi.org/10.1007/BF02084608) 70 | - [Wood (1996)](https://lra.le.ac.uk/handle/2381/34503) 71 | * - Minimal/Minimum curvature 72 | - [Shary (1995)](https://doi.org/10.1007/BF02084608) 73 | - [Wood (1996)](https://lra.le.ac.uk/handle/2381/34503) 74 | ``` 75 | 76 | ### Coregistration 77 | 78 | ```{list-table} 79 | :widths: 1 2 80 | :header-rows: 1 81 | :stub-columns: 1 82 | 83 | * - Method 84 | - Reference 85 | * - Nuth and Kääb 86 | - [Nuth and Kääb (2011)](https://doi.org/10.5194/tc-5-271-2011) 87 | * - Dh minimization 88 | - N/A 89 | * - Least Z-difference 90 | - [Rosenholm and Torlegård (1988)](https://www.asprs.org/wp-content/uploads/pers/1988journal/oct/1988_oct_1385-1389.pdf) 91 | * - Iterative closest point 92 | - [Besl and McKay (1992)](https://doi.org/10.1117/12.57955), [Chen and Medioni (1992)](https://doi.org/10.1016/0262-8856(92)90066-C) 93 | * - Coherent point drift 94 | - [Myronenko and Song (2010)](https://doi.org/10.1109/TPAMI.2010.46) 95 | * - Vertical shift 96 | - N/A 97 | ``` 98 | 99 | ### Bias-correction 100 | 101 | ```{list-table} 102 | :widths: 1 2 103 | :header-rows: 1 104 | :stub-columns: 1 105 | 106 | * - Method 107 | - Reference 108 | * - Deramp 109 | - N/A 110 | * - Directional bias (sinusoids) 111 | - [Girod et al. (2017)](https://doi.org/10.3390/rs9070704) 112 | * - Terrain bias (curvature) 113 | - [Gardelle et al. (2012)](https://doi.org/10.3189/2012JoG11J175) 114 | * - Terrain bias (elevation) 115 | - [Nuth and Kääb (2011)](https://doi.org/10.5194/tc-5-271-2011) 116 | * - Vertical shift 117 | - N/A 118 | ``` 119 | 120 | ### Gap-filling 121 | 122 | ```{list-table} 123 | :widths: 1 2 124 | :header-rows: 1 125 | :stub-columns: 1 126 | 127 | * - Method 128 | - Reference 129 | * - Bilinear 130 | - N/A 131 | * - Local and regional hypsometric 132 | - [Arendt et al. (2002)](https://doi.org/10.1126/science.1072497), [McNabb et al. (2019)](https://tc.copernicus.org/articles/13/895/2019/) 133 | ``` 134 | 135 | 136 | ### Uncertainty analysis 137 | 138 | ```{list-table} 139 | :widths: 1 1 140 | :header-rows: 1 141 | :stub-columns: 1 142 | 143 | * - Method 144 | - Reference 145 | * - R2009 (nested ranges, circular approx.) 146 | - [Rolstad et al. (2009)](http://dx.doi.org/10.3189/002214309789470950) 147 | * - H2022 (heterosc., nested ranges, spatial propag.) 148 | - [Hugonnet et al. (2022)](http://dx.doi.org/10.1109/JSTARS.2022.3188922) 149 | ``` 150 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, caste, color, religion, or sexual 10 | identity and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall
  community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or advances of
  any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address,
  without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the responsible community leaders; please refer to the "Steering
Committee" section in [AUTHORS.md](AUTHORS.md).

All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series of
actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
This 92 | includes avoiding interactions in community spaces as well as external channels 93 | like social media. Violating these terms may lead to a temporary or permanent 94 | ban. 95 | 96 | ### 3. Temporary Ban 97 | 98 | **Community Impact**: A serious violation of community standards, including 99 | sustained inappropriate behavior. 100 | 101 | **Consequence**: A temporary ban from any sort of interaction or public 102 | communication with the community for a specified period of time. No public or 103 | private interaction with the people involved, including unsolicited interaction 104 | with those enforcing the Code of Conduct, is allowed during this period. 105 | Violating these terms may lead to a permanent ban. 106 | 107 | ### 4. Permanent Ban 108 | 109 | **Community Impact**: Demonstrating a pattern of violation of community 110 | standards, including sustained inappropriate behavior, harassment of an 111 | individual, or aggression toward or disparagement of classes of individuals. 112 | 113 | **Consequence**: A permanent ban from any sort of public interaction within the 114 | community. 115 | 116 | ## Attribution 117 | 118 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 119 | version 2.1, available at 120 | [https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. 121 | 122 | Community Impact Guidelines were inspired by 123 | [Mozilla's code of conduct enforcement ladder][Mozilla CoC]. 124 | 125 | For answers to common questions about this code of conduct, see the FAQ at 126 | [https://www.contributor-covenant.org/faq][FAQ]. Translations are available at 127 | [https://www.contributor-covenant.org/translations][translations]. 128 | 129 | [homepage]: https://www.contributor-covenant.org 130 | [v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html 131 | [Mozilla CoC]: https://github.com/mozilla/diversity 132 | [FAQ]: https://www.contributor-covenant.org/faq 133 | [translations]: https://www.contributor-covenant.org/translations 134 | --------------------------------------------------------------------------------