├── .editorconfig ├── .github ├── ISSUE_TEMPLATE.md └── workflows │ ├── docs.yml │ ├── macos.yml │ ├── pypi.yml │ ├── ubuntu.yml │ └── windows.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .travis.yml ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── binder └── environment.yml ├── changelog.md ├── contributing.md ├── docs ├── CNAME ├── Makefile ├── authors.rst ├── changelog.md ├── citations.md ├── common.md ├── conf.py ├── contributing.md ├── contributing.rst ├── filling.md ├── filtering.md ├── get-started.md ├── history.rst ├── index.md ├── index.rst ├── installation.md ├── installation.rst ├── lidar.rst ├── make.bat ├── modules.rst ├── mounts.md ├── notebooks │ ├── inundation.ipynb │ ├── lidar.ipynb │ ├── lidar_colab.ipynb │ └── lidar_dsm.ipynb ├── overrides │ └── main.html ├── readme.rst ├── slicing.md ├── usage.md └── usage.rst ├── examples ├── data │ └── huc8.csv ├── inundation.ipynb ├── lidar-dem.zip ├── lidar-dem │ ├── catchment.tif │ ├── dem.tif │ ├── dem_full.tif │ ├── dsm.tif │ └── sink.tif ├── lidar.ipynb ├── lidar_colab.ipynb └── lidar_dsm.ipynb ├── images ├── CLSA_DEM.jpg ├── CLSA_Result.jpg ├── CLSA_Table.jpg ├── toolbox_0.png ├── toolbox_1.png ├── toolbox_2.png ├── toolbox_3.png ├── toolbox_4.png ├── toolbox_5.png ├── toolbox_6.png ├── toolbox_7.png └── toolbox_ui.png ├── lidar ├── __init__.py ├── cli.py ├── common.py ├── data │ ├── dem.tif │ ├── dsm.tif │ └── sink.tif ├── example.py ├── filling.py ├── filtering.py ├── gui.py ├── lidar.py ├── mounts.py ├── slicing.py └── toolbox │ ├── ArcGIS Hydrology Analyst.tbx │ ├── ArcGIS Pro Hydrology Analyst.tbx │ └── scripts │ ├── 1_Extract_Sink.py │ ├── 1_Extract_Sink_ArcMap.py │ ├── 2_Delineate_Catchment.py │ ├── 2_Delineate_Catchment_ArcMap.py │ ├── 3_Flow_Path.py │ ├── 3_Flow_Path_ArcMap.py │ ├── 4_Slicing.py │ ├── 5_Catchment_Hierarchy.py │ ├── 6_Simulate Inundation.py │ └── 7_Play_Animation.py ├── mkdocs.yml ├── paper ├── paper.bib └── paper.md ├── requirements.readthedocs.txt ├── 
requirements.txt ├── requirements_dev.txt ├── setup.cfg ├── setup.py ├── tests ├── __init__.py └── test_lidar.py └── tox.ini /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | indent_style = space 7 | indent_size = 4 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | charset = utf-8 11 | end_of_line = lf 12 | 13 | [*.bat] 14 | indent_style = tab 15 | end_of_line = crlf 16 | 17 | [LICENSE] 18 | insert_final_newline = false 19 | 20 | [Makefile] 21 | indent_style = tab 22 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | * lidar version: 2 | * Python version: 3 | * Operating System: 4 | 5 | ### Description 6 | 7 | Describe what you were trying to get done. 8 | Tell us what happened, what went wrong, and what you expected to happen. 9 | 10 | ### What I Did 11 | 12 | ``` 13 | Paste the command(s) you ran and the output. 14 | If there was a crash, please include the traceback here. 
15 | ``` 16 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: docs 2 | on: 3 | push: 4 | branches: 5 | - master 6 | jobs: 7 | deploy: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v4 11 | - uses: actions/setup-python@v5 12 | with: 13 | python-version: "3.9" 14 | - name: Install dependencies 15 | run: | 16 | sudo apt-add-repository ppa:ubuntugis/ubuntugis-unstable -y 17 | sudo apt-get -qq update 18 | sudo apt-get install gdal-bin libgdal-dev -y 19 | export CPLUS_INCLUDE_PATH=/usr/include/gdal 20 | export CPLUS_INCLUDE_PATH=/usr/include/gdal 21 | gdal-config --version 22 | gdalinfo --version 23 | python -m pip install --upgrade pip 24 | pip install wheel 25 | pip install --user --no-cache-dir Cython 26 | pip install --find-links=https://girder.github.io/large_image_wheels --no-cache GDAL 27 | pip install --user -r requirements.txt 28 | # pip install pygdal==3.3.2.10 29 | - run: pip install mkdocs-material mkdocstrings mkdocstrings-python mkdocs-git-revision-date-plugin mkdocs-jupyter mkdocs-pdf-export-plugin 30 | - run: mkdocs gh-deploy --force 31 | -------------------------------------------------------------------------------- /.github/workflows/macos.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - master 5 | pull_request: 6 | branches: 7 | - master 8 | 9 | name: macOS build 10 | jobs: 11 | test-macOS: 12 | runs-on: ${{ matrix.config.os }} 13 | name: ${{ matrix.config.os }} (${{ matrix.config.py }}) 14 | strategy: 15 | fail-fast: false 16 | matrix: 17 | config: 18 | - { os: macOS-latest, py: "3.12" } 19 | env: 20 | SDKROOT: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk 21 | steps: 22 | - uses: actions/checkout@v4 23 | 24 | - name: Setup Python 25 | uses: conda-incubator/setup-miniconda@v3 26 | with: 27 | 
auto-activate-base: true 28 | python-version: ${{ matrix.config.py }} 29 | channels: conda-forge,defaults 30 | channel-priority: true 31 | miniconda-version: latest 32 | 33 | - name: Cache dependencies 34 | uses: actions/cache@v4 35 | with: 36 | path: ~/.cache/pip 37 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements*.txt') }} 38 | restore-keys: | 39 | ${{ runner.os }}-pip- 40 | 41 | # - name: Testing conda 42 | # run: | 43 | # conda info 44 | # conda list 45 | 46 | # - name: Install GDAL 47 | # run: | 48 | # conda install -c conda-forge mamba --yes 49 | # mamba install -c conda-forge gdal pyproj richdem lidar --yes 50 | # pip install -U whitebox 51 | 52 | # - name: Test GDAL installation 53 | # run: | 54 | # python -c "from osgeo import gdal" 55 | # gdalinfo --version 56 | 57 | # - name: Install dependencies 58 | # run: | 59 | # pip install -r requirements.txt -r requirements_dev.txt 60 | # pip install . 61 | 62 | # - name: Test import 63 | # run: python -c "import lidar; print('lidar import successful')" 64 | -------------------------------------------------------------------------------- /.github/workflows/pypi.yml: -------------------------------------------------------------------------------- 1 | # This workflows will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | name: pypi 5 | 6 | on: 7 | release: 8 | types: [created] 9 | 10 | jobs: 11 | deploy: 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Set up Python 17 | uses: actions/setup-python@v5 18 | with: 19 | python-version: "3.x" 20 | - name: Install dependencies 21 | run: | 22 | python -m pip install --upgrade pip 23 | pip install setuptools wheel twine 24 | - name: Build and publish 25 | env: 26 | TWINE_USERNAME: ${{ secrets.PYPI_USERS }} 27 | TWINE_PASSWORD: ${{ 
secrets.PYPI_PASSWORD }} 28 | run: | 29 | python setup.py sdist bdist_wheel 30 | twine upload dist/* 31 | -------------------------------------------------------------------------------- /.github/workflows/ubuntu.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - master 5 | pull_request: 6 | branches: 7 | - master 8 | 9 | name: Linux build 10 | jobs: 11 | test-ubuntu: 12 | runs-on: ubuntu-latest 13 | strategy: 14 | fail-fast: false 15 | matrix: 16 | python-version: ["3.9", "3.10", "3.11", "3.12"] 17 | 18 | defaults: 19 | run: 20 | shell: bash -el {0} 21 | 22 | steps: 23 | - uses: actions/checkout@v4 24 | 25 | - name: Setup Python 26 | uses: conda-incubator/setup-miniconda@v3 27 | with: 28 | auto-activate-base: true 29 | python-version: ${{ matrix.python-version }} 30 | channels: conda-forge,defaults 31 | channel-priority: true 32 | miniconda-version: latest 33 | 34 | - name: Cache dependencies 35 | uses: actions/cache@v4 36 | with: 37 | path: ~/.cache/pip 38 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements*.txt') }} 39 | restore-keys: | 40 | ${{ runner.os }}-pip- 41 | 42 | - name: Testing conda 43 | run: | 44 | conda info 45 | conda list 46 | 47 | - name: Install GDAL 48 | run: | 49 | # pip install --no-cache-dir Cython 50 | # pip install --find-links=https://girder.github.io/large_image_wheels --no-cache GDAL 51 | conda install gdal --yes 52 | conda install -c conda-forge mamba --yes 53 | mamba install -c conda-forge pyproj richdem lidar --yes 54 | 55 | # - name: Test GDAL installation 56 | # run: | 57 | # python -c "from osgeo import gdal" 58 | # gdalinfo --version 59 | 60 | - name: Install dependencies 61 | run: | 62 | pip install -r requirements.txt -r requirements_dev.txt 63 | pip install . 
64 | 65 | # - name: Test import 66 | # run: python -c "import lidar; print('lidar import successful')" 67 | -------------------------------------------------------------------------------- /.github/workflows/windows.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - master 5 | pull_request: 6 | branches: 7 | - master 8 | 9 | name: Windows build 10 | jobs: 11 | test-windows: 12 | runs-on: windows-latest 13 | strategy: 14 | matrix: 15 | python-version: ["3.12"] 16 | 17 | defaults: 18 | run: 19 | shell: bash -el {0} 20 | 21 | steps: 22 | - uses: actions/checkout@v4 23 | 24 | - name: Setup Python 25 | uses: conda-incubator/setup-miniconda@v3 26 | with: 27 | auto-activate-base: true 28 | python-version: ${{ matrix.python-version }} 29 | channels: conda-forge,defaults 30 | channel-priority: true 31 | miniconda-version: latest 32 | 33 | - name: Cache dependencies 34 | uses: actions/cache@v4 35 | with: 36 | path: ~/.cache/pip 37 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements*.txt') }} 38 | restore-keys: | 39 | ${{ runner.os }}-pip- 40 | 41 | - name: Testing conda 42 | run: | 43 | conda info 44 | conda list 45 | 46 | - name: Install GDAL 47 | run: | 48 | conda install -c conda-forge mamba --yes 49 | mamba install -c conda-forge gdal pyproj richdem --yes 50 | 51 | # - name: Test GDAL installation 52 | # run: | 53 | # python -c "from osgeo import gdal" 54 | # gdalinfo --version 55 | 56 | - name: Install dependencies 57 | run: | 58 | pip install -r requirements.txt 59 | pip install . 
60 | 61 | - name: Test import 62 | run: python -c "import lidar; print('lidar import successful')" 63 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | .idea/ 6 | .vscode/ 7 | docs/_static 8 | testing/ 9 | 10 | # exclude files 11 | lidar/levelset.py 12 | dev/ 13 | examples/temp 14 | **/*.tif 15 | **/*.las 16 | **/*.laz 17 | 18 | # tif files extra 19 | *.tfw 20 | *.aux.xml 21 | 22 | # C extensions 23 | *.so 24 | 25 | # Distribution / packaging 26 | .Python 27 | env/ 28 | build/ 29 | develop-eggs/ 30 | dist/ 31 | downloads/ 32 | eggs/ 33 | .eggs/ 34 | lib/ 35 | lib64/ 36 | parts/ 37 | sdist/ 38 | var/ 39 | wheels/ 40 | *.egg-info/ 41 | .installed.cfg 42 | *.egg 43 | 44 | # PyInstaller 45 | # Usually these files are written by a python script from a template 46 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
47 | *.manifest 48 | *.spec 49 | 50 | # Installer logs 51 | pip-log.txt 52 | pip-delete-this-directory.txt 53 | 54 | # Unit test / coverage reports 55 | htmlcov/ 56 | .tox/ 57 | .coverage 58 | .coverage.* 59 | .cache 60 | nosetests.xml 61 | coverage.xml 62 | *.cover 63 | .hypothesis/ 64 | .pytest_cache/ 65 | 66 | # Translations 67 | *.mo 68 | *.pot 69 | 70 | # Django stuff: 71 | *.log 72 | local_settings.py 73 | 74 | # Flask stuff: 75 | instance/ 76 | .webassets-cache 77 | 78 | # Scrapy stuff: 79 | .scrapy 80 | 81 | # Sphinx documentation 82 | docs/_build/ 83 | 84 | # PyBuilder 85 | target/ 86 | 87 | # Jupyter Notebook 88 | .ipynb_checkpoints 89 | 90 | # pyenv 91 | .python-version 92 | 93 | # celery beat schedule file 94 | celerybeat-schedule 95 | 96 | # SageMath parsed files 97 | *.sage.py 98 | 99 | # dotenv 100 | .env 101 | 102 | # virtualenv 103 | .venv 104 | venv/ 105 | ENV/ 106 | 107 | # Spyder project settings 108 | .spyderproject 109 | .spyproject 110 | 111 | # Rope project settings 112 | .ropeproject 113 | 114 | # mkdocs documentation 115 | /site 116 | 117 | # mypy 118 | .mypy_cache/ 119 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v5.0.0 4 | hooks: 5 | - id: check-toml 6 | - id: check-yaml 7 | - id: end-of-file-fixer 8 | types: [python] 9 | - id: trailing-whitespace 10 | - id: requirements-txt-fixer 11 | - id: check-added-large-files 12 | args: ["--maxkb=500"] 13 | 14 | - repo: https://github.com/psf/black 15 | rev: 25.1.0 16 | hooks: 17 | - id: black-jupyter 18 | 19 | # - repo: https://github.com/codespell-project/codespell 20 | # rev: v2.2.6 21 | # hooks: 22 | # - id: codespell 23 | # args: [--toml, pyproject-codespell.precommit-toml] 24 | 25 | # - repo: https://github.com/kynan/nbstripout 26 | # rev: 0.7.1 27 | # hooks: 28 | # - id: 
nbstripout 29 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - 3.7 4 | before_install: 5 | - sudo apt-add-repository ppa:ubuntugis/ubuntugis-unstable -y 6 | - sudo apt-get -qq update 7 | - sudo apt-get install gdal-bin libgdal-dev -y 8 | - export CPLUS_INCLUDE_PATH=/usr/include/gdal 9 | - export CPLUS_INCLUDE_PATH=/usr/include/gdal 10 | - gdal-config --version 11 | - gdalinfo --version 12 | install: pip install -U tox-travis 13 | script: tox 14 | #deploy: 15 | # provider: pypi 16 | # skip_existing: true 17 | # distributions: sdist bdist_wheel 18 | # user: giswqs 19 | # password: 20 | # secure: fgwacryZ5tJOFZroAqiu4bln3VG/0/U2XawA79Ury+fFtj30p6m2eokNWHgpcO1bIZecNSOUEB1nNKdDlq5c8RxpiEc2T64HsujQXWybjPHtCrfImccRF0fTBtSJ+mXh+/C7ZzPReJGZnoPzkR4EJWLt2chqYa0eix4W3mR0SLHrEpM7Ouyo/tvl+B2ZHBS8d03opqr4Eou0feL1A7zftGXfgndgBS7LSYi5wgxsAfgu+YvLSQhu3yEYJjCaKfHpFVXI3dbm9YAC0zbFxKMqyY8Px3/H5e/MQ0dO5gRRpica8K5qelFIASZDMN9XRgD+lFbtlpj7SYfNfvM9rMaQCV9EnTI+gghJIzy2mIO8kgd6Uexd3qFOB3XJN8hbX44bATXKzpa9bjuXyBTnFyybBjHfXQJbdCXvDS70jim+uMZapl2fxxrY/Y8VYvduJ6e9BewbNoe7WxAJNfAD5+9fB2O7gDWKIxDvsJRa6kIj04R5aWxw7gUnuOwERcTm9OR1+1F97e4788fHiu1s5sbkZanHH7ximDhfPw4xYz9CgJrl6cOiP3N5w+1/I7Ko2vuR7TsEDY3uQFbw9Rrgex/+Ln7xZLtH1bDgKGirboXd5nQ6Z6n5kUCJk1JLzzs4MhcUg9tV/easbRiGXgYRRYuH5NbX/fCSnx/F3Oj5NcAQpNg= 21 | # on: 22 | # tags: true 23 | # repo: giswqs/lidar 24 | # python: 3.6 25 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018, Qiusheng Wu 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation 
the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include contributing.md 2 | include changelog.md 3 | include LICENSE 4 | include README.md 5 | include requirements.txt 6 | include lidar/data/*.* 7 | 8 | recursive-include tests * 9 | recursive-exclude * __pycache__ 10 | recursive-exclude * *.py[co] 11 | recursive-exclude * levelset.py 12 | 13 | 14 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif 15 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean clean-test clean-pyc clean-build docs help 2 | .DEFAULT_GOAL := help 3 | 4 | define BROWSER_PYSCRIPT 5 | import os, webbrowser, sys 6 | 7 | try: 8 | from urllib import pathname2url 9 | except: 10 | from urllib.request import pathname2url 11 | 12 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) 13 | endef 14 | export BROWSER_PYSCRIPT 15 | 16 | 
define PRINT_HELP_PYSCRIPT 17 | import re, sys 18 | 19 | for line in sys.stdin: 20 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) 21 | if match: 22 | target, help = match.groups() 23 | print("%-20s %s" % (target, help)) 24 | endef 25 | export PRINT_HELP_PYSCRIPT 26 | 27 | BROWSER := python -c "$$BROWSER_PYSCRIPT" 28 | 29 | help: 30 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) 31 | 32 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts 33 | 34 | clean-build: ## remove build artifacts 35 | rm -fr build/ 36 | rm -fr dist/ 37 | rm -fr .eggs/ 38 | find . -name '*.egg-info' -exec rm -fr {} + 39 | find . -name '*.egg' -exec rm -f {} + 40 | 41 | clean-pyc: ## remove Python file artifacts 42 | find . -name '*.pyc' -exec rm -f {} + 43 | find . -name '*.pyo' -exec rm -f {} + 44 | find . -name '*~' -exec rm -f {} + 45 | find . -name '__pycache__' -exec rm -fr {} + 46 | 47 | clean-test: ## remove test and coverage artifacts 48 | rm -fr .tox/ 49 | rm -f .coverage 50 | rm -fr htmlcov/ 51 | rm -fr .pytest_cache 52 | 53 | lint: ## check style with flake8 54 | flake8 lidar tests 55 | 56 | test: ## run tests quickly with the default Python 57 | python setup.py test 58 | 59 | test-all: ## run tests on every Python version with tox 60 | tox 61 | 62 | coverage: ## check code coverage quickly with the default Python 63 | coverage run --source lidar setup.py test 64 | coverage report -m 65 | coverage html 66 | $(BROWSER) htmlcov/index.html 67 | 68 | docs: ## generate Sphinx HTML documentation, including API docs 69 | rm -f docs/lidar.rst 70 | rm -f docs/modules.rst 71 | sphinx-apidoc -o docs/ lidar 72 | $(MAKE) -C docs clean 73 | $(MAKE) -C docs html 74 | $(BROWSER) docs/_build/html/index.html 75 | 76 | servedocs: docs ## compile the docs watching for changes 77 | watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . 
78 | 79 | release: dist ## package and upload a release 80 | twine upload dist/* 81 | 82 | dist: clean ## builds source and wheel package 83 | python setup.py sdist 84 | python setup.py bdist_wheel 85 | ls -l dist 86 | 87 | install: clean ## install the package to the active Python's site-packages 88 | python setup.py install 89 | -------------------------------------------------------------------------------- /binder/environment.yml: -------------------------------------------------------------------------------- 1 | name: lidar-environment 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.6 6 | - scikit-image 7 | - numpy 8 | - matplotlib 9 | - scipy 10 | - gdal 11 | - imageio 12 | - pip: 13 | - pygdal 14 | - lidar 15 | - tifffile 16 | - requests 17 | - pygis 18 | 19 | 20 | -------------------------------------------------------------------------------- /changelog.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## v0.5.2 - February 10, 2021 4 | 5 | - Added new documentation website () 6 | - Improved JOSS paper 7 | - Cleaned up source code 8 | 9 | ## v0.5.1 - December 12, 2020 10 | 11 | ## v0.2.0 - September 16, 2018 12 | 13 | ## v0.1.6 - May 21, 2018 14 | 15 | ## 0.1.5 - May 16, 2018 16 | 17 | ## 0.1.3 - May 15, 2018 18 | 19 | ## 0.1.0 - May 14, 2018 20 | -------------------------------------------------------------------------------- /contributing.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Contributions are welcome, and they are greatly appreciated! Every 4 | little bit helps, and credit will always be given. You can contribute in many ways: 5 | 6 | ## Types of Contributions 7 | 8 | ### Report Bugs 9 | 10 | Report bugs at . 11 | 12 | If you are reporting a bug, please include: 13 | 14 | - Your operating system name and version. 15 | - Any details about your local setup that might be helpful in troubleshooting. 
16 | - Detailed steps to reproduce the bug. 17 | 18 | ### Fix Bugs 19 | 20 | Look through the GitHub issues for bugs. Anything tagged with "bug" and "help wanted" is open to whoever wants to implement it. 21 | 22 | ### Implement Features 23 | 24 | Look through the GitHub issues for features. Anything tagged with "enhancement" and "help wanted" is open to whoever wants to implement it. 25 | 26 | ### Write Documentation 27 | 28 | lidar could always use more documentation, whether as part of the official lidar docs, in docstrings, or even on the web in blog posts, articles, and such. 29 | 30 | ### Submit Feedback 31 | 32 | The best way to send feedback is to file an issue at . 33 | 34 | If you are proposing a feature: 35 | 36 | - Explain in detail how it would work. 37 | - Keep the scope as narrow as possible, to make it easier to implement. 38 | - Remember that this is a volunteer-driven project, and that contributions are welcome. 39 | 40 | ## Get Started 41 | 42 | Ready to contribute? Here's how to set up _lidar_ for local development. 43 | 44 | 1. Fork the [lidar](https://github.com/opengeos/lidar) repo on GitHub. 45 | 46 | 2. Clone your fork locally: 47 | 48 | ```console 49 | git clone git@github.com:your_name_here/lidar.git 50 | ``` 51 | 52 | 3. Install your local copy into a conda env. Assuming you have conda installed, this is how you set up your fork for local development: 53 | 54 | ```console 55 | conda create -n lidar-test python 56 | conda activate lidar-test 57 | cd lidar/ 58 | pip install -e . 59 | ``` 60 | 61 | 4. Create a branch for local development: 62 | 63 | ```console 64 | git checkout -b name-of-your-bugfix-or-feature 65 | ``` 66 | 67 | Now you can make your changes locally. 68 | 69 | 5. 
When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox: 70 | 71 | ```console 72 | flake8 lidar tests 73 | python setup.py test or pytest 74 | ``` 75 | 76 | To get flake8 and tox, just pip install them into your conda env. 77 | 78 | 6. Commit your changes and push your branch to GitHub: 79 | 80 | ```console 81 | git add . 82 | git commit -m "Your detailed description of your changes." 83 | git push origin name-of-your-bugfix-or-feature 84 | ``` 85 | 86 | 7. Submit a pull request through the GitHub website. 87 | 88 | ## Pull Request Guidelines 89 | 90 | Before you submit a pull request, check that it meets these guidelines: 91 | 92 | 1. The pull request should include tests. 93 | 2. If the pull request adds functionality, the docs should be updated. Put your new functionality into a function with a docstring, and add the feature to the list in README.md. 94 | 3. The pull request should work for Python 3.7 and 3.8. Check and make sure that the tests pass for all supported Python versions. 95 | -------------------------------------------------------------------------------- /docs/CNAME: -------------------------------------------------------------------------------- 1 | lidar.gishub.org -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = python -msphinx 7 | SPHINXPROJ = lidar 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. 
$(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../AUTHORS.rst 2 | -------------------------------------------------------------------------------- /docs/changelog.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## v0.7.0 - Sep 23, 2022 4 | 5 | - Added extract sinks by huc and bbox 6 | - Added huc8 data 7 | - Added download ned functions 8 | - Removed pygdal 9 | 10 | ## v0.6.0 - Feb 27, 2021 11 | 12 | - Improved documentation 13 | - Added ArcGIS toolbox tutorials 14 | - Addressed JOSS review comments 15 | 16 | ## v0.5.3 - Feb 10, 2021 17 | 18 | - Fixed PyPI markdown rendering error 19 | 20 | ## v0.5.2 - Feb 10, 2021 21 | 22 | - Added new documentation website () 23 | - Improved JOSS paper 24 | - Cleaned up source code 25 | 26 | ## v0.5.1 - Dec 12, 2020 27 | 28 | ## v0.2.0 - Sep 16, 2018 29 | 30 | ## v0.1.6 - May 21, 2018 31 | 32 | ## 0.1.5 - May 16, 2018 33 | 34 | ## 0.1.3 - May 15, 2018 35 | 36 | ## 0.1.0 - May 14, 2018 37 | -------------------------------------------------------------------------------- /docs/citations.md: -------------------------------------------------------------------------------- 1 | The level-set algorithm was proposed by **Wu** et al. (2019): 2 | 3 | - **Wu, Q.**, Lane, C.R., Wang, L., Vanderhoof, M.K., Christensen, 4 | J.R., & Liu, H. (2019). Efficient Delineation of Nested Depression 5 | Hierarchy in Digital Elevation Models for Hydrological Analysis 6 | Using Level-Set Method. *Journal of the American Water Resources 7 | Association*. 
DOI: [10.1111/1752-1688.12689](https://doi.org/10.1111/1752-1688.12689) ([PDF](https://spatial.utk.edu/pubs/2019_JAWRA.pdf)) 8 | 9 | Applications of the level-set and contour-tree methods for feature 10 | extraction from LiDAR data: 11 | 12 | - **Wu, Q.**, & Lane, C.R. (2017). Delineating wetland catchments and 13 | modeling hydrologic connectivity using LiDAR data and aerial 14 | imagery. *Hydrology and Earth System Sciences*. 21: 3579-3595. DOI: 15 | [10.5194/hess-21-3579-2017](https://doi.org/10.5194/hess-21-3579-2017) 16 | - **Wu, Q.**, Deng, C., & Chen, Z. (2016). Automated delineation of 17 | karst sinkholes from LiDAR-derived digital elevation models. 18 | *Geomorphology*. 266: 1-10. DOI: 19 | [10.1016/j.geomorph.2016.05.006](http://dx.doi.org/10.1016/j.geomorph.2016.05.006) 20 | - **Wu, Q.**, Su, H., Sherman, D.J., Liu, H., Wozencraft, J.M., Yu, 21 | B., & Chen, Z. (2016). A graph-based approach for assessing 22 | storm-induced coastal changes. *International Journal of Remote 23 | Sensing*. 37:4854-4873. DOI: 24 | [10.1080/01431161.2016.1225180](http://dx.doi.org/10.1080/01431161.2016.1225180) 25 | - **Wu, Q.**, & Lane, C.R. (2016). Delineation and quantification of 26 | wetland depressions in the Prairie Pothole Region of North Dakota. 27 | *Wetlands*. 36(2):215–227. DOI: 28 | [10.1007/s13157-015-0731-6](http://dx.doi.org/10.1007/s13157-015-0731-6) 29 | - **Wu, Q.**, Liu, H., Wang, S., Yu, B., Beck, R., & Hinkel, K. 30 | (2015). A localized contour tree method for deriving geometric and 31 | topological properties of complex surface depressions based on 32 | high-resolution topographic data. *International Journal of 33 | Geographical Information Science*. 29(12): 2041-2060. DOI: 34 | [10.1080/13658816.2015.1038719](http://dx.doi.org/10.1080/13658816.2015.1038719) 35 | - **Wu, Q.**, Lane, C.R., & Liu, H. (2014). An effective method for 36 | detecting potential woodland vernal pools using high-resolution 37 | LiDAR data and aerial imagery. 
*Remote Sensing*. 6(11):11444-11467. 38 | DOI: [10.3390/rs61111444](http://dx.doi.org/10.3390/rs61111444) -------------------------------------------------------------------------------- /docs/common.md: -------------------------------------------------------------------------------- 1 | # utilities module 2 | 3 | ::: lidar.common 4 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # lidar documentation build configuration file, created by 5 | # sphinx-quickstart on Fri Jun 9 13:47:02 2017. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another 17 | # directory, add these directories to sys.path here. If the directory is 18 | # relative to the documentation root, use os.path.abspath to make it 19 | # absolute, like shown here. 20 | # 21 | import os 22 | import sys 23 | 24 | sys.path.insert(0, os.path.abspath("..")) 25 | 26 | from unittest.mock import MagicMock 27 | 28 | 29 | class Mock(MagicMock): 30 | @classmethod 31 | def __getattr__(cls, name): 32 | return MagicMock() 33 | 34 | 35 | MOCK_MODULES = [ 36 | "pygtk", 37 | "gtk", 38 | "gobject", 39 | "argparse", 40 | "numpy", 41 | "pandas", 42 | "richdem", 43 | "scipy", 44 | "skimage", 45 | "osgeo", 46 | # "PySimpleGUI", 47 | ] 48 | sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES) 49 | import lidar 50 | 51 | # -- General configuration --------------------------------------------- 52 | 53 | # If your documentation needs a minimal Sphinx version, state it here. 
54 | # 55 | # needs_sphinx = '1.0' 56 | 57 | # Add any Sphinx extension module names here, as strings. They can be 58 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 59 | extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode", "sphinx.ext.napoleon"] 60 | 61 | # Add any paths that contain templates here, relative to this directory. 62 | templates_path = ["_templates"] 63 | 64 | # The suffix(es) of source filenames. 65 | # You can specify multiple suffix as a list of string: 66 | # 67 | # source_suffix = ['.rst', '.md'] 68 | source_suffix = ".rst" 69 | 70 | # The master toctree document. 71 | master_doc = "index" 72 | 73 | # General information about the project. 74 | project = "lidar" 75 | copyright = "2020, Qiusheng Wu" 76 | author = "Qiusheng Wu" 77 | 78 | # The version info for the project you're documenting, acts as replacement 79 | # for |version| and |release|, also used in various other places throughout 80 | # the built documents. 81 | # 82 | # # The short X.Y version. 83 | version = lidar.__version__ 84 | # # The full version, including alpha/beta/rc tags. 85 | release = lidar.__version__ 86 | 87 | # The language for content autogenerated by Sphinx. Refer to documentation 88 | # for a list of supported languages. 89 | # 90 | # This is also used if you do content translation via gettext catalogs. 91 | # Usually you set "language" from the command line for these cases. 92 | language = None 93 | 94 | # List of patterns, relative to source directory, that match files and 95 | # directories to ignore when looking for source files. 96 | # This patterns also effect to html_static_path and html_extra_path 97 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 98 | 99 | # The name of the Pygments (syntax highlighting) style to use. 100 | pygments_style = "sphinx" 101 | 102 | # If true, `todo` and `todoList` produce output, else they produce nothing. 
103 | todo_include_todos = False 104 | 105 | 106 | # -- Options for HTML output ------------------------------------------- 107 | 108 | # The theme to use for HTML and HTML Help pages. See the documentation for 109 | # a list of builtin themes. 110 | # 111 | # html_theme = 'alabaster' 112 | html_theme = "sphinx_rtd_theme" 113 | on_rtd = os.environ.get("READTHEDOCS", None) == "True" 114 | 115 | # if not on_rtd: # only import and set the theme if we're building docs locally 116 | # import sphinx_rtd_theme 117 | # html_theme = 'sphinx_rtd_theme' 118 | # html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 119 | 120 | # html_context = { 121 | # 'css_files': [ 122 | # '_static/theme_overrides.css' 123 | # ] 124 | # } 125 | # else: 126 | # html_context = { 127 | # 'css_files': [ 128 | # '//media.readthedocs.org/css/sphinx_rtd_theme.css', 129 | # '//media.readthedocs.org/css/readthedocs-doc-embed.css', 130 | # '_static/theme_overrides.css' 131 | # ] 132 | # } 133 | 134 | # Theme options are theme-specific and customize the look and feel of a 135 | # theme further. For a list of options available for each theme, see the 136 | # documentation. 137 | # 138 | # html_theme_options = {} 139 | 140 | # Add any paths that contain custom static files (such as style sheets) here, 141 | # relative to this directory. They are copied after the builtin static files, 142 | # so a file named "default.css" will overwrite the builtin "default.css". 143 | html_static_path = ["_static"] 144 | 145 | 146 | # -- Options for HTMLHelp output --------------------------------------- 147 | 148 | # Output file base name for HTML help builder. 149 | htmlhelp_basename = "lidardoc" 150 | 151 | 152 | # -- Options for LaTeX output ------------------------------------------ 153 | 154 | latex_elements = { 155 | # The paper size ('letterpaper' or 'a4paper'). 156 | # 157 | # 'papersize': 'letterpaper', 158 | # The font size ('10pt', '11pt' or '12pt'). 
159 | # 160 | # 'pointsize': '10pt', 161 | # Additional stuff for the LaTeX preamble. 162 | # 163 | # 'preamble': '', 164 | # Latex figure (float) alignment 165 | # 166 | # 'figure_align': 'htbp', 167 | } 168 | 169 | # Grouping the document tree into LaTeX files. List of tuples 170 | # (source start file, target name, title, author, documentclass 171 | # [howto, manual, or own class]). 172 | latex_documents = [ 173 | (master_doc, "lidar.tex", "lidar Documentation", "Qiusheng Wu", "manual"), 174 | ] 175 | 176 | 177 | # -- Options for manual page output ------------------------------------ 178 | 179 | # One entry per manual page. List of tuples 180 | # (source start file, name, description, authors, manual section). 181 | man_pages = [(master_doc, "lidar", "lidar Documentation", [author], 1)] 182 | 183 | 184 | # -- Options for Texinfo output ---------------------------------------- 185 | 186 | # Grouping the document tree into Texinfo files. List of tuples 187 | # (source start file, target name, title, author, 188 | # dir menu entry, description, category) 189 | texinfo_documents = [ 190 | ( 191 | master_doc, 192 | "lidar", 193 | "lidar Documentation", 194 | author, 195 | "lidar", 196 | "One line description of project.", 197 | "Miscellaneous", 198 | ), 199 | ] 200 | -------------------------------------------------------------------------------- /docs/contributing.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Contributions are welcome, and they are greatly appreciated! Every 4 | little bit helps, and credit will always be given. You can contribute in many ways: 5 | 6 | ## Types of Contributions 7 | 8 | ### Report Bugs 9 | 10 | Report bugs at . 11 | 12 | If you are reporting a bug, please include: 13 | 14 | - Your operating system name and version. 15 | - Any details about your local setup that might be helpful in troubleshooting. 16 | - Detailed steps to reproduce the bug. 
17 | 18 | ### Fix Bugs 19 | 20 | Look through the GitHub issues for bugs. Anything tagged with "bug" and "help wanted" is open to whoever wants to implement it. 21 | 22 | ### Implement Features 23 | 24 | Look through the GitHub issues for features. Anything tagged with "enhancement" and "help wanted" is open to whoever wants to implement it. 25 | 26 | ### Write Documentation 27 | 28 | lidar could always use more documentation, whether as part of the official lidar docs, in docstrings, or even on the web in blog posts, articles, and such. 29 | 30 | ### Submit Feedback 31 | 32 | The best way to send feedback is to file an issue at . 33 | 34 | If you are proposing a feature: 35 | 36 | - Explain in detail how it would work. 37 | - Keep the scope as narrow as possible, to make it easier to implement. 38 | - Remember that this is a volunteer-driven project, and that contributions are welcome. 39 | 40 | ## Get Started 41 | 42 | Ready to contribute? Here's how to set up _lidar_ for local development. 43 | 44 | 1. Fork the [lidar](https://github.com/opengeos/lidar) repo on GitHub. 45 | 46 | 2. Clone your fork locally: 47 | 48 | ```console 49 | git clone git@github.com:your_name_here/lidar.git 50 | ``` 51 | 52 | 3. Install your local copy into a conda env. Assuming you have conda installed, this is how you set up your fork for local development: 53 | 54 | ```console 55 | conda create -n lidar-test python 56 | conda activate lidar-test 57 | cd lidar/ 58 | pip install -e . 59 | ``` 60 | 61 | 4. Create a branch for local development: 62 | 63 | ```console 64 | git checkout -b name-of-your-bugfix-or-feature 65 | ``` 66 | 67 | Now you can make your changes locally. 68 | 69 | 5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox: 70 | 71 | ```console 72 | flake8 lidar tests 73 | python setup.py test or pytest 74 | ``` 75 | 76 | To get flake8 and tox, just pip install them into your conda env. 
77 | 78 | 6. Commit your changes and push your branch to GitHub: 79 | 80 | ```console 81 | git add . 82 | git commit -m "Your detailed description of your changes." 83 | git push origin name-of-your-bugfix-or-feature 84 | ``` 85 | 86 | 7. Submit a pull request through the GitHub website. 87 | 88 | ## Pull Request Guidelines 89 | 90 | Before you submit a pull request, check that it meets these guidelines: 91 | 92 | 1. The pull request should include tests. 93 | 2. If the pull request adds functionality, the docs should be updated. Put your new functionality into a function with a docstring, and add the feature to the list in README.md. 94 | 3. The pull request should work for Python 3.7 and 3.8. Check and make sure that the tests pass for all supported Python versions. 95 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/filling.md: -------------------------------------------------------------------------------- 1 | # filling module 2 | 3 | ::: lidar.filling -------------------------------------------------------------------------------- /docs/filtering.md: -------------------------------------------------------------------------------- 1 | # filtering module 2 | 3 | ::: lidar.filtering -------------------------------------------------------------------------------- /docs/get-started.md: -------------------------------------------------------------------------------- 1 | Launch the interactive notebook tutorial for the **lidar** Python 2 | package with **Google Colab** now: 3 | 4 | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/giswqs/lidar/blob/master/examples/lidar_colab.ipynb) 5 | 6 | ## A Quick Example 7 | 8 | ```python 9 | import 
os 10 | import pkg_resources 11 | from lidar import * 12 | 13 | # identify the sample data directory of the package 14 | package_name = 'lidar' 15 | data_dir = pkg_resources.resource_filename(package_name, 'data/') 16 | 17 | # use the sample dem. Change it to your own dem if needed 18 | in_dem = os.path.join(data_dir, 'dem.tif') 19 | # set the output directory 20 | out_dir = os.getcwd() 21 | 22 | # parameters for identifying sinks and delineating nested depressions 23 | min_size = 1000 # minimum number of pixels as a depression 24 | min_depth = 0.5 # minimum depth as a depression 25 | interval = 0.3 # slicing interval for the level-set method 26 | bool_shp = True # output shapefiles for each individual level 27 | 28 | # extracting sinks based on user-defined minimum depression size 29 | out_dem = os.path.join(out_dir, "median.tif") 30 | in_dem = MedianFilter(in_dem, kernel_size=3, out_file=out_dem) 31 | sink_path = ExtractSinks(in_dem, min_size, out_dir) 32 | dep_id_path, dep_level_path = DelineateDepressions(sink_path, 33 | min_size, 34 | min_depth, 35 | interval, 36 | out_dir, 37 | bool_shp) 38 | print('Results are saved in: {}'.format(out_dir)) 39 | ``` 40 | ## lidar GUI 41 | 42 | **lidar** also provides a Graphical User Interface (GUI), which can be 43 | invoked using the following Python script: 44 | 45 | ```python 46 | import lidar 47 | lidar.gui() 48 | ``` 49 | 50 | ![image](https://i.imgur.com/6hLGeV5.png) 51 | 52 | 53 | ## ArcGIS Toolbox 54 | 55 | ### Toolbox interface 56 | 57 | ![toolbox](https://raw.githubusercontent.com/giswqs/lidar/master/images/toolbox_0.png) 58 | 59 | ![toolbox_ui](https://raw.githubusercontent.com/giswqs/lidar/master/images/toolbox_ui.png) 60 | 61 | ### Video tutorials 62 | 63 | [**Delineating nested surface depressions and catchments using ArcGIS Pro**](https://youtu.be/PpF8sfvCATE) 64 | 65 | [![demo](http://img.youtube.com/vi/W9PFHNV3cT0/0.jpg)](http://www.youtube.com/watch?v=W9PFHNV3cT0) 66 | 67 | [**Delineating nested surface 
depressions and catchments using ArcMap**](https://youtu.be/PpF8sfvCATE) 68 | 69 | [![demo](http://img.youtube.com/vi/PpF8sfvCATE/0.jpg)](http://www.youtube.com/watch?v=PpF8sfvCATE) 70 | 71 | 72 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../HISTORY.rst 2 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Welcome to the lidar package 2 | 3 | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/giswqs/lidar/blob/master/examples/lidar_colab.ipynb) 4 | [![image](https://img.shields.io/pypi/v/lidar.svg)](https://pypi.python.org/pypi/lidar) 5 | [![image](https://pepy.tech/badge/lidar)](https://pepy.tech/project/lidar) 6 | [![image](https://img.shields.io/conda/vn/conda-forge/lidar.svg)](https://anaconda.org/conda-forge/lidar) 7 | [![image](https://github.com/opengeos/lidar/workflows/build/badge.svg)](https://github.com/opengeos/lidar/actions?query=workflow%3Abuild) 8 | [![image](https://github.com/opengeos/lidar/workflows/docs/badge.svg)](https://lidar.gishub.org) 9 | [![image](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) 10 | [![image](https://img.shields.io/twitter/follow/giswqs?style=social)](https://twitter.com/giswqs) 11 | [![image](https://img.shields.io/badge/Donate-Buy%20me%20a%20coffee-yellowgreen.svg)](https://www.buymeacoffee.com/giswqs) 12 | [![DOI](https://joss.theoj.org/papers/10.21105/joss.02965/status.svg)](https://doi.org/10.21105/joss.02965) 13 | 14 | **lidar** is Python package for delineating the nested hierarchy of surface depressions in digital elevation models (DEMs). 
It is 15 | particularly useful for analyzing high-resolution topographic data, such as DEMs derived from Light Detection and Ranging (LiDAR) data. 16 | 17 | - GitHub repo: 18 | - Documentation: 19 | - PyPI: 20 | - Conda-forge: 21 | - Open in Colab: 22 | - Free software: [MIT license](https://opensource.org/licenses/MIT) 23 | 24 | **Citations** 25 | 26 | - **Wu, Q.**, (2021). lidar: A Python package for delineating nested surface depressions from digital elevation data. _Journal of Open Source Software_, 6(59), 2965, 27 | 28 | - **Wu, Q.**, Lane, C.R., Wang, L., Vanderhoof, M.K., Christensen, 29 | J.R., & Liu, H. (2019). Efficient Delineation of Nested Depression 30 | Hierarchy in Digital Elevation Models for Hydrological Analysis 31 | Using Level-Set Method. _Journal of the American Water Resources 32 | Association_. ([PDF](https://spatial.utk.edu/pubs/2019_JAWRA.pdf)) 33 | 34 | ## Introduction 35 | 36 | **lidar** is a Python package for delineating the nested hierarchy of 37 | surface depressions in digital elevation models (DEMs). In traditional 38 | hydrological modeling, surface depressions in a DEM are commonly treated 39 | as artifacts and thus filled and removed to create a depressionless DEM, 40 | which can then be used to generate continuous stream networks. In 41 | reality, however, surface depressions in DEMs are commonly a combination 42 | of spurious and actual terrain features. Fine-resolution DEMs derived 43 | from Light Detection and Ranging (LiDAR) data can capture and represent 44 | actual surface depressions, especially in glaciated and karst 45 | landscapes. During the past decades, various algorithms have been 46 | developed to identify and delineate surface depressions, such as 47 | depression filling, depression breaching, hybrid breaching-filling, and 48 | contour tree method. More recently, a level-set method based on graph 49 | theory was proposed to delineate the nested hierarchy of surface 50 | depressions. 
The **lidar** Python package implements the level-set 51 | method and makes it possible for delineating the nested hierarchy of 52 | surface depressions as well as elevated terrain features. It also 53 | provides an interactive Graphical User Interface (GUI) that allows users 54 | to run the program with minimal coding. 55 | 56 | ## Statement of Need 57 | 58 | The **lidar** package is intended for scientists and researchers who 59 | would like to integrate surface depressions into hydrological modeling. 60 | It can also facilitate the identification and delineation of 61 | depressional features, such as sinkholes, detention basins, and prairie 62 | potholes. The detailed topological and geometric properties of surface 63 | depressions can be useful for terrain analysis and hydrological 64 | modeling, including the size, volume, mean depth, maximum depth, lowest 65 | elevation, spill elevation, perimeter, major axis length, minor axis 66 | length, elongatedness. 67 | 68 | ## State of the Field 69 | 70 | Currently, there are a few open-source Python packages that can perform depression filling on digital elevation data, such as [RichDEM](https://richdem.readthedocs.io/) and [whitebox](https://github.com/giswqs/whitebox-python), the Python frontend for [WhiteboxTools](https://github.com/jblindsay/whitebox-tools). However, there are no Python packages offering tools for delineating the nested hierarchy of surface depressions and catchments as well as simulating inundation dynamics. The **lidar** Python package is intended for filling this gap. 71 | 72 | ## Key Features 73 | 74 | - Smoothing DEMs using mean, median, and Gaussian filters. 75 | - Extracting depressions from DEMs. 76 | - Filtering out small artifact depressions based on user-specified minimum depression size. 77 | - Generating refined DEMs with small depressions filled but larger depressions kept intact. 78 | - Delineating depression nested hierarchy using the level-set method. 
79 | - Delineating mount nested hierarchy using the level-set method. 80 | - Computing topological and geometric properties of depressions, including size, volume, mean depth, maximum depth, lowest elevation, 81 | spill elevation, perimeter, major axis length, minor axis length, elongatedness, eccentricity, orientation, and area-bbox-ratio. 82 | - Exporting depression properties as a csv file. 83 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to lidar's documentation! 2 | ====================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | :caption: Contents: 7 | 8 | readme 9 | installation 10 | usage 11 | modules 12 | contributing 13 | authors 14 | history 15 | 16 | Indices and tables 17 | ================== 18 | * :ref:`genindex` 19 | * :ref:`modindex` 20 | * :ref:`search` 21 | -------------------------------------------------------------------------------- /docs/installation.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | 3 | **lidar** supports a variety of platforms, including Microsoft Windows, 4 | macOS, and Linux operating systems. Note that you will need to have 5 | **Python 3.x** (< 3.9) installed. Python 2.x is not supported. 6 | **lidar** is available on both [PyPI](https://pypi.python.org/pypi/lidar) and [conda-forge](https://anaconda.org/conda-forge/lidar). 7 | lidar has a [GDAL](https://gdal.org/) dependency, which can be challenging to install using pip on Windows. 8 | Therefore, it is highly recommended to install lidar from the conda-forge channel. 9 | If you encounter any errors, please check the [Dependencies](#dependencies) section below. 
10 | 11 | ## Install from PyPI 12 | 13 | To install **lidar** from PyPI, run this command in your terminal: 14 | 15 | ```console 16 | pip install lidar 17 | ``` 18 | 19 | ## Install from conda-forage 20 | 21 | If you have [Anaconda](https://www.anaconda.com/distribution/#download-section) or [Miniconda](https://docs.conda.io/en/latest/miniconda.html) 22 | installed on your computer, you can create a fresh conda environment to install lidar: 23 | 24 | ```console 25 | conda create -n geo python=3.11 26 | conda activate geo 27 | conda install -c conda-forge mamba 28 | mamba install -c conda-forge lidar 29 | ``` 30 | 31 | ## Upgrade lidar 32 | 33 | If you have installed lidar before and want to upgrade to the latest version, you can run the following command in your terminal: 34 | 35 | ```console 36 | pip install -U lidar 37 | ``` 38 | 39 | If you use conda, you can update lidar to the latest version by running the following command in your terminal: 40 | 41 | ```console 42 | mamba update lidar -c conda-forge 43 | ``` 44 | 45 | To install the development version from GitHub directly using Git, run the following code: 46 | 47 | ```console 48 | pip install git+https://github.com/opengeos/lidar 49 | ``` 50 | 51 | ## Dependencies 52 | 53 | lidar's Python dependencies are listed in its [requirements.txt](https://github.com/opengeos/lidar/blob/master/requirements.txt) file. In 54 | addition, lidar has a C library dependency: GDAL >=1.11.2. How to 55 | install GDAL in different operating systems will be explained below. 56 | More information about GDAL can be found [here](https://trac.osgeo.org/gdal/wiki/DownloadingGdalBinaries). 57 | 58 | ### Linux 59 | 60 | #### Debian-based Linux 61 | 62 | The following commands can be used to install GDAL for Debian-based 63 | Linux distributions (e.g., Ubuntu, Linux Mint). 
64 | 65 | ```console 66 | sudo add-apt-repository ppa:ubuntugis/ppa 67 | sudo apt-get update 68 | sudo apt-get install gdal-bin libgdal-dev 69 | ``` 70 | 71 | If you encounter any compiling errors, try the following commands. 72 | 73 | ```console 74 | sudo apt-get install --reinstall build-essential 75 | sudo apt-get install python3-dev 76 | pip install wheel 77 | ``` 78 | 79 | #### Pacman-based Linux 80 | 81 | The following commands can be used to install GDAL for Pacman-based 82 | Linux distributions (e.g., Arch Linux, Manjaro). You might need to use 83 | **sudo** if you encounter permission errors. 84 | 85 | ```console 86 | sudo pacman -S yaourt --noconfirm 87 | yaourt -S gdal --noconfirm 88 | yaourt -S python-gdal --noconfirm 89 | ``` 90 | 91 | ### macOS 92 | 93 | For a Homebrew based Python environment, do the following. 94 | 95 | ```console 96 | brew update 97 | brew install gdal 98 | ``` 99 | 100 | Alternatively, you can install GDAL binaries from [kyngchaos](http://www.kyngchaos.com/software/frameworks#gdal_complete). You will 101 | then need to add the installed location 102 | `/Library/Frameworks/GDAL.framework/Programs` to your system path. 103 | 104 | ### Windows 105 | 106 | The instruction below assumes that you have installed [Anaconda](https://www.anaconda.com/download). Open 107 | **Anaconda Prompt** and enter the following commands to create a conda 108 | environment and install required packages 109 | 110 | ```console 111 | conda create -n geo python=3.11 112 | conda activate geo 113 | conda install -c conda-forge mamba 114 | mamba install -c conda-forge lidar 115 | ``` 116 | 117 | When installing the **lidar** package, if you encounter an error 118 | saying `Microsoft Visual C++ 14.0 is required`, please follow the steps 119 | below to fix the error and reinstall **lidar**. 
More information can 120 | be found at this link [Fix Python 3 on Windows error - Microsoft Visual C++ 14.0 is required](https://www.scivision.co/python-windows-visual-c++-14-required/). 121 | 122 | - Download [Microsoft Build Tools for Visual Studio 2017](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=BuildTools&rel=15) 123 | - Double click to install the downloaded installer - **Microsoft Build Tools for Visual Studio 2017**. 124 | - Open **Microsoft Build Tools for Visual Studio 2017** 125 | - Select **Workloads --> Visual C++ build tools** and click the install button 126 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Installation 5 | ============ 6 | 7 | 8 | Stable release 9 | -------------- 10 | 11 | To install lidar, run this command in your terminal: 12 | 13 | .. code-block:: console 14 | 15 | $ pip install lidar 16 | 17 | This is the preferred method to install lidar, as it will always install the most recent stable release. 18 | 19 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide 20 | you through the process. 21 | 22 | .. _pip: https://pip.pypa.io 23 | .. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/ 24 | 25 | 26 | From sources 27 | ------------ 28 | 29 | The sources for lidar can be downloaded from the `Github repo`_. 30 | 31 | You can either clone the public repository: 32 | 33 | .. code-block:: console 34 | 35 | $ git clone git://github.com/giswqs/lidar 36 | 37 | Or download the `tarball`_: 38 | 39 | .. code-block:: console 40 | 41 | $ curl -OL https://github.com/opengeos/lidar/tarball/master 42 | 43 | Once you have a copy of the source, you can install it with: 44 | 45 | .. code-block:: console 46 | 47 | $ python setup.py install 48 | 49 | 50 | .. 
_Github repo: https://github.com/opengeos/lidar 51 | .. _tarball: https://github.com/opengeos/lidar/tarball/master 52 | -------------------------------------------------------------------------------- /docs/lidar.rst: -------------------------------------------------------------------------------- 1 | lidar package 2 | ============== 3 | 4 | Submodules 5 | ---------- 6 | 7 | filtering module 8 | ---------------------- 9 | 10 | .. automodule:: lidar.filtering 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | filling module 16 | -------------------- 17 | 18 | .. automodule:: lidar.filling 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | slicing module 24 | -------------------- 25 | 26 | .. automodule:: lidar.slicing 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | mounts module 32 | ------------------- 33 | 34 | .. automodule:: lidar.mounts 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | 40 | 41 | Module contents 42 | --------------- 43 | 44 | .. automodule:: lidar 45 | :members: 46 | :undoc-members: 47 | :show-inheritance: 48 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=python -msphinx 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=lidar 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The Sphinx module was not found. Make sure you have Sphinx installed, 20 | echo.then set the SPHINXBUILD environment variable to point to the full 21 | echo.path of the 'sphinx-build' executable. Alternatively you may add the 22 | echo.Sphinx directory to PATH. 23 | echo. 
24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/modules.rst: -------------------------------------------------------------------------------- 1 | Modules 2 | ======= 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | lidar 8 | -------------------------------------------------------------------------------- /docs/mounts.md: -------------------------------------------------------------------------------- 1 | # mounts module 2 | 3 | ::: lidar.mounts -------------------------------------------------------------------------------- /docs/notebooks/inundation.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Inundation Dynamics Simulation " 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## Create an ArcGIS Pro project\n", 15 | "\n", 16 | "Open ArcGIS Pro and create a new project titled `inundation`.\n", 17 | "\n", 18 | "## Clone the arcgispro-py3 env\n", 19 | "\n", 20 | "Clone the `arcgispro-py` env to create a new env named `arcgispro-py3-clone`.\n", 21 | "\n", 22 | "![image](https://github.com/user-attachments/assets/52836e4a-7246-48d2-8081-27ec6f21395f)\n", 23 | "\n", 24 | "## Install scikit-image \n", 25 | "\n", 26 | "Activate the `arcgispro-py3-clone` env and install the `scikit-image` package into the env.\n", 27 | "\n", 28 | "![image](https://github.com/user-attachments/assets/320c21a1-7ad1-4df7-87c4-800945a1edc1)\n", 29 | "\n", 30 | "## Import libraries" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "metadata": {}, 
37 | "outputs": [], 38 | "source": [ 39 | "import os\n", 40 | "import arcpy" 41 | ] 42 | }, 43 | { 44 | "cell_type": "markdown", 45 | "metadata": {}, 46 | "source": [ 47 | "## Set workspace\n", 48 | "\n", 49 | "Set to working space to the project folder instead of a GeoDatabase." 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": null, 55 | "metadata": {}, 56 | "outputs": [], 57 | "source": [ 58 | "arcpy.env.workspace = os.path.dirname(arcpy.env.workspace)\n", 59 | "print(arcpy.env.workspace)" 60 | ] 61 | }, 62 | { 63 | "cell_type": "markdown", 64 | "metadata": {}, 65 | "source": [ 66 | "## Download the lidar toolbox\n", 67 | "\n", 68 | "Click this [link](https://github.com/opengeos/lidar/archive/refs/heads/master.zip) and download it to the inundation project folder. Unzip the downloaded file and rename the folder from `lidar-master` to `lidar.`\n", 69 | "\n", 70 | "You will find the ArcGIS toolbox under `inundation\\lidar\\lidar\\toolbox\\ArcGIS Pro Hydrology Analyst.tbx`\n", 71 | "\n", 72 | "## Import the lidar toolbox" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": null, 78 | "metadata": {}, 79 | "outputs": [], 80 | "source": [ 81 | "# Path to the custom toolbox\n", 82 | "toolbox_path = r\"lidar\\lidar\\toolbox\\ArcGIS Pro Hydrology Analyst.tbx\" # Change to your toolbox path\n", 83 | "\n", 84 | "# Import the toolbox\n", 85 | "arcpy.ImportToolbox(toolbox_path, \"HydroTools\")" 86 | ] 87 | }, 88 | { 89 | "cell_type": "markdown", 90 | "metadata": {}, 91 | "source": [ 92 | "## Set input data and output folder" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "metadata": {}, 99 | "outputs": [], 100 | "source": [ 101 | "input_dem = os.path.join(arcpy.env.workspace, r\"lidar\\examples\\lidar-dem\\dem_full.tif\")\n", 102 | "out_dir = os.path.join(os.path.expanduser(\"~\\Downloads\"), \"output\")" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": null, 108 | 
"metadata": {}, 109 | "outputs": [], 110 | "source": [ 111 | "if not os.path.exists(out_dir):\n", 112 | " os.makedirs(out_dir)" 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": null, 118 | "metadata": {}, 119 | "outputs": [], 120 | "source": [ 121 | "print(input_dem)" 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": null, 127 | "metadata": {}, 128 | "outputs": [], 129 | "source": [ 130 | "print(out_dir)" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": null, 136 | "metadata": {}, 137 | "outputs": [], 138 | "source": [ 139 | "arcpy.env.workspace = out_dir" 140 | ] 141 | }, 142 | { 143 | "cell_type": "markdown", 144 | "metadata": {}, 145 | "source": [ 146 | "## Extract sinks" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": null, 152 | "metadata": {}, 153 | "outputs": [], 154 | "source": [ 155 | "arcpy.HydroTools.ExtrackSink(\n", 156 | " Input_DEM=input_dem,\n", 157 | " Minimum_Sink_Size=1000,\n", 158 | " Minimum_Sink_Depth__from_water_surface_to_spill_point_=1,\n", 159 | " Buffer_Distance=0,\n", 160 | " Output_Sink_Polygon=\"sink.shp\",\n", 161 | ")" 162 | ] 163 | }, 164 | { 165 | "cell_type": "markdown", 166 | "metadata": {}, 167 | "source": [ 168 | "## Delineate catchments" 169 | ] 170 | }, 171 | { 172 | "cell_type": "code", 173 | "execution_count": null, 174 | "metadata": {}, 175 | "outputs": [], 176 | "source": [ 177 | "arcpy.HydroTools.DelineateCatchment(\n", 178 | " Input_Partially_Filled_DEM=\"dem_partially_filled.tif\",\n", 179 | " Input_Sink_Polygon=\"sink.shp\",\n", 180 | " Output_Catchment_Polygon=\"catchment.shp\",\n", 181 | ")" 182 | ] 183 | }, 184 | { 185 | "cell_type": "markdown", 186 | "metadata": {}, 187 | "source": [ 188 | "## Delineate flowpaths" 189 | ] 190 | }, 191 | { 192 | "cell_type": "code", 193 | "execution_count": null, 194 | "metadata": {}, 195 | "outputs": [], 196 | "source": [ 197 | "arcpy.HydroTools.DelineateFlowPath(\n", 198 
| " Input_Fully_Filled_DEM=\"dem_fully_filled.tif\",\n", 199 | " Input_Sink_Polygon=\"sink.shp\",\n", 200 | " Rainfall_Intensity__cm_h_=\"5\",\n", 201 | " Output_Flow_Path=\"flowpath.shp\",\n", 202 | ")" 203 | ] 204 | }, 205 | { 206 | "cell_type": "markdown", 207 | "metadata": {}, 208 | "source": [ 209 | "## Delineate depression hierarchy" 210 | ] 211 | }, 212 | { 213 | "cell_type": "code", 214 | "execution_count": null, 215 | "metadata": {}, 216 | "outputs": [], 217 | "source": [ 218 | "arcpy.HydroTools.DelineateDepressionHierarchy(\n", 219 | " Input_DEM_Sink=\"sink.tif\",\n", 220 | " Minimum_Depression_Size=\"1000\",\n", 221 | " Minimum_Depression_Depth=\"0.5\",\n", 222 | " Slicing_Interval=\"0.2\",\n", 223 | " Output_Depression_Level_Image=\"level.tif\",\n", 224 | ")" 225 | ] 226 | }, 227 | { 228 | "cell_type": "markdown", 229 | "metadata": {}, 230 | "source": [ 231 | "## Delinate catchment hierarchy" 232 | ] 233 | }, 234 | { 235 | "cell_type": "code", 236 | "execution_count": null, 237 | "metadata": {}, 238 | "outputs": [], 239 | "source": [ 240 | "arcpy.HydroTools.CatchmentHierarchy(\n", 241 | " Input_Partially_Filled_DEM=\"dem_partially_filled.tif\",\n", 242 | " Input_Depression_Hierarchy_Shapefiles=\"shp\",\n", 243 | " Output_Catchment_Hierarchy=\"catchment_hir.tif\",\n", 244 | ")" 245 | ] 246 | }, 247 | { 248 | "cell_type": "markdown", 249 | "metadata": {}, 250 | "source": [ 251 | "## Simulate inundation" 252 | ] 253 | }, 254 | { 255 | "cell_type": "code", 256 | "execution_count": null, 257 | "metadata": {}, 258 | "outputs": [], 259 | "source": [ 260 | "os.makedirs(os.path.join(arcpy.env.workspace, \"simulation\"), exist_ok=True)" 261 | ] 262 | }, 263 | { 264 | "cell_type": "code", 265 | "execution_count": null, 266 | "metadata": {}, 267 | "outputs": [], 268 | "source": [ 269 | "arcpy.HydroTools.SimulateInundation(\n", 270 | " Input_Sink_Image=\"sink.tif\",\n", 271 | " Input_Catchment_Hierarchy_Image=\"catchment_hir.tif\",\n", 272 | " 
Minimum_Depression_Size=\"1000\",\n", 273 | " Minimum_Depression_Depth=\"0.2\",\n", 274 | " Slicing_Interval=\"0.2\",\n", 275 | " Rainfall_Intensity__cm_h_=\"5\",\n", 276 | " Rainfall_Duration__h_=\"50\",\n", 277 | " Simulation_Time_Step__h_=\"1\",\n", 278 | " Output_Inundation_Image_Folder=\"simulation\",\n", 279 | ")" 280 | ] 281 | }, 282 | { 283 | "cell_type": "markdown", 284 | "metadata": {}, 285 | "source": [ 286 | "## Play the animation" 287 | ] 288 | }, 289 | { 290 | "cell_type": "code", 291 | "execution_count": null, 292 | "metadata": {}, 293 | "outputs": [], 294 | "source": [ 295 | "arcpy.HydroTools.PlayAnimation(\n", 296 | " Input_DEM=\"dem_partially_filled.tif\",\n", 297 | " Loops=\"3\",\n", 298 | " Input_Inundation_Image_Folder=\"simulation\",\n", 299 | ")" 300 | ] 301 | } 302 | ], 303 | "metadata": { 304 | "kernelspec": { 305 | "display_name": "ArcGISPro", 306 | "language": "python", 307 | "name": "python3" 308 | }, 309 | "language_info": { 310 | "file_extension": ".py", 311 | "mimetype": "text/x-python", 312 | "name": "python", 313 | "version": "3.11.10" 314 | } 315 | }, 316 | "nbformat": 4, 317 | "nbformat_minor": 4 318 | } 319 | -------------------------------------------------------------------------------- /docs/notebooks/lidar_colab.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "colab_type": "text", 7 | "id": "AMhnFYl5J4hK" 8 | }, 9 | "source": [ 10 | "\"Run" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 1, 16 | "metadata": { 17 | "colab": { 18 | "base_uri": "https://localhost:8080/", 19 | "height": 561 20 | }, 21 | "colab_type": "code", 22 | "executionInfo": { 23 | "elapsed": 8031, 24 | "status": "ok", 25 | "timestamp": 1580231550377, 26 | "user": { 27 | "displayName": "Qiusheng Wu", 28 | "photoUrl": "https://lh3.googleusercontent.com/a-/AAuE7mAJ1pblBoPT5aypeg-B7BjE8_M5yncXVTVrFLRGbb0=s64", 29 | 
"userId": "16735277279940296378" 30 | }, 31 | "user_tz": 300 32 | }, 33 | "id": "JDX8t-xpI1st", 34 | "outputId": "c3af31a4-86fd-4850-b84a-cd0f3d11ac08" 35 | }, 36 | "outputs": [], 37 | "source": [ 38 | "import subprocess\n", 39 | "\n", 40 | "try:\n", 41 | " import lidar\n", 42 | "except ImportError:\n", 43 | " print(\"Installing lidar ...\")\n", 44 | " subprocess.check_call([\"python\", \"-m\", \"pip\", \"install\", \"lidar\"])" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 2, 50 | "metadata": { 51 | "colab": { 52 | "base_uri": "https://localhost:8080/", 53 | "height": 561 54 | }, 55 | "colab_type": "code", 56 | "executionInfo": { 57 | "elapsed": 1651, 58 | "status": "ok", 59 | "timestamp": 1580231609943, 60 | "user": { 61 | "displayName": "Qiusheng Wu", 62 | "photoUrl": "https://lh3.googleusercontent.com/a-/AAuE7mAJ1pblBoPT5aypeg-B7BjE8_M5yncXVTVrFLRGbb0=s64", 63 | "userId": "16735277279940296378" 64 | }, 65 | "user_tz": 300 66 | }, 67 | "id": "_m14VPJmI7rl", 68 | "outputId": "ba501f19-c289-47aa-9f5c-30c7af04d112" 69 | }, 70 | "outputs": [ 71 | { 72 | "name": "stdout", 73 | "output_type": "stream", 74 | "text": [ 75 | "Median filtering ...\n", 76 | "Run time: 0.0599 seconds\n", 77 | "Saving dem ...\n", 78 | "Loading data ...\n", 79 | "min = 379.70, max = 410.72, no_data = -3.402823e+38, cell_size = 1.0\n", 80 | "Depression filling ...\n", 81 | "Saving filled dem ...\n", 82 | "Region grouping ...\n", 83 | "Computing properties ...\n", 84 | "Saving sink dem ...\n", 85 | "Saving refined dem ...\n", 86 | "Converting raster to vector ...\n", 87 | "Total run time:\t\t\t 0.0972 s\n", 88 | "\n", 89 | "Reading data ...\n", 90 | "rows, cols: (400, 400)\n", 91 | "Pixel resolution: 1.0\n", 92 | "Read data time: 0.0029 seconds\n", 93 | "Data preparation time: 0.0092 seconds\n", 94 | "Total number of regions: 1\n", 95 | "Processing Region # 1 ...\n", 96 | "=========== Run time statistics =========== \n", 97 | "(rows, cols):\t\t\t (400, 400)\n", 98 | 
"Pixel resolution:\t\t 1.0 m\n", 99 | "Number of regions:\t\t 1\n", 100 | "Data preparation time:\t\t 0.0092 s\n", 101 | "Identify level time:\t\t 0.2358 s\n", 102 | "Write image time:\t\t 0.0026 s\n", 103 | "Polygonize time:\t\t 0.0098 s\n", 104 | "Extract level time:\t\t 0.0570 s\n", 105 | "Total run time:\t\t\t 0.3150 s\n", 106 | "Results are saved in: /home/qiusheng/temp\n" 107 | ] 108 | } 109 | ], 110 | "source": [ 111 | "import os\n", 112 | "import pkg_resources\n", 113 | "from lidar import *\n", 114 | "\n", 115 | "# identify the sample data directory of the package\n", 116 | "package_name = \"lidar\"\n", 117 | "data_dir = pkg_resources.resource_filename(package_name, \"data/\")\n", 118 | "\n", 119 | "# use the sample dem. Change it to your own dem if needed\n", 120 | "in_dem = os.path.join(data_dir, \"dem.tif\")\n", 121 | "# set the output directory\n", 122 | "out_dir = os.getcwd()\n", 123 | "\n", 124 | "# parameters for identifying sinks and delineating nested depressions\n", 125 | "min_size = 1000 # minimum number of pixels as a depression\n", 126 | "min_depth = 0.5 # minimum depth as a depression\n", 127 | "interval = 0.3 # slicing interval for the level-set method\n", 128 | "bool_shp = True # output shapefiles for each individual level\n", 129 | "\n", 130 | "# extracting sinks based on user-defined minimum depression size\n", 131 | "out_dem = os.path.join(out_dir, \"median.tif\")\n", 132 | "in_dem = MedianFilter(in_dem, kernel_size=3, out_file=out_dem)\n", 133 | "sink_path = ExtractSinks(in_dem, min_size, out_dir)\n", 134 | "dep_id_path, dep_level_path = DelineateDepressions(\n", 135 | " sink_path, min_size, min_depth, interval, out_dir, bool_shp\n", 136 | ")\n", 137 | "print(\"Results are saved in: {}\".format(out_dir))" 138 | ] 139 | } 140 | ], 141 | "metadata": { 142 | "colab": { 143 | "authorship_tag": "ABX9TyMlOtzA1LPi7dD2X1wsx0jk", 144 | "name": "lidar_colab.ipynb", 145 | "provenance": [] 146 | }, 147 | "kernelspec": { 148 | "display_name": "Python 
3", 149 | "language": "python", 150 | "name": "python3" 151 | }, 152 | "language_info": { 153 | "codemirror_mode": { 154 | "name": "ipython", 155 | "version": 3 156 | }, 157 | "file_extension": ".py", 158 | "mimetype": "text/x-python", 159 | "name": "python", 160 | "nbconvert_exporter": "python", 161 | "pygments_lexer": "ipython3", 162 | "version": "3.8.5" 163 | } 164 | }, 165 | "nbformat": 4, 166 | "nbformat_minor": 1 167 | } -------------------------------------------------------------------------------- /docs/notebooks/lidar_dsm.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Creating a Digital Surface Model (DSM) from LiDAR data" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import os\n", 17 | "import lidar" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": null, 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "url = \"https://open.gishub.org/data/lidar/madison.laz\"" 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": null, 32 | "metadata": {}, 33 | "outputs": [], 34 | "source": [ 35 | "lidar.download_file(url)" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": null, 41 | "metadata": {}, 42 | "outputs": [], 43 | "source": [ 44 | "filename = os.path.abspath(os.path.basename(url))\n", 45 | "output = os.path.splitext(filename)[0] + \".tif\"" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": null, 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "lidar.lidar_to_dsm(filename, output, resolution=1.0, minz=0, maxz=450)" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": null, 60 | "metadata": {}, 61 | "outputs": [], 62 | "source": [ 63 | "lidar.add_crs(output, epsg=2255)" 64 | ] 65 | } 66 | ], 67 | 
"metadata": { 68 | "kernelspec": { 69 | "display_name": "lidar", 70 | "language": "python", 71 | "name": "python3" 72 | }, 73 | "language_info": { 74 | "codemirror_mode": { 75 | "name": "ipython", 76 | "version": 3 77 | }, 78 | "file_extension": ".py", 79 | "mimetype": "text/x-python", 80 | "name": "python", 81 | "nbconvert_exporter": "python", 82 | "pygments_lexer": "ipython3", 83 | "version": "3.10.13" 84 | } 85 | }, 86 | "nbformat": 4, 87 | "nbformat_minor": 2 88 | } 89 | -------------------------------------------------------------------------------- /docs/overrides/main.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 | {% if page.nb_url %} 5 | 6 | {% include ".icons/material/download.svg" %} 7 | 8 | {% endif %} 9 | 10 | {{ super() }} 11 | {% endblock content %} 12 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | -------------------------------------------------------------------------------- /docs/slicing.md: -------------------------------------------------------------------------------- 1 | # slicing module 2 | 3 | ::: lidar.slicing -------------------------------------------------------------------------------- /docs/usage.md: -------------------------------------------------------------------------------- 1 | # Usage 2 | 3 | The images below show real-world examples of the level set method for 4 | delineating nested depressions in the Cottonwood Lake Study Area (CLSA), 5 | North Dakota. More test datasets (e.g., the Pipestem watershed in the 6 | Prairie Pothole Region of North Dakota) can be downloaded from 7 | 8 | 9 | The following example was conducted on a 64-bit Linux machine with a 10 | quad-core Intel i7-7700 CPU and 16 GB RAM. 
The average running time of 11 | the algorithm for this DEM was 0.75 seconds. 12 | 13 | ![image](https://wetlands.io/file/images/CLSA_DEM.jpg) 14 | 15 | ![image](https://wetlands.io/file/images/CLSA_Result.jpg) 16 | 17 | ![image](https://wetlands.io/file/images/CLSA_Table.jpg) 18 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | Usage 3 | ===== 4 | 5 | To use lidar in a project: 6 | 7 | .. code:: python 8 | 9 | import os 10 | import pkg_resources 11 | import lidar 12 | import richdem as rd 13 | 14 | # identify the sample data directory of the package 15 | package_name = 'lidar' 16 | data_dir = pkg_resources.resource_filename(package_name, 'data/') 17 | 18 | # use the sample dem. Change it to your own dem if needed 19 | in_dem = os.path.join(data_dir, 'dem.tif') 20 | # set output directory. By default, use the temp directory under user's home directory 21 | out_dir = os.path.join(os.path.expanduser("~"), "temp") 22 | 23 | # parameters for identifying sinks and delineating nested depressions 24 | min_size = 1000 # minimum number of pixels as a depression 25 | min_depth = 0.3 # minimum depth as a depression 26 | interval = 0.3 # slicing interval for the level-set method 27 | bool_shp = False # output shapefiles for each individual level 28 | 29 | # extracting sinks based on user-defined minimum depression size 30 | sink_path = lidar.ExtractSinks(in_dem, min_size, out_dir) 31 | dep_id_path, dep_level_path = lidar.DelineateDepressions(sink_path, min_size, min_depth, interval, out_dir, bool_shp) 32 | 33 | # loading data and results 34 | dem = rd.LoadGDAL(in_dem) 35 | sink = rd.LoadGDAL(sink_path) 36 | dep_id = rd.LoadGDAL(dep_id_path) 37 | dep_level = rd.LoadGDAL(dep_level_path) 38 | 39 | # plotting results 40 | dem_fig = rd.rdShow(dem, ignore_colours=[0], axes=False, cmap='jet', figsize=(6, 5.5)) 41 | sink_fig = rd.rdShow(sink, 
ignore_colours=[0], axes=False, cmap='jet', figsize=(6, 5.5)) 42 | dep_id_fig = rd.rdShow(dep_id, ignore_colours=[0], axes=False, cmap='jet', figsize=(6, 5.5)) 43 | dep_level_path = rd.rdShow(dep_level, ignore_colours=[0], axes=False, cmap='jet', figsize=(6, 5.5)) 44 | 45 | Check the example.py for more details. 46 | -------------------------------------------------------------------------------- /examples/inundation.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Inundation Dynamics Simulation " 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## Create an ArcGIS Pro project\n", 15 | "\n", 16 | "Open ArcGIS Pro and create a new project titled `inundation`.\n", 17 | "\n", 18 | "## Clone the arcgispro-py3 env\n", 19 | "\n", 20 | "Clone the `arcgispro-py` env to create a new env named `arcgispro-py3-clone`.\n", 21 | "\n", 22 | "![image](https://github.com/user-attachments/assets/52836e4a-7246-48d2-8081-27ec6f21395f)\n", 23 | "\n", 24 | "## Install scikit-image \n", 25 | "\n", 26 | "Activate the `arcgispro-py3-clone` env and install the `scikit-image` package into the env.\n", 27 | "\n", 28 | "![image](https://github.com/user-attachments/assets/320c21a1-7ad1-4df7-87c4-800945a1edc1)\n", 29 | "\n", 30 | "## Import libraries" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "import os\n", 40 | "import arcpy" 41 | ] 42 | }, 43 | { 44 | "cell_type": "markdown", 45 | "metadata": {}, 46 | "source": [ 47 | "## Set workspace\n", 48 | "\n", 49 | "Set to working space to the project folder instead of a GeoDatabase." 
50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": null, 55 | "metadata": {}, 56 | "outputs": [], 57 | "source": [ 58 | "arcpy.env.workspace = os.path.dirname(arcpy.env.workspace)\n", 59 | "print(arcpy.env.workspace)" 60 | ] 61 | }, 62 | { 63 | "cell_type": "markdown", 64 | "metadata": {}, 65 | "source": [ 66 | "## Download the lidar toolbox\n", 67 | "\n", 68 | "Click this [link](https://github.com/opengeos/lidar/archive/refs/heads/master.zip) and download it to the inundation project folder. Unzip the downloaded file and rename the folder from `lidar-master` to `lidar.`\n", 69 | "\n", 70 | "You will find the ArcGIS toolbox under `inundation\\lidar\\lidar\\toolbox\\ArcGIS Pro Hydrology Analyst.tbx`\n", 71 | "\n", 72 | "## Import the lidar toolbox" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": null, 78 | "metadata": {}, 79 | "outputs": [], 80 | "source": [ 81 | "# Path to the custom toolbox\n", 82 | "toolbox_path = r\"lidar\\lidar\\toolbox\\ArcGIS Pro Hydrology Analyst.tbx\" # Change to your toolbox path\n", 83 | "\n", 84 | "# Import the toolbox\n", 85 | "arcpy.ImportToolbox(toolbox_path, \"HydroTools\")" 86 | ] 87 | }, 88 | { 89 | "cell_type": "markdown", 90 | "metadata": {}, 91 | "source": [ 92 | "## Set input data and output folder" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "metadata": {}, 99 | "outputs": [], 100 | "source": [ 101 | "input_dem = os.path.join(arcpy.env.workspace, r\"lidar\\examples\\lidar-dem\\dem_full.tif\")\n", 102 | "out_dir = os.path.join(os.path.expanduser(\"~\\Downloads\"), \"output\")" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": null, 108 | "metadata": {}, 109 | "outputs": [], 110 | "source": [ 111 | "if not os.path.exists(out_dir):\n", 112 | " os.makedirs(out_dir)" 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": null, 118 | "metadata": {}, 119 | "outputs": [], 120 | "source": [ 121 | 
"print(input_dem)" 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": null, 127 | "metadata": {}, 128 | "outputs": [], 129 | "source": [ 130 | "print(out_dir)" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": null, 136 | "metadata": {}, 137 | "outputs": [], 138 | "source": [ 139 | "arcpy.env.workspace = out_dir" 140 | ] 141 | }, 142 | { 143 | "cell_type": "markdown", 144 | "metadata": {}, 145 | "source": [ 146 | "## Extract sinks" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": null, 152 | "metadata": {}, 153 | "outputs": [], 154 | "source": [ 155 | "arcpy.HydroTools.ExtrackSink(\n", 156 | " Input_DEM=input_dem,\n", 157 | " Minimum_Sink_Size=1000,\n", 158 | " Minimum_Sink_Depth__from_water_surface_to_spill_point_=1,\n", 159 | " Buffer_Distance=0,\n", 160 | " Output_Sink_Polygon=\"sink.shp\",\n", 161 | ")" 162 | ] 163 | }, 164 | { 165 | "cell_type": "markdown", 166 | "metadata": {}, 167 | "source": [ 168 | "## Delineate catchments" 169 | ] 170 | }, 171 | { 172 | "cell_type": "code", 173 | "execution_count": null, 174 | "metadata": {}, 175 | "outputs": [], 176 | "source": [ 177 | "arcpy.HydroTools.DelineateCatchment(\n", 178 | " Input_Partially_Filled_DEM=\"dem_partially_filled.tif\",\n", 179 | " Input_Sink_Polygon=\"sink.shp\",\n", 180 | " Output_Catchment_Polygon=\"catchment.shp\",\n", 181 | ")" 182 | ] 183 | }, 184 | { 185 | "cell_type": "markdown", 186 | "metadata": {}, 187 | "source": [ 188 | "## Delineate flowpaths" 189 | ] 190 | }, 191 | { 192 | "cell_type": "code", 193 | "execution_count": null, 194 | "metadata": {}, 195 | "outputs": [], 196 | "source": [ 197 | "arcpy.HydroTools.DelineateFlowPath(\n", 198 | " Input_Fully_Filled_DEM=\"dem_fully_filled.tif\",\n", 199 | " Input_Sink_Polygon=\"sink.shp\",\n", 200 | " Rainfall_Intensity__cm_h_=\"5\",\n", 201 | " Output_Flow_Path=\"flowpath.shp\",\n", 202 | ")" 203 | ] 204 | }, 205 | { 206 | "cell_type": "markdown", 207 | "metadata": 
{}, 208 | "source": [ 209 | "## Delineate depression hierarchy" 210 | ] 211 | }, 212 | { 213 | "cell_type": "code", 214 | "execution_count": null, 215 | "metadata": {}, 216 | "outputs": [], 217 | "source": [ 218 | "arcpy.HydroTools.DelineateDepressionHierarchy(\n", 219 | " Input_DEM_Sink=\"sink.tif\",\n", 220 | " Minimum_Depression_Size=\"1000\",\n", 221 | " Minimum_Depression_Depth=\"0.5\",\n", 222 | " Slicing_Interval=\"0.2\",\n", 223 | " Output_Depression_Level_Image=\"level.tif\",\n", 224 | ")" 225 | ] 226 | }, 227 | { 228 | "cell_type": "markdown", 229 | "metadata": {}, 230 | "source": [ 231 | "## Delinate catchment hierarchy" 232 | ] 233 | }, 234 | { 235 | "cell_type": "code", 236 | "execution_count": null, 237 | "metadata": {}, 238 | "outputs": [], 239 | "source": [ 240 | "arcpy.HydroTools.CatchmentHierarchy(\n", 241 | " Input_Partially_Filled_DEM=\"dem_partially_filled.tif\",\n", 242 | " Input_Depression_Hierarchy_Shapefiles=\"shp\",\n", 243 | " Output_Catchment_Hierarchy=\"catchment_hir.tif\",\n", 244 | ")" 245 | ] 246 | }, 247 | { 248 | "cell_type": "markdown", 249 | "metadata": {}, 250 | "source": [ 251 | "## Simulate inundation" 252 | ] 253 | }, 254 | { 255 | "cell_type": "code", 256 | "execution_count": null, 257 | "metadata": {}, 258 | "outputs": [], 259 | "source": [ 260 | "os.makedirs(os.path.join(arcpy.env.workspace, \"simulation\"), exist_ok=True)" 261 | ] 262 | }, 263 | { 264 | "cell_type": "code", 265 | "execution_count": null, 266 | "metadata": {}, 267 | "outputs": [], 268 | "source": [ 269 | "arcpy.HydroTools.SimulateInundation(\n", 270 | " Input_Sink_Image=\"sink.tif\",\n", 271 | " Input_Catchment_Hierarchy_Image=\"catchment_hir.tif\",\n", 272 | " Minimum_Depression_Size=\"1000\",\n", 273 | " Minimum_Depression_Depth=\"0.2\",\n", 274 | " Slicing_Interval=\"0.2\",\n", 275 | " Rainfall_Intensity__cm_h_=\"5\",\n", 276 | " Rainfall_Duration__h_=\"50\",\n", 277 | " Simulation_Time_Step__h_=\"1\",\n", 278 | " 
Output_Inundation_Image_Folder=\"simulation\",\n", 279 | ")" 280 | ] 281 | }, 282 | { 283 | "cell_type": "markdown", 284 | "metadata": {}, 285 | "source": [ 286 | "## Play the animation" 287 | ] 288 | }, 289 | { 290 | "cell_type": "code", 291 | "execution_count": null, 292 | "metadata": {}, 293 | "outputs": [], 294 | "source": [ 295 | "arcpy.HydroTools.PlayAnimation(\n", 296 | " Input_DEM=\"dem_partially_filled.tif\",\n", 297 | " Loops=\"3\",\n", 298 | " Input_Inundation_Image_Folder=\"simulation\",\n", 299 | ")" 300 | ] 301 | } 302 | ], 303 | "metadata": { 304 | "kernelspec": { 305 | "display_name": "ArcGISPro", 306 | "language": "python", 307 | "name": "python3" 308 | }, 309 | "language_info": { 310 | "file_extension": ".py", 311 | "mimetype": "text/x-python", 312 | "name": "python", 313 | "version": "3.11.10" 314 | } 315 | }, 316 | "nbformat": 4, 317 | "nbformat_minor": 4 318 | } 319 | -------------------------------------------------------------------------------- /examples/lidar-dem.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/examples/lidar-dem.zip -------------------------------------------------------------------------------- /examples/lidar-dem/catchment.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/examples/lidar-dem/catchment.tif -------------------------------------------------------------------------------- /examples/lidar-dem/dem.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/examples/lidar-dem/dem.tif -------------------------------------------------------------------------------- /examples/lidar-dem/dem_full.tif: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/examples/lidar-dem/dem_full.tif -------------------------------------------------------------------------------- /examples/lidar-dem/dsm.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/examples/lidar-dem/dsm.tif -------------------------------------------------------------------------------- /examples/lidar-dem/sink.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/examples/lidar-dem/sink.tif -------------------------------------------------------------------------------- /examples/lidar_colab.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "colab_type": "text", 7 | "id": "AMhnFYl5J4hK" 8 | }, 9 | "source": [ 10 | "\"Run" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 1, 16 | "metadata": { 17 | "colab": { 18 | "base_uri": "https://localhost:8080/", 19 | "height": 561 20 | }, 21 | "colab_type": "code", 22 | "executionInfo": { 23 | "elapsed": 8031, 24 | "status": "ok", 25 | "timestamp": 1580231550377, 26 | "user": { 27 | "displayName": "Qiusheng Wu", 28 | "photoUrl": "https://lh3.googleusercontent.com/a-/AAuE7mAJ1pblBoPT5aypeg-B7BjE8_M5yncXVTVrFLRGbb0=s64", 29 | "userId": "16735277279940296378" 30 | }, 31 | "user_tz": 300 32 | }, 33 | "id": "JDX8t-xpI1st", 34 | "outputId": "c3af31a4-86fd-4850-b84a-cd0f3d11ac08" 35 | }, 36 | "outputs": [], 37 | "source": [ 38 | "import subprocess\n", 39 | "\n", 40 | "try:\n", 41 | " import lidar\n", 42 | "except ImportError:\n", 43 | " print(\"Installing lidar ...\")\n", 44 | " 
subprocess.check_call([\"python\", \"-m\", \"pip\", \"install\", \"lidar\"])" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 2, 50 | "metadata": { 51 | "colab": { 52 | "base_uri": "https://localhost:8080/", 53 | "height": 561 54 | }, 55 | "colab_type": "code", 56 | "executionInfo": { 57 | "elapsed": 1651, 58 | "status": "ok", 59 | "timestamp": 1580231609943, 60 | "user": { 61 | "displayName": "Qiusheng Wu", 62 | "photoUrl": "https://lh3.googleusercontent.com/a-/AAuE7mAJ1pblBoPT5aypeg-B7BjE8_M5yncXVTVrFLRGbb0=s64", 63 | "userId": "16735277279940296378" 64 | }, 65 | "user_tz": 300 66 | }, 67 | "id": "_m14VPJmI7rl", 68 | "outputId": "ba501f19-c289-47aa-9f5c-30c7af04d112" 69 | }, 70 | "outputs": [ 71 | { 72 | "name": "stdout", 73 | "output_type": "stream", 74 | "text": [ 75 | "Median filtering ...\n", 76 | "Run time: 0.0599 seconds\n", 77 | "Saving dem ...\n", 78 | "Loading data ...\n", 79 | "min = 379.70, max = 410.72, no_data = -3.402823e+38, cell_size = 1.0\n", 80 | "Depression filling ...\n", 81 | "Saving filled dem ...\n", 82 | "Region grouping ...\n", 83 | "Computing properties ...\n", 84 | "Saving sink dem ...\n", 85 | "Saving refined dem ...\n", 86 | "Converting raster to vector ...\n", 87 | "Total run time:\t\t\t 0.0972 s\n", 88 | "\n", 89 | "Reading data ...\n", 90 | "rows, cols: (400, 400)\n", 91 | "Pixel resolution: 1.0\n", 92 | "Read data time: 0.0029 seconds\n", 93 | "Data preparation time: 0.0092 seconds\n", 94 | "Total number of regions: 1\n", 95 | "Processing Region # 1 ...\n", 96 | "=========== Run time statistics =========== \n", 97 | "(rows, cols):\t\t\t (400, 400)\n", 98 | "Pixel resolution:\t\t 1.0 m\n", 99 | "Number of regions:\t\t 1\n", 100 | "Data preparation time:\t\t 0.0092 s\n", 101 | "Identify level time:\t\t 0.2358 s\n", 102 | "Write image time:\t\t 0.0026 s\n", 103 | "Polygonize time:\t\t 0.0098 s\n", 104 | "Extract level time:\t\t 0.0570 s\n", 105 | "Total run time:\t\t\t 0.3150 s\n", 106 | "Results are saved in: 
/home/qiusheng/temp\n" 107 | ] 108 | } 109 | ], 110 | "source": [ 111 | "import os\n", 112 | "import pkg_resources\n", 113 | "from lidar import *\n", 114 | "\n", 115 | "# identify the sample data directory of the package\n", 116 | "package_name = \"lidar\"\n", 117 | "data_dir = pkg_resources.resource_filename(package_name, \"data/\")\n", 118 | "\n", 119 | "# use the sample dem. Change it to your own dem if needed\n", 120 | "in_dem = os.path.join(data_dir, \"dem.tif\")\n", 121 | "# set the output directory\n", 122 | "out_dir = os.getcwd()\n", 123 | "\n", 124 | "# parameters for identifying sinks and delineating nested depressions\n", 125 | "min_size = 1000 # minimum number of pixels as a depression\n", 126 | "min_depth = 0.5 # minimum depth as a depression\n", 127 | "interval = 0.3 # slicing interval for the level-set method\n", 128 | "bool_shp = True # output shapefiles for each individual level\n", 129 | "\n", 130 | "# extracting sinks based on user-defined minimum depression size\n", 131 | "out_dem = os.path.join(out_dir, \"median.tif\")\n", 132 | "in_dem = MedianFilter(in_dem, kernel_size=3, out_file=out_dem)\n", 133 | "sink_path = ExtractSinks(in_dem, min_size, out_dir)\n", 134 | "dep_id_path, dep_level_path = DelineateDepressions(\n", 135 | " sink_path, min_size, min_depth, interval, out_dir, bool_shp\n", 136 | ")\n", 137 | "print(\"Results are saved in: {}\".format(out_dir))" 138 | ] 139 | } 140 | ], 141 | "metadata": { 142 | "colab": { 143 | "authorship_tag": "ABX9TyMlOtzA1LPi7dD2X1wsx0jk", 144 | "name": "lidar_colab.ipynb", 145 | "provenance": [] 146 | }, 147 | "kernelspec": { 148 | "display_name": "Python 3", 149 | "language": "python", 150 | "name": "python3" 151 | }, 152 | "language_info": { 153 | "codemirror_mode": { 154 | "name": "ipython", 155 | "version": 3 156 | }, 157 | "file_extension": ".py", 158 | "mimetype": "text/x-python", 159 | "name": "python", 160 | "nbconvert_exporter": "python", 161 | "pygments_lexer": "ipython3", 162 | "version": 
"3.8.5" 163 | } 164 | }, 165 | "nbformat": 4, 166 | "nbformat_minor": 1 167 | } -------------------------------------------------------------------------------- /examples/lidar_dsm.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Creating a Digital Surface Model (DSM) from LiDAR data" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import os\n", 17 | "import lidar" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": null, 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "url = \"https://open.gishub.org/data/lidar/madison.laz\"" 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": null, 32 | "metadata": {}, 33 | "outputs": [], 34 | "source": [ 35 | "lidar.download_file(url)" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": null, 41 | "metadata": {}, 42 | "outputs": [], 43 | "source": [ 44 | "filename = os.path.abspath(os.path.basename(url))\n", 45 | "output = os.path.splitext(filename)[0] + \".tif\"" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": null, 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "lidar.lidar_to_dsm(filename, output, resolution=1.0, minz=0, maxz=450)" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": null, 60 | "metadata": {}, 61 | "outputs": [], 62 | "source": [ 63 | "lidar.add_crs(output, epsg=2255)" 64 | ] 65 | } 66 | ], 67 | "metadata": { 68 | "kernelspec": { 69 | "display_name": "lidar", 70 | "language": "python", 71 | "name": "python3" 72 | }, 73 | "language_info": { 74 | "codemirror_mode": { 75 | "name": "ipython", 76 | "version": 3 77 | }, 78 | "file_extension": ".py", 79 | "mimetype": "text/x-python", 80 | "name": "python", 81 | "nbconvert_exporter": "python", 82 | 
"pygments_lexer": "ipython3", 83 | "version": "3.10.13" 84 | } 85 | }, 86 | "nbformat": 4, 87 | "nbformat_minor": 2 88 | } 89 | -------------------------------------------------------------------------------- /images/CLSA_DEM.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/images/CLSA_DEM.jpg -------------------------------------------------------------------------------- /images/CLSA_Result.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/images/CLSA_Result.jpg -------------------------------------------------------------------------------- /images/CLSA_Table.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/images/CLSA_Table.jpg -------------------------------------------------------------------------------- /images/toolbox_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/images/toolbox_0.png -------------------------------------------------------------------------------- /images/toolbox_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/images/toolbox_1.png -------------------------------------------------------------------------------- /images/toolbox_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/images/toolbox_2.png -------------------------------------------------------------------------------- 
/images/toolbox_3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/images/toolbox_3.png -------------------------------------------------------------------------------- /images/toolbox_4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/images/toolbox_4.png -------------------------------------------------------------------------------- /images/toolbox_5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/images/toolbox_5.png -------------------------------------------------------------------------------- /images/toolbox_6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/images/toolbox_6.png -------------------------------------------------------------------------------- /images/toolbox_7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/images/toolbox_7.png -------------------------------------------------------------------------------- /images/toolbox_ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/images/toolbox_ui.png -------------------------------------------------------------------------------- /lidar/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """Top-level package for lidar.""" 4 | 5 | __author__ = """Qiusheng Wu""" 6 | 
__email__ = "giswqs@gmail.com" 7 | __version__ = "0.8.4" 8 | 9 | from .filling import ( 10 | ExtractSinks, 11 | extract_sinks_by_huc8, 12 | extract_sinks_by_huc8_batch, 13 | extract_sinks_by_bbox, 14 | ) 15 | from .slicing import DelineateDepressions 16 | from .filtering import MeanFilter, MedianFilter, GaussianFilter 17 | from .mounts import DelineateMounts 18 | from .gui import gui 19 | from .common import * 20 | 21 | # from .mounts import DelineateMounts 22 | -------------------------------------------------------------------------------- /lidar/cli.py: -------------------------------------------------------------------------------- 1 | """Console script for lidar.""" 2 | 3 | import click 4 | 5 | 6 | @click.command() 7 | def main(args=None): 8 | """Console script for lidar.""" 9 | click.echo("Replace this message by putting your code into " "lidar.cli.main") 10 | click.echo("See click documentation at http://click.pocoo.org/") 11 | return 0 12 | -------------------------------------------------------------------------------- /lidar/data/dem.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/lidar/data/dem.tif -------------------------------------------------------------------------------- /lidar/data/dsm.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/lidar/data/dsm.tif -------------------------------------------------------------------------------- /lidar/data/sink.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/lidar/data/sink.tif -------------------------------------------------------------------------------- /lidar/example.py: 
-------------------------------------------------------------------------------- 1 | import os 2 | import richdem as rd 3 | from filtering import MedianFilter 4 | from filling import ExtractSinks 5 | from slicing import DelineateDepressions 6 | import importlib.resources as resources 7 | 8 | # identify the sample data directory of the package 9 | package_name = "lidar" 10 | data_dir = resources.files(package_name) / "data" 11 | 12 | # use the sample dem. Change it to your own dem if needed 13 | in_dem = os.path.join(data_dir, "dem.tif") 14 | # set output directory. By default, use the temp directory under user's home directory 15 | out_dir = os.path.join(os.path.expanduser("~"), "temp") 16 | 17 | if not os.path.exists(out_dir): 18 | os.mkdir(out_dir) 19 | 20 | # parameters for identifying sinks and delineating nested depressions 21 | min_size = 1000 # minimum number of pixels as a depression 22 | min_depth = 0.3 # minimum depth as a depression 23 | interval = 0.3 # slicing interval for the level-set method 24 | bool_shp = False # output shapefiles for each individual level 25 | 26 | # extracting sinks based on user-defined minimum depression size 27 | out_dem = os.path.join(out_dir, "median.tif") 28 | in_dem = MedianFilter(in_dem, kernel_size=3, out_file=out_dem) 29 | sink_path = ExtractSinks(in_dem, min_size, out_dir) 30 | dep_id_path, dep_level_path = DelineateDepressions( 31 | sink_path, min_size, min_depth, interval, out_dir, bool_shp 32 | ) 33 | 34 | print("Results are saved in: {}".format(out_dir)) 35 | 36 | # loading data and results 37 | dem = rd.LoadGDAL(in_dem) 38 | sink = rd.LoadGDAL(sink_path) 39 | dep_id = rd.LoadGDAL(dep_id_path) 40 | dep_level = rd.LoadGDAL(dep_level_path) 41 | 42 | # plotting results 43 | dem_fig = rd.rdShow(dem, ignore_colours=[0], axes=False, cmap="jet", figsize=(6, 5.5)) 44 | sink_fig = rd.rdShow(sink, ignore_colours=[0], axes=False, cmap="jet", figsize=(6, 5.5)) 45 | dep_id_fig = rd.rdShow( 46 | dep_id, ignore_colours=[0], 
axes=False, cmap="jet", figsize=(6, 5.5) 47 | ) 48 | dep_level_path = rd.rdShow( 49 | dep_level, ignore_colours=[0], axes=False, cmap="jet", figsize=(6, 5.5) 50 | ) 51 | -------------------------------------------------------------------------------- /lidar/filtering.py: -------------------------------------------------------------------------------- 1 | """Module for applying filters to image.""" 2 | 3 | import os 4 | from scipy import ndimage 5 | import numpy as np 6 | import time 7 | 8 | try: 9 | import richdem as rd 10 | except ImportError: 11 | print( 12 | "richdem is not installed. Please install it with `pip install richdem` or `conda install richdem -c conda-forge`." 13 | ) 14 | 15 | 16 | def np2rdarray(in_array, no_data, projection, geotransform): 17 | """Converts an numpy array to rdarray. 18 | 19 | Args: 20 | in_array (np.array): The input numpy array. 21 | no_data (float): The no_data value of the array. 22 | projection (str): The projection of the image. 23 | geotransform (str): The geotransform of the image. 24 | 25 | Returns: 26 | object: The richDEM array. 27 | """ 28 | out_array = rd.rdarray(in_array, no_data=no_data) 29 | out_array.projection = projection 30 | out_array.geotransform = geotransform 31 | return out_array 32 | 33 | 34 | def MeanFilter(in_dem, kernel_size=3, out_file=None): 35 | """Applies a mean filter to an image. 36 | 37 | Args: 38 | in_dem (str): File path to the input image. 39 | kernel_size (int, optional): The size of the moving window. Defaults to 3. 40 | out_file (str, optional): File path to the output image. Defaults to None. 41 | 42 | Returns: 43 | np.array: The numpy array containing the filtered image. 
44 | """ 45 | print("Mean filtering ...") 46 | start_time = time.time() 47 | dem = rd.LoadGDAL(in_dem) 48 | no_data = dem.no_data 49 | projection = dem.projection 50 | geotransform = dem.geotransform 51 | 52 | weights = np.full((kernel_size, kernel_size), 1.0 / (kernel_size * kernel_size)) 53 | mean = ndimage.filters.convolve(dem, weights) 54 | mean = np2rdarray(mean, no_data, projection, geotransform) 55 | print("Run time: {:.4f} seconds".format(time.time() - start_time)) 56 | 57 | if out_file is not None: 58 | print("Saving dem ...") 59 | rd.SaveGDAL(out_file, mean) 60 | return out_file 61 | 62 | return mean 63 | 64 | 65 | def MedianFilter(in_dem, kernel_size=3, out_file=None): 66 | """Applies a median filter to an image. 67 | 68 | Args: 69 | in_dem (str): File path to the input image. 70 | kernel_size (int, optional): The size of the moving window. Defaults to 3. 71 | out_file (str, optional): File path to the output image. Defaults to None. 72 | 73 | Returns: 74 | np.array: The numpy array containing the filtered image. 75 | """ 76 | print("Median filtering ...") 77 | start_time = time.time() 78 | dem = rd.LoadGDAL(in_dem) 79 | no_data = dem.no_data 80 | projection = dem.projection 81 | geotransform = dem.geotransform 82 | 83 | med = ndimage.median_filter(dem, size=kernel_size) 84 | med = np2rdarray(med, no_data, projection, geotransform) 85 | print("Run time: {:.4f} seconds".format(time.time() - start_time)) 86 | 87 | if out_file is not None: 88 | print("Saving dem ...") 89 | rd.SaveGDAL(out_file, med) 90 | return out_file 91 | 92 | return med 93 | 94 | 95 | def GaussianFilter(in_dem, sigma=1, out_file=None): 96 | """Applies a Gaussian filter to an image. 97 | 98 | Args: 99 | in_dem (str): File path to the input image. 100 | sigma (int, optional): Standard deviation. Defaults to 1. 101 | out_file (str, optional): File path to the output image. Defaults to None. 102 | 103 | Returns: 104 | np.array: The numpy array containing the filtered image. 
105 | """ 106 | print("Gaussian filtering ...") 107 | start_time = time.time() 108 | dem = rd.LoadGDAL(in_dem) 109 | no_data = dem.no_data 110 | projection = dem.projection 111 | geotransform = dem.geotransform 112 | 113 | gau = ndimage.gaussian_filter(dem, sigma=sigma) 114 | gau = np2rdarray(gau, no_data, projection, geotransform) 115 | print("Run time: {:.4f} seconds".format(time.time() - start_time)) 116 | 117 | if out_file is not None: 118 | print("Saving dem ...") 119 | rd.SaveGDAL(out_file, gau) 120 | return out_file 121 | 122 | return gau 123 | -------------------------------------------------------------------------------- /lidar/gui.py: -------------------------------------------------------------------------------- 1 | import os 2 | import importlib.resources as resources 3 | from .filtering import MedianFilter, MeanFilter, GaussianFilter 4 | from .filling import ExtractSinks 5 | from .slicing import DelineateDepressions 6 | from .mounts import DelineateMounts 7 | 8 | try: 9 | import richdem as rd 10 | except ImportError: 11 | print( 12 | "richdem is not installed. Please install it with `pip install richdem` or `conda install richdem -c conda-forge`." 13 | ) 14 | 15 | 16 | def gui(): 17 | """An interactive Graphical User Interface (GUI) for the lidar package.""" 18 | 19 | try: 20 | import PySimpleGUI as sg 21 | except ImportError: 22 | raise ImportError( 23 | "PySimpleGUI is not installed. Please install it via `pip install PySimpleGUI`" 24 | ) 25 | 26 | # identify the sample data directory of the package 27 | package_name = "lidar" 28 | data_dir = resources.files(package_name) / "data" 29 | 30 | # use the sample dem. Change it to your own dem if needed 31 | in_dem = os.path.join(data_dir, "dem.tif") 32 | # set output directory. 
By default, use the temp directory under user's home directory 33 | out_dir = os.path.join(os.path.expanduser("~"), "temp") 34 | 35 | if not os.path.exists(out_dir): 36 | os.mkdir(out_dir) 37 | 38 | with sg.FlexForm("lidar package GUI") as form: 39 | form_rows = [ 40 | [ 41 | sg.Text( 42 | "Level-set Method for Delineating Topographic Hierarchy", 43 | size=(50, 1), 44 | font=("Arial", 14), 45 | text_color="black", 46 | ) 47 | ], 48 | [sg.Text("Select DEM:", font=("Arial", 14))], 49 | [sg.InputText(in_dem, size=(60, 1)), sg.FileBrowse()], 50 | [sg.Text("Delineation Mode:", font=("Arial", 14))], 51 | [ 52 | sg.Radio("Depressions", "RADIO1", default=True), 53 | sg.Radio("Mounts", "RADIO1"), 54 | ], 55 | [sg.Text("DEM Filtering:", font=("Arial", 14))], 56 | [ 57 | sg.Text("Select Filter:"), 58 | sg.InputCombo( 59 | ["None", "Mean Filter", "Median Filter", "Gaussian Filter"] 60 | ), 61 | sg.Text("Kernel Size: "), 62 | sg.InputText(default_text="3", size=(10, 1)), 63 | ], 64 | [sg.Text("Level-set Parameters:", font=("Arial", 14))], 65 | [ 66 | sg.Text("Minimum size:"), 67 | sg.InputText(default_text="1000", size=(10, 1)), 68 | sg.Text("Minimum depth:"), 69 | sg.InputText(default_text="1.0", size=(10, 1)), 70 | ], 71 | [ 72 | sg.Text("Slicing interval:"), 73 | sg.InputText(default_text="0.5", size=(10, 1)), 74 | sg.Text("Output shapefiles:"), 75 | sg.InputCombo(["Yes", "No"], default_value="No"), 76 | ], 77 | [sg.Text("Display Results:", font=("Arial", 14))], 78 | [sg.InputCombo(["Yes", "No"], default_value="No")], 79 | [sg.Text("Select Output Directory:", font=("Arial", 14))], 80 | [sg.InputText(out_dir, size=(60, 1)), sg.FolderBrowse()], 81 | [sg.Submit(), sg.Cancel()], 82 | ] 83 | button, ( 84 | in_dem, 85 | mode_dep, 86 | mode_mnt, 87 | filter_type, 88 | kernel_szie, 89 | min_size, 90 | min_depth, 91 | interval, 92 | bool_shp, 93 | display, 94 | out_dir, 95 | ) = form.LayoutAndRead(form_rows) 96 | 97 | if button == "Submit": 98 | 99 | kernel_szie = int(kernel_szie) 
100 | min_size = int(min_size) 101 | min_depth = float(min_depth) 102 | interval = float(interval) 103 | if bool_shp == "Yes": 104 | bool_shp = True 105 | else: 106 | bool_shp = False 107 | if display == "Yes": 108 | display = True 109 | else: 110 | display = False 111 | if mode_mnt and in_dem == os.path.join(data_dir, "dem.tif"): 112 | in_dem = os.path.join(data_dir, "dsm.tif") 113 | 114 | out_dem_name = filter_type.split(" ")[0].lower() + ".tif" 115 | out_dem = os.path.join(out_dir, out_dem_name) 116 | 117 | sg.Popup( 118 | "Please Wait!", 119 | "The program is running! You will receive another message when it is done!", 120 | ) 121 | 122 | if filter_type == "Mean Filter": 123 | in_dem = MeanFilter(in_dem, kernel_size=kernel_szie, out_file=out_dem) 124 | elif filter_type == "Median Filter": 125 | in_dem = MedianFilter(in_dem, kernel_size=kernel_szie, out_file=out_dem) 126 | elif filter_type == "Gaussian Filter": 127 | in_dem = GaussianFilter(in_dem, sigma=kernel_szie, out_file=out_dem) 128 | 129 | if mode_dep: 130 | sink_path = ExtractSinks(in_dem, min_size, out_dir) 131 | dep_id_path, dep_level_path = DelineateDepressions( 132 | sink_path, min_size, min_depth, interval, out_dir, bool_shp 133 | ) 134 | else: 135 | sink_path = os.path.join(out_dir, "sink.tif") 136 | dep_id_path, dep_level_path = DelineateMounts( 137 | in_dem, min_size, min_depth, interval, out_dir, bool_shp 138 | ) 139 | 140 | if display: 141 | # loading data and results 142 | dem = rd.LoadGDAL(in_dem) 143 | sink = rd.LoadGDAL(sink_path) 144 | dep_id = rd.LoadGDAL(dep_id_path) 145 | dep_level = rd.LoadGDAL(dep_level_path) 146 | 147 | # plotting results 148 | dem_fig = rd.rdShow( 149 | dem, ignore_colours=[0], axes=False, cmap="jet", figsize=(6, 5.5) 150 | ) 151 | sink_fig = rd.rdShow( 152 | sink, ignore_colours=[0], axes=False, cmap="jet", figsize=(6, 5.5) 153 | ) 154 | dep_id_fig = rd.rdShow( 155 | dep_id, ignore_colours=[0], axes=False, cmap="jet", figsize=(6, 5.5) 156 | ) 157 | dep_level_path = 
rd.rdShow( 158 | dep_level, 159 | ignore_colours=[0], 160 | axes=False, 161 | cmap="jet", 162 | figsize=(6, 5.5), 163 | ) 164 | 165 | del ( 166 | dem, 167 | sink, 168 | dep_id, 169 | dep_level, 170 | dem_fig, 171 | sink_fig, 172 | dep_id_fig, 173 | dep_id_path, 174 | ) 175 | 176 | sg.Popup("Success!", "The results are saved in: {}".format(out_dir)) 177 | -------------------------------------------------------------------------------- /lidar/lidar.py: -------------------------------------------------------------------------------- 1 | """Main module.""" 2 | -------------------------------------------------------------------------------- /lidar/mounts.py: -------------------------------------------------------------------------------- 1 | """Module for delineating the nested hierarchy of elevated features (i.e., mounts).""" 2 | 3 | import os 4 | import numpy as np 5 | import lidar 6 | from .filling import ExtractSinks 7 | from .slicing import DelineateDepressions 8 | 9 | try: 10 | import richdem as rd 11 | except ImportError: 12 | print( 13 | "richdem is not installed. Please install it with `pip install richdem` or `conda install richdem -c conda-forge`." 14 | ) 15 | 16 | 17 | def get_min_max_nodata(dem): 18 | """Gets the minimum, maximum, and no_data value of a numpy array. 19 | 20 | Args: 21 | dem (np.array): The numpy array containing the image. 22 | 23 | Returns: 24 | tuple: The minimum, maximum, and no_data value. 25 | """ 26 | no_data = dem.no_data 27 | max_elev = float(np.max(dem[dem != no_data])) 28 | min_elev = float(np.min(dem[dem != no_data])) 29 | 30 | return min_elev, max_elev, no_data 31 | 32 | 33 | def FlipDEM(dem, delta=100, out_file=None): 34 | """Flips the DEM. 35 | 36 | Args: 37 | dem (np.array): The numpy array containing the image. 38 | delta (int, optional): The base value to be added to the flipped DEM. Defaults to 100. 39 | out_file (str, optional): File path to the output image. Defaults to None. 
40 | 41 | Returns: 42 | np.array: The numpy array containing the flipped DEM. 43 | """ 44 | # get min and max elevation of the dem 45 | no_data = dem.no_data 46 | max_elev = float(np.max(dem[dem != no_data])) 47 | # min_elev = float(np.min(dem[dem != no_data])) 48 | 49 | dem = dem * (-1) + max_elev + delta 50 | dem[dem == no_data * (-1)] = no_data 51 | 52 | if out_file is not None: 53 | print("Saving flipped dem ...") 54 | rd.SaveGDAL(out_file, dem) 55 | return out_file 56 | 57 | return dem 58 | 59 | 60 | def DelineateMounts(in_dem, min_size, min_height, interval, out_dir, bool_shp=False): 61 | """Delineates the nested hierarchy of elevated features (i.e., mounts). 62 | 63 | Args: 64 | in_dem (str): File path to the input DEM. 65 | min_size (int): The minimum number of pixels to be considered as an object. 66 | min_height (float): The minimum depth of the feature to be considered as an object. 67 | interval (float): The slicing interval. 68 | out_dir (str): The output directory. 69 | bool_shp (bool, optional): Whether to generate shapefiles. Defaults to False. 70 | 71 | Returns: 72 | tuple: File paths to the depression ID and level. 
73 | """ 74 | if not os.path.exists(out_dir): 75 | os.mkdir(out_dir) 76 | 77 | print("Loading data ...") 78 | dem = rd.LoadGDAL(in_dem) 79 | # projection = dem.projection 80 | geotransform = dem.geotransform 81 | cell_size = np.round(geotransform[1], decimals=3) 82 | 83 | out_dem = os.path.join(out_dir, "dem_flip.tif") 84 | in_dem = FlipDEM(dem, delta=100, out_file=out_dem) 85 | 86 | min_elev, max_elev, no_data = get_min_max_nodata(dem) 87 | print( 88 | "min = {:.2f}, max = {:.2f}, no_data = {}, cell_size = {}".format( 89 | min_elev, max_elev, no_data, cell_size 90 | ) 91 | ) 92 | 93 | sink_path = ExtractSinks(in_dem, min_size, out_dir) 94 | dep_id_path, dep_level_path = DelineateDepressions( 95 | sink_path, min_size, min_height, interval, out_dir, bool_shp 96 | ) 97 | 98 | return dep_id_path, dep_level_path 99 | -------------------------------------------------------------------------------- /lidar/toolbox/ArcGIS Hydrology Analyst.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/lidar/toolbox/ArcGIS Hydrology Analyst.tbx -------------------------------------------------------------------------------- /lidar/toolbox/ArcGIS Pro Hydrology Analyst.tbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/lidar/toolbox/ArcGIS Pro Hydrology Analyst.tbx -------------------------------------------------------------------------------- /lidar/toolbox/scripts/1_Extract_Sink.py: -------------------------------------------------------------------------------- 1 | import arcpy 2 | import os 3 | import time 4 | import shutil 5 | from arcpy import env 6 | from arcpy.mp import * 7 | 8 | # from arcpy.mapping import * 9 | 10 | 11 | def extract_sink(in_dem, min_size, min_depth, buffer_dist, out_sink): 12 | arcpy.CheckOutExtension("Spatial") 13 
| start_time = time.time() 14 | workspace = os.path.split(out_sink)[0] 15 | arcpy.env.workspace = workspace 16 | arcpy.env.overwriteOutput = True 17 | 18 | img_ext = ".tif" 19 | vec_ext = ".shp" 20 | if os.path.splitext(workspace)[1].lower() == ".gdb": 21 | img_ext = "" 22 | vec_ext = "" 23 | if arcpy.Exists(in_dem) == False: 24 | arcpy.AddMessage("The input raster does not exist") 25 | quit() 26 | 27 | ### Mean Focal Statistics 28 | arcpy.AddMessage("DEM filtering ...") 29 | ras_mf = arcpy.sa.FocalStatistics(in_dem, "Rectangle 3 3 CELL", "MEAN", "DATA") 30 | 31 | ### Fill depression 32 | arcpy.AddMessage("Filling sinks ...") 33 | ras_fill = arcpy.sa.Fill(ras_mf) 34 | dem_filled_name = "dem_fully_filled" + img_ext 35 | ras_fill = arcpy.sa.ApplyEnvironment(ras_fill) 36 | ras_fill.save(os.path.join(workspace, dem_filled_name)) 37 | ### Get sink 38 | ras_sink = ras_fill - ras_mf 39 | 40 | ### Convert sink to binary image 41 | arcpy.AddMessage("Creating binary sink image ...") 42 | ras_sink_bin = arcpy.sa.Con(ras_sink > 0, 1) 43 | ras_sink_bin_name = "ras_sink_bin" + img_ext 44 | ras_sink_bin.save(os.path.join(workspace, ras_sink_bin_name)) 45 | ### Region group 46 | arcpy.AddMessage("Grouping regions ...") 47 | ras_region_bk = arcpy.sa.RegionGroup(ras_sink_bin, "FOUR", "WITHIN", "ADD_LINK") 48 | ras_region_bk.save(os.path.join(workspace, "ras_region" + img_ext)) 49 | 50 | ras_region_zonal = arcpy.sa.ZonalStatistics(ras_region_bk, "Value", in_dem, "RANGE") 51 | ras_region_zonal.save("ras_region_zonal" + img_ext) 52 | ras_region = arcpy.sa.Con(ras_region_zonal > min_depth, 1) 53 | ras_region = arcpy.sa.ApplyEnvironment(ras_region) 54 | ras_region.save("ras_region_sub" + img_ext) 55 | 56 | ### Convert raster to polygon 57 | arcpy.AddMessage("Converting raster to polygon ...") 58 | region_poly_name = os.path.join(workspace, "region_poly" + vec_ext) 59 | arcpy.RasterToPolygon_conversion(ras_region, region_poly_name, "NO_SIMPLIFY") 60 | 61 | ### Select polygon based on 
minimum size 62 | arcpy.AddMessage("Selecting polygons ...") 63 | area_field = "Area" 64 | arcpy.AddField_management(region_poly_name, area_field, "DOUBLE") 65 | arcpy.CalculateField_management( 66 | region_poly_name, "Area", "!shape.area@squaremeters!", "PYTHON_9.3", "#" 67 | ) 68 | sqlExp = area_field + ">=" + str(min_size) 69 | 70 | region_poly_select_name = out_sink 71 | arcpy.Select_analysis(region_poly_name, region_poly_select_name, sqlExp) 72 | arcpy.CalculateField_management(region_poly_select_name, "gridcode", "1", "PYTHON") 73 | region_poly_ras = os.path.join(workspace, "region_poly_ras" + img_ext) 74 | arcpy.PolygonToRaster_conversion( 75 | region_poly_select_name, "gridcode", region_poly_ras, "CELL_CENTER", "NONE", "1" 76 | ) 77 | 78 | ### Convert foreground sink to 0 79 | arcpy.AddMessage("Converting foreground sink ...") 80 | ras_sink_bg = ras_mf - ras_mf 81 | ras_sink_bg.save(os.path.join(workspace, "ras_sink_bg" + img_ext)) 82 | 83 | # ras_sink_final = "ras_sink_final" 84 | arcpy.AddMessage("Calculating cell statistics ...") 85 | in_ras_list = [region_poly_ras, ras_sink_bg] 86 | ras_sink_final_name = arcpy.sa.CellStatistics(in_ras_list, "SUM", "DATA") 87 | arcpy.env.extent = ras_mf.extent 88 | ras_sink_final_name = arcpy.sa.ApplyEnvironment(ras_sink_final_name) 89 | 90 | ### Convert foreground sink 91 | arcpy.AddMessage("Creating partially filled DEM ...") 92 | dem_name = arcpy.sa.Con(ras_sink_final_name == 1, ras_mf, ras_fill) 93 | dem_name = arcpy.sa.ApplyEnvironment(dem_name) 94 | dem_name.save(os.path.join(workspace, "dem_partially_filled" + img_ext)) 95 | 96 | arcpy.AddMessage("Creating sink DEM ...") 97 | dem_sink = arcpy.sa.Con(ras_sink_final_name == 1, ras_mf, ras_fill) 98 | dem_sink = arcpy.sa.ApplyEnvironment(dem_sink) 99 | 100 | arcpy.AddMessage("Calculating sink depth ...") 101 | dem_sink_depth = ras_fill - dem_name 102 | dem_sink_depth_name = arcpy.sa.Con(dem_sink_depth > 0, dem_sink) 103 | dem_sink_depth_name = 
arcpy.sa.ApplyEnvironment(dem_sink_depth_name) 104 | dem_sink_depth_name.save(os.path.join(workspace, "sink" + img_ext)) 105 | 106 | sink_depth = arcpy.sa.Con(dem_sink_depth > 0, dem_sink_depth) 107 | sink_depth = arcpy.sa.ApplyEnvironment(sink_depth) 108 | sink_depth.save(os.path.join(workspace, "sink_depth" + img_ext)) 109 | 110 | arcpy.AddMessage("Zonal statistics ...") 111 | zonalStatistics(out_sink, in_dem) 112 | 113 | if buffer_dist > 0: 114 | arcpy.AddMessage("Creating buffered sink DEM ...") 115 | sink_buffer_name = os.path.join(workspace, "sink_buffer_poly" + vec_ext) 116 | sqlExp = str(buffer_dist) + " Meters" 117 | arcpy.Buffer_analysis(out_sink, sink_buffer_name, sqlExp, "", "", "", "") 118 | dem_sink_buffer = arcpy.sa.ExtractByMask(dem_name, sink_buffer_name) 119 | sink_buffer_img = os.path.join(workspace, "sink_buffer" + img_ext) 120 | arcpy.CopyRaster_management(dem_sink_buffer, sink_buffer_img) 121 | 122 | # add output data to map 123 | # arcpy.AddMessage("Adding data to map ...") 124 | # mxd = MapDocument("CURRENT") 125 | # df = ListDataFrames(mxd, "*")[0] 126 | # lyr_fully_filled_dem = Layer(os.path.join(workspace, dem_filled_name)) 127 | # AddLayer(df, lyr_fully_filled_dem) 128 | # lyr_partially_filled_dem = Layer( 129 | # os.path.join(workspace, "dem_partially_filled" + img_ext) 130 | # ) 131 | # AddLayer(df, lyr_partially_filled_dem) 132 | # lyr_sink_dem = Layer(os.path.join(workspace, "sink" + img_ext)) 133 | # AddLayer(df, lyr_sink_dem) 134 | 135 | # add output data to map 136 | arcpy.AddMessage("Adding data to map ...") 137 | p = arcpy.mp.ArcGISProject("CURRENT") 138 | m = p.listMaps("*")[0] 139 | lyr_fully_filled_dem = os.path.join(workspace, dem_filled_name) 140 | m.addDataFromPath(lyr_fully_filled_dem) 141 | lyr_partially_filled_dem = os.path.join(workspace, "dem_partially_filled" + img_ext) 142 | m.addDataFromPath(lyr_partially_filled_dem) 143 | lyr_sink_dem = os.path.join(workspace, "sink" + img_ext) 144 | 
m.addDataFromPath(lyr_sink_dem) 145 | 146 | arcpy.AddMessage("Deleting temporary data ...") 147 | arcpy.Delete_management(region_poly_name) 148 | arcpy.Delete_management(region_poly_ras) 149 | arcpy.Delete_management(ras_sink_bin_name) 150 | arcpy.Delete_management(os.path.join(workspace, "ras_sink_bg" + img_ext)) 151 | arcpy.Delete_management(os.path.join(workspace, "ras_region" + img_ext)) 152 | arcpy.Delete_management(os.path.join(workspace, "ras_region_sub" + img_ext)) 153 | arcpy.Delete_management(os.path.join(workspace, "ras_region_zonal" + img_ext)) 154 | if buffer_dist > 0: 155 | arcpy.Delete_management(sink_buffer_name) 156 | arcpy.AddMessage("Extract sink done!") 157 | 158 | end_time = time.time() 159 | arcpy.AddMessage("Total run time: {:.4f}".format(end_time - start_time)) 160 | 161 | return out_sink 162 | 163 | 164 | def zonalStatistics(in_shp_path, in_dem): 165 | in_shp_dir = os.path.split(in_shp_path)[0] 166 | in_shp_name = os.path.split(in_shp_path)[1] 167 | arcpy.env.workspace = in_shp_dir 168 | arcpy.env.overwriteOutput = True 169 | arcpy.env.snapRaster = in_dem 170 | dbf_dir = os.path.join(in_shp_dir, "dbf") 171 | os.mkdir(dbf_dir) 172 | tif_dir = os.path.join(in_shp_dir, "tif") 173 | os.mkdir(tif_dir) 174 | shapefiles = os.listdir(in_shp_dir) 175 | dem = arcpy.Raster(in_dem) 176 | cell_size = dem.meanCellHeight 177 | for shp in shapefiles: 178 | if shp.endswith(".shp") and shp == in_shp_name: 179 | shp_path = os.path.join(in_shp_dir, shp) 180 | dbf_path = os.path.join(dbf_dir, "zonal_" + shp.replace("shp", "dbf")) 181 | tif_path = os.path.join(tif_dir, shp.replace("shp", "tif")) 182 | arcpy.PolygonToRaster_conversion( 183 | shp_path, 184 | value_field="FID", 185 | out_rasterdataset=tif_path, 186 | cell_assignment="CELL_CENTER", 187 | priority_field="NONE", 188 | cellsize=cell_size, 189 | ) 190 | arcpy.sa.ZonalStatisticsAsTable( 191 | tif_path, "Value", in_dem, dbf_path, "DATA", "ALL" 192 | ) 193 | arcpy.JoinField_management( 194 | shp_path, 195 
| in_field="FID", 196 | join_table=dbf_path, 197 | join_field="Value", 198 | fields="COUNT;MIN;MAX;RANGE;MEAN;STD;SUM", 199 | ) 200 | # arcpy.AddField_management(shp_path,field_name="dep2catR",field_type="FLOAT") 201 | arcpy.AddField_management(shp_path, field_name="volume", field_type="FLOAT") 202 | arcpy.AddField_management( 203 | shp_path, field_name="mean_depth", field_type="FLOAT" 204 | ) 205 | # arcpy.CalculateField_management(shp_path,field="dep2catR",expression="!AREA! / !cat_area!", expression_type="PYTHON_9.3") 206 | arcpy.CalculateField_management( 207 | shp_path, 208 | field="volume", 209 | expression="( !COUNT! * !MAX! - !SUM!) * ( !AREA! / !COUNT! )", 210 | expression_type="PYTHON_9.3", 211 | ) 212 | arcpy.CalculateField_management( 213 | shp_path, 214 | field="mean_depth", 215 | expression="!volume! / !AREA!", 216 | expression_type="PYTHON_9.3", 217 | ) 218 | arcpy.CalculateField_management( 219 | shp_path, 220 | field="ID", 221 | expression="!FID! + 1", 222 | expression_type="PYTHON_9.3", 223 | ) 224 | arcpy.DeleteField_management(shp_path, drop_field="GRIDCODE") 225 | 226 | if os.path.exists(dbf_dir): 227 | shutil.rmtree(dbf_dir) 228 | if os.path.exists(tif_dir): 229 | shutil.rmtree(tif_dir) 230 | return True 231 | 232 | 233 | # main script 234 | if __name__ == "__main__": 235 | 236 | in_dem = arcpy.GetParameterAsText(0) 237 | min_size = float(arcpy.GetParameterAsText(1)) 238 | min_depth = float(arcpy.GetParameterAsText(2)) 239 | buffer_dist = float(arcpy.GetParameterAsText(3)) 240 | out_sink = arcpy.GetParameterAsText(4) 241 | 242 | extract_sink(in_dem, min_size, min_depth, buffer_dist, out_sink) 243 | -------------------------------------------------------------------------------- /lidar/toolbox/scripts/1_Extract_Sink_ArcMap.py: -------------------------------------------------------------------------------- 1 | import arcpy 2 | import os 3 | import time 4 | import shutil 5 | from arcpy import env 6 | from arcpy.mapping import * 7 | 8 | 9 | def 
extract_sink(in_dem, min_size, min_depth, buffer_dist, out_sink): 10 | arcpy.CheckOutExtension("Spatial") 11 | start_time = time.time() 12 | workspace = os.path.split(out_sink)[0] 13 | arcpy.env.workspace = workspace 14 | arcpy.env.overwriteOutput = True 15 | 16 | img_ext = ".tif" 17 | vec_ext = ".shp" 18 | if os.path.splitext(workspace)[1].lower() == ".gdb": 19 | img_ext = "" 20 | vec_ext = "" 21 | if arcpy.Exists(in_dem) == False: 22 | arcpy.AddMessage("The input raster does not exist") 23 | quit() 24 | 25 | ### Mean Focal Statistics 26 | arcpy.AddMessage("DEM filtering ...") 27 | ras_mf = arcpy.sa.FocalStatistics(in_dem, "Rectangle 3 3 CELL", "MEAN", "DATA") 28 | 29 | ### Fill depression 30 | arcpy.AddMessage("Filling sinks ...") 31 | ras_fill = arcpy.sa.Fill(ras_mf) 32 | dem_filled_name = "dem_fully_filled" + img_ext 33 | ras_fill = arcpy.sa.ApplyEnvironment(ras_fill) 34 | ras_fill.save(os.path.join(workspace, dem_filled_name)) 35 | ### Get sink 36 | ras_sink = ras_fill - ras_mf 37 | 38 | ### Convert sink to binary image 39 | arcpy.AddMessage("Creating binary sink image ...") 40 | ras_sink_bin = arcpy.sa.Con(ras_sink > 0, 1) 41 | ras_sink_bin_name = "ras_sink_bin" + img_ext 42 | ras_sink_bin.save(os.path.join(workspace, ras_sink_bin_name)) 43 | ### Region group 44 | arcpy.AddMessage("Grouping regions ...") 45 | ras_region_bk = arcpy.sa.RegionGroup(ras_sink_bin, "FOUR", "WITHIN", "ADD_LINK") 46 | ras_region_bk.save(os.path.join(workspace, "ras_region" + img_ext)) 47 | 48 | ras_region_zonal = arcpy.sa.ZonalStatistics(ras_region_bk, "Value", in_dem, "RANGE") 49 | ras_region_zonal.save("ras_region_zonal" + img_ext) 50 | ras_region = arcpy.sa.Con(ras_region_zonal > min_depth, 1) 51 | ras_region = arcpy.sa.ApplyEnvironment(ras_region) 52 | ras_region.save("ras_region_sub" + img_ext) 53 | 54 | ### Convert raster to polygon 55 | arcpy.AddMessage("Converting raster to polygon ...") 56 | region_poly_name = os.path.join(workspace, "region_poly" + vec_ext) 57 | 
arcpy.RasterToPolygon_conversion(ras_region, region_poly_name, "NO_SIMPLIFY") 58 | 59 | ### Select polygon based on minimum size 60 | arcpy.AddMessage("Selecting polygons ...") 61 | area_field = "Area" 62 | arcpy.AddField_management(region_poly_name, area_field, "DOUBLE") 63 | arcpy.CalculateField_management( 64 | region_poly_name, "Area", "!shape.area@squaremeters!", "PYTHON_9.3", "#" 65 | ) 66 | sqlExp = area_field + ">=" + str(min_size) 67 | 68 | region_poly_select_name = out_sink 69 | arcpy.Select_analysis(region_poly_name, region_poly_select_name, sqlExp) 70 | arcpy.CalculateField_management(region_poly_select_name, "gridcode", "1", "PYTHON") 71 | region_poly_ras = os.path.join(workspace, "region_poly_ras" + img_ext) 72 | arcpy.PolygonToRaster_conversion( 73 | region_poly_select_name, "gridcode", region_poly_ras, "CELL_CENTER", "NONE", "1" 74 | ) 75 | 76 | ### Convert foreground sink to 0 77 | arcpy.AddMessage("Converting foreground sink ...") 78 | ras_sink_bg = ras_mf - ras_mf 79 | ras_sink_bg.save(os.path.join(workspace, "ras_sink_bg" + img_ext)) 80 | 81 | # ras_sink_final = "ras_sink_final" 82 | arcpy.AddMessage("Calculating cell statistics ...") 83 | in_ras_list = [region_poly_ras, ras_sink_bg] 84 | ras_sink_final_name = arcpy.sa.CellStatistics(in_ras_list, "SUM", "DATA") 85 | arcpy.env.extent = ras_mf.extent 86 | ras_sink_final_name = arcpy.sa.ApplyEnvironment(ras_sink_final_name) 87 | 88 | ### Convert foreground sink 89 | arcpy.AddMessage("Creating partially filled DEM ...") 90 | dem_name = arcpy.sa.Con(ras_sink_final_name == 1, ras_mf, ras_fill) 91 | dem_name = arcpy.sa.ApplyEnvironment(dem_name) 92 | dem_name.save(os.path.join(workspace, "dem_partially_filled" + img_ext)) 93 | 94 | arcpy.AddMessage("Creating sink DEM ...") 95 | dem_sink = arcpy.sa.Con(ras_sink_final_name == 1, ras_mf, ras_fill) 96 | dem_sink = arcpy.sa.ApplyEnvironment(dem_sink) 97 | 98 | arcpy.AddMessage("Calculating sink depth ...") 99 | dem_sink_depth = ras_fill - dem_name 100 | 
dem_sink_depth_name = arcpy.sa.Con(dem_sink_depth > 0, dem_sink) 101 | dem_sink_depth_name = arcpy.sa.ApplyEnvironment(dem_sink_depth_name) 102 | dem_sink_depth_name.save(os.path.join(workspace, "sink" + img_ext)) 103 | 104 | sink_depth = arcpy.sa.Con(dem_sink_depth > 0, dem_sink_depth) 105 | sink_depth = arcpy.sa.ApplyEnvironment(sink_depth) 106 | sink_depth.save(os.path.join(workspace, "sink_depth" + img_ext)) 107 | 108 | arcpy.AddMessage("Zonal statistics ...") 109 | zonalStatistics(out_sink, in_dem) 110 | 111 | if buffer_dist > 0: 112 | arcpy.AddMessage("Creating buffered sink DEM ...") 113 | sink_buffer_name = os.path.join(workspace, "sink_buffer_poly" + vec_ext) 114 | sqlExp = str(buffer_dist) + " Meters" 115 | arcpy.Buffer_analysis(out_sink, sink_buffer_name, sqlExp, "", "", "", "") 116 | dem_sink_buffer = arcpy.sa.ExtractByMask(dem_name, sink_buffer_name) 117 | sink_buffer_img = os.path.join(workspace, "sink_buffer" + img_ext) 118 | arcpy.CopyRaster_management(dem_sink_buffer, sink_buffer_img) 119 | 120 | # add output data to map 121 | arcpy.AddMessage("Adding data to map ...") 122 | mxd = MapDocument("CURRENT") 123 | df = ListDataFrames(mxd, "*")[0] 124 | lyr_fully_filled_dem = Layer(os.path.join(workspace, dem_filled_name)) 125 | AddLayer(df, lyr_fully_filled_dem) 126 | lyr_partially_filled_dem = Layer( 127 | os.path.join(workspace, "dem_partially_filled" + img_ext) 128 | ) 129 | AddLayer(df, lyr_partially_filled_dem) 130 | lyr_sink_dem = Layer(os.path.join(workspace, "sink" + img_ext)) 131 | AddLayer(df, lyr_sink_dem) 132 | 133 | arcpy.AddMessage("Deleting temporary data ...") 134 | arcpy.Delete_management(region_poly_name) 135 | arcpy.Delete_management(region_poly_ras) 136 | arcpy.Delete_management(ras_sink_bin_name) 137 | arcpy.Delete_management(os.path.join(workspace, "ras_sink_bg" + img_ext)) 138 | arcpy.Delete_management(os.path.join(workspace, "ras_region" + img_ext)) 139 | arcpy.Delete_management(os.path.join(workspace, "ras_region_sub" + 
def zonalStatistics(in_shp_path, in_dem):
    """Attach zonal elevation statistics from *in_dem* to the sink polygons.

    Rasterizes the shapefile at *in_shp_path* (one zone per feature),
    computes zonal statistics against the DEM, joins
    COUNT/MIN/MAX/RANGE/MEAN/STD/SUM back onto the shapefile, and derives
    ``volume``, ``mean_depth`` and a 1-based ``ID`` field.  Temporary
    ``dbf``/``tif`` folders created next to the shapefile are removed on
    completion.

    Parameters
    ----------
    in_shp_path : str
        Path to the sink polygon shapefile (must end with ``.shp``).
    in_dem : str
        Path to the input DEM raster.

    Returns
    -------
    bool
        True when processing finishes.
    """
    in_shp_dir, in_shp_name = os.path.split(in_shp_path)
    arcpy.env.workspace = in_shp_dir
    arcpy.env.overwriteOutput = True
    arcpy.env.snapRaster = in_dem

    # Scratch folders for the zonal table (dbf) and rasterized polygons (tif).
    # exist_ok avoids a crash when a previous run left them behind
    # (the original os.mkdir raised if the folder already existed).
    dbf_dir = os.path.join(in_shp_dir, "dbf")
    tif_dir = os.path.join(in_shp_dir, "tif")
    os.makedirs(dbf_dir, exist_ok=True)
    os.makedirs(tif_dir, exist_ok=True)

    dem = arcpy.Raster(in_dem)
    cell_size = dem.meanCellHeight

    # Operate on the requested shapefile directly (the original scanned the
    # whole directory only to re-match this exact name).  splitext swaps just
    # the extension, unlike str.replace("shp", ...) which would also rewrite
    # any "shp" substring inside the base file name.
    if in_shp_name.endswith(".shp"):
        base = os.path.splitext(in_shp_name)[0]
        shp_path = os.path.join(in_shp_dir, in_shp_name)
        dbf_path = os.path.join(dbf_dir, "zonal_" + base + ".dbf")
        tif_path = os.path.join(tif_dir, base + ".tif")
        arcpy.PolygonToRaster_conversion(
            shp_path,
            value_field="FID",
            out_rasterdataset=tif_path,
            cell_assignment="CELL_CENTER",
            priority_field="NONE",
            cellsize=cell_size,
        )
        arcpy.sa.ZonalStatisticsAsTable(
            tif_path, "Value", in_dem, dbf_path, "DATA", "ALL"
        )
        arcpy.JoinField_management(
            shp_path,
            in_field="FID",
            join_table=dbf_path,
            join_field="Value",
            fields="COUNT;MIN;MAX;RANGE;MEAN;STD;SUM",
        )
        arcpy.AddField_management(shp_path, field_name="volume", field_type="FLOAT")
        arcpy.AddField_management(
            shp_path, field_name="mean_depth", field_type="FLOAT"
        )
        # volume = (cell count * max elevation - sum of elevations) * cell area
        arcpy.CalculateField_management(
            shp_path,
            field="volume",
            expression="( !COUNT! * !MAX! - !SUM!) * ( !AREA! / !COUNT! )",
            expression_type="PYTHON_9.3",
        )
        arcpy.CalculateField_management(
            shp_path,
            field="mean_depth",
            expression="!volume! / !AREA!",
            expression_type="PYTHON_9.3",
        )
        # 1-based feature ID used by downstream joins.
        arcpy.CalculateField_management(
            shp_path,
            field="ID",
            expression="!FID! + 1",
            expression_type="PYTHON_9.3",
        )
        arcpy.DeleteField_management(shp_path, drop_field="GRIDCODE")

    if os.path.exists(dbf_dir):
        shutil.rmtree(dbf_dir)
    if os.path.exists(tif_dir):
        shutil.rmtree(tif_dir)
    return True
def delineate_catchment(in_dem, in_sink, out_catchment):
    """Delineate the contributing catchment for every sink polygon.

    Computes D8 flow direction from *in_dem*, rasterizes the sink polygons,
    runs the Watershed tool, converts the result back to polygons, and
    dissolves them into *out_catchment*.  Adds a ``cat_area`` field (square
    meters) to both the catchment polygons and *in_sink*, plus the
    depression-to-catchment area ratio ``dep2catR`` on *in_sink*.

    Parameters
    ----------
    in_dem : str
        Input DEM raster.
    in_sink : str
        Sink polygon feature class (must carry ``ID`` and ``AREA`` fields).
    out_catchment : str
        Output catchment polygon feature class (.shp or geodatabase).

    Returns
    -------
    str
        The path of *out_catchment*.
    """
    arcpy.CheckOutExtension("Spatial")
    workspace = os.path.split(out_catchment)[0]
    arcpy.env.workspace = workspace
    arcpy.env.overwriteOutput = True

    if not arcpy.Exists(in_dem):
        arcpy.AddMessage("The input raster does not exist")
        quit()

    if os.path.splitext(out_catchment)[1].lower() == ".shp":
        FieldOID = "ID"
        FlowDirection = os.path.join(workspace, "FlowDirection.tif")
        SinkRaster = os.path.join(workspace, "SinkRaster.tif")
        Watershed = os.path.join(workspace, "Watershed.tif")
        Catchment_tmp = os.path.join(workspace, "Catchment_tmp.shp")
        Catchment_select = os.path.join(workspace, "Catchment_select.shp")
    else:
        FieldOID = "OBJECTID"
        FlowDirection = os.path.join(workspace, "FlowDirection")
        SinkRaster = os.path.join(workspace, "SinkRaster")
        Watershed = os.path.join(workspace, "Watershed")
        Catchment_tmp = os.path.join(workspace, "Catchment")
        # Bug fix: Catchment_select was previously undefined in this branch,
        # raising NameError as soon as a geodatabase output path was used.
        Catchment_select = os.path.join(workspace, "Catchment_select")

    input_dem = arcpy.Raster(in_dem)
    flow_direction = arcpy.sa.FlowDirection(input_dem)
    flow_direction.save(FlowDirection)

    cell_size = input_dem.meanCellWidth
    arcpy.env.extent = input_dem.extent
    arcpy.PolygonToRaster_conversion(
        in_sink, FieldOID, SinkRaster, "CELL_CENTER", "NONE", cell_size
    )

    watershed = arcpy.sa.Watershed(flow_direction, SinkRaster, "Value")
    watershed.save(Watershed)

    # Vectorize the watershed grid and keep only positively labeled zones.
    arcpy.RasterToPolygon_conversion(watershed, Catchment_tmp, "NO_SIMPLIFY", "Value")
    sqlExp = "GRIDCODE>0"
    arcpy.Select_analysis(Catchment_tmp, Catchment_select, sqlExp)
    arcpy.Dissolve_management(
        Catchment_select,
        out_catchment,
        dissolve_field="GRIDCODE",
        statistics_fields="",
        multi_part="MULTI_PART",
        unsplit_lines="DISSOLVE_LINES",
    )

    area_field = "cat_area"
    arcpy.AddField_management(out_catchment, area_field, "DOUBLE")
    arcpy.CalculateField_management(
        out_catchment, area_field, "!shape.area@squaremeters!", "PYTHON_9.3", "#"
    )

    # Transfer catchment cell counts back onto the sink polygons and derive
    # catchment area and the depression/catchment area ratio.
    arcpy.JoinField_management(
        in_sink, in_field="ID", join_table=Watershed, join_field="Value", fields="Count"
    )
    arcpy.AddField_management(in_sink, field_name="cat_area", field_type="FLOAT")
    arcpy.CalculateField_management(
        in_sink,
        field="cat_area",
        expression="!Count_1! * math.pow(" + str(cell_size) + ",2)",
        expression_type="PYTHON_9.3",
    )
    arcpy.DeleteField_management(in_sink, drop_field="Count_1")
    arcpy.AddField_management(in_sink, field_name="dep2catR", field_type="FLOAT")
    arcpy.CalculateField_management(
        in_sink,
        field="dep2catR",
        expression="!AREA! / !cat_area!",
        expression_type="PYTHON_9.3",
    )
    arcpy.Delete_management(Catchment_tmp)
    arcpy.Delete_management(Catchment_select)

    return out_catchment
def delineate_catchment(in_dem, in_sink, out_catchment):
    """Delineate the contributing catchment for every sink polygon (ArcMap).

    Computes D8 flow direction from *in_dem*, rasterizes the sink polygons,
    runs the Watershed tool, converts the result back to polygons, and
    dissolves them into *out_catchment*.  Adds a ``cat_area`` field (square
    meters) to both the catchment polygons and *in_sink*, plus the
    depression-to-catchment area ratio ``dep2catR`` on *in_sink*.

    Parameters
    ----------
    in_dem : str
        Input DEM raster.
    in_sink : str
        Sink polygon feature class (must carry ``ID`` and ``AREA`` fields).
    out_catchment : str
        Output catchment polygon feature class (.shp or geodatabase).

    Returns
    -------
    str
        The path of *out_catchment*.
    """
    arcpy.CheckOutExtension("Spatial")
    workspace = os.path.split(out_catchment)[0]
    arcpy.env.workspace = workspace
    arcpy.env.overwriteOutput = True

    if not arcpy.Exists(in_dem):
        arcpy.AddMessage("The input raster does not exist")
        quit()

    if os.path.splitext(out_catchment)[1].lower() == ".shp":
        FieldOID = "ID"
        FlowDirection = os.path.join(workspace, "FlowDirection.tif")
        SinkRaster = os.path.join(workspace, "SinkRaster.tif")
        Watershed = os.path.join(workspace, "Watershed.tif")
        Catchment_tmp = os.path.join(workspace, "Catchment_tmp.shp")
        Catchment_select = os.path.join(workspace, "Catchment_select.shp")
    else:
        FieldOID = "OBJECTID"
        FlowDirection = os.path.join(workspace, "FlowDirection")
        SinkRaster = os.path.join(workspace, "SinkRaster")
        Watershed = os.path.join(workspace, "Watershed")
        Catchment_tmp = os.path.join(workspace, "Catchment")
        # Bug fix: Catchment_select was previously undefined in this branch,
        # raising NameError as soon as a geodatabase output path was used.
        Catchment_select = os.path.join(workspace, "Catchment_select")

    input_dem = arcpy.Raster(in_dem)
    flow_direction = arcpy.sa.FlowDirection(input_dem)
    flow_direction.save(FlowDirection)

    cell_size = input_dem.meanCellWidth
    arcpy.env.extent = input_dem.extent
    arcpy.PolygonToRaster_conversion(
        in_sink, FieldOID, SinkRaster, "CELL_CENTER", "NONE", cell_size
    )

    watershed = arcpy.sa.Watershed(flow_direction, SinkRaster, "Value")
    watershed.save(Watershed)

    # Vectorize the watershed grid and keep only positively labeled zones.
    arcpy.RasterToPolygon_conversion(watershed, Catchment_tmp, "NO_SIMPLIFY", "Value")
    sqlExp = "GRIDCODE>0"
    arcpy.Select_analysis(Catchment_tmp, Catchment_select, sqlExp)
    arcpy.Dissolve_management(
        Catchment_select,
        out_catchment,
        dissolve_field="GRIDCODE",
        statistics_fields="",
        multi_part="MULTI_PART",
        unsplit_lines="DISSOLVE_LINES",
    )

    area_field = "cat_area"
    arcpy.AddField_management(out_catchment, area_field, "DOUBLE")
    arcpy.CalculateField_management(
        out_catchment, area_field, "!shape.area@squaremeters!", "PYTHON_9.3", "#"
    )

    # Transfer catchment cell counts back onto the sink polygons and derive
    # catchment area and the depression/catchment area ratio.
    arcpy.JoinField_management(
        in_sink, in_field="ID", join_table=Watershed, join_field="Value", fields="Count"
    )
    arcpy.AddField_management(in_sink, field_name="cat_area", field_type="FLOAT")
    arcpy.CalculateField_management(
        in_sink,
        field="cat_area",
        expression="!Count_1! * math.pow(" + str(cell_size) + ",2)",
        expression_type="PYTHON_9.3",
    )
    arcpy.DeleteField_management(in_sink, drop_field="Count_1")
    arcpy.AddField_management(in_sink, field_name="dep2catR", field_type="FLOAT")
    arcpy.CalculateField_management(
        in_sink,
        field="dep2catR",
        expression="!AREA! / !cat_area!",
        expression_type="PYTHON_9.3",
    )
    arcpy.Delete_management(Catchment_tmp)
    arcpy.Delete_management(Catchment_select)

    return out_catchment
def FlowPath(in_dem, in_sink, rain_intensity, out_flowpath):
    """Derive inter-depression flow paths and sink connectivity.

    Traces least-cost flow paths between the sink polygons in *in_sink*
    over the DEM *in_dem*, orients each path segment downhill, attaches
    start/end sink IDs and hydrologic attributes (volume, catchment area,
    simulated ponding depth, rain intensity, time to inundation), and
    writes the result to *out_flowpath*.  Also adds a ``Downstream`` field
    to *in_sink* recording each sink's downstream neighbour.

    Parameters
    ----------
    in_dem : str
        Input DEM raster.
    in_sink : str
        Sink polygon feature class with ``ID``, ``volume`` and ``cat_area``
        fields (produced by the upstream toolbox steps).
    rain_intensity : float
        Rainfall intensity in m/h for the time-to-inundation estimate.
    out_flowpath : str
        Output flow-path polyline feature class (.shp or geodatabase).

    Returns
    -------
    str
        The path of *out_flowpath*.
    """
    arcpy.CheckOutExtension("Spatial")
    workspace = os.path.split(out_flowpath)[0]
    arcpy.env.workspace = workspace
    arcpy.env.overwriteOutput = True
    dem = arcpy.Raster(in_dem)
    cell_size = dem.meanCellWidth

    if not arcpy.Exists(in_dem):
        arcpy.AddMessage("The input raster does not exist")
        quit()

    if os.path.splitext(out_flowpath)[1].lower() == ".shp":
        FieldOID = "FID"
        FlowDir = os.path.join(workspace, "FlowDir.tif")
        SinkCentroid = os.path.join(workspace, "SinkCentroid.shp")
        CostPath = os.path.join(workspace, "CostPath.tif")
        PathThin = os.path.join(workspace, "PathThin.tif")
        PathLine = os.path.join(workspace, "PathLine.shp")
        PathLineErase = os.path.join(workspace, "PathLineErase.shp")
        Path = os.path.join(workspace, "FlowPath_Raw.shp")
        LineFlip = os.path.join(workspace, "LineFlip.shp")
        LineNoFlip = os.path.join(workspace, "LineNoFlip.shp")
        FlowFrom = os.path.join(workspace, "FlowFrom.shp")
        FlowTo = os.path.join(workspace, "FlowTo.shp")
        PathLineEraseSingle = os.path.join(workspace, "PathLineEraseSingle.shp")
        LineStart = os.path.join(workspace, "LineStart.shp")
        LineEnd = os.path.join(workspace, "LineEnd.shp")
        LineStartElev = os.path.join(workspace, "LineStartElev.shp")
        LineEndElev = os.path.join(workspace, "LineEndElev.shp")
        PathBuffer = os.path.join(workspace, "PathBuffer.shp")
        PathBufferSingle = os.path.join(workspace, "PathBufferSingle.shp")
        FlowFromJoin = os.path.join(workspace, "FlowFromJoin.shp")
        FlowToJoin = os.path.join(workspace, "FlowToJoin.shp")
        FlowFromJoinBuffer = os.path.join(workspace, "FlowFromJoinBuffer.shp")
        FlowToJoinBuffer = os.path.join(workspace, "FlowToJoinBuffer.shp")
    else:
        FieldOID = "OBJECTID"
        FlowDir = os.path.join(workspace, "FlowDir")
        SinkCentroid = os.path.join(workspace, "SinkCentroid")
        CostPath = os.path.join(workspace, "CostPath")
        PathThin = os.path.join(workspace, "PathThin")
        PathLine = os.path.join(workspace, "PathLine")
        PathLineErase = os.path.join(workspace, "PathLineErase")
        Path = os.path.join(workspace, "FlowPath")
        LineFlip = os.path.join(workspace, "LineFlip")
        LineNoFlip = os.path.join(workspace, "LineNoFlip")
        FlowFrom = os.path.join(workspace, "FlowFrom")
        FlowTo = os.path.join(workspace, "FlowTo")
        LineStart = os.path.join(workspace, "LineStart.shp")
        LineEnd = os.path.join(workspace, "LineEnd.shp")
        # Bug fix: the names below were undefined in this branch, raising
        # NameError as soon as a geodatabase output path was used.
        LineStartElev = os.path.join(workspace, "LineStartElev")
        LineEndElev = os.path.join(workspace, "LineEndElev")
        PathLineEraseSingle = os.path.join(workspace, "PathLineEraseSingle")
        PathBuffer = os.path.join(workspace, "PathBuffer")
        PathBufferSingle = os.path.join(workspace, "PathBufferSingle")
        FlowFromJoin = os.path.join(workspace, "FlowFromJoin")
        FlowToJoin = os.path.join(workspace, "FlowToJoin")
        FlowFromJoinBuffer = os.path.join(workspace, "FlowFromJoinBuffer")
        FlowToJoinBuffer = os.path.join(workspace, "FlowToJoinBuffer")

    ### Delineate flow direction
    flow_dir = arcpy.sa.FlowDirection(in_dem)
    flow_dir.save(FlowDir)

    ### Extract the depression polygon centroids
    arcpy.FeatureToPoint_management(in_sink, SinkCentroid, "INSIDE")

    ### Delineate cost path
    cost_path = arcpy.sa.CostPath(SinkCentroid, in_dem, FlowDir, "EACH_CELL", FieldOID)
    cost_path.save(CostPath)

    ### Thin the raster cost path to single-cell width
    path_thin = arcpy.sa.Thin(cost_path, "#", "#", "#", 1)
    path_thin.save(PathThin)

    ### Convert the raster path to vector
    arcpy.RasterToPolyline_conversion(path_thin, PathLine, simplify="NO_SIMPLIFY")

    ### Erase the flow path within depression polygons and record the
    ### start/end elevation of each remaining single-part segment.
    arcpy.Erase_analysis(PathLine, in_sink, PathLineErase)
    arcpy.MultipartToSinglepart_management(PathLineErase, PathLineEraseSingle)
    arcpy.FeatureVerticesToPoints_management(PathLineEraseSingle, LineStart, "START")
    arcpy.FeatureVerticesToPoints_management(PathLineEraseSingle, LineEnd, "END")
    arcpy.sa.ExtractValuesToPoints(LineStart, in_dem, LineStartElev)
    arcpy.sa.ExtractValuesToPoints(LineEnd, in_dem, LineEndElev)
    arcpy.AddField_management(LineStartElev, field_name="FromElev", field_type="FLOAT")
    arcpy.AddField_management(LineEndElev, field_name="ToElev", field_type="FLOAT")
    arcpy.CalculateField_management(
        in_table=LineStartElev,
        field="FromElev",
        expression="!RASTERVALU!",
        expression_type="PYTHON",
        code_block="",
    )
    arcpy.CalculateField_management(
        in_table=LineEndElev,
        field="ToElev",
        expression="!RASTERVALU!",
        expression_type="PYTHON",
        code_block="",
    )
    arcpy.JoinField_management(
        in_data=PathLineEraseSingle,
        in_field="FID",
        join_table=LineStartElev,
        join_field="FID",
        fields="FromElev",
    )
    arcpy.JoinField_management(
        in_data=PathLineEraseSingle,
        in_field="FID",
        join_table=LineEndElev,
        join_field="FID",
        fields="ToElev",
    )
    arcpy.CopyFeatures_management(PathLineEraseSingle, Path)

    # Flip the segments that were digitized uphill so every path flows
    # downhill (FromElev >= ToElev after the merge below).
    arcpy.AddField_management(Path, "Flip", "SHORT")

    FromElev = arcpy.AddFieldDelimiters(workspace, "FromElev")
    ToElev = arcpy.AddFieldDelimiters(workspace, "ToElev")
    sql = FromElev + "<" + ToElev
    sql2 = FromElev + ">=" + ToElev

    arcpy.Select_analysis(Path, LineFlip, sql)
    arcpy.CalculateField_management(LineFlip, "Flip", "1", "PYTHON")
    arcpy.FlipLine_edit(LineFlip)
    arcpy.Select_analysis(Path, LineNoFlip, sql2)

    arcpy.Delete_management(Path)
    arcpy.Merge_management([LineFlip, LineNoFlip], Path)
    arcpy.AddField_management(Path, field_name="StartElev", field_type="FLOAT")
    arcpy.AddField_management(Path, field_name="EndElev", field_type="FLOAT")
    arcpy.AddField_management(Path, field_name="DiffElev", field_type="FLOAT")
    arcpy.AddField_management(Path, field_name="Length", field_type="FLOAT")
    arcpy.CalculateField_management(
        in_table=Path,
        field="StartElev",
        expression="max( !FromElev! , !ToElev! )",
        expression_type="PYTHON",
        code_block="",
    )
    arcpy.CalculateField_management(
        in_table=Path,
        field="EndElev",
        expression="min( !FromElev! , !ToElev! )",
        expression_type="PYTHON",
        code_block="",
    )
    arcpy.CalculateField_management(
        in_table=Path,
        field="DiffElev",
        expression="!StartElev! - !EndElev!",
        expression_type="PYTHON",
        code_block="",
    )
    arcpy.CalculateField_management(
        Path, "Length", "!shape.length@meters!", "PYTHON_9.3", "#"
    )
    arcpy.DeleteField_management(
        in_table=Path,
        drop_field="ARCID;GRID_CODE;FROM_NODE;TO_NODE;ORIG_FID;FromElev;ToElev;Flip",
    )
    # If a flow path is shorter than 2 pixels, delete it.
    sql3 = "Length >" + str(2 * cell_size)
    arcpy.Select_analysis(Path, out_flowpath, sql3)

    arcpy.FeatureVerticesToPoints_management(out_flowpath, FlowFrom, "START")
    arcpy.FeatureVerticesToPoints_management(out_flowpath, FlowTo, "END")
    arcpy.AddField_management(FlowFrom, field_name="FlowFromID", field_type="Long")
    arcpy.AddField_management(FlowTo, field_name="FlowToID", field_type="Long")
    arcpy.CalculateField_management(
        in_table=FlowFrom,
        field="FlowFromID",
        expression="!FID! + 1",
        expression_type="PYTHON",
        code_block="",
    )
    arcpy.CalculateField_management(
        in_table=FlowTo,
        field="FlowToID",
        expression="!FID! + 1",
        expression_type="PYTHON",
        code_block="",
    )

    # Derive sink connectivity: buffer the raw paths, split into single
    # parts, and spatially join the path endpoints to sinks and buffers.
    arcpy.Buffer_analysis(
        in_features=Path,
        out_feature_class=PathBuffer,
        buffer_distance_or_field="0.1 Meters",
        line_side="FULL",
        line_end_type="FLAT",
        dissolve_option="ALL",
        dissolve_field="",
        method="PLANAR",
    )
    arcpy.MultipartToSinglepart_management(
        in_features=PathBuffer, out_feature_class=PathBufferSingle
    )
    arcpy.AddField_management(
        PathBufferSingle, field_name="BufferID", field_type="Long"
    )
    arcpy.CalculateField_management(
        in_table=PathBufferSingle,
        field="BufferID",
        expression="!FID! + 1",
        expression_type="PYTHON",
        code_block="",
    )

    search_radius = str(2.1 * cell_size) + " Meters"
    arcpy.SpatialJoin_analysis(
        target_features=FlowFrom,
        join_features=in_sink,
        out_feature_class=FlowFromJoin,
        join_operation="JOIN_ONE_TO_ONE",
        join_type="KEEP_COMMON",
        match_option="INTERSECT",
        search_radius=search_radius,
        distance_field_name="",
    )
    arcpy.SpatialJoin_analysis(
        target_features=FlowTo,
        join_features=in_sink,
        out_feature_class=FlowToJoin,
        join_operation="JOIN_ONE_TO_ONE",
        join_type="KEEP_COMMON",
        match_option="INTERSECT",
        search_radius=search_radius,
        distance_field_name="",
    )
    arcpy.SpatialJoin_analysis(
        target_features=FlowFromJoin,
        join_features=PathBufferSingle,
        out_feature_class=FlowFromJoinBuffer,
        join_operation="JOIN_ONE_TO_ONE",
        join_type="KEEP_COMMON",
        match_option="INTERSECT",
        search_radius=search_radius,
        distance_field_name="",
    )
    arcpy.SpatialJoin_analysis(
        target_features=FlowToJoin,
        join_features=PathBufferSingle,
        out_feature_class=FlowToJoinBuffer,
        join_operation="JOIN_ONE_TO_ONE",
        join_type="KEEP_COMMON",
        match_option="INTERSECT",
        search_radius=search_radius,
        distance_field_name="",
    )
    arcpy.JoinField_management(
        in_data=FlowFromJoinBuffer,
        in_field="BufferID",
        join_table=FlowToJoinBuffer,
        join_field="BufferID",
        fields="ID",
    )
    arcpy.JoinField_management(
        in_data=in_sink,
        in_field="ID",
        join_table=FlowFromJoinBuffer,
        join_field="ID",
        fields="ID_12",
    )
    arcpy.AddField_management(in_sink, field_name="Downstream", field_type="LONG")
    arcpy.CalculateField_management(
        in_table=in_sink,
        field="Downstream",
        expression="!ID_12!",
        expression_type="PYTHON",
        code_block="",
    )
    arcpy.DeleteField_management(in_table=in_sink, drop_field="ID_12")

    # Hydrologic attributes: simulated ponding depth = volume / catchment
    # area, and time to inundation = depth / rain intensity.
    arcpy.AddField_management(in_sink, field_name="simu_depth", field_type="FLOAT")
    arcpy.AddField_management(in_sink, field_name="rain_inten", field_type="FLOAT")
    arcpy.AddField_management(in_sink, field_name="time_inund", field_type="FLOAT")
    arcpy.CalculateField_management(
        in_table=in_sink,
        field="simu_depth",
        expression="!volume! / !cat_area!",
        expression_type="PYTHON",
        code_block="",
    )
    arcpy.CalculateField_management(
        in_table=in_sink,
        field="rain_inten",
        expression=rain_intensity,
        expression_type="PYTHON",
        code_block="",
    )
    arcpy.CalculateField_management(
        in_table=in_sink,
        field="time_inund",
        expression="!simu_depth! / !rain_inten!",
        expression_type="PYTHON",
        code_block="",
    )

    # Tag each flow path with its start and end sink IDs and copy the
    # start-sink attributes onto the path.
    arcpy.JoinField_management(
        in_data=out_flowpath,
        in_field="FID",
        join_table=FlowFromJoin,
        join_field="ORIG_FID",
        fields="ID",
    )
    arcpy.AddField_management(
        in_table=out_flowpath, field_name="start_sink", field_type="LONG"
    )
    arcpy.CalculateField_management(
        in_table=out_flowpath,
        field="start_sink",
        expression="!ID!",
        expression_type="PYTHON",
        code_block="",
    )
    arcpy.DeleteField_management(in_table=out_flowpath, drop_field="ID")
    arcpy.JoinField_management(
        in_data=out_flowpath,
        in_field="FID",
        join_table=FlowToJoin,
        join_field="ORIG_FID",
        fields="ID",
    )
    arcpy.AddField_management(
        in_table=out_flowpath, field_name="end_sink", field_type="LONG"
    )
    arcpy.CalculateField_management(
        in_table=out_flowpath,
        field="end_sink",
        expression="!ID!",
        expression_type="PYTHON",
        code_block="",
    )
    arcpy.DeleteField_management(in_table=out_flowpath, drop_field="ID")
    arcpy.JoinField_management(
        in_data=out_flowpath,
        in_field="start_sink",
        join_table=in_sink,
        join_field="ID",
        fields="volume;cat_area;simu_depth;rain_inten;time_inund",
    )

    # Clean up intermediate datasets.
    for dataset in (
        LineFlip,
        LineNoFlip,
        CostPath,
        FlowDir,
        PathLineErase,
        PathThin,
        LineStart,
        LineStartElev,
        LineEnd,
        LineEndElev,
        PathLineEraseSingle,
        SinkCentroid,
        PathLine,
        PathBuffer,
        PathBufferSingle,
        FlowFromJoin,
        FlowToJoin,
        FlowFromJoinBuffer,
        FlowToJoinBuffer,
    ):
        arcpy.Delete_management(dataset)

    arcpy.AddMessage("Flow path delineation done!")
    return out_flowpath
def delete_row(in_shp, field):
    """Delete every feature of *in_shp* whose *field* value equals 0."""
    with arcpy.da.UpdateCursor(in_shp, [field]) as cursor:
        for row in cursor:
            if row[0] == 0:
                cursor.deleteRow()


def add_rank(in_shp, sort_field, rank_field):
    """Add *rank_field* to *in_shp*, ranking features by *sort_field*.

    Equal *sort_field* values receive the same (dense) rank, starting at 1
    for the smallest value.
    """
    try:
        arcpy.AddField_management(
            in_table=in_shp, field_name=rank_field, field_type="LONG"
        )
        # Dense rank: collect the distinct values once, then map each value
        # to its 1-based position in sorted order.
        with arcpy.da.SearchCursor(in_shp, [sort_field]) as cursor:
            values = {row[0] for row in cursor}
        ranks = {value: i for i, value in enumerate(sorted(values), start=1)}
        with arcpy.da.UpdateCursor(in_shp, [sort_field, rank_field]) as cursor:
            for row in cursor:
                row[1] = ranks[row[0]]
                cursor.updateRow(row)
    except Exception:
        # Bug fix: the original bare except discarded the return value of
        # arcpy.GetMessages(); surface the messages so failures are visible.
        arcpy.AddMessage(arcpy.GetMessages())
"""ArcGIS script tool: delineate overland flow paths between surface
depressions (sinks) and derive sink connectivity / inundation attributes.

Script-tool parameters:
    0  in_dem         -- input DEM raster
    1  in_sink        -- sink (depression) polygon feature class; must carry
                         ``ID``, ``volume`` and ``cat_area`` fields
    2  rain_intensity -- rainfall intensity in cm/h (converted to m/h below)
    3  out_flowpath   -- output flow-path polyline feature class
"""

import arcpy
import os
import sys
import time
import string
from arcpy import env
import collections


def FlowPath(in_dem, in_sink, rain_intensity, out_flowpath):
    """Delineate downhill flow paths connecting depression polygons.

    Builds least-cost paths from each sink centroid over the DEM, orients
    every segment downhill, attaches elevation/length attributes, and derives
    sink-to-sink connectivity plus simulated inundation attributes.

    Side effects: adds ``Downstream``, ``simu_depth``, ``rain_inten`` and
    ``time_inund`` fields to *in_sink*; writes many intermediate datasets to
    the output workspace and deletes them on success.

    Args:
        in_dem: Path to the input DEM raster.
        in_sink: Sink polygon feature class (modified in place).
        rain_intensity: Rainfall intensity in m/h.
        out_flowpath: Path of the output polyline feature class.

    Returns:
        The *out_flowpath* path, for chaining.
    """
    arcpy.CheckOutExtension("Spatial")
    workspace = os.path.split(out_flowpath)[0]
    arcpy.env.workspace = workspace
    arcpy.env.overwriteOutput = True

    # Validate the input before opening it as a raster (the original code
    # constructed arcpy.Raster first, which would fail ahead of this check).
    if not arcpy.Exists(in_dem):
        arcpy.AddError("The input raster does not exist")
        sys.exit(1)

    dem = arcpy.Raster(in_dem)
    cell_size = dem.meanCellWidth

    # Name every intermediate dataset up front. Shapefile workspaces need
    # file extensions and use "FID" as the object-ID field; geodatabase
    # workspaces use no extension and "OBJECTID". (The original code defined
    # only a subset of these names in the geodatabase branch, which raised
    # NameError for non-shapefile outputs, and wrongly gave LineStart/LineEnd
    # a ".shp" suffix inside a geodatabase.)
    if os.path.splitext(out_flowpath)[1].lower() == ".shp":
        FieldOID = "FID"
        rext, vext = ".tif", ".shp"  # raster / vector dataset extensions
        Path = os.path.join(workspace, "FlowPath_Raw.shp")
    else:
        FieldOID = "OBJECTID"
        rext, vext = "", ""
        Path = os.path.join(workspace, "FlowPath")

    FlowDir = os.path.join(workspace, "FlowDir" + rext)
    CostPath = os.path.join(workspace, "CostPath" + rext)
    PathThin = os.path.join(workspace, "PathThin" + rext)
    SinkCentroid = os.path.join(workspace, "SinkCentroid" + vext)
    PathLine = os.path.join(workspace, "PathLine" + vext)
    PathLineErase = os.path.join(workspace, "PathLineErase" + vext)
    PathLineEraseSingle = os.path.join(workspace, "PathLineEraseSingle" + vext)
    LineFlip = os.path.join(workspace, "LineFlip" + vext)
    LineNoFlip = os.path.join(workspace, "LineNoFlip" + vext)
    FlowFrom = os.path.join(workspace, "FlowFrom" + vext)
    FlowTo = os.path.join(workspace, "FlowTo" + vext)
    LineStart = os.path.join(workspace, "LineStart" + vext)
    LineEnd = os.path.join(workspace, "LineEnd" + vext)
    LineStartElev = os.path.join(workspace, "LineStartElev" + vext)
    LineEndElev = os.path.join(workspace, "LineEndElev" + vext)
    PathBuffer = os.path.join(workspace, "PathBuffer" + vext)
    PathBufferSingle = os.path.join(workspace, "PathBufferSingle" + vext)
    FlowFromJoin = os.path.join(workspace, "FlowFromJoin" + vext)
    FlowToJoin = os.path.join(workspace, "FlowToJoin" + vext)
    FlowFromJoinBuffer = os.path.join(workspace, "FlowFromJoinBuffer" + vext)
    FlowToJoinBuffer = os.path.join(workspace, "FlowToJoinBuffer" + vext)

    # Delineate flow direction over the raw DEM.
    flow_dir = arcpy.sa.FlowDirection(in_dem)
    flow_dir.save(FlowDir)

    # Extract the depression polygon centroids (guaranteed inside polygons).
    arcpy.FeatureToPoint_management(in_sink, SinkCentroid, "INSIDE")

    # Trace the least-cost path from each centroid down the DEM.
    cost_path = arcpy.sa.CostPath(SinkCentroid, in_dem, FlowDir, "EACH_CELL", FieldOID)
    cost_path.save(CostPath)

    # Thin the raster cost path to single-cell width, then vectorize it.
    path_thin = arcpy.sa.Thin(cost_path, "#", "#", "#", 1)
    path_thin.save(PathThin)
    arcpy.RasterToPolyline_conversion(path_thin, PathLine, simplify="NO_SIMPLIFY")

    # Erase the flow path inside depression polygons and explode multiparts
    # so every segment runs between two depressions.
    arcpy.Erase_analysis(PathLine, in_sink, PathLineErase)
    arcpy.MultipartToSinglepart_management(PathLineErase, PathLineEraseSingle)

    # Sample the DEM at both ends of every segment so segments can later be
    # oriented downhill.
    arcpy.FeatureVerticesToPoints_management(PathLineEraseSingle, LineStart, "START")
    arcpy.FeatureVerticesToPoints_management(PathLineEraseSingle, LineEnd, "END")
    arcpy.sa.ExtractValuesToPoints(LineStart, in_dem, LineStartElev)
    arcpy.sa.ExtractValuesToPoints(LineEnd, in_dem, LineEndElev)
    arcpy.AddField_management(LineStartElev, field_name="FromElev", field_type="FLOAT")
    arcpy.AddField_management(LineEndElev, field_name="ToElev", field_type="FLOAT")
    arcpy.CalculateField_management(LineStartElev, "FromElev", "!RASTERVALU!", "PYTHON")
    arcpy.CalculateField_management(LineEndElev, "ToElev", "!RASTERVALU!", "PYTHON")
    arcpy.JoinField_management(PathLineEraseSingle, "FID", LineStartElev, "FID", "FromElev")
    arcpy.JoinField_management(PathLineEraseSingle, "FID", LineEndElev, "FID", "ToElev")
    arcpy.CopyFeatures_management(PathLineEraseSingle, Path)

    # Flip any segment whose start is lower than its end so that all
    # segments point downhill.
    arcpy.AddField_management(Path, "Flip", "SHORT")
    FromElev = arcpy.AddFieldDelimiters(workspace, "FromElev")
    ToElev = arcpy.AddFieldDelimiters(workspace, "ToElev")
    sql = FromElev + "<" + ToElev
    sql2 = FromElev + ">=" + ToElev
    arcpy.Select_analysis(Path, LineFlip, sql)
    arcpy.CalculateField_management(LineFlip, "Flip", "1", "PYTHON")
    arcpy.FlipLine_edit(LineFlip)
    arcpy.Select_analysis(Path, LineNoFlip, sql2)
    arcpy.Delete_management(Path)
    arcpy.Merge_management([LineFlip, LineNoFlip], Path)

    # Elevation / length attributes on the oriented segments.
    arcpy.AddField_management(Path, field_name="StartElev", field_type="FLOAT")
    arcpy.AddField_management(Path, field_name="EndElev", field_type="FLOAT")
    arcpy.AddField_management(Path, field_name="DiffElev", field_type="FLOAT")
    arcpy.AddField_management(Path, field_name="Length", field_type="FLOAT")
    arcpy.CalculateField_management(Path, "StartElev", "max( !FromElev! , !ToElev! )", "PYTHON")
    arcpy.CalculateField_management(Path, "EndElev", "min( !FromElev! , !ToElev! )", "PYTHON")
    arcpy.CalculateField_management(Path, "DiffElev", "!StartElev! - !EndElev!", "PYTHON")
    arcpy.CalculateField_management(Path, "Length", "!shape.length@meters!", "PYTHON_9.3", "#")
    arcpy.DeleteField_management(
        Path, "ARCID;GRID_CODE;FROM_NODE;TO_NODE;ORIG_FID;FromElev;ToElev;Flip"
    )

    # If a flow path is shorter than 2 pixels, delete it.
    sql3 = "Length >" + str(2 * cell_size)
    arcpy.Select_analysis(Path, out_flowpath, sql3)

    # Endpoint layers used below to derive sink connectivity.
    arcpy.FeatureVerticesToPoints_management(out_flowpath, FlowFrom, "START")
    arcpy.FeatureVerticesToPoints_management(out_flowpath, FlowTo, "END")
    arcpy.AddField_management(FlowFrom, field_name="FlowFromID", field_type="Long")
    arcpy.AddField_management(FlowTo, field_name="FlowToID", field_type="Long")
    arcpy.CalculateField_management(FlowFrom, "FlowFromID", "!FID! + 1", "PYTHON")
    arcpy.CalculateField_management(FlowTo, "FlowToID", "!FID! + 1", "PYTHON")

    # Derive sink connectivity: buffer the raw paths so both endpoints of a
    # path fall into the same single-part buffer polygon; BufferID then
    # identifies the path that links an upstream and a downstream sink.
    arcpy.Buffer_analysis(
        in_features=Path,
        out_feature_class=PathBuffer,
        buffer_distance_or_field="0.1 Meters",
        line_side="FULL",
        line_end_type="FLAT",
        dissolve_option="ALL",
        dissolve_field="",
        method="PLANAR",
    )
    arcpy.MultipartToSinglepart_management(PathBuffer, PathBufferSingle)
    arcpy.AddField_management(PathBufferSingle, field_name="BufferID", field_type="Long")
    arcpy.CalculateField_management(PathBufferSingle, "BufferID", "!FID! + 1", "PYTHON")

    # Allow endpoints to snap to features within ~2 pixels.
    search_radius = str(2.1 * cell_size) + " Meters"
    arcpy.SpatialJoin_analysis(
        target_features=FlowFrom,
        join_features=in_sink,
        out_feature_class=FlowFromJoin,
        join_operation="JOIN_ONE_TO_ONE",
        join_type="KEEP_COMMON",
        match_option="INTERSECT",
        search_radius=search_radius,
    )
    arcpy.SpatialJoin_analysis(
        target_features=FlowTo,
        join_features=in_sink,
        out_feature_class=FlowToJoin,
        join_operation="JOIN_ONE_TO_ONE",
        join_type="KEEP_COMMON",
        match_option="INTERSECT",
        search_radius=search_radius,
    )
    arcpy.SpatialJoin_analysis(
        target_features=FlowFromJoin,
        join_features=PathBufferSingle,
        out_feature_class=FlowFromJoinBuffer,
        join_operation="JOIN_ONE_TO_ONE",
        join_type="KEEP_COMMON",
        match_option="INTERSECT",
        search_radius=search_radius,
    )
    arcpy.SpatialJoin_analysis(
        target_features=FlowToJoin,
        join_features=PathBufferSingle,
        out_feature_class=FlowToJoinBuffer,
        join_operation="JOIN_ONE_TO_ONE",
        join_type="KEEP_COMMON",
        match_option="INTERSECT",
        search_radius=search_radius,
    )

    # Pair each upstream sink with its downstream sink via the shared
    # BufferID; the joined duplicate of "ID" arrives as "ID_12".
    arcpy.JoinField_management(FlowFromJoinBuffer, "BufferID", FlowToJoinBuffer, "BufferID", "ID")
    arcpy.JoinField_management(in_sink, "ID", FlowFromJoinBuffer, "ID", "ID_12")
    arcpy.AddField_management(in_sink, field_name="Downstream", field_type="LONG")
    arcpy.CalculateField_management(in_sink, "Downstream", "!ID_12!", "PYTHON")
    arcpy.DeleteField_management(in_sink, "ID_12")

    # Inundation attributes on the sinks: depth to fill, rainfall intensity,
    # and time to inundation.
    arcpy.AddField_management(in_sink, field_name="simu_depth", field_type="FLOAT")
    arcpy.AddField_management(in_sink, field_name="rain_inten", field_type="FLOAT")
    arcpy.AddField_management(in_sink, field_name="time_inund", field_type="FLOAT")
    arcpy.CalculateField_management(in_sink, "simu_depth", "!volume! / !cat_area!", "PYTHON")
    # CalculateField expects a string expression; the original passed the
    # raw float, so convert explicitly.
    arcpy.CalculateField_management(in_sink, "rain_inten", str(rain_intensity), "PYTHON")
    arcpy.CalculateField_management(in_sink, "time_inund", "!simu_depth! / !rain_inten!", "PYTHON")

    # Attach start/end sink IDs and sink attributes to the output paths.
    arcpy.JoinField_management(out_flowpath, "FID", FlowFromJoin, "ORIG_FID", "ID")
    arcpy.AddField_management(out_flowpath, field_name="start_sink", field_type="LONG")
    arcpy.CalculateField_management(out_flowpath, "start_sink", "!ID!", "PYTHON")
    arcpy.DeleteField_management(out_flowpath, "ID")
    arcpy.JoinField_management(out_flowpath, "FID", FlowToJoin, "ORIG_FID", "ID")
    arcpy.AddField_management(out_flowpath, field_name="end_sink", field_type="LONG")
    arcpy.CalculateField_management(out_flowpath, "end_sink", "!ID!", "PYTHON")
    arcpy.DeleteField_management(out_flowpath, "ID")
    arcpy.JoinField_management(
        out_flowpath, "start_sink", in_sink, "ID",
        "volume;cat_area;simu_depth;rain_inten;time_inund",
    )

    # Clean up intermediate datasets (FlowFrom/FlowTo are kept, as before).
    for dataset in (
        LineFlip, LineNoFlip, CostPath, FlowDir, PathLineErase, PathThin,
        LineStart, LineStartElev, LineEnd, LineEndElev, PathLineEraseSingle,
        SinkCentroid, PathLine, PathBuffer, PathBufferSingle, FlowFromJoin,
        FlowToJoin, FlowFromJoinBuffer, FlowToJoinBuffer,
    ):
        arcpy.Delete_management(dataset)

    arcpy.AddMessage("Flow path delineation done!")
    return out_flowpath


def delete_row(in_shp, field):
    """Delete every feature whose value in *field* equals 0.

    Used to drop flow paths whose joined sink attributes are absent (an
    unmatched join leaves 0 in a shapefile numeric field).
    """
    with arcpy.da.UpdateCursor(in_shp, [field]) as cursor:
        for row in cursor:
            if row[0] == 0:
                cursor.deleteRow()


def add_rank(in_shp, sort_field, rank_field):
    """Add *rank_field* holding the dense 1-based rank of *sort_field*.

    Features sharing the same *sort_field* value receive the same rank;
    rank 1 corresponds to the smallest value.
    """
    try:
        arcpy.AddField_management(
            in_table=in_shp, field_name=rank_field, field_type="LONG"
        )
        # Collect distinct values, then map each to its sorted position.
        values = set()
        with arcpy.da.SearchCursor(in_shp, sort_field) as cursor:
            for row in cursor:
                values.add(row[0])
        rank_of = {value: rank for rank, value in enumerate(sorted(values), start=1)}
        with arcpy.da.UpdateCursor(in_shp, [sort_field, rank_field]) as cursor:
            for row in cursor:
                row[1] = rank_of[row[0]]
                cursor.updateRow(row)
    except Exception:
        # Surface the geoprocessing messages instead of silently discarding
        # them (the original bare except dropped GetMessages()'s result).
        arcpy.AddMessage(arcpy.GetMessages())


if __name__ == "__main__":

    in_dem = arcpy.GetParameterAsText(0)
    in_sink = arcpy.GetParameterAsText(1)
    rain_intensity = float(arcpy.GetParameterAsText(2)) / 100  # convert cm/h to m/h
    out_flowpath = arcpy.GetParameterAsText(3)

    start_time = time.time()
    FlowPath(in_dem, in_sink, rain_intensity, out_flowpath)
    # Drop paths that never matched a sink, then rank by time to inundation.
    delete_row(out_flowpath, "volume")
    add_rank(out_flowpath, sort_field="time_inund", rank_field="rank")
    add_rank(in_sink, sort_field="time_inund", rank_field="rank")
    end_time = time.time()
    arcpy.AddMessage("Total run time: {:.4f}".format(end_time - start_time))
"""ArcGIS script tool: delineate the catchment hierarchy for nested
depression levels and compute zonal depression statistics per level.

Expects a directory of per-level depression shapefiles (names containing
"Single") and a DEM; writes per-level catchment shapefiles plus joined
zonal statistics.
"""

import arcpy
import os
import time
import shutil


def DelineateCatchment(DEMRasterPath, flow_direction, SinkPolyPath, OutputPath):
    """Delineate the catchment polygon of every sink polygon.

    Rasterizes the sink polygons, runs the Watershed tool on the supplied
    flow-direction raster, and vectorizes the result to *OutputPath*.

    Args:
        DEMRasterPath: Path to the input DEM raster (used for cell size/extent).
        flow_direction: Pre-computed flow-direction raster (computed once by
            the caller and reused across levels).
        SinkPolyPath: Sink polygon feature class to rasterize as pour zones.
        OutputPath: Output catchment polygon feature class.

    Returns:
        *OutputPath*, for chaining.
    """
    # arcpy.CheckOutExtension("Spatial")
    workspace = os.path.split(OutputPath)[0]
    arcpy.env.workspace = workspace
    arcpy.env.overwriteOutput = True

    if arcpy.Exists(DEMRasterPath) == False:
        print("The input raster does not exist")
        quit()

    # Shapefile outputs use "FID" and need file extensions; geodatabase
    # outputs use "OBJECTID" and bare names.
    if os.path.splitext(OutputPath)[1].lower() == ".shp":
        FieldOID = "FID"
        FlowDirection = os.path.join(workspace, "FlowDirection.tif")
        SinkRaster = os.path.join(workspace, "SinkRaster.tif")
        Watershed = os.path.join(workspace, "Watershed.tif")
        Catchment = os.path.join(workspace, "Catchment.shp")
    else:
        FieldOID = "OBJECTID"
        FlowDirection = os.path.join(workspace, "FlowDirection")
        SinkRaster = os.path.join(workspace, "SinkRaster")
        Watershed = os.path.join(workspace, "Watershed")
        Catchment = os.path.join(workspace, "Catchment")

    # NOTE(review): FlowDirection, Watershed and Catchment are assigned but
    # never used in this function (the flow-direction raster comes in as a
    # parameter and the watershed raster is not saved) — candidates for
    # removal; confirm nothing else depends on these datasets existing.
    input_dem = arcpy.Raster(DEMRasterPath)
    # flow_direction = arcpy.sa.FlowDirection(input_dem)
    # flow_direction.save(FlowDirection)

    cell_size = input_dem.meanCellWidth
    arcpy.env.extent = input_dem.extent
    # Rasterize the sink polygons so each sink becomes a pour zone.
    arcpy.PolygonToRaster_conversion(
        SinkPolyPath, FieldOID, SinkRaster, "CELL_CENTER", "NONE", cell_size
    )

    watershed = arcpy.sa.Watershed(flow_direction, SinkRaster, "Value")
    # watershed.save(Watershed)
    arcpy.RasterToPolygon_conversion(watershed, OutputPath, "NO_SIMPLIFY", "Value")
    return OutputPath


def calculateArea(in_shp, fieldname):
    """Add *fieldname* (DOUBLE) holding each polygon's area in square meters.

    Also drops the "GRIDCODE" field left over from raster-to-polygon
    conversion.
    """
    arcpy.AddField_management(in_shp, fieldname, field_type="DOUBLE")
    arcpy.CalculateField_management(
        in_shp, fieldname, "!shape.area@squaremeters!", "PYTHON_9.3"
    )
    arcpy.DeleteField_management(in_shp, "GRIDCODE")


def spatialJoin(target_shp, join_shp, out_shp):
    """Spatially join *join_shp* attributes onto *target_shp* by intersection."""
    arcpy.SpatialJoin_analysis(target_shp, join_shp, out_shp, match_option="INTERSECT")


def mergeShapefiles(in_dir, out_shp):
    """Merge every feature class found in *in_dir* into one output."""
    arcpy.env.workspace = in_dir
    shapefiles = arcpy.ListFeatureClasses()
    arcpy.Merge_management(shapefiles, out_shp)


def zonalStatistics(in_shp_dir, in_dem):
    """Compute DEM zonal statistics for every shapefile in *in_shp_dir* and
    derive depression metrics (depression/catchment ratio, volume, mean depth).

    Temporary "dbf" and "tif" subdirectories hold the per-shapefile zonal
    tables and rasters; both are removed before returning.

    Returns:
        True on completion.
    """
    arcpy.env.workspace = in_shp_dir
    arcpy.env.overwriteOutput = True
    arcpy.env.snapRaster = in_dem
    # NOTE(review): os.mkdir raises if the directory already exists, e.g.
    # after a previous failed run that did not reach the cleanup below.
    dbf_dir = os.path.join(in_shp_dir, "dbf")
    os.mkdir(dbf_dir)
    tif_dir = os.path.join(in_shp_dir, "tif")
    os.mkdir(tif_dir)
    shapefiles = os.listdir(in_shp_dir)
    dem = arcpy.Raster(in_dem)
    cell_size = dem.meanCellHeight
    for shp in shapefiles:
        if shp.endswith(".shp"):
            shp_path = os.path.join(in_shp_dir, shp)
            # NOTE(review): str.replace substitutes every occurrence of
            # "shp", so a base name containing "shp" would be mangled too.
            dbf_path = os.path.join(dbf_dir, "zonal_" + shp.replace("shp", "dbf"))
            tif_path = os.path.join(tif_dir, shp.replace("shp", "tif"))
            # Rasterize the depressions so each polygon becomes a zone.
            arcpy.PolygonToRaster_conversion(
                shp_path,
                value_field="FID",
                out_rasterdataset=tif_path,
                cell_assignment="CELL_CENTER",
                priority_field="NONE",
                cellsize=cell_size,
            )
            arcpy.sa.ZonalStatisticsAsTable(
                tif_path, "Value", in_dem, dbf_path, "DATA", "ALL"
            )
            arcpy.JoinField_management(
                shp_path,
                in_field="FID",
                join_table=dbf_path,
                join_field="Value",
                fields="COUNT;AREA;MIN;MAX;RANGE;MEAN;STD;SUM",
            )
            arcpy.AddField_management(
                shp_path, field_name="dep2catR", field_type="FLOAT"
            )
            arcpy.AddField_management(shp_path, field_name="volume", field_type="FLOAT")
            arcpy.AddField_management(
                shp_path, field_name="mean_depth", field_type="FLOAT"
            )
            # Depression-to-catchment area ratio.
            arcpy.CalculateField_management(
                shp_path,
                field="dep2catR",
                expression="!AREA! / !cat_area!",
                expression_type="PYTHON_9.3",
            )
            # Volume below the spill level: (cells * max - sum) * cell area.
            arcpy.CalculateField_management(
                shp_path,
                field="volume",
                expression="( !COUNT! * !MAX! - !SUM!) * ( !AREA! / !COUNT! )",
                expression_type="PYTHON_9.3",
            )
            arcpy.CalculateField_management(
                shp_path,
                field="mean_depth",
                expression="!volume! / !AREA!",
                expression_type="PYTHON_9.3",
            )
    # Remove the temporary zonal tables and rasters.
    if os.path.exists(dbf_dir):
        shutil.rmtree(dbf_dir)
    if os.path.exists(tif_dir):
        shutil.rmtree(tif_dir)
    return True


if __name__ == "__main__":

    arcpy.CheckOutExtension("Spatial")
    init_time = time.time()
    in_dem = arcpy.GetParameterAsText(0)
    in_shp_dir = arcpy.GetParameterAsText(1)
    out_img = arcpy.GetParameterAsText(2)

    desc = arcpy.Describe(in_dem)
    in_dem = desc.catalogPath  # get file path

    out_catchment_dir = os.path.split(out_img)[0]
    workspace = out_catchment_dir
    arcpy.env.workspace = workspace
    arcpy.env.overwriteOutput = True
    out_shp_dir = os.path.join(out_catchment_dir, "shapefiles")
    out_level_dir = os.path.join(out_shp_dir, "level")

    if os.path.exists(out_shp_dir) == False:
        os.mkdir(out_shp_dir)
    if os.path.exists(out_level_dir) == False:
        os.mkdir(out_level_dir)

    # Compute flow direction once and reuse it for every depression level.
    dem = arcpy.Raster(in_dem)
    cell_size = dem.meanCellWidth
    flow_direction = arcpy.sa.FlowDirection(dem)
    flow_direction.save(os.path.join(out_shp_dir, "flowDir.tif"))

    # One catchment shapefile per depression level ("...Single..." inputs).
    for in_shp in os.listdir(in_shp_dir):
        if in_shp.endswith(".shp") and "Single" in in_shp:
            # print(in_shp)
            # out_shp_name = "Catchment_level_" + in_shp[len(in_shp)-5:]
            out_shp_name = in_shp.replace("Single", "Catchment")
            out_shp_path = os.path.join(out_shp_dir, out_shp_name)
            in_shp_path = os.path.join(in_shp_dir, in_shp)
            arcpy.AddMessage("Generating {} ...".format(out_shp_name))
DelineateCatchment(in_dem, flow_direction, in_shp_path, out_shp_path) 167 | calculateArea(out_shp_path, "cat_area") 168 | out_level_path = os.path.join(out_level_dir, in_shp) 169 | spatialJoin(in_shp_path, out_shp_path, out_level_path) 170 | 171 | zonalStatistics(out_level_dir, in_dem) 172 | arcpy.Delete_management(os.path.join(out_shp_dir, "flowDir.tif")) 173 | arcpy.Delete_management(os.path.join(out_shp_dir, "SinkRaster.tif")) 174 | 175 | out_merge_levels = os.path.join(out_shp_dir, "Merge_levels.shp") 176 | mergeShapefiles(out_level_dir, out_merge_levels) 177 | 178 | tmp_catchment_img = os.path.join(out_catchment_dir, "catchment_tmp.tif") 179 | arcpy.PolygonToRaster_conversion( 180 | out_merge_levels, 181 | value_field="cat_area", 182 | out_rasterdataset=tmp_catchment_img, 183 | cellsize=cell_size, 184 | ) 185 | # out_catchment_img = os.path.join(out_catchment_dir, "catchment.tif") 186 | out_catchment_img = out_img 187 | arcpy.env.compression = "NONE" 188 | arcpy.CopyRaster_management( 189 | tmp_catchment_img, 190 | out_catchment_img, 191 | nodata_value="0", 192 | pixel_type="32_BIT_UNSIGNED", 193 | format="TIFF", 194 | ) 195 | arcpy.Delete_management(tmp_catchment_img) 196 | 197 | tmp_info_dir = os.path.join(out_shp_dir, "info") 198 | if os.path.exists(tmp_info_dir): 199 | shutil.rmtree(tmp_info_dir) 200 | log_file = os.path.join(out_shp_dir, "log") 201 | if os.path.exists(log_file): 202 | os.remove(log_file) 203 | 204 | end_time = time.time() 205 | arcpy.AddMessage("Total run time: {}".format(end_time - init_time)) 206 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: lidar 2 | 3 | site_url: https://lidar.github.org 4 | 5 | repo_url: https://github.com/opengeos/lidar 6 | 7 | theme: 8 | palette: 9 | scheme: preference 10 | name: material 11 | icon: 12 | repo: fontawesome/brands/github 13 | features: 14 | - 
navigation.instant 15 | # - navigation.expand 16 | # - navigation.tabs 17 | - search.highlight 18 | custom_dir: "docs/overrides" 19 | 20 | plugins: 21 | - search 22 | - mkdocstrings 23 | - git-revision-date 24 | # - pdf-export 25 | - mkdocs-jupyter: 26 | include_source: True 27 | # execute: True 28 | 29 | markdown_extensions: 30 | - attr_list 31 | - pymdownx.superfences 32 | - pymdownx.highlight: 33 | linenums: true 34 | - toc: 35 | permalink: true 36 | 37 | # google_analytics: 38 | # - UA-186406134-1 39 | # - auto 40 | 41 | nav: 42 | - Home: index.md 43 | - Installation: installation.md 44 | - Get Started: get-started.md 45 | - Usage: usage.md 46 | - Contributing: contributing.md 47 | - Citations: citations.md 48 | - Changelog: changelog.md 49 | - Report Issues: https://github.com/opengeos/lidar/issues 50 | - Notebooks: 51 | - notebooks/lidar.ipynb 52 | - notebooks/lidar_colab.ipynb 53 | - notebooks/lidar_dsm.ipynb 54 | - notebooks/inundation.ipynb 55 | - API Reference: 56 | - common module: common.md 57 | - filling module: filling.md 58 | - filtering module: filtering.md 59 | - mounts module: mounts.md 60 | - slicing module: slicing.md 61 | -------------------------------------------------------------------------------- /paper/paper.bib: -------------------------------------------------------------------------------- 1 | @ARTICLE{Lindsay2006, 2 | title = "{Distinguishing actual and artefact depressions in digital 3 | elevation data}", 4 | author = "Lindsay, J B and Creed, Irena F", 5 | journal = "Computers \& geosciences", 6 | volume = 32, 7 | number = 8, 8 | pages = "1192--1204", 9 | month = "1~" # oct, 10 | year = 2006, 11 | url = "http://www.sciencedirect.com/science/article/pii/S0098300405002578", 12 | issn = "0098-3004", 13 | doi = "10.1016/j.cageo.2005.11.002" 14 | } 15 | 16 | @ARTICLE{Wu2016, 17 | title = "{Automated delineation of karst sinkholes from {LiDAR-derived} 18 | digital elevation models}", 19 | author = "Wu, Qiusheng and Deng, Chengbin and Chen, 
Zuoqi", 20 | journal = "Geomorphology", 21 | volume = 266, 22 | number = "Supplement C", 23 | pages = "1--10", 24 | month = "1~" # aug, 25 | year = 2016, 26 | url = "http://www.sciencedirect.com/science/article/pii/S0169555X1630280X", 27 | issn = "0169-555X", 28 | doi = "10.1016/j.geomorph.2016.05.006" 29 | } 30 | 31 | @ARTICLE{Wu2016-ub, 32 | title = "{Delineation and Quantification of Wetland Depressions in the 33 | Prairie Pothole Region of North Dakota}", 34 | author = "Wu, Qiusheng and Lane, Charles R", 35 | journal = "Wetlands", 36 | publisher = "Springer Netherlands", 37 | volume = 36, 38 | number = 2, 39 | pages = "215--227", 40 | month = "1~" # apr, 41 | year = 2016, 42 | url = "https://link.springer.com/article/10.1007/s13157-015-0731-6", 43 | language = "en", 44 | issn = "0277-5212, 1943-6246", 45 | doi = "10.1007/s13157-015-0731-6" 46 | } 47 | 48 | @ARTICLE{Wu2015, 49 | title = "{A localized contour tree method for deriving geometric and 50 | topological properties of complex surface depressions based on 51 | high-resolution topographical data}", 52 | author = "Wu, Qiusheng and Liu, Hongxing and Wang, Shujie and Yu, Bailang 53 | and Beck, Richard and Hinkel, Kenneth", 54 | journal = "International Journal of Geographical Information Science", 55 | publisher = "Taylor \& Francis", 56 | volume = 29, 57 | number = 12, 58 | pages = "2041--2060", 59 | month = "2~" # dec, 60 | year = 2015, 61 | url = "https://doi.org/10.1080/13658816.2015.1038719", 62 | annote = "doi: 10.1080/13658816.2015.1038719", 63 | issn = "1365-8816", 64 | doi = "10.1080/13658816.2015.1038719" 65 | } 66 | 67 | @ARTICLE{Wu2019, 68 | title = "{Efficient delineation of nested depression hierarchy in digital 69 | elevation models for hydrological analysis using the level-set 70 | method}", 71 | author = "Wu, Qiusheng and Lane, Charles R and Wang, Lei and Vanderhoof, 72 | Melanie K and Christensen, Jay R and Liu, Hongxing", 73 | journal = "Journal of the American Water Resources 
Association", 74 | publisher = "Wiley Online Library", 75 | volume = 55, 76 | number = 2, 77 | pages = "354--368", 78 | month = "28~" # apr, 79 | year = 2019, 80 | url = "https://onlinelibrary.wiley.com/doi/abs/10.1111/1752-1688.12689", 81 | issn = "1093-474X, 1752-1688", 82 | doi = "10.1111/1752-1688.12689" 83 | } 84 | 85 | @ARTICLE{Wang2006, 86 | title = "{An efficient method for identifying and filling surface 87 | depressions in digital elevation models for hydrologic analysis 88 | and modelling}", 89 | author = "Wang, L and Liu, H", 90 | journal = "International journal of geographical information science", 91 | publisher = "Taylor \& Francis", 92 | volume = 20, 93 | number = 2, 94 | pages = "193--213", 95 | month = "1~" # feb, 96 | year = 2006, 97 | url = "https://doi.org/10.1080/13658810500433453", 98 | annote = "doi: 10.1080/13658810500433453", 99 | issn = "1365-8816", 100 | doi = "10.1080/13658810500433453" 101 | } 102 | 103 | @ARTICLE{Lindsay2015, 104 | title = "{Modelling surface drainage patterns in altered landscapes using 105 | {LiDAR}}", 106 | author = "Lindsay, J B and Dhun, K", 107 | journal = "International Journal of Geographical Information Science", 108 | publisher = "Taylor \& Francis", 109 | volume = 29, 110 | number = 3, 111 | pages = "397--411", 112 | month = "4~" # mar, 113 | year = 2015, 114 | url = "https://doi.org/10.1080/13658816.2014.975715", 115 | issn = "1365-8816", 116 | doi = "10.1080/13658816.2014.975715" 117 | } 118 | 119 | @ARTICLE{Lindsay2016, 120 | title = "{Efficient hybrid breaching-filling sink removal methods for flow 121 | path enforcement in digital elevation models}", 122 | author = "Lindsay, J B", 123 | journal = "Hydrological processes", 124 | publisher = "Wiley Online Library", 125 | volume = 30, 126 | number = 6, 127 | pages = "846--857", 128 | year = 2016, 129 | url = "http://onlinelibrary.wiley.com/doi/10.1002/hyp.10648/full", 130 | issn = "0885-6087", 131 | doi = "10.1002/hyp.10648" 132 | } 133 | 134 | 
@ARTICLE{Harris2020, 135 | title = "{Array programming with NumPy}", 136 | author = "Harris, Charles R and Millman, K Jarrod and van der Walt, 137 | St{\'e}fan J and Gommers, Ralf and Virtanen, Pauli and 138 | Cournapeau, David and Wieser, Eric and Taylor, Julian and Berg, 139 | Sebastian and Smith, Nathaniel J and Kern, Robert and Picus, 140 | Matti and Hoyer, Stephan and van Kerkwijk, Marten H and Brett, 141 | Matthew and Haldane, Allan and Del R{\'\i}o, Jaime Fern{\'a}ndez 142 | and Wiebe, Mark and Peterson, Pearu and G{\'e}rard-Marchant, 143 | Pierre and Sheppard, Kevin and Reddy, Tyler and Weckesser, 144 | Warren and Abbasi, Hameer and Gohlke, Christoph and Oliphant, 145 | Travis E", 146 | journal = "Nature", 147 | publisher = "nature.com", 148 | volume = 585, 149 | number = 7825, 150 | pages = "357--362", 151 | month = sep, 152 | year = 2020, 153 | url = "http://dx.doi.org/10.1038/s41586-020-2649-2", 154 | language = "en", 155 | issn = "0028-0836, 1476-4687", 156 | pmid = "32939066", 157 | doi = "10.1038/s41586-020-2649-2" 158 | } 159 | 160 | @ARTICLE{Virtanen2020, 161 | title = "{SciPy 1.0: fundamental algorithms for scientific computing in 162 | Python}", 163 | author = "Virtanen, Pauli and Gommers, Ralf and Oliphant, Travis E and 164 | Haberland, Matt and Reddy, Tyler and Cournapeau, David and 165 | Burovski, Evgeni and Peterson, Pearu and Weckesser, Warren and 166 | Bright, Jonathan and van der Walt, St{\'e}fan J and Brett, 167 | Matthew and Wilson, Joshua and Millman, K Jarrod and Mayorov, 168 | Nikolay and Nelson, Andrew R J and Jones, Eric and Kern, Robert 169 | and Larson, Eric and Carey, C J and Polat, {\.I}lhan and Feng, 170 | Yu and Moore, Eric W and VanderPlas, Jake and Laxalde, Denis and 171 | Perktold, Josef and Cimrman, Robert and Henriksen, Ian and 172 | Quintero, E A and Harris, Charles R and Archibald, Anne M and 173 | Ribeiro, Ant{\^o}nio H and Pedregosa, Fabian and van Mulbregt, 174 | Paul and {SciPy 1.0 Contributors}", 175 | journal = 
"Nature methods", 176 | publisher = "nature.com", 177 | volume = 17, 178 | number = 3, 179 | pages = "261--272", 180 | month = mar, 181 | year = 2020, 182 | url = "http://dx.doi.org/10.1038/s41592-019-0686-2", 183 | language = "en", 184 | issn = "1548-7091, 1548-7105", 185 | pmid = "32015543", 186 | doi = "10.1038/s41592-019-0686-2", 187 | pmc = "PMC7056644" 188 | } 189 | 190 | @ARTICLE{Van_der_Walt2014, 191 | title = "{scikit-image: image processing in Python}", 192 | author = "van der Walt, St{\'e}fan and Sch{\"o}nberger, Johannes L and 193 | Nunez-Iglesias, Juan and Boulogne, Fran{\c c}ois and Warner, 194 | Joshua D and Yager, Neil and Gouillart, Emmanuelle and Yu, Tony 195 | and {scikit-image contributors}", 196 | journal = "PeerJ", 197 | volume = 2, 198 | pages = "e453", 199 | month = "19~" # jun, 200 | year = 2014, 201 | url = "http://dx.doi.org/10.7717/peerj.453", 202 | keywords = "Education; Image processing; Open source; Python; Reproducible 203 | research; Scientific programming; Visualization;VIP;Priority 204 | Flood", 205 | language = "en", 206 | issn = "2167-8359", 207 | pmid = "25024921", 208 | doi = "10.7717/peerj.453", 209 | pmc = "PMC4081273" 210 | } 211 | 212 | @ARTICLE{Hunter2007, 213 | title = "{Matplotlib: A 2D Graphics Environment}", 214 | author = "Hunter, John D", 215 | journal = "Computing in science \& engineering", 216 | publisher = "IEEE Computer Society", 217 | volume = 9, 218 | number = 3, 219 | pages = "90--95", 220 | month = "1~" # may, 221 | year = 2007, 222 | url = "https://aip.scitation.org/doi/abs/10.1109/MCSE.2007.55", 223 | issn = "1521-9615", 224 | doi = "10.1109/MCSE.2007.55" 225 | } 226 | 227 | @ARTICLE{Barnes2018, 228 | title = "{RichDEM: High-performance terrain analysis}", 229 | author = "Barnes, Richard", 230 | journal = "PeerJ Preprints", 231 | publisher = "PeerJ Inc.", 232 | number = "e27099v1", 233 | institution = "PeerJ Preprints", 234 | month = "8~" # aug, 235 | year = 2018, 236 | url = 
"https://peerj.com/preprints/27099/", 237 | keywords = "algorithm; parallel computing; high-performance computing; 238 | terrain analysis; raster; graph theory; open source; flow 239 | accumulation; depression-filling; hydrological modeling", 240 | language = "en", 241 | issn = "2167-9843", 242 | doi = "10.7287/peerj.preprints.27099v1" 243 | } 244 | 245 | @MISC{Lindsay2018, 246 | title = "{WhiteboxTools User Manual}", 247 | author = "Lindsay, J B", 248 | publisher = "GitHub.com", 249 | year = 2018, 250 | url = "https://jblindsay.github.io/wbt_book", 251 | note = "Accessed: 2021-1-7" 252 | } -------------------------------------------------------------------------------- /paper/paper.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "lidar: A Python package for delineating nested surface depressions from digital elevation data" 3 | tags: 4 | - Python 5 | - Terrain analysis 6 | - Hydrological analysis 7 | - Surface depressions 8 | - Digital elevation models 9 | - DEM 10 | - LiDAR 11 | - Jupyter notebook 12 | authors: 13 | - name: Qiusheng Wu 14 | orcid: 0000-0001-5437-4073 15 | affiliation: "1" 16 | affiliations: 17 | - name: Department of Geography, University of Tennessee, Knoxville, TN 37996, United States 18 | index: 1 19 | date: 27 February 2021 20 | bibliography: paper.bib 21 | --- 22 | 23 | # Summary 24 | 25 | **lidar** is a Python package for delineating the nested hierarchy of surface depressions in digital elevation models (DEMs). 26 | In traditional hydrological modeling, surface depressions in a DEM are commonly treated as artifacts and thus filled and removed to create a depressionless DEM, which can then be used to generate continuous stream networks. In reality, however, surface depressions in DEMs are commonly a combination of spurious and actual terrain features [@Lindsay2006]. 
Fine-resolution DEMs derived from Light Detection and Ranging (LiDAR) data can capture and represent actual surface depressions, especially in glaciated [@Wu2016-ub] and karst landscapes [@Wu2016]. During the past decades, various algorithms have been developed to identify and delineate surface depressions, such as depression filling [@Wang2006], depression breaching [@Lindsay2015], hybrid breaching-filling [@Lindsay2016], and contour tree method [@Wu2015]. More recently, a level-set method based on graph theory was proposed to delineate the nested hierarchy of surface depressions [@Wu2019]. The **lidar** Python package implements the level-set method and makes it possible for delineating the nested hierarchy of surface depressions as well as elevated terrain features. It also provides an interactive Graphical User Interface (GUI) that allows users to run the program with minimal coding. 27 | 28 | # Statement of Need 29 | 30 | The **lidar** package is intended for scientists and researchers who would like to integrate surface depressions into hydrological modeling. It can also facilitate the identification and delineation of depressional features, such as sinkholes, detention basins, and prairie potholes. The detailed topological and geometric properties of surface depressions can be useful for terrain analysis and hydrological modeling, including the size, volume, mean depth, maximum depth, lowest elevation, spill elevation, perimeter, major axis length, minor axis length, elongatedness. 31 | 32 | # State of the Field 33 | 34 | Currently, there are a few open-source Python packages that can perform depression filling on digital elevation data, such as RichDEM [@Barnes2018] and [whitebox](https://github.com/giswqs/whitebox-python), the Python frontend for [WhiteboxTools](https://github.com/jblindsay/whitebox-tools) [@Lindsay2018]. 
However, there are no Python packages offering tools for delineating the nested hierarchy of surface depressions and catchments as well as simulating inundation dynamics. The **lidar** Python package is intended to fill this gap.
49 | 50 | - Try it out with Binder: 51 | - Try it out with Google Colab: 52 | - Help documentation: 53 | 54 | The **lidar** package also provides an ArcGIS toolbox for delineating the nested hierarchy of surface depressions and catchments as well as simulating inundation dynamics. Video tutorials for using the toolbox are available at . 55 | 56 | ![The ArcGIS toolbox for the lidar Python package](https://raw.githubusercontent.com/giswqs/lidar/master/images/toolbox_0.png) 57 | 58 | # Acknowledgments 59 | 60 | The author would like to thank the open-source community, especially the developers of numpy [@Harris2020], scipy [@Virtanen2020], scikit-image [@Van_der_Walt2014], matplotlib [@Hunter2007], and richDEM [@Barnes2018]. These open-source packages empower the **lidar** Python package. 61 | 62 | # References 63 | -------------------------------------------------------------------------------- /requirements.readthedocs.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opengeos/lidar/ebd2b26cbe46a22002f101fd23866a8341f2e0e5/requirements.readthedocs.txt -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | Click>=6.0 2 | gdown 3 | geopandas 4 | matplotlib 5 | numpy 6 | pandas 7 | pyshp 8 | # richdem 9 | scikit-image 10 | scipy 11 | whitebox 12 | # PySimpleGUI 13 | # poppler<0.62 14 | -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | bumpversion 2 | coverage 3 | flake8 4 | pip 5 | Sphinx 6 | tox 7 | twine 8 | watchdog 9 | wheel 10 | 11 | 12 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | 
current_version = 0.8.4 3 | commit = True 4 | tag = True 5 | 6 | [bumpversion:file:setup.py] 7 | search = version="{current_version}" 8 | replace = version="{new_version}" 9 | 10 | [bumpversion:file:lidar/__init__.py] 11 | search = __version__ = "{current_version}" 12 | replace = __version__ = "{new_version}" 13 | 14 | [bdist_wheel] 15 | universal = 1 16 | 17 | [flake8] 18 | exclude = docs 19 | 20 | [aliases] 21 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """The setup script.""" 5 | import io 6 | import json 7 | import os 8 | import shutil 9 | from os import path as op 10 | import urllib.request 11 | from setuptools import setup, find_packages 12 | 13 | # Find available package versions 14 | 15 | 16 | def pkg_versions(package_name): 17 | url = "https://pypi.python.org/pypi/%s/json" % (package_name,) 18 | text = urllib.request.urlopen(url).read() 19 | data = json.loads(text) 20 | versions = data["releases"].keys() 21 | return list(versions) 22 | 23 | 24 | # Find a matching version 25 | def find_version(version, version_list): 26 | match_version = None 27 | for v in version_list: 28 | if v.startswith(version): 29 | match_version = v 30 | return match_version 31 | 32 | return match_version 33 | 34 | 35 | if shutil.which("gdal-config") is None: 36 | print("GDAL is not installed. 
Installing GDAL ...") 37 | cmd = "pip install --find-links=https://girder.github.io/large_image_wheels --no-cache GDAL" 38 | os.system(cmd) 39 | 40 | # check GDAL version installed in the system 41 | # GDAL_VERSION = os.popen("gdal-config --version").read().rstrip() 42 | GDAL_INFO = os.popen("gdalinfo --version").read().rstrip() 43 | GDAL_VERSION = GDAL_INFO.split(",")[0].replace("GDAL", "").lstrip() 44 | GDAL_VERSION_NUM = str(GDAL_VERSION.replace(".", "")) 45 | PYGDAL_VERSION = find_version(GDAL_VERSION, pkg_versions("pygdal")) 46 | 47 | # if PYGDAL_VERSION is None: 48 | # print( 49 | # "GDAL version not found in PyPI. Please install GDAL version %s or higher." 50 | # % (GDAL_VERSION,) 51 | # ) 52 | # exit(1) 53 | 54 | print("GDAL version: %s" % (GDAL_VERSION,)) 55 | 56 | 57 | with open("README.md", mode="rb") as readme_file: 58 | readme = readme_file.read().decode("utf-8") 59 | 60 | here = op.abspath(op.dirname(__file__)) 61 | 62 | # get the dependencies and installs 63 | with io.open(op.join(here, "requirements.txt"), encoding="utf-8") as f: 64 | all_reqs = f.read().split("\n") 65 | 66 | install_requires = [x.strip() for x in all_reqs if "git+" not in x] 67 | 68 | # install_requires.append('pygdal==' + PYGDAL_VERSION) 69 | 70 | dependency_links = [x.strip().replace("git+", "") for x in all_reqs if "git+" not in x] 71 | 72 | 73 | extras_requires = { 74 | "all": ["geopandas", "rasterio"], 75 | } 76 | 77 | requirements = [ 78 | "Click>=6.0", 79 | ] 80 | 81 | setup_requirements = [] 82 | 83 | test_requirements = [] 84 | 85 | setup( 86 | author="Qiusheng Wu", 87 | author_email="giswqs@gmail.com", 88 | classifiers=[ 89 | "Development Status :: 2 - Pre-Alpha", 90 | "Intended Audience :: Developers", 91 | "License :: OSI Approved :: MIT License", 92 | "Natural Language :: English", 93 | "Programming Language :: Python :: 3.7", 94 | "Programming Language :: Python :: 3.8", 95 | "Programming Language :: Python :: 3.9", 96 | ], 97 | description="A Python package for 
class TestLidar(unittest.TestCase):
    """Tests for the `lidar` package.

    Each test reads a sample raster shipped in ``lidar/data`` and writes
    its output under ``~/temp``.  The shared data-directory / output-
    directory boilerplate, previously copy-pasted into every test, is
    factored into the ``_data_dir`` and ``_out_dir`` helpers.
    """

    def setUp(self):
        """Set up test fixtures, if any."""

    def tearDown(self):
        """Tear down test fixtures, if any."""

    def _data_dir(self):
        """Return the sample data directory bundled with the package."""
        data_dir = resources.files("lidar") / "data"
        print("Sample data directory: {}".format(data_dir))
        return data_dir

    def _out_dir(self):
        """Return a temp folder under the user home, creating it if needed."""
        out_dir = os.path.join(os.path.expanduser("~"), "temp")
        if not os.path.exists(out_dir):
            os.mkdir(out_dir)
        return out_dir

    def test_000_something(self):
        """Test something."""

    def test_mean_filter(self):
        """MeanFilter smooths the sample DEM and writes it to disk."""
        in_dem = os.path.join(self._data_dir(), "dem.tif")
        mean_dem = os.path.join(self._out_dir(), "mean.tif")

        mean = lidar.MeanFilter(in_dem, kernel_size=3)
        rd.SaveGDAL(mean_dem, mean)

        self.assertTrue(os.path.exists(mean_dem))

    def test_median_filter(self):
        """MedianFilter smooths the sample DEM and writes it to disk."""
        in_dem = os.path.join(self._data_dir(), "dem.tif")
        median_dem = os.path.join(self._out_dir(), "median.tif")

        median = lidar.MedianFilter(in_dem, kernel_size=3)
        rd.SaveGDAL(median_dem, median)

        self.assertTrue(os.path.exists(median_dem))

    def test_gaussian_filter(self):
        """GaussianFilter smooths the sample DEM and writes it to disk."""
        in_dem = os.path.join(self._data_dir(), "dem.tif")
        gaussian_dem = os.path.join(self._out_dir(), "gaussian.tif")

        gaussian = lidar.GaussianFilter(in_dem, sigma=1)
        rd.SaveGDAL(gaussian_dem, gaussian)

        self.assertTrue(os.path.exists(gaussian_dem))

    def test_sink_filling(self):
        """ExtractSinks produces a sink raster from the sample DEM."""
        in_dem = os.path.join(self._data_dir(), "dem.tif")
        # Minimum number of pixels for a region to count as a depression.
        min_size = 1000

        sink_path = lidar.ExtractSinks(
            in_dem, min_size=min_size, out_dir=self._out_dir()
        )
        self.assertTrue(os.path.exists(sink_path))

    def test_slicing(self):
        """DelineateDepressions outputs depression id and level rasters."""
        in_sink = os.path.join(self._data_dir(), "sink.tif")

        # Parameters for the level-set (top-down slicing) method.
        min_size = 1000  # minimum number of pixels as a depression
        min_depth = 0.3  # minimum depression depth
        interval = 0.3  # slicing interval, top-down approach
        bool_level_shp = True  # extract polygons for each individual level

        out_dir = self._out_dir()
        dep_id_path, dep_level_path = lidar.DelineateDepressions(
            in_sink, min_size, min_depth, interval, out_dir, bool_level_shp
        )
        print("Results are saved in: {}".format(out_dir))

        self.assertTrue(os.path.exists(dep_id_path))
        self.assertTrue(os.path.exists(dep_level_path))

    def test_mounts(self):
        """DelineateMounts outputs mount id and level rasters from a DSM."""
        in_dem = os.path.join(self._data_dir(), "dsm.tif")

        # Parameters for delineating nested elevated features (mounts).
        min_size = 1000  # minimum number of pixels as a mount
        min_height = 0.3  # minimum height as a mount
        interval = 0.3  # slicing interval for the level-set method
        bool_shp = False  # output shapefiles for each individual level

        mount_id_path, mount_level_path = lidar.DelineateMounts(
            in_dem, min_size, min_height, interval, self._out_dir(), bool_shp
        )
        self.assertTrue(os.path.exists(mount_id_path))
        self.assertTrue(os.path.exists(mount_level_path))
PYTHONPATH = {toxinidir} 17 | 18 | commands = python setup.py test 19 | 20 | --------------------------------------------------------------------------------