├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature-proposal.md ├── pull_request_template.md └── workflows │ ├── codecov.yml │ ├── pypi-publish.yml │ └── test.yml ├── .gitignore ├── .readthedocs.yml ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Citation-styles.md ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── RELEASE.md ├── benchmarks ├── README.md ├── asv.conf.json ├── benchmarks │ ├── __init__.py │ ├── aspect.py │ ├── classify.py │ ├── common.py │ ├── curvature.py │ ├── focal.py │ ├── hillshade.py │ ├── multispectral.py │ ├── pathfinding.py │ ├── perlin.py │ ├── polygonize.py │ ├── proximity.py │ ├── slope.py │ ├── terrain.py │ ├── viewshed.py │ └── zonal.py └── results.md ├── codecov.yml ├── docs ├── .gitignore ├── Makefile ├── docs-requirements.txt ├── make.bat └── source │ ├── _static │ ├── css │ │ └── styles.css │ └── img │ │ ├── 0-0.png │ │ ├── 0-1.png │ │ ├── 0-2.png │ │ ├── 0-3.png │ │ ├── 0-4.png │ │ ├── 1-0.png │ │ ├── 1-1.png │ │ ├── 1-2.png │ │ ├── 1-3.png │ │ ├── 1-4.png │ │ ├── 2-0.png │ │ ├── 2-1.png │ │ ├── 2-2.png │ │ ├── 2-3.png │ │ ├── 2-4.png │ │ ├── 3-0.png │ │ ├── 3-1.png │ │ ├── 3-2.png │ │ ├── 3-3.png │ │ ├── 3-4.png │ │ ├── 4-0.png │ │ ├── 4-1.png │ │ ├── 4-2.png │ │ ├── 4-3.png │ │ ├── 4-4.png │ │ ├── Xarray-Spatial-logo.svg │ │ ├── composite_map-large.jpg │ │ ├── composite_map.gif │ │ ├── composite_map.png │ │ ├── dask-logo.svg │ │ ├── dependencies.svg │ │ ├── favicon.ico │ │ ├── featured-badge-gh.svg │ │ ├── img001.png │ │ ├── makepath-supply-chain-international-shipping.png │ │ ├── numba-logo.svg │ │ └── python-logo.svg │ ├── _templates │ ├── autosummary │ │ └── module.rst │ ├── description_panel.html │ ├── docs-navbar.html │ └── versions.html │ ├── conf.py │ ├── getting_started │ ├── index.rst │ ├── installation.rst │ ├── raster_huh.rst │ └── usage.rst │ ├── index.rst │ ├── reference │ ├── classification.rst │ ├── focal.rst │ ├── index.rst │ ├── local.rst │ ├── multispectral.rst │ ├── pathfinding.rst │ ├── proximity.rst │ ├── surface.rst │ └── zonal.rst │ └── user_guide │ ├── classification.ipynb │ ├── data │ ├── LC80030172015001LGN00_B2.tiff │ ├── LC80030172015001LGN00_B3.tiff │ ├── LC80030172015001LGN00_B4.tiff │ └── LC80030172015001LGN00_B5.tiff │ ├── focal.ipynb │ ├── index.rst │ ├── local.ipynb │ ├── multispectral.ipynb │ ├── pathfinding.ipynb │ ├── proximity.ipynb │ ├── surface.ipynb │ └── zonal.ipynb ├── examples ├── Pathfinding_Austin_Road_Network.ipynb ├── animated_hillshade.py ├── cloudless-mosaic-sentinel2.ipynb ├── composite_map.gif ├── datasets.yml ├── housing_price_feature_engineering.ipynb ├── pharmacy-deserts.ipynb ├── user_guide │ ├── 0_Getting_Setup.ipynb │ ├── 1_Surface.ipynb │ ├── 2_Proximity.ipynb │ ├── 3_Zonal.ipynb │ ├── 4_Focal.ipynb │ ├── 5_Classification.ipynb │ ├── 6_Remote_Sensing.ipynb │ ├── 7_Pathfinding.ipynb │ ├── 8_Local_Tools.ipynb │ └── zonal_crosstab.ipynb ├── viewshed_gpu.ipynb └── xarray-spatial_classification-methods.ipynb ├── img ├── 0-0.png ├── 0-1.png ├── 0-2.png ├── 0-3.png ├── 0-4.png ├── 1-0.png ├── 1-1.png ├── 1-2.png ├── 1-3.png ├── 1-4.png ├── 2-0.png ├── 2-1.png ├── 2-2.png ├── 2-3.png ├── 2-4.png ├── 3-0.png ├── 3-1.png ├── 3-2.png ├── 3-3.png ├── 3-4.png ├── 4-0.png ├── 4-1.png ├── 4-2.png ├── 4-3.png ├── 4-4.png ├── Xarray-Spatial-logo.svg ├── composite_map.gif ├── composite_map.png ├── dependencies.dot ├── dependencies.png ├── dependencies.svg ├── featured-badge-gh.svg └── makepath-supply-chain-international-shipping.png ├── pyproject.toml ├── 
requirements-dev.txt ├── requirements.txt ├── setup.cfg ├── setup.py └── xrspatial ├── __init__.py ├── __main__.py ├── analytics.py ├── aspect.py ├── bump.py ├── classify.py ├── convolution.py ├── curvature.py ├── datasets ├── __init__.py └── sentinel-2 │ ├── blue_band.nc │ ├── green_band.nc │ ├── nir_band.nc │ ├── red_band.nc │ ├── swir1_band.nc │ └── swir2_band.nc ├── esri.py ├── experimental ├── __init__.py └── polygonize.py ├── focal.py ├── gpu_rtx ├── __init__.py ├── cuda_utils.py ├── hillshade.py ├── mesh_utils.py └── viewshed.py ├── hillshade.py ├── local.py ├── multispectral.py ├── pathfinding.py ├── perlin.py ├── proximity.py ├── slope.py ├── terrain.py ├── tests ├── __init__.py ├── conftest.py ├── general_checks.py ├── test_analytics.py ├── test_aspect.py ├── test_bump.py ├── test_classify.py ├── test_curvature.py ├── test_datasets.py ├── test_focal.py ├── test_hillshade.py ├── test_local.py ├── test_multispectral.py ├── test_pathfinding.py ├── test_perlin.py ├── test_polygonize.py ├── test_proximity.py ├── test_slope.py ├── test_terrain.py ├── test_utils.py ├── test_viewshed.py └── test_zonal.py ├── utils.py ├── viewshed.py └── zonal.py /.gitattributes: -------------------------------------------------------------------------------- 1 | __init__.py export-subst 2 | setup.py export-subst -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **Expected behavior** 14 | A clear and concise description of what you expected to happen. 15 | 16 | **Screenshots** 17 | If applicable, add screenshots to help explain your problem. 18 | 19 | **Desktop (please complete the following information):** 20 | - OS: [e.g. iOS] 21 | - Browser [e.g. chrome, safari] 22 | - Version [e.g. 22] 23 | 24 | **Smartphone (please complete the following information):** 25 | - Device: [e.g. iPhone6] 26 | - OS: [e.g. iOS8.1] 27 | - Browser [e.g. stock browser, safari] 28 | - Version [e.g. 22] 29 | 30 | **Additional context** 31 | Add any other context about the problem here. 32 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-proposal.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature proposal 3 | about: Suggest an idea 4 | title: '' 5 | labels: proposal 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Author of Proposal:** 11 | ## Reason or Problem 12 | Describe what the need for this new feature is or what problem this new feature will address. 13 | ## Proposal 14 | Description of the new feature, how it will be used, what it will fix, etc. 15 | 16 | **Design:** 17 | Include a description of this feature's design with enough detail for those who are familiar enough with this project to understand the feature and how it could be implemented. This section should get into specifics of how the feature will be designed and implemented. 18 | 19 | **Usage:** 20 | Detailed instructions for this feature's use. 21 | 22 | **Value:** What value does the implementation of this new feature bring to xarray-spatial? 23 | ## Stakeholders and Impacts 24 | Who are the stakeholders in this update?
Will you be implementing this new feature or will someone else? What is the potential impact of implementing this new feature? Specifically, what other components would be impacted? 25 | ## Drawbacks 26 | What are potential reasons why this feature should not be implemented? 27 | ## Alternatives 28 | Describe other solutions or features you have considered when coming up with this proposal. 29 | 30 | ## Unresolved Questions 31 | Which parts of this feature's design are still undecided? 32 | ## Additional Notes or Context 33 | Anything else that is important to know for the implementation of this new feature. 34 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | Fixes # 2 | 3 | ## Proposed Changes 4 | 5 | - 6 | - 7 | - 8 | -------------------------------------------------------------------------------- /.github/workflows/codecov.yml: -------------------------------------------------------------------------------- 1 | name: Codecov 2 | on: 3 | push: 4 | branches: 5 | - master 6 | pull_request: 7 | branches: 8 | - '*' 9 | 10 | jobs: 11 | run: 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-latest] 16 | python: [3.12] 17 | env: 18 | OS: ${{ matrix.os }} 19 | PYTHON: ${{ matrix.python }} 20 | steps: 21 | - uses: actions/checkout@master 22 | - name: Setup Python 23 | uses: actions/setup-python@master 24 | with: 25 | python-version: ${{ matrix.python }} 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install -e .[tests] 30 | - name: Generate coverage report 31 | run: | 32 | NUMBA_DISABLE_JIT=1 pytest --cov=./ --cov-report=xml --ignore ./xrspatial/tests/test_polygonize.py 33 | - name: Upload coverage to Codecov 34 | uses: codecov/codecov-action@v3 35 | with: 36 | token: ${{ secrets.CODECOV_TOKEN }} 37 | env_vars: OS,PYTHON 38 | fail_ci_if_error: true 39 | verbose: true 40 | -------------------------------------------------------------------------------- /.github/workflows/pypi-publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish package to PyPI 2 | on: 3 | push: 4 | tags: 5 | - '*' 6 | jobs: 7 | publish: 8 | runs-on: ubuntu-20.04 9 | steps: 10 | - uses: actions/checkout@master 11 | - name: Set up Python 3.9 12 | uses: actions/setup-python@v1 13 | with: 14 | python-version: 3.9 15 | - name: Get release version 16 | run: | 17 | echo "CHANGELOG_VERSION=$(cat CHANGELOG.md | grep -oP '(?<=###\s)(.*)(?=\s\-)' | head -n 1 | sed 's/Version\s/v/')" >> $GITHUB_ENV 18 | echo "TAG_VERSION=`echo $(git describe --tags --abbrev=0)`" >> $GITHUB_ENV 19 | - name: Check changelog release version 20 | if: ${{ env.TAG_VERSION != env.CHANGELOG_VERSION }} 21 | run: | 22 | echo "CHANGELOG_VERSION($CHANGELOG_VERSION) is different from TAG_VERSION($TAG_VERSION)" 23 | exit 1 24 | - name: Install dependencies 25 | run: | 26 | python -m pip install --upgrade pip 27 | pip install build 28 | - name: Get all git tags 29 | run: git fetch --tags -f 30 | - name: Build package 31 | run: | 32 | python -m build --sdist --wheel 33 | - name: Get package size 34 | run: echo "PKG_SIZE=$(find dist -maxdepth 1 -regex '.*gz' | xargs stat --format='%s')" >> $GITHUB_ENV 35 | - name: Check package size 36 | if: ${{ env.PKG_SIZE > 1e+8 }} 37 | run: | 38 | echo "PKG_SIZE($PKG_SIZE bytes) is greater than 100MB" 39 | exit 1 40 | - name: Publish package 41
| if: startsWith(github.ref, 'refs/tags') 42 | uses: pypa/gh-action-pypi-publish@master 43 | with: 44 | skip_existing: true 45 | password: ${{ secrets.PYPI_API_TOKEN }} 46 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: pytest 2 | on: 3 | push: 4 | branches: 5 | - master 6 | pull_request: 7 | branches: 8 | - '*' 9 | 10 | jobs: 11 | run: 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: ['ubuntu-latest', 'macos-latest', 'windows-latest'] 16 | python: ['3.10', 3.11, 3.12] 17 | env: 18 | OS: ${{ matrix.os }} 19 | PYTHON: ${{ matrix.python }} 20 | steps: 21 | - uses: actions/checkout@master 22 | - name: Setup Python 23 | uses: actions/setup-python@master 24 | with: 25 | python-version: ${{ matrix.python }} 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install -e .[tests] 30 | - name: Run pytest 31 | run: pytest 32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | xrspatial/.version 30 | xrspatial/_version.py 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .nox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | *.py,cover 53 | .hypothesis/ 54 | .pytest_cache/ 55 | 56 | # Translations 57 | *.mo 58 | *.pot 59 | 60 | # Sphinx documentation 61 | docs/_build/ 62 | 63 | # PyBuilder 64 | target/ 65 | 66 | # Jupyter Notebook 67 | .ipynb_checkpoints 68 | 69 | # IPython 70 | profile_default/ 71 | ipython_config.py 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # Environments 77 | .env 78 | .venv 79 | env/ 80 | venv/ 81 | ENV/ 82 | env.bak/ 83 | venv.bak/ 84 | 85 | # mypy 86 | .mypy_cache/ 87 | .dmypy.json 88 | dmypy.json 89 | 90 | # Pyre type checker 91 | .pyre/ 92 | .DS_Store 93 | /test_tiles_output 94 | *.TIF* 95 | 96 | # airspeed velocity 97 | .asv/ 98 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the OS, Python version and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.12" 13 | jobs: 14 | pre_build: 15 | - pip install '.[doc,tests]' 16 | 17 | # Build documentation in the "docs/" directory with Sphinx 18 | sphinx: 19 | configuration: docs/source/conf.py 20 | 21 | # Optional but recommended, declare the Python requirements required 22 | # to build your documentation 23 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 24 | python: 25 | install: 26 | # - requirements: docs/docs-requirements.txt 27 | - method: pip 28 | path: . 29 | # extra_requirements: 30 | # - [doc, tests] 31 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Citizen Code of Conduct 2 | 3 | ## 1. Purpose 4 | 5 | A primary goal of Xarray Spatial is to be inclusive to the largest number of contributors, with the most varied and diverse backgrounds possible. As such, we are committed to providing a friendly, safe and welcoming environment for all, regardless of gender, sexual orientation, ability, ethnicity, socioeconomic status, and religion (or lack thereof). 6 | 7 | This code of conduct outlines our expectations for all those who participate in our community, as well as the consequences for unacceptable behavior. 8 | 9 | We invite all those who participate in Xarray Spatial to help us create safe and positive experiences for everyone. 10 | 11 | ## 2. Open [Source/Culture/Tech] Citizenship 12 | 13 | A supplemental goal of this Code of Conduct is to increase open [source/culture/tech] citizenship by encouraging participants to recognize and strengthen the relationships between our actions and their effects on our community. 14 | 15 | Communities mirror the societies in which they exist and positive action is essential to counteract the many forms of inequality and abuses of power that exist in society. 16 | 17 | If you see someone who is making an extra effort to ensure our community is welcoming, friendly, and encourages all participants to contribute to the fullest extent, we want to know. 18 | 19 | ## 3. 
Expected Behavior 20 | 21 | The following behaviors are expected and requested of all community members: 22 | 23 | * Participate in an authentic and active way. In doing so, you contribute to the health and longevity of this community. 24 | * Exercise consideration and respect in your speech and actions. 25 | * Attempt collaboration before conflict. 26 | * Refrain from demeaning, discriminatory, or harassing behavior and speech. 27 | * Be mindful of your surroundings and of your fellow participants. Alert community leaders if you notice a dangerous situation, someone in distress, or violations of this Code of Conduct, even if they seem inconsequential. 28 | * Remember that community event venues may be shared with members of the public; please be respectful to all patrons of these locations. 29 | 30 | ## 4. Unacceptable Behavior 31 | 32 | The following behaviors are considered harassment and are unacceptable within our community: 33 | 34 | * Violence, threats of violence or violent language directed against another person. 35 | * Sexist, racist, homophobic, transphobic, ableist or otherwise discriminatory jokes and language. 36 | * Posting or displaying sexually explicit or violent material. 37 | * Posting or threatening to post other people's personally identifying information ("doxing"). 38 | * Personal insults, particularly those related to gender, sexual orientation, race, religion, or disability. 39 | * Inappropriate photography or recording. 40 | * Inappropriate physical contact. You should have someone's consent before touching them. 41 | * Unwelcome sexual attention. This includes sexualized comments or jokes, inappropriate touching, groping, and unwelcomed sexual advances. 42 | * Deliberate intimidation, stalking or following (online or in-person). 43 | * Advocating for, or encouraging, any of the above behavior. 44 | * Sustained disruption of community events, including talks and presentations. 45 | 46 | ## 5. Weapons Policy 47 | 48 | No weapons will be allowed at Xarray Spatial events, community spaces, or in other spaces covered by the scope of this Code of Conduct. Weapons include but are not limited to guns, explosives (including fireworks), and large knives such as those used for hunting or display, as well as any other item used for the purpose of causing injury or harm to others. Anyone seen in possession of one of these items will be asked to leave immediately, and will only be allowed to return without the weapon. Community members are further expected to comply with all state and local laws on this matter. 49 | 50 | ## 6. Consequences of Unacceptable Behavior 51 | 52 | Unacceptable behavior from any community member, including sponsors and those with decision-making authority, will not be tolerated. 53 | 54 | Anyone asked to stop unacceptable behavior is expected to comply immediately. 55 | 56 | If a community member engages in unacceptable behavior, the community organizers may take any action they deem appropriate, up to and including a temporary ban or permanent expulsion from the community without warning (and without refund in the case of a paid event). 57 | 58 | ## 7. Reporting Guidelines 59 | 60 | If you are subject to or witness unacceptable behavior, or have any other concerns, please notify a community organizer as soon as possible. 61 | 62 | Additionally, community organizers are available to help community members engage with local law enforcement or to otherwise help those experiencing unacceptable behavior feel safe. 
In the context of in-person events, organizers will also provide escorts as desired by the person experiencing distress. 63 | 64 | ## 8. Addressing Grievances 65 | 66 | If you feel you have been falsely or unfairly accused of violating this Code of Conduct, you should notify makepath with a concise description of your grievance. Your grievance will be handled in accordance with our existing governing policies. 67 | 68 | 69 | 70 | ## 9. Scope 71 | 72 | We expect all community participants (contributors, paid or otherwise; sponsors; and other guests) to abide by this Code of Conduct in all community venues--online and in-person--as well as in all one-on-one communications pertaining to community business. 73 | 74 | This code of conduct and its related procedures also apply to unacceptable behavior occurring outside the scope of community activities when such behavior has the potential to adversely affect the safety and well-being of community members. 75 | 76 | ## 10. Contact info 77 | 78 | Brendan Collins (brendan@makepath.com) 79 | 80 | ## 11. License and attribution 81 | 82 | The Citizen Code of Conduct is distributed by [Stumptown Syndicate](http://stumptownsyndicate.org) under a [Creative Commons Attribution-ShareAlike license](http://creativecommons.org/licenses/by-sa/3.0/). 83 | 84 | Portions of text derived from the [Django Code of Conduct](https://www.djangoproject.com/conduct/) and the [Geek Feminism Anti-Harassment Policy](http://geekfeminism.wikia.com/wiki/Conference_anti-harassment/Policy). 85 | 86 | _Revision 2.3. Posted 6 March 2017._ 87 | 88 | _Revision 2.2. Posted 4 February 2016._ 89 | 90 | _Revision 2.1. Posted 23 June 2014._ 91 | 92 | _Revision 2.0, adopted by the [Stumptown Syndicate](http://stumptownsyndicate.org) board on 10 January 2013. Posted 17 March 2013._ 93 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Xarray-Spatial 2 | 3 | As stated in the [Xarray Spatial code of conduct](https://github.com/makepath/xarray-spatial/blob/master/CODE_OF_CONDUCT.md), a primary goal of Xarray Spatial is to be inclusive to the largest number of contributors. However, we do have some requests for how contributions should be made. Please read these guidelines before contributing to ensure a positive experience with Xarray Spatial. 4 | 5 | ### Getting Started 6 | 7 | Information about installation and setting up a development environment can be found at the [Getting Started page](https://xarray-spatial.org/getting_started/index.html). 8 | 9 | ### Choosing something to work on 10 | 11 | The issue tracker has a list of items that you can start working on. 12 | In order to avoid duplication of effort, it's always a good idea to comment on an issue and let everybody know that you intend to work on it. 13 | 14 | ### Opening a new issue 15 | 16 | 1. Avoid duplicate reports. Search GitHub for similar or identical issues. Keyword searches for your error messages are usually effective. 17 | 18 | 2. The issue may already be resolved. Always try to reproduce the issue in the latest stable release. 19 | 20 | 3. Always include a minimal, self-contained, reproducible test case or example. It is not possible to investigate issues that cannot be reproduced. 21 | 22 | 4. Include relevant system information. 23 | 24 | 5. State the expected behavior. 25 | 26 | 27 | ### Creating a pull request (PR) 28 | 29 | 1.
Make sure that there is a corresponding issue for your change first. If there isn't yet, create one. 30 | 31 | 2. Create a fork of the Xarray Spatial repository on GitHub (this is only done before your *first* contribution). 32 | 33 | 3. Create a branch off the `master` branch with a meaningful name. Preferably include the issue number and a few keywords, so that we will have a rough idea what the branch refers to, without looking up the issue. 34 | 35 | 4. Commit your changes and push them to GitHub. 36 | 37 | 5. Create a pull request against the default base branch. The PR must have a meaningful title and a message explaining what was achieved, what remains to be done, maybe an example, etc. 38 | 39 | 6. Use the Create draft pull request option when you first open a pull request, so everyone knows it's a work in progress. Once you finish the work on the pull request you can convert it to Ready for review. In addition to this, please use the labels WIP and ready to merge. 40 | 41 | 7. We don't accept code contributions without tests. If there are valid reasons for not including a test, please discuss this in the issue. 42 | 43 | 8. We will review your PR as time permits. Reviewers may comment on your contributions, ask you questions regarding the implementation or request changes. If changes are requested, push new commits to the existing branch. Do *NOT* rebase, amend, or cherry-pick published commits. Any of those actions will make us start the review from scratch. If you need updates from `master`, just merge it into your branch. 44 | 45 | 46 | ### Attribution 47 | 48 | Portions of text derived from the [Bokeh CONTRIBUTING file](https://github.com/bokeh/bokeh/blob/branch-2.4/.github/CONTRIBUTING.md) 49 | -------------------------------------------------------------------------------- /Citation-styles.md: -------------------------------------------------------------------------------- 1 | ## Citation Formats: 2 | 3 | - For scientific literature in the form of books or articles in peer-reviewed journals: 4 | APA7 guidelines 5 | - For Websites or resources available by following a link to an html, pdf, or other formatted style output: 6 | Author, Organization, Title, url, "Accessed" date accessed [Mon. dd, yyyy] 7 | - For YouTube: 8 | Poster username, Channel, date posted [Mon. dd, yyyy] "YouTube", , url, "Accessed" date accessed [Mon. dd, yyyy] 9 | - For software packages: 10 | Platform username, package name, version, link 11 | - For source code: 12 | - On GitHub: 13 | username/repo, link, access date [Mon. dd, yyyy] 14 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020-2022 makepath 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software.
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include xrspatial *.py 2 | recursive-include xrspatial *.nc 3 | 4 | include LICENSE.txt 5 | include xrspatial/.version 6 | 7 | graft xrspatial/examples 8 | 9 | graft docs 10 | prune docs/build -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | ## Release process 2 | 3 | ### Preparation 4 | - Create a new branch containing the following changes: 5 | - Update CHANGELOG.md with new version number and list of changes extracted from `git log --pretty=oneline --pretty=format:"- %s" ..HEAD`. 6 | - Commit changes and submit them as a PR to the `master` branch. 7 | - If the CI passes OK, merge the PR. 8 | 9 | ### Tag release 10 | - To sign the release you need a GPG key registered with your github account. See https://docs.github.com/en/authentication/managing-commit-signature-verification 11 | - Create new tag, with the correct version number, using: 12 | ```bash 13 | git tag -a v0.1.2 -s -m "Version 0.1.2" 14 | git push --tags 15 | ``` 16 | 17 | ### PyPI packages 18 | - These are automatically built and uploaded to PyPI via a github action when a new tag is pushed to the github repo. 19 | - Check that both an sdist (`.tar.gz` file) and wheel (`.whl` file) are available on PyPI. 20 | - Check you can install the new version in a new virtual environment using `pip install xarray-spatial`. 21 | 22 | ### github release notes 23 | - Convert the tag into a release on github: 24 | - On the right-hand side of the github repo, click on `Releases`. 25 | - Click on `Draft a new release`. 26 | - Select the correct tag, and enter the title and description by copying and pasting from the CHANGELOG.md. 27 | - Click `Publish release`. 28 | 29 | ### Documentation 30 | 31 | - When the github release is created, a github action automatically builds the documentation and uploads it to https://xarray-spatial.readthedocs.io/. 32 | 33 | ### conda-forge packages 34 | - A bot in https://github.com/conda-forge/xarray-spatial-feedstock runs periodically to identify the new PyPI release and update the conda recipe appropriately. This should create a new PR, run tests to check that the conda build works, and automatically upload the packages to conda-forge if everything is OK. Check this works, a few hours after the PyPI release. 35 | -------------------------------------------------------------------------------- /benchmarks/README.md: -------------------------------------------------------------------------------- 1 | Benchmarking 2 | ============ 3 | 4 | `xarray-spatial` uses ASV (https://asv.readthedocs.io) for benchmarking. 5 | 6 | Installing ASV 7 | -------------- 8 | 9 | ASV creates virtualenvs to run benchmarks in. 
Before using it you need to 10 | 11 | ``` 12 | pip install asv virtualenv 13 | ``` 14 | or the `conda` equivalent. 15 | 16 | Running benchmarks 17 | ------------------ 18 | 19 | ASV configuration information is stored in `benchmarks/asv.conf.json`. This includes a `matrix` section that lists the dependencies to install in the virtual environments in addition to those installed by default. You always need `pyct` as `setup.py` uses it. There are also some other optional dependencies that are commented out in the `matrix` section. 20 | 21 | If you want to benchmark `cupy`-backed `DataArray`s and have the hardware and drivers to support this, then uncomment the `cupy-cuda101` line in `asv.conf.json` and change the `101` version number part of this to match the version of your CUDA setup. This can be determined from the last line of the output of `nvcc --version`. 22 | 23 | If you want to benchmark algorithms that use the ray-tracing code in `rtxpy`, then uncomment the `rtxpy` line in `asv.conf.json` as well as the `cupy` line. 24 | 25 | To run all benchmarks against the default `master` branch: 26 | ``` 27 | cd benchmarks 28 | asv run 29 | ``` 30 | 31 | The first time this is run it will create a machine file to store information about your machine. Then a virtual environment will be created and each benchmark will be run multiple times to obtain a statistically valid benchmark time. 32 | 33 | To list the benchmark timings stored for the `master` branch use: 34 | ``` 35 | asv show master 36 | ``` 37 | 38 | ASV ships with its own simple webserver to interactively display the results in a web browser. To use this: 39 | ``` 40 | asv publish 41 | asv preview 42 | ``` 43 | and then open a web browser at the URL specified. 44 | 45 | If you want to quickly run all benchmarks once only to check for errors, etc., use: 46 | ``` 47 | asv dev 48 | ``` 49 | instead of `asv run`. 50 | 51 | 52 | Adding new benchmarks 53 | --------------------- 54 | 55 | Add new benchmarks to existing or new classes in the `benchmarks/benchmarks` directory. Any class member function with a name that starts with `time` will be identified as a timing benchmark when `asv` is run. 56 | 57 | Data that is required to run benchmarks is usually created in the `setup()` member function. This ensures that the time taken to set up the data is not included in the benchmark time. The `setup()` function is called once for each invocation of each benchmark; the data are not cached. 58 | 59 | At the top of each benchmark class there are lists of parameter names and values. Each benchmark is repeated for each unique combination of these parameters. A minimal example class is sketched further below. 60 | 61 | If you wish to benchmark `cupy` and/or `rtxpy` functionality, ensure that you test for the availability of the correct libraries and hardware first. This is illustrated in the `get_xr_dataarray()` function. 62 | 63 | If you only want to run a subset of benchmarks, use syntax like: 64 | ``` 65 | asv run -b Slope 66 | ``` 67 | where the text after the `-b` flag is used as a regex to match benchmark file, class and function names. 68 | 69 | 70 | Benchmarking code changes 71 | ------------------------- 72 | 73 | You can compare the performance of code on different branches and in different commits. Usually if you want to determine how much faster a new algorithm is, the old code will be in the `master` branch and the new code will be in a new feature branch.
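If, for example, the feature branch adds a new function, it would typically also add a benchmark class for it, following the conventions described under "Adding new benchmarks" above. Here is a minimal, purely illustrative sketch (the module and class names are hypothetical; `get_xr_dataarray` and `aspect` are existing helpers in this repository):

```
# benchmarks/benchmarks/my_feature.py -- illustrative sketch only; "MyFeature"
# is a hypothetical name and `aspect` merely stands in for whatever function
# the feature branch adds.
from xrspatial import aspect

from .common import get_xr_dataarray


class MyFeature:
    # Each benchmark is repeated for every combination of these parameters.
    params = ([100, 1000], ["numpy", "cupy"])
    param_names = ("nx", "type")

    def setup(self, nx, type):
        # Data is created here so that it is excluded from the timed section.
        # get_xr_dataarray raises NotImplementedError when e.g. cupy is not
        # available, so that parameter combination is skipped.
        self.agg = get_xr_dataarray((nx // 2, nx), type)

    def time_my_feature(self, nx, type):
        # Any member function whose name starts with "time" is timed by ASV.
        aspect(self.agg)
```

Dropping such a file into `benchmarks/benchmarks/` on the feature branch should be enough for `asv run -b MyFeature` to pick it up.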
Because ASV uses virtual environments and checks out the `xarray-spatial` source code into these virtual environments, your new code must be committed into the new feature branch. 74 | 75 | To benchmark the latest commits on `master` and your new feature branch, edit `asv.conf.json` to change the line 76 | ``` 77 | "branches": ["master"], 78 | ``` 79 | into 80 | ``` 81 | "branches": ["master", "new_feature_branch"], 82 | ``` 83 | or similar. 84 | 85 | Now when you `asv run` the benchmarks will be run against both branches in turn. 86 | 87 | Then use 88 | ``` 89 | asv show 90 | ``` 91 | to list the commits that have been benchmarked, and 92 | ``` 93 | asv compare commit1 commit2 94 | ``` 95 | to give you a side-by-side comparison of the two commits. 96 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/benchmarks/benchmarks/__init__.py -------------------------------------------------------------------------------- /benchmarks/benchmarks/aspect.py: -------------------------------------------------------------------------------- 1 | from xrspatial import aspect 2 | 3 | from .common import Benchmarking 4 | 5 | 6 | class Aspect(Benchmarking): 7 | def __init__(self): 8 | super().__init__(func=aspect) 9 | 10 | def time_aspect(self, nx, type): 11 | return self.time(nx, type) 12 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/classify.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from xrspatial.classify import equal_interval, natural_breaks, quantile, reclassify 4 | 5 | from .common import get_xr_dataarray 6 | 7 | 8 | class Classify: 9 | params = ([100, 300, 1000, 3000, 10000], [1, 10, 100], ["numpy", "cupy"]) 10 | param_names = ("nx", "nbins", "type") 11 | 12 | def setup(self, nx, nbins, type): 13 | ny = nx // 2 14 | self.agg = get_xr_dataarray((ny, nx), type) 15 | min_val = np.nanmin(self.agg.data) 16 | max_val = np.nanmax(self.agg.data) 17 | self.nbins = nbins 18 | self.bins = np.linspace(min_val, max_val, self.nbins) 19 | self.new_values = np.arange(nbins) 20 | 21 | 22 | class Reclassify(Classify): 23 | def time_reclassify(self, nx, nbins, type): 24 | reclassify(self.agg, self.bins, self.new_values) 25 | 26 | 27 | class Quantile(Classify): 28 | def time_quantile(self, nx, nbins, type): 29 | quantile(self.agg, k=self.nbins) 30 | 31 | 32 | class NaturalBreaks(Classify): 33 | params = ([100, 300, 1000, 3000, 10000], [1, 10], ["numpy"]) 34 | 35 | def time_natural_breaks(self, nx, nbins, type): 36 | natural_breaks(self.agg, k=self.nbins) 37 | 38 | 39 | class EqualInterval(Classify): 40 | def time_equal_interval(self, nx, nbins, type): 41 | equal_interval(self.agg, k=self.nbins) 42 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/common.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import xarray as xr 3 | 4 | from xrspatial.gpu_rtx import has_rtx 5 | from xrspatial.utils import has_cuda_and_cupy 6 | 7 | 8 | def get_xr_dataarray( 9 | shape, type, different_each_call=False, seed=71942, is_int=False, include_nan=False 10 | ): 11 | # Gaussian bump with noise. 
12 | # 13 | # Valid types are "numpy", "cupy" and "rtxpy". Using "numpy" will return 14 | # a numpy-backed xarray DataArray. Using either of the other two will 15 | # return a cupy-backed DataArray but only if the required dependencies are 16 | # available, otherwise a NotImplementedError will be raised so that the 17 | # benchmark will not be run, 18 | # 19 | # Calling with different_each_call=True will ensure that each array 20 | # returned by this function is different by randomly changing the last 21 | # element. This is required for functions that create an rtxpy 22 | # triangulation to avoid them reusing a cached triangulation leading to 23 | # optimistically fast benchmark times. 24 | ny, nx = shape 25 | 26 | x = np.linspace(-180, 180, nx) 27 | y = np.linspace(-90, 90, ny) 28 | x2, y2 = np.meshgrid(x, y) 29 | rng = np.random.default_rng(seed) 30 | 31 | if is_int: 32 | z = rng.integers(-nx, nx, size=shape).astype(np.float32) 33 | else: 34 | z = 100.0*np.exp(-x2**2 / 5e5 - y2**2 / 2e5) 35 | z += rng.normal(0.0, 2.0, (ny, nx)) 36 | 37 | if different_each_call: 38 | if is_int: 39 | z[-1, -1] = np.random.default_rng().integers(-nx, nx) 40 | else: 41 | z[-1, -1] = np.random.default_rng().normal(0.0, 2.0) 42 | 43 | if include_nan: 44 | z[0, 0] = np.nan 45 | 46 | if type == "numpy": 47 | pass 48 | elif type == "cupy": 49 | if not has_cuda_and_cupy: 50 | raise NotImplementedError() 51 | import cupy 52 | z = cupy.asarray(z) 53 | elif type == "rtxpy": 54 | if not has_rtx(): 55 | raise NotImplementedError() 56 | import cupy 57 | z = cupy.asarray(z) 58 | else: 59 | raise RuntimeError(f"Unrecognised type {type}") 60 | 61 | return xr.DataArray(z, coords=dict(y=y, x=x), dims=["y", "x"]) 62 | 63 | 64 | class Benchmarking: 65 | params = ([100, 300, 1000, 3000, 10000], ["numpy", "cupy"]) 66 | param_names = ("nx", "type") 67 | 68 | def __init__(self, func=None): 69 | self.func = func 70 | 71 | def setup(self, nx, type): 72 | ny = nx // 2 73 | self.xr = get_xr_dataarray((ny, nx), type) 74 | 75 | def time(self, nx, type): 76 | if self.func is not None: 77 | self.func(self.xr) 78 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/curvature.py: -------------------------------------------------------------------------------- 1 | from xrspatial import curvature 2 | 3 | from .common import Benchmarking 4 | 5 | 6 | class Curvature(Benchmarking): 7 | def __init__(self): 8 | super().__init__(func=curvature) 9 | 10 | def time_curvature(self, nx, type): 11 | return self.time(nx, type) 12 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/focal.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from xrspatial.convolution import custom_kernel 4 | from xrspatial.focal import apply, focal_stats, hotspots, mean 5 | 6 | from .common import get_xr_dataarray 7 | 8 | 9 | class Focal: 10 | params = ([100, 300, 1000, 3000], [(5, 5), (25, 25)], ["numpy", "cupy"]) 11 | param_names = ("nx", "kernelsize", "type") 12 | 13 | def setup(self, nx, kernelsize, type): 14 | ny = nx // 2 15 | self.agg = get_xr_dataarray((ny, nx), type) 16 | kernel_w, kernel_h = kernelsize 17 | self.kernel = custom_kernel(np.ones((kernel_h, kernel_w))) 18 | 19 | 20 | class FocalApply(Focal): 21 | params = ([100, 300, 1000, 3000], [(5, 5), (25, 25)], ["numpy"]) 22 | 23 | def time_apply(self, nx, kernelsize, type): 24 | apply(self.agg, self.kernel) 25 | 26 | 27 | class 
FocalHotspots(Focal): 28 | def time_hotspots(self, nx, kernelsize, type): 29 | hotspots(self.agg, self.kernel) 30 | 31 | 32 | class FocalStats(Focal): 33 | params = ([100, 300, 1000, 3000], [(5, 5), (15, 15)], ["numpy", "cupy"]) 34 | 35 | def time_focal_stats(self, nx, kernelsize, type): 36 | focal_stats(self.agg, self.kernel) 37 | 38 | 39 | class FocalMean: 40 | params = ([100, 300, 1000, 3000, 10000], [1, 10], ["numpy", "cupy"]) 41 | param_names = ("nx", "passes", "type") 42 | 43 | def setup(self, nx, passes, type): 44 | ny = nx // 2 45 | self.agg = get_xr_dataarray((ny, nx), type) 46 | 47 | def time_mean(self, nx, passes, type): 48 | mean(self.agg, passes) 49 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/hillshade.py: -------------------------------------------------------------------------------- 1 | from xrspatial import hillshade 2 | 3 | from .common import get_xr_dataarray 4 | 5 | 6 | class Hillshade: 7 | # Note that rtxpy hillshade includes shadow calculations so timings are 8 | # not comparable with numpy and cupy hillshade. 9 | params = ([100, 300, 1000, 3000], ["numpy", "cupy", "rtxpy"]) 10 | param_names = ("nx", "type") 11 | 12 | def setup(self, nx, type): 13 | ny = nx // 2 14 | self.xr = get_xr_dataarray( 15 | (ny, nx), type, different_each_call=(type == "rtxpy")) 16 | 17 | def time_hillshade(self, nx, type): 18 | shadows = (type == "rtxpy") 19 | hillshade(self.xr, shadows=shadows) 20 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/multispectral.py: -------------------------------------------------------------------------------- 1 | from xrspatial.multispectral import arvi, ebbi, evi, gci, nbr, nbr2, ndmi, ndvi, savi, sipi 2 | 3 | from .common import Benchmarking, get_xr_dataarray 4 | 5 | 6 | class Multispectral(Benchmarking): 7 | def __init__(self): 8 | super().__init__() 9 | 10 | def setup(self, nx, type): 11 | ny = nx // 2 12 | self.red = get_xr_dataarray((ny, nx), type, seed=100) 13 | self.green = get_xr_dataarray((ny, nx), type, seed=200) 14 | self.blue = get_xr_dataarray((ny, nx), type, seed=300) 15 | self.nir = get_xr_dataarray((ny, nx), type, seed=400) 16 | self.swir1 = get_xr_dataarray((ny, nx), type, seed=500) 17 | self.swir2 = get_xr_dataarray((ny, nx), type, seed=600) 18 | self.tir = get_xr_dataarray((ny, nx), type, seed=700) 19 | 20 | 21 | class Arvi(Multispectral): 22 | def time_arvi(self, nx, type): 23 | arvi(self.nir, self.red, self.blue) 24 | 25 | 26 | class Evi(Multispectral): 27 | def time_evi(self, nx, type): 28 | evi(self.nir, self.red, self.blue) 29 | 30 | 31 | class Gci(Multispectral): 32 | def time_gci(self, nx, type): 33 | gci(self.nir, self.green) 34 | 35 | 36 | class Nbr(Multispectral): 37 | def time_nbr(self, nx, type): 38 | nbr(self.nir, self.swir2) 39 | 40 | 41 | class Nbr2(Multispectral): 42 | def time_nbr2(self, nx, type): 43 | nbr2(self.swir1, self.swir2) 44 | 45 | 46 | class Ndvi(Multispectral): 47 | def time_ndvi(self, nx, type): 48 | ndvi(self.nir, self.red) 49 | 50 | 51 | class Ndmi(Multispectral): 52 | def time_ndmi(self, nx, type): 53 | ndmi(self.nir, self.swir1) 54 | 55 | 56 | class Savi(Multispectral): 57 | def time_savi(self, nx, type): 58 | savi(self.nir, self.red) 59 | 60 | 61 | class Sipi(Multispectral): 62 | def time_sipi(self, nx, type): 63 | sipi(self.nir, self.red, self.blue) 64 | 65 | 66 | class Ebbi(Multispectral): 67 | def time_ebbi(self, nx, type): 68 | ebbi(self.red, self.swir1, self.tir) 69 | 
-------------------------------------------------------------------------------- /benchmarks/benchmarks/pathfinding.py: -------------------------------------------------------------------------------- 1 | from xrspatial.pathfinding import a_star_search 2 | 3 | from .common import get_xr_dataarray 4 | 5 | 6 | class AStarSearch: 7 | params = ([10, 100, 300], [4, 8], ["numpy"]) 8 | param_names = ("nx", "connectivity", "type") 9 | 10 | def setup(self, nx, connectivity, type): 11 | ny = nx // 2 12 | self.agg = get_xr_dataarray((ny, nx), type) 13 | self.start = self.agg.y[0], self.agg.x[0] 14 | self.goal = self.agg.y[-1], self.agg.x[-1] 15 | 16 | def time_a_star_search(self, nx, connectivity, type): 17 | a_star_search( 18 | self.agg, self.start, self.goal, 19 | connectivity=connectivity, 20 | snap_start=True, snap_goal=True 21 | ) 22 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/perlin.py: -------------------------------------------------------------------------------- 1 | from xrspatial.perlin import perlin 2 | 3 | from .common import Benchmarking 4 | 5 | 6 | class Perlin(Benchmarking): 7 | def __init__(self): 8 | super().__init__(func=perlin) 9 | 10 | def time_perlin(self, nx, type): 11 | return self.time(nx, type) 12 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/polygonize.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import xarray as xr 3 | 4 | from xrspatial.experimental import polygonize 5 | 6 | 7 | class Polygonize: 8 | params = ( 9 | [100, 300, 1000], 10 | ["numpy", "geopandas", "spatialpandas", "rasterio", 11 | "rasterio-geopandas"], 12 | ) 13 | param_names = ("nx", "ret") 14 | 15 | def setup(self, nx, ret): 16 | # Raster and mask with many small regions. 17 | ny = nx // 2 18 | rng = np.random.default_rng(9461713) 19 | raster = rng.integers(low=0, high=4, size=(ny, nx), dtype=np.int32) 20 | mask = rng.uniform(0, 1, size=(ny, nx)) < 0.9 21 | self.raster = xr.DataArray(raster) 22 | self.mask = xr.DataArray(mask) 23 | 24 | def time_polygonize(self, nx, ret): 25 | if ret.startswith("rasterio"): 26 | import rasterio.features 27 | if ret == "rasterio": 28 | # Cast to list to ensure generator is run. 
29 | list(rasterio.features.shapes( 30 | self.raster.data, self.mask.data)) 31 | else: 32 | import geopandas as gpd 33 | from shapely.geometry import shape 34 | values = [] 35 | shapes = [] 36 | for shape_dict, value in rasterio.features.shapes( 37 | self.raster.data, self.mask.data): 38 | shapes.append(shape(shape_dict)) 39 | values.append(value) 40 | gpd.GeoDataFrame({"DN": values, "geometry": shapes}) 41 | else: 42 | polygonize(self.raster, mask=self.mask, return_type=ret) 43 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/proximity.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from xrspatial.proximity import allocation, direction, proximity 4 | 5 | from .common import get_xr_dataarray 6 | 7 | 8 | class Base: 9 | params = ( 10 | [100, 1000], 11 | [1, 10, 100], 12 | ["EUCLIDEAN", "GREAT_CIRCLE", "MANHATTAN"], 13 | ["numpy"] 14 | ) 15 | param_names = ("nx", "n_target_values", "distance_metric", "type") 16 | 17 | def setup(self, nx, n_target_values, distance_metric, type): 18 | ny = nx // 2 19 | self.agg = get_xr_dataarray((ny, nx), type, is_int=True) 20 | unique_values = np.unique(self.agg.data) 21 | self.target_values = unique_values[:n_target_values] 22 | 23 | 24 | class Proximity(Base): 25 | def time_proximity(self, nx, n_target_values, distance_metric, type): 26 | proximity(self.agg, target_values=self.target_values, distance_metric=distance_metric) 27 | 28 | 29 | class Allocation(Base): 30 | def time_allocation(self, nx, n_target_values, distance_metric, type): 31 | allocation(self.agg, target_values=self.target_values, distance_metric=distance_metric) 32 | 33 | 34 | class Direction(Base): 35 | def time_direction(self, nx, n_target_values, distance_metric, type): 36 | direction(self.agg, target_values=self.target_values, distance_metric=distance_metric) 37 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/slope.py: -------------------------------------------------------------------------------- 1 | from xrspatial import slope 2 | 3 | from .common import Benchmarking 4 | 5 | 6 | class Slope(Benchmarking): 7 | def __init__(self): 8 | super().__init__(func=slope) 9 | 10 | def time_slope(self, nx, type): 11 | return self.time(nx, type) 12 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/terrain.py: -------------------------------------------------------------------------------- 1 | from xrspatial.terrain import generate_terrain 2 | 3 | from .common import Benchmarking 4 | 5 | 6 | class GenerateTerrain(Benchmarking): 7 | def __init__(self): 8 | super().__init__(func=generate_terrain) 9 | 10 | def time_generate_terrain(self, nx, type): 11 | return self.time(nx, type) 12 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/viewshed.py: -------------------------------------------------------------------------------- 1 | from xrspatial import viewshed 2 | 3 | from .common import get_xr_dataarray 4 | 5 | 6 | class Viewshed: 7 | # Note there is no option available for cupy without rtxpy. 
8 | params = ([100, 300, 1000, 3000], ["numpy", "rtxpy"]) 9 | param_names = ("nx", "type") 10 | 11 | def setup(self, nx, type): 12 | ny = nx // 2 13 | self.xr = get_xr_dataarray( 14 | (ny, nx), type, different_each_call=(type == "rtxpy")) 15 | self.x = 100 16 | self.y = 50 17 | 18 | def time_viewshed(self, nx, type): 19 | viewshed(self.xr, x=self.x, y=self.y, observer_elev=1.0) 20 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/zonal.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import xarray as xr 3 | 4 | from xrspatial import zonal 5 | from xrspatial.utils import has_cuda_and_cupy 6 | 7 | from .common import get_xr_dataarray 8 | 9 | 10 | def create_arr(data=None, H=10, W=10, backend='numpy'): 11 | assert(backend in ['numpy', 'cupy', 'dask']) 12 | if data is None: 13 | data = np.zeros((H, W), dtype=np.float32) 14 | raster = xr.DataArray(data, dims=['y', 'x']) 15 | 16 | if has_cuda_and_cupy() and 'cupy' in backend: 17 | import cupy 18 | raster.data = cupy.asarray(raster.data) 19 | 20 | if 'dask' in backend: 21 | import dask.array as da 22 | raster.data = da.from_array(raster.data, chunks=(10, 10)) 23 | 24 | return raster 25 | 26 | 27 | class Zonal: 28 | # Note that rtxpy hillshade includes shadow calculations so timings are 29 | # not comparable with numpy and cupy hillshade. 30 | params = ([400, 1600, 3200], [2, 4, 8], ["numpy", "cupy"]) 31 | param_names = ("raster_dim", "zone_dim", "backend") 32 | 33 | def setup(self, raster_dim, zone_dim, backend): 34 | W = H = raster_dim 35 | zW = zH = zone_dim 36 | # Make sure that the raster dim is multiple of the zones dim 37 | assert(W % zW == 0) 38 | assert(H % zH == 0) 39 | # initialize the values raster 40 | self.values = get_xr_dataarray((H, W), backend) 41 | 42 | # initialize the zones raster 43 | zones = xr.DataArray(np.zeros((H, W))) 44 | hstep = H//zH 45 | wstep = W//zW 46 | for i in range(zH): 47 | for j in range(zW): 48 | zones[i * hstep: (i+1)*hstep, j*wstep: (j+1)*wstep] = i*zW + j 49 | 50 | ''' zones now looks like this 51 | >>> zones = np.array([ 52 | [0, 0, 0, 0, 0, 1, 1, 1, 1, 1], 53 | [0, 0, 0, 0, 0, 1, 1, 1, 1, 1], 54 | [0, 0, 0, 0, 0, 1, 1, 1, 1, 1], 55 | [0, 0, 0, 0, 0, 1, 1, 1, 1, 1], 56 | [0, 0, 0, 0, 0, 1, 1, 1, 1, 1], 57 | [2, 2, 2, 2, 2, 3, 3, 3, 3, 3], 58 | [2, 2, 2, 2, 2, 3, 3, 3, 3, 3], 59 | [2, 2, 2, 2, 2, 3, 3, 3, 3, 3], 60 | [2, 2, 2, 2, 2, 3, 3, 3, 3, 3], 61 | [2, 2, 2, 2, 2, 3, 3, 3, 3, 3]]) 62 | ''' 63 | 64 | self.zones = create_arr(zones, backend=backend) 65 | 66 | # Now setup the custom stat funcs 67 | if backend == 'cupy': 68 | import cupy 69 | l2normKernel = cupy.ReductionKernel( 70 | in_params='T x', out_params='float64 y', 71 | map_expr='x*x', reduce_expr='a+b', 72 | post_map_expr='y = sqrt(a)', 73 | identity='0', name='l2normKernel' 74 | ) 75 | self.custom_stats = { 76 | 'double_sum': lambda val: val.sum()*2, 77 | 'l2norm': lambda val: np.sqrt(cupy.sum(val * val)), 78 | 'l2normKernel': lambda val: l2normKernel(val) 79 | } 80 | else: 81 | from xrspatial.utils import ngjit 82 | 83 | @ngjit 84 | def l2normKernel(arr): 85 | acc = 0 86 | for x in arr: 87 | acc += x * x 88 | return np.sqrt(acc) 89 | 90 | self.custom_stats = { 91 | 'double_sum': lambda val: val.sum()*2, 92 | 'l2norm': lambda val: np.sqrt(np.sum(val * val)), 93 | 'l2normKernel': lambda val: l2normKernel(val) 94 | } 95 | 96 | def time_zonal_stats_default(self, raster_dim, zone_dim, backend): 97 | zonal.stats(zones=self.zones, 
values=self.values) 98 | 99 | def time_zonal_stats_custom(self, raster_dim, zone_dim, backend): 100 | zonal.stats(zones=self.zones, values=self.values, 101 | stats_funcs=self.custom_stats) 102 | -------------------------------------------------------------------------------- /benchmarks/results.md: -------------------------------------------------------------------------------- 1 | Benchmarking Results 2 | ============ 3 | 4 | Windows 10 5 | -------------- 6 | CPU: AMD Ryzen 5 1600 7 | 8 | Cores: 12 9 | 10 | GPU: GeForce RTX 3060 11 | 12 | RAM: 32GB 13 | 14 | ``` 15 | hillshade.Hillshade.time_hillshade 16 | ``` 17 | nx | numpy | cupy | rtxpy 18 | --- | --- | --- | --- 19 | 100 | 564±9μs | 1.33±0.07ms | 6.76±0.2ms 20 | 300 | 2.70±0.1ms | 1.30±0.04ms | 9.36±0.8ms 21 | 1000 | 38.0±2ms | 1.56±0.06ms | 26.6±2ms 22 | 3000 | 352±30ms | 2.13±1ms | 172±1ms 23 | 24 | ``` 25 | polygonize.Polygonize.time_polygonize 26 | ``` 27 | nx | numpy | geopandas | spatialpandas | rasterio-geopandas 28 | --- | --- | --- | --- | --- 29 | 100 | 3.74±0.8ms | failed | failed | failed 30 | 300 | 42.7±0.4ms | failed | failed | failed 31 | 1000 | 492±4ms | failed | failed | failed 32 | 33 | ``` 34 | slope.Slope.time_slope 35 | ``` 36 | nx | numpy | cupy 37 | --- | --- | --- 38 | 100 | 784±50μs | 2.70±0.06ms 39 | 300 | 1.83±0.1ms | 2.61±0.1ms 40 | 1000 | 17.9±0.2ms | 2.70±0.08ms 41 | 3000 | 171±1ms | 4.61±1ms 42 | 10000 | 1.62±0.02s | 105±100ms 43 | 44 | ``` 45 | viewshed.Viewshed.time_viewshed 46 | ``` 47 | nx | numpy | rtxpy 48 | --- | --- | --- 49 | 100 | 7.24±0.01ms | 8.18±0.2ms 50 | 300 | 53.1±0.3ms | 10.4±0.2ms 51 | 1000 | 657±0.07ms | 27.1±0.3ms 52 | 3000 | 7.24±0.04s | 170±1ms 53 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | coverage: 2 | status: 3 | patch: off 4 | project: 5 | default: 6 | target: auto 7 | threshold: 0.5% 8 | 9 | comment: 10 | require_changes: yes 11 | 12 | ignore: 13 | - "./setup.py" 14 | - "xrspatial/tests/*" 15 | - "xrspatial/experimental/*" 16 | - "xrspatial/gpu_rtx/*" 17 | - "xrspatial/datasets/*" 18 | - "xrspatial/__init__.py" 19 | - "xrspatial/__main__.py" 20 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | _autosummary/ -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/docs-requirements.txt: -------------------------------------------------------------------------------- 1 | dask[dataframe] 2 | geopandas 3 | Jinja2 >=2.11 4 | ipykernel 5 | matplotlib 6 | nbsphinx 7 | numpydoc 8 | pandoc 9 | pydata_sphinx_theme 10 | sphinx 11 | sphinx-panels 12 | sphinx_rtd_theme 13 | flake8 14 | isort 15 | noise >= 1.2.2 16 | pytest 17 | pytest-cov 18 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/_static/css/styles.css: -------------------------------------------------------------------------------- 1 | /* 2 | Logo 3 | ==== 4 | */ 5 | 6 | .logo { 7 | max-height: 200px; 8 | } 9 | 10 | 11 | /* 12 | Examples Panel 13 | ============== 14 | */ 15 | 16 | .examples-container { 17 | background-color: #e4e7eb; 18 | } 19 | 20 | .examples-item { 21 | padding: 0px; 22 | } 23 | 24 | .examples-item>a>img { 25 | width: 100%; 26 | height: 100%; 27 | } 28 | 29 | .examples-card { 30 | flex: 0 0 20%; 31 | max-width: 20%; 32 | padding: 0.5rem!important; 33 | } 34 | 35 | 36 | /* 37 | Description Panel 38 | ================= 39 | */ 40 | 41 | .description-libs { 42 | display: flex; 43 | justify-content: center; 44 | text-align: center; 45 | margin-top: -25px; 46 | margin-bottom: -15px; 47 | } 48 | 49 | .python { 50 | font-size: 1.25rem; 51 | margin: 1rem; 52 | padding-bottom: 0.5rem; 53 | padding-top: 0.25rem; 54 | width: 25%; 55 | } 56 | 57 | .python>img { 58 | display: block; 59 | vertical-align: middle; 60 | max-width: 100%; 61 | width: 7rem; 62 | margin: 0px auto; 63 | height: 75px; 64 | } 65 | 66 | .numba { 67 | font-size: 1.25rem; 68 | --tw-border-opacity: 1; 69 | border-color: rgba(255, 255, 255, var(--tw-border-opacity)); 70 | margin: 1rem; 71 | padding-bottom: 0.5rem; 72 | padding-top: 0.25rem; 73 | width: 25%; 74 | } 75 | 76 | .numba>img { 77 | display: block; 78 | vertical-align: middle; 79 | max-width: 100%; 80 | width: 7rem; 81 | margin: 0px auto; 82 | height: 75px; 83 | } 84 | 85 | .dask { 86 | font-size: 1.25rem; 87 | margin: 1rem; 88 | padding-bottom: 0.5rem; 89 | width: 25%; 90 | } 91 | 92 | .dask>img { 93 | text-align: center; 94 | display: block; 95 | vertical-align: middle; 96 | max-width: 100%; 97 | width: 7rem; 98 | margin: 0px auto; 99 | height: 75px; 100 | } 
101 | 102 | .description-text { 103 | font-weight: bold; 104 | justify-content: center; 105 | font-size: 1.2rem; 106 | line-height: 2.25rem; 107 | margin-top: 0.25rem; 108 | margin-bottom: 0.25rem; 109 | text-align: center; 110 | } 111 | 112 | .description-container { 113 | display: flex; 114 | } 115 | 116 | .description-left { 117 | flex: 0 0 40%; 118 | } 119 | 120 | .description-right { 121 | flex: 1; 122 | } 123 | 124 | 125 | /* 126 | Header 127 | ====== 128 | */ 129 | 130 | #navbar-main { 131 | background-color: #F1F1F1!important; 132 | border-bottom: 3px solid #16255F; 133 | height: 70px; 134 | } 135 | 136 | .navbar-brand { 137 | padding: 0.25rem; 138 | } 139 | 140 | 141 | /* 142 | Body 143 | ==== 144 | */ 145 | 146 | body { 147 | background-color: #FAFAFA!important; 148 | color: #353D46!important; 149 | } 150 | 151 | 152 | /* 153 | Left side bar 154 | ============= 155 | */ 156 | 157 | .form-control { 158 | background-color: #FAFAFA!important; 159 | } 160 | 161 | [type=search] { 162 | background-color: #FAFAFA; 163 | } 164 | 165 | 166 | /* 167 | Headings 168 | ======== 169 | */ 170 | 171 | h1, 172 | h2, 173 | h3, 174 | h4, 175 | h5, 176 | h6 { 177 | color: #062161!important; 178 | } 179 | 180 | /* 181 | Releases dropdown 182 | ======== 183 | */ 184 | 185 | li.dropdown { 186 | margin-right: auto; 187 | } 188 | 189 | .dropdown { 190 | list-style: none; 191 | } 192 | 193 | .dropdown > a:hover { 194 | COLOR: #00194f; 195 | TEXT-DECORATION: none; 196 | font-weight: none; 197 | } 198 | 199 | div.rst-versions { 200 | overflow: hidden!important; 201 | } 202 | 203 | .deprecated-warning { 204 | font-family: sans-serif; 205 | font-weight:bold; 206 | text-align: center; 207 | background-color: pink; 208 | color: firebrick; 209 | right:0em; 210 | z-index:1000000000000; 211 | } -------------------------------------------------------------------------------- /docs/source/_static/img/0-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/0-0.png -------------------------------------------------------------------------------- /docs/source/_static/img/0-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/0-1.png -------------------------------------------------------------------------------- /docs/source/_static/img/0-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/0-2.png -------------------------------------------------------------------------------- /docs/source/_static/img/0-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/0-3.png -------------------------------------------------------------------------------- /docs/source/_static/img/0-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/0-4.png -------------------------------------------------------------------------------- /docs/source/_static/img/1-0.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/1-0.png -------------------------------------------------------------------------------- /docs/source/_static/img/1-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/1-1.png -------------------------------------------------------------------------------- /docs/source/_static/img/1-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/1-2.png -------------------------------------------------------------------------------- /docs/source/_static/img/1-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/1-3.png -------------------------------------------------------------------------------- /docs/source/_static/img/1-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/1-4.png -------------------------------------------------------------------------------- /docs/source/_static/img/2-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/2-0.png -------------------------------------------------------------------------------- /docs/source/_static/img/2-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/2-1.png -------------------------------------------------------------------------------- /docs/source/_static/img/2-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/2-2.png -------------------------------------------------------------------------------- /docs/source/_static/img/2-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/2-3.png -------------------------------------------------------------------------------- /docs/source/_static/img/2-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/2-4.png -------------------------------------------------------------------------------- /docs/source/_static/img/3-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/3-0.png 
-------------------------------------------------------------------------------- /docs/source/_static/img/3-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/3-1.png -------------------------------------------------------------------------------- /docs/source/_static/img/3-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/3-2.png -------------------------------------------------------------------------------- /docs/source/_static/img/3-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/3-3.png -------------------------------------------------------------------------------- /docs/source/_static/img/3-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/3-4.png -------------------------------------------------------------------------------- /docs/source/_static/img/4-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/4-0.png -------------------------------------------------------------------------------- /docs/source/_static/img/4-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/4-1.png -------------------------------------------------------------------------------- /docs/source/_static/img/4-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/4-2.png -------------------------------------------------------------------------------- /docs/source/_static/img/4-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/4-3.png -------------------------------------------------------------------------------- /docs/source/_static/img/4-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/4-4.png -------------------------------------------------------------------------------- /docs/source/_static/img/composite_map-large.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/composite_map-large.jpg -------------------------------------------------------------------------------- /docs/source/_static/img/composite_map.gif: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/composite_map.gif -------------------------------------------------------------------------------- /docs/source/_static/img/composite_map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/composite_map.png -------------------------------------------------------------------------------- /docs/source/_static/img/dask-logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /docs/source/_static/img/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/favicon.ico -------------------------------------------------------------------------------- /docs/source/_static/img/img001.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/img001.png -------------------------------------------------------------------------------- /docs/source/_static/img/makepath-supply-chain-international-shipping.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/_static/img/makepath-supply-chain-international-shipping.png -------------------------------------------------------------------------------- /docs/source/_static/img/numba-logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /docs/source/_static/img/python-logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /docs/source/_templates/autosummary/module.rst: -------------------------------------------------------------------------------- 1 | {{ fullname }} 2 | {{ underline }} 3 | 4 | .. automodule:: {{ fullname }} 5 | :members: -------------------------------------------------------------------------------- /docs/source/_templates/description_panel.html: -------------------------------------------------------------------------------- 1 |
2 |
3 | Fast, Accurate Python library for Raster Operations 4 |
5 |
6 | Extensible with Numba 7 |
8 |
9 | Scalable with Dask 10 |
11 |
12 |
13 |
14 |
15 |
16 | Free of GDAL / GEOS Dependencies 17 |
18 |
19 | General-Purpose Spatial Processing,
Geared Towards GIS Professionals. 20 |
21 |
22 | 23 |
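The description panel above condenses the library's pitch into three architectural claims: fast raster operations, extensible with Numba, scalable with Dask, and free of GDAL/GEOS dependencies. The sketch below illustrates what "Scalable with Dask" is understood to mean in practice. It assumes (as the backend parameters used in the benchmarks suggest) that a surface function such as `xrspatial.slope` accepts a Dask-backed `xarray.DataArray` and returns a lazy result; the array shape, chunk sizes, and coordinates are invented for illustration.

```python
# Illustrative sketch only (not part of the repo): passing a Dask-backed
# DataArray straight into an xrspatial surface function.
import dask.array as da
import numpy as np
import xarray as xr

from xrspatial import slope

# Synthetic "elevation" raster, chunked so Dask can work block by block.
ys = np.linspace(0, 40_000, 4_000)
xs = np.linspace(0, 40_000, 4_000)
elevation = xr.DataArray(
    da.random.random((4_000, 4_000), chunks=(1_000, 1_000)),
    coords={"y": ys, "x": xs},
    dims=("y", "x"),
    name="elevation",
)

slope_agg = slope(elevation)      # stays lazy while the task graph is built
slope_vals = slope_agg.compute()  # runs the chunked computation in parallel
```

The same call works unchanged on a plain NumPy-backed DataArray, which is the single-machine pattern documented in `docs/source/getting_started/usage.rst`.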
-------------------------------------------------------------------------------- /docs/source/_templates/docs-navbar.html: -------------------------------------------------------------------------------- 1 |
2 | 3 | 4 | {% if logo %} 5 | {% if not theme_logo_link %} 6 | 7 | 8 | 9 | {% elif theme_logo_link[:4] == 'http' %} 10 | 11 | 12 | 13 | {% else %} 14 | 15 | 16 | 17 | {% endif %} 18 | {% else %} 19 | 20 |

{{ project }}

21 |
22 | {% endif %} 23 | 33 | 36 | 37 | {% set navbar_class, navbar_align = navbar_align_class() %} 38 | 56 |
57 | -------------------------------------------------------------------------------- /docs/source/_templates/versions.html: -------------------------------------------------------------------------------- 1 |
2 |
3 | The documentation on this page refers to a PREVIOUS VERSION. For the latest release, go to https://xarray-spatial.org/
5 | 6 | Versions 7 | 8 | 9 |
10 |
11 |
{{ _('Versions') }}
12 | {% for release in releases %} 13 |
{{ release[0] }}
14 | {% endfor %} 15 |
16 |
17 |
18 |
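The (heavily stripped) `versions.html` template above builds its releases dropdown by looping over a `releases` sequence and rendering `release[0]` for each entry. Where that sequence comes from is not visible in this dump; the `conf.py` that follows only puts `css_files` and `default_mode` into `html_context`. One plausible wiring, shown purely as an assumption, is to expose the list through Sphinx's `html_context`, since everything placed there becomes a top-level variable inside the Jinja templates:

```python
# Hypothetical sketch only: the dumped conf.py does not show how
# ``releases`` reaches _templates/versions.html. The template reads
# ``release[0]``, so each entry is assumed to be a (label, url)-style pair.
html_context = {
    "css_files": ["_static/css/styles.css"],
    "default_mode": "light",
    # assumed shape consumed by the versions dropdown
    "releases": [
        ("latest", "https://xarray-spatial.org/"),
        ("previous-release", "https://xarray-spatial.org/previous-release/"),
    ],
}
```

In practice the list could just as well be generated at build time (for example from git tags) before being handed to Sphinx; the template itself only requires that `releases` be iterable.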
-------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/master/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | from datetime import datetime 12 | import json 13 | # If extensions (or modules to document with autodoc) are in another directory, 14 | # add these directories to sys.path here. If the directory is relative to the 15 | # documentation root, use os.path.abspath to make it absolute, like shown here. 16 | # 17 | import os 18 | import sys 19 | 20 | import xrspatial 21 | 22 | sys.path.insert(0, os.path.abspath('../..')) 23 | 24 | # -- Project information ----------------------------------------------------- 25 | 26 | project = u'xarray_spatial' 27 | copyright = u'2020-{}, makepath'.format(datetime.now().year) 28 | author = u'makepath' 29 | 30 | version = release = xrspatial.__version__ 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # If your documentation needs a minimal Sphinx version, state it here. 35 | # 36 | # needs_sphinx = '1.0' 37 | 38 | # Add any Sphinx extension module names here, as strings. They can be 39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 40 | # ones. 41 | extensions = [ 42 | 'sphinx_panels', 43 | 'sphinx.ext.autodoc', 44 | 'sphinx.ext.napoleon', 45 | 'sphinx.ext.viewcode', 46 | 'sphinx.ext.autosummary', 47 | 'nbsphinx', 48 | 'sphinx.ext.mathjax', 49 | 'matplotlib.sphinxext.plot_directive', 50 | ] 51 | 52 | # Add any paths that contain templates here, relative to this directory. 53 | templates_path = ['_templates'] 54 | 55 | # The suffix(es) of source filenames. 56 | # You can specify multiple suffix as a list of string: 57 | # 58 | # source_suffix = ['.rst', '.md'] 59 | source_suffix = '.rst' 60 | 61 | # The master toctree document. 62 | master_doc = 'index' 63 | 64 | # The language for content autogenerated by Sphinx. Refer to documentation 65 | # for a list of supported languages. 66 | # 67 | # This is also used if you do content translation via gettext catalogs. 68 | # Usually you set "language" from the command line for these cases. 69 | language = 'en' 70 | 71 | # List of patterns, relative to source directory, that match files and 72 | # directories to ignore when looking for source files. 73 | # This pattern also affects html_static_path and html_extra_path. 74 | exclude_patterns = [] 75 | 76 | # The name of the Pygments (syntax highlighting) style to use. 77 | pygments_style = None 78 | 79 | 80 | # -- Options for HTML output ------------------------------------------------- 81 | 82 | # The theme to use for HTML and HTML Help pages. See the documentation for 83 | # a list of builtin themes. 84 | # 85 | 86 | html_theme = 'pydata_sphinx_theme' 87 | 88 | html_logo = '_static/img/Xarray-Spatial-logo.svg' 89 | 90 | html_favicon = '_static/img/favicon.ico' 91 | 92 | # Theme options are theme-specific and customize the look and feel of a theme 93 | # further. For a list of options available for each theme, see the 94 | # documentation. 
95 | # 96 | html_theme_options = { 97 | "external_links": [], 98 | "github_url": "https://github.com/makepath/xarray-spatial", 99 | "navbar_end": ["navbar-icon-links"], 100 | } 101 | 102 | html_context = { 103 | 'css_files': ['_static/css/styles.css'], 104 | "default_mode": "light", 105 | } 106 | 107 | autosummary_generate = True 108 | 109 | exclude_patterns = ['_build', '_templates'] 110 | 111 | # Add any paths that contain custom static files (such as style sheets) here, 112 | # relative to this directory. They are copied after the builtin static files, 113 | # so a file named "default.css" will overwrite the builtin "default.css". 114 | html_static_path = ['_static'] 115 | 116 | # Custom sidebar templates, must be a dictionary that maps document names 117 | # to template names. 118 | # 119 | # The default sidebars (for documents that don't match any pattern) are 120 | # defined by theme itself. Builtin themes are using these templates by 121 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 122 | # 'searchbox.html']``. 123 | # 124 | 125 | 126 | # -- Options for HTMLHelp output --------------------------------------------- 127 | 128 | # Output file base name for HTML help builder. 129 | htmlhelp_basename = 'xarray_spatialdoc' 130 | 131 | 132 | # -- Options for LaTeX output ------------------------------------------------ 133 | 134 | latex_elements = { 135 | # The paper size ('letterpaper' or 'a4paper'). 136 | # 137 | # 'papersize': 'letterpaper', 138 | 139 | # The font size ('10pt', '11pt' or '12pt'). 140 | # 141 | # 'pointsize': '10pt', 142 | 143 | # Additional stuff for the LaTeX preamble. 144 | # 145 | # 'preamble': '', 146 | 147 | # Latex figure (float) alignment 148 | # 149 | # 'figure_align': 'htbp', 150 | } 151 | 152 | # Grouping the document tree into LaTeX files. List of tuples 153 | # (source start file, target name, title, 154 | # author, documentclass [howto, manual, or own class]). 155 | latex_documents = [ 156 | (master_doc, 'xarray_spatial.tex', u'xarray\\_spatial Documentation', 157 | u'KristinePetrosyan', 'manual'), 158 | ] 159 | 160 | 161 | # -- Options for manual page output ------------------------------------------ 162 | 163 | # One entry per manual page. List of tuples 164 | # (source start file, name, description, authors, manual section). 165 | man_pages = [ 166 | (master_doc, 'xarray_spatial', u'xarray_spatial Documentation', 167 | [author], 1) 168 | ] 169 | 170 | 171 | # -- Options for Texinfo output ---------------------------------------------- 172 | 173 | # Grouping the document tree into Texinfo files. List of tuples 174 | # (source start file, target name, title, author, 175 | # dir menu entry, description, category) 176 | texinfo_documents = [ 177 | (master_doc, 'xarray_spatial', u'xarray_spatial Documentation', 178 | author, 'xarray_spatial', 'One line description of project.', 179 | 'Miscellaneous'), 180 | ] 181 | 182 | 183 | # -- Options for Epub output ------------------------------------------------- 184 | 185 | # Bibliographic Dublin Core info. 186 | epub_title = project 187 | 188 | # The unique identifier of the text. This can be a ISBN number 189 | # or the project homepage. 190 | # 191 | # epub_identifier = '' 192 | 193 | # A unique identification for the text. 194 | # 195 | # epub_uid = '' 196 | 197 | # A list of files that should not be packed into the epub file. 
198 | epub_exclude_files = ['search.html'] 199 | 200 | # sphinxext config 201 | plot_html_show_source_link = False 202 | 203 | # nbsphinx configuration 204 | nbsphinx_allow_errors = True 205 | -------------------------------------------------------------------------------- /docs/source/getting_started/index.rst: -------------------------------------------------------------------------------- 1 | .. _getting_started: 2 | 3 | *************** 4 | Getting started 5 | *************** 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | 10 | installation 11 | raster_huh 12 | usage 13 | -------------------------------------------------------------------------------- /docs/source/getting_started/installation.rst: -------------------------------------------------------------------------------- 1 | .. _getting_started.installation: 2 | 3 | ************ 4 | Installation 5 | ************ 6 | 7 | .. code-block:: bash 8 | 9 | # via pip 10 | pip install xarray-spatial 11 | 12 | # via conda 13 | conda install -c conda-forge xarray-spatial -------------------------------------------------------------------------------- /docs/source/getting_started/raster_huh.rst: -------------------------------------------------------------------------------- 1 | .. _getting_started.raster_huh: 2 | 3 | *********** 4 | Raster-huh? 5 | *********** 6 | 7 | Rasters are regularly gridded datasets like GeoTIFFs, JPGs, and PNGs. 8 | 9 | In the GIS world, rasters are used for representing continuous phenomena (e.g. elevation, rainfall, distance), either directly as numerical values, or as RGB images created for humans to view. Rasters typically have two spatial dimensions, but may have any number of other dimensions (time, type of measurement, etc.) 10 | -------------------------------------------------------------------------------- /docs/source/getting_started/usage.rst: -------------------------------------------------------------------------------- 1 | .. _getting_started.usage: 2 | 3 | ***** 4 | Usage 5 | ***** 6 | 7 | Basic Pattern 8 | ============== 9 | 10 | .. code-block:: python 11 | 12 | import xarray as xr 13 | from xrspatial import hillshade 14 | 15 | my_dataarray = xr.DataArray(...) 16 | hillshaded_dataarray = hillshade(my_dataarray) 17 | 18 | Check out the user guide `here `_. 19 | 20 | 21 | Dependencies 22 | ============ 23 | 24 | ``xarray-spatial`` currently depends on Datashader, but will soon be updated to depend only on ``xarray`` and ``numba``\ , while still being able to make use of Datashader output when available. 25 | 26 | 27 | .. image:: ../_static/img/dependencies.svg 28 | :target: ../_static/img/dependencies.svg 29 | :alt: title 30 | 31 | 32 | Notes on GDAL 33 | ============= 34 | 35 | Within the Python ecosystem, many geospatial libraries interface with the GDAL C++ library for raster and vector input, output, and analysis (e.g. rasterio, rasterstats, geopandas). GDAL is robust, performant, and has decades of great work behind it. For years, off-loading expensive computations to the C/C++ level in this way has been a key performance strategy for Python libraries (obviously...Python itself is implemented in C!). 36 | 37 | However, wrapping GDAL has a few drawbacks for Python developers and data scientists: 38 | 39 | 40 | * GDAL can be a pain to build / install. 41 | * GDAL is hard for Python developers/analysts to extend, because it requires understanding multiple languages. 42 | * GDAL's data structures are defined at the C/C++ level, which constrains how they can be accessed from Python. 
43 | 44 | With the introduction of projects like Numba, Python gained new ways to provide high-performance code directly in Python, without depending on or being constrained by separate C/C++ extensions. ``xarray-spatial`` implements algorithms using Numba and Dask, making all of its source code available as pure Python without any "black box" barriers that obscure what is going on and prevent full optimization. Projects can make use of the functionality provided by ``xarray-spatial`` where available, while still using GDAL where required for other tasks. 45 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. _index: 2 | 3 | **************************** 4 | Xarray-spatial documentation 5 | **************************** 6 | 7 | **Xarray-Spatial implements common raster analysis functions using Numba and provides an easy-to-install, easy-to-extend codebase for raster analysis.** 8 | 9 | xarray-spatial grew out of the `Datashader project `_, which provides fast rasterization of vector data (points, lines, polygons, meshes, and rasters) for use with xarray-spatial. 10 | 11 | xarray-spatial does not depend on GDAL / GEOS, which makes it fully extensible in Python but does limit the breadth of operations that can be covered. xarray-spatial is meant to include the core raster-analysis functions needed for GIS developers / analysts, implemented independently of the non-Python geo stack. 12 | 13 | ------- 14 | 15 | .. raw:: html 16 | :file: _templates/description_panel.html 17 | 18 | ------- 19 | 20 | .. panels:: 21 | :body: examples-item 22 | :container: container-fluid examples-container 23 | :column: examples-card 24 | 25 | --- 26 | .. image:: _static/img/0-0.png 27 | :target: https://github.com/makepath/xarray-spatial/blob/master/examples 28 | --- 29 | .. image:: _static/img/0-1.png 30 | :target: user_guide/proximity.html 31 | --- 32 | .. image:: _static/img/0-2.png 33 | :target: user_guide/proximity.html 34 | --- 35 | .. image:: _static/img/0-3.png 36 | :target: user_guide/proximity.html 37 | --- 38 | .. image:: _static/img/0-4.png 39 | :target: https://github.com/makepath/xarray-spatial/blob/master/examples/pharmacy-deserts.ipynb 40 | --- 41 | .. image:: _static/img/1-0.png 42 | :target: user_guide/surface.html 43 | --- 44 | .. image:: _static/img/1-1.png 45 | :target: user_guide/surface.html 46 | --- 47 | .. image:: _static/img/1-2.png 48 | :target: user_guide/surface.html 49 | --- 50 | .. image:: _static/img/1-3.png 51 | :target: user_guide/surface.html 52 | --- 53 | .. image:: _static/img/1-4.png 54 | :target: https://github.com/makepath/xarray-spatial/blob/master/examples/pharmacy-deserts.ipynb 55 | --- 56 | .. image:: _static/img/2-0.png 57 | :target: user_guide/surface.html 58 | --- 59 | .. image:: _static/img/2-1.png 60 | :target: user_guide/multispectral.html 61 | --- 62 | .. image:: _static/img/2-2.png 63 | :target: user_guide/multispectral.html 64 | --- 65 | .. image:: _static/img/2-3.png 66 | :target: user_guide/classification.html 67 | --- 68 | .. image:: _static/img/2-4.png 69 | :target: https://github.com/makepath/xarray-spatial/blob/master/examples/pharmacy-deserts.ipynb 70 | --- 71 | .. image:: _static/img/3-0.png 72 | :target: https://github.com/makepath/xarray-spatial/blob/master/examples 73 | --- 74 | .. image:: _static/img/3-1.png 75 | :target: https://github.com/makepath/xarray-spatial/blob/master/examples 76 | --- 77 | .. 
image:: _static/img/3-2.png 78 | :target: user_guide/classification.html 79 | --- 80 | .. image:: _static/img/3-3.png 81 | :target: https://github.com/makepath/xarray-spatial/blob/master/examples/pharmacy-deserts.ipynb 82 | --- 83 | .. image:: _static/img/3-4.png 84 | :target: https://github.com/makepath/xarray-spatial/blob/master/examples 85 | --- 86 | .. image:: _static/img/4-0.png 87 | :target: https://github.com/makepath/xarray-spatial/blob/master/examples/Pathfinding_Austin_Road_Network.ipynb 88 | --- 89 | .. image:: _static/img/4-1.png 90 | :target: user_guide/surface.html#Hillshade 91 | --- 92 | .. image:: _static/img/4-2.png 93 | :target: user_guide/surface.html#Hillshade 94 | --- 95 | .. image:: _static/img/4-3.png 96 | :target: user_guide/surface.html#Slope 97 | --- 98 | .. image:: _static/img/4-4.png 99 | :target: https://github.com/makepath/xarray-spatial/blob/master/examples/pharmacy-deserts.ipynb#Create-a-%22Distance-to-Nearest-Pharmacy%22-Layer-&-Classify-into-5-Groups 100 | 101 | .. toctree:: 102 | :maxdepth: 1 103 | :hidden: 104 | :titlesonly: 105 | 106 | getting_started/index 107 | user_guide/index 108 | reference/index 109 | -------------------------------------------------------------------------------- /docs/source/reference/classification.rst: -------------------------------------------------------------------------------- 1 | .. _reference.classification: 2 | 3 | ************** 4 | Classification 5 | ************** 6 | 7 | Equal Interval 8 | ============== 9 | .. autosummary:: 10 | :toctree: _autosummary 11 | 12 | xrspatial.classify.equal_interval 13 | 14 | Natural Breaks 15 | ============== 16 | .. autosummary:: 17 | :toctree: _autosummary 18 | 19 | xrspatial.classify.natural_breaks 20 | 21 | Reclassify 22 | ========== 23 | .. autosummary:: 24 | :toctree: _autosummary 25 | 26 | xrspatial.classify.reclassify 27 | 28 | Quantile 29 | ======== 30 | .. autosummary:: 31 | :toctree: _autosummary 32 | 33 | xrspatial.classify.quantile 34 | -------------------------------------------------------------------------------- /docs/source/reference/focal.rst: -------------------------------------------------------------------------------- 1 | .. _reference.focal: 2 | 3 | ***** 4 | Focal 5 | ***** 6 | 7 | Apply 8 | ===== 9 | .. autosummary:: 10 | :toctree: _autosummary 11 | 12 | xrspatial.focal.apply 13 | 14 | Hotspots 15 | ======== 16 | .. autosummary:: 17 | :toctree: _autosummary 18 | 19 | xrspatial.focal.hotspots 20 | 21 | Mean 22 | ==== 23 | .. autosummary:: 24 | :toctree: _autosummary 25 | 26 | xrspatial.focal.mean 27 | 28 | 29 | Focal Statistics 30 | ================ 31 | .. autosummary:: 32 | :toctree: _autosummary 33 | 34 | xrspatial.convolution.convolution_2d 35 | xrspatial.convolution.annulus_kernel 36 | xrspatial.convolution.calc_cellsize 37 | xrspatial.convolution.circle_kernel 38 | xrspatial.focal.custom_kernel 39 | xrspatial.focal.focal_stats 40 | -------------------------------------------------------------------------------- /docs/source/reference/index.rst: -------------------------------------------------------------------------------- 1 | .. _reference: 2 | 3 | ********* 4 | Reference 5 | ********* 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | 10 | classification 11 | focal 12 | multispectral 13 | pathfinding 14 | proximity 15 | surface 16 | zonal 17 | local 18 | -------------------------------------------------------------------------------- /docs/source/reference/local.rst: -------------------------------------------------------------------------------- 1 | .. 
_local: 2 | 3 | ***** 4 | Local 5 | ***** 6 | 7 | Cell Stats 8 | ========== 9 | .. autosummary:: 10 | :toctree: _autosummary 11 | 12 | xrspatial.local.cell_stats 13 | 14 | Combine 15 | ======= 16 | .. autosummary:: 17 | :toctree: _autosummary 18 | 19 | xrspatial.local.combine 20 | 21 | Lesser Frequency 22 | ================ 23 | .. autosummary:: 24 | :toctree: _autosummary 25 | 26 | xrspatial.local.lesser_frequency 27 | 28 | Equal Frequency 29 | =============== 30 | .. autosummary:: 31 | :toctree: _autosummary 32 | 33 | xrspatial.local.equal_frequency 34 | 35 | Greater Frequency 36 | ================= 37 | .. autosummary:: 38 | :toctree: _autosummary 39 | 40 | xrspatial.local.greater_frequency 41 | 42 | Lowest Position 43 | =============== 44 | .. autosummary:: 45 | :toctree: _autosummary 46 | 47 | xrspatial.local.lowest_position 48 | 49 | Highest Position 50 | ================ 51 | .. autosummary:: 52 | :toctree: _autosummary 53 | 54 | xrspatial.local.highest_position 55 | 56 | Popularity 57 | ========== 58 | .. autosummary:: 59 | :toctree: _autosummary 60 | 61 | xrspatial.local.popularity 62 | 63 | Rank 64 | ==== 65 | .. autosummary:: 66 | :toctree: _autosummary 67 | 68 | xrspatial.local.rank 69 | -------------------------------------------------------------------------------- /docs/source/reference/multispectral.rst: -------------------------------------------------------------------------------- 1 | .. _reference.multispectral: 2 | 3 | ************* 4 | Multispectral 5 | ************* 6 | 7 | Atmospherically Resistant Vegetation Index (ARVI) 8 | ================================================= 9 | .. autosummary:: 10 | :toctree: _autosummary 11 | 12 | xrspatial.multispectral.arvi 13 | 14 | Enhanced Built-Up and Bareness Index (EBBI) 15 | =========================================== 16 | .. autosummary:: 17 | :toctree: _autosummary 18 | 19 | xrspatial.multispectral.ebbi 20 | 21 | Enhanced Vegetation Index (EVI) 22 | =============================== 23 | .. autosummary:: 24 | :toctree: _autosummary 25 | 26 | xrspatial.multispectral.evi 27 | 28 | Green Chlorophyll Index (GCI) 29 | ============================= 30 | .. autosummary:: 31 | :toctree: _autosummary 32 | 33 | xrspatial.multispectral.gci 34 | 35 | Normalized Burn Ratio (NBR) 36 | =========================== 37 | .. autosummary:: 38 | :toctree: _autosummary 39 | 40 | xrspatial.multispectral.nbr 41 | 42 | Normalized Burn Ratio 2 (NBR2) 43 | ============================== 44 | .. autosummary:: 45 | :toctree: _autosummary 46 | 47 | xrspatial.multispectral.nbr2 48 | 49 | Normalized Difference Moisture Index (NDMI) 50 | =========================================== 51 | .. autosummary:: 52 | :toctree: _autosummary 53 | 54 | xrspatial.multispectral.ndmi 55 | 56 | Normalized Difference Vegetation Index (NDVI) 57 | ============================================= 58 | .. autosummary:: 59 | :toctree: _autosummary 60 | 61 | xrspatial.multispectral.ndvi 62 | 63 | Soil Adjusted Vegetation Index (SAVI) 64 | ===================================== 65 | .. autosummary:: 66 | :toctree: _autosummary 67 | 68 | xrspatial.multispectral.savi 69 | 70 | Structure Insensitive Pigment Index (SIPI) 71 | ========================================== 72 | ..
autosummary:: 73 | :toctree: _autosummary 74 | 75 | xrspatial.multispectral.sipi 76 | xrspatial.multispectral.true_color 77 | -------------------------------------------------------------------------------- /docs/source/reference/pathfinding.rst: -------------------------------------------------------------------------------- 1 | .. _pathfinding: 2 | 3 | *********** 4 | Pathfinding 5 | *********** 6 | 7 | A* Pathfinding 8 | ============== 9 | .. autosummary:: 10 | :toctree: _autosummary 11 | 12 | xrspatial.pathfinding.a_star_search 13 | -------------------------------------------------------------------------------- /docs/source/reference/proximity.rst: -------------------------------------------------------------------------------- 1 | .. _proximity: 2 | 3 | ********* 4 | Proximity 5 | ********* 6 | 7 | Allocation 8 | ========== 9 | .. autosummary:: 10 | :toctree: _autosummary 11 | 12 | xrspatial.proximity.allocation 13 | 14 | Direction 15 | ========== 16 | .. autosummary:: 17 | :toctree: _autosummary 18 | 19 | xrspatial.proximity.direction 20 | 21 | Proximity 22 | ========== 23 | .. autosummary:: 24 | :toctree: _autosummary 25 | 26 | xrspatial.proximity.euclidean_distance 27 | xrspatial.proximity.great_circle_distance 28 | xrspatial.proximity.manhattan_distance 29 | xrspatial.proximity.proximity 30 | -------------------------------------------------------------------------------- /docs/source/reference/surface.rst: -------------------------------------------------------------------------------- 1 | .. _surface: 2 | 3 | ******* 4 | Surface 5 | ******* 6 | 7 | Aspect 8 | ====== 9 | .. autosummary:: 10 | :toctree: _autosummary 11 | 12 | xrspatial.aspect.aspect 13 | 14 | Curvature 15 | ========= 16 | .. autosummary:: 17 | :toctree: _autosummary 18 | 19 | xrspatial.curvature.curvature 20 | 21 | Hillshade 22 | ========= 23 | .. autosummary:: 24 | :toctree: _autosummary 25 | 26 | xrspatial.hillshade.hillshade 27 | 28 | Slope 29 | ===== 30 | .. autosummary:: 31 | :toctree: _autosummary 32 | 33 | xrspatial.slope.slope 34 | 35 | Terrain Generation 36 | ================== 37 | .. autosummary:: 38 | :toctree: _autosummary 39 | 40 | xrspatial.terrain.generate_terrain 41 | 42 | Viewshed 43 | ======== 44 | .. autosummary:: 45 | :toctree: _autosummary 46 | 47 | xrspatial.viewshed.viewshed 48 | 49 | Perlin Noise 50 | ============ 51 | .. autosummary:: 52 | :toctree: _autosummary 53 | 54 | xrspatial.perlin.perlin 55 | 56 | Bump Mapping 57 | ============ 58 | .. autosummary:: 59 | :toctree: _autosummary 60 | 61 | xrspatial.bump.bump 62 | -------------------------------------------------------------------------------- /docs/source/reference/zonal.rst: -------------------------------------------------------------------------------- 1 | .. _zonal: 2 | 3 | ***** 4 | Zonal 5 | ***** 6 | 7 | Apply 8 | ===== 9 | .. autosummary:: 10 | :toctree: _autosummary 11 | 12 | xrspatial.zonal.apply 13 | 14 | Crop 15 | ==== 16 | .. autosummary:: 17 | :toctree: _autosummary 18 | 19 | xrspatial.zonal.crop 20 | 21 | Regions 22 | ======= 23 | .. autosummary:: 24 | :toctree: _autosummary 25 | 26 | xrspatial.zonal.regions 27 | 28 | Trim 29 | ==== 30 | .. autosummary:: 31 | :toctree: _autosummary 32 | 33 | xrspatial.zonal.trim 34 | 35 | Zonal Statistics 36 | ================ 37 | .. autosummary:: 38 | :toctree: _autosummary 39 | 40 | xrspatial.zonal.get_full_extent 41 | xrspatial.zonal.stats 42 | xrspatial.zonal.suggest_zonal_canvas 43 | 44 | Zonal Cross Tabulate 45 | ==================== 46 | .. 
autosummary:: 47 | :toctree: _autosummary 48 | 49 | xrspatial.zonal.crosstab 50 | -------------------------------------------------------------------------------- /docs/source/user_guide/data/LC80030172015001LGN00_B2.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/user_guide/data/LC80030172015001LGN00_B2.tiff -------------------------------------------------------------------------------- /docs/source/user_guide/data/LC80030172015001LGN00_B3.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/user_guide/data/LC80030172015001LGN00_B3.tiff -------------------------------------------------------------------------------- /docs/source/user_guide/data/LC80030172015001LGN00_B4.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/user_guide/data/LC80030172015001LGN00_B4.tiff -------------------------------------------------------------------------------- /docs/source/user_guide/data/LC80030172015001LGN00_B5.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/docs/source/user_guide/data/LC80030172015001LGN00_B5.tiff -------------------------------------------------------------------------------- /docs/source/user_guide/index.rst: -------------------------------------------------------------------------------- 1 | .. _user_guide: 2 | 3 | ********** 4 | User Guide 5 | ********** 6 | 7 | .. toctree:: 8 | :maxdepth: 1 9 | 10 | classification 11 | focal 12 | multispectral 13 | pathfinding 14 | proximity 15 | surface 16 | zonal 17 | local 18 | -------------------------------------------------------------------------------- /examples/Pathfinding_Austin_Road_Network.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Path Finding in the City of Austin\n", 8 | "\n", 9 | "This notebook demonstrates pathfinding along the city of Austin street network using Xarray-spatial's `pathfinding` module.\n", 10 | "The a_star_search function provides the shortest path between any two points." 11 | ] 12 | }, 13 | { 14 | "cell_type": "markdown", 15 | "metadata": {}, 16 | "source": [ 17 | "#### Setup:\n", 18 | "\n", 19 | "First, we'll need to import some packages: these include the basic array manipulation ones, \n", 20 | "as well as some geospatial-focused ones.\n", 21 | "We'll also grab a few datashader functions for easy rendering." 
22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": null, 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "import datashader as ds\n", 31 | "from datashader.transfer_functions import shade, stack, dynspread, set_background\n", 32 | "import geopandas\n", 33 | "import numpy as np\n", 34 | "import pandas as pd\n", 35 | "import spatialpandas\n", 36 | "import xarray as xa\n", 37 | "\n", 38 | "from xrspatial import a_star_search" 39 | ] 40 | }, 41 | { 42 | "cell_type": "markdown", 43 | "metadata": {}, 44 | "source": [ 45 | "### Load data\n", 46 | "\n", 47 | "Now, we're ready to load up the data and transform it into a format we can work with.\n", 48 | "\n", 49 | "To download the examples data, run the command `xrspatial examples` in your terminal. All the data will be stored in your current directory inside a folder named `xrspatial-examples`." 50 | ] 51 | }, 52 | { 53 | "cell_type": "markdown", 54 | "metadata": {}, 55 | "source": [ 56 | "We'll start by opening the shapefile, transforming the crs (coordinate reference system) to the commonly-used longitude/latitude, \n", 57 | "and, after a quick clean-up, transforming it to a spatialpandas GeoDataFrame.\n", 58 | "\n", 59 | "Now our data is ready to be aggregated to an xarray DataArray raster." 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "execution_count": null, 65 | "metadata": {}, 66 | "outputs": [], 67 | "source": [ 68 | "streets = geopandas.read_file(\n", 69 | " \"data/geo_export_9c395dda-0b29-41ec-89b4-a51a898f7104.shp\"\n", 70 | ")\n", 71 | "streets = streets.to_crs(\"EPSG:4326\")\n", 72 | "streets = streets.explode(\"geometry\").reset_index(drop=True)\n", 73 | "streets_spd = spatialpandas.GeoDataFrame(streets, geometry=\"geometry\")" 74 | ] 75 | }, 76 | { 77 | "cell_type": "markdown", 78 | "metadata": {}, 79 | "source": [ 80 | "### Define study area (find range of x and y) and aggregate:\n", 81 | "\n", 82 | "To finish off our set-up:\n", 83 | "- We'll define a study area, with xmin, xmax, ymin, and ymax; this set the x, y coordinates we'll be using in our aggregate.\n", 84 | "- We'll set up a datashader Canvas object, which provides an easy frame for setting up a new raster and aggregating data to it.\n", 85 | "- Finally, we'll aggregate the streets data into a lines raster with Canvas.line.\n", 86 | "\n", 87 | "- We also set up the start and goal point (y, x) coordinates, and set up a DataFrame and aggregation for visualization.\n", 88 | "\n", 89 | "Some shading and stacking of all of this displays our complete setup below." 
90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": null, 95 | "metadata": {}, 96 | "outputs": [], 97 | "source": [ 98 | "xmin, ymin, xmax, ymax = (\n", 99 | " streets.geometry.bounds.minx.min(),\n", 100 | " streets.geometry.bounds.miny.min(),\n", 101 | " streets.geometry.bounds.maxx.max(),\n", 102 | " streets.geometry.bounds.maxy.max(),\n", 103 | ")\n", 104 | "xrange = (xmin, xmax)\n", 105 | "yrange = (ymin, ymax)\n", 106 | "xrange, yrange" 107 | ] 108 | }, 109 | { 110 | "cell_type": "code", 111 | "execution_count": null, 112 | "metadata": {}, 113 | "outputs": [], 114 | "source": [ 115 | "H, W = 600, 800\n", 116 | "cvs = ds.Canvas(plot_width=W, plot_height=H, x_range=xrange, y_range=yrange)\n", 117 | "\n", 118 | "street_agg = cvs.line(streets_spd, geometry=\"geometry\")\n", 119 | "street_shaded = dynspread(shade(street_agg, cmap=[\"salmon\"]))\n", 120 | "\n", 121 | "# Pick two locations\n", 122 | "start = (30.08214069, -97.73662282)\n", 123 | "goal = (30.17656606, -97.63753489)\n", 124 | "\n", 125 | "start_df = pd.DataFrame({\"x\": [start[1]], \"y\": [start[0]]})\n", 126 | "start_agg = cvs.points(start_df, \"x\", \"y\")\n", 127 | "start_shaded = dynspread(shade(start_agg, cmap=[\"red\"]), threshold=1, max_px=5)\n", 128 | "\n", 129 | "goal_df = pd.DataFrame({\"x\": [goal[1]], \"y\": [goal[0]]})\n", 130 | "goal_agg = cvs.points(goal_df, \"x\", \"y\")\n", 131 | "goal_shaded = dynspread(shade(goal_agg, cmap=[\"lime\"]), threshold=1, max_px=5)\n", 132 | "\n", 133 | "set_background(stack(street_shaded, start_shaded, goal_shaded), \"black\")" 134 | ] 135 | }, 136 | { 137 | "cell_type": "markdown", 138 | "metadata": {}, 139 | "source": [ 140 | "### Shortest path using A* from start location to goal location\n", 141 | "\n", 142 | "Now, we can do some pathfinding:\n", 143 | "\n", 144 | "In `a_star_search`, we'll input the Austin city streets lines aggregate we built above, the start and goal point coordinates, and barriers:\n", 145 | " - Barriers defines all non-crossable points in the raster: for our streets raster, this includes all non-street areas, all of which have 0 set as their value. \n", 146 | "\n", 147 | "We've also set `snap-start` and `snap-goal` to `True`: this helps ensure the start and goal points are set correctly.\n", 148 | "\n", 149 | "The result is a the shortest path al\n", 150 | " " 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": null, 156 | "metadata": {}, 157 | "outputs": [], 158 | "source": [ 159 | "# find the path from start to goal,\n", 160 | "# barriers are uncrossable cells. 
In this case, they are cells with a value of 0\n", 161 | "\n", 162 | "path_agg = a_star_search(\n", 163 | " street_agg, start, goal, barriers=[0], snap_start=True, snap_goal=True\n", 164 | ")\n", 165 | "\n", 166 | "path_shaded = dynspread(shade(path_agg, cmap=[\"green\"]), threshold=1, max_px=1)\n", 167 | "set_background(stack(street_shaded, path_shaded, start_shaded, goal_shaded), \"black\")" 168 | ] 169 | } 170 | ], 171 | "metadata": { 172 | "language_info": { 173 | "name": "python" 174 | } 175 | }, 176 | "nbformat": 4, 177 | "nbformat_minor": 4 178 | } 179 | -------------------------------------------------------------------------------- /examples/animated_hillshade.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | 3 | import datashader as ds 4 | import numpy as np 5 | from datashader.colors import Elevation 6 | from datashader.transfer_functions import shade, stack 7 | 8 | from xrspatial import bump, generate_terrain, hillshade, mean 9 | 10 | W = 600 11 | H = 400 12 | 13 | cvs = ds.Canvas(plot_width=W, plot_height=H, 14 | x_range=(-20e6, 20e6), 15 | y_range=(-20e6, 20e6)) 16 | 17 | terrain = generate_terrain(canvas=cvs) 18 | 19 | 20 | def heights(locations, src, src_range, height=20): 21 | num_bumps = locations.shape[0] 22 | out = np.zeros(num_bumps, dtype=np.uint16) 23 | for r in range(0, num_bumps): 24 | loc = locations[r] 25 | x = loc[0] 26 | y = loc[1] 27 | val = src[y, x] 28 | if val >= src_range[0] and val < src_range[1]: 29 | out[r] = height 30 | return out 31 | 32 | 33 | T = 300000 # Number of trees to add per call 34 | src = terrain.data 35 | trees = bump(W, H, count=T, height_func=partial(heights, src=src, 36 | src_range=(1000, 1300), height=5)) 37 | trees += bump(W, H, count=T//2, height_func=partial( 38 | heights, src=src, src_range=(1300, 1700), height=20)) 39 | trees += bump(W, H, count=T//3, height_func=partial( 40 | heights, src=src, src_range=(1700, 2000), height=5)) 41 | 42 | tree_colorize = trees.copy() 43 | tree_colorize.data[tree_colorize.data == 0] = np.nan 44 | 45 | LAND_CONSTANT = 50.0 46 | 47 | water = terrain.copy() 48 | water.data = np.where(water.data > 0, LAND_CONSTANT, 0) 49 | water = mean(water, passes=50, excludes=[LAND_CONSTANT]) 50 | water.data[water.data == LAND_CONSTANT] = np.nan 51 | 52 | 53 | def create_map(azimuth): 54 | 55 | global cvs 56 | global terrain 57 | global water 58 | global trees 59 | 60 | img = stack(shade(terrain, cmap=Elevation, how='linear'), 61 | shade(water, cmap=['aqua', 'white']), 62 | shade(hillshade(terrain + trees, azimuth=azimuth), 63 | cmap=['black', 'white'], how='linear', alpha=128), 64 | shade(tree_colorize, cmap='limegreen', how='linear') 65 | ) 66 | 67 | print('image created') 68 | 69 | return img.to_pil() 70 | 71 | 72 | def create_map2(): 73 | 74 | global cvs 75 | global terrain 76 | global water 77 | global trees 78 | 79 | img = stack(shade(terrain, cmap=['black', 'white'], how='linear')) 80 | 81 | yield img.to_pil() 82 | 83 | img = stack(shade(terrain, cmap=Elevation, how='linear')) 84 | 85 | yield img.to_pil() 86 | 87 | img = stack(shade(terrain, cmap=Elevation, how='linear'), 88 | shade(hillshade(terrain, azimuth=210), 89 | cmap=['black', 'white'], how='linear', alpha=128), 90 | ) 91 | 92 | yield img.to_pil() 93 | 94 | img = stack(shade(terrain, cmap=Elevation, how='linear'), 95 | shade(water, cmap=['aqua', 'white']), 96 | shade(hillshade(terrain, azimuth=210), 97 | cmap=['black', 'white'], how='linear', alpha=128), 98 | ) 99 | 100 | yield 
img.to_pil() 101 | 102 | img = stack(shade(terrain, cmap=Elevation, how='linear'), 103 | shade(water, cmap=['aqua', 'white']), 104 | shade(hillshade(terrain + trees, azimuth=210), 105 | cmap=['black', 'white'], how='linear', alpha=128), 106 | shade(tree_colorize, cmap='limegreen', how='linear') 107 | ) 108 | 109 | yield img.to_pil() 110 | yield img.to_pil() 111 | yield img.to_pil() 112 | yield img.to_pil() 113 | 114 | 115 | def gif1(): 116 | 117 | images = [] 118 | 119 | for i in np.linspace(0, 360, 6): 120 | images.append(create_map(int(i))) 121 | 122 | images[0].save('animated_hillshade.gif', 123 | save_all=True, append_images=images[1:], 124 | optimize=False, duration=5000, loop=0) 125 | 126 | 127 | def gif2(): 128 | 129 | images = list(create_map2()) 130 | 131 | images[0].save('composite_map.gif', 132 | save_all=True, append_images=images[1:], 133 | optimize=False, duration=1000, loop=0) 134 | 135 | 136 | gif2() 137 | -------------------------------------------------------------------------------- /examples/composite_map.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/examples/composite_map.gif -------------------------------------------------------------------------------- /examples/datasets.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | data: 4 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/facilities.csv 5 | title: 'Pharmacy Facilities Data' 6 | files: facilities.csv 7 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/Street Centerline.zip 8 | title: 'Austin Streets Shapefile' 9 | files: Street_Centerline.zip 10 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/USA_Block_Groups-shp.zip 11 | title: 'USA Block Groups Shapefile' 12 | files: USA_Block_Groups-shp.zip 13 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/USA_Counties-shp.zip 14 | title: 'USA Counties Shapefile' 15 | files: USA_Counties-shp.zip 16 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/LC80030172015001LGN00_B1.tiff 17 | title: 'coastal aerosol' 18 | files: LC80030172015001LGN00_B1.TIF 19 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/LC80030172015001LGN00_B2.tiff 20 | title: 'blue' 21 | files: LC80030172015001LGN00_B2.TIF 22 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/LC80030172015001LGN00_B3.tiff 23 | title: 'green' 24 | files: LC80030172015001LGN00_B3.TIF 25 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/LC80030172015001LGN00_B4.tiff 26 | title: 'red' 27 | files: LC80030172015001LGN00_B4.TIF 28 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/LC80030172015001LGN00_B5.tiff 29 | title: 'nir' 30 | files: LC80030172015001LGN00_B5.TIF 31 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/LC80030172015001LGN00_B6.tiff 32 | title: 'swir' 33 | files: LC80030172015001LGN00_B6.TIF 34 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/LC80030172015001LGN00_B7.tiff 35 | title: 'swir2' 36 | files: LC80030172015001LGN00_B7.TIF 37 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/LC80030172015001LGN00_B8.tiff 38 | title: 'panchromatic' 39 | files: LC80030172015001LGN00_B8.TIF 40 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/LC80030172015001LGN00_B9.tiff 41 | title: 'cirrhus' 42 | files: 
LC80030172015001LGN00_B9.TIF 43 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/LC80030172015001LGN00_B10.tiff 44 | title: 'tir1' 45 | files: LC80030172015001LGN00_B10.TIF 46 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/LC80030172015001LGN00_B11.tiff 47 | title: 'tir2' 48 | files: LC80030172015001LGN00_B11.TIF 49 | - url: https://xarrayspatial.blob.core.windows.net/examples-data/LC80030172015001LGN00_BQA.tiff 50 | title: 'qa' 51 | files: LC80030172015001LGN00_BQA.TIF -------------------------------------------------------------------------------- /examples/user_guide/0_Getting_Setup.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Xarray-spatial\n", 8 | "### User Guide: Getting Set-up" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "metadata": {}, 14 | "source": [ 15 | "### Installation\n", 16 | "\n", 17 | "The package can be easily installed via conda or pip. \n", 18 | "\n", 19 | "#### To install with conda run:\n", 20 | " conda install -c conda-forge xarray-spatial\n", 21 | "\n", 22 | "#### To install with pip run:\n", 23 | " pip install xarray-spatial\n", 24 | "\n", 25 | "#### To verify whether the installation was successful, open a Python session and import the package:\n", 26 | " import xrspatial" 27 | ] 28 | } 29 | ], 30 | "metadata": { 31 | "language_info": { 32 | "name": "python" 33 | } 34 | }, 35 | "nbformat": 4, 36 | "nbformat_minor": 4 37 | } 38 | -------------------------------------------------------------------------------- /img/0-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/0-0.png -------------------------------------------------------------------------------- /img/0-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/0-1.png -------------------------------------------------------------------------------- /img/0-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/0-2.png -------------------------------------------------------------------------------- /img/0-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/0-3.png -------------------------------------------------------------------------------- /img/0-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/0-4.png -------------------------------------------------------------------------------- /img/1-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/1-0.png -------------------------------------------------------------------------------- /img/1-1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/1-1.png -------------------------------------------------------------------------------- /img/1-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/1-2.png -------------------------------------------------------------------------------- /img/1-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/1-3.png -------------------------------------------------------------------------------- /img/1-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/1-4.png -------------------------------------------------------------------------------- /img/2-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/2-0.png -------------------------------------------------------------------------------- /img/2-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/2-1.png -------------------------------------------------------------------------------- /img/2-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/2-2.png -------------------------------------------------------------------------------- /img/2-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/2-3.png -------------------------------------------------------------------------------- /img/2-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/2-4.png -------------------------------------------------------------------------------- /img/3-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/3-0.png -------------------------------------------------------------------------------- /img/3-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/3-1.png -------------------------------------------------------------------------------- /img/3-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/3-2.png -------------------------------------------------------------------------------- /img/3-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/3-3.png 
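As a follow-up to the set-up notes in examples/user_guide/0_Getting_Setup.ipynb above, a quick smoke test is to import the package, print its version, and call one small function. This is only a sketch, assuming xarray-spatial, NumPy and xarray are already installed in the active environment:

    import numpy as np
    import xarray as xr

    import xrspatial

    print(xrspatial.__version__)  # falls back to "unknown" when the generated _version.py is absent

    # Slope of a tiny synthetic elevation grid, mirroring the docstring examples later in this dump.
    elevation = xr.DataArray(
        np.array([[0., 0., 0.], [0., 1., 0.], [0., 0., 0.]]),
        attrs={'res': (1, 1)},
    )
    print(xrspatial.slope(elevation))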
-------------------------------------------------------------------------------- /img/3-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/3-4.png -------------------------------------------------------------------------------- /img/4-0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/4-0.png -------------------------------------------------------------------------------- /img/4-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/4-1.png -------------------------------------------------------------------------------- /img/4-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/4-2.png -------------------------------------------------------------------------------- /img/4-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/4-3.png -------------------------------------------------------------------------------- /img/4-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/4-4.png -------------------------------------------------------------------------------- /img/composite_map.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/composite_map.gif -------------------------------------------------------------------------------- /img/composite_map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/composite_map.png -------------------------------------------------------------------------------- /img/dependencies.dot: -------------------------------------------------------------------------------- 1 | digraph G { 2 | 3 | graph [fontname = "helvetica"]; 4 | node [fontname = "helvetica"]; 5 | edge [fontname = "helvetica"]; 6 | 7 | 8 | subgraph required { 9 | xarray_spatial -> datashader; 10 | xarray_spatial -> numba; 11 | xarray_spatial -> xarray; 12 | xarray_spatial -> cupy; 13 | xarray -> numpy; 14 | xarray -> dask; 15 | xarray -> cupy; 16 | dask -> cupy; 17 | numba -> numpy; 18 | numba -> cupy; 19 | }; 20 | 21 | subgraph required { 22 | xarray -> rasterio -> gdal [style=dashed]; 23 | datashader -> numba; 24 | datashader -> xarray; 25 | datashader -> dask; 26 | datashader -> pandas; 27 | datashader -> spatialpandas; 28 | datashader -> holoviews [style=dashed]; 29 | pandas -> numpy; 30 | dask -> pandas; 31 | spatialpandas -> pandas; 32 | spatialpandas -> numba; 33 | spatialpandas -> geopandas [style=dashed]; 34 | geopandas -> rtree -> libspatialindex [style=dashed]; 35 | geopandas -> fiona -> geos [style=dashed]; 36 | geopandas -> shapely -> geos [style=dashed]; 37 | fiona -> shapely [style=dashed]; 38 | }; 39 | 
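    // The node definitions below attach a short role description to each package. Aquamarine
    // appears to mark the core stack, while lemonchiffon appears to mark optional pieces
    // (GPU, I/O and visualization extras), mostly reached through the dashed edges above.
    // The rendered copies img/dependencies.png and img/dependencies.svg are presumably produced
    // from this graph with Graphviz, e.g.:
    //   dot -Tsvg img/dependencies.dot -o img/dependencies.svg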
40 | 41 | 42 | 43 | xarray_spatial [label=<Xarray-Spatial
<br/>(Raster Analysis)> fillcolor=palegreen2 fontsize=20 width=0.5 shape=circle style=filled]; 44 | 45 | xarray [label=<Xarray<br/>(Labeled-Ndarray)> fillcolor=aquamarine style=filled];
46 | datashader [label=<Datashader<br/>(Rasterization Pipeline)> fillcolor=aquamarine style=filled];
47 | dask [label=<Dask<br/>(Distributed-Ndarray)> fillcolor=aquamarine style=filled href="https://dask.pydata.org"];
48 | cupy [label=<CuPy<br/>(GPU-Ndarray)> fillcolor=lemonchiffon style=filled href="https://github.com/cupy/cupy"];
49 | numpy [label=<NumPy<br/>(Ndarray)> fillcolor=aquamarine style=filled];
50 | numba [label=<Numba<br/>(JIT Compiler)> fillcolor=aquamarine style=filled];
51 | pandas [label=<Pandas<br/>(Tabular Data)> fillcolor=aquamarine style=filled];
52 | spatialpandas [label=<SpatialPandas<br/>(Geometry-Ndarray)> fillcolor=aquamarine style=filled];
53 | rasterio [label=<Rasterio<br/>(Python Raster I/O)> fillcolor=lemonchiffon style=filled];
54 | gdal [label=<GDAL<br/>(C/C++ Raster I/O)> fillcolor=lemonchiffon style=filled];
55 | geos [label=<GEOS<br/>(C/C++ Vector I/O)> fillcolor=lemonchiffon style=filled];
56 | fiona [label=<Fiona<br/>(Python Vector I/O)> fillcolor=lemonchiffon style=filled];
57 | holoviews [label=<HoloViews<br/>(Advanced Visualization)> fillcolor=lemonchiffon style=filled];
58 | shapely [label=<Shapely<br/>(Python Geometry)> fillcolor=lemonchiffon style=filled];
59 | geopandas [label=<Geopandas<br/>(GeoDataFrame)> fillcolor=lemonchiffon style=filled];
60 | rtree [label=<RTree<br/>(Python Spatial Index)> fillcolor=lemonchiffon style=filled];
61 | libspatialindex [label=<libspatialindex<br/>
(C/C++ Spatial Index)> fillcolor=lemonchiffon style=filled]; 62 | 63 | } 64 | -------------------------------------------------------------------------------- /img/dependencies.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/dependencies.png -------------------------------------------------------------------------------- /img/makepath-supply-chain-international-shipping.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/img/makepath-supply-chain-international-shipping.png -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | 'build', 4 | 'param', 5 | 'pyct', 6 | 'setuptools >= 54.1.2', 7 | ] 8 | 9 | [tool.check-manifest] 10 | ignore = [ 11 | '.appveyor.yml', 12 | '.version', 13 | 'CHANGELOG.md', 14 | 'CODE_OF_CONDUCT.md', 15 | 'CONTRIBUTING.md', 16 | 'Citation-styles.md', 17 | 'RELEASE.md', 18 | 'requirements-dev.txt', 19 | 'requirements.txt', 20 | 'test_examples_cli.py', 21 | 'tox.ini', 22 | 'benchmarks/*', 23 | 'examples/*', 24 | 'examples/user_guide/*', 25 | 'img/*', 26 | 'test_pip_packaging/*', 27 | ] -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | check-manifest 2 | pytest 3 | setuptools_scm 4 | tox 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | datashader 2 | dask[dataframe] 3 | noise >=1.2.2 4 | numba 5 | xarray 6 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | author = makepath 3 | classifiers = 4 | Development Status :: 4 - Beta 5 | Intended Audience :: Developers 6 | Intended Audience :: Science/Research 7 | License :: OSI Approved :: MIT License 8 | Operating System :: OS Independent 9 | Programming Language :: Python :: 3 10 | description = xarray-based spatial analysis tools 11 | license = MIT 12 | license_file = LICENSE.txt 13 | long_description = file: README.md 14 | long_description_content_type = text/markdown 15 | name = xarray-spatial 16 | platforms = any 17 | url = https://github.com/makepath/xarray-spatial 18 | 19 | [options] 20 | include_package_data = True 21 | install_requires = 22 | datashader >= 0.15.0 23 | numba 24 | xarray 25 | numpy 26 | packages = find: 27 | python_requires = >=3.8 28 | setup_requires = setuptools_scm 29 | zip_safe = False 30 | 31 | [options.entry_points] 32 | console_scripts = 33 | xrspatial = xrspatial.__main__:main 34 | 35 | [options.extras_require] 36 | doc = 37 | dask[dataframe] 38 | geopandas 39 | Jinja2 >=2.11 40 | ipykernel 41 | matplotlib 42 | nbsphinx 43 | numpydoc 44 | pandoc 45 | pydata_sphinx_theme 46 | sphinx 47 | sphinx-panels 48 | sphinx_rtd_theme 49 | examples = 50 | optional = 51 | # Optional for polygonize return types. 52 | awkward>=1.4 53 | geopandas 54 | shapely 55 | spatialpandas 56 | # Optional for gpu_rtx functions. Also requires cupy. 
57 | rtxpy 58 | tests = 59 | flake8 60 | isort 61 | noise >= 1.2.2 62 | pytest 63 | pytest-cov 64 | 65 | [flake8] 66 | exclude = 67 | .git, 68 | .asv, 69 | __pycache__, 70 | max-line-length = 100 71 | 72 | [isort] 73 | line_length = 100 74 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import sys 4 | 5 | import pyct.build 6 | from setuptools import setup 7 | 8 | 9 | if __name__ == '__main__': 10 | example_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 11 | 'xrspatial', 'examples') 12 | if 'develop' not in sys.argv: 13 | pyct.build.examples(example_path, __file__, force=True) 14 | 15 | use_scm = { 16 | "write_to": "xrspatial/_version.py" 17 | } 18 | 19 | setup(use_scm_version=use_scm) 20 | 21 | if os.path.isdir(example_path): 22 | shutil.rmtree(example_path) 23 | -------------------------------------------------------------------------------- /xrspatial/__init__.py: -------------------------------------------------------------------------------- 1 | from xrspatial.aspect import aspect # noqa 2 | from xrspatial.bump import bump # noqa 3 | from xrspatial.classify import binary # noqa 4 | from xrspatial.classify import equal_interval # noqa 5 | from xrspatial.classify import natural_breaks # noqa 6 | from xrspatial.classify import quantile # noqa 7 | from xrspatial.classify import reclassify # noqa 8 | from xrspatial.curvature import curvature # noqa 9 | from xrspatial.focal import mean # noqa 10 | from xrspatial.hillshade import hillshade # noqa 11 | from xrspatial.multispectral import arvi # noqa 12 | from xrspatial.multispectral import evi # noqa 13 | from xrspatial.multispectral import nbr # noqa 14 | from xrspatial.multispectral import ndvi # noqa 15 | from xrspatial.multispectral import savi # noqa 16 | from xrspatial.multispectral import sipi # noqa 17 | from xrspatial.pathfinding import a_star_search # noqa 18 | from xrspatial.perlin import perlin # noqa 19 | from xrspatial.proximity import allocation # noqa 20 | from xrspatial.proximity import direction # noqa 21 | from xrspatial.proximity import euclidean_distance # noqa 22 | from xrspatial.proximity import great_circle_distance # noqa 23 | from xrspatial.proximity import manhattan_distance # noqa 24 | from xrspatial.proximity import proximity # noqa 25 | from xrspatial.slope import slope # noqa 26 | from xrspatial.terrain import generate_terrain # noqa 27 | from xrspatial.viewshed import viewshed # noqa 28 | from xrspatial.zonal import apply as zonal_apply # noqa 29 | from xrspatial.zonal import crop # noqa 30 | from xrspatial.zonal import trim # noqa 31 | from xrspatial.zonal import crosstab as zonal_crosstab # noqa 32 | from xrspatial.zonal import regions as regions # noqa 33 | from xrspatial.zonal import stats as zonal_stats # noqa 34 | from xrspatial.zonal import suggest_zonal_canvas as suggest_zonal_canvas # noqa 35 | 36 | 37 | try: 38 | from ._version import version as __version__ 39 | except ImportError: 40 | __version__ = "unknown" 41 | 42 | 43 | def test(): 44 | """Run the xarray-spatial test suite.""" 45 | import os 46 | try: 47 | import pytest 48 | except ImportError: 49 | import sys 50 | sys.stderr.write("You need to install py.test to run tests.\n\n") 51 | raise 52 | pytest.main([os.path.dirname(__file__)]) 53 | -------------------------------------------------------------------------------- /xrspatial/__main__.py: 
-------------------------------------------------------------------------------- 1 | def main(args=None): 2 | try: 3 | import pyct.cmd 4 | except ImportError: 5 | import sys 6 | 7 | from . import _missing_cmd 8 | print(_missing_cmd()) 9 | sys.exit(1) 10 | try: 11 | return pyct.cmd.substitute_main('xrspatial', args=args) 12 | except Exception as e: 13 | print(e) 14 | 15 | 16 | if __name__ == "__main__": 17 | main() 18 | -------------------------------------------------------------------------------- /xrspatial/analytics.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | 3 | from xrspatial import aspect, curvature, slope 4 | 5 | 6 | def summarize_terrain(terrain: xr.DataArray): 7 | """ 8 | Calculates slope, aspect, and curvature of an elevation terrain and return a dataset 9 | of the computed data. 10 | 11 | Parameters 12 | ---------- 13 | terrain: xarray.DataArray 14 | 2D NumPy, CuPy, or Dask with NumPy-backed xarray DataArray of elevation values. 15 | 16 | Returns 17 | ------- 18 | summarized_terrain: xarray.Dataset 19 | Dataset with slope, aspect, curvature variables with a naming convention of 20 | `terrain.name-variable_name` 21 | 22 | Examples 23 | -------- 24 | .. sourcecode:: python 25 | 26 | >>> import numpy as np 27 | >>> import xarray as xr 28 | >>> from xrspatial.analytics import summarize_terrain 29 | >>> data = np.array([ 30 | [0, 0, 0, 0, 0, 0, 0, 0], 31 | [0, 0, 0, 0, 0, 0, 0, 0], 32 | [0, 0, 1, 0, 0, -1, 0, 0], 33 | [0, 0, 0, 0, 0, 0, 0, 0], 34 | [0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.float64) 35 | >>> raster = xr.DataArray(data, name='myraster', attrs={'res': (1, 1)}) 36 | >>> summarized_terrain = summarize_terrain(raster) 37 | >>> summarized_terrain 38 | 39 | Dimensions: (dim_0: 5, dim_1: 8) 40 | Dimensions without coordinates: dim_0, dim_1 41 | Data variables: 42 | myraster (dim_0, dim_1) float64 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 43 | myraster-slope (dim_0, dim_1) float32 nan nan nan nan ... nan nan nan 44 | myraster-curvature (dim_0, dim_1) float64 nan nan nan nan ... nan nan nan 45 | myraster-aspect (dim_0, dim_1) float32 nan nan nan nan ... nan nan nan 46 | >>> summarized_terrain['myraster-slope'] 47 | 48 | array([[ nan, nan, nan, nan, nan, nan, nan, nan], 49 | [ nan, 10.024988, 14.036243, 10.024988, 10.024988, 14.036243, 10.024988, nan], 50 | [ nan, 14.036243, 0. , 14.036243, 14.036243, 0. 
, 14.036243, nan], 51 | [ nan, 10.024988, 14.036243, 10.024988, 10.024988, 14.036243, 10.024988, nan], 52 | [ nan, nan, nan, nan, nan, nan, nan, nan]], dtype=float32) # noqa 53 | Dimensions without coordinates: dim_0, dim_1 54 | Attributes: 55 | res: (1, 1) 56 | 57 | >>> summarized_terrain['myraster-curvature'] 58 | 59 | array([[ nan, nan, nan, nan, nan, nan, nan, nan], 60 | [ nan, -0., -100., -0., -0., 100., -0., nan], 61 | [ nan, -100., 400., -100., 100., -400., 100., nan], 62 | [ nan, -0., -100., -0., -0., 100., -0., nan], 63 | [ nan, nan, nan, nan, nan, nan, nan, nan]]) 64 | Dimensions without coordinates: dim_0, dim_1 65 | Attributes: 66 | res: (1, 1) 67 | 68 | >>> summarized_terrain['myraster-aspect'] 69 | 70 | array([[ nan, nan, nan, nan, nan, nan, nan, nan], 71 | [ nan, 315., 0., 45., 135., 180., 225., nan], 72 | [ nan, 270., -1., 90., 90., -1., 270., nan], 73 | [ nan, 225., 180., 135., 45., 0., 315., nan], 74 | [ nan, nan, nan, nan, nan, nan, nan, nan]], dtype=float32) 75 | Dimensions without coordinates: dim_0, dim_1 76 | Attributes: 77 | res: (1, 1) 78 | """ 79 | 80 | if terrain.name is None: 81 | raise NameError('Requires xr.DataArray.name property to be set') 82 | 83 | ds = terrain.to_dataset() 84 | ds[f'{terrain.name}-slope'] = slope(terrain) 85 | ds[f'{terrain.name}-curvature'] = curvature(terrain) 86 | ds[f'{terrain.name}-aspect'] = aspect(terrain) 87 | return ds 88 | -------------------------------------------------------------------------------- /xrspatial/datasets/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import dask.array as da 4 | import datashader as ds 5 | import noise 6 | import numpy as np 7 | import pandas as pd 8 | import xarray as xr 9 | 10 | __all__ = [ 11 | 'available', 12 | 'get_data', 13 | 'make_terrain', 14 | ] 15 | 16 | _module_path = os.path.dirname(os.path.abspath(__file__)) 17 | _available_datasets = [p for p in next(os.walk(_module_path))[1] 18 | if not p.startswith("__")] 19 | available_datasets = _available_datasets 20 | 21 | 22 | def get_data(dataset): 23 | """ 24 | Open example multispectral band data. 25 | Parameters 26 | ---------- 27 | dataset : str 28 | The name of the dataset. See ``xrspatial.datasets.available`` for 29 | all options. 30 | Examples 31 | -------- 32 | >>> xrspatial.datasets.get_data("sentinel-2") 33 | """ 34 | data = {} 35 | if dataset in _available_datasets: 36 | folder_path = os.path.abspath(os.path.join(_module_path, dataset)) 37 | band_files = [p for p in next(os.walk(folder_path))[2]] 38 | for band_file in band_files: 39 | array = xr.open_dataarray(os.path.join(folder_path, band_file)) 40 | data[array.Name] = array 41 | else: 42 | msg = f'The dataset {dataset} is not available. ' 43 | msg += f'Available folders are {available_datasets}.' 44 | raise ValueError(msg) 45 | return data 46 | 47 | 48 | def make_terrain( 49 | shape=(1024, 1024), 50 | scale=100.0, 51 | octaves=6, 52 | persistence=0.5, 53 | lacunarity=2.0, 54 | chunks=(512, 512) 55 | ): 56 | """ 57 | Generate a pseudo-random terrain data dask array. 58 | 59 | Parameters 60 | ---------- 61 | shape : int or tuple of int, default=(1024, 1024) 62 | Output array shape. 63 | scale : float, default=100.0 64 | Noise factor scale. 65 | octaves : int, default=6 66 | Number of waves when generating the noise. 67 | persistence : float, default=0.5 68 | Amplitude of each successive octave relative. 69 | lacunarity : float, default=2.0 70 | Frequency of each successive octave relative. 
71 | chunks : int or tuple of int, default=(512, 512) 72 | Number of samples on each block. 73 | 74 | Returns 75 | ------- 76 | terrain : xarray.DataArray 77 | 2D array of generated terrain values. 78 | """ 79 | def _func(arr, block_id=None): 80 | block_ystart = block_id[0] * arr.shape[0] 81 | block_xstart = block_id[1] * arr.shape[1] 82 | out = np.zeros(arr.shape) 83 | for i in range(out.shape[0]): 84 | for j in range(out.shape[1]): 85 | out[i][j] = noise.pnoise2( 86 | (block_ystart + i)/scale, 87 | (block_xstart + j)/scale, 88 | octaves=octaves, 89 | persistence=persistence, 90 | lacunarity=lacunarity, 91 | repeatx=1024, 92 | repeaty=1024, 93 | base=42, 94 | ) 95 | return out 96 | 97 | data = ( 98 | da.zeros(shape=shape, chunks=chunks, dtype=np.float32) 99 | .map_blocks(_func, dtype=np.float32) 100 | ) 101 | 102 | cvs = ds.Canvas( 103 | x_range=(0, 500), 104 | y_range=(0, 500), 105 | plot_width=shape[1], 106 | plot_height=shape[0], 107 | ) 108 | 109 | hack_agg = cvs.points(pd.DataFrame({'x': [], 'y': []}), 'x', 'y') 110 | 111 | agg = xr.DataArray( 112 | data, 113 | name='terrain', 114 | coords=hack_agg.coords, 115 | dims=hack_agg.dims, 116 | attrs={'res': 1}, 117 | ) 118 | 119 | return agg 120 | -------------------------------------------------------------------------------- /xrspatial/datasets/sentinel-2/blue_band.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/xrspatial/datasets/sentinel-2/blue_band.nc -------------------------------------------------------------------------------- /xrspatial/datasets/sentinel-2/green_band.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/xrspatial/datasets/sentinel-2/green_band.nc -------------------------------------------------------------------------------- /xrspatial/datasets/sentinel-2/nir_band.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/xrspatial/datasets/sentinel-2/nir_band.nc -------------------------------------------------------------------------------- /xrspatial/datasets/sentinel-2/red_band.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/xrspatial/datasets/sentinel-2/red_band.nc -------------------------------------------------------------------------------- /xrspatial/datasets/sentinel-2/swir1_band.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/xrspatial/datasets/sentinel-2/swir1_band.nc -------------------------------------------------------------------------------- /xrspatial/datasets/sentinel-2/swir2_band.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/xrspatial/datasets/sentinel-2/swir2_band.nc -------------------------------------------------------------------------------- /xrspatial/esri.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import requests 3 | 4 | 5 | def 
featureset_to_dataframe(featureset, 6 | convert_geometry=False, 7 | use_aliases=False): 8 | items = [x['attributes'] for x in featureset['features']] 9 | df = pd.DataFrame(items) 10 | if use_aliases and featureset.get('fieldAliases'): 11 | df.rename(columns=featureset['fieldAliases'], inplace=True) 12 | if convert_geometry: 13 | pass 14 | return df 15 | 16 | 17 | def query_to_dataframe(layer, where, token=None, outFields='*', chunkSize=100, 18 | use_aliases=True): 19 | featureset = query_layer(layer, where, token, outFields, chunkSize) 20 | return featureset_to_dataframe(featureset, use_aliases=use_aliases) 21 | 22 | 23 | def chunker(seq, size): 24 | return (seq[pos:pos + size] for pos in range(0, len(seq), size)) 25 | 26 | 27 | def query_layer(layer, where, token=None, outFields='*', chunkSize=100, 28 | returnGeometry=False): 29 | 30 | url = layer + r'/query' 31 | 32 | params = {} 33 | params['where'] = where 34 | params['outFields'] = outFields 35 | params['returnGeometry'] = returnGeometry 36 | params['token'] = token 37 | params['f'] = 'json' 38 | params['returnIdsOnly'] = True 39 | 40 | ids_req = requests.post(url, data=params) 41 | ids_req.raise_for_status() 42 | ids_response = ids_req.json().get('objectIds') 43 | params['returnIdsOnly'] = False 44 | params['where'] = '' 45 | 46 | featureset = None 47 | for ids in chunker(ids_response, chunkSize): 48 | params['objectIds'] = ','.join(map(str, ids)) 49 | req = requests.post(url, data=params) 50 | req.raise_for_status() 51 | feat_response = req.json() 52 | if not featureset: 53 | featureset = feat_response 54 | else: 55 | featureset['features'] += feat_response['features'] 56 | if not featureset: 57 | featureset = {} 58 | featureset['features'] = [] 59 | 60 | return featureset 61 | -------------------------------------------------------------------------------- /xrspatial/experimental/__init__.py: -------------------------------------------------------------------------------- 1 | from .polygonize import polygonize # noqa 2 | -------------------------------------------------------------------------------- /xrspatial/gpu_rtx/__init__.py: -------------------------------------------------------------------------------- 1 | from ..utils import has_cuda_and_cupy 2 | 3 | try: 4 | from rtxpy import RTX 5 | except ImportError: 6 | RTX = None 7 | 8 | 9 | def has_rtx(): 10 | return has_cuda_and_cupy and RTX is not None 11 | -------------------------------------------------------------------------------- /xrspatial/gpu_rtx/cuda_utils.py: -------------------------------------------------------------------------------- 1 | import numba as nb 2 | import numpy as np 3 | 4 | 5 | @nb.cuda.jit(device=True) 6 | def add(a, b): 7 | return float3(a[0]+b[0], a[1]+b[1], a[2]+b[2]) 8 | 9 | 10 | @nb.cuda.jit(device=True) 11 | def diff(a, b): 12 | return float3(a[0]-b[0], a[1]-b[1], a[2]-b[2]) 13 | 14 | 15 | @nb.cuda.jit(device=True) 16 | def dot(a, b): 17 | return a[0]*b[0] + a[1]*b[1] + a[2]*b[2] 18 | 19 | 20 | @nb.cuda.jit(device=True) 21 | def float3(a, b, c): 22 | return (np.float32(a), np.float32(b), np.float32(c)) 23 | 24 | 25 | @nb.cuda.jit(device=True) 26 | def invert(a): 27 | return float3(-a[0], -a[1], -a[2]) 28 | 29 | 30 | @nb.cuda.jit(device=True) 31 | def mix(a, b, k): 32 | return add(mul(a, k), mul(b, 1-k)) 33 | 34 | 35 | @nb.cuda.jit(device=True) 36 | def make_float3(a, offset): 37 | return float3(a[offset], a[offset+1], a[offset+2]) 38 | 39 | 40 | @nb.cuda.jit(device=True) 41 | def mul(a, b): 42 | return float3(a[0]*b, a[1]*b, a[2]*b) 43 | 44 | 
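# The device helpers above are tuple-based stand-ins for small float3 vector math used by the
# RTX hillshade kernels (see gpu_rtx/hillshade.py). A host-side NumPy sketch of the same
# operations, illustrative only and not used by the library (assumes nothing beyond NumPy):
#
#   import numpy as np
#   a = np.array([1.0, 0.0, 0.0], dtype=np.float32)
#   b = np.array([0.0, 1.0, 0.0], dtype=np.float32)
#   a * 0.25 + b * 0.75        # mix(a, b, 0.25) -> [0.25, 0.75, 0.0]
#   float(np.dot(a, b))        # dot(a, b)       -> 0.0
#   -a                         # invert(a)
#   a * 2.0                    # mul(a, 2.0)
#   a - b                      # diff(a, b)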
45 | @nb.cuda.jit(device=True) 46 | def mult_color(a, b): 47 | return float3(a[0]*b[0], a[1]*b[1], a[2]*b[2]) 48 | -------------------------------------------------------------------------------- /xrspatial/gpu_rtx/hillshade.py: -------------------------------------------------------------------------------- 1 | # Only call functions in this file if has_rtx() returns True as this checks 2 | # that the required dependent libraries are installed. 3 | 4 | import cupy 5 | import numba as nb 6 | import numpy as np 7 | import xarray as xr 8 | from rtxpy import RTX 9 | from scipy.spatial.transform import Rotation as R 10 | 11 | from ..utils import calc_cuda_dims 12 | from .cuda_utils import add, dot, invert, make_float3, mul 13 | from .mesh_utils import create_triangulation 14 | 15 | 16 | @nb.cuda.jit 17 | def _generate_primary_rays_kernel(data, H, W): 18 | """ 19 | A GPU kernel that given a set of x and y discrete coordinates on a raster 20 | terrain generates in @data a list of parallel rays that represent camera 21 | rays generated from an orthographic camera that is looking straight down 22 | at the surface from an origin height 10000. 23 | """ 24 | i, j = nb.cuda.grid(2) 25 | if i >= 0 and i < H and j >= 0 and j < W: 26 | if (j == W-1): 27 | data[i, j, 0] = j - 1e-3 28 | else: 29 | data[i, j, 0] = j + 1e-3 30 | 31 | if (i == H-1): 32 | data[i, j, 1] = i - 1e-3 33 | else: 34 | data[i, j, 1] = i + 1e-3 35 | 36 | data[i, j, 2] = 10000 # Location of the camera (height) 37 | data[i, j, 3] = 1e-3 38 | data[i, j, 4] = 0 39 | data[i, j, 5] = 0 40 | data[i, j, 6] = -1 41 | data[i, j, 7] = np.inf 42 | 43 | 44 | def _generate_primary_rays(rays, H, W): 45 | griddim, blockdim = calc_cuda_dims((H, W)) 46 | _generate_primary_rays_kernel[griddim, blockdim](rays, H, W) 47 | return 0 48 | 49 | 50 | @nb.cuda.jit 51 | def _generate_shadow_rays_kernel(rays, hits, normals, H, W, sun_dir): 52 | """ 53 | A GPU kernel that takes a set rays and their intersection points with the 54 | triangulated surface, and calculates a set of shadow rays (overwriting the 55 | original rays) that have their origins at the intersection points and 56 | directions towards the sun. 57 | The normals vectors at the point of intersection of the original rays are 58 | cached in normals, thus we can later use them to do Lambertian shading, 59 | after the shadow rays have been traced. 60 | """ 61 | i, j = nb.cuda.grid(2) 62 | if i >= 0 and i < H and j >= 0 and j < W: 63 | dist = hits[i, j, 0] 64 | norm = make_float3(hits[i, j], 1) 65 | if (norm[2] < 0): 66 | norm = invert(norm) 67 | ray = rays[i, j] 68 | ray_origin = make_float3(ray, 0) 69 | ray_dir = make_float3(ray, 4) 70 | p = add(ray_origin, mul(ray_dir, dist)) 71 | 72 | new_origin = add(p, mul(norm, 1e-3)) 73 | ray[0] = new_origin[0] 74 | ray[1] = new_origin[1] 75 | ray[2] = new_origin[2] 76 | ray[3] = 1e-3 77 | ray[4] = sun_dir[0] 78 | ray[5] = sun_dir[1] 79 | ray[6] = sun_dir[2] 80 | ray[7] = np.inf if dist > 0 else 0 81 | 82 | normals[i, j, 0] = norm[0] 83 | normals[i, j, 1] = norm[1] 84 | normals[i, j, 2] = norm[2] 85 | 86 | 87 | def _generate_shadow_rays(rays, hits, normals, H, W, sunDir): 88 | griddim, blockdim = calc_cuda_dims((H, W)) 89 | _generate_shadow_rays_kernel[griddim, blockdim]( 90 | rays, hits, normals, H, W, sunDir) 91 | return 0 92 | 93 | 94 | @nb.cuda.jit 95 | def _shade_lambert_kernel(hits, normals, output, H, W, sun_dir, cast_shadows): 96 | """ 97 | This kernel does a simple Lambertian shading. 
98 | The hits array contains the results of tracing the shadow rays through the 99 | scene. If the value in hits[x, y, 0] is > 0 then a valid intersection 100 | occurred and that means that the point at location x, y is in shadow. 101 | The normals array stores the normal at the intersecion point of each 102 | camera ray. We then use the information for light visibility and normal to 103 | apply Lambert's cosine law. 104 | """ 105 | i, j = nb.cuda.grid(2) 106 | if i >= 0 and i < H and j >= 0 and j < W: 107 | # Normal at the intersection of camera ray (i,j) with the scene 108 | norm = make_float3(normals[i, j], 0) 109 | 110 | light_dir = make_float3(sun_dir, 0) 111 | cos_theta = dot(light_dir, norm) # light_dir and norm are normalised. 112 | 113 | temp = (cos_theta + 1) / 2 114 | 115 | if cast_shadows and hits[i, j, 0] >= 0: 116 | temp = temp / 2 117 | 118 | if temp > 1: 119 | temp = 1 120 | elif temp < 0: 121 | temp = 0 122 | 123 | output[i, j] = temp 124 | 125 | 126 | def _shade_lambert(hits, normals, output, H, W, sun_dir, cast_shadows): 127 | griddim, blockdim = calc_cuda_dims((H, W)) 128 | _shade_lambert_kernel[griddim, blockdim]( 129 | hits, normals, output, H, W, sun_dir, cast_shadows) 130 | return 0 131 | 132 | 133 | def _get_sun_dir(angle_altitude, azimuth): 134 | """ 135 | Calculate the vector towards the sun based on sun altitude angle and 136 | azimuth. 137 | """ 138 | north = (0, 1, 0) 139 | rx = R.from_euler('x', angle_altitude, degrees=True) 140 | rz = R.from_euler('z', azimuth+180, degrees=True) 141 | sun_dir = rx.apply(north) 142 | sun_dir = rz.apply(sun_dir) 143 | return sun_dir 144 | 145 | 146 | def _hillshade_rt(raster: xr.DataArray, 147 | optix: RTX, 148 | azimuth: int, 149 | angle_altitude: int, 150 | shadows: bool) -> xr.DataArray: 151 | H, W = raster.shape 152 | sun_dir = cupy.array(_get_sun_dir(angle_altitude, azimuth)) 153 | 154 | # Device buffers 155 | d_rays = cupy.empty((H, W, 8), np.float32) 156 | d_hits = cupy.empty((H, W, 4), np.float32) 157 | d_aux = cupy.empty((H, W, 3), np.float32) 158 | d_output = cupy.empty((H, W), np.float32) 159 | 160 | _generate_primary_rays(d_rays, H, W) 161 | device = cupy.cuda.Device(0) 162 | device.synchronize() 163 | res = optix.trace(d_rays, d_hits, W*H) 164 | if res: 165 | raise RuntimeError(f"Failed trace 1, error code: {res}") 166 | 167 | _generate_shadow_rays(d_rays, d_hits, d_aux, H, W, sun_dir) 168 | if shadows: 169 | device.synchronize() 170 | res = optix.trace(d_rays, d_hits, W*H) 171 | if res: 172 | raise RuntimeError(f"Failed trace 2, error code: {res}") 173 | 174 | _shade_lambert(d_hits, d_aux, d_output, H, W, sun_dir, shadows) 175 | 176 | d_output[0, :] = cupy.nan 177 | d_output[-1, :] = cupy.nan 178 | d_output[:, 0] = cupy.nan 179 | d_output[:, -1] = cupy.nan 180 | 181 | return d_output 182 | 183 | 184 | def hillshade_rtx(raster: xr.DataArray, 185 | azimuth: int, 186 | angle_altitude: int, 187 | shadows: bool) -> xr.DataArray: 188 | if not isinstance(raster.data, cupy.ndarray): 189 | raise TypeError("raster.data must be a cupy array") 190 | 191 | optix = RTX() 192 | create_triangulation(raster, optix) 193 | 194 | return _hillshade_rt( 195 | raster, optix, azimuth=azimuth, angle_altitude=angle_altitude, 196 | shadows=shadows) 197 | -------------------------------------------------------------------------------- /xrspatial/gpu_rtx/mesh_utils.py: -------------------------------------------------------------------------------- 1 | import cupy 2 | import numba as nb 3 | import numpy as np 4 | 5 | 6 | def 
create_triangulation(raster, optix): 7 | datahash = np.uint64(hash(str(raster.data.get()))) 8 | optixhash = np.uint64(optix.getHash()) 9 | 10 | # Calculate a scale factor for the height that maintains the ratio 11 | # width/height 12 | H, W = raster.shape 13 | 14 | # Scale the terrain so that the width is proportional to the height 15 | # Thus the terrain would be neither too flat nor too steep and 16 | # raytracing will give best accuracy 17 | maxH = float(cupy.amax(raster.data)) 18 | maxDim = max(H, W) 19 | scale = maxDim / maxH 20 | 21 | if optixhash != datahash: 22 | num_tris = (H - 1) * (W - 1) * 2 23 | verts = cupy.empty(H * W * 3, np.float32) 24 | triangles = cupy.empty(num_tris * 3, np.int32) 25 | # Generate a mesh from the terrain (buffers are on the GPU, so 26 | # generation happens also on GPU) 27 | res = _triangulate_terrain(verts, triangles, raster, scale) 28 | if res: 29 | raise RuntimeError( 30 | f"Failed to generate mesh from terrain, error code: {res}") 31 | 32 | res = optix.build(datahash, verts, triangles) 33 | if res: 34 | raise RuntimeError( 35 | f"OptiX failed to build GAS, error code: {res}") 36 | 37 | # Enable for debug purposes 38 | if False: 39 | write("mesh.stl", verts, triangles) 40 | # Clear some GPU memory that we no longer need 41 | verts = None 42 | triangles = None 43 | cupy.get_default_memory_pool().free_all_blocks() 44 | return scale 45 | 46 | 47 | @nb.cuda.jit 48 | def _triangulate_terrain_kernel(verts, triangles, data, H, W, scale, stride): 49 | global_id = stride + nb.cuda.grid(1) 50 | if global_id < W*H: 51 | h = global_id // W 52 | w = global_id % W 53 | mesh_map_index = h * W + w 54 | 55 | val = data[h, w] 56 | 57 | offset = 3*mesh_map_index 58 | verts[offset] = w 59 | verts[offset+1] = h 60 | verts[offset+2] = val * scale 61 | 62 | if w != W - 1 and h != H - 1: 63 | offset = 6*(h * (W-1) + w) 64 | triangles[offset+0] = np.int32(mesh_map_index + W) 65 | triangles[offset+1] = np.int32(mesh_map_index + W + 1) 66 | triangles[offset+2] = np.int32(mesh_map_index) 67 | triangles[offset+3] = np.int32(mesh_map_index + W + 1) 68 | triangles[offset+4] = np.int32(mesh_map_index + 1) 69 | triangles[offset+5] = np.int32(mesh_map_index) 70 | 71 | 72 | @nb.njit(parallel=True) 73 | def _triangulate_cpu(verts, triangles, data, H, W, scale): 74 | for h in nb.prange(H): 75 | for w in range(W): 76 | mesh_map_index = h * W + w 77 | 78 | val = data[h, w] 79 | 80 | offset = 3*mesh_map_index 81 | verts[offset] = w 82 | verts[offset+1] = h 83 | verts[offset+2] = val * scale 84 | 85 | if w != W - 1 and h != H - 1: 86 | offset = 6*(h*(W-1) + w) 87 | triangles[offset+0] = np.int32(mesh_map_index + W) 88 | triangles[offset+1] = np.int32(mesh_map_index + W+1) 89 | triangles[offset+2] = np.int32(mesh_map_index) 90 | triangles[offset+3] = np.int32(mesh_map_index + W+1) 91 | triangles[offset+4] = np.int32(mesh_map_index + 1) 92 | triangles[offset+5] = np.int32(mesh_map_index) 93 | 94 | 95 | def _triangulate_terrain(verts, triangles, terrain, scale=1): 96 | H, W = terrain.shape 97 | if isinstance(terrain.data, np.ndarray): 98 | _triangulate_cpu(verts, triangles, terrain.data, H, W, scale) 99 | if isinstance(terrain.data, cupy.ndarray): 100 | job_size = H*W 101 | blockdim = 1024 102 | griddim = (job_size + blockdim - 1) // 1024 103 | d = 100 104 | offset = 0 105 | while job_size > 0: 106 | batch = min(d, griddim) 107 | _triangulate_terrain_kernel[batch, blockdim]( 108 | verts, triangles, terrain.data, H, W, scale, offset) 109 | offset += batch*blockdim 110 | job_size -= 
batch*blockdim 111 | return 0 112 | 113 | 114 | @nb.jit(nopython=True) 115 | def _fill_contents(content, verts, triangles, num_tris): 116 | v = np.empty(12, np.float32) 117 | pad = np.zeros(2, np.int8) 118 | offset = 0 119 | for i in range(num_tris): 120 | t0 = triangles[3*i+0] 121 | t1 = triangles[3*i+1] 122 | t2 = triangles[3*i+2] 123 | v[3*0+0] = 0 124 | v[3*0+1] = 0 125 | v[3*0+2] = 0 126 | v[3*1+0] = verts[3*t0+0] 127 | v[3*1+1] = verts[3*t0+1] 128 | v[3*1+2] = verts[3*t0+2] 129 | v[3*2+0] = verts[3*t1+0] 130 | v[3*2+1] = verts[3*t1+1] 131 | v[3*2+2] = verts[3*t1+2] 132 | v[3*3+0] = verts[3*t2+0] 133 | v[3*3+1] = verts[3*t2+1] 134 | v[3*3+2] = verts[3*t2+2] 135 | 136 | offset = 50*i 137 | content[offset:offset+48] = v.view(np.uint8) 138 | content[offset+48:offset+50] = pad 139 | 140 | 141 | def write(name, verts, triangles): 142 | """ 143 | Save a triangulated raster to a standard STL file. 144 | Windows has a default STL viewer and probably all 3D viewers have native 145 | support for it because of its simplicity. Can be used to verify the 146 | correctness of the algorithm or to visualize the mesh to get a notion of 147 | the size/complexity etc. 148 | @param name - The name of the mesh file we're going to save. 149 | Should end in .stl 150 | @param verts - A numpy array containing all the vertices of the mesh. 151 | Format is 3 float32 per vertex (vertex buffer) 152 | @param triangles - A numpy array containing all the triangles of the mesh. 153 | Format is 3 int32 per triangle (index buffer) 154 | """ 155 | ib = triangles 156 | vb = verts 157 | if isinstance(ib, cupy.ndarray): 158 | ib = cupy.asnumpy(ib) 159 | if isinstance(vb, cupy.ndarray): 160 | vb = cupy.asnumpy(vb) 161 | 162 | header = np.zeros(80, np.uint8) 163 | nf = np.empty(1, np.uint32) 164 | num_tris = triangles.shape[0] // 3 165 | nf[0] = num_tris 166 | f = open(name, 'wb') 167 | f.write(header) 168 | f.write(nf) 169 | 170 | # size of 1 triangle in STL is 50 bytes 171 | # 12 floats (each 4 bytes) for a total of 48 172 | # And additional 2 bytes for padding 173 | content = np.empty(num_tris*(50), np.uint8) 174 | _fill_contents(content, vb, ib, num_tris) 175 | f.write(content) 176 | f.close() 177 | -------------------------------------------------------------------------------- /xrspatial/hillshade.py: -------------------------------------------------------------------------------- 1 | import math 2 | from functools import partial 3 | from typing import Optional 4 | 5 | import dask.array as da 6 | import numpy as np 7 | import xarray as xr 8 | from numba import cuda 9 | 10 | from .gpu_rtx import has_rtx 11 | from .utils import calc_cuda_dims, has_cuda_and_cupy, is_cupy_array, is_cupy_backed 12 | 13 | 14 | def _run_numpy(data, azimuth=225, angle_altitude=25): 15 | data = data.astype(np.float32) 16 | 17 | azimuth = 360.0 - azimuth 18 | x, y = np.gradient(data) 19 | slope = np.pi/2. - np.arctan(np.sqrt(x*x + y*y)) 20 | aspect = np.arctan2(-x, y) 21 | azimuthrad = azimuth*np.pi/180. 22 | altituderad = angle_altitude*np.pi/180. 23 | shaded = np.sin(altituderad) * np.sin(slope) + \ 24 | np.cos(altituderad) * np.cos(slope) * \ 25 | np.cos((azimuthrad - np.pi/2.) 
- aspect) 26 | result = (shaded + 1) / 2 27 | result[(0, -1), :] = np.nan 28 | result[:, (0, -1)] = np.nan 29 | return result 30 | 31 | 32 | def _run_dask_numpy(data, azimuth, angle_altitude): 33 | data = data.astype(np.float32) 34 | 35 | _func = partial(_run_numpy, azimuth=azimuth, angle_altitude=angle_altitude) 36 | out = data.map_overlap(_func, 37 | depth=(1, 1), 38 | boundary=np.nan, 39 | meta=np.array(())) 40 | return out 41 | 42 | 43 | @cuda.jit 44 | def _gpu_calc_numba( 45 | data, 46 | output, 47 | sin_altituderad, 48 | cos_altituderad, 49 | azimuthrad 50 | ): 51 | 52 | i, j = cuda.grid(2) 53 | if i > 0 and i < data.shape[0]-1 and j > 0 and j < data.shape[1] - 1: 54 | x = (data[i+1, j]-data[i-1, j])/2 55 | y = (data[i, j+1]-data[i, j-1])/2 56 | 57 | len = math.sqrt(x*x + y*y) 58 | slope = 1.57079632679 - math.atan(len) 59 | aspect = (azimuthrad - 1.57079632679) - math.atan2(-x, y) 60 | 61 | sin_slope = math.sin(slope) 62 | sin_part = sin_altituderad * sin_slope 63 | 64 | cos_aspect = math.cos(aspect) 65 | cos_slope = math.cos(slope) 66 | cos_part = cos_altituderad * cos_slope * cos_aspect 67 | 68 | res = sin_part + cos_part 69 | output[i, j] = (res + 1) * 0.5 70 | 71 | 72 | def _run_cupy(d_data, azimuth, angle_altitude): 73 | # Precompute constant values shared between all threads 74 | altituderad = angle_altitude * np.pi / 180. 75 | sin_altituderad = np.sin(altituderad) 76 | cos_altituderad = np.cos(altituderad) 77 | azimuthrad = (360.0 - azimuth) * np.pi / 180. 78 | 79 | # Allocate output buffer and launch kernel with appropriate dimensions 80 | import cupy 81 | d_data = d_data.astype(cupy.float32) 82 | output = cupy.empty(d_data.shape, np.float32) 83 | griddim, blockdim = calc_cuda_dims(d_data.shape) 84 | _gpu_calc_numba[griddim, blockdim]( 85 | d_data, output, sin_altituderad, cos_altituderad, azimuthrad 86 | ) 87 | 88 | # Fill borders with nans. 89 | output[0, :] = cupy.nan 90 | output[-1, :] = cupy.nan 91 | output[:, 0] = cupy.nan 92 | output[:, -1] = cupy.nan 93 | 94 | return output 95 | 96 | 97 | def hillshade(agg: xr.DataArray, 98 | azimuth: int = 225, 99 | angle_altitude: int = 25, 100 | name: Optional[str] = 'hillshade', 101 | shadows: bool = False) -> xr.DataArray: 102 | """ 103 | Calculates, for all cells in the array, an illumination value of 104 | each cell based on illumination from a specific azimuth and 105 | altitude. 106 | 107 | Parameters 108 | ---------- 109 | agg : xarray.DataArray 110 | 2D NumPy, CuPy, NumPy-backed Dask, or Cupy-backed Dask array 111 | of elevation values. 112 | angle_altitude : int, default=25 113 | Altitude angle of the sun specified in degrees. 114 | azimuth : int, default=225 115 | The angle between the north vector and the perpendicular 116 | projection of the light source down onto the horizon 117 | specified in degrees. 118 | name : str, default='hillshade' 119 | Name of output DataArray. 120 | shadows : bool, default=False 121 | Whether to calculate shadows or not. Shadows are available 122 | only for Cupy-backed Dask arrays and only if rtxpy is 123 | installed and appropriate graphics hardware is available. 124 | 125 | Returns 126 | ------- 127 | hillshade_agg : xarray.DataArray, of same type as `agg` 128 | 2D aggregate array of illumination values. 129 | 130 | References 131 | ---------- 132 | - GeoExamples: http://geoexamples.blogspot.com/2014/03/shaded-relief-images-using-gdal-python.html # noqa 133 | 134 | Examples 135 | -------- 136 | .. 
sourcecode:: python 137 | 138 | >>> import numpy as np 139 | >>> import xarray as xr 140 | >>> from xrspatial import hillshade 141 | >>> data = np.array([ 142 | ... [0., 0., 0., 0., 0.], 143 | ... [0., 1., 0., 2., 0.], 144 | ... [0., 0., 3., 0., 0.], 145 | ... [0., 0., 0., 0., 0.], 146 | ... [0., 0., 0., 0., 0.]]) 147 | >>> n, m = data.shape 148 | >>> raster = xr.DataArray(data, dims=['y', 'x'], name='raster') 149 | >>> raster['y'] = np.arange(n)[::-1] 150 | >>> raster['x'] = np.arange(m) 151 | >>> hillshade_agg = hillshade(raster) 152 | >>> print(hillshade_agg) 153 | 154 | array([[ nan, nan, nan, nan, nan], 155 | [ nan, 0.71130913, 0.44167341, 0.71130913, nan], 156 | [ nan, 0.95550163, 0.71130913, 0.52478473, nan], 157 | [ nan, 0.71130913, 0.88382559, 0.71130913, nan], 158 | [ nan, nan, nan, nan, nan]]) 159 | Coordinates: 160 | * y (y) int32 4 3 2 1 0 161 | * x (x) int32 0 1 2 3 4 162 | """ 163 | 164 | if shadows and not has_rtx(): 165 | raise RuntimeError( 166 | "Can only calculate shadows if cupy and rtxpy are available") 167 | 168 | # numpy case 169 | if isinstance(agg.data, np.ndarray): 170 | out = _run_numpy(agg.data, azimuth, angle_altitude) 171 | 172 | # cupy/numba case 173 | elif has_cuda_and_cupy() and is_cupy_array(agg.data): 174 | if shadows and has_rtx(): 175 | from .gpu_rtx.hillshade import hillshade_rtx 176 | out = hillshade_rtx(agg, azimuth, angle_altitude, shadows=shadows) 177 | else: 178 | out = _run_cupy(agg.data, azimuth, angle_altitude) 179 | 180 | # dask + cupy case 181 | elif (has_cuda_and_cupy() and isinstance(agg.data, da.Array) and 182 | is_cupy_backed(agg)): 183 | raise NotImplementedError("Dask/CuPy hillshade not implemented") 184 | 185 | # dask + numpy case 186 | elif isinstance(agg.data, da.Array): 187 | out = _run_dask_numpy(agg.data, azimuth, angle_altitude) 188 | 189 | else: 190 | raise TypeError('Unsupported Array Type: {}'.format(type(agg.data))) 191 | 192 | return xr.DataArray(out, 193 | name=name, 194 | coords=agg.coords, 195 | dims=agg.dims, 196 | attrs=agg.attrs) 197 | -------------------------------------------------------------------------------- /xrspatial/slope.py: -------------------------------------------------------------------------------- 1 | # std lib 2 | from functools import partial 3 | from math import atan 4 | from typing import Union 5 | 6 | # 3rd-party 7 | try: 8 | import cupy 9 | except ImportError: 10 | class cupy(object): 11 | ndarray = False 12 | 13 | import dask.array as da 14 | import numpy as np 15 | import xarray as xr 16 | from numba import cuda 17 | 18 | # local modules 19 | from xrspatial.utils import (ArrayTypeFunctionMapping, cuda_args, get_dataarray_resolution, ngjit, 20 | not_implemented_func) 21 | 22 | 23 | @ngjit 24 | def _cpu(data, cellsize_x, cellsize_y): 25 | data = data.astype(np.float32) 26 | out = np.zeros_like(data, dtype=np.float32) 27 | out[:] = np.nan 28 | rows, cols = data.shape 29 | for y in range(1, rows - 1): 30 | for x in range(1, cols - 1): 31 | a = data[y + 1, x - 1] 32 | b = data[y + 1, x] 33 | c = data[y + 1, x + 1] 34 | d = data[y, x - 1] 35 | f = data[y, x + 1] 36 | g = data[y - 1, x - 1] 37 | h = data[y - 1, x] 38 | i = data[y - 1, x + 1] 39 | dz_dx = ((c + 2 * f + i) - (a + 2 * d + g)) / (8 * cellsize_x) 40 | dz_dy = ((g + 2 * h + i) - (a + 2 * b + c)) / (8 * cellsize_y) 41 | p = (dz_dx * dz_dx + dz_dy * dz_dy) ** .5 42 | out[y, x] = np.arctan(p) * 57.29578 43 | return out 44 | 45 | 46 | def _run_numpy(data: np.ndarray, 47 | cellsize_x: Union[int, float], 48 | cellsize_y: Union[int, float]) -> 
np.ndarray: 49 | out = _cpu(data, cellsize_x, cellsize_y) 50 | return out 51 | 52 | 53 | def _run_dask_numpy(data: da.Array, 54 | cellsize_x: Union[int, float], 55 | cellsize_y: Union[int, float]) -> da.Array: 56 | data = data.astype(np.float32) 57 | _func = partial(_cpu, 58 | cellsize_x=cellsize_x, 59 | cellsize_y=cellsize_y) 60 | 61 | out = data.map_overlap(_func, 62 | depth=(1, 1), 63 | boundary=np.nan, 64 | meta=np.array(())) 65 | return out 66 | 67 | 68 | @cuda.jit(device=True) 69 | def _gpu(arr, cellsize_x, cellsize_y): 70 | a = arr[2, 0] 71 | b = arr[2, 1] 72 | c = arr[2, 2] 73 | d = arr[1, 0] 74 | f = arr[1, 2] 75 | g = arr[0, 0] 76 | h = arr[0, 1] 77 | i = arr[0, 2] 78 | 79 | dz_dx = ((c + 2 * f + i) - (a + 2 * d + g)) / (8 * cellsize_x[0]) 80 | dz_dy = ((g + 2 * h + i) - (a + 2 * b + c)) / (8 * cellsize_y[0]) 81 | p = (dz_dx * dz_dx + dz_dy * dz_dy) ** 0.5 82 | return atan(p) * 57.29578 83 | 84 | 85 | @cuda.jit 86 | def _run_gpu(arr, cellsize_x_arr, cellsize_y_arr, out): 87 | i, j = cuda.grid(2) 88 | di = 1 89 | dj = 1 90 | if (i - di >= 0 and i + di < out.shape[0] and 91 | j - dj >= 0 and j + dj < out.shape[1]): 92 | out[i, j] = _gpu(arr[i - di:i + di + 1, j - dj:j + dj + 1], 93 | cellsize_x_arr, 94 | cellsize_y_arr) 95 | 96 | 97 | def _run_cupy(data: cupy.ndarray, 98 | cellsize_x: Union[int, float], 99 | cellsize_y: Union[int, float]) -> cupy.ndarray: 100 | cellsize_x_arr = cupy.array([float(cellsize_x)], dtype='f4') 101 | cellsize_y_arr = cupy.array([float(cellsize_y)], dtype='f4') 102 | data = data.astype(cupy.float32) 103 | 104 | griddim, blockdim = cuda_args(data.shape) 105 | out = cupy.empty(data.shape, dtype='f4') 106 | out[:] = cupy.nan 107 | 108 | _run_gpu[griddim, blockdim](data, 109 | cellsize_x_arr, 110 | cellsize_y_arr, 111 | out) 112 | return out 113 | 114 | 115 | def slope(agg: xr.DataArray, 116 | name: str = 'slope') -> xr.DataArray: 117 | """ 118 | Returns slope of input aggregate in degrees. 119 | 120 | Parameters 121 | ---------- 122 | agg : xr.DataArray 123 | 2D array of elevation data. 124 | name : str, default='slope' 125 | Name of output DataArray. 126 | 127 | Returns 128 | ------- 129 | slope_agg : xr.DataArray of same type as `agg` 130 | 2D array of slope values. 131 | All other input attributes are preserved. 132 | 133 | References 134 | ---------- 135 | - arcgis: http://desktop.arcgis.com/en/arcmap/10.3/tools/spatial-analyst-toolbox/how-slope-works.htm # noqa 136 | 137 | Examples 138 | -------- 139 | .. sourcecode:: python 140 | 141 | >>> import numpy as np 142 | >>> import xarray as xr 143 | >>> from xrspatial import slope 144 | >>> data = np.array([ 145 | ... [0, 0, 0, 0, 0], 146 | ... [0, 0, 0, -1, 2], 147 | ... [0, 0, 0, 0, 1], 148 | ... [0, 0, 0, 5, 0]]) 149 | >>> agg = xr.DataArray(data) 150 | >>> slope_agg = slope(agg) 151 | >>> slope_agg 152 | 153 | array([[ nan, nan, nan, nan, nan], 154 | [ nan, 0. , 14.036243, 32.512516, nan], 155 | [ nan, 0. 
, 42.031113, 53.395725, nan], 156 | [ nan, nan, nan, nan, nan]], 157 | dtype=float32) 158 | Dimensions without coordinates: dim_0, dim_1 159 | """ 160 | 161 | cellsize_x, cellsize_y = get_dataarray_resolution(agg) 162 | mapper = ArrayTypeFunctionMapping( 163 | numpy_func=_run_numpy, 164 | cupy_func=_run_cupy, 165 | dask_func=_run_dask_numpy, 166 | dask_cupy_func=lambda *args: not_implemented_func( 167 | *args, messages='slope() does not support dask with cupy backed DataArray' # noqa 168 | ), 169 | ) 170 | out = mapper(agg)(agg.data, cellsize_x, cellsize_y) 171 | 172 | return xr.DataArray(out, 173 | name=name, 174 | coords=agg.coords, 175 | dims=agg.dims, 176 | attrs=agg.attrs) 177 | -------------------------------------------------------------------------------- /xrspatial/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/makepath/xarray-spatial/6dd5faa2770e032dcc8c77eca71790051bd1d3ee/xrspatial/tests/__init__.py -------------------------------------------------------------------------------- /xrspatial/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | 5 | @pytest.fixture 6 | def random_data(size, dtype): 7 | rng = np.random.default_rng(2841) 8 | data = rng.integers(-100, 100, size=size) 9 | data = data.astype(dtype) 10 | return data 11 | 12 | 13 | @pytest.fixture 14 | def elevation_raster(): 15 | elevation = np.array([ 16 | [ np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], 17 | [704.237 , 242.24084, 429.3324 , 779.8816 , 193.29506, 984.6926 ], 18 | [226.56795, 815.7483 , 290.6041 , 76.49687, 820.89716, 32.27882], 19 | [344.8238 , 256.34998, 806.8326 , 602.0442 , 721.1633 , 496.95636], 20 | [185.43515, 834.10425, 387.0871 , 716.0262 , 49.61273, 752.95483], 21 | [302.4271 , 151.49211, 442.32797, 358.4702 , 659.8187 , 447.1241 ], 22 | [148.04834, 819.2133 , 468.97913, 977.11694, 597.69666, 999.14185], 23 | [268.1575 , 625.96466, 840.26483, 448.28333, 859.2699 , 528.04095] 24 | ], dtype=np.float32) 25 | return elevation 26 | 27 | 28 | @pytest.fixture 29 | def elevation_raster_no_nans(): 30 | elevation = np.array([ 31 | [870.5345 , 283.04907, 845.2779 , 51.21859, 990.8278 , 600.64545], 32 | [704.237 , 242.24084, 429.3324 , 779.8816 , 193.29506, 984.6926 ], 33 | [226.56795, 815.7483 , 290.6041 , 76.49687, 820.89716, 32.27882], 34 | [344.8238 , 256.34998, 806.8326 , 602.0442 , 721.1633 , 496.95636], 35 | [185.43515, 834.10425, 387.0871 , 716.0262 , 49.61273, 752.95483], 36 | [302.4271 , 151.49211, 442.32797, 358.4702 , 659.8187 , 447.1241 ], 37 | [148.04834, 819.2133 , 468.97913, 977.11694, 597.69666, 999.14185], 38 | [268.1575 , 625.96466, 840.26483, 448.28333, 859.2699 , 528.04095] 39 | ], dtype=np.float32) 40 | return elevation 41 | 42 | 43 | @pytest.fixture 44 | def raster(): 45 | data = np.array([ 46 | [6., 7., 3., 4., 8., 1.], 47 | [4., 9., 7., 5., 6., 9.], 48 | [4., 3., 3., 1., 3., 7.], 49 | [3., 4., 9., 3., 7., 0.], 50 | [2., 1., 6., 5., 6., 2.], 51 | [4., 2., 4., 3., 8., 5.], 52 | [4., 1., 8., 5., 7., 0.], 53 | [7., 4., 6., 4., 1., 1.] 
54 | ], dtype=np.float32) 55 | return data 56 | -------------------------------------------------------------------------------- /xrspatial/tests/general_checks.py: -------------------------------------------------------------------------------- 1 | import dask.array as da 2 | import numpy as np 3 | import pytest 4 | import xarray as xr 5 | 6 | from xrspatial.utils import ArrayTypeFunctionMapping, has_cuda_and_cupy 7 | 8 | # Use this as a decorator to skip tests if do not have both CUDA and CuPy available. 9 | cuda_and_cupy_available = pytest.mark.skipif( 10 | not has_cuda_and_cupy(), reason="Requires CUDA and CuPy") 11 | 12 | 13 | def create_test_raster( 14 | data, 15 | backend='numpy', 16 | name='myraster', 17 | dims=['y', 'x'], 18 | attrs={'res': (0.5, 0.5), 'crs': 'EPSG: 4326'}, 19 | chunks=(3, 3) 20 | ): 21 | raster = xr.DataArray(data, name=name, dims=dims, attrs=attrs) 22 | 23 | # default res if none provided 24 | res = (0.5, 0.5) 25 | if attrs is not None: 26 | if 'res' in attrs: 27 | res = attrs['res'] 28 | # set coords for test raster, 2D coords only 29 | raster[dims[0]] = np.linspace((data.shape[0] - 1) * res[0], 0, data.shape[0]) 30 | raster[dims[1]] = np.linspace(0, (data.shape[1] - 1) * res[1], data.shape[1]) 31 | 32 | raster[dims[0]] = np.linspace((data.shape[0] - 1)/2, 0, data.shape[0]) 33 | raster[dims[1]] = np.linspace(0, (data.shape[1] - 1)/2, data.shape[1]) 34 | 35 | if has_cuda_and_cupy() and 'cupy' in backend: 36 | import cupy 37 | raster.data = cupy.asarray(raster.data) 38 | 39 | if 'dask' in backend: 40 | raster.data = da.from_array(raster.data, chunks=chunks) 41 | 42 | return raster 43 | 44 | 45 | def general_output_checks(input_agg: xr.DataArray, 46 | output_agg: xr.DataArray, 47 | expected_results: np.ndarray = None, 48 | verify_attrs: bool = True, 49 | verify_dtype: bool = False, 50 | rtol=1e-06): 51 | 52 | # type of output is the same as of input 53 | assert isinstance(output_agg.data, type(input_agg.data)) 54 | 55 | if isinstance(input_agg.data, da.Array): 56 | # dask case 57 | assert isinstance( 58 | output_agg.data.compute(), type(input_agg.data.compute())) 59 | 60 | if verify_attrs: 61 | # shape and other attributes remain the same 62 | assert output_agg.shape == input_agg.shape 63 | assert output_agg.dims == input_agg.dims 64 | assert output_agg.attrs == input_agg.attrs 65 | for coord in input_agg.coords: 66 | np.testing.assert_allclose( 67 | output_agg[coord].data, input_agg[coord].data, equal_nan=True 68 | ) 69 | 70 | if expected_results is not None: 71 | get_numpy_data = lambda output: output # noqa: E731 72 | get_dask_numpy_data = lambda output: output.compute() # noqa: E731 73 | get_cupy_data = lambda output: output.get() # noqa: E731 74 | get_dask_cupy_data = lambda output: output.compute().get() # noqa: E731 75 | 76 | mapper = ArrayTypeFunctionMapping( 77 | numpy_func=get_numpy_data, 78 | dask_func=get_dask_numpy_data, 79 | cupy_func=get_cupy_data, 80 | dask_cupy_func=get_dask_cupy_data, 81 | ) 82 | output_data = mapper(output_agg)(output_agg.data) 83 | np.testing.assert_allclose(output_data, expected_results, equal_nan=True, rtol=rtol) 84 | 85 | if verify_dtype: 86 | assert output_data.dtype == expected_results.dtype 87 | 88 | 89 | def assert_input_data_unmodified(data_before, data_after): 90 | assert data_before.equals(data_after) 91 | 92 | 93 | def assert_nan_edges_effect(result_agg): 94 | # nan edge effect 95 | edges = [ 96 | result_agg.data[0, :], 97 | result_agg.data[-1, :], 98 | result_agg.data[:, 0], 99 | result_agg.data[:, -1], 100 | ] 101 
| for edge in edges: 102 | np.testing.assert_array_equal(edge, np.nan) 103 | 104 | 105 | def assert_numpy_equals_dask_numpy(numpy_agg, dask_agg, func, nan_edges=True): 106 | numpy_result = func(numpy_agg) 107 | if nan_edges: 108 | assert_nan_edges_effect(numpy_result) 109 | 110 | dask_result = func(dask_agg) 111 | general_output_checks(dask_agg, dask_result) 112 | np.testing.assert_allclose(numpy_result.data, dask_result.data.compute(), equal_nan=True) 113 | 114 | 115 | def assert_numpy_equals_cupy(numpy_agg, cupy_agg, func, nan_edges=True, atol=0, rtol=1e-7): 116 | numpy_result = func(numpy_agg) 117 | if nan_edges: 118 | assert_nan_edges_effect(numpy_result) 119 | 120 | cupy_result = func(cupy_agg) 121 | general_output_checks(cupy_agg, cupy_result) 122 | np.testing.assert_allclose( 123 | numpy_result.data, cupy_result.data.get(), equal_nan=True, atol=atol, rtol=rtol) 124 | 125 | 126 | def assert_numpy_equals_dask_cupy(numpy_agg, dask_cupy_agg, func, nan_edges=True): 127 | numpy_result = func(numpy_agg) 128 | if nan_edges: 129 | assert_nan_edges_effect(numpy_result) 130 | 131 | dask_cupy_result = func(dask_cupy_agg) 132 | general_output_checks(dask_cupy_agg, dask_cupy_result) 133 | np.testing.assert_allclose( 134 | numpy_result.data, dask_cupy_result.data.compute().get(), equal_nan=True 135 | ) 136 | -------------------------------------------------------------------------------- /xrspatial/tests/test_analytics.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | from xrspatial import aspect, curvature, slope 5 | from xrspatial.analytics import summarize_terrain 6 | from xrspatial.tests.general_checks import create_test_raster 7 | 8 | 9 | def test_summarize_terrain_no_name(): 10 | data = np.zeros((10, 20)) 11 | test_terrain = create_test_raster(data, name=None) 12 | msg = "Requires xr.DataArray.name property to be set" 13 | with pytest.raises(NameError, match=msg): 14 | summarize_terrain(test_terrain) 15 | 16 | 17 | @pytest.mark.parametrize("size", [(2, 4), (100, 150)]) 18 | @pytest.mark.parametrize( 19 | "dtype", [np.int32, np.int64, np.uint32, np.uint64, np.float32, np.float64] 20 | ) 21 | def test_summarize_terrain(random_data): 22 | test_terrain = create_test_raster(random_data, name='myterrain') 23 | summarized_ds = summarize_terrain(test_terrain) 24 | variables = [v for v in summarized_ds] 25 | should_have = ['myterrain', 26 | 'myterrain-slope', 27 | 'myterrain-curvature', 28 | 'myterrain-aspect'] 29 | assert variables == should_have 30 | 31 | np.testing.assert_allclose(summarized_ds['myterrain-slope'], slope(test_terrain)) 32 | np.testing.assert_allclose(summarized_ds['myterrain-curvature'], curvature(test_terrain)) 33 | np.testing.assert_allclose(summarized_ds['myterrain-aspect'], aspect(test_terrain)) 34 | -------------------------------------------------------------------------------- /xrspatial/tests/test_aspect.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | from xrspatial import aspect 5 | from xrspatial.tests.general_checks import (assert_nan_edges_effect, assert_numpy_equals_cupy, 6 | assert_numpy_equals_dask_numpy, create_test_raster, 7 | cuda_and_cupy_available, general_output_checks) 8 | 9 | 10 | def input_data(data, backend='numpy'): 11 | raster = create_test_raster(data, backend) 12 | return raster 13 | 14 | 15 | @pytest.fixture 16 | def qgis_aspect(): 17 | result = np.array([ 18 | [ np.nan, np.nan, 
np.nan, np.nan, np.nan, np.nan], 19 | [ np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], 20 | [233.19478 , 278.358 , 45.18813 , 306.6476 , 358.34296 , 106.45898 ], 21 | [267.7002 , 274.42487 , 11.035832, 357.9641 , 129.98279 , 50.069843], 22 | [263.18484 , 238.47426 , 196.37103 , 149.25227 , 187.85748 , 263.684 ], 23 | [266.63937 , 271.05124 , 312.09726 , 348.89136 , 351.618 , 315.59424 ], 24 | [279.90872 , 314.11356 , 345.76315 , 327.5568 , 339.5455 , 312.9249 ], 25 | [271.93985 , 268.81046 , 24.793104, 185.978 , 299.82904 , 159.0188 ]], dtype=np.float32) 26 | return result 27 | 28 | 29 | def test_numpy_equals_qgis(elevation_raster, qgis_aspect): 30 | numpy_agg = input_data(elevation_raster, backend='numpy') 31 | xrspatial_aspect = aspect(numpy_agg, name='numpy_aspect') 32 | 33 | general_output_checks(numpy_agg, xrspatial_aspect, verify_dtype=True) 34 | assert xrspatial_aspect.name == 'numpy_aspect' 35 | 36 | xrspatial_vals = xrspatial_aspect.data[1:-1, 1:-1] 37 | qgis_vals = qgis_aspect[1:-1, 1:-1] 38 | # aspect is nan if nan input 39 | # aspect is invalid (-1) if slope equals 0 40 | # otherwise aspect values are from 0 to 360 41 | np.testing.assert_allclose(xrspatial_vals, qgis_vals, rtol=1e-05, equal_nan=True) 42 | # nan edge effect 43 | assert_nan_edges_effect(xrspatial_aspect) 44 | 45 | 46 | def test_numpy_equals_dask_qgis_data(elevation_raster): 47 | # compare using the data run through QGIS 48 | numpy_agg = input_data(elevation_raster, 'numpy') 49 | dask_agg = input_data(elevation_raster, 'dask+numpy') 50 | assert_numpy_equals_dask_numpy(numpy_agg, dask_agg, aspect) 51 | 52 | 53 | @pytest.mark.parametrize("size", [(2, 4), (10, 15)]) 54 | @pytest.mark.parametrize( 55 | "dtype", [np.int32, np.int64, np.uint32, np.uint64, np.float32, np.float64]) 56 | def test_numpy_equals_dask_random_data(random_data): 57 | numpy_agg = create_test_raster(random_data, backend='numpy') 58 | dask_agg = create_test_raster(random_data, backend='dask') 59 | assert_numpy_equals_dask_numpy(numpy_agg, dask_agg, aspect) 60 | 61 | 62 | @cuda_and_cupy_available 63 | def test_numpy_equals_cupy_qgis_data(elevation_raster): 64 | # compare using the data run through QGIS 65 | numpy_agg = input_data(elevation_raster) 66 | cupy_agg = input_data(elevation_raster, 'cupy') 67 | assert_numpy_equals_cupy(numpy_agg, cupy_agg, aspect) 68 | 69 | 70 | @cuda_and_cupy_available 71 | @pytest.mark.parametrize("size", [(2, 4), (10, 15)]) 72 | @pytest.mark.parametrize( 73 | "dtype", [np.int32, np.int64, np.uint32, np.uint64, np.float32, np.float64]) 74 | def test_numpy_equals_cupy_random_data(random_data): 75 | numpy_agg = create_test_raster(random_data, backend='numpy') 76 | cupy_agg = create_test_raster(random_data, backend='cupy') 77 | assert_numpy_equals_cupy(numpy_agg, cupy_agg, aspect, atol=1e-6, rtol=1e-6) 78 | -------------------------------------------------------------------------------- /xrspatial/tests/test_bump.py: -------------------------------------------------------------------------------- 1 | from xrspatial import bump 2 | 3 | 4 | def test_bump(): 5 | bumps = bump(20, 20) 6 | assert bumps is not None 7 | -------------------------------------------------------------------------------- /xrspatial/tests/test_curvature.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | from xrspatial import curvature 5 | from xrspatial.tests.general_checks import (assert_numpy_equals_cupy, 6 | assert_numpy_equals_dask_numpy, create_test_raster, 7 | cuda_and_cupy_available, 
general_output_checks) 8 | 9 | 10 | @pytest.fixture 11 | def flat_surface(size, dtype): 12 | flat = np.zeros(size, dtype=dtype) 13 | expected_result = np.zeros(size, dtype=np.float32) 14 | # nan edges effect 15 | expected_result[0, :] = np.nan 16 | expected_result[-1, :] = np.nan 17 | expected_result[:, 0] = np.nan 18 | expected_result[:, -1] = np.nan 19 | return flat, expected_result 20 | 21 | 22 | @pytest.fixture 23 | def convex_surface(): 24 | convex_data = np.array([ 25 | [0, 0, 0, 0, 0, 0], 26 | [0, 0, 0, 0, 0, 0], 27 | [0, 0, 0, -1, 0, 0], 28 | [0, 0, 0, 0, 0, 0], 29 | [0, 0, 0, 0, 0, 0]]) 30 | expected_result = np.asarray([ 31 | [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], 32 | [np.nan, 0, 0., 100., 0., np.nan], 33 | [np.nan, 0, 100., -400., 100., np.nan], 34 | [np.nan, 0, 0., 100., 0., np.nan], 35 | [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan] 36 | ], dtype=np.float32) 37 | return convex_data, expected_result 38 | 39 | 40 | @pytest.fixture 41 | def concave_surface(): 42 | concave_data = np.array([ 43 | [0, 0, 0, 0, 0, 0], 44 | [0, 0, 0, 0, 0, 0], 45 | [0, 0, 0, 1, 0, 0], 46 | [0, 0, 0, 0, 0, 0], 47 | [0, 0, 0, 0, 0, 0]]) 48 | expected_result = np.asarray([ 49 | [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], 50 | [np.nan, 0, 0., -100., 0., np.nan], 51 | [np.nan, 0, -100., 400., -100., np.nan], 52 | [np.nan, 0, 0., -100., 0., np.nan], 53 | [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan] 54 | ], dtype=np.float32) 55 | return concave_data, expected_result 56 | 57 | 58 | @pytest.mark.parametrize("size", [(2, 4), (10, 15)]) 59 | @pytest.mark.parametrize( 60 | "dtype", [np.int32, np.int64, np.uint32, np.uint64, np.float32, np.float64]) 61 | def test_curvature_on_flat_surface(flat_surface): 62 | flat_data, expected_result = flat_surface 63 | numpy_agg = create_test_raster(flat_data, attrs={'res': (1, 1)}) 64 | numpy_result = curvature(numpy_agg) 65 | general_output_checks(numpy_agg, numpy_result, expected_result, verify_dtype=True) 66 | 67 | 68 | def test_curvature_on_convex_surface(convex_surface): 69 | convex_data, expected_result = convex_surface 70 | numpy_agg = create_test_raster(convex_data, attrs={'res': (1, 1)}) 71 | numpy_result = curvature(numpy_agg) 72 | general_output_checks(numpy_agg, numpy_result, expected_result, verify_dtype=True) 73 | 74 | 75 | def test_curvature_on_concave_surface(concave_surface): 76 | concave_data, expected_result = concave_surface 77 | numpy_agg = create_test_raster(concave_data, attrs={'res': (1, 1)}) 78 | numpy_result = curvature(numpy_agg) 79 | general_output_checks(numpy_agg, numpy_result, expected_result, verify_dtype=True) 80 | 81 | 82 | @cuda_and_cupy_available 83 | @pytest.mark.parametrize("size", [(2, 4), (10, 15)]) 84 | @pytest.mark.parametrize( 85 | "dtype", [np.int32, np.int64, np.uint32, np.uint64, np.float32, np.float64]) 86 | def test_numpy_equals_cupy_random_data(random_data): 87 | numpy_agg = create_test_raster(random_data, backend='numpy') 88 | cupy_agg = create_test_raster(random_data, backend='cupy') 89 | assert_numpy_equals_cupy(numpy_agg, cupy_agg, curvature) 90 | # NOTE: Dask + GPU code paths don't currently work because of 91 | # dask casting cupy arrays to numpy arrays during 92 | # https://github.com/dask/dask/issues/4842 93 | 94 | 95 | @pytest.mark.parametrize("size", [(2, 4), (10, 15)]) 96 | @pytest.mark.parametrize( 97 | "dtype", [np.int32, np.int64, np.uint32, np.uint64, np.float32, np.float64]) 98 | def test_numpy_equals_dask_random_data(random_data): 99 | numpy_agg = create_test_raster(random_data, 
backend='numpy') 100 | dask_agg = create_test_raster(random_data, backend='dask') 101 | assert_numpy_equals_dask_numpy(numpy_agg, dask_agg, curvature) 102 | -------------------------------------------------------------------------------- /xrspatial/tests/test_datasets.py: -------------------------------------------------------------------------------- 1 | import dask.array as da 2 | import xarray as xr 3 | 4 | from xrspatial.datasets import make_terrain 5 | 6 | 7 | def test_make_terrain(): 8 | terrain = make_terrain() 9 | assert terrain is not None 10 | assert isinstance(terrain, xr.DataArray) 11 | assert isinstance(terrain.data, da.Array) 12 | -------------------------------------------------------------------------------- /xrspatial/tests/test_hillshade.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | import xarray as xr 4 | from numpy.testing import assert_allclose, assert_array_less 5 | 6 | from xrspatial import hillshade 7 | from xrspatial.tests.general_checks import (assert_numpy_equals_cupy, 8 | assert_numpy_equals_dask_numpy, create_test_raster, 9 | cuda_and_cupy_available, general_output_checks) 10 | 11 | from ..gpu_rtx import has_rtx 12 | 13 | 14 | @pytest.fixture 15 | def data_gaussian(): 16 | _x = np.linspace(0, 50, 101) 17 | _y = _x.copy() 18 | _mean = 25 19 | _sdev = 5 20 | X, Y = np.meshgrid(_x, _y, sparse=True) 21 | x_fac = -np.power(X-_mean, 2) 22 | y_fac = -np.power(Y-_mean, 2) 23 | gaussian = np.exp((x_fac+y_fac)/(2*_sdev**2)) / (2.5*_sdev) 24 | return gaussian 25 | 26 | 27 | def test_hillshade(data_gaussian): 28 | """ 29 | Assert Simple Hillshade transfer function 30 | """ 31 | da_gaussian = xr.DataArray(data_gaussian) 32 | da_gaussian_shade = hillshade(da_gaussian, name='hillshade_agg') 33 | general_output_checks(da_gaussian, da_gaussian_shade) 34 | assert da_gaussian_shade.name == 'hillshade_agg' 35 | assert da_gaussian_shade.mean() > 0 36 | assert da_gaussian_shade[60, 60] > 0 37 | 38 | 39 | @pytest.mark.parametrize("size", [(2, 4), (10, 15)]) 40 | @pytest.mark.parametrize( 41 | "dtype", [np.int32, np.int64, np.float32, np.float64]) 42 | def test_hillshade_numpy_equals_dask_numpy(random_data): 43 | numpy_agg = create_test_raster(random_data, backend='numpy') 44 | dask_agg = create_test_raster(random_data, backend='dask') 45 | assert_numpy_equals_dask_numpy(numpy_agg, dask_agg, hillshade) 46 | 47 | 48 | @cuda_and_cupy_available 49 | @pytest.mark.parametrize("size", [(2, 4), (10, 15)]) 50 | @pytest.mark.parametrize( 51 | "dtype", [np.int32, np.int64, np.float32, np.float64]) 52 | def test_hillshade_gpu_equals_cpu(random_data): 53 | numpy_agg = create_test_raster(random_data, backend='numpy') 54 | cupy_agg = create_test_raster(random_data, backend='cupy') 55 | assert_numpy_equals_cupy(numpy_agg, cupy_agg, hillshade, rtol=1e-6) 56 | 57 | 58 | @pytest.mark.skipif(not has_rtx(), reason="RTX not available") 59 | def test_hillshade_rtx_with_shadows(data_gaussian): 60 | import cupy 61 | 62 | tall_gaussian = 400*data_gaussian 63 | cpu = hillshade(xr.DataArray(tall_gaussian)) 64 | 65 | tall_gaussian = cupy.asarray(tall_gaussian) 66 | rtx = hillshade(xr.DataArray(tall_gaussian)) 67 | rtx.data = cupy.asnumpy(rtx.data) 68 | 69 | assert cpu.shape == rtx.shape 70 | nhalf = cpu.shape[0] // 2 71 | 72 | # Quadrant nearest sun direction should be almost identical. 
73 | quad_cpu = cpu.data[nhalf::, ::nhalf] 74 | quad_rtx = rtx.data[nhalf::, ::nhalf] 75 | assert_allclose(quad_cpu, quad_rtx, atol=0.03) 76 | 77 | # Opposite diagonal should be in shadow. 78 | diag_cpu = np.diagonal(cpu.data[::-1])[nhalf:] 79 | diag_rtx = np.diagonal(rtx.data[::-1])[nhalf:] 80 | assert_array_less(diag_rtx, diag_cpu + 1e-3) 81 | -------------------------------------------------------------------------------- /xrspatial/tests/test_pathfinding.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | from xrspatial import a_star_search 5 | from xrspatial.tests.general_checks import create_test_raster, general_output_checks 6 | 7 | 8 | @pytest.fixture 9 | def input_data(): 10 | data = np.array([[0, 1, 0, 0], 11 | [1, 1, 0, 0], 12 | [0, 1, 2, 2], 13 | [1, 0, 2, 0], 14 | [0, 2, 2, 2]]) 15 | agg = create_test_raster(data, dims=['lat', 'lon']) 16 | return agg 17 | 18 | 19 | @pytest.fixture 20 | def input_data_with_nans(): 21 | data = np.array([[0, 1, 0, 0], 22 | [1, 1, np.nan, 0], 23 | [0, 1, 2, 2], 24 | [1, 0, 2, 0], 25 | [0, np.nan, 2, 2]]) 26 | agg = create_test_raster(data, dims=['lat', 'lon']) 27 | 28 | # start and end at a nan pixel, coordinate in (lat, lon) format 29 | start = (1.5, 1) 30 | goal = (0, 0.5) 31 | return agg, start, goal 32 | 33 | 34 | @pytest.fixture 35 | def result_8_connectivity(): 36 | expected_result = np.array([[np.nan, np.nan, 0., np.nan], 37 | [np.nan, 1.41421356, np.nan, np.nan], 38 | [np.nan, 2.41421356, np.nan, np.nan], 39 | [np.nan, 3.41421356, np.nan, np.nan], 40 | [np.nan, np.nan, np.nan, np.nan]]) 41 | return expected_result 42 | 43 | 44 | @pytest.fixture 45 | def result_4_connectivity(): 46 | expected_result = np.array([[np.nan, 1, 0., np.nan], 47 | [np.nan, 2, np.nan, np.nan], 48 | [np.nan, 3, np.nan, np.nan], 49 | [np.nan, 4, np.nan, np.nan], 50 | [np.nan, np.nan, np.nan, np.nan]]) 51 | return expected_result 52 | 53 | 54 | def test_a_star_search_no_barriers(input_data): 55 | agg = input_data 56 | barriers = [] 57 | # no barriers, so there is always a path from a start location to a goal location 58 | for x0 in agg['lon']: 59 | for y0 in agg['lat']: 60 | start = (y0, x0) 61 | for x1 in agg['lon']: 62 | for y1 in agg['lat']: 63 | goal = (y1, x1) 64 | path_agg = a_star_search( 65 | agg, start, goal, barriers, 'lon', 'lat' 66 | ) 67 | general_output_checks(agg, path_agg) 68 | assert type(path_agg.values[0][0]) == np.float64 69 | if start == goal: 70 | assert np.nanmax(path_agg) == 0 71 | assert np.nanmin(path_agg) == 0 72 | else: 73 | assert np.nanmax(path_agg) > 0 74 | assert np.nanmin(path_agg) == 0 75 | 76 | 77 | def test_a_star_search_with_barriers(input_data): 78 | agg = input_data 79 | barriers = [1] 80 | # set pixels with value 1 as barriers, 81 | # cannot go from (0, 0) to anywhere since it is surrounded by 1s 82 | start = (2, 0) 83 | for x1 in agg['lon']: 84 | for y1 in agg['lat']: 85 | goal = (y1, x1) 86 | if (goal != start): 87 | path_agg = a_star_search( 88 | agg, start, goal, barriers, 'lon', 'lat' 89 | ) 90 | # no path, all cells in path_agg are nans 91 | expected_results = np.full(agg.shape, np.nan) 92 | general_output_checks(agg, path_agg, expected_results) 93 | 94 | 95 | def test_a_star_search_snap(input_data_with_nans): 96 | agg, start, goal = input_data_with_nans 97 | 98 | # no barriers 99 | barriers = [] 100 | # no snap 101 | no_snap_path_agg = a_star_search(agg, start, goal, barriers, 'lon', 'lat') 102 | # no path, all cells in path_agg are nans 103 | 
np.testing.assert_array_equal(no_snap_path_agg, np.nan) 104 | 105 | # set snap_start = True, snap_goal = False 106 | snap_start_path_agg = a_star_search(agg, start, goal, barriers, 'lon', 'lat', snap_start=True) 107 | # no path, all cells in path_agg are nans 108 | np.testing.assert_array_equal(snap_start_path_agg, np.nan) 109 | 110 | # set snap_start = False, snap_goal = True 111 | snap_goal_path_agg = a_star_search(agg, start, goal, barriers, 'lon', 'lat', snap_goal=True) 112 | # no path, all cells in path_agg are nans 113 | np.testing.assert_array_equal(snap_goal_path_agg, np.nan) 114 | 115 | 116 | def test_a_star_search_connectivity( 117 | input_data_with_nans, 118 | result_8_connectivity, 119 | result_4_connectivity 120 | ): 121 | agg, start, goal = input_data_with_nans 122 | # no barriers 123 | barriers = [] 124 | 125 | # set snap_start = True, snap_goal = True 126 | # 8-connectivity as default 127 | path_agg_8 = a_star_search( 128 | agg, start, goal, barriers, 'lon', 'lat', snap_start=True, snap_goal=True 129 | ) 130 | np.testing.assert_allclose(path_agg_8, result_8_connectivity, equal_nan=True) 131 | 132 | # 4-connectivity 133 | path_agg_4 = a_star_search( 134 | agg, start, goal, barriers, 'lon', 'lat', snap_start=True, snap_goal=True, connectivity=4 135 | ) 136 | np.testing.assert_allclose(path_agg_4, result_4_connectivity, equal_nan=True) 137 | -------------------------------------------------------------------------------- /xrspatial/tests/test_perlin.py: -------------------------------------------------------------------------------- 1 | import dask.array as da 2 | import numpy as np 3 | import xarray as xr 4 | 5 | from xrspatial import perlin 6 | from xrspatial.tests.general_checks import cuda_and_cupy_available, general_output_checks 7 | from xrspatial.utils import has_cuda_and_cupy 8 | 9 | 10 | def create_test_arr(backend='numpy'): 11 | W = 50 12 | H = 50 13 | data = np.zeros((H, W), dtype=np.float32) 14 | raster = xr.DataArray(data, dims=['y', 'x']) 15 | 16 | if has_cuda_and_cupy() and 'cupy' in backend: 17 | import cupy 18 | raster.data = cupy.asarray(raster.data) 19 | 20 | if 'dask' in backend: 21 | raster.data = da.from_array(raster.data, chunks=(10, 10)) 22 | 23 | return raster 24 | 25 | 26 | def test_perlin_cpu(): 27 | # vanilla numpy version 28 | data_numpy = create_test_arr() 29 | perlin_numpy = perlin(data_numpy) 30 | general_output_checks(data_numpy, perlin_numpy) 31 | 32 | # dask 33 | data_dask = create_test_arr(backend='dask') 34 | perlin_dask = perlin(data_dask) 35 | general_output_checks(data_dask, perlin_dask) 36 | 37 | np.testing.assert_allclose( 38 | perlin_numpy.data, perlin_dask.data.compute(), 39 | rtol=1e-05, atol=1e-07, equal_nan=True 40 | ) 41 | 42 | 43 | @cuda_and_cupy_available 44 | def test_perlin_gpu(): 45 | # vanilla numpy version 46 | data_numpy = create_test_arr() 47 | perlin_numpy = perlin(data_numpy) 48 | 49 | # cupy 50 | data_cupy = create_test_arr(backend='cupy') 51 | perlin_cupy = perlin(data_cupy) 52 | general_output_checks(data_cupy, perlin_cupy) 53 | np.testing.assert_allclose( 54 | perlin_numpy.data, perlin_cupy.data.get(), 55 | rtol=1e-05, atol=1e-07, equal_nan=True 56 | ) 57 | -------------------------------------------------------------------------------- /xrspatial/tests/test_slope.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | from xrspatial import slope 5 | from xrspatial.tests.general_checks import (assert_nan_edges_effect, 
assert_numpy_equals_cupy, 6 | assert_numpy_equals_dask_numpy, create_test_raster, 7 | cuda_and_cupy_available, general_output_checks) 8 | 9 | 10 | def input_data(data, backend): 11 | # Notes: 12 | # ------ 13 | # The `elevation` data was run through QGIS slope function to 14 | # get values to compare against. Xarray-Spatial currently handles 15 | # edges by padding with nan, which differs from QGIS; this is a known, accepted difference. 16 | raster = create_test_raster(data, backend, attrs={'res': (1, 1)}) 17 | return raster 18 | 19 | 20 | @pytest.fixture 21 | def qgis_slope(): 22 | qgis_result = np.array([ 23 | [ np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], 24 | [ np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], 25 | [89.707756, 88.56143 , 89.45366 , 89.50229 , 88.82584 , 89.782394], 26 | [89.78415 , 89.61588 , 89.47127 , 89.24196 , 88.385376, 89.67071 ], 27 | [89.7849 , 89.61132 , 89.59183 , 89.56854 , 88.90889 , 89.765114], 28 | [89.775246, 89.42886 , 89.25054 , 89.60963 , 89.71719 , 89.76396 ], 29 | [89.85427 , 89.75693 , 89.67336 , 89.502174, 89.24611 , 89.352 ], 30 | [89.87612 , 89.76542 , 89.269966, 89.78526 , 88.35767 , 89.764206]], 31 | dtype=np.float32) 32 | return qgis_result 33 | 34 | 35 | def test_numpy_equals_qgis(elevation_raster, qgis_slope): 36 | # slope by xrspatial 37 | numpy_agg = input_data(elevation_raster, backend='numpy') 38 | xrspatial_slope_numpy = slope(numpy_agg, name='slope_numpy') 39 | general_output_checks(numpy_agg, xrspatial_slope_numpy) 40 | assert xrspatial_slope_numpy.name == 'slope_numpy' 43 | xrspatial_vals = xrspatial_slope_numpy.data[1:-1, 1:-1] 44 | qgis_vals = qgis_slope[1:-1, 1:-1] 46 | 47 | np.testing.assert_allclose(xrspatial_vals, qgis_vals, rtol=1e-05, equal_nan=True) 48 | 49 | # nan border edges 50 | assert_nan_edges_effect(xrspatial_slope_numpy) 51 | 52 | 53 | def test_numpy_equals_dask_qgis_data(elevation_raster): 54 | # compare using the data run through QGIS 55 | numpy_agg = input_data(elevation_raster, 'numpy') 56 | dask_agg = input_data(elevation_raster, 'dask+numpy') 57 | assert_numpy_equals_dask_numpy(numpy_agg, dask_agg, slope) 58 | 59 | 60 | @cuda_and_cupy_available 61 | def test_numpy_equals_cupy_qgis_data(elevation_raster): 62 | # compare using the data run through QGIS 63 | numpy_agg = input_data(elevation_raster, 'numpy') 64 | cupy_agg = input_data(elevation_raster, 'cupy') 65 | assert_numpy_equals_cupy(numpy_agg, cupy_agg, slope) 66 | -------------------------------------------------------------------------------- /xrspatial/tests/test_terrain.py: -------------------------------------------------------------------------------- 1 | import dask.array as da 2 | import numpy as np 3 | import xarray as xr 4 | 5 | from xrspatial import generate_terrain 6 | from xrspatial.tests.general_checks import cuda_and_cupy_available 7 | from xrspatial.utils import has_cuda_and_cupy 8 | 9 | 10 | def create_test_arr(backend='numpy'): 11 | W = 50 12 | H = 50 13 | data = np.zeros((H, W), dtype=np.float32) 14 | raster = xr.DataArray(data, dims=['y', 'x']) 15 | 16 | if has_cuda_and_cupy() and 'cupy' in backend: 17 | import cupy 18 | raster.data = cupy.asarray(raster.data) 19 | 20 | if 'dask' in backend: 21 | raster.data = da.from_array(raster.data, chunks=(10, 10)) 22 | 23 | return raster 24 | 25 | 26 | def test_terrain_cpu(): 27 | # vanilla numpy version 28 | data_numpy = create_test_arr() 29 | terrain_numpy = 
generate_terrain(data_numpy) 30 | 31 | # dask 32 | data_dask = create_test_arr(backend='dask') 33 | terrain_dask = generate_terrain(data_dask) 34 | assert isinstance(terrain_dask.data, da.Array) 35 | 36 | terrain_dask = terrain_dask.compute() 37 | np.testing.assert_allclose(terrain_numpy.data, terrain_dask.data, rtol=1e-05, atol=1e-07) 38 | 39 | 40 | @cuda_and_cupy_available 41 | def test_terrain_gpu(): 42 | # vanilla numpy version 43 | data_numpy = create_test_arr() 44 | terrain_numpy = generate_terrain(data_numpy) 45 | 46 | # cupy 47 | data_cupy = create_test_arr(backend='cupy') 48 | terrain_cupy = generate_terrain(data_cupy) 49 | 50 | np.testing.assert_allclose(terrain_numpy.data, terrain_cupy.data.get(), rtol=1e-05, atol=1e-07) 51 | -------------------------------------------------------------------------------- /xrspatial/tests/test_utils.py: -------------------------------------------------------------------------------- 1 | from xrspatial.datasets import make_terrain 2 | from xrspatial.utils import canvas_like 3 | 4 | 5 | def test_canvas_like(): 6 | # aspect ratio is 1:1 7 | terrain_shape = (1000, 1000) 8 | terrain = make_terrain(shape=terrain_shape) 9 | terrain_res = canvas_like(terrain, width=50) 10 | assert terrain_res.shape == (50, 50) 11 | -------------------------------------------------------------------------------- /xrspatial/tests/test_viewshed.py: -------------------------------------------------------------------------------- 1 | import datashader as ds 2 | import numpy as np 3 | import pandas as pd 4 | import pytest 5 | import xarray as xa 6 | 7 | from xrspatial import viewshed 8 | from xrspatial.tests.general_checks import general_output_checks 9 | 10 | from ..gpu_rtx import has_rtx 11 | 12 | 13 | @pytest.fixture 14 | def empty_agg(): 15 | # create an empty image of size 5*5 16 | H = 5 17 | W = 5 18 | 19 | canvas = ds.Canvas(plot_width=W, plot_height=H, 20 | x_range=(-20, 20), y_range=(-20, 20)) 21 | 22 | empty_df = pd.DataFrame({ 23 | 'x': np.random.normal(.5, 1, 0), 24 | 'y': np.random.normal(.5, 1, 0) 25 | }) 26 | agg = canvas.points(empty_df, 'x', 'y') 27 | return agg 28 | 29 | 30 | def test_viewshed_invalid_x_view(empty_agg): 31 | xs = empty_agg.coords['x'].values 32 | OBSERVER_X = xs[0] - 1 33 | OBSERVER_Y = 0 34 | with pytest.raises(Exception): 35 | viewshed(raster=empty_agg, x=OBSERVER_X, y=OBSERVER_Y, observer_elev=10) 36 | 37 | 38 | def test_viewshed_invalid_y_view(empty_agg): 39 | ys = empty_agg.coords['y'].values 40 | OBSERVER_X = 0 41 | OBSERVER_Y = ys[-1] + 1 42 | with pytest.raises(Exception): 43 | viewshed(raster=empty_agg, x=OBSERVER_X, y=OBSERVER_Y, observer_elev=10) 44 | 45 | 46 | def test_viewshed(empty_agg): 47 | H, W = empty_agg.shape 48 | 49 | # coordinates 50 | xs = empty_agg.coords['x'].values 51 | ys = empty_agg.coords['y'].values 52 | 53 | # define some values for observer's elevation to test 54 | OBS_ELEVS = [-1, 0, 1] 55 | TERRAIN_ELEV_AT_VP = [-1, 0, 1] 56 | 57 | # check if a matrix is symmetric 58 | def check_symmetric(matrix, rtol=1e-05, atol=1e-08): 59 | return np.allclose(matrix, matrix.T, rtol=rtol, atol=atol) 60 | 61 | def get_matrices(y, x, height, width): 62 | # indexing 0 1 ... height-1 and 0 1 ... 
width-1 63 | height = height - 1 64 | width = width - 1 65 | 66 | # find first matrix's diagonal 67 | tmp = min(y, x) 68 | f_top_y, f_left_x = y - tmp, x - tmp 69 | 70 | tmp = min(height - y, width - x) 71 | f_bottom_y, f_right_x = y + tmp, x + tmp 72 | 73 | # find second matrix's antidiagonal 74 | tmp = min(y, width - x) 75 | s_top_y, s_right_x = y - tmp, x + tmp 76 | 77 | tmp = min(height - y, x) 78 | s_bottom_y, s_left_x = y + tmp, x - tmp 79 | 80 | return ((f_top_y, f_left_x, f_bottom_y + 1, f_right_x + 1), 81 | (s_top_y, s_left_x, s_bottom_y + 1, s_right_x + 1)) 82 | 83 | # test on 3 scenarios: 84 | # empty image. 85 | # image with all 0s, except 1 cell with a negative value. 86 | # image with all 0s, except 1 cell with a positive value. 87 | 88 | # for each scenario: 89 | # if not empty image, 90 | # observer is located at the same position as the non zero value. 91 | # observer elevation can be: negative, zero, or positive. 92 | 93 | # assertion: 94 | # angle at viewpoint is always 180. 95 | # when the observer is above the terrain, all cells are visible. 96 | # the symmetric property of observer's visibility. 97 | 98 | for obs_elev in OBS_ELEVS: 99 | for elev_at_vp in TERRAIN_ELEV_AT_VP: 100 | for col_id, x in enumerate(xs): 101 | for row_id, y in enumerate(ys): 102 | 103 | empty_agg.values[row_id, col_id] = elev_at_vp 104 | v = viewshed(raster=empty_agg, x=x, y=y, observer_elev=obs_elev) 105 | 106 | # validate output properties 107 | general_output_checks(empty_agg, v) 108 | 109 | # angle at viewpoint is always 180 110 | assert v[row_id, col_id] == 180 111 | 112 | if obs_elev + elev_at_vp >= 0 and obs_elev >= abs(elev_at_vp): 113 | # all cells are visible 114 | assert (v.values > -1).all() 115 | 116 | b1, b2 = get_matrices(row_id, col_id, H, W) 117 | m1 = v.values[b1[0]:b1[2], b1[1]:b1[3]] 118 | m2 = v.values[b2[0]:b2[2], b2[1]:b2[3]] 119 | 120 | assert check_symmetric(m1) 121 | assert check_symmetric(m2[::-1]) 122 | 123 | # empty image for next uses 124 | empty_agg.values[row_id, col_id] = 0 125 | 126 | 127 | @pytest.mark.parametrize("observer_elev", [5, 2]) 128 | @pytest.mark.parametrize("target_elev", [0, 1]) 129 | @pytest.mark.parametrize("backend", ["numpy", "cupy"]) 130 | def test_viewshed_flat(backend, observer_elev, target_elev): 131 | if backend == "cupy": 132 | if not has_rtx(): 133 | pytest.skip("rtxpy not available") 134 | else: 135 | import cupy as cp 136 | 137 | x, y = 0, 0 138 | ny, nx = 5, 4 139 | arr = np.full((ny, nx), 1.3) 140 | xs = np.arange(nx)*0.5 141 | ys = np.arange(ny)*1.5 142 | if backend == "cupy": 143 | arr = cp.asarray(arr) 144 | xarr = xa.DataArray(arr, coords=dict(x=xs, y=ys), dims=["y", "x"]) 145 | v = viewshed( 146 | xarr, x=x, y=y, observer_elev=observer_elev, target_elev=target_elev) 147 | if backend == "cupy": 148 | v.data = cp.asnumpy(v.data) 149 | xs2, ys2 = np.meshgrid(xs, ys) 150 | d_vert = observer_elev - target_elev 151 | d_horz = np.sqrt((xs2 - x)**2 + (ys2 - y)**2) 152 | angle = np.rad2deg(np.arctan2(d_horz, d_vert)) 153 | # Don't want to compare value under observer. 154 | angle[0, 0] = v.data[0, 0] 155 | if backend == "numpy": 156 | np.testing.assert_allclose(v.data, angle) 157 | else: 158 | # Should do better with viewshed gpu output angles. 159 | mask = (v.data < 90) 160 | np.testing.assert_allclose(v.data[mask], angle[mask], atol=0.03) 161 | --------------------------------------------------------------------------------
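The helpers exercised throughout these modules (create_test_raster, general_output_checks, the assert_numpy_equals_* functions and the cuda_and_cupy_available marker from xrspatial/tests/general_checks.py) are typically combined as below when a test for a new tool is added. This is a minimal illustrative sketch, not a file from the repository; `my_tool` is a placeholder alias (bound here to xrspatial.slope) for whichever raster-in/raster-out function such a test would target.

import numpy as np

from xrspatial import slope as my_tool  # placeholder: any NumPy/CuPy-capable xrspatial tool
from xrspatial.tests.general_checks import (assert_numpy_equals_cupy, create_test_raster,
                                            cuda_and_cupy_available, general_output_checks)


def test_my_tool_numpy():
    data = np.arange(20, dtype=np.float32).reshape(4, 5)
    agg = create_test_raster(data, backend='numpy', attrs={'res': (1, 1)})
    result = my_tool(agg)
    # checks output array type, shape, dims, coords and attrs against the input raster
    general_output_checks(agg, result)


@cuda_and_cupy_available
def test_my_tool_cupy_matches_numpy():
    data = np.arange(20, dtype=np.float32).reshape(4, 5)
    numpy_agg = create_test_raster(data, backend='numpy', attrs={'res': (1, 1)})
    cupy_agg = create_test_raster(data, backend='cupy', attrs={'res': (1, 1)})
    # nan_edges=True (the default) additionally asserts a nan border on the numpy result
    assert_numpy_equals_cupy(numpy_agg, cupy_agg, my_tool)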