├── .devcontainer
│   ├── Dockerfile
│   └── devcontainer.json
├── .github
│   └── workflows
│       └── tests.yml
├── .gitignore
├── AUTHORS.rst
├── CHANGELOG.rst
├── LICENSE
├── README.md
├── docs
│   ├── Makefile
│   ├── _static
│   │   └── .gitignore
│   ├── authors.rst
│   ├── changelog.rst
│   ├── conf.py
│   ├── index.rst
│   └── license.rst
├── noxfile.py
├── poetry.lock
├── pyproject.toml
├── src
│   └── openeo_processes
│       ├── __init__.py
│       ├── arrays.py
│       ├── comparison.py
│       ├── cubes.py
│       ├── errors.py
│       ├── extension
│       │   ├── __init__.py
│       │   ├── odc.py
│       │   └── product_model.py
│       ├── logic.py
│       ├── math.py
│       ├── texts.py
│       └── utils.py
└── tests
    ├── __init__.py
    ├── conftest.py
    ├── data
    │   ├── array.nc
    │   └── out.time.nc
    ├── test_arrays.py
    ├── test_comparison.py
    ├── test_cubes.py
    ├── test_logic.py
    ├── test_math.py
    ├── test_texts.py
    └── test_utils.py

--------------------------------------------------------------------------------
/.devcontainer/Dockerfile:
--------------------------------------------------------------------------------
FROM osgeo/gdal:ubuntu-small-3.5.0 AS base

ARG DEBIAN_FRONTEND=noninteractive
ARG USERNAME=ubuntu
ARG GROUPNAME=ubuntu
ARG USER_UID=1000
ARG USER_GID=1000
ENV TZ=Etc/GMT

ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1 \
    PIP_NO_CACHE_DIR=off \
    PIP_DISABLE_PIP_VERSION_CHECK=on \
    PIP_DEFAULT_TIMEOUT=100 \
    POETRY_HOME="/opt/poetry/" \
    POETRY_NO_INTERACTION=1 \
    POETRY_VIRTUALENVS_IN_PROJECT=true \
    VENV_PATH=".venv/" \
    APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn

ENV PATH="$POETRY_HOME/bin:/home/$USERNAME/.local/bin:$VENV_PATH/bin:$PATH"

RUN apt-get update -qy && \
    apt-get install -qy --no-install-recommends \
    # libpq-dev and build-essential are necessary for psycopg2, which is required for datacube
    build-essential \
    libpq-dev

# Adapted from https://github.com/TheKevJames/tools/blob/master/docker-nox/Dockerfile
RUN apt-get update -qy && \
    apt-get install -qy --no-install-recommends \
    ca-certificates \
    curl \
    gnupg2 && \
    . /etc/os-release && \
    echo "deb http://ppa.launchpad.net/deadsnakes/ppa/ubuntu ${UBUNTU_CODENAME} main" > /etc/apt/sources.list.d/deadsnakes.list && \
    apt-key adv --keyserver keyserver.ubuntu.com --recv-keys F23C5A6CF475977595C89F51BA6932366A755776 && \
    apt-get update -qy && \
    apt-get install -qy --fix-missing --no-install-recommends \
    git \
    openssh-client \
    python3.6 \
    python3.6-dev \
    python3.6-distutils \
    python3.6-venv \
    python3.7 \
    python3.7-dev \
    python3.7-distutils \
    python3.7-venv \
    python3.8 \
    python3.8-dev \
    python3.8-distutils \
    python3.8-venv \
    python3.9 \
    python3.9-dev \
    python3.9-distutils \
    python3.9-venv \
    python3.10 \
    python3.10-dev \
    python3.10-distutils \
    python3.10-venv \
    python3-pip \
    python3-distutils

# Install Poetry - respects $POETRY_VERSION & $POETRY_HOME
ENV POETRY_VERSION=1.1.13
RUN curl -sSL https://install.python-poetry.org | python3 -

RUN useradd -ms /bin/bash --uid $USER_UID --user-group $USERNAME \
    && chown -R $USER_UID:$USER_GID /home/$USERNAME
USER $USER_UID

ENV PATH="$POETRY_HOME/bin:/home/$USERNAME/.local/bin:$VENV_PATH/bin:$PATH"

RUN curl -fsS https://bootstrap.pypa.io/get-pip.py --output /tmp/get-pip.py && \
    python3.10 /tmp/get-pip.py && \
    rm /tmp/get-pip.py

RUN python3.6 -m pip install --user --no-cache-dir --upgrade pip && \
    python3.7 -m pip install --user --no-cache-dir --upgrade pip && \
    python3.8 -m pip install --user --no-cache-dir --upgrade pip && \
    python3.9 -m pip install --user --no-cache-dir --upgrade pip && \
    python3.10 -m pip install --user --no-cache-dir --upgrade pip && \
    rm -rf /var/lib/apt/lists/*

RUN python3.10 -m pip install --user --no-cache-dir 'nox-poetry==1.0.0'

--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
{
    "name": "openeo-processes-python",

    "dockerFile": "./Dockerfile",

    // Sets the run context to one level up instead of the .devcontainer folder.
    "context": "..",

    // Set *default* container specific settings.json values on container create.
    "settings": {
        "python.defaultInterpreterPath": ".venv/bin/python",
        "python.venvPath": ".venv",
        "python.pythonPath": ".venv/bin/python",
        "python.testing.pytestArgs": [
            "tests"
        ],
        "python.testing.unittestEnabled": false,
        "python.testing.pytestEnabled": true,
        "python.linting.enabled": true
    },

    // Add the IDs of extensions you want installed when the container is created.
23 | "extensions": [ 24 | "ms-python.python", 25 | "ms-python.vscode-pylance" 26 | ], 27 | 28 | "postCreateCommand": "poetry install" 29 | } 30 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | # Inspired by https://github.com/python-poetry/poetry/blob/master/.github/workflows/main.yml 2 | 3 | name: CI Pipeline 4 | 5 | on: 6 | push: 7 | paths-ignore: 8 | - 'docs/**' 9 | branches: 10 | - master 11 | pull_request: 12 | paths-ignore: 13 | - 'docs/**' 14 | branches: 15 | - '**' 16 | workflow_dispatch: 17 | 18 | env: 19 | POETRY_VERSION: 1.1.13 20 | 21 | defaults: 22 | run: 23 | shell: bash 24 | 25 | jobs: 26 | image-build: 27 | name: Build devcontainer 28 | runs-on: ubuntu-20.04 29 | steps: 30 | - uses: actions/checkout@v3 31 | 32 | - name: Set up QEMU 33 | uses: docker/setup-qemu-action@v2 34 | 35 | - name: Setup buildx 36 | uses: docker/setup-buildx-action@v2 37 | 38 | - name: Downcase IMAGE_NAME 39 | run: | 40 | echo "IMAGE_NAME=${GITHUB_REPOSITORY,,}" >>${GITHUB_ENV} 41 | 42 | - name: Add env variables 43 | run: | 44 | echo "USER_UID=$( id --user )" >>${GITHUB_ENV} 45 | echo "USERNAME=$( id --user --name )" >>${GITHUB_ENV} 46 | echo "USER_GID=$( id --group )" >>${GITHUB_ENV} 47 | 48 | - name: Build image 49 | id: image-build 50 | uses: docker/build-push-action@v3 51 | with: 52 | file: ./.devcontainer/Dockerfile 53 | context: . 54 | tags: ${{ env.IMAGE_NAME }} 55 | cache-from: type=gha 56 | cache-to: type=gha,mode=max 57 | build-args: | 58 | "USER_UID=${{ env.USER_UID }}" 59 | "USERNAME=${{ env.USERNAME }}" 60 | "USER_GID=${{ env.USER_GID }}" 61 | 62 | tests: 63 | name: ${{ matrix.python-version }} / ${{ matrix.session }} 64 | needs: [image-build] 65 | runs-on: ubuntu-20.04 66 | strategy: 67 | fail-fast: false # This prevents the entire matrix from stopping early if any job in the matrix fails. 68 | matrix: 69 | python-version: ["3.9", "3.8"] 70 | session: ["tests", "mypy"] 71 | env: 72 | NOXSESSION: ${{ matrix.session }} 73 | 74 | steps: 75 | - uses: actions/checkout@v3 76 | 77 | - name: Downcase IMAGE_NAME 78 | run: | 79 | echo "IMAGE_NAME=${GITHUB_REPOSITORY,,}" >>${GITHUB_ENV} 80 | 81 | - name: Add env variables 82 | run: | 83 | echo "USER_UID=$( id --user )" >>${GITHUB_ENV} 84 | echo "USERNAME=$( id --user --name )" >>${GITHUB_ENV} 85 | echo "USER_GID=$( id --group )" >>${GITHUB_ENV} 86 | 87 | - name: Set up QEMU 88 | uses: docker/setup-qemu-action@v2 89 | 90 | - name: Setup buildx 91 | uses: docker/setup-buildx-action@v2 92 | 93 | - name: Build image 94 | id: image-build 95 | uses: docker/build-push-action@v3 96 | with: 97 | file: ./.devcontainer/Dockerfile 98 | context: . 
99 | load: true 100 | tags: ${{ env.IMAGE_NAME }} 101 | cache-from: type=gha 102 | build-args: | 103 | "USER_UID=${{ env.USER_UID }}" 104 | "USERNAME=${{ env.USERNAME }}" 105 | "USER_GID=${{ env.USER_GID }}" 106 | 107 | - name: Run nox session 108 | run: | 109 | docker run --rm --mount type=bind,source="$GITHUB_WORKSPACE",target="$GITHUB_WORKSPACE" --workdir="$GITHUB_WORKSPACE" --env NOXSESSION=$NOXSESSION ${IMAGE_NAME} nox --python=${{ matrix.python-version }} 110 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 160 | .idea/ 161 | .scripts/ 162 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Contributors 3 | ============ 4 | 5 | * Luca Foresta 6 | * Claudio Navacchi 7 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Changelog 3 | ========= 4 | 5 | Version 0.1 6 | =========== 7 | 8 | - Feature A added 9 | - FIX: nasty bug #1729 fixed 10 | - add your changes here! 11 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | 203 | 204 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # openeo_processes 2 | 3 | > :warning: **`openeo-processes-python` is being sunset and will not be actively maintained going forward. Please see [#198](https://github.com/Open-EO/openeo-processes-python/issues/198) for further details.** 4 | 5 | `openeo_processes` provides implementations of many [openEO processes](https://github.com/Open-EO/openeo-processes) in Python. 
Processes are currently aligned with openEO API version 1.0.

## Installation
TODO: this package will be installable from PyPI with the next release.
**This library requires the GDAL library to be present.**

## Development Environment
### Managing dependencies
This project uses [poetry](https://github.com/python-poetry/poetry) to manage dependencies through virtual environments. The poetry CLI can be installed easily following the instructions in the [official documentation](https://python-poetry.org/docs/master/#installing-with-the-official-installer). Note that poetry is already installed and set up in the provided devcontainer.

To install this project and its dependencies into a fresh virtual environment, run:
- `poetry install` to install all dependencies (core + development)
or
- `poetry install --no-dev` to install only the core dependencies

To add a dependency, run `poetry add <package>@<version constraint>`. Use the optional `--dev` flag to add it as a development dependency.
Note: When adding new dependencies, please do not pin to specific versions unless absolutely necessary (see discussion in #91). Use of the caret operator is preferred for specifying versions; it allows versions to range up to, but not including, the next major version (`^1.2.3` is equivalent to `>=1.2.3 <2.0.0`, see the [poetry documentation on caret requirements](https://python-poetry.org/docs/master/dependency-specification#caret-requirements) for additional examples).

The `poetry.lock` file is only included in the source to speed up dependency resolution during CI; it can be ignored for local builds.

To run a shell command within this poetry-managed virtual environment, use `poetry run [CMD]`, e.g. `poetry run python -m pytest`.

For advanced options, please see the documentation at https://python-poetry.org/docs/.

### Devcontainer (recommended)
Several processes depend on the GDAL library for I/O, which requires a range of C libraries to be present on the system.
In order to ensure a reproducible development environment across contributors, this project comes with a development container image that has the following dependencies preinstalled:
- GDAL
- poetry for dependency management
- nox and older versions of Python to test against

This image is also used to run the CI pipeline.

How to use?
1) Install the [`VSCode Remote - Containers`](https://code.visualstudio.com/docs/remote/containers) extension
2) Open the repo in VSCode
3) Run the `Remote-Containers: Rebuild and Reopen in Container` command or click the `Open in Container` pop-up
4) Optional: Mount your ssh keys as shown [here](https://code.visualstudio.com/docs/remote/containers#_sharing-git-credentials-with-your-container) to authenticate with GitHub.
5) Done! You should now be able to run this library entirely from the isolation of a container.

This devcontainer is intended to be used in conjunction with the extension, which lets you use it as a full-featured development environment. Note that this isn't a strict requirement, but a strong recommendation that will make life easier. Also note that this package is distributed in the standard formats through PyPI and can be installed using pip without any need for Docker or poetry - provided that GDAL is already correctly installed on the target system.
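
For illustration, a minimal sketch of such a pip-based install from a local checkout (the GDAL system packages named here are assumptions that vary by distribution, and a local checkout is used since the PyPI release is still pending per the Installation note above):

```
# Assumes GDAL and its headers are already present, e.g. on Ubuntu:
#   sudo apt-get install gdal-bin libgdal-dev
python3 -m pip install .   # builds via the poetry-core backend declared in pyproject.toml
python3 -c "import openeo_processes; print(openeo_processes.__version__)"
```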

Building the devcontainer without VSCode is not recommended; it is **not enough** to only run `docker build -f ./.devcontainer/Dockerfile .`. You will also have to mount the source code into the container and afterwards run the `postCreateCommand` from `devcontainer.json` in order for everything to be set up.

### Virtual Environment
If you already have GDAL installed on your system and don't want to develop in a container, it is possible to set up a local virtual environment using
```
poetry install
```

Testing across multiple versions of Python using `nox` might not work if those versions are not installed on your system. See the `.devcontainer/Dockerfile` for an example of how this can be done on Ubuntu.


## Continuous Integration
CI on this project runs the following checks against all push events:
- Build the devcontainer
- Run the following nox sessions on multiple versions of Python inside the devcontainer:
  - mypy: static type checking
  - tests: run the test suite using pytest

Because the CI uses the devcontainer as the base environment to run nox sessions, they can also be run locally from the devcontainer's shell, e.g.
`nox --session tests --python=3.8` to run the tests session on a specific version of Python.

## FAQ
### `Permission denied` errors with devcontainer
From [the docs on VSCode Remote - Containers](https://code.visualstudio.com/remote/advancedcontainers/add-nonroot-user):
> Inside the container, any mounted files/folders will have the exact same permissions as outside the container - including the owner user ID (UID) and group ID (GID). Because of this, your container user will either need to have the same UID or be in a group with the same GID. The actual name of the user / group does not matter. The first user on a machine typically gets a UID of 1000, so most containers use this as the ID of the user to try to avoid this problem.

By default, the devcontainer uses UID=1000 and GID=1000, but if the files on your host system belong to a different user, you can configure these values using the `USER_UID` and `USER_GID` docker build arguments.

--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = _build
AUTODOCDIR    = api
AUTODOCBUILD  = sphinx-apidoc
PROJECT       = eoFunctions
MODULEDIR     = ../src/openeo_processes

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
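
# Typical usage (illustrative note, not part of the original Makefile; assumes
# Sphinx and the project are installed in the active environment, e.g. via
# `poetry install` plus a Sphinx install):
#
#   cd docs && poetry run make html       # HTML output in docs/_build/html
#   cd docs && poetry run make linkcheck  # check external links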

.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext doc-requirements

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"

clean:
	rm -rf $(BUILDDIR)/* $(AUTODOCDIR)

$(AUTODOCDIR): $(MODULEDIR)
	mkdir -p $@
	$(AUTODOCBUILD) -f -o $@ $^

doc-requirements: $(AUTODOCDIR)

html: doc-requirements
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml: doc-requirements
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml: doc-requirements
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

pickle: doc-requirements
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json: doc-requirements
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp: doc-requirements
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp: doc-requirements
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/$(PROJECT).qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/$(PROJECT).qhc"

devhelp: doc-requirements
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/$(PROJECT)"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/$(PROJECT)"
	@echo "# devhelp"

epub: doc-requirements
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

patch-latex:
	find _build/latex -iname "*.tex" | xargs -- \
	sed -i'' 's~includegraphics{~includegraphics\[keepaspectratio,max size={\\textwidth}{\\textheight}\]{~g'

latex: doc-requirements
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	$(MAKE) patch-latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

latexpdf: doc-requirements
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	$(MAKE) patch-latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

latexpdfja: doc-requirements
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

text: doc-requirements
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

man: doc-requirements
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

texinfo: doc-requirements
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."

info: doc-requirements
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

gettext: doc-requirements
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

changes: doc-requirements
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck: doc-requirements
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

doctest: doc-requirements
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."

xml: doc-requirements
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
189 | 190 | pseudoxml: doc-requirements 191 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 192 | @echo 193 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 194 | -------------------------------------------------------------------------------- /docs/_static/.gitignore: -------------------------------------------------------------------------------- 1 | # Empty directory 2 | -------------------------------------------------------------------------------- /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. _authors: 2 | .. include:: ../AUTHORS.rst 3 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. _changes: 2 | .. include:: ../CHANGELOG.rst 3 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # This file is execfile()d with the current directory set to its containing dir. 4 | # 5 | # Note that not all possible configuration values are present in this 6 | # autogenerated file. 7 | # 8 | # All configuration values have a default; values that are commented out 9 | # serve to show the default. 10 | 11 | import os 12 | import sys 13 | import inspect 14 | import shutil 15 | 16 | __location__ = os.path.join(os.getcwd(), os.path.dirname( 17 | inspect.getfile(inspect.currentframe()))) 18 | 19 | # If extensions (or modules to document with autodoc) are in another directory, 20 | # add these directories to sys.path here. If the directory is relative to the 21 | # documentation root, use os.path.abspath to make it absolute, like shown here. 22 | sys.path.insert(0, os.path.join(__location__, '../src')) 23 | 24 | # -- Run sphinx-apidoc ------------------------------------------------------ 25 | # This hack is necessary since RTD does not issue `sphinx-apidoc` before running 26 | # `sphinx-build -b html . _build/html`. See Issue: 27 | # https://github.com/rtfd/readthedocs.org/issues/1139 28 | # DON'T FORGET: Check the box "Install your project inside a virtualenv using 29 | # setup.py install" in the RTD Advanced Settings. 30 | # Additionally it helps us to avoid running apidoc manually 31 | 32 | try: # for Sphinx >= 1.7 33 | from sphinx.ext import apidoc 34 | except ImportError: 35 | from sphinx import apidoc 36 | 37 | output_dir = os.path.join(__location__, "api") 38 | module_dir = os.path.join(__location__, "../src/openeo_processes") 39 | try: 40 | shutil.rmtree(output_dir) 41 | except FileNotFoundError: 42 | pass 43 | 44 | try: 45 | import sphinx 46 | from distutils.version import LooseVersion 47 | 48 | cmd_line_template = "sphinx-apidoc -f -o {outputdir} {moduledir}" 49 | cmd_line = cmd_line_template.format(outputdir=output_dir, moduledir=module_dir) 50 | 51 | args = cmd_line.split(" ") 52 | if LooseVersion(sphinx.__version__) >= LooseVersion('1.7'): 53 | args = args[1:] 54 | 55 | apidoc.main(args) 56 | except Exception as e: 57 | print("Running `sphinx-apidoc` failed!\n{}".format(e)) 58 | 59 | # -- General configuration ----------------------------------------------------- 60 | 61 | # If your documentation needs a minimal Sphinx version, state it here. 62 | # needs_sphinx = '1.0' 63 | 64 | # Add any Sphinx extension module names here, as strings. 
They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo',
              'sphinx.ext.autosummary', 'sphinx.ext.viewcode', 'sphinx.ext.coverage',
              'sphinx.ext.doctest', 'sphinx.ext.ifconfig', 'sphinx.ext.mathjax',
              'sphinx.ext.napoleon']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'eoFunctions'
copyright = u'2018, Luca Foresta'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''  # Is set by calling `setup.py docs`
# The full version, including alpha/beta/rc tags.
release = ''  # Is set by calling `setup.py docs`

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
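# If the package is installed, use its version as the release shown in the docs.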
150 | try: 151 | from openeo_processes import __version__ as version 152 | except ImportError: 153 | pass 154 | else: 155 | release = version 156 | 157 | # A shorter title for the navigation bar. Default is the same as html_title. 158 | # html_short_title = None 159 | 160 | # The name of an image file (relative to this directory) to place at the top 161 | # of the sidebar. 162 | # html_logo = "" 163 | 164 | # The name of an image file (within the static path) to use as favicon of the 165 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 166 | # pixels large. 167 | # html_favicon = None 168 | 169 | # Add any paths that contain custom static files (such as style sheets) here, 170 | # relative to this directory. They are copied after the builtin static files, 171 | # so a file named "default.css" will overwrite the builtin "default.css". 172 | html_static_path = ['_static'] 173 | 174 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 175 | # using the given strftime format. 176 | # html_last_updated_fmt = '%b %d, %Y' 177 | 178 | # If true, SmartyPants will be used to convert quotes and dashes to 179 | # typographically correct entities. 180 | # html_use_smartypants = True 181 | 182 | # Custom sidebar templates, maps document names to template names. 183 | # html_sidebars = {} 184 | 185 | # Additional templates that should be rendered to pages, maps page names to 186 | # template names. 187 | # html_additional_pages = {} 188 | 189 | # If false, no module index is generated. 190 | # html_domain_indices = True 191 | 192 | # If false, no index is generated. 193 | # html_use_index = True 194 | 195 | # If true, the index is split into individual pages for each letter. 196 | # html_split_index = False 197 | 198 | # If true, links to the reST sources are added to the pages. 199 | # html_show_sourcelink = True 200 | 201 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 202 | # html_show_sphinx = True 203 | 204 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 205 | # html_show_copyright = True 206 | 207 | # If true, an OpenSearch description file will be output, and all pages will 208 | # contain a tag referring to it. The value of this option must be the 209 | # base URL from which the finished HTML is served. 210 | # html_use_opensearch = '' 211 | 212 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 213 | # html_file_suffix = None 214 | 215 | # Output file base name for HTML help builder. 216 | htmlhelp_basename = 'openeo_processes-doc' 217 | 218 | 219 | # -- Options for LaTeX output -------------------------------------------------- 220 | 221 | latex_elements = { 222 | # The paper size ('letterpaper' or 'a4paper'). 223 | # 'papersize': 'letterpaper', 224 | 225 | # The font size ('10pt', '11pt' or '12pt'). 226 | # 'pointsize': '10pt', 227 | 228 | # Additional stuff for the LaTeX preamble. 229 | # 'preamble': '', 230 | } 231 | 232 | # Grouping the document tree into LaTeX files. List of tuples 233 | # (source start file, target name, title, author, documentclass [howto/manual]). 234 | latex_documents = [ 235 | ('index', 'user_guide.tex', u'eoFunctions Documentation', 236 | u'Luca Foresta', 'manual'), 237 | ] 238 | 239 | # The name of an image file (relative to this directory) to place at the top of 240 | # the title page. 241 | # latex_logo = "" 242 | 243 | # For "manual" documents, if this is true, then toplevel headings are parts, 244 | # not chapters. 
245 | # latex_use_parts = False 246 | 247 | # If true, show page references after internal links. 248 | # latex_show_pagerefs = False 249 | 250 | # If true, show URL addresses after external links. 251 | # latex_show_urls = False 252 | 253 | # Documents to append as an appendix to all manuals. 254 | # latex_appendices = [] 255 | 256 | # If false, no module index is generated. 257 | # latex_domain_indices = True 258 | 259 | # -- External mapping ------------------------------------------------------------ 260 | python_version = '.'.join(map(str, sys.version_info[0:2])) 261 | intersphinx_mapping = { 262 | 'sphinx': ('http://www.sphinx-doc.org/en/stable', None), 263 | 'python': ('https://docs.python.org/' + python_version, None), 264 | 'matplotlib': ('https://matplotlib.org', None), 265 | 'numpy': ('https://docs.scipy.org/doc/numpy', None), 266 | 'sklearn': ('http://scikit-learn.org/stable', None), 267 | 'pandas': ('http://pandas.pydata.org/pandas-docs/stable', None), 268 | 'scipy': ('https://docs.scipy.org/doc/scipy/reference', None), 269 | } 270 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | =========== 2 | eoFunctions 3 | =========== 4 | 5 | This is the documentation of **eoFunctions**. 6 | 7 | .. note:: 8 | 9 | This is the main page of your project's `Sphinx `_ 10 | documentation. It is formatted in `reStructuredText 11 | `__. Add additional pages by creating 12 | rst-files in ``docs`` and adding them to the `toctree 13 | `_ below. Use then 14 | `references `__ in order to link 15 | them from this page, e.g. :ref:`authors ` and :ref:`changes`. 16 | 17 | It is also possible to refer to the documentation of other Python packages 18 | with the `Python domain syntax 19 | `__. By default you 20 | can reference the documentation of `Sphinx `__, 21 | `Python `__, `NumPy 22 | `__, `SciPy 23 | `__, `matplotlib 24 | `__, `Pandas 25 | `__, `Scikit-Learn 26 | `__. You can add more by 27 | extending the ``intersphinx_mapping`` in your Sphinx's ``conf.py``. 28 | 29 | The pretty useful extension `autodoc 30 | `__ is activated by 31 | default and lets you include documentation from docstrings. Docstrings can 32 | be written in `Google 33 | `__ 34 | (recommended!), `NumPy 35 | `__ 36 | and `classical 37 | `__ 38 | style. 39 | 40 | 41 | Contents 42 | ======== 43 | 44 | .. toctree:: 45 | :maxdepth: 2 46 | 47 | License 48 | Authors 49 | Changelog 50 | Module Reference 51 | 52 | 53 | Indices and tables 54 | ================== 55 | 56 | * :ref:`genindex` 57 | * :ref:`modindex` 58 | * :ref:`search` 59 | -------------------------------------------------------------------------------- /docs/license.rst: -------------------------------------------------------------------------------- 1 | .. _license: 2 | 3 | ======= 4 | License 5 | ======= 6 | 7 | .. 
literalinclude:: ../LICENSE

--------------------------------------------------------------------------------
/noxfile.py:
--------------------------------------------------------------------------------
from nox_poetry import session

python_versions = ["3.8", "3.9"]

@session(python=python_versions)
def tests(session):
    session.install("pytest", ".")
    session.run("pytest")

@session(python=python_versions)
def mypy(session):
    session.install("mypy")
    session.run("mypy", "--install-types", "--non-interactive")

--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
[tool.poetry]
name = "openeo-processes"
version = "0.1.11"
description = "Python implementations of many openEO processes."
authors = ["Your Name "]
license = "Apache 2.0"
packages = [
    {include = "openeo_processes", from = "src"}
]

[[tool.poetry.source]]
name = "packages.dea.ga.gov.au"
url = "https://packages.dea.ga.gov.au"
secondary = true

[tool.poetry.dependencies]
python = "^3.8"
numpy = "^1"
pandas = "^1"
xarray = ">=0.18.2"
pyproj = "^3"
xgboost = "^1.5.0"
rioxarray = "^0.9"
geopandas = "^0.10"
Equi7Grid = "^0.1.0"
datacube = "^1.8.4"
dask-geopandas = "^v0.1.0a7"
dask = {extras = ["array"], version = "^2022.02.1"}
odc-algo = {extras = ["xarray"], version = "^0.2.2"}
GDAL = "^3.5.0"
PyYAML = "^6.0"

[tool.poetry.dev-dependencies]
pytest = "^7.1.2"
mypy = "^0.961"
types-PyYAML = "^6.0"

[tool.mypy]
files = "src"
mypy_path = "src"
ignore_missing_imports = true

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

--------------------------------------------------------------------------------
/src/openeo_processes/__init__.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# type: ignore
from pkg_resources import get_distribution, DistributionNotFound

try:
    # Change here if project is renamed and does not equal the package name
    dist_name = 'openeo_processes'
    __version__ = get_distribution(dist_name).version
except DistributionNotFound:
    __version__ = 'unknown'


from openeo_processes.arrays import *
from openeo_processes.comparison import *
from openeo_processes.cubes import *
from openeo_processes.logic import *
from openeo_processes.math import *
from openeo_processes.texts import *
from openeo_processes.utils import get_process, has_process

--------------------------------------------------------------------------------
/src/openeo_processes/errors.py:
--------------------------------------------------------------------------------
class QuantilesParameterMissing(Exception):
    def __init__(self):
        self.message = "The process 'quantiles' requires either the 'probabilities' or 'q' parameter to be set."

    def __str__(self):
        return self.message


class QuantilesParameterConflict(Exception):
    def __init__(self):
        self.message = "The process 'quantiles' only allows that either the 'probabilities' or the 'q' parameter is set."

    def __str__(self):
        return self.message
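
# Illustrative sketch (hypothetical helper, not part of errors.py): how a
# 'quantiles' implementation might raise the two exceptions above. The
# argument names mirror the openEO 'quantiles' parameters.
def _validate_quantiles_args(probabilities=None, q=None):
    if probabilities is None and q is None:
        raise QuantilesParameterMissing()
    if probabilities is not None and q is not None:
        raise QuantilesParameterConflict()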

class ArrayElementParameterMissing(Exception):
    def __init__(self):
        self.message = "The process 'array_element' requires either the 'index' or 'labels' parameter to be set."

    def __str__(self):
        return self.message


class ArrayElementParameterConflict(Exception):
    def __init__(self):
        self.message = "The process 'array_element' only allows that either the 'index' or the 'labels' parameter is " \
                       "set."

    def __str__(self):
        return self.message


class ArrayElementNotAvailable(Exception):
    def __init__(self):
        self.message = "The array has no element with the specified index or label."

    def __str__(self):
        return self.message


class GenericError(Exception):
    def __init__(self, msg):
        self.message = msg

    def __str__(self):
        return self.message


class DimensionNotAvailable(Exception):
    def __init__(self, msg=None):
        # Honor a caller-supplied message; fall back to the generic text.
        self.message = msg or "A dimension with the specified name does not exist."

    def __str__(self):
        return self.message


class TooManyDimensions(Exception):
    def __init__(self, msg=None):
        # Honor a caller-supplied message; fall back to the generic text.
        self.message = msg or "The number of dimensions must be reduced to three for `aggregate_spatial`."

    def __str__(self):
        return self.message

--------------------------------------------------------------------------------
/src/openeo_processes/extension/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Open-EO/openeo-processes-python/6a85abcbb6648c91b60a09b36ca25984dc26b5cc/src/openeo_processes/extension/__init__.py

--------------------------------------------------------------------------------
/src/openeo_processes/extension/odc.py:
--------------------------------------------------------------------------------
import os
import yaml
import xarray as xr

from openeo_processes.extension.product_model import get_prod_dict


def write_odc_product(dataset: xr.Dataset, output_filepath_data: str):
    """Create an ODC product definition.

    Uses properties of the xr.Dataset to define the product.
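
    Example (illustrative only; `cube` and the path are hypothetical)::

        write_odc_product(cube, "/tmp/out/result.nc")
        # -> writes the product definition to /tmp/out/product.yml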
12 | """ 13 | folder_path = os.path.dirname(output_filepath_data) 14 | product_filepath = os.path.join(folder_path, "product.yml") 15 | product = get_prod_dict(dataset) 16 | with open(product_filepath, "w") as product_file: 17 | yaml.dump(product, product_file) 18 | -------------------------------------------------------------------------------- /src/openeo_processes/extension/product_model.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, asdict, field 2 | from typing import Any, Dict, List, Literal, Optional, Union 3 | 4 | import numpy as np 5 | import xarray as xr 6 | 7 | @dataclass 8 | class Storage: 9 | crs: str 10 | resolution: Dict[str, float] 11 | 12 | 13 | @dataclass 14 | class MetadataProduct: 15 | name: str 16 | 17 | 18 | @dataclass 19 | class MetadataProperties: 20 | pass 21 | 22 | @dataclass 23 | class Metadata: 24 | product: MetadataProduct 25 | # properties: MetadataProperties 26 | 27 | 28 | @dataclass 29 | class Measurement: 30 | name: str 31 | units: str 32 | dtype: str 33 | nodata: Any 34 | aliases: List[str] = field(default_factory=list) 35 | extra_dim: str = None # type: ignore 36 | 37 | 38 | @dataclass 39 | class ExtraDimensions: 40 | name: str 41 | values: List[Union[int, float]] 42 | dtype: Literal['float16', 'float32', 'float64', 'int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 43 | 'uint64', 'complex64', 'complex128'] 44 | 45 | @dataclass 46 | class Product: 47 | name: str 48 | description: str 49 | storage: Storage 50 | metadata: Metadata 51 | measurements: List[Measurement] 52 | metadata_type: str = "eo3" 53 | extra_dimensions: Optional[List[ExtraDimensions]] = None 54 | 55 | 56 | def create_product(data: xr.Dataset) -> Product: 57 | """Create a product definition form an xr.Dataset.""" 58 | first_data_var = data.data_vars[list(data.data_vars.keys())[0]] 59 | 60 | # handle extra-dims 61 | extra_dims = list(set(first_data_var.dims).difference({'bands', 'y', 'x', 'time'})) 62 | if extra_dims: 63 | extra_dimensions = [ 64 | ExtraDimensions( 65 | name=dim, 66 | values=getattr(first_data_var, dim).values.tolist(), 67 | dtype=str(getattr(first_data_var, dim).values.dtype), # type: ignore 68 | ) 69 | for dim in extra_dims] 70 | 71 | measurements = [ 72 | Measurement( 73 | name=name, 74 | dtype=str(msnt.dtype), 75 | nodata=-9999, # no data value set to -9999 in save result 76 | units="", # TODO not implemented - currently ignored! 
77 |             extra_dim=extra_dims[0] if extra_dims else None  # currently only a single extra dim is supported
78 |         )
79 |         for name, msnt in data.data_vars.items()
80 |     ]
81 |     # Use first data var to define storage - arbitrary selection
82 |     is_geographic = first_data_var.geobox.crs.geographic
83 |     res = first_data_var.geobox.resolution
84 |     if is_geographic:
85 |         resolution = {"latitude": res[0], "longitude": res[1]}
86 |     else:
87 |         resolution = {"y": res[0], "x": res[1]}
88 | 
89 |     prod = Product(
90 |         name="PLACEHOLDER_PRODUCT_NAME",
91 |         description="Results of job PLACEHOLDER_JOB_ID.",
92 |         storage=Storage(
93 |             crs=first_data_var.geobox.crs.to_wkt(),
94 |             resolution=resolution,
95 |         ),
96 |         metadata=Metadata(
97 |             product=MetadataProduct(
98 |                 name="PLACEHOLDER_PRODUCT_NAME",
99 |             ),
100 |         ),
101 |         measurements=measurements,
102 |         extra_dimensions=None
103 |         # extra_dimensions if extra_dims else None,  # TODO: enable once extra dimensions are supported downstream
104 |     )
105 |     return prod
106 | 
107 | 
108 | def get_prod_dict(data: xr.Dataset) -> dict:
109 |     """Create a product definition from an xr.Dataset and return it as dict."""
110 |     product = create_product(data)
111 |     prod_dict = asdict(product)
112 |     if not prod_dict["extra_dimensions"]:
113 |         prod_dict.pop("extra_dimensions")
114 |         for idx in range(len(prod_dict["measurements"])):
115 |             prod_dict["measurements"][idx].pop("extra_dim")
116 |     return prod_dict
--------------------------------------------------------------------------------
/src/openeo_processes/logic.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from openeo_processes.utils import process
3 | from openeo_processes.comparison import is_empty
4 | import xarray as xr
5 | 
6 | 
7 | ########################################################################################################################
8 | # And Process
9 | ########################################################################################################################
10 | 
11 | @process
12 | def and_():
13 |     """
14 |     Returns class instance of `And`.
15 |     For more details, please have a look at the implementations inside `And`.
16 | 
17 |     Returns
18 |     -------
19 |     And :
20 |         Class instance implementing all 'and' processes.
21 | 
22 |     """
23 |     return And()
24 | 
25 | 
26 | class And:
27 |     """
28 |     Class implementing all 'and' processes.
29 | 
30 |     """
31 | 
32 |     @staticmethod
33 |     def exec_num(x, y):
34 |         """
35 |         Checks if both values are true.
36 |         Evaluates parameter `x` before `y` and stops once the outcome is unambiguous.
37 |         If any argument is None, the result will be None if the outcome is ambiguous.
38 | 
39 |         Parameters
40 |         ----------
41 |         x : bool
42 |             A boolean value.
43 |         y : bool
44 |             A boolean value.
45 | 
46 |         Returns
47 |         -------
48 |         bool :
49 |             Boolean result of the logical AND.
50 | 
51 |         """
52 |         return False if False in [x, y] else (x and y if None not in [x, y] else None)  # a False operand makes the outcome unambiguously False, even if the other is None
53 | 
54 |     @staticmethod
55 |     def exec_np(x, y):
56 |         """
57 |         Checks if both arrays are true.
58 |         Evaluates parameter `x` before `y` and stops once the outcome is unambiguous.
59 |         If any argument is np.nan, the result will be np.nan if the outcome is ambiguous.
60 | 
61 |         Parameters
62 |         ----------
63 |         x : np.array or bool
64 |             A boolean value.
65 |         y : np.array or bool
66 |             A boolean value.
67 | 
68 |         Returns
69 |         -------
70 |         np.array :
71 |             Boolean result of the logical AND.
72 | 
73 |         """
74 |         return x & y
75 | 
76 |     @staticmethod
77 |     def exec_xar(x, y):
78 |         """
79 |         Checks if both arrays are true.
80 | Evaluates parameter `x` before `y` and stops once the outcome is unambiguous. 81 | If any argument is np.nan, the result will be np.nan if the outcome is ambiguous. 82 | 83 | Parameters 84 | ---------- 85 | x : xr.DataArray or bool 86 | A boolean value. 87 | y : xr.DataArray or bool 88 | A boolean value. 89 | 90 | Returns 91 | ------- 92 | xr.DataArray : 93 | Boolean result of the logical AND. 94 | """ 95 | x_nan = x.where(x == True, False) # Set NaN to False 96 | y_nan = y.where(y == True, False) 97 | logical_and = xr.ufuncs.logical_and(x, y) 98 | logical_and = logical_and.where(x == x_nan, np.nan) 99 | logical_and = logical_and.where(y == y_nan, np.nan) 100 | return logical_and 101 | 102 | @staticmethod 103 | def exec_da(): 104 | pass 105 | 106 | 107 | ######################################################################################################################## 108 | # Or Process 109 | ######################################################################################################################## 110 | 111 | @process 112 | def or_(): 113 | """ 114 | Returns class instance of `Or`. 115 | For more details, please have a look at the implementations inside `Or`. 116 | 117 | Returns 118 | ------- 119 | Or : 120 | Class instance implementing all 'or' processes. 121 | 122 | """ 123 | return Or() 124 | 125 | 126 | class Or: 127 | """ 128 | Class implementing all 'or' processes. 129 | 130 | """ 131 | 132 | @staticmethod 133 | def exec_num(x, y): 134 | """ 135 | Checks if at least one of the values is True. Evaluates parameter `x` before `y` and stops once the outcome 136 | is unambiguous. If a component is None, the result will be None if the outcome is ambiguous. 137 | 138 | Parameters 139 | ---------- 140 | x : bool 141 | A boolean value. 142 | y : bool 143 | A boolean value. 144 | 145 | Returns 146 | ------- 147 | bool : 148 | Boolean result of the logical OR. 149 | 150 | """ 151 | 152 | return None if None in [x, y] and False in [x, y] else x or y 153 | 154 | @staticmethod 155 | def exec_np(x, y): 156 | """ 157 | Checks if at least one of the array values is True. Evaluates parameter `x` before `y` and stops once the 158 | outcome is unambiguous. If a component is np.nan, the result will be np.nan if the outcome is ambiguous. 159 | 160 | Parameters 161 | ---------- 162 | x : bool 163 | A boolean value. 164 | y : bool 165 | A boolean value. 166 | 167 | Returns 168 | ------- 169 | np.array : 170 | Boolean result of the logical OR. 171 | 172 | """ 173 | return x | y 174 | 175 | @staticmethod 176 | def exec_xar(x, y): 177 | """ 178 | Checks if at least one of the array values is True. Evaluates parameter `x` before `y` and stops once the 179 | outcome is unambiguous. If a component is np.nan, the result will be np.nan if the outcome is ambiguous. 180 | 181 | Parameters 182 | ---------- 183 | x : xr.DataArray 184 | A boolean value. 185 | y : xr.DataArray 186 | A boolean value. 187 | 188 | Returns 189 | ------- 190 | xr.DataArray: 191 | Boolean result of the logical OR. 
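
        A small sketch (values assumed): for `x = xr.DataArray([True, False])` and
        `y = xr.DataArray([False, False])` the result is `[True, False]`; with this
        implementation, an element that is NaN in `x` or `y` yields NaN in the output.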
192 | """ 193 | x_nan = x.where(x == True, False) # Set NaN to False 194 | y_nan = y.where(y == True, False) 195 | logical_or = xr.ufuncs.logical_or(x, y) 196 | logical_or = logical_or.where(x == x_nan, np.nan) 197 | logical_or = logical_or.where(y == y_nan, np.nan) 198 | return logical_or 199 | 200 | @staticmethod 201 | def exec_da(): 202 | pass 203 | 204 | 205 | ######################################################################################################################## 206 | # Xor Process 207 | ######################################################################################################################## 208 | 209 | @process 210 | def xor(): 211 | """ 212 | Returns class instance of `Xor`. 213 | For more details, please have a look at the implementations inside `Xor`. 214 | 215 | Returns 216 | ------- 217 | Xor : 218 | Class instance implementing all 'xor' processes. 219 | 220 | """ 221 | return Xor() 222 | 223 | 224 | class Xor: 225 | """ 226 | Class implementing all 'xor' processes. 227 | 228 | """ 229 | 230 | @staticmethod 231 | def exec_num(x, y): 232 | """ 233 | Checks if exactly one of the values is true. If a component is None, the result will be None if the outcome 234 | is ambiguous. 235 | 236 | Parameters 237 | ---------- 238 | x : bool 239 | A boolean value. 240 | y : bool 241 | A boolean value. 242 | 243 | Returns 244 | ------- 245 | bool : 246 | Boolean result of the logical XOR. 247 | 248 | """ 249 | return sum([x, y]) == 1 if None not in [x, y] else None 250 | 251 | @staticmethod 252 | def exec_np(x, y): 253 | """ 254 | Checks if exactly one of the array values is true. If a component is np.nan, the result will be np.nan if the 255 | outcome is ambiguous. 256 | 257 | Parameters 258 | ---------- 259 | x : bool 260 | A boolean value. 261 | y : bool 262 | A boolean value. 263 | 264 | Returns 265 | ------- 266 | np.array : 267 | Boolean result of the logical XOR. 268 | 269 | """ 270 | if np.any(np.isnan(x)) or np.any(np.isnan(y)): 271 | return np.nan 272 | else: 273 | return (x + y) == 1 274 | 275 | @staticmethod 276 | def exec_xar(x, y): 277 | """ 278 | Checks if exactly one of the array values is true. If a component is np.nan, the result will be np.nan if the 279 | outcome is ambiguous. 280 | 281 | Parameters 282 | ---------- 283 | x : xr.DataArray 284 | A boolean value. 285 | y : xr.DataArray 286 | A boolean value. 287 | 288 | Returns 289 | ------- 290 | xr.DataArray : 291 | Boolean result of the logical XOR. 292 | """ 293 | x_nan = x.where(x == True, False) # Set NaN to False 294 | y_nan = y.where(y == True, False) 295 | logical_xor = xr.ufuncs.logical_xor(x, y) 296 | logical_xor = logical_xor.where(x == x_nan, np.nan) 297 | logical_xor = logical_xor.where(y == y_nan, np.nan) 298 | return logical_xor 299 | 300 | @staticmethod 301 | def exec_da(): 302 | pass 303 | 304 | 305 | ######################################################################################################################## 306 | # Not Process 307 | ######################################################################################################################## 308 | 309 | @process 310 | def not_(): 311 | """ 312 | Returns class instance of `Not`. 313 | For more details, please have a look at the implementations inside `Not`. 314 | 315 | Returns 316 | ------- 317 | Not 318 | Class instance implementing all 'not' processes. 319 | 320 | """ 321 | return Not() 322 | 323 | 324 | class Not: 325 | """ 326 | Class implementing all 'not' processes. 
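
    A dispatch sketch (process name follows the registry's rstrip('_') rule):
    `not_` is registered as the openEO process `not`, so `oeop.not_(True)` evaluates
    to `False` via `exec_num`, and `oeop.not_(None)` propagates `None`.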
327 |     """
328 | 
329 |     @staticmethod
330 |     def exec_num(x):
331 |         """
332 |         Inverts a boolean so that True gets False and False gets True.
333 |         The no-data value None is passed through and therefore gets propagated.
334 | 
335 |         Parameters
336 |         ----------
337 |         x : bool
338 |             Boolean value to invert.
339 | 
340 |         Returns
341 |         -------
342 |         bool :
343 |             Inverted boolean value.
344 | 
345 |         """
346 |         return not x if x is not None else x
347 | 
348 |     @staticmethod
349 |     def exec_np(x):
350 |         """
351 |         Inverts booleans so that True/1 gets False/0 and False/0 gets True/1.
352 |         The no-data value np.nan is passed through and therefore gets propagated.
353 | 
354 |         Parameters
355 |         ----------
356 |         x : np.array
357 |             Boolean values to invert.
358 | 
359 |         Returns
360 |         -------
361 |         np.array :
362 |             Inverted boolean values.
363 | 
364 |         """
365 |         return ~x
366 | 
367 |     @staticmethod
368 |     def exec_xar(x):
369 |         """
370 |         Inverts booleans so that True/1 gets False/0 and False/0 gets True/1.
371 |         The no-data value np.nan is passed through and therefore gets propagated.
372 | 
373 |         Parameters
374 |         ----------
375 |         x : xr.DataArray
376 |             Boolean values to invert.
377 | 
378 |         Returns
379 |         -------
380 |         xr.DataArray :
381 |             Inverted boolean values.
382 |         """
383 |         return xr.ufuncs.logical_not(x)
384 | 
385 |     @staticmethod
386 |     def exec_da():
387 |         pass
388 | 
389 | 
390 | ########################################################################################################################
391 | # If Process
392 | ########################################################################################################################
393 | 
394 | @process
395 | def if_():
396 |     """
397 |     Returns class instance of `If`.
398 |     For more details, please have a look at the implementations inside `If`.
399 | 
400 |     Returns
401 |     -------
402 |     If :
403 |         Class instance implementing all 'if' processes.
404 | 
405 |     """
406 |     return If()
407 | 
408 | 
409 | class If:
410 |     """
411 |     Class implementing all 'if' processes.
412 |     """
413 | 
414 |     @staticmethod
415 |     def exec_num(value, accept, reject=None):
416 |         """
417 |         If the value passed is True, returns the value of the `accept` parameter,
418 |         otherwise returns the value of the `reject` parameter.
419 | 
420 |         Parameters
421 |         ----------
422 |         value : bool
423 |             A boolean value.
424 |         accept : object
425 |             A value that is returned if the boolean value is True.
426 |         reject : object, optional
427 |             A value that is returned if the boolean value is not True. Defaults to None.
428 | 
429 |         Returns
430 |         -------
431 |         object :
432 |             Either the `accept` or `reject` argument depending on the given boolean value.
433 | 
434 |         """
435 |         return accept if value else reject
436 | 
437 |     @staticmethod
438 |     def exec_np(value, accept, reject=np.nan):
439 |         """
440 |         If the array value passed is True, returns the value of the `accept` parameter,
441 |         otherwise returns the value of the `reject` parameter.
442 | 
443 |         Parameters
444 |         ----------
445 |         value : np.array
446 |             A boolean array.
447 |         accept : object
448 |             A value that is returned if the boolean value is True.
449 |         reject : object, optional
450 |             A value that is returned if the boolean value is not True. Defaults to np.nan.
451 | 
452 |         Returns
453 |         -------
454 |         np.array :
455 |             Either the `accept` or `reject` argument depending on the given boolean value.
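
        Example (values assumed): `exec_np(np.array([True, False]), 1, 0)` evaluates
        to `array([1, 0])`, since it is a thin wrapper around `np.where`.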
456 | 
457 |         """
458 | 
459 |         return np.where(value, accept, reject)
460 | 
461 |     @staticmethod
462 |     def exec_xar(value, accept, reject=np.nan):
463 |         """
464 |         If the array value passed is True, returns the value of the `accept` parameter,
465 |         otherwise returns the value of the `reject` parameter.
466 | 
467 |         Parameters
468 |         ----------
469 |         value : xr.DataArray
470 |             A boolean array.
471 |         accept : object
472 |             A value that is returned if the boolean value is True.
473 |         reject : object, optional
474 |             A value that is returned if the boolean value is not True. Defaults to np.nan.
475 | 
476 |         Returns
477 |         -------
478 |         xr.DataArray :
479 |             Either the `accept` or `reject` argument depending on the given boolean value.
480 |         """
481 |         p = value.where(value == 0, accept)  # truthy positions become `accept`
482 |         p = p.where(value == 1, reject)  # remaining positions (falsy or NaN) become `reject`
483 |         return p
484 | 
485 |     @staticmethod
486 |     def exec_da():
487 |         pass
488 | 
489 | 
490 | ########################################################################################################################
491 | # Any Process
492 | ########################################################################################################################
493 | 
494 | @process
495 | def any_():
496 |     """
497 |     Returns class instance of `Any`.
498 |     For more details, please have a look at the implementations inside `Any`.
499 | 
500 |     Returns
501 |     -------
502 |     Any :
503 |         Class instance implementing all 'any' processes.
504 | 
505 |     """
506 |     return Any()
507 | 
508 | 
509 | class Any:
510 |     """
511 |     Class implementing all 'any' processes.
512 |     """
513 | 
514 |     @staticmethod
515 |     def exec_num():
516 |         pass
517 | 
518 |     @staticmethod
519 |     def exec_np(data, ignore_nodata=True, dimension=0):
520 |         """
521 |         Checks if any (i.e. at least one) value is True. Evaluates all values from the first to the last element and
522 |         stops once the outcome is unambiguous. If only one value is given, the process evaluates to the given value.
523 |         If no value is given (i.e. the array is empty) the process returns None.
524 |         By default all NaN values are ignored so that the process returns np.nan if all values are NaN,
525 |         True if at least one of the other values is True and False otherwise.
526 |         Setting the `ignore_nodata` flag to False considers NaN values so that np.nan is a valid logical object.
527 |         If a component is np.nan, the result will be np.nan if the outcome is ambiguous.
528 | 
529 |         Parameters
530 |         ----------
531 |         data : np.array
532 |             A boolean array. An empty array always resolves to None.
533 |         ignore_nodata : bool, optional
534 |             Indicates whether no-data values are ignored or not. Ignores them by default (=True).
535 |             Setting this flag to False considers no-data values so that np.nan is returned if any value is such a value.
536 |         dimension : int, optional
537 |             Defines the dimension to evaluate 'any' along (default is 0).
538 | 
539 |         Returns
540 |         -------
541 |         np.array :
542 |             Boolean result of the logical operation.
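
        Example (values assumed): `exec_np(np.array([1.0, np.nan]))` evaluates to
        `array([1.], dtype=float32)`, because the NaN is ignored by default.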
543 | 
544 |         """
545 |         if is_empty(data):
546 |             return np.nan
547 | 
548 |         if len(data.shape) == 1:  # expand data if it has only one dimension
549 |             data = data[:, None]
550 | 
551 |         nan_ar = np.isnan(data)
552 |         if ignore_nodata:
553 |             nan_mask = np.all(nan_ar, axis=dimension)
554 |             data[nan_ar] = False
555 |         else:
556 |             nan_mask = np.any(nan_ar, axis=dimension)
557 | 
558 |         data_any = np.any(data, axis=dimension)
559 |         data_any = data_any.astype(np.float32)  # convert to float to store NaN values
560 |         data_any[nan_mask] = np.nan
561 |         return data_any
562 | 
563 |     @staticmethod
564 |     def exec_xar(data, ignore_nodata=True, dimension=None, axis=None):
565 |         """
566 |         Checks if any (i.e. at least one) value is True. Evaluates all values from the first to the last element and
567 |         stops once the outcome is unambiguous. If only one value is given, the process evaluates to the given value.
568 |         If no value is given (i.e. the array is empty) the process returns None.
569 |         By default all NaN values are ignored so that the process returns np.nan if all values are NaN,
570 |         True if at least one of the other values is True and False otherwise.
571 |         Setting the `ignore_nodata` flag to False considers NaN values so that np.nan is a valid logical object.
572 |         If a component is np.nan, the result will be np.nan if the outcome is ambiguous.
573 | 
574 |         Parameters
575 |         ----------
576 |         data : xr.DataArray
577 |             A boolean array. An empty array always resolves to None.
578 |         ignore_nodata : bool, optional
579 |             Indicates whether no-data values are ignored or not. Ignores them by default (=True).
580 |             Setting this flag to False considers no-data values so that np.nan is returned if any value is such a value.
581 |         dimension : str, optional
582 |             Defines the dimension to evaluate 'any' along (default is None).
583 |         axis : int, optional
584 |             Defines the axis to evaluate 'any' along.
585 |             Only one of the ‘dimension’ and ‘axis’ arguments can be supplied. If neither is supplied, 'any' is calculated over all axes.
586 | 
587 |         Returns
588 |         -------
589 |         xr.DataArray :
590 |             Boolean result of the logical operation.
591 |         """
592 |         if len(data) == 0:
593 |             return xr.DataArray(np.nan)
594 | 
595 |         data_nan = data.where(data == True, False)  # Set NaN to False
596 |         if ignore_nodata:
597 |             return data_nan.any(dim=dimension, axis=axis)
598 |         else:
599 |             data = data.any(dim=dimension, axis=axis)
600 |             data_nan = data_nan.any(dim=dimension, axis=axis)
601 |             if (data == data_nan).all():  # See if there are NaNs that were set to False
602 |                 return data
603 |             else:
604 |                 return data.where(data == data_nan, np.nan)
605 | 
606 |     @staticmethod
607 |     def exec_da():
608 |         pass
609 | 
610 | 
611 | ########################################################################################################################
612 | # All Process
613 | ########################################################################################################################
614 | 
615 | @process
616 | def all_():
617 |     """
618 |     Returns class instance of `All`.
619 |     For more details, please have a look at the implementations inside `All`.
620 | 
621 |     Returns
622 |     -------
623 |     All :
624 |         Class instance implementing all 'all' processes.
625 | 
626 |     """
627 |     return All()
628 | 
629 | 
630 | class All:
631 |     """
632 |     Class implementing all 'all' processes.
633 |     """
634 | 
635 |     @staticmethod
636 |     def exec_num():
637 |         pass
638 | 
639 |     @staticmethod
640 |     def exec_np(data, ignore_nodata=True, dimension=0):
641 |         """
642 |         Checks if all of the values are True. Evaluates all values from the first to the last element and stops once
643 |         the outcome is unambiguous. If only one value is given, the process evaluates to the given value. If no value
644 |         is given (i.e. the array is empty) the process returns None. By default all no-data values are ignored so
645 |         that the process returns np.nan if all values are no-data, True if all other values are True and False
646 |         otherwise. Setting the `ignore_nodata` flag to False considers no-data values so that np.nan is a valid
647 |         logical object. If a component is np.nan, the result will be np.nan if the outcome is ambiguous.
648 | 
649 |         Parameters
650 |         ----------
651 |         data : np.array
652 |             A boolean array. An empty array always resolves to None.
653 |         ignore_nodata : bool, optional
654 |             Indicates whether no-data values are ignored or not. Ignores them by default (=True).
655 |             Setting this flag to False considers no-data values so that np.nan is returned if any value is such a value.
656 |         dimension : int, optional
657 |             Defines the dimension to evaluate 'all' along (default is 0).
658 | 
659 |         Returns
660 |         -------
661 |         np.array :
662 |             Boolean result of the logical operation.
663 | 
664 |         """
665 |         if is_empty(data):
666 |             return np.nan
667 | 
668 |         if len(data.shape) == 1:  # expand data if it has only one dimension
669 |             data = data[:, None]
670 | 
671 |         nan_ar = np.isnan(data)
672 |         if ignore_nodata:
673 |             nan_mask = np.all(nan_ar, axis=dimension)
674 |             data_all = np.all(data, axis=dimension)
675 |         else:
676 |             nan_mask = np.any(nan_ar, axis=dimension)  # flag elements with at least one NaN value along the dimension
677 |             data_all = np.all(data, axis=dimension)
678 |             nan_mask = nan_mask & data_all  # reset nan mask to only mask trues and NaN values
679 | 
680 |         data_all = data_all.astype(np.float32)  # convert to float to store NaN values
681 |         data_all[nan_mask] = np.nan
682 |         return data_all
683 | 
684 |     @staticmethod
685 |     def exec_xar(data, ignore_nodata=True, dimension=None, axis=None):
686 |         """
687 |         Checks if all of the values are True. Evaluates all values from the first to the last element and stops once
688 |         the outcome is unambiguous. If only one value is given, the process evaluates to the given value. If no value
689 |         is given (i.e. the array is empty) the process returns None. By default all no-data values are ignored so
690 |         that the process returns np.nan if all values are no-data, True if all other values are True and False
691 |         otherwise. Setting the `ignore_nodata` flag to False considers no-data values so that np.nan is a valid
692 |         logical object. If a component is np.nan, the result will be np.nan if the outcome is ambiguous.
693 | 
694 |         Parameters
695 |         ----------
696 |         data : xr.DataArray
697 |             A boolean array. An empty array always resolves to None.
698 |         ignore_nodata : bool, optional
699 |             Indicates whether no-data values are ignored or not. Ignores them by default (=True).
700 |             Setting this flag to False considers no-data values so that np.nan is returned if any value is such a value.
701 |         dimension : str, optional
702 |             Defines the dimension to evaluate 'all' along (default is None).
703 |         axis : int, optional
704 |             Defines the axis to evaluate 'all' along.
705 |             Only one of the ‘dimension’ and ‘axis’ arguments can be supplied. If neither is supplied, 'all' is calculated over all axes.
706 | 
707 | 
708 |         Returns
709 |         -------
710 |         xr.DataArray :
711 |             Boolean result of the logical operation.
712 | 
713 |         """
714 |         if len(data) == 0:
715 |             return xr.DataArray(np.nan)
716 |         data_nan = data.where(data == True, False)
717 |         if ignore_nodata:
718 |             return data.all(dim=dimension, axis=axis)  # `all` treats NaNs as truthy, so they are effectively ignored
719 |         else:
720 |             data = data.all(dim=dimension, axis=axis)
721 |             data_nan = data_nan.all(dim=dimension, axis=axis)
722 |             if (data == data_nan).all():  # See if there are NaNs that were set to False
723 |                 return data
724 |             else:
725 |                 return data.where(data == data_nan, np.nan)
726 | 
727 |     @staticmethod
728 |     def exec_da():
729 |         pass
--------------------------------------------------------------------------------
/src/openeo_processes/texts.py:
--------------------------------------------------------------------------------
1 | 
2 | def text_begins(text, pattern, case_sensitive=True):
3 |     """
4 |     Checks whether the text (also known as a string) contains the text specified for `pattern` at the beginning.
5 |     The no-data value None is passed through and therefore gets propagated.
6 | 
7 |     Parameters
8 |     ----------
9 |     text : str
10 |         Text in which to find something at the beginning.
11 |     pattern : str
12 |         Text to find at the beginning of `text`.
13 |     case_sensitive : bool, optional
14 |         Case-sensitive comparison can be disabled by setting this parameter to False (default is True).
15 | 
16 |     Returns
17 |     -------
18 |     bool :
19 |         True if `text` begins with `pattern`, False otherwise.
20 | 
21 |     """
22 |     if text is None:
23 |         return None
24 | 
25 |     if case_sensitive:
26 |         return text.startswith(pattern)
27 |     else:
28 |         return text.lower().startswith(pattern.lower())
29 | 
30 | 
31 | def text_ends(text, pattern, case_sensitive=True):
32 |     """
33 |     Checks whether the text (also known as a string) contains the text specified for `pattern` at the end.
34 |     The no-data value None is passed through and therefore gets propagated.
35 | 
36 |     Parameters
37 |     ----------
38 |     text : str
39 |         Text in which to find something at the end.
40 |     pattern : str
41 |         Text to find at the end of `text`.
42 |     case_sensitive : bool, optional
43 |         Case-sensitive comparison can be disabled by setting this parameter to False (default is True).
44 | 
45 |     Returns
46 |     -------
47 |     bool :
48 |         True if `text` ends with `pattern`, False otherwise.
49 | 
50 |     """
51 |     if text is None:
52 |         return None
53 | 
54 |     if case_sensitive:
55 |         return text.endswith(pattern)
56 |     else:
57 |         return text.lower().endswith(pattern.lower())
58 | 
59 | 
60 | def text_contains(text, pattern, case_sensitive=True):
61 |     """
62 |     Checks whether the text (also known as a string) contains the text specified for `pattern`.
63 |     The no-data value None is passed through and therefore gets propagated.
64 | 
65 |     Parameters
66 |     ----------
67 |     text : str
68 |         String in which to find something.
69 |     pattern : str
70 |         String to find in `text`.
71 |     case_sensitive : bool, optional
72 |         Case-sensitive comparison can be disabled by setting this parameter to False (default is True).
73 | 
74 |     Returns
75 |     -------
76 |     bool :
77 |         True if `text` contains the `pattern`, False otherwise.
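
        Examples (values assumed): `text_contains("Lorem ipsum", "ipsum")` returns True;
        `text_contains("Lorem ipsum", "IPSUM", case_sensitive=False)` returns True;
        `text_contains(None, "ipsum")` returns None.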
78 | 
79 |     """
80 |     if text is None:
81 |         return None
82 | 
83 |     if case_sensitive:
84 |         return pattern in text
85 |     else:
86 |         return pattern.lower() in text.lower()
87 | 
88 | 
89 | def text_merge(data, separator=''):
90 |     """
91 |     Merges string representations of a set of elements together to a single string, with the separator
92 |     between each element.
93 | 
94 |     Parameters
95 |     ----------
96 |     data : list
97 |         A list of elements. Numbers, boolean values and None values get converted to their (lower case) string
98 |         representation. For example: 1 (int), -1.5 (float), True / False (boolean values)
99 |     separator : object, optional
100 |         A separator to put between each of the individual texts. Defaults to an empty string ('').
101 | 
102 |     Returns
103 |     -------
104 |     str :
105 |         Returns a string containing a string representation of all the array elements in the same order,
106 |         with the separator between each element.
107 | 
108 |     """
109 |     if data is None:
110 |         return None
111 | 
112 |     data = [str(elem).lower() if type(elem) != str else elem for elem in data]
113 |     separator = str(separator)
114 | 
115 |     return separator.join(data)
--------------------------------------------------------------------------------
/src/openeo_processes/utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import functools
3 | import re
4 | from datetime import timezone, timedelta, datetime
5 | from typing import Any, Callable, Tuple, List
6 | 
7 | import dask
8 | import dask.dataframe as dd
9 | import numpy as np
10 | import xarray as xr
11 | import geopandas as gpd
12 | import rasterio.features  # import the submodule explicitly; geometry_mask below relies on it
13 | 
14 | # This is a workaround for this package now requiring gdal, which isn't straightforward to install with pip.
15 | # TODO: Remove this once we've figured out how to properly integrate the gdal dependency for this library
16 | try:
17 |     from equi7grid import equi7grid
18 | except ImportError:
19 |     equi7grid = None
20 | try:
21 |     from osgeo import osr
22 | except ImportError:
23 |     osr = None
24 | 
25 | 
26 | def eval_datatype(data):
27 |     """
28 |     Returns a data type tag depending on the data type of `data`.
29 |     This can be:
30 |         - "numpy": `numpy.ndarray`
31 |         - "xarray": `xarray.DataArray`
32 |         - "dask": `dask.array.core.Array`
33 |         - "int", "float", "dict", "list", "set", "tuple", "NoneType": Python builtins
34 |         - "datetime": `datetime.datetime`
35 |         - "function": callable object
36 | 
37 |     Parameters
38 |     ----------
39 |     data : object
40 |         Data to get the data type from.
41 | 
42 |     Returns
43 |     -------
44 |     str :
45 |         Data type tag.
46 | 
47 |     """
48 |     package = type(data).__module__
49 |     package_root = package.split(".", 1)[0]
50 |     if package in ("builtins", "datetime"):
51 |         return type(data).__name__
52 |     elif package_root in ("numpy", "xarray", "dask", "datacube", "geopandas", "dask_geopandas", "xgboost"):
53 |         return package_root
54 |     else:
55 |         return package + '.' + type(data).__name__
56 | 
57 | 
58 | def tuple_args_to_np_array(args, kwargs) -> Tuple[list, dict]:
59 |     np_args = [np.array(arg) for arg in args if isinstance(arg, tuple)]
60 |     np_kwargs = {}
61 |     for key, value in kwargs.items():
62 |         if isinstance(value, tuple):
63 |             np_kwargs[key] = np.array(value)
64 |         else:
65 |             np_kwargs[key] = value
66 |     return np_args, np_kwargs
67 | 
68 | 
69 | # Registry of processes (dict mapping process id to wrapped process implementation).
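# Sketch of how the registry ends up populated (names taken from this package's decorators):
# after `@process` wraps `def and_(): ...` in logic.py, `_processes["and"]` holds the
# dispatching wrapper, so `get_process("and")` returns a callable that picks `exec_num`,
# `exec_np`, `exec_xar`, ... based on the argument types.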
70 | _processes = {}
71 | 
72 | 
73 | def process(processor):
74 |     """
75 |     This function serves as a decorator for empty openEO process definitions, which call a class `processor` defining
76 |     the process implementations for different data types.
77 | 
78 |     Parameters
79 |     ----------
80 |     processor : class
81 |         Class implementing an openEO process containing the methods `exec_num`, `exec_np`, `exec_xar`, or `exec_da`.
82 | 
83 |     Returns
84 |     -------
85 |     object :
86 |         Process/function wrapper returning the result of the process.
87 | 
88 |     """
89 |     @functools.wraps(processor)
90 |     def fun_wrapper(*args, **kwargs):
91 |         cls = processor()
92 | 
93 |         # Workaround to allow mapping correctly also List(xr.DataArray)
94 |         # TODO: remove automatic conversion from List to np.Array and update all tests
95 |         # Convert lists to numpy arrays
96 |         datatypes = None
97 |         if args:
98 |             # Check if there is a list of xr.DataArrays in the first variable
99 |             if isinstance(args[0], list) and any(isinstance(a, xr.DataArray) for a in args[0]):
100 |                 datatypes = ["xarray"]
101 |             else:
102 |                 args = tuple(list2nparray(a) if isinstance(a, list) else a for a in args)
103 |         if kwargs:
104 |             # Check if there is a list of xr.DataArrays in variable 'data'
105 |             if 'data' in kwargs and isinstance(kwargs['data'], list) and any(isinstance(a, xr.DataArray) for a in kwargs['data']):
106 |                 datatypes = ["xarray"]
107 |             else:
108 |                 kwargs = {k: (list2nparray(v) if isinstance(v, list) else v) for k, v in kwargs.items()}
109 |         if not datatypes:
110 |             # retrieve data types of input (keyword) arguments
111 |             datatypes = set(eval_datatype(a) for a in args)
112 |             datatypes.update(eval_datatype(v) for v in kwargs.values())
113 | 
114 |         datatypes = set(datatypes)
115 |         if "datacube" in datatypes:
116 |             cls_fun = getattr(cls, "exec_odc")
117 |         elif datatypes.intersection(["xarray", "dask", "geopandas", "xgboost", "dask_geopandas"]):
118 |             cls_fun = getattr(cls, "exec_xar")
119 |         elif "numpy" in datatypes:
120 |             cls_fun = getattr(cls, "exec_np")
121 |         elif datatypes.issubset({"int", "float", "NoneType", "str", "bool", "datetime", "dict"}):
122 |             cls_fun = getattr(cls, "exec_num")
123 |         elif "tuple" in datatypes:
124 |             args, kwargs = tuple_args_to_np_array(args, kwargs)
125 |             cls_fun = getattr(cls, "exec_np")
126 |         else:
127 |             raise Exception('Datatype unknown.')
128 | 
129 |         return cls_fun(*args, **kwargs)
130 | 
131 |     process_id = processor.__name__.rstrip('_')
132 |     _processes[process_id] = fun_wrapper
133 | 
134 |     return fun_wrapper
135 | 
136 | 
137 | def has_process(process_id: str) -> bool:
138 |     """
139 |     Check if the given process is defined.
140 | 
141 |     Parameters
142 |     ----------
143 |     process_id : str
144 |         Process id.
145 | 
146 |     Returns
147 |     -------
148 |     True if the process is defined, False otherwise.
149 |     """
150 |     return process_id in _processes
151 | 
152 | 
153 | def get_process(process_id: str) -> Callable:
154 |     """
155 |     Get the function corresponding with the given process id.
156 | 
157 |     Parameters
158 |     ----------
159 |     process_id : str
160 |         Process id.
161 | 
162 |     Returns
163 |     -------
164 |     Python function (callable) that wraps the process.
165 |     """
166 |     return _processes[process_id]
167 | 
168 | 
169 | 
170 | 
171 | 
172 | def list2nparray(x):
173 |     """
174 |     Converts a list into a numpy array.
175 | 
176 |     Parameters
177 |     ----------
178 |     x : list or np.ndarray
179 |         List to convert.
180 | 
181 |     Returns
182 |     -------
183 |     np.ndarray
184 | 
185 |     """
186 |     x_tmp = np.array(x)
187 |     if x_tmp.dtype.kind in ['U', 'S']:
188 |         x = np.array(x, dtype=object)
189 |     else:
190 |         x = x_tmp
191 | 
192 |     return x
193 | 
194 | 
195 | def create_slices(index, axis=0, n_axes=1):
196 |     """
197 |     Creates a multidimensional slice index.
198 | 
199 |     Parameters
200 |     ----------
201 |     index : int
202 |         The zero-based index of the element to retrieve.
203 |     axis : int, optional
204 |         Axis of the given index (default is 0).
205 |     n_axes : int, optional
206 |         Number of axes (default is 1).
207 | 
208 |     Returns
209 |     -------
210 |     tuple of slice :
211 |         Tuple of index slices.
212 | 
213 |     """
214 | 
215 |     slices = [slice(None)] * n_axes
216 |     slices[axis] = index
217 | 
218 |     return tuple(slices)
219 | 
220 | 
221 | def str2time(string, allow_24h=False):
222 |     """
223 |     Converts time strings in various formats to a datetime object.
224 |     The datetime formats follow the RFC3339 convention.
225 | 
226 |     Parameters
227 |     ----------
228 |     string : str
229 |         String representation of time or date.
230 |     allow_24h : bool, optional
231 |         If True, `string` is allowed to contain '24' as hour value.
232 | 
233 |     Returns
234 |     -------
235 |     datetime.datetime :
236 |         Parsed datetime object.
237 | 
238 |     """
239 | 
240 |     # handle timezone formatting and replace possibly occurring ":" in time zone string
241 |     # handle timezone formatting for +
242 |     if "+" in string:
243 |         string_parts = string.split('+')
244 |         string_parts[-1] = string_parts[-1].replace(':', '')
245 |         string = "+".join(string_parts)
246 | 
247 |     # handle timezone formatting for -
248 |     if "t" in string.lower():  # a full datetime string is given
249 |         time_string = string[10:]
250 |         if "-" in time_string:
251 |             string_parts = time_string.split('-')
252 |             string_parts[-1] = string_parts[-1].replace(':', '')
253 |             string = string[:10] + "-".join(string_parts)
254 |     else:  # a time string is given
255 |         if "-" in string:
256 |             string_parts = string.split('-')
257 |             string_parts[-1] = string_parts[-1].replace(':', '')
258 |             string = "-".join(string_parts)
259 | 
260 |     # searches for 24 in hour value
261 |     pattern = re.compile(r"24:\d{2}:\d{2}")
262 |     pattern_match = re.search(pattern, string)
263 |     if pattern_match:
264 |         if allow_24h:  # if the user allows 24 as an hour value, replace 24 with 23 and add a one-hour timedelta later
265 |             old_sub_string = pattern_match.group()
266 |             new_sub_string = "23" + old_sub_string[2:]
267 |             string = string.replace(old_sub_string, new_sub_string)
268 |         else:
269 |             err_msg = "24 is not allowed as an hour value. Hours are only allowed to be given in the range 0 - 23. " \
270 |                       "Set 'allow_24h' to 'True' if you want to translate 24 as an hour."
271 |             raise ValueError(err_msg)
272 | 
273 |     rfc3339_time_formats = ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%SZ", "%Y-%m-%dT%H:%M:%S.%fZ", "%Y-%m-%dT%H:%M:%S.%f",
274 |                             "%Y-%m-%dT%H:%M:%Sz", "%Y-%m-%dt%H:%M:%SZ", "%Y-%m-%dt%H:%M:%Sz", "%Y-%m-%dT%H:%M:%S%z",
275 |                             "%Y-%m-%dt%H:%M:%S%z", "%H:%M:%SZ", "%H:%M:%S%z"]
276 |     date_time = None
277 |     # loops through each format and takes the one for which the translation succeeded first
278 |     for i, used_time_format in enumerate(rfc3339_time_formats):
279 |         try:
280 |             date_time = datetime.strptime(string, used_time_format)
281 |             if date_time.tzinfo is None:
282 |                 date_time = date_time.replace(tzinfo=timezone.utc)
283 |             break
284 |         except ValueError:  # strptime raises ValueError on a format mismatch; try the next format
285 |             continue
286 | 
287 |     # add a timedelta of one hour if 24 is allowed as an hour value
288 |     if date_time and allow_24h:
289 |         date_time += timedelta(hours=1)
290 | 
291 |     return date_time
292 | 
293 | 
294 | def get_time_dimension_from_data(data: xr.DataArray, dim: str = "time") -> str:
295 |     """Automatically find the time dimension available in the dataset.
296 | 
297 |     Supports 't' and 'time' (openEO prefers 't'; internally 'time' is used).
298 |     """
299 |     if dim in data.dims:
300 |         return dim
301 |     time_dimensions = ["time", "t", "times"]
302 |     for time_dim in time_dimensions:
303 |         if time_dim in data.dims:
304 |             return time_dim
305 |     raise Exception("Time dimension could not be identified in the data.")
306 | 
307 | def keep_attrs(x, y, data):
308 |     """Keeps the attributes of the inputs x and y in the output data.
309 | 
310 |     When a process that requires two inputs x and y is used,
311 |     the attributes of x and y are not automatically forwarded to the output data.
312 |     This checks whether one of the inputs is a DataArray with attributes
313 |     and assigns those attributes to the output data.
314 |     """
315 |     if isinstance(x, xr.DataArray) and isinstance(y, xr.DataArray):
316 |         for a in x.attrs:
317 |             if a in y.attrs and (x.attrs[a] == y.attrs[a]):
318 |                 data.attrs[a] = x.attrs[a]
319 |     elif isinstance(x, xr.DataArray):
320 |         data.attrs = x.attrs
321 |     elif isinstance(y, xr.DataArray):
322 |         data.attrs = y.attrs
323 |     return data
324 | 
325 | def eodc_collections_to_res():
326 |     """
327 |     Function returning a dict that maps collection ids to their related resolution.
328 | 
329 |     Parameters
330 |     ----------
331 |     None
332 | 
333 |     Returns
334 |     ----------
335 |     dict: A dictionary containing collection ids and their resolution.
336 |     """
337 |     return {
338 |         'boa_landsat_8': 30,
339 |         'SIG0_Sentinel_1': 20,
340 |         'corine_land_cover': 10,
341 |         'forest_type': 10,
342 |         'tree_cover_density': 10,
343 |         'boa_sentinel_2': 10,
344 |         'gamma0_sentinel_1_dh': 10,
345 |         'gamma0_sentinel_1_dv': 10,
346 |         'gamma0_sentinel_1_sh': 10,
347 |         'gamma0_sentinel_1_sv': 10,
348 |     }
349 | 
350 | def get_equi7_tiles(data: xr.Dataset):
351 |     """
352 |     A function taking an xarray.Dataset and returning a list of EQUI7 tiles at the relevant resolution layer along with
353 |     an Equi7Grid object.
354 | 
355 |     Parameters
356 |     ----------
357 |     xarray.Dataset: data
358 | 
359 |     Returns
360 |     ----------
361 |     list[str]: A list of EQUI7 tile names covering the dataset's extent.
362 |     equi7grid.equi7grid: Grid object with various functions for mapping and searching ROI with coordinates.
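
    Usage sketch (assumes the optional Equi7Grid dependency is installed and `data`
    carries a CRS plus x/y coordinates):

        tiles, gridder = get_equi7_tiles(ds)
        bbox = gridder.get_tile_bbox_proj(tiles[0])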
363 |     """
364 |     # Setting env variable will be moved to environment cfg
365 |     os.environ['PROJ_LIB'] = '/opt/conda/share/proj'
366 |     input_p4 = '+proj=aeqd +lat_0=53 +lon_0=24 +x_0=5837287.81977 +y_0=2121415.69617 +datum=WGS84 +units=m +no_defs'
367 | 
368 |     try:
369 |         src_crs = osr.SpatialReference(data.crs)
370 |     except AttributeError:
371 |         src_crs = osr.SpatialReference(data.attrs["crs"])  # fall back to the crs attribute; keep an osr object so ImportFromProj4 works below
372 | 
373 |     src_crs.ImportFromProj4(input_p4)
374 | 
375 |     x_min, x_max = float(data.x.min().values), float(data.x.max().values)
376 |     y_min, y_max = float(data.y.min().values), float(data.y.max().values)
377 | 
378 |     if x_min == x_max:
379 |         x_max = x_max + 1
380 |     if y_min == y_max:
381 |         y_max = y_max + 1
382 | 
383 |     bbox = [[x_min, y_min], [x_max, y_max]]
384 | 
385 |     collection_map_to_res = eodc_collections_to_res()
386 | 
387 |     if 'id' in data.attrs.keys():
388 |         if data.attrs['id'] in collection_map_to_res:
389 |             gridder = equi7grid.Equi7Grid(collection_map_to_res[data.attrs['id']])
390 |         else:
391 |             gridder = equi7grid.Equi7Grid(10)
392 |     else:
393 |         gridder = equi7grid.Equi7Grid(10)
394 | 
395 |     tiles = gridder.search_tiles_in_roi(bbox=bbox, osr_spref=src_crs)
396 | 
397 |     return tiles, gridder
398 | 
399 | def derive_datasets_and_filenames_from_tiles(gridder, times: List[str], datasets: List[xr.Dataset],
400 |                                              tiles: List[str], output_filepath: str, ext: str):
401 |     """
402 |     A function that splits a list of datasets across the given EQUI7 tiles and derives an output
403 |     filename for each time/tile combination.
404 | 
405 |     Parameters
406 |     ----------
407 |     equi7grid.equi7grid: Grid object for searching ROI to return a temporary bounding box for a tile.
408 |     list[str]: A list of times.
409 |     list[xarray.Dataset]: A list of datasets corresponding to the list of times.
410 |     list[str]: A list of tiles to split the datasets across.
411 |     str: A root output filepath for storing results.
412 |     str: The format extension to store the files with.
413 | 
414 |     Returns
415 |     ----------
416 |     list[xarray.Dataset]: The resulting datasets split across the EQUI7 tile grid.
417 |     list[str]: The list of filepaths split across the EQUI7 tile grid, corresponding to the list of datasets.
418 |     """
419 |     final_datasets = []
420 |     dataset_filenames = []
421 | 
422 |     for idx, time in enumerate(times):
423 |         dataset = datasets[idx]
424 |         file_time = np.datetime_as_string(time)[:19].replace('-', '_').replace(':', '_')
425 |         for tile in tiles:
426 |             temp_bbox = gridder.get_tile_bbox_proj(tile)
427 | 
428 |             x_min, x_max = temp_bbox[0], temp_bbox[2]
429 |             y_min, y_max = temp_bbox[1], temp_bbox[3]
430 | 
431 |             temp_bbox = [[x_min, y_min], [x_max, y_max]]
432 | 
433 |             with dask.config.set(**{'array.slicing.split_large_chunks': False}):
434 |                 temp_data = dataset.where(dataset.x > temp_bbox[0][0],
435 |                                           drop=True).where(dataset.x < temp_bbox[1][0],
436 |                                           drop=True).where(dataset.y > temp_bbox[0][1],
437 |                                           drop=True).where(dataset.y < temp_bbox[1][1],
438 |                                           drop=True)
439 | 
440 |             temp_file = output_filepath + '_{}_{}.{}'.format(file_time, tile, ext)
441 |             final_datasets.append(temp_data)
442 |             dataset_filenames.append(temp_file)
443 | 
444 |     return final_datasets, dataset_filenames
445 | 
446 | 
447 | def geometry_mask(geoms, geobox, all_touched=False, invert=False):
448 |     """
449 |     Create a mask from shapes.
450 | 
451 |     By default, mask is intended for use as a
452 |     numpy mask, where pixels that overlap shapes are False.
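
    Usage sketch (assumes `geobox` comes from an ODC dataset and `geoms` are
    datacube `Geometry` objects): `mask = geometry_mask(geoms, geobox, invert=True)`
    yields a boolean array that is True inside the geometries.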
453 |     :param list[Geometry] geoms: geometries to be rasterized
454 |     :param datacube.utils.GeoBox geobox:
455 |     :param bool all_touched: If True, all pixels touched by geometries will be burned in. If
456 |                              False, only pixels whose center is within the polygon or that
457 |                              are selected by Bresenham's line algorithm will be burned in.
458 |     :param bool invert: If True, mask will be True for pixels that overlap shapes.
459 |     """
460 |     return rasterio.features.geometry_mask([geom.to_crs(geobox.crs) for geom in geoms],
461 |                                            out_shape=geobox.shape,
462 |                                            transform=geobox.affine,
463 |                                            all_touched=all_touched,
464 |                                            invert=invert)
465 | 
466 | 
467 | 
468 | if __name__ == '__main__':
469 |     pass
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Open-EO/openeo-processes-python/6a85abcbb6648c91b60a09b36ca25984dc26b5cc/tests/__init__.py
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | 
3 | import numpy as np
4 | import pytest
5 | import xarray as xr
6 | from affine import Affine
7 | from datacube import Datacube
8 | from datacube.model import Measurement
9 | from datacube.utils.dates import mk_time_coord
10 | from datacube.utils.geometry import GeoBox, CRS
11 | 
12 | 
13 | # Taken from https://github.com/opendatacube/datacube-core/blob/develop/datacube/testutils/__init__.py#L429
14 | # with minor adaptations
15 | def mk_sample_xr_dataset(crs="EPSG:3578",
16 |                          shape=(33, 74),
17 |                          resolution=None,
18 |                          xy=(0, 0),
19 |                          time=None,
20 |                          name='band',
21 |                          dtype='int16',
22 |                          nodata=np.nan,
23 |                          units='1'):
24 |     """ Note that resolution is in Y,X order to match that of GeoBox.
25 | 
26 |     shape (height, width)
27 |     resolution (y: float, x: float) - in YX, to match GeoBox/shape notation
28 | 
29 |     xy (x: float, y: float) -- location of the top-left corner of the top-left pixel in CRS units
30 |     """
31 | 
32 |     if isinstance(crs, str):
33 |         crs = CRS(crs)
34 | 
35 |     if resolution is None:
36 |         resolution = (-10, 10) if crs is None or crs.projected else (-0.01, 0.01)
37 | 
38 |     t_coords = {}
39 |     if time is not None:
40 |         t_coords['time'] = mk_time_coord(time)
41 | 
42 |     transform = Affine.translation(*xy)*Affine.scale(*resolution[::-1])
43 |     h, w = shape
44 |     geobox = GeoBox(w, h, transform, crs)
45 | 
46 |     storage = Datacube.create_storage(
47 |         t_coords, geobox, [Measurement(name=name, dtype=dtype, nodata=nodata, units=units)])
48 |     return storage.to_array(dim="bands")
49 | 
50 | 
51 | @pytest.fixture(scope="class")
52 | def test_data(request):
53 |     class TestDataDriver:
54 |         def __init__(self):
55 |             self.coords_extra_dim = {
56 |                 'bands': ['band_1', 'band_2', 'band_3'],
57 |                 'y': np.array([1477835.]),
58 |                 'x': np.array([4882815.]),
59 |                 'params': np.array([0, 1, 2]),
60 |             }
61 |             self.steps = {'y': 5, 'x': 3}
62 |             self.coords_4d = {
63 |                 'bands': ['B08', 'B04', 'B02'],
64 |                 'time': [datetime(2019, 12, 1), datetime(2019, 12, 5)],
65 |                 'y': np.arange(55.3, 55.3 + self.steps['y']),
66 |                 'x': np.arange(118.9, 118.9 + self.steps['x'])
67 |             }
68 |             self.coords_3d = {
69 |                 'time': [datetime(2019, 12, 1), datetime(2019, 12, 5)],
70 |                 'y': np.arange(55.3, 55.3 + self.steps['y']),
71 |                 'x': np.arange(118.9, 118.9 + self.steps['x'])
72 |             }
73 |             self._get_numpy()
74 |             self._get_xarray()
75 |             self.equi7xarray = self._get_equi7xarray()
76 |             self.geojson_polygon = self._get_geojson_polygon()
77 |             self.geojson_multipolygon = self._get_geojson_multipolygon()
78 | 
79 |         def _get_numpy(self):
80 |             """
81 |             Creates fixed numpy test arrays (4D and 3D) and stores them on the driver.
82 |             """
83 | 
84 |             data = np.ones((3, 2, self.steps['y'], self.steps['x']))
85 |             data[0, :] *= 8  # identify band 8 by its value
86 |             data[1, :] *= 4  # identify band 4 by its value
87 |             data[2, :] *= 2  # identify band 2 by its value
88 | 
89 |             data[:, 1, :] *= 10  # second t-step of each band multiplied by 10
90 | 
91 |             self.np_data_4d = data
92 |             self.np_data_3d = data[0, :]
93 | 
94 |             data_extra_dim = np.ones((3, 1, 1, 3))
95 |             data_extra_dim[0, :] *= 1
96 |             data_extra_dim[1, :] *= 2
97 |             data_extra_dim[2, :] *= 3
98 |             data_extra_dim[:, :, :, 1] *= 10
99 |             data_extra_dim[:, :, :, 2] *= 100
100 |             self.np_data_extra_dim = data_extra_dim
101 | 
102 | 
103 |         def _get_xarray(self):
104 |             """
105 |             Creates fixed xarray DataArrays with labelled dimensions
106 |             and coordinates.
107 | """ 108 | 109 | self.xr_data_4d = xr.DataArray(data=self.np_data_4d, 110 | dims=self.coords_4d.keys(), 111 | coords=self.coords_4d) 112 | self.xr_data_4d.attrs['crs'] = 'EPSG:4326' 113 | self.xr_data_3d = xr.DataArray(data=self.np_data_3d, 114 | dims=self.coords_3d.keys(), 115 | coords=self.coords_3d) 116 | self.xr_data_3d.attrs['crs'] = 'EPSG:4326' 117 | self.xr_odc_data_3d = mk_sample_xr_dataset() 118 | self.xr_odc_data_4d = mk_sample_xr_dataset( 119 | time=['2020-02-13T11:12:13.1234567Z', '2020-02-14T11:12:13.1234567Z']) 120 | self.xr_data_extra_dim = xr.DataArray(data=self.np_data_extra_dim, 121 | dims=self.coords_extra_dim.keys(), 122 | coords=self.coords_extra_dim) 123 | 124 | 125 | def _get_equi7xarray(self): 126 | y = [1459509.1198203214, 1462676.7740152855, 1463645.2966358056] 127 | x = [4869567.340356644, 4870511.829134757, 4870695.104272628, 4870878.396641726, 4871822.373331639] 128 | bands = ['B04', 'B08'] 129 | equi7xarray = xr.DataArray(np.array([[[550, 1200], [550, 1200], [550, 1200], [550, 1200], [550, 1200]], 130 | [[550, 1200], [550, 1200], [550, 1200], [550, 1200], [550, 1200]], 131 | [[550, 1200], [550, 1200], [550, 1200], [550, 1200], [550, 1200]]]), 132 | coords = [y, x, bands], dims= ["y", "x", "bands"]) 133 | equi7xarray.attrs["crs"] = 'PROJCS["Azimuthal_Equidistant",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433]],PROJECTION["Azimuthal_Equidistant"],PARAMETER["latitude_of_center",53],PARAMETER["longitude_of_center",24],PARAMETER["false_easting",5837287.81977],PARAMETER["false_northing",2121415.69617],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]' 134 | return equi7xarray 135 | 136 | def _get_geojson_polygon(self): 137 | return { 138 | 'type': 'FeatureCollection', 139 | 'features': [ 140 | { 141 | 'type': 'Feature', 142 | 'properties': {}, 143 | 'geometry': { 144 | 'type': 'Polygon', 145 | 'coordinates': [ 146 | [ 147 | [ 148 | 11.402550080548934, 149 | 46.299634105980964 150 | ], 151 | [ 152 | 11.437437058344888, 153 | 46.299634105980964 154 | ], 155 | [ 156 | 11.437437058344888, 157 | 46.350398112827055 158 | ], 159 | [ 160 | 11.402550080548934, 161 | 46.350398112827055 162 | ], 163 | [ 164 | 11.402550080548934, 165 | 46.299634105980964 166 | ] 167 | ] 168 | ] 169 | } 170 | } 171 | ] 172 | } 173 | 174 | def _get_geojson_multipolygon(self): 175 | return { 176 | 'type': 'FeatureCollection', 177 | 'features': [ 178 | { 179 | 'type': 'Feature', 180 | 'properties': {}, 181 | 'geometry': { 182 | 'type': 'MultiPolygon', 183 | 'coordinates': [ 184 | [ 185 | [ 186 | [ 187 | 11.437437058344888, 188 | 46.350398112827055 189 | ], 190 | [ 191 | 11.457437058344888, 192 | 46.350398112827055 193 | ], 194 | [ 195 | 11.457437058344888, 196 | 46.390398112827055 197 | ], 198 | [ 199 | 11.437437058344888, 200 | 46.350398112827055 201 | ] 202 | ] 203 | ], 204 | [ 205 | [ 206 | [ 207 | 11.457437058344888, 208 | 46.390398112827055 209 | ], 210 | [ 211 | 11.477437058344888, 212 | 46.400398112827055 213 | ], 214 | [ 215 | 11.477437058344888, 216 | 46.390398112827055 217 | ], 218 | [ 219 | 11.457437058344888, 220 | 46.390398112827055 221 | ] 222 | ] 223 | ] 224 | ] 225 | } 226 | } 227 | ] 228 | } 229 | 230 | 231 | def xr_data_factor(self, factor_1=1.0, factor_2=1.0): 232 | data = np.ones((3, 2, self.steps['y'], self.steps['x'])) 233 | data[0, 0] *= factor_1 234 | data[:, 1] *= factor_2 235 | xdata = 
xr.DataArray(data=data[0, :], 236 | dims=self.coords_3d.keys(), 237 | coords=self.coords_3d) 238 | xdata.attrs['crs'] = 'EPSG:4326' # create a data array with variable values 239 | return xdata 240 | 241 | 242 | request.cls.test_data = TestDataDriver() 243 | 244 | -------------------------------------------------------------------------------- /tests/data/array.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Open-EO/openeo-processes-python/6a85abcbb6648c91b60a09b36ca25984dc26b5cc/tests/data/array.nc -------------------------------------------------------------------------------- /tests/data/out.time.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Open-EO/openeo-processes-python/6a85abcbb6648c91b60a09b36ca25984dc26b5cc/tests/data/out.time.nc -------------------------------------------------------------------------------- /tests/test_arrays.py: -------------------------------------------------------------------------------- 1 | """ 2 | Most tests are in alignment with: 3 | https://openeo.org/documentation/1.0/processes.html 4 | """ 5 | 6 | import numpy as np 7 | import openeo_processes as oeop 8 | import pytest 9 | import xarray as xr 10 | from unittest import TestCase 11 | import math 12 | 13 | 14 | @pytest.mark.usefixtures("test_data") 15 | class ArrayTester(TestCase): 16 | """ Tests all array functions. """ 17 | 18 | def test_array_create(self): 19 | """ Tests `array_create` function. """ 20 | assert (oeop.array_create(2,3) == np.array([2,2,2])).all() 21 | assert (len(oeop.array_create([])) == 0) 22 | assert (oeop.array_create([1, 2, 3], repeat=2) == np.array([1,2,3,1,2,3])).all() 23 | assert (oeop.array_create(["A", "B", "C"]) == np.array(["A", "B", "C"])).all() 24 | assert len(oeop.array_create([np.nan, 1], 2) == 4) 25 | 26 | def test_array_modify(self): 27 | """ Tests `array_modify` function. """ 28 | assert (oeop.array_modify(np.array([2,3]), np.array([4,7]), 1, 0) == np.array([2,4,7,3])).all() 29 | assert (oeop.array_modify(data = ["a","d","c"], values = ["b"], index = 1) == ['a','b','c']).all() 30 | assert (oeop.array_modify(data = ["a","b",4,5], values = [1,2,3], index = 0, length = 2) == [1,2,3,4,5]).all() 31 | assert (oeop.array_modify(data = ["a","c"], values = ["b"], index = 1, length = 0) == ["a","b","c"]).all() 32 | assert (oeop.array_modify(data = [np.nan,np.nan,"a","b","c"], values = [], index = 0, length = 2) == ["a","b","c"]).all() 33 | assert (oeop.array_modify(data=["a", "b", "c"], values=[], index=1, length=10) == ["a"]) 34 | 35 | def test_array_concat(self): 36 | """ Tests `array_concat` function. """ 37 | assert (oeop.array_concat(np.array([2, 3]), np.array([4, 7]))== np.array([2,3,4,7])).all() 38 | assert (oeop.array_concat(array1 = ["a","b"], array2 = [1,2]) == np.array(['a','b',1,2], dtype=object)).all() 39 | 40 | def test_array_contains(self): 41 | """ Tests `array_contains` function. 
""" 42 | assert oeop.array_contains([1, 2, 3], value=2) 43 | assert not oeop.array_contains(["A", "B", "C"], value="b") 44 | assert not oeop.array_contains([1, 2, 3], value="2") 45 | assert oeop.array_contains([1, 2, 3], value=2) 46 | assert oeop.array_contains([1, 2, np.nan], value=np.nan) 47 | assert oeop.array_contains([[1, 2], [3, 4]], value=[1, 2]) 48 | assert not oeop.array_contains([[1, 2], [3, 4]], value=2) 49 | assert oeop.array_contains([{"a": "b"}, {"c": "d"}], value={"a": "b"}) 50 | assert oeop.array_contains(self.test_data.xr_data_factor(3, 5), value = 5) 51 | assert not oeop.array_contains(self.test_data.xr_data_factor(3, 5), value= 1) 52 | assert oeop.array_contains(self.test_data.xr_data_factor(np.nan, 5), value=np.NaN) 53 | assert oeop.array_contains(self.test_data.xr_data_factor(3, 5), value= [3, 5]).all() 54 | 55 | def test_array_element(self): 56 | """ Tests `array_element` function. """ 57 | # numpy tests 58 | assert oeop.array_element([9, 8], label="B", labels=np.array(["A", "B"])) == 8 59 | assert oeop.array_element([9, 8, 7, 6, 5], index=2) == 7 60 | assert oeop.array_element(["A", "B", "C"], index=0) == "A" 61 | assert np.isnan(oeop.array_element([], index=0, return_nodata=True)) 62 | 63 | # multi-dim 64 | test_array = np.empty((3, 2, 2)) 65 | test_array[0, :, :] = np.array([[1, 2], [3, 4]]) 66 | test_array[1, :, :] = np.array([[1, 2], [3, 4]]) * 20 67 | test_array[2, :, :] = np.array([[1, 2], [3, 4]]) * 500 68 | array_i2_d0 = np.array([ 69 | [500, 1000], 70 | [1500, 2000] 71 | ]) 72 | array_i0_d1 = np.array([ 73 | [1, 2], 74 | [20, 40], 75 | [500, 1000] 76 | ]) 77 | assert np.isclose(oeop.array_element(test_array, index=2, dimension=0), array_i2_d0, equal_nan=True).all() 78 | assert np.isclose(oeop.array_element(test_array, index=0, dimension=1), array_i0_d1, equal_nan=True).all() 79 | 80 | # xarray tests 81 | xr.testing.assert_equal( 82 | oeop.array_element(self.test_data.xr_data_4d, dimension='bands', label="B08"), 83 | self.test_data.xr_data_3d) 84 | xr.testing.assert_equal( 85 | oeop.array_element(self.test_data.xr_data_4d, dimension='bands', index=0), 86 | self.test_data.xr_data_3d) 87 | # Assert raised errors? 88 | # ArrayElementNotAvailable 89 | # oeop.array_element(self.xr_data_4d, dimension='s', label="B09") 90 | # oeop.array_element(self.xr_data_4d, dimension='s', index=4) 91 | 92 | def test_count(self): 93 | """ Tests `count` function. """ 94 | assert oeop.count([]) == 0 95 | assert oeop.count([], condition=True) == 0 96 | assert oeop.count([1, 0, 3, 2]) == 4 97 | assert oeop.count(["ABC", np.nan]) == 1 98 | assert oeop.count([False, np.nan], condition=True) == 2 99 | assert oeop.count([0, 1, 2, 3, 4, 5, np.nan], condition=oeop.gt, context={'y': 2}) == 3 100 | assert oeop.count([0, 1, 2, 3, 4, 5, np.nan], condition=oeop.lte, context={'y': 2}) == 3 101 | assert oeop.count(xr.DataArray(np.array([0, 1, 2, 3, 4, 5, np.nan])), condition=oeop.lte, context={'y': 2}) == 3 102 | assert oeop.count(xr.DataArray(np.array([0, 1, 2, 3, 4, 5, np.nan])), condition=oeop.gt, context={'y': 2}) == 3 103 | assert oeop.count(xr.DataArray(np.array([0, 1, 2, 3, 4, 5, np.nan])), condition=oeop.between, context={'min': 2, 'max': 4}) == 3 104 | assert oeop.count(self.test_data.xr_data_factor(3, 5), condition=oeop.gte, context={'y': 5}) == 15 105 | 106 | def test_array_apply(self): 107 | """ Tests `array_apply` function. 
""" 108 | assert oeop.array_apply([1, 0, 3, 2], process=oeop.gt, context={'y':-1, 'reduce':True}) 109 | assert oeop.array_apply(self.test_data.xr_data_factor(3, 5), process=oeop.gt, context={'y': 1, 'reduce': True}) 110 | assert oeop.array_apply(self.test_data.xr_data_factor(3, 5), process=oeop.gt, context={'y': 1}).all() 111 | 112 | def test_array_filter(self): 113 | """ Tests `array_filter` function. """ 114 | assert oeop.array_filter([1, 0, 3, 2], condition=oeop.lte, context={'y':0.5}) == 0 115 | assert (oeop.array_filter([1, 0, 3, 2, np.nan, 3], condition=oeop.gt, context={'y': 1}) == [3, 2, 3]).all() 116 | assert (oeop.array_filter([0, 1, 2, 3, 4, 5, np.nan], condition=oeop.gt, context={'y': 2}) == [3, 4, 5]).all() 117 | xr.testing.assert_equal(oeop.array_filter(self.test_data.xr_data_factor(3, 5), condition=oeop.lte, context={'y': 4}), 118 | self.test_data.xr_data_factor(3, 5)[:1, :, :]) 119 | 120 | def test_array_find(self): 121 | """ Tests `array_find` function. """ 122 | assert oeop.array_find([1, 0, 3, 2], value= 3) == 2 123 | assert np.isnan(oeop.array_find([1, 0, 3, 2, np.nan, 3], value = np.nan)) 124 | assert (oeop.array_find(self.test_data.xr_data_factor(3,5), value = 5, dimension = 'time').values == 1).all() 125 | assert np.isnan((oeop.array_find(self.test_data.xr_data_factor(3,5), value = 4)).values).all() 126 | assert np.isnan(oeop.array_find(self.test_data.xr_data_factor(-3, -5), value=(0), dimension='time').values).all() 127 | 128 | def test_array_labels(self): 129 | """ Tests `array_labels` function. """ 130 | assert (oeop.array_labels([1, 0, 3, 2]) == np.array([0, 1, 2, 3])).all() 131 | assert (oeop.array_labels([[1, 0, 3, 2]], dimension = 0) == np.array([0])) 132 | assert (oeop.array_labels(self.test_data.xr_data_factor(3,5), dimension = 0) == xr.DataArray(np.arange(2))).all() 133 | assert (oeop.array_labels(self.test_data.xr_data_factor(3,5), dimension = 'x') == xr.DataArray(np.arange(3))).all() 134 | assert (oeop.array_labels(self.test_data.xr_data_factor(3,5), dimension = 'y') == xr.DataArray(np.arange(5))).all() 135 | 136 | 137 | def test_first(self): 138 | """ Tests `first` function. """ 139 | assert oeop.first([1, 0, 3, 2]) == 1 140 | assert oeop.first([np.nan, "A", "B"]) == "A" 141 | assert np.isnan(oeop.first([np.nan, 2, 3], ignore_nodata=False)) 142 | assert np.isnan(oeop.first([])) 143 | 144 | # 2D test 145 | test_arr = np.array([[[np.nan, 2], [1, 2]], [[3, 2], [1, 2]], [[1, 2], [1, np.nan]]]) 146 | first_elem_ref = np.array([[[3., 2.], [1., 2.]]]) 147 | first_elem = oeop.first(test_arr) 148 | assert np.isclose(first_elem, first_elem_ref, equal_nan=True).all() 149 | test_arr = np.array([[[np.nan, 2], [1, 2]], [[3, 2], [1, 2]], [[1, 2], [1, np.nan]]]) 150 | first_elem_ref = np.array([[[np.nan, 2.], [1., 2.]]]) 151 | first_elem = oeop.first(test_arr, ignore_nodata=False) 152 | assert np.isclose(first_elem, first_elem_ref, equal_nan=True).all() 153 | xr.testing.assert_equal( oeop.first(self.test_data.xr_data_factor(3,5), dimension = 1, ignore_nodata = True) , self.test_data.xr_data_factor(3,5)[:,0,:]) 154 | xr.testing.assert_equal( oeop.first(self.test_data.xr_data_factor(3,5), dimension = 'x', ignore_nodata = False) , self.test_data.xr_data_factor(3,5)[:,:,0]) 155 | 156 | def test_last(self): 157 | """ Tests `last` function. 
""" 158 | assert oeop.last([1, 0, 3, 2]) == 2 159 | assert oeop.last(["A", "B", np.nan]) == "B" 160 | assert np.isnan(oeop.last([0, 1, np.nan], ignore_nodata=False)) 161 | assert np.isnan(oeop.last([])) 162 | 163 | # 2D test 164 | test_arr = np.array([[[np.nan, 2], [1, 2]], [[3, 2], [1, 3]], [[1, 2], [1, np.nan]]]) 165 | last_elem_ref = np.array([[[1., 2.], [1., 3.]]]) 166 | last_elem = oeop.last(test_arr) 167 | assert np.isclose(last_elem, last_elem_ref, equal_nan=True).all() 168 | test_arr = np.array([[[np.nan, 2], [1, 2]], [[3, 2], [1, 2]], [[1, 2], [1, np.nan]]]) 169 | last_elem_ref = np.array([[[1., 2.], [1., np.nan]]]) 170 | last_elem = oeop.last(test_arr, ignore_nodata=False) 171 | assert np.isclose(last_elem, last_elem_ref, equal_nan=True).all() 172 | xr.testing.assert_equal(oeop.last(self.test_data.xr_data_factor(3, 5), dimension='time', ignore_nodata=True), 173 | self.test_data.xr_data_factor(3, 5)[-1, :, :]) 174 | xr.testing.assert_equal(oeop.last(self.test_data.xr_data_factor(3, 5), dimension='x', ignore_nodata=False), 175 | self.test_data.xr_data_factor(3, 5)[:, :, -1]) 176 | assert (oeop.last(self.test_data.xr_data_factor(3, np.nan), dimension='time', ignore_nodata=True).values == 177 | (self.test_data.xr_data_factor(3, 5)[0, :, :]).values).all() 178 | 179 | def test_order(self): 180 | """ Tests `order` function. """ 181 | self.assertListEqual(oeop.order([6, -1, 2, np.nan, 7, 4, np.nan, 8, 3, 9, 9]).tolist(), 182 | [1, 2, 8, 5, 0, 4, 7, 9, 10]) 183 | self.assertListEqual(oeop.order([6, -1, 2, np.nan, 7, 4, np.nan, 8, 3, 9, 9], nodata=True).tolist(), 184 | [1, 2, 8, 5, 0, 4, 7, 9, 10, 3, 6]) 185 | self.assertListEqual(oeop.order([6, -1, 2, np.nan, 7, 4, np.nan, 8, 3, 9, 9], asc=False, nodata=True).tolist(), 186 | [9, 10, 7, 4, 0, 5, 8, 2, 1, 3, 6]) 187 | self.assertListEqual(oeop.order([6, -1, 2, np.nan, 7, 4, np.nan, 8, 3, 9, 9], asc=False, nodata=False).tolist(), 188 | [6, 3, 9, 10, 7, 4, 0, 5, 8, 2, 1]) 189 | 190 | def test_rearrange(self): 191 | """ Tests `rearrange` function. """ 192 | self.assertListEqual(oeop.rearrange([5, 4, 3], [2, 1, 0]).tolist(), [3, 4, 5]) 193 | self.assertListEqual(oeop.rearrange([5, 4, 3, 2], [0, 2, 1, 3]).tolist(), [5, 3, 4, 2]) 194 | self.assertListEqual(oeop.rearrange([5, 4, 3, 2], [1, 3]).tolist(), [4, 2]) 195 | xr.testing.assert_equal(oeop.rearrange(self.test_data.xr_data_factor(3, 5), [1,0]), 196 | xr.concat([self.test_data.xr_data_factor(3, 5)[1], self.test_data.xr_data_factor(3, 5)[0]], 'time')) 197 | 198 | def test_sort(self): 199 | """ Tests `sort` function. """ 200 | self.assertListEqual(oeop.sort([6, -1, 2, np.nan, 7, 4, np.nan, 8, 3, 9, 9]).tolist(), 201 | [-1, 2, 3, 4, 6, 7, 8, 9, 9]) 202 | assert np.isclose(oeop.sort([6, -1, 2, np.nan, 7, 4, np.nan, 8, 3, 9, 9], asc=False, nodata=True), 203 | [9, 9, 8, 7, 6, 4, 3, 2, -1, np.nan, np.nan], equal_nan=True).all() 204 | xr.testing.assert_equal(oeop.sort(self.test_data.xr_data_factor(3, 5), dimension='time'), self.test_data.xr_data_factor(3, 5)) 205 | assert (oeop.sort(self.test_data.xr_data_factor(5, 3), dimension='time', asc=False).values == self.test_data.xr_data_factor(3, 5).values).all() 206 | 207 | 208 | def test_vector_to_regular_points(self): 209 | """ Tests `vector_to_regular_points` function. 
""" 210 | assert isinstance(oeop.vector_to_regular_points(self.test_data.geojson_polygon, 0.01), dict) 211 | -------------------------------------------------------------------------------- /tests/test_comparison.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import numpy as np 4 | import openeo_processes as oeop 5 | import pytest 6 | import xarray as xr 7 | 8 | def test_is_empty(): 9 | assert oeop.is_empty([]) 10 | assert oeop.is_empty(np.array([])) 11 | assert oeop.is_empty(xr.DataArray([])) 12 | 13 | 14 | @pytest.mark.parametrize(["value", "expected"], [ 15 | # Based on https://github.com/Open-EO/openeo-processes/issues/189 16 | (None, True), 17 | (np.nan, True), 18 | (0, False), 19 | (1, False), 20 | (np.inf, False), 21 | ("a string", True), 22 | ([1, 2], True), 23 | ([None, None], True), 24 | ([np.nan, np.nan], True), 25 | ]) 26 | def test_is_nan(value, expected): 27 | """ Tests `is_nan` function. """ 28 | assert oeop.is_nan(value) == expected 29 | 30 | 31 | @pytest.mark.parametrize(["value", "expected"], [ 32 | # Based on https://github.com/Open-EO/openeo-processes/issues/189 33 | (None, True), 34 | (np.nan, False), 35 | (0, False), 36 | (1, False), 37 | (np.inf, False), 38 | ("a string", False), 39 | ([1, 2], False), 40 | ([None, None], False), 41 | ([np.nan, np.nan], False), 42 | ]) 43 | def test_is_nodata(value, expected): 44 | """ Tests `is_nodata` function. """ 45 | assert oeop.is_nodata(value) == expected 46 | 47 | 48 | @pytest.mark.parametrize(["value", "expected"], [ 49 | # Based on https://github.com/Open-EO/openeo-processes/issues/189 50 | (None, False), 51 | (np.nan, False), 52 | (0, True), 53 | (1, True), 54 | (np.inf, False), 55 | ("a string", True), 56 | ([1, 2], True), 57 | ([None, None], True), 58 | ([np.nan, np.nan], True), 59 | ]) 60 | def test_is_valid(value, expected): 61 | """ Tests `is_valid` function. """ 62 | assert oeop.is_valid(value) == expected 63 | 64 | @pytest.mark.usefixtures("test_data") 65 | class ComparisonTester(unittest.TestCase): 66 | """ Tests all comparison functions. """ 67 | 68 | def test_eq(self): 69 | """ Tests `eq` function. """ 70 | assert oeop.eq(1, None) is None 71 | assert oeop.eq(1, 1) 72 | assert not oeop.eq(1, "1") 73 | assert not oeop.eq(1.02, 1, delta=0.01) 74 | assert oeop.eq(-1, -1.001, delta=0.01) 75 | assert oeop.eq(115, 110, delta=10) 76 | assert not oeop.eq("Test", "test") 77 | assert oeop.eq("Test", "test", case_sensitive=False) 78 | assert oeop.eq("Ä", "ä", case_sensitive=False) 79 | assert oeop.eq("00:00:00+00:00", "00:00:00Z") 80 | assert not oeop.eq("2018-01-01T12:00:00Z", "2018-01-01T12:00:00") 81 | assert oeop.eq("2018-01-01T00:00:00Z", "2018-01-01T01:00:00+01:00") 82 | assert oeop.eq(self.test_data.xr_data_factor(3, 5), self.test_data.xr_data_factor(3, 5), reduce=True) 83 | assert oeop.eq(self.test_data.xr_data_factor(3, 5), self.test_data.xr_data_factor(4.4, 6.2), delta = 1.401 , reduce = True) 84 | assert oeop.eq(xr.DataArray(['test']), xr.DataArray(['Test']), case_sensitive=False) 85 | assert oeop.eq(xr.DataArray(['test', 'Test', 'TEST']), 'Test', case_sensitive=False, reduce=True) 86 | assert oeop.eq(self.test_data.xr_data_factor(1, 1), 1.1, delta = 0.5, reduce = True) 87 | assert oeop.eq(self.test_data.xr_data_factor(1, 1), np.array([1,1,1]), reduce=True) 88 | 89 | def test_neq(self): 90 | """ Tests `neq` function. 
""" 91 | assert oeop.neq(1, None) is None 92 | assert not oeop.neq(1, 1) 93 | assert oeop.neq(1, "1") 94 | assert oeop.neq(1.02, 1, delta=0.01) 95 | assert not oeop.neq(-1, -1.001, delta=0.01) 96 | assert not oeop.neq(115, 110, delta=10) 97 | assert oeop.neq("Test", "test") 98 | assert not oeop.neq("Test", "test", case_sensitive=False) 99 | assert not oeop.neq("Ä", "ä", case_sensitive=False) 100 | assert not oeop.neq("00:00:00+00:00", "00:00:00Z") 101 | assert oeop.neq("2018-01-01T12:00:00Z", "2018-01-01T12:00:00") 102 | assert not oeop.neq("2018-01-01T00:00:00Z", "2018-01-01T01:00:00+01:00") 103 | assert oeop.neq(self.test_data.xr_data_factor(3, 5), self.test_data.xr_data_factor(4.45, 6.2), delta=1.4, 104 | reduce=True) 105 | 106 | def test_gt(self): 107 | """ Tests `gt` function. """ 108 | assert oeop.gt(1, None) is None 109 | assert not oeop.gt(0, 0) 110 | assert oeop.gt(2, 1) 111 | assert oeop.gt(-0.5, -0.6) 112 | assert oeop.gt("00:00:00Z", "00:00:00+01:00") 113 | assert not oeop.gt("1950-01-01T00:00:00Z", "2018-01-01T12:00:00Z") 114 | assert not oeop.gt("2018-01-01T12:00:00+00:00", "2018-01-01T12:00:00Z") 115 | assert oeop.gt(self.test_data.xr_data_factor(3, 5), self.test_data.xr_data_factor(2.4, 4.2), reduce=True) 116 | 117 | def test_gte(self): 118 | """ Tests `gte` function. """ 119 | assert oeop.gte(1, None) is None 120 | assert oeop.gte(0, 0) 121 | assert not oeop.gte(1, 2) 122 | assert oeop.gte(-0.5, -0.6) 123 | assert oeop.gte("00:00:00Z", "00:00:00+01:00") 124 | assert not oeop.gte("1950-01-01T00:00:00Z", "2018-01-01T12:00:00Z") 125 | assert oeop.gte("2018-01-01T12:00:00+00:00", "2018-01-01T12:00:00Z") 126 | assert oeop.gte(self.test_data.xr_data_factor(3, 5.4), self.test_data.xr_data_factor(2.4, 5.4), reduce=True) 127 | 128 | def test_lt(self): 129 | """ Tests `lt` function. """ 130 | assert oeop.lt(1, None) is None 131 | assert not oeop.lt(0, 0) 132 | assert oeop.lt(1, 2) 133 | assert not oeop.lt(-0.5, -0.6) 134 | assert oeop.lt("00:00:00+01:00", "00:00:00Z") 135 | assert oeop.lt("1950-01-01T00:00:00Z", "2018-01-01T12:00:00Z") 136 | assert not oeop.lt("2018-01-01T12:00:00+00:00", "2018-01-01T12:00:00Z") 137 | assert oeop.lt(self.test_data.xr_data_factor(3, 5.4), self.test_data.xr_data_factor(4.4, 5.5), reduce=True) 138 | 139 | def test_lte(self): 140 | """ Tests `lte` function. """ 141 | assert oeop.lte(1, None) is None 142 | assert oeop.lte(0, 0) 143 | assert oeop.lte(1, 2) 144 | assert not oeop.lte(-0.5, -0.6) 145 | assert oeop.lte("00:00:00+01:00", "00:00:00Z") 146 | assert oeop.lte("1950-01-01T00:00:00Z", "2018-01-01T12:00:00Z") 147 | assert oeop.lte("2018-01-01T12:00:00+00:00", "2018-01-01T12:00:00Z") 148 | assert oeop.lte(self.test_data.xr_data_factor(3, 5.4), self.test_data.xr_data_factor(4.4, 5.4), reduce=True) 149 | assert oeop.lte(self.test_data.xr_data_factor(3, 5.4), 5.4, reduce=True) 150 | 151 | def test_between(self): 152 | """ Tests `between` function. 
""" 153 | assert oeop.between(None, min=0, max=1) is None 154 | assert not oeop.between(0.5, min=1, max=0) 155 | assert not oeop.between(-0.5, min=0, max=-1) 156 | assert oeop.between("00:59:59Z", min="01:00:00+01:00", max="01:00:00Z") 157 | assert oeop.between("2018-07-23T17:22:45Z", min="2018-01-01T00:00:00Z", max="2018-12-31T23:59:59Z") 158 | assert not oeop.between("2000-01-01", min="2018-01-01", max="2020-01-01") 159 | assert not oeop.between("2018-12-31T17:22:45Z", min="2018-01-01", max="2018-12-31", exclude_max=True) 160 | assert oeop.between(self.test_data.xr_data_factor(3, 5.4), min = 2, max = 5.4, reduce=True) 161 | assert not oeop.between(self.test_data.xr_data_factor(3, 5.4), min = 3, max = 5.4, exclude_max = True, reduce=True) 162 | 163 | if __name__ == '__main__': 164 | unittest.main() 165 | -------------------------------------------------------------------------------- /tests/test_cubes.py: -------------------------------------------------------------------------------- 1 | """ 2 | Most tests are in alignment with: 3 | https://openeo.org/documentation/1.0/processes.html 4 | """ 5 | 6 | import os 7 | import unittest 8 | 9 | import pytest 10 | import openeo_processes as oeop 11 | import xarray as xr 12 | import numpy as np 13 | import pandas as pd 14 | import geopandas as gpd 15 | import dask_geopandas 16 | 17 | 18 | @pytest.mark.usefixtures("test_data") 19 | class CubesTester(unittest.TestCase): 20 | """ Tests all cubes functions. """ 21 | 22 | def test_reduce_dimension(self): 23 | """ Tests `reduce_dimension` function. """ 24 | 25 | # xarray tests 26 | # Reduce spectral dimension using the process `sum` 27 | # Take sum over 's' dimension in a 4d array 28 | reduced = oeop.reduce_dimension(self.test_data.xr_data_4d, reducer=oeop.sum, dimension='bands') 29 | self.assertListEqual( 30 | list(reduced[:, 0, 0].data), 31 | [14, 140] 32 | ) 33 | 34 | def test_merge_cubes(self): 35 | """Tests 'merge_cubes' function. 
""" 36 | merged = oeop.merge_cubes(self.test_data.xr_data_4d, self.test_data.xr_data_4d, 37 | oeop.add) # merges two cubes together with add: x + x 38 | assert (merged.dims == self.test_data.xr_data_4d.dims) # dimensions did not change 39 | xr.testing.assert_equal(merged, self.test_data.xr_data_4d * 2) # x + x is the same as the cube*2 40 | xr.testing.assert_equal( 41 | oeop.merge_cubes(self.test_data.xr_data_factor(5, 9), self.test_data.xr_data_factor(2, 3), oeop.subtract), 42 | self.test_data.xr_data_factor(3, 6)) 43 | merged2 = oeop.merge_cubes(self.test_data.xr_data_factor(5, 9)[:, :3], 44 | self.test_data.xr_data_factor(2, 7)[:, 3:]) 45 | assert (merged2.dims == self.test_data.xr_data_factor(5, 7).dims) 46 | xr.testing.assert_equal( 47 | oeop.merge_cubes(self.test_data.xr_data_factor(5, 9).isel(time=0), 48 | self.test_data.xr_data_factor(2, 3).isel(time=1)), 49 | self.test_data.xr_data_factor(5, 3)) 50 | xr.testing.assert_equal( 51 | oeop.merge_cubes(self.test_data.xr_data_factor(5, 9).isel(time=0), 52 | self.test_data.xr_data_factor(2, 3).isel(time=0), oeop.add), 53 | self.test_data.xr_data_factor(7, 3).isel(time=0)) 54 | merged3 = oeop.merge_cubes(self.test_data.xr_data_factor(5, 9), self.test_data.xr_data_factor(2, 3)) 55 | assert (merged3.shape == (2, 2, 5, 3)) # added first dimension, so shape is now longer 56 | 57 | 58 | # @pytest.mark.skip(reason="Behaviour is not correctly implemented for vector cubes yet.") 59 | def test_merge_cubes_vector_cubes(self): 60 | # Test when both cubes are geopandas dataframes 61 | geopandas1 = (gpd.GeoDataFrame.from_features(self.test_data.geojson_polygon)) 62 | assert len(oeop.merge_cubes(geopandas1, geopandas1)) == len(geopandas1) + len(geopandas1) 63 | assert ((oeop.merge_cubes(geopandas1, geopandas1)).shape) == (2,1) 64 | 65 | delayed_vector_cube = dask_geopandas.from_geopandas(geopandas1, chunksize=1500) 66 | assert ((oeop.merge_cubes(delayed_vector_cube, delayed_vector_cube)).compute().shape) == (2,1) 67 | 68 | @pytest.mark.skip(reason="This is failing at the time CI was setup - fix asap!") 69 | def test_save_result(self): 70 | # TODO improve file check 71 | # xarray tests 72 | out_filename = "out.tif" 73 | out_filename_0 = "out_00000.tif" 74 | out_filename_1 = "out_00001.tif" 75 | out_product = "product.yml" 76 | oeop.save_result(self.test_data.xr_odc_data_3d, out_filename) 77 | assert os.path.exists(out_filename_0) 78 | assert os.path.exists(out_product) 79 | os.remove(out_filename_0) 80 | os.remove(out_product) 81 | 82 | oeop.save_result(self.test_data.xr_odc_data_4d, out_filename) 83 | assert os.path.exists(out_filename_0) 84 | assert os.path.exists(out_filename_1) 85 | assert os.path.exists(out_product) 86 | os.remove(out_filename_0) 87 | os.remove(out_filename_1) 88 | os.remove(out_product) 89 | 90 | out_filename = "out.nc" 91 | out_filename_0 = "out_00000.nc" 92 | out_filename_1 = "out_00001.nc" 93 | out_filename_combined = "out_combined.nc" 94 | 95 | oeop.save_result(self.test_data.xr_odc_data_3d, out_filename, format='netCDF') 96 | assert os.path.exists(out_filename_0) 97 | assert os.path.exists(out_product) 98 | os.remove(out_filename_0) 99 | os.remove(out_product) 100 | 101 | oeop.save_result(self.test_data.xr_odc_data_3d, format='netCDF') 102 | assert os.path.exists(out_filename_0) 103 | assert os.path.exists(out_product) 104 | os.remove(out_filename_0) 105 | os.remove(out_product) 106 | 107 | oeop.save_result(self.test_data.xr_odc_data_4d, format='netCDF') 108 | assert os.path.exists(out_filename_0) 109 | assert 
os.path.exists(out_filename_1) 110 | assert os.path.exists(out_filename_combined) 111 | assert os.path.exists(out_product) 112 | os.remove(out_filename_0) 113 | os.remove(out_filename_1) 114 | os.remove(out_filename_combined) 115 | os.remove(out_product) 116 | 117 | @pytest.mark.skip(reason="This is failing at the time CI was setup - fix asap!") 118 | def test_save_result_from_file(self): 119 | src = os.path.join(os.path.dirname(__file__), "data", "array.nc") 120 | ref_ds = xr.open_dataarray(src) 121 | oeop.save_result(ref_ds, format='netCDF') 122 | actual_ds_0 = xr.load_dataset("out_00000.nc") 123 | assert ref_ds.dims == actual_ds_0.dims 124 | assert ref_ds.coords == actual_ds_0.coords 125 | assert ref_ds.variables == actual_ds_0.variables 126 | assert ref_ds.geobox == actual_ds_0.geobox 127 | assert ref_ds.extent == actual_ds_0.extent 128 | assert "crs" in actual_ds_0.attrs and actual_ds_0.attrs["crs"] == 'PROJCRS["Azimuthal_Equidistant",BASEGEOGCRS["WGS 84",DATUM["World Geodetic System 1984",ELLIPSOID["WGS 84",6378137,298.257223563,LENGTHUNIT["metre",1]],ID["EPSG",6326]],PRIMEM["Greenwich",0,ANGLEUNIT["degree",0.0174532925199433]]],CONVERSION["unnamed",METHOD["Modified Azimuthal Equidistant",ID["EPSG",9832]],PARAMETER["Latitude of natural origin",53,ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8801]],PARAMETER["Longitude of natural origin",24,ANGLEUNIT["degree",0.0174532925199433],ID["EPSG",8802]],PARAMETER["False easting",5837287.81977,LENGTHUNIT["metre",1],ID["EPSG",8806]],PARAMETER["False northing",2121415.69617,LENGTHUNIT["metre",1],ID["EPSG",8807]]],CS[Cartesian,2],AXIS["easting",east,ORDER[1],LENGTHUNIT["metre",1,ID["EPSG",9001]]],AXIS["northing",north,ORDER[2],LENGTHUNIT["metre",1,ID["EPSG",9001]]]]' 129 | assert "datetime_from_dim" in actual_ds_0.attrs 130 | assert actual_ds_0.result.dims == ("y", "x") 131 | for i in range(10): 132 | os.remove(f"out_{str(i).zfill(5)}.nc") 133 | os.remove("out_combined.nc") 134 | os.remove("product.yml") 135 | 136 | def test_fit_curve(self): 137 | """Tests 'fit_curve' function. 
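
        Usage sketch (illustrative, following the openEO definition of
        `fit_curve`): the free `parameters` of `function` are fitted along
        `dimension` by non-linear least squares, starting from the given
        initial guesses:

            params = oeop.fit_curve(xdata, parameters=[1, 1, 1],
                                    function=func_oeop, dimension='time')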
""" 138 | rang = np.linspace(0, 4 * np.pi, 24) 139 | rang = [np.cos(rang) + 0.5 * np.sin(rang) + np.random.rand(24) * 0.1, 140 | np.cos(rang) + 0.5 * np.sin(rang) + np.random.rand( 141 | 24) * 0.2] # define data with y = 0 + 1 * cos() + 0.5 *sin() 142 | xdata = xr.DataArray(rang, coords=[["NY", "LA"], pd.date_range("2000-01-01", periods=24, freq='M')], 143 | dims=["space", "time"]) 144 | 145 | def func_oeop(x, *parameters): 146 | _2sjyaa699_11 = oeop.pi(**{}) 147 | _9k6vt7qcn_2 = oeop.multiply(**{'x': 2, 'y': _2sjyaa699_11}) 148 | _p42lrxmbq_16 = oeop.divide(**{'x': _9k6vt7qcn_2, 'y': 31557600}) 149 | _wz26aglyi_5 = oeop.multiply(**{'x': _p42lrxmbq_16, 'y': x}) 150 | _v81bsalku_7 = oeop.cos(**{'x': _wz26aglyi_5}) 151 | _32frj455b_1 = oeop.pi(**{}) 152 | _lyjcuq5vd_15 = oeop.multiply(**{'x': 2, 'y': _32frj455b_1}) 153 | _1ipvki94n_4 = oeop.divide(**{'x': _lyjcuq5vd_15, 'y': 31557600}) 154 | _ya3hbxpot_17 = oeop.multiply(**{'x': _1ipvki94n_4, 'y': x}) 155 | _0p7xlqeyo_8 = oeop.sin(**{'x': _ya3hbxpot_17}) 156 | 157 | _kryhimf6r_6 = oeop.array_element(**{'data': parameters, 'index': 0}) 158 | _jxs4umqsh_10 = oeop.array_element(**{'data': parameters, 'index': 1}) 159 | _8jjjztmya_12 = oeop.array_element(**{'data': parameters, 'index': 2}) 160 | 161 | _jhus2gz74_13 = oeop.multiply(**{'x': _jxs4umqsh_10, 'y': _v81bsalku_7}) 162 | _0v09jn699_14 = oeop.multiply(**{'x': _8jjjztmya_12, 'y': _0p7xlqeyo_8}) 163 | _xb4c1hk1f_9 = oeop.add(**{'x': _kryhimf6r_6, 'y': _jhus2gz74_13}) 164 | _b4mf181yp_3 = oeop.add(**{'x': _xb4c1hk1f_9, 'y': _0v09jn699_14}) 165 | return _b4mf181yp_3 166 | 167 | params = (oeop.fit_curve(xdata, parameters=[1, 1, 1], function=func_oeop, dimension='time')) 168 | assert (np.isclose(params, [0, 1, 0.5], atol=0.3)).all() # output should be close to 0, 1, 0.5 169 | params_2 = (oeop.fit_curve(xdata, parameters=params, function=func_oeop, dimension='time')) 170 | assert (np.isclose(params_2, [0, 1, 0.5], atol=0.3)).all() 171 | assert (np.isclose(params, params_2, atol=0.01)).all() 172 | 173 | def test_predict_curve(self): 174 | """Tests 'predict_curve' function. 
""" 175 | rang = np.linspace(0, 4 * np.pi, 24) 176 | rang = [np.cos(rang) + 0.5 * np.sin(rang) + np.random.rand(24) * 0.1, 177 | np.cos(rang) + 0.5 * np.sin(rang) + np.random.rand( 178 | 24) * 0.2] # define data with y = 0 + 1 * cos() + 0.5 *sin() 179 | xdata = xr.DataArray(rang, coords=[["NY", "LA"], pd.date_range("2000-01-01", periods=24, freq='M')], 180 | dims=["space", "time"]) 181 | 182 | def func(x, a, b, c): 183 | return a + b * np.cos(2 * np.pi / 31557600 * x) + c * np.sin(2 * np.pi / 31557600 * x) 184 | 185 | params = (oeop.fit_curve(xdata, parameters=(0, 1, 0), function=func, dimension='time')) 186 | predicted = oeop.predict_curve(xdata, params, func, dimension='time', 187 | labels=pd.date_range("2002-01-01", periods=24, freq='M')) 188 | assert xdata.dims == predicted.dims 189 | assert (predicted < 1.8).all() 190 | predicted = oeop.predict_curve(xdata, params, func, dimension='time', 191 | labels=pd.date_range("2000-01-01", periods=24, freq='M')) 192 | assert (np.isclose(xdata, predicted, atol=0.5)).all() 193 | dim_times = oeop.dimension_labels(self.test_data.xr_data_factor(), 't') 194 | predicted_dim_labels = (oeop.predict_curve(xdata, params, func, dimension='time', labels=dim_times)) 195 | assert xdata.dims == predicted_dim_labels.dims 196 | assert (predicted_dim_labels < 1.8).all() 197 | xdata_t = xr.DataArray(rang, coords=[["NY", "LA"], pd.date_range("2000-01-01", periods=24, freq='M')], 198 | dims=["space", "t"]) 199 | predicted_t = oeop.predict_curve(xdata_t, params, func, dimension='t', 200 | labels=pd.date_range("2000-01-01", periods=24, freq='M')) 201 | xr.testing.assert_equal(predicted, predicted_t) 202 | predicted_time = oeop.predict_curve(xdata, params, func, dimension='time', 203 | labels=pd.date_range("2002-01-01", periods=2, freq='M')) 204 | predicted_str = oeop.predict_curve(xdata, params, func, dimension='time', 205 | labels=["2002-01-31 00:00", "2002-02-28"]) 206 | assert (predicted_time.values == predicted_str.values).all() 207 | 208 | 209 | def test_resample_cube_temporal(self): 210 | """ Tests `reduce_dimension` function. """ 211 | xdata = xr.DataArray(np.array([[1, 3], [7, 8]]), 212 | coords=[["NY", "LA"], pd.date_range("2000-01-01", "2000-02-01", periods=2)], 213 | dims=["space", "time"]) 214 | target = xr.DataArray(np.array([[1, 3], [7, 8]]), 215 | coords=[["NY", "LA"], pd.date_range("2000-01-10", "2000-02-10", periods=2)], 216 | dims=["space", "time"]) 217 | resample = oeop.resample_cube_temporal(xdata, target, dimension='time') 218 | xr.testing.assert_equal(resample, target) 219 | xdata2 = xr.DataArray(np.array([[1, 3, 4], [7, 8, 10]]), 220 | coords=[["NY", "LA"], pd.date_range("2000-01-01", "2000-03-01", periods=3)], 221 | dims=["space", "time"]) 222 | resample2 = oeop.resample_cube_temporal(xdata2, target, dimension='time', valid_within=15) 223 | xr.testing.assert_equal(resample2, target) 224 | 225 | def test_create_raster_cube(self): 226 | """Tests 'create_raster_cube' function. """ 227 | assert len(oeop.create_raster_cube()) == 0 228 | 229 | def test_add_dimension(self): 230 | """Tests 'add_dimension' function. """ 231 | assert oeop.add_dimension(self.test_data.xr_data_factor(), 'cubes', 'Cube01').shape == (1, 2, 5, 3) 232 | 233 | def test_dimension_labels(self): 234 | """Tests 'dimension_labels' function. 
""" 235 | assert (oeop.dimension_labels(self.test_data.xr_data_factor(), 'x') == [118.9, 119.9, 120.9]).all() 236 | assert (oeop.dimension_labels(self.test_data.xr_data_factor(), 't') == oeop.dimension_labels(self.test_data.xr_data_factor(), 'time')).all() 237 | 238 | def test_drop_dimension(self): 239 | """Tests 'drop_dimension' function. """ 240 | data = oeop.add_dimension(self.test_data.xr_data_factor(), 'cubes', 'Cube01') 241 | xr.testing.assert_equal(oeop.drop_dimension(data, 'cubes'), self.test_data.xr_data_factor()) 242 | 243 | def test_rename_dimension(self): 244 | """Tests 'rename_dimension' function. """ 245 | data = oeop.rename_dimension(self.test_data.xr_data_factor(), 'x', 'longitude') 246 | assert (data.dims == ('time', 'y', 'longitude')) 247 | 248 | def test_rename_labels(self): 249 | """Tests 'rename_labels' function. """ 250 | data = oeop.rename_labels(self.test_data.xr_data_factor(), 'x', [119, 120, 121], [118.9, 119.9, 120.9]) 251 | assert (data['x'].values == (119, 120, 121)).all() 252 | 253 | def test_filter_temporal(self): 254 | """Tests 'filter_temporal' function. """ 255 | data = oeop.filter_temporal(self.test_data.xr_data_factor(), ['2019-12-01', '2019-12-05']) 256 | data2 = oeop.filter_temporal(self.test_data.xr_data_factor(), ['2019-12-01T00:00:00Z', '2019-12-02T00:00:00Z']) 257 | xr.testing.assert_equal(data, data2) 258 | 259 | def test_filter_spatial(self): 260 | """Tests 'filter_spatial' function. """ 261 | geo = self.test_data.geojson_polygon 262 | assert (oeop.filter_spatial(self.test_data.equi7xarray, geo).dims == self.test_data.equi7xarray.dims) 263 | 264 | def test_filter_labels(self): 265 | """Tests 'filter_labels' function. """ 266 | xr.testing.assert_equal(oeop.filter_labels(self.test_data.xr_data_factor(), oeop.gt, 'x', {'y': 120}), self.test_data.xr_data_factor().loc[{'x': [120.9]}]) 267 | 268 | def test_filter_bbox(self): 269 | """Tests 'filter_bbox' function. """ 270 | extent = {'west': 11.4, 'east': 11.45, 'north': 46.35, 'south': 46.30, 'crs':'EPSG:4326'} 271 | xr.testing.assert_equal(oeop.filter_bbox(self.test_data.equi7xarray, extent), self.test_data.equi7xarray) 272 | extent = {'west': 11.41, 'east': 11.44, 'north': 46.35, 'south': 46.31, 'crs':'EPSG:4326'} 273 | xr.testing.assert_equal(oeop.filter_bbox(self.test_data.equi7xarray, extent), self.test_data.equi7xarray[1:, 1:]) 274 | extent = {'west': 63, 'east': 62, 'north': 124, 'south': 123, 'crs': 4326} 275 | assert len((oeop.filter_bbox(self.test_data.xr_data_factor(), extent)).values[0])==0 276 | 277 | def test_mask(self): 278 | """ Tests `mask` function. """ 279 | assert (oeop.mask(np.array([[1,3,6],[2,2,2]]), np.array([[True,False,True],[False,False,True]]), 999) == np.array([[999,3,999],[2,2,999]])).all() 280 | xr.testing.assert_equal(oeop.mask(self.test_data.xr_data_factor(1, 5),self.test_data.xr_data_factor(True, False), replacement = 999), 281 | self.test_data.xr_data_factor(999, 5)) 282 | 283 | def test_mask_polygon(self): 284 | """Tests 'mask_polygon function. """ 285 | geo = self.test_data.geojson_polygon 286 | assert (oeop.mask_polygon(self.test_data.equi7xarray, geo).dims == self.test_data.equi7xarray.dims) 287 | 288 | def test_aggregate_temporal_period(self): 289 | """ Tests 'aggregate_temporal_period' function. 
""" 290 | assert (oeop.aggregate_temporal_period(self.test_data.xr_data_4d,'day',oeop.min) == self.test_data.xr_data_4d.values).all() 291 | xr.testing.assert_equal(oeop.aggregate_temporal_period(self.test_data.xr_data_4d,'day',oeop.min), 292 | oeop.aggregate_temporal_period(self.test_data.xr_data_4d, 'day',oeop.max, 'time', {})) 293 | 294 | def test_apply_dimension(self): 295 | """Tests 'apply_dimension' function. """ 296 | assert (oeop.apply_dimension(self.test_data.xr_data_factor(1,2), oeop.min, 'time', 'time').values == 297 | self.test_data.xr_data_factor(1,2)[0].values).all() 298 | assert (oeop.apply_dimension(self.test_data.xr_data_factor(1, 2), oeop.min, 'time', 'time').dims == 299 | self.test_data.xr_data_factor(1, 2).dims) 300 | 301 | 302 | def test_aggregate_spatial(self): 303 | """Tests 'aggregate_spatial' function. """ 304 | vector_points = oeop.vector_to_regular_points(self.test_data.geojson_polygon, 0.01) 305 | vector_cube = gpd.GeoDataFrame.from_features(vector_points) 306 | vector_cube_lazy = dask_geopandas.from_geopandas(vector_cube, chunksize=1500) 307 | assert isinstance(oeop.aggregate_spatial(self.test_data.equi7xarray, vector_cube, oeop.mean, 'result'), dask_geopandas.core.GeoDataFrame) 308 | assert isinstance(oeop.aggregate_spatial(self.test_data.equi7xarray, vector_cube_lazy, oeop.mean, 'result'), dask_geopandas.core.GeoDataFrame) 309 | 310 | 311 | 312 | 313 | 314 | 315 | if __name__ == "__main__": 316 | unittest.main() 317 | -------------------------------------------------------------------------------- /tests/test_logic.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import numpy as np 3 | import openeo_processes as oeop 4 | import pytest 5 | import xarray as xr 6 | 7 | @pytest.mark.usefixtures("test_data") 8 | class LogicTester(unittest.TestCase): 9 | """ Tests all logic functions. """ 10 | 11 | def test_not_(self): 12 | """ Tests `not_` function. """ 13 | assert not oeop.not_(True) 14 | assert oeop.not_(False) 15 | assert oeop.not_(None) is None 16 | xr.testing.assert_equal( 17 | oeop.not_(self.test_data.xr_data_factor(False, True)), 18 | self.test_data.xr_data_factor(True, False)) 19 | 20 | def test_and_(self): 21 | """ Tests `and_` function. """ 22 | assert not oeop.and_(False, None) 23 | assert oeop.and_(True, None) is None 24 | assert not oeop.and_(False, False) 25 | assert not oeop.and_(True, False) 26 | assert oeop.and_(True, True) 27 | xr.testing.assert_equal( 28 | oeop.and_(self.test_data.xr_data_factor(0, 1), self.test_data.xr_data_factor(1, 1)), 29 | self.test_data.xr_data_factor(0, 1)) 30 | xr.testing.assert_equal( 31 | oeop.and_(self.test_data.xr_data_factor(0, 1), self.test_data.xr_data_factor(np.nan, np.nan)), 32 | self.test_data.xr_data_factor(np.nan, np.nan)) 33 | 34 | def test_or_(self): 35 | """ Tests `or_` function. """ 36 | assert oeop.or_(False, None) is None 37 | assert oeop.or_(True, None) 38 | assert not oeop.or_(False, False) 39 | assert oeop.or_(True, False) 40 | assert oeop.or_(True, True) 41 | xr.testing.assert_equal( 42 | oeop.or_(self.test_data.xr_data_factor(0, 1), self.test_data.xr_data_factor(1, 1)), 43 | self.test_data.xr_data_factor(1, 1)) 44 | xr.testing.assert_equal( 45 | oeop.or_(self.test_data.xr_data_factor(0, 1), self.test_data.xr_data_factor(np.nan, np.nan)), 46 | self.test_data.xr_data_factor(np.nan, np.nan)) 47 | 48 | def test_xor(self): 49 | """ Tests `xor` function. 
""" 50 | assert oeop.xor(False, None) is None 51 | assert oeop.xor(True, None) is None 52 | assert not oeop.xor(False, False) 53 | assert oeop.xor(True, False) 54 | assert not oeop.xor(True, True) 55 | xr.testing.assert_equal( 56 | oeop.xor(self.test_data.xr_data_factor(0, 1), self.test_data.xr_data_factor(1, 1)), 57 | self.test_data.xr_data_factor(1, 0)) 58 | xr.testing.assert_equal( 59 | oeop.xor(self.test_data.xr_data_factor(0, 1), self.test_data.xr_data_factor(np.nan, np.nan)), 60 | self.test_data.xr_data_factor(np.nan, np.nan)) 61 | 62 | def test_if_(self): 63 | """ Tests `if_` function. """ 64 | assert oeop.if_(True, "A", "B") == "A" 65 | assert oeop.if_(None, "A", "B") == "B" 66 | assert all(oeop.if_(False, [1, 2, 3], [4, 5, 6]) == [4, 5, 6]) 67 | assert oeop.if_(True, 123) == 123 68 | assert oeop.if_(False, 1) is None 69 | xr.testing.assert_equal( 70 | oeop.if_(self.test_data.xr_data_factor(0, 1), -3.5, 5), 71 | self.test_data.xr_data_factor(5, -3.5)) 72 | xr.testing.assert_equal( 73 | oeop.if_(self.test_data.xr_data_factor(0, 1), 3), 74 | self.test_data.xr_data_factor(np.nan, 3)) 75 | 76 | 77 | def test_any_(self): 78 | """ Tests `any_` function. """ 79 | assert not oeop.any_([False, np.nan]) 80 | assert oeop.any_([True, np.nan]) 81 | assert np.isnan(oeop.any_([False, np.nan], ignore_nodata=False)) 82 | assert oeop.any_([True, np.nan], ignore_nodata=False) 83 | assert oeop.any_([True, False, True, False]) 84 | assert oeop.any_([True, False]) 85 | assert not oeop.any_([False, False]) 86 | assert oeop.any_([True]) 87 | assert np.isnan(oeop.any_([np.nan], ignore_nodata=False)) 88 | assert np.isnan(oeop.any_([])) 89 | xr.testing.assert_equal( 90 | oeop.any_(self.test_data.xr_data_factor(0, np.nan)), 91 | oeop.any_(self.test_data.xr_data_factor(0, 0))) 92 | xr.testing.assert_equal( 93 | oeop.any_(self.test_data.xr_data_factor(0, np.nan), ignore_nodata = False), 94 | oeop.any_(self.test_data.xr_data_factor(np.nan, np.nan), ignore_nodata = False)) 95 | xr.testing.assert_equal( 96 | oeop.any_(self.test_data.xr_data_factor(1, np.nan), ignore_nodata=False), 97 | oeop.any_(self.test_data.xr_data_factor(1, 1))) 98 | 99 | def test_all_(self): 100 | """ Tests `all_` function. 
""" 101 | assert not oeop.all_([False, np.nan]) 102 | assert oeop.all_([True, np.nan]) 103 | assert not oeop.all_([False, np.nan], ignore_nodata=False) 104 | assert np.isnan(oeop.all_([True, np.nan], ignore_nodata=False)) 105 | assert not oeop.all_([True, False, True, False]) 106 | assert not oeop.all_([True, False]) 107 | assert oeop.all_([True, True]) 108 | assert oeop.all_([True]) 109 | assert np.isnan(oeop.all_([np.nan], ignore_nodata=False)) 110 | assert np.isnan(oeop.all_([])) 111 | xr.testing.assert_equal( 112 | oeop.all_(self.test_data.xr_data_factor(0, 1)), 113 | oeop.all_(self.test_data.xr_data_factor(0, 0))) 114 | xr.testing.assert_equal( 115 | oeop.all_(self.test_data.xr_data_factor(1, np.nan), ignore_nodata=False), 116 | oeop.all_(self.test_data.xr_data_factor(np.nan, np.nan), ignore_nodata=False)) 117 | xr.testing.assert_equal( 118 | oeop.all_(self.test_data.xr_data_factor(0, np.nan), ignore_nodata=False), 119 | oeop.all_(self.test_data.xr_data_factor(0, 0))) 120 | 121 | if __name__ == '__main__': 122 | unittest.main() 123 | -------------------------------------------------------------------------------- /tests/test_math.py: -------------------------------------------------------------------------------- 1 | """ 2 | Most tests are in alignment with: 3 | https://openeo.org/documentation/1.0/processes.html 4 | """ 5 | 6 | import unittest 7 | import numpy as np 8 | import pytest 9 | from copy import deepcopy 10 | import openeo_processes as oeop 11 | import xarray as xr 12 | import scipy 13 | 14 | @pytest.mark.usefixtures("test_data") 15 | class MathTester(unittest.TestCase): 16 | """ Tests all math functions. """ 17 | 18 | def test_e(self): 19 | """ Tests `e` function. """ 20 | assert oeop.e() == np.e 21 | 22 | def test_pi(self): 23 | """ Tests `pi` function. """ 24 | assert oeop.pi() == np.pi 25 | 26 | def test_constant(self): 27 | """ Tests `pi` function. """ 28 | assert oeop.constant(5) == 5 29 | 30 | def test_floor(self): 31 | """ Tests `floor` function. """ 32 | assert oeop.floor(0) == 0 33 | assert oeop.floor(3.5) == 3 34 | assert oeop.floor(-0.4) == -1 35 | assert oeop.floor(-3.5) == -4 36 | xr.testing.assert_equal(oeop.floor(self.test_data.xr_data_factor(3.5, -3.5)), self.test_data.xr_data_factor(3, -4)) 37 | 38 | def test_ceil(self): 39 | """ Tests `ceil` function. """ 40 | assert oeop.ceil(0) == 0 41 | assert oeop.ceil(3.5) == 4 42 | assert oeop.ceil(-0.4) == 0 43 | assert oeop.ceil(-3.5) == -3 44 | xr.testing.assert_equal( 45 | oeop.ceil(self.test_data.xr_data_factor(3.5, -3.5)), self.test_data.xr_data_factor(4, -3)) 46 | 47 | def test_int(self): 48 | """ Tests `int` function. """ 49 | assert oeop.int(0) == 0 50 | assert oeop.int(3.5) == 3 51 | assert oeop.int(-0.4) == 0 52 | assert oeop.int(-3.5) == -3 53 | xr.testing.assert_equal( 54 | oeop.int(self.test_data.xr_data_factor(3.5, -3.5)), self.test_data.xr_data_factor(3, -3)) 55 | 56 | def test_round(self): 57 | """ Tests `round` function. """ 58 | assert oeop.round(0) == 0 59 | assert oeop.round(3.56, p=1) == 3.6 60 | assert oeop.round(-0.4444444, p=2) == -0.44 61 | assert oeop.round(-2.5) == -2 62 | assert oeop.round(-3.5) == -4 63 | assert oeop.round(1234.5, p=-2) == 1200 64 | xr.testing.assert_equal( 65 | oeop.round(self.test_data.xr_data_factor(-2.5, -3.5)), self.test_data.xr_data_factor(-2, -4)) 66 | 67 | def test_exp(self): 68 | """ Tests `exp` function. 
""" 69 | assert oeop.exp(0) == 1 70 | assert np.isnan(oeop.exp(np.nan)) 71 | xr.testing.assert_equal( 72 | oeop.exp(self.test_data.xr_data_factor(0, np.nan)), self.test_data.xr_data_factor(1, np.nan)) 73 | 74 | def test_log(self): 75 | """ Tests `log` function. """ 76 | assert oeop.log(10, 10) == 1 77 | assert oeop.log(2, 2) == 1 78 | assert oeop.log(4, 2) == 2 79 | assert oeop.log(1, 16) == 0 80 | xr.testing.assert_equal( 81 | oeop.log(self.test_data.xr_data_factor(10, 10), 10), self.test_data.xr_data_factor(1, 1)) 82 | 83 | def test_ln(self): 84 | """ Tests `ln` function. """ 85 | # since ln(e) returns 0.9999999999999999 it needs to be almost equal 86 | np.testing.assert_almost_equal(oeop.ln(oeop.e()), 1) 87 | assert oeop.ln(1) == 0 88 | xr.testing.assert_allclose( 89 | oeop.ln(self.test_data.xr_data_factor(oeop.e(), 1)), self.test_data.xr_data_factor(1, 0)) 90 | 91 | def test_cos(self): 92 | """ Tests `cos` function. """ 93 | assert oeop.cos(0) == 1 94 | xr.testing.assert_equal( 95 | oeop.cos(self.test_data.xr_data_factor(oeop.pi(), 0)), self.test_data.xr_data_factor(-1, 1)) 96 | 97 | def test_arccos(self): 98 | """ Tests `arccos` function. """ 99 | assert oeop.arccos(1) == 0 100 | xr.testing.assert_equal( 101 | oeop.arccos(self.test_data.xr_data_factor(-1, 1)), self.test_data.xr_data_factor(oeop.pi(), 0)) 102 | 103 | def test_cosh(self): 104 | """ Tests `cosh` function. """ 105 | assert oeop.cosh(0) == 1 106 | xr.testing.assert_equal( 107 | oeop.cosh(self.test_data.xr_data_factor(0, 0)), self.test_data.xr_data_factor(1, 1)) 108 | 109 | def test_arcosh(self): 110 | """ Tests `arcosh` function. """ 111 | assert oeop.arcosh(1) == 0 112 | xr.testing.assert_equal( 113 | oeop.arcosh(self.test_data.xr_data_factor(1, 1)), self.test_data.xr_data_factor(0, 0)) 114 | 115 | def test_sin(self): 116 | """ Tests `sin` function. """ 117 | assert oeop.sin(0) == 0 118 | xr.testing.assert_equal( 119 | oeop.sin(self.test_data.xr_data_factor(0, oeop.pi()/2)), self.test_data.xr_data_factor(0, 1)) 120 | 121 | def test_arcsin(self): 122 | """ Tests `arcsin` function. """ 123 | assert oeop.arcsin(0) == 0 124 | xr.testing.assert_equal( 125 | oeop.arcsin(self.test_data.xr_data_factor(0, 1)), self.test_data.xr_data_factor(0, oeop.pi()/2)) 126 | 127 | def test_sinh(self): 128 | """ Tests `sinh` function. """ 129 | assert oeop.sinh(0) == 0 130 | xr.testing.assert_equal( 131 | oeop.sinh(self.test_data.xr_data_factor(0, 0)), self.test_data.xr_data_factor(0, 0)) 132 | 133 | def test_arsinh(self): 134 | """ Tests `arsinh` function. """ 135 | assert oeop.arsinh(0) == 0 136 | xr.testing.assert_equal( 137 | oeop.arsinh(self.test_data.xr_data_factor(0, 0)), self.test_data.xr_data_factor(0, 0)) 138 | 139 | def test_tan(self): 140 | """ Tests `tan` function. """ 141 | assert oeop.tan(0) == 0 142 | xr.testing.assert_equal( 143 | oeop.tan(self.test_data.xr_data_factor(0, 0)), self.test_data.xr_data_factor(0, 0)) 144 | 145 | def test_arctan(self): 146 | """ Tests `arctan` function. """ 147 | assert oeop.arctan(0) == 0 148 | xr.testing.assert_equal( 149 | oeop.arctan(self.test_data.xr_data_factor(0, 0)), self.test_data.xr_data_factor(0, 0)) 150 | 151 | def test_tanh(self): 152 | """ Tests `tanh` function. """ 153 | assert oeop.tanh(0) == 0 154 | xr.testing.assert_equal( 155 | oeop.tanh(self.test_data.xr_data_factor(0, 0)), self.test_data.xr_data_factor(0, 0)) 156 | 157 | def test_artanh(self): 158 | """ Tests `artanh` function. 
""" 159 | assert oeop.artanh(0) == 0 160 | xr.testing.assert_equal( 161 | oeop.artanh(self.test_data.xr_data_factor(0, 0)), self.test_data.xr_data_factor(0, 0)) 162 | 163 | def test_arctan2(self): 164 | """ Tests `arctan2` function. """ 165 | assert oeop.arctan2(0, 0) == 0 166 | assert np.isnan(oeop.arctan2(np.nan, 1.5)) 167 | xr.testing.assert_equal( 168 | oeop.arctan2(self.test_data.xr_data_factor(0, np.nan), self.test_data.xr_data_factor(0, 1.5)), self.test_data.xr_data_factor(0, np.nan)) 169 | 170 | def test_linear_scale_range(self): 171 | """ Tests `linear_scale_range` function. """ 172 | assert oeop.linear_scale_range(0.3, inputMin=-1, inputMax=1, outputMin=0, outputMax=255) == 165.75 173 | assert oeop.linear_scale_range(25.5, inputMin=0, inputMax=255) == 0.1 174 | assert np.isnan(oeop.linear_scale_range(np.nan, inputMin=0, inputMax=100)) 175 | xr.testing.assert_equal( 176 | oeop.linear_scale_range(self.test_data.xr_data_factor(25.5, 51), inputMin=0, inputMax=255), self.test_data.xr_data_factor(0.1, 0.2)) 177 | 178 | def test_scale(self): 179 | """ Tests `scale` function. """ 180 | arr = np.random.randn(10) 181 | assert np.all(oeop.scale(arr) == arr) 182 | xr.testing.assert_equal( 183 | oeop.scale(self.test_data.xr_data_factor(1, 2), 8), 184 | self.test_data.xr_data_factor(8, 16)) 185 | 186 | def test_mod(self): 187 | """ Tests `mod` function. """ 188 | assert oeop.mod(27, 5) == 2 189 | assert oeop.mod(-27, 5) == 3 190 | assert oeop.mod(27, -5) == -3 191 | assert oeop.mod(-27, -5) == -2 192 | assert oeop.mod(27, 5) == 2 193 | assert np.isnan(oeop.mod(27, np.nan)) 194 | assert np.isnan(oeop.mod(np.nan, 5)) 195 | xr.testing.assert_equal( 196 | oeop.mod(self.test_data.xr_data_factor(27, -27), self.test_data.xr_data_factor(5, 5)), 197 | self.test_data.xr_data_factor(2, 3)) 198 | 199 | def test_absolute(self): 200 | """ Tests `absolute` function. """ 201 | assert oeop.absolute(0) == 0 202 | assert oeop.absolute(3.5) == 3.5 203 | assert oeop.absolute(-0.4) == 0.4 204 | assert oeop.absolute(-3.5) == 3.5 205 | xr.testing.assert_equal( 206 | oeop.absolute(self.test_data.xr_data_factor(0, -3.5)), 207 | self.test_data.xr_data_factor(0, 3.5)) 208 | 209 | def test_sgn(self): 210 | """ Tests `sgn` function. """ 211 | assert oeop.sgn(-2) == -1 212 | assert oeop.sgn(3.5) == 1 213 | assert oeop.sgn(0) == 0 214 | assert np.isnan(oeop.sgn(np.nan)) 215 | xr.testing.assert_equal( 216 | oeop.sgn(self.test_data.xr_data_factor(-2, 3.5)), 217 | self.test_data.xr_data_factor(-1, 1)) 218 | 219 | def test_sqrt(self): 220 | """ Tests `sqrt` function. """ 221 | assert oeop.sqrt(0) == 0 222 | assert oeop.sqrt(1) == 1 223 | assert oeop.sqrt(9) == 3 224 | assert np.isnan(oeop.sqrt(np.nan)) 225 | xr.testing.assert_equal( 226 | oeop.sqrt(self.test_data.xr_data_factor(9, 4)), 227 | self.test_data.xr_data_factor(3, 2)) 228 | xr.testing.assert_equal( 229 | oeop.sqrt(self.test_data.xr_data_factor(np.nan, -4)), 230 | self.test_data.xr_data_factor(np.nan, np.nan)) 231 | 232 | def test_power(self): 233 | """ Tests `power` function. """ 234 | assert oeop.power(0, 2) == 0 235 | assert oeop.power(2.5, 0) == 1 236 | assert oeop.power(3, 3) == 27 237 | assert oeop.round(oeop.power(5, -1), 1) == 0.2 238 | assert oeop.power(1, 0.5) == 1 239 | assert oeop.power(1, None) is None 240 | assert oeop.power(None, 2) is None 241 | xr.testing.assert_equal( 242 | oeop.power(self.test_data.xr_data_factor(2, 3), 3), 243 | self.test_data.xr_data_factor(8, 27)) 244 | 245 | def test_mean(self): 246 | """ Tests `mean` function. 
""" 247 | assert oeop.mean([1, 0, 3, 2]) == 1.5 248 | assert oeop.mean([9, 2.5, np.nan, -2.5]) == 3 249 | assert np.isnan(oeop.mean([1, np.nan], ignore_nodata=False)) 250 | assert np.isnan(oeop.mean([])) 251 | assert (oeop.mean(self.test_data.xr_data_factor(3, 5)).values == self.test_data.xr_data_factor(4, 4)[0].values).all() 252 | 253 | def test_min(self): 254 | """ Tests `min` function. """ 255 | assert oeop.min([1, 0, 3, 2]) == 0 256 | assert oeop.min([5, 2.5, np.nan, -0.7]) == -0.7 257 | assert np.isnan(oeop.min([1, 0, 3, np.nan, 2], ignore_nodata=False)) 258 | assert np.isnan(oeop.min([np.nan, np.nan])) 259 | assert (oeop.min(self.test_data.xr_data_factor(3, 5), dimension = 'time') == 3).all() 260 | assert (oeop.min(self.test_data.xr_data_factor(np.nan, 5), dimension='time') == 5).all() 261 | 262 | def test_max(self): 263 | """ Tests `max` function. """ 264 | assert oeop.max([1, 0, 3, 2]) == 3 265 | assert oeop.max([5, 2.5, np.nan, -0.7]) == 5 266 | assert np.isnan(oeop.max([1, 0, 3, np.nan, 2], ignore_nodata=False)) 267 | assert np.isnan(oeop.max([np.nan, np.nan])) 268 | assert (oeop.max(self.test_data.xr_data_factor(3, 5), dimension = 'time') == 5).all() 269 | assert (oeop.max(self.test_data.xr_data_factor(np.nan, 5), dimension='time') == (oeop.max(self.test_data.xr_data_factor(3, 5), dimension = 'time'))).all() 270 | 271 | def test_median(self): 272 | """ Tests `median` function. """ 273 | assert oeop.median([1, 3, 3, 6, 7, 8, 9]) == 6 274 | assert oeop.median([1, 2, 3, 4, 5, 6, 8, 9]) == 4.5 275 | assert oeop.median([-1, -0.5, np.nan, 1]) == -0.5 276 | assert np.isnan(oeop.median([-1, 0, np.nan, 1], ignore_nodata=False)) 277 | assert np.isnan(oeop.median([])) 278 | assert (oeop.median(self.test_data.xr_data_factor(3, 5)).values == self.test_data.xr_data_factor(4, 4)[0].values).all() 279 | 280 | def test_sd(self): 281 | """ Tests `sd` function. """ 282 | assert oeop.sd([-1, 1, 3, np.nan]) == 2 283 | assert np.isnan(oeop.sd([-1, 1, 3, np.nan], ignore_nodata=False)) 284 | assert np.isnan(oeop.sd([])) 285 | assert (oeop.sd(self.test_data.xr_data_factor(3, 5)).values == self.test_data.xr_data_factor()[0].values).all() 286 | 287 | def test_variance(self): 288 | """ Tests `variance` function. """ 289 | assert oeop.variance([-1, 1, 3]) == 4 290 | assert oeop.variance([2, 3, 3, np.nan, 4, 4, 5]) == 1.1 291 | assert np.isnan(oeop.variance([-1, 1, np.nan, 3], ignore_nodata=False)) 292 | assert np.isnan(oeop.variance([])) 293 | assert (oeop.variance(self.test_data.xr_data_factor(3, 5)).values == self.test_data.xr_data_factor()[0].values).all() 294 | 295 | def test_extrema(self): 296 | """ Tests `extrema` function. """ 297 | self.assertListEqual(oeop.extrema([1, 0, 3, 2]), [0, 3]) 298 | self.assertListEqual(oeop.extrema([5, 2.5, np.nan, -0.7]), [-0.7, 5]) 299 | assert np.isclose(oeop.extrema([1, 0, 3, np.nan, 2], ignore_nodata=False), [np.nan, np.nan], 300 | equal_nan=True).all() 301 | assert np.isclose(oeop.extrema([]), [np.nan, np.nan], equal_nan=True).all() 302 | assert (oeop.extrema(self.test_data.xr_data_factor(3, 5)).values == self.test_data.xr_data_factor(3, 5).values).all() 303 | 304 | def test_clip(self): 305 | """ Tests `clip` function. 
""" 306 | assert oeop.clip(-5, min=-1, max=1) == -1 307 | assert oeop.clip(10.001, min=1, max=10) == 10 308 | assert oeop.clip(0.000001, min=0, max=0.02) == 0.000001 309 | assert oeop.clip(None, min=0, max=1) is None 310 | 311 | # test array clipping 312 | assert np.isclose(oeop.clip([-2, -1, 0, 1, 2], min=-1, max=1), [-1, -1, 0, 1, 1], equal_nan=True).all() 313 | assert np.isclose(oeop.clip([-0.1, -0.001, np.nan, 0, 0.25, 0.75, 1.001, np.nan], min=0, max=1), 314 | [0, 0, np.nan, 0, 0.25, 0.75, 1, np.nan], equal_nan=True).all() 315 | xr.testing.assert_equal( 316 | oeop.clip(self.test_data.xr_data_factor(-5, 2), min = 1, max = 8), 317 | self.test_data.xr_data_factor(1, 2)) 318 | xr.testing.assert_equal( 319 | oeop.clip(self.test_data.xr_data_factor(1, 9), min=1, max=8), 320 | self.test_data.xr_data_factor(1, 8)) 321 | xr.testing.assert_equal( 322 | oeop.clip(self.test_data.xr_data_factor(np.nan, 9), min=1, max=8), 323 | self.test_data.xr_data_factor(np.nan, 8)) 324 | 325 | def test_quantiles(self): 326 | """ Tests `quantiles` function. """ 327 | quantiles_1 = oeop.quantiles(data=[2, 4, 4, 4, 5, 5, 7, 9], probabilities=[0.005, 0.01, 0.02, 0.05, 0.1, 0.5]) 328 | quantiles_1 = [oeop.round(quantile, p=2) for quantile in quantiles_1] 329 | assert quantiles_1 == [2.07, 2.14, 2.28, 2.7, 3.4, 4.5] 330 | quantiles_2 = oeop.quantiles(data=[2, 4, 4, 4, 5, 5, 7, 9], q=4) 331 | quantiles_2 = [oeop.round(quantile, p=2) for quantile in quantiles_2] 332 | assert quantiles_2 == [4, 4.5, 5.5] 333 | quantiles_3 = oeop.quantiles(data=[-1, -0.5, np.nan, 1], q=2) 334 | quantiles_3 = [oeop.round(quantile, p=2) for quantile in quantiles_3] 335 | assert quantiles_3 == [-0.5] 336 | quantiles_4 = oeop.quantiles(data=[-1, -0.5, np.nan, 1], q=4, ignore_nodata=False) 337 | assert np.all([np.isnan(quantile) for quantile in quantiles_4]) and len(quantiles_4) == 3 338 | quantiles_5 = oeop.quantiles(data=[], probabilities=[0.1, 0.5]) 339 | assert np.all([np.isnan(quantile) for quantile in quantiles_5]) and len(quantiles_5) == 2 340 | assert (oeop.quantiles(self.test_data.xr_data_factor(1, 2), dimension = None, q = 2) == xr.DataArray(np.array([1.5, 1.5, 1.5]))).all() 341 | assert (oeop.quantiles(self.test_data.xr_data_factor(1, 2), dimension='time', q=2) == xr.DataArray( 342 | np.array([1.5, 1.5, 1.5]))).all() 343 | assert (oeop.quantiles(self.test_data.xr_data_factor(np.nan, 2), dimension='time', q=2) == xr.DataArray( 344 | np.array([2, 2, 2]))).all() 345 | 346 | def test_cummin(self): 347 | """ Tests `cummin` function. """ 348 | self.assertListEqual(oeop.cummin([5, 3, 1, 3, 5]).tolist(), [5, 3, 1, 1, 1]) 349 | assert np.isclose(oeop.cummin([5, 3, np.nan, 1, 5]), [5, 3, np.nan, 1, 1], equal_nan=True).all() 350 | assert np.isclose(oeop.cummin([5, 3, np.nan, 1, 5], ignore_nodata=False), 351 | [5, 3, np.nan, np.nan, np.nan], equal_nan=True).all() 352 | assert (oeop.cummin(xr.DataArray(np.array([3, 5, 2]))) == [3, 3, 2]).all() 353 | 354 | def test_cummax(self): 355 | """ Tests `cummax` function. """ 356 | self.assertListEqual(oeop.cummax([1, 3, 5, 3, 1]).tolist(), [1, 3, 5, 5, 5]) 357 | assert np.isclose(oeop.cummax([1, 3, np.nan, 5, 1]), [1, 3, np.nan, 5, 5], equal_nan=True).all() 358 | assert np.isclose(oeop.cummax([1, 3, np.nan, 5, 1], ignore_nodata=False), 359 | [1, 3, np.nan, np.nan, np.nan], equal_nan=True).all() 360 | assert (oeop.cummax(xr.DataArray(np.array([3, 5, 2]))) == [3, 5, 5]).all() 361 | 362 | def test_cumproduct(self): 363 | """ Tests `cumproduct` function. 
""" 364 | self.assertListEqual(oeop.cumproduct([1, 3, 5, 3, 1]).tolist(), [1, 3, 15, 45, 45]) 365 | assert np.isclose(oeop.cumproduct([1, 2, 3, np.nan, 3, 1]), [1, 2, 6, np.nan, 18, 18], equal_nan=True).all() 366 | assert np.isclose(oeop.cumproduct([1, 2, 3, np.nan, 3, 1], ignore_nodata=False), 367 | [1, 2, 6, np.nan, np.nan, np.nan], equal_nan=True).all() 368 | assert (oeop.cumproduct(xr.DataArray(np.array([3, 5, 2]))) == [3, 15, 30]).all() 369 | xr.testing.assert_equal(oeop.cumproduct(xr.DataArray(np.array([3, np.nan, 2])), ignore_nodata=True), 370 | (xr.DataArray(np.array([3, np.nan, 6])))) 371 | xr.testing.assert_equal(oeop.cumproduct(xr.DataArray(np.array([3, np.nan, 2])), ignore_nodata=False), 372 | (xr.DataArray(np.array([3, np.nan, np.nan])))) 373 | 374 | def test_cumsum(self): 375 | """ Tests `cumsum` function. """ 376 | self.assertListEqual(oeop.cumsum([1, 3, 5, 3, 1]).tolist(), [1, 4, 9, 12, 13]) 377 | assert np.isclose(oeop.cumsum([1, 3, np.nan, 3, 1]), [1, 4, np.nan, 7, 8], equal_nan=True).all() 378 | assert np.isclose(oeop.cumsum([1, 3, np.nan, 3, 1], ignore_nodata=False), 379 | [1, 4, np.nan, np.nan, np.nan], equal_nan=True).all() 380 | assert (oeop.cumsum(xr.DataArray(np.array([3, 5, 2]))) == [3, 8, 10]).all() 381 | xr.testing.assert_equal(oeop.cumsum(xr.DataArray(np.array([3, np.nan, 2])), ignore_nodata=True), 382 | xr.DataArray(np.array([3, np.nan, 5]))) 383 | xr.testing.assert_equal(oeop.cumsum(xr.DataArray(np.array([3, np.nan, 2])), ignore_nodata=False), 384 | xr.DataArray(np.array([3, np.nan, np.nan]))) 385 | 386 | 387 | def test_sum(self): 388 | """ Tests `sum` function. """ 389 | assert oeop.sum([5, 1]) == 6 390 | assert oeop.sum([-2, 4, 2.5]) == 4.5 391 | assert np.isnan(oeop.sum([1, np.nan], ignore_nodata=False)) 392 | 393 | # xarray tests 394 | # Take sum over 't' dimension in a 3d array 395 | self.assertEqual( 396 | int(oeop.sum(self.test_data.xr_data_3d)[0, 0].data), 397 | 88 398 | ) 399 | # Take sum over 't' dimension in a 3d array 400 | self.assertEqual( 401 | list(oeop.sum([self.test_data.xr_data_3d, 1000])[:, 0, 0].data), 402 | [1008., 1080.] 403 | ) 404 | # Take sum over 's' dimension in a 4d array 405 | self.assertListEqual( 406 | list(oeop.sum(self.test_data.xr_data_4d)[:, 0, 0].data), 407 | [14, 140] 408 | ) 409 | # Test with input as [xr.DataArray, xr.DataArray] 410 | self.assertEqual( 411 | ( 412 | oeop.sum([self.test_data.xr_data_3d, self.test_data.xr_data_3d]) - 413 | self.test_data.xr_data_3d * 2 414 | ).sum(), 0) 415 | 416 | assert (oeop.sum(self.test_data.xr_data_factor(1, 2), dimension='time').values == 3).all() 417 | 418 | def test_product(self): 419 | """ Tests `product` function. 
""" 420 | assert oeop.product([5, 0]) == 0 421 | assert oeop.product([-2, 4, 2.5]) == -20 422 | assert np.isnan(oeop.product([1, np.nan], ignore_nodata=False)) 423 | assert oeop.product([-1]) == -1 424 | assert np.isnan(oeop.product([np.nan], ignore_nodata=False)) 425 | assert np.isnan(oeop.product([])) 426 | 427 | C = np.ones((2, 5, 5)) * 100 428 | assert np.sum(oeop.product(C) - np.ones((5, 5)) * 10000) == 0 429 | assert np.sum(oeop.product(deepcopy(C), extra_values=[2]) - np.ones((5, 5)) * 20000) == 0 430 | assert np.sum(oeop.product(deepcopy(C), extra_values=[2, 3]) - np.ones((5, 5)) * 60000) == 0 431 | 432 | # xarray tests 433 | # Take sum over 't' dimension in a 3d array 434 | self.assertEqual( 435 | int(oeop.product(self.test_data.xr_data_3d)[0, 0].data), 436 | 640 437 | ) 438 | # Take sum over 's' dimension in a 4d array 439 | self.assertListEqual( 440 | list(oeop.product(self.test_data.xr_data_4d)[:, 0, 0].data), 441 | [64, 64000] 442 | ) 443 | 444 | def test_add(self): 445 | """ Tests `add` function. """ 446 | assert oeop.add(5, 2.5) == 7.5 447 | assert oeop.add(-2, -4) == -6 448 | assert oeop.add(1, None) is None 449 | xr.testing.assert_equal( 450 | oeop.add(self.test_data.xr_data_factor(1, 9), self.test_data.xr_data_factor(2, 2)), 451 | self.test_data.xr_data_factor(3, 11)) 452 | xr.testing.assert_equal( 453 | oeop.add(self.test_data.xr_data_factor(1, 9), self.test_data.xr_data_factor(np.nan, -2)), 454 | self.test_data.xr_data_factor(np.nan, 7)) 455 | assert self.test_data.xr_data_factor(1, 9).attrs == oeop.add(7, self.test_data.xr_data_factor(1, 9)).attrs 456 | 457 | def test_subtract(self): 458 | """ Tests `subtract` function. """ 459 | assert oeop.subtract(5, 2.5) == 2.5 460 | assert oeop.subtract(-2, 4) == -6 461 | assert oeop.subtract(1, None) is None 462 | 463 | # xarray tests 464 | assert (oeop.subtract(self.test_data.xr_data_3d, 465 | self.test_data.xr_data_3d)).sum() == 0 466 | 467 | def test_multiply(self): 468 | """ Tests `multiply` function. """ 469 | assert oeop.multiply(5, 2.5) == 12.5 470 | assert oeop.multiply(-2, -4) == 8 471 | assert oeop.multiply(1, None) is None 472 | xr.testing.assert_equal( 473 | oeop.multiply(self.test_data.xr_data_factor(3, 9), self.test_data.xr_data_factor(2, np.nan)), 474 | self.test_data.xr_data_factor(6, np.nan)) 475 | 476 | def test_divide(self): 477 | """ Tests `divide` function. """ 478 | assert oeop.divide(5, 2.5) == 2. 479 | assert oeop.divide(-2, 4) == -0.5 480 | assert oeop.divide(1, None) is None 481 | xr.testing.assert_equal( 482 | oeop.divide(self.test_data.xr_data_factor(1, 6.4), self.test_data.xr_data_factor(np.nan, 2)), 483 | self.test_data.xr_data_factor(np.nan, 3.2)) 484 | 485 | def test_normalized_difference(self): 486 | """ Tests `normalized_difference` function. """ 487 | assert oeop.normalized_difference(5, 3) == 0.25 488 | assert oeop.normalized_difference(1, 1) == 0 489 | assert (oeop.normalized_difference(np.array([1, 1]), np.array([0, 1])) == np.array([1, 0])).all() 490 | xr.testing.assert_equal(oeop.normalized_difference(self.test_data.xr_data_factor(1, 5), self.test_data.xr_data_factor(1, 3)), self.test_data.xr_data_factor(0, 0.25)) 491 | 492 | def test_ndvi(self): 493 | """ Tests 'ndvi' function. """ 494 | assert (oeop.ndvi(self.test_data.xr_data_4d, target_band='B') == 1/3).all() 495 | 496 | def test_apply_kernel(self): 497 | """ Tests `apply_kernel` function. 
""" 498 | # xarray tests 499 | kernel = np.asarray([[0,0,0],[0,1,0],[0,0,0]]) 500 | # With the given kernel the result must be the same as the input 501 | xr.testing.assert_equal(oeop.apply_kernel(self.test_data.xr_data_4d,kernel,border=0, factor=1),self.test_data.xr_data_4d) 502 | xr.testing.assert_equal(oeop.apply_kernel(self.test_data.xr_data_3d,kernel,border=0, factor=1),self.test_data.xr_data_3d) 503 | 504 | if __name__ == "__main__": 505 | unittest.main() 506 | -------------------------------------------------------------------------------- /tests/test_texts.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import numpy as np 3 | import openeo_processes as oeop 4 | 5 | 6 | class TextTester(unittest.TestCase): 7 | """ Tests all math functions. """ 8 | 9 | def test_text_begins(self): 10 | """ Tests `text_begins` function. """ 11 | assert not oeop.text_begins("Lorem ipsum dolor sit amet", pattern="amet") 12 | assert oeop.text_begins("Lorem ipsum dolor sit amet", pattern="Lorem") 13 | assert not oeop.text_begins("Lorem ipsum dolor sit amet", pattern="lorem") 14 | assert oeop.text_begins("Lorem ipsum dolor sit amet", pattern="lorem", case_sensitive=False) 15 | assert oeop.text_begins("Ä", pattern="ä", case_sensitive=False) 16 | assert oeop.text_begins(None, pattern="None") is None 17 | 18 | def test_text_ends(self): 19 | """ Tests `text_ends` function. """ 20 | assert oeop.text_ends("Lorem ipsum dolor sit amet", pattern="amet") 21 | assert not oeop.text_ends("Lorem ipsum dolor sit amet", pattern="AMET") 22 | assert not oeop.text_ends("Lorem ipsum dolor sit amet", pattern="Lorem") 23 | assert oeop.text_ends("Lorem ipsum dolor sit amet", pattern="AMET", case_sensitive=False) 24 | assert oeop.text_ends("Ä", pattern="ä", case_sensitive=False) 25 | assert oeop.text_ends(None, pattern="None") is None 26 | 27 | def test_text_contains(self): 28 | """ Tests `text_contains` function. """ 29 | assert not oeop.text_contains("Lorem ipsum dolor sit amet", pattern="openEO") 30 | assert oeop.text_contains("Lorem ipsum dolor sit amet", pattern="ipsum dolor") 31 | assert not oeop.text_contains("Lorem ipsum dolor sit amet", pattern="Ipsum Dolor") 32 | assert oeop.text_contains("Lorem ipsum dolor sit amet", pattern="SIT", case_sensitive=False) 33 | assert oeop.text_contains("ÄÖÜ", pattern="ö", case_sensitive=False) 34 | assert oeop.text_contains(None, pattern="None") is None 35 | 36 | def test_text_merge(self): 37 | """ Tests `text_merge` function. 
""" 38 | assert oeop.text_merge(["Hello", "World"], separator=" ") == "Hello World" 39 | assert oeop.text_merge([1, 2, 3, 4, 5, 6, 7, 8, 9, 0]) == "1234567890" 40 | assert oeop.text_merge([np.nan, True, False, 1, -1.5, "ß"], separator="\n") == "nan\ntrue\nfalse\n1\n-1.5\nß" 41 | assert oeop.text_merge([2, 0], separator=1) == "210" 42 | assert oeop.text_merge([]) == "" 43 | 44 | 45 | if __name__ == '__main__': 46 | unittest.main() 47 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import dask.array.core 4 | import numpy as np 5 | import pytest 6 | import xarray 7 | from openeo_processes.utils import eval_datatype, get_process, has_process 8 | 9 | 10 | @pytest.mark.parametrize(["data", "expected"], [ 11 | (None, "NoneType"), 12 | (True, "bool"), 13 | (False, "bool"), 14 | ("123", "str"), 15 | (123, "int"), 16 | (123.456, "float"), 17 | ([1, 2, 3], "list"), 18 | ((1, 2, 3), "tuple"), 19 | ({1, 2, 3}, "set"), 20 | ({1: 2, 3: 4}, "dict"), 21 | (lambda x, y: x + y, "function"), 22 | (datetime.datetime.now(), "datetime"), 23 | (np.array([1, 2, 3]), "numpy"), 24 | (xarray.DataArray([1, 2, 3]), "xarray"), 25 | (dask.array.core.from_array([1, 2, 3]), "dask"), 26 | ]) 27 | def test_eval_datatype(data, expected): 28 | assert eval_datatype(data) == expected 29 | 30 | 31 | def test_has_process(): 32 | assert has_process("add") 33 | assert has_process("multiply") 34 | assert not has_process("foobar") 35 | assert has_process("and") 36 | assert not has_process("and_") 37 | assert has_process("or") 38 | assert not has_process("or_") 39 | assert has_process("if") 40 | assert not has_process("if_") 41 | 42 | 43 | @pytest.mark.parametrize(["pid", "args", "expected"], [ 44 | ("add", (2, 3), 5), 45 | ("multiply", (2, 3), 6), 46 | ("sum", ([1, 2, 3, 4, 5, 6],), 21), 47 | ("median", ([2, 5, 3, 8, 11],), 5), 48 | ("and", (False, True), False), 49 | ("or", (False, True), True), 50 | ]) 51 | def test_get_process(pid, args, expected): 52 | fun = get_process(pid) 53 | assert fun(*args) == expected 54 | --------------------------------------------------------------------------------