├── .gitattributes ├── .github ├── ISSUE_TEMPLATE.md ├── PULL_REQUEST_TEMPLATE.md ├── dependabot.yml └── workflows │ ├── ci.yaml │ └── deploy.yaml ├── .gitignore ├── .stickler.yml ├── AUTHORS.md ├── CHANGELOG.md ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── RELEASING.md ├── changelog.rst ├── continuous_integration ├── build-manylinux-wheels.sh └── environment.yaml ├── cython_test.pyx ├── cython_test2.pyx ├── doc ├── Makefile └── source │ ├── conf.py │ ├── images │ ├── scatter_cartesian_distances.png │ ├── scatter_cartesian_distances_thumb.png │ ├── terra_20110821_1115_maspalomas_rednsow.png │ ├── terra_20110821_1115_maspalomas_rednsow_int.png │ ├── terra_20110821_1115_maspalomas_rednsow_int_thumb.png │ ├── terra_20110821_1115_rednsow.png │ ├── terra_20110821_1115_rednsow_int.png │ └── terra_20110821_1115_rednsow_int_thumb.png │ └── index.rst ├── geotiepoints ├── __init__.py ├── _modis_interpolator.pyx ├── _modis_utils.pxd ├── _modis_utils.pyx ├── _simple_modis_interpolator.pyx ├── basic_interpolator.py ├── geointerpolator.py ├── interpolator.py ├── modisinterpolator.py ├── multilinear.py ├── multilinear_cython.pyx ├── simple_modis_interpolator.py ├── tests │ ├── __init__.py │ ├── test_geointerpolator.py │ ├── test_interpolator.py │ ├── test_modis.py │ ├── test_modisinterpolator.py │ ├── test_multilinear.py │ ├── test_satelliteinterpolator.py │ ├── test_simple_modis_interpolator.py │ ├── test_viiinterpolator.py │ └── utils.py ├── version.py └── viiinterpolator.py ├── mytest_fillborders.py ├── mytest_modis5to1.py ├── pyproject.toml ├── requirements.txt ├── setup.cfg ├── setup.py ├── testdata ├── 250m_lonlat_section_input.h5 ├── 250m_lonlat_section_result.h5 ├── create_modis_test_data.py ├── modis_test_data.h5 ├── test_5_to_1_geoloc_5km.h5 └── test_5_to_1_geoloc_full.h5 └── versioneer.py /.gitattributes: -------------------------------------------------------------------------------- 1 | geotiepoints/version.py export-subst 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | #### Code Sample, a minimal, complete, and verifiable piece of code 2 | 3 | ```python 4 | # Your code here 5 | 6 | ``` 7 | #### Problem description 8 | 9 | [this should also explain **why** the current behaviour is a problem and why the 10 | expected output is a better solution.] 11 | 12 | #### Expected Output 13 | 14 | #### Actual Result, Traceback if applicable 15 | 16 | #### Versions of Python, package at hand and relevant dependencies 17 | 18 | 19 | Thank you for reporting an issue ! 20 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | - [ ] Closes #xxxx 4 | - [ ] Tests added 5 | - [ ] Tests passed 6 | - [ ] Passes ``git diff origin/main **/*py | flake8 --diff`` 7 | - [ ] Fully documented 8 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 
3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "github-actions" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "monthly" 12 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | test: 7 | runs-on: ${{ matrix.os }} 8 | continue-on-error: ${{ matrix.experimental }} 9 | strategy: 10 | fail-fast: false 11 | matrix: 12 | os: ["windows-latest", "ubuntu-latest", "macos-latest"] 13 | python-version: ["3.10", "3.11", "3.12"] 14 | experimental: [false] 15 | include: 16 | - python-version: "3.12" 17 | os: "ubuntu-latest" 18 | experimental: true 19 | 20 | env: 21 | PYTHON_VERSION: ${{ matrix.python-version }} 22 | OS: ${{ matrix.os }} 23 | UNSTABLE: ${{ matrix.experimental }} 24 | ACTIONS_ALLOW_UNSECURE_COMMANDS: true 25 | 26 | steps: 27 | - name: Checkout source 28 | uses: actions/checkout@v4 29 | 30 | - name: Setup Conda Environment 31 | uses: conda-incubator/setup-miniconda@v3 32 | with: 33 | miniforge-version: latest 34 | channel-priority: strict 35 | python-version: ${{ matrix.python-version }} 36 | activate-environment: test-environment 37 | environment-file: continuous_integration/environment.yaml 38 | 39 | - name: Install unstable dependencies 40 | if: matrix.experimental == true 41 | shell: bash -l {0} 42 | run: | 43 | python -m pip install versioneer pkgconfig setuptools-scm; \ 44 | conda uninstall --force-remove -y scipy h5py pyresample pykdtree pandas xarray; \ 45 | python -m pip install \ 46 | -f https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ 47 | --trusted-host pypi.anaconda.org \ 48 | --no-deps --pre --upgrade \ 49 | matplotlib \ 50 | numpy \ 51 | pandas \ 52 | scipy; \ 53 | python -m pip install \ 54 | --no-deps --upgrade --pre --no-build-isolation \ 55 | git+https://github.com/dask/dask \ 56 | git+https://github.com/dask/distributed \ 57 | git+https://github.com/h5py/h5py \ 58 | git+https://github.com/storpipfugl/pykdtree \ 59 | git+https://github.com/pytroll/pyresample \ 60 | git+https://github.com/pydata/bottleneck \ 61 | git+https://github.com/pydata/xarray; 62 | 63 | - name: Install geotiepoints 64 | shell: bash -l {0} 65 | run: | 66 | pip install -e . 67 | python setup.py build_ext --inplace --cython-coverage --force 68 | 69 | - name: Run unit tests 70 | shell: bash -l {0} 71 | run: | 72 | pytest --cov=geotiepoints geotiepoints/tests --cov-report=xml --cov-report= 73 | 74 | # FIXME: These fail 75 | # - name: Test website 76 | # shell: bash -l {0} 77 | # run: | 78 | # cd doc && mkdir doctest && sphinx-build -E -n -b doctest ./source ./doctest && cd .. 
79 | 80 | - name: Upload unittest coverage to Codecov 81 | uses: codecov/codecov-action@v5 82 | with: 83 | flags: unittests 84 | file: ./coverage.xml 85 | env_vars: OS,PYTHON_VERSION,UNSTABLE 86 | 87 | - name: Coveralls Parallel 88 | uses: AndreMiras/coveralls-python-action@develop 89 | with: 90 | flag-name: run-${{ matrix.test_number }} 91 | parallel: true 92 | if: runner.os == 'Linux' 93 | 94 | coveralls: 95 | needs: [test] 96 | runs-on: ubuntu-latest 97 | steps: 98 | - name: Coveralls Finished 99 | uses: AndreMiras/coveralls-python-action@develop 100 | with: 101 | parallel-finished: true 102 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yaml: -------------------------------------------------------------------------------- 1 | name: Deploy sdist and wheels 2 | 3 | 4 | on: 5 | push: 6 | pull_request: 7 | release: 8 | types: 9 | - published 10 | 11 | jobs: 12 | build_sdist: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - name: Checkout source 16 | uses: actions/checkout@v4 17 | 18 | - name: Create sdist 19 | shell: bash -l {0} 20 | run: | 21 | python -m pip install -q build 22 | python -m build -s 23 | 24 | - name: Upload sdist to build artifacts 25 | uses: actions/upload-artifact@v4 26 | with: 27 | name: sdist 28 | path: dist/*.tar.gz 29 | 30 | build_wheels: 31 | name: "Build wheels on ${{ matrix.os }} ${{ matrix.cibw_archs }}" 32 | runs-on: ${{ matrix.os }} 33 | strategy: 34 | fail-fast: false 35 | matrix: 36 | include: 37 | - os: windows-2019 38 | cibw_archs: "AMD64 ARM64" 39 | artifact_name: "win" 40 | - os: macos-latest 41 | cibw_archs: "x86_64 arm64" 42 | artifact_name: "mac" 43 | - os: "ubuntu-latest" 44 | cibw_archs: "aarch64" 45 | artifact_name: "ubuntu-aarch" 46 | - os: "ubuntu-latest" 47 | cibw_archs: "x86_64" 48 | artifact_name: "ubuntu-x86_64" 49 | 50 | steps: 51 | - uses: actions/checkout@v4 52 | - run: | 53 | git fetch --prune --unshallow 54 | 55 | - name: Set up QEMU 56 | if: runner.os == 'Linux' 57 | uses: docker/setup-qemu-action@v3 58 | with: 59 | platforms: all 60 | 61 | - name: Build wheels 62 | uses: pypa/cibuildwheel@v2.23.3 63 | env: 64 | CIBW_SKIP: "cp36-* cp37-* cp38-* pp* *-manylinux_i686 *-musllinux_i686 *-musllinux_aarch64 *-win32" 65 | CIBW_ARCHS: "${{ matrix.cibw_archs }}" 66 | CIBW_TEST_SKIP: "*_arm64 *_universal2:arm64" 67 | 68 | - uses: actions/upload-artifact@v4 69 | with: 70 | name: wheels-${{ matrix.artifact_name }} 71 | path: ./wheelhouse/*.whl 72 | 73 | upload_to_pypi: 74 | needs: [build_sdist, build_wheels] 75 | runs-on: ubuntu-latest 76 | steps: 77 | - name: Download sdist artifact 78 | uses: actions/download-artifact@v4 79 | with: 80 | name: sdist 81 | path: dist 82 | - name: Download wheels artifact - win 83 | uses: actions/download-artifact@v4 84 | with: 85 | name: wheels-win 86 | path: dist 87 | - name: Download wheels artifact - mac 88 | uses: actions/download-artifact@v4 89 | with: 90 | name: wheels-mac 91 | path: dist 92 | - name: Download wheels artifact - ubuntu aarch 93 | uses: actions/download-artifact@v4 94 | with: 95 | name: wheels-ubuntu-aarch 96 | path: dist 97 | - name: Download wheels artifact - ubuntu x86_64 98 | uses: actions/download-artifact@v4 99 | with: 100 | name: wheels-ubuntu-x86_64 101 | path: dist 102 | - name: Publish package to Test PyPI 103 | if: github.event.action != 'published' && github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') 104 | uses: pypa/gh-action-pypi-publish@v1.12.4 105 | with: 106 | user: __token__ 107 | password: ${{ 
secrets.test_pypi_password }} 108 | repository_url: https://test.pypi.org/legacy/ 109 | - name: Publish package to PyPI 110 | if: github.event.action == 'published' 111 | uses: pypa/gh-action-pypi-publish@v1.12.4 112 | with: 113 | user: __token__ 114 | password: ${{ secrets.pypi_password }} 115 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[co] 2 | 3 | # Packages 4 | *.egg 5 | *.egg-info 6 | dist 7 | build 8 | eggs 9 | .eggs 10 | parts 11 | bin 12 | var 13 | sdist 14 | develop-eggs 15 | .installed.cfg 16 | 17 | # Installer logs 18 | pip-log.txt 19 | 20 | # Unit test / coverage reports 21 | .coverage 22 | .tox 23 | 24 | #Translations 25 | *.mo 26 | 27 | #Mr Developer 28 | .mr.developer.cfg 29 | 30 | # Don't include the C files in the repository 31 | geotiepoints/*.c 32 | 33 | # pycharm 34 | .idea 35 | 36 | # vscode 37 | .vscode -------------------------------------------------------------------------------- /.stickler.yml: -------------------------------------------------------------------------------- 1 | linters: 2 | flake8: 3 | python: 3 4 | config: setup.cfg 5 | 6 | -------------------------------------------------------------------------------- /AUTHORS.md: -------------------------------------------------------------------------------- 1 | # Project Contributors 2 | 3 | The following people have made contributions to this project: 4 | 5 | 6 | 7 | 8 | 9 | 10 | - [Amit Aronovitch (AmitAronovitch)](https://github.com/AmitAronovitch) 11 | - [Adam Dybbroe (adybbroe)](https://github.com/adybbroe) 12 | - [Rolf Helge Pfeiffer (HelgeDMI)](https://github.com/HelgeDMI) 13 | - [David Hoese (djhoese)](https://github.com/djhoese) 14 | - [Mikhail Itkin (mitkin)](https://github.com/mitkin) 15 | - [Sauli Joro (sjoro)](https://github.com/sjoro) 16 | - [Panu Lahtinen (pnuu)](https://github.com/pnuu) 17 | - [Pepe Phillips (pepephillips)](https://github.com/pepephillips) 18 | - [Martin Raspaud (mraspaud)](https://github.com/mraspaud) 19 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## Version 1.7.5 (2024/10/12) 2 | 3 | ### Issues Closed 4 | 5 | * [Issue 79](https://github.com/pytroll/python-geotiepoints/issues/79) - Test failure with scipy 1.13.x 6 | 7 | In this release 1 issue was closed. 8 | 9 | ### Pull Requests Merged 10 | 11 | #### Features added 12 | 13 | * [PR 85](https://github.com/pytroll/python-geotiepoints/pull/85) - Add a spline interpolator for 2d arrays 14 | 15 | In this release 1 pull request was closed. 16 | 17 | 18 | ## Version 1.7.4 (2024/06/26) 19 | 20 | ### Pull Requests Merged 21 | 22 | #### Bugs fixed 23 | 24 | * [PR 76](https://github.com/pytroll/python-geotiepoints/pull/76) - Fix numpy 2 dtype issues 25 | 26 | In this release 1 pull request was closed. 27 | 28 | 29 | ## Version 1.7.3 (2024/04/15) 30 | 31 | ### Pull Requests Merged 32 | 33 | #### Bugs fixed 34 | 35 | * [PR 74](https://github.com/pytroll/python-geotiepoints/pull/74) - Build wheels with numpy 2.0rc1 and fix scipy 1.13.0 compatibility 36 | 37 | In this release 1 pull request was closed. 
38 | 39 | 40 | ## Version 1.7.2 (2024/02/14) 41 | 42 | ### Pull Requests Merged 43 | 44 | #### Bugs fixed 45 | 46 | * [PR 63](https://github.com/pytroll/python-geotiepoints/pull/63) - Deploy to test pypi only on tags 47 | 48 | #### Features added 49 | 50 | * [PR 70](https://github.com/pytroll/python-geotiepoints/pull/70) - Build wheels with numpy 2 51 | 52 | In this release 2 pull requests were closed. 53 | 54 | 55 | ## Version 1.7.1 (2023/11/28) 56 | 57 | ### Pull Requests Merged 58 | 59 | #### Bugs fixed 60 | 61 | * [PR 62](https://github.com/pytroll/python-geotiepoints/pull/62) - Fix python versions in deploy ci 62 | 63 | In this release 1 pull request was closed. 64 | 65 | 66 | ## Version 1.7.0 (2023/11/21) 67 | 68 | ### Issues Closed 69 | 70 | * [Issue 56](https://github.com/pytroll/python-geotiepoints/issues/56) - Upgrade to Cython 3.0 and check annotations ([PR 57](https://github.com/pytroll/python-geotiepoints/pull/57) by [@djhoese](https://github.com/djhoese)) 71 | * [Issue 47](https://github.com/pytroll/python-geotiepoints/issues/47) - Help wanted: verify the interpolation of MERSI-2 1000M GEO to 250M GEO 72 | * [Issue 23](https://github.com/pytroll/python-geotiepoints/issues/23) - Docstring headers still include authors. 73 | * [Issue 21](https://github.com/pytroll/python-geotiepoints/issues/21) - Interpolation of MODIS lat/lons is incorrect 74 | * [Issue 18](https://github.com/pytroll/python-geotiepoints/issues/18) - Make the interpolators dask-compatible 75 | 76 | In this release 5 issues were closed. 77 | 78 | ### Pull Requests Merged 79 | 80 | #### Bugs fixed 81 | 82 | * [PR 60](https://github.com/pytroll/python-geotiepoints/pull/60) - Add missing noexcept on cython function 83 | * [PR 46](https://github.com/pytroll/python-geotiepoints/pull/46) - Fix tests on i386 architectures 84 | 85 | #### Features added 86 | 87 | * [PR 61](https://github.com/pytroll/python-geotiepoints/pull/61) - Fix geogrid chunking to accept "auto" and to preserve dtype 88 | * [PR 57](https://github.com/pytroll/python-geotiepoints/pull/57) - Upgrade to Cython 3+ in building ([56](https://github.com/pytroll/python-geotiepoints/issues/56)) 89 | 90 | In this release 4 pull requests were closed. 91 | 92 | 93 | ## Version 1.6.0 (2023/03/17) 94 | 95 | 96 | ### Pull Requests Merged 97 | 98 | #### Bugs fixed 99 | 100 | * [PR 45](https://github.com/pytroll/python-geotiepoints/pull/45) - Fix VII interpolator compatibility with future versions of xarray 101 | 102 | #### Features added 103 | 104 | * [PR 44](https://github.com/pytroll/python-geotiepoints/pull/44) - Add interpolators based on scipy's RegularGridInterpolator 105 | 106 | In this release 2 pull requests were closed. 107 | 108 | 109 | ## Version 1.5.1 (2022/12/09) 110 | 111 | ### Pull Requests Merged 112 | 113 | #### Bugs fixed 114 | 115 | * [PR 43](https://github.com/pytroll/python-geotiepoints/pull/43) - Fix deprecation for numpy array equality 116 | 117 | In this release 1 pull request was closed. 118 | 119 | 120 | ## Version 1.5.0 (2022/10/25) 121 | 122 | ### Pull Requests Merged 123 | 124 | #### Features added 125 | 126 | * [PR 38](https://github.com/pytroll/python-geotiepoints/pull/38) - Rewrite simple and tiepoint modis interpolation in cython 127 | 128 | In this release 1 pull request was closed. 
129 | 130 | 131 | ## Version 1.4.1 (2022/06/08) 132 | 133 | ### Issues Closed 134 | 135 | * [Issue 39](https://github.com/pytroll/python-geotiepoints/issues/39) - MODIS Interpolation Comparisons ([PR 41](https://github.com/pytroll/python-geotiepoints/pull/41) by [@djhoese](https://github.com/djhoese)) 136 | 137 | In this release 1 issue was closed. 138 | 139 | ### Pull Requests Merged 140 | 141 | #### Bugs fixed 142 | 143 | * [PR 41](https://github.com/pytroll/python-geotiepoints/pull/41) - Fix MODIS cviirs-based interpolation ([39](https://github.com/pytroll/python-geotiepoints/issues/39)) 144 | 145 | #### Features added 146 | 147 | * [PR 35](https://github.com/pytroll/python-geotiepoints/pull/35) - Optimize angle-based modis interpolation for dask 148 | 149 | In this release 2 pull requests were closed. 150 | 151 | 152 | ## Version 1.4.0 (2022/02/21) 153 | 154 | ### Pull Requests Merged 155 | 156 | #### Features added 157 | 158 | * [PR 34](https://github.com/pytroll/python-geotiepoints/pull/34) - Updated interpolator for vii tie points for test data version V2 159 | 160 | In this release 1 pull request was closed. 161 | 162 | 163 | ## Version 1.3.1 (2022/02/04) 164 | 165 | ### Pull Requests Merged 166 | 167 | #### Bugs fixed 168 | 169 | * [PR 33](https://github.com/pytroll/python-geotiepoints/pull/33) - Fix deprecated use of np.int 170 | 171 | #### Features added 172 | 173 | * [PR 32](https://github.com/pytroll/python-geotiepoints/pull/32) - Change tested Python versions to 3.8, 3.9 and 3.10 174 | 175 | In this release 2 pull requests were closed. 176 | 177 | 178 | ## Version 1.3.0 (2021/09/12) 179 | 180 | ### Pull Requests Merged 181 | 182 | #### Features added 183 | 184 | * [PR 31](https://github.com/pytroll/python-geotiepoints/pull/31) - Add simple lon/lat based MODIS interpolation 185 | 186 | In this release 1 pull request was closed. 187 | 188 | 189 | ## Version 1.2.1 (2021/03/08) 190 | 191 | ### Issues Closed 192 | 193 | * [Issue 29](https://github.com/pytroll/python-geotiepoints/issues/29) - C extension does not compile on py3.9 without re-cythonizing ([PR 30](https://github.com/pytroll/python-geotiepoints/pull/30)) 194 | * [Issue 28](https://github.com/pytroll/python-geotiepoints/issues/28) - I'm trying to install pycups on mac os using the treminal, but I'm getting “building wheel for pycups (setup.py) … error” 195 | * [Issue 27](https://github.com/pytroll/python-geotiepoints/issues/27) - MNT: Stop using ci-helpers in appveyor.yml ([PR 30](https://github.com/pytroll/python-geotiepoints/pull/30)) 196 | * [Issue 26](https://github.com/pytroll/python-geotiepoints/issues/26) - pip install pysbrl --no-binary=pysbrl gives error 197 | 198 | In this release 4 issues were closed. 199 | 200 | ### Pull Requests Merged 201 | 202 | #### Bugs fixed 203 | 204 | * [PR 30](https://github.com/pytroll/python-geotiepoints/pull/30) - Switch build system to require Cython and build extensions on install ([29](https://github.com/pytroll/python-geotiepoints/issues/29), [27](https://github.com/pytroll/python-geotiepoints/issues/27)) 205 | 206 | #### Features added 207 | 208 | * [PR 30](https://github.com/pytroll/python-geotiepoints/pull/30) - Switch build system to require Cython and build extensions on install ([29](https://github.com/pytroll/python-geotiepoints/issues/29), [27](https://github.com/pytroll/python-geotiepoints/issues/27)) 209 | 210 | In this release 2 pull requests were closed. 
211 | 212 | 213 | ## Version 1.2.0 (2020/06/05) 214 | 215 | 216 | ### Pull Requests Merged 217 | 218 | #### Bugs fixed 219 | 220 | * [PR 19](https://github.com/pytroll/python-geotiepoints/pull/19) - Fix interpolation of symetrical tiepoints 221 | 222 | #### Features added 223 | 224 | * [PR 22](https://github.com/pytroll/python-geotiepoints/pull/22) - Add VII interpolator. 225 | * [PR 16](https://github.com/pytroll/python-geotiepoints/pull/16) - Add MODIS 5km to 500m and 250m interpolation 226 | 227 | In this release 3 pull requests were closed. 228 | 229 | 230 | ## Version 1.1.8 (2019/04/24) 231 | 232 | ### Issues Closed 233 | 234 | ### Pull Requests Merged 235 | 236 | #### Bugs fixed 237 | 238 | * [PR 14](https://github.com/pytroll/python-geotiepoints/pull/14) - Fix modis interpolation in tricky places 239 | 240 | #### Features added 241 | 242 | * [PR 15](https://github.com/pytroll/python-geotiepoints/pull/15) - Add support for modis l2 geolocation interpolation 243 | 244 | In this release 2 pull requests were closed. 245 | 246 | 247 | ## Version v1.1.7 (2018/10/09) 248 | 249 | ### Issues Closed 250 | 251 | * [Issue 8](https://github.com/pytroll/python-geotiepoints/issues/8) - When I install this package,it said 'Failed building wheel for python-geotiepoints'. 252 | 253 | In this release 1 issue was closed. 254 | 255 | ### Pull Requests Merged 256 | 257 | #### Bugs fixed 258 | 259 | * [PR 12](https://github.com/pytroll/python-geotiepoints/pull/12) - Fix python 3 compatibility for the metop interpolator 260 | 261 | #### Features added 262 | 263 | * [PR 13](https://github.com/pytroll/python-geotiepoints/pull/13) - Switch to versioneer and loghub 264 | * [PR 11](https://github.com/pytroll/python-geotiepoints/pull/11) - Add cviirs-based fast modis interpolator ([405](https://github.com/pytroll/satpy/issues/405)) 265 | 266 | In this release 3 pull requests were closed. 267 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include doc/Makefile 2 | recursive-include doc/source * 3 | include LICENSE.txt 4 | include geotiepoints/*.pyx 5 | include geotiepoints/*.pxd 6 | include versioneer.py 7 | include geotiepoints/version.py 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | python-geotiepoints 2 | =================== 3 | 4 | [![Build Status](https://github.com/pytroll/python-geotiepoints/workflows/CI/badge.svg?branch=main)](https://github.com/pytroll/python-geotiepoints/actions?query=workflow%3A%22CI%22) 5 | [![Coverage Status](https://coveralls.io/repos/github/pytroll/python-geotiepoints/badge.svg?branch=main)](https://coveralls.io/github/pytroll/python-geotiepoints?branch=main) 6 | 7 | 8 | Python-geotiepoints is a Python module that interpolates (and extrapolates if 9 | needed) geographical tiepoints into a larger geographical grid. This is useful 10 | when the full resolution lon/lat grid is needed while only a lower resolution 11 | grid of tiepoints is provided. 12 | 13 | Some helper functions are provided to accommodate satellite data, but the 14 | package should be generic enough to be used for any kind of data. 15 | 16 | In addition we have added a fast multilinear interpolation of regular gridded 17 | data using Cython.
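As a quick illustration of the tiepoint interpolation described above, here is a minimal usage sketch mirroring the example from the package documentation (the tiepoint values and the interpolation orders below are purely illustrative):

```python
import numpy as np

from geotiepoints.geointerpolator import GeoInterpolator

# Tiepoint longitudes/latitudes given on a coarse 3x3 grid of rows/columns ...
tie_cols = np.arange(0, 11, 5)
tie_rows = np.arange(0, 5, 2)
tie_lons = (np.arange(3 * 3) + 30).reshape((3, 3))
tie_lats = (np.arange(3 * 3) - 4.5).reshape((3, 3))

# ... and the finer row/column grid to interpolate (and extrapolate) them onto.
fine_cols = np.arange(0, 11)
fine_rows = np.arange(0, 5)

interpolator = GeoInterpolator((tie_lons, tie_lats),
                               (tie_rows, tie_cols),
                               (fine_rows, fine_cols),
                               2, 2)
lons, lats = interpolator.interpolate()  # full-resolution lon/lat arrays
```

In practice the tiepoint arrays would come from real data, for example the thinned geolocation of a satellite granule.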
18 | 19 | Adam & Martin 20 | May 2017, Norrköping, Sweden 21 | -------------------------------------------------------------------------------- /RELEASING.md: -------------------------------------------------------------------------------- 1 | # Releasing python-geotiepoints 2 | 3 | prerequisites: `pip install loghub setuptools twine` 4 | 5 | 1. checkout main branch 6 | 2. pull from repo 7 | 3. run the unittests 8 | 4. run `loghub` and update the `CHANGELOG.md` file: 9 | 10 | ``` 11 | loghub pytroll/python-geotiepoints --token $LOGHUB_GITHUB_TOKEN -st $(git tag --sort=-version:refname --list 'v*' | head -n 1) -plg bug "Bugs fixed" -plg enhancement "Features added" -plg documentation "Documentation changes" -plg backwards-incompatibility "Backward incompatible changes" -plg refactor "Refactoring" 12 | ``` 13 | 14 | This uses a `LOGHUB_GITHUB_TOKEN` environment variable. This must be created 15 | on GitHub and it is recommended that you add it to your `.bashrc` or 16 | `.bash_profile` or equivalent. 17 | 18 | Don't forget to commit! 19 | 20 | 5. Create a tag with the new version number, starting with a 'v', eg: 21 | 22 | ``` 23 | git tag -a v0.22.45 -m "Version 0.22.45" 24 | ``` 25 | 26 | See [semver.org](http://semver.org/) on how to write a version number. 27 | 28 | 6. push changes to github `git push --follow-tags` 29 | 7. Verify github action unittests passed. 30 | 8. Create a "Release" on GitHub by going to 31 | https://github.com/pytroll/python-geotiepoints/releases and clicking "Draft a new release". 32 | On the next page enter the newly created tag in the "Tag version" field, 33 | "Version X.Y.Z" in the "Release title" field, and paste the markdown from 34 | the changelog (the portion under the version section header) in the 35 | "Describe this release" box. Finally click "Publish release". 36 | 9. Verify the GitHub actions for deployment succeed and the release is on PyPI. 37 | -------------------------------------------------------------------------------- /changelog.rst: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========= 3 | 4 | 5 | v1.1.6 (2018-09-25) 6 | ------------------- 7 | - Update changelog. [David Hoese] 8 | - Bump version: 1.1.5 → 1.1.6. [David Hoese] 9 | - Merge pull request #10 from pytroll/bugfix-travis-37. [David Hoese] 10 | 11 | Fix python 3.7 environment on travis 12 | - Add generic language settings for osx environments on travis. [David 13 | Hoese] 14 | - Remove generic language setting for travis and added OSX 3.7 env. 15 | [David Hoese] 16 | - Merge pull request #9 from AmitAronovitch/py37-support. [David Hoese] 17 | 18 | support Python 3.7 19 | - Add py3.7 on linux test in travis. [Amit Aronovitch] 20 | - Rebuild multilinear_cython.c with Cython 0.28 (supports py3.7) [Amit 21 | Aronovitch] 22 | - Add templates for issues and PRs. [Adam.Dybbroe] 23 | 24 | 25 | v1.1.5 (2018-05-21) 26 | ------------------- 27 | - Update changelog. [davidh-ssec] 28 | - Bump version: 1.1.4 → 1.1.5. [davidh-ssec] 29 | - Remove wheel deployment on travis. [davidh-ssec] 30 | - Add skip_existing to travis deploy. [davidh-ssec] 31 | 32 | 33 | v1.1.4 (2018-05-21) 34 | ------------------- 35 | - Update changelog. [davidh-ssec] 36 | - Bump version: 1.1.3 → 1.1.4. [davidh-ssec] 37 | - Merge pull request #7 from pytroll/bugfix-packaging. [David Hoese] 38 | 39 | Add MANIFEST to include cython files and update travis to use ci-helpers 40 | - Replace 'Cython' dependency with 'cython' in conda environment on 41 | travis. 
[davidh-ssec] 42 | - Fix adding osx to travis tests by using a matrix. [davidh-ssec] 43 | - Add osx to travis tests and add python_requires to setup.py. [davidh- 44 | ssec] 45 | - Add appveyor config. [davidh-ssec] 46 | - Add MANIFEST to include cython files and update travis to use ci- 47 | helpers. [davidh-ssec] 48 | - Fix travis/slack integration. [Adam.Dybbroe] 49 | 50 | 51 | v1.1.3 (2018-03-12) 52 | ------------------- 53 | - Update changelog. [Adam.Dybbroe] 54 | - Bump version: 1.1.2 → 1.1.3. [Adam.Dybbroe] 55 | - Add unittest2 as a test-requirement. [Adam.Dybbroe] 56 | 57 | setuptools installs h5py2.8.0rc from PyPI, which in turn requires 58 | unittest2, however, which is not in the h5py requirements! 59 | 60 | - Fix unit tests for Python 3. [Adam.Dybbroe] 61 | - Merge branch 'new_release' into develop. [Adam.Dybbroe] 62 | - Build and test Python 3.4, 3.5, 3.6 on Travis. [Adam.Dybbroe] 63 | - Fix doc tests. [Adam.Dybbroe] 64 | - Merge pull request #6 from mitkin/develop. [Adam Dybbroe] 65 | 66 | Set "C" order of interpolated arrays explicitly 67 | - Set "C" order of interpolated arrays explicitly. [Mikhail Itkin] 68 | 69 | It appears that scipy's spline interpolator returns an array that are in F and C 70 | order simultaneously. Transposing and viewing the array converts it into 71 | F-contiguous array. By specifying the order explicitly we convert it to 72 | C order 73 | 74 | 75 | 76 | v1.1.2 (2017-12-01) 77 | ------------------- 78 | - Update changelog. [Adam.Dybbroe] 79 | - Bump version: 1.1.1 → 1.1.2. [Adam.Dybbroe] 80 | - Fix bumpversion file. [Adam.Dybbroe] 81 | - Go back one version number - bumpversion will bump it. [Adam.Dybbroe] 82 | - Fix author mail address. [Adam.Dybbroe] 83 | - Add separate version file. [Adam.Dybbroe] 84 | - Bugfix documentation - code example. [Adam.Dybbroe] 85 | 86 | 87 | v1.1.1 (2017-05-31) 88 | ------------------- 89 | - Update changelog. [Adam.Dybbroe] 90 | - Bump version: 1.1.0 → 1.1.1. [Adam.Dybbroe] 91 | - Merge branch 'bugfix_201608_change' into develop. [Adam.Dybbroe] 92 | - Fix tests for modis data interpolation. [Adam.Dybbroe] 93 | - Add h5py to test_requires. [Martin Raspaud] 94 | - Fix modis interpolators. [Martin Raspaud] 95 | - Fix temporary mytest code. [Adam.Dybbroe] 96 | - Comment out test that fails in the post aug2016 ode change. 97 | [Adam.Dybbroe] 98 | - Add unittest for modis5kmto1km. Make testing of code before and after 99 | the august 2016 change possible. [Adam.Dybbroe] 100 | - Add Cython generated C-code and make installation possible without 101 | having Cython and numpy header files available. [Adam.Dybbroe] 102 | 103 | Looked at how it was done for pyresample. 104 | 105 | 106 | 107 | v1.1.0 (2017-05-19) 108 | ------------------- 109 | - Update changelog. [Adam.Dybbroe] 110 | - Bump version: 1.0.0 → 1.1.0. [Adam.Dybbroe] 111 | - Merge pull request #3 from pytroll/multilinear-cython. [Adam Dybbroe] 112 | 113 | Multilinear cython 114 | - Fix unittests. [Martin Raspaud] 115 | - Merge remote-tracking branch 'origin/multilinear-cython' into 116 | multilinear-cython. [Martin Raspaud] 117 | - Remove pyresample from the list of required packages. [Adam.Dybbroe] 118 | - Fix extrapolation after lowres indices are in highres numberspace. 119 | [Martin Raspaud] 120 | - Add back and fix the test_extrapolate_rows test. [Adam.Dybbroe] 121 | - Restructure test-suite. Comment out tests that hasn't been maintained. 122 | [Adam.Dybbroe] 123 | - Add badges to frontpage. [Adam.Dybbroe] 124 | - Add unittest for multilinear interpolation. 
[Adam.Dybbroe] 125 | - Add Cython to requirements file. [Adam.Dybbroe] 126 | - Add requirements file. [Adam.Dybbroe] 127 | - Rename README file. [Adam.Dybbroe] 128 | - Prepare for travis, and clean up. [Adam.Dybbroe] 129 | - Add fast multilinear interpolation on regular grid with Cython. 130 | [Adam.Dybbroe] 131 | 132 | 133 | v1.0.0 (2016-10-27) 134 | ------------------- 135 | 136 | Fix 137 | ~~~ 138 | - Bugfix: new_data attr was not initialized correctly in 139 | GeoInterpolator. [Martin Raspaud] 140 | 141 | Other 142 | ~~~~~ 143 | - Update changelog. [Martin Raspaud] 144 | - Bump version: 0.3.0 → 1.0.0. [Martin Raspaud] 145 | - Add .bumpversion.cfg and .gitchangelog.rc. [Martin Raspaud] 146 | - Fix row extrapolation in the chunked case. [Martin Raspaud] 147 | - Merge pull request #1 from mitkin/develop. [Adam Dybbroe] 148 | 149 | [setup.py] added missing dependency Pandas 150 | - [setup.py] added missing dependency Pandas. [Mikhail Itkin] 151 | 152 | `basic_interpolator` imports pandas, which was not in the `install_requires` 153 | this commit adds `pandas` to the `install_requires` in setup.py 154 | 155 | - Add setup.cfg for rpm building. [Martin Raspaud] 156 | - Removed dependency to memory profiler. [HelgeDMI] 157 | - Basic bilinear interpolation of geotie points, which is even running 158 | on my local machine on the biggest Sentinel-1 input files (ca. 530MB). 159 | I have to add a test and test data. [Rolf-Helge Pfeiffer] 160 | - Bump up version number to v0.3.0. [Martin Raspaud] 161 | - Update documentation with new interface. [Martin Raspaud] 162 | - Major reorganization and tests. [Martin Raspaud] 163 | 164 | * A new generic Interpolator has been introduced. 165 | * The SatelliteInterpolator is renamed to GeoInterpolator 166 | * The GeoInterpolator uses the generic Interpolator 167 | * SatelliteInterpolator is an alias for GeoInterpolator 168 | * Added regular unittests instead of heavy doctests. 169 | 170 | - Merge branch 'multicore-feature' into develop. [Martin Raspaud] 171 | 172 | Conflicts: 173 | tests/test_modis.py 174 | 175 | - Cleanup. [Martin Raspaud] 176 | - Core number fix. [Martin Raspaud] 177 | - Remove unneeded arguments. [Martin Raspaud] 178 | - Generalize multiprocessing. [Martin Raspaud] 179 | - Bug fixing. [Adam Dybbroe] 180 | - Adding util functions for cpu-setting and scene splitting. Cleaning up 181 | a bit. [Adam Dybbroe] 182 | - Adding multiprocessing capability to the modis 1km to 250 meter 183 | interpolation. [Adam Dybbroe] 184 | - Test multicore interpolation. [Martin Raspaud] 185 | - Merge branch 'develop' of github.com:adybbroe/python-geotiepoints into 186 | develop. [Martin Raspaud] 187 | - Merge branch 'develop' of github.com:adybbroe/python-geotiepoints into 188 | develop. [Martin Raspaud] 189 | - Merge branch 'release-0.2' into develop. [Adam Dybbroe] 190 | - Merge github.com:adybbroe/python-geotiepoints into develop. [Martin 191 | Raspaud] 192 | - Tell about automatic extrapolation. [Martin Raspaud] 193 | - Bump up version number. [Martin Raspaud] 194 | - Merge branch 'release-0.2' [Adam Dybbroe] 195 | - Autodocs: More mockup... [Adam Dybbroe] 196 | - Mockup to avoid import errors when using autodoc. [Adam Dybbroe] 197 | - Conf.py pythonpath settings. [Adam Dybbroe] 198 | - Docs... [Adam Dybbroe] 199 | - Docs... [Adam Dybbroe] 200 | - Autodocs... [Adam Dybbroe] 201 | - Fixing for autodoc... [Adam Dybbroe] 202 | - Merge branch 'master' into release-0.2. [Adam Dybbroe] 203 | - Clean up and try prepare for ReadTheDocs. 
[Adam Dybbroe] 204 | - Merge branch 'release-0.2' [Adam Dybbroe] 205 | - Testdata. [Adam Dybbroe] 206 | - Temporary fix of file paths in tests. [Adam Dybbroe] 207 | - Testdata added. [Adam Dybbroe] 208 | - Test-code and data added. [Adam Dybbroe] 209 | - Fixing bug in fill_borders. MODIS 250 meter fixed. [Adam Dybbroe] 210 | - Added more documentation - examples and images. [Adam Dybbroe] 211 | - Added documentation. [Martin Raspaud] 212 | 213 | 214 | v0.1.0 (2012-05-15) 215 | ------------------- 216 | - Doc: Added a few things in the readme. [Martin Raspaud] 217 | - Fixing urls. [Martin Raspaud] 218 | - Prepare for pypi. [Martin Raspaud] 219 | - Merge branch 'master' of https://github.com/adybbroe/python- 220 | geotiepoints. [Adam Dybbroe] 221 | - Initial commit. [Adam Dybbroe] 222 | - Changing dir name also. [Martin Raspaud] 223 | - Changed the name of the project to python-geotiepoints. [Martin 224 | Raspaud] 225 | - Removed dependency to pyresample, and cleaned up. [Martin Raspaud] 226 | - Cleanup a bit. [Martin Raspaud] 227 | - Merge branch 'develop' of /data/proj/SAF/GIT/geo_interpolator into 228 | develop. [Martin Raspaud] 229 | - Added GPLv3 license text. [Adam Dybbroe] 230 | - Added metop interpolator and 1d interpolation. [Martin Raspaud] 231 | - Documentation. [Martin Raspaud] 232 | - Fixed documentation. [Martin Raspaud] 233 | - Cleanup. [Martin Raspaud] 234 | - Added modis functions and orders are now passed to interpolator 235 | constructor. [Martin Raspaud] 236 | - Cleanup. [Martin Raspaud] 237 | - Cleaning and bugfixing. Seems to work. [Martin Raspaud] 238 | 239 | Tested against real data. 240 | 241 | - WIP: Reshaped SatelliteInterpolator, and added modis5kmto1km function. 242 | [Martin Raspaud] 243 | 244 | Relatively untested version. Should be functional though. 245 | 246 | - Added a setup.py and renamed for consistency. [Martin Raspaud] 247 | - Initial commit. [Martin Raspaud] 248 | 249 | 250 | -------------------------------------------------------------------------------- /continuous_integration/build-manylinux-wheels.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e -x 3 | 4 | # This is to be run by Docker inside a Docker image. 5 | # You can test it locally on a Linux machine by installing docker and running from this repo's root: 6 | # $ docker run -e PLAT=manylinux2014_x86_64 -v `pwd`:/io quay.io/pypa/manylinux1_x86_64 /io/scripts/build-manylinux-wheels.sh 7 | 8 | # * The -e just defines an environment variable PLAT=[docker name] inside the 9 | # docker - auditwheel can't detect the docker name automatically. 10 | # * The -v gives a directory alias for passing files in and out of the docker 11 | # (/io is arbitrary). E.g the `setup.py` script would be accessed in the 12 | # docker via `/io/setup.py`. 13 | # * quay.io/pypa/manylinux2014_x86_64 is the full docker image name. Docker 14 | # downloads it automatically. 15 | # * The last argument is a shell command that the Docker will execute. 16 | # Filenames must be from the Docker's perspective. 17 | 18 | # Wheels are initially generated as you would usually, but put in a temp 19 | # directory temp-wheels. The pip-cache is optional but can speed up local builds 20 | # having a real permanent pip-cache dir. 21 | mkdir -p /io/pip-cache 22 | mkdir -p /io/temp-wheels 23 | 24 | # Clean out any old existing wheels. 
25 | find /io/temp-wheels/ -type f -delete 26 | 27 | # /io might be owned by someone else since we are in docker 28 | # this may stop versioneer from using git the way it needs 29 | git config --global --add safe.directory /io 30 | 31 | # Iterate through available pythons. 32 | for PYBIN in /opt/python/cp3{7,8,9,10}*/bin; do 33 | "${PYBIN}/pip" install -q -U setuptools wheel build --cache-dir /io/pip-cache 34 | # Run the following in root of this repo. 35 | (cd /io/ && "${PYBIN}/pip" install -q .) 36 | (cd /io/ && "${PYBIN}/python" -m build -w -o /io/temp-wheels) 37 | done 38 | 39 | "$PYBIN/pip" install -q auditwheel 40 | 41 | # Wheels aren't considered manylinux unless they have been through 42 | # auditwheel. Audited wheels go in /io/dist/. 43 | mkdir -p /io/dist/ 44 | 45 | for whl in /io/temp-wheels/*.whl; do 46 | auditwheel repair "$whl" --plat "$PLAT" -w /io/dist/ 47 | done 48 | -------------------------------------------------------------------------------- /continuous_integration/environment.yaml: -------------------------------------------------------------------------------- 1 | name: test-environment 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - xarray 6 | - dask 7 | - Cython 8 | - sphinx 9 | - pillow 10 | - pyyaml 11 | - coveralls 12 | - coverage 13 | - scipy 14 | - h5py 15 | - pytest 16 | - pytest-cov 17 | - pyproj >=3 18 | - pyresample 19 | -------------------------------------------------------------------------------- /cython_test.pyx: -------------------------------------------------------------------------------- 1 | # cython: language_level=3, boundscheck=False, cdivision=True, wraparound=False, initializedcheck=False, nonecheck=False 2 | cimport cython 3 | import numpy as np 4 | cimport numpy as np 5 | 6 | np.import_array() 7 | 8 | def test_func(): 9 | cdef np.ndarray[float, ndim=2] arr = np.zeros((5, 5), dtype=np.float32) 10 | cdef float[:, ::1] arr_view = arr 11 | _run(arr_view) 12 | 13 | cdef void _run(float[:, ::1] arr_view) noexcept nogil: 14 | cdef float[:, :] tmp = _get_upper_left_corner(arr_view) 15 | 16 | cdef inline float[:, :] _get_upper_left_corner(float[:, ::1] arr) noexcept nogil: 17 | return arr[:1, :1] 18 | -------------------------------------------------------------------------------- /cython_test2.pyx: -------------------------------------------------------------------------------- 1 | # cython: language_level=3, boundscheck=False, cdivision=True, wraparound=False, initializedcheck=False, nonecheck=False 2 | cimport cython 3 | import numpy as np 4 | cimport numpy as np 5 | 6 | np.import_array() 7 | 8 | def test_func(): 9 | cdef np.ndarray[float, ndim=2] arr = np.zeros((5, 5), dtype=np.float32) 10 | cdef float[:, ::1] arr_view = arr 11 | t = Test(5.0) 12 | t.call_me(arr_view) 13 | 14 | 15 | cdef class Test: 16 | 17 | cdef float _a 18 | 19 | def __cinit__(self, float a): 20 | self._a = a 21 | 22 | cdef void call_me(self, float[:, ::1] my_arr) noexcept: 23 | with nogil: 24 | self._call_me(my_arr) 25 | 26 | cdef void _call_me(self, float[:, ::1] my_arr) noexcept nogil: 27 | cdef Py_ssize_t idx 28 | cdef float[:, :] my_arr2 = _get_upper_left_corner(my_arr) 29 | for idx in range(my_arr2.shape[0]): 30 | my_arr2[idx, 0] = self._a 31 | 32 | 33 | cdef inline float[:, :] _get_upper_left_corner(float[:, ::1] arr) noexcept nogil: 34 | return arr[:3, :3] 35 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for 
Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 14 | 15 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest 16 | 17 | help: 18 | @echo "Please use \`make ' where is one of" 19 | @echo " html to make standalone HTML files" 20 | @echo " dirhtml to make HTML files named index.html in directories" 21 | @echo " singlehtml to make a single large HTML file" 22 | @echo " pickle to make pickle files" 23 | @echo " json to make JSON files" 24 | @echo " htmlhelp to make HTML files and a HTML help project" 25 | @echo " qthelp to make HTML files and a qthelp project" 26 | @echo " devhelp to make HTML files and a Devhelp project" 27 | @echo " epub to make an epub" 28 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 29 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 30 | @echo " text to make text files" 31 | @echo " man to make manual pages" 32 | @echo " changes to make an overview of all changed/added/deprecated items" 33 | @echo " linkcheck to check all external links for integrity" 34 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 35 | 36 | clean: 37 | -rm -rf $(BUILDDIR)/* 38 | 39 | html: 40 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 41 | @echo 42 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 43 | 44 | dirhtml: 45 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 46 | @echo 47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 48 | 49 | singlehtml: 50 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 51 | @echo 52 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 53 | 54 | pickle: 55 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 56 | @echo 57 | @echo "Build finished; now you can process the pickle files." 58 | 59 | json: 60 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 61 | @echo 62 | @echo "Build finished; now you can process the JSON files." 63 | 64 | htmlhelp: 65 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 66 | @echo 67 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 68 | ".hhp project file in $(BUILDDIR)/htmlhelp." 69 | 70 | qthelp: 71 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 72 | @echo 73 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 74 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 75 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/python-geotiepoints.qhcp" 76 | @echo "To view the help file:" 77 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/python-geotiepoints.qhc" 78 | 79 | devhelp: 80 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 81 | @echo 82 | @echo "Build finished." 83 | @echo "To view the help file:" 84 | @echo "# mkdir -p $$HOME/.local/share/devhelp/python-geotiepoints" 85 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/python-geotiepoints" 86 | @echo "# devhelp" 87 | 88 | epub: 89 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 90 | @echo 91 | @echo "Build finished. 
The epub file is in $(BUILDDIR)/epub." 92 | 93 | latex: 94 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 95 | @echo 96 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 97 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 98 | "(use \`make latexpdf' here to do that automatically)." 99 | 100 | latexpdf: 101 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 102 | @echo "Running LaTeX files through pdflatex..." 103 | make -C $(BUILDDIR)/latex all-pdf 104 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 105 | 106 | text: 107 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 108 | @echo 109 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 110 | 111 | man: 112 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 113 | @echo 114 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 115 | 116 | changes: 117 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 118 | @echo 119 | @echo "The overview file is in $(BUILDDIR)/changes." 120 | 121 | linkcheck: 122 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 123 | @echo 124 | @echo "Link check complete; look for any errors in the above output " \ 125 | "or in $(BUILDDIR)/linkcheck/output.txt." 126 | 127 | doctest: 128 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 129 | @echo "Testing of doctests in the sources finished, look at the " \ 130 | "results in $(BUILDDIR)/doctest/output.txt." 131 | -------------------------------------------------------------------------------- /doc/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # python-geotiepoints documentation build configuration file, created by 4 | # sphinx-quickstart on Tue May 15 10:12:57 2012. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this 9 | # autogenerated file. 10 | # 11 | # All configuration values have a default; values that are commented out 12 | # serve to show the default. 13 | 14 | import os 15 | import sys 16 | from geotiepoints import __version__ 17 | from datetime import datetime 18 | 19 | # If extensions (or modules to document with autodoc) are in another directory, 20 | # add these directories to sys.path here. If the directory is relative to the 21 | # documentation root, use os.path.abspath to make it absolute, like shown here. 22 | #sys.path.insert(0, os.path.abspath('.')) 23 | sys.path.insert(0, os.path.abspath('../../')) 24 | sys.path.insert(0, os.path.abspath('../../geotiepoints')) 25 | 26 | 27 | class Mock(object): 28 | def __init__(self, *args, **kwargs): 29 | pass 30 | 31 | def __call__(self, *args, **kwargs): 32 | return Mock() 33 | 34 | @classmethod 35 | def __getattr__(cls, name): 36 | if name in ('__file__', '__path__'): 37 | return '/dev/null' 38 | if name[0] == name[0].upper(): 39 | mockType = type(name, (), {}) 40 | mockType.__module__ = __name__ 41 | return mockType 42 | return Mock() 43 | 44 | 45 | MOCK_MODULES = ['numpy', 'scipy.interpolate', 'scipy', 46 | 'pyhdf.SD', 'pyhdf.error'] 47 | for mod_name in MOCK_MODULES: 48 | sys.modules[mod_name] = Mock() 49 | 50 | 51 | # -- General configuration ----------------------------------------------------- 52 | 53 | # If your documentation needs a minimal Sphinx version, state it here. 
54 | #needs_sphinx = '1.0' 55 | 56 | # Add any Sphinx extension module names here, as strings. They can be extensions 57 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 58 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest'] 59 | #extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 60 | # 'sphinx.ext.coverage', 'sphinxtogithub'] 61 | 62 | # Add any paths that contain templates here, relative to this directory. 63 | #templates_path = ['.templates'] 64 | templates_path = ['sphinx_templates'] 65 | 66 | # The suffix of source filenames. 67 | source_suffix = '.rst' 68 | 69 | # The encoding of source files. 70 | #source_encoding = 'utf-8-sig' 71 | 72 | # The master toctree document. 73 | master_doc = 'index' 74 | 75 | # General information about the project. 76 | project = u'python-geotiepoints' 77 | copyright = u'2012-%d, Python-geotiepoints Developers' % datetime.utcnow().year # noqa 78 | 79 | # The version info for the project you're documenting, acts as replacement for 80 | # |version| and |release|, also used in various other places throughout the 81 | # built documents. 82 | # 83 | # The short X.Y version. 84 | version = __version__.split('+')[0] 85 | # The full version, including alpha/beta/rc tags. 86 | release = __version__ 87 | 88 | # The language for content autogenerated by Sphinx. Refer to documentation 89 | # for a list of supported languages. 90 | #language = None 91 | 92 | # There are two options for replacing |today|: either, you set today to some 93 | # non-false value, then it is used: 94 | #today = '' 95 | # Else, today_fmt is used as the format for a strftime call. 96 | #today_fmt = '%B %d, %Y' 97 | 98 | # List of patterns, relative to source directory, that match files and 99 | # directories to ignore when looking for source files. 100 | exclude_patterns = [] 101 | 102 | # The reST default role (used for this markup: `text`) to use for all documents. 103 | #default_role = None 104 | 105 | # If true, '()' will be appended to :func: etc. cross-reference text. 106 | #add_function_parentheses = True 107 | 108 | # If true, the current module name will be prepended to all description 109 | # unit titles (such as .. function::). 110 | #add_module_names = True 111 | 112 | # If true, sectionauthor and moduleauthor directives will be shown in the 113 | # output. They are ignored by default. 114 | #show_authors = False 115 | 116 | # The name of the Pygments (syntax highlighting) style to use. 117 | pygments_style = 'sphinx' 118 | 119 | # A list of ignored prefixes for module index sorting. 120 | #modindex_common_prefix = [] 121 | 122 | 123 | # -- Options for HTML output --------------------------------------------------- 124 | 125 | # The theme to use for HTML and HTML Help pages. See the documentation for 126 | # a list of builtin themes. 127 | #html_theme = 'sphinxdoc' 128 | html_theme = 'default' 129 | 130 | # Theme options are theme-specific and customize the look and feel of a theme 131 | # further. For a list of options available for each theme, see the 132 | # documentation. 133 | #html_theme_options = {} 134 | 135 | # Add any paths that contain custom themes here, relative to this directory. 136 | #html_theme_path = [] 137 | 138 | # The name for this set of Sphinx documents. If None, it defaults to 139 | # " v documentation". 140 | #html_title = None 141 | 142 | # A shorter title for the navigation bar. Default is the same as html_title. 
143 | #html_short_title = None 144 | 145 | # The name of an image file (relative to this directory) to place at the top 146 | # of the sidebar. 147 | #html_logo = None 148 | 149 | # The name of an image file (within the static path) to use as favicon of the 150 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 151 | # pixels large. 152 | #html_favicon = None 153 | 154 | # Add any paths that contain custom static files (such as style sheets) here, 155 | # relative to this directory. They are copied after the builtin static files, 156 | # so a file named "default.css" will overwrite the builtin "default.css". 157 | #html_static_path = ['.static'] 158 | html_static_path = ['sphinx_static'] 159 | 160 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 161 | # using the given strftime format. 162 | #html_last_updated_fmt = '%b %d, %Y' 163 | 164 | # If true, SmartyPants will be used to convert quotes and dashes to 165 | # typographically correct entities. 166 | #html_use_smartypants = True 167 | 168 | # Custom sidebar templates, maps document names to template names. 169 | #html_sidebars = {} 170 | 171 | # Additional templates that should be rendered to pages, maps page names to 172 | # template names. 173 | #html_additional_pages = {} 174 | 175 | # If false, no module index is generated. 176 | #html_domain_indices = True 177 | 178 | # If false, no index is generated. 179 | #html_use_index = True 180 | 181 | # If true, the index is split into individual pages for each letter. 182 | #html_split_index = False 183 | 184 | # If true, links to the reST sources are added to the pages. 185 | #html_show_sourcelink = True 186 | 187 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 188 | #html_show_sphinx = True 189 | 190 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 191 | #html_show_copyright = True 192 | 193 | # If true, an OpenSearch description file will be output, and all pages will 194 | # contain a tag referring to it. The value of this option must be the 195 | # base URL from which the finished HTML is served. 196 | #html_use_opensearch = '' 197 | 198 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 199 | #html_file_suffix = None 200 | 201 | # Output file base name for HTML help builder. 202 | htmlhelp_basename = 'python-geotiepointsdoc' 203 | 204 | 205 | # -- Options for LaTeX output -------------------------------------------------- 206 | 207 | # The paper size ('letter' or 'a4'). 208 | #latex_paper_size = 'letter' 209 | 210 | # The font size ('10pt', '11pt' or '12pt'). 211 | #latex_font_size = '10pt' 212 | 213 | # Grouping the document tree into LaTeX files. List of tuples 214 | # (source start file, target name, title, author, documentclass [howto/manual]). 215 | latex_documents = [ 216 | ('index', 'python-geotiepoints.tex', u'python-geotiepoints Documentation', 217 | u'Pytroll crew', 'manual'), 218 | ] 219 | 220 | # The name of an image file (relative to this directory) to place at the top of 221 | # the title page. 222 | #latex_logo = None 223 | 224 | # For "manual" documents, if this is true, then toplevel headings are parts, 225 | # not chapters. 226 | #latex_use_parts = False 227 | 228 | # If true, show page references after internal links. 229 | #latex_show_pagerefs = False 230 | 231 | # If true, show URL addresses after external links. 232 | #latex_show_urls = False 233 | 234 | # Additional stuff for the LaTeX preamble. 
235 | #latex_preamble = '' 236 | 237 | # Documents to append as an appendix to all manuals. 238 | #latex_appendices = [] 239 | 240 | # If false, no module index is generated. 241 | #latex_domain_indices = True 242 | 243 | 244 | # -- Options for manual page output -------------------------------------------- 245 | 246 | # One entry per manual page. List of tuples 247 | # (source start file, name, description, authors, manual section). 248 | man_pages = [ 249 | ('index', 'python-geotiepoints', u'python-geotiepoints Documentation', 250 | [u'Pytroll crew'], 1) 251 | ] 252 | -------------------------------------------------------------------------------- /doc/source/images/scatter_cartesian_distances.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/doc/source/images/scatter_cartesian_distances.png -------------------------------------------------------------------------------- /doc/source/images/scatter_cartesian_distances_thumb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/doc/source/images/scatter_cartesian_distances_thumb.png -------------------------------------------------------------------------------- /doc/source/images/terra_20110821_1115_maspalomas_rednsow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/doc/source/images/terra_20110821_1115_maspalomas_rednsow.png -------------------------------------------------------------------------------- /doc/source/images/terra_20110821_1115_maspalomas_rednsow_int.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/doc/source/images/terra_20110821_1115_maspalomas_rednsow_int.png -------------------------------------------------------------------------------- /doc/source/images/terra_20110821_1115_maspalomas_rednsow_int_thumb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/doc/source/images/terra_20110821_1115_maspalomas_rednsow_int_thumb.png -------------------------------------------------------------------------------- /doc/source/images/terra_20110821_1115_rednsow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/doc/source/images/terra_20110821_1115_rednsow.png -------------------------------------------------------------------------------- /doc/source/images/terra_20110821_1115_rednsow_int.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/doc/source/images/terra_20110821_1115_rednsow_int.png -------------------------------------------------------------------------------- /doc/source/images/terra_20110821_1115_rednsow_int_thumb.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/doc/source/images/terra_20110821_1115_rednsow_int_thumb.png -------------------------------------------------------------------------------- /doc/source/index.rst: -------------------------------------------------------------------------------- 1 | Python-geotiepoints 2 | =================== 3 | 4 | Python-geotiepoints is an application for the interpolation (and extrapolation if 5 | needed) of geographical tiepoints into a larger/denser geographical grid. This is useful 6 | when the full resolution lon/lat grid is needed while only a lower resolution 7 | grid of tiepoints is available. 8 | 9 | Some helper functions are provided to accommodate satellite data, but the 10 | package should be generic enough to be used for any kind of data. 11 | 12 | The source code of the module can be found on the github_ page. 13 | 14 | .. _github: http://github.com/adybbroe/python-geotiepoints 15 | 16 | .. contents:: 17 | 18 | Installation - Pip 19 | ------------------ 20 | 21 | You can install the latest version of python-geotiepoints with pip:: 22 | 23 | pip install python-geotiepoints 24 | 25 | Alternatively, you can download the source code from github_:: 26 | 27 | git clone git://github.com/adybbroe/python-geotiepoints.git 28 | 29 | and then run:: 30 | 31 | pip install . 32 | 33 | Or, to install the package in "development" mode when you 34 | want to edit the source code and see the effects immediately:: 35 | 36 | pip install -e . 37 | 38 | Installation - Conda 39 | -------------------- 40 | 41 | The python-geotiepoints package is also available on conda-forge. To install 42 | into an existing conda environment run:: 43 | 44 | conda install -c conda-forge python-geotiepoints 45 | 46 | Usage for longitude and latitude interpolation 47 | ---------------------------------------------- 48 | 49 | A typical usage of the package: 50 | 51 | >>> from geotiepoints.geointerpolator import GeoInterpolator 52 | >>> import numpy as np 53 | >>> tie_cols = np.arange(0, 11, 5) 54 | >>> tie_rows = np.arange(0, 5, 2) 55 | >>> fine_cols = np.arange(0, 11) 56 | >>> fine_rows = np.arange(0, 5) 57 | >>> tie_lons = (np.arange(3*3) + 30).reshape((3, 3)) 58 | >>> tie_lats = (np.arange(3*3) - 4.5).reshape((3, 3)) 59 | >>> interpolator = GeoInterpolator((tie_lons, tie_lats), 60 | ... (tie_rows, tie_cols), 61 | ... (fine_rows, fine_cols), 62 | ... 2, 2) 63 | >>> lons, lats = interpolator.interpolate() 64 | >>> print([round(lon, 5) for lon in lons[2:4,2:4].ravel()]) # doctest: +SKIP 65 | [33.40008, 33.60007, 34.89802, 35.09801] 66 | >>> print([round(lat, 5) for lat in lats[2:4,2:4].ravel()]) # doctest: +SKIP 67 | [-1.09998, -0.89998, 0.39951, 0.5995] 68 | 69 | 70 | Example - Aqua/Terra MODIS data 71 | ------------------------------- 72 | 73 | The application is currently used in operation at SMHI to upsample the 74 | thinned MODIS products received via EUMETCast. For this purpose the performance 75 | is good, both in terms of accuracy and processing speed. 76 | 77 | EUMETSAT is relaying Terra and Aqua MODIS level 1 data from NOAA in real time 78 | for the European user community. The data are disseminated via EUMETCast and 79 | can be received with a common DVB antenna. Before uploading them, EUMETSAT 80 | thins the data to contain only a subset of MODIS channels, and the data are 81 | also filtered so that only data over the European area are sent.
82 | 83 | The radiance data (reflectances and brightness temperatures) are in 1km 84 | resolution but contain geolocation only on a thinned grid - on tiepoints every 85 | 5th km. So in order to project and further process the data we need to upsample it, 86 | i.e. interpolate and extrapolate the geolocation to the full 1km grid. 87 | 88 | We have checked the accuracy against the full resolution geolocation 89 | data. These full geolocation data were previously available in separate files 90 | on the same EUMETCast stream. But due to the significant bandwidth occupied 91 | and the relatively modest usage of the data, EUMETSAT decided to stop this 92 | dissemination in February 2012 to save costs and bandwidth for other 93 | products. With this tool the need for the full resolution geolocation is no 94 | longer as critical as before. 95 | 96 | There is one significant issue here, however, which is that the geolocation 97 | provided in the MODIS level1b products is terrain corrected. So in order to 98 | restore the full 1km geolocation data from the thinned tiepoint gridded data 99 | now available on EUMETCast, one would need access to the exact terrain model 100 | used in the MODIS processing, and also a more advanced method than what is 101 | provided with this application. 102 | 103 | Outside areas with high and rough topography, however, the accuracy of this 104 | tool is rather good. Below is an example with a 5 minute granule over Western 105 | Africa and the Canary Islands from August 21st, 2011. First we show the 106 | accuracy of the geolocation after interpolation, and then the image data: first 107 | the raw unprojected granule, and then the projected (1km Mercator) data. 108 | 109 | Comparison of the interpolated lon/lat values with the true ones for the granule shown below: 110 | 111 | .. image:: images/scatter_cartesian_distances_thumb.png 112 | 113 | We see that the method applied gives deviations of far less than one pixel, except 114 | for a few pixels towards the edge of the swath. We attribute these larger 115 | deviations to the inherent problem of interpolating terrain corrected geolocation 116 | without knowledge of the terrain model used. 117 | 118 | 119 | Below is an RGB image of the granule. 120 | 121 | .. image:: images/terra_20110821_1115_rednsow_int_thumb.png 122 | 123 | 124 | And here is the projected data. 125 | 126 | .. image:: images/terra_20110821_1115_maspalomas_rednsow_int_thumb.png 127 | 128 | 129 | 130 | 131 | Predefined functions for satellite data 132 | --------------------------------------- 133 | 134 | .. autofunction:: geotiepoints.metop20kmto1km 135 | .. autofunction:: geotiepoints.modis5kmto1km 136 | .. autofunction:: geotiepoints.modis1kmto500m 137 | .. autofunction:: geotiepoints.modis1kmto250m 138 | 139 | 140 | 141 | Indices and tables 142 | ================== 143 | 144 | * :ref:`genindex` 145 | * :ref:`modindex` 146 | * :ref:`search` 147 | 148 | -------------------------------------------------------------------------------- /geotiepoints/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2010-2018 Python-geotiepoints developers 2 | # 3 | # This program is free software: you can redistribute it and/or modify 4 | # it under the terms of the GNU General Public License as published by 5 | # the Free Software Foundation, either version 3 of the License, or 6 | # (at your option) any later version.
7 | # 8 | # This program is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11 | # GNU General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program. If not, see . 15 | """Interpolation of geographical tiepoints.""" 16 | 17 | from multiprocessing import Pool 18 | 19 | import numpy as np 20 | from numpy import arccos, arcsin, rad2deg, sign, sqrt 21 | from scipy.interpolate import RectBivariateSpline, splev, splrep 22 | 23 | from geotiepoints.geointerpolator import \ 24 | GeoInterpolator as SatelliteInterpolator 25 | 26 | EARTH_RADIUS = 6370997.0 27 | 28 | 29 | def get_scene_splits(nlines_swath, nlines_scan, n_cpus): 30 | """Calculate the line numbers where the swath will be split in smaller 31 | granules for parallel processing""" 32 | 33 | nscans = nlines_swath // nlines_scan 34 | if nscans < n_cpus: 35 | nscans_subscene = 1 36 | else: 37 | nscans_subscene = nscans // n_cpus 38 | nlines_subscene = nscans_subscene * nlines_scan 39 | 40 | return range(nlines_subscene, nlines_swath, nlines_subscene) 41 | 42 | 43 | def metop20kmto1km(lons20km, lats20km): 44 | """Getting 1km geolocation for metop avhrr from 20km tiepoints. 45 | """ 46 | cols20km = np.array([0] + list(range(4, 2048, 20)) + [2047]) 47 | cols1km = np.arange(2048) 48 | lines = lons20km.shape[0] 49 | rows20km = np.arange(lines) 50 | rows1km = np.arange(lines) 51 | 52 | along_track_order = 1 53 | cross_track_order = 3 54 | 55 | satint = SatelliteInterpolator((lons20km, lats20km), 56 | (rows20km, cols20km), 57 | (rows1km, cols1km), 58 | along_track_order, 59 | cross_track_order) 60 | return satint.interpolate() 61 | 62 | 63 | def modis5kmto1km(lons5km, lats5km): 64 | """Getting 1km geolocation for modis from 5km tiepoints. 65 | 66 | http://www.icare.univ-lille1.fr/tutorials/MODIS_geolocation 67 | """ 68 | cols5km = np.arange(2, 1354, 5) / 5.0 69 | cols1km = np.arange(1354) / 5.0 70 | lines = lons5km.shape[0] * 5 71 | rows5km = np.arange(2, lines, 5) / 5.0 72 | rows1km = np.arange(lines) / 5.0 73 | 74 | along_track_order = 1 75 | cross_track_order = 3 76 | 77 | satint = SatelliteInterpolator((lons5km, lats5km), 78 | (rows5km, cols5km), 79 | (rows1km, cols1km), 80 | along_track_order, 81 | cross_track_order, 82 | chunk_size=10) 83 | satint.fill_borders("y", "x") 84 | lons1km, lats1km = satint.interpolate() 85 | return lons1km, lats1km 86 | 87 | 88 | def _multi(fun, lons, lats, chunk_size, cores=1): 89 | """Work on multiple cores. 90 | """ 91 | pool = Pool(processes=cores) 92 | 93 | splits = get_scene_splits(lons.shape[0], chunk_size, cores) 94 | 95 | lons_parts = np.vsplit(lons, splits) 96 | lats_parts = np.vsplit(lats, splits) 97 | 98 | results = [pool.apply_async(fun, 99 | (lons_parts[i], 100 | lats_parts[i])) 101 | for i in range(len(lons_parts))] 102 | 103 | pool.close() 104 | pool.join() 105 | 106 | lons, lats = zip(*(res.get() for res in results)) 107 | 108 | return np.vstack(lons), np.vstack(lats) 109 | 110 | 111 | def modis1kmto500m(lons1km, lats1km, cores=1): 112 | """Getting 500m geolocation for modis from 1km tiepoints. 
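
    Example, a minimal sketch with synthetic input (array names and values are
    illustrative; only the shapes matter here):

        import numpy as np
        from geotiepoints import modis1kmto500m

        lons1km = np.zeros((40, 1354))
        lats1km = np.zeros((40, 1354))
        lons500m, lats500m = modis1kmto500m(lons1km, lats1km)
        # both outputs have shape (80, 2708): twice the rows and columns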
113 | 114 | http://www.icare.univ-lille1.fr/tutorials/MODIS_geolocation 115 | """ 116 | if cores > 1: 117 | return _multi(modis1kmto500m, lons1km, lats1km, 10, cores) 118 | 119 | cols1km = np.arange(1354) 120 | cols500m = np.arange(1354 * 2) / 2.0 121 | lines = lons1km.shape[0] 122 | rows1km = np.arange(lines) 123 | rows500m = (np.arange(lines * 2) - 0.5) / 2. 124 | 125 | along_track_order = 1 126 | cross_track_order = 3 127 | 128 | satint = SatelliteInterpolator((lons1km, lats1km), 129 | (rows1km, cols1km), 130 | (rows500m, cols500m), 131 | along_track_order, 132 | cross_track_order, 133 | chunk_size=20) 134 | satint.fill_borders("y", "x") 135 | lons500m, lats500m = satint.interpolate() 136 | return lons500m, lats500m 137 | 138 | 139 | def modis1kmto250m(lons1km, lats1km, cores=1): 140 | """Getting 250m geolocation for modis from 1km tiepoints. 141 | 142 | http://www.icare.univ-lille1.fr/tutorials/MODIS_geolocation 143 | """ 144 | if cores > 1: 145 | return _multi(modis1kmto250m, lons1km, lats1km, 10, cores) 146 | 147 | cols1km = np.arange(1354) 148 | cols250m = np.arange(1354 * 4) / 4.0 149 | 150 | along_track_order = 1 151 | cross_track_order = 3 152 | 153 | lines = lons1km.shape[0] 154 | rows1km = np.arange(lines) 155 | rows250m = (np.arange(lines * 4) - 1.5) / 4.0 156 | 157 | satint = SatelliteInterpolator((lons1km, lats1km), 158 | (rows1km, cols1km), 159 | (rows250m, cols250m), 160 | along_track_order, 161 | cross_track_order, 162 | chunk_size=40) 163 | satint.fill_borders("y", "x") 164 | lons250m, lats250m = satint.interpolate() 165 | 166 | return lons250m, lats250m 167 | 168 | 169 | from . import version 170 | __version__ = version.get_versions()['version'] 171 | 172 | -------------------------------------------------------------------------------- /geotiepoints/_modis_utils.pxd: -------------------------------------------------------------------------------- 1 | # cython: language_level=3, boundscheck=False, cdivision=True, wraparound=False, initializedcheck=False, nonecheck=False 2 | cimport numpy as np 3 | 4 | ctypedef fused floating: 5 | np.float32_t 6 | np.float64_t 7 | 8 | cdef void lonlat2xyz( 9 | floating[:, ::1] lons, 10 | floating[:, ::1] lats, 11 | floating[:, :, ::1] xyz, 12 | ) noexcept nogil 13 | 14 | cdef void xyz2lonlat( 15 | floating[:, :, ::1] xyz, 16 | floating[:, ::1] lons, 17 | floating[:, ::1] lats, 18 | bint low_lat_z=*, 19 | floating thr=*, 20 | ) noexcept nogil 21 | 22 | cdef floating rad2deg(floating x) noexcept nogil 23 | cdef floating deg2rad(floating x) noexcept nogil 24 | -------------------------------------------------------------------------------- /geotiepoints/_modis_utils.pyx: -------------------------------------------------------------------------------- 1 | # cython: language_level=3, boundscheck=False, cdivision=True, wraparound=False, initializedcheck=False, nonecheck=False 2 | from functools import wraps 3 | 4 | cimport cython 5 | cimport numpy as np 6 | from libc.math cimport asin, sin, cos, sqrt, acos, M_PI 7 | import numpy as np 8 | 9 | np.import_array() 10 | 11 | try: 12 | import dask.array as da 13 | except ImportError: 14 | # if dask can't be imported then we aren't going to be given dask arrays 15 | da = None 16 | 17 | try: 18 | import xarray as xr 19 | except ImportError: 20 | xr = None 21 | 22 | 23 | DEF EARTH_RADIUS = 6370997.0 24 | 25 | 26 | cdef void lonlat2xyz( 27 | floating[:, ::1] lons, 28 | floating[:, ::1] lats, 29 | floating[:, :, ::1] xyz, 30 | ) noexcept nogil: 31 | """Convert lons and lats to cartesian coordinates.""" 
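    # Each (lon, lat) pair is placed on a sphere of radius EARTH_RADIUS:
    #   x = R * cos(lat) * cos(lon)
    #   y = R * cos(lat) * sin(lon)
    #   z = R * sin(lat)
    # with the angles first converted from degrees to radians.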
32 | cdef Py_ssize_t i, j, k 33 | cdef floating lon_rad, lat_rad 34 | for i in range(lons.shape[0]): 35 | for j in range(lons.shape[1]): 36 | lon_rad = deg2rad(lons[i, j]) 37 | lat_rad = deg2rad(lats[i, j]) 38 | xyz[i, j, 0] = EARTH_RADIUS * cos(lat_rad) * cos(lon_rad) 39 | xyz[i, j, 1] = EARTH_RADIUS * cos(lat_rad) * sin(lon_rad) 40 | xyz[i, j, 2] = EARTH_RADIUS * sin(lat_rad) 41 | 42 | 43 | cdef void xyz2lonlat( 44 | floating[:, :, ::1] xyz, 45 | floating[:, ::1] lons, 46 | floating[:, ::1] lats, 47 | bint low_lat_z=True, 48 | floating thr=0.8) noexcept nogil: 49 | """Get longitudes from cartesian coordinates.""" 50 | cdef Py_ssize_t i, j 51 | cdef np.float64_t x, y, z 52 | for i in range(xyz.shape[0]): 53 | for j in range(xyz.shape[1]): 54 | # 64-bit precision matters apparently 55 | x = xyz[i, j, 0] 56 | y = xyz[i, j, 1] 57 | z = xyz[i, j, 2] 58 | lons[i, j] = rad2deg(acos(x / sqrt(x ** 2 + y ** 2))) * _sign(y) 59 | # if we are at low latitudes - small z, then get the 60 | # latitudes only from z. If we are at high latitudes (close to the poles) 61 | # then derive the latitude using x and y: 62 | if low_lat_z and (z < thr * EARTH_RADIUS) and (z > -1.0 * thr * EARTH_RADIUS): 63 | lats[i, j] = 90 - rad2deg(acos(z / EARTH_RADIUS)) 64 | else: 65 | lats[i, j] = _sign(z) * (90 - rad2deg(asin(sqrt(x ** 2 + y ** 2) / EARTH_RADIUS))) 66 | 67 | 68 | cdef inline int _sign(floating x) noexcept nogil: 69 | return 1 if x > 0 else (-1 if x < 0 else 0) 70 | 71 | 72 | cdef inline floating rad2deg(floating x) noexcept nogil: 73 | return x * (180.0 / M_PI) 74 | 75 | 76 | cdef inline floating deg2rad(floating x) noexcept nogil: 77 | return x * (M_PI / 180.0) 78 | 79 | 80 | def rows_per_scan_for_resolution(res): 81 | return { 82 | 5000: 2, 83 | 1000: 10, 84 | 500: 20, 85 | 250: 40, 86 | }[res] 87 | 88 | 89 | def scanline_mapblocks(func): 90 | """Convert dask array inputs to appropriate map_blocks calls. 91 | 92 | This function, applied as a decorator, will call the wrapped function 93 | using dask's ``map_blocks``. It will rechunk dask array inputs when 94 | necessary to make sure that the input chunks are entire scanlines to 95 | avoid incorrect interpolation. 
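
    For example (a sketch with illustrative array names), a decorated
    interpolation function is called with 2D arrays plus the two required
    resolution keyword arguments:

        new_lons, new_lats = interp_func(lons, lats,
                                         coarse_resolution=1000,
                                         fine_resolution=500)

    The inputs may be numpy arrays, dask arrays, or dask-backed xarray
    DataArrays; dask inputs are rechunked to whole scans before ``map_blocks``
    is applied, and DataArray inputs produce DataArray outputs.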
96 | 97 | """ 98 | @wraps(func) 99 | def _wrapper(*args, coarse_resolution=None, fine_resolution=None, **kwargs): 100 | if coarse_resolution is None or fine_resolution is None: 101 | raise ValueError("'coarse_resolution' and 'fine_resolution' are required keyword arguments.") 102 | first_arr = [arr for arr in args if hasattr(arr, "ndim")][0] 103 | if first_arr.ndim != 2 or first_arr.ndim != 2: 104 | raise ValueError("Expected 2D input arrays.") 105 | if hasattr(first_arr, "compute"): 106 | # assume it is dask or xarray with dask, ensure proper chunk size 107 | # if DataArray get just the dask array 108 | dask_args = _extract_dask_arrays_from_args(args) 109 | rows_per_scan = rows_per_scan_for_resolution(coarse_resolution) 110 | rechunked_args = _rechunk_dask_arrays_if_needed(dask_args, rows_per_scan) 111 | results = _call_map_blocks_interp( 112 | func, 113 | coarse_resolution, 114 | fine_resolution, 115 | *rechunked_args, 116 | **kwargs 117 | ) 118 | if hasattr(first_arr, "dims"): 119 | # recreate DataArrays 120 | results = _results_to_data_arrays(first_arr.dims, *results) 121 | return results 122 | return func( 123 | *args, 124 | coarse_resolution=coarse_resolution, 125 | fine_resolution=fine_resolution, 126 | **kwargs 127 | ) 128 | 129 | return _wrapper 130 | 131 | 132 | def _extract_dask_arrays_from_args(args): 133 | return [arr_obj.data if hasattr(arr_obj, "dims") else arr_obj for arr_obj in args] 134 | 135 | 136 | def _call_map_blocks_interp(func, coarse_resolution, fine_resolution, *args, **kwargs): 137 | first_arr = [arr for arr in args if hasattr(arr, "ndim")][0] 138 | res_factor = coarse_resolution // fine_resolution 139 | new_row_chunks = tuple(x * res_factor for x in first_arr.chunks[0]) 140 | fine_pixels_per_1km = {250: 4, 500: 2, 1000: 1}[fine_resolution] 141 | fine_scan_width = 1354 * fine_pixels_per_1km 142 | new_col_chunks = (fine_scan_width,) 143 | wrapped_func = _map_blocks_handler(func) 144 | res = da.map_blocks(wrapped_func, *args, 145 | coarse_resolution=coarse_resolution, 146 | fine_resolution=fine_resolution, 147 | **kwargs, 148 | new_axis=[0], 149 | chunks=(2, new_row_chunks, new_col_chunks), 150 | dtype=first_arr.dtype, 151 | meta=np.empty((2, 2, 2), dtype=first_arr.dtype)) 152 | return tuple(res[idx] for idx in range(res.shape[0])) 153 | 154 | 155 | def _results_to_data_arrays(dims, *results): 156 | new_results = [] 157 | for result in results: 158 | if not isinstance(result, da.Array): 159 | continue 160 | data_arr = xr.DataArray(result, dims=dims) 161 | new_results.append(data_arr) 162 | return new_results 163 | 164 | 165 | def _rechunk_dask_arrays_if_needed(args, rows_per_scan: int): 166 | # take current chunk size and get a relatively similar chunk size 167 | first_arr = [arr for arr in args if hasattr(arr, "ndim")][0] 168 | row_chunks = first_arr.chunks[0] 169 | col_chunks = first_arr.chunks[1] 170 | num_rows = first_arr.shape[0] 171 | num_cols = first_arr.shape[1] 172 | good_row_chunks = all(x % rows_per_scan == 0 for x in row_chunks) 173 | good_col_chunks = len(col_chunks) == 1 and col_chunks[0] != num_cols 174 | all_orig_chunks = [arr.chunks for arr in args if hasattr(arr, "chunks")] 175 | 176 | if num_rows % rows_per_scan != 0: 177 | raise ValueError("Input longitude/latitude data does not consist of " 178 | "whole scans (10 rows per scan).") 179 | all_same_chunks = all( 180 | all_orig_chunks[0] == some_chunks 181 | for some_chunks in all_orig_chunks[1:] 182 | ) 183 | if good_row_chunks and good_col_chunks and all_same_chunks: 184 | return args 185 | 186 | 
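    # Otherwise rechunk so that every chunk covers a whole number of scans and
    # the full swath width; e.g. with rows_per_scan=10 and a first row-chunk of
    # 256 rows, the row chunks become 250 rows (one chunk along the columns).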
new_row_chunks = (row_chunks[0] // rows_per_scan) * rows_per_scan 187 | new_args = [arr.rechunk((new_row_chunks, -1)) if hasattr(arr, "chunks") else arr for arr in args] 188 | return new_args 189 | 190 | 191 | def _map_blocks_handler(func): 192 | @wraps(func) 193 | def _map_blocks_wrapper(*args, **kwargs): 194 | results = func(*args, **kwargs) 195 | return np.concatenate( 196 | tuple(result[np.newaxis] for result in results), 197 | axis=0) 198 | return _map_blocks_wrapper 199 | 200 | 201 | -------------------------------------------------------------------------------- /geotiepoints/_simple_modis_interpolator.pyx: -------------------------------------------------------------------------------- 1 | # cython: language_level=3, boundscheck=False, cdivision=True, wraparound=False, initializedcheck=False, nonecheck=False 2 | cimport cython 3 | 4 | from ._modis_utils cimport floating 5 | from ._modis_utils cimport lonlat2xyz, xyz2lonlat 6 | from ._modis_utils import rows_per_scan_for_resolution 7 | cimport numpy as np 8 | import numpy as np 9 | from scipy.ndimage import map_coordinates 10 | 11 | np.import_array() 12 | 13 | def interpolate_geolocation_cartesian( 14 | np.ndarray[floating, ndim=2] lon_array, 15 | np.ndarray[floating, ndim=2] lat_array, 16 | unsigned int coarse_resolution, 17 | unsigned int fine_resolution): 18 | lon_array = np.ascontiguousarray(lon_array) 19 | lat_array = np.ascontiguousarray(lat_array) 20 | cdef unsigned int rows_per_scan = rows_per_scan_for_resolution(coarse_resolution) 21 | cdef unsigned int res_factor = coarse_resolution // fine_resolution 22 | cdef Py_ssize_t num_rows = lon_array.shape[0] 23 | cdef Py_ssize_t num_cols = lon_array.shape[1] 24 | cdef unsigned int num_scans = num_rows // rows_per_scan 25 | 26 | # SciPy's map_coordinates requires the x/y dimension to be first 27 | cdef np.ndarray[floating, ndim=3] coordinates = np.empty( 28 | (2, res_factor * rows_per_scan, res_factor * num_cols), dtype=lon_array.dtype) 29 | cdef floating[:, :, ::1] coordinates_view = coordinates 30 | _compute_yx_coordinate_arrays(res_factor, coordinates_view) 31 | 32 | cdef np.ndarray[floating, ndim=3] xyz_result = np.empty( 33 | (res_factor * rows_per_scan, num_cols * res_factor, 3), dtype=lon_array.dtype) 34 | cdef floating[:, :, ::1] xyz_result_view = xyz_result 35 | cdef np.ndarray[floating, ndim=3] xyz_in = np.empty( 36 | (rows_per_scan, num_cols, 3), dtype=lon_array.dtype) 37 | cdef floating[:, :, ::1] xyz_in_view = xyz_in 38 | cdef floating[:, ::1] lon_in_view = lon_array 39 | cdef floating[:, ::1] lat_in_view = lat_array 40 | 41 | cdef np.ndarray[floating, ndim=2] new_lons = np.empty((res_factor * num_rows, res_factor * num_cols), 42 | dtype=lon_array.dtype) 43 | cdef np.ndarray[floating, ndim=2] new_lats = np.empty((res_factor * num_rows, res_factor * num_cols), 44 | dtype=lon_array.dtype) 45 | cdef floating[:, ::1] new_lons_view = new_lons 46 | cdef floating[:, ::1] new_lats_view = new_lats 47 | 48 | # Interpolate each scan, one at a time, otherwise the math doesn't work well 49 | cdef Py_ssize_t scan_idx, j0, j1, k0, k1, comp_index 50 | with nogil: 51 | for scan_idx in range(num_scans): 52 | # Calculate indexes 53 | j0 = rows_per_scan * scan_idx 54 | j1 = j0 + rows_per_scan 55 | k0 = rows_per_scan * res_factor * scan_idx 56 | k1 = k0 + rows_per_scan * res_factor 57 | lonlat2xyz(lon_in_view[j0:j1, :], lat_in_view[j0:j1, :], xyz_in_view) 58 | 59 | _compute_interpolated_xyz_scan( 60 | res_factor, coordinates_view, xyz_in_view, 61 | xyz_result_view) 62 | 63 | 
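            # Convert the interpolated cartesian coordinates of this scan back
            # to longitudes/latitudes (degrees), writing into output rows k0:k1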
xyz2lonlat(xyz_result_view, new_lons_view[k0:k1], new_lats_view[k0:k1], low_lat_z=True) 64 | return new_lons, new_lats 65 | 66 | 67 | @cython.boundscheck(False) 68 | @cython.cdivision(True) 69 | @cython.wraparound(False) 70 | @cython.initializedcheck(False) 71 | cdef void _compute_yx_coordinate_arrays( 72 | unsigned int res_factor, 73 | floating[:, :, ::1] coordinates, 74 | ) noexcept nogil: 75 | cdef Py_ssize_t i, j 76 | for j in range(coordinates.shape[1]): 77 | for i in range(coordinates.shape[2]): 78 | # y coordinate - 0.375 for 250m, 0.25 for 500m 79 | coordinates[0, j, i] = j * (1.0 / res_factor) - (res_factor * (1.0 / 16) + (1.0 / 8)) 80 | # x coordinate 81 | coordinates[1, j, i] = i * (1.0 / res_factor) 82 | 83 | 84 | @cython.boundscheck(False) 85 | cdef void _compute_interpolated_xyz_scan( 86 | unsigned int res_factor, 87 | floating[:, :, ::1] coordinates_view, 88 | floating[:, :, ::1] xyz_input_view, 89 | floating[:, :, ::1] xyz_result_view, 90 | ) noexcept nogil: 91 | cdef Py_ssize_t comp_index 92 | cdef floating[:, :] input_view, result_view 93 | with gil: 94 | for comp_index in range(3): 95 | input_view = xyz_input_view[:, :, comp_index] 96 | result_view = xyz_result_view[:, :, comp_index] 97 | _call_map_coordinates( 98 | input_view, 99 | coordinates_view, 100 | result_view, 101 | ) 102 | 103 | if res_factor == 4: 104 | for comp_index in range(3): 105 | result_view = xyz_result_view[:, :, comp_index] 106 | _extrapolate_xyz_rightmost_columns(result_view, 3) 107 | _interpolate_xyz_250( 108 | result_view, 109 | coordinates_view, 110 | ) 111 | else: 112 | for comp_index in range(3): 113 | result_view = xyz_result_view[:, :, comp_index] 114 | _extrapolate_xyz_rightmost_columns(result_view, 1) 115 | _interpolate_xyz_500( 116 | result_view, 117 | coordinates_view, 118 | ) 119 | 120 | 121 | cdef void _call_map_coordinates( 122 | floating[:, :] nav_array_view, 123 | floating[:, :, ::1] coordinates_view, 124 | floating[:, :] result_view, 125 | ): 126 | cdef np.ndarray[floating, ndim=2] nav_array = np.asarray(nav_array_view) 127 | cdef np.ndarray[floating, ndim=3] coordinates_array = np.asarray(coordinates_view) 128 | cdef np.ndarray[floating, ndim=2] result_array = np.asarray(result_view) 129 | # Use bilinear interpolation for all 250 meter pixels 130 | map_coordinates(nav_array, coordinates_array, 131 | output=result_array, 132 | order=1, mode='nearest') 133 | 134 | 135 | @cython.boundscheck(False) 136 | @cython.cdivision(True) 137 | @cython.wraparound(False) 138 | @cython.initializedcheck(False) 139 | cdef void _extrapolate_xyz_rightmost_columns( 140 | floating[:, :] result_view, 141 | int num_columns, 142 | ) noexcept nogil: 143 | cdef Py_ssize_t row_idx, col_offset 144 | cdef floating last_interp_col_diff 145 | for row_idx in range(result_view.shape[0]): 146 | last_interp_col_diff = result_view[row_idx, result_view.shape[1] - num_columns - 1] - \ 147 | result_view[row_idx, result_view.shape[1] - num_columns - 2] 148 | for col_offset in range(num_columns): 149 | # map_coordinates repeated the last columns value, we now add more to it as an "extrapolation" 150 | result_view[row_idx, result_view.shape[1] - num_columns + col_offset] += last_interp_col_diff * (col_offset + 1) 151 | 152 | 153 | @cython.boundscheck(False) 154 | @cython.cdivision(True) 155 | @cython.wraparound(False) 156 | @cython.initializedcheck(False) 157 | cdef void _interpolate_xyz_250( 158 | floating[:, :] result_view, 159 | floating[:, :, ::1] coordinates_view, 160 | ) noexcept nogil: 161 | cdef Py_ssize_t col_idx 
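    # m, b: slope and intercept of the along-track linear fit used to
    # extrapolate the first two and last two fine rows of each scan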
162 | cdef floating m, b 163 | cdef floating[:] result_col_view 164 | cdef floating[:, ::1] y_coordinates = coordinates_view[0] 165 | for col_idx in range(result_view.shape[1]): 166 | result_col_view = result_view[:, col_idx] 167 | # Use linear extrapolation for the first two 250 meter pixels along track 168 | m = _calc_slope_250(result_col_view, 169 | y_coordinates, 170 | 2) 171 | b = _calc_offset_250(result_col_view, 172 | y_coordinates, 173 | m, 174 | 2) 175 | result_view[0, col_idx] = m * y_coordinates[0, 0] + b 176 | result_view[1, col_idx] = m * y_coordinates[1, 0] + b 177 | 178 | # Use linear extrapolation for the last two 250 meter pixels along track 179 | # m = (result_array[k0 + 37, :] - result_array[k0 + 34, :]) / (y[37, 0] - y[34, 0]) 180 | # b = result_array[k0 + 37, :] - m * y[37, 0] 181 | m = _calc_slope_250(result_col_view, 182 | y_coordinates, 183 | 34) 184 | b = _calc_offset_250(result_col_view, 185 | y_coordinates, 186 | m, 187 | 34) 188 | result_view[38, col_idx] = m * y_coordinates[38, 0] + b 189 | result_view[39, col_idx] = m * y_coordinates[39, 0] + b 190 | 191 | 192 | @cython.boundscheck(False) 193 | @cython.cdivision(True) 194 | @cython.wraparound(False) 195 | @cython.initializedcheck(False) 196 | cdef void _interpolate_xyz_500( 197 | floating[:, :] result_view, 198 | floating[:, :, ::1] coordinates_view, 199 | ) noexcept nogil: 200 | cdef Py_ssize_t col_idx 201 | cdef floating m, b 202 | for col_idx in range(result_view.shape[1]): 203 | # Use linear extrapolation for the first two 250 meter pixels along track 204 | m = _calc_slope_500( 205 | result_view[:, col_idx], 206 | coordinates_view[0], 207 | 1) 208 | b = _calc_offset_500( 209 | result_view[:, col_idx], 210 | coordinates_view[0], 211 | m, 212 | 1) 213 | result_view[0, col_idx] = m * coordinates_view[0, 0, 0] + b 214 | 215 | # Use linear extrapolation for the last two 250 meter pixels along track 216 | m = _calc_slope_500( 217 | result_view[:, col_idx], 218 | coordinates_view[0], 219 | 17) 220 | b = _calc_offset_500( 221 | result_view[:, col_idx], 222 | coordinates_view[0], 223 | m, 224 | 17) 225 | result_view[19, col_idx] = m * coordinates_view[0, 19, 0] + b 226 | 227 | 228 | @cython.boundscheck(False) 229 | @cython.cdivision(True) 230 | @cython.wraparound(False) 231 | @cython.initializedcheck(False) 232 | cdef inline floating _calc_slope_250( 233 | floating[:] result_view, 234 | floating[:, ::1] y, 235 | Py_ssize_t offset, 236 | ) noexcept nogil: 237 | return (result_view[offset + 3] - result_view[offset]) / \ 238 | (y[offset + 3, 0] - y[offset, 0]) 239 | 240 | 241 | @cython.boundscheck(False) 242 | @cython.cdivision(True) 243 | @cython.wraparound(False) 244 | @cython.initializedcheck(False) 245 | cdef inline floating _calc_offset_250( 246 | floating[:] result_view, 247 | floating[:, ::1] y, 248 | floating m, 249 | Py_ssize_t offset, 250 | ) noexcept nogil: 251 | return result_view[offset + 3] - m * y[offset + 3, 0] 252 | 253 | 254 | @cython.boundscheck(False) 255 | @cython.cdivision(True) 256 | @cython.wraparound(False) 257 | @cython.initializedcheck(False) 258 | cdef inline floating _calc_slope_500( 259 | floating[:] result_view, 260 | floating[:, ::1] y, 261 | Py_ssize_t offset, 262 | ) noexcept nogil: 263 | return (result_view[offset + 1] - result_view[offset]) / \ 264 | (y[offset + 1, 0] - y[offset, 0]) 265 | 266 | 267 | @cython.boundscheck(False) 268 | @cython.cdivision(True) 269 | @cython.wraparound(False) 270 | @cython.initializedcheck(False) 271 | cdef inline floating _calc_offset_500( 272 | 
floating[:] result_view, 273 | floating[:, ::1] y, 274 | floating m, 275 | Py_ssize_t offset, 276 | ) noexcept nogil: 277 | return result_view[offset + 1] - m * y[offset + 1, 0] 278 | -------------------------------------------------------------------------------- /geotiepoints/basic_interpolator.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | # from memory_profiler import profile 3 | from pandas import DataFrame, Series 4 | 5 | class BasicSatelliteInterpolator(object): 6 | """ 7 | Handles interpolation of geolocation data from a grid of tie points. 8 | 9 | Currently it is assumed that tie points reach out to the edges of the 10 | tiepoint grid. Methods for extrapolation need to be added if needed. 11 | 12 | Uses numpy and pandas. 13 | 14 | The constructor gets the tiepointed lat/lon data as *lat_data* and 15 | *lon_data*. 16 | """ 17 | 18 | def __init__(self, cols, rows, lat_data, lon_data): 19 | self.row_indices = rows 20 | self.col_indices = cols 21 | 22 | self.lon_tiepoint = lon_data 23 | self.lat_tiepoint = lat_data 24 | 25 | self.longitudes = None 26 | self.latitudes = None 27 | 28 | 29 | def _interp(self, data): 30 | """The interpolation method implemented here is a kind of bilinear 31 | interpolation. The input *data* field is first interpolated along the 32 | rows and subsequently along its columns. 33 | 34 | The final size of the interpolated *data* field is determined by the 35 | last indices in self.row_indices and self.col_indices. 36 | """ 37 | row_interpol_data = self._interp_axis(data, 0) 38 | interpol_data = self._interp_axis(row_interpol_data, 1) 39 | 40 | return interpol_data 41 | 42 | 43 | # @profile 44 | def _interp_axis(self, data, axis): 45 | """The *data* field contains the data to be interpolated. It is 46 | expected that values reach out to the *data* boundaries. 47 | With *axis*=0 this method interpolates along rows and with *axis*=1 it 48 | interpolates along columns. 49 | 50 | For column mode the *data* input is transposed before interpolation 51 | and subsequently transposed back. 52 | """ 53 | if axis == 0: 54 | return self._pandas_interp(data, self.row_indices) 55 | 56 | if axis == 1: 57 | data_transposed = data.to_numpy().T  # DataFrame.as_matrix() was removed from pandas 58 | data_interpol_transposed = self._pandas_interp(data_transposed, 59 | self.col_indices) 60 | data_interpol = data_interpol_transposed.to_numpy().T 61 | 62 | return data_interpol 63 | 64 | 65 | def _pandas_interp(self, data, indices): 66 | """The actual interpolation, based on the following stackoverflow 67 | entry: http://stackoverflow.com/a/10465162 68 | """ 69 | new_index = np.arange(indices[-1] + 1) 70 | 71 | data_frame = DataFrame(data, index=indices) 72 | data_frame_reindexed = data_frame.reindex(new_index) 73 | data_interpol = data_frame_reindexed.apply(Series.interpolate) 74 | 75 | del new_index 76 | del data_frame 77 | del data_frame_reindexed 78 | 79 | return data_interpol 80 | 81 | 82 | def interpolate(self): 83 | """Do the interpolation and return the resulting latitudes and longitudes.
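
        Example, a minimal sketch with a tiny synthetic tie point grid (the
        index vectors and lat/lon values are illustrative):

            import numpy as np
            cols = np.array([0, 5, 10])
            rows = np.array([0, 2, 4])
            lat_grid = np.arange(9.0).reshape((3, 3)) - 4.0
            lon_grid = np.arange(9.0).reshape((3, 3)) + 30.0
            interp = BasicSatelliteInterpolator(cols, rows, lat_grid, lon_grid)
            lats, lons = interp.interpolate()  # both have shape (5, 11)

        Note that the latitudes are returned first.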
84 | """ 85 | self.latitudes = self._interp(self.lat_tiepoint) 86 | self.longitudes = self._interp(self.lon_tiepoint) 87 | 88 | return self.latitudes, self.longitudes -------------------------------------------------------------------------------- /geotiepoints/geointerpolator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # Copyright (c) 2013-2021 Python-geotiepoints developers 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | """Geographical interpolation (lon/lats).""" 19 | 20 | import numpy as np 21 | from geotiepoints.interpolator import Interpolator, MultipleGridInterpolator, MultipleSplineInterpolator 22 | 23 | 24 | EARTH_RADIUS = 6370997.0 25 | 26 | 27 | class GeoInterpolator(Interpolator): 28 | """Handles interpolation of geolocation from a grid of tie points. 29 | 30 | It is 31 | preferable to have tie points out to the edges of the tiepoint grid, but 32 | a method is provided to extrapolate the tiepoints linearly to the borders 33 | of the grid. The extrapolation is done automatically if it seems necessary. 34 | 35 | Uses numpy, scipy, and optionally pyresample. 36 | 37 | The constructor takes in the tiepointed data as *data*, the 38 | *tiepoint_grid* and the desired *final_grid*. As optional arguments, one 39 | can provide *kx_* and *ky_* as interpolation orders (in x and y directions 40 | respectively), and the *chunksize* if the data has to be handled by pieces 41 | along the y axis (this affects how the extrapolator behaves). If 42 | *chunksize* is set, don't forget to adjust the interpolation orders 43 | accordingly: the interpolation is indeed done globally (not chunkwise). 44 | 45 | """ 46 | 47 | def __init__(self, lon_lat_data, *args, **kwargs): 48 | try: 49 | # Maybe it's a pyresample object ?
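            # e.g. a pyresample geometry definition, which exposes ``lons``,
            # ``lats`` and a ``get_cartesian_coords()`` method used below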
50 | self.lon_tiepoint = lon_lat_data.lons 51 | self.lat_tiepoint = lon_lat_data.lats 52 | xyz = lon_lat_data.get_cartesian_coords() 53 | tie_data = [xyz[:, :, 0], xyz[:, :, 1], xyz[:, :, 2]] 54 | except AttributeError: 55 | self.lon_tiepoint = lon_lat_data[0] 56 | self.lat_tiepoint = lon_lat_data[1] 57 | x__, y__, z__ = lonlat2xyz(self.lon_tiepoint, self.lat_tiepoint) 58 | tie_data = [x__, y__, z__] 59 | 60 | super().__init__(tie_data, *args, **kwargs) 61 | 62 | def interpolate(self): 63 | """Run the interpolation.""" 64 | newx, newy, newz = super().interpolate() 65 | lon, lat = xyz2lonlat(newx, newy, newz) 66 | return lon, lat 67 | 68 | 69 | def lonlat2xyz(lons, lats, radius=EARTH_RADIUS): 70 | """Convert lons and lats to cartesian coordinates.""" 71 | lons_rad = np.deg2rad(lons) 72 | lats_rad = np.deg2rad(lats) 73 | x_coords = radius * np.cos(lats_rad) * np.cos(lons_rad) 74 | y_coords = radius * np.cos(lats_rad) * np.sin(lons_rad) 75 | z_coords = radius * np.sin(lats_rad) 76 | return x_coords, y_coords, z_coords 77 | 78 | 79 | def xyz2lonlat(x__, y__, z__, radius=EARTH_RADIUS, thr=0.8, low_lat_z=True): 80 | """Get longitudes from cartesian coordinates.""" 81 | lons = np.rad2deg(np.arccos(x__ / np.sqrt(x__ ** 2 + y__ ** 2))) * np.sign(y__) 82 | lats = np.sign(z__) * (90 - np.rad2deg(np.arcsin(np.sqrt(x__ ** 2 + y__ ** 2) / radius))) 83 | if low_lat_z: 84 | # if we are at low latitudes - small z, then get the 85 | # latitudes only from z. If we are at high latitudes (close to the poles) 86 | # then derive the latitude using x and y: 87 | normalized_z = z__ / radius 88 | lat_mask_cond = abs(normalized_z) < thr 89 | lat_z_only = 90 - np.rad2deg(np.arccos(normalized_z)) 90 | lats = np.where(lat_mask_cond, lat_z_only, lats) 91 | 92 | return lons, lats 93 | 94 | 95 | def _work_with_lonlats(klass): 96 | """Adapt MultipleInterpolator classes to work with geographical coordinates.""" 97 | 98 | class GeoKlass(klass): 99 | 100 | def __init__(self, tie_points, *data, **interpolator_init_kwargs): 101 | """Set up the interpolator.""" 102 | data = to_xyz(data) 103 | super().__init__(tie_points, *data, **interpolator_init_kwargs) 104 | 105 | def interpolate(self, fine_points, **interpolator_call_kwargs): 106 | """Interpolate to *fine_points*.""" 107 | x, y, z = super().interpolate(fine_points, **interpolator_call_kwargs) 108 | return xyz2lonlat(x, y, z) 109 | 110 | return GeoKlass 111 | 112 | 113 | def to_xyz(data): 114 | """Convert data to cartesian. 115 | 116 | Data can be a class with a `get_cartesian_coords` method, or a tuple of (lon, lat) arrays. 
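
    For example, ``to_xyz((lons, lats))`` returns the three cartesian
    coordinate arrays ``(x, y, z)`` on a sphere of radius EARTH_RADIUS.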
117 | """ 118 | if len(data) == 1: 119 | xyz = data[0].get_cartesian_coords() 120 | data = [xyz[:, :, 0], xyz[:, :, 1], xyz[:, :, 2]] 121 | elif len(data) == 2: 122 | data = lonlat2xyz(*data) 123 | else: 124 | raise ValueError("Either pass lon/lats or a pyresample definition.") 125 | return data 126 | 127 | 128 | GeoGridInterpolator = _work_with_lonlats(MultipleGridInterpolator) 129 | GeoSplineInterpolator = _work_with_lonlats(MultipleSplineInterpolator) 130 | -------------------------------------------------------------------------------- /geotiepoints/interpolator.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013-2018 Python-geotiepoints developers 2 | # 3 | # This program is free software: you can redistribute it and/or modify 4 | # it under the terms of the GNU General Public License as published by 5 | # the Free Software Foundation, either version 3 of the License, or 6 | # (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11 | # GNU General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program. If not, see . 15 | """Generic interpolation routines.""" 16 | 17 | from abc import ABC, abstractmethod 18 | from functools import partial 19 | import numpy as np 20 | from scipy.interpolate import RectBivariateSpline, splev, splrep, RegularGridInterpolator 21 | 22 | 23 | def generic_modis5kmto1km(*data5km): 24 | """Get 1km data for modis from 5km tiepoints.""" 25 | cols5km = np.arange(2, 1354, 5) 26 | cols1km = np.arange(1354) 27 | lines = data5km[0].shape[0] * 5 28 | rows5km = np.arange(2, lines, 5) 29 | rows1km = np.arange(lines) 30 | 31 | along_track_order = 1 32 | cross_track_order = 3 33 | 34 | satint = Interpolator(list(data5km), 35 | (rows5km, cols5km), 36 | (rows1km, cols1km), 37 | along_track_order, 38 | cross_track_order, 39 | chunk_size=10) 40 | satint.fill_borders("y", "x") 41 | return satint.interpolate() 42 | 43 | # NOTE: extrapolate on a sphere ? 44 | 45 | 46 | def _linear_extrapolate(pos, data, xev): 47 | """Perform linear extrapolation. 48 | 49 | >>> import numpy as np 50 | >>> pos = np.array([1, 2]) 51 | >>> data = np.arange(10).reshape((2, 5), order="F") 52 | >>> xev = 5 53 | >>> retv = _linear_extrapolate(pos, data, xev) 54 | >>> print([val for val in retv]) 55 | [4.0, 6.0, 8.0, 10.0, 12.0] 56 | >>> xev = 0 57 | >>> retv = _linear_extrapolate(pos, data, xev) 58 | >>> print([val for val in retv]) 59 | [-1.0, 1.0, 3.0, 5.0, 7.0] 60 | """ 61 | if len(data) != 2 or len(pos) != 2: 62 | raise ValueError("len(pos) and the number of lines of data" 63 | " must be 2.") 64 | 65 | return data[1] + ((xev - pos[1]) / (1.0 * (pos[0] - pos[1])) * (data[0] - data[1])) 66 | 67 | 68 | class Interpolator: 69 | """Handles interpolation of data from a grid of tie points. 70 | 71 | It is preferable to have tie-points out till the edges if the tiepoint grid, 72 | but a method is provided to extrapolate linearly the tiepoints to the borders of the 73 | grid. The extrapolation is done automatically if it seems necessary. 74 | 75 | Uses numpy and scipy. 76 | 77 | The constructor takes in the tiepointed data as *data*, the *tiepoint_grid* and the desired *final_grid*. 
As 78 | optional arguments, one can provide *kx_* and *ky_* as interpolation orders (in x and y directions respectively), 79 | and the *chunksize* if the data has to be handled by pieces along the y axis (this affects how the extrapolator 80 | behaves). If *chunksize* is set, don't forget to adjust the interpolation orders accordingly: the interpolation 81 | is indeed done globaly (not chunkwise). 82 | """ 83 | 84 | def __init__(self, data, tiepoint_grid, final_grid, 85 | kx_=1, ky_=1, chunk_size=0): 86 | self.row_indices = tiepoint_grid[0] 87 | self.col_indices = tiepoint_grid[1] 88 | self.hrow_indices = final_grid[0] 89 | self.hcol_indices = final_grid[1] 90 | self.chunk_size = chunk_size 91 | if not isinstance(data, (tuple, list)): 92 | self.tie_data = [data] 93 | else: 94 | self.tie_data = list(data) 95 | 96 | self.new_data = [[] for _ in self.tie_data] 97 | self.kx_, self.ky_ = kx_, ky_ 98 | 99 | def fill_borders(self, *args): 100 | """Extrapolate tiepoint lons and lats to fill in the border of the chunks.""" 101 | to_run = [] 102 | cases = {"y": self._fill_row_borders, 103 | "x": self._fill_col_borders} 104 | for dim in args: 105 | try: 106 | to_run.append(cases[dim]) 107 | except KeyError: 108 | raise NameError("Unrecognized dimension: " + str(dim)) 109 | 110 | for fun in to_run: 111 | fun() 112 | 113 | def _extrapolate_cols(self, data, first=True, last=True): 114 | """Extrapolate the column of data, to get the first and last together with the data.""" 115 | if first: 116 | pos = self.col_indices[:2] 117 | first_column = _linear_extrapolate(pos, 118 | (data[:, 0], data[:, 1]), 119 | self.hcol_indices[0]) 120 | if last: 121 | pos = self.col_indices[-2:] 122 | last_column = _linear_extrapolate(pos, 123 | (data[:, -2], data[:, -1]), 124 | self.hcol_indices[-1]) 125 | 126 | if first and last: 127 | return np.hstack((np.expand_dims(first_column, 1), 128 | data, 129 | np.expand_dims(last_column, 1))) 130 | elif first: 131 | return np.hstack((np.expand_dims(first_column, 1), 132 | data)) 133 | elif last: 134 | return np.hstack((data, 135 | np.expand_dims(last_column, 1))) 136 | else: 137 | return data 138 | 139 | def _fill_col_borders(self): 140 | """Add the first and last column to the data by extrapolation.""" 141 | first = True 142 | last = True 143 | if self.col_indices[0] == self.hcol_indices[0]: 144 | first = False 145 | if self.col_indices[-1] == self.hcol_indices[-1]: 146 | last = False 147 | for num, data in enumerate(self.tie_data): 148 | self.tie_data[num] = self._extrapolate_cols(data, first, last) 149 | 150 | if first and last: 151 | self.col_indices = np.concatenate((np.array([self.hcol_indices[0]]), 152 | self.col_indices, 153 | np.array([self.hcol_indices[-1]]))) 154 | elif first: 155 | self.col_indices = np.concatenate((np.array([self.hcol_indices[0]]), 156 | self.col_indices)) 157 | elif last: 158 | self.col_indices = np.concatenate((self.col_indices, 159 | np.array([self.hcol_indices[-1]]))) 160 | 161 | def _extrapolate_rows(self, data, row_indices, first_index, last_index): 162 | """Extrapolate the rows of data, to get the first and last together with the data.""" 163 | pos = row_indices[:2] 164 | first_row = _linear_extrapolate(pos, 165 | (data[0, :], data[1, :]), 166 | first_index) 167 | pos = row_indices[-2:] 168 | last_row = _linear_extrapolate(pos, 169 | (data[-2, :], data[-1, :]), 170 | last_index) 171 | return np.vstack((np.expand_dims(first_row, 0), 172 | data, 173 | np.expand_dims(last_row, 0))) 174 | 175 | def _fill_row_borders(self): 176 | """Add the first 
and last rows to the data by extrapolation.""" 177 | lines = len(self.hrow_indices) 178 | chunk_size = self.chunk_size or lines 179 | factor = len(self.hrow_indices) / len(self.row_indices) 180 | 181 | tmp_data = [] 182 | for _num in range(len(self.tie_data)): 183 | tmp_data.append([]) 184 | row_indices = [] 185 | 186 | for index in range(0, lines, chunk_size): 187 | indices = np.logical_and(self.row_indices >= index / factor, 188 | self.row_indices < (index + chunk_size) / factor) 189 | ties = np.argwhere(indices).squeeze() 190 | tiepos = self.row_indices[indices].squeeze() 191 | 192 | for num, data in enumerate(self.tie_data): 193 | to_extrapolate = data[ties, :] 194 | if len(to_extrapolate) > 0: 195 | extrapolated = self._extrapolate_rows(to_extrapolate, 196 | tiepos, 197 | self.hrow_indices[ 198 | index], 199 | self.hrow_indices[index + chunk_size - 1]) 200 | tmp_data[num].append(extrapolated) 201 | 202 | row_indices.append(np.array([self.hrow_indices[index]])) 203 | row_indices.append(tiepos) 204 | row_indices.append(np.array([self.hrow_indices[index + chunk_size - 1]])) 205 | 206 | for num in range(len(self.tie_data)): 207 | self.tie_data[num] = np.vstack(tmp_data[num]) 208 | self.row_indices = np.concatenate(row_indices) 209 | 210 | def _interp(self): 211 | """Interpolate the cartesian coordinates.""" 212 | if np.array_equal(self.hrow_indices, self.row_indices): 213 | return self._interp1d() 214 | 215 | for num, data in enumerate(self.tie_data): 216 | spl = RectBivariateSpline(self.row_indices, 217 | self.col_indices, 218 | data, 219 | s=0, 220 | kx=self.kx_, 221 | ky=self.ky_) 222 | 223 | self.new_data[num] = spl(self.hrow_indices, self.hcol_indices, grid=True) 224 | 225 | def _interp1d(self): 226 | """Interpolate in one dimension.""" 227 | lines = len(self.hrow_indices) 228 | 229 | for num, data in enumerate(self.tie_data): 230 | self.new_data[num] = np.empty((len(self.hrow_indices), 231 | len(self.hcol_indices)), 232 | data.dtype) 233 | 234 | for cnt in range(lines): 235 | tck = splrep(self.col_indices, data[cnt, :], k=self.ky_, s=0) 236 | self.new_data[num][cnt, :] = splev( 237 | self.hcol_indices, tck, der=0) 238 | 239 | def interpolate(self): 240 | """Do the interpolation, and return resulting longitudes and latitudes.""" 241 | self._interp() 242 | 243 | return self.new_data 244 | 245 | 246 | class AbstractSingleInterpolator(ABC): 247 | """An abstract interpolator for a single 2d data array.""" 248 | 249 | def __init__(self, points, values, scipy_interpolator, **interpolator_init_kwargs): 250 | """Set up the interpolator. 251 | 252 | *kwargs* are passed to the underlying scipy interpolator instance. 253 | So for example, to allow extrapolation, the kwargs can be `bounds_error=False, fill_value=None`. 254 | """ 255 | self.interpolator = scipy_interpolator(points, values, **interpolator_init_kwargs) 256 | self.points = points 257 | self.values = values 258 | 259 | def interpolate(self, fine_points, chunks=None, **interpolator_call_kwargs): 260 | """Interpolate the value points to the *fine_points* grid. 261 | 262 | Args: 263 | fine_points: the points on the target grid to use, as one dimensional vectors for each dimension. 264 | chunks: If not None, a lazy (dask-based) interpolation will be performed using the chunk sizes specified. 265 | The result will be a dask array in this case. Defaults to None. 266 | interpolator_kwargs: The keyword arguments to pass to the underlying scipy interpolator. 
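
        Example (a sketch with illustrative sizes): passing
        ``chunks=(1000, 1000)`` together with fine points
        ``(np.arange(4000), np.arange(4000))`` yields a lazily evaluated
        4000x4000 dask array, while the default ``chunks=None`` returns a
        plain numpy array of the same shape.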
267 | """ 268 | if chunks is not None: 269 | res = self.interpolate_dask(fine_points, chunks=chunks, **interpolator_call_kwargs) 270 | else: 271 | res = self.interpolate_numpy(fine_points, **interpolator_call_kwargs) 272 | 273 | return res 274 | 275 | def interpolate_dask(self, fine_points, chunks, **interpolator_call_kwargs): 276 | """Interpolate (lazily) to a dask array.""" 277 | from dask.base import tokenize 278 | import dask.array as da 279 | from dask.array.core import normalize_chunks 280 | v_fine_points, h_fine_points = fine_points 281 | shape = len(v_fine_points), len(h_fine_points) 282 | 283 | chunks = normalize_chunks(chunks, shape, dtype=self.values.dtype) 284 | 285 | token = tokenize(chunks, self.points, self.values, fine_points, interpolator_call_kwargs) 286 | name = 'interpolate-' + token 287 | 288 | interpolate_slices = partial(self.interpolate_slices, **interpolator_call_kwargs) 289 | 290 | dskx = {(name, ) + position: (interpolate_slices, 291 | slices) 292 | for position, slices in _enumerate_chunk_slices(chunks)} 293 | 294 | res = da.Array(dskx, name, shape=list(shape), 295 | chunks=chunks, 296 | dtype=self.values.dtype) 297 | return res 298 | 299 | @abstractmethod 300 | def interpolate_numpy(self, fine_points, **interpolator_call_kwargs): 301 | """Interpolate to a numpy array.""" 302 | raise NotImplementedError 303 | 304 | def interpolate_slices(self, fine_points, **interpolator_call_kwargs): 305 | """Interpolate using slices. 306 | 307 | *fine_points* are a tuple of slices for the y and x dimensions 308 | """ 309 | slice_y, slice_x = fine_points 310 | points_y = np.arange(slice_y.start, slice_y.stop) 311 | points_x = np.arange(slice_x.start, slice_x.stop) 312 | fine_points = points_y, points_x 313 | 314 | return self.interpolate_numpy(fine_points, **interpolator_call_kwargs) 315 | 316 | 317 | def _enumerate_chunk_slices(chunks): 318 | """Enumerate chunks with slices.""" 319 | for position in np.ndindex(tuple(map(len, (chunks)))): 320 | slices = [] 321 | for pos, chunk in zip(position, chunks): 322 | chunk_size = chunk[pos] 323 | offset = sum(chunk[:pos]) 324 | slices.append(slice(offset, offset + chunk_size)) 325 | 326 | yield (position, slices) 327 | 328 | 329 | class SingleGridInterpolator(AbstractSingleInterpolator): 330 | """A regular grid interpolator for a single 2d data array.""" 331 | 332 | def __init__(self, *args, **interpolator_init_kwargs): 333 | """Set up the grid interpolator.""" 334 | super().__init__(*args, scipy_interpolator=RegularGridInterpolator, **interpolator_init_kwargs) 335 | 336 | def interpolate_numpy(self, fine_points, **interpolator_call_kwargs): 337 | """Interpolate to a numpy array.""" 338 | fine_x, fine_y = np.meshgrid(*fine_points, indexing='ij') 339 | return self.interpolator((fine_x, fine_y), **interpolator_call_kwargs).astype(self.values.dtype) 340 | 341 | 342 | class SingleSplineInterpolator(AbstractSingleInterpolator): 343 | """An spline interpolator for a single 2d data array.""" 344 | 345 | def __init__(self, points, values, **interpolator_init_kwargs): 346 | """Set up the spline interpolator.""" 347 | self.interpolator = RectBivariateSpline(*points, values, **interpolator_init_kwargs) 348 | self.points = points 349 | self.values = values 350 | 351 | def interpolate_numpy(self, fine_points, **interpolator_call_kwargs): 352 | """Interpolate to a numpy array.""" 353 | return self.interpolator(*fine_points, **interpolator_call_kwargs).astype(self.values.dtype) 354 | 355 | 356 | class AbstractMultipleInterpolator(ABC): # noqa: B024 
357 | """Abstract interpolator that works on mulitple arrays.""" 358 | 359 | def __init__(self, interpolator, tie_points, *data, **interpolator_init_kwargs): 360 | """Set up the interpolator from the multiple `data` arrays.""" 361 | self.interpolators = [] 362 | for values in data: 363 | self.interpolators.append(interpolator(tie_points, values, **interpolator_init_kwargs)) 364 | 365 | def interpolate(self, fine_points, **kwargs): 366 | """Interpolate the data. 367 | 368 | The keyword arguments will be passed on to SingleGridInterpolator's interpolate function. 369 | """ 370 | return (interpolator.interpolate(fine_points, **kwargs) for interpolator in self.interpolators) 371 | 372 | def interpolate_to_shape(self, shape, **interpolator_call_kwargs): 373 | """Interpolate to a given *shape*.""" 374 | fine_points = [np.arange(size) for size in shape] 375 | return self.interpolate(fine_points, **interpolator_call_kwargs) 376 | 377 | 378 | class MultipleGridInterpolator(AbstractMultipleInterpolator): 379 | """Grid interpolator that works on multiple data arrays.""" 380 | 381 | def __init__(self, tie_points, *data, **interpolator_init_kwargs): 382 | """Set up the interpolator from the multiple `data` arrays.""" 383 | super().__init__(SingleGridInterpolator, tie_points, *data, **interpolator_init_kwargs) 384 | 385 | 386 | class MultipleSplineInterpolator(AbstractMultipleInterpolator): 387 | """Spline interpolator that works on multiple data arrays.""" 388 | 389 | def __init__(self, tie_points, *data, **interpolator_init_kwargs): 390 | """Set up the interpolator from the multiple `data` arrays.""" 391 | super().__init__(SingleSplineInterpolator, tie_points, *data, **interpolator_init_kwargs) 392 | -------------------------------------------------------------------------------- /geotiepoints/modisinterpolator.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018-2023 Python-geotiepoints developers 2 | # 3 | # This program is free software: you can redistribute it and/or modify 4 | # it under the terms of the GNU General Public License as published by 5 | # the Free Software Foundation, either version 3 of the License, or 6 | # (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11 | # GNU General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program. If not, see . 15 | """Interpolation of MODIS data using satellite zenith angle. 16 | 17 | Interpolation of geographical tiepoints using the second order interpolation 18 | scheme implemented in the CVIIRS software, as described here: 19 | Compact VIIRS SDR Product Format User Guide (V1J) 20 | https://www.eumetsat.int/media/45988 21 | and 22 | Anders Meier Soerensen, Stephan Zinke, 23 | A tie-point zone group compaction schema for the geolocation data of S-NPP and NOAA-20 VIIRS SDRs to reduce file sizes 24 | in memory-sensitive environments, 25 | Applied Computing and Geosciences, Volume 6, 2020, 100025, ISSN 2590-1974, 26 | https://doi.org/10.1016/j.acags.2020.100025. 
27 | (https://www.sciencedirect.com/science/article/pii/S2590197420300070) 28 | """ 29 | 30 | import warnings 31 | 32 | from ._modis_interpolator import interpolate 33 | 34 | 35 | def modis_1km_to_250m(lon1, lat1, satz1): 36 | """Interpolate MODIS geolocation from 1km to 250m resolution.""" 37 | return interpolate(lon1, lat1, satz1, 38 | coarse_resolution=1000, 39 | fine_resolution=250) 40 | 41 | 42 | def modis_1km_to_500m(lon1, lat1, satz1): 43 | """Interpolate MODIS geolocation from 1km to 500m resolution.""" 44 | return interpolate(lon1, lat1, satz1, 45 | coarse_resolution=1000, 46 | fine_resolution=500) 47 | 48 | 49 | def modis_5km_to_1km(lon1, lat1, satz1): 50 | """Interpolate MODIS geolocation from 5km to 1km resolution.""" 51 | return interpolate(lon1, lat1, satz1, 52 | coarse_resolution=5000, 53 | fine_resolution=1000, 54 | coarse_scan_width=lon1.shape[1]) 55 | 56 | 57 | def modis_5km_to_500m(lon1, lat1, satz1): 58 | """Interpolate MODIS geolocation from 5km to 500m resolution.""" 59 | warnings.warn( 60 | "Interpolating 5km geolocation to 500m resolution " "may result in poor quality" 61 | ) 62 | return interpolate(lon1, lat1, satz1, 63 | coarse_resolution=5000, 64 | fine_resolution=500, 65 | coarse_scan_width=lon1.shape[1]) 66 | 67 | 68 | def modis_5km_to_250m(lon1, lat1, satz1): 69 | """Interpolate MODIS geolocation from 5km to 250m resolution.""" 70 | warnings.warn( 71 | "Interpolating 5km geolocation to 250m resolution " "may result in poor quality" 72 | ) 73 | return interpolate(lon1, lat1, satz1, 74 | coarse_resolution=5000, 75 | fine_resolution=250, 76 | coarse_scan_width=lon1.shape[1]) 77 | -------------------------------------------------------------------------------- /geotiepoints/multilinear.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | 3 | import numpy as np 4 | 5 | from geotiepoints.multilinear_cython import multilinear_interpolation 6 | 7 | 8 | def mlinspace(smin, smax, orders): 9 | if len(orders) == 1: 10 | res = np.atleast_2d( 11 | np.linspace(np.array(smin), np.array(smax), np.array(orders))) 12 | return res.copy() # workaround for strange bug 13 | else: 14 | meshes = np.meshgrid( 15 | *[np.linspace(smin[i], smax[i], orders[i]) for i in range(len(orders))], indexing='ij') 16 | return np.vstack([l.flatten() for l in meshes]) 17 | 18 | 19 | class MultilinearInterpolator: 20 | """Multilinear interpolation. 21 | 22 | Methods 23 | ------- 24 | smin, smax, orders : iterable objects 25 | Specifies the boundaries of a cartesian grid, with the number of points along each dimension. 26 | values : array_like (2D), optional 27 | Each line enumerates values taken by a function on the cartesian grid, with last index varying faster. 28 | 29 | 30 | Attributes 31 | ---------- 32 | smin, smax, orders : 33 | Boundaries and number of points along each dimension. 34 | d : number of dimensions 35 | grid : array_like (2D) 36 | Enumerate the approximation grid. Each column contains coordinates of a point in R^d. 37 | values : 38 | Values on the grid to be interpolated. 
39 | 40 | Example 41 | ------- 42 | 43 | smin = [-1,-1] 44 | smax = [1,1] 45 | orders = [5,5] 46 | 47 | f = lambda x: np.vstack([np.sqrt( x[0,:]**2 + x[1,:]**2 ), 48 | np.power( x[0,:]**3 + x[1,:]**3, 1.0/3.0 )]) 49 | 50 | interp = MultilinearInterpolator(smin,smax,orders) 51 | interp.set_values( f(interp.grid) ) 52 | 53 | random_points = np.random.random( (2, 1000) ) 54 | 55 | interpolated_values = interp(random_points) 56 | exact_values = f(random_points) 57 | """ 58 | 59 | __grid__ = None 60 | 61 | def __init__(self, smin, smax, orders, values=None, dtype=np.float64): 62 | self.smin = np.asarray(smin, dtype=dtype) 63 | self.smax = np.asarray(smax, dtype=dtype) 64 | self.orders = np.asarray(orders, dtype="long") 65 | self.d = len(orders) 66 | self.dtype = dtype 67 | if values is not None: 68 | self.set_values(values) 69 | 70 | @property 71 | def grid(self): 72 | if self.__grid__ is None: 73 | self.__grid__ = mlinspace(self.smin, self.smax, self.orders) 74 | return self.__grid__ 75 | 76 | def set_values(self, values): 77 | self.values = np.ascontiguousarray(values, dtype=self.dtype) 78 | 79 | def interpolate(self, s): 80 | s = np.ascontiguousarray(s, dtype=self.dtype) 81 | a = multilinear_interpolation( 82 | self.smin, self.smax, self.orders, self.values, s) 83 | return a 84 | 85 | def __call__(self, s): 86 | return self.interpolate(s) 87 | -------------------------------------------------------------------------------- /geotiepoints/multilinear_cython.pyx: -------------------------------------------------------------------------------- 1 | # cython: language_level=3, boundscheck=False, cdivision=True, wraparound=False, initializedcheck=False, nonecheck=False 2 | 3 | cimport cython 4 | from cython.parallel import prange,parallel 5 | from cython cimport floating 6 | 7 | cimport numpy as np 8 | import numpy as np 9 | 10 | np.import_array() 11 | 12 | 13 | def multilinear_interpolation(floating[:] smin, floating[:] smax, long[:] orders, floating[:,::1] values, floating[:,::1] s): 14 | 15 | cdef Py_ssize_t d = s.shape[0] 16 | cdef Py_ssize_t n_s = s.shape[1] 17 | cdef Py_ssize_t n_v = values.shape[0] 18 | 19 | if floating is float: 20 | dtype = np.single 21 | else: 22 | dtype = np.double 23 | 24 | cdef np.ndarray[floating, ndim=2] result_arr = np.empty((n_v, n_s), dtype=dtype) 25 | cdef floating[:, ::1] result = result_arr 26 | cdef floating[:] vals 27 | cdef floating[:] res 28 | 29 | if d > 4: 30 | raise Exception("Can't interpolate in dimension strictly greater than 5") 31 | 32 | with nogil: 33 | for i in range(n_v): 34 | vals = values[i, :] 35 | res = result[i, :] 36 | if d == 1: 37 | multilinear_interpolation_1d(smin, smax, orders, vals, n_s, s, res) 38 | elif d == 2: 39 | multilinear_interpolation_2d(smin, smax, orders, vals, n_s, s, res) 40 | elif d == 3: 41 | multilinear_interpolation_3d(smin, smax, orders, vals, n_s, s, res) 42 | elif d == 4: 43 | multilinear_interpolation_4d(smin, smax, orders, vals, n_s, s, res) 44 | 45 | return result_arr 46 | 47 | 48 | cdef void multilinear_interpolation_1d(floating[:] smin, floating[:] smax, 49 | long[:] orders, floating[:] V, 50 | int n_s, floating[:,::1] s, floating[:] output) noexcept nogil: 51 | 52 | cdef int i 53 | cdef floating lam_0, s_0, sn_0, snt_0 54 | cdef int order_0 = orders[0] 55 | cdef int q_0 56 | cdef floating v_0 57 | cdef floating v_1 58 | with parallel(): 59 | for i in prange(n_s): 60 | 61 | # (s_1, ..., s_d) : evaluation point 62 | s_0 = s[ 0 , i ] 63 | 64 | # (sn_1, ..., sn_d) : normalized evaluation point (in [0,1] inside 
the grid) 65 | sn_0 = (s_0-smin[0])/(smax[0]-smin[0]) 66 | 67 | # q_k : index of the interval "containing" s_k 68 | q_0 = max( min( (sn_0 *(order_0-1)), (order_0-2) ), 0 ) 69 | 70 | # lam_k : barycentric coordinate in interval k 71 | lam_0 = sn_0*(order_0-1) - q_0 72 | 73 | # v_ij: values on vertices of hypercube "containing" the point 74 | v_0 = V[(q_0)] 75 | v_1 = V[(q_0+1)] 76 | 77 | # interpolated/extrapolated value 78 | output[i] = (1-lam_0)*(v_0) + (lam_0)*(v_1) 79 | 80 | 81 | cdef void multilinear_interpolation_2d(floating[:] smin, floating[:] smax, 82 | long[:] orders, floating[:] V, 83 | int n_s, floating[:,::1] s, floating[:] output) noexcept nogil: 84 | 85 | cdef int i 86 | cdef floating lam_0, s_0, sn_0, snt_0 87 | cdef floating lam_1, s_1, sn_1, snt_1 88 | cdef int order_0 = orders[0] 89 | cdef int order_1 = orders[1] 90 | cdef int q_0 91 | cdef int q_1 92 | cdef int M_0 = order_1 93 | cdef floating v_00 94 | cdef floating v_01 95 | cdef floating v_10 96 | cdef floating v_11 97 | with parallel(): 98 | for i in prange(n_s): 99 | 100 | # (s_1, ..., s_d) : evaluation point 101 | s_0 = s[ 0 , i ] 102 | s_1 = s[ 1 , i ] 103 | 104 | # (sn_1, ..., sn_d) : normalized evaluation point (in [0,1] inside the grid) 105 | sn_0 = (s_0-smin[0])/(smax[0]-smin[0]) 106 | sn_1 = (s_1-smin[1])/(smax[1]-smin[1]) 107 | 108 | # q_k : index of the interval "containing" s_k 109 | q_0 = max( min( (sn_0 *(order_0-1)), (order_0-2) ), 0 ) 110 | q_1 = max( min( (sn_1 *(order_1-1)), (order_1-2) ), 0 ) 111 | 112 | # lam_k : barycentric coordinate in interval k 113 | lam_0 = sn_0*(order_0-1) - q_0 114 | lam_1 = sn_1*(order_1-1) - q_1 115 | 116 | # v_ij: values on vertices of hypercube "containing" the point 117 | v_00 = V[M_0*(q_0) + (q_1)] 118 | v_01 = V[M_0*(q_0) + (q_1+1)] 119 | v_10 = V[M_0*(q_0+1) + (q_1)] 120 | v_11 = V[M_0*(q_0+1) + (q_1+1)] 121 | 122 | # interpolated/extrapolated value 123 | output[i] = (1-lam_0)*((1-lam_1)*(v_00) + (lam_1)*(v_01)) + (lam_0)*((1-lam_1)*(v_10) + (lam_1)*(v_11)) 124 | 125 | 126 | cdef void multilinear_interpolation_3d(floating[:] smin, floating[:] smax, 127 | long[:] orders, floating[:] V, 128 | int n_s, floating[:,::1] s, floating[:] output) noexcept nogil: 129 | cdef int i 130 | cdef floating lam_0, s_0, sn_0, snt_0 131 | cdef floating lam_1, s_1, sn_1, snt_1 132 | cdef floating lam_2, s_2, sn_2, snt_2 133 | cdef int order_0 = orders[0] 134 | cdef int order_1 = orders[1] 135 | cdef int order_2 = orders[2] 136 | cdef int q_0 137 | cdef int q_1 138 | cdef int q_2 139 | cdef int M_0 = order_1*order_2 140 | cdef int M_1 = order_2 141 | cdef floating v_000 142 | cdef floating v_001 143 | cdef floating v_010 144 | cdef floating v_011 145 | cdef floating v_100 146 | cdef floating v_101 147 | cdef floating v_110 148 | cdef floating v_111 149 | with parallel(): 150 | for i in prange(n_s): 151 | 152 | # (s_1, ..., s_d) : evaluation point 153 | s_0 = s[ 0 , i ] 154 | s_1 = s[ 1 , i ] 155 | s_2 = s[ 2 , i ] 156 | 157 | # (sn_1, ..., sn_d) : normalized evaluation point (in [0,1] inside the grid) 158 | sn_0 = (s_0-smin[0])/(smax[0]-smin[0]) 159 | sn_1 = (s_1-smin[1])/(smax[1]-smin[1]) 160 | sn_2 = (s_2-smin[2])/(smax[2]-smin[2]) 161 | 162 | # q_k : index of the interval "containing" s_k 163 | q_0 = max( min( (sn_0 *(order_0-1)), (order_0-2) ), 0 ) 164 | q_1 = max( min( (sn_1 *(order_1-1)), (order_1-2) ), 0 ) 165 | q_2 = max( min( (sn_2 *(order_2-1)), (order_2-2) ), 0 ) 166 | 167 | # lam_k : barycentric coordinate in interval k 168 | lam_0 = sn_0*(order_0-1) - q_0 169 | lam_1 = 
sn_1*(order_1-1) - q_1 170 | lam_2 = sn_2*(order_2-1) - q_2 171 | 172 | # v_ij: values on vertices of hypercube "containing" the point 173 | v_000 = V[M_0*(q_0) + M_1*(q_1) + (q_2)] 174 | v_001 = V[M_0*(q_0) + M_1*(q_1) + (q_2+1)] 175 | v_010 = V[M_0*(q_0) + M_1*(q_1+1) + (q_2)] 176 | v_011 = V[M_0*(q_0) + M_1*(q_1+1) + (q_2+1)] 177 | v_100 = V[M_0*(q_0+1) + M_1*(q_1) + (q_2)] 178 | v_101 = V[M_0*(q_0+1) + M_1*(q_1) + (q_2+1)] 179 | v_110 = V[M_0*(q_0+1) + M_1*(q_1+1) + (q_2)] 180 | v_111 = V[M_0*(q_0+1) + M_1*(q_1+1) + (q_2+1)] 181 | 182 | # interpolated/extrapolated value 183 | output[i] = (1-lam_0)*((1-lam_1)*((1-lam_2)*(v_000) + (lam_2)*(v_001)) + (lam_1)*((1-lam_2)*(v_010) + (lam_2)*(v_011))) + (lam_0)*((1-lam_1)*((1-lam_2)*(v_100) + (lam_2)*(v_101)) + (lam_1)*((1-lam_2)*(v_110) + (lam_2)*(v_111))) 184 | 185 | 186 | cdef void multilinear_interpolation_4d(floating[:] smin, floating[:] smax, 187 | long[:] orders, floating[:] V, 188 | int n_s, floating[:,::1] s, floating[:] output) noexcept nogil: 189 | 190 | cdef int i 191 | cdef floating lam_0, s_0, sn_0, snt_0 192 | cdef floating lam_1, s_1, sn_1, snt_1 193 | cdef floating lam_2, s_2, sn_2, snt_2 194 | cdef floating lam_3, s_3, sn_3, snt_3 195 | cdef int order_0 = orders[0] 196 | cdef int order_1 = orders[1] 197 | cdef int order_2 = orders[2] 198 | cdef int order_3 = orders[3] 199 | cdef int q_0 200 | cdef int q_1 201 | cdef int q_2 202 | cdef int q_3 203 | cdef int M_0 = order_1*order_2*order_3 204 | cdef int M_1 = order_2*order_3 205 | cdef int M_2 = order_3 206 | cdef floating v_0000 207 | cdef floating v_0001 208 | cdef floating v_0010 209 | cdef floating v_0011 210 | cdef floating v_0100 211 | cdef floating v_0101 212 | cdef floating v_0110 213 | cdef floating v_0111 214 | cdef floating v_1000 215 | cdef floating v_1001 216 | cdef floating v_1010 217 | cdef floating v_1011 218 | cdef floating v_1100 219 | cdef floating v_1101 220 | cdef floating v_1110 221 | cdef floating v_1111 222 | with parallel(): 223 | for i in prange(n_s): 224 | 225 | # (s_1, ..., s_d) : evaluation point 226 | s_0 = s[ 0 , i ] 227 | s_1 = s[ 1 , i ] 228 | s_2 = s[ 2 , i ] 229 | s_3 = s[ 3 , i ] 230 | 231 | # (sn_1, ..., sn_d) : normalized evaluation point (in [0,1] inside the grid) 232 | sn_0 = (s_0-smin[0])/(smax[0]-smin[0]) 233 | sn_1 = (s_1-smin[1])/(smax[1]-smin[1]) 234 | sn_2 = (s_2-smin[2])/(smax[2]-smin[2]) 235 | sn_3 = (s_3-smin[3])/(smax[3]-smin[3]) 236 | 237 | # q_k : index of the interval "containing" s_k 238 | q_0 = max( min( (sn_0 *(order_0-1)), (order_0-2) ), 0 ) 239 | q_1 = max( min( (sn_1 *(order_1-1)), (order_1-2) ), 0 ) 240 | q_2 = max( min( (sn_2 *(order_2-1)), (order_2-2) ), 0 ) 241 | q_3 = max( min( (sn_3 *(order_3-1)), (order_3-2) ), 0 ) 242 | 243 | # lam_k : barycentric coordinate in interval k 244 | lam_0 = sn_0*(order_0-1) - q_0 245 | lam_1 = sn_1*(order_1-1) - q_1 246 | lam_2 = sn_2*(order_2-1) - q_2 247 | lam_3 = sn_3*(order_3-1) - q_3 248 | 249 | # v_ij: values on vertices of hypercube "containing" the point 250 | v_0000 = V[M_0*(q_0) + M_1*(q_1) + M_2*(q_2) + (q_3)] 251 | v_0001 = V[M_0*(q_0) + M_1*(q_1) + M_2*(q_2) + (q_3+1)] 252 | v_0010 = V[M_0*(q_0) + M_1*(q_1) + M_2*(q_2+1) + (q_3)] 253 | v_0011 = V[M_0*(q_0) + M_1*(q_1) + M_2*(q_2+1) + (q_3+1)] 254 | v_0100 = V[M_0*(q_0) + M_1*(q_1+1) + M_2*(q_2) + (q_3)] 255 | v_0101 = V[M_0*(q_0) + M_1*(q_1+1) + M_2*(q_2) + (q_3+1)] 256 | v_0110 = V[M_0*(q_0) + M_1*(q_1+1) + M_2*(q_2+1) + (q_3)] 257 | v_0111 = V[M_0*(q_0) + M_1*(q_1+1) + M_2*(q_2+1) + (q_3+1)] 258 | v_1000 = 
V[M_0*(q_0+1) + M_1*(q_1) + M_2*(q_2) + (q_3)] 259 | v_1001 = V[M_0*(q_0+1) + M_1*(q_1) + M_2*(q_2) + (q_3+1)] 260 | v_1010 = V[M_0*(q_0+1) + M_1*(q_1) + M_2*(q_2+1) + (q_3)] 261 | v_1011 = V[M_0*(q_0+1) + M_1*(q_1) + M_2*(q_2+1) + (q_3+1)] 262 | v_1100 = V[M_0*(q_0+1) + M_1*(q_1+1) + M_2*(q_2) + (q_3)] 263 | v_1101 = V[M_0*(q_0+1) + M_1*(q_1+1) + M_2*(q_2) + (q_3+1)] 264 | v_1110 = V[M_0*(q_0+1) + M_1*(q_1+1) + M_2*(q_2+1) + (q_3)] 265 | v_1111 = V[M_0*(q_0+1) + M_1*(q_1+1) + M_2*(q_2+1) + (q_3+1)] 266 | 267 | # interpolated/extrapolated value 268 | output[i] = (1-lam_0)*((1-lam_1)*((1-lam_2)*((1-lam_3)*(v_0000) + (lam_3)*(v_0001)) + (lam_2)*((1-lam_3)*(v_0010) + (lam_3)*(v_0011))) + (lam_1)*((1-lam_2)*((1-lam_3)*(v_0100) + (lam_3)*(v_0101)) + (lam_2)*((1-lam_3)*(v_0110) + (lam_3)*(v_0111)))) + (lam_0)*((1-lam_1)*((1-lam_2)*((1-lam_3)*(v_1000) + (lam_3)*(v_1001)) + (lam_2)*((1-lam_3)*(v_1010) + (lam_3)*(v_1011))) + (lam_1)*((1-lam_2)*((1-lam_3)*(v_1100) + (lam_3)*(v_1101)) + (lam_2)*((1-lam_3)*(v_1110) + (lam_3)*(v_1111)))) 269 | 270 | 271 | cdef void multilinear_interpolation_5d(floating[:] smin, floating[:] smax, 272 | long[:] orders, floating[:] V, 273 | int n_s, floating[:,::1] s, floating[:] output) noexcept nogil: 274 | cdef int i 275 | cdef floating lam_0, s_0, sn_0, snt_0 276 | cdef floating lam_1, s_1, sn_1, snt_1 277 | cdef floating lam_2, s_2, sn_2, snt_2 278 | cdef floating lam_3, s_3, sn_3, snt_3 279 | cdef floating lam_4, s_4, sn_4, snt_4 280 | cdef int order_0 = orders[0] 281 | cdef int order_1 = orders[1] 282 | cdef int order_2 = orders[2] 283 | cdef int order_3 = orders[3] 284 | cdef int order_4 = orders[4] 285 | cdef int q_0 286 | cdef int q_1 287 | cdef int q_2 288 | cdef int q_3 289 | cdef int q_4 290 | cdef int M_0 = order_1*order_2*order_3*order_4 291 | cdef int M_1 = order_2*order_3*order_4 292 | cdef int M_2 = order_3*order_4 293 | cdef int M_3 = order_4 294 | cdef floating v_00000 295 | cdef floating v_00001 296 | cdef floating v_00010 297 | cdef floating v_00011 298 | cdef floating v_00100 299 | cdef floating v_00101 300 | cdef floating v_00110 301 | cdef floating v_00111 302 | cdef floating v_01000 303 | cdef floating v_01001 304 | cdef floating v_01010 305 | cdef floating v_01011 306 | cdef floating v_01100 307 | cdef floating v_01101 308 | cdef floating v_01110 309 | cdef floating v_01111 310 | cdef floating v_10000 311 | cdef floating v_10001 312 | cdef floating v_10010 313 | cdef floating v_10011 314 | cdef floating v_10100 315 | cdef floating v_10101 316 | cdef floating v_10110 317 | cdef floating v_10111 318 | cdef floating v_11000 319 | cdef floating v_11001 320 | cdef floating v_11010 321 | cdef floating v_11011 322 | cdef floating v_11100 323 | cdef floating v_11101 324 | cdef floating v_11110 325 | cdef floating v_11111 326 | with parallel(): 327 | for i in prange(n_s): 328 | 329 | # (s_1, ..., s_d) : evaluation point 330 | s_0 = s[ 0 , i ] 331 | s_1 = s[ 1 , i ] 332 | s_2 = s[ 2 , i ] 333 | s_3 = s[ 3 , i ] 334 | s_4 = s[ 4 , i ] 335 | 336 | # (sn_1, ..., sn_d) : normalized evaluation point (in [0,1] inside the grid) 337 | sn_0 = (s_0-smin[0])/(smax[0]-smin[0]) 338 | sn_1 = (s_1-smin[1])/(smax[1]-smin[1]) 339 | sn_2 = (s_2-smin[2])/(smax[2]-smin[2]) 340 | sn_3 = (s_3-smin[3])/(smax[3]-smin[3]) 341 | sn_4 = (s_4-smin[4])/(smax[4]-smin[4]) 342 | 343 | # q_k : index of the interval "containing" s_k 344 | q_0 = max( min( (sn_0 *(order_0-1)), (order_0-2) ), 0 ) 345 | q_1 = max( min( (sn_1 *(order_1-1)), (order_1-2) ), 0 ) 346 | q_2 = max( min( (sn_2 
*(order_2-1)), (order_2-2) ), 0 ) 347 | q_3 = max( min( (sn_3 *(order_3-1)), (order_3-2) ), 0 ) 348 | q_4 = max( min( (sn_4 *(order_4-1)), (order_4-2) ), 0 ) 349 | 350 | # lam_k : barycentric coordinate in interval k 351 | lam_0 = sn_0*(order_0-1) - q_0 352 | lam_1 = sn_1*(order_1-1) - q_1 353 | lam_2 = sn_2*(order_2-1) - q_2 354 | lam_3 = sn_3*(order_3-1) - q_3 355 | lam_4 = sn_4*(order_4-1) - q_4 356 | 357 | # v_ij: values on vertices of hypercube "containing" the point 358 | v_00000 = V[M_0*(q_0) + M_1*(q_1) + M_2*(q_2) + M_3*(q_3) + (q_4)] 359 | v_00001 = V[M_0*(q_0) + M_1*(q_1) + M_2*(q_2) + M_3*(q_3) + (q_4+1)] 360 | v_00010 = V[M_0*(q_0) + M_1*(q_1) + M_2*(q_2) + M_3*(q_3+1) + (q_4)] 361 | v_00011 = V[M_0*(q_0) + M_1*(q_1) + M_2*(q_2) + M_3*(q_3+1) + (q_4+1)] 362 | v_00100 = V[M_0*(q_0) + M_1*(q_1) + M_2*(q_2+1) + M_3*(q_3) + (q_4)] 363 | v_00101 = V[M_0*(q_0) + M_1*(q_1) + M_2*(q_2+1) + M_3*(q_3) + (q_4+1)] 364 | v_00110 = V[M_0*(q_0) + M_1*(q_1) + M_2*(q_2+1) + M_3*(q_3+1) + (q_4)] 365 | v_00111 = V[M_0*(q_0) + M_1*(q_1) + M_2*(q_2+1) + M_3*(q_3+1) + (q_4+1)] 366 | v_01000 = V[M_0*(q_0) + M_1*(q_1+1) + M_2*(q_2) + M_3*(q_3) + (q_4)] 367 | v_01001 = V[M_0*(q_0) + M_1*(q_1+1) + M_2*(q_2) + M_3*(q_3) + (q_4+1)] 368 | v_01010 = V[M_0*(q_0) + M_1*(q_1+1) + M_2*(q_2) + M_3*(q_3+1) + (q_4)] 369 | v_01011 = V[M_0*(q_0) + M_1*(q_1+1) + M_2*(q_2) + M_3*(q_3+1) + (q_4+1)] 370 | v_01100 = V[M_0*(q_0) + M_1*(q_1+1) + M_2*(q_2+1) + M_3*(q_3) + (q_4)] 371 | v_01101 = V[M_0*(q_0) + M_1*(q_1+1) + M_2*(q_2+1) + M_3*(q_3) + (q_4+1)] 372 | v_01110 = V[M_0*(q_0) + M_1*(q_1+1) + M_2*(q_2+1) + M_3*(q_3+1) + (q_4)] 373 | v_01111 = V[M_0*(q_0) + M_1*(q_1+1) + M_2*(q_2+1) + M_3*(q_3+1) + (q_4+1)] 374 | v_10000 = V[M_0*(q_0+1) + M_1*(q_1) + M_2*(q_2) + M_3*(q_3) + (q_4)] 375 | v_10001 = V[M_0*(q_0+1) + M_1*(q_1) + M_2*(q_2) + M_3*(q_3) + (q_4+1)] 376 | v_10010 = V[M_0*(q_0+1) + M_1*(q_1) + M_2*(q_2) + M_3*(q_3+1) + (q_4)] 377 | v_10011 = V[M_0*(q_0+1) + M_1*(q_1) + M_2*(q_2) + M_3*(q_3+1) + (q_4+1)] 378 | v_10100 = V[M_0*(q_0+1) + M_1*(q_1) + M_2*(q_2+1) + M_3*(q_3) + (q_4)] 379 | v_10101 = V[M_0*(q_0+1) + M_1*(q_1) + M_2*(q_2+1) + M_3*(q_3) + (q_4+1)] 380 | v_10110 = V[M_0*(q_0+1) + M_1*(q_1) + M_2*(q_2+1) + M_3*(q_3+1) + (q_4)] 381 | v_10111 = V[M_0*(q_0+1) + M_1*(q_1) + M_2*(q_2+1) + M_3*(q_3+1) + (q_4+1)] 382 | v_11000 = V[M_0*(q_0+1) + M_1*(q_1+1) + M_2*(q_2) + M_3*(q_3) + (q_4)] 383 | v_11001 = V[M_0*(q_0+1) + M_1*(q_1+1) + M_2*(q_2) + M_3*(q_3) + (q_4+1)] 384 | v_11010 = V[M_0*(q_0+1) + M_1*(q_1+1) + M_2*(q_2) + M_3*(q_3+1) + (q_4)] 385 | v_11011 = V[M_0*(q_0+1) + M_1*(q_1+1) + M_2*(q_2) + M_3*(q_3+1) + (q_4+1)] 386 | v_11100 = V[M_0*(q_0+1) + M_1*(q_1+1) + M_2*(q_2+1) + M_3*(q_3) + (q_4)] 387 | v_11101 = V[M_0*(q_0+1) + M_1*(q_1+1) + M_2*(q_2+1) + M_3*(q_3) + (q_4+1)] 388 | v_11110 = V[M_0*(q_0+1) + M_1*(q_1+1) + M_2*(q_2+1) + M_3*(q_3+1) + (q_4)] 389 | v_11111 = V[M_0*(q_0+1) + M_1*(q_1+1) + M_2*(q_2+1) + M_3*(q_3+1) + (q_4+1)] 390 | 391 | # interpolated/extrapolated value 392 | output[i] = (1-lam_0)*((1-lam_1)*((1-lam_2)*((1-lam_3)*((1-lam_4)*(v_00000) + (lam_4)*(v_00001)) + (lam_3)*((1-lam_4)*(v_00010) + (lam_4)*(v_00011))) + (lam_2)*((1-lam_3)*((1-lam_4)*(v_00100) + (lam_4)*(v_00101)) + (lam_3)*((1-lam_4)*(v_00110) + (lam_4)*(v_00111)))) + (lam_1)*((1-lam_2)*((1-lam_3)*((1-lam_4)*(v_01000) + (lam_4)*(v_01001)) + (lam_3)*((1-lam_4)*(v_01010) + (lam_4)*(v_01011))) + (lam_2)*((1-lam_3)*((1-lam_4)*(v_01100) + (lam_4)*(v_01101)) + (lam_3)*((1-lam_4)*(v_01110) + (lam_4)*(v_01111))))) + 
(lam_0)*((1-lam_1)*((1-lam_2)*((1-lam_3)*((1-lam_4)*(v_10000) + (lam_4)*(v_10001)) + (lam_3)*((1-lam_4)*(v_10010) + (lam_4)*(v_10011))) + (lam_2)*((1-lam_3)*((1-lam_4)*(v_10100) + (lam_4)*(v_10101)) + (lam_3)*((1-lam_4)*(v_10110) + (lam_4)*(v_10111)))) + (lam_1)*((1-lam_2)*((1-lam_3)*((1-lam_4)*(v_11000) + (lam_4)*(v_11001)) + (lam_3)*((1-lam_4)*(v_11010) + (lam_4)*(v_11011))) + (lam_2)*((1-lam_3)*((1-lam_4)*(v_11100) + (lam_4)*(v_11101)) + (lam_3)*((1-lam_4)*(v_11110) + (lam_4)*(v_11111))))) 393 | 394 | 395 | -------------------------------------------------------------------------------- /geotiepoints/simple_modis_interpolator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # Copyright (c) 2021 Python-geotiepoints developers 4 | # 5 | # This file is part of python-geotiepoints. 6 | # 7 | # This program is free software: you can redistribute it and/or modify 8 | # it under the terms of the GNU General Public License as published by 9 | # the Free Software Foundation, either version 3 of the License, or 10 | # (at your option) any later version. 11 | 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU General Public License for more details. 16 | 17 | # You should have received a copy of the GNU General Public License 18 | # along with this program. If not, see . 19 | """Interpolate MODIS 1km navigation arrays to 250m and 500m resolutions. 20 | 21 | The code used here is a rewrite of the IDL function ``MODIS_GEO_INTERP_250`` 22 | used by Liam Gumley. It has been modified to convert coordinates to cartesian 23 | (X, Y, Z) coordinates first to avoid problems with the anti-meridian and poles. 24 | This code was originally part of the CSPP Polar2Grid project, but has been 25 | moved here for integration with Satpy and newer versions of Polar2Grid. 26 | 27 | This algorithm differs from the one in ``modisinterpolator`` as it only 28 | requires the original longitude and latitude arrays. This is useful in the 29 | case of reading the 250m or 500m MODIS L1b files or any MODIS L2 files without 30 | including the MOD03 geolocation file as there is no SensorZenith angle dataset 31 | in these files. 32 | 33 | """ 34 | 35 | from ._modis_utils import scanline_mapblocks 36 | from ._simple_modis_interpolator import interpolate_geolocation_cartesian as interp_cython 37 | 38 | 39 | @scanline_mapblocks 40 | def interpolate_geolocation_cartesian(lon_array, lat_array, coarse_resolution, fine_resolution): 41 | """Interpolate MODIS navigation from 1000m resolution to 250m. 42 | 43 | Python rewrite of the IDL function ``MODIS_GEO_INTERP_250`` but converts to cartesian (X, Y, Z) coordinates 44 | first to avoid problems with the anti-meridian/poles. 45 | 46 | Arguments: 47 | lon_array: Longitude data as a 2D numpy, dask, or xarray DataArray object. 48 | The input data is expected to represent 1000m geolocation. 49 | lat_array: Latitude data as a 2D numpy, dask, or xarray DataArray object. 50 | The input data is expected to represent 1000m geolocation. 51 | res_factor (int): Expansion factor for the function. Should be 2 for 52 | 500m output or 4 for 250m output. 53 | 54 | Returns: 55 | A two-element tuple (lon, lat). 
56 | 57 | """ 58 | return interp_cython( 59 | lon_array, lat_array, coarse_resolution, fine_resolution 60 | ) 61 | 62 | 63 | def modis_1km_to_250m(lon1, lat1): 64 | """Interpolate MODIS geolocation from 1km to 250m resolution.""" 65 | return interpolate_geolocation_cartesian( 66 | lon1, 67 | lat1, 68 | coarse_resolution=1000, 69 | fine_resolution=250, 70 | ) 71 | 72 | 73 | def modis_1km_to_500m(lon1, lat1): 74 | """Interpolate MODIS geolocation from 1km to 500m resolution.""" 75 | return interpolate_geolocation_cartesian( 76 | lon1, 77 | lat1, 78 | coarse_resolution=1000, 79 | fine_resolution=500) 80 | -------------------------------------------------------------------------------- /geotiepoints/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 Python-geotiepoints developers 2 | # 3 | # This program is free software: you can redistribute it and/or modify 4 | # it under the terms of the GNU General Public License as published by 5 | # the Free Software Foundation, either version 3 of the License, or 6 | # (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11 | # GNU General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program. If not, see . 15 | """The tests package.""" 16 | -------------------------------------------------------------------------------- /geotiepoints/tests/test_geointerpolator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # Copyright (c) 2013-2021 Python-geotiepoints developers 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 
17 | """Tests for GeoInterpolator.""" 18 | 19 | import unittest 20 | 21 | import numpy as np 22 | import pytest 23 | from pyresample.geometry import SwathDefinition 24 | 25 | from geotiepoints.geointerpolator import GeoInterpolator, GeoGridInterpolator, GeoSplineInterpolator 26 | 27 | TIES_EXP1 = np.array([[6384905.78040055, 6381081.08333225, 6371519.34066148, 28 | 6328950.00792935, 6253610.69157758, 6145946.19489936, 29 | 6124413.29556372], 30 | [6377591.95940176, 6370997., 6354509.6014956, 31 | 6305151.62592155, 6223234.99818839, 6109277.14889072, 32 | 6086485.57903118], 33 | [6359307.40690478, 6345786.79166939, 6311985.2535809, 34 | 6245655.67090206, 6147295.76471541, 6017604.5338691, 35 | 5991666.28769983], 36 | [6351993.58590599, 6335702.70833714, 6294975.51441502, 37 | 6221857.28889426, 6116920.07132621, 5980935.48786045, 38 | 5953738.5711673], 39 | [6338032.26190294, 6320348.4990906, 6276139.09205974, 40 | 6199670.56624433, 6091551.90273768, 5952590.38414781, 41 | 5924798.08042984], 42 | [6290665.5946295, 6270385.16249031, 6219684.08214232, 43 | 6137100.75832981, 6023313.2794414, 5879194.72399075, 44 | 5850371.01290062], 45 | [6172248.92644589, 6145476.82098957, 6078546.55734877, 46 | 5980676.23854351, 5852716.72120069, 5695705.57359808, 47 | 5664303.34407756], 48 | [6124882.25917245, 6095513.48438928, 6022091.54743135, 49 | 5918106.430629, 5784478.09790441, 5622309.91344102, 50 | 5589876.27654834]]) 51 | 52 | TIES_EXP2 = np.array([[6372937.31273379, 6370997., 6366146.21816553, 53 | 6351605.98629588, 6327412.61244969, 6293626.50067273, 54 | 6286869.27831734], 55 | [6353136.46335726, 6345786.79166939, 6327412.61244969, 56 | 6299445.69529922, 6261968.60390423, 6215087.60607344, 57 | 6205711.40650728]]) 58 | 59 | TIES_EXP4 = np.array([[6381081.08333225, 6381639.66045187, 6372470.10269454, 60 | 6353590.21586788, 6325042.05851245], 61 | [6370997., 6366146.21816553, 6351605.98629588, 62 | 6327412.61244969, 6293626.50067273], 63 | [6345786.79166939, 6327412.61244969, 6299445.69529922, 64 | 6261968.60390423, 6215087.60607344], 65 | [6335702.70833714, 6311919.17016336, 6278581.57890056, 66 | 6235791.00048604, 6183672.04823372]]) 67 | 68 | TIES_EXP5 = np.array([[6381081.08333225, 6371519.34066148, 6328950.00792935, 69 | 6253610.69157758, 6145946.19489936], 70 | [6370997., 6354509.6014956, 6305151.62592155, 71 | 6223234.99818839, 6109277.14889072], 72 | [6345786.79166939, 6311985.2535809, 6245655.67090206, 73 | 6147295.76471541, 6017604.5338691], 74 | [6270385.16249031, 6219684.08214232, 6137100.75832981, 75 | 6023313.2794414, 5879194.72399075], 76 | [6145476.82098957, 6078546.55734877, 5980676.23854351, 77 | 5852716.72120069, 5695705.57359808], 78 | [6095513.48438928, 6022091.54743135, 5918106.430629, 79 | 5784478.09790441, 5622309.91344102]]) 80 | 81 | TIES_EXP6 = np.array([[6381081.08333225, 6371519.34066148, 6328950.00792935, 82 | 6253610.69157758, 6145946.19489936], 83 | [6370997., 6354509.6014956, 6305151.62592155, 84 | 6223234.99818839, 6109277.14889072], 85 | [6345786.79166939, 6311985.2535809, 6245655.67090206, 86 | 6147295.76471541, 6017604.5338691], 87 | [6335702.70833714, 6294975.51441502, 6221857.28889426, 88 | 6116920.07132621, 5980935.48786045], 89 | [6320348.4990906, 6276139.09205974, 6199670.56624433, 90 | 6091551.90273768, 5952590.38414781], 91 | [6270385.16249031, 6219684.08214232, 6137100.75832981, 92 | 6023313.2794414, 5879194.72399075], 93 | [6145476.82098957, 6078546.55734877, 5980676.23854351, 94 | 5852716.72120069, 5695705.57359808], 95 | [6095513.48438928, 
6022091.54743135, 5918106.430629, 96 | 5784478.09790441, 5622309.91344102]]) 97 | 98 | TIES_EXP7 = np.array([[6372937.31273379, 6370997., 6366146.21816553, 99 | 6351605.98629588, 6327412.61244969, 6293626.50067273, 100 | 6286869.27831734], 101 | [6353136.46335726, 6345786.79166939, 6327412.61244969, 102 | 6299445.69529922, 6261968.60390423, 6215087.60607344, 103 | 6205711.40650728]]) 104 | 105 | 106 | class TestGeoInterpolator(unittest.TestCase): 107 | """Class for unit testing the ancillary interpolation functions.""" 108 | 109 | def test_fillborders(self): 110 | """Test filling borders.""" 111 | lons = np.arange(20).reshape((4, 5), order="F") 112 | lats = np.arange(20).reshape((4, 5), order="C") 113 | lines = np.array([2, 7, 12, 17]) / 5.0 114 | cols = np.array([2, 7, 12, 17, 22]) 115 | hlines = np.arange(20) / 5.0 116 | hcols = np.arange(24) 117 | satint = GeoInterpolator( 118 | (lons, lats), (lines, cols), (hlines, hcols), chunk_size=10) 119 | satint.fill_borders('x', 'y') 120 | 121 | np.testing.assert_allclose(satint.tie_data[0], TIES_EXP1) 122 | np.testing.assert_allclose(satint.row_indices, np.array( 123 | [0, 2, 7, 9, 10, 12, 17, 19]) / 5.0) 124 | self.assertTrue( 125 | np.allclose(satint.col_indices, np.array([0, 2, 7, 12, 17, 22, 23]))) 126 | 127 | def test_extrapolate_cols(self): 128 | """Test extrapolating columns.""" 129 | lons = np.arange(10).reshape((2, 5), order="F") 130 | lats = np.arange(10).reshape((2, 5), order="C") 131 | lines = np.array([2, 7]) 132 | cols = np.array([2, 7, 12, 17, 22]) 133 | hlines = np.arange(10) 134 | hcols = np.arange(24) 135 | satint = GeoInterpolator((lons, lats), (lines, cols), (hlines, hcols)) 136 | 137 | self.assertTrue(np.allclose(satint._extrapolate_cols(satint.tie_data[0]), 138 | TIES_EXP2)) 139 | 140 | def test_fill_col_borders(self): 141 | """Test filling the column borders.""" 142 | lons = np.arange(10).reshape((2, 5), order="F") 143 | lats = np.arange(10).reshape((2, 5), order="C") 144 | lines = np.array([2, 7]) 145 | cols = np.array([2, 7, 12, 17, 22]) 146 | hlines = np.arange(10) 147 | hcols = np.arange(24) 148 | satint = GeoInterpolator((lons, lats), (lines, cols), (hlines, hcols)) 149 | satint._fill_col_borders() 150 | np.testing.assert_allclose(satint.tie_data[0], TIES_EXP7) 151 | np.testing.assert_allclose(satint.col_indices, 152 | np.array([0, 2, 7, 12, 17, 22, 23])) 153 | 154 | def test_extrapolate_rows(self): 155 | """Test extrapolation of rows.""" 156 | lons = np.arange(10).reshape((2, 5), order="F") 157 | lats = np.arange(10).reshape((2, 5), order="C") 158 | lines = np.array([2, 7]) 159 | cols = np.array([2, 7, 12, 17, 22]) 160 | hlines = np.arange(10) 161 | hcols = np.arange(24) 162 | satint = GeoInterpolator((lons, lats), (lines, cols), (hlines, hcols)) 163 | np.testing.assert_allclose(satint._extrapolate_rows(satint.tie_data[0], 164 | hlines, -0.4, 9.4), 165 | TIES_EXP4) 166 | 167 | def test_fill_row_borders(self): 168 | """Test filling the row borders.""" 169 | lons = np.arange(20).reshape((4, 5), order="F") 170 | lats = np.arange(20).reshape((4, 5), order="C") 171 | lines = np.array([2, 7, 12, 17]) / 5.0 172 | cols = np.array([2, 7, 12, 17, 22]) 173 | hlines = np.arange(20) / 5.0 174 | hcols = np.arange(24) 175 | satint = GeoInterpolator((lons, lats), (lines, cols), (hlines, hcols)) 176 | satint._fill_row_borders() 177 | np.testing.assert_allclose(satint.tie_data[0], 178 | TIES_EXP5) 179 | np.testing.assert_allclose(satint.row_indices, 180 | np.array([0, 2, 7, 12, 17, 19]) / 5.0) 181 | satint = GeoInterpolator((lons, 
lats), (lines, cols), 182 | (hlines, hcols), chunk_size=10) 183 | satint._fill_row_borders() 184 | np.testing.assert_allclose(satint.tie_data[0], 185 | TIES_EXP6) 186 | np.testing.assert_allclose(satint.row_indices, 187 | np.array([0, 2, 7, 9, 10, 12, 17, 19]) / 5.0) 188 | 189 | 190 | TIE_LONS = np.array([[1, 2, 3, 4], 191 | [1, 2, 3, 4], 192 | [1, 2, 3, 4], 193 | [1, 2, 3, 4], 194 | [1, 2, 3, 4]]) 195 | 196 | TIE_LATS = np.array([[1, 1, 1, 1], 197 | [2, 2, 2, 2], 198 | [3, 3, 3, 3], 199 | [4, 4, 4, 4], 200 | [5, 5, 5, 5]]) 201 | 202 | 203 | class TestGeoGridInterpolator: 204 | """Test the GeoGridInterpolator.""" 205 | 206 | @pytest.mark.parametrize("args", ((TIE_LONS, TIE_LATS), 207 | [SwathDefinition(TIE_LONS, TIE_LATS)] 208 | )) 209 | def test_geogrid_interpolation(self, args): 210 | """Test that the interpolator works with both explicit tie-point arrays and swath definition objects.""" 211 | x_points = np.array([0, 1, 3, 7]) 212 | y_points = np.array([0, 1, 3, 7, 15]) 213 | 214 | interpolator = GeoGridInterpolator((y_points, x_points), *args) 215 | 216 | fine_x_points = np.arange(8) 217 | fine_y_points = np.arange(16) 218 | 219 | lons, lats = interpolator.interpolate((fine_y_points, fine_x_points)) 220 | 221 | lons_expected = np.array([1., 2., 2.5, 3., 3.25, 3.5, 3.75, 4.]) 222 | lats_expected = np.array([1., 2., 2.5, 3., 3.25, 3.5, 3.75, 4., 4.125, 223 | 4.25, 4.375, 4.5, 4.625, 4.75, 4.875, 5.]) 224 | 225 | np.testing.assert_allclose(lons[0, :], lons_expected, rtol=5e-5) 226 | np.testing.assert_allclose(lats[:, 0], lats_expected, rtol=5e-5) 227 | 228 | def test_geogrid_interpolation_counts_its_arguments(self): 229 | """Test that an arbitrary number of argument is not allowed in the interpolator.""" 230 | with pytest.raises(ValueError): 231 | _ = GeoGridInterpolator((None, None), None, None, None) 232 | 233 | def test_geogrid_interpolation_to_shape(self): 234 | """Test that the interpolator works with both explicit tie-point arrays and swath definition objects.""" 235 | x_points = np.array([0, 1, 3, 7]) 236 | y_points = np.array([0, 1, 3, 7, 15]) 237 | 238 | interpolator = GeoGridInterpolator((y_points, x_points), TIE_LONS, TIE_LATS) 239 | 240 | lons, lats = interpolator.interpolate_to_shape((16, 8)) 241 | 242 | lons_expected = np.array([1., 2., 2.5, 3., 3.25, 3.5, 3.75, 4.]) 243 | lats_expected = np.array([1., 2., 2.5, 3., 3.25, 3.5, 3.75, 4., 4.125, 244 | 4.25, 4.375, 4.5, 4.625, 4.75, 4.875, 5.]) 245 | 246 | np.testing.assert_allclose(lons[0, :], lons_expected, rtol=5e-5) 247 | np.testing.assert_allclose(lats[:, 0], lats_expected, rtol=5e-5) 248 | 249 | def test_geogrid_interpolation_preserves_dtype(self): 250 | """Test that the interpolator works with both explicit tie-point arrays and swath definition objects.""" 251 | x_points = np.array([0, 1, 3, 7]) 252 | y_points = np.array([0, 1, 3, 7, 15]) 253 | 254 | interpolator = GeoGridInterpolator((y_points, x_points), 255 | TIE_LONS.astype(np.float32), TIE_LATS.astype(np.float32)) 256 | 257 | lons, lats = interpolator.interpolate_to_shape((16, 8)) 258 | 259 | assert lons.dtype == np.float32 260 | assert lats.dtype == np.float32 261 | 262 | def test_chunked_geogrid_interpolation(self): 263 | """Test that the interpolator works with both explicit tie-point arrays and swath definition objects.""" 264 | dask = pytest.importorskip("dask") 265 | 266 | x_points = np.array([0, 1, 3, 7]) 267 | y_points = np.array([0, 1, 3, 7, 15]) 268 | 269 | interpolator = GeoGridInterpolator((y_points, x_points), 270 | TIE_LONS.astype(np.float32), 
TIE_LATS.astype(np.float32)) 271 | 272 | lons, lats = interpolator.interpolate_to_shape((16, 8), chunks=4) 273 | 274 | assert lons.chunks == ((4, 4, 4, 4), (4, 4)) 275 | assert lats.chunks == ((4, 4, 4, 4), (4, 4)) 276 | 277 | with dask.config.set({"array.chunk-size": 64}): 278 | 279 | lons, lats = interpolator.interpolate_to_shape((16, 8), chunks="auto") 280 | assert lons.chunks == ((4, 4, 4, 4), (4, 4)) 281 | assert lats.chunks == ((4, 4, 4, 4), (4, 4)) 282 | 283 | def test_geogrid_interpolation_can_extrapolate(self): 284 | """Test that the interpolator can also extrapolate given the right parameters.""" 285 | x_points = np.array([0, 1, 3, 7]) 286 | y_points = np.array([0, 1, 3, 7, 15]) 287 | 288 | interpolator = GeoGridInterpolator((y_points, x_points), TIE_LONS, TIE_LATS, 289 | bounds_error=False, fill_value=None) 290 | 291 | lons, lats = interpolator.interpolate_to_shape((16, 16), method="cubic") 292 | 293 | assert lons.shape == (16, 16) 294 | 295 | 296 | class TestGeoSplineInterpolator: 297 | """Test the GeoGridInterpolator.""" 298 | 299 | @pytest.mark.parametrize("args", ((TIE_LONS, TIE_LATS), 300 | [SwathDefinition(TIE_LONS, TIE_LATS)] 301 | )) 302 | def test_geospline_interpolation(self, args): 303 | """Test that the interpolator works with both explicit tie-point arrays and swath definition objects.""" 304 | x_points = np.array([0, 1, 3, 7]) 305 | y_points = np.array([0, 1, 3, 7, 15]) 306 | 307 | interpolator = GeoSplineInterpolator((y_points, x_points), *args, kx=1, ky=1) 308 | 309 | fine_x_points = np.arange(8) 310 | fine_y_points = np.arange(16) 311 | 312 | lons, lats = interpolator.interpolate((fine_y_points, fine_x_points)) 313 | 314 | lons_expected = np.array([1., 2., 2.5, 3., 3.25, 3.5, 3.75, 4.]) 315 | lats_expected = np.array([1., 2., 2.5, 3., 3.25, 3.5, 3.75, 4., 4.125, 316 | 4.25, 4.375, 4.5, 4.625, 4.75, 4.875, 5.]) 317 | 318 | np.testing.assert_allclose(lons[0, :], lons_expected, rtol=5e-5) 319 | np.testing.assert_allclose(lats[:, 0], lats_expected, rtol=5e-5) 320 | 321 | def test_geospline_interpolation_to_shape(self): 322 | """Test that the interpolator works with both explicit tie-point arrays and swath definition objects.""" 323 | x_points = np.array([0, 1, 3, 7]) 324 | y_points = np.array([0, 1, 3, 7, 15]) 325 | 326 | interpolator = GeoSplineInterpolator((y_points, x_points), TIE_LONS, TIE_LATS, kx=1, ky=1) 327 | 328 | lons, lats = interpolator.interpolate_to_shape((16, 8)) 329 | 330 | lons_expected = np.array([1., 2., 2.5, 3., 3.25, 3.5, 3.75, 4.]) 331 | lats_expected = np.array([1., 2., 2.5, 3., 3.25, 3.5, 3.75, 4., 4.125, 332 | 4.25, 4.375, 4.5, 4.625, 4.75, 4.875, 5.]) 333 | 334 | np.testing.assert_allclose(lons[0, :], lons_expected, rtol=5e-5) 335 | np.testing.assert_allclose(lats[:, 0], lats_expected, rtol=5e-5) 336 | 337 | def test_geospline_interpolation_preserves_dtype(self): 338 | """Test that the interpolator works with both explicit tie-point arrays and swath definition objects.""" 339 | x_points = np.array([0, 1, 3, 7]) 340 | y_points = np.array([0, 1, 3, 7, 15]) 341 | 342 | interpolator = GeoGridInterpolator((y_points, x_points), 343 | TIE_LONS.astype(np.float32), TIE_LATS.astype(np.float32)) 344 | 345 | lons, lats = interpolator.interpolate_to_shape((16, 8)) 346 | 347 | assert lons.dtype == np.float32 348 | assert lats.dtype == np.float32 349 | 350 | def test_chunked_geospline_interpolation(self): 351 | """Test that the interpolator works with both explicit tie-point arrays and swath definition objects.""" 352 | dask = pytest.importorskip("dask") 
353 | 354 | x_points = np.array([0, 1, 3, 7]) 355 | y_points = np.array([0, 1, 3, 7, 15]) 356 | 357 | interpolator = GeoGridInterpolator((y_points, x_points), 358 | TIE_LONS.astype(np.float32), TIE_LATS.astype(np.float32)) 359 | 360 | lons, lats = interpolator.interpolate_to_shape((16, 8), chunks=4) 361 | 362 | assert lons.chunks == ((4, 4, 4, 4), (4, 4)) 363 | assert lats.chunks == ((4, 4, 4, 4), (4, 4)) 364 | 365 | with dask.config.set({"array.chunk-size": 64}): 366 | 367 | lons, lats = interpolator.interpolate_to_shape((16, 8), chunks="auto") 368 | assert lons.chunks == ((4, 4, 4, 4), (4, 4)) 369 | assert lats.chunks == ((4, 4, 4, 4), (4, 4)) 370 | -------------------------------------------------------------------------------- /geotiepoints/tests/test_modis.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # Copyright (c) 2013-2021 Python-geotiepoints developers 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | """Unit tests for python-geotiepoints: MODIS examples.""" 18 | 19 | import numpy as np 20 | import h5py 21 | import os 22 | 23 | import pytest 24 | 25 | FILENAME_250M_RESULT = os.path.join( 26 | os.path.dirname(__file__), '../../testdata/250m_lonlat_section_result.h5') 27 | FILENAME_250M_INPUT = os.path.join( 28 | os.path.dirname(__file__), '../../testdata/250m_lonlat_section_input.h5') 29 | 30 | FILENAME_FULL = os.path.join( 31 | os.path.dirname(__file__), '../../testdata/test_5_to_1_geoloc_full.h5') 32 | FILENAME_5KM = os.path.join( 33 | os.path.dirname(__file__), '../../testdata/test_5_to_1_geoloc_5km.h5') 34 | 35 | from geotiepoints import (modis5kmto1km, modis1kmto250m) 36 | 37 | from geotiepoints import get_scene_splits 38 | 39 | 40 | class TestUtils: 41 | """Class for unit testing the ancillary interpolation functions.""" 42 | 43 | def test_get_numof_subscene_lines(self): 44 | """Test getting the number of sub-scene lines. 45 | 46 | Function is dependent on the number of CPUs and for various number of 47 | lines in a scan. 48 | """ 49 | ncpus = 3 50 | scene_splits = get_scene_splits(1060, 10, ncpus) 51 | assert list(scene_splits) == [350, 700, 1050] 52 | 53 | 54 | class TestMODIS: 55 | """Class for system testing the MODIS interpolation.""" 56 | 57 | def test_5_to_1(self): 58 | """Test the 5km to 1km interpolation facility.""" 59 | 60 | with h5py.File(FILENAME_FULL) as h5f: 61 | glons = h5f['longitude'][:] / 1000. 62 | glats = h5f['latitude'][:] / 1000. 63 | 64 | with h5py.File(FILENAME_5KM) as h5f: 65 | lons = h5f['longitude'][:] / 1000. 66 | lats = h5f['latitude'][:] / 1000. 
67 | 68 | tlons, tlats = modis5kmto1km(lons, lats) 69 | 70 | np.testing.assert_allclose(tlons, glons, atol=0.05) 71 | np.testing.assert_allclose(tlats, glats, atol=0.05) 72 | 73 | @pytest.mark.parametrize("ncores", [None, 4]) 74 | def test_1000m_to_250m(self, ncores): 75 | """Test the 1 km to 250 meter interpolation facility.""" 76 | if ncores: 77 | import multiprocessing as mp 78 | mp.set_start_method("spawn", force=True) 79 | 80 | with h5py.File(FILENAME_250M_RESULT) as h5f: 81 | glons = h5f['longitude'][:] / 1000. 82 | glats = h5f['latitude'][:] / 1000. 83 | 84 | with h5py.File(FILENAME_250M_INPUT) as h5f: 85 | lons = h5f['longitude'][:] / 1000. 86 | lats = h5f['latitude'][:] / 1000. 87 | 88 | kwargs = {"cores": ncores} if ncores is not None else {} 89 | tlons, tlats = modis1kmto250m(lons, lats, **kwargs) 90 | np.testing.assert_allclose(tlons, glons, atol=0.05) 91 | np.testing.assert_allclose(tlats, glats, atol=0.05) 92 | -------------------------------------------------------------------------------- /geotiepoints/tests/test_modisinterpolator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # Copyright (c) 2017-2022 Python-geotiepoints developers 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 
17 | """Tests for MODIS interpolators.""" 18 | import warnings 19 | 20 | import numpy as np 21 | from pyproj import Geod 22 | import h5py 23 | import os 24 | import dask 25 | import dask.array as da 26 | import xarray as xr 27 | import pytest 28 | from .utils import CustomScheduler 29 | from geotiepoints.modisinterpolator import (modis_1km_to_250m, 30 | modis_1km_to_500m, 31 | modis_5km_to_1km, 32 | modis_5km_to_500m, 33 | modis_5km_to_250m) 34 | FILENAME_DATA = os.path.join( 35 | os.path.dirname(__file__), '../../testdata/modis_test_data.h5') 36 | 37 | 38 | def _to_dask(arr): 39 | return da.from_array(arr, chunks=4096) 40 | 41 | 42 | def _to_da(arr): 43 | return xr.DataArray(_to_dask(arr), dims=['y', 'x']) 44 | 45 | 46 | def _load_h5_geo_vars(*var_names): 47 | h5f = h5py.File(FILENAME_DATA, 'r') 48 | return tuple(h5f[var_name] for var_name in var_names) 49 | 50 | 51 | def load_1km_lonlat_as_numpy(): 52 | lon1, lat1 = _load_h5_geo_vars('lon_1km', 'lat_1km') 53 | return lon1[:], lat1[:] 54 | 55 | 56 | def load_1km_lonlat_as_dask(): 57 | lon1, lat1 = _load_h5_geo_vars('lon_1km', 'lat_1km') 58 | return _to_dask(lon1), _to_dask(lat1) 59 | 60 | 61 | def load_1km_lonlat_as_xarray_dask(): 62 | lon1, lat1 = _load_h5_geo_vars('lon_1km', 'lat_1km') 63 | return _to_da(lon1), _to_da(lat1) 64 | 65 | 66 | def load_1km_lonlat_satz_as_xarray_dask(): 67 | lon1, lat1, satz1 = _load_h5_geo_vars('lon_1km', 'lat_1km', 'satz_1km') 68 | return _to_da(lon1), _to_da(lat1), _to_da(satz1) 69 | 70 | 71 | def load_5km_lonlat_satz1_as_xarray_dask(): 72 | lon1, lat1, satz1 = _load_h5_geo_vars('lon_1km', 'lat_1km', 'satz_1km') 73 | lon5 = lon1[2::5, 2::5] 74 | lat5 = lat1[2::5, 2::5] 75 | satz5 = satz1[2::5, 2::5] 76 | return _to_da(lon5), _to_da(lat5), _to_da(satz5) 77 | 78 | 79 | def load_l2_5km_lonlat_satz1_as_xarray_dask(): 80 | lon1, lat1, satz1 = _load_h5_geo_vars('lon_1km', 'lat_1km', 'satz_1km') 81 | lon5 = lon1[2::5, 2:-5:5] 82 | lat5 = lat1[2::5, 2:-5:5] 83 | satz5 = satz1[2::5, 2:-5:5] 84 | return _to_da(lon5), _to_da(lat5), _to_da(satz5) 85 | 86 | 87 | def load_500m_lonlat_expected_as_xarray_dask(): 88 | h5f = h5py.File(FILENAME_DATA, 'r') 89 | lon500 = _to_da(h5f['lon_500m']) 90 | lat500 = _to_da(h5f['lat_500m']) 91 | return lon500, lat500 92 | 93 | 94 | def load_250m_lonlat_expected_as_xarray_dask(): 95 | h5f = h5py.File(FILENAME_DATA, 'r') 96 | lon250 = _to_da(h5f['lon_250m']) 97 | lat250 = _to_da(h5f['lat_250m']) 98 | return lon250, lat250 99 | 100 | 101 | def assert_geodetic_distance( 102 | lons_actual: np.ndarray, 103 | lats_actual: np.ndarray, 104 | lons_desired: np.ndarray, 105 | lats_desired: np.ndarray, 106 | max_distance_diff: float, 107 | ) -> None: 108 | """Check that the geodetic distance between two sets of coordinates is smaller than a threshold. 109 | 110 | Args: 111 | lons_actual: Longitude array produced by interpolation being tested. 112 | lats_actual: Latitude array produced by interpolation being tested. 113 | lons_desired: Longitude array of expected/truth coordinates. 114 | lats_desired: Latitude array of expected/truth coordinates. 115 | max_distance_diff: Limit of allowed distance difference in meters. 
116 | 117 | """ 118 | g = Geod(ellps="WGS84") 119 | _, _, dist = g.inv(lons_actual, lats_actual, lons_desired, lats_desired) 120 | np.testing.assert_array_less( 121 | dist, max_distance_diff, 122 | err_msg=f"Coordinates are greater than {max_distance_diff} geodetic " 123 | "meters from the expected coordinates.") 124 | 125 | 126 | @pytest.mark.parametrize( 127 | ("input_func", "exp_func", "interp_func", "dist_max", "exp_5km_warning"), 128 | [ 129 | (load_1km_lonlat_satz_as_xarray_dask, load_500m_lonlat_expected_as_xarray_dask, modis_1km_to_500m, 5, False), 130 | (load_1km_lonlat_satz_as_xarray_dask, load_250m_lonlat_expected_as_xarray_dask, modis_1km_to_250m, 8.30, False), 131 | (load_5km_lonlat_satz1_as_xarray_dask, load_1km_lonlat_as_xarray_dask, modis_5km_to_1km, 25, False), 132 | (load_l2_5km_lonlat_satz1_as_xarray_dask, load_1km_lonlat_as_xarray_dask, modis_5km_to_1km, 110, False), 133 | (load_5km_lonlat_satz1_as_xarray_dask, load_500m_lonlat_expected_as_xarray_dask, modis_5km_to_500m, 134 | 19500, True), 135 | (load_5km_lonlat_satz1_as_xarray_dask, load_250m_lonlat_expected_as_xarray_dask, modis_5km_to_250m, 136 | 25800, True), 137 | ] 138 | ) 139 | def test_sat_angle_based_interp(input_func, exp_func, interp_func, dist_max, exp_5km_warning): 140 | lon1, lat1, satz1 = input_func() 141 | lons_exp, lats_exp = exp_func() 142 | 143 | # when working with dask arrays, we shouldn't compute anything 144 | with dask.config.set(scheduler=CustomScheduler(0)), warnings.catch_warnings(record=True) as warns: 145 | lons, lats = interp_func(lon1, lat1, satz1) 146 | has_5km_warning = any("may result in poor quality" in str(w.message) for w in warns) 147 | if exp_5km_warning: 148 | assert has_5km_warning 149 | else: 150 | assert not has_5km_warning 151 | 152 | if hasattr(lons, "compute"): 153 | lons, lats = da.compute(lons, lats) 154 | assert_geodetic_distance(lons, lats, lons_exp, lats_exp, dist_max) 155 | assert not np.any(np.isnan(lons)) 156 | assert not np.any(np.isnan(lats)) 157 | 158 | 159 | def test_sat_angle_based_interp_nan_handling(): 160 | # See GH #19 161 | lon1, lat1, satz1 = load_1km_lonlat_satz_as_xarray_dask() 162 | satz1 = _to_da(abs(np.linspace(-65.4, 65.4, 1354, dtype=np.float32)).repeat(20).reshape(-1, 20).T) 163 | lons, lats = modis_1km_to_500m(lon1, lat1, satz1) 164 | assert not np.any(np.isnan(lons.compute())) 165 | assert not np.any(np.isnan(lats.compute())) 166 | 167 | 168 | def test_poles_datum(): 169 | orig_lon, lat1, satz1 = load_1km_lonlat_satz_as_xarray_dask() 170 | lon1 = orig_lon + 180 171 | lon1 = xr.where(lon1 > 180, lon1 - 360, lon1) 172 | 173 | lat5 = lat1[2::5, 2::5] 174 | lon5 = lon1[2::5, 2::5] 175 | satz5 = satz1[2::5, 2::5] 176 | lons, lats = modis_5km_to_1km(lon5, lat5, satz5) 177 | 178 | lons = lons + 180 179 | lons = xr.where(lons > 180, lons - 360, lons) 180 | assert_geodetic_distance(lons, lats, orig_lon, lat1, 25.0) 181 | -------------------------------------------------------------------------------- /geotiepoints/tests/test_multilinear.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # Copyright (c) 2017-2021 Python-geotiepoints developers 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 
9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | """Test the multilinear interpolation Cython implementation.""" 18 | 19 | import unittest 20 | import numpy as np 21 | 22 | from geotiepoints.multilinear import MultilinearInterpolator 23 | 24 | ARR1 = np.array([[0.99819991, 0.46872418, 0.39356122, 0.33170364, 0.79171217, 25 | 0.31300346, 0.69075206, 0.47492703, 0.2012821, 0.57347047, 26 | 0.12934437, 0.7384143, 0.84650654, 0.63113452, 0.35514309, 27 | 0.36773267, 0.42887193, 0.83856559, 0.1081537, 0.33634562, 28 | 0.25445117, 0.30121727, 0.19697695, 0.92937056, 0.40359487, 29 | 0.10237384, 0.37803665, 0.94699248, 0.08045698, 0.01366914], 30 | [0.72748552, 0.70872219, 0.36883461, 0.52914895, 0.22308535, 31 | 0.4444687, 0.52394334, 0.52870835, 0.31756298, 0.13776131, 32 | 0.11812231, 0.46974149, 0.12318789, 0.76525517, 0.97328814, 33 | 0.6580273, 0.93059119, 0.32785305, 0.57033161, 0.80133526, 34 | 0.4311177, 0.44957946, 0.81073879, 0.79356296, 0.77565555, 35 | 0.90520185, 0.76064422, 0.78609587, 0.43915797, 0.50745485]]) 36 | 37 | RES1 = np.array([[1.25149566, 0.86801702, 0.59233148, 0.66310018, 0.86093089, 38 | 0.59448266, 0.88145882, 0.72057176, 0.44395844, 0.62692691, 39 | 0.22956686, 0.89308545, 0.8823242, 1.01691582, 1.06050602, 40 | 0.78964244, 1.04244001, 0.93481853, 0.61242017, 0.90454307, 41 | 0.5570492, 0.59214105, 0.87052131, 1.24339837, 0.9031915, 42 | 0.93282679, 0.88212567, 1.25090173, 0.47821939, 0.51308049], 43 | [1.13891573, 0.79536539, 0.54753759, 0.61188808, 0.82629032, 44 | 0.55155224, 0.8025709, 0.64724795, 0.42423377, 0.60563713, 45 | 0.22485215, 0.82022895, 0.86317281, 0.92860053, 1.00514142, 46 | 0.73270743, 0.9756435, 0.88385772, 0.59570712, 0.85230456, 47 | 0.52319791, 0.55035212, 0.83992216, 1.12698577, 0.84054343, 48 | 0.91689093, 0.82346408, 1.13391799, 0.46731605, 0.5109711]]) 49 | 50 | 51 | def assertNumpyArraysEqual(self, other): 52 | if self.shape != other.shape: 53 | raise AssertionError("Shapes don't match") 54 | if not np.allclose(self, other): 55 | raise AssertionError("Elements don't match!") 56 | 57 | 58 | class TestMultilinearInterpolator(unittest.TestCase): 59 | 60 | """Class for unit testing the multilinear interpolation method 61 | """ 62 | 63 | def setUp(self): 64 | pass 65 | 66 | def test_multilinear_interp(self): 67 | """Test the multilinear interpolation""" 68 | 69 | smin = [-1, -1] 70 | smax = [1, 1] 71 | orders = [5, 5] 72 | 73 | f = lambda x: np.vstack([ 74 | np.sqrt(x[0, :]**2 + x[1, :]**2), 75 | np.power(x[0, :]**3 + x[1, :]**3, 1.0 / 3.0) 76 | ]) 77 | 78 | interp = MultilinearInterpolator(smin, smax, orders) 79 | interp.set_values(f(interp.grid)) 80 | 81 | result = interp(ARR1) 82 | # exact_values = f(ARR1) 83 | 84 | assertNumpyArraysEqual(result, RES1) 85 | 86 | def tearDown(self): 87 | """Clean up""" 88 | return 89 | 90 | 91 | def suite(): 92 | """The suite for Multilinear Interpolator""" 93 | loader = unittest.TestLoader() 94 | mysuite = unittest.TestSuite() 95 | mysuite.addTest(loader.loadTestsFromTestCase(TestMultilinearInterpolator)) 96 | 97 | return mysuite 98 | 99 | if __name__ == '__main__': 100 | unittest.main() 101 | 
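For readers skimming this listing, here is a minimal, self-contained sketch of the usage pattern exercised by `test_multilinear_interp` above. It assumes the package has been built with its Cython extensions so that `geotiepoints.multilinear_cython` is importable; the `f` helper, the random query points, and the printed error check are illustrative additions, not part of the repository.

```python
# Minimal usage sketch for MultilinearInterpolator (assumes the compiled
# geotiepoints.multilinear_cython extension is available).
import numpy as np

from geotiepoints.multilinear import MultilinearInterpolator

smin, smax, orders = [-1.0, -1.0], [1.0, 1.0], [5, 5]


def f(x):
    """Evaluate two example functions column-wise on (2, n) points."""
    return np.vstack([np.sqrt(x[0, :] ** 2 + x[1, :] ** 2),
                      np.power(x[0, :] ** 3 + x[1, :] ** 3, 1.0 / 3.0)])


interp = MultilinearInterpolator(smin, smax, orders)
interp.set_values(f(interp.grid))  # sample the functions on the cartesian grid

points = np.random.random((2, 1000))  # (d, n) query points in [0, 1)^2
approx = interp(points)               # (2, 1000) interpolated values
exact = f(points)
print(np.abs(approx - exact).max())   # rough interpolation error
```

The printed value should be small but non-zero, since the 5x5 grid only approximates the two functions between its nodes.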
-------------------------------------------------------------------------------- /geotiepoints/tests/test_satelliteinterpolator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # Copyright (c) 2013-2021 Python-geotiepoints developers 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | """Tests for SatelliteInterpolator.""" 18 | 19 | import unittest 20 | import numpy as np 21 | 22 | from geotiepoints import SatelliteInterpolator 23 | 24 | TIES_EXP1 = np.array([[6384905.78040055, 6381081.08333225, 6371519.34066148, 25 | 6328950.00792935, 6253610.69157758, 6145946.19489936, 26 | 6124413.29556372], 27 | [6377591.95940176, 6370997., 6354509.6014956, 28 | 6305151.62592155, 6223234.99818839, 6109277.14889072, 29 | 6086485.57903118], 30 | [6359307.40690478, 6345786.79166939, 6311985.2535809, 31 | 6245655.67090206, 6147295.76471541, 6017604.5338691, 32 | 5991666.28769983], 33 | [6351993.58590599, 6335702.70833714, 6294975.51441502, 34 | 6221857.28889426, 6116920.07132621, 5980935.48786045, 35 | 5953738.5711673], 36 | [6338032.26190294, 6320348.4990906, 6276139.09205974, 37 | 6199670.56624433, 6091551.90273768, 5952590.38414781, 38 | 5924798.08042984], 39 | [6290665.5946295, 6270385.16249031, 6219684.08214232, 40 | 6137100.75832981, 6023313.2794414, 5879194.72399075, 41 | 5850371.01290062], 42 | [6172248.92644589, 6145476.82098957, 6078546.55734877, 43 | 5980676.23854351, 5852716.72120069, 5695705.57359808, 44 | 5664303.34407756], 45 | [6124882.25917245, 6095513.48438928, 6022091.54743135, 46 | 5918106.430629, 5784478.09790441, 5622309.91344102, 47 | 5589876.27654834]]) 48 | 49 | TIES_EXP2 = np.array([[6372937.31273379, 6370997., 6366146.21816553, 50 | 6351605.98629588, 6327412.61244969, 6293626.50067273, 51 | 6286869.27831734], 52 | [6353136.46335726, 6345786.79166939, 6327412.61244969, 53 | 6299445.69529922, 6261968.60390423, 6215087.60607344, 54 | 6205711.40650728]]) 55 | 56 | TIES_EXP3 = np.array([[6372937.31273379, 6370997., 6366146.21816553, 57 | 6351605.98629588, 6327412.61244969, 6293626.50067273, 58 | 6286869.27831734], 59 | [6353136.46335726, 6345786.79166939, 6327412.61244969, 60 | 6299445.69529922, 6261968.60390423, 6215087.60607344, 61 | 6205711.40650728]]) 62 | 63 | TIES_EXP4 = np.array([[6381081.08333225, 6381639.66045187, 6372470.10269454, 64 | 6353590.21586788, 6325042.05851245], 65 | [6370997., 6366146.21816553, 6351605.98629588, 66 | 6327412.61244969, 6293626.50067273], 67 | [6345786.79166939, 6327412.61244969, 6299445.69529922, 68 | 6261968.60390423, 6215087.60607344], 69 | [6335702.70833714, 6311919.17016336, 6278581.57890056, 70 | 6235791.00048604, 6183672.04823372]]) 71 | 72 | TIES_EXP5 = np.array([[6381081.08333225, 6371519.34066148, 6328950.00792935, 73 | 6253610.69157758, 6145946.19489936], 74 | [6370997., 6354509.6014956, 6305151.62592155, 75 | 6223234.99818839, 
6109277.14889072], 76 | [6345786.79166939, 6311985.2535809, 6245655.67090206, 77 | 6147295.76471541, 6017604.5338691], 78 | [6270385.16249031, 6219684.08214232, 6137100.75832981, 79 | 6023313.2794414, 5879194.72399075], 80 | [6145476.82098957, 6078546.55734877, 5980676.23854351, 81 | 5852716.72120069, 5695705.57359808], 82 | [6095513.48438928, 6022091.54743135, 5918106.430629, 83 | 5784478.09790441, 5622309.91344102]]) 84 | 85 | TIES_EXP6 = np.array([[6381081.08333225, 6371519.34066148, 6328950.00792935, 86 | 6253610.69157758, 6145946.19489936], 87 | [6370997., 6354509.6014956, 6305151.62592155, 88 | 6223234.99818839, 6109277.14889072], 89 | [6345786.79166939, 6311985.2535809, 6245655.67090206, 90 | 6147295.76471541, 6017604.5338691], 91 | [6335702.70833714, 6294975.51441502, 6221857.28889426, 92 | 6116920.07132621, 5980935.48786045], 93 | [6320348.4990906, 6276139.09205974, 6199670.56624433, 94 | 6091551.90273768, 5952590.38414781], 95 | [6270385.16249031, 6219684.08214232, 6137100.75832981, 96 | 6023313.2794414, 5879194.72399075], 97 | [6145476.82098957, 6078546.55734877, 5980676.23854351, 98 | 5852716.72120069, 5695705.57359808], 99 | [6095513.48438928, 6022091.54743135, 5918106.430629, 100 | 5784478.09790441, 5622309.91344102]]) 101 | 102 | 103 | class TestSatelliteInterpolator(unittest.TestCase): 104 | 105 | """Class for unit testing the ancillary interpolation functions 106 | """ 107 | 108 | def setUp(self): 109 | pass 110 | 111 | # def test_fillborders(self): 112 | # lons = np.arange(20).reshape((4, 5), order="F") 113 | # lats = np.arange(20).reshape((4, 5), order="C") 114 | # lines = np.array([2, 7, 12, 17]) 115 | # cols = np.array([2, 7, 12, 17, 22]) 116 | # hlines = np.arange(20) 117 | # hcols = np.arange(24) 118 | # satint = SatelliteInterpolator( 119 | # (lons, lats), (lines, cols), (hlines, hcols), chunk_size=10) 120 | # satint.fill_borders('x', 'y') 121 | # self.assertTrue(np.allclose(satint.tie_data[0], TIES_EXP1)) 122 | # self.assertTrue( 123 | # np.allclose(satint.row_indices, np.array([0, 2, 7, 9, 10, 12, 17, 19]))) 124 | # self.assertTrue( 125 | # np.allclose(satint.col_indices, np.array([0, 2, 7, 12, 17, 22, 23]))) 126 | 127 | def test_extrapolate_cols(self): 128 | lons = np.arange(10).reshape((2, 5), order="F") 129 | lats = np.arange(10).reshape((2, 5), order="C") 130 | lines = np.array([2, 7]) 131 | cols = np.array([2, 7, 12, 17, 22]) 132 | hlines = np.arange(10) 133 | hcols = np.arange(24) 134 | satint = SatelliteInterpolator( 135 | (lons, lats), (lines, cols), (hlines, hcols)) 136 | 137 | self.assertTrue(np.allclose(satint._extrapolate_cols(satint.tie_data[0]), 138 | TIES_EXP2)) 139 | 140 | def test_fill_col_borders(self): 141 | lons = np.arange(10).reshape((2, 5), order="F") 142 | lats = np.arange(10).reshape((2, 5), order="C") 143 | lines = np.array([2, 7]) 144 | cols = np.array([2, 7, 12, 17, 22]) 145 | hlines = np.arange(10) 146 | hcols = np.arange(24) 147 | satint = SatelliteInterpolator( 148 | (lons, lats), (lines, cols), (hlines, hcols)) 149 | satint._fill_col_borders() 150 | self.assertTrue(np.allclose(satint.tie_data[0], TIES_EXP3)) 151 | self.assertTrue(np.allclose(satint.col_indices, 152 | np.array([0, 2, 7, 12, 17, 22, 23]))) 153 | 154 | # def test_extrapolate_rows(self): 155 | # lons = np.arange(10).reshape((2, 5), order="F") 156 | # lats = np.arange(10).reshape((2, 5), order="C") 157 | # lines = np.array([2, 7]) 158 | # cols = np.array([2, 7, 12, 17, 22]) 159 | # hlines = np.arange(10) 160 | # hcols = np.arange(24) 161 | # satint = 
SatelliteInterpolator( 162 | # (lons, lats), (lines, cols), (hlines, hcols)) 163 | # self.assertTrue(np.allclose(satint._extrapolate_rows(satint.tie_data[0]), 164 | # TIES_EXP4) 165 | 166 | # def test_fill_row_borders(self): 167 | # lons = np.arange(20).reshape((4, 5), order="F") 168 | # lats = np.arange(20).reshape((4, 5), order="C") 169 | # lines = np.array([2, 7, 12, 17]) 170 | # cols = np.array([2, 7, 12, 17, 22]) 171 | # hlines = np.arange(20) 172 | # hcols = np.arange(24) 173 | # satint = SatelliteInterpolator( 174 | # (lons, lats), (lines, cols), (hlines, hcols)) 175 | # satint._fill_row_borders() 176 | # self.assertTrue(np.allclose(satint.tie_data[0], TIES_EXP5)) 177 | # self.assertTrue(np.allclose(satint.row_indices, 178 | # np.array([0, 2, 7, 12, 17, 19]))) 179 | # satint = SatelliteInterpolator((lons, lats), (lines, cols), 180 | # (hlines, hcols), chunk_size=10) 181 | # satint._fill_row_borders() 182 | # self.assertTrue(np.allclose(satint.tie_data[0], TIES_EXP6)) 183 | # self.assertTrue(np.allclose(satint.row_indices, 184 | # np.array([0, 2, 7, 9, 10, 12, 17, 19]))) 185 | -------------------------------------------------------------------------------- /geotiepoints/tests/test_simple_modis_interpolator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # Copyright (c) 2021 Python-geotiepoints developers 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 
17 | """Tests for simple MODIS interpolators.""" 18 | 19 | import numpy as np 20 | import pytest 21 | import dask 22 | import dask.array as da 23 | 24 | from geotiepoints.simple_modis_interpolator import modis_1km_to_250m, modis_1km_to_500m 25 | from .test_modisinterpolator import ( 26 | assert_geodetic_distance, 27 | load_1km_lonlat_as_xarray_dask, 28 | load_1km_lonlat_as_dask, 29 | load_1km_lonlat_as_numpy, 30 | load_500m_lonlat_expected_as_xarray_dask, 31 | load_250m_lonlat_expected_as_xarray_dask, 32 | ) 33 | 34 | from .utils import CustomScheduler 35 | 36 | 37 | @pytest.mark.parametrize( 38 | ("input_func", "exp_func", "interp_func", "dist_max"), 39 | [ 40 | (load_1km_lonlat_as_xarray_dask, load_500m_lonlat_expected_as_xarray_dask, modis_1km_to_500m, 16), 41 | (load_1km_lonlat_as_xarray_dask, load_250m_lonlat_expected_as_xarray_dask, modis_1km_to_250m, 27.35), 42 | (load_1km_lonlat_as_dask, load_500m_lonlat_expected_as_xarray_dask, modis_1km_to_500m, 16), 43 | (load_1km_lonlat_as_dask, load_250m_lonlat_expected_as_xarray_dask, modis_1km_to_250m, 27.35), 44 | (load_1km_lonlat_as_numpy, load_500m_lonlat_expected_as_xarray_dask, modis_1km_to_500m, 16), 45 | (load_1km_lonlat_as_numpy, load_250m_lonlat_expected_as_xarray_dask, modis_1km_to_250m, 27.35), 46 | ] 47 | ) 48 | def test_basic_interp(input_func, exp_func, interp_func, dist_max): 49 | lon1, lat1 = input_func() 50 | lons_exp, lats_exp = exp_func() 51 | 52 | # when working with dask arrays, we shouldn't compute anything 53 | with dask.config.set(scheduler=CustomScheduler(0)): 54 | lons, lats = interp_func(lon1, lat1) 55 | 56 | if hasattr(lons, "compute"): 57 | lons, lats = da.compute(lons, lats) 58 | assert_geodetic_distance(lons, lats, lons_exp, lats_exp, dist_max) 59 | assert not np.any(np.isnan(lons)) 60 | assert not np.any(np.isnan(lats)) 61 | 62 | 63 | def test_nonstandard_scan_size(): 64 | lon1, lat1 = load_1km_lonlat_as_xarray_dask() 65 | # remove 1 row from the end 66 | lon1 = lon1[:-1] 67 | lat1 = lat1[:-1] 68 | 69 | pytest.raises(ValueError, modis_1km_to_250m, lon1, lat1) 70 | -------------------------------------------------------------------------------- /geotiepoints/tests/test_viiinterpolator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # Copyright (c) 2017-2021 Python-geotiepoints developers 4 | # 5 | # This file is part of python-geotiepoints. 6 | # 7 | # python-geotiepoints is free software: you can redistribute it and/or modify it under the 8 | # terms of the GNU General Public License as published by the Free Software 9 | # Foundation, either version 3 of the License, or (at your option) any later 10 | # version. 11 | # 12 | # python-geotiepoints is distributed in the hope that it will be useful, but WITHOUT ANY 13 | # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR 14 | # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License along with 17 | # python-geotiepoints. If not, see . 18 | """Test of the interpolation of geographical tiepoints for the VII products. 19 | 20 | It follows the description provided in document "EPS-SG VII Level 1B Product Format Specification V4A". 21 | This version is compatible for vii (METimage) test data version V2 (Jan 2022). It is not back compatible 22 | with V1. 
23 | 24 | """ 25 | 26 | import unittest 27 | import numpy as np 28 | import xarray as xr 29 | import pytest 30 | from geotiepoints.viiinterpolator import tie_points_interpolation, tie_points_geo_interpolation 31 | 32 | 33 | TEST_N_SCANS = 2 34 | TEST_TIE_POINTS_FACTOR = 2 35 | TEST_SCAN_ALT_TIE_POINTS = 3 36 | TEST_VALID_ALT_TIE_POINTS = TEST_SCAN_ALT_TIE_POINTS * TEST_N_SCANS 37 | TEST_INVALID_ALT_TIE_POINTS = TEST_SCAN_ALT_TIE_POINTS * TEST_N_SCANS + 1 38 | TEST_ACT_TIE_POINTS = 4 39 | 40 | # Results of latitude/longitude interpolation with simple interpolation on coordinates 41 | TEST_LON_1 = np.array( 42 | [[-12., -11.5, -11., -10.5, -10.0, -9.5], 43 | [-10., -9.5, -9., -8.5, -8.0, -7.5], 44 | [-8., -7.5, -7., -6.5, -6.0, -5.5], 45 | [-6., -5.5, -5., -4.5, -4.0, -3.5], 46 | [0., 0.5, 1., 1.5, 2.0, 2.5], 47 | [2., 2.5, 3., 3.5, 4.0, 4.5], 48 | [4., 4.5, 5., 5.5, 6.0, 6.5], 49 | [6., 6.5, 7., 7.5, 8.0, 8.5]] 50 | ) 51 | TEST_LAT_1 = np.array( 52 | [[0., 0.5, 1., 1.5, 2., 2.5], 53 | [2., 2.5, 3., 3.5, 4., 4.5], 54 | [4., 4.5, 5., 5.5, 6., 6.5], 55 | [6., 6.5, 7., 7.5, 8., 8.5], 56 | [12., 12.5, 13., 13.5, 14.0, 14.5], 57 | [14., 14.5, 15., 15.5, 16., 16.5], 58 | [16., 16.5, 17., 17.5, 18., 18.5], 59 | [18., 18.5, 19., 19.5, 20., 20.5]] 60 | ) 61 | 62 | # Results of latitude/longitude interpolation on cartesian coordinates (latitude above 60 degrees) 63 | TEST_LON_2 = np.array( 64 | [[-12., -11.50003808, -11., -10.50011426, -10., -9.50019052], 65 | [-10.00243991, -9.5032411, -9.00366173, -8.50454031, -8.00488578, -7.5058423], 66 | [-8., -7.50034342, -7., -6.50042016, -6., -5.50049716], 67 | [-6.00734362, -5.50845783, -5.00857895, -4.50977302, -4.00981958, -3.51109426], 68 | [0., 0.49903263, 1., 1.49895241, 2., 2.49887151], 69 | [1.98257947, 2.48080192, 2.98127841, 3.47941324, 3.97996512, 4.47801105], 70 | [4., 4.49870746, 5., 5.49862418, 6., 6.49853998], 71 | [5.97729789, 6.47516183, 6.97594186, 7.47371256, 7.97456943, 8.47224525]] 72 | ) 73 | 74 | TEST_LAT_2 = np.array( 75 | [[0., 0.49998096, 1., 1.49994287, 2., 2.49990475], 76 | [1.99878116, 2.49838091, 2.99817081, 3.49773189, 3.99755935, 4.49708148], 77 | [4., 4.4998283, 5., 5.49978993, 6., 6.49975143], 78 | [5.99633155, 6.4957749, 6.99571447, 7.49511791, 7.99509473, 8.49445789], 79 | [12., 12.49951634, 13., 13.49947623, 14., 14.499435779], 80 | [13.99129786, 14.4904098, 14.99064796, 15.48971613, 15.98999196, 16.48901572], 81 | [16., 16.49935377, 17., 17.49931213, 18., 18.49927003], 82 | [17.98865968, 18.48759253, 18.98798235, 19.48686863, 19.98729684, 20.48613573]] 83 | ) 84 | 85 | # Results of latitude/longitude interpolation on cartesian coordinates (longitude with a 360 degrees step) 86 | TEST_LON_3 = np.array( 87 | [[-12., -11.50444038, -11., -10.50459822, -10., -9.50476197], 88 | [-10.07492627, -9.58101155, -9.07759836, -8.5839056, - 8.0803761, -7.58691614], 89 | [-8., -7.50510905, -7., -6.5052934, -6., -5.50548573], 90 | [-6.0862821, -5.59332416, -5.08942935, -4.59674283, -4.09272043, -3.60032066], 91 | [0., 0.49315061, 1., 1.49287934, 2., 2.49259217], 92 | [1.88371709, 2.3739768, 2.87898193, 3.36879304, 3.87395153, 4.36327924], 93 | [4., 4.49196335, 5., 5.49161771, 6., 6.49124808], 94 | [5.86287282, 6.35111105, 6.85674573, 7.3443667, 7.85016382, 8.33710998]] 95 | ) 96 | 97 | TEST_LAT_3 = np.array( 98 | [[45., 45.49777998, 46., 46.49770107, 47., 47.4976192], 99 | [46.96258417, 47.4595462, 47.9612508, 48.4581022, 48.95986481, 49.45660021], 100 | [49., 49.49744569, 50., 50.49735352, 51., 51.49725738], 101 | [50.95691833, 
51.45340364, 51.95534841, 52.45169855, 52.95370691, 53.55477452], 102 | [57., 57.50277937, 58., 58.50267347, 59., 59.50256981], 103 | [59.04185095, 59.54357898, 60.04020844, 60.54185139, 61.03859839, 61.54015723], 104 | [61., 61.5023687, 62., 62.502271, 63., 63.50217506], 105 | [63.03546804, 63.53686131, 64.03394419, 64.53525587, 65.03244567, 65.53367647]] 106 | ) 107 | 108 | 109 | class TestViiInterpolator(unittest.TestCase): 110 | """Test the vii_utils module.""" 111 | 112 | def setUp(self): 113 | """Set up the test.""" 114 | # Create the arrays for the interpolation test 115 | # The first has a valid number of n_tie_alt points (multiple of SCAN_ALT_TIE_POINTS) 116 | self.valid_data_for_interpolation = xr.DataArray( 117 | np.arange( 118 | TEST_VALID_ALT_TIE_POINTS * TEST_ACT_TIE_POINTS, 119 | dtype=np.float64, 120 | ).reshape(TEST_VALID_ALT_TIE_POINTS, TEST_ACT_TIE_POINTS), 121 | dims=('num_tie_points_alt', 'num_tie_points_act'), 122 | ) 123 | # The second has an invalid number of n_tie_alt points (not multiple of SCAN_ALT_TIE_POINTS) 124 | self.invalid_data_for_interpolation = xr.DataArray( 125 | np.arange( 126 | TEST_INVALID_ALT_TIE_POINTS * TEST_ACT_TIE_POINTS, 127 | dtype=np.float64, 128 | ).reshape(TEST_INVALID_ALT_TIE_POINTS, TEST_ACT_TIE_POINTS), 129 | dims=('num_tie_points_alt', 'num_tie_points_act'), 130 | ) 131 | # Then two arrays containing valid longitude and latitude data 132 | self.longitude = xr.DataArray( 133 | np.linspace( 134 | -12, 135 | 11, 136 | num=TEST_VALID_ALT_TIE_POINTS * TEST_ACT_TIE_POINTS, 137 | dtype=np.float64, 138 | ).reshape(TEST_VALID_ALT_TIE_POINTS, TEST_ACT_TIE_POINTS), 139 | dims=('num_tie_points_alt', 'num_tie_points_act'), 140 | ) 141 | self.latitude = xr.DataArray( 142 | np.linspace( 143 | 0, 144 | 23, 145 | num=TEST_VALID_ALT_TIE_POINTS * TEST_ACT_TIE_POINTS, 146 | dtype=np.float64, 147 | ).reshape(TEST_VALID_ALT_TIE_POINTS, TEST_ACT_TIE_POINTS), 148 | dims=('num_tie_points_alt', 'num_tie_points_act'), 149 | ) 150 | # Then one containing latitude data above 60 degrees 151 | self.latitude_over60 = xr.DataArray( 152 | np.linspace( 153 | 45, 154 | 68, 155 | num=TEST_VALID_ALT_TIE_POINTS * TEST_ACT_TIE_POINTS, 156 | dtype=np.float64, 157 | ).reshape(TEST_VALID_ALT_TIE_POINTS, TEST_ACT_TIE_POINTS), 158 | dims=('num_tie_points_alt', 'num_tie_points_act'), 159 | ) 160 | # Then one containing longitude data with a 360 degrees step 161 | self.longitude_over360 = xr.DataArray( 162 | np.linspace( 163 | -12, 164 | 11, 165 | num=TEST_VALID_ALT_TIE_POINTS * TEST_ACT_TIE_POINTS, 166 | dtype=np.float64, 167 | ).reshape(TEST_VALID_ALT_TIE_POINTS, TEST_ACT_TIE_POINTS) % 360., 168 | dims=('num_tie_points_alt', 'num_tie_points_act'), 169 | ) 170 | 171 | def tearDown(self): 172 | """Tear down the test.""" 173 | # Nothing to do 174 | pass 175 | 176 | def test_tie_points_interpolation(self): 177 | """# Test the interpolation routine with valid and invalid input.""" 178 | # Test the interpolation routine with valid input 179 | result_valid = tie_points_interpolation( 180 | [self.valid_data_for_interpolation], 181 | TEST_SCAN_ALT_TIE_POINTS, 182 | TEST_TIE_POINTS_FACTOR 183 | )[0] 184 | 185 | act_points_interp = (TEST_ACT_TIE_POINTS - 1) * TEST_TIE_POINTS_FACTOR 186 | num_scans = TEST_VALID_ALT_TIE_POINTS // TEST_SCAN_ALT_TIE_POINTS 187 | scan_alt_points_interp = (TEST_SCAN_ALT_TIE_POINTS - 1) * TEST_TIE_POINTS_FACTOR 188 | 189 | # Across the track 190 | delta_axis_0 = [0., 0.5, 1., 1.5, 2., 2.5] 191 | self.assertTrue(np.allclose(result_valid[0, :], delta_axis_0)) 192 | # 
Along track 193 | delta_axis_1 = [0., 2., 4., 6., 12., 14., 16., 18] 194 | self.assertTrue(np.allclose(result_valid[:, 0], delta_axis_1)) 195 | 196 | # Test the interpolation routine with invalid input 197 | pytest.raises(ValueError, tie_points_interpolation, 198 | [self.invalid_data_for_interpolation], 199 | TEST_SCAN_ALT_TIE_POINTS, 200 | TEST_TIE_POINTS_FACTOR) 201 | 202 | def test_tie_points_geo_interpolation(self): 203 | """# Test the coordinates interpolation routine with valid and invalid input.""" 204 | # Test the interpolation routine with valid input 205 | lon, lat = tie_points_geo_interpolation( 206 | self.longitude, 207 | self.latitude, 208 | TEST_SCAN_ALT_TIE_POINTS, 209 | TEST_TIE_POINTS_FACTOR 210 | ) 211 | self.assertTrue(np.allclose(lon, TEST_LON_1)) 212 | self.assertTrue(np.allclose(lat, TEST_LAT_1)) 213 | 214 | lon, lat = tie_points_geo_interpolation( 215 | self.longitude_over360, 216 | self.latitude, 217 | TEST_SCAN_ALT_TIE_POINTS, 218 | TEST_TIE_POINTS_FACTOR 219 | ) 220 | self.assertTrue(np.allclose(lon, TEST_LON_2)) 221 | self.assertTrue(np.allclose(lat, TEST_LAT_2)) 222 | 223 | lon, lat = tie_points_geo_interpolation( 224 | self.longitude, 225 | self.latitude_over60, 226 | TEST_SCAN_ALT_TIE_POINTS, 227 | TEST_TIE_POINTS_FACTOR 228 | ) 229 | self.assertTrue(np.allclose(lon, TEST_LON_3)) 230 | self.assertTrue(np.allclose(lat, TEST_LAT_3)) 231 | 232 | # Test the interpolation routine with invalid input (different dimensions of the two arrays) 233 | with self.assertRaises(ValueError): 234 | tie_points_geo_interpolation( 235 | self.longitude, 236 | self.invalid_data_for_interpolation, 237 | TEST_SCAN_ALT_TIE_POINTS, 238 | TEST_TIE_POINTS_FACTOR 239 | ) 240 | -------------------------------------------------------------------------------- /geotiepoints/tests/utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # Copyright (c) 2021 Python-geotiepoints developers 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | """Utilities for creating and checking tests.""" 18 | 19 | 20 | class CustomScheduler(object): 21 | """Scheduler raising an exception if data are computed too many times. 22 | 23 | This only makes sense to include when working with dask-based arrays. 
To 24 | use it:: 25 | 26 | with dask.config.set(scheduler=CustomScheduler(2)): 27 | my_dask_arr.compute() # allowed 28 | my_dask_arr.compute() # 2nd call, not allowed, fails 29 | 30 | """ 31 | 32 | def __init__(self, max_computes=1): 33 | """Set starting and maximum compute counts.""" 34 | self.max_computes = max_computes 35 | self.total_computes = 0 36 | 37 | def __call__(self, dsk, keys, **kwargs): 38 | """Compute dask task and keep track of number of times we do so.""" 39 | import dask 40 | self.total_computes += 1 41 | if self.total_computes > self.max_computes: 42 | raise RuntimeError("Too many dask computations were scheduled: " 43 | "{}".format(self.total_computes)) 44 | return dask.get(dsk, keys, **kwargs) 45 | -------------------------------------------------------------------------------- /geotiepoints/viiinterpolator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # Copyright (c) 2017-2020 Python-geotiepoints developers 4 | # 5 | # This file is part of python-geotiepoints. 6 | # 7 | # python-geotiepoints is free software: you can redistribute it and/or modify it under the 8 | # terms of the GNU General Public License as published by the Free Software 9 | # Foundation, either version 3 of the License, or (at your option) any later 10 | # version. 11 | # 12 | # python-geotiepoints is distributed in the hope that it will be useful, but WITHOUT ANY 13 | # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR 14 | # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License along with 17 | # python-geotiepoints. If not, see . 18 | 19 | """Interpolation of geographical tiepoints for the VII products. 20 | It follows the description provided in document "EPS-SG VII Level 1B Product Format Specification". 21 | Tiepoints are typically subsampled by a factor 8 with respect to the pixels, along and across the satellite track. 22 | Because of the bowtie effect, tiepoints at the scan edges are not continuous between neighbouring scans, 23 | therefore each scan has its own edge tiepoints in the along-track direction. 24 | However, for computational efficiency, the edge tie points that lie outside the original data point raster 25 | are excluded from the interpolation grid which is then carried out per granule, rather than per scan 26 | at a (very) small geolocation accuracy cost at the swath edge (investigation to quantify this ongoing). 27 | The interpolation functions are implemented for xarray.DataArrays as input. 28 | This version works with vii test data V2 to be released Jan 2022 which has the data stored 29 | in alt, act (row,col) format instead of act,alt (col,row) 30 | """ 31 | 32 | import xarray as xr 33 | import dask.array as da 34 | import numpy as np 35 | 36 | # MEAN EARTH RADIUS AS DEFINED BY IUGG 37 | MEAN_EARTH_RADIUS = 6371008.7714 # [m] 38 | 39 | 40 | def tie_points_interpolation(data_on_tie_points, scan_alt_tie_points, tie_points_factor): 41 | """Interpolate the data from the tie points to the pixel points. 42 | The data are provided as a list of xarray DataArray objects, allowing to interpolate on several arrays 43 | at the same time; however the individual arrays must have exactly the same dimensions. 44 | Args: 45 | data_on_tie_points: list of xarray DataArray objects containing the values defined on the tie points. 
46 | scan_alt_tie_points: number of tie points along the satellite track for each scan 47 | tie_points_factor: sub-sampling factor of tie points wrt pixel points 48 | Returns: 49 | list of xarray DataArray objects containing the interpolated values on the pixel points. 50 | """ 51 | # Extract the dimensions of the tie points array across and along track 52 | n_tie_alt, n_tie_act = data_on_tie_points[0].shape 53 | dim_alt, dim_act = data_on_tie_points[0].dims 54 | 55 | # Check that the number of tie points along track is multiple of the number of tie points per scan 56 | if n_tie_alt % scan_alt_tie_points != 0: 57 | raise ValueError("The number of tie points in the along-route dimension must be a multiple of %d", 58 | scan_alt_tie_points) 59 | 60 | # Compute the number of scans 61 | n_scans = n_tie_alt // scan_alt_tie_points 62 | 63 | # Compute the dimensions of the pixel points array across and along track 64 | n_pixel_act = (n_tie_act - 1) * tie_points_factor 65 | n_pixel_alt = (n_tie_alt - 1) * tie_points_factor 66 | 67 | # Create the grids used for interpolation across the track 68 | tie_grid_act = np.arange(0, n_pixel_act + 1, tie_points_factor) 69 | pixel_grid_act = np.arange(0, n_pixel_act) 70 | 71 | # Create the grids used for the interpolation along the track (must not include the spurious points between scans) 72 | tie_grid_alt = np.arange(0, n_pixel_alt + 1, tie_points_factor) 73 | n_pixel_alt_per_scan = (scan_alt_tie_points - 1) * tie_points_factor 74 | pixel_grid_alt = [] 75 | 76 | for j_scan in range(n_scans): 77 | start_index_scan = j_scan * scan_alt_tie_points * tie_points_factor 78 | pixel_grid_alt.append(np.arange(start_index_scan, start_index_scan + n_pixel_alt_per_scan)) 79 | pixel_grid_alt = np.concatenate(pixel_grid_alt) 80 | 81 | # Loop on all arrays 82 | data_on_pixel_points = [] 83 | for data in data_on_tie_points: 84 | 85 | if data.shape != (n_tie_alt, n_tie_act) or data.dims != (dim_alt, dim_act): 86 | raise ValueError("The dimensions of the arrays are not consistent") 87 | 88 | # Interpolate using the xarray interp function twice: first across, then along the scan 89 | # (much faster than interpolating directly in the two dimensions) 90 | data = data.assign_coords({dim_alt: tie_grid_alt, dim_act: tie_grid_act}) 91 | data_pixel = data.interp({dim_alt: pixel_grid_alt}, assume_sorted=True) \ 92 | .interp({dim_act: pixel_grid_act}, assume_sorted=True).drop_vars([dim_alt, dim_act]) 93 | 94 | data_on_pixel_points.append(data_pixel) 95 | 96 | return data_on_pixel_points 97 | 98 | 99 | 100 | def tie_points_geo_interpolation(longitude, latitude, 101 | scan_alt_tie_points, tie_points_factor, 102 | lat_threshold_use_cartesian=60., 103 | z_threshold_use_xy=0.8): 104 | """Interpolate the geographical position from the tie points to the pixel points. 105 | 106 | The longitude and latitude values are provided as xarray DataArray objects. 107 | 108 | Args: 109 | data_on_tie_points: list of xarray DataArray objects containing the values defined on the tie points. 110 | scan_alt_tie_points: number of tie points along the satellite track for each scan 111 | tie_points_factor: sub-sampling factor of tie points wrt pixel points 112 | 113 | Returns: 114 | list of xarray DataArray objects containing the interpolated values on the pixel points. 115 | 116 | Args: 117 | longitude: xarray DataArray containing the longitude values defined on the tie points (degrees). 118 | latitude: xarray DataArray containing the latitude values defined on the tie points (degrees). 
119 | scan_alt_tie_points: number of tie points along the satellite track for each scan. 120 | tie_points_factor: sub-sampling factor of tie points wrt pixel points. 121 | lat_threshold_use_cartesian: latitude threshold to use cartesian coordinates. 122 | z_threshold_use_xy: z threshold to compute latitude from x and y in cartesian coordinates. 123 | 124 | Returns: 125 | two xarray DataArray objects containing the interpolated longitude and latitude values on the pixel points. 126 | 127 | """ 128 | # Check that the two arrays have the same dimensions 129 | if longitude.shape != latitude.shape: 130 | raise ValueError("The dimensions of longitude and latitude don't match") 131 | 132 | # Determine if the interpolation should be done in cartesian or geodetic coordinates 133 | to_cart = np.max(np.fabs(latitude)) > lat_threshold_use_cartesian or (np.max(longitude) - np.min(longitude)) > 180. 134 | 135 | if to_cart: 136 | 137 | x, y, z = _lonlat2xyz(longitude, latitude) 138 | 139 | interp_x, interp_y, interp_z = tie_points_interpolation([x, y, z], 140 | scan_alt_tie_points, 141 | tie_points_factor) 142 | 143 | interp_longitude, interp_latitude = _xyz2lonlat(interp_x, interp_y, interp_z, z_threshold_use_xy) 144 | 145 | else: 146 | 147 | interp_longitude, interp_latitude = tie_points_interpolation([longitude, latitude], 148 | scan_alt_tie_points, 149 | tie_points_factor) 150 | 151 | return interp_longitude, interp_latitude 152 | 153 | 154 | def _lonlat2xyz(lons, lats): 155 | """Convert longitudes and latitudes to cartesian coordinates. 156 | 157 | Args: 158 | lons: array containing the longitude values in degrees. 159 | lats: array containing the latitude values in degrees. 160 | 161 | Returns: 162 | tuple of arrays containing the x, y, and z values in meters. 163 | 164 | """ 165 | lons_rad = np.deg2rad(lons) 166 | lats_rad = np.deg2rad(lats) 167 | x_coords = MEAN_EARTH_RADIUS * np.cos(lats_rad) * np.cos(lons_rad) 168 | y_coords = MEAN_EARTH_RADIUS * np.cos(lats_rad) * np.sin(lons_rad) 169 | z_coords = MEAN_EARTH_RADIUS * np.sin(lats_rad) 170 | return x_coords, y_coords, z_coords 171 | 172 | 173 | def _xyz2lonlat(x_coords, y_coords, z_coords, z_threshold_use_xy=0.8): 174 | """Get longitudes and latitudes from cartesian coordinates. 175 | 176 | Args: 177 | x_coords: array containing the x values in meters. 178 | y_coords: array containing the y values in meters. 179 | z_coords: array containing the z values in meters. 180 | z_threshold_use_xy: z threshold to compute latitude from x and y in cartesian coordinates. 181 | 182 | Returns: 183 | tuple of arrays containing the longitude and latitude values in degrees. 184 | 185 | """ 186 | r = np.sqrt(x_coords ** 2 + y_coords ** 2) 187 | thr_z = z_threshold_use_xy * MEAN_EARTH_RADIUS 188 | lons = np.rad2deg(np.arccos(x_coords / r)) * np.sign(y_coords) 189 | # Compute latitude from z at low z and from x and y at high z 190 | lats = xr.where( 191 | np.logical_and(np.less(z_coords, thr_z), np.greater(z_coords, -thr_z)), 192 | 90. - np.rad2deg(np.arccos(z_coords / MEAN_EARTH_RADIUS)), 193 | np.sign(z_coords) * (90. 
- np.rad2deg(np.arcsin(r / MEAN_EARTH_RADIUS))) 194 | ) 195 | return lons, lats 196 | -------------------------------------------------------------------------------- /mytest_fillborders.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Copyright (c) 2017 Adam.Dybbroe 5 | 6 | # Author(s): 7 | 8 | # Adam.Dybbroe 9 | 10 | # This program is free software: you can redistribute it and/or modify 11 | # it under the terms of the GNU General Public License as published by 12 | # the Free Software Foundation, either version 3 of the License, or 13 | # (at your option) any later version. 14 | 15 | # This program is distributed in the hope that it will be useful, 16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 18 | # GNU General Public License for more details. 19 | 20 | # You should have received a copy of the GNU General Public License 21 | # along with this program. If not, see . 22 | 23 | """ 24 | """ 25 | 26 | import numpy as np 27 | from geotiepoints.geointerpolator import GeoInterpolator 28 | 29 | lons = np.arange(20).reshape((4, 5), order="F") 30 | lats = np.arange(20).reshape((4, 5), order="C") 31 | lines = np.array([2, 7, 12, 17]) / 5.0 32 | cols = np.array([2, 7, 12, 17, 22]) 33 | hlines = np.arange(20) / 5.0 34 | hcols = np.arange(24) 35 | satint = GeoInterpolator( 36 | (lons, lats), (lines, cols), (hlines, hcols), chunk_size=10) 37 | satint.fill_borders('x', 'y') 38 | -------------------------------------------------------------------------------- /mytest_modis5to1.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Copyright (c) 2017 Adam.Dybbroe 5 | 6 | # Author(s): 7 | 8 | # Adam.Dybbroe 9 | 10 | # This program is free software: you can redistribute it and/or modify 11 | # it under the terms of the GNU General Public License as published by 12 | # the Free Software Foundation, either version 3 of the License, or 13 | # (at your option) any later version. 14 | 15 | # This program is distributed in the hope that it will be useful, 16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 18 | # GNU General Public License for more details. 19 | 20 | # You should have received a copy of the GNU General Public License 21 | # along with this program. If not, see . 22 | 23 | """ 24 | """ 25 | 26 | import h5py 27 | import numpy as np 28 | from geotiepoints import modis5kmto1km 29 | 30 | FILENAME_FULL = 'testdata/test_5_to_1_geoloc_full.h5' 31 | FILENAME_5KM = 'testdata/test_5_to_1_geoloc_5km.h5' 32 | 33 | with h5py.File(FILENAME_FULL) as h5f: 34 | glons = h5f['longitude'][:] / 1000. 35 | glats = h5f['latitude'][:] / 1000. 36 | 37 | with h5py.File(FILENAME_5KM) as h5f: 38 | lons = h5f['longitude'][:] / 1000. 39 | lats = h5f['latitude'][:] / 1000. 
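# Interpolate the 5 km tie-point longitudes/latitudes read above back to the
# full 1 km grid and compare against the reference 1 km geolocation within a
# 0.05 degree tolerance.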
40 | 41 | tlons, tlats = modis5kmto1km(lons, lats) 42 | 43 | print(np.allclose(tlons, glons, atol=0.05)) 44 | print(np.allclose(tlats, glats, atol=0.05)) 45 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", "wheel", "numpy>=2.0.0rc1,<3", "Cython>=3", "versioneer"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.coverage.run] 6 | relative_files = true 7 | plugins = ["Cython.Coverage"] 8 | omit = ["geotiepoints/version.py"] 9 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy>=1.5.1 2 | scipy>=0.14 3 | pandas 4 | Cython 5 | 6 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_rpm] 2 | requires=numpy cython scipy 3 | release=1 4 | 5 | [flake8] 6 | max-line-length = 120 7 | ignore = D107 8 | 9 | [versioneer] 10 | VCS = git 11 | style = pep440 12 | versionfile_source = geotiepoints/version.py 13 | versionfile_build = geotiepoints/version.py 14 | tag_prefix = v 15 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012-2023 Python-geotiepoints developers 2 | # 3 | # This program is free software: you can redistribute it and/or modify 4 | # it under the terms of the GNU General Public License as published by 5 | # the Free Software Foundation, either version 3 of the License, or 6 | # (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 11 | # GNU General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 | """Setting up the geo_interpolator project.""" 16 | 17 | import sys 18 | 19 | from setuptools import setup, find_packages 20 | import versioneer 21 | import numpy as np 22 | from Cython.Build import build_ext 23 | from Cython.Distutils import Extension 24 | 25 | requirements = ['numpy', 'scipy', 'pandas'] 26 | test_requires = ['pytest', 'pytest-cov', 'h5py', 'xarray', 'dask', 'pyproj', "pyresample"] 27 | 28 | if sys.platform.startswith("win"): 29 | extra_compile_args = [] 30 | else: 31 | extra_compile_args = ["-O3"] 32 | 33 | EXTENSIONS = [ 34 | Extension( 35 | 'geotiepoints.multilinear_cython', 36 | sources=['geotiepoints/multilinear_cython.pyx'], 37 | extra_compile_args=extra_compile_args, 38 | include_dirs=[np.get_include()], 39 | ), 40 | Extension( 41 | 'geotiepoints._modis_interpolator', 42 | sources=['geotiepoints/_modis_interpolator.pyx'], 43 | extra_compile_args=extra_compile_args, 44 | include_dirs=[np.get_include()], 45 | ), 46 | Extension( 47 | 'geotiepoints._simple_modis_interpolator', 48 | sources=['geotiepoints/_simple_modis_interpolator.pyx'], 49 | extra_compile_args=extra_compile_args, 50 | include_dirs=[np.get_include()], 51 | ), 52 | Extension( 53 | 'geotiepoints._modis_utils', 54 | sources=['geotiepoints/_modis_utils.pyx'], 55 | extra_compile_args=extra_compile_args, 56 | include_dirs=[np.get_include()], 57 | ), 58 | ] 59 | 60 | 61 | try: 62 | sys.argv.remove("--cython-coverage") 63 | cython_coverage = True 64 | except ValueError: 65 | cython_coverage = False 66 | 67 | 68 | cython_directives = { 69 | "language_level": "3", 70 | } 71 | define_macros = [("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION")] 72 | if cython_coverage: 73 | print("Enabling directives/macros for Cython coverage support") 74 | cython_directives.update({ 75 | "linetrace": True, 76 | "profile": True, 77 | }) 78 | define_macros.extend([ 79 | ("CYTHON_TRACE", "1"), 80 | ("CYTHON_TRACE_NOGIL", "1"), 81 | ]) 82 | for ext in EXTENSIONS: 83 | ext.define_macros = define_macros 84 | ext.cython_directives.update(cython_directives) 85 | 86 | cmdclass = versioneer.get_cmdclass(cmdclass={"build_ext": build_ext}) 87 | 88 | with open('README.md', 'r') as readme: 89 | README = readme.read() 90 | 91 | if __name__ == "__main__": 92 | setup(name='python-geotiepoints', 93 | version=versioneer.get_version(), 94 | description='Interpolation of geographic tiepoints in Python', 95 | long_description=README, 96 | long_description_content_type='text/markdown', 97 | author='Adam Dybbroe, Martin Raspaud', 98 | author_email='martin.raspaud@smhi.se', 99 | classifiers=["Development Status :: 5 - Production/Stable", 100 | "Intended Audience :: Science/Research", 101 | "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", 102 | "Operating System :: OS Independent", 103 | "Programming Language :: Python", 104 | "Programming Language :: Cython", 105 | "Topic :: Scientific/Engineering"], 106 | url="https://github.com/pytroll/python-geotiepoints", 107 | packages=find_packages(), 108 | python_requires='>=3.10', 109 | cmdclass=cmdclass, 110 | install_requires=requirements, 111 | ext_modules=EXTENSIONS, 112 | tests_require=test_requires, 113 | zip_safe=False 114 | ) 115 | -------------------------------------------------------------------------------- /testdata/250m_lonlat_section_input.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/testdata/250m_lonlat_section_input.h5 
-------------------------------------------------------------------------------- /testdata/250m_lonlat_section_result.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/testdata/250m_lonlat_section_result.h5 -------------------------------------------------------------------------------- /testdata/create_modis_test_data.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Generate MODIS interpolation test data from real input data. 3 | 4 | This script is used to generate the "testdata/modis_test_data.h5" file that is 5 | used to validate the various modis interpolation algorithms in 6 | python-geotiepoints. The test data file consists of 1km "truth" longitude and 7 | latitude arrays from an input MOD03 HDF4 file and interpolated longitude and 8 | latitude arrays at 500m and 250m resolution. The interpolation is done using 9 | the CVIIRS based algorithm in ``geotiepoints/modisinterpolator.py``. 10 | The CVIIRS algorithm was used as opposed to the "simple" or other interpolation 11 | methods due to the smoother interpolation between pixels (no linear "steps"). 12 | 13 | MOD03 files geolocation data is terrain corrected. This means that the 14 | interpolation methods currently in python-geotiepoints can't produce an 15 | exact matching result for a round trip test of 1km (truth) -> 16 | 5km (every 5th pixel) -> 1km (interpolation result). 17 | The input MOD03 test data was chosen due to its lack of varying terrain 18 | (almost entirely ocean view) to minimize error/differences between the 19 | 1km truth and 1km interpolation results. 20 | 21 | To limit size of the test data file and the reduce the execution time of tests 22 | the test data is limited to the last 2 scans (20 rows of 1km data) of the 23 | provided input data. 
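A typical invocation looks like the following, where the input filename is only
a placeholder for whatever MOD03 granule is used::

    python create_modis_test_data.py -i <MOD03 granule>.hdf -o modis_test_data.h5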
24 | 25 | """ 26 | import os 27 | import sys 28 | from datetime import datetime 29 | 30 | import h5py 31 | import numpy as np 32 | from pyhdf.SD import SD, SDC 33 | import xarray as xr 34 | import dask.array as da 35 | 36 | from geotiepoints.modisinterpolator import modis_1km_to_500m, modis_1km_to_250m 37 | 38 | 39 | def main(): 40 | import argparse 41 | parser = argparse.ArgumentParser() 42 | parser.add_argument("-i", "--input", required=True, 43 | help="Input MOD03 geolocation HDF4 filename to read 1km lon/lat data from.") 44 | parser.add_argument("-o", "--output", default="modis_test_data.h5", 45 | help="Output test data HDF5 filename being created") 46 | args = parser.parse_args() 47 | 48 | num_1km_rows = 20 49 | lons_1km, lats_1km, satz_1km = _get_1km_lon_lat_satz_from_mod03(args.input) 50 | lons_1km = lons_1km[-num_1km_rows:] 51 | lats_1km = lats_1km[-num_1km_rows:] 52 | satz_1km = satz_1km[-num_1km_rows:] 53 | lons_1km = xr.DataArray(da.from_array(lons_1km), dims=("y", "x")) 54 | lats_1km = xr.DataArray(da.from_array(lats_1km), dims=("y", "x")) 55 | satz_1km = xr.DataArray(da.from_array(satz_1km), dims=("y", "x")) 56 | 57 | with h5py.File(args.output, "w") as output_h: 58 | lons_500m, lats_500m = modis_1km_to_500m(lons_1km, lats_1km, satz_1km) 59 | lons_500m = lons_500m.astype(np.float32, copy=False) 60 | lats_500m = lats_500m.astype(np.float32, copy=False) 61 | 62 | lons_250m, lats_250m = modis_1km_to_250m(lons_1km, lats_1km, satz_1km) 63 | lons_250m = lons_250m.astype(np.float32, copy=False) 64 | lats_250m = lats_250m.astype(np.float32, copy=False) 65 | 66 | output_h.create_dataset("lon_1km", data=lons_1km, compression="gzip", compression_opts=9, shuffle=True) 67 | output_h.create_dataset("lat_1km", data=lats_1km, compression="gzip", compression_opts=9, shuffle=True) 68 | output_h.create_dataset("satz_1km", data=satz_1km, compression="gzip", compression_opts=9, shuffle=True) 69 | output_h.create_dataset("lon_500m", data=lons_500m, compression="gzip", compression_opts=9, shuffle=True) 70 | output_h.create_dataset('lat_500m', data=lats_500m, compression="gzip", compression_opts=9, shuffle=True) 71 | output_h.create_dataset("lon_250m", data=lons_250m, compression="gzip", compression_opts=9, shuffle=True) 72 | output_h.create_dataset("lat_250m", data=lats_250m, compression="gzip", compression_opts=9, shuffle=True) 73 | output_h.attrs["1km_data_origin"] = os.path.basename(args.input) 74 | output_h.attrs["description"] = ( 75 | "MODIS interpolation test data for the python-geotiepoints package. " 76 | "The 1 km data is taken directly from a MOD03 file. The 250m and " 77 | "500m is generated using the cviirs-based algorithm in " 78 | "`geotiepoints/modisinterpolator.py`. For more information see " 79 | "the generation script in `testdata/create_modis_test_data.py` in " 80 | "the python-geotiepoints git repository." 
81 | ) 82 | output_h.attrs["creation_date"] = datetime.utcnow().strftime("%Y-%m-%d") 83 | 84 | 85 | def _get_1km_lon_lat_satz_from_mod03(hdf4_filename: str) -> tuple: 86 | h = SD(hdf4_filename, mode=SDC.READ) 87 | lon_var = h.select("Longitude") 88 | lat_var = h.select("Latitude") 89 | sat_zen_var = h.select("SensorZenith") 90 | 91 | # ensure 32-bit float 92 | lon_data = lon_var[:].astype(np.float32, copy=False) 93 | lat_data = lat_var[:].astype(np.float32, copy=False) 94 | sat_zen_attrs = sat_zen_var.attributes() 95 | scale_factor = sat_zen_attrs.get("scale_factor", 1.0) 96 | add_offset = sat_zen_attrs.get("add_offset", 0.0) 97 | sat_zen_data = (sat_zen_var[:] * scale_factor + add_offset).astype(np.float32, copy=False) 98 | 99 | return lon_data, lat_data, sat_zen_data 100 | 101 | 102 | if __name__ == "__main__": 103 | sys.exit(main()) 104 | -------------------------------------------------------------------------------- /testdata/modis_test_data.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/testdata/modis_test_data.h5 -------------------------------------------------------------------------------- /testdata/test_5_to_1_geoloc_5km.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/testdata/test_5_to_1_geoloc_5km.h5 -------------------------------------------------------------------------------- /testdata/test_5_to_1_geoloc_full.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pytroll/python-geotiepoints/2e0b014fd324eddc3d72d2d85658c582821840a0/testdata/test_5_to_1_geoloc_full.h5 --------------------------------------------------------------------------------
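
For reference, here is a minimal sketch showing how the generated testdata/modis_test_data.h5
file can be read back and compared against a fresh interpolation run from the repository root.
The dataset names mirror testdata/create_modis_test_data.py above; the comparison tolerance is
only illustrative.

import dask.array as da
import h5py
import numpy as np
import xarray as xr

from geotiepoints.modisinterpolator import modis_1km_to_500m

# Load the 1 km "truth" geolocation and the stored 500 m reference arrays.
with h5py.File("testdata/modis_test_data.h5", "r") as h5f:
    lons_1km = xr.DataArray(da.from_array(h5f["lon_1km"][:]), dims=("y", "x"))
    lats_1km = xr.DataArray(da.from_array(h5f["lat_1km"][:]), dims=("y", "x"))
    satz_1km = xr.DataArray(da.from_array(h5f["satz_1km"][:]), dims=("y", "x"))
    lons_500m_ref = h5f["lon_500m"][:]
    lats_500m_ref = h5f["lat_500m"][:]

# Re-run the 1 km -> 500 m interpolation and compare with the stored result.
lons_500m, lats_500m = modis_1km_to_500m(lons_1km, lats_1km, satz_1km)
print(np.allclose(lons_500m, lons_500m_ref, atol=1e-3))
print(np.allclose(lats_500m, lats_500m_ref, atol=1e-3))

If the interpolator implementation has not changed since the test data was generated, both
checks should print True.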