├── .github └── workflows │ └── python-publish.yml ├── .gitignore ├── .readthedocs.yaml ├── CITATION.cff ├── LICENSE ├── MANIFEST.in ├── README.md ├── docs ├── Makefile ├── make.bat ├── requirements.txt └── source │ ├── _static │ ├── favicon.ico │ ├── shxarraylogo_dark.png │ └── shxarraylogo_light.png │ ├── _templates │ ├── module.rst_t │ ├── package.rst_t │ └── toc.rst_t │ ├── api.rst │ ├── changelog.rst │ ├── conf.py │ ├── dev.rst │ ├── index.rst │ ├── installation.rst │ ├── introduction.rst │ ├── logo_etc │ ├── favicon.ico │ ├── logo_base.png │ ├── logo_base.xcf │ ├── make_logobase.ipynb │ ├── makefavicon │ ├── shxarray_logo_wtext.svg │ ├── shxarray_logo_wtext_dark.png │ ├── shxarray_logo_wtext_light.png │ ├── shxarraylogo_dark.png │ └── shxarraylogo_light.png │ ├── notebooks │ ├── .gitignore │ ├── Geometry2sphericalHarmonics.ipynb │ ├── OceanMask.ipynb │ ├── SealevelEquation.ipynb │ ├── TerrestrialWaterStorage.ipynb │ └── visualize_filter.ipynb │ └── tutorial.rst ├── headerlicense.txt ├── pyproject.toml ├── requirements.txt ├── sample-data ├── GRACEDataSample_2020.sql ├── GRACEDataSample_2020_files.tgz └── dump-grace.py ├── setup.py ├── src ├── builtin_backend │ ├── .gitignore │ ├── Gaunt.hpp │ ├── Helpers.hpp │ ├── Legendre.hpp │ ├── Legendre_nm.hpp │ ├── Wigner3j.hpp │ ├── Ynm.hpp │ ├── analysis.pyx │ ├── gaunt.pxd │ ├── gaunt.pyx │ ├── legendre.pxd │ ├── legendre.pyx │ ├── shindex.pxd │ ├── shlib.cpp │ ├── shlib.pyx │ ├── sinex.pyx │ ├── synthesis.pyx │ ├── wigner3j.pxd │ ├── wigner3j.pyx │ └── ynm.pyx ├── shtns_backend │ └── shtns.py └── shxarray │ ├── __init__.py │ ├── _version.py │ ├── core │ ├── __init__.py │ ├── admin.py │ ├── cf.py │ ├── logging.py │ ├── sh_indexing.py │ ├── shcomputebase.py │ ├── shxarbase.py │ ├── time.py │ └── xr_accessor.py │ ├── earth │ ├── __init__.py │ ├── constants.py │ ├── ellipsoid.py │ ├── rotation.py │ ├── sealevel │ │ ├── __init__.py │ │ ├── sealevel.py │ │ └── spectralsealevel.py │ └── snrei.py │ ├── exp │ ├── __init__.py │ ├── multiply.py │ └── p2s.py │ ├── geom │ ├── __init__.py │ ├── points.py │ └── polygons.py │ ├── geoslurp │ ├── GRACEDsets.py │ ├── TUGRAZDsets.py │ ├── __init__.py │ ├── deg1n2.py │ ├── gracefilters.py │ ├── graceviews.py │ ├── gravity.py │ ├── icgem.py │ ├── icgemdset.py │ └── loadlove.py │ ├── io │ ├── __init__.py │ ├── binv_legacy.py │ ├── gsmv6.py │ ├── gzipwrap.py │ ├── icgem.py │ ├── shascii.py │ ├── shiobackend.py │ └── sinex.py │ ├── kernels │ ├── __init__.py │ ├── anisokernel.py │ ├── axial.py │ ├── ddk.py │ ├── factory.py │ ├── gauss.py │ ├── gravfunctionals.py │ └── isokernelbase.py │ └── signal │ ├── basinav.py │ └── leakage_vishwa.py └── tests ├── fixtures.py ├── test_analysis.py ├── test_basic_ops.py ├── test_basinav.py ├── test_filters.py ├── test_prod2sum.py ├── test_shformats.py ├── test_shtns_backend.py ├── test_synthesis.py ├── test_wigner3j_gaunt.py └── testdata ├── GSM-2_2008122-2008153_0030_EIGEN_G---_0004in.gz ├── GSM-2_2008122-2008153_0030_EIGEN_G---_0004lmax60out.gz ├── GSM-2_2008122-2008153_0030_EIGEN_G---_0004out.gz ├── P2Sum_ocean10.nc ├── gauss300testoutsub.sh.gz ├── icgem_test_nosig_ITSG.gfc ├── icgem_test_sig_ITSG.gfc.gz ├── shanalysis-test-paracap-n200.nc ├── sympy_gauntvalidation.pkl.gz ├── sympy_realgauntvalidation.pkl.gz └── sympy_wigner3jvalidation.pkl.gz /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 8 | 9 | name: Build, Publish and release shxarray distribution 📦 to PyPI 10 | 11 | on: 12 | push: 13 | tags: 14 | - 'v[0-9]+.[0-9]+.[0-9]+' 15 | 16 | permissions: 17 | contents: read 18 | 19 | jobs: 20 | build: 21 | name: Build distribution 📦 22 | runs-on: ubuntu-latest 23 | 24 | steps: 25 | - uses: actions/checkout@v4 26 | - name: Set up Python 27 | uses: actions/setup-python@v4 28 | with: 29 | python-version: "3.x" 30 | - name: Install pypa/build 31 | run: >- 32 | python3 -m 33 | pip install 34 | build 35 | --user 36 | - name: Build a source tarball 37 | run: python3 -m build -s 38 | - name: Store the distribution packages 39 | uses: actions/upload-artifact@v4 40 | with: 41 | name: python-package-distributions 42 | path: dist/ 43 | publish-to-pypi: 44 | name: >- 45 | Publish Shxarray Python 🐍 distribution 📦 to PyPI 46 | if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes 47 | needs: 48 | - build 49 | runs-on: ubuntu-latest 50 | environment: 51 | name: pypi 52 | url: https://pypi.org/p/shxarray 53 | permissions: 54 | id-token: write # IMPORTANT: mandatory for trusted publishing 55 | 56 | steps: 57 | - name: Download all the dists 58 | uses: actions/download-artifact@v4 59 | with: 60 | name: python-package-distributions 61 | path: dist/ 62 | - name: Publish distribution 📦 to PyPI 63 | uses: pypa/gh-action-pypi-publish@release/v1 64 | github-release: 65 | name: >- 66 | Upload the Python 🐍 distribution 📦 to gituhub release 67 | needs: 68 | - publish-to-pypi 69 | runs-on: ubuntu-latest 70 | 71 | permissions: 72 | contents: write # IMPORTANT: mandatory for making GitHub Releases 73 | id-token: write # IMPORTANT: mandatory for sigstore 74 | 75 | steps: 76 | - name: Download all the dists 77 | uses: actions/download-artifact@v4 78 | with: 79 | name: python-package-distributions 80 | path: dist/ 81 | #- name: Sign the dists with Sigstore 82 | #uses: sigstore/gh-action-sigstore-python@v2.1.1 83 | #with: 84 | #inputs: >- 85 | #./dist/*.tar.gz 86 | #./dist/*.whl 87 | - name: Create GitHub Release 88 | env: 89 | GITHUB_TOKEN: ${{ github.token }} 90 | run: >- 91 | gh release create 92 | '${{ github.ref_name }}' 93 | --repo '${{ github.repository }}' 94 | --notes "" 95 | - name: Upload artifact signatures to GitHub Release 96 | env: 97 | GITHUB_TOKEN: ${{ github.token }} 98 | # Upload to GitHub Release using the `gh` CLI. 99 | # `dist/` contains the built packages, and the 100 | # sigstore-produced signatures and certificates. 101 | run: >- 102 | gh release upload 103 | '${{ github.ref_name }}' dist/** 104 | --repo '${{ github.repository }}' 105 | #publish-to-testpypi: 106 | #name: Publish Python 🐍 distribution 📦 to TestPyPI 107 | #needs: 108 | #- build 109 | #runs-on: ubuntu-latest 110 | 111 | #environment: 112 | #name: testpypi 113 | #url: https://test.pypi.org/p/shxarray 114 | 115 | #permissions: 116 | #id-token: write # IMPORTANT: mandatory for trusted publishing 117 | 118 | #steps: 119 | #- name: Download all the dists 120 | #uses: actions/download-artifact@v3 121 | #with: 122 | #name: python-package-distributions 123 | #path: dist/ 124 | #- name: Publish distribution 📦 to TestPyPI 125 | #uses: pypa/gh-action-pypi-publish@release/v1 126 | #with: 127 | #repository-url: https://test.pypi.org/legacy/ 128 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | .ipynb_checkpoints/ 3 | *egg-info/ 4 | build/ 5 | #Exclude in source pip environment for testing the code 6 | pyshxarray/ 7 | #don't include the compiled shared library 8 | shlib.cpython-*so 9 | 10 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | build: 4 | os: "ubuntu-22.04" 5 | tools: 6 | python: "3.10" 7 | 8 | python: 9 | install: 10 | - method: pip 11 | path: . 12 | - requirements: docs/requirements.txt 13 | 14 | sphinx: 15 | configuration: docs/source/conf.py 16 | -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | cff-version: 1.2.0 2 | message: "If you use this software, please cite it as below." 3 | authors: 4 | - family-names: "Rietbroek" 5 | given-names: "Roelof" 6 | orcid: "https://orcid.org/0000-0001-5276-5943" 7 | - family-names: "Karimi" 8 | given-names: "Sedigheh" 9 | title: "SHxarray: an extension to xarray providing functionality to work with spherical harmonic datasets" 10 | version: 1.3.2 11 | doi: 10.5281/zenodo.15236539 12 | date-released: 2025-04-17 13 | url: "https://github.com/ITC-Water-Resources/shxarray" 14 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | graft src/builtin_backend/ 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Xarray Extension for working with spherical harmonic data 2 | [![DOI](https://zenodo.org/badge/719643889.svg)](https://doi.org/10.5281/zenodo.15236539) 3 | [![Build](https://github.com/ITC-Water-Resources/shxarray/actions/workflows/python-publish.yml/badge.svg)](https://github.com/ITC-Water-Resources/shxarray/actions/workflows/python-publish.yml) 4 | [![PyPI version](https://badge.fury.io/py/shxarray.svg)](https://badge.fury.io/py/shxarray) 5 | [![Documentation Status](https://readthedocs.org/projects/shxarray/badge/?version=latest)](https://shxarray.wobbly.earth/latest/?badge=latest) 6 | 7 | 8 | This extension adds functionality to work with Spherical Harmonics to [Xarray](https://github.com/pydata/xarray). 9 | 10 | 11 | ## Features and functionality 12 | * Gravity functionals: (convert from Stokes coefficients to various gravity functionals, such as equivalent water heights, geoid changes, etc.) 13 | * Filter (e.g. Gaussian or anisotropic filter applied in the spectral domain) 14 | * The use of Xarray-like operations allow for applying functionality to multi-dimensional datasets 15 | * A spectral sea level equation solver 16 | 17 | ## Getting started 18 | The tutorials in the [documentation](https://shxarray.wobbly.earth/stable/tutorial.html) provide Jupyter Notebooks with examples of how to make use of the module. The notebooks can also be found on the [github repository](https://github.com/ITC-Water-Resources/shxarray/tree/main/docs/source/notebooks). 19 | 20 | The functionality of shxarray becomes available when importing the module together with Xarray: 21 | 22 | ``` 23 | import shxarray 24 | import xarray as xr 25 | ``` 26 | after which the shxarray accessor becomes available for use, e.g.: 27 | ``` 28 | nmax=20 29 | nmin=2 30 | dazeros=xr.DataArray.sh.ones(nmax=nmax,nmin=nmin) 31 | ``` 32 | 33 | ## Installation 34 | You can install this package from PyPi using: 35 | ``` 36 | pip install shxarray 37 | ``` 38 | 39 | ## Backends 40 | Shxarray comes with a default **shlib** backend written in C++ and Cython. In addition, a very fast 'shtns' backend can be used when [SHTns](https://nschaeff.bitbucket.io/shtns/) is installed. The backends can be specified in enabled routines as the options: `engine='shlib'` or `engine='shtns'`. 41 | 42 | ## Development Installation 43 | If you want to help in the development of this package, it's best to clone the repository to allow for modifications and pull requests. The extension makes use of [Cython](https://cython.readthedocs.io/en/latest/) generated code to speed up spherical harmonic synthesis and analysis. 44 | 45 | 1. Create your own virtual environment with `venv` or Anaconda *(Optional but recommended, when a user installation is desired)* 46 | 2. Clone this repository `git clone https://github.com/ITC-Water-Resources/shxarray.git` 47 | 3. Change to the repository directory `cd shaxarray` 48 | 4. Set the environment variable `export USE_CYTHON=1` *(Optional and only in the case Cython code is being developed or needs to be regenerated)* 49 | 5. Install using pip `pip install .` or use `pip install -e .` for an editable install 50 | 51 | ### Cython build tip on an editable install 52 | From the repository root directory, regenerating the shared library running 53 | 54 | ```python ./setup.py build_ext``` 55 | 56 | will be much faster than using 57 | 58 | ```pip install -e .``` 59 | 60 | 61 | This will build the shared library in for example `./build/lib.linux-x86_64-cpython-3xx/shxarray/shlib.cpython-3xx-x86_64-linux-gnu.so`. To make sure changes are picked up in your editable install you should create a symbolic link in the Python part of the library e.g. : 62 | 63 | ``` 64 | cd src/shxarray/ 65 | ln -sf ../../build/lib.linux-x86_64-cpython-311/shxarray/shlib.cpython-311-x86_64-linux-gnu.so 66 | ``` 67 | 68 | ### Numpy version issues 69 | The provided c++ files are cythonized against numpy > 2. When building against older numpy versions (<2), the cpp files are re-cythonized upon install, this requires a working cython installation. 70 | 71 | 72 | ## Contributing 73 | This repository is under development and contributions and feedback is welcome. 74 | 75 | ### Contributors 76 | * Main developer: Roelof Rietbroek (r.rietbroek@utwente.nl) 77 | * Kiana Karimi 78 | 79 | 80 | 81 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | #target to regenerate api doc files (in a new directory so it doesn't override older ones) 18 | apidoc: 19 | sphinx-apidoc -o source/references ../src/shxarray 20 | 21 | # Catch-all target: route all unknown targets to Sphinx using the new 22 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 23 | %: Makefile 24 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 25 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | #sphinx-rtd-theme >= 1.3.0 2 | nbsphinx >= 0.8.11 3 | sphinx-tabs==3.4.4 4 | sphinxcontrib-apidoc >= 0.5.0 5 | sphinxcontrib-applehelp >= 1.0.7 6 | sphinxcontrib-devhelp >= 1.0.5 7 | sphinxcontrib-htmlhelp >= 2.0.4 8 | sphinxcontrib-jquery >= 4.1 9 | sphinxcontrib-jsmath >= 1.0.1 10 | sphinxcontrib-qthelp >= 1.0.6 11 | sphinxcontrib-serializinghtml >= 1.1.9 12 | sphinx-github-style >= 0.7.0 13 | pydata_sphinx_theme >= 0.4.0 14 | 15 | -------------------------------------------------------------------------------- /docs/source/_static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/docs/source/_static/favicon.ico -------------------------------------------------------------------------------- /docs/source/_static/shxarraylogo_dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/docs/source/_static/shxarraylogo_dark.png -------------------------------------------------------------------------------- /docs/source/_static/shxarraylogo_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/docs/source/_static/shxarraylogo_light.png -------------------------------------------------------------------------------- /docs/source/_templates/module.rst_t: -------------------------------------------------------------------------------- 1 | {%- if show_headings %} 2 | {{- basename | e | heading }} 3 | 4 | {% endif -%} 5 | .. automodule:: {{ qualname }} 6 | {%- for option in automodule_options %} 7 | :{{ option }}: 8 | {%- endfor %} 9 | 10 | -------------------------------------------------------------------------------- /docs/source/_templates/package.rst_t: -------------------------------------------------------------------------------- 1 | {%- macro automodule(modname, options) -%} 2 | .. automodule:: {{ modname }} 3 | {%- for option in options %} 4 | :{{ option }}: 5 | {%- endfor %} 6 | {%- endmacro %} 7 | 8 | {%- macro toctree(docnames) -%} 9 | .. toctree:: 10 | :maxdepth: {{ maxdepth }} 11 | {% for docname in docnames %} 12 | {{ docname }} 13 | {%- endfor %} 14 | {%- endmacro %} 15 | 16 | {%- if is_namespace %} 17 | {{- [pkgname, "namespace"] | join(" ") | e | heading }} 18 | {% else %} 19 | {{- pkgname | e | heading }} 20 | {% endif %} 21 | 22 | {%- if is_namespace %} 23 | .. py:module:: {{ pkgname }} 24 | {% endif %} 25 | 26 | {%- if modulefirst and not is_namespace %} 27 | {{ automodule(pkgname, automodule_options) }} 28 | {% endif %} 29 | 30 | {{ toctree(subpackages) }} 31 | 32 | {%- if submodules %} 33 | {% if separatemodules %} 34 | {{ toctree(submodules) }} 35 | {% else %} 36 | {%- for submodule in submodules %} 37 | {% if show_headings %} 38 | {{- submodule | e | heading(2) }} 39 | {% endif %} 40 | {{ automodule(submodule, automodule_options) }} 41 | {% endfor %} 42 | {%- endif %} 43 | {%- endif %} 44 | 45 | {%- if not modulefirst and not is_namespace %} 46 | Module contents 47 | --------------- 48 | 49 | {{ automodule(pkgname, automodule_options) }} 50 | {% endif %} 51 | -------------------------------------------------------------------------------- /docs/source/_templates/toc.rst_t: -------------------------------------------------------------------------------- 1 | {{ header | heading }} 2 | 3 | .. toctree:: 4 | :maxdepth: {{ maxdepth }} 5 | {% for docname in docnames %} 6 | {{ docname }} 7 | {%- endfor %} 8 | 9 | -------------------------------------------------------------------------------- /docs/source/api.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============= 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | 7 | references/shxarray.core.rst 8 | references/shxarray.kernels.rst 9 | references/shxarray.earth.rst 10 | references/shxarray.geom.rst 11 | references/shxarray.io.rst 12 | references/shxarray.exp.rst 13 | references/shxarray.geoslurp.rst 14 | references/shxarray.shlib.rst 15 | -------------------------------------------------------------------------------- /docs/source/changelog.rst: -------------------------------------------------------------------------------- 1 | Releases 2 | ======== 3 | Change log for shxarray 4 | 5 | Last change: |today| 6 | 7 | Upcoming Version 8 | ---------------- 9 | - .. 10 | 11 | 12 | Version 1.3.0 13 | ------------- 14 | - Add logo, favicon and change documentation theme 15 | - Add `SHtns `_ as a (very fast) computational backend for analysis and synthesis operations of spherical harmonic data. 16 | - Fix code such that it can be compiled on Windows with MSVC see: `Installation fails on Windows `_ 17 | - Add Xarray reading backend to read SINEX files 18 | - Add option to decorate xarray with user defined attributes which can be saved to file 19 | - Add a Spectral Sea Level Equation solver 20 | 21 | 22 | Version <= 1.2.0 23 | ---------------- 24 | Consult the `commit messages `_ for information on earlier releases 25 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # For the full list of built-in configuration values, see the documentation: 4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 5 | 6 | # -- Project information ----------------------------------------------------- 7 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 8 | from datetime import datetime 9 | import os 10 | 11 | project = 'shxarray' 12 | copyright = str(datetime.now().year)+', Roelof Rietbroek' 13 | author = 'Roelof Rietbroek' 14 | 15 | # -- General configuration --------------------------------------------------- 16 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 17 | 18 | 19 | extensions = ['nbsphinx', 'sphinxcontrib.apidoc', 'sphinx.ext.autodoc','sphinx.ext.napoleon'] 20 | 21 | templates_path = ['_templates'] 22 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store','**.ipynb_checkpoints'] 23 | 24 | apidoc_template_dir='_templates' 25 | #figure out the actual installation directory 26 | import shxarray 27 | apidoc_module_dir=os.path.dirname(shxarray.__file__) 28 | 29 | apidoc_output_dir = 'references' 30 | apidoc_separate_modules = True 31 | apidoc_module_first=True 32 | apidoc_toc_file=False 33 | 34 | # -- Options for HTML output ------------------------------------------------- 35 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 36 | #html_theme = 'sphinx_rtd_theme' 37 | html_theme='pydata_sphinx_theme' 38 | 39 | html_static_path = ['_static'] 40 | 41 | napoleon_numpy_docstring = True 42 | 43 | nbsphinx_prolog = """ 44 | Download this Jupyter notebook from `github `_ 45 | 46 | ---- 47 | """ 48 | html_theme_options = { 49 | "use_edit_page_button": False, 50 | "icon_links": [ 51 | { 52 | # Label for this link 53 | "name": "GitHub", 54 | # URL where the link will redirect 55 | "url": "https://github.com/ITC-Water-Resources/shxarray", # required 56 | # Icon class (if "type": "fontawesome"), or path to local image (if "type": "local") 57 | "icon": "fa-brands fa-github", 58 | # The type of image to be used (see below for details) 59 | "type": "fontawesome", 60 | } 61 | ], 62 | "logo": { 63 | "alt_text": "shxarray - Home", 64 | "image_light": "_static/shxarraylogo_dark.png", 65 | "image_dark": "_static/shxarraylogo_light.png", 66 | } 67 | } 68 | 69 | html_favicon= '_static/favicon.ico' 70 | 71 | # html_context = { 72 | # # "github_url": "https://github.com", # or your GitHub Enterprise site 73 | # "github_user": "ITC-Water-Resources", 74 | # "github_repo": "shxarray", 75 | # "github_version": "main", 76 | # "doc_path": "docs/source", 77 | # } 78 | 79 | 80 | # def linkcode_resolve(domain, info): 81 | # if domain != 'py': 82 | # return None 83 | # if not info['module']: 84 | # return None 85 | # filename = info['module'].replace('.', '/') 86 | # return "https://github.com/ITC-Water-Resources/shxarray/%s.py" % filename 87 | 88 | # linkcode_url="https://github.com/ITC-Water-Resources/shxarray" 89 | # linkcode_link_text="view on" 90 | -------------------------------------------------------------------------------- /docs/source/dev.rst: -------------------------------------------------------------------------------- 1 | Development 2 | =========== 3 | 4 | 5 | Development install 6 | ------------------- 7 | Users interested in developing can install the latest version from `github `_. Cython is needed in case the binary extension is being developed, and users can consult the dedicated instructions on the github repository. 8 | 9 | 10 | 11 | 12 | Testing suite with pytest 13 | ------------------------- 14 | From the repositories root directory, the entire test suite can be run with the following command: 15 | 16 | .. code-block:: console 17 | 18 | python -m pytest tests/ 19 | 20 | 21 | Coding style 22 | ------------ 23 | Code can be supplied with `numpy docstrings `_ so they can be parsed into this documentation. 24 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. shxarray documentation master file, created by 2 | sphinx-quickstart on Fri Dec 8 22:28:53 2023. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | 7 | Welcome to shxarray's documentation! 8 | ==================================== 9 | 10 | .. toctree:: 11 | :maxdepth: 2 12 | :caption: Contents: 13 | 14 | introduction.rst 15 | installation.rst 16 | tutorial.rst 17 | api.rst 18 | dev.rst 19 | changelog.rst 20 | 21 | Indices and tables 22 | ================== 23 | 24 | * :ref:`genindex` 25 | * :ref:`modindex` 26 | * :ref:`search` 27 | -------------------------------------------------------------------------------- /docs/source/installation.rst: -------------------------------------------------------------------------------- 1 | Getting started 2 | =============== 3 | 4 | Installation 5 | ------------ 6 | The latest **shxarray** is hosted on `pypi `_ and can be installed through pip: 7 | 8 | ``pip install shxarray`` 9 | 10 | Part of the module is written is `Cython `_, which means that a c compiler is needed to build the package. A binary wheel is currently not offered, but this may be offered in the future. 11 | 12 | Import and usage 13 | ---------------- 14 | For most operations, a simple import will expose the xarray extensions. For example: 15 | 16 | .. code-block:: python 17 | 18 | import shxarray 19 | import xarray as xr 20 | 21 | #Initialize a DataArray with zeros which has a dimension spanning degrees from nmin to nmax 22 | 23 | nmax=20 24 | nmin=2 25 | dazeros=xr.DataArray.sh.ones(nmax=nmax,nmin=nmin) 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /docs/source/introduction.rst: -------------------------------------------------------------------------------- 1 | Introduction 2 | ============ 3 | 4 | The **shxarray** package is aimed to make using spherical harmonic operations more accessible to a community which is used to xarray. 5 | 6 | Putting degrees and orders in a MultiIndex 7 | ------------------------------------------ 8 | 9 | Spherical harmonic coefficients are often put in 2-dimensional arrays with degree and order spanning the 2 dimensions. This has the advantage that individual coefficients can be easily referenced as e.g. `cnm=mat[n,m]`. However, since only the upper triangle of those matrices are non-zero, the sparseness of these matrices is not made use of. When working with large datasets which also span other dimensions such as time or different levels, this will cause large segments of zeros. 10 | 11 | A `pandas.MultiIndex `_ can facilitate the stacking of degrees and orders in a single dimension. On top of that, this multindex can then be used in `xarray` to work with spherical harmonics along a single coordinate. In **shxarray**, the spherical harmonic index is generally denoted as ``nm``, while when two spherical harmonic coordinates are needed alternative versions such as ``nm_`` are added. 12 | 13 | Exposing spherical harmonic functionality to xarray 14 | --------------------------------------------------- 15 | 16 | Some of the functionality needed for working with spherical harmonics need specialized access to the degree and order information. The aim of **shxarray** is to expose common functionality through `xarray accessors `_. This allows, besides familiar syntax for xarray users, also chaining of operations in a compact syntax. 17 | 18 | Delegate common operations to xarray 19 | ------------------------------------ 20 | 21 | In contrast to specialized spherical harmonic operations, many operations on spherical harmonic data can be delegated to xarray itself. Wherever possible, functionality and broadcasting features of xarray are made use for a consistent syntax. 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /docs/source/logo_etc/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/docs/source/logo_etc/favicon.ico -------------------------------------------------------------------------------- /docs/source/logo_etc/logo_base.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/docs/source/logo_etc/logo_base.png -------------------------------------------------------------------------------- /docs/source/logo_etc/logo_base.xcf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/docs/source/logo_etc/logo_base.xcf -------------------------------------------------------------------------------- /docs/source/logo_etc/makefavicon: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | fin=shxarraylogo_light.png 4 | fout=favicon.ico 5 | extent=`identify $fin | awk '{\ 6 | split($3,dim,"x");\ 7 | if(dim[1] > dim[2]){\ 8 | printf "0x%d\n",(dim[1])\ 9 | }else{\ 10 | printf "%dx0\n",(dim[2])}\ 11 | }'` 12 | 13 | convert $fin -background none -gravity center -extent $extent -resize 16x16 $fout 14 | -------------------------------------------------------------------------------- /docs/source/logo_etc/shxarray_logo_wtext_dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/docs/source/logo_etc/shxarray_logo_wtext_dark.png -------------------------------------------------------------------------------- /docs/source/logo_etc/shxarray_logo_wtext_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/docs/source/logo_etc/shxarray_logo_wtext_light.png -------------------------------------------------------------------------------- /docs/source/logo_etc/shxarraylogo_dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/docs/source/logo_etc/shxarraylogo_dark.png -------------------------------------------------------------------------------- /docs/source/logo_etc/shxarraylogo_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/docs/source/logo_etc/shxarraylogo_light.png -------------------------------------------------------------------------------- /docs/source/notebooks/.gitignore: -------------------------------------------------------------------------------- 1 | data/* 2 | -------------------------------------------------------------------------------- /docs/source/tutorial.rst: -------------------------------------------------------------------------------- 1 | Tutorials 2 | ========= 3 | The examples in this tutorials are builds using Jupyter notebooks, which can be downloaded from `the github repository `_. 4 | 5 | .. toctree:: 6 | 7 | notebooks/visualize_filter 8 | notebooks/TerrestrialWaterStorage 9 | notebooks/OceanMask 10 | notebooks/Geometry2sphericalHarmonics 11 | notebooks/SealevelEquation 12 | -------------------------------------------------------------------------------- /headerlicense.txt: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2025 4 | # 5 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61.0","setuptools-scm>=8","numpy","Cython>=3","wheel","pytest","scipy","packaging"] 3 | build-backend = "setuptools.build_meta" 4 | [project] 5 | name = "shxarray" 6 | dynamic = ["version"] 7 | #version = "0.0.1" 8 | authors = [ 9 | { name="Roelof Rietbroek", email="r.rietbroek@utwente.nl" }, 10 | ] 11 | description = "Spherical harmonic extension for Xarray" 12 | readme = "README.md" 13 | requires-python = ">=3.8" 14 | classifiers = [ 15 | "Programming Language :: Python :: 3", 16 | "License :: OSI Approved :: Apache Software License", 17 | "Operating System :: POSIX :: Linux", 18 | "Intended Audience :: Science/Research", 19 | "Topic :: Scientific/Engineering", 20 | "Development Status :: 1 - Planning" 21 | ] 22 | dependencies = [ "pandas >= 2.0", "pyaml >= 23.9.0", "scipy", "xarray >= 2023.1.0", 23 | "numpy","numba", "sparse","importlib_metadata","requests","openpyxl","geopandas"] 24 | 25 | [project.optional-dependencies] 26 | #you need dask in combination with when using older xarray versions 27 | dask=["dask>=2022.9.2"] 28 | #Optionally speed up reading of gzip compressed files: 29 | rapidgzip=["rapidgzip"] 30 | 31 | [tool.setuptools_scm] 32 | version_file = "src/shxarray/_version.py" 33 | 34 | #For some reason specifying the xarray-backend as below is not working in toml (it's added in setup.py) 35 | [project.entry-points."xarray.backends"] 36 | icgem = "shxarray.io.shiobackend:ICGEMBackEntryPoint" 37 | gsmv6 = "shxarray.io.shiobackend:GSMv6BackEntryPoint" 38 | shascii ="shxarray.io.shiobackend:SHAsciiBackEntryPoint" 39 | sinex ="shxarray.io.shiobackend:SINEXBackEntryPoint" 40 | #ddk = "shxarray.io.shiobackend:DDKBackEntryPoint" 41 | 42 | [project.entry-points."shxarray.computebackends"] 43 | shtns = "shtns_backend.shtns:SHTnsBackend" 44 | shlib = "shxarray.shlib:SHComputeBackend" 45 | 46 | [project.urls] 47 | "Homepage" = "https://github.com/ITC-Water-Resources/shxarray" 48 | "Bug Tracker" = "https://github.com/ITC-Water-Resources/shxarray/issues" 49 | 50 | [project.entry-points."geoslurp.dsetfactories"] 51 | deg1n2corr = "shxarray.geoslurp.deg1n2:getDeg1n2corrDsets" 52 | tugrazgrace = "shxarray.geoslurp.TUGRAZDsets:TUGRAZGRACEDsets" 53 | gracel2 = "shxarray.geoslurp.GRACEDsets:GRACEDsets" 54 | 55 | [project.entry-points."geoslurp.dsets"] 56 | icgemstatic = "shxarray.geoslurp.icgemdset:ICGEMstatic" 57 | loadlove = "shxarray.geoslurp.loadlove:LLove" 58 | gracefilter = "shxarray.geoslurp.gracefilters:GRACEfilter" 59 | 60 | [project.entry-points."geoslurp.viewfactories"] 61 | graceviews = "shxarray.geoslurp.graceviews:getGRACEviews" 62 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pandas >= 2.0 2 | pyaml >= 23.9.0 3 | scipy 4 | xarray >= 2023.1.0 5 | numba 6 | sparse 7 | importlib_metadata 8 | requests 9 | openpyxl 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /sample-data/GRACEDataSample_2020.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/sample-data/GRACEDataSample_2020.sql -------------------------------------------------------------------------------- /sample-data/GRACEDataSample_2020_files.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/sample-data/GRACEDataSample_2020_files.tgz -------------------------------------------------------------------------------- /sample-data/dump-grace.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # R. Rietbroek 3 | # Extract a subset of GRACE data from a geoslurp database in a sqlite table and tar archive 4 | # This can be used in the testing and tutorial examples 5 | 6 | #Note to execute this script a populated database and a geoslurp client (https://github.com/strawpants/geoslurp) is required 7 | from geoslurp.db.exporter import exportQuery 8 | from geoslurp.db import geoslurpConnect 9 | import os 10 | 11 | qry2020="select * from gravity.gracecomb_l2_jpl_n60 WHERE date_part('year',time) = 2020" 12 | 13 | outputfile="GRACEDataSample_2020.sql" 14 | 15 | conn=geoslurpConnect(dbalias="marge") 16 | localdataroot=conn.localdataroot 17 | exportQuery(conn.dbeng.execute(qry2020),outputfile,layer="gracel2",packUriCols=["gsm","gac","gaa","gad"],striproot=os.path.join(localdataroot,"gravity"),localdataroot=localdataroot) 18 | 19 | #Also add a static gravity field to subtract 20 | staticqry="select * from gravity.icgem_static where uri LIKE '%%GGM05C%%'" 21 | exportQuery(conn.dbeng.execute(staticqry),outputfile,layer="static",packUriCols=["uri"],striproot=os.path.join(localdataroot,"gravity"),localdataroot=localdataroot) 22 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | 6 | 7 | 8 | 9 | from setuptools import setup,Extension 10 | from setuptools_scm import get_version 11 | from Cython.Build import cythonize 12 | import Cython.Compiler.Options 13 | from packaging.version import Version 14 | 15 | import os 16 | import numpy as np 17 | import sys 18 | 19 | if sys.platform.startswith("win"): 20 | winplatform=True 21 | extra_args = ['/openmp'] 22 | else: 23 | winplatform=False 24 | extra_args = ['-fopenmp'] 25 | 26 | if "DEBUG_CYTHON" in os.environ: 27 | debug=True 28 | extra_args.append('-O0') 29 | #extra_args.append('-pg') 30 | else: 31 | debug=False 32 | extra_args.append('-O3') 33 | 34 | #don't necessarily use cython 35 | if "USE_CYTHON" in os.environ or winplatform: 36 | # note being on windows forces the use of cython 37 | useCython=True 38 | ext=".pyx" 39 | Cython.Compiler.Options.annotate = True 40 | else: 41 | useCython=False 42 | ext=".cpp" 43 | 44 | #Force the use of cython if numpy has a version < 2 45 | if not useCython and Version(np.__version__) < Version ("2.0.0"): 46 | useCython=True 47 | ext=".pyx" 48 | 49 | 50 | def listexts(): 51 | names=["shlib"] 52 | exts=[] 53 | for nm in names: 54 | exts.append(Extension("shxarray."+nm.replace("/","."),["src/builtin_backend/"+nm+ext],include_dirs=[np.get_include(),"."], define_macros=[('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION')],extra_compile_args=extra_args,extra_link_args=extra_args)) 55 | return exts 56 | 57 | extensions=listexts() 58 | 59 | 60 | if useCython: 61 | #additionally cythonize pyx files before building 62 | extensions=cythonize(extensions,language_level=3,annotate=True,gdb_debug=debug,compiler_directives={'embedsignature': True}) 63 | 64 | setup( 65 | version = get_version(root='.', relative_to=__file__), 66 | ext_modules=extensions 67 | ) 68 | -------------------------------------------------------------------------------- /src/builtin_backend/.gitignore: -------------------------------------------------------------------------------- 1 | *html 2 | -------------------------------------------------------------------------------- /src/builtin_backend/Helpers.hpp: -------------------------------------------------------------------------------- 1 | /*! \file 2 | \brief 3 | \copyright Roelof Rietbroek 2023 4 | \license 5 | This file is part of shxarray. 6 | */ 7 | 8 | 9 | #include 10 | #include 11 | #ifndef HELPERS_HPP_ 12 | #define HELPERS_HPP_ 13 | 14 | 15 | inline int csphase(int m){ 16 | return 1-2*(std::abs(m)%2); 17 | } 18 | 19 | inline int kronecker(int n1,int n2){ 20 | return n1==n2?1:0; 21 | } 22 | 23 | 24 | 25 | 26 | typedef std::pair nmpair; 27 | struct nm_hash 28 | { 29 | template 30 | std::size_t operator() (const std::pair &pair) const { 31 | //we take advantage of the fact that we don't expect very large degrees which saturate the 32 bit integer 32 | //so we left shift the degree and add the absolute of the order (left shifted by 1 position) and add the signbit of the order 33 | // this should be fine for hasing degree order combinations up to 2**15 (32768) 34 | return static_cast((pair.first<<15) + (std::abs(pair.second)<<1)+std::signbit(pair.second)); 35 | } 36 | }; 37 | 38 | typedef std::unordered_map nm_umap; 39 | 40 | class Nmindex{ 41 | public: 42 | Nmindex():nmmap_(1){}; 43 | Nmindex(size_t maxsize):nmmap_(maxsize){}; 44 | //size_t operator()(int n,int m)const{ 45 | //return nmmap_[std::make_pair(n,m)]; 46 | //} 47 | //const size_t & operator[](const nmpair & nm)const{ 48 | //return nmmap_[nm]; 49 | //} 50 | const size_t & operator[](const nmpair & nm)const{ 51 | return nmmap_.at(nm); 52 | } 53 | const size_t & operator()(const int & n,const int & m)const{ 54 | return nmmap_.at(std::make_pair(n,m)); 55 | } 56 | 57 | void set(nmpair nm,size_t ix){ 58 | nmmap_[nm]=ix; 59 | } 60 | //size_t get(const nmpair & nm){ 61 | //return nmmap_[nm]; 62 | //} 63 | private: 64 | nm_umap nmmap_; 65 | }; 66 | 67 | 68 | 69 | #endif 70 | -------------------------------------------------------------------------------- /src/builtin_backend/Legendre.hpp: -------------------------------------------------------------------------------- 1 | /*! \file 2 | \brief 3 | \copyright Roelof Rietbroek 2019 4 | \license 5 | This file is part of shxarray. 6 | */ 7 | 8 | #include 9 | #include "cassert" 10 | #ifndef FR_SH_LEGENDRE_HPP_ 11 | #define FR_SH_LEGENDRE_HPP_ 12 | 13 | #define UNNORM 0 14 | #define GEODESY4PI 1 15 | 16 | 17 | ///@brief a class which computes and caches a (un)normalized Legendre polynomial 18 | template 19 | class Legendre{ 20 | public: 21 | Legendre(){} 22 | Legendre(int nmax):nmax_(nmax),pn_(nmax+1){} 23 | Legendre(int nmax,int norm):nmax_(nmax),pn_(nmax+1),norm_(norm){} 24 | const std::vector get(const ftype costheta); 25 | private: 26 | int nmax_=-1; 27 | std::vector pn_{}; 28 | int norm_=UNNORM; 29 | }; 30 | 31 | 32 | ///@brief a class which computes and caches a (un)normalized Legendre polynomial 33 | template 34 | const std::vector Legendre::get(const ftype costheta){ 35 | assert(nmax_ >0); 36 | if (pn_[1] == costheta){ 37 | ///Quick return if already computed 38 | return pn_; 39 | } 40 | 41 | ftype pnmin1=costheta; 42 | ftype pnmin2=1; 43 | ftype pn; 44 | pn_[0]=pnmin2; 45 | pn_[1]=pnmin1; 46 | 47 | for(int n=2;n<=nmax_;++n){ 48 | pn=((2*n-1)*costheta*pnmin1-(n-1)*pnmin2)/static_cast(n); 49 | pnmin2=pnmin1; 50 | pnmin1=pn; 51 | pn_[n]=pn; 52 | } 53 | 54 | /* possibly normalize */ 55 | if (norm_ == GEODESY4PI){ 56 | for(int n=0;n<=nmax_;++n){ 57 | pn_[n]=pn_[n]/(2*n+1); 58 | 59 | } 60 | } 61 | return pn_; 62 | } 63 | 64 | #endif 65 | -------------------------------------------------------------------------------- /src/builtin_backend/Legendre_nm.hpp: -------------------------------------------------------------------------------- 1 | /*! \file 2 | \brief Computation of (associated) Legendre functions (fast version) 3 | \copyright Roelof Rietbroek 2022 4 | \license 5 | This file is part of shxarray. 6 | */ 7 | 8 | #include 9 | 10 | #include 11 | #include 12 | #include 13 | #ifndef FR_SH_LEGENDRE_NM_HPP 14 | #define FR_SH_LEGENDRE_NM_HPP 15 | template 16 | class Legendre_nm { 17 | public: 18 | Legendre_nm(const int nmax); 19 | Legendre_nm() {} 20 | using nmp=std::pair; 21 | void set(const ftype costheta, ftype pnm[])const; 22 | inline size_t idx(int n, int m)const { 23 | return Legendre_nm::i_from_nm(n,m,nmax_); 24 | } 25 | inline static size_t i_from_nm(int n, int m, int nmax) { 26 | assert(m <= n); 27 | assert(n <= nmax); 28 | return m * (nmax + 1) - (m * (m + 1)) / 2 + n; 29 | } 30 | inline nmp nm(const size_t idx)const{ 31 | return nmindex_[idx]; 32 | } 33 | inline static nmp nm_from_i(const size_t idx, 34 | const int nmax) { 35 | int m = (3.0 + 2 * nmax) / 2 - 36 | std::sqrt(std::pow(3 + 2 * nmax, 2) / 4.0 - 2 * idx); 37 | int n = idx - (((m + 1) * (m + 2)) / 2 + m * (nmax - m)) + m + 1; 38 | assert(m <= n); 39 | assert(n <= nmax); 40 | return std::make_pair(n, m); 41 | } 42 | 43 | int nmax() const { return nmax_; } 44 | int size() const { return sz_; } 45 | 46 | private: 47 | struct alignas(64) cacheEntry { 48 | ftype pnmin2 = 0.0; 49 | ftype pnmin1 = 0.0; 50 | ftype pn = 0.0; 51 | ftype sectorial = 0.0; 52 | }; 53 | 54 | int nmax_ = -1; 55 | size_t sz_ = 0; 56 | std::vector wnn_ = {}; 57 | std::vector wnm_ = {}; 58 | std::vector nmindex_ = {}; 59 | }; 60 | 61 | template 62 | Legendre_nm::Legendre_nm(int nmax) 63 | : nmax_(nmax), 64 | sz_(i_from_nm(nmax, nmax, nmax) + 1), 65 | wnn_(nmax + 1), 66 | wnm_(sz_),nmindex_(sz_){ 67 | // precompute factors involving square roots 68 | wnn_[0] = 0.0; 69 | wnn_[1] = sqrt(3.0); 70 | for (int n = 2; n <= nmax_; ++n) { 71 | wnn_[n] = sqrt((2 * n + 1) / (2.0 * n)); 72 | } 73 | for (int m = 0; m <= nmax_; ++m) { 74 | for (int n = m + 1; n <= nmax_; ++n) { 75 | wnm_[i_from_nm(n, m, nmax_)] = 76 | sqrt((2 * n + 1.0) / (n + m) * (2 * n - 1.0) / (n - m)); 77 | } 78 | } 79 | 80 | //create a cached lookup index for degree and order 81 | for(size_t i =0;i 89 | void Legendre_nm::set(const ftype costheta, ftype pnm[])const { 90 | assert(nmax_ > 0); 91 | assert(costheta >= -1.0 and costheta <= 1.0); 92 | 93 | ftype sinTheta = std::sqrt(1 - pow(costheta, 2)); 94 | 95 | ftype numericStabilityFactor = 1e-280; 96 | 97 | // Loop over orders (slowly varying) 98 | cacheEntry L1CacheEntry; 99 | 100 | // initial rescaling is 1e280 101 | L1CacheEntry.sectorial = 1.0 / numericStabilityFactor; 102 | /// Initial value of the recursion 103 | pnm[0] = 1.0; 104 | 105 | size_t idx; 106 | for (int m = 0; m < nmax_; ++m) { 107 | idx = i_from_nm(m, m, nmax_); 108 | L1CacheEntry.pnmin2 = numericStabilityFactor; 109 | 110 | // compute offdiagonal element 111 | L1CacheEntry.pnmin1 = wnm_[idx + 1] * costheta * L1CacheEntry.pnmin2; 112 | pnm[idx + 1] = L1CacheEntry.pnmin1 * L1CacheEntry.sectorial; 113 | // loop over remaining degrees 114 | for (int n = m + 2; n <= nmax_; ++n) { 115 | idx = i_from_nm(n, m, nmax_); 116 | 117 | L1CacheEntry.pn = wnm_[idx] * (costheta * L1CacheEntry.pnmin1 - 118 | L1CacheEntry.pnmin2 / wnm_[idx - 1]); 119 | // write value to output vector and shift entries in the cache 120 | pnm[idx] = L1CacheEntry.pn * L1CacheEntry.sectorial; 121 | // shift entry to prepare for the next degree 122 | L1CacheEntry.pnmin2 = L1CacheEntry.pnmin1; 123 | L1CacheEntry.pnmin1 = L1CacheEntry.pn; 124 | } 125 | 126 | // Update the m+1 sectorial (applies n+1,n+1 <- n,n recursion) 127 | L1CacheEntry.sectorial *= wnn_[m + 1] * sinTheta; 128 | // also write the next sectorial to the output vector (scaled correctly) 129 | pnm[i_from_nm(m + 1, m + 1, nmax_)] = 130 | L1CacheEntry.sectorial * numericStabilityFactor; 131 | } 132 | } 133 | 134 | #endif /// FR_SH_LEGENDRE_NM_HPP/// 135 | -------------------------------------------------------------------------------- /src/builtin_backend/Ynm.hpp: -------------------------------------------------------------------------------- 1 | /*! \file 2 | \brief Computation of (associated) Legendre functions (fast version) 3 | \copyright Roelof Rietbroek 2022 4 | \license 5 | This file is part of shxarray. 6 | */ 7 | 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include "Legendre_nm.hpp" 14 | #include 15 | #ifndef YNM_CPP_HPP 16 | #define YNM_CPP_HPP 17 | 18 | struct mni{ 19 | int m; 20 | int n; 21 | size_t i; 22 | 23 | }; 24 | 25 | //using mni=struct mni_str; 26 | 27 | bool operator <(const mni& x, const mni& y) { 28 | return std::tie(x.m, x.n) < std::tie(y.m, y.n); 29 | } 30 | 31 | template 32 | class Ynm_cpp{ 33 | public: 34 | Ynm_cpp(const int nmax); 35 | Ynm_cpp(const size_t size, const int n [],const int m[ ]); 36 | Ynm_cpp(){}; 37 | void set(const ftype lon, const ftype lat); 38 | 39 | 40 | //using mn=std::pair; 41 | //inline ssize_t idx(int n, int m,int trig)const { 42 | //assert(m<=n); 43 | //int sgn=trig?-1:1; 44 | //mni mnisearch={m,n, 45 | //std::find(mnd.begin(), vec.end(), item) != vec.end() 46 | 47 | //return mnidx_.at(std::make_pair(sgn*m,n)); 48 | 49 | //} 50 | 51 | int nmax() const { return legnm.nmax(); } 52 | int size() const { return sz_; } 53 | ftype * data(){return ynmdata_.data();} 54 | const ftype * data()const{return ynmdata_.data();} 55 | ftype & operator [](size_t i) {return ynmdata_[i];} 56 | const ftype & operator [](size_t i) const {return ynmdata_[i];} 57 | //inline std::map getmn()const{return mnidx_;} 58 | inline std::vector getmn()const{return mnidx_;} 59 | private: 60 | Legendre_nm legnm; 61 | size_t sz_ = 0; 62 | std::vector pnmcache_ = {}; 63 | std::vector ynmdata_ = {}; 64 | std::vector mnidx_={}; 65 | //std::map mnidx_={}; 66 | ftype latprev=-1000; //initialize to impossible value 67 | bool sort=false; 68 | }; 69 | 70 | template 71 | Ynm_cpp::Ynm_cpp(int nmax):legnm(nmax),sz_(2*(legnm.idx(nmax,nmax)+1)-(nmax+1)) 72 | ,pnmcache_(legnm.size()),ynmdata_(sz_,0.0),mnidx_(sz_){ 73 | /// Fill internal index 74 | size_t i=0; 75 | for (int m=-nmax;m<=nmax;++m){ 76 | for (int n=abs(m);n<=nmax;++n){ 77 | mnidx_[i]={m,n,i}; 78 | i++; 79 | } 80 | } 81 | } 82 | 83 | template 84 | Ynm_cpp::Ynm_cpp(const size_t size, const int n [],const int m[ ]):sz_(size),ynmdata_(size,0.0),mnidx_(sz_){ 85 | 86 | ///find nmax 87 | int nmax=-1; 88 | //create a temporary map used to figure out the correct indices of the desired output order 89 | //map,ssize_t> mninputidx; 90 | for (size_t i=0;i(nmax); 96 | pnmcache_=std::vector(legnm.size()); 97 | 98 | ///sort the vector so that order varies slowest (avoids unneeded recomputation of trigonometric functions) 99 | std::sort(mnidx_.begin(),mnidx_.end()); 100 | 101 | } 102 | 103 | 104 | 105 | template 106 | void Ynm_cpp::set(const ftype lon, const ftype lat){ 107 | if (lat != latprev){ 108 | ///We need to recompute the associated legendre functions 109 | ftype costheta=sin(lat*M_PI/180.0); 110 | legnm.set(costheta,pnmcache_.data()); 111 | //std::cout << "recompute for lat "<< lat << " " << latprev << std::endl; 112 | latprev=lat; 113 | } 114 | ftype lonr = lon*M_PI/180.0; 115 | ftype trig_mlon=0.0; 116 | int n,m; 117 | size_t idx; 118 | ///assign max to mold so it will trigger the computation of the trigonometric term on the first entry 119 | int mold=std::numeric_limits::max(); 120 | 121 | for (const auto & mni_item : mnidx_){ 122 | m=mni_item.m; 123 | n=mni_item.n; 124 | idx=mni_item.i; 125 | ///Note: the order (m) of the key varies slowest in the nmidx_ map so trigonometric factors are only recomputed when necessary 126 | if (m != mold){ 127 | trig_mlon=(m<0)?sin(abs(m)*lonr):cos(m*lonr); 128 | mold=m; 129 | } 130 | ynmdata_[idx]=trig_mlon*pnmcache_[legnm.idx(n,abs(m))]; 131 | } 132 | 133 | 134 | } 135 | 136 | 137 | #endif /// YNM_CPP_HPP/// 138 | -------------------------------------------------------------------------------- /src/builtin_backend/analysis.pyx: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | 6 | # distutils: language = c++ 7 | 8 | import xarray as xr 9 | import cython 10 | # import numpy as np 11 | cimport numpy as np 12 | from cython.parallel cimport parallel, prange 13 | from legendre cimport Ynm_cpp 14 | from libc.stdio cimport printf 15 | from openmp cimport omp_lock_t,omp_init_lock,omp_set_lock,omp_unset_lock 16 | from shxarray.core.sh_indexing import SHindexBase 17 | from shxarray.core.cf import find_lon,find_lat 18 | from scipy.linalg.cython_blas cimport dger 19 | from libc.math cimport cos,abs 20 | 21 | @cython.boundscheck(False) 22 | @cython.wraparound(False) 23 | @cython.initializedcheck(False) 24 | cdef class Analysis: 25 | cdef public object _dsobj 26 | def __cinit__(self, int nmax): 27 | 28 | #create a spherical harmonic index 29 | self._dsobj=xr.Dataset(coords=SHindexBase.nm(nmax,0)) 30 | 31 | def __call__(self,dain:xr.DataArray): 32 | """Perform spherical harmonic analysis on an input xarray DataArray object""" 33 | if type(dain) != xr.DataArray: 34 | raise RuntimeError("input type should be a xarray.DataArray") 35 | 36 | #check for longitude/latitude grid and grid separation 37 | loninfo=find_lon(dain.coords) 38 | latinfo=find_lat(dain.coords) 39 | if loninfo.step is None or latinfo.step is None: 40 | raise RuntimeError("grid is not equidistant in lon and lat directions") 41 | 42 | dain=dain.rename({loninfo.var.name:"lon",latinfo.var.name:"lat"}) 43 | 44 | coordsout={ky:val for ky,val in dain.coords.items() if val.ndim == 1 and val.dims[0] not in ["lon","lat"]} 45 | 46 | # add the SH index to the output coordinates 47 | coordsout.update({ky:val for ky,val in self._dsobj.coords.items()}) 48 | 49 | dimsin=[(dim,sz) for dim,sz in dain.sizes.items() if dim not in ["lon","lat"]] 50 | # make sure the SHI index is the one who varies quickest (last dimension) 51 | dimsout=dimsin +[(dim,sz) for dim,sz in self._dsobj.sizes.items()] 52 | 53 | #allocate space (c-contiguous) for the output data 54 | daout=xr.DataArray(np.zeros([val[1] for val in dimsout]),coords=coordsout,dims=[val[0] for val in dimsout]) 55 | 56 | #scale input with area weights 57 | cdef double stepx=loninfo.step*np.pi/180 58 | cdef double stepy=latinfo.step*np.pi/180 59 | # printf("stepx %f stepy %f\n",stepx,stepy) 60 | cdef double weight=abs(stepx*stepy)/(4*np.pi) 61 | self._apply_ana(dain,daout,weight) 62 | return daout 63 | 64 | cdef _apply_ana(self,dain:xr.DataArray,dout:xr.DataArray,double weight): 65 | 66 | 67 | cdef double[::1] lonv=dain.lon.data.astype(np.double) 68 | cdef double[::1] latv=dain.lat.data.astype(np.double) 69 | cdef int nlat=len(latv) 70 | cdef int nlon=len(lonv) 71 | 72 | cdef int auxsize=np.prod([val for ky,val in dain.sizes.items() if ky not in ["lon","lat"]]) 73 | 74 | cdef int shsize=len(self._dsobj.indexes['nm']) 75 | #memoryview to output data (sh dimension should vary quickest) 76 | cdef double [:,:] outv=dout.data.reshape([auxsize,shsize]) 77 | #This is the same a s a Fortran contiguous array with dimension shsize,auxsize, lada=shsize 78 | 79 | 80 | 81 | 82 | cdef int[::1] nv = self._dsobj.nm.n.data.astype(np.int32) 83 | cdef int[::1] mv = self._dsobj.nm.m.data.astype(np.int32) 84 | 85 | cdef Ynm_cpp[double] ynm 86 | cdef int ilat,ilon 87 | 88 | cdef: 89 | int m=shsize 90 | int n=auxsize 91 | double alpha=1.0 92 | int lda=shsize 93 | int incx=1 94 | int incy=1 95 | 96 | cdef double [:,:,:] inval 97 | 98 | 99 | #check the order of lon lat 100 | londim=dain.get_axis_num('lon') 101 | latdim=dain.get_axis_num('lat') 102 | 103 | if abs(londim -latdim) != 1: 104 | raise RuntimeError("Longitude and latitude dimensions of input are not neighbouring, cannot handle this layout") 105 | 106 | cdef bint latfirst=False 107 | if londim == 0 or latdim == 0: 108 | if londim < latdim: 109 | inval=dain.data.reshape([nlon,nlat,auxsize],order='C') 110 | latfirst=False 111 | else: 112 | inval=dain.data.reshape([nlat,nlon,auxsize],order='C') 113 | latfirst=True 114 | elif londim == dain.ndim -1 or latdim == dain.ndim-1: 115 | if londim < latdim: 116 | inval=dain.data.reshape([auxsize,nlon,nlat],order='C').T 117 | latfirst=True 118 | else: 119 | inval=dain.data.reshape([auxsize,nlat,nlon],order='C').T 120 | latfirst=False 121 | cdef double d2r =np.pi/180 122 | incy=int(inval.strides[2]/8) 123 | cdef omp_lock_t lock 124 | omp_init_lock(&lock) 125 | with nogil, parallel(): 126 | 127 | ynm=Ynm_cpp[double](shsize,&nv[0],&mv[0]) 128 | for ilat in prange(nlat): 129 | alpha=weight*cos(latv[ilat]*d2r) 130 | for ilon in range(nlon): 131 | ynm.set(lonv[ilon],latv[ilat]) 132 | # void dger(int *m, int *n, d *alpha, d *x, int *incx, d *y, int *incy, d *a, int *lda) 133 | #critical section for openmp because outv gets updated by all threads 134 | omp_set_lock(&lock) 135 | if latfirst: 136 | dger(&m,&n,&alpha,ynm.data(),&incx,&inval[ilat,ilon,0],&incy,&outv[0,0],&lda) 137 | else: 138 | dger(&m,&n,&alpha,ynm.data(),&incx,&inval[ilon,ilat,0],&incy,&outv[0,0],&lda) 139 | omp_unset_lock(&lock) 140 | 141 | 142 | 143 | 144 | 145 | -------------------------------------------------------------------------------- /src/builtin_backend/gaunt.pxd: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | # distutils: language = c++ 6 | 7 | 8 | cimport cython 9 | from libcpp.vector cimport vector 10 | from libcpp.pair cimport pair 11 | # C++ / Cython interface declaration 12 | 13 | 14 | cdef extern from "Gaunt.hpp": 15 | cdef cppclass Gaunt[T] nogil: 16 | Gaunt() except + 17 | Gaunt(int n2,int n3,int m2, int m3) except + 18 | void set (int n2,int n3,int m2, int m3) nogil 19 | vector[T] get() except+ 20 | T operator[](int j)except+ 21 | int m()except+ 22 | int nmin()except+ 23 | int nmax()except+ 24 | 25 | cdef cppclass GauntReal[T] nogil: 26 | GauntReal() except + 27 | GauntReal(int nmax) nogil 28 | GauntReal(int n2,int n3,int mu2, int mu3) nogil 29 | void set(int n2,int n3,int mu2, int mu3) nogil 30 | T operator[](cython.size_t i) 31 | T at(int n1,int m1) except+ 32 | vector[T] get() except+ 33 | cython.size_t size() 34 | int nmin()except+ 35 | int nmax()except+ 36 | vector[pair[int,int]] & nmvec() except+ 37 | pair[int,int] & nm(cython.size_t i) 38 | # End of interface declaration 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /src/builtin_backend/legendre.pxd: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | # distutils: language = c++ 6 | 7 | cimport cython 8 | from libcpp.vector cimport vector 9 | from libcpp.pair cimport pair 10 | # from libcpp.map cimport map 11 | 12 | 13 | # C++ / Cython interface declaration 14 | cdef extern from "Legendre.hpp": 15 | cdef cppclass Legendre[T] nogil: 16 | Legendre(int nmax) except + 17 | Legendre(int nmax,int norm) except + 18 | vector[T] get(T costheta) except+ 19 | 20 | # C++ associated Legendre functions 21 | cdef extern from "Legendre_nm.hpp": 22 | cdef cppclass Legendre_nm[T] nogil: 23 | Legendre_nm() except + 24 | Legendre_nm(int nmax) except + 25 | void set(T costheta, T arr[]) except+ 26 | @staticmethod 27 | cython.size_t i_from_nm(int n,int m, int nmax) 28 | cython.size_t idx(int n,int m) 29 | @staticmethod 30 | pair[int,int] nm_from_i(cython.size_t idx, int nmax) 31 | #cached version: 32 | pair[int,int] nm(cython.size_t idx) 33 | int nmax() 34 | cython.size_t size() 35 | 36 | # C++ Surface Spherical Harmonics functions 37 | cdef extern from "Ynm.hpp": 38 | struct mni: 39 | int n 40 | int m 41 | size_t i 42 | cdef cppclass Ynm_cpp[T] nogil: 43 | Ynm_cpp() except + 44 | Ynm_cpp(int nmax) except + 45 | Ynm_cpp(cython.size_t size, const int n[],const int m[]) except + 46 | void set( T lon, T lat) nogil 47 | cython.ssize_t idx(int n,int m) 48 | T& operator[](size_t i) 49 | int nmax() 50 | T* data() 51 | cython.size_t size() 52 | vector[mni] getmn() 53 | # End of interface declaration 54 | 55 | -------------------------------------------------------------------------------- /src/builtin_backend/legendre.pyx: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | # distutils: language = c++ 6 | 7 | import cython 8 | from legendre cimport Legendre_nm,Legendre 9 | 10 | from cython.operator cimport dereference as deref 11 | import numpy as np 12 | cimport numpy as np 13 | 14 | 15 | # Cython wrapper class 16 | cdef class Pn: 17 | """Double precision Legendre polynomial wrapper""" 18 | cdef Legendre[double]*leg_ptr # Pointer to the wrapped C++ class 19 | def __cinit__(self, int nmax,norm=0): 20 | self.leg_ptr = new Legendre[double](nmax,norm) 21 | 22 | def __dealloc__(self): 23 | del self.leg_ptr 24 | 25 | def __call__(self,double costheta): 26 | #set the new colatitude 27 | return deref(self.leg_ptr).get(costheta) 28 | 29 | 30 | cdef class Pnm: 31 | """Double precision Legendre polynomial wrapper""" 32 | cdef Legendre_nm[double]*legnm_ptr # Pointer to the wrapped C++ class 33 | def __cinit__(self, int nmax): 34 | self.legnm_ptr = new Legendre_nm[double](nmax) 35 | 36 | def __dealloc__(self): 37 | del self.legnm_ptr 38 | 39 | def __call__(self,double costheta): 40 | cdef np.ndarray[np.double_t, ndim=1] pnmdata = np.zeros([deref(self.legnm_ptr).size()],dtype=np.double) 41 | cdef double[::1] mview = pnmdata 42 | deref(self.legnm_ptr).set(costheta,&mview[0]) 43 | return pnmdata 44 | def __len__(self): 45 | return deref(self.legnm_ptr).size() 46 | def nmax(self): 47 | return deref(self.legnm_ptr).nmax() 48 | 49 | def idx(self,int n,int m): 50 | return deref(self.legnm_ptr).i_from_nm(n,m,deref(self.legnm_ptr).nmax()) 51 | 52 | def nm(self,cython.size_t idx): 53 | return deref(self.legnm_ptr).nm_from_i(idx,deref(self.legnm_ptr).nmax()) 54 | 55 | def index(self): 56 | nmax=deref(self.legnm_ptr).nmax() 57 | sz=deref(self.legnm_ptr).size() 58 | cdef pair[int,int] (*nm_from_i)(cython.size_t,int) 59 | nm_from_i=deref(self.legnm_ptr).nm_from_i 60 | # return [deref(self.legnm_ptr).nm_from_i(idx,nmax) for idx in range(sz)] 61 | # nm_from_i=deref(self.legnm_ptr).nm_from_i 62 | return [nm_from_i(idx,nmax) for idx in range(sz)] 63 | -------------------------------------------------------------------------------- /src/builtin_backend/shindex.pxd: -------------------------------------------------------------------------------- 1 | from cython cimport size_t 2 | from libcpp.pair cimport pair 3 | cdef extern from "Helpers.hpp": 4 | cdef cppclass Nmindex nogil: 5 | Nmindex() except + 6 | Nmindex(size_t maxsize) except + 7 | void set(pair[int,int] nm, size_t ix) 8 | size_t operator[] (const pair[int,int] & nm) 9 | size_t operator() (const int & n,const int & m) 10 | -------------------------------------------------------------------------------- /src/builtin_backend/shlib.pyx: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | # distutils: language = c++ 6 | # cython: profile=False 7 | """ 8 | **shlib** is shxarray's default binary Cython backend. 9 | Some of the heavy lifting such as synthesis and analysis operations, is done using this the functions of this shared library. 10 | """ 11 | 12 | include "legendre.pyx" 13 | include "wigner3j.pyx" 14 | include "gaunt.pyx" 15 | include "ynm.pyx" 16 | include "synthesis.pyx" 17 | include "analysis.pyx" 18 | include "sinex.pyx" 19 | 20 | from shxarray.core.shcomputebase import SHComputeBackendBase 21 | from shxarray.core.cf import get_cfatts,get_cfglobal 22 | import numpy as np 23 | 24 | class SHComputeBackend(SHComputeBackendBase): 25 | _credits="Used backend: shlib cython extension from shxarray (https://shxarray.wobbly.earth/latest/references/shxarray.html#module-shxarray.shlib)" 26 | 27 | 28 | def synthesis(self,dain,dslonlat,**kwargs): 29 | 30 | syn=Synthesis(dslonlat) 31 | synres=syn(dain) 32 | synres.name=dain.name 33 | synres.lon.attrs=get_cfatts("longitude") 34 | synres.lat.attrs=get_cfatts("latitude") 35 | synres.attrs.update(get_cfglobal()) 36 | synres.attrs['history']="Synthesis performed using shlib backend" 37 | synres.attrs['comments']=self._credits 38 | return synres 39 | 40 | def analysis(self,dain,nmax,**kwargs): 41 | ana=Analysis(nmax) 42 | anares=ana(dain) 43 | anares.attrs.update(get_cfglobal()) 44 | anares.attrs['history']="Analysis performed using shlib backend" 45 | anares.attrs['comments']=self._credits 46 | return anares 47 | 48 | 49 | def gaunt(self,n2,n3,m2,m3): 50 | return getGaunt(n2,n3,m2,m3) 51 | 52 | def gauntReal(self,n2,n3,m2,m3): 53 | return getGauntReal(n2,n3,m2,m3) 54 | 55 | def wigner3j(self,j2,j3,m2,m3): 56 | return getWigner3j(j2,j3,m2,m3) 57 | 58 | def p2s(self,daobj): 59 | return getp2s(daobj) 60 | 61 | def lonlat_grid(self,nmax=None,lon=None,lat=None,gtype="regular"): 62 | """ 63 | Create a lon-lat grid compatible with the SHlib backend 64 | Parameters 65 | ---------- 66 | nmax : int, optional 67 | Maximum expected degree of the spherical harmonic expansion. 68 | 69 | gtype : str, optional 70 | Type of grid to create. Only 'regular','regular_lon0' and "point" are supported. The regular_lon0 option is a regular grid with the first longitude point at 0 degrees. 71 | Returns 72 | ------- 73 | An empty xarray.Dataset with CF-complying lon, lat coordinates variables 74 | 75 | """ 76 | if gtype is None: 77 | gtype="regular" 78 | 79 | 80 | if gtype not in ["regular","regular_lon0","point"]: 81 | raise ValueError("Only 'regular','regular_lon0' and 'point' type is supported") 82 | 83 | if nmax is not None: 84 | if lon is not None or lat is not None: 85 | raise ValueError("If nmax is specified, lon and lat should not be specified") 86 | #autogenerate a grid based on nmax 87 | idres=1 88 | while idres > 360/nmax/4: 89 | idres=idres/2 90 | if gtype == "regular": 91 | lon=np.arange(-180+idres/2,180,idres) 92 | lat=np.arange(-90+idres/2,90,idres) 93 | 94 | elif gtype == "regular_lon0": 95 | lon=np.arange(0,360,idres) 96 | lat=np.arange(-90+idres/2,90,idres) 97 | else: 98 | raise ValueError("Only 'regular' and 'regular_lon0' grid type are currently supported") 99 | 100 | dslonlat=xr.Dataset(coords=dict(lon=lon,lat=lat)) 101 | elif lon is not None and lat is not None: 102 | if gtype == "point": 103 | if len(lon) != len(lat): 104 | raise ValueError("For point type 'grid', lon and lat should have the same length") 105 | dslonlat=xr.Dataset(coords=dict(lon=("nlonlat",lon.data),lat=("nlonlat",lat.data))) 106 | else: 107 | dslonlat=xr.Dataset(coords=dict(lon=lon,lat=lat)) 108 | dslonlat=xr.Dataset(coords=dict(lon=lon,lat=lat)) 109 | else: 110 | raise ValueError("Either nmax or lon and lat should be specified") 111 | 112 | dslonlat.lon.attrs.update(get_cfatts("longitude")) 113 | dslonlat.lat.attrs.update(get_cfatts("latitude")) 114 | 115 | dslonlat.lon.attrs["shxarray_gtype"]=gtype 116 | dslonlat.lat.attrs["shxarray_gtype"]=gtype 117 | return dslonlat 118 | 119 | 120 | def multiply(self,dash1:xr.DataArray,dash2:xr.DataArray,method="spectral")->xr.DataArray: 121 | """ 122 | Multiply two spherical harmonics DataArrays together (equivalent to multiplying in the spatial domain) 123 | This function uses the computation of Real Gaunt coefficients 124 | Parameters 125 | ---------- 126 | 127 | dash1 : xr.DataArray 128 | 129 | dash2 : xr.DataArray 130 | 131 | 132 | 133 | Returns 134 | ------- 135 | xr.DataArray 136 | 137 | 138 | """ 139 | from time import time 140 | if method == "spatial": 141 | # Note: approximate method only 142 | shxlogger.info("Multiplying in the spatial domain") 143 | nmax=dash1.sh.nmax+dash2.sh.nmax 144 | dslonlat=self.lonlat_grid(nmax=nmax) 145 | t0=time() 146 | dagrd1=self.synthesis(dash1,dslonlat=dslonlat) 147 | dagrd2=self.synthesis(dash2,dslonlat=dslonlat) 148 | t1=time() 149 | dagrd=dagrd1*dagrd2 150 | dashout=self.analysis(dagrd,nmax=nmax) 151 | t2= time() 152 | shxlogger.info(f"Multiplication in the spatial domain took {t1-t0:.2f} seconds for synthesis and {t2-t1:.2f} seconds for analysis") 153 | elif method == "spectral": 154 | shxlogger.info("Multiplying in the spectral domain") 155 | dashout=multiply_sh(dash1,dash2) 156 | else: 157 | raise ValueError("Method for shlib should be either 'spatial' or 'spectral'") 158 | 159 | return dashout 160 | 161 | -------------------------------------------------------------------------------- /src/builtin_backend/sinex.pyx: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | # distutils: language = c++ 6 | # cython: profile=False 7 | 8 | cimport numpy as np 9 | import xarray as xr 10 | import shxarray 11 | import os 12 | from shxarray.core.sh_indexing import SHindexBase 13 | from shxarray.io.gzipwrap import gzip_open_r 14 | from libc.stdlib cimport strtol,strtod 15 | from shxarray.core.logging import shxlogger 16 | 17 | 18 | #A bit of a hack to directly allow access to the data of a Python unicode str object 19 | cdef extern from *: 20 | void *PyUnicode_DATA(object unicode) 21 | 22 | @cython.boundscheck(False) 23 | @cython.wraparound(False) 24 | @cython.initializedcheck(False) 25 | def read_symmat_fast(fileobj,dsout,blockname): 26 | """ 27 | Reads a triangular matrix from a SINEX block and returns a symmetric version (Cython version) 28 | This version is optimized and uses the following tricks 29 | 1. Direct access through a character pointer to the data of a unicode object (line of the files) 30 | 2. Use of Cython memoryviews to avoid boiunds checking of the numpy arrays 31 | 3. Use of Cython cdef variables to avoid type checking and optimize loop 32 | 4. Use of stdlib strtol, and strod to parse the lines 33 | Parameters 34 | ---------- 35 | fileobj : 36 | io buffer to read lines from 37 | 38 | dsout : xarray.Dataset 39 | xarray.Dataset to augment the matrix data to 40 | 41 | 42 | blockname : str 43 | name of the SINEX block. should be one of: 44 | SOLUTION/NORMAL_EQUATION_MATRIX U 45 | SOLUTION/NORMAL_EQUATION_MATRIX L 46 | 47 | 48 | Returns 49 | ------- 50 | an updated xarray.Dataset holding the new matrix in a new variable 'N' 51 | 52 | """ 53 | if not blockname.startswith('SOLUTION/NORMAL_EQUATION_MATRIX'): 54 | raise RuntimeError(f"Wrong block {blockname}?") 55 | cdef int nest=dsout.sizes['nm'] 56 | mat=np.zeros([nest,nest],order='C') 57 | 58 | cdef double [:,:] cmat =mat 59 | cdef int irow 60 | cdef int icol 61 | cdef int ndat 62 | # cdef const unsigned char[:] ucline 63 | cdef char* cline 64 | cdef str line 65 | for line in fileobj: 66 | #get the char pointer to the buffer in the unicode object 67 | cline= PyUnicode_DATA(line) 68 | # cline = &line[0] 69 | #cline = line[0] 70 | if cline[0] == '-': 71 | #end of block encountered 72 | break 73 | elif cline[0] == '*': 74 | #comment 75 | continue 76 | 77 | # sscanf(cline, " %5li %5li", &irow, &icol) 78 | irow=strtol(cline,&cline,10) 79 | 80 | icol=strtol(cline,&cline,10) 81 | irow-=1 #note zero indexing 82 | icol-=1 83 | 84 | ndat=nest-icol if icol>nest-3 else 3 85 | 86 | if ndat == 1: 87 | # #read one element 88 | cmat[irow,icol]=strtod(cline,&cline) 89 | elif ndat == 2: 90 | # #read 2 elements 91 | cmat[irow,icol]=strtod(cline,&cline) 92 | cmat[irow,icol+1]=strtod(cline,&cline) 93 | 94 | else: 95 | # #read 3 elements 96 | cmat[irow,icol]=strtod(cline,&cline) 97 | cmat[irow,icol+1]=strtod(cline,&cline) 98 | cmat[irow,icol+2]=strtod(cline,&cline) 99 | 100 | #mirror the upper triangle in the lower part 101 | mat=np.triu(mat,k=1).T+mat 102 | 103 | if "nm_" not in dsout.indexes: 104 | #add the transposed index 105 | mi_=SHindexBase.mi_toggle(dsout.indexes['nm']) 106 | dsout=dsout.sh.set_nmindex(mi_,'_') 107 | 108 | dsout['N']=(['nm','nm_'],mat) 109 | return dsout 110 | 111 | -------------------------------------------------------------------------------- /src/builtin_backend/wigner3j.pxd: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | # distutils: language = c++ 6 | 7 | 8 | cimport cython 9 | from libcpp.vector cimport vector 10 | # C++ / Cython interface declaration 11 | cdef extern from "Wigner3j.hpp": 12 | cdef cppclass Wigner3j[T] nogil: 13 | Wigner3j() except + 14 | Wigner3j(int j2,int j3,int m2, int m3) except + 15 | vector[T] get() except+ 16 | void set(int j2,int j3,int m2, int m3) except+ 17 | T operator[](int j)except+ 18 | int jmin() 19 | int jmax() 20 | int m() 21 | 22 | # End of interface declaration 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /src/builtin_backend/wigner3j.pyx: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | # distutils: language = c++ 6 | 7 | cimport cython 8 | from wigner3j cimport Wigner3j 9 | 10 | from cython.operator cimport dereference as deref 11 | # import numpy as np 12 | cimport numpy as np 13 | import xarray as xr 14 | import pandas as pd 15 | 16 | @cython.boundscheck(False) 17 | @cython.wraparound(False) 18 | @cython.initializedcheck(False) 19 | def getWigner3j(j2,j3,m2,m3): 20 | """ 21 | Compute non-zero Wigner3J symbols with their valid (j1,m1) for j2,j3,m2,m3 input 22 | """ 23 | w3j = Wigner3j[double](j2,j3,m2,m3) 24 | assert(w3j.jmin() <= w3j.jmax()) 25 | m=w3j.m() 26 | jm=pd.MultiIndex.from_tuples([(j,m) for j in range(w3j.jmin(),w3j.jmax()+1)],names=("j","m")) 27 | return xr.DataArray(w3j.get(), coords=dict(jm=jm),dims=["jm"]) 28 | 29 | -------------------------------------------------------------------------------- /src/builtin_backend/ynm.pyx: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | 6 | # distutils: language = c++ 7 | 8 | import cython 9 | cimport numpy as np 10 | import xarray as xr 11 | from legendre cimport Ynm_cpp,mni 12 | from shxarray.core.sh_indexing import SHindexBase 13 | from libc.stdio cimport printf 14 | 15 | 16 | @cython.boundscheck(False) 17 | @cython.wraparound(False) 18 | @cython.initializedcheck(False) 19 | cdef class Ynm: 20 | """Compute surface spherical harmonics in double precision""" 21 | cdef Ynm_cpp[double] _ynm 22 | cdef double[::1] data 23 | cdef public object _shindex 24 | def __cinit__(self,nmax_or_index): 25 | 26 | cdef int[::1] nv 27 | cdef int[::1] mv 28 | cdef int [:,::1] nm 29 | cdef cython.size_t sz,idx 30 | cdef int nmax,n,m 31 | cdef mni it 32 | if type(nmax_or_index) == int: 33 | nmax=nmax_or_index 34 | self._ynm=Ynm_cpp[double](nmax) 35 | sz=self._ynm.size() 36 | #create a sh index 37 | nm=np.zeros([sz,2],dtype=np.int32) 38 | for it in self._ynm.getmn(): 39 | n=it.n 40 | m=it.m 41 | idx=it.i 42 | nm[idx,0]=n 43 | nm[idx,1]=m 44 | 45 | self._shindex=SHindexBase.mi_fromarrays(np.asarray(nm).T) 46 | else: 47 | nv=np.array([n for n,_,_ in nmax_or_index.values]).astype(np.int32) 48 | mv=np.array([m for _,m,_ in nmax_or_index.values]).astype(np.int32) 49 | 50 | sz=len(nmax_or_index) 51 | self._ynm=Ynm_cpp[double](sz,&nv[0],&mv[0]) 52 | self._shindex=nmax_or_index 53 | 54 | #have data memory view point to the memory of the cpp class 55 | self.data = (self._ynm.data()) 56 | 57 | 58 | @property 59 | def nmax(self): 60 | return self._ynm.nmax() 61 | 62 | def __len__(self): 63 | return self._ynm.size() 64 | 65 | def __call__(self,lon, lat): 66 | 67 | cdef int npos 68 | cdef double[:,::1] data; 69 | 70 | if np.isscalar(lon) and np.isscalar(lat): 71 | 72 | self._ynm.set(lon,lat) 73 | dsout=xr.DataArray(self.data,coords={"nm":self._shindex,"lon":lon,"lat":lat},dims=["nm"],name="Ynm") 74 | else: 75 | #multiple sets requested 76 | if len(lon) != len(lat): 77 | raise RuntimeError("input longitude and latitude needs to be of the same length") 78 | npos=len(lon) 79 | data=np.empty([npos,self._ynm.size()]) 80 | 81 | for i in range(npos): 82 | self._ynm.set(lon[i],lat[i]) 83 | data[i,:]=self.data 84 | 85 | dsout=xr.DataArray(data,coords={"nm":("nm",self._shindex),"lon":("nlonlat",lon),"lat":("nlonlat",lat)},dims=["nlonlat","nm"],name="Ynm") 86 | 87 | return dsout 88 | 89 | -------------------------------------------------------------------------------- /src/shxarray/__init__.py: -------------------------------------------------------------------------------- 1 | """ Top level API documentation""" 2 | from .core.xr_accessor import SHDaAccessor,SHDsAccessor 3 | from ._version import version 4 | 5 | __version__=version() 6 | 7 | -------------------------------------------------------------------------------- /src/shxarray/_version.py: -------------------------------------------------------------------------------- 1 | def version(): 2 | try: 3 | import importlib 4 | return importlib.metadata.version('shxarray') 5 | except: 6 | return "unknown" 7 | -------------------------------------------------------------------------------- /src/shxarray/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/src/shxarray/core/__init__.py -------------------------------------------------------------------------------- /src/shxarray/core/admin.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | 6 | import os 7 | import yaml 8 | 9 | def defaultcache(subdir=None): 10 | path=os.path.expanduser("~/.cache/shxarray_storage") 11 | if subdir is not None: 12 | path=os.path.join(path,subdir) 13 | os.makedirs(path,exist_ok=True) 14 | return path 15 | 16 | def infofile(): 17 | return os.path.join(defaultcache(),"shxarray_userinfo.yaml") 18 | 19 | 20 | infodictglobal=None 21 | 22 | def get_userinfo(infodict=infodictglobal): 23 | if not infodict: 24 | userinfo=infofile() 25 | if os.path.exists(userinfo): 26 | with open(userinfo) as f: 27 | infodict=yaml.safe_load(f) 28 | else: 29 | infodict={} 30 | return infodict 31 | 32 | def set_userinfo(namecontact:str=None,institution:str=None,write=False): 33 | """ 34 | Set user information which will eb stored in xarray outputs 35 | Parameters 36 | ---------- 37 | namecontact : str 38 | e.g. J. Doe (j.doe@email.com) 39 | 40 | institution : str 41 | Affiliation of the author as a string 42 | 43 | write : bool 44 | Write the info to disk (make it persistent for next sessions) 45 | 46 | """ 47 | if namecontact is not None: 48 | infodict["contact"]=namecontact 49 | 50 | if institution is not None: 51 | infodict["institution"]=institution 52 | 53 | 54 | if write: 55 | userinfo=infofile() 56 | with open(userinfo,"w") as f: 57 | yaml.dump(infodict,f) 58 | 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /src/shxarray/core/cf.py: -------------------------------------------------------------------------------- 1 | # Contain some basic functionality for finding and using CF variables 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 5 | # 6 | 7 | import numpy as np 8 | from collections import namedtuple 9 | from shxarray.core.admin import get_userinfo 10 | from shxarray._version import version 11 | 12 | CoordInfo=namedtuple("CoordInfo",['min','max','step','direction','var']) 13 | 14 | def find_coord(coordvars, names): 15 | """ 16 | Find a coordinate variable in a dictionary of coordinate variables 17 | """ 18 | tmp=[ky for ky in coordvars.keys() if ky in names] 19 | if len(tmp) > 1 or len(tmp) == 0: 20 | raise KeyError(f"Cannot find an unambigious coordinate variable from {names}") 21 | coordvar=coordvars[tmp[0]] 22 | 23 | #also compute minmax and possibly uniform step size 24 | cmin=coordvar.min().item() 25 | cmax=coordvar.max().item() 26 | 27 | cincr=np.diff(coordvar.data) 28 | dmin=cincr.min() 29 | dmax=cincr.max() 30 | if dmin == dmax: 31 | #uniform step 32 | cstep=dmin 33 | if dmin < 0: 34 | direction='descending' 35 | else: 36 | direction='ascending' 37 | else: 38 | #nonuniform step 39 | cstep=None 40 | if dmin < 0 and dmax < 0: 41 | direction='descending' 42 | elif dmin > 0 and dmax > 0: 43 | direction='ascending' 44 | else: 45 | direction="random" 46 | 47 | return CoordInfo(min=cmin,max=cmax,step=cstep,direction=direction,var=coordvar) 48 | 49 | 50 | 51 | def find_lon(coordvars): 52 | return find_coord(coordvars,['lon','Longitude','x','longitude']) 53 | 54 | def find_lat(coordvars): 55 | return find_coord(coordvars,['lat','Latitude','y','latitude']) 56 | 57 | def change_central_longitude(dain,central_longitude=0,resort=True): 58 | """ 59 | Change the central longitude of a dataset to 180 or 360 degrees 60 | Parameters 61 | ---------- 62 | dain : xarry.DataArray or xarray.Dataset 63 | input data to change 64 | central_longitude : int 65 | central longitude to change to. either 0 or 180 degrees (default is 0) 66 | 67 | 68 | Returns 69 | ------- 70 | xarray.DataArray or xarray.Dataset 71 | Input data with changed central longitude (if needed) otherwise returns the original input 72 | 73 | """ 74 | if central_longitude not in [180,0]: 75 | raise ValueError("Central longitude should be 180 or 0") 76 | 77 | loninfo = find_lon(dain.coords) 78 | #guess for correct central longitude 79 | if loninfo.min < 0 or loninfo.max <= 180: 80 | given_central_longitude=0 81 | else: 82 | given_central_longitude=180 83 | if given_central_longitude != central_longitude: 84 | if central_longitude == 180: 85 | dain.coords[loninfo.var.name]=(dain.coords[loninfo.var.name] + 360) % 360 86 | else: 87 | dain.coords[loninfo.var.name]=(dain.coords[loninfo.var.name] + 180) % 360 - 180 88 | if resort: 89 | dain=dain.sortby(dain.coords[loninfo.var.name]) 90 | 91 | return dain 92 | 93 | 94 | cflookup={ 95 | "longitude":{'units':'degrees_east','standard_name':'longitude','long_name':'longitude'}, 96 | "latitude":{'units':'degrees_north','standard_name':'latitude','long_name':'latitude'}, 97 | "stokes":{'units':'-',"long_name":"Stokes Coefficients","gravtype":"stokes"}, 98 | "stokes stdv":{'units':'-',"long_name":"Standard deviation of the Stokes Coefficients","gravtype":"stokes"}, 99 | "tws":{'units':'m',"long_name":"Total water storage","gravtype":"tws"}, 100 | 101 | } 102 | 103 | def get_cfatts(standard_name): 104 | """Return CF attributes for certain coordinate types""" 105 | return cflookup[standard_name] 106 | 107 | def get_cfglobal(): 108 | """Return global attributes and possible user details for the CF convention""" 109 | cfattr={'Conventions':'CF-1.8',"source":f"shxarray-{version()} "} 110 | cfattr.update(get_userinfo()) 111 | return cfattr 112 | 113 | 114 | 115 | -------------------------------------------------------------------------------- /src/shxarray/core/logging.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | 6 | 7 | import logging 8 | # shxarray wide logger 9 | shxlogger=logging.getLogger("shxarray") 10 | 11 | ch = logging.StreamHandler() 12 | 13 | # create formatter 14 | formatter = logging.Formatter('%(name)s-%(levelname)s: %(message)s') 15 | 16 | # add formatter to ch 17 | ch.setFormatter(formatter) 18 | 19 | # add ch to logger 20 | shxlogger.addHandler(ch) 21 | 22 | 23 | def debugging(): 24 | return shxlogger.getEffectiveLevel() == logging.DEBUG 25 | 26 | def setInfoLevel(): 27 | """Set logging level for both python and c++ to INFO severity""" 28 | shxlogger.setLevel(logging.INFO) 29 | 30 | def setDebugLevel(): 31 | """Set logging level for both python and c++ to DEBUG severity""" 32 | shxlogger.setLevel(logging.DEBUG) 33 | 34 | 35 | def setWarningLevel(): 36 | """Set logging level for both python and c++ to WARNING severity""" 37 | shxlogger.setLevel(logging.WARNING) 38 | 39 | def setErrorLevel(): 40 | """Set logging level for both python and c++ to WARNING severity""" 41 | shxlogger.setLevel(logging.ERROR) 42 | 43 | setInfoLevel() 44 | -------------------------------------------------------------------------------- /src/shxarray/core/sh_indexing.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | 6 | 7 | import pandas as pd 8 | 9 | 10 | class SHindexBase: 11 | name="nm" 12 | name_t="nm_" 13 | 14 | @staticmethod 15 | def nsh(nmax,nmin=0, squeeze=True): 16 | """ 17 | Compute the total amount of spherical harmonic coefficients for a given range 18 | 19 | Parameters 20 | ---------- 21 | nmax : int 22 | maximum spherical harmonic degree 23 | nmin : int, optional 24 | minimum spherical harmonic degree 25 | squeeze: bool,optional 26 | Legacy option used when Sine coefficients which have m=0 need to be included 27 | 28 | 29 | Returns 30 | ------- 31 | int 32 | The amount of spherical harmonic coefficients in this range 33 | """ 34 | assert nmax>=nmin 35 | 36 | sz=(nmax+1)*(nmax+1) 37 | if not squeeze: 38 | sz+=nmax+1 39 | 40 | if nmin > 0: 41 | #possibly remove the number of coefficients which have n < nmin (calls this function itself) 42 | sz-=SHindexBase.nsh(nmin-1,0,squeeze=squeeze) 43 | 44 | return sz 45 | 46 | 47 | @staticmethod 48 | def nm_mi(nmax,nmin=0): 49 | """ 50 | Generate a MultiIndex of degree and order which span a spherical harmonic degree range 51 | 52 | In the case of real spherical harmonic, orders m < 0 denote Sine coefficients 53 | 54 | Parameters 55 | ---------- 56 | nmax : int 57 | maximum spherical harmonic degree 58 | nmin : int, optional 59 | minimum spherical harmonic degree 60 | 61 | Returns 62 | ------- 63 | pandas.MultiIndex 64 | A MultiIndex with degrees "n" and orders "m" 65 | """ 66 | nm=[(n,m) for n in range(nmin,nmax+1) for m in range(-n,n+1)] 67 | return SHindexBase.mi_fromtuples(nm) 68 | 69 | @staticmethod 70 | def nm(nmax,nmin=0): 71 | """ 72 | Convenience function which returns a dictionary which can be used as input for xarray constructors 73 | 74 | Parameters 75 | ---------- 76 | nmax : int 77 | maximum spherical harmonic degree 78 | nmin : int, optional 79 | minimum spherical harmonic degree 80 | 81 | Returns 82 | ------- 83 | dictionary 84 | A dictionary specifying the degree and orders and corresponding dimension names 85 | in the form of {dim:(dim,nm)} 86 | """ 87 | return {SHindexBase.name:(SHindexBase.name,SHindexBase.nm_mi(nmax,nmin))} 88 | 89 | 90 | @staticmethod 91 | def mi_fromtuples(nm,suf=''): 92 | """ 93 | Generate a MultiIndex of degree and order from a list of (degree,order) tuples 94 | 95 | In the case of real spherical harmonic, orders m < 0 denote Sine coefficients 96 | 97 | Parameters 98 | ---------- 99 | nm : list 100 | A list of tuples with degree and order 101 | 102 | Returns 103 | ------- 104 | pandas.MultiIndex 105 | A MultiIndex with degrees "n" and orders "m" 106 | """ 107 | if suf: 108 | names=[f"n{suf}",f"m{suf}"] 109 | else: 110 | names=["n","m"] 111 | 112 | return pd.MultiIndex.from_tuples(nm,names=names) 113 | 114 | @staticmethod 115 | def mi_fromarrays(nm): 116 | """ 117 | Generate a MultiIndex of degree and order from an array of degree and order [[n..],[..m]] 118 | 119 | In the case of real spherical harmonic, orders m < 0 denote Sine coefficients 120 | 121 | Parameters 122 | ---------- 123 | nm : array-like 124 | An array which hold a vector of degrees and orders 125 | 126 | Returns 127 | ------- 128 | pandas.MultiIndex 129 | A MultiIndex with degrees "n" and orders "m" 130 | """ 131 | return pd.MultiIndex.from_arrays(nm,names=["n","m"]) 132 | 133 | @staticmethod 134 | def mi_toggle(mi,ending=''): 135 | """ 136 | Rename the levels of a (nm)-multindex so that they can be use as alternative coordinates (e.g. transposed versions) 137 | 138 | The levels will be swicthed back and fort between the following formats 139 | oldname <-> oldname_[ending] 140 | 141 | Parameters 142 | ---------- 143 | mi : pandas.MultiIndex 144 | A MultiIndex with degree and orders 145 | ending: str, optional 146 | A string which can be additionally appended 147 | 148 | Returns 149 | ------- 150 | pandas.MultiIndex 151 | A MultiIndex with renamed levels 152 | """ 153 | 154 | app="_"+ending 155 | applen=len(app) 156 | if "n" in mi.names: 157 | return mi.rename([nm+app for nm in mi.names]) 158 | else: 159 | return mi.rename([nm[:-applen] for nm in mi.names]) 160 | 161 | -------------------------------------------------------------------------------- /src/shxarray/core/shcomputebase.py: -------------------------------------------------------------------------------- 1 | class SHComputeBackendBase: 2 | """ 3 | Base class providing the calling interface for more compute intensive Spherical harmonic operations. 4 | The backend can be implemented in another module and registered as an entry_point in the pyproject.toml filei according to e.g.: 5 | 6 | [project.entry-points."shxarray.computebackends"] 7 | yourbackend = "yourpackage.module:SHComputeBackendclass" 8 | """ 9 | _credits="Overwrite this statement in your derived class" 10 | def synthesis(self,*argv): 11 | self._notImplemented("synthesis") 12 | 13 | def analysis(self,*argv): 14 | self._notImplemented("analysis") 15 | 16 | def lonlat_grid(self,nmax): 17 | self._notImplemented("lonlat_grid") 18 | 19 | def _notImplemented(self,methodname): 20 | raise RuntimeError(f"Method '{methodname}' is not implemented in backend {self.__class__.__name__}") 21 | -------------------------------------------------------------------------------- /src/shxarray/core/time.py: -------------------------------------------------------------------------------- 1 | # This file is part of frommle2. 2 | # frommle2 is free software; you can redistribute it and/or 3 | # modify it under the terms of the GNU Lesser General Public 4 | # License as published by the Free Software Foundation; either 5 | # version 3 of the License, or (at your option) any later version. 6 | 7 | # frommle2 is distributed in the hope that it will be useful, 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 10 | # Lesser General Public License for more details. 11 | 12 | # You should have received a copy of the GNU Lesser General Public 13 | # License along with Frommle; if not, write to the Free Software 14 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 15 | 16 | # Author Roelof Rietbroek (r.rietbroek@utwente.nl), 2022 17 | 18 | from datetime import datetime,timedelta 19 | 20 | def decyear2dt(decyear): 21 | """Convert a decimal year to a datetime object""" 22 | year=int(decyear) 23 | if year == 0: 24 | return datetime.min 25 | 26 | jan1=datetime(year,1,1) 27 | return jan1+(decyear-year)*(datetime(year+1,1,1)-jan1) 28 | 29 | 30 | def dt2decyear(dt): 31 | """Convert a datetime object to a decimal year""" 32 | year=dt.year 33 | 34 | jan1=datetime(year,1,1) 35 | 36 | jan1next=datetime(year+1,1,1) 37 | yrlen=(jan1next-jan1).total_seconds() 38 | return year+(dt-jan1).total_seconds()/yrlen 39 | 40 | -------------------------------------------------------------------------------- /src/shxarray/earth/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/src/shxarray/earth/__init__.py -------------------------------------------------------------------------------- /src/shxarray/earth/constants.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | 6 | a_earth=0.6378136460e+07 #mean radius in meter 7 | rho_sea=1.025e3 #average density of sea water kg/m^3 8 | rho_water=1.e3 # average density of water 9 | rho_earth=5517.0 #average density of the Earth 10 | rho_ice=931.0 #density of ice kg/m^3 taken from G. Spada and friends 11 | g=9.80665e0 # mean gravity m/s^2 12 | 13 | 14 | 15 | #flattening accroding to GRS80 16 | grs80_C20=-4.84166854896120e-04 17 | 18 | #Mean moments of interita from the earth ( kg m^2) 19 | ixx_A=8.0102e+37 #~= (I_xx+Iyy)/2 20 | izz_C=8.0365e+37 21 | 22 | #average earth rotation rate in rad/s 23 | ohm_earth=7.292115467064e-5 24 | 25 | #Chandler frequency ( radians per second) from iers constants 26 | ohm_chandler=1.679064144e-7 27 | 28 | #some standard degree 2 elastic body and load Love numbers 29 | k2loadprem=-0.3054020195e+00 30 | k2bodyprem=0.303 31 | l2bodyprem=0.0855 32 | h2bodyprem=0.612 33 | -------------------------------------------------------------------------------- /src/shxarray/earth/ellipsoid.py: -------------------------------------------------------------------------------- 1 | 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2025 5 | # 6 | 7 | import xarray as xr 8 | import shxarray 9 | 10 | def get_GRS80_Stokes(): 11 | """ 12 | Return the GRS80 ellipsoid as Stokes coefficients 13 | """ 14 | a = 6378137.0 15 | f = 1.0 / 298.257222101 16 | nmax=8 17 | dsgrs80=xr.DataArray.sh.zeros(nmax) 18 | dsgrs80.attrs['title']='GRS80' 19 | dsgrs80.attrs['a_earth']=a 20 | dsgrs80.attrs['f_earth']=f 21 | #fill out derived values 22 | dsgrs80.loc[dict(n=0,m=0)]=1 23 | dsgrs80.loc[dict(n=2,m=0)]=-0.48416685489612e-03 24 | dsgrs80.loc[dict(n=4,m=0)]=0.79030407333333e-06 25 | dsgrs80.loc[dict(n=6,m=0)]=-0.16872510013651e-08 26 | dsgrs80.loc[dict(n=8,m=0)]=-0.34609833692685e-11 27 | 28 | return dsgrs80 29 | 30 | -------------------------------------------------------------------------------- /src/shxarray/earth/rotation.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2024 4 | # Source: See chapter 2.4.3 of Rietbroek et al 2014 http://nbn-resolving.de/urn:nbn:de:hbz:5n-35460i and corrections in https://github.com/strawpants/rlftlbx/blob/master/SHtlbx/rotfeedback.f90 5 | 6 | import xarray as xr 7 | import numpy as np 8 | from shxarray.earth.constants import a_earth, rho_water, ohm_chandler,ohm_earth,ixx_A,izz_C,k2loadprem, k2bodyprem,h2bodyprem, g 9 | 10 | from shxarray.core.sh_indexing import SHindexBase 11 | 12 | def chi_T2J(): 13 | fac=np.pi*(a_earth**4)*rho_water 14 | chimat=np.zeros([3,4]) 15 | chimat[0,2]=-4.0/5.0*np.sqrt(10.0/6.0) 16 | chimat[1,3]=chimat[0,2] 17 | chimat[2,0]=8.0/3.0 18 | chimat[2,1]=-8/(3*np.sqrt(15)) 19 | return fac*chimat 20 | 21 | def gamma_j2m(k2load=None): 22 | if k2load is None: 23 | k2load=k2loadprem 24 | 25 | gmat=np.zeros([3,3]) 26 | gmat[0,0]=(ohm_earth/ohm_chandler)*(1+k2load)/ixx_A 27 | gmat[1,1]=gmat[0,0] 28 | gmat[2,2]=-(1+k2load)/izz_C 29 | 30 | return gmat 31 | 32 | 33 | def psi_m2ilam(): 34 | pmat=np.zeros([4,3]) 35 | pmat[0,2]=2/3 36 | pmat[1,2]=-2/(3*np.sqrt(5)) 37 | pmat[2,0]=-1/(np.sqrt(15)) 38 | pmat[3,1]=pmat[2,0] 39 | fac=(a_earth*ohm_earth)**2 40 | return fac*pmat 41 | 42 | def t_lam2s(): 43 | tmat=np.zeros([3,4]) 44 | tmat[0,1]=1 45 | tmat[1,2]=1 46 | tmat[2,3]=1 47 | 48 | fac=(1+k2bodyprem-h2bodyprem)/g 49 | return fac*tmat 50 | 51 | def qs_rotfeedback_slow(): 52 | """ 53 | Returns a matrix relating sh surface load coefficients to degree 2 changes in Quasi spectral sea level 54 | """ 55 | 56 | chi=chi_T2J() 57 | gam=gamma_j2m() 58 | psi=psi_m2ilam() 59 | tm=t_lam2s() 60 | rotmat=tm@psi@gam@chi 61 | nmi_=SHindexBase.mi_fromtuples([(2,0),(2,1),(2,-1)],'_') 62 | nmi=SHindexBase.mi_fromtuples([(0,0),(2,0),(2,1),(2,-1)]) 63 | return xr.DataArray(rotmat,dims=["nm_","nm"],coords=[nmi_,nmi]) 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | -------------------------------------------------------------------------------- /src/shxarray/earth/sealevel/__init__.py: -------------------------------------------------------------------------------- 1 | from .spectralsealevel import SpectralSeaLevelSolver 2 | -------------------------------------------------------------------------------- /src/shxarray/earth/sealevel/sealevel.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2024 4 | # 5 | 6 | import xarray as xr 7 | import numpy as np 8 | from shxarray.core.logging import shxlogger 9 | from shxarray.core.cf import * 10 | from datetime import datetime 11 | 12 | class SeaLevelSolver: 13 | """ 14 | Base Class to aid in solving the self consistent sea level for various discretizations and load types 15 | """ 16 | def __init__(self,rotfeedback=False): 17 | #whether to apply a simple (static) rotational feedback 18 | self.rotfeedback=rotfeedback 19 | 20 | @staticmethod 21 | def set_global_mean(load,level): 22 | """Sets the global mean value of of a load. Depending on the discretization, this needs to be differently implemented""" 23 | 24 | raise NotImplementedError("set_global_mean(), needs to be implemented in derived class") 25 | 26 | @staticmethod 27 | def global_mean(load): 28 | raise NotImplementedError("global_mean(), needs to be implemented in derived class") 29 | def rotfeed(self,load): 30 | raise NotImplementedError("rotfeed(), needs to be implemented in derived class") 31 | 32 | def oceanf(self,load=None): 33 | raise NotImplementedError("oceanf(), needs to be implemented in derived class") 34 | 35 | def load_earth(self,load): 36 | raise NotImplementedError("load_earth(), needs to be implemented in derived class") 37 | 38 | 39 | 40 | def __call__(self, load_force): 41 | """Iteratively solve the sea level equation for a given load""" 42 | 43 | #compute the mean contributions of the fixed (forced) load 44 | force00=self.global_mean(load_force) 45 | #Set up initial ocean load ocean function 46 | unioce=self.oceanf() 47 | oce_surf_ratio=self.global_mean(unioce) 48 | dphi_g=-force00/oce_surf_ratio 49 | #initial value for the ocean load 50 | load_sea=dphi_g*unioce 51 | 52 | relratio=1 53 | rel_thres=1e-5 54 | maxit=7 55 | it=0 56 | damp=0.95 57 | while it < maxit and rel_thres < abs(relratio): 58 | #compute the loading response 59 | load_tot=load_sea+load_force 60 | ds_loadresp=self.load_earth(load_tot) 61 | quasi_sea=ds_loadresp.geoid-ds_loadresp.uplift 62 | if self.rotfeedback: 63 | qsrot=self.rotfeed(load_tot) 64 | #add static rotational feedback component to quasi_sea 65 | quasi_sea.loc[qsrot.nm]+=qsrot 66 | self.set_global_mean(quasi_sea,dphi_g) 67 | #compute new ocean load 68 | load_sea=self.oceanf(quasi_sea) 69 | delta=force00+self.global_mean(load_sea) 70 | relratio=np.max(np.abs(delta/force00)).item() 71 | #update dphi_g in the right direction 72 | dphi_g-=damp*delta/oce_surf_ratio 73 | shxlogger.info(f"current mass inconsistency: iteration {it}, relratio:{relratio}") 74 | 75 | it+=1 76 | 77 | 78 | quasi_sea.name="quasi_sea" 79 | load_sea.name="load_sea" 80 | load_force.name="load_force" 81 | ds_loadresp.geoid.name="geoid" 82 | ds_loadresp.uplift.name="uplift" 83 | 84 | dsout=xr.merge([quasi_sea,load_sea,load_force,ds_loadresp.geoid,ds_loadresp.uplift]) 85 | 86 | #add some useful attributes to the output 87 | dsout.attrs['history'] = str(datetime.utcnow()) + f": shxarray ({self.__class__.__name__}" 88 | 89 | 90 | dsout.quasi_sea.attrs["long_name"]="Quasi spectral sea level (non zero over land)" 91 | dsout.quasi_sea.attrs["units"]="m" 92 | 93 | dsout.load_sea.attrs["long_name"]="Relative Sea level (load) as equivalent water height" 94 | dsout.load_sea.attrs["units"]="m" 95 | 96 | dsout.load_force.attrs["long_name"]="Applied load as equivalent water height" 97 | dsout.load_force.attrs["units"]="m" 98 | 99 | dsout.geoid.attrs["long_name"]="Geoid height change induced by combined load (force+sea)" 100 | dsout.geoid.attrs["units"]="m" 101 | 102 | dsout.uplift.attrs["long_name"]="Uplift induced by combined load (force+sea)" 103 | dsout.uplift.attrs["units"]="m" 104 | 105 | 106 | return dsout 107 | 108 | 109 | 110 | 111 | -------------------------------------------------------------------------------- /src/shxarray/earth/sealevel/spectralsealevel.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2024 4 | # 5 | 6 | 7 | from shxarray.earth.sealevel.sealevel import SeaLevelSolver 8 | import xarray as xr 9 | from math import floor 10 | import os 11 | from shxarray.core.admin import defaultcache 12 | from shxarray.core.logging import shxlogger 13 | from shxarray.kernels.gravfunctionals import Load2Geoid,Load2Uplift 14 | from shxarray.earth.rotation import qs_rotfeedback_slow 15 | 16 | 17 | class SpectralSeaLevelSolver(SeaLevelSolver): 18 | 19 | def __init__(self,oceansh:xr.DataArray=None, nmax=None,dssnrei=None,p2scache=None,rotfeedback=False): 20 | super().__init__(rotfeedback) 21 | 22 | if oceansh is None: 23 | if nmax is None or nmax > 150: 24 | raise RuntimeError("nmax (<=150) must be provided when no oceansh datatarray is provided") 25 | #use default ocean function (download) 26 | oceanshfile=os.path.join(defaultcache("ocean"),"ne_10m_oceansh_n300.nc") 27 | if not os.path.exists(oceanshfile): 28 | import requests 29 | url="https://github.com/strawpants/geoshapes/raw/refs/heads/master/ocean/ne_10m_oceansh_n300.nc" 30 | r = requests.get(url) 31 | shxlogger.info(f"Downloading ocean SH coefficients {oceanshfile}") 32 | with open(oceanshfile,'wb') as fid: 33 | fid.write(r.content) 34 | else: 35 | shxlogger.info(f"{oceanshfile}, already downloaded") 36 | 37 | oceansh=xr.open_dataset(oceanshfile).sh.truncate(nmax=2*nmax).oceansh 38 | else: 39 | if nmax is not None: 40 | if nmax > oceansh.sh.nmax/2: 41 | raise RuntimeError(f"Requested maximum degree {nmax} not supported by file (needs to be at least 2*nmax)") 42 | oceansh=oceansh.sh.truncate(nmax=nmax*2) 43 | 44 | if nmax is None: 45 | # Note: for full spectral consistency, the maximum degree of the ocean function is half that of the input ocean function 46 | self.nmax=floor(oceansh.sh.nmax/2) 47 | else: 48 | self.nmax=nmax 49 | 50 | #note: ocean coefficients need to be supported up to 2*nmax 51 | 52 | #setup SNREI Earth loading function 53 | if dssnrei is None: 54 | #default uses PREM Earth Model 55 | self.geoidKernel=Load2Geoid(nmax=self.nmax,deg0scale=0.0) 56 | self.upliftKernel=Load2Uplift(nmax=self.nmax,deg0scale=0.0) 57 | else: 58 | self.geoidKernel=Load2Geoid(knLove=dssnrei.kn,nmax=self.nmax) 59 | self.upliftKernel=Load2Uplift(hnLove=dssnrei.hn,nmax=self.nmax) 60 | 61 | #setup ocean function 62 | if p2scache is None: 63 | p2scache=os.path.join(defaultcache("P2S"),f"p2s_ocean_n{self.nmax}.nc") 64 | if os.path.exists(p2scache): 65 | #Read product to sum mat from cache 66 | shxlogger.info(f"Reading product2sum ocean function from cache: {p2scache}") 67 | self.dsp2s_oce=xr.open_dataset(p2scache).cnm.sh.build_nmindex().sh.build_nmindex('_') 68 | else: 69 | shxlogger.info(f"Computing ocean function and saving to cache: {p2scache}") 70 | if oceansh.sh.nmax != 2*self.nmax: 71 | oceansh=oceansh.sh.truncate(nmax=self.nmax*2) 72 | 73 | 74 | self.dsp2s_oce=oceansh.sh.p2s() 75 | self.dsp2s_oce.sh.drop_nmindex().sh.drop_nmindex('_').to_netcdf(p2scache) 76 | 77 | if self.rotfeedback: 78 | shxlogger.warning("Adding static rotation feedback probably only makes sense for very slowly changing loads (> Chandler wobble freq)") 79 | self.rotmat=qs_rotfeedback_slow() 80 | 81 | def rotfeed(self,load): 82 | qsrot=self.rotmat@load 83 | return qsrot.sh.toggle_nm() 84 | 85 | @staticmethod 86 | def set_global_mean(load,level): 87 | load.loc[dict(n=0,m=0)]=level 88 | return load 89 | 90 | 91 | @staticmethod 92 | def global_mean(load:xr.DataArray): 93 | """Returns the degree 0, order 0 coefficients of a spherical harmonic dataset""" 94 | return load.loc[dict(n=0,m=0)] 95 | 96 | 97 | def oceanf(self,load=None): 98 | if load is None: 99 | #return the ocean function itself 100 | return self.dsp2s_oce.sel(n_=0,m_=0).drop(['n_','m_','nm_']) 101 | else: 102 | #apply the ocean function to a load 103 | load_oce=self.dsp2s_oce@load 104 | return load_oce.sh.toggle_nm() 105 | 106 | def load_earth(self,load): 107 | dsdef=self.geoidKernel(load).to_dataset(name='geoid') 108 | dsdef['uplift']=self.upliftKernel(load) 109 | return dsdef 110 | 111 | 112 | 113 | -------------------------------------------------------------------------------- /src/shxarray/earth/snrei.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | 6 | from shxarray.core.logging import shxlogger 7 | from shxarray.core.admin import defaultcache 8 | import xarray as xr 9 | import os 10 | import requests 11 | import json 12 | 13 | def download_snrei(dbfilename): 14 | if os.path.exists(dbfilename): 15 | shxlogger.info(f"{dbfilename} already exists, no need to download)") 16 | return 17 | else: 18 | shxlogger.info(f" Downloading {dbfilename}") 19 | 20 | url="https://github.com/strawpants/snrei/raw/master/Love/geoslurp_dump_llove.sql" 21 | req=requests.get(url) 22 | with open(dbfilename,'wb') as fid: 23 | fid.write(req.content) 24 | 25 | 26 | def change_frame(dsLove,frame): 27 | ds1=dsLove.sel(n=1) 28 | if frame == "CF": 29 | alpha=((ds1.hn+2*ds1.ln)/3).item() 30 | elif frame == "CM": 31 | alpha=(1+ds1.kn).item() 32 | elif frame == "CE": 33 | alpha=ds1.kn.item() 34 | elif frame == "CH": 35 | alpha=ds1.hn.item() 36 | elif frame == "CL": 37 | alpha=ds1.ln.item() 38 | else: 39 | raise RuntimeError(f"Unknown frame {frame} selected") 40 | 41 | return dsLove.where(dsLove.n != 1,dsLove-alpha) 42 | 43 | 44 | 45 | def snrei_load(model,dbfile_or_con,frame="CF",nmax=None,deg0scale=None): 46 | """Loads (load) Love numbers""" 47 | if dbfile_or_con is None: 48 | #create a default filename 49 | dbfile_or_con=os.path.join(defaultcache('Love'),"geoslurp_dump_llove.sql") 50 | 51 | if str(dbfile_or_con).endswith(".sql"): 52 | download_snrei(dbfile_or_con) 53 | import sqlite3 54 | qry=f"SELECT data from llove WHERE name = '{model}'" 55 | dbcon = sqlite3.connect(dbfile_or_con) 56 | res=json.loads(dbcon.execute(qry).fetchone()[0]) 57 | else: 58 | #assume the input is already a open (sqlalchemy-like ) database connection 59 | qry=f"SELECT data from earthmodels.llove WHERE name = '{model}'" 60 | res=dbfile_or_con.execute(qry).first()[0] 61 | 62 | if deg0scale is not None: 63 | res["coords"]["degree"]["data"].insert(0,0) 64 | res["data_vars"]["kn"]["data"].insert(0,deg0scale) 65 | res["data_vars"]["hn"]["data"].insert(0,deg0scale) 66 | res["data_vars"]["ln"]["data"].insert(0,deg0scale) 67 | 68 | 69 | dsout=xr.Dataset.from_dict(res).rename({"degree":"n"}) 70 | if nmax is not None: 71 | dsout=dsout.sel(n=slice(0,nmax)) 72 | #possibly convert to a different isomorphic reference frame 73 | dsout=change_frame(dsout,frame) 74 | return dsout 75 | 76 | 77 | 78 | 79 | class SnreiFactory: 80 | @staticmethod 81 | def load(model="PREM",dbfile_or_con=None,frame="CF",nmax=None,deg0scale=None): 82 | return snrei_load(model=model,dbfile_or_con=dbfile_or_con,frame=frame,nmax=nmax,deg0scale=deg0scale) 83 | 84 | 85 | 86 | -------------------------------------------------------------------------------- /src/shxarray/exp/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Experimental compute backend for shxarray 3 | 4 | This submodule can be used to test and stage functionality in pure Python before moving it to a `shlib compute engine `_. 5 | 6 | Functionality in this submodule is available when using the ``engine='exp'`` option in the xarray accessor routines. 7 | 8 | Developer Notes: 9 | To add functionality to the engine add a Python file in the ``exp`` directory and import it in the ``__init__.py`` file. 10 | 11 | """ 12 | from shxarray.exp.p2s import * 13 | from shxarray.exp.multiply import * 14 | -------------------------------------------------------------------------------- /src/shxarray/exp/multiply.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2025 4 | # 5 | 6 | 7 | 8 | 9 | 10 | import xarray as xr 11 | from shxarray.shlib import getGauntReal 12 | from shxarray.core.logging import shxlogger 13 | from tqdm import tqdm 14 | from tqdm.contrib.logging import logging_redirect_tqdm 15 | import numpy as np 16 | 17 | 18 | def multiply_spat(dash1:xr.DataArray,dash2:xr.DataArray,engine="shtns")->xr.DataArray: 19 | """ 20 | Multiply two spherical harmonics DataArrays together (equivalent to multiplying in the spatial domain) 21 | Currently this function uses a spherical harmonic synthesis followed, a multiplication in the spatial domain and a spherical harmonic analysis 22 | A future implementation may use the realGaunt coefficients to directly erform the multiplication in the spectral domain 23 | Parameters 24 | ---------- 25 | 26 | dash1 : xr.DataArray 27 | 28 | dash2 : xr.DataArray 29 | 30 | engine : str 31 | Engine to use for the synthesis and analysis step 32 | 33 | 34 | Returns 35 | ------- 36 | xr.DataArray 37 | 38 | 39 | """ 40 | dagrd1=dash1.sh.synthesis(engine=engine) 41 | dagrd2=dash2.sh.synthesis(engine=engine) 42 | dagrd=dagrd1*dagrd2 43 | dashout=dagrd.sh.analysis(nmax=dash1.sh.nmax,engine=engine) 44 | return dashout 45 | 46 | def multiply(dash1:xr.DataArray,dash2:xr.DataArray): 47 | 48 | norm=np.sqrt(4*np.pi) 49 | nm1=dash1.nm 50 | nm2=dash2.nm 51 | # breakpoint() 52 | #allocate space for the output 53 | nmaxout=dash1.sh.nmax+dash2.sh.nmax 54 | auxcoords=dash1.sh.auxcoords() 55 | dashout=xr.DataArray.sh.zeros(nmax=nmaxout,auxcoords=auxcoords) 56 | with logging_redirect_tqdm(): 57 | for n1,m1 in tqdm(nm1.data): 58 | shxlogger.info(f"Multiplying n1,m1: {n1},{m1}") 59 | for n2,m2 in nm2.data: 60 | gnt = getGauntReal(n1,n2,m1,m2) 61 | if 28 in gnt.n and 28 in gnt.m: 62 | breakpoint() 63 | dashout.loc[{"nm":gnt.nm}]+= norm*gnt*dash1.loc[{"nm":(n1,m1)}]*dash2.loc[{"nm":(n2,m2)}] 64 | return dashout 65 | 66 | -------------------------------------------------------------------------------- /src/shxarray/exp/p2s.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import shxarray 3 | from math import sqrt 4 | import numpy as np 5 | 6 | def p2s(daobj): 7 | #Initialize the output array 8 | nmax=int(daobj.sh.nmax/2) 9 | dsp2s=xr.DataArray.sh.zeros(nmax,nshdims=2) 10 | nsh=len(dsp2s['nm']) 11 | ntot=0 12 | norm=sqrt(4*np.pi) 13 | for i in range(nsh): 14 | n2,m2=dsp2s.nm[i].item() 15 | for j in range(i+1): 16 | n3,m3=dsp2s.nm_[j].item() 17 | dagaunt=xr.DataArray.sh.gauntReal(n3,n2,m3,m2) 18 | ntot+=1 19 | dsp2s.data[i,j]=norm*daobj.dot(dagaunt).item() 20 | if i != j: 21 | #also mirror data 22 | dsp2s.data[j,i]=dsp2s.data[i,j] 23 | return dsp2s 24 | -------------------------------------------------------------------------------- /src/shxarray/geom/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/src/shxarray/geom/__init__.py -------------------------------------------------------------------------------- /src/shxarray/geom/points.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2024 4 | # 5 | 6 | from shapely.geometry import Point 7 | import xarray as xr 8 | import numpy as np 9 | import geopandas as gpd 10 | import pandas as pd 11 | from shxarray.kernels.axial import Disk,Unit,ParabolicCap 12 | 13 | def point2sh(pointgeom,nmax:int=100,auxcoord=None,axialtype="unit",psi=None) ->xr.DataArray: 14 | """ 15 | Convert a GeoSeries of points to axially symmetric loads expressed as SH coefficients 16 | Parameters 17 | ---------- 18 | pointgeom : GeoSeries or array_like 19 | Iterable with points which describes the center of the loads 20 | auxcoord: named Pandas.Series or dict(dimname=coordvalues) 21 | Auxiliary coordinate to map to the dimension of pointgeom. The default will construct a coordinate with an sequential numerical index and index "id" 22 | axialtype : str 23 | Type of the load to be constructed. One of 'unit','disk','paraboliccap'. Defaults to a unit load 24 | nmax : int 25 | maximum degree and order to resolve 26 | 27 | psi : int 28 | spherical width in degrees of the disk or parabolic cap. Ignored for unit loads. defaults to 1 degree 29 | 30 | Returns 31 | ------- 32 | xr.DataArray 33 | A DataArray holding the spherical harmonic coefficients of the prescribed loads 34 | 35 | See Also 36 | -------- 37 | shxarray.geom.polygons.polygon2sh 38 | 39 | """ 40 | 41 | if type(pointgeom) != gpd.GeoSeries: 42 | #convert to GeoSeries 43 | pointgeom=gpd.GeoSeries(pointgeom) 44 | 45 | if axialtype== "unit": 46 | axiso=Unit(nmax) 47 | elif axialtype == 'disk': 48 | axiso=Disk(nmax,psi) 49 | elif axialtype == "paraboliccap": 50 | axiso=ParabolicCap(nmax,psi) 51 | else: 52 | raise RuntimeError(f"Axial type {axialtype} is unknown") 53 | 54 | #Position the isotropic load on the chosen point locations 55 | daout=axiso.position(pointgeom.x,pointgeom.y) 56 | 57 | if auxcoord is None: 58 | dimk="id" 59 | coords=np.arange(len(pointgeom)) 60 | else: 61 | if type(auxcoord) == pd.Series: 62 | dimk=auxcoord.name 63 | coords=auxcoord.values 64 | else: 65 | # should be a dictionary like object 66 | if len(auxcoord) != 1: 67 | raise RuntimeError("Only one input coordinate is accepted") 68 | dimk=next(iter(auxcoord)) 69 | coords=auxcoord[dimk] 70 | return daout.assign_coords({dimk:("nlonlat",coords)}) 71 | 72 | 73 | 74 | -------------------------------------------------------------------------------- /src/shxarray/geom/polygons.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2024 4 | # 5 | 6 | from shapely.geometry import Point 7 | import xarray as xr 8 | import numpy as np 9 | import geopandas as gpd 10 | import pandas as pd 11 | 12 | from shxarray.core.logging import shxlogger 13 | 14 | def polygon2sh(polygeom,nmax:int=100,auxcoord=None,engine="shlib",**kwargs) ->xr.DataArray: 15 | """ 16 | Convert a mask defined by a polygon to spherical harmonic coefficients. 17 | 18 | Parameters 19 | ---------- 20 | polygeom : GeoSeries or array_like 21 | Iterable with polygons which describes the mask (1 inside/0 outside) 22 | If the input is a GeoSeries, the crs of the provided geoseries will be used to do the polygon test. E.g. use a Antarctic Polar Stereographic epsg 3031, to do test for polgyons which contain the South Pole 23 | 24 | nmax : int 25 | Maximum degree and order of the output 26 | auxcoord: named Pandas.Series or dict(dimname=coordvalues) 27 | Auxiliary coordinate to map to the dimension of polygeom. The default will construct a coordinate with an sequential numerical index and index "id" 28 | engine: str, default: 'shlib' 29 | Backend to use for the SH analysis step. Other options could be 'shtns' (when installed) 30 | Returns 31 | ------- 32 | xr.DataArray 33 | A DataArray holding the spherical harmonic coefficients up to maximum degree specified 34 | 35 | See Also 36 | -------- 37 | shxarray.geom.points.point2sh 38 | 39 | """ 40 | 41 | if type(polygeom) != gpd.GeoSeries: 42 | polygeom=gpd.GeoSeries(polygeom) 43 | 44 | 45 | #create a dense enough grid encompassing all polgyons to use for spherical harmonic synthesis 46 | # heuristic way to figure out the resolution based on nmax 47 | dslonlat=xr.Dataset.sh.lonlat_grid(nmax,engine=engine) 48 | 49 | dims=["lon","lat"] 50 | coords={"lon":dslonlat.lon,"lat":dslonlat.lat} 51 | 52 | if auxcoord is None: 53 | coords["id"]=np.arange(len(polygeom)) 54 | dims.append("id") 55 | else: 56 | if type(auxcoord) == pd.Series: 57 | dimk=auxcoord.name 58 | coords[dimk]=auxcoord.values 59 | else: 60 | # should be a dictionary like object 61 | if len(auxcoord) != 1: 62 | raise RuntimeError("Only one input coordinate is accepted") 63 | dimk=next(iter(auxcoord)) 64 | coords[dimk]=auxcoord[dimk] 65 | 66 | dims.append(dimk) 67 | 68 | 69 | dtmp=xr.DataArray(np.zeros([dslonlat.sizes['lon'],dslonlat.sizes['lat'],len(polygeom)]),coords=coords,dims=dims).stack(lonlat=("lon","lat")) 70 | if dtmp.lon.min() < 0: 71 | #grid has 0 central meridian already 72 | #create a geoDataframe of points from the grid (lon is already with 0 central meridian 73 | ggrd=gpd.GeoDataFrame(geometry=[Point(lon,lat) for lon,lat in dtmp.lonlat.values],crs=4326) 74 | else: 75 | #grid has 180 central meridian 76 | #create a geoDataframe of points from the grid and convert lon to have 0 central meridian 77 | ggrd=gpd.GeoDataFrame(geometry=[Point((lon+180)%360-180,lat) for lon,lat in dtmp.lonlat.values],crs=4326) 78 | 79 | if polygeom.crs != ggrd.crs: 80 | #possibly convert the lon/lat grid in the desired projection before doing the polygon test 81 | ggrd=ggrd.to_crs(polygeom.crs) 82 | 83 | 84 | #query using a spatial index and set values to 1 85 | shxlogger.info("Masking and gridding polygons") 86 | for i,poly in enumerate(polygeom): 87 | # if i == 18: 88 | # breakpoint() 89 | # from IPython.core.debugger import set_trace 90 | # set_trace() 91 | idx=ggrd.sindex.query(poly,predicate="contains") 92 | dtmp[i,idx]=1.0 93 | 94 | 95 | dtmp=dtmp.unstack("lonlat") 96 | shxlogger.info("Applying SH analysis") 97 | dsout=dtmp.sh.analysis(nmax,engine=engine) 98 | 99 | return dsout 100 | 101 | 102 | 103 | 104 | 105 | 106 | -------------------------------------------------------------------------------- /src/shxarray/geoslurp/GRACEDsets.py: -------------------------------------------------------------------------------- 1 | # This file is part of geoslurp. 2 | # geoslurp is free software; you can redistribute it and/or 3 | # modify it under the terms of the GNU Lesser General Public 4 | # License as published by the Free Software Foundation; either 5 | # version 3 of the License, or (at your option) any later version. 6 | 7 | # geoslurp is distributed in the hope that it will be useful, 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 10 | # Lesser General Public License for more details. 11 | 12 | # You should have received a copy of the GNU Lesser General Public 13 | # License along with Frommle; if not, write to the Free Software 14 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 15 | 16 | # Author Roelof Rietbroek (roelof@geod.uni-bonn.de), 2025 17 | 18 | from geoslurp.dataset.dataSetBase import DataSet 19 | from geoslurp.datapull.ftp import Crawler as ftpCrawler 20 | from geoslurp.config.slurplogger import slurplog 21 | from geoslurp.datapull import findFiles 22 | from glob import glob 23 | import gzip 24 | import yaml 25 | from geoslurp.datapull import UriFile 26 | from io import StringIO 27 | import os 28 | from datetime import datetime 29 | from shxarray.geoslurp.gravity import GravitySHTBase 30 | import re 31 | 32 | schema="shxarray" 33 | 34 | def graceMetaExtractor(uri): 35 | """Extract meta information from a GRACE file""" 36 | 37 | #some dirty search rand replace hacks to fix faulty yaml header in the grace/fo data 38 | hdrpatches=[(re.compile("0000-00-00T00:00:00"),"1970-01-01T00:00:00"), 39 | (re.compile(r"Dahle et al\. \(2019\)\:"),"Dahle et al. (2019),"), 40 | (re.compile(r"Dobslaw et al\. \(2019\)\:"),"Dobslaw et al. (2019),")] 41 | patchedLines=0 42 | 43 | buf=StringIO() 44 | with gzip.open(uri.url,'rt') as fid: 45 | slurplog.info("Extracting info from %s"%(uri.url)) 46 | for ln in fid: 47 | if '# End of YAML header' in ln: 48 | #parse the yaml header 49 | hdr=yaml.safe_load(buf.getvalue())["header"] 50 | break 51 | else: 52 | # if re.search("Dahle",ln): 53 | # import pdb;pdb.set_trace() 54 | #see if the line needs patching 55 | for reg,repl in hdrpatches: 56 | ln,nr=re.subn(reg,repl,ln,count=1) 57 | patchedLines+=nr 58 | #hack replace 0000-00-00 dates because yaml can't parse them 59 | buf.write(ln) 60 | if patchedLines > 0: 61 | #we want to fix the header and patch the input file 62 | buf.write(ln) #write end of YAML file 63 | #dunp the remainder of the file in the stringio buffer 64 | buf.write(fid.read()) 65 | 66 | 67 | if patchedLines > 0: 68 | slurplog.info("Patching faulty yaml header in file %s"%uri.url) 69 | with gzip.open(uri.url,'wt') as fidout: 70 | fidout.write(buf.getvalue()) 71 | 72 | nonstand=hdr["non-standard_attributes"] 73 | 74 | 75 | meta={"nmax":hdr["dimensions"]["degree"], 76 | "omax":hdr["dimensions"]["order"], 77 | "tstart":hdr["global_attributes"]["time_coverage_start"], 78 | "tend":hdr["global_attributes"]["time_coverage_end"], 79 | "lastupdate":uri.lastmod, 80 | "format":nonstand["format_id"]["short_name"], 81 | "gm":nonstand["earth_gravity_param"]["value"], 82 | "re":nonstand["mean_equator_radius"]["value"], 83 | "uri":uri.url, 84 | "type":nonstand["product_id"][0:3], 85 | "data":{"description":hdr["global_attributes"]["title"]} 86 | } 87 | 88 | #add tide system 89 | try: 90 | tmp=nonstand["permanent_tide_flag"] 91 | if re.search('inclusive',tmp): 92 | meta["tidesystem"]="zero-tide" 93 | elif re.search('exclusive'): 94 | meta["tidesystem"]="tide-free" 95 | except: 96 | pass 97 | 98 | return meta 99 | 100 | class GRACEL2Base(DataSet): 101 | """Derived type representing GRACE spherical harmonic coefficients on the podaac server""" 102 | release=None 103 | center=None 104 | updated=None 105 | schema=schema 106 | stripuri=True 107 | def __init__(self,dbconn): 108 | super().__init__(dbconn) 109 | #initialize postgreslq table 110 | GravitySHTBase.metadata.create_all(self.db.dbeng, checkfirst=True) 111 | 112 | def pull(self): 113 | url="ftp://isdcftp.gfz.de/"+self.mission+"/Level-2/"+self.center+"/"+self.release+"/" 114 | ftp=ftpCrawler(url,pattern='G.*gz$') 115 | 116 | self.updated=ftp.parallelDownload(self.dataDir(),maxconn=3,check=True) 117 | 118 | def register(self): 119 | 120 | #create a list of files which need to be (re)registered 121 | if self.updated: 122 | files=self.updated 123 | else: 124 | files=[UriFile(file) for file in findFiles(self.dataDir(),r'G.*\.gz$',self._dbinvent.lastupdate)] 125 | filesnew=self.retainnewUris(files) 126 | 127 | #loop over the newer files 128 | for uri in filesnew: 129 | meta=graceMetaExtractor(uri) 130 | self.addEntry(meta) 131 | 132 | self.updateInvent() 133 | 134 | 135 | def GRACEL2ClassFactory(clsName): 136 | """Dynamically construct GRACE Level 2 dataset classes""" 137 | mission,center,release=clsName.split("_") 138 | clsname_ad=clsName.replace("-","_").replace(".","") 139 | table=type(clsname_ad+"Table", (GravitySHTBase,), {}) 140 | return type(clsname_ad, (GRACEL2Base,), {"release": release, "center":center,"table":table,"mission":mission}) 141 | 142 | # setup GRACE datasets 143 | def GRACEDsets(conf): 144 | out=[] 145 | 146 | 147 | combos=[("grace","CSR","RL06"), 148 | ("grace","GFZ","RL06"), 149 | ("grace","JPL","RL06"), 150 | ("grace-fo","CSR","RL06.3"), 151 | ("grace-fo","GFZ","RL06.3"), 152 | ("grace-fo","JPL","RL06.3")] 153 | 154 | for mission,center,release in combos: 155 | clsName=mission+"_"+center+"_"+release 156 | out.append(GRACEL2ClassFactory(clsName)) 157 | 158 | return out 159 | 160 | 161 | 162 | 163 | 164 | -------------------------------------------------------------------------------- /src/shxarray/geoslurp/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/src/shxarray/geoslurp/__init__.py -------------------------------------------------------------------------------- /src/shxarray/geoslurp/gracefilters.py: -------------------------------------------------------------------------------- 1 | # This file is part of geoslurp. 2 | # geoslurp is free software; you can redistribute it and/or 3 | # modify it under the terms of the GNU Lesser General Public 4 | # License as published by the Free Software Foundation; either 5 | # version 3 of the License, or (at your option) any later version. 6 | 7 | # geoslurp is distributed in the hope that it will be useful, 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 10 | # Lesser General Public License for more details. 11 | 12 | # You should have received a copy of the GNU Lesser General Public 13 | # License along with geoslurp; if not, write to the Free Software 14 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 15 | 16 | # Author Roelof Rietbroek (r.rietbroek@utwente.nl), 2021 17 | from geoslurp.dataset.pandasbase import PandasBase 18 | from geoslurp.config.slurplogger import slurplogger 19 | from geoslurp.datapull.http import Uri as http 20 | from geoslurp.datapull import UriFile 21 | import os 22 | from datetime import datetime 23 | import pandas as pd 24 | 25 | schema='shxarray' 26 | 27 | class GRACEfilter(PandasBase): 28 | """Class for registering SH filters (downloads from github) """ 29 | schema=schema 30 | version=(0,0,0) 31 | def __init__(self,dbconn): 32 | super().__init__(dbconn) 33 | self.pdfile=os.path.join(self.cacheDir(),'inventory_upd.csv') 34 | def pull(self): 35 | """Pulls the dataset from github and unpacks it in the cache directory""" 36 | #download the inventory file 37 | lastchanged=datetime(2021,11,5) 38 | inventory="https://github.com/strawpants/GRACE-filter/raw/master/inventory.xlsx" 39 | uri,upd=http(inventory,lastmod=lastchanged).download(self.cacheDir(),check=True) 40 | pdinvent=pd.read_excel(uri.url,engine="openpyxl") 41 | #download all the files 42 | ddir=self.dataDir() 43 | for idx,row in pdinvent.iterrows(): 44 | ffile,upd=http(row.uri,lastmod=lastchanged).download(ddir,check=True) 45 | #update file with newly downloaded file 46 | pdinvent.at[idx,'uri']=self.conf.generalize_path(ffile.url) 47 | 48 | #write updated excel file 49 | pdinvent.to_csv(os.path.join(self.pdfile)) 50 | 51 | -------------------------------------------------------------------------------- /src/shxarray/geoslurp/graceviews.py: -------------------------------------------------------------------------------- 1 | # This file is part of geoslurp. 2 | # geoslurp is free software; you can redistribute it and/or 3 | # modify it under the terms of the GNU Lesser General Public 4 | # License as published by the Free Software Foundation; either 5 | # version 3 of the License, or (at your option) any later version. 6 | 7 | # geoslurp is distributed in the hope that it will be useful, 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 10 | # Lesser General Public License for more details. 11 | 12 | # You should have received a copy of the GNU Lesser General Public 13 | # License along with Frommle; if not, write to the Free Software 14 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 15 | 16 | # Author Roelof Rietbroek (r.rietbroek@utwente.nl), 2024 17 | 18 | from geoslurp.view.viewBase import TView 19 | 20 | schema='shxarray' 21 | 22 | def subqry1(table,nmax,tol,apptypes): 23 | tname=f"{schema}.{table}" 24 | sbqry="SELECT gsm.tstart+(gsm.tend-gsm.tstart)/2 AS time, gsm.uri AS gsm" 25 | for apptype in apptypes: 26 | applo=apptype.lower() 27 | sbqry+=f", {applo}.uri AS {applo}" 28 | sbqry+=f" FROM (SELECT tstart,tend,uri from {tname} WHERE type ='GSM' AND nmax = {nmax})AS gsm" 29 | 30 | for apptype in apptypes: 31 | applo=apptype.lower() 32 | sbqry+=f" INNER JOIN {tname} {applo} ON ABS(EXTRACT(day FROM gsm.tstart-{applo}.tstart)) < {tol} AND ABS(EXTRACT(day FROM gsm.tend-{applo}.tend)) < {tol} AND {applo}.type = '{apptype}'" 33 | return sbqry 34 | 35 | def buildGSML2qry(gtable,gfotable,nmax,tol=8,apptypes=["GAA","GAB","GAC","GAD"]): 36 | qry="%s UNION %s ORDER BY time"%(subqry1(gtable,nmax,tol,apptypes),subqry1(gfotable,nmax,tol,apptypes)) 37 | return qry 38 | 39 | class GRACECOMB_L2_JPL_n96(TView): 40 | schema=schema 41 | qry=buildGSML2qry("grace_jpl_rl06","grace_fo_jpl_rl063",96) 42 | 43 | class GRACECOMB_L2_JPL_n60(TView): 44 | schema=schema 45 | qry=buildGSML2qry("grace_jpl_rl06","grace_fo_jpl_rl063",60) 46 | 47 | 48 | class GRACECOMB_L2_GFZ_n96(TView): 49 | schema=schema 50 | qry=buildGSML2qry("grace_gfz_rl06","grace_fo_gfz_rl063",96) 51 | 52 | class GRACECOMB_L2_GFZ_n60(TView): 53 | schema=schema 54 | qry=buildGSML2qry("grace_gfz_rl06","grace_fo_gfz_rl063",60) 55 | 56 | 57 | class GRACECOMB_L2_CSR_n96(TView): 58 | schema=schema 59 | qry=buildGSML2qry("grace_csr_rl06","grace_fo_csr_rl063",96,8,["GAC","GAD"]) 60 | 61 | 62 | class GRACECOMB_L2_CSR_n60(TView): 63 | schema=schema 64 | qry=buildGSML2qry("grace_csr_rl06","grace_fo_csr_rl063",60,8,["GAC","GAD"]) 65 | 66 | def getGRACEviews(): 67 | return [ GRACECOMB_L2_JPL_n96, GRACECOMB_L2_JPL_n60, GRACECOMB_L2_GFZ_n96, GRACECOMB_L2_GFZ_n60, GRACECOMB_L2_CSR_n96, GRACECOMB_L2_CSR_n60] 68 | 69 | 70 | 71 | -------------------------------------------------------------------------------- /src/shxarray/geoslurp/gravity.py: -------------------------------------------------------------------------------- 1 | # This file is part of geoslurp. 2 | # geoslurp is free software; you can redistribute it and/or 3 | # modify it under the terms of the GNU Lesser General Public 4 | # License as published by the Free Software Foundation; either 5 | # version 3 of the License, or (at your option) any later version. 6 | 7 | # geoslurp is distributed in the hope that it will be useful, 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 10 | # Lesser General Public License for more details. 11 | 12 | # You should have received a copy of the GNU Lesser General Public 13 | # License along with geoslurp; if not, write to the Free Software 14 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 15 | 16 | # Author Roelof Rietbroek (roelof@geod.uni-bonn.de), 2018 17 | 18 | from sqlalchemy.ext.declarative import declared_attr, as_declarative 19 | from sqlalchemy import MetaData 20 | from sqlalchemy import Column,Integer,String, Boolean,Float 21 | from sqlalchemy.dialects.postgresql import TIMESTAMP, JSONB 22 | from geoslurp.types.json import DataArrayJSONType 23 | 24 | 25 | schema="shxarray" 26 | 27 | #define a declarative baseclass for spherical harmonics gravity data 28 | @as_declarative(metadata=MetaData(schema=schema)) 29 | class GravitySHTBase(object): 30 | @declared_attr 31 | def __tablename__(cls): 32 | #strip of the 'Table' from the class name 33 | return cls.__name__[:-5].lower() 34 | id = Column(Integer, primary_key=True) 35 | lastupdate=Column(TIMESTAMP) 36 | tstart=Column(TIMESTAMP,index=True) 37 | tend=Column(TIMESTAMP,index=True) 38 | time=Column(TIMESTAMP,index=True) 39 | nmax=Column(Integer) 40 | omax=Column(Integer) 41 | gm=Column(Float) 42 | re=Column(Float) 43 | tidesystem=Column(String) 44 | origin=Column(String) 45 | format=Column(String) 46 | type=Column(String) 47 | uri=Column(String) 48 | data=Column(JSONB) 49 | 50 | @as_declarative(metadata=MetaData(schema=schema)) 51 | class GravitySHinDBTBase(object): 52 | @declared_attr 53 | def __tablename__(cls): 54 | #strip of the 'Table' from the class name 55 | return cls.__name__[:-5].lower() 56 | id = Column(Integer, primary_key=True) 57 | lastupdate=Column(TIMESTAMP) 58 | tstart=Column(TIMESTAMP,index=True) 59 | tend=Column(TIMESTAMP,index=True) 60 | time=Column(TIMESTAMP,index=True) 61 | nmax=Column(Integer) 62 | omax=Column(Integer) 63 | gm=Column(Float) 64 | re=Column(Float) 65 | tidesystem=Column(String) 66 | origin=Column(String) 67 | format=Column(String) 68 | type=Column(String) 69 | data=Column(DataArrayJSONType) 70 | __table_args__ = {'extend_existing': True} 71 | 72 | 73 | class Trig(): 74 | """Enum to distinguish between a trigonometric cosine and sine coefficient""" 75 | c = 0 76 | s = 1 77 | 78 | class JSONSHArchive(): 79 | """JSON Archive which stores SH data, with sigmas and possibly a covariance 80 | Note this mimics the Archive interface of frommle without actually requiring its import""" 81 | def __init__(self,nmax=None,datadict=None): 82 | 83 | if nmax: 84 | #create from maximum degree 85 | self.data_={"attr":{},"vars":{"shg":[]}} 86 | shg=[] 87 | for n in range(nmax+1): 88 | for m in range(n+1): 89 | shg.append((n,m,Trig.c)) 90 | if m> 0: 91 | shg.append((n,m,Trig.s)) 92 | self.data_["vars"]["shg"]=shg 93 | self.data_["attr"]["nmax"]=nmax 94 | elif not nmax and datadict: 95 | self.data_=datadic 96 | else: 97 | raise RunTimeError("Can only construct a JSONSHArchive from either nmax or a datadict") 98 | 99 | def __getitem__(self,key): 100 | """retrieves a named variable, and lazily creates allowed variables when requested""" 101 | if not key in self.data_["vars"]: 102 | if key in ["cnm","sigcnm"]: 103 | self.data_["vars"][key]=[0]*len(self.data_["vars"]["shg"]) 104 | elif key == "covcnm": 105 | nl=len(self.data_["vars"]["shg"]) 106 | self.data_["vars"][key]=[[0]*nl]*nl 107 | return self.data_["vars"][key] 108 | 109 | def __setitem__(self,key,item): 110 | self.data_["vars"][key]=item 111 | 112 | 113 | def idx(self,nmt): 114 | """returns the index of the n,m,t tuple""" 115 | return self.data_["vars"]["shg"].index(nmt) 116 | 117 | @property 118 | def attr(self): 119 | """get the stored global attributes of the file""" 120 | return self.data_["attr"] 121 | 122 | @attr.setter 123 | def attr(self,attrdict): 124 | """sets the stored global attributes of the file""" 125 | self.data_["attr"]=attrdict 126 | 127 | @property 128 | def dict(self): 129 | return self.data_ 130 | 131 | -------------------------------------------------------------------------------- /src/shxarray/geoslurp/icgem.py: -------------------------------------------------------------------------------- 1 | # This file is part of geoslurp. 2 | # geoslurp is free software; you can redistribute it and/or 3 | # modify it under the terms of the GNU Lesser General Public 4 | # License as published by the Free Software Foundation; either 5 | # version 3 of the License, or (at your option) any later version. 6 | 7 | # geoslurp is distributed in the hope that it will be useful, 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 10 | # Lesser General Public License for more details. 11 | 12 | # You should have received a copy of the GNU Lesser General Public 13 | # License along with Frommle; if not, write to the Free Software 14 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 15 | 16 | # Author Roelof Rietbroek (roelof@geod.uni-bonn.de), 2018 17 | 18 | from geoslurp.datapull import UriBase,CrawlerBase 19 | from geoslurp.datapull.http import Uri as http 20 | from geoslurp.config.slurplogger import slurplogger 21 | import gzip as gz 22 | import re 23 | from lxml.etree import HTML as HTMLtree 24 | import os 25 | from datetime import datetime 26 | 27 | def icgemMetaExtractor(uri): 28 | """Extract meta information from a gzipped icgem file""" 29 | 30 | #first extract the icgem header 31 | headstart=False 32 | hdr={} 33 | with gz.open(uri.url,'rt') as fid: 34 | slurplogger().info("Extracting info from %s"%(uri.url)) 35 | for ln in fid: 36 | # if "begin_of_head" in ln: 37 | # headstart=True 38 | # continue 39 | 40 | if headstart and 'end_of_head' in ln: 41 | break 42 | 43 | # if headstart: 44 | spl=ln.split() 45 | if len(spl) == 2: 46 | hdr[spl[0]]=spl[1] 47 | 48 | 49 | try: 50 | meta={"nmax":int(hdr["max_degree"]), 51 | "lastupdate":uri.lastmod, 52 | "format":"icgem", 53 | "gm":float(hdr["earth_gravity_constant"].replace('D','E')), 54 | "re":float(hdr["radius"].replace('D','E')), 55 | "uri":uri.url, 56 | "type":"GSM", 57 | "data":{"name":hdr["modelname"]} 58 | } 59 | except Exception as e: 60 | pass 61 | 62 | #add tide system 63 | try: 64 | tmp=hdr["tide_system"] 65 | if re.search('zero_tide',tmp): 66 | meta["tidesystem"]="zero-tide" 67 | elif re.search('tide_free',tmp): 68 | meta["tidesystem"]="tide-free" 69 | except: 70 | pass 71 | 72 | return meta 73 | 74 | class Uri(UriBase): 75 | """Holds an uri to an icgem static field""" 76 | def __init__(self,url,lastmod=None,name=None,ref=None,nmax=None,year=None): 77 | if year and not lastmod: 78 | #use year as the last modification time 79 | lastmod=datetime(year,12,31) 80 | super().__init__(url,lastmod) 81 | self.name=name 82 | self.ref=ref 83 | self.nmax=nmax 84 | 85 | 86 | class Crawler(CrawlerBase): 87 | """Crawl icgem static fields""" 88 | def __init__(self): 89 | super().__init__(url="http://icgem.gfz-potsdam.de/tom_longtime") 90 | buf=http(self.rooturl).buffer() 91 | self._roothtml=HTMLtree(buf.getvalue()) 92 | 93 | def uris(self): 94 | """List uris of available static models""" 95 | 96 | rowregex=re.compile('(^tom-row(?!-header))|(^tom-row-odd)') 97 | for elem in self._roothtml.iterfind('.//tr'): 98 | uridict={} 99 | if not rowregex.match(elem.attrib['class']): 100 | continue 101 | 102 | nameelem=elem.find(".//td[@class='tom-cell-name']") 103 | if nameelem.text.strip() != '': 104 | #just find the name end strip line ending 105 | uridict["name"]=nameelem.text.lstrip()[:-1] 106 | else: 107 | #find a name and reference 108 | nameelem=nameelem.find(".//a[@href]") 109 | uridict["name"]=nameelem.text 110 | uridict["ref"]=nameelem.attrib['href'] 111 | 112 | #find the year, maximum degree, doi etc 113 | uridict["year"]=int(elem.find(".//td[@class='tom-cell-year']").text) 114 | uridict["nmax"]=int(elem.find(".//td[@class='tom-cell-degree']").text) 115 | try: 116 | uridict["url"]=os.path.dirname(self.rooturl)+elem.find(".//td[@class='tom-cell-modelfile']").find(".//a[@href]").attrib["href"] 117 | except AttributeError: 118 | #not avaailable for download so skip this entry 119 | continue 120 | 121 | try: 122 | uridict["ref"]=elem.find(".//td[@class='tom-cell-doilink']").find(".//a[@href]").attrib["href"] 123 | except AttributeError: 124 | #no problem as this entry is optional just pass 125 | pass 126 | yield Uri(**uridict) 127 | -------------------------------------------------------------------------------- /src/shxarray/geoslurp/icgemdset.py: -------------------------------------------------------------------------------- 1 | # This file is part of shxarray. 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2024 5 | # provides a dataset and table for static gravity fields from the icgem website 6 | 7 | 8 | 9 | from geoslurp.dataset import DataSet 10 | from shxarray.geoslurp.gravity import GravitySHTBase 11 | from shxarray.geoslurp.icgem import Crawler as IcgemCrawler 12 | from shxarray.geoslurp.icgem import icgemMetaExtractor 13 | from geoslurp.datapull.uri import findFiles 14 | import re 15 | from geoslurp.datapull import UriFile 16 | import os 17 | 18 | schema="shxarray" 19 | 20 | class ICGEMstatic(DataSet): 21 | """Manages the static gravity fields which are hosted at http://icgem.gfz-potsdam.de/tom_longtime""" 22 | table=type("ICGEMstaticTable",(GravitySHTBase,), {}) 23 | schema=schema 24 | stripuri=True 25 | def __init__(self, dbconn): 26 | super().__init__(dbconn) 27 | #initialize postgreslq table 28 | GravitySHTBase.metadata.create_all(self.db.dbeng, checkfirst=True) 29 | self.updated=[] 30 | 31 | def pull(self,pattern=None,list=False): 32 | """Pulls static gravity fields from the icgem website 33 | :param pattern: only download files whose name obeys this regular expression 34 | :param list (bool): only list available models""" 35 | self.updated=[] 36 | crwl=IcgemCrawler() 37 | if pattern: 38 | regex=re.compile(pattern) 39 | outdir=self.dataDir() 40 | if list: 41 | print("%12s %5s %4s"%("name","nmax", "year")) 42 | for uri in crwl.uris(): 43 | if pattern: 44 | if not regex.search(uri.name): 45 | continue 46 | if list: 47 | #only list available models 48 | print("%-12s %5d %4d"%(uri.name,uri.nmax,uri.lastmod.year)) 49 | else: 50 | tmp,upd=uri.download(outdir,check=True, gzip=True) 51 | if upd: 52 | self.updated.append(tmp) 53 | 54 | def register(self,pattern=None): 55 | """Register static gravity fields donwloaded in the data director 56 | :param pattern: only register files whose filename obeys this regular expression 57 | """ 58 | if not pattern: 59 | pattern='.*\.gz' 60 | #create a list of files which need to be (re)registered 61 | if self.updated: 62 | files=self.updated 63 | else: 64 | files=[UriFile(file) for file in findFiles(self.dataDir(),pattern)] 65 | 66 | #loop over files 67 | for uri in files: 68 | urilike=os.path.basename(uri.url) 69 | 70 | if not self.uriNeedsUpdate(urilike,uri.lastmod): 71 | continue 72 | 73 | meta=icgemMetaExtractor(uri) 74 | self.addEntry(meta) 75 | 76 | self.updateInvent() 77 | 78 | 79 | -------------------------------------------------------------------------------- /src/shxarray/geoslurp/loadlove.py: -------------------------------------------------------------------------------- 1 | # This file is part of geoslurp. 2 | # geoslurp is free software; you can redistribute it and/or 3 | # modify it under the terms of the GNU Lesser General Public 4 | # License as published by the Free Software Foundation; either 5 | # version 3 of the License, or (at your option) any later version. 6 | 7 | # geoslurp is distributed in the hope that it will be useful, 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 10 | # Lesser General Public License for more details. 11 | 12 | # You should have received a copy of the GNU Lesser General Public 13 | # License along with geoslurp; if not, write to the Free Software 14 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 15 | 16 | # Author Roelof Rietbroek (roelof@geod.uni-bonn.de), 2025 17 | from geoslurp.dataset import DataSet 18 | from geoslurp.datapull.http import Uri as http 19 | from sqlalchemy.ext.declarative import declarative_base 20 | from sqlalchemy import Column,Integer,String,MetaData 21 | from sqlalchemy.dialects.postgresql import TIMESTAMP,JSONB 22 | from geoslurp.datapull.github import Crawler as ghCrawler 23 | from geoslurp.datapull.github import GithubFilter as ghfilter 24 | from geoslurp.config.slurplogger import slurplogger 25 | from geoslurp.types.json import DataArrayJSONType 26 | from geoslurp.datapull import UriFile 27 | from geoslurp.datapull import findFiles 28 | import numpy as np 29 | import xarray as xr 30 | import gzip 31 | import re 32 | import os 33 | 34 | def lloveMetaExtractor(uri): 35 | """extract some metainfo from the load Lovenumber file""" 36 | #extract maximum degree from file and heuristically derive loadtype from the filename) 37 | if re.search("body",uri.url): 38 | ltype="body" 39 | else: 40 | ltype="surface" 41 | 42 | nmax=0 43 | reentry=re.compile('^ *[0-9]') 44 | hn=[] 45 | ln=[] 46 | kn=[] 47 | deg=[] 48 | slurplogger().info(f"Processing {uri.url}") 49 | descr="" 50 | ref=None 51 | with gzip.open(uri.url,'rt') as fid: 52 | for line in fid: 53 | if reentry.search(line): 54 | linespl=line.split() 55 | n=int(linespl[0]) 56 | if n == 1: 57 | #look for CF degree 1 coefficients only 58 | ref="CF" 59 | if linespl[4] != ref: 60 | #only use the degree 1 numbers of the chosen reference system 61 | continue 62 | deg.append(n) 63 | hln=[float(el.replace('D','E')) for el in linespl[1:4]] 64 | #possibly replace infinity values with NaN 65 | hln=[None if np.isinf(el) else el for el in hln] 66 | 67 | hn.append(hln[0]) 68 | ln.append(hln[1]) 69 | kn.append(hln[2]) 70 | else: 71 | #append comment to description 72 | descr+=line 73 | 74 | #create an xarray dataset 75 | dslove=xr.Dataset(data_vars=dict(kn=(["degree"],kn),hn=(["degree"],hn),ln=(["degree"],ln)),coords=dict(degree=(["degree"],deg))) 76 | 77 | #extract the maximum degree 78 | nmax=dslove.degree.max().data.item() 79 | meta={"name":os.path.basename(uri.url).replace(".love.gz",""),"lastupdate":uri.lastmod, 80 | "descr":descr,"loadtype":ltype,"nmax":nmax,"ref":ref,"data":dslove} 81 | 82 | 83 | return meta 84 | 85 | schema='shxarray' 86 | LLoveTBase=declarative_base(metadata=MetaData(schema=schema)) 87 | 88 | class LLoveTable(LLoveTBase): 89 | """Defines the Load Love number PostgreSQL table""" 90 | __tablename__='llove' 91 | id=Column(Integer,primary_key=True) 92 | name=Column(String,unique=True) 93 | loadtype=Column(String) 94 | ref=Column(String) 95 | lastupdate=Column(TIMESTAMP) 96 | nmax=Column(Integer) 97 | descr=Column(String) 98 | data=Column(DataArrayJSONType) 99 | 100 | class LLove(DataSet): 101 | """Class for registering load love numbers (downloads from github) """ 102 | schema=schema 103 | version=(1,0,0) 104 | table=LLoveTable 105 | def __init__(self,dbconn): 106 | super().__init__(dbconn) 107 | 108 | def pull(self): 109 | """Pulls the dataset from github and unpacks it in the cache directory""" 110 | #crawls the github repository for Load Love numbers 111 | 112 | reponame="strawpants/snrei" 113 | commitsha="e61d3e2a9eb328d48f56f5aa73fa2aaba60f1d5c" 114 | 115 | try: 116 | cred=self.conf.authCred("github",['oauthtoken']) 117 | token=cred.oauthtoken 118 | except: 119 | token=None 120 | ghcrawler=ghCrawler(reponame,commitsha=commitsha, 121 | filter=ghfilter({"type":"blob","path":"\.love"}), 122 | followfilt=ghfilter({"type":"tree","path":"Love"}), 123 | oauthtoken=token) 124 | 125 | #download all datasets 126 | ghcrawler.parallelDownload(self.cacheDir(),check=True,maxconn=3,gzip=True) 127 | 128 | def register(self): 129 | slurplogger().info("Building file list..") 130 | files=[UriFile(file) for file in findFiles(self.cacheDir(),'.*love',self._dbinvent.lastupdate)] 131 | 132 | if len(files) == 0: 133 | slurplogger().info("LLove: No new files found since last update") 134 | return 135 | 136 | self.truncateTable() 137 | #loop over files 138 | for uri in files: 139 | self.addEntry(lloveMetaExtractor(uri)) 140 | self.updateInvent() 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | -------------------------------------------------------------------------------- /src/shxarray/io/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | The shxarray.io submodule contains `backends `_ for reading data with spherical harmonic information into xarray DataArrays/DataSets. 3 | 4 | 5 | """ 6 | -------------------------------------------------------------------------------- /src/shxarray/io/gsmv6.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | 6 | import gzip 7 | import xarray as xr 8 | from shxarray.core.sh_indexing import SHindexBase 9 | import re 10 | import sys 11 | from io import BytesIO 12 | import yaml 13 | import numpy as np 14 | from shxarray.core.cf import get_cfatts 15 | 16 | def readGSMv6(fileobj,nmaxstop=sys.maxsize): 17 | needsClosing=False 18 | if type(fileobj) == str: 19 | needsClosing=True 20 | if fileobj.endswith('.gz'): 21 | fileobj=gzip.open(fileobj,'rb') 22 | else: 23 | fileobj=open(fileobj,'rb') 24 | 25 | #first read the yaml header 26 | buf=BytesIO() 27 | for ln in fileobj: 28 | if b'# End of YAML header' in ln: 29 | break 30 | else: 31 | buf.write(ln) 32 | 33 | hdr=yaml.safe_load(buf.getvalue())["header"] 34 | 35 | #setup global attributes 36 | attr={} 37 | attr["nmaxfile"]=hdr["dimensions"]["degree"] 38 | if not "nmax" in attr: 39 | attr["nmax"]=attr["nmaxfile"] 40 | 41 | attr["tstart"]=hdr["global_attributes"]["time_coverage_start"] 42 | attr["tend"]=hdr["global_attributes"]["time_coverage_end"] 43 | 44 | nonstand=hdr["non-standard_attributes"] 45 | 46 | attr["gm"]=nonstand["earth_gravity_param"]["value"] 47 | attr["re"]=nonstand["mean_equator_radius"]["value"] 48 | 49 | 50 | nmax=attr["nmax"] 51 | 52 | 53 | nsh=SHindexBase.nsh(nmax,squeeze=True) 54 | 55 | cnm=np.zeros([nsh]) 56 | sigcnm=np.zeros([nsh]) 57 | if "tstart" in attr and "tend" in attr: 58 | time=[attr['tstart']+(attr['tend']-attr['tstart'])/2] 59 | else: 60 | time=None 61 | 62 | ncount=0 63 | nm=[] 64 | #continue reading the data 65 | dataregex=re.compile(b'^GRCOF2') 66 | for ln in fileobj: 67 | if dataregex.match(ln): 68 | lnspl=ln.split() 69 | n=int(lnspl[1]) 70 | if n> nmaxstop: 71 | if ncount > nsh: 72 | #all required coefficients have been read (no need to read the file further) 73 | break 74 | continue 75 | 76 | 77 | m=int(lnspl[2]) 78 | 79 | cnm[ncount]=float(lnspl[3]) 80 | sigcnm[ncount]=float(lnspl[5]) 81 | nm.append((n,m)) 82 | ncount+=1 83 | 84 | #possibly also add snm coefficients 85 | if m!=0: 86 | cnm[ncount]=float(lnspl[4]) 87 | sigcnm[ncount]=float(lnspl[6]) 88 | nm.append((n,-m)) 89 | ncount+=1 90 | 91 | if needsClosing: 92 | fileobj.close() 93 | 94 | if time: 95 | shp=["time",SHindexBase.name] 96 | coords={SHindexBase.name:SHindexBase.mi_fromtuples(nm),"time":time} 97 | #also expand variables 98 | cnm=np.expand_dims(cnm[0:ncount], axis=0) 99 | sigcnm=np.expand_dims(sigcnm[0:ncount],axis=0) 100 | else: 101 | shp=[SHindexBase.name] 102 | coords={SHindexBase.name:SHindexBase.mi_fromtuples(nm)} 103 | cnm=cnm[0:ncount] 104 | sigcnm=sigcnm[0:ncount] 105 | 106 | ds=xr.Dataset(data_vars=dict(cnm=(shp,cnm,get_cfatts("stokes")),sigcnm=(shp,sigcnm,get_cfatts("stokes stdv"))),coords=coords,attrs=attr) 107 | return ds 108 | -------------------------------------------------------------------------------- /src/shxarray/io/gzipwrap.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2024 4 | # 5 | 6 | from gzip import GzipFile 7 | import os 8 | from io import TextIOWrapper 9 | 10 | try: 11 | from rapidgzip import RapidgzipFile 12 | except: 13 | #failed to load (ok) 14 | RapidgzipFile=None 15 | 16 | def gzip_open_r(filename,textmode=False,encoding=None): 17 | """ 18 | GZip file reading wrapper leveraging parallel decompression speed when rapidgzip is installed on the system 19 | Parameters 20 | ---------- 21 | filename : str 22 | Filename of the gzip archive to open 23 | 24 | textmode : 25 | Whether to open in textmode (allows iterating over lines) 26 | 27 | """ 28 | if RapidgzipFile: 29 | gzfid=RapidgzipFile(filename,parallelization=os.cpu_count()) 30 | 31 | else: 32 | gzfid=GzipFile(filename,'rb',encoding=encoding) 33 | 34 | 35 | if textmode: 36 | return TextIOWrapper(gzfid) 37 | else: 38 | return gzfid 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /src/shxarray/io/icgem.py: -------------------------------------------------------------------------------- 1 | # This file is part of frommle2. 2 | # frommle2 is free software; you can redistribute it and/or 3 | # modify it under the terms of the GNU Lesser General Public 4 | # License as published by the Free Software Foundation; either 5 | # version 3 of the License, or (at your option) any later version. 6 | 7 | # frommle2 is distributed in the hope that it will be useful, 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 10 | # Lesser General Public License for more details. 11 | 12 | # You should have received a copy of the GNU Lesser General Public 13 | # License along with Frommle; if not, write to the Free Software 14 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 15 | 16 | # Author Roelof Rietbroek (r.rietbroek@utwente.nl), 2021 17 | 18 | import gzip 19 | import xarray as xr 20 | import re 21 | import sys 22 | import numpy as np 23 | from shxarray.core.sh_indexing import SHindexBase 24 | from shxarray.core.logging import shxlogger 25 | from datetime import datetime,timedelta 26 | from shxarray.core.cf import get_cfatts 27 | 28 | def readIcgem(fileobj,nmaxstop=sys.maxsize): 29 | needsClosing=False 30 | if type(fileobj) == str: 31 | needsClosing=True 32 | if fileobj.endswith('.gz'): 33 | fileobj=gzip.open(fileobj,'rb') 34 | else: 35 | fileobj=open(fileobj,'rb') 36 | 37 | #first read the icgem header 38 | inheader=False 39 | hdr={} 40 | for ln in fileobj: 41 | if b'begin_of_head' in ln: 42 | inheader=True 43 | continue 44 | if b'end_of_head' in ln: 45 | break 46 | 47 | spl=ln.decode('utf-8').split() 48 | if len(spl) == 2: 49 | #insert name value pairs in the hdr dict 50 | hdr[spl[0]]=spl[1] 51 | 52 | #extract relevant parameters from the header 53 | attr={} 54 | try: 55 | nmaxsupp=int(hdr["max_degree"]) 56 | attr["nmaxfile"]=nmaxsupp 57 | if nmaxsupp < nmaxstop: 58 | attr["nmax"]=nmaxsupp 59 | else: 60 | attr["nmax"]=nmaxstop 61 | nmax=attr["nmax"] 62 | 63 | if nmax > nmaxsupp: 64 | shxlogger.warning("Nmax ({nmax}) requested larger than supported, higher degree coefficients will be set to zero") 65 | 66 | 67 | if 'format' in hdr: 68 | attr["format"]=hdr['format'] 69 | else: 70 | attr["format"]="icgem" 71 | 72 | if "norm" in hdr: 73 | attr["norm"]=hdr["norm"] 74 | 75 | attr["gm"]=float(hdr["earth_gravity_constant"]) 76 | attr["re"]=float(hdr["radius"]) 77 | attr["modelname"]=hdr["modelname"] 78 | except KeyError: 79 | #some values may not be present but that is ok 80 | pass 81 | 82 | #Non standard HACK to try to retrieve the epoch from the modelname (GRAZ monthly solutions only) 83 | if "modelname" in hdr: 84 | try: 85 | time=[datetime.strptime(hdr['modelname'][-7:],"%Y-%m")+timedelta(days=14.5)] 86 | except ValueError: 87 | time=None 88 | else: 89 | time=None 90 | 91 | 92 | nsh=SHindexBase.nsh(nmax,squeeze=True) 93 | cnm=np.zeros([nsh]) 94 | sigcnm=np.zeros([nsh]) 95 | ncount=0 96 | nm=[] 97 | #continue reading the data 98 | dataregex=re.compile(b'^gfc') 99 | ncolumns= -1 100 | for ln in fileobj: 101 | if dataregex.match(ln): 102 | lnspl=ln.replace(b"D",b"E").split() 103 | if ncolumns < 0: 104 | ncolumns = len(lnspl) 105 | 106 | n=int(lnspl[1]) 107 | if n> nmaxstop: 108 | if ncount > nsh: 109 | #all required coefficients have been read (no need to read the file further) 110 | break 111 | continue 112 | 113 | 114 | m=int(lnspl[2]) 115 | 116 | cnm[ncount]=float(lnspl[3]) 117 | if ncolumns >= 6: 118 | sigcnm[ncount]=float(lnspl[5]) 119 | 120 | nm.append((n,m)) 121 | ncount+=1 122 | 123 | #possibly also add snm coefficients 124 | if m!=0: 125 | cnm[ncount]=float(lnspl[4]) 126 | if ncolumns >= 6: 127 | sigcnm[ncount]=float(lnspl[6]) 128 | 129 | nm.append((n,-m)) 130 | ncount+=1 131 | 132 | if needsClosing: 133 | fileobj.close() 134 | 135 | 136 | if hasattr(xr,'Coordinates'): 137 | #only in newer xarray versions.. 138 | coords=xr.Coordinates.from_pandas_multiindex(SHindexBase.mi_fromtuples(nm), SHindexBase.name) 139 | if time: 140 | coords=coords.assign(time=time) 141 | else: 142 | coords={SHindexBase.name:SHindexBase.mi_fromtuples(nm)} 143 | if time: 144 | coords["time"]=time 145 | 146 | if time: 147 | shp=["time",SHindexBase.name] 148 | # coords={SHindexBase.name:shimi,"time":time} 149 | #also expand variables 150 | cnm=np.expand_dims(cnm[0:ncount], axis=0) 151 | sigcnm=np.expand_dims(sigcnm[0:ncount],axis=0) 152 | else: 153 | shp=[SHindexBase.name] 154 | # coords={SHindexBase.name:shimi} 155 | cnm=cnm[0:ncount] 156 | sigcnm=sigcnm[0:ncount] 157 | 158 | if ncolumns >= 6: 159 | ds=xr.Dataset(data_vars=dict(cnm=(shp,cnm,get_cfatts("stokes")),sigcnm=(shp,sigcnm,get_cfatts("stokes stdv"))),coords=coords,attrs=attr) 160 | 161 | else: 162 | ds=xr.Dataset(data_vars=dict(cnm=(shp,cnm,get_cfatts("stokes"))),coords=coords,attrs=attr) 163 | 164 | return ds 165 | -------------------------------------------------------------------------------- /src/shxarray/io/shascii.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2024 4 | # 5 | 6 | import xarray as xr 7 | from shxarray.core.time import decyear2dt 8 | import numpy as np 9 | import sys 10 | import gzip 11 | from shxarray.core.sh_indexing import SHindexBase 12 | import re 13 | from io import StringIO 14 | import pandas as pd 15 | 16 | def to_shascii(daobj,out_obj=None): 17 | nmax=daobj.sh.nmax 18 | tstart=0.0 19 | tend=0.0 20 | tcent=0.0 21 | 22 | t=np.where(daobj.m < 0,1,0) 23 | nm=[(n,m) for n,m in zip(daobj.n.data,np.abs(daobj.m).data)] 24 | 25 | nmt_mi=pd.MultiIndex.from_arrays([nm,t.data],names=["nm","t"]) 26 | daobj=daobj.drop_vars(["n","m"]).rename(nm='nmt').assign_coords(nmt=nmt_mi) 27 | if out_obj is None: 28 | buff=StringIO() 29 | else: 30 | buff=out_obj 31 | 32 | buff.write(f" META {nmax} {tstart} {tcent} {tend}\n") 33 | for nmi,cnm in daobj.unstack('nmt',fill_value=0.0).groupby('nm'): 34 | buff.write(f"{nmi[0]} {nmi[1]} {cnm.loc[nmi,0].item()} {cnm.loc[nmi,1].item()}\n") 35 | 36 | if out_obj is not None: 37 | buff.seek(0) 38 | return buff 39 | 40 | 41 | 42 | # def writeSHAscii(fileobj,ds,cnmv='cnm',sigcnmv=None): 43 | # """Writes a dataset array with sh data to an ascii file""" 44 | 45 | # needsClosing=False 46 | 47 | # if type(fileobj) == str: 48 | # needsClosing=True 49 | # if fileobj.endswith('.gz'): 50 | # fileobj=gzip.open(fileobj,'wt') 51 | # else: 52 | # fileobj=open(fileobj,'wt') 53 | # nmax=ds.sh.nmax 54 | 55 | # #TODO extract time epochs in decimal years from the data 56 | # tstart=0.0 57 | # tcent=0.0 58 | # tend=0.0 59 | 60 | # fileobj.write(f" META {nmax} {tstart} {tcent} {tend}\n") 61 | # #loop over all coefficients (make sure to sort them appropriately) 62 | # sortds=ds.sortby(['n','m','t']) 63 | # cnmvals=np.zeros([2]) 64 | # ncoef=0 65 | # for idx,el in zip(sortds.shg,sortds[cnmv].values): 66 | # n,m,t=idx.data[()] 67 | # cnmvals[t]=el 68 | 69 | # if m == 0: 70 | # #no need to wait for a sine coefficient 71 | # ncoef=2 72 | # cnmvals[1]=0.0 73 | # else: 74 | # ncoef+=1 75 | 76 | # if ncoef == 2: 77 | # fileobj.write(f"{n:6d} {m:6d} {cnmvals[0]:17.10e} {cnmvals[1]:17.10e}\n") 78 | # ncoef=0 79 | # cnmvals[:]=0.0 80 | 81 | # if needsClosing: 82 | # fileobj.close() 83 | 84 | 85 | def readSHAscii(fileobj,nmaxstop=sys.maxsize): 86 | 87 | if type(fileobj) == str: 88 | if fileobj.endswith(".gz"): 89 | fid=gzip.open(fileobj,'rt') 90 | else: 91 | fid=open(fileobj,'rt') 92 | else: 93 | fid=fileobj 94 | 95 | #defaults 96 | attr={} 97 | nmaxfile=-1 # unknown 98 | 99 | #Try seeing if a META header line is present 100 | frstln=fid.readline() 101 | if re.search(' +META',frstln): 102 | metaln=frstln.split() 103 | nmaxfile=int(metaln[1]) 104 | 105 | if nmaxstop <= nmaxfile: 106 | nmax=nmaxstop 107 | else: 108 | nmax=nmaxfile 109 | 110 | #Extract time tags 111 | stime,ctime,etime=[decyear2dt(float(x)) for x in metaln[2:5]] 112 | attr={"nmax":nmax,"nmaxfile":nmaxfile,"CTime":ctime,"STime":stime,"ETime":etime} 113 | #read next line already (may already contain data) 114 | frstln=fid.readline() 115 | 116 | #check how many columns are present 117 | ln=frstln.split() 118 | nd=len(ln) 119 | if nd == 4: 120 | sigma=False 121 | elif nd == 6: 122 | sigma=True 123 | else: 124 | raise IOError(f"Unexpected amount of columns: {len(ln)}") 125 | if nmaxfile > 0: 126 | nshmax=SHindexBase.nsh(nmaxfile) 127 | else: 128 | nshmax=sys.maxsize 129 | 130 | ncount=0 131 | n,m=[int(x) for x in ln[0:2]] 132 | coefs=[float(x) for x in ln[2:nd]] 133 | 134 | # Add cosine entry 135 | nm=[(n,m)] 136 | cnm=[coefs[0]] 137 | if nd == 6: 138 | sigcnm=[coefs[2]] 139 | ncount+=1 140 | # add Sine entry if order is not zero 141 | if m > 0: 142 | nm.append((n,-m)) 143 | cnm.append(coefs[1]) 144 | if nd == 6: 145 | sigcnm.append(coefs[3]) 146 | ncount+=1 147 | 148 | 149 | for lnstr in fid: 150 | ln=lnstr.split() 151 | if len(ln) == 0: 152 | break 153 | n,m=[int(x) for x in ln[0:2]] 154 | coefs=[float(x) for x in ln[2:nd]] 155 | 156 | # Add cosine entry 157 | nm.append((n,m)) 158 | cnm.append(coefs[0]) 159 | if nd == 6: 160 | sigcnm.append(coefs[2]) 161 | ncount+=1 162 | # add Sine entry if order is not zero 163 | if m > 0: 164 | nm.append((n,-m)) 165 | cnm.append(coefs[1]) 166 | if nd == 6: 167 | sigcnm.append(coefs[3]) 168 | ncount+=1 169 | if n > nmaxstop and ncount > nshmax: 170 | #all requested data has been read already 171 | break 172 | 173 | #create an xarray dataset 174 | 175 | if nd == 6: 176 | ds=xr.Dataset(data_vars=dict(cnm=([SHindexBase.name],cnm),sigcnm=([SHindexBase.name],sigcnm)),coords={SHindexBase.name:SHindexBase.mi_fromtuples(nm)},attrs=attr) 177 | else: 178 | ds=xr.Dataset(data_vars=dict(cnm=([SHindexBase.name],cnm)),coords={SHindexBase.name:SHindexBase.mi_fromtuples(nm)},attrs=attr) 179 | return ds 180 | -------------------------------------------------------------------------------- /src/shxarray/io/shiobackend.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | 5 | from xarray.backends import BackendEntrypoint 6 | from shxarray.io.icgem import readIcgem 7 | from shxarray.io.gsmv6 import readGSMv6 8 | from shxarray.io.binv_legacy import readBINV 9 | from shxarray.io.shascii import readSHAscii 10 | from shxarray.io.sinex import read_sinex 11 | import os 12 | 13 | import re 14 | 15 | class ICGEMBackEntryPoint(BackendEntrypoint): 16 | url="https://github.com/ITC-Water-Resources/shxarray" 17 | description = "Read spherical harmonic coefficients in ICGEM format" 18 | def open_dataset(self,filename_or_obj,*,drop_variables=None): 19 | dsout=readIcgem(filename_or_obj) 20 | if drop_variables is not None: 21 | dsout=dsout.drop_vars(drop_variables) 22 | return dsout 23 | 24 | def guess_can_open(self,filename_or_obj): 25 | try: 26 | strrep=str(filename_or_obj) 27 | if strrep.endswith(".gfc"): 28 | return True 29 | if strrep.endswith("gfc.gz"): 30 | return True 31 | except AttributeError: 32 | return False 33 | 34 | return False 35 | 36 | 37 | class GSMv6BackEntryPoint(BackendEntrypoint): 38 | url="https://github.com/ITC-Water-Resources/shxarray" 39 | description = "Read spherical harmonic coefficients in GSM-V6 format" 40 | def open_dataset(self,filename_or_obj,*,drop_variables=None): 41 | dsout=readGSMv6(filename_or_obj) 42 | if drop_variables is not None: 43 | dsout=dsout.drop_vars(drop_variables) 44 | return dsout 45 | 46 | def guess_can_open(self,filename_or_obj): 47 | try: 48 | strrep=str(filename_or_obj) 49 | # search for conventional file naming of GRACE 50 | if re.search(r'G[SA][MBCA]-2[^\s]*.gz',strrep): 51 | return True 52 | except AttributeError: 53 | return False 54 | 55 | return False 56 | 57 | class SHAsciiBackEntryPoint(BackendEntrypoint): 58 | url="https://github.com/ITC-Water-Resources/shxarray" 59 | description = "Read spherical harmonic coefficients in generic n,m, cnm, snm, sigcnm, sigsnm ascii format" 60 | def open_dataset(self,filename_or_obj,*,drop_variables=None): 61 | dsout=readSHAscii(filename_or_obj) 62 | if drop_variables is not None: 63 | dsout=dsout.drop_vars(drop_variables) 64 | return dsout 65 | 66 | def guess_can_open(self,filename_or_obj): 67 | #User need to use this engine explicitly as the filenaming can be anything 68 | return False 69 | 70 | class SINEXBackEntryPoint(BackendEntrypoint): 71 | url="https://github.com/ITC-Water-Resources/shxarray" 72 | description = "Read normal equation systems in SINEX format" 73 | def open_dataset(self,filename_or_obj,*,drop_variables=None): 74 | if drop_variables is not None and "N" in drop_variables: 75 | #Special case: it is much quicker to abandon reading when a matrix is present and not needed 76 | dsout=read_sinex(filename_or_obj,stopatmat=True) 77 | drop_variables 78 | elif drop_variables is not None: 79 | dsout=read_sinex(filename_or_obj) 80 | dsout=dsout.drop_vars(drop_variables) 81 | else: 82 | dsout=read_sinex(filename_or_obj) 83 | 84 | return dsout 85 | 86 | def guess_can_open(self,filename_or_obj): 87 | try: 88 | strrep=str(filename_or_obj).lower() 89 | # search for usual file naming of SINEX files 90 | if strrep.endswith('.snx') or strrep.endswith('.snx.gz'): 91 | #Found a file name which probably is a sinex file 92 | return True 93 | except AttributeError: 94 | return False 95 | 96 | return False 97 | 98 | ## NOte: this currently does not work (xarray changes the underlying sparse array) 99 | class DDKBackEntryPoint(BackendEntrypoint): 100 | url="https://github.com/ITC-Water-Resources/shxarray" 101 | description = "Read spherical harmonic filter coefficients in legacy BINV format" 102 | def open_dataset(self,filename_or_obj,*,drop_variables=None): 103 | dsout=readBINV(filename_or_obj) 104 | breakpoint() 105 | if drop_variables is not None: 106 | dsout=dsout.drop_vars(drop_variables) 107 | return dsout 108 | 109 | def guess_can_open(self,filename_or_obj): 110 | try: 111 | strrep=str(filename_or_obj) 112 | # search for conventional file naming of DDK files 113 | if strrep.startswith('Wbd_2-120'): 114 | #known anisotropic DDK filter matrix 115 | return True 116 | except AttributeError: 117 | return False 118 | 119 | return False 120 | -------------------------------------------------------------------------------- /src/shxarray/kernels/__init__.py: -------------------------------------------------------------------------------- 1 | from .axial import * 2 | from .factory import getSHfilter 3 | 4 | -------------------------------------------------------------------------------- /src/shxarray/kernels/anisokernel.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | 6 | import xarray as xr 7 | from shxarray.core.logging import shxlogger 8 | from shxarray.shlib import Ynm 9 | from shxarray.core.sh_indexing import SHindexBase 10 | import sparse 11 | import numpy as np 12 | from packaging import version 13 | 14 | 15 | 16 | 17 | 18 | class AnisoKernel: 19 | """ 20 | Provides functionality to work with anisotropic spherical harmonic kernels 21 | """ 22 | attr={"shtype":"shaniso"} 23 | def __init__(self,dsobj,name="aniso",truncate=True): 24 | self._dskernel=dsobj 25 | self.name=name 26 | self.truncate=truncate 27 | self.useDask=version.parse(xr.__version__) < version.parse('2023.11.0') 28 | if self.useDask: 29 | from dask.array.core import einsum_lookup 30 | #Register the einsum functions which are needed to do the sparse dot functions (for earlier versions of xarray) 31 | einsum_lookup.register(sparse.COO,AnisoKernel.daskeinsumReplace) 32 | #convert to xarray with dask structure 33 | self._dskernel=self._dskernel.chunk() 34 | 35 | @property 36 | def nmax(self): 37 | return self._dskernel.sh.nmax 38 | 39 | @property 40 | def nmin(self): 41 | return self._dskernel.sh.nmin 42 | 43 | 44 | def __call__(self,dain:xr.DataArray): 45 | if SHindexBase.name not in dain.indexes: 46 | 47 | raise RuntimeError("SH harmonic index not found in input, cannot apply kernel operator to object") 48 | 49 | if self.nmax < dain.sh.nmax: 50 | raise RuntimeError("Input data has higher degree than kernel, cannot apply kernel operator to object") 51 | 52 | if type(dain.data) != np.ndarray: 53 | dain=dain.compute() 54 | 55 | daout=xr.dot(self._dskernel.mat,dain,dims=[SHindexBase.name]) 56 | 57 | #rename nm and convert to dense array 58 | daout=daout.sh.toggle_nm() 59 | if self.useDask: 60 | daout=xr.DataArray(daout.compute().data,coords=daout.coords,name=self.name) 61 | else: 62 | if hasattr(daout.data,'todense'): 63 | #still needs expanding 64 | daout=xr.DataArray(daout.data.todense(),coords=daout.coords,name=self.name) 65 | else: 66 | #just rename 67 | daout.name=self.name 68 | 69 | if not self.truncate and self.nmin > dain.sh.nmin: 70 | #also add the unfiltered lower degree coefficients back to the results 71 | daout=xr.concat([dain.sh.truncate(self.nmin-1,dain.sh.nmin),daout],dim=SHindexBase.name) 72 | 73 | if daout.sh.nmax == dain.sh.nmax and daout.sh.nmin == dain.sh.nmin: 74 | #resort to original order when output nmax and nmin agree 75 | daout=daout.sel({SHindexBase.name:dain.nm}) 76 | 77 | return daout 78 | 79 | def position(self,lon,lat): 80 | """ 81 | Position this kernel on a specific location of the sphere 82 | :param lon: Longitude(s) in degrees of position (list like) 83 | :param lat: Latitude(s) in degrees of position (list-like) 84 | :return: A xarray.DataArray with the kernel located on the specified locations 85 | """ 86 | ynm=Ynm(self.nmax) 87 | ynmdata=ynm(lon,lat) 88 | # scale elements by 1/(2n+1) 89 | normv=[2*n+1 for n in ynmdata.n.data] 90 | ynmdata=ynmdata/normv 91 | 92 | return self.__call__(ynmdata) 93 | 94 | @staticmethod 95 | def daskeinsumReplace(subscripts, *operands, out=None, dtype=None, order='K', casting='safe', optimize=False): 96 | """Mimics the interface of https://numpy.org/doc/stable/reference/generated/numpy.einsum.html, but uses the sparse.COO dot function""" 97 | if subscripts == "ab,cb->ac": 98 | return operands[0].dot(operands[1].T) 99 | elif subscripts == "ab,ca->bc": 100 | return operands[0].T.dot(operands[1].T) 101 | elif subscripts == "ab,bc->ac": 102 | return operands[0].dot(operands[1]) 103 | elif subscripts == "ab,b->a": 104 | return operands[0].dot(operands[1]) 105 | elif subscripts == "ab,a->b": 106 | return operands[0].T.dot(operands[1]) 107 | elif subscripts == "ab,ac->bc": 108 | return operands[0].T.dot(operands[1]) 109 | 110 | else: 111 | raise NotImplementedError(f"Don't know (yet) how to handle this einsum: {subscripts} with sparse.dot operations") 112 | 113 | -------------------------------------------------------------------------------- /src/shxarray/kernels/axial.py: -------------------------------------------------------------------------------- 1 | # Axial symmetric expansions of unit and disk loads 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 5 | # 6 | 7 | 8 | from shxarray.kernels.isokernelbase import IsoKernelBase 9 | import xarray as xr 10 | import numpy as np 11 | from shxarray.shlib import Pn 12 | from math import cos, radians 13 | class Unit(IsoKernelBase): 14 | name="shunit" 15 | def __init__(self,nmax): 16 | """ 17 | Create an isotropic kernel representing a unit load 18 | :param nmax: maximum degree to resolve 19 | :return: A Isotropic Kernel object representing a unit load 20 | """ 21 | super().__init__() 22 | scales=[2*n+1 for n in range(nmax+1)] 23 | self._dsiso=xr.DataArray(scales,coords={"n":("n",range(nmax+1))},dims=["n"]) 24 | 25 | class Disk(IsoKernelBase): 26 | name="shdisk" 27 | def __init__(self,nmax,psi): 28 | """ 29 | Create an isotropic kernel representing a disk load 30 | :param nmax: maximum degree to resolve 31 | :param psi: disk size in angular degrees 32 | :return: A Isotropic Kernel object representing a disk load 33 | """ 34 | super().__init__() 35 | if psi is None: 36 | raise RuntimeError("psi must be defined for a disk load") 37 | self.psi=psi 38 | cospsi=np.cos(radians(psi)) 39 | legendre=Pn(nmax+1)(cospsi) 40 | scales=np.zeros([nmax+1]) 41 | scales[0]=(1-cospsi)/2 42 | for n in range(1,nmax+1): 43 | scales[n]=(legendre[n-1]-legendre[n+1])/2 44 | 45 | self._dsiso=xr.DataArray(scales,coords={"n":("n",range(nmax+1))},dims=["n"]) 46 | 47 | class ParabolicCap(IsoKernelBase): 48 | name="shcap" 49 | def __init__(self,nmax,psi): 50 | """ 51 | Create an isotropic kernel representing a Parabolic Cap 52 | :param nmax: maximum degree to resolve 53 | :param psi: Cap size in angular degrees 54 | :return: A Isotropic Kernel object representing a parabolic cap 55 | """ 56 | super().__init__() 57 | if psi is None: 58 | raise RuntimeError("psi must be defined for a Parabolic cap") 59 | self.psi=psi 60 | psi=radians(psi) 61 | cospsi=cos(psi) 62 | scales=np.zeros([nmax+1]) 63 | scales[0]=(1-cospsi)/3 64 | for n in range(1,nmax+1): 65 | scales[n]=((cos((n-1)*psi)-cos(n*psi))/(n-0.5)-(cos((n+1)*psi)-cos((n+2)*psi))/(n+1.5))/((1.0-cos(psi))*4.0) 66 | 67 | self._dsiso=xr.DataArray(scales,coords={"n":("n",range(nmax+1))},dims=["n"]) 68 | 69 | 70 | -------------------------------------------------------------------------------- /src/shxarray/kernels/ddk.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2024 4 | # 5 | 6 | from shxarray.core.admin import defaultcache 7 | import requests 8 | import pandas as pd 9 | from shxarray.io.binv_legacy import readBINV 10 | from shxarray.kernels.anisokernel import AnisoKernel 11 | import os 12 | 13 | 14 | 15 | def load_catalogue(): 16 | catalogfile=os.path.join(defaultcache('grace-filter'),'inventory.xlsx') 17 | if not os.path.exists(catalogfile): 18 | #dowload from github 19 | url="https://github.com/strawpants/GRACE-filter/raw/master/inventory.xlsx" 20 | r=requests.get(url) 21 | with open(catalogfile,'wb') as fid: 22 | fid.write(r.content) 23 | 24 | dfcat=pd.read_excel(catalogfile,index_col='name') 25 | return dfcat 26 | 27 | 28 | def load_ddk(ddkversion,trans=False,nmax=-1,truncate=True): 29 | 30 | dfcat=load_catalogue() 31 | url=dfcat.loc[ddkversion].uri 32 | ddkfile=os.path.join(defaultcache('grace-filter'),os.path.basename(url)) 33 | 34 | if not os.path.exists(ddkfile): 35 | #dowload from github 36 | r=requests.get(url) 37 | with open(ddkfile,'wb') as fid: 38 | fid.write(r.content) 39 | df=readBINV(ddkfile,trans,nmax) 40 | 41 | return AnisoKernel(df,name=ddkversion,truncate=truncate) 42 | 43 | 44 | 45 | 46 | 47 | -------------------------------------------------------------------------------- /src/shxarray/kernels/factory.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2025 4 | # 5 | 6 | from shxarray.kernels.ddk import load_ddk 7 | from shxarray.kernels.gauss import Gaussian 8 | 9 | def getSHfilter(filtername,nmax,**kwargs): 10 | """ 11 | Retrieve the (aniso) tropic kernel for known SH filters 12 | Parameters 13 | ---------- 14 | filtername : str 15 | e.g. Gauss200 for a Gaussian filter with a halfwidth of 200 km, DDK5 for a DDK filter with sharpness 5 16 | 17 | nmax : int 18 | maximum degree of the filter to accomodate 19 | 20 | **kwargs : dict 21 | additional keyword arguments to pass to the filter 22 | 23 | 24 | Returns 25 | ------- 26 | A kernel filter operator 27 | 28 | """ 29 | if filtername.startswith('DDK'): 30 | #load a dedicated DDK filter 31 | if "transpose" in kwargs: 32 | trans=kwargs["transpose"] 33 | else: 34 | trans=False 35 | if "truncate" in kwargs: 36 | truncate=kwargs["truncate"] 37 | else: 38 | truncate=True 39 | kernel=load_ddk(filtername,trans,nmax,truncate) 40 | elif filtername.startswith('Gauss'): 41 | if "halfwidth" in kwargs: 42 | radius=kwargs["halfwidth"] 43 | else: 44 | try: 45 | radius=int(filtername[5:]) 46 | except: 47 | raise RuntimeError("Cannot parse the Gaussian halfwidth in km.\n Specify either 'Gaussxxx'or add halfwidth=xxx to the sh.filter call") 48 | kernel=Gaussian(nmax,radius*1e3) 49 | else: 50 | raise RuntimeError(f"SH Filter {filtername} not recognized") 51 | 52 | return kernel 53 | -------------------------------------------------------------------------------- /src/shxarray/kernels/gauss.py: -------------------------------------------------------------------------------- 1 | # Smoothing kernels 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 5 | 6 | 7 | from shxarray.kernels.isokernelbase import IsoKernelBase 8 | import numpy as np 9 | from shxarray.earth.constants import a_earth 10 | from math import cos,log,exp 11 | import xarray as xr 12 | class Gaussian(IsoKernelBase): 13 | name="gauss" 14 | def __init__(self,nmax,halfwidth): 15 | """ 16 | Create an isotropic kernel representing a unit load. According to Wahr et al 1998 with a cutoff criteria to avoid numerical instability 17 | :param nmax: maximum degree to resolve 18 | :param halfwidth: Halfwidth of the Gaussian kernel in m (at the Earth's surface) 19 | :return: A Isotropic Kernel object representing a unit load 20 | """ 21 | super().__init__() 22 | self.halfwidth=halfwidth 23 | arg=log(2.0)/(1-cos(halfwidth/a_earth)) 24 | exparg=exp(-2*arg) 25 | 26 | wn=np.zeros([nmax+1]) 27 | wn[0]=1 28 | wn[1]=(1+exparg)/(1-exparg) -1/arg 29 | for n in range(1,nmax): 30 | wn[n+1]= wn[n-1] -(2*n+1)/arg*wn[n] 31 | if wn[n+1] < 1e-8: 32 | break 33 | self._dsiso=xr.DataArray(wn,coords={"n":("n",range(nmax+1))},dims=["n"]) 34 | -------------------------------------------------------------------------------- /src/shxarray/kernels/gravfunctionals.py: -------------------------------------------------------------------------------- 1 | # Gravity functionals 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 5 | # 6 | 7 | 8 | 9 | 10 | 11 | from shxarray.kernels.isokernelbase import IsoKernelBase 12 | 13 | from shxarray.earth.constants import a_earth,rho_water,rho_earth,rho_sea 14 | from shxarray.earth.snrei import SnreiFactory 15 | import xarray as xr 16 | import numpy as np 17 | 18 | class Stokes2TWS(IsoKernelBase): 19 | """Provides an isotropic kernel representing the transformation of Stokes coefficients [-] to equivalent water height [m]""" 20 | name="stokes2tws" 21 | transform=("stokes","tws") 22 | def __init__(self,knLove=None,nmax=None): 23 | super().__init__() 24 | if knLove is None: 25 | #retrieve the default 26 | knLove=SnreiFactory.load(nmax=nmax).kn 27 | 28 | self._dsiso=(2*knLove.n+1)*rho_earth*a_earth/(3*rho_water*(1+knLove)) 29 | 30 | class Stokes2Geoid(IsoKernelBase): 31 | """Provides an isotropic kernel representing the transformation of disturbing potential to geoid height in meter, using Brun's formula""" 32 | name="stoked2geoid" 33 | transform=("stokes","geoid") 34 | def __init__(self,nmax): 35 | super().__init__() 36 | self._dsiso=a_earth*xr.DataArray(np.ones([nmax+1]),dims=['n'],coords=dict(n=np.arange(nmax+1))) 37 | 38 | class Load2Geoid(IsoKernelBase): 39 | """Provides an isotropic kernel representing the transformation of a surface load (in m) to geoid height in meter""" 40 | name="load2geoid" 41 | transform=("load","geoid") 42 | def __init__(self,knLove=None,nmax=None,deg0scale=None): 43 | super().__init__() 44 | if knLove is None: 45 | #retrieve the default 46 | knLove=SnreiFactory.load(nmax=nmax,deg0scale=deg0scale).kn 47 | 48 | self._dsiso=(3*rho_water/rho_earth)*((1+knLove)/(2*knLove.n+1)) 49 | if deg0scale is not None: 50 | self._dsiso.loc[0]=deg0scale 51 | 52 | class Load2Uplift(IsoKernelBase): 53 | """Provides an isotropic kernel representing the transformation of surface load (in m) to elastic uplift in meter""" 54 | name="load2uplift" 55 | transform=("load","uplift") 56 | def __init__(self,hnLove=None,nmax=None,deg0scale=None): 57 | super().__init__() 58 | if hnLove is None: 59 | #retrieve the default 60 | hnLove=SnreiFactory.load(nmax=nmax,deg0scale=deg0scale).hn 61 | self._dsiso=(3*rho_water/rho_earth)*(hnLove/(2*hnLove.n+1)) 62 | 63 | gravclasses=[Stokes2TWS,Load2Geoid,Load2Uplift,Stokes2Geoid] 64 | 65 | gravlookup={cls.transform:cls for cls in gravclasses} 66 | 67 | def gravFunc(fromType,toType,**kwargs): 68 | """Computes a kernel to transform of one gravitational function into another""" 69 | transtype=(fromType,toType) 70 | if transtype in gravlookup: 71 | return gravlookup[transtype](**kwargs) 72 | elif transtype[::-1] in gravlookup: 73 | #inverse version is found 74 | return gravlookup[transtype[::-1]](**kwargs).inv() 75 | 76 | raise RuntimeError(f"No gravity transfer function available for transform {fromType}->{toType}") 77 | 78 | -------------------------------------------------------------------------------- /src/shxarray/kernels/isokernelbase.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 4 | # 5 | 6 | import xarray as xr 7 | from shxarray.core.logging import shxlogger 8 | from scipy.sparse import diags 9 | from shxarray.shlib import Ynm 10 | from shxarray.core.cf import get_cfatts 11 | from shxarray.core.sh_indexing import SHindexBase 12 | 13 | class IsoKernelBase: 14 | """ 15 | Provides functionality to work with isotropic spherical harmonic kernels 16 | """ 17 | attr={"shtype":"shiso","kernelstate":"collapsed"} 18 | name="shkernel" 19 | transform=None 20 | def __init__(self,dsiso=None,name=None,transform=None): 21 | self._dsiso=dsiso 22 | if name is not None: 23 | self.name=name 24 | if transform is not None: 25 | self.transform=transform 26 | 27 | @property 28 | def nmax(self): 29 | return self._dsiso.n.max().item() 30 | 31 | @property 32 | def nmin(self): 33 | return self._dsiso.n.min().item() 34 | 35 | def expanddiag(self,shindex): 36 | nmin=shindex.n.min().item() 37 | nmax=shindex.n.max().item() 38 | nminsup= self._dsiso.n.min().item() 39 | nmaxsup= self._dsiso.n.max().item() 40 | if nmin < nminsup or nmax > nmaxsup: 41 | raise RuntimeError(f"Requested kernel operation is only supported for degrees {nminsup} < = n <= {nmaxsup}") 42 | 43 | if self._dsiso.n.diff(dim="n").max().item() > 1: 44 | shxlogger.info("Some degrees are missing in the kernel, interpolating") 45 | coeff=self._dsiso.interp(n=shindex.n) 46 | else: 47 | coeff=self._dsiso.sel(n=shindex.n) 48 | 49 | return xr.DataArray(coeff.data,coords=dict(nm=shindex)) 50 | 51 | 52 | def jacobian(self,shindex): 53 | # create a sparse diagnonal array 54 | return diags(self.expanddiag(shindex).values) 55 | 56 | def __call__(self,dain:xr.DataArray): 57 | #create the jacobian matrix based on the input maximum and minimum degrees 58 | if SHindexBase.name not in dain.indexes: 59 | raise RuntimeError("Spherical harmonic index not found in input, cannot apply kernel operator to object") 60 | #expand kernel to the same degrees as the input 61 | daexpand=self.expanddiag(dain.nm) 62 | daout=dain*daexpand 63 | if self.transform is not None: 64 | name=self.transform[1] 65 | else: 66 | name=self.name 67 | try: 68 | #try to update the dataarray attributes 69 | daout.attrs.update(get_cfatts(name)) 70 | except: 71 | pass 72 | return daout.rename(name) 73 | 74 | def inv(self): 75 | """Returns the inverse of the isotropic Kernel""" 76 | invkernel=IsoKernelBase(1/self._dsiso,name=f'inv({self.name}',transform=(self.transform[::-1])) 77 | return invkernel 78 | 79 | def position(self,lon,lat): 80 | """ 81 | Position this kernel on a specific location of the sphere 82 | :param lon: Longitude(s) in degrees of position (list like) 83 | :param lat: Latitude(s) in degrees of position (list-like) 84 | :return: A xarray.DataArray with the kernel located on the specified locations 85 | """ 86 | ynm=Ynm(self.nmax) 87 | ynmdata=ynm(lon,lat) 88 | # scale elements by 1/(2n+1) 89 | normv=[2*n+1 for n in ynmdata.n.data] 90 | ynmdata=ynmdata/normv 91 | 92 | return self.__call__(ynmdata) 93 | 94 | def Greensfunc(self,theta): 95 | """ 96 | Map an isotropic kernel to a 1D Greens function (as a function of distance from the center) 97 | :param theta: discretized isotropic distance in degrees 98 | :return: A xarray.DataArray with the 1-D Greens function in the spatial domain 99 | """ 100 | pass 101 | 102 | def plot(self,ax=None,**kwargs): 103 | """ 104 | Plot the isotropic kernel as a function of degree 105 | 106 | Parameters 107 | ---------- 108 | ax : matplotlib axis object, optional 109 | The axis to plot the kernel on. If None, a new axis is created. 110 | 111 | **kwargs: additional keyword arguments for plotting 112 | 113 | 114 | Returns 115 | ------- 116 | ax : matplotlib axis object 117 | 118 | """ 119 | 120 | lplt=self._dsiso.plot(ax=ax,**kwargs) 121 | if ax is None: 122 | ax=lplt[0].axes 123 | 124 | ax.set_title(f"{self.name} Kernel") 125 | ax.set_xlabel("Degree") 126 | ax.set_ylabel("Kernel Coefficient") 127 | return ax 128 | -------------------------------------------------------------------------------- /src/shxarray/signal/basinav.py: -------------------------------------------------------------------------------- 1 | # This file is part of the shxarray software which is licensed 2 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 3 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2025 4 | # 5 | 6 | import xarray as xr 7 | from shxarray.kernels import getSHfilter 8 | from shxarray.signal.leakage_vishwa import leakage_corr_vishwa2016,delta_leakage_corr_vishwa2017 9 | 10 | class Basinav: 11 | def __init__(self, dabasins,filtername=None,leakage_corr=None): 12 | 13 | if leakage_corr is not None and leakage_corr not in ['scale','vishwa2016','vishwa2017']: 14 | raise RuntimeError(f"Leakage correction method {leakage_corr} not recognized") 15 | self._leakage_corr=leakage_corr 16 | self._dabin = dabasins 17 | self._filtername=filtername 18 | 19 | 20 | def __call__(self, datws,**kwargs): 21 | nmax=datws.sh.nmax 22 | if self._filtername is not None: 23 | filterOp = getSHfilter(self._filtername,nmax=nmax,transpose=True) 24 | dabin_f = filterOp(self._dabin.sh.truncate(nmax=nmax)) 25 | else: 26 | #just truncate to the same nmax as the input 27 | dabin_f=self._dabin.sh.truncate(nmax) 28 | 29 | dabin=self._dabin.sh.truncate(nmax) 30 | 31 | #compute the unscaled basin average 32 | da_av=(dabin_f@datws)/dabin.sel(n=0,m=0) 33 | 34 | if self._leakage_corr in ['scale','vishwa2016']: 35 | dascales = dabin.dot(dabin,dim='nm')/dabin.dot(dabin_f,dim='nm') 36 | else: 37 | dascales=None 38 | 39 | if "engine" in kwargs: 40 | engine=kwargs["engine"] 41 | else: 42 | engine='shlib' 43 | 44 | if self._leakage_corr == 'scale': 45 | 46 | da_av=da_av*dascales 47 | elif self._leakage_corr == "vishwa2016": 48 | leakage=leakage_corr_vishwa2016(datws, dabin, self._filtername,engine=engine) 49 | da_av=(da_av-leakage)*dascales 50 | elif self._leakage_corr == "vishwa2017": 51 | leakage=leakage_corr_vishwa2016(datws, dabin, self._filtername,engine=engine) 52 | deltaleakage=delta_leakage_corr_vishwa2017(datws, dabin, self._filtername,engine=engine) 53 | #note: no scaling applied 54 | da_av=(da_av-leakage-deltaleakage) 55 | # da_av=(da_av-leakage) 56 | 57 | return da_av.drop_vars(['n','m','nm']) 58 | 59 | -------------------------------------------------------------------------------- /tests/test_analysis.py: -------------------------------------------------------------------------------- 1 | # Test spherical harmonic analysis 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2025 5 | 6 | 7 | 8 | from fixtures import shcapsvalidation as validation 9 | from fixtures import grdinput 10 | 11 | 12 | tol=1e-7 13 | 14 | def test_analysis_shlib(grdinput,validation): 15 | """ Test the SH analysis to see if an acceptable sh is computued""" 16 | nmax=validation.sh.nmax 17 | dsgrd=grdinput 18 | checkdata=validation 19 | dscheck=dsgrd.sh.analysis(nmax) 20 | 21 | #ok we need to rename the input dimension time 22 | dadiff=checkdata-dscheck#.rename(time='npoints') 23 | print("Checking whether retrieved SH solution is within tolerance") 24 | maxdiff=max(abs(dadiff.max()),abs(dadiff.min())) 25 | assert(maxdiff < tol) 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /tests/test_basic_ops.py: -------------------------------------------------------------------------------- 1 | # Test some basic operations of xarray objects filled with spherical harmonic datasets 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 5 | 6 | 7 | import pytest 8 | import shxarray 9 | from shxarray.core.sh_indexing import SHindexBase 10 | from shxarray.core.logging import shxlogger 11 | import numpy as np 12 | 13 | import xarray as xr 14 | from datetime import datetime,timedelta 15 | import time 16 | 17 | @pytest.fixture 18 | def sh_sample1(): 19 | startdate=datetime(2023,12,8) 20 | daterange= [startdate - timedelta(days=dy) for dy in range(60)] 21 | nmax=30 22 | auxcoords={"time":daterange} 23 | da=xr.DataArray.sh.ones(nmax,auxcoords=auxcoords) 24 | #make coefficients decline with degree 25 | power=2 26 | degscale=xr.DataArray([1/((n+1)**power) for n in da.n],dims="nm") 27 | return nmax,0,da*degscale 28 | 29 | 30 | @pytest.fixture 31 | def sh_truncated(sh_sample1): 32 | _,_,dasample=sh_sample1 33 | nmax2=28 34 | nmin2=3 35 | datrunc=dasample.sh.truncate(nmax2,nmin2) 36 | return nmax2,nmin2,datrunc 37 | 38 | def test_truncate(sh_truncated): 39 | nmax2,nmin2,datrunc=sh_truncated 40 | assert datrunc.sh.nmax == nmax2, "nmax of truncated DataArray does not agree with expectation" 41 | assert datrunc.sh.nmin == nmin2, "nmin of truncated DataArray does not agree with expectation" 42 | assert len(datrunc.nm) == SHindexBase.nsh(nmax2,nmin2,squeeze=True), "Size of truncated DataArray does not agree with expectation" 43 | 44 | 45 | def test_add_sub(sh_sample1,sh_truncated): 46 | nmax,nmin,dasample=sh_sample1 47 | nmax2,nmin2,datrunc=sh_truncated 48 | da_add=datrunc + 2*dasample 49 | assert da_add.sh.nmin == nmin2, "Nmin of adding results is not consistent" 50 | assert da_add.sh.nmax == nmax2, "Nmax of adding results is not consistent" 51 | assert da_add.sum().item() == 3*np.prod(da_add.shape), "Adding operation not consistent" 52 | 53 | da_sub=dasample - 2* datrunc 54 | assert da_sub.sh.nmin == nmin2, "Nmin of subtracting results is not consistent" 55 | assert da_sub.sh.nmax == nmax2, "Nmax of subtracting results is not consistent" 56 | assert da_sub.sum().item() == -np.prod(da_sub.shape), "Adding operation not consistent" 57 | 58 | 59 | @pytest.mark.skip(reason="Not yet properly implemented") 60 | def test_multiply(sh_sample1,sh_truncated): 61 | 62 | nmax1,nmin,dasample=sh_sample1 63 | nmax2,nmin2,datrunc=sh_truncated 64 | # #truncate 65 | # dasample=dasample.sh.truncate(nmax1) 66 | # datrunc=datrunc.sh.truncate(nmax2) 67 | 68 | #multiply by 2 69 | datrunc*=2 70 | # t0=time.time() 71 | # daout=dasample.sh.multiply(datrunc,engine="shtns") 72 | t1=time.time() 73 | 74 | # shxlogger.info(f"Time to multiply with shtns {t1-t0:.2f} seconds") 75 | # breakpoint() 76 | daout2=dasample.sh.multiply(datrunc,engine="shlib",method="spatial") 77 | t2=time.time() 78 | shxlogger.info(f"Time to multiply with shlib spatial {t2-t1:.2f} seconds") 79 | 80 | # daout=dasample.sh.multiply(datrunc,engine="exp") 81 | # breakpoint() 82 | 83 | daout3=dasample.sh.multiply(datrunc,engine="shlib",method="spectral") 84 | t3=time.time() 85 | shxlogger.info(f"Time to multiply with shlib (spectral) {t3-t2:.2f} seconds") 86 | 87 | # dsout=daout.to_dataset(name='shtns') 88 | # dsout['shlibspat']=daout2 89 | # dsout['shlibspec']=daout3 90 | # dsout.sh.synthesis().to_netcdf('tests/testdata/mult_test.nc',mode='w') 91 | 92 | 93 | assert np.allclose(daout,daout2.sh.truncate(nmax2),atol=1e-13), "Results of multiply with different engines do not agree" 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | -------------------------------------------------------------------------------- /tests/test_basinav.py: -------------------------------------------------------------------------------- 1 | # Test spherical harmonic analysis 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2025 5 | 6 | 7 | 8 | from fixtures import basin_sim_data 9 | from shxarray.signal.basinav import Basinav 10 | import numpy as np 11 | 12 | 13 | 14 | 15 | def test_sh_basinav(basin_sim_data): 16 | """ Test (spectral) basin averaging routines""" 17 | filtername='Gauss500' 18 | # filtername='DDK1' 19 | #retrieve the basin average on the simulated data 20 | nmax=int(basin_sim_data.basin_sh.sh.nmax/2) 21 | datws=basin_sim_data.tws.sh.truncate(nmax) 22 | engine='shtns' #shtns is faster, but needs to be installed separately 23 | engine='shlib' 24 | tws_av=datws.sh.basinav(basin_sim_data.basin_sh,filtername,leakage_corr='scale') 25 | 26 | tws_av_no_scale=datws.sh.basinav(basin_sim_data.basin_sh,filtername) 27 | tws_av_nofilt=datws.sh.basinav(basin_sim_data.basin_sh,leakage_corr='scale') 28 | tws_av_vishwa2016=datws.sh.basinav(basin_sim_data.basin_sh,filtername,leakage_corr='vishwa2016',engine=engine) 29 | # import matplotlib.pyplot as plt 30 | # ds=basin_sim_data.basin_sh.sum('basins').sh.synthesis(engine=engine) 31 | # ds.plot() 32 | # plt.show() 33 | # breakpoint() 34 | # fig,axs=plt.subplots(3,1,sharex=True) 35 | # for ibas in range(3): 36 | # axs[ibas].plot(basin_sim_data.time,basin_sim_data.basin_avs[:,ibas],label="Truth") 37 | # axs[ibas].plot(tws_av.time,tws_av[ibas,:],label="scaled") 38 | # axs[ibas].plot(tws_av_no_scale.time,tws_av_no_scale[ibas,:],label="no scale") 39 | # axs[ibas].plot(tws_av_vishwa2016.time,tws_av_vishwa2016[ibas,:],label="Vishwakarma2016") 40 | # axs[ibas].plot(tws_av_nofilt.time,tws_av_nofilt[ibas,:],label="No filter") 41 | # axs[ibas].legend() 42 | # plt.show() 43 | 44 | #The unfiltered basin average should be close to the truth 45 | rtol=0.03 46 | for i in range(basin_sim_data.dims['basins']): 47 | atol=0.03*np.abs(basin_sim_data.basin_avs[:,i]).max().item() 48 | closeenough=np.allclose(tws_av_nofilt[i,:],basin_sim_data.basin_avs[:,i],rtol=rtol,atol=atol) 49 | assert closeenough 50 | 51 | 52 | #The filtered basin averages can be off by a much larger amount, so we're just going to check if the values are not outrageously off 53 | 54 | rtol=0.5 55 | for i in range(basin_sim_data.dims['basins']): 56 | atol=0.5*np.abs(basin_sim_data.basin_avs[:,i]).max().item() 57 | closeenough=np.allclose(tws_av_no_scale[i,:],basin_sim_data.basin_avs[:,i],rtol=rtol,atol=atol) 58 | assert closeenough 59 | closeenough=np.allclose(tws_av[i,:],basin_sim_data.basin_avs[:,i],rtol=rtol,atol=atol) 60 | assert closeenough 61 | 62 | closeenough=np.allclose(tws_av_vishwa2016[i,:],basin_sim_data.basin_avs[:,i],rtol=rtol,atol=atol) 63 | assert closeenough 64 | 65 | 66 | 67 | 68 | -------------------------------------------------------------------------------- /tests/test_filters.py: -------------------------------------------------------------------------------- 1 | # Test DDK filtering of spherical harmonic datasets 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2024 5 | 6 | 7 | 8 | import pytest 9 | import xarray as xr 10 | import os 11 | import numpy as np 12 | 13 | 14 | #relative error 15 | tol=1e-11 16 | 17 | 18 | # #low degree testing (quicker) 19 | # nmax=20 20 | 21 | 22 | # Validation data set ( see https://github.com/strawpants/GRACE-filter/tree/master/tests) 23 | @pytest.fixture 24 | def shinput(): 25 | testdir=os.path.join(os.path.dirname(os.path.realpath(__file__)),'testdata') 26 | insh=os.path.join(testdir,"GSM-2_2008122-2008153_0030_EIGEN_G---_0004in.gz") 27 | ds=xr.open_dataset(insh,engine='shascii') 28 | return ds 29 | 30 | @pytest.fixture 31 | def shoutput(): 32 | testdir=os.path.join(os.path.dirname(os.path.realpath(__file__)),'testdata') 33 | outsh=os.path.join(testdir,"GSM-2_2008122-2008153_0030_EIGEN_G---_0004out.gz") 34 | ds=xr.open_dataset(outsh,engine='shascii') 35 | return ds 36 | 37 | @pytest.fixture 38 | def shoutputn60(): 39 | testdir=os.path.join(os.path.dirname(os.path.realpath(__file__)),'testdata') 40 | outsh=os.path.join(testdir,"GSM-2_2008122-2008153_0030_EIGEN_G---_0004lmax60out.gz") 41 | ds=xr.open_dataset(outsh,engine='shascii') 42 | return ds 43 | 44 | 45 | 46 | 47 | def test_ddkbasic(shinput,shoutput): 48 | """ Test the filtering of SH coefficients using an anisotropic ddk filter""" 49 | dacheck=shinput.cnm.sh.filter('DDK2') 50 | dadiff=(dacheck-shoutput.cnm)/shoutput.cnm 51 | maxdiff=max(abs(dadiff.max()),abs(dadiff.min())) 52 | assert(maxdiff < tol) 53 | 54 | 55 | def test_ddknmax60(shinput,shoutputn60): 56 | """ Test the filtering of a truncated set of SH coefficients using an anisotropic ddk filter""" 57 | shin=shinput.cnm.sh.truncate(60) 58 | dacheck=shin.sh.filter('DDK2') 59 | dadiff=(dacheck-shoutputn60.cnm)/shoutputn60.cnm 60 | maxdiff=max(abs(dadiff.max()),abs(dadiff.min())) 61 | assert(maxdiff < tol) 62 | 63 | def test_ddkmult(shinput,shoutput): 64 | """Test the filtering on a set (multiple) of SH coefficients using an anisotropic ddk filter""" 65 | 66 | nbasins=5 67 | 68 | basinscales=np.arange(nbasins) 69 | 70 | scales1=xr.DataArray(basinscales,coords={"basins":basinscales}) 71 | shinm=shinput.cnm*scales1 72 | 73 | shoutm=shoutput.cnm*scales1 74 | 75 | dacheck=shinm.sh.filter('DDK2') 76 | dadiff=(dacheck-shoutm)/shoutm 77 | maxdiff=max(abs(dadiff.max()),abs(dadiff.min())) 78 | reltol=3e-11 79 | assert(maxdiff < reltol) 80 | 81 | @pytest.fixture 82 | def shgausstest(): 83 | testdir=os.path.join(os.path.dirname(os.path.realpath(__file__)),'testdata') 84 | outsh=os.path.join(testdir,"gauss300testoutsub.sh.gz") 85 | ds=xr.open_dataset(outsh,engine='shascii') 86 | return ds 87 | 88 | def test_Gauss(shinput,shgausstest): 89 | shfilt=shinput.cnm.sh.filter("Gauss300") 90 | 91 | dadiff=(shgausstest.cnm-shfilt.loc[shgausstest.nm])/shgausstest.cnm 92 | maxdiff=max(abs(dadiff.max()),abs(dadiff.min())) 93 | reltol=1e-7 94 | assert(maxdiff < reltol) 95 | 96 | 97 | -------------------------------------------------------------------------------- /tests/test_prod2sum.py: -------------------------------------------------------------------------------- 1 | # Test Wigner 3j symbols and Gaunt 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 5 | 6 | 7 | 8 | import pytest 9 | import shxarray 10 | import xarray as xr 11 | import numpy as np 12 | import gzip 13 | import os 14 | 15 | 16 | @pytest.fixture 17 | def product2sumVal(): 18 | # """Create a on-disk dataset with product 2 sum matrix based on ocean coefficients""" 19 | nmax=10 20 | p2sumfile=os.path.join(os.path.dirname(__file__),f'testdata/P2Sum_ocean{nmax}.nc') 21 | if not os.path.exists(p2sumfile): 22 | print(f"Generating {p2sumfile} using rlftlbx") 23 | import requests 24 | import subprocess 25 | from shxarray.io.binv_legacy import readBINV 26 | oceurl="https://github.com/strawpants/geoshapes/raw/refs/heads/master/raster/ocean/ne_10m_oceansh_n300.nc" 27 | ocet=os.path.join("/tmp",os.path.basename(oceurl)) 28 | if not os.path.exists(ocet): 29 | with requests.Session() as s: 30 | r=s.get(oceurl) 31 | with open(ocet,'wb') as fid: 32 | fid.write(r.content) 33 | from io import StringIO,BytesIO 34 | #Load data and rewrite to a file with 35 | dsoce=xr.open_dataset(ocet).sh.truncate(2*nmax) 36 | ocesht='/tmp/oce.sh' 37 | with open(ocesht,'wt') as fid: 38 | oceascii=dsoce.oceansh.sh.to_ascii(fid) 39 | 40 | #Create the validation file using the RLFtlbx tool box 41 | p2sumtmp='/tmp/p2sum.bin' 42 | with open(p2sumtmp,'wb') as fid: 43 | outp=subprocess.run(['SH_prod2sum_mat_openmp',f"-l{nmax}",ocesht],capture_output=True) 44 | fid.write(outp.stdout) 45 | 46 | dsp2s=readBINV(p2sumtmp) 47 | 48 | #add original ocean function coefficients 49 | dsp2s['oceansh']=dsoce.rename(nm='nm_orig',n='n_orig',m='m_orig').oceansh 50 | #save to the p2sumfile 51 | dsp2s.reset_index(['nm','nm_']).to_netcdf(p2sumfile) 52 | else: 53 | dsp2s=xr.open_dataset(p2sumfile) 54 | 55 | 56 | return dsp2s 57 | 58 | 59 | 60 | def test_product2sum(product2sumVal): 61 | rtol=1e-8 62 | #generate Product to sum matrix (note result will have a nmax which is half the size of the input max degree 63 | dsocean=product2sumVal.oceansh.rename(nm_orig='nm',n_orig='n',m_orig='m').sh.build_nmindex() 64 | 65 | dsp2s=dsocean.sh.p2s().T 66 | daval=product2sumVal.mat.sh.build_nmindex().sh.build_nmindex('_').loc[dsp2s.nm,dsp2s.nm_] 67 | closeEnough=np.allclose(daval.data,dsp2s.data,rtol=rtol) 68 | 69 | # import pdb;pdb.set_trace() 70 | assert(closeEnough) 71 | 72 | 73 | 74 | 75 | -------------------------------------------------------------------------------- /tests/test_shformats.py: -------------------------------------------------------------------------------- 1 | # Test some basic operations of xarray objects filled with spherical harmonic datasets 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 5 | 6 | 7 | import pytest 8 | import shxarray 9 | import xarray as xr 10 | import os 11 | import numpy as np 12 | import time 13 | from shxarray.core.logging import shxlogger 14 | 15 | def test_icgem(): 16 | """Test ICGEM Backends for xarray""" 17 | #note gzipped version (should also work) 18 | icgem1f=os.path.join(os.path.dirname(__file__),'testdata/icgem_test_sig_ITSG.gfc.gz') 19 | dsicgem=xr.open_dataset(icgem1f,engine='icgem') 20 | nmax=60 21 | assert dsicgem.sh.nmax == nmax, "nmax of icgem input is not 60" 22 | #note some data are missing so we only expect 599 entries 23 | assert dsicgem.sizes['nm'] == 599, "Not all dat has been read" 24 | assert np.datetime64('2004-10-15T12:00:00.000000000') ==dsicgem.time.data[0], "Time epoch is off or not present" 25 | testcnm={(60,46):1.665788238155e-09} 26 | testsigcnm={(60,-46):2.363195963172e-12} 27 | for nm,cnm in testcnm.items(): 28 | assert cnm == dsicgem.cnm.loc[:,nm].item(),f"Coefficient {nm} {cnm} not mathching" 29 | 30 | for nm,scnm in testsigcnm.items(): 31 | assert scnm == dsicgem.sigcnm.loc[:,nm].item(),f"Sigma Coefficient {nm} {scnm} not mathching" 32 | 33 | 34 | 35 | 36 | def test_icgem_nosig(): 37 | """Test ICGEM Backend for xarray for file without sigmas""" 38 | icgem2f=os.path.join(os.path.dirname(__file__),'testdata/icgem_test_nosig_ITSG.gfc') 39 | dsicgem=xr.open_dataset(icgem2f,engine='icgem') 40 | nmax=2 41 | assert dsicgem.sh.nmax == nmax, "nmax of icgem input is not 2" 42 | assert dsicgem.sizes['nm'] == 9, "Not all dat has been read" 43 | assert np.datetime64('2013-11-15T12:00:00.000000000') ==dsicgem.time.data[0], "Time epoch is off or not present" 44 | testcnm={(2,0):-4.841696152100e-04,(1,1):0.0} 45 | for nm,cnm in testcnm.items(): 46 | assert cnm == dsicgem.cnm.loc[:,nm].item(),f"Coefficient {nm} {cnm} not mathching" 47 | 48 | 49 | @pytest.fixture 50 | def sinexval(): 51 | # some validation values based on prior visual inspection of sinex file 52 | #+SOLUTION/ESTIMATE 53 | # ... 54 | # 91 CN 9 -- 7 03:259:00000 ---- 2 -1.17979570334447e-07 1.01491e-12 55 | # 92 SN 9 -- 7 03:259:00000 ---- 2 -9.69270369824925e-08 1.02558e-12 56 | # .. 57 | #+SOLUTION/APRIORI 58 | #... 59 | # 25 CN 5 -- 2 03:259:00000 ---- 2 6.52120740523400e-07 0.00000e+00 60 | # 26 SN 5 -- 2 03:259:00000 ---- 2 -3.23349434999185e-07 0.00000e+00 61 | #.. 62 | #+SOLUTION/NORMAL_EQUATION_VECTOR 63 | #*INDEX _TYPE_ CODE PT SOLN _REF_EPOCH__ UNIT S ___RIGHT_HAND_SIDE___ 64 | #1 CN 2 -- 0 03:259:00000 ---- 2 -3.27811136401904e+12 65 | #2 CN 2 -- 1 03:259:00000 ---- 2 -4.32367855180938e+11 66 | #+SOLUTION/NORMAL_EQUATION_MATRIX U 67 | # .... 68 | # 7 7 2.92598076849103e+23 -3.55351048890917e+21 -3.78432268065340e+21 69 | # 7 10 -6.34992891236174e+21 2.34638506385098e+20 4.78761459861900e+21 70 | # ... 71 | valdata={} 72 | valdata["sol_est"]=[(9,7, -1.17979570334447e-07),(9,-7,-9.69270369824925e-08)] 73 | valdata["sol_std"]=[(9,7,1.01491e-12),(9,-7, 1.02558e-12)] 74 | valdata["apri_est"]=[(5,2,6.52120740523400e-07),(5,-2,-3.23349434999185e-07)] 75 | valdata["rhs"]=[(2,0,-3.27811136401904e+12),(2,1,-4.32367855180938e+11)] 76 | valdata["N"]=[(7,7, 2.92598076849103e+23),(7,8,-3.55351048890917e+21),(7,9,-3.78432268065340e+21),(7,10,-6.34992891236174e+21),(7,11,2.34638506385098e+20),(7,12,4.78761459861900e+21)] 77 | return valdata 78 | 79 | 80 | 81 | 82 | def test_sinex(sinexval): 83 | """ Test reading of Normal equation systems in SINEX format""" 84 | import requests 85 | url="https://ftp.tugraz.at/outgoing/ITSG/GRACE/ITSG-Grace2018/monthly/normals_SINEX/monthly_n96/ITSG-Grace2018_n96_2003-09.snx.gz" 86 | sinexfile=os.path.join(os.path.dirname(__file__),'testdata', os.path.basename(url)) 87 | 88 | if not os.path.exists(sinexfile): 89 | print(f"Downloading {sinexfile}...") 90 | r=requests.get(url) 91 | with open(sinexfile,'wb') as fid: 92 | fid.write(r.content) 93 | sinexfile=os.path.join(os.path.dirname(__file__),'testdata/ITSG-Grace2018_n96_2003-09.snx') 94 | #quick read which stops when encountering a normal matrix 95 | #Engine does not need to be specified because file corresponds to commonly used filename pattern for sinex 96 | start=time.time() 97 | dsneqsinex=xr.open_dataset(sinexfile,drop_variables=["N"]) 98 | end=time.time() 99 | shxlogger.info(f"Time to read sinex file without normal matrix {end-start:.2f} seconds") 100 | 101 | for var in ["sol_est","apri_est","sol_std","rhs"]: 102 | for n,m,val in sinexval[var]: 103 | assert val == dsneqsinex[var].sel(n=n,m=m).item() 104 | # assert dsneqsinex[var].sel(n= 105 | 106 | #read version with entire normal matrix 107 | start=time.time() 108 | dsneqsinex=xr.open_dataset(sinexfile,engine='sinex') 109 | end=time.time() 110 | shxlogger.info(f"Time to read sinex file with normal matrix {end-start:.2f} seconds") 111 | for ix,iy,val in sinexval["N"]: 112 | #note index ix,and iy are 1-indexed 113 | assert val == dsneqsinex.N[ix-1,iy-1].item() 114 | 115 | 116 | 117 | -------------------------------------------------------------------------------- /tests/test_synthesis.py: -------------------------------------------------------------------------------- 1 | # Test spherical harmonic synthesis 2 | # This file is part of the shxarray software which is licensed 3 | # under the Apache License version 2.0 (see the LICENSE file in the main repository) 4 | # Copyright Roelof Rietbroek (r.rietbroek@utwente.nl), 2023 5 | 6 | 7 | 8 | import pytest 9 | import shxarray 10 | import xarray as xr 11 | import numpy as np 12 | from fixtures import generategrddata,grdvalidation 13 | tol=1e-7 14 | 15 | @pytest.fixture 16 | def lon(): 17 | return np.arange(-180,180.01,60.0) 18 | 19 | 20 | @pytest.fixture 21 | def lat(): 22 | return np.arange(-60,60.01,60.0) 23 | 24 | def test_synthesis_full(generategrddata,lon,lat): 25 | dain,valdata=generategrddata 26 | dacheck=dain.sh.synthesis(lon,lat) 27 | #select a subset for checking the input 28 | dacheck=dacheck.sel(lon=valdata.lon,lat=valdata.lat) 29 | # # breakpoint() 30 | assert ((dacheck-valdata)/valdata).max().item() < tol 31 | 32 | def test_synthesis_truncated(generategrddata,lon,lat): 33 | dain,valdata=generategrddata 34 | dain=dain.sh.truncate(nmin=1) 35 | dacheck=dain.sh.synthesis(lon,lat) 36 | #select a subset for checking the input 37 | dacheck=dacheck.sel(lon=valdata.lon,lat=valdata.lat) 38 | 39 | #we can correct the validation data for ignoring n=0, m=0 which has Y00=1 for all positions on the globe 40 | valdata=valdata-dain.basins*dain.time 41 | # # breakpoint() 42 | assert ((dacheck-valdata)/valdata).max().item() < tol 43 | 44 | 45 | def test_synthesis_1d(generategrddata): 46 | dain,valdata=generategrddata 47 | #note: since valdata.lon and valdata.lat share the same dimensions analysis on the pairs is performed rather than on a grid 48 | dacheck=dain.sh.synthesis(valdata.lon,valdata.lat) 49 | 50 | assert ((dacheck-valdata)/valdata).max().item() < tol 51 | 52 | def test_synthesis_slice(generategrddata,lon,lat): 53 | # test synthesis for the case where th input data is a view of the original memory 54 | dain,valdata=generategrddata 55 | 56 | # select a subset from the input and validation 57 | bassl=slice(1,4) 58 | timesl=slice(1,3) 59 | if dain.get_axis_num('nm') == 2: 60 | dain=dain[bassl,timesl] 61 | else: 62 | dain=dain[:,timesl,bassl] 63 | 64 | valdata=valdata[:,timesl,bassl] 65 | 66 | dacheck=dain.sh.synthesis(lon,lat) 67 | #select a subset for checking the input 68 | dacheck=dacheck.sel(lon=valdata.lon,lat=valdata.lat) 69 | assert ((dacheck-valdata)/valdata).max().item() < tol 70 | 71 | -------------------------------------------------------------------------------- /tests/testdata/GSM-2_2008122-2008153_0030_EIGEN_G---_0004in.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/tests/testdata/GSM-2_2008122-2008153_0030_EIGEN_G---_0004in.gz -------------------------------------------------------------------------------- /tests/testdata/GSM-2_2008122-2008153_0030_EIGEN_G---_0004lmax60out.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/tests/testdata/GSM-2_2008122-2008153_0030_EIGEN_G---_0004lmax60out.gz -------------------------------------------------------------------------------- /tests/testdata/GSM-2_2008122-2008153_0030_EIGEN_G---_0004out.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/tests/testdata/GSM-2_2008122-2008153_0030_EIGEN_G---_0004out.gz -------------------------------------------------------------------------------- /tests/testdata/P2Sum_ocean10.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/tests/testdata/P2Sum_ocean10.nc -------------------------------------------------------------------------------- /tests/testdata/gauss300testoutsub.sh.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/tests/testdata/gauss300testoutsub.sh.gz -------------------------------------------------------------------------------- /tests/testdata/icgem_test_nosig_ITSG.gfc: -------------------------------------------------------------------------------- 1 | Monthly mean of c20 from SLR (CSR RL06) 2 | for details see 3 | http://ifg.tugraz.at/downloads/gravity-field-models/itsg-grace2018 4 | 5 | begin_of_head ======================================== 6 | 7 | modelname model_c20_2013-11 8 | product_type gravity_field 9 | earth_gravity_constant 3.9860044150e+14 10 | radius 6.3781363000e+06 11 | max_degree 2 12 | norm fully_normalized 13 | errors no 14 | 15 | key L M C S 16 | end_of_head ========================================== 17 | gfc 0 0 0.000000000000e+00 0.000000000000e+00 18 | gfc 1 0 0.000000000000e+00 0.000000000000e+00 19 | gfc 1 1 0.000000000000e+00 0.000000000000e+00 20 | gfc 2 0 -4.841696152100e-04 0.000000000000e+00 21 | gfc 2 1 0.000000000000e+00 0.000000000000e+00 22 | gfc 2 2 0.000000000000e+00 0.000000000000e+00 23 | -------------------------------------------------------------------------------- /tests/testdata/icgem_test_sig_ITSG.gfc.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/tests/testdata/icgem_test_sig_ITSG.gfc.gz -------------------------------------------------------------------------------- /tests/testdata/shanalysis-test-paracap-n200.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/tests/testdata/shanalysis-test-paracap-n200.nc -------------------------------------------------------------------------------- /tests/testdata/sympy_gauntvalidation.pkl.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/tests/testdata/sympy_gauntvalidation.pkl.gz -------------------------------------------------------------------------------- /tests/testdata/sympy_realgauntvalidation.pkl.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/tests/testdata/sympy_realgauntvalidation.pkl.gz -------------------------------------------------------------------------------- /tests/testdata/sympy_wigner3jvalidation.pkl.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ITC-Water-Resources/shxarray/72aa3a0ff47fc0a075e79c28f8dd203d9951c725/tests/testdata/sympy_wigner3jvalidation.pkl.gz --------------------------------------------------------------------------------