├── .github ├── dependabot.yml └── workflows │ ├── cibuildwheel.yml │ ├── deploy-docs.yml │ ├── pre-commit.yml │ ├── test_code_generation.yml │ └── tests.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .zenodo.json ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── docs ├── Makefile ├── _static │ └── readme_static.txt ├── conf.py ├── conversions.rst ├── density.rst ├── energy.rst ├── freezing.rst ├── geostrophy.rst ├── gsw.rst ├── gsw_flat.rst ├── ice.rst ├── index.rst ├── install.rst ├── intro.rst └── stability.rst ├── gsw ├── __init__.py ├── _fixed_wrapped_ufuncs.py ├── _utilities.py ├── _wrapped_ufuncs.py ├── conversions.py ├── density.py ├── energy.py ├── freezing.py ├── geostrophy.py ├── ice.py ├── interpolation.py ├── stability.py ├── tests │ ├── _WIP_test_ufuncs.py │ ├── check_functions.py │ ├── geo_strf_dyn_height.npy │ ├── geo_strf_velocity.npy │ ├── gsw_check_functions_save.m │ ├── gsw_cv_v3_0.npz │ ├── list_check_functions.py │ ├── test_check_functions.py │ ├── test_dll_export.py │ ├── test_geostrophy.py │ ├── test_gibbs.py │ ├── test_interpolation.py │ ├── test_utility.py │ ├── test_xarray.py │ └── write_geo_npyfiles.py └── utility.py ├── pyproject.toml ├── requirements-dev.txt ├── setup.py ├── src ├── _ufuncs.c ├── c_gsw │ ├── gsw_internal_const.h │ ├── gsw_oceanographic_toolbox.c │ ├── gsw_saar.c │ ├── gsw_saar_data.h │ └── gswteos-10.h ├── method_bodies.c └── method_def_entries.c └── tools ├── _utilities.py ├── c_header_parser.py ├── categories.py ├── codegen ├── copy_from_GSW-C.py ├── docstring_parts.py ├── docstring_utils.py ├── fix_wrapped_ufunc_typos.py ├── make_ufuncs.py ├── make_wrapped_ufuncs.py ├── mat2npz.py └── matlab_parser.py /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # See https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/keeping-your-actions-up-to-date-with-dependabot 2 | 3 | version: 2 4 | updates: 5 | 6 | - package-ecosystem: "github-actions" 7 | directory: "/" 8 | schedule: 9 | interval: "daily" 10 | labels: 11 | - "Bot" 12 | groups: 13 | github-actions: 14 | patterns: 15 | - '*' 16 | -------------------------------------------------------------------------------- /.github/workflows/cibuildwheel.yml: -------------------------------------------------------------------------------- 1 | name: Wheels 2 | 3 | on: 4 | pull_request: 5 | 6 | push: 7 | tags: 8 | - "v*" 9 | 10 | jobs: 11 | build_bdist: 12 | name: "Build ${{ matrix.os }} (${{ matrix.arch }}) wheels" 13 | runs-on: ${{ matrix.os }} 14 | timeout-minutes: 60 # should be long enough even on tags, but let's prevent hangs 15 | strategy: 16 | fail-fast: false 17 | matrix: 18 | include: 19 | - os: ubuntu-22.04 20 | arch: x86_64 21 | - os: ubuntu-22.04 22 | arch: aarch64 23 | - os: windows-2022 24 | arch: AMD64 25 | - os: macos-14 26 | arch: arm64 27 | - os: macos-13 28 | arch: x86_64 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | with: 33 | fetch-depth: 0 34 | 35 | # For aarch64 support 36 | # https://cibuildwheel.pypa.io/en/stable/faq/#emulation 37 | - uses: docker/setup-qemu-action@v3 38 | with: 39 | platforms: all 40 | if: runner.os == 'Linux' && matrix.arch == 'aarch64' 41 | 42 | - name: "Building ${{ matrix.os }} (${{ matrix.arch }}) wheels" 43 | uses: pypa/cibuildwheel@v2.23.3 44 | env: 45 | # Skips pypy and musllinux for now. 
46 | CIBW_SKIP: "pp* cp36-* cp37-* cp38-* *-musllinux*" 47 | CIBW_ARCHS: ${{ matrix.arch }} 48 | CIBW_BUILD_FRONTEND: build 49 | CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014 50 | CIBW_TEST_REQUIRES: pytest pandas>=2 51 | CIBW_TEST_COMMAND: > 52 | python -c "import gsw; print(f'gsw v{gsw.__version__}')" && 53 | python -m pytest --pyargs gsw 54 | 55 | - uses: actions/upload-artifact@v4 56 | with: 57 | name: pypi-artifacts-${{ matrix.os }}-${{ matrix.arch }} 58 | path: ${{ github.workspace }}/wheelhouse/*.whl 59 | 60 | 61 | build_sdist: 62 | name: Build source distribution 63 | runs-on: ubuntu-22.04 64 | steps: 65 | - uses: actions/checkout@v4 66 | with: 67 | fetch-depth: 0 68 | 69 | - name: Build sdist 70 | run: > 71 | pip install build twine check-manifest 72 | && python -m build --sdist . --outdir dist 73 | && twine check dist/* 74 | && check-manifest --verbose 75 | 76 | - uses: actions/upload-artifact@v4 77 | with: 78 | name: pypi-artifacts 79 | path: ${{ github.workspace }}/dist/*.tar.gz 80 | 81 | show-artifacts: 82 | needs: [build_bdist, build_sdist] 83 | name: "Show artifacts" 84 | runs-on: ubuntu-22.04 85 | steps: 86 | - uses: actions/download-artifact@v4 87 | with: 88 | pattern: pypi-artifacts* 89 | path: ${{ github.workspace }}/dist 90 | merge-multiple: true 91 | 92 | - shell: bash 93 | run: | 94 | ls -l ${{ github.workspace }}/dist 95 | 96 | 97 | publish-artifacts-pypi: 98 | needs: [build_bdist, build_sdist] 99 | name: "Publish to PyPI" 100 | runs-on: ubuntu-22.04 101 | # upload to PyPI for every tag starting with 'v' 102 | if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') 103 | steps: 104 | - uses: actions/download-artifact@v4 105 | with: 106 | pattern: pypi-artifacts* 107 | path: ${{ github.workspace }}/dist 108 | merge-multiple: true 109 | 110 | - uses: pypa/gh-action-pypi-publish@release/v1 111 | with: 112 | user: __token__ 113 | password: ${{ secrets.PYPI_PASSWORD }} 114 | print_hash: true 115 | -------------------------------------------------------------------------------- /.github/workflows/deploy-docs.yml: -------------------------------------------------------------------------------- 1 | name: Build and Deploy docs 2 | 3 | on: 4 | pull_request: 5 | 6 | push: 7 | tags: 8 | - "v*" 9 | 10 | defaults: 11 | run: 12 | shell: bash 13 | 14 | jobs: 15 | run: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v4 19 | 20 | - name: Set up Python 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: "3.x" 24 | 25 | - name: Install gsw 26 | run: > 27 | python -m pip install -r requirements-dev.txt 28 | && python -m pip install -e . 
29 | 30 | - name: Build documentation 31 | run: > 32 | set -e 33 | && pushd docs 34 | && make clean html linkcheck 35 | && popd 36 | 37 | - name: GitHub Pages action 38 | if: success() && github.event_name == 'release' 39 | uses: peaceiris/actions-gh-pages@v4 40 | with: 41 | github_token: ${{ secrets.GITHUB_TOKEN }} 42 | publish_dir: docs/_build/html 43 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yml: -------------------------------------------------------------------------------- 1 | name: pre-commit 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: [main] 7 | 8 | jobs: 9 | pre-commit: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v4 13 | - uses: actions/setup-python@v5 14 | - uses: pre-commit/action@v3.0.1 15 | -------------------------------------------------------------------------------- /.github/workflows/test_code_generation.yml: -------------------------------------------------------------------------------- 1 | name: Test code generation 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: [main] 7 | 8 | defaults: 9 | run: 10 | shell: bash 11 | 12 | jobs: 13 | code-generation: 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - uses: actions/checkout@v4 18 | 19 | - name: Set up Python 20 | uses: actions/setup-python@v5 21 | with: 22 | python-version: "3.x" 23 | 24 | - name: Install gsw 25 | run: > 26 | python -m pip install -r requirements-dev.txt 27 | && python -m pip install -e . 28 | 29 | - name: Test Code Generation 30 | run: > 31 | git clone https://github.com/TEOS-10/GSW-C.git ../GSW-C 32 | && git clone https://github.com/TEOS-10/GSW-Matlab.git ../GSW-Matlab 33 | && python tools/copy_from_GSW-C.py 34 | && python tools/mat2npz.py 35 | && python tools/make_ufuncs.py 36 | && python tools/make_wrapped_ufuncs.py 37 | && python tools/fix_wrapped_ufunc_typos.py 38 | 39 | - name: Install gsw 40 | run: > 41 | python -m pip install -v -e . --no-deps --no-build-isolation --force-reinstall 42 | && python -m pytest -s -rxs -v gsw/tests -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: [main] 7 | 8 | defaults: 9 | run: 10 | shell: bash 11 | 12 | jobs: 13 | run: 14 | runs-on: ${{ matrix.os }} 15 | strategy: 16 | matrix: 17 | python-version: [ "3.9", "3.10", "3.11", "3.12" ] 18 | os: [ windows-latest, ubuntu-latest, macos-latest ] 19 | # https://scientific-python.org/specs/spec-0000/ 20 | numpy-version: ["==1.24", ">=2"] 21 | exclude: 22 | - python-version: "3.12" 23 | numpy-version: "==1.24" 24 | fail-fast: false 25 | 26 | steps: 27 | - uses: actions/checkout@v4 28 | 29 | - name: Set up Python 30 | uses: actions/setup-python@v5 31 | with: 32 | python-version: ${{ matrix.python-version }} 33 | 34 | - name: Install gsw 35 | run: > 36 | python -m pip install -r requirements-dev.txt 37 | && python -m pip install -e . 
38 | && python -m pip install numpy${{ matrix.numpy-version }} 39 | 40 | - name: Tests 41 | run: | 42 | python -m pytest -s -rxs -v gsw/tests 43 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ######################################### 2 | # Editor temporary/working/backup files # 3 | .#* 4 | [#]*# 5 | *~ 6 | *$ 7 | *.bak 8 | *.kdev4 9 | .project 10 | .pydevproject 11 | *.orig 12 | 13 | # Compiled source # 14 | ################### 15 | *.a 16 | *.com 17 | *.class 18 | *.dll 19 | *.exe 20 | *.o 21 | *.py[ocd] 22 | *.so 23 | 24 | # Python files # 25 | ################ 26 | # setup.py working directory 27 | build 28 | # sphinx build directory 29 | doc/_build 30 | docs/_build 31 | # setup.py dist directory 32 | dist 33 | # Egg metadata 34 | *.egg-info 35 | .eggs/ 36 | # tox testing tool 37 | .tox 38 | MANIFEST 39 | gsw/_version.py 40 | 41 | # OS generated files # 42 | ###################### 43 | .directory 44 | .gdb_history 45 | .DS_Store 46 | ehthumbs.db 47 | Icon 48 | Thumbs.db 49 | 50 | # Things specific to this project # 51 | ################################### 52 | *.list 53 | *.tmp 54 | 55 | # Documentation generated files # 56 | ################################# 57 | 58 | # pytest 59 | .cache 60 | .pytest_cache 61 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.6.0 4 | hooks: 5 | - id: check-ast 6 | - id: debug-statements 7 | - id: check-added-large-files 8 | 9 | - repo: https://github.com/codespell-project/codespell 10 | rev: v2.2.6 11 | hooks: 12 | - id: codespell 13 | exclude: > 14 | (?x)^( 15 | .*\.c| 16 | tools/fix_wrapped_ufunc_typos\.py| 17 | gsw/tests/test_gibbs\.py 18 | )$ 19 | args: 20 | - --ignore-words-list=nin,preformed,wih, 21 | 22 | - repo: https://github.com/tox-dev/pyproject-fmt 23 | rev: 1.8.0 24 | hooks: 25 | - id: pyproject-fmt 26 | 27 | 28 | - repo: https://github.com/charliermarsh/ruff-pre-commit 29 | rev: v0.3.7 30 | hooks: 31 | - id: ruff 32 | 33 | ci: 34 | autofix_commit_msg: | 35 | [pre-commit.ci] auto fixes from pre-commit.com hooks 36 | 37 | for more information, see https://pre-commit.ci 38 | autofix_prs: false 39 | autoupdate_commit_msg: '[pre-commit.ci] pre-commit autoupdate' 40 | autoupdate_schedule: monthly 41 | skip: [] 42 | submodules: false 43 | -------------------------------------------------------------------------------- /.zenodo.json: -------------------------------------------------------------------------------- 1 | { 2 | "related_identifiers": [ 3 | { 4 | "scheme": "isbn", 5 | "identifier": "978-0-646-55621-5", 6 | "relation": "references" 7 | }, 8 | { 9 | "scheme": "handle", 10 | "identifier": "http://hdl.handle.net/11329/286", 11 | "relation": "references" 12 | } 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Licence for the use of the Gibbs SeaWater (GSW) Oceanographic Toolbox, covering this 2 | Python implementation with C components 3 | 4 | Copyright (c) 2011, SCOR/IAPSO WG127 (Scientific Committee on Oceanic Research/ 5 | International Association for the Physical Sciences of the Oceans, Working Group 127). 
6 | Copyright 2017-2020 Eric Firing and contributors 7 | 8 | All rights reserved. 9 | 10 | Redistribution and use, in source and binary forms, without modification, is permitted 11 | provided that the following conditions are met: 12 | 13 | • Redistributions of source code must retain the above copyright notice, this list 14 | of conditions and the following disclaimer. 15 | 16 | • Redistributions in binary form must reproduce the above copyright notice, this 17 | list of conditions and the following disclaimer in the documentation and/or other 18 | materials provided with the distribution. 19 | 20 | • Neither the name of SCOR/IAPSO WG127 nor the names of its contributors may be used 21 | to endorse or promote products derived from this software without specific prior 22 | written permission. 23 | 24 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY 25 | EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES 26 | OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT 27 | SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 28 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED 29 | TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 30 | BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 31 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 32 | ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH 33 | DAMAGE. 34 | 35 | The software is available from http://www.TEOS-10.org 36 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.txt 2 | include README.md 3 | 4 | graft gsw 5 | recursive-include src *.c 6 | recursive-include src *.h 7 | 8 | prune docs 9 | prune .github 10 | prune tools 11 | prune notebooks 12 | prune *.egg-info 13 | 14 | global-exclude *.so 15 | 16 | exclude *.yml 17 | exclude *.yaml 18 | exclude .gitignore 19 | exclude .isort.cfg 20 | exclude .zenodo.json 21 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # GSW-Python 2 | 3 | [![Tests](https://github.com/TEOS-10/GSW-Python/actions/workflows/tests.yml/badge.svg)](https://github.com/TEOS-10/GSW-Python/actions/workflows/tests.yml) 4 | [![Wheels](https://github.com/TEOS-10/GSW-Python/actions/workflows/cibuildwheel.yml/badge.svg)](https://github.com/TEOS-10/GSW-Python/actions/workflows/cibuildwheel.yml) 5 | [![DOI](https://zenodo.org/badge/86503067.svg)](https://zenodo.org/badge/latestdoi/86503067) 6 | 7 | 8 | This Python implementation of the Thermodynamic Equation of Seawater 2010 (TEOS-10) is based primarily on numpy ufunc wrappers of the GSW-C implementation. 9 | This library replaces the original [python-gsw](https://github.com/TEOS-10/python-gsw) pure-python implementation. 10 | The primary reasons for this change are that by building on the C implementation we reduce code duplication and we gain an immediate update to the 75-term equation. 11 | Additional benefits include a major increase in speed, 12 | a reduction in memory usage, 13 | and the inclusion of more functions. 
14 | The penalty is that a C (or MSVC C++ for Windows) compiler is required to build the package from source. 15 | 16 | **Warning: this is for Python >=3.8 only.** 17 | 18 | Documentation is provided at https://teos-10.github.io/GSW-Python/. 19 | 20 | For the core functionality, we use an auto-generated C extension 21 | module to wrap the C functions as numpy [ufuncs](https://docs.scipy.org/doc/numpy/reference/ufuncs.html), 22 | and then use an autogenerated Python module to add docstrings and handle masked arrays. 23 | 165 scalar C functions with only double-precision arguments and return values are wrapped as ufuncs, 24 | and 158 of these are exposed in the ``gsw`` namespace with an additional wrapper in Python. 25 | 26 | A hand-written wrapper is used for one C function, and others are re-implemented directly in Python instead of being wrapped. 27 | Additional functions present in GSW-Matlab but not in GSW-C may be re-implemented in Python, 28 | but there is no expectation that all such functions will be provided. 29 | 30 | ## Installation 31 | 32 | Pip users can install the pre-built wheels with: 33 | 34 | ```shell 35 | pip install gsw 36 | ``` 37 | 38 | conda users will find binaries on conda-forge, 39 | 40 | ```shell 41 | conda install gsw --channel conda-forge 42 | ``` 43 | 44 | The development version of the package can be installed from a clone of the repo using 45 | 46 | ```shell 47 | pip install . 48 | ``` 49 | 50 | ## Citation 51 | 52 | If you use GSW-Python, please cite: McDougall, T.J. and P.M. Barker, 2011: Getting started with TEOS-10 and the Gibbs Seawater (GSW) Oceanographic Toolbox, 28pp., SCOR/IAPSO WG127, ISBN 978-0-646-55621-5 53 | 54 | ```bibtex 55 | @book{mcdougall2011getting, 56 | author = {McDougall, T. J. and Barker, P. M.}, 57 | title = {Getting started with TEOS-10 and the Gibbs Seawater (GSW) Oceanographic Toolbox}, 58 | year = {2011}, 59 | pages = {28}, 60 | publisher = {SCOR/IAPSO WG127}, 61 | isbn = {978-0-646-55621-5} 62 | } 63 | ``` 64 | 65 | ## Note for xarray users 66 | 67 | A wrapper around gsw called [gsw-xarray](https://github.com/DocOtak/gsw-xarray) exists for xarray. 68 | It adds CF compliant attributes when possible, units, and name. 69 | 70 | ## Note on generating the docstrings 71 | 72 | The autogenerated docstrings are checked with codespell in the CIs. 73 | When autogenerating them we need to run ``pre-commit run --all-files`` and fix the documentation issues found. 74 | 75 | ## Development notes 76 | 77 | You will need a suitable compiler: 78 | gcc or clang for unix-like systems, 79 | or the MSVC compiler set used for Python itself on Windows. 80 | For Windows, some of the source code has been modified to C++ because the MSVC C compiler does not support the 81 | C99 complex data type used in original GSW-C. 82 | 83 | The subdirectory ('tools') contains modules and scripts for 84 | maintaining the code that is autogenerated from the upstream 85 | GSW-Matlab and GSW-C repos. The scripts are to be run from 86 | this directory; they make assumptions about where they 87 | are, and about where upstream repos are. Specifically, it 88 | is assumed that GSW-Matlab, GSW-C, and GSW-Python git repos 89 | are all in the same base directory. 90 | 91 | ### Scripts 92 | - `copy_from_GSW-C.py`: copies the relevant .c and .h files from a 93 | sibling GSW-C repo, if the latter are newer. 94 | - `mat2npz.py`: generates an npz file in gsw/tests containing the 95 | test data and check values from the gsw_data_v3_0.mat file 96 | in the sibling GSW-Matlab repo. 
97 | - `make_ufuncs.py`: Generates the src/_ufuncs.c file to turn the 98 | scalar C functions into numpy ufuncs. It writes ufuncs.list 99 | in the current directory as a record of the ufunc names. 100 | Functions are identified as ufunc candidates based on their 101 | signatures, parsed from src/c_gsw/gswteos-10.h. 102 | - `make_wrapped_ufuncs.py`: Generates gsw/_wrapped_ufuncs.py based on 103 | the output of make_ufuncs.py. It adds docstrings constructed 104 | from the Matlab help text. 105 | - `fix_wrapped_ufunc_typos.py`: Fixes docstring typos that have been 106 | identified, but not yet fixed, in the GSW-Matlab repo. 107 | - `codegen`: Runs the last three python scripts. 108 | 109 | ### Modules 110 | - `c_header_parser.py`: Functions for taking apart the function 111 | declarations in gswteos-10.h. 112 | - `matlab_parser.py`: Functions specifically for reading the GSW-Matlab 113 | function signatures and for splitting out the help text. 114 | - `docstring_parts.py`: Blocks of text for assembling docstrings. 115 | - `docstring_utils.py`: Functions for assembling docstrings in numpydoc 116 | format. 117 | - `categories.py`: Functions for listing gsw function names by category. 118 | This is not used by any of the other functions or scripts, but 119 | was used when initially categorizing the functions for inclusion 120 | in submodules. 121 | 122 | ### Notes 123 | 124 | - In addition to the generated src/_ufuncs.c, there are two C files 125 | that are hand-written: src/method_bodies.c and src/method_def_entries.c. 126 | These are imported by src/_ufuncs.c. They handle some C functions 127 | that are not suitable for ufuncs. 128 | - Specialized Matlab parsing is also done in gsw/tests/check_functions.py, 129 | which is used by gsw/tests/test_check_functions.py; see the docstring 130 | of the former for more info. 131 | 132 | ### Testing 133 | 134 | To test, after installation, run "pytest --pyargs gsw". 135 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_elements.papersize=a4 12 | PAPEROPT_letter = -D latex_elements.papersize=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
16 | 17 | .PHONY: help 18 | help: 19 | @echo "Please use \`make ' where is one of" 20 | @echo " html to make standalone HTML files" 21 | @echo " dirhtml to make HTML files named index.html in directories" 22 | @echo " singlehtml to make a single large HTML file" 23 | @echo " pickle to make pickle files" 24 | @echo " json to make JSON files" 25 | @echo " htmlhelp to make HTML files and an HTML help project" 26 | @echo " qthelp to make HTML files and a qthelp project" 27 | @echo " applehelp to make an Apple Help Book" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " epub3 to make an epub3" 31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 32 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 33 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 34 | @echo " lualatexpdf to make LaTeX files and run them through lualatex" 35 | @echo " xelatexpdf to make LaTeX files and run them through xelatex" 36 | @echo " text to make text files" 37 | @echo " man to make manual pages" 38 | @echo " texinfo to make Texinfo files" 39 | @echo " info to make Texinfo files and run them through makeinfo" 40 | @echo " gettext to make PO message catalogs" 41 | @echo " changes to make an overview of all changed/added/deprecated items" 42 | @echo " xml to make Docutils-native XML files" 43 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 44 | @echo " linkcheck to check all external links for integrity" 45 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 46 | @echo " coverage to run coverage check of the documentation (if enabled)" 47 | @echo " dummy to check syntax errors of document sources" 48 | 49 | .PHONY: clean 50 | clean: 51 | rm -rf $(BUILDDIR)/* 52 | 53 | .PHONY: html 54 | html: 55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 56 | @echo 57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 58 | 59 | .PHONY: dirhtml 60 | dirhtml: 61 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 62 | @echo 63 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 64 | 65 | .PHONY: singlehtml 66 | singlehtml: 67 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 68 | @echo 69 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 70 | 71 | .PHONY: pickle 72 | pickle: 73 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 74 | @echo 75 | @echo "Build finished; now you can process the pickle files." 76 | 77 | .PHONY: json 78 | json: 79 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 80 | @echo 81 | @echo "Build finished; now you can process the JSON files." 82 | 83 | .PHONY: htmlhelp 84 | htmlhelp: 85 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 86 | @echo 87 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 88 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
89 | 90 | .PHONY: qthelp 91 | qthelp: 92 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 93 | @echo 94 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 95 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 96 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/gsw.qhcp" 97 | @echo "To view the help file:" 98 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/gsw.qhc" 99 | 100 | .PHONY: applehelp 101 | applehelp: 102 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 103 | @echo 104 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 105 | @echo "N.B. You won't be able to view it unless you put it in" \ 106 | "~/Library/Documentation/Help or install it in your application" \ 107 | "bundle." 108 | 109 | .PHONY: devhelp 110 | devhelp: 111 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 112 | @echo 113 | @echo "Build finished." 114 | @echo "To view the help file:" 115 | @echo "# mkdir -p $$HOME/.local/share/devhelp/gsw" 116 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/gsw" 117 | @echo "# devhelp" 118 | 119 | .PHONY: epub 120 | epub: 121 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 122 | @echo 123 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 124 | 125 | .PHONY: epub3 126 | epub3: 127 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 128 | @echo 129 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." 130 | 131 | .PHONY: latex 132 | latex: 133 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 134 | @echo 135 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 136 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 137 | "(use \`make latexpdf' here to do that automatically)." 138 | 139 | .PHONY: latexpdf 140 | latexpdf: 141 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 142 | @echo "Running LaTeX files through pdflatex..." 143 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 144 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 145 | 146 | .PHONY: latexpdfja 147 | latexpdfja: 148 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 149 | @echo "Running LaTeX files through platex and dvipdfmx..." 150 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 151 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 152 | 153 | .PHONY: lualatexpdf 154 | lualatexpdf: 155 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 156 | @echo "Running LaTeX files through lualatex..." 157 | $(MAKE) PDFLATEX=lualatex -C $(BUILDDIR)/latex all-pdf 158 | @echo "lualatex finished; the PDF files are in $(BUILDDIR)/latex." 159 | 160 | .PHONY: xelatexpdf 161 | xelatexpdf: 162 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 163 | @echo "Running LaTeX files through xelatex..." 164 | $(MAKE) PDFLATEX=xelatex -C $(BUILDDIR)/latex all-pdf 165 | @echo "xelatex finished; the PDF files are in $(BUILDDIR)/latex." 166 | 167 | .PHONY: text 168 | text: 169 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 170 | @echo 171 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 172 | 173 | .PHONY: man 174 | man: 175 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 176 | @echo 177 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 178 | 179 | .PHONY: texinfo 180 | texinfo: 181 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 182 | @echo 183 | @echo "Build finished. 
The Texinfo files are in $(BUILDDIR)/texinfo." 184 | @echo "Run \`make' in that directory to run these through makeinfo" \ 185 | "(use \`make info' here to do that automatically)." 186 | 187 | .PHONY: info 188 | info: 189 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 190 | @echo "Running Texinfo files through makeinfo..." 191 | make -C $(BUILDDIR)/texinfo info 192 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 193 | 194 | .PHONY: gettext 195 | gettext: 196 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 197 | @echo 198 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 199 | 200 | .PHONY: changes 201 | changes: 202 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 203 | @echo 204 | @echo "The overview file is in $(BUILDDIR)/changes." 205 | 206 | .PHONY: linkcheck 207 | linkcheck: 208 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 209 | @echo 210 | @echo "Link check complete; look for any errors in the above output " \ 211 | "or in $(BUILDDIR)/linkcheck/output.txt." 212 | 213 | .PHONY: doctest 214 | doctest: 215 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 216 | @echo "Testing of doctests in the sources finished, look at the " \ 217 | "results in $(BUILDDIR)/doctest/output.txt." 218 | 219 | .PHONY: coverage 220 | coverage: 221 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 222 | @echo "Testing of coverage in the sources finished, look at the " \ 223 | "results in $(BUILDDIR)/coverage/python.txt." 224 | 225 | .PHONY: xml 226 | xml: 227 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 228 | @echo 229 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 230 | 231 | .PHONY: pseudoxml 232 | pseudoxml: 233 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 234 | @echo 235 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 236 | 237 | .PHONY: dummy 238 | dummy: 239 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy 240 | @echo 241 | @echo "Build finished. Dummy builder generates no files." 242 | -------------------------------------------------------------------------------- /docs/_static/readme_static.txt: -------------------------------------------------------------------------------- 1 | This is a dummy file so that we can keep this otherwise-empty 2 | directory until we need it, thereby suppressing a Sphinx warning. 3 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # gsw documentation build configuration file, created by 4 | # sphinx-quickstart on Mon Mar 13 15:27:45 2017. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | # If extensions (or modules to document with autodoc) are in another directory, 16 | # add these directories to sys.path here. If the directory is relative to the 17 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
18 | # 19 | 20 | # -- General configuration ------------------------------------------------ 21 | 22 | # If your documentation needs a minimal Sphinx version, state it here. 23 | # 24 | # needs_sphinx = '1.0' 25 | 26 | # Add any Sphinx extension module names here, as strings. They can be 27 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 28 | # ones. 29 | extensions = [ 30 | 'sphinx.ext.autodoc', 31 | 'sphinx.ext.todo', 32 | 'sphinx.ext.viewcode', 33 | 'sphinx.ext.mathjax', 34 | 'numpydoc' 35 | ] 36 | 37 | # Add any paths that contain templates here, relative to this directory. 38 | templates_path = ['_templates'] 39 | 40 | # The suffix(es) of source filenames. 41 | # You can specify multiple suffix as a list of string: 42 | # 43 | # source_suffix = ['.rst', '.md'] 44 | source_suffix = '.rst' 45 | 46 | # The master toctree document. 47 | master_doc = 'index' 48 | 49 | # General information about the project. 50 | project = 'gsw' 51 | copyright = '2017, TEOS-10 developers' 52 | author = 'TEOS-10 developers' 53 | 54 | # The version info for the project you're documenting, acts as replacement for 55 | # |version| and |release|, also used in various other places throughout the 56 | # built documents. 57 | import gsw 58 | 59 | version = release = gsw.__version__ 60 | 61 | # The language for content autogenerated by Sphinx. Refer to documentation 62 | # for a list of supported languages. 63 | # 64 | # This is also used if you do content translation via gettext catalogs. 65 | # Usually you set "language" from the command line for these cases. 66 | language = 'en' 67 | 68 | # List of patterns, relative to source directory, that match files and 69 | # directories to ignore when looking for source files. 70 | # This patterns also effect to html_static_path and html_extra_path 71 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 72 | 73 | # The name of the Pygments (syntax highlighting) style to use. 74 | pygments_style = 'sphinx' 75 | 76 | # If true, `todo` and `todoList` produce output, else they produce nothing. 77 | todo_include_todos = True 78 | 79 | 80 | # -- Options for HTML output ---------------------------------------------- 81 | 82 | # The theme to use for HTML and HTML Help pages. See the documentation for 83 | # a list of builtin themes. 84 | # 85 | html_theme = 'sphinx_rtd_theme' 86 | 87 | # Theme options are theme-specific and customize the look and feel of a theme 88 | # further. For a list of options available for each theme, see the 89 | # documentation. 90 | # 91 | # html_theme_options = {} 92 | 93 | # Add any paths that contain custom static files (such as style sheets) here, 94 | # relative to this directory. They are copied after the builtin static files, 95 | # so a file named "default.css" will overwrite the builtin "default.css". 96 | html_static_path = ['_static'] 97 | 98 | 99 | # -- Options for HTMLHelp output ------------------------------------------ 100 | 101 | # Output file base name for HTML help builder. 102 | htmlhelp_basename = 'gswdoc' 103 | 104 | 105 | # -- Options for LaTeX output --------------------------------------------- 106 | 107 | latex_elements = { 108 | # The paper size ('letterpaper' or 'a4paper'). 109 | # 110 | # 'papersize': 'letterpaper', 111 | 112 | # The font size ('10pt', '11pt' or '12pt'). 113 | # 114 | # 'pointsize': '10pt', 115 | 116 | # Additional stuff for the LaTeX preamble. 
117 | # 118 | # 'preamble': '', 119 | 120 | # Latex figure (float) alignment 121 | # 122 | # 'figure_align': 'htbp', 123 | } 124 | 125 | # Grouping the document tree into LaTeX files. List of tuples 126 | # (source start file, target name, title, 127 | # author, documentclass [howto, manual, or own class]). 128 | latex_documents = [ 129 | (master_doc, 'gsw.tex', 'gsw Documentation', 130 | 'Author', 'manual'), 131 | ] 132 | 133 | 134 | # -- Options for manual page output --------------------------------------- 135 | 136 | # One entry per manual page. List of tuples 137 | # (source start file, name, description, authors, manual section). 138 | man_pages = [ 139 | (master_doc, 'gsw', 'gsw Documentation', 140 | [author], 1) 141 | ] 142 | 143 | 144 | # -- Options for Texinfo output ------------------------------------------- 145 | 146 | # Grouping the document tree into Texinfo files. List of tuples 147 | # (source start file, target name, title, author, 148 | # dir menu entry, description, category) 149 | texinfo_documents = [ 150 | (master_doc, 'gsw', 'gsw Documentation', 151 | author, 'gsw', 'One line description of project.', 152 | 'Miscellaneous'), 153 | ] 154 | 155 | 156 | 157 | # -- Options for Epub output ---------------------------------------------- 158 | 159 | # Bibliographic Dublin Core info. 160 | epub_title = project 161 | epub_author = author 162 | epub_publisher = author 163 | epub_copyright = copyright 164 | 165 | # The unique identifier of the text. This can be a ISBN number 166 | # or the project homepage. 167 | # 168 | # epub_identifier = '' 169 | 170 | # A unique identification for the text. 171 | # 172 | # epub_uid = '' 173 | 174 | # A list of files that should not be packed into the epub file. 175 | epub_exclude_files = ['search.html'] 176 | -------------------------------------------------------------------------------- /docs/conversions.rst: -------------------------------------------------------------------------------- 1 | Conversion functions 2 | ~~~~~~~~~~~~~~~~~~~~ 3 | 4 | .. automodule:: gsw.conversions 5 | :members: 6 | :undoc-members: 7 | :imported-members: 8 | :noindex: 9 | -------------------------------------------------------------------------------- /docs/density.rst: -------------------------------------------------------------------------------- 1 | Density 2 | ~~~~~~~ 3 | .. automodule:: gsw.density 4 | :members: 5 | :undoc-members: 6 | :imported-members: 7 | :noindex: 8 | -------------------------------------------------------------------------------- /docs/energy.rst: -------------------------------------------------------------------------------- 1 | Energy 2 | ~~~~~~ 3 | .. automodule:: gsw.energy 4 | :members: 5 | :undoc-members: 6 | :imported-members: 7 | :noindex: 8 | -------------------------------------------------------------------------------- /docs/freezing.rst: -------------------------------------------------------------------------------- 1 | Freezing 2 | ~~~~~~~~ 3 | .. automodule:: gsw.freezing 4 | :members: 5 | :undoc-members: 6 | :imported-members: 7 | :noindex: 8 | -------------------------------------------------------------------------------- /docs/geostrophy.rst: -------------------------------------------------------------------------------- 1 | Geostrophy 2 | ~~~~~~~~~~ 3 | .. 
automodule:: gsw.geostrophy 4 | :members: 5 | :undoc-members: 6 | :imported-members: 7 | :noindex: 8 | -------------------------------------------------------------------------------- /docs/gsw.rst: -------------------------------------------------------------------------------- 1 | Subpackages 2 | =========== 3 | 4 | All functions are available in the base module namespace, and 5 | via the index to these web pages. 6 | 7 | Subsets of functions are grouped in subpackages, each of which corresponds 8 | approximately to one or more of the groups in the table on pages 9 | 16-19 of https://www.teos-10.org/pubs/Getting_Started.pdf. These 10 | subpackages are particularly useful for finding functions using 11 | tab-completion in IPython. 12 | 13 | When importing functions in a module or script, however, it is safer 14 | to import them directly 15 | from the ``gsw`` namespace; it is more concise and future-proof; 16 | the organization of the subpackages is subject to change. 17 | 18 | 19 | .. toctree:: 20 | :maxdepth: 4 21 | 22 | conversions 23 | density 24 | energy 25 | stability 26 | geostrophy 27 | ice 28 | freezing 29 | -------------------------------------------------------------------------------- /docs/gsw_flat.rst: -------------------------------------------------------------------------------- 1 | All functions 2 | ============= 3 | 4 | The following section documents all functions alphabetically. 5 | 6 | .. toctree:: 7 | :maxdepth: 3 8 | 9 | .. automodule:: gsw 10 | :members: 11 | :undoc-members: 12 | :imported-members: 13 | :noindex: -------------------------------------------------------------------------------- /docs/ice.rst: -------------------------------------------------------------------------------- 1 | Ice 2 | ~~~ 3 | .. automodule:: gsw.ice 4 | :members: 5 | :undoc-members: 6 | :imported-members: 7 | :noindex: 8 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | GSW-Python 2 | ========== 3 | This Python implementation of the Thermodynamic Equation of 4 | Seawater 2010 (`TEOS-10 `__) is based 5 | primarily on numpy ufunc wrappers of 6 | the `GSW-C `__ implementation. 7 | This library replaces the original 8 | `python-gsw `__ pure-python implementation. 9 | The primary reasons for this change are that by building on the 10 | C implementation we reduce code duplication and we gain an immediate 11 | update to the 75-term equation. Additional benefits include a 12 | major increase in speed, a reduction in memory usage, and the 13 | inclusion of more functions. The penalty is that a C (or MSVC C++ for 14 | Windows) compiler is required to build the package from source. 15 | 16 | .. toctree:: 17 | :maxdepth: 4 18 | :caption: Contents: 19 | 20 | intro 21 | install 22 | gsw 23 | gsw_flat 24 | 25 | 26 | Indices and tables 27 | ================== 28 | 29 | * :ref:`genindex` 30 | * :ref:`modindex` 31 | * :ref:`search` 32 | -------------------------------------------------------------------------------- /docs/install.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | If you have cloned the github repository, you can change directory to the 5 | cloned repo and install with pip:: 6 | 7 | pip install . 
8 | 9 | The ``gsw`` package is available from the Python Package index:: 10 | 11 | pip install gsw 12 | 13 | Both of the methods above require building from the source code, so you will 14 | need an appropriate build environment. 15 | 16 | For most people, we recommend using the 17 | `miniconda `__ Python distribution, 18 | and then installing a binary package from conda-forge:: 19 | 20 | conda install -c conda-forge gsw 21 | -------------------------------------------------------------------------------- /docs/intro.rst: -------------------------------------------------------------------------------- 1 | Introduction 2 | ============ 3 | 4 | .. automodule:: gsw 5 | 6 | A wrapper around gsw called `gsw-xarray `_ exists for xarray. It adds CF compliant attributes when possible, units, and name. 7 | -------------------------------------------------------------------------------- /docs/stability.rst: -------------------------------------------------------------------------------- 1 | Stability 2 | ~~~~~~~~~ 3 | .. automodule:: gsw.stability 4 | :members: 5 | :undoc-members: 6 | :imported-members: 7 | :noindex: 8 | -------------------------------------------------------------------------------- /gsw/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This is a Python implementation of the Gibbs SeaWater (GSW) Oceanographic 3 | Toolbox of TEOS-10. Extensive documentation is available from 4 | https://www.teos-10.org/; users of this Python package are strongly 5 | encouraged to study the documents posted there. 6 | 7 | This implementation is based on GSW-C for core functions, with 8 | additional functions written in Python. GSW-C is the 9 | work of Frank Delahoyde and Glenn Hyland (author of GSW-Fortran, 10 | on which GSW-C is based), who translated and re-implemented the 11 | algorithms originally written in Matlab by David Jackett, 12 | Trevor McDougall, and Paul Barker. 13 | 14 | The present Python library has an interface that is similar to the 15 | original Matlab code, but with a few important differences: 16 | 17 | - Many functions in the GSW-Matlab toolbox are not yet available here. 18 | - Taking advantage of Python namespaces, we omit the "gsw" prefix 19 | from the function names. 20 | - Missing values may be handled using `numpy.ma` masked arrays, or 21 | using `nan` values. 22 | - All functions follow numpy broadcasting rules; function arguments 23 | must be broadcastable to the dimensions of the highest-dimensioned 24 | argument. Recall that with numpy broadcasting, extra dimensions 25 | are automatically added as needed on the left, but must be added 26 | explicitly as needed on the right. 27 | - Functions such as `Nsquared` that operate on profiles rather than 28 | scalars have an `axis` keyword argument to specify the index that 29 | is incremented along the pressure (depth) axis. 30 | 31 | """ 32 | 33 | 34 | from . 
import conversions, density, energy, geostrophy, ice, stability, utility 35 | from ._fixed_wrapped_ufuncs import * # noqa 36 | from .conversions import t90_from_t68 37 | from .geostrophy import * # noqa 38 | from .interpolation import * # noqa 39 | from .stability import * # noqa 40 | from .utility import * # noqa 41 | 42 | try: 43 | from ._version import __version__ 44 | except ImportError: 45 | __version__ = "unknown" 46 | -------------------------------------------------------------------------------- /gsw/_fixed_wrapped_ufuncs.py: -------------------------------------------------------------------------------- 1 | """ 2 | Internally import from this, not from _wrapped_ufuncs. 3 | Users should import only from non-private modules, of course. 4 | """ 5 | 6 | import numpy 7 | 8 | from ._wrapped_ufuncs import * 9 | 10 | _p_from_z = p_from_z 11 | def p_from_z(z, lat, geo_strf_dyn_height=0, sea_surface_geopotential=0): 12 | return _p_from_z(z, lat, geo_strf_dyn_height, sea_surface_geopotential) 13 | p_from_z.__doc__ = _p_from_z.__doc__ 14 | 15 | _z_from_p = z_from_p 16 | def z_from_p(p, lat, geo_strf_dyn_height=0, sea_surface_geopotential=0): 17 | return _z_from_p(p, lat, geo_strf_dyn_height, sea_surface_geopotential) 18 | z_from_p.__doc__ = _z_from_p.__doc__ 19 | 20 | _gibbs = gibbs 21 | def gibbs(ns, nt, np, SA, t, p): 22 | params = {"ns": ns, "nt": nt, "np": np} 23 | for k, v in params.items(): 24 | u = numpy.unique(v) 25 | if u.min() < 0 or u.max() > 2 or u.dtype.kind != "i": 26 | raise ValueError("ns, nt, np must contain integers 0, 1, or 2;" 27 | f" found {k}={v}") 28 | return _gibbs(ns, nt, np, SA, t, p) 29 | gibbs.__doc__ = _gibbs.__doc__ 30 | 31 | 32 | _gibbs_ice = gibbs_ice 33 | def gibbs_ice(nt, np, t, p): 34 | params = {"nt": nt, "np": np} 35 | for k, v in params.items(): 36 | u = numpy.unique(v) 37 | if u.min() < 0 or u.max() > 2 or u.dtype.kind != "i": 38 | raise ValueError("nt, np must contain integers 0, 1, or 2;" 39 | f" found {k}={v}") 40 | return _gibbs_ice(nt, np, t, p) 41 | gibbs_ice.__doc__ = _gibbs_ice.__doc__ 42 | -------------------------------------------------------------------------------- /gsw/_utilities.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | 3 | import numpy as np 4 | 5 | 6 | def masked_to_nan(arg): 7 | """ 8 | Convert a masked array to a float ndarray with nans; ensure 9 | other arguments are float arrays or scalars. 10 | """ 11 | if np.ma.isMaskedArray(arg): 12 | if arg.dtype.kind == 'f': 13 | return arg.filled(np.nan) 14 | else: 15 | return arg.astype(float).filled(np.nan) 16 | else: 17 | return np.asarray(arg, dtype=float) 18 | 19 | def match_args_return(f): 20 | """ 21 | Decorator for most functions that operate on profile data. 22 | """ 23 | @wraps(f) 24 | def wrapper(*args, **kw): 25 | p = kw.get('p', None) 26 | if p is not None: 27 | args = list(args) 28 | args.append(p) 29 | 30 | isarray = [hasattr(a, '__iter__') for a in args] 31 | ismasked = [np.ma.isMaskedArray(a) for a in args] 32 | isduck = [hasattr(a, '__array_ufunc__') 33 | and not isinstance(a, np.ndarray) for a in args] 34 | 35 | hasarray = np.any(isarray) 36 | hasmasked = np.any(ismasked) 37 | hasduck = np.any(isduck) 38 | 39 | # Handle the leading integer arguments in gibbs and gibbs_ice. 40 | # Wrapped ufuncs are constructed with the "types" attribute from the 41 | # underlying ufunc. 
42 | if hasattr(f, "types"): 43 | argtypes, ret_types = f.types[0].split("->") 44 | first_double = argtypes.index("d") 45 | int_return = ret_types[0] == 'i' 46 | else: 47 | first_double = 0 48 | int_return = False 49 | 50 | 51 | def fixup(ret): 52 | if hasduck: 53 | return ret 54 | if hasmasked and not int_return: 55 | ret = np.ma.masked_invalid(ret) 56 | if not hasarray and isinstance(ret, np.ndarray) and ret.size == 1: 57 | try: 58 | ret = ret[0] 59 | except IndexError: 60 | pass 61 | return ret 62 | 63 | newargs = [] 64 | for i, arg in enumerate(args): 65 | if i < first_double: 66 | newargs.append(arg) # for gibbs and gibbs_ice 67 | elif ismasked[i]: 68 | newargs.append(masked_to_nan(arg)) 69 | elif isduck[i]: 70 | newargs.append(arg) 71 | else: 72 | newargs.append(np.asarray(arg, dtype=float)) 73 | 74 | if p is not None: 75 | kw['p'] = newargs.pop() 76 | 77 | ret = f(*newargs, **kw) 78 | 79 | if isinstance(ret, tuple): 80 | retlist = [fixup(arg) for arg in ret] 81 | ret = tuple(retlist) 82 | else: 83 | ret = fixup(ret) 84 | return ret 85 | wrapper.__wrapped__ = f 86 | return wrapper 87 | 88 | 89 | def axis_slicer(n, sl, axis): 90 | """ 91 | Return an indexing tuple for an array with `n` dimensions, 92 | with slice `sl` taken on `axis`. 93 | """ 94 | itup = [slice(None)] * n 95 | itup[axis] = sl 96 | return tuple(itup) 97 | 98 | 99 | def indexer(shape, axis, order='C'): 100 | """ 101 | Generator of indexing tuples for "apply_along_axis" usage. 102 | 103 | The generator cycles through all axes other than `axis`. 104 | The numpy np.apply_along_axis function only works with functions 105 | of a single array; this generator allows us work with a function 106 | of more than one array. 107 | """ 108 | 109 | ndim = len(shape) 110 | ind_shape = list(shape) 111 | ind_shape[axis] = 1 # "axis" and any dim of 1 will not be incremented 112 | # list of indices, with a slice at "axis" 113 | inds = [0] * ndim 114 | inds[axis] = slice(None) 115 | kmax = np.prod(ind_shape) 116 | 117 | if order == 'C': 118 | index_position = list(reversed(range(ndim))) 119 | else: 120 | index_position = list(range(ndim)) 121 | 122 | for _k in range(kmax): 123 | yield tuple(inds) 124 | 125 | for i in index_position: 126 | if ind_shape[i] == 1: 127 | continue 128 | inds[i] += 1 129 | if inds[i] == ind_shape[i]: 130 | inds[i] = 0 131 | else: 132 | break 133 | 134 | 135 | # This is straight from pycurrents.system. We can trim out 136 | # the parts we don't need, but there is no rush to do so. 137 | class Bunch(dict): 138 | """ 139 | A dictionary that also provides access via attributes. 140 | 141 | Additional methods update_values and update_None provide 142 | control over whether new keys are added to the dictionary 143 | when updating, and whether an attempt to add a new key is 144 | ignored or raises a KeyError. 145 | 146 | The Bunch also prints differently than a normal 147 | dictionary, using str() instead of repr() for its 148 | keys and values, and in key-sorted order. The printing 149 | format can be customized by subclassing with a different 150 | str_ftm class attribute. Do not assign directly to this 151 | class attribute, because that would substitute an instance 152 | attribute which would then become part of the Bunch, and 153 | would be reported as such by the keys() method. 154 | 155 | To output a string representation with 156 | a particular format, without subclassing, use the 157 | formatted() method. 
158 | """ 159 | 160 | str_fmt = "{0!s:<{klen}} : {1!s:>{vlen}}\n" 161 | 162 | def __init__(self, *args, **kwargs): 163 | """ 164 | *args* can be dictionaries, bunches, or sequences of 165 | key,value tuples. *kwargs* can be used to initialize 166 | or add key, value pairs. 167 | """ 168 | dict.__init__(self) 169 | for arg in args: 170 | self.update(arg) 171 | self.update(kwargs) 172 | 173 | def __getattr__(self, name): 174 | try: 175 | return self[name] 176 | except KeyError as err: 177 | raise AttributeError(f"'Bunch' object has no attribute {name}. {err}") 178 | 179 | def __setattr__(self, name, value): 180 | self[name] = value 181 | 182 | def __str__(self): 183 | return self.formatted() 184 | 185 | def formatted(self, fmt=None, types=False): 186 | """ 187 | Return a string with keys and/or values or types. 188 | 189 | *fmt* is a format string as used in the str.format() method. 190 | 191 | The str.format() method is called with key, value as positional 192 | arguments, and klen, vlen as kwargs. The latter are the maxima 193 | of the string lengths for the keys and values, respectively, 194 | up to respective maxima of 20 and 40. 195 | """ 196 | if fmt is None: 197 | fmt = self.str_fmt 198 | 199 | items = list(self.items()) 200 | items.sort() 201 | 202 | klens = [] 203 | vlens = [] 204 | for i, (k, v) in enumerate(items): 205 | lenk = len(str(k)) 206 | if types: 207 | v = type(v).__name__ 208 | lenv = len(str(v)) 209 | items[i] = (k, v) 210 | klens.append(lenk) 211 | vlens.append(lenv) 212 | 213 | klen = min(20, max(klens)) 214 | vlen = min(40, max(vlens)) 215 | slist = [fmt.format(k, v, klen=klen, vlen=vlen) for k, v in items] 216 | return ''.join(slist) 217 | 218 | def from_pyfile(self, filename): 219 | """ 220 | Read in variables from a python code file. 221 | """ 222 | # We can't simply exec the code directly, because in 223 | # Python 3 the scoping for list comprehensions would 224 | # lead to a NameError. Wrapping the code in a function 225 | # fixes this. 226 | d = {} 227 | lines = ["def _temp_func():\n"] 228 | with open(filename) as f: 229 | lines.extend([" " + line for line in f]) 230 | lines.extend(["\n return(locals())\n", 231 | "_temp_out = _temp_func()\n", 232 | "del(_temp_func)\n"]) 233 | codetext = "".join(lines) 234 | code = compile(codetext, filename, 'exec') 235 | exec(code, globals(), d) 236 | self.update(d["_temp_out"]) 237 | return self 238 | 239 | def update_values(self, *args, **kw): 240 | """ 241 | arguments are dictionary-like; if present, they act as 242 | additional sources of kwargs, with the actual kwargs 243 | taking precedence. 244 | 245 | One reserved optional kwarg is "strict". If present and 246 | True, then any attempt to update with keys that are not 247 | already in the Bunch instance will raise a KeyError. 248 | """ 249 | strict = kw.pop("strict", False) 250 | newkw = {} 251 | for d in args: 252 | newkw.update(d) 253 | newkw.update(kw) 254 | self._check_strict(strict, newkw) 255 | dsub = {k: v for (k, v) in newkw.items() if k in self} 256 | self.update(dsub) 257 | 258 | def update_None(self, *args, **kw): 259 | """ 260 | Similar to update_values, except that an existing value 261 | will be updated only if it is None. 
262 | """ 263 | strict = kw.pop("strict", False) 264 | newkw = {} 265 | for d in args: 266 | newkw.update(d) 267 | newkw.update(kw) 268 | self._check_strict(strict, newkw) 269 | dsub = {k: v for (k, v) in newkw.items() 270 | if k in self and self[k] is None} 271 | self.update(dsub) 272 | 273 | def _check_strict(self, strict, kw): 274 | if strict: 275 | bad = set(kw.keys()) - set(self.keys()) 276 | if bad: 277 | bk = list(bad) 278 | bk.sort() 279 | ek = list(self.keys()) 280 | ek.sort() 281 | raise KeyError( 282 | f"Update keys {bk} don't match existing keys {ek}") 283 | -------------------------------------------------------------------------------- /gsw/conversions.py: -------------------------------------------------------------------------------- 1 | """ 2 | Conversions involving temperature, salinity, entropy, pressure, 3 | and height. 4 | 5 | Those most commonly used probably include: 6 | 7 | - :func:`gsw.CT_from_t` 8 | - :func:`gsw.SA_from_SP` 9 | - :func:`gsw.SP_from_C` 10 | - :func:`gsw.p_from_z` 11 | - :func:`gsw.z_from_p` 12 | 13 | """ 14 | 15 | __all__ = ['t90_from_t68', 16 | 'adiabatic_lapse_rate_from_CT', 17 | 'C_from_SP', 18 | 'CT_from_enthalpy', 19 | 'CT_from_entropy', 20 | 'CT_from_pt', 21 | 'CT_from_rho', 22 | 'CT_from_t', 23 | 'deltaSA_from_SP', 24 | 'entropy_from_pt', 25 | 'entropy_from_t', 26 | 'pt0_from_t', 27 | 'pt_from_CT', 28 | 'pt_from_entropy', 29 | 'pt_from_t', 30 | 'SA_from_rho', 31 | 'SA_from_SP', 32 | 'SA_from_Sstar', 33 | 'SP_from_C', 34 | 'SP_from_SA', 35 | 'SP_from_SK', 36 | 'SP_from_SR', 37 | 'SP_from_Sstar', 38 | 'SR_from_SP', 39 | 'Sstar_from_SA', 40 | 'Sstar_from_SP', 41 | 't_from_CT', 42 | 'p_from_z', 43 | 'z_from_p', 44 | ] 45 | 46 | from ._fixed_wrapped_ufuncs import ( 47 | C_from_SP, 48 | CT_from_enthalpy, 49 | CT_from_entropy, 50 | CT_from_pt, 51 | CT_from_rho, 52 | CT_from_t, 53 | SA_from_rho, 54 | SA_from_SP, 55 | SA_from_Sstar, 56 | SP_from_C, 57 | SP_from_SA, 58 | SP_from_SK, 59 | SP_from_SR, 60 | SP_from_Sstar, 61 | SR_from_SP, 62 | Sstar_from_SA, 63 | Sstar_from_SP, 64 | adiabatic_lapse_rate_from_CT, 65 | deltaSA_from_SP, 66 | entropy_from_pt, 67 | entropy_from_t, 68 | p_from_z, 69 | pt0_from_t, 70 | pt_from_CT, 71 | pt_from_entropy, 72 | pt_from_t, 73 | t_from_CT, 74 | z_from_p, 75 | ) 76 | from ._utilities import match_args_return 77 | 78 | 79 | @match_args_return 80 | def t90_from_t68(t68): 81 | """ 82 | ITS-90 temperature from IPTS-68 temperature 83 | 84 | This conversion should be applied to all in-situ 85 | data collected between 1/1/1968 and 31/12/1989. 86 | 87 | """ 88 | return t68 / 1.00024 89 | -------------------------------------------------------------------------------- /gsw/density.py: -------------------------------------------------------------------------------- 1 | """ 2 | Functions related to density and specific volume. 3 | 4 | These are a subset of the TEOS-10 table category 5 | "specific volume, density, and enthalpy". 6 | 7 | We are grouping the functions related to enthalpy and internal energy 8 | in their own "energy" module. 
9 | """ 10 | from ._wrapped_ufuncs import ( 11 | alpha, 12 | alpha_on_beta, 13 | beta, 14 | kappa, 15 | rho, 16 | rho_alpha_beta, 17 | rho_t_exact, 18 | sigma0, 19 | sigma1, 20 | sigma2, 21 | sigma3, 22 | sigma4, 23 | sound_speed, 24 | specvol, 25 | specvol_alpha_beta, 26 | specvol_anom_standard, 27 | ) 28 | -------------------------------------------------------------------------------- /gsw/energy.py: -------------------------------------------------------------------------------- 1 | """ 2 | Functions involving internal energy, enthalpy, latent heat. 3 | """ 4 | from ._wrapped_ufuncs import ( 5 | enthalpy, 6 | enthalpy_diff, 7 | internal_energy, 8 | latentheat_evap_CT, 9 | latentheat_evap_t, 10 | ) 11 | -------------------------------------------------------------------------------- /gsw/freezing.py: -------------------------------------------------------------------------------- 1 | """ 2 | Freezing-point functions. 3 | """ 4 | 5 | from ._wrapped_ufuncs import ( 6 | CT_freezing, 7 | CT_freezing_first_derivatives, 8 | CT_freezing_first_derivatives_poly, 9 | CT_freezing_poly, 10 | SA_freezing_from_CT, 11 | SA_freezing_from_CT_poly, 12 | SA_freezing_from_t, 13 | SA_freezing_from_t_poly, 14 | pot_enthalpy_ice_freezing, 15 | pot_enthalpy_ice_freezing_first_derivatives, 16 | pot_enthalpy_ice_freezing_first_derivatives_poly, 17 | pot_enthalpy_ice_freezing_poly, 18 | pressure_freezing_CT, 19 | t_freezing, 20 | t_freezing_first_derivatives, 21 | t_freezing_first_derivatives_poly, 22 | ) 23 | -------------------------------------------------------------------------------- /gsw/geostrophy.py: -------------------------------------------------------------------------------- 1 | """ 2 | Functions for calculating geostrophic currents. 3 | """ 4 | 5 | import numpy as np 6 | 7 | from . import _gsw_ufuncs 8 | from ._utilities import indexer, match_args_return 9 | from .conversions import z_from_p 10 | 11 | __all__ = ['geo_strf_dyn_height', 12 | 'distance', 13 | 'f', 14 | 'geostrophic_velocity', 15 | ] 16 | 17 | @match_args_return 18 | def geo_strf_dyn_height(SA, CT, p, p_ref=0, axis=0, max_dp=1.0, 19 | interp_method='pchip'): 20 | """ 21 | Dynamic height anomaly as a function of pressure. 22 | 23 | Parameters 24 | ---------- 25 | SA : array-like 26 | Absolute Salinity, g/kg 27 | CT : array-like 28 | Conservative Temperature (ITS-90), degrees C 29 | p : array-like 30 | Sea pressure (absolute pressure minus 10.1325 dbar), dbar 31 | p_ref : float or array-like, optional 32 | Reference pressure, dbar 33 | axis : int, optional, default is 0 34 | The index of the pressure dimension in SA and CT. 35 | max_dp : float 36 | If any pressure interval in the input p exceeds max_dp, the dynamic 37 | height will be calculated after interpolating to a grid with this 38 | spacing. 39 | interp_method : string {'mrst', 'pchip', 'linear'} 40 | Interpolation algorithm. 41 | 42 | Returns 43 | ------- 44 | dynamic_height : array 45 | This is the integral of specific volume anomaly with respect 46 | to pressure, from each pressure in p to the specified 47 | reference pressure. It is the geostrophic streamfunction 48 | in an isobaric surface, relative to the reference surface. 
49 | 50 | """ 51 | interp_methods = {'mrst' : 3, 'pchip' : 2, 'linear' : 1} 52 | if interp_method not in interp_methods: 53 | raise ValueError(f'interp_method must be one of {interp_methods.keys()}') 54 | if SA.shape != CT.shape: 55 | raise ValueError(f'Shapes of SA and CT must match; found {SA.shape} and {CT.shape}') 56 | if p.ndim == 1 and SA.ndim > 1: 57 | if len(p) != SA.shape[axis]: 58 | raise ValueError( 59 | f'With 1-D p, len(p) must be SA.shape[axis];\n' 60 | f' found {len(p)} versus {SA.shape[axis]} on specified axis, {axis}' 61 | ) 62 | ind = [np.newaxis] * SA.ndim 63 | ind[axis] = slice(None) 64 | p = p[tuple(ind)] 65 | p_ref = float(p_ref) 66 | with np.errstate(invalid='ignore'): 67 | # The need for this context seems to be a bug in np.ma.any. 68 | if np.ma.any(np.ma.diff(np.ma.masked_invalid(p), axis=axis) <= 0): 69 | raise ValueError('p must be increasing along the specified axis') 70 | p = np.broadcast_to(p, SA.shape) 71 | goodmask = ~(np.isnan(SA) | np.isnan(CT) | np.isnan(p)) 72 | dh = np.empty(SA.shape, dtype=float) 73 | dh.fill(np.nan) 74 | 75 | try: 76 | order = 'F' if SA.flags.fortran else 'C' 77 | except AttributeError: 78 | order = 'C' # e.g., xarray DataArray doesn't have flags 79 | for ind in indexer(SA.shape, axis, order=order): 80 | # this is needed to support xarray inputs for numpy < 1.23 81 | igood = np.asarray(goodmask[ind]) 82 | # If p_ref is below the deepest value, skip the profile. 83 | pgood = p[ind][igood] 84 | if len(pgood) > 1 and pgood[-1] >= p_ref: 85 | sa = SA[ind][igood] 86 | ct = CT[ind][igood] 87 | # Temporarily add a top (typically surface) point and mixed layer 88 | # if p_ref is above the shallowest pressure. 89 | if pgood[0] > p_ref: 90 | ptop = np.arange(p_ref, pgood[0], max_dp) 91 | ntop = len(ptop) 92 | sa = np.hstack(([sa[0]]*ntop, sa)) 93 | ct = np.hstack(([ct[0]]*ntop, ct)) 94 | pgood = np.hstack((ptop, pgood)) 95 | else: 96 | ntop = 0 97 | dh_all = _gsw_ufuncs.geo_strf_dyn_height_1( 98 | sa, ct, pgood, p_ref, max_dp, 99 | interp_methods[interp_method]) 100 | if ntop > 0: 101 | dh[ind][igood] = dh_all[ntop:] 102 | else: 103 | dh[ind][igood] = dh_all 104 | 105 | return dh 106 | 107 | 108 | def unwrap(lon, centered=True, copy=True): 109 | """ 110 | Unwrap a sequence of longitudes or headings in degrees. 111 | 112 | Optionally center it as close to zero as possible 113 | 114 | By default, return a copy; if *copy* is False, avoid a 115 | copy when possible. 116 | 117 | Returns a masked array only if the input is a masked array. 118 | """ 119 | # From pycurrents.data.ocean. It could probably be simplified 120 | # for use here. 
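    # The idea, on a tiny illustrative case (not library output):
    # np.diff([179.0, -179.0]) is -358, which is < -180, so 360 is added to
    # the second element, giving the continuous sequence [179.0, 181.0].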
121 | 122 | masked_input = np.ma.isMaskedArray(lon) 123 | if masked_input: 124 | fill_value = lon.fill_value 125 | # masked_invalid loses the original fill_value (ma bug, 2011/01/20) 126 | lon = np.ma.masked_invalid(lon).astype(float) 127 | if lon.ndim != 1: 128 | raise ValueError("Only 1-D sequences are supported") 129 | if lon.shape[0] < 2: 130 | return lon 131 | x = lon.compressed() 132 | if len(x) < 2: 133 | return lon 134 | w = np.zeros(x.shape[0]-1, int) 135 | ld = np.diff(x) 136 | np.putmask(w, ld > 180, -1) 137 | np.putmask(w, ld < -180, 1) 138 | x[1:] += (w.cumsum() * 360.0) 139 | 140 | if centered: 141 | x -= 360 * np.round(x.mean() / 360.0) 142 | 143 | if lon.mask is np.ma.nomask: 144 | lon[:] = x 145 | else: 146 | lon[~lon.mask] = x 147 | if masked_input: 148 | lon.fill_value = fill_value 149 | return lon 150 | else: 151 | return lon.filled(np.nan) 152 | 153 | 154 | @match_args_return 155 | def distance(lon, lat, p=0, axis=-1): 156 | """ 157 | Great-circle distance in m between lon, lat points. 158 | 159 | Parameters 160 | ---------- 161 | lon, lat : array-like, 1-D or 2-D (shapes must match) 162 | Longitude, latitude, in degrees. 163 | p : array-like, scalar, 1-D or 2-D, optional, default is 0 164 | Sea pressure (absolute pressure minus 10.1325 dbar), dbar 165 | axis : int, -1, 0, 1, optional 166 | The axis or dimension along which *lat and lon* vary. 167 | This differs from most functions, for which axis is the 168 | dimension along which p increases. 169 | 170 | Returns 171 | ------- 172 | distance : 1-D or 2-D array 173 | distance in meters between adjacent points. 174 | 175 | """ 176 | earth_radius = 6371e3 177 | 178 | if not lon.shape == lat.shape: 179 | raise ValueError(f'lon, lat shapes must match; found {lon.shape}, {lat.shape}') 180 | if not (lon.ndim in (1, 2) and lon.shape[axis] > 1): 181 | raise ValueError('lon, lat must be 1-D or 2-D with more than one point' 182 | f' along axis; found shape {lon.shape} and axis {axis}') 183 | if lon.ndim == 1: 184 | one_d = True 185 | # xarray requires expand_dims() rather than [newaxis, :] 186 | lon = np.expand_dims(lon, 0) 187 | lat = np.expand_dims(lat, 0) 188 | axis = -1 189 | else: 190 | one_d = False 191 | 192 | # Handle scalar default; match_args_return doesn't see it. 193 | p = np.atleast_1d(p) 194 | one_d = (one_d and p.ndim == 1) 195 | 196 | if axis == 0: 197 | indm = (slice(0, -1), slice(None)) 198 | indp = (slice(1, None), slice(None)) 199 | else: 200 | indm = (slice(None), slice(0, -1)) 201 | indp = (slice(None), slice(1, None)) 202 | 203 | if np.all(p == 0): 204 | z = 0 205 | else: 206 | lon, lat, p = np.broadcast_arrays(lon, lat, p) 207 | 208 | p_mid = 0.5 * (p[indm] + p[indp]) 209 | lat_mid = 0.5 * (lat[indm] + lat[indp]) 210 | 211 | z = z_from_p(p_mid, lat_mid) 212 | 213 | lon = np.radians(lon) 214 | lat = np.radians(lat) 215 | 216 | dlon = np.diff(lon, axis=axis) 217 | dlat = np.diff(lat, axis=axis) 218 | 219 | a = ((np.sin(dlat / 2)) ** 2 + np.cos(lat[indm]) * 220 | np.cos(lat[indp]) * (np.sin(dlon / 2)) ** 2) 221 | 222 | angles = 2 * np.arctan2(np.sqrt(a), np.sqrt(1 - a)) 223 | 224 | distance = (earth_radius + z) * angles 225 | 226 | if one_d: 227 | distance = distance[0] 228 | 229 | return distance 230 | 231 | 232 | @match_args_return 233 | def f(lat): 234 | """ 235 | Coriolis parameter in 1/s for latitude in degrees. 236 | """ 237 | omega = 7.292115e-5 # (1/s) (Groten, 2004). 
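    # f = 2 * Omega * sin(latitude): roughly 1.03e-4 1/s at 45 degrees N,
    # zero at the equator (illustrative values, not check values).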
238 | f = 2 * omega * np.sin(np.radians(lat)) 239 | return f 240 | 241 | @match_args_return 242 | def geostrophic_velocity(geo_strf, lon, lat, p=0, axis=0): 243 | """ 244 | Calculate geostrophic velocity from a streamfunction. 245 | 246 | Calculates geostrophic velocity relative to a reference pressure, 247 | given a geostrophic streamfunction and the position of each station 248 | in sequence along an ocean section. The data can be from a single 249 | isobaric or "density" surface, or from a series of such surfaces. 250 | 251 | Parameters 252 | ---------- 253 | geo_strf : array-like, 1-D or 2-D 254 | geostrophic streamfunction; see Notes below. 255 | lon : array-like, 1-D 256 | Longitude, -360 to 360 degrees 257 | lat : array-like, 1-D 258 | Latitude, degrees 259 | p : float or array-like, optional 260 | Sea pressure (absolute pressure minus 10.1325 dbar), dbar. 261 | This used only for a tiny correction in the distance calculation; 262 | it is safe to omit it. 263 | axis : int, 0 or 1, optional 264 | The axis or dimension along which pressure increases in geo_strf. 265 | If geo_strf is 1-D, it is ignored. 266 | 267 | Returns 268 | ------- 269 | velocity : array, 2-D or 1-D 270 | Geostrophic velocity in m/s relative to the sea surface, 271 | averaged between each successive pair of positions. 272 | mid_lon, mid_lat : array, 1-D 273 | Midpoints of input lon and lat. 274 | 275 | Notes 276 | ----- 277 | The geostrophic streamfunction can be: 278 | 279 | - geo_strf_dyn_height (in an isobaric surface) 280 | - geo_strf_Montgomery (in a specific volume anomaly surface) 281 | - geo_strf_Cunninhgam (in an approximately neutral surface 282 | such as a potential density surface). 283 | - geo_strf_isopycnal (in an approximately neutral surface 284 | such as a potential density surface, a Neutral Density 285 | surface, or an omega surface (Klocker et al., 2009)). 286 | 287 | Only :func:`geo_strf_dyn_height` is presently implemented 288 | in GSW-Python. 289 | 290 | """ 291 | lon = unwrap(lon) 292 | 293 | if lon.shape != lat.shape or lon.ndim != 1: 294 | raise ValueError(f'lon, lat must be 1-D and matching; found shapes {lon.shape} and {lat.shape}') 295 | 296 | if geo_strf.ndim not in (1, 2): 297 | raise ValueError(f'geo_strf must be 1-D or 2-d; found shape {geo_strf.shape}') 298 | 299 | laxis = 0 if axis else -1 300 | 301 | ds = distance(lon, lat, p) 302 | 303 | mid_lon = 0.5 * (lon[:-1] + lon[1:]) 304 | mid_lat = 0.5 * (lat[:-1] + lat[1:]) 305 | 306 | u = np.diff(geo_strf, axis=laxis) / (ds * f(mid_lat)) 307 | 308 | return u, mid_lon, mid_lat 309 | -------------------------------------------------------------------------------- /gsw/ice.py: -------------------------------------------------------------------------------- 1 | """ 2 | Functions about ice and melting, but not the freezing point. 
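
For illustration only (example values are arbitrary, and the exact argument
lists vary by function): many of these routines take the in-situ temperature
of ice t (deg C, ITS-90) and sea pressure p (dbar), e.g.
gsw.rho_ice(-5.0, 100.0) for the in-situ density of ice.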
3 | """ 4 | 5 | from ._wrapped_ufuncs import ( 6 | Helmholtz_energy_ice, 7 | adiabatic_lapse_rate_ice, 8 | alpha_wrt_t_ice, 9 | chem_potential_water_ice, 10 | cp_ice, 11 | enthalpy_ice, 12 | entropy_ice, 13 | ice_fraction_to_freeze_seawater, 14 | internal_energy_ice, 15 | kappa_const_t_ice, 16 | kappa_ice, 17 | melting_ice_equilibrium_SA_CT_ratio, 18 | melting_ice_equilibrium_SA_CT_ratio_poly, 19 | melting_ice_into_seawater, 20 | melting_ice_SA_CT_ratio, 21 | melting_ice_SA_CT_ratio_poly, 22 | melting_seaice_equilibrium_SA_CT_ratio, 23 | melting_seaice_equilibrium_SA_CT_ratio_poly, 24 | melting_seaice_into_seawater, 25 | melting_seaice_SA_CT_ratio, 26 | melting_seaice_SA_CT_ratio_poly, 27 | pot_enthalpy_from_pt_ice, 28 | pot_enthalpy_from_pt_ice_poly, 29 | pressure_coefficient_ice, 30 | pt0_from_t_ice, 31 | pt_from_pot_enthalpy_ice, 32 | pt_from_pot_enthalpy_ice_poly, 33 | pt_from_t_ice, 34 | rho_ice, 35 | seaice_fraction_to_freeze_seawater, 36 | sound_speed_ice, 37 | specvol_ice, 38 | t_from_pt0_ice, 39 | ) 40 | -------------------------------------------------------------------------------- /gsw/interpolation.py: -------------------------------------------------------------------------------- 1 | """ 2 | Functions for vertical interpolation. 3 | """ 4 | 5 | import numpy as np 6 | 7 | from . import _gsw_ufuncs 8 | from ._utilities import indexer, match_args_return 9 | 10 | __all__ = ['sa_ct_interp', 11 | 'tracer_ct_interp', 12 | ] 13 | 14 | @match_args_return 15 | def sa_ct_interp(SA, CT, p, p_i, axis=0): 16 | """ 17 | Interpolates vertical casts of values of Absolute Salinity 18 | and Conservative Temperature to the arbitrary pressures p_i. 19 | 20 | Parameters 21 | ---------- 22 | SA : array-like 23 | Absolute Salinity, g/kg 24 | CT : array-like 25 | Conservative Temperature (ITS-90), degrees C 26 | p : array-like 27 | Sea pressure (absolute pressure minus 10.1325 dbar), dbar 28 | p_i : array-like 29 | Sea pressure to interpolate on, dbar 30 | axis : int, optional, default is 0 31 | The index of the pressure dimension in SA and CT. 32 | 33 | 34 | Returns 35 | ------- 36 | SA_i : array 37 | Values of SA interpolated to p_i along the specified axis. 38 | CT_i : array 39 | Values of CT interpolated to p_i along the specified axis. 40 | 41 | """ 42 | if SA.shape != CT.shape: 43 | raise ValueError(f'Shapes of SA and CT must match; found {SA.shape} and {CT.shape}') 44 | if p.ndim != p_i.ndim: 45 | raise ValueError(f'p and p_i must have the same number of dimensions;\n' 46 | f' found {p.ndim} versus {p_i.ndim}') 47 | if p.ndim == 1 and SA.ndim > 1: 48 | if len(p) != SA.shape[axis]: 49 | raise ValueError( 50 | f'With 1-D p, len(p) must be SA.shape[axis];\n' 51 | f' found {len(p)} versus {SA.shape[axis]} on specified axis, {axis}' 52 | ) 53 | ind = [np.newaxis] * SA.ndim 54 | ind[axis] = slice(None) 55 | p = p[tuple(ind)] 56 | p_i = p_i[tuple(ind)] 57 | elif p.ndim > 1: 58 | if p.shape != SA.shape: 59 | raise ValueError(f'With {p.ndim}-D p, shapes of p and SA must match;\n' 60 | f'found {p.shape} and {SA.shape}') 61 | if any(p.shape[i] != p_i.shape[i] for i in range(p.ndim) if i != axis): 62 | raise ValueError(f'With {p.ndim}-D p, p and p_i must have the same dimensions outside of axis {axis};\n' 63 | f' found {p.shape} versus {p_i.shape}') 64 | with np.errstate(invalid='ignore'): 65 | # The need for this context seems to be a bug in np.ma.any. 
66 | if np.ma.any(np.ma.diff(np.ma.masked_invalid(p_i), axis=axis) <= 0) \ 67 | or np.ma.any(np.ma.diff(np.ma.masked_invalid(p), axis=axis) <= 0): 68 | raise ValueError('p and p_i must be increasing along the specified axis') 69 | p = np.broadcast_to(p, SA.shape) 70 | goodmask = ~(np.isnan(SA) | np.isnan(CT) | np.isnan(p)) 71 | SA_i = np.empty(p_i.shape, dtype=float) 72 | CT_i = np.empty(p_i.shape, dtype=float) 73 | SA_i.fill(np.nan) 74 | CT_i.fill(np.nan) 75 | 76 | try: 77 | order = 'F' if SA.flags.fortran else 'C' 78 | except AttributeError: 79 | order = 'C' # e.g., xarray DataArray doesn't have flags 80 | for ind in indexer(SA.shape, axis, order=order): 81 | # this is needed to support xarray inputs for numpy < 1.23 82 | igood = np.asarray(goodmask[ind]) 83 | pgood = p[ind][igood] 84 | pi = p_i[ind] 85 | # There must be at least 2 non-NaN values for interpolation 86 | if len(pgood) > 2: 87 | sa = SA[ind][igood] 88 | ct = CT[ind][igood] 89 | sai, cti = _gsw_ufuncs.sa_ct_interp(sa, ct, pgood, pi) 90 | SA_i[ind] = sai 91 | CT_i[ind] = cti 92 | 93 | return (SA_i, CT_i) 94 | 95 | @match_args_return 96 | def tracer_ct_interp(tracer, CT, p, p_i, factor=9., axis=0): 97 | """ 98 | Interpolates vertical casts of values of a tracer 99 | and Conservative Temperature to the arbitrary pressures p_i. 100 | 101 | Parameters 102 | ---------- 103 | tracer : array-like 104 | tracer 105 | CT : array-like 106 | Conservative Temperature (ITS-90), degrees C 107 | p : array-like 108 | Sea pressure (absolute pressure minus 10.1325 dbar), dbar 109 | p_i : array-like 110 | Sea pressure to interpolate on, dbar 111 | factor: float, optional, default is 9. 112 | Ratio between the ranges of Conservative Temperature 113 | and tracer in the world ocean. 114 | axis : int, optional, default is 0 115 | The index of the pressure dimension in tracer and CT. 116 | 117 | 118 | Returns 119 | ------- 120 | tracer_i : array 121 | Values of tracer interpolated to p_i along the specified axis. 122 | CT_i : array 123 | Values of CT interpolated to p_i along the specified axis. 124 | 125 | """ 126 | if tracer.shape != CT.shape: 127 | raise ValueError(f'Shapes of tracer and CT must match; found {tracer.shape} and {CT.shape}') 128 | if p.ndim != p_i.ndim: 129 | raise ValueError(f'p and p_i must have the same number of dimensions;\n' 130 | f' found {p.ndim} versus {p_i.ndim}') 131 | if p.ndim == 1 and tracer.ndim > 1: 132 | if len(p) != tracer.shape[axis]: 133 | raise ValueError( 134 | f'With 1-D p, len(p) must be tracer.shape[axis];\n' 135 | f' found {len(p)} versus {tracer.shape[axis]} on specified axis, {axis}' 136 | ) 137 | ind = [np.newaxis] * tracer.ndim 138 | ind[axis] = slice(None) 139 | p = p[tuple(ind)] 140 | p_i = p_i[tuple(ind)] 141 | elif p.ndim > 1: 142 | if p.shape != tracer.shape: 143 | raise ValueError(f'With {p.ndim}-D p, shapes of p and tracer must match;\n' 144 | f'found {p.shape} and {tracer.shape}') 145 | if any(p.shape[i] != p_i.shape[i] for i in range(p.ndim) if i != axis): 146 | raise ValueError(f'With {p.ndim}-D p, p and p_i must have the same dimensions outside of axis {axis};\n' 147 | f' found {p.shape} versus {p_i.shape}') 148 | with np.errstate(invalid='ignore'): 149 | # The need for this context seems to be a bug in np.ma.any. 
150 | if np.ma.any(np.ma.diff(np.ma.masked_invalid(p_i), axis=axis) <= 0) \ 151 | or np.ma.any(np.ma.diff(np.ma.masked_invalid(p), axis=axis) <= 0): 152 | raise ValueError('p and p_i must be increasing along the specified axis') 153 | p = np.broadcast_to(p, tracer.shape) 154 | goodmask = ~(np.isnan(tracer) | np.isnan(CT) | np.isnan(p)) 155 | tracer_i = np.empty(p_i.shape, dtype=float) 156 | CT_i = np.empty(p_i.shape, dtype=float) 157 | tracer_i.fill(np.nan) 158 | CT_i.fill(np.nan) 159 | 160 | try: 161 | order = 'F' if tracer.flags.fortran else 'C' 162 | except AttributeError: 163 | order = 'C' # e.g., xarray DataArray doesn't have flags 164 | for ind in indexer(tracer.shape, axis, order=order): 165 | # this is needed to support xarray inputs for numpy < 1.23 166 | igood = np.asarray(goodmask[ind]) 167 | pgood = p[ind][igood] 168 | pi = p_i[ind] 169 | # There must be at least 2 non-NaN values for interpolation 170 | if len(pgood) > 2: 171 | tr = tracer[ind][igood] 172 | ct = CT[ind][igood] 173 | tri, cti = _gsw_ufuncs.tracer_ct_interp(tr, ct, pgood, pi, factor) 174 | tracer_i[ind] = tri 175 | CT_i[ind] = cti 176 | 177 | return (tracer_i, CT_i) 178 | -------------------------------------------------------------------------------- /gsw/stability.py: -------------------------------------------------------------------------------- 1 | """ 2 | Vertical stability functions. 3 | 4 | These work with ndarrays of profiles; use the `axis` keyword 5 | argument to specify the axis along which pressure varies. 6 | For example, the default, following the Matlab versions, is 7 | `axis=0`, meaning the pressure varies along the first dimension. 8 | Use `axis=-1` if pressure varies along the last dimension--that 9 | is, along a row, as the column index increases, in the 2-D case. 10 | 11 | Docstrings will be added later, either manually or via 12 | an automated mechanism. 13 | 14 | """ 15 | 16 | 17 | import numpy as np 18 | 19 | from ._gsw_ufuncs import grav, specvol_alpha_beta 20 | from ._utilities import axis_slicer, match_args_return 21 | 22 | __all__ = ['Nsquared', 23 | 'Turner_Rsubrho', 24 | 'IPV_vs_fNsquared_ratio', 25 | ] 26 | 27 | # In the following, axis=0 matches the Matlab behavior. 28 | 29 | @match_args_return 30 | def Nsquared(SA, CT, p, lat=None, axis=0): 31 | """ 32 | Calculate the square of the buoyancy frequency. 33 | 34 | Parameters 35 | ---------- 36 | SA : array-like 37 | Absolute Salinity, g/kg 38 | CT : array-like 39 | Conservative Temperature (ITS-90), degrees C 40 | p : array-like 41 | Sea pressure (absolute pressure minus 10.1325 dbar), dbar 42 | lat : array-like, 1-D, optional 43 | Latitude, degrees. 44 | axis : int, optional 45 | The dimension along which pressure increases. 46 | 47 | Returns 48 | ------- 49 | N2 : array 50 | Buoyancy frequency-squared at pressure midpoints, 1/s^2. 51 | The shape along the pressure axis dimension is one 52 | less than that of the inputs. 53 | (Frequency N is in radians per second.) 54 | p_mid : array 55 | Pressure at midpoints of p, dbar. 56 | The array shape matches N2. 
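
    A minimal, illustrative call (values are arbitrary, not check values)::

        import numpy as np
        import gsw
        SA = np.array([34.7, 34.9, 35.1])    # g/kg
        CT = np.array([12.0, 10.0, 8.0])     # deg C
        p = np.array([0.0, 50.0, 100.0])     # dbar
        N2, p_mid = gsw.Nsquared(SA, CT, p)  # N2.shape == (2,); p_mid == [25., 75.]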
57 | 58 | """ 59 | if lat is not None: 60 | if np.any((lat < -90) | (lat > 90)): 61 | raise ValueError('lat is out of range') 62 | SA, CT, p, lat = np.broadcast_arrays(SA, CT, p, lat) 63 | g = grav(lat, p) 64 | else: 65 | SA, CT, p = np.broadcast_arrays(SA, CT, p) 66 | g = 9.7963 # (Griffies, 2004) 67 | 68 | db_to_pa = 1e4 69 | shallow = axis_slicer(SA.ndim, slice(-1), axis) 70 | deep = axis_slicer(SA.ndim, slice(1, None), axis) 71 | if lat is not None: 72 | g_local = 0.5 * (g[shallow] + g[deep]) 73 | else: 74 | g_local = g 75 | 76 | dSA = SA[deep] - SA[shallow] 77 | dCT = CT[deep] - CT[shallow] 78 | dp = p[deep] - p[shallow] 79 | SA_mid = 0.5 * (SA[shallow] + SA[deep]) 80 | CT_mid = 0.5 * (CT[shallow] + CT[deep]) 81 | p_mid = 0.5 * (p[shallow] + p[deep]) 82 | 83 | specvol_mid, alpha_mid, beta_mid = specvol_alpha_beta(SA_mid, 84 | CT_mid, p_mid) 85 | 86 | N2 = ((g_local**2) / (specvol_mid * db_to_pa * dp)) 87 | N2 *= (beta_mid*dSA - alpha_mid*dCT) 88 | 89 | return N2, p_mid 90 | 91 | 92 | @match_args_return 93 | def Turner_Rsubrho(SA, CT, p, axis=0): 94 | """ 95 | Calculate the Turner Angle and the Stability Ratio. 96 | 97 | Parameters 98 | ---------- 99 | SA : array-like 100 | Absolute Salinity, g/kg 101 | CT : array-like 102 | Conservative Temperature (ITS-90), degrees C 103 | p : array-like 104 | Sea pressure (absolute pressure minus 10.1325 dbar), dbar 105 | axis : int, optional 106 | The dimension along which pressure increases. 107 | 108 | Returns 109 | ------- 110 | Tu : array 111 | Turner Angle at pressure midpoints, degrees. 112 | The shape along the pressure axis dimension is one 113 | less than that of the inputs. 114 | Rsubrho : array 115 | Stability Ratio, dimensionless. 116 | The shape matches Tu. 117 | p_mid : array 118 | Pressure at midpoints of p, dbar. 119 | The array shape matches Tu. 120 | 121 | """ 122 | 123 | SA = np.clip(SA, 0, 50) 124 | SA, CT, p = np.broadcast_arrays(SA, CT, p) 125 | shallow = axis_slicer(SA.ndim, slice(-1), axis) 126 | deep = axis_slicer(SA.ndim, slice(1, None), axis) 127 | 128 | dSA = -SA[deep] + SA[shallow] 129 | dCT = -CT[deep] + CT[shallow] 130 | 131 | SA_mid = 0.5 * (SA[shallow] + SA[deep]) 132 | CT_mid = 0.5 * (CT[shallow] + CT[deep]) 133 | p_mid = 0.5 * (p[shallow] + p[deep]) 134 | 135 | _, alpha, beta = specvol_alpha_beta(SA_mid, CT_mid, p_mid) 136 | 137 | Tu = np.arctan2((alpha*dCT + beta*dSA), (alpha*dCT - beta*dSA)) 138 | Tu = np.degrees(Tu) 139 | 140 | igood = (dSA != 0) 141 | Rsubrho = np.zeros_like(dSA) 142 | Rsubrho.fill(np.nan) 143 | Rsubrho[igood] = (alpha[igood]*dCT[igood])/(beta[igood]*dSA[igood]) 144 | 145 | return Tu, Rsubrho, p_mid 146 | 147 | 148 | @match_args_return 149 | def IPV_vs_fNsquared_ratio(SA, CT, p, p_ref=0, axis=0): 150 | """ 151 | Calculates the ratio of the vertical gradient of potential density to 152 | the vertical gradient of locally-referenced potential density. This 153 | is also the ratio of the planetary Isopycnal Potential Vorticity 154 | (IPV) to f times N^2, hence the name for this variable, 155 | IPV_vs_fNsquared_ratio (see Eqn. (3.20.17) of IOC et al. (2010)). 
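
    In terms of the calculation below, the ratio at each pressure midpoint is
    (alpha(p_ref)*dCT - beta(p_ref)*dSA) / (alpha(p)*dCT - beta(p)*dSA),
    where dSA and dCT are the differences between adjacent levels and alpha
    and beta are evaluated at the midpoint values of SA and CT.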
156 | 157 | Parameters 158 | ---------- 159 | SA : array-like 160 | Absolute Salinity, g/kg 161 | t : array-like 162 | In-situ temperature (ITS-90), degrees C 163 | p : array-like 164 | Sea pressure (absolute pressure minus 10.1325 dbar), dbar 165 | p_ref : float 166 | Reference pressure, dbar 167 | 168 | Returns 169 | ------- 170 | IPV_vs_fNsquared_ratio : array 171 | The ratio of the vertical gradient of 172 | potential density referenced to p_ref, to the vertical 173 | gradient of locally-referenced potential density, dimensionless. 174 | p_mid : array 175 | Pressure at midpoints of p, dbar. 176 | The array shape matches IPV_vs_fNsquared_ratio. 177 | 178 | """ 179 | 180 | SA = np.clip(SA, 0, 50) 181 | SA, CT, p = np.broadcast_arrays(SA, CT, p) 182 | shallow = axis_slicer(SA.ndim, slice(-1), axis) 183 | deep = axis_slicer(SA.ndim, slice(1, None), axis) 184 | 185 | dSA = -SA[deep] + SA[shallow] 186 | dCT = -CT[deep] + CT[shallow] 187 | 188 | SA_mid = 0.5 * (SA[shallow] + SA[deep]) 189 | CT_mid = 0.5 * (CT[shallow] + CT[deep]) 190 | p_mid = 0.5 * (p[shallow] + p[deep]) 191 | 192 | _, alpha, beta = specvol_alpha_beta(SA_mid, CT_mid, p_mid) 193 | _, alpha_pref, beta_pref = specvol_alpha_beta(SA_mid, CT_mid, p_ref) 194 | 195 | num = dCT*alpha_pref - dSA*beta_pref 196 | den = dCT*alpha - dSA*beta 197 | 198 | igood = (den != 0) 199 | IPV_vs_fNsquared_ratio = np.zeros_like(dSA) 200 | IPV_vs_fNsquared_ratio.fill(np.nan) 201 | IPV_vs_fNsquared_ratio[igood] = num[igood] / den[igood] 202 | 203 | return IPV_vs_fNsquared_ratio, p_mid 204 | -------------------------------------------------------------------------------- /gsw/tests/_WIP_test_ufuncs.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for the unwrapped ufuncs. 3 | 4 | This is a WIP; it doesn't work yet for all cases, and might not be a good 5 | approach anyway. For now, test_check_functions is adequate, handling the 6 | wrapped ufuncs via check_functions "eval" and "exec" machinery. 7 | """ 8 | import numpy as np 9 | import pytest 10 | from numpy.testing import assert_allclose 11 | 12 | import gsw 13 | from gsw._utilities import Bunch 14 | from gsw.tests.check_functions import parse_check_functions 15 | 16 | cv = Bunch(np.load('gsw_cv_v3_0.npz')) 17 | cf = Bunch() 18 | 19 | d = dir(gsw._gsw_ufuncs) 20 | funcnames = [name for name in d if '__' not in name] 21 | 22 | mfuncs = parse_check_functions('gsw_check_functions_save.m') 23 | mfuncs = [mf for mf in mfuncs if mf.name in d] 24 | mfuncnames = [mf.name for mf in mfuncs] 25 | 26 | @pytest.fixture(scope='session', params=mfuncs) 27 | def cfcf(request): 28 | return cv, cf, request.param 29 | 30 | def test_mechanism(cfcf): 31 | cv, cf, mfunc = cfcf 32 | print("<%s>" % mfunc.name) 33 | def value_from_name(vname): 34 | b, name = vname.split('.') 35 | if b == 'cf': 36 | return cf[name] 37 | elif b == 'cv': 38 | return cv[name] 39 | else: 40 | raise ValueError("Can't find cf. or cv. 
in %s" % vname) 41 | def set_from_name(vname, value): 42 | b, name = vname.split('.') 43 | if b == 'cf': 44 | cf[name] = value 45 | else: 46 | raise ValueError(f"attempting to set value in {b}") 47 | 48 | func = getattr(gsw._gsw_ufuncs, mfunc.name) 49 | args = [eval(a) for a in mfunc.argstrings] 50 | #print("<<%s>>" % (args,)) 51 | out = func(*args) 52 | #print("<<<%s>>>" % (out,)) 53 | if isinstance(out, tuple): 54 | nout = len(out) 55 | else: 56 | nout = 1 57 | out = (out,) 58 | n = min(nout, len(mfunc.outstrings)) 59 | for i, s in enumerate(mfunc.outstrings[:n]): 60 | set_from_name(s, out[i]) 61 | if mfunc.test_varstrings is not None: 62 | ntests = (len(mfunc.test_varstrings) - 1) // 3 63 | for i in range(ntests): 64 | expected = value_from_name(mfunc.test_varstrings[3*i+1]) 65 | found = value_from_name(mfunc.test_varstrings[3*i+2]) 66 | tolerance = value_from_name(mfunc.test_varstrings[3*i+3]) 67 | #print(expected) 68 | #print(found) 69 | print(tolerance) 70 | try: 71 | assert_allclose(expected, found, atol=tolerance) 72 | except TypeError: 73 | print(mfunc.test_varstrings[3*i+3], tolerance.shape) 74 | print(mfunc.test_varstrings) 75 | # The following is not right, but this step is unimportant. 76 | #set_from_name(mfunc.test_varstrings[0], expected - found) 77 | 78 | else: 79 | print(">>%s<<" % mfunc.testline) 80 | print("missing mfunc.test_varstrings") 81 | mfunc.run() 82 | if hasattr(mfunc, 'exception'): 83 | print(">>>%s<<<", mfunc.exception) 84 | else: 85 | assert mfunc.passed 86 | -------------------------------------------------------------------------------- /gsw/tests/check_functions.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module with functions and script to generate tests directly 3 | from our local version of gsw_check_functions.m. 4 | 5 | Usage (run from this test directory): 6 | python check_functions.py 7 | 8 | A primary use for this script is to see which functions are 9 | missing from GSW-Python; they appear in the NameError category. 10 | 11 | TypeError category can include functions that are incomplete 12 | or otherwise not working correctly. 13 | 14 | For functions that run but yield results that fail the check, 15 | the error arrays are printed. 16 | 17 | This can be improved--we should get less information from the 18 | matlab script and more from our own functions. We probably 19 | should not need the matlab script at all, or maybe use it only 20 | to extract the list of functions being tested in matlab. 21 | 22 | This module is also imported by test_check_functions.py, which 23 | is run by py.test. 24 | 25 | """ 26 | 27 | import os 28 | import re 29 | import sys 30 | 31 | import numpy as np 32 | 33 | from gsw import * # noqa 34 | from gsw._utilities import Bunch 35 | 36 | # If we switch to using the logging module, uncomment: 37 | # import logging 38 | # log = logging.getLogger() 39 | # logging.basicConfig() 40 | 41 | # The following re patterns are for the "alternative" parsing of 42 | # the test line to support using numpy assert_allclose. This is 43 | # not presently in use, but aspects of this method, here and in 44 | # _WIP_test_ufuncs.py, might replace some of the original code here. 
45 | # 46 | # pattern for a single test line after it has been pre-processed to 47 | # remove spaces, the square brackets, and semicolon 48 | testlinepat = r"(\w+\.\w+)=find\(\w*\((\w+\.\w+)-(\w+\.\w+)\)>=(\w+\.\w+)\)" 49 | # 50 | # pattern for the inner test when there is a sequence separated by '|' 51 | testpat = r"\(+\w*\((\w+\.\w+)-(\w+\.\w+)\)+>=(\w+\.\w+)\)" 52 | 53 | 54 | def find(x): 55 | """ 56 | Numpy equivalent to Matlab find. 57 | """ 58 | return np.nonzero(np.asarray(x).flatten())[0] 59 | 60 | 61 | def group_or(line): 62 | """ 63 | Translate matlab 'find' functions including logical or operators. 64 | 65 | Example: the test for gsw_rho_alpha_beta 66 | 67 | Numpy wart: using bitwise or as a fake elementwise logical or, 68 | we need to add parentheses. 69 | """ 70 | if not ('find(' in line and '|' in line): 71 | return line 72 | i0 = line.index('find(') + 5 73 | head = line[:i0] 74 | tail = line[i0:] 75 | parts = tail.replace('|', ') | (') 76 | new = head + '(' + parts + ')' 77 | return new 78 | 79 | 80 | class FunctionCheck: 81 | """ 82 | Parse the line-pair for checks in gsw_check_functions. 83 | """ 84 | def __init__(self, linepair): 85 | """ 86 | *linepair* is the sequence of two lines; the first runs 87 | the function and assigns the output, and the second 88 | generates an array of indices where the output error 89 | exceeds a tolerance. 90 | """ 91 | 92 | self.linepair = linepair 93 | self.runline = linepair[0] 94 | self.testline = linepair[1] 95 | 96 | # parse the line that runs the function 97 | head, tail = self.runline.split('=') 98 | self.outstrings = [s.strip() for s in head.split(',')] 99 | self.outstr = ','.join(self.outstrings) 100 | funcstr, argpart = tail.split('(', 1) 101 | self.name = funcstr.strip() 102 | self.argstrings = [s.strip() for s in argpart[:-1].split(',')] 103 | self.argstr = ','.join(self.argstrings) 104 | 105 | # parse the line that checks the results 106 | head, tail = self.testline.split('=', 1) 107 | self.resultstr = head.strip() # cv.I* 108 | head, tail = tail.split('(', 1) 109 | self.teststr = tail.strip()[:-1] # argument of "find()" 110 | self.teststr = self.teststr.replace('abs(', 'np.abs(') 111 | 112 | # alternative parsing of testline 113 | testline = self.testline.replace(' ', '') 114 | if '|' in testline: 115 | diffstr, test_str = testline.split('=', 1) 116 | # Chop off the leading 'find('. 
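            # ('find(' is five characters, hence the [5:] slice below.)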
117 | tests = test_str[5:].split('|') 118 | self.test_varstrings = [diffstr] 119 | for test in tests: 120 | m = re.match(testpat, test) 121 | if m is None: 122 | print(self.name, testpat, test, m) 123 | if m is not None: 124 | self.test_varstrings.extend(list(m.groups())) 125 | else: 126 | m = re.match(testlinepat, testline) 127 | if m is not None: 128 | self.test_varstrings = m.groups() 129 | else: 130 | print("no match") 131 | self.test_varstrings = None 132 | 133 | 134 | # To be set when run() is successful 135 | self.outlist = None 136 | self.result = None # will be a reference to the cv.I* array 137 | self.passed = None # will be set to True or False 138 | 139 | # To be set if run() is not successful 140 | self.exception = None 141 | 142 | def __str__(self): 143 | return self.runline 144 | 145 | def record_details(self, evalargs): 146 | tline = self.testline 147 | i0 = 5 + tline.index('find(') 148 | tline = tline[i0:-1] 149 | checks = tline.split('|') 150 | parts = [] 151 | for check in checks: 152 | check = check.replace(' ', '') 153 | if check.startswith('('): 154 | check = check[1:-1] 155 | part = Bunch(check=check) 156 | LHS, RHS = check.split('>=') 157 | part.tolerance = eval(RHS, *evalargs) 158 | # Sometimes there is an extra set of (). 159 | if LHS.startswith('('): 160 | LHS = LHS[1:-1] 161 | LHS = LHS[4:-1] # chop off abs(...) 162 | target, calculated = LHS.split('-') 163 | part.checkval = eval(target, *evalargs) 164 | part.val = eval(calculated, *evalargs) 165 | parts.append(part) 166 | 167 | self.details = parts 168 | 169 | def run(self, locals_=None): 170 | try: 171 | if locals_ is not None: 172 | _globals = globals() #dict(**globals()) 173 | _globals.update(locals_) 174 | evalargs = (_globals,) 175 | else: 176 | evalargs = () 177 | 178 | # The following is needed for melting_ice_into_seawater. 179 | if len(self.outstrings) > 1: 180 | rl_ind = '[:%d]' % len(self.outstrings) 181 | else: 182 | rl_ind = '' 183 | 184 | exec(self.runline + rl_ind, *evalargs) 185 | if len(self.outstrings) == 1: 186 | if isinstance(eval(self.outstr, *evalargs), tuple): 187 | exec(f"{self.outstr} = {self.outstr}[0]", *evalargs) 188 | self.outlist = [eval(s, *evalargs) for s in self.outstrings] 189 | 190 | exec(self.testline, *evalargs) 191 | self.result = eval(self.resultstr, *evalargs) 192 | 193 | self.passed = (len(self.result) == 0) 194 | # The following has trouble with CT_first_derivatives 195 | if self.name not in ['CT_first_derivatives',]: 196 | self.record_details(evalargs) 197 | # print("%s passed? %s" % (self.name, self.passed)) 198 | 199 | except Exception as e: 200 | self.exception = e 201 | 202 | 203 | def find_arguments(checks): 204 | """ 205 | For a sequence of FunctionCheck instances, return the 206 | set of unique arguments as a sorted list. 207 | """ 208 | argset = set() 209 | for c in checks: 210 | argset.update(c.argstrings) 211 | argsetlist = list(argset) 212 | argsetlist.sort() 213 | return argsetlist 214 | 215 | 216 | def find_arglists(checks): 217 | """ 218 | For a sequence of FunctionCheck instances, return the 219 | set of unique argument lists as a sorted list. 
220 | """ 221 | alset = set() 222 | for c in checks: 223 | alset.update([c.argstr]) 224 | arglists = list(alset) 225 | arglists.sort() 226 | return arglists 227 | 228 | def parse_check_functions(mfile): 229 | """ 230 | Return a list of FunctionCheck instances from gsw_check_functions.m 231 | """ 232 | 233 | with open(mfile) as fid: 234 | mfilelines = fid.readlines() 235 | 236 | first_pass = [] 237 | 238 | concat = False 239 | for line in mfilelines: 240 | line = line.strip() 241 | if concat: 242 | if line.endswith('...'): 243 | line = line[:-3] 244 | first_pass[-1] += line 245 | if line.endswith(';'): 246 | concat = False 247 | continue 248 | if '=' in line and (line.startswith('gsw_') or line.startswith('[gsw_')): 249 | if line.endswith('...'): 250 | line = line[:-3] 251 | concat = True 252 | first_pass.append(line) 253 | 254 | second_pass = [] 255 | 256 | for line in first_pass: 257 | if not '(' in line: 258 | continue 259 | if 'which' in line: 260 | continue 261 | line = line.replace('gsw_', '') 262 | if line.startswith('['): 263 | line = line[1:].replace(']', '') 264 | if line.endswith(';'): 265 | line = line[:-1] 266 | line = line.replace('(I)', '') # For deltaSA_atlas. 267 | second_pass.append(line) 268 | 269 | pairs = [] 270 | 271 | for i in range(len(second_pass)): 272 | if 'find(' in second_pass[i] and not 'find(' in second_pass[i-1]: 273 | pairs.extend(second_pass[i-1:i+1]) 274 | 275 | final = [group_or(line) for line in pairs] 276 | 277 | checks = [] 278 | for i in range(0, len(final), 2): 279 | pair = final[i:i+2] 280 | checks.append(FunctionCheck(pair)) 281 | 282 | return checks 283 | 284 | if __name__ == '__main__': 285 | import argparse 286 | 287 | parser = argparse.ArgumentParser( 288 | description='Run checks from gsw_check_functions.m') 289 | 290 | parser.add_argument('--path', dest='mfiledir', 291 | default="", 292 | help='path to external gsw_check_functions.m') 293 | parser.add_argument('-v', '--verbose', 294 | action='store_true', 295 | help='print output mismatch arrays') 296 | parser.add_argument('--find', 297 | help='run functions with this substring') 298 | 299 | args = parser.parse_args() 300 | 301 | if args.mfiledir: 302 | mfile = os.path.join(args.mfiledir, "gsw_check_functions.m") 303 | else: 304 | mfile = "gsw_check_functions_save.m" 305 | checks = parse_check_functions(mfile) 306 | 307 | #datadir = os.path.join(os.path.dirname(gsw.utilities.__file__), 'data') 308 | datadir = './' 309 | cv = Bunch(np.load(os.path.join(datadir, 'gsw_cv_v3_0.npz'))) 310 | cf = Bunch() 311 | 312 | if args.find: 313 | checks = [c for c in checks if args.find in c.runline] 314 | 315 | for fc in checks: 316 | fc.run() 317 | 318 | passes = [f for f in checks if f.passed] 319 | failures = [f for f in checks if f.passed is False] 320 | 321 | run_problems = [f for f in checks if f.exception is not None] 322 | 323 | etypes = [NameError, UnboundLocalError, TypeError, AttributeError] 324 | ex_dict = {} 325 | for exc in etypes: 326 | elist = [(f.name, f.exception) for f in checks if 327 | isinstance(f.exception, exc)] 328 | ex_dict[exc] = elist 329 | 330 | print("\n%s tests were translated from gsw_check_functions.m" % len(checks)) 331 | print("\n%s tests ran with no error and with correct output" % len(passes)) 332 | if args.verbose: 333 | for f in passes: 334 | print(f.name) 335 | 336 | print("\n%s tests had an output mismatch:" % len(failures)) 337 | for f in failures: 338 | print(f.name) 339 | print(f.runline) 340 | print(f.testline) 341 | if args.verbose: 342 | print(f.result) 343 | 
for part in f.details: 344 | print("tolerance: ", part.tolerance) 345 | print("error:") 346 | print(part.checkval - part.val) 347 | print('') 348 | 349 | print('') 350 | 351 | print("\n%s exceptions were raised as follows:" % len(run_problems)) 352 | for exc in etypes: 353 | print(" ", exc.__name__) 354 | strings = [" {} : {}".format(*e) for e in ex_dict[exc]] 355 | print("\n".join(strings)) 356 | print("") 357 | 358 | checkbunch = Bunch([(c.name, c) for c in checks]) 359 | -------------------------------------------------------------------------------- /gsw/tests/geo_strf_dyn_height.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TEOS-10/GSW-Python/5e9c5a46409d8163559b213f6292c6b45ddd9a59/gsw/tests/geo_strf_dyn_height.npy -------------------------------------------------------------------------------- /gsw/tests/geo_strf_velocity.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TEOS-10/GSW-Python/5e9c5a46409d8163559b213f6292c6b45ddd9a59/gsw/tests/geo_strf_velocity.npy -------------------------------------------------------------------------------- /gsw/tests/gsw_cv_v3_0.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TEOS-10/GSW-Python/5e9c5a46409d8163559b213f6292c6b45ddd9a59/gsw/tests/gsw_cv_v3_0.npz -------------------------------------------------------------------------------- /gsw/tests/list_check_functions.py: -------------------------------------------------------------------------------- 1 | """ 2 | Lists functions checked by test_check_functions, and functions 3 | that are in gsw_check_functions but are not in gsw. 4 | """ 5 | 6 | import os 7 | 8 | import numpy as np 9 | from check_functions import parse_check_functions 10 | 11 | import gsw 12 | 13 | root_path = os.path.abspath(os.path.dirname(__file__)) 14 | 15 | # Function checks that we can't handle automatically yet. 16 | blacklist = ['deltaSA_atlas', # the test is complicated; doesn't fit the pattern. 17 | ] 18 | 19 | d = dir(gsw) 20 | funcnames = [name for name in d if '__' not in name] 21 | 22 | mfuncs_all = parse_check_functions(os.path.join(root_path, 23 | 'gsw_check_functions_save.m')) 24 | mfuncs = [mf for mf in mfuncs_all if mf.name in d and mf.name not in blacklist] 25 | mfuncnames = sorted([mf.name for mf in mfuncs]) 26 | 27 | missingnames = [mf for mf in mfuncs_all if mf.name not in d] 28 | missingnames = sorted([mf.name for mf in missingnames]) 29 | 30 | print('Functions being checked:') 31 | for i, name in enumerate(mfuncnames): 32 | print(i, name) 33 | 34 | print('Functions not in gsw:') 35 | for i, name in enumerate(missingnames): 36 | print(i, name) 37 | 38 | -------------------------------------------------------------------------------- /gsw/tests/test_check_functions.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests functions with pytest, using the machinery from check_functions.py 3 | """ 4 | 5 | import os 6 | 7 | import numpy as np 8 | import pytest 9 | from check_functions import parse_check_functions 10 | from numpy.testing import assert_allclose 11 | 12 | import gsw 13 | from gsw._utilities import Bunch 14 | 15 | # Most of the tests have some nan values, so we need to suppress the warning. 16 | # Any more careful fix would likely require considerable effort. 17 | # We get an overflow from ct_from_enthalpy_exact, but the test passes. 
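# (np.seterr below changes NumPy's global floating-point error handling so
# that 'invalid' and 'overflow' conditions are silently ignored; it does not
# change any computed values.)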
18 | np.seterr(invalid='ignore', over='ignore') 19 | 20 | root_path = os.path.abspath(os.path.dirname(__file__)) 21 | 22 | # Function checks that we can't handle automatically yet. 23 | blacklist = ['deltaSA_atlas', # the test is complicated; doesn't fit the pattern. 24 | 'geostrophic_velocity', # test elsewhere; we changed the API 25 | #'CT_from_entropy', # needs prior entropy_from_CT; don't have it in C 26 | #'CT_first_derivatives', # passes, but has trouble in "details"; 27 | # see check_functions.py 28 | #'entropy_second_derivatives', # OK now; handling extra parens. 29 | #'melting_ice_into_seawater', # OK now; fixed nargs mismatch. 30 | ] 31 | 32 | cv = Bunch(np.load(os.path.join(root_path, 'gsw_cv_v3_0.npz'))) 33 | 34 | # Substitute new check values for the pchip interpolation version. 35 | cv.geo_strf_dyn_height = np.load(os.path.join(root_path,'geo_strf_dyn_height.npy')) 36 | cv.geo_strf_velocity = np.load(os.path.join(root_path,'geo_strf_velocity.npy')) 37 | 38 | cf = Bunch() 39 | 40 | d = dir(gsw) 41 | funcnames = [name for name in d if '__' not in name] 42 | 43 | mfuncs = parse_check_functions(os.path.join(root_path, 'gsw_check_functions_save.m')) 44 | mfuncs = [mf for mf in mfuncs if mf.name in d and mf.name not in blacklist] 45 | mfuncnames = [mf.name for mf in mfuncs] 46 | 47 | 48 | @pytest.fixture(params=[-360., 0., 360.]) 49 | def lonshift(request): 50 | return request.param 51 | 52 | 53 | @pytest.fixture(params=mfuncs, ids=mfuncnames) 54 | def setup(request, lonshift): 55 | cvshift = Bunch(**cv) 56 | cvshift.long_chck_cast = cv.long_chck_cast + lonshift 57 | return cvshift, cf, request.param 58 | 59 | 60 | def test_check_function(setup): 61 | cv, cf, mfunc = setup 62 | mfunc.run(locals()) 63 | if mfunc.exception is not None or not mfunc.passed: 64 | print('\n', mfunc.name) 65 | print(' ', mfunc.runline) 66 | print(' ', mfunc.testline) 67 | if mfunc.exception is None: 68 | mfunc.exception = ValueError('Calculated values are different from the expected matlab results.') 69 | raise mfunc.exception 70 | else: 71 | print(mfunc.name) 72 | assert mfunc.passed 73 | -------------------------------------------------------------------------------- /gsw/tests/test_dll_export.py: -------------------------------------------------------------------------------- 1 | import ctypes 2 | 3 | import gsw 4 | 5 | 6 | def test_ctypes_access(): 7 | dllname = gsw._gsw_ufuncs.__file__ 8 | gswlib = ctypes.cdll.LoadLibrary(dllname) 9 | rho_gsw_ctypes = gswlib.gsw_rho # In-situ density. 
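    # The C function gsw_rho(sa, ct, p) takes three doubles and returns a
    # double, so the ctypes signature is declared accordingly below.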
10 | rho_gsw_ctypes.argtypes = [ctypes.c_double] * 3 11 | rho_gsw_ctypes.restype = ctypes.c_double 12 | stp = (35.0, 10.0, 0.0) 13 | assert rho_gsw_ctypes(*stp) == gsw.rho(*stp) 14 | -------------------------------------------------------------------------------- /gsw/tests/test_geostrophy.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import numpy as np 4 | from numpy.testing import assert_allclose, assert_almost_equal, assert_array_equal 5 | 6 | import gsw 7 | from gsw._utilities import Bunch 8 | 9 | root_path = os.path.abspath(os.path.dirname(__file__)) 10 | 11 | cv = Bunch(np.load(os.path.join(root_path, 'gsw_cv_v3_0.npz'))) 12 | # Override the original with what we calculate using pchip interp 13 | cv.geo_strf_velocity = np.load(os.path.join(root_path,'geo_strf_velocity.npy')) 14 | 15 | lon = [1, 2] 16 | lat = [45, 45] 17 | expected = 78626.18767687 18 | 19 | # distance tests 20 | 21 | def test_list(): 22 | value = gsw.distance(lon, lat, p=0, axis=-1) 23 | assert_almost_equal(expected, value) 24 | 25 | def test_1darray(): 26 | value = gsw.distance(np.array(lon), np.array(lat), p=0, axis=-1) 27 | assert_almost_equal(expected, value) 28 | 29 | def test_1darray_default_p(): 30 | # @match_args_return doesn't see the default p. 31 | value = gsw.distance(np.array(lon), np.array(lat)) 32 | assert_almost_equal(expected, value) 33 | 34 | def test_2dlist(): 35 | value = gsw.distance(np.atleast_2d(lon), np.atleast_2d(lat), p=0, axis=1) 36 | assert_almost_equal(expected, value) 37 | 38 | # geostrophic streamfunction tests 39 | 40 | def test_strf_no_good(): 41 | # revised geo_strf_dyn_height requires 2 valid points 42 | shape = (5,) 43 | SA = np.ma.masked_all(shape, dtype=float) 44 | CT = np.ma.masked_all(shape, dtype=float) 45 | p = np.array([0.0, 10.0, 20.0, 30.0, 40.0]) 46 | 47 | # No valid points. 48 | strf = gsw.geo_strf_dyn_height(SA, CT, p, p_ref=0, axis=0) 49 | expected = np.zeros(shape, float) + np.nan 50 | assert_array_equal(strf.filled(np.nan), expected) 51 | 52 | # 1 valid point: not enough. 53 | SA[:1] = 35 54 | CT[:1] = 5 55 | strf = gsw.geo_strf_dyn_height(SA, CT, p, p_ref=0, axis=0) 56 | expected = np.zeros(shape, float) + np.nan 57 | assert_array_equal(strf.filled(np.nan), expected) 58 | 59 | # 2 valid points: enough for the calculation to proceed. 60 | SA[:2] = 35 61 | CT[:2] = [5, 4] 62 | strf = gsw.geo_strf_dyn_height(SA, CT, p, p_ref=0, axis=0) 63 | assert strf.count() == 2 64 | 65 | def test_geostrophy(): 66 | lon = cv.long_chck_cast 67 | lat = cv.lat_chck_cast 68 | p = cv.p_chck_cast 69 | CT = cv.CT_chck_cast 70 | SA = cv.SA_chck_cast 71 | strf = gsw.geo_strf_dyn_height(SA, CT, p) 72 | geovel, midlon, midlat = gsw.geostrophic_velocity(strf, lon, lat) 73 | assert_almost_equal(geovel, cv.geo_strf_velocity) 74 | assert_almost_equal(midlon, cv.geo_strf_velocity_mid_long[0]) 75 | assert_almost_equal(midlat, cv.geo_strf_velocity_mid_lat[0]) 76 | 77 | def test_dyn_height_shallower_pref(): 78 | """ 79 | Check that we can handle a p_ref that is shallower than the top of the 80 | cast. To make the results from bin 1 on down independent of whether 81 | bin 0 has been deleted, we need to use linear interpolation. 
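    (With pchip, the interpolating polynomial near the top of the cast
    depends on bin 0, so the two results would not line up exactly.)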
82 | """ 83 | p = cv.p_chck_cast 84 | CT = cv.CT_chck_cast 85 | SA = cv.SA_chck_cast 86 | strf0 = gsw.geo_strf_dyn_height(SA, CT, p, p_ref=0, interp_method='linear') 87 | strf1 = gsw.geo_strf_dyn_height(SA[1:], CT[1:], p[1:], p_ref=0, 88 | interp_method='linear') 89 | found = strf1 - strf1[0] 90 | expected = strf0[1:] - strf0[1] 91 | assert_almost_equal(found, expected) 92 | 93 | def test_pz_roundtrip(): 94 | """ 95 | The p_z conversion functions have Matlab-based checks that use 96 | only the first two arguments. 97 | Here we verify that the functions are also inverses when the optional 98 | arguments are used. 99 | """ 100 | z = np.array([-10, -100, -1000, -5000], dtype=float) 101 | p = gsw.p_from_z(z, 30, 0.5, 0.25) 102 | zz = gsw.z_from_p(p, 30, 0.5, 0.25) 103 | assert_almost_equal(z, zz) 104 | 105 | def test_dyn_height_mrst(): 106 | """ 107 | Tests the MRST-PCHIP interpolation method. 108 | """ 109 | p = cv.p_chck_cast 110 | CT = cv.CT_chck_cast 111 | SA = cv.SA_chck_cast 112 | pr = cv.pr 113 | strf = gsw.geo_strf_dyn_height(SA, CT, p, p_ref=pr, interp_method='mrst') 114 | 115 | assert_allclose(strf, cv.geo_strf_dyn_height, rtol=0, atol=cv.geo_strf_dyn_height_ca) 116 | -------------------------------------------------------------------------------- /gsw/tests/test_gibbs.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from numpy.testing import assert_almost_equal 3 | 4 | import gsw 5 | 6 | 7 | def test_gibbs_0(): 8 | SA = np.array([35, 34])[:, np.newaxis] 9 | p = np.array([0, 1000])[np.newaxis, :] 10 | out = gsw.gibbs(0, 0, 0, SA, 15, p) 11 | expected = np.array([[-1624.830331610998, 8102.300078026374], 12 | [-1694.143599586675, 8040.219363411558]]) 13 | assert_almost_equal(out, expected) 14 | 15 | 16 | def test_gibbs_1(): 17 | ns = np.array([0, 1])[:, np.newaxis, np.newaxis] 18 | nt = np.array([0, 1])[np.newaxis, :, np.newaxis] 19 | np_ = np.array([0, 1])[np.newaxis, np.newaxis, :] 20 | out = gsw.gibbs(ns, nt, np_, 35, 15, 0) 21 | expected = np.array([[[-1624.830331610998, 9.748019262990162e-04], 22 | [-213.3508284006898, 2.083180196723266e-07]], 23 | [[70.39427245333731, -7.288938619915955e-07], 24 | [0.590312110989411, 2.083180196723266e-07]]]) 25 | expected[1, 1, 1] = np.nan # Unless we add this case to GSW-C. 
26 | print(out) 27 | assert_almost_equal(out, expected) 28 | 29 | 30 | def test_gibbs_2(): 31 | params = [ 32 | (2, 0, 0, 35, 15, 0, 2.144088568168594), 33 | (0, 2, 0, 35, 15, 0, -13.86057508638656), 34 | (0, 0, 2, 35, 15, 0, -4.218331910346273e-13) 35 | ] 36 | for p in params: 37 | assert_almost_equal(gsw.gibbs(*p[:6]), p[6]) 38 | 39 | 40 | def test_gibbs_ice(): 41 | out = gsw.gibbs_ice(1, 0, 0, [0, 100]) 42 | expected = np.array([1220.788661299953, 1220.962914882458]) 43 | assert_almost_equal(out, expected) 44 | 45 | 46 | # Source, on an Intel Mac: 47 | # octave:3> gsw_gibbs(0, 0, 0, 35, 15, 0) 48 | # ans = -1624.830331610998 49 | # octave:4> gsw_gibbs(0, 0, 0, 35, 15, 1000) 50 | # ans = 8102.300078026374 51 | # octave:5> gsw_gibbs(0, 0, 0, 34, 15, 0) 52 | # ans = -1694.143599586675 53 | # octave:6> gsw_gibbs(0, 0, 0, 34, 15, 1000) 54 | # ans = 8040.219363411558 55 | 56 | 57 | # octave:7> gsw_gibbs(1, 0, 0, 35, 15, 0) 58 | # ans = 70.39427245333731 59 | # octave:8> gsw_gibbs(0, 1, 0, 35, 15, 0) 60 | # ans = -213.3508284006898 61 | # octave:9> gsw_gibbs(0, 0, 1, 35, 15, 0) 62 | # ans = 9.748019262990162e-04 63 | 64 | 65 | # octave:10> gsw_gibbs(2, 0, 0, 35, 15, 0) 66 | # ans = 2.144088568168594 67 | # octave:11> gsw_gibbs(0, 2, 0, 35, 15, 0) 68 | # ans = -13.86057508638656 69 | # octave:12> gsw_gibbs(0, 0, 2, 35, 15, 0) 70 | # ans = -4.218331910346273e-13 71 | 72 | # octave:13> gsw_gibbs(1, 0, 1, 35, 15, 0) 73 | # ans = -7.288938619915955e-07 74 | # octave:14> gsw_gibbs(1, 1, 0, 35, 15, 0) 75 | # ans = 0.590312110989411 76 | # octave:15> gsw_gibbs(0, 1, 1, 35, 15, 0) 77 | # ans = 2.083180196723266e-07 78 | # octave:16> 79 | 80 | # octave:16> gsw_gibbs(1, 1, 1, 35, 15, 0) 81 | # ans = 1.420449745181019e-09 82 | 83 | # octave:7> gsw_gibbs_ice(1, 0, 0, 0) 84 | # ans = 1220.788661299953 85 | # octave:8> gsw_gibbs_ice(1, 0, 0, 100) 86 | # ans = 1220.962914882458 -------------------------------------------------------------------------------- /gsw/tests/test_interpolation.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import numpy as np 4 | from numpy.testing import assert_allclose 5 | 6 | import gsw 7 | from gsw._utilities import Bunch 8 | 9 | root_path = os.path.abspath(os.path.dirname(__file__)) 10 | 11 | cv = Bunch(np.load(os.path.join(root_path, 'gsw_cv_v3_0.npz'))) 12 | 13 | def test_sa_ct_interp(): 14 | p = cv.p_chck_cast 15 | CT = cv.CT_chck_cast 16 | SA = cv.SA_chck_cast 17 | p_i = np.repeat(cv.p_i[:, np.newaxis], p.shape[1], axis=1) 18 | SA_i, CT_i = gsw.sa_ct_interp(SA, CT, p, p_i) 19 | assert_allclose(SA_i, cv.SAi_SACTinterp, rtol=0, atol=cv.SAi_SACTinterp_ca) 20 | assert_allclose(CT_i, cv.CTi_SACTinterp, rtol=0, atol=cv.CTi_SACTinterp_ca) 21 | 22 | def test_tracer_ct_interp(): 23 | p = cv.p_chck_cast 24 | CT = cv.CT_chck_cast 25 | tracer = cv.SA_chck_cast 26 | p_i = np.repeat(cv.p_i[:, np.newaxis], p.shape[1], axis=1) 27 | tracer_i, CT_i = gsw.tracer_ct_interp(tracer, CT, p, p_i) 28 | assert_allclose(tracer_i, cv.traceri_tracerCTinterp, rtol=0, atol=cv.traceri_tracerCTinterp_ca) 29 | assert_allclose(CT_i, cv.CTi_SACTinterp, rtol=0, atol=cv.CTi_SACTinterp_ca) 30 | -------------------------------------------------------------------------------- /gsw/tests/test_utility.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | import gsw 5 | 6 | nx, ny, nz = 2, 3, 10 7 | y = np.arange(nx*ny*nz, dtype=float).reshape((nx, ny, nz)) 8 | y += y**1.5 9 | z = 
np.arange(nz, dtype=float) 10 | z = np.broadcast_to(z, y.shape) 11 | zn = z.copy() 12 | zn[:, :, [0, -1]] = np.nan 13 | 14 | xi_arraylist = [[0.5, 1.5], np.linspace(-1, z.max() + 10, 50)] 15 | 16 | # Initial smoke test with small and large xi arrays. 17 | @pytest.mark.parametrize("xi", xi_arraylist) 18 | def test_in_range(xi): 19 | yi = gsw.pchip_interp(z, y, xi, axis=-1) 20 | assert yi.shape == (nx, ny, len(xi)) 21 | 22 | 23 | # Try with NaNs. 24 | @pytest.mark.parametrize("xi", xi_arraylist) 25 | def test_in_range_nan(xi): 26 | yi = gsw.pchip_interp(zn, y, xi, axis=-1) 27 | assert yi.shape == (nx, ny, len(xi)) 28 | -------------------------------------------------------------------------------- /gsw/tests/test_xarray.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests functions with xarray inputs. 3 | 4 | This version is a copy of the original test_check_functions but with 5 | an import of xarray, and conversion of the 3 main check cast arrays 6 | into DataArray objects. 7 | 8 | An additional xarray-dask test is added. 9 | """ 10 | 11 | import os 12 | 13 | import numpy as np 14 | import pandas as pd 15 | import pytest 16 | from check_functions import parse_check_functions 17 | from numpy.testing import assert_allclose 18 | 19 | import gsw 20 | from gsw._utilities import Bunch 21 | 22 | xr = pytest.importorskip('xarray') 23 | 24 | # Most of the tests have some nan values, so we need to suppress the warning. 25 | # Any more careful fix would likely require considerable effort. 26 | np.seterr(invalid='ignore') 27 | 28 | root_path = os.path.abspath(os.path.dirname(__file__)) 29 | 30 | # Function checks that we can't handle automatically yet. 31 | blacklist = ['deltaSA_atlas', # the test is complicated; doesn't fit the pattern. 32 | 'geostrophic_velocity', # test elsewhere; we changed the API 33 | #'CT_from_entropy', # needs prior entropy_from_CT; don't have it in C 34 | #'CT_first_derivatives', # passes, but has trouble in "details"; 35 | # see check_functions.py 36 | #'entropy_second_derivatives', # OK now; handling extra parens. 37 | #'melting_ice_into_seawater', # OK now; fixed nargs mismatch. 38 | ] 39 | 40 | # We get an overflow from ct_from_enthalpy_exact, but the test passes. 41 | cv = Bunch(np.load(os.path.join(root_path, 'gsw_cv_v3_0.npz'))) 42 | 43 | # Substitute new check values for the pchip interpolation version. 
44 | cv.geo_strf_dyn_height = np.load(os.path.join(root_path,'geo_strf_dyn_height.npy')) 45 | cv.geo_strf_velocity = np.load(os.path.join(root_path,'geo_strf_velocity.npy')) 46 | 47 | for name in ['SA_chck_cast', 't_chck_cast', 'p_chck_cast']: 48 | cv[name] = xr.DataArray(cv[name]) 49 | 50 | cf = Bunch() 51 | 52 | d = dir(gsw) 53 | funcnames = [name for name in d if '__' not in name] 54 | 55 | mfuncs = parse_check_functions(os.path.join(root_path, 'gsw_check_functions_save.m')) 56 | mfuncs = [mf for mf in mfuncs if mf.name in d and mf.name not in blacklist] 57 | mfuncnames = [mf.name for mf in mfuncs] 58 | 59 | 60 | @pytest.fixture(scope='session', params=mfuncs) 61 | def cfcf(request): 62 | return cv, cf, request.param 63 | 64 | 65 | def test_check_function(cfcf): 66 | cv, cf, mfunc = cfcf 67 | mfunc.run(locals()) 68 | if mfunc.exception is not None or not mfunc.passed: 69 | print('\n', mfunc.name) 70 | print(' ', mfunc.runline) 71 | print(' ', mfunc.testline) 72 | if mfunc.exception is None: 73 | mfunc.exception = ValueError('Calculated values are different from the expected matlab results.') 74 | raise mfunc.exception 75 | else: 76 | print(mfunc.name) 77 | assert mfunc.passed 78 | 79 | 80 | def test_dask_chunking(): 81 | dsa = pytest.importorskip('dask.array') 82 | 83 | # define some input data 84 | shape = (100, 1000) 85 | chunks = (100, 200) 86 | sp = xr.DataArray(dsa.full(shape, 35., chunks=chunks), dims=['time', 'depth']) 87 | p = xr.DataArray(np.arange(shape[1]), dims=['depth']) 88 | lon = 0 89 | lat = 45 90 | 91 | sa = gsw.SA_from_SP(sp, p, lon, lat) 92 | sa_dask = sa.compute() 93 | 94 | sa_numpy = gsw.SA_from_SP(np.full(shape, 35.0), p.values, lon, lat) 95 | assert_allclose(sa_dask, sa_numpy) 96 | 97 | 98 | # Additional tests from Graeme MacGilchrist 99 | # https://nbviewer.jupyter.org/github/gmacgilchrist/wmt_bgc/blob/master/notebooks/test_gsw-xarray.ipynb 100 | 101 | # Define dimensions and coordinates 102 | dims = ['y','z','t'] 103 | # 2x2x2 104 | y = np.arange(0,2) 105 | z = np.arange(0,2) 106 | t = np.arange(0,2) 107 | # Define numpy arrays of salinity, temperature and pressure 108 | SA_vals = np.array([[[34.7,34.8],[34.9,35]],[[35.1,35.2],[35.3,35.4]]]) 109 | CT_vals = np.array([[[7,8],[9,10]],[[11,12],[13,14]]]) 110 | p_vals = np.array([10,20]) 111 | lat_vals = np.array([0,10]) 112 | # Plug in to xarray objects 113 | SA = xr.DataArray(SA_vals,dims=dims,coords={'y':y,'z':z,'t':t}) 114 | CT = xr.DataArray(CT_vals,dims=dims,coords={'y':y,'z':z,'t':t}) 115 | p = xr.DataArray(p_vals,dims=['z'],coords={'z':z}) 116 | lat = xr.DataArray(lat_vals,dims=['y'],coords={'y':y}) 117 | 118 | 119 | def test_xarray_with_coords(): 120 | pytest.importorskip('dask') 121 | SA_chunk = SA.chunk(chunks={'y':1,'t':1}) 122 | CT_chunk = CT.chunk(chunks={'y':1,'t':1}) 123 | lat_chunk = lat.chunk(chunks={'y':1}) 124 | 125 | # Dimensions and coordinates match: 126 | expected = gsw.sigma0(SA_vals, CT_vals) 127 | xarray = gsw.sigma0(SA, CT) 128 | chunked = gsw.sigma0(SA_chunk, CT_chunk) 129 | assert_allclose(xarray, expected) 130 | assert_allclose(chunked, expected) 131 | 132 | # Broadcasting along dimension required (dimensions known) 133 | expected = gsw.alpha(SA_vals, CT_vals, p_vals[np.newaxis, :, np.newaxis]) 134 | xarray = gsw.alpha(SA, CT, p) 135 | chunked = gsw.alpha(SA_chunk, CT_chunk, p) 136 | assert_allclose(xarray, expected) 137 | assert_allclose(chunked, expected) 138 | 139 | # Broadcasting along dimension required (dimensions unknown/exclusive) 140 | expected = gsw.z_from_p(p_vals[:, 
np.newaxis], lat_vals[np.newaxis, :]) 141 | xarray = gsw.z_from_p(p, lat) 142 | chunked = gsw.z_from_p(p,lat_chunk) 143 | assert_allclose(xarray, expected) 144 | assert_allclose(chunked, expected) 145 | 146 | def test_pandas_20(): 147 | df = pd.DataFrame( 148 | { 149 | "pressure": [0, 10, 20], 150 | "latitude": [70, 70, 70], 151 | } 152 | ) 153 | depth = -1 * gsw.z_from_p(df["pressure"], df["latitude"]) 154 | assert isinstance(depth, pd.core.series.Series) -------------------------------------------------------------------------------- /gsw/tests/write_geo_npyfiles.py: -------------------------------------------------------------------------------- 1 | """ 2 | Generate our own test values for geostrophic calculations. 3 | 4 | We are presently using simple pchip interpolation for 5 | geostrophy rather than trying to mimic the ever-changing 6 | Matlab functions. Therefore we have to make our own test 7 | values, using the current test cast inputs. 8 | 9 | This is a minimal script for that purpose, to be run in 10 | the tests directory in which it lives. It should be run 11 | only if we change to a different calculation algorithm, 12 | or we update the cast input and general check value file. 13 | """ 14 | 15 | import numpy as np 16 | 17 | import gsw 18 | from gsw._utilities import Bunch 19 | 20 | cv = Bunch(np.load('gsw_cv_v3_0.npz')) 21 | 22 | dyn_height = gsw.geo_strf_dyn_height(cv.SA_chck_cast, 23 | cv.CT_chck_cast, 24 | cv.p_chck_cast, 25 | cv.pr) 26 | np.save('geo_strf_dyn_height.npy', dyn_height) 27 | 28 | lon = cv.long_chck_cast 29 | lat = cv.lat_chck_cast 30 | p = cv.p_chck_cast 31 | CT = cv.CT_chck_cast 32 | SA = cv.SA_chck_cast 33 | strf = gsw.geo_strf_dyn_height(SA, CT, p) 34 | geovel, midlon, midlat = gsw.geostrophic_velocity(strf, lon, lat) 35 | np.save('geo_strf_velocity.npy', geovel) 36 | # midlon, midlat are OK 37 | -------------------------------------------------------------------------------- /gsw/utility.py: -------------------------------------------------------------------------------- 1 | """ 2 | Functions not specific to the TEOS-10 realm of variables. 3 | """ 4 | 5 | import numpy as np 6 | 7 | from . import _gsw_ufuncs 8 | from ._utilities import indexer, match_args_return 9 | 10 | 11 | @match_args_return 12 | def pchip_interp(x, y, xi, axis=0): 13 | """ 14 | Interpolate using Piecewise Cubic Hermite Interpolating Polynomial 15 | 16 | This is a shape-preserving algorithm; it does not introduce new local 17 | extrema. The implementation in C that is wrapped here is largely taken 18 | from the scipy implementation, 19 | https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.PchipInterpolator.html. 20 | 21 | Points outside the range of the interpolation table are filled using the 22 | end values in the table. (In contrast, 23 | scipy.interpolate.pchip_interpolate() extrapolates using the end 24 | polynomials.) 25 | 26 | Parameters 27 | ---------- 28 | x, y : array-like 29 | Interpolation table x and y; n-dimensional, must be broadcastable to 30 | the same dimensions. 31 | xi : array-like 32 | One-dimensional array of new x values. 33 | axis : int, optional, default is 0 34 | Axis along which xi is taken. 35 | 36 | Returns 37 | ------- 38 | yi : array 39 | Values of y interpolated to xi along the specified axis. 
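
    Examples
    --------
    A minimal usage sketch, modelled on the shapes exercised in the test
    suite (the values are illustrative only)::

        import numpy as np
        import gsw

        y = np.arange(60, dtype=float).reshape((2, 3, 10))
        x = np.broadcast_to(np.arange(10, dtype=float), y.shape)
        xi = [0.5, 1.5, 2.5]
        yi = gsw.pchip_interp(x, y, xi, axis=-1)  # yi.shape == (2, 3, 3)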
40 | 41 | """ 42 | 43 | xi = np.array(xi, dtype=float, copy=False, order='C', ndmin=1) 44 | if xi.ndim > 1: 45 | raise ValueError('xi must be no more than 1-dimensional') 46 | nxi = xi.size 47 | x, y = np.broadcast_arrays(x, y) 48 | out_shape = list(x.shape) 49 | out_shape[axis] = nxi 50 | yi = np.empty(out_shape, dtype=float) 51 | yi.fill(np.nan) 52 | 53 | goodmask = ~(np.isnan(x) | np.isnan(y)) 54 | 55 | order = 'F' if y.flags.fortran else 'C' 56 | for ind in indexer(y.shape, axis, order=order): 57 | igood = goodmask[ind] 58 | # If p_ref is below the deepest value, skip the profile. 59 | xgood = x[ind][igood] 60 | ygood = y[ind][igood] 61 | 62 | yi[ind] = _gsw_ufuncs.util_pchip_interp(xgood, ygood, xi) 63 | 64 | return yi 65 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | build-backend = "setuptools.build_meta" 3 | requires = [ 4 | "build", 5 | 'numpy<3,>=2.0.0rc1; python_version >= "3.9"', 6 | 'oldest-supported-numpy; python_version < "3.9"', 7 | "pip>9.0.1", 8 | "setuptools>=42", 9 | "setuptools_scm[toml]>=3.4", 10 | "wheel", 11 | ] 12 | 13 | [project] 14 | name = "gsw" 15 | description = "Gibbs Seawater Oceanographic Package of TEOS-10" 16 | license = {text = "BSD-3-Clause"} 17 | authors = [ 18 | {name = "Eric Firing, Filipe Fernandes", email = "efiring@hawaii.edu"}, 19 | ] 20 | requires-python = ">=3.8" 21 | classifiers = [ 22 | "Development Status :: 5 - Production/Stable", 23 | "Intended Audience :: Science/Research", 24 | "License :: OSI Approved :: BSD License", 25 | "Operating System :: OS Independent", 26 | "Programming Language :: Python", 27 | "Programming Language :: Python :: 3 :: Only", 28 | "Programming Language :: Python :: 3.8", 29 | "Programming Language :: Python :: 3.9", 30 | "Programming Language :: Python :: 3.10", 31 | "Programming Language :: Python :: 3.11", 32 | "Programming Language :: Python :: 3.12", 33 | "Topic :: Scientific/Engineering", 34 | ] 35 | dynamic = [ 36 | "readme", 37 | "version", 38 | ] 39 | dependencies = [ 40 | "numpy>=1.21", 41 | ] 42 | [project.urls] 43 | documentation = "https://teos-10.github.io/GSW-Python/" 44 | homepage = "https://www.teos-10.org/" 45 | repository = "https://github.com/TEOS-10/GSW-python" 46 | 47 | [tool.setuptools] 48 | license-files = ["LICENSE.txt"] 49 | zip-safe = false 50 | include-package-data = true 51 | packages = ["gsw", "gsw.tests"] 52 | 53 | 54 | [tool.setuptools.dynamic] 55 | dependencies = {file = ["requirements.txt"]} 56 | readme = {file = "README.md", content-type = "text/markdown"} 57 | 58 | [tool.setuptools_scm] 59 | write_to = "gsw/_version.py" 60 | write_to_template = "__version__ = '{version}'" 61 | tag_regex = "^(?Pv)?(?P[^\\+]+)(?P.*)?$" 62 | 63 | [tool.ruff] 64 | lint.select = [ 65 | "A", # flake8-builtins 66 | "B", # flake8-bugbear 67 | "C4", # flake8-comprehensions 68 | "F", # flakes 69 | "I", # import sorting 70 | "UP", # upgrade 71 | "NPY201", # numpy 2.0 72 | ] 73 | target-version = "py38" 74 | line-length = 105 75 | 76 | lint.ignore = [ 77 | "F401", # module imported but unused 78 | "E501", # line too long 79 | "E713", # test for membership should be 'not in' 80 | ] 81 | 82 | exclude = [ 83 | "tools", 84 | ] 85 | 86 | [tool.ruff.lint.per-file-ignores] 87 | "docs/conf.py" = [ 88 | "A001", # variable is shadowing a python builtin 89 | ] 90 | "gsw/_fixed_wrapped_ufuncs.py" = [ 91 | "F403", #'from x import *' used; unable to detect undefined 
names 92 | "F405", # 'import' may be undefined, or defined from star imports 93 | ] 94 | 95 | "gsw/_utilities.py" = [ 96 | "B904", # Within an ``except`` clause, raise exceptions with ``raise ... from err`` 97 | ] 98 | 99 | [tool.check-manifest] 100 | ignore = [ 101 | "*.yml", 102 | ".coveragerc", 103 | "docs", 104 | "docs/*", 105 | "*.enc", 106 | "tools", 107 | "tools/*", 108 | "gsw/_version.py", 109 | ] 110 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | dask 2 | numpydoc 3 | pandas>=2 4 | pytest 5 | scipy 6 | setuptools_scm 7 | sphinx 8 | sphinx_rtd_theme 9 | xarray -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """ 2 | Minimal setup.py for building gswc. 3 | """ 4 | 5 | 6 | import os 7 | import shutil 8 | import sys 9 | 10 | import numpy 11 | from setuptools import Extension, setup 12 | from setuptools.command.build_ext import build_ext as _build_ext 13 | 14 | rootpath = os.path.abspath(os.path.dirname(__file__)) 15 | 16 | 17 | def read(*parts): 18 | return open(os.path.join(rootpath, *parts)).read() 19 | 20 | 21 | class build_ext(_build_ext): 22 | # Extension builder from pandas without the cython stuff 23 | def build_extensions(self): 24 | numpy_incl = numpy.get_include() 25 | 26 | for ext in self.extensions: 27 | if hasattr(ext, "include_dirs") and not numpy_incl in ext.include_dirs: 28 | ext.include_dirs.append(numpy_incl) 29 | _build_ext.build_extensions(self) 30 | 31 | 32 | # MSVC can't handle C complex, and distutils doesn't seem to be able to 33 | # let us force C++ compilation of .c files, so we use the following hack for 34 | # Windows. 35 | if sys.platform == "win32": 36 | cext = "cpp" 37 | shutil.copy( 38 | "src/c_gsw/gsw_oceanographic_toolbox.c", 39 | "src/c_gsw/gsw_oceanographic_toolbox.cpp", 40 | ) 41 | shutil.copy("src/c_gsw/gsw_saar.c", "src/c_gsw/gsw_saar.cpp") 42 | else: 43 | cext = "c" 44 | 45 | ufunc_src_list = [ 46 | "src/_ufuncs.c", 47 | "src/c_gsw/gsw_oceanographic_toolbox." + cext, 48 | "src/c_gsw/gsw_saar." + cext, 49 | ] 50 | 51 | config = { 52 | "ext_modules": [Extension("gsw._gsw_ufuncs", ufunc_src_list)], 53 | "include_dirs": [os.path.join(rootpath, "src", "c_gsw")], 54 | "cmdclass": {"build_ext": build_ext}, 55 | } 56 | 57 | setup(**config) 58 | -------------------------------------------------------------------------------- /src/c_gsw/gsw_internal_const.h: -------------------------------------------------------------------------------- 1 | /* 2 | ** $Id: gsw_internal_const.h,v a1406e71e2a9 2016/01/08 08:01:43 fdelahoyde $ 3 | ** 4 | ** Internal constants for GSW-TEOS-10 V3.05. 5 | */ 6 | #ifndef GSW_INTERNAL_CONST_H 7 | #define GSW_INTERNAL_CONST_H 8 | 9 | /* 10 | ** The following hack is used to ensure that gcc (and gcc emulating compilers 11 | ** such as Macosx clang) do not emit unused variable warning messages. 12 | */ 13 | #ifdef __GNUC__ 14 | #define UNUSED __attribute__ ((unused)) 15 | #else 16 | #define UNUSED 17 | #endif 18 | 19 | 20 | #define GSW_TEOS10_CONSTANTS \ 21 | UNUSED double db2pa = 1.0e4, \ 22 | rec_db2pa = 1.0e-4, \ 23 | pa2db = 1.0e-4, \ 24 | rec_pa2db = 1.0e4, \ 25 | pi = 3.141592653589793, \ 26 | deg2rad = pi/180.0, \ 27 | rad2deg = 180.0/pi, \ 28 | gamma = 2.26e-7, \ 29 | /*\ 30 | ! cp0 = The "specific heat" for use [ J/(kg K) ]\ 31 | ! 
with Conservative Temperature \ 32 | */\ 33 | gsw_cp0 = 3991.86795711963, \ 34 | /*\ 35 | ! T0 = the Celsius zero point. [ K ]\ 36 | */\ 37 | gsw_t0 = 273.15, \ 38 | /*\ 39 | ! P0 = Absolute Pressure of one standard atmosphere. [ Pa ]\ 40 | */\ 41 | gsw_p0 = 101325.0, \ 42 | /*\ 43 | ! SSO = Standard Ocean Reference Salinity. [ g/kg ]\ 44 | */\ 45 | gsw_sso = 35.16504, \ 46 | gsw_sqrtsso = 5.930011804372737, \ 47 | /*\ 48 | ! uPS = unit conversion factor for salinities [ g/kg ]\ 49 | */\ 50 | gsw_ups = gsw_sso/35.0, \ 51 | /*\ 52 | ! sfac = 1/(40*gsw_ups)\ 53 | */\ 54 | gsw_sfac = 0.0248826675584615, \ 55 | /*\ 56 | ! deltaS = 24, offset = deltaS*gsw_sfac\ 57 | */\ 58 | offset = 5.971840214030754e-1, \ 59 | /*\ 60 | ! C3515 = Conductivity at (SP=35, t_68=15, p=0) [ mS/cm ]\ 61 | */\ 62 | gsw_c3515 = 42.9140, \ 63 | /*\ 64 | ! SonCl = SP to Chlorinity ratio [ (g/kg)^-1 ]\ 65 | */\ 66 | gsw_soncl = 1.80655, \ 67 | /*\ 68 | ! valence_factor = valence factor of sea salt of Reference Composition\ 69 | ! [ unitless ]\ 70 | */\ 71 | gsw_valence_factor = 1.2452898, \ 72 | /*\ 73 | ! atomic_weight = mole-weighted atomic weight of sea salt of Reference \ 74 | ! Composition [ g/mol ]\ 75 | */\ 76 | gsw_atomic_weight = 31.4038218 77 | 78 | 79 | #define GSW_SPECVOL_COEFFICIENTS \ 80 | UNUSED double a000 = -1.56497346750e-5, \ 81 | a001 = 1.85057654290e-5, \ 82 | a002 = -1.17363867310e-6, \ 83 | a003 = -3.65270065530e-7, \ 84 | a004 = 3.14540999020e-7, \ 85 | a010 = 5.55242129680e-5, \ 86 | a011 = -2.34332137060e-5, \ 87 | a012 = 4.26100574800e-6, \ 88 | a013 = 5.73918103180e-7, \ 89 | a020 = -4.95634777770e-5, \ 90 | a021 = 2.37838968519e-5, \ 91 | a022 = -1.38397620111e-6, \ 92 | a030 = 2.76445290808e-5, \ 93 | a031 = -1.36408749928e-5, \ 94 | a032 = -2.53411666056e-7, \ 95 | a040 = -4.02698077700e-6, \ 96 | a041 = 2.53683834070e-6, \ 97 | a050 = 1.23258565608e-6, \ 98 | a100 = 3.50095997640e-5, \ 99 | a101 = -9.56770881560e-6, \ 100 | a102 = -5.56991545570e-6, \ 101 | a103 = -2.72956962370e-7, \ 102 | a110 = -7.48716846880e-5, \ 103 | a111 = -4.73566167220e-7, \ 104 | a112 = 7.82747741600e-7, \ 105 | a120 = 7.24244384490e-5, \ 106 | a121 = -1.03676320965e-5, \ 107 | a122 = 2.32856664276e-8, \ 108 | a130 = -3.50383492616e-5, \ 109 | a131 = 5.18268711320e-6, \ 110 | a140 = -1.65263794500e-6, \ 111 | a200 = -4.35926785610e-5, \ 112 | a201 = 1.11008347650e-5, \ 113 | a202 = 5.46207488340e-6, \ 114 | a210 = 7.18156455200e-5, \ 115 | a211 = 5.85666925900e-6, \ 116 | a212 = -1.31462208134e-6, \ 117 | a220 = -4.30608991440e-5, \ 118 | a221 = 9.49659182340e-7, \ 119 | a230 = 1.74814722392e-5, \ 120 | a300 = 3.45324618280e-5, \ 121 | a301 = -9.84471178440e-6, \ 122 | a302 = -1.35441856270e-6, \ 123 | a310 = -3.73971683740e-5, \ 124 | a311 = -9.76522784000e-7, \ 125 | a320 = 6.85899736680e-6, \ 126 | a400 = -1.19594097880e-5, \ 127 | a401 = 2.59092252600e-6, \ 128 | a410 = 7.71906784880e-6, \ 129 | a500 = 1.38645945810e-6, \ 130 | b000 = -3.10389819760e-4, \ 131 | b003 = 3.63101885150e-7, \ 132 | b004 = -1.11471254230e-7, \ 133 | b010 = 3.50095997640e-5, \ 134 | b013 = -2.72956962370e-7, \ 135 | b020 = -3.74358423440e-5, \ 136 | b030 = 2.41414794830e-5, \ 137 | b040 = -8.75958731540e-6, \ 138 | b050 = -3.30527589000e-7, \ 139 | b100 = 1.33856134076e-3, \ 140 | b103 = 3.34926075600e-8, \ 141 | b110 = -8.71853571220e-5, \ 142 | b120 = 7.18156455200e-5, \ 143 | b130 = -2.87072660960e-5, \ 144 | b140 = 8.74073611960e-6, \ 145 | b200 = -2.55143801811e-3, \ 146 | b210 = 1.03597385484e-4, \ 147 | b220 = 
-5.60957525610e-5, \ 148 | b230 = 6.85899736680e-6, \ 149 | b300 = 2.32344279772e-3, \ 150 | b310 = -4.78376391520e-5, \ 151 | b320 = 1.54381356976e-5, \ 152 | b400 = -1.05461852535e-3, \ 153 | b410 = 6.93229729050e-6, \ 154 | b500 = 1.91594743830e-4, \ 155 | b001 = 2.42624687470e-5, \ 156 | b011 = -9.56770881560e-6, \ 157 | b021 = -2.36783083610e-7, \ 158 | b031 = -3.45587736550e-6, \ 159 | b041 = 1.29567177830e-6, \ 160 | b101 = -6.95849219480e-5, \ 161 | b111 = 2.22016695300e-5, \ 162 | b121 = 5.85666925900e-6, \ 163 | b131 = 6.33106121560e-7, \ 164 | b201 = 1.12412331915e-4, \ 165 | b211 = -2.95341353532e-5, \ 166 | b221 = -1.46478417600e-6, \ 167 | b301 = -6.92888744480e-5, \ 168 | b311 = 1.03636901040e-5, \ 169 | b401 = 1.54637136265e-5, \ 170 | b002 = -5.84844329840e-7, \ 171 | b012 = -5.56991545570e-6, \ 172 | b022 = 3.91373870800e-7, \ 173 | b032 = 7.76188880920e-9, \ 174 | b102 = -9.62445031940e-6, \ 175 | b112 = 1.09241497668e-5, \ 176 | b122 = -1.31462208134e-6, \ 177 | b202 = 1.47789320994e-5, \ 178 | b212 = -4.06325568810e-6, \ 179 | b302 = -7.12478989080e-6, \ 180 | c000 = -6.07991438090e-5, \ 181 | c001 = 1.99712338438e-5, \ 182 | c002 = -3.39280843110e-6, \ 183 | c003 = 4.21246123200e-7, \ 184 | c004 = -6.32363064300e-8, \ 185 | c005 = 1.17681023580e-8, \ 186 | c010 = 1.85057654290e-5, \ 187 | c011 = -2.34727734620e-6, \ 188 | c012 = -1.09581019659e-6, \ 189 | c013 = 1.25816399608e-6, \ 190 | c020 = -1.17166068530e-5, \ 191 | c021 = 4.26100574800e-6, \ 192 | c022 = 8.60877154770e-7, \ 193 | c030 = 7.92796561730e-6, \ 194 | c031 = -9.22650800740e-7, \ 195 | c040 = -3.41021874820e-6, \ 196 | c041 = -1.26705833028e-7, \ 197 | c050 = 5.07367668140e-7, \ 198 | c100 = 2.42624687470e-5, \ 199 | c101 = -1.16968865968e-6, \ 200 | c102 = 1.08930565545e-6, \ 201 | c103 = -4.45885016920e-7, \ 202 | c110 = -9.56770881560e-6, \ 203 | c111 = -1.11398309114e-5, \ 204 | c112 = -8.18870887110e-7, \ 205 | c120 = -2.36783083610e-7, \ 206 | c121 = 7.82747741600e-7, \ 207 | c130 = -3.45587736550e-6, \ 208 | c131 = 1.55237776184e-8, \ 209 | c140 = 1.29567177830e-6, \ 210 | c200 = -3.47924609740e-5, \ 211 | c201 = -9.62445031940e-6, \ 212 | c202 = 5.02389113400e-8, \ 213 | c210 = 1.11008347650e-5, \ 214 | c211 = 1.09241497668e-5, \ 215 | c220 = 2.92833462950e-6, \ 216 | c221 = -1.31462208134e-6, \ 217 | c230 = 3.16553060780e-7, \ 218 | c300 = 3.74707773050e-5, \ 219 | c301 = 9.85262139960e-6, \ 220 | c310 = -9.84471178440e-6, \ 221 | c311 = -2.70883712540e-6, \ 222 | c320 = -4.88261392000e-7, \ 223 | c400 = -1.73222186120e-5, \ 224 | c401 = -3.56239494540e-6, \ 225 | c410 = 2.59092252600e-6, \ 226 | c500 = 3.09274272530e-6, \ 227 | h001 = 1.07699958620e-3, \ 228 | h002 = -3.03995719050e-5, \ 229 | h003 = 3.32853897400e-6, \ 230 | h004 = -2.82734035930e-7, \ 231 | h005 = 2.10623061600e-8, \ 232 | h006 = -2.10787688100e-9, \ 233 | h007 = 2.80192913290e-10, \ 234 | h011 = -1.56497346750e-5, \ 235 | h012 = 9.25288271450e-6, \ 236 | h013 = -3.91212891030e-7, \ 237 | h014 = -9.13175163830e-8, \ 238 | h015 = 6.29081998040e-8, \ 239 | h021 = 2.77621064840e-5, \ 240 | h022 = -5.85830342650e-6, \ 241 | h023 = 7.10167624670e-7, \ 242 | h024 = 7.17397628980e-8, \ 243 | h031 = -1.65211592590e-5, \ 244 | h032 = 3.96398280870e-6, \ 245 | h033 = -1.53775133460e-7, \ 246 | h042 = -1.70510937410e-6, \ 247 | h043 = -2.11176388380e-8, \ 248 | h041 = 6.91113227020e-6, \ 249 | h051 = -8.05396155400e-7, \ 250 | h052 = 2.53683834070e-7, \ 251 | h061 = 2.05430942680e-7, \ 252 | h101 = -3.10389819760e-4, \ 253 | h102 = 
1.21312343735e-5, \ 254 | h103 = -1.94948109950e-7, \ 255 | h104 = 9.07754712880e-8, \ 256 | h105 = -2.22942508460e-8, \ 257 | h111 = 3.50095997640e-5, \ 258 | h112 = -4.78385440780e-6, \ 259 | h113 = -1.85663848520e-6, \ 260 | h114 = -6.82392405930e-8, \ 261 | h121 = -3.74358423440e-5, \ 262 | h122 = -1.18391541805e-7, \ 263 | h123 = 1.30457956930e-7, \ 264 | h131 = 2.41414794830e-5, \ 265 | h132 = -1.72793868275e-6, \ 266 | h133 = 2.58729626970e-9, \ 267 | h141 = -8.75958731540e-6, \ 268 | h142 = 6.47835889150e-7, \ 269 | h151 = -3.30527589000e-7, \ 270 | h201 = 6.69280670380e-4, \ 271 | h202 = -1.73962304870e-5, \ 272 | h203 = -1.60407505320e-6, \ 273 | h204 = 4.18657594500e-9, \ 274 | h211 = -4.35926785610e-5, \ 275 | h212 = 5.55041738250e-6, \ 276 | h213 = 1.82069162780e-6, \ 277 | h221 = 3.59078227600e-5, \ 278 | h222 = 1.46416731475e-6, \ 279 | h223 = -2.19103680220e-7, \ 280 | h231 = -1.43536330480e-5, \ 281 | h232 = 1.58276530390e-7, \ 282 | h241 = 4.37036805980e-6, \ 283 | h301 = -8.50479339370e-4, \ 284 | h302 = 1.87353886525e-5, \ 285 | h303 = 1.64210356660e-6, \ 286 | h311 = 3.45324618280e-5, \ 287 | h312 = -4.92235589220e-6, \ 288 | h313 = -4.51472854230e-7, \ 289 | h321 = -1.86985841870e-5, \ 290 | h322 = -2.44130696000e-7, \ 291 | h331 = 2.28633245560e-6, \ 292 | h401 = 5.80860699430e-4, \ 293 | h402 = -8.66110930600e-6, \ 294 | h403 = -5.93732490900e-7, \ 295 | h411 = -1.19594097880e-5, \ 296 | h421 = 3.85953392440e-6, \ 297 | h412 = 1.29546126300e-6, \ 298 | h501 = -2.10923705070e-4, \ 299 | h502 = 1.54637136265e-6, \ 300 | h511 = 1.38645945810e-6, \ 301 | h601 = 3.19324573050e-5, \ 302 | v000 = 1.0769995862e-3, \ 303 | v001 = -6.0799143809e-5, \ 304 | v002 = 9.9856169219e-6, \ 305 | v003 = -1.1309361437e-6, \ 306 | v004 = 1.0531153080e-7, \ 307 | v005 = -1.2647261286e-8, \ 308 | v006 = 1.9613503930e-9, \ 309 | v010 = -3.1038981976e-4, \ 310 | v011 = 2.4262468747e-5, \ 311 | v012 = -5.8484432984e-7, \ 312 | v013 = 3.6310188515e-7, \ 313 | v014 = -1.1147125423e-7, \ 314 | v020 = 6.6928067038e-4, \ 315 | v021 = -3.4792460974e-5, \ 316 | v022 = -4.8122251597e-6, \ 317 | v023 = 1.6746303780e-8, \ 318 | v030 = -8.5047933937e-4, \ 319 | v031 = 3.7470777305e-5, \ 320 | v032 = 4.9263106998e-6, \ 321 | v040 = 5.8086069943e-4, \ 322 | v041 = -1.7322218612e-5, \ 323 | v042 = -1.7811974727e-6, \ 324 | v050 = -2.1092370507e-4, \ 325 | v051 = 3.0927427253e-6, \ 326 | v060 = 3.1932457305e-5, \ 327 | v100 = -1.5649734675e-5, \ 328 | v101 = 1.8505765429e-5, \ 329 | v102 = -1.1736386731e-6, \ 330 | v103 = -3.6527006553e-7, \ 331 | v104 = 3.1454099902e-7, \ 332 | v110 = 3.5009599764e-5, \ 333 | v111 = -9.5677088156e-6, \ 334 | v112 = -5.5699154557e-6, \ 335 | v113 = -2.7295696237e-7, \ 336 | v120 = -4.3592678561e-5, \ 337 | v121 = 1.1100834765e-5, \ 338 | v122 = 5.4620748834e-6, \ 339 | v130 = 3.4532461828e-5, \ 340 | v131 = -9.8447117844e-6, \ 341 | v132 = -1.3544185627e-6, \ 342 | v140 = -1.1959409788e-5, \ 343 | v141 = 2.5909225260e-6, \ 344 | v150 = 1.3864594581e-6, \ 345 | v200 = 2.7762106484e-5, \ 346 | v201 = -1.1716606853e-5, \ 347 | v202 = 2.1305028740e-6, \ 348 | v203 = 2.8695905159e-7, \ 349 | v210 = -3.7435842344e-5, \ 350 | v211 = -2.3678308361e-7, \ 351 | v212 = 3.9137387080e-7, \ 352 | v220 = 3.5907822760e-5, \ 353 | v221 = 2.9283346295e-6, \ 354 | v222 = -6.5731104067e-7, \ 355 | v230 = -1.8698584187e-5, \ 356 | v231 = -4.8826139200e-7, \ 357 | v240 = 3.8595339244e-6, \ 358 | v300 = -1.6521159259e-5, \ 359 | v301 = 7.9279656173e-6, \ 360 | v302 = -4.6132540037e-7, \ 361 | 
v310 = 2.4141479483e-5, \ 362 | v311 = -3.4558773655e-6, \ 363 | v312 = 7.7618888092e-9, \ 364 | v320 = -1.4353633048e-5, \ 365 | v321 = 3.1655306078e-7, \ 366 | v330 = 2.2863324556e-6, \ 367 | v400 = 6.9111322702e-6, \ 368 | v401 = -3.4102187482e-6, \ 369 | v402 = -6.3352916514e-8, \ 370 | v410 = -8.7595873154e-6, \ 371 | v411 = 1.2956717783e-6, \ 372 | v420 = 4.3703680598e-6, \ 373 | v500 = -8.0539615540e-7, \ 374 | v501 = 5.0736766814e-7, \ 375 | v510 = -3.3052758900e-7, \ 376 | v600 = 2.0543094268e-7 377 | 378 | #define GSW_SP_COEFFICIENTS \ 379 | UNUSED double a0 = 0.0080, \ 380 | a1 = -0.1692, \ 381 | a2 = 25.3851, \ 382 | a3 = 14.0941, \ 383 | a4 = -7.0261, \ 384 | a5 = 2.7081, \ 385 | b0 = 0.0005, \ 386 | b1 = -0.0056, \ 387 | b2 = -0.0066, \ 388 | b3 = -0.0375, \ 389 | b4 = 0.0636, \ 390 | b5 = -0.0144, \ 391 | c0 = 0.6766097, \ 392 | c1 = 2.00564e-2, \ 393 | c2 = 1.104259e-4, \ 394 | c3 = -6.9698e-7, \ 395 | c4 = 1.0031e-9, \ 396 | d1 = 3.426e-2, \ 397 | d2 = 4.464e-4, \ 398 | d3 = 4.215e-1, \ 399 | d4 = -3.107e-3, \ 400 | e1 = 2.070e-5, \ 401 | e2 = -6.370e-10, \ 402 | e3 = 3.989e-15, \ 403 | k = 0.0162 404 | 405 | #define GSW_SAAR_DATA \ 406 | UNUSED int deli[4] = {0,1,1,0}, delj[4] = {0,0,1,1}, npan = 6; \ 407 | UNUSED double longs_pan[6] = {260.00, 272.59, 276.50, 278.65, 280.73, 292.0},\ 408 | lats_pan[6] = { 19.55, 13.97, 9.60, 8.10, 9.33, 3.4} 409 | 410 | #ifdef __cplusplus 411 | #define GSW_GIBBS_ICE_COEFFICIENTS \ 412 | UNUSED std::complex t1 ( 3.68017112855051e-2, 5.10878114959572e-2), \ 413 | t2 ( 3.37315741065416e-1, 3.35449415919309e-1), \ 414 | r1 ( 4.47050716285388e1, 6.56876847463481e1), \ 415 | r20 (-7.25974574329220e1, -7.81008427112870e1), \ 416 | r21 (-5.57107698030123e-5, 4.64578634580806e-5), \ 417 | r22 (2.34801409215913e-11, -2.85651142904972e-11);\ 418 | /*\ 419 | ! 1./Pt, where Pt = 611.657; Experimental triple-point pressure in Pa.\ 420 | */\ 421 | UNUSED double rec_pt = 1.634903221903779e-3, \ 422 | tt = 273.16, /*Triple-point temperature, kelvin (K).*/ \ 423 | rec_tt = 3.660858105139845e-3, /*= 1/tt */ \ 424 | g00 = -6.32020233335886e5, \ 425 | g01 = 6.55022213658955e-1, \ 426 | g02 = -1.89369929326131e-8, \ 427 | g03 = 3.3974612327105304e-15, \ 428 | g04 = -5.564648690589909e-22 429 | #else 430 | #define GSW_GIBBS_ICE_COEFFICIENTS \ 431 | UNUSED double complex t1 =( 3.68017112855051e-2+ 5.10878114959572e-2*I), \ 432 | t2 =( 3.37315741065416e-1+ 3.35449415919309e-1*I), \ 433 | r1 =( 4.47050716285388e1+ 6.56876847463481e1*I), \ 434 | r20 =(-7.25974574329220e1+ -7.81008427112870e1*I), \ 435 | r21 =(-5.57107698030123e-5+ 4.64578634580806e-5*I), \ 436 | r22 =(2.34801409215913e-11+-2.85651142904972e-11*I); \ 437 | /*\ 438 | ! 
1./Pt, where Pt = 611.657; Experimental triple-point pressure in Pa.\ 439 | */\ 440 | UNUSED double rec_pt = 1.634903221903779e-3, \ 441 | tt = 273.16, /*Triple-point temperature, kelvin (K).*/ \ 442 | rec_tt = 3.660858105139845e-3, /*= 1/tt */ \ 443 | g00 = -6.32020233335886e5, \ 444 | g01 = 6.55022213658955e-1, \ 445 | g02 = -1.89369929326131e-8, \ 446 | g03 = 3.3974612327105304e-15, \ 447 | g04 = -5.564648690589909e-22 448 | #endif 449 | 450 | 451 | #define GSW_FREEZING_POLY_COEFFICIENTS \ 452 | UNUSED double c0 = 0.017947064327968736, \ 453 | c1 = -6.076099099929818, \ 454 | c2 = 4.883198653547851, \ 455 | c3 = -11.88081601230542, \ 456 | c4 = 13.34658511480257, \ 457 | c5 = -8.722761043208607, \ 458 | c6 = 2.082038908808201, \ 459 | c7 = -7.389420998107497, \ 460 | c8 = -2.110913185058476, \ 461 | c9 = 0.2295491578006229, \ 462 | c10 = -0.9891538123307282, \ 463 | c11 = -0.08987150128406496, \ 464 | c12 = 0.3831132432071728, \ 465 | c13 = 1.054318231187074, \ 466 | c14 = 1.065556599652796, \ 467 | c15 = -0.7997496801694032, \ 468 | c16 = 0.3850133554097069, \ 469 | c17 = -2.078616693017569, \ 470 | c18 = 0.8756340772729538, \ 471 | c19 = -2.079022768390933, \ 472 | c20 = 1.596435439942262, \ 473 | c21 = 0.1338002171109174, \ 474 | c22 = 1.242891021876471, \ 475 | /* \ 476 | ! Note that a = 0.502500117621,/gsw_sso \ 477 | */ \ 478 | a = 0.014289763856964, \ 479 | b = 0.057000649899720, \ 480 | t0 = 0.002519, \ 481 | t1 = -5.946302841607319, \ 482 | t2 = 4.136051661346983, \ 483 | t3 = -1.115150523403847e1, \ 484 | t4 = 1.476878746184548e1, \ 485 | t5 = -1.088873263630961e1, \ 486 | t6 = 2.961018839640730, \ 487 | t7 = -7.433320943962606, \ 488 | t8 = -1.561578562479883, \ 489 | t9 = 4.073774363480365e-2, \ 490 | t10 = 1.158414435887717e-2, \ 491 | t11 = -4.122639292422863e-1, \ 492 | t12 = -1.123186915628260e-1, \ 493 | t13 = 5.715012685553502e-1, \ 494 | t14 = 2.021682115652684e-1, \ 495 | t15 = 4.140574258089767e-2, \ 496 | t16 = -6.034228641903586e-1, \ 497 | t17 = -1.205825928146808e-2, \ 498 | t18 = -2.812172968619369e-1, \ 499 | t19 = 1.877244474023750e-2, \ 500 | t20 = -1.204395563789007e-1, \ 501 | t21 = 2.349147739749606e-1, \ 502 | t22 = 2.748444541144219e-3 503 | 504 | 505 | #define GSW_BALTIC_DATA \ 506 | /*\ 507 | ! 
Coordinate data for the Baltic Sea\ 508 | */\ 509 | UNUSED double xb_left[3]={12.6, 7.0, 26.0},\ 510 | yb_left[3]={50.0, 59.0, 69.0},\ 511 | xb_right[2]={45.0, 26.0},\ 512 | yb_right[2]={50.0, 69.0} 513 | 514 | #ifndef max 515 | #define max(a,b) (((a)>(b))?(a):(b)) 516 | #endif 517 | #ifndef min 518 | #define min(a,b) (((a)<(b))?(a):(b)) 519 | #endif 520 | 521 | #endif /* GSW_INTERNAL_CONST_H */ 522 | -------------------------------------------------------------------------------- /src/c_gsw/gsw_saar.c: -------------------------------------------------------------------------------- 1 | /* 2 | ** $Id: gsw_saar.c,v b04abca68ac0 2015/09/13 17:47:28 fdelahoyde $ 3 | ** $Version: 3.05.0-2 $ 4 | ** 5 | ** GSW TEOS-10 V3.05 6 | */ 7 | #include "gswteos-10.h" 8 | #include "gsw_internal_const.h" 9 | #include "gsw_saar_data.h" 10 | 11 | #ifdef __cplusplus 12 | #define ISNAN(x) std::isnan(x) 13 | #else 14 | #define ISNAN(x) isnan(x) 15 | #endif 16 | 17 | static double gsw_sum(double *x, int n); 18 | 19 | static double 20 | gsw_sum(double *x, int n) 21 | { 22 | int i; 23 | double val = 0; 24 | 25 | for (i = 0; i < n; i++) 26 | val += x[i]; 27 | return (val); 28 | } 29 | 30 | #define sum(x) gsw_sum(x, sizeof(x) / sizeof(double)) 31 | 32 | /* 33 | !========================================================================== 34 | function gsw_saar(p,long,lat) 35 | !========================================================================== 36 | 37 | ! Calculates the Absolute Salinity Anomaly Ratio, SAAR at a geographic point. 38 | ! 39 | ! p : sea pressure [dbar] 40 | ! long : longitude [deg E] 41 | ! lat : latitude [deg N] 42 | ! 43 | ! gsw_saar : Absolute Salinity Anomaly Ratio [unitless] 44 | */ 45 | double 46 | gsw_saar(double p, double lon, double lat) 47 | { 48 | GSW_SAAR_DATA; 49 | int nx = gsw_nx, ny = gsw_ny, nz = gsw_nz; 50 | int indx0, indy0, indz0, k, ndepth_index; 51 | double saar[4], saar_old[4]; 52 | double sa_upper, sa_lower, dlong, dlat; 53 | double r1, s1, t1, ndepth_max, return_value; 54 | 55 | return_value = GSW_INVALID_VALUE; 56 | 57 | if (ISNAN(lat) || ISNAN(lon) || ISNAN(p)) 58 | return (return_value); 59 | 60 | if (lat < -86.0 || lat > 90.0) 61 | return (return_value); 62 | 63 | lon = fmod(lon, 360.0); 64 | if (lon < 0.0) 65 | lon += 360.0; 66 | 67 | dlong = longs_ref[1] - longs_ref[0]; 68 | dlat = lats_ref[1] - lats_ref[0]; 69 | 70 | indx0 = floor(0 + (nx - 1) * (lon - longs_ref[0]) / (longs_ref[nx - 1] - longs_ref[0])); 71 | if (indx0 == nx - 1) 72 | indx0 = nx - 2; 73 | 74 | indy0 = floor(0 + (ny - 1) * (lat - lats_ref[0]) / (lats_ref[ny - 1] - lats_ref[0])); 75 | if (indy0 == ny - 1) 76 | indy0 = ny - 2; 77 | /* 78 | ! Look for the maximum valid "ndepth_ref" value around our point. 79 | ! Note: invalid "ndepth_ref" values are NaNs (a hangover from the codes 80 | ! Matlab origins), but we have replaced the NaNs with a value of "9e90", 81 | ! hence we need an additional upper-limit check in the code below so they 82 | ! will not be recognised as valid values. 83 | */ 84 | ndepth_max = -1.0; 85 | for (k = 0; k < 4; k++) 86 | { 87 | ndepth_index = indy0 + delj[k] + (indx0 + deli[k]) * ny; 88 | if (ndepth_ref[ndepth_index] > 0.0 && 89 | ndepth_ref[ndepth_index] < 1e90) 90 | ndepth_max = max(ndepth_max, ndepth_ref[ndepth_index]); 91 | } 92 | /* 93 | ! If we are a long way from the ocean then there will be no valid "ndepth_ref" 94 | ! values near the point (ie. 
surrounded by NaNs) - so just return SAAR = 0.0 95 | */ 96 | if (ndepth_max == -1.0) 97 | return (0.0); 98 | 99 | if (p > p_ref[(int)(ndepth_max)-1]) 100 | p = p_ref[(int)(ndepth_max)-1]; 101 | 102 | indz0 = gsw_util_indx(p_ref, nz, p); 103 | 104 | r1 = (lon - longs_ref[indx0]) / (longs_ref[indx0 + 1] - longs_ref[indx0]); 105 | s1 = (lat - lats_ref[indy0]) / (lats_ref[indy0 + 1] - lats_ref[indy0]); 106 | t1 = (p - p_ref[indz0]) / (p_ref[indz0 + 1] - p_ref[indz0]); 107 | 108 | for (k = 0; k < 4; k++) 109 | saar[k] = saar_ref[indz0 + nz * (indy0 + delj[k] + (indx0 + deli[k]) * ny)]; 110 | 111 | if (longs_pan[0] <= lon && lon <= longs_pan[npan - 1] - 0.001 && 112 | lats_pan[npan - 1] <= lat && lat <= lats_pan[0]) 113 | { 114 | memmove(saar_old, saar, 4 * sizeof(double)); 115 | gsw_add_barrier(saar_old, lon, lat, longs_ref[indx0], 116 | lats_ref[indy0], dlong, dlat, saar); 117 | } 118 | else if (fabs(sum(saar)) >= GSW_ERROR_LIMIT) 119 | { 120 | memmove(saar_old, saar, 4 * sizeof(double)); 121 | gsw_add_mean(saar_old, saar); 122 | } 123 | 124 | sa_upper = (1.0 - s1) * (saar[0] + r1 * (saar[1] - saar[0])) + 125 | s1 * (saar[3] + r1 * (saar[2] - saar[3])); 126 | 127 | for (k = 0; k < 4; k++) 128 | saar[k] = saar_ref[indz0 + 1 + nz * (indy0 + delj[k] + (indx0 + deli[k]) * ny)]; 129 | 130 | if (longs_pan[0] <= lon && lon <= longs_pan[npan - 1] - 0.001 && 131 | lats_pan[npan - 1] <= lat && lat <= lats_pan[0]) 132 | { 133 | memmove(saar_old, saar, 4 * sizeof(double)); 134 | gsw_add_barrier(saar_old, lon, lat, longs_ref[indx0], 135 | lats_ref[indy0], dlong, dlat, saar); 136 | } 137 | else if (fabs(sum(saar)) >= GSW_ERROR_LIMIT) 138 | { 139 | memmove(saar_old, saar, 4 * sizeof(double)); 140 | gsw_add_mean(saar_old, saar); 141 | } 142 | 143 | sa_lower = (1.0 - s1) * (saar[0] + r1 * (saar[1] - saar[0])) + 144 | s1 * (saar[3] + r1 * (saar[2] - saar[3])); 145 | if (fabs(sa_lower) >= GSW_ERROR_LIMIT) 146 | sa_lower = sa_upper; 147 | 148 | return_value = sa_upper + t1 * (sa_lower - sa_upper); 149 | 150 | if (fabs(return_value) >= GSW_ERROR_LIMIT) 151 | return_value = GSW_INVALID_VALUE; 152 | 153 | return (return_value); 154 | } 155 | 156 | /* 157 | !========================================================================== 158 | function gsw_deltasa_atlas(p,lon,lat) 159 | !========================================================================== 160 | 161 | ! Calculates the Absolute Salinity Anomaly atlas value, delta_SA_atlas. 162 | ! 163 | ! p : sea pressure [dbar] 164 | ! lon : longiture [deg E] 165 | ! lat : latitude [deg N] 166 | ! 167 | ! 
deltasa_atlas : Absolute Salinity Anomaly atlas value [g/kg] 168 | */ 169 | double 170 | gsw_deltasa_atlas(double p, double lon, double lat) 171 | { 172 | GSW_SAAR_DATA; 173 | int nx = gsw_nx, ny = gsw_ny, nz = gsw_nz; 174 | int indx0, indy0, indz0, k, ndepth_index; 175 | double dsar[4], dsar_old[4]; 176 | double dlong, dlat; 177 | double return_value, sa_upper, sa_lower; 178 | double r1, s1, t1, ndepth_max; 179 | 180 | return_value = GSW_INVALID_VALUE; 181 | 182 | if (ISNAN(lat) || ISNAN(lon) || ISNAN(p)) 183 | return (return_value); 184 | 185 | if (lat < -86.0 || lat > 90.0) 186 | return (return_value); 187 | 188 | lon = fmod(lon, 360.0); 189 | if (lon < 0.0) 190 | lon += 360.0; 191 | 192 | dlong = longs_ref[1] - longs_ref[0]; 193 | dlat = lats_ref[1] - lats_ref[0]; 194 | 195 | indx0 = floor(0 + (nx - 1) * (lon - longs_ref[0]) / 196 | (longs_ref[nx - 1] - longs_ref[0])); 197 | if (indx0 == nx - 1) 198 | indx0 = nx - 2; 199 | 200 | indy0 = floor(0 + (ny - 1) * (lat - lats_ref[0]) / 201 | (lats_ref[ny - 1] - lats_ref[0])); 202 | if (indy0 == ny - 1) 203 | indy0 = ny - 2; 204 | /* 205 | ! Look for the maximum valid "ndepth_ref" value around our point. 206 | ! Note: invalid "ndepth_ref" values are NaNs (a hangover from the codes 207 | ! Matlab origins), but we have replaced the NaNs with a value of "9e90", 208 | ! hence we need an additional upper-limit check in the code below so they 209 | ! will not be recognised as valid values. 210 | */ 211 | ndepth_max = -1; 212 | for (k = 0; k < 4; k++) 213 | { 214 | ndepth_index = indy0 + delj[k] + (indx0 + deli[k]) * ny; 215 | if (ndepth_ref[ndepth_index] > 0.0 && 216 | ndepth_ref[ndepth_index] < 1e90) 217 | ndepth_max = max(ndepth_max, ndepth_ref[ndepth_index]); 218 | } 219 | /* 220 | ! If we are a long way from the ocean then there will be no valid "ndepth_ref" 221 | ! values near the point (ie. 
surrounded by NaNs) - so deltasa_atlas = 0.0 222 | */ 223 | if (ndepth_max == -1.0) 224 | return (0.0); 225 | 226 | if (p > p_ref[(int)(ndepth_max)-1]) 227 | p = p_ref[(int)(ndepth_max)-1]; 228 | 229 | indz0 = gsw_util_indx(p_ref, nz, p); 230 | 231 | r1 = (lon - longs_ref[indx0]) / 232 | (longs_ref[indx0 + 1] - longs_ref[indx0]); 233 | s1 = (lat - lats_ref[indy0]) / 234 | (lats_ref[indy0 + 1] - lats_ref[indy0]); 235 | t1 = (p - p_ref[indz0]) / 236 | (p_ref[indz0 + 1] - p_ref[indz0]); 237 | 238 | for (k = 0; k < 4; k++) 239 | dsar[k] = delta_sa_ref[indz0 + nz * (indy0 + delj[k] + 240 | (indx0 + deli[k]) * ny)]; 241 | 242 | if (longs_pan[0] <= lon && lon <= longs_pan[npan - 1] - 0.001 && 243 | lats_pan[npan - 1] <= lat && lat <= lats_pan[0]) 244 | { 245 | memmove(dsar_old, dsar, 4 * sizeof(double)); 246 | gsw_add_barrier(dsar_old, lon, lat, longs_ref[indx0], 247 | lats_ref[indy0], dlong, dlat, dsar); 248 | } 249 | else if (fabs(sum(dsar)) >= GSW_ERROR_LIMIT) 250 | { 251 | memmove(dsar_old, dsar, 4 * sizeof(double)); 252 | gsw_add_mean(dsar_old, dsar); 253 | } 254 | 255 | sa_upper = (1.0 - s1) * (dsar[0] + r1 * (dsar[1] - dsar[0])) + 256 | s1 * (dsar[3] + r1 * (dsar[2] - dsar[3])); 257 | 258 | for (k = 0; k < 4; k++) 259 | dsar[k] = delta_sa_ref[indz0 + 1 + nz * (indy0 + delj[k] + (indx0 + deli[k]) * ny)]; 260 | 261 | if (longs_pan[0] <= lon && lon <= longs_pan[npan - 1] && 262 | lats_pan[npan - 1] <= lat && lat <= lats_pan[0]) 263 | { 264 | memmove(dsar_old, dsar, 4 * sizeof(double)); 265 | gsw_add_barrier(dsar_old, lon, lat, longs_ref[indx0], 266 | lats_ref[indy0], dlong, dlat, dsar); 267 | } 268 | else if (fabs(sum(dsar)) >= GSW_ERROR_LIMIT) 269 | { 270 | memmove(dsar_old, dsar, 4 * sizeof(double)); 271 | gsw_add_mean(dsar_old, dsar); 272 | } 273 | 274 | sa_lower = (1.0 - s1) * (dsar[0] + r1 * (dsar[1] - dsar[0])) + 275 | s1 * (dsar[3] + r1 * (dsar[2] - dsar[3])); 276 | if (fabs(sa_lower) >= GSW_ERROR_LIMIT) 277 | sa_lower = sa_upper; 278 | 279 | return_value = sa_upper + t1 * (sa_lower - sa_upper); 280 | 281 | if (fabs(return_value) >= GSW_ERROR_LIMIT) 282 | return (GSW_INVALID_VALUE); 283 | 284 | return (return_value); 285 | } 286 | -------------------------------------------------------------------------------- /src/method_bodies.c: -------------------------------------------------------------------------------- 1 | /* Custom wrappers for GSW functions that are not suitable for ufuncs. 
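
   These routines operate on whole 1-D profiles (variable-length arrays of
   SA, CT, p, ...) rather than element-wise, so they are exposed as plain
   METH_VARARGS functions (see method_def_entries.c) instead of numpy ufuncs.
   Each wrapper converts its inputs to contiguous double arrays, calls the
   corresponding gsw_* C routine, and returns a new ndarray (or a tuple of
   ndarrays), raising ValueError or RuntimeError on bad input or failure.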
2 | */ 3 | 4 | 5 | /* 6 | double * Returns NULL on error, dyn_height if okay 7 | gsw_geo_strf_dyn_height(double *sa, double *ct, double *p, double p_ref, 8 | int n_levels, double *dyn_height) 9 | */ 10 | 11 | static PyObject * 12 | geo_strf_dyn_height(PyObject *NPY_UNUSED(self), PyObject *args) 13 | { 14 | PyObject *sa_o, *ct_o, *p_o; 15 | double p_ref; 16 | PyArrayObject *sa_a, *ct_a, *p_a, *dh_a; 17 | int n_levels; 18 | double *ret = NULL; /* NULL on error, dyn_height if OK */ 19 | 20 | if (!PyArg_ParseTuple(args, "OOOd", &sa_o, &ct_o, &p_o, &p_ref)) 21 | return NULL; 22 | 23 | sa_a = (PyArrayObject *)PyArray_ContiguousFromAny(sa_o, NPY_DOUBLE, 1, 1); 24 | if (sa_a == NULL) 25 | return NULL; 26 | ct_a = (PyArrayObject *)PyArray_ContiguousFromAny(ct_o, NPY_DOUBLE, 1, 1); 27 | if (ct_a == NULL) 28 | { 29 | Py_XDECREF(sa_a); 30 | return NULL; 31 | } 32 | p_a = (PyArrayObject *)PyArray_ContiguousFromAny(p_o, NPY_DOUBLE, 1, 1); 33 | if (p_a == NULL) 34 | { 35 | Py_XDECREF(sa_a); 36 | Py_XDECREF(ct_a); 37 | return NULL; 38 | } 39 | n_levels = PyArray_DIM(sa_a, 0); 40 | if (PyArray_DIM(ct_a, 0) != n_levels || PyArray_DIM(p_a, 0) != n_levels) 41 | { 42 | PyErr_SetString(PyExc_ValueError, 43 | "Arguments SA, CT, and p must have the same dimensions."); 44 | Py_XDECREF(sa_a); 45 | Py_XDECREF(ct_a); 46 | Py_XDECREF(p_a); 47 | return NULL; 48 | } 49 | dh_a = (PyArrayObject *)PyArray_NewLikeArray(sa_a, NPY_CORDER, NULL, 0); 50 | if (dh_a == NULL) 51 | { 52 | Py_XDECREF(sa_a); 53 | Py_XDECREF(ct_a); 54 | Py_XDECREF(p_a); 55 | return NULL; 56 | } 57 | ret = gsw_geo_strf_dyn_height((double *)PyArray_DATA(sa_a), 58 | (double *)PyArray_DATA(ct_a), 59 | (double *)PyArray_DATA(p_a), 60 | p_ref, 61 | n_levels, 62 | (double *)PyArray_DATA(dh_a)); 63 | Py_XDECREF(sa_a); 64 | Py_XDECREF(ct_a); 65 | Py_XDECREF(p_a); 66 | 67 | if (ret == NULL) 68 | { 69 | PyErr_SetString(PyExc_RuntimeError, 70 | "gws_geo_strf_dyn_height failed; check input arguments"); 71 | Py_XDECREF(dh_a); 72 | return NULL; 73 | } 74 | return (PyObject *)dh_a; 75 | } 76 | 77 | 78 | static PyObject * 79 | geo_strf_dyn_height_1(PyObject *NPY_UNUSED(self), PyObject *args) 80 | { 81 | PyObject *sa_o, *ct_o, *p_o; 82 | double p_ref; 83 | PyArrayObject *sa_a, *ct_a, *p_a, *dh_a; 84 | int n_levels; 85 | int ret = 1; /* error (1) until set to 0 by the C function */ 86 | double max_dp_i; 87 | int interp_method; 88 | 89 | if (!PyArg_ParseTuple(args, "OOOddi", &sa_o, &ct_o, &p_o, &p_ref, 90 | &max_dp_i, &interp_method)) 91 | return NULL; 92 | 93 | sa_a = (PyArrayObject *)PyArray_ContiguousFromAny(sa_o, NPY_DOUBLE, 1, 1); 94 | if (sa_a == NULL) 95 | return NULL; 96 | 97 | ct_a = (PyArrayObject *)PyArray_ContiguousFromAny(ct_o, NPY_DOUBLE, 1, 1); 98 | if (ct_a == NULL) 99 | { 100 | Py_DECREF(sa_a); 101 | return NULL; 102 | } 103 | p_a = (PyArrayObject *)PyArray_ContiguousFromAny(p_o, NPY_DOUBLE, 1, 1); 104 | if (p_a == NULL) 105 | { 106 | Py_DECREF(sa_a); 107 | Py_DECREF(ct_a); 108 | return NULL; 109 | } 110 | 111 | n_levels = PyArray_DIM(sa_a, 0); 112 | if (PyArray_DIM(ct_a, 0) != n_levels || PyArray_DIM(p_a, 0) != n_levels) 113 | { 114 | PyErr_SetString(PyExc_ValueError, 115 | "Arguments SA, CT, and p must have the same dimensions."); 116 | Py_DECREF(sa_a); 117 | Py_DECREF(ct_a); 118 | Py_DECREF(p_a); 119 | return NULL; 120 | } 121 | 122 | dh_a = (PyArrayObject *)PyArray_NewLikeArray(sa_a, NPY_CORDER, NULL, 0); 123 | if (dh_a == NULL) 124 | { 125 | Py_DECREF(sa_a); 126 | Py_DECREF(ct_a); 127 | Py_DECREF(p_a); 128 | return NULL; 129 | } 130 | 131 | ret = 
gsw_geo_strf_dyn_height_1((double *)PyArray_DATA(sa_a), 132 | (double *)PyArray_DATA(ct_a), 133 | (double *)PyArray_DATA(p_a), 134 | p_ref, 135 | n_levels, 136 | (double *)PyArray_DATA(dh_a), 137 | max_dp_i, 138 | interp_method); 139 | Py_DECREF(sa_a); 140 | Py_DECREF(ct_a); 141 | Py_DECREF(p_a); 142 | 143 | if (ret) 144 | { 145 | PyErr_Format(PyExc_RuntimeError, 146 | "gws_geo_strf_dyn_height_1 failed with code %d; check input arguments", 147 | ret); 148 | Py_DECREF(dh_a); 149 | return NULL; 150 | } 151 | return (PyObject *)dh_a; 152 | } 153 | 154 | 155 | static PyObject * 156 | util_pchip_interp(PyObject *NPY_UNUSED(self), PyObject *args) 157 | { 158 | PyObject *x, *y, *xi; 159 | PyArrayObject *xa, *ya, *xia, *yia; 160 | int n, ni; 161 | int ret = 1; /* error (1) until set to 0 by the C function */ 162 | 163 | if (!PyArg_ParseTuple(args, "OOO", &x, &y, &xi)) 164 | return NULL; 165 | 166 | xa = (PyArrayObject *)PyArray_ContiguousFromAny(x, NPY_DOUBLE, 1, 1); 167 | if (xa == NULL) 168 | { 169 | PyErr_SetString(PyExc_RuntimeError, 170 | "failed to convert argument x"); 171 | return NULL; 172 | } 173 | 174 | ya = (PyArrayObject *)PyArray_ContiguousFromAny(y, NPY_DOUBLE, 1, 1); 175 | if (ya == NULL) 176 | { 177 | PyErr_SetString(PyExc_RuntimeError, 178 | "failed to convert argument y"); 179 | Py_DECREF(xa); 180 | return NULL; 181 | } 182 | n = PyArray_DIM(xa, 0); 183 | 184 | xia = (PyArrayObject *)PyArray_ContiguousFromAny(xi, NPY_DOUBLE, 1, 1); 185 | if (xia == NULL) 186 | { 187 | PyErr_SetString(PyExc_RuntimeError, 188 | "failed to convert argument xi"); 189 | Py_DECREF(xa); 190 | Py_DECREF(ya); 191 | return NULL; 192 | } 193 | ni = PyArray_DIM(xia, 0); 194 | 195 | yia = (PyArrayObject *)PyArray_NewLikeArray(xia, NPY_CORDER, NULL, 0); 196 | if (yia == NULL) 197 | { 198 | Py_DECREF(xa); 199 | Py_DECREF(ya); 200 | Py_DECREF(xia); 201 | return NULL; 202 | } 203 | 204 | ret = gsw_util_pchip_interp((double *)PyArray_DATA(xa), 205 | (double *)PyArray_DATA(ya), 206 | n, 207 | (double *)PyArray_DATA(xia), 208 | (double *)PyArray_DATA(yia), 209 | ni); 210 | 211 | Py_DECREF(xa); 212 | Py_DECREF(ya); 213 | Py_DECREF(xia); 214 | if (ret) 215 | { 216 | PyErr_SetString(PyExc_RuntimeError, 217 | "gsw_util_pchip_interp failed; check input arguments"); 218 | return NULL; 219 | } 220 | return (PyObject *)yia; 221 | } 222 | 223 | 224 | static PyObject * 225 | sa_ct_interp(PyObject *NPY_UNUSED(self), PyObject *args) 226 | { 227 | PyObject *sa_o, *ct_o, *p_o, *pi_o, *res; 228 | PyArrayObject *sa_a, *ct_a, *p_a, *pi_a, *sai_a, *cti_a; 229 | int np, npi; 230 | int ret = 1; /* error (1) until set to 0 by the C function */ 231 | 232 | if (!PyArg_ParseTuple(args, "OOOO", &sa_o, &ct_o, &p_o, &pi_o)) 233 | return NULL; 234 | 235 | sa_a = (PyArrayObject *)PyArray_ContiguousFromAny(sa_o, NPY_DOUBLE, 1, 1); 236 | if (sa_a == NULL) 237 | return NULL; 238 | 239 | ct_a = (PyArrayObject *)PyArray_ContiguousFromAny(ct_o, NPY_DOUBLE, 1, 1); 240 | if (ct_a == NULL) 241 | { 242 | Py_DECREF(sa_a); 243 | return NULL; 244 | } 245 | p_a = (PyArrayObject *)PyArray_ContiguousFromAny(p_o, NPY_DOUBLE, 1, 1); 246 | if (p_a == NULL) 247 | { 248 | Py_DECREF(sa_a); 249 | Py_DECREF(ct_a); 250 | return NULL; 251 | } 252 | pi_a = (PyArrayObject *)PyArray_ContiguousFromAny(pi_o, NPY_DOUBLE, 1, 1); 253 | if (pi_a == NULL) 254 | { 255 | Py_DECREF(sa_a); 256 | Py_DECREF(ct_a); 257 | Py_DECREF(p_a); 258 | return NULL; 259 | } 260 | 261 | np = PyArray_DIM(sa_a, 0); 262 | if (PyArray_DIM(ct_a, 0) != np || PyArray_DIM(p_a, 0) != np) 263 | { 264 | 
PyErr_SetString(PyExc_ValueError, 265 | "Arguments SA, CT, and p must have the same dimensions."); 266 | Py_DECREF(sa_a); 267 | Py_DECREF(ct_a); 268 | Py_DECREF(p_a); 269 | Py_DECREF(pi_a); 270 | return NULL; 271 | } 272 | 273 | npi = PyArray_DIM(pi_a, 0); 274 | sai_a = (PyArrayObject *)PyArray_NewLikeArray(pi_a, NPY_CORDER, NULL, 0); 275 | if (sai_a == NULL) 276 | { 277 | Py_DECREF(sa_a); 278 | Py_DECREF(ct_a); 279 | Py_DECREF(p_a); 280 | Py_DECREF(pi_a); 281 | return NULL; 282 | } 283 | 284 | cti_a = (PyArrayObject *)PyArray_NewLikeArray(pi_a, NPY_CORDER, NULL, 0); 285 | if (cti_a == NULL) 286 | { 287 | Py_DECREF(sa_a); 288 | Py_DECREF(ct_a); 289 | Py_DECREF(p_a); 290 | Py_DECREF(pi_a); 291 | Py_DECREF(sai_a); 292 | return NULL; 293 | } 294 | 295 | ret = gsw_sa_ct_interp((double *)PyArray_DATA(sa_a), 296 | (double *)PyArray_DATA(ct_a), 297 | (double *)PyArray_DATA(p_a), 298 | np, 299 | (double *)PyArray_DATA(pi_a), 300 | npi, 301 | (double *)PyArray_DATA(sai_a), 302 | (double *)PyArray_DATA(cti_a)); 303 | Py_DECREF(sa_a); 304 | Py_DECREF(ct_a); 305 | Py_DECREF(p_a); 306 | Py_DECREF(pi_a); 307 | 308 | if (ret) 309 | { 310 | PyErr_Format(PyExc_RuntimeError, 311 | "gsw_sa_ct_interp failed with code %d; check input arguments", 312 | ret); 313 | Py_DECREF(sai_a); 314 | Py_DECREF(cti_a); 315 | return NULL; 316 | } 317 | 318 | res = PyTuple_Pack(2, sai_a, cti_a); 319 | 320 | return res; 321 | } 322 | 323 | 324 | static PyObject * 325 | tracer_ct_interp(PyObject *NPY_UNUSED(self), PyObject *args) 326 | { 327 | PyObject *tracer_o, *ct_o, *p_o, *pi_o, *res; 328 | double factor; 329 | PyArrayObject *tracer_a, *ct_a, *p_a, *pi_a, *traceri_a, *cti_a; 330 | int np, npi; 331 | int ret = 1; /* error (1) until set to 0 by the C function */ 332 | 333 | if (!PyArg_ParseTuple(args, "OOOOd", &tracer_o, &ct_o, &p_o, &pi_o, &factor)) 334 | return NULL; 335 | 336 | tracer_a = (PyArrayObject *)PyArray_ContiguousFromAny(tracer_o, NPY_DOUBLE, 1, 1); 337 | if (tracer_a == NULL) 338 | return NULL; 339 | 340 | ct_a = (PyArrayObject *)PyArray_ContiguousFromAny(ct_o, NPY_DOUBLE, 1, 1); 341 | if (ct_a == NULL) 342 | { 343 | Py_DECREF(tracer_a); 344 | return NULL; 345 | } 346 | p_a = (PyArrayObject *)PyArray_ContiguousFromAny(p_o, NPY_DOUBLE, 1, 1); 347 | if (p_a == NULL) 348 | { 349 | Py_DECREF(tracer_a); 350 | Py_DECREF(ct_a); 351 | return NULL; 352 | } 353 | pi_a = (PyArrayObject *)PyArray_ContiguousFromAny(pi_o, NPY_DOUBLE, 1, 1); 354 | if (pi_a == NULL) 355 | { 356 | Py_DECREF(tracer_a); 357 | Py_DECREF(ct_a); 358 | Py_DECREF(p_a); 359 | return NULL; 360 | } 361 | 362 | np = PyArray_DIM(tracer_a, 0); 363 | if (PyArray_DIM(ct_a, 0) != np || PyArray_DIM(p_a, 0) != np) 364 | { 365 | PyErr_SetString(PyExc_ValueError, 366 | "Arguments tracer, CT, and p must have the same dimensions."); 367 | Py_DECREF(tracer_a); 368 | Py_DECREF(ct_a); 369 | Py_DECREF(p_a); 370 | Py_DECREF(pi_a); 371 | return NULL; 372 | } 373 | 374 | npi = PyArray_DIM(pi_a, 0); 375 | traceri_a = (PyArrayObject *)PyArray_NewLikeArray(pi_a, NPY_CORDER, NULL, 0); 376 | if (traceri_a == NULL) 377 | { 378 | Py_DECREF(tracer_a); 379 | Py_DECREF(ct_a); 380 | Py_DECREF(p_a); 381 | Py_DECREF(pi_a); 382 | return NULL; 383 | } 384 | 385 | cti_a = (PyArrayObject *)PyArray_NewLikeArray(pi_a, NPY_CORDER, NULL, 0); 386 | if (cti_a == NULL) 387 | { 388 | Py_DECREF(tracer_a); 389 | Py_DECREF(ct_a); 390 | Py_DECREF(p_a); 391 | Py_DECREF(pi_a); 392 | Py_DECREF(traceri_a); 393 | return NULL; 394 | } 395 | 396 | ret = gsw_tracer_ct_interp((double *)PyArray_DATA(tracer_a), 
397 | (double *)PyArray_DATA(ct_a), 398 | (double *)PyArray_DATA(p_a), 399 | np, 400 | (double *)PyArray_DATA(pi_a), 401 | npi, 402 | factor, 403 | (double *)PyArray_DATA(traceri_a), 404 | (double *)PyArray_DATA(cti_a)); 405 | Py_DECREF(tracer_a); 406 | Py_DECREF(ct_a); 407 | Py_DECREF(p_a); 408 | Py_DECREF(pi_a); 409 | 410 | if (ret) 411 | { 412 | PyErr_Format(PyExc_RuntimeError, 413 | "gsw_tracer_ct_interp failed with code %d; check input arguments", 414 | ret); 415 | Py_DECREF(traceri_a); 416 | Py_DECREF(cti_a); 417 | return NULL; 418 | } 419 | 420 | res = PyTuple_Pack(2, traceri_a, cti_a); 421 | 422 | return res; 423 | } 424 | -------------------------------------------------------------------------------- /src/method_def_entries.c: -------------------------------------------------------------------------------- 1 | /* Entries in the GswMethods table. */ 2 | {"geo_strf_dyn_height", geo_strf_dyn_height, METH_VARARGS, 3 | "geostrophic streamfunction dynamic height"}, 4 | {"geo_strf_dyn_height_1", geo_strf_dyn_height_1, METH_VARARGS, 5 | "geostrophic streamfunction dynamic height"}, 6 | {"util_pchip_interp", util_pchip_interp, METH_VARARGS, 7 | "PCHIP interpolation"}, 8 | {"sa_ct_interp", sa_ct_interp, METH_VARARGS, 9 | "SA and CT interpolation"}, 10 | {"tracer_ct_interp", tracer_ct_interp, METH_VARARGS, 11 | "Tracer and CT interpolation"}, 12 | -------------------------------------------------------------------------------- /tools/_utilities.py: -------------------------------------------------------------------------------- 1 | ../gsw/_utilities.py -------------------------------------------------------------------------------- /tools/c_header_parser.py: -------------------------------------------------------------------------------- 1 | """ 2 | Functions for taking apart the function declarations in gswteos-10.h. 3 | """ 4 | from pathlib import Path 5 | import re 6 | 7 | import numpy as np 8 | 9 | 10 | basedir = Path(__file__).parent.parent 11 | 12 | def get_signatures(strip_extern=True, srcdir='src'): 13 | """ 14 | Return a list of C function declarations. 15 | """ 16 | fname = basedir.joinpath(srcdir, "c_gsw/gswteos-10.h") 17 | 18 | with fname.open() as f: 19 | for line in f: 20 | if 'Prototypes' in line: 21 | break 22 | sigs = [] 23 | started = False 24 | for line in f: 25 | line = line.strip() 26 | if line.startswith('DECLSPEC extern'): 27 | sigs.append(line) 28 | if not line.endswith(';'): 29 | started = True 30 | elif started: 31 | sigs[-1] += line 32 | if line.endswith(';'): 33 | started = False 34 | if strip_extern: 35 | sigs = [s[16:].strip() for s in sigs] # probably don't need strip() 36 | return sigs 37 | 38 | 39 | def parse_signature(sig): 40 | # grab the part inside parentheses: the arguments (single group) 41 | arglistpat = re.compile(r'.*\((.*)\);') 42 | 43 | # the return type and asterisk if any, and name (3 groups) 44 | retpat = re.compile(r'^(\w+)\s+(\**)gsw_(\w+)') 45 | 46 | # in an argument specification, get the type, asterisk if any, name (3) 47 | argpat = re.compile(r'(\w+)\s+(\**)(\w+)') 48 | 49 | # Get the full argument list string. 50 | argstr = arglistpat.match(sig).groups()[0] 51 | 52 | # Make a tuple with an entry for each argument, e.g., 'double p'. 
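    # e.g. "double sa, double t, double p" -> ('double sa', 'double t', 'double p')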
53 | argtup = tuple([a.strip() for a in argstr.split(',')]) 54 | 55 | argtypes = [] 56 | argnames = [] 57 | for arg in argtup: 58 | parts = argpat.match(arg).groups() 59 | argtypes.append(parts[0] + parts[1]) 60 | argnames.append(parts[2]) 61 | 62 | retgroups = retpat.match(sig).groups() 63 | ret = retgroups[0] + retgroups[1] 64 | funcname = retgroups[2] 65 | 66 | return dict(name=funcname, 67 | returntype=ret, 68 | argtypes=tuple(argtypes), 69 | argnames=tuple(argnames), 70 | argstring=argstr, 71 | argtuple=argtup, 72 | ) 73 | 74 | def parse_signatures(sigs): 75 | """ 76 | Given the default list of signatures from get_signatures, 77 | return a dictionary with function names as keys, and with 78 | each entry being the (dictionary) output of parse_signature. 79 | """ 80 | sigdict = {} 81 | for sig in sigs: 82 | psig = parse_signature(sig) 83 | sigdict[psig['name']] = psig 84 | return sigdict 85 | 86 | def get_sigdict(srcdir="src"): 87 | return parse_signatures(get_signatures(srcdir=srcdir)) 88 | 89 | 90 | def get_simple_name_nin_returntype(sigdict): 91 | """ 92 | Return a list of (name, nin, returntype) tuples. 93 | Include only functions with double arguments and a single return. 94 | Return may be double or int. 95 | """ 96 | tups = [] 97 | for name, sig in sigdict.items(): 98 | if all([t == 'double' for t in sig['argtypes']]): 99 | nin = len(sig['argtypes']) 100 | if sig['returntype'] in ('double', 'int'): 101 | tups.append((name, nin, sig['returntype'])) 102 | return tups 103 | 104 | 105 | def get_complex_name_nin_nout(sigdict): 106 | """ 107 | Return a list of (name, nin, nout) tuples. 108 | Include only functions with multiple outputs, double only. 109 | This not bullet-proof, but it works with the current set of functions. 110 | """ 111 | tups = [] 112 | simple = [tup[0] for tup in get_simple_name_nin_returntype(sigdict)] 113 | for name, sig in sigdict.items(): 114 | if name in simple: 115 | continue 116 | if sig['returntype'] == 'void' and 'int' not in sig['argtypes']: 117 | nin = 0 118 | nout = 0 119 | for arg in sig['argtuple']: 120 | if '*' in arg: 121 | nout += 1 122 | else: 123 | nin += 1 124 | tups.append((name, nin, nout)) 125 | return tups 126 | 127 | def mixed_sigdict(sigdict): 128 | """ 129 | This should find gibbs and gibbs_ice, with their leading int arguments. 130 | It is keyed by name. 131 | Returns a subset of sigdict, with a "letter_sig" entry added to each 132 | signature. 133 | """ 134 | out1 = {k: psig for k, psig in sigdict.items() if psig['returntype'] == 'double'} 135 | out = {} 136 | for k, psig in out1.items(): 137 | n_int = np.array([arg == "int" for arg in psig["argtypes"]]).sum() 138 | n_double = np.array([arg == "double" for arg in psig["argtypes"]]).sum() 139 | if n_int > 0 and n_int + n_double == len(psig["argtypes"]): 140 | out[k] = psig 141 | psig["letter_sig"] = f"{''.join([a[0] for a in psig['argtypes']])}_d" 142 | return out 143 | 144 | -------------------------------------------------------------------------------- /tools/categories.py: -------------------------------------------------------------------------------- 1 | """ 2 | Use 'src_wrapped_ufuncs.list' and the function names from parsing 3 | Matlab to generate lists of wrapped functions in categories. 
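Each write_* function writes its selection as a comma/newline-separated
*.list file (basic_conversions.list, freezing.list, ice.list, exact.list)
in the directory where the script is run.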
4 | """ 5 | 6 | from matlab_parser import get_sigdicts_by_subdir 7 | 8 | sigdicts = get_sigdicts_by_subdir() 9 | with open('src_wrapped_ufuncs.list') as f: 10 | lines = f.readlines() 11 | uflist = [name.strip() for name in lines] 12 | 13 | def write_basic_conversions(): 14 | out = [] 15 | nlist = [n for n in uflist if 'from' in n] 16 | for name in nlist: 17 | if not('ice' in name or 'freezing' in name or 'exact' in name): 18 | try: 19 | out.append(sigdicts['toolbox'][name]['name']) 20 | except KeyError: 21 | pass 22 | out.append('') 23 | with open('basic_conversions.list', 'w') as f: 24 | f.write(',\n'.join(out)) 25 | 26 | def write_freezing(): 27 | out = [] 28 | nlist = [n for n in uflist if 'freezing' in n] 29 | for name in nlist: 30 | try: 31 | out.append(sigdicts['toolbox'][name]['name']) 32 | except KeyError: 33 | pass 34 | out.append('') 35 | with open('freezing.list', 'w') as f: 36 | f.write(',\n'.join(out)) 37 | 38 | def write_ice_not_freezing(): 39 | out = [] 40 | nlist = [n for n in uflist if 'ice' in n and 'freezing' not in n] 41 | for name in nlist: 42 | try: 43 | out.append(sigdicts['toolbox'][name]['name']) 44 | except KeyError: 45 | pass 46 | out.append('') 47 | with open('ice.list', 'w') as f: 48 | f.write(',\n'.join(out)) 49 | 50 | 51 | def write_exact(): 52 | out = [] 53 | nlist = [n for n in uflist if 'exact' in n] 54 | for name in nlist: 55 | try: 56 | out.append(sigdicts['toolbox'][name]['name']) 57 | except KeyError: 58 | pass 59 | out.append('') 60 | with open('exact.list', 'w') as f: 61 | f.write(',\n'.join(out)) 62 | 63 | -------------------------------------------------------------------------------- /tools/codegen: -------------------------------------------------------------------------------- 1 | # Source this file to run the code generation scripts in sequence. 2 | 3 | python make_ufuncs.py 4 | python make_wrapped_ufuncs.py 5 | python fix_wrapped_ufunc_typos.py 6 | -------------------------------------------------------------------------------- /tools/copy_from_GSW-C.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Copy all relevant .c and .h files from Python-C, if they are newer. 4 | 5 | This is a simple utility, to be run from this directory. It assumes that 6 | an up-to-date GSW-C GitHub repo and the present GSW-Python repo are 7 | siblings in the directory tree. 8 | """ 9 | 10 | import shutil 11 | from pathlib import Path 12 | 13 | current = Path(__file__).parent 14 | srcdir = Path(current.parent.parent, "GSW-C") 15 | 16 | destdir = Path(current.parent, "src", "c_gsw") 17 | 18 | fnames = [ 19 | "gsw_oceanographic_toolbox.c", 20 | "gsw_saar.c", 21 | "gsw_saar_data.h", 22 | "gsw_internal_const.h", 23 | "gswteos-10.h", 24 | ] 25 | 26 | 27 | if not srcdir.exists(): 28 | raise IOError( 29 | f"Could not find the GSW-C source code in {srcdir}. " 30 | "Please read the development notes to find how to setup your GSW-Python development environment." 31 | ) 32 | 33 | for fname in fnames: 34 | src = srcdir.joinpath(fname) 35 | dest = destdir.joinpath(fname) 36 | if src.stat().st_mtime > dest.stat().st_mtime: 37 | shutil.copyfile(str(src), str(dest)) 38 | print(f"copied {src} to {dest}") 39 | -------------------------------------------------------------------------------- /tools/docstring_parts.py: -------------------------------------------------------------------------------- 1 | """ 2 | Blocks of text for assembling docstrings. 
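The 'parameters' dict maps wrapped-ufunc argument names to the one-line
descriptions used in the generated numpydoc Parameters sections;
'return_overrides' supplies hand-written Returns blocks for gibbs and
gibbs_ice.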
3 | """ 4 | 5 | parameters = dict( 6 | SP = "Practical Salinity (PSS-78), unitless", 7 | SA = "Absolute Salinity, g/kg", 8 | SR = "Reference Salinity, g/kg", 9 | SK = "Knudsen Salinity, ppt", 10 | Sstar = "Preformed Salinity, g/kg", 11 | SA_seaice = 12 | """Absolute Salinity of sea ice: the mass fraction of salt 13 | in sea ice, expressed in g of salt per kg of sea ice.""", 14 | t_seaice = 15 | "In-situ temperature of the sea ice at pressure p (ITS-90), degrees C", 16 | t = "In-situ temperature (ITS-90), degrees C", 17 | Rt = "C(SP,t_68,0)/C(SP=35,t_68,0), unitless", 18 | CT = "Conservative Temperature (ITS-90), degrees C", 19 | C = "Conductivity, mS/cm", 20 | p = "Sea pressure (absolute pressure minus 10.1325 dbar), dbar", 21 | lon = "Longitude, -360 to 360 degrees", 22 | lat = "Latitude, -90 to 90 degrees", 23 | saturation_fraction = 24 | "Saturation fraction of dissolved air in seawater. (0..1)", 25 | p_ref = "Reference pressure, dbar", 26 | p_shallow = "Upper sea pressure (absolute pressure minus 10.1325 dbar), dbar", 27 | p_deep = "Lower sea pressure (absolute pressure minus 10.1325 dbar), dbar", 28 | 29 | enthalpy_diff = "Specific enthalpy, deep minus shallow, J/kg", 30 | pot_enthalpy_ice = "Potential enthalpy of ice, J/kg", 31 | h = "Specific enthalpy, J/kg", 32 | entropy = "Specific entropy, J/(kg*K)", 33 | pt0 = "Potential temperature with reference pressure of 0 dbar, degrees C", 34 | pt0_ice = "Potential temperature of ice (ITS-90), degrees C", 35 | # TODO: Check the functions using this to see if any customizations are needed. 36 | pt = "Potential temperature referenced to a sea pressure, degrees C", 37 | rho = "Seawater density (not anomaly) in-situ, e.g., 1026 kg/m^3.", 38 | t_Ih = "In-situ temperature of ice (ITS-90), degrees C", 39 | z = "Depth, positive up, m", 40 | SA_bulk = "bulk Absolute Salinity of the seawater and ice mixture, g/kg", 41 | w_Ih = 42 | """mass fraction of ice: the mass of ice divided by the 43 | sum of the masses of ice and seawater. 0 <= wIh <= 1. unitless.""", 44 | w_seaice = 45 | """mass fraction of ice: the mass of sea-ice divided by the sum 46 | of the masses of sea-ice and seawater. 0 <= wIh <= 1. 
unitless.""", 47 | h_bulk = "bulk enthalpy of the seawater and ice mixture, J/kg", 48 | h_pot_bulk = "bulk enthalpy of the seawater and ice mixture, J/kg", 49 | geo_strf_dyn_height = """dynamic height anomaly, m^2/s^2 50 | Note that the reference pressure, p_ref, of geo_strf_dyn_height must 51 | be zero (0) dbar.""", 52 | sea_surface_geopotential = "geopotential at zero sea pressure, m^2/s^2", 53 | ns = "order of SA derivative, integer in (0, 1, 2)", 54 | nt = "order of t derivative, integer in (0, 1, 2)", 55 | np = "order of p derivative, integer in (0, 1, 2)", 56 | ) 57 | 58 | return_overrides = dict( 59 | gibbs = [ 60 | "gibbs : array-like", 61 | " Specific Gibbs energy or its derivatives.", 62 | " The Gibbs energy (when ns = nt = np = 0) has units of J/kg.", 63 | " The Absolute Salinity derivatives are output in units of (J/kg) (g/kg)^(-ns).", 64 | " The temperature derivatives are output in units of (J/kg) (K)^(-nt).", 65 | " The pressure derivatives are output in units of (J/kg) (Pa)^(-np).", 66 | " The mixed derivatives are output in units of (J/kg) (g/kg)^(-ns) (K)^(-nt) (Pa)^(-np).", 67 | " Note: The derivatives are taken with respect to pressure in Pa, not", 68 | " withstanding that the pressure input into this routine is in dbar.", 69 | ], 70 | gibbs_ice = [ 71 | "gibbs_ice : array-like", 72 | " Specific Gibbs energy of ice or its derivatives.", 73 | " The Gibbs energy (when nt = np = 0) has units of J/kg.", 74 | " The temperature derivatives are output in units of (J/kg) (K)^(-nt).", 75 | " The pressure derivatives are output in units of (J/kg) (Pa)^(-np).", 76 | " The mixed derivatives are output in units of (J/kg) (K)^(-nt) (Pa)^(-np).", 77 | " Note. The derivatives are taken with respect to pressure in Pa, not", 78 | " withstanding that the pressure input into this routine is in dbar.", 79 | ] 80 | ) -------------------------------------------------------------------------------- /tools/docstring_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Functions for assembling a docstring from various sources. 3 | 4 | As a temporary measure, we are getting the numpydoc 'Returns' 5 | section by manipulating the Matlab 'OUTPUTS' block, rather than 6 | by using a dictionary of blocks as we are for 'Parameters'. 7 | """ 8 | 9 | import re 10 | 11 | def paragraphs(linelist): 12 | """ 13 | Break a list of lines at blank lines into a list of line-lists. 14 | """ 15 | plist = [] 16 | newlinelist = [] 17 | for line in linelist: 18 | line = line.strip() 19 | if line: 20 | newlinelist.append(line) 21 | elif newlinelist: 22 | plist.append(newlinelist) 23 | newlinelist = [] 24 | if newlinelist: 25 | plist.append(newlinelist) 26 | return plist 27 | 28 | 29 | def fix_one_output(lines): 30 | """ 31 | Convert a list of lines for a single output variable from 32 | the Matlab OUTPUTS section into a list suitable for the 33 | numpydoc Returns section. 34 | """ 35 | units = '' # Probably a hack; look into a better workaround. 36 | 37 | lines_orig = lines.copy() 38 | lines = [] 39 | 40 | for line in lines_orig: 41 | # Look for lines ending with a units spec in square brackets. 
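        # For example (the exact Matlab wording varies), an OUTPUTS entry like
        #     rho  =  in-situ density                          [ kg/m^3 ]
        # yields units "kg/m^3", while 'remainder' keeps the
        # "rho  =  in-situ density" part for further processing below.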
42 | match = re.search(r'(.*)\[(.*)\]', line) 43 | if match is not None: 44 | units = match.group(2).strip() 45 | remainder = match.group(1).strip() 46 | if 'has units of' not in remainder: 47 | lines.append(remainder) 48 | else: 49 | line = line.strip() 50 | if 'has units of' in line or line.startswith('Note'): 51 | break 52 | lines.append(line) 53 | 54 | outname, remainder = lines[0].split('=') 55 | outlines = ['%s : array-like, %s' % (outname.strip(), units)] 56 | remainder = remainder.strip() 57 | if remainder: 58 | outlines.append(' ' + remainder) 59 | # FIXME: we need to assemble the rest into a single line 60 | # and then break it into lines of appropriate length. 61 | for line in lines[1:]: 62 | if line: 63 | outlines.append(' ' + line) 64 | return outlines 65 | 66 | def fix_outputs_doc(lines): 67 | """ 68 | Convert a list of lines from the Matlab OUTPUTS section 69 | into a list suitable for the numpydoc Returns section, 70 | handling multiple output variables if present. 71 | 72 | Exception: we don't support the 'in_ocean' return, so 73 | it is filtered out here. 74 | """ 75 | pat = r'^\s*(\w+)\s+=' 76 | istarts = [] 77 | for i, line in enumerate(lines): 78 | m = re.match(pat, line) 79 | if m is not None and m.groups()[0] == 'in_ocean': 80 | lines = lines[:i] 81 | break 82 | if m is not None: 83 | istarts.append(i) 84 | iends = istarts[1:] + [len(lines)] 85 | outlines = [] 86 | for i0, i1 in zip(istarts, iends): 87 | outlines.extend(fix_one_output(lines[i0:i1])) 88 | # Add a blank line if needed, as in the case where we toss the 89 | # in_ocean chunk. (Maybe we are losing this blank line somewhere else...) 90 | if outlines[-1]: 91 | outlines.append('') 92 | return outlines 93 | 94 | 95 | 96 | def docstring_from_sections(sections): 97 | """ 98 | sections is a dictionary containing numpydoc text sections, 99 | without their headers. Everything above the Parameters is 100 | considered Head; it does not have to follow the standard of 101 | having a single line "short summary", etc. Each section 102 | must be a list of lines without newlines, and with 103 | indentation only relative to the edge of the docstring. 104 | 105 | Only the Head is required. 106 | 107 | """ 108 | doclines = [''] 109 | doclines.extend(sections['Head']) 110 | for name in ['Parameters', 111 | 'Returns', 112 | 'Other Parameters', 113 | 'Raises', 114 | 'See Also', 115 | 'Notes', 116 | 'References', 117 | 'Examples', 118 | ]: 119 | if name in sections: 120 | doclines.extend(['', 121 | name, 122 | '-' * len(name),]) 123 | doclines.extend(sections[name]) 124 | 125 | for i, line in enumerate(list(doclines)): 126 | if line.strip(): 127 | doclines[i] = ' %s\n' % line.rstrip() 128 | else: 129 | doclines[i] = '\n' 130 | # Ensure there is only one blank line at the end. 131 | blanks = 0 132 | for line in reversed(doclines): 133 | if not line.strip(): 134 | blanks += 1 135 | else: 136 | break 137 | if blanks == 0: 138 | doclines.append('\n') 139 | if blanks > 1: 140 | del doclines[-(blanks-1):] 141 | return ''.join(doclines) 142 | -------------------------------------------------------------------------------- /tools/fix_wrapped_ufunc_typos.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Run this script from this directory after running 'make_wrapped_ufuncs.py'. 4 | It will correct common typos in the docstrings. The original 5 | _wrapped_ufuncs.py is copied to _wrapped_ufuncs.orig, which can be deleted. 
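The corrections are applied as plain string substitutions over the whole
generated file, so each entry in 'subs' simply pairs the original text
with its replacement.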
6 | 7 | """ 8 | 9 | from pathlib import Path 10 | import shutil 11 | 12 | basedir = Path(__file__).parent.parent 13 | wrapmod = basedir.joinpath('gsw', '_wrapped_ufuncs.py') 14 | 15 | orig = wrapmod.with_suffix('.orig') 16 | shutil.copyfile(wrapmod, orig) 17 | 18 | subs = [ 19 | (' the the ', ' the '), 20 | ('fomula', 'formula'), 21 | (' caclulated ', ' calculated '), 22 | (' occuring ', ' occurring '), 23 | (' thoughout ', ' throughout '), 24 | (' orignal ', ' original '), 25 | ('proceedure', 'procedure' ), 26 | (' appropiate ', ' appropriate '), 27 | (' subracted ', ' subtracted '), 28 | (' anomally ', ' anomaly '), 29 | (' frist ', ' first '), 30 | (' calulated ', ' calculated '), 31 | (' outout ', ' output '), 32 | (' degress ', ' degrees '), 33 | (' specifc ', ' specific '), 34 | (' avaialble ', ' available '), 35 | (' equlibrium ', ' equilibrium '), 36 | ('equlibrium', 'equilibrium'), 37 | (' apendix ', ' appendix '), 38 | (' slighty ', ' slightly '), 39 | ('rho : array-like, kg/m', 'rho : array-like, kg/m^3'), 40 | ('http://www.TEOS-10.org', 'https://www.teos-10.org/'), 41 | ('http://www.ocean-sci.net/8/1117/2012/os-8-1117-2012.pdf', 'https://os.copernicus.org/articles/8/1117/2012/os-8-1117-2012.pdf'), 42 | ('http://www.ocean-sci.net/6/3/2010/os-6-3-2010.pdf', 'https://os.copernicus.org/articles/6/3/2010/os-6-3-2010.pdf'), 43 | ('http://www.ocean-sci.net/7/363/2011/os-7-363-2011.pdf', 'https://os.copernicus.org/articles/7/363/2011/os-7-363-2011.pdf'), 44 | ('http://www.ocean-sci.net/8/1123/2012/os-8-1123-2012.pdf', 'https://os.copernicus.org/articles/8/1123/2012/os-8-1123-2012.pdf'), 45 | ('http://www.iapws.org', 'https://iapws.org/'), 46 | 47 | ] 48 | 49 | with open(wrapmod) as f: 50 | bigstring = f.read() 51 | 52 | for bad, good in subs: 53 | bigstring = bigstring.replace(bad, good) 54 | 55 | with open(wrapmod, "w") as f: 56 | f.write(bigstring) 57 | 58 | -------------------------------------------------------------------------------- /tools/make_ufuncs.py: -------------------------------------------------------------------------------- 1 | """ 2 | Generate the src/_ufuncs.c file to turn the scalar C functions 3 | into numpy ufuncs. Also writes ufuncs.list as a record of the 4 | ufunc names. 5 | 6 | """ 7 | from pathlib import Path 8 | 9 | from c_header_parser import ( 10 | get_sigdict, 11 | get_simple_name_nin_returntype, 12 | get_complex_name_nin_nout, 13 | mixed_sigdict, 14 | ) 15 | 16 | blacklist = ['add_barrier', 'add_mean'] 17 | 18 | basedir = Path(__file__).parent.parent 19 | 20 | modfile_head_top = """ 21 | /* 22 | This file is auto-generated--do not edit it. 23 | 24 | */ 25 | 26 | #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION 27 | #include "Python.h" 28 | #include "math.h" 29 | #include "numpy/ndarraytypes.h" 30 | #include "numpy/ufuncobject.h" 31 | #include "numpy/npy_3kcompat.h" 32 | #include "gswteos-10.h" 33 | 34 | /* possible hack for MSVC: */ 35 | #ifndef NAN 36 | static double NAN = 0.0/0.0; 37 | #endif 38 | 39 | #ifndef isnan 40 | # define isnan(x) ((x) != (x)) 41 | #endif 42 | 43 | #define CONVERT_INVALID(x) ((x == GSW_INVALID_VALUE)? NAN: x) 44 | 45 | """ 46 | 47 | # Loops will be generated by calls to modfile_loop_entry. 
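# The loop name encodes the argument pattern: for example,
# modfile_loop_entry(2, 1, 'd') emits loop1d_dd_d together with the
# matching funcs_dd_d and types_dd_d arrays, while out_type 'i' (used for
# int-returning functions) registers NPY_INT output instead of NPY_DOUBLE.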
48 | 49 | 50 | modfile_middle = """ 51 | 52 | #include "method_bodies.c" 53 | 54 | static PyMethodDef GswMethods[] = { 55 | # include "method_def_entries.c" 56 | {NULL, NULL, 0, NULL} 57 | }; 58 | 59 | static struct PyModuleDef moduledef = { 60 | PyModuleDef_HEAD_INIT, 61 | "npufunc", 62 | NULL, 63 | -1, 64 | GswMethods, 65 | NULL, 66 | NULL, 67 | NULL, 68 | NULL 69 | }; 70 | 71 | 72 | PyMODINIT_FUNC PyInit__gsw_ufuncs(void) 73 | { 74 | PyObject *m, *d; 75 | 76 | PyObject *ufunc_ptr; 77 | 78 | m = PyModule_Create(&moduledef); 79 | if (!m) { 80 | return NULL; 81 | } 82 | 83 | d = PyModule_GetDict(m); 84 | 85 | import_array(); 86 | import_umath(); 87 | """ 88 | 89 | modfile_tail = """ 90 | 91 | return m; 92 | } 93 | """ 94 | 95 | 96 | def modfile_loop_entry(nin, nout, out_type): 97 | if out_type == 'd': 98 | out_return = 'double' 99 | npy_out_type = 'NPY_DOUBLE' 100 | else: 101 | out_return = 'int' 102 | npy_out_type = 'NPY_INT' # maybe change to NPY_BOOL 103 | ndin = 'd'*nin 104 | ndout = out_type*nout 105 | loop_id = '%s_%s' % (ndin, ndout) 106 | 107 | linelist = ['/* %d in, %d out */' % (nin, nout)] 108 | linelist.extend([ 109 | 'static void loop1d_%s(char **args, npy_intp const *dimensions,' % loop_id, 110 | ' npy_intp const* steps, void* data)', 111 | '{', 112 | ' npy_intp i;', 113 | ' npy_intp n = dimensions[0];']) 114 | for i in range(nin): 115 | linelist.append(' char *in%d = args[%d];' % (i, i)) 116 | linelist.append(' npy_intp in_step%d = steps[%d];' % (i, i)) 117 | for i in range(nout): 118 | linelist.append(' char *out%d = args[%d];' % (i, i+nin)) 119 | linelist.append(' npy_intp out_step%d = steps[%d];' % (i, i+nin)) 120 | intypes = ', '.join(['double'] * nin) 121 | if nout == 1: 122 | linelist.append(f' {out_return} (*func)(%s);' % (intypes,)) 123 | else: 124 | # Multiple outputs: only double is supported here. 125 | outtypes = ', '.join(['double *'] * nout) 126 | linelist.append(' void (*func)(%s, %s);' % (intypes, outtypes)) 127 | 128 | # Declare local variables for outputs. 129 | douts = [] 130 | for i in range(nout): 131 | douts.append('outd%d' % (i,)) 132 | linelist.append(f' {out_return} %s;' % ', '.join(douts)) 133 | linelist.extend([ 134 | ' func = data;', 135 | '', # End of declarations, start the loop. 
136 | ' for (i = 0; i < n; i++) {']) 137 | tests = [] 138 | args = [] 139 | for i in range(nin): 140 | tests.append('isnan(*(double *)in%d)' % i) 141 | args.append('*(double *)in%d' % i) 142 | linelist.append(' if (%s) {' % '||'.join(tests)) 143 | outs = [] 144 | for i in range(nout): 145 | if out_type == 'd': 146 | outs.append('*((double *)out%d) = NAN;' % i) 147 | else: # integer for infunnel 148 | outs.append('*((int *)out0) = 0;') 149 | linelist.append(' %s' % ''.join(outs)) 150 | linelist.append(' } else {') 151 | if nout > 1: 152 | for i in range(nout): 153 | args.append('&outd%d' % i) 154 | linelist.append(' func(%s);' % ', '.join(args)) 155 | else: 156 | linelist.append(' outd0 = func(%s);' % ', '.join(args)) 157 | if out_type == 'd': 158 | for i in range(nout): 159 | linelist.append(' *((double *)out%d)' % (i,) 160 | + ' = CONVERT_INVALID(outd%d);' % (i,)) 161 | else: 162 | for i in range(nout): 163 | linelist.append(' *((int *)out%d)' % (i,) 164 | + ' = outd%d;' % (i,)) 165 | 166 | linelist.append(' }') 167 | for i in range(nin): 168 | linelist.append(' in%d += in_step%d;' % (i, i)) 169 | for i in range(nout): 170 | linelist.append(' out%d += out_step%d;' % (i, i)) 171 | 172 | linelist.extend([' }', '}', '']) 173 | linelist.append('static PyUFuncGenericFunction' 174 | ' funcs_%s[] = {&loop1d_%s};' % (loop_id, loop_id)) 175 | linelist.append('') 176 | linelist.append('static char types_%s[] = {' % (loop_id,)) 177 | 178 | linelist.append(' ' + 'NPY_DOUBLE, ' * nin) 179 | linelist.append(' ' + f'{npy_out_type}, ' * nout) 180 | linelist.extend(['};', '']) 181 | 182 | return '\n'.join(linelist) 183 | 184 | 185 | def modfile_loop_entry_from_sig(sig): 186 | """ 187 | Special case for gibbs, gibbs_ice. 188 | Assume the first half of the args are int, the remainder are double. 189 | Return is a double. 190 | This could all be generalized, but there is probably no need to do so. 191 | It could also be simplified by stripping out the handling of nout > 1. 
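    For gsw_gibbs, for example, the letter_sig works out to "iiiddd_d"
    (three int inputs, three double inputs, and a double return).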
192 | """ 193 | nin = len(sig["argtypes"]) 194 | nout = 1 195 | # loop_id = f"{'i' * (nin//2)}{'d' * (nin//2)}_{'d' * nout}" 196 | loop_id = sig["letter_sig"] 197 | linelist = ['/* %d int in, %d double in, %d out */' % (nin//2, nin//2, nout)] 198 | linelist.extend([ 199 | 'static void loop1d_%s(char **args, npy_intp const *dimensions,' % loop_id, 200 | ' npy_intp const* steps, void* data)', 201 | '{', 202 | ' npy_intp i;', 203 | ' npy_intp n = dimensions[0];']) 204 | for i in range(nin): 205 | linelist.append(' char *in%d = args[%d];' % (i, i)) 206 | linelist.append(' npy_intp in_step%d = steps[%d];' % (i, i)) 207 | for i in range(nout): 208 | linelist.append(' char *out%d = args[%d];' % (i, i+nin)) 209 | linelist.append(' npy_intp out_step%d = steps[%d];' % (i, i+nin)) 210 | intypes = ', '.join(['int'] * (nin//2) + ['double'] * (nin//2)) 211 | if nout == 1: 212 | linelist.append(' double (*func)(%s);' % (intypes,)) 213 | else: 214 | outtypes = ', '.join(['double *'] * nout) 215 | linelist.append(' void (*func)(%s, %s);' % (intypes, outtypes)) 216 | 217 | douts = [] 218 | for i in range(nout): 219 | douts.append('outd%d' % (i,)) 220 | linelist.append(' double %s;' % ', '.join(douts)) 221 | linelist.extend([ 222 | ' func = data;', 223 | '', 224 | ' for (i = 0; i < n; i++) {']) 225 | tests = [] 226 | args = [] 227 | for i in range(nin//2, nin): 228 | tests.append('isnan(*(double *)in%d)' % i) 229 | for i in range(nin//2): 230 | args.append('(int)*(long long *)in%d' % i) 231 | for i in range(nin//2, nin): 232 | args.append('*(double *)in%d' % i) 233 | linelist.append(' if (%s) {' % '||'.join(tests)) 234 | outs = [] 235 | for i in range(nout): 236 | outs.append('*((double *)out%d) = NAN;' % i) 237 | linelist.append(' %s' % ''.join(outs)) 238 | linelist.append(' } else {') 239 | if nout > 1: 240 | for i in range(nout): 241 | args.append('&outd%d' % i) 242 | linelist.append(' func(%s);' % ', '.join(args)) 243 | else: 244 | linelist.append(' outd0 = func(%s);' % ', '.join(args)) 245 | for i in range(nout): 246 | linelist.append(' *((double *)out%d)' % (i,) 247 | + ' = CONVERT_INVALID(outd%d);' % (i,)) 248 | linelist.append(' }') 249 | for i in range(nin): 250 | linelist.append(' in%d += in_step%d;' % (i, i)) 251 | for i in range(nout): 252 | linelist.append(' out%d += out_step%d;' % (i, i)) 253 | 254 | linelist.extend([' }', '}', '']) 255 | linelist.append('static PyUFuncGenericFunction' 256 | ' funcs_%s[] = {&loop1d_%s};' % (loop_id, loop_id)) 257 | linelist.append('') 258 | linelist.append('static char types_%s[] = {' % (loop_id,)) 259 | 260 | linelist.append(' ' + 'NPY_INT64, ' * (nin//2)) 261 | linelist.append(' ' + 'NPY_DOUBLE, ' * (nin//2)) 262 | linelist.append(' ' + 'NPY_DOUBLE, ' * nout) 263 | linelist.extend(['};', '']) 264 | 265 | return '\n'.join(linelist) 266 | 267 | 268 | def modfile_array_entry(funcname): 269 | return "static void *data_%s[] = {&gsw_%s};\n" % (funcname, funcname) 270 | 271 | 272 | _init_entry = """ 273 | ufunc_ptr = PyUFunc_FromFuncAndData(funcs_%(ndin)s_%(ndout)s, 274 | data_%(funcname)s, 275 | types_%(ndin)s_%(ndout)s, 276 | 1, %(nin)d, %(nout)d, // ndatatypes, nin, nout 277 | PyUFunc_None, 278 | "%(funcname)s", 279 | "%(funcname)s_docstring", 280 | 0); 281 | 282 | PyDict_SetItemString(d, "%(funcname)s", ufunc_ptr); 283 | Py_DECREF(ufunc_ptr); 284 | """ 285 | 286 | 287 | def modfile_init_entry(funcname, nin, nout, out_type='d'): 288 | return _init_entry % dict(funcname=funcname, nin=nin, nout=nout, 289 | ndin='d'*nin, ndout=out_type*nout) 290 | 291 | def 
modfile_init_entry_from_sig(sig): 292 | # Specialized for the gibbs functions. 293 | funcname = sig["name"] 294 | nin = len(sig["argtypes"]) 295 | nout = 1 296 | letter_sig = sig["letter_sig"] 297 | entry = f""" 298 | ufunc_ptr = PyUFunc_FromFuncAndData(funcs_{letter_sig:s}, 299 | data_{funcname:s}, 300 | types_{letter_sig:s}, 301 | 1, {nin:d}, {nout:d}, // ndatatypes, nin, nout 302 | PyUFunc_None, 303 | "{funcname:s}", 304 | "{funcname:s}_docstring", 305 | 0); 306 | 307 | PyDict_SetItemString(d, "{funcname:s}", ufunc_ptr); 308 | Py_DECREF(ufunc_ptr); 309 | 310 | """ 311 | return entry % vars() 312 | 313 | def write_modfile(modfile_name, srcdir): 314 | raw_sigdict = get_sigdict(srcdir=srcdir) 315 | sigdict = {name: sig for name, sig in raw_sigdict.items() if name not in blacklist} 316 | simple_tups = get_simple_name_nin_returntype(sigdict) 317 | complex_tups = get_complex_name_nin_nout(sigdict) 318 | mixed_sigs = mixed_sigdict(sigdict) 319 | 320 | modfile_head_parts = [modfile_head_top] 321 | simple_artups = {(nin, 1, returntype[0]) for _, nin, returntype in simple_tups} 322 | for artup in sorted(simple_artups): 323 | modfile_head_parts.append(modfile_loop_entry(*artup)) 324 | 325 | complex_artups = {tup[1:] for tup in complex_tups} 326 | for artup in sorted(complex_artups): 327 | modfile_head_parts.append(modfile_loop_entry(*artup, 'd')) 328 | modfile_head = '\n'.join(modfile_head_parts) 329 | 330 | chunks = [modfile_head] 331 | 332 | for sig in mixed_sigs.values(): 333 | chunks.append(modfile_loop_entry_from_sig(sig)) 334 | 335 | # Array entries 336 | for name, _, _ in simple_tups: 337 | chunks.append(modfile_array_entry(name)) 338 | 339 | for name, _, _ in complex_tups: 340 | chunks.append(modfile_array_entry(name)) 341 | 342 | for name in mixed_sigs.keys(): 343 | chunks.append(modfile_array_entry(name)) 344 | 345 | chunks.append(modfile_middle) 346 | 347 | for name, nin, returntype in simple_tups: 348 | chunks.append(modfile_init_entry(name, nin, 1, returntype[0])) 349 | 350 | for name, nin, nout in complex_tups: 351 | chunks.append(modfile_init_entry(name, nin, nout, 'd')) 352 | 353 | for sig in mixed_sigs.values(): 354 | chunks.append(modfile_init_entry_from_sig(sig)) 355 | 356 | chunks.append(modfile_tail) 357 | 358 | with modfile_name.open('w') as f: 359 | f.write(''.join(chunks)) 360 | 361 | funcnamelist1 = sorted([tup[0] for tup in simple_tups]) 362 | with open(srcdir.joinpath('_ufuncs1.list'), 'w') as f: 363 | f.write('\n'.join(funcnamelist1)) 364 | 365 | funcnamelist2 = sorted([tup[0] for tup in complex_tups]) 366 | with open(srcdir.joinpath('_ufuncs2.list'), 'w') as f: 367 | f.write('\n'.join(funcnamelist2)) 368 | 369 | funcnamelist = funcnamelist1 + funcnamelist2 + list(mixed_sigs.keys()) 370 | funcnamelist.sort() 371 | with open(srcdir.joinpath('_ufuncs.list'), 'w') as f: 372 | f.write('\n'.join(funcnamelist)) 373 | 374 | if __name__ == '__main__': 375 | srcdir = basedir.joinpath('src') 376 | modfile_name = basedir.joinpath(srcdir, '_ufuncs.c') 377 | write_modfile(modfile_name, srcdir=srcdir) 378 | -------------------------------------------------------------------------------- /tools/make_wrapped_ufuncs.py: -------------------------------------------------------------------------------- 1 | """ 2 | Script that generates _wrapped_ufuncs.py based on the output 3 | of make_ufuncs.py. 
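It reads src/_ufuncs.list, writes gsw/_wrapped_ufuncs.py, and records the
names that were successfully wrapped in src/_wrapped_ufuncs.list.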
4 | """ 5 | 6 | from pathlib import Path 7 | 8 | from _utilities import Bunch 9 | 10 | from matlab_parser import get_complete_sigdict, get_helpdict 11 | from c_header_parser import get_signatures, parse_signatures 12 | from docstring_parts import parameters, return_overrides 13 | from docstring_utils import (paragraphs, 14 | fix_outputs_doc, 15 | docstring_from_sections) 16 | 17 | basedir = Path(__file__).parent.parent 18 | 19 | 20 | # Functions that are Matlab subroutines, or exclusive to 21 | # the C and not needed; we don't need to expose them. 22 | blacklist = {'ct_freezing_exact', 23 | 'pt0_cold_ice_poly', 24 | 'pt_from_pot_enthalpy_ice_poly_dh', 25 | 't_freezing_exact', 26 | 'sa_p_inrange', 27 | } 28 | 29 | wrapper_head = ''' 30 | """ 31 | Auto-generated wrapper for C ufunc extension; do not edit! 32 | """ 33 | 34 | from . import _gsw_ufuncs 35 | from ._utilities import match_args_return 36 | 37 | ''' 38 | 39 | ## Alternatives: The first was the original, but it did not provide a way to 40 | # tell the decorator about the signature of the ufunc. The second solved that 41 | # problem, but failed to provide the argument names for the signature in the 42 | # help function and the ipython "?" functionality. 43 | 44 | # wrapper_template = ''' 45 | # @match_args_return 46 | # def %(funcname)s(%(args)s): 47 | # """%(doc)s 48 | # """ 49 | # return _gsw_ufuncs.%(ufuncname)s(%(args)s) 50 | # ''' 51 | 52 | # wrapper_template = """ 53 | # %(funcname)s = match_args_return(_gsw_ufuncs.%(ufuncname)s) 54 | # %(funcname)s.__doc__ = '''%(doc)s 55 | # ''' 56 | # """ 57 | 58 | # Make a Python function with the proper list of arguments; add the 'types' 59 | # attribute for the use of the decorator; then use the decorator in its 60 | # function form. 61 | wrapper_template = ''' 62 | def %(funcname)s(%(args)s): 63 | """%(doc)s 64 | """ 65 | return _gsw_ufuncs.%(ufuncname)s(%(args)s) 66 | %(funcname)s.types = _gsw_ufuncs.%(ufuncname)s.types 67 | %(funcname)s = match_args_return(%(funcname)s) 68 | ''' 69 | 70 | 71 | 72 | 73 | def get_argnames(ufname): 74 | try: 75 | msig = Bunch(msigdict[ufname]) 76 | csig = Bunch(csigdict[ufname]) 77 | except KeyError: 78 | return None 79 | cnames = csig.argnames[:] 80 | mnames = msig.argnames[:] 81 | nc, nm = len(cnames), len(mnames) 82 | if nc < nm: 83 | print('%s: truncating argument list, %s, %s' % ( 84 | ufname, cnames, mnames)) 85 | mnames = mnames[:nc] 86 | 87 | argnames = [] 88 | for ac, am in zip(cnames, mnames): 89 | if am == 'long': 90 | am = 'lon' 91 | if ac == am.lower(): 92 | argnames.append(am) 93 | else: 94 | raise RuntimeError("arg mismatch: %s, %s" % ( 95 | csig.argnames, msig.argnames)) 96 | return argnames 97 | 98 | def get_argname_set(): 99 | # This is not currently used internally. 100 | argset = set() 101 | for ufname in ufunclist: 102 | args = get_argnames(ufname) 103 | if args is not None: 104 | argset.update(args) 105 | return argset 106 | 107 | def get_ufnames_by_arg(): 108 | # This is not currently used internally. 109 | argdict = dict() 110 | for ufname in ufunclist: 111 | args = get_argnames(ufname) 112 | if args is None: 113 | continue 114 | for arg in args: 115 | if arg in argdict: 116 | argdict[arg].append(ufname) 117 | else: 118 | argdict[arg] = [ufname] 119 | return argdict 120 | 121 | 122 | def get_outnames(ufname): 123 | # This is currently used only in get_outname_set, which is not used internally. 
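    # Illustration (made-up signature): for a Matlab function whose outputs
    # are (SA, long), this returns ['SA', 'lon'].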
124 | try: 125 | msig = Bunch(msigdict[ufname]) 126 | except KeyError: 127 | return None 128 | mnames = msig.outnames[:] 129 | 130 | outnames = [] 131 | for am in mnames: 132 | if am == 'long': 133 | am = 'lon' 134 | outnames.append(am) 135 | return outnames 136 | 137 | def get_outname_set(): 138 | # This is not currently used internally. 139 | argset = set() 140 | for ufname in ufunclist: 141 | args = get_outnames(ufname) 142 | if args is not None: 143 | argset.update(args) 144 | return argset 145 | 146 | 147 | def uf_wrapper(ufname): 148 | argnames = get_argnames(ufname) 149 | if argnames is None: 150 | print(f"in uf_wrapper, ufname is {ufname}, argnames is None") 151 | return None 152 | argstr = ', '.join(argnames) 153 | msig = Bunch(msigdict[ufname]) 154 | 155 | subs = dict(ufuncname=ufname, 156 | funcname=msig['name'], 157 | args=argstr, 158 | ) 159 | helpdict = get_helpdict(msig['path']) 160 | 161 | sections = {} 162 | if 'DESCRIPTION' not in helpdict: 163 | helpdict['DESCRIPTION'] = helpdict["summary"] 164 | sections["Notes"] = helpdict["all"] 165 | 166 | description_paragraphs = paragraphs(helpdict['DESCRIPTION']) 167 | sections["Head"] = description_paragraphs[0] 168 | if len(description_paragraphs) > 1: 169 | lines = [] 170 | for p in description_paragraphs[1:]: 171 | lines.extend(p) 172 | lines.append("\n") 173 | sections["Notes"] = lines 174 | plist = [] 175 | for arg in argnames: 176 | plist.append('%s : array-like' % arg) 177 | for line in parameters[arg].split('\n'): 178 | plist.append(" %s" % line) 179 | sections['Parameters'] = plist 180 | 181 | # I think we can assume OUTPUT will be present, but just 182 | # in case, we check for it. Maybe remove this later. 183 | if 'OUTPUT' in helpdict: 184 | outdoc = fix_outputs_doc(helpdict['OUTPUT']) 185 | else: 186 | outdoc = ['double, array'] 187 | if ufname in return_overrides: 188 | outdoc = return_overrides[ufname] 189 | sections['Returns'] = outdoc 190 | if "REFERENCES" in helpdict: 191 | sections["References"] = [line.strip() for line in helpdict["REFERENCES"]] 192 | doc = docstring_from_sections(sections) 193 | subs['doc'] = doc 194 | return wrapper_template % subs 195 | 196 | if __name__ == '__main__': 197 | srcdir = basedir.joinpath('src') 198 | with open(srcdir.joinpath('_ufuncs.list')) as f: 199 | ufunclist = [name.strip() for name in f.readlines()] 200 | ufunclist = [name for name in ufunclist if name not in blacklist] 201 | 202 | wrapmod = basedir.joinpath('gsw', '_wrapped_ufuncs.py') 203 | 204 | msigdict = get_complete_sigdict() 205 | csigdict = parse_signatures(get_signatures(srcdir=srcdir)) 206 | 207 | wrapped_ufnames = [] 208 | 209 | with wrapmod.open('w') as f: 210 | f.write(wrapper_head) 211 | for ufname in ufunclist: 212 | try: 213 | wrapped = uf_wrapper(ufname) 214 | if wrapped is None: 215 | continue 216 | except RuntimeError as err: 217 | print(ufname, err) 218 | if wrapped is None: 219 | print("failed:", ufname) 220 | else: 221 | f.write(wrapped) 222 | wrapped_ufnames.append(ufname) 223 | wrapped_ufnames.sort() 224 | with open(srcdir.joinpath('_wrapped_ufuncs.list'), 'w') as f: 225 | f.write('\n'.join(wrapped_ufnames) + '\n') 226 | -------------------------------------------------------------------------------- /tools/mat2npz.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from pathlib import Path 4 | 5 | import numpy as np 6 | 7 | import scipy.io as sio 8 | 9 | 10 | def _structured_to_dict(arr): 11 | if arr.dtype.kind == 'V' and arr.shape 
== (1, 1): 12 | b = {} 13 | x = arr[0, 0] 14 | for name in x.dtype.names: 15 | b[name] = _structured_to_dict(x[name]) 16 | return b 17 | return _crunch(arr) 18 | 19 | 20 | def _crunch(arr): 21 | if arr.size == 1: 22 | arr = arr.item() 23 | return arr 24 | arr = arr.squeeze() 25 | return np.array(arr) 26 | 27 | 28 | def loadmatdict(fname): 29 | out = {} 30 | with fname.open('rb') as fobj: 31 | xx = sio.loadmat(fobj) 32 | keys = [k for k in xx.keys() if not k.startswith('__')] 33 | for k in keys: 34 | out[k] = _structured_to_dict(xx[k]) 35 | return out 36 | 37 | # This is the data version designation used in the file name; but it 38 | # is not a true version, because the file contents changes from one 39 | # matlab release to another. 40 | data_ver = 'v3_0' 41 | 42 | # This is the version of the matlab zipfile from which we are getting 43 | # the data file. 44 | mat_zip_ver = 'v3_06_16' 45 | 46 | # The following relative path will depend on the directory layout for 47 | # whoever is running this utility. 48 | basedir = Path(__file__).parent.parent 49 | gsw_data_file = Path( 50 | basedir.parent, 51 | "GSW-Matlab", 52 | "Toolbox", 53 | "library", 54 | f"gsw_data_{data_ver}.mat", 55 | ) 56 | print(gsw_data_file) 57 | 58 | gsw_data = loadmatdict(gsw_data_file) 59 | 60 | # Save compare values `gsw_cv` in a separate file. 61 | cv_vars = gsw_data['gsw_cv'] 62 | cv_vars['gsw_data_file'] = str(gsw_data_file) 63 | cv_vars['mat_zip_ver'] = mat_zip_ver 64 | fname = Path(basedir, "gsw", "tests", f"gsw_cv_{data_ver}") 65 | np.savez(str(fname), **cv_vars) 66 | -------------------------------------------------------------------------------- /tools/matlab_parser.py: -------------------------------------------------------------------------------- 1 | """ 2 | It may be necessary to edit the location of the GSW-Matlab directory. 3 | """ 4 | 5 | import re 6 | from pathlib import Path 7 | 8 | basedir = Path(__file__).parent.parent 9 | 10 | gsw_matlab_dir = basedir.joinpath('..', 'GSW-Matlab', 'Toolbox').resolve() 11 | if not gsw_matlab_dir.exists(): 12 | raise IOError( 13 | f"Could not find the GSW-Matlab source code in {gsw_matlab_dir}." 14 | "Please read the development notes to find how to setup your GSW-Python development environment." 
15 | ) 16 | 17 | gsw_matlab_subdirs = ['library', 'thermodynamics_from_t'] 18 | 19 | # pattern for functions returning one variable 20 | mfunc_topline1 = re.compile(r"^function (?P\S+)\s*=\s*" 21 | r"gsw_(?P\S+)" 22 | r"\((?P.*)\)") 23 | 24 | # pattern for multiple returns 25 | mfunc_topline2 = re.compile(r"^function \[(?P.*)\]\s*=\s*" 26 | r"gsw_(?P\S+)" 27 | r"\((?P.*)\)") 28 | 29 | # mis-spellings: key is bad in Matlab; replace with value 30 | arg_fixups = dict(sea_surface_geopotental='sea_surface_geopotential',) 31 | 32 | def list_functions(matdir=gsw_matlab_dir, subdir=''): 33 | rawlist = matdir.glob('*.m') 34 | signatures = [] 35 | rejects = [] 36 | for m in rawlist: 37 | with m.open(encoding='latin-1') as f: 38 | line = f.readline() 39 | _match = mfunc_topline1.match(line) 40 | if _match is None: 41 | _match = mfunc_topline2.match(line) 42 | if _match is None: 43 | rejects.append(m) 44 | else: 45 | _input = [s.strip() for s in _match.group('input').split(',')] 46 | _input = [arg_fixups.get(n, n) for n in _input] 47 | _output = [s.strip() for s in _match.group('output').split(',')] 48 | _funcname = _match.group('funcname') 49 | signatures.append((_funcname, _input, _output, m)) 50 | 51 | return signatures, rejects 52 | 53 | def get_all_signatures(): 54 | signatures, _ = list_functions() 55 | for subdir in gsw_matlab_subdirs: 56 | path = gsw_matlab_dir.joinpath(subdir) 57 | s, _ = list_functions(path) 58 | signatures.extend(s) 59 | return signatures 60 | 61 | def to_sigdict(signatures): 62 | sigdict = dict() 63 | for s in signatures: 64 | _funcname, _input, _output, _m = s 65 | sdict = dict(name=_funcname, 66 | argnames=tuple(_input), 67 | outnames=tuple(_output), 68 | path=_m) 69 | sigdict[_funcname.lower()] = sdict 70 | return sigdict 71 | 72 | def get_complete_sigdict(): 73 | return to_sigdict(get_all_signatures()) 74 | 75 | 76 | def get_sigdicts_by_subdir(): 77 | out = dict(toolbox=to_sigdict(list_functions()[0])) 78 | for subdir in gsw_matlab_subdirs: 79 | out[subdir] = to_sigdict(list_functions(subdir=subdir)[0]) 80 | return out 81 | 82 | 83 | def variables_from_signatures(signatures): 84 | inputs = set() 85 | outputs = set() 86 | for sig in signatures: 87 | inputs.update(sig[1]) 88 | outputs.update(sig[2]) 89 | return inputs, outputs 90 | 91 | def input_groups_from_signatures(signatures): 92 | groups = set() 93 | for sig in signatures: 94 | groups.add(tuple(sig[1])) 95 | return groups 96 | 97 | def get_help_text(fname): 98 | with fname.open(encoding='latin-1') as f: 99 | lines = f.readlines() 100 | help = [] 101 | started = False 102 | for line in lines: 103 | if not line.startswith('%'): 104 | if not started: 105 | continue 106 | else: 107 | break 108 | started = True 109 | help.append(line[2:]) 110 | return help 111 | 112 | def help_text_to_dict(help): 113 | """ 114 | Divide the help text into blocks, using headings as delimiters, and return 115 | them as a dictionary with the headings as keys. 116 | """ 117 | # Headings ('USAGE:', 'DESCRIPTION:', etc.) start with all caps and a colon. 118 | keypat = r"^([A-Z ]+):(.*)" 119 | hdict = dict() 120 | topline = help[0][2:].strip() 121 | parts = topline.split(maxsplit=1) 122 | if len(parts) == 2: 123 | hdict["summary"] = parts[1:] 124 | else: 125 | hdict["summary"] = ["no summary"] 126 | started = False 127 | for line in help[1:]: 128 | keyline = re.match(keypat, line) 129 | if keyline: 130 | # We found a new heading. 131 | if started: 132 | # End the previous block. 133 | hdict[key] = blocklines 134 | # Save the name of the block. 
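            # For a heading line such as "OUTPUT:", 'key' becomes "OUTPUT"
            # and the indented lines that follow are accumulated in
            # 'blocklines' until the next heading appears.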
135 | key = keyline.groups()[0] 136 | blocklines = [] 137 | started = True 138 | # If there is anything else on the heading line, start the block 139 | # with it. 140 | _s = keyline.groups()[1].strip() 141 | if _s: 142 | blocklines.append(_s) 143 | elif started: 144 | _s = line.rstrip() 145 | _s_ljust = _s.lstrip() 146 | if (_s_ljust.startswith('The software is') or 147 | _s_ljust.startswith('=======')): 148 | continue 149 | blocklines.append(_s) 150 | if started and blocklines: 151 | hdict[key] = blocklines 152 | # Library functions don't have sections; we can use the whole thing instead. 153 | block = [] 154 | started = False 155 | for line in help: 156 | if line.startswith("=========="): 157 | started = True 158 | continue 159 | block.append(line) 160 | if line.startswith("VERSION"): 161 | break 162 | hdict['all'] = block 163 | return hdict 164 | 165 | 166 | def get_helpdict(fname): 167 | return help_text_to_dict(get_help_text(fname)) 168 | --------------------------------------------------------------------------------
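A minimal, self-contained sketch (not part of the repository) of how the
top-line regexes in tools/matlab_parser.py are intended to behave. The
Matlab signature line below is hypothetical, and the named groups
('output', 'funcname', 'input') are written out explicitly because they
are what list_functions() reads via _match.group(...).

import re

mfunc_topline2 = re.compile(r"^function \[(?P<output>.*)\]\s*=\s*"
                            r"gsw_(?P<funcname>\S+)"
                            r"\((?P<input>.*)\)")

line = "function [SA, in_ocean] = gsw_SA_from_SP(SP,p,long,lat)"  # hypothetical
m = mfunc_topline2.match(line)
print(m.group('funcname'))                                 # SA_from_SP
print([s.strip() for s in m.group('input').split(',')])    # ['SP', 'p', 'long', 'lat']
print([s.strip() for s in m.group('output').split(',')])   # ['SA', 'in_ocean']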