├── docs ├── authors.rst ├── history.rst ├── readme.rst ├── contributing.rst ├── environment.yml ├── index.rst ├── installation.rst ├── usage.rst ├── Makefile └── conf.py ├── readthedocs.yml ├── setup.cfg ├── setup.py ├── MANIFEST.in ├── AUTHORS.rst ├── .zenodo.json ├── tox.ini ├── oceansdb ├── __init__.py ├── datasource │ ├── etopo.json │ ├── cars.json │ ├── woa.json │ ├── woa13.json │ └── woa18.json ├── common.py ├── utils.py ├── etopo.py ├── cars.py └── woa.py ├── .gitignore ├── .travis.yml ├── HISTORY.rst ├── LICENSE.rst ├── .github └── workflows │ ├── publish-to-pypi.yml │ └── ci.yml ├── Makefile ├── pyproject.toml ├── tests ├── test_ETOPO_from_nc.py ├── test_CARS_from_nc.py └── test_WOA_from_nc.py ├── README.rst └── CONTRIBUTING.rst /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../AUTHORS.rst 2 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../HISTORY.rst 2 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /readthedocs.yml: -------------------------------------------------------------------------------- 1 | conda: 2 | file: docs/environment.yml 3 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [wheel] 2 | universal = 1 3 | 4 | [flake8] 5 | exclude = docs 6 | 7 | -------------------------------------------------------------------------------- /docs/environment.yml: -------------------------------------------------------------------------------- 1 | name: oceansdb 2 | dependencies: 3 | - netcdf4=1.2.4=np111py27_0 4 | - numpy=1.11.0=py27_0 5 | - pip 6 | - py=1.4.31=py27_0 7 | - python=2.7.11=0 8 | - scipy 9 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 4 | 5 | 6 | from setuptools import setup 7 | 8 | setup( 9 | packages=['oceansdb'], 10 | package_dir={'oceansdb': 'oceansdb'}, 11 | include_package_data=True, 12 | zip_safe=False, 13 | ) 14 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include VERSION 2 | include AUTHORS.rst 3 | include CONTRIBUTING.rst 4 | include HISTORY.rst 5 | include LICENSE.rst 6 | include README.rst 7 | include requirements.txt 8 | include requirements_dev.txt 9 | 10 | recursive-include oceansdb/datasource *.json 11 | recursive-include tests * 12 | recursive-exclude * __pycache__ 13 | recursive-exclude * *.py[co] 14 | 15 | recursive-include docs *.rst conf.py Makefile make.bat 16 | -------------------------------------------------------------------------------- 
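A quick sanity check that the datasource JSON files declared in MANIFEST.in actually ship with an installed copy of the package. This is a minimal sketch, mirroring the pkg_resources calls used later in oceansdb/utils.py; the expected file names are the ones listed under oceansdb/datasource in the tree above.

    import pkg_resources

    # List the JSON configuration files bundled inside the installed package;
    # this should include etopo.json, cars.json and the woa*.json variants.
    cfg_files = pkg_resources.resource_listdir('oceansdb', 'datasource')
    print(sorted(f for f in cfg_files if f.endswith('.json')))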
/docs/index.rst: -------------------------------------------------------------------------------- 1 | .. oceansdb documentation master file, created by 2 | sphinx-quickstart on Tue Jul 9 22:26:36 2013. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to oceansdb's documentation! 7 | ====================================== 8 | 9 | Contents: 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | 14 | readme 15 | installation 16 | usage 17 | contributing 18 | authors 19 | history 20 | 21 | Indices and tables 22 | ================== 23 | 24 | * :ref:`genindex` 25 | * :ref:`modindex` 26 | * :ref:`search` 27 | 28 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Credits 3 | ======= 4 | 5 | Development Lead 6 | ---------------- 7 | 8 | * Guilherme Castelao 9 | 10 | Contributors 11 | ------------ 12 | 13 | * José Dias Neto 14 | 15 | * Erdem M Karakoylu: Reported bad checksum on CARS' salinity file (issue #7). 16 | * @sgartzman: Reported bug #8, truncated files on Python-2. 17 | * @maryjacketti: Reported bug #12, outdated ETOPO documentation. 18 | * Uday Bhaskar T V S: Reported improper path to DB files for Windows machines. 19 | * Carin Anderson & Ozan Göktürk: Reported bug #14 - coordinates partially coincident. 20 | * Simon Good: Reported bug on CARS related to #17. 21 | * Chris Slater: Added support for PEP-343 to release NetCDF file resources 22 | -------------------------------------------------------------------------------- /.zenodo.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "OceansDB", 3 | "creators": [ 4 | { 5 | "name": "Castelao, Guilherme", 6 | "affiliation": "Scripps Institution of Oceanography - UC San Diego", 7 | "orcid": "0000-0002-6765-0708" 8 | } 9 | ], 10 | "contributors": [ 11 | { 12 | "type": "Researcher", 13 | "name": "Dias Neto, José", 14 | "affiliation": "Institute for Geophysics and Meteorology, University of Cologne", 15 | "orcid": "0000-0002-8488-8486" 16 | }, 17 | { 18 | "type": "Software Engineer", 19 | "name": "Slater, Chris", 20 | "affiliation": "Cooperative Institute for Research in Environmental Sciences (CIRES) at the University of Colorado Boulder" 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py{38}-numpy{18,19}-scipy{14,15}-cdf{12,16} 3 | 4 | [travis] 5 | python = 6 | 3.7: py37 7 | 3.8: py38 8 | 9 | [testenv] 10 | setenv = 11 | PYTHONPATH = {toxinidir}:{toxinidir}/oceansdb 12 | USE_NCCONFIG = 0 13 | 14 | commands = 15 | pip install -U pip 16 | py.test --basetemp={envtmpdir} tests 17 | 18 | deps = 19 | pytest>=3.0 20 | numpy18: numpy~=1.18.0 21 | numpy19: numpy>=1.19.0 22 | scipy14: scipy~=1.4.0 23 | scipy15: scipy>=1.5.0 24 | cdf12: netCDF4~=1.2.0 25 | cdf13: netCDF4~=1.3.0 26 | cdf14: netCDF4~=1.4.0 27 | cdf16: netCDF4>=1.6.0 28 | 29 | sitepackages = False 30 | 31 | [testenv:flake] 32 | basepython = python 33 | deps = flake8 34 | commands = flake8 oceansdb 35 | -------------------------------------------------------------------------------- /oceansdb/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | __author__ = 'Guilherme 
Castelao' 4 | __email__ = 'guilherme@castelao.net' 5 | 6 | from pkg_resources import DistributionNotFound, get_distribution 7 | 8 | try: 9 | __version__ = get_distribution(__name__).version 10 | except DistributionNotFound: 11 | try: 12 | from .version import version as __version__ 13 | except ImportError: 14 | raise ImportError( 15 | "Failed to find (autogenerated) version.py. " 16 | "This might be because you are installing from GitHub's tarballs; " 17 | "use the PyPI ones instead." 18 | ) 19 | 20 | 21 | from oceansdb.woa import WOA 22 | from oceansdb.etopo import ETOPO 23 | from oceansdb.cars import CARS 24 | #from WOA.woa import woa_profile 25 | #from WOA.woa import woa_profile_from_dap 26 | #from WOA.woa import woa_track_from_file 27 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Sphinx documentation 53 | docs/_build/ 54 | 55 | # PyBuilder 56 | target/ 57 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | sudo: false 4 | 5 | python: 6 | - "3.6" 7 | - "3.7" 8 | - "3.8" 9 | 10 | before_install: 11 | - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; 12 | - bash miniconda.sh -b -p $HOME/miniconda 13 | - export PATH="$HOME/miniconda/bin:$PATH" 14 | - hash -r 15 | - conda config --set always_yes yes --set changeps1 no 16 | - conda update -q conda 17 | # Useful for debugging any issues with conda 18 | - conda info -a 19 | 20 | install: 21 | - conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION pip numpy scipy hdf4 libnetcdf netcdf4 pytest 22 | - source activate test-environment 23 | - pip install -r requirements_dev.txt 24 | - pip install codecov pytest-cov 25 | - python setup.py install 26 | 27 | before_script: 28 | # Download the required climatology files. 29 | - python -c "import oceansdb; oceansdb.WOA()['sea_water_temperature']" 30 | - python -c "import oceansdb; oceansdb.CARS()['sea_water_temperature']" 31 | - python -c "import oceansdb; oceansdb.ETOPO()['topography']" 32 | 33 | script: 34 | - py.test --cov --cov-report=term-missing -vv 35 | 36 | after_success: 37 | - codecov 38 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | .. :changelog: 2 | 3 | History 4 | ------- 5 | 6 | 0.8.0 7 | ----- 8 | 9 | * Allow choosing the resolution and temporal scale.
10 | 11 | 0.7.0 (2017-05-11) 12 | ------------------ 13 | 14 | * New generic crop functionality. 15 | * Interpolate in steps, in this order: time, lat x lon, and finally z. This gives better results for profiles. 16 | 17 | 0.6.0 (2016-04-14) 18 | ------------------ 19 | 20 | * Added CSIRO Atlas of Regional Seas (CARS), another climatology for temperature and salinity. 21 | 22 | 0.4.0 (2016-03-29) 23 | ------------------ 24 | 25 | * Added etopo5, a worldwide bathymetry. Some of this code has its origins in pyAVISO and CoTeDe. 26 | 27 | 0.2.0 (2016-03) 28 | ------------------ 29 | 30 | * Renamed package to OceansDB, planning to include other climatologies and references. 31 | 32 | 0.1.0 (2016-02-09) 33 | ------------------ 34 | 35 | * Generalized interpolator. Extracts climatology for a point, profile, or section. 36 | 37 | 0.0.1 (2015-12-13) 38 | ------------------ 39 | 40 | * First release on PyPI. Extracted from CoTeDe to be a standalone package. 41 | 42 | pre-released (~2006) 43 | -------------------- 44 | 45 | * The first version was developed in support of quality controlling thermosalinographs at NOAA. It was later incorporated into CoTeDe. 46 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Installation 5 | ============ 6 | 7 | 8 | Stable release 9 | -------------- 10 | 11 | To install OceansDB, run this command in your terminal: 12 | 13 | .. code-block:: console 14 | 15 | $ pip install oceansdb 16 | 17 | This is the preferred method to install OceansDB, as it will always install the most recent stable release. 18 | 19 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide 20 | you through the process. 21 | 22 | .. _pip: https://pip.pypa.io 23 | .. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/ 24 | 25 | 26 | From sources 27 | ------------ 28 | 29 | The sources for OceansDB can be downloaded from the `Github repo`_. 30 | 31 | You can either clone the public repository: 32 | 33 | .. code-block:: console 34 | 35 | $ git clone git://github.com/castelao/oceansdb 36 | 37 | Or download the `tarball`_: 38 | 39 | .. code-block:: console 40 | 41 | $ curl -OL https://github.com/castelao/oceansdb/tarball/master 42 | 43 | Once you have a copy of the source, you can install it with: 44 | 45 | .. code-block:: console 46 | 47 | $ python setup.py install 48 | 49 | 50 | .. _Github repo: https://github.com/castelao/oceansdb 51 | ..
_tarball: https://github.com/castelao/oceansdb/tarball/master 52 | -------------------------------------------------------------------------------- /oceansdb/datasource/etopo.json: -------------------------------------------------------------------------------- 1 | {"ETOPO": { 2 | "rel_path": "etopo", 3 | "vars": { 4 | "topography": { 5 | "default_resolution": "5min", 6 | "5min": { 7 | "default_tscale": "stationary", 8 | "varnames": { 9 | "lat": "ETOPO05_Y", 10 | "lon": "ETOPO05_X", 11 | "height": "ROSE" 12 | }, 13 | "stationary": [ 14 | { 15 | "filename": "etopo5.nc", 16 | "url": "https://pae-paha.pacioos.hawaii.edu/thredds/ncss/etopo5?var=ROSE&disableLLSubset=on&disableProjSubset=on&horizStride=1" 17 | } 18 | ] 19 | }, 20 | "1min": { 21 | "default_tscale": "stationary", 22 | "varnames": { 23 | "latitude": "y", 24 | "longitude": "x", 25 | "lat": "y", 26 | "lon": "x", 27 | "height": "z" 28 | }, 29 | "stationary": [ 30 | { 31 | "filename": "etopo1.nc", 32 | "url": "https://www.ngdc.noaa.gov/mgg/global/relief/ETOPO1/data/bedrock/grid_registered/netcdf/ETOPO1_Bed_g_gmt4.grd.gz" 33 | } 34 | ] 35 | } 36 | } 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /LICENSE.rst: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015-2020, Guilherme Castelao 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 5 | 6 | * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 7 | 8 | * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 9 | 10 | * Neither the name of OceansDB nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 11 | 12 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-------------------------------------------------------------------------------- /.github/workflows/publish-to-pypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish distributions to PyPI and TestPyPI 2 | 3 | on: 4 | push: 5 | tags: 6 | - v* 7 | 8 | jobs: 9 | test: 10 | uses: castelao/oceansdb/.github/workflows/ci.yml@main 11 | 12 | build-n-publish: 13 | name: Build 14 | needs: test 15 | runs-on: ubuntu-latest 16 | 17 | steps: 18 | - uses: actions/checkout@v3 19 | with: 20 | fetch-depth: 0 21 | 22 | - name: Set up Python 3.9 23 | uses: actions/setup-python@v3 24 | with: 25 | python-version: "3.9" 26 | 27 | - name: Install pypa/build 28 | run: >- 29 | python -m 30 | pip install 31 | build 32 | --user 33 | - name: Build a binary wheel and a source tarball 34 | run: >- 35 | python -m 36 | build 37 | --sdist 38 | --wheel 39 | --outdir dist/ 40 | . 41 | 42 | - name: Check distribution files 43 | run: | 44 | pip install twine 45 | twine check dist/* 46 | 47 | # - name: Publish distribution package to Test PyPI 48 | # uses: pypa/gh-action-pypi-publish@master 49 | # with: 50 | # password: ${{ secrets.TEST_PYPI_API_TOKEN }} 51 | # repository_url: https://test.pypi.org/legacy/ 52 | - name: Publish distribution package to PyPI 53 | if: startsWith(github.ref, 'refs/tags') 54 | uses: pypa/gh-action-pypi-publish@release/v1 55 | with: 56 | user: __token__ 57 | password: ${{ secrets.PYPI_API_TOKEN }} 58 | verify_metadata: false 59 | verbose: true 60 | print_hash: true 61 | -------------------------------------------------------------------------------- /oceansdb/datasource/cars.json: -------------------------------------------------------------------------------- 1 | {"CARS": { 2 | "rel_path": "cars", 3 | "vars": { 4 | "sea_water_temperature": { 5 | "default_resolution": "30min", 6 | "30min": { 7 | "default_tscale": "stationary", 8 | "varnames": { 9 | "latitude": "lat", 10 | "longitude": "lon", 11 | "standard_deviation": "std_dev", 12 | "number_of_observations": "nq", 13 | "t_sd": "std_dev", 14 | "t_dd": "nq" 15 | }, 16 | "stationary": [ 17 | { 18 | "filename": "temperature_cars2009a.nc", 19 | "url": "http://www.marine.csiro.au/atlas/export/temperature_cars2009a.nc.gz", 20 | "md5hash": "a310418e6c36751f2f9e9e641905d503" 21 | } 22 | ] 23 | } 24 | }, 25 | "sea_water_salinity": { 26 | "default_resolution": "30min", 27 | "30min": { 28 | "default_tscale": "stationary", 29 | "varnames": { 30 | "latitude": "lat", 31 | "longitude": "lon", 32 | "standard_deviation": "std_dev", 33 | "number_of_observations": "nq", 34 | "s_sd": "std_dev", 35 | "s_dd": "nq" 36 | }, 37 | "stationary": [ 38 | { 39 | "filename": "salinity_cars2009a.nc", 40 | "url": "http://www.marine.csiro.au/atlas/export/salinity_cars2009a.nc.gz", 41 | "md5hash": "fd81af5bceabd3607a0d56b6445708af" 42 | } 43 | ] 44 | } 45 | } 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean-pyc clean-build docs clean 2 | 3 | help: 4 | @echo "clean - remove all build, test, coverage and Python artifacts" 5 | @echo "clean-build - remove build artifacts" 6 | @echo "clean-pyc - remove Python file artifacts" 7 | @echo "clean-test - remove test and coverage artifacts" 8 | @echo "lint - check style with flake8" 9 | @echo "test - run tests quickly with the default Python" 10 | @echo "test-all - run tests on every Python version with tox" 11 | @echo "coverage - 
check code coverage quickly with the default Python" 12 | @echo "docs - generate Sphinx HTML documentation, including API docs" 13 | @echo "release - package and upload a release" 14 | @echo "dist - package" 15 | @echo "install - install the package to the active Python's site-packages" 16 | 17 | clean: clean-build clean-pyc clean-test 18 | 19 | clean-build: 20 | rm -fr build/ 21 | rm -fr dist/ 22 | rm -fr .eggs/ 23 | find . -name '*.egg-info' -exec rm -fr {} + 24 | find . -name '*.egg' -exec rm -f {} + 25 | 26 | clean-pyc: 27 | find . -name '*.pyc' -exec rm -f {} + 28 | find . -name '*.pyo' -exec rm -f {} + 29 | find . -name '*~' -exec rm -f {} + 30 | find . -name '__pycache__' -exec rm -fr {} + 31 | 32 | clean-test: 33 | rm -fr .tox/ 34 | rm -f .coverage 35 | rm -fr htmlcov/ 36 | 37 | lint: 38 | flake8 oceansdb tests 39 | 40 | test: 41 | py.test tests 42 | 43 | test-all: 44 | tox 45 | 46 | coverage: 47 | coverage run --source oceansdb setup.py test 48 | coverage report -m 49 | coverage html 50 | open htmlcov/index.html 51 | 52 | docs: 53 | rm -f docs/oceansdb.rst 54 | rm -f docs/modules.rst 55 | sphinx-apidoc -o docs/ oceansdb 56 | $(MAKE) -C docs clean 57 | $(MAKE) -C docs html 58 | open docs/_build/html/index.html 59 | 60 | release: clean dist 61 | twine upload dist/* 62 | 63 | dist: clean 64 | python setup.py sdist 65 | python setup.py bdist_wheel 66 | ls -l dist 67 | 68 | install: clean 69 | python setup.py install 70 | -------------------------------------------------------------------------------- /oceansdb/datasource/woa.json: -------------------------------------------------------------------------------- 1 | {"WOA": { 2 | "vars": { 3 | "TEMP": { 4 | "5": { 5 | "annual": { 6 | "url": "http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t00_5dv2.nc", 7 | "md5": "9cc5cf28d4f1f4057c9d9f263ca13d2a" 8 | }, 9 | "seasonal_old": { 10 | "url": "http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/temperature_seasonal_5deg.nc", 11 | "md5": "271f66e8dea4dfef7db99f5f411af330" 12 | }, 13 | "seasonal": [ 14 | "http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t13_5dv2.nc", 15 | "http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t14_5dv2.nc", 16 | "http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t15_5dv2.nc", 17 | "http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t16_5dv2.nc" 18 | ] 19 | } 20 | }, 21 | "PSAL": { 22 | "5": { 23 | "annual": { 24 | "url": "http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s00_5dv2.nc", 25 | "md5": "108f28fe1dd250b0598ae666be08fc19" 26 | }, 27 | "seasonal_old": { 28 | "url": "http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA09/NetCDFdata/salinity_seasonal_5deg.nc", 29 | "md5": "1d2d1982338c688bdd18069d030ec05f" 30 | }, 31 | "seasonal": [ 32 | "http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s13_5dv2.nc", 33 | "http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s14_5dv2.nc", 34 | "http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s15_5dv2.nc", 35 | "http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s16_5dv2.nc" 36 | ] 37 | } 38 | } 
39 | } 40 | } } 41 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools >= 48", 4 | "setuptools_scm[toml] >= 6.2", 5 | "setuptools_scm_git_archive", 6 | "wheel >= 0.29.0", 7 | ] 8 | build-backend = 'setuptools.build_meta' 9 | 10 | [project] 11 | name="oceansdb" 12 | dynamic = ["version"] 13 | description="Subsample ocean climatologies and reference data" 14 | readme = "README.rst" 15 | requires-python = ">=3.7" 16 | license = {file = "LICENSE.rst"} 17 | keywords = ["WOA", "World Ocean Atlas", "climatology", "oceanography", 18 | "ETOPO", "temperature", "salinity", "bathymetry", "CARS"] 19 | authors = [ 20 | {email = "guilherme@castelao.net"}, 21 | {name = "Guilherme Castelao"} 22 | ] 23 | classifiers=[ 24 | "Development Status :: 4 - Beta", 25 | "Intended Audience :: Science/Research", 26 | "License :: OSI Approved :: BSD License", 27 | "Operating System :: OS Independent", 28 | "Programming Language :: Python :: 3", 29 | "Programming Language :: Python :: 3.7", 30 | "Programming Language :: Python :: 3.8", 31 | "Programming Language :: Python :: 3.9", 32 | "Programming Language :: Python :: 3.10", 33 | "Topic :: Scientific/Engineering" 34 | ] 35 | dependencies = [ 36 | "numpy>=1.14", 37 | "scipy>=1.1", 38 | "netCDF4>=1.2.4", 39 | "supportdata>=0.1.3", 40 | ] 41 | 42 | [project.optional-dependencies] 43 | dev = [ 44 | "twine >= 1.8.1", 45 | "sphinx>=1.5.1", 46 | ] 47 | test = [ 48 | "hypothesis >= 6.29.3", 49 | "pytest >= 5.0.0", 50 | "pytest-cov[all]", 51 | "pip >= 9.0.1", 52 | "flake8 >= 3.2.1", 53 | "tox >= 2.3.3", 54 | "coverage >= 4.2", 55 | "supportdata >= 0.1.2", 56 | ] 57 | 58 | [project.urls] 59 | repository = "https://github.com/castelao/oceansdb" 60 | 61 | [tool.black] 62 | line-length = 88 63 | 64 | [tool.setuptools_scm] 65 | write_to = "oceansdb/version.py" 66 | git_describe_command = "git describe --dirty --tags --long --match 'v*' --first-parent" 67 | 68 | [tool.ruff] 69 | select = ["A", "I", "W"] 70 | ignore = [] 71 | 72 | # Allow autofix for all enabled rules (when `--fix` is provided). 73 | fixable = [] 74 | unfixable = [] 75 | 76 | # Exclude a variety of commonly ignored directories. 77 | exclude = [ 78 | ".eggs", 79 | ".git", 80 | ".mypy_cache", 81 | ".nox", 82 | ".ruff_cache", 83 | ".tox", 84 | "__pypackages__", 85 | "_build", 86 | "build", 87 | "dist", 88 | ] 89 | per-file-ignores = {} 90 | 91 | line-length = 88 92 | 93 | # Assume Python 3.7.
94 | target-version = "py37" 95 | 96 | [tool.ruff.pydocstyle] 97 | convention = "numpy" 98 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: OceansDB 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | workflow_call: 9 | 10 | jobs: 11 | build: 12 | 13 | runs-on: ubuntu-latest 14 | 15 | strategy: 16 | fail-fast: false 17 | max-parallel: 2 18 | matrix: 19 | python-version: ["3.7", "3.8", "3.9", "3.10"] 20 | 21 | steps: 22 | - uses: actions/checkout@v3 23 | with: 24 | fetch-depth: 0 25 | 26 | - name: Set up Python ${{ matrix.python-version }} 27 | uses: actions/setup-python@v4 28 | with: 29 | python-version: ${{ matrix.python-version }} 30 | 31 | - name: Install dependencies 32 | run: | 33 | python -m pip install --upgrade pip 34 | pip install -e .[test] 35 | 36 | - name: Cache ETOPO 37 | id: cache-etopo 38 | uses: actions/cache@v3 39 | with: 40 | path: | 41 | ~/.config/oceansdb/etopo5.nc 42 | key: ${{ runner.os }}-ETOPO 43 | 44 | - name: Download ETOPO database 45 | if: steps.cache-etopo.outputs.cache-hit != 'true' 46 | run: python -c "import oceansdb; oceansdb.ETOPO()['topography']" 47 | 48 | - name: Cache WOA 49 | id: cache-woa 50 | uses: actions/cache@v3 51 | with: 52 | path: | 53 | ~/.config/oceansdb/woa18_decav_t13_5d.nc 54 | ~/.config/oceansdb/woa18_decav_t14_5d.nc 55 | ~/.config/oceansdb/woa18_decav_t15_5d.nc 56 | ~/.config/oceansdb/woa18_decav_t16_5d.nc 57 | ~/.config/oceansdb/woa18_decav_s13_5d.nc 58 | ~/.config/oceansdb/woa18_decav_s14_5d.nc 59 | ~/.config/oceansdb/woa18_decav_s15_5d.nc 60 | ~/.config/oceansdb/woa18_decav_s16_5d.nc 61 | key: ${{ runner.os }}-WOA18 62 | 63 | - name: Download WOA database 64 | if: steps.cache-woa.outputs.cache-hit != 'true' 65 | run: | 66 | python -c "import oceansdb; oceansdb.WOA()['sea_water_temperature']" 67 | python -c "import oceansdb; oceansdb.WOA()['sea_water_salinity']" 68 | 69 | - name: Cache CARS 70 | id: cache-cars 71 | uses: actions/cache@v3 72 | with: 73 | path: | 74 | ~/.config/oceansdb/temperature_cars2009a.nc 75 | ~/.config/oceansdb/salinity_cars2009a.nc 76 | key: ${{ runner.os }}-CARS 77 | 78 | - name: Download CARS database 79 | if: steps.cache-cars.outputs.cache-hit != 'true' 80 | run: | 81 | python -c "import oceansdb; oceansdb.CARS()['sea_water_temperature']" 82 | python -c "import oceansdb; oceansdb.CARS()['sea_water_salinity']" 83 | 84 | - name: Test with pytest 85 | run: | 86 | pytest tests 87 | -------------------------------------------------------------------------------- /tests/test_ETOPO_from_nc.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | """ 6 | 7 | from datetime import datetime 8 | 9 | import numpy as np 10 | from numpy import ma 11 | 12 | from oceansdb.etopo import ETOPO 13 | 14 | def test_import(): 15 | # A shortcut 16 | from oceansdb import ETOPO 17 | with ETOPO() as db: 18 | pass 19 | 20 | 21 | def test_available_vars(): 22 | with ETOPO() as db: 23 | 24 | for v in ['topography']: 25 | assert v in db.keys() 26 | 27 | 28 | # ==== Request points coincident with the ETOPO gridpoints 29 | def test_coincident_gridpoint(): 30 | with ETOPO() as db: 31 | 32 | h = db['topography'].extract(lat=17.5, lon=0) 33 | assert np.allclose(h['height'], [305.]) 34 | 35 | h = db['topography'].extract(lat=[17.5, 18.5], lon=0) 36 | assert
np.allclose(h['height'], [305., 335.]) 37 | 38 | h = db['topography'].extract(lat=17.5, lon=[0, 0.25]) 39 | assert np.allclose(h['height'], [305., 305.]) 40 | 41 | h = db['topography'].extract(lat=[17.5, 18.5], lon=[0, 0.25]) 42 | assert np.allclose(h['height'], [[305., 305.], [335., 335.]]) 43 | 44 | 45 | def test_lon_cyclic(): 46 | with ETOPO() as db: 47 | 48 | h1 = db['topography'].extract(lat=17.5, lon=182.5) 49 | h2 = db['topography'].extract(lat=17.5, lon=-177.5) 50 | assert np.allclose(h1['height'], h2['height']) 51 | 52 | h1 = db['topography'].extract(lat=17.5, lon=[-37.5, -32.5]) 53 | h2 = db['topography'].extract(lat=17.5, lon=[322.5, 327.5]) 54 | assert np.allclose(h1['height'], h2['height']) 55 | 56 | lons = 360 * np.random.random(10) 57 | for lon1 in lons: 58 | h1 = db['topography'].extract(lat=17.5, lon=lon1) 59 | lon2 = lon1 - 360 60 | h2 = db['topography'].extract(lat=17.5, lon=lon2) 61 | assert np.allclose(h1['height'], h2['height']), \ 62 | "Different height between: %s and %s" % (lon1, lon2) 63 | 64 | 65 | def test_resolution(): 66 | """Test different resolutions of ETOPO 67 | 68 | Test in places where the two resolutions give different values to 69 | validate the result 70 | """ 71 | # 5 min arc resolution 72 | with ETOPO(resolution='5min') as db: 73 | h5min = db['topography'].extract(lat=-67, lon=103) 74 | assert np.allclose(h5min['height'], [1585.0]) 75 | 76 | # 1 min arc resolution 77 | with ETOPO(resolution='1min') as db: 78 | h1min = db['topography'].extract(lat=-67, lon=103) 79 | assert np.allclose(h1min['height'], [-26]) 80 | 81 | 82 | def test_track(): 83 | with ETOPO() as db: 84 | 85 | h = db['topography'].track(lat=[17.5, 18.5], lon=[0, 0.25]) 86 | assert np.allclose(h['height'], [305., 335.]) 87 | 88 | h = db['topography'].track(lat=[12, 15], lon=[-38, -35]) 89 | assert np.allclose(h['height'], [-4895.982 , -5959.1216]) 90 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | OceansDB 3 | ======== 4 | 5 | .. image:: https://zenodo.org/badge/52222122.svg 6 | :target: https://zenodo.org/badge/latestdoi/52222122 7 | 8 | .. image:: https://readthedocs.org/projects/oceansdb/badge/?version=latest 9 | :target: http://oceansdb.readthedocs.org/en/latest/?badge=latest 10 | :alt: Documentation Status 11 | 12 | .. image:: https://img.shields.io/travis/castelao/oceansdb.svg 13 | :target: https://travis-ci.org/castelao/oceansdb 14 | 15 | .. image:: https://img.shields.io/pypi/v/oceansdb.svg 16 | :target: https://pypi.python.org/pypi/oceansdb 17 | 18 | 19 | A package to subsample, or interpolate, climatologies like WOA at any requested coordinates. 20 | 21 | This package started with functions to obtain climatological values to compare with measured data, allowing a quality control check by comparison. It therefore needed to work for any requested coordinates. I split these functionalities from CoTeDe into this standalone package to allow more people to use it for other purposes. 22 | 23 | * Free software: 3-clause BSD style license - see LICENSE.rst 24 | * Documentation: https://oceansdb.readthedocs.io. 25 | 26 | Features 27 | -------- 28 | 29 | - If the database files are not locally available, they are automatically downloaded.
30 | 31 | - Extract, or interpolate if necessary, climatological data at the requested coordinates; 32 | 33 | - Can request a single point, a profile, or a section; 34 | 35 | - Ready to handle -180 to 180 or 0 to 360 coordinate systems; 36 | 37 | - Ready to use with: 38 | 39 | - World Ocean Atlas (WOA) 40 | 41 | - CSIRO Atlas of Regional Seas (CARS) 42 | 43 | - ETOPO (topography) 44 | 45 | Quick how-to 46 | --------------- 47 | 48 | Inside Python: 49 | 50 | .. code-block:: python 51 | 52 | >>> import oceansdb 53 | >>> db = oceansdb.WOA() 54 | 55 | Find out what is available: 56 | 57 | .. code-block:: python 58 | 59 | >>> db.keys() 60 | 61 | Average temperature at one point: 62 | 63 | .. code-block:: python 64 | 65 | >>> t = db['sea_water_temperature'].extract(var='mean', doy=136.875, depth=0, lat=17.5, lon=-37.5) 66 | 67 | A profile of salinity: 68 | 69 | .. code-block:: python 70 | 71 | >>> t = db['sea_water_salinity'].extract(var='mean', doy=136.875, depth=[0, 10, 15, 18], lat=17.5, lon=-37.5) 72 | 73 | A full depth section of temperature: 74 | 75 | .. code-block:: python 76 | 77 | >>> t = db['sea_water_temperature'].extract(var='mean', doy=136.875, lat=17.48, lon=[-39, -37.5, -35.2]) 78 | 79 | Using CARS instead of WOA, here as a context manager: 80 | 81 | .. code-block:: python 82 | 83 | >>> with oceansdb.CARS() as db: 84 | ...     t = db['sea_water_temperature'].extract(var='mean', doy=136.875, lat=17.48, lon=[-39, -37.5, -35.2], depth=[0,10,120,280]) 85 | 86 | Or to get topography for one point from the 1 min arc resolution: 87 | 88 | .. code-block:: python 89 | 90 | >>> with oceansdb.ETOPO(resolution='1min') as db: 91 | ...     h = db['topography'].extract(lat=17.5, lon=0) 92 | -------------------------------------------------------------------------------- /oceansdb/common.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 4 | import numpy as np 5 | 6 | 7 | def cropIndices(dims, lat, lon, depth=None, doy=None): 8 | """ Return the indices to crop a dataset 9 | 10 | Assuming that the dataset has the dimensions given by 11 | dims, this function returns the indices that conform to 12 | the given coordinates (lat, lon, ...) 13 | 14 | ATTENTION: To address a bug when only lat, or only lon, is coincident 15 | with the input, cropIndices now takes one extra point around the 16 | required range to cover the desired output. This is not the optimal 17 | solution, but a temporary one.
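
Example with hypothetical dimensions -- a sketch of the expected call
pattern, not values from a real database file:

>>> dims = {'lat': np.arange(-89.5, 90), 'lon': np.arange(0.5, 360)}
>>> dims_out, idx = cropIndices(dims, lat=np.array([10.2, 12.7]), lon=np.array([302.1, 305.3]))
>>> sorted(idx.keys())
['xn', 'yn']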
18 | """ 19 | dims_out = {} 20 | idx = {} 21 | 22 | yn = slice( 23 | np.nonzero(dims['lat'] < lat.min())[0].max(), 24 | np.nonzero(dims['lat'] > lat.max())[0].min() + 1) 25 | dims_out['lat'] = np.atleast_1d(dims['lat'][yn]) 26 | idx['yn'] = yn 27 | 28 | lon_ext = np.array( 29 | (dims['lon'] - 2*360).tolist() + 30 | (dims['lon'] - 360).tolist() + 31 | dims['lon'].tolist() + 32 | (dims['lon'] + 360).tolist()) 33 | xn_ext = list(4 * list(range(dims['lon'].shape[0]))) 34 | xn_start = np.nonzero(lon_ext < lon.min())[0].max() 35 | xn_end = np.nonzero(lon_ext > lon.max())[0].min() 36 | xn = xn_ext[xn_start:xn_end+1] 37 | dims_out['lon'], i = np.unique(lon_ext[xn_start:xn_end+1], return_index=True) 38 | idx['xn'] = [xn[ii] for ii in i] 39 | 40 | if depth is not None: 41 | zn = slice( 42 | np.nonzero(dims['depth'] <= depth.min())[0].max(), 43 | np.nonzero(dims['depth'] >= min(dims['depth'].max(), depth.max()) 44 | )[0].min() + 1 45 | ) 46 | # If a higher degree interpolation system uses more than one data 47 | # point in the edge, I should extend this selection one point on 48 | # each side, without going beyond 0 49 | # if zn.start < 0: 50 | # zn = slice(0, zn.stop, zn.step) 51 | dims_out['depth'] = np.atleast_1d(dims['depth'][zn]) 52 | idx['zn'] = zn 53 | 54 | if doy is not None: 55 | # Source has only one time, like total mean field, or annual mean. 56 | if dims['time'].shape == (1,): 57 | dims_out['time'] = dims['time'] 58 | idx['tn'] = [0] 59 | else: 60 | time_ext = np.array( 61 | [dims['time'][-1] - 365.25] + 62 | dims['time'].tolist() + 63 | [dims['time'][0] + 365.25]) 64 | tn_ext = list(range(dims['time'].size)) 65 | tn_ext = [tn_ext[-1]] + tn_ext + [tn_ext[0]] 66 | tn_start = np.nonzero(time_ext <= doy.min())[0].max() 67 | tn_end = np.nonzero(time_ext >= doy.max())[0].min() 68 | dims_out['time'] = np.atleast_1d(time_ext[tn_start:tn_end+1]) 69 | idx['tn'] = tn_ext[tn_start:tn_end+1] 70 | 71 | return dims_out, idx 72 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Contributing 3 | ============ 4 | 5 | Contributions are welcome, and they are greatly appreciated! Every 6 | little bit helps, and credit will always be given. 7 | 8 | You can contribute in many ways: 9 | 10 | Types of Contributions 11 | ---------------------- 12 | 13 | Report Bugs 14 | ~~~~~~~~~~~ 15 | 16 | Report bugs at https://github.com/castelao/oceansdb/issues. 17 | 18 | If you are reporting a bug, please include: 19 | 20 | * Your operating system name and version. 21 | * Any details about your local setup that might be helpful in troubleshooting. 22 | * Detailed steps to reproduce the bug. 23 | 24 | Fix Bugs 25 | ~~~~~~~~ 26 | 27 | Look through the GitHub issues for bugs. Anything tagged with "bug" 28 | is open to whoever wants to implement it. 29 | 30 | Implement Features 31 | ~~~~~~~~~~~~~~~~~~ 32 | 33 | Look through the GitHub issues for features. Anything tagged with "feature" 34 | is open to whoever wants to implement it. 35 | 36 | Write Documentation 37 | ~~~~~~~~~~~~~~~~~~~ 38 | 39 | OceansDB could always use more documentation, whether as part of the 40 | official OceansDB docs, in docstrings, or even on the web in blog posts, 41 | articles, and such. 42 | 43 | Submit Feedback 44 | ~~~~~~~~~~~~~~~ 45 | 46 | The best way to send feedback is to file an issue at https://github.com/castelao/oceansdb/issues.
47 | 48 | If you are proposing a feature: 49 | 50 | * Explain in detail how it would work. 51 | * Keep the scope as narrow as possible, to make it easier to implement. 52 | * Remember that this is a volunteer-driven project, and that contributions 53 | are welcome :) 54 | 55 | Get Started! 56 | ------------ 57 | 58 | Ready to contribute? Here's how to set up `oceansdb` for local development. 59 | 60 | 1. Fork the `oceansdb` repo on GitHub. 61 | 2. Clone your fork locally:: 62 | 63 | $ git clone git@github.com:your_name_here/oceansdb.git 64 | 65 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: 66 | 67 | $ mkvirtualenv oceansdb 68 | $ cd oceansdb/ 69 | $ python setup.py develop 70 | 71 | 4. Create a branch for local development:: 72 | 73 | $ git checkout -b name-of-your-bugfix-or-feature 74 | 75 | Now you can make your changes locally. 76 | 77 | 5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox:: 78 | 79 | $ flake8 oceansdb tests 80 | $ python setup.py test 81 | $ tox 82 | 83 | To get flake8 and tox, just pip install them into your virtualenv. 84 | 85 | 6. Commit your changes and push your branch to GitHub:: 86 | 87 | $ git add . 88 | $ git commit -m "Your detailed description of your changes." 89 | $ git push origin name-of-your-bugfix-or-feature 90 | 91 | 7. Submit a pull request through the GitHub website. 92 | 93 | Pull Request Guidelines 94 | ----------------------- 95 | 96 | Before you submit a pull request, check that it meets these guidelines: 97 | 98 | 1. The pull request should include tests. 99 | 2. If the pull request adds functionality, the docs should be updated. Put 100 | your new functionality into a function with a docstring, and add the 101 | feature to the list in README.rst. 102 | 3. The pull request should work for Python 3.7, 3.8, 3.9, and 3.10. Check 103 | https://travis-ci.org/castelao/oceansdb/pull_requests 104 | and make sure that the tests pass for all supported Python versions.
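
For example, to run a single combination from the ``tox.ini`` matrix locally (the environment name below is one of the factors generated by its ``envlist``)::

    $ tox -e py38-numpy19-scipy15-cdf16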
105 | 106 | Tips 107 | ---- 108 | 109 | To run a subset of tests:: 110 | 111 | $ py.test tests/test_WOA_from_nc.py 112 | -------------------------------------------------------------------------------- /tests/test_CARS_from_nc.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | """ 6 | 7 | from datetime import datetime 8 | 9 | import numpy as np 10 | from numpy import ma 11 | 12 | from oceansdb.cars import CARS 13 | 14 | def test_import(): 15 | # A shortcut 16 | from oceansdb import CARS 17 | with CARS() as db: 18 | pass 19 | 20 | 21 | def test_available_vars(): 22 | with CARS() as db: 23 | 24 | for v in ['sea_water_temperature', 'sea_water_salinity']: 25 | assert v in db.keys() 26 | 27 | 28 | def test_oceansites_nomenclature(): 29 | with CARS() as db: 30 | assert db['sea_water_temperature'] == db['TEMP'] 31 | #assert db['sea_water_salinity'] == db['PSAL'] 32 | 33 | 34 | # ==== Request points coincident with the CARS gridpoints 35 | def test_coincident_gridpoint(): 36 | with CARS() as db: 37 | 38 | t = db['sea_water_temperature'].extract(var='mn', doy=100, 39 | depth=0, lat=17.5, lon=322.5) 40 | assert np.allclose(t['mn'], [23.78240879]) 41 | 42 | t = db['sea_water_temperature'].extract(var='mn', doy=[100, 150], 43 | depth=0, lat=17.5, lon=322.5) 44 | assert np.allclose(t['mn'], [23.78240879, 24.57544294]) 45 | 46 | t = db['sea_water_temperature'].extract(var='mn', doy=100, 47 | depth=[0, 10], lat=17.5, lon=322.5) 48 | assert np.allclose(t['mn'], [23.78240879, 23.97279877]) 49 | 50 | t = db['sea_water_temperature'].extract(var='mn', doy=100, 51 | depth=0, lat=[17.5, 12.5], lon=322.5) 52 | assert np.allclose(t['mn'], [24.61333538, 23.78240879]) 53 | 54 | t = db['sea_water_temperature'].extract(var='mn', doy=100, 55 | depth=0, lat=17.5, lon=[322.5, 327.5]) 56 | assert np.allclose(t['mn'], [23.78240879, 24.03691995]) 57 | 58 | t = db['sea_water_temperature'].extract(var='mn', doy=100, 59 | depth=[0, 10], lat=[17.5, 12.5], lon=322.5) 60 | assert np.allclose(t['mn'], 61 | [[24.61333538, 23.78240879], [24.7047015, 23.97279877]]) 62 | 63 | 64 | def valid_mean_masked_std(): 65 | """Position with valid mean but masked standard deviation 66 | 67 | This was a special case for CoTeDe's quality control where there is a 68 | reference mean value but the variability can't be scaled since there is no 69 | standard deviation. 70 | 71 | I checked these values manually, directly from the original CARS' 72 | netCDF.
73 | """ 74 | with CARS() as db: 75 | t = db['sea_water_temperature'].extract(var='mn', doy=156, 76 | lat=-30, lon=15, depth=1000) 77 | assert np.allclose(t['mn'], [3.365484633192386]) 78 | 79 | t = db['sea_water_temperature'].extract(var='std_dev', doy=156, 80 | lat=-30, lon=15, depth=1000) 81 | assert ma.is_masked(t['std_dev']) 82 | 83 | t = db['sea_water_temperature'].extract(var='mn', doy=156, 84 | lat=-30, lon=15, depth=[475, 500]) 85 | assert np.allclose(t['mn'], [6.576306195708654, 6.205499360879657]) 86 | 87 | t = db['sea_water_temperature'].extract(var='std_dev', doy=156, 88 | lat=-30, lon=15, depth=[475, 500]) 89 | assert np.allclose(t['std_dev'][0], [0.7555582683533487]) 90 | assert ma.is_masked(t['std_dev'][1]) 91 | 92 | 93 | def test_special_cases_near_land(): 94 | """Specific cases from WOD 95 | 96 | Simon pointed out 3 profiles from WOD that would fail 97 | """ 98 | with CARS() as db: 99 | 100 | coords = [[-67.4683, 109.91, -1.55866820], [-4.32, 114.65, 28.42269382], [-66.95, 111.7, -1.47608867]] 101 | for (lat, lon, ans) in coords: 102 | t = db["sea_water_temperature"].extract(var="mn", doy=90, depth=0, lat=lat, lon=lon) 103 | assert np.allclose(t["mn"], ans) 104 | -------------------------------------------------------------------------------- /oceansdb/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | 5 | """ 6 | 7 | import os 8 | import sys 9 | import pkg_resources 10 | import json 11 | 12 | from netCDF4 import Dataset 13 | 14 | from supportdata import download_file 15 | 16 | if sys.version_info >= (3, 0): 17 | from urllib.parse import urlparse 18 | else: 19 | from urlparse import urlparse 20 | 21 | 22 | """ 23 | 24 | http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA13/DATAv2/temperature/netcdf/decav/0.25/woa13_decav_t00_04v2.nc.html 25 | http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA13/DATAv2/salinity/netcdf/decav/0.25/woa13_decav_s00_04v2.nc.html 26 | http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA13/DATA/oxygen/netcdf/all/1.00/woa13_all_o00_01.nc.html 27 | http://data.nodc.noaa.gov/thredds/dodsC/woa/WOA13/DATA/nitrate/netcdf/all/1.00/woa13_all_n00_01.nc.html 28 | 29 | 30 | http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t00_01v2.nc 31 | http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t13_01v2.nc 32 | http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t01_01v2.nc 33 | 34 | http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/temperature/netcdf/decav/0.25/woa13_decav_t00_04v2.nc 35 | http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/temperature/netcdf/decav/0.25/woa13_decav_t13_04v2.nc 36 | http://data.nodc.noaa.gov/thredds/fileServer/woa/WOA13/DATAv2/temperature/netcdf/decav/0.25/woa13_decav_t01_04v2.nc 37 | 38 | """ 39 | 40 | 41 | def oceansdb_dir(): 42 | """Path where OceansDB database files are saved 43 | """ 44 | dbpath = os.getenv('OCEANSDB_DIR', '~/.config/oceansdb') 45 | return os.path.expanduser(dbpath).replace('/', os.path.sep) 46 | 47 | 48 | class Dataset_flex(object): 49 | def __init__(self, filename, **kwargs): 50 | self.ds = Dataset(filename, mode='r') 51 | if 'aliases' in kwargs: 52 | self.aliases = kwargs['aliases'] 53 | else: 54 | self.aliases = {} 55 | def __getitem__(self, item): 56 | try: 57 | return self.ds.variables[self.aliases[item]] 58 | except KeyError: 59 | return self.ds.variables[item] 60 | def
__enter__(self): 61 | return self 62 | def __exit__(self, exc_type, exc_value, exc_traceback): 63 | self.close() 64 | def close(self): 65 | self.ds.close() 66 | @property 67 | def variables(self): 68 | return self.ds.variables 69 | 70 | 71 | def dbsource(dbname, var, resolution=None, tscale=None): 72 | """Return which file(s) to use according to dbname, var, etc. 73 | """ 74 | db_cfg = {} 75 | cfg_dir = 'datasource' 76 | cfg_files = pkg_resources.resource_listdir('oceansdb', cfg_dir) 77 | cfg_files = [f for f in cfg_files if f[-5:] == '.json'] 78 | for src_cfg in cfg_files: 79 | text = pkg_resources.resource_string( 80 | 'oceansdb', os.path.join(cfg_dir, src_cfg)) 81 | text = text.decode('UTF-8', 'replace') 82 | cfg = json.loads(text) 83 | for c in cfg: 84 | assert c not in db_cfg, "Trying to overwrite %s" % c 85 | db_cfg[c] = cfg[c] 86 | 87 | dbpath = oceansdb_dir() 88 | datafiles = [] 89 | cfg = db_cfg[dbname] 90 | 91 | if (resolution is None): 92 | resolution = cfg['vars'][var]['default_resolution'] 93 | 94 | if (tscale is None): 95 | tscale = cfg['vars'][var][resolution]["default_tscale"] 96 | 97 | for c in cfg['vars'][var][resolution][tscale]: 98 | download_file(outputdir=dbpath, **c) 99 | 100 | if 'filename' in c: 101 | filename = os.path.join(dbpath, c['filename']) 102 | else: 103 | filename = os.path.join(dbpath, 104 | os.path.basename(urlparse(c['url']).path)) 105 | 106 | if 'varnames' in cfg['vars'][var][resolution]: 107 | datafiles.append(Dataset_flex(filename, 108 | aliases=cfg['vars'][var][resolution]['varnames'])) 109 | else: 110 | datafiles.append(Dataset_flex(filename)) 111 | 112 | return datafiles 113 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | Usage 3 | ======== 4 | 5 | To use OceansDB in a project: 6 | 7 | .. code-block:: python 8 | 9 | import oceansdb 10 | 11 | Now create a World Ocean Atlas database object by: 12 | 13 | .. code-block:: python 14 | 15 | db = oceansdb.WOA() 16 | 17 | The first time you run this it might take some time, since it needs to download the actual database files. You don't need to do anything other than wait. 18 | 19 | The extract() method of the database object takes care of subsampling, and if necessary interpolating, to give you the variable(s) at the requested coordinates. The returned output is always a dictionary, even if you requested only one variable from the database. 20 | 21 | To get temperature at one point: 22 | 23 | .. code-block:: python 24 | 25 | >>> t = db['TEMP'].extract(var='t_mn', doy=136.875, depth=0, lat=17.5, lon=-37.5) 26 | 27 | The WOA climatological temperature will be available as t['t_mn']. 28 | 29 | .. code-block:: python 30 | 31 | >>> t.keys() 32 | ['t_mn'] 33 | 34 | >>> t['t_mn'].shape 35 | (1,) 36 | 37 | >>> t['t_mn'] 38 | masked_array(data = [ 24.60449791], 39 | mask = False, 40 | fill_value = 1e+20) 41 | 42 | If you prefer, you can obtain all available variables by not defining var: 43 | 44 | .. code-block:: python 45 | 46 | >>> t = db['PSAL'].extract(doy=136.875, depth=[0, 10, 15, 18], lat=17.5, lon=-37.5) 47 | 48 | >>> t.keys() 49 | ['s_dd', 's_sd', 's_se', 's_mn'] 50 | 51 | To get one profile of salinity: 52 | 53 | .. code-block:: python 54 | 55 | >>> t = db['PSAL'].extract(var='s_mn', doy=136.875, depth=[0, 10, 15, 18], lat=17.5, lon=-37.5) 56 | 57 | To get one section of temperature: 58 | 59 | ..
code-block:: python 60 | 61 | >>> t = db['TEMP'].extract(var='t_mn', doy=136.875, lat=17.5, lon=[-39, -37.5, -35]) 62 | 63 | To get a regular 3D grid: 64 | 65 | .. code-block:: python 66 | 67 | >>> t = db['TEMP'].extract(var='t_mn', depth=[0, 10.23], doy=136.875, lat=[15, 17.5, 23], lon=[-39, -37.5, -35, -32.73]) 68 | 69 | >>> t['t_mn'].shape 70 | (2, 3, 4) 71 | 72 | To use bathymetry, let's first load ETOPO: 73 | 74 | .. code-block:: python 75 | 76 | >>> db = oceansdb.ETOPO() 77 | 78 | Let's check the variables available in ETOPO: 79 | 80 | .. code-block:: python 81 | 82 | >>> db.keys() 83 | ['topography'] 84 | 85 | To get topography for one point: 86 | 87 | .. code-block:: python 88 | 89 | >>> db['topography'].extract(lat=15, lon=38) 90 | {'height': masked_array(data=[1012], 91 | mask=[False], 92 | fill_value=999999, 93 | dtype=int32)} 94 | 95 | To get topography along a latitude: 96 | 97 | .. code-block:: python 98 | 99 | >>> db['topography'].extract(lat=15, lon=[-25, -30, -38, -40, -45]) 100 | {'height': masked_array(data=[-4150, -5451, -5588, -5217, -3840], 101 | mask=[False, False, False, False, False], 102 | fill_value=999999, 103 | dtype=int32)} 104 | 105 | To get topography along a longitude: 106 | 107 | .. code-block:: python 108 | 109 | >>> db['topography'].extract(lat=[10, 15, 20, 25], lon=38) 110 | {'height': masked_array(data=[1486, 1012, -759, 797], 111 | mask=[False, False, False, False], 112 | fill_value=999999, 113 | dtype=int32)} 114 | 115 | To get topography over an area: 116 | 117 | .. code-block:: python 118 | 119 | >>> db['topography'].extract(lat=[10, 15, 20, 25], lon=[30, 38, 40]) 120 | {'height': masked_array( 121 | data=[[413, 1486, 1227], 122 | [504, 1012, 210], 123 | [294, -759, -217], 124 | [241, 797, 1050]], 125 | mask=[[False, False, False], 126 | [False, False, False], 127 | [False, False, False], 128 | [False, False, False]], 129 | fill_value=999999, 130 | dtype=int32)} 131 | 132 | To use ETOPO with 5min resolution instead of the 1min: 133 | 134 | .. code-block:: python 135 | 136 | >>> db = oceansdb.ETOPO(resolution='5min') 137 | 138 | >>> db['topography'].extract(lat=15, lon=38) 139 | {'height': masked_array(data=[1372.0], 140 | mask=[False], 141 | fill_value=1e+20, 142 | dtype=float32)} 143 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make <target>' where <target> is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/oceansdb.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/oceansdb.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished."
96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/{{ cookiecutter.repo_name }}" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/{{ cookiecutter.repo_name }}" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
178 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # oceansdb documentation build configuration file, created by 5 | # sphinx-quickstart on Tue Jul 9 22:26:36 2013. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | import sys 17 | import os 18 | 19 | # If extensions (or modules to document with autodoc) are in another 20 | # directory, add these directories to sys.path here. If the directory is 21 | # relative to the documentation root, use os.path.abspath to make it 22 | # absolute, like shown here. 23 | #sys.path.insert(0, os.path.abspath('.')) 24 | 25 | # Get the project root dir, which is the parent dir of this 26 | cwd = os.getcwd() 27 | project_root = os.path.dirname(cwd) 28 | 29 | # Insert the project root dir as the first element in the PYTHONPATH. 30 | # This lets us ensure that the source package is imported, and that its 31 | # version is used. 32 | sys.path.insert(0, project_root) 33 | 34 | #import oceansdb 35 | 36 | # -- General configuration --------------------------------------------- 37 | 38 | # If your documentation needs a minimal Sphinx version, state it here. 39 | #needs_sphinx = '1.0' 40 | 41 | # Add any Sphinx extension module names here, as strings. They can be 42 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 43 | extensions = [ 44 | 'sphinx.ext.autodoc', 45 | #'sphinx.ext.doctest', 46 | 'sphinx.ext.viewcode' 47 | #'sphinx.ext.coverage', 48 | ] 49 | 50 | # Add any paths that contain templates here, relative to this directory. 51 | templates_path = ['_templates'] 52 | 53 | # The suffix of source filenames. 54 | source_suffix = '.rst' 55 | 56 | # The encoding of source files. 57 | #source_encoding = 'utf-8-sig' 58 | 59 | # The master toctree document. 60 | master_doc = 'index' 61 | 62 | # General information about the project. 63 | project = u'OceansDB' 64 | copyright = u'2015, Guilherme Castelão' 65 | 66 | # The version info for the project you're documenting, acts as replacement 67 | # for |version| and |release|, also used in various other places throughout 68 | # the built documents. 69 | # 70 | # The short X.Y version. 71 | #version = oceansdb.__version__ 72 | version = '0.6' 73 | # The full version, including alpha/beta/rc tags. 74 | #release = oceansdb.__version__ 75 | release = '0.6' 76 | 77 | # The language for content autogenerated by Sphinx. Refer to documentation 78 | # for a list of supported languages. 79 | #language = None 80 | 81 | # There are two options for replacing |today|: either, you set today to 82 | # some non-false value, then it is used: 83 | #today = '' 84 | # Else, today_fmt is used as the format for a strftime call. 85 | #today_fmt = '%B %d, %Y' 86 | 87 | # List of patterns, relative to source directory, that match files and 88 | # directories to ignore when looking for source files. 89 | exclude_patterns = ['_build'] 90 | 91 | # The reST default role (used for this markup: `text`) to use for all 92 | # documents. 93 | #default_role = None 94 | 95 | # If true, '()' will be appended to :func: etc. cross-reference text. 
96 | #add_function_parentheses = True 97 | 98 | # If true, the current module name will be prepended to all description 99 | # unit titles (such as .. function::). 100 | #add_module_names = True 101 | 102 | # If true, sectionauthor and moduleauthor directives will be shown in the 103 | # output. They are ignored by default. 104 | #show_authors = False 105 | 106 | # The name of the Pygments (syntax highlighting) style to use. 107 | pygments_style = 'sphinx' 108 | 109 | # A list of ignored prefixes for module index sorting. 110 | #modindex_common_prefix = [] 111 | 112 | # If true, keep warnings as "system message" paragraphs in the built 113 | # documents. 114 | #keep_warnings = False 115 | 116 | # If true, `todo` and `todoList` produce output, else they produce nothing. 117 | todo_include_todos = False 118 | 119 | # -- Options for HTML output ------------------------------------------- 120 | 121 | # The theme to use for HTML and HTML Help pages. See the documentation for 122 | # a list of builtin themes. 123 | html_theme = 'default' 124 | 125 | # Theme options are theme-specific and customize the look and feel of a 126 | # theme further. For a list of options available for each theme, see the 127 | # documentation. 128 | #html_theme_options = {} 129 | 130 | # Add any paths that contain custom themes here, relative to this directory. 131 | #html_theme_path = [] 132 | 133 | # The name for this set of Sphinx documents. If None, it defaults to 134 | # " v documentation". 135 | #html_title = None 136 | 137 | # A shorter title for the navigation bar. Default is the same as 138 | # html_title. 139 | #html_short_title = None 140 | 141 | # The name of an image file (relative to this directory) to place at the 142 | # top of the sidebar. 143 | #html_logo = None 144 | 145 | # The name of an image file (within the static path) to use as favicon 146 | # of the docs. This file should be a Windows icon file (.ico) being 147 | # 16x16 or 32x32 pixels large. 148 | #html_favicon = None 149 | 150 | # Add any paths that contain custom static files (such as style sheets) 151 | # here, relative to this directory. They are copied after the builtin 152 | # static files, so a file named "default.css" will overwrite the builtin 153 | # "default.css". 154 | html_static_path = ['_static'] 155 | 156 | # If not '', a 'Last updated on:' timestamp is inserted at every page 157 | # bottom, using the given strftime format. 158 | #html_last_updated_fmt = '%b %d, %Y' 159 | 160 | # If true, SmartyPants will be used to convert quotes and dashes to 161 | # typographically correct entities. 162 | #html_use_smartypants = True 163 | 164 | # Custom sidebar templates, maps document names to template names. 165 | #html_sidebars = {} 166 | 167 | # Additional templates that should be rendered to pages, maps page names 168 | # to template names. 169 | #html_additional_pages = {} 170 | 171 | # If false, no module index is generated. 172 | #html_domain_indices = True 173 | 174 | # If false, no index is generated. 175 | #html_use_index = True 176 | 177 | # If true, the index is split into individual pages for each letter. 178 | #html_split_index = False 179 | 180 | # If true, links to the reST sources are added to the pages. 181 | #html_show_sourcelink = True 182 | 183 | # If true, "Created using Sphinx" is shown in the HTML footer. 184 | # Default is True. 185 | #html_show_sphinx = True 186 | 187 | # If true, "(C) Copyright ..." is shown in the HTML footer. 188 | # Default is True. 
189 | #html_show_copyright = True 190 | 191 | # If true, an OpenSearch description file will be output, and all pages 192 | # will contain a tag referring to it. The value of this option 193 | # must be the base URL from which the finished HTML is served. 194 | #html_use_opensearch = '' 195 | 196 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 197 | #html_file_suffix = None 198 | 199 | # Output file base name for HTML help builder. 200 | htmlhelp_basename = 'oceansdbdoc' 201 | 202 | 203 | # -- Options for LaTeX output ------------------------------------------ 204 | 205 | latex_elements = { 206 | # The paper size ('letterpaper' or 'a4paper'). 207 | #'papersize': 'letterpaper', 208 | 209 | # The font size ('10pt', '11pt' or '12pt'). 210 | #'pointsize': '10pt', 211 | 212 | # Additional stuff for the LaTeX preamble. 213 | #'preamble': '', 214 | } 215 | 216 | # Grouping the document tree into LaTeX files. List of tuples 217 | # (source start file, target name, title, author, documentclass 218 | # [howto/manual]). 219 | latex_documents = [ 220 | ('index', 'oceansdb.tex', 221 | u'oceansdb Documentation', 222 | u'Guilherme Castelao', 'manual'), 223 | ] 224 | 225 | # The name of an image file (relative to this directory) to place at 226 | # the top of the title page. 227 | #latex_logo = None 228 | 229 | # For "manual" documents, if this is true, then toplevel headings 230 | # are parts, not chapters. 231 | #latex_use_parts = False 232 | 233 | # If true, show page references after internal links. 234 | #latex_show_pagerefs = False 235 | 236 | # If true, show URL addresses after external links. 237 | #latex_show_urls = False 238 | 239 | # Documents to append as an appendix to all manuals. 240 | #latex_appendices = [] 241 | 242 | # If false, no module index is generated. 243 | #latex_domain_indices = True 244 | 245 | 246 | # -- Options for manual page output ------------------------------------ 247 | 248 | # One entry per manual page. List of tuples 249 | # (source start file, name, description, authors, manual section). 250 | man_pages = [ 251 | ('index', 'oceansdb', 252 | u'oceansdb Documentation', 253 | [u'Guilherme Castelao'], 1) 254 | ] 255 | 256 | # If true, show URL addresses after external links. 257 | #man_show_urls = False 258 | 259 | 260 | # -- Options for Texinfo output ---------------------------------------- 261 | 262 | # Grouping the document tree into Texinfo files. List of tuples 263 | # (source start file, target name, title, author, 264 | # dir menu entry, description, category) 265 | texinfo_documents = [ 266 | ('index', 'oceansdb', 267 | u'oceansdb Documentation', 268 | u'Guilherme Castelao', 269 | 'oceansdb', 270 | 'One line description of project.', 271 | 'Miscellaneous'), 272 | ] 273 | 274 | # Documents to append as an appendix to all manuals. 275 | #texinfo_appendices = [] 276 | 277 | # If false, no module index is generated. 278 | #texinfo_domain_indices = True 279 | 280 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 281 | #texinfo_show_urls = 'footnote' 282 | 283 | # If true, do not generate a @detailmenu in the "Top" node's menu. 
284 | #texinfo_no_detailmenu = False
285 | 
--------------------------------------------------------------------------------
/oceansdb/etopo.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | 
3 | """ Module to handle ETOPO bathymetry
4 | """
5 | from os.path import expanduser
6 | import numpy as np
7 | from numpy import ma
8 | import netCDF4
9 | 
10 | from .utils import dbsource
11 | from .common import cropIndices
12 | 
13 | from scipy.interpolate import griddata, RectBivariateSpline
14 | 
15 | 
16 | def get_depth(lat, lon, cfg):
17 | """
18 | 
19 | ATTENTION: conceptual error in the data near Greenwich.
20 | url='http://opendap.ccst.inpe.br/Climatologies/ETOPO/etopo5.cdf'
21 | 
22 | If I ever need to get depth from multiple points, check the history
23 | of this file. One day it was like that.
24 | """
25 | # This assert fails if it is a np.float64. Re-think this assert anyways.
26 | #assert type(lat) in [int, float]
27 | #assert type(lon) in [int, float]
28 | 
29 | # if lat.shape != lon.shape:
30 | # print "lat and lon must have the same size"
31 | etopo_netcdf = None
32 | try:
33 | try:
34 | try:
35 | etopo = netCDF4.Dataset(expanduser(cfg['file']))
36 | except:
37 | # FIXME, It must have a time limit defined here, otherwise it can
38 | # get stuck trying to open the file.
39 | etopo = netCDF4.Dataset(expanduser(cfg['url']))
40 | etopo_netcdf = etopo
41 | x = etopo.variables['ETOPO05_X'][:]
42 | y = etopo.variables['ETOPO05_Y'][:]
43 | except:
44 | from pydap.client import open_url; etopo = open_url(cfg['url']).ROSE  # OPeNDAP fallback; needs the optional pydap package
45 | x = etopo.ETOPO05_X[:]
46 | y = etopo.ETOPO05_Y[:]
47 | 
48 | if lon < 0:
49 | lon += 360
50 | 
51 | iini = (abs(lon - x)).argmin() - 2
52 | ifin = (abs(lon - x)).argmin() + 2
53 | jini = (abs(lat - y)).argmin() - 2
54 | jfin = (abs(lat - y)).argmin() + 2
55 | 
56 | assert (iini >= 0) and (ifin <= len(x)) and \
57 | (jini >= 0) and (jfin <= len(y)), \
58 | "Sorry, not ready to handle points too close to the boundaries"
59 | 
60 | try:
61 | z = etopo.variables['ROSE'][jini:jfin, iini:ifin]
62 | except:
63 | z = etopo.ROSE[jini:jfin, iini:ifin]
64 | 
65 | interpolator = RectBivariateSpline(x[iini:ifin], y[jini:jfin], z.T)
66 | return interpolator(lon, lat)[0][0]
67 | finally:
68 | if etopo_netcdf:
69 | etopo_netcdf.close()
70 | 
71 | 
72 | class ETOPO_var_nc(object):
73 | """
74 | ETOPO global topography
75 | """
76 | def __init__(self, source):
77 | self.ncs = source
78 | 
79 | self.load_dims(dims=['lat', 'lon'])
80 | self.set_keys()
81 | 
82 | def __enter__(self):
83 | return self
84 | 
85 | def __exit__(self, exc_type, exc_value, exc_traceback):
86 | self.close()
87 | 
88 | def close(self):
89 | for nc in self.ncs:
90 | nc.close()
91 | 
92 | def __getitem__(self, item):
93 | return self.data[item]
94 | 
95 | def keys(self):
96 | return self.KEYS
97 | 
98 | def load_dims(self, dims):
99 | self.dims = {}
100 | for d in dims:
101 | self.dims[d] = self.ncs[0][d][:]
102 | for nc in self.ncs[1:]:
103 | assert (self.dims[d] == nc[d][:]).all()
104 | 
105 | def set_keys(self):
106 | self.KEYS = ['height']
107 | 
108 | def crop(self, lat, lon, var):
109 | """ Crop a subset of the dataset for each var
110 | 
111 | Given lat and lon, it returns the smallest subset
112 | that still contains the requested coordinates inside it.
113 | 
114 | It handles special cases like a region around Greenwich and
115 | the international date line.
116 | 
117 | Accepts 0 to 360 and -180 to 180 longitude reference.
118 | 
119 | It extends time and longitude coordinates, to simplify the use
120 | of series.
For example, a ship track can be requested with 121 | a longitude sequence like [352, 358, 364, 369, 380]. 122 | """ 123 | dims, idx = cropIndices(self.dims, lat, lon) 124 | subset = {} 125 | for v in var: 126 | subset = {v: self.ncs[0][v][idx['yn'], idx['xn']]} 127 | return subset, dims 128 | 129 | def nearest(self, lat, lon, var): 130 | output = {} 131 | dims, idx = cropIndices(self.dims, lat, lon) 132 | for v in var: 133 | if v == 'height': 134 | v = 'z' 135 | subset = self.ncs[0].variables[v][idx['yn'], idx['xn']] 136 | output[v] = ma.masked_all((lat.size, lon.size), dtype='f') 137 | for yn_out, y in enumerate(lat): 138 | yn_in = np.absolute(dims['lat']-y).argmin() 139 | for xn_out, x in enumerate(lon): 140 | xn_in = np.absolute(dims['lon']-x).argmin() 141 | output[v][yn_out, xn_out] = subset[yn_in, xn_in] 142 | return output 143 | 144 | def interpolate(self, lat, lon, var): 145 | """ Interpolate each var on the coordinates requested 146 | 147 | """ 148 | 149 | subset, dims = self.crop(lat, lon, var) 150 | 151 | if np.all([y in dims['lat'] for y in lat]) & \ 152 | np.all([x in dims['lon'] for x in lon]): 153 | yn = np.nonzero([y in lat for y in dims['lat']])[0] 154 | xn = np.nonzero([x in lon for x in dims['lon']])[0] 155 | output = {} 156 | for v in subset: 157 | # output[v] = subset[v][dn, zn, yn, xn] 158 | # Seriously that this is the way to do it?!!?? 159 | output[v] = subset[v][:, xn][yn] 160 | return output 161 | 162 | # The output coordinates shall be created only once. 163 | points_out = [] 164 | for latn in lat: 165 | for lonn in lon: 166 | points_out.append([latn, lonn]) 167 | points_out = np.array(points_out) 168 | 169 | output = {} 170 | for v in var: 171 | output[v] = ma.masked_all( 172 | (lat.size, lon.size), 173 | dtype=subset[v].dtype) 174 | 175 | # The valid data 176 | idx = np.nonzero(~ma.getmaskarray(subset[v])) 177 | 178 | if idx[0].size > 0: 179 | points = np.array([ 180 | dims['lat'][idx[0]], dims['lon'][idx[1]]]).T 181 | values = subset[v][idx] 182 | 183 | # Interpolate along the dimensions that have more than one 184 | # position, otherwise it means that the output is exactly 185 | # on that coordinate. 
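# E.g. if the request is at a single latitude, every valid sample
# shares that latitude, so np.unique() along that column has size 1;
# keeping such a degenerate column would hand griddata a zero-volume
# point cloud that it cannot triangulate.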
186 | ind = np.array(
187 | [np.unique(points[:, i]).size > 1 for i in
188 | range(points.shape[1])])
189 | assert ind.any()
190 | 
191 | values_out = griddata(
192 | np.atleast_1d(np.squeeze(points[:, ind])),
193 | values,
194 | np.atleast_1d(np.squeeze(points_out[:, ind]))
195 | )
196 | 
197 | # Remap the interpolated value back into a 2D (lat, lon) array
198 | idx = np.isfinite(values_out)
199 | for [y, x], out in zip(points_out[idx], values_out[idx]):
200 | output[v][y==lat, x==lon] = out
201 | 
202 | return output
203 | 
204 | def track(self, mode=None, **kwargs):
205 | """
206 | 
207 | Possible scenarios:
208 | - Track: lat{n}, lon{n}
209 | """
210 | for k in kwargs:
211 | assert k in ['var', 'lat', 'lon'], \
212 | "Wrong dimension to extract, check the manual"
213 | 
214 | if 'var' in kwargs:
215 | var = np.atleast_1d(kwargs['var'])
216 | else:
217 | var = np.asanyarray(self.KEYS)
218 | 
219 | lat = np.atleast_1d(kwargs['lat'])
220 | lon = np.atleast_1d(kwargs['lon'])
221 | 
222 | assert lat.shape == lon.shape
223 | 
224 | output = {}
225 | for v in var:
226 | output[v] = []
227 | 
228 | for y, x in zip(lat, lon):
229 | if mode == 'nearest':
230 | tmp = self.nearest(
231 | np.array([y]), np.array([x]), var)
232 | else:
233 | tmp = self.interpolate(
234 | np.array([y]), np.array([x]), var)
235 | 
236 | for v in tmp:
237 | output[v].append(tmp[v])
238 | 
239 | for v in output:
240 | output[v] = np.atleast_1d(np.squeeze(output[v]))
241 | 
242 | return output
243 | 
244 | def extract(self, mode=None, **kwargs):
245 | """
246 | 
247 | Possible scenarios:
248 | - Point: lat{1},lon{1}
249 | - Section: [lat{1},lon{n} | lat{n},lon{1}]
250 | 
251 | - Track: lat{n},lon{n}
252 | """
253 | for k in kwargs:
254 | assert k in ['var', 'lat', 'lon'], \
255 | "Wrong dimension to extract, check the manual"
256 | 
257 | if 'var' in kwargs:
258 | var = np.atleast_1d(kwargs['var'])
259 | else:
260 | var = np.asanyarray(self.keys())
261 | 
262 | lat = np.atleast_1d(kwargs['lat'])
263 | lon = np.atleast_1d(kwargs['lon'])
264 | 
265 | if mode == 'nearest':
266 | output = self.nearest(lat, lon, var)
267 | else:
268 | output = self.interpolate(lat, lon, var)
269 | for v in output:
270 | output[v] = np.atleast_1d(np.squeeze(output[v]))
271 | 
272 | return output
273 | 
274 | 
275 | class ETOPO(ETOPO_var_nc):
276 | """
277 | """
278 | def __init__(self, dbname='ETOPO', resolution=None):
279 | self.dbname = dbname
280 | self.data = {'topography': None}
281 | self.resolution = resolution
282 | 
283 | def keys(self):
284 | return self.data.keys()
285 | 
286 | def __getitem__(self, item):
287 | if item == 'elevation':
288 | print("elevation is deprecated.
Use topography instead") 289 | import time 290 | time.sleep(3) 291 | return self['topography'] 292 | 293 | if self.data[item] is None: 294 | self.data[item] = ETOPO_var_nc(source=dbsource( 295 | self.dbname, item, self.resolution)) 296 | return self.data[item] 297 | 298 | def __enter__(self): 299 | return self 300 | 301 | def __exit__(self, exc_type, exc_value, exc_traceback): 302 | self.close() 303 | 304 | def close(self): 305 | for data_key in self.data: 306 | var_nc = self.data.get(data_key) 307 | if var_nc: 308 | var_nc.close() 309 | 310 | def extract(self, *args, **kwargs): 311 | print("Deprecated syntax, better use: db['topography'].extract(...)") 312 | import time 313 | time.sleep(3) 314 | return self['topography'].extract(*args, **kwargs)['height'] 315 | -------------------------------------------------------------------------------- /tests/test_WOA_from_nc.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | """ 6 | 7 | from datetime import datetime 8 | 9 | import numpy as np 10 | from numpy import ma 11 | 12 | from oceansdb.woa import WOA 13 | 14 | 15 | def test_import(): 16 | # A shortcut 17 | from oceansdb import WOA 18 | with WOA() as db: 19 | pass 20 | 21 | 22 | def test_ncs_size(): 23 | """ Check if loaded the 4 seasonal climatology files 24 | 25 | The default for WOA is to load the seasonal climatology composed 26 | of four files, one for each season. This test checks if all four 27 | files were loaded. 28 | """ 29 | with WOA() as db: 30 | assert len(db['sea_water_temperature'].ncs) == 4 31 | assert len(db['sea_water_salinity'].ncs) == 4 32 | 33 | 34 | def test_available_vars(): 35 | with WOA() as db: 36 | for v in ['sea_water_temperature', 'sea_water_salinity']: 37 | assert v in db.keys() 38 | 39 | 40 | def test_oceansites_nomenclature(): 41 | with WOA() as db: 42 | assert db['sea_water_temperature'] == db['TEMP'] 43 | #assert db['sea_water_salinity'] == db['PSAL'] 44 | 45 | 46 | # ==== Request points coincidents to the WOA gridpoints 47 | def test_coincident_gridpoint_13(): 48 | with WOA(dbname='WOA13') as db: 49 | 50 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875, 51 | depth=0, lat=17.5, lon=-37.5) 52 | assert np.allclose(t['t_mn'], [24.60449791]) 53 | 54 | t = db['sea_water_temperature'].extract(var='t_mn', doy=[136.875, 228.125], 55 | depth=0, lat=17.5, lon=-37.5) 56 | assert np.allclose(t['t_mn'], [24.60449791, 26.38446426]) 57 | 58 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875, 59 | depth=[0, 10], lat=17.5, lon=-37.5) 60 | assert np.allclose(t['t_mn'], [24.60449791, 24.62145996]) 61 | 62 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875, 63 | depth=0, lat=[17.5, 12.5], lon=-37.5) 64 | assert np.allclose(t['t_mn'], [25.17827606, 24.60449791]) 65 | 66 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875, 67 | depth=0, lat=17.5, lon=[-37.5, -32.5]) 68 | assert np.allclose(t['t_mn'], [24.60449791, 23.98172188]) 69 | 70 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875, 71 | depth=[0, 10], lat=[17.5, 12.5], lon=-37.5) 72 | assert np.allclose(t['t_mn'], 73 | [[25.17827606, 24.60449791], [25.25433731, 24.62145996]]) 74 | 75 | 76 | def test_coincident_gridpoint(): 77 | with WOA() as db: 78 | 79 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875, 80 | depth=0, lat=17.5, lon=-37.5) 81 | assert np.allclose(t['t_mn'], [24.641016]) 82 | 83 | t = 
db['sea_water_temperature'].extract(var='t_mn', doy=[136.875, 228.125],
84 | depth=0, lat=17.5, lon=-37.5)
85 | assert np.allclose(t['t_mn'], [24.641016, 26.392792])
86 | 
87 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875,
88 | depth=[0, 10], lat=17.5, lon=-37.5)
89 | assert np.allclose(t['t_mn'], [24.641016, 24.636028])
90 | 
91 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875,
92 | depth=0, lat=[17.5, 12.5], lon=-37.5)
93 | assert np.allclose(t['t_mn'], [25.167528, 24.641016])
94 | 
95 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875,
96 | depth=0, lat=17.5, lon=[-37.5, -32.5])
97 | assert np.allclose(t['t_mn'], [24.641016, 23.979605])
98 | 
99 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875,
100 | depth=[0, 10], lat=[17.5, 12.5], lon=-37.5)
101 | assert np.allclose(t['t_mn'],
102 | [[25.167528, 24.641016], [25.190489, 24.636028]])
103 | 
104 | 
105 | def test_only_one_coincident_gridpoint():
106 | """It makes a difference if only lat, or only lon, is coincident
107 | 
108 | Because lat/lon is interpolated together as a 2D space, if only
109 | one coordinate is coincident it should be instead a 1D interpolation.
110 | """
111 | with WOA(dbname='WOA13') as db:
112 | 
113 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875,
114 | depth=0, lat=-14.03, lon=62.5)
115 | assert np.allclose(t['t_mn'], [27.41223907])
116 | 
117 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875,
118 | depth=0, lat=-12.5, lon=62.03)
119 | assert np.allclose(t['t_mn'], [27.76179314])
120 | 
121 | 
122 | def test_lon_cyclic():
123 | with WOA() as db:
124 | 
125 | t1 = db['sea_water_temperature'].extract(var='t_mn', doy=136.875,
126 | depth=0, lat=17.5, lon=182.5)
127 | t2 = db['sea_water_temperature'].extract(var='t_mn', doy=136.875,
128 | depth=0, lat=17.5, lon=-177.5)
129 | assert np.allclose(t1['t_mn'], t2['t_mn'])
130 | 
131 | t1 = db['sea_water_temperature'].extract(var='t_mn', doy=136.875,
132 | depth=0, lat=17.5, lon=[-37.5, -32.5])
133 | t2 = db['sea_water_temperature'].extract(var='t_mn', doy=136.875,
134 | depth=0, lat=17.5, lon=[322.5, 327.5])
135 | assert np.allclose(t1['t_mn'], t2['t_mn'])
136 | 
137 | 
138 | def test_no_data_available():
139 | """ This is a position without valid data """
140 | 
141 | with WOA() as db:
142 | out = db['sea_water_temperature'].extract(
143 | doy=155, lat=48.1953, lon=-69.5855,
144 | depth=[2.0, 5.0, 6.0, 21.0, 44.0, 79.0, 5000])
145 | varnames = [u't_dd', u't_mn', u't_sd', u't_se']
146 | for v in varnames:
147 | assert v in out.keys()
148 | for v in varnames:
149 | assert ma.getmaskarray(out[v]).all()
150 | 
151 | 
152 | def test_extract_overlimit():
153 | """ Test a request beyond the limits of the database """
154 | with WOA(dbname='WOA13') as db:
155 | 
156 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875,
157 | depth=5502, lat=17.5, lon=-37.5)
158 | assert ma.is_masked(t['t_mn'])
159 | 
160 | t = db['sea_water_temperature'].extract(var='t_mn', doy=136.875,
161 | depth=[10, 5502], lat=17.5, lon=-37.5)
162 | assert np.all(t['t_mn'].mask == [False, True])
163 | assert ma.allclose(t['t_mn'],
164 | ma.masked_array([24.62145996, 0], mask=[False, True]))
165 | 
166 | 
167 | def test_interpolate_partially_insuficient_data():
168 | """ Special case with insufficient data for some points
169 | 
170 | At 4700 m depth the limited available data is in a plane that does not
171 | contain the desired output. It should not fail, but return valid,
172 | non-masked values where possible.
173 | """ 174 | with WOA() as db: 175 | t = db['sea_water_temperature'].extract(var='mean', doy=108, lat=4, lon=-38) 176 | assert not t['mean'].mask.all() 177 | 178 | 179 | def test_get_point(): 180 | with WOA(dbname='WOA13') as db: 181 | 182 | t = db['sea_water_temperature'].extract(var='t_mn', doy=90, 183 | depth=0, lat=17.5, lon=-37.5) 184 | #depth=0, lat=10, lon=330) 185 | assert np.allclose(t['t_mn'], [24.306862]) 186 | 187 | 188 | def test_get_point_inland(): 189 | with WOA() as db: 190 | 191 | t = db['sea_water_temperature'].extract(var='t_mn', doy=90, 192 | depth=0, lat=-19.9, lon=-43.9) 193 | assert t['t_mn'].mask.all() 194 | 195 | 196 | def test_get_profile(): 197 | with WOA(dbname='WOA13') as db: 198 | 199 | t = db['sea_water_temperature'].extract(var='mean', doy=10, 200 | depth=[0,10], lat=10, lon=330) 201 | assert np.allclose(t['mean'], [ 26.07524300, 26.12986183]) 202 | 203 | t = db['sea_water_temperature'].extract(doy=10, 204 | depth=[0,10], lat=10, lon=330) 205 | assert np.allclose(t['t_se'], [ 0.02941939, 0.0287159 ]) 206 | assert np.allclose(t['t_sd'], [ 0.8398821, 0.8142529]) 207 | assert np.allclose(t['t_mn'], [ 26.07524300, 26.12986183]) 208 | assert np.allclose(t['t_dd'], [ 813, 806]) 209 | 210 | 211 | def test_profile_maskedDepth(): 212 | """Test BUG#10 213 | """ 214 | with WOA() as db: 215 | depth = ma.masked_array([10, 100]) 216 | db['sea_water_temperature'].extract(var='mean', doy=10, 217 | depth=depth, lat=10, lon=330) 218 | 219 | 220 | def test_get_section(): 221 | with WOA() as db: 222 | t = db['sea_water_temperature'].extract(var='t_mn', doy=10, 223 | depth=[0,10], lat=28, lon=[-117, -114, -112, -105, -99, -93]) 224 | 225 | 226 | def test_get_surface(): 227 | with WOA() as db: 228 | t = db['sea_water_temperature'].extract(var='t_mn', doy=10, 229 | depth=[0,10], lat=[21, 24, 28, 32], 230 | lon=[-117, -114, -112, -105, -99, -93]) 231 | 232 | 233 | def test_track(): 234 | with WOA(dbname='WOA18') as db: 235 | params = [ 236 | [{"doy": 34, "depth": 0, "lat": 10, "lon": 330}, 237 | [25.51016]], 238 | [{"doy": 120, "depth": 0, "lat": 10, "lon": 330}, 239 | [25.795078]], 240 | [{"doy": 34, "depth": 300, "lat": 10, "lon": 330}, 241 | [10.52824]], 242 | [{"doy": 34, "depth": 0, "lat": 12, "lon": -25}, 243 | [24.412867]], 244 | [{"doy": [34, 120], "depth": 0, "lat": 10, "lon": 330}, 245 | [25.51016, 25.795078]], 246 | [{"doy": 34, "depth": [0, 300], "lat": 10, "lon": 330}, 247 | [25.51016, 10.52824]], 248 | [{"doy": 34, "depth": 0, "lat": [10, 12], "lon": [330, -25]}, 249 | [25.51016, 24.412867]], 250 | [{"doy": [34, 120], "depth": 0, "lat": [10, 12], "lon": [330, -25]}, 251 | [25.51016, 24.953312]], 252 | ] 253 | 254 | for p in params: 255 | t = db['sea_water_temperature'].track(var='t_mn', **p[0]) 256 | assert np.allclose(t['t_mn'], p[1]) 257 | 258 | def test_dev(): 259 | with WOA() as db: 260 | t = db['sea_water_temperature'].extract(doy=228.125, lat=12.5, lon=-37.5) 261 | 262 | 263 | def test_horizontalSurface_coincidentLatLon(): 264 | """ Creates an horizontal surface with coincident Lat/Lon 265 | """ 266 | with WOA() as db: 267 | t = db['sea_water_temperature'].extract(var='mean', doy=136.875, depth=43, 268 | lon = np.arange(-180, -170, 1), 269 | lat = np.arange(-50, -40, 1), 270 | mode = 'nearest') 271 | t = db['sea_water_temperature'].extract(var='mean', doy=136.875, depth=43, 272 | lon = np.arange(-180, -170, 1), 273 | lat = np.arange(-50, -40, 1)) 274 | 275 | def test_track_on_land(): 276 | """A track on land should return masked NaN values 277 | """ 278 | 
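# Both coordinates below fall inland, near Toulouse, France, far from any
# ocean gridpoint, so every variable is expected to come back fully masked.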
with WOA() as db:
279 | t = db["sea_water_temperature"].track(
280 | var="mean",
281 | doy=136.875,
282 | depth=0,
283 | lon=[1.4357, 1.4376],
284 | lat=[43.5938, 43.5980]
285 | )
286 | for v in t:
287 | assert ma.getmaskarray(t[v]).all()
--------------------------------------------------------------------------------
/oceansdb/cars.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | 
3 | """ Module to handle CARS climatology
4 | 
5 | Reference: http://www.marine.csiro.au/~dunn/cars2009/
6 | 
7 | Evaluate at day-of-year 45 (mid February)
8 | t = 2pi x 45/366
9 | feb = mean + an_cos*cos(t) + an_sin*sin(t) + sa_cos*cos(2*t) + sa_sin*sin(2*t)
10 | 
11 | download_file('http://www.marine.csiro.au/atlas/export/temperature_cars2009a.nc.gz', '755ec973e4d9bd05de202feb696704c2')
12 | download_file('http://www.marine.csiro.au/atlas/export/salinity_cars2009a.nc.gz', '7f78173f4ef2c0a4ff9b5e52b62dc97d')
13 | """
14 | 
15 | import os
16 | from os.path import expanduser
17 | import re
18 | from datetime import datetime
19 | 
20 | import numpy as np
21 | from numpy import ma
22 | import netCDF4
23 | from scipy.interpolate import interp1d
24 | # RectBivariateSpline
25 | from scipy.interpolate import griddata
26 | 
27 | from .utils import dbsource
28 | from .common import cropIndices
29 | 
30 | 
31 | def extract(filename, doy, latitude, longitude, depth):
32 | """
33 | For now only the nearest value
34 | For now only for one position, not an array of positions
35 | longitude 0-360
36 | """
37 | assert np.size(doy) == 1
38 | assert np.size(latitude) == 1
39 | assert np.size(longitude) == 1
40 | assert np.size(depth) == 1
41 | 
42 | assert (longitude >= 0) & (longitude <= 360)
43 | assert depth >= 0
44 | 
45 | with netCDF4.Dataset(filename) as nc:
46 | 
47 | t = 2 * np.pi * doy/366
48 | 
49 | Z = np.absolute(nc['depth'][:] - depth).argmin()
50 | I = np.absolute(nc['lat'][:] - latitude).argmin()
51 | J = np.absolute(nc['lon'][:] - longitude).argmin()
52 | 
53 | # Naive solution: build the full harmonic profile, then pick level Z
54 | value = nc['mean'][:, I, J]
55 | value[:64] += nc['an_cos'][:, I, J] * np.cos(t) + \
56 | nc['an_sin'][:, I, J] * np.sin(t)
57 | value[:55] += nc['sa_cos'][:, I, J] * np.cos(2*t) + \
58 | nc['sa_sin'][:, I, J] * np.sin(2*t)
59 | value = value[Z]
60 | 
61 | std = nc['std_dev'][Z, I, J]
62 | 
63 | return value, std
64 | 
65 | 
66 | def cars_profile(filename, doy, latitude, longitude, depth):
67 | """
68 | For now only the nearest value
69 | For now only for one position, not an array of positions
70 | longitude 0-360
71 | """
72 | assert np.size(doy) == 1
73 | assert np.size(latitude) == 1
74 | assert np.size(longitude) == 1
75 | #assert np.size(depth) == 1
76 | 
77 | assert (longitude >= 0) & (longitude <= 360)
78 | assert depth >= 0
79 | 
80 | with netCDF4.Dataset(filename) as nc:
81 | 
82 | t = 2 * np.pi * doy/366
83 | 
84 | # Improve this.
Optimize to get only necessary Z 85 | Z = slice(0, nc['depth'].size) 86 | I = np.absolute(nc['lat'][:] - latitude).argmin() 87 | J = np.absolute(nc['lon'][:] - longitude).argmin() 88 | 89 | # Not efficient, but it works 90 | assert (nc['depth'][:64] == nc['depth_ann'][:]).all() 91 | assert (nc['depth'][:55] == nc['depth_semiann'][:]).all() 92 | value = nc['mean'][:, I, J] 93 | value[:64] += nc['an_cos'][Z, I, J] * np.cos(t) + \ 94 | nc['an_sin'][:, I, J] * np.sin(t) 95 | value[:55] += nc['sa_cos'][Z, I, J] * np.cos(2*t) + \ 96 | nc['sa_sin'][:, I, J] * np.sin(2*t) 97 | value = value 98 | 99 | output = {'depth': np.asanyarray(depth)} 100 | from scipy.interpolate import griddata 101 | output['value'] = griddata(nc['depth'][Z], value[Z], depth) 102 | 103 | for v in ['std_dev']: 104 | output[v] = griddata(nc['depth'][Z], nc[v][Z, I, J], depth) 105 | 106 | return output 107 | 108 | 109 | class cars_data(object): 110 | """ Returns temperature/salinity from a CARS' file 111 | 112 | CARS climatology decompose temperature and salinity in annual 113 | and semi-annual harmonics. This class combines those harmonics 114 | on the fly, for the requested indices. 115 | 116 | data = cars_data('cars_file.nc') 117 | data[305, 0:10, :, :] 118 | """ 119 | def __init__(self, carsfile): 120 | self.nc = carsfile 121 | 122 | def __getitem__(self, item): 123 | """ t, z, y, x 124 | """ 125 | tn, zn, yn, xn = item 126 | 127 | #if type(zn) is not slice: 128 | # zn = slice(zn, zn+1) 129 | #zn_an = slice(zn.start, min(64, zn.stop), zn.step) 130 | #zn_sa = slice(zn.start, min(55, zn.stop), zn.step) 131 | 132 | output = [] 133 | d = 2 * np.pi * (np.arange(1, 367)[tn])/366 134 | for t in np.atleast_1d(d): 135 | tmp = self.nc['mean'][:, yn, xn] 136 | 137 | tmp[:64] += self.nc['an_cos'][:, yn, xn] * np.cos(t) + \ 138 | self.nc['an_sin'][:, yn, xn] * np.sin(t) 139 | tmp[:55] += self.nc['sa_cos'][:, yn, xn] * np.cos(2*t) + \ 140 | self.nc['sa_sin'][:, yn, xn] * np.sin(2*t) 141 | output.append(tmp[zn]) 142 | 143 | return ma.asanyarray(output) 144 | 145 | 146 | class CARS_var_nc(object): 147 | """ 148 | Reads the CARS Climatology NetCDF file and 149 | returns the corresponding values of salinity or temperature mean and 150 | standard deviation for the given time, lat, lon, depth. 151 | """ 152 | def __init__(self, source): 153 | self.ncs = source 154 | 155 | self.load_dims(dims=['lat', 'lon', 'depth']) 156 | self.set_keys() 157 | 158 | def __enter__(self): 159 | return self 160 | 161 | def __exit__(self, exc_type, exc_value, exc_traceback): 162 | self.close() 163 | 164 | def close(self): 165 | for nc in self.ncs: 166 | nc.close() 167 | 168 | def __getitem__(self, item): 169 | """ 170 | !!!ATENTION!!! Need to improve this. 171 | cars_data() should be modified to be used when loading ncs with source, thus avoiding the requirement on this getitem but running transparent. 172 | """ 173 | if item == 'mn': 174 | return cars_data(self.ncs[0]) 175 | else: 176 | return self.ncs[0].variables[item] 177 | 178 | def keys(self): 179 | return self.KEYS 180 | 181 | def load_dims(self, dims): 182 | self.dims = {} 183 | for d in dims: 184 | self.dims[d] = self.ncs[0][d][:] 185 | for nc in self.ncs[1:]: 186 | assert (self.dims[d] == nc[d][:]).all() 187 | 188 | #self.dims['time'] = [] 189 | #mfrac = 365/12. 
190 | #for nc in self.ncs:
191 | # assert nc.variables['time'].size == 1
192 | # self.dims['time'].append(mfrac * nc.variables['time'][0])
193 | self.dims['time'] = np.array([])
194 | 
195 | def set_keys(self):
196 | """
197 | """
198 | self.KEYS = ['mn']
199 | for v in self.ncs[0].variables.keys():
200 | if self.ncs[0].variables[v].dimensions == \
201 | (u'depth', u'lat', u'lon'):
202 | S = self.ncs[0].variables[v].shape
203 | for nc in self.ncs[1:]:
204 | assert v in nc.variables
205 | assert nc.variables[v].shape == S
206 | self.KEYS.append(v)
207 | # NOTE: this definition supersedes the __getitem__ defined earlier above.
208 | def __getitem__(self, item):
209 | if item in self.KEYS:
210 | return self.ncs[0].variables[item]
211 | elif re.match('(?:[st]_)?sd', item):
212 | return self.ncs[0].variables['std_dev']
213 | elif re.match('(?:[st]_)?dd', item):
214 | return self.ncs[0].variables['nq']
215 | 
216 | raise KeyError(item)
217 | 
218 | def crop(self, doy, depth, lat, lon, var):
219 | """ Crop a subset of the dataset for each var
220 | 
221 | Given doy, depth, lat and lon, it returns the smallest subset
222 | that still contains the requested coordinates inside it.
223 | 
224 | It handles special cases like a region around Greenwich and
225 | the international date line.
226 | 
227 | Accepts 0 to 360 and -180 to 180 longitude reference.
228 | 
229 | It extends time and longitude coordinates, to simplify the use
230 | of series. For example, a ship track can be requested with
231 | a longitude sequence like [352, 358, 364, 369, 380], and
232 | the equivalent for day of year above 365.
233 | """
234 | dims, idx = cropIndices(self.dims, lat, lon, depth)
235 | 
236 | dims['time'] = np.atleast_1d(doy)
237 | idx['tn'] = np.arange(dims['time'].size)
238 | 
239 | # Temporary solution. Create an object for CARS dataset
240 | xn = idx['xn']
241 | yn = idx['yn']
242 | zn = idx['zn']
243 | tn = idx['tn']
244 | 
245 | subset = {}
246 | for v in var:
247 | if v == 'mn':
248 | mn = []
249 | for d in doy:
250 | t = 2 * np.pi * d/366
251 | # Naive solution
252 | # FIXME: This is not an efficient solution.
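# The reconstruction below follows the recipe in this module's
# docstring: mean + an_cos*cos(t) + an_sin*sin(t)
#                 + sa_cos*cos(2*t) + sa_sin*sin(2*t),
# with t = 2*pi*doy/366. Only the 64 upper depth levels carry annual
# harmonics and only the 55 upper levels carry semi-annual ones,
# which is why the [:64] and [:55] slices appear here.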
253 | value = self.ncs[0]['mean'][:, yn, xn] 254 | value[:64] += self.ncs[0]['an_cos'][:, yn, xn] * np.cos(t) + \ 255 | self.ncs[0]['an_sin'][:, yn, xn] * np.sin(t) 256 | value[:55] += self.ncs[0]['sa_cos'][:, yn, xn] * np.cos(2*t) + \ 257 | self.ncs[0]['sa_sin'][:, yn, xn] * np.sin(2*t) 258 | mn.append(value[zn]) 259 | 260 | subset['mn'] = ma.asanyarray(mn) 261 | else: 262 | subset[v] = ma.asanyarray( 263 | doy.size * [self[v][zn, yn, xn]]) 264 | return subset, dims 265 | 266 | def nearest(self, doy, depth, lat, lon, var): 267 | output = {} 268 | for v in var: 269 | output[v] = ma.masked_all((doy.size, depth.size, lat.size, 270 | lon.size), dtype='f') 271 | for tn_out, t in enumerate(doy): 272 | subset, dims = self.crop(np.array([t]), depth, lat, lon, [v]) 273 | for yn_out, y in enumerate(lat): 274 | yn_in = np.absolute(dims['lat']-y).argmin() 275 | for xn_out, x in enumerate(lon): 276 | xn_in = np.absolute(dims['lon']-x).argmin() 277 | for zn_out, z in enumerate(depth): 278 | zn_in = np.absolute(dims['depth']-z).argmin() 279 | output[v][tn_out, zn_out, yn_out, xn_out] = \ 280 | subset[v][0,zn_in, yn_in, xn_in] 281 | return output 282 | 283 | def interpolate(self, doy, depth, lat, lon, var): 284 | """ Interpolate each var on the coordinates requested 285 | 286 | """ 287 | 288 | subset, dims = self.crop(doy, depth, lat, lon, var) 289 | 290 | if np.all([d in dims['time'] for d in doy]) & \ 291 | np.all([z in dims['depth'] for z in depth]) & \ 292 | np.all([y in dims['lat'] for y in lat]) & \ 293 | np.all([x in dims['lon'] for x in lon]): 294 | dn = np.nonzero([d in doy for d in dims['time']])[0] 295 | zn = np.nonzero([z in depth for z in dims['depth']])[0] 296 | yn = np.nonzero([y in lat for y in dims['lat']])[0] 297 | xn = np.nonzero([x in lon for x in dims['lon']])[0] 298 | output = {} 299 | for v in subset: 300 | # output[v] = subset[v][dn, zn, yn, xn] 301 | # Seriously that this is the way to do it?!!?? 302 | output[v] = subset[v][:, :, :, xn][:, :, yn][:, zn][dn] 303 | return output 304 | 305 | # The output coordinates shall be created only once. 306 | points_out = [] 307 | for doyn in doy: 308 | for depthn in depth: 309 | for latn in lat: 310 | for lonn in lon: 311 | points_out.append([doyn, depthn, latn, lonn]) 312 | points_out = np.array(points_out) 313 | 314 | output = {} 315 | for v in var: 316 | output[v] = ma.masked_all( 317 | (doy.size, depth.size, lat.size, lon.size), 318 | dtype=subset[v].dtype) 319 | 320 | # The valid data 321 | idx = np.nonzero(~ma.getmaskarray(subset[v])) 322 | 323 | if idx[0].size > 0: 324 | points = np.array([ 325 | dims['time'][idx[0]], dims['depth'][idx[1]], 326 | dims['lat'][idx[2]], dims['lon'][idx[3]]]).T 327 | values = subset[v][idx] 328 | 329 | # Interpolate along the dimensions that have more than one 330 | # position, otherwise it means that the output is exactly 331 | # on that coordinate. 332 | ind = np.array( 333 | [np.unique(points[:, i]).size > 1 for i in 334 | range(points.shape[1])]) 335 | 336 | if ind.any(): 337 | # These interpolators understand NaN, but not masks. 
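# Masked entries are therefore converted to NaN here, and the
# ma.fix_invalid() call further below turns any NaN in the
# interpolated result back into masked values.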
338 | values[ma.getmaskarray(values)] = np.nan 339 | 340 | values_out = griddata( 341 | np.atleast_1d(np.squeeze(points[:, ind])), 342 | values, 343 | np.atleast_1d(np.squeeze(points_out[:, ind])) 344 | ) 345 | else: 346 | values_out = values 347 | 348 | # Remap the interpolated value back into a 4D array 349 | idx = np.isfinite(values_out) 350 | for [t, z, y, x], out in zip(points_out[idx], values_out[idx]): 351 | output[v][t==doy, z==depth, y==lat, x==lon] = out 352 | 353 | output[v] = ma.fix_invalid(output[v]) 354 | 355 | return output 356 | 357 | def extract(self, mode=None, **kwargs): 358 | """ 359 | 360 | Possible scenarios: 361 | - Point: doy{1}, depth{1}, lat{1},lon{1} 362 | - Profile: doy{1}, depth{0,1,n}, lat{1},lon{1} 363 | - Section: doy{1}, depth{0, n}, [lat{1},lon{n} | lat{n},lon{1}] 364 | 365 | - Track: doy{1,n}, depth{1,n2}, lat{n},lon{n} 366 | """ 367 | for k in kwargs: 368 | assert k in ['var', 'doy', 'depth', 'lat', 'lon'], \ 369 | "Wrong dimension to extract, check the manual" 370 | 371 | if 'var' in kwargs: 372 | var = np.atleast_1d(kwargs['var']) 373 | else: 374 | var = np.asanyarray(self.KEYS) 375 | 376 | doy = np.atleast_1d(kwargs['doy']) 377 | # This would only work if doy is 1D 378 | if type(doy[0]) is datetime: 379 | doy = np.array([int(d.strftime('%j')) for d in doy]) 380 | 381 | if 'depth' in kwargs: 382 | depth = np.atleast_1d(kwargs['depth']) 383 | else: 384 | depth = self.dims['depth'][:] 385 | 386 | assert np.all(depth >= 0), "Depth was supposed to be positive." 387 | 388 | lat = np.atleast_1d(kwargs['lat']) 389 | lon = np.atleast_1d(kwargs['lon']) 390 | 391 | if mode == 'nearest': 392 | output = self.nearest(doy, depth, lat, lon, var) 393 | else: 394 | output = self.interpolate(doy, depth, lat, lon, var) 395 | for v in output: 396 | output[v] = np.atleast_1d(np.squeeze(output[v])) 397 | 398 | return output 399 | 400 | def get_profile(var, doy, depth, lat, lon): 401 | print("get_profile is deprecated. 
You should migrate to extract()") 402 | return extract(var=var, doy=doy, depth=depth, lat=lat, lon=lon) 403 | 404 | 405 | class CARS(object): 406 | """ 407 | """ 408 | def __init__(self, dbname='CARS'): 409 | self.dbname = dbname 410 | self.data = {'sea_water_temperature': None, 411 | 'sea_water_salinity': None} 412 | 413 | def keys(self): 414 | return self.data.keys() 415 | 416 | def __getitem__(self, item): 417 | if item == 'TEMP': 418 | return self['sea_water_temperature'] 419 | elif item == 'PSAL': 420 | return self['sea_water_salinity'] 421 | 422 | if self.data[item] is None: 423 | self.data[item] = CARS_var_nc(source=dbsource(self.dbname, item)) 424 | return self.data[item] 425 | 426 | def __enter__(self): 427 | return self 428 | 429 | def __exit__(self, exc_type, exc_value, exc_traceback): 430 | self.close() 431 | 432 | def close(self): 433 | for data_key in self.data: 434 | var_nc = self.data.get(data_key) 435 | if var_nc: 436 | var_nc.close() 437 | 438 | -------------------------------------------------------------------------------- /oceansdb/datasource/woa13.json: -------------------------------------------------------------------------------- 1 | {"WOA13": { 2 | "rel_path": "woa", 3 | "vars": { 4 | "sea_water_temperature": { 5 | "default_resolution": "5deg", 6 | "1deg": { 7 | "default_tscale": "seasonal", 8 | "varnames": { 9 | "latitude": "lat", 10 | "longitude": "lon", 11 | "mean": "t_mn", 12 | "standard_deviation": "t_sd", 13 | "standard_error": "t_se", 14 | "number_of_observations": "t_dd" 15 | }, 16 | "annual": [ 17 | { 18 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t00_01v2.nc", 19 | "md5hash": "0dec94e221d6471dd455b8cb35907115" 20 | } 21 | ], 22 | "monthly": [ 23 | { 24 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t01_01v2.nc", 25 | "md5hash": "e96ed3f8432f2da5a33259a0a4400b52" 26 | }, 27 | { 28 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t02_01v2.nc", 29 | "md5hash": "07d518e8e12127ff391d9255e7a6d210" 30 | }, 31 | { 32 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t03_01v2.nc", 33 | "md5hash": "f7dd964985b60f8f19ae603a217dd10d" 34 | }, 35 | { 36 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t04_01v2.nc", 37 | "md5hash": "3e89c494b69676b42fff3cc9e4d39cb7" 38 | }, 39 | { 40 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t05_01v2.nc", 41 | "md5hash": "42569a3d6611042f7f0afded1dedc29e" 42 | }, 43 | { 44 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t06_01v2.nc", 45 | "md5hash": "0f3a2f6ffda0cb33f571bfde58ed6a5c" 46 | }, 47 | { 48 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t07_01v2.nc", 49 | "md5hash": "122686fc55381ac53c0f26e90a5ea7af" 50 | }, 51 | { 52 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t08_01v2.nc", 53 | "md5hash": "fc97d706177db547222990b23b8c38dc" 54 | }, 55 | { 56 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t09_01v2.nc", 57 | "md5hash": "743665183eed091d21e973a3c57dc965" 58 | }, 59 | { 60 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t10_01v2.nc", 61 | "md5hash": "41c9630dcd7f5015ef10aa69a6dc7ef0" 62 | }, 63 | 
{ 64 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t11_01v2.nc", 65 | "md5hash": "f6b6874a847f1823b420114ca8ae9151" 66 | }, 67 | { 68 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t12_01v2.nc", 69 | "md5hash": "7015400e254e9ef0ba783ea63877e41f" 70 | } 71 | ], 72 | "seasonal": [ 73 | { 74 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t13_01v2.nc", 75 | "md5hash": "22ac7199efefedb75c319c5f0825ad4b" 76 | }, 77 | { 78 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t14_01v2.nc", 79 | "md5hash": "e8173e23018d9003a771af0ad3c6d970" 80 | }, 81 | { 82 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t15_01v2.nc", 83 | "md5hash": "ba025666a5b13c444471c91ccd9fd8a4" 84 | }, 85 | { 86 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/1.00/woa13_decav_t16_01v2.nc", 87 | "md5hash": "d975dca887d60e61957b67b8ca1d9fed" 88 | } 89 | ] 90 | }, 91 | "5deg": { 92 | "default_tscale": "seasonal", 93 | "varnames": { 94 | "latitude": "lat", 95 | "longitude": "lon", 96 | "mean": "t_mn", 97 | "standard_deviation": "t_sd", 98 | "standard_error": "t_se", 99 | "number_of_observations": "t_dd" 100 | }, 101 | "annual": [ 102 | { 103 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t00_5dv2.nc", 104 | "md5hash": "9cc5cf28d4f1f4057c9d9f263ca13d2a" 105 | } 106 | ], 107 | "monthly": [ 108 | { 109 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t01_5dv2.nc", 110 | "md5hash": "1f3ef5bff10cd17981e00674d491eaf9" 111 | }, 112 | { 113 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t02_5dv2.nc", 114 | "md5hash": "78a457c706a08b24ef6a6c34ad1b8afc" 115 | }, 116 | { 117 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t03_5dv2.nc", 118 | "md5hash": "dd52400be95ce398ac0428271489c0cd" 119 | }, 120 | { 121 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t04_5dv2.nc", 122 | "md5hash": "ca47a64e55a9998ad52cc3629f6abd65" 123 | }, 124 | { 125 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t05_5dv2.nc", 126 | "md5hash": "91015470b87741cf5d057b2702ca27e7" 127 | }, 128 | { 129 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t06_5dv2.nc", 130 | "md5hash": "5f1be843288fb9e2533d5feaa5d12744" 131 | }, 132 | { 133 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t07_5dv2.nc", 134 | "md5hash": "f0092776fb3bdac3451d42bace693f83" 135 | }, 136 | { 137 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t08_5dv2.nc", 138 | "md5hash": "18715348e8809e02b4f191db5efab0ee" 139 | }, 140 | { 141 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t09_5dv2.nc", 142 | "md5hash": "bed1912fa2cf7514e8c3ffc11c1d6837" 143 | }, 144 | { 145 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t10_5dv2.nc", 146 | "md5hash": "6ee785768b102e1c3c757bdd12f61f9c" 147 | }, 148 | { 149 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t11_5dv2.nc", 150 | 
"md5hash": "6039891f7053ace59f761daae2fcda44" 151 | }, 152 | { 153 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t12_5dv2.nc", 154 | "md5hash": "739cfd7bec705b351ce4f83f479f25ba" 155 | } 156 | ], 157 | "seasonal": [ 158 | { 159 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t13_5dv2.nc", 160 | "md5hash": "1ebaa01367a2d5f99d74ff1d37466c11" 161 | }, 162 | { 163 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t14_5dv2.nc", 164 | "md5hash": "869ab3881c07bd7a191351e1fcf60dc4" 165 | }, 166 | { 167 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t15_5dv2.nc", 168 | "md5hash": "87fb1cc276cf37769962aca97f59fa43" 169 | }, 170 | { 171 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/temperature/netcdf/decav/5deg/woa13_decav_t16_5dv2.nc", 172 | "md5hash": "b7f3bfdf7c632de91727c20d3a7331ac" 173 | } 174 | ] 175 | } 176 | }, 177 | "sea_water_salinity": { 178 | "default_resolution": "5deg", 179 | "1deg": { 180 | "default_tscale": "seasonal", 181 | "varnames": { 182 | "latitude": "lat", 183 | "longitude": "lon", 184 | "mean": "t_mn", 185 | "standard_deviation": "t_sd", 186 | "standard_error": "t_se", 187 | "number_of_observations": "t_dd" 188 | }, 189 | "annual": [ 190 | { 191 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s00_01v2.nc", 192 | "md5hash": "e5d6c2e757b13a2e6904b8d4849c33e6" 193 | } 194 | ], 195 | "monthly": [ 196 | { 197 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s01_01v2.nc", 198 | "md5hash": "2a00be825b5c55321d408448e71dd264" 199 | }, 200 | { 201 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s02_01v2.nc", 202 | "md5hash": "17aa2b0e35c8814abc550f62d237f40c" 203 | }, 204 | { 205 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s03_01v2.nc", 206 | "md5hash": "24caac61c615438bf5dff159fc83a1ef" 207 | }, 208 | { 209 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s04_01v2.nc", 210 | "md5hash": "5e1f4c8bedef5bd5cf08caea98f26318" 211 | }, 212 | { 213 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s05_01v2.nc", 214 | "md5hash": "e956501b17c2f302bcaf6de6d80cf329" 215 | }, 216 | { 217 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s06_01v2.nc", 218 | "md5hash": "b8a6b72b19fd957552cd11446d515091" 219 | }, 220 | { 221 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s07_01v2.nc", 222 | "md5hash": "722bd81e773449a440259226fcd460f8" 223 | }, 224 | { 225 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s08_01v2.nc", 226 | "md5hash": "99366f8601eeb94998624d1d4516e7b0" 227 | }, 228 | { 229 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s09_01v2.nc", 230 | "md5hash": "ae183d38983242c9d3ccb4a5eb32c984" 231 | }, 232 | { 233 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s10_01v2.nc", 234 | "md5hash": "1fcb1a8cab7311a0299fb269deacb234" 235 | }, 236 | { 237 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s11_01v2.nc", 238 | "md5hash": "91c150e7d74a6dba9872446c36179d05" 239 | }, 
240 | { 241 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s12_01v2.nc", 242 | "md5hash": "aa4d51fc7a96af3d803cbdd69215905d" 243 | } 244 | ], 245 | "seasonal": [ 246 | { 247 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s13_01v2.nc", 248 | "md5hash": "bf6e9badbfca3098df27e7ca3aa1e280" 249 | }, 250 | { 251 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s14_01v2.nc", 252 | "md5hash": "b41ca5c7b8560304dea6ef2858a7e143" 253 | }, 254 | { 255 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s15_01v2.nc", 256 | "md5hash": "317f37f41ac41b50b443e2fc1902f3c2" 257 | }, 258 | { 259 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/1.00/woa13_decav_s16_01v2.nc", 260 | "md5hash": "542dcfd731ce00d18ade792db19f0761" 261 | } 262 | ] 263 | }, 264 | "5deg": { 265 | "default_tscale": "seasonal", 266 | "varnames": { 267 | "latitude": "lat", 268 | "longitude": "lon", 269 | "mean": "s_mn", 270 | "standard_deviation": "s_sd", 271 | "standard_error": "s_se", 272 | "number_of_observations": "s_dd" 273 | }, 274 | "annual": [ 275 | { 276 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s00_5dv2.nc", 277 | "md5hash": "108f28fe1dd250b0598ae666be08fc19" 278 | } 279 | ], 280 | "monthly": [ 281 | { 282 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s01_5dv2.nc", 283 | "md5hash": "3bc5be6c422fac213aec47fe958bb056" 284 | }, 285 | { 286 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s02_5dv2.nc", 287 | "md5hash": "7d0dcd9bfe9433822e0e3ab75d3951ae" 288 | }, 289 | { 290 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s03_5dv2.nc", 291 | "md5hash": "b9acd31883ee98727efa1881c309b307" 292 | }, 293 | { 294 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s04_5dv2.nc", 295 | "md5hash": "df790ede9a5c7cb0c86a5a16bf646abf" 296 | }, 297 | { 298 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s05_5dv2.nc", 299 | "md5hash": "eb6fbf7124e369b6ecd82e221e2638df" 300 | }, 301 | { 302 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s06_5dv2.nc", 303 | "md5hash": "afce195122e226519bf5c303b10251e8" 304 | }, 305 | { 306 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s07_5dv2.nc", 307 | "md5hash": "fff1ade0b34e954a296c4c16d8a92f26" 308 | }, 309 | { 310 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s08_5dv2.nc", 311 | "md5hash": "0b67be8ea3ede5da5ce305dbd7e39b39" 312 | }, 313 | { 314 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s09_5dv2.nc", 315 | "md5hash": "b0c43b3fce883bce9b8f6f8aad6ca645" 316 | }, 317 | { 318 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s10_5dv2.nc", 319 | "md5hash": "756e72f8c0470a5250c0e78555cb960a" 320 | }, 321 | { 322 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s11_5dv2.nc", 323 | "md5hash": "9fd772759c3891dc68d38e24917badd4" 324 | }, 325 | { 326 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s12_5dv2.nc", 327 | "md5hash": 
"9e7958856d0e7e199cfc2a611fd9282d" 328 | } 329 | ], 330 | "seasonal": [ 331 | { 332 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s13_5dv2.nc", 333 | "md5hash": "584e7c75ada03a117df58b0f9b4e51d3" 334 | }, 335 | { 336 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s14_5dv2.nc", 337 | "md5hash": "4e10898c34913d51f92111d4bba13352" 338 | }, 339 | { 340 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s15_5dv2.nc", 341 | "md5hash": "f29d26db2b0aeff69b8571b00649a32d" 342 | }, 343 | { 344 | "url": "https://data.nodc.noaa.gov/woa/WOA13/DATAv2/salinity/netcdf/decav/5deg/woa13_decav_s16_5dv2.nc", 345 | "md5hash": "7eafaf3c0a7f21d78468ce552c457cd3" 346 | } 347 | ] 348 | } 349 | } 350 | } 351 | } 352 | } 353 | -------------------------------------------------------------------------------- /oceansdb/woa.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ Module to handle World Ocean Atlas (WOA) climatology 4 | """ 5 | 6 | from os.path import expanduser 7 | import re 8 | from datetime import datetime 9 | 10 | import numpy as np 11 | from numpy import ma 12 | import netCDF4 13 | from scipy.interpolate import interp1d 14 | # RectBivariateSpline 15 | from scipy.interpolate import griddata 16 | 17 | from .utils import dbsource 18 | from .common import cropIndices 19 | 20 | 21 | # ============================================================================ 22 | def woa_profile(var, d, lat, lon, depth, cfg): 23 | # Must improve here. This try make sense if fail because there isn't an 24 | # etopo file, but if fail for another reason, like there is no lat, 25 | # it will loose time trying from_dap. 26 | try: 27 | woa = woa_profile_from_file(var, d, lat, lon, depth, cfg) 28 | except: 29 | try: 30 | woa = woa_profile_from_dap(var, d, lat, lon, depth, cfg) 31 | except: 32 | print("Couldn't make woa_comparison of %s" % var) 33 | return 34 | 35 | return woa 36 | 37 | 38 | def woa_profile_from_dap(var, d, lat, lon, depth, cfg): 39 | """ 40 | Monthly Climatologic Mean and Standard Deviation from WOA, 41 | used either for temperature or salinity. 42 | 43 | INPUTS 44 | time: [day of the year] 45 | lat: [-90= zwoa.min()) 85 | # Mean value profile 86 | f = interp1d(zwoa[~ma.getmaskarray(mn)].compressed(), mn.compressed()) 87 | mn_interp = ma.masked_all(depth.shape) 88 | mn_interp[ind] = f(depth[ind]) 89 | # The stdev profile 90 | f = interp1d(zwoa[~ma.getmaskarray(sd)].compressed(), sd.compressed()) 91 | sd_interp = ma.masked_all(depth.shape) 92 | sd_interp[ind] = f(depth[ind]) 93 | 94 | output = {'woa_an': mn_interp, 'woa_sd': sd_interp} 95 | 96 | return output 97 | 98 | 99 | def woa_track_from_file(d, lat, lon, filename, varnames=None): 100 | """ Temporary solution: WOA for surface track 101 | """ 102 | d = np.asanyarray(d) 103 | lat = np.asanyarray(lat) 104 | lon = np.asanyarray(lon) 105 | 106 | lon[lon < 0] += 360 107 | 108 | doy = np.array([int(dd.strftime('%j')) for dd in d]) 109 | 110 | with netCDF4.Dataset(expanduser(filename), 'r') as nc: 111 | 112 | if varnames is None: 113 | varnames = {} 114 | for v in nc.variables.keys(): 115 | if nc.variables[v].dimensions == \ 116 | (u'time', u'depth', u'lat', u'lon'): 117 | varnames[v] = v 118 | 119 | output = {} 120 | for v in varnames: 121 | output[v] = [] 122 | 123 | for d_n, lat_n, lon_n in zip(doy, lat, lon): 124 | # Get the nearest point. 
In the future, interpolate. 125 | n_d = (np.abs(d_n - nc.variables['time'][:])).argmin() 126 | n_x = (np.abs(lon_n - nc.variables['lon'][:])).argmin() 127 | n_y = (np.abs(lat_n - nc.variables['lat'][:])).argmin() 128 | 129 | for v in varnames: 130 | output[v].append(nc.variables[varnames[v]][n_d, 0, n_y, n_x]) 131 | 132 | for v in varnames: 133 | output[v] = ma.fix_invalid(output[v]) 134 | 135 | return output 136 | 137 | 138 | class WOA_URL(object): 139 | def __init__(self): 140 | pass 141 | 142 | 143 | class WOA_var_nc(object): 144 | """ 145 | Reads the WOA Monthly Climatology NetCDF file and 146 | returns the corresponding WOA values of salinity or temperature mean and 147 | standard deviation for the given time, lat, lon, depth. 148 | """ 149 | def __init__(self, source): 150 | self.ncs = source 151 | 152 | self.load_dims(dims=['lat', 'lon', 'depth']) 153 | self.set_keys() 154 | 155 | def __enter__(self): 156 | return self 157 | 158 | def __exit__(self, exc_type, exc_value, exc_traceback): 159 | self.close() 160 | 161 | def close(self): 162 | for nc in self.ncs: 163 | nc.close() 164 | 165 | def __getitem__(self, item): 166 | return self.data[item] 167 | 168 | def keys(self): 169 | return self.KEYS 170 | 171 | def load_dims(self, dims): 172 | self.dims = {} 173 | for d in dims: 174 | self.dims[d] = self.ncs[0][d][:] 175 | for nc in self.ncs[1:]: 176 | assert (self.dims[d] == nc[d][:]).all() 177 | 178 | self.dims['time'] = [] 179 | mfrac = 365/12. 180 | for nc in self.ncs: 181 | assert nc.variables['time'].size == 1 182 | assert nc.variables['time'].units[:6] == 'months' 183 | self.dims['time'].append(mfrac * (nc['time'][0] % 12)) 184 | self.dims['time'] = np.array(self.dims['time']) 185 | 186 | def set_keys(self): 187 | """ 188 | """ 189 | self.KEYS = [] 190 | for v in self.ncs[0].variables.keys(): 191 | if self.ncs[0].variables[v].dimensions == \ 192 | (u'time', u'depth', u'lat', u'lon'): 193 | S = self.ncs[0].variables[v].shape 194 | for nc in self.ncs[1:]: 195 | assert v in nc.variables 196 | assert nc.variables[v].shape == S 197 | self.KEYS.append(v) 198 | 199 | def crop(self, doy, depth, lat, lon, var): 200 | """ Crop a subset of the dataset for each var 201 | 202 | Given doy, depth, lat and lon, it returns the smallest subset 203 | that still contains the requested coordinates inside it. 204 | 205 | It handles special cases like a region around Greenwich and 206 | the international date line. 207 | 208 | Accepts 0 to 360 and -180 to 180 longitude reference. 209 | 210 | It extends time and longitude coordinates to simplify the use 211 | of series. For example, a ship track can be requested with 212 | a longitude sequence like [352, 358, 364, 369, 380], and 213 | the equivalent for day of year above 365.
214 | """ 215 | dims, idx = cropIndices(self.dims, lat, lon, depth, doy) 216 | subset = {} 217 | for v in var: 218 | subset[v] = ma.asanyarray([ 219 | self.ncs[tnn][v][0, idx['zn'], idx['yn'], idx['xn']] 220 | for tnn in idx['tn']]) 221 | return subset, dims 222 | 223 | def nearest(self, doy, depth, lat, lon, var): 224 | output = {} 225 | dims, idx = cropIndices(self.dims, lat, lon, depth, doy) 226 | for v in var: 227 | output[v] = ma.masked_all((doy.size, depth.size, lat.size, 228 | lon.size), dtype='f') 229 | for tn_out, t in enumerate(doy): 230 | tn_in = np.absolute(dims['time']-t).argmin() 231 | subset = self.ncs[tn_in][v][0, idx['zn'], idx['yn'], idx['xn']] 232 | for yn_out, y in enumerate(lat): 233 | yn_in = np.absolute(dims['lat']-y).argmin() 234 | for xn_out, x in enumerate(lon): 235 | xn_in = np.absolute(dims['lon']-x).argmin() 236 | for zn_out, z in enumerate(depth): 237 | zn_in = np.absolute(dims['depth']-z).argmin() 238 | output[v][tn_out, zn_out, yn_out, xn_out] = \ 239 | subset[zn_in, yn_in, xn_in] 240 | return output 241 | 242 | def interpolate(self, doy, depth, lat, lon, var): 243 | """ Interpolate each var on the coordinates requested 244 | 245 | """ 246 | subset, dims = self.crop(doy, depth, lat, lon, var) 247 | 248 | # Subset contains everything requested. No need to interpolate. 249 | if np.all([d in dims['time'] for d in doy]) & \ 250 | np.all([z in dims['depth'] for z in depth]) & \ 251 | np.all([y in dims['lat'] for y in lat]) & \ 252 | np.all([x in dims['lon'] for x in lon]): 253 | dn = np.nonzero([d in doy for d in dims['time']])[0] 254 | zn = np.nonzero([z in depth for z in dims['depth']])[0] 255 | yn = np.nonzero([y in lat for y in dims['lat']])[0] 256 | xn = np.nonzero([x in lon for x in dims['lon']])[0] 257 | output = {} 258 | for v in subset: 259 | # output[v] = subset[v][dn, zn, yn, xn] 260 | # Seriously that this is the way to do it?!!?? 261 | output[v] = subset[v][:, :, :, xn][:, :, yn][:, zn][dn] 262 | return output 263 | 264 | output = {} 265 | for v in var: 266 | output[v] = ma.masked_all( 267 | (doy.size, depth.size, lat.size, lon.size), 268 | dtype=subset[v].dtype) 269 | 270 | # These interpolators don't understand Masked Arrays, but do NaN 271 | if subset[v].dtype in ['int32']: 272 | subset[v] = subset[v].astype('f') 273 | subset[v][ma.getmaskarray(subset[v])] = np.nan 274 | subset[v] = subset[v].data 275 | 276 | # First linear interpolate on time. 277 | if not (doy == dims['time']).all(): 278 | for v in subset.keys(): 279 | f = interp1d(dims['time'], subset[v], axis=0) 280 | subset[v] = f(doy) 281 | dims['time'] = np.atleast_1d(doy) 282 | 283 | if not (np.all(lat == dims['lat']) and np.all(lon == dims['lon'])): 284 | # Lat x Lon target coordinates are the same for all time and depth. 285 | points_out = [] 286 | for latn in lat: 287 | for lonn in lon: 288 | points_out.append([latn, lonn]) 289 | points_out = np.array(points_out) 290 | 291 | # Interpolate on X/Y plane 292 | for v in subset: 293 | tmp = np.nan * np.ones( 294 | (doy.size, dims['depth'].size, lat.size, lon.size), 295 | dtype=subset[v].dtype) 296 | for nt in range(doy.size): 297 | for nz in range(dims['depth'].size): 298 | data = subset[v][nt, nz] 299 | # The valid data 300 | idx = np.nonzero(~np.isnan(data)) 301 | if idx[0].size > 0: 302 | points = np.array([ 303 | dims['lat'][idx[0]], dims['lon'][idx[1]]]).T 304 | values = data[idx] 305 | # Interpolate along the dimensions that have more than 306 | # one position, otherwise it means that the output 307 | # is exactly on that coordinate. 
308 | #ind = np.array([np.unique(points[:, i]).size > 1 309 | # for i in range(points.shape[1])]) 310 | #assert ind.any() 311 | 312 | try: 313 | values_out = griddata( 314 | #np.atleast_1d(np.squeeze(points[:, ind])), 315 | np.atleast_1d(np.squeeze(points)), 316 | values, 317 | #np.atleast_1d(np.squeeze(points_out[:, ind]))) 318 | np.atleast_1d(np.squeeze(points_out))) 319 | except Exception: 320 | values_out = [] 321 | for p in points_out: 322 | try: 323 | values_out.append(griddata( 324 | np.atleast_1d(np.squeeze(points)), 325 | values, 326 | np.atleast_1d(np.squeeze( 327 | p)))) 328 | except Exception: 329 | values_out.append(np.nan) 330 | values_out = np.array(values_out) 331 | 332 | # Remap the interpolated value back into a 4D array 333 | idx = np.isfinite(values_out) 334 | for [y, x], out in zip( 335 | points_out[idx], values_out[idx]): 336 | tmp[nt, nz, y==lat, x==lon] = out 337 | subset[v] = tmp 338 | 339 | # Interpolate on z 340 | same_depth = (np.shape(depth) == dims['depth'].shape) and \ 341 | np.allclose(depth, dims['depth']) 342 | if not same_depth: 343 | for v in list(subset.keys()): 344 | try: 345 | f = interp1d(dims['depth'], subset[v], axis=1, bounds_error=False) 346 | # interp1d does not handle Masked Arrays 347 | subset[v] = f(np.array(depth)) 348 | except Exception: 349 | print("Failed to interpolate '%s' in depth" % v) 350 | del subset[v] 351 | 352 | for v in subset: 353 | if output[v].dtype in ['int32']: 354 | subset[v] = np.round(subset[v]) 355 | output[v][:] = ma.fix_invalid(subset[v][:]) 356 | 357 | return output 358 | 359 | def extract(self, mode=None, **kwargs): 360 | """ 361 | 362 | Possible scenarios: 363 | - Point: doy{1}, depth{1}, lat{1},lon{1} 364 | - Profile: doy{1}, depth{0,1,n}, lat{1},lon{1} 365 | - Section: doy{1}, depth{0, n}, [lat{1},lon{n} | lat{n},lon{1}] 366 | 367 | - Track: doy{1,n}, depth{1,n2}, lat{n},lon{n} 368 | """ 369 | for k in kwargs: 370 | assert k in ['var', 'doy', 'depth', 'lat', 'lon'], \ 371 | "Wrong dimension to extract, check the manual" 372 | 373 | if 'var' in kwargs: 374 | var = np.atleast_1d(kwargs['var']) 375 | else: 376 | var = np.asanyarray(self.KEYS) 377 | 378 | doy = np.atleast_1d(kwargs['doy']) 379 | # This would only work if doy is 1D 380 | if type(doy[0]) is datetime: 381 | doy = np.array([int(d.strftime('%j')) for d in doy]) 382 | 383 | if 'depth' in kwargs: 384 | depth = np.atleast_1d(kwargs['depth']) 385 | else: 386 | depth = self.dims['depth'][:] 387 | 388 | assert np.all(depth >= 0), "Depth must be non-negative."
389 | 390 | lat = np.atleast_1d(kwargs['lat']) 391 | lon = np.atleast_1d(kwargs['lon']) 392 | 393 | if mode == 'nearest': 394 | output = self.nearest(doy, depth, lat, lon, var) 395 | else: 396 | output = self.interpolate(doy, depth, lat, lon, var) 397 | 398 | for v in output: 399 | output[v] = np.atleast_1d(np.squeeze(output[v])) 400 | 401 | return output 402 | 403 | def track(self, mode=None, **kwargs): 404 | """ 405 | 406 | Possible scenarios: 407 | - Track: doy{1,n}, depth{1,n2}, lat{n}, lon{n} 408 | """ 409 | for k in kwargs: 410 | assert k in ['var', 'doy', 'depth', 'lat', 'lon'], \ 411 | "Wrong dimension to extract, check the manual" 412 | 413 | if 'var' in kwargs: 414 | var = np.atleast_1d(kwargs['var']) 415 | else: 416 | var = np.asanyarray(self.KEYS) 417 | 418 | doy = np.atleast_1d(kwargs['doy']) 419 | if type(doy[0]) is datetime: 420 | doy = np.array([int(d.strftime('%j')) for d in doy]) 421 | 422 | if 'depth' in kwargs: 423 | depth = np.atleast_1d(kwargs['depth']) 424 | else: 425 | depth = self.dims['depth'][:] 426 | 427 | assert np.all(depth >= 0), "Depth must be non-negative." 428 | 429 | lat = np.atleast_1d(kwargs['lat']) 430 | lon = np.atleast_1d(kwargs['lon']) 431 | 432 | assert lat.shape == lon.shape 433 | 434 | N = max(doy.size, depth.size, lat.size) 435 | 436 | if doy.shape == (1,): 437 | doy = doy * np.ones(N, dtype='i') 438 | if depth.shape == (1,): 439 | depth = depth * np.ones(N, dtype='i') 440 | if lat.shape == (1,): 441 | lat = lat * np.ones(N, dtype='i') 442 | lon = lon * np.ones(N, dtype='i') 443 | 444 | output = {} 445 | for v in var: 446 | output[v] = [] 447 | 448 | for t, z, y, x in zip(doy, depth, lat, lon): 449 | if mode == 'nearest': 450 | tmp = self.nearest( 451 | np.array([t]), np.array([z]), np.array([y]), np.array([x]), var) 452 | else: 453 | tmp = self.interpolate( 454 | np.array([t]), np.array([z]), np.array([y]), np.array([x]), var) 455 | 456 | for v in tmp: 457 | output[v].append(tmp[v]) 458 | 459 | for v in output: 460 | output[v] = np.atleast_1d(ma.array(output[v]).squeeze()) 461 | 462 | return output 463 | 464 | def get_profile(self, var, doy, depth, lat, lon): 465 | print("get_profile is deprecated.
You should migrate to extract()") 466 | return self.extract(var=var, doy=doy, depth=depth, lat=lat, lon=lon) 467 | 468 | 469 | class WOA(object): 470 | """ 471 | """ 472 | def __init__(self, dbname='WOA18', resolution=None, tscale=None): 473 | self.dbname = dbname 474 | self.data = {'sea_water_temperature': None, 475 | 'sea_water_salinity': None, 476 | 'dissolved_oxygen': None, 477 | } 478 | self.resolution = resolution 479 | self.tscale = tscale 480 | 481 | def keys(self): 482 | return self.data.keys() 483 | 484 | def __getitem__(self, item): 485 | if item in ['TEMP', 'temperature']: 486 | return self['sea_water_temperature'] 487 | elif item in ['PSAL', 'salinity']: 488 | return self['sea_water_salinity'] 489 | elif item in ['DOXY']: 490 | return self['dissolved_oxygen'] 491 | 492 | if self.data[item] is None: 493 | self.data[item] = WOA_var_nc(source=dbsource( 494 | self.dbname, item, self.resolution, self.tscale)) 495 | return self.data[item] 496 | 497 | def __enter__(self): 498 | return self 499 | 500 | def __exit__(self, exc_type, exc_value, exc_traceback): 501 | self.close() 502 | 503 | def close(self): 504 | for data_key in self.data: 505 | var_nc = self.data.get(data_key) 506 | if var_nc: 507 | var_nc.close() 508 | -------------------------------------------------------------------------------- /oceansdb/datasource/woa18.json: -------------------------------------------------------------------------------- 1 | {"WOA18": { 2 | "rel_path": "woa", 3 | "vars": { 4 | "sea_water_temperature": { 5 | "default_resolution": "5deg", 6 | "1deg": { 7 | "default_tscale": "seasonal", 8 | "varnames": { 9 | "latitude": "lat", 10 | "longitude": "lon", 11 | "mean": "t_mn", 12 | "standard_deviation": "t_sd", 13 | "standard_error": "t_se", 14 | "number_of_observations": "t_dd" 15 | }, 16 | "annual": [ 17 | { 18 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t00_01.nc", 19 | "md5hash": "8334a01ae2219ab5d9249b17eda934dc" 20 | } 21 | ], 22 | "monthly": [ 23 | { 24 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t01_01.nc", 25 | "md5hash": "abf8c29c5395eda5390e8b26c38a9267" 26 | }, 27 | { 28 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t02_01.nc", 29 | "md5hash": "d0d0df5473bd4efd9dcc4cc645aa09ea" 30 | }, 31 | { 32 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t03_01.nc", 33 | "md5hash": "cb79b67385947608c9ec71e4bbf22693" 34 | }, 35 | { 36 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t04_01.nc", 37 | "md5hash": "1c149bd133cabd97915910ec7b3719ff" 38 | }, 39 | { 40 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t05_01.nc", 41 | "md5hash": "036e79be64f49bb53994dc089e425845" 42 | }, 43 | { 44 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t06_01.nc", 45 | "md5hash": "ec79f1bbe219c0d9409f64e962e110f1" 46 | }, 47 | { 48 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t07_01.nc", 49 | "md5hash": "d2e17eee5386a72d5265fb8709c45a04" 50 | }, 51 | { 52 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t08_01.nc", 53 | "md5hash": "ed3ec26804f9b743b41022092d581d35" 54 | }, 55 | { 56 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t09_01.nc", 57 | "md5hash":
"dc0e4a4fb4d23a9577fef3aaa4ff918f" 58 | }, 59 | { 60 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t10_01.nc", 61 | "md5hash": "4c910805c71a5156dc10ba108f0294bf" 62 | }, 63 | { 64 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t11_01.nc", 65 | "md5hash": "a866369486b664e86ed80a41057ee75f" 66 | }, 67 | { 68 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t12_01.nc", 69 | "md5hash": "fde62df4db0f8d219daf883542c8dac4" 70 | } 71 | ], 72 | "seasonal": [ 73 | { 74 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t13_01.nc", 75 | "md5hash": "8300e489d214399cca4ab46486d8d419" 76 | }, 77 | { 78 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t14_01.nc", 79 | "md5hash": "a3fdab15767f3fbb407dc3f2e5bf6a22" 80 | }, 81 | { 82 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t15_01.nc", 83 | "md5hash": "cb95ee206a0c2bc04dc3aaf7a598cc11" 84 | }, 85 | { 86 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/1.00/woa18_decav_t16_01.nc", 87 | "md5hash": "cce19e567b09c68e9c72796823dfebe8" 88 | } 89 | ] 90 | }, 91 | "5deg": { 92 | "default_tscale": "seasonal", 93 | "varnames": { 94 | "latitude": "lat", 95 | "longitude": "lon", 96 | "mean": "t_mn", 97 | "standard_deviation": "t_sd", 98 | "standard_error": "t_se", 99 | "number_of_observations": "t_dd" 100 | }, 101 | "annual": [ 102 | { 103 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t00_5d.nc", 104 | "md5hash": "0f4fe0fc9a32174b8a24d0630af8e457" 105 | } 106 | ], 107 | "monthly": [ 108 | { 109 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t01_5d.nc", 110 | "md5hash": "c2dc282ae1e3b10dbaae0b37e2c5b6b7" 111 | }, 112 | { 113 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t02_5d.nc", 114 | "md5hash": "da920fa6df31b900b7d3c675c0ef9fab" 115 | }, 116 | { 117 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t03_5d.nc", 118 | "md5hash": "6bf1334f62634bb79cb0dfe00149635b" 119 | }, 120 | { 121 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t04_5d.nc", 122 | "md5hash": "b9fb5c8ac648493727b256581e36e326" 123 | }, 124 | { 125 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t05_5d.nc", 126 | "md5hash": "6ebe6a4eafc722deec4da96ee660c60d" 127 | }, 128 | { 129 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t06_5d.nc", 130 | "md5hash": "639da871c9d4713d63456b89ab8387ec" 131 | }, 132 | { 133 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t07_5d.nc", 134 | "md5hash": "f31c06fae00ea0d7ca4852314fd5f3b2" 135 | }, 136 | { 137 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t08_5d.nc", 138 | "md5hash": "3393fd7d68a718966c3960d37f9be7c1" 139 | }, 140 | { 141 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t09_5d.nc", 142 | "md5hash": "73d21f284a9e35de74128617ff3492a2" 143 | }, 144 | { 145 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t10_5d.nc", 146 | "md5hash": 
"e31b51e9efba3198ca83cd051ebb0f24" 147 | }, 148 | { 149 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t11_5d.nc", 150 | "md5hash": "19e8f4ed24b115c03dfe9ad45b4a64ae" 151 | }, 152 | { 153 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t12_5d.nc", 154 | "md5hash": "3502f24b326657df3099054ca9e7496c" 155 | } 156 | ], 157 | "seasonal": [ 158 | { 159 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t13_5d.nc", 160 | "md5hash": "4ef05bbca9d4ea244d3c2d490439c689" 161 | }, 162 | { 163 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t14_5d.nc", 164 | "md5hash": "0c330a71fc192308b4829da19082aa5e" 165 | }, 166 | { 167 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t15_5d.nc", 168 | "md5hash": "5d7c2ff27d36cd48bf9b4bdf6971d564" 169 | }, 170 | { 171 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/temperature/netcdf/decav/5deg/woa18_decav_t16_5d.nc", 172 | "md5hash": "4033f6679317500df489b52c26eccaf2" 173 | } 174 | ] 175 | } 176 | }, 177 | "sea_water_salinity": { 178 | "default_resolution": "5deg", 179 | "1deg": { 180 | "default_tscale": "seasonal", 181 | "varnames": { 182 | "latitude": "lat", 183 | "longitude": "lon", 184 | "mean": "t_mn", 185 | "standard_deviation": "t_sd", 186 | "standard_error": "t_se", 187 | "number_of_observations": "t_dd" 188 | }, 189 | "annual": [ 190 | { 191 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s00_01.nc", 192 | "md5hash": "2c3860adb43ab40ab3637b77aadcb3e5" 193 | } 194 | ], 195 | "monthly": [ 196 | { 197 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s01_01.nc", 198 | "md5hash": "8dd30831db975d059a4172323c06bb65" 199 | }, 200 | { 201 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s02_01.nc", 202 | "md5hash": "eaac03ed69c37fcd7b215dcf7b5c6cba" 203 | }, 204 | { 205 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s03_01.nc", 206 | "md5hash": "25a443d9cb0a79ad3fbfbc57f4becf96" 207 | }, 208 | { 209 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s04_01.nc", 210 | "md5hash": "4bbaca8427ef3d2d135ba4b1fdc4c830" 211 | }, 212 | { 213 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s05_01.nc", 214 | "md5hash": "5d3b2f2b59842bbf8a981381bb503921" 215 | }, 216 | { 217 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s06_01.nc", 218 | "md5hash": "d40b2d4a5172dc3c360c050c0768ba99" 219 | }, 220 | { 221 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s07_01.nc", 222 | "md5hash": "9b45b6ef0529f797acb86c0f740ba9f0" 223 | }, 224 | { 225 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s08_01.nc", 226 | "md5hash": "b947036b4dd3687454d6474c47ffac44" 227 | }, 228 | { 229 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s09_01.nc", 230 | "md5hash": "f008f1d88a05ddb447735407e978ac62" 231 | }, 232 | { 233 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s10_01.nc", 234 | "md5hash": "560db2703f7c7bc757c6d0e59a48248f" 235 | }, 236 | { 237 | "url": 
"https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s11_01.nc", 238 | "md5hash": "1f46c955d7de857f38082f4f55964e80" 239 | }, 240 | { 241 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s12_01.nc", 242 | "md5hash": "85d47f3e5ec28d2a15b246cad869ad87" 243 | } 244 | ], 245 | "seasonal": [ 246 | { 247 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s13_01.nc", 248 | "md5hash": "5c8b9bf6a24de0cbeacf7be30c7c3238" 249 | }, 250 | { 251 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s14_01.nc", 252 | "md5hash": "2a0e4cf3b78294ca0bde50f72c6403d0" 253 | }, 254 | { 255 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s15_01.nc", 256 | "md5hash": "9c2ff699977fa25ec76a3b0e100cc7b3" 257 | }, 258 | { 259 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/1.00/woa18_decav_s16_01.nc", 260 | "md5hash": "24fe08caa663bc0c45d90c248556f1a7" 261 | } 262 | ] 263 | }, 264 | "5deg": { 265 | "default_tscale": "seasonal", 266 | "varnames": { 267 | "latitude": "lat", 268 | "longitude": "lon", 269 | "mean": "s_mn", 270 | "standard_deviation": "s_sd", 271 | "standard_error": "s_se", 272 | "number_of_observations": "s_dd" 273 | }, 274 | "annual": [ 275 | { 276 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s00_5d.nc", 277 | "md5hash": "6a2378fa6f5f2c80ce698535ad613a85" 278 | } 279 | ], 280 | "monthly": [ 281 | { 282 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s01_5d.nc", 283 | "md5hash": "92b1050f9565a70bc061781a657efa5a" 284 | }, 285 | { 286 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s02_5d.nc", 287 | "md5hash": "6cebb9e52eb96a2e2634987c4aeeffc1" 288 | }, 289 | { 290 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s03_5d.nc", 291 | "md5hash": "d0d15d90d6ef9f164d6dd4a7ca78623f" 292 | }, 293 | { 294 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s04_5d.nc", 295 | "md5hash": "72737f79d9218cc9eb893a56519c86f9" 296 | }, 297 | { 298 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s05_5d.nc", 299 | "md5hash": "02c47864cd6fb0c23398373d4a0b1148" 300 | }, 301 | { 302 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s06_5d.nc", 303 | "md5hash": "50bb2db9cd09e4242c7e61b8b37ba616" 304 | }, 305 | { 306 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s07_5d.nc", 307 | "md5hash": "2b77388b5a117cdc91edc8cfc1b3b769" 308 | }, 309 | { 310 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s08_5d.nc", 311 | "md5hash": "14c47f5aa001208acf5d6613275e5a38" 312 | }, 313 | { 314 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s09_5d.nc", 315 | "md5hash": "f5dcd943d94f76ea3905a7c28350b8af" 316 | }, 317 | { 318 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s10_5d.nc", 319 | "md5hash": "4ed7cb9e8fa71119d7ddf62a11d489e6" 320 | }, 321 | { 322 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s11_5d.nc", 323 | "md5hash": "9f3e742f9f8287971c36595ac625ee6e" 324 | }, 325 | { 326 | "url": 
"https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s12_5d.nc", 327 | "md5hash": "694ef5ccf8be7f87c3d98565c9d30bf0" 328 | } 329 | ], 330 | "seasonal": [ 331 | { 332 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s13_5d.nc", 333 | "md5hash": "2114aaf85c1cd1ffaedad50867b99e93" 334 | }, 335 | { 336 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s14_5d.nc", 337 | "md5hash": "d9a066b6c0df69ac22675ca0d0b4e45d" 338 | }, 339 | { 340 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s15_5d.nc", 341 | "md5hash": "bf8dd028b90f9ebe7f5025c1ed2da4d9" 342 | }, 343 | { 344 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/salinity/netcdf/decav/5deg/woa18_decav_s16_5d.nc", 345 | "md5hash": "dbf9589625b7a59248dc4ea4dbb17f29" 346 | } 347 | ] 348 | } 349 | }, 350 | "dissolved_oxygen": { 351 | "default_resolution": "5deg", 352 | "1deg": { 353 | "default_tscale": "seasonal", 354 | "varnames": { 355 | "latitude": "lat", 356 | "longitude": "lon", 357 | "mean": "o_mn", 358 | "standard_deviation": "o_sd", 359 | "standard_error": "o_se", 360 | "number_of_observations": "o_dd" 361 | }, 362 | "annual": [ 363 | { 364 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o00_01.nc", 365 | "md5hash": "c022221423e15285ef69427b672f3736" 366 | } 367 | ], 368 | "monthly": [ 369 | { 370 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o01_01.nc", 371 | "md5hash": "beca4d668758d0526415e77766b5adeb" 372 | }, 373 | { 374 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o02_01.nc", 375 | "md5hash": "8943c953226326b432997ae7af7dca95" 376 | }, 377 | { 378 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o03_01.nc", 379 | "md5hash": "e7bfb3195927e85da4ca42300e19bdf7" 380 | }, 381 | { 382 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o04_01.nc", 383 | "md5hash": "fc1fd84f9edff8c69238b550ecd7ba57" 384 | }, 385 | { 386 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o05_01.nc", 387 | "md5hash": "f34845a234aba03eae342a78569c8379" 388 | }, 389 | { 390 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o06_01.nc", 391 | "md5hash": "cdc1ba96e2e7d49afd4752902d02593c" 392 | }, 393 | { 394 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o07_01.nc", 395 | "md5hash": "957b0246ba97e5676ed9467889e85681" 396 | }, 397 | { 398 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o08_01.nc", 399 | "md5hash": "0b6cb146ef1c85b94a1a717491012b49" 400 | }, 401 | { 402 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o09_01.nc", 403 | "md5hash": "a34027567546d6d4c9660038333fa631" 404 | }, 405 | { 406 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o10_01.nc", 407 | "md5hash": "1f9c132e05ea5cb0b6bc23185e25cef0" 408 | }, 409 | { 410 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o11_01.nc", 411 | "md5hash": "ebda216ff34219914daa32e3a399746d" 412 | }, 413 | { 414 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o12_01.nc", 415 | "md5hash": "2fdd0a84978f0756f8dd242801841c20" 416 | } 417 | ], 418 | "seasonal": [ 419 | { 420 | "url": 
"https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o13_01.nc", 421 | "md5hash": "971ac80c1b00bda140432adac06efeac" 422 | }, 423 | { 424 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o14_01.nc", 425 | "md5hash": "8fdad539a92de3fb095f7af2f403834e" 426 | }, 427 | { 428 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o15_01.nc", 429 | "md5hash": "84e4ef7edd5031287a1f2bf280974046" 430 | }, 431 | { 432 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/1.00/woa18_all_o16_01.nc", 433 | "md5hash": "67d0958e4aa1ca827d10dd08e9ccf6d9" 434 | } 435 | ] 436 | }, 437 | "5deg": { 438 | "default_tscale": "seasonal", 439 | "varnames": { 440 | "latitude": "lat", 441 | "longitude": "lon", 442 | "mean": "o_mn", 443 | "standard_deviation": "o_sd", 444 | "standard_error": "o_se", 445 | "number_of_observations": "o_dd" 446 | }, 447 | "annual": [ 448 | { 449 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o00_5d.nc", 450 | "md5hash": "18f3f480d4552c032b88f2ca7ebcfc52" 451 | } 452 | ], 453 | "monthly": [ 454 | { 455 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o01_5d.nc", 456 | "md5hash": "8a8ebace535d8fc1c7daff2107a5f945" 457 | }, 458 | { 459 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o02_5d.nc", 460 | "md5hash": "a15b1a75c377874719a35bad3e2cfffa" 461 | }, 462 | { 463 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o03_5d.nc", 464 | "md5hash": "45f8a61e7fb667ee0f93f7cb68d27956" 465 | }, 466 | { 467 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o04_5d.nc", 468 | "md5hash": "4854756e0f79d5c033b5b681a6b934e5" 469 | }, 470 | { 471 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o05_5d.nc", 472 | "md5hash": "4ba62667949a10aebfb38594142321ee" 473 | }, 474 | { 475 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o06_5d.nc", 476 | "md5hash": "5b5495ab661cb21919a49283978ead49" 477 | }, 478 | { 479 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o07_5d.nc", 480 | "md5hash": "f13646e18f4bf325ebf8744d4da06cba" 481 | }, 482 | { 483 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o08_5d.nc", 484 | "md5hash": "73f1d097df33e603ea3afc995553df08" 485 | }, 486 | { 487 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o09_5d.nc", 488 | "md5hash": "b46c25ac3ce1329b518ee7f793e8504d" 489 | }, 490 | { 491 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o10_5d.nc", 492 | "md5hash": "39da780f55dfa0204813d744743939c7" 493 | }, 494 | { 495 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o11_5d.nc", 496 | "md5hash": "7dc4edfc37be7b73adee61a8799884a8" 497 | }, 498 | { 499 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o12_5d.nc", 500 | "md5hash": "7ac7ceb7e60181726c13326106ea36e5" 501 | } 502 | ], 503 | "seasonal": [ 504 | { 505 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o13_5d.nc", 506 | "md5hash": "fd2f8d7953f4a6982026311fda0c536e" 507 | }, 508 | { 509 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o14_5d.nc", 510 | "md5hash": "307752da7b6a6052f5c35ae4f64b7edb" 511 
| }, 512 | { 513 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o15_5d.nc", 514 | "md5hash": "d12e9fe986652fa6e2b90a7b03241cca" 515 | }, 516 | { 517 | "url": "https://data.nodc.noaa.gov/woa/WOA18/DATA/oxygen/netcdf/all/5deg/woa18_all_o16_5d.nc", 518 | "md5hash": "87007f9f373b928171d05cb02f443281" 519 | } 520 | ] 521 | } 522 | } 523 | } 524 | } 525 | } 526 | --------------------------------------------------------------------------------
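A minimal usage sketch tying together the WOA facade in oceansdb/woa.py and the datasource JSON files above. It assumes the WOA18 5deg NetCDF files can be obtained locally (dbsource downloads and md5-verifies them on first use, so network access may be needed); the day of year, depths, and coordinates below are illustrative values only, not taken from the source.

from oceansdb import WOA

# The PEP-343 support credited in AUTHORS.rst closes the NetCDF files on exit.
with WOA(dbname='WOA18', resolution='5deg') as db:
    # 'TEMP' is an alias that WOA.__getitem__ resolves to
    # 'sea_water_temperature'. With the default mode, extract()
    # interpolates in time, horizontal position, and depth.
    profile = db['TEMP'].extract(
        var='t_mn', doy=136, depth=[0, 10, 50], lat=17.5, lon=-37.5)
    print(profile['t_mn'])  # climatological mean at the requested depths

    # A surface track along a ship route; per the crop() docstring,
    # longitudes beyond 360 are accepted and wrapped, which simplifies
    # series that cross the date line.
    track = db['PSAL'].track(
        var='s_mn', doy=[45, 46, 47], depth=0,
        lat=[14.0, 14.5, 15.0], lon=[352, 358, 364])
    print(track['s_mn'])

The variable names 't_mn' and 's_mn' are the WOA mean fields listed under "varnames" in the JSON datasources; any other key with (time, depth, lat, lon) dimensions, such as the standard deviation fields, can be requested the same way.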