├── tests ├── __init__.py ├── folder_test │ ├── failure │ │ ├── abdsdf │ │ │ └── .test │ │ └── dsfsdf │ │ │ └── .test │ └── success │ │ ├── 2013 │ │ ├── .test │ │ └── 001 │ │ │ └── .test │ │ ├── 2014 │ │ ├── 101 │ │ │ └── .test │ │ ├── 134 │ │ │ └── .test │ │ └── .test │ │ └── addfs │ │ └── .test ├── conftest.py ├── test_grid.py ├── test_reshuffle.py ├── test_download.py └── test_interface.py ├── .gitattributes ├── docs ├── _static │ └── .gitignore ├── readme.rst ├── authors.rst ├── changelog.rst ├── 5x5_cell_partitioning.png ├── license.rst ├── env.yml ├── index.rst ├── download.rst ├── Makefile ├── reading.rst ├── img2ts.rst ├── varnames.rst └── conf.py ├── src └── gldas │ ├── GLDASp4_landmask_025d.nc4 │ ├── __init__.py │ ├── utils.py │ ├── grid.py │ ├── reshuffle.py │ ├── download.py │ └── interface.py ├── .gitmodules ├── AUTHORS.rst ├── .readthedocs.yml ├── pyproject.toml ├── .gitignore ├── .coveragerc ├── setup.py ├── CHANGELOG.rst ├── LICENSE.txt ├── tox.ini ├── setup.cfg ├── README.rst └── .github └── workflows └── ci.yml /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/folder_test/failure/abdsdf/.test: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/folder_test/failure/dsfsdf/.test: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/folder_test/success/2013/.test: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/folder_test/success/2014/.test: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /tests/folder_test/success/addfs/.test: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/folder_test/success/2013/001/.test: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/folder_test/success/2014/101/.test: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/folder_test/success/2014/134/.test: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | pygeobase/_version.py export-subst -------------------------------------------------------------------------------- /docs/_static/.gitignore: -------------------------------------------------------------------------------- 1 | # Empty directory 2 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. _readme: 2 | .. include:: ../README.rst 3 | -------------------------------------------------------------------------------- /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. _authors: 2 | .. include:: ../AUTHORS.rst 3 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. _changes: 2 | .. 
include:: ../CHANGELOG.rst 3 | -------------------------------------------------------------------------------- /docs/5x5_cell_partitioning.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TUW-GEO/gldas/HEAD/docs/5x5_cell_partitioning.png -------------------------------------------------------------------------------- /docs/license.rst: -------------------------------------------------------------------------------- 1 | .. _license: 2 | 3 | ======= 4 | License 5 | ======= 6 | 7 | .. include:: ../LICENSE.txt 8 | -------------------------------------------------------------------------------- /src/gldas/GLDASp4_landmask_025d.nc4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TUW-GEO/gldas/HEAD/src/gldas/GLDASp4_landmask_025d.nc4 -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "tests/test-data"] 2 | path = tests/test-data 3 | url = https://github.com/TUW-GEO/rs_data_readers-test-data.git 4 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ========== 2 | Developers 3 | ========== 4 | 5 | * Wolfgang Preimesberger 6 | * Christoph Paulik 7 | * Andreea Plocon 8 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-20.04 5 | tools: 6 | python: mambaforge-4.10 7 | 8 | python: 9 | install: 10 | - method: pip 11 | path: . 
12 | 13 | sphinx: 14 | configuration: docs/conf.py 15 | 16 | conda: 17 | environment: docs/env.yml -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Dummy conftest.py for gldas. 4 | 5 | If you don't know what this is for, just leave it empty. 6 | Read more about conftest.py under: 7 | https://pytest.org/latest/plugins.html 8 | """ 9 | 10 | # import pytest 11 | -------------------------------------------------------------------------------- /docs/env.yml: -------------------------------------------------------------------------------- 1 | # To keep the RTD build as small as possible, we define a separate .yml here 2 | name: docs 3 | channels: 4 | - conda-forge 5 | - defaults 6 | dependencies: 7 | - python=3.12 8 | - ipykernel 9 | - nbsphinx 10 | - mock 11 | - pillow 12 | - sphinx<7 13 | - sphinx_rtd_theme 14 | - pip 15 | - pip: 16 | - recommonmark 17 | - readthedocs-sphinx-ext -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | # AVOID CHANGING REQUIRES: IT WILL BE UPDATED BY PYSCAFFOLD! 3 | requires = ["setuptools>=46.1.0", "setuptools_scm[toml]>=5", "wheel", "packaging", "build"] 4 | build-backend = "setuptools.build_meta" 5 | 6 | [tool.setuptools_scm] 7 | # See configuration details in https://github.com/pypa/setuptools_scm 8 | version_scheme = "no-guess-dev" 9 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | 3 | .. include:: reading.rst 4 | 5 | .. include:: varnames.rst 6 | 7 | .. include:: img2ts.rst 8 | 9 | Contents 10 | ======== 11 | 12 | .. 
toctree:: 13 | :maxdepth: 2 14 | 15 | Downloading 16 | Image Reading 17 | Variable names 18 | Conversion to time series and Reading 19 | License 20 | Authors 21 | Changelog 22 | Module Reference 23 | 24 | 25 | Indices and tables 26 | ================== 27 | 28 | * :ref:`genindex` 29 | * :ref:`modindex` 30 | * :ref:`search` 31 | -------------------------------------------------------------------------------- /src/gldas/__init__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | if sys.version_info[:2] >= (3, 8): 4 | # TODO: Import directly (no need for conditional) when `python_requires = >= 3.8` 5 | from importlib.metadata import PackageNotFoundError, version # pragma: no cover 6 | else: 7 | from importlib_metadata import PackageNotFoundError, version # pragma: no cover 8 | 9 | try: 10 | # Change here if project is renamed and does not equal the package name 11 | dist_name = __name__ 12 | __version__ = version(dist_name) 13 | except PackageNotFoundError: # pragma: no cover 14 | __version__ = "unknown" 15 | finally: 16 | del version, PackageNotFoundError 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Temporary and binary files 2 | *~ 3 | *.py[cod] 4 | *.so 5 | *.cfg 6 | !setup.cfg 7 | *.orig 8 | *.log 9 | *.pot 10 | __pycache__/* 11 | .cache/* 12 | .*.swp 13 | 14 | # Project files 15 | .ropeproject 16 | .project 17 | .pydevproject 18 | .settings 19 | .idea 20 | 21 | # Package files 22 | *.egg 23 | *.eggs/ 24 | .installed.cfg 25 | *.egg-info 26 | 27 | # Unittest and coverage 28 | htmlcov/* 29 | .coverage 30 | .tox 31 | junit.xml 32 | coverage.xml 33 | .pytest_cache/* 34 | 35 | # Build and docs folder/files 36 | build/* 37 | dist/* 38 | sdist/* 39 | docs/api/* 40 | docs/_build/* 41 | cover/* 42 | MANIFEST 43 | 44 | */local_scripts/* 45 | .artifacts/* 46 | 
-------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | # .coveragerc to control coverage.py 2 | [run] 3 | branch = True 4 | source = gldas 5 | omit = */_version.py 6 | 7 | [paths] 8 | source = 9 | src/ 10 | */site-packages/ 11 | 12 | [report] 13 | # Regexes for lines to exclude from consideration 14 | exclude_lines = 15 | # Have to re-enable the standard pragma 16 | pragma: no cover 17 | 18 | # Don't complain about missing debug-only code: 19 | def __repr__ 20 | if self\.debug 21 | 22 | # Don't complain if tests don't hit defensive assertion code: 23 | raise AssertionError 24 | raise NotImplementedError 25 | 26 | # Don't complain if non-runnable code isn't run: 27 | if 0: 28 | if __name__ == .__main__.: 29 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """ 2 | Setup file for gldas. 3 | Use setup.cfg to configure your project. 4 | 5 | This file was generated with PyScaffold 4.6. 6 | PyScaffold helps you to put up the scaffold of your new Python project. 
7 | Learn more under: https://pyscaffold.org/ 8 | """ 9 | 10 | from setuptools import setup 11 | 12 | if __name__ == "__main__": 13 | try: 14 | setup(use_scm_version={"version_scheme": "no-guess-dev"}) 15 | except: # noqa 16 | print( 17 | "\n\nAn error occurred while building the project, " 18 | "please ensure you have the most updated version of setuptools, " 19 | "setuptools_scm and wheel with:\n" 20 | " pip install -U setuptools setuptools_scm wheel\n\n" 21 | ) 22 | raise 23 | -------------------------------------------------------------------------------- /docs/download.rst: -------------------------------------------------------------------------------- 1 | Downloading Products 2 | ==================== 3 | 4 | In order to download GLDAS NOAH products you have to register an account with 5 | NASA's Earthdata portal at ``_. 6 | 7 | After that you can use the command line program ``gldas_download`` together with your username and password. 8 | 9 | For example to download all GLDAS Noah v2.1 Images (3-hourly) in the period from June 3 to 6 2018 10 | into the local directory `/tmp`, you'd call (with your username and password): 11 | 12 | .. code:: 13 | 14 | gldas_download /tmp -s 2018-06-03 -e 2018-06-05 --product GLDAS_Noah_v21_025 --username **USERNAME** --password **PASSWORD** 15 | 16 | would download GLDAS Noah version 2.1 data from the select start to the selected end day into the '/tmp' folder. 17 | 18 | For a description of the download function and all options run 19 | 20 | .. 
code:: 21 | 22 | gldas_download -h 23 | -------------------------------------------------------------------------------- /tests/test_grid.py: -------------------------------------------------------------------------------- 1 | from gldas.grid import GLDAS025Cellgrid, GLDAS025LandGrid, subgrid4bbox 2 | 3 | 4 | def test_GLDAS025_cell_grid(): 5 | gldas = GLDAS025Cellgrid() 6 | assert gldas.activegpis.size == 1036800 7 | assert gldas.activegpis[153426] == 153426 8 | assert gldas.activearrcell[153426] == 1409 9 | assert gldas.activearrlat[153426] == -63.375 10 | assert gldas.activearrlon[153426] == 16.625 11 | 12 | 13 | def test_GLDAS025LandGrid(): 14 | gldas = GLDAS025LandGrid() 15 | assert gldas.activegpis.size == 243883 16 | assert gldas.activegpis[153426] == 810230 17 | assert gldas.activearrcell[153426] == 1720 18 | assert gldas.activearrlat[153426] == 50.625 19 | assert gldas.activearrlon[153426] == 57.625 20 | 21 | 22 | def test_bbox_subgrid(): 23 | bbox = (130.125, -29.875, 134.875, -25.125) # bbox for cell 2244 24 | subgrid = subgrid4bbox(GLDAS025Cellgrid(), *bbox) 25 | assert subgrid == GLDAS025Cellgrid().subgrid_from_cells([2244]) 26 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | AUTODOCDIR = api 11 | 12 | # User-friendly check for sphinx-build 13 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $?), 1) 14 | $(error "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. 
Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://sphinx-doc.org/") 15 | endif 16 | 17 | .PHONY: help clean Makefile 18 | 19 | # Put it first so that "make" without argument is like "make help". 20 | help: 21 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 22 | 23 | clean: 24 | rm -rf $(BUILDDIR)/* $(AUTODOCDIR) 25 | 26 | # Catch-all target: route all unknown targets to Sphinx using the new 27 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 28 | %: Makefile 29 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 30 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Changelog 3 | ========= 4 | 5 | Unreleased 6 | ========== 7 | - 8 | 9 | Version 0.7.2 10 | ============= 11 | - Make pygrib optional dependency 12 | 13 | Version 0.7.1 14 | ============= 15 | - CI and docs updates (new python versions etc.) 16 | 17 | Version 0.7.0 18 | ============= 19 | - Test download function with real data. 20 | - Support GLDAS EP 21 | - GHA to replace travis 22 | - Update docs 23 | - Update Formatting 24 | 25 | Version 0.6.1 26 | ============= 27 | - Travis upload release to pypi 28 | 29 | Version 0.6 30 | =========== 31 | - New package structure 32 | - Drop python 2 support 33 | - Allow subsetting by bbox 34 | - Add gldas land mask to package (no download necessary) 35 | 36 | Version 0.5 37 | =========== 38 | - Update trollsift parsing 39 | - Update readme 40 | - Support reshuffling of land points only 41 | - Test with netcdf test data 42 | 43 | Version 0.4 44 | =========== 45 | - Add support for GLDAS version 2.1 46 | - Compress files during reshuffling. 
47 | 48 | Version 0.3 49 | =========== 50 | - Fix download with new URL 51 | - Only download grb and xml files 52 | 53 | Version 0.2 54 | =========== 55 | - Fix Python 3 bug when iterating over multiple images. 56 | - Add reshuffling to time series format and reading of time series. 57 | 58 | Version 0.1 59 | =========== 60 | - First release. Support for Downloading and reading GLDAS Noah v1 0.25 degree 61 | data. 62 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, TU Wien 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | * Neither the name of ascat nor the names of its 15 | contributors may be used to endorse or promote products derived from 16 | this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 22 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 26 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | -------------------------------------------------------------------------------- /src/gldas/utils.py: -------------------------------------------------------------------------------- 1 | import functools 2 | import inspect 3 | import warnings 4 | 5 | 6 | class PygribError(ImportError): 7 | def __init__(self): 8 | # Call the base class constructor with the parameters it needs 9 | message = ("Pygrib is not installed. " 10 | "Please run 'pip install pygrib' or 'conda install pygrib' " 11 | "first, to read data in grib format.") 12 | super().__init__(message) 13 | 14 | def deprecated(message: str = None): 15 | """ 16 | Decorator for classes or functions to mark them as deprecated. 17 | If the decorator is applied without a specific message (`@deprecated()`), 18 | the default warning is shown when using the function/class. To specify 19 | a custom message use it like: 20 | @deprecated('Don't use this function anymore!'). 21 | 22 | Parameters 23 | ---------- 24 | message : str, optional (default: None) 25 | Custom message to show with the DeprecationWarning. 26 | """ 27 | 28 | def decorator(src): 29 | default_msg = f"GLDAS python " \ 30 | f"{'class' if inspect.isclass(src) else 'method'} " \ 31 | f"'{src.__module__}.{src.__name__}' " \ 32 | f"is deprecated and will be removed soon." 
33 | 34 | @functools.wraps(src) 35 | def new_func(*args, **kwargs): 36 | warnings.simplefilter('always', DeprecationWarning) 37 | 38 | warnings.warn( 39 | default_msg if message is None else message, 40 | category=DeprecationWarning, 41 | stacklevel=2) 42 | warnings.simplefilter('default', DeprecationWarning) 43 | return src(*args, **kwargs) 44 | 45 | return new_func 46 | 47 | return decorator 48 | -------------------------------------------------------------------------------- /tests/test_reshuffle.py: -------------------------------------------------------------------------------- 1 | import os 2 | import glob 3 | import tempfile 4 | import numpy as np 5 | import numpy.testing as nptest 6 | 7 | from gldas.reshuffle import main 8 | from gldas.interface import GLDASTs 9 | 10 | from tempfile import TemporaryDirectory 11 | 12 | import pytest 13 | 14 | 15 | @pytest.mark.parametrize( 16 | "landpoints,bbox,n_files_should", 17 | # 15 cells, 4 with out landpoints, 1 grid file 18 | [(True, True, 15-4+1), (False, True, 15+1)], 19 | ) 20 | def test_reshuffle(landpoints, bbox, n_files_should): 21 | if bbox is True: 22 | bbox = ["41.125", "11.125", "63.875", "23.875"] 23 | inpath = os.path.join( 24 | os.path.dirname(os.path.abspath(__file__)), 25 | "test-data", 26 | "img2ts_test", 27 | "netcdf", 28 | ) 29 | startdate = "2016-01-01T03:00" 30 | enddate = "2016-01-01T21:00" 31 | parameters = ["SoilMoi0_10cm_inst", "SoilMoi10_40cm_inst"] 32 | 33 | with TemporaryDirectory() as ts_path: 34 | args = ( 35 | [inpath, ts_path, startdate, enddate] 36 | + parameters 37 | + ["--land_points", str(landpoints)] 38 | ) 39 | if bbox: 40 | args += ["--bbox", *bbox] 41 | main(args) 42 | assert len(glob.glob(os.path.join(ts_path, "*.nc"))) == n_files_should 43 | 44 | ds = GLDASTs( 45 | ts_path, 46 | ioclass_kws={"read_bulk": True, "read_dates": False}, 47 | parameters=["SoilMoi0_10cm_inst", "SoilMoi10_40cm_inst"], 48 | ) 49 | 50 | ts = ds.read(45.08, 15.1) 51 | ts_SM0_10_values_should = np.array( 
52 | [9.595, 9.593, 9.578, 9.562, 9.555, 9.555, 9.556], dtype=np.float32 53 | ) 54 | nptest.assert_allclose( 55 | ts["SoilMoi0_10cm_inst"].values, ts_SM0_10_values_should, rtol=1e-5 56 | ) 57 | ts_SM10_40_values_should = np.array( 58 | [50.065, 50.064, 50.062, 50.060, 50.059, 50.059, 50.059], 59 | dtype=np.float32, 60 | ) 61 | nptest.assert_allclose( 62 | ts["SoilMoi10_40cm_inst"].values, ts_SM10_40_values_should, rtol=1e-5 63 | ) 64 | ds.close() 65 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | # Tox configuration file 2 | # Read more under https://tox.readthedocs.org/ 3 | # THIS SCRIPT IS SUPPOSED TO BE AN EXAMPLE. MODIFY IT ACCORDING TO YOUR NEEDS! 4 | 5 | [tox] 6 | minversion = 3.15 7 | envlist = default 8 | 9 | 10 | [testenv] 11 | description = invoke pytest to run automated tests 12 | isolated_build = True 13 | setenv = 14 | TOXINIDIR = {toxinidir} 15 | passenv = 16 | HOME 17 | extras = 18 | testing 19 | commands = 20 | pytest {posargs} 21 | 22 | 23 | [testenv:{clean,build}] 24 | description = 25 | Build (or clean) the package in isolation according to instructions in: 26 | https://setuptools.readthedocs.io/en/latest/build_meta.html#how-to-use-it 27 | https://github.com/pypa/pep517/issues/91 28 | https://github.com/pypa/build 29 | # NOTE: build is still experimental, please refer to the links for updates/issues 30 | skip_install = True 31 | changedir = {toxinidir} 32 | deps = 33 | build: build 34 | commands = 35 | clean: python -c 'from shutil import rmtree; rmtree("build", True); rmtree("dist", True)' 36 | build: python -m build . 
37 | # By default `build` produces wheels, you can also explicitly use the flags `--sdist` and `--wheel` 38 | 39 | 40 | [testenv:{docs,doctests}] 41 | description = invoke sphinx-build to build the docs/run doctests 42 | setenv = 43 | DOCSDIR = {toxinidir}/docs 44 | BUILDDIR = {toxinidir}/docs/_build 45 | docs: BUILD = html 46 | doctests: BUILD = doctest 47 | deps = 48 | -r {toxinidir}/docs/requirements.txt 49 | # ^ requirements.txt shared with Read The Docs 50 | commands = 51 | sphinx-build -b {env:BUILD} -d "{env:BUILDDIR}/doctrees" "{env:DOCSDIR}" "{env:BUILDDIR}/{env:BUILD}" {posargs} 52 | 53 | 54 | [testenv:publish] 55 | description = 56 | Publish the package you have been developing to a package index server. 57 | By default, it uses testpypi. If you really want to publish your package 58 | to be publicly accessible in PyPI, use the `-- --repository pypi` option. 59 | skip_install = True 60 | changedir = {toxinidir} 61 | passenv = 62 | TWINE_USERNAME 63 | TWINE_PASSWORD 64 | TWINE_REPOSITORY 65 | deps = twine 66 | commands = 67 | python -m twine check dist/* 68 | python -m twine upload {posargs:--repository testpypi} dist/* 69 | -------------------------------------------------------------------------------- /docs/reading.rst: -------------------------------------------------------------------------------- 1 | Reading GLDAS images 2 | -------------------- 3 | 4 | Reading of the GLDAS raw grib files can be done in two ways. 5 | 6 | Reading by file name 7 | ~~~~~~~~~~~~~~~~~~~~ 8 | 9 | .. code-block:: python 10 | 11 | import os 12 | from datetime import datetime 13 | from gldas.interface import GLDAS_Noah_v1_025Img 14 | 15 | # read several parameters 16 | parameter = ['086_L2', '086_L1', '085_L1', '138', '132', '051'] 17 | # the class is initialized with the exact filename. 
18 | img = GLDAS_Noah_v1_025Img(os.path.join(os.path.dirname(__file__), 19 | 'test-data', 20 | 'GLDAS_NOAH_image_data', 21 | '2015', 22 | '001', 23 | 'GLDAS_NOAH025SUBP_3H.A2015001.0000.001.2015037193230.grb'), 24 | parameter=parameter) 25 | 26 | # reading returns an image object which contains a data dictionary 27 | # with one array per parameter. The returned data is a global 0.25 degree 28 | # image/array. 29 | image = img.read() 30 | 31 | assert image.data['086_L1'].shape == (720, 1440) 32 | assert image.lon[0, 0] == -179.875 33 | assert image.lon[0, 1439] == 179.875 34 | assert image.lat[0, 0] == 89.875 35 | assert image.lat[719, 0] == -89.875 36 | assert sorted(image.data.keys()) == sorted(parameter) 37 | assert image.data['086_L1'][26, 609] == 30.7344 38 | assert image.data['086_L2'][26, 609] == 93.138 39 | assert image.data['085_L1'][576, 440] == 285.19 40 | assert image.data['138'][26, 609] == 237.27 41 | assert image.data['051'][26, 609] == 0 42 | assert image.lon.shape == (720, 1440) 43 | assert image.lon.shape == image.lat.shape 44 | 45 | Reading by date 46 | ~~~~~~~~~~~~~~~ 47 | 48 | All the gldas data in a directory structure can be accessed by date. 49 | The filename is automatically built from the given date. 50 | 51 | .. code-block:: python 52 | 53 | from gldas.interface import GLDAS_Noah_v1_025Ds 54 | 55 | parameter = ['086_L2', '086_L1', '085_L1', '138', '132', '051'] 56 | img = GLDAS_Noah_v1_025Ds(data_path=os.path.join(os.path.dirname(__file__), 57 | 'test-data', 58 | 'GLDAS_NOAH_image_data'), 59 | parameter=parameter) 60 | 61 | image = img.read(datetime(2015, 1, 1, 0)) 62 | 63 | 64 | For reading all image between two dates the 65 | :py:meth:`gldas.interface.GLDAS_Noah_v1_025Ds.iter_images` iterator can be 66 | used. 
67 | -------------------------------------------------------------------------------- /docs/img2ts.rst: -------------------------------------------------------------------------------- 1 | Conversion to time series format 2 | ================================ 3 | 4 | For a lot of applications it is favorable to convert the image based format into 5 | a format which is optimized for fast time series retrieval. This is what we 6 | often need for e.g. validation studies. This can be done by stacking the images 7 | into a netCDF file and choosing the correct chunk sizes or a lot of other 8 | methods. We have chosen to do it in the following way: 9 | 10 | - Store only the reduced gaußian grid points since that saves space. 11 | - Further reduction the amount of stored data by saving only land points if selected. 12 | - Store the time series in netCDF4 in the Climate and Forecast convention 13 | `Orthogonal multidimensional array representation 14 | `_ 15 | - Store the time series in 5x5 degree cells. This means there will be 2566 cell 16 | files (without reduction to land points) and a file called ``grid.nc`` 17 | which contains the information about which grid point is stored in which file. 18 | This allows us to read a whole 5x5 degree area into memory and iterate over the time series quickly. 19 | 20 | .. image:: 5x5_cell_partitioning.png 21 | :target: _images/5x5_cell_partitioning.png 22 | 23 | This conversion can be performed using the ``gldas_repurpose`` command line 24 | program. An example would be: 25 | 26 | .. code-block:: shell 27 | 28 | gldas_repurpose /download/image/path /output/timeseries/path 2000-01-01 2001-01-01 SoilMoi0_10cm_inst SoilMoi10_40cm_inst 29 | 30 | Which would take GLDAS Noah data stored in ``/gldas_data`` from January 1st 31 | 2000 to January 1st 2001 and store the parameters for the top 2 layers of soil moisture as time 32 | series in the folder ``/timeseries/data``. 
33 | 34 | Conversion to time series is performed by the `repurpose package 35 | `_ in the background. For custom settings 36 | or other options see the `repurpose documentation 37 | `_ and the code in 38 | ``gldas.reshuffle``. 39 | 40 | **Note**: If a ``RuntimeError: NetCDF: Bad chunk sizes.`` appears during reshuffling, consider downgrading the 41 | netcdf4 library via: 42 | 43 | .. code-block:: shell 44 | 45 | conda install -c conda-forge netcdf4=1.2.2 46 | 47 | Reading converted time series data 48 | ---------------------------------- 49 | 50 | For reading the data the ``gldas_repurpose`` command produces the class 51 | ``GLDASTs`` can be used: 52 | 53 | .. code-block:: python 54 | 55 | from gldas.interface import GLDASTs 56 | ds = GLDASTs(ts_path, ioclass_kws={'read_bulk': True}) 57 | # read_ts takes either lon, lat coordinates or a grid point indices. 58 | # and returns a pandas.DataFrame 59 | ts = ds.read(45, 15) 60 | 61 | >>> ts 62 | Snowf_tavg ... SoilTMP100_200cm_inst 63 | 2000-01-01 03:00:00 0.0 ... 292.014526 64 | 2000-01-01 06:00:00 0.0 ... 292.006256 65 | 2000-01-01 09:00:00 0.0 ... 291.998505 66 | 2000-01-01 12:00:00 0.0 ... 291.981201 67 | 2000-01-01 15:00:00 0.0 ... 291.974579 68 | ... ... ... ... 69 | 2023-10-31 09:00:00 0.0 ... 299.025757 70 | 2023-10-31 12:00:00 0.0 ... 299.025024 71 | 2023-10-31 15:00:00 0.0 ... 299.014282 72 | 2023-10-31 18:00:00 0.0 ... 299.003540 73 | 2023-10-31 21:00:00 0.0 ... 298.992798 -------------------------------------------------------------------------------- /src/gldas/grid.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from pygeogrids.grids import BasicGrid 3 | from netCDF4 import Dataset 4 | import os 5 | 6 | def subgrid4bbox(grid, min_lon, min_lat, max_lon, max_lat): 7 | """ 8 | Select a spatial subset for the grid by bound box corner points 9 | 10 | Parameters 11 | ---------- 12 | grid: BasicGrid or CellGrid 13 | Grid object to trim. 
def GLDAS025Grids(only_land=False):
    """
    Create global 0.25 DEG gldas grids (origin in bottom left).

    Parameters
    ----------
    only_land : bool, optional (default: False)
        Uses the land mask to reduce the GLDAS 0.25DEG grid to land
        points only.

    Returns
    -------
    grid : pygeogrids.CellGrid
        Either a land grid or a global grid.
    """
    resolution = 0.25
    glob_lons = np.arange(
        -180 + resolution / 2, 180 + resolution / 2, resolution
    )
    glob_lats = np.arange(
        -90 + resolution / 2, 90 + resolution / 2, resolution
    )
    lon, lat = np.meshgrid(glob_lons, glob_lats)
    glob_grid = BasicGrid(lon.flatten(), lat.flatten()).to_cell_grid(
        cellsize=5.0
    )

    if only_land:
        ds = Dataset(
            os.path.join(
                os.path.abspath(os.path.dirname(__file__)),
                "GLDASp4_landmask_025d.nc4",
            )
        )
        land_lats = ds.variables["lat"][:]
        # mask value 0.0 in the GLDAS mask file marks non-land cells;
        # the resulting boolean array is True where points are masked OUT
        land_mask = ds.variables["GLDAS_mask"][:].flatten().filled() == 0.0
        # the mask file covers fewer latitude rows than the global grid;
        # pad the missing rows (prepended, presumably the southernmost
        # latitudes — TODO confirm mask file orientation) as masked-out
        dlat = glob_lats.size - land_lats.size

        land_mask = np.concatenate((np.ones(dlat * glob_lons.size), land_mask))
        land_points = np.ma.masked_array(
            glob_grid.get_grid_points()[0], land_mask
        )

        land_grid = glob_grid.subgrid_from_gpis(
            land_points[~land_points.mask].filled()
        )
        return land_grid
    else:
        return glob_grid


def GLDAS025Cellgrid():
    """Alias to create a global 0.25 DEG grid without gaps w. 5 DEG cells"""
    return GLDAS025Grids(only_land=False)


def GLDAS025LandGrid():
    """Alias to create a global 0.25 DEG grid over land only w. 5 DEG cells"""
    return GLDAS025Grids(only_land=True)


def load_grid(land_points=True, bbox=None):
    """
    Load the GLDAS 0.25 DEG grid, optionally restricted to land points
    and/or a bounding box.

    Parameters
    ----------
    land_points : bool, optional (default: True)
        Use only land points (as defined by the GLDAS land mask).
    bbox : tuple, optional (default: None)
        (min_lon, min_lat, max_lon, max_lat)
        Bounding box (lower left and upper right corner, WGS84) to limit
        the grid to.
        BUGFIX: the previous docstring documented the order as
        (min_lat, min_lon, max_lat, max_lon), but the tuple is passed
        positionally to ``subgrid4bbox(grid, min_lon, min_lat, max_lon,
        max_lat)``, so lon comes first (this also matches the ``--bbox``
        command line help in ``reshuffle.py``).

    Returns
    -------
    subgrid : pygeogrids.CellGrid or None
        The requested (sub)grid, or None when neither a land mask nor a
        bounding box restriction applies (callers then build the grid
        from the data itself).
    """
    if land_points:
        subgrid = GLDAS025LandGrid()
        if bbox is not None:
            subgrid = subgrid4bbox(subgrid, *bbox)
    else:
        if bbox is not None:
            subgrid = subgrid4bbox(GLDAS025Cellgrid(), *bbox)
        else:
            subgrid = None

    return subgrid
3 | """ 4 | import os 5 | import unittest 6 | from datetime import datetime 7 | import pytest 8 | import tempfile 9 | 10 | from gldas.download import get_last_formatted_dir_in_dir 11 | from gldas.download import get_first_formatted_dir_in_dir 12 | from gldas.download import get_last_gldas_folder 13 | from gldas.download import get_first_gldas_folder 14 | from gldas.download import gldas_folder_get_version_first_last 15 | from gldas.download import main as main_download 16 | 17 | from gldas.interface import GLDAS_Noah_v21_025Ds 18 | 19 | try: 20 | username = os.environ['GES_DISC_USERNAME'] 21 | pwd = os.environ['GES_DISC_PWD'] 22 | except KeyError: 23 | username = pwd = None 24 | 25 | @pytest.mark.skipif(username is None or pwd is None, 26 | reason="Environment variable (or GitHub Secret) expected but not found:" 27 | "`GES_DISC_USERNAME` and/or `GES_DISC_PWD`") 28 | class TestDownload(unittest.TestCase): 29 | def setUp(self) -> None: 30 | self.outpath = tempfile.mkdtemp(prefix='gldas') 31 | 32 | def test_download_GLDAS_Noah_v21_025(self): 33 | args = [self.outpath, '-s', '2010-03-02', '-e' '2010-03-02', '--product', "GLDAS_Noah_v21_025", 34 | '--username', username, '--password', pwd] 35 | main_download(args) 36 | assert len(os.listdir(os.path.join(self.outpath, '2010', '061'))) == 8 * 2 + 2 37 | 38 | ds = GLDAS_Noah_v21_025Ds(self.outpath) 39 | img = ds.read(datetime(2010, 3, 2, 3)) 40 | assert list(img.data.keys()) == ['SoilMoi0_10cm_inst'] == list(img.metadata.keys()) 41 | ds.close() 42 | 43 | 44 | def test_get_last_dir_in_dir(): 45 | path = os.path.join(os.path.dirname(__file__), "folder_test", "success") 46 | last_dir = get_last_formatted_dir_in_dir(path, "{time:%Y}") 47 | assert last_dir == "2014" 48 | 49 | 50 | def test_get_last_dir_in_dir_failure(): 51 | path = os.path.join(os.path.dirname(__file__), "folder_test", "failure") 52 | last_dir = get_last_formatted_dir_in_dir(path, "{time:%Y}") 53 | assert last_dir == None 54 | 55 | 56 | def 
test_get_first_dir_in_dir(): 57 | path = os.path.join(os.path.dirname(__file__), "folder_test", "success") 58 | last_dir = get_first_formatted_dir_in_dir(path, "{time:%Y}") 59 | assert last_dir == "2013" 60 | 61 | 62 | def test_get_last_gldas_folder(): 63 | path = os.path.join(os.path.dirname(__file__), "folder_test", "success") 64 | last = get_last_gldas_folder(path, ["{time:%Y}", "{time:%j}"]) 65 | last_should = os.path.join(path, "2014", "134") 66 | assert last == last_should 67 | 68 | 69 | def test_get_last_gldas_folder_no_folder(): 70 | path = os.path.join(os.path.dirname(__file__), "folder_test", "failure") 71 | last = get_last_gldas_folder(path, ["{time:%Y}", "{time:%j}"]) 72 | last_should = None 73 | assert last == last_should 74 | 75 | 76 | def test_get_first_gldas_folder(): 77 | path = os.path.join(os.path.dirname(__file__), "folder_test", "success") 78 | last = get_first_gldas_folder(path, ["{time:%Y}", "{time:%j}"]) 79 | last_should = os.path.join(path, "2013", "001") 80 | assert last == last_should 81 | 82 | 83 | def test_get_first_gldas_folder_no_folder(): 84 | path = os.path.join(os.path.dirname(__file__), "folder_test", "failure") 85 | last = get_first_gldas_folder(path, ["{time:%Y}", "{time:%j}"]) 86 | last_should = None 87 | assert last == last_should 88 | 89 | 90 | def test_gldas_get_start_end(): 91 | path = os.path.join( 92 | os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data" 93 | ) 94 | version, start, end = gldas_folder_get_version_first_last(path) 95 | version_should = "GLDAS_Noah_v21_025" 96 | start_should = datetime(2015, 1, 1) 97 | end_should = datetime(2015, 1, 1) 98 | assert version == version_should 99 | assert end == end_should 100 | assert start == start_should 101 | 102 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | # This file is used to configure your project. 
2 | # Read more about the various options under: 3 | # http://setuptools.readthedocs.io/en/latest/setuptools.html#configuring-setup-using-setup-cfg-files 4 | 5 | [metadata] 6 | name = gldas 7 | description = Readers and converters for data from the GLDAS Noah Land Surface Model. 8 | author = TU Wien 9 | author_email = support@qa4sm.eu 10 | license = mit 11 | long_description = file: README.rst 12 | long_description_content_type = text/x-rst; charset=UTF-8 13 | url = https://github.com/TUW-GEO/gldas 14 | project_urls = 15 | Documentation = https://gldas.readthedocs.io/en/latest/ 16 | # Change if running only on Windows, Mac or Linux (comma-separated) 17 | platforms = any 18 | # Add here all kinds of additional classifiers as defined under 19 | # https://pypi.python.org/pypi?%3Aaction=list_classifiers 20 | classifiers = 21 | Development Status :: 4 - Beta 22 | Programming Language :: Python 23 | 24 | [options] 25 | zip_safe = False 26 | packages = find_namespace: 27 | include_package_data = True 28 | package_dir = 29 | =src 30 | # DON'T CHANGE THE FOLLOWING LINE! IT WILL BE UPDATED BY PYSCAFFOLD! 31 | # Add here dependencies of your project (semicolon/line-separated), e.g. 32 | install_requires = 33 | importlib-metadata; python_version<"3.8" 34 | pyproj 35 | pygeogrids 36 | numpy 37 | pygeobase 38 | datedown>=0.4 39 | trollsift 40 | netCDF4 41 | pyresample 42 | repurpose 43 | pynetcf 44 | 45 | # The usage of test_requires is discouraged, see `Dependency Management` docs 46 | # tests_require = pytest-cov; coverage; pytest; 47 | # Require a specific Python version, e.g. 
Python 2.7 or >= 3.4 48 | # python_requires = >=3.9 49 | 50 | [options.packages.find] 51 | where = src 52 | exclude = 53 | tests 54 | 55 | [options.extras_require] 56 | # Add here additional requirements for extra features, to install with: 57 | # `pip install gldas[PDF]` like: 58 | # PDF = ReportLab; RXP 59 | # Add here test requirements (semicolon/line-separated) 60 | testing = 61 | pytest-cov 62 | coverage 63 | pytest 64 | 65 | building = 66 | setuptools-scm 67 | setuptools 68 | wheel 69 | packaging 70 | build 71 | twine 72 | 73 | [options.entry_points] 74 | # Add here console scripts like: 75 | # console_scripts = 76 | # script_name = gldas.module:function 77 | # For example: 78 | console_scripts = 79 | gldas_download = gldas.download:run 80 | gldas_repurpose = gldas.reshuffle:run 81 | # And any other entry points, for example: 82 | # pyscaffold.cli = 83 | # awesome = pyscaffoldext.awesome.extension:AwesomeExtension 84 | 85 | [test] 86 | # py.test options when running `python setup.py test` 87 | # addopts = --verbose 88 | extras = True 89 | 90 | [tool:pytest] 91 | # Options for py.test: 92 | # Specify command line options as you would do when invoking py.test directly. 93 | # e.g. --cov-report html (or xml) for html/xml output or --junitxml junit.xml 94 | # in order to write a coverage file that can be read by Jenkins. 
95 | addopts = 96 | --cov gldas --cov-report term-missing 97 | --verbose 98 | norecursedirs = 99 | dist 100 | build 101 | .tox 102 | testpaths = tests 103 | 104 | [aliases] 105 | dists = bdist_wheel 106 | 107 | [bdist_wheel] 108 | # Use this option if your package is pure-python 109 | universal = 1 110 | 111 | [build_sphinx] 112 | source_dir = docs 113 | build_dir = build/sphinx 114 | 115 | [devpi:upload] 116 | # Options for the devpi: PyPI server and packaging tool 117 | # VCS export must be deactivated since we are using setuptools-scm 118 | no_vcs = 1 119 | formats = bdist_wheel 120 | 121 | [flake8] 122 | # Some sane defaults for the code style checker flake8 123 | exclude = 124 | .tox 125 | build 126 | dist 127 | .eggs 128 | docs/conf.py 129 | 130 | [pyscaffold] 131 | # PyScaffold's parameters when the project was created. 132 | # This will be used when updating. Do not change! 133 | version = 4.6 134 | package = gldas 135 | extensions = 136 | no_skeleton 137 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | gldas 3 | ===== 4 | 5 | |ci| |cov| |pip| |doc| 6 | 7 | .. |ci| image:: https://github.com/TUW-GEO/gldas/actions/workflows/ci.yml/badge.svg?branch=master 8 | :target: https://github.com/TUW-GEO/gldas/actions 9 | 10 | .. |cov| image:: https://coveralls.io/repos/TUW-GEO/gldas/badge.png?branch=master 11 | :target: https://coveralls.io/r/TUW-GEO/gldas?branch=master 12 | 13 | .. |pip| image:: https://badge.fury.io/py/gldas.svg 14 | :target: http://badge.fury.io/py/gldas 15 | 16 | .. |doc| image:: https://readthedocs.org/projects/gldas/badge/?version=latest 17 | :target: http://gldas.readthedocs.org/ 18 | 19 | Readers and converters for data from the `GLDAS Noah Land Surface Model 20 | `_. Written in Python. 21 | 22 | Works great in combination with `pytesmo `_. 23 | 24 | Citation 25 | ======== 26 | 27 | .. 
image:: https://zenodo.org/badge/DOI/10.5281/zenodo.596427.svg 28 | :target: https://doi.org/10.5281/zenodo.596427 29 | 30 | If you use the software in a publication then please cite it using the Zenodo DOI. 31 | Be aware that this badge links to the latest package version. 32 | 33 | Please select your specific version at https://doi.org/10.5281/zenodo.596427 to get the DOI of that version. 34 | You should normally always use the DOI for the specific version of your record in citations. 35 | This is to ensure that other researchers can access the exact research artefact you used for reproducibility. 36 | 37 | You can find additional information regarding DOI versioning at http://help.zenodo.org/#versioning 38 | 39 | Installation 40 | ============ 41 | 42 | This package can be installed via pip from pypi.org. The minimum supported 43 | python version is ``3.10``. 44 | 45 | You can install the gldas package and all required dependencies via 46 | 47 | .. code:: 48 | 49 | pip install gldas 50 | 51 | Optional dependencies 52 | --------------------- 53 | 54 | To read grib versions of GLDAS Noah, please install pygrib first: 55 | 56 | .. code:: 57 | 58 | pip install pygrib 59 | 60 | On windows it might be necessary to use conda: 61 | 62 | .. code:: 63 | 64 | conda install -c conda-forge pygrib 65 | 66 | 67 | Supported Products 68 | ================== 69 | 70 | At the moment this package supports GLDAS Noah data version 1 in grib 71 | format (reading, time series creation) and GLDAS Noah data version 2.0 and version 2.1 in netCDF format (download, reading, time series creation) with a spatial sampling of 0.25 degrees. 72 | It should be easy to extend the package to support other GLDAS based products. 73 | This will be done as need arises. 74 | 75 | Contribute 76 | ========== 77 | 78 | We are happy if you want to contribute. Please raise an issue explaining what is missing or if you find a bug. 
We will also gladly accept pull requests against our master branch for new features or bug fixes. 79 | 80 | Development setup 81 | ----------------- 82 | 83 | For Development we also recommend a ``conda`` environment. You can create one 84 | including test dependencies and debugger by running ``conda create -n gldas python=3.12``, then 85 | ``conda env update -f environment.yml`` to install all dependencies. Finally, call 86 | ``pip install -e .[testing]``. Now everything should be in place to run tests 87 | and develop new features. 88 | 89 | Guidelines 90 | ---------- 91 | 92 | If you want to contribute please follow these steps: 93 | 94 | - Fork the gldas repository to your account 95 | - Clone the repository, make sure you use ``git clone --recursive`` to also get the test data repository. 96 | - make a new feature branch from the gldas master branch 97 | - Add your feature 98 | - Please include tests for your contributions in one of the test directories. We use py.test so a simple function called test_my_feature is enough 99 | - submit a pull request to our master branch 100 | 101 | Note 102 | ==== 103 | 104 | This project has been set up using PyScaffold 2.5.6. For details and usage 105 | information on PyScaffold see http://pyscaffold.readthedocs.org/. 106 | -------------------------------------------------------------------------------- /docs/varnames.rst: -------------------------------------------------------------------------------- 1 | Variable naming for different versions of GLDAS NOAH 2 | ==================================================== 3 | 4 | For GLDAS Noah 1.0 parameters are called using their PDS IDs from the table below. 5 | 6 | For GLDAS Noah 2.0 and GLDAS Noah 2.1 parameters are called using Variable Names from the table below. 
7 | A full list of variable names can be found in the `GLDAS 2.x README `_ 8 | 9 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 10 | | PDS ID (old)| Variable Name (new) | Parameter | Resolution | Depth/Height Interval [m] | Units | 11 | +=============+=======================+==================================+============+===========================+============+ 12 | | 086_L1 | SoilMoi0_10cm_inst | Soil moisture | 0.25° | 0.00 - 0.10 |[kg/m^2] | 13 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 14 | | 086_L2 | SoilMoi10_40cm_inst | Soil moisture | 0.25° | 0.10 - 0.40 |[kg/m^2] | 15 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 16 | | 086_L3 | SoilMoi40_100cm_inst | Soil moisture | 0.25° | 0.40 - 1.00 |[kg/m^2] | 17 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 18 | | 086_L4 | SoilMoi100_200cm_inst | Soil moisture | 0.25° | 1.00 - 2.00 |[kg/m^2] | 19 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 20 | | 085_L1 | SoilTMP0_10cm_inst | Soil temperature | 0.25° | 0.00 - 0.10 | [K] | 21 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 22 | | 085_L2 | SoilTMP10_40cm_inst | Soil temperature | 0.25° | 0.10 - 0.40 | [K] | 23 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 24 | | 085_L3 | SoilTMP40_100cm_inst | Soil temperature | 0.25° | 0.40 - 1.00 | [K] | 25 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 26 | | 
085_L4 | SoilTMP100_200cm_inst | Soil temperature | 0.25° | 1.00 - 2.00 | [K] | 27 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 28 | | 065 | SWE_inst | Snow depth water equivalent | 0.25° | 0 | [kg/m^2] | 29 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 30 | | 138 | AvgSurfT_inst | Average Surface Skin temperature | 0.25° | 0 | [K] | 31 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 32 | | 131 | Snowf_tavg | Snow precipitation rate | 0.25° | 0 | [kg/m^2/s] | 33 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 34 | | 132 | Rainf_tavg | Rain precipitation rate | 0.25° | 0 | [kg/m^2/s] | 35 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 36 | | 057 | Evap_tavg | Total Evapo-transpiration | 0.25° | 0 | [kg/m^2/s] | 37 | +-------------+-----------------------+----------------------------------+------------+---------------------------+------------+ 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | 2 | # This workflow will install Python dependencies and run tests on 3 | # windows and linux systems with a variety of Python versions 4 | 5 | # For more information see: 6 | # https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 7 | 8 | name: CI/CD 9 | 10 | on: 11 | push: 12 | pull_request: 13 | workflow_dispatch: 14 | schedule: 15 | - cron: '0 0 * * *' # daily 16 | 17 | jobs: 18 | build: 19 | name: py${{ matrix.python-version }} @ ${{ matrix.os }} 20 | runs-on: 
${{ matrix.os }} 21 | strategy: 22 | matrix: 23 | include: 24 | - os: "ubuntu-latest" 25 | python-version: '3.10' # first supported 26 | - os: "windows-latest" 27 | python-version: '3.10' # first supported 28 | - os: "macos-latest" 29 | python-version: '3.10' # first supported 30 | - os: "ubuntu-latest" 31 | python-version: '3.12' # latest supported 32 | - os: "windows-latest" 33 | python-version: '3.12' # latest supported 34 | - os: "macos-latest" 35 | python-version: '3.12' # latest supported 36 | steps: 37 | - uses: actions/checkout@v4 38 | with: 39 | submodules: true 40 | fetch-depth: 0 41 | - uses: conda-incubator/setup-miniconda@v3 42 | with: 43 | auto-update-conda: true 44 | python-version: ${{ matrix.python-version }} 45 | channel-priority: flexible 46 | activate-environment: gldas 47 | auto-activate-base: false 48 | # environment-file: 'environment.yml' 49 | - name: Print environment infos 50 | shell: bash -l {0} 51 | run: | 52 | conda info -a 53 | which pip 54 | which python 55 | conda list 56 | - name: Install package and dependencies 57 | shell: bash -l {0} 58 | run: | 59 | pip install -e .[building,testing] 60 | - name: Run Tests 61 | shell: bash -l {0} 62 | run: | 63 | pytest 64 | - name: Install pygrib and run pygrib tests 65 | shell: bash -l {0} 66 | run: | 67 | conda install -c conda-forge pygrib 68 | pytest -m "pygrib" --cov-append 69 | - name: Export Environment 70 | shell: bash -l {0} 71 | run: | 72 | mkdir -p artifacts 73 | filename=env_py${{ matrix.python-version }}_${{ matrix.os }}.yml 74 | conda env export --no-builds | grep -v "prefix" > artifacts/$filename 75 | - name: Create wheel and dist package 76 | shell: bash -l {0} 77 | run: | 78 | mkdir -p artifacts/dist 79 | if [ ${{ matrix.os }} == "ubuntu-latest" ] 80 | then 81 | # build dist on linux 82 | python -m build --outdir artifacts/dist 83 | fi 84 | pip install wheel 85 | python setup.py bdist_wheel --dist-dir artifacts/dist 86 | ls artifacts/dist 87 | twine check artifacts/dist/* 88 | 
- name: Upload Coverage 89 | shell: bash -l {0} 90 | run: | 91 | pip install coveralls && coveralls --service=github-actions 92 | env: 93 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 94 | COVERALLS_FLAG_NAME: ${{ matrix.python-version }} 95 | COVERALLS_PARALLEL: true 96 | - name: Upload Artifacts 97 | uses: actions/upload-artifact@v4 98 | with: 99 | name: Artifacts-py${{ matrix.python-version }}-${{ matrix.os }} 100 | path: artifacts/* 101 | 102 | coveralls: 103 | name: Submit Coveralls 👚 104 | needs: build 105 | runs-on: ubuntu-latest 106 | container: python:3-slim 107 | steps: 108 | - name: Finalize Coveralls Parallel Build 109 | run: | 110 | pip3 install --upgrade coveralls 111 | coveralls --finish 112 | env: 113 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 114 | COVERALLS_SERVICE_NAME: github-actions 115 | publish: 116 | name: Upload to PyPI 🚀 117 | if: startsWith(github.ref, 'refs/tags/v') && startsWith(github.repository, 'TUW-GEO') 118 | needs: build 119 | runs-on: ubuntu-latest 120 | steps: 121 | - name: Print environment variables 122 | run: | 123 | echo "GITHUB_REF = $GITHUB_REF" 124 | echo "GITHUB_REPOSITORY = $GITHUB_REPOSITORY" 125 | - name: Download Artifacts 126 | uses: actions/download-artifact@v4 127 | with: 128 | path: Artifacts 129 | pattern: Artifacts-* 130 | merge-multiple: true 131 | - name: Display downloaded files 132 | run: ls -aR 133 | - name: Upload to PyPI 134 | uses: pypa/gh-action-pypi-publish@release/v1 135 | with: 136 | skip-existing: true 137 | verbose: true 138 | verify-metadata: true 139 | packages-dir: Artifacts/dist/ 140 | user: __token__ 141 | password: ${{ secrets.PYPI_API_TOKEN }} # this needs to be uploaded to github actions secrets 142 | -------------------------------------------------------------------------------- /src/gldas/reshuffle.py: -------------------------------------------------------------------------------- 1 | # The MIT License (MIT) 2 | # 3 | # Copyright (c) 2018, TU Wien 4 | # 5 | # Permission is hereby 
# The MIT License (MIT)
#
# Copyright (c) 2018, TU Wien
# (full license text unchanged — see LICENSE.txt)

"""
Module for a command line interface to convert the GLDAS data into a
time series format using the repurpose package.
"""

import os
import sys
import argparse
from datetime import datetime
import warnings

# NOTE(review): pygeogrids, repurpose and the gldas readers are imported
# lazily inside the functions that need them, so the lightweight CLI
# helpers (mkdate, str2bool, parse_args) work without the full
# scientific stack being importable.


def get_filetype(inpath):
    """
    Tries to find out the file type by searching for grib or nc files
    two subdirectories into the passed input path.
    If detection fails, netCDF is assumed.

    Parameters
    ----------
    inpath : str
        Input path where GLDAS data was downloaded.
        (docstring previously named this parameter ``input_root``.)

    Returns
    -------
    filetype : str
        File type string, either "netCDF" or "grib".
    """
    onedown = os.path.join(inpath, os.listdir(inpath)[0])
    twodown = os.path.join(onedown, os.listdir(onedown)[0])

    filelist = []
    for path, subdirs, files in os.walk(twodown):
        for name in files:
            filename, extension = os.path.splitext(name)
            filelist.append(extension)

    if ".nc4" in filelist and ".grb" not in filelist:
        return "netCDF"
    elif ".grb" in filelist and ".nc4" not in filelist:
        return "grib"
    else:
        # if file type cannot be detected, guess netCDF
        return "netCDF"


def mkdate(datestring):
    """
    Parse a date string from the command line into a datetime.

    Parameters
    ----------
    datestring : str
        Date string, either 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM'.

    Returns
    -------
    date : datetime
        Parsed datetime.

    Raises
    ------
    argparse.ArgumentTypeError
        If the string matches neither supported format.
        BUGFIX: previously an unsupported format silently returned None
        (falling off the end of the function), which only failed much
        later with an obscure error; as an argparse ``type=`` callable
        this now produces a clean CLI error message instead.
    """
    if len(datestring) == 10:
        return datetime.strptime(datestring, "%Y-%m-%d")
    if len(datestring) == 16:
        return datetime.strptime(datestring, "%Y-%m-%dT%H:%M")
    raise argparse.ArgumentTypeError(
        "Unsupported date format: '{}'. "
        "Use YYYY-MM-DD or YYYY-MM-DDTHH:MM.".format(datestring)
    )


def str2bool(val):
    """Interpret a command line string as a boolean flag."""
    return val in ["True", "true", "t", "T", "1"]


def reshuffle(
    input_root,
    outputpath,
    startdate,
    enddate,
    parameters,
    input_grid=None,
    imgbuffer=50,
):
    """
    Reshuffle method applied to GLDAS data.

    Parameters
    ----------
    input_root : string
        Input path where gldas data was downloaded.
    outputpath : string
        Output path.
    startdate : datetime
        Start date.
    enddate : datetime
        End date.
    parameters : list
        Parameters to read and convert.
    input_grid : CellGrid, optional (default: None)
        Local input grid to read data for. If None is passed, we create
        the grid from data.
    imgbuffer : int, optional (default: 50)
        How many images to read at once before writing time series.
    """
    # heavy imports deferred on purpose (see module-level note)
    from pygeogrids import BasicGrid
    from repurpose.img2ts import Img2Ts
    from gldas.interface import GLDAS_Noah_v1_025Ds, GLDAS_Noah_v21_025Ds

    if get_filetype(input_root) == "grib":
        if input_grid is not None:
            warnings.warn("Land Grid is fit to GLDAS 2.x netCDF data")

        input_dataset = GLDAS_Noah_v1_025Ds(
            input_root, parameters, subgrid=input_grid, array_1D=True
        )
    else:
        input_dataset = GLDAS_Noah_v21_025Ds(
            input_root, parameters, subgrid=input_grid, array_1D=True
        )

    # exist_ok avoids the check-then-create race of the previous
    # `if not os.path.exists(...)` pattern
    os.makedirs(outputpath, exist_ok=True)

    global_attr = {"product": "GLDAS"}

    # get time series attributes from first day of data.
    data = input_dataset.read(startdate)
    ts_attributes = data.metadata
    if input_grid is None:
        grid = BasicGrid(data.lon, data.lat)
    else:
        grid = input_grid

    reshuffler = Img2Ts(
        input_dataset=input_dataset,
        outputpath=outputpath,
        startdate=startdate,
        enddate=enddate,
        input_grid=grid,
        imgbuffer=imgbuffer,
        cellsize_lat=5.0,
        cellsize_lon=5.0,
        global_attr=global_attr,
        n_proc=1,
        zlib=True,
        unlim_chunksize=1000,
        ts_attributes=ts_attributes,
    )
    reshuffler.calc()


def parse_args(args):
    """
    Parse command line parameters for the reshuffle CLI.

    Parameters
    ----------
    args : list of str
        Command line parameters as list of strings.

    Returns
    -------
    args : argparse.Namespace
        Parsed command line arguments.
    """
    parser = argparse.ArgumentParser(
        description="Convert GLDAS data to time series format."
    )
    parser.add_argument(
        "dataset_root",
        help="Root of local filesystem where the " "data is stored.",
    )

    parser.add_argument(
        "timeseries_root",
        help="Root of local filesystem where the timeseries "
        "should be stored.",
    )

    parser.add_argument(
        "start",
        type=mkdate,
        help=(
            "Startdate. Either in format YYYY-MM-DD or " "YYYY-MM-DDTHH:MM."
        ),
    )

    parser.add_argument(
        "end",
        type=mkdate,
        help=("Enddate. Either in format YYYY-MM-DD or " "YYYY-MM-DDTHH:MM."),
    )

    parser.add_argument(
        "parameters",
        metavar="parameters",
        nargs="+",
        help=(
            "Parameters to reshuffle into time series format. "
            "e.g. SoilMoi0_10cm_inst SoilMoi10_40cm_inst for "
            "Volumetric soil water layers 1 to 2."
        ),
    )

    parser.add_argument(
        "--land_points",
        type=str2bool,
        default="False",
        help=(
            "Set True to convert only land points as defined"
            " in the GLDAS land mask (faster and less/smaller files)"
        ),
    )

    parser.add_argument(
        "--bbox",
        type=float,
        default=None,
        nargs=4,
        help=(
            "min_lon min_lat max_lon max_lat. "
            "Bounding Box (lower left and upper right corner) "
            "of area to reshuffle (WGS84)"
        ),
    )

    parser.add_argument(
        "--imgbuffer",
        type=int,
        default=50,
        help=(
            "How many images to read at once. Bigger "
            "numbers make the conversion faster but "
            "consume more memory."
        ),
    )

    args = parser.parse_args(args)
    # set defaults that can not be handled by argparse

    print(
        "Converting data from {} to"
        " {} into folder {}.".format(
            args.start.isoformat(), args.end.isoformat(), args.timeseries_root
        )
    )

    return args


def main(args):
    """
    Main routine used for command line interface.

    Parameters
    ----------
    args : list of str
        Command line arguments.
    """
    args = parse_args(args)

    # deferred heavy import (see module-level note)
    from gldas.grid import load_grid

    input_grid = load_grid(
        land_points=args.land_points,
        bbox=tuple(args.bbox) if args.bbox is not None else None,
    )

    reshuffle(
        args.dataset_root,
        args.timeseries_root,
        args.start,
        args.end,
        args.parameters,
        input_grid=input_grid,
        imgbuffer=args.imgbuffer,
    )


def run():
    """Console-script entry point (see setup.cfg: gldas_repurpose)."""
    main(sys.argv[1:])
21 | sys.path.insert(0, os.path.join(__location__, "../src")) 22 | 23 | # -- Run sphinx-apidoc ------------------------------------------------------- 24 | # This hack is necessary since RTD does not issue `sphinx-apidoc` before running 25 | # `sphinx-build -b html . _build/html`. See Issue: 26 | # https://github.com/readthedocs/readthedocs.org/issues/1139 27 | # DON'T FORGET: Check the box "Install your project inside a virtualenv using 28 | # setup.py install" in the RTD Advanced Settings. 29 | # Additionally it helps us to avoid running apidoc manually 30 | 31 | try: # for Sphinx >= 1.7 32 | from sphinx.ext import apidoc 33 | except ImportError: 34 | from sphinx import apidoc 35 | 36 | output_dir = os.path.join(__location__, "api") 37 | module_dir = os.path.join(__location__, "../src/gldas") 38 | try: 39 | shutil.rmtree(output_dir) 40 | except FileNotFoundError: 41 | pass 42 | 43 | try: 44 | import sphinx 45 | 46 | cmd_line = f"sphinx-apidoc --implicit-namespaces -f -o {output_dir} {module_dir}" 47 | 48 | args = cmd_line.split(" ") 49 | if tuple(sphinx.__version__.split(".")) >= ("1", "7"): 50 | # This is a rudimentary parse_version to avoid external dependencies 51 | args = args[1:] 52 | 53 | apidoc.main(args) 54 | except Exception as e: 55 | print("Running `sphinx-apidoc` failed!\n{}".format(e)) 56 | 57 | # -- General configuration --------------------------------------------------- 58 | 59 | # If your documentation needs a minimal Sphinx version, state it here. 60 | # needs_sphinx = '1.0' 61 | 62 | # Add any Sphinx extension module names here, as strings. They can be extensions 63 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
# Sphinx extensions used when building the documentation.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.intersphinx",
    "sphinx.ext.todo",
    "sphinx.ext.autosummary",
    "sphinx.ext.viewcode",
    "sphinx.ext.coverage",
    "sphinx.ext.doctest",
    "sphinx.ext.ifconfig",
    "sphinx.ext.mathjax",
    "sphinx.ext.napoleon",
]

# Paths (relative to this directory) that contain page templates.
templates_path = ["_templates"]

# Suffix of the documentation source files.
source_suffix = ".rst"

# Root document of the toctree.
master_doc = "index"

# Project metadata.
project = "gldas"
copyright = "2025, TU Wien"

# Resolve the displayed version from the installed package; fall back to the
# version name provided by ReadTheDocs (or "unknown") when the package cannot
# be imported or carries no usable version string. ``release`` (full version)
# and ``version`` (short version) are kept identical here.
try:
    from gldas import __version__ as version
except ImportError:
    version = ""

if not version or version.lower() == "unknown":
    version = os.getenv("READTHEDOCS_VERSION", "unknown")  # automatically set by RTD

release = version

# Patterns of files and directories that are never picked up as sources.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", ".venv"]

# Pygments (syntax highlighting) style.
pygments_style = "sphinx"

# Emit a build warning for every ``.. todo::`` entry.
todo_emit_warnings = True


# -- Options for HTML output -------------------------------------------------

# HTML theme and its layout tweaks.
html_theme = "sphinx_rtd_theme"
html_theme_options = {
    "sidebar_width": "300px",
    "page_width": "1200px"
}

# Custom static files; copied after (and thus overriding) the theme's own.
html_static_path = ["_static"]

# Output file base name for the HTML help builder.
htmlhelp_basename = "gldas-doc"


# -- Options for LaTeX output ------------------------------------------------

# LaTeX builder tweaks; defaults kept (paper size, font size, preamble can be
# customized here if ever needed).
latex_elements = {}

# Grouping the document tree into LaTeX files:
# (source start file, target name, title, author, documentclass).
latex_documents = [
    ("index", "user_guide.tex", "gldas Documentation", "TU Wien", "manual")
]
# latex_domain_indices = True

# -- External mapping --------------------------------------------------------
# Intersphinx targets; the Python entry is pinned to the interpreter version
# used for the build.
python_version = ".".join(map(str, sys.version_info[0:2]))
intersphinx_mapping = {
    "sphinx": ("https://www.sphinx-doc.org/en/master", None),
    "python": ("https://docs.python.org/" + python_version, None),
    "matplotlib": ("https://matplotlib.org", None),
    "numpy": ("https://numpy.org/doc/stable", None),
    "sklearn": ("https://scikit-learn.org/stable", None),
    "pandas": ("https://pandas.pydata.org/pandas-docs/stable", None),
    "scipy": ("https://docs.scipy.org/doc/scipy/reference", None),
    "setuptools": ("https://setuptools.pypa.io/en/stable/", None),
    "pyscaffold": ("https://pyscaffold.org/en/stable", None),
}

print(f"loading configurations for {project} {version} ...", file=sys.stderr)


"""
Module managing download of NOAH GLDAS data.
"""

import os
import sys
import glob
import argparse
from functools import partial

from trollsift.parser import validate, parse, globify
from datetime import datetime
from datedown.interface import mkdate
from datedown.dates import daily
from datedown.urlcreator import create_dt_url
from datedown.fname_creator import create_dt_fpath
from datedown.interface import download_by_dt
from datedown.down import download
def gldas_folder_get_version_first_last(root, fmt=None, subpaths=None):
    """
    Get product version and first and last product which exists under
    the root folder.

    Parameters
    ----------
    root : str
        Root folder on local filesystem.
    fmt : str, optional
        Filename template
        (default: "GLDAS_NOAH025_3H{ep}.A{time:%Y%m%d.%H%M}.0{version:2s}.nc4").
    subpaths : list of str, optional
        Format of the subdirectories under root
        (default: ["{time:%Y}", "{time:%j}"]).

    Returns
    -------
    version : str or None
        Found product version, None if no matching file was found.
    start : datetime.datetime or None
        First found product datetime, None if no matching file was found.
    end : datetime.datetime or None
        Last found product datetime, None if no matching file was found.
    """
    if fmt is None:
        fmt = "GLDAS_NOAH025_3H{ep}.A{time:%Y%m%d.%H%M}.0{version:2s}.nc4"
    if subpaths is None:
        # assigned here instead of using a mutable default argument
        subpaths = ["{time:%Y}", "{time:%j}"]

    start = None
    end = None
    version = None
    first_folder = get_first_gldas_folder(root, subpaths)
    last_folder = get_last_gldas_folder(root, subpaths)

    if first_folder is not None:
        files = sorted(glob.glob(os.path.join(first_folder, globify(fmt))))
        # BUG FIX: an existing but empty (or non-matching) folder used to
        # raise an IndexError on files[0]
        if files:
            data = parse(fmt, os.path.split(files[0])[1])
            start = data["time"]
            version = f"GLDAS_Noah_v{data['version']}_025{data['ep']}"

    if last_folder is not None:
        files = sorted(glob.glob(os.path.join(last_folder, globify(fmt))))
        if files:
            data = parse(fmt, os.path.split(files[-1])[1])
            end = data["time"]

    return version, start, end


def get_last_gldas_folder(root, subpaths):
    """
    Get the (alphabetically) last GLDAS folder under root.

    Parameters
    ----------
    root : str
        Root path.
    subpaths : list of str
        Directory name templates, one per hierarchy level
        (e.g. ['{time:%Y}', '{time:%j}']).

    Returns
    -------
    directory : str or None
        Last folder name, None if some level had no matching directory.
    """
    directory = root
    for subpath in subpaths:
        last_dir = get_last_formatted_dir_in_dir(directory, subpath)
        if last_dir is None:
            directory = None
            break
        directory = os.path.join(directory, last_dir)

    return directory


def get_first_gldas_folder(root, subpaths):
    """
    Get the (alphabetically) first GLDAS folder under root.

    Parameters
    ----------
    root : str
        Root path.
    subpaths : list of str
        Directory name templates, one per hierarchy level
        (e.g. ['{time:%Y}', '{time:%j}']).

    Returns
    -------
    directory : str or None
        First folder name, None if some level had no matching directory.
    """
    directory = root
    for subpath in subpaths:
        first_dir = get_first_formatted_dir_in_dir(directory, subpath)
        if first_dir is None:
            directory = None
            break
        directory = os.path.join(directory, first_dir)

    return directory


def get_last_formatted_dir_in_dir(folder, fmt):
    """
    Get the (alphabetically) last directory in a directory
    which can be formatted according to fmt.

    Parameters
    ----------
    folder : str
        Folder name.
    fmt : str
        Format string.

    Returns
    -------
    last_elem : str or None
        Last formatted directory, None if no match.
    """
    last_elem = None
    root_elements = sorted(os.listdir(folder))
    # walk backwards so the first hit is the alphabetically last match
    for root_element in root_elements[::-1]:
        if os.path.isdir(os.path.join(folder, root_element)):
            if validate(fmt, root_element):
                last_elem = root_element
                break

    return last_elem


def get_first_formatted_dir_in_dir(folder, fmt):
    """
    Get the (alphabetically) first directory in a directory
    which can be formatted according to fmt.

    Parameters
    ----------
    folder : str
        Folder name.
    fmt : str
        Format string.

    Returns
    -------
    first_elem : str or None
        First formatted directory, None if no match.
    """
    first_elem = None
    root_elements = sorted(os.listdir(folder))
    for root_element in root_elements:
        if os.path.isdir(os.path.join(folder, root_element)):
            if validate(fmt, root_element):
                first_elem = root_element
                break

    return first_elem
def get_gldas_start_date(product):
    """
    Get NOAH GLDAS start date.

    Parameters
    ----------
    product : str
        Product name.

    Returns
    -------
    start_date : datetime.datetime
        Start date of NOAH GLDAS product.

    Raises
    ------
    KeyError
        If the product name is unknown.
    """
    dt_dict = {
        "GLDAS_Noah_v20_025": datetime(1948, 1, 1, 3),
        "GLDAS_Noah_v21_025": datetime(2000, 1, 1, 3),
        "GLDAS_Noah_v21_025_EP": datetime(2000, 1, 1, 3),
    }

    return dt_dict[product]


def parse_args(args):
    """
    Parse command line parameters for recursive download.

    Parameters
    ----------
    args : list of str
        Command line parameters as list of strings.

    Returns
    -------
    args : argparse.Namespace
        Command line arguments.
    """
    parser = argparse.ArgumentParser(
        description="Download GLDAS data.",
        formatter_class=argparse.RawTextHelpFormatter,
    )

    parser.add_argument(
        "localroot",
        # BUG FIX: the implicitly concatenated help strings below were all
        # missing separating spaces (e.g. "wherethe", "targetfolder").
        help="Root of local filesystem where " "the data is stored.",
    )

    parser.add_argument(
        "-s",
        "--start",
        type=mkdate,
        help=(
            "Startdate as YYYY-MM-DD. "
            "If not given then the target "
            "folder is scanned for a start date. If no data "
            "is found there then the first available date "
            "of the product is used."
        ),
    )

    parser.add_argument(
        "-e",
        "--end",
        type=mkdate,
        help=(
            "Enddate. In format YYYY-MM-DD. If not given then the "
            "current date is used."
        ),
    )

    help_string = "\n".join(
        [
            "GLDAS product to download.",
            "GLDAS_Noah_v20_025 available from {} to 2014-12-31",
            "GLDAS_Noah_v21_025 available from {}",
            "GLDAS_Noah_v21_025_EP available after GLDAS_Noah_v21_025",
        ]
    )

    help_string = help_string.format(
        get_gldas_start_date("GLDAS_Noah_v20_025"),
        get_gldas_start_date("GLDAS_Noah_v21_025"),
    )

    parser.add_argument(
        "--product",
        choices=[
            "GLDAS_Noah_v20_025",
            "GLDAS_Noah_v21_025",
            "GLDAS_Noah_v21_025_EP",
        ],
        default="GLDAS_Noah_v21_025",
        help=help_string,
    )

    parser.add_argument("--username", help="Username to use for download.")

    parser.add_argument("--password", help="Password to use for download.")

    parser.add_argument(
        "--n_proc",
        default=1,
        type=int,
        help="Number of parallel processes to use for " "downloading.",
    )

    args = parser.parse_args(args)
    # set defaults that can not be handled by argparse

    # Compare versions to prevent mixing data sets
    version, first, last = gldas_folder_get_version_first_last(args.localroot)
    if args.product and version and (args.product != version):
        raise Exception(
            "Error: Found products of different version ({}) "
            "in {}. Abort download!".format(version, args.localroot)
        )

    if args.start is None or args.end is None:
        if not args.product:
            args.product = version
        if args.start is None:
            if last is None:
                if args.product:
                    args.start = get_gldas_start_date(args.product)
                else:
                    # In case of no indication of version, use GLDAS Noah 2.0
                    # start time, because it has the longest time span
                    args.start = get_gldas_start_date("GLDAS_Noah_v20_025")
            else:
                # continue from the last locally available image
                args.start = last
        if args.end is None:
            args.end = datetime.now()

    prod_urls = {
        "GLDAS_Noah_v20_025": {
            "root": "hydro1.gesdisc.eosdis.nasa.gov",
            "dirs": ["data", "GLDAS", "GLDAS_NOAH025_3H.2.0", "%Y", "%j"],
        },
        "GLDAS_Noah_v21_025": {
            "root": "hydro1.gesdisc.eosdis.nasa.gov",
            "dirs": ["data", "GLDAS", "GLDAS_NOAH025_3H.2.1", "%Y", "%j"],
        },
        "GLDAS_Noah_v21_025_EP": {
            "root": "hydro1.gesdisc.eosdis.nasa.gov",
            "dirs": ["data", "GLDAS", "GLDAS_NOAH025_3H_EP.2.1", "%Y", "%j"],
        },
    }

    args.urlroot = prod_urls[args.product]["root"]
    args.urlsubdirs = prod_urls[args.product]["dirs"]
    args.localsubdirs = ["%Y", "%j"]

    print(
        "Downloading data from {} to {} "
        "into folder {}.".format(
            args.start.isoformat(), args.end.isoformat(), args.localroot
        )
    )
    return args
def main(args):
    """
    Main routine used for command line interface.

    Parameters
    ----------
    args : list of str
        Command line arguments.
    """
    args = parse_args(args)

    dts = list(daily(args.start, args.end))
    url_create_fn = partial(
        create_dt_url, root=args.urlroot, fname="", subdirs=args.urlsubdirs
    )
    fname_create_fn = partial(
        create_dt_fpath,
        root=args.localroot,
        fname="",
        subdirs=args.localsubdirs,
    )

    # The password is wrapped in single quotes because the downloader forwards
    # it to a shell command. BUG FIX: --password is optional, and
    # "'" + None + "'" raised a TypeError; pass None through untouched.
    password = None if args.password is None else "'" + args.password + "'"

    down_func = partial(
        download,
        num_proc=args.n_proc,
        username=args.username,
        password=password,
        recursive=True,
        filetypes=["nc4", "nc4.xml"],
    )
    download_by_dt(
        dts, url_create_fn, fname_create_fn, down_func, recursive=True
    )


def run():
    """Console-script entry point."""
    main(sys.argv[1:])


# -*- coding: utf-8 -*-
import os
from datetime import datetime
import pytest

from gldas.interface import GLDAS_Noah_v1_025Ds, GLDAS_Noah_v1_025Img
from gldas.interface import GLDAS_Noah_v21_025Ds, GLDAS_Noah_v21_025Img
from gldas.grid import GLDAS025LandGrid
from gldas.interface import pygrib_available


@pytest.mark.pygrib
@pytest.mark.skipif(not pygrib_available, reason="Pygrib not installed.")
def test_GLDAS_Noah_v1_025Ds_img_reading():
    """Read one v1 grib image via the dataset reader (full 1D grid)."""
    parameter = ["086_L2", "086_L1", "085_L1", "138", "132", "051"]
    img = GLDAS_Noah_v1_025Ds(
        data_path=os.path.join(
            os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data"
        ),
        parameter=parameter,
        subgrid=None,
        array_1D=True,
    )

    image = img.read(datetime(2015, 1, 1, 0))

    assert sorted(image.data.keys()) == sorted(parameter)
    assert image.timestamp == datetime(2015, 1, 1, 0)
    assert image.data["086_L1"][998529] == 30.7344
    assert image.data["086_L2"][998529] == 93.138
    assert image.data["085_L1"][206360] == 285.19
    assert image.data["138"][998529] == 237.27
    assert image.data["051"][998529] == 0
    assert image.lon.shape == (360 * 180 * (1 / 0.25) ** 2,)
    assert image.lon.shape == image.lat.shape
    assert sorted(list(image.metadata.keys())) == sorted(parameter)
    assert image.metadata["085_L1"]["units"] == u"K"
    assert (
        image.metadata["085_L1"]["long_name"]
        == u"ST Surface temperature of soil K"
    )
    img.close()
def test_GLDAS_Noah_v21_025Ds_img_reading():
    """Read one v2.1 netCDF image via the dataset reader (full 1D grid)."""
    parameter = [
        "SoilMoi10_40cm_inst",
        "SoilMoi0_10cm_inst",
        "SoilTMP0_10cm_inst",
        "AvgSurfT_inst",
        "SWE_inst",
    ]
    img = GLDAS_Noah_v21_025Ds(
        data_path=os.path.join(
            os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data"
        ),
        parameter=parameter,
        subgrid=None,
        array_1D=True,
    )

    image = img.read(datetime(2015, 1, 1, 0))

    assert sorted(image.data.keys()) == sorted(parameter)
    assert image.timestamp == datetime(2015, 1, 1, 0)
    assert round(image.data["SoilMoi0_10cm_inst"][998529], 3) == 38.804
    assert round(image.data["SoilMoi10_40cm_inst"][998529], 3) == 131.699
    assert round(image.data["SoilTMP0_10cm_inst"][998529], 3) == 254.506
    assert round(image.data["AvgSurfT_inst"][998529], 3) == 235.553
    assert round(image.data["SWE_inst"][998529], 3) == 108.24
    assert image.lon.shape == (360 * 180 * (1 / 0.25) ** 2,)
    assert image.lon.shape == image.lat.shape
    assert sorted(list(image.metadata.keys())) == sorted(parameter)
    assert image.metadata["AvgSurfT_inst"]["units"] == u"K"
    assert (
        image.metadata["AvgSurfT_inst"]["long_name"]
        == u"Average Surface Skin temperature"
    )
    img.close()


def test_GLDAS_Noah_v21_025Ds_img_reading_landpoints():
    """Read one v2.1 netCDF image restricted to the land-point subgrid."""
    landgrid = GLDAS025LandGrid()

    parameter = [
        "SoilMoi10_40cm_inst",
        "SoilMoi0_10cm_inst",
        "SoilTMP0_10cm_inst",
        "AvgSurfT_inst",
        "SWE_inst",
    ]

    img = GLDAS_Noah_v21_025Ds(
        data_path=os.path.join(
            os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data"
        ),
        parameter=parameter,
        subgrid=landgrid,
        array_1D=True,
    )

    image = img.read(datetime(2015, 1, 1, 0))

    assert sorted(image.data.keys()) == sorted(parameter)
    # gpi for testing on the land grid: 527549, lat: 1.625, lon: -52.625
    assert image.timestamp == datetime(2015, 1, 1, 0)
    assert round(image.data["SoilMoi0_10cm_inst"][50000], 3) == 26.181
    assert round(image.data["SoilMoi10_40cm_inst"][50000], 3) == 84.558
    assert round(image.data["SoilTMP0_10cm_inst"][50000], 3) == 301.276
    assert round(image.data["AvgSurfT_inst"][50000], 3) == 294.863
    assert round(image.data["SWE_inst"][50000], 3) == 0
    # BUG FIX: lon was compared against the lat array size and vice versa
    # (both sizes are equal, so the outcome was the same, but the check was
    # comparing mismatched arrays)
    assert (image.lon.size, image.lat.size) == (
        landgrid.activearrlon.size,
        landgrid.activearrlat.size,
    )
    assert sorted(list(image.metadata.keys())) == sorted(parameter)
    assert image.metadata["AvgSurfT_inst"]["units"] == u"K"
    assert (
        image.metadata["AvgSurfT_inst"]["long_name"]
        == u"Average Surface Skin temperature"
    )
    img.close()
"AvgSurfT_inst", 89 | "SWE_inst", 90 | ] 91 | 92 | img = GLDAS_Noah_v21_025Ds( 93 | data_path=os.path.join( 94 | os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data" 95 | ), 96 | parameter=parameter, 97 | subgrid=landgrid, 98 | array_1D=True, 99 | ) 100 | 101 | image = img.read(datetime(2015, 1, 1, 0)) 102 | 103 | assert sorted(image.data.keys()) == sorted(parameter) 104 | # gpi for testing on the land grid: 527549, lat: 1.625, lon: -52.625 105 | assert image.timestamp == datetime(2015, 1, 1, 0) 106 | assert round(image.data["SoilMoi0_10cm_inst"][50000], 3) == 26.181 107 | assert round(image.data["SoilMoi10_40cm_inst"][50000], 3) == 84.558 108 | assert round(image.data["SoilTMP0_10cm_inst"][50000], 3) == 301.276 109 | assert round(image.data["AvgSurfT_inst"][50000], 3) == 294.863 110 | assert round(image.data["SWE_inst"][50000], 3) == 0 111 | assert (image.lon.size, image.lat.size) == ( 112 | landgrid.activearrlat.size, 113 | landgrid.activearrlon.size, 114 | ) 115 | assert sorted(list(image.metadata.keys())) == sorted(parameter) 116 | assert image.metadata["AvgSurfT_inst"]["units"] == u"K" 117 | assert ( 118 | image.metadata["AvgSurfT_inst"]["long_name"] 119 | == u"Average Surface Skin temperature" 120 | ) 121 | img.close() 122 | 123 | @pytest.mark.pygrib 124 | @pytest.mark.skipif(not pygrib_available, reason="Pygrib not installed.") 125 | def test_GLDAS_Noah_v1_025Ds_timestamps_for_daterange(): 126 | landgrid = GLDAS025LandGrid() 127 | parameter = ["086_L2", "086_L1", "085_L1", "138", "132", "051"] 128 | img = GLDAS_Noah_v1_025Ds( 129 | data_path=os.path.join( 130 | os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data" 131 | ), 132 | parameter=parameter, 133 | subgrid=None, 134 | array_1D=True, 135 | ) 136 | 137 | tstamps = img.tstamps_for_daterange( 138 | datetime(2000, 1, 1), datetime(2000, 1, 1) 139 | ) 140 | assert len(tstamps) == 8 141 | assert tstamps == [ 142 | datetime(2000, 1, 1, 0), 143 | datetime(2000, 1, 1, 3), 144 | 
def test_GLDAS_Noah_v21_025Ds_timestamps_for_daterange():
    """The v2.1 dataset reader returns eight 3-hourly timestamps per day."""
    params = [
        "SoilMoi10_40cm_inst",
        "SoilMoi0_10cm_inst",
        "SoilTMP0_10cm_inst",
        "AvgSurfT_inst",
        "SWE_inst",
    ]
    data_root = os.path.join(
        os.path.dirname(__file__), "test-data", "GLDAS_NOAH_image_data"
    )
    ds = GLDAS_Noah_v21_025Ds(
        data_path=data_root,
        parameter=params,
        subgrid=None,
        array_1D=True,
    )

    tstamps = ds.tstamps_for_daterange(
        datetime(2000, 1, 1), datetime(2000, 1, 1)
    )

    assert len(tstamps) == 8
    # every 3 hours starting at midnight
    assert tstamps == [datetime(2000, 1, 1, 3 * step) for step in range(8)]
    ds.close()


@pytest.mark.pygrib
@pytest.mark.skipif(not pygrib_available, reason="Pygrib not installed.")
def test_GLDAS_Noah_v1_025Img_img_reading_1D():
    """Read one v1 grib image as flat 1D arrays and check fixture values."""
    params = ["086_L2", "086_L1", "085_L1", "138", "132", "051"]
    grb_file = os.path.join(
        os.path.dirname(__file__),
        "test-data",
        "GLDAS_NOAH_image_data",
        "2015",
        "001",
        "GLDAS_NOAH025SUBP_3H.A2015001.0000.001.2015037193230.grb",
    )
    reader = GLDAS_Noah_v1_025Img(
        grb_file,
        parameter=params,
        subgrid=None,
        array_1D=True,
    )

    image = reader.read()

    assert sorted(image.data.keys()) == sorted(params)
    expected = {
        ("086_L1", 998529): 30.7344,
        ("086_L2", 998529): 93.138,
        ("085_L1", 206360): 285.19,
        ("138", 998529): 237.27,
        ("051", 998529): 0,
    }
    for (name, gpi), value in expected.items():
        assert image.data[name][gpi] == value
    assert image.lon.shape == (360 * 180 * (1 / 0.25) ** 2,)
    assert image.lon.shape == image.lat.shape
    reader.close()
def test_GLDAS_Noah_v21_025Img_img_reading_1D():
    """Read one v2.1 netCDF image as 1D arrays on the land-point subgrid."""
    landgrid = GLDAS025LandGrid()
    parameter = [
        "SoilMoi10_40cm_inst",
        "SoilMoi0_10cm_inst",
        "SoilTMP0_10cm_inst",
        "AvgSurfT_inst",
        "SWE_inst",
    ]
    img = GLDAS_Noah_v21_025Img(
        os.path.join(
            os.path.dirname(__file__),
            "test-data",
            "GLDAS_NOAH_image_data",
            "2015",
            "001",
            "GLDAS_NOAH025_3H.A20150101.0000.021.nc4",
        ),
        parameter=parameter,
        subgrid=landgrid,
        array_1D=True,
    )

    image = img.read()

    assert sorted(image.data.keys()) == sorted(parameter)
    # gpi for testing on the land grid: 527549, lat: 1.625, lon: -52.625
    assert round(image.data["SoilMoi0_10cm_inst"][50000], 3) == 26.181
    assert round(image.data["SoilMoi10_40cm_inst"][50000], 3) == 84.558
    assert round(image.data["SoilTMP0_10cm_inst"][50000], 3) == 301.276
    assert round(image.data["AvgSurfT_inst"][50000], 3) == 294.863
    assert round(image.data["SWE_inst"][50000], 3) == 0
    # BUG FIX: lon was compared against the lat array size and vice versa
    # (sizes are equal, so the outcome was unchanged, but the check now
    # compares the matching arrays)
    assert (image.lon.size, image.lat.size) == (
        landgrid.activearrlon.size,
        landgrid.activearrlat.size,
    )
    img.close()


@pytest.mark.pygrib
@pytest.mark.skipif(not pygrib_available, reason="Pygrib not installed.")
def test_GLDAS_Noah_v1_025Img_img_reading_2D():
    """Read one v1 grib image as 2D (720 x 1440) arrays."""
    parameter = ["086_L2", "086_L1", "085_L1", "138", "132", "051"]
    img = GLDAS_Noah_v1_025Img(
        os.path.join(
            os.path.dirname(__file__),
            "test-data",
            "GLDAS_NOAH_image_data",
            "2015",
            "001",
            "GLDAS_NOAH025SUBP_3H.A2015001.0000.001.2015037193230.grb",
        ),
        parameter=parameter,
    )

    image = img.read()

    assert image.data["086_L1"].shape == (720, 1440)
    assert image.lon[0, 0] == -179.875
    assert image.lon[0, 1439] == 179.875
    assert image.lat[0, 0] == 89.875
    assert image.lat[719, 0] == -89.875
    assert sorted(image.data.keys()) == sorted(parameter)
    assert image.data["086_L1"][26, 609] == 30.7344
    assert image.data["086_L2"][26, 609] == 93.138
    assert image.data["085_L1"][576, 440] == 285.19
    assert image.data["138"][26, 609] == 237.27
    assert image.data["051"][26, 609] == 0
    assert image.lon.shape == (720, 1440)
    assert image.lon.shape == image.lat.shape
    img.close()
def test_GLDAS_Noah_v21_025Img_img_reading_2D():
    """Read one v2.1 netCDF image as 2D (720 x 1440) arrays."""
    params = [
        "SoilMoi10_40cm_inst",
        "SoilMoi0_10cm_inst",
        "SoilTMP0_10cm_inst",
        "AvgSurfT_inst",
        "SWE_inst",
    ]
    nc_file = os.path.join(
        os.path.dirname(__file__),
        "test-data",
        "GLDAS_NOAH_image_data",
        "2015",
        "001",
        "GLDAS_NOAH025_3H.A20150101.0000.021.nc4",
    )
    reader = GLDAS_Noah_v21_025Img(nc_file, parameter=params)

    image = reader.read()

    # grid geometry: full 0.25 deg global grid
    assert image.data["SoilMoi0_10cm_inst"].shape == (720, 1440)
    assert image.lon.shape == (720, 1440)
    assert image.lon.shape == image.lat.shape
    assert image.lon[0, 0] == -179.875
    assert image.lon[0, 1439] == 179.875
    assert image.lat[0, 0] == 89.875
    assert image.lat[719, 0] == -89.875

    # fixture values at grid cell (26, 609), rounded to 3 decimals
    assert sorted(image.data.keys()) == sorted(params)
    expected = {
        "SoilMoi0_10cm_inst": 38.804,
        "SoilMoi10_40cm_inst": 131.699,
        "SoilTMP0_10cm_inst": 254.506,
        "AvgSurfT_inst": 235.553,
        "SWE_inst": 108.24,
    }
    for name, value in expected.items():
        assert round(image.data[name][26, 609], 3) == value
    reader.close()
import warnings
import numpy as np
import os

try:
    import pygrib
    pygrib_available = True
except ImportError:
    pygrib_available = False

from pygeobase.io_base import ImageBase, MultiTemporalImageBase
from pygeobase.object_base import Image
from pynetcf.time_series import GriddedNcOrthoMultiTs

from datetime import timedelta

from gldas.grid import GLDAS025Cellgrid
from netCDF4 import Dataset
from pygeogrids.netcdf import load_grid
from gldas.utils import deprecated, PygribError


class GLDAS_Noah_v2_025Img(ImageBase):
    """
    Class for reading one GLDAS Noah v2.x nc file in 0.25 deg grid.
    """

    def __init__(
        self,
        filename,
        mode="r",
        parameter="SoilMoi0_10cm_inst",
        subgrid=None,
        array_1D=False,
    ):
        """
        Parameters
        ----------
        filename : str
            Filename of the GLDAS nc file.
        mode : str, optional
            Mode of opening the file, only 'r' is implemented at the moment.
        parameter : str or list of str, optional
            One or a list of parameters to read, see GLDAS v2.1 documentation
            for more information (default: 'SoilMoi0_10cm_inst').
        subgrid : pygeogrids.CellGrid, optional
            Subgrid of the global GLDAS grid to use for reading image data
            (e.g. only land points).
        array_1D : bool, optional
            If set then the data is read into 1D arrays.
            Needed for some legacy code.
        """
        super(GLDAS_Noah_v2_025Img, self).__init__(filename, mode=mode)

        if not isinstance(parameter, list):
            parameter = [parameter]

        self.parameters = parameter
        # fill rows for the latitudes south of the GLDAS image extent so the
        # flattened image matches the full 0.25 deg global grid
        # (1440 columns * 120 rows)
        self.fill_values = np.repeat(9999.0, 1440 * 120)
        self.grid = GLDAS025Cellgrid() if not subgrid else subgrid
        self.array_1D = array_1D

    def read(self, timestamp=None):
        """
        Read the selected parameters of one GLDAS image and their metadata.

        Parameters
        ----------
        timestamp : datetime.datetime, optional
            Timestamp attached to the returned image.

        Returns
        -------
        image : pygeobase.object_base.Image
            Image holding one array per requested parameter: 1D arrays if
            ``array_1D`` is set, otherwise 2D (720 x 1440) arrays.
        """
        return_img = {}
        return_metadata = {}

        try:
            dataset = Dataset(self.filename)
        except IOError as e:
            raise IOError(f"Error opening file {self.filename}") from e

        param_names = list(self.parameters)

        for parameter, variable in dataset.variables.items():
            if parameter in param_names:
                param_metadata = {}
                for attrname in variable.ncattrs():
                    if attrname in ["long_name", "units"]:
                        param_metadata.update(
                            {str(attrname): getattr(variable, attrname)}
                        )

                param_data = dataset.variables[parameter][:]
                np.ma.set_fill_value(param_data, 9999)
                # prepend the fill rows so the flattened array covers the
                # whole global 0.25 deg grid, then select the active gpis
                param_data = np.concatenate(
                    (
                        self.fill_values,
                        np.ma.getdata(param_data.filled()).flatten(),
                    )
                )

                return_img.update(
                    {str(parameter): param_data[self.grid.activegpis]}
                )
                return_metadata.update({str(parameter): param_metadata})

        # Check for corrupt files: requested parameters missing from the file
        # are replaced by NaN images and reported in the metadata.
        for parameter in param_names:
            if parameter not in return_img:
                path, thefile = os.path.split(self.filename)
                print(
                    "%s in %s is corrupt - filling "
                    "image with NaN values" % (parameter, thefile)
                )
                # BUG FIX: np.empty(...).fill(np.nan) returns None (fill is
                # in-place); np.full creates the NaN array directly.
                return_img[parameter] = np.full(self.grid.n_gpi, np.nan)
                # BUG FIX: the key was never initialized (KeyError) and
                # append() was called without an argument (TypeError).
                return_metadata.setdefault("corrupt_parameters", []).append(
                    parameter
                )

        dataset.close()

        if self.array_1D:
            return Image(
                self.grid.activearrlon,
                self.grid.activearrlat,
                return_img,
                return_metadata,
                timestamp,
            )
        else:
            for key in return_img:
                return_img[key] = np.flipud(
                    return_img[key].reshape((720, 1440))
                )

            return Image(
                np.flipud(self.grid.activearrlon.reshape((720, 1440))),
                np.flipud(self.grid.activearrlat.reshape((720, 1440))),
                return_img,
                return_metadata,
                timestamp,
            )

    def write(self, data):
        raise NotImplementedError()

    def flush(self):
        pass

    def close(self):
        pass


class GLDAS_Noah_v21_025Img(GLDAS_Noah_v2_025Img):
    """
    Deprecated alias of :class:`GLDAS_Noah_v2_025Img`.
    """

    def __init__(
        self,
        filename,
        mode="r",
        parameter="SoilMoi0_10cm_inst",
        subgrid=None,
        array_1D=False,
    ):
        # BUG FIX: the concatenated warning text was missing separating
        # spaces ("generalGLDAS_...", "files.The").
        warnings.warn(
            "GLDAS_Noah_v21_025Img is outdated and replaced by the general "
            "GLDAS_Noah_v2_025Img class to read gldas v2.0 and v2.1 "
            "0.25 DEG netcdf files. "
            "The old class will be removed soon.",
            category=DeprecationWarning,
        )

        super(GLDAS_Noah_v21_025Img, self).__init__(
            filename=filename,
            mode=mode,
            parameter=parameter,
            subgrid=subgrid,
            array_1D=array_1D,
        )
185 | 186 | Parameters 187 | ---------- 188 | filename: string 189 | filename of the GLDAS grib file 190 | mode: string, optional 191 | mode of opening the file, only 'r' is implemented at the moment 192 | parameter : string or list, optional 193 | one or list of ['001', '011', '032', '051', '057', '065', '071', 194 | '085_L1', '085_L2', '085_L3', '085_L4', 195 | '086_L1', '086_L2', '086_L3', '086_L4', 196 | '099', '111', '112', '121', '122', 197 | '131', '132', '138', '155', 198 | '204', '205', '234', '235'] 199 | parameters to read, see GLDAS documentation for more information 200 | Default : '086_L1' 201 | subgrid : Cell Grid 202 | Subgrid of the global GLDAS Grid to use for reading image data (e.g only land points) 203 | array_1D: boolean, optional 204 | if set then the data is read into 1D arrays. 205 | Needed for some legacy code. 206 | """ 207 | 208 | @deprecated(message="GLDAS Noah v1 data is deprecated, v2 should be used.") 209 | def __init__( 210 | self, 211 | filename, 212 | mode="r", 213 | parameter="086_L1", 214 | subgrid=None, 215 | array_1D=False, 216 | ): 217 | if not pygrib_available: 218 | raise PygribError 219 | 220 | super(GLDAS_Noah_v1_025Img, self).__init__(filename, mode=mode) 221 | 222 | if type(parameter) != list: 223 | parameter = [parameter] 224 | self.parameters = parameter 225 | self.fill_values = np.repeat(9999.0, 1440 * 120) 226 | self.grid = subgrid if subgrid else GLDAS025Cellgrid() 227 | self.array_1D = array_1D 228 | 229 | def read(self, timestamp=None): 230 | 231 | return_img = {} 232 | return_metadata = {} 233 | layers = {"085": 1, "086": 1} 234 | 235 | try: 236 | grbs = pygrib.open(self.filename) 237 | except IOError as e: 238 | print(e) 239 | print(" ".join([self.filename, "can not be opened"])) 240 | raise e 241 | 242 | ids = [] 243 | for parameter in self.parameters: 244 | ids.append(int(parameter.split("_")[0])) 245 | parameter_ids = np.unique(np.array(ids)) 246 | 247 | for message in grbs: 248 | if 
message["indicatorOfParameter"] in parameter_ids: 249 | parameter_id = "{:03d}".format(message["indicatorOfParameter"]) 250 | 251 | param_metadata = {} 252 | # read metadata in any case 253 | param_metadata["units"] = message["units"] 254 | param_metadata["long_name"] = message["parameterName"] 255 | 256 | if parameter_id in layers.keys(): 257 | parameter = "_".join( 258 | (parameter_id, "L" + str(layers[parameter_id])) 259 | ) 260 | 261 | if parameter in self.parameters: 262 | param_data = np.concatenate( 263 | ( 264 | self.fill_values, 265 | np.ma.getdata(message["values"]).flatten(), 266 | ) 267 | ) 268 | 269 | return_img[parameter] = param_data[ 270 | self.grid.activegpis 271 | ] 272 | return_metadata[parameter] = param_metadata 273 | layers[parameter_id] += 1 274 | 275 | else: 276 | parameter = parameter_id 277 | param_data = np.concatenate( 278 | ( 279 | self.fill_values, 280 | np.ma.getdata(message["values"]).flatten(), 281 | ) 282 | ) 283 | return_img[parameter] = param_data[self.grid.activegpis] 284 | return_metadata[parameter] = param_metadata 285 | 286 | grbs.close() 287 | for parameter in self.parameters: 288 | try: 289 | return_img[parameter] 290 | except KeyError: 291 | print( 292 | self.filename[self.filename.rfind("GLDAS") :], 293 | "corrupt file - filling image with nan values", 294 | ) 295 | return_img[parameter] = np.empty(self.grid.n_gpi) 296 | return_img[parameter].fill(np.nan) 297 | 298 | if self.array_1D: 299 | return Image( 300 | self.grid.activearrlon, 301 | self.grid.activearrlat, 302 | return_img, 303 | return_metadata, 304 | timestamp, 305 | ) 306 | else: 307 | for key in return_img: 308 | return_img[key] = np.flipud( 309 | return_img[key].reshape((720, 1440)) 310 | ) 311 | 312 | lons = np.flipud(self.grid.activearrlon.reshape((720, 1440))) 313 | lats = np.flipud(self.grid.activearrlat.reshape((720, 1440))) 314 | 315 | return Image(lons, lats, return_img, return_metadata, timestamp) 316 | 317 | def write(self, data): 318 | raise 
NotImplementedError() 319 | 320 | def flush(self): 321 | pass 322 | 323 | def close(self): 324 | pass 325 | 326 | class GLDAS_Noah_v21_025Ds(MultiTemporalImageBase): 327 | """ 328 | Class for reading GLDAS v2.1 images in nc format. 329 | 330 | Parameters 331 | ---------- 332 | data_path : string 333 | Path to the nc files 334 | parameter : string or list, optional 335 | one or list of parameters to read, see GLDAS v2.1 documentation 336 | for more information (default: 'SoilMoi0_10cm_inst'). 337 | subgrid : Cell Grid 338 | Subgrid of the global GLDAS Grid to use for reading image data (e.g only land points) 339 | array_1D: boolean, optional 340 | If set then the data is read into 1D arrays. 341 | Needed for some legacy code. 342 | """ 343 | 344 | def __init__( 345 | self, 346 | data_path, 347 | parameter="SoilMoi0_10cm_inst", 348 | subgrid=None, 349 | array_1D=False, 350 | ): 351 | ioclass_kws = { 352 | "parameter": parameter, 353 | "subgrid": subgrid, 354 | "array_1D": array_1D, 355 | } 356 | 357 | sub_path = ["%Y", "%j"] 358 | filename_templ = "GLDAS_NOAH025_3H*.A{datetime}.*.nc4" 359 | 360 | super(GLDAS_Noah_v21_025Ds, self).__init__( 361 | data_path, 362 | GLDAS_Noah_v21_025Img, 363 | fname_templ=filename_templ, 364 | datetime_format="%Y%m%d.%H%M", 365 | subpath_templ=sub_path, 366 | exact_templ=False, 367 | ioclass_kws=ioclass_kws, 368 | ) 369 | 370 | def tstamps_for_daterange(self, start_date, end_date): 371 | """ 372 | return timestamps for daterange, 373 | 374 | Parameters 375 | ---------- 376 | start_date: datetime 377 | start of date range 378 | end_date: datetime 379 | end of date range 380 | 381 | Returns 382 | ------- 383 | timestamps : list 384 | list of datetime objects of each available image between 385 | start_date and end_date 386 | """ 387 | img_offsets = np.array( 388 | [ 389 | timedelta(hours=0), 390 | timedelta(hours=3), 391 | timedelta(hours=6), 392 | timedelta(hours=9), 393 | timedelta(hours=12), 394 | timedelta(hours=15), 395 | 
timedelta(hours=18), 396 | timedelta(hours=21), 397 | ] 398 | ) 399 | 400 | timestamps = [] 401 | diff = end_date - start_date 402 | for i in range(diff.days + 1): 403 | daily_dates = start_date + timedelta(days=i) + img_offsets 404 | timestamps.extend(daily_dates.tolist()) 405 | 406 | return timestamps 407 | 408 | 409 | class GLDAS_Noah_v1_025Ds(MultiTemporalImageBase): 410 | """ 411 | Class for reading GLDAS images in grib format. 412 | 413 | Parameters 414 | ---------- 415 | data_path : string 416 | path to the grib files 417 | parameter : string or list, optional 418 | one or list of ['001', '011', '032', '051', '057', '065', '071', 419 | '085_L1', '085_L2', '085_L3', '085_L4', 420 | '086_L1', '086_L2', '086_L3', '086_L4', 421 | '099', '111', '112', '121', '122', '131', '132', '138', 422 | '155', '204', '205', '234', '235'] 423 | parameters to read, see GLDAS documentation for more information 424 | Default : '086_L1' 425 | subgrid : Cell Grid 426 | Subgrid of the global GLDAS Grid to use for reading image data (e.g only land points) 427 | array_1D: boolean, optional 428 | if set then the data is read into 1D arrays. 429 | Needed for some legacy code. 
430 | """ 431 | 432 | @deprecated("GLDAS Noah v1 data is deprecated, v2 should be used.") 433 | def __init__( 434 | self, data_path, parameter="086_L1", subgrid=None, array_1D=False 435 | ): 436 | if not pygrib_available: 437 | raise PygribError 438 | 439 | ioclass_kws = { 440 | "parameter": parameter, 441 | "subgrid": subgrid, 442 | "array_1D": array_1D, 443 | } 444 | 445 | sub_path = ["%Y", "%j"] 446 | filename_templ = "GLDAS_NOAH025SUBP_3H.A{datetime}.001.*.grb" 447 | 448 | super(GLDAS_Noah_v1_025Ds, self).__init__( 449 | data_path, 450 | GLDAS_Noah_v1_025Img, 451 | fname_templ=filename_templ, 452 | datetime_format="%Y%j.%H%M", 453 | subpath_templ=sub_path, 454 | exact_templ=False, 455 | ioclass_kws=ioclass_kws, 456 | ) 457 | 458 | def tstamps_for_daterange(self, start_date, end_date): 459 | """ 460 | return timestamps for daterange, 461 | 462 | Parameters 463 | ---------- 464 | start_date: datetime 465 | start of date range 466 | end_date: datetime 467 | end of date range 468 | 469 | Returns 470 | ------- 471 | timestamps : list 472 | list of datetime objects of each available image between 473 | start_date and end_date 474 | """ 475 | img_offsets = np.array( 476 | [ 477 | timedelta(hours=0), 478 | timedelta(hours=3), 479 | timedelta(hours=6), 480 | timedelta(hours=9), 481 | timedelta(hours=12), 482 | timedelta(hours=15), 483 | timedelta(hours=18), 484 | timedelta(hours=21), 485 | ] 486 | ) 487 | 488 | timestamps = [] 489 | diff = end_date - start_date 490 | for i in range(diff.days + 1): 491 | daily_dates = start_date + timedelta(days=i) + img_offsets 492 | timestamps.extend(daily_dates.tolist()) 493 | 494 | return timestamps 495 | 496 | 497 | class GLDASTs(GriddedNcOrthoMultiTs): 498 | def __init__(self, ts_path, grid_path=None, **kwargs): 499 | """ 500 | Class for reading GLDAS time series after reshuffling. 
501 | 502 | Parameters 503 | ---------- 504 | ts_path : str 505 | Directory where the netcdf time series files are stored 506 | grid_path : str, optional (default: None) 507 | Path to grid file, that is used to organize the location of time 508 | series to read. If None is passed, grid.nc is searched for in the 509 | ts_path. 510 | 511 | Optional keyword arguments that are passed to the Gridded Base: 512 | ------------------------------------------------------------------------ 513 | parameters : list, optional (default: None) 514 | Specific variable names to read, if None are selected, all are read. 515 | offsets : dict, optional (default:None) 516 | Offsets (values) that are added to the parameters (keys) 517 | scale_factors : dict, optional (default:None) 518 | Offset (value) that the parameters (key) is multiplied with 519 | ioclass_kws: dict 520 | Optional keyword arguments to pass to OrthoMultiTs class: 521 | ---------------------------------------------------------------- 522 | read_bulk : boolean, optional (default:False) 523 | if set to True the data of all locations is read into memory, 524 | and subsequent calls to read_ts read from the cache and not from disk 525 | this makes reading complete files faster# 526 | read_dates : boolean, optional (default:False) 527 | if false dates will not be read automatically but only on specific 528 | request useable for bulk reading because currently the netCDF 529 | num2date routine is very slow for big datasets 530 | """ 531 | if grid_path is None: 532 | grid_path = os.path.join(ts_path, "grid.nc") 533 | 534 | grid = load_grid(grid_path) 535 | super(GLDASTs, self).__init__(ts_path, grid, **kwargs) 536 | --------------------------------------------------------------------------------