├── requirements.txt ├── requirements_conda.txt ├── climate_toolbox ├── geo │ ├── __init__.py │ └── distance.py ├── utils │ ├── __init__.py │ └── utils.py ├── aggregations │ ├── __init__.py │ └── aggregations.py ├── transformations │ ├── __init__.py │ └── transformations.py ├── io │ ├── __init__.py │ └── io.py ├── climate_toolbox.py └── __init__.py ├── docs ├── authors.rst ├── history.rst ├── readme.rst ├── contributing.rst ├── modules.rst ├── usage.rst ├── index.rst ├── climate_toolbox.io.rst ├── environment.yml ├── climate_toolbox.geo.rst ├── climate_toolbox.utils.rst ├── climate_toolbox.aggregations.rst ├── climate_toolbox.transformations.rst ├── climate_toolbox.rst ├── installation.rst ├── Makefile ├── make.bat └── conf.py ├── .coveralls.yml ├── tests ├── __init__.py └── test_climate_toolbox.py ├── .readthedocs.yml ├── pytest.ini ├── .github ├── PULL_REQUEST_TEMPLATE.md ├── ISSUE_TEMPLATE.md └── workflows │ └── pythonpackage.yaml ├── requirements_dev.txt ├── AUTHORS.rst ├── MANIFEST.in ├── .editorconfig ├── setup.cfg ├── description.py ├── tox.ini ├── HISTORY.rst ├── .gitignore ├── LICENSE ├── README.rst ├── setup.py ├── Makefile ├── .travis.yml ├── CONTRIBUTING.rst └── travis_pypi_setup.py /requirements.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements_conda.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /climate_toolbox/geo/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /climate_toolbox/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/climate_toolbox/aggregations/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /climate_toolbox/transformations/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../AUTHORS.rst 2 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../HISTORY.rst 2 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | -------------------------------------------------------------------------------- /.coveralls.yml: -------------------------------------------------------------------------------- 1 | service_name: travis-ci 2 | parallel: true -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. 
include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /climate_toolbox/io/__init__.py: -------------------------------------------------------------------------------- 1 | from .io import standardize_climate_data, load_bcsd 2 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """Unit test package for climate_toolbox.""" 4 | -------------------------------------------------------------------------------- /climate_toolbox/climate_toolbox.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file describes the process for computing weighted climate data 3 | """ 4 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | python: 4 | version: 3.7 5 | 6 | conda: 7 | environment: docs/environment.yml 8 | -------------------------------------------------------------------------------- /docs/modules.rst: -------------------------------------------------------------------------------- 1 | climate_toolbox 2 | =============== 3 | 4 | .. 
toctree:: 5 | :maxdepth: 4 6 | 7 | climate_toolbox 8 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | Usage 3 | ===== 4 | 5 | To use climate_toolbox in a project:: 6 | 7 | import climate_toolbox 8 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | testpaths= ./climate_toolbox ./tests 3 | addopts= --cov=climate_toolbox --cov=tests --doctest-modules --cov-report term-missing -------------------------------------------------------------------------------- /climate_toolbox/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """Top-level package for climate_toolbox.""" 4 | 5 | __author__ = """Justin Simcock""" 6 | __email__ = "jsimcock@rhg.com" 7 | __version__ = "0.1.5" 8 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | - [ ] closes #xxxx 2 | - [ ] tests added / passed 3 | - [ ] docs reflect changes 4 | - [ ] passes ``flake8 climate_toolbox tests docs`` 5 | - [ ] entry in HISTORY.rst 6 | 7 | [summarize your pull request here] -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | bumpversion==0.5.3 2 | wheel==0.29.0 3 | flake8 4 | tox==3.25.1 5 | coverage==4.4.1 6 | Sphinx==1.6.3 7 | PyYAML>=4.2b1 8 | pytest==7.1.2 9 | pytest-runner==2.11.1 10 | pytest-cov 11 | numpy 12 | pandas 13 | scipy 14 | toolz 15 | xarray 16 | -------------------------------------------------------------------------------- /AUTHORS.rst: 
-------------------------------------------------------------------------------- 1 | ======= 2 | Credits 3 | ======= 4 | 5 | This repository is a project of the `Climate Impact Lab `_ 6 | 7 | Development Lead 8 | ---------------- 9 | 10 | * Justin Simcock 11 | 12 | Contributors 13 | ------------ 14 | 15 | None yet. Why not be the first? 16 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS.rst 2 | include CONTRIBUTING.rst 3 | include HISTORY.rst 4 | include LICENSE 5 | include README.rst 6 | 7 | recursive-include tests * 8 | recursive-exclude * __pycache__ 9 | recursive-exclude * *.py[co] 10 | 11 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif 12 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | indent_style = space 7 | indent_size = 4 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | charset = utf-8 11 | end_of_line = lf 12 | 13 | [*.bat] 14 | indent_style = tab 15 | end_of_line = crlf 16 | 17 | [LICENSE] 18 | insert_final_newline = false 19 | 20 | [Makefile] 21 | indent_style = tab 22 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | * climate_toolbox version: 2 | * Python version: 3 | * Operating System: 4 | 5 | ### Description 6 | 7 | Describe what you were trying to get done. 8 | Tell us what happened, what went wrong, and what you expected to happen. 9 | 10 | ### What I Did 11 | 12 | ``` 13 | Paste the command(s) you ran and the output. 14 | If there was a crash, please include the traceback here. 
15 | ``` 16 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to climate_toolbox's documentation! 2 | ==================================================================================================== 3 | 4 | Contents: 5 | 6 | .. toctree:: 7 | :maxdepth: 2 8 | 9 | readme 10 | installation 11 | usage 12 | modules 13 | contributing 14 | authors 15 | history 16 | 17 | Indices and tables 18 | ================== 19 | 20 | * :ref:`genindex` 21 | * :ref:`modindex` 22 | * :ref:`search` 23 | -------------------------------------------------------------------------------- /docs/climate_toolbox.io.rst: -------------------------------------------------------------------------------- 1 | climate\_toolbox.io package 2 | =========================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | climate\_toolbox.io.io module 8 | ----------------------------- 9 | 10 | .. automodule:: climate_toolbox.io.io 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. 
automodule:: climate_toolbox.io 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /docs/environment.yml: -------------------------------------------------------------------------------- 1 | name: rtd 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - python=3.6 7 | - pandas 8 | - xarray 9 | - scipy 10 | - numpy 11 | - toolz 12 | - pip: 13 | - datafs==0.7.1 14 | - bumpversion==0.5.3 15 | - wheel==0.29.0 16 | - flake8==3.3.0 17 | - tox==2.7.0 18 | - coverage==4.4.1 19 | - Sphinx==1.6.3 20 | - PyYAML>=4.2b1 21 | - pytest==3.1.3 22 | - pytest-runner==2.11.1 23 | - pytest-cov==2.7.1 24 | -------------------------------------------------------------------------------- /docs/climate_toolbox.geo.rst: -------------------------------------------------------------------------------- 1 | climate\_toolbox.geo package 2 | ============================ 3 | 4 | Submodules 5 | ---------- 6 | 7 | climate\_toolbox.geo.distance module 8 | ------------------------------------ 9 | 10 | .. automodule:: climate_toolbox.geo.distance 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. automodule:: climate_toolbox.geo 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /docs/climate_toolbox.utils.rst: -------------------------------------------------------------------------------- 1 | climate\_toolbox.utils package 2 | ============================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | climate\_toolbox.utils.utils module 8 | ----------------------------------- 9 | 10 | .. automodule:: climate_toolbox.utils.utils 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. 
automodule:: climate_toolbox.utils 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /docs/climate_toolbox.aggregations.rst: -------------------------------------------------------------------------------- 1 | climate\_toolbox.aggregations package 2 | ===================================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | climate\_toolbox.aggregations.aggregations module 8 | ------------------------------------------------- 9 | 10 | .. automodule:: climate_toolbox.aggregations.aggregations 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. automodule:: climate_toolbox.aggregations 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /docs/climate_toolbox.transformations.rst: -------------------------------------------------------------------------------- 1 | climate\_toolbox.transformations package 2 | ======================================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | climate\_toolbox.transformations.transformations module 8 | ------------------------------------------------------- 9 | 10 | .. automodule:: climate_toolbox.transformations.transformations 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. 
automodule:: climate_toolbox.transformations 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 0.1.5 3 | commit = True 4 | tag = True 5 | 6 | [bumpversion:file:setup.py] 7 | search = version='{current_version}' 8 | replace = version='{new_version}' 9 | 10 | [bumpversion:file:climate_toolbox/__init__.py] 11 | search = __version__ = '{current_version}' 12 | replace = __version__ = '{new_version}' 13 | 14 | [bdist_wheel] 15 | universal = 1 16 | 17 | [flake8] 18 | exclude = docs 19 | ignore = E203,E266,E402,E501,W503,F401,C901 20 | max-line-length = 100 21 | max-complexity = 18 22 | select = B,C,E,F,W,T4,B9 23 | 24 | 25 | [aliases] 26 | test = pytest 27 | 28 | -------------------------------------------------------------------------------- /description.py: -------------------------------------------------------------------------------- 1 | 2 | with open('README.rst') as readme_file: 3 | readme = readme_file.read() 4 | 5 | with open('HISTORY.rst') as history_file: 6 | history = history_file.read() 7 | 8 | import re 9 | 10 | long_description = re.sub( 11 | r':py:[a-z]+:`([^`]+)`', 12 | r'``\1``', 13 | readme + '\n\n' + history) 14 | 15 | long_description = re.sub( 16 | r':issue:`([^`]+)`', 17 | r'`GH issue #\1 `_', 18 | long_description) 19 | 20 | long_description = re.sub( 21 | r':pull:`([^`]+)`', 22 | r'`GH PR #\1 `_', 23 | long_description) 24 | -------------------------------------------------------------------------------- /docs/climate_toolbox.rst: -------------------------------------------------------------------------------- 1 | climate\_toolbox package 2 | ======================== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. 
toctree:: 8 | 9 | climate_toolbox.geo 10 | climate_toolbox.aggregations 11 | climate_toolbox.io 12 | climate_toolbox.utils 13 | climate_toolbox.transformations 14 | 15 | Submodules 16 | ---------- 17 | 18 | climate\_toolbox.climate\_toolbox module 19 | ---------------------------------------- 20 | 21 | .. automodule:: climate_toolbox.climate_toolbox 22 | :members: 23 | :undoc-members: 24 | :show-inheritance: 25 | 26 | 27 | Module contents 28 | --------------- 29 | 30 | .. automodule:: climate_toolbox 31 | :members: 32 | :undoc-members: 33 | :show-inheritance: 34 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py27, py34, py35, flake8 3 | 4 | [travis] 5 | python = 6 | 3.5: py35 7 | 3.4: py34 8 | 2.7: py27 9 | 10 | [testenv:flake8] 11 | basepython=python 12 | deps=flake8 13 | commands=flake8 climate_toolbox 14 | 15 | [testenv] 16 | setenv = 17 | PYTHONPATH = {toxinidir} 18 | deps = 19 | -r{toxinidir}/requirements_dev.txt 20 | -r{toxinidir}/requirements_conda.txt 21 | commands = 22 | pip install -U pip 23 | pytest --basetemp={envtmpdir} 24 | 25 | ; If you want to make tox run the tests with the same versions, create a 26 | ; requirements.txt with the pinned versions and uncomment the following lines: 27 | ; deps = 28 | ; -r{toxinidir}/requirements_dev.txt 29 | ; -r{toxinidir}/requirements_conda.txt 30 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | 2 | History 3 | ======= 4 | 5 | 0.1.4 (current version) 6 | ----------------------- 7 | 8 | * Support vectorized indexing for xarray >= 0.10 in :py:func:`climate_toolbox.climate_toolbox._reindex_spatial_data_to_regions` (:issue:`10`) 9 | * Support iteratively increasing bounding box in :py:func:`~climate_toolbox.climate_toolbox._fill_holes_xr` 
(:issue:`11`). 10 | * Support multiple interpolation methods (linear and cubic) in :py:func:`~climate_toolbox.climate_toolbox._fill_holes_xr` (:issue:`12`). 11 | * Fix bug causing tests to pass no matter what 12 | 13 | 0.1.3 (2017-08-04) 14 | ------------------ 15 | 16 | * Support passing a dataset (not just a filepath) into ``load_baseline`` and ``load_bcsd`` (:issue:`4`) 17 | 18 | 0.1.2 (2017-07-25) 19 | ------------------ 20 | 21 | * merge in bug fixes 22 | 23 | 0.1.1 (2017-07-25) 24 | ----------------------- 25 | 26 | * Various bug fixes (see :issue:`2`) 27 | 28 | 29 | 0.1.0 (2017-07-24) 30 | ------------------ 31 | 32 | * First release on PyPI. 33 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | 55 | # Sphinx documentation 56 | docs/_build/ 57 | 58 | # PyBuilder 59 | target/ 60 | 61 | # pyenv python configuration file 62 | .python-version 63 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | MIT License 3 | 4 | Copyright (c) 2017, ClimateImpactLab 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 7 | 8 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 9 | 10 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
11 | 12 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ================== 2 | climate_toolbox 3 | ================== 4 | 5 | 6 | .. image:: https://img.shields.io/pypi/v/climate_toolbox.svg 7 | :target: https://pypi.python.org/pypi/climate_toolbox 8 | 9 | .. image:: https://img.shields.io/travis/ClimateImpactLab/climate_toolbox/master.svg?logo=travis 10 | :target: https://travis-ci.org/ClimateImpactLab/climate_toolbox 11 | 12 | .. image:: https://readthedocs.org/projects/climate-toolbox/badge/?version=latest 13 | :target: https://climate-toolbox.readthedocs.io/en/latest/?badge=latest 14 | :alt: Documentation Status 15 | 16 | .. image:: https://pyup.io/repos/github/ClimateImpactLab/climate_toolbox/shield.svg 17 | :target: https://pyup.io/repos/github/ClimateImpactLab/climate_toolbox/ 18 | :alt: Updates 19 | 20 | 21 | Tools for climate data wrangling 22 | 23 | 24 | * Free software: MIT license 25 | * Documentation: https://climate-toolbox.readthedocs.io. 
26 | 27 | 28 | 29 | Features 30 | -------- 31 | 32 | * TODO 33 | 34 | Credits 35 | --------- 36 | ``climate_toolbox`` was created by the `Climate Impact Lab `_ 37 | -------------------------------------------------------------------------------- /.github/workflows/pythonpackage.yaml: -------------------------------------------------------------------------------- 1 | name: Python package 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - "master" 8 | 9 | jobs: 10 | build: 11 | 12 | runs-on: ubuntu-latest 13 | strategy: 14 | matrix: 15 | python-version: ["3.x"] 16 | defaults: 17 | run: 18 | shell: bash 19 | 20 | steps: 21 | - uses: actions/checkout@v3 22 | - name: Set up Python ${{ matrix.python-version }} 23 | uses: actions/setup-python@v3 24 | with: 25 | python-version: ${{ matrix.python-version }} 26 | cache: 'pip' 27 | cache-dependency-path: 'requirements_dev.txt' 28 | - name: Install dependencies 29 | run: | 30 | pip install -r requirements_dev.txt 31 | - name: Install package 32 | run: | 33 | pip install . 34 | - name: Format check with flake8 35 | run: | 36 | flake8 37 | - name: Test with pytest 38 | run: | 39 | pytest -v --cov climate_toolbox --cov-report term-missing --cov-report xml 40 | - name: Upload coverage to Codecov 41 | uses: codecov/codecov-action@v2 42 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Installation 5 | ============ 6 | 7 | 8 | Stable release 9 | -------------- 10 | 11 | To install climate_toolbox, run this command in your terminal: 12 | 13 | .. code-block:: console 14 | 15 | $ pip install climate_toolbox 16 | 17 | This is the preferred method to install climate_toolbox, as it will always install the most recent stable release. 
18 | 19 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide 20 | you through the process. 21 | 22 | .. _pip: https://pip.pypa.io 23 | .. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/ 24 | 25 | 26 | From sources 27 | ------------ 28 | 29 | The sources for climate_toolbox can be downloaded from the `Github repo`_. 30 | 31 | You can either clone the public repository: 32 | 33 | .. code-block:: console 34 | 35 | $ git clone git://github.com/ClimateImpactLab/climate_toolbox 36 | 37 | Or download the `tarball`_: 38 | 39 | .. code-block:: console 40 | 41 | $ curl -OL https://github.com/ClimateImpactLab/climate_toolbox/tarball/master 42 | 43 | Once you have a copy of the source, you can install it with: 44 | 45 | .. code-block:: console 46 | 47 | $ python setup.py install 48 | 49 | 50 | .. _Github repo: https://github.com/ClimateImpactLab/climate_toolbox 51 | .. _tarball: https://github.com/ClimateImpactLab/climate_toolbox/tarball/master 52 | -------------------------------------------------------------------------------- /climate_toolbox/io/io.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | 3 | from climate_toolbox.utils.utils import rename_coords_to_lon_and_lat, convert_lons_split 4 | 5 | 6 | def standardize_climate_data(ds): 7 | """ 8 | Read climate data and standardize units to: 9 | - lon and lat, 10 | - lon to -180 to 180 and 11 | 12 | Parameters 13 | ---------- 14 | ds: xr.Dataset 15 | 16 | Returns 17 | ------- 18 | xr.Dataset 19 | """ 20 | 21 | ds = rename_coords_to_lon_and_lat(ds) 22 | ds = convert_lons_split(ds, lon_name="lon") 23 | 24 | return ds 25 | 26 | 27 | def load_bcsd(fp, varname, lon_name="lon", broadcast_dims=("time",)): 28 | """ 29 | Read and prepare climate data 30 | 31 | After reading data, this method also fills NA values using linear 32 | interpolation, and standardizes longitude to -180:180 33 | 34 | Parameters 
35 | ---------- 36 | fp: str 37 | File path or dataset 38 | 39 | varname: str 40 | Variable name to be read 41 | 42 | lon_name : str, optional 43 | Name of the longitude dimension (defualt selects from ['lon' or 44 | 'longitude']) 45 | 46 | Returns 47 | ------- 48 | xr.Dataset 49 | xarray dataset loaded into memory 50 | """ 51 | if hasattr(fp, "sel_points"): 52 | ds = fp 53 | 54 | else: 55 | with xr.open_dataset(fp) as ds: 56 | ds.load() 57 | 58 | return standardize_climate_data(ds) 59 | 60 | 61 | def load_gmfd(fp, varname, lon_name="lon", broadcast_dims=("time",)): 62 | pass 63 | 64 | 65 | def load_best(fp, varname, lon_name="lon", broadcast_dims=("time",)): 66 | pass 67 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """The setup script.""" 5 | 6 | from setuptools import setup, find_packages 7 | from description import long_description 8 | 9 | requirements = [ 10 | 'Click>=6.0', 11 | # TODO: put package requirements here 12 | ] 13 | 14 | setup_requirements = [ 15 | 'pytest-runner', 16 | # TODO(jgerardsimcock): put setup requirements 17 | # (distutils extensions, etc.) 
here 18 | ] 19 | 20 | test_requirements = [ 21 | 'pytest', 22 | # TODO: put package test requirements here 23 | ] 24 | 25 | setup( 26 | name='climate_toolbox', 27 | version='0.1.5', 28 | description="Tools for climate data wrangling", 29 | long_description=long_description, 30 | author="Justin Simcock", 31 | author_email='jsimcock@rhg.com', 32 | url='https://github.com/ClimateImpactLab/climate_toolbox', 33 | packages=find_packages(include=['climate_toolbox']), 34 | entry_points={ 35 | 'console_scripts': [ 36 | 'climate_toolbox=climate_toolbox.cli:main' 37 | ] 38 | }, 39 | include_package_data=True, 40 | install_requires=requirements, 41 | license="MIT license", 42 | zip_safe=False, 43 | keywords='climate_toolbox', 44 | classifiers=[ 45 | 'Development Status :: 2 - Pre-Alpha', 46 | 'Intended Audience :: Developers', 47 | 'License :: OSI Approved :: MIT License', 48 | 'Natural Language :: English', 49 | "Programming Language :: Python :: 2", 50 | 'Programming Language :: Python :: 2.7', 51 | 'Programming Language :: Python :: 3', 52 | 'Programming Language :: Python :: 3.5', 53 | 'Programming Language :: Python :: 3.6', 54 | ], 55 | test_suite='tests', 56 | tests_require=test_requirements, 57 | setup_requires=setup_requirements, 58 | ) 59 | -------------------------------------------------------------------------------- /climate_toolbox/geo/distance.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | # model major (km) minor (km) flattening 4 | ELLIPSOIDS = { 5 | "WGS-84": (6378.137, 6356.7523142, 1 / 298.257223563), 6 | "GRS-80": (6378.137, 6356.7523141, 1 / 298.257222101), 7 | "Airy (1830)": (6377.563396, 6356.256909, 1 / 299.3249646), 8 | "Intl 1924": (6378.388, 6356.911946, 1 / 297.0), 9 | "Clarke (1880)": (6378.249145, 6356.51486955, 1 / 293.465), 10 | "GRS-67": (6378.1600, 6356.774719, 1 / 298.25), 11 | } 12 | 13 | 14 | EARTH_RADIUS = 6371.009 15 | 16 | 17 | def great_circle(ax, ay, bx, by, 
radius=EARTH_RADIUS): 18 | """ 19 | calculate the great circle distance (km) between points 20 | 21 | Provide points (ax, ay) and (bx, by) as floats, or as 22 | vectors. If ax and ay are vectors or arrays of the 23 | same shape, the element-wise distance will be found 24 | between points in the vectors/arrays. If ax, ay are 25 | (Mx1) column vectors and (bx, by) are (1xN) row 26 | vectors, the vectors will be broadcast using numpy 27 | broadcasting rules and the distance between each pair 28 | of points will be returned as an (MxN) matrix. 29 | 30 | Parameters 31 | ----------- 32 | ax : float or array 33 | x/long of point a 34 | ay : float or array 35 | y/lat of point a 36 | bx : float or array 37 | x/long of point b 38 | by : float or array 39 | y/lat of point b 40 | radius : float, optional 41 | Radius of the sphere on which to calculate the great 42 | circle distance (default is to use the Earth's radius in 43 | km, `6371.009`). Values returned will be in units of the 44 | radius provided. 45 | 46 | Returns 47 | -------- 48 | distance : float or array 49 | great circle distance between points a and b. 
Units will 50 | match the radius provided (default km) 51 | """ 52 | 53 | lat1, lng1 = np.radians(ay), np.radians(ax) 54 | lat2, lng2 = np.radians(by), np.radians(bx) 55 | 56 | sin_lat1, cos_lat1 = np.sin(lat1), np.cos(lat1) 57 | sin_lat2, cos_lat2 = np.sin(lat2), np.cos(lat2) 58 | 59 | delta_lng = lng2 - lng1 60 | cos_delta_lng, sin_delta_lng = np.cos(delta_lng), np.sin(delta_lng) 61 | 62 | d = np.arctan2( 63 | np.sqrt( 64 | (cos_lat2 * sin_delta_lng) ** 2 65 | + (cos_lat1 * sin_lat2 - sin_lat1 * cos_lat2 * cos_delta_lng) ** 2 66 | ), 67 | sin_lat1 * sin_lat2 + cos_lat1 * cos_lat2 * cos_delta_lng, 68 | ) 69 | 70 | return radius * d 71 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean clean-test clean-pyc clean-build docs help 2 | .DEFAULT_GOAL := help 3 | define BROWSER_PYSCRIPT 4 | import os, webbrowser, sys 5 | try: 6 | from urllib import pathname2url 7 | except: 8 | from urllib.request import pathname2url 9 | 10 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) 11 | endef 12 | export BROWSER_PYSCRIPT 13 | 14 | define PRINT_HELP_PYSCRIPT 15 | import re, sys 16 | 17 | for line in sys.stdin: 18 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) 19 | if match: 20 | target, help = match.groups() 21 | print("%-20s %s" % (target, help)) 22 | endef 23 | export PRINT_HELP_PYSCRIPT 24 | BROWSER := python -c "$$BROWSER_PYSCRIPT" 25 | 26 | help: 27 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) 28 | 29 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts 30 | 31 | 32 | clean-build: ## remove build artifacts 33 | rm -fr build/ 34 | rm -fr dist/ 35 | rm -fr .eggs/ 36 | find . -name '*.egg-info' -exec rm -fr {} + 37 | find . -name '*.egg' -exec rm -f {} + 38 | 39 | clean-pyc: ## remove Python file artifacts 40 | find . 
-name '*.pyc' -exec rm -f {} + 41 | find . -name '*.pyo' -exec rm -f {} + 42 | find . -name '*~' -exec rm -f {} + 43 | find . -name '__pycache__' -exec rm -fr {} + 44 | 45 | clean-test: ## remove test and coverage artifacts 46 | rm -fr .tox/ 47 | rm -f .coverage 48 | rm -fr htmlcov/ 49 | 50 | lint: ## check style with flake8 51 | flake8 climate_toolbox tests 52 | 53 | test: ## run tests quickly with the default Python 54 | pytest 55 | 56 | test-all: ## run tests on every Python version with tox 57 | tox 58 | 59 | coverage: ## check code coverage quickly with the default Python 60 | coverage run --source climate_toolbox -m pytest 61 | coverage report -m 62 | coverage html 63 | $(BROWSER) htmlcov/index.html 64 | 65 | docs: ## generate Sphinx HTML documentation, including API docs 66 | rm -f docs/climate_toolbox.rst 67 | rm -f docs/modules.rst 68 | sphinx-apidoc -o docs/ climate_toolbox 69 | $(MAKE) -C docs clean 70 | $(MAKE) -C docs html 71 | $(BROWSER) docs/_build/html/index.html 72 | 73 | servedocs: docs ## compile the docs watching for changes 74 | watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . 
75 | 76 | release: clean ## package and upload a release 77 | python setup.py sdist upload 78 | python setup.py bdist_wheel upload 79 | 80 | dist: clean ## builds source and wheel package 81 | python setup.py sdist 82 | python setup.py bdist_wheel 83 | ls -l dist 84 | 85 | install: clean ## install the package to the active Python's site-packages 86 | python setup.py install 87 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # This file was autogenerated and will overwrite each time you run travis_pypi_setup.py 2 | after_success: 3 | - coverage combine 4 | - coveralls 5 | deploy: 6 | true: 7 | python: 3.6 8 | env: 9 | TEST_ENV=conda 10 | repo: ClimateImpactLab/climate_toolbox 11 | tags: true 12 | distributions: sdist bdist_wheel 13 | provider: pypi 14 | user: delgadom 15 | password: 16 | secure: !!binary | 17 | T08rME5qdHB2ZVlPN0hQbDFVb2tlL0xDNDVLMCtmYVMrNy9sRUJQbXlBc2hCVjkwdEdqblZDQ3c0 18 | Y1ZodzZlYVM2TnR4dzF0ZWhkcHZvRkRiMVVJTFhJaURYU2RoWXVGcC8wdmhyOCsvbGR6WUtkUFAv 19 | bjJBajlDR1VYK3NTcjM5NEgrY3RBczIyMTZ0clExdCtSVHJwOUc3YTRmVmdocVZyWkZNMlFadjFa 20 | ZC9jNnhUb2lhUm84VTZaU3B6Y0VBVnhQV21JRnB2K1Y0SVc3SEFVY0ExS2ROU2xZa3RwYzV5ZU1a 21 | WjJhdFBGTlp4anp5SklsQys3TW5kNzZoaW16S1JhQi96UENBMGRWVjZrQTJBNXFjblZUM2dhTjYy 22 | c3IvNFN5S3BwNHF5amtSVkthS3A1em9YUEJ4RHdTMjlZdFVRUUxCMSsrb3oxbDVhQjBsK0lpdlNB 23 | OExQOTluN1ZRYkJjODRraXhPbkswc3kvazFRdzVERmFGZUlTbWdORmw4di9oNCtnTmZVRnV2ZFo3 24 | N2lRS1o5YWNTMVAzaWk3WExFdll0dmpsVGR3VlI5aUlHZTFRMWR2UjFCamwzMFE1TGNwTmNOVWM1 25 | UURBSnB0VytvQUQ5aFpsV3hZN0FLZnVzd21zRmM2M21UaUgxNjVYRitCaHRJQmtRNFRpb25qVXAr 26 | QUlUVHZNTE1DYTdxZTVWb3JyTnBzQ2JqVmxJT29IRGNuelhYL2NySWZOTXRzQXgrYWhxR0FkTENH 27 | VXd2dk9XMmZ1RXJaQ25FOC9DZXBwaVJWdlN3MEpkUXBJS1VFUUFKSWhad09BRndCWGVDY0JaZkN3 28 | UU1MbGRuOEtxdlA3cjJMNHA5SWFMVDRiSHU0QTNTaU5WKzlITUsxYzJGOE51VXp1cUNPZ2FjNms9 29 | env: 30 | matrix: 31 | - TEST_ENV=tox 32 | - 
TEST_ENV=conda 33 | install: 34 | - pip install --upgrade pip 35 | - 'if [[ "$TEST_ENV" == "conda" ]]; then if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; 36 | then wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh 37 | -O miniconda.sh; else wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh 38 | -O miniconda.sh; fi; bash miniconda.sh -b -p $HOME/miniconda; export PATH="$HOME/miniconda/bin:$PATH"; 39 | hash -r; conda config --set always_yes yes --set changeps1 no; conda update -q conda; 40 | conda config --add channels conda-forge; conda info -a; 41 | 42 | conda create -q -n test-env python=$TRAVIS_PYTHON_VERSION; source activate test-env; 43 | 44 | python setup.py install; python -m pip install -r requirements_dev.txt; conda install 45 | --yes --file requirements_conda.txt; else pip install -U tox-travis; fi' 46 | language: python 47 | python: 48 | - 3.6 49 | - 3.5 50 | - 2.7 51 | script: 52 | - if [[ "$TEST_ENV" == "conda" ]]; then export PATH=$HOME/miniconda/bin:$PATH; source 53 | activate test-env; python -m pytest; sphinx-apidoc -o docs climate_toolbox; sphinx-build 54 | -W -b html -d docs/_build/doctrees docs/. docs/_build/html; else tox; fi; 55 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | 2 | Contributing 3 | ============ 4 | 5 | Contributions are welcome, and they are greatly appreciated! Every 6 | little bit helps, and credit will always be given. 7 | 8 | You can contribute in many ways: 9 | 10 | Types of Contributions 11 | ---------------------- 12 | 13 | Report Bugs 14 | ~~~~~~~~~~~ 15 | 16 | Report bugs at https://github.com/ClimateImpactLab/climate_toolbox/issues. 17 | 18 | If you are reporting a bug, please include: 19 | 20 | * Your operating system name and version. 21 | * Any details about your local setup that might be helpful in troubleshooting. 
22 | * Detailed steps to reproduce the bug. 23 | 24 | Fix Bugs 25 | ~~~~~~~~ 26 | 27 | Look through the GitHub issues for bugs. Anything tagged with "bug" 28 | and "help wanted" is open to whoever wants to implement it. 29 | 30 | Implement Features 31 | ~~~~~~~~~~~~~~~~~~ 32 | 33 | Look through the GitHub issues for features. Anything tagged with "enhancement" 34 | and "help wanted" is open to whoever wants to implement it. 35 | 36 | Write Documentation 37 | ~~~~~~~~~~~~~~~~~~~ 38 | 39 | climate_toolbox could always use more documentation, whether as part of the 40 | official climate_toolbox docs, in docstrings, or even on the web in blog posts, 41 | articles, and such. 42 | 43 | Submit Feedback 44 | ~~~~~~~~~~~~~~~ 45 | 46 | The best way to send feedback is to file an issue at https://github.com/ClimateImpactLab/climate_toolbox/issues. 47 | 48 | If you are proposing a feature: 49 | 50 | * Explain in detail how it would work. 51 | * Keep the scope as narrow as possible, to make it easier to implement. 52 | * Remember that this is a volunteer-driven project, and that contributions 53 | are welcome :) 54 | 55 | Get Started! 56 | ------------ 57 | 58 | Ready to contribute? Here's how to set up `climate_toolbox` for local development. 59 | 60 | 1. Fork the `climate_toolbox` repo on GitHub. 61 | 2. Clone your fork locally:: 62 | 63 | $ git clone git@github.com:your_name_here/climate_toolbox.git 64 | 65 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: 66 | 67 | $ mkvirtualenv climate_toolbox 68 | $ cd climate_toolbox/ 69 | $ python setup.py develop 70 | 71 | If you are using Conda run the following:: 72 | 73 | $ conda create -n climate_toolbox python=3.5 74 | $ conda activate climate_toolbox 75 | $ pip install -r requirements.txt 76 | $ python setup.py test 77 | 78 | 4. 
Create a branch for local development:: 79 | 80 | $ git checkout -b name-of-your-bugfix-or-feature 81 | 82 | Now you can make your changes locally. 83 | 84 | 5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox:: 85 | 86 | $ flake8 climate_toolbox tests 87 | $ python setup.py test or pytest 88 | $ tox 89 | 90 | To get flake8 and tox, just pip install them into your virtualenv. 91 | 92 | 6. Commit your changes and push your branch to GitHub:: 93 | 94 | $ git add . 95 | $ git commit -m "Your detailed description of your changes." 96 | $ git push origin name-of-your-bugfix-or-feature 97 | 98 | 7. Submit a pull request through the GitHub website. 99 | 100 | Pull Request Guidelines 101 | ----------------------- 102 | 103 | Before you submit a pull request, check that it meets these guidelines: 104 | 105 | 1. The pull request should include tests. 106 | 2. If the pull request adds functionality, the docs should be updated. Put 107 | your new functionality into a function with a docstring, and add the 108 | feature to the list in README.rst. 109 | 3. The pull request should work for Python 2.6, 2.7, 3.3, 3.4 and 3.5, and for PyPy. Check 110 | https://travis-ci.org/ClimateImpactLab/climate_toolbox/pull_requests 111 | and make sure that the tests pass for all supported Python versions. 
112 | 113 | Tips 114 | ---- 115 | 116 | To run a subset of tests:: 117 | 118 | $ pytest tests.test_climate_toolbox 119 | -------------------------------------------------------------------------------- /climate_toolbox/aggregations/aggregations.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | import pandas as pd 4 | import toolz 5 | from distutils.version import LooseVersion 6 | 7 | 8 | def _reindex_spatial_data_to_regions(ds, df): 9 | """ 10 | Reindexes spatial and segment weight data to regions 11 | Enables region index-based math operations 12 | Parameters 13 | ---------- 14 | ds: xarray Dataset 15 | df: pandas DataFrame 16 | Returns 17 | ------- 18 | Xarray DataArray 19 | """ 20 | 21 | # use vectorized indexing in xarray >= 0.10 22 | if LooseVersion(xr.__version__) > LooseVersion("0.9.999"): 23 | 24 | lon_indexer = xr.DataArray(df.lon.values, dims=("reshape_index",)) 25 | lat_indexer = xr.DataArray(df.lat.values, dims=("reshape_index",)) 26 | 27 | return ds.sel(lon=lon_indexer, lat=lat_indexer) 28 | 29 | else: 30 | res = ds.sel_points("reshape_index", lat=df.lat.values, lon=df.lon.values) 31 | 32 | return res 33 | 34 | 35 | def _aggregate_reindexed_data_to_regions( 36 | ds, variable, aggwt, agglev, weights, backup_aggwt="areawt" 37 | ): 38 | """ 39 | Performs weighted avg for climate variable by region 40 | 41 | Parameters 42 | ---------- 43 | 44 | ds: xarray.DataArray 45 | 46 | variable: str 47 | name of the data variable 48 | 49 | aggwt: str 50 | variable to weight by (i.e popwt, areawt, cropwt) 51 | 52 | agglev: str 53 | indicates which regional id scheme to select in the dataframe 54 | 55 | weights: pd.DataFrame 56 | pandas DataFrame of weights 57 | 58 | backup_aggwt: str, optional 59 | aggregation weight to use in regions with no aggwt data (default 60 | 'areawt') 61 | 62 | """ 63 | 64 | ds.coords[agglev] = xr.DataArray( 65 | weights[agglev].values, 
@toolz.memoize
def prepare_spatial_weights_data(weights_file):
    """
    Load segment weights and rescale the pix_cent_x column values

    Parameters
    ----------
    weights_file: str
        location of file used for weighting

    Returns
    -------
    pd.DataFrame
        weights indexed by ``reshape_index`` with ``lon``/``lat`` columns

    .. note:: unnecessary if we can standardize our input

    .. note:: NOTE(review): ``weighted_aggregate_grid_to_regions`` calls
        this function with no arguments, which raises a TypeError because
        ``weights_file`` has no default — confirm intended default path.
    """

    df = pd.read_csv(weights_file)

    # Re-label out-of-bounds pixel centers.
    # ``df.set_value`` was removed from pandas (and did label-based, not
    # mask-based, assignment); boolean-mask .loc assignment is the
    # supported equivalent.
    df.loc[df["pix_cent_x"] == 180.125, "pix_cent_x"] = -179.875

    # drop_duplicates returns a new frame -- the original discarded the
    # result, making it a no-op. probably totally unnecessary
    df = df.drop_duplicates()
    df.index.names = ["reshape_index"]

    df.rename(columns={"pix_cent_x": "lon", "pix_cent_y": "lat"}, inplace=True)

    return df
32 | 33 | Read more about RSA encryption with cryptography: 34 | https://cryptography.io/latest/hazmat/primitives/asymmetric/rsa/ 35 | """ 36 | try: 37 | return load_pem_public_key(pubkey.encode(), default_backend()) 38 | except ValueError: 39 | # workaround for https://github.com/travis-ci/travis-api/issues/196 40 | pubkey = pubkey.replace('BEGIN RSA', 'BEGIN').replace('END RSA', 'END') 41 | return load_pem_public_key(pubkey.encode(), default_backend()) 42 | 43 | 44 | def encrypt(pubkey, password): 45 | """Encrypt password using given RSA public key and encode it with base64. 46 | 47 | The encrypted password can only be decrypted by someone with the 48 | private key (in this case, only Travis). 49 | """ 50 | key = load_key(pubkey) 51 | encrypted_password = key.encrypt(password, PKCS1v15()) 52 | return base64.b64encode(encrypted_password) 53 | 54 | 55 | def fetch_public_key(repo): 56 | """Download RSA public key Travis will use for this repo. 57 | 58 | Travis API docs: http://docs.travis-ci.com/api/#repository-keys 59 | """ 60 | keyurl = 'https://api.travis-ci.org/repos/{0}/key'.format(repo) 61 | data = json.loads(urlopen(keyurl).read().decode()) 62 | if 'key' not in data: 63 | errmsg = "Could not find public key for repo: {}.\n".format(repo) 64 | errmsg += "Have you already added your GitHub repo to Travis?" 
65 | raise ValueError(errmsg) 66 | return data['key'] 67 | 68 | 69 | def prepend_line(filepath, line): 70 | """Rewrite a file adding a line to its beginning.""" 71 | with open(filepath) as f: 72 | lines = f.readlines() 73 | 74 | lines.insert(0, line) 75 | 76 | with open(filepath, 'w') as f: 77 | f.writelines(lines) 78 | 79 | 80 | def load_yaml_config(filepath): 81 | """Load yaml config file at the given path.""" 82 | with open(filepath) as f: 83 | return yaml.safe_load(f) 84 | 85 | 86 | def save_yaml_config(filepath, config): 87 | """Save yaml config file at the given path.""" 88 | with open(filepath, 'w') as f: 89 | yaml.dump(config, f, default_flow_style=False) 90 | 91 | 92 | def update_travis_deploy_password(encrypted_password): 93 | """Put `encrypted_password` into the deploy section of .travis.yml.""" 94 | config = load_yaml_config(TRAVIS_CONFIG_FILE) 95 | 96 | config['deploy']['password'] = dict(secure=encrypted_password) 97 | 98 | save_yaml_config(TRAVIS_CONFIG_FILE, config) 99 | 100 | line = ('# This file was autogenerated and will overwrite' 101 | ' each time you run travis_pypi_setup.py\n') 102 | prepend_line(TRAVIS_CONFIG_FILE, line) 103 | 104 | 105 | def main(args): 106 | """Add a PyPI password to .travis.yml so that Travis can deploy to PyPI. 107 | 108 | Fetch the Travis public key for the repo, and encrypt the PyPI password 109 | with it before adding, so that only Travis can decrypt and use the PyPI 110 | password. 
111 | """ 112 | public_key = fetch_public_key(args.repo) 113 | password = args.password or getpass('PyPI password: ') 114 | update_travis_deploy_password(encrypt(public_key, password.encode())) 115 | print("Wrote encrypted password to .travis.yml -- you're ready to deploy") 116 | 117 | 118 | if '__main__' == __name__: 119 | import argparse 120 | parser = argparse.ArgumentParser(description=__doc__) 121 | parser.add_argument('--repo', default=GITHUB_REPO, 122 | help='GitHub repo (default: %s)' % GITHUB_REPO) 123 | parser.add_argument('--password', 124 | help='PyPI password (will prompt if not provided)') 125 | 126 | args = parser.parse_args() 127 | main(args) 128 | -------------------------------------------------------------------------------- /climate_toolbox/utils/utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Handy functions for standardizing the format of climate data 3 | """ 4 | 5 | import xarray as xr 6 | import numpy as np 7 | import pandas as pd 8 | 9 | 10 | def convert_kelvin_to_celsius(df, temp_name): 11 | """Convert Kelvin to Celsius""" 12 | df_attrs = df[temp_name].attrs 13 | df[temp_name] = df[temp_name] - 273.15 14 | # update attrs & unit information 15 | df[temp_name].attrs.update(df_attrs) 16 | df[temp_name].attrs["units"] = "C" 17 | df[temp_name].attrs["valid_min"] = -108.78788 18 | df[temp_name].attrs["valid_max"] = 62.02828 19 | 20 | return df 21 | 22 | 23 | def convert_lons_mono(ds, lon_name="longitude"): 24 | """Convert longitude from -180-180 to 0-360""" 25 | ds[lon_name].values = np.where( 26 | ds[lon_name].values < 0, 360 + ds[lon_name].values, ds[lon_name].values 27 | ) 28 | 29 | # sort the dataset by the new lon values 30 | ds = ds.sel(**{lon_name: np.sort(ds[lon_name].values)}) 31 | 32 | return ds 33 | 34 | 35 | def convert_lons_split(ds, lon_name="longitude"): 36 | """Convert longitude from 0-360 to -180-180""" 37 | ds[lon_name].values = xr.where(ds[lon_name] > 180, ds[lon_name] - 
360, ds[lon_name]) 38 | 39 | # sort the dataset by the new lon values 40 | ds = ds.sel(**{lon_name: np.sort(ds[lon_name].values)}) 41 | 42 | return ds 43 | 44 | 45 | def rename_coords_to_lon_and_lat(ds): 46 | """Rename Dataset spatial coord names to: 47 | lat, lon 48 | """ 49 | if "latitude" in ds.coords: 50 | ds = ds.rename({"latitude": "lat"}) 51 | if "longitude" in ds.coords: 52 | ds = ds.rename({"longitude": "lon"}) 53 | elif "long" in ds.coords: 54 | ds = ds.rename({"long": "lon"}) 55 | 56 | if "z" in ds.coords: 57 | ds = ds.drop("z").squeeze() 58 | 59 | return ds 60 | 61 | 62 | def rename_coords_to_longitude_and_latitude(ds): 63 | """Rename Dataset spatial coord names to: 64 | latitude, longitude 65 | """ 66 | if "lat" in ds.coords: 67 | ds = ds.rename({"lat": "latitude"}) 68 | if "lon" in ds.coords: 69 | ds = ds.rename({"lon": "longitude"}) 70 | elif "long" in ds.coords: 71 | ds = ds.rename({"long": "longitude"}) 72 | 73 | if "z" in ds.coords: 74 | ds = ds.drop("z").squeeze() 75 | 76 | return ds 77 | 78 | 79 | def remove_leap_days(ds): 80 | ds = ds.loc[{"time": ~((ds["time.month"] == 2) & (ds["time.day"] == 29))}] 81 | 82 | return ds 83 | 84 | 85 | def season_boundaries(growing_days): 86 | """Returns the sorted start and end date of growing season""" 87 | 88 | # the longitude values of the data is off, we need to scale it 89 | growing_days.longitude.values = growing_days.longitude.values - 180 90 | # we then sort by longitude 91 | growing_days = growing_days.sortby("longitude") 92 | 93 | # construct the ds 94 | gdd_sorted = xr.DataArray( 95 | # xarray has no method to sort along an axis 96 | # we use np.sort but construct the matrix from a xarray dataArray 97 | # we use transpose to track the axis we want to sort along 98 | np.sort( 99 | growing_days.variable.transpose("latitude", "longitude", "z").values, axis=2 100 | ), 101 | dims=("latitude", "longitude", "sort"), 102 | coords={ 103 | "latitude": growing_days.latitude, 104 | "longitude": 
growing_days.longitude, 105 | "sort": pd.Index(["min", "max"]), 106 | }, 107 | ) 108 | 109 | # we can then select an axis in the sorted dataarray as min 110 | min_day, max_day = gdd_sorted.sel(sort="min"), gdd_sorted.sel(sort="max") 111 | 112 | return min_day, max_day 113 | 114 | 115 | def get_daily_growing_season_mask(lat, lon, time, growing_days_path): 116 | """ 117 | 118 | Constructs a mask for days in the within calendar growing season 119 | 120 | Parameters 121 | ---------- 122 | lat: xr.DataArray coords object 123 | lon: xr.DataArray coords object 124 | time: xr.DataArray coords object 125 | growing_days_path: str 126 | 127 | Returns 128 | ------- 129 | DataArray 130 | xr.DataArray of masked lat x lon x time 131 | 132 | """ 133 | 134 | growing_days = xr.open_dataset(growing_days_path) 135 | 136 | # find the min and max for the growing season 137 | min_day, max_day = season_boundaries(growing_days) 138 | 139 | data = np.ones((lat.shape[0], lon.shape[0], time.shape[0])) 140 | # create an array of ones in the shape of the data 141 | ones = xr.DataArray(data, coords=[lat, lon, time], dims=["lat", "lon", "time"]) 142 | 143 | # mask the array around the within calendar year start and end times 144 | # of growing season 145 | mask = (ones["time.dayofyear"] >= min_day) & (ones["time.dayofyear"] <= max_day) 146 | 147 | # apply this mask and 148 | finalmask = ( 149 | mask.where(growing_days.variable.sel(z=2) >= growing_days.variable.sel(z=1)) 150 | .fillna(1 - mask) 151 | .where(~growing_days.variable.sel(z=1, drop=True).isnull()) 152 | .rename({"latitude": "lat", "longitude": "lon"}) 153 | ) 154 | 155 | return finalmask 156 | -------------------------------------------------------------------------------- /climate_toolbox/transformations/transformations.py: -------------------------------------------------------------------------------- 1 | import xarray as xr 2 | import numpy as np 3 | 4 | from climate_toolbox.utils.utils import remove_leap_days, 
def snyder_edd(tasmin, tasmax, threshold):
    r"""
    Snyder exceedance degree days/cooling degree days

    Similarly to Snyder HDDs, Snyder exceedance degree days for any given day
    are given by the integral between the sinusoid-interpolated temperature and
    the threshold.

    The closed form solution is given by:

    .. math::

        EDD_{P} = \sum_{d \in P} EDD_d

    where

    .. math::

        EDD_d =
        \begin{cases}
        ( (M - e)(\pi /2 - \theta) + w \cos(\theta) ) / \pi, & \text{if } tmin_d < e < tmax_d \\
        0 , & \text{if } tmax_d < e \\
        M - e, & \text{otherwise}
        \end{cases}

    and

    .. math::

        \begin{array}{rll}
        M & = & (tmax_d + tmin_d)/2 \\
        w & = & (tmax_d-tmin_d)/2 \\
        \theta & = & \arcsin( (e-M)/w ) \\
        \end{array}

    Parameters
    ----------

    tasmin : xarray.DataArray
        Daily minimum temperature (degrees C)

    tasmax : xarray.DataArray
        Daily maximum temperature (degrees C)

    threshold : int, float, xarray.DataArray
        Threshold (degrees C)

    Returns
    -------

    edd : xarray.DataArray
        Snyder exceedance degree days (degreedays)

    """

    # Check for unit agreement
    assert tasmin.units == tasmax.units

    # check to make sure tasmax > tasmin everywhere
    assert not (tasmax < tasmin).any(), "values encountered where tasmin > tasmax"

    # compute useful quantities for use in the transformation
    snyder_mean = (tasmax + tasmin) / 2
    snyder_width = (tasmax - tasmin) / 2

    # np.arcsin applies elementwise to DataArrays; the xr.ufuncs module
    # used here previously was deprecated and has been removed from xarray.
    snyder_theta = np.arcsin((threshold - snyder_mean) / snyder_width)

    # xr.where selects among the three analytic cases elementwise:
    # days straddling the threshold use the closed-form integral, days
    # entirely below contribute 0, and days entirely above contribute
    # mean - threshold.
    res = xr.where(
        tasmin < threshold,
        xr.where(
            tasmax > threshold,
            (
                (snyder_mean - threshold) * (np.pi / 2 - snyder_theta)
                + (snyder_width * np.cos(snyder_theta))
            )
            / np.pi,
            0,
        ),
        snyder_mean - threshold,
    )

    res.attrs["units"] = "degreedays_{}{}".format(threshold, tasmax.attrs["units"])

    return res
111 | 112 | 113 | Parameters 114 | ---------- 115 | 116 | tasmin : xarray.DataArray 117 | Daily minimum temperature (degrees C) 118 | 119 | tasmax : xarray.DataArray 120 | Daily maximum temperature (degrees C) 121 | 122 | threshold_low : int, float, xarray.DataArray 123 | Lower threshold (degrees C) 124 | 125 | threshold_high : int, float, xarray.DataArray 126 | Upper threshold (degrees C) 127 | 128 | Returns 129 | ------- 130 | 131 | gdd : xarray.DataArray 132 | Snyder growing degree days (degreedays) 133 | 134 | """ 135 | 136 | # Check for unit agreement 137 | assert tasmin.units == tasmax.units 138 | 139 | res = snyder_edd(tasmin, tasmax, threshold_low) - snyder_edd( 140 | tasmin, tasmax, threshold_high 141 | ) 142 | 143 | res.attrs["units"] = "degreedays_{}-{}{}".format( 144 | threshold_low, threshold_high, tasmax.attrs["units"] 145 | ) 146 | 147 | return res 148 | 149 | 150 | def validate_edd_snyder_agriculture(ds, thresholds): 151 | msg_null = "hierid dims do not match 24378" 152 | 153 | assert ds.hierid.shape == (24378,), msg_null 154 | 155 | for threshold in thresholds: 156 | assert threshold in list(ds.refTemp) 157 | return 158 | 159 | 160 | def tas_poly(ds, power, varname): 161 | """ 162 | Daily average temperature (degrees C), raised to a power 163 | 164 | Leap years are removed before counting days (uses a 365 day 165 | calendar). 166 | """ 167 | 168 | powername = ordinal(power) 169 | 170 | description = ( 171 | """ 172 | Daily average temperature (degrees C){raised} 173 | 174 | Leap years are removed before counting days (uses a 365 day 175 | calendar). 
176 | """.format( 177 | raised="" 178 | if power == 1 179 | else (" raised to the {powername} power".format(powername=powername)) 180 | ) 181 | ).strip() 182 | 183 | ds1 = xr.Dataset() 184 | 185 | # remove leap years 186 | ds = remove_leap_days(ds) 187 | 188 | # do transformation 189 | ds1[varname] = (ds.tas - 273.15) ** power 190 | 191 | # Replace datetime64[ns] 'time' with YYYYDDD int 'day' 192 | if ds.dims["time"] > 365: 193 | raise ValueError 194 | 195 | ds1.coords["day"] = ds["time.year"] * 1000 + np.arange(1, len(ds.time) + 1) 196 | ds1 = ds1.swap_dims({"time": "day"}) 197 | ds1 = ds1.drop("time") 198 | 199 | ds1 = ds1.rename({"day": "time"}) 200 | 201 | # document variable 202 | ds1[varname].attrs["units"] = "C^{}".format(power) if power > 1 else "C" 203 | 204 | ds1[varname].attrs["long_title"] = description.splitlines()[0] 205 | ds1[varname].attrs["description"] = description 206 | ds1[varname].attrs["variable"] = varname 207 | 208 | return ds1 209 | 210 | 211 | def ordinal(n): 212 | """Converts numbers into ordinal strings""" 213 | 214 | return "%d%s" % (n, "tsnrhtdd"[(n // 10 % 10 != 1) * (n % 10 < 4) * n % 10 :: 4]) 215 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. 
If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf 
$(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/climate_toolbox.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/climate_toolbox.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/climate_toolbox" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/climate_toolbox" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 
152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. 
htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 
76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\climate_toolbox.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\climate_toolbox.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 
137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 
205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /tests/test_climate_toolbox.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """Tests for `climate_toolbox` package.""" 5 | 6 | import pytest 7 | 8 | from climate_toolbox.utils.utils import ( 9 | rename_coords_to_lon_and_lat, 10 | rename_coords_to_longitude_and_latitude, 11 | convert_lons_mono, 12 | convert_lons_split, 13 | remove_leap_days, 14 | convert_kelvin_to_celsius, 15 | ) 16 | from climate_toolbox.aggregations.aggregations import ( 17 | _reindex_spatial_data_to_regions, 18 | _aggregate_reindexed_data_to_regions, 19 | ) 20 | from climate_toolbox.transformations.transformations import snyder_edd, snyder_gdd 21 | from climate_toolbox.io import standardize_climate_data 22 | 23 | import numpy as np 24 | import pandas as pd 25 | import xarray as xr 26 | 27 | 28 | # python utils 29 | 30 | # create pytest resource to be used across tests 31 | 32 | 33 | @pytest.fixture(scope="session") 34 | def lat(): 35 | 36 | return np.arange(-89.875, 90, 2) 37 | 38 | 39 | @pytest.fixture(scope="session") 40 | def lon(): 41 | 42 | return np.arange(0.125, 360.0, 2) 43 | 44 | 45 | @pytest.fixture(scope="session") 46 | def time(): 47 | 48 | return pd.date_range(start=pd.datetime(2000, 1, 1), periods=10, freq="D") 49 | 50 | 51 | @pytest.fixture 52 | def clim_data(lat, lon, time): 53 | """ 54 | Generate fake climate data to do tests on 55 | """ 56 | np.random.seed(42) 57 | temp = np.random.rand(len(lat), len(lon), len(time)) * 100 58 | 59 | ds = xr.Dataset( 60 | {"temperature": (["lat", "lon", "time"], temp)}, 61 | coords={"lon": lon, "lat": lat, "time": time}, 62 | ) 63 | 64 | return ds 65 | 66 | 67 | def test_clim_data(clim_data): 68 | 69 | assert not clim_data.temperature.isnull().any() 70 | 71 | 72 | @pytest.fixture 73 | def make_holes(clim_data, lat): 74 | 75 
| N = len(lat) - 1 76 | 77 | tmp = clim_data["temperature"].values 78 | array = np.random.randint(0, N, 1) 79 | tmp[array] = np.nan 80 | 81 | clim_data["temperature"].values = tmp 82 | 83 | return clim_data 84 | 85 | 86 | @pytest.fixture 87 | def weights(lat, lon): 88 | 89 | df = pd.DataFrame() 90 | lats = np.random.choice(lat, 100) 91 | lons = np.random.choice(lon, 100) 92 | df["lat"] = lats 93 | df["lon"] = lons 94 | 95 | df["areawt"] = np.random.random(len(df["lon"])) 96 | tmp = np.random.random(len(df["lon"])) 97 | tmp[::5] = np.nan 98 | df["popwt"] = tmp 99 | df["hierid"] = np.random.choice(np.arange(1, 25), len(lats)) 100 | 101 | mapping = {h: np.random.choice(np.arange(1, 5)) for h in df["hierid"].values} 102 | 103 | df["ISO"] = [mapping[i] for i in df["hierid"]] 104 | df.index.names = ["reshape_index"] 105 | 106 | return df 107 | 108 | 109 | def test_reindex_spatial_weights(clim_data, weights): 110 | 111 | assert not clim_data.temperature.isnull().any() 112 | 113 | ds = _reindex_spatial_data_to_regions(clim_data, weights) 114 | 115 | assert ds.temperature.shape == (len(ds["lon"]), len(ds["time"])) 116 | assert "reshape_index" in ds.dims 117 | 118 | 119 | def test_weighting(clim_data, weights): 120 | 121 | assert np.isnan(weights["popwt"].values).any() 122 | ds = _reindex_spatial_data_to_regions(clim_data, weights) 123 | assert not ds.temperature.isnull().any() 124 | 125 | wtd = _aggregate_reindexed_data_to_regions( 126 | ds, "temperature", "popwt", "ISO", weights 127 | ) 128 | 129 | assert not wtd.temperature.isnull().any() 130 | 131 | wtd = _aggregate_reindexed_data_to_regions( 132 | ds, "temperature", "areawt", "ISO", weights 133 | ) 134 | 135 | assert not wtd.temperature.isnull().any() 136 | 137 | 138 | def test_rename_coords_to_lon_and_lat(): 139 | ds = xr.Dataset(coords={"z": [1.20, 2.58], "long": [156.6, 38.48]}) 140 | 141 | ds = rename_coords_to_lon_and_lat(ds) 142 | coords = ds.coords 143 | 144 | assert "z" not in coords 145 | assert coords.z is 
None 146 | assert "lon" in coords and "long" not in coords 147 | 148 | 149 | def test_rename_coords_to_lon_and_lat2(): 150 | ds = xr.Dataset(coords={"latitude": [71.32, 72.58], "longitude": [156.6, 38.48]}) 151 | 152 | ds = rename_coords_to_lon_and_lat(ds) 153 | coords = ds.coords 154 | 155 | assert "lat" in coords and "latitude" not in coords 156 | assert "lon" in coords and "longitude" not in coords 157 | 158 | 159 | def test_rename_coords_to_longitude_and_latitude(): 160 | ds = xr.Dataset(coords={"lat": [71.32, 72.58], "lon": [156.6, 38.48]}) 161 | ds = rename_coords_to_longitude_and_latitude(ds) 162 | coords = ds.coords 163 | 164 | assert "latitude" in coords and "lat" not in coords 165 | assert "longitude" in coords and "lon" not in coords 166 | 167 | 168 | def test_rename_coords_to_longitude_and_latitude_with_clim_data(clim_data): 169 | ds = rename_coords_to_longitude_and_latitude(clim_data) 170 | coords = ds.coords 171 | 172 | assert "latitude" in coords and "lat" not in coords 173 | assert "longitude" in coords and "lon" not in coords 174 | 175 | 176 | def test_convert_lons_mono(): 177 | ds = xr.Dataset(coords={"lon": [-156.6, -38.48]}) 178 | expected = np.array([203.4, 321.52]) 179 | 180 | ds = convert_lons_mono(ds, lon_name="lon") 181 | 182 | np.testing.assert_array_equal(ds.lon.values, expected) 183 | 184 | 185 | def test_convert_lons_split(): 186 | ds = xr.Dataset(coords={"longitude": [300, 320]}) 187 | expected = np.array([-60, -40]) 188 | 189 | ds = convert_lons_split(ds) 190 | 191 | np.testing.assert_array_equal(ds.longitude.values, expected) 192 | 193 | 194 | def test_remove_leap_days(): 195 | da = xr.DataArray( 196 | np.random.rand(4, 3), 197 | [ 198 | ("time", pd.date_range("2000-02-27", periods=4)), 199 | ("space", ["IA", "IL", "IN"]), 200 | ], 201 | ) 202 | leap_day = np.datetime64("2000-02-29") 203 | 204 | da = remove_leap_days(da) 205 | 206 | assert leap_day not in da.coords["time"].values 207 | 208 | 209 | def 
test_remove_leap_days_with_clim_data(clim_data): 210 | leap_day = np.datetime64("2000-02-29") 211 | 212 | da = remove_leap_days(clim_data) 213 | 214 | assert leap_day not in da.coords["time"].values 215 | 216 | 217 | def test_convert_kelvin_to_celsius(clim_data): 218 | ds = convert_kelvin_to_celsius(clim_data, "temperature") 219 | 220 | assert "C" in ds.data_vars["temperature"].units 221 | 222 | 223 | def test_standardize_climate_data(clim_data): 224 | ds = standardize_climate_data(clim_data) 225 | 226 | coordinates = ds.coords 227 | 228 | assert "lat" in coordinates and "latitude" not in coordinates 229 | assert "lon" in coordinates and "longitude" not in coordinates 230 | 231 | 232 | def test_snyder_edd(): 233 | ds_tasmax = xr.Dataset( 234 | data_vars={ 235 | "tmax": ("(latitude, longitude)", [280.4963, 280.7887], {"units": "K"}) 236 | }, 237 | coords={"latitude": [-33.625, -33.375], "longitude": [286.125, 286.375]}, 238 | ) 239 | 240 | ds_tasmin = xr.Dataset( 241 | data_vars={ 242 | "tmin": ("(latitude, longitude)", [278.902, 278.23163], {"units": "K"}) 243 | }, 244 | coords={"latitude": [-33.625, -33.375], "longitude": [286.125, 286.375]}, 245 | ) 246 | 247 | threshold = 8 248 | 249 | res = snyder_edd(ds_tasmin.tmin, ds_tasmax.tmax, threshold=273.15 + threshold) 250 | 251 | assert res.units == "degreedays_281.15K" 252 | assert res.sum().item(0) == 0.0 253 | 254 | 255 | def test_snyder_gdd(): 256 | ds_tasmax = xr.Dataset( 257 | data_vars={ 258 | "tmax": ("(latitude, longitude)", [280.4963, 280.7887], {"units": "K"}) 259 | }, 260 | coords={"latitude": [-33.625, -33.375], "longitude": [286.125, 286.375]}, 261 | ) 262 | 263 | ds_tasmin = xr.Dataset( 264 | data_vars={ 265 | "tmin": ("(latitude, longitude)", [278.902, 278.23163], {"units": "K"}) 266 | }, 267 | coords={"latitude": [-33.625, -33.375], "longitude": [286.125, 286.375]}, 268 | ) 269 | 270 | res = snyder_gdd( 271 | ds_tasmin.tmin, 272 | ds_tasmax.tmax, 273 | threshold_low=273.15 + 1, 274 | 
threshold_high=273.15 + 8, 275 | ) 276 | 277 | assert not res.units == "degreedays_281.15K" 278 | assert res.units == "degreedays_274.15-281.15K" 279 | assert res.sum().item(0) == pytest.approx(11, 0.1) 280 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # climate_toolbox documentation build configuration file, created by 5 | # sphinx-quickstart on Tue Jul 9 22:26:36 2013. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | import sys 17 | import os 18 | import sphinx.environment 19 | from docutils.utils import get_source_line 20 | 21 | def _warn_node(self, msg, node, **kwargs): 22 | if not msg.startswith('nonlocal image URI found:'): 23 | self._warnfunc(msg, '%s:%s' % get_source_line(node), **kwargs) 24 | 25 | sphinx.environment.BuildEnvironment.warn_node = _warn_node 26 | 27 | 28 | # If extensions (or modules to document with autodoc) are in another 29 | # directory, add these directories to sys.path here. If the directory is 30 | # relative to the documentation root, use os.path.abspath to make it 31 | # absolute, like shown here. 32 | #sys.path.insert(0, os.path.abspath('.')) 33 | 34 | # Get the project root dir, which is the parent dir of this 35 | cwd = os.getcwd() 36 | project_root = os.path.dirname(cwd) 37 | 38 | # Insert the project root dir as the first element in the PYTHONPATH. 39 | # This lets us ensure that the source package is imported, and that its 40 | # version is used. 
41 | sys.path.insert(0, project_root) 42 | 43 | import climate_toolbox 44 | 45 | # -- General configuration --------------------------------------------- 46 | 47 | # If your documentation needs a minimal Sphinx version, state it here. 48 | #needs_sphinx = '1.0' 49 | 50 | # Add any Sphinx extension module names here, as strings. They can be 51 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 52 | extensions = [ 53 | 'sphinx.ext.autodoc', 54 | 'sphinx.ext.todo', 55 | 'sphinx.ext.viewcode', 56 | 'sphinx.ext.extlinks', 57 | 'sphinx.ext.napoleon', 58 | 'sphinx.ext.intersphinx', 59 | 'sphinx.ext.mathjax'] 60 | 61 | extlinks = { 62 | 'issue': ('https://github.com/ClimateImpactLab/climate_toolbox/issues/%s', 'GH #'), 63 | 'pull': ('https://github.com/ClimateImpactLab/climate_toolbox/pull/%s', 'PR #')} 64 | 65 | napoleon_numpy_docstring = True 66 | 67 | # Add any paths that contain templates here, relative to this directory. 68 | templates_path = ['_templates'] 69 | 70 | # The suffix of source filenames. 71 | source_suffix = '.rst' 72 | 73 | # The encoding of source files. 74 | #source_encoding = 'utf-8-sig' 75 | 76 | # The master toctree document. 77 | master_doc = 'index' 78 | 79 | # General information about the project. 80 | project = u'climate_toolbox' 81 | copyright = u"2017, ClimateImpactLab" 82 | 83 | # The version info for the project you're documenting, acts as replacement 84 | # for |version| and |release|, also used in various other places throughout 85 | # the built documents. 86 | # 87 | # The short X.Y version. 88 | version = climate_toolbox.__version__ 89 | # The full version, including alpha/beta/rc tags. 90 | release = climate_toolbox.__version__ 91 | 92 | # The language for content autogenerated by Sphinx. Refer to documentation 93 | # for a list of supported languages. 
94 | #language = None 95 | 96 | # There are two options for replacing |today|: either, you set today to 97 | # some non-false value, then it is used: 98 | #today = '' 99 | # Else, today_fmt is used as the format for a strftime call. 100 | #today_fmt = '%B %d, %Y' 101 | 102 | # List of patterns, relative to source directory, that match files and 103 | # directories to ignore when looking for source files. 104 | exclude_patterns = ['_build'] 105 | 106 | # The reST default role (used for this markup: `text`) to use for all 107 | # documents. 108 | #default_role = None 109 | 110 | # If true, '()' will be appended to :func: etc. cross-reference text. 111 | #add_function_parentheses = True 112 | 113 | # If true, the current module name will be prepended to all description 114 | # unit titles (such as .. function::). 115 | #add_module_names = True 116 | 117 | # If true, sectionauthor and moduleauthor directives will be shown in the 118 | # output. They are ignored by default. 119 | #show_authors = False 120 | 121 | # The name of the Pygments (syntax highlighting) style to use. 122 | pygments_style = 'sphinx' 123 | 124 | # A list of ignored prefixes for module index sorting. 125 | #modindex_common_prefix = [] 126 | 127 | # If true, keep warnings as "system message" paragraphs in the built 128 | # documents. 129 | #keep_warnings = False 130 | 131 | 132 | # -- Options for HTML output ------------------------------------------- 133 | 134 | # The theme to use for HTML and HTML Help pages. See the documentation for 135 | # a list of builtin themes. 136 | html_theme = 'default' 137 | 138 | # Theme options are theme-specific and customize the look and feel of a 139 | # theme further. For a list of options available for each theme, see the 140 | # documentation. 141 | #html_theme_options = {} 142 | 143 | # Add any paths that contain custom themes here, relative to this directory. 144 | #html_theme_path = [] 145 | 146 | # The name for this set of Sphinx documents. 
If None, it defaults to 147 | # " v documentation". 148 | #html_title = None 149 | 150 | # A shorter title for the navigation bar. Default is the same as 151 | # html_title. 152 | #html_short_title = None 153 | 154 | # The name of an image file (relative to this directory) to place at the 155 | # top of the sidebar. 156 | #html_logo = None 157 | 158 | # The name of an image file (within the static path) to use as favicon 159 | # of the docs. This file should be a Windows icon file (.ico) being 160 | # 16x16 or 32x32 pixels large. 161 | #html_favicon = None 162 | 163 | # Add any paths that contain custom static files (such as style sheets) 164 | # here, relative to this directory. They are copied after the builtin 165 | # static files, so a file named "default.css" will overwrite the builtin 166 | # "default.css". 167 | # html_static_path = ['_static'] 168 | html_static_path = [] 169 | 170 | # If not '', a 'Last updated on:' timestamp is inserted at every page 171 | # bottom, using the given strftime format. 172 | #html_last_updated_fmt = '%b %d, %Y' 173 | 174 | # If true, SmartyPants will be used to convert quotes and dashes to 175 | # typographically correct entities. 176 | #html_use_smartypants = True 177 | 178 | # Custom sidebar templates, maps document names to template names. 179 | #html_sidebars = {} 180 | 181 | # Additional templates that should be rendered to pages, maps page names 182 | # to template names. 183 | #html_additional_pages = {} 184 | 185 | # If false, no module index is generated. 186 | #html_domain_indices = True 187 | 188 | # If false, no index is generated. 189 | #html_use_index = True 190 | 191 | # If true, the index is split into individual pages for each letter. 192 | #html_split_index = False 193 | 194 | # If true, links to the reST sources are added to the pages. 195 | #html_show_sourcelink = True 196 | 197 | # If true, "Created using Sphinx" is shown in the HTML footer. 198 | # Default is True. 
199 | #html_show_sphinx = True 200 | 201 | # If true, "(C) Copyright ..." is shown in the HTML footer. 202 | # Default is True. 203 | #html_show_copyright = True 204 | 205 | # If true, an OpenSearch description file will be output, and all pages 206 | # will contain a tag referring to it. The value of this option 207 | # must be the base URL from which the finished HTML is served. 208 | #html_use_opensearch = '' 209 | 210 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 211 | #html_file_suffix = None 212 | 213 | # Output file base name for HTML help builder. 214 | htmlhelp_basename = 'climate_toolboxdoc' 215 | 216 | 217 | # -- Options for LaTeX output ------------------------------------------ 218 | 219 | latex_elements = { 220 | # The paper size ('letterpaper' or 'a4paper'). 221 | #'papersize': 'letterpaper', 222 | 223 | # The font size ('10pt', '11pt' or '12pt'). 224 | #'pointsize': '10pt', 225 | 226 | # Additional stuff for the LaTeX preamble. 227 | #'preamble': '', 228 | } 229 | 230 | # Grouping the document tree into LaTeX files. List of tuples 231 | # (source start file, target name, title, author, documentclass 232 | # [howto/manual]). 233 | latex_documents = [ 234 | ('index', 'climate_toolbox.tex', 235 | u'climate_toolbox Documentation', 236 | u'ClimateImpactLab', 'manual'), 237 | ] 238 | 239 | # The name of an image file (relative to this directory) to place at 240 | # the top of the title page. 241 | #latex_logo = None 242 | 243 | # For "manual" documents, if this is true, then toplevel headings 244 | # are parts, not chapters. 245 | #latex_use_parts = False 246 | 247 | # If true, show page references after internal links. 248 | #latex_show_pagerefs = False 249 | 250 | # If true, show URL addresses after external links. 251 | #latex_show_urls = False 252 | 253 | # Documents to append as an appendix to all manuals. 254 | #latex_appendices = [] 255 | 256 | # If false, no module index is generated. 
#latex_domain_indices = True


# -- Options for manual page output ------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'climate_toolbox',
     u'climate_toolbox Documentation',
     [u'ClimateImpactLab'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False


# -- Options for Texinfo output ----------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    ('index', 'climate_toolbox',
     u'climate_toolbox Documentation',
     u'ClimateImpactLab',
     'climate_toolbox',
     'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False


# Cross-reference targets for external documentation links.
# Fixed: the mapping previously pointed at the Python 2.7 docs, but this
# package is developed and tested on Python 3 (see the GitHub Actions
# workflow in .github/workflows), so reference the Python 3 documentation.
intersphinx_mapping = {
    'python': ('https://docs.python.org/3', None)
}
# --------------------------------------------------------------------------------