├── .editorconfig ├── .flake8 ├── .github ├── DISCUSSION_TEMPLATE │ ├── questions.yml │ └── translations.yml ├── ISSUE_TEMPLATE.md ├── ISSUE_TEMPLATE │ ├── 0001-GENERIC-ISSUE-TEMPLATE.yml │ ├── 0002-BUG-REPORT.yml │ ├── 0003-FEATURE-REQUEST.yml │ └── config.yml ├── PULL_REQUEST_TEMPLATE.md ├── dependabot.yml ├── labeler.yml ├── publish-mastodon-template.md └── workflows │ ├── add-to-project.yml │ ├── bump-version.yml │ ├── cache-cleaner.yml │ ├── codeql.yml │ ├── dependency-review.yml │ ├── first-pull-request.yml │ ├── label-on-approval.yml │ ├── label.yml │ ├── main.yml │ ├── publish-mastodon.yml │ ├── publish-pypi.yml │ ├── scorecard.yml │ ├── tag-testpypi.yml │ ├── testdata-version.yml │ ├── upstream.yml │ └── workflow-warning.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .pylintrc.toml ├── .readthedocs.yml ├── .yamllint.yaml ├── .zenodo.json ├── AUTHORS.rst ├── CHANGELOG.rst ├── CI ├── requirements_ci.in ├── requirements_ci.txt └── requirements_upstream.txt ├── CITATION.cff ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.rst ├── LICENSE ├── Makefile ├── README.rst ├── SECURITY.md ├── docs ├── Makefile ├── _static │ ├── indsearch.js │ ├── style.css │ └── xarray.css ├── _templates │ └── base.html ├── analogues.rst ├── api.rst ├── api_indicators.rst ├── authors.rst ├── autodoc_indicator.py ├── changelog.rst ├── checks.rst ├── conf.py ├── contributing.rst ├── explanation.rst ├── index.rst ├── indicators.rst ├── indices.rst ├── installation.rst ├── internationalization.rst ├── logos │ ├── empty.png │ ├── xclim-logo-dark.png │ ├── xclim-logo-light.png │ ├── xclim-logo-small-dark.png │ └── xclim-logo-small-light.png ├── make.bat ├── notebooks │ ├── Indicator.svg │ ├── Indice.svg │ ├── Modules.svg │ ├── _finder.py │ ├── analogs.ipynb │ ├── cli.ipynb │ ├── customize.ipynb │ ├── ensembles-advanced.ipynb │ ├── ensembles.ipynb │ ├── example.ipynb │ ├── example │ │ ├── example.fr.json │ │ ├── example.py │ │ └── example.yml │ ├── extendxclim.ipynb │ ├── frequency_analysis.ipynb 
│ ├── index.rst │ ├── partitioning.ipynb │ ├── units.ipynb │ ├── usage.ipynb │ └── xclim_training │ │ ├── Exercices.ipynb │ │ ├── XARRAY_calcul_moy_saisonniere.ipynb │ │ ├── XCLIM Demo - Ensembles.ipynb │ │ ├── XCLIM_calculate_index-Exemple.ipynb │ │ ├── finch.ipynb │ │ ├── intro_xarray.ipynb │ │ └── readme.ipynb ├── paper │ ├── paper.bib │ └── paper.md ├── readme.rst ├── references.bib ├── references.rst ├── rstjinja.py ├── sdba.rst ├── security.rst └── support.rst ├── environment.yml ├── pyproject.toml ├── src └── xclim │ ├── __init__.py │ ├── analog.py │ ├── cli.py │ ├── core │ ├── __init__.py │ ├── _exceptions.py │ ├── _types.py │ ├── bootstrapping.py │ ├── calendar.py │ ├── cfchecks.py │ ├── datachecks.py │ ├── dataflags.py │ ├── formatting.py │ ├── indicator.py │ ├── locales.py │ ├── missing.py │ ├── options.py │ ├── units.py │ └── utils.py │ ├── data │ ├── __init__.py │ ├── anuclim.yml │ ├── cf.yml │ ├── fr.json │ ├── icclim.yml │ ├── schema.yml │ └── variables.yml │ ├── ensembles │ ├── __init__.py │ ├── _base.py │ ├── _filters.py │ ├── _partitioning.py │ ├── _reduce.py │ └── _robustness.py │ ├── indicators │ ├── __init__.py │ ├── atmos │ │ ├── __init__.py │ │ ├── _conversion.py │ │ ├── _precip.py │ │ ├── _synoptic.py │ │ ├── _temperature.py │ │ └── _wind.py │ ├── generic │ │ ├── __init__.py │ │ └── _stats.py │ ├── land │ │ ├── __init__.py │ │ ├── _snow.py │ │ └── _streamflow.py │ └── seaIce │ │ ├── __init__.py │ │ └── _seaice.py │ ├── indices │ ├── __init__.py │ ├── _agro.py │ ├── _anuclim.py │ ├── _conversion.py │ ├── _hydrology.py │ ├── _multivariate.py │ ├── _simple.py │ ├── _synoptic.py │ ├── _threshold.py │ ├── fire │ │ ├── __init__.py │ │ ├── _cffwis.py │ │ └── _ffdi.py │ ├── generic.py │ ├── helpers.py │ ├── run_length.py │ └── stats.py │ ├── sdba.py │ └── testing │ ├── __init__.py │ ├── conftest.py │ ├── helpers.py │ ├── registry.txt │ └── utils.py ├── tests ├── conftest.py ├── data │ └── callendar_1938.txt ├── test_analog.py ├── test_atmos.py ├── 
test_bootstrapping.py ├── test_calendar.py ├── test_cffwis.py ├── test_checks.py ├── test_cli.py ├── test_ensembles.py ├── test_ffdi.py ├── test_filters.py ├── test_flags.py ├── test_formatting.py ├── test_generic.py ├── test_generic_indicators.py ├── test_helpers.py ├── test_hydrology.py ├── test_indicators.py ├── test_indices.py ├── test_land.py ├── test_locales.py ├── test_missing.py ├── test_modules.py ├── test_options.py ├── test_partitioning.py ├── test_precip.py ├── test_preciptemp.py ├── test_run_length.py ├── test_seaice.py ├── test_snow.py ├── test_stats.py ├── test_temperature.py ├── test_testing_utils.py ├── test_units.py ├── test_utils.py ├── test_wind.py └── test_xsdba.py └── tox.ini /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | indent_style = space 7 | indent_size = 4 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | charset = utf-8 11 | end_of_line = lf 12 | 13 | [*.{yaml,yml}] 14 | indent_size = 2 15 | 16 | [*.bat] 17 | indent_style = tab 18 | end_of_line = crlf 19 | 20 | [LICENSE] 21 | insert_final_newline = false 22 | 23 | [Makefile] 24 | indent_style = tab 25 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | exclude = 3 | .git, 4 | docs, 5 | build, 6 | .eggs, 7 | tests 8 | ignore = 9 | C, 10 | D, 11 | E, 12 | F, 13 | RST210, 14 | RST213, 15 | W503 16 | per-file-ignores = 17 | src/xclim/core/locales.py:RST399 18 | rst-directives = 19 | bibliography, 20 | autolink-skip 21 | rst-roles = 22 | doc, 23 | mod, 24 | py:attr, 25 | py:attribute, 26 | py:class, 27 | py:const, 28 | py:data, 29 | py:func, 30 | py:indicator, 31 | py:meth, 32 | py:mod, 33 | py:obj, 34 | py:ref, 35 | ref, 36 | cite:cts, 37 | cite:p, 38 | cite:t, 39 | cite:ts 40 | 
-------------------------------------------------------------------------------- /.github/DISCUSSION_TEMPLATE/questions.yml: -------------------------------------------------------------------------------- 1 | title: "[Questions] " 2 | labels: [ "support" ] 3 | 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Thanks for opening this discussion! 9 | Before you submit, please make sure you have read our [Code of Conduct](https://github.com/Ouranosinc/xclim/blob/main/CODE_OF_CONDUCT.md). 10 | - type: textarea 11 | id: setup-information 12 | attributes: 13 | label: Setup Information 14 | description: | 15 | What xclim version are you running? 16 | value: | 17 | * Xclim version: 18 | - type: textarea 19 | id: description 20 | attributes: 21 | label: Context 22 | description: Describe what you were trying to get done. Tell us what happened, what went wrong, and what you expected to happen. 23 | - type: textarea 24 | id: steps-to-reproduce 25 | attributes: 26 | label: Steps To Reproduce 27 | description: Paste the command(s) you ran and the output. If there was a crash, please include the traceback below. 28 | value: | 29 | ``` 30 | $ pip install foo --bar 31 | ``` 32 | -------------------------------------------------------------------------------- /.github/DISCUSSION_TEMPLATE/translations.yml: -------------------------------------------------------------------------------- 1 | title: "[Translations] " 2 | labels: [ "docs" ] 3 | 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Thanks for taking the time to help translate Xclim's documentation! 9 | Before you submit, please make sure you have read our [Code of Conduct](https://github.com/Ouranosinc/xclim/blob/main/CODE_OF_CONDUCT.md). 10 | - type: textarea 11 | id: language 12 | attributes: 13 | label: Language 14 | description: What language are you translating to? 
15 | - type: textarea 16 | id: translation 17 | attributes: 18 | label: Translation 19 | description: | 20 | Please paste your translation here. 21 | If you are translating a file, please paste the file contents here. 22 | Remember that you can use Markdown formatting in this text box. 23 | value: | 24 | 33 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | - xclim version: 2 | - Python version: 3 | - Operating System: 4 | 5 | ### Description 6 | 7 | 9 | 10 | ### What I Did 11 | 12 | 14 | 15 | ``` 16 | $ pip install foo --bar 17 | ``` 18 | 19 | ### What I Received 20 | 21 | 22 | 23 | ``` 24 | Traceback (most recent call last): 25 | File "/path/to/file/script.py", line 3326, in run_code 26 | exec(code_obj, self.user_global_ns, self.user_ns) 27 | File "", line 1, in 28 | 1/0 29 | ZeroDivisionError: division by zero 30 | ``` 31 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/0001-GENERIC-ISSUE-TEMPLATE.yml: -------------------------------------------------------------------------------- 1 | name: Generic issue template 2 | description: For detailing generic/uncategorized issues in Xclim 3 | 4 | body: 5 | - type: textarea 6 | id: generic-issue 7 | attributes: 8 | label: Generic Issue 9 | description: Please fill in the following information fields as needed. 
10 | value: | 11 | * xclim version: 12 | * Python version: 13 | * Operating System: 14 | 15 | ### Description 16 | 18 | 19 | ### What I Did 20 | 22 | ``` 23 | $ pip install foo --bar 24 | ``` 25 | 26 | ### What I Received 27 | 28 | ``` 29 | Traceback (most recent call last): 30 | File "/path/to/file/script.py", line 3326, in run_code 31 | exec(code_obj, self.user_global_ns, self.user_ns) 32 | File "", line 1, in 33 | 1/0 34 | ZeroDivisionError: division by zero 35 | - type: checkboxes 36 | id: terms 37 | attributes: 38 | label: Code of Conduct 39 | description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/Ouranosinc/xclim/blob/main/CODE_OF_CONDUCT.md) 40 | options: 41 | - label: I agree to follow this project's Code of Conduct 42 | required: true 43 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/0002-BUG-REPORT.yml: -------------------------------------------------------------------------------- 1 | name: Bug report 2 | description: Help us improve Xclim 3 | labels: [ "bug" ] 4 | 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | Thanks for taking the time to fill out this bug report! 10 | - type: textarea 11 | id: setup-information 12 | attributes: 13 | label: Setup Information 14 | description: | 15 | What software versions are you running? Example: 16 | - Xclim version: 0.55.0-gamma 17 | - Python version: 4.2 18 | - Operating System: Nutmeg Linux 12.34 | macOS 11.0 "Redmond" 19 | value: | 20 | - Xclim version: 21 | - Python version: 22 | - Operating System: 23 | - type: textarea 24 | id: description 25 | attributes: 26 | label: Description 27 | description: Describe what you were trying to get done. Tell us what happened, what went wrong, and what you expected to happen. 28 | - type: textarea 29 | id: steps-to-reproduce 30 | attributes: 31 | label: Steps To Reproduce 32 | description: Paste the command(s) you ran and the output. 
If there was a crash, please include the traceback below. 33 | - type: textarea 34 | id: additional-context 35 | attributes: 36 | label: Additional context 37 | description: Add any other context about the problem here. 38 | - type: checkboxes 39 | id: submit-pr 40 | attributes: 41 | label: Contribution 42 | description: Do you intend to submit a fix for this bug? (The xclim developers will help with code compliance) 43 | options: 44 | - label: I would be willing/able to open a Pull Request to address this bug. 45 | - type: checkboxes 46 | id: terms 47 | attributes: 48 | label: Code of Conduct 49 | description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/Ouranosinc/xclim/blob/main/CODE_OF_CONDUCT.md) 50 | options: 51 | - label: I agree to follow this project's Code of Conduct 52 | required: true 53 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/0003-FEATURE-REQUEST.yml: -------------------------------------------------------------------------------- 1 | name: Feature request 2 | description: Suggest an idea for this project 3 | labels: [ "enhancement" ] 4 | 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | Thanks for taking the time to fill out this feature request! 10 | - type: textarea 11 | id: problem 12 | attributes: 13 | label: Addressing a Problem? 14 | description: Is your feature request related to a problem? Please describe it. 15 | - type: textarea 16 | id: potential-solution 17 | attributes: 18 | label: Potential Solution 19 | description: Describe the solution you'd like to see implemented. 20 | - type: textarea 21 | id: additional-context 22 | attributes: 23 | label: Additional context 24 | description: Add any other context about the feature request here. 25 | - type: checkboxes 26 | id: submit-pr 27 | attributes: 28 | label: Contribution 29 | description: Do you intend to submit a fix for this bug? 
(The xclim developers will help with code compliance) 30 | options: 31 | - label: I would be willing/able to open a Pull Request to contribute this feature. 32 | - type: checkboxes 33 | id: terms 34 | attributes: 35 | label: Code of Conduct 36 | description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/Ouranosinc/xclim/blob/main/CODE_OF_CONDUCT.md) 37 | options: 38 | - label: I agree to follow this project's Code of Conduct 39 | required: true 40 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | contact_links: 3 | - name: Questions and/or support 4 | about: "For questions or support, please use the Discussions tab" 5 | url: https://www.github.com/Ouranosinc/xclim/discussions/categories/questions 6 | - name: Translation requests 7 | about: "For coordinating translation requests, please use the Discussions tab" 8 | url: https://www.github.com/Ouranosinc/xclim/discussions/categories/translations 9 | - name: PAVICS-related questions 10 | about: "For questions related to PAVICS, the Platform for the Analysis and Visualization of Climate Science, please use the PAVICS email: pavics@ouranos.ca" 11 | url: https://pavics.ouranos.ca/index.html 12 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ### Pull Request Checklist: 4 | - [ ] This PR addresses an already opened issue (for bug fixes / features) 5 | - This PR fixes #xyz 6 | - [ ] Tests for the changes have been added (for bug fixes / features) 7 | - [ ] (If applicable) Documentation has been added / updated (for bug fixes / features) 8 | - [ ] CHANGELOG.rst has been updated (with summary of main changes) 9 | - [ ] Link to issue 
(:issue:`number`) and pull request (:pull:`number`) has been added 10 | 11 | ### What kind of change does this PR introduce? 12 | 13 | * ... 14 | 15 | ### Does this PR introduce a breaking change? 16 | 17 | 18 | ### Other information: 19 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: github-actions 4 | directory: / 5 | schedule: 6 | interval: weekly 7 | time: '12:00' 8 | groups: 9 | actions: 10 | patterns: 11 | - "*" 12 | 13 | - package-ecosystem: pip 14 | directory: / 15 | schedule: 16 | interval: weekly 17 | time: '12:00' 18 | groups: 19 | python: 20 | patterns: 21 | - "*" 22 | -------------------------------------------------------------------------------- /.github/labeler.yml: -------------------------------------------------------------------------------- 1 | # label rules used by .github/workflows/label.yml 2 | 3 | # label 'ci' all automation-related steps and files 4 | # Since this repository is in itself an automation process to deploy a server instance, 5 | # we refer here to CI as the 'meta' configuration files for managing the code and integrations with the repository, 6 | # not configurations related to the deployment process itself. 
7 | 8 | 'API': 9 | - changed-files: 10 | - any-glob-to-any-file: 11 | - 'xclim/cli.py' 12 | 13 | 'CI': 14 | - changed-files: 15 | - any-glob-to-any-file: 16 | - '.editorconfig' 17 | - '.pre-commit-config.yaml' 18 | - '.readthedocs.yml' 19 | - '.yamllint.yml' 20 | - '.github/workflows/*' 21 | - 'docs/Makefile' 22 | - 'pylintrc' 23 | - 'tox.ini' 24 | - 'Makefile' 25 | 26 | 'docs': 27 | - changed-files: 28 | - any-glob-to-any-file: 29 | - '.readthedocs.yml' 30 | - 'docs/**/*' 31 | - 'AUTHORS.rst' 32 | - 'CONTRIBUTING.rst' 33 | - 'CODE_OF_CONDUCT.md' 34 | - 'DISCUSSION_TEMPLATE/**/*' 35 | - 'ISSUE_TEMPLATE/**/*' 36 | - 'ISSUE_TEMPLATE.md' 37 | - 'PULL_REQUEST_TEMPLATE.md' 38 | - 'README.rst' 39 | 40 | 'indicators': 41 | - changed-files: 42 | - any-glob-to-any-file: 43 | - 'src/xclim/indicators/**/*' 44 | - 'src/xclim/indices/**/_*.py' 45 | - 'src/xclim/data/**/*.json' 46 | - 'src/xclim/data/**/*.yml' 47 | 48 | 'information': 49 | - changed-files: 50 | - any-glob-to-any-file: 51 | - 'CONTRIBUTING.rst' 52 | -------------------------------------------------------------------------------- /.github/publish-mastodon-template.md: -------------------------------------------------------------------------------- 1 | New #xclim release: {{ .tag }} 🎉 2 | 3 | Latest source code available at: {{ .url }} 4 | Check out the docs for more information: https://xclim.readthedocs.io/en/stable/ 5 | 6 | -------------------------------------------------------------------------------- /.github/workflows/add-to-project.yml: -------------------------------------------------------------------------------- 1 | name: Add Issues to xclim Project 2 | 3 | on: 4 | issues: 5 | types: 6 | - opened 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | add-to-project: 13 | name: Add Issue to xclim Project 14 | runs-on: ubuntu-latest 15 | permissions: 16 | repository-projects: write 17 | steps: 18 | - name: Harden Runner 19 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 
# v2.12.0 20 | with: 21 | disable-sudo: true 22 | egress-policy: block 23 | allowed-endpoints: > 24 | api.github.com:443 25 | 26 | - name: Generate App Token 27 | id: token_generator 28 | uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6 29 | with: 30 | app-id: ${{ secrets.OURANOS_HELPER_BOT_ID }} 31 | private-key: ${{ secrets.OURANOS_HELPER_BOT_KEY }} 32 | 33 | - name: Add Issue to xclim Project 34 | uses: actions/add-to-project@244f685bbc3b7adfa8466e08b698b5577571133e # v1.0.2 35 | with: 36 | project-url: https://github.com/orgs/Ouranosinc/projects/6 37 | github-token: ${{ steps.token_generator.outputs.token }} 38 | -------------------------------------------------------------------------------- /.github/workflows/bump-version.yml: -------------------------------------------------------------------------------- 1 | name: Bump Patch Version 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths-ignore: 8 | - .* 9 | - .github/*.md 10 | - .github/*.yml 11 | - .github/*/*.md 12 | - .github/*/*.yml 13 | - CHANGELOG.rst 14 | - CI/*.in 15 | - CI/*.txt 16 | - Makefile 17 | - docs/*/*.ipynb 18 | - docs/*/*.py 19 | - docs/*/*.rst 20 | - docs/Makefile 21 | - docs/make.bat 22 | - docs/references.bib 23 | - environment.yml 24 | - pylintrc 25 | - pyproject.toml 26 | - src/xclim/__init__.py 27 | - tests/*.py 28 | - tox.ini 29 | 30 | permissions: 31 | contents: read 32 | 33 | jobs: 34 | bump_patch_version: 35 | name: Bumpversion Patch 36 | runs-on: ubuntu-latest 37 | permissions: 38 | actions: read 39 | steps: 40 | - name: Harden Runner 41 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 42 | with: 43 | disable-sudo: true 44 | egress-policy: block 45 | allowed-endpoints: > 46 | api.github.com:443 47 | files.pythonhosted.org:443 48 | github.com:443 49 | pypi.org:443 50 | - name: Generate App Token 51 | id: token_generator 52 | uses: 
actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6 53 | with: 54 | app-id: ${{ secrets.OURANOS_HELPER_BOT_ID }} 55 | private-key: ${{ secrets.OURANOS_HELPER_BOT_KEY }} 56 | - name: Checkout Repository 57 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 58 | with: 59 | token: ${{ steps.token_generator.outputs.token }} 60 | persist-credentials: false 61 | - name: Set up Python3 62 | uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 63 | with: 64 | python-version: "3.x" 65 | - name: Import GPG Key 66 | uses: crazy-max/ghaction-import-gpg@e89d40939c28e39f97cf32126055eeae86ba74ec # v6.3.0 67 | with: 68 | gpg_private_key: ${{ secrets.OURANOS_HELPER_BOT_GPG_PRIVATE_KEY }} 69 | passphrase: ${{ secrets.OURANOS_HELPER_BOT_GPG_PRIVATE_KEY_PASSWORD }} 70 | git_user_signingkey: true 71 | git_commit_gpgsign: true 72 | trust_level: 5 73 | - name: Install CI libraries 74 | run: | 75 | python -m pip install --require-hashes -r CI/requirements_ci.txt 76 | - name: Conditional Bump 77 | run: | 78 | CURRENT_VERSION=$(bump-my-version show current_version) 79 | if [[ ${CURRENT_VERSION} =~ -dev(\.\d+)? 
]]; then 80 | echo "Development version (ends in 'dev(\.\d+)?'), bumping 'build' version" 81 | bump-my-version bump build 82 | else 83 | echo "Version is stable, bumping 'patch' version" 84 | bump-my-version bump patch 85 | fi 86 | echo "new_version=$(bump-my-version show current_version)" 87 | - name: Push Changes 88 | uses: ad-m/github-push-action@d91a481090679876dfc4178fef17f286781251df # v0.8.0 89 | with: 90 | force: false 91 | branch: ${{ github.ref }} 92 | github_token: ${{ steps.token_generator.outputs.token }} 93 | -------------------------------------------------------------------------------- /.github/workflows/cache-cleaner.yml: -------------------------------------------------------------------------------- 1 | # Example taken from https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#managing-caches 2 | name: Cleanup Caches on PR Merge 3 | on: 4 | pull_request: 5 | types: 6 | - closed 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | cleanup: 13 | runs-on: ubuntu-latest 14 | permissions: 15 | actions: write 16 | steps: 17 | - name: Harden Runner 18 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 19 | with: 20 | disable-sudo: true 21 | egress-policy: block 22 | allowed-endpoints: > 23 | api.github.com:443 24 | github.com:443 25 | objects.githubusercontent.com:443 26 | 27 | - name: Checkout Repository 28 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 29 | with: 30 | persist-credentials: false 31 | 32 | - name: Cleanup 33 | run: | 34 | gh extension install actions/gh-actions-cache 35 | 36 | REPO=${{ github.repository }} 37 | BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge" 38 | 39 | echo "Fetching list of cache key" 40 | cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 ) 41 | 42 | ## Setting this to not fail the workflow while deleting cache keys. 43 | set +e 44 | echo "Deleting caches..." 
45 | for cacheKey in $cacheKeysForPR 46 | do 47 | gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm 48 | done 49 | echo "Done" 50 | env: 51 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 52 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | name: CodeQL Scan 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths-ignore: 8 | - .github/*/*.yml 9 | - .pre-commit-config.yaml 10 | - CHANGELOG.rst 11 | - CI/*.txt 12 | - Makefile 13 | - docs/*/*.ipynb 14 | - docs/*/*.py 15 | - docs/*/*.rst 16 | - pyproject.toml 17 | - src/xclim/__init__.py 18 | - tox.ini 19 | pull_request: 20 | branches: 21 | - main 22 | schedule: 23 | - cron: '30 23 * * 5' 24 | 25 | permissions: # added using https://github.com/step-security/secure-repo 26 | actions: read 27 | 28 | jobs: 29 | analyze: 30 | name: Analyze 31 | runs-on: ubuntu-latest 32 | permissions: 33 | actions: read 34 | contents: read 35 | security-events: write 36 | strategy: 37 | fail-fast: false 38 | matrix: 39 | language: 40 | - 'python' 41 | steps: 42 | - name: Harden Runner 43 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 44 | with: 45 | disable-sudo: true 46 | egress-policy: block 47 | allowed-endpoints: > 48 | api.github.com:443 49 | files.pythonhosted.org:443 50 | github.com:443 51 | objects.githubusercontent.com:443 52 | pypi.org:443 53 | uploads.github.com:443 54 | 55 | - name: Checkout Repository 56 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 57 | with: 58 | persist-credentials: false 59 | 60 | # Initializes the CodeQL tools for scanning. 
61 | - name: Initialize CodeQL 62 | uses: github/codeql-action/init@1245696032ecf7d39f87d54daa406e22ddf769a8 63 | with: 64 | languages: ${{ matrix.language }} 65 | 66 | - name: Autobuild 67 | uses: github/codeql-action/autobuild@1245696032ecf7d39f87d54daa406e22ddf769a8 68 | 69 | - name: Perform CodeQL Analysis 70 | uses: github/codeql-action/analyze@1245696032ecf7d39f87d54daa406e22ddf769a8 71 | -------------------------------------------------------------------------------- /.github/workflows/dependency-review.yml: -------------------------------------------------------------------------------- 1 | # Dependency Review Action 2 | # 3 | # This Action will scan dependency manifest files that change as part of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging. 4 | # 5 | # Source repository: https://github.com/actions/dependency-review-action 6 | # Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement 7 | name: Dependency Review 8 | on: 9 | pull_request: 10 | 11 | permissions: 12 | contents: read 13 | 14 | jobs: 15 | dependency-review: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Harden Runner 19 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 20 | with: 21 | disable-sudo: true 22 | egress-policy: block 23 | allowed-endpoints: > 24 | api.deps.dev:443 25 | api.electricitymap.org:443 26 | api.github.com:443 27 | api.green-coding.io:443 28 | api.securityscorecards.dev:443 29 | github.com:443 30 | ip-api.com:80 31 | ipapi.co:443 32 | proxy.golang.org:443 33 | pypi.org:443 34 | sum.golang.org:443 35 | 36 | - name: Start Energy Measurement 37 | uses: 
green-coding-solutions/eco-ci-energy-estimation@173a7c84b6d28fc44a6c2ac4985a60ed4b5b3661 # v4.7 38 | with: 39 | task: start-measurement 40 | branch: ${{ github.head_ref || github.ref_name }} 41 | 42 | - name: Checkout Repository 43 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 44 | with: 45 | persist-credentials: false 46 | 47 | - name: Dependency Review 48 | uses: actions/dependency-review-action@da24556b548a50705dd671f47852072ea4c105d9 # v4.7.1 49 | 50 | - name: Energy Measurement 51 | uses: green-coding-solutions/eco-ci-energy-estimation@173a7c84b6d28fc44a6c2ac4985a60ed4b5b3661 # v4.7 52 | with: 53 | task: get-measurement 54 | label: 'Dependency Review' 55 | continue-on-error: true 56 | 57 | - name: Show Energy Results 58 | uses: green-coding-solutions/eco-ci-energy-estimation@173a7c84b6d28fc44a6c2ac4985a60ed4b5b3661 # v4.7 59 | with: 60 | task: display-results 61 | continue-on-error: true 62 | -------------------------------------------------------------------------------- /.github/workflows/first-pull-request.yml: -------------------------------------------------------------------------------- 1 | name: First Pull Request 2 | 3 | on: 4 | pull_request_target: 5 | types: 6 | - opened 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | welcome: 13 | name: Welcome 14 | runs-on: ubuntu-latest 15 | permissions: 16 | pull-requests: write 17 | steps: 18 | - name: Harden Runner 19 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 20 | with: 21 | disable-sudo: true 22 | egress-policy: block 23 | allowed-endpoints: > 24 | api.github.com:443 25 | 26 | - name: Welcome New Contributor 27 | uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 28 | with: 29 | script: | 30 | // Get a list of all issues created by the PR opener 31 | // See: https://octokit.github.io/rest.js/#pagination 32 | const creator = context.payload.sender.login 33 | const opts = 
github.rest.issues.listForRepo.endpoint.merge({ 34 | ...context.issue, 35 | creator, 36 | state: 'all' 37 | }) 38 | const issues = await github.paginate(opts) 39 | 40 | for (const issue of issues) { 41 | if (issue.number === context.issue.number) { 42 | continue 43 | } 44 | 45 | if (issue.pull_request) { 46 | return // Creator is already a contributor. 47 | } 48 | } 49 | 50 | await github.rest.issues.createComment({ 51 | issue_number: context.issue.number, 52 | owner: context.repo.owner, 53 | repo: context.repo.repo, 54 | body: `**Welcome**, new contributor! 55 | 56 | It appears that this is your first Pull Request. To give credit where it's due, we ask that you add your information to the \`AUTHORS.rst\` and \`.zenodo.json\`: 57 | - [ ] The relevant author information has been added to \`AUTHORS.rst\` and \`.zenodo.json\` 58 | 59 | Please make sure you've read our [contributing guide](CONTRIBUTING.rst). We look forward to reviewing your Pull Request shortly ✨` 60 | }) 61 | -------------------------------------------------------------------------------- /.github/workflows/label-on-approval.yml: -------------------------------------------------------------------------------- 1 | name: Label Pull Request on Pull Request review approval 2 | 3 | on: 4 | pull_request_review: 5 | types: 6 | - submitted 7 | pull_request_target: 8 | types: 9 | - ready_for_review 10 | - review_requested 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | label_approved: 17 | name: Label on Approval 18 | runs-on: ubuntu-latest 19 | if: | 20 | (!contains(github.event.pull_request.labels.*.name, 'approved')) && 21 | (github.event.review.state == 'approved') && 22 | (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) 23 | permissions: 24 | pull-requests: write 25 | steps: 26 | - name: Harden Runner 27 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 28 | with: 29 | disable-sudo: true 30 | 
egress-policy: block 31 | allowed-endpoints: > 32 | api.github.com:443 33 | - name: Label Approved 34 | uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 35 | with: 36 | script: | 37 | github.rest.issues.addLabels({ 38 | issue_number: context.issue.number, 39 | owner: context.repo.owner, 40 | repo: context.repo.repo, 41 | labels: ['approved'] 42 | }) 43 | 44 | comment_approved: 45 | name: Comment Concerning Approved Tag 46 | runs-on: ubuntu-latest 47 | if: | 48 | (github.event_name == 'pull_request_target') && 49 | (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) 50 | permissions: 51 | pull-requests: write 52 | steps: 53 | - name: Harden Runner 54 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 55 | with: 56 | disable-sudo: true 57 | egress-policy: block 58 | allowed-endpoints: > 59 | api.github.com:443 60 | - name: Find Warning Comment 61 | uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 62 | id: fc_warning 63 | with: 64 | issue-number: ${{ github.event.pull_request.number }} 65 | comment-author: 'github-actions[bot]' 66 | body-includes: This Pull Request is coming from a fork and must be manually tagged `approved` in order to perform additional testing. 67 | - name: Update Warning Comment 68 | if: | 69 | (steps.fc_warning.outputs.comment-id == '') && 70 | (!contains(github.event.pull_request.labels.*.name, 'approved')) && 71 | (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) 72 | uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0 73 | with: 74 | comment-id: ${{ steps.fc_warning.outputs.comment-id }} 75 | issue-number: ${{ github.event.pull_request.number }} 76 | body: | 77 | > [!WARNING] 78 | > This Pull Request is coming from a fork and must be manually tagged `approved` in order to perform additional testing. 
79 | edit-mode: replace 80 | - name: Find Note Comment 81 | uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 82 | id: fc_note 83 | with: 84 | issue-number: ${{ github.event.pull_request.number }} 85 | comment-author: 'github-actions[bot]' 86 | body-includes: This Pull Request has been manually approved for additional testing! 87 | - name: Update Note Comment 88 | if: | 89 | (steps.fc_note.outputs.comment-id == '') && 90 | contains(github.event.pull_request.labels.*.name, 'approved') 91 | uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0 92 | with: 93 | comment-id: ${{ steps.fc_note.outputs.comment-id }} 94 | issue-number: ${{ github.event.pull_request.number }} 95 | body: | 96 | > [!NOTE] 97 | > This Pull Request has been manually approved for additional testing! 98 | reactions: | 99 | hooray 100 | edit-mode: append 101 | -------------------------------------------------------------------------------- /.github/workflows/label.yml: -------------------------------------------------------------------------------- 1 | # This workflow will triage pull requests and apply a label based on the 2 | # paths that are modified in the pull request. 3 | # 4 | # To use this workflow, you will need to set up a .github/labeler.yml 5 | # file with configuration. For more information, see: 6 | # https://github.com/actions/labeler/blob/master/README.md 7 | 8 | name: Labeler 9 | on: 10 | # Note: potential security risk from this action using pull_request_target. 11 | # Do not add actions in here which need a checkout of the repo, and do not use any caching in here. 
12 | # See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target 13 | pull_request_target: 14 | types: 15 | - opened 16 | - reopened 17 | - synchronize 18 | 19 | permissions: 20 | contents: read 21 | 22 | jobs: 23 | label: 24 | name: Label 25 | runs-on: ubuntu-latest 26 | permissions: 27 | pull-requests: write 28 | steps: 29 | - name: Harden Runner 30 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 31 | with: 32 | disable-sudo: true 33 | egress-policy: block 34 | allowed-endpoints: > 35 | api.github.com:443 36 | 37 | - name: Labeler 38 | uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0 39 | with: 40 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 41 | -------------------------------------------------------------------------------- /.github/workflows/publish-mastodon.yml: -------------------------------------------------------------------------------- 1 | name: Publish Release Announcement to Mastodon 2 | 3 | on: 4 | release: 5 | types: 6 | - released 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | toot: 13 | name: Generate Mastodon Toot 14 | runs-on: ubuntu-latest 15 | environment: production 16 | steps: 17 | - name: Harden Runner 18 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 19 | with: 20 | disable-sudo: true 21 | egress-policy: block 22 | allowed-endpoints: > 23 | api.github.com:443 24 | fosstodon.org:443 25 | github.com:443 26 | 27 | - name: Checkout Repository 28 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 29 | with: 30 | persist-credentials: false 31 | 32 | - name: Get Release Description 33 | run: | 34 | # Fetch the release information using the GitHub API 35 | RELEASE_INFO=$(curl -sH "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \ 36 | "https://api.github.com/repos/${{ github.repository }}/releases/tags/${TAG_NAME}") 37 | 38 | # Extract the release description 
from the response 39 | RELEASE_DESCRIPTION=$(echo "$RELEASE_INFO" | jq -r .body) 40 | 41 | # Remove Markdown links and the space preceding them 42 | CLEANED_DESCRIPTION=$(echo "$RELEASE_DESCRIPTION" | sed -E 's/\ \(\[[^]]+\]\([^)]+\)\)//g') 43 | 44 | # Extract the first line of the release description 45 | CONTRIBUTORS=$(echo "$CLEANED_DESCRIPTION" | head -n 1) 46 | 47 | echo "contributors=${CONTRIBUTORS}" >> $GITHUB_ENV 48 | env: 49 | TAG_NAME: ${{ github.event.release.tag_name }} 50 | 51 | - name: Prepare Message 52 | id: render_template 53 | uses: chuhlomin/render-template@807354a04d9300c9c2ac177c0aa41556c92b3f75 # v1.10 54 | with: 55 | template: .github/publish-mastodon-template.md 56 | vars: | 57 | tag: ${{ github.event.release.tag_name }} 58 | url: https://github.com/Ouranosinc/xclim/releases/tag/${{ github.event.release.tag_name }} 59 | 60 | - name: Message Preview 61 | run: | 62 | echo "${TEMPLATE}${CONTRIBUTORS}" 63 | env: 64 | TEMPLATE: ${{ steps.render_template.outputs.result }} 65 | CONTRIBUTORS: ${{ env.contributors }} 66 | 67 | - name: Send toot to Mastodon 68 | uses: cbrgm/mastodon-github-action@740aa5979f7dd752b329e3d3e3492166e5ada890 # v2.1.16 69 | with: 70 | url: ${{ secrets.MASTODON_URL }} 71 | access-token: ${{ secrets.MASTODON_ACCESS_TOKEN }} 72 | message: "${{ steps.render_template.outputs.result }}${{ env.contributors }}" 73 | language: "en" 74 | visibility: "public" 75 | -------------------------------------------------------------------------------- /.github/workflows/publish-pypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish Python 🐍 distributions 📦 to PyPI 2 | 3 | on: 4 | release: 5 | types: 6 | - published 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | build-n-publish-pypi: 13 | name: Build and publish Python 🐍 distributions 📦 to PyPI 14 | environment: production 15 | permissions: 16 | # IMPORTANT: this permission is mandatory for trusted publishing 17 | id-token: 
write 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: Harden Runner 21 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 22 | with: 23 | disable-sudo: true 24 | egress-policy: block 25 | allowed-endpoints: > 26 | files.pythonhosted.org:443 27 | github.com:443 28 | pypi.org:443 29 | tuf-repo-cdn.sigstore.dev:443 30 | upload.pypi.org:443 31 | - name: Checkout Repository 32 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 33 | with: 34 | persist-credentials: false 35 | - name: Set up Python3 36 | uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 37 | with: 38 | python-version: "3.x" 39 | - name: Install CI libraries 40 | run: | 41 | python -m pip install --require-hashes -r CI/requirements_ci.txt 42 | - name: Build a binary wheel and a source tarball 43 | run: | 44 | python -m flit build 45 | - name: Publish distribution 📦 to PyPI 46 | uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4 47 | -------------------------------------------------------------------------------- /.github/workflows/scorecard.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. They are provided 2 | # by a third-party and are governed by separate terms of service, privacy 3 | # policy, and support documentation. 4 | 5 | name: Scorecard supply-chain security 6 | on: 7 | # For Branch-Protection check. Only the default branch is supported. See 8 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection 9 | branch_protection_rule: 10 | # To guarantee Maintained check is occasionally updated. See 11 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained 12 | schedule: 13 | - cron: '41 8 * * 4' 14 | push: 15 | branches: 16 | - main 17 | 18 | # Declare default permissions as read only.
19 | # Read-all permission is not technically needed for this workflow. 20 | permissions: 21 | contents: read 22 | 23 | jobs: 24 | analysis: 25 | name: Scorecard analysis 26 | runs-on: ubuntu-latest 27 | permissions: 28 | # Needed to upload the results to code-scanning dashboard. 29 | security-events: write 30 | # Needed to publish results and get a badge (see publish_results below). 31 | id-token: write 32 | 33 | steps: 34 | - name: Harden Runner 35 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 36 | with: 37 | egress-policy: audit 38 | 39 | - name: Checkout Repository 40 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 41 | with: 42 | persist-credentials: false 43 | 44 | - name: Run Analysis 45 | uses: ossf/scorecard-action@05b42c624433fc40578a4040d5cf5e36ddca8cde # v2.4.2 46 | with: 47 | results_file: results.sarif 48 | results_format: sarif 49 | # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: 50 | # - you want to enable the Branch-Protection check on a *public* repository, or 51 | # - you are installing Scorecard on a *private* repository 52 | # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat. 53 | repo_token: ${{ secrets.OPENSSF_SCORECARD_TOKEN }} 54 | 55 | # Public repositories: 56 | # - Publish results to OpenSSF REST API for easy access by consumers 57 | # - Allows the repository to include the Scorecard badge. 58 | # - See https://github.com/ossf/scorecard-action#publishing-results. 59 | publish_results: true 60 | 61 | # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF 62 | # format to the repository Actions tab. 
63 | - name: Upload Artifact 64 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 65 | with: 66 | name: SARIF file 67 | path: results.sarif 68 | retention-days: 5 69 | 70 | # Upload the results to GitHub's code scanning dashboard. 71 | - name: Upload to code-scanning 72 | uses: github/codeql-action/upload-sarif@e5f05b81d5b6ff8cfa111c80c22c5fd02a384118 # 3.23.0 73 | with: 74 | sarif_file: results.sarif 75 | -------------------------------------------------------------------------------- /.github/workflows/tag-testpypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish Python 🐍 distributions 📦 to TestPyPI 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | build-n-publish-testpypi: 13 | name: Build and publish Python 🐍 distributions 📦 to TestPyPI 14 | environment: staging 15 | permissions: 16 | # IMPORTANT: this permission is mandatory for trusted publishing 17 | id-token: write 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: Harden Runner 21 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 22 | with: 23 | disable-sudo: true 24 | egress-policy: block 25 | allowed-endpoints: > 26 | files.pythonhosted.org:443 27 | github.com:443 28 | pypi.org:443 29 | tuf-repo-cdn.sigstore.dev:443 30 | test.pypi.org:443 31 | - name: Checkout Repository 32 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 33 | with: 34 | persist-credentials: false 35 | - name: Set up Python3 36 | uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 37 | with: 38 | python-version: "3.x" 39 | - name: Install CI libraries 40 | run: | 41 | python -m pip install --require-hashes -r CI/requirements_ci.txt 42 | - name: Build a binary wheel and a source tarball 43 | run: | 44 | python -m flit build 45 | - name: Publish distribution 📦 to Test PyPI 46 | uses:
pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4 47 | with: 48 | repository-url: https://test.pypi.org/legacy/ 49 | skip-existing: true 50 | -------------------------------------------------------------------------------- /.github/workflows/testdata-version.yml: -------------------------------------------------------------------------------- 1 | name: Verify Testing Data 2 | 3 | on: 4 | pull_request: 5 | types: 6 | - opened 7 | - reopened 8 | - synchronize 9 | paths: 10 | - .github/workflows/main.yml 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | use-latest-tag: 17 | name: Check Latest xclim-testdata Tag 18 | runs-on: ubuntu-latest 19 | if: | 20 | (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) 21 | permissions: 22 | pull-requests: write 23 | steps: 24 | - name: Harden Runner 25 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 26 | with: 27 | disable-sudo: true 28 | egress-policy: block 29 | allowed-endpoints: > 30 | api.github.com:443 31 | github.com:443 32 | - name: Checkout Repository 33 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 34 | with: 35 | persist-credentials: false 36 | - name: Find xclim-testdata Tag and CI Testing Branch 37 | run: | 38 | XCLIM_TESTDATA_TAG="$( \ 39 | git -c 'versionsort.suffix=-' \ 40 | ls-remote --exit-code --refs --sort='version:refname' --tags https://github.com/Ouranosinc/xclim-testdata '*.*.*' \ 41 | | tail --lines=1 \ 42 | | cut --delimiter='/' --fields=3)" 43 | echo "XCLIM_TESTDATA_TAG=${XCLIM_TESTDATA_TAG}" >> $GITHUB_ENV 44 | XCLIM_TESTDATA_BRANCH="$(grep -E "XCLIM_TESTDATA_BRANCH" .github/workflows/main.yml | cut -d ' ' -f4)" 45 | echo "XCLIM_TESTDATA_BRANCH=${XCLIM_TESTDATA_BRANCH}" >> $GITHUB_ENV 46 | - name: Report Versions Found 47 | run: | 48 | echo "Latest xclim-testdata tag: ${XCLIM_TESTDATA_TAG}" 49 | echo "Tag for xclim-testdata in CI: 
${XCLIM_TESTDATA_BRANCH}" 50 | env: 51 | XCLIM_TESTDATA_TAG: ${{ env.XCLIM_TESTDATA_TAG }} 52 | XCLIM_TESTDATA_BRANCH: ${{ env.XCLIM_TESTDATA_BRANCH }} 53 | - name: Find Comment 54 | uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 55 | id: fc 56 | with: 57 | issue-number: ${{ github.event.pull_request.number }} 58 | comment-author: 'github-actions[bot]' 59 | body-includes: It appears that this Pull Request modifies the `main.yml` workflow. 60 | - name: Compare Versions 61 | if: ${{( env.XCLIM_TESTDATA_TAG != env.XCLIM_TESTDATA_BRANCH )}} 62 | uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 63 | with: 64 | script: | 65 | core.setFailed('Configured `xclim-testdata` tag is not `latest`.') 66 | - name: Update Failure Comment 67 | if: ${{ failure() }} 68 | uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0 69 | with: 70 | comment-id: ${{ steps.fc.outputs.comment-id }} 71 | issue-number: ${{ github.event.pull_request.number }} 72 | body: | 73 | > [!WARNING] 74 | > It appears that this Pull Request modifies the `main.yml` workflow. 75 | 76 | On inspection, it seems that the `XCLIM_TESTDATA_BRANCH` environment variable is set to a tag that is not the latest in the `Ouranosinc/xclim-testdata` repository. 77 | 78 | This value must match the most recent tag (`${{ env.XCLIM_TESTDATA_TAG }}`) in order to merge this Pull Request. 79 | 80 | If this PR depends on changes in a new testing dataset branch, be sure to tag a new version of `Ouranosinc/xclim-testdata` once your changes have been merged to its `main` branch. 
81 | edit-mode: replace 82 | - name: Update Success Comment 83 | if: ${{ success() }} 84 | uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0 85 | with: 86 | comment-id: ${{ steps.fc.outputs.comment-id }} 87 | issue-number: ${{ github.event.pull_request.number }} 88 | body: | 89 | > [!NOTE] 90 | > It appears that this Pull Request modifies the `main.yml` workflow. 91 | 92 | On inspection, the `XCLIM_TESTDATA_BRANCH` environment variable is set to the most recent tag (`${{ env.XCLIM_TESTDATA_TAG }}`). 93 | 94 | No further action is required. 95 | edit-mode: replace 96 | -------------------------------------------------------------------------------- /.github/workflows/upstream.yml: -------------------------------------------------------------------------------- 1 | name: Test Upstream Dependencies 2 | on: 3 | push: 4 | branches: 5 | - main 6 | paths-ignore: 7 | - CHANGELOG.rst 8 | - README.rst 9 | - pyproject.toml 10 | - src/xclim/__init__.py 11 | schedule: 12 | - cron: "0 0 * * *" # Daily “At 00:00” UTC 13 | workflow_dispatch: # allows you to trigger the workflow run manually 14 | 15 | concurrency: 16 | group: ${{ github.workflow }}-${{ github.ref }} 17 | cancel-in-progress: true 18 | 19 | permissions: 20 | contents: read 21 | 22 | jobs: 23 | upstream-dev: 24 | name: test-upstream-dev (Python${{ matrix.python-version }}) 25 | runs-on: ubuntu-latest 26 | permissions: 27 | issues: write 28 | if: | 29 | (github.event_name == 'schedule') || 30 | (github.event_name == 'workflow_dispatch') || 31 | (github.event_name == 'push') 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | python-version: [ "3.12" ] 36 | testdata-cache: [ '~/.cache/xclim-testdata' ] 37 | defaults: 38 | run: 39 | shell: bash -l {0} 40 | steps: 41 | - name: Harden Runner 42 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 43 | with: 44 | disable-sudo: true 45 | egress-policy: block 46 | allowed-endpoints: > 47 | 
api.electricitymap.org:443 48 | api.github.com:443 49 | api.green-coding.io:443 50 | api.securityscorecards.dev:443 51 | conda.anaconda.org:443 52 | dap.service.does.not.exist:443 53 | files.pythonhosted.org:443 54 | github.com:443 55 | ip-api.com:80 56 | ipapi.co:443 57 | objects.githubusercontent.com:443 58 | proxy.golang.org:443 59 | pypi.org:443 60 | raw.githubusercontent.com:443 61 | repo.anaconda.com:443 62 | sum.golang.org:443 63 | - name: Start Measurement 64 | uses: green-coding-solutions/eco-ci-energy-estimation@173a7c84b6d28fc44a6c2ac4985a60ed4b5b3661 # v4.7 65 | with: 66 | task: start-measurement 67 | branch: ${{ github.head_ref || github.ref_name }} 68 | - name: Checkout Repository 69 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 70 | with: 71 | fetch-depth: 0 # Fetch all history for all branches and tags. 72 | persist-credentials: false 73 | - name: Setup Conda (Micromamba) with Python${{ matrix.python-version }} 74 | uses: mamba-org/setup-micromamba@0dea6379afdaffa5d528b3d1dabc45da37f443fc # v2.0.4 75 | with: 76 | cache-downloads: true 77 | cache-environment: true 78 | environment-file: environment.yml 79 | create-args: >- 80 | eigen 81 | pybind11 82 | pytest-reportlog 83 | python=${{ matrix.python-version }} 84 | - name: Micromamba version 85 | run: | 86 | echo "micromamba: $(micromamba --version)" 87 | - name: Install upstream versions and SBCK 88 | run: | 89 | # git-based dependencies cannot be installed from hashes 90 | python -m pip install -r CI/requirements_upstream.txt 91 | python -m pip install "sbck @ git+https://github.com/yrobink/SBCK-python.git@master" 92 | - name: Install xclim 93 | run: | 94 | python -m pip install --no-user --no-deps --editable .
95 | - name: Check versions 96 | run: | 97 | micromamba list 98 | xclim show_version_info 99 | python -m pip check || true 100 | - name: Setup Python Measurement 101 | uses: green-coding-solutions/eco-ci-energy-estimation@173a7c84b6d28fc44a6c2ac4985a60ed4b5b3661 # v4.7 102 | with: 103 | task: get-measurement 104 | label: 'Environment Setup (Upstream, Python${{ matrix.python-version }})' 105 | continue-on-error: true 106 | - name: Test Data Caching 107 | uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 108 | with: 109 | path: | 110 | ${{ matrix.testdata-cache }} 111 | key: ${{ runner.os }}-xclim-testdata-upstream-${{ hashFiles('pyproject.toml', 'tox.ini') }} 112 | - name: Run Tests 113 | if: success() 114 | id: status 115 | run: | 116 | python -m pytest --numprocesses=logical --durations=10 --cov=xclim --cov-report=term-missing --report-log output-${{ matrix.python-version }}-log.jsonl 117 | - name: Generate and publish the report 118 | if: | 119 | failure() 120 | && steps.status.outcome == 'failure' 121 | && github.event_name == 'schedule' 122 | && github.repository_owner == 'Ouranosinc' 123 | uses: xarray-contrib/issue-from-pytest-log@f94477e45ef40e4403d7585ba639a9a3bcc53d43 # v1.3.0 124 | with: 125 | issue-title: "⚠️ Nightly upstream-dev CI failed for Python${{ matrix.python-version }} ⚠️" 126 | log-path: output-${{ matrix.python-version }}-log.jsonl 127 | - name: Tests measurement 128 | uses: green-coding-solutions/eco-ci-energy-estimation@173a7c84b6d28fc44a6c2ac4985a60ed4b5b3661 # v4.7 129 | with: 130 | task: get-measurement 131 | label: 'Testing and Reporting (Upstream, Python${{ matrix.python-version }})' 132 | continue-on-error: true 133 | - name: Show Energy Results 134 | uses: green-coding-solutions/eco-ci-energy-estimation@173a7c84b6d28fc44a6c2ac4985a60ed4b5b3661 # v4.7 135 | with: 136 | task: display-results 137 | continue-on-error: true 138 | -------------------------------------------------------------------------------- 
/.github/workflows/workflow-warning.yml: -------------------------------------------------------------------------------- 1 | name: Workflow Changes Warnings 2 | 3 | on: 4 | # Note: potential security risk from this action using pull_request_target. 5 | # Do not add actions in here which need a checkout of the repo, and do not use any caching in here. 6 | # See: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target 7 | pull_request_target: 8 | types: 9 | - opened 10 | - reopened 11 | - synchronize 12 | paths: 13 | - .github/workflows/*.yml 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | comment-concerning-workflow-changes: 20 | name: Comment Concerning Workflow Changes 21 | runs-on: ubuntu-latest 22 | if: | 23 | (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) 24 | permissions: 25 | contents: read 26 | pull-requests: write 27 | steps: 28 | - name: Harden Runner 29 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 30 | with: 31 | disable-sudo: true 32 | egress-policy: block 33 | allowed-endpoints: > 34 | api.github.com:443 35 | - name: Find Warning Comment 36 | uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 37 | id: fc_warning 38 | with: 39 | issue-number: ${{ github.event.pull_request.number }} 40 | comment-author: 'github-actions[bot]' 41 | body-includes: | 42 | This Pull Request modifies GitHub workflows and is coming from a fork. 
43 | - name: Create Warning Comment 44 | if: | 45 | (steps.fc_warning.outputs.comment-id == '') && 46 | (!contains(github.event.pull_request.labels.*.name, 'approved')) && 47 | (github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name) 48 | uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0 49 | with: 50 | comment-id: ${{ steps.fc_warning.outputs.comment-id }} 51 | issue-number: ${{ github.event.pull_request.number }} 52 | body: | 53 | > [!WARNING] 54 | > This Pull Request modifies GitHub Workflows and is coming from a fork. 55 | **It is very important for the reviewer to ensure that the workflow changes are appropriate.** 56 | edit-mode: replace 57 | - name: Find Note Comment 58 | uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 59 | id: fc_note 60 | with: 61 | issue-number: ${{ github.event.pull_request.number }} 62 | comment-author: 'github-actions[bot]' 63 | body-includes: Workflow changes in this Pull Request have been approved! 64 | - name: Update Comment 65 | if: | 66 | contains(github.event.pull_request.labels.*.name, 'approved') 67 | uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0 68 | with: 69 | comment-id: ${{ steps.fc_note.outputs.comment-id }} 70 | issue-number: ${{ github.event.pull_request.number }} 71 | body: | 72 | > [!NOTE] 73 | > Workflow changes in this Pull Request have been approved! 
74 | reactions: | 75 | hooray 76 | edit-mode: replace 77 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # Doc-generated files 7 | docs/notebooks/dask-worker-space/ 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | env/ 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.lcov 49 | coverage.xml 50 | *.cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | 62 | # Flask stuff: 63 | instance/ 64 | .webassets-cache 65 | 66 | # Scrapy stuff: 67 | .scrapy 68 | 69 | # Sphinx documentation 70 | docs/_build/ 71 | 72 | # PyBuilder 73 | target/ 74 | 75 | # Jupyter Notebook 76 | .ipynb_checkpoints 77 | 78 | # pyenv 79 | .python-version 80 | 81 | # celery beat schedule file 82 | celerybeat-schedule 83 | 84 | # SageMath parsed files 85 | *.sage.py 86 | 87 | # dotenv 88 | .env 89 | 90 | # virtualenv 91 | .venv 92 | venv/ 93 | ENV/ 94 | 95 | # Spyder project settings 96 | .spyderproject 97 | .spyproject 98 | 99 | # Rope project settings 100 | .ropeproject 101 | 102 | # mkdocs documentation 103 | /site 104 | 105 | # mypy 106 | .mypy_cache/ 107 | .idea/ 108 | 
109 | # autogenerated RestructuredText 110 | docs/apidoc/modules.rst 111 | docs/apidoc/xclim*.rst 112 | docs/_dynamic/indicators.json 113 | docs/variables.json 114 | 115 | # VS Code 116 | .vscode 117 | 118 | # netCDF files 119 | *.nc 120 | 121 | # zarr store 122 | *.zarr 123 | 124 | # dask 125 | dask-worker-space 126 | 127 | # Apple 128 | .DS_Store 129 | 130 | # Testing registries 131 | /src/xclim/testing/*/ 132 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_language_version: 2 | python: python3 3 | 4 | repos: 5 | - repo: https://github.com/asottile/pyupgrade 6 | rev: v3.20.0 7 | hooks: 8 | - id: pyupgrade 9 | args: ['--py310-plus'] 10 | exclude: 'src/xclim/core/indicator.py' 11 | - repo: https://github.com/pre-commit/pre-commit-hooks 12 | rev: v5.0.0 13 | hooks: 14 | - id: trailing-whitespace 15 | - id: end-of-file-fixer 16 | exclude: '.ipynb|.github/publish-mastodon-template.md' 17 | - id: fix-byte-order-marker 18 | - id: name-tests-test 19 | args: [ '--pytest-test-first' ] 20 | - id: no-commit-to-branch 21 | args: [ '--branch', 'main' ] 22 | - id: check-merge-conflict 23 | - id: check-json 24 | - id: check-toml 25 | - id: check-yaml 26 | args: [ '--allow-multiple-documents' ] 27 | - id: debug-statements 28 | - id: pretty-format-json 29 | args: [ '--autofix', '--no-ensure-ascii', '--no-sort-keys' ] 30 | exclude: '.ipynb' 31 | - repo: https://github.com/pappasam/toml-sort 32 | rev: v0.24.2 33 | hooks: 34 | - id: toml-sort-fix 35 | exclude: '.pylintrc.toml' 36 | - repo: https://github.com/adrienverge/yamllint.git 37 | rev: v1.37.1 38 | hooks: 39 | - id: yamllint 40 | args: [ '--config-file=.yamllint.yaml' ] 41 | - repo: https://github.com/astral-sh/ruff-pre-commit 42 | rev: v0.11.12 43 | hooks: 44 | - id: ruff-format 45 | exclude: '(src/xclim/indices/__init__.py|docs/installation.rst)' 46 | - id: ruff 47 | args: 
[ '--fix', '--show-fixes' ] 48 | - repo: https://github.com/pylint-dev/pylint 49 | rev: v3.3.7 50 | hooks: 51 | - id: pylint 52 | args: [ '--rcfile=.pylintrc.toml', '--errors-only', '--jobs=0', '--disable=import-error' ] 53 | - repo: https://github.com/pycqa/flake8 54 | rev: 7.2.0 55 | hooks: 56 | - id: flake8 57 | additional_dependencies: [ 'flake8-rst-docstrings' ] 58 | args: [ '--config=.flake8' ] 59 | - repo: https://github.com/jendrikseipp/vulture 60 | rev: 'v2.14' 61 | hooks: 62 | - id: vulture 63 | - repo: https://github.com/kynan/nbstripout 64 | rev: 0.8.1 65 | hooks: 66 | - id: nbstripout 67 | files: '.ipynb' 68 | args: [ '--extra-keys=metadata.kernelspec' ] 69 | - repo: https://github.com/pre-commit/pygrep-hooks 70 | rev: v1.10.0 71 | hooks: 72 | # - id: python-check-blanket-noqa 73 | # - id: python-check-blanket-type-ignore 74 | - id: python-no-eval 75 | - id: python-no-log-warn 76 | - id: python-use-type-annotations 77 | - id: rst-directive-colons 78 | - id: rst-inline-touching-normal 79 | - id: text-unicode-replacement-char 80 | - repo: https://github.com/executablebooks/mdformat 81 | rev: 0.7.22 82 | hooks: 83 | - id: mdformat 84 | exclude: '.github/\w+.md|.github/publish-mastodon-template.md|docs/paper/paper.md' 85 | - repo: https://github.com/keewis/blackdoc 86 | rev: v0.3.9 87 | hooks: 88 | - id: blackdoc 89 | additional_dependencies: [ 'black==25.1.0' ] 90 | exclude: '(.py|docs/installation.rst)' 91 | - repo: https://github.com/codespell-project/codespell 92 | rev: v2.4.1 93 | hooks: 94 | - id: codespell 95 | additional_dependencies: [ 'tomli' ] 96 | args: [ '--toml=pyproject.toml' ] 97 | - repo: https://github.com/numpy/numpydoc 98 | rev: v1.8.0 99 | hooks: 100 | - id: numpydoc-validation 101 | # Exclude docs/, tests/, and the xclim.core.missing submodule from validation. 102 | exclude: ^docs/|^tests/|^src/xclim/core/missing.py 103 | - repo: https://github.com/fluiddyn/formattex-pre-commit 104 | rev: 0.1.4 105 | hooks: 106 | - id: formatbibtex 107 | - repo:
https://github.com/gitleaks/gitleaks 108 | rev: v8.27.0 109 | hooks: 110 | - id: gitleaks 111 | - repo: https://github.com/python-jsonschema/check-jsonschema 112 | rev: 0.33.0 113 | hooks: 114 | - id: check-github-workflows 115 | - id: check-readthedocs 116 | - repo: meta 117 | hooks: 118 | - id: check-hooks-apply 119 | - id: check-useless-excludes 120 | 121 | ci: 122 | autofix_commit_msg: | 123 | [pre-commit.ci] auto fixes from pre-commit.com hooks 124 | 125 | for more information, see https://pre-commit.ci 126 | autofix_prs: true 127 | autoupdate_branch: '' 128 | autoupdate_commit_msg: '[pre-commit.ci] pre-commit autoupdate' 129 | autoupdate_schedule: monthly 130 | skip: [ nbstripout ] 131 | submodules: false 132 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | sphinx: 4 | configuration: docs/conf.py 5 | fail_on_warning: true 6 | 7 | #formats: 8 | # - pdf 9 | 10 | build: 11 | os: ubuntu-24.04 12 | tools: 13 | python: "mambaforge-23.11" 14 | jobs: 15 | pre_create_environment: 16 | - sed -i "s/python >=3.11,<3.14/python >=3.11,<3.13/" environment.yml 17 | pre_build: 18 | - sphinx-apidoc -o docs/apidoc/ --private --module-first src/xclim src/xclim/testing/tests src/xclim/indicators src/xclim/indices 19 | - rm docs/apidoc/xclim.rst 20 | - env SPHINX_APIDOC_OPTIONS="members,undoc-members,show-inheritance,noindex" sphinx-apidoc -o docs/apidoc/ --private --module-first src/xclim src/xclim/testing/tests 21 | - sphinx-build -b linkcheck docs/ _build/linkcheck || true 22 | 23 | conda: 24 | environment: environment.yml 25 | 26 | python: 27 | install: 28 | - method: pip 29 | path: . 
30 | extra_requirements: 31 | - dev 32 | - extras 33 | 34 | search: 35 | ranking: 36 | notebooks/*: 2 37 | api_indicators.html: 1 38 | indices.html: -1 39 | _modules/*: -3 40 | -------------------------------------------------------------------------------- /.yamllint.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | rules: 4 | 5 | brackets: 6 | forbid: false 7 | min-spaces-inside: 0 8 | max-spaces-inside: 1 9 | 10 | commas: 11 | min-spaces-after: 1 12 | 13 | document-start: disable 14 | 15 | float-values: 16 | require-numeral-before-decimal: true 17 | 18 | hyphens: 19 | max-spaces-after: 1 20 | 21 | indentation: 22 | indent-sequences: whatever 23 | spaces: consistent 24 | 25 | key-duplicates: 26 | forbid-duplicated-merge-keys: true 27 | 28 | line-length: 29 | allow-non-breakable-words: true 30 | allow-non-breakable-inline-mappings: true 31 | max: 120 32 | level: warning 33 | 34 | new-lines: 35 | type: unix 36 | 37 | trailing-spaces: {} 38 | 39 | truthy: disable 40 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | Credits 3 | ======= 4 | 5 | Development Lead 6 | ---------------- 7 | 8 | * Travis Logan `@tlogan2000 `_ 9 | 10 | Co-Developers 11 | ------------- 12 | 13 | * Abel Aoun `@bzah `_ 14 | * Pascal Bourgault `@aulemahal `_ 15 | * Éric Dupuis `@coxipi `_ 16 | * David Huard `@huard `_ 17 | * Juliette Lavoie `@juliettelavoie `_ 18 | * Gabriel Rondeau-Genesse `@RondeauG `_ 19 | * Trevor James Smith `@Zeitsperre `_ 20 | 21 | Contributors 22 | ------------ 23 | 24 | * Raquel Alegre `@raquelalegre `_ 25 | * Clair Barnes `@clairbarnes `_ 26 | * Alexis Beaupré-Laperrière `@Beauprel `_ 27 | * Sébastien Biner `@sbiner `_ 28 | * Marco Braun `@vindelico `_ 29 | * David Caron `@davidcaron `_ 30 | * Carsten Ehbrecht `@cehbrecht `_ 31 | * Jeremy Fyke `@jeremyfyke `_ 32 | * Sarah 
Gammon `@SarahG-579462 `_ 33 | * Tom Keel `@Thomasjkeel `_ 34 | * Marie-Pier Labonté `@marielabonte `_ 35 | * Ludwig Lierhammer `@ludwiglierhammer `_ 36 | * Jwen Fai Low `@jwenfai `_ 37 | * Jamie Quinn `@JamieJQuinn `_ 38 | * Yannick Rousseau 39 | * Philippe Roy `@Balinus `_ 40 | * Dougie Squire `@dougiesquire `_ 41 | * Ag Stephens `@agstephens `_ 42 | * Maliko Tanguy `@malngu `_ 43 | * Christopher Whelan `@qwhelan `_ 44 | * Dante Castro `@profesorpaiche `_ 45 | * Sascha Hofmann `@saschahofmann `_ 46 | * Javier Diez-Sierra `@JavierDiezSierra `_ 47 | * Hui-Min Wang `@Hem-W `_ 48 | * Adrien Lamarche `@LamAdr `_ 49 | * Faisal Mahmood `@faimahsho `_ 50 | * Sebastian Lehner `@seblehner `_ 51 | * Baptiste Hamon `@baptistehamon `_ 52 | * Jack Kit-tai Wong `@jack-ktw `_ 53 | * Jens de Bruijn `@jensdebruijn `_ 54 | * Armin Hofmann `@HofmannGeo `_ 55 | -------------------------------------------------------------------------------- /CI/requirements_ci.in: -------------------------------------------------------------------------------- 1 | bump-my-version==1.1.2 2 | deptry==0.23.0 3 | exceptiongroup==1.2.2 4 | flit==3.12.0 5 | pip==25.1.1 6 | pylint==3.3.7 7 | tomli==2.2.1 8 | tox==4.25.0 9 | tox-gh==1.5.0 10 | -------------------------------------------------------------------------------- /CI/requirements_upstream.txt: -------------------------------------------------------------------------------- 1 | bottleneck @ git+https://github.com/pydata/bottleneck.git@master 2 | cftime @ git+https://github.com/Unidata/cftime.git@master 3 | flox @ git+https://github.com/xarray-contrib/flox.git@main 4 | # numpy @ git+https://github.com/numpy/numpy.git@main 5 | xarray @ git+https://github.com/pydata/xarray.git@main 6 | xsdba @ git+https://github.com/Ouranosinc/xsdba.git@main 7 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean clean-test
clean-pyc clean-build docs help lint test test-all 2 | .DEFAULT_GOAL := help 3 | 4 | define BROWSER_PYSCRIPT 5 | import os, webbrowser, sys 6 | from urllib.request import pathname2url 7 | 8 | webbrowser.open(sys.argv[1]) 9 | endef 10 | export BROWSER_PYSCRIPT 11 | 12 | define PRINT_HELP_PYSCRIPT 13 | import re, sys 14 | 15 | for line in sys.stdin: 16 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) 17 | if match: 18 | target, help = match.groups() 19 | print("%-20s %s" % (target, help)) 20 | endef 21 | export PRINT_HELP_PYSCRIPT 22 | 23 | BROWSER := python -c "$$BROWSER_PYSCRIPT" 24 | 25 | help: 26 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) 27 | 28 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts 29 | 30 | clean-build: ## remove build artifacts 31 | rm -fr build/ 32 | rm -fr dist/ 33 | rm -fr .eggs/ 34 | find . -name '*.egg-info' -exec rm -fr {} + 35 | find . -name '*.egg' -exec rm -f {} + 36 | 37 | clean-docs: ## remove docs artifacts 38 | rm -f docs/apidoc/xclim*.rst 39 | rm -f docs/apidoc/modules.rst 40 | rm -f docs/notebooks/data/*.nc 41 | $(MAKE) -C docs clean 42 | 43 | clean-pyc: ## remove Python file artifacts 44 | find . -name '*.pyc' -exec rm -f {} + 45 | find . -name '*.pyo' -exec rm -f {} + 46 | find . -name '*~' -exec rm -f {} + 47 | find . 
-name '__pycache__' -exec rm -fr {} + 48 | 49 | clean-test: ## remove test and coverage artifacts 50 | rm -fr .tox/ 51 | rm -f .coverage 52 | rm -fr htmlcov/ 53 | rm -fr .pytest_cache 54 | 55 | lint: ## check style with flake8 and black 56 | python -m ruff check src/xclim tests 57 | python -m flake8 --config=.flake8 src/xclim tests 58 | python -m vulture src/xclim tests 59 | python -m blackdoc --check README.rst CHANGELOG.rst CONTRIBUTING.rst docs --exclude=".py" 60 | codespell src/xclim tests docs 61 | python -m numpydoc lint src/xclim/*.py src/xclim/ensembles/*.py src/xclim/indices/*.py src/xclim/indicators/*.py src/xclim/testing/*.py 62 | python -m deptry src 63 | python -m yamllint --config-file=.yamllint.yaml src/xclim 64 | 65 | test: ## run tests quickly with the default Python 66 | pytest 67 | pytest --no-cov --nbval --dist=loadscope --rootdir=tests/ docs/notebooks --ignore=docs/notebooks/example.ipynb 68 | pytest --rootdir=tests/ --xdoctest src/xclim 69 | 70 | test-all: ## run tests on every Python version with tox 71 | tox 72 | 73 | coverage: ## check code coverage quickly with the default Python 74 | python -m coverage run --source xclim -m pytest src/xclim 75 | python -m coverage report -m 76 | python -m coverage html 77 | $(BROWSER) htmlcov/index.html 78 | 79 | autodoc-obsolete: clean-docs ## create sphinx-apidoc files (obsolete) 80 | mkdir -p docs/apidoc/ 81 | sphinx-apidoc -o docs/apidoc/ --private --module-first src/xclim 82 | 83 | autodoc-custom-index: clean-docs ## create sphinx-apidoc files but with special index handling for indices and indicators 84 | mkdir -p docs/apidoc/ 85 | sphinx-apidoc -o docs/apidoc/ --private --module-first src/xclim src/xclim/indicators src/xclim/indices 86 | rm docs/apidoc/xclim.rst 87 | env SPHINX_APIDOC_OPTIONS="members,undoc-members,show-inheritance,noindex" sphinx-apidoc -o docs/apidoc/ --private --module-first src/xclim 88 | 89 | linkcheck: autodoc-custom-index ## run checks over all external links found 
throughout the documentation 90 | $(MAKE) -C docs linkcheck 91 | 92 | docs: autodoc-custom-index ## generate Sphinx HTML documentation, including API docs, but without indexes for indices and indicators 93 | $(MAKE) -C docs html 94 | ifndef READTHEDOCS 95 | ## Start http server and show in browser. 96 | ## We want to have the cli command run in the foreground, so it's easy to kill. 97 | ## And we wait 2 sec for the server to start before opening the browser. 98 | { sleep 2; $(BROWSER) http://localhost:54345; } & 99 | python -m http.server 54345 --directory docs/_build/html/ 100 | endif 101 | 102 | servedocs: autodoc-custom-index ## generate Sphinx HTML documentation, including API docs, but without indexes for indices and indicators, and watch for changes 103 | $(MAKE) -C docs livehtml 104 | 105 | release: dist ## package and upload a release 106 | python -m flit publish 107 | 108 | dist: clean ## builds source and wheel package 109 | python -m flit build 110 | ls -l dist 111 | 112 | install: clean ## install the package to the active Python's site-packages 113 | python -m pip install --no-user . 114 | 115 | develop: clean ## install the package and development dependencies in editable mode to the active Python's site-packages 116 | python -m pip install --no-user --editable ".[dev,docs]" 117 | 118 | upstream: clean develop ## install the GitHub-based development branches of dependencies in editable mode to the active Python's site-packages 119 | python -m pip install --no-user --requirement CI/requirements_upstream.txt 120 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | `xclim` is in rapid development and receives regular updates every four to six (4-6) weeks.
In the event of a security-related bug discovery soon after the release of an `xclim` version, the last supported version will receive a patch release. 6 | 7 | ## Reporting a Vulnerability 8 | 9 | If you believe you have found a security vulnerability in `xclim`, we encourage you to let us know right away. We take all security vulnerabilities seriously and appreciate your efforts to responsibly disclose them. 10 | 11 | Please follow these steps to report a security vulnerability: 12 | 13 | 1. **Email**: Email [github-support@ouranos.ca](mailto:github-support@ouranos.ca) with a detailed description of the vulnerability. If applicable, please include any steps or a proof-of-concept to help us understand and reproduce the issue. 14 | 15 | 1. **Encryption (Optional)**: If you are concerned about the sensitivity of the information you are sharing, you can use the PGP key found below to encrypt your communication. 16 | 17 | 1. **Response**: We will acknowledge your email within 48 hours and work with you to understand and confirm the vulnerability. 18 | 19 | 1. **Fix and Disclosure**: Once the vulnerability is confirmed, we will work to address it promptly. We appreciate your patience as we investigate and implement a fix. Once resolved, we will coordinate the disclosure and provide credit to the reporter unless they prefer to remain anonymous. 
20 | 21 | ## PGP Encryption Key 22 | 23 | You can use the following PGP key to encrypt your communications with us: 24 | 25 | ``` 26 | -----BEGIN PGP PUBLIC KEY BLOCK----- 27 | 28 | mDMEZamQrhYJKwYBBAHaRw8BAQdA+saPvmvr1MYe1nQy3n3QDcRE9T7UzTJ1XH31 29 | EI4Zb6u0Mk91cmFub3MgR2l0SHViIFN1cHBvcnQgPGdpdGh1Yi1zdXBwb3J0QG91 30 | cmFub3MuY2E+iJkEExYKAEEWIQSeAu+Cbjupx79jy9VeVFD6o5TVcwUCZamQrgIb 31 | AwUJCWYBgAULCQgHAgIiAgYVCgkICwIEFgIDAQIeBwIXgAAKCRBeVFD6o5TVc4ho 32 | AQDXjDkx0b3A7yl6PQ4hBJ2uYzw0UWbml7mUwVdhMmdZkQD/VJZQNWrCQeOtYEM8 33 | icZJYwR/OsKFOWqlDytusGGtjwa4OARlqZCuEgorBgEEAZdVAQUBAQdAa41Zabjz 34 | P9O+p6tI69Cnft6U5om3+qCcMo8amTqauH0DAQgHiH4EGBYKACYWIQSeAu+Cbjup 35 | x79jy9VeVFD6o5TVcwUCZamQrgIbDAUJCWYBgAAKCRBeVFD6o5TVcwmaAQClDxW6 36 | 2gir7lhRXAcO+vmRImpGd29TrkcQVh+ak7VlwQEA706d7Kusiorlf/h8pLSoNMmS 37 | kuLGmHpUJ8NVGppU+wo= 38 | =wuxr 39 | -----END PGP PUBLIC KEY BLOCK----- 40 | ``` 41 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS ?= 6 | SPHINXBUILD ?= sphinx-build 7 | SPHINXPROJ = xclim 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | 22 | livehtml: 23 | sphinx-autobuild --port 54345 --open-browser --delay 3 --re-ignore "$(BUILDDIR)|apidoc|Makefile|_dynamic/indicators.json|variables.json|__pycache__" "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 24 | -------------------------------------------------------------------------------- /docs/_static/indsearch.js: -------------------------------------------------------------------------------- 1 | /* Array of indicator objects */ 2 | let indicators = []; 3 | let defModules = ["atmos", "generic", "land", "seaIce"]; 4 | /* MiniSearch object defining search mechanism */ 5 | let miniSearch = new MiniSearch({ 6 | fields: ['title', 'abstract', 'variables', 'keywords', 'id'], // fields to index for full-text search 7 | storeFields: ['title', 'abstract', 'vars', 'realm', 'module', 'name', 'keywords'], // fields to return with search results 8 | searchOptions: { 9 | boost: {'title': 3, 'variables': 2}, 10 | fuzzy: 0.1, 11 | prefix: true, 12 | boostDocument: (docID, term, storedFields) => { 13 | if (defModules.indexOf(storedFields['module']) > -1) { 14 | return 2; 15 | } else { 16 | return 1; 17 | } 18 | }, 19 | }, 20 | extractField: (doc, field) => { 21 | if (field === 'variables') { 22 | return Object.keys(doc['vars']).join(' '); 23 | } 24 | return MiniSearch.getDefault('extractField')(doc, field); 25 | } 26 | }); 27 | 28 | // Populate search object with complete list of indicators 29 | fetch('_static/indicators.json') 30 | .then(data => data.json()) 31 | .then(data => { 32 | indicators = Object.entries(data).map(([k, v]) => { 33 | return {id: k.toLowerCase(), ...v} 34 | }); 35 | miniSearch.addAll(indicators); 36 | indFilter(); 37 | }); 38 | 39 | 40 | function escapeHTML(str){ 41 | /* Escape HTML characters in a string. 
*/ 42 | var map = 43 | { 44 | '&': '&amp;', 45 | '<': '&lt;', 46 | '>': '&gt;', 47 | '"': '&quot;', 48 | "'": '&#39;' 49 | }; 50 | return str.replace(/[&<>"']/g, function(m) {return map[m];}); 51 | } 52 | 53 | function makeKeywordLabel(ind) { 54 | /* Print list of keywords only if there is at least one. */ 55 | if (ind.keywords[0].length > 0) { 56 | const keywords = ind.keywords.map(v => `<span class="keywordlabel">${v.trim()}</span>`).join(''); 57 | return `
Keywords: ${keywords}
`; 58 | } 59 | else { 60 | return ""; 61 | } 62 | } 63 | 64 | 65 | function makeVariableList(ind) { 66 | /* Print list of variables and include mouse-hover tooltip with variable description. */ 67 | return Object.entries(ind.vars).map((kv) => { 68 | /* kv[0] is the variable name, kv[1] is the variable description. */ 69 | /* Convert kv[1] to a string literal */ 70 | const text = escapeHTML(kv[1]); 71 | const tooltip = ``; 72 | return tooltip 73 | }).join(''); 74 | } 75 | 76 | function indTemplate(ind) { 77 | // const varlist = Object.entries(ind.vars).map((kv) => `${kv[0]}`).join(''); 78 | const varlist = makeVariableList(ind); 79 | return ` 80 |
81 |
82 | ${escapeHTML(ind.title)} 83 | 84 | ${ind.module}.${ind.name} 85 | 86 |
87 |
Uses: ${varlist}
88 |

${escapeHTML(ind.abstract)}

89 | ${makeKeywordLabel(ind)} 90 |
Yaml ID: ${ind.id}
91 |
92 | `; 93 | } 94 | 95 | function indFilter() { 96 | const input = document.getElementById("queryInput").value; 97 | const incVirt = document.getElementById("incVirtMod").checked; 98 | let opts = {}; 99 | if (!incVirt) { 100 | opts["filter"] = (result) => (defModules.indexOf(result.module) > -1); 101 | } 102 | let inds = []; 103 | if (input === "") { //Search wildcard so that boostDocument rules are applied. 104 | inds = miniSearch.search(MiniSearch.wildcard, opts); 105 | } else { 106 | inds = miniSearch.search(input, opts); 107 | } 108 | 109 | const newTable = inds.map(indTemplate).join(''); 110 | const tableElem = document.getElementById("indTable"); 111 | tableElem.innerHTML = newTable; 112 | return newTable; 113 | } 114 | -------------------------------------------------------------------------------- /docs/_static/style.css: -------------------------------------------------------------------------------- 1 | @import url("styles/furo.css"); 2 | 3 | .wy-side-nav-search>a img.logo, 4 | .wy-side-nav-search .wy-dropdown>a img.logo { 5 | width: 12rem 6 | } 7 | 8 | .wy-side-nav-search { 9 | background-color: #eee; 10 | } 11 | 12 | .wy-side-nav-search>div.version { 13 | display: inline; 14 | color: rgb(23, 84, 159); 15 | } 16 | 17 | .wy-nav-top { 18 | background-color: #555; 19 | } 20 | 21 | table.colwidths-given { 22 | table-layout: fixed; 23 | width: 100%; 24 | } 25 | table.docutils td { 26 | white-space: unset; 27 | word-wrap: break-word; 28 | } 29 | 30 | td, th { 31 | border: 1px solid #dddddd; 32 | text-align: left; 33 | padding: 8px; 34 | } 35 | 36 | td.name { 37 | font-weight: 500; 38 | } 39 | 40 | body div tr:nth-child(even), 41 | body div.rendered_html tbody tr:nth-child(even) { 42 | background-color: var(--color-background-table-rows-even); 43 | color: var(--color-text-table-rows-even); 44 | } 45 | 46 | body div tr:nth-child(odd), 47 | body div.rendered_html tbody tr:nth-child(odd) { 48 | background-color: var(--color-background-table-rows-odd); 49 | color: 
var(--color-text-table-rows-odd); 50 | } 51 | 52 | dd { 53 | margin: 0 0 0 2em; 54 | padding-bottom: 0.5em; 55 | } 56 | 57 | dt var { 58 | font-family: Monospace; 59 | font-style: normal; 60 | } 61 | 62 | .xr-attrs dt { 63 | display: inline !important; 64 | } 65 | 66 | ul.simple li { 67 | list-style: circle; 68 | } 69 | 70 | /*this selects the contents section, the first table of contents and the list item*/ 71 | /*underneath it. Then it removes the spacing to make it look whole. If you have more*/ 72 | /*items, you can consider using :not(:last-of-type) selector.*/ 73 | #contents .toctree-wrapper:first-of-type ul { 74 | margin-bottom: 0; 75 | } 76 | 77 | #queryInput { 78 | width: 100%; 79 | padding: 10px; 80 | margin: 5px; 81 | } 82 | 83 | .indElem { 84 | margin: 10px; 85 | padding: 10px; 86 | background-color: var(--color-indicator-background); 87 | color: var(--color-indicator-text); 88 | border-radius: 10px; 89 | } 90 | 91 | .indName { 92 | float: right; 93 | } 94 | 95 | code > .indName { 96 | background-color: var(--color-indicator-background); 97 | color: var(--color-indicator-text); 98 | } 99 | 100 | .indVarname { 101 | border-radius: 10px; 102 | background-color: var(--color-indicator-widget-background); 103 | color: var(--color-indicator-widget-text); 104 | border: solid 1px var(--color-indicator-widget-text); 105 | margin-right: 3px; 106 | margin-bottom: 3px; 107 | line-height: 24px; 108 | cursor: help; 109 | } 110 | 111 | /* Rounded corners for keyword labels: */ 112 | .keywordlabel { 113 | border-radius: 10px; 114 | padding: 5px; 115 | margin: 5px; 116 | background-color: var(--color-indicator-widget-background); 117 | color: var(--color-indicator-widget-text); 118 | border: solid 1px var(--color-indicator-widget-text); 119 | line-height: 24px; 120 | 121 | } 122 | 123 | #incVirtModLbl { 124 | display: inline; 125 | } 126 | 127 | /* extend furo for inline ".only-dark" elements */ 128 | body .only-dark-inline, 129 | body .only-light-inline { 130 | 
display: none !important; 131 | } 132 | 133 | @media not print { 134 | body[data-theme="dark"] .only-dark-inline, 135 | body[data-theme="light"] .only-light-inline { 136 | display: inline !important; 137 | } 138 | @media (prefers-color-scheme: dark) { 139 | body:not([data-theme="light"]) .only-dark-inline{ 140 | display: inline !important; 141 | } 142 | } 143 | @media (prefers-color-scheme: light) { 144 | body:not([data-theme="dark"]) .only-light-inline{ 145 | display: inline !important; 146 | } 147 | } 148 | } 149 | 150 | @media print { 151 | .only-light-inline{ 152 | display: inline !important; 153 | } 154 | .only-dark-inline{ 155 | display: none !important; 156 | } 157 | } 158 | 159 | img.xclim-logo-small.only-dark-inline { 160 | width: 91px; 161 | height: 103px; 162 | margin: 0; 163 | padding: 0; 164 | background-color: transparent; 165 | background-repeat: no-repeat; 166 | border-image-width: 0px; 167 | border: none; 168 | border-image-width: 0px; 169 | background-image: url("xclim-logo-small-dark.png"); 170 | } 171 | 172 | img.xclim-logo-small.only-light-inline { 173 | width: 91px; 174 | height: 103px; 175 | margin: 0; 176 | padding: 0; 177 | background-color: transparent; 178 | background-repeat: no-repeat; 179 | border: none; 180 | border-image-width: 0px; 181 | background-image: url("xclim-logo-small-light.png"); 182 | } 183 | 184 | img.xclim-logo-small.no-theme { 185 | display: none; 186 | width: 0px; 187 | } 188 | 189 | button.copybtn.copybtn svg { 190 | stroke: var(--color-copybutton); 191 | } 192 | -------------------------------------------------------------------------------- /docs/_static/xarray.css: -------------------------------------------------------------------------------- 1 | 2 | /* default xarray theme, this is taken from the injected css that xarray uses. 
3 | However, we change it so that it updates in the body, instead of :root, so that it updates with the theme.*/ 4 | html, body { 5 | --xr-font-color0: var(--jp-content-font-color0, rgba(0, 0, 0, 1)); 6 | --xr-font-color2: var(--jp-content-font-color2, rgba(0, 0, 0, 0.54)); 7 | --xr-font-color3: var(--jp-content-font-color3, rgba(0, 0, 0, 0.38)); 8 | --xr-border-color: var(--jp-border-color2, #e0e0e0); 9 | --xr-disabled-color: var(--jp-layout-color3, #bdbdbd); 10 | --xr-background-color: var(--jp-layout-color0, white); 11 | --xr-background-color-row-even: var(--jp-layout-color1, white); 12 | --xr-background-color-row-odd: var(--jp-layout-color2, #eeeeee); 13 | } 14 | -------------------------------------------------------------------------------- /docs/_templates/base.html: -------------------------------------------------------------------------------- 1 | {% extends "!base.html" %} 2 | {% set css_files = css_files + ["_static/style.css"] %} 3 | 4 | 9 | 10 | {% block site_meta %} 11 | {% if "indicators" in sourcename %} 12 | 13 | 14 | {% endif %} 15 | {{ super() }} 16 | {% endblock %} 17 | -------------------------------------------------------------------------------- /docs/analogues.rst: -------------------------------------------------------------------------------- 1 | Spatial Analogues 2 | ================= 3 | 4 | Spatial analogues are maps showing which areas have a present-day climate that is analogous to the future climate of a 5 | given place. This type of map can be useful for climate adaptation to see how regions are coping today under 6 | specific climate conditions. For example, officials from a city located in a temperate region that may be expecting more 7 | heatwaves in the future can learn from the experience of another city where heatwaves are a common occurrence, 8 | leading to more proactive intervention plans to better deal with new climate conditions. 
9 | 10 | Spatial analogues are estimated by comparing the distribution of climate indices computed at the target location over 11 | the future period with the distribution of the same climate indices computed over a reference period for multiple 12 | candidate regions. A number of methodological choices thus enter the computation: 13 | 14 | - Climate indices of interest, 15 | - Metrics measuring the difference between the distributions of indices, 16 | - Reference data from which to compute the base indices, 17 | - A future climate scenario to compute the target indices. 18 | 19 | The climate indices chosen to compute the spatial analogues are usually annual values of indices relevant to the 20 | intended audience of these maps. For example, in the case of the wine grape industry, the climate indices examined could 21 | include the length of the frost-free season, growing degree-days, annual winter minimum temperature and annual number of 22 | very cold days :cite:p:`roy_probabilistic_2017`. 23 | 24 | See :ref:`notebooks/analogs:Spatial Analogues examples`. 25 | 26 | Methods to compute the (dis)similarity between samples 27 | ------------------------------------------------------ 28 | This module implements all methods described in :cite:cts:`grenier_assessment_2013` to measure the dissimilarity between 29 | two samples, as well as the Székely-Rizzo energy distance. Some of these algorithms can be used to test whether two samples 30 | have been drawn from the same distribution. Here, they are used in finding areas with analogue climate conditions to a 31 | target climate: 32 | 33 | * Standardized Euclidean distance 34 | * Nearest Neighbour distance 35 | * Zech-Aslan energy statistic 36 | * Székely-Rizzo energy distance 37 | * Friedman-Rafsky runs statistic 38 | * Kolmogorov-Smirnov statistic 39 | * Kullback-Leibler divergence 40 | * Mahalanobis distance 41 | 42 | All methods accept arrays, the first is the reference (n, D) and the second is the candidate (m, D). 
Where the climate 43 | indicators vary along D and the distribution dimension along n or m. All methods output a single float. See their 44 | documentation in :ref:`analogues:Analogues Metrics API`. 45 | 46 | .. warning:: 47 | 48 | Some methods are scale-invariant and others are not. This is indicated in the docstring 49 | of the methods as it can change the results significantly. In most cases, scale-invariance 50 | is desirable and inputs may need to be scaled beforehand for scale-dependent methods. 51 | 52 | .. rubric:: References 53 | 54 | :cite:cts:`roy_probabilistic_2017` 55 | :cite:cts:`grenier_assessment_2013` 56 | 57 | Analogues Metrics API 58 | --------------------- 59 | 60 | See: :ref:`spatial-analogues-api` 61 | 62 | Analogues Developer Functions 63 | ----------------------------- 64 | 65 | See: :ref:`spatial-analogues-developer-api` 66 | -------------------------------------------------------------------------------- /docs/api_indicators.rst: -------------------------------------------------------------------------------- 1 | Indicators are the main tool xclim provides to compute climate indices. In contrast 2 | to the function defined in `xclim.indices`, Indicators add a layer of health checks 3 | and metadata handling. Indicator objects are split into realms : atmos, land and seaIce. 4 | 5 | Virtual modules are also inserted here. A normal installation of xclim comes with three virtual modules: 6 | 7 | - :py:mod:`xclim.indicators.cf`, Indicators defined in `cf-index-meta`. 8 | - :py:mod:`xclim.indicators.icclim`, Indicators defined by ECAD, as found in python package Icclim. 9 | - :py:mod:`xclim.indicators.anuclim`, Indicators of the Australian National University's Fenner School of Environment and Society. 10 | 11 | Climate Indicators API 12 | ---------------------- 13 | 14 | .. automodule:: xclim.indicators.atmos 15 | :members: 16 | :undoc-members: 17 | :imported-members: 18 | 19 | .. 
automodule:: xclim.indicators.land 20 | :members: 21 | :undoc-members: 22 | :imported-members: 23 | 24 | .. automodule:: xclim.indicators.seaIce 25 | :members: 26 | :undoc-members: 27 | :imported-members: 28 | 29 | Virtual Indicator Submodules 30 | ---------------------------- 31 | 32 | .. automodule:: xclim.indicators.cf 33 | :members: 34 | :imported-members: 35 | :undoc-members: 36 | 37 | .. automodule:: xclim.indicators.icclim 38 | :members: 39 | :imported-members: 40 | :undoc-members: 41 | 42 | .. automodule:: xclim.indicators.anuclim 43 | :members: 44 | :imported-members: 45 | :undoc-members: 46 | -------------------------------------------------------------------------------- /docs/authors.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../AUTHORS.rst 2 | -------------------------------------------------------------------------------- /docs/autodoc_indicator.py: -------------------------------------------------------------------------------- 1 | """ 2 | Sphinx extension that acts as a autodoc patch for documenting Indicator instances. 3 | 4 | By default, indicator instances are skipped by autodoc because their subclass is not a builtin type of python. 
5 | 6 | Based on https://github.com/powerline/powerline/blob/83d855d3d73498c47553afeba212415990d95c54/docs/source/powerline_autodoc.py 7 | """ 8 | 9 | from __future__ import annotations 10 | 11 | from sphinx.domains.python import PyFunction, PyXRefRole 12 | from sphinx.ext import autodoc 13 | 14 | from xclim.core.indicator import Indicator 15 | 16 | 17 | class IndicatorDocumenter(autodoc.FunctionDocumenter): 18 | objtype = "indicator" 19 | 20 | @classmethod 21 | def can_document_member(cls, member, membername, isattr, parent): 22 | return isinstance(member, Indicator) 23 | 24 | 25 | class IndicatorDirective(PyFunction): 26 | pass 27 | 28 | 29 | def setup(app): 30 | app.add_autodocumenter(IndicatorDocumenter) 31 | app.add_directive_to_domain("py", "indicator", IndicatorDirective) 32 | app.add_role_to_domain("py", "indicator", PyXRefRole()) 33 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CHANGELOG.rst 2 | -------------------------------------------------------------------------------- /docs/checks.rst: -------------------------------------------------------------------------------- 1 | ============= 2 | Health Checks 3 | ============= 4 | The :class:`Indicator` class performs a number of sanity checks on inputs to make sure valid data is fed to indices 5 | computations (:py:mod:`~xclim.core.cfchecks` for checks on the metadata and :py:mod:`~xclim.core.datachecks` for checks on the coordinates). 6 | Output values are properly masked in case input values are missing or invalid (:py:mod:`~xclim.core.missing`). 7 | Finally, a user can use functions of :py:mod:`~xclim.core.dataflags` to explore potential issues with its data (extreme values, suspicious runs, etc). 8 | 9 | .. automodule:: xclim.core.cfchecks 10 | :members: 11 | :undoc-members: 12 | :show-inheritance: 13 | :noindex: 14 | 15 | .. 
automodule:: xclim.core.datachecks 16 | :members: 17 | :undoc-members: 18 | :show-inheritance: 19 | :noindex: 20 | 21 | .. automodule:: xclim.core.missing 22 | :noindex: 23 | 24 | .. note:: 25 | 26 | Corresponding stand-alone functions are also exposed to run the same missing value checks independent from indicator calculations. 27 | 28 | .. autofunction:: xclim.core.missing.missing_any 29 | :noindex: 30 | 31 | .. autofunction:: xclim.core.missing.at_least_n_valid 32 | :noindex: 33 | 34 | .. autofunction:: xclim.core.missing.missing_pct 35 | :noindex: 36 | 37 | .. autofunction:: xclim.core.missing.missing_wmo 38 | :noindex: 39 | 40 | .. autofunction:: xclim.core.missing.missing_from_context 41 | :noindex: 42 | 43 | .. automodule:: xclim.core.dataflags 44 | :members: 45 | :undoc-members: 46 | :show-inheritance: 47 | :noindex: 48 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/explanation.rst: -------------------------------------------------------------------------------- 1 | ============== 2 | Why use xclim? 3 | ============== 4 | 5 | Purpose 6 | ======= 7 | 8 | `xclim` aims to position itself as a climate services tool for any researchers interested in using Climate and Forecast Conventions (`CF-Conventions `_) compliant datasets to perform climate analyses. This tool is optimized for working with Big Data in the climate science domain and can function as an independent library for one-off analyses in *Jupyter Notebooks* or as a backend engine for performing climate data analyses via **Web Processing Services** (`WPS `_; e.g. `Finch `_). 
It was primarily developed targeting Earth and Environmental Science audiences and researchers, originally for calculating climate indicators for the Canadian government web service `ClimateData.ca `_. 9 | 10 | The primary domains that `xclim` is built for are in calculating climate indicators, performing statistical correction / bias adjustment of climate model output variables or simulations, and in performing climate model simulation ensemble statistics. 11 | 12 | Other Python projects similar to xclim 13 | ====================================== 14 | 15 | `xclim` has been developed within an ecosystem of several existing projects that deal with climate and statistical correction/downscaling and has both influenced and been influenced by their approaches: 16 | 17 | * `icclim` (`icclim Source Code `_; `icclim Documentation `_) 18 | - `xclim` aimed to reimplement `icclim` using `xarray`-natives for the computation of climate indices. Starting from version 5.0 of `icclim`, `xclim` has become a core dependency for this project. 19 | - The `icclim` developers have prepared a documentation page comparing xclim and icclim (`xclim_and_icclim `_). 20 | 21 | * `climate_indices` (`climate_indices Source Code `_; `climate_indices Documentation `_) 22 | - Provides several moisture- and drought-related indicators not implemented at-present in `xclim` (SPI, SPEI, PDSI, etc.). It also offers a robust command-line interface and uses `xarray` in its backend. 23 | - There is currently an ongoing discussion about the merging of `climate_indices` and `xclim`: :issue:`1273`. 24 | 25 | * `MetPy` (`MetPy Source Code `_; `MetPy Documentation `_) 26 | - `MetPy` is built for reading, visualizing, and performing calculations specifically on standards-compliant, operational weather data. Like `xclim`, it makes use of `xarray`. 27 | - `xclim` adopted its standards and unit-handling approaches from `MetPy` and associated project `cf-xarray`. 
28 | 29 | * `climpred` (`climpred Source Code `_; `climpred Documentation `_) 30 | - `climpred` is designed to analyze and validate weather and climate forecast data against observations, reconstructions, and simulations. Similar to `xclim`, it leverages `xarray`, `dask`, and `cf_xarray` for object handling, distributed computation, and metadata validation, respectively. 31 | 32 | * `pyet` (`pyet Source Code `_; `pyet Documentation `_) 33 | - `pyet` is a tool for calculating/estimating evapotranspiration using many different accepted methodologies and employs a similar design approach as `xclim`, based on `xarray`-natives. 34 | 35 | * `xcdat` (`xcdat Source Code `_; `xcdat Documentation `_) 36 | 37 | * `GeoCAT` (`GeoCAT Documentation `_) 38 | - `GeoCAT` is an ensemble of tools developed specifically for scalable data analysis and visualization of structures and unstructured gridded earth science datasets. `GeoCAT` tools rely on many of the same tools that `xclim` uses in its stack (notably, `xarray`, `dask`, and `jupyter notebooks`). 39 | 40 | * `scikit-downscale` (`scikit-downscale Source Code `_, `scikit-downscale Documentation `_) 41 | - `scikit-downscale` offers algorithms for statistical downscaling. `xclim` drew inspiration from its fit/predict architecture API approach. The suite of downscaling algorithms offered between both projects differs. 
42 | 43 | R-language specific projects 44 | ---------------------------- 45 | 46 | * `climdex.pcic` (`climdex.pcic Source Code `_; `climdex.pci R-CRAN Index `_) 47 | * `climind` (`climind Source Code `_; `climind Documentation `_) 48 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | xclim Official Documentation 2 | ============================ 3 | 4 | `xclim` is an operational Python library for climate services, providing numerous climate-related indicator tools 5 | with an extensible framework for constructing custom climate indicators, statistical downscaling and bias 6 | adjustment of climate model simulations, as well as climate model ensemble analysis tools. 7 | 8 | xclim is built using `xarray`_ and can seamlessly benefit from the parallelization handling provided by `dask`_. 9 | Its objective is to make it as simple as possible for users to perform typical climate services data treatment workflows. 10 | Leveraging xarray and dask, users can easily bias-adjust climate simulations over large spatial domains or compute indices from large climate datasets. 11 | 12 | .. _xarray: https://docs.xarray.dev/ 13 | .. _dask: https://docs.dask.org/ 14 | 15 | .. toctree:: 16 | :hidden: 17 | 18 | self 19 | 20 | .. toctree:: 21 | :maxdepth: 2 22 | :caption: Table of Contents: 23 | 24 | About 25 | installation 26 | Why xclim? 27 | notebooks/usage 28 | notebooks/index 29 | indicators 30 | indices 31 | checks 32 | notebooks/units 33 | internationalization 34 | notebooks/cli 35 | sdba 36 | analogues 37 | contributing 38 | 39 | .. toctree:: 40 | :titlesonly: 41 | 42 | authors 43 | changelog 44 | support 45 | security 46 | references 47 | 48 | .. toctree:: 49 | :maxdepth: 2 50 | :caption: User API 51 | 52 | api 53 | 54 | .. toctree:: 55 | :maxdepth: 1 56 | :caption: All Modules 57 | 58 | apidoc/modules 59 | 60 | .. 
only:: html 61 | 62 | Indices and tables 63 | ================== 64 | * :ref:`genindex` 65 | * :ref:`modindex` 66 | * :ref:`search` 67 | -------------------------------------------------------------------------------- /docs/indicators.rst: -------------------------------------------------------------------------------- 1 | ================== 2 | Climate Indicators 3 | ================== 4 | 5 | :py:class:`xclim.core.indicator.Indicator` instances essentially perform the same computations as the functions 6 | found in the :mod:`xclim.indices` library, but also run a number of health checks on input data 7 | and assign attributes to the output arrays. So for example, if there are missing values in 8 | a time series, indices won't notice, but indicators will return NaNs for periods with missing 9 | values (depending on the missing values algorithm selected, see :ref:`checks:Missing values identification`). Indicators also check that the input data has the expected frequency (e.g. daily) and that 10 | it is indeed the expected variable (e.g. a precipitation flux). The output is assigned attributes 11 | that conform as much as possible with the `CF-Convention`_. 12 | 13 | Indicators are split into realms (atmos, land, seaIce), according to the variables they operate on. 14 | See :ref:`notebooks/extendxclim:Defining new indicators` for instructions on how to create your own indicators. This page 15 | allows a simple free text search of all indicators. Click on the python names to get to the complete docstring of each indicator. 16 | 17 | .. raw:: html 18 | 19 | 20 | 21 |
22 |
23 | 24 | .. 25 | Filling of the table and search is done by scripts in _static/indsearch.js which are added through _templates/layout.html 26 | the data comes from indicators.json which is created by conf.py. 27 | 28 | .. _CF-Convention: http://cfconventions.org/ 29 | -------------------------------------------------------------------------------- /docs/indices.rst: -------------------------------------------------------------------------------- 1 | =============== 2 | Climate Indices 3 | =============== 4 | 5 | .. note:: 6 | 7 | Climate `Indices` serve as the driving mechanisms behind `Indicators` and should be used in cases where 8 | default settings for an Indicator may need to be tweaked, metadata completeness is not required, or a user 9 | wishes to design a virtual module from existing indices (see: :ref:`notebooks/extendxclim:Defining new indicators`). 10 | 11 | For higher-level and general purpose use, the xclim developers suggest using the :ref:`indicators:Climate Indicators`. 12 | 13 | Indices Library 14 | --------------- 15 | 16 | Climate indices functions are designed to operate on :py:class:`xarray.DataArray` objects. 17 | Most of these functions operate on daily time series, but in some cases might accept other sampling 18 | frequencies as well. All functions perform units checks to make sure that inputs have the expected dimensions 19 | (e.g. handling for units of temperature, whether they are Celsius, kelvin or Fahrenheit), and set the `units` 20 | attribute of the output `DataArray`. 21 | 22 | The :py:mod:`xclim.indices.generic`, :py:mod:`xclim.indices.helpers`, :py:mod:`xclim.indices.run_length`, and 23 | :py:mod:`xclim.indices.stats` submodules provide helper functions to simplify the implementation of indices 24 | while functions under :py:mod:`xclim.core.calendar` can aid with challenges arising from variable calendar 25 | types. 26 | 27 | .. 
warning:: 28 | 29 | Indices functions do not perform missing value checks, and usually do not set CF-Convention attributes 30 | (long_name, standard_name, description, cell_methods, etc.). These functionalities are provided by 31 | :py:class:`xclim.core.indicator.Indicator` instances found in the :py:mod:`xclim.indicators.atmos`, 32 | :py:mod:`xclim.indicators.land` and :mod:`xclim.indicators.seaIce` modules. 33 | 34 | .. automodule:: xclim.indices 35 | :members: 36 | :imported-members: 37 | :undoc-members: 38 | :show-inheritance: 39 | 40 | Indices submodules 41 | ------------------ 42 | 43 | .. automodule:: xclim.indices.generic 44 | :members: 45 | :undoc-members: 46 | :show-inheritance: 47 | 48 | .. automodule:: xclim.indices.helpers 49 | :members: 50 | :undoc-members: 51 | :show-inheritance: 52 | 53 | .. automodule:: xclim.indices.run_length 54 | :members: 55 | :undoc-members: 56 | :show-inheritance: 57 | 58 | .. automodule:: xclim.indices.stats 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | 63 | Fire indices submodule 64 | ^^^^^^^^^^^^^^^^^^^^^^ 65 | Indices related to fire and fire weather. Currently, submodules exist for calculating indices from the Canadian Forest Fire Weather Index System and the McArthur Forest Fire Danger (Mark 5) System. All fire indices can be accessed from the :py:mod:`xclim.indices` module. 66 | 67 | .. automodule:: xclim.indices.fire._cffwis 68 | :members: fire_weather_ufunc, fire_season, overwintering_drought_code, drought_code, cffwis_indices 69 | :undoc-members: 70 | :show-inheritance: 71 | 72 | .. automodule:: xclim.indices.fire._ffdi 73 | :members: 74 | :undoc-members: 75 | :show-inheritance: 76 | 77 | .. only:: html 78 | 79 | Fire indices footnotes 80 | ~~~~~~~~~~~~~~~~~~~~~~ 81 | 82 | .. _ffdi-footnotes: 83 | 84 | McArthur Forest Fire Danger Indices methods 85 | ******************************************* 86 | 87 | .. bibliography:: 88 | :labelprefix: FFDI- 89 | :keyprefix: ffdi- 90 | 91 | .. 
only:: html 92 | 93 | .. _fwi-footnotes: 94 | 95 | Canadian Forest Fire Weather Index System codes 96 | *********************************************** 97 | 98 | .. bibliography:: 99 | :labelprefix: CODE- 100 | :keyprefix: code- 101 | 102 | .. only:: html 103 | 104 | .. note:: 105 | 106 | Matlab code of the GFWED obtained through personal communication. 107 | 108 | Fire season determination methods 109 | ********************************* 110 | 111 | .. bibliography:: 112 | :labelprefix: FIRE- 113 | :keyprefix: fire- 114 | 115 | .. only:: html 116 | 117 | Drought Code overwintering background 118 | ************************************* 119 | 120 | .. bibliography:: 121 | :labelprefix: DROUGHT- 122 | :keyprefix: drought- 123 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | Stable release 6 | -------------- 7 | 8 | To install `xclim` via `pip`, run this command in your terminal: 9 | 10 | .. code-block:: shell 11 | 12 | python -m pip install xclim 13 | 14 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide you through the process. 15 | 16 | .. _pip: https://pip.pypa.io/ 17 | .. _Python installation guide: https://docs.python-guide.org/starting/installation/ 18 | 19 | Anaconda release 20 | ---------------- 21 | 22 | For ease of installation across operating systems, we also offer an Anaconda Python package hosted on conda-forge. 23 | This version tends to be updated at around the same frequency as the PyPI-hosted library, but can lag by a few days at times. 24 | 25 | `xclim` can be installed from conda-forge with the following: 26 | 27 | .. code-block:: shell 28 | 29 | conda install -c conda-forge xclim 30 | 31 | .. 
_extra-dependencies: 32 | 33 | Extra Dependencies 34 | ------------------ 35 | 36 | Speedups and Helper Libraries 37 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 38 | 39 | To improve performance of `xclim`, we highly recommend you also install `flox`_ (see: :doc:`flox API `). 40 | This package seamlessly integrates into `xarray` and significantly improves the performance of the grouping and resampling algorithms, especially when using `dask` on large datasets. 41 | 42 | For grid subsetting, we also recommend using the tools found in `clisops`_ (see: :doc:`clisops.core.subset API `) for spatial manipulation of geospatial data. `clisops` began as a component of `xclim` and is designed to work alongside `xclim` and the `Pangeo`_ stack (`xarray`, `dask`, `jupyter`). In order to install `clisops`, the `GDAL`_ system libraries must be available. 43 | 44 | On Debian/Ubuntu, `GDAL` can be installed via `apt`: 45 | 46 | .. code-block:: shell 47 | 48 | sudo apt-get install libgdal-dev 49 | 50 | If on Anaconda Python, `GDAL` will be installed if needed as a `clisops` dependency. 51 | 52 | Both of these libraries are available on PyPI and conda-forge: 53 | 54 | .. code-block:: shell 55 | 56 | python -m pip install flox clisops 57 | 58 | Or, alternatively: 59 | 60 | .. code-block:: shell 61 | 62 | conda install -c conda-forge flox clisops 63 | 64 | .. _GDAL: https://gdal.org/download.html#binaries 65 | .. _Pangeo: https://pangeo.io/ 66 | 67 | Upstream Dependencies 68 | ^^^^^^^^^^^^^^^^^^^^^ 69 | 70 | `xclim` is regularly tested against the main development branches of a handful of key base libraries (`cftime`, `flox`, `pint`, `xarray`). 71 | For convenience, these libraries can be installed alongside `xclim` using the following `pip`-install command: 72 | 73 | .. code-block:: shell 74 | 75 | python -m pip install -r requirements_upstream.txt 76 | 77 | Or, alternatively: 78 | 79 | .. code-block:: shell 80 | 81 | make upstream 82 | 83 | .. _flox: https://github.com/xarray-contrib/flox 84 | .. 
_clisops: https://github.com/roocs/clisops 85 | 86 | From Sources 87 | ------------ 88 | 89 | .. warning:: 90 | 91 | While `xclim` strives to be compatible with latest releases and development versions of upstream libraries, many of the required base libraries (`numpy`, `scipy`, `numba`, etc.) may lag by several months before supporting the latest minor releases of Python. 92 | 93 | In order to ensure that installation of `xclim` doesn't fail, we suggest installing the `Cython` module before installing `xclim` in order to compile necessary libraries from their source packages, if required. 94 | 95 | The sources for xclim can be downloaded from the `Github repo`_. 96 | 97 | You can either clone the public repository: 98 | 99 | .. code-block:: shell 100 | 101 | git clone git@github.com:Ouranosinc/xclim.git 102 | 103 | Or download the `tarball`_: 104 | 105 | .. code-block:: shell 106 | 107 | curl -OL https://github.com/Ouranosinc/xclim/tarball/main 108 | 109 | Once you have extracted a copy of the source, you can install it with `pip`_: 110 | 111 | .. code-block:: shell 112 | 113 | python -m pip install -e ".[all]" 114 | 115 | Alternatively, you can also install a local development copy via `flit`_: 116 | 117 | .. code-block:: shell 118 | 119 | flit install [--symlink] xclim 120 | 121 | .. _Github repo: https://github.com/Ouranosinc/xclim 122 | .. _tarball: https://github.com/Ouranosinc/xclim/tarball/main 123 | .. _flit: https://flit.pypa.io/en/stable 124 | 125 | Creating a Conda Environment 126 | ---------------------------- 127 | 128 | To create a conda environment including `xclim`'s dependencies and several optional libraries (notably: `clisops`, `eigen`, `sbck`, and `flox`) and development dependencies, run the following command from within your cloned repo: 129 | 130 | .. code-block:: console 131 | 132 | conda env create -n my_xclim_env --file=environment.yml 133 | conda activate my_xclim_env 134 | (my_xclim_env) python -m pip install --no-deps -e . 
135 | -------------------------------------------------------------------------------- /docs/internationalization.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: xclim.core.locales 2 | :members: 3 | :show-inheritance: 4 | :noindex: 5 | -------------------------------------------------------------------------------- /docs/logos/empty.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ouranosinc/xclim/ee6d6f45d7b0f625d691f6fde3f390ccc598d875/docs/logos/empty.png -------------------------------------------------------------------------------- /docs/logos/xclim-logo-dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ouranosinc/xclim/ee6d6f45d7b0f625d691f6fde3f390ccc598d875/docs/logos/xclim-logo-dark.png -------------------------------------------------------------------------------- /docs/logos/xclim-logo-light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ouranosinc/xclim/ee6d6f45d7b0f625d691f6fde3f390ccc598d875/docs/logos/xclim-logo-light.png -------------------------------------------------------------------------------- /docs/logos/xclim-logo-small-dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ouranosinc/xclim/ee6d6f45d7b0f625d691f6fde3f390ccc598d875/docs/logos/xclim-logo-small-dark.png -------------------------------------------------------------------------------- /docs/logos/xclim-logo-small-light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Ouranosinc/xclim/ee6d6f45d7b0f625d691f6fde3f390ccc598d875/docs/logos/xclim-logo-small-light.png -------------------------------------------------------------------------------- /docs/make.bat: 
-------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=python -msphinx 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=xclim 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The Sphinx module was not found. Make sure you have Sphinx installed, 20 | echo.then set the SPHINXBUILD environment variable to point to the full 21 | echo.path of the 'sphinx-build' executable. Alternatively you may add the 22 | echo.Sphinx directory to PATH. 23 | echo. 24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/notebooks/_finder.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | 5 | 6 | def _find_current_folder(): 7 | """ 8 | Find the folder containing the notebooks. 9 | 10 | Needed in order to run the notebooks from the docs/notebooks folder. 11 | """ 12 | return Path(__file__).absolute().parent 13 | -------------------------------------------------------------------------------- /docs/notebooks/example/example.fr.json: -------------------------------------------------------------------------------- 1 | { 2 | "FD": { 3 | "title": "Nombre de jours de gel", 4 | "long_name": "Nombre de jours de gel (Tmin < 0°C)", 5 | "description": "Nombre de jours où la température minimale passe sous 0°C." 
6 | }, 7 | "R95P": { 8 | "title": "Précipitations accumulées lors des jours de fortes pluies (> {perc}e percentile)" 9 | }, 10 | "R95P.R95p": { 11 | "long_name": "Accumulation {freq:f} des précipitations lors des jours de fortes pluies (> {perc}e percentile)", 12 | "description": "Épaisseur équivalente des précipitations accumulées lors des jours où la pluie est plus forte que le {perc}e percentile de la série." 13 | }, 14 | "R95P.R95p_days": { 15 | "long_name": "Nombre de jours de fortes pluies (> {perc}e percentile)", 16 | "description": "Nombre de jours où la pluie est plus forte que le {perc}e percentile de la série." 17 | }, 18 | "R99P.R99p": { 19 | "long_name": "Accumulation {freq:f} des précipitations lors des jours de fortes pluies (> {perc}e percentile)", 20 | "description": "Épaisseur équivalente des précipitations accumulées lors des jours où la pluie est plus forte que le {perc}e percentile de la série." 21 | }, 22 | "R99P.R99p_days": { 23 | "long_name": "Nombre de jours de fortes pluies (> {perc}e percentile)", 24 | "description": "Nombre de jours où la pluie est plus forte que le {perc}e percentile de la série." 25 | }, 26 | "RX5DAY_CANOPY": { 27 | "long_name": "Cumul maximal de la précipitation quotidienne sur 5 jours au-dessus de la canopée." 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /docs/notebooks/example/example.py: -------------------------------------------------------------------------------- 1 | # noqa: D100 2 | from __future__ import annotations 3 | 4 | import xarray as xr 5 | 6 | from xclim.core.units import declare_units, rate2amount 7 | 8 | 9 | @declare_units(pr="[precipitation]") 10 | def extreme_precip_accumulation_and_days( 11 | pr: xr.DataArray, perc: float = 95, freq: str = "YS" 12 | ) -> tuple[xr.DataArray, xr.DataArray]: 13 | """ 14 | Total precipitation accumulation during extreme events and number of days of such precipitation. 
15 | 16 | The `perc` percentile of the precipitation (including all values, not in a day-of-year manner) 17 | is computed. Then, for each period, the days where `pr` is above the threshold are accumulated, 18 | to get the total precip related to those extreme events. 19 | 20 | Parameters 21 | ---------- 22 | pr: xr.DataArray 23 | Precipitation flux (both phases). 24 | perc: float 25 | Percentile corresponding to "extreme" precipitation, [0-100]. 26 | freq: str 27 | Resampling frequency. 28 | 29 | Returns 30 | ------- 31 | xarray.DataArray 32 | Precipitation accumulated during events where pr was above the {perc}th percentile of the whole series. 33 | xarray.DataArray 34 | Number of days where pr was above the {perc}th percentile of the whole series. 35 | """ 36 | pr_thresh = pr.quantile(perc / 100, dim="time").drop_vars("quantile") 37 | 38 | extreme_days = pr >= pr_thresh 39 | pr_extreme = rate2amount(pr).where(extreme_days) 40 | 41 | out1 = pr_extreme.resample(time=freq).sum() 42 | out1 = out1.assign_attrs(units=pr_extreme.units) 43 | 44 | out2 = extreme_days.resample(time=freq).sum() 45 | out2 = out2.assign_attrs(units="days") 46 | return out1, out2 47 | -------------------------------------------------------------------------------- /docs/notebooks/example/example.yml: -------------------------------------------------------------------------------- 1 | doc: | 2 | ============== 3 | Example module 4 | ============== 5 | 6 | This module is an example of YAML generated xclim submodule. 
7 | realm: atmos 8 | references: xclim documentation https://xclim.readthedocs.io 9 | variables: 10 | prveg: 11 | canonical_units: kg m-2 s-1 12 | description: Precipitation flux on the outer surface of the forest 13 | standard_name: precipitation_flux_onto_canopy 14 | indicators: 15 | RX1day_summer: 16 | base: rx1day 17 | cf_attrs: 18 | long_name: Highest 1-day precipitation amount 19 | parameters: 20 | indexer: 21 | month: [5, 6, 7, 8, 9] 22 | context: hydro 23 | RX5day_canopy: 24 | base: max_n_day_precipitation_amount 25 | cf_attrs: 26 | long_name: Highest 5-day precipitation amount on the canopy 27 | input: 28 | pr: prveg 29 | parameters: 30 | freq: QS-DEC 31 | window: 5 32 | context: hydro 33 | R75pdays: 34 | base: days_over_precip_thresh 35 | parameters: 36 | pr_per: 37 | description: Daily 75th percentile of wet day precipitation flux. 38 | thresh: 1 mm/day 39 | context: hydro 40 | fd: 41 | compute: count_occurrences 42 | input: 43 | data: tasmin 44 | cf_attrs: 45 | cell_methods: 'time: minimum within days time: sum over days' 46 | long_name: Number of Frost Days (Tmin < 0°C) 47 | standard_name: number_of_days_with_air_temperature_below_threshold 48 | units: days 49 | var_name: fd 50 | parameters: 51 | op: < 52 | threshold: 0 degC 53 | freq: 54 | default: YS 55 | references: ETCCDI 56 | spring_fd: 57 | compute: frost_days 58 | parameters: 59 | freq: YS-DEC 60 | indexer: 61 | month: [ 12, 1, 2 ] 62 | R95p: 63 | compute: extreme_precip_accumulation_and_days 64 | cf_attrs: 65 | - cell_methods: 'time: sum within days time: sum over days' 66 | long_name: Annual total PRCP when RR > {perc}th percentile 67 | units: m 68 | var_name: R95p 69 | - long_name: Annual number of days when RR > {perc}th percentile 70 | units: days 71 | var_name: R95p_days 72 | parameters: 73 | perc: 95 74 | references: climdex 75 | context: hydro 76 | R99p: 77 | base: .R95p 78 | cf_attrs: 79 | - var_name: R99p 80 | - var_name: R99p_days 81 | parameters: 82 | perc: 99 83 | context: hydro 84 
| -------------------------------------------------------------------------------- /docs/notebooks/index.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | Examples 3 | ======== 4 | 5 | .. toctree:: 6 | :maxdepth: 1 7 | 8 | example 9 | ensembles 10 | ensembles-advanced 11 | frequency_analysis 12 | customize 13 | extendxclim 14 | analogs 15 | partitioning 16 | -------------------------------------------------------------------------------- /docs/notebooks/xclim_training/Exercices.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Exercices pratiques\n", 8 | "\n", 9 | "Pour le reste de la formation, on vous demande d'utiliser xclim pour reproduire des analyses standards que vous faites régulièrement, que vous avez déjà fait ou que vous devrez faire. \n", 10 | "\n", 11 | "\n", 12 | "Notez les problèmes que vous rencontrez, les opérations non-intuitives, les limitations que vous percevez. On se servira de vos commentaires pour compléter notre documentation et améliorer xclim. 
\n", 13 | "\n", 14 | "- Documentation xclim: https://xclim.readthedocs.io/en/latest/\n", 15 | "- Documentation xarray: http://xarray.pydata.org/en/stable/\n", 16 | "\n", 17 | "Pour rapporter un problème avec xclim ou faire une suggestion: https://github.com/Ouranosinc/xclim/issues\n", 18 | "\n", 19 | "\n", 20 | "Vous pouvez travailler dans un notebook en lançant dans votre environnement python36 la commande\n", 21 | "\n", 22 | "```bash\n", 23 | "$ jupyter notebook\n", 24 | "```\n", 25 | "\n", 26 | "SVP réduisez les dimensions de vos calculs afin de donner une chance à nos machines !\n" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "## Quelques trucs\n", 34 | "\n", 35 | "### Wildcards dans les chemins \n", 36 | "\n", 37 | "Vous pouvez vous servir des wildcards pour ouvrir plusieurs fichiers à la fois. " 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": null, 43 | "metadata": { 44 | "tags": [ 45 | "skip" 46 | ] 47 | }, 48 | "outputs": [], 49 | "source": [ 50 | "from __future__ import annotations\n", 51 | "\n", 52 | "import xarray as xr\n", 53 | "\n", 54 | "# Pour ouvrir tous les mois d'une année:\n", 55 | "ds = xr.open_mfdataset(\"/tas_bch_2032??_se.nc\")" 56 | ] 57 | }, 58 | { 59 | "cell_type": "markdown", 60 | "metadata": {}, 61 | "source": [ 62 | "### Propager les unités\n", 63 | "\n", 64 | "xclim vérifie que les unités sont compatibles avec l'indicateur à calculer. Par contre, les opérations xarray ne conservent pas les unités par défaut. Pour propager les attributs d'un object, utilisez `keep_attrs=True`. 
" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": null, 70 | "metadata": { 71 | "tags": [ 72 | "skip" 73 | ] 74 | }, 75 | "outputs": [], 76 | "source": [ 77 | "ts = (\n", 78 | " ds.tas.isel(rlat=slice(0, 2), rlon=slice(0, 2))\n", 79 | " .resample(time=\"D\")\n", 80 | " .mean(dim=\"time\", keep_attrs=True)\n", 81 | ")" 82 | ] 83 | } 84 | ], 85 | "metadata": { 86 | "language_info": { 87 | "codemirror_mode": { 88 | "name": "ipython", 89 | "version": 3 90 | }, 91 | "file_extension": ".py", 92 | "mimetype": "text/x-python", 93 | "name": "python", 94 | "nbconvert_exporter": "python", 95 | "pygments_lexer": "ipython3", 96 | "version": "3.8.5" 97 | } 98 | }, 99 | "nbformat": 4, 100 | "nbformat_minor": 2 101 | } 102 | -------------------------------------------------------------------------------- /docs/notebooks/xclim_training/finch.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Finch usage\n", 8 | "\n", 9 | "Finch is a WPS server for climate indicators, but also has a few utilities to facilitate data handling. To get started, first instantiate the client. " 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "from __future__ import annotations\n", 19 | "\n", 20 | "from birdy import WPSClient\n", 21 | "\n", 22 | "url = \"https://finch.crim.ca/wps\"\n", 23 | "# url = 'https://pavics.ouranos.ca/twitcher/ows/proxy/finch/wps'\n", 24 | "# url = 'http://localhost:5000'\n", 25 | "wps = WPSClient(url)" 26 | ] 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "metadata": {}, 31 | "source": [ 32 | "The list of available processes can be displayed using the help function, as well as details about individual processes. 
" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": null, 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "?wps" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": null, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "?wps.frost_days" 51 | ] 52 | }, 53 | { 54 | "cell_type": "markdown", 55 | "metadata": {}, 56 | "source": [ 57 | "To actually compute an indicator, we need to specify the path to the netCDF file used as input for the calculation of the indicator. To compute `frost_days`, we need a time series of daily minimum temperature. Here we'll use a small test file. Note that here we're using an OPeNDAP link, but it could also be an url to a netCDF file, or the path to a local file on disk. We then simply call the indicator. The response is an object that can poll the server to inquire about the status of the process. This object can use two modes: \n", 58 | " - synchronous: it will wait for the server's response before returning; or \n", 59 | " - asynchronous: it will return immediately, but without the actual output from the process.\n", 60 | " \n", 61 | "Here, since we're applying the process on a small test file, we're using the default synchronous mode. For long computations, use the asynchronous mode to avoid time-out errors. The asynchronous mode is activated by setting the `progress` attribute of the WPS client to True. 
" 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": null, 67 | "metadata": {}, 68 | "outputs": [], 69 | "source": [ 70 | "tasmin = \"https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/birdhouse/testdata/flyingpigeon/cmip3/tasmin.sresa2.miub_echo_g.run1.atm.da.nc\"\n", 71 | "resp = wps.frost_days(tasmin)" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": null, 77 | "metadata": {}, 78 | "outputs": [], 79 | "source": [ 80 | "print(resp.status)\n", 81 | "out = resp.get()\n", 82 | "print(out)" 83 | ] 84 | }, 85 | { 86 | "cell_type": "markdown", 87 | "metadata": {}, 88 | "source": [ 89 | "The `get` method returns a `NamedTuple` object with all the WPS outputs, either as references to files or actual content. To copy the file to the local disk, you can use the `getOutput` method. " 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": null, 95 | "metadata": {}, 96 | "outputs": [], 97 | "source": [ 98 | "resp.getOutput(\"/tmp/out.nc\")" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": null, 104 | "metadata": {}, 105 | "outputs": [], 106 | "source": [ 107 | "import xarray as xr\n", 108 | "\n", 109 | "xr.open_dataset(\"/tmp/out.nc\")" 110 | ] 111 | }, 112 | { 113 | "cell_type": "markdown", 114 | "metadata": {}, 115 | "source": [ 116 | "The birdy client offers a quicker way to download and open the files automatically using `asobj=True`, as long as the file format is known to birdy. 
" 117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "execution_count": null, 122 | "metadata": {}, 123 | "outputs": [], 124 | "source": [ 125 | "ds, log = resp.get(asobj=True)" 126 | ] 127 | }, 128 | { 129 | "cell_type": "code", 130 | "execution_count": null, 131 | "metadata": {}, 132 | "outputs": [], 133 | "source": [ 134 | "ds" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": null, 140 | "metadata": {}, 141 | "outputs": [], 142 | "source": [ 143 | "print(log)" 144 | ] 145 | } 146 | ], 147 | "metadata": { 148 | "language_info": { 149 | "codemirror_mode": { 150 | "name": "ipython", 151 | "version": 3 152 | }, 153 | "file_extension": ".py", 154 | "mimetype": "text/x-python", 155 | "name": "python", 156 | "nbconvert_exporter": "python", 157 | "pygments_lexer": "ipython3", 158 | "version": "3.6.7" 159 | } 160 | }, 161 | "nbformat": 4, 162 | "nbformat_minor": 2 163 | } 164 | -------------------------------------------------------------------------------- /docs/notebooks/xclim_training/readme.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Instructions\n", 8 | "\n", 9 | "## Installation d'outils pour Windows\n", 10 | "\n", 11 | "1. Installer MobaXterm: https://mobaxterm.mobatek.net/download-home-edition.html\n", 12 | "\n", 13 | "\n", 14 | "## Sur le serveur\n", 15 | "\n", 16 | "1. Ouvrir une session sur doris ou neree\n", 17 | "2. Charger le module `Anaconda`\n", 18 | "3. Ouvrir l'environnement `python36`\n", 19 | "4. Lancer un serveur de notebook jupyter avec le port désigné (8888 dans l'exemple) : voir liste ci-dessous pour le port à utiliser\n", 20 | "5. Copier l'URL commençant par http://localhost:\n", 21 | "\n", 22 | "### Liste de ports par poste de travail pour la formation\n", 23 | "1. Poste 1 : JP=8890 ; DP=8790\n", 24 | "\n", 25 | "2. Poste 2 : JP=8891 ; DP=8791\n", 26 | "\n", 27 | "3. 
Poste 3 : JP=8892 ; DP=8792\n", 28 | "\n", 29 | "4. Poste 4 : JP=8893 ; DP=8793\n", 30 | "\n", 31 | "5. Poste 5 : JP=8894 ; DP=8794\n", 32 | "\n", 33 | "6. Poste 6 : JP=8895 ; DP=8795\n", 34 | "\n", 35 | "7. Poste 7 : JP=8896 ; DP=8796\n", 36 | "\n", 37 | "8. Poste 8 : JP=8897 ; DP=8797\n", 38 | "\n", 39 | "9. Poste 9 : JP=8898 ; DP=8798\n", 40 | "\n", 41 | "10. Poste 10 : JP=8899 ; DP=8799\n" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": null, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "$ ssh user@server \n", 51 | "$ git clone https://github.com/Ouranosinc/xclim.git\n", 52 | "$ cd xclim\n", 53 | "$ git checkout xclim_training\n", 54 | "$ cd docs/notebooks/xclim_training/\n", 55 | "$ module load Anaconda\n", 56 | "$ export OMP_NUM_THREADS=4\n", 57 | "$ source activate python36\n", 58 | "$ jupyter notebook --no-browser --port {JP} " 59 | ] 60 | }, 61 | { 62 | "cell_type": "markdown", 63 | "metadata": {}, 64 | "source": [ 65 | "## Sur votre machine\n", 66 | "\n", 67 | "1. Créer un tunnel entre le serveur et votre laptop (un port pour le notebook {JP} et un port pour le dashboard {DP} \n", 68 | "2. Accéder à Jupyter dans un navigateur via l'URL http://localhost:{JP}" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": null, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "$ ssh -N -L{JP}:localhost:{JP} -L{DP}:localhost:{DP} user@server" 78 | ] 79 | }, 80 | { 81 | "cell_type": "markdown", 82 | "metadata": {}, 83 | "source": [ 84 | "## À la fin de la formation\n", 85 | "\n", 86 | "1. Fermer Jupyter sur le serveur (CTRL-C)\n", 87 | "2. 
Fermer le tunnel sur votre machine" 88 | ] 89 | }, 90 | { 91 | "cell_type": "code", 92 | "execution_count": null, 93 | "metadata": {}, 94 | "outputs": [], 95 | "source": [ 96 | "$ ps aux | grep localhost\n", 97 | "$ kill -9 " 98 | ] 99 | } 100 | ], 101 | "metadata": { 102 | "language_info": { 103 | "codemirror_mode": { 104 | "name": "ipython", 105 | "version": 3 106 | }, 107 | "file_extension": ".py", 108 | "mimetype": "text/x-python", 109 | "name": "python", 110 | "nbconvert_exporter": "python", 111 | "pygments_lexer": "ipython3", 112 | "version": "3.6.7" 113 | } 114 | }, 115 | "nbformat": 4, 116 | "nbformat_minor": 2 117 | } 118 | -------------------------------------------------------------------------------- /docs/paper/paper.bib: -------------------------------------------------------------------------------- 1 | @article{Hassel:2017, 2 | title = {A data model of the Climate and Forecast metadata conventions (CF-1.6) with a 3 | software implementation (cf-python v2.1)}, 4 | author = {Hassell, D. and Gregory, J. and Blower, J. and Lawrence, B. N. and Taylor, K. 
5 | E.}, 6 | doi = {10.5194/gmd-10-4619-2017}, 7 | journal = {Geoscientific Model Development}, 8 | number = {12}, 9 | pages = {4619--4646}, 10 | url = {https://gmd.copernicus.org/articles/10/4619/2017/}, 11 | volume = {10}, 12 | year = {2017} 13 | } 14 | 15 | 16 | @article{Hoyer:2017, 17 | title = {xarray: {N}-{D} labeled {Arrays} and {Datasets} in {Python}}, 18 | author = {Hoyer, Stephan and Hamman, Joseph J.}, 19 | doi = {10.5334/jors.148}, 20 | issn = {2049-9647}, 21 | journal = {Journal of Open Research Software}, 22 | language = {en}, 23 | month = {apr}, 24 | pages = {10}, 25 | shorttitle = {xarray}, 26 | url = {http://openresearchsoftware.metajnl.com/articles/10.5334/jors.148/}, 27 | urldate = {2019-07-02}, 28 | volume = {5}, 29 | year = {2017} 30 | } 31 | 32 | 33 | @article{Page:2022, 34 | title = {Access to Analysis and Climate Indices Tools for Climate Researchers and End 35 | Users}, 36 | author = {Christian Pagé and Alessandro Spinuso and Lars Bärring and Klaus Zimmermann and 37 | Abel Aoun}, 38 | doi = {10.1002/essoar.10510291.1}, 39 | month = {jan}, 40 | publisher = {Wiley}, 41 | url = {https://doi.org/10.1002%2Fessoar.10510291.1}, 42 | year = {2022} 43 | } 44 | 45 | 46 | @misc{icclim, 47 | title = {Python library for climate indices calculation}, 48 | author = {Christian Pagé and Abel Aoun and Natalia Tatarinova}, 49 | journal = {GitHub repository}, 50 | doi = {10.5281/zenodo.7382653}, 51 | license = {Apache-2.0 license}, 52 | publisher = {GitHub}, 53 | url = {https://github.com/cerfacs-globc/icclim}, 54 | year = {2022} 55 | } 56 | 57 | 58 | @misc{metpy, 59 | title = {{MetPy: A Python Package for Meteorological Data}}, 60 | author = {May, Ryan and Arms, Sean and Marsh, Patrick and Bruning, Eric and Leeman, John 61 | and Goebbert, Kevin and Thielen, Jonathan and Bruick, Zachary and Camron, M. 
Drew}, 62 | doi = {10.5065/D6WW7G29}, 63 | journal = {GitHub repository}, 64 | license = {BSD-3-Clause}, 65 | publisher = {GitHub}, 66 | url = {https://github.com/Unidata/MetPy}, 67 | year = {2022} 68 | } 69 | 70 | 71 | @misc{clisops, 72 | title = {clisops - climate simulation operations}, 73 | author = {Ag Stephens and Eleanor Smith and Carsten Ehbrecht and Trevor James Smith}, 74 | journal = {GitHub repository}, 75 | publisher = {GitHub}, 76 | url = {https://github.com/roocs/clisops}, 77 | year = {2022} 78 | } 79 | 80 | 81 | @misc{xesmf, 82 | title = {xESMF: Universal Regridder for Geospatial Data}, 83 | author = {Jiawei Zhuang and David Huard and Pascal Bourgault and Raphael Dussin and 84 | Anderson Banihirwe and Stéphane Raynaud}, 85 | journal = {GitHub repository}, 86 | publisher = {GitHub}, 87 | url = {https://github.com/pangeo-data/xESMF}, 88 | year = {2022} 89 | } 90 | 91 | 92 | @misc{finch, 93 | title = {A Web Processing Service for Climate Indicators}, 94 | author = {David Huard and Pascal Bourgault and Trevor James Smith and David Caron and 95 | Long Vu and Mathieu Provencher}, 96 | journal = {GitHub repository}, 97 | publisher = {GitHub}, 98 | url = {https://github.com/bird-house/finch}, 99 | year = {2022} 100 | } 101 | 102 | 103 | @manual{dask:2016, 104 | title = {Dask: Library for dynamic task scheduling}, 105 | author = {Dask Development Team}, 106 | year = {2016}, 107 | url = {https://dask.org} 108 | } 109 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | -------------------------------------------------------------------------------- /docs/references.rst: -------------------------------------------------------------------------------- 1 | .. 
only:: not latex 2 | 3 | ============ 4 | Bibliography 5 | ============ 6 | 7 | McArthur Forest Fire Danger System References 8 | --------------------------------------------- 9 | 10 | See: :ref:`ffdi-footnotes` 11 | 12 | Canadian Forest Fire Weather Index System References 13 | ---------------------------------------------------- 14 | 15 | See: :ref:`fwi-footnotes` 16 | 17 | SDBA References 18 | --------------- 19 | 20 | See: `sdba bibliography `_. 21 | 22 | General References 23 | ------------------ 24 | 25 | .. bibliography:: 26 | -------------------------------------------------------------------------------- /docs/rstjinja.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | 4 | def rstjinja(app, docname, source): 5 | """Render our pages as a jinja template for fancy templating goodness.""" 6 | # Make sure we're outputting HTML 7 | if app.builder.format != "html": 8 | return 9 | src = source[0] 10 | rendered = app.builder.templates.render_string(src, app.config.html_context) 11 | source[0] = rendered 12 | 13 | 14 | def setup(app): 15 | app.connect("source-read", rstjinja) 16 | -------------------------------------------------------------------------------- /docs/security.rst: -------------------------------------------------------------------------------- 1 | .. mdinclude:: ../SECURITY.md 2 | -------------------------------------------------------------------------------- /docs/support.rst: -------------------------------------------------------------------------------- 1 | ============== 2 | Support Policy 3 | ============== 4 | 5 | Support Channels 6 | ---------------- 7 | 8 | * `xclim` Issues: https://github.com/Ouranosinc/xclim/issues 9 | * `xclim` Discussions: https://github.com/Ouranosinc/xclim/discussions 10 | * PAVICS-related Questions: ``_ 11 | 12 | API Compatibility 13 | ----------------- 14 | 15 | `xclim` aims to maintain backwards compatibility as much as possible. 
New features that are considered "breaking changes" are adopted gradually while deprecation notices are issued for the older features. Dropping support for older versions of support libraries is considered a breaking change. 16 | 17 | Significant `xclim` API changes are documented in the changelog. When modules are significantly modified, they are marked as such in the documentation, while deprecation warnings are issued in the code. Support for deprecated features is often maintained for a reasonable period of time (two or three stable releases), but users are encouraged to update their code to the new API as soon as possible. 18 | 19 | Scientific Python Ecosystem Compatibility 20 | ----------------------------------------- 21 | 22 | `xclim` closely follows the compatibility of the `xarray` and `dask` libraries. The `xclim` library is tested against the latest stable versions of `xarray` and `dask` and is expected to work with the latest stable versions of `numpy`, `scipy`, and `pandas`. These projects tend to follow either the `NumPy Enhancement Protocols (NEP-29) `_ or the `Scientific Python SPEC-0 `_ for deprecation policies; `xclim` generally follows the same policies. 23 | 24 | The lowest supported versions of libraries listed in the `xclim` package metadata are expected to be compatible with the latest stable versions of `xclim`. From time to time, these minimum supported versions will be updated to follow the Scientific Python SPEC-0 recommendations. In the event that a significant breaking change is made to the `xarray` or `dask` libraries, `xclim` may adopt a newer minimum supported version of those libraries than SPEC-0 might recommend. 25 | 26 | `xclim` tends to support older Python versions until one or many of the following events occur: 27 | - The Python version no longer receives security patches by the Python Software Foundation (EoL). 28 | - The Python version is no longer supported by the last stable releases of the `xarray` or `dask` libraries. 
Maintaining support for older Python versions becomes a burden on the development team.
44 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: xclim 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python >=3.11,<3.14 6 | - boltons >=20.1 7 | - bottleneck >=1.3.1 8 | - cf_xarray >=0.9.3 9 | - cftime >=1.4.1 10 | - click >=8.1 11 | - dask >=2024.8.1 12 | - filelock >=3.14.0 13 | - numba >=0.57.0 14 | - numpy >=1.24.0 15 | - packaging >=24.0 16 | - pandas >=2.2.0 17 | - pint >=0.24.4 18 | - pyarrow >=15.0.0 # Strongly encouraged for Pandas v2.2.0+ 19 | - pyyaml >=6.0.1 20 | - scikit-learn >=1.2.0 21 | - scipy >=1.11.0 22 | - xarray >=2023.11.0,!=2024.10.0 23 | - yamale >=5.0.0 24 | # Extras 25 | - flox >=0.9 26 | - xsdba >=0.4.0 27 | # lmoments3 - Not an official dependency but required for some Jupyter notebooks 28 | - lmoments3 >=1.0.7 29 | # Testing and development dependencies 30 | - blackdoc =0.3.9 31 | - bump-my-version >=1.1.1 32 | - cairosvg >=2.6.0 33 | - codespell >=2.4.1 34 | - coverage >=7.5.0 35 | - deptry >=0.23.0 36 | - distributed >=2.0 37 | - flake8 >=7.2.0 38 | - flake8-rst-docstrings >=0.3.0 39 | - flit >=3.10.1,<4.0 40 | - furo >=2023.9.10 41 | - h5netcdf >=1.3.0 42 | - ipykernel 43 | - ipython >=8.10.0 44 | - matplotlib >=3.7.0 45 | - mypy >=1.14.1 46 | - nbconvert >=7.16.4 47 | - nbsphinx >=0.9.5 48 | - nbval >=0.11.0 49 | - nc-time-axis >=1.4.1 50 | - netcdf4 # Required for some Jupyter notebooks 51 | - notebook 52 | - numpydoc >=1.8.0 53 | - pandas-stubs >=2.2 54 | - pip >=25.0 55 | - pooch >=1.8.0 56 | - pre-commit >=3.7 57 | - pybtex >=0.24.0 58 | - pylint >=3.3.1 59 | - pytest >=8.0.0 60 | - pytest-cov >=5.0.0 61 | - pytest-socket >=0.6.0 62 | - pytest-timeout >=2.4.0 63 | - pytest-xdist >=3.2 64 | - ruff >=0.9.6 65 | - sphinx >=8.2.0 66 | - sphinx-autobuild >=2024.4.16 67 | - sphinx-autodoc-typehints >=3.1.0 68 | - sphinx-codeautolink >=0.16.2 69 | - sphinx-copybutton 70 | - 
sphinx-mdinclude 71 | - sphinxcontrib-bibtex 72 | - sphinxcontrib-svg2pdfconverter 73 | - tokenize-rt >=5.2.0 74 | - tox >=4.25.0 75 | - tox-gh >=1.5.0 76 | - vulture =2.14 77 | - xdoctest >=1.1.5 78 | - yamllint >=1.35.1 79 | # Temporary Fixes 80 | - importlib-metadata <8.7.0 # Issues with dask >=2025.5.1 on Python3.11 81 | -------------------------------------------------------------------------------- /src/xclim/__init__.py: -------------------------------------------------------------------------------- 1 | """Climate indices computation package based on Xarray.""" 2 | 3 | from __future__ import annotations 4 | 5 | import importlib.resources as _resources 6 | 7 | from xclim import indices 8 | from xclim.core import units # noqa 9 | from xclim.core.indicator import build_indicator_module_from_yaml 10 | from xclim.core.locales import load_locale 11 | from xclim.core.options import set_options # noqa 12 | from xclim.indicators import atmos, generic, land, seaIce # noqa 13 | 14 | __author__ = """Travis Logan""" 15 | __email__ = "logan.travis@ouranos.ca" 16 | __version__ = "0.57.1-dev.0" 17 | 18 | 19 | with _resources.as_file(_resources.files("xclim.data")) as _module_data: 20 | # Load official locales 21 | for filename in _module_data.glob("??.json"): 22 | # Only select .json and not ..json 23 | load_locale(filename, filename.stem) 24 | 25 | # Virtual modules creation: 26 | build_indicator_module_from_yaml(_module_data / "icclim", mode="raise") 27 | build_indicator_module_from_yaml(_module_data / "anuclim", mode="raise") 28 | build_indicator_module_from_yaml(_module_data / "cf", mode="raise") 29 | -------------------------------------------------------------------------------- /src/xclim/core/__init__.py: -------------------------------------------------------------------------------- 1 | """Core module.""" 2 | 3 | from __future__ import annotations 4 | 5 | from xclim.core import missing 6 | from xclim.core._exceptions import * 7 | from xclim.core._types import * 8 
| -------------------------------------------------------------------------------- /src/xclim/core/_exceptions.py: -------------------------------------------------------------------------------- 1 | """Exceptions and error handling utilities.""" 2 | 3 | from __future__ import annotations 4 | 5 | import logging 6 | import warnings 7 | 8 | logger = logging.getLogger("xclim") 9 | 10 | __all__ = ["MissingVariableError", "ValidationError", "raise_warn_or_log"] 11 | 12 | 13 | class ValidationError(ValueError): 14 | """Error raised when input data to an indicator fails the validation tests.""" 15 | 16 | @property 17 | def msg(self): # numpydoc ignore=GL08 18 | return self.args[0] 19 | 20 | 21 | class MissingVariableError(ValueError): 22 | """Error raised when a dataset is passed to an indicator but one of the needed variable is missing.""" 23 | 24 | 25 | def raise_warn_or_log( 26 | err: Exception, 27 | mode: str, 28 | msg: str | None = None, 29 | err_type: type = ValueError, 30 | stacklevel: int = 1, 31 | ): 32 | """ 33 | Raise, warn or log an error according. 34 | 35 | Parameters 36 | ---------- 37 | err : Exception 38 | An error. 39 | mode : {'ignore', 'log', 'warn', 'raise'} 40 | What to do with the error. 41 | msg : str, optional 42 | The string used when logging or warning. 43 | Defaults to the `msg` attr of the error (if present) or to "Failed with ". 44 | err_type : type 45 | The type of error/exception to raise. 46 | stacklevel : int 47 | Stacklevel when warning. Relative to the call of this function (1 is added). 
48 | """ 49 | message = msg or getattr(err, "msg", f"Failed with {err!r}.") 50 | if mode == "ignore": 51 | pass 52 | elif mode == "log": 53 | logger.info(message) 54 | elif mode == "warn": 55 | warnings.warn(message, stacklevel=stacklevel + 1) 56 | else: # mode == "raise" 57 | raise err from err_type(message) 58 | -------------------------------------------------------------------------------- /src/xclim/core/_types.py: -------------------------------------------------------------------------------- 1 | """Type annotations and constants used throughout xclim.""" 2 | 3 | from __future__ import annotations 4 | 5 | from importlib.resources import as_file, files 6 | from typing import NewType, TypeVar 7 | 8 | import xarray as xr 9 | from pint import Quantity 10 | from yaml import safe_load 11 | 12 | __all__ = [ 13 | "VARIABLES", 14 | "DateStr", 15 | "DayOfYearStr", 16 | "Quantified", 17 | ] 18 | 19 | #: Type annotation for strings representing full dates (YYYY-MM-DD), may include time. 20 | DateStr = NewType("DateStr", str) 21 | 22 | #: Type annotation for strings representing dates without a year (MM-DD). 23 | DayOfYearStr = NewType("DayOfYearStr", str) 24 | 25 | #: Type annotation for thresholds and other not-exactly-a-variable quantities 26 | Quantified = TypeVar("Quantified", xr.DataArray, str, Quantity) 27 | 28 | 29 | with as_file(files("xclim.data")) as data_dir: 30 | with (data_dir / "variables.yml").open() as f: 31 | VARIABLES = safe_load(f)["variables"] 32 | """Official variables definitions. 33 | 34 | A mapping from variable name to a dict with the following keys: 35 | 36 | - canonical_units [required] : The conventional units used by this variable. 37 | - cell_methods [optional] : The conventional `cell_methods` CF attribute 38 | - description [optional] : A description of the variable, to populate dynamically generated docstrings. 39 | - dimensions [optional] : The dimensionality of the variable, an abstract version of the units. 
40 | See `xclim.units.units._dimensions.keys()` for available terms. This is especially useful for making xclim aware of 41 | "[precipitation]" variables. 42 | - standard_name [optional] : If it exists, the CF standard name. 43 | - data_flags [optional] : Data flags methods (:py:mod:`xclim.core.dataflags`) applicable to this variable. 44 | The method names are keys and values are dicts of keyword arguments to pass 45 | (an empty dict if there's nothing to configure). 46 | """ 47 | -------------------------------------------------------------------------------- /src/xclim/core/cfchecks.py: -------------------------------------------------------------------------------- 1 | """ 2 | CF-Convention Checking 3 | ====================== 4 | 5 | Utilities designed to verify the compliance of metadata with the CF-Convention. 6 | """ 7 | 8 | from __future__ import annotations 9 | 10 | import fnmatch 11 | import re 12 | from collections.abc import Sequence 13 | 14 | import xarray as xr 15 | 16 | from xclim.core._exceptions import ValidationError 17 | from xclim.core._types import VARIABLES 18 | from xclim.core.options import cfcheck 19 | 20 | 21 | @cfcheck 22 | def check_valid(var: xr.DataArray, key: str, expected: str | Sequence[str]): 23 | r""" 24 | Check that a variable's attribute has one of the expected values and raise a ValidationError if otherwise. 25 | 26 | Parameters 27 | ---------- 28 | var : xr.DataArray 29 | The variable to check. 30 | key : str 31 | The attribute to check. 32 | expected : str or sequence of str 33 | The expected value(s). 34 | 35 | Raises 36 | ------ 37 | ValidationError 38 | If the attribute is not present or does not match the expected value(s). 
39 | """ 40 | att = getattr(var, key, None) 41 | if att is None: 42 | raise ValidationError(f"Variable does not have a `{key}` attribute.") 43 | if isinstance(expected, str): 44 | expected = [expected] 45 | for exp in expected: 46 | if fnmatch.fnmatch(att, exp): 47 | break 48 | else: 49 | raise ValidationError( 50 | f"Variable has a non-conforming {key}: Got `{att}`, expected `{expected}`", 51 | ) 52 | 53 | 54 | def cfcheck_from_name(varname: str, vardata: xr.DataArray, attrs: list[str] | None = None): 55 | """ 56 | Perform cfchecks on a DataArray using specifications from xclim's default variables. 57 | 58 | Parameters 59 | ---------- 60 | varname : str 61 | The name of the variable to check. 62 | vardata : xr.DataArray 63 | The variable to check. 64 | attrs : list of str, optional 65 | Attributes to check. Default is ["cell_methods", "standard_name"]. 66 | 67 | Raises 68 | ------ 69 | ValidationError 70 | If the variable does not meet the expected CF-Convention. 71 | """ 72 | if attrs is None: 73 | attrs = ["cell_methods", "standard_name"] 74 | 75 | data = VARIABLES[varname] 76 | if "cell_methods" in data and "cell_methods" in attrs: 77 | _check_cell_methods(getattr(vardata, "cell_methods", None), data["cell_methods"]) 78 | if "standard_name" in data and "standard_name" in attrs: 79 | check_valid(vardata, "standard_name", data["standard_name"]) 80 | 81 | 82 | @cfcheck 83 | def _check_cell_methods(data_cell_methods: str, expected_method: str) -> None: 84 | if data_cell_methods is None: 85 | raise ValidationError("Variable does not have a `cell_methods` attribute.") 86 | EXTRACT_CELL_METHOD_REGEX = r"(\s*\S+\s*:(\s+[\w()-]+)+)(?!\S*:)" 87 | for m in re.compile(EXTRACT_CELL_METHOD_REGEX).findall(data_cell_methods): 88 | if expected_method in m[0]: 89 | return None 90 | raise ValidationError( 91 | f"Variable has a non-conforming cell_methods: " 92 | f"Got `{data_cell_methods}`, which do not include the expected " 93 | f"`{expected_method}`." 
94 | ) 95 | -------------------------------------------------------------------------------- /src/xclim/core/datachecks.py: -------------------------------------------------------------------------------- 1 | """ 2 | Data Checks 3 | =========== 4 | 5 | Utilities designed to check the validity of data inputs. 6 | """ 7 | 8 | from __future__ import annotations 9 | 10 | from collections.abc import Sequence 11 | 12 | import xarray as xr 13 | 14 | from xclim.core._exceptions import ValidationError 15 | from xclim.core.calendar import compare_offsets, parse_offset 16 | from xclim.core.options import datacheck 17 | 18 | 19 | @datacheck 20 | def check_freq(var: xr.DataArray, freq: str | Sequence[str], strict: bool = True) -> None: 21 | """ 22 | Raise an error if not series has not the expected temporal frequency or is not monotonically increasing. 23 | 24 | Parameters 25 | ---------- 26 | var : xr.DataArray 27 | Input array. 28 | freq : str or sequence of str 29 | The expected temporal frequencies, using Pandas frequency terminology 30 | (e.g. {'Y', 'M', 'D', 'h', 'min', 's', 'ms', 'us'}) and multiples thereof. 31 | To test strictly for 'W', pass '7D' with `strict=True`. 32 | This ignores the start/end flag and the anchor (ex: 'YS-JUL' will validate against 'Y'). 33 | strict : bool 34 | Whether multiples of the frequencies are considered invalid or not. With `strict` set to False, a '3h' series 35 | will not raise an error if freq is set to 'h'. 36 | 37 | Raises 38 | ------ 39 | ValidationError 40 | - If the frequency of `var` is not inferrable. 41 | - If the frequency of `var` does not match the requested `freq`. 42 | """ 43 | if isinstance(freq, str): 44 | freq = [freq] 45 | exp_base = [parse_offset(frq)[1] for frq in freq] 46 | v_freq = xr.infer_freq(var.time) 47 | if v_freq is None: 48 | raise ValidationError( 49 | "Unable to infer the frequency of the time series. To mute this, set xclim's option data_validation='log'." 
50 | ) 51 | v_base = parse_offset(v_freq)[1] 52 | if v_base not in exp_base or (strict and all(compare_offsets(v_freq, "!=", frq) for frq in freq)): 53 | raise ValidationError( 54 | f"Frequency of time series not {'strictly' if strict else ''} in {freq}. " 55 | "To mute this, set xclim's option data_validation='log'." 56 | ) 57 | 58 | 59 | def check_daily(var: xr.DataArray) -> None: 60 | """ 61 | Raise an error if series has a frequency other that daily, or is not monotonically increasing. 62 | 63 | Parameters 64 | ---------- 65 | var : xr.DataArray 66 | Input array. 67 | 68 | Notes 69 | ----- 70 | This does not check for gaps in series. 71 | """ 72 | check_freq(var, "D") 73 | 74 | 75 | @datacheck 76 | def check_common_time(inputs: Sequence[xr.DataArray]) -> None: 77 | """ 78 | Raise an error if the list of inputs doesn't have a single common frequency. 79 | 80 | Parameters 81 | ---------- 82 | inputs : Sequence of xr.DataArray 83 | Input arrays. 84 | 85 | Raises 86 | ------ 87 | ValidationError 88 | - if the frequency of any input can't be inferred 89 | - if inputs have different frequencies 90 | - if inputs have a daily or hourly frequency, but they are not given at the same time of day. 91 | """ 92 | # Check all have the same freq 93 | freqs = [xr.infer_freq(da.time) for da in inputs] 94 | if None in freqs: 95 | raise ValidationError( 96 | "Unable to infer the frequency of the time series. To mute this, set xclim's option data_validation='log'." 97 | ) 98 | if len(set(freqs)) != 1: 99 | raise ValidationError( 100 | f"Inputs have different frequencies. Got : {freqs}.To mute this, set xclim's option data_validation='log'." 
101 | ) 102 | 103 | # Check if anchor is the same 104 | freq = freqs[0] 105 | base = parse_offset(freq)[1] 106 | fmt = {"h": ":%M", "D": "%H:%M"} 107 | if base in fmt: 108 | outs = {da.indexes["time"][0].strftime(fmt[base]) for da in inputs} 109 | if len(outs) > 1: 110 | raise ValidationError( 111 | f"All inputs have the same frequency ({freq}), but they are not anchored on the same " 112 | f"minutes (got {outs}). xarray's alignment would silently fail. You can try to fix this " 113 | f"with `da.resample('{freq}').mean()`. To mute this, set xclim's option data_validation='log'." 114 | ) 115 | -------------------------------------------------------------------------------- /src/xclim/data/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Data files for xclim 3 | ==================== 4 | 5 | JSON and YAML definitions for virtual modules and internationalisation support. 6 | 7 | Currently, the following virtual modules are defined: 8 | * ANUCLIM (`anuclim.yml`) 9 | * CF (`cf.yml`) 10 | * ICCLIM (`icclim.yml`) 11 | 12 | And the following languages are supported: 13 | * English (default) 14 | * French (`fr.json`) 15 | 16 | These files are used by xclim to define new indicators and to provide translations for the user interface. 17 | 18 | Additionally, this package contains the following data files: 19 | * `schema.yml`: YAML schema for detailing class definitions used for indicators. 20 | * `variables.yml`: YAML schema defining the variables and their metadata used in the indicator definitions. 
21 | """ 22 | -------------------------------------------------------------------------------- /src/xclim/data/schema.yml: -------------------------------------------------------------------------------- 1 | base: str(required=False) 2 | doc: str(required=False) 3 | keywords: str(required=False) 4 | module: str(required=False) 5 | realm: str(required=False) 6 | references: str(required=False) 7 | indicators: map(include('indicator'), key=regex(r'^[-\w]+$')) 8 | variables: map(include('variable'), key=regex(r'^[\w]+$'), required=False) 9 | --- 10 | indicator: 11 | abstract: str(required=False) 12 | allowed_periods: list(enum('A', 'Y', 'Q', 'M', 'W'), required=False) 13 | src_freq: any(str(), list(str()), required=False) 14 | base: str(required=False) 15 | compute: str(required=False) 16 | input: map(str(), key=str(), required=False) 17 | keywords: str(required=False) 18 | measure: str(required=False) 19 | missing: str(required=False) 20 | missing_options: map(key=str(), required=False) 21 | notes: str(required=False) 22 | cf_attrs: any(list(include('cf_attrs')), include('cf_attrs'), required=False) 23 | parameters: map(str(), num(), bool(), null(), include('parameter'), include('indexer'), key=str(), required=False) 24 | realm: str(required=False) 25 | references: str(required=False) 26 | title: str(required=False) 27 | context: str(required=False) 28 | 29 | cf_attrs: map(str(), key=str(), required=False) 30 | 31 | parameter: 32 | description: str(required=False) 33 | default: any(str(), num(), bool(), null(), include('indexer'), required=False) 34 | choices: list(str(), required=False) 35 | units: str(required=False) 36 | kind: int(required=False) 37 | 38 | indexer: 39 | drop: bool(required=False) 40 | month: any(int(), list(int()), required=False) 41 | season: any(str(), list(str()), required=False) 42 | doy_bounds: list(int(), required=False, maxItems=2) 43 | date_bounds: list(str(), required=False, maxItems=2) 44 | include_bounds: any(bool(), list(bool(), 
bool()), required=False, maxItems=2) 45 | 46 | variable: 47 | canonical_units: str(required=True) 48 | cell_methods: str(required=False) 49 | description: str(required=True) 50 | standard_name: str(required=False) 51 | data_flags: list(str(), any(), required=False) 52 | -------------------------------------------------------------------------------- /src/xclim/ensembles/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Ensemble tools 3 | ============== 4 | 5 | This submodule defines some useful methods for dealing with ensembles of climate simulations. 6 | In xclim, an "ensemble" is a `Dataset` or a `DataArray` where multiple climate realizations 7 | or models are concatenated along the `realization` dimension. 8 | """ 9 | 10 | from __future__ import annotations 11 | 12 | from xclim.ensembles._base import ( 13 | create_ensemble, 14 | ensemble_mean_std_max_min, 15 | ensemble_percentiles, 16 | ) 17 | from xclim.ensembles._partitioning import ( 18 | fractional_uncertainty, 19 | general_partition, 20 | hawkins_sutton, 21 | lafferty_sriver, 22 | ) 23 | from xclim.ensembles._reduce import ( 24 | kkz_reduce_ensemble, 25 | kmeans_reduce_ensemble, 26 | make_criteria, 27 | plot_rsqprofile, 28 | ) 29 | from xclim.ensembles._robustness import ( 30 | robustness_categories, 31 | robustness_coefficient, 32 | robustness_fractions, 33 | ) 34 | -------------------------------------------------------------------------------- /src/xclim/indicators/__init__.py: -------------------------------------------------------------------------------- 1 | """Indicators module.""" 2 | 3 | # The actual code for importing virtual modules is in the top-level __init__. 4 | # This is for import reasons: we need to make sure all normal indicators are created before. 
5 | from __future__ import annotations 6 | -------------------------------------------------------------------------------- /src/xclim/indicators/atmos/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Atmospheric Indicators 3 | ====================== 4 | 5 | While the `indices` module stores the computing functions, this module defines Indicator classes and instances that 6 | include a number of functionalities, such as input validation, unit conversion, output meta-data handling, 7 | and missing value masking. 8 | 9 | The concept followed here is to define Indicator subclasses for each input variable, then create instances 10 | for each indicator. 11 | """ 12 | 13 | from __future__ import annotations 14 | 15 | from ._conversion import * 16 | from ._precip import * 17 | from ._synoptic import * 18 | from ._temperature import * 19 | from ._wind import * 20 | -------------------------------------------------------------------------------- /src/xclim/indicators/atmos/_synoptic.py: -------------------------------------------------------------------------------- 1 | """Synoptic indicator definitions.""" 2 | 3 | from __future__ import annotations 4 | 5 | from xclim import indices 6 | from xclim.core.indicator import Indicator 7 | 8 | __all__ = ["jetstream_metric_woollings"] 9 | 10 | 11 | class JetStream(Indicator): 12 | """Indicator involving daily u- and/or v-component wind series.""" 13 | 14 | src_freq = "D" 15 | 16 | 17 | jetstream_metric_woollings = JetStream( 18 | title="Strength and latitude of jetstream", 19 | identifier="jetstream_metric_woollings", 20 | var_name=["jetlat", "jetstr"], 21 | units=["degrees_north", "m s-1"], 22 | long_name=[ 23 | "Latitude of maximum smoothed zonal wind speed", 24 | "Maximum strength of smoothed zonal wind speed", 25 | ], 26 | description=[ 27 | "Daily latitude of maximum Lanczos smoothed zonal wind speed.", 28 | "Daily maximum strength of Lanczos smoothed zonal wind speed.", 
"""Wind indicator definitions."""

from __future__ import annotations

from xclim import indices
from xclim.core.indicator import ResamplingIndicatorWithIndexing

__all__ = [
    "calm_days",
    "sfcWind_max",
    "sfcWind_mean",
    "sfcWind_min",
    "sfcWindmax_max",
    "sfcWindmax_mean",
    "sfcWindmax_min",
    "windy_days",
]


class Wind(ResamplingIndicatorWithIndexing):
    """Indicator involving daily sfcWind series."""

    src_freq = "D"
    keywords = "wind"


calm_days = Wind(
    title="Calm days",
    identifier="calm_days",
    units="days",
    long_name="Number of days with surface wind speed below {thresh}",
    description="{freq} number of days with surface wind speed below {thresh}.",
    abstract="Number of days with surface wind speed below threshold.",
    cell_methods="time: sum over days",
    compute=indices.calm_days,
)

windy_days = Wind(
    title="Windy days",
    identifier="windy_days",
    units="days",
    standard_name="number_of_days_with_wind_speed_above_threshold",
    long_name="Number of days with surface wind speed at or above {thresh}",
    description="{freq} number of days with surface wind speed at or above {thresh}.",
    abstract="Number of days with surface wind speed at or above threshold.",
    cell_methods="time: sum over days",
    compute=indices.windy_days,
)

# Consistency fix: the six statistics below previously lacked the trailing period in
# `description`, unlike `calm_days`/`windy_days` and the other indicator modules.
sfcWind_max = Wind(
    title="Maximum near-surface mean wind speed",
    identifier="sfcWind_max",
    units="m s-1",
    standard_name="wind_speed",
    long_name="Maximum daily mean wind speed",
    description="{freq} maximum of daily mean wind speed.",
    abstract="Maximum of daily mean near-surface wind speed.",
    cell_methods="time: max over days",
    compute=indices.sfcWind_max,
)

sfcWind_mean = Wind(
    title="Mean near-surface wind speed",
    identifier="sfcWind_mean",
    units="m s-1",
    standard_name="wind_speed",
    long_name="Mean daily mean wind speed",
    description="{freq} mean of daily mean wind speed.",
    abstract="Mean of daily near-surface wind speed.",
    cell_methods="time: mean over days",
    compute=indices.sfcWind_mean,
)

sfcWind_min = Wind(
    title="Minimum near-surface mean wind speed",
    identifier="sfcWind_min",
    units="m s-1",
    standard_name="wind_speed",
    long_name="Minimum daily mean wind speed",
    description="{freq} minimum of daily mean wind speed.",
    abstract="Minimum of daily mean near-surface wind speed.",
    cell_methods="time: min over days",
    compute=indices.sfcWind_min,
)

sfcWindmax_max = Wind(
    title="Maximum near-surface maximum wind speed",
    identifier="sfcWindmax_max",
    units="m s-1",
    standard_name="wind_speed",
    long_name="Maximum daily maximum wind speed",
    description="{freq} maximum of daily maximum wind speed.",
    abstract="Maximum of daily maximum near-surface wind speed.",
    cell_methods="time: max over days",
    compute=indices.sfcWindmax_max,
)

sfcWindmax_mean = Wind(
    title="Mean near-surface maximum wind speed",
    identifier="sfcWindmax_mean",
    units="m s-1",
    standard_name="wind_speed",
    long_name="Mean daily maximum wind speed",
    description="{freq} mean of daily maximum wind speed.",
    abstract="Mean of daily maximum near-surface wind speed.",
    cell_methods="time: mean over days",
    compute=indices.sfcWindmax_mean,
)

sfcWindmax_min = Wind(
    title="Minimum near-surface maximum wind speed",
    identifier="sfcWindmax_min",
    units="m s-1",
    standard_name="wind_speed",
    long_name="Minimum daily maximum wind speed",
    description="{freq} minimum of daily maximum wind speed.",
    abstract="Minimum of daily maximum near-surface wind speed.",
    cell_methods="time: min over days",
    compute=indices.sfcWindmax_min,
)
"""Sea ice indicator definitions."""

from __future__ import annotations

from xclim import indices
from xclim.core.indicator import Indicator

__all__ = ["sea_ice_area", "sea_ice_extent"]


class SiconcAreacello(Indicator):
    """Class for indicators having sea ice concentration and grid cell area inputs."""

    # NOTE(review): missing-value checks are skipped here — presumably because these
    # indicators reduce over space (lon/lat sums) rather than resampling in time; confirm.
    missing = "skip"
    keywords = "seaice"


# Extent: total area of all grid cells whose sea ice concentration exceeds {thresh}.
sea_ice_extent = SiconcAreacello(
    title="Sea ice extent",
    identifier="sea_ice_extent",
    units="m2",
    standard_name="sea_ice_extent",
    long_name="Sum of ocean areas where sea ice concentration exceeds {thresh}",
    description="The sum of ocean areas where sea ice concentration exceeds {thresh}.",
    abstract="A measure of the extent of all areas where sea ice concentration exceeds a threshold.",
    cell_methods="lon: sum lat: sum",
    compute=indices.sea_ice_extent,
)


# Area: sum of the ice-covered portion of those same cells (see `indices.sea_ice_area`).
sea_ice_area = SiconcAreacello(
    title="Sea ice area",
    identifier="sea_ice_area",
    units="m2",
    standard_name="sea_ice_area",
    long_name="Sum of ice-covered areas where sea ice concentration exceeds {thresh}",
    description="The sum of ice-covered areas where sea ice concentration exceeds {thresh}.",
    abstract="A measure of total ocean surface covered by sea ice.",
    cell_methods="lon: sum lat: sum",
    compute=indices.sea_ice_area,
)
The docstrings adhere to the `NumPy`_ style convention and are meant as a way to store CF-Convention metadata as
well as information relevant to third-party libraries (such as a WPS server).

The first line of the docstring (the short summary) will be assigned to the output's `long_name` attribute. The
`long_name` attribute is defined by the NetCDF User Guide to contain a long descriptive name which may, for example,
be used for labeling plots.
"""Synoptic indice definitions."""

from __future__ import annotations

import cf_xarray  # noqa: F401, pylint: disable=unused-import
import numpy as np
import xarray

from xclim.core.units import convert_units_to, declare_units

# Frequencies : YS: year start, QS-DEC: seasons starting in december, MS: month start
# See https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html

# -------------------------------------------------- #
# ATTENTION: ASSUME ALL INDICES WRONG UNTIL TESTED ! #
# -------------------------------------------------- #

__all__ = [
    "jetstream_metric_woollings",
]


@declare_units(ua="[speed]")
def jetstream_metric_woollings(
    ua: xarray.DataArray,
) -> tuple[xarray.DataArray, xarray.DataArray]:
    """
    Strength and latitude of jetstream.

    Identify latitude and strength of maximum smoothed zonal wind speed in the region from 15 to 75°N and -60 to 0°E,
    using the formula outlined in :cite:p:`woollings_variability_2010`. Wind is smoothed using a Lanczos filter
    approach.

    Parameters
    ----------
    ua : xarray.DataArray
        Eastward wind component (u) at between 750 and 950 hPa.

    Returns
    -------
    (xarray.DataArray, xarray.DataArray)
        Daily time series of latitude of jetstream and daily time series of strength of jetstream.

    Warnings
    --------
    This metric expects eastward wind component (u) to be on a regular grid (i.e. Plate Carree, 1D lat and lon)

    References
    ----------
    :cite:cts:`woollings_variability_2010`
    """
    # Select longitudes in the -60 to 0°E range.
    # The two masks accept both the [0, 360] and the [-180, 180] longitude conventions.
    lon = ua.cf["longitude"]
    ilon = (lon >= 300) * (lon <= 360) + (lon >= -60) * (lon <= 0)
    if not ilon.any():
        raise ValueError("Make sure the grid includes longitude values in a range between -60 and 0°E.")

    # Select latitudes in the 15 to 75°N range.
    lat = ua.cf["latitude"]
    ilat = (lat >= 15) * (lat <= 75)
    if not ilat.any():
        raise ValueError("Make sure the grid includes latitude values in a range between 15 and 75°N.")

    # Select levels between 750 and 950 hPa.
    # Both bounds are converted to the units of the data's vertical coordinate before comparing.
    pmin = convert_units_to("750 hPa", ua.cf["vertical"])
    pmax = convert_units_to("950 hPa", ua.cf["vertical"])

    p = ua.cf["vertical"]
    ip = (p >= pmin) * (p <= pmax)
    if not ip.any():
        raise ValueError("Make sure the grid includes pressure values in a range between 750 and 950 hPa.")

    ua = ua.cf.sel(
        vertical=ip,
        latitude=ilat,
        longitude=ilon,
    )

    zonal_mean = ua.cf.mean(["vertical", "longitude"])

    # apply Lanczos filter - parameters are hard-coded following those used in Woollings (2010)
    filter_freq = 10
    window_size = 61
    cutoff = 1 / filter_freq
    # NOTE(review): since window_size (61) > filter_freq (10), the first comparison is
    # implied by the second; it is kept to make both constraints explicit.
    if ua.time.size <= filter_freq or ua.time.size <= window_size:
        raise ValueError(f"Time series is too short to apply 61-day Lanczos filter (got a length of {ua.time.size})")

    # compute low-pass filter weights
    lanczos_weights = _compute_low_pass_filter_weights(window_size=window_size, cutoff=cutoff)
    # apply the filter: rolling(...).construct("window") materializes a window dimension, so
    # the dot product with the weights is a discrete convolution. Centered windows near the
    # edges of the series contain NaNs, so the smoothed values there are NaN as well.
    ua_lf = zonal_mean.rolling(time=window_size, center=True).construct("window").dot(lanczos_weights)

    # Get latitude & eastward wind component units
    lat_name = ua.cf["latitude"].name
    lat_units = ua.cf["latitude"].units
    ua_units = ua.units

    # Latitude of the smoothed-wind maximum and the maximum itself, per time step.
    jetlat = ua_lf.cf.idxmax(lat_name).rename("jetlat").assign_attrs(units=lat_units)
    jetstr = ua_lf.cf.max(lat_name).rename("jetstr").assign_attrs(units=ua_units)
    return jetlat, jetstr


def _compute_low_pass_filter_weights(window_size: int, cutoff: float) -> xarray.DataArray:
    """
    Return low-pass (Lanczos-style) filter weights along a ``window`` dimension.

    Parameters
    ----------
    window_size : int
        Length of the filter window; exactly this many weights are returned.
    cutoff : float
        Cutoff frequency of the low-pass filter, in inverse time steps.

    Returns
    -------
    xarray.DataArray
        1-D array of weights with dimension ``window``.
    """
    order = ((window_size - 1) // 2) + 1
    nwts = 2 * order + 1
    w = np.zeros([nwts])
    n = nwts // 2
    # Central weight of the ideal low-pass response.
    w[n] = 2 * cutoff
    k = np.arange(1.0, n)
    # Sigma factor tapering the truncated sinc response (reduces Gibbs ringing).
    sigma = np.sin(np.pi * k / n) * n / (np.pi * k)
    firstfactor = np.sin(2.0 * np.pi * cutoff * k) / (np.pi * k)
    # The filter is symmetric: mirror the weights on both sides of the centre.
    w[n - 1 : 0 : -1] = firstfactor * sigma
    w[n + 1 : -1] = firstfactor * sigma

    # Trim the over-allocated buffer so exactly `window_size` weights remain
    # (drop one leading entry for odd windows, plus the trailing zero).
    lanczos_weights = xarray.DataArray(w[0 + (window_size % 2) : -1], dims=["window"])
    return lanczos_weights
Statistical downscaling and bias adjustment submodule. 3 | 4 | This module is a placeholder for the `xclim.sdba` submodule, which has been split into its own package `xsdba`. 5 | """ 6 | 7 | import warnings 8 | 9 | try: 10 | from xsdba import * # pylint: disable=wildcard-import,unused-wildcard-import 11 | 12 | warnings.warn( 13 | "The `xclim.sdba` module has been split into its own package `xsdba`. " 14 | "Users are encouraged to use `xsdba` directly. " 15 | "For the time being, `xclim.sdba` will import `xsdba` to allow for API compatibility. " 16 | "This behaviour may change in the future. " 17 | "For more information, see: https://xsdba.readthedocs.io/en/stable/xclim_migration_guide.html" 18 | ) 19 | except ImportError as err: 20 | error_msg = ( 21 | "The `xclim.sdba` module has been split into its own package: `xsdba`. " 22 | "Run `pip install xclim[extras]` or install `xsdba` via `pip` or `conda`. " 23 | "Users are encouraged to use `xsdba` directly. " 24 | "For the time being, `xclim.sdba` will import `xsdba` to allow for API compatibility. " 25 | "This behaviour may change in the future. " 26 | "For more information, see: https://xsdba.readthedocs.io/en/stable/xclim_migration_guide.html" 27 | ) 28 | raise ImportError(error_msg) from err 29 | -------------------------------------------------------------------------------- /src/xclim/testing/__init__.py: -------------------------------------------------------------------------------- 1 | """Helpers for testing xclim.""" 2 | 3 | from __future__ import annotations 4 | 5 | from . import helpers 6 | from .utils import * 7 | -------------------------------------------------------------------------------- /src/xclim/testing/conftest.py: -------------------------------------------------------------------------------- 1 | """Specialized setup for running xclim doctests.""" 2 | 3 | # This file is the setup for the doctest suite. 
4 | # This must be run using the following commands: 5 | # python -c "from xclim.testing.utils import run_doctests; run_doctests()" 6 | 7 | from __future__ import annotations 8 | 9 | import os 10 | from pathlib import Path 11 | 12 | import pytest 13 | 14 | from xclim.testing.helpers import ( 15 | add_doctest_filepaths, 16 | add_example_file_paths, 17 | generate_atmos, 18 | ) 19 | from xclim.testing.utils import ( 20 | TESTDATA_BRANCH, 21 | TESTDATA_CACHE_DIR, 22 | TESTDATA_REPO_URL, 23 | gather_testing_data, 24 | testing_setup_warnings, 25 | ) 26 | from xclim.testing.utils import nimbus as _nimbus 27 | from xclim.testing.utils import open_dataset as _open_dataset 28 | 29 | 30 | @pytest.fixture(autouse=True, scope="session") 31 | def threadsafe_data_dir(tmp_path_factory): # numpydoc ignore=PR01 32 | """ 33 | Return a threadsafe temporary directory for storing testing data. 34 | 35 | Yields 36 | ------ 37 | Path 38 | The path to the temporary directory. 39 | """ 40 | yield Path(tmp_path_factory.getbasetemp().joinpath("data")) 41 | 42 | 43 | @pytest.fixture(scope="session") 44 | def nimbus(threadsafe_data_dir, worker_id): # numpydoc ignore=PR01 45 | """ 46 | Return a nimbus object for the test data. 47 | 48 | Returns 49 | ------- 50 | nimbus 51 | An preconfigured pooch object. 52 | """ 53 | return _nimbus( 54 | repo=TESTDATA_REPO_URL, 55 | branch=TESTDATA_BRANCH, 56 | cache_dir=(TESTDATA_CACHE_DIR if worker_id == "master" else threadsafe_data_dir), 57 | ) 58 | 59 | 60 | @pytest.fixture(scope="session") 61 | def open_dataset(threadsafe_data_dir, worker_id): # numpydoc ignore=PR01 62 | """ 63 | Return a function that opens a dataset from the test data. 64 | 65 | Returns 66 | ------- 67 | function 68 | A function that opens a dataset from the test data. 
@pytest.fixture(scope="session", autouse=True)
def doctest_setup(xdoctest_namespace, nimbus, worker_id, open_dataset) -> None:  # numpydoc ignore=PR01
    """Gather testing data on doctest run."""
    # Emit any configuration warnings once, then make sure the shared test data is
    # available in this worker's cache before any doctest executes.
    testing_setup_warnings()
    gather_testing_data(worker_cache_dir=nimbus.path, worker_id=worker_id)
    # Inject the pre-generated atmospheric datasets into the doctest namespace.
    xdoctest_namespace.update(generate_atmos(nimbus=nimbus))

    class AttrDict(dict):  # numpydoc ignore=PR01
        """A dictionary that allows access to its keys as attributes."""

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            # Aliasing __dict__ to the mapping itself makes d["key"] reachable as d.key.
            self.__dict__ = self

    # Expose the session-scoped opener both directly and as `xr.open_dataset`, so
    # doctests written against the xarray-style API transparently use the cached data.
    xdoctest_namespace["open_dataset"] = open_dataset
    xdoctest_namespace["xr"] = AttrDict()
    xdoctest_namespace["xr"].update({"open_dataset": open_dataset})
    xdoctest_namespace.update(add_doctest_filepaths())
    xdoctest_namespace.update(add_example_file_paths())
The temperature observations at 200 meteorological stations are used to show that world temperatures have actually increased at an average rate of 0.005°C. per year during the past half century.
units will be overwritten. 20 | dest["units"] = "" 21 | 22 | out = fmt.unprefix_attrs(dest, ["units"], "original_") 23 | assert out == source 24 | 25 | 26 | def test_indicator_docstring(): 27 | doc = heat_wave_frequency.__doc__.split("\n") 28 | assert doc[0] == "Heat wave frequency (realm: atmos)" 29 | assert doc[5] == "Based on indice :py:func:`~xclim.indices._multivariate.heat_wave_frequency`." 30 | assert doc[6] == "Keywords : temperature health,." 31 | assert doc[12] == " Default : `ds.tasmin`. [Required units : [temperature]]" 32 | assert ( 33 | doc[41] 34 | == " Total number of series of at least {window} consecutive days with daily minimum temperature above " 35 | "{thresh_tasmin} and daily maximum temperature above {thresh_tasmax}, " 36 | "with additional attributes: **description**: {freq} number of heat wave events within a given period. " 37 | "A heat wave occurs when daily minimum and maximum temperatures exceed {thresh_tasmin} and {thresh_tasmax}, " 38 | "respectively, over at least {window} days." 39 | ) 40 | 41 | doc = degree_days_exceedance_date.__doc__.split("\n") 42 | assert doc[21] == " Default : >. 
" 43 | 44 | 45 | def test_update_xclim_history(atmosds): 46 | @fmt.update_xclim_history 47 | def func(da, arg1, arg2=None, arg3=None): # noqa: F841 48 | return da 49 | 50 | out = func(atmosds.tas, 1, arg2=[1, 2], arg3=None) 51 | 52 | matches = re.match( 53 | r"\[([0-9-:\s]*)]\s(\w*):\s(\w*)\((.*)\)\s-\sxclim\sversion:\s(\d*\.\d*\.\d*[a-zA-Z-]*(\.\d*)?)", 54 | out.attrs["history"], 55 | ).groups() 56 | 57 | date = dt.datetime.fromisoformat(matches[0]) 58 | assert dt.timedelta(0) < (dt.datetime.now() - date) < dt.timedelta(seconds=3) 59 | assert matches[1] == "tas" 60 | assert matches[2] == "func" 61 | assert matches[3] == "da=tas, arg1=1, arg2=[1, 2], arg3=None" 62 | assert matches[4] == __version__ 63 | -------------------------------------------------------------------------------- /tests/test_generic_indicators.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from xclim import generic, set_options 7 | 8 | 9 | class TestFit: 10 | def test_simple(self, pr_ndseries, random): 11 | pr = pr_ndseries(random.random((1000, 1, 2))) 12 | ts = generic.stats(pr, freq="YS", op="max") 13 | p = generic.fit(ts, dist="gumbel_r") 14 | assert p.attrs["estimator"] == "Maximum likelihood" 15 | assert "time" not in p.dims 16 | 17 | def test_nan(self, pr_series, random): 18 | r = random.random(22) 19 | r[0] = np.nan 20 | pr = pr_series(r) 21 | 22 | out = generic.fit(pr, dist="norm") 23 | assert np.isnan(out.values[0]) 24 | with set_options(check_missing="skip"): 25 | out = generic.fit(pr, dist="norm") 26 | assert not np.isnan(out.values[0]) 27 | 28 | def test_ndim(self, pr_ndseries, random): 29 | pr = pr_ndseries(random.random((100, 1, 2))) 30 | out = generic.fit(pr, dist="norm") 31 | assert out.shape == (2, 1, 2) 32 | np.testing.assert_array_equal(out.isnull(), False) 33 | 34 | def test_options(self, q_series, random): 35 | q = q_series(random.random(19)) 36 | 
out = generic.fit(q, dist="norm") 37 | np.testing.assert_array_equal(out.isnull(), False) 38 | 39 | with set_options(missing_options={"at_least_n": {"n": 10}}): 40 | out = generic.fit(q, dist="norm") 41 | np.testing.assert_array_equal(out.isnull(), False) 42 | 43 | 44 | class TestReturnLevel: 45 | def test_seasonal(self, ndq_series): 46 | out = generic.return_level(ndq_series, mode="max", t=[2, 5], dist="gamma", season="DJF") 47 | 48 | assert out.description == ( 49 | "Frequency analysis for the maximal winter 1-day value estimated using the gamma distribution." 50 | ) 51 | assert out.name == "fa_1maxwinter" 52 | assert out.shape == (2, 2, 3) # nrt, nx, ny 53 | np.testing.assert_array_equal(out.isnull(), False) 54 | 55 | def test_any_variable(self, pr_series, random): 56 | pr = pr_series(random.random(100)) 57 | out = generic.return_level(pr, mode="max", t=2, dist="gamma") 58 | assert out.units == pr.units 59 | 60 | def test_no_indexer(self, ndq_series): 61 | out = generic.return_level(ndq_series, mode="max", t=[2, 5], dist="gamma") 62 | assert out.description in [ 63 | "Frequency analysis for the maximal annual 1-day value estimated using the gamma distribution." 
64 | ] 65 | assert out.name == "fa_1maxannual" 66 | assert out.shape == (2, 2, 3) # nrt, nx, ny 67 | np.testing.assert_array_equal(out.isnull(), False) 68 | 69 | def test_q27(self, ndq_series): 70 | out = generic.return_level(ndq_series, mode="max", t=2, dist="gamma", window=7) 71 | assert out.shape == (1, 2, 3) 72 | 73 | def test_empty(self, ndq_series): 74 | q = ndq_series.copy() 75 | q[:, 0, 0] = np.nan 76 | out = generic.return_level(q, mode="max", t=2, dist="genextreme", window=6, freq="YS") 77 | assert np.isnan(out.values[:, 0, 0]).all() 78 | 79 | 80 | class TestStats: 81 | """See other tests in test_land::TestStats""" 82 | 83 | @pytest.mark.parametrize( 84 | "op,word", 85 | [("min", "Minimum"), ("integral", "Integral"), ("doymin", "Day of minimum")], 86 | ) 87 | def test_simple(self, pr_series, random, op, word): 88 | pr = pr_series(random.random(400)) 89 | out = generic.stats(pr, freq="YS", op=op) 90 | assert out.long_name == f"{word} of variable" 91 | 92 | def test_ndq(self, ndq_series): 93 | out = generic.stats(ndq_series, freq="YS", op="min", season="MAM") 94 | assert out.attrs["units"] == "m3 s-1" 95 | 96 | def test_missing(self, ndq_series): 97 | a = ndq_series.where(~((ndq_series.time.dt.dayofyear == 5) & (ndq_series.time.dt.year == 1902))) 98 | assert a.shape == (5000, 2, 3) 99 | out = generic.stats(a, op="max", month=1) 100 | 101 | np.testing.assert_array_equal(out.sel(time="1900").isnull(), False) 102 | np.testing.assert_array_equal(out.sel(time="1902").isnull(), True) 103 | 104 | def test_3hourly(self, pr_hr_series, random): 105 | pr = pr_hr_series(random.random(366 * 24)).resample(time="3h").mean() 106 | out = generic.stats(pr, freq="MS", op="var") 107 | assert out.units == "kg2 m-4 s-2" 108 | assert out.long_name == "Variance of variable" 109 | -------------------------------------------------------------------------------- /tests/test_land.py: -------------------------------------------------------------------------------- 1 | """Tests for 
def test_rb_flashiness_index(ndq_series):
    # Bug fix: this test previously called `land.base_flow_index` (copy-pasted from the
    # test above), so the Richards-Baker flashiness indicator was never exercised.
    out = land.rb_flashiness_index(ndq_series, freq="YS")

    assert out.attrs["units"] == "1"
    assert isinstance(out, xr.DataArray)
np.testing.assert_array_equal(out, 5) 78 | 79 | 80 | def test_high_flow_frequency(q_series): 81 | a = np.zeros(366 * 2) * 10 82 | a[50:60] = 10 83 | a[200:210] = 20 84 | q = q_series(a) 85 | out = land.high_flow_frequency( 86 | q, 87 | threshold_factor=9, 88 | freq="YS", 89 | ) 90 | np.testing.assert_array_equal(out, [20, 0, np.nan]) 91 | 92 | 93 | def test_low_flow_frequency(q_series): 94 | a = np.ones(366 * 2) * 10 95 | a[50:60] = 1 96 | a[200:210] = 1 97 | q = q_series(a) 98 | out = land.low_flow_frequency(q, threshold_factor=0.2, freq="YS") 99 | np.testing.assert_array_equal(out, [20, 0, np.nan]) 100 | -------------------------------------------------------------------------------- /tests/test_options.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Tests for `xclim.core.options` 3 | from __future__ import annotations 4 | 5 | import pytest 6 | 7 | from xclim import set_options 8 | from xclim.core.missing import MissingBase 9 | from xclim.core.options import OPTIONS, register_missing_method 10 | 11 | 12 | @pytest.mark.parametrize( 13 | "option,value", 14 | [ 15 | ("metadata_locales", ["fr"]), 16 | ("data_validation", "log"), 17 | ("data_validation", "raise"), 18 | ("cf_compliance", "log"), 19 | ("cf_compliance", "raise"), 20 | ("check_missing", "wmo"), 21 | ("check_missing", "any"), 22 | ("missing_options", {"wmo": {"nm": 10, "nc": 3}}), 23 | ("missing_options", {"pct": {"subfreq": None, "tolerance": 0.1}}), 24 | ( 25 | "missing_options", 26 | {"wmo": {"nm": 10, "nc": 3}, "pct": {"subfreq": None, "tolerance": 0.1}}, 27 | ), 28 | ], 29 | ) 30 | def test_set_options_valid(option, value): 31 | old = OPTIONS[option] 32 | with set_options(**{option: value}): 33 | if option != "missing_options": 34 | assert OPTIONS[option] == value 35 | else: 36 | for k, opts in value.items(): 37 | curr_opts = OPTIONS["missing_options"][k].copy() 38 | assert curr_opts == opts 39 | assert OPTIONS[option] == old 40 | 41 
| 42 | @pytest.mark.parametrize( 43 | "option,value", 44 | [ 45 | ("metadata_locales", ["tlh"]), 46 | ("metadata_locales", [("tlh", "not/a/real/klingo/file.json")]), 47 | ("data_validation", True), 48 | ("check_missing", "from_context"), 49 | ("cf_compliance", False), 50 | ("missing_options", {"pct": {"nm": 45}}), 51 | ("missing_options", {"wmo": {"nm": 45, "nc": 3}}), 52 | ( 53 | "missing_options", 54 | {"wmo": {"nm": 45, "nc": 3}, "notachoice": {"tolerance": 45}}, 55 | ), 56 | ( 57 | "missing_options", 58 | {"wmo": {"nm": 45, "nc": 3, "_validator": lambda x: x < 1}}, 59 | ), 60 | ], 61 | ) 62 | def test_set_options_invalid(option, value): 63 | old = OPTIONS[option] 64 | with pytest.raises(ValueError): 65 | set_options(**{option: value}) 66 | assert old == OPTIONS[option] 67 | 68 | 69 | def test_register_missing_method(): 70 | @register_missing_method("test") 71 | class MissingTest(MissingBase): 72 | def is_missing(self, null, count, freq): 73 | return True 74 | 75 | @staticmethod 76 | def validate(a_param): 77 | return a_param < 3 78 | 79 | with pytest.raises(ValueError): 80 | set_options(missing_options={"test": {"a_param": 5}}) 81 | 82 | with set_options(check_missing="test"): 83 | assert OPTIONS["check_missing"] == "test" 84 | -------------------------------------------------------------------------------- /tests/test_preciptemp.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import numpy as np 4 | 5 | from xclim import atmos 6 | from xclim.core.calendar import percentile_doy 7 | 8 | K2C = 273 9 | 10 | 11 | class TestColdAndDry: 12 | def test_simple(self, tas_series, pr_series): 13 | # GIVEN 14 | raw_temp = np.full(365 * 4, 20) + K2C 15 | raw_temp[10:20] -= 10 16 | ts = tas_series(raw_temp) 17 | ts_per = percentile_doy(ts, 5, 25).sel(percentiles=25) 18 | raw_prec = np.full(365 * 4, 10) 19 | raw_prec[10:20] = 0 20 | pr = pr_series(raw_prec) 21 | pr_per = percentile_doy(pr, 5, 
25).sel(percentiles=25) 22 | # WHEN 23 | result = atmos.cold_and_dry_days(ts, pr, ts_per, pr_per, freq="MS") 24 | # THEN january has 10 cold and dry days 25 | assert result.data[0] == 10 26 | 27 | 28 | class TestWarmAndDry: 29 | def test_simple(self, tas_series, pr_series): 30 | # GIVEN 31 | raw_temp = np.full(365 * 4, 20) + K2C 32 | raw_temp[10:30] += 10 33 | ts = tas_series(raw_temp) 34 | ts_per = percentile_doy(ts, 5, 75).sel(percentiles=75) 35 | raw_prec = np.full(365 * 4, 10) 36 | raw_prec[10:30] = 0 37 | pr = pr_series(raw_prec) 38 | pr_per = percentile_doy(pr, 5, 25).sel(percentiles=25) 39 | # WHEN 40 | result = atmos.warm_and_dry_days(ts, pr, ts_per, pr_per, freq="MS") 41 | # THEN january has 20 warm and dry days 42 | assert result.data[0] == 20 43 | 44 | 45 | class TestWarmAndWet: 46 | def test_simple(self, tas_series, pr_series): 47 | # GIVEN 48 | raw_temp = np.full(365 * 4, 20) + K2C 49 | raw_temp[10:30] += 10 50 | ts = tas_series(raw_temp) 51 | ts_per = percentile_doy(ts, 5, 75).sel(percentiles=75) 52 | raw_prec = np.full(365 * 4, 10) 53 | raw_prec[10:30] += 20 54 | pr = pr_series(raw_prec) 55 | pr_per = percentile_doy(pr, 5, 75).sel(percentiles=75) 56 | # WHEN 57 | result = atmos.warm_and_wet_days(ts, pr, ts_per, pr_per, freq="MS") 58 | # THEN january has 20 warm and wet days 59 | assert result.data[0] == 20 60 | 61 | 62 | class TestColdAndWet: 63 | def test_simple(self, tas_series, pr_series): 64 | # GIVEN 65 | raw_temp = np.full(365 * 4, 20) + K2C 66 | raw_temp[10:25] -= 20 67 | ts = tas_series(raw_temp) 68 | ts_per = percentile_doy(ts, 5, 75).sel(percentiles=75) 69 | raw_prec = np.full(365 * 4, 10) 70 | raw_prec[15:30] += 20 71 | pr = pr_series(raw_prec) 72 | pr_per = percentile_doy(pr, 5, 75).sel(percentiles=75) 73 | # WHEN 74 | result = atmos.cold_and_wet_days(ts, pr, ts_per, pr_per, freq="MS") 75 | # THEN january has 10 cold and wet days 76 | assert result.data[0] == 10 77 | 
-------------------------------------------------------------------------------- /tests/test_seaice.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import numpy as np 4 | import xarray as xr 5 | 6 | from xclim import seaIce 7 | from xclim.indices import sea_ice_area, sea_ice_extent 8 | 9 | 10 | class TestSeaIceExtent: 11 | def values(self, areacello): 12 | s = xr.ones_like(areacello) 13 | s = s.where(s.lat > 0, 10) 14 | s = s.where(s.lat <= 0, 50) 15 | sic = xr.concat([s, s], dim="time") 16 | sic.attrs["units"] = "%" 17 | sic.attrs["standard_name"] = "sea_ice_area_fraction" 18 | 19 | return areacello, sic 20 | 21 | def test_simple(self, areacello): 22 | area, sic = self.values(areacello) 23 | 24 | a = sea_ice_extent(sic, area) 25 | expected = 4 * np.pi * area.r**2 / 2.0 26 | np.testing.assert_array_almost_equal(a / expected, 1, 3) 27 | assert a.units == "m2" 28 | 29 | def test_indicator(self, areacello): 30 | area, sic = self.values(areacello) 31 | 32 | a = seaIce.sea_ice_extent(sic, area) 33 | assert a.units == "m2" 34 | 35 | def test_dimensionless(self, areacello): 36 | area, sic = self.values(areacello) 37 | sic = sic / 100 38 | sic.attrs["units"] = "" 39 | 40 | a = sea_ice_extent(sic, area) 41 | expected = 4 * np.pi * area.r**2 / 2.0 42 | np.testing.assert_array_almost_equal(a / expected, 1, 3) 43 | assert a.units == "m2" 44 | 45 | def test_area_units(self, areacello): 46 | area, sic = self.values(areacello) 47 | 48 | # Convert area to km^2 49 | area /= 1e6 50 | area.attrs["units"] = "km^2" 51 | 52 | a = sea_ice_extent(sic, area) 53 | assert a.units == "km2" 54 | 55 | expected = 4 * np.pi * area.r**2 / 2.0 / 1e6 56 | np.testing.assert_array_almost_equal(a / expected, 1, 3) 57 | 58 | 59 | class TestSeaIceArea(TestSeaIceExtent): 60 | def test_simple(self, areacello): 61 | area, sic = self.values(areacello) 62 | 63 | a = sea_ice_area(sic, area) 64 | expected = 4 * np.pi * area.r**2 
/ 2.0 / 2.0 65 | np.testing.assert_array_almost_equal(a / expected, 1, 3) 66 | assert a.units == "m2" 67 | 68 | def test_indicator(self, areacello): 69 | area, sic = self.values(areacello) 70 | 71 | a = seaIce.sea_ice_area(sic, area) 72 | assert a.units == "m2" 73 | 74 | def test_dimensionless(self, areacello): 75 | area, sic = self.values(areacello) 76 | sic /= 100 77 | sic.attrs["units"] = "" 78 | 79 | a = sea_ice_area(sic, area) 80 | expected = 4 * np.pi * area.r**2 / 2.0 / 2.0 81 | np.testing.assert_array_almost_equal(a / expected, 1, 3) 82 | assert a.units == "m2" 83 | 84 | def test_area_units(self, areacello): 85 | area, sic = self.values(areacello) 86 | 87 | # Convert area to km^2 88 | area /= 1e6 89 | area.attrs["units"] = "km^2" 90 | 91 | a = sea_ice_area(sic, area) 92 | assert a.units == "km2" 93 | 94 | expected = 4 * np.pi * area.r**2 / 2.0 / 2.0 / 1e6 95 | np.testing.assert_array_almost_equal(a / expected, 1, 3) 96 | -------------------------------------------------------------------------------- /tests/test_testing_utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import platform 4 | from pathlib import Path 5 | 6 | import numpy as np 7 | import pytest 8 | from xarray import Dataset 9 | 10 | from xclim import __version__ as __xclim_version__ 11 | from xclim.testing.helpers import test_timeseries as timeseries 12 | from xclim.testing.utils import open_dataset, publish_release_notes, show_versions 13 | 14 | 15 | class TestFixtures: 16 | def test_timeseries_made_up_variable(self): 17 | ds = timeseries( 18 | np.zeros(31), 19 | "luminiferous_aether_flux", 20 | units="W K mol A-1 m-2 s-1", 21 | as_dataset=True, 22 | ) 23 | 24 | assert isinstance(ds, Dataset) 25 | assert ds.luminiferous_aether_flux.attrs["units"] == "W K mol A-1 m-2 s-1" 26 | assert "standard_name" not in ds.luminiferous_aether_flux.attrs 27 | 28 | 29 | class TestFileRequests: 30 | @staticmethod 31 | def 
file_md5_checksum(f_name): 32 | import hashlib 33 | 34 | hash_md5 = hashlib.md5() # noqa: S324 35 | with open(f_name, "rb") as f: 36 | hash_md5.update(f.read()) 37 | return hash_md5.hexdigest() 38 | 39 | @pytest.mark.requires_internet 40 | def test_open_testdata( 41 | self, 42 | ): 43 | from xclim.testing.utils import default_testdata_cache, default_testdata_version 44 | 45 | nimbus_kwargs = {"cache_dir": default_testdata_cache, "branch": default_testdata_version} 46 | 47 | # Test with top-level default engine 48 | ds = open_dataset( 49 | "cmip5/tas_Amon_CanESM2_rcp85_r1i1p1_200701-200712.nc", 50 | nimbus_kwargs=nimbus_kwargs, 51 | engine="h5netcdf", 52 | ) 53 | assert ds.lon.size == 128 54 | 55 | def test_md5_sum(self): 56 | test_data = Path(__file__).parent / "data" 57 | callendar = test_data / "callendar_1938.txt" 58 | md5_sum = self.file_md5_checksum(callendar) 59 | if platform.system() == "Windows": 60 | # Windows has a different line ending (CR-LF) than Unix (LF) 61 | assert md5_sum == "38083271c2d4c85dea6bd6baf23d34de" # noqa 62 | else: 63 | assert md5_sum == "9a5d9f94d76d4f9d9b7aaadbe8cbf541" # noqa 64 | 65 | 66 | class TestReleaseSupportFuncs: 67 | def test_show_version_file(self, tmp_path): 68 | temp_filename = tmp_path.joinpath("version_info.txt") 69 | show_versions(file=temp_filename) 70 | 71 | with temp_filename.open(encoding="utf-8") as f: 72 | contents = f.readlines().copy() 73 | assert "INSTALLED VERSIONS\n" in contents 74 | assert "------------------\n" in contents 75 | assert f"python: {platform.python_version()}\n" in contents 76 | assert f"xclim: {__xclim_version__}\n" in contents 77 | assert "boltons: installed\n" in contents 78 | 79 | @pytest.mark.requires_docs 80 | def test_release_notes_file(self, tmp_path): 81 | temp_filename = tmp_path.joinpath("version_info.txt") 82 | publish_release_notes( 83 | style="md", 84 | file=temp_filename, 85 | changes=Path(__file__).parent.parent.joinpath("CHANGELOG.rst"), 86 | ) 87 | 88 | with 
temp_filename.open(encoding="utf-8") as f: 89 | assert "# Changelog" in f.readlines()[0] 90 | 91 | @pytest.mark.requires_docs 92 | def test_release_notes_file_not_implemented(self, tmp_path): 93 | temp_filename = tmp_path.joinpath("version_info.txt") 94 | with pytest.raises(NotImplementedError): 95 | publish_release_notes( 96 | style="qq", 97 | file=temp_filename, 98 | changes=Path(__file__).parent.parent.joinpath("CHANGELOG.rst"), 99 | ) 100 | 101 | @pytest.mark.requires_docs 102 | def test_error(self): 103 | with pytest.raises(FileNotFoundError): 104 | publish_release_notes("md", changes="foo") 105 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Test for utils 3 | from __future__ import annotations 4 | 5 | import numpy as np 6 | import xarray as xr 7 | 8 | from xclim.core.utils import _chunk_like, ensure_chunk_size, nan_calc_percentiles 9 | from xclim.testing.helpers import test_timeseries as _test_timeseries 10 | 11 | 12 | def test_ensure_chunk_size(): 13 | da = xr.DataArray(np.zeros((20, 21, 20)), dims=("x", "y", "z")) 14 | 15 | out = ensure_chunk_size(da, x=10, y=-1) 16 | 17 | assert da is out 18 | 19 | dac = da.chunk({"x": (1,) * 20, "y": (10, 10, 1), "z": (10, 10)}) 20 | 21 | out = ensure_chunk_size(dac, x=3, y=5, z=-1) 22 | 23 | assert out.chunks[0] == (3, 3, 3, 3, 3, 5) 24 | assert out.chunks[1] == (10, 11) 25 | assert out.chunks[2] == (20,) 26 | 27 | 28 | class TestNanCalcPercentiles: 29 | def test_calc_perc_type7(self): 30 | # Example array from: https://en.wikipedia.org/wiki/Percentile#The_nearest-rank_method 31 | arr = np.asarray([15.0, 20.0, 35.0, 40.0, 50.0]) 32 | res = nan_calc_percentiles(arr, percentiles=[40.0], alpha=1, beta=1) 33 | # The expected is from R `quantile(arr, probs=c(0.4), type=7)` 34 | assert res[()] == 29 35 | 36 | def test_calc_perc_type8(self): 37 | # Example 
array from: https://en.wikipedia.org/wiki/Percentile#The_nearest-rank_method 38 | arr = np.asarray([[15.0, 20.0, 35.0, 40.0, 50.0], [15.0, 20.0, 35.0, 40.0, 50.0]]) 39 | res = nan_calc_percentiles( 40 | arr, 41 | percentiles=[40.0], 42 | alpha=1.0 / 3.0, 43 | beta=1.0 / 3.0, 44 | ) 45 | # The expected is from R `quantile(arr, probs=c(0.4), type=8)` 46 | assert np.all(res[0][0] == 27) 47 | assert np.all(res[0][1] == 27) 48 | 49 | def test_calc_perc_2d(self): 50 | # Example array from: https://en.wikipedia.org/wiki/Percentile#The_nearest-rank_method 51 | arr = np.asarray([[15.0, 20.0, 35.0, 40.0, 50.0], [15.0, 20.0, 35.0, 40.0, 50.0]]) 52 | res = nan_calc_percentiles(arr, percentiles=[40.0]) 53 | # The expected is from R ` quantile(c(15.0, 20.0, 35.0, 40.0, 50.0), probs=0.4)` 54 | assert np.all(res[0][0] == 29) 55 | assert np.all(res[0][1] == 29) 56 | 57 | def test_calc_perc_nan(self): 58 | arr = np.asarray([np.nan]) 59 | res = nan_calc_percentiles(arr, percentiles=[50.0]) 60 | assert np.isnan(res) 61 | 62 | def test_calc_perc_empty(self): 63 | arr = np.asarray([]) 64 | res = nan_calc_percentiles(arr) 65 | assert np.isnan(res) 66 | 67 | def test_calc_perc_partial_nan(self): 68 | arr = np.asarray([np.nan, 41.0, 41.0, 43.0, 43.0]) 69 | res = nan_calc_percentiles(arr, percentiles=[50.0], alpha=1 / 3.0, beta=1 / 3.0) 70 | # The expected is from R `quantile(arr, 0.5, type=8, na.rm = TRUE)` 71 | # Note that scipy mquantiles would give a different result here 72 | assert res[()] == 42.0 73 | 74 | 75 | def test_chunk_like(): 76 | da = _test_timeseries( 77 | np.zeros( 78 | 100, 79 | ), 80 | "tas", 81 | ) 82 | da = xr.concat([da] * 10, xr.DataArray(np.arange(10), dims=("lat",), name="lat")) 83 | 84 | assert isinstance(da.lat.variable, xr.core.variable.IndexVariable) 85 | t, la = _chunk_like(da.time, da.lat, chunks={"time": 10, "lat": 1}) 86 | assert t.chunks[0] == tuple([10] * 10) 87 | assert la.chunks[0] == tuple([1] * 10) 88 | 
-------------------------------------------------------------------------------- /tests/test_wind.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from xclim import atmos 7 | 8 | 9 | class TestWindSpeedIndicators: 10 | test_data = "ERA5/daily_surface_cancities_1990-1993.nc" 11 | 12 | def test_calm_windy_days(self, open_dataset): 13 | with open_dataset(self.test_data) as ds: 14 | sfcwind, _ = atmos.wind_speed_from_vector(ds.uas, ds.vas, calm_wind_thresh="0 m/s") 15 | calm = atmos.calm_days(sfcwind, thresh="5 m/s") 16 | windy = atmos.windy_days(sfcwind, thresh="5 m/s") 17 | c = sfcwind.resample(time="MS").count() 18 | np.testing.assert_array_equal(calm + windy, c) 19 | 20 | 21 | class TestSfcWind: 22 | test_data = "ERA5/daily_surface_cancities_1990-1993.nc" 23 | 24 | @pytest.mark.parametrize( 25 | "metric", 26 | ["mean", "max", "min"], 27 | ) 28 | def test_sfcWind(self, open_dataset, metric): 29 | with open_dataset(self.test_data) as ds: 30 | sfcWind, _ = atmos.wind_speed_from_vector(ds.uas, ds.vas) 31 | sfcWind_calculated = getattr(atmos, f"sfcWind_{metric}")(sfcWind) 32 | 33 | resample = sfcWind.resample(time="YS") 34 | c = getattr(resample, metric)() 35 | np.testing.assert_array_equal(sfcWind_calculated, c) 36 | 37 | 38 | class TestSfcWindMax: 39 | test_data = "ERA5/daily_surface_cancities_1990-1993.nc" 40 | 41 | @pytest.mark.parametrize( 42 | "metric", 43 | ["mean", "max", "min"], 44 | ) 45 | def test_sfcWindmax(self, open_dataset, metric): 46 | with open_dataset(self.test_data) as ds: 47 | sfcWind, _ = atmos.wind_speed_from_vector(ds.uas, ds.vas) 48 | sfcWindmax_calculated = getattr(atmos, f"sfcWindmax_{metric}")(sfcWind) 49 | 50 | resample = sfcWind.resample(time="YS") 51 | c = getattr(resample, metric)() 52 | np.testing.assert_array_equal(sfcWindmax_calculated, c) 53 | 
-------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | min_version = 4.25.0 3 | env_list = 4 | lint 5 | docs 6 | notebooks 7 | doctests 8 | py3.11-extras-numpy 9 | py3.12-extras-sbck 10 | py3.13-extras-lmoments 11 | labels = 12 | static = lint 13 | test = py3.11-extras-numpy, py3.12-extras-sbck, py3.13-extras-lmoments 14 | special = docs, notebooks, doctests 15 | requires = 16 | pip >= 25.0 17 | flit >=3.10.1,<4.0 18 | opts = 19 | --verbose 20 | 21 | [gh] 22 | python = 23 | 3.10 = py3.10-coverage-extras 24 | 3.11 = py3.11-coverage-extras-sbck-lmoments 25 | 3.12 = py3.12-coverage-extras-numpy 26 | 3.13 = py3.13-extras-lmoments 27 | 28 | [testenv:lint] 29 | description = Run code quality compliance tests under {basepython} 30 | skip_install = True 31 | extras = 32 | deps = 33 | blackdoc ==0.3.9 34 | codespell >=2.4.1 35 | deptry >=0.23.0 36 | flake8 >=7.1.1 37 | flake8-rst-docstrings ==0.3.0 38 | numpydoc >=1.8.0 39 | ruff >=0.9.6 40 | vulture >=2.11 41 | yamllint >=1.35.1 42 | commands_pre = 43 | commands = 44 | make lint 45 | commands_post = 46 | allowlist_externals = 47 | make 48 | 49 | [testenv:docs] 50 | description = Build the documentation with makefile under {basepython} 51 | setenv = 52 | PYTHONPATH = {toxinidir} 53 | READTHEDOCS = 1 54 | extras = 55 | docs 56 | deps = 57 | lmoments3 58 | h5netcdf 59 | netCDF4 60 | pot 61 | pytest 62 | xsdba 63 | commands_pre = 64 | commands = 65 | make docs 66 | commands_post = 67 | allowlist_externals = 68 | env 69 | make 70 | 71 | [testenv:notebooks{-prefetch,}] 72 | description = Run notebooks with pytest under {basepython} 73 | extras = 74 | all 75 | deps = 76 | lmoments3 77 | commands = 78 | pytest --no-cov --nbval --dist=loadscope --rootdir=tests/ --ignore=docs/notebooks/example.ipynb docs/notebooks 79 | commands_post = 80 | 81 | [testenv:doctests{-prefetch,}] 82 | description = Run 
doctests with pytest under {basepython} 83 | commands = 84 | python -c 'from xclim.testing.utils import run_doctests; run_doctests()' 85 | commands_post = 86 | 87 | [testenv] 88 | description = Run tests with pytest under {basepython} 89 | setenv = 90 | COV_CORE_SOURCE = 91 | PYTEST_ADDOPTS = --numprocesses=logical --durations=10 92 | coverage: PYTEST_ADDOPTS = --numprocesses=logical --durations=10 --cov=xclim --cov-report=term-missing 93 | Xfrozen_modules = off 94 | passenv = 95 | CI 96 | COVERALLS_* 97 | GITHUB_* 98 | LD_LIBRARY_PATH 99 | SKIP_NOTEBOOKS 100 | XCLIM_* 101 | extras = 102 | dev 103 | extras: extras 104 | deps = 105 | coverage: coveralls>=4.0.1 106 | lmoments: lmoments3 107 | numpy: numpy>=1.24,<2.0 108 | upstream: -r CI/requirements_upstream.txt 109 | install_command = python -m pip install --no-user {opts} {packages} 110 | download = True 111 | commands_pre = 112 | sbck: python -c 'print("The sbck dependency requires the \"libeigen3-dev\" package to be installed on the system.")' 113 | sbck: python -m pip install pybind11 114 | sbck: python -m pip install sbck 115 | python -m pip list 116 | xclim show_version_info 117 | python -m pip check 118 | xclim --help 119 | prefetch: xclim prefetch_testing_data 120 | commands = 121 | !offline: pytest {posargs} 122 | ; Prevent socket connections (except for unix sockets for async support) 123 | offline: python -c 'print("Running offline tests with positional arguments: --disable-socket --allow-unix-socket --m \"not requires_internet\"")' 124 | offline: python -c 'print("These can be overwritten with: tox -e offline -- -m \"some other marker statement\"")' 125 | offline: pytest --disable-socket --allow-unix-socket {posargs:-m 'not requires_internet'} 126 | commands_post = 127 | coverage: - coveralls 128 | allowlist_externals = 129 | git 130 | xclim 131 | --------------------------------------------------------------------------------