├── .gitattributes ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── SECURITY.md ├── codecov.yml ├── dependabot.yaml ├── release.yaml └── workflows │ ├── _build.yaml │ ├── _build_doc.yaml │ ├── _codecov.yaml │ ├── _codeql.yaml │ ├── _pre_commit.yaml │ ├── _pypi_publish.yaml │ ├── _pypi_test_publish.yaml │ ├── _test.yaml │ ├── cicd.yaml │ ├── dependabot_auto_approve.yaml │ └── scorecard.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── CHANGELOG.md ├── CITATION ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── cmethods ├── __init__.py ├── core.py ├── distribution.py ├── scaling.py ├── static.py ├── types.py └── utils.py ├── doc ├── Makefile ├── _static │ └── images │ │ ├── biasCdiagram.png │ │ ├── dm-doy-plot.png │ │ ├── qm-cdf-plot-1.png │ │ └── qm-cdf-plot-2.png ├── cli.rst ├── cmethods.rst ├── conf.py ├── getting_started.rst ├── index.rst ├── introduction.rst ├── issues.rst ├── license.rst ├── links.rst ├── make.bat ├── methods.rst └── requirements.txt ├── examples ├── examples.ipynb ├── input_data │ ├── control.nc │ ├── observations.nc │ └── scenario.nc └── requirements.txt ├── pyproject.toml ├── requirements-dev.txt ├── setup.py └── tests ├── README.rst ├── __init__.py ├── conftest.py ├── fixture ├── precipitation_obsh.zarr │ ├── .zattrs │ ├── .zgroup │ ├── .zmetadata │ ├── lat │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── lon │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── pr │ │ ├── .zarray │ │ ├── .zattrs │ │ ├── 0.0.0 │ │ ├── 0.1.0 │ │ ├── 1.0.0 │ │ └── 1.1.0 │ └── time │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs ├── precipitation_obsp.zarr │ ├── .zattrs │ ├── .zgroup │ ├── .zmetadata │ ├── lat │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── lon │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── pr │ │ ├── .zarray │ │ ├── .zattrs │ │ ├── 0.0.0 │ │ ├── 0.1.0 │ │ ├── 1.0.0 │ │ └── 1.1.0 │ └── time │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs ├── precipitation_simh.zarr │ ├── .zattrs │ ├── 
.zgroup │ ├── .zmetadata │ ├── lat │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── lon │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── pr │ │ ├── .zarray │ │ ├── .zattrs │ │ ├── 0.0.0 │ │ ├── 0.1.0 │ │ ├── 1.0.0 │ │ └── 1.1.0 │ └── time │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs ├── precipitation_simp.zarr │ ├── .zattrs │ ├── .zgroup │ ├── .zmetadata │ ├── lat │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── lon │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── pr │ │ ├── .zarray │ │ ├── .zattrs │ │ ├── 0.0.0 │ │ ├── 0.1.0 │ │ ├── 1.0.0 │ │ └── 1.1.0 │ └── time │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs ├── temperature_obsh.zarr │ ├── .zattrs │ ├── .zgroup │ ├── .zmetadata │ ├── lat │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── lon │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── tas │ │ ├── .zarray │ │ ├── .zattrs │ │ ├── 0.0.0 │ │ ├── 0.1.0 │ │ ├── 1.0.0 │ │ └── 1.1.0 │ └── time │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs ├── temperature_obsp.zarr │ ├── .zattrs │ ├── .zgroup │ ├── .zmetadata │ ├── lat │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── lon │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── tas │ │ ├── .zarray │ │ ├── .zattrs │ │ ├── 0.0.0 │ │ ├── 0.1.0 │ │ ├── 1.0.0 │ │ └── 1.1.0 │ └── time │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs ├── temperature_simh.zarr │ ├── .zattrs │ ├── .zgroup │ ├── .zmetadata │ ├── lat │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── lon │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs │ ├── tas │ │ ├── .zarray │ │ ├── .zattrs │ │ ├── 0.0.0 │ │ ├── 0.1.0 │ │ ├── 1.0.0 │ │ └── 1.1.0 │ └── time │ │ ├── 0 │ │ ├── .zarray │ │ └── .zattrs └── temperature_simp.zarr │ ├── .zattrs │ ├── .zgroup │ ├── .zmetadata │ ├── lat │ ├── 0 │ ├── .zarray │ └── .zattrs │ ├── lon │ ├── 0 │ ├── .zarray │ └── .zattrs │ ├── tas │ ├── .zarray │ ├── .zattrs │ ├── 0.0.0 │ ├── 0.1.0 │ ├── 1.0.0 │ └── 1.1.0 │ └── time │ ├── 0 │ ├── .zarray │ └── .zattrs ├── helper.py ├── test_cli.py ├── test_methods.py ├── test_methods_different_input_shape.py ├── test_misc.py ├── 
test_utils.py └── test_zarr_dask_compatibility.py /.gitattributes: -------------------------------------------------------------------------------- 1 | tests/fixture/temperature_simh.zarr filter=lfs diff=lfs merge=lfs -text 2 | tests/fixture/temperature_simp.zarr filter=lfs diff=lfs merge=lfs -text 3 | tests/fixture/precipitation_obsh.zarr filter=lfs diff=lfs merge=lfs -text 4 | tests/fixture/precipitation_obsp.zarr filter=lfs diff=lfs merge=lfs -text 5 | tests/fixture/precipitation_simh.zarr filter=lfs diff=lfs merge=lfs -text 6 | tests/fixture/precipitation_simp.zarr filter=lfs diff=lfs merge=lfs -text 7 | tests/fixture/temperature_obsh.zarr filter=lfs diff=lfs merge=lfs -text 8 | tests/fixture/temperature_obsp.zarr filter=lfs diff=lfs merge=lfs -text 9 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @btschwertfeger 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: "" 5 | labels: "Bug" 6 | assignees: "" 7 | --- 8 | 9 | **Describe the bug** 10 | A clear and concise description of what the bug is. 11 | 12 | **To Reproduce** 13 | Steps to reproduce the behavior: 14 | 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | 28 | - OS: [e.g. iOS] 29 | - Browser [e.g. chrome, safari] 30 | - Version [e.g. 
22] 31 | 32 | **Smartphone (please complete the following information):** 33 | 34 | - Device: [e.g. iPhone6] 35 | - OS: [e.g. iOS8.1] 36 | - Browser [e.g. stock browser, safari] 37 | - Version [e.g. 22] 38 | 39 | **Additional context** 40 | Add any other context about the problem here. 41 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: "" 5 | labels: "Feature" 6 | assignees: "" 7 | --- 8 | 9 | **Is your feature request related to a problem? Please describe.** 10 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 11 | 12 | **Describe the solution you'd like** 13 | A clear and concise description of what you want to happen. 14 | 15 | **Describe alternatives you've considered** 16 | A clear and concise description of any alternative solutions or features you've considered. 17 | 18 | **Additional context** 19 | Add any other context or screenshots about the feature request here. 20 | -------------------------------------------------------------------------------- /.github/SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Reporting Security Vulnerabilities 4 | 5 | At python-cmethods, we take security seriously. We welcome and encourage 6 | responsible disclosure of security vulnerabilities. If you believe you've found 7 | a security vulnerability within our project, please report it immediately. 8 | 9 | ## How to Report a Security Vulnerability 10 | 11 | To report a security vulnerability, please send an email to 12 | [contact@b-schwertfeger.de](mailto:contact@b-schwertfeger.de) with a detailed 13 | description of the vulnerability. 
We kindly request that you refrain from 14 | disclosing the vulnerability publicly until we have had an opportunity to 15 | address it. 16 | 17 | ## Our Commitment 18 | 19 | We are committed to promptly addressing and resolving any security 20 | vulnerabilities reported to us. We will investigate all reports and take 21 | appropriate action to protect the security of our users and their data. 22 | -------------------------------------------------------------------------------- /.github/codecov.yml: -------------------------------------------------------------------------------- 1 | coverage: 2 | ## Status checks in PR 3 | ## 4 | status: 5 | project: 6 | default: 7 | informational: true 8 | ## Target coverage is the last one 9 | ## 10 | target: auto 11 | ## this allows a 2% drop from the previous base commit coverage 12 | ## 13 | threshold: 2% 14 | patch: 15 | default: 16 | informational: true 17 | 18 | comment: 19 | layout: "reach, diff, flags, files" 20 | behavior: default 21 | require_changes: true # if false: post the comment even if coverage don't change 22 | require_base: no # [yes :: must have a base report to post] 23 | require_head: yes # [yes :: must have a head report to post] 24 | -------------------------------------------------------------------------------- /.github/dependabot.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | version: 2 3 | updates: 4 | - package-ecosystem: github-actions 5 | directory: / 6 | schedule: 7 | interval: monthly 8 | groups: 9 | all: 10 | dependency-type: production 11 | -------------------------------------------------------------------------------- /.github/release.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 
5 | # https://github.com/btschwertfeger 6 | # 7 | # Config file to auto-generate release notes based on 8 | # https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes 9 | 10 | changelog: 11 | exclude: 12 | # labels: 13 | # - ignore-for-release 14 | # authors: 15 | # - john-doe 16 | categories: 17 | - title: Breaking Changes ⚠️ 18 | labels: 19 | - Breaking 20 | - breaking 21 | - title: Implemented Enhancements 🎉 22 | labels: 23 | - Feature 24 | - enhancement 25 | - title: Fixed Bugs 🪲 26 | labels: 27 | - Bug 28 | - bug 29 | - title: Other Changes 30 | labels: 31 | - "*" 32 | exclude: 33 | labels: 34 | - dependencies 35 | - github_actions 36 | - title: 👒 Dependencies 37 | labels: 38 | - dependencies 39 | - github_actions 40 | -------------------------------------------------------------------------------- /.github/workflows/_build.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | # Template workflow to build the project for a specific os 8 | # and Python version. 
9 | # 10 | 11 | name: Build 12 | 13 | on: 14 | workflow_call: 15 | inputs: 16 | os: 17 | type: string 18 | required: true 19 | python-version: 20 | type: string 21 | required: true 22 | 23 | permissions: read-all 24 | 25 | jobs: 26 | Build: 27 | runs-on: ${{ inputs.os }} 28 | steps: 29 | - name: Checkout repository 30 | uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 31 | with: 32 | fetch-depth: 0 # IMPORTANT: otherwise the current tag does not get fetched and the build version gets worse 33 | 34 | - name: Set up Python ${{ inputs.python-version }} 35 | uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 36 | with: 37 | python-version: ${{ inputs.python-version }} 38 | 39 | - name: Install dependencies 40 | run: | 41 | python -m pip install --user --upgrade pip 42 | python -m pip install --user build 43 | 44 | - name: Install hdf5 and netcdf (macOS) 45 | if: runner.os == 'macOS' 46 | run: brew install hdf5 netcdf 47 | 48 | - name: Check git status (not Windows) 49 | if: runner.os != 'Windows' 50 | run: | 51 | if [[ -z "$(git status --porcelain)" ]]; then 52 | echo "No changes found." 53 | else 54 | echo "Changes detected. Please commit or discard changes before publishing." 55 | git status --porcelain 56 | exit 1 57 | fi 58 | 59 | - name: Check git status (Windows) 60 | if: runner.os == 'Windows' 61 | run: | 62 | if (-not (git status --porcelain)) { 63 | Write-Output "No changes found." 64 | } else { 65 | Write-Output "Changes detected. Please commit or discard changes before publishing." 
66 | git status --porcelain 67 | exit 1 68 | } 69 | 70 | - name: Build Linux 71 | if: runner.os == 'linux' 72 | run: python -m build 73 | 74 | - name: Store the distribution packages 75 | uses: actions/upload-artifact@v4 76 | # upload artifacts with the oldest supported version 77 | if: runner.os == 'linux' && inputs.python-version == '3.9' 78 | with: 79 | name: python-package-distributions 80 | path: dist/ 81 | 82 | - name: Build macOS 83 | if: runner.os == 'macOS' 84 | run: python -m build 85 | 86 | - name: Build Windows 87 | if: runner.os == 'Windows' 88 | # put it here to avoid more filtering 89 | run: python -m build -o . 90 | 91 | - name: Install the package on Linux or MacOS 92 | if: runner.os != 'Windows' 93 | run: python -m pip install --user dist/python_cmethods*.whl 94 | 95 | - name: Install the package on Windows 96 | if: runner.os == 'Windows' 97 | run: | 98 | try { 99 | $WHEEL = Get-ChildItem -Path . -Filter "python_cmethods*.whl" -ErrorAction Stop 100 | python -m pip install --user $WHEEL 101 | } catch { 102 | Write-Error "Error: .whl file not found in the current directory." 103 | exit 1 104 | } 105 | -------------------------------------------------------------------------------- /.github/workflows/_build_doc.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | # Template workflow to build documentation. 
8 | # 9 | 10 | name: Build Doc 11 | 12 | on: 13 | workflow_call: 14 | inputs: 15 | os: 16 | type: string 17 | required: true 18 | python-version: 19 | type: string 20 | required: true 21 | 22 | permissions: read-all 23 | 24 | jobs: 25 | Build: 26 | runs-on: ${{ inputs.os }} 27 | steps: 28 | - name: Checkout repository 29 | uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 30 | 31 | - name: Set up Python ${{ inputs.python-version }} 32 | uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 33 | with: 34 | python-version: ${{ inputs.python-version }} 35 | 36 | - name: Install dependencies 37 | run: | 38 | python -m pip install --user --upgrade pip 39 | python -m pip install --user . 40 | python -m pip install --user -r doc/requirements.txt 41 | DEBIAN_FRONTEND=noninteractive sudo apt-get install -y pandoc 42 | 43 | - name: Build the documentation 44 | run: cd doc && make html 45 | -------------------------------------------------------------------------------- /.github/workflows/_codecov.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | # Template workflow to build the project for a specific os 8 | # and Python version, run the tests and upload the results to codecov. 
9 | # 10 | 11 | name: CodeCov 12 | 13 | on: 14 | workflow_call: 15 | inputs: 16 | os: 17 | type: string 18 | required: true 19 | python-version: 20 | type: string 21 | required: true 22 | 23 | permissions: read-all 24 | 25 | concurrency: 26 | group: codecov-${{ github.ref }} 27 | cancel-in-progress: true 28 | 29 | jobs: 30 | codecov: 31 | name: Coverage 32 | runs-on: ${{ inputs.os }} 33 | env: 34 | OS: ${{ inputs.os }} 35 | PYTHON: ${{ inputs.python-version }} 36 | 37 | steps: 38 | - name: Checkout repository 39 | uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 40 | 41 | - name: Set up Python ${{ matrix.python-version }} 42 | uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 43 | with: 44 | python-version: ${{ inputs.python-version }} 45 | 46 | - name: Install dependencies 47 | run: python -m pip install --upgrade pip 48 | 49 | - name: Install package 50 | run: python -m pip install . -r requirements-dev.txt 51 | 52 | - name: Generate coverage report 53 | run: pytest --retries 1 --cov --cov-report=xml 54 | 55 | - name: Upload coverage to Codecov 56 | uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 #v5.5.1 57 | with: 58 | token: ${{ secrets.CODECOV_TOKEN }} 59 | files: coverage.xml 60 | env_vars: OS,PYTHON 61 | fail_ci_if_error: true 62 | flags: unittests 63 | name: codecov-umbrella 64 | verbose: true 65 | -------------------------------------------------------------------------------- /.github/workflows/_codeql.yaml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. 
Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | 13 | name: CodeQL 14 | 15 | on: 16 | workflow_call: 17 | 18 | # Don't change this permissions. These must match those of the analyze job. 19 | permissions: 20 | actions: read 21 | contents: read 22 | security-events: write 23 | 24 | jobs: 25 | analyze: 26 | name: Analyze 27 | runs-on: ubuntu-latest 28 | permissions: 29 | actions: read 30 | contents: read 31 | security-events: write 32 | 33 | strategy: 34 | fail-fast: false 35 | matrix: 36 | language: ["python"] 37 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 38 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support 39 | 40 | steps: 41 | - name: Checkout repository 42 | uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 43 | 44 | # Initializes the CodeQL tools for scanning. 45 | - name: Initialize CodeQL 46 | uses: github/codeql-action/init@v3.30.5 47 | with: 48 | languages: python 49 | # If you wish to specify custom queries, you can do so here or in a config file. 50 | # By default, queries listed here will override any specified in a config file. 51 | # Prefix the list here with "+" to use these queries and those in the config file. 52 | 53 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 54 | # queries: security-extended,security-and-quality 55 | 56 | # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). 57 | # If this step fails, then you should remove it and run the build manually (see below) 58 | - name: Autobuild 59 | uses: github/codeql-action/autobuild@v3.30.5 60 | 61 | # ℹ️ Command-line programs to run using the OS shell. 
62 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 63 | 64 | # If the Autobuild fails above, remove it and uncomment the following three lines. 65 | # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 66 | 67 | # - run: | 68 | # echo "Run, Build Application using script" 69 | # ./location_of_script_within_repo/buildscript.sh 70 | 71 | - name: Perform CodeQL Analysis 72 | uses: github/codeql-action/analyze@v3.30.5 73 | with: 74 | category: "/language:python" 75 | -------------------------------------------------------------------------------- /.github/workflows/_pre_commit.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | # Template workflow to run pre-commit. 8 | # 9 | 10 | name: Pre-Commit 11 | 12 | on: 13 | workflow_call: 14 | 15 | permissions: read-all 16 | 17 | jobs: 18 | Pre-Commit: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - name: Checkout 22 | uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 23 | - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 24 | -------------------------------------------------------------------------------- /.github/workflows/_pypi_publish.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | # Template workflow to build the project and publish 8 | # the package to PyPI. 
9 | # 10 | 11 | name: PyPI Publish 12 | 13 | on: 14 | workflow_call: 15 | secrets: 16 | API_TOKEN: 17 | required: true 18 | 19 | permissions: read-all 20 | 21 | jobs: 22 | publish-to-pypi: 23 | name: Publish Python distribution to PyPI 24 | runs-on: ubuntu-latest 25 | 26 | permissions: 27 | id-token: write # IMPORTANT: this permission is mandatory for OIDC publishing 28 | 29 | environment: 30 | name: pypi 31 | url: https://pypi.org/p/python-cmethods 32 | 33 | steps: 34 | - name: Download all the distributions 35 | uses: actions/download-artifact@v5 36 | with: 37 | name: python-package-distributions 38 | path: dist/ 39 | 40 | - name: Publish package distributions to PyPI (optional - testpypi) 41 | uses: pypa/gh-action-pypi-publish@release/v1 42 | with: 43 | password: ${{ secrets.API_TOKEN }} 44 | repository-url: https://upload.pypi.org/legacy/ 45 | -------------------------------------------------------------------------------- /.github/workflows/_pypi_test_publish.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | # Template workflow to build the project and publish 8 | # the package to test PyPI. 
9 | # 10 | 11 | name: PyPI Publish 12 | 13 | on: 14 | workflow_call: 15 | secrets: 16 | API_TOKEN: 17 | required: true 18 | 19 | permissions: read-all 20 | 21 | jobs: 22 | publish-to-test-pypi: 23 | name: Publish Python distribution to PyPI 24 | runs-on: ubuntu-latest 25 | permissions: 26 | id-token: write # IMPORTANT: this permission is mandatory for OIDC publishing 27 | environment: 28 | name: testpypi 29 | url: https://test.pypi.org/p/python-cmethods 30 | steps: 31 | - name: Download all the distributions 32 | uses: actions/download-artifact@v5 33 | with: 34 | name: python-package-distributions 35 | path: dist/ 36 | 37 | - name: Publish package distributions to PyPI (optional - testpypi) 38 | uses: pypa/gh-action-pypi-publish@release/v1 39 | with: 40 | password: ${{ secrets.API_TOKEN }} 41 | repository-url: https://test.pypi.org/legacy/ 42 | -------------------------------------------------------------------------------- /.github/workflows/_test.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 
5 | # https://github.com/btschwertfeger 6 | # 7 | # Template workflow to run the unit tests of the package 8 | # 9 | 10 | name: Test 11 | 12 | on: 13 | workflow_call: 14 | inputs: 15 | os: 16 | type: string 17 | required: true 18 | python-version: 19 | type: string 20 | required: true 21 | 22 | permissions: read-all 23 | 24 | jobs: 25 | Test: 26 | name: Test ${{ inputs.os }} ${{ inputs.python-version }} 27 | runs-on: ${{ inputs.os }} 28 | steps: 29 | - name: Checkout repository 30 | uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 31 | 32 | - name: Set up Python ${{ inputs.python-version }} 33 | uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 34 | with: 35 | python-version: ${{ inputs.python-version }} 36 | 37 | - name: Install dependencies 38 | run: python -m pip install --user --upgrade pip 39 | 40 | - name: Install package 41 | run: python -m pip install --user . -r requirements-dev.txt 42 | 43 | - name: Run unit tests 44 | run: pytest -vv --retries 1 tests 45 | -------------------------------------------------------------------------------- /.github/workflows/cicd.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | # Workflow to apply pre-commit, build, test and upload the package 8 | # to the test index of PyPI. 
9 | 10 | name: CI/CD 11 | 12 | on: 13 | push: 14 | branches: 15 | - "**" 16 | schedule: 17 | - cron: "20 16 * * 0" 18 | release: 19 | types: [created] 20 | 21 | concurrency: 22 | group: ${{ github.workflow }}-${{ github.ref }} 23 | cancel-in-progress: true 24 | 25 | jobs: 26 | ## Checks the code logic, style and more 27 | ## 28 | Pre-Commit: 29 | uses: ./.github/workflows/_pre_commit.yaml 30 | 31 | ## Discover vulnerabilities 32 | ## 33 | CodeQL: 34 | uses: ./.github/workflows/_codeql.yaml 35 | 36 | ## Builds the package on multiple OS for multiple 37 | ## Python versions 38 | ## 39 | Build: 40 | needs: [Pre-Commit] 41 | uses: ./.github/workflows/_build.yaml 42 | strategy: 43 | fail-fast: false 44 | matrix: 45 | os: [ubuntu-latest, macos-latest, windows-latest] 46 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] 47 | with: 48 | os: ${{ matrix.os }} 49 | python-version: ${{ matrix.python-version }} 50 | 51 | ## Build the documentation 52 | ## 53 | Build-Doc: 54 | needs: [Pre-Commit] 55 | uses: ./.github/workflows/_build_doc.yaml 56 | with: 57 | os: ubuntu-latest 58 | python-version: "3.11" 59 | 60 | ## Run the unit tests for Python 3.9 until 3.13 61 | ## 62 | Test: 63 | needs: [Pre-Commit] 64 | uses: ./.github/workflows/_test.yaml 65 | strategy: 66 | # FIXME: fail-fast as soon as the tests are not flaky anymore 67 | fail-fast: false 68 | matrix: 69 | os: [ubuntu-latest, windows-latest] 70 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] 71 | with: 72 | os: ${{ matrix.os }} 73 | python-version: ${{ matrix.python-version }} 74 | 75 | ## Generates and uploads the coverage statistics to codecov 76 | ## 77 | CodeCov: 78 | if: | 79 | (success() && github.actor == 'btschwertfeger') 80 | && (github.event_name == 'push' || github.event_name == 'release') 81 | needs: [Pre-Commit] 82 | uses: ./.github/workflows/_codecov.yaml 83 | with: 84 | os: ubuntu-latest 85 | python-version: "3.11" 86 | secrets: inherit 87 | 88 | ## Uploads the package to test.pypi.org on 
master if triggered by 89 | ## a regular commit/push. 90 | ## 91 | UploadTestPyPI: 92 | if: | 93 | ( 94 | success() 95 | && github.actor == 'btschwertfeger' 96 | && github.ref == 'refs/heads/master' 97 | ) 98 | && (github.event_name == 'push' || github.event_name == 'release') 99 | needs: 100 | - Build 101 | - Build-Doc 102 | - Test 103 | - CodeQL 104 | name: Upload current version to Test PyPI 105 | uses: ./.github/workflows/_pypi_test_publish.yaml 106 | secrets: 107 | API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }} 108 | 109 | ## Upload the python-cmethods package to Production PyPI 110 | ## 111 | UploadPyPI: 112 | if: | 113 | success() && 114 | github.actor == 'btschwertfeger' && 115 | github.event_name == 'release' 116 | needs: 117 | - Build 118 | - Build-Doc 119 | - Test 120 | - CodeQL 121 | name: Upload the current release to PyPI 122 | uses: ./.github/workflows/_pypi_publish.yaml 123 | secrets: 124 | API_TOKEN: ${{ secrets.PYPI_API_TOKEN }} 125 | -------------------------------------------------------------------------------- /.github/workflows/dependabot_auto_approve.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2024 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | # Workflow that approves and merges all pull requests from the dependabot[bot] 8 | # author. 
9 | # 10 | # Source (May, 2024): 11 | # - https://blog.somewhatabstract.com/2021/10/11/setting-up-dependabot-with-github-actions-to-approve-and-merge/ 12 | 13 | name: Dependabot auto-merge 14 | on: pull_request_target 15 | 16 | permissions: 17 | pull-requests: write 18 | contents: write 19 | 20 | jobs: 21 | dependabot: 22 | runs-on: ubuntu-latest 23 | if: ${{ github.actor == 'dependabot[bot]' }} 24 | steps: 25 | - name: Dependabot metadata 26 | id: dependabot-metadata 27 | uses: dependabot/fetch-metadata@v2.4.0 28 | with: 29 | github-token: "${{ secrets.GITHUB_TOKEN }}" 30 | - name: Approve a PR 31 | if: ${{ steps.dependabot-metadata.outputs.update-type != 'version-update:semver-major' }} 32 | run: gh pr review --approve "$PR_URL" 33 | env: 34 | PR_URL: ${{ github.event.pull_request.html_url }} 35 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 36 | - name: Enable auto-merge for Dependabot PRs 37 | if: ${{ steps.dependabot-metadata.outputs.update-type != 'version-update:semver-major' }} 38 | run: gh pr merge --auto --squash "$PR_URL" 39 | env: 40 | PR_URL: ${{ github.event.pull_request.html_url }} 41 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 42 | -------------------------------------------------------------------------------- /.github/workflows/scorecard.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. They are provided 2 | # by a third-party and are governed by separate terms of service, privacy 3 | # policy, and support documentation. 4 | 5 | name: Scorecard supply-chain security 6 | on: 7 | # For Branch-Protection check. Only the default branch is supported. See 8 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection 9 | branch_protection_rule: 10 | # To guarantee Maintained check is occasionally updated. 
See 11 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained 12 | schedule: 13 | - cron: "29 19 * * 5" 14 | push: 15 | branches: ["master"] 16 | 17 | # Declare default permissions as read only. 18 | permissions: read-all 19 | 20 | jobs: 21 | analysis: 22 | name: Scorecard analysis 23 | runs-on: ubuntu-latest 24 | permissions: 25 | # Needed to upload the results to code-scanning dashboard. 26 | security-events: write 27 | # Needed to publish results and get a badge (see publish_results below). 28 | id-token: write 29 | # Uncomment the permissions below if installing in a private repository. 30 | # contents: read 31 | # actions: read 32 | 33 | steps: 34 | - name: "Checkout code" 35 | uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 36 | with: 37 | persist-credentials: false 38 | 39 | - name: "Run analysis" 40 | uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3 41 | with: 42 | results_file: results.sarif 43 | results_format: sarif 44 | # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: 45 | # - you want to enable the Branch-Protection check on a *public* repository, or 46 | # - you are installing Scorecard on a *private* repository 47 | # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat. 48 | # repo_token: ${{ secrets.SCORECARD_TOKEN }} 49 | 50 | # Public repositories: 51 | # - Publish results to OpenSSF REST API for easy access by consumers 52 | # - Allows the repository to include the Scorecard badge. 53 | # - See https://github.com/ossf/scorecard-action#publishing-results. 54 | # For private repositories: 55 | # - `publish_results` will always be set to `false`, regardless 56 | # of the value entered here. 57 | publish_results: true 58 | 59 | # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF 60 | # format to the repository Actions tab. 
61 | - name: "Upload artifact" 62 | uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 63 | with: 64 | name: SARIF file 65 | path: results.sarif 66 | retention-days: 5 67 | 68 | # Upload the results to GitHub's code scanning dashboard. 69 | - name: "Upload to code-scanning" 70 | uses: github/codeql-action/upload-sarif@80cb6b56b93de3e779c7d476d9100d06fb87c877 # v2.2.4 71 | with: 72 | sarif_file: results.sarif 73 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | # C extensions 6 | *.so 7 | *.tar.gz 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | _version.py 30 | 31 | # Installer logs 32 | pip-log.txt 33 | pip-delete-this-directory.txt 34 | 35 | # Unit test / coverage reports 36 | htmlcov/ 37 | .tox/ 38 | .nox/ 39 | .coverage 40 | .coverage.* 41 | .cache 42 | nosetests.xml 43 | coverage.xml 44 | *.cover 45 | *.py,cover 46 | .pytest_cache/ 47 | pytest.xml 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Sphinx documentation 54 | doc/_build/ 55 | 56 | # Jupyter Notebook 57 | .ipynb_checkpoints 58 | 59 | # Environments 60 | .env 61 | .venv 62 | env/ 63 | venv/ 64 | ENV/ 65 | env.bak/ 66 | venv.bak/ 67 | .vscode/ 68 | 69 | # mkdocs documentation 70 | /site 71 | 72 | # mypy 73 | .mypy_cache/ 74 | .dmypy.json 75 | dmypy.json 76 | 77 | # Pyre type checker 78 | .pyre/ 79 | 80 | # misc 81 | .DS_Store 82 | *.csv 83 | *.log 84 | *.zip 85 | *.nc 86 | !examples/input_data/*.nc 87 | !tests/fixture/ 88 | dev 89 | del*.py 90 | 91 | *.egg-info/ 92 | conda.stuff/ 93 | 
-------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2024 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | 8 | repos: 9 | - repo: https://github.com/astral-sh/ruff-pre-commit 10 | rev: v0.11.13 11 | hooks: 12 | - id: ruff 13 | args: 14 | - --fix 15 | - --preview 16 | - --exit-non-zero-on-fix 17 | - id: ruff-format 18 | # - repo: https://github.com/pre-commit/mirrors-mypy # FIXME 19 | # rev: v1.13.0 20 | # hooks: 21 | # - id: mypy 22 | # name: mypy 23 | # args: 24 | # - --config-file=pyproject.toml 25 | # - --install-types 26 | # - --non-interactive 27 | - repo: https://github.com/codespell-project/codespell 28 | rev: v2.4.1 29 | hooks: 30 | - id: codespell 31 | additional_dependencies: [tomli] 32 | - repo: https://github.com/gitleaks/gitleaks 33 | rev: v8.21.2 34 | hooks: 35 | - id: gitleaks 36 | - repo: https://github.com/pre-commit/pre-commit-hooks 37 | rev: v5.0.0 38 | hooks: 39 | # all available hooks can be found here: https://github.com/pre-commit/pre-commit-hooks/blob/main/.pre-commit-hooks.yaml 40 | - id: check-yaml 41 | - id: check-ast 42 | - id: check-json 43 | - id: check-toml 44 | - id: check-docstring-first 45 | - id: check-case-conflict 46 | - id: check-merge-conflict 47 | - id: check-added-large-files 48 | args: ["--maxkb=500"] 49 | - id: check-executables-have-shebangs 50 | - id: trailing-whitespace 51 | - id: fix-byte-order-marker 52 | - id: requirements-txt-fixer 53 | - id: mixed-line-ending 54 | - id: name-tests-test 55 | args: ["--pytest-test-first"] 56 | exclude: tests/helper.py 57 | - id: end-of-file-fixer 58 | - id: pretty-format-json 59 | - id: detect-private-key 60 | - repo: https://github.com/pre-commit/pygrep-hooks 61 | rev: v1.10.0 62 | hooks: 63 | - id: python-use-type-annotations 64 
| - id: python-check-blanket-noqa 65 | - id: python-check-blanket-type-ignore 66 | - id: python-check-mock-methods 67 | - id: rst-backticks 68 | - id: python-no-eval 69 | - id: python-no-log-warn 70 | - id: rst-backticks 71 | - id: rst-inline-touching-normal 72 | - id: rst-directive-colons 73 | - id: text-unicode-replacement-char 74 | - repo: https://github.com/PyCQA/isort # TODO: remove as soon as ruff is stable 75 | rev: 6.0.1 76 | hooks: 77 | - id: isort 78 | args: [--profile=black] 79 | - repo: https://github.com/PyCQA/bandit 80 | rev: 1.8.3 81 | hooks: 82 | - id: bandit 83 | exclude: "^tests/.*|examples/.*" 84 | - repo: https://github.com/yunojuno/pre-commit-xenon 85 | rev: v0.1 86 | hooks: 87 | - id: xenon 88 | args: 89 | - --max-average=B 90 | - --max-modules=B 91 | - --max-absolute=C 92 | - repo: https://github.com/rbubley/mirrors-prettier 93 | rev: v3.3.3 94 | hooks: 95 | - id: prettier 96 | exclude: '\.nc$|^tests/fixture/|\.ipynb$' 97 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # -*- mode: yaml; coding: utf-8 -*- 2 | # .readthedocs.yaml 3 | # Read the Docs configuration file 4 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 5 | 6 | # Required 7 | version: 2 8 | 9 | # Set the version of Python and other tools you might need 10 | build: 11 | os: ubuntu-22.04 12 | tools: 13 | python: "3.11" 14 | 15 | # Build documentation in the docs/ directory with Sphinx 16 | sphinx: 17 | configuration: doc/conf.py 18 | 19 | # Optionally declare the Python requirements required to build your docs 20 | python: 21 | install: 22 | - requirements: doc/requirements.txt 23 | -------------------------------------------------------------------------------- /CITATION: -------------------------------------------------------------------------------- 1 | cff-version: 1.2.0 2 | message: "If you use this software, 
please cite it as below." 3 | authors: 4 | - family-names: "Schwertfeger" 5 | given-names: "Benjamin Thomas" 6 | orcid: "https://orcid.org/0000-0001-7664-8434" 7 | title: "python-cmethods" 8 | doi: 10.5281/zenodo.7652755 9 | url: "https://github.com/btschwertfeger/python-cmethods" 10 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | 8 | include README.md LICENSE 9 | 10 | graft cmethods 11 | 12 | prune tests 13 | prune doc 14 | prune examples 15 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # -*- mode: make; coding: utf-8 -*- 2 | #!make 3 | # 4 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 5 | # All rights reserved. 6 | # https://github.com/btschwertfeger 7 | # 8 | 9 | UV := uv 10 | PYTHON := python 11 | PYTEST := $(UV) run pytest 12 | TESTS := tests 13 | PYTEST_OPTS := -vv --junit-xml=pytest.xml 14 | PYTEST_COV_OPTS := $(PYTEST_OPTS) --cov=cmethods --cov-report=xml:coverage.xml --cov-report=term 15 | 16 | ## ======= M A K E F I L E - T A R G E T S ===================================== 17 | ## help Show this help message 18 | ## 19 | .PHONY: help 20 | help: 21 | @grep "^##" Makefile | sed -e "s/##//" 22 | 23 | ## ======= B U I L D I N G ===================================================== 24 | ## build Builds the package 25 | ## 26 | .PHONY: build 27 | build: 28 | $(PYTHON) -m build . 
29 | 30 | ## rebuild Rebuild the package 31 | ## 32 | .PHONY: rebuild 33 | rebuild: clean build 34 | 35 | ## ======= I N S T A L L A T I O N ============================================= 36 | ## install Install the package 37 | ## 38 | .PHONY: install 39 | install: check-uv 40 | 	$(UV) pip install . 41 | 42 | ## dev Installs the package in edit mode 43 | ## 44 | .PHONY: dev 45 | dev: 46 | 	@git lfs install 47 | 	$(UV) pip install -e . -r doc/requirements.txt -r requirements-dev.txt 48 | 49 | ## ======= T E S T I N G ======================================================= 50 | ## test Run the unit tests 51 | ## 52 | .PHONY: test 53 | test: 54 | 	$(PYTHON) -m pytest $(PYTEST_OPTS) $(TESTS) 55 | 56 | .PHONY: tests 57 | tests: test 58 | 59 | ## retest Rerun tests that failed before 60 | ## 61 | .PHONY: retest 62 | retest: 63 | 	$(PYTHON) -m pytest $(PYTEST_OPTS) --lf $(TESTS) 64 | 65 | ## wip Run tests marked as wip 66 | ## 67 | .PHONY: wip 68 | wip: 69 | 	$(PYTHON) -m pytest $(PYTEST_OPTS) -m "wip" $(TESTS) 70 | 71 | ## coverage Run all tests and generate the coverage report 72 | ## 73 | .PHONY: coverage 74 | coverage: 75 | 	$(PYTEST) $(PYTEST_COV_OPTS) $(TESTS) 76 | 77 | ## doc Build the documentation 78 | ## 79 | .PHONY: doc 80 | doc: 81 | 	cd doc && make html 82 | 83 | ## doctest Run the documentation tests 84 | ## 85 | .PHONY: doctest 86 | doctest: 87 | 	cd doc && make doctest 88 | 89 | ## ======= M I S C E L L A N E O U S =========================================== 90 | ## pre-commit Run the pre-commit targets 91 | ## 92 | .PHONY: pre-commit 93 | pre-commit: 94 | 	@pre-commit run -a 95 | 96 | ## ruff Run ruff without fix 97 | .PHONY: ruff 98 | ruff: 99 | 	ruff check --preview . 100 | 101 | ## ruff-fix Run ruff with fix 102 | ## 103 | .PHONY: ruff-fix 104 | ruff-fix: 105 | 	ruff check --fix --preview . 106 | 	ruff format .
107 | 108 | ## changelog Create the changelog 109 | ## 110 | .PHONY: changelog 111 | changelog: 112 | docker run -it --rm \ 113 | -v $(PWD):/usr/local/src/your-app/ \ 114 | githubchangeloggenerator/github-changelog-generator \ 115 | -u btschwertfeger \ 116 | -p python-cmethods \ 117 | -t $(GHTOKEN) \ 118 | --breaking-labels Breaking \ 119 | --enhancement-labels Feature 120 | 121 | ## clean Clean the workspace 122 | ## 123 | .PHONY: clean 124 | clean: 125 | rm -rf .mypy_cache .pytest_cache .cache \ 126 | build/ dist/ python_cmethods.egg-info \ 127 | examples/.ipynb_checkpoints .ipynb_checkpoints \ 128 | doc/_build \ 129 | .coverage* pytest.xml 130 | 131 | rm -f .coverage cmethods/_version.py 132 | 133 | find tests -name "__pycache__" | xargs rm -rf 134 | find cmethods -name "__pycache__" | xargs rm -rf 135 | find examples -name "__pycache__" | xargs rm -rf 136 | 137 | ## check-uv Check if uv is installed 138 | ## 139 | .PHONY: check-uv 140 | check-uv: 141 | @if ! command -v $(UV) >/dev/null; then \ 142 | echo "Error: uv is not installed. Please visit https://github.com/astral-sh/uv for installation instructions."; \ 143 | exit 1; \ 144 | fi 145 | -------------------------------------------------------------------------------- /cmethods/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # https://github.com/btschwertfeger 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see 18 | # https://www.gnu.org/licenses/gpl-3.0.html. 19 | # 20 | # pylint: disable=consider-using-f-string,logging-not-lazy 21 | 22 | r""" 23 | Module providing the a method named "adjust" to apply different bias 24 | correction techniques to time-series climate data. 25 | 26 | Some variables used in this package: 27 | 28 | T = Temperatures ($T$) 29 | X = Some climate variable ($X$) 30 | h = historical 31 | p = scenario; future; predicted 32 | obs = observed data ($T_{obs,h}$) 33 | simh = modeled data with same time period as obs ($T_{sim,h}$) 34 | simp = data to correct (predicted simulated data) ($T_{sim,p}$) 35 | F = Cumulative Distribution Function 36 | \mu = mean 37 | \sigma = standard deviation 38 | i = index 39 | _{m} = long-term monthly interval 40 | """ 41 | 42 | from __future__ import annotations 43 | 44 | import logging 45 | import sys 46 | from typing import TYPE_CHECKING, Any 47 | 48 | if TYPE_CHECKING: 49 | from cloup import Context 50 | 51 | import cloup 52 | import xarray as xr 53 | from click import echo 54 | from cloup import HelpFormatter, HelpTheme, Path, Style, command, option, option_group 55 | from cloup.constraints import Equal, If, require_all 56 | 57 | from cmethods.core import adjust 58 | 59 | __all__ = ["adjust"] 60 | 61 | 62 | def print_version( 63 | ctx: Context, 64 | param: Any, # noqa: ARG001 65 | value: Any, 66 | ) -> None: 67 | """Prints the version of the package""" 68 | if not value or ctx.resilient_parsing: 69 | return 70 | from importlib.metadata import version # noqa: PLC0415 71 | 72 | echo(version("python-cmethods")) 73 | ctx.exit() 74 | 75 | 76 | @command( 77 | context_settings={ 78 | "auto_envvar_prefix": "CMETHODS", 79 | "help_option_names": ["-h", "--help"], 80 | }, 81 | formatter_settings=HelpFormatter.settings( 82 | theme=HelpTheme( 83 | 
invoked_command=Style(fg="bright_yellow"), 84 | heading=Style(fg="bright_white", bold=True), 85 | constraint=Style(fg="magenta"), 86 | col1=Style(fg="bright_yellow"), 87 | ), 88 | ), 89 | ) 90 | @option( 91 | "--version", 92 | is_flag=True, 93 | callback=print_version, 94 | expose_value=False, 95 | is_eager=True, 96 | ) 97 | @option( 98 | "--obs", 99 | "--observations", 100 | required=True, 101 | type=Path(exists=True), 102 | help="Reference data set (control period)", 103 | ) 104 | @option( 105 | "--simh", 106 | "--simulated-historical", 107 | required=True, 108 | type=Path(exists=True), 109 | help="Modeled data set (control period)", 110 | ) 111 | @option( 112 | "--simp", 113 | "--simulated-scenario", 114 | required=True, 115 | type=Path(exists=True), 116 | help="Modeled data set (scenario period)", 117 | ) 118 | @option( 119 | "--method", 120 | required=True, 121 | type=cloup.Choice( 122 | [ 123 | "linear_scaling", 124 | "variance_scaling", 125 | "delta_method", 126 | "quantile_mapping", 127 | "quantile_delta_mapping", 128 | ], 129 | case_sensitive=False, 130 | ), 131 | help="Bias adjustment method to apply", 132 | ) 133 | @option( 134 | "--kind", 135 | required=True, 136 | type=cloup.Choice(["+", "add", "*", "mult"]), 137 | help="Kind of adjustment", 138 | ) 139 | @option( 140 | "--variable", 141 | required=True, 142 | type=str, 143 | help="Variable of interest", 144 | ) 145 | @option( 146 | "-o", 147 | "--output", 148 | required=True, 149 | type=str, 150 | callback=lambda _, __, value: (value if value.endswith(".nc") else f"{value}.nc"), 151 | help="Output file name", 152 | ) 153 | @option_group( 154 | "Scaling-Based Adjustment Options", 155 | option( 156 | "--group", 157 | type=str, 158 | help="Temporal grouping", 159 | ), 160 | constraint=If( 161 | Equal("method", "linear_scaling") & Equal("method", "variance_scaling") & Equal("method", "delta_method"), 162 | then=require_all, 163 | ), 164 | ) 165 | @option_group( 166 | "Distribution-Based Adjustment 
Options", 167 | option( 168 | "--quantiles", 169 | type=int, 170 | help="Quantiles to respect", 171 | ), 172 | constraint=If( 173 | Equal("method", "quantile_mapping") & Equal("method", "quantile_delta_mapping"), 174 | then=require_all, 175 | ), 176 | ) 177 | def cli(**kwargs) -> None: 178 | """ 179 | Command-line tool to apply bias correction procedures to climate data. 180 | 181 | Copyright (C) 2023 Benjamin Thomas Schwertfeger\n 182 | GitHub: https://github.com/btschwertfeger/python-cmethods 183 | """ 184 | 185 | logging.basicConfig( 186 | format="%(asctime)s %(levelname)8s | %(message)s", 187 | datefmt="%Y/%m/%d %H:%M:%S", 188 | level=logging.INFO, 189 | ) 190 | log = logging.getLogger(__name__) 191 | log.info("Loading data sets ...") 192 | try: 193 | for key, message in zip( 194 | ("obs", "simh", "simp"), 195 | ( 196 | "observation data set", 197 | "modeled data set of the control period", 198 | "modeled data set of the scenario period", 199 | ), 200 | ): 201 | kwargs[key] = xr.open_dataset(kwargs[key]) 202 | if not isinstance(kwargs[key], xr.Dataset): 203 | raise TypeError("The data sets must be type xarray.Dataset") 204 | 205 | if kwargs["variable"] not in kwargs[key]: 206 | raise KeyError( 207 | f"Variable '{kwargs['variable']}' is missing in the {message}", 208 | ) 209 | kwargs[key] = kwargs[key][kwargs["variable"]] 210 | except (TypeError, KeyError) as exc: 211 | log.error(exc) 212 | sys.exit(1) 213 | 214 | log.info("Data sets loaded ...") 215 | kwargs["n_quantiles"] = kwargs["quantiles"] 216 | del kwargs["quantiles"] 217 | 218 | log.info("Applying %s ...", kwargs["method"]) 219 | result = adjust(**kwargs) 220 | 221 | log.info("Saving result to %s ...", kwargs["output"]) 222 | result.to_netcdf(kwargs["output"]) 223 | -------------------------------------------------------------------------------- /cmethods/core.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 | # 3 | # 
Copyright (C) 2024 Benjamin Thomas Schwertfeger 4 | # https://github.com/btschwertfeger 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see 18 | # https://www.gnu.org/licenses/gpl-3.0.html. 19 | # 20 | 21 | """ 22 | Module providing the main function that is used to apply the implemented bias 23 | correction techniques. 24 | """ 25 | 26 | from __future__ import annotations 27 | 28 | from typing import TYPE_CHECKING, Callable, Dict, Optional 29 | 30 | import xarray as xr 31 | 32 | from cmethods.distribution import quantile_delta_mapping as __quantile_delta_mapping 33 | from cmethods.distribution import quantile_mapping as __quantile_mapping 34 | from cmethods.scaling import delta_method as __delta_method 35 | from cmethods.scaling import linear_scaling as __linear_scaling 36 | from cmethods.scaling import variance_scaling as __variance_scaling 37 | from cmethods.static import SCALING_METHODS 38 | from cmethods.utils import UnknownMethodError, ensure_xr_dataarray 39 | 40 | if TYPE_CHECKING: 41 | from cmethods.types import XRData 42 | 43 | __METHODS_FUNC__: Dict[str, Callable] = { 44 | "linear_scaling": __linear_scaling, 45 | "variance_scaling": __variance_scaling, 46 | "delta_method": __delta_method, 47 | "quantile_mapping": __quantile_mapping, 48 | "quantile_delta_mapping": __quantile_delta_mapping, 49 | } 50 | 51 | 52 | def apply_ufunc( 53 | method: str, 54 | obs: 
xr.xarray.core.dataarray.DataArray, 55 | simh: xr.xarray.core.dataarray.DataArray, 56 | simp: xr.xarray.core.dataarray.DataArray, 57 | **kwargs: dict, 58 | ) -> xr.xarray.core.dataarray.DataArray: 59 | """ 60 | Internal function used to apply the bias correction technique to the 61 | passed input data. 62 | """ 63 | ensure_xr_dataarray(obs=obs, simh=simh, simp=simp) 64 | if method not in __METHODS_FUNC__: 65 | raise UnknownMethodError(method, __METHODS_FUNC__.keys()) 66 | 67 | if kwargs.get("input_core_dims"): 68 | if not isinstance(kwargs["input_core_dims"], dict): 69 | raise TypeError("input_core_dims must be an object of type 'dict'") 70 | if not len(kwargs["input_core_dims"]) == 3 or any( 71 | not isinstance(value, str) for value in kwargs["input_core_dims"].values() 72 | ): 73 | raise ValueError( 74 | 'input_core_dims must have three key-value pairs like: {"obs": "time", "simh": "time", "simp": "time"}', 75 | ) 76 | 77 | input_core_dims = kwargs.pop("input_core_dims") 78 | else: 79 | input_core_dims = {"obs": "time", "simh": "time", "simp": "time"} 80 | 81 | result: XRData = xr.apply_ufunc( 82 | __METHODS_FUNC__[method], 83 | obs, 84 | simh, 85 | # Need to spoof a fake time axis since 'time' coord on full dataset is 86 | # different than 'time' coord on training dataset. 87 | simp.rename({input_core_dims["simp"]: "__t_simp__"}), 88 | dask="parallelized", 89 | vectorize=True, 90 | # This will vectorize over the time dimension, so will submit each grid 91 | # cell independently 92 | input_core_dims=[ 93 | [input_core_dims["obs"]], 94 | [input_core_dims["simh"]], 95 | ["__t_simp__"], 96 | ], 97 | # Need to denote that the final output dataset will be labeled with the 98 | # spoofed time coordinate 99 | output_core_dims=[["__t_simp__"]], 100 | kwargs=dict(kwargs), 101 | ) 102 | 103 | # Rename to proper coordinate name. 
104 | result = result.rename({"__t_simp__": input_core_dims["simp"]}) 105 | 106 | # ufunc will put the core dimension to the end (time), so want to preserve 107 | # original order where time is commonly first. 108 | return result.transpose(*obs.rename({input_core_dims["obs"]: input_core_dims["simp"]}).dims) 109 | 110 | 111 | def adjust( 112 | method: str, 113 | obs: xr.xarray.core.dataarray.DataArray, 114 | simh: xr.xarray.core.dataarray.DataArray, 115 | simp: xr.xarray.core.dataarray.DataArray, 116 | **kwargs, 117 | ) -> xr.xarray.core.dataarray.DataArray | xr.xarray.core.dataarray.Dataset: 118 | """ 119 | Function to apply a bias correction technique on single and multidimensional 120 | data sets. For more information please refer to the method specific 121 | requirements and execution examples. 122 | 123 | See https://python-cmethods.readthedocs.io/en/latest/methods.html 124 | 125 | 126 | The time dimension of ``obs``, ``simh`` and ``simp`` must be named ``time``. 127 | 128 | If the sizes of time dimensions of the input data sets differ, you have to 129 | pass the hidden ``input_core_dims`` parameter, see 130 | https://python-cmethods.readthedocs.io/en/latest/getting_started.html#advanced-usage 131 | for more information. 
132 | 133 | :param method: Technique to apply 134 | :type method: str 135 | :param obs: The reference/observational data set 136 | :type obs: xr.xarray.core.dataarray.DataArray 137 | :param simh: The modeled data of the control period 138 | :type simh: xr.xarray.core.dataarray.DataArray 139 | :param simp: The modeled data of the period to adjust 140 | :type simp: xr.xarray.core.dataarray.DataArray 141 | :param kwargs: Any other method-specific parameter (like 142 | ``n_quantiles`` and ``kind``) 143 | :type kwargs: dict 144 | :return: The bias corrected/adjusted data set 145 | :rtype: xr.xarray.core.dataarray.DataArray | xr.xarray.core.dataarray.Dataset 146 | """ 147 | kwargs["adjust_called"] = True 148 | ensure_xr_dataarray(obs=obs, simh=simh, simp=simp) 149 | 150 | if method == "detrended_quantile_mapping": # noqa: PLR2004 151 | raise ValueError( 152 | "This function is not available for detrended quantile mapping." 153 | " Please use cmethods.CMethods.detrended_quantile_mapping", 154 | ) 155 | 156 | # No grouped correction | distribution-based technique 157 | # NOTE: This is disabled since using groups like "time.month" will lead 158 | # to unrealistic monthly transitions. If such behavior is wanted, 159 | # mock this function or apply ``CMethods.__apply_ufunc` directly 160 | # on your data sets. 
161 | if kwargs.get("group") is None: 162 | return apply_ufunc(method, obs, simh, simp, **kwargs).to_dataset() 163 | 164 | if method not in SCALING_METHODS: 165 | raise ValueError( 166 | "Can't use group for distribution based methods.", # except for DQM 167 | ) 168 | 169 | # Grouped correction | scaling-based technique 170 | group: str | dict[str, str] = kwargs["group"] 171 | if isinstance(group, str): 172 | # only for same sized time dimensions 173 | obs_group = group 174 | simh_group = group 175 | simp_group = group 176 | elif isinstance(group, dict): 177 | if any(key not in {"obs", "simh", "simp"} for key in group): 178 | raise ValueError( 179 | "group must either be a string like 'time' or a dict like " 180 | '{"obs": "time.month", "simh": "t_simh.month", "simp": "time.month"}', 181 | ) 182 | # for different sized time dimensions 183 | obs_group = group["obs"] 184 | simh_group = group["simh"] 185 | simp_group = group["simp"] 186 | else: 187 | raise ValueError("'group' must be a string or a dict!") 188 | 189 | del kwargs["group"] 190 | 191 | result: Optional[XRData] = None 192 | for (_, obs_gds), (_, simh_gds), (_, simp_gds) in zip( 193 | obs.groupby(obs_group), 194 | simh.groupby(simh_group), 195 | simp.groupby(simp_group), 196 | ): 197 | monthly_result = apply_ufunc( 198 | method, 199 | obs_gds, 200 | simh_gds, 201 | simp_gds, 202 | **kwargs, 203 | ) 204 | 205 | result = monthly_result if result is None else xr.merge([result, monthly_result]) 206 | 207 | return result 208 | 209 | 210 | __all__ = ["adjust"] 211 | -------------------------------------------------------------------------------- /cmethods/scaling.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2024 Benjamin Thomas Schwertfeger 4 | # https://github.com/btschwertfeger 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General 
Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see 18 | # https://www.gnu.org/licenses/gpl-3.0.html. 19 | # 20 | 21 | """ 22 | Module providing functions for scaling-based bias adjustments. Functions are not 23 | intended to used directly - but as part of the adjustment procedure triggered by 24 | :func:``cmethods.adjust``. 25 | """ 26 | 27 | from __future__ import annotations 28 | 29 | from typing import TYPE_CHECKING, Any, Final 30 | 31 | import numpy as np 32 | 33 | from cmethods.static import ADDITIVE, MAX_SCALING_FACTOR, MULTIPLICATIVE 34 | from cmethods.utils import ( 35 | check_adjust_called, 36 | check_np_types, 37 | ensure_dividable, 38 | get_adjusted_scaling_factor, 39 | ) 40 | 41 | if TYPE_CHECKING: 42 | from cmethods.types import NPData 43 | 44 | 45 | # ? -----========= L I N E A R - S C A L I N G =========------ 46 | def linear_scaling( 47 | obs: NPData, 48 | simh: NPData, 49 | simp: NPData, 50 | kind: str = "+", 51 | **kwargs: Any, 52 | ) -> NPData: 53 | r""" 54 | **Do not call this function directly, please use :func:`cmethods.adjust`** 55 | 56 | See https://python-cmethods.readthedocs.io/en/latest/methods.html#linear-scaling 57 | """ 58 | check_adjust_called( 59 | function_name="linear_scaling", 60 | adjust_called=kwargs.get("adjust_called"), 61 | ) 62 | check_np_types(obs=obs, simh=simh, simp=simp) 63 | 64 | if kind in ADDITIVE: 65 | return np.array(simp) + (np.nanmean(obs) - np.nanmean(simh)) # Eq. 
1 66 | if kind in MULTIPLICATIVE: 67 | max_scaling_factor: Final[float] = kwargs.get( 68 | "max_scaling_factor", 69 | MAX_SCALING_FACTOR, 70 | ) 71 | adj_scaling_factor: Final[float] = get_adjusted_scaling_factor( 72 | ensure_dividable( 73 | np.nanmean(obs), 74 | np.nanmean(simh), 75 | max_scaling_factor, 76 | ), 77 | max_scaling_factor, 78 | ) 79 | return np.array(simp) * adj_scaling_factor # Eq. 2 80 | raise NotImplementedError( 81 | f"{kind=} not available for linear_scaling. Use '+' or '*' instead.", 82 | ) 83 | 84 | 85 | # ? -----========= V A R I A N C E - S C A L I N G =========------ 86 | 87 | 88 | def variance_scaling( 89 | obs: NPData, 90 | simh: NPData, 91 | simp: NPData, 92 | kind: str = "+", 93 | **kwargs: Any, 94 | ) -> NPData: 95 | r""" 96 | **Do not call this function directly, please use :func:`cmethods.CMethods.adjust`** 97 | 98 | See https://python-cmethods.readthedocs.io/en/latest/methods.html#variance-scaling 99 | """ 100 | check_adjust_called( 101 | function_name="variance_scaling", 102 | adjust_called=kwargs.get("adjust_called"), 103 | ) 104 | check_np_types(obs=obs, simh=simp, simp=simp) 105 | 106 | if kind in ADDITIVE: 107 | LS_simh = linear_scaling(obs, simh, simh, kind="+", **kwargs) # Eq. 1 108 | LS_simp = linear_scaling(obs, simh, simp, kind="+", **kwargs) # Eq. 2 109 | 110 | VS_1_simh = LS_simh - np.nanmean(LS_simh) # Eq. 3 111 | VS_1_simp = LS_simp - np.nanmean(LS_simp) # Eq. 4 112 | max_scaling_factor: Final[float] = kwargs.get( 113 | "max_scaling_factor", 114 | MAX_SCALING_FACTOR, 115 | ) 116 | adj_scaling_factor: Final[float] = get_adjusted_scaling_factor( 117 | ensure_dividable( 118 | np.std(np.array(obs)), 119 | np.std(VS_1_simh), 120 | max_scaling_factor, 121 | ), 122 | max_scaling_factor, 123 | ) 124 | 125 | VS_2_simp = VS_1_simp * adj_scaling_factor # Eq. 5 126 | return VS_2_simp + np.nanmean(LS_simp) # Eq. 6 127 | 128 | raise NotImplementedError( 129 | f"{kind=} not available for variance_scaling. 
Use '+' instead.", 130 | ) 131 | 132 | 133 | # ? -----========= D E L T A - M E T H O D =========------ 134 | def delta_method( 135 | obs: NPData, 136 | simh: NPData, 137 | simp: NPData, 138 | kind: str = "+", 139 | **kwargs: Any, 140 | ) -> NPData: 141 | r""" 142 | **Do not call this function directly, please use :func:`cmethods.adjust`** 143 | See https://python-cmethods.readthedocs.io/en/latest/methods.html#delta-method 144 | """ 145 | check_adjust_called( 146 | function_name="delta_method", 147 | adjust_called=kwargs.get("adjust_called"), 148 | ) 149 | check_np_types(obs=obs, simh=simh, simp=simp) 150 | 151 | if kind in ADDITIVE: 152 | return np.array(obs) + (np.nanmean(simp) - np.nanmean(simh)) # Eq. 1 153 | if kind in MULTIPLICATIVE: 154 | max_scaling_factor: Final[float] = kwargs.get( 155 | "max_scaling_factor", 156 | MAX_SCALING_FACTOR, 157 | ) 158 | adj_scaling_factor = get_adjusted_scaling_factor( 159 | ensure_dividable( 160 | np.nanmean(simp), 161 | np.nanmean(simh), 162 | max_scaling_factor, 163 | ), 164 | max_scaling_factor, 165 | ) 166 | return np.array(obs) * adj_scaling_factor # Eq. 2 167 | raise NotImplementedError( 168 | f"{kind=} not available for delta_method. Use '+' or '*' instead.", 169 | ) 170 | -------------------------------------------------------------------------------- /cmethods/static.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # https://github.com/btschwertfeger 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 
10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see 18 | # https://www.gnu.org/licenses/gpl-3.0.html. 19 | # 20 | 21 | """Module providing static information for the python-cmethods package""" 22 | 23 | from __future__ import annotations 24 | 25 | from typing import List 26 | 27 | SCALING_METHODS: List[str] = [ 28 | "linear_scaling", 29 | "variance_scaling", 30 | "delta_method", 31 | ] 32 | DISTRIBUTION_METHODS: List[str] = [ 33 | "quantile_mapping", 34 | "detrended_quantile_mapping", 35 | "quantile_delta_mapping", 36 | ] 37 | 38 | CUSTOM_METHODS: List[str] = SCALING_METHODS + DISTRIBUTION_METHODS 39 | METHODS: List[str] = CUSTOM_METHODS 40 | 41 | ADDITIVE: List[str] = ["+", "add"] 42 | MULTIPLICATIVE: List[str] = ["*", "mult"] 43 | MAX_SCALING_FACTOR: int = 10 44 | 45 | __all__ = [ 46 | "ADDITIVE", 47 | "CUSTOM_METHODS", 48 | "DISTRIBUTION_METHODS", 49 | "MAX_SCALING_FACTOR", 50 | "METHODS", 51 | "MULTIPLICATIVE", 52 | "SCALING_METHODS", 53 | ] 54 | -------------------------------------------------------------------------------- /cmethods/types.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # https://github.com/btschwertfeger 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 
"""Module providing custom types"""

from __future__ import annotations

from typing import TypeVar

from numpy import generic, ndarray
from xarray.core.dataarray import DataArray, Dataset

# Runtime type tuples, usable directly with isinstance() checks.
XRData_t = (Dataset, DataArray)
NPData_t = (list, ndarray, generic)

# Constrained type variables for annotating functions that accept either
# xarray containers (XRData) or plain list/numpy data (NPData) and return
# the same concrete type they were given.
XRData = TypeVar("XRData", Dataset, DataArray)
NPData = TypeVar("NPData", list, ndarray, generic)
# NOTE: conf.py's setup() hook copies examples.ipynb into SOURCEDIR while
# Sphinx builds; remove it again after every target. Use `rm -f` so targets
# that never create the file (e.g. "clean") do not fail on a missing-file
# error and abort the make run.
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
	rm -f $(SOURCEDIR)/examples.ipynb
the Free Software Foundation, either version 3 of the License, or 9 | .. (at your option) any later version. 10 | .. 11 | .. This program is distributed in the hope that it will be useful, 12 | .. but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | .. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | .. GNU General Public License for more details. 15 | .. 16 | .. You should have received a copy of the GNU General Public License 17 | .. along with this program. If not, see 18 | .. https://www.gnu.org/licenses/gpl-3.0.html. 19 | .. 20 | 21 | Command-Line Interface 22 | ====================== 23 | 24 | The command-line interface provides the following help instructions 25 | 26 | .. code-block:: bash 27 | 28 | cmethods --help 29 | 30 | Usage: cmethods [OPTIONS] 31 | 32 | Command line tool to apply bias adjustment procedures to climate data. 33 | 34 | Scaling-Based Adjustment Options: 35 | [all required if --method="linear_scaling" and --method="variance_scaling" and 36 | --method="delta_method"] 37 | --group TEXT Temporal grouping 38 | 39 | Distribution-Based Adjustment Options: 40 | [all required if --method="quantile_mapping" and 41 | --method="quantile_delta_mapping"] 42 | --quantiles INTEGER Quantiles to respect 43 | 44 | Other options: 45 | --version Show the version and exit. 46 | --obs, --observations PATH Reference data set (control period) [required] 47 | --simh, --simulated-historical PATH 48 | Modeled data set (control period) [required] 49 | --simp, --simulated-scenario PATH 50 | Modeled data set (scenario period) [required] 51 | --method [linear_scaling|variance_scaling|delta_method|quantile_mapping|quantile_delta_mapping] 52 | Bias adjustment method to apply [required] 53 | --kind [add|mult] Kind of adjustment [required] 54 | --variable TEXT Variable of interest [required] 55 | -o, --output TEXT Output file name [required] 56 | -h, --help Show this message and exit. 
57 | -------------------------------------------------------------------------------- /doc/cmethods.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst; coding: utf-8 -*- 2 | .. 3 | .. Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | .. https://github.com/btschwertfeger 5 | .. 6 | .. This program is free software: you can redistribute it and/or modify 7 | .. it under the terms of the GNU General Public License as published by 8 | .. the Free Software Foundation, either version 3 of the License, or 9 | .. (at your option) any later version. 10 | .. 11 | .. This program is distributed in the hope that it will be useful, 12 | .. but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | .. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | .. GNU General Public License for more details. 15 | .. 16 | .. You should have received a copy of the GNU General Public License 17 | .. along with this program. If not, see 18 | .. https://www.gnu.org/licenses/gpl-3.0.html. 19 | .. 20 | 21 | Classes and Functions 22 | ===================== 23 | 24 | In past versions of the python-cmethods package (v1.x) there was a "CMethods" 25 | class that implemented the bias correction methods. This class was removed in 26 | version v2.0.0. Since then, the ``cmethods.adjust`` function is used to apply 27 | the implemented techniques except for detrended quantile mapping. 28 | 29 | .. autofunction:: cmethods.adjust 30 | .. automethod:: cmethods.distribution.detrended_quantile_mapping 31 | 32 | Some additional methods 33 | ----------------------- 34 | 35 | .. automethod:: cmethods.utils.get_pdf 36 | .. automethod:: cmethods.utils.get_cdf 37 | .. 
automethod:: cmethods.utils.get_inverse_of_cdf 38 | -------------------------------------------------------------------------------- /doc/conf.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # https://github.com/btschwertfeger 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see 18 | # https://www.gnu.org/licenses/gpl-3.0.html. 19 | # 20 | # Configuration file for the Sphinx documentation builder. 
def setup(app) -> None:  # noqa: ARG001
    """Sphinx extension hook: stage the example notebook for nbsphinx.

    Copies ``examples/examples.ipynb`` from the repository root next to
    ``conf.py`` so that the toctree entry ``examples.ipynb`` resolves.
    """
    notebook_source = join("..", "examples", "examples.ipynb")
    copyfile(notebook_source, "examples.ipynb")
html_theme = "sphinx_rtd_theme" 76 | html_static_path = ["_static"] 77 | html_context = { 78 | "display_github": True, 79 | "github_user": "btschwertfeger", 80 | "github_repo": "python-cmethods", 81 | "github_version": "master/doc/", 82 | } 83 | -------------------------------------------------------------------------------- /doc/getting_started.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst; coding: utf-8 -*- 2 | .. 3 | .. Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | .. https://github.com/btschwertfeger 5 | .. 6 | .. This program is free software: you can redistribute it and/or modify 7 | .. it under the terms of the GNU General Public License as published by 8 | .. the Free Software Foundation, either version 3 of the License, or 9 | .. (at your option) any later version. 10 | .. 11 | .. This program is distributed in the hope that it will be useful, 12 | .. but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | .. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | .. GNU General Public License for more details. 15 | .. 16 | .. You should have received a copy of the GNU General Public License 17 | .. along with this program. If not, see 18 | .. https://www.gnu.org/licenses/gpl-3.0.html. 19 | .. 20 | 21 | Getting Started 22 | =============== 23 | 24 | Installation 25 | ------------ 26 | 27 | The `python-cmethods`_ module can be installed using the package manager pip: 28 | 29 | .. code-block:: bash 30 | 31 | python3 -m pip install python-cmethods 32 | 33 | If the installation fails due to missing HDF5 headers, ensure that 'hdf5' and 34 | 'netcdf' are pre-installed, e.g. on macOS using: ``brew install hdf5 netcdf``. 35 | 36 | The package is also available via conda-forge. See 37 | `python_cmethods-conda`_ for more information. 
38 | 39 | Command-Line Interface Usage 40 | ---------------------------- 41 | 42 | The python-cmethods package provides a command-line interface for applying 43 | various bias correction methods out of the box. 44 | 45 | Keep in mind that due to the various kinds of data and possibilities to 46 | pre-process those, the CLI only provides a basic application of the implemented 47 | techniques. For special parameters, adjustments, and data preparation, please 48 | use programming interface. 49 | 50 | Listing the parameters and their requirements is available by passing the 51 | ``--help`` option: 52 | 53 | .. code-block:: bash 54 | 55 | cmethods --help 56 | 57 | Applying the cmethods tool on the provided example data using the linear scaling 58 | approach is shown below: 59 | 60 | .. code-block:: bash 61 | :caption: Apply Linear Scaling via command-line 62 | 63 | cmethods \ 64 | --obs examples/input_data/observations.nc \ 65 | --simh examples/input_data/control.nc \ 66 | --simp examples/input_data/scenario.nc \ 67 | --method linear_scaling \ 68 | --kind add \ 69 | --variable tas \ 70 | --group time.month \ 71 | --output linear_scaling.nc 72 | 73 | 2024/04/08 18:11:12 INFO | Loading data sets ... 74 | 2024/04/08 18:11:12 INFO | Data sets loaded ... 75 | 2024/04/08 18:11:12 INFO | Applying linear_scaling ... 76 | 2024/04/08 18:11:15 INFO | Saving result to linear_scaling.nc ... 77 | 78 | 79 | For applying a distribution-based bias correction technique, the following 80 | example may help: 81 | 82 | .. code-block:: bash 83 | :caption: Apply Quantile Delta Mapping via command-line 84 | 85 | cmethods \ 86 | --obs examples/input_data/observations.nc \ 87 | --simh examples/input_data/control.nc \ 88 | --simp examples/input_data/scenario.nc \ 89 | --method quantile_delta_mapping \ 90 | --kind add \ 91 | --variable tas \ 92 | --quantiles 1000 \ 93 | --output quantile_delta_mapping.nc 94 | 95 | 2024/04/08 18:16:34 INFO | Loading data sets ... 
96 | 2024/04/08 18:16:35 INFO | Data sets loaded ... 97 | 2024/04/08 18:16:35 INFO | Applying quantile_delta_mapping ... 98 | 2024/04/08 18:16:35 INFO | Saving result to quantile_delta_mapping.nc ... 99 | 100 | Programming Interface Usage and Examples 101 | ---------------------------------------- 102 | 103 | The `python-cmethods`_ module can be imported and applied as shown in the 104 | following examples. For a more detailed description of the methods, please have a 105 | look at the method-specific documentation. 106 | 107 | .. code-block:: python 108 | :linenos: 109 | :caption: Apply the Linear Scaling bias correction technique on 1-dimensional data 110 | 111 | import xarray as xr 112 | from cmethods import adjust 113 | 114 | obsh = xr.open_dataset("input_data/observations.nc") 115 | simh = xr.open_dataset("input_data/control.nc") 116 | simp = xr.open_dataset("input_data/scenario.nc") 117 | 118 | ls_result = adjust( 119 | method="linear_scaling", 120 | obs=obsh["tas"][:, 0, 0], 121 | simh=simh["tas"][:, 0, 0], 122 | simp=simp["tas"][:, 0, 0], 123 | kind="+", 124 | ) 125 | 126 | .. code-block:: python 127 | :linenos: 128 | :caption: Apply the Quantile Delta Mapping bias correction technique on 3-dimensional data 129 | 130 | import xarray as xr 131 | from cmethods import adjust 132 | 133 | obsh = xr.open_dataset("input_data/observations.nc") 134 | simh = xr.open_dataset("input_data/control.nc") 135 | simp = xr.open_dataset("input_data/scenario.nc") 136 | 137 | qdm_result = adjust( 138 | method="quantile_delta_mapping", 139 | obs=obsh["tas"], 140 | simh=simh["tas"], 141 | simp=simp["tas"], 142 | n_quantiles=1000, 143 | kind="+", 144 | ) 145 | 146 | 147 | Advanced Usage 148 | -------------- 149 | 150 | In some cases the time dimensions of the input data sets have different sizes. In 151 | such cases, the hidden parameter ``input_core_dims`` must be passed to the 152 | ``adjust`` call. 153 | 154 | It defines the dimension names of the input data sets, i.e.
if the time 155 | dimensions of ``obs`` and ``simp`` have the same length, but the time dimension of 156 | ``simh`` is somewhat smaller, you have to define this as follows: 157 | 158 | 159 | .. code-block:: python 160 | :linenos: 161 | :caption: Bias Adjustments for data sets with different time dimension lengths pt. 1 162 | 163 | from cmethods import adjust 164 | import xarray as xr 165 | 166 | obs = xr.open_dataset("examples/input_data/observations.nc")["tas"] 167 | simp = xr.open_dataset("examples/input_data/control.nc")["tas"] 168 | simh = simp.copy(deep=True)[3650:] 169 | 170 | bc = adjust( 171 | method="quantile_mapping", 172 | obs=obs, 173 | simh=simh.rename({"time": "t_simh"}), 174 | simp=simp, 175 | kind="+", 176 | input_core_dims={"obs": "time", "simh": "t_simh", "simp": "time"}, 177 | n_quantiles=100, 178 | ) 179 | 180 | In case you are applying a scaling-based technique using grouping, you have to 181 | adjust the group names according to the time dimension names. 182 | 183 | .. code-block:: python 184 | :linenos: 185 | :caption: Bias Adjustments for data sets with different time dimension lengths pt. 2 186 | 187 | from cmethods import adjust 188 | import xarray as xr 189 | 190 | obs = xr.open_dataset("examples/input_data/observations.nc")["tas"] 191 | simp = xr.open_dataset("examples/input_data/control.nc")["tas"] 192 | simh = simp.copy(deep=True)[3650:] 193 | 194 | bc = adjust( 195 | method="linear_scaling", 196 | obs=obs, 197 | simh=simh.rename({"time": "t_simh"}), 198 | simp=simp, 199 | kind="+", 200 | group={"obs": "time.month", "simh": "t_simh.month", "simp": "time.month"}, 201 | input_core_dims={"obs": "time", "simh": "t_simh", "simp": "time"}, 202 | ) -------------------------------------------------------------------------------- /doc/index.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst; coding: utf-8 -*- 2 | .. 3 | .. Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | ..
https://github.com/btschwertfeger 5 | .. 6 | .. This program is free software: you can redistribute it and/or modify 7 | .. it under the terms of the GNU General Public License as published by 8 | .. the Free Software Foundation, either version 3 of the License, or 9 | .. (at your option) any later version. 10 | .. 11 | .. This program is distributed in the hope that it will be useful, 12 | .. but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | .. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | .. GNU General Public License for more details. 15 | .. 16 | .. You should have received a copy of the GNU General Public License 17 | .. along with this program. If not, see 18 | .. https://www.gnu.org/licenses/gpl-3.0.html. 19 | .. 20 | 21 | Welcome to python-cmethods's documentation! 22 | =========================================== 23 | 24 | .. toctree:: 25 | :maxdepth: 2 26 | :caption: Contents: 27 | 28 | introduction.rst 29 | getting_started.rst 30 | examples.ipynb 31 | cli.rst 32 | cmethods.rst 33 | methods.rst 34 | issues.rst 35 | license.rst 36 | -------------------------------------------------------------------------------- /doc/introduction.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst; coding: utf-8 -*- 2 | .. 3 | .. Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | .. https://github.com/btschwertfeger 5 | .. 6 | .. This program is free software: you can redistribute it and/or modify 7 | .. it under the terms of the GNU General Public License as published by 8 | .. the Free Software Foundation, either version 3 of the License, or 9 | .. (at your option) any later version. 10 | .. 11 | .. This program is distributed in the hope that it will be useful, 12 | .. but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | .. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | .. GNU General Public License for more details. 15 | .. 16 | .. 
You should have received a copy of the GNU General Public License 17 | .. along with this program. If not, see 18 | .. https://www.gnu.org/licenses/gpl-3.0.html. 19 | .. 20 | 21 | python-cmethods 22 | =============== 23 | 24 | |GitHub badge| |License badge| |PyVersions badge| |Downloads badge| 25 | |CI/CD badge| |codecov badge| |OSSF Scorecard| |OSSF Best Practices| 26 | |Release date badge| |Release version badge| |DOI badge| |Docs stable| 27 | 28 | About 29 | ----- 30 | 31 | Welcome to `python-cmethods`_, a powerful Python package designed for bias 32 | correction and adjustment of climate data. Built with a focus on ease of use and 33 | efficiency, python-cmethods offers a comprehensive suite of functions tailored 34 | for applying bias correction methods to climate model simulations and 35 | observational datasets. 36 | 37 | Bias correction in climate research involves the adjustment of systematic errors 38 | or biases present in climate model simulations or observational datasets to 39 | improve their accuracy and reliability, ensuring that the data better represents 40 | actual climate conditions. This process typically involves statistical methods 41 | or empirical relationships to correct for biases caused by factors such as 42 | instrument calibration, spatial resolution, or model deficiencies. 43 | 44 | .. figure:: _static/images/biasCdiagram.png 45 | :width: 600 46 | :align: center 47 | :alt: Schematic representation of a bias adjustment procedure 48 | 49 | Fig 1: Schematic representation of a bias adjustment procedure 50 | 51 | In this way, for example, modeled data, which on average represent values that 52 | are too cold, can be easily bias-corrected by applying any adjustment procedure 53 | included in this package. 54 | 55 | For instance, modeled data can report values that are way colder than the those 56 | data reported by reanalysis time-series. To address this issue, an adjustment 57 | procedure can be employed. 
The figure below illustrates the observed, modeled, 58 | and adjusted values, revealing that the delta-adjusted time series 59 | (:math:`T^{*DM}_{sim,p}`) is significantly more similar to the observational 60 | data (:math:`T_{obs,p}`) than the raw model output (:math:`T_{sim,p}`). 61 | 62 | .. figure:: _static/images/dm-doy-plot.png 63 | :width: 600 64 | :align: center 65 | :alt: Temperature per day of year in modeled, observed and bias-adjusted climate data 66 | 67 | Fig 2: Temperature per day of year in modeled, observed and bias-adjusted climate data 68 | 69 | The mathematical foundations supporting each bias correction technique 70 | implemented in python-cmethods are integral to the package, ensuring 71 | transparency and reproducibility in the correction process. Each method is 72 | accompanied by references to trusted publications, reinforcing the reliability 73 | and rigor of the corrections applied. 74 | 75 | 76 | Available Methods 77 | ----------------- 78 | 79 | python-cmethods provides the following bias correction techniques: 80 | 81 | - :ref:`Linear Scaling` 82 | - :ref:`Variance Scaling` 83 | - :ref:`Delta Method` 84 | - :ref:`Quantile Mapping` 85 | - :ref:`Detrended Quantile Mapping` 86 | - :ref:`Quantile Delta Mapping` 87 | 88 | Please refer to the official documentation for more information about these 89 | methods as well as sample scripts: 90 | https://python-cmethods.readthedocs.io/en/stable/ 91 | 92 | Best Practices and important Notes 93 | ---------------------------------- 94 | 95 | - The training data should have the same temporal resolution. 96 | 97 | - Except for the variance scaling, all methods can be applied on stochastic and 98 | non-stochastic climate variables. Variance scaling can only be applied on 99 | non-stochastic climate variables. 100 | 101 | - Non-stochastic climate variables are those that can be predicted with relative 102 | certainty based on factors such as location, elevation, and season. 
Examples 103 | of non-stochastic climate variables include air temperature, air pressure, and 104 | solar radiation. 105 | 106 | - Stochastic climate variables, on the other hand, are those that exhibit a high 107 | degree of variability and unpredictability, making them difficult to forecast 108 | accurately. Precipitation is an example of a stochastic climate variable 109 | because it can vary greatly in timing, intensity, and location due to complex 110 | atmospheric and meteorological processes. 111 | 112 | - Except for the detrended quantile mapping (DQM) technique, all methods can be 113 | applied to single and multidimensional data sets. The implementation of DQM to 114 | 3-dimensional data is still in progress. 115 | 116 | - For any questions -- please open an issue at 117 | https://github.com/btschwertfeger/python-cmethods/issues. Examples can be found 118 | in the `python-cmethods`_ repository and of course within this documentation. 119 | 120 | References 121 | ---------- 122 | 123 | - Schwertfeger, Benjamin Thomas and Lohmann, Gerrit and Lipskoch, Henrik (2023) *"Introduction of the BiasAdjustCXX command-line tool for the application of fast and efficient bias corrections in climatic research"*, SoftwareX, Volume 22, 101379, ISSN 2352-7110, (https://doi.org/10.1016/j.softx.2023.101379) 124 | - Schwertfeger, Benjamin Thomas (2022) *"The influence of bias corrections on variability, distribution, and correlation of temperatures in comparison to observed and modeled climate data in Europe"* (https://epic.awi.de/id/eprint/56689/) 125 | - Linear Scaling and Variance Scaling based on: Teutschbein, Claudia and Seibert, Jan (2012) *"Bias correction of regional climate model simulations for hydrological climate-change impact studies: Review and evaluation of different methods"* (https://doi.org/10.1016/j.jhydrol.2012.05.052) 126 | - Delta Method based on: Beyer, R. and Krapp, M. 
and Manica, A.: *"An empirical evaluation of bias correction methods for palaeoclimate simulations"* (https://doi.org/10.5194/cp-16-1493-2020) 127 | - Quantile and Detrended Quantile Mapping based on: Alex J. Cannon and Stephen R. Sobie and Trevor Q. Murdock *"Bias Correction of GCM Precipitation by Quantile Mapping: How Well Do Methods Preserve Changes in Quantiles and Extremes?"* (https://doi.org/10.1175/JCLI-D-14-00754.1) 128 | - Quantile Delta Mapping based on: Tong, Y., Gao, X., Han, Z. et al. *"Bias correction of temperature and precipitation over China for RCM simulations using the QM and QDM methods"*. Clim Dyn 57, 1425–1443 (2021). (https://doi.org/10.1007/s00382-020-05447-4) 129 | -------------------------------------------------------------------------------- /doc/issues.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst; coding: utf-8 -*- 2 | .. 3 | .. Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | .. https://github.com/btschwertfeger 5 | .. 6 | .. This program is free software: you can redistribute it and/or modify 7 | .. it under the terms of the GNU General Public License as published by 8 | .. the Free Software Foundation, either version 3 of the License, or 9 | .. (at your option) any later version. 10 | .. 11 | .. This program is distributed in the hope that it will be useful, 12 | .. but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | .. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | .. GNU General Public License for more details. 15 | .. 16 | .. You should have received a copy of the GNU General Public License 17 | .. along with this program. If not, see 18 | .. https://www.gnu.org/licenses/gpl-3.0.html. 19 | .. 20 | 21 | Known Issues 22 | ============ 23 | 24 | - Since the scaling methods implemented so far scale by default over the mean 25 | values of the respective months, unrealistic long-term mean values may occur 26 | at the month transitions. 
This can be prevented either by selecting 27 | ``group='time.dayofyear'``. Alternatively, it is possible not to scale using 28 | long-term mean values, but using a 31-day interval, which takes the 31 29 | surrounding values over all years as the basis for calculating the mean 30 | values. This is not yet implemented in this module, but is available in the 31 | command-line tool `BiasAdjustCXX`_. 32 | -------------------------------------------------------------------------------- /doc/license.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst; coding: utf-8 -*- 2 | .. 3 | .. Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | .. https://github.com/btschwertfeger 5 | .. 6 | .. This program is free software: you can redistribute it and/or modify 7 | .. it under the terms of the GNU General Public License as published by 8 | .. the Free Software Foundation, either version 3 of the License, or 9 | .. (at your option) any later version. 10 | .. 11 | .. This program is distributed in the hope that it will be useful, 12 | .. but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | .. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | .. GNU General Public License for more details. 15 | .. 16 | .. You should have received a copy of the GNU General Public License 17 | .. along with this program. If not, see 18 | .. https://www.gnu.org/licenses/gpl-3.0.html. 19 | .. 20 | 21 | .. _section-license: 22 | 23 | License 24 | ======= 25 | 26 | .. include:: ../LICENSE 27 | -------------------------------------------------------------------------------- /doc/links.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst; coding: utf-8 -*- 2 | .. 3 | .. Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | .. https://github.com/btschwertfeger 5 | .. 6 | .. This program is free software: you can redistribute it and/or modify 7 | .. 
it under the terms of the GNU General Public License as published by 8 | .. the Free Software Foundation, either version 3 of the License, or 9 | .. (at your option) any later version. 10 | .. 11 | .. This program is distributed in the hope that it will be useful, 12 | .. but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | .. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | .. GNU General Public License for more details. 15 | .. 16 | .. You should have received a copy of the GNU General Public License 17 | .. along with this program. If not, see 18 | .. https://www.gnu.org/licenses/gpl-3.0.html. 19 | .. 20 | 21 | .. LINKS 22 | 23 | .. _python-cmethods: https://github.com/btschwertfeger/python-cmethods 24 | .. _python_cmethods-conda: https://anaconda.org/conda-forge/python_cmethods 25 | .. _python-cmethods/issues: https://github.com/btschwertfeger/python-cmethods/issues 26 | .. _python-cmethods/discussions: https://github.com/btschwertfeger/python-cmethods/discussions 27 | .. _BiasAdjustCXX: https://github.com/btschwertfeger/BiasAdjustCXX 28 | .. _tool comparison: https://github.com/btschwertfeger/BiasAdjustCXX-Performance-Test 29 | .. _xclim: https://zenodo.org/record/7535677 30 | .. _xarray: https://xarray.dev/ 31 | .. _dask: https://docs.dask.org/en/stable/ 32 | 33 | .. BADGES 34 | 35 | .. |GitHub badge| image:: https://badgen.net/badge/icon/github?icon=github&label 36 | :target: https://github.com/btschwertfeger/python-cmethods 37 | .. |License badge| image:: https://img.shields.io/badge/License-GPLv3-orange.svg 38 | :target: https://www.gnu.org/licenses/gpl-3.0 39 | .. |PyVersions badge| image:: https://img.shields.io/badge/python-3.9_|_3.10_|_3.11|_3.12|_3.13-blue.svg 40 | :target: https://github.com/btschwertfeger/python-cmethods 41 | .. 
|Downloads badge| image:: https://static.pepy.tech/personalized-badge/python-cmethods?period=total&units=abbreviation&left_color=grey&right_color=orange&left_text=downloads 42 | :target: https://pepy.tech/project/python-cmethods 43 | .. |CI/CD badge| image:: https://github.com/btschwertfeger/python-cmethods/actions/workflows/cicd.yaml/badge.svg?branch=master 44 | :target: https://github.com/btschwertfeger/python-cmethods/actions/workflows/cicd.yaml 45 | .. |codecov badge| image:: https://codecov.io/gh/btschwertfeger/python-cmethods/branch/master/badge.svg 46 | :target: https://app.codecov.io/gh/btschwertfeger/python-cmethods 47 | .. |Release date badge| image:: https://shields.io/github/release-date/btschwertfeger/python-cmethods 48 | :target: https://github.com/btschwertfeger/python-cmethods 49 | .. |Release version badge| image:: https://shields.io/github/v/release/btschwertfeger/python-cmethods?display_name=tag 50 | :target: https://github.com/btschwertfeger/python-cmethods 51 | .. |DOI badge| image:: https://zenodo.org/badge/496160109.svg 52 | :target: https://zenodo.org/badge/latestdoi/496160109 53 | .. |Docs latest| image:: https://readthedocs.org/projects/python-cmethods/badge/?version=latest 54 | :target: https://python-cmethods.readthedocs.io/en/latest 55 | :alt: Documentation Status latest 56 | .. |Docs stable| image:: https://readthedocs.org/projects/python-cmethods/badge/?version=stable 57 | :target: https://python-cmethods.readthedocs.io/en/stable 58 | :alt: Documentation Status stable 59 | .. |OSSF Scorecard| image:: https://img.shields.io/ossf-scorecard/github.com/btschwertfeger/python-cmethods?label=openssf%20scorecard&style=flat 60 | :target: https://securityscorecards.dev/viewer/?uri=github.com/btschwertfeger/python-cmethods 61 | :alt: OSSF Scorecard 62 | .. 
|OSSF Best Practices| image:: https://www.bestpractices.dev/projects/8666/badge 63 | :target: https://www.bestpractices.dev/projects/8666 64 | :alt: OSSF Best Practices 65 | -------------------------------------------------------------------------------- /doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /doc/requirements.txt: -------------------------------------------------------------------------------- 1 | click~=8.1 2 | cloup~=3.0 3 | ipython 4 | nbsphinx 5 | netCDF4>=1.6.1 6 | numpy 7 | setuptools_scm 8 | sphinx<8.2.0 # doesn't work with nbsphinx 9 | sphinx-rtd-theme 10 | xarray>=2022.11.0 11 | -------------------------------------------------------------------------------- /examples/input_data/control.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/btschwertfeger/python-cmethods/5cb26f1e4cd285b8f6edf3089a02f4259a22b9f8/examples/input_data/control.nc 
-------------------------------------------------------------------------------- /examples/input_data/observations.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/btschwertfeger/python-cmethods/5cb26f1e4cd285b8f6edf3089a02f4259a22b9f8/examples/input_data/observations.nc -------------------------------------------------------------------------------- /examples/input_data/scenario.nc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/btschwertfeger/python-cmethods/5cb26f1e4cd285b8f6edf3089a02f4259a22b9f8/examples/input_data/scenario.nc -------------------------------------------------------------------------------- /examples/requirements.txt: -------------------------------------------------------------------------------- 1 | matplotlib 2 | netCDF4 3 | python-cmethods 4 | xarray 5 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | dask[distributed] 2 | pytest 3 | pytest-cov 4 | pytest-retry 5 | scikit-learn 6 | scipy 7 | venv-kernel 8 | zarr 9 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | # 8 | # This file is only used by sphinx for liking the package to the documentation. 
9 | 10 | from setuptools import setup 11 | 12 | setup() 13 | -------------------------------------------------------------------------------- /tests/README.rst: -------------------------------------------------------------------------------- 1 | Unit tests for python-cmethods 2 | ############################## 3 | 4 | The input data sets are generated before the tests are executed. They are based 5 | on simple equations to simulate temperatures and precipitation for different 6 | locations. The bias correction methods are then applied to those to to then 7 | validate if the methods improved the data. 8 | 9 | Additionally some input data is saved as .zarr, so the dask compatibility can be 10 | tested as well. 11 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2024 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | 8 | # This file is required for the codecov CI/CD job. 9 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 
5 | # https://github.com/btschwertfeger 6 | # 7 | 8 | """Module providing fixtures for testing.""" 9 | 10 | from __future__ import annotations 11 | 12 | import os 13 | from typing import Any 14 | 15 | import pytest 16 | import xarray as xr 17 | from click.testing import CliRunner 18 | from dask.distributed import LocalCluster 19 | 20 | from .helper import get_datasets 21 | 22 | FIXTURE_DIR: str = os.path.join(os.path.dirname(__file__), "fixture") 23 | 24 | 25 | @pytest.fixture 26 | def cli_runner() -> CliRunner: 27 | """Provide a cli-runner for testing the CLI""" 28 | return CliRunner() 29 | 30 | 31 | @pytest.fixture(scope="session") 32 | def dask_cluster() -> Any: 33 | # Create a Dask LocalCluster 34 | cluster = LocalCluster() 35 | 36 | # Create a Dask Client connected to the LocalCluster 37 | client = cluster.get_client() 38 | 39 | # Yield the client, making it available for the tests 40 | yield client 41 | 42 | # After the tests are done, close the cluster 43 | client.close() 44 | 45 | 46 | @pytest.fixture 47 | def datasets() -> dict: 48 | obsh_add, obsp_add, simh_add, simp_add = get_datasets(kind="+") 49 | obsh_mult, obsp_mult, simh_mult, simp_mult = get_datasets(kind="*") 50 | 51 | return { 52 | "+": { 53 | "obsh": obsh_add["+"], 54 | "obsp": obsp_add["+"], 55 | "simh": simh_add["+"], 56 | "simp": simp_add["+"], 57 | }, 58 | "*": { 59 | "obsh": obsh_mult["*"], 60 | "obsp": obsp_mult["*"], 61 | "simh": simh_mult["*"], 62 | "simp": simp_mult["*"], 63 | }, 64 | } 65 | 66 | 67 | @pytest.fixture 68 | def datasets_from_zarr() -> dict: 69 | return { 70 | "+": { 71 | "obsh": xr.open_zarr( 72 | os.path.join(FIXTURE_DIR, "temperature_obsh.zarr"), 73 | ).chunk({"time": -1}), 74 | "obsp": xr.open_zarr( 75 | os.path.join(FIXTURE_DIR, "temperature_obsp.zarr"), 76 | ).chunk({"time": -1}), 77 | "simh": xr.open_zarr( 78 | os.path.join(FIXTURE_DIR, "temperature_simh.zarr"), 79 | ).chunk({"time": -1}), 80 | "simp": xr.open_zarr( 81 | os.path.join(FIXTURE_DIR, 
"temperature_simp.zarr"), 82 | ).chunk({"time": -1}), 83 | }, 84 | "*": { 85 | "obsh": xr.open_zarr( 86 | os.path.join(FIXTURE_DIR, "precipitation_obsh.zarr"), 87 | ).chunk({"time": -1}), 88 | "obsp": xr.open_zarr( 89 | os.path.join(FIXTURE_DIR, "precipitation_obsp.zarr"), 90 | ).chunk({"time": -1}), 91 | "simh": xr.open_zarr( 92 | os.path.join(FIXTURE_DIR, "precipitation_simh.zarr"), 93 | ).chunk({"time": -1}), 94 | "simp": xr.open_zarr( 95 | os.path.join(FIXTURE_DIR, "precipitation_simp.zarr"), 96 | ).chunk({"time": -1}), 97 | }, 98 | } 99 | -------------------------------------------------------------------------------- /tests/fixture/precipitation_obsh.zarr/.zattrs: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /tests/fixture/precipitation_obsh.zarr/.zgroup: -------------------------------------------------------------------------------- 1 | { 2 | "zarr_format": 2 3 | } -------------------------------------------------------------------------------- /tests/fixture/precipitation_obsh.zarr/.zmetadata: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | ".zattrs": {}, 4 | ".zgroup": { 5 | "zarr_format": 2 6 | }, 7 | "lat/.zarray": { 8 | "chunks": [ 9 | 4 10 | ], 11 | "compressor": { 12 | "blocksize": 0, 13 | "clevel": 5, 14 | "cname": "lz4", 15 | "id": "blosc", 16 | "shuffle": 1 17 | }, 18 | "dtype": " bool: 20 | return np.sqrt(mean_squared_error(result, obsp)) < np.sqrt( 21 | mean_squared_error(simp, obsp), 22 | ) 23 | 24 | 25 | def is_3d_rmse_better(result, obsp, simp) -> bool: 26 | result_reshaped = result.stack(z=("lat", "lon")) 27 | obsp_reshaped = obsp.stack(z=("lat", "lon")) 28 | simp_reshaped = simp.stack(z=("lat", "lon")) 29 | 30 | # Compute RMSE 31 | rmse_values_old = np.sqrt( 32 | mean_squared_error(simp_reshaped, obsp_reshaped, multioutput="raw_values"), 33 | ) 34 | 
rmse_values_new = np.sqrt( 35 | mean_squared_error(result_reshaped, obsp_reshaped, multioutput="raw_values"), 36 | ) 37 | # Convert the flattened array back to the original grid shape 38 | rmse_values_old_ds = xr.DataArray( 39 | rmse_values_old.reshape(obsp.lat.size, obsp.lon.size), 40 | coords={"lat": obsp.lat, "lon": obsp.lon}, 41 | dims=["lat", "lon"], 42 | ) 43 | rmse_values_new_ds = xr.DataArray( 44 | rmse_values_new.reshape(obsp.lat.size, obsp.lon.size), 45 | coords={"lat": obsp.lat, "lon": obsp.lon}, 46 | dims=["lat", "lon"], 47 | ) 48 | return (rmse_values_new_ds < rmse_values_old_ds).all() 49 | 50 | 51 | @lru_cache(maxsize=None) 52 | def get_datasets(kind: str) -> tuple[xr.Dataset, xr.Dataset, xr.Dataset, xr.Dataset]: 53 | historical_time = xr.cftime_range( 54 | "1971-01-01", 55 | "2000-12-31", 56 | freq="D", 57 | calendar="noleap", 58 | ) 59 | future_time = xr.cftime_range( 60 | "2001-01-01", 61 | "2030-12-31", 62 | freq="D", 63 | calendar="noleap", 64 | ) 65 | latitudes = np.arange(23, 27, 1) 66 | 67 | def get_hist_temp_for_lat(lat: int) -> list[float]: 68 | """Returns a fake interval time series by latitude value""" 69 | return 273.15 - ( 70 | lat * np.cos(2 * np.pi * historical_time.dayofyear / 365) 71 | + 2 * np.random.random_sample((historical_time.size,)) 72 | + 273.15 73 | + 0.1 * (historical_time - historical_time[0]).days / 365 74 | ) 75 | 76 | def get_fake_hist_precipitation_data() -> list[float]: 77 | """Returns ratio based fake time series""" 78 | pr = ( 79 | np.cos(2 * np.pi * historical_time.dayofyear / 365) 80 | * np.cos(2 * np.pi * historical_time.dayofyear / 365) 81 | * np.random.random_sample((historical_time.size,)) 82 | ) 83 | 84 | pr *= 0.0004 / pr.max() # scaling 85 | years = 30 86 | days_without_rain_per_year = 239 87 | 88 | c = days_without_rain_per_year * years # avoid rain every day 89 | pr.ravel()[np.random.choice(pr.size, c, replace=False)] = 0 90 | return pr 91 | 92 | def get_dataset(data, time, kind: str) -> xr.Dataset: 93 | 
"""Returns a data set by data and time""" 94 | return ( 95 | xr.DataArray( 96 | data, 97 | dims=("lon", "lat", "time"), 98 | coords={"time": time, "lat": latitudes, "lon": [0, 1, 3]}, 99 | ) 100 | .transpose("time", "lat", "lon") 101 | .to_dataset(name=kind) 102 | ) 103 | 104 | if kind == "+": # noqa: PLR2004 105 | some_data = [get_hist_temp_for_lat(val) for val in latitudes] 106 | data = np.array( 107 | [ 108 | np.array(some_data), 109 | np.array(some_data) + 0.5, 110 | np.array(some_data) + 1, 111 | ], 112 | ) 113 | obsh = get_dataset(data, historical_time, kind=kind) 114 | obsp = get_dataset(data + 1, historical_time, kind=kind) 115 | simh = get_dataset(data - 2, historical_time, kind=kind) 116 | simp = get_dataset(data - 1, future_time, kind=kind) 117 | 118 | else: # precipitation 119 | some_data = [get_fake_hist_precipitation_data() for _ in latitudes] 120 | data = np.array( 121 | [some_data, np.array(some_data) + np.random.rand(), np.array(some_data)], 122 | ) 123 | obsh = get_dataset(data, historical_time, kind=kind) 124 | obsp = get_dataset(data * 1.02, historical_time, kind=kind) 125 | simh = get_dataset(data * 0.95, historical_time, kind=kind) 126 | simp = get_dataset(data * 0.965, future_time, kind=kind) 127 | 128 | return obsh, obsp, simh, simp 129 | -------------------------------------------------------------------------------- /tests/test_cli.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2024 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 
5 | # https://github.com/btschwertfeger 6 | # 7 | 8 | """Module implementing the tests regarding the CLI""" 9 | 10 | from __future__ import annotations 11 | 12 | from typing import TYPE_CHECKING 13 | 14 | import pytest 15 | 16 | if TYPE_CHECKING: 17 | from click.testing import CliRunner 18 | import logging 19 | import os 20 | from pathlib import Path 21 | from tempfile import TemporaryDirectory 22 | 23 | from cmethods import cli 24 | 25 | 26 | @pytest.mark.parametrize( 27 | ("method", "kind", "exclusive"), 28 | [ 29 | ("linear_scaling", "+", "--group=time.month"), 30 | ("linear_scaling", "*", "--group=time.month"), 31 | ("variance_scaling", "+", "--group=time.month"), 32 | ("delta_method", "+", "--group=time.month"), 33 | ("delta_method", "*", "--group=time.month"), 34 | ("quantile_mapping", "+", "--quantiles=100"), 35 | ("quantile_mapping", "*", "--quantiles=100"), 36 | ("quantile_delta_mapping", "+", "--quantiles=100"), 37 | ("quantile_delta_mapping", "*", "--quantiles=100"), 38 | ], 39 | ) 40 | def test_cli_runner( 41 | method: str, 42 | kind: str, 43 | exclusive: str, 44 | cli_runner: CliRunner, 45 | caplog: pytest.LogCaptureFixture, 46 | ) -> None: 47 | """Test checking the command-line interface.""" 48 | logging.root.setLevel(logging.DEBUG) 49 | with TemporaryDirectory() as tmp_dir: 50 | output = f"{os.path.join(tmp_dir, method)}.nc" 51 | cmd: list[str] = [ 52 | f"--obs={os.path.join('examples', 'input_data', 'observations.nc')}", 53 | f"--simh={os.path.join('examples', 'input_data', 'control.nc')}", 54 | f"--simp={os.path.join('examples', 'input_data', 'scenario.nc')}", 55 | f"--method={method}", 56 | f"--kind={kind}", 57 | "--variable=tas", 58 | exclusive, 59 | f"--output={output}", 60 | ] 61 | result = cli_runner.invoke(cli, cmd) 62 | assert result.exit_code == 0, result.exception 63 | assert Path(output).is_file() 64 | 65 | for phrase in ( 66 | "Loading data sets ...", 67 | "Data sets loaded ...", 68 | f"Applying {method} ...", 69 | f"Saving result to 
{output}", 70 | ): 71 | assert phrase in caplog.text 72 | 73 | 74 | def test_cli_runner_missing_variable( 75 | cli_runner: CliRunner, 76 | caplog: pytest.LogCaptureFixture, 77 | ) -> None: 78 | """ 79 | Test checking the command-line interface for failure due to missing variable 80 | in data set. 81 | """ 82 | logging.root.setLevel(logging.DEBUG) 83 | with TemporaryDirectory() as tmp_dir: 84 | output = f"{os.path.join(tmp_dir, 'linear_scaling.nc')}" 85 | cmd: list[str] = [ 86 | f"--obs={os.path.join('examples', 'input_data', 'observations.nc')}", 87 | f"--simh={os.path.join('examples', 'input_data', 'control.nc')}", 88 | f"--simp={os.path.join('examples', 'input_data', 'scenario.nc')}", 89 | "--method=linear_scaling", 90 | "--kind=add", 91 | "--variable=proc", 92 | "--group=time.month", 93 | f"--output={output}", 94 | ] 95 | result = cli_runner.invoke(cli, cmd) 96 | assert result.exit_code == 1, result.exception 97 | assert not Path(output).is_file() 98 | 99 | for phrase in ( 100 | "Loading data sets ...", 101 | "Variable 'proc' is missing in the observation data set", 102 | ): 103 | assert phrase in caplog.text 104 | -------------------------------------------------------------------------------- /tests/test_methods.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | 8 | """ 9 | Module implementing the unit tests for all implemented bias correction 10 | techniques. 
11 | """ 12 | 13 | from __future__ import annotations 14 | 15 | import pytest 16 | 17 | from cmethods import adjust 18 | from cmethods.distribution import detrended_quantile_mapping 19 | from cmethods.types import NPData_t, XRData_t 20 | 21 | from .helper import is_1d_rmse_better, is_3d_rmse_better 22 | 23 | GROUP: str = "time.month" 24 | N_QUANTILES: int = 100 25 | 26 | 27 | @pytest.mark.parametrize( 28 | ("method", "kind"), 29 | [ 30 | ("linear_scaling", "+"), 31 | ("linear_scaling", "*"), 32 | ("variance_scaling", "+"), 33 | ("delta_method", "+"), 34 | ("delta_method", "*"), 35 | ], 36 | ) 37 | def test_1d_scaling( 38 | datasets: dict, 39 | method: str, 40 | kind: str, 41 | ) -> None: 42 | obsh: XRData_t = datasets[kind]["obsh"][:, 0, 0] 43 | obsp: XRData_t = datasets[kind]["obsp"][:, 0, 0] 44 | simh: XRData_t = datasets[kind]["simh"][:, 0, 0] 45 | simp: XRData_t = datasets[kind]["simp"][:, 0, 0] 46 | 47 | # not group 48 | result: XRData_t = adjust(method=method, obs=obsh, simh=simh, simp=simp, kind=kind) 49 | assert isinstance(result, XRData_t) 50 | assert is_1d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 51 | 52 | # grouped 53 | result = adjust( 54 | method=method, 55 | obs=obsh, 56 | simh=simh, 57 | simp=simp, 58 | kind=kind, 59 | group=GROUP, 60 | ) 61 | assert isinstance(result, XRData_t) 62 | assert is_1d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 63 | 64 | 65 | @pytest.mark.parametrize( 66 | ("method", "kind"), 67 | [ 68 | ("linear_scaling", "+"), 69 | ("linear_scaling", "*"), 70 | ("variance_scaling", "+"), 71 | ("delta_method", "+"), 72 | ("delta_method", "*"), 73 | ], 74 | ) 75 | def test_3d_scaling( 76 | datasets: dict, 77 | method: str, 78 | kind: str, 79 | ) -> None: 80 | obsh: XRData_t = datasets[kind]["obsh"] 81 | obsp: XRData_t = datasets[kind]["obsp"] 82 | simh: XRData_t = datasets[kind]["simh"] 83 | simp: XRData_t = datasets[kind]["simp"] 84 | 85 | # not grouped 86 | result: XRData_t = adjust( 87 | method=method, 88 | 
obs=obsh, 89 | simh=simh, 90 | simp=simp, 91 | kind=kind, 92 | ) 93 | 94 | assert isinstance(result, XRData_t) 95 | assert is_3d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 96 | 97 | # grouped 98 | result: XRData_t = adjust( 99 | method=method, 100 | obs=obsh, 101 | simh=simh, 102 | simp=simp, 103 | kind=kind, 104 | group=GROUP, 105 | ) 106 | 107 | assert isinstance(result, XRData_t) 108 | assert is_3d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 109 | 110 | 111 | @pytest.mark.parametrize( 112 | ("method", "kind"), 113 | [ 114 | ("linear_scaling", "+"), 115 | ("linear_scaling", "*"), 116 | ("variance_scaling", "+"), 117 | ], 118 | ) 119 | def test_3d_scaling_different_time_span( 120 | datasets: dict, 121 | method: str, 122 | kind: str, 123 | ) -> None: 124 | obsh: XRData_t = datasets[kind]["obsh"] 125 | obsp: XRData_t = datasets[kind]["obsp"] 126 | simh: XRData_t = datasets[kind]["simh"] 127 | simp: XRData_t = datasets[kind]["simp"] 128 | simh = simh.sel(time=slice(simh.time[1], None)).rename({"time": "t_simh"}) 129 | 130 | time_names = {"obs": "time", "simh": "t_simh", "simp": "time"} 131 | 132 | # not grouped 133 | result: XRData_t = adjust( 134 | method=method, 135 | obs=obsh, 136 | simh=simh, 137 | simp=simp, 138 | kind=kind, 139 | input_core_dims=time_names, 140 | ) 141 | 142 | assert isinstance(result, XRData_t) 143 | assert is_3d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 144 | 145 | # grouped 146 | result: XRData_t = adjust( 147 | method=method, 148 | obs=obsh, 149 | simh=simh, 150 | simp=simp, 151 | kind=kind, 152 | group={"obs": "time.month", "simh": "t_simh.month", "simp": "time.month"}, 153 | input_core_dims=time_names, 154 | ) 155 | 156 | assert isinstance(result, XRData_t) 157 | assert is_3d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 158 | 159 | 160 | @pytest.mark.parametrize( 161 | ("method", "kind"), 162 | [ 163 | ("quantile_mapping", "+"), 164 | ("quantile_mapping", "*"), 165 | ("quantile_delta_mapping", 
"+"), 166 | ("quantile_delta_mapping", "*"), 167 | ], 168 | ) 169 | def test_1d_distribution( 170 | datasets: dict, 171 | method: str, 172 | kind: str, 173 | ) -> None: 174 | obsh: XRData_t = datasets[kind]["obsh"][:, 0, 0] 175 | obsp: XRData_t = datasets[kind]["obsp"][:, 0, 0] 176 | simh: XRData_t = datasets[kind]["simh"][:, 0, 0] 177 | simp: XRData_t = datasets[kind]["simp"][:, 0, 0] 178 | 179 | result: XRData_t = adjust( 180 | method=method, 181 | obs=obsh, 182 | simh=simh, 183 | simp=simp, 184 | kind=kind, 185 | n_quantiles=N_QUANTILES, 186 | ) 187 | 188 | assert isinstance(result, XRData_t) 189 | assert is_1d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 190 | 191 | 192 | @pytest.mark.parametrize( 193 | ("method", "kind"), 194 | [ 195 | ("quantile_mapping", "+"), 196 | ("quantile_mapping", "*"), 197 | ("quantile_delta_mapping", "+"), 198 | ("quantile_delta_mapping", "*"), 199 | ], 200 | ) 201 | def test_3d_distribution( 202 | datasets: dict, 203 | method: str, 204 | kind: str, 205 | ) -> None: 206 | obsh: XRData_t = datasets[kind]["obsh"] 207 | obsp: XRData_t = datasets[kind]["obsp"] 208 | simh: XRData_t = datasets[kind]["simh"] 209 | simp: XRData_t = datasets[kind]["simp"] 210 | 211 | result: XRData_t = adjust( 212 | method=method, 213 | obs=obsh, 214 | simh=simh, 215 | simp=simp, 216 | kind=kind, 217 | n_quantiles=N_QUANTILES, 218 | ) 219 | 220 | assert isinstance(result, XRData_t) 221 | assert is_3d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 222 | 223 | 224 | @pytest.mark.parametrize( 225 | ("method", "kind"), 226 | [ 227 | ("quantile_mapping", "+"), 228 | ("quantile_mapping", "*"), 229 | ("quantile_delta_mapping", "+"), 230 | ("quantile_delta_mapping", "*"), 231 | ], 232 | ) 233 | def test_3d_distribution_different_time_span( 234 | datasets: dict, 235 | method: str, 236 | kind: str, 237 | ) -> None: 238 | obsh: XRData_t = datasets[kind]["obsh"] 239 | obsp: XRData_t = datasets[kind]["obsp"] 240 | simh: XRData_t = datasets[kind]["simh"] 
241 | simp: XRData_t = datasets[kind]["simp"] 242 | 243 | simh = simh.sel(time=slice(simh.time[1], None)).rename({"time": "t_simh"}) 244 | time_names = {"obs": "time", "simh": "t_simh", "simp": "time"} 245 | 246 | result: XRData_t = adjust( 247 | method=method, 248 | obs=obsh, 249 | simh=simh, 250 | simp=simp, 251 | kind=kind, 252 | n_quantiles=N_QUANTILES, 253 | input_core_dims=time_names, 254 | ) 255 | 256 | assert isinstance(result, XRData_t) 257 | assert is_3d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 258 | 259 | 260 | def test_1d_detrended_quantile_mapping_add(datasets: dict) -> None: 261 | kind: str = "+" 262 | obsh: XRData_t = datasets[kind]["obsh"][:, 0, 0] 263 | obsp: XRData_t = datasets[kind]["obsp"][:, 0, 0] 264 | simh: XRData_t = datasets[kind]["simh"][:, 0, 0] 265 | simp: XRData_t = datasets[kind]["simp"][:, 0, 0] 266 | 267 | # not group 268 | result: XRData_t = detrended_quantile_mapping( 269 | obs=obsh, 270 | simh=simh, 271 | simp=simp, 272 | kind=kind, 273 | n_quantiles=N_QUANTILES, 274 | ) 275 | assert isinstance(result, NPData_t) 276 | assert is_1d_rmse_better(result=result, obsp=obsp, simp=simp) 277 | 278 | 279 | def test_1d_detrended_quantile_mapping_mult(datasets: dict) -> None: 280 | kind: str = "*" 281 | obsh: XRData_t = datasets[kind]["obsh"][:, 0, 0] 282 | obsp: XRData_t = datasets[kind]["obsp"][:, 0, 0] 283 | simh: XRData_t = datasets[kind]["simh"][:, 0, 0] 284 | simp: XRData_t = datasets[kind]["simp"][:, 0, 0] 285 | 286 | # not group 287 | result: XRData_t = detrended_quantile_mapping( 288 | obs=obsh, 289 | simh=simh, 290 | simp=simp, 291 | kind=kind, 292 | n_quantiles=N_QUANTILES, 293 | ) 294 | assert isinstance(result, NPData_t) 295 | assert is_1d_rmse_better(result=result, obsp=obsp, simp=simp) 296 | -------------------------------------------------------------------------------- /tests/test_methods_different_input_shape.py: -------------------------------------------------------------------------------- 1 | # -*- mode: 
python; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2024 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | 8 | """ 9 | Module implementing the unit tests that check if the input data sets can have 10 | different shapes. 11 | 12 | TODO: Remove the copy-paste stuff here. That could be done way simpler. 13 | """ 14 | 15 | from __future__ import annotations 16 | 17 | import pytest 18 | 19 | from cmethods import adjust 20 | from cmethods.types import XRData_t 21 | 22 | from .helper import is_1d_rmse_better 23 | 24 | pytestmark = [pytest.mark.flaky] 25 | 26 | N_QUANTILES: int = 100 27 | 28 | 29 | @pytest.mark.parametrize( 30 | ("method", "kind"), 31 | [ 32 | ("linear_scaling", "+"), 33 | ("linear_scaling", "*"), 34 | ("variance_scaling", "+"), 35 | ], 36 | ) 37 | def test_1d_scaling_obs_shorter( 38 | datasets: dict, 39 | method: str, 40 | kind: str, 41 | ) -> None: 42 | obsh: XRData_t = datasets[kind]["obsh"][:7300, 0, 0].rename({"time": "t_time"}) # 20/30 years 43 | obsp: XRData_t = datasets[kind]["obsp"][:, 0, 0] 44 | simh: XRData_t = datasets[kind]["simh"][:, 0, 0] 45 | simp: XRData_t = datasets[kind]["simp"][:, 0, 0] 46 | 47 | # not group 48 | result: XRData_t = adjust( 49 | method=method, 50 | obs=obsh, 51 | simh=simh, 52 | simp=simp, 53 | kind=kind, 54 | input_core_dims={"obs": "t_time", "simh": "time", "simp": "time"}, 55 | ) 56 | assert isinstance(result, XRData_t) 57 | assert is_1d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 58 | 59 | # grouped 60 | result = adjust( 61 | method=method, 62 | obs=obsh, 63 | simh=simh, 64 | simp=simp, 65 | kind=kind, 66 | group={"obs": "t_time.month", "simh": "time.month", "simp": "time.month"}, 67 | input_core_dims={"obs": "t_time", "simh": "time", "simp": "time"}, 68 | ) 69 | assert isinstance(result, XRData_t) 70 | assert is_1d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 71 | 72 | 73 | @pytest.mark.parametrize( 74 | ("method", "kind"), 75 | [ 76 | 
("linear_scaling", "+"), 77 | ("linear_scaling", "*"), 78 | ("delta_method", "+"), 79 | ("delta_method", "*"), 80 | ("variance_scaling", "+"), 81 | ], 82 | ) 83 | def test_1d_scaling_simh_shorter( 84 | datasets: dict, 85 | method: str, 86 | kind: str, 87 | ) -> None: 88 | obsh: XRData_t = datasets[kind]["obsh"][:, 0, 0] 89 | obsp: XRData_t = datasets[kind]["obsp"][:, 0, 0] 90 | simh: XRData_t = datasets[kind]["simh"][:7300, 0, 0].rename({"time": "t_time"}) # 20/30 years 91 | simp: XRData_t = datasets[kind]["simp"][:, 0, 0] 92 | 93 | # not group 94 | result: XRData_t = adjust( 95 | method=method, 96 | obs=obsh, 97 | simh=simh, 98 | simp=simp, 99 | kind=kind, 100 | input_core_dims={"obs": "time", "simh": "t_time", "simp": "time"}, 101 | ) 102 | assert isinstance(result, XRData_t) 103 | assert is_1d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 104 | 105 | # grouped 106 | result = adjust( 107 | method=method, 108 | obs=obsh, 109 | simh=simh, 110 | simp=simp, 111 | kind=kind, 112 | group={"obs": "time.month", "simh": "t_time.month", "simp": "time.month"}, 113 | input_core_dims={"obs": "time", "simh": "t_time", "simp": "time"}, 114 | ) 115 | assert isinstance(result, XRData_t) 116 | assert is_1d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 117 | 118 | 119 | @pytest.mark.parametrize( 120 | ("method", "kind"), 121 | [ 122 | ("linear_scaling", "+"), 123 | ("linear_scaling", "*"), 124 | ("variance_scaling", "+"), 125 | ], 126 | ) 127 | def test_1d_scaling_simp_shorter( 128 | datasets: dict, 129 | method: str, 130 | kind: str, 131 | ) -> None: 132 | obsh: XRData_t = datasets[kind]["obsh"][:, 0, 0] 133 | obsp: XRData_t = datasets[kind]["obsp"][:7300, 0, 0].rename({"time": "t_time"}) # 20/30 years 134 | simh: XRData_t = datasets[kind]["simh"][:, 0, 0] 135 | simp: XRData_t = datasets[kind]["simp"][:7300, 0, 0].rename({"time": "t_time"}) # 20/30 years 136 | 137 | # not group 138 | result: XRData_t = adjust( 139 | method=method, 140 | obs=obsh, 141 | simh=simh, 
142 | simp=simp, 143 | kind=kind, 144 | input_core_dims={"obs": "time", "simh": "time", "simp": "t_time"}, 145 | ) 146 | assert isinstance(result, XRData_t) 147 | assert is_1d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 148 | 149 | # grouped 150 | result = adjust( 151 | method=method, 152 | obs=obsh, 153 | simh=simh, 154 | simp=simp, 155 | kind=kind, 156 | group={"obs": "time.month", "simh": "time.month", "simp": "t_time.month"}, 157 | input_core_dims={"obs": "time", "simh": "time", "simp": "t_time"}, 158 | ) 159 | assert isinstance(result, XRData_t) 160 | assert is_1d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 161 | 162 | 163 | # --------------------------------------------------------------------------- 164 | 165 | 166 | @pytest.mark.parametrize( 167 | ("method", "kind"), 168 | [ 169 | ("quantile_mapping", "+"), 170 | ("quantile_mapping", "*"), 171 | ("quantile_delta_mapping", "+"), 172 | ("quantile_delta_mapping", "*"), 173 | ], 174 | ) 175 | def test_1d_distribution_obs_shorter( 176 | datasets: dict, 177 | method: str, 178 | kind: str, 179 | ) -> None: 180 | obsh: XRData_t = datasets[kind]["obsh"][:7300, 0, 0].rename({"time": "t_time"}) # 20/30 years 181 | obsp: XRData_t = datasets[kind]["obsp"][:, 0, 0] 182 | simh: XRData_t = datasets[kind]["simh"][:, 0, 0] 183 | simp: XRData_t = datasets[kind]["simp"][:, 0, 0] 184 | 185 | result: XRData_t = adjust( 186 | method=method, 187 | obs=obsh, 188 | simh=simh, 189 | simp=simp, 190 | kind=kind, 191 | n_quantiles=N_QUANTILES, 192 | input_core_dims={"obs": "t_time", "simh": "time", "simp": "time"}, 193 | ) 194 | 195 | assert isinstance(result, XRData_t) 196 | assert is_1d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 197 | 198 | 199 | @pytest.mark.parametrize( 200 | ("method", "kind"), 201 | [ 202 | ("quantile_mapping", "+"), 203 | ("quantile_mapping", "*"), 204 | ("quantile_delta_mapping", "+"), 205 | ("quantile_delta_mapping", "*"), 206 | ], 207 | ) 208 | def 
test_1d_distribution_simh_shorter( 209 | datasets: dict, 210 | method: str, 211 | kind: str, 212 | ) -> None: 213 | obsh: XRData_t = datasets[kind]["obsh"][:, 0, 0] 214 | obsp: XRData_t = datasets[kind]["obsp"][:, 0, 0] 215 | simh: XRData_t = datasets[kind]["simh"][:7300, 0, 0].rename({"time": "t_time"}) # 20/30 years 216 | simp: XRData_t = datasets[kind]["simp"][:, 0, 0] 217 | 218 | result: XRData_t = adjust( 219 | method=method, 220 | obs=obsh, 221 | simh=simh, 222 | simp=simp, 223 | kind=kind, 224 | n_quantiles=N_QUANTILES, 225 | input_core_dims={"obs": "time", "simh": "t_time", "simp": "time"}, 226 | ) 227 | 228 | assert isinstance(result, XRData_t) 229 | assert is_1d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 230 | 231 | 232 | @pytest.mark.parametrize( 233 | ("method", "kind"), 234 | [ 235 | ("quantile_mapping", "+"), 236 | ("quantile_mapping", "*"), 237 | ("quantile_delta_mapping", "+"), 238 | ("quantile_delta_mapping", "*"), 239 | ], 240 | ) 241 | def test_1d_distribution_simp_shorter( 242 | datasets: dict, 243 | method: str, 244 | kind: str, 245 | ) -> None: 246 | obsh: XRData_t = datasets[kind]["obsh"][:, 0, 0] 247 | obsp: XRData_t = datasets[kind]["obsp"][:7300, 0, 0].rename({"time": "t_time"}) # 20/30 years 248 | simh: XRData_t = datasets[kind]["simh"][:, 0, 0] 249 | simp: XRData_t = datasets[kind]["simp"][:7300, 0, 0].rename({"time": "t_time"}) # 20/30 years 250 | 251 | result: XRData_t = adjust( 252 | method=method, 253 | obs=obsh, 254 | simh=simh, 255 | simp=simp, 256 | kind=kind, 257 | n_quantiles=N_QUANTILES, 258 | input_core_dims={"obs": "time", "simh": "time", "simp": "t_time"}, 259 | ) 260 | 261 | assert isinstance(result, XRData_t) 262 | assert is_1d_rmse_better(result=result[kind], obsp=obsp, simp=simp) 263 | -------------------------------------------------------------------------------- /tests/test_misc.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 
| # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | 8 | """Module implementing even more tests""" 9 | 10 | from __future__ import annotations 11 | 12 | import logging 13 | import re 14 | from typing import Any 15 | 16 | import numpy as np 17 | import pytest 18 | 19 | from cmethods import adjust 20 | from cmethods.distribution import ( 21 | detrended_quantile_mapping, 22 | quantile_delta_mapping, 23 | quantile_mapping, 24 | ) 25 | from cmethods.scaling import delta_method, linear_scaling, variance_scaling 26 | 27 | 28 | def test_not_implemented_errors( 29 | datasets: dict, 30 | caplog: Any, 31 | ) -> None: 32 | caplog.set_level(logging.INFO) 33 | 34 | with ( 35 | pytest.raises( 36 | NotImplementedError, 37 | match=re.escape(r"kind='/' not available for linear_scaling."), 38 | ), 39 | pytest.warns(UserWarning, match="Do not call linear_scaling"), 40 | ): 41 | linear_scaling(obs=[], simh=[], simp=[], kind="/") 42 | 43 | with ( 44 | pytest.raises( 45 | NotImplementedError, 46 | match=re.escape(r"kind='/' not available for variance_scaling."), 47 | ), 48 | pytest.warns(UserWarning, match="Do not call variance_scaling"), 49 | ): 50 | variance_scaling(obs=[], simh=[], simp=[], kind="/") 51 | 52 | with ( 53 | pytest.raises( 54 | NotImplementedError, 55 | match=re.escape(r"kind='/' not available for delta_method. 
"), 56 | ), 57 | pytest.warns(UserWarning, match="Do not call delta_method"), 58 | ): 59 | delta_method(obs=[], simh=[], simp=[], kind="/") 60 | 61 | with ( 62 | pytest.raises( 63 | NotImplementedError, 64 | match=re.escape(r"kind='/' for quantile_mapping is not available."), 65 | ), 66 | pytest.warns(UserWarning, match="Do not call quantile_mapping"), 67 | ): 68 | quantile_mapping( 69 | obs=np.array(datasets["+"]["obsh"][:, 0, 0]), 70 | simh=np.array(datasets["+"]["simh"][:, 0, 0]), 71 | simp=np.array(datasets["+"]["simp"][:, 0, 0]), 72 | kind="/", 73 | n_quantiles=100, 74 | ) 75 | with pytest.raises( 76 | NotImplementedError, 77 | match=re.escape(r"kind='/' for detrended_quantile_mapping is not available."), 78 | ): 79 | detrended_quantile_mapping( 80 | obs=np.array(datasets["+"]["obsh"][:, 0, 0]), 81 | simh=np.array(datasets["+"]["simh"][:, 0, 0]), 82 | simp=np.array(datasets["+"]["simp"][:, 0, 0]), 83 | kind="/", 84 | n_quantiles=100, 85 | ) 86 | 87 | with ( 88 | pytest.raises( 89 | NotImplementedError, 90 | match=re.escape(r"kind='/' not available for quantile_delta_mapping."), 91 | ), 92 | pytest.warns(UserWarning, match="Do not call quantile_delta_mapping"), 93 | ): 94 | quantile_delta_mapping( 95 | obs=np.array(datasets["+"]["obsh"][:, 0, 0]), 96 | simh=np.array(datasets["+"]["simh"][:, 0, 0]), 97 | simp=np.array(datasets["+"]["simp"][:, 0, 0]), 98 | kind="/", 99 | n_quantiles=100, 100 | ) 101 | 102 | 103 | def test_adjust_failing_dqm(datasets: dict) -> None: 104 | with pytest.raises( 105 | ValueError, 106 | match=r"This function is not available for detrended quantile mapping. 
" 107 | "Please use cmethods.CMethods.detrended_quantile_mapping", 108 | ): 109 | adjust( 110 | method="detrended_quantile_mapping", 111 | obs=datasets["+"]["obsh"][:, 0, 0], 112 | simh=datasets["+"]["simh"][:, 0, 0], 113 | simp=datasets["+"]["simp"][:, 0, 0], 114 | kind="/", 115 | n_quantiles=100, 116 | ) 117 | 118 | 119 | def test_adjust_failing_no_group_for_distribution(datasets: dict) -> None: 120 | with pytest.raises( 121 | ValueError, 122 | match=r"Can't use group for distribution based methods.", 123 | ): 124 | adjust( 125 | method="quantile_mapping", 126 | obs=datasets["+"]["obsh"][:, 0, 0], 127 | simh=datasets["+"]["simh"][:, 0, 0], 128 | simp=datasets["+"]["simp"][:, 0, 0], 129 | kind="/", 130 | n_quantiles=100, 131 | group="time.month", 132 | ) 133 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | # -*- mode: python; coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2023 Benjamin Thomas Schwertfeger 4 | # All rights reserved. 5 | # https://github.com/btschwertfeger 6 | # 7 | 8 | """ 9 | Module to to test utility functions for the CMethods package 10 | 11 | Data types are ignored for simplicity. 
12 | """ 13 | 14 | import re 15 | 16 | import numpy as np 17 | import pytest 18 | import xarray as xr 19 | 20 | from cmethods import adjust 21 | from cmethods.distribution import ( 22 | detrended_quantile_mapping, 23 | quantile_delta_mapping, 24 | quantile_mapping, 25 | ) 26 | from cmethods.static import MAX_SCALING_FACTOR 27 | from cmethods.utils import ( 28 | check_np_types, 29 | ensure_dividable, 30 | ensure_xr_dataarray, 31 | get_adjusted_scaling_factor, 32 | get_pdf, 33 | nan_or_equal, 34 | ) 35 | 36 | 37 | # -------------------------------------------------------------------------- 38 | # test for nan values 39 | @pytest.mark.filterwarnings("ignore:Do not call quantile_mapping directly") 40 | def test_quantile_mapping_single_nan() -> None: 41 | obs, simh, simp = list(np.arange(10)), list(np.arange(10)), list(np.arange(10)) 42 | obs[0] = np.nan 43 | expected = np.array([0.0, 1.8, 2.7, 3.6, 4.5, 5.4, 6.3, 7.2, 8.1, 9.0]) 44 | 45 | res = quantile_mapping(obs=obs, simh=simh, simp=simp, n_quantiles=5) 46 | assert np.allclose(res, expected), res 47 | 48 | 49 | @pytest.mark.filterwarnings("ignore:All-NaN slice encountered") 50 | @pytest.mark.filterwarnings("ignore:Do not call quantile_mapping directly") 51 | def test_quantile_mapping_all_nan() -> None: 52 | obs, simh, simp = ( 53 | list(np.full(10, np.nan)), 54 | list(np.arange(10)), 55 | list(np.arange(10)), 56 | ) 57 | res = quantile_mapping(obs=obs, simh=simh, simp=simp, n_quantiles=5) 58 | assert np.allclose(res, simp) 59 | 60 | 61 | @pytest.mark.filterwarnings("ignore:Do not call quantile_delta_mapping directly") 62 | def test_quantile_delta_mapping_single_nan() -> None: 63 | obs, simh, simp = list(np.arange(10)), list(np.arange(10)), list(np.arange(10)) 64 | obs[0] = np.nan 65 | expected = np.array([0.0, 1.8, 2.7, 3.6, 4.5, 5.4, 6.3, 7.2, 8.1, 9.0]) 66 | 67 | res = quantile_delta_mapping(obs=obs, simh=simh, simp=simp, n_quantiles=5) 68 | assert np.allclose(res, expected) 69 | 70 | 71 | 
@pytest.mark.filterwarnings("ignore:All-NaN slice encountered")
@pytest.mark.filterwarnings("ignore:Do not call quantile_delta_mapping directly")
def test_quantile_delta_mapping_all_nan() -> None:
    """With an all-NaN obs series the simulated data comes back unchanged."""
    obs, simh, simp = (
        list(np.full(10, np.nan)),
        list(np.arange(10)),
        list(np.arange(10)),
    )
    res = quantile_delta_mapping(obs=obs, simh=simh, simp=simp, n_quantiles=5)
    assert np.allclose(res, simp)


# --------------------------------------------------------------------------
# test utils


def test_nan_or_equal() -> None:
    """nan_or_equal treats two NaN values as equal."""
    assert nan_or_equal(0, 0)
    assert nan_or_equal(np.nan, np.nan)
    assert not nan_or_equal(0, 1)


def test_get_pdf() -> None:
    """get_pdf counts how many samples fall into each bin."""
    assert (get_pdf(np.arange(10), [0, 5, 11]) == np.array((5, 5))).all()


def test_get_adjusted_scaling_factor() -> None:
    """The four asserts pin the magnitude-limiting behavior of the factor."""
    assert get_adjusted_scaling_factor(10, 5) == 5
    assert get_adjusted_scaling_factor(10, 11) == 10
    assert get_adjusted_scaling_factor(-10, -11) == -10
    assert get_adjusted_scaling_factor(-11, -10) == -10


def test_ensure_devidable() -> None:
    # NOTE(review): function name carries a "devidable" typo; kept so the
    # collected test ID stays stable.
    """Division by zero is replaced via MAX_SCALING_FACTOR instead of inf."""
    assert np.array_equal(
        ensure_dividable(
            np.array((1, 2, 3, 4, 5, 0)),
            np.array((0, 1, 0, 2, 3, 0)),
            MAX_SCALING_FACTOR,
        ),
        np.array((10, 2, 30, 2, 5 / 3, 0)),
    )


# --------------------------------------------------------------------------
# test type checking related functions
# For most of them only one part of the check is tested, since other tests
# are already covering the functionality and correctness of functions using
# valid values.


def test_np_type_check() -> None:
    """
    Checks the correctness of the type checking function when the types are
    correct. No error should occur.
    """
    check_np_types(obs=[], simh=[], simp=[])


def test_xr_type_check() -> None:
    """
    Checks the correctness of the type checking function when the types are
    correct. No error should occur.
    """
    ds: xr.core.dataarray.Dataset = xr.core.dataarray.Dataset()
    ensure_xr_dataarray(obs=ds, simh=ds, simp=ds)


def test_type_check_failing() -> None:
    """
    Checks the correctness of the type checking function when the inputs do
    not have the correct type.
    """
    phrase: str = "must be type list, np.ndarray, or np.generic"
    with pytest.raises(TypeError, match=f"'obs' {phrase}"):
        check_np_types(obs=1, simh=[], simp=[])

    with pytest.raises(TypeError, match=f"'simh' {phrase}"):
        check_np_types(obs=[], simh=1, simp=[])

    with pytest.raises(TypeError, match=f"'simp' {phrase}"):
        check_np_types(obs=[], simh=[], simp=1)


@pytest.mark.filterwarnings("ignore:Do not call quantile_mapping directly")
def test_quantile_mapping_type_check_n_quantiles_failing() -> None:
    """'n_quantiles' must be of type int."""
    with pytest.raises(TypeError, match="'n_quantiles' must be type int"):
        quantile_mapping(obs=[], simh=[], simp=[], n_quantiles="100")


def test_detrended_quantile_mapping_type_check_n_quantiles_failing(
    datasets: dict,
) -> None:
    """'n_quantiles' must be of type int."""
    with pytest.raises(TypeError, match=re.escape("'n_quantiles' must be type int")):
        detrended_quantile_mapping(  # type: ignore[attr-defined]
            obs=datasets["+"]["obsh"][:, 0, 0],
            simh=datasets["+"]["simh"][:, 0, 0],
            simp=datasets["+"]["simp"][:, 0, 0],
            n_quantiles="100",
        )


def test_detrended_quantile_mapping_type_check_simp_failing(datasets: dict) -> None:
    """'simp' must be an xarray DataArray."""
    # (docstring fixed: it previously claimed to check 'n_quantiles')
    with pytest.raises(
        TypeError,
        match=r"'simp' must be type xarray.core.dataarray.DataArray",
    ):
        detrended_quantile_mapping(  # type: ignore[attr-defined]
            obs=datasets["+"]["obsh"][:, 0, 0],
            simh=datasets["+"]["simh"][:, 0, 0],
            simp=[],
            n_quantiles=100,
        )


@pytest.mark.filterwarnings("ignore:Do not call quantile_delta_mapping directly")
def test_quantile_delta_mapping_type_check_n_quantiles() -> None:
    """'n_quantiles' must be of type int."""
    # NOTE(review): identical to the *_failing variant below; kept so the
    # collected test IDs stay stable — consider consolidating.
    with pytest.raises(TypeError, match="'n_quantiles' must be type int"):
        quantile_delta_mapping(  # type: ignore[attr-defined]
            obs=[],
            simh=[],
            simp=[],
            n_quantiles="100",
        )


@pytest.mark.filterwarnings("ignore:Do not call quantile_delta_mapping directly")
def test_quantile_delta_mapping_type_check_n_quantiles_failing() -> None:
    """'n_quantiles' must be of type int."""
    with pytest.raises(TypeError, match="'n_quantiles' must be type int"):
        quantile_delta_mapping(  # type: ignore[attr-defined]
            obs=[],
            simh=[],
            simp=[],
            n_quantiles="100",
        )


def test_adjust_type_checking_failing() -> None:
    """
    Checks that adjust() rejects non-DataArray values for each of the three
    data arguments.
    """
    data: xr.core.dataarray.DataArray = xr.DataArray(
        [10, 20, 30, 40, 50],
        dims=["time"],
    )
    # Replace one argument at a time with an invalid value (a plain list).
    for argument in ("obs", "simh", "simp"):
        kwargs = {"obs": data, "simh": data, "simp": data, argument: []}
        with pytest.raises(
            TypeError,
            match=rf"'{argument}' must be type 'xarray.core.dataarray.DataArray'.",
        ):
            adjust(method="linear_scaling", group="time.month", **kwargs)


# -*- mode: python; coding: utf-8 -*-
#
# Copyright (C) 2024 Benjamin Thomas Schwertfeger
# All rights reserved.
# https://github.com/btschwertfeger
#

from typing import Any

import pytest
import xarray as xr

from cmethods import adjust
from cmethods.types import XRData_t

from .helper import is_3d_rmse_better

# zarr/dask-backed runs may be flaky due to the distributed setup.
pytestmark = [pytest.mark.flaky]

GROUP: str = "time.month"
N_QUANTILES: int = 100


@pytest.mark.parametrize(
    ("method", "kind"),
    [
        ("linear_scaling", "+"),
        ("linear_scaling", "*"),
        ("variance_scaling", "+"),
        ("delta_method", "+"),
        ("delta_method", "*"),
    ],
)
def test_3d_scaling_zarr(
    datasets_from_zarr: dict,  # fixed: was annotated xr.Dataset but indexed as dict
    method: str,
    kind: str,
    dask_cluster: Any,  # noqa: ARG001
) -> None:
    """Scaling methods on 3D zarr-backed data, ungrouped and grouped."""
    variable: str = "tas" if kind == "+" else "pr"  # noqa: PLR2004
    obsh: xr.DataArray = datasets_from_zarr[kind]["obsh"][variable]
    obsp: xr.DataArray = datasets_from_zarr[kind]["obsp"][variable]
    simh: xr.DataArray = datasets_from_zarr[kind]["simh"][variable]
    simp: xr.DataArray = datasets_from_zarr[kind]["simp"][variable]

    result: XRData_t = adjust(
        method=method,
        obs=obsh,
        simh=simh,
        simp=simp,
        kind=kind,
    )
    assert isinstance(result, XRData_t)
    assert is_3d_rmse_better(result=result[variable], obsp=obsp, simp=simp)

    # grouped
    result = adjust(
        method=method,
        obs=obsh,
        simh=simh,
        simp=simp,
        kind=kind,
        group=GROUP,
    )
    assert isinstance(result, XRData_t)
    assert is_3d_rmse_better(result=result[variable], obsp=obsp, simp=simp)


@pytest.mark.parametrize(
    ("method", "kind"),
    [
        ("quantile_mapping", "+"),
        ("quantile_mapping", "*"),
        ("quantile_delta_mapping", "+"),
        ("quantile_delta_mapping", "*"),
    ],
)
def test_3d_distribution_zarr(
    datasets_from_zarr: dict,
    method: str,
    kind: str,
    dask_cluster: Any,  # noqa: ARG001
) -> None:
    """Distribution-based methods on 3D zarr-backed data."""
    variable: str = "tas" if kind == "+" else "pr"  # noqa: PLR2004
    obsh: XRData_t = datasets_from_zarr[kind]["obsh"][variable]
    obsp: XRData_t = datasets_from_zarr[kind]["obsp"][variable]
    simh: XRData_t = datasets_from_zarr[kind]["simh"][variable]
    simp: XRData_t = datasets_from_zarr[kind]["simp"][variable]

    result: XRData_t = adjust(
        method=method,
        obs=obsh,
        simh=simh,
        simp=simp,
        kind=kind,
        n_quantiles=N_QUANTILES,
    )

    assert isinstance(result, XRData_t)
    assert is_3d_rmse_better(result=result[variable], obsp=obsp, simp=simp)