├── .github ├── dependabot.yml ├── release.yml └── workflows │ ├── publish.yml │ ├── publish_pure_python.yml │ ├── pull_from_upstream.yml │ ├── test_publish.yml │ ├── test_publish_pure_python.yml │ ├── test_tox.yml │ ├── tox.yml │ └── update_tag.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── LICENSE ├── README.md ├── docs ├── Makefile ├── make.bat ├── requirements.txt └── source │ ├── conf.py │ ├── index.rst │ ├── publish.rst │ ├── publish_pure_python.rst │ └── tox.rst ├── pyproject.toml ├── setup.cfg ├── setup.py ├── test_package ├── __init__.py ├── simple.c └── tests │ ├── __init__.py │ └── test_example.py ├── tools ├── load_build_targets.py ├── set_env.py └── tox_matrix.py ├── tox.ini └── update_scripts_in_yml.py /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: ".github/workflows" # Location of package manifests 5 | schedule: 6 | interval: "monthly" 7 | groups: 8 | actions: 9 | patterns: 10 | - "*" 11 | -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | changelog: 2 | exclude: 3 | authors: 4 | - pre-commit-ci 5 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Build and publish Python package 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | targets: 7 | description: List of build targets for cibuildwheel 8 | required: false 9 | default: | 10 | - linux 11 | - macos 12 | - windows 13 | type: string 14 | sdist: 15 | description: Whether to build a source distribution 16 | required: false 17 | default: true 18 | type: boolean 19 | test_extras: 20 | description: Any extras_requires modifier that should be used to install the package for testing 21 | required: false 22 | default: '' 23 | type: string 24 | test_command: 25 | description: The command to run to test the package (will be run in a temporary directory) 26 | required: false 27 | default: '' 28 | type: string 29 | env: 30 | description: A map of environment variables to be available when building and testing 31 | required: false 32 | default: '' 33 | type: string 34 | libraries: 35 | description: Packages needed to build the source distribution for testing (installed using apt) 36 | required: false 37 | default: '' 38 | type: string 39 | sdist-runs-on: 40 | description: Which runner image to use to build and test the sdist 41 | required: false 42 | default: 'ubuntu-latest' 43 | type: string 44 | upload_to_pypi: 45 | description: A condition specifying whether to upload to PyPI 46 | required: false 47 | default: 'refs/tags/v' 48 | type: string 49 | repository_url: 50 | description: The PyPI repository URL to use 51 | required: false 52 | default: '' 53 | type: string 54 | upload_to_anaconda: 55 | description: A condition specifying whether to upload to Anaconda.org 56 | required: false 57 | default: false 58 | type: boolean 59 | anaconda_user: 60 | description: Anaconda.org user or organisation 61 | required: false 62 | default: '' 63 | type: string 64 | anaconda_package: 65 | description: Anaconda.org package name 66 | required: false 67 | default: '' 68 | type: string 69 | anaconda_keep_n_latest: 70 | description: If specified, only this number of the most recent versions are kept 
71 | required: false 72 | default: -1 73 | type: number 74 | fail-fast: 75 | description: Whether to cancel all in-progress jobs if any job fails 76 | required: false 77 | default: false 78 | type: boolean 79 | timeout-minutes: 80 | description: The maximum number of minutes to let a build job run before GitHub automatically cancels it 81 | required: false 82 | default: 360 83 | type: number 84 | submodules: 85 | description: Whether to checkout submodules 86 | required: false 87 | default: true 88 | type: boolean 89 | checkout_ref: 90 | description: The ref to checkout 91 | required: false 92 | default: '' 93 | type: string 94 | secrets: 95 | pypi_token: 96 | required: false 97 | anaconda_token: 98 | required: false 99 | 100 | jobs: 101 | 102 | targets: 103 | name: Load build targets 104 | if: ${{ inputs.targets != '' }} 105 | runs-on: ubuntu-latest 106 | outputs: 107 | matrix: ${{ steps.set-outputs.outputs.matrix }} 108 | upload_to_pypi: ${{ steps.set-upload.outputs.upload_to_pypi }} 109 | steps: 110 | - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 111 | with: 112 | python-version: '3.12' 113 | - run: python -m pip install PyYAML click 114 | - run: echo $LOAD_BUILD_TARGETS_SCRIPT | base64 --decode > load_build_targets.py 115 | env: 116 | LOAD_BUILD_TARGETS_SCRIPT: aW1wb3J0IGpzb24KaW1wb3J0IG9zCmltcG9ydCByZQoKaW1wb3J0IGNsaWNrCmltcG9ydCB5YW1sCgpNQUNISU5FX1RZUEUgPSB7CiAgICAibGludXgiOiAidWJ1bnR1LWxhdGVzdCIsCiAgICAibWFjb3MiOiAibWFjb3MtbGF0ZXN0IiwKICAgICJ3aW5kb3dzIjogIndpbmRvd3MtbGF0ZXN0IiwKfQoKQ0lCV19CVUlMRCA9IG9zLmVudmlyb24uZ2V0KCJDSUJXX0JVSUxEIiwgIioiKQpDSUJXX0FSQ0hTID0gb3MuZW52aXJvbi5nZXQoIkNJQldfQVJDSFMiLCAiYXV0byIpCgoKQGNsaWNrLmNvbW1hbmQoKQpAY2xpY2sub3B0aW9uKCItLXRhcmdldHMiLCBkZWZhdWx0PSIiKQpkZWYgbG9hZF9idWlsZF90YXJnZXRzKHRhcmdldHMpOgogICAgIiIiU2NyaXB0IHRvIGxvYWQgY2lidWlsZHdoZWVsIHRhcmdldHMgZm9yIEdpdEh1YiBBY3Rpb25zIHdvcmtmbG93LiIiIgogICAgIyBMb2FkIGxpc3Qgb2YgdGFyZ2V0cwogICAgdGFyZ2V0cyA9IHlhbWwubG9hZCh0YXJnZXRzLCBMb2FkZXI9eWFtbC5CYXNlTG9hZGVyKQogICAgcHJpbnQoanNvbi5kdW1wcyh0YXJnZXRzLCBpbmRlbnQ9MikpCgogICAgIyBDcmVhdGUgbWF0cml4CiAgICBtYXRyaXggPSB7ImluY2x1ZGUiOiBbXX0KICAgIGZvciB0YXJnZXQgaW4gdGFyZ2V0czoKICAgICAgICBtYXRyaXhbImluY2x1ZGUiXS5hcHBlbmQoZ2V0X21hdHJpeF9pdGVtKHRhcmdldCkpCgogICAgIyBPdXRwdXQgbWF0cml4CiAgICBwcmludChqc29uLmR1bXBzKG1hdHJpeCwgaW5kZW50PTIpKQogICAgd2l0aCBvcGVuKG9zLmVudmlyb25bIkdJVEhVQl9PVVRQVVQiXSwgImEiKSBhcyBmOgogICAgICAgIGYud3JpdGUoZiJtYXRyaXg9e2pzb24uZHVtcHMobWF0cml4KX1cbiIpCgoKZGVmIGdldF9vcyh0YXJnZXQpOgogICAgaWYgIm1hY29zIiBpbiB0YXJnZXQ6CiAgICAgICAgcmV0dXJuIE1BQ0hJTkVfVFlQRVsibWFjb3MiXQogICAgaWYgIndpbiIgaW4gdGFyZ2V0OgogICAgICAgIHJldHVybiBNQUNISU5FX1RZUEVbIndpbmRvd3MiXQogICAgcmV0dXJuIE1BQ0hJTkVfVFlQRVsibGludXgiXQoKCmRlZiBnZXRfY2lid19idWlsZCh0YXJnZXQpOgogICAgaWYgdGFyZ2V0IGluIHsibGludXgiLCAibWFjb3MiLCAid2luZG93cyJ9OgogICAgICAgIHJldHVybiBDSUJXX0JVSUxECiAgICByZXR1cm4gdGFyZ2V0CgoKZGVmIGdldF9jaWJ3X2FyY2hzKHRhcmdldCk6CiAgICAiIiIKICAgIEhhbmRsZSBub24tbmF0aXZlIGFyY2hpdGVjdHVyZXMKCiAgICBjaWJ3IGFsbG93cyBydW5uaW5nIG5vbi1uYXRpdmUgYnVpbGRzIG9uIHZhcmlvdXMgcGxhdGZvcm1zOgogICAgaHR0cHM6Ly9jaWJ1aWxkd2hlZWwucHlwYS5pby9lbi9zdGFibGUvb3B0aW9ucy8jYXJjaHMKCiAgICBUaGlzIGxvZ2ljIG92ZXJyaWRlcyB0aGUgImF1dG8iIGZsYWcgYmFzZWQgb24gT1MgYW5kIGEgbGlzdCBvZiBzdXBwb3J0ZWQKICAgIG5vbi1uYXRpdmUgYXJjaCBpZiBhIG5vbi1uYXRpdmUgYXJjaCBpcyBnaXZlbiBmb3IgYSBwYXJ0aWN1bGFyIHBsYXRmb3JtIGluCiAgICB0YXJnZXRzLCByYXRoZXIgdGhhbiB0aGUgdXNlciBoYXZpbmcgdG8gZG8gdGhpcyBtYW51YWxseS4KICAgICIiIgogICAgcGxhdGZvcm1fYXJjaHMgPSB7CiAgICAgICAgIyBXZSBub3cgY3Jvc3MgY29tcGlsZSB4ODZfNjQgb24gYXJtNjQgYnkgZGVmYXVsdAogICAgICAgICJt
YWNvcyI6IFsidW5pdmVyc2FsMiIsICJ4ODZfNjQiXSwKICAgICAgICAjIFRoaXMgaXMgYSBsaXN0IG9mIHN1cHBvcnRlZCBldW11bGF0ZWQgYXJjaGVzIG9uIGxpbnV4CiAgICAgICAgImxpbnV4IjogWyJhYXJjaDY0IiwgInBwYzY0bGUiLCAiczM5MHgiLCAiYXJtdjdsIl0sCiAgICB9CiAgICBmb3IgcGxhdGZvcm0sIGFyY2hzIGluIHBsYXRmb3JtX2FyY2hzLml0ZW1zKCk6CiAgICAgICAgaWYgcGxhdGZvcm0gaW4gdGFyZ2V0OgogICAgICAgICAgICBmb3IgYXJjaCBpbiBhcmNoczoKICAgICAgICAgICAgICAgIGlmIHRhcmdldC5lbmRzd2l0aChhcmNoKToKICAgICAgICAgICAgICAgICAgICByZXR1cm4gYXJjaAoKICAgICMgSWYgbm8gZXhwbGljdCBhcmNoIGhhcyBiZWVuIHNwZWNpZmllZCBidWlsZCBib3RoIGFybTY0IGFuZCB4ODZfNjQgb24gbWFjb3MKICAgIGlmICJtYWNvcyIgaW4gdGFyZ2V0OgogICAgICAgIHJldHVybiBvcy5lbnZpcm9uLmdldCgiQ0lCV19BUkNIUyIsICJhcm02NCB4ODZfNjQiKQoKICAgIHJldHVybiBDSUJXX0FSQ0hTCgoKZGVmIGdldF9hcnRpZmFjdF9uYW1lKHRhcmdldCk6CiAgICBhcnRpZmFjdF9uYW1lID0gcmUuc3ViKHIiW1xcIC86PD58Kj9cIiddIiwgIi0iLCB0YXJnZXQpCiAgICBhcnRpZmFjdF9uYW1lID0gcmUuc3ViKHIiLSsiLCAiLSIsIGFydGlmYWN0X25hbWUpCiAgICByZXR1cm4gYXJ0aWZhY3RfbmFtZQoKCmRlZiBnZXRfbWF0cml4X2l0ZW0odGFyZ2V0KToKICAgIGV4dHJhX3RhcmdldF9hcmdzID0ge30KICAgIGlmIGlzaW5zdGFuY2UodGFyZ2V0LCBkaWN0KToKICAgICAgICBleHRyYV90YXJnZXRfYXJncyA9IHRhcmdldAogICAgICAgIHRhcmdldCA9IGV4dHJhX3RhcmdldF9hcmdzLnBvcCgidGFyZ2V0IikKICAgIHJldHVybiB7CiAgICAgICAgInRhcmdldCI6IHRhcmdldCwKICAgICAgICAicnVucy1vbiI6IGdldF9vcyh0YXJnZXQpLAogICAgICAgICJDSUJXX0JVSUxEIjogZ2V0X2NpYndfYnVpbGQodGFyZ2V0KSwKICAgICAgICAiQ0lCV19BUkNIUyI6IGdldF9jaWJ3X2FyY2hzKHRhcmdldCksCiAgICAgICAgImFydGlmYWN0LW5hbWUiOiBnZXRfYXJ0aWZhY3RfbmFtZSh0YXJnZXQpLAogICAgICAgICoqZXh0cmFfdGFyZ2V0X2FyZ3MsCiAgICB9CgoKaWYgX19uYW1lX18gPT0gIl9fbWFpbl9fIjoKICAgIGxvYWRfYnVpbGRfdGFyZ2V0cygpCg== 117 | - id: set-outputs 118 | run: python load_build_targets.py --targets "${{ inputs.targets }}" 119 | shell: sh 120 | - id: set-upload 121 | run: | 122 | if [ $UPLOAD_TO_PYPI == "true" ] || [ $UPLOAD_TAG == "true" ]; 123 | then 124 | echo "upload_to_pypi=true" >> $GITHUB_OUTPUT 125 | else 126 | echo "upload_to_pypi=false" >> $GITHUB_OUTPUT 127 | fi 128 | env: 129 | UPLOAD_TO_PYPI: ${{ inputs.upload_to_pypi }} 130 | UPLOAD_TAG: ${{ startsWith(inputs.upload_to_pypi, 'refs/tags/') && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || github.event_name == 'create') && startsWith(github.ref, inputs.upload_to_pypi) }} 131 | 132 | build_wheels: 133 | name: Build ${{ matrix.target }} wheels 134 | needs: [targets] 135 | runs-on: ${{ matrix.runs-on }} 136 | timeout-minutes: ${{ inputs.timeout-minutes }} 137 | strategy: 138 | fail-fast: ${{ inputs.fail-fast }} 139 | matrix: ${{fromJSON(needs.targets.outputs.matrix)}} 140 | steps: 141 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 142 | with: 143 | fetch-depth: 0 144 | lfs: true 145 | submodules: ${{ inputs.submodules }} 146 | ref: ${{ inputs.checkout_ref }} 147 | - name: Get machine arch 148 | if: ${{ runner.os == 'Linux' }} 149 | id: uname_m 150 | run: | 151 | echo "uname_m=$(uname -m)" >> "$GITHUB_OUTPUT" 152 | - name: Set up QEMU 153 | if: ${{ runner.os == 'Linux' && (matrix.CIBW_ARCHS != 'auto' && matrix.CIBW_ARCHS != steps.uname_m.outputs.uname_m) }} 154 | uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0 155 | with: 156 | platforms: all 157 | - name: Configure cibuildwheel 158 | shell: bash 159 | run: | 160 | if [ -n "${{ inputs.test_extras }}" ]; 161 | then 162 | echo "CIBW_TEST_EXTRAS=${{ inputs.test_extras }}" >> $GITHUB_ENV 163 | fi 164 | set +e 165 | IFS='' read -r -d '' test_command <<"EOF" 166 | ${{ inputs.test_command }} 167 | EOF 168 | set -e 169 | 
if [ -n "$test_command" ]; 170 | then 171 | echo "CIBW_TEST_COMMAND<> $GITHUB_ENV 172 | echo $(echo $test_command | tr -d '\n') >> $GITHUB_ENV 173 | echo "EOF" >> $GITHUB_ENV 174 | fi 175 | cat $GITHUB_ENV 176 | - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 177 | if: ${{ inputs.env != '' }} 178 | with: 179 | python-version: '3.12' 180 | - id: set-env 181 | if: ${{ inputs.env != '' }} 182 | run: | 183 | python -m pip install PyYAML 184 | echo $SET_ENV_SCRIPT | base64 --decode > set_env.py 185 | python set_env.py "${{ inputs.env }}" 186 | rm set_env.py 187 | shell: sh 188 | env: 189 | SET_ENV_SCRIPT: aW1wb3J0IGpzb24KaW1wb3J0IG9zCmltcG9ydCBzeXMKCmltcG9ydCB5YW1sCgpHSVRIVUJfRU5WID0gb3MuZ2V0ZW52KCJHSVRIVUJfRU5WIikKaWYgR0lUSFVCX0VOViBpcyBOb25lOgogICAgcmFpc2UgVmFsdWVFcnJvcigiR0lUSFVCX0VOViBub3Qgc2V0LiBNdXN0IGJlIHJ1biBpbnNpZGUgR2l0SHViIEFjdGlvbnMuIikKCkRFTElNSVRFUiA9ICJFT0YiCgoKZGVmIHNldF9lbnYoZW52KToKCiAgICBlbnYgPSB5YW1sLmxvYWQoZW52LCBMb2FkZXI9eWFtbC5CYXNlTG9hZGVyKQogICAgcHJpbnQoanNvbi5kdW1wcyhlbnYsIGluZGVudD0yKSkKCiAgICBpZiBub3QgaXNpbnN0YW5jZShlbnYsIGRpY3QpOgogICAgICAgIHRpdGxlID0gImBlbnZgIG11c3QgYmUgbWFwcGluZyIKICAgICAgICBtZXNzYWdlID0gZiJgZW52YCBtdXN0IGJlIG1hcHBpbmcgb2YgZW52IHZhcmlhYmxlcyB0byB2YWx1ZXMsIGdvdCB0eXBlIHt0eXBlKGVudil9IgogICAgICAgIHByaW50KGYiOjplcnJvciB0aXRsZT17dGl0bGV9Ojp7bWVzc2FnZX0iKQogICAgICAgIGV4aXQoMSkKCiAgICBmb3IgaywgdiBpbiBlbnYuaXRlbXMoKToKCiAgICAgICAgaWYgbm90IGlzaW5zdGFuY2Uodiwgc3RyKToKICAgICAgICAgICAgdGl0bGUgPSAiYGVudmAgdmFsdWVzIG11c3QgYmUgc3RyaW5ncyIKICAgICAgICAgICAgbWVzc2FnZSA9IGYiYGVudmAgdmFsdWVzIG11c3QgYmUgc3RyaW5ncywgYnV0IHZhbHVlIG9mIHtrfSBoYXMgdHlwZSB7dHlwZSh2KX0iCiAgICAgICAgICAgIHByaW50KGYiOjplcnJvciB0aXRsZT17dGl0bGV9Ojp7bWVzc2FnZX0iKQogICAgICAgICAgICBleGl0KDEpCgogICAgICAgIHYgPSB2LnNwbGl0KCJcbiIpCgogICAgICAgIHdpdGggb3BlbihHSVRIVUJfRU5WLCAiYSIpIGFzIGY6CiAgICAgICAgICAgIGlmIGxlbih2KSA9PSAxOgogICAgICAgICAgICAgICAgZi53cml0ZShmIntrfT17dlswXX1cbiIpCiAgICAgICAgICAgIGVsc2U6CiAgICAgICAgICAgICAgICBmb3IgbGluZSBpbiB2OgogICAgICAgICAgICAgICAgICAgIGFzc2VydCBsaW5lLnN0cmlwKCkgIT0gREVMSU1JVEVSCiAgICAgICAgICAgICAgICBmLndyaXRlKGYie2t9PDx7REVMSU1JVEVSfVxuIikKICAgICAgICAgICAgICAgIGZvciBsaW5lIGluIHY6CiAgICAgICAgICAgICAgICAgICAgZi53cml0ZShmIntsaW5lfVxuIikKICAgICAgICAgICAgICAgIGYud3JpdGUoZiJ7REVMSU1JVEVSfVxuIikKCiAgICAgICAgcHJpbnQoZiJ7a30gd3JpdHRlbiB0byBHSVRIVUJfRU5WIikKCgppZiBfX25hbWVfXyA9PSAiX19tYWluX18iOgogICAgc2V0X2VudihzeXMuYXJndlsxXSkK 190 | - name: Run cibuildwheel 191 | uses: pypa/cibuildwheel@d04cacbc9866d432033b1d09142936e6a0e2121a # v2.23.2 192 | with: 193 | output-dir: dist 194 | env: 195 | CIBW_BUILD: ${{ matrix.CIBW_BUILD }} 196 | CIBW_ARCHS: ${{ matrix.CIBW_ARCHS }} 197 | - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 198 | if: | 199 | needs.targets.outputs.upload_to_pypi == 'true' || inputs.upload_to_anaconda 200 | with: 201 | name: "dist-${{ matrix.artifact-name }}" 202 | path: dist/* 203 | 204 | build_sdist: 205 | name: Build source distribution 206 | needs: [targets] 207 | if: ${{ inputs.sdist }} 208 | runs-on: ${{ inputs.sdist-runs-on }} 209 | timeout-minutes: ${{ inputs.timeout-minutes }} 210 | steps: 211 | - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 212 | if: ${{ inputs.env != '' }} 213 | with: 214 | python-version: '3.12' 215 | - id: set-env 216 | if: ${{ inputs.env != '' }} 217 | run: | 218 | python -m pip install PyYAML 219 | echo $SET_ENV_SCRIPT | base64 --decode > set_env.py 220 | python set_env.py "${{ inputs.env }}" 221 | rm set_env.py 222 | shell: sh 223 | env: 
224 | SET_ENV_SCRIPT: aW1wb3J0IGpzb24KaW1wb3J0IG9zCmltcG9ydCBzeXMKCmltcG9ydCB5YW1sCgpHSVRIVUJfRU5WID0gb3MuZ2V0ZW52KCJHSVRIVUJfRU5WIikKaWYgR0lUSFVCX0VOViBpcyBOb25lOgogICAgcmFpc2UgVmFsdWVFcnJvcigiR0lUSFVCX0VOViBub3Qgc2V0LiBNdXN0IGJlIHJ1biBpbnNpZGUgR2l0SHViIEFjdGlvbnMuIikKCkRFTElNSVRFUiA9ICJFT0YiCgoKZGVmIHNldF9lbnYoZW52KToKCiAgICBlbnYgPSB5YW1sLmxvYWQoZW52LCBMb2FkZXI9eWFtbC5CYXNlTG9hZGVyKQogICAgcHJpbnQoanNvbi5kdW1wcyhlbnYsIGluZGVudD0yKSkKCiAgICBpZiBub3QgaXNpbnN0YW5jZShlbnYsIGRpY3QpOgogICAgICAgIHRpdGxlID0gImBlbnZgIG11c3QgYmUgbWFwcGluZyIKICAgICAgICBtZXNzYWdlID0gZiJgZW52YCBtdXN0IGJlIG1hcHBpbmcgb2YgZW52IHZhcmlhYmxlcyB0byB2YWx1ZXMsIGdvdCB0eXBlIHt0eXBlKGVudil9IgogICAgICAgIHByaW50KGYiOjplcnJvciB0aXRsZT17dGl0bGV9Ojp7bWVzc2FnZX0iKQogICAgICAgIGV4aXQoMSkKCiAgICBmb3IgaywgdiBpbiBlbnYuaXRlbXMoKToKCiAgICAgICAgaWYgbm90IGlzaW5zdGFuY2Uodiwgc3RyKToKICAgICAgICAgICAgdGl0bGUgPSAiYGVudmAgdmFsdWVzIG11c3QgYmUgc3RyaW5ncyIKICAgICAgICAgICAgbWVzc2FnZSA9IGYiYGVudmAgdmFsdWVzIG11c3QgYmUgc3RyaW5ncywgYnV0IHZhbHVlIG9mIHtrfSBoYXMgdHlwZSB7dHlwZSh2KX0iCiAgICAgICAgICAgIHByaW50KGYiOjplcnJvciB0aXRsZT17dGl0bGV9Ojp7bWVzc2FnZX0iKQogICAgICAgICAgICBleGl0KDEpCgogICAgICAgIHYgPSB2LnNwbGl0KCJcbiIpCgogICAgICAgIHdpdGggb3BlbihHSVRIVUJfRU5WLCAiYSIpIGFzIGY6CiAgICAgICAgICAgIGlmIGxlbih2KSA9PSAxOgogICAgICAgICAgICAgICAgZi53cml0ZShmIntrfT17dlswXX1cbiIpCiAgICAgICAgICAgIGVsc2U6CiAgICAgICAgICAgICAgICBmb3IgbGluZSBpbiB2OgogICAgICAgICAgICAgICAgICAgIGFzc2VydCBsaW5lLnN0cmlwKCkgIT0gREVMSU1JVEVSCiAgICAgICAgICAgICAgICBmLndyaXRlKGYie2t9PDx7REVMSU1JVEVSfVxuIikKICAgICAgICAgICAgICAgIGZvciBsaW5lIGluIHY6CiAgICAgICAgICAgICAgICAgICAgZi53cml0ZShmIntsaW5lfVxuIikKICAgICAgICAgICAgICAgIGYud3JpdGUoZiJ7REVMSU1JVEVSfVxuIikKCiAgICAgICAgcHJpbnQoZiJ7a30gd3JpdHRlbiB0byBHSVRIVUJfRU5WIikKCgppZiBfX25hbWVfXyA9PSAiX19tYWluX18iOgogICAgc2V0X2VudihzeXMuYXJndlsxXSkK 225 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 226 | with: 227 | fetch-depth: 0 228 | lfs: true 229 | submodules: ${{ inputs.submodules }} 230 | - name: Install dependencies 231 | if: ${{ inputs.libraries != '' }} 232 | uses: ConorMacBride/install-package@3e7ad059e07782ee54fa35f827df52aae0626f30 # v1.1.0 233 | with: 234 | apt: ${{ inputs.libraries }} 235 | - id: build 236 | uses: OpenAstronomy/build-python-dist@bbb0e1c5b132893999ea56d77bd4b526e0097c7d # v1.0.1 237 | with: 238 | test_extras: ${{ inputs.test_extras }} 239 | test_command: ${{ inputs.test_command }} 240 | pure_python_wheel: false 241 | python-version: '3.12' 242 | - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.5 243 | if: | 244 | needs.targets.outputs.upload_to_pypi == 'true' || inputs.upload_to_anaconda 245 | with: 246 | name: dist-sdist 247 | path: dist/* 248 | 249 | upload: 250 | name: Upload 251 | needs: [targets, build_wheels, build_sdist] 252 | runs-on: ubuntu-latest 253 | if: | 254 | always() && 255 | needs.targets.result == 'success' && 256 | ( needs.targets.outputs.upload_to_pypi == 'true' || 257 | inputs.upload_to_anaconda ) && 258 | needs.build_wheels.result != 'failure' && 259 | needs.build_sdist.result != 'failure' 260 | steps: 261 | - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 262 | with: 263 | pattern: dist-* 264 | path: dist 265 | merge-multiple: true 266 | - uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4 267 | name: Upload to PyPI 268 | if: ${{ needs.targets.outputs.upload_to_pypi == 'true' }} 269 | with: 270 | user: __token__ 271 | password: ${{ secrets.pypi_token }} 272 | repository-url: ${{ 
inputs.repository_url }} 273 | - uses: OpenAstronomy/publish-wheels-anaconda@eae491141709933a7636af70b070e4a92d1a45be # v1.0.1 274 | if: ${{ inputs.upload_to_anaconda }} 275 | with: 276 | anaconda_user: ${{ inputs.anaconda_user }} 277 | anaconda_package: ${{ inputs.anaconda_package }} 278 | anaconda_token: ${{ secrets.anaconda_token }} 279 | keep_n_latest: ${{ inputs.anaconda_keep_n_latest }} 280 | -------------------------------------------------------------------------------- /.github/workflows/publish_pure_python.yml: -------------------------------------------------------------------------------- 1 | name: Build and publish pure Python package 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | test_extras: 7 | description: Any extras_requires modifier that should be used to install the package for testing 8 | required: false 9 | default: '' 10 | type: string 11 | test_command: 12 | description: The command to run to test the package (will be run in a temporary directory) 13 | required: false 14 | default: '' 15 | type: string 16 | env: 17 | description: A map of environment variables to be available when building and testing 18 | required: false 19 | default: '' 20 | type: string 21 | libraries: 22 | description: Packages needed to build the source distribution for testing (installed using apt) 23 | required: false 24 | default: '' 25 | type: string 26 | runs-on: 27 | description: Which runner image to use to build and test the sdist and wheel 28 | required: false 29 | default: 'ubuntu-latest' 30 | type: string 31 | upload_to_pypi: 32 | description: A condition specifying whether to upload to PyPI 33 | required: false 34 | default: 'refs/tags/v' 35 | type: string 36 | repository_url: 37 | description: The PyPI repository URL to use 38 | required: false 39 | default: '' 40 | type: string 41 | upload_to_anaconda: 42 | description: A condition specifying whether to upload to Anaconda.org 43 | required: false 44 | default: false 45 | type: boolean 46 | anaconda_user: 47 | description: Anaconda.org user or organisation 48 | required: false 49 | default: '' 50 | type: string 51 | anaconda_package: 52 | description: Anaconda.org package name 53 | required: false 54 | default: '' 55 | type: string 56 | anaconda_keep_n_latest: 57 | description: If specified, only this number of the most recent versions are kept 58 | required: false 59 | default: -1 60 | type: number 61 | timeout-minutes: 62 | description: The maximum number of minutes to let the workflow run before GitHub automatically cancels it 63 | required: false 64 | default: 360 65 | type: number 66 | submodules: 67 | description: Whether to checkout submodules 68 | required: false 69 | default: true 70 | type: boolean 71 | python-version: 72 | description: The Python version to use for building and testing 73 | required: false 74 | default: '3.x' 75 | type: string 76 | checkout_ref: 77 | description: The ref to checkout 78 | required: false 79 | default: '' 80 | type: string 81 | secrets: 82 | pypi_token: 83 | required: false 84 | anaconda_token: 85 | required: false 86 | 87 | jobs: 88 | 89 | build: 90 | name: Build source and wheel distribution 91 | runs-on: ${{ inputs.runs-on }} 92 | timeout-minutes: ${{ inputs.timeout-minutes }} 93 | steps: 94 | - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 95 | if: ${{ inputs.env != '' }} 96 | with: 97 | python-version: '3.12' 98 | - id: set-env 99 | if: ${{ inputs.env != '' }} 100 | run: | 101 | python -m pip install -U pip 102 | python -m pip install PyYAML 103 | echo 
$SET_ENV_SCRIPT | base64 --decode > set_env.py 104 | python set_env.py "${{ inputs.env }}" 105 | rm set_env.py 106 | shell: sh 107 | env: 108 | SET_ENV_SCRIPT: aW1wb3J0IGpzb24KaW1wb3J0IG9zCmltcG9ydCBzeXMKCmltcG9ydCB5YW1sCgpHSVRIVUJfRU5WID0gb3MuZ2V0ZW52KCJHSVRIVUJfRU5WIikKaWYgR0lUSFVCX0VOViBpcyBOb25lOgogICAgcmFpc2UgVmFsdWVFcnJvcigiR0lUSFVCX0VOViBub3Qgc2V0LiBNdXN0IGJlIHJ1biBpbnNpZGUgR2l0SHViIEFjdGlvbnMuIikKCkRFTElNSVRFUiA9ICJFT0YiCgoKZGVmIHNldF9lbnYoZW52KToKCiAgICBlbnYgPSB5YW1sLmxvYWQoZW52LCBMb2FkZXI9eWFtbC5CYXNlTG9hZGVyKQogICAgcHJpbnQoanNvbi5kdW1wcyhlbnYsIGluZGVudD0yKSkKCiAgICBpZiBub3QgaXNpbnN0YW5jZShlbnYsIGRpY3QpOgogICAgICAgIHRpdGxlID0gImBlbnZgIG11c3QgYmUgbWFwcGluZyIKICAgICAgICBtZXNzYWdlID0gZiJgZW52YCBtdXN0IGJlIG1hcHBpbmcgb2YgZW52IHZhcmlhYmxlcyB0byB2YWx1ZXMsIGdvdCB0eXBlIHt0eXBlKGVudil9IgogICAgICAgIHByaW50KGYiOjplcnJvciB0aXRsZT17dGl0bGV9Ojp7bWVzc2FnZX0iKQogICAgICAgIGV4aXQoMSkKCiAgICBmb3IgaywgdiBpbiBlbnYuaXRlbXMoKToKCiAgICAgICAgaWYgbm90IGlzaW5zdGFuY2Uodiwgc3RyKToKICAgICAgICAgICAgdGl0bGUgPSAiYGVudmAgdmFsdWVzIG11c3QgYmUgc3RyaW5ncyIKICAgICAgICAgICAgbWVzc2FnZSA9IGYiYGVudmAgdmFsdWVzIG11c3QgYmUgc3RyaW5ncywgYnV0IHZhbHVlIG9mIHtrfSBoYXMgdHlwZSB7dHlwZSh2KX0iCiAgICAgICAgICAgIHByaW50KGYiOjplcnJvciB0aXRsZT17dGl0bGV9Ojp7bWVzc2FnZX0iKQogICAgICAgICAgICBleGl0KDEpCgogICAgICAgIHYgPSB2LnNwbGl0KCJcbiIpCgogICAgICAgIHdpdGggb3BlbihHSVRIVUJfRU5WLCAiYSIpIGFzIGY6CiAgICAgICAgICAgIGlmIGxlbih2KSA9PSAxOgogICAgICAgICAgICAgICAgZi53cml0ZShmIntrfT17dlswXX1cbiIpCiAgICAgICAgICAgIGVsc2U6CiAgICAgICAgICAgICAgICBmb3IgbGluZSBpbiB2OgogICAgICAgICAgICAgICAgICAgIGFzc2VydCBsaW5lLnN0cmlwKCkgIT0gREVMSU1JVEVSCiAgICAgICAgICAgICAgICBmLndyaXRlKGYie2t9PDx7REVMSU1JVEVSfVxuIikKICAgICAgICAgICAgICAgIGZvciBsaW5lIGluIHY6CiAgICAgICAgICAgICAgICAgICAgZi53cml0ZShmIntsaW5lfVxuIikKICAgICAgICAgICAgICAgIGYud3JpdGUoZiJ7REVMSU1JVEVSfVxuIikKCiAgICAgICAgcHJpbnQoZiJ7a30gd3JpdHRlbiB0byBHSVRIVUJfRU5WIikKCgppZiBfX25hbWVfXyA9PSAiX19tYWluX18iOgogICAgc2V0X2VudihzeXMuYXJndlsxXSkK 109 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 110 | with: 111 | fetch-depth: 0 112 | lfs: true 113 | submodules: ${{ inputs.submodules }} 114 | ref: ${{ inputs.checkout_ref }} 115 | - name: Install dependencies 116 | if: ${{ inputs.libraries != '' }} 117 | uses: ConorMacBride/install-package@3e7ad059e07782ee54fa35f827df52aae0626f30 # v1.1.0 118 | with: 119 | apt: ${{ inputs.libraries }} 120 | - id: build 121 | uses: OpenAstronomy/build-python-dist@bbb0e1c5b132893999ea56d77bd4b526e0097c7d # v1.0.1 122 | with: 123 | test_extras: ${{ inputs.test_extras }} 124 | test_command: ${{ inputs.test_command }} 125 | python-version: ${{ inputs.python-version }} 126 | pure_python_wheel: true 127 | - id: set-upload 128 | run: | 129 | if [ $UPLOAD_TO_PYPI == "true" ] || [ $UPLOAD_TAG == "true" ]; 130 | then 131 | echo "upload_to_pypi=true" >> $GITHUB_OUTPUT 132 | else 133 | echo "upload_to_pypi=false" >> $GITHUB_OUTPUT 134 | fi 135 | env: 136 | UPLOAD_TO_PYPI: ${{ inputs.upload_to_pypi }} 137 | UPLOAD_TAG: ${{ startsWith(inputs.upload_to_pypi, 'refs/tags/') && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || github.event_name == 'create') && startsWith(github.ref, inputs.upload_to_pypi) }} 138 | - uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4 139 | name: Upload to PyPI 140 | if: ${{ steps.set-upload.outputs.upload_to_pypi == 'true' }} 141 | with: 142 | user: __token__ 143 | password: ${{ secrets.pypi_token }} 144 | repository-url: ${{ inputs.repository_url }} 145 | - uses: 
OpenAstronomy/publish-wheels-anaconda@eae491141709933a7636af70b070e4a92d1a45be # v1.0.1 146 | if: ${{ inputs.upload_to_anaconda }} 147 | with: 148 | anaconda_user: ${{ inputs.anaconda_user }} 149 | anaconda_package: ${{ inputs.anaconda_package }} 150 | anaconda_token: ${{ secrets.anaconda_token }} 151 | keep_n_latest: ${{ inputs.anaconda_keep_n_latest }} 152 | -------------------------------------------------------------------------------- /.github/workflows/pull_from_upstream.yml: -------------------------------------------------------------------------------- 1 | name: Sync OpenAstronomy Workflows from upstream 2 | 3 | on: 4 | workflow_dispatch: 5 | schedule: 6 | # Run every Saturday at 0900 UTC 7 | - cron: '0 9 * * 6' 8 | 9 | jobs: 10 | sync-workflows: 11 | if: github.repository != 'OpenAstronomy/github-actions-workflows' 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - name: Checkout target repository 16 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 17 | with: 18 | # Checkout the repository where the workflow is running 19 | ref: main 20 | fetch-depth: 0 21 | 22 | - name: Set up git 23 | run: | 24 | git config --global user.name "${{ secrets.GITHUB_ACTOR }}" 25 | git config --global user.email "${{ secrets.GITHUB_ACTOR }}@users.noreply.github.com" 26 | 27 | - name: Pull from OpenAstronomy/github-actions-workflows main branch 28 | run: | 29 | git remote add upstream https://github.com/OpenAstronomy/github-actions-workflows.git 30 | git fetch upstream main 31 | git merge upstream/main 32 | 33 | - name: Push changes to the target repository 34 | run: | 35 | git push origin main 36 | -------------------------------------------------------------------------------- /.github/workflows/test_publish.yml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_dispatch: 3 | push: 4 | paths: 5 | - .github/workflows/publish.yml 6 | - .github/workflows/test_publish.yml 7 | pull_request: 8 | paths: 9 | - .github/workflows/publish.yml 10 | - .github/workflows/test_publish.yml 11 | 12 | concurrency: 13 | group: ${{ github.workflow }}-${{ github.ref }} 14 | cancel-in-progress: true 15 | 16 | jobs: 17 | release_default: 18 | uses: ./.github/workflows/publish.yml 19 | with: 20 | test_extras: test 21 | test_command: pytest --pyargs test_package 22 | timeout-minutes: 30 23 | release: 24 | uses: ./.github/workflows/publish.yml 25 | with: 26 | test_extras: test 27 | test_command: pytest --pyargs test_package 28 | targets: | 29 | - linux 30 | - cp31?-macos* 31 | - target: cp311-macosx_x86_64 32 | runs-on: macos-13 33 | - cp312-macosx_arm64 34 | - cp313-macosx_universal2 35 | - cp3?-win_amd64 36 | - target: cp312-manylinux_aarch64 37 | runs-on: ubuntu-24.04-arm 38 | - cp313-manylinux_x86_64 39 | - target: cp311-manylinux_ppc64le 40 | timeout-minutes: 30 41 | release_sdist_only: 42 | uses: ./.github/workflows/publish.yml 43 | with: 44 | test_extras: test 45 | test_command: pytest --pyargs test_package 46 | targets: '' 47 | -------------------------------------------------------------------------------- /.github/workflows/test_publish_pure_python.yml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_dispatch: 3 | push: 4 | paths: 5 | - .github/workflows/publish_pure_python.yml 6 | - .github/workflows/test_publish_pure_python.yml 7 | pull_request: 8 | paths: 9 | - .github/workflows/publish_pure_python.yml 10 | - .github/workflows/test_publish_pure_python.yml 11 | 12 | concurrency: 13 
| group: ${{ github.workflow }}-${{ github.ref }} 14 | cancel-in-progress: true 15 | 16 | jobs: 17 | release: 18 | uses: ./.github/workflows/publish_pure_python.yml 19 | with: 20 | test_extras: test 21 | test_command: pytest --pyargs test_package 22 | timeout-minutes: 5 23 | 24 | setenv: 25 | uses: ./.github/workflows/publish_pure_python.yml 26 | with: 27 | test_command: python -c "import os; assert os.getenv('CUSTOM_VAR') == 'custom value'" 28 | env: | 29 | CUSTOM_VAR: custom value 30 | -------------------------------------------------------------------------------- /.github/workflows/test_tox.yml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_dispatch: 3 | push: 4 | paths: 5 | - .github/workflows/tox.yml 6 | - .github/workflows/test_tox.yml 7 | pull_request: 8 | paths: 9 | - .github/workflows/tox.yml 10 | - .github/workflows/test_tox.yml 11 | 12 | concurrency: 13 | group: ${{ github.workflow }}-${{ github.ref }} 14 | cancel-in-progress: true 15 | 16 | jobs: 17 | test_pyos: 18 | uses: ./.github/workflows/tox.yml 19 | with: 20 | envs: | 21 | - linux: py312-inputs-linux 22 | - macos: py311-inputs-macos 23 | - macos: py39-inputs-macos 24 | - windows: py310-inputs-windows 25 | toxargs: '-v' 26 | runs-on: windows-2019 27 | 28 | - linux: py312-inputs-conda 29 | - macos: py312-inputs-conda 30 | conda: false 31 | posargs: not 32 | - windows: py312-inputs-con_da 33 | conda: true 34 | 35 | pytest: false 36 | 37 | test_global_override: 38 | uses: ./.github/workflows/tox.yml 39 | with: 40 | conda: 'true' 41 | runs-on: | 42 | linux: ubuntu-22.04 43 | macos: macos-11 44 | envs: | 45 | # conda present in toxenv 46 | - linux: py311-inputs-conda 47 | - linux: py311-inputs-conda 48 | conda: false 49 | posargs: not 50 | # conda not present in toxenv 51 | - linux: py311-inputs-con_da 52 | runs-on: ubuntu-latest 53 | - linux: py311-inputs-con_da 54 | conda: auto 55 | posargs: not 56 | timeout-minutes: 5 57 | pytest: false 58 | timeout-minutes: 2 59 | 60 | test_default_python: 61 | uses: ./.github/workflows/tox.yml 62 | with: 63 | default_python: '3.9' 64 | envs: | 65 | - linux: default_python 66 | posargs: '9' 67 | - linux: default_python 68 | default_python: '3.12' 69 | posargs: '12' 70 | - linux: py313-python_version 71 | python-version: '3.13-dev' 72 | posargs: 'CPython' 73 | - linux: pypy310-python_version 74 | python-version: 'pypy-3.10' 75 | posargs: 'PyPy' 76 | pytest: false 77 | 78 | test_libraries: 79 | uses: ./.github/workflows/tox.yml 80 | with: 81 | libraries: | 82 | apt: 83 | - rolldice 84 | brew: 85 | - openjpeg 86 | envs: | 87 | - linux: libraries 88 | posargs: 'rolldice -v' 89 | - macos: libraries 90 | posargs: 'which opj_compress' 91 | - linux: libraries 92 | posargs: 'bcal -h' 93 | libraries: 94 | apt: 95 | - bcal 96 | - windows: libraries 97 | posargs: 'dot -V' 98 | libraries: 99 | apt: 100 | - shouldnotinstall12345 101 | choco: 102 | - graphviz 103 | # test no libraries override 104 | - linux: libraries 105 | libraries: '' 106 | posargs: 'rolldice -v && exit 1 || exit 0' 107 | pytest: false 108 | 109 | test_venv: 110 | uses: ./.github/workflows/tox.yml 111 | with: 112 | envs: | 113 | - linux: pep8 114 | name: style_check 115 | pytest: false 116 | - linux: py312 117 | - macos: py311 118 | - windows: py310 119 | 120 | test_conda: 121 | uses: ./.github/workflows/tox.yml 122 | with: 123 | envs: | 124 | - linux: py312-conda 125 | - macos: py311-conda 126 | - windows: py310-conda 127 | 128 | test_setenv: 129 | uses: 
./.github/workflows/tox.yml 130 | with: 131 | setenv: | 132 | MY_VAR: global_value 133 | envs: | 134 | - linux: setenv-local 135 | setenv: | 136 | MY_VAR: local_value 137 | - macos: setenv-global 138 | 139 | test_cache_setup: 140 | uses: ./.github/workflows/tox.yml 141 | with: 142 | cache-path: a/ 143 | cache-key: cache-${{ github.run_id }} 144 | envs: | 145 | - linux: cache-setup 146 | 147 | test_cache_verify: 148 | needs: [test_cache_setup] 149 | uses: ./.github/workflows/tox.yml 150 | with: 151 | envs: | 152 | - linux: cache-verify 153 | cache-path: | 154 | a/ 155 | cache-key: cache-${{ github.run_id }} 156 | 157 | test_artifact_upload: 158 | uses: ./.github/workflows/tox.yml 159 | with: 160 | envs: | 161 | - linux: artifact-upload 162 | pytest: false 163 | artifact-path: test.txt 164 | 165 | test_artifact_download: 166 | needs: [test_artifact_upload] 167 | runs-on: ubuntu-latest 168 | steps: 169 | - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 170 | with: 171 | name: artifact-upload-(ubuntu-latest) 172 | path: . 173 | - run: python -c "assert open('test.txt').read().strip() == 'hello world'" 174 | 175 | test_pytest_args: 176 | uses: ./.github/workflows/tox.yml 177 | with: 178 | envs: | 179 | - linux: py312-linux 180 | pytest-results-summary: true 181 | 182 | test_prereleases: 183 | uses: ./.github/workflows/tox.yml 184 | with: 185 | envs: | 186 | - linux: py312 187 | 188 | test_freethreaded: 189 | uses: ./.github/workflows/tox.yml 190 | with: 191 | envs: | 192 | - linux: py313t-linux 193 | - macos: py313t-macos 194 | - windows: py313t-windows 195 | -------------------------------------------------------------------------------- /.github/workflows/tox.yml: -------------------------------------------------------------------------------- 1 | name: Test Python package 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | envs: 7 | description: Array of tox environments to test 8 | required: true 9 | type: string 10 | libraries: 11 | description: Additional packages to install 12 | required: false 13 | default: '' 14 | type: string 15 | posargs: 16 | description: Positional arguments for the underlying tox test command 17 | required: false 18 | default: '' 19 | type: string 20 | toxdeps: 21 | description: Tox dependencies 22 | required: false 23 | default: '' 24 | type: string 25 | toxargs: 26 | description: Positional arguments for tox 27 | required: false 28 | default: '' 29 | type: string 30 | pytest: 31 | description: Whether pytest is run 32 | required: false 33 | default: true 34 | type: boolean 35 | pytest-results-summary: 36 | description: Whether to report test summary 37 | required: false 38 | default: false 39 | type: boolean 40 | coverage: 41 | description: Coverage providers to upload to 42 | required: false 43 | default: '' 44 | type: string 45 | conda: 46 | description: Whether to test with conda 47 | required: false 48 | default: 'auto' 49 | type: string 50 | setenv: 51 | description: A map of environment variables to be available when testing 52 | required: false 53 | default: '' 54 | type: string 55 | display: 56 | description: Whether to setup a headless display 57 | required: false 58 | default: false 59 | type: boolean 60 | cache-path: 61 | description: A list of files, directories, and wildcard patterns to cache and restore 62 | required: false 63 | default: '' 64 | type: string 65 | cache-key: 66 | description: An explicit key for restoring and saving the cache 67 | required: false 68 | default: '' 69 | type: string 70 | 
cache-restore-keys: 71 | description: An ordered list of keys to use for restoring the cache if no cache hit occurred for key 72 | required: false 73 | default: '' 74 | type: string 75 | artifact-path: 76 | description: A list of files, directories, and wildcard patterns to upload as artifacts 77 | required: false 78 | default: '' 79 | type: string 80 | runs-on: 81 | description: Which runner image to use for each OS 82 | required: false 83 | default: '' 84 | type: string 85 | default_python: 86 | description: Default version of Python 87 | required: false 88 | default: '3.x' 89 | type: string 90 | fail-fast: 91 | description: Whether to cancel all in-progress jobs if any job fails 92 | required: false 93 | default: false 94 | type: boolean 95 | timeout-minutes: 96 | description: The maximum number of minutes to let a job run before GitHub automatically cancels it 97 | required: false 98 | default: 360 99 | type: number 100 | submodules: 101 | description: Whether to checkout submodules 102 | required: false 103 | default: true 104 | type: boolean 105 | checkout_ref: 106 | description: The ref to checkout 107 | required: false 108 | default: '' 109 | type: string 110 | secrets: 111 | CODECOV_TOKEN: 112 | description: Codecov upload token 113 | required: false 114 | 115 | jobs: 116 | 117 | envs: 118 | name: Load tox environments 119 | runs-on: ubuntu-latest 120 | outputs: 121 | matrix: ${{ steps.set-outputs.outputs.matrix }} 122 | steps: 123 | - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 124 | with: 125 | python-version: '3.12' 126 | - run: python -m pip install PyYAML click packaging 127 | - run: echo $TOX_MATRIX_SCRIPT | base64 --decode > tox_matrix.py 128 | env: 129 | TOX_MATRIX_SCRIPT: import json
import os
import re

import click
import yaml
from packaging.version import InvalidVersion, Version


@click.command()
@click.option("--envs", default="")
@click.option("--libraries", default="")
@click.option("--posargs", default="")
@click.option("--toxdeps", default="")
@click.option("--toxargs", default="")
@click.option("--pytest", default="true")
@click.option("--pytest-results-summary", default="false")
@click.option("--coverage", default="")
@click.option("--conda", default="auto")
@click.option("--setenv", default="")
@click.option("--display", default="false")
@click.option("--cache-path", default="")
@click.option("--cache-key", default="")
@click.option("--cache-restore-keys", default="")
@click.option("--artifact-path", default="")
@click.option("--runs-on", default="")
@click.option("--default-python", default="")
@click.option("--timeout-minutes", default="360")
def load_tox_targets(envs, libraries, posargs, toxdeps, toxargs, pytest, pytest_results_summary,
                     coverage, conda, setenv, display, cache_path, cache_key,
                     cache_restore_keys, artifact_path, runs_on, default_python, timeout_minutes):
    """Script to load tox targets for GitHub Actions workflow."""
    # Load envs config
    envs = yaml.load(envs, Loader=yaml.BaseLoader)
    print(json.dumps(envs, indent=2))

    # Load global libraries config
    global_libraries = {
        "brew": [],
        "brew-cask": [],
        "apt": [],
        "choco": [],
    }
    libraries = yaml.load(libraries, Loader=yaml.BaseLoader)
    if libraries is not None:
        global_libraries.update(libraries)
    print(json.dumps(global_libraries, indent=2))

    # Default images to use for runners
    default_runs_on = {
        "linux": "ubuntu-latest",
        "macos": "macos-latest",
        "windows": "windows-latest",
    }
    custom_runs_on = yaml.load(runs_on, Loader=yaml.BaseLoader)
    if isinstance(custom_runs_on, dict):
        default_runs_on.update(custom_runs_on)
    print(json.dumps(default_runs_on, indent=2))

    # Default string parameters which can be overwritten by each env
    string_parameters = {
        "posargs": posargs,
        "toxdeps": toxdeps,
        "toxargs": toxargs,
        "pytest": pytest,
        "pytest-results-summary": pytest_results_summary,
        "coverage": coverage,
        "conda": conda,
        "setenv": setenv,
        "display": display,
        "cache-path": cache_path,
        "cache-key": cache_key,
        "cache-restore-keys": cache_restore_keys,
        "artifact-path": artifact_path,
        "timeout-minutes": timeout_minutes,
    }

    # Create matrix
    matrix = {"include": []}
    for env in envs:
        matrix["include"].append(get_matrix_item(
            env,
            global_libraries=global_libraries,
            global_string_parameters=string_parameters,
            runs_on=default_runs_on,
            default_python=default_python,
        ))

    # Output matrix
    print(json.dumps(matrix, indent=2))
    with open(os.environ["GITHUB_OUTPUT"], "a") as f:
        f.write(f"matrix={json.dumps(matrix)}\n")


def get_matrix_item(env, global_libraries, global_string_parameters,
                    runs_on, default_python):

    # define spec for each matrix include (+ global_string_parameters)
    item = {
        "os": None,
        "toxenv": None,
        "python_version": None,
        "name": None,
        "pytest_flag": None,
        "libraries_brew": None,
        "libraries_brew_cask": None,
        "libraries_apt": None,
        "libraries_choco": None,
        "cache-path": None,
        "cache-key": None,
        "cache-restore-keys": None,
        "artifact-name": None,
        "artifact-path": None,
        "timeout-minutes": None,
    }
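    # Per-env keys (e.g. "posargs", "conda", "timeout-minutes") override the
    # workflow-level defaults in the loop below.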
    for string_param, default in global_string_parameters.items():
        env_value = env.get(string_param)
        item[string_param] = default if env_value is None else env_value

    # set os and toxenv
    for k, v in runs_on.items():
        if k in env:
            platform = k
            item["os"] = env.get("runs-on", v)
            item["toxenv"] = env[k]
    assert item["os"] is not None and item["toxenv"] is not None

    # set python_version
    python_version = env.get("python-version")
    m = re.search(r"^py(2|3)([0-9]+t?)", item["toxenv"])
    if python_version is not None:
        item["python_version"] = python_version
    elif m is not None:
        major, minor = m.groups()
        item["python_version"] = f"{major}.{minor}"
    else:
        item["python_version"] = env.get("default_python") or default_python

    # if Python is <3.10 we can't use macos-latest which is arm64
    try:
        if Version(item["python_version"]) < Version('3.10') and item["os"] == "macos-latest":
            item["os"] = "macos-13"
    except InvalidVersion:
        # python_version might be for example 'pypy-3.10' which won't parse
        pass

    # set name
    item["name"] = env.get("name") or f'{item["toxenv"]} ({item["os"]})'

    # set artifact-name (replace invalid path characters)
    item["artifact-name"] = re.sub(r"[\\ /:<>|*?\"']", "-", item["name"])
    item["artifact-name"] = re.sub(r"-+", "-", item["artifact-name"])

    # set pytest_flag
    item["pytest_flag"] = ""
    sep = r"\\" if platform == "windows" else "/"
    if item["pytest"] == "true" and "codecov" in item.get("coverage", ""):
        item["pytest_flag"] += (
            rf"--cov-report=xml:${{GITHUB_WORKSPACE}}{sep}coverage.xml ")
    if item["pytest"] == "true" and item["pytest-results-summary"] == "true":
        item["pytest_flag"] += rf"--junitxml ${{GITHUB_WORKSPACE}}{sep}results.xml "

    # set libraries
    env_libraries = env.get("libraries")
    if isinstance(env_libraries, str) and len(env_libraries.strip()) == 0:
        env_libraries = {}  # no libraries requested for environment
    libraries = global_libraries if env_libraries is None else env_libraries
    for manager in ["brew", "brew_cask", "apt", "choco"]:
        item[f"libraries_{manager}"] = " ".join(libraries.get(manager, []))

    # set "auto" conda value
    if item["conda"] == "auto":
        item["conda"] = "true" if "conda" in item["toxenv"] else "false"

    # inject toxdeps for conda
    if item["conda"] == "true" and "tox-conda" not in item["toxdeps"].lower():
        item["toxdeps"] = ("tox-conda " + item["toxdeps"]).strip()

    # make timeout-minutes a number
    item["timeout-minutes"] = int(item["timeout-minutes"])

    # verify values
    assert item["pytest"] in {"true", "false"}
    assert item["conda"] in {"true", "false"}
    assert item["display"] in {"true", "false"}

    return item


if __name__ == "__main__":
    load_tox_targets()
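
# For illustration: given an envs input of "- linux: py312" and default
# workflow inputs, load_tox_targets() emits a matrix item roughly like
#   {"os": "ubuntu-latest", "toxenv": "py312", "python_version": "3.12",
#    "name": "py312 (ubuntu-latest)", "artifact-name": "py312-(ubuntu-latest)",
#    "pytest": "true", "conda": "false", "pytest_flag": "",
#    "timeout-minutes": 360, ...}
# wrapped in {"include": [...]} and written to $GITHUB_OUTPUT for the tox job.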
 130 | - run: cat tox_matrix.py 131 | - id: set-outputs 132 | run: | 133 | python tox_matrix.py --envs "${{ inputs.envs }}" --libraries "${{ inputs.libraries }}" \ 134 | --posargs "${{ inputs.posargs }}" --toxdeps "${{ inputs.toxdeps }}" \ 135 | --toxargs "${{ inputs.toxargs }}" --pytest "${{ inputs.pytest }}" \ 136 | --pytest-results-summary "${{ inputs.pytest-results-summary }}" \ 137 | --coverage "${{ inputs.coverage }}" --conda "${{ inputs.conda }}" \ 138 | --setenv "${{ inputs.setenv }}" \ 139 | --display "${{ inputs.display }}" --cache-path "${{ inputs.cache-path }}" \ 140 | --cache-key "${{ inputs.cache-key }}" --cache-restore-keys "${{ inputs.cache-restore-keys }}" \ 141 | --artifact-path "${{ inputs.artifact-path }}" \ 142 | --runs-on "${{ inputs.runs-on }}" --default-python "${{ inputs.default_python }}" \ 143 | --timeout-minutes "${{ inputs.timeout-minutes }}" 144 | shell: sh 145 | 146 | tox: 147 | name: ${{ matrix.name }} 148 | needs: [envs] 149 | runs-on: ${{ matrix.os }} 150 | timeout-minutes: ${{ matrix.timeout-minutes }} 151 | strategy: 152 | fail-fast: ${{ inputs.fail-fast }} 153 | matrix: ${{fromJSON(needs.envs.outputs.matrix)}} 154 | defaults: 155 | run: 156 | shell: bash -l {0} 157 | steps: 158 | 159 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 160 | with: 161 | fetch-depth: 0 162 | lfs: true 163 | submodules: ${{ inputs.submodules }} 164 | ref: ${{ inputs.checkout_ref }} 165 | 166 | - name: Cache ${{ matrix.cache_key }} 167 | if: ${{ matrix.cache-path != '' && matrix.cache-key != '' }} 168 | uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 169 | with: 170 | path: ${{ matrix.cache-path }} 171 | key: ${{ matrix.cache-key }} 172 | restore-keys: ${{ matrix.cache-restore-keys }} 173 | 174 | - name: Install dependencies 175 | uses: ConorMacBride/install-package@3e7ad059e07782ee54fa35f827df52aae0626f30 # v1.1.0 176 | with: 177 | brew: ${{ matrix.libraries_brew }} 178 | brew-cask: ${{ matrix.libraries_brew_cask }} 179 | apt: ${{ matrix.libraries_apt }} 180 | choco: ${{ matrix.libraries_choco }} 181 | 182 | - name: Setup Python ${{ matrix.python_version }} 183 | if: ${{ matrix.conda != 'true' }} 184 | uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 185 | with: 186 | python-version: ${{ matrix.python_version }} 187 | allow-prereleases: true 188 | 189 | - name: Setup conda 190 | if: ${{ matrix.conda == 'true' }} 191 | uses: mamba-org/setup-micromamba@0dea6379afdaffa5d528b3d1dabc45da37f443fc # v2.0.4 192 | with: 193 | environment-name: test 194 | condarc: | 195 | channels: 196 | - conda-forge 197 | create-args: >- 198 | conda 199 | python=${{ matrix.python_version }} 200 | tox 201 | init-shell: bash 202 | cache-environment: true 203 | cache-downloads: true 204 | 205 | - id: set-env 206 | if: ${{ matrix.setenv != '' }} 207 | run: | 208 | python -m pip install PyYAML 209 | echo $SET_ENV_SCRIPT | base64 --decode > set_env.py 210 | python set_env.py "${{ matrix.setenv }}" 211 | rm set_env.py 212 | env: 213 | SET_ENV_SCRIPT: 
aW1wb3J0IGpzb24KaW1wb3J0IG9zCmltcG9ydCBzeXMKCmltcG9ydCB5YW1sCgpHSVRIVUJfRU5WID0gb3MuZ2V0ZW52KCJHSVRIVUJfRU5WIikKaWYgR0lUSFVCX0VOViBpcyBOb25lOgogICAgcmFpc2UgVmFsdWVFcnJvcigiR0lUSFVCX0VOViBub3Qgc2V0LiBNdXN0IGJlIHJ1biBpbnNpZGUgR2l0SHViIEFjdGlvbnMuIikKCkRFTElNSVRFUiA9ICJFT0YiCgoKZGVmIHNldF9lbnYoZW52KToKCiAgICBlbnYgPSB5YW1sLmxvYWQoZW52LCBMb2FkZXI9eWFtbC5CYXNlTG9hZGVyKQogICAgcHJpbnQoanNvbi5kdW1wcyhlbnYsIGluZGVudD0yKSkKCiAgICBpZiBub3QgaXNpbnN0YW5jZShlbnYsIGRpY3QpOgogICAgICAgIHRpdGxlID0gImBlbnZgIG11c3QgYmUgbWFwcGluZyIKICAgICAgICBtZXNzYWdlID0gZiJgZW52YCBtdXN0IGJlIG1hcHBpbmcgb2YgZW52IHZhcmlhYmxlcyB0byB2YWx1ZXMsIGdvdCB0eXBlIHt0eXBlKGVudil9IgogICAgICAgIHByaW50KGYiOjplcnJvciB0aXRsZT17dGl0bGV9Ojp7bWVzc2FnZX0iKQogICAgICAgIGV4aXQoMSkKCiAgICBmb3IgaywgdiBpbiBlbnYuaXRlbXMoKToKCiAgICAgICAgaWYgbm90IGlzaW5zdGFuY2Uodiwgc3RyKToKICAgICAgICAgICAgdGl0bGUgPSAiYGVudmAgdmFsdWVzIG11c3QgYmUgc3RyaW5ncyIKICAgICAgICAgICAgbWVzc2FnZSA9IGYiYGVudmAgdmFsdWVzIG11c3QgYmUgc3RyaW5ncywgYnV0IHZhbHVlIG9mIHtrfSBoYXMgdHlwZSB7dHlwZSh2KX0iCiAgICAgICAgICAgIHByaW50KGYiOjplcnJvciB0aXRsZT17dGl0bGV9Ojp7bWVzc2FnZX0iKQogICAgICAgICAgICBleGl0KDEpCgogICAgICAgIHYgPSB2LnNwbGl0KCJcbiIpCgogICAgICAgIHdpdGggb3BlbihHSVRIVUJfRU5WLCAiYSIpIGFzIGY6CiAgICAgICAgICAgIGlmIGxlbih2KSA9PSAxOgogICAgICAgICAgICAgICAgZi53cml0ZShmIntrfT17dlswXX1cbiIpCiAgICAgICAgICAgIGVsc2U6CiAgICAgICAgICAgICAgICBmb3IgbGluZSBpbiB2OgogICAgICAgICAgICAgICAgICAgIGFzc2VydCBsaW5lLnN0cmlwKCkgIT0gREVMSU1JVEVSCiAgICAgICAgICAgICAgICBmLndyaXRlKGYie2t9PDx7REVMSU1JVEVSfVxuIikKICAgICAgICAgICAgICAgIGZvciBsaW5lIGluIHY6CiAgICAgICAgICAgICAgICAgICAgZi53cml0ZShmIntsaW5lfVxuIikKICAgICAgICAgICAgICAgIGYud3JpdGUoZiJ7REVMSU1JVEVSfVxuIikKCiAgICAgICAgcHJpbnQoZiJ7a30gd3JpdHRlbiB0byBHSVRIVUJfRU5WIikKCgppZiBfX25hbWVfXyA9PSAiX19tYWluX18iOgogICAgc2V0X2VudihzeXMuYXJndlsxXSkK 214 | 215 | - name: Setup headless display 216 | if: ${{ matrix.display == 'true' }} 217 | uses: pyvista/setup-headless-display-action@52bda06d59c0fc422fc2512c9c670bf6b66616f8 # v3 218 | 219 | - name: Install tox 220 | run: python -m pip install --upgrade tox ${{ matrix.toxdeps }} 221 | 222 | - run: python -m tox -e ${{ matrix.toxenv }} ${{ matrix.toxargs }} -- ${{ matrix.pytest_flag }} ${{ matrix.posargs }} 223 | 224 | - if: ${{ (success() || failure()) && matrix.artifact-path != '' }} 225 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 226 | with: 227 | name: ${{ matrix.artifact-name }} 228 | path: ${{ matrix.artifact-path }} 229 | 230 | - if: ${{ (success() || failure()) && matrix.pytest-results-summary == 'true' && matrix.pytest == 'true' }} 231 | uses: test-summary/action@31493c76ec9e7aa675f1585d3ed6f1da69269a86 # v2.4 232 | with: 233 | paths: "**/results.xml" 234 | 235 | - name: Upload to Codecov 236 | # Even if tox fails, upload coverage 237 | if: ${{ (success() || failure()) && contains(matrix.coverage, 'codecov') && matrix.pytest == 'true' }} 238 | uses: codecov/codecov-action@0565863a31f2c772f9f0395002a31e3f06189574 # v5.4.0 239 | with: 240 | token: ${{ secrets.CODECOV_TOKEN }} 241 | -------------------------------------------------------------------------------- /.github/workflows/update_tag.yml: -------------------------------------------------------------------------------- 1 | name: Update Major Version Tag 2 | 3 | on: 4 | push: 5 | tags: 6 | - "v*" 7 | 8 | jobs: 9 | update-majorver: 10 | name: Update Major Version Tag 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: nowactions/update-majorver@f2014bbbba95b635e990ce512c5653bd0f4753fb # v1.1.2 14 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### Python: https://raw.githubusercontent.com/github/gitignore/main/Python.gitignore 2 | 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | pip-wheel-metadata/ 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | MANIFEST 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 34 | *.manifest 35 | *.spec 36 | *.c 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .coverage 46 | .coverage.* 47 | .cache 48 | nosetests.xml 49 | coverage.xml 50 | *.cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | junit/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # pyenv 81 | .python-version 82 | 83 | # celery beat schedule file 84 | celerybeat-schedule 85 | 86 | # SageMath parsed files 87 | *.sage.py 88 | 89 | # Environments 90 | .env 91 | .venv 92 | env/ 93 | venv/ 94 | ENV/ 95 | env.bak/ 96 | venv.bak/ 97 | 98 | # Spyder project settings 99 | .spyderproject 100 | .spyproject 101 | 102 | # Rope project settings 103 | .ropeproject 104 | 105 | # mkdocs documentation 106 | /site 107 | 108 | # mypy 109 | .mypy_cache/ 110 | 111 | ### https://raw.github.com/github/gitignore/main/Global/OSX.gitignore 112 | 113 | .DS_Store 114 | .AppleDouble 115 | .LSOverride 116 | 117 | # Icon must ends with two \r. 
118 | Icon 119 | 120 | 121 | # Thumbnails 122 | ._* 123 | 124 | # Files that might appear on external disk 125 | .Spotlight-V100 126 | .Trashes 127 | 128 | ### Linux: https://raw.githubusercontent.com/github/gitignore/main/Global/Linux.gitignore 129 | 130 | *~ 131 | 132 | # temporary files which can be created if a process still has a handle open of a deleted file 133 | .fuse_hidden* 134 | 135 | # KDE directory preferences 136 | .directory 137 | 138 | # Linux trash folder which might appear on any partition or disk 139 | .Trash-* 140 | 141 | # .nfs files are created when an open file is removed but is still being accessed 142 | .nfs* 143 | 144 | ### MacOS: https://raw.githubusercontent.com/github/gitignore/main/Global/macOS.gitignore 145 | 146 | # General 147 | .DS_Store 148 | .AppleDouble 149 | .LSOverride 150 | 151 | # Icon must end with two \r 152 | Icon 153 | 154 | 155 | # Thumbnails 156 | ._* 157 | 158 | # Files that might appear in the root of a volume 159 | .DocumentRevisions-V100 160 | .fseventsd 161 | .Spotlight-V100 162 | .TemporaryItems 163 | .Trashes 164 | .VolumeIcon.icns 165 | .com.apple.timemachine.donotpresent 166 | 167 | # Directories potentially created on remote AFP share 168 | .AppleDB 169 | .AppleDesktop 170 | Network Trash Folder 171 | Temporary Items 172 | .apdisk 173 | 174 | ### Windows: https://raw.githubusercontent.com/github/gitignore/main/Global/Windows.gitignore 175 | 176 | # Windows thumbnail cache files 177 | Thumbs.db 178 | ehthumbs.db 179 | ehthumbs_vista.db 180 | 181 | # Dump file 182 | *.stackdump 183 | 184 | # Folder config file 185 | [Dd]esktop.ini 186 | 187 | # Recycle Bin used on file shares 188 | $RECYCLE.BIN/ 189 | 190 | # Windows Installer files 191 | *.cab 192 | *.msi 193 | *.msix 194 | *.msm 195 | *.msp 196 | 197 | # Windows shortcuts 198 | *.lnk 199 | 200 | ### VScode: https://raw.githubusercontent.com/github/gitignore/main/Global/VisualStudioCode.gitignore 201 | .vscode/* 202 | 203 | ### Pycharm 204 | .idea 205 | .history 206 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/astral-sh/ruff-pre-commit 3 | rev: v0.11.12 4 | hooks: 5 | - id: ruff 6 | args: [ --fix ] 7 | - repo: https://github.com/PyCQA/autoflake 8 | rev: v2.3.1 9 | hooks: 10 | - id: autoflake 11 | args: 12 | - "--in-place" 13 | - "--remove-all-unused-imports" 14 | - "--remove-unused-variable" 15 | - repo: https://github.com/PyCQA/isort 16 | rev: 6.0.1 17 | hooks: 18 | - id: isort 19 | - repo: https://github.com/pre-commit/pre-commit-hooks 20 | rev: v5.0.0 21 | hooks: 22 | - id: check-ast 23 | - id: check-case-conflict 24 | - id: trailing-whitespace 25 | - id: check-yaml 26 | - id: check-added-large-files 27 | - id: end-of-file-fixer 28 | - id: mixed-line-ending 29 | - repo: local 30 | hooks: 31 | - id: encode-scripts 32 | name: encode scripts in workflows 33 | language: system 34 | entry: python update_scripts_in_yml.py 35 | always_run: true 36 | pass_filenames: false 37 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: "ubuntu-lts-latest" 5 | tools: 6 | python: "3.13" 7 | 8 | python: 9 | install: 10 | - requirements: docs/requirements.txt 11 | 12 | # Don't build any extra formats 13 | formats: [] 14 | 15 | sphinx: 16 | 
configuration: docs/source/conf.py 17 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright © 2022 OpenAstronomy Developers 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # github-actions-workflows 2 | 3 | Reusable workflows for GitHub Actions. 4 | 5 | - [Test a Python package using tox](#test-a-python-package-using-tox) 6 | - [Build and publish a Python package](#build-and-publish-a-python-package) 7 | - [Build and publish a pure Python package](#build-and-publish-a-pure-python-package) 8 | 9 | ## Test a Python package using tox 10 | 11 | This workflow makes it easy to map tox environments to GitHub Actions jobs. 12 | To use this template, your repository will need to have a `tox.ini` file. 13 | [Read the workflow documentation.](https://github-actions-workflows.openastronomy.org/en/stable/tox.html) 14 | 15 | ```yaml 16 | jobs: 17 | test: 18 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 19 | with: 20 | posargs: '-n 4' 21 | envs: | 22 | - linux: pep8 23 | pytest: false 24 | - macos: py310 25 | - windows: py39-docs 26 | libraries: 27 | choco: 28 | - graphviz 29 | coverage: 'codecov' 30 | secrets: 31 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 32 | ``` 33 | 34 | ## Build and publish a Python package 35 | 36 | Build, test and publish a Python source distribution and collection of platform-dependent wheels. 37 | [Read the workflow documentation.](https://github-actions-workflows.openastronomy.org/en/stable/publish.html) 38 | 39 | ```yaml 40 | jobs: 41 | publish: 42 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish.yml@v1 43 | with: 44 | test_extras: test 45 | test_command: pytest --pyargs test_package 46 | targets: | 47 | - linux 48 | - cp3?-macosx_x86_64 49 | secrets: 50 | pypi_token: ${{ secrets.pypi_token }} 51 | ``` 52 | 53 | ## Build and publish a pure Python package 54 | 55 | This the workflow is similar to the `publish.yml` workflow, except, instead of building wheels using cibuildwheel, a pure Python wheel and a source distribution are build, tested and published instead. 
56 | [Read the workflow documentation.](https://github-actions-workflows.openastronomy.org/en/stable/publish_pure_python.html) 57 | 58 | ```yaml 59 | jobs: 60 | publish: 61 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish_pure_python.yml@v1 62 | with: 63 | test_extras: test 64 | test_command: pytest --pyargs test_package 65 | secrets: 66 | pypi_token: ${{ secrets.pypi_token }} 67 | ``` 68 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | pydata-sphinx-theme 2 | Sphinx 3 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 
2 | # 3 | # For the full list of built-in configuration values, see the documentation: 4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 5 | 6 | # -- Project information ----------------------------------------------------- 7 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 8 | 9 | project = 'OpenAstronomy GitHub Actions Workflows' 10 | copyright = '2023, OpenAstronomy developers' 11 | author = 'OpenAstronomy developers' 12 | 13 | # -- General configuration --------------------------------------------------- 14 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 15 | 16 | extensions = [] 17 | 18 | templates_path = ['_templates'] 19 | exclude_patterns = [] 20 | 21 | # -- Options for HTML output ------------------------------------------------- 22 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 23 | 24 | html_theme = 'pydata_sphinx_theme' 25 | html_theme_options = { 26 | 'logo': { 27 | 'image_light': 'https://openastronomy.org/img/logo/logoOA_svg.png', 28 | 'image_dark': 'https://openastronomy.org/img/logo/logoOA_white_svg.png', 29 | 'text': 'GitHub Actions Workflows', 30 | 'alt_text': 'OpenAstronomy', 31 | }, 32 | 'icon_links': [ 33 | { 34 | 'name': 'GitHub', 35 | 'url': 'https://github.com/OpenAstronomy/github-actions-workflows', 36 | 'icon': 'fa-brands fa-square-github', 37 | 'type': 'fontawesome', 38 | }, 39 | ], 40 | } 41 | 42 | # Set the master doc to the index file 43 | master_doc = 'index' 44 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | OpenAstronomy GitHub Actions Workflows 2 | ====================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 1 6 | 7 | tox 8 | publish 9 | publish_pure_python 10 | -------------------------------------------------------------------------------- /docs/source/publish.rst: -------------------------------------------------------------------------------- 1 | Build and publish a Python package 2 | ---------------------------------- 3 | 4 | Build, test and publish a Python source distribution and collection of 5 | platform-dependent wheels. 6 | 7 | .. code:: yaml 8 | 9 | jobs: 10 | publish: 11 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish.yml@v1 12 | with: 13 | test_extras: test 14 | test_command: pytest --pyargs test_package 15 | targets: | 16 | - linux 17 | - cp3?-macosx_x86_64 18 | secrets: 19 | pypi_token: ${{ secrets.pypi_token }} 20 | 21 | Inputs 22 | ~~~~~~ 23 | 24 | targets 25 | ^^^^^^^ 26 | 27 | List of build targets for cibuildwheel. The list of targets must be 28 | specified as demonstrated by the default value below. Each target is 29 | built within a separate matrix job. 30 | 31 | If the target is ``linux``, ``macos`` or ``windows``, cibuildwheel is 32 | run on the latest version of that OS. 33 | 34 | Any other target is assumed to be a value for the ``CIBW_BUILD`` 35 | environment variable (e.g. ``cp3?-macosx_x86_64``). In this case the OS 36 | to run cibuildwheel on is extracted from the target. 37 | 38 | Targets which end with non-native architectures such as ``aarch64`` on linux or 39 | ``x86_64`` on macos are supported and will be emulated (on linux) or cross 40 | compiled. 41 | 42 | **Note:** ``targets`` is a *string* and must be specified as a 43 | literal block scalar using the ``|``. 
(Without the ``|``, it must also 44 | be valid YAML.) 45 | 46 | Default is: 47 | 48 | .. code:: yaml 49 | 50 | targets: | 51 | - linux 52 | - macos 53 | - windows 54 | 55 | To not build any wheels: 56 | 57 | .. code:: yaml 58 | 59 | targets: '' 60 | 61 | For additional configuration, extra arguments can be passed by making a target a dictionary. 62 | An example of this is specifying the runner for a target, such as building macos x86_64 wheels on native x86_64 runners: 63 | 64 | .. code:: yaml 65 | 66 | targets: 67 | - target: cp311-macosx_x86_64 68 | runs-on: macos-13 69 | 70 | sdist 71 | ^^^^^ 72 | 73 | Whether to build a source distribution. Default is ``true``. 74 | 75 | sdist-runs-on 76 | ^^^^^^^^^^^^^ 77 | 78 | Choose an alternative image for the runner to use for building and 79 | testing the source distribution. By default, this is ``ubuntu-latest``. 80 | 81 | test_extras 82 | ^^^^^^^^^^^ 83 | 84 | Any ``extras_requires`` modifier that should be used to install the 85 | package for testing. Default is none. 86 | If not set, cibuildwheel will use any ``test-extras`` configured in ``pyproject.toml``. 87 | 88 | test_command 89 | ^^^^^^^^^^^^ 90 | 91 | The command to run to test the package. Will be run in a temporary 92 | directory. Default is no testing. 93 | If not set, cibuildwheel will use any ``test-command`` configured in ``pyproject.toml``. 94 | 95 | env 96 | ^^^ 97 | 98 | A map of environment variables to be available when building and 99 | testing. Default is none. 100 | 101 | Due to `GitHub Actions 102 | limitations `__ 103 | this is the only way to pass environment variables from your workflow 104 | file into the publishing job. 105 | 106 | .. code:: yaml 107 | 108 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish.yml@v1 109 | with: 110 | env: | 111 | VAR1: test 112 | VAR2: | 113 | first line 114 | seconds line 115 | VAR3: testing 116 | 117 | libraries 118 | ^^^^^^^^^ 119 | 120 | Packages needed to build the source distribution for testing. Must be a 121 | string of space-separated apt packages. Default is to install nothing 122 | extra. 123 | 124 | .. warning:: 125 | These libraries are only installed on the host Linux machine. 126 | To install libraries or packages within the build environment, alter the 127 | ``cibuildwheel`` configuration to add an install command before the build, 128 | such as adding an entry to the ``tool.cibuildwheel`` table in ``pyproject.toml``: 129 | 130 | .. code:: toml 131 | 132 | [tool.cibuildwheel.linux] 133 | before-build = "apt install libfftw3-dev" 134 | 135 | [tool.cibuildwheel.macos] 136 | before-build = "brew install fftw" 137 | 138 | or by `setting a CIBW_BEFORE_BUILD_* environment variable <https://cibuildwheel.pypa.io/en/stable/options/#before-build>`__: 139 | 140 | .. code:: yaml 141 | 142 | jobs: 143 | build: 144 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish.yml@v1 145 | with: 146 | env: | 147 | CIBW_BEFORE_BUILD_LINUX: apt install libfftw3-dev 148 | CIBW_BEFORE_BUILD_MACOS: brew install fftw 149 | FFTW_DIR: /opt/homebrew/opt/fftw/lib/ 150 | targets: | 151 | - cp3*-manylinux_x86_64 152 | - cp3*-macosx_x86_64 153 | 154 | upload_to_pypi 155 | ^^^^^^^^^^^^^^ 156 | 157 | Whether to upload to PyPI after successful builds. The default is to 158 | upload to PyPI when tags that start with ``v`` are pushed. A boolean can 159 | be passed as ``true`` (always upload) or ``false`` (never upload) either 160 | explicitly or as a boolean expression (``${{ }}``).
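For instance, a minimal sketch of the boolean-expression form (the ``release`` event condition is purely illustrative, not something the workflow requires):

.. code:: yaml

   uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish.yml@v1
   with:
     # Evaluates to the string 'true' only when the workflow was triggered by a release event
     upload_to_pypi: ${{ github.event_name == 'release' }}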
161 | 162 | Alternatively, a string can be passed to match the start of a tag ref. 163 | For example, ``'refs/tags/v'`` (default) will upload tags that begin 164 | with ``v``, and ``'refs/tags/'`` will upload on all pushed tags. 165 | 166 | .. code:: yaml 167 | 168 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish.yml@v1 169 | with: 170 | upload_to_pypi: refs/tags/ 171 | 172 | repository_url 173 | ^^^^^^^^^^^^^^ 174 | 175 | The PyPI repository URL to use. Default is the main PyPI repository. 176 | 177 | upload_to_anaconda 178 | ^^^^^^^^^^^^^^^^^^ 179 | 180 | Whether to upload to Anaconda.org after successful builds. The default 181 | is to not upload. A boolean can be passed as ``true`` (always upload) or 182 | ``false`` (never upload) either explicitly or as a boolean expression 183 | (``${{ }}``). 184 | 185 | anaconda_user 186 | ^^^^^^^^^^^^^ 187 | 188 | Anaconda.org user or organisation. Required if ``upload_to_anaconda`` is 189 | true. 190 | 191 | anaconda_package 192 | ^^^^^^^^^^^^^^^^ 193 | 194 | Anaconda.org package. Required if ``upload_to_anaconda`` is true. 195 | 196 | anaconda_keep_n_latest 197 | ^^^^^^^^^^^^^^^^^^^^^^ 198 | 199 | If specified, keep only this number of versions (starting from the most 200 | recent) and remove older versions. This can be useful to prevent a 201 | build-up of too many files when uploading developer versions. 202 | 203 | fail-fast 204 | ^^^^^^^^^ 205 | 206 | Whether to cancel all in-progress jobs if any job fails. Default is 207 | ``false``. 208 | 209 | timeout-minutes 210 | ^^^^^^^^^^^^^^^ 211 | 212 | The maximum number of minutes to let a build job run before GitHub 213 | automatically cancels it. Default is ``360``. 214 | 215 | submodules 216 | ^^^^^^^^^^ 217 | 218 | Whether to checkout submodules. Default is ``true``. 219 | 220 | Secrets 221 | ~~~~~~~ 222 | 223 | pypi_token 224 | ^^^^^^^^^^ 225 | 226 | The authentication token to access the PyPI repository. 227 | 228 | anaconda_token 229 | ^^^^^^^^^^^^^^ 230 | 231 | The authentication token to access the Anaconda.org repository. This 232 | token should have the scope ``api:write`` (allow write access to the API site). 233 | -------------------------------------------------------------------------------- /docs/source/publish_pure_python.rst: -------------------------------------------------------------------------------- 1 | Build and publish a pure Python package 2 | --------------------------------------- 3 | 4 | This workflow is similar to the ``publish.yml`` workflow, except that, 5 | instead of building wheels using cibuildwheel, a pure Python wheel and a 6 | source distribution are built, tested and published. 7 | 8 | .. code:: yaml 9 | 10 | jobs: 11 | publish: 12 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish_pure_python.yml@v1 13 | with: 14 | test_extras: test 15 | test_command: pytest --pyargs test_package 16 | secrets: 17 | pypi_token: ${{ secrets.pypi_token }} 18 | 19 | Inputs 20 | ~~~~~~ 21 | 22 | runs-on 23 | ^^^^^^^ 24 | 25 | Choose an alternative image for the runner to use for building and 26 | testing the source distribution and wheel. By default, this is 27 | ``ubuntu-latest``. 28 | 29 | test_extras 30 | ^^^^^^^^^^^ 31 | 32 | Any ``extras_requires`` modifier that should be used to install the 33 | package for testing. Default is none. 34 | 35 | test_command 36 | ^^^^^^^^^^^^ 37 | 38 | The command to run to test the package. Will be run in a temporary 39 | directory. Default is no testing.
40 | 41 | env 42 | ^^^ 43 | 44 | A map of environment variables to be available when building and 45 | testing. Default is none. 46 | 47 | Due to `GitHub Actions 48 | limitations `__ 49 | this is the only way to pass environment variables from your workflow 50 | file into the publishing job. 51 | 52 | .. code:: yaml 53 | 54 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish_pure_python.yml@v1 55 | with: 56 | env: | 57 | VAR1: test 58 | VAR2: | 59 | first line 60 | seconds line 61 | VAR3: testing 62 | 63 | libraries 64 | ^^^^^^^^^ 65 | 66 | Packages needed to build the source distribution for testing. Must be a 67 | string of space-separated apt packages. Default is install nothing 68 | extra. 69 | 70 | python-version 71 | ^^^^^^^^^^^^^^ 72 | 73 | The version of Python used to test and build the package. By default, 74 | this is ``3.x``. 75 | 76 | upload_to_pypi 77 | ^^^^^^^^^^^^^^ 78 | 79 | Whether to upload to PyPI after successful builds. The default is to 80 | upload to PyPI when tags that start with ``v`` are pushed. A boolean can 81 | be passed as ``true`` (always upload) or ``false`` (never upload) either 82 | explicitly or as a boolean expression (``${{ }}``). 83 | 84 | Alternatively, a string can be passed to match the start of a tag ref. 85 | For example, ``'refs/tags/v'`` (default) will upload tags that begin 86 | with ``v``, and ``'refs/tags/'`` will upload on all pushed tags. 87 | 88 | .. code:: yaml 89 | 90 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish_pure_python.yml@v1 91 | with: 92 | upload_to_pypi: refs/tags/ 93 | 94 | repository_url 95 | ^^^^^^^^^^^^^^ 96 | 97 | The PyPI repository URL to use. Default is the main PyPI repository. 98 | 99 | upload_to_anaconda 100 | ^^^^^^^^^^^^^^^^^^ 101 | 102 | Whether to upload to Anaconda.org after successful builds. The default 103 | is to not upload. A boolean can be passed as ``true`` (always upload) or 104 | ``false`` (never upload) either explicitly or as a boolean expression 105 | (``${{ }}``). 106 | 107 | anaconda_user 108 | ^^^^^^^^^^^^^ 109 | 110 | Anaconda.org user or organisation. Required if ``upload_to_anaconda`` is 111 | true. 112 | 113 | anaconda_package 114 | ^^^^^^^^^^^^^^^^ 115 | 116 | Anaconda.org package. Required if ``upload_to_anaconda`` is true. 117 | 118 | anaconda_keep_n_latest 119 | ^^^^^^^^^^^^^^^^^^^^^^ 120 | 121 | If specified, keep only this number of versions (starting from the most 122 | recent) and remove older versions. This can be useful to prevent a 123 | build-up of too many files when uploading developer versions. 124 | 125 | timeout-minutes 126 | ^^^^^^^^^^^^^^^ 127 | 128 | The maximum number of minutes to let the workflow run before GitHub 129 | automatically cancels it. Default is ``360``. 130 | 131 | submodules 132 | ^^^^^^^^^^ 133 | 134 | Whether to checkout submodules. Default is ``true``. 135 | 136 | Secrets 137 | ~~~~~~~ 138 | 139 | pypi_token 140 | ^^^^^^^^^^ 141 | 142 | The authentication token to access the PyPI repository. 143 | 144 | anaconda_token 145 | ^^^^^^^^^^^^^^ 146 | 147 | The authentication token to access the Anaconda.org repository. This 148 | token should have the scope ``api:write`` (allow write access to the API site). 
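Putting the Anaconda.org inputs above together, here is a minimal sketch of a configuration that skips PyPI and uploads to Anaconda.org instead (the user and package names are placeholders):

.. code:: yaml

   jobs:
     publish:
       uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish_pure_python.yml@v1
       with:
         upload_to_pypi: false
         upload_to_anaconda: true
         anaconda_user: my-organisation
         anaconda_package: my-package
         # Keep only the ten most recent uploads (illustrative value)
         anaconda_keep_n_latest: 10
       secrets:
         anaconda_token: ${{ secrets.anaconda_token }}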
149 | -------------------------------------------------------------------------------- /docs/source/tox.rst: -------------------------------------------------------------------------------- 1 | Test a Python package using tox 2 | ------------------------------- 3 | 4 | This workflow makes it easy to map tox environments to GitHub Actions 5 | jobs. To use this template, your repository will need to have a 6 | ``tox.ini`` file. 7 | 8 | .. code:: yaml 9 | 10 | jobs: 11 | test: 12 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 13 | with: 14 | posargs: '-n 4' 15 | envs: | 16 | - linux: pep8 17 | pytest: false 18 | - macos: py310 19 | - windows: py39-docs 20 | libraries: 21 | choco: 22 | - graphviz 23 | coverage: 'codecov' 24 | secrets: 25 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 26 | 27 | Inputs 28 | ~~~~~~ 29 | 30 | A specification of tox environments must be passed to the ``envs`` 31 | input. There are a number of other inputs. All of these inputs (except 32 | ``submodules``) can also be specified under each tox environment to 33 | overwrite the global value. 34 | 35 | In the following example ``test1`` will pass ``--arg-local`` to pytest, 36 | while ``test2`` will pass ``--arg-global`` to pytest, 37 | 38 | .. code:: yaml 39 | 40 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 41 | with: 42 | posargs: '--arg-global' 43 | envs: | 44 | - linux: test1 45 | posargs: '--arg-local' 46 | - linux: test2 47 | 48 | envs 49 | ^^^^ 50 | 51 | Array of tox environments to test. Required input. 52 | 53 | .. code:: yaml 54 | 55 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 56 | with: 57 | envs: | 58 | - : 59 | - : 60 | 61 | where ```` is the either ``linux``, ``macos`` or ``windows``, and 62 | ```` is the name of the tox environment to run. 63 | 64 | **Note:** ``envs`` is a *string* and must be specified as a literal 65 | block scalar using the ``|``. (Without the ``|``, it must also be valid 66 | YAML.) 67 | 68 | Example: 69 | 70 | .. code:: yaml 71 | 72 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 73 | with: 74 | envs: | 75 | - linux: pep8 76 | - linux: py39 77 | - macos: py38-docs 78 | name: build_docs 79 | - windows: py310-conda 80 | 81 | The name of the GitHub Actions job can be changed with the ``name`` 82 | option as shown above. By default, ``name`` will be the name of the tox 83 | environment. 84 | 85 | If the Python version includes a ``t`` suffix, such as ``py313t``, then 86 | a free-threaded Python interpreter will be used. 87 | 88 | libraries 89 | ^^^^^^^^^ 90 | 91 | Additional packages to install using apt (only on Linux), brew and brew 92 | cask (only on macOS), and choco (only on Windows). 93 | 94 | Global definition: 95 | 96 | .. code:: yaml 97 | 98 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 99 | with: 100 | libraries: | 101 | apt: 102 | - package1 103 | - package2 104 | brew: 105 | - package3 106 | brew-cask: 107 | - package4 108 | choco: 109 | - package5 110 | 111 | **Note:** ``libraries`` is a *string* and must be specified as a 112 | literal block scalar using the ``|``. (Without the ``|``, it must also 113 | be valid YAML.) 114 | 115 | ``envs`` definition: 116 | 117 | .. code:: yaml 118 | 119 | with: 120 | envs: | 121 | - linux: py39 122 | libraries: 123 | apt: 124 | - package1 125 | 126 | posargs 127 | ^^^^^^^ 128 | 129 | Positional arguments for the ``{posargs}`` replacement in an underlying 130 | test command within tox. 
Default is none. 131 | 132 | toxdeps 133 | ^^^^^^^ 134 | 135 | Additional tox dependencies. This string is included at the end of the 136 | ``pip install`` command when installing tox. Default is none. For example, 137 | to leverage the `uv `__ package manager you can specify 138 | ``toxdeps: tox-uv`` to use the `tox-uv `__ plugin. 139 | 140 | toxargs 141 | ^^^^^^^ 142 | 143 | Positional arguments for tox. Default is none. 144 | 145 | pytest 146 | ^^^^^^ 147 | 148 | Whether pytest is run by the tox environment. This determines if 149 | additional pytest positional arguments should be passed to tox. These 150 | arguments are to assist with saving test coverage reports. Default is 151 | ``true``. 152 | 153 | pytest-results-summary 154 | ^^^^^^^^^^^^^^^^^^^^^^ 155 | 156 | Whether test results from pytest are shown in the 157 | `$GITHUB_STEP_SUMMARY `__. 158 | Default is ``false``. 159 | 160 | This option has no effect if ``pytest`` is ``false``. 161 | 162 | coverage 163 | ^^^^^^^^ 164 | 165 | A space separated list of coverage providers to upload to. Currently 166 | only ``codecov`` is supported. Default is to not upload coverage 167 | reports. 168 | 169 | See also, ``CODECOV_TOKEN`` secret. 170 | 171 | This option has no effect if ``pytest`` is ``false``. 172 | 173 | conda 174 | ^^^^^ 175 | 176 | Whether to test within a conda environment using ``tox-conda``. Options 177 | are ``'auto'`` (default), ``'true'`` and ``'false'``. 178 | 179 | If ``'auto'``, conda will be used if the tox environment names contains 180 | “conda”. For example, ``'auto'`` would enable conda for tox environments 181 | named ``py39-conda``, ``conda-test`` or even ``py38-secondary``. 182 | 183 | setenv 184 | ^^^^^^ 185 | 186 | A map of environment variables to be available when testing. Default is 187 | none. 188 | 189 | Global definition: 190 | 191 | .. code:: yaml 192 | 193 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 194 | with: 195 | setenv: | 196 | VAR1: test 197 | VAR2: | 198 | first line 199 | seconds line 200 | VAR3: testing 201 | 202 | **Note:** ``setenv`` is a *string* and must be specified as a 203 | literal block scalar using the ``|``. (Without the ``|``, it must also 204 | be valid YAML.) 205 | 206 | ``envs`` definition: 207 | 208 | .. code:: yaml 209 | 210 | with: 211 | envs: | 212 | - linux: py39 213 | setenv: | 214 | VAR1: test 215 | VAR2: | 216 | first line 217 | seconds line 218 | VAR3: testing 219 | 220 | display 221 | ^^^^^^^ 222 | 223 | Whether to setup a headless display. This uses the 224 | ``pyvista/setup-headless-display-action@v1`` GitHub Action. Default is 225 | ``false``. 226 | 227 | cache-path 228 | ^^^^^^^^^^ 229 | 230 | A list of files, directories, and wildcard patterns to cache and 231 | restore. Passed to 232 | https://github.com/actions/cache ``path`` input. 233 | Optional. 234 | 235 | In this example, during the ``core_test`` job the ``sample_data`` is 236 | retrieved as usual and cached at the end of the job, however, during the 237 | ``detailed_tests`` jobs the ``sample_data`` is restored from the cache: 238 | 239 | .. 
code:: yaml 240 | 241 | jobs: 242 | core_test: 243 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 244 | with: 245 | cache-path: sample_data/ 246 | cache-key: sample-${{ github.run_id }} 247 | envs: | 248 | - linux: py39 249 | detailed_tests: 250 | needs: [core_test] 251 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 252 | with: 253 | cache-path: sample_data/ 254 | cache-key: sample-${{ github.run_id }} 255 | envs: | 256 | - macos: py39 257 | - windows: py39 258 | 259 | In this example, the particular set of ``sample_data`` and 260 | ``processed_data`` needed for the job are restored from the cache if the 261 | manifest file has not been modified. As the repository is not checked 262 | out when calling the workflow, we need to find the hash of the files in 263 | a separate job: 264 | 265 | .. code:: yaml 266 | 267 | jobs: 268 | setup: 269 | runs-on: ubuntu-latest 270 | outputs: 271 | data-hash: ${{ steps.data-hash.outputs.hash }} 272 | compressed-data-hash: ${{ steps.compressed-data-hash.outputs.hash }} 273 | steps: 274 | - uses: actions/checkout@v3 275 | - id: data-hash 276 | run: echo "hash=${{ hashFiles('**/data_urls.json') }}" >> $GITHUB_OUTPUT 277 | - id: compressed-data-hash 278 | run: echo "hash=${{ hashFiles('**/compressed_data_urls.json') }}" >> $GITHUB_OUTPUT 279 | tests: 280 | needs: [setup] 281 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 282 | with: 283 | cache-path: | 284 | sample_data/ 285 | processed_data/ 286 | envs: | 287 | - linux: py39 288 | cache-key: full-sample-${{ needs.setup.outputs.data-hash }} 289 | - linux: py39-compressed 290 | cache-key: compressed-sample-${{ needs.setup.outputs.compressed-data-hash }} 291 | 292 | cache-key 293 | ^^^^^^^^^ 294 | 295 | An explicit key for restoring and saving the cache. Passed to 296 | https://github.com/actions/cache ``key`` input. 297 | Optional. 298 | 299 | cache-restore-keys 300 | ^^^^^^^^^^^^^^^^^^ 301 | 302 | An ordered list of keys to use for restoring the cache if no cache hit 303 | occurred for key. Passed to 304 | https://github.com/actions/cache 305 | ``restore-keys`` input. Optional. 306 | 307 | artifact-path 308 | ^^^^^^^^^^^^^ 309 | 310 | A list of files, directories, and wildcard patterns to upload as 311 | artifacts. Passed to https://github.com/actions/upload-artifact 312 | ``path`` input. Optional. 313 | 314 | It can be defined globally: 315 | 316 | .. code:: yaml 317 | 318 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 319 | with: 320 | artifact-path: path/output/bin/ 321 | 322 | .. code:: yaml 323 | 324 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 325 | with: 326 | artifact-path: | 327 | path/output/bin/ 328 | path/output/test-results 329 | !path/**/*.tmp 330 | 331 | ``envs`` definition: 332 | 333 | .. code:: yaml 334 | 335 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 336 | with: 337 | envs: | 338 | - windows: py39 339 | artifact-path: | 340 | path/output/bin/ 341 | path/output/test-results 342 | !path/**/*.tmp 343 | 344 | runs-on 345 | ^^^^^^^ 346 | 347 | Choose an alternative image for the runner to use for each OS. By 348 | default, ``linux`` is ``ubuntu-latest``, ``macos`` is ``macos-latest`` 349 | and ``windows`` is ``windows-latest``. None, some or all OS images can 350 | be specified, and the global value can be overridden in each 351 | environment. 352 | 353 | It can be defined globally: 354 | 355 | .. 
code:: yaml 356 | 357 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 358 | with: 359 | runs-on: | 360 | linux: ubuntu-18.04 361 | macos: macos-10.15 362 | windows: windows-2019 363 | 364 | .. code:: yaml 365 | 366 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 367 | with: 368 | runs-on: | 369 | macos: macos-10.15 370 | 371 | **Note:** ``runs-on`` is a *string* and must be specified as a 372 | literal block scalar using the ``|``. (Without the ``|``, it must also 373 | be valid YAML.) 374 | 375 | ``envs`` definition: 376 | 377 | .. code:: yaml 378 | 379 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 380 | with: 381 | envs: | 382 | - windows: py39 383 | runs-on: windows-2019 384 | 385 | default_python 386 | ^^^^^^^^^^^^^^ 387 | 388 | The version of Python to use if the tox environment name does not start 389 | with ``py(2|3)[0-9]+`` or ``python-version`` is not set for the tox 390 | environment. Default is ``3.x``. 391 | 392 | For example, a tox environment ``py39-docs`` will run on Python 3.9, 393 | while a tox environment ``build_docs`` will refer to the value of 394 | ``default_python``. The ``default_python`` can also be defined within 395 | ``envs``, however, a Python version specified in the tox environment 396 | name takes priority. 397 | 398 | To force a particular Python version for a tox environment, the 399 | ``python-version`` can be included in the definition of the specific 400 | environment. The value of the ``python-version`` input will override 401 | both the Python version in the tox environment name and any 402 | ``default_python`` inputs. See 403 | https://github.com/actions/setup-python 404 | for a full list of supported values for ``python-version``. In this 405 | example, the development version of Python 3.11 and the PyPy 406 | implementation of Python 3.9 will be tested: 407 | 408 | .. code:: yaml 409 | 410 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1 411 | with: 412 | envs: | 413 | - linux: py311 414 | python-version: '3.11-dev' 415 | - linux: pypy39 416 | python-version: 'pypy-3.9' 417 | 418 | fail-fast 419 | ^^^^^^^^^ 420 | 421 | Whether to cancel all in-progress jobs if any job fails. Default is 422 | ``false``. 423 | 424 | timeout-minutes 425 | ^^^^^^^^^^^^^^^ 426 | 427 | The maximum number of minutes to let a job run before GitHub 428 | automatically cancels it. Default is ``360``. 429 | 430 | submodules 431 | ^^^^^^^^^^ 432 | 433 | Whether to checkout submodules. Default is ``true``. 434 | 435 | Secrets 436 | ~~~~~~~ 437 | 438 | CODECOV_TOKEN 439 | ^^^^^^^^^^^^^ 440 | 441 | If your repository is private, in order to upload to Codecov you need to 442 | set the ``CODECOV_TOKEN`` environment variable or pass it as a secret to 443 | the workflow. 
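As a closing illustration, a brief sketch of how the global ``fail-fast`` and ``timeout-minutes`` inputs documented above sit alongside ``envs`` (the values are illustrative):

.. code:: yaml

   jobs:
     test:
       uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v1
       with:
         # Cancel the remaining matrix jobs as soon as one environment fails
         fail-fast: true
         # Give each job at most 30 minutes instead of the default 360
         timeout-minutes: 30
         envs: |
           - linux: py312
           - windows: py312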
444 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61.0", 3 | "wheel"] 4 | build-backend = 'setuptools.build_meta' 5 | 6 | [tool.cibuildwheel.linux] 7 | environment-pass = ["GITHUB_WORKFLOW"] 8 | 9 | [tool.ruff] 10 | line-length = 100 11 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = test-package 3 | 4 | [options] 5 | packages = find: 6 | 7 | [options.extras_require] 8 | test = 9 | pytest 10 | 11 | [flake8] 12 | max-line-length = 100 13 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | if os.getenv("GITHUB_WORKFLOW") == ".github/workflows/test_publish.yml": 4 | from setuptools import Extension, setup 5 | 6 | setup(ext_modules=[Extension('test_package.simple', 7 | [os.path.join('test_package', 'simple.c')])]) 8 | else: 9 | from setuptools import setup 10 | 11 | setup() 12 | -------------------------------------------------------------------------------- /test_package/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | if os.getenv("GITHUB_WORKFLOW") == ".github/workflows/test_publish.yml": 4 | from . import simple 5 | 6 | __all__ = ["simple"] 7 | -------------------------------------------------------------------------------- /test_package/simple.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | static struct PyModuleDef moduledef = { 4 | PyModuleDef_HEAD_INIT, 5 | "simple", 6 | NULL, 7 | -1, 8 | NULL 9 | }; 10 | PyMODINIT_FUNC 11 | PyInit_simple(void) { 12 | return PyModule_Create(&moduledef); 13 | } 14 | -------------------------------------------------------------------------------- /test_package/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenAstronomy/github-actions-workflows/a2e0af7c20b84890b3a72de89c24c3382d6847fb/test_package/tests/__init__.py -------------------------------------------------------------------------------- /test_package/tests/test_example.py: -------------------------------------------------------------------------------- 1 | def test_simple(): 2 | assert True 3 | -------------------------------------------------------------------------------- /tools/load_build_targets.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import re 4 | 5 | import click 6 | import yaml 7 | 8 | MACHINE_TYPE = { 9 | "linux": "ubuntu-latest", 10 | "macos": "macos-latest", 11 | "windows": "windows-latest", 12 | } 13 | 14 | CIBW_BUILD = os.environ.get("CIBW_BUILD", "*") 15 | CIBW_ARCHS = os.environ.get("CIBW_ARCHS", "auto") 16 | 17 | 18 | @click.command() 19 | @click.option("--targets", default="") 20 | def load_build_targets(targets): 21 | """Script to load cibuildwheel targets for GitHub Actions workflow.""" 22 | # Load list of targets 23 | targets = yaml.load(targets, Loader=yaml.BaseLoader) 24 | print(json.dumps(targets, indent=2)) 25 | 26 | # Create matrix 27 | matrix = {"include": []} 28 | for target in targets: 29 | 
matrix["include"].append(get_matrix_item(target)) 30 | 31 | # Output matrix 32 | print(json.dumps(matrix, indent=2)) 33 | with open(os.environ["GITHUB_OUTPUT"], "a") as f: 34 | f.write(f"matrix={json.dumps(matrix)}\n") 35 | 36 | 37 | def get_os(target): 38 | if "macos" in target: 39 | return MACHINE_TYPE["macos"] 40 | if "win" in target: 41 | return MACHINE_TYPE["windows"] 42 | return MACHINE_TYPE["linux"] 43 | 44 | 45 | def get_cibw_build(target): 46 | if target in {"linux", "macos", "windows"}: 47 | return CIBW_BUILD 48 | return target 49 | 50 | 51 | def get_cibw_archs(target): 52 | """ 53 | Handle non-native architectures 54 | 55 | cibw allows running non-native builds on various platforms: 56 | https://cibuildwheel.pypa.io/en/stable/options/#archs 57 | 58 | This logic overrides the "auto" flag based on OS and a list of supported 59 | non-native arch if a non-native arch is given for a particular platform in 60 | targets, rather than the user having to do this manually. 61 | """ 62 | platform_archs = { 63 | # We now cross compile x86_64 on arm64 by default 64 | "macos": ["universal2", "x86_64"], 65 | # This is a list of supported eumulated arches on linux 66 | "linux": ["aarch64", "ppc64le", "s390x", "armv7l"], 67 | } 68 | for platform, archs in platform_archs.items(): 69 | if platform in target: 70 | for arch in archs: 71 | if target.endswith(arch): 72 | return arch 73 | 74 | # If no explict arch has been specified build both arm64 and x86_64 on macos 75 | if "macos" in target: 76 | return os.environ.get("CIBW_ARCHS", "arm64 x86_64") 77 | 78 | return CIBW_ARCHS 79 | 80 | 81 | def get_artifact_name(target): 82 | artifact_name = re.sub(r"[\\ /:<>|*?\"']", "-", target) 83 | artifact_name = re.sub(r"-+", "-", artifact_name) 84 | return artifact_name 85 | 86 | 87 | def get_matrix_item(target): 88 | extra_target_args = {} 89 | if isinstance(target, dict): 90 | extra_target_args = target 91 | target = extra_target_args.pop("target") 92 | return { 93 | "target": target, 94 | "runs-on": get_os(target), 95 | "CIBW_BUILD": get_cibw_build(target), 96 | "CIBW_ARCHS": get_cibw_archs(target), 97 | "artifact-name": get_artifact_name(target), 98 | **extra_target_args, 99 | } 100 | 101 | 102 | if __name__ == "__main__": 103 | load_build_targets() 104 | -------------------------------------------------------------------------------- /tools/set_env.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import sys 4 | 5 | import yaml 6 | 7 | GITHUB_ENV = os.getenv("GITHUB_ENV") 8 | if GITHUB_ENV is None: 9 | raise ValueError("GITHUB_ENV not set. 
Must be run inside GitHub Actions.") 10 | 11 | DELIMITER = "EOF" 12 | 13 | 14 | def set_env(env): 15 | 16 | env = yaml.load(env, Loader=yaml.BaseLoader) 17 | print(json.dumps(env, indent=2)) 18 | 19 | if not isinstance(env, dict): 20 | title = "`env` must be mapping" 21 | message = f"`env` must be mapping of env variables to values, got type {type(env)}" 22 | print(f"::error title={title}::{message}") 23 | exit(1) 24 | 25 | for k, v in env.items(): 26 | 27 | if not isinstance(v, str): 28 | title = "`env` values must be strings" 29 | message = f"`env` values must be strings, but value of {k} has type {type(v)}" 30 | print(f"::error title={title}::{message}") 31 | exit(1) 32 | 33 | v = v.split("\n") 34 | 35 | with open(GITHUB_ENV, "a") as f: 36 | if len(v) == 1: 37 | f.write(f"{k}={v[0]}\n") 38 | else: 39 | for line in v: 40 | assert line.strip() != DELIMITER 41 | f.write(f"{k}<<{DELIMITER}\n") 42 | for line in v: 43 | f.write(f"{line}\n") 44 | f.write(f"{DELIMITER}\n") 45 | 46 | print(f"{k} written to GITHUB_ENV") 47 | 48 | 49 | if __name__ == "__main__": 50 | set_env(sys.argv[1]) 51 | -------------------------------------------------------------------------------- /tools/tox_matrix.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import re 4 | 5 | import click 6 | import yaml 7 | from packaging.version import InvalidVersion, Version 8 | 9 | 10 | @click.command() 11 | @click.option("--envs", default="") 12 | @click.option("--libraries", default="") 13 | @click.option("--posargs", default="") 14 | @click.option("--toxdeps", default="") 15 | @click.option("--toxargs", default="") 16 | @click.option("--pytest", default="true") 17 | @click.option("--pytest-results-summary", default="false") 18 | @click.option("--coverage", default="") 19 | @click.option("--conda", default="auto") 20 | @click.option("--setenv", default="") 21 | @click.option("--display", default="false") 22 | @click.option("--cache-path", default="") 23 | @click.option("--cache-key", default="") 24 | @click.option("--cache-restore-keys", default="") 25 | @click.option("--artifact-path", default="") 26 | @click.option("--runs-on", default="") 27 | @click.option("--default-python", default="") 28 | @click.option("--timeout-minutes", default="360") 29 | def load_tox_targets(envs, libraries, posargs, toxdeps, toxargs, pytest, pytest_results_summary, 30 | coverage, conda, setenv, display, cache_path, cache_key, 31 | cache_restore_keys, artifact_path, runs_on, default_python, timeout_minutes): 32 | """Script to load tox targets for GitHub Actions workflow.""" 33 | # Load envs config 34 | envs = yaml.load(envs, Loader=yaml.BaseLoader) 35 | print(json.dumps(envs, indent=2)) 36 | 37 | # Load global libraries config 38 | global_libraries = { 39 | "brew": [], 40 | "brew-cask": [], 41 | "apt": [], 42 | "choco": [], 43 | } 44 | libraries = yaml.load(libraries, Loader=yaml.BaseLoader) 45 | if libraries is not None: 46 | global_libraries.update(libraries) 47 | print(json.dumps(global_libraries, indent=2)) 48 | 49 | # Default images to use for runners 50 | default_runs_on = { 51 | "linux": "ubuntu-latest", 52 | "macos": "macos-latest", 53 | "windows": "windows-latest", 54 | } 55 | custom_runs_on = yaml.load(runs_on, Loader=yaml.BaseLoader) 56 | if isinstance(custom_runs_on, dict): 57 | default_runs_on.update(custom_runs_on) 58 | print(json.dumps(default_runs_on, indent=2)) 59 | 60 | # Default string parameters which can be overwritten by each env 61 | string_parameters = { 62 
| "posargs": posargs, 63 | "toxdeps": toxdeps, 64 | "toxargs": toxargs, 65 | "pytest": pytest, 66 | "pytest-results-summary": pytest_results_summary, 67 | "coverage": coverage, 68 | "conda": conda, 69 | "setenv": setenv, 70 | "display": display, 71 | "cache-path": cache_path, 72 | "cache-key": cache_key, 73 | "cache-restore-keys": cache_restore_keys, 74 | "artifact-path": artifact_path, 75 | "timeout-minutes": timeout_minutes, 76 | } 77 | 78 | # Create matrix 79 | matrix = {"include": []} 80 | for env in envs: 81 | matrix["include"].append(get_matrix_item( 82 | env, 83 | global_libraries=global_libraries, 84 | global_string_parameters=string_parameters, 85 | runs_on=default_runs_on, 86 | default_python=default_python, 87 | )) 88 | 89 | # Output matrix 90 | print(json.dumps(matrix, indent=2)) 91 | with open(os.environ["GITHUB_OUTPUT"], "a") as f: 92 | f.write(f"matrix={json.dumps(matrix)}\n") 93 | 94 | 95 | def get_matrix_item(env, global_libraries, global_string_parameters, 96 | runs_on, default_python): 97 | 98 | # define spec for each matrix include (+ global_string_parameters) 99 | item = { 100 | "os": None, 101 | "toxenv": None, 102 | "python_version": None, 103 | "name": None, 104 | "pytest_flag": None, 105 | "libraries_brew": None, 106 | "libraries_brew_cask": None, 107 | "libraries_apt": None, 108 | "libraries_choco": None, 109 | "cache-path": None, 110 | "cache-key": None, 111 | "cache-restore-keys": None, 112 | "artifact-name": None, 113 | "artifact-path": None, 114 | "timeout-minutes": None, 115 | } 116 | for string_param, default in global_string_parameters.items(): 117 | env_value = env.get(string_param) 118 | item[string_param] = default if env_value is None else env_value 119 | 120 | # set os and toxenv 121 | for k, v in runs_on.items(): 122 | if k in env: 123 | platform = k 124 | item["os"] = env.get("runs-on", v) 125 | item["toxenv"] = env[k] 126 | assert item["os"] is not None and item["toxenv"] is not None 127 | 128 | # set python_version 129 | python_version = env.get("python-version") 130 | m = re.search("^py(2|3)([0-9]+t?)", item["toxenv"]) 131 | if python_version is not None: 132 | item["python_version"] = python_version 133 | elif m is not None: 134 | major, minor = m.groups() 135 | item["python_version"] = f"{major}.{minor}" 136 | else: 137 | item["python_version"] = env.get("default_python") or default_python 138 | 139 | # if Python is <3.10 we can't use macos-latest which is arm64 140 | try: 141 | if Version(item["python_version"]) < Version('3.10') and item["os"] == "macos-latest": 142 | item["os"] = "macos-13" 143 | except InvalidVersion: 144 | # python_version might be for example 'pypy-3.10' which won't parse 145 | pass 146 | 147 | # set name 148 | item["name"] = env.get("name") or f'{item["toxenv"]} ({item["os"]})' 149 | 150 | # set artifact-name (replace invalid path characters) 151 | item["artifact-name"] = re.sub(r"[\\ /:<>|*?\"']", "-", item["name"]) 152 | item["artifact-name"] = re.sub(r"-+", "-", item["artifact-name"]) 153 | 154 | # set pytest_flag 155 | item["pytest_flag"] = "" 156 | sep = r"\\" if platform == "windows" else "/" 157 | if item["pytest"] == "true" and "codecov" in item.get("coverage", ""): 158 | item["pytest_flag"] += ( 159 | rf"--cov-report=xml:${{GITHUB_WORKSPACE}}{sep}coverage.xml ") 160 | if item["pytest"] == "true" and item["pytest-results-summary"] == "true": 161 | item["pytest_flag"] += rf"--junitxml ${{GITHUB_WORKSPACE}}{sep}results.xml " 162 | 163 | # set libraries 164 | env_libraries = env.get("libraries") 165 | if 
isinstance(env_libraries, str) and len(env_libraries.strip()) == 0: 166 | env_libraries = {} # no libraries requested for environment 167 | libraries = global_libraries if env_libraries is None else env_libraries 168 | for manager in ["brew", "brew_cask", "apt", "choco"]: 169 | item[f"libraries_{manager}"] = " ".join(libraries.get(manager, [])) 170 | 171 | # set "auto" conda value 172 | if item["conda"] == "auto": 173 | item["conda"] = "true" if "conda" in item["toxenv"] else "false" 174 | 175 | # inject toxdeps for conda 176 | if item["conda"] == "true" and "tox-conda" not in item["toxdeps"].lower(): 177 | item["toxdeps"] = ("tox-conda " + item["toxdeps"]).strip() 178 | 179 | # make timeout-minutes a number 180 | item["timeout-minutes"] = int(item["timeout-minutes"]) 181 | 182 | # verify values 183 | assert item["pytest"] in {"true", "false"} 184 | assert item["conda"] in {"true", "false"} 185 | assert item["display"] in {"true", "false"} 186 | 187 | return item 188 | 189 | 190 | if __name__ == "__main__": 191 | load_tox_targets() 192 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | pep8 4 | py3{10,11,12} 5 | py3{10,11,12}-inputs-{linux,macos,windows,conda,con_da} 6 | default_python 7 | py{,py}3{10,13}-python_version 8 | libraries 9 | cache-{setup,verify} 10 | 11 | [testenv] 12 | allowlist_externals = 13 | python 14 | micromamba 15 | bcal 16 | dot 17 | bash 18 | rolldice 19 | which 20 | skip_install = true 21 | passenv = MY_VAR 22 | commands = 23 | # Check the python version is as expected 24 | python -c "import sys; assert sys.version_info.major == 3" 25 | py310: python -c "import sys; assert sys.version_info.minor == 10" 26 | py311: python -c "import sys; assert sys.version_info.minor == 11" 27 | pypy311: python -c "import sys; assert sys.version_info.minor == 11" 28 | py312: python -c "import sys; assert sys.version_info.minor == 12" 29 | py313: python -c "import sys; assert sys.version_info.minor == 13" 30 | default_python: python -c "import sys; assert sys.version_info.minor == {posargs}" 31 | python_version: python -c "import platform; assert platform.python_implementation() == '{posargs}'" 32 | # Check the OS is as expected 33 | linux: python -c "import platform; assert platform.system() == 'Linux'" 34 | macos: python -c "import platform; assert platform.system() == 'Darwin'" 35 | windows: python -c "import platform; assert platform.system() == 'Windows'" 36 | # Check is conda is being used 37 | !conda-!con_da: python -c "import os, sys; assert not os.path.exists(os.path.join(sys.prefix, 'conda-meta', 'history'))" 38 | conda,con_da: python -c "import os, sys; assert {posargs} os.path.exists(os.path.join(sys.prefix, 'conda-meta', 'history'))" 39 | # Run a command that should only succeed is the library is installed 40 | libraries: {posargs} 41 | # Verify that setenv is working 42 | setenv-global: python -c "import os; assert os.environ['MY_VAR'] == 'global_value'" 43 | setenv-local: python -c "import os; assert os.environ['MY_VAR'] == 'local_value'" 44 | # Verify that caching is working 45 | cache-verify: python -c "f = open('a/b/c/test.txt', 'r'); f.close()" 46 | cache-setup: python -c "import os; os.makedirs('a/b/c/'); f = open('a/b/c/test.txt', 'w'); f.close()" 47 | # Verify that artifact uploads are working 48 | artifact-upload: bash -c "echo 'hello world' > test.txt" 49 | # Verify that freethreaded builds are using freethreaded 
interpreter 50 | py313t: python -c "import sys; assert 'free-threading' in sys.version" 51 | 52 | [testenv:pep8] 53 | description = verify pep8 54 | deps = ruff 55 | commands = ruff check . 56 | 57 | [testenv:py3{10,11,12}{,-conda}] 58 | description = run pytest 59 | skip_install = false 60 | extras = test 61 | conda_deps = pytest 62 | commands = 63 | conda: python -c "import os, sys; assert os.path.exists(os.path.join(sys.prefix, 'conda-meta', 'history'))" 64 | conda: micromamba list 65 | pytest --pyargs test_package {posargs} 66 | -------------------------------------------------------------------------------- /update_scripts_in_yml.py: -------------------------------------------------------------------------------- 1 | import os 2 | from base64 import b64encode 3 | 4 | 5 | def base64_encode_into(script, yml_file, env_var): 6 | 7 | with open(os.path.join('tools', script), 'rb') as f: 8 | tox_matrix_base64 = b64encode(f.read()).decode('ascii') 9 | 10 | with open(os.path.join('.github', 'workflows', yml_file)) as f: 11 | tox_yml = f.read() 12 | 13 | tox_yml_lines = tox_yml.splitlines() 14 | 15 | updated = False 16 | for i in range(len(tox_yml_lines)): 17 | if tox_yml_lines[i].strip().startswith(env_var + ':'): 18 | pos = tox_yml_lines[i].index(':') 19 | tox_yml_lines[i] = tox_yml_lines[i][:pos+1] + ' ' + tox_matrix_base64 20 | updated = True 21 | if not updated: 22 | raise ValueError(f'No line containing {env_var} found') 23 | 24 | tox_yml_new = '\n'.join(tox_yml_lines) + '\n' 25 | 26 | with open(os.path.join('.github', 'workflows', yml_file), 'w') as f: 27 | f.write(tox_yml_new) 28 | 29 | 30 | base64_encode_into('tox_matrix.py', 'tox.yml', 'TOX_MATRIX_SCRIPT') 31 | base64_encode_into('load_build_targets.py', 'publish.yml', 'LOAD_BUILD_TARGETS_SCRIPT') 32 | base64_encode_into('set_env.py', 'tox.yml', 'SET_ENV_SCRIPT') 33 | base64_encode_into('set_env.py', 'publish.yml', 'SET_ENV_SCRIPT') 34 | base64_encode_into('set_env.py', 'publish_pure_python.yml', 'SET_ENV_SCRIPT') 35 | --------------------------------------------------------------------------------