├── .codecov.yml ├── .coveragerc ├── .darglint ├── .flake8 ├── .git_archival.txt ├── .gitattributes ├── .github ├── FUNDING.yml ├── actions │ └── cache-keys │ │ └── action.yml ├── reusables │ └── tox-dev │ │ └── workflow │ │ └── reusable-tox │ │ └── hooks │ │ ├── post-src-checkout │ │ └── action.yml │ │ ├── post-tox-run │ │ └── action.yml │ │ └── prepare-for-tox-run │ │ └── action.yml └── workflows │ ├── ci-cd.yml │ └── scheduled-runs.yml ├── .gitignore ├── .isort.cfg ├── .mypy.ini ├── .pre-commit-config.yaml ├── .pylintrc ├── .readthedocs.yaml ├── .yamllint ├── CONTRIBUTING.rst ├── LICENSE ├── README.rst ├── bin ├── .gitignore ├── pip_constraint_helpers.py ├── pip_wrapper.py ├── print_lockfile_base_name.py └── resolve_platform_lock_file.py ├── dependencies └── direct │ └── tox.in ├── docs ├── change-notes-layout.rst ├── conf.py ├── contributing.rst ├── index.rst ├── pkg │ └── .gitignore ├── requirements.in └── requirements.txt ├── nitpick-style.toml ├── pyproject.toml ├── pytest.ini ├── setup.cfg ├── setup.py ├── src └── sphinxcontrib │ └── towncrier │ ├── __init__.py │ ├── _data_transformers.py │ ├── _fragment_discovery.py │ ├── _scm_version.pyi │ ├── _towncrier.py │ ├── _version.py │ ├── ext.py │ └── py.typed ├── tests ├── __init__.py ├── _data_transformers_test.py ├── _fragment_discovery_test.py ├── _towncrier_test.py ├── ext_test.py ├── units_test.py └── version_test.py ├── tox.ini └── toxfile.py /.codecov.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yamllint disable rule:truthy 3 | 4 | codecov: 5 | bot: codecov 6 | notify: 7 | after_n_builds: 75 # number of test matrix+lint jobs uploading coverage 8 | wait_for_ci: false 9 | 10 | require_ci_to_pass: false 11 | 12 | token: bf9ea742-9470-4a97-bfe5-aab1553f30a6 # repo-scoped 13 | 14 | coverage: 15 | precision: 2 16 | round: down 17 | range: "70...100" 18 | 19 | status: 20 | # Only consider coverage of the code snippet changed in PR 21 | project: no 22 | patch: yes 23 | changes: no 24 | 25 | parsers: 26 | gcov: 27 | branch_detection: 28 | conditional: yes 29 | loop: yes 30 | method: no 31 | macro: no 32 | 33 | comment: 34 | layout: "header, diff" 35 | behavior: default 36 | require_changes: no 37 | 38 | ... 39 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [html] 2 | show_contexts = true 3 | skip_covered = false 4 | 5 | [paths] 6 | source = 7 | src 8 | */src 9 | *\src 10 | */lib/pypy*/site-packages 11 | */lib/python*/site-packages 12 | *\Lib\site-packages 13 | 14 | [report] 15 | # exclude_also = 16 | exclude_lines = 17 | ^\s*@pytest\.mark\.xfail 18 | fail_under = 56.67 19 | skip_covered = true 20 | skip_empty = true 21 | show_missing = true 22 | 23 | [run] 24 | branch = true 25 | cover_pylib = false 26 | # https://coverage.rtfd.io/en/latest/contexts.html#dynamic-contexts 27 | # dynamic_context = test_function # conflicts with `pytest-cov` if set here 28 | parallel = true 29 | plugins = 30 | covdefaults 31 | relative_files = true 32 | source = 33 | . 34 | source_pkgs = 35 | sphinxcontrib.towncrier 36 | -------------------------------------------------------------------------------- /.darglint: -------------------------------------------------------------------------------- 1 | [darglint] 2 | # NOTE: All `darglint` styles except for `sphinx` hit ridiculously low 3 | # NOTE: performance on some of the in-project Python modules. 
4 | # Refs: 5 | # * https://github.com/terrencepreilly/darglint/issues/186 6 | # * https://github.com/wemake-services/wemake-python-styleguide/issues/2287 7 | docstring_style = sphinx 8 | # enable = DAR104 # covered by `sphinx_autodoc_typehints` 9 | strictness = long 10 | # strictness = full 11 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | 3 | # Print the total number of errors: 4 | count = true 5 | 6 | # Don't even try to analyze these: 7 | extend-exclude = 8 | # Circle CI configs 9 | .circleci, 10 | # No need to traverse egg info dir 11 | *.egg-info, 12 | # GitHub configs 13 | .github, 14 | # Cache files of MyPy 15 | .mypy_cache, 16 | # Cache files of pytest 17 | .pytest_cache, 18 | # Temp dir of pytest-testmon 19 | .tmontmp, 20 | # Countless third-party libs in venvs 21 | .tox, 22 | # Occasional virtualenv dir 23 | .venv, 24 | # VS Code 25 | .vscode, 26 | # Temporary build dir 27 | build, 28 | # This contains sdists and wheels that we don't want to check 29 | dist, 30 | # Metadata of `pip wheel` cmd is autogenerated 31 | pip-wheel-metadata, 32 | 33 | # IMPORTANT: avoid using ignore option, always use extend-ignore instead 34 | # Completely and unconditionally ignore the following errors: 35 | extend-ignore = 36 | # Legitimate cases, no need to "fix" these violations: 37 | # E501: "line too long", its function is replaced by `flake8-length` 38 | E501, 39 | # W505: "doc line too long", its function is replaced by `flake8-length` 40 | W505, 41 | # I: flake8-isort is drunk + we have isort integrated into pre-commit 42 | I, 43 | # S101: MyPy requires `asserts`, plus they're not bad if cooked well 44 | S101, 45 | # WPS300: "Found local folder import" -- nothing bad about this 46 | WPS300, 47 | # WPS305: "Found f string" -- nothing bad about this 48 | WPS305, 49 | # WPS317 enforces weird indents 50 | WPS317, 51 | # WPS318 enforces weird indents too 52 | WPS318, 53 | # WPS326: "Found implicit string concatenation" -- nothing bad about this 54 | WPS326, 55 | 56 | # IMPORTANT: avoid using select option, always use select-ignore instead 57 | # Enable the following errors: 58 | extend-select = 59 | # B950: "line too long", longer than `max-line-length` + 10% 60 | B950, 61 | 62 | # https://wemake-python-stylegui.de/en/latest/pages/usage/formatter.html 63 | format = wemake 64 | 65 | # Let's not overcomplicate the code: 66 | max-complexity = 10 67 | 68 | # Accessibility/large fonts and PEP8 friendly. 69 | # This is being flexibly extended through the `flake8-length`: 70 | max-line-length = 79 71 | 72 | # Allow certain violations in certain files: 73 | # Please keep both sections of this list sorted, as it will be easier for others to find and add entries in the future 74 | per-file-ignores = 75 | # The following ignores have been researched and should be considered permanent 76 | # each should be preceded with an explanation of each of the error codes 77 | # If other ignores are added for a specific file in the section following this, 78 | # these will need to be added to that line as well. 
79 | 80 | # WPS412 logic of an extension is in __init__.py file 81 | # F401 imported but unused 82 | src/sphinxcontrib/towncrier/__init__.py: F401, WPS412 83 | # FIXME: WPS201 too many imports 84 | # FIXME: WPS402 too many `noqa`s 85 | src/sphinxcontrib/towncrier/ext.py: WPS201, WPS402 86 | 87 | # FIXME: toxfile is currently rather complicated, allowing these temporarily: 88 | # WPS201 Found module with too many imports: 18 > 12 89 | # WPS202 Found too many module members: 11 > 7 90 | # WPS402 Found `noqa` comments overuse: 14 91 | toxfile.py: WPS201, WPS202, WPS402 92 | 93 | # There are multiple `assert`s (S101) 94 | # and subprocesses (import – S404; call – S603) in tests; 95 | # also, using fixtures looks like shadowing the outer scope (WPS442); 96 | # furthermore, we should be able to import and test private attributes 97 | # (WPS450) and modules (WPS436), and finally it's impossible to 98 | # have <= members in tests (WPS202), including many local vars (WPS210): 99 | tests/**.py: S101, S404, S603, WPS202, WPS210, WPS436, WPS442, WPS450 100 | 101 | 102 | # Count the number of occurrences of each error/warning code and print a report: 103 | statistics = true 104 | 105 | # ## Plugin-provided settings: ## 106 | 107 | # flake8-eradicate 108 | # E800: 109 | eradicate-whitelist-extend = isort:\s+\w+ 110 | 111 | # flake8-pytest-style 112 | # PT001: 113 | pytest-fixture-no-parentheses = true 114 | # PT006: 115 | pytest-parametrize-names-type = tuple 116 | # PT007: 117 | pytest-parametrize-values-type = tuple 118 | pytest-parametrize-values-row-type = tuple 119 | # PT023: 120 | pytest-mark-no-parentheses = true 121 | 122 | # flake8-rst-docstrings 123 | rst-directives = 124 | spelling 125 | rst-roles = 126 | # Built-in Sphinx roles: 127 | class, 128 | data, 129 | file, 130 | exc, 131 | meth, 132 | mod, 133 | term, 134 | py:class, 135 | py:data, 136 | py:exc, 137 | py:meth, 138 | py:term, 139 | # Sphinx's internal role: 140 | event, 141 | 142 | # wemake-python-styleguide 143 | show-source = true 144 | -------------------------------------------------------------------------------- /.git_archival.txt: -------------------------------------------------------------------------------- 1 | node: 53943d5b210f70518189390102e5cfce5374bdb8 2 | node-date: 2025-04-18T20:53:17+02:00 3 | describe-name: v0.5.0a0-72-g53943d5 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Needed for setuptools-scm-git-archive 2 | .git_archival.txt export-subst 3 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | custom: 4 | - https://www.comebackalive.in.ua/donate 5 | - https://github.com/vshymanskyy/StandWithUkraine#for-maintainers-and-authors 6 | - https://www.paypal.me/webknjazCZ 7 | - https://webknjaz.me 8 | 9 | github: 10 | - slsh1o 11 | - webknjaz 12 | 13 | ko_fi: webknjaz 14 | 15 | liberapay: webknjaz 16 | 17 | open_collective: webknjaz 18 | 19 | # patreon: webknjaz # not in use because of the ties with ruscism 20 | 21 | thanks_dev: u/gh/webknjaz 22 | 23 | ... 
24 | -------------------------------------------------------------------------------- /.github/actions/cache-keys/action.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | outputs: 4 | cache-key-for-dep-files: 5 | description: >- 6 | A cache key string derived from the dependency declaration files. 7 | value: ${{ steps.calc-cache-key-files.outputs.files-hash-key }} 8 | 9 | runs: 10 | using: composite 11 | steps: 12 | - name: >- 13 | Calculate dependency files' combined hash value 14 | for use in the cache key 15 | id: calc-cache-key-files 16 | run: | 17 | from os import environ 18 | from pathlib import Path 19 | 20 | FILE_APPEND_MODE = 'a' 21 | 22 | files_derived_hash = '${{ 23 | hashFiles( 24 | 'tox.ini', 25 | 'pyproject.toml', 26 | '.pre-commit-config.yaml', 27 | 'pytest.ini', 28 | 'dependencies/**', 29 | 'dependencies/*/**', 30 | 'setup.cfg' 31 | ) 32 | }}' 33 | 34 | print(f'Computed file-derived hash is {files_derived_hash}.') 35 | 36 | with Path(environ['GITHUB_OUTPUT']).open( 37 | mode=FILE_APPEND_MODE, 38 | ) as outputs_file: 39 | print( 40 | f'files-hash-key={files_derived_hash}', 41 | file=outputs_file, 42 | ) 43 | shell: python 44 | 45 | ... 46 | -------------------------------------------------------------------------------- /.github/reusables/tox-dev/workflow/reusable-tox/hooks/post-src-checkout/action.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | inputs: 4 | calling-job-context: 5 | description: A JSON with the calling job inputs 6 | type: string 7 | job-dependencies-context: 8 | default: >- 9 | {} 10 | description: >- 11 | The `$ {{ needs }}` context passed from the calling workflow 12 | encoded as a JSON string. The caller is expected to form this 13 | input as follows: 14 | `job-dependencies-context: $ {{ toJSON(needs) }}`. 15 | required: false 16 | type: string 17 | 18 | runs: 19 | using: composite 20 | steps: 21 | - name: Log setting up pre-commit cache 22 | if: fromJSON(inputs.calling-job-context).toxenv == 'pre-commit' 23 | run: >- 24 | >&2 echo Caching ~/.cache/pre-commit based on 25 | the contents of '.pre-commit-config.yaml'... 26 | shell: bash 27 | - name: Cache pre-commit.com virtualenvs 28 | if: fromJSON(inputs.calling-job-context).toxenv == 'pre-commit' 29 | uses: actions/cache@v4 30 | with: 31 | path: ~/.cache/pre-commit 32 | key: >- 33 | ${{ 34 | runner.os 35 | }}-pre-commit-${{ 36 | hashFiles('.pre-commit-config.yaml') 37 | }} 38 | 39 | ... 40 | -------------------------------------------------------------------------------- /.github/reusables/tox-dev/workflow/reusable-tox/hooks/post-tox-run/action.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | inputs: 4 | calling-job-context: 5 | description: A JSON with the calling job inputs 6 | type: string 7 | job-dependencies-context: 8 | default: >- 9 | {} 10 | description: >- 11 | The `$ {{ needs }}` context passed from the calling workflow 12 | encoded as a JSON string. The caller is expected to form this 13 | input as follows: 14 | `job-dependencies-context: $ {{ toJSON(needs) }}`. 
15 | required: false 16 | type: string 17 | 18 | runs: 19 | using: composite 20 | steps: 21 | - name: Verify that the artifacts with expected names got created 22 | if: fromJSON(inputs.calling-job-context).toxenv == 'build-dists' 23 | run: > 24 | # Verify that the artifacts with expected names got created 25 | 26 | 27 | ls -1 28 | 'dist/${{ 29 | fromJSON( 30 | inputs.job-dependencies-context 31 | ).pre-setup.outputs.sdist-artifact-name 32 | }}' 33 | 'dist/${{ 34 | fromJSON( 35 | inputs.job-dependencies-context 36 | ).pre-setup.outputs.wheel-artifact-name 37 | }}' 38 | shell: bash 39 | - name: Store the distribution packages 40 | if: fromJSON(inputs.calling-job-context).toxenv == 'build-dists' 41 | uses: actions/upload-artifact@v4 42 | with: 43 | name: >- 44 | ${{ 45 | fromJSON( 46 | inputs.job-dependencies-context 47 | ).pre-setup.outputs.dists-artifact-name 48 | }} 49 | # NOTE: Exact expected file names are specified here 50 | # NOTE: as a safety measure — if anything weird ends 51 | # NOTE: up being in this dir or not all dists will be 52 | # NOTE: produced, this will fail the workflow. 53 | path: | 54 | dist/${{ 55 | fromJSON( 56 | inputs.job-dependencies-context 57 | ).pre-setup.outputs.sdist-artifact-name 58 | }} 59 | dist/${{ 60 | fromJSON( 61 | inputs.job-dependencies-context 62 | ).pre-setup.outputs.wheel-artifact-name 63 | }} 64 | retention-days: >- 65 | ${{ 66 | fromJSON( 67 | fromJSON( 68 | inputs.job-dependencies-context 69 | ).pre-setup.outputs.release-requested 70 | ) 71 | && 90 72 | || 30 73 | }} 74 | 75 | ... 76 | -------------------------------------------------------------------------------- /.github/reusables/tox-dev/workflow/reusable-tox/hooks/prepare-for-tox-run/action.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | inputs: 4 | calling-job-context: 5 | description: A JSON with the calling job inputs 6 | type: string 7 | job-dependencies-context: 8 | default: >- 9 | {} 10 | description: >- 11 | The `$ {{ needs }}` context passed from the calling workflow 12 | encoded as a JSON string. The caller is expected to form this 13 | input as follows: 14 | `job-dependencies-context: $ {{ toJSON(needs) }}`. 
15 | required: false 16 | type: string 17 | 18 | runs: 19 | using: composite 20 | steps: 21 | - name: Drop Git tags from HEAD for non-tag-create events 22 | if: >- 23 | fromJSON(inputs.calling-job-context).toxenv == 'build-dists' 24 | && !fromJSON( 25 | fromJSON( 26 | inputs.job-dependencies-context 27 | ).pre-setup.outputs.release-requested 28 | ) 29 | run: >- 30 | git tag --points-at HEAD 31 | | 32 | xargs git tag --delete 33 | shell: bash 34 | 35 | - name: Setup git user as [bot] 36 | if: >- 37 | fromJSON(inputs.calling-job-context).toxenv == 'build-dists' 38 | && ( 39 | fromJSON( 40 | fromJSON( 41 | inputs.job-dependencies-context 42 | ).pre-setup.outputs.release-requested 43 | ) 44 | || fromJSON( 45 | fromJSON( 46 | inputs.job-dependencies-context 47 | ).pre-setup.outputs.is-untagged-devel 48 | ) 49 | ) 50 | uses: fregante/setup-git-user@v2 51 | - name: >- 52 | Tag the release in the local Git repo 53 | as ${{ 54 | fromJSON( 55 | inputs.job-dependencies-context 56 | ).pre-setup.outputs.git-tag 57 | }} 58 | for setuptools-scm to set the desired version 59 | if: >- 60 | fromJSON(inputs.calling-job-context).toxenv == 'build-dists' 61 | && ( 62 | fromJSON( 63 | fromJSON( 64 | inputs.job-dependencies-context 65 | ).pre-setup.outputs.is-untagged-devel 66 | ) 67 | || fromJSON( 68 | fromJSON( 69 | inputs.job-dependencies-context 70 | ).pre-setup.outputs.release-requested 71 | ) 72 | ) 73 | run: >- 74 | git tag 75 | -m '${{ 76 | fromJSON( 77 | inputs.job-dependencies-context 78 | ).pre-setup.outputs.git-tag 79 | }}' 80 | '${{ 81 | fromJSON( 82 | inputs.job-dependencies-context 83 | ).pre-setup.outputs.git-tag 84 | }}' 85 | -- 86 | ${{ fromJSON(inputs.calling-job-context).checkout-src-git-committish }} 87 | shell: bash 88 | 89 | ... 90 | -------------------------------------------------------------------------------- /.github/workflows/ci-cd.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | name: 🧪 4 | 5 | on: 6 | merge_group: 7 | push: # publishes to TestPyPI pushes to the main branch 8 | branches-ignore: 9 | - dependabot/** # Dependabot always creates PRs 10 | - gh-readonly-queue/** # Temporary merge queue-related GH-made branches 11 | - maintenance/pip-tools-constraint-lockfiles # Lock files through PRs 12 | - maintenance/pip-tools-constraint-lockfiles-** # Lock files through PRs 13 | - patchback/backports/** # Patchback always creates PRs 14 | - pre-commit-ci-update-config # pre-commit.ci always creates a PR 15 | pull_request: 16 | ignore-paths: # changes to the cron workflow are triggered through it 17 | - .github/workflows/scheduled-runs.yml 18 | types: 19 | - opened # default 20 | - synchronize # default 21 | - reopened # default 22 | - ready_for_review # used in PRs created from the release workflow 23 | workflow_call: # a way to embed the main tests 24 | workflow_dispatch: 25 | inputs: 26 | release-version: 27 | # github.event_name == 'workflow_dispatch' 28 | # && github.event.inputs.release-version 29 | description: >- 30 | Target PEP440-compliant version to release. 31 | Please, don't prepend `v`. 32 | required: true 33 | type: string 34 | release-committish: 35 | # github.event_name == 'workflow_dispatch' 36 | # && github.event.inputs.release-committish 37 | default: '' 38 | description: >- 39 | The commit to be released to PyPI and tagged 40 | in Git as `release-version`. Normally, you 41 | should keep this empty. 
42 | type: string 43 | YOLO: 44 | default: false 45 | description: >- 46 | Set this flag to disregard the outcome of the 47 | test stage. The test results will block the 48 | release otherwise. Only use this under 49 | extraordinary circumstances to ignore the test 50 | failures and cut the release regardless. 51 | type: boolean 52 | 53 | concurrency: 54 | group: >- 55 | ${{ 56 | github.workflow 57 | }}-${{ 58 | github.ref_type 59 | }}-${{ 60 | github.event.pull_request.number || github.sha 61 | }} 62 | cancel-in-progress: true 63 | 64 | env: 65 | FORCE_COLOR: 1 # Request colored output from CLI tools supporting it 66 | MYPY_FORCE_COLOR: 1 # MyPy's color enforcement 67 | PIP_DISABLE_PIP_VERSION_CHECK: 1 # Hide "there's a newer pip" message 68 | PIP_NO_PYTHON_VERSION_WARNING: 1 # Hide "this Python is deprecated" message 69 | PIP_NO_WARN_SCRIPT_LOCATION: 1 # Hide "script dir is not in $PATH" message 70 | PRE_COMMIT_COLOR: always 71 | PROJECT_NAME: sphinxcontrib-towncrier 72 | PUBLISHING_TO_TESTPYPI_ENABLED: true 73 | PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest` 74 | PYTHONIOENCODING: utf-8 75 | PYTHONUTF8: 1 76 | TOX_PARALLEL_NO_SPINNER: 1 # Disable tox's parallel run spinner animation 77 | TOX_TESTENV_PASSENV: >- # Make tox-wrapped tools see color requests 78 | FORCE_COLOR 79 | MYPY_FORCE_COLOR 80 | NO_COLOR 81 | PIP_DISABLE_PIP_VERSION_CHECK 82 | PIP_NO_PYTHON_VERSION_WARNING 83 | PIP_NO_WARN_SCRIPT_LOCATION 84 | PRE_COMMIT_COLOR 85 | PY_COLORS 86 | PYTEST_THEME 87 | PYTEST_THEME_MODE 88 | PYTHONIOENCODING 89 | PYTHONLEGACYWINDOWSSTDIO 90 | PYTHONUTF8 91 | UPSTREAM_REPOSITORY_ID: >- 92 | 284275487 93 | 94 | run-name: >- 95 | ${{ 96 | github.event_name == 'workflow_dispatch' 97 | && format('📦 Releasing v{0}...', github.event.inputs.release-version) 98 | || '' 99 | }} 100 | ${{ 101 | github.event.pull_request.number && '🔀 PR' || '' 102 | }}${{ 103 | !github.event.pull_request.number && '🌱 Commit' || '' 104 | }} 105 | ${{ github.event.pull_request.number || github.sha }} 106 | triggered by: ${{ github.event_name }} of ${{ 107 | github.ref 108 | }} ${{ 109 | github.ref_type 110 | }} 111 | (workflow run ID: ${{ 112 | github.run_id 113 | }}; number: ${{ 114 | github.run_number 115 | }}; attempt: ${{ 116 | github.run_attempt 117 | }}) 118 | 119 | jobs: 120 | pre-setup: 121 | name: ⚙️ Pre-set global build settings 122 | 123 | runs-on: ubuntu-latest 124 | 125 | timeout-minutes: 1 126 | 127 | defaults: 128 | run: 129 | shell: python 130 | 131 | outputs: 132 | # NOTE: These aren't env vars because the `${{ env }}` context is 133 | # NOTE: inaccessible when passing inputs to reusable workflows. 
134 | dists-artifact-name: python-package-distributions 135 | dist-version: >- 136 | ${{ 137 | steps.request-check.outputs.release-requested == 'true' 138 | && github.event.inputs.release-version 139 | || steps.scm-version.outputs.dist-version 140 | }} 141 | is-untagged-devel: >- 142 | ${{ steps.untagged-check.outputs.is-untagged-devel || false }} 143 | release-requested: >- 144 | ${{ 145 | steps.request-check.outputs.release-requested || false 146 | }} 147 | is-yolo-mode: >- 148 | ${{ 149 | ( 150 | steps.request-check.outputs.release-requested == 'true' 151 | && github.event.inputs.YOLO 152 | ) 153 | && true || false 154 | }} 155 | cache-key-for-dep-files: >- 156 | ${{ steps.calc-cache-key-files.outputs.cache-key-for-dep-files }} 157 | git-tag: ${{ steps.git-tag.outputs.tag }} 158 | sdist-artifact-name: ${{ steps.artifact-name.outputs.sdist }} 159 | wheel-artifact-name: ${{ steps.artifact-name.outputs.wheel }} 160 | upstream-repository-id: ${{ env.UPSTREAM_REPOSITORY_ID }} 161 | publishing-to-testpypi-enabled: ${{ env.PUBLISHING_TO_TESTPYPI_ENABLED }} 162 | is-debug-mode: ${{ runner.debug == '1' && false || true }} 163 | 164 | steps: 165 | - name: Switch to using Python 3.11 by default 166 | uses: actions/setup-python@v5 167 | with: 168 | python-version: 3.11 169 | - name: >- 170 | Mark the build as untagged '${{ 171 | github.event.repository.default_branch 172 | }}' branch build 173 | id: untagged-check 174 | if: >- 175 | github.event_name == 'push' && 176 | github.ref == format( 177 | 'refs/heads/{0}', github.event.repository.default_branch 178 | ) 179 | run: | 180 | from os import environ 181 | from pathlib import Path 182 | 183 | FILE_APPEND_MODE = 'a' 184 | 185 | with Path(environ['GITHUB_OUTPUT']).open( 186 | mode=FILE_APPEND_MODE, 187 | ) as outputs_file: 188 | print('is-untagged-devel=true', file=outputs_file) 189 | - name: Mark the build as "release request" 190 | id: request-check 191 | if: github.event_name == 'workflow_dispatch' 192 | run: | 193 | from os import environ 194 | from pathlib import Path 195 | 196 | FILE_APPEND_MODE = 'a' 197 | 198 | with Path(environ['GITHUB_OUTPUT']).open( 199 | mode=FILE_APPEND_MODE, 200 | ) as outputs_file: 201 | print('release-requested=true', file=outputs_file) 202 | - name: Check out src from Git 203 | uses: actions/checkout@v4 204 | with: 205 | fetch-depth: >- 206 | ${{ 207 | steps.request-check.outputs.release-requested == 'true' 208 | && 1 209 | || 0 210 | }} 211 | ref: ${{ github.event.inputs.release-committish }} 212 | - name: >- 213 | Calculate dependency files' combined hash value 214 | for use in the cache key 215 | id: calc-cache-key-files 216 | uses: ./.github/actions/cache-keys 217 | - name: Set up pip cache 218 | if: >- 219 | steps.request-check.outputs.release-requested != 'true' 220 | uses: re-actors/cache-python-deps@release/v1 221 | with: 222 | cache-key-for-dependency-files: >- 223 | ${{ steps.calc-cache-key-files.outputs.cache-key-for-dep-files }} 224 | - name: Drop Git tags from HEAD for non-release requests 225 | if: >- 226 | steps.request-check.outputs.release-requested != 'true' 227 | run: >- 228 | git tag --points-at HEAD 229 | | 230 | xargs git tag --delete 231 | shell: bash 232 | - name: Set up versioning prerequisites 233 | if: >- 234 | steps.request-check.outputs.release-requested != 'true' 235 | run: >- 236 | python -m 237 | pip install 238 | --user 239 | setuptools-scm 240 | shell: bash 241 | - name: Set the current dist version from Git 242 | if: steps.request-check.outputs.release-requested != 'true' 243 | id: 
scm-version 244 | run: | 245 | from os import environ 246 | from pathlib import Path 247 | 248 | import setuptools_scm 249 | 250 | FILE_APPEND_MODE = 'a' 251 | 252 | ver = setuptools_scm.get_version( 253 | ${{ 254 | steps.untagged-check.outputs.is-untagged-devel == 'true' 255 | && 'local_scheme="no-local-version"' || '' 256 | }} 257 | ) 258 | with Path(environ['GITHUB_OUTPUT']).open( 259 | mode=FILE_APPEND_MODE, 260 | ) as outputs_file: 261 | print(f'dist-version={ver}', file=outputs_file) 262 | - name: Set the target Git tag 263 | id: git-tag 264 | run: | 265 | from os import environ 266 | from pathlib import Path 267 | 268 | FILE_APPEND_MODE = 'a' 269 | 270 | with Path(environ['GITHUB_OUTPUT']).open( 271 | mode=FILE_APPEND_MODE, 272 | ) as outputs_file: 273 | print( 274 | "tag=v${{ 275 | steps.request-check.outputs.release-requested == 'true' 276 | && github.event.inputs.release-version 277 | || steps.scm-version.outputs.dist-version 278 | }}", 279 | file=outputs_file, 280 | ) 281 | - name: Set the expected dist artifact names 282 | id: artifact-name 283 | run: | 284 | from os import environ 285 | from pathlib import Path 286 | 287 | FILE_APPEND_MODE = 'a' 288 | 289 | whl_file_prj_base_name = '${{ env.PROJECT_NAME }}'.replace('-', '_') 290 | sdist_file_prj_base_name = whl_file_prj_base_name.replace('.', '_') 291 | 292 | with Path(environ['GITHUB_OUTPUT']).open( 293 | mode=FILE_APPEND_MODE, 294 | ) as outputs_file: 295 | print( 296 | f"sdist={sdist_file_prj_base_name !s}-${{ 297 | steps.request-check.outputs.release-requested == 'true' 298 | && github.event.inputs.release-version 299 | || steps.scm-version.outputs.dist-version 300 | }}.tar.gz", 301 | file=outputs_file, 302 | ) 303 | print( 304 | f"wheel={whl_file_prj_base_name !s}-${{ 305 | steps.request-check.outputs.release-requested == 'true' 306 | && github.event.inputs.release-version 307 | || steps.scm-version.outputs.dist-version 308 | }}-py3-none-any.whl", 309 | file=outputs_file, 310 | ) 311 | 312 | build: 313 | name: >- 314 | 📦 ${{ needs.pre-setup.outputs.git-tag }} 315 | [mode: ${{ 316 | fromJSON(needs.pre-setup.outputs.is-untagged-devel) 317 | && 'test' || '' 318 | }}${{ 319 | fromJSON(needs.pre-setup.outputs.release-requested) 320 | && 'release' || '' 321 | }}${{ 322 | ( 323 | !fromJSON(needs.pre-setup.outputs.is-untagged-devel) 324 | && !fromJSON(needs.pre-setup.outputs.release-requested) 325 | ) && 'nightly' || '' 326 | }}] 327 | needs: 328 | - pre-setup 329 | uses: tox-dev/workflow/.github/workflows/reusable-tox.yml@89de3c6be3cd179adf71e28aa4ac5bef60804209 # yamllint disable-line rule:line-length 330 | with: 331 | cache-key-for-dependency-files: >- 332 | ${{ needs.pre-setup.outputs.cache-key-for-dep-files }} 333 | checkout-src-git-committish: >- 334 | ${{ github.event.inputs.release-committish }} 335 | checkout-src-git-fetch-depth: >- 336 | ${{ 337 | fromJSON(needs.pre-setup.outputs.release-requested) 338 | && 1 339 | || 0 340 | }} 341 | job-dependencies-context: >- # context for hooks 342 | ${{ toJSON(needs) }} 343 | python-version: 3.13 344 | runner-vm-os: ubuntu-latest 345 | timeout-minutes: 2 346 | toxenv: build-dists 347 | xfail: false 348 | 349 | lint: 350 | name: 🧹 Linters${{ '' }} # nest jobs under the same sidebar category 351 | needs: 352 | - build 353 | - pre-setup # transitive, for accessing settings 354 | strategy: 355 | matrix: 356 | runner-vm-os: 357 | - ubuntu-latest 358 | python-version: 359 | # - ~3.12.0-0 360 | - 3.11 361 | - >- 362 | 3.10 363 | - 3.9 364 | toxenv: 365 | - pre-commit 366 | # - 
metadata-validation 367 | # - build-docs 368 | # - coverage-docs 369 | # - doctest-docs 370 | # - linkcheck-docs 371 | # - spellcheck-docs 372 | xfail: 373 | - false 374 | fail-fast: false 375 | uses: tox-dev/workflow/.github/workflows/reusable-tox.yml@89de3c6be3cd179adf71e28aa4ac5bef60804209 # yamllint disable-line rule:line-length 376 | with: 377 | built-wheel-names: >- 378 | ${{ 379 | matrix.toxenv == 'metadata-validation' 380 | && needs.pre-setup.outputs.wheel-artifact-name 381 | || '' 382 | }} 383 | cache-key-for-dependency-files: >- 384 | ${{ needs.pre-setup.outputs.cache-key-for-dep-files }} 385 | checkout-src-git-committish: >- 386 | ${{ github.event.inputs.release-committish }} 387 | checkout-src-git-fetch-depth: >- 388 | ${{ 389 | fromJSON(needs.pre-setup.outputs.release-requested) 390 | && 1 391 | || 0 392 | }} 393 | dists-artifact-name: >- 394 | ${{ needs.pre-setup.outputs.dists-artifact-name }} 395 | post-toxenv-preparation-command: >- 396 | ${{ 397 | matrix.toxenv == 'pre-commit' 398 | && 'python -Im pre_commit install-hooks' 399 | || '' 400 | }} 401 | python-version: >- 402 | ${{ matrix.python-version }} 403 | require-successful-codecov-uploads: >- 404 | ${{ 405 | toJSON( 406 | needs.pre-setup.outputs.upstream-repository-id 407 | == github.repository_id 408 | ) 409 | }} 410 | runner-vm-os: >- 411 | ${{ matrix.runner-vm-os }} 412 | # NOTE: `pre-commit --show-diff-on-failure` and 413 | # NOTE: `sphinxcontrib-spellcheck` with Git authors allowlist enabled 414 | # NOTE: both depend on the presence of a Git repository. 415 | source-tarball-name: >- 416 | ${{ 417 | !contains( 418 | fromJSON('["pre-commit", "spellcheck-docs"]'), 419 | matrix.toxenv 420 | ) 421 | && needs.pre-setup.outputs.sdist-artifact-name 422 | || '' 423 | }} 424 | timeout-minutes: 3 425 | toxenv: >- 426 | ${{ matrix.toxenv }} 427 | xfail: >- 428 | ${{ 429 | fromJSON(needs.pre-setup.outputs.is-yolo-mode) 430 | || fromJSON(matrix.xfail) 431 | }} 432 | secrets: 433 | codecov-token: ${{ secrets.CODECOV_TOKEN }} 434 | 435 | tests: 436 | name: 🧪 Tests${{ '' }} # nest jobs under the same sidebar category 437 | 438 | needs: 439 | - build 440 | - pre-setup # transitive, for accessing settings 441 | 442 | strategy: 443 | fail-fast: >- # ${{ runner.debug }} is unavailable in this context 444 | ${{ 445 | fromJSON(needs.pre-setup.outputs.is-debug-mode) 446 | && false 447 | || true 448 | }} 449 | matrix: 450 | towncrier-version: 451 | - 24.8.0 452 | - 23.11.0 453 | - 23.6.0 454 | python-version: 455 | # NOTE: The latest and the lowest supported Pythons are prioritized 456 | # NOTE: to improve the responsiveness. It's nice to see the most 457 | # NOTE: important results first. 
458 | - 3.13 459 | - 3.9 460 | - 3.12 461 | - 3.11 462 | - >- 463 | 3.10 464 | runner-vm-os: 465 | - ubuntu-24.04 466 | - ubuntu-22.04 467 | - macos-14 468 | - macos-13 469 | - windows-latest 470 | toxenv: 471 | - py 472 | xfail: 473 | - false 474 | 475 | uses: tox-dev/workflow/.github/workflows/reusable-tox.yml@89de3c6be3cd179adf71e28aa4ac5bef60804209 # yamllint disable-line rule:line-length 476 | with: 477 | built-wheel-names: >- 478 | ${{ needs.pre-setup.outputs.wheel-artifact-name }} 479 | cache-key-for-dependency-files: >- 480 | ${{ needs.pre-setup.outputs.cache-key-for-dep-files }} 481 | check-name: >- 482 | 📝${{ 483 | matrix.towncrier-version 484 | }}@🐍${{ 485 | matrix.python-version 486 | }}@💻${{ 487 | matrix.runner-vm-os 488 | }} 489 | dists-artifact-name: >- 490 | ${{ needs.pre-setup.outputs.dists-artifact-name }} 491 | job-dependencies-context: >- # context for hooks 492 | ${{ toJSON(needs) }} 493 | python-version: >- 494 | ${{ matrix.python-version }} 495 | require-successful-codecov-uploads: >- 496 | ${{ 497 | toJSON( 498 | needs.pre-setup.outputs.upstream-repository-id 499 | == github.repository_id 500 | ) 501 | }} 502 | runner-vm-os: >- 503 | ${{ matrix.runner-vm-os }} 504 | source-tarball-name: >- 505 | ${{ needs.pre-setup.outputs.sdist-artifact-name }} 506 | timeout-minutes: 5 507 | toxenv: >- 508 | ${{ matrix.toxenv }} 509 | tox-provision-args: >- 510 | --force-dep 'towncrier ~= ${{ matrix.towncrier-version }}' 511 | tox-run-posargs: >- 512 | --cov-report=xml:.tox/.tmp/.test-results/pytest-${{ 513 | matrix.python-version 514 | }}/cobertura.xml 515 | --junitxml=.tox/.tmp/.test-results/pytest-${{ 516 | matrix.python-version 517 | }}/test.xml 518 | tox-rerun-posargs: >- 519 | --no-cov 520 | -vvvvv 521 | --lf 522 | xfail: >- 523 | ${{ 524 | fromJSON(needs.pre-setup.outputs.is-yolo-mode) 525 | || fromJSON(matrix.xfail) 526 | }} 527 | secrets: 528 | codecov-token: ${{ secrets.CODECOV_TOKEN }} 529 | 530 | check: # This job does nothing and is only used for the branch protection 531 | if: always() 532 | 533 | needs: 534 | - lint 535 | - pre-setup # transitive, for accessing settings 536 | - tests 537 | 538 | runs-on: ubuntu-latest 539 | 540 | timeout-minutes: 1 541 | 542 | steps: 543 | - name: Decide whether the needed jobs succeeded or failed 544 | uses: re-actors/alls-green@release/v1 545 | with: 546 | allowed-failures: >- 547 | ${{ 548 | fromJSON(needs.pre-setup.outputs.is-yolo-mode) 549 | && 'lint, tests' 550 | || '' 551 | }} 552 | jobs: ${{ toJSON(needs) }} 553 | 554 | publish-pypi: 555 | name: >- 556 | 📦 557 | Publish ${{ needs.pre-setup.outputs.git-tag }} to PyPI 558 | needs: 559 | - check 560 | - pre-setup # transitive, for accessing settings 561 | if: >- 562 | always() 563 | && needs.check.result == 'success' 564 | && fromJSON(needs.pre-setup.outputs.release-requested) 565 | && needs.pre-setup.outputs.upstream-repository-id == github.repository_id 566 | 567 | runs-on: ubuntu-latest 568 | 569 | timeout-minutes: 2 # docker+network are slow sometimes 570 | 571 | environment: 572 | name: pypi 573 | url: >- 574 | https://pypi.org/project/${{ env.PROJECT_NAME }}/${{ 575 | needs.pre-setup.outputs.dist-version 576 | }} 577 | 578 | permissions: 579 | contents: read # This job doesn't need to `git push` anything 580 | id-token: write # PyPI Trusted Publishing (OIDC) 581 | 582 | steps: 583 | - name: Download all the dists 584 | uses: actions/download-artifact@v4 585 | with: 586 | name: >- 587 | ${{ needs.pre-setup.outputs.dists-artifact-name }} 588 | path: dist/ 589 | - name: >- 
590 | 📦 591 | Publish ${{ needs.pre-setup.outputs.git-tag }} to PyPI 592 | 🔏 593 | uses: pypa/gh-action-pypi-publish@release/v1 594 | 595 | publish-testpypi: 596 | name: >- 597 | 📦 598 | Publish ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI 599 | needs: 600 | - check 601 | - pre-setup # transitive, for accessing settings 602 | if: >- 603 | always() 604 | && needs.check.result == 'success' 605 | && ( 606 | fromJSON(needs.pre-setup.outputs.is-untagged-devel) 607 | || fromJSON(needs.pre-setup.outputs.release-requested) 608 | ) 609 | && needs.pre-setup.outputs.upstream-repository-id == github.repository_id 610 | && fromJSON(needs.pre-setup.outputs.publishing-to-testpypi-enabled) 611 | 612 | runs-on: ubuntu-latest 613 | 614 | timeout-minutes: 2 # docker+network are slow sometimes 615 | 616 | environment: 617 | name: testpypi 618 | url: >- 619 | https://test.pypi.org/project/${{ env.PROJECT_NAME }}/${{ 620 | needs.pre-setup.outputs.dist-version 621 | }} 622 | 623 | permissions: 624 | contents: read # This job doesn't need to `git push` anything 625 | id-token: write # PyPI Trusted Publishing (OIDC) 626 | 627 | steps: 628 | - name: Download all the dists 629 | uses: actions/download-artifact@v4 630 | with: 631 | name: >- 632 | ${{ needs.pre-setup.outputs.dists-artifact-name }} 633 | path: dist/ 634 | - name: >- 635 | 📦 636 | Publish ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI 637 | 🔏 638 | uses: pypa/gh-action-pypi-publish@release/v1 639 | with: 640 | repository-url: https://test.pypi.org/legacy/ 641 | 642 | post-release-repo-update: 643 | name: >- 644 | 🏷️ 645 | Publish post-release Git tag 646 | for ${{ needs.pre-setup.outputs.git-tag }} 647 | needs: 648 | - publish-pypi 649 | - pre-setup # transitive, for accessing settings 650 | if: >- 651 | always() 652 | && needs.publish-pypi.result == 'success' 653 | 654 | runs-on: ubuntu-latest 655 | 656 | timeout-minutes: 2 657 | 658 | steps: 659 | - name: >- 660 | Check if the requested tag ${{ needs.pre-setup.outputs.git-tag }} 661 | is present and is pointing at the required commit ${{ 662 | github.event.inputs.release-committish 663 | }} 664 | id: existing-remote-tag-check 665 | run: | 666 | REMOTE_TAGGED_COMMIT_SHA="$( 667 | git ls-remote --tags --refs $(git remote get-url origin) '${{ 668 | needs.pre-setup.outputs.git-tag 669 | }}' | awk '{print $1}' 670 | )" 671 | 672 | if [[ "${REMOTE_TAGGED_COMMIT_SHA}" == '${{ 673 | github.event.inputs.release-committish 674 | }}' ]] 675 | then 676 | echo "already-exists=true" >> "${GITHUB_OUTPUT}" 677 | fi 678 | 679 | - name: Fetch the src snapshot 680 | if: steps.existing-remote-tag-check.outputs.already-exists != 'true' 681 | uses: actions/checkout@v4 682 | with: 683 | fetch-depth: 1 684 | ref: ${{ github.event.inputs.release-committish }} 685 | - name: Setup git user as [bot] 686 | if: steps.existing-remote-tag-check.outputs.already-exists != 'true' 687 | # Refs: 688 | # * https://github.community/t/github-actions-bot-email-address/17204/6 689 | # * https://github.com/actions/checkout/issues/13#issuecomment-724415212 690 | uses: fregante/setup-git-user@v2 691 | 692 | - name: >- 693 | 🏷️ 694 | Tag the release in the local Git repo 695 | as ${{ needs.pre-setup.outputs.git-tag }} 696 | if: steps.existing-remote-tag-check.outputs.already-exists != 'true' 697 | run: >- 698 | git tag 699 | -m '${{ needs.pre-setup.outputs.git-tag }}' 700 | -m 'Published at https://pypi.org/project/${{ 701 | env.PROJECT_NAME 702 | }}/${{ 703 | needs.pre-setup.outputs.dist-version 704 | }}' 705 | -m 'This release has 
been produced by the following workflow run: ${{ 706 | github.server_url 707 | }}/${{ 708 | github.repository 709 | }}/actions/runs/${{ 710 | github.run_id 711 | }}' 712 | '${{ needs.pre-setup.outputs.git-tag }}' 713 | -- 714 | ${{ github.event.inputs.release-committish }} 715 | 716 | - name: >- 717 | 🏷️ 718 | Push ${{ needs.pre-setup.outputs.git-tag }} tag corresponding 719 | to the just published release back to GitHub 720 | if: steps.existing-remote-tag-check.outputs.already-exists != 'true' 721 | run: >- 722 | git push --atomic origin 723 | '${{ needs.pre-setup.outputs.git-tag }}' 724 | 725 | slsa-provenance: 726 | name: >- 727 | 🔏 728 | Save in-toto SLSA provenance as a GitHub workflow artifact for 729 | ${{ needs.pre-setup.outputs.git-tag }} 730 | needs: 731 | - build 732 | - post-release-repo-update 733 | - pre-setup # transitive, for accessing settings 734 | 735 | permissions: 736 | actions: read 737 | id-token: write 738 | contents: write 739 | 740 | # Can't pin with hash due to how this workflow works. 741 | uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.0.0 # yamllint disable-line rule:line-length 742 | with: 743 | base64-subjects: >- 744 | ${{ 745 | fromJSON( 746 | needs.build.outputs.steps 747 | ).tox-run.outputs.combined-dists-base64-encoded-sha256-hash 748 | }} 749 | 750 | publish-github-attestations: 751 | name: >- 752 | 🔏 753 | Produce a GitHub-native Attestations for 754 | ${{ needs.pre-setup.outputs.git-tag }} 755 | needs: 756 | - post-release-repo-update 757 | - pre-setup # transitive, for accessing settings 758 | if: >- 759 | always() 760 | && needs.post-release-repo-update.result == 'success' 761 | runs-on: ubuntu-latest 762 | 763 | timeout-minutes: 3 764 | 765 | permissions: 766 | attestations: write # IMPORTANT: needed to persist attestations 767 | contents: read 768 | id-token: write # IMPORTANT: mandatory for Sigstore signing 769 | 770 | steps: 771 | - name: Download all the dists 772 | uses: actions/download-artifact@v4 773 | with: 774 | name: >- 775 | ${{ needs.pre-setup.outputs.dists-artifact-name }} 776 | path: dist/ 777 | 778 | - name: >- 779 | 🔏 780 | Generate provenance attestations for the dists 781 | uses: actions/attest-build-provenance@v1 782 | with: 783 | subject-path: | 784 | dist/${{ needs.pre-setup.outputs.sdist-artifact-name }} 785 | dist/${{ needs.pre-setup.outputs.wheel-artifact-name }} 786 | 787 | publish-github-release: 788 | name: >- 789 | 🏷️ 790 | Publish a GitHub Release for 791 | ${{ needs.pre-setup.outputs.git-tag }} 792 | needs: 793 | - post-release-repo-update 794 | - pre-setup # transitive, for accessing settings 795 | - publish-github-attestations 796 | - slsa-provenance 797 | if: >- 798 | always() 799 | && needs.post-release-repo-update.result == 'success' 800 | runs-on: ubuntu-latest 801 | 802 | timeout-minutes: 3 803 | 804 | permissions: 805 | contents: write 806 | discussions: write 807 | id-token: write # IMPORTANT: mandatory for Sigstore signing 808 | 809 | steps: 810 | - name: Download all the dists 811 | uses: actions/download-artifact@v4 812 | with: 813 | name: >- 814 | ${{ needs.pre-setup.outputs.dists-artifact-name }} 815 | path: dist/ 816 | - name: Download SLSA provenance in-toto files 817 | uses: actions/download-artifact@v4 818 | with: 819 | name: >- 820 | ${{ needs.slsa-provenance.outputs.provenance-name }} 821 | path: >- 822 | ${{ needs.slsa-provenance.outputs.provenance-name }} 823 | 824 | - name: Sign the dists with Sigstore 825 | uses: 
sigstore/gh-action-sigstore-python@v3.0.0 826 | with: 827 | inputs: >- 828 | dist/${{ needs.pre-setup.outputs.sdist-artifact-name }} 829 | dist/${{ needs.pre-setup.outputs.wheel-artifact-name }} 830 | 831 | - name: >- 832 | Publish a GitHub Release for 833 | ${{ needs.pre-setup.outputs.git-tag }} 834 | with Sigstore-signed artifacts 835 | uses: ncipollo/release-action@v1 836 | with: 837 | allowUpdates: false 838 | artifactErrorsFailBuild: false 839 | artifacts: | 840 | dist/${{ needs.pre-setup.outputs.sdist-artifact-name }} 841 | dist/${{ needs.pre-setup.outputs.sdist-artifact-name }}.sigstore.json 842 | dist/${{ needs.pre-setup.outputs.wheel-artifact-name }} 843 | dist/${{ needs.pre-setup.outputs.wheel-artifact-name }}.sigstore.json 844 | ${{ needs.slsa-provenance.outputs.provenance-name }}/* 845 | artifactContentType: raw # Because whl and tgz are of different types 846 | body: > 847 | # Release ${{ needs.pre-setup.outputs.git-tag }} 848 | 849 | 850 | This release is published to 851 | https://pypi.org/project/${{ env.PROJECT_NAME }}/${{ 852 | needs.pre-setup.outputs.dist-version 853 | }}. 854 | 855 | 856 | This release has been produced by the following workflow run: ${{ 857 | github.server_url 858 | }}/${{ 859 | github.repository 860 | }}/actions/runs/${{ 861 | github.run_id 862 | }}. 863 | # bodyFile: # FIXME: Use once Towncrier is integrated. 864 | commit: ${{ github.event.inputs.release-committish }} 865 | discussionCategory: Announcements 866 | draft: false 867 | name: ${{ needs.pre-setup.outputs.git-tag }} 868 | # omitBody: false 869 | omitBodyDuringUpdate: true 870 | omitName: false 871 | omitNameDuringUpdate: true 872 | omitPrereleaseDuringUpdate: true 873 | prerelease: false 874 | removeArtifacts: false 875 | replacesArtifacts: false 876 | tag: ${{ needs.pre-setup.outputs.git-tag }} 877 | token: ${{ secrets.GITHUB_TOKEN }} 878 | 879 | ... 880 | -------------------------------------------------------------------------------- /.github/workflows/scheduled-runs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | name: ⏳ 4 | 5 | on: 6 | pull_request: 7 | paths: # only changes to this workflow itself trigger PR testing 8 | - .github/workflows/scheduled-runs.yml 9 | schedule: 10 | - cron: 3 5 * * * # run daily at 5:03 UTC 11 | workflow_dispatch: # manual trigger 12 | 13 | run-name: >- 14 | 🌃 15 | Nightly run of 16 | ${{ 17 | github.event.pull_request.number && 'PR' || '' 18 | }}${{ 19 | !github.event.pull_request.number && 'Commit' || '' 20 | }} 21 | ${{ github.event.pull_request.number || github.sha }} 22 | triggered by: ${{ github.event_name }} of ${{ 23 | github.ref 24 | }} ${{ 25 | github.ref_type 26 | }} 27 | (workflow run ID: ${{ 28 | github.run_id 29 | }}; number: ${{ 30 | github.run_number 31 | }}; attempt: ${{ 32 | github.run_attempt 33 | }}) 34 | 35 | jobs: 36 | main-ci-cd-pipeline: 37 | name: 🧪 Main CI/CD pipeline 38 | uses: ./.github/workflows/ci-cd.yml 39 | secrets: inherit 40 | 41 | ... 
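Both workflows above also declare a `workflow_dispatch` trigger, so they can be started by hand in addition to their automatic push, pull-request and cron triggers. A minimal dispatch sketch using the GitHub CLI follows; it is not part of the repository files, and the version value is a placeholder only:

# Kick off an ad-hoc run of the nightly pipeline:
gh workflow run scheduled-runs.yml

# Request a release through the main CI/CD pipeline; `release-version` is the
# only required input and must be PEP 440-compliant, without a leading `v`
# (see the input help text in ci-cd.yml above). `1.2.3` is illustrative only:
gh workflow run ci-cd.yml -f release-version=1.2.3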
42 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # Build-time setuptools-scm generated version module 132 | /src/sphinxcontrib/towncrier/_scm_version.py 133 | 134 | # Pytest related reports 135 | /.test-results/ 136 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | # https://pycqa.github.io/isort/docs/configuration/config_files/ 2 | [settings] 3 | default_section = THIRDPARTY 4 | honor_noqa = true 5 | include_trailing_comma = true 6 | indent = 4 7 | known_frameworks = towncrier, sphinx 8 | known_testing = pytest 9 | line_length = 79 10 | lines_after_imports = 2 11 | # https://pycqa.github.io/isort/#multi-line-output-modes 12 | multi_line_output = 5 13 | no_lines_before = LOCALFOLDER 14 | sections = FUTURE, STDLIB, TESTING, FRAMEWORKS, THIRDPARTY, FIRSTPARTY, LOCALFOLDER 15 | use_parentheses = true 16 | verbose = true 17 | -------------------------------------------------------------------------------- /.mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | python_version = 3.8 3 | color_output = true 4 | error_summary = true 5 | files = 6 | src/, 7 | tests/, 8 | toxfile.py 9 | 10 | check_untyped_defs = true 11 | 12 | disallow_untyped_calls = true 13 | disallow_untyped_defs = true 14 | disallow_any_generics = true 15 | 16 | enable_error_code = 17 | ignore-without-code 18 | 19 | explicit_package_bases = true 20 | 21 | follow_imports = normal 22 | 23 | ignore_missing_imports = false 24 | 25 | mypy_path = ${MYPY_CONFIG_FILE_DIR}/bin,${MYPY_CONFIG_FILE_DIR}/src 26 | 27 | namespace_packages = true 28 | 29 | pretty = true 30 | 31 | show_column_numbers = true 32 | show_error_codes = true 33 | strict_optional = true 34 | 35 | warn_no_return = true 36 | warn_redundant_casts = true 37 | warn_unused_ignores = true 38 | 39 | [mypy-towncrier.*] 40 | ignore_missing_imports = true 41 | 42 | ; [mypy-sphinxcontrib.towncrier.*] 43 | ; ignore_missing_imports = true 44 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | ci: 4 | autoupdate_schedule: quarterly 5 | skip: 6 | - nitpick-check # can't run on pre-commit.ci while using gh:// 7 | - pylint # pylint-pytest crashes because the project isn't self-installed 8 | 9 | repos: 10 | - repo: local 11 | hooks: 12 | - id: immutable-setup-py 13 | name: Verify that setup.py stays immutable 14 | description: >- 15 | This is a sanity check that makes sure that 16 | the `setup.py` file isn't changed. 
17 | # Using Python here because using 18 | # shell test does not seem to work in CIs: 19 | entry: >- 20 | sh -c 'git hash-object setup.py 21 | | 22 | python -c raise\ 23 | SystemExit\(input\(\)\ !=\ \"38cc2638672e1f6ca56e4d5ceaf16451dd702b67\"\) 24 | ' 25 | pass_filenames: false 26 | language: system 27 | files: >- 28 | ^setup\.py$ 29 | - id: immutable-tests-init-py 30 | name: Verify that tests/__init__.py stays immutable 31 | description: >- 32 | This is a sanity check that makes sure that 33 | the `tests/__init__.py` file isn't changed. 34 | # Using Python here because using 35 | # shell test does not seem to work in CIs: 36 | entry: >- 37 | sh -c 'git hash-object tests/__init__.py 38 | | 39 | python -c raise\ 40 | SystemExit\(input\(\)\ !=\ \"674d7b907efe86dafcd2b8ddc5655fc55ddf5395\"\) 41 | ' 42 | pass_filenames: false 43 | language: system 44 | files: >- 45 | ^tests/__init__\.py$ 46 | 47 | - repo: https://github.com/asottile/add-trailing-comma.git 48 | rev: v3.1.0 49 | hooks: 50 | - id: add-trailing-comma 51 | 52 | - repo: https://github.com/asottile/pyupgrade.git 53 | rev: v3.19.1 54 | hooks: 55 | - id: pyupgrade 56 | args: 57 | - --py38-plus 58 | 59 | - repo: https://github.com/PyCQA/isort.git 60 | rev: 6.0.1 61 | hooks: 62 | - id: isort 63 | args: 64 | - --honor-noqa 65 | 66 | - repo: https://github.com/Lucas-C/pre-commit-hooks.git 67 | rev: v1.5.5 68 | hooks: 69 | - id: remove-tabs 70 | 71 | - repo: https://github.com/python-jsonschema/check-jsonschema.git 72 | rev: 0.32.1 73 | hooks: 74 | - id: check-github-workflows 75 | files: ^\.github/workflows/[^/]+$ 76 | types: 77 | - yaml 78 | - id: check-jsonschema 79 | name: Check GitHub Workflows set timeout-minutes 80 | args: 81 | - --builtin-schema 82 | - github-workflows-require-timeout 83 | files: ^\.github/workflows/[^/]+$ 84 | types: 85 | - yaml 86 | # yamllint disable rule:comments-indentation 87 | # - id: check-readthedocs 88 | # yamllint enable rule:comments-indentation 89 | 90 | - repo: https://github.com/andreoliwa/nitpick.git 91 | rev: v0.35.0 92 | hooks: 93 | - id: nitpick-check 94 | args: 95 | - -v 96 | pass_filenames: false 97 | stages: [] 98 | 99 | - repo: https://github.com/pre-commit/pygrep-hooks.git 100 | rev: v1.10.0 101 | hooks: 102 | - id: python-check-blanket-noqa 103 | - id: python-check-mock-methods 104 | - id: python-no-eval 105 | - id: python-no-log-warn 106 | - id: rst-backticks 107 | 108 | - repo: https://github.com/pre-commit/pre-commit-hooks.git 109 | rev: v5.0.0 110 | hooks: 111 | # Side-effects: 112 | - id: trailing-whitespace 113 | - id: check-merge-conflict 114 | - id: double-quote-string-fixer 115 | - id: end-of-file-fixer 116 | - id: requirements-txt-fixer 117 | exclude: >- 118 | ^(docs/requirements|requirements-build)\.txt$ 119 | # Non-modifying checks: 120 | - id: name-tests-test 121 | files: >- 122 | ^tests/[^_].*\.py$ 123 | - id: check-added-large-files 124 | - id: check-byte-order-marker 125 | - id: check-case-conflict 126 | # disabled due to pre-commit/pre-commit-hooks#159 127 | # - id: check-docstring-first 128 | - id: check-json 129 | - id: check-symlinks 130 | - id: check-yaml 131 | # args: 132 | # - --unsafe 133 | - id: detect-private-key 134 | 135 | # Heavy checks: 136 | - id: check-ast 137 | - id: debug-statements 138 | 139 | - repo: https://github.com/Lucas-C/pre-commit-hooks-markup.git 140 | rev: v1.0.1 141 | hooks: 142 | - id: rst-linter 143 | files: >- 144 | ^README\.rst$ 145 | 146 | - repo: https://github.com/PyCQA/pydocstyle.git 147 | rev: 6.3.0 148 | hooks: 149 | - id: pydocstyle 
150 | 151 | - repo: https://github.com/codespell-project/codespell.git 152 | rev: v2.4.1 153 | hooks: 154 | - id: codespell 155 | exclude: >- 156 | ^\.github/\.json-schemas/.*\.json$ 157 | 158 | - repo: https://github.com/adrienverge/yamllint.git 159 | rev: v1.37.0 160 | hooks: 161 | - id: yamllint 162 | files: \.(yaml|yml)$ 163 | types: [file, yaml] 164 | args: 165 | - --strict 166 | 167 | - repo: https://github.com/openstack/bashate.git 168 | rev: 2.1.1 169 | hooks: 170 | - id: bashate 171 | args: 172 | - -i 173 | - E006 174 | 175 | - repo: https://github.com/shellcheck-py/shellcheck-py.git 176 | rev: v0.10.0.1 177 | hooks: 178 | - id: shellcheck 179 | 180 | - repo: https://github.com/PyCQA/flake8.git 181 | rev: 7.2.0 182 | hooks: 183 | - id: flake8 184 | additional_dependencies: 185 | - flake8-2020 ~= 1.7.0 186 | - flake8-length ~= 0.3.0 187 | - flake8-pytest-style ~= 1.6.0 188 | - wemake-python-styleguide ~= 0.19.2 189 | language_version: python3 190 | 191 | - repo: https://github.com/pre-commit/mirrors-mypy.git 192 | rev: v1.15.0 193 | hooks: 194 | - id: mypy 195 | alias: mypy-py313 196 | name: MyPy, for Python 3.13 197 | additional_dependencies: 198 | - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report` 199 | - pytest 200 | - Sphinx >= 3.1.2 201 | - tox # toxfile.py 202 | - types-colorama 203 | - types-Pillow 204 | - types-Pygments 205 | - types-pytz 206 | - types-setuptools 207 | args: 208 | - --python-version=3.13 209 | - --txt-report=.tox/.tmp/.test-results/mypy--py-3.13 210 | - --cobertura-xml-report=.tox/.tmp/.test-results/mypy--py-3.13 211 | - --html-report=.tox/.tmp/.test-results/mypy--py-3.13 212 | pass_filenames: false 213 | - id: mypy 214 | alias: mypy-py311 215 | name: MyPy, for Python 3.11 216 | additional_dependencies: 217 | - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report` 218 | - pytest 219 | - Sphinx >= 3.1.2 220 | - tox # toxfile.py 221 | - types-colorama 222 | - types-Pillow 223 | - types-Pygments 224 | - types-pytz 225 | - types-setuptools 226 | args: 227 | - --python-version=3.11 228 | - --txt-report=.tox/.tmp/.test-results/mypy--py-3.11 229 | - --cobertura-xml-report=.tox/.tmp/.test-results/mypy--py-3.11 230 | - --html-report=.tox/.tmp/.test-results/mypy--py-3.11 231 | pass_filenames: false 232 | - id: mypy 233 | alias: mypy-py39 234 | name: MyPy, for Python 3.9 235 | additional_dependencies: 236 | - lxml # dep of `--txt-report`, `--cobertura-xml-report` & `--html-report` 237 | - pytest 238 | - Sphinx >= 3.1.2 239 | - tox # toxfile.py 240 | - types-colorama 241 | - types-Pillow 242 | - types-Pygments 243 | - types-pytz 244 | - types-setuptools 245 | args: 246 | - --python-version=3.9 247 | - --txt-report=.tox/.tmp/.test-results/mypy--py-3.9 248 | - --cobertura-xml-report=.tox/.tmp/.test-results/mypy--py-3.9 249 | - --html-report=.tox/.tmp/.test-results/mypy--py-3.9 250 | pass_filenames: false 251 | 252 | - repo: https://github.com/PyCQA/pylint.git 253 | rev: v3.3.6 254 | hooks: 255 | - id: pylint 256 | additional_dependencies: 257 | - covdefaults # needed by pylint-pytest due to pytest-cov loading coverage 258 | - pylint-pytest ~= 2.0.0a0 259 | - pytest 260 | - pytest-cov # needed by pylint-pytest since it picks up pytest's args 261 | - pytest-xdist # needed by pylint-pytest since it picks up pytest's args 262 | - setuptools-scm # docs 263 | - sphinx 264 | - towncrier >= 24 265 | - tox # toxfile.py 266 | 267 | ... 
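The hook suite configured above is what CI exercises through the `pre-commit` toxenv; it can also be run locally. A minimal usage sketch follows; it is not part of the repository files and assumes `pre-commit` is already installed in the active environment:

# Wire the checks into Git so they run on every commit:
pre-commit install

# Run the full suite against the whole tree, the way CI does:
pre-commit run --all-files

# Or run a single hook, e.g. only the flake8 checks:
pre-commit run flake8 --all-files

Per the `ci.skip` list at the top of the file, the `nitpick-check` hook relies on a `gh://` style reference and `pylint` expects the project to be self-installed, so those two may need extra setup to pass outside the tox-managed environments.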
268 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | 3 | # A comma-separated list of package or module names from where C extensions may 4 | # be loaded. Extensions are loading into the active Python interpreter and may 5 | # run arbitrary code. 6 | extension-pkg-whitelist= 7 | 8 | # Specify a score threshold to be exceeded before program exits with error. 9 | fail-under=10 10 | 11 | # Add files or directories to the blacklist. They should be base names, not 12 | # paths. 13 | ignore=CVS 14 | 15 | # Add files or directories matching the regex patterns to the blacklist. The 16 | # regex matches against base names, not paths. 17 | ignore-patterns= 18 | 19 | # Python code to execute, usually for sys.path manipulation such as 20 | # pygtk.require(). 21 | init-hook='import os, pathlib, sys; repo_root_path = pathlib.Path.cwd(); src_path = repo_root_path / 'src'; sys.path[:0] = [str(src_path if src_path.exists() else repo_root_path), str(repo_root_path / 'bin')]; os.environ['PYTHONPATH'] = os.sep.join(sys.path[:2])' 22 | 23 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the 24 | # number of processors available to use. 25 | # jobs = 0 26 | # FIXME: Revert to auto-detection once the pylint bug is fixed. Currently, the 27 | # FIXME: parallel execution mode causes import-related false-positives and 28 | # FIXME: tracebacks. 29 | # Ref: https://github.com/pylint-dev/pylint/issues/10147 30 | jobs = 1 31 | 32 | # Control the amount of potential inferred values when inferring a single 33 | # object. This can help the performance when dealing with large functions or 34 | # complex, nested conditions. 35 | limit-inference-results=100 36 | 37 | # List of plugins (as comma separated values of python module names) to load, 38 | # usually to register additional checkers. 39 | load-plugins= 40 | pylint.extensions.no_self_use, 41 | pylint_pytest, 42 | 43 | # Pickle collected data for later comparisons. 44 | persistent=yes 45 | 46 | # Discover python modules and packages in the file system subtree. 47 | recursive=yes 48 | 49 | # Add paths to the list of the source roots. Supports globbing patterns. The 50 | # source root is an absolute path or a path relative to the current working 51 | # directory used to determine a package namespace for modules located under the 52 | # source root. 53 | source-roots = 54 | src/, 55 | 56 | # When enabled, pylint would attempt to guess common misconfiguration and emit 57 | # user-friendly hints instead of false-positive error messages. 58 | suggestion-mode=yes 59 | 60 | # Allow loading of arbitrary C extensions. Extensions are imported into the 61 | # active Python interpreter and may run arbitrary code. 62 | unsafe-load-any-extension=no 63 | 64 | 65 | [MESSAGES CONTROL] 66 | 67 | # Only show warnings with the listed confidence levels. Leave empty to show 68 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. 69 | confidence= 70 | 71 | # Disable the message, report, category or checker with the given id(s). You 72 | # can either give multiple identifiers separated by comma (,) or put this 73 | # option multiple times (only on the command line, not in the configuration 74 | # file where it should appear only once). You can also use "--disable=all" to 75 | # disable everything first and then reenable specific checks. 
For example, if 76 | # you want to run only the similarities checker, you can use "--disable=all 77 | # --enable=similarities". If you want to run only the classes checker, but have 78 | # no Warning level messages displayed, use "--disable=all --enable=classes 79 | # --disable=W". 80 | disable= 81 | cyclic-import 82 | 83 | # Enable the message, report, category or checker with the given id(s). You can 84 | # either give multiple identifier separated by comma (,) or put this option 85 | # multiple time (only on the command line, not in the configuration file where 86 | # it should appear only once). See also the "--disable" option for examples. 87 | enable=c-extension-no-member 88 | 89 | 90 | [REPORTS] 91 | 92 | # Python expression which should return a score less than or equal to 10. You 93 | # have access to the variables 'error', 'warning', 'refactor', and 'convention' 94 | # which contain the number of messages in each category, as well as 'statement' 95 | # which is the total number of statements analyzed. This score is used by the 96 | # global evaluation report (RP0004). 97 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 98 | 99 | # Template used to display messages. This is a python new-style format string 100 | # used to format the message information. See doc for all details. 101 | #msg-template= 102 | 103 | # Set the output format. Available formats are text, parseable, colorized, json 104 | # and msvs (visual studio). You can also give a reporter class, e.g. 105 | # mypackage.mymodule.MyReporterClass. 106 | output-format=colorized 107 | 108 | # Tells whether to display a full report or only the messages. 109 | reports=no 110 | 111 | # Activate the evaluation score. 112 | score=yes 113 | 114 | 115 | [REFACTORING] 116 | 117 | # Maximum number of nested blocks for function / method body 118 | max-nested-blocks=5 119 | 120 | # Complete name of functions that never returns. When checking for 121 | # inconsistent-return-statements if a never returning function is called then 122 | # it will be considered as an explicit return statement and no message will be 123 | # printed. 124 | never-returning-functions=sys.exit 125 | 126 | 127 | [MISCELLANEOUS] 128 | 129 | # List of note tags to take in consideration, separated by a comma. 130 | notes=FIXME, 131 | XXX, 132 | TODO 133 | 134 | # Regular expression of note tags to take in consideration. 135 | #notes-rgx= 136 | 137 | 138 | [SPELLING] 139 | 140 | # Limits count of emitted suggestions for spelling mistakes. 141 | max-spelling-suggestions=4 142 | 143 | # Spelling dictionary name. Available dictionaries: none. To make it work, 144 | # install the python-enchant package. 145 | spelling-dict= 146 | 147 | # List of comma separated words that should not be checked. 148 | spelling-ignore-words= 149 | 150 | # A path to a file that contains the private dictionary; one word per line. 151 | spelling-private-dict-file= 152 | 153 | # Tells whether to store unknown words to the private dictionary (see the 154 | # --spelling-private-dict-file option) instead of raising a message. 155 | spelling-store-unknown-words=no 156 | 157 | 158 | [FORMAT] 159 | 160 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 161 | expected-line-ending-format= 162 | 163 | # Regexp for a line that is allowed to be longer than the limit. 164 | ignore-long-lines=^\s*(# )??$ 165 | 166 | # Number of spaces of indent required inside a hanging or continued line. 
167 | indent-after-paren=4 168 | 169 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 170 | # tab). 171 | indent-string=' ' 172 | 173 | # Maximum number of characters on a single line. 174 | max-line-length=79 175 | 176 | # Maximum number of lines in a module. 177 | max-module-lines=1000 178 | 179 | # Allow the body of a class to be on the same line as the declaration if body 180 | # contains single statement. 181 | single-line-class-stmt=no 182 | 183 | # Allow the body of an if to be on the same line as the test if there is no 184 | # else. 185 | single-line-if-stmt=no 186 | 187 | 188 | [SIMILARITIES] 189 | 190 | # Ignore comments when computing similarities. 191 | ignore-comments=yes 192 | 193 | # Ignore docstrings when computing similarities. 194 | ignore-docstrings=yes 195 | 196 | # Ignore imports when computing similarities. 197 | ignore-imports=no 198 | 199 | # Minimum lines number of a similarity. 200 | min-similarity-lines=4 201 | 202 | 203 | [STRING] 204 | 205 | # This flag controls whether inconsistent-quotes generates a warning when the 206 | # character used as a quote delimiter is used inconsistently within a module. 207 | check-quote-consistency=no 208 | 209 | # This flag controls whether the implicit-str-concat should generate a warning 210 | # on implicit string concatenation in sequences defined over several lines. 211 | check-str-concat-over-line-jumps=no 212 | 213 | 214 | [TYPECHECK] 215 | 216 | # List of decorators that produce context managers, such as 217 | # contextlib.contextmanager. Add to this list to register other decorators that 218 | # produce valid context managers. 219 | contextmanager-decorators=contextlib.contextmanager 220 | 221 | # List of members which are set dynamically and missed by pylint inference 222 | # system, and so shouldn't trigger E1101 when accessed. Python regular 223 | # expressions are accepted. 224 | generated-members= 225 | 226 | # Tells whether missing members accessed in mixin class should be ignored. A 227 | # mixin class is detected if its name ends with "mixin" (case insensitive). 228 | ignore-mixin-members=yes 229 | 230 | # Tells whether to warn about missing members when the owner of the attribute 231 | # is inferred to be None. 232 | ignore-none=yes 233 | 234 | # This flag controls whether pylint should warn about no-member and similar 235 | # checks whenever an opaque object is returned when inferring. The inference 236 | # can return multiple potential results while evaluating a Python object, but 237 | # some branches might not be evaluated, which results in partial inference. In 238 | # that case, it might be useful to still emit no-member and other checks for 239 | # the rest of the inferred objects. 240 | ignore-on-opaque-inference=yes 241 | 242 | # List of class names for which member attributes should not be checked (useful 243 | # for classes with dynamically set attributes). This supports the use of 244 | # qualified names. 245 | ignored-classes=optparse.Values,thread._local,_thread._local 246 | 247 | # List of module names for which member attributes should not be checked 248 | # (useful for modules/projects where namespaces are manipulated during runtime 249 | # and thus existing member attributes cannot be deduced by static analysis). It 250 | # supports qualified module names, as well as Unix pattern matching. 251 | ignored-modules= 252 | 253 | # Show a hint with possible names when a member name was not found. The aspect 254 | # of finding the hint is based on edit distance. 
255 | missing-member-hint=yes 256 | 257 | # The minimum edit distance a name should have in order to be considered a 258 | # similar match for a missing member name. 259 | missing-member-hint-distance=1 260 | 261 | # The total number of similar names that should be taken in consideration when 262 | # showing a hint for a missing member. 263 | missing-member-max-choices=1 264 | 265 | # List of decorators that change the signature of a decorated function. 266 | signature-mutators= 267 | 268 | 269 | [LOGGING] 270 | 271 | # The type of string formatting that logging methods do. `old` means using % 272 | # formatting, `new` is for `{}` formatting. 273 | logging-format-style=old 274 | 275 | # Logging modules to check that the string format arguments are in logging 276 | # function parameter format. 277 | logging-modules=logging 278 | 279 | 280 | [BASIC] 281 | 282 | # Naming style matching correct argument names. 283 | argument-naming-style=snake_case 284 | 285 | # Regular expression matching correct argument names. Overrides argument- 286 | # naming-style. 287 | #argument-rgx= 288 | 289 | # Naming style matching correct attribute names. 290 | attr-naming-style=snake_case 291 | 292 | # Regular expression matching correct attribute names. Overrides attr-naming- 293 | # style. 294 | #attr-rgx= 295 | 296 | # Bad variable names which should always be refused, separated by a comma. 297 | bad-names=foo, 298 | bar, 299 | baz, 300 | toto, 301 | tutu, 302 | tata 303 | 304 | # Bad variable names regexes, separated by a comma. If names match any regex, 305 | # they will always be refused 306 | bad-names-rgxs= 307 | 308 | # Naming style matching correct class attribute names. 309 | class-attribute-naming-style=any 310 | 311 | # Regular expression matching correct class attribute names. Overrides class- 312 | # attribute-naming-style. 313 | #class-attribute-rgx= 314 | 315 | # Naming style matching correct class names. 316 | class-naming-style=PascalCase 317 | 318 | # Regular expression matching correct class names. Overrides class-naming- 319 | # style. 320 | #class-rgx= 321 | 322 | # Naming style matching correct constant names. 323 | const-naming-style=UPPER_CASE 324 | 325 | # Regular expression matching correct constant names. Overrides const-naming- 326 | # style. 327 | #const-rgx= 328 | 329 | # Minimum line length for functions/classes that require docstrings, shorter 330 | # ones are exempt. 331 | docstring-min-length=-1 332 | 333 | # Naming style matching correct function names. 334 | function-naming-style=snake_case 335 | 336 | # Regular expression matching correct function names. Overrides function- 337 | # naming-style. 338 | #function-rgx= 339 | 340 | # Good variable names which should always be accepted, separated by a comma. 341 | good-names=i, 342 | j, 343 | k, 344 | exc, 345 | logger, 346 | _, 347 | 348 | # Good variable names regexes, separated by a comma. If names match any regex, 349 | # they will always be accepted 350 | good-names-rgxs= 351 | 352 | # Include a hint for the correct naming format with invalid-name. 353 | include-naming-hint = yes 354 | 355 | # Naming style matching correct inline iteration names. 356 | inlinevar-naming-style=any 357 | 358 | # Regular expression matching correct inline iteration names. Overrides 359 | # inlinevar-naming-style. 360 | #inlinevar-rgx= 361 | 362 | # Naming style matching correct method names. 363 | method-naming-style=snake_case 364 | 365 | # Regular expression matching correct method names. Overrides method-naming- 366 | # style. 
367 | #method-rgx= 368 | 369 | # Naming style matching correct module names. 370 | module-naming-style=snake_case 371 | 372 | # Regular expression matching correct module names. Overrides module-naming- 373 | # style. 374 | #module-rgx= 375 | 376 | # Colon-delimited sets of names that determine each other's naming style when 377 | # the name regexes allow several styles. 378 | name-group= 379 | 380 | # Regular expression which should only match function or class names that do 381 | # not require a docstring. 382 | no-docstring-rgx=^_ 383 | 384 | # List of decorators that produce properties, such as abc.abstractproperty. Add 385 | # to this list to register other decorators that produce valid properties. 386 | # These decorators are taken in consideration only for invalid-name. 387 | property-classes=abc.abstractproperty 388 | 389 | # Naming style matching correct variable names. 390 | variable-naming-style=snake_case 391 | 392 | # Regular expression matching correct variable names. Overrides variable- 393 | # naming-style. 394 | #variable-rgx= 395 | 396 | 397 | [VARIABLES] 398 | 399 | # List of additional names supposed to be defined in builtins. Remember that 400 | # you should avoid defining new builtins when possible. 401 | additional-builtins= 402 | 403 | # Tells whether unused global variables should be treated as a violation. 404 | allow-global-unused-variables=yes 405 | 406 | # List of strings which can identify a callback function by name. A callback 407 | # name must start or end with one of those strings. 408 | callbacks=cb_, 409 | _cb 410 | 411 | # A regular expression matching the name of dummy variables (i.e. expected to 412 | # not be used). 413 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ 414 | 415 | # Argument names that match this expression will be ignored. Default to name 416 | # with leading underscore. 417 | ignored-argument-names=_.*|^ignored_|^unused_ 418 | 419 | # Tells whether we should check for unused import in __init__ files. 420 | init-import=no 421 | 422 | # List of qualified module names which can have objects that can redefine 423 | # builtins. 424 | redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io 425 | 426 | 427 | [CLASSES] 428 | 429 | # List of method names used to declare (i.e. assign) instance attributes. 430 | defining-attr-methods=__init__, 431 | __new__, 432 | setUp, 433 | __post_init__ 434 | 435 | # List of member names, which should be excluded from the protected access 436 | # warning. 437 | exclude-protected=_asdict, 438 | _fields, 439 | _replace, 440 | _source, 441 | _make 442 | 443 | # List of valid names for the first argument in a class method. 444 | valid-classmethod-first-arg=cls 445 | 446 | # List of valid names for the first argument in a metaclass class method. 447 | valid-metaclass-classmethod-first-arg=cls 448 | 449 | 450 | [IMPORTS] 451 | 452 | # List of modules that can be imported at any level, not just the top level 453 | # one. 454 | allow-any-import-level= 455 | 456 | # Allow wildcard imports from modules that define __all__. 457 | allow-wildcard-with-all=no 458 | 459 | # Analyse import fallback blocks. This can be used to support both Python 2 and 460 | # 3 compatible code, which means that the block might have code that exists 461 | # only in one or another interpreter, leading to false positives when analysed. 462 | analyse-fallback-blocks = yes 463 | 464 | # Deprecated modules which should not be used, separated by a comma. 
465 | deprecated-modules = 466 | optparse, 467 | tkinter.tix, 468 | unittest, 469 | 470 | # Create a graph of external dependencies in the given file (report RP0402 must 471 | # not be disabled). 472 | ext-import-graph= 473 | 474 | # Create a graph of every (i.e. internal and external) dependencies in the 475 | # given file (report RP0402 must not be disabled). 476 | import-graph= 477 | 478 | # Create a graph of internal dependencies in the given file (report RP0402 must 479 | # not be disabled). 480 | int-import-graph= 481 | 482 | # Force import order to recognize a module as part of the standard 483 | # compatibility libraries. 484 | known-standard-library= 485 | 486 | # Force import order to recognize a module as part of a third party library. 487 | known-third-party=enchant 488 | 489 | # Couples of modules and preferred modules, separated by a comma. 490 | preferred-modules = 491 | unittest:pytest, 492 | 493 | 494 | [DESIGN] 495 | 496 | # Maximum number of arguments for function / method. 497 | max-args=5 498 | 499 | # Maximum number of attributes for a class (see R0902). 500 | max-attributes=7 501 | 502 | # Maximum number of boolean expressions in an if statement (see R0916). 503 | max-bool-expr=5 504 | 505 | # Maximum number of branch for function / method body. 506 | max-branches=12 507 | 508 | # Maximum number of locals for function / method body. 509 | max-locals=15 510 | 511 | # Maximum number of parents for a class (see R0901). 512 | max-parents=7 513 | 514 | # Maximum number of public methods for a class (see R0904). 515 | max-public-methods=20 516 | 517 | # Maximum number of return / yield for function / method body. 518 | max-returns=6 519 | 520 | # Maximum number of statements in function / method body. 521 | max-statements=50 522 | 523 | # Minimum number of public methods for a class (see R0903). 524 | min-public-methods=2 525 | 526 | 527 | [EXCEPTIONS] 528 | 529 | # Exceptions that will emit a warning when being caught. Defaults to 530 | # "BaseException, Exception". 531 | overgeneral-exceptions=builtins.BaseException, 532 | builtins.Exception 533 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | --- 4 | 5 | version: 2 6 | 7 | build: 8 | os: ubuntu-24.04 9 | tools: 10 | python: >- # has to be parsed as a YAML string 11 | 3.11 12 | commands: 13 | - >- 14 | PYTHONWARNINGS=error 15 | python3 -Im venv "${READTHEDOCS_VIRTUALENV_PATH}" 16 | - >- 17 | PYTHONWARNINGS=error 18 | "${READTHEDOCS_VIRTUALENV_PATH}"/bin/python -Im 19 | pip install tox 20 | - >- 21 | PYTHONWARNINGS=error 22 | "${READTHEDOCS_VIRTUALENV_PATH}"/bin/python -Im 23 | tox -e build-docs --notest -vvvvv 24 | - >- 25 | PYTHONWARNINGS=error 26 | "${READTHEDOCS_VIRTUALENV_PATH}"/bin/python -Im 27 | tox -e build-docs --skip-pkg-install -q 28 | -- 29 | "${READTHEDOCS_OUTPUT}"/html -b dirhtml 30 | 31 | ... 32 | -------------------------------------------------------------------------------- /.yamllint: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | extends: default 4 | 5 | rules: 6 | indentation: 7 | level: error 8 | indent-sequences: false 9 | truthy: 10 | allowed-values: 11 | - >- 12 | false 13 | - >- 14 | true 15 | - >- # Allow "on" key name in GHA CI/CD workflow definitions 16 | on 17 | 18 | ... 
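
The single-line ``init-hook`` in the ``[MASTER]`` section of the ``.pylintrc`` above packs several path manipulations into one statement. As a hedged reading aid (not part of the repository), the same logic spelled out over multiple lines looks roughly like this:

.. code-block:: python

   # Multi-line rendering of the `.pylintrc` init-hook one-liner above.
   import os
   import pathlib
   import sys

   repo_root_path = pathlib.Path.cwd()
   src_path = repo_root_path / 'src'

   # Prefer `src/` when it exists (src-layout), fall back to the repo root,
   # and always expose the `bin/` helper scripts as well.
   sys.path[:0] = [
       str(src_path if src_path.exists() else repo_root_path),
       str(repo_root_path / 'bin'),
   ]

   # The one-liner joins the first two entries with `os.sep`; kept as-is
   # here to stay faithful to the original configuration.
   os.environ['PYTHONPATH'] = os.sep.join(sys.path[:2])
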
19 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | *************************************** 2 | Contributing to sphinxcontrib-towncrier 3 | *************************************** 4 | 5 | 6 | .. attention:: 7 | 8 | sphinxcontrib-towncrier project exists solely to allow embedding the 9 | unreleased changelog fragments that are prepared for the Towncrier 10 | tool into Sphinx-based docs sites. 11 | At the moment we don't accept any contributions, nor feature 12 | requests that are unrelated to this goal. 13 | 14 | But if you want to contribute a bug fix or send a pull-request 15 | improving our CI, testing and packaging, we will gladly review it. 16 | 17 | 18 | In order to contribute, you'll need to: 19 | 20 | 1. Fork the repository. 21 | 22 | 2. Create a branch, push your changes there. 23 | 24 | 3. Send it to us as a PR. 25 | 26 | 4. Iterate on your PR, incorporating the requested improvements 27 | and participating in the discussions. 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2020, sphinx-contrib 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. image:: https://raw.githubusercontent.com/vshymanskyy/StandWithUkraine/main/banner-direct-team.svg 2 | :target: https://github.com/vshymanskyy/StandWithUkraine/blob/main/docs/README.md 3 | :alt: SWUbanner 4 | 5 | .. image:: https://img.shields.io/pypi/v/sphinxcontrib-towncrier.svg?logo=Python&logoColor=white 6 | :target: https://pypi.org/project/sphinxcontrib-towncrier 7 | :alt: sphinxcontrib-towncrier @ PyPI 8 | 9 | .. 
image:: https://github.com/sphinx-contrib/sphinxcontrib-towncrier/actions/workflows/ci-cd.yml/badge.svg?event=push 10 | :target: https://github.com/sphinx-contrib/sphinxcontrib-towncrier/actions/workflows/ci-cd.yml 11 | :alt: GitHub Actions CI/CD build status — Python package 12 | 13 | .. image:: https://codecov.io/gh/sphinx-contrib/sphinxcontrib-towncrier/branch/master/graph/badge.svg 14 | :target: https://codecov.io/gh/sphinx-contrib/sphinxcontrib-towncrier 15 | :alt: sphinxcontrib-towncrier coverage charts @ Covecov.io 16 | 17 | .. DO-NOT-REMOVE-docs-badges-END 18 | 19 | .. image:: https://img.shields.io/readthedocs/sphinxcontrib-towncrier/latest.svg?logo=Read%20The%20Docs&logoColor=white 20 | :target: https://sphinxcontrib-towncrier.rtfd.io/en/latest/?badge=latest 21 | :alt: Documentation Status @ RTD 22 | 23 | sphinxcontrib-towncrier 24 | ======================= 25 | 26 | .. DO-NOT-REMOVE-docs-intro-START 27 | 28 | An RST directive for injecting a Towncrier-generated changelog draft 29 | containing fragments for the unreleased (next) project version. 30 | 31 | 32 | How to use this? 33 | ---------------- 34 | 35 | .. code-block:: shell-session 36 | 37 | $ pip install sphinxcontrib-towncrier 38 | 39 | .. code-block:: python 40 | 41 | extensions = ['sphinxcontrib.towncrier.ext'] 42 | 43 | # Options: draft/sphinx-version/sphinx-release 44 | towncrier_draft_autoversion_mode = 'draft' 45 | towncrier_draft_include_empty = True 46 | towncrier_draft_working_directory = PROJECT_ROOT_DIR 47 | # Not yet supported: 48 | # towncrier_draft_config_path = 'pyproject.toml' # relative to cwd 49 | 50 | Make sure to point to the dir with ``pyproject.toml`` and pre-configure 51 | towncrier itself in the config. 52 | 53 | If everything above is set up correctly, you should be able to add 54 | 55 | .. code-block:: rst 56 | 57 | .. towncrier-draft-entries:: 58 | 59 | to your documents, like ``changelog.rst``. With no argument, the version 60 | title will be generated using the strategy set up in the 61 | ``towncrier_draft_autoversion_mode`` setting. 62 | 63 | If you want to be in control, override it with an argument you like: 64 | 65 | .. code-block:: rst 66 | 67 | .. towncrier-draft-entries:: |release| [UNRELEASED DRAFT] 68 | 69 | Native RST substitutions in the argument work, just make sure to declare 70 | any non-default ones via ``rst_epilog`` or at the end of the document 71 | where the ``towncrier-draft-entries`` directive is being used. 72 | 73 | 74 | Does anybody actually use this? 75 | ------------------------------- 76 | 77 | So far we know about two projects using ``sphinxcontrib-towncrier`` — 78 | ansible/pylibssh and pypa/pip. Also, this Sphinx extension is inspired 79 | by and somewhat based on the ideas used in pytest-dev/pytest and 80 | tox-dev/tox. We believe that these projects are full of wonderful tricks 81 | that you may want to explore regardless of whether you'll use our 82 | project. 
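
The ``conf.py`` snippet in the README above assigns ``towncrier_draft_working_directory = PROJECT_ROOT_DIR`` without defining that name. A hedged, self-contained sketch of the same fragment could look like the following; the directory layout is an assumption (``conf.py`` sitting in ``docs/`` one level below the directory holding ``pyproject.toml``), so adjust ``parents[1]`` to your own tree:

.. code-block:: python

   # Minimal Sphinx `conf.py` fragment for sphinxcontrib-towncrier.
   # Assumes `conf.py` lives in `docs/`, one level below the project root.
   from pathlib import Path

   PROJECT_ROOT_DIR = Path(__file__).parents[1].resolve()

   extensions = ['sphinxcontrib.towncrier.ext']

   # Options: draft/sphinx-version/sphinx-release
   towncrier_draft_autoversion_mode = 'draft'
   towncrier_draft_include_empty = True
   towncrier_draft_working_directory = PROJECT_ROOT_DIR

With this fragment in place, the ``.. towncrier-draft-entries::`` directive shown in the README can be added to a document such as ``changelog.rst``.
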
83 | -------------------------------------------------------------------------------- /bin/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | !*.py 4 | -------------------------------------------------------------------------------- /bin/pip_constraint_helpers.py: -------------------------------------------------------------------------------- 1 | """Shared functions for platform detection.""" 2 | 3 | from __future__ import annotations 4 | 5 | import pathlib 6 | import platform 7 | import shlex 8 | import subprocess # noqa: S404 -- pip/pip-tools don't have importable APIs 9 | import sys 10 | 11 | 12 | PYTHON_IMPLEMENTATION_MAP = { # noqa: WPS407 13 | 'cpython': 'cp', 14 | 'ironpython': 'ip', 15 | 'jython': 'jy', 16 | 'python': 'py', 17 | 'pypy': 'pp', 18 | } 19 | PYTHON_IMPLEMENTATION = platform.python_implementation() 20 | 21 | 22 | def get_runtime_python_tag() -> str: 23 | """Identify the Python tag of the current runtime. 24 | 25 | :returns: Python tag. 26 | """ 27 | python_minor_ver = sys.version_info[:2] 28 | 29 | try: 30 | sys_impl = sys.implementation.name 31 | except AttributeError: 32 | sys_impl = PYTHON_IMPLEMENTATION.lower() 33 | 34 | python_tag_prefix = PYTHON_IMPLEMENTATION_MAP.get(sys_impl, sys_impl) 35 | 36 | python_minor_ver_tag = ''.join(map(str, python_minor_ver)) 37 | 38 | return f'{python_tag_prefix !s}{python_minor_ver_tag !s}' 39 | 40 | 41 | def get_constraint_file_path( # noqa: WPS210 -- no way to drop vars 42 | req_dir: pathlib.Path | str, 43 | toxenv: str, 44 | python_tag: str, 45 | ) -> pathlib.Path: 46 | """Identify the constraints filename for the current environment. 47 | 48 | :param req_dir: Requirements directory. 49 | :param toxenv: tox testenv. 50 | :param python_tag: Python tag. 51 | :returns: Constraints filename for the current environment. 52 | """ 53 | sys_platform = sys.platform 54 | platform_machine = platform.machine().lower() 55 | 56 | if toxenv in {'py', 'python'}: 57 | env_prefix = 'pypy' if PYTHON_IMPLEMENTATION == 'PyPy' else 'py' 58 | python_ver_num = python_tag[2:] 59 | toxenv = f'{env_prefix}{python_ver_num}' 60 | 61 | if sys_platform == 'linux2': 62 | sys_platform = 'linux' 63 | 64 | constraint_name = ( 65 | f'{toxenv}-{python_tag}-{sys_platform}-{platform_machine}' 66 | ) 67 | return (pathlib.Path(req_dir) / constraint_name).with_suffix('.txt') 68 | 69 | 70 | def make_pip_cmd( 71 | pip_args: list[str], 72 | constraint_file_path: pathlib.Path, 73 | ) -> list[str]: 74 | """Inject a lockfile constraint into the pip command if present. 75 | 76 | :param pip_args: pip arguments. 77 | :param constraint_file_path: Path to a ``constraints.txt``-compatible file. 78 | 79 | :returns: pip command. 80 | """ 81 | pip_cmd = [sys.executable, '-Im', 'pip'] + pip_args 82 | if constraint_file_path.is_file(): 83 | pip_cmd += ['--constraint', str(constraint_file_path)] 84 | else: 85 | print( # noqa: WPS421 86 | 'WARNING: The expected pinned constraints file for the current ' 87 | f'env does not exist (should be "{constraint_file_path !s}").', 88 | ) 89 | return pip_cmd 90 | 91 | 92 | def run_cmd(cmd: list[str] | tuple[str, ...]) -> None: 93 | """Invoke a shell command after logging it. 94 | 95 | :param cmd: The command to invoke. 
96 | """ 97 | escaped_cmd = shlex.join(cmd) 98 | print(f'Invoking the following command: {escaped_cmd !s}') # noqa: WPS421 99 | subprocess.check_call(cmd) # noqa: S603 100 | -------------------------------------------------------------------------------- /bin/pip_wrapper.py: -------------------------------------------------------------------------------- 1 | """A pip-wrapper that injects platform-specific constraints into pip.""" 2 | 3 | from __future__ import annotations 4 | 5 | import sys 6 | 7 | from pip_constraint_helpers import ( 8 | get_constraint_file_path, get_runtime_python_tag, make_pip_cmd, run_cmd, 9 | ) 10 | 11 | 12 | def main(req_dir: str, toxenv: str, *pip_args: tuple[str, ...]) -> None: 13 | """Invoke pip with the matching constraints file, if present. 14 | 15 | :param req_dir: Requirements directory path. 16 | :param toxenv: Tox env name. 17 | :param pip_args: Iterable of args to bypass to pip. 18 | """ 19 | constraint_file_path = get_constraint_file_path( 20 | req_dir=req_dir, 21 | toxenv=toxenv, 22 | python_tag=get_runtime_python_tag(), 23 | ) 24 | pip_cmd = make_pip_cmd( 25 | pip_args=list(pip_args), 26 | constraint_file_path=constraint_file_path, 27 | ) 28 | run_cmd(pip_cmd) 29 | 30 | 31 | if __name__ == '__main__': 32 | main(*sys.argv[1:]) 33 | -------------------------------------------------------------------------------- /bin/print_lockfile_base_name.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | """A script that prints platform-specific constraints file name base.""" 3 | 4 | from __future__ import annotations 5 | 6 | import sys 7 | 8 | from pip_constraint_helpers import ( 9 | get_constraint_file_path, get_runtime_python_tag, 10 | ) 11 | 12 | 13 | def compute_constraint_base_name(toxenv: str) -> str: 14 | """Get the lock file name stem. 15 | 16 | :param toxenv: Name of the tox env. 17 | :returns: A platform-specific lock file base name for tox env. 18 | """ 19 | return get_constraint_file_path( 20 | req_dir='', 21 | toxenv=toxenv, 22 | python_tag=get_runtime_python_tag(), 23 | ).stem 24 | 25 | 26 | if __name__ == '__main__': 27 | print(compute_constraint_base_name(sys.argv[1])) # noqa: WPS421 28 | -------------------------------------------------------------------------------- /bin/resolve_platform_lock_file.py: -------------------------------------------------------------------------------- 1 | """A script for making a lock file for the current platform and tox env.""" 2 | 3 | from __future__ import annotations 4 | 5 | import sys 6 | 7 | from pip_constraint_helpers import ( 8 | get_constraint_file_path, get_runtime_python_tag, run_cmd, 9 | ) 10 | 11 | 12 | def generate_lock_for( 13 | req_dir: str, toxenv: str, *pip_compile_extra_args: tuple[str, ...], 14 | ) -> None: 15 | """Generate a patform-specific lock file for given tox env. 16 | 17 | :param req_dir: Requirements directory path. 18 | :param toxenv: Tox env name. 19 | :param pip_compile_extra_args: Iterable of args to bypass to pip- 20 | compile. 
21 | """ 22 | lock_file_name = get_constraint_file_path( 23 | req_dir, toxenv, get_runtime_python_tag(), 24 | ) 25 | direct_deps_file_name = ( 26 | lock_file_name.parents[1] 27 | / 'direct' 28 | / f'{toxenv}.in' 29 | ) 30 | pip_compile_cmd = ( 31 | sys.executable, '-Im', 'piptools', 'compile', 32 | f'--output-file={lock_file_name !s}', 33 | str(direct_deps_file_name), 34 | *pip_compile_extra_args, 35 | ) 36 | run_cmd(pip_compile_cmd) 37 | 38 | 39 | if __name__ == '__main__': 40 | generate_lock_for(*sys.argv[1:]) 41 | -------------------------------------------------------------------------------- /dependencies/direct/tox.in: -------------------------------------------------------------------------------- 1 | tox 2 | -------------------------------------------------------------------------------- /docs/change-notes-layout.rst: -------------------------------------------------------------------------------- 1 | ******************************* 2 | Advanced changelog layout setup 3 | ******************************* 4 | 5 | This page attempts do demonstrate one way of setting up a docs site for 6 | use with |project| and includes a few opinionated integration solutions. 7 | But of course, this layout may not fit everybody's needs. 8 | When configuring your project, try to figure out what works for you — 9 | you don't have to follow everything laid out here blindly. 10 | 11 | Project structure 12 | ^^^^^^^^^^^^^^^^^ 13 | 14 | The author likes the following project directory layout: 15 | 16 | :: 17 | 18 | {{ project_root }}/ 19 | │ 20 | ├─ docs/ 21 | │ ├─ changelog.d/ 22 | │ │ │─ .gitignore 23 | │ │ │─ .towncrier-template.rst.j2 24 | │ │ │─ {{ issue_number }}.{{ changelog_fragment_type }}.rst 25 | │ │ │─ ... 26 | │ │ └─ README.rst 27 | │ ├─ changelog.rst 28 | │ ├─ conf.py 29 | │ ├─ index.rst 30 | │ ├─ requirements.in 31 | │ └─ requirements.txt 32 | ├─ src/ 33 | │ └─ {{ python_importable_name }}/ 34 | │ └─ ... 35 | ├─ .readthedocs.yml 36 | ├─ CHANGELOG.rst 37 | ├─ pyproject.toml 38 | ├─ README.rst 39 | ├─ tox.ini 40 | └─ ... 41 | 42 | This is an ``src``-layout project with a Python package located at 43 | ``src/{{ python_importable_name }}/`` but we won't touch this topic. 44 | There are several automation, configuration, documentation and metadata 45 | files in the project root that will be described later on this page. 46 | Finally, a :doc:`Sphinx `-based site is located under 47 | the ``docs/``. 48 | 49 | The rest of this page will describe what to have in each of those files. 50 | 51 | 52 | ``docs/changelog.d/`` 53 | ^^^^^^^^^^^^^^^^^^^^^ 54 | 55 | Let's start with the ``docs/changelog.d/``. This is a folder where the 56 | end-users are supposed to add their changelog fragments for Towncrier to 57 | consume. 58 | 59 | ``docs/changelog.d/.gitignore`` 60 | ------------------------------- 61 | 62 | First, let's make sure Git only tracks files that we want there by adding 63 | a ``.gitignore`` file in this folder. First thing, it adds everything to 64 | "ignore" but then allows ``.gitignore``, ``.gitignore``, ``README.rst`` 65 | and any RST documents matching Towncrier change note fragment naming 66 | convention. 67 | 68 | .. code-block:: 69 | 70 | * 71 | 72 | !.gitignore 73 | !.towncrier-template.rst.j2 74 | !*.*.rst 75 | !README.rst 76 | 77 | 78 | ``docs/changelog.d/.towncrier-template.rst.j2`` 79 | ----------------------------------------------- 80 | 81 | Then, there's ``.towncrier-template.rst.j2``. It's a changelog template, 82 | for Towncrier to use. 
It can be copied from 83 | https://github.com/twisted/towncrier/tree/master/src/towncrier/templates. 84 | This name is set in ``pyproject.toml`` in the project root. 85 | 86 | ``docs/changelog.d/{{ issue_number }}.{{ changelog_fragment_type }}.rst`` 87 | ------------------------------------------------------------------------- 88 | 89 | These are changelog fragments in RST format. They are absorbed by 90 | Towncrier during the release and before that, these files will be used 91 | in the preview generated by |project|. 92 | 93 | ``docs/changelog.d/README.rst`` 94 | ------------------------------- 95 | 96 | This ``README.rst`` file would normally contain — a guide for the 97 | contributors on how to write change notes. For example, setuptools has a 98 | useful write-up on :ref:`authoring changelog fragments 99 | `. It is useful to have it 100 | in this place so that it shows up on GitHub when the users navigate to 101 | the folder with the fragments via the web UI. 102 | 103 | 104 | ``docs/`` 105 | ^^^^^^^^^^^^^^^^^^^^^ 106 | 107 | ``docs/changelog.rst`` 108 | ---------------------- 109 | 110 | This is a Sphinx page that contains both the future version changelog 111 | preview via ``.. towncrier-draft-entries::`` directive and the changelog 112 | for all already released versions that is managed by Towncrier in a 113 | separate RST document ``CHANGELOG.rst`` in the project root. 114 | 115 | .. code-block:: rst 116 | 117 | ********* 118 | Changelog 119 | ********* 120 | 121 | Versions follow `Semantic Versioning`_ (``..``). 122 | Backward incompatible (breaking) changes will only be introduced in major 123 | versions with advance notice in the **Deprecations** section of releases. 124 | 125 | .. _Semantic Versioning: https://semver.org/ 126 | 127 | .. towncrier-draft-entries:: |release| [UNRELEASED DRAFT] as on |today| 128 | 129 | .. include:: ../CHANGELOG.rst 130 | 131 | 132 | ``docs/conf.py`` 133 | ---------------- 134 | 135 | The Sphinx configuration demonstrates how to keep the version 136 | information known to Sphinx in sync with the Git tag based metadata. 137 | Note the exclusion of ``docs/changelog.d/`` and the settings prefixed 138 | with ``towncrier_draft_``. 139 | 140 | .. code-block:: python 141 | 142 | """Configuration for the Sphinx documentation generator.""" 143 | 144 | from functools import partial 145 | from pathlib import Path 146 | 147 | from setuptools_scm import get_version 148 | 149 | 150 | # -- Path setup -------------------------------------------------------------- 151 | 152 | PROJECT_ROOT_DIR = Path(__file__).parents[1].resolve() 153 | get_scm_version = partial(get_version, root=PROJECT_ROOT_DIR) 154 | 155 | 156 | # -- Project information ----------------------------------------------------- 157 | 158 | github_url = 'https://github.com' 159 | github_repo_org = 'your-org' 160 | github_repo_name = 'your-project' 161 | github_repo_slug = f'{github_repo_org}/{github_repo_name}' 162 | github_repo_url = f'{github_url}/{github_repo_slug}' 163 | github_sponsors_url = f'{github_url}/sponsors' 164 | 165 | project = github_repo_name 166 | author = f'{project} Contributors' 167 | copyright = f'2021, {author}' 168 | 169 | # The short X.Y version 170 | version = '.'.join( 171 | get_scm_version( 172 | local_scheme='no-local-version', 173 | ).split('.')[:3], 174 | ) 175 | 176 | # The full version, including alpha/beta/rc tags 177 | release = get_scm_version() 178 | 179 | rst_epilog = f""" 180 | .. 
|project| replace:: {project} 181 | """ 182 | 183 | 184 | # -- General configuration --------------------------------------------------- 185 | 186 | extensions = [ 187 | # Built-in extensions: 188 | 'sphinx.ext.extlinks', 189 | 'sphinx.ext.intersphinx', 190 | 191 | # Third-party extensions: 192 | 'sphinxcontrib.towncrier.ext', # provides `.. towncrier-draft-entries::` 193 | ] 194 | 195 | exclude_patterns = [ 196 | '_build', 'Thumbs.db', '.DS_Store', # <- Defaults 197 | 'changelog.d/**', # Towncrier-managed change notes 198 | ] 199 | 200 | 201 | # -- Options for HTML output ------------------------------------------------- 202 | 203 | html_theme = 'furo' 204 | 205 | 206 | # -- Extension configuration ------------------------------------------------- 207 | 208 | # -- Options for intersphinx extension --------------------------------------- 209 | 210 | intersphinx_mapping = { 211 | 'python': ('https://docs.python.org/3', None), 212 | 'rtd': ('https://docs.rtfd.io/en/stable', None), 213 | 'sphinx': ('https://www.sphinx-doc.org/en/master', None), 214 | } 215 | 216 | # -- Options for extlinks extension ------------------------------------------ 217 | 218 | extlinks = { 219 | 'issue': (f'{github_repo_url}/issues/%s', '#'), 220 | 'pr': (f'{github_repo_url}/pull/%s', 'PR #'), 221 | 'commit': (f'{github_repo_url}/commit/%s', ''), 222 | 'gh': (f'{github_url}/%s', 'GitHub: '), 223 | 'user': (f'{github_sponsors_url}/%s', '@'), 224 | } 225 | 226 | # -- Options for towncrier_draft extension ----------------------------------- 227 | 228 | towncrier_draft_autoversion_mode = 'draft' # or: 'sphinx-version', 'sphinx-release' 229 | towncrier_draft_include_empty = True 230 | towncrier_draft_working_directory = PROJECT_ROOT_DIR 231 | # Not yet supported: towncrier_draft_config_path = 'pyproject.toml' # relative to cwd 232 | 233 | # -- Strict mode ------------------------------------------------------------- 234 | 235 | default_role = 'any' 236 | 237 | nitpicky = True 238 | 239 | 240 | ``docs/index.rst`` 241 | ------------------ 242 | 243 | The root document includes most of the README excluding one badge and 244 | its title. It allows to flexibly control what information goes to the 245 | PyPI and GitHub repo pages and what appears in the docs. 246 | 247 | This document must contain a ``.. toctree::`` directive that has a 248 | pointer to the ``changelog`` document in the list. 249 | 250 | .. code-block:: rst 251 | 252 | Welcome to |project|'s documentation! 253 | ===================================== 254 | 255 | .. include:: ../README.rst 256 | :end-before: DO-NOT-REMOVE-docs-badges-END 257 | 258 | .. include:: ../README.rst 259 | :start-after: DO-NOT-REMOVE-docs-intro-START 260 | 261 | .. toctree:: 262 | :maxdepth: 2 263 | :caption: Contents: 264 | 265 | changelog 266 | 267 | 268 | ``docs/requirements.in`` 269 | ------------------------ 270 | 271 | ``requirements.in`` is a standard ``requirements.txt``-type file that 272 | only lists dependencies that are directly used by the :doc:`Sphinx 273 | static docs site generator `. It may optionally contain 274 | the minimum necessary versions of those. 275 | 276 | .. code-block:: text 277 | 278 | furo 279 | setuptools-scm 280 | Sphinx 281 | sphinxcontrib-towncrier 282 | 283 | ``docs/requirements.txt`` 284 | ------------------------- 285 | 286 | But stating just the direct dependencies without strict version 287 | restrictions is not enough for reproducible builds. 
Since it is 288 | important to keep the docs build predictable over time, we use 289 | `pip-tools`_ to generate a ``constraints.txt``-type pip-compatible 290 | lockfile with pinned version constraints for the whole transitive 291 | dependency tree. This file is ``requirements.txt`` and using it will 292 | ensure that the virtualenv for building the docs always has the same 293 | software with the same versions in it. 294 | 295 | .. tip:: 296 | 297 | As a bonus, having a ``.in`` + ``.txt`` pair of files is natively 298 | supported by GitHub Dependabot. 299 | 300 | .. _pip-tools: https://github.com/jazzband/pip-tools 301 | 302 | 303 | ``.readthedocs.yml`` 304 | ^^^^^^^^^^^^^^^^^^^^ 305 | 306 | To set up Read the Docs, add a ``.readthedocs.yml`` file in the project 307 | root. The following configuration makes sure that the lockfile is used 308 | to provision the build env. It also configures how Sphinx should behave 309 | like failing the build on any warnings and having nice URLs. 310 | 311 | .. code-block:: yaml 312 | 313 | --- 314 | version: 2 315 | 316 | formats: all 317 | 318 | sphinx: 319 | builder: dirhtml 320 | configuration: docs/conf.py 321 | fail_on_warning: true 322 | 323 | build: 324 | image: latest 325 | 326 | python: 327 | version: 3.13 328 | install: 329 | - requirements: docs/requirements.txt 330 | ... 331 | 332 | .. note:: 333 | 334 | When you have a Read the Docs YAML config in your repository, none 335 | of the :ref:`settings supported by it ` are derived from the web UI. 337 | 338 | .. tip:: 339 | 340 | Having :doc:`Read the Docs ` plugged into your project it 341 | is also possible to :doc:`enable pull-request builds 342 | `. 343 | 344 | ``CHANGELOG.rst`` 345 | ^^^^^^^^^^^^^^^^^ 346 | 347 | This file in the project root contains the compiled changelog with the 348 | notes from the released project versions. It is managed by Towncrier and 349 | should not be edited by you manually. 350 | 351 | .. code-block:: rst 352 | 353 | .. towncrier release notes start 354 | 355 | 356 | ``pyproject.toml`` 357 | ^^^^^^^^^^^^^^^^^^ 358 | 359 | ``pyproject.toml`` in the root contains the setup for Towncrier itself 360 | under the ``[tool.towncrier]`` section. It binds it all together 361 | pointing at the directory for the change notes, the target changelog 362 | document and the template to use when generating it. It also lists the 363 | categories for the change fragments. 364 | 365 | .. 
code-block:: toml 366 | 367 | [tool.towncrier] 368 | directory = "docs/changelog.d/" 369 | filename = "CHANGELOG.rst" 370 | issue_format = ":issue:`{issue}`" 371 | package_dir = "src" 372 | template = "docs/changelog.d/.towncrier-template.rst.j2" 373 | title_format = "v{version} ({project_date})" 374 | underlines = ["=", "^", "-", "~"] 375 | 376 | [[tool.towncrier.section]] 377 | path = "" 378 | 379 | [[tool.towncrier.type]] 380 | directory = "bugfix" 381 | name = "Bugfixes" 382 | showcontent = true 383 | 384 | [[tool.towncrier.type]] 385 | directory = "feature" 386 | name = "Features" 387 | showcontent = true 388 | 389 | [[tool.towncrier.type]] 390 | directory = "deprecation" 391 | name = "Deprecations (removal in next major release)" 392 | showcontent = true 393 | 394 | [[tool.towncrier.type]] 395 | directory = "breaking" 396 | name = "Backward incompatible changes" 397 | showcontent = true 398 | 399 | [[tool.towncrier.type]] 400 | directory = "doc" 401 | name = "Documentation" 402 | showcontent = true 403 | 404 | [[tool.towncrier.type]] 405 | directory = "misc" 406 | name = "Miscellaneous" 407 | showcontent = true 408 | 409 | 410 | ``README.rst`` 411 | ^^^^^^^^^^^^^^ 412 | 413 | The README document is an important bit of your project. It shows up on 414 | GitHub and is normally shown on PyPI. Besides that, it's possible to 415 | include its fragments into the docs front page. 416 | 417 | The example below shows how to use comment markers to include a part of 418 | the badges into a Sphinx document also embedding some prose from the 419 | README. Scroll up and see how it's being embedded into 420 | ``docs/index.rst``. 421 | 422 | .. code-block:: rst 423 | 424 | .. image:: https://img.shields.io/pypi/v/your-project.svg?logo=Python&logoColor=white 425 | :target: https://pypi.org/project/your-project 426 | :alt: your-project @ PyPI 427 | 428 | .. image:: https://github.com/your-org/your-project/actions/workflows/ci-cd.yml/badge.svg?event=push 429 | :target: https://github.com/your-org/your-project/actions/workflows/ci-cd.yml 430 | :alt: GitHub Actions CI/CD build status 431 | 432 | .. DO-NOT-REMOVE-docs-badges-END 433 | 434 | .. image:: https://img.shields.io/readthedocs/your-project/latest.svg?logo=Read%20The%20Docs&logoColor=white 435 | :target: https://your-project.rtfd.io/en/latest/?badge=latest 436 | :alt: Documentation Status @ RTD 437 | 438 | your-project 439 | ============ 440 | 441 | .. DO-NOT-REMOVE-docs-intro-START 442 | 443 | A project with Sphinx-managed documentation and description sourced 444 | from this README. 445 | 446 | 447 | ``tox.ini`` 448 | ^^^^^^^^^^^ 449 | 450 | This is an example of setting up a tox-based Sphinx invocation 451 | 452 | .. code-block:: ini 453 | 454 | [tox] 455 | envlist = python 456 | isolated_build = true 457 | minversion = 3.21.0 458 | 459 | 460 | [testenv:docs] 461 | basepython = python3 462 | deps = 463 | -r{toxinidir}{/}docs{/}requirements.txt 464 | description = Build The Docs 465 | commands = 466 | # Retrieve possibly missing commits: 467 | -git fetch --unshallow 468 | -git fetch --tags 469 | 470 | # Build the html docs with Sphinx: 471 | {envpython} -m sphinx \ 472 | -j auto \ 473 | -b html \ 474 | {tty:--color} \ 475 | -a \ 476 | -n \ 477 | -W --keep-going \ 478 | -d "{temp_dir}{/}.doctrees" \ 479 | {posargs:} \ 480 | . 
\ 481 | "{envdir}{/}docs_out" 482 | 483 | # Print out the output docs dir and a way to serve html: 484 | -{envpython} -c\ 485 | 'import pathlib;\ 486 | docs_dir = pathlib.Path(r"{envdir}") / "docs_out";\ 487 | index_file = docs_dir / "index.html";\ 488 | print("\n" + "=" * 120 +\ 489 | f"\n\nDocumentation available under:\n\n\ 490 | \tfile://\{index_file\}\n\nTo serve docs, use\n\n\ 491 | \t$ python3 -m http.server --directory \ 492 | \N\{QUOTATION MARK\}\{docs_dir\}\N\{QUOTATION MARK\} 0\n\n" +\ 493 | "=" * 120)' 494 | changedir = {toxinidir}{/}docs 495 | isolated_build = true 496 | passenv = 497 | SSH_AUTH_SOCK 498 | skip_install = true 499 | whitelist_externals = 500 | git 501 | 502 | With this setup, run ``tox -e docs`` to build the site locally. Integrate 503 | the same command in your CI. 504 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=invalid-name 2 | # Ref: https://www.sphinx-doc.org/en/master/usage/configuration.html 3 | """Configuration for the Sphinx documentation generator.""" 4 | 5 | import sys 6 | from functools import partial 7 | from pathlib import Path 8 | from typing import Mapping 9 | 10 | from sphinx.application import Sphinx 11 | 12 | from setuptools_scm import get_version 13 | 14 | 15 | # -- Path setup -------------------------------------------------------------- 16 | 17 | PROJECT_ROOT_DIR = Path(__file__).parents[1].resolve() 18 | PROJECT_SRC_DIR = PROJECT_ROOT_DIR / 'src' 19 | get_scm_version = partial(get_version, root=PROJECT_ROOT_DIR) 20 | 21 | # If extensions (or modules to document with autodoc) are in another directory, 22 | # add these directories to sys.path here. If the directory is relative to the 23 | # documentation root, use os.path.abspath to make it absolute, like shown here. 24 | 25 | 26 | sys.path.insert(0, str(PROJECT_SRC_DIR)) 27 | 28 | 29 | # -- Project information ----------------------------------------------------- 30 | 31 | github_url = 'https://github.com' 32 | github_repo_org = 'sphinx-contrib' 33 | github_repo_name = 'sphinxcontrib-towncrier' 34 | github_repo_slug = f'{github_repo_org}/{github_repo_name}' 35 | github_repo_url = f'{github_url}/{github_repo_slug}' 36 | github_sponsors_url = f'{github_url}/sponsors' 37 | 38 | project = github_repo_name 39 | author = 'Sviatoslav Sydorenko' 40 | copyright = f'2021, {author}' # pylint: disable=redefined-builtin 41 | 42 | # The short X.Y version 43 | version = '.'.join( 44 | get_scm_version( 45 | local_scheme='no-local-version', 46 | ).split('.')[:3], 47 | ) 48 | 49 | # The full version, including alpha/beta/rc tags 50 | release = get_scm_version() 51 | 52 | rst_epilog = f""" 53 | .. |project| replace:: {project} 54 | """ 55 | 56 | 57 | # -- General configuration --------------------------------------------------- 58 | 59 | extensions = [ 60 | # Built-in extensions: 61 | 'sphinx.ext.autodoc', 62 | 'sphinx.ext.autosectionlabel', # autocreate section targets for refs 63 | 'sphinx.ext.doctest', 64 | 'sphinx.ext.extlinks', 65 | 'sphinx.ext.intersphinx', 66 | 'sphinx.ext.todo', 67 | 'sphinx.ext.coverage', 68 | 'sphinx.ext.viewcode', 69 | 70 | # Third-party extensions: 71 | 'myst_parser', # extended markdown; https://pypi.org/project/myst-parser/ 72 | 'sphinxcontrib.apidoc', 73 | 74 | # Tree-local extensions: 75 | 'sphinxcontrib.towncrier.ext', # provides `.. 
towncrier-draft-entries::` 76 | ] 77 | 78 | # Add any paths that contain templates here, relative to this directory. 79 | templates_path = ['_templates'] 80 | 81 | # The language for content autogenerated by Sphinx. Refer to documentation 82 | # for a list of supported languages. 83 | # 84 | # This is also used if you do content translation via gettext catalogs. 85 | # Usually you set "language" from the command line for these cases. 86 | language = 'en' 87 | 88 | # List of patterns, relative to source directory, that match files and 89 | # directories to ignore when looking for source files. 90 | # This pattern also affects html_static_path and html_extra_path. 91 | exclude_patterns = [ 92 | '_build', 'Thumbs.db', '.DS_Store', # <- Defaults 93 | 'changelog-fragments/**', # Towncrier-managed change notes 94 | ] 95 | 96 | 97 | # -- Options for HTML output ------------------------------------------------- 98 | 99 | html_theme = 'furo' 100 | 101 | html_show_sphinx = True 102 | 103 | 104 | # -- Extension configuration ------------------------------------------------- 105 | 106 | # -- Options for intersphinx extension --------------------------------------- 107 | 108 | intersphinx_mapping = { 109 | 'python': ('https://docs.python.org/3', None), 110 | 'rtd': ('https://docs.rtfd.io/en/stable', None), 111 | 'setuptools': ('https://setuptools.rtfd.io/en/latest', None), 112 | 'sphinx': ('https://www.sphinx-doc.org/en/master', None), 113 | } 114 | 115 | # -- Options for todo extension ---------------------------------------------- 116 | 117 | # If true, `todo` and `todoList` produce output, else they produce nothing. 118 | todo_include_todos = True 119 | 120 | # -- Options for sphinxcontrib.apidoc extension ------------------------------ 121 | 122 | apidoc_excluded_paths = [] 123 | apidoc_extra_args = [ 124 | '--implicit-namespaces', 125 | '--private', # include “_private” modules 126 | ] 127 | apidoc_module_dir = str(PROJECT_SRC_DIR / 'sphinxcontrib') 128 | apidoc_module_first = False 129 | apidoc_output_dir = 'pkg' 130 | apidoc_separate_modules = True 131 | apidoc_toc_file = None 132 | 133 | # -- Options for extlinks extension ------------------------------------------ 134 | 135 | extlinks = { 136 | 'issue': (f'{github_repo_url}/issues/%s', '#%s'), # noqa: WPS323 137 | 'pr': (f'{github_repo_url}/pull/%s', 'PR #%s'), # noqa: WPS323 138 | 'commit': (f'{github_repo_url}/commit/%s', '%s'), # noqa: WPS323 139 | 'gh': (f'{github_url}/%s', 'GitHub: %s'), # noqa: WPS323 140 | 'user': (f'{github_sponsors_url}/%s', '@%s'), # noqa: WPS323 141 | } 142 | 143 | # -- Options for linkcheck builder ------------------------------------------- 144 | 145 | linkcheck_ignore = [ 146 | r'http://localhost:\d+/', # local URLs 147 | ] 148 | linkcheck_workers = 25 149 | 150 | # Ref: 151 | # * https://github.com/djungelorm/sphinx-tabs/issues/26#issuecomment-422160463 152 | sphinx_tabs_valid_builders = ['linkcheck'] # prevent linkcheck warning 153 | 154 | # -- Options for autosectionlabel extension ---------------------------------- 155 | 156 | # Ref: 157 | # * https://www.sphinx-doc.org/en/master/usage/extensions/autosectionlabel.html 158 | autosectionlabel_maxdepth = 2 # mitigate Towncrier nested subtitles collision 159 | 160 | # -- Options for towncrier_draft extension ----------------------------------- 161 | 162 | # mode is one of 'draft', 'sphinx-version' or 'sphinx-release' 163 | towncrier_draft_autoversion_mode = 'draft' 164 | towncrier_draft_include_empty = True 165 | towncrier_draft_working_directory = 
PROJECT_ROOT_DIR 166 | # Not yet supported: 167 | towncrier_draft_config_path = 'pyproject.toml' # relative to cwd 168 | 169 | # -- Strict mode ------------------------------------------------------------- 170 | 171 | # Ref: python-attrs/attrs#571 172 | default_role = 'any' 173 | 174 | nitpicky = True 175 | _py_class_role = 'py:class' 176 | nitpick_ignore = [ 177 | # NOTE: Docutils does not have any intersphinx-compatible site 178 | (_py_class_role, 'docutils.nodes.Node'), 179 | (_py_class_role, 'docutils.nodes.document'), 180 | (_py_class_role, 'docutils.statemachine.State'), 181 | (_py_class_role, 'docutils.nodes.Node'), 182 | ] 183 | 184 | 185 | def setup(app: Sphinx) -> Mapping[str, str]: 186 | """Set up extra Sphinx extension integrations.""" 187 | # NOTE: Sphinx doesn't seem to expose its :event: role at the moment. 188 | # NOTE: This hack redeclares the event type locally. 189 | # Ref: https://github.com/sphinx-doc/sphinx/issues/8987 190 | app.add_object_type('event', 'event') 191 | 192 | return { 193 | 'parallel_read_safe': True, 194 | 'parallel_write_safe': True, 195 | 'version': 'builtin', 196 | } 197 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to |project|'s documentation! 2 | ===================================== 3 | 4 | .. include:: ../README.rst 5 | :end-before: DO-NOT-REMOVE-docs-badges-END 6 | 7 | .. include:: ../README.rst 8 | :start-after: DO-NOT-REMOVE-docs-intro-START 9 | 10 | .. toctree:: 11 | :maxdepth: 2 12 | :caption: Go deeper: 13 | 14 | change-notes-layout 15 | contributing 16 | 17 | .. 
toctree:: 18 | :maxdepth: 2 19 | :caption: Private (Dev) API: 20 | 21 | pkg/modules 22 | 23 | 24 | Indices and tables 25 | ================== 26 | 27 | * :ref:`genindex` 28 | * :ref:`modindex` 29 | * :ref:`search` 30 | -------------------------------------------------------------------------------- /docs/pkg/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | 3 | !.gitignore 4 | -------------------------------------------------------------------------------- /docs/requirements.in: -------------------------------------------------------------------------------- 1 | furo >= 2021.02.28.beta28 2 | myst-parser >= 0.13.5 3 | setuptools_scm >= 3.5.0 4 | Sphinx >= 3.5.1 5 | sphinxcontrib-apidoc >= 0.3.0 6 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile 3 | # To update, run: 4 | # 5 | # pip-compile --allow-unsafe --generate-hashes --output-file=docs/requirements.txt docs/requirements.in 6 | # 7 | alabaster==0.7.12 \ 8 | --hash=sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359 \ 9 | --hash=sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02 10 | # via sphinx 11 | attrs==21.2.0 \ 12 | --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \ 13 | --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb 14 | # via markdown-it-py 15 | babel==2.9.1 \ 16 | --hash=sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9 \ 17 | --hash=sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0 18 | # via sphinx 19 | beautifulsoup4==4.9.3 \ 20 | --hash=sha256:4c98143716ef1cb40bf7f39a8e3eec8f8b009509e74904ba3a7b315431577e35 \ 21 | --hash=sha256:84729e322ad1d5b4d25f805bfa05b902dd96450f43842c4e99067d5e1369eb25 \ 22 | --hash=sha256:fff47e031e34ec82bf17e00da8f592fe7de69aeea38be00523c04623c04fb666 23 | # via furo 24 | certifi==2024.7.4 \ 25 | --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ 26 | --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 27 | # via requests 28 | charset-normalizer==3.4.0 \ 29 | --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \ 30 | --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \ 31 | --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \ 32 | --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \ 33 | --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \ 34 | --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \ 35 | --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \ 36 | --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \ 37 | --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \ 38 | --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \ 39 | --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \ 40 | --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \ 41 | --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \ 42 | --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \ 43 | 
--hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \ 44 | --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \ 45 | --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \ 46 | --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \ 47 | --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \ 48 | --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \ 49 | --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \ 50 | --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \ 51 | --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \ 52 | --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \ 53 | --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \ 54 | --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \ 55 | --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \ 56 | --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \ 57 | --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \ 58 | --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \ 59 | --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \ 60 | --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \ 61 | --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \ 62 | --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \ 63 | --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \ 64 | --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \ 65 | --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \ 66 | --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \ 67 | --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \ 68 | --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \ 69 | --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \ 70 | --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \ 71 | --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \ 72 | --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \ 73 | --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \ 74 | --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \ 75 | --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \ 76 | --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \ 77 | --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \ 78 | --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \ 79 | --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \ 80 | --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \ 81 | --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \ 82 | --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \ 83 | --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \ 84 | 
--hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \ 85 | --hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \ 86 | --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \ 87 | --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \ 88 | --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \ 89 | --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \ 90 | --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \ 91 | --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \ 92 | --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \ 93 | --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \ 94 | --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \ 95 | --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \ 96 | --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \ 97 | --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \ 98 | --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \ 99 | --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \ 100 | --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \ 101 | --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \ 102 | --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \ 103 | --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \ 104 | --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \ 105 | --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \ 106 | --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \ 107 | --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \ 108 | --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \ 109 | --hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \ 110 | --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \ 111 | --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \ 112 | --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \ 113 | --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \ 114 | --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \ 115 | --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \ 116 | --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \ 117 | --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \ 118 | --hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \ 119 | --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \ 120 | --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \ 121 | --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \ 122 | --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \ 123 | --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \ 124 | --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \ 125 | 
--hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \ 126 | --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \ 127 | --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \ 128 | --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \ 129 | --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \ 130 | --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \ 131 | --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \ 132 | --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \ 133 | --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482 134 | # via requests 135 | docutils==0.16 \ 136 | --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \ 137 | --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc 138 | # via 139 | # myst-parser 140 | # sphinx 141 | furo==2021.4.11b34 \ 142 | --hash=sha256:3d88e2855949cecf5f562e8a28cab1a6d3355d82f3cf5796eddf9ec234e97519 \ 143 | --hash=sha256:576a1dc1bcbe337d7c53cbc75b886802778a5964fd2e3324433d706066e9aea8 144 | # via -r requirements.in 145 | idna==3.7 \ 146 | --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ 147 | --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 148 | # via requests 149 | imagesize==1.2.0 \ 150 | --hash=sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1 \ 151 | --hash=sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1 152 | # via sphinx 153 | jinja2==3.1.6 \ 154 | --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \ 155 | --hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67 156 | # via 157 | # myst-parser 158 | # sphinx 159 | markdown-it-py==1.1.0 \ 160 | --hash=sha256:36be6bb3ad987bfdb839f5ba78ddf094552ca38ccbd784ae4f74a4e1419fc6e3 \ 161 | --hash=sha256:98080fc0bc34c4f2bcf0846a096a9429acbd9d5d8e67ed34026c03c61c464389 162 | # via 163 | # mdit-py-plugins 164 | # myst-parser 165 | markupsafe==2.0.1 \ 166 | --hash=sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298 \ 167 | --hash=sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64 \ 168 | --hash=sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b \ 169 | --hash=sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567 \ 170 | --hash=sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff \ 171 | --hash=sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74 \ 172 | --hash=sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35 \ 173 | --hash=sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26 \ 174 | --hash=sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7 \ 175 | --hash=sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75 \ 176 | --hash=sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f \ 177 | --hash=sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135 \ 178 | --hash=sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8 \ 179 | --hash=sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a \ 180 | --hash=sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914 \ 181 | 
--hash=sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18 \ 182 | --hash=sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8 \ 183 | --hash=sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2 \ 184 | --hash=sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d \ 185 | --hash=sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b \ 186 | --hash=sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f \ 187 | --hash=sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb \ 188 | --hash=sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833 \ 189 | --hash=sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415 \ 190 | --hash=sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902 \ 191 | --hash=sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9 \ 192 | --hash=sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d \ 193 | --hash=sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066 \ 194 | --hash=sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f \ 195 | --hash=sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5 \ 196 | --hash=sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94 \ 197 | --hash=sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509 \ 198 | --hash=sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51 \ 199 | --hash=sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872 200 | # via jinja2 201 | mdit-py-plugins==0.2.8 \ 202 | --hash=sha256:1833bf738e038e35d89cb3a07eb0d227ed647ce7dd357579b65343740c6d249c \ 203 | --hash=sha256:5991cef645502e80a5388ec4fc20885d2313d4871e8b8e320ca2de14ac0c015f 204 | # via myst-parser 205 | myst-parser==0.14.0 \ 206 | --hash=sha256:8d7db76e2f33cd1dc1fe0c76af9f09e5cf19ce2c2e85074bc82f272c0f7c08ce \ 207 | --hash=sha256:fc262959a74cdc799d7fa9b30c320c17187485b9a1e8c39e988fc12f3adff63c 208 | # via -r requirements.in 209 | packaging==20.9 \ 210 | --hash=sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5 \ 211 | --hash=sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a 212 | # via sphinx 213 | pbr==5.6.0 \ 214 | --hash=sha256:42df03e7797b796625b1029c0400279c7c34fd7df24a7d7818a1abb5b38710dd \ 215 | --hash=sha256:c68c661ac5cc81058ac94247278eeda6d2e6aecb3e227b0387c30d277e7ef8d4 216 | # via sphinxcontrib-apidoc 217 | pygments==2.15.0 \ 218 | --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ 219 | --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 220 | # via sphinx 221 | pyparsing==2.4.7 \ 222 | --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ 223 | --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b 224 | # via packaging 225 | pytz==2021.1 \ 226 | --hash=sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da \ 227 | --hash=sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798 228 | # via babel 229 | pyyaml==5.4.1 \ 230 | --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ 231 | --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ 232 | --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \ 233 | 
--hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \ 234 | --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \ 235 | --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \ 236 | --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \ 237 | --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \ 238 | --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \ 239 | --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ 240 | --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ 241 | --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ 242 | --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \ 243 | --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ 244 | --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \ 245 | --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ 246 | --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ 247 | --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \ 248 | --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ 249 | --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \ 250 | --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ 251 | --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \ 252 | --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ 253 | --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ 254 | --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ 255 | --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ 256 | --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ 257 | --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ 258 | --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 259 | # via myst-parser 260 | requests==2.32.2 \ 261 | --hash=sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289 \ 262 | --hash=sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c 263 | # via sphinx 264 | setuptools-scm==6.0.1 \ 265 | --hash=sha256:c3bd5f701c8def44a5c0bfe8d407bef3f80342217ef3492b951f3777bd2d915c \ 266 | --hash=sha256:d1925a69cb07e9b29416a275b9fadb009a23c148ace905b2fb220649a6c18e92 267 | # via -r requirements.in 268 | snowballstemmer==2.1.0 \ 269 | --hash=sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2 \ 270 | --hash=sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914 271 | # via sphinx 272 | soupsieve==2.2.1 \ 273 | --hash=sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc \ 274 | --hash=sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b 275 | # via beautifulsoup4 276 | sphinx==3.5.4 \ 277 | --hash=sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1 \ 278 | --hash=sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8 279 | # via 280 | # -r requirements.in 281 | # furo 282 | # myst-parser 283 | # sphinxcontrib-apidoc 284 | sphinxcontrib-apidoc==0.3.0 \ 285 | 
--hash=sha256:6671a46b2c6c5b0dca3d8a147849d159065e50443df79614f921b42fbd15cb09 \ 286 | --hash=sha256:729bf592cf7b7dd57c4c05794f732dc026127275d785c2a5494521fdde773fb9 287 | # via -r requirements.in 288 | sphinxcontrib-applehelp==1.0.2 \ 289 | --hash=sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a \ 290 | --hash=sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58 291 | # via sphinx 292 | sphinxcontrib-devhelp==1.0.2 \ 293 | --hash=sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e \ 294 | --hash=sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4 295 | # via sphinx 296 | sphinxcontrib-htmlhelp==2.0.0 \ 297 | --hash=sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07 \ 298 | --hash=sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2 299 | # via sphinx 300 | sphinxcontrib-jsmath==1.0.1 \ 301 | --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \ 302 | --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8 303 | # via sphinx 304 | sphinxcontrib-qthelp==1.0.3 \ 305 | --hash=sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72 \ 306 | --hash=sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6 307 | # via sphinx 308 | sphinxcontrib-serializinghtml==1.1.5 \ 309 | --hash=sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd \ 310 | --hash=sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952 311 | # via sphinx 312 | urllib3==1.26.19 \ 313 | --hash=sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3 \ 314 | --hash=sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429 315 | # via requests 316 | 317 | # The following packages are considered to be unsafe in a requirements file: 318 | setuptools==70.0.0 \ 319 | --hash=sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4 \ 320 | --hash=sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0 321 | # via 322 | # setuptools-scm 323 | # sphinx 324 | -------------------------------------------------------------------------------- /nitpick-style.toml: -------------------------------------------------------------------------------- 1 | [nitpick.styles] 2 | include = [ 3 | # "py://nitpick/resources/any/editorconfig", 4 | # "py://nitpick/resources/any/git-legal", 5 | "py://nitpick/resources/any/pre-commit-hooks", 6 | 7 | # "py://nitpick/resources/python/flake8", 8 | # "py://nitpick/resources/python/github-workflow", 9 | # "py://nitpick/resources/python/pre-commit-hooks", 10 | # "py://nitpick/resources/python/isort", # Requires config to be in `setup.cfg` so not for us 11 | # "py://nitpick/resources/python/mypy", # Requires CLI args but we have that in `mypy.ini` 12 | # "py://nitpick/resources/python/pylint", # Makes several bad suggestions 13 | # "py://nitpick/resources/python/radon", 14 | 15 | "py://nitpick/resources/shell/bashate", 16 | # "py://nitpick/resources/shell/shellcheck", 17 | ] 18 | 19 | [".flake8".flake8] 20 | max-line-length = 79 21 | 22 | [".isort.cfg".settings] 23 | line_length = 79 24 | 25 | [".pylintrc".FORMAT] 26 | max-line-length = 79 27 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | # Essentials 4 | "setuptools >= 64", 5 | 6 | # Plugins 7 | 
"setuptools_scm[toml] >= 8", # version is required for "no-local-version" scheme + toml extra is needed for supporting config in this file 8 | ] 9 | build-backend = "setuptools.build_meta" 10 | 11 | # ATTENTION: the following section must be kept last in 12 | # `pyproject.toml` because the CI appends one line in 13 | # the end when publishing non-tagged versions. 14 | [tool.setuptools_scm] 15 | write_to = "src/sphinxcontrib/towncrier/_scm_version.py" 16 | # ATTENTION: DO NOT ADD ANYTHING AFTER THIS SECTION ^ 17 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts = 3 | # `pytest-xdist`: 4 | --numprocesses=auto 5 | # NOTE: the plugin disabled because it's slower with so few tests 6 | --numprocesses=0 7 | 8 | # Show 10 slowest invocations: 9 | --durations=10 10 | 11 | # Report all the things == -rxXs: 12 | -ra 13 | 14 | # Show values of the local vars in errors/tracebacks: 15 | --showlocals 16 | 17 | # Autocollect and invoke the doctests from all modules: 18 | # https://docs.pytest.org/en/stable/doctest.html 19 | --doctest-modules 20 | 21 | # Pre-load the `pytest-cov` plugin early: 22 | -p pytest_cov 23 | 24 | # `pytest-cov`: 25 | --cov 26 | --cov-config=.coveragerc 27 | --cov-context=test 28 | --no-cov-on-fail 29 | 30 | # Fail on config parsing warnings: 31 | # --strict-config 32 | 33 | # Fail on non-existing markers: 34 | # * Deprecated since v6.2.0 but may be reintroduced later covering a 35 | # broader scope: 36 | # --strict 37 | # * Exists since v4.5.0 (advised to be used instead of `--strict`): 38 | --strict-markers 39 | 40 | doctest_optionflags = ALLOW_UNICODE ELLIPSIS 41 | 42 | # Marks tests with an empty parameterset as xfail(run=False) 43 | empty_parameter_set_mark = xfail 44 | 45 | faulthandler_timeout = 30 46 | 47 | filterwarnings = 48 | error 49 | 50 | # Towncrier < 23.10 emits this. Delete once it's no longer supported. 51 | ignore:is_resource is deprecated\. Use files\(\) instead\. 
Refer to https.//importlib-resources\.readthedocs\.io/en/latest/using\.html#migrating-from-legacy for migration advice\.:DeprecationWarning:towncrier._settings.load 52 | 53 | # https://docs.pytest.org/en/stable/usage.html#creating-junitxml-format-files 54 | junit_duration_report = call 55 | # xunit1 contains more metadata than xunit2 so it's better for CI UIs: 56 | junit_family = xunit1 57 | junit_logging = all 58 | junit_log_passing_tests = true 59 | junit_suite_name = towncrier_test_suite 60 | 61 | # A mapping of markers to their descriptions allowed in strict mode: 62 | markers = 63 | 64 | minversion = 6.1.0 65 | 66 | # Optimize pytest's lookup by restricting potentially deep dir tree scan: 67 | norecursedirs = 68 | build 69 | dependencies 70 | dist 71 | docs 72 | .* 73 | *.egg 74 | *.egg-info 75 | */*.egg-info 76 | */**/*.egg-info 77 | *.dist-info 78 | */*.dist-info 79 | */**/*.dist-info 80 | 81 | testpaths = tests/ 82 | 83 | xfail_strict = true 84 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = sphinxcontrib-towncrier 3 | url = https://github.com/sphinx-contrib/sphinxcontrib-towncrier 4 | project_urls = 5 | GitHub: repo = https://github.com/sphinx-contrib/sphinxcontrib-towncrier 6 | GitHub: issues = https://github.com/sphinx-contrib/sphinxcontrib-towncrier/issues 7 | description = An RST directive for injecting a Towncrier-generated changelog draft containing fragments for the unreleased (next) project version 8 | long_description = file: README.rst 9 | long_description_content_type = text/x-rst 10 | author = Sviatoslav Sydorenko 11 | author_email = wk+pypi/sphinxcontrib-towncrier@sydorenko.org.ua 12 | maintainer = Oleksiy Vasylyshyn 13 | maintainer_email = slsh1o-git@protonmail.com 14 | license = BSD-3-Clause 15 | license_files = 16 | LICENSE 17 | classifiers = 18 | Development Status :: 3 - Alpha 19 | 20 | Framework :: Sphinx 21 | Framework :: Sphinx :: Extension 22 | 23 | Intended Audience :: Developers 24 | 25 | Operating System :: OS Independent 26 | 27 | Programming Language :: Python :: 3.9 28 | Programming Language :: Python :: 3.10 29 | Programming Language :: Python :: 3.11 30 | Programming Language :: Python :: 3.12 31 | Programming Language :: Python :: 3.13 32 | 33 | Topic :: Software Development :: Documentation 34 | 35 | Topic :: Documentation :: Sphinx 36 | Topic :: System :: Software Distribution 37 | Topic :: Utilities 38 | 39 | [options] 40 | include_package_data = True 41 | install_requires = 42 | sphinx 43 | towncrier >= 23 44 | package_dir = 45 | = src 46 | packages = find_namespace: 47 | python_requires = >= 3.9 48 | zip_safe = True 49 | 50 | [options.packages.find] 51 | where = src 52 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python3 2 | 3 | # NOTE: THIS FILE IS IMMUTABLE AND SHOULD REMAIN UNCHANGED UNLESS THERE ARE 4 | # NOTE: SERIOUS REASONS TO EDIT IT. THIS IS ENFORCED BY THE PRE-COMMIT TOOL. 5 | 6 | """The distribution package setuptools installer for sphinxcontrib-towncrier. 7 | 8 | The presence of this file ensures the support of pip editable mode 9 | *with setuptools only*. 10 | 11 | It is also required for `tox --devenv some-env-folder` command to work 12 | because it does `pip install -e .` under the hood. 
13 | """ 14 | from setuptools import setup 15 | 16 | 17 | # pylint: disable=expression-not-assigned 18 | __name__ == '__main__' and setup() # noqa: WPS428 19 | -------------------------------------------------------------------------------- /src/sphinxcontrib/towncrier/__init__.py: -------------------------------------------------------------------------------- 1 | """Sphinx extension for injecting an unreleased changelog into docs. 2 | 3 | This is an importable package containing the whole project. The Sphinx 4 | extension entry point is declared in the :file:`ext` submodule. 5 | 6 | To use this extension, add the following to your :file:`conf.py`: 7 | 8 | .. code-block:: python 9 | 10 | extensions = ['sphinxcontrib.towncrier.ext'] 11 | 12 | """ 13 | 14 | from .ext import __version__, setup 15 | -------------------------------------------------------------------------------- /src/sphinxcontrib/towncrier/_data_transformers.py: -------------------------------------------------------------------------------- 1 | """Data transformation helpers.""" 2 | 3 | 4 | def escape_project_version_rst_substitution(version: str) -> str: 5 | """Prepend an escaped whitespace before RST substitution.""" 6 | if not version.startswith('|') or version.count('|') <= 1: 7 | return version 8 | 9 | # A corner case exists when the towncrier config has something like 10 | # `v{version}` in the title format **and** the directive target 11 | # argument starts with a substitution like `|release|`. And so 12 | # when combined, they produce a v|release|` causing RST to not 13 | # substitute the `|release|` part. But adding an escaped space 14 | # solves this: that escaped space renders as an empty string and 15 | # the substitution gets processed properly so the result would 16 | # be something like `v1.0` as expected. 
17 | return rf'\ {version}' 18 | -------------------------------------------------------------------------------- /src/sphinxcontrib/towncrier/_fragment_discovery.py: -------------------------------------------------------------------------------- 1 | """Changelog fragment discovery helpers.""" 2 | 3 | 4 | from functools import lru_cache 5 | from pathlib import Path 6 | from typing import Optional, Set 7 | 8 | from sphinx.util import logging 9 | 10 | from ._towncrier import ( # noqa: WPS436 11 | find_towncrier_fragments, get_towncrier_config, 12 | ) 13 | 14 | 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | def _resolve_spec_config( 19 | base: Path, spec_name: Optional[str] = None, 20 | ) -> Optional[Path]: 21 | return base / spec_name if spec_name is not None else None 22 | 23 | 24 | # pylint: disable=fixme 25 | # FIXME: consider consolidating this logic upstream in towncrier 26 | def _find_config_file(base: Path) -> Path: 27 | """Find the best config file.""" 28 | candidate_names = 'towncrier.toml', 'pyproject.toml' 29 | candidates = list(map(base.joinpath, candidate_names)) 30 | extant = filter(Path.is_file, candidates) 31 | return next(extant, candidates[-1]) 32 | 33 | 34 | # pylint: disable=fixme 35 | # FIXME: refactor `lookup_towncrier_fragments` to drop noqas 36 | @lru_cache(maxsize=1, typed=True) # noqa: WPS210 37 | def lookup_towncrier_fragments( # noqa: WPS210 38 | working_dir: Optional[str] = None, 39 | config_path: Optional[str] = None, 40 | ) -> Set[Path]: 41 | """Emit RST-formatted Towncrier changelog fragment paths.""" 42 | project_path = Path.cwd() if working_dir is None else Path(working_dir) 43 | 44 | final_config_path = ( 45 | _resolve_spec_config(project_path, config_path) 46 | or _find_config_file(project_path) 47 | ) 48 | 49 | try: 50 | towncrier_config = get_towncrier_config( 51 | project_path, 52 | final_config_path, 53 | ) 54 | except LookupError as config_lookup_err: 55 | logger.warning(str(config_lookup_err)) 56 | return set() 57 | 58 | try: 59 | fragment_filenames = find_towncrier_fragments( 60 | str(project_path), 61 | towncrier_config, 62 | ) 63 | except LookupError as change_notes_lookup_err: 64 | logger.warning(str(change_notes_lookup_err)) 65 | return set() 66 | 67 | return set(map(Path, fragment_filenames)) 68 | -------------------------------------------------------------------------------- /src/sphinxcontrib/towncrier/_scm_version.pyi: -------------------------------------------------------------------------------- 1 | # This stub file is necessary because `_scm_version.py` 2 | # autogenerated on build and absent on mypy checks time 3 | version: str 4 | -------------------------------------------------------------------------------- /src/sphinxcontrib/towncrier/_towncrier.py: -------------------------------------------------------------------------------- 1 | """Towncrier related shims.""" 2 | 3 | from contextlib import suppress as suppress_exceptions 4 | from pathlib import Path 5 | from typing import Set, Union 6 | 7 | from towncrier._settings.load import Config # noqa: WPS436 8 | from towncrier._settings.load import load_config_from_file # noqa: WPS436 9 | from towncrier._settings.load import ( # noqa: WPS436 10 | ConfigError as TowncrierConfigError, 11 | ) 12 | from towncrier.build import find_fragments 13 | 14 | 15 | def find_towncrier_fragments( 16 | base_directory: str, 17 | towncrier_config: Config, 18 | ) -> Set[str]: 19 | """Look up the change note file paths.""" 20 | with suppress_exceptions(TypeError): 21 | # Towncrier >= 24.7.0rc1 22 
| _fragments, fragment_filenames = find_fragments( 23 | base_directory=base_directory, 24 | config=towncrier_config, 25 | strict=False, 26 | ) 27 | 28 | return {fname[0] for fname in fragment_filenames} 29 | 30 | # Towncrier < 24.7.0rc1 31 | try: 32 | # pylint: disable-next=no-value-for-parameter,unexpected-keyword-arg 33 | _fragments, fragment_filenames = find_fragments( # noqa: WPS121 34 | base_directory=base_directory, 35 | sections=towncrier_config.sections, 36 | fragment_directory=towncrier_config.directory, 37 | frag_type_names=towncrier_config.types, 38 | orphan_prefix='+', 39 | ) 40 | except TowncrierConfigError as lookup_err: 41 | raise LookupError( 42 | 'Towncrier was unable to perform change note lookup: ' 43 | f'{lookup_err !s}', 44 | ) from lookup_err 45 | 46 | return set(fragment_filenames) 47 | 48 | 49 | def get_towncrier_config( 50 | project_path: Path, 51 | final_config_path: Union[Path, None], 52 | ) -> Config: 53 | """Return the towncrier config in native format.""" 54 | try: 55 | return load_config_from_file(str(project_path), str(final_config_path)) 56 | except (FileNotFoundError, TowncrierConfigError) as config_load_err: 57 | raise LookupError( 58 | 'Towncrier was unable to load the configuration from file ' 59 | f'`{final_config_path !s}`: {config_load_err !s}', 60 | ) from config_load_err 61 | -------------------------------------------------------------------------------- /src/sphinxcontrib/towncrier/_version.py: -------------------------------------------------------------------------------- 1 | """Version definition.""" 2 | 3 | try: 4 | # pylint: disable=unused-import 5 | from ._scm_version import version as __version__ # noqa: WPS433, WPS436 6 | except ImportError: # pragma: no cover # difficult to hit in tests 7 | from importlib.metadata import ( # noqa: WPS433, WPS436 8 | version as importlib_metadata_get_version, 9 | ) 10 | 11 | __version__ = importlib_metadata_get_version( # noqa: WPS440 12 | 'sphinxcontrib-towncrier', 13 | ) 14 | -------------------------------------------------------------------------------- /src/sphinxcontrib/towncrier/ext.py: -------------------------------------------------------------------------------- 1 | """Sphinx extension for injecting an unreleased changelog into docs.""" 2 | 3 | 4 | import shlex 5 | import subprocess # noqa: S404 6 | import sys 7 | from collections.abc import Set 8 | from contextlib import suppress as suppress_exceptions 9 | from functools import lru_cache 10 | from pathlib import Path 11 | from typing import Dict, List, Literal, Optional, Tuple, Union 12 | 13 | from sphinx.application import Sphinx 14 | from sphinx.config import Config as SphinxConfig 15 | from sphinx.environment import BuildEnvironment 16 | from sphinx.environment.collectors import EnvironmentCollector 17 | from sphinx.util import logging 18 | from sphinx.util.docutils import SphinxDirective 19 | from sphinx.util.nodes import nested_parse_with_titles, nodes 20 | 21 | 22 | # isort: split 23 | 24 | 25 | # Ref: https://github.com/PyCQA/pylint/issues/3817 26 | from docutils import statemachine # pylint: disable=wrong-import-order 27 | from docutils.parsers.rst.states import RSTState 28 | 29 | from ._data_transformers import ( # noqa: WPS436 30 | escape_project_version_rst_substitution, 31 | ) 32 | from ._fragment_discovery import lookup_towncrier_fragments # noqa: WPS436 33 | from ._version import __version__ # noqa: WPS436 34 | 35 | 36 | PROJECT_ROOT_DIR = Path(__file__).parents[3].resolve() 37 | TOWNCRIER_DRAFT_CMD = ( 38 | sys.executable, '-m', 
# invoke via runpy under the same interpreter 39 | 'towncrier', 40 | 'build', 41 | '--draft', # write to stdout, don't change anything on disk 42 | ) 43 | 44 | 45 | logger = logging.getLogger(__name__) 46 | 47 | 48 | @lru_cache(typed=True) 49 | def _get_changelog_draft_entries( 50 | target_version: str, 51 | allow_empty: bool = False, 52 | working_dir: Optional[str] = None, 53 | config_path: Optional[str] = None, 54 | ) -> str: 55 | """Retrieve the unreleased changelog entries from Towncrier.""" 56 | extra_cli_args: Tuple[str, ...] = ( 57 | '--version', 58 | # A version to be used in the RST title: 59 | escape_project_version_rst_substitution(target_version), 60 | ) 61 | if config_path is not None: 62 | extra_cli_args += '--config', str(config_path) 63 | 64 | try: 65 | towncrier_output = subprocess.check_output( # noqa: S603 66 | TOWNCRIER_DRAFT_CMD + extra_cli_args, 67 | cwd=str(working_dir) if working_dir else None, 68 | stderr=subprocess.PIPE, 69 | text=True, 70 | ).strip() 71 | 72 | except subprocess.CalledProcessError as proc_exc: 73 | cmd = shlex.join(proc_exc.cmd) 74 | stdout = proc_exc.stdout or '[No output]' 75 | stderr = proc_exc.stderr or '[No output]' 76 | raise RuntimeError( 77 | 'Command exited unexpectedly.\n\n' 78 | f'Command: {cmd}\n' 79 | f'Return code: {proc_exc.returncode}\n\n' 80 | f'Standard output:\n{stdout}\n\n' 81 | f'Standard error:\n{stderr}', 82 | ) from proc_exc 83 | 84 | if not allow_empty and 'No significant changes' in towncrier_output: 85 | raise LookupError('There are no unreleased changelog entries so far') 86 | 87 | return towncrier_output 88 | 89 | 90 | @lru_cache(maxsize=1, typed=True) 91 | def _get_draft_version_fallback( 92 | strategy: str, 93 | sphinx_config: SphinxConfig, 94 | ) -> str: 95 | """Generate a fallback version string for towncrier draft.""" 96 | known_strategies = {'draft', 'sphinx-version', 'sphinx-release'} 97 | if strategy not in known_strategies: 98 | raise ValueError( 99 | 'Expected "strategy" to be ' 100 | f'one of {known_strategies!r} but got {strategy!r}', 101 | ) 102 | 103 | if 'sphinx' in strategy: 104 | return ( 105 | sphinx_config.release 106 | if 'release' in strategy 107 | else sphinx_config.version 108 | ) 109 | 110 | return '[UNRELEASED DRAFT]' 111 | 112 | 113 | def _nodes_from_document_markup_source( 114 | state: RSTState, 115 | markup_source: str, 116 | ) -> List[nodes.Node]: 117 | """Turn an RST or Markdown string into a list of nodes. 118 | 119 | These nodes can be used in the document. 
120 | """ 121 | node = nodes.Element() 122 | node.document = state.document 123 | nested_parse_with_titles( 124 | state=state, 125 | content=statemachine.StringList( 126 | statemachine.string2lines(markup_source), 127 | source='[towncrier-fragments]', 128 | ), 129 | node=node, 130 | ) 131 | return node.children 132 | 133 | 134 | class TowncrierDraftEntriesDirective(SphinxDirective): 135 | """Definition of the ``towncrier-draft-entries`` directive.""" 136 | 137 | has_content = True # default: False 138 | 139 | def run(self) -> List[nodes.Node]: # noqa: WPS210 140 | """Generate a node tree in place of the directive.""" 141 | target_version = ( 142 | self.content[:1][0] 143 | if self.content[:1] else None 144 | ) 145 | if self.content[1:]: # inner content present 146 | raise self.error( 147 | f'Error in "{self.name!s}" directive: ' 148 | 'only one argument permitted.', 149 | ) 150 | 151 | config = self.env.config 152 | autoversion_mode = config.towncrier_draft_autoversion_mode 153 | include_empty = config.towncrier_draft_include_empty 154 | 155 | towncrier_fragment_paths = lookup_towncrier_fragments( 156 | working_dir=config.towncrier_draft_working_directory, 157 | config_path=config.towncrier_draft_config_path, 158 | ) 159 | for path in towncrier_fragment_paths: 160 | # make sphinx discard doctree cache on file changes 161 | self.env.note_dependency(str(path)) 162 | 163 | try: 164 | # pylint: disable-next=line-too-long 165 | self.env.towncrier_fragment_paths |= ( # type: ignore[attr-defined] 166 | towncrier_fragment_paths 167 | ) 168 | except AttributeError: 169 | # If the attribute hasn't existed, initialize it instead of 170 | # updating 171 | self.env.towncrier_fragment_paths = ( # type: ignore[attr-defined] 172 | towncrier_fragment_paths 173 | ) 174 | 175 | try: 176 | self.env.towncrier_fragment_docs |= { # type: ignore[attr-defined] 177 | self.env.docname, 178 | } 179 | except AttributeError: 180 | # If the attribute hasn't existed, initialize it instead of 181 | # updating 182 | self.env.towncrier_fragment_docs = { # type: ignore[attr-defined] 183 | self.env.docname, 184 | } 185 | 186 | try: 187 | draft_changes = _get_changelog_draft_entries( 188 | target_version or 189 | _get_draft_version_fallback(autoversion_mode, config), 190 | allow_empty=include_empty, 191 | working_dir=config.towncrier_draft_working_directory, 192 | config_path=config.towncrier_draft_config_path, 193 | ) 194 | except RuntimeError as runtime_err: 195 | raise self.error(str(runtime_err)) from runtime_err 196 | except LookupError: 197 | return [] 198 | 199 | return _nodes_from_document_markup_source( 200 | state=self.state, 201 | markup_source=draft_changes, 202 | ) 203 | 204 | 205 | class TowncrierDraftEntriesEnvironmentCollector(EnvironmentCollector): 206 | r"""Environment collector for ``TowncrierDraftEntriesDirective``. 207 | 208 | When :py:class:`~TowncrierDraftEntriesDirective` is used in a 209 | document, it depends on some dynamically generated change fragments. 210 | After the first render, the doctree nodes are put in cache and are 211 | reused from there. There's a way to make Sphinx aware of the 212 | directive dependencies by calling :py:meth:`BuildEnvironment.\ 213 | note_dependency ` but this will only work for fragments that have 215 | existed at the time of that first directive invocation. 216 | 217 | In order to track newly appearing change fragment dependencies, 218 | we need to do so at the time of Sphinx identifying what documents 219 | require rebuilding. 
There's :event:`env-get-outdated` that 220 | allows to extend this list of planned rebuilds and we could use it 221 | by assigning a document-to-fragments map from within the directive 222 | and reading it in the event handler later (since env contents are 223 | preserved in cache). But this approach does not take into account 224 | cleanups and parallel runs of Sphinx. In order to make it truly 225 | parallelism-compatible, we need to define how to merge our custom 226 | cache attribute collected within multiple Sphinx subprocesses into 227 | one object and that's where :py:class:`~sphinx.environment.\ 228 | collectors.EnvironmentCollector` comes into play. 229 | 230 | Refs: 231 | * https://github.com/sphinx-doc/sphinx/issues/8040#issuecomment-671587308 232 | * https://github.com/sphinx-contrib/sphinxcontrib-towncrier/issues/1 233 | """ 234 | 235 | def clear_doc( 236 | self, 237 | app: Sphinx, 238 | env: BuildEnvironment, 239 | docname: str, 240 | ) -> None: 241 | """Clean up env metadata related to the removed document. 242 | 243 | This is a handler for :event:`env-purge-doc`. 244 | """ 245 | with suppress_exceptions(AttributeError, KeyError): 246 | env.towncrier_fragment_docs.remove( # type: ignore[attr-defined] 247 | docname, 248 | ) 249 | 250 | def merge_other( 251 | self, 252 | app: Sphinx, 253 | env: BuildEnvironment, 254 | docnames: Set[str], 255 | other: BuildEnvironment, 256 | ) -> None: 257 | """Merge doc-to-fragments from another proc into this env. 258 | 259 | This is a handler for :event:`env-merge-info`. 260 | """ 261 | try: 262 | other_fragment_docs: Set[str] = ( 263 | other.towncrier_fragment_docs # type: ignore[attr-defined] 264 | ) 265 | except AttributeError: 266 | # If the other process env doesn't have documents using 267 | # `TowncrierDraftEntriesDirective`, there's nothing to merge 268 | return 269 | 270 | if not hasattr(env, 'towncrier_fragment_docs'): # noqa: WPS421 271 | # If the other process env doesn't have documents using 272 | # `TowncrierDraftEntriesDirective`, initialize the structure 273 | # at least 274 | env.towncrier_fragment_docs = set() # type: ignore[attr-defined] 275 | 276 | if not hasattr(env, 'towncrier_fragment_paths'): # noqa: WPS421 277 | env.towncrier_fragment_paths = set() # type: ignore[attr-defined] 278 | 279 | # Since Sphinx does not pull the same document into multiple 280 | # processes, we don't care about the same dict key appearing 281 | # in different envs with different sets of the deps 282 | env.towncrier_fragment_docs.update( # type: ignore[attr-defined] 283 | other_fragment_docs, 284 | ) 285 | env.towncrier_fragment_paths.update( # type: ignore[attr-defined] 286 | other.towncrier_fragment_paths, # type: ignore[attr-defined] 287 | ) 288 | 289 | def process_doc(self, app: Sphinx, doctree: nodes.document) -> None: 290 | """React to :event:`doctree-read` with no-op.""" 291 | 292 | # pylint: disable-next=too-many-arguments,too-many-positional-arguments 293 | def get_outdated_docs( # noqa: WPS211 294 | self, 295 | app: Sphinx, 296 | env: BuildEnvironment, 297 | added: Set[str], 298 | changed: Set[str], 299 | removed: Set[str], 300 | ) -> List[str]: 301 | """Mark docs with changed fragment deps for rebuild. 302 | 303 | This is a handler for :event:`env-get-outdated`. 
304 | """ 305 | towncrier_fragment_paths = lookup_towncrier_fragments( 306 | working_dir=env.config.towncrier_draft_working_directory, 307 | config_path=env.config.towncrier_draft_config_path, 308 | ) 309 | 310 | fragments_changed = False 311 | with suppress_exceptions(AttributeError): 312 | fragments_changed = bool( 313 | towncrier_fragment_paths 314 | ^ env.towncrier_fragment_paths, # type: ignore[attr-defined] 315 | ) 316 | 317 | return ( 318 | list( 319 | env.towncrier_fragment_docs # type: ignore[attr-defined] 320 | - changed, 321 | ) 322 | if fragments_changed 323 | else [] 324 | ) 325 | 326 | 327 | def setup(app: Sphinx) -> Dict[str, Union[bool, str]]: 328 | """Initialize the extension.""" 329 | rebuild_trigger: Literal[ # rebuild full html on settings change 330 | 'html', 331 | ] = 'html' 332 | app.add_config_value( 333 | 'towncrier_draft_config_path', 334 | default=None, 335 | rebuild=rebuild_trigger, 336 | ) 337 | app.add_config_value( 338 | 'towncrier_draft_autoversion_mode', 339 | default='scm-draft', 340 | rebuild=rebuild_trigger, 341 | ) 342 | app.add_config_value( 343 | 'towncrier_draft_include_empty', 344 | default=True, 345 | rebuild=rebuild_trigger, 346 | ) 347 | app.add_config_value( 348 | 'towncrier_draft_working_directory', 349 | default=None, 350 | rebuild=rebuild_trigger, 351 | ) 352 | app.add_directive( 353 | 'towncrier-draft-entries', 354 | TowncrierDraftEntriesDirective, 355 | ) 356 | 357 | # Register an environment collector to merge data gathered by the 358 | # directive in parallel builds 359 | app.add_env_collector(TowncrierDraftEntriesEnvironmentCollector) 360 | 361 | return { 362 | 'parallel_read_safe': True, 363 | 'parallel_write_safe': True, 364 | 'version': __version__, 365 | } 366 | -------------------------------------------------------------------------------- /src/sphinxcontrib/towncrier/py.typed: -------------------------------------------------------------------------------- 1 | `sphinxcontrib.towncrier` is a Sphinx extension. It does not expose own public 2 | API. However, this PEP 561 marker file exists to let the type checkers know 3 | that this project has type annotations declared. In particular, this is useful 4 | for type-checking our own tests since they make corresponding imports. 5 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # ATTENTION: This file only exists to make relative helper imports work. 2 | # ATTENTION: Do not put anything inside! 
3 | """Test suite for `sphinxcontrib.towncrier`.""" 4 | -------------------------------------------------------------------------------- /tests/_data_transformers_test.py: -------------------------------------------------------------------------------- 1 | """Data transformation tests.""" 2 | import pytest 3 | 4 | from sphinxcontrib.towncrier._data_transformers import ( 5 | escape_project_version_rst_substitution, 6 | ) 7 | 8 | 9 | @pytest.mark.parametrize( 10 | ('test_input', 'escaped_input'), 11 | ( 12 | (r'\ |release|', r'\ |release|'), 13 | ('|release|', r'\ |release|'), 14 | ('|release', '|release'), 15 | ('v|release|', 'v|release|'), 16 | ), 17 | ids=( 18 | 'substitution already escaped', 19 | 'correct substitution at the beginning', 20 | 'unclosed substitution at the beginning', 21 | 'correct substitution in the middle', 22 | ), 23 | ) 24 | def test_escape_version(test_input: str, escaped_input: str) -> None: 25 | """Test that the version is escaped before RST substitutions. 26 | 27 | RST substitution as the first item should be escaped. Otherwise, 28 | the input is expected to remain unchanged. 29 | """ 30 | assert escape_project_version_rst_substitution(test_input) == escaped_input 31 | -------------------------------------------------------------------------------- /tests/_fragment_discovery_test.py: -------------------------------------------------------------------------------- 1 | """Unit tests of the fragment discovery logic.""" 2 | 3 | 4 | from pathlib import Path 5 | from typing import Set, Union 6 | 7 | import pytest 8 | 9 | from sphinxcontrib.towncrier._fragment_discovery import ( 10 | _find_config_file, _resolve_spec_config, lookup_towncrier_fragments, 11 | ) 12 | 13 | 14 | PYPROJECT_TOML_FILENAME = 'pyproject.toml' 15 | TOWNCRIER_TOML_FILENAME = 'towncrier.toml' 16 | 17 | UTF8_ENCODING = 'utf-8' 18 | 19 | 20 | @pytest.mark.parametrize( 21 | ('config_file_names_on_disk', 'expected_config_file_name'), 22 | ( 23 | pytest.param( 24 | set(), 25 | PYPROJECT_TOML_FILENAME, 26 | id='pyproject.toml-when-no-configs', 27 | ), 28 | pytest.param( 29 | {PYPROJECT_TOML_FILENAME}, 30 | 'pyproject.toml', 31 | id='pyproject.toml-only', 32 | ), 33 | pytest.param( 34 | {TOWNCRIER_TOML_FILENAME}, 35 | TOWNCRIER_TOML_FILENAME, 36 | id='towncrier.toml-only', 37 | ), 38 | pytest.param( 39 | {PYPROJECT_TOML_FILENAME, TOWNCRIER_TOML_FILENAME}, 40 | TOWNCRIER_TOML_FILENAME, 41 | id='towncrier.toml-over-pyproject.toml', 42 | ), 43 | ), 44 | ) 45 | def test_find_config_file( 46 | config_file_names_on_disk: Set[str], 47 | expected_config_file_name: str, 48 | tmp_path: Path, 49 | ) -> None: 50 | """Verify that the correct Towncrier config is always preferred.""" 51 | for config_file_name_on_disk in config_file_names_on_disk: 52 | tmp_path.joinpath(config_file_name_on_disk).write_text( 53 | '', encoding=UTF8_ENCODING, 54 | ) 55 | 56 | assert _find_config_file(tmp_path).name == expected_config_file_name 57 | 58 | 59 | @pytest.mark.parametrize( 60 | ('base_path', 'config_path', 'resolved_path'), 61 | ( 62 | ( 63 | Path('sentinel-path'), 64 | 'towncrier.toml', 65 | Path('sentinel-path/towncrier.toml'), 66 | ), 67 | (Path('sentinel-path'), None, None), 68 | ), 69 | ids=('explicit-config-path', 'unset-config-path'), 70 | ) 71 | def test_resolve_spec_config( 72 | base_path: Path, 73 | config_path: Union[str, None], 74 | resolved_path: Union[Path, None], 75 | ) -> None: 76 | """Verify that config path is resolved properly.""" 77 | assert _resolve_spec_config(base_path, config_path) == resolved_path 78 | 79 | 
80 | @pytest.mark.parametrize( 81 | 'chdir', 82 | (True, False), 83 | ids=('cwd', 'detached-work-dir'), 84 | ) 85 | @pytest.mark.parametrize( 86 | ('sphinx_configured_path', 'config_file_path'), 87 | ( 88 | (None, TOWNCRIER_TOML_FILENAME), 89 | (None, PYPROJECT_TOML_FILENAME), 90 | (TOWNCRIER_TOML_FILENAME, TOWNCRIER_TOML_FILENAME), 91 | (PYPROJECT_TOML_FILENAME, PYPROJECT_TOML_FILENAME), 92 | ), 93 | ids=( 94 | 'implicit-config-path-pyproject', 95 | 'explicit-config-path-pyproject', 96 | 'implicit-config-path-towncrier', 97 | 'explicit-config-path-towncrier', 98 | ), 99 | ) 100 | def test_lookup_towncrier_fragments( 101 | chdir: bool, 102 | config_file_path: str, 103 | monkeypatch: pytest.MonkeyPatch, 104 | sphinx_configured_path: Union[str, None], 105 | tmp_path: Path, 106 | ) -> None: 107 | """Test that fragments can be discovered in configured location.""" 108 | change_notes_dir_base_name = 'newsfragments-sentinel' 109 | tmp_working_dir_path = tmp_path / 'working-directory' 110 | change_notes_dir_path = tmp_working_dir_path / change_notes_dir_base_name 111 | change_note_sentinel_path = change_notes_dir_path / '0.misc.1.rst' 112 | 113 | change_notes_dir_path.mkdir(parents=True) 114 | change_note_sentinel_path.write_text('sentinel', encoding=UTF8_ENCODING) 115 | 116 | (tmp_working_dir_path / config_file_path).write_text( 117 | f'[tool.towncrier]\ndirectory={change_notes_dir_base_name !r}', 118 | encoding=UTF8_ENCODING, 119 | ) 120 | 121 | if chdir: 122 | monkeypatch.chdir(tmp_working_dir_path) 123 | discovered_fragment_paths = lookup_towncrier_fragments.__wrapped__( 124 | None if chdir else tmp_working_dir_path, 125 | sphinx_configured_path, 126 | ) 127 | assert discovered_fragment_paths == {change_note_sentinel_path} 128 | 129 | 130 | def test_lookup_towncrier_fragments_missing_cfg(tmp_path: Path) -> None: 131 | """Test that missing config file causes zero fragment set.""" 132 | discovered_fragment_paths = lookup_towncrier_fragments.__wrapped__( 133 | tmp_path, 134 | 'blah.toml', 135 | ) 136 | assert discovered_fragment_paths == set() 137 | 138 | 139 | def test_lookup_towncrier_fragments_missing_dir(tmp_path: Path) -> None: 140 | """Test that missing fragments folder causes zero fragment set.""" 141 | (tmp_path / TOWNCRIER_TOML_FILENAME).write_text( 142 | '[tool.towncrier]\ndirectory="non/existing/dir"', 143 | encoding=UTF8_ENCODING, 144 | ) 145 | discovered_fragment_paths = lookup_towncrier_fragments.__wrapped__( 146 | tmp_path, 147 | TOWNCRIER_TOML_FILENAME, 148 | ) 149 | assert discovered_fragment_paths == set() 150 | 151 | 152 | def test_lookup_towncrier_fragments_unset_dir(tmp_path: Path) -> None: 153 | """Test that implicit fragments folder uses defaults.""" 154 | (tmp_path / TOWNCRIER_TOML_FILENAME).write_text( 155 | '[tool.towncrier]', 156 | encoding=UTF8_ENCODING, 157 | ) 158 | discovered_fragment_paths = lookup_towncrier_fragments.__wrapped__( 159 | tmp_path, 160 | TOWNCRIER_TOML_FILENAME, 161 | ) 162 | assert discovered_fragment_paths == set() 163 | -------------------------------------------------------------------------------- /tests/_towncrier_test.py: -------------------------------------------------------------------------------- 1 | """Towncrier config reader tests.""" 2 | 3 | 4 | from pathlib import Path 5 | from typing import Union 6 | 7 | import pytest 8 | 9 | from sphinxcontrib.towncrier._towncrier import get_towncrier_config 10 | 11 | 12 | def test_towncrier_config_section_missing( 13 | monkeypatch: pytest.MonkeyPatch, 14 | tmp_path: Path, 15 | ) -> None: 16 | 
"""Test config file without Towncrier section raises an error.""" 17 | tmp_working_dir_path = tmp_path / 'working-directory' 18 | tmp_working_dir_path.mkdir() 19 | empty_config_file = Path('arbitrary-config.toml') 20 | (tmp_working_dir_path / empty_config_file).touch() 21 | monkeypatch.chdir(tmp_working_dir_path) 22 | 23 | expected_error_msg = ( 24 | '^Towncrier was unable to load the configuration from file ' 25 | fr'`{empty_config_file !s}`: No \[tool\.towncrier\] section\.$' 26 | ) 27 | 28 | with pytest.raises(LookupError, match=expected_error_msg): 29 | get_towncrier_config(tmp_path, empty_config_file) 30 | 31 | 32 | @pytest.mark.parametrize( 33 | 'config_file_name', 34 | ( 35 | None, 36 | Path('pyproject.toml'), 37 | Path('towncrier.toml'), 38 | ), 39 | ) 40 | def test_towncrier_config_file_missing( 41 | config_file_name: Union[Path, None], 42 | monkeypatch: pytest.MonkeyPatch, 43 | tmp_path: Path, 44 | ) -> None: 45 | """Test missing Towncrier config file raises an error.""" 46 | tmp_working_dir_path = tmp_path / 'working-directory' 47 | tmp_working_dir_path.mkdir() 48 | monkeypatch.chdir(tmp_working_dir_path) 49 | 50 | expected_error_msg = ( 51 | '^Towncrier was unable to load the configuration from file ' 52 | f'`{config_file_name !s}`: ' 53 | fr"\[Errno 2\] No such file or directory: '{config_file_name !s}'$" 54 | ) 55 | 56 | with pytest.raises(LookupError, match=expected_error_msg): 57 | get_towncrier_config(tmp_path, config_file_name) 58 | -------------------------------------------------------------------------------- /tests/ext_test.py: -------------------------------------------------------------------------------- 1 | """The Sphinx extension interface module tests.""" 2 | 3 | import pytest 4 | 5 | from sphinx.config import Config as SphinxConfig 6 | 7 | from sphinxcontrib.towncrier.ext import _get_draft_version_fallback 8 | 9 | 10 | release_sentinel = object() 11 | version_sentinel = object() 12 | 13 | 14 | @pytest.fixture 15 | def sphinx_config() -> SphinxConfig: 16 | """Initialize a Sphinx config stub for testing.""" 17 | return SphinxConfig( 18 | overrides={'release': release_sentinel, 'version': version_sentinel}, 19 | ) 20 | 21 | 22 | @pytest.mark.parametrize( 23 | ('autoversion_mode', 'expected_version'), 24 | ( 25 | ('sphinx-release', release_sentinel), 26 | ('sphinx-version', version_sentinel), 27 | ('draft', '[UNRELEASED DRAFT]'), 28 | ), 29 | ) 30 | def test__get_draft_version_fallback_known_strategy( # noqa: WPS116, WPS118 31 | autoversion_mode: str, 32 | expected_version: object, 33 | sphinx_config: SphinxConfig, 34 | ) -> None: 35 | """Check that valid strategies source correct values.""" 36 | computed_version = _get_draft_version_fallback.__wrapped__( 37 | autoversion_mode, 38 | sphinx_config, 39 | ) 40 | assert computed_version == expected_version 41 | 42 | 43 | @pytest.mark.parametrize('autoversion_mode', ('blah', '', 'v1.0')) 44 | def test__get_draft_version_fallback_invalid_strategy( # noqa: WPS116, WPS118 45 | autoversion_mode: str, 46 | sphinx_config: SphinxConfig, 47 | ) -> None: 48 | """Ensure invalid strategy yields an exception.""" 49 | expected_error_msg = ( 50 | '^Expected "strategy" to be one of ' 51 | r"{'[\w,\s'-]+'} " 52 | f'but got {autoversion_mode !r}$' 53 | ) 54 | with pytest.raises(ValueError, match=expected_error_msg): 55 | _get_draft_version_fallback.__wrapped__( 56 | autoversion_mode, 57 | sphinx_config, 58 | ) 59 | -------------------------------------------------------------------------------- /tests/units_test.py: 
-------------------------------------------------------------------------------- 1 | """Unit tests of the extension bits.""" 2 | 3 | import json 4 | import shlex 5 | import sys 6 | import typing 7 | 8 | import pytest 9 | 10 | from sphinxcontrib.towncrier.ext import _get_changelog_draft_entries 11 | 12 | 13 | NO_OUTPUT_MARKER = r'\[No output\]' 14 | PYTHON_INLINE_SNIPPET_CMD_FLAG = '-c' 15 | 16 | 17 | _get_changelog_draft_entries_unwrapped = ( 18 | _get_changelog_draft_entries. 19 | __wrapped__ # So that the non-cached function version is tested 20 | ) 21 | 22 | 23 | @pytest.mark.parametrize( 24 | ('failing_cmd', 'stdout_msg', 'stderr_msg'), 25 | ( 26 | ( 27 | ( 28 | 'false', 29 | ), 30 | NO_OUTPUT_MARKER, 31 | NO_OUTPUT_MARKER, 32 | ), 33 | ( 34 | ( 35 | sys.executable, 36 | PYTHON_INLINE_SNIPPET_CMD_FLAG, 37 | r'print("test standard output\nsecond line");' 38 | 'raise SystemExit(1)', 39 | ), 40 | r'test standard output\nsecond line\n', 41 | NO_OUTPUT_MARKER, 42 | ), 43 | ( 44 | ( 45 | sys.executable, 46 | PYTHON_INLINE_SNIPPET_CMD_FLAG, 47 | r'raise SystemExit("test standard error\nsecond line")', 48 | ), 49 | NO_OUTPUT_MARKER, 50 | r'test standard error\nsecond line', 51 | ), 52 | ( 53 | ( 54 | sys.executable, 55 | PYTHON_INLINE_SNIPPET_CMD_FLAG, 56 | r'print("test standard output\nsecond line out");' 57 | r'raise SystemExit("test standard error\nsecond line err")', 58 | ), 59 | r'test standard output\nsecond line out\n', 60 | r'test standard error\nsecond line err', 61 | ), 62 | ), 63 | ids=( 64 | 'no stdout, no stderr', 65 | 'stdout, no stderr', 66 | 'no stdout, stderr', 67 | 'stdout, stderr', 68 | ), 69 | ) 70 | def test_towncrier_draft_generation_failure_msg( 71 | failing_cmd: typing.Tuple[str], stdout_msg: str, stderr_msg: str, 72 | monkeypatch: pytest.MonkeyPatch, 73 | ) -> None: 74 | """Test that a failing command produces a :class:`RuntimeError`.""" 75 | version_string = 'test version' 76 | 77 | monkeypatch.setattr( 78 | 'sphinxcontrib.towncrier.ext.TOWNCRIER_DRAFT_CMD', 79 | failing_cmd, # So that the invoked command would return a failure 80 | ) 81 | 82 | escaped_failing_cmd = ( # This is necessary because it's used in a regexp 83 | shlex.join(failing_cmd). 84 | replace('\\', r'\\'). 85 | replace('(', r'\('). 
86 | replace(')', r'\)') 87 | ) 88 | expected_return_code = 1 89 | expected_error_message = ( 90 | '^' # noqa: WPS221 91 | 'Command exited unexpectedly.\n\n' 92 | rf"Command: {escaped_failing_cmd} --version '{version_string}'\n" 93 | f'Return code: {expected_return_code}\n\n' 94 | 'Standard output:\n' 95 | f'{stdout_msg}\n\n' 96 | 'Standard error:\n' 97 | f'{stderr_msg}' 98 | '$' 99 | ) 100 | 101 | with pytest.raises(RuntimeError, match=expected_error_message): 102 | _get_changelog_draft_entries_unwrapped(version_string) 103 | 104 | 105 | def test_towncrier_draft_generation_with_config( 106 | monkeypatch: pytest.MonkeyPatch, 107 | ) -> None: 108 | """Test explicit config gets passed into Towncrier.""" 109 | monkeypatch.setattr( 110 | 'sphinxcontrib.towncrier.ext.TOWNCRIER_DRAFT_CMD', 111 | ( 112 | sys.executable, 113 | '-I', 114 | PYTHON_INLINE_SNIPPET_CMD_FLAG, 115 | 'import json, sys; print(json.dumps(sys.argv))', 116 | ), 117 | ) 118 | 119 | argv_json = _get_changelog_draft_entries_unwrapped( 120 | 'test version', 121 | config_path='sentinel-config-path', 122 | ) 123 | computed_proc_args = json.loads(argv_json) 124 | computed_towncrier_args = computed_proc_args[3:] 125 | 126 | assert computed_towncrier_args == ['--config', 'sentinel-config-path'] 127 | 128 | 129 | def test_towncrier_draft_generation_with_empty_cl( 130 | monkeypatch: pytest.MonkeyPatch, 131 | ) -> None: 132 | """Test that empty change log triggers an exception.""" 133 | monkeypatch.setattr( 134 | 'sphinxcontrib.towncrier.ext.TOWNCRIER_DRAFT_CMD', 135 | ( 136 | sys.executable, 137 | '-I', 138 | PYTHON_INLINE_SNIPPET_CMD_FLAG, 139 | 'print("No significant changes")', 140 | ), 141 | ) 142 | 143 | expected_error_message = ( 144 | '^There are no unreleased changelog entries so far$' 145 | ) 146 | with pytest.raises(LookupError, match=expected_error_message): 147 | _get_changelog_draft_entries_unwrapped( 148 | 'test version', 149 | allow_empty=False, 150 | ) 151 | -------------------------------------------------------------------------------- /tests/version_test.py: -------------------------------------------------------------------------------- 1 | """Version tests.""" 2 | from sphinxcontrib.towncrier import __version__ 3 | 4 | 5 | def test_version() -> None: 6 | """Test that version has at least 3 parts.""" 7 | assert __version__.count('.') >= 2 8 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = python 3 | isolated_build = true 4 | minversion = 3.21.0 5 | 6 | [python-cli-options] 7 | byte-warnings = -b 8 | byte-errors = -bb 9 | max-isolation = -E -s -I 10 | some-isolation = -E -s 11 | warnings-to-errors = -Werror 12 | 13 | 14 | [testenv] 15 | description = Run pytest under {envpython} 16 | commands = 17 | {envpython} \ 18 | {[python-cli-options]byte-errors} \ 19 | {[python-cli-options]max-isolation} \ 20 | {[python-cli-options]warnings-to-errors} \ 21 | -W 'ignore:Coverage failure::pytest_cov.plugin' \ 22 | -m pytest \ 23 | {tty:--color=yes} \ 24 | {posargs:--cov-report=html:{envtmpdir}{/}htmlcov{/}} 25 | commands_post = 26 | -{envpython} \ 27 | {[python-cli-options]byte-errors} \ 28 | {[python-cli-options]max-isolation} \ 29 | {[python-cli-options]warnings-to-errors} \ 30 | -c \ 31 | 'import atexit, os, sys; \ 32 | os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ 33 | import coverage; \ 34 | gh_summary_fd = open(\ 35 | os.environ["GITHUB_STEP_SUMMARY"], encoding="utf-8", 
mode="a",\ 36 | ); \ 37 | atexit.register(gh_summary_fd.close); \ 38 | cov = coverage.Coverage(); \ 39 | cov.load(); \ 40 | cov.report(file=gh_summary_fd, output_format="markdown")' 41 | {envpython} \ 42 | {[python-cli-options]byte-errors} \ 43 | {[python-cli-options]max-isolation} \ 44 | {[python-cli-options]warnings-to-errors} \ 45 | -c \ 46 | 'import os, importlib.metadata, pathlib, sys; \ 47 | os.getenv("GITHUB_ACTIONS") == "true" or sys.exit(); \ 48 | cov_report_arg_prefix = "--cov-report=xml:"; \ 49 | test_report_arg_prefix = "--junitxml="; \ 50 | cov_reports = [\ 51 | arg[len(cov_report_arg_prefix):] for arg in sys.argv \ 52 | if arg.startswith(cov_report_arg_prefix)\ 53 | ]; \ 54 | test_reports = [\ 55 | arg[len(test_report_arg_prefix):] for arg in sys.argv \ 56 | if arg.startswith(test_report_arg_prefix)\ 57 | ]; \ 58 | cov_report_file = cov_reports[-1] if cov_reports else None; \ 59 | test_report_file = test_reports[-1] if test_reports else None; \ 60 | gh_output_fd = open(\ 61 | os.environ["GITHUB_OUTPUT"], encoding="utf-8", mode="a",\ 62 | ); \ 63 | cov_report_file and \ 64 | print(f"cov-report-files={cov_report_file !s}", file=gh_output_fd); \ 65 | test_report_file and \ 66 | print(f"test-result-files={test_report_file !s}", file=gh_output_fd); \ 67 | towncrier_version = importlib.metadata.version("towncrier"); \ 68 | print(f"codecov-flags=pytest,Towncrier-v{towncrier_version}", \ 69 | file=gh_output_fd); \ 70 | gh_output_fd.close()' \ 71 | {posargs} 72 | # Print out the output coverage dir and a way to serve html: 73 | {envpython} \ 74 | {[python-cli-options]byte-errors} \ 75 | {[python-cli-options]max-isolation} \ 76 | {[python-cli-options]warnings-to-errors} \ 77 | -c\ 78 | 'import pathlib, shlex, sys; \ 79 | cov_html_report_arg_prefix = "--cov-report=html:"; \ 80 | cov_html_reports = [\ 81 | arg[len(cov_html_report_arg_prefix):] for arg in sys.argv \ 82 | if arg.startswith(cov_html_report_arg_prefix)\ 83 | ]; \ 84 | cov_html_reports or sys.exit(); \ 85 | cov_html_report_dir = pathlib.Path(cov_html_reports[-1]); \ 86 | index_file = cov_html_report_dir / "index.html";\ 87 | index_file.exists() or sys.exit(); \ 88 | html_url = f"file://\{index_file\}";\ 89 | browse_cmd = shlex.join(("python3", "-Im", "webbrowser", html_url)); \ 90 | serve_cmd = shlex.join((\ 91 | "python3", "-Im", "http.server", \ 92 | "--directory", str(cov_html_report_dir), "0", \ 93 | )); \ 94 | print(f"\nTo open the HTML coverage report, run\n\n\ 95 | \t\{browse_cmd !s\}\n");\ 96 | print(f"To serve \ 97 | the HTML coverage report with a local web server, use\n\n\ 98 | \t\{serve_cmd !s\}\n")' \ 99 | {posargs:--cov-report=html:{envtmpdir}{/}htmlcov{/}} 100 | deps = 101 | covdefaults 102 | 103 | pytest 104 | pytest-cov >= 5 105 | pytest-xdist 106 | 107 | # Ref: https://github.com/tox-dev/tox/issues/1199 108 | towncrier 109 | isolated_build = true 110 | package = wheel 111 | pass_env = 112 | CI 113 | GITHUB_* 114 | SSH_AUTH_SOCK 115 | TERM 116 | wheel_build_env = .pkg 117 | usedevelop = false 118 | 119 | [testenv:build-dists] 120 | description = 121 | Build dists and put them into the dist{/} folder 122 | basepython = python3 123 | isolated_build = true 124 | # NOTE: `package_env = none` is needed so it's possible to use `--installpkg` 125 | # NOTE: with the main `testenv`. 
126 | # Ref: https://github.com/tox-dev/tox/issues/2442 127 | package_env = ❌ DUMMY NON-EXISTENT ENV NAME ❌ 128 | # `usedevelop = true` overrides `skip_install` instruction, it's unwanted 129 | usedevelop = false 130 | skip_install = true 131 | deps = 132 | build ~= 1.0.0 133 | commands = 134 | {envpython} -c \ 135 | "import shutil; \ 136 | shutil.rmtree('{toxinidir}{/}dist{/}', ignore_errors=True)" 137 | 138 | {envpython} -m build \ 139 | --outdir '{toxinidir}{/}dist{/}' \ 140 | {posargs:} \ 141 | '{toxinidir}' 142 | commands_post = 143 | set_env = 144 | # The following warnings controls surface setuptools deprecation warnings: 145 | PYTHONWARNINGS = \ 146 | error,\ 147 | default:DEPRECATION:\ 148 | Warning:pip._internal.cli.base_command,\ 149 | ignore:git archive did not support describe output:\ 150 | UserWarning:setuptools_scm.git,\ 151 | ignore:unprocessed git archival found (no export subst applied):\ 152 | UserWarning:setuptools_scm.git 153 | 154 | 155 | [testenv:pre-commit] 156 | description = 157 | Run the quality checks under {basepython}; run as 158 | `SKIP=check-id1,check-id2 tox r -e pre-commit` to instruct the underlying 159 | `pre-commit` invocation avoid running said checks; Use 160 | `tox r -e pre-commit -- check-id1 --all-files` to select checks matching IDs 161 | aliases{:} `tox r -e pre-commit -- mypy --all-files` will run 3 MyPy 162 | invocations, but `tox r -e pre-commit -- mypy-py313 --all-files` runs one. 163 | basepython = python3 164 | commands = 165 | {envpython} \ 166 | -m pre_commit \ 167 | run \ 168 | --color=always \ 169 | --show-diff-on-failure \ 170 | --hook-stage manual \ 171 | {posargs:--all-files} 172 | 173 | # Print out the advice on how to install pre-commit from this env into Git: 174 | -{envpython} -c \ 175 | 'cmd = "{envpython} -m pre_commit install"; \ 176 | scr_width = len(cmd) + 10; \ 177 | sep = "=" * scr_width; \ 178 | cmd_str = " $ \{cmd\}";' \ 179 | 'print(f"\n\{sep\}\nTo install pre-commit hooks into the Git repo, run:\n\n\{cmd_str\}\n\n\{sep\}\n")' 180 | deps = 181 | pre-commit >= 2.6.0 182 | isolated_build = true 183 | passenv = 184 | SKIP 185 | 186 | 187 | [testenv:build-docs] 188 | allowlist_externals = 189 | git 190 | basepython = python3 191 | depends = 192 | make-changelog 193 | deps = 194 | # -r{toxinidir}{/}docs{/}requirements.txt 195 | # FIXME: re-enable the "-r" + "-c" paradigm once the pip bug is fixed. 196 | # Ref: https://github.com/pypa/pip/issues/9243 197 | -r{toxinidir}{/}docs{/}requirements.in 198 | # -c{toxinidir}{/}docs{/}requirements.txt 199 | description = Build The Docs 200 | commands_pre = 201 | # Retrieve possibly missing commits: 202 | -git fetch --unshallow 203 | -git fetch --tags 204 | commands = 205 | {envpython} \ 206 | -m sphinx \ 207 | -j auto \ 208 | -b html \ 209 | {tty:--color} \ 210 | -a \ 211 | -n \ 212 | -W --keep-going \ 213 | -d '{temp_dir}{/}.doctrees' \ 214 | . 
\ 215 | {posargs:{envtmpdir}{/}html} 216 | commands_post = 217 | # Print out the output docs dir and a way to serve html: 218 | -{envpython} -c\ 219 | 'import pathlib;\ 220 | docs_dir = pathlib.Path(r"{envtmpdir}") / "html";\ 221 | index_file = docs_dir / "index.html";\ 222 | print("\n" + "=" * 120 +\ 223 | f"\n\nDocumentation available under:\n\n\ 224 | \tfile://\{index_file\}\n\nTo serve docs, use\n\n\ 225 | \t$ python3 -m http.server --directory \ 226 | \N\{QUOTATION MARK\}\{docs_dir\}\N\{QUOTATION MARK\} 0\n\n" +\ 227 | "=" * 120)' 228 | changedir = {toxinidir}{/}docs 229 | isolated_build = true 230 | passenv = 231 | SSH_AUTH_SOCK 232 | skip_install = false 233 | # whitelist_externals is a compatibility alias for allowlist_externals @ old tox 234 | whitelist_externals = 235 | git 236 | -------------------------------------------------------------------------------- /toxfile.py: -------------------------------------------------------------------------------- 1 | """Project-local tox env customizations.""" 2 | 3 | from base64 import b64encode 4 | from functools import cached_property 5 | from hashlib import sha256 6 | from logging import getLogger 7 | from os import environ, getenv 8 | from pathlib import Path 9 | from sys import path as _sys_path 10 | from typing import cast 11 | 12 | from tox.config.types import Command 13 | from tox.execute.request import StdinSource 14 | from tox.plugin import impl 15 | from tox.tox_env.api import ToxEnv 16 | from tox.tox_env.python.api import Python 17 | from tox.tox_env.python.pip.pip_install import Pip as PipInstaller 18 | from tox.tox_env.python.virtual_env.package.cmd_builder import ( 19 | VirtualEnvCmdBuilder, 20 | ) 21 | from tox.tox_env.python.virtual_env.package.pyproject import ( 22 | Pep517VirtualEnvPackager, 23 | ) 24 | from tox.tox_env.python.virtual_env.runner import VirtualEnvRunner 25 | from tox.tox_env.register import ToxEnvRegister 26 | 27 | 28 | _sys_path[:0] = ['bin/'] # noqa: WPS362 29 | 30 | # pylint: disable-next=wrong-import-position 31 | from pip_constraint_helpers import ( # noqa: E402 32 | get_constraint_file_path, get_runtime_python_tag, 33 | ) 34 | 35 | 36 | IS_GITHUB_ACTIONS_RUNTIME = getenv('GITHUB_ACTIONS') == 'true' 37 | FILE_APPEND_MODE = 'a' 38 | UNICODE_ENCODING = 'utf-8' 39 | _PINNED_PREFIX = 'pinned-' 40 | 41 | 42 | logger = getLogger(__name__) 43 | 44 | 45 | @impl 46 | def tox_before_run_commands(tox_env: ToxEnv) -> None: 47 | """Inject SOURCE_DATE_EPOCH env var into build-dists.""" 48 | if tox_env.name == 'build-dists': 49 | logger.debug( 50 | 'toxfile:tox_before_run_commands> Setting the Git HEAD-based ' 51 | 'epoch for reproducibility in GHA...', 52 | ) 53 | git_executable = 'git' 54 | git_log_cmd = ( 55 | git_executable, 56 | '-c', 'core.pager=', # prevents ANSI escape sequences 57 | 'log', 58 | '-1', 59 | '--pretty=%ct', # noqa: WPS323 60 | ) 61 | tox_env.conf['allowlist_externals'].append(git_executable) 62 | git_log_outcome = tox_env.execute(git_log_cmd, StdinSource.OFF) 63 | tox_env.conf['allowlist_externals'].pop() 64 | if git_log_outcome.exit_code: 65 | logger.warning( 66 | 'Failed to look up Git HEAD timestamp. 
%s', # noqa: WPS323 67 | git_log_outcome, 68 | ) 69 | return 70 | 71 | git_head_timestamp = git_log_outcome.out.strip() 72 | 73 | logger.info( 74 | 'Setting `SOURCE_DATE_EPOCH=%s` environment ' # noqa: WPS323 75 | 'variable to facilitate with build reproducibility', 76 | git_head_timestamp, 77 | ) 78 | tox_env.environment_variables['SOURCE_DATE_EPOCH'] = git_head_timestamp 79 | 80 | 81 | def _compute_sha256sum(file_path: Path) -> str: 82 | return sha256(file_path.read_bytes()).hexdigest() 83 | 84 | 85 | def _produce_sha256sum_line(file_path: Path) -> str: 86 | sha256_str = _compute_sha256sum(file_path) 87 | return f'{sha256_str !s} {file_path.name !s}' 88 | 89 | 90 | @impl 91 | def tox_after_run_commands(tox_env: ToxEnv) -> None: 92 | """Compute combined dists hash post build-dists under GHA.""" 93 | if tox_env.name == 'build-dists' and IS_GITHUB_ACTIONS_RUNTIME: 94 | logger.debug( 95 | 'toxfile:tox_after_run_commands> Computing and storing the base64 ' 96 | 'representation of the combined dists SHA-256 hash in GHA...', 97 | ) 98 | dists_dir_path = Path(__file__).parent / 'dist' 99 | emulated_sha256sum_output = '\n'.join( 100 | _produce_sha256sum_line(artifact_path) 101 | for artifact_path in dists_dir_path.glob('*') 102 | ) 103 | emulated_base64_w0_output = b64encode( 104 | emulated_sha256sum_output.encode(), 105 | ).decode() 106 | 107 | with Path(environ['GITHUB_OUTPUT']).open( 108 | encoding=UNICODE_ENCODING, 109 | mode=FILE_APPEND_MODE, 110 | ) as outputs_file: 111 | print( # noqa: WPS421 112 | 'combined-dists-base64-encoded-sha256-hash=' 113 | f'{emulated_base64_w0_output !s}', 114 | file=outputs_file, 115 | ) 116 | 117 | 118 | class PinnedPipInstaller(PipInstaller): 119 | """A constraint-aware pip installer.""" 120 | 121 | _non_existing_constraint_files: set[Path] = set() 122 | 123 | def post_process_install_command(self, cmd: Command) -> Command: 124 | """Inject an env-specific constraint into pip install.""" 125 | constraint_file_path = get_constraint_file_path( 126 | req_dir='dependencies/lock-files/', 127 | toxenv=self._env.name, 128 | python_tag=get_runtime_python_tag(), 129 | ) 130 | constraint_cli_arg = f'--constraint={constraint_file_path !s}' 131 | if constraint_cli_arg in cmd.args: 132 | logger.debug( 133 | 'tox-lock:%s> `%s` CLI option is already a ' # noqa: WPS323 134 | 'part of the install command. Skipping...', 135 | self._env.name, 136 | constraint_cli_arg, 137 | ) 138 | elif constraint_file_path.is_file(): 139 | logger.info( 140 | 'tox-lock:%s> Applying the pinned constraints ' # noqa: WPS323 141 | 'file `%s` to the current env...', # noqa: WPS323 142 | self._env.name, 143 | constraint_file_path, 144 | ) 145 | logger.debug( 146 | 'tox-lock:%s> Injecting `%s` into the install ' # noqa: WPS323 147 | 'command...', 148 | self._env.name, 149 | constraint_cli_arg, 150 | ) 151 | cmd.args.append(constraint_cli_arg) 152 | else: 153 | if constraint_file_path not in self._non_existing_constraint_files: 154 | logger.warning( 155 | 'tox-lock:%s> The expected pinned ' # noqa: WPS323 156 | 'constraints file for the current env does not exist ' 157 | '(should be `%s`). 
Skipping...', # noqa: WPS323 158 | self._env.name, 159 | constraint_file_path, 160 | ) 161 | self._non_existing_constraint_files.add(constraint_file_path) 162 | 163 | return super().post_process_install_command(cmd) 164 | 165 | 166 | # pylint: disable-next=too-few-public-methods 167 | class PinnedPipInstallerSelectedMixin: 168 | """A base class with pinned pip installer.""" 169 | 170 | @cached_property 171 | def installer(self) -> PinnedPipInstaller: 172 | """Return a constraint-aware pip installer.""" 173 | return PinnedPipInstaller(cast(Python, self)) 174 | 175 | 176 | # pylint: disable-next=too-many-ancestors 177 | class PinnedPep517VirtualEnvPackager( 178 | PinnedPipInstallerSelectedMixin, Pep517VirtualEnvPackager, 179 | ): 180 | """A pinned package env.""" 181 | 182 | @staticmethod 183 | def id() -> str: # noqa: WPS602, WPS605 184 | """Render a pinned virtualenv packager identifier.""" 185 | return f'{_PINNED_PREFIX}{Pep517VirtualEnvPackager.id()}' 186 | 187 | 188 | # pylint: disable-next=too-many-ancestors 189 | class PinnedVirtualEnvCmdBuilder( 190 | PinnedPipInstallerSelectedMixin, 191 | VirtualEnvCmdBuilder, 192 | ): 193 | """A pinned run env.""" 194 | 195 | @staticmethod 196 | def id() -> str: # noqa: WPS602, WPS605 197 | """Render a pinned virtualenv command builder identifier.""" 198 | return f'{_PINNED_PREFIX}{VirtualEnvCmdBuilder.id()}' 199 | 200 | 201 | # pylint: disable-next=too-many-ancestors 202 | class PinnedVirtualEnvRunner( 203 | PinnedPipInstallerSelectedMixin, 204 | VirtualEnvRunner, 205 | ): 206 | """A pinned virtualenv.""" 207 | 208 | @staticmethod 209 | def id() -> str: # noqa: WPS602, WPS605 210 | """Render a pinned virtualenv runner identifier.""" 211 | return f'{_PINNED_PREFIX}{VirtualEnvRunner.id()}' 212 | 213 | @property 214 | def _package_tox_env_type(self) -> str: 215 | return f'{_PINNED_PREFIX}{super()._package_tox_env_type}' 216 | 217 | @property 218 | def _external_pkg_tox_env_type(self) -> str: 219 | return f'{_PINNED_PREFIX}{super()._external_pkg_tox_env_type}' 220 | 221 | 222 | @impl 223 | def tox_register_tox_env(register: ToxEnvRegister) -> None: 224 | """Register locked virtualenv wrappers.""" 225 | run_env_id = PinnedVirtualEnvRunner.id() 226 | 227 | logger.debug( 228 | 'tox-lock:tox_register_tox_env> Registering the ' # noqa: WPS323 229 | 'following run environment: %s', 230 | run_env_id, 231 | ) 232 | register.add_run_env(PinnedVirtualEnvRunner) 233 | 234 | logger.debug( 235 | 'tox-lock:tox_register_tox_env> Registering the ' # noqa: WPS323 236 | 'following package environment: %s', 237 | PinnedPep517VirtualEnvPackager.id(), 238 | ) 239 | register.add_package_env(PinnedPep517VirtualEnvPackager) 240 | 241 | logger.debug( 242 | 'tox-lock:tox_register_tox_env> Registering the ' # noqa: WPS323 243 | 'following package environment: %s', 244 | PinnedVirtualEnvCmdBuilder.id(), 245 | ) 246 | register.add_package_env(PinnedVirtualEnvCmdBuilder) 247 | 248 | logger.debug( 249 | 'tox-lock:tox_register_tox_env> Setting the default ' # noqa: WPS323 250 | 'run environment to `%s`', 251 | run_env_id, 252 | ) 253 | # pylint: disable-next=protected-access 254 | register._default_run_env = run_env_id # noqa: SLF001, WPS437 255 | 256 | 257 | def tox_append_version_info() -> str: 258 | """Produce text to be rendered in ``tox --version``.""" 259 | return '[toxfile]' # Broken: https://github.com/tox-dev/tox/issues/3508 260 | --------------------------------------------------------------------------------
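For context on the `tox_after_run_commands` hook in `toxfile.py` above: the value it appends to `GITHUB_OUTPUT` is a base64-encoded, `sha256sum`-style digest listing of everything in `dist/`. A rough local reproduction, mirroring the `_compute_sha256sum`/`_produce_sha256sum_line` helpers and assuming dists have already been built into `dist/` (as in the hook itself, the glob order is filesystem-dependent):

from base64 import b64encode
from hashlib import sha256
from pathlib import Path

dists_dir_path = Path('dist')
emulated_sha256sum_output = '\n'.join(
    f'{sha256(artifact_path.read_bytes()).hexdigest()} {artifact_path.name}'
    for artifact_path in dists_dir_path.glob('*')
)
# Should match the `combined-dists-base64-encoded-sha256-hash` step output:
print(b64encode(emulated_sha256sum_output.encode()).decode())

Publishing this combined hash as a step output presumably lets a later workflow job verify that the artifacts it downloads for signing or upload are bit-identical to what the `build-dists` environment produced.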