├── src └── package │ ├── py.typed │ ├── __init__.py │ ├── __main__.py │ └── something.py ├── .github ├── codeql │ └── codeql-config.yaml ├── workflows │ ├── dependabot-automerge.yaml │ ├── pr-change-set.yaml │ ├── _release-notifications.yaml │ ├── _generate-rebase.yaml │ ├── pr-conventional-commits.yaml │ ├── scorecards-analysis.yaml │ ├── codeql-analysis.yaml │ ├── _wiki-documentation.yaml │ ├── sync-with-upstream.yaml │ ├── _build.yaml │ └── release.yaml └── dependabot.yaml ├── CODEOWNERS ├── .vscode ├── extensions.json ├── launch.json └── settings.json ├── tests ├── performance │ └── test_something.py ├── unit │ ├── conftest.py │ └── test_something.py └── integration │ └── test_something.py ├── .gitattributes ├── docs ├── Makefile └── source │ ├── index.rst │ └── conf.py ├── SECURITY.md ├── LICENSE.md ├── .flake8 ├── .gitignore ├── .pre-commit-config.yaml ├── pyproject.toml ├── Makefile ├── CHANGELOG.md └── README.md /src/package/py.typed: -------------------------------------------------------------------------------- 1 | # PEP-561 marker. https://mypy.readthedocs.io/en/latest/installed_packages.html 2 | -------------------------------------------------------------------------------- /.github/codeql/codeql-config.yaml: -------------------------------------------------------------------------------- 1 | name: CodeQL configuration 2 | paths: 3 | - src/package 4 | - .github/workflows 5 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Code owners are tagged automatically for pull requests. 
2 | # https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners 3 | 4 | * @jenstroeger @behnazh 5 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | // See https://go.microsoft.com/fwlink/?LinkId=827846 3 | // for the documentation about the extensions.json format 4 | "recommendations": [ 5 | "bodil.prettier-toml", 6 | "tamasfe.even-better-toml", 7 | "ms-python.python" 8 | ] 9 | } 10 | -------------------------------------------------------------------------------- /src/package/__init__.py: -------------------------------------------------------------------------------- 1 | """An example package. 2 | 3 | This package is an empty cookiecutter package example. Enjoy! 4 | """ 5 | 6 | # The version of this package. There's no comprehensive, official list of other 7 | # magic constants, so we stick with this one only for now. 
See also this conversation: 8 | # https://stackoverflow.com/questions/38344848/is-there-a-comprehensive-table-of-pythons-magic-constants 9 | __version__ = "2.18.0" 10 | -------------------------------------------------------------------------------- /tests/performance/test_something.py: -------------------------------------------------------------------------------- 1 | """Test the performance of various package parts, or the package as a whole.""" 2 | 3 | import pytest 4 | from pytest_benchmark.fixture import BenchmarkFixture 5 | 6 | from package.something import Something 7 | 8 | 9 | @pytest.mark.performance 10 | def test_something(benchmark: BenchmarkFixture) -> None: 11 | """Test performance of the function.""" 12 | benchmark.pedantic(Something.do_something, iterations=10, rounds=100) # type: ignore[no-untyped-call] 13 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Set default behavior to automatically normalize line endings. 2 | * text=auto 3 | 4 | # Force batch scripts to always use CRLF line endings so that if a repo is accessed 5 | # in Windows via a file share from Linux, the scripts will work. 6 | *.{cmd,[cC][mM][dD]} text eol=crlf 7 | *.{bat,[bB][aA][tT]} text eol=crlf 8 | *.{ps1,[pP][sS]1} text eol=crlf 9 | 10 | # Force bash scripts to always use LF line endings so that if a repo is accessed 11 | # in Unix via a file share from Windows, the scripts will work. 
12 | *.sh text eol=lf 13 | -------------------------------------------------------------------------------- /src/package/__main__.py: -------------------------------------------------------------------------------- 1 | """The main entry point into this package when run as a script.""" 2 | 3 | # For more details, see also 4 | # https://docs.python.org/3/library/runpy.html 5 | # https://docs.python.org/3/reference/import.html#special-considerations-for-main 6 | 7 | import sys 8 | 9 | from .something import Something 10 | 11 | 12 | def main() -> None: 13 | """Execute the Something standalone command-line tool.""" 14 | _ = Something.do_something() 15 | 16 | 17 | if __name__ == "__main__": 18 | main() 19 | sys.exit() # No exit() argument means successful termination. 20 | -------------------------------------------------------------------------------- /src/package/something.py: -------------------------------------------------------------------------------- 1 | """The Something module provides some things and functions.""" 2 | 3 | 4 | class Something: 5 | """The Something class provides some things.""" 6 | 7 | @staticmethod 8 | def do_something(value: bool = False) -> bool: 9 | """Return true, always. 10 | 11 | Test this function in your local terminal, too, for example: 12 | 13 | .. code: pycon 14 | 15 | >>> s = Something() 16 | >>> s.do_something(False) 17 | True 18 | >>> s.do_something(value=True) 19 | True 20 | 21 | """ 22 | return value or True 23 | -------------------------------------------------------------------------------- /tests/unit/conftest.py: -------------------------------------------------------------------------------- 1 | """Test configuration and other goodness.""" 2 | 3 | import os 4 | 5 | import hypothesis 6 | 7 | # Configure Hypothesis. For Github CI we derandomize to prevent nondeterministic tests 8 | # because we don't want publishing to fail randomly. However, targeted fuzzing should 9 | # use its own profile and randomize. 
10 | hypothesis.settings.register_profile("default", max_examples=500, derandomize=False) 11 | hypothesis.settings.register_profile("github", max_examples=100, derandomize=True) 12 | hypothesis.settings.register_profile("fuzz", max_examples=10000, derandomize=False) 13 | hypothesis.settings.load_profile(os.getenv("HYPOTHESIS_PROFILE", "default")) 14 | -------------------------------------------------------------------------------- /.github/workflows/dependabot-automerge.yaml: -------------------------------------------------------------------------------- 1 | # Automatically merge Dependabot PRs upon approval by leaving 2 | # a comment on Dependabot's pull-request. 3 | 4 | name: Automerge Dependabot PR 5 | on: 6 | pull_request_review: 7 | types: [submitted] 8 | 9 | permissions: 10 | pull-requests: write 11 | 12 | jobs: 13 | comment: 14 | if: ${{ github.event.review.state == 'approved' && github.event.pull_request.user.login == 'dependabot[bot]' }} 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Merge Dependabot PR 18 | run: gh pr comment --body "@dependabot squash and merge" "$PR_URL" 19 | env: 20 | PR_URL: ${{ github.event.pull_request.html_url }} 21 | GITHUB_TOKEN: ${{ secrets.DEPENDABOT_AUTOMERGE_TOKEN }} 22 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | 7 | # Treat the warnings as errors. 8 | SPHINXOPTS ?= -W 9 | SPHINXBUILD ?= sphinx-build 10 | SOURCEDIR = source 11 | BUILDDIR = _build 12 | 13 | # Put it first so that "make" without argument is like "make help". 
14 | help: 15 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 16 | 17 | .PHONY: help Makefile 18 | 19 | # Catch-all target: route all unknown targets to Sphinx using the new 20 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 21 | %: Makefile 22 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 23 | -------------------------------------------------------------------------------- /tests/integration/test_something.py: -------------------------------------------------------------------------------- 1 | """Test the Package itself using its external interface as in integration into a larger run context.""" 2 | 3 | # https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b404-import-subprocess 4 | import subprocess # nosec B404 5 | 6 | import pytest 7 | 8 | 9 | @pytest.mark.integration 10 | def test_package() -> None: 11 | """Test the Something command.""" 12 | # For testing we disable this warning here: 13 | # https://bandit.readthedocs.io/en/latest/plugins/b603_subprocess_without_shell_equals_true.html 14 | # https://bandit.readthedocs.io/en/latest/plugins/b607_start_process_with_partial_path.html 15 | completed = subprocess.run(["something"], check=True, shell=False) # nosec B603, B607 16 | assert completed.returncode == 0 17 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "name": "Python: Module", 6 | "type": "python", 7 | "request": "launch", 8 | "justMyCode": true, 9 | "module": "package" 10 | }, 11 | { 12 | "name": "Python: Run Current File", 13 | "type": "python", 14 | "request": "launch", 15 | "program": "${file}", 16 | "console": "integratedTerminal" 17 | }, 18 | { 19 | "name": "Python: pytest", 20 | "type": "python", 21 | "request": "launch", 22 | "module": "pytest", 23 | 
"cwd": "${workspaceRoot}", 24 | "env": { 25 | "PYTHONPATH": "./src" 26 | }, 27 | "envFile": "${workspaceRoot}/.env", 28 | "console": "integratedTerminal" 29 | } 30 | ] 31 | } 32 | -------------------------------------------------------------------------------- /tests/unit/test_something.py: -------------------------------------------------------------------------------- 1 | """Test the Something module as a unit test. Add more tests here, as needed.""" 2 | 3 | import faker 4 | from hypothesis import given, strategies 5 | from pytest_cases import parametrize_with_cases 6 | 7 | from package.something import Something 8 | 9 | 10 | @given(strategies.booleans()) 11 | def test_something_hypothesis(boolean: bool) -> None: 12 | """Test something here using Hypothesis.""" 13 | assert Something.do_something(boolean) is True 14 | 15 | 16 | def _case_boolean() -> bool: 17 | fake = faker.Faker() 18 | return fake.pybool() 19 | 20 | 21 | @parametrize_with_cases("boolean", cases=_case_boolean) 22 | def test_something_cases(boolean: bool) -> None: 23 | """Test something here using Cases and Faker.""" 24 | assert Something.do_something(boolean) is True 25 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | This is a package template repository, and thus does not support different versions; instead, only the latest version of this repository is being maintained. Adjust this policy to your needs if you build on this repository. For more information, please refer to GitHub’s [Adding a security policy to your repository](https://docs.github.com/en/code-security/getting-started/adding-a-security-policy-to-your-repository). 
4 | 5 | ## Supported Versions 6 | 7 | Only current major version: 8 | 9 | | Version | Supported | 10 | | ------- | ------------------ | 11 | | 2.x.x | :white_check_mark: | 12 | | 1.x.x | :x: | 13 | | 0.x.x | :x: | 14 | 15 | ## Reporting a Vulnerability 16 | 17 | Please create a [new issue](https://github.com/jenstroeger/python-package-template/issues/new) if you find a security vulnerability in this repository. 18 | -------------------------------------------------------------------------------- /.github/workflows/pr-change-set.yaml: -------------------------------------------------------------------------------- 1 | # This workflow checks and tests the package code, and it builds all package 2 | # artifacts whenever there were changes to a pull request. 3 | 4 | name: Check change set 5 | on: 6 | pull_request: 7 | branches: 8 | - '*' 9 | types: 10 | - opened 11 | - reopened 12 | - synchronize 13 | - ready_for_review 14 | permissions: 15 | contents: read 16 | 17 | # Cancel existing running workflows for a PR when a new change is pushed 18 | # to that PR. See also: https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#concurrency 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | build: 25 | uses: ./.github/workflows/_build.yaml 26 | permissions: 27 | contents: read 28 | with: 29 | disable-pip-audit: ${{ vars.DISABLE_PIP_AUDIT == 'true' }} 30 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. Package documentation master file, created by 2 | sphinx-quickstart on Thu Sep 2 09:41:50 2021. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root ``toctree`` directive. 5 | 6 | Package package 7 | =============== 8 | 9 | .. 
toctree:: 10 | :maxdepth: 4 11 | :caption: Contents: 12 | 13 | .. autosummary:: 14 | :toctree: generated 15 | 16 | Something 17 | ========= 18 | 19 | The ``Something`` module contains a useful class which allows you to do something 20 | like the following: 21 | 22 | .. code: pycon 23 | 24 | >>> from package import something 25 | >>> s = something.Something() 26 | >>> s.do_something() 27 | True 28 | >>> s.do_something(False) # doctest: +SKIP 29 | False # This value would fail the test. 30 | 31 | .. automodule:: package 32 | :members: 33 | 34 | Indices and tables 35 | ================== 36 | 37 | * :ref:`genindex` 38 | * :ref:`modindex` 39 | * :ref:`search` 40 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 1999–2023 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 6 | 7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
10 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "[python]": { 3 | "editor.codeActionsOnSave": { 4 | "source.organizeImports": true 5 | } 6 | }, 7 | "python.analysis.extraPaths": [ 8 | "./src/package" 9 | ], 10 | // Linux 11 | "python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python", 12 | // Windows 13 | // "python.defaultInterpreterPath": "${workspaceFolder}/.venv/Scripts/python.exe", 14 | "python.envFile": "${workspaceFolder}/.env", 15 | "python.linting.enabled": true, 16 | "python.linting.banditEnabled": false, 17 | "python.linting.banditArgs": [ 18 | "--configfile=pyproject.toml" 19 | ], 20 | "python.linting.flake8Args": [ 21 | "--config=.flake8" 22 | ], 23 | "python.linting.flake8Enabled": false, 24 | "python.linting.mypyArgs": [ 25 | "--config-file=pyproject.toml" 26 | ], 27 | "python.linting.mypyEnabled": true, 28 | "python.linting.pylintArgs": [ 29 | "--rcfile=pyproject.toml" 30 | ], 31 | "python.linting.pylintEnabled": true, 32 | "python.terminal.activateEnvironment": true, 33 | "python.testing.pytestEnabled": true 34 | } 35 | -------------------------------------------------------------------------------- /.github/dependabot.yaml: -------------------------------------------------------------------------------- 1 | # This configuration file enables Dependabot version updates. 
2 | # https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/about-dependabot-version-updates 3 | # https://github.com/dependabot/feedback/issues/551 4 | 5 | version: 2 6 | updates: 7 | - package-ecosystem: pip 8 | directory: / 9 | schedule: 10 | interval: weekly 11 | commit-message: 12 | prefix: chore 13 | prefix-development: chore 14 | include: scope 15 | open-pull-requests-limit: 13 16 | target-branch: main 17 | # Add additional reviewers for PRs opened by Dependabot. For more information, see: 18 | # https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#reviewers 19 | # reviewers: 20 | # - 21 | 22 | - package-ecosystem: github-actions 23 | directory: / 24 | schedule: 25 | interval: weekly 26 | commit-message: 27 | prefix: chore 28 | prefix-development: chore 29 | include: scope 30 | open-pull-requests-limit: 13 31 | target-branch: main 32 | # Add additional reviewers for PRs opened by Dependabot. For more information, see: 33 | # https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#reviewers 34 | # reviewers: 35 | # - 36 | -------------------------------------------------------------------------------- /.github/workflows/_release-notifications.yaml: -------------------------------------------------------------------------------- 1 | # Send a Slack release notification. 
Instructions to set up Slack to receive 2 | # messages can be found here: https://github.com/slackapi/slack-github-action#setup-2 3 | 4 | name: Release Notifications 5 | on: 6 | workflow_call: 7 | inputs: 8 | repo-name: 9 | type: string 10 | required: true 11 | description: Repository name used for the notification 12 | release-tag: 13 | type: string 14 | required: true 15 | description: Tag name of the release for the notification 16 | release-url: 17 | type: string 18 | required: true 19 | description: URL to the release page for the notification 20 | secrets: 21 | SLACK_WEBHOOK_URL: 22 | required: true 23 | 24 | # Grant no permissions to this workflow. 25 | permissions: {} 26 | 27 | jobs: 28 | slack: 29 | name: Slack release notification 30 | runs-on: ubuntu-latest 31 | steps: 32 | 33 | - name: Notify via Slack 34 | run: | 35 | curl --header "Content-Type: application/json; charset=UTF-8" --request POST --data "$SLACK_WEBHOOK_MSG" "$SLACK_WEBHOOK_URL" 36 | env: 37 | SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} 38 | SLACK_WEBHOOK_MSG: | 39 | { 40 | "text": "${{ inputs.repo-name }} published a new release ${{ inputs.release-tag }}", 41 | "blocks": [ 42 | { 43 | "type": "section", 44 | "text": { 45 | "type": "mrkdwn", 46 | "text": "*${{ inputs.repo-name }}* published a new release <${{ inputs.release-url }}|${{ inputs.release-tag }}>" 47 | } 48 | } 49 | ] 50 | } 51 | -------------------------------------------------------------------------------- /.github/workflows/_generate-rebase.yaml: -------------------------------------------------------------------------------- 1 | # Automatically rebase one branch on top of another; usually main on top 2 | # of release after a new package version was published. 
3 | 4 | name: Rebase branch 5 | on: 6 | workflow_call: 7 | inputs: 8 | to-head: 9 | type: string 10 | required: true 11 | description: Branch that is being rebased 12 | from-base: 13 | type: string 14 | required: true 15 | description: Base branch 16 | git-user-name: 17 | type: string 18 | required: true 19 | description: Name of the git user who rebases and pushes the to_head branch 20 | git-user-email: 21 | type: string 22 | required: true 23 | description: Email address of said git user 24 | secrets: 25 | REPO_ACCESS_TOKEN: 26 | required: true 27 | 28 | permissions: 29 | contents: read 30 | 31 | jobs: 32 | rebase: 33 | runs-on: ubuntu-latest 34 | steps: 35 | 36 | - name: Harden Runner 37 | uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 38 | with: 39 | egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs 40 | 41 | - name: Check out repository 42 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 43 | with: 44 | fetch-depth: 0 45 | token: ${{ secrets.REPO_ACCESS_TOKEN }} 46 | ref: ${{ github.ref_name }} 47 | 48 | - name: Do rebase 49 | run: | 50 | git config --global user.name "$USER_NAME" 51 | git config --global user.email "$USER_EMAIL" 52 | git checkout "$TO_HEAD" 53 | git rebase "$FROM_BASE" 54 | git push --force-with-lease 55 | env: 56 | USER_NAME: ${{ inputs.git-user-name }} 57 | USER_EMAIL: ${{ inputs.git-user-email }} 58 | TO_HEAD: ${{ inputs.to-head }} 59 | FROM_BASE: ${{ inputs.from-base }} 60 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | # Unfortunately, Flake8 does not support pyproject.toml configuration. 
2 | # https://github.com/PyCQA/flake8/issues/234 3 | # 4 | # More details regarding Flake8 and Black interplay: 5 | # https://github.com/psf/black/blob/main/docs/guides/using_black_with_other_tools.md#flake8 6 | [flake8] 7 | 8 | # Enable a few additional checks. 9 | # 10 | # https://github.com/PyCQA/flake8-bugbear#how-to-enable-opinionated-warnings 11 | # B9: Bugbear's extended opinionated checks 12 | # 13 | # https://pycodestyle.pycqa.org/en/latest/intro.html#error-codes 14 | # W504: line break after binary operator (Black compliant) 15 | extend-select = B9, W504 16 | 17 | # Disable several warnings that don't play nice with PEP8 or Black, 18 | # or that are a bit of a nuisance in general. 19 | # 20 | # http://www.pydocstyle.org/en/latest/error_codes.html 21 | # D105: Missing docstring in magic method 22 | # 23 | # https://pycodestyle.pycqa.org/en/latest/intro.html#error-codes 24 | # E203: whitespace before ‘,’, ‘;’, or ‘:’ (not Black compliant) 25 | # E501: line too long (managed better by Bugbear's B950) 26 | # W503: line break before binary operator (not Black compliant) 27 | # 28 | # https://github.com/peterjc/flake8-rst-docstrings#configuration 29 | # RST307: Error in "XXX" directive 30 | ignore = D105, E203, E501, RST307, W503 31 | per-file-ignores = 32 | 33 | # More assorted goodness. 
34 | max-line-length = 120 35 | show-source = true 36 | 37 | # Ensure that Flake8 warnings are silenced correctly: 38 | # https://github.com/plinss/flake8-noqa#options 39 | noqa-require-code = true 40 | 41 | # Ensure that Sphinx extensions of .rst are recognized: 42 | # https://github.com/peterjc/flake8-rst-docstrings#configuration 43 | rst-roles = class, func, ref 44 | rst-directives = envvar, exception 45 | rst-substitutions = version 46 | 47 | # Ensure that Sphinx docstrings use Numpy format for docstrings: 48 | # https://github.com/PyCQA/flake8-docstrings 49 | # 50 | # For details on the Numpy format: 51 | # https://www.sphinx-doc.org/en/master/usage/extensions/example_numpy.html 52 | docstring-convention = numpy 53 | -------------------------------------------------------------------------------- /.github/workflows/pr-conventional-commits.yaml: -------------------------------------------------------------------------------- 1 | # This workflow lints the PR's title and commits. It uses the commitizen 2 | # package (https://github.com/commitizen-tools/commitizen) and its `cz` 3 | # tool to check the title of the PR and all commit messages of the branch 4 | # which triggers this Action. 5 | 6 | name: Check conventional commits 7 | on: 8 | pull_request: 9 | branches: 10 | - '*' 11 | types: 12 | - opened 13 | - reopened 14 | - edited 15 | - synchronize 16 | permissions: 17 | contents: read 18 | 19 | jobs: 20 | conventional-commits: 21 | runs-on: ubuntu-latest 22 | steps: 23 | 24 | - name: Check out repository 25 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 26 | with: 27 | fetch-depth: 0 28 | 29 | - name: Set up Python 30 | uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 31 | with: 32 | python-version: '3.13' 33 | 34 | # Install Commitizen without using the package's Makefile: that's much faster than 35 | # creating a venv and installing heaps of dependencies that aren't required for this job. 
36 | - name: Set up Commitizen 37 | run: | 38 | pip install --upgrade pip wheel 39 | pip install 'commitizen ==4.10.0' 40 | 41 | # Run Commitizen to check the title of the PR which triggered this workflow, and check 42 | # all commit messages of the PR's branch. If any of the checks fails then this job fails. 43 | - name: Check PR title 44 | run: echo "$PR_TITLE" | cz check 45 | env: 46 | PR_TITLE: ${{ github.event.pull_request.title }} 47 | - name: Check PR commit messages 48 | run: | 49 | git remote add other "$PR_HEAD_REPO_CLONE_URL" 50 | git fetch other 51 | cz check --rev-range "origin/$PR_BASE_REF..other/$PR_HEAD_REF" 52 | env: 53 | PR_BASE_REF: ${{ github.event.pull_request.base.ref }} 54 | PR_HEAD_REF: ${{ github.event.pull_request.head.ref }} 55 | PR_HEAD_REPO_CLONE_URL: ${{ github.event.pull_request.head.repo.clone_url }} 56 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
12 | # 13 | import os 14 | import sys 15 | 16 | sys.path.insert(0, os.path.abspath("../../src")) 17 | 18 | 19 | # -- Project information ----------------------------------------------------- 20 | 21 | project = "Package" 22 | copyright = "2023, Package" 23 | author = "" 24 | 25 | 26 | # -- General configuration --------------------------------------------------- 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = [ 32 | "sphinx.ext.autodoc", 33 | "sphinx.ext.napoleon", 34 | "sphinx.ext.viewcode", 35 | "sphinx.ext.autosummary", 36 | "sphinx_markdown_builder", 37 | ] 38 | 39 | # Add any paths that contain templates here, relative to this directory. 40 | templates_path = ["_templates"] 41 | 42 | # Generate a summary, too. 43 | autosummary_generate = True 44 | 45 | # List of patterns, relative to source directory, that match files and 46 | # directories to ignore when looking for source files. 47 | # This pattern also affects html_static_path and html_extra_path. 48 | exclude_patterns = [] 49 | 50 | 51 | # -- Options for HTML output ------------------------------------------------- 52 | 53 | # The theme to use for HTML and HTML Help pages. See the documentation for 54 | # a list of builtin themes. 55 | # 56 | html_theme = "alabaster" 57 | 58 | # Add any paths that contain custom static files (such as style sheets) here, 59 | # relative to this directory. They are copied after the builtin static files, 60 | # so a file named "default.css" will overwrite the builtin "default.css". 61 | html_static_path = ["_static"] 62 | -------------------------------------------------------------------------------- /.github/workflows/scorecards-analysis.yaml: -------------------------------------------------------------------------------- 1 | # Run Scorecard for this repository to further check and harden software and process. 
2 | 3 | name: Scorecards supply-chain security 4 | on: 5 | # Only the default branch is supported. 6 | branch_protection_rule: 7 | schedule: 8 | - cron: 27 20 * * 1 9 | push: 10 | branches: [main] 11 | 12 | # Declare default permissions as read only. 13 | permissions: read-all 14 | 15 | jobs: 16 | analysis: 17 | name: Scorecards analysis 18 | runs-on: ubuntu-latest 19 | permissions: 20 | # Needed to upload the results to code-scanning dashboard. 21 | security-events: write 22 | actions: read 23 | contents: read 24 | # Needed to access OIDC token. 25 | id-token: write 26 | steps: 27 | 28 | - name: Harden Runner 29 | uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 30 | with: 31 | egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs 32 | disable-sudo: true 33 | 34 | - name: Check out repository 35 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 36 | with: 37 | persist-credentials: false 38 | 39 | - name: Run analysis 40 | uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3 41 | with: 42 | results_file: results.sarif 43 | results_format: sarif 44 | # Read-only PAT token. To create it, 45 | # follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat-optional. 46 | repo_token: ${{ secrets.SCORECARD_READ_TOKEN }} 47 | # Publish the results to enable scorecard badges. For more details, see 48 | # https://github.com/ossf/scorecard-action#publishing-results. 49 | # For private repositories, `publish_results` will automatically be set to `false`, 50 | # regardless of the value entered here. 51 | publish_results: true 52 | 53 | # Upload the results as artifacts (optional). 54 | - name: Upload artifact 55 | uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 56 | with: 57 | name: SARIF file 58 | path: results.sarif 59 | 60 | # Upload the results to GitHub's code scanning dashboard. 
61 | - name: Upload to code-scanning 62 | uses: github/codeql-action/upload-sarif@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5 63 | with: 64 | sarif_file: results.sarif 65 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yaml: -------------------------------------------------------------------------------- 1 | # Run CodeQL over the package. For more configuration options see codeql/codeql-config.yaml 2 | # and: https://github.com/github/codeql-action 3 | 4 | name: CodeQL 5 | on: 6 | push: 7 | branches: 8 | - release 9 | - main 10 | pull_request: 11 | branches: 12 | - release 13 | - main 14 | schedule: 15 | - cron: 20 15 * * 3 16 | permissions: 17 | contents: read 18 | 19 | jobs: 20 | analyze: 21 | name: Analyze 22 | runs-on: ubuntu-latest 23 | permissions: 24 | actions: read 25 | contents: read 26 | security-events: write 27 | strategy: 28 | fail-fast: false 29 | matrix: 30 | # Learn more about CodeQL language support at https://git.io/codeql-language-support 31 | language: [python, actions] 32 | python: ['3.13'] 33 | steps: 34 | 35 | - name: Harden Runner 36 | uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 37 | with: 38 | egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs 39 | disable-sudo: true 40 | 41 | - name: Checkout repository 42 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 43 | 44 | - name: Set up Python ${{ matrix.python }} 45 | uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 46 | with: 47 | python-version: ${{ matrix.python }} 48 | 49 | # For more details see the comment in _build.yaml. 50 | - name: Create empty virtual environment for Actions 51 | run: mkdir .venv 52 | - name: Install dependencies 53 | run: make setup 54 | 55 | # Initializes the CodeQL tools for scanning. 
56 | - name: Initialize CodeQL 57 | uses: github/codeql-action/init@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5 58 | with: 59 | languages: ${{ matrix.language }} 60 | config-file: .github/codeql/codeql-config.yaml 61 | # Override the default behavior so that the action doesn't attempt 62 | # to auto-install Python dependencies 63 | setup-python-dependencies: false 64 | # If you wish to specify custom queries, you can do so here or in a config file. 65 | # By default, queries listed here will override any specified in a config file. 66 | # Prefix the list here with "+" to use these queries and those in the config file. 67 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 68 | 69 | - name: Perform CodeQL Analysis 70 | uses: github/codeql-action/analyze@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5 71 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | include/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | pip-wheel-metadata/ 25 | share/python-wheels/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | MANIFEST 30 | 31 | # Local venv 32 | bin/ 33 | pyvenv.cfg 34 | 35 | # PyInstaller 36 | # Usually these files are written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .nox/ 49 | .coverage 50 | .coverage.* 51 | .cache 52 | nosetests.xml 53 | coverage.xml 54 | *.cover 55 | *.py,cover 56 | .hypothesis/ 57 | .pytest_cache/ 58 | 59 | # Translations 60 | *.mo 61 | *.pot 62 | 63 | # Django stuff: 64 | *.log 65 | local_settings.py 66 | db.sqlite3 67 | db.sqlite3-journal 68 | 69 | # Flask stuff: 70 | instance/ 71 | .webassets-cache 72 | 73 | # Scrapy stuff: 74 | .scrapy 75 | 76 | # Sphinx documentation 77 | docs/_build/ 78 | 79 | # PyBuilder 80 | target/ 81 | 82 | # Jupyter Notebook 83 | .ipynb_checkpoints 84 | 85 | # IPython 86 | profile_default/ 87 | ipython_config.py 88 | 89 | # pyenv 90 | .python-version 91 | 92 | # pipenv 93 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 94 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 95 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 96 | # install all needed dependencies. 97 | #Pipfile.lock 98 | 99 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 100 | __pypackages__/ 101 | 102 | # Celery stuff 103 | celerybeat-schedule 104 | celerybeat.pid 105 | 106 | # SageMath parsed files 107 | *.sage.py 108 | 109 | # Environments 110 | .env 111 | .venv 112 | env/ 113 | venv/ 114 | ENV/ 115 | env.bak/ 116 | venv.bak/ 117 | 118 | # Spyder project settings 119 | .spyderproject 120 | .spyproject 121 | 122 | # Rope project settings 123 | .ropeproject 124 | 125 | # mkdocs documentation 126 | /site 127 | 128 | # mypy 129 | .mypy_cache/ 130 | .dmypy.json 131 | dmypy.json 132 | 133 | # Pyre type checker 134 | .pyre/ 135 | 136 | # macOS cruft 137 | .DS_Store 138 | 139 | # vim swap files 140 | .*.swp 141 | -------------------------------------------------------------------------------- /.github/workflows/_wiki-documentation.yaml: -------------------------------------------------------------------------------- 1 | # This reusable workflow publishes Markdown docs to Github Wiki. Some manual 2 | # setup is required before using it: enable Wiki in repository and create at 3 | # least one page. 
4 | 5 | name: Publish Github Wiki documentation 6 | on: 7 | workflow_call: 8 | inputs: 9 | release-tag: 10 | type: string 11 | required: true 12 | description: Tag name of the release used in the Wiki commit message 13 | release-url: 14 | type: string 15 | required: true 16 | description: URL to the release page used in the Wiki commit message 17 | artifact-name: 18 | type: string 19 | required: true 20 | description: Name of the build artifact from which to extract the Wiki pages 21 | git-user-name: 22 | type: string 23 | required: true 24 | description: Name of the git user who commits and pushes the Wiki change set 25 | git-user-email: 26 | type: string 27 | required: true 28 | description: Email address of said git user 29 | secrets: 30 | REPO_ACCESS_TOKEN: 31 | required: true 32 | 33 | permissions: 34 | contents: read 35 | 36 | jobs: 37 | publish-wiki: 38 | name: Publish Github Wiki 39 | if: github.event.repository.has_wiki == true 40 | runs-on: ubuntu-latest 41 | steps: 42 | 43 | - name: Harden Runner 44 | uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 45 | with: 46 | egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs 47 | disable-sudo: true 48 | 49 | # Check out the repository's Wiki repo into the wiki/ folder. The token is required 50 | # only for private repositories. 51 | - name: Check out repository 52 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 53 | with: 54 | token: ${{ secrets.REPO_ACCESS_TOKEN }} 55 | repository: ${{ format('{0}.wiki', github.repository) }} 56 | path: wiki 57 | 58 | # Download the build artifacts attached to this workflow run. 59 | - name: Download artifact 60 | uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 61 | with: 62 | name: ${{ inputs.artifact-name }} 63 | path: dist 64 | 65 | # Unpack the Markdown docs into the Wiki repository. 
Delete existing files first 66 | # to ensure that no stale files stay behind. 67 | - name: Copy Markdown documentation 68 | run: | 69 | mkdir docs/ 70 | unzip -d docs/ "$(ls dist/*-docs-md.zip)" 71 | rm --recursive --force wiki/* 72 | cp --recursive --verbose --target-directory wiki/ docs/markdown/* 73 | 74 | # If there was any change to the Wiki then push the update. 75 | - name: Push to Wiki 76 | run: | 77 | cd wiki/ 78 | if [ -n "$(git status --porcelain)" ]; then 79 | git add . 80 | git config --global user.name "$USER_NAME" 81 | git config --global user.email "$USER_EMAIL" 82 | git commit --message "$WIKI_COMMIT_MESSAGE" 83 | git push 84 | fi 85 | env: 86 | USER_NAME: ${{ inputs.git-user-name }} 87 | USER_EMAIL: ${{ inputs.git-user-email }} 88 | WIKI_COMMIT_MESSAGE: | 89 | docs: update for ${{ inputs.release-tag }} 90 | 91 | Refs: ${{ github.sha }} 92 | Link: ${{ inputs.release-url }} 93 | -------------------------------------------------------------------------------- /.github/workflows/sync-with-upstream.yaml: -------------------------------------------------------------------------------- 1 | # Create a PR to sync with the upstream template repo. 2 | # The template repo is https://github.com/jenstroeger/python-package-template. 3 | 4 | name: Sync with template repository 5 | on: 6 | workflow_dispatch: 7 | 8 | schedule: 9 | - cron: 11 11 * * * 10 | 11 | permissions: 12 | contents: read 13 | 14 | jobs: 15 | sync: 16 | if: github.repository != 'jenstroeger/python-package-template' 17 | runs-on: ubuntu-latest 18 | permissions: 19 | contents: write 20 | pull-requests: write 21 | steps: 22 | 23 | - name: Check out template repository 24 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 25 | with: 26 | # If you decide to change the upstream template repository to a private one, uncomment 27 | # the following argument to pass the required token to be able to check it out. 28 | # token: ${{ secrets. 
}} 29 | repository: jenstroeger/python-package-template 30 | fetch-depth: 0 31 | path: template 32 | 33 | - name: Check out current repository 34 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 35 | with: 36 | token: ${{ secrets.REPO_ACCESS_TOKEN }} 37 | fetch-depth: 0 38 | ref: main 39 | path: repo 40 | 41 | - name: Sync with template 42 | env: 43 | GH_TOKEN: ${{ secrets.REPO_ACCESS_TOKEN }} 44 | # This is the username and email for the user who creates a branch and commits 45 | # the changes. In an organisation that should be a dedicated devops account. 46 | USER_NAME: jenstroeger 47 | USER_EMAIL: jenstroeger@users.noreply.github.com 48 | working-directory: ./repo 49 | run: | 50 | LATEST_VERSION=$(cd ../template && git describe --tags --abbrev=0) 51 | CURRENT_VERSION=$(test -f .github/workflows/.template_version && cat .github/workflows/.template_version || echo "v0.0.0") 52 | echo "Latest version is ${LATEST_VERSION} and current version is ${CURRENT_VERSION}." 53 | 54 | # Check if the template repo was changed/updated. 55 | if [ "${CURRENT_VERSION}" == "${LATEST_VERSION}" ]; then 56 | echo "Unable to find a new version, exiting..." 57 | else 58 | 59 | # Check if the branch already exists in the current repo. 60 | BRANCH_NAME="sync-$LATEST_VERSION" 61 | if [ "$(git rev-parse --verify origin/"""$BRANCH_NAME""" 2>/dev/null)" ]; then 62 | echo "Branch $BRANCH_NAME already exists, exiting..." 63 | else 64 | 65 | # Generate a patch file of all template changes in the cloned template repository. 66 | pushd ../template || exit 67 | # shellcheck disable=SC2046 68 | git diff "${CURRENT_VERSION}".."${LATEST_VERSION}" -- $(find . docs/ .github/ .github/workflows/ -maxdepth 1 -type f ! -name "*.md" ! -name ".template_version") > diff.patch 69 | popd || exit 70 | 71 | # Apply the generated patch to the current repo. 72 | patch --strip 1 --batch --merge --input ../template/diff.patch || true 73 | find . -name "*.orig" -type f -delete 74 | find . 
-name "*.rej" -type f -delete 75 | 76 | # Create a branch, commit, and push the changeset. 77 | git checkout -b "$BRANCH_NAME" 78 | echo "$LATEST_VERSION" > .github/workflows/.template_version 79 | git add . 80 | git config --global user.name "$USER_NAME" 81 | git config --global user.email "$USER_EMAIL" 82 | git config --list --global # For debug purposes. 83 | git commit --message "chore: sync with template $LATEST_VERSION" 84 | git push --set-upstream origin "$BRANCH_NAME" 85 | 86 | # Create the pull request. 87 | gh pr create --base main --head "$BRANCH_NAME" --title "chore: sync with template $LATEST_VERSION" --body "This PR was generated automatically." 88 | 89 | fi 90 | fi 91 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | default_install_hook_types: [pre-commit, commit-msg, pre-push] 4 | default_stages: [pre-commit] 5 | minimum_pre_commit_version: 4.0.0 6 | exclude: ^.vscode/ 7 | 8 | repos: 9 | 10 | # These meta hooks check the pre-commit configuration itself. 11 | - repo: meta 12 | hooks: 13 | - id: check-hooks-apply 14 | - id: check-useless-excludes 15 | 16 | # Commitizen enforces semantic and conventional commit messages. 17 | - repo: https://github.com/commitizen-tools/commitizen 18 | rev: v4.10.0 19 | hooks: 20 | - id: commitizen 21 | name: Check conventional commit message 22 | stages: [commit-msg] 23 | 24 | # Sort imports. 25 | - repo: https://github.com/pycqa/isort 26 | rev: 7.0.0 27 | hooks: 28 | - id: isort 29 | name: Sort import statements 30 | args: [--settings-path, pyproject.toml] 31 | stages: [pre-commit] 32 | 33 | # Add Black code formatters. 
34 | - repo: https://github.com/ambv/black 35 | rev: 25.11.0 36 | hooks: 37 | - id: black 38 | name: Format code 39 | args: [--config, pyproject.toml] 40 | - repo: https://github.com/asottile/blacken-docs 41 | rev: 1.20.0 42 | hooks: 43 | - id: blacken-docs 44 | name: Format code in docstrings 45 | args: [--line-length, '120'] 46 | additional_dependencies: [black==25.11.0] 47 | 48 | # Upgrade and rewrite Python idioms. 49 | - repo: https://github.com/asottile/pyupgrade 50 | rev: v3.21.2 51 | hooks: 52 | - id: pyupgrade 53 | name: Upgrade code idioms 54 | files: ^src/package/|^tests/ 55 | args: [--py310-plus] 56 | 57 | # Similar to pylint, with a few more/different checks. For more available 58 | # extensions: https://github.com/DmytroLitvinov/awesome-flake8-extensions 59 | - repo: https://github.com/pycqa/flake8 60 | rev: 7.3.0 61 | hooks: 62 | - id: flake8 63 | name: Check flake8 issues 64 | files: ^src/package/|^tests/ 65 | types: [text, python] 66 | additional_dependencies: [flake8-bugbear==25.11.29, flake8-builtins==3.1.0, flake8-comprehensions==3.17.0, flake8-docstrings==1.7.0, flake8-logging==1.8.0, flake8-mutable==1.2.0, flake8-noqa==1.4.0, flake8-print==5.0.0, flake8-pyi==25.5.0, flake8-pytest-style==2.2.0, flake8-rst-docstrings==0.4.0, pep8-naming==0.15.1] 67 | args: [--config, .flake8] 68 | 69 | # Run Pylint from the local repo to make sure venv packages 70 | # specified in pyproject.toml are available. 71 | - repo: local 72 | hooks: 73 | - id: pylint 74 | name: Check pylint issues 75 | entry: pylint 76 | language: python 77 | files: ^src/package/|^tests/ 78 | types: [text, python] 79 | args: [--rcfile, pyproject.toml] 80 | 81 | # Type-check all Python code. 82 | - repo: local 83 | hooks: 84 | - id: mypy 85 | name: Check typing annotations 86 | entry: mypy 87 | language: python 88 | files: ^src/package/|^tests/ 89 | types: [text, python] 90 | args: [--explicit-package-bases, --config-file, pyproject.toml] 91 | 92 | # Check for potential security issues. 
93 | - repo: https://github.com/PyCQA/bandit 94 | rev: 1.9.2 95 | hooks: 96 | - id: bandit 97 | name: Check for security issues 98 | args: [--configfile, pyproject.toml] 99 | files: ^src/package/|^tests/ 100 | types: [text, python] 101 | additional_dependencies: ['bandit[toml]'] 102 | 103 | # Enable a whole bunch of useful helper hooks, too. 104 | # See https://pre-commit.com/hooks.html for more hooks. 105 | - repo: https://github.com/pre-commit/pre-commit-hooks 106 | rev: v6.0.0 107 | hooks: 108 | - id: check-ast 109 | - id: check-case-conflict 110 | - id: check-merge-conflict 111 | - id: check-added-large-files 112 | stages: [pre-commit] 113 | args: [--maxkb=500] 114 | - id: debug-statements 115 | - id: end-of-file-fixer 116 | stages: [pre-commit] 117 | - id: trailing-whitespace 118 | args: [--markdown-linebreak-ext=md] 119 | stages: [pre-commit] 120 | - id: detect-private-key 121 | - id: detect-aws-credentials 122 | args: [--allow-missing-credentials] 123 | - id: check-yaml 124 | - id: check-toml 125 | - repo: https://github.com/pre-commit/pygrep-hooks 126 | rev: v1.10.0 127 | hooks: 128 | - id: python-check-blanket-noqa 129 | - id: python-check-blanket-type-ignore 130 | - id: python-check-mock-methods 131 | - id: python-use-type-annotations 132 | - id: rst-backticks 133 | - id: rst-directive-colons 134 | - id: rst-inline-touching-normal 135 | - id: text-unicode-replacement-char 136 | 137 | # Check the reStructured Text files that make up 138 | # this package's documentation. 139 | # Commenting this out because https://github.com/Lucas-C/pre-commit-hooks-markup/issues/13 140 | # - repo: https://github.com/Lucas-C/pre-commit-hooks-markup 141 | # rev: v1.0.1 142 | # hooks: 143 | # - id: rst-linter 144 | 145 | # Check and prettify the configuration files. 
146 | - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks 147 | rev: v2.15.0 148 | hooks: 149 | - id: pretty-format-ini 150 | args: [--autofix] 151 | - id: pretty-format-yaml 152 | args: [--autofix] 153 | # Commenting this out because https://github.com/pappasam/toml-sort/issues/11 154 | # - id: pretty-format-toml 155 | # args: [--autofix] 156 | 157 | # Check GitHub Actions workflow files. 158 | - repo: https://github.com/Mateusz-Grzelinski/actionlint-py 159 | rev: v1.7.9.24 160 | hooks: 161 | - id: actionlint 162 | 163 | # On push to the remote, run all tests. Note that the `COVERAGE_CORE` variable is required 164 | # for Python 3.12+ to make sure Coverage uses the new Python monitoring module. 165 | # See also: https://blog.trailofbits.com/2025/05/01/making-pypis-test-suite-81-faster/#optimizing-coverage-with-python-312s-sysmonitoring 166 | - repo: local 167 | hooks: 168 | - id: pytest 169 | name: Run unit tests 170 | entry: env COVERAGE_CORE=sysmon pytest --config-file pyproject.toml --cov-config pyproject.toml -m 'not integration and not performance' src/package/ tests/ docs/ 171 | language: python 172 | verbose: true 173 | always_run: true 174 | pass_filenames: false 175 | stages: [pre-push] 176 | -------------------------------------------------------------------------------- /.github/workflows/_build.yaml: -------------------------------------------------------------------------------- 1 | # This is a trusted builder implemented as a reusable workflow that can be called by other 2 | # Actions workflows. It checks, tests, and builds the artifacts including SBOM and documentations, 3 | # and computes hash digests as output to be used by a SLSA provenance generator. The artifacts are 4 | # always uploaded for every job to be used for debugging purposes, but they will be removed within 5 | # the specified retention days. 
6 | # 7 | # Even though we run the build in a matrix to check against different platforms, due to a known 8 | # limitation of reusable workflows that do not support setting strategy property from the caller 9 | # workflow, we only generate artifacts for ubuntu-latest and Python 3.13, which can be used to 10 | # create a release. For details see: 11 | # 12 | # https://docs.github.com/en/actions/using-workflows/reusing-workflows#limitations 13 | # 14 | # Note: if the build workflow needs to access secrets, they need to be passed by the caller using 15 | # `secrets: inherit`. See also 16 | # 17 | # https://docs.github.com/en/actions/using-workflows/reusing-workflows 18 | # https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions 19 | # 20 | # for the security recommendations. 21 | 22 | name: Build the package 23 | on: 24 | workflow_call: 25 | inputs: 26 | disable-pip-audit: 27 | type: boolean 28 | required: true 29 | description: Enable or disable running pip_audit to check installed packages for vulnerabilities 30 | outputs: 31 | artifacts-sha256: 32 | value: ${{ jobs.build.outputs.artifacts-sha256 }} 33 | description: The hash of the artifacts 34 | permissions: 35 | contents: read 36 | env: 37 | ARTIFACT_OS: ubuntu-latest # The default OS for release. 38 | ARTIFACT_PYTHON: '3.13' # The default Python version for release. 39 | 40 | jobs: 41 | build: 42 | # Uncomment the following to disable checks and tests for Draft pull requests. 
43 | # if: github.event.pull_request.draft == false 44 | outputs: 45 | artifacts-sha256: ${{ steps.compute-hash.outputs.artifacts-sha256 }} 46 | name: Build Python ${{ matrix.python }} on ${{ matrix.os }} 47 | runs-on: ${{ matrix.os }} 48 | strategy: 49 | fail-fast: false 50 | matrix: 51 | # It is recommended to pin a Runner version specifically: 52 | # https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners 53 | os: [ubuntu-latest, macos-latest, windows-latest] 54 | python: ['3.10', '3.11', '3.12', '3.13'] 55 | steps: 56 | 57 | - name: Harden Runner 58 | uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 59 | with: 60 | egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs 61 | disable-sudo: true 62 | 63 | - name: Check out repository 64 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 65 | with: 66 | fetch-depth: 0 67 | 68 | - name: Set up Python 69 | uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 70 | with: 71 | python-version: ${{ matrix.python }} 72 | 73 | # Using the Makefile assumes an activated virtual environment, which doesn't exist 74 | # when running in an Action environment (https://github.com/actions/setup-python/issues/359). 75 | # Instead we create an empty .venv folder so that the Makefile continues to function 76 | # while Python operates within the runner's global environment. It is safe to ignore 77 | # warnings from the Makefile about the missing virtual environment. 78 | - name: Create empty virtual environment for Actions 79 | run: mkdir .venv 80 | - name: Install dependencies 81 | run: make setup 82 | 83 | # Audit all currently installed packages for security vulnerabilities. This step can 84 | # be disabled by setting the repository variable DISABLE_PIP_AUDIT to 'true'. 
- name: Audit installed packages 86 | if: ${{ !inputs.disable-pip-audit }} 87 | run: make audit 88 | 89 | # Build the sdist and wheel distribution of the package and docs as a zip file. 90 | # We don't need to check and test the package separately because `make dist` runs 91 | # those targets first and only builds the package if they succeed. The Hypothesis 92 | # profile picks the Hypothesis settings (see tests/conftest.py) which, for Github, 93 | # disable randomized testing to avoid breaking CI. 94 | - name: Build the package 95 | run: make dist 96 | env: 97 | HYPOTHESIS_PROFILE: github 98 | 99 | # Generate the requirements.txt that contains the hash digests of the dependencies and 100 | # generate the SBOM using the CycloneDX SBOM generator. 101 | - name: Generate requirements.txt and SBOM 102 | if: matrix.os == env.ARTIFACT_OS && matrix.python == env.ARTIFACT_PYTHON 103 | run: make requirements sbom 104 | 105 | # Remove the old requirements.txt file (which includes _all_ packages) and generate a 106 | # new one for the package and its actual and required dependencies only. 107 | - name: Prune packages and generate required requirements.txt 108 | if: matrix.os == env.ARTIFACT_OS && matrix.python == env.ARTIFACT_PYTHON 109 | run: | 110 | rm requirements.txt 111 | make prune requirements 112 | 113 | # Find the paths to the artifact files that will be included in the release, compute 114 | # the SHA digest for all the release files and encode them using Base64, and export it 115 | # from this job.
116 | - name: Compute package hash 117 | if: matrix.os == env.ARTIFACT_OS && matrix.python == env.ARTIFACT_PYTHON 118 | id: compute-hash 119 | shell: bash 120 | run: | 121 | set -euo pipefail 122 | TARBALL_PATH=$(find dist/ -type f -name "*.tar.gz") 123 | WHEEL_PATH=$(find dist/ -type f -name "*.whl") 124 | REQUIREMENTS_PATH=$(find dist/ -type f -name "*-requirements.txt") 125 | SBOM_PATH=$(find dist/ -type f -name "*-sbom.json") 126 | HTML_DOCS_PATH=$(find dist/ -type f -name "*-docs-html.zip") 127 | MARKDOWN_DOCS_PATH=$(find dist/ -type f -name "*-docs-md.zip") 128 | BUILD_EPOCH_PATH=$(find dist/ -type f -name "*-build-epoch.txt") 129 | DIGEST=$(sha256sum "$TARBALL_PATH" "$WHEEL_PATH" "$REQUIREMENTS_PATH" "$SBOM_PATH" "$HTML_DOCS_PATH" "$MARKDOWN_DOCS_PATH" "$BUILD_EPOCH_PATH" | base64 -w0) 130 | echo "Digest of artifacts is $DIGEST." 131 | echo "artifacts-sha256=$DIGEST" >> "$GITHUB_OUTPUT" 132 | 133 | # For now only generate artifacts for the specified OS and Python version in env variables. 134 | # Currently reusable workflows do not support setting strategy property from the caller workflow. 
135 | - name: Upload the package artifact for debugging and release 136 | if: matrix.os == env.ARTIFACT_OS && matrix.python == env.ARTIFACT_PYTHON 137 | uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 138 | with: 139 | name: artifact-${{ matrix.os }}-python-${{ matrix.python }} 140 | path: dist 141 | if-no-files-found: error 142 | retention-days: 7 143 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html 2 | # https://flit.pypa.io/en/latest/pyproject_toml.html 3 | [build-system] 4 | requires = ["flit_core >=3.2.0,<4.0.0"] 5 | build-backend = "flit_core.buildapi" 6 | 7 | [project] 8 | name = "package" 9 | requires-python = ">=3.10.0,<3.14.0" 10 | authors = [{name = "Author", email = "author@email"}] 11 | maintainers = [{name = "Maintainer", email = "maintainer@email"}] 12 | dynamic = ["version", "description"] 13 | license = {file = "LICENSE.md"} 14 | readme = "README.md" 15 | dependencies = [] 16 | keywords = [] 17 | # https://pypi.org/classifiers/ 18 | classifiers = [ 19 | "Development Status :: 1 - Planning", 20 | "Intended Audience :: Developers", 21 | "License :: OSI Approved :: MIT License", 22 | "Natural Language :: English", 23 | "Operating System :: OS Independent", 24 | "Programming Language :: Python", 25 | "Programming Language :: Python :: 3.10", 26 | "Programming Language :: Python :: 3.11", 27 | "Programming Language :: Python :: 3.12", 28 | "Programming Language :: Python :: 3.13", 29 | "Programming Language :: Python :: 3 :: Only", 30 | "Programming Language :: Python :: Implementation :: CPython", 31 | "Topic :: Software Development :: Libraries :: Python Modules", 32 | ] 33 | 34 | [project.scripts] 35 | something = "package.__main__:main" 36 | 37 | [project.entry-points] 38 | 39 | [project.optional-dependencies] 40 | 
# The 'actions' requirements match exactly the packages installed by the workflows. 41 | # We keep them listed here to ensure the infrastructure BOM is consistent with what's 42 | # installed. Make sure to keep the requirements in sync with the workflows! 43 | actions = [ 44 | "commitizen ==4.10.0", 45 | "twine ==6.2.0", 46 | ] 47 | dev = [ 48 | "flit >=3.2.0,<4.0.0", 49 | "mypy >=1.0.0,<1.20", 50 | "pip-audit >=2.4.4,<3.0.0", 51 | "pylint >=3.0.0,<4.1.0", 52 | "perflint >=0.8.0,<1.0.0", 53 | "cyclonedx-bom >=4.0.0,<5.0.0", 54 | ] 55 | docs = [ 56 | "sphinx >=5.1.1,<9.0.0", 57 | "sphinx-markdown-builder >=0.6.4,<1.0.0", 58 | ] 59 | hooks = [ 60 | "pre-commit >=3.0.0,<4.6.0", 61 | ] 62 | # Note that the `custom_exit_code` and `env` plugins may currently be unmaintained. 63 | test = [ 64 | "coverage ==7.6.12; python_version<'3.14'", # https://github.com/pypi/warehouse/pull/17872#issuecomment-2845932281 65 | "faker ==38.2.0", 66 | "hypothesis >=6.21.0,<6.148.6", 67 | "pytest >=7.2.0,<9.0.0", 68 | "pytest-benchmark ==5.2.0", 69 | "pytest-cases ==3.9.1", 70 | "pytest-custom_exit_code ==0.3.0", 71 | "pytest-cov ==6.3.0", # Uses: coverage[toml] >=7.5 72 | "pytest-doctestplus ==1.6.0", 73 | "pytest-env ==1.2.0", 74 | ] 75 | 76 | [project.urls] 77 | Homepage = "https://github.com/jenstroeger/python-package-template" 78 | Changelog = "https://github.com/jenstroeger/python-package-template/blob/main/CHANGELOG.md" 79 | Documentation = "https://github.com/jenstroeger/python-package-template/wiki" 80 | Issues = "https://github.com/jenstroeger/python-package-template/issues" 81 | 82 | 83 | # https://bandit.readthedocs.io/en/latest/config.html 84 | # Skip test B101 because of issue https://github.com/PyCQA/bandit/issues/457 85 | [tool.bandit] 86 | tests = [] 87 | skips = ["B101"] 88 | 89 | 90 | # https://github.com/psf/black#configuration 91 | [tool.black] 92 | line-length = 120 93 | 94 | 95 | # https://github.com/commitizen-tools/commitizen 96 | # 
https://commitizen-tools.github.io/commitizen/bump/ 97 | [tool.commitizen] 98 | bump_message = """bump: release $current_version → $new_version 99 | 100 | Automatically generated by Commitizen. 101 | """ 102 | tag_format = "v$major.$minor.$patch$prerelease" 103 | update_changelog_on_bump = true 104 | version_files = [ 105 | "src/package/__init__.py:__version__", 106 | ] 107 | major_version_zero = false 108 | version = "2.18.0" 109 | 110 | 111 | # https://github.com/pytest-dev/pytest-cov 112 | # https://github.com/nedbat/coveragepy 113 | [tool.coverage.report] 114 | fail_under = 100 115 | show_missing = true 116 | 117 | [tool.coverage.run] 118 | branch = true 119 | omit = [ 120 | "src/package/__main__.py", 121 | ] 122 | 123 | 124 | # https://flit.pypa.io/en/latest/pyproject_toml.html#sdist-section 125 | # See also: https://github.com/pypa/flit/issues/565 126 | [tool.flit.sdist] 127 | include = [] 128 | exclude = [ 129 | ".github/", 130 | ".vscode/", 131 | "docs/", 132 | "tests/", 133 | ".flake8", 134 | ".gitattributes", 135 | ".gitignore", 136 | ".pre-commit-config.yaml", 137 | "CHANGELOG.md", 138 | "CODEOWNERS", 139 | "Makefile", 140 | "SECURITY.md", 141 | ] 142 | 143 | 144 | # https://pycqa.github.io/isort/ 145 | [tool.isort] 146 | profile = "black" 147 | multi_line_output = 3 148 | line_length = 120 149 | skip_gitignore = true 150 | filter_files = true 151 | 152 | 153 | # https://mypy.readthedocs.io/en/stable/config_file.html#using-a-pyproject-toml 154 | [tool.mypy] 155 | # mypy_path = 156 | # exclude = 157 | show_error_codes = true 158 | show_column_numbers = true 159 | pretty = true 160 | show_traceback = true 161 | check_untyped_defs = true 162 | incremental = false 163 | strict = true 164 | warn_return_any = true 165 | warn_redundant_casts = true 166 | warn_unreachable = true 167 | warn_unused_configs = true 168 | warn_unused_ignores = true 169 | disallow_any_explicit = true 170 | disallow_untyped_calls = true 171 | disallow_untyped_defs = true 172 | 
disallow_incomplete_defs = true 173 | disallow_untyped_decorators = true 174 | # disable_error_code = 175 | # allow_redefinition = 176 | 177 | [[tool.mypy.overrides]] 178 | module = [ 179 | "pytest.*", 180 | ] 181 | ignore_missing_imports = true 182 | 183 | 184 | # https://pylint.pycqa.org/en/latest/user_guide/configuration/index.html 185 | [tool.pylint.main] 186 | fail-under = 10.0 187 | suggestion-mode = true 188 | load-plugins = [ 189 | "perflint", # A Linter for performance anti-patterns. 190 | "pylint.extensions.bad_builtin", 191 | "pylint.extensions.broad_try_clause", 192 | "pylint.extensions.check_elif", 193 | "pylint.extensions.code_style", 194 | "pylint.extensions.comparison_placement", 195 | "pylint.extensions.confusing_elif", 196 | "pylint.extensions.consider_refactoring_into_while_condition", 197 | "pylint.extensions.consider_ternary_expression", 198 | "pylint.extensions.dict_init_mutate", 199 | # "pylint.extensions.docparams", 200 | # "pylint.extensions.docstyle", 201 | "pylint.extensions.dunder", 202 | "pylint.extensions.empty_comment", 203 | "pylint.extensions.for_any_all", 204 | "pylint.extensions.magic_value", 205 | # "pylint.extensions.mccabe", 206 | "pylint.extensions.no_self_use", 207 | "pylint.extensions.overlapping_exceptions", 208 | "pylint.extensions.private_import", 209 | "pylint.extensions.redefined_loop_name", 210 | "pylint.extensions.redefined_variable_type", 211 | "pylint.extensions.set_membership", 212 | "pylint.extensions.typing", 213 | "pylint.extensions.while_used", 214 | ] 215 | disable = [ 216 | "fixme", 217 | "line-too-long", # Replaced by Flake8 Bugbear B950 check. 
218 | "too-few-public-methods", 219 | "too-many-ancestors", 220 | "too-many-arguments", 221 | "too-many-boolean-expressions", 222 | "too-many-branches", 223 | "too-many-instance-attributes", 224 | "too-many-lines", 225 | "too-many-locals", 226 | "too-many-nested-blocks", 227 | "too-many-positional-arguments", 228 | "too-many-public-methods", 229 | "too-many-return-statements", 230 | "too-many-statements", 231 | "too-many-try-statements", 232 | ] 233 | 234 | [tool.pylint.MISCELLANEOUS] 235 | notes = [ 236 | "FIXME", 237 | "TODO", 238 | "BUGBUG", 239 | ] 240 | 241 | [tool.pylint.FORMAT] 242 | max-line-length = 120 243 | 244 | 245 | # https://docs.pytest.org/en/latest/reference/customize.html#configuration-file-formats 246 | # https://docs.pytest.org/en/latest/reference/reference.html#configuration-options 247 | # https://docs.pytest.org/en/latest/reference/reference.html#command-line-flags 248 | # 249 | # To integrate Hypothesis into pytest and coverage, we use its native plugin: 250 | # https://hypothesis.readthedocs.io/en/latest/details.html#the-hypothesis-pytest-plugin 251 | # 252 | # To discover tests in documentation, we use doctest and the doctest-plus plugin which 253 | # adds multiple useful options to control tests in documentation. 
More details at: 254 | # https://docs.python.org/3/library/doctest.html 255 | # https://github.com/scientific-python/pytest-doctestplus 256 | # 257 | # To avoid failing pytest when no tests were discovered, we need an extra plugin: 258 | # https://docs.pytest.org/en/latest/reference/exit-codes.html 259 | # https://github.com/yashtodi94/pytest-custom_exit_code 260 | [tool.pytest.ini_options] 261 | minversion = "7.0" 262 | addopts = """-vv -ra --tb native --durations 0 --strict-markers --import-mode importlib \ 263 | --hypothesis-show-statistics --hypothesis-explain --hypothesis-verbosity verbose \ 264 | --doctest-modules --doctest-continue-on-failure --doctest-glob '*.rst' --doctest-plus \ 265 | --suppress-no-test-exit-code \ 266 | --cov package \ 267 | """ # Consider adding --pdb 268 | # https://docs.python.org/3/library/doctest.html#option-flags 269 | doctest_optionflags = "IGNORE_EXCEPTION_DETAIL" 270 | env = [ 271 | "PYTHONDEVMODE=1", # https://docs.python.org/3/library/devmode.html 272 | ] 273 | filterwarnings = [ 274 | "error", 275 | "always::DeprecationWarning", 276 | # The CoverageWarning warning is issued in two contexts: 277 | # Python 3.10, 3.11: sys.monitoring isn't available in this version, using default core (no-sysmon) 278 | # Python 3.12, 3.13: sys.monitoring can't measure branches in this version, using default core (no-sysmon) 279 | "ignore:sys.monitoring isn't available in this version:coverage.exceptions.CoverageWarning", 280 | "always::coverage.exceptions.CoverageWarning", 281 | # https://docs.pytest.org/en/latest/how-to/failures.html#warning-about-unraisable-exceptions-and-unhandled-thread-exceptions 282 | "error::pytest.PytestUnraisableExceptionWarning", 283 | "error::pytest.PytestUnhandledThreadExceptionWarning", 284 | ] 285 | markers = [ 286 | "integration: more complex application-level integration tests.", 287 | "performance: performance tests.", 288 | ] 289 | --------------------------------------------------------------------------------
/.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | # We run checks on pushing to the specified branches. 2 | # Pushing to release also triggers a release. 3 | 4 | name: Check and Release 5 | on: 6 | push: 7 | branches: 8 | - release 9 | - main 10 | permissions: 11 | contents: read 12 | 13 | jobs: 14 | check: 15 | if: ${{ !startsWith(github.event.head_commit.message, 'bump:') }} 16 | uses: ./.github/workflows/_build.yaml 17 | permissions: 18 | contents: read 19 | with: 20 | disable-pip-audit: ${{ vars.DISABLE_PIP_AUDIT == 'true' }} 21 | 22 | # On pushes to the 'release' branch create a new release by bumping the version 23 | # and generating a change log. That's the new bump commit and associated tag. 24 | bump: 25 | needs: check 26 | if: github.ref == 'refs/heads/release' 27 | runs-on: ubuntu-latest 28 | permissions: 29 | contents: write 30 | steps: 31 | 32 | - name: Harden Runner 33 | uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 34 | with: 35 | egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs 36 | disable-sudo: true 37 | 38 | - name: Check out repository 39 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 40 | with: 41 | fetch-depth: 0 42 | token: ${{ secrets.REPO_ACCESS_TOKEN }} 43 | 44 | - name: Set up Python 45 | uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 46 | with: 47 | python-version: '3.13' 48 | 49 | - name: Set up Commitizen 50 | run: | 51 | pip install --upgrade pip wheel 52 | pip install 'commitizen ==4.10.0' 53 | 54 | - name: Set up user 55 | run: | 56 | git config --global user.name "$USER_NAME" 57 | git config --global user.email "$USER_EMAIL" 58 | git config --list --global # For debug purposes. 59 | env: 60 | # This is the username and email for the user who commits and pushes the release 61 | # commit. 
In an organisation that should be a dedicated devops account. 62 | USER_NAME: jenstroeger 63 | USER_EMAIL: jenstroeger@users.noreply.github.com 64 | 65 | # In some cases a user may merge commits that don't cause a version bump, which causes commitizen 66 | # to fail with error code 21 (NoneIncrementExit). Thus we silence that particular error to avoid 67 | # failing this job: https://commitizen-tools.github.io/commitizen/bump/#avoid-raising-errors 68 | # Furthermore, if the version strings have inconsistent versions then `cz` and CI fail such that 69 | # the issue can be inspected and fixed. 70 | - name: Create changelog and bump 71 | run: cz --no-raise 21 bump --changelog --check-consistency --yes 72 | 73 | - name: Push the release 74 | run: | 75 | git push 76 | git push --tags 77 | 78 | # When triggered by the version bump commit, build the package and publish the release artifacts. 79 | build: 80 | if: github.ref == 'refs/heads/release' && startsWith(github.event.head_commit.message, 'bump:') 81 | uses: ./.github/workflows/_build.yaml 82 | permissions: 83 | contents: read 84 | with: 85 | disable-pip-audit: ${{ vars.DISABLE_PIP_AUDIT == 'true' }} 86 | 87 | # Create a new Release on Github from the verified build artifacts, and optionally 88 | # publish the artifacts to a PyPI server. 89 | release: 90 | needs: [build] 91 | name: Release 92 | outputs: 93 | release-tag: ${{ steps.upload-assets.outputs.release-tag }} 94 | release-url: ${{ steps.upload-assets.outputs.release-url }} 95 | runs-on: ubuntu-latest 96 | permissions: 97 | contents: write # To publish release notes. 
98 | steps: 99 | 100 | - name: Harden Runner 101 | uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 102 | with: 103 | egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs 104 | disable-sudo: true 105 | 106 | - name: Check out repository 107 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 108 | with: 109 | fetch-depth: 0 110 | 111 | - name: Download artifact 112 | uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 113 | with: 114 | name: artifact-ubuntu-latest-python-3.13 115 | path: dist 116 | 117 | # Verify hashes by first computing hashes for the artifacts and then comparing them 118 | # against the hashes computed by the build job. 119 | - name: Verify the artifact hash 120 | env: 121 | ARTIFACT_HASH: ${{ needs.build.outputs.artifacts-sha256 }} 122 | run: | 123 | set -euo pipefail 124 | echo "Hash of package should be $ARTIFACT_HASH." 125 | echo "$ARTIFACT_HASH" | base64 --decode | sha256sum --strict --check --status || exit 1 126 | 127 | # Create the Release Notes using commitizen. 128 | - name: Set up Python 129 | uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 130 | with: 131 | python-version: '3.13' 132 | 133 | - name: Set up Commitizen 134 | run: | 135 | pip install --upgrade pip wheel 136 | pip install 'commitizen ==4.10.0' 137 | 138 | - name: Create Release Notes 139 | run: cz changelog --dry-run "$(cz version --project)" > RELEASE_NOTES.md 140 | 141 | # Create the release including the artifacts and the SLSA L3 provenance. 
142 | - name: Upload assets 143 | id: upload-assets 144 | env: 145 | GH_TOKEN: ${{ secrets.REPO_ACCESS_TOKEN }} 146 | run: | 147 | TAG=$(git describe --tags --abbrev=0) 148 | gh release create "$TAG" dist/* --title "$TAG" --notes-file RELEASE_NOTES.md 149 | echo "release-tag=$TAG" >> "$GITHUB_OUTPUT" 150 | echo "release-url=$(gh release view """$TAG""" --json url --jq .url)" >> "$GITHUB_OUTPUT" 151 | 152 | # Uncomment the following steps to publish to a PyPI server. 153 | # At the moment PyPI does not provide a mechanism to publish 154 | # the provenance. So, users have to download the provenance from 155 | # the release page of the GitHub repository to verify the artifact. 156 | # Install Twine without using the package's Makefile to avoid 157 | # installing unnecessary dependencies, which is slow. 158 | # - name: Set up Twine 159 | # run: | 160 | # pip install --upgrade pip wheel 161 | # pip install 'twine ==4.0.2' 162 | 163 | # Pass the username, password, and PYPI repository URL via env variables. 164 | # Read the password from GitHub secrets or via other trusted mechanisms. 165 | # Do not hardcode the password in the workflow. 166 | # - name: Publish to PyPI server 167 | # run: twine upload --verbose --skip-existing dist/*.tar.gz dist/*.whl 168 | # env: 169 | # TWINE_USERNAME= 170 | # TWINE_PASSWORD= 171 | # TWINE_REPOSITORY_URL= 172 | 173 | # Generate the build provenance. The generator should be referenced with a semantic version. 174 | # The build will fail if we reference it using the commit SHA. To avoid using a pre-built 175 | # provenance generator which depends on an external service Rekor (https://github.com/sigstore/rekor) 176 | # we build this generator from source for now. 
For more information see this discussion: 177 | # https://github.com/slsa-framework/slsa-github-generator/issues/942 178 | provenance: 179 | needs: [build, release] 180 | uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0 181 | with: 182 | base64-subjects: ${{ needs.build.outputs.artifacts-sha256 }} 183 | compile-generator: false # Do not build the provenance generator from source anymore. 184 | # Set private-repository to true for private repositories. Note that the repository name is 185 | # uploaded as part of the transparency log entry on the public Rekor instance (rekor.sigstore.dev). 186 | private-repository: false 187 | permissions: 188 | actions: read # To read the workflow path. 189 | id-token: write # To sign the provenance. 190 | contents: write # To add assets to a release. 191 | 192 | # Publish the SLSA provenance as the GitHub release asset. 193 | publish_provenance: 194 | needs: [release, provenance] 195 | name: Publish provenance 196 | runs-on: ubuntu-latest 197 | permissions: 198 | contents: write # To publish release notes. 
199 | steps: 200 | 201 | - name: Harden Runner 202 | uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 203 | with: 204 | egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs 205 | disable-sudo: true 206 | 207 | - name: Check out repository 208 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 209 | with: 210 | fetch-depth: 0 211 | 212 | - name: Download provenance 213 | uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 214 | with: 215 | name: ${{ needs.provenance.outputs.provenance-name }} 216 | 217 | - name: Upload provenance 218 | run: gh release upload ${{ needs.release.outputs.release-tag }} ${{ needs.provenance.outputs.provenance-name }} 219 | env: 220 | GH_TOKEN: ${{ secrets.REPO_ACCESS_TOKEN }} 221 | 222 | # Send out release notifications after the Release was published on GitHub. 223 | # Uncomment the `if` to disable sending release notifications. 224 | notifications: 225 | # if: ${{ false }} 226 | needs: [release] 227 | name: Send Release notifications 228 | uses: ./.github/workflows/_release-notifications.yaml 229 | permissions: 230 | contents: read 231 | with: 232 | repo-name: ${{ github.event.repository.name }} 233 | release-tag: ${{ needs.release.outputs.release-tag }} 234 | release-url: ${{ needs.release.outputs.release-url }} 235 | secrets: 236 | SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} 237 | 238 | # Publish the generated Markdown documentation to the repository's Wiki. 239 | # Uncomment the `if` to disable generating Wiki documentation. 
240 | wiki: 241 | # if: ${{ false }} 242 | needs: [release] 243 | name: Publish Github Wiki documentation 244 | uses: ./.github/workflows/_wiki-documentation.yaml 245 | permissions: 246 | contents: read 247 | with: 248 | release-tag: ${{ needs.release.outputs.release-tag }} 249 | release-url: ${{ needs.release.outputs.release-url }} 250 | # Github disallows passing environment variables as arguments to a reusable 251 | # workflow, so we have to duplicate these values here. Related discussion 252 | # here: https://github.com/actions/toolkit/issues/931 253 | artifact-name: artifact-ubuntu-latest-python-3.13 254 | git-user-name: jenstroeger 255 | git-user-email: jenstroeger@users.noreply.github.com 256 | secrets: 257 | REPO_ACCESS_TOKEN: ${{ secrets.REPO_ACCESS_TOKEN }} 258 | 259 | # After the bump commit was pushed to the release branch, rebase the main branch 260 | # (to_head argument) on top of the new release branch (from_base argument), to keep 261 | # the histories of both branches in sync. 262 | rebase_main: 263 | # if: ${{ false }} 264 | needs: [release] 265 | name: Rebase main branch on release 266 | uses: ./.github/workflows/_generate-rebase.yaml 267 | permissions: 268 | contents: read 269 | with: 270 | to-head: main 271 | from-base: origin/release 272 | git-user-name: jenstroeger 273 | git-user-email: jenstroeger@users.noreply.github.com 274 | secrets: 275 | REPO_ACCESS_TOKEN: ${{ secrets.REPO_ACCESS_TOKEN }} 276 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | 2 | # Use bash as the shell when executing a rule's recipe. For more details: 3 | # https://www.gnu.org/software/make/manual/html_node/Choosing-the-Shell.html 4 | SHELL := bash 5 | 6 | # Set the package's name and version for use throughout the Makefile. 
7 | PACKAGE_NAME := package 8 | PACKAGE_VERSION := $(shell python -c $$'try: import $(PACKAGE_NAME); print($(PACKAGE_NAME).__version__);\nexcept: print("unknown");') 9 | 10 | # This variable contains the first goal that matches any of the listed goals 11 | # here, else it contains an empty string. The net effect is to filter out 12 | # whether this current run of `make` requires a Python virtual environment 13 | # by checking if any of the given goals requires a virtual environment (all 14 | # except the 'venv' and the various 'clean' and 'nuke' goals do). Note that 15 | # checking for 'upgrade' and 'check' goals includes all of their variations. 16 | NEED_VENV := $(or \ 17 | $(findstring all,$(MAKECMDGOALS)), \ 18 | $(findstring setup,$(MAKECMDGOALS)), \ 19 | $(findstring upgrade,$(MAKECMDGOALS)), \ 20 | $(findstring sbom,$(MAKECMDGOALS)), \ 21 | $(findstring requirements,$(MAKECMDGOALS)), \ 22 | $(findstring audit,$(MAKECMDGOALS)), \ 23 | $(findstring check,$(MAKECMDGOALS)), \ 24 | $(findstring test,$(MAKECMDGOALS)), \ 25 | $(findstring dist,$(MAKECMDGOALS)), \ 26 | $(findstring docs,$(MAKECMDGOALS)), \ 27 | $(findstring prune,$(MAKECMDGOALS)), \ 28 | ) 29 | ifeq ($(NEED_VENV),) 30 | # None of the current goals requires a virtual environment. 31 | else 32 | ifeq ($(origin VIRTUAL_ENV),undefined) 33 | $(warning No Python virtual environment found, proceeding anyway) 34 | else 35 | ifeq ($(wildcard .venv/upgraded-on),) 36 | $(warning Python virtual environment not yet set up, proceeding anyway) 37 | endif 38 | endif 39 | endif 40 | 41 | # If the project configuration file has been updated (package deps or 42 | # otherwise) then warn the user and suggest resolving the conflict. 
43 | ifeq ($(shell test pyproject.toml -nt .venv/upgraded-on; echo $$?),0) 44 | $(warning pyproject.toml was updated, consider `make upgrade` if your packages have changed) 45 | $(warning If this is not correct then run `make upgrade-quiet`) 46 | endif 47 | 48 | # The SOURCE_DATE_EPOCH environment variable allows the `flit` tool to 49 | # reproducibly build packages: https://flit.pypa.io/en/latest/reproducible.html 50 | # If that variable doesn't exist, then set it here to the current epoch. 51 | ifeq ($(origin SOURCE_DATE_EPOCH),undefined) 52 | SOURCE_DATE_EPOCH := $(shell date +%s) 53 | endif 54 | 55 | # Check, test, and build artifacts for this package. 56 | .PHONY: all 57 | all: check test dist docs 58 | 59 | # Create a virtual environment, either for Python3.13 (default) or using 60 | # the Python interpreter specified in the PYTHON environment variable. Also 61 | # create an empty pip.conf file to ensure that `pip config` modifies this 62 | # venv only, unless told otherwise. For more background, see: 63 | # https://github.com/jenstroeger/python-package-template/issues/262 64 | .PHONY: venv 65 | venv: 66 | if [ ! -z "${VIRTUAL_ENV}" ]; then \ 67 | echo "Found an activated Python virtual environment, exiting" && exit 1; \ 68 | fi 69 | if [ -d .venv/ ]; then \ 70 | echo "Found an inactive Python virtual environment, please activate or nuke it" && exit 1; \ 71 | fi 72 | if [ -z "${PYTHON}" ]; then \ 73 | echo "Creating virtual environment in .venv/ for python3.13"; \ 74 | python3.13 -m venv --upgrade-deps --prompt . .venv; \ 75 | else \ 76 | echo "Creating virtual environment in .venv/ for ${PYTHON}"; \ 77 | ${PYTHON} -m venv --upgrade-deps --prompt . .venv; \ 78 | fi 79 | touch .venv/pip.conf 80 | 81 | # Set up a newly created virtual environment. Note: pre-commit uses the 82 | # venv's Python interpreter, so if you've created multiple venvs then 83 | # pre-commit's git hooks run against the most recently set up venv. 
84 | # The build.yaml GitHub Actions workflow expects dist directory to exist. 85 | # So we create the dist dir if it doesn't exist in the setup target. 86 | # See https://packaging.python.org/en/latest/tutorials/packaging-projects/#generating-distribution-archives. 87 | .PHONY: setup 88 | setup: force-upgrade 89 | pre-commit install 90 | mkdir -p dist 91 | 92 | # Install or upgrade an existing virtual environment based on the 93 | # package dependencies declared in pyproject.toml. 94 | .PHONY: upgrade force-upgrade 95 | upgrade: .venv/upgraded-on 96 | .venv/upgraded-on: pyproject.toml 97 | python -m pip install --upgrade pip setuptools 98 | python -m pip install --upgrade wheel 99 | python -m pip install --upgrade --upgrade-strategy eager --editable .[actions,dev,docs,hooks,test] 100 | $(MAKE) upgrade-quiet 101 | force-upgrade: 102 | rm -f .venv/upgraded-on 103 | $(MAKE) upgrade 104 | upgrade-quiet: 105 | echo "Automatically generated by Python Package Makefile on $$(date '+%Y-%m-%d %H:%M:%S %z')." > .venv/upgraded-on 106 | 107 | # Generate a Software Bill of Materials (SBOM). 108 | .PHONY: sbom 109 | sbom: requirements 110 | cyclonedx-py requirements --output-format json --outfile dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-sbom.json 111 | 112 | # Generate a requirements.txt file containing version and integrity hashes for all 113 | # packages currently installed in the virtual environment. There's no easy way to 114 | # do this, see also: https://github.com/pypa/pip/issues/4732 115 | # 116 | # If using a private package index, make sure that it implements the JSON API: 117 | # https://warehouse.pypa.io/api-reference/json.html 118 | # 119 | # We also want to make sure that this package itself is added to the requirements.txt 120 | # file, and if possible even with proper hashes. 
121 | .PHONY: requirements 122 | requirements: requirements.txt 123 | requirements.txt: pyproject.toml 124 | echo -n "" > requirements.txt 125 | for pkg in $$(python -m pip freeze --local --disable-pip-version-check --exclude-editable); do \ 126 | pkg=$${pkg//[$$'\r\n']}; \ 127 | echo -n $$pkg >> requirements.txt; \ 128 | echo "Fetching package metadata for requirement '$$pkg'"; \ 129 | [[ $$pkg =~ (.*)==(.*) ]] && curl -s https://pypi.org/pypi/$${BASH_REMATCH[1]}/$${BASH_REMATCH[2]}/json | python -c "import json, sys; print(''.join(f''' \\\\\n --hash=sha256:{pkg['digests']['sha256']}''' for pkg in json.load(sys.stdin)['urls']));" >> requirements.txt; \ 130 | done 131 | echo -e -n "$(PACKAGE_NAME)==$(PACKAGE_VERSION)" >> requirements.txt 132 | if [ -f dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz ]; then \ 133 | echo -e -n " \\\\\n $$(python -m pip hash --algorithm sha256 dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz | grep '^\-\-hash')" >> requirements.txt; \ 134 | fi 135 | if [ -f dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl ]; then \ 136 | echo -e -n " \\\\\n $$(python -m pip hash --algorithm sha256 dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl | grep '^\-\-hash')" >> requirements.txt; \ 137 | fi 138 | echo "" >> requirements.txt 139 | cp requirements.txt dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-requirements.txt 140 | 141 | # Audit the currently installed packages. Skip packages that are installed in 142 | # editable mode (like the one in development here) because they may not have 143 | # a PyPI entry; also print out CVE description and potential fixes if audit 144 | # found an issue. 145 | .PHONY: audit 146 | audit: 147 | if ! $$(python -c "import pip_audit" &> /dev/null); then \ 148 | echo "No package pip_audit installed, upgrade your environment!" && exit 1; \ 149 | fi; 150 | python -m pip_audit --skip-editable --desc on --fix --dry-run 151 | 152 | # Run some or all checks over the package code base. 
153 | .PHONY: check check-code check-bandit check-flake8 check-lint check-mypy check-actionlint 154 | check-code: check-bandit check-flake8 check-lint check-mypy check-actionlint 155 | check-bandit: 156 | pre-commit run bandit --all-files 157 | check-flake8: 158 | pre-commit run flake8 --all-files 159 | check-lint: 160 | pre-commit run pylint --all-files 161 | check-mypy: 162 | pre-commit run mypy --all-files 163 | check-actionlint: 164 | pre-commit run actionlint --all-files 165 | check: 166 | pre-commit run --all-files 167 | 168 | # Run different kinds of tests: unit tests, integration tests, performance tests. 169 | # Note that the default goal 'test' runs the unit tests only, mainly for convenience 170 | # and compatibility with existing scripts. 171 | .PHONY: test test-all test-unit test-integration test-performance 172 | test: test-unit 173 | test-unit: 174 | COVERAGE_CORE=sysmon python -m pytest --config-file pyproject.toml --cov-config pyproject.toml -m 'not integration and not performance' src/package/ tests/ docs/ 175 | test-integration: 176 | python -m pytest --config-file pyproject.toml --no-cov -m integration tests/ 177 | test-performance: 178 | python -m pytest --config-file pyproject.toml --no-cov -m performance tests/ 179 | test-all: test-unit test-integration test-performance 180 | 181 | # Build a source distribution package and a binary wheel distribution artifact. 182 | # When building these artifacts, we need the environment variable SOURCE_DATE_EPOCH 183 | # set to the build date/epoch. 
For more details, see: https://flit.pypa.io/en/latest/reproducible.html 184 | .PHONY: dist 185 | dist: dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-html.zip dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-md.zip dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt 186 | dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-py3-none-any.whl: check test-all dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt 187 | SOURCE_DATE_EPOCH=$(SOURCE_DATE_EPOCH) python -m flit build --setup-py --format wheel 188 | dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION).tar.gz: check test-all dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt 189 | SOURCE_DATE_EPOCH=$(SOURCE_DATE_EPOCH) python -m flit build --setup-py --format sdist 190 | dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-html.zip: docs-html 191 | python -m zipfile -c dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-html.zip docs/_build/html/ 192 | dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-md.zip: docs-md 193 | python -m zipfile -c dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-docs-md.zip docs/_build/markdown/ 194 | dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt: 195 | echo $(SOURCE_DATE_EPOCH) > dist/$(PACKAGE_NAME)-$(PACKAGE_VERSION)-build-epoch.txt 196 | 197 | # Build the HTML and Markdown documentation from the package's source. 198 | DOCS_SOURCE := $(shell git ls-files docs/source) 199 | .PHONY: docs docs-html docs-md 200 | docs: docs-html docs-md 201 | docs-html: docs/_build/.html-built-on 202 | docs/_build/.html-built-on: $(DOCS_SOURCE) 203 | if [ ! -d docs/source/_static ]; then \ 204 | mkdir docs/source/_static/; \ 205 | fi 206 | $(MAKE) -C docs/ html 207 | echo "Automatically generated by Python Package Makefile on $$(date '+%Y-%m-%d %H:%M:%S %z')." > docs/_build/.html-built-on 208 | docs-md: docs/_build/.markdown-built-on 209 | docs/_build/.markdown-built-on: $(DOCS_SOURCE) 210 | if [ ! 
-d docs/source/_static ]; then \ 211 | mkdir docs/source/_static/; \ 212 | fi 213 | $(MAKE) -C docs/ markdown 214 | mv docs/_build/markdown/index.md docs/_build/markdown/Home.md 215 | echo "Automatically generated by Python Package Makefile on $$(date '+%Y-%m-%d %H:%M:%S %z')." > docs/_build/.markdown-built-on 216 | 217 | # Prune the packages currently installed in the virtual environment down to the required 218 | # packages only. Pruning works in a roundabout way, where we first generate the wheels for 219 | # all installed packages into the build/wheelhouse/ folder. Next we wipe all packages and 220 | # then reinstall them from the wheels while disabling the PyPI index server. Thus we ensure 221 | # that the same package versions are reinstalled. Use with care! 222 | .PHONY: prune 223 | prune: 224 | mkdir -p build/ 225 | python -m pip freeze --local --disable-pip-version-check --exclude-editable > build/prune-requirements.txt 226 | python -m pip wheel --wheel-dir build/wheelhouse/ --requirement build/prune-requirements.txt 227 | python -m pip wheel --wheel-dir build/wheelhouse/ . 228 | python -m pip uninstall --yes --requirement build/prune-requirements.txt 229 | python -m pip install --no-index --find-links=build/wheelhouse/ --editable . 230 | rm -fr build/ 231 | 232 | # Clean test caches and remove build artifacts. 233 | .PHONY: dist-clean clean 234 | dist-clean: 235 | rm -fr dist/* 236 | rm -f requirements.txt 237 | clean: dist-clean 238 | rm -fr .coverage .hypothesis/ .mypy_cache/ .pytest_cache/ 239 | rm -fr docs/_build/ 240 | 241 | # Remove code caches, or the entire virtual environment. 242 | .PHONY: nuke-git-hooks nuke-caches nuke 243 | nuke-git-hooks: 244 | find .git/hooks/ -type f ! -name '*.sample' -exec rm -fr {} + 245 | nuke-caches: 246 | find src/ -type d -name __pycache__ -exec rm -fr {} + 247 | find tests/ -type d -name __pycache__ -exec rm -fr {} + 248 | nuke: clean nuke-git-hooks nuke-caches 249 | if [ ! 
-z "${VIRTUAL_ENV}" ]; then \ 250 | echo "Please deactivate the virtual environment first!" && exit 1; \ 251 | fi 252 | rm -fr .venv/ 253 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | This project follows the [semantic versioning](https://packaging.python.org/en/latest/discussions/versioning/#semantic-versioning-vs-calendar-versioning) and [pre-release versioning](https://packaging.python.org/en/latest/discussions/versioning/) schemes recommended by the Python Packaging Authority [here](https://packaging.python.org/en/latest/specifications/version-specifiers/). 2 | 3 | ## v2.18.0 (2025-12-03) 4 | 5 | ### Feat 6 | 7 | - using pytest as a test runner, diversify the tests into unit, integration, and performance tests (#1028) 8 | 9 | ### Fix 10 | 11 | - invoke flit through the Python interpreter’s module-call argument to ensure the Python venv’s flit is being run (#1045) 12 | - **ci**: undo temporary ignore of GHSA-4xh5-x5gv-qwph (CVE-2025-8869) because the latest pip v25.3 addressed the issue (#1022) 13 | - **ci**: temporarily ignore GHSA-4xh5-x5gv-qwph (CVE-2025-8869) until `pip` fixes the problem (#1018) 14 | 15 | ## v2.17.0 (2025-09-20) 16 | 17 | ### Feat 18 | 19 | - add actions to CodeQL languages (#991) 20 | - **ci**: cancel existing running workflows for a PR when a new change is pushed to that PR (#927) 21 | 22 | ### Fix 23 | 24 | - remove various repo files that leaked into the sdist of the package (#948) 25 | - building the package’s dist files should *always* generate the build’s epoch as well, even when using the individual package goals only (#950) 26 | - make sure isort skips over files listed as such in the pyproject.toml tool configuration (#968) 27 | 28 | ### Perf 29 | 30 | - **test**: use Python’s system monitoring facilities to improve performance of test runs (#933) 31 | 32 | ## v2.16.0 (2025-04-05) 33 | 34 | ### 
Feat 35 | 36 | - change the release and staging branching model (#900) 37 | - **test**: add the pytest-cases package and a simple example of how to use it (#873) 38 | 39 | ## v2.15.0 (2024-12-26) 40 | 41 | ### Feat 42 | 43 | - **test**: add the duration of tests to the printed test summary (#852) 44 | 45 | ### Fix 46 | 47 | - don’t attempt to sort imports on `git push` (they must already be sorted) (#851) 48 | - when nuking then also nuke any installed git hooks (except pre-installed sample hooks) to avoid leaving stale hooks behind (#850) 49 | - declare `check-actionlint` goal in Makefile as proper phony (#849) 50 | - package metadata did not constrain the upper limit of Python’s version range, set now to <3.14 (#848) 51 | 52 | ## v2.14.0 (2024-11-19) 53 | 54 | ### Feat 55 | 56 | - add support for Python 3.13 (#815) 57 | 58 | ## v2.13.4 (2024-09-04) 59 | 60 | ### Fix 61 | 62 | - **ci**: also update setuptools when setting up the virtual environment (#795) 63 | 64 | ## v2.13.3 (2024-06-16) 65 | 66 | ### Fix 67 | 68 | - **ci**: commitizen and CI should fail if the bumped version strings contain inconsistent version numbers (#747) 69 | 70 | ## v2.13.2 (2024-05-14) 71 | 72 | ### Fix 73 | 74 | - **ci**: use actions/upload-artifact@v4 to be compatible with slsa provenance action (#735) 75 | 76 | ## v2.13.1 (2024-05-14) 77 | 78 | ### Fix 79 | 80 | - **ci**: fix the artifact name for the Release workflow (#733) 81 | 82 | ## v2.13.0 (2024-05-14) 83 | 84 | ### Feat 85 | 86 | - add support for Python 3.12 (#632) 87 | 88 | ## v2.12.1 (2024-01-23) 89 | 90 | ### Fix 91 | 92 | - **deps-dev**: revert artifact download and upload GitHub Actions to v3 (#686) 93 | 94 | ## v2.12.0 (2024-01-20) 95 | 96 | ### Feat 97 | 98 | - **ci**: switch from building the SLSA provenance generator to using the pre-built version to improve runtime performance (#667) 99 | - require pylint v3 and add a slew of optional plugins (#658) 100 | - run doctest as part of running tests, which collects doctests 
from both the package’s doc strings and the package documentation (#637) 101 | - add perflint to find performance anti-patterns (#675) 102 | 103 | ### Fix 104 | 105 | - **ci**: workflow that syncs a repository with this template had multiple problems on patch generation and pushing; also, it can now be triggered via Github UI (#670) 106 | - always fail tests when runtime warnings were raised (#668) 107 | - change git hook to check for large files on commit, instead of push (#664) 108 | 109 | ## v2.11.0 (2023-11-22) 110 | 111 | ### Feat 112 | 113 | - improve, clean up, and stricten the mypy configuration (#641) 114 | - treat Sphinx docs warnings as errors (#648) 115 | - enable Python’s “Development Mode” when running tests to enable resource tracking and add more warnings (#643) 116 | - add flake8-logging plugin to ensure module-level logging is being used correctly (#640) 117 | 118 | ## v2.10.1 (2023-09-02) 119 | 120 | ### Fix 121 | 122 | - **ci**: improve automatic template synchronization (#537) 123 | 124 | ### Refactor 125 | 126 | - **docs**: switch to sphinx-markdown-builder package (#585) 127 | 128 | ## v2.10.0 (2023-07-22) 129 | 130 | ### Feat 131 | 132 | - **ci**: use a Github Repository Variable `DISABLE_PIP_AUDIT` to control running `pip-audit` in CI (#551) 133 | 134 | ### Fix 135 | 136 | - **test**: fail pytest if an unraisable/unhandled thread exception was detected during the execution of a test (#576) 137 | - **deps**: temporarily pin typing-extensions deps, to avoid breaking the sphinxnotes-markdown-builder package (#552) 138 | 139 | ## v2.9.0 (2023-05-21) 140 | 141 | ### Feat 142 | 143 | - **ci**: add a Rebase Branch workflow to rebase staging on top of main after a package was published (#536) 144 | - add actionlint hook and fix current errors (#505) 145 | - add flake8-pyi plugin to check .pyi files (if any) (#512) 146 | - add CODEOWNERS file to the repository (#511) 147 | 148 | ### Fix 149 | 150 | - make Black, Flake8, Sphinx play nice together by 
tweaking a few Flake8 settings (#479) 151 | - **ci**: add docs/source/ folder to the excludes of the Sync Upstream workflow (#482) 152 | - **ci**: ensure correct epoch for packaged files to enable reproducible builds (#487) 153 | - **test**: make pytest a little more verbose when tests fail (#495) 154 | 155 | ## v2.8.0 (2023-02-20) 156 | 157 | ### Feat 158 | 159 | - add a workflow to create sync PRs (#444) 160 | - add flake8-print plugin to the flake8 git pre-commit hook (#473) 161 | 162 | ### Fix 163 | 164 | - **docs**: update OSSF Scorecard URL (#468) 165 | - **ci**: update isort to latest fix because of PyCQA/isort/issues/2077 (#455) 166 | - update project URLs in the package metadata 167 | 168 | ### Refactor 169 | 170 | - **test**: configure warnings for pytest through pyproject.toml only (#436) 171 | 172 | ## v2.7.0 (2022-12-28) 173 | 174 | ### Feat 175 | 176 | - add workflow to publish code documentation to the Github Wiki upon package releases (#396) 177 | 178 | ### Fix 179 | 180 | - **ci**: don’t fail bump job if there are no commits to bump (#428) 181 | - line-length checks are now a bit more tolerant using Bugbear only (#410) 182 | 183 | ## v2.6.0 (2022-12-01) 184 | 185 | ### Feat 186 | 187 | - add .gitattributes file (#407) 188 | 189 | ## v2.5.0 (2022-11-30) 190 | 191 | ### Feat 192 | 193 | - add more default settings for VSCode (#388) 194 | - automatically merge Dependabot PRs on approval (#390) 195 | 196 | ### Fix 197 | 198 | - **ci**: make Release Notification a reusable workflow to avoid artifact race (#398) 199 | - determine package version gracefully, even for a deactivated venv (#387) 200 | - **ci**: update deprecated SLSA provenance generator, again (#394) 201 | - incorrect folder name for pytest (#376) 202 | - don’t nuke an activated virtual environment (#367) 203 | 204 | ## v2.4.2 (2022-10-29) 205 | 206 | ### Fix 207 | 208 | - **ci**: as of v2 Scorecard requires extra permissions (#366) 209 | 210 | ## v2.4.1 (2022-10-28) 211 | 212 | ### Fix 213 
| 214 | - **ci**: trigger PR actions for all target branches (#357) 215 | - **ci**: fix actionlint warnings (#348) 216 | - run pytest hook on unstaged files (#347) 217 | - determine an activated venv correctly when running make (#346) 218 | - exit Makefile gracefully if an inactive venv was found (#345) 219 | - **ci**: use GITHUB_OUTPUT instead of deprecated set-output (#358) 220 | - **ci**: change deprecated SLSA attestation-name to provenance-name (#359) 221 | - **ci**: update pytest to drop dependency on vulnerable py package (#354) 222 | - don’t build the package again if a PR was only edited (#336) 223 | - the ‘upgrade-quiet’ Makefile goal now works with BSD date command too (#335) 224 | - a Makefile’s SHELL variable is not an executable shebang (#329) 225 | - use simple expansion consistently for all Makefile variables (#328) 226 | - explicitly specify flake8 configuration for git hooks (#327) 227 | - **docs**: update README with correct CHANGELOG setup instructions (#320) 228 | 229 | ### Refactor 230 | 231 | - **ci**: allow release when provenance generation fails (#342) 232 | 233 | ## v2.4.0 (2022-09-08) 234 | 235 | ### Feat 236 | 237 | - add pip-audit support when building the package artifacts (#307) 238 | 239 | ### Fix 240 | 241 | - **ci**: fix triggering event for the Release Notification action (#317) 242 | - remove trailing CR-LF from package spec when building requirements (#316) 243 | - remove requirements.txt when cleaning the distribution artifacts (#314) 244 | - ensure that config files are passed explicitly to pytest and coverage (#312) 245 | 246 | ## v2.3.3 (2022-09-01) 247 | 248 | ### Fix 249 | 250 | - **ci**: fix release workflow (#305) 251 | 252 | ## v2.3.2 (2022-09-01) 253 | 254 | ### Fix 255 | 256 | - fix Makefile’s check for goals that require a virtual environment (#299) 257 | 258 | ### Refactor 259 | 260 | - **ci**: improve the release workflow (#303) 261 | 262 | ## v2.3.1 (2022-08-26) 263 | 264 | ### Fix 265 | 266 | - **ci**: separate 
artifacts for release and debugging (#297) (#298) 267 | 268 | ## v2.3.0 (2022-08-26) 269 | 270 | ### Feat 271 | 272 | - persist requirements.txt as a build artifact (#284) 273 | - always create a reproducible build with `make dist` (#272) 274 | 275 | ### Fix 276 | 277 | - **ci**: allow PR Action to check commits across branches of forks (#287) 278 | - disable noise when freezing the current venv (#273) 279 | - enable CI on release bump commit (#269) 280 | 281 | ### Refactor 282 | 283 | - **ci**: remove write permissions from build.yaml (#291) 284 | 285 | ## v2.2.0 (2022-07-31) 286 | 287 | ### Feat 288 | 289 | - generate SLSA level 3 provenance for release artifacts (#259) 290 | 291 | ### Fix 292 | 293 | - create empty pip.conf file inside a new virtual environment (#264) 294 | 295 | ### Refactor 296 | 297 | - **ci**: use commitizen tool for pull request action (#263) 298 | 299 | ## v2.1.0 (2022-07-12) 300 | 301 | ### Feat 302 | 303 | - use Bash as the shell to execute Makefile recipes (#256) 304 | - warn if generated builds are not reproducible (#253) 305 | - move private file .upgraded into .venv/ folder (#248) 306 | 307 | ### Fix 308 | 309 | - default goal for make should be to build the entire package (#257) 310 | - remove shebang comment from Makefile which isn’t actually runnable (#252) 311 | 312 | ## v2.0.0 (2022-07-06) 313 | 314 | ### Feat 315 | 316 | - replace the Makefile’s quick-check goal with check-code (#239) 317 | - add pytest-env and pytest-custom-exit-code plugin support (#243) 318 | 319 | ### Fix 320 | 321 | - flit doesn’t support MANIFEST.in, fix sdist accordingly (#244) 322 | 323 | ## v1.6.1 (2022-06-26) 324 | 325 | ### Fix 326 | 327 | - add explicit settings path to isort in pre-commit configuration (#233) 328 | 329 | ## v1.6.0 (2022-06-24) 330 | 331 | ### Feat 332 | 333 | - consolidate sdist and wheel into a single build target (#229) 334 | 335 | ### Fix 336 | 337 | - **ci**: run all Actions except the Release job on the release commit (#230) 
338 | 339 | ## v1.5.1 (2022-06-21) 340 | 341 | ### Fix 342 | 343 | - bump min pytest version according to the "test" dependencies (#220) 344 | - don’t pin Black to a particular Python version (#217) 345 | 346 | ### Refactor 347 | 348 | - **ci**: refactor and address security issues in workflows (#211) 349 | 350 | ## v1.5.0 (2022-06-20) 351 | 352 | ### Feat 353 | 354 | - move package specification, tool configs to pyproject.toml (#208) 355 | 356 | ### Fix 357 | 358 | - **ci**: Release Action needs to use flit, too 359 | - **ci**: use dedicated RELEASE_TOKEN for Release Action job (#219) 360 | - **ci**: enable Release Action for private, protected branches (#209) 361 | 362 | ## v1.4.1 (2022-05-19) 363 | 364 | ### Fix 365 | 366 | - add missing phony target to Makefile (#200) 367 | 368 | ## v1.4.0 (2022-05-17) 369 | 370 | ### Feat 371 | 372 | - add scorecards analysis workflow (#105) 373 | 374 | ### Fix 375 | 376 | - on Windows, pip needs to run as a module (#194) 377 | 378 | ## v1.3.2 (2022-05-06) 379 | 380 | ### Fix 381 | 382 | - fix "make dist" failing on latest version of Ubuntu (#190) 383 | 384 | ## v1.3.1 (2022-05-05) 385 | 386 | ### Fix 387 | 388 | - pass pylint configuration to pylint explicitly (#188) 389 | - correctly depend on and ignore .upgraded Makefile helper file (#187) 390 | 391 | ## v1.3.0 (2022-04-10) 392 | 393 | ### Feat 394 | 395 | - add Makefile to enable the “Scripted Build” rule for SLSA Level 1 (#74) 396 | 397 | ## v1.2.0 (2022-01-19) 398 | 399 | ### Feat 400 | 401 | - enable CodeQL security analyzer (#92) 402 | - generate a command-line tool when installing this package (#89) 403 | 404 | ### Fix 405 | 406 | - change .yml to .yaml in documentation (#101) 407 | - remove exclude option from mypy.ini (#98) 408 | 409 | ## v1.1.2 (2021-12-09) 410 | 411 | ### Fix 412 | 413 | - add type marker to enable mypy’s use of this typed package (#55) 414 | 415 | ## v1.1.1 (2021-11-17) 416 | 417 | ### Fix 418 | 419 | - git hook configuration didn’t match 
package paths anymore (#39) 420 | 421 | ## v1.1.0 (2021-10-13) 422 | 423 | ### Feat 424 | 425 | - add dependency analysis for automatic version updates (#3) 426 | 427 | ## v1.0.0 (2021-09-29) 428 | 429 | ### Feat 430 | 431 | - Initial version of the Python package template 432 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![license](https://img.shields.io/badge/license-MIT-blue) [![pre-commit](https://img.shields.io/badge/pre--commit-enabled-yellow?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit) [![conventional-commits](https://img.shields.io/badge/conventional%20commits-1.0.0-yellow)](https://www.conventionalcommits.org/en/v1.0.0/) [![black](https://img.shields.io/badge/code%20style-black-000000)](https://github.com/psf/black) [![mypy](https://img.shields.io/badge/mypy-checked-brightgreen)](http://mypy-lang.org/) [![pylint](https://img.shields.io/badge/pylint-required%2010.0-brightgreen)](http://pylint.org/) [![pytest](https://img.shields.io/badge/pytest-enabled-brightgreen)](https://github.com/pytest-dev/pytest) [![coverage](https://img.shields.io/badge/coverage-required%20100%25-brightgreen)](https://github.com/nedbat/coveragepy) [![hypothesis](https://img.shields.io/badge/hypothesis-tested-brightgreen.svg)](https://hypothesis.readthedocs.io/) 2 | 3 | # Python Package Template 4 | 5 | This repository is intended to be a base template, a cookiecutter for a new Python package project while keeping [PEP518](https://www.python.org/dev/peps/pep-0518/) in mind. Because it’s hosted on Github it already utilizes a few [Github Actions](https://docs.github.com/en/actions) that enforce repository-side checks for continuous integration and that implement a semantic release setup. 
And while this package is a starting point for a Python project with good engineering practices, it’s intended to be improved and added to in various ways — see the [Wiki](https://github.com/jenstroeger/python-package-template/wiki) for more suggestions. 6 | 7 | ## Table of Contents 8 | 9 | [Features](#features) 10 |  [Typing](#typing) 11 |  [Quality assurance](#quality-assurance) 12 |  [Extensive testing](#extensive-testing) 13 |  [Documentation](#documentation) 14 |  [Versioning and publishing](#versioning-and-publishing) 15 |  [Dependency analysis](#dependency-analysis) 16 |  [Security analysis](#security-analysis) 17 |  [Package or application?](#package-or-application) 18 | [How to use this repository](#how-to-use-this-repository) 19 | [Updating dependent packages](#updating-dependent-packages) 20 | [Git hooks](#git-hooks) 21 | [Testing](#testing) 22 | [Generating documentation](#generating-documentation) 23 | [Synchronizing with this template repo](#synchronizing-with-this-template-repo) 24 | [Versioning, publishing and changelog](#versioning-publishing-and-changelog) 25 | [Build integrity using SLSA framework](#build-integrity-using-slsa-framework) 26 | [Cleaning up](#cleaning-up) 27 | [Frequently asked questions](#frequently-asked-questions) 28 | 29 | ## Features 30 | 31 | The badges above give you an idea of what this project template provides. It’s work in progress, and I try to enable as much engineering goodness as is possible and is sensibly bearable using [git hooks](https://git-scm.com/docs/githooks) (see [below](#git-hooks)) and Github Actions. 32 | 33 | ### Typing 34 | 35 | The package requires a minimum of [Python 3.10](https://www.python.org/downloads/release/python-31015/), and it supports [Python 3.11](https://www.python.org/downloads/release/python-31110/), [Python 3.12](https://www.python.org/downloads/release/python-3127/) and [Python 3.13](https://www.python.org/downloads/release/python-3130/) (default). 
All code requires comprehensive [typing](https://docs.python.org/3/library/typing.html). The [mypy](http://mypy-lang.org/) static type checker and the [flake8-pyi](https://github.com/PyCQA/flake8-pyi) plugin are invoked by git hooks and through a Github Action to enforce continuous type checks on Python source and [stub files](https://peps.python.org/pep-0484/#stub-files). Make sure to add type hints to your code or to use [stub files](https://mypy.readthedocs.io/en/stable/stubs.html) for types, to ensure that users of your package can `import` and type-check your code (see also [PEP 561](https://www.python.org/dev/peps/pep-0561/)). 36 | 37 | ### Quality assurance 38 | 39 | A number of git hooks are invoked before and after a commit, and before push. These hooks are all managed by the [pre-commit](https://pre-commit.com/) tool and enforce a number of [software quality assurance](https://en.wikipedia.org/wiki/Software_quality_assurance) measures (see [below](#git-hooks)). 40 | 41 | ### Extensive testing 42 | 43 | Comprehensive testing is enabled using [pytest](https://pytest.org/) as a test runner, combined with [doctest](https://docs.python.org/3/library/doctest.html), [Hypothesis](https://hypothesis.works/) (to support [property-based testing](https://en.wikipedia.org/wiki/Software_testing#Property_testing)), [Faker](https://github.com/joke2k/faker) and [Cases](https://github.com/smarie/python-pytest-cases) (to produce valid, localized test case data), as well as both code and branch coverage are measured using [coverage](https://github.com/nedbat/coveragepy) (see [below](#testing)). 
44 | 45 | ### Documentation 46 | 47 | Documentation is important, and [Sphinx](https://www.sphinx-doc.org/en/master/) is already set up to produce standard documentation in HTML and Markdown formats for the package, assuming that code contains [docstrings with reStructuredText](https://www.python.org/dev/peps/pep-0287/); the generated Markdown documentation can also optionally be pushed to the repository’s Github Wiki (see [below](#generating-documentation)). 48 | 49 | ### Versioning and publishing 50 | 51 | Automatic package versioning and tagging, publishing to [PyPI](https://pypi.org/), and [Changelog](https://en.wikipedia.org/wiki/Changelog) generation are enabled using Github Actions. Furthermore, an optional [Release Notification](https://github.com/jenstroeger/python-package-template/tree/main/.github/workflows/release-notifications.yaml) Action allows Github to push an update notification to a [Slack bot](https://api.slack.com/bot-users) of your choice. For setup instructions, please see [below](#versioning-publishing-and-changelog). 52 | 53 | ### Dependency analysis 54 | 55 | [Dependabot](https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/about-dependabot-version-updates) is enabled to scan the dependencies and automatically create pull requests when an updated version is available. 56 | 57 | ### Security analysis 58 | 59 | [CodeQL](https://codeql.github.com/) is enabled to scan the Python code for security vulnerabilities. You can adjust the GitHub Actions workflow at `.github/workflows/codeql-analysis.yaml` and the configuration file at `.github/codeql/codeql-config.yaml` to add more languages, change the default paths, scan schedule, and queries. 60 | 61 | [OSSF Security Scorecards](https://github.com/ossf/scorecard) is enabled as a GitHub Actions workflow to give the consumers information about the supply-chain security posture of this project, assigning a score of 0–10. 
We upload the results as a SARIF (Static Analysis Results Interchange Format) artifact after each run and the results can be found at the Security tab of this GitHub project. We also allow publishing the data at [OpenSSF](https://metrics.openssf.org/). We use this data to continuously improve the security posture of this project. Note that this configuration supports the ``main`` (default) branch and requires the repository to be public and not forked. 62 | 63 | [pip-audit](https://github.com/pypa/pip-audit) is part of the default Python virtual environment, and can be used to check all installed packages for documented [CVE](https://www.cve.org/) by querying the [Python Packaging Advisory Database](https://github.com/pypa/advisory-database). The `_build.yaml` workflow always runs a package audit before the artifacts are being built. In automated production environments it _may_, on rare occasions, be necessary to suspend package auditing in which case you can [add a repository variable](https://docs.github.com/en/actions/learn-github-actions/variables#creating-configuration-variables-for-a-repository) `DISABLE_PIP_AUDIT` with value `true` to your repository to explicitly disable running pip-audit. 64 | 65 | ### Package or application? 66 | 67 | A _shared package_ or library is intended to be imported by another package or application; an _application_ is a self-contained, standalone, runnable package. Unfortunately, Python’s packaging ecosystem is mostly focused on packaging shared packages (libraries), and packaging Python applications is not as well-supported ([discussion](https://discuss.python.org/t/help-packaging-optional-application-features-using-extras/14074/7)). This template, however, supports both scenarios. 68 | 69 | **Shared package**: this template works out of the box as a shared package. 
Direct dependencies on other packages are declared in `pyproject.toml` (see the [`dependencies`](https://flit.pypa.io/en/latest/pyproject_toml.html#dependencies) field) and should allow for as wide a version range as possible to ensure that this package and its dependencies can be installed by and coexist with other packages and applications without version conflicts. 70 | 71 | **Application**: the [`__main__.py`](https://docs.python.org/3/library/__main__.html#main-py-in-python-packages) file ensures an entry point to run this package as a standalone application using Python’s [-m](https://docs.python.org/3/using/cmdline.html#cmdoption-m) command-line option. A wrapper script named `something` is also generated as an [entry point into this package](https://flit.pypa.io/en/latest/pyproject_toml.html#scripts-section) by `make setup` or `make upgrade`. In addition to specifying directly dependent packages and their version ranges in `pyproject.toml`, an application should _pin_ its entire environment using the [`requirements.txt`](https://pip.pypa.io/en/latest/user_guide/#requirements-files). Use the `make requirements` command to generate that file if you’re building an application. 72 | 73 | The generated `requirements.txt` file with its integrity hash for every dependent package is used to generate a [Software Bill of Materials (SBOM)](https://www.cisa.gov/sbom) in [CycloneDX format](https://cyclonedx.org/). This is an important provenance material to provide transparency in the packaging process (see also [SBOM + SLSA](https://slsa.dev/blog/2022/05/slsa-sbom)). That `requirements.txt` file, in addition to the SBOM, is also stored as a build artifact for every package release. 74 | 75 | ## How to use this repository 76 | 77 | If you’d like to contribute to the project template, please open an issue for discussion or submit a pull request. 
78 | 79 | If you’d like to start your own Python project from scratch, you can either copy the content of this repository into your new project folder or fork this repository. Either way, consider making the following adjustments to your local copy: 80 | 81 | - Change the `LICENSE.md` file and the license badge according to your needs, and adjust the `SECURITY.md` file to your needs (more details [here](https://docs.github.com/en/code-security/getting-started/adding-a-security-policy-to-your-repository)). Remove all content from the `CHANGELOG.md` file. 82 | 83 | - Rename the `src/package/` folder to whatever your own package’s name will be, adjust the Github Actions in `.github/workflows/`, and review the `Makefile`, `pyproject.toml`, `.pre-commit-config.yaml` files as well as the tests accordingly. **Note**: by default all Actions run on three different host types (Linux, MacOS, and Windows) whose [rates vary widely](https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#minute-multipliers), so make sure that you disable or budget accordingly if you’re in a private repository! 84 | 85 | - A new protected `release` branch should be created if it doesn't already exist. This branch should be configured with appropriate security policies and essential checks to ensure the integrity and stability of the release process. 86 | 87 | - Adjust the content of the `pyproject.toml` file according to your needs, and make sure to fill in the project URL, maintainer and author information too. Don’t forget to reset the package’s version number in `src/package/__init__.py`. 88 | 89 | - If you import packages that do not provide type hints into your new repository, then `mypy` needs to be configured accordingly: add these packages to the `pyproject.toml` file using the [`ignore_missing_imports`](https://mypy.readthedocs.io/en/stable/config_file.html#confval-ignore_missing_imports) option. 
90 | 91 | - If you’d like to publish your package to PyPI then uncomment the code in the [`release.yaml`](https://github.com/jenstroeger/python-package-template/blob/main/.github/workflows/release.yaml) Action, and add the appropriate environment variables. 92 | 93 | - Adjust the Dependabot settings in `.github/dependabot.yaml` to your desired target branch that you’d like to have monitored by Dependabot. 94 | 95 | - Create the following [Personal Access Tokens](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) (PAT) with certain [scopes](https://docs.github.com/en/developers/apps/building-oauth-apps/scopes-for-oauth-apps#available-scopes) for your Github user account and then [create secrets](https://docs.github.com/en/actions/security-guides/encrypted-secrets#creating-encrypted-secrets-for-a-repository) for the new Github repository whose values are these new PATs: 96 | - one PAT with `workflow` and `repo` scope (including _all_ of the `repo` permissions) for the secret named `REPO_ACCESS_TOKEN`; this secret is used by the [Release Action](https://github.com/jenstroeger/python-package-template/blob/main/.github/workflows/release.yaml) to push the release commit and attach assets to the generated [Github release](https://github.com/jenstroeger/python-package-template/releases). 97 | - one PAT with `public_repo`, `read:discussion`, `read:org`, and `read:repo_hook` scopes ([detailed docs](https://github.com/ossf/scorecard-action#authentication-with-pat-optional)) for the secret named `SCORECARD_READ_TOKEN`; this secret is used by the [Scorecard Action](https://github.com/jenstroeger/python-package-template/blob/main/.github/workflows/scorecards-analysis.yaml) to analyze the code and add its results to your repository. 
98 | - one PAT with `repo` scope for the secret named `DEPENDABOT_AUTOMERGE_TOKEN`; this secret is used by the [Dependabot Automerge Action](https://github.com/jenstroeger/python-package-template/blob/main/.github/workflows/dependabot-automerge.yaml) to comment on Dependabot PRs. 99 | - Create a Wiki and a first empty Wiki page for your new repository. Using the [Wiki Documentation](https://github.com/jenstroeger/python-package-template/blob/main/.github/workflows/_wiki-documentation.yaml) Action the repository’s Wiki will be automatically updated as part of publishing a new release. 100 | 101 | To develop your new package, first create a [virtual environment](https://docs.python.org/3/tutorial/venv.html) by either using the [Makefile](https://www.gnu.org/software/make/manual/make.html#toc-An-Introduction-to-Makefiles): 102 | 103 | ```bash 104 | make venv # Create a new virtual environment in .venv folder using Python 3.13. 105 | ``` 106 | 107 | or for a specific version of Python: 108 | 109 | ```bash 110 | PYTHON=python3.12 make venv # Same virtual environment for a different Python version. 111 | ``` 112 | 113 | or manually: 114 | 115 | ```bash 116 | python3.13 -m venv .venv # Or use .venv313 for more than one local virtual environments. 117 | ``` 118 | 119 | When working with this Makefile _it is important to always [activate the virtual environment](https://docs.python.org/3/library/venv.html)_ because some of the [git hooks](#git-hooks) (see below) depend on that: 120 | 121 | ```bash 122 | . .venv/bin/activate # Where . is a bash shortcut for the source command. 123 | ``` 124 | 125 | Finally, set up the new package with all of its extras and initialize the local git hooks: 126 | 127 | ```bash 128 | make setup 129 | ``` 130 | 131 | With that in place, you’re ready to build your own package! 
132 | 133 | ## Updating dependent packages 134 | 135 | It’s likely that during development you’ll add or update dependent packages in the `pyproject.toml` file, which requires an update to the virtual environment: 136 | 137 | ```bash 138 | make upgrade 139 | ``` 140 | 141 | ## Git hooks 142 | 143 | Using the pre-commit tool and its `.pre-commit-config.yaml` configuration, the following git hooks are active in this repository: 144 | 145 | - When committing code, a number of [pre-commit hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks#_committing_workflow_hooks) ensure that your code is formatted according to [PEP 8](https://www.python.org/dev/peps/pep-0008/) using the [`black`](https://github.com/psf/black) tool, and they’ll invoke [`flake8`](https://github.com/PyCQA/flake8) (and various plugins), [`pylint`](https://github.com/PyCQA/pylint) and [`mypy`](https://github.com/python/mypy) to check for lint and correct types. There are more checks, but those two are the important ones. You can adjust the settings for these tools in the `pyproject.toml` or `.flake8` configuration files. 146 | - The [commit message hook](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks#_committing_workflow_hooks) enforces [conventional commit messages](https://www.conventionalcommits.org/) and that, in turn, enables a _semantic release_ of this package on the Github side: upon merging changes into the `release` branch, the [release action](https://github.com/jenstroeger/python-package-template/blob/main/.github/workflows/release.yaml) uses the [Commitizen tool](https://commitizen-tools.github.io/commitizen/) to produce a [changelog](https://en.wikipedia.org/wiki/Changelog) and it computes the next version of this package and publishes a release — all based on the commit messages of a release. 
147 | - Using a [pre-push hook](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks#_other_client_hooks) this package is also set up to run [`pytest`](https://github.com/pytest-dev/pytest); in addition, the [`coverage`](https://github.com/nedbat/coveragepy) plugin makes sure that _all_ of your package’s code is covered by unit tests and [Hypothesis](https://hypothesis.works/) and [Faker](https://github.com/joke2k/faker) are already installed to help with generating test case payloads. 148 | - The [`actionlint`](https://github.com/Mateusz-Grzelinski/actionlint-py) hook is set up to lint GitHub Actions workflows. If [`shellcheck`](https://github.com/koalaman/shellcheck) is installed on the system, `actionlint` runs `shellcheck` to lint the `run` steps in GitHub Actions. Note that `shellcheck` is available on [Ubuntu GitHub Actions runners](https://github.com/actions/runner-images/blob/main/images/linux/Ubuntu2204-Readme.md) by default. 149 | 150 | You can also run these hooks manually, which comes in very handy during daily development tasks. For example 151 | 152 | ```bash 153 | make check-code 154 | ``` 155 | 156 | runs all the code checks (i.e. `bandit`, `flake8`, `pylint`, `mypy`, `actionlint`), whereas 157 | 158 | ```bash 159 | make check 160 | ``` 161 | 162 | runs _all_ installed git hooks over your code. For more control over the code checks, the Makefile also implements the `check-bandit`, `check-flake8`, `check-lint`, `check-mypy`, and `check-actionlint` goals. 163 | 164 | ## Testing 165 | 166 | As mentioned above, this repository is set up to use [pytest](https://pytest.org/) as a test runner, either standalone or as a pre-push git hook. Tests are stored in the `tests/` folder, they’re organized into unit tests, integration tests, and performance tests. You can run the tests manually like so: 167 | ```bash 168 | make test-all # Run all tests: unit, integration, performance. 169 | ``` 170 | which runs all tests in your local Python virtual environment. 
For more options, see the [pytest command-line flags](https://docs.pytest.org/en/7.4.x/reference/reference.html#command-line-flags). 171 | 172 | There are three kinds of tests: 173 | 174 | 1. [Unit tests](https://en.wikipedia.org/wiki/Unit_testing) are invoked with `make test` or `make test-unit`; 175 | 2. [Integration tests](https://en.wikipedia.org/wiki/Integration_testing) are invoked with `make test-integration`; and 176 | 3. [Performance tests](https://en.wikipedia.org/wiki/Software_performance_testing) (using the [pytest-benchmark](https://github.com/ionelmc/pytest-benchmark) plugin) are invoked with `make test-performance`. 177 | 178 | Note that the unit tests include [doctest](https://docs.python.org/3/library/doctest.html), which means that module and function [docstrings](https://www.python.org/dev/peps/pep-0257/#what-is-a-docstring), as well as the documentation, may contain test code that executes as part of the unit tests. When pushing changes to the remote, the pre-push hook runs *only* unit tests whereas *all* three kinds of tests run as part of Github’s CI. 
179 | 180 | Both statement and branch coverage are being tracked using [coverage](https://github.com/nedbat/coveragepy) and the [pytest-cov](https://github.com/pytest-dev/pytest-cov) plugin for pytest when running unit tests, and it measures how much code in the `src/package/` folder is covered by tests: 181 | ``` 182 | =========================================== test session starts =========================================== 183 | platform darwin -- Python 3.13.9, pytest-8.4.2, pluggy-1.6.0 -- /path/to/python-package-template/.venv/bin/python 184 | cachedir: .pytest_cache 185 | hypothesis profile 'default-with-verbose-verbosity' -> verbosity=Verbosity.verbose 186 | benchmark: 5.2.0 (defaults: timer=time.perf_counter disable_gc=False min_rounds=5 min_time=0.000005 max_time=1.0 calibration_precision=10 warmup=False warmup_iterations=100000) 187 | rootdir: /path/to/python-package-template 188 | configfile: pyproject.toml 189 | plugins: cases-3.9.1, hypothesis-6.138.16, env-1.1.5, cov-6.3.0, custom-exit-code-0.3.0, doctestplus-1.4.0, Faker-37.6.0, benchmark-5.2.0 190 | collected 6 items / 2 deselected / 4 selected 191 | 192 | src/package/something.py::package.something.Something.do_something PASSED [ 25%] 193 | tests/unit/test_something.py::test_something_hypothesis PASSED [ 50%] 194 | tests/unit/test_something.py::test_something_cases[_case_boolean] PASSED [ 75%] 195 | docs/source/index.rst::index.rst PASSED [100%] 196 | 197 | ============================================= tests coverage ============================================== 198 | ____________________________ coverage: platform darwin, python 3.13.9-final-0 _____________________________ 199 | 200 | Name Stmts Miss Branch BrPart Cover Missing 201 | ---------------------------------------------------------------------- 202 | src/package/__init__.py 1 0 0 0 100% 203 | src/package/something.py 4 0 0 0 100% 204 | ---------------------------------------------------------------------- 205 | TOTAL 5 0 0 0 100% 206 | 
Required test coverage of 100.0% reached. Total coverage: 100.00% 207 | ========================================== Hypothesis Statistics ========================================== 208 | tests/unit/test_something.py::test_something_hypothesis: 209 | 210 | - during generate phase (0.00 seconds): 211 | - Typical runtimes: ~ 0-1 ms, of which < 1ms in data generation 212 | - 2 passing examples, 0 failing examples, 0 invalid examples 213 | 214 | - Stopped because nothing left to do 215 | 216 | ============================================ slowest durations ============================================ 217 | 0.10s setup src/package/something.py::package.something.Something.do_something 218 | 0.01s call tests/unit/test_something.py::test_something_hypothesis 219 | 0.00s call src/package/something.py::package.something.Something.do_something 220 | 0.00s setup tests/unit/test_something.py::test_something_cases[_case_boolean] 221 | 0.00s call docs/source/index.rst::index.rst 222 | 0.00s setup docs/source/index.rst::index.rst 223 | 0.00s teardown src/package/something.py::package.something.Something.do_something 224 | 0.00s teardown tests/unit/test_something.py::test_something_cases[_case_boolean] 225 | 0.00s call tests/unit/test_something.py::test_something_cases[_case_boolean] 226 | 0.00s setup tests/unit/test_something.py::test_something_hypothesis 227 | 0.00s teardown docs/source/index.rst::index.rst 228 | 0.00s teardown tests/unit/test_something.py::test_something_hypothesis 229 | ===================================== 4 passed, 2 deselected in 0.23s ===================================== 230 | ``` 231 | Note that code that’s not covered by tests is listed under the `Missing` column, and branches not taken too. The net effect of enforcing 100% code and branch coverage is that every new major and minor feature, every code change, and every fix are being tested (keeping in mind that high _coverage_ does not imply comprehensive, meaningful _test data_). 
232 | 233 | Hypothesis is a package that implements [property based testing](https://en.wikipedia.org/wiki/Software_testing#Property_testing) and that provides payload generation for your tests based on strategy descriptions ([more](https://hypothesis.works/#what-is-hypothesis)). Using its [pytest plugin](https://hypothesis.readthedocs.io/en/latest/details.html#the-hypothesis-pytest-plugin) Hypothesis is ready to be used for this package. Likewise, the [Faker](https://github.com/joke2k/faker) package and its [pytest plugin](https://faker.readthedocs.io/en/master/#pytest-fixtures) are installed to provide valid, localized test case data (see also the [pytest-cases](https://github.com/smarie/python-pytest-cases) plugin for more details on how to separate tests and test cases). 234 | 235 | ## Generating documentation 236 | 237 | As mentioned above, all package code should make use of [Python docstrings](https://www.python.org/dev/peps/pep-0257/) in [reStructured text format](https://www.python.org/dev/peps/pep-0287/). Using these docstrings and the documentation template in the `docs/source/` folder, you can then generate proper documentation in different formats using the [Sphinx](https://github.com/sphinx-doc/sphinx/) tool: 238 | 239 | ```bash 240 | make docs 241 | ``` 242 | 243 | This example generates documentation in HTML, which can then be found here: 244 | 245 | ```bash 246 | open docs/_build/html/index.html 247 | ``` 248 | 249 | In addition to the default HTML, Sphinx also generates Markdown documentation compatible with [Github Wiki](https://docs.github.com/en/communities/documenting-your-project-with-wikis), and the [Wiki Documentation](https://github.com/jenstroeger/python-package-template/blob/main/.github/workflows/_wiki-documentation.yaml) Action automatically updates the project repository’s Wiki. 
250 | 251 | ## Synchronizing with this template repo 252 | 253 | The [sync-with-upstream.yaml](https://github.com/jenstroeger/python-package-template/blob/main/.github/workflows/sync-with-upstream.yaml) GitHub Actions workflow checks this template repo daily and automatically creates a pull request in the downstream repo if there is a new release. Make sure to set up the GitHub username and email address in this workflow accordingly. 254 | 255 | ## Versioning, publishing and changelog 256 | 257 | To enable automation for [semantic versioning](https://semver.org/), package publishing, and changelog generation it is important to use meaningful [conventional commit messages](https://www.conventionalcommits.org/)! This package template already has built-in semantic release support enabled which is set up to take care of all three of these aspects — every time changes are pushed to the `release` branch. 258 | 259 | With every package release, a new `bump:` commit is pushed to the `release` branch and tagged with the package’s new version. In addition, the `main` branch (which this repository uses to stage merged pull requests into for the next release) is rebased on top of the updated `release` branch automatically, so that subsequent pull requests can be merged while keeping a [linear history](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/managing-protected-branches/about-protected-branches#require-linear-history). 260 | 261 | If you’d like to receive Slack notifications whenever a new release is published, follow the comments in the [Release Notification](https://github.com/jenstroeger/python-package-template/tree/main/.github/workflows/_release-notifications.yaml) Action and set up a Slack bot by following [the instructions here](https://github.com/slackapi/slack-github-action#setup-2). 
262 | 263 | In order to build a distribution of your package locally instead of publishing it through the Github Actions workflow, you can simply call: 264 | 265 | ```bash 266 | make dist 267 | ``` 268 | 269 | This builds a source package and a binary distribution, and stores the files in your local `dist/` folder. 270 | 271 | You can also generate a changelog and bump the version manually and locally using commitizen (already installed as a dev dependency), for example: 272 | 273 | ```bash 274 | cz changelog 275 | cz bump 276 | ``` 277 | 278 | ## Build integrity using SLSA framework 279 | 280 | The build process in this repository follows the requirements in the [SLSA framework](https://slsa.dev/) to be compliant at level 3. An important aspect of SLSA to improve the supply chain security posture is to generate a verifiable provenance for the build pipeline. Such a provenance can be used to verify the builder and let the consumers check the materials and configurations used while building an artifact. In this repository we use the [generic provenance generator reusable workflow](https://github.com/slsa-framework/slsa-github-generator) to generate a provenance that can attest to the following artifacts in every release: 281 | 282 | - Binary dist (wheel) 283 | - Source dist (tarball) 284 | - SBOM (CycloneDx format) 285 | - HTML and Markdown Docs 286 | - A [UNIX epoch](https://en.wikipedia.org/wiki/Unix_time) timestamp file of the build time for [reproducible builds](https://reproducible-builds.org/) 287 | 288 | To verify the artifact using the provenance follow the instructions in the [SLSA verifier](https://github.com/slsa-framework/slsa-verifier) project to install the verifier tool. 
After downloading the artifacts and provenance, verify each artifact individually, for example: 289 | 290 | ```bash 291 | slsa-verifier -artifact-path ~/Downloads/package-2.2.0.tar.gz -provenance attestation.intoto.jsonl -source github.com/jenstroeger/python-package-template 292 | ``` 293 | This should pass and provide the verification details. 294 | 295 | ## Cleaning up 296 | 297 | On occasion it’s useful (and perhaps necessary) to clean up stale files, caches that tools like `mypy` leave behind, or even to nuke the complete virtual environment: 298 | 299 | - **Remove distribution artifacts**: `make dist-clean` 300 | - In addition, **remove tool caches and documentation**: `make clean` 301 | - In addition, **remove Python code caches and git hooks**: `make nuke-caches` 302 | - In addition and to **reset everything**, to restore a clean package to start over fresh: `make nuke` 303 | 304 | Please be careful when nuking your environment, and make sure you know what you’re doing. 305 | 306 | ## Frequently asked questions 307 | 308 | - **Question**: Why don’t you use tools like [tox](https://github.com/tox-dev/tox) or [nox](https://github.com/theacodes/nox) to orchestrate testing? 309 | **Answer**: We’ve removed `tox` based on a discussion in [issue #100](https://github.com/jenstroeger/python-package-template/issues/100) and [PR #102](https://github.com/jenstroeger/python-package-template/pull/102). In short: we want to run tests inside the development venv using `pytest`, and run more tests with an extensive test matrix using GitHub Actions. 310 | --------------------------------------------------------------------------------