├── tests ├── __init__.py ├── utils.py └── test_plugin.py ├── src └── hatch_build_scripts │ ├── py.typed │ ├── __init__.py │ ├── hooks.py │ └── plugin.py ├── .github ├── ISSUE_TEMPLATE │ ├── config.yml │ └── issue-form.yml └── workflows │ ├── release.yml │ └── check.yml ├── .copier-answers.yml ├── LICENSE.txt ├── README.md ├── .gitignore ├── dev.py └── pyproject.toml /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/hatch_build_scripts/py.typed: -------------------------------------------------------------------------------- 1 | PEP-561 2 | -------------------------------------------------------------------------------- /src/hatch_build_scripts/__init__.py: -------------------------------------------------------------------------------- 1 | from importlib.metadata import PackageNotFoundError 2 | from importlib.metadata import version 3 | 4 | try: 5 | __version__ = version(__name__) 6 | except PackageNotFoundError: # nocov 7 | __version__ = "0.0.0" 8 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | blank_issues_enabled: false 3 | contact_links: 4 | - name: Start a Discussion 5 | url: https://github.com/rmorshea/hatch-build-scripts/discussions 6 | about: Report issues, request features, ask questions, and share ideas 7 | -------------------------------------------------------------------------------- /src/hatch_build_scripts/hooks.py: -------------------------------------------------------------------------------- 1 | """Register hooks for the plugin.""" 2 | 3 | from hatchling.plugin import hookimpl 4 | 5 | from hatch_build_scripts.plugin import BuildScriptsHook 6 | 7 | 8 | @hookimpl 9 | def hatch_register_build_hook(): 10 | """Get the hook implementation.""" 11 | 
return BuildScriptsHook 12 | -------------------------------------------------------------------------------- /.copier-answers.yml: -------------------------------------------------------------------------------- 1 | # Changes here will be overwritten by Copier; NEVER EDIT MANUALLY 2 | _commit: v2025.02.0 3 | _src_path: https://github.com/rmorshea/python-copier-template 4 | author_email: ryan.morshead@gmail.com 5 | author_name: Ryan Morshead 6 | project_description: A Hatch plugin for creating build scripts. 7 | project_title: Hatch Build Scripts 8 | python_package_name: hatch_build_scripts 9 | python_version_range: '>=3.9' 10 | repo_url: https://github.com/rmorshea/hatch-build-scripts 11 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/issue-form.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Plan a Task 3 | description: Create a detailed plan of action (ONLY START AFTER DISCUSSION PLEASE 🙏). 4 | labels: [flag-triage] 5 | body: 6 | - type: textarea 7 | attributes: 8 | label: Current Situation 9 | description: Discuss how things currently are, why they require action, and any relevant prior discussion/context. 10 | validations: 11 | required: false 12 | - type: textarea 13 | attributes: 14 | label: Proposed Actions 15 | description: Describe what ought to be done, and why that will address the reasons for action mentioned 16 | above. 
17 | validations: 18 | required: false 19 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: release 3 | on: 4 | release: 5 | types: [published] 6 | env: 7 | latest-python-version: 3.13 8 | jobs: 9 | py-package: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v4 13 | - name: UV cache 14 | uses: actions/cache@v4 15 | with: 16 | path: ~/.cache/uv 17 | key: ${{ runner.os }}-${{ env.latest-python-version }}-uv-${{ hashFiles('pyproject.toml') }} 18 | - name: Install UV 19 | run: curl -LsSf https://astral.sh/uv/install.sh | sh 20 | - name: Install Python 21 | run: uv python install ${{ env.latest-python-version }} 22 | - name: Build package 23 | run: uv build 24 | - name: Publish package 25 | env: 26 | UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }} 27 | run: uv publish 28 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Ryan Morshead 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 6 | software and associated documentation files (the "Software"), to deal in the Software 7 | without restriction, including without limitation the rights to use, copy, modify, 8 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 9 | permit persons to whom the Software is furnished to do so, subject to the following 10 | conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all copies or 13 | substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 16 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR 17 | PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 19 | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 20 | OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import subprocess 4 | import sys 5 | import zipfile 6 | from contextlib import contextmanager 7 | from dataclasses import asdict 8 | from pathlib import Path 9 | from typing import TYPE_CHECKING 10 | 11 | import toml 12 | 13 | import hatch_build_scripts 14 | 15 | if TYPE_CHECKING: 16 | from collections.abc import Iterator 17 | from collections.abc import Sequence 18 | 19 | from hatch_build_scripts.plugin import OneScriptConfig 20 | 21 | ROOT_DIR = Path(__file__).parent.parent 22 | 23 | 24 | def create_project(path: Path | str, scripts: Sequence[OneScriptConfig]) -> FakeProject: 25 | path = Path(path) 26 | 27 | full_config = { 28 | "project": { 29 | "name": "test-project", 30 | "version": "0.0.0", 31 | "description": "A test project", 32 | }, 33 | "build-system": { 34 | "requires": ["hatchling", f"hatch-build-scripts @ {ROOT_DIR.as_uri()}"], 35 | "build-backend": "hatchling.build", 36 | }, 37 | "tool": { 38 | "hatch": { 39 | "build": { 40 | "hooks": { 41 | "build-scripts": { 42 | "scripts": list(map(asdict, scripts)), 43 | } 44 | } 45 | } 46 | } 47 | }, 48 | } 49 | 50 | (path / "pyproject.toml").write_text(toml.dumps(full_config), encoding="utf-8") 51 | 52 | return FakeProject(path) 53 | 54 | 55 | class FakeProject: 56 | def __init__(self, path: Path) 
-> None: 57 | self.path = path 58 | 59 | def build(self) -> None: 60 | subprocess.run( 61 | [ 62 | sys.executable, 63 | "-m", 64 | "pip", 65 | "cache", 66 | "remove", 67 | hatch_build_scripts.__name__, 68 | ], 69 | cwd=self.path, 70 | check=True, 71 | ) 72 | subprocess.run( 73 | [sys.executable, "-m", "hatch", "build"], 74 | cwd=self.path, 75 | check=True, 76 | ) 77 | 78 | @contextmanager 79 | def dist(self) -> Iterator[zipfile.ZipFile]: 80 | files = list((self.path / "dist").glob("*.whl")) 81 | assert len(files) == 1 82 | with zipfile.ZipFile(str(files[0])) as whl: 83 | yield whl 84 | -------------------------------------------------------------------------------- /tests/test_plugin.py: -------------------------------------------------------------------------------- 1 | from hatch_build_scripts.plugin import OneScriptConfig 2 | from tests.utils import create_project 3 | 4 | 5 | def test_plugin(tmpdir): 6 | tmp_lib_dir = tmpdir / "lib" 7 | tmp_lib_dir.mkdir() 8 | 9 | (tmp_lib_dir / "some-dir").mkdir() 10 | (tmp_lib_dir / "another-dir").mkdir() 11 | (tmp_lib_dir / "yet-another-dir").mkdir() 12 | 13 | some_dir_out = tmp_lib_dir / "some-dir-out" 14 | some_dir_out.mkdir() 15 | # we expect that this file will not be cleaned 16 | (some_dir_out / "module.py").write_text('print("hello")', "utf-8") 17 | # we expect that this file will be cleaned 18 | (some_dir_out / "f3.txt").write_text("this should be cleaned", "utf-8") 19 | 20 | another_dir_out = tmp_lib_dir / "another-dir-out" 21 | another_dir_out.mkdir() 22 | # we expect that this file will be cleaned 23 | (another_dir_out / "module.py").write_text('print("hello")', "utf-8") 24 | 25 | proj = create_project( 26 | tmp_lib_dir, 27 | [ 28 | OneScriptConfig( 29 | out_dir="fake", 30 | commands=["echo 'hello world' > fake.txt"], 31 | artifacts=["fake.txt"], 32 | ), 33 | OneScriptConfig( 34 | out_dir="some-dir-out", 35 | work_dir="some-dir", 36 | commands=[ 37 | "echo 'hello world' > f1.txt", 38 | "echo 'hello world' > f2.txt", 
39 | ], 40 | # this will not clean the data.json file 41 | artifacts=["*.txt"], 42 | ), 43 | OneScriptConfig( 44 | out_dir="another-dir-out", 45 | work_dir="another-dir", 46 | commands=[ 47 | "echo 'hello world' > f1.txt", 48 | "echo 'hello world' > f2.txt", 49 | ], 50 | artifacts=["*.txt"], 51 | clean_out_dir=True, 52 | ), 53 | OneScriptConfig( 54 | out_dir="yet-another-dir-out", 55 | work_dir="yet-another-dir", 56 | commands=[ 57 | "echo 'hello world' > f1.txt", 58 | "echo 'hello world' > f2.txt", 59 | ], 60 | artifacts=["*.txt"], 61 | clean_out_dir=True, 62 | clean_artifacts_after_build=True, 63 | ), 64 | ], 65 | ) 66 | 67 | proj.build() 68 | 69 | extract_dir = tmpdir / "extract" 70 | extract_dir.mkdir() 71 | 72 | with proj.dist() as dist: 73 | dist.extractall(extract_dir) 74 | 75 | assert (extract_dir / "fake" / "fake.txt").exists() 76 | assert (extract_dir / "some-dir-out" / "module.py").exists() 77 | 78 | assert not (extract_dir / "some-dir-out" / "f3.txt").exists() 79 | assert not (extract_dir / "another-dir-out" / "module.py").exists() 80 | 81 | # we expect that this file still exists in the source 82 | assert (tmp_lib_dir / "fake" / "fake.txt").exists() 83 | # we expect that this file was cleaned in the source but exists in wheel 84 | assert (extract_dir / "yet-another-dir-out" / "f1.txt").exists() 85 | assert not (tmp_lib_dir / "yet-another-dir-out" / "f1.txt").exists() 86 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Hatch Build Scripts 2 | 3 | [![PyPI - Version](https://img.shields.io/pypi/v/hatch_build_scripts.svg)](https://pypi.org/project/hatch_build_scripts) 4 | [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/hatch_build_scripts.svg)](https://pypi.org/project/hatch_build_scripts) 5 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) 6 | 7 | A 
plugin for [Hatch](https://github.com/pypa/hatch) that allows you to run arbitrary 8 | build scripts and include their artifacts in your package distributions. 9 | 10 | ## Installation 11 | 12 | To set up `hatch-build-scripts` for your project you'll need to configure it in your 13 | project's `pyproject.toml` file as a `build-system` requirement: 14 | 15 | ```toml 16 | [build-system] 17 | requires = ["hatchling", "hatch-build-scripts"] 18 | build-backend = "hatchling.build" 19 | ``` 20 | 21 | ## Usage 22 | 23 | Now you'll need to configure the build scripts you want to run. This is done by adding 24 | an array of scripts to the `tool.hatch.build.hooks.build-scripts.scripts` key in your 25 | `pyproject.toml` file. Each script is configured with the following keys: 26 | 27 | | Key | Default | Description | 28 | | ----------------- | -------- | ------------------------------------------------------------------------------------------------------- | 29 | | `commands` | required | An array of commands to run. Each command is run in a separate shell. | 30 | | `artifacts` | `[]` | An array of artifact patterns (same as `.gitignore`) to include in your package distributions. | 31 | | `out_dir` | `"."` | The directory to copy artifacts into. | 32 | | `work_dir` | `"."` | The directory to run the commands in. All artifact patterns are relative to this directory. | 33 | | `clean_artifacts` | `true` | Whether to clean files from the `out_dir` that match the artifact patterns before running the commands. | 34 | | `clean_out_dir` | `false` | Whether to clean the `out_dir` before running the commands. | 35 | 36 | In practice this looks like: 37 | 38 | ```toml 39 | [[tool.hatch.build.hooks.build-scripts.scripts]] 40 | out_dir = "out" 41 | commands = [ 42 | "echo 'Hello, world!' > hello.txt", 43 | "echo 'Goodbye, world!' 
> goodbye.txt", 44 | ] 45 | artifacts = [ 46 | "hello.txt", 47 | "goodbye.txt", 48 | ] 49 | 50 | [[tool.hatch.build.hooks.build-scripts.scripts]] 51 | # you can add more scripts here... 52 | ``` 53 | 54 | You can configure script defaults for scripts by adding a 55 | `[tool.hatch.build.hooks.build-scripts]` table to your `pyproject.toml` file. The 56 | following keys are supported: 57 | 58 | | Key | Default | Description | 59 | | ----------------- | ------- | ------------------------------------------------------------------------------------------------------- | 60 | | `out_dir` | `"."` | The directory to copy artifacts into. | 61 | | `work_dir` | `"."` | The directory to run the commands in. All artifact patterns are relative to this directory. | 62 | | `clean_artifacts` | `true` | Whether to clean files from the `out_dir` that match the artifact patterns before running the commands. | 63 | | `clean_out_dir` | `false` | Whether to clean the `out_dir` before running the commands. | 64 | -------------------------------------------------------------------------------- /.github/workflows/check.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: check 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | env: 9 | latest-python-version: 3.13 10 | jobs: 11 | py-tests: 12 | runs-on: ubuntu-latest 13 | strategy: 14 | matrix: 15 | python-version: ['3.10', '3.11', '3.12', '3.13'] 16 | steps: 17 | - uses: actions/checkout@v4 18 | - name: UV cache 19 | uses: actions/cache@v4 20 | with: 21 | path: ~/.cache/uv 22 | key: ${{ runner.os }}-${{ matrix.python-version }}-uv-${{ hashFiles('pyproject.toml') }} 23 | - name: Install UV 24 | uses: astral-sh/setup-uv@v5 25 | with: 26 | version: latest 27 | python-version: ${{ matrix.python-version }} 28 | enable-cache: true 29 | cache-dependency-glob: '**/pyproject.toml' 30 | - name: Run tests 31 | run: uv run dev.py cov 32 | - name: Upload coverage 33 | if: 
matrix.python-version == env.latest-python-version 34 | uses: actions/upload-artifact@v4 35 | with: 36 | name: coverage-xml 37 | include-hidden-files: true 38 | path: | 39 | ${{ github.workspace }}/coverage.xml 40 | ${{ github.workspace }}/.coverage 41 | if-no-files-found: error 42 | py-coverage: 43 | runs-on: ubuntu-latest 44 | needs: py-tests 45 | if: github.ref != format('refs/heads/{0}', github.event.repository.default_branch) 46 | steps: 47 | - uses: actions/checkout@v4 48 | with: {fetch-depth: 50} 49 | - name: Download new coverage 50 | uses: actions/download-artifact@v4 51 | with: 52 | name: coverage-xml 53 | path: ${{ github.workspace }} 54 | - name: Download old coverage 55 | uses: dawidd6/action-download-artifact@v6 56 | with: 57 | branch: ${{ github.event.pull_request.base.ref }} 58 | name: coverage-xml 59 | path: ${{ github.workspace }}/old-coverage 60 | - name: UV cache 61 | uses: actions/cache@v4 62 | with: 63 | path: ~/.cache/uv 64 | key: ${{ runner.os }}-${{ env.latest-python-version }}-uv-${{ hashFiles('pyproject.toml') }} 65 | - run: git fetch origin main 66 | - name: Install UV 67 | run: curl -LsSf https://astral.sh/uv/install.sh | sh 68 | - name: Install Python 69 | run: uv python install ${{ env.latest-python-version }} 70 | - name: Check coverage 71 | run: uv run dev.py cov --no-test --old-coverage-xml=${{ github.workspace }}/old-coverage/coverage.xml 72 | - name: Coverage summary 73 | run: uv run coverage report --format=markdown >> $GITHUB_STEP_SUMMARY 74 | py-lint: 75 | runs-on: ubuntu-latest 76 | steps: 77 | - uses: actions/checkout@v4 78 | - name: UV cache 79 | uses: actions/cache@v4 80 | with: 81 | path: ~/.cache/uv 82 | key: ${{ runner.os }}-${{ env.latest-python-version }}-uv-${{ hashFiles('pyproject.toml') }} 83 | - name: Install UV 84 | run: curl -LsSf https://astral.sh/uv/install.sh | sh 85 | - name: Install Python 86 | run: uv python install ${{ env.latest-python-version }} 87 | - name: Check lint 88 | run: uv run dev.py lint --check 89 | 
py-build: 90 | runs-on: ubuntu-latest 91 | steps: 92 | - uses: actions/checkout@v4 93 | - name: UV cache 94 | uses: actions/cache@v4 95 | with: 96 | path: ~/.cache/uv 97 | key: ${{ runner.os }}-${{ env.latest-python-version }}-uv-${{ hashFiles('pyproject.toml') }} 98 | - name: Install UV 99 | run: curl -LsSf https://astral.sh/uv/install.sh | sh 100 | - name: Install Python 101 | run: uv python install ${{ env.latest-python-version }} 102 | - name: Build package 103 | run: uv build 104 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 110 | .pdm.toml 111 | .pdm-python 112 | .pdm-build/ 113 | 114 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 115 | __pypackages__/ 116 | 117 | # Celery stuff 118 | celerybeat-schedule 119 | celerybeat.pid 120 | 121 | # SageMath parsed files 122 | *.sage.py 123 | 124 | # Environments 125 | .env 126 | .venv 127 | env/ 128 | venv/ 129 | ENV/ 130 | env.bak/ 131 | venv.bak/ 132 | 133 | # Spyder project settings 134 | .spyderproject 135 | .spyproject 136 | 137 | # Rope project settings 138 | .ropeproject 139 | 140 | # mkdocs documentation 141 | /site 142 | 143 | # mypy 144 | .mypy_cache/ 145 | .dmypy.json 146 | dmypy.json 147 | 148 | # Pyre type checker 149 | .pyre/ 150 | 151 | # pytype static type analyzer 152 | .pytype/ 153 | 154 | # Cython debug symbols 155 | cython_debug/ 156 | 157 | # PyCharm 158 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 159 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 160 | # and can be added to the global gitignore or merged into this file. For a more nuclear 161 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
162 | #.idea/ 163 | -------------------------------------------------------------------------------- /dev.py: -------------------------------------------------------------------------------- 1 | # ruff: noqa: S607,S603,S404,FBT001,D401,D103 2 | from __future__ import annotations 3 | 4 | import os 5 | import subprocess 6 | from pathlib import Path 7 | from typing import TYPE_CHECKING 8 | from typing import Literal 9 | 10 | import click 11 | 12 | IN_CI = bool(os.getenv("GITHUB_ACTIONS")) 13 | 14 | 15 | @click.group() 16 | def main(): 17 | """A collection of dev utilities.""" 18 | 19 | 20 | @main.command("test") 21 | @click.argument("args", nargs=-1) 22 | def test(args: list[str]): 23 | """Run the test suite.""" 24 | run(["pytest", "-v", *args]) 25 | 26 | 27 | @main.command("cov") 28 | @click.option("--no-test", is_flag=True, help="Skip running tests with coverage") 29 | @click.option("--old-coverage-xml", default=None, type=str, help="Path to target coverage.xml.") 30 | def cov(no_test: bool, old_coverage_xml: str | None): 31 | """Run the test suite with coverage.""" 32 | if not no_test: 33 | try: 34 | run(["coverage", "run", "-m", "pytest", "-v"]) 35 | finally: 36 | run(["coverage", "combine"], check=False) 37 | run(["coverage", "report"]) 38 | run(["coverage", "xml"]) 39 | if old_coverage_xml is not None: 40 | if Path(old_coverage_xml).exists(): 41 | run( 42 | [ 43 | "pycobertura", 44 | "diff", 45 | old_coverage_xml, 46 | "coverage.xml", 47 | "--source1", 48 | ".", 49 | "--source2", 50 | ".", 51 | ] 52 | ) 53 | else: 54 | msg = f"Target coverage file {old_coverage_xml} does not exist" 55 | raise click.ClickException(msg) 56 | elif not IN_CI: 57 | run(["diff-cover", "coverage.xml", "--config-file", "pyproject.toml"]) 58 | 59 | 60 | @main.command("lint") 61 | @click.option("--check", is_flag=True, help="Check for linting issues without fixing.") 62 | @click.option("--no-py-style", is_flag=True, help="Skip style check Python files.") 63 | 
@click.option("--no-py-types", is_flag=True, help="Skip type check Python files.") 64 | @click.option("--no-uv-locked", is_flag=True, help="Skip check that the UV lock file is synced") 65 | @click.option("--no-yml-style", is_flag=True, help="Skip style check YAML files.") 66 | def lint( 67 | check: bool, 68 | no_py_style: bool, 69 | no_py_types: bool, 70 | no_uv_locked: bool, 71 | no_yml_style: bool, 72 | ): 73 | """Linting commands.""" 74 | if not no_uv_locked: 75 | run(["uv", "lock", "--locked"]) 76 | if not no_py_style: 77 | if check: 78 | run(["ruff", "format", "--check", "--diff"]) 79 | run(["ruff", "check"]) 80 | else: 81 | run(["ruff", "format"]) 82 | run(["ruff", "check", "--fix"]) 83 | if not no_yml_style: 84 | if check: 85 | run(["yamlfix", "--check", ".github"]) 86 | else: 87 | run(["yamlfix", ".github"]) 88 | if not no_py_types: 89 | run(["pyright"]) 90 | 91 | 92 | if TYPE_CHECKING: 93 | from collections.abc import Sequence 94 | 95 | run = subprocess.run 96 | else: 97 | 98 | def run(*args, **kwargs): 99 | cmd, *args = args 100 | cmd = tuple(map(str, cmd)) 101 | kwargs.setdefault("check", True) 102 | click.echo(click.style(" ".join(cmd), bold=True)) 103 | try: 104 | return subprocess.run(cmd, *args, **kwargs) 105 | except subprocess.CalledProcessError as e: 106 | raise click.ClickException(e) from None 107 | except FileNotFoundError as e: 108 | msg = f"File not found {e}" 109 | raise click.ClickException(msg) from None 110 | 111 | 112 | def report( 113 | kind: Literal["notice", "warning", "error"], 114 | /, 115 | *, 116 | title: str = "", 117 | message: str = "", 118 | file: str | None = None, 119 | line: int | None = None, 120 | end_line: int | None = None, 121 | col: int | None = None, 122 | end_col: int | None = None, 123 | ): 124 | if not IN_CI: 125 | file_parts = [] 126 | if file: 127 | file_parts.append(f"{file}") 128 | if line: 129 | file_parts.append(f":{line}") 130 | if end_line: 131 | file_parts.append(f"-{end_line}") 132 | if col: 133 | 
file_parts.append(f":{col}") 134 | if end_col: 135 | file_parts.append(f"-{end_col}") 136 | file_info = "".join(file_parts) 137 | click.echo(" - ".join(filter(None, [kind.upper(), file_info, title, message]))) 138 | else: 139 | file_parts = [] 140 | if title or message: 141 | file_parts.append(f"{title}::{message}") 142 | if file: 143 | file_parts.append(f"file={file}") 144 | if line: 145 | file_parts.append(f"line={line}") 146 | if end_line: 147 | file_parts.append(f"endLine={end_line}") 148 | if col: 149 | file_parts.append(f"col={col}") 150 | if end_col: 151 | file_parts.append(f"endCol={end_col}") 152 | click.echo(f"::{kind} {','.join(file_parts)}") 153 | 154 | 155 | def doc_cmd(cmd: Sequence[str], *, no_pad: bool = False): 156 | run( 157 | list( 158 | filter( 159 | None, 160 | [ 161 | "doccmd", 162 | "-v", 163 | "--language=python", 164 | "--no-pad-file" if no_pad else "", 165 | "--command", 166 | " ".join(cmd), 167 | "docs", 168 | ], 169 | ) 170 | ) 171 | ) 172 | 173 | 174 | if __name__ == "__main__": 175 | main() 176 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [tool.hatch.build.targets.sdist] 6 | include = ["src/hatch_build_scripts", "docs"] 7 | [tool.hatch.build.targets.wheel] 8 | packages = ["src/hatch_build_scripts"] 9 | 10 | [project] 11 | name = "hatch_build_scripts" 12 | version = "1.0.0" 13 | description = "A Hatch plugin for creating build scripts." 
14 | readme = "README.md" 15 | requires-python = ">=3.10,<4" 16 | license = { file = "LICENSE.txt" } 17 | keywords = [] 18 | authors = [{ name = "Ryan Morshead", email = "ryan.morshead@gmail.com" }] 19 | classifiers = [ 20 | "Development Status :: 4 - Beta", 21 | "Programming Language :: Python", 22 | ] 23 | dependencies = ["pathspec", "hatchling"] 24 | [project.urls] 25 | Source = "https://github.com/rmorshea/hatch-build-scripts" 26 | [project.entry-points.hatch] 27 | build-script = "hatch_build_scripts.hooks" 28 | 29 | [dependency-groups] 30 | dev = [ 31 | { include-group = "util" }, 32 | { include-group = "docs" }, 33 | { include-group = "lint" }, 34 | { include-group = "test" }, 35 | ] 36 | util = ["click==8.1.7", "ipykernel==6.29.5", "copier==9.4.1"] 37 | docs = [ 38 | { include-group = "test" }, 39 | "mkdocs-gen-files==0.5.0", 40 | "mkdocs-literate-nav==0.6.1", 41 | "mkdocs-material==9.5.39", 42 | "mkdocs-open-in-new-tab==1.0.5", 43 | "mkdocs==1.6.1", 44 | "mkdocstrings-python==1.13.0", 45 | ] 46 | lint = [ 47 | { include-group = "test" }, 48 | "mdformat-admon @ git+https://github.com/rmorshea/mdformat-admon.git@0e513d7a2c265faf74441938ccbd1010660609f4", 49 | "mdformat-mkdocs==3.1.1", 50 | "mdformat-pyproject==0.0.1", 51 | "mdformat-tables==1.0.0", 52 | "mdformat==0.7.19", 53 | "pyright==1.1.389", 54 | "ruff==0.7.3", 55 | "yamlfix==1.17.0", 56 | "doccmd==2024.11.14", 57 | ] 58 | test = [ 59 | "coverage[toml]==7.6.1", 60 | "diff-cover==9.2.0", 61 | "pycobertura==3.3.2", 62 | "pytest-asyncio==0.24.0", 63 | "pytest-examples==0.0.13", 64 | "pytest==8.3.3", 65 | "pip>=25.1.1", 66 | "hatch>=1.14.1", 67 | ] 68 | 69 | [tool.pytest.ini_options] 70 | asyncio_mode = "auto" 71 | asyncio_default_fixture_loop_scope = "function" 72 | 73 | [tool.ruff] 74 | line-length = 100 75 | 76 | [tool.ruff.format] 77 | docstring-code-format = true 78 | quote-style = "double" 79 | indent-style = "space" 80 | 81 | [tool.ruff.lint] 82 | preview = true 83 | select = ["ALL"] 84 | ignore = [ 
85 | "A005", # Module shadowing built-in 86 | "ANN", # Let pyright handle annotations 87 | "ANN401", # Allow Any type hints 88 | "ARG005", # Unused lambda argument 89 | "B027", # Allow non-abstract empty methods in abstract base classes 90 | "B039", # Mutable default for contextvars 91 | "C901", # Ignore complexity 92 | "COM812", # Trailing comma 93 | "CPY001", # Copyright at top of file 94 | "D100", # Docstring for module 95 | "D104", # Ignore missing docstring for __init__.py 96 | "D105", # Docstring for magic method 97 | "D107", # Docstring for __init__ method 98 | "D203", # One blank line before class 99 | "D213", # Multi-line docstring summary second line 100 | "D407", # Docstring dashes under section names 101 | "D413", # Docstring blank line after last section 102 | "DOC201", # Return type documentation 103 | "DOC402", # Yield type documentation 104 | "DOC501", # Ignore raises missing from docstring 105 | "ERA001", # Commented out code 106 | "FBT003", # Allow boolean positional values in function calls, like `dict.get(... 
True)` 107 | "PL", # PyLint 108 | "PYI", # Stub files 109 | "RET503", # Explicit return 110 | "RET505", # Unnecessary return statement after return 111 | "S105", # Ignore checks for possible passwords 112 | "S404", # Ignore subprocess import 113 | "SIM117", # Use a single `with` statement 114 | "ISC001", # implicitly concatenated strings on a single line 115 | ] 116 | unfixable = [ 117 | "COM819", # Trailing comma 118 | ] 119 | fixable = ["ALL"] 120 | extend-safe-fixes = ["TCH"] 121 | [tool.ruff.lint.isort] 122 | known-first-party = ["hatch_build_scripts"] 123 | force-single-line = true 124 | [tool.ruff.lint.flake8-tidy-imports] 125 | ban-relative-imports = "all" 126 | [tool.ruff.lint.per-file-ignores] 127 | "tests/**" = [ 128 | "PLC2701", # Private imports 129 | "RUF029", # Async functions without await 130 | "S101", # Assert statements 131 | "D", # Docstrings 132 | "ANN", # Type annotations 133 | "S", # Security issues 134 | ] 135 | "**.ipynb" = [ 136 | "T201", # Print statements 137 | ] 138 | "docs/**" = [ 139 | "INP001", # Implicit namespace package 140 | "D", # Docstrings 141 | ] 142 | "doccmd_*.py" = [ 143 | "ANN", # Type annotations 144 | "B018", # Useless expression 145 | "FA102", # Unsafe __futures__ annotations usage 146 | "RUF029", # No await in async function 147 | "S101", # Assert statements 148 | "S106", # Possible passwords 149 | "SIM115", # Use context manager for opening files 150 | "T201", # Print 151 | "TCH002", # Move third-party import into a type-checking block 152 | ] 153 | 154 | [tool.yamlfix] 155 | line_length = 100 156 | 157 | [tool.coverage.run] 158 | source_pkgs = ["hatch_build_scripts", "tests"] 159 | branch = true 160 | omit = [] 161 | 162 | [tool.coverage.paths] 163 | hatch_build_scripts = ["src"] 164 | tests = ["tests"] 165 | 166 | [tool.coverage.report] 167 | exclude_lines = [ 168 | "# nocov", 169 | "@overload", 170 | "if TYPE_CHECKING:", 171 | "raise AssertionError", 172 | "raise NotImplementedError", 173 | 'if __name__ == 
class BuildScriptsHook(BuildHookInterface):
    """A build hook that runs custom scripts defined in the pyproject.toml.

    For each configured script this hook cleans stale artifacts, runs the
    script's shell commands, copies matching output files into the configured
    output directory, and registers that directory as a build artifact.
    """

    PLUGIN_NAME = "build-scripts"

    def initialize(
        self,
        version: str,  # noqa: ARG002
        build_data: dict[str, Any],
    ) -> None:
        """Initialize the build hook."""
        # Output files copied so far in this run - consulted to avoid
        # re-copying files already produced by an earlier script.
        created: set[Path] = set()

        all_scripts = load_scripts(self.config)

        # First pass: clean every script's outputs before running any commands,
        # so a later script's cleanup cannot delete an earlier script's fresh
        # output.
        for script in all_scripts:
            if script.clean_out_dir:
                out_dir = Path(self.root, script.out_dir)
                log.debug("Cleaning %s", out_dir)
                # ignore_errors also covers an out_dir that does not exist yet
                shutil.rmtree(out_dir, ignore_errors=True)
            elif script.clean_artifacts:
                # Remove only files matching the artifacts patterns; unrelated
                # files already in the output directory are left alone.
                for out_file in script.out_files(self.root):
                    log.debug("Cleaning %s", out_file)
                    out_file.unlink(missing_ok=True)

        # Second pass: run each script's commands, then collect its artifacts.
        for script in all_scripts:
            log.debug("Script config: %s", asdict(script))
            work_dir = Path(self.root, script.work_dir)
            out_dir = Path(self.root, script.out_dir)
            out_dir.mkdir(parents=True, exist_ok=True)

            for cmd in script.commands:
                log.info("Running command: %s", cmd)
                # shell=True is intentional: commands come from the project's
                # own pyproject.toml; check=True aborts the build on failure.
                run(cmd, cwd=str(work_dir), check=True, shell=True)  # noqa: S602

            log.info("Copying artifacts to %s", out_dir)
            for work_file in script.work_files(self.root, relative=True):
                src_file = work_dir / work_file
                out_file = out_dir / work_file
                log.debug("Copying %s to %s", src_file, out_file)
                # NOTE(review): the membership test checks src_file but the set
                # stores out_file values - this skips sources that were
                # themselves produced as an earlier script's output; confirm
                # this asymmetry is intentional and not meant to be
                # `out_file not in created`.
                if src_file not in created and src_file != out_file:
                    out_file.parent.mkdir(parents=True, exist_ok=True)
                    # copyfile + copystat preserve both contents and metadata
                    # (effectively a manual shutil.copy2).
                    shutil.copyfile(src_file, out_file)
                    shutil.copystat(src_file, out_file)
                    created.add(out_file)
                else:
                    log.debug("Skipping %s - already exists", src_file)

            # Register the output directory (relative to the project root) so
            # hatchling includes its files in the build.
            build_data["artifacts"].append(str(out_dir.relative_to(self.root)))

    def finalize(
        self,
        version: str,  # noqa: ARG002
        build_data: dict[str, Any],  # noqa: ARG002
        artifact_path: str,  # noqa: ARG002
    ) -> None:
        """Finalize the build hook."""
        all_scripts = load_scripts(self.config)

        # Delete artifacts that were only needed inside the built distribution,
        # keeping the working tree clean after the build completes.
        for script in all_scripts:
            if not script.clean_artifacts_after_build:
                continue

            for out_file in script.out_files(self.root):
                log.debug("After build, cleaning %s", out_file)
                out_file.unlink(missing_ok=True)


def load_scripts(config: dict[str, Any]) -> Sequence[OneScriptConfig]:
    """Load the build scripts from the configuration.

    Top-level keys of ``config`` that match a defaultable ``OneScriptConfig``
    field act as shared defaults; each entry under ``config["scripts"]`` may
    override them.
    """
    script_defaults = dataclass_defaults(OneScriptConfig)
    # Overlay file-level settings onto the dataclass defaults; only keys that
    # correspond to fields with defaults are honored here.
    script_defaults.update({k: config[k] for k in script_defaults if k in config})
    return [
        OneScriptConfig(**{**script_defaults, **script_config})
        for script_config in config.get("scripts", [])
    ]
@dataclass
class OneScriptConfig:
    """A configuration for a single build script."""

    commands: Sequence[str]
    """The commands to run"""

    artifacts: Sequence[str] = ()
    """Git file patterns relative to the work_dir to save as build artifacts"""

    out_dir: str = "."
    """The path where build artifacts will be saved"""

    work_dir: str = "."
    """The path where the build script will be run"""

    clean_artifacts: bool = True
    """Whether to clean the build directory before running the scripts"""

    clean_out_dir: bool = False
    """Whether to clean the output directory before running the scripts"""

    clean_artifacts_after_build: bool = False
    """Whether to clean the build directory after running the scripts"""

    def __post_init__(self) -> None:
        # Normalize user-supplied POSIX-style paths to the host platform.
        self.out_dir = conv_path(self.out_dir)
        self.work_dir = conv_path(self.work_dir)

    def work_files(self, root: str | Path, *, relative: bool = False) -> Sequence[Path]:
        """Get files in the work directory that match the artifacts spec."""
        return self._matched_files(Path(root, self.work_dir), relative=relative)

    def out_files(self, root: str | Path, *, relative: bool = False) -> Sequence[Path]:
        """Get files in the output directory that match the artifacts spec."""
        return self._matched_files(Path(root, self.out_dir), relative=relative)

    def _matched_files(self, abs_dir: Path, *, relative: bool) -> Sequence[Path]:
        """Return files under ``abs_dir`` matching the artifacts spec.

        Shared implementation for ``work_files`` and ``out_files`` (previously
        duplicated). A missing directory yields an empty list rather than an
        error.
        """
        if not abs_dir.exists():
            return []
        return [
            Path(f) if relative else abs_dir / f for f in self.artifacts_spec.match_tree(abs_dir)
        ]

    @cached_property
    def artifacts_spec(self) -> pathspec.PathSpec:
        """A pathspec for the artifacts (built once, then cached)."""
        return pathspec.PathSpec.from_lines(
            pathspec.patterns.gitwildmatch.GitWildMatchPattern, self.artifacts
        )
def dataclass_defaults(obj: Any) -> dict[str, Any]:
    """Collect the declared default values of a dataclass's fields.

    Fields with a plain default use it directly; fields with a default
    factory have the factory invoked. Required fields (no default of either
    kind) are omitted from the result.
    """
    collected: dict[str, Any] = {}
    for spec in fields(obj):
        if spec.default is not MISSING:
            value = spec.default
        elif spec.default_factory is not MISSING:
            value = spec.default_factory()
        else:
            continue  # required field: nothing to record
        collected[spec.name] = value
    return collected


def conv_path(path: str) -> str:
    """Convert a unix path to a platform-specific path."""
    # Splitting on "/" and re-joining with os.sep is equivalent to a
    # straight replace of "/" with the platform separator.
    return os.sep.join(path.split("/"))