├── tests ├── __init__.py ├── requirements │ ├── requirements_diff.txt │ ├── constraints.txt │ ├── requirements.txt │ └── requirements_all.txt ├── test_pip.py ├── conftest.py ├── test_wheel.py └── test_infra.py ├── .github ├── release-drafter.yml ├── dependabot.yml └── workflows │ ├── stale.yml │ ├── release-drafter.yml │ ├── publish.yml │ └── ci.yml ├── requirements_cp313.txt ├── requirements_cp314.txt ├── .hadolint.yaml ├── rootfs ├── bin │ └── run-builder.sh └── etc │ └── pip.conf ├── requirements_tests.txt ├── requirements_wheels_test.txt ├── requirements.txt ├── script └── run-in-env.sh ├── pylintrc ├── Dockerfile ├── README.md ├── .devcontainer └── devcontainer.json ├── pyproject.toml ├── .gitignore ├── .pre-commit-config.yaml ├── action.yml └── LICENSE /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for wheel builder.""" 2 | -------------------------------------------------------------------------------- /tests/requirements/requirements_diff.txt: -------------------------------------------------------------------------------- 1 | aiohttp==4.2.3 2 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | template: | 2 | ## What's Changed 3 | 4 | $CHANGES 5 | -------------------------------------------------------------------------------- /tests/requirements/constraints.txt: -------------------------------------------------------------------------------- 1 | # Constraint file 2 | typing==1000000000000.0.0 3 | -------------------------------------------------------------------------------- /requirements_cp313.txt: -------------------------------------------------------------------------------- 1 | Cython==3.2.3 2 | numpy==2.3.3 3 | scikit-build==0.18.1 4 | cffi==2.0.0 5 | -------------------------------------------------------------------------------- 
/requirements_cp314.txt: -------------------------------------------------------------------------------- 1 | Cython==3.2.3 2 | numpy==2.3.3 3 | scikit-build==0.18.1 4 | cffi==2.0.0 5 | -------------------------------------------------------------------------------- /.hadolint.yaml: -------------------------------------------------------------------------------- 1 | ignored: 2 | - DL3003 3 | - DL3006 4 | - DL3013 5 | - DL3018 6 | - DL3042 7 | - SC2086 8 | -------------------------------------------------------------------------------- /rootfs/bin/run-builder.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bashio 2 | # shellcheck disable=SC2068 3 | exec python3 -m builder $@ 4 | -------------------------------------------------------------------------------- /tests/requirements/requirements.txt: -------------------------------------------------------------------------------- 1 | # Core requirements 2 | -c constraints.txt 3 | aiohttp==4.2.3 4 | RPi.GPIO==1.2.3 -------------------------------------------------------------------------------- /tests/requirements/requirements_all.txt: -------------------------------------------------------------------------------- 1 | # All requirements 2 | -c constraints.txt 3 | 4 | -r requirements.txt 5 | 6 | aiohue==5.6.7 7 | -------------------------------------------------------------------------------- /requirements_tests.txt: -------------------------------------------------------------------------------- 1 | pylint==4.0.4 2 | pytest==9.0.2 3 | mypy==1.19.1 4 | prek==0.2.22 5 | types-requests==2.32.4.20250913 6 | pylint-per-file-ignores==3.2.0 -------------------------------------------------------------------------------- /requirements_wheels_test.txt: -------------------------------------------------------------------------------- 1 | requests==2.32.5 2 | Brotli==1.2.0 3 | orjson==3.11.5 4 | faust-cchardet==2.1.19 5 | mysqlclient==2.2.7 6 | psycopg2==2.9.11 7 | 
-------------------------------------------------------------------------------- /rootfs/etc/pip.conf: -------------------------------------------------------------------------------- 1 | [global] 2 | disable-pip-version-check = true 3 | progress-bar = off 4 | 5 | [install] 6 | no-cache-dir = false 7 | prefer-binary = true 8 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | auditwheel==6.5.0 2 | awesomeversion>=21.2.2 3 | click==8.3.1 4 | requests==2.32.5 5 | wheel==0.46.1 6 | setuptools==80.9.0 7 | packaging==25.0 8 | pip==25.3 9 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: pip 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | time: "06:00" 8 | open-pull-requests-limit: 10 9 | - package-ecosystem: "github-actions" 10 | directory: "/" 11 | schedule: 12 | interval: daily 13 | time: "06:00" 14 | open-pull-requests-limit: 10 15 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: "Stale" 2 | on: 3 | schedule: 4 | - cron: "0 * * * *" 5 | 6 | jobs: 7 | stale: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/stale@v10 11 | with: 12 | stale-issue-message: "This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions." 
13 | stale-issue-label: "wontfix" 14 | exempt-issue-labels: "pinned,security" 15 | days-before-stale: 60 16 | days-before-close: 7 17 | days-before-pr-close: -1 18 | -------------------------------------------------------------------------------- /script/run-in-env.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | set -eu 3 | 4 | # Used in venv activate script. 5 | # Would be an error if undefined. 6 | OSTYPE="${OSTYPE-}" 7 | 8 | # Activate pyenv and virtualenv if present, then run the specified command 9 | 10 | # pyenv, pyenv-virtualenv 11 | if [ -s .python-version ]; then 12 | PYENV_VERSION=$(head -n 1 .python-version) 13 | export PYENV_VERSION 14 | fi 15 | 16 | if [ -n "${VIRTUAL_ENV-}" ] && [ -f "${VIRTUAL_ENV}/bin/activate" ]; then 17 | . "${VIRTUAL_ENV}/bin/activate" 18 | else 19 | # other common virtualenvs 20 | my_path=$(git rev-parse --show-toplevel) 21 | 22 | for venv in .venv venv .; do 23 | if [ -f "${my_path}/${venv}/bin/activate" ]; then 24 | . 
"${my_path}/${venv}/bin/activate" 25 | break 26 | fi 27 | done 28 | fi 29 | 30 | exec "$@" -------------------------------------------------------------------------------- /pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | reports=no 3 | 4 | disable= 5 | abstract-method, 6 | cyclic-import, 7 | duplicate-code, 8 | global-statement, 9 | locally-disabled, 10 | not-context-manager, 11 | too-few-public-methods, 12 | too-many-arguments, 13 | too-many-branches, 14 | too-many-instance-attributes, 15 | too-many-lines, 16 | too-many-locals, 17 | too-many-positional-arguments, 18 | too-many-public-methods, 19 | too-many-return-statements, 20 | too-many-statements, 21 | unused-argument, 22 | line-too-long, 23 | too-few-public-methods, 24 | not-async-context-manager, 25 | too-many-locals, 26 | too-many-branches, 27 | no-else-return 28 | 29 | [MAIN] 30 | load-plugins = 31 | pylint_per_file_ignores 32 | 33 | [EXCEPTIONS] 34 | overgeneral-exceptions=builtins.Exception 35 | 36 | [TYPECHECK] 37 | ignored-modules = distutils 38 | 39 | [MESSAGES CONTROL] 40 | per-file-ignores = 41 | tests/*:C0116 -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ARG BUILD_FROM 2 | FROM ${BUILD_FROM} 3 | 4 | ARG \ 5 | BUILD_ARCH \ 6 | CPYTHON_ABI \ 7 | PIP_EXTRA_INDEX_URL=https://wheels.home-assistant.io/musllinux-index/ 8 | 9 | SHELL ["/bin/bash", "-exo", "pipefail", "-c"] 10 | 11 | COPY rootfs / 12 | 13 | # Install requirements 14 | RUN \ 15 | --mount=type=bind,source=.,target=/usr/src/builder/,rw \ 16 | apk upgrade --no-cache \ 17 | && apk add --no-cache \ 18 | rsync \ 19 | openssh-client \ 20 | patchelf \ 21 | build-base \ 22 | cmake \ 23 | git \ 24 | linux-headers \ 25 | autoconf \ 26 | automake \ 27 | cargo \ 28 | libffi \ 29 | && apk add --no-cache --virtual .build-dependencies \ 30 | libffi-dev \ 31 | && pip3 
install \ 32 | -r /usr/src/builder/requirements.txt \ 33 | -r /usr/src/builder/requirements_${CPYTHON_ABI}.txt \ 34 | /usr/src/builder/ 35 | 36 | # Set build environment information 37 | ENV \ 38 | ARCH=${BUILD_ARCH} \ 39 | ABI=${CPYTHON_ABI} 40 | 41 | # Runtime 42 | WORKDIR /data 43 | 44 | ENTRYPOINT [ "run-builder.sh" ] 45 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Home Assistant Musl Wheels builder 2 | 3 | https://peps.python.org/pep-0656/ 4 | 5 | ## Platform tags 6 | 7 | Compile utilities: 8 | 9 | - build-base 10 | - cmake 11 | - git 12 | - linux-headers 13 | - autoconf 14 | - automake 15 | - cargo 16 | 17 | ### Python 3.13 / musllinux_1_2 18 | 19 | Build with Alpine 3.22 20 | Images: ghcr.io/home-assistant/wheels/ARCH/musllinux_1_2/cp313:VERSION 21 | 22 | Version of system builds: 23 | 24 | - GCC 14.2.0 25 | - Cython 3.2.2 26 | - numpy 2.3.3 27 | - scikit-build 0.18.1 28 | - cffi 2.0.0 29 | 30 | ### Python 3.14 / musllinux_1_2 31 | 32 | Build with Alpine 3.22 33 | Images: ghcr.io/home-assistant/wheels/ARCH/musllinux_1_2/cp314:VERSION 34 | 35 | Version of system builds: 36 | 37 | - GCC 14.2.0 38 | - Cython 3.2.2 39 | - numpy 2.3.3 40 | - scikit-build 0.18.1 41 | - cffi 2.0.0 42 | 43 | ## Misc 44 | 45 | ```sh 46 | 47 | $ python3 -m builder \ 48 | --index https://wheels.home-assistant.io \ 49 | --requirement requirements_all.txt \ 50 | --upload rsync \ 51 | --remote user@server:/wheels 52 | ``` 53 | 54 | ## Supported file transfer 55 | 56 | - rsync 57 | 58 | ## Folder structure of index folder: 59 | 60 | `/musllinux/*` 61 | -------------------------------------------------------------------------------- /tests/test_pip.py: -------------------------------------------------------------------------------- 1 | """Tests for pip module.""" 2 | 3 | from pathlib import Path 4 | 5 | from builder import pip 6 | 7 | 8 | def 
test_parse_requirements() -> None: 9 | assert sorted( 10 | pip.parse_requirements( 11 | Path(__file__).parent / "requirements/requirements_all.txt", 12 | ), 13 | ) == ["RPi.GPIO==1.2.3", "aiohttp==4.2.3", "aiohue==5.6.7"] 14 | 15 | 16 | def test_extract_packages() -> None: 17 | assert sorted( 18 | pip.extract_packages( 19 | Path(__file__).parent / "requirements/requirements_all.txt", 20 | ), 21 | ) == ["RPi.GPIO==1.2.3", "aiohttp==4.2.3", "aiohue==5.6.7"] 22 | 23 | 24 | def test_extract_packages_diff() -> None: 25 | assert sorted( 26 | pip.extract_packages( 27 | Path(__file__).parent / "requirements/requirements_all.txt", 28 | Path(__file__).parent / "requirements/requirements_diff.txt", 29 | ), 30 | ) == ["RPi.GPIO==1.2.3", "aiohue==5.6.7"] 31 | 32 | 33 | def test_extract_packages_diff2() -> None: 34 | assert ( 35 | sorted( 36 | pip.extract_packages( 37 | Path(__file__).parent / "requirements/requirements_all.txt", 38 | Path(__file__).parent / "requirements/requirements_all.txt", 39 | ), 40 | ) 41 | == [] 42 | ) 43 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // See https://aka.ms/vscode-remote/devcontainer.json for format details. 
2 | { 3 | "name": "Wheels", 4 | "image": "mcr.microsoft.com/devcontainers/python:3.13", 5 | "postStartCommand": "pip install -r requirements.txt -r requirements_tests.txt && prek install", 6 | "customizations": { 7 | "vscode": { 8 | "extensions": [ 9 | "charliermarsh.ruff", 10 | "esbenp.prettier-vscode", 11 | "ms-python.python", 12 | "ms-python.vscode-pylance", 13 | "visualstudioexptteam.vscodeintellicode" 14 | ], 15 | "settings": { 16 | "[python]": { 17 | "editor.defaultFormatter": "charliermarsh.ruff" 18 | }, 19 | "editor.formatOnPaste": false, 20 | "editor.formatOnSave": true, 21 | "editor.formatOnType": true, 22 | "files.trimTrailingWhitespace": true, 23 | "python.pythonPath": "/usr/local/bin/python3", 24 | "python.linting.pylintEnabled": true, 25 | "python.linting.enabled": true, 26 | "python.testing.pytestEnabled": true, 27 | "python.testing.unittestEnabled": false, 28 | "python.linting.mypyPath": "/usr/local/bin/mypy", 29 | "python.linting.pylintPath": "/usr/local/bin/pylint" 30 | } 31 | } 32 | }, 33 | "remoteUser": "vscode", 34 | "runArgs": ["-e", "GIT_EDITOR=code --wait"] 35 | } 36 | -------------------------------------------------------------------------------- /.github/workflows/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name: Release Drafter 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | update_release_draft: 10 | runs-on: ubuntu-latest 11 | name: Release Drafter 12 | steps: 13 | - name: Checkout the repository 14 | uses: actions/checkout@v6.0.1 15 | with: 16 | fetch-depth: 0 17 | 18 | - name: Find Next Version 19 | id: version 20 | run: | 21 | declare -i newpost 22 | latest=$(git describe --tags $(git rev-list --tags --max-count=1)) 23 | latestpre=$(echo "$latest" | awk '{split($0,a,"."); print a[1] "." 
a[2]}') 24 | datepre=$(date --utc '+%Y.%m') 25 | 26 | 27 | if [[ "$latestpre" == "$datepre" ]]; then 28 | latestpost=$(echo "$latest" | awk '{split($0,a,"."); print a[3]}') 29 | newpost=$latestpost+1 30 | else 31 | newpost=0 32 | fi 33 | 34 | echo Current version: $latest 35 | echo New target version: $datepre.$newpost 36 | echo "version=$datepre.$newpost" >> $GITHUB_OUTPUT 37 | 38 | - name: Run Release Drafter 39 | uses: release-drafter/release-drafter@v6 40 | with: 41 | tag: ${{ steps.version.outputs.version }} 42 | name: ${{ steps.version.outputs.version }} 43 | env: 44 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 45 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "builder" 7 | version = "2.1.0" 8 | license = {text = "Apache-2.0"} 9 | description = "Home Assistant Python wheels builder." 
10 | authors = [ 11 | {name = "The Home Assistant Authors", email = "hello@home-assistant.io"} 12 | ] 13 | keywords = ["docker", "home-assistant"] 14 | classifiers = [ 15 | "Intended Audience :: Developers", 16 | "Operating System :: POSIX :: Linux", 17 | "Topic :: Home Automation", 18 | "Development Status :: 5 - Production/Stable", 19 | ] 20 | 21 | [project.urls] 22 | "Homepage" = "https://home-assistant.io/" 23 | 24 | [tool.setuptools.packages.find] 25 | include = ["builder*"] 26 | 27 | [tool.ruff.lint] 28 | select = ["ALL"] 29 | 30 | ignore = [ 31 | "D203", # Conflicts with other rules 32 | "D213", # Conflicts with other rules 33 | 34 | "FBT001", # Boolean-typed positional argument in function definition 35 | 36 | "T201", # print found 37 | ] 38 | 39 | [tool.ruff.lint.per-file-ignores] 40 | "tests/**" = [ 41 | "D100", # Missing docstring in public module 42 | "D103", # Missing docstring in public function 43 | "D104", # Missing docstring in public package 44 | 45 | "S101", # Use of assert detected 46 | 47 | "SLF001", # Private member accessed 48 | ] 49 | 50 | [tool.ruff.lint.pylint] 51 | max-args = 6 52 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .coverage 40 | .coverage.* 41 | .cache 42 | nosetests.xml 43 | coverage.xml 44 | *,cover 45 | .hypothesis/ 46 | 47 | # Translations 48 | *.mo 49 | *.pot 50 | 51 | # Django stuff: 52 | *.log 53 | local_settings.py 54 | 55 | # Flask stuff: 56 | instance/ 57 | .webassets-cache 58 | 59 | # Scrapy stuff: 60 | .scrapy 61 | 62 | # Sphinx documentation 63 | docs/_build/ 64 | 65 | # PyBuilder 66 | target/ 67 | 68 | # IPython Notebook 69 | .ipynb_checkpoints 70 | 71 | # pyenv 72 | .python-version 73 | 74 | # celery beat schedule file 75 | celerybeat-schedule 76 | 77 | # dotenv 78 | .env 79 | 80 | # virtualenv 81 | venv/ 82 | ENV/ 83 | 84 | # Spyder project settings 85 | .spyderproject 86 | 87 | # Rope project settings 88 | .ropeproject 89 | 90 | # pytest 91 | .pytest_cache/ 92 | 93 | # pylint 94 | .pylint.d/ 95 | 96 | # VS Code 97 | .vscode/ 98 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/astral-sh/ruff-pre-commit 3 | rev: v0.14.3 4 | hooks: 5 | - id: ruff-check 6 | args: 7 | - --fix 8 | - --unsafe-fixes 9 | - id: ruff-format 10 | - repo: https://github.com/asottile/pyupgrade 11 | rev: v3.21.0 12 | hooks: 13 | - id: pyupgrade 14 | args: 15 | - --py313-plus 16 | - repo: https://github.com/pre-commit/pre-commit-hooks 17 | rev: v6.0.0 18 | hooks: 19 | - id: check-executables-have-shebangs 20 | - id: check-merge-conflict 21 | - id: no-commit-to-branch 22 | args: [--branch, master] 23 | - repo: https://github.com/rbubley/mirrors-prettier 24 | rev: v3.6.2 25 | hooks: 26 | - id: prettier 27 | additional_dependencies: 28 | - prettier@3.6.2 29 | - prettier-plugin-sort-json@4.1.1 30 | exclude_types: 31 | - python 32 | exclude: ^uv.lock$ 
33 | - repo: local 34 | hooks: 35 | # Run mypy through our wrapper script in order to get the possible 36 | # pyenv and/or virtualenv activated; it may not have been e.g. if 37 | # committing from a GUI tool that was not launched from an activated 38 | # shell. 39 | - id: mypy 40 | name: mypy 41 | entry: script/run-in-env.sh mypy 42 | language: script 43 | require_serial: true 44 | types_or: [python, pyi] 45 | - id: pylint 46 | name: pylint 47 | entry: script/run-in-env.sh pylint 48 | language: script 49 | require_serial: true 50 | types_or: [python, pyi] 51 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: "Publish" 2 | 3 | on: 4 | release: 5 | types: [published] 6 | push: 7 | branches: 8 | - master 9 | pull_request: 10 | branches: 11 | - master 12 | 13 | jobs: 14 | publish: 15 | name: Publish builder 16 | runs-on: ${{ matrix.runs-on }} 17 | strategy: 18 | fail-fast: False 19 | matrix: 20 | abi: ["cp313", "cp314"] 21 | tag: ["musllinux_1_2"] 22 | arch: ["aarch64", "amd64"] 23 | include: 24 | - abi: cp313 25 | base: 3.13-alpine3.22 26 | - abi: cp314 27 | base: 3.14-alpine3.22 28 | 29 | - arch: aarch64 30 | platform: linux/arm64 31 | runs-on: ubuntu-24.04-arm 32 | - arch: amd64 33 | platform: linux/amd64 34 | runs-on: ubuntu-latest 35 | steps: 36 | - name: Checkout the repository 37 | uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 38 | 39 | - shell: bash 40 | id: info 41 | run: | 42 | name="ghcr.io/home-assistant/wheels/${{ matrix.arch }}/${{ matrix.tag }}/${{ matrix.abi }}" 43 | 44 | version=$(echo "${{ github.ref }}" | awk -F"/" '{print $NF}' ) 45 | if [ "${version}" = "master" ]; then 46 | version=dev 47 | fi 48 | 49 | echo "name=$name" >> $GITHUB_OUTPUT 50 | echo "version=$version" >> $GITHUB_OUTPUT 51 | 52 | - name: Login to GitHub Container Registry 53 | uses: 
docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 54 | with: 55 | registry: ghcr.io 56 | username: ${{ github.repository_owner }} 57 | password: ${{ secrets.GITHUB_TOKEN }} 58 | 59 | - name: Build Docker image 60 | uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0 61 | with: 62 | context: . # So action will not pull the repository again 63 | push: ${{ github.event_name != 'pull_request' }} 64 | pull: true 65 | platforms: ${{ matrix.platform }} 66 | cache-from: type=registry,ref=${{ steps.info.outputs.name }}:dev 67 | build-args: | 68 | BUILD_FROM=ghcr.io/home-assistant/${{ matrix.arch }}-base-python:${{ matrix.base }} 69 | BUILD_ARCH=${{ matrix.arch }} 70 | CPYTHON_ABI=${{ matrix.abi }} 71 | tags: | 72 | ${{ steps.info.outputs.name }}:${{ steps.info.outputs.version }} 73 | ${{ steps.info.outputs.name }}:${{ github.sha }} 74 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """Common test functions.""" 2 | 3 | from collections.abc import Generator 4 | from unittest.mock import patch 5 | 6 | import pytest 7 | 8 | # The test makes a fake index with an arbitrary set of wheels and versions based on 9 | # behavior the tests need to exercise. The test will adjust the input packages and 10 | # versions to exercise different corner cases. 
11 | TEST_INDEX_FILES = [ 12 | "aiohttp-3.6.0-cp310-cp310-musllinux_1_2_i686.whl", 13 | "aiohttp-3.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", 14 | "aiohttp-3.7.3-cp310-cp310-musllinux_1_2_x86_64.whl", 15 | "aiohttp-3.7.4-cp310-cp310-musllinux_1_2_x86_64.whl", 16 | "google_cloud_pubsub-2.1.0-py2.py3-none-any.whl", 17 | "grpcio-1.31.0-cp310-cp310-musllinux_1_2_x86_64.whl", 18 | "aioconsole-0.4.1-py3-none-any.whl", 19 | "aioconsole-0.4.2-py3-none-any.whl", 20 | ] 21 | 22 | 23 | @pytest.fixture(autouse=True) 24 | def mock_index_data() -> Generator[None]: 25 | """Prepare a fake existing wheel index for use in tests.""" 26 | # Mimc the HTML of a webserver autoindex. 27 | content = "\n".join( 28 | f'{wheel} 28-May-2021 09:53 38181515' 29 | for wheel in TEST_INDEX_FILES 30 | ) 31 | with patch("builder.infra.requests.get") as mock_request_get: 32 | mock_request_get.return_value.status_code = 200 33 | mock_request_get.return_value.text = content 34 | yield 35 | 36 | 37 | @pytest.fixture(autouse=True) 38 | def sys_arch() -> Generator[None]: 39 | """Patch system arch.""" 40 | with ( 41 | patch("builder.utils.build_arch", return_value="amd64"), 42 | patch("builder.wheel.build_arch", return_value="amd64"), 43 | ): 44 | yield 45 | 46 | 47 | @pytest.fixture(autouse=True) 48 | def sys_abi() -> Generator[None]: 49 | """Patch system abi.""" 50 | with ( 51 | patch("builder.utils.build_abi", return_value="cp310"), 52 | patch("builder.wheel.build_abi", return_value="cp310"), 53 | ): 54 | yield 55 | 56 | 57 | @pytest.fixture(autouse=True) 58 | def sys_alpine() -> Generator[None]: 59 | """Patch system abi.""" 60 | with ( 61 | patch("builder.utils.alpine_version", return_value=("3", "16")), 62 | patch("builder.wheel.alpine_version", return_value=("3", "16")), 63 | ): 64 | yield 65 | 66 | 67 | @pytest.fixture(autouse=True) 68 | def sys_musl_version() -> Generator[None]: 69 | """Patch alpine musl version lookup table.""" 70 | with patch("builder.wheel._ALPINE_MUSL_VERSION", new={("3", "16"): 
(1, 2)}): 71 | yield 72 | -------------------------------------------------------------------------------- /tests/test_wheel.py: -------------------------------------------------------------------------------- 1 | """Tests for pip module.""" 2 | 3 | from pathlib import Path 4 | 5 | import pytest 6 | 7 | from builder import wheel 8 | 9 | # pylint: disable=protected-access 10 | 11 | 12 | @pytest.mark.parametrize( 13 | ("test", "result"), 14 | [ 15 | ("cchardet-2.1.7-cp310-cp310-musllinux_1_2_aarch64.whl", "aarch64"), 16 | ("cchardet-2.1.7-cp310-cp310-musllinux_1_2_x86_64.whl", "x86_64"), 17 | ], 18 | ) 19 | def test_musllinux_regex(test: str, result: str) -> None: 20 | """Test musllinux regex.""" 21 | parse = wheel._RE_MUSLLINUX_PLATFORM.search(test) 22 | assert parse 23 | assert parse["arch"] == result 24 | 25 | 26 | @pytest.mark.parametrize( 27 | "test", 28 | [ 29 | "cchardet-2.1.7-cp310-cp310-linux_aarch64.whl", 30 | "cchardet-2.1.7-cp310-cp310-linux_x86_64.whl", 31 | ], 32 | ) 33 | def test_musllinux_regex_wrong(test: str) -> None: 34 | """Test linux regex.""" 35 | assert wheel._RE_MUSLLINUX_PLATFORM.search(test) is None 36 | 37 | 38 | @pytest.mark.parametrize( 39 | "test", 40 | [ 41 | "cchardet-2.1.7-cp310-cp310-linux_aarch64.whl", 42 | "cchardet-2.1.7-cp310-cp310-linux_x86_64.whl", 43 | ], 44 | ) 45 | def test_linux_regex(test: str) -> None: 46 | """Test linux regex.""" 47 | assert wheel._RE_LINUX_PLATFORM.search(test) 48 | 49 | 50 | @pytest.mark.parametrize( 51 | "test", 52 | [ 53 | "cchardet-2.1.7-cp310-cp310-musllinux_1_2_aarch64.whl", 54 | "cchardet-2.1.7-cp310-cp310-musllinux_1_2_x86_64.whl", 55 | ], 56 | ) 57 | def test_linux_regex_wrong(test: str) -> None: 58 | """Test linux regex not found.""" 59 | assert wheel._RE_LINUX_PLATFORM.search(test) is None 60 | 61 | 62 | @pytest.mark.parametrize( 63 | ("abi", "platform"), 64 | [ 65 | ("cp310", "musllinux_1_2_x86_64"), 66 | ("cp310", "musllinux_1_1_x86_64"), 67 | ("cp310", "musllinux_1_0_x86_64"), 68 | 
("abi3", "musllinux_1_2_x86_64"), 69 | ("abi3", "musllinux_1_1_x86_64"), 70 | ("abi3", "musllinux_1_0_x86_64"), 71 | ("none", "any"), 72 | ], 73 | ) 74 | def test_working_abi_platform(abi: str, platform: str) -> None: 75 | """Test working abi/platform variations.""" 76 | assert wheel.check_abi_platform(abi, platform) 77 | 78 | 79 | @pytest.mark.parametrize( 80 | ("abi", "platform"), 81 | [ 82 | ("cp311", "musllinux_1_2_x86_64"), 83 | ("cp310", "musllinux_1_2_i686"), 84 | ("cp310", "musllinux_1_3_x86_64"), 85 | ("abi3", "musllinux_1_3_x86_64"), 86 | ], 87 | ) 88 | def test_not_working_abi_platform(abi: str, platform: str) -> None: 89 | """Test not working abi/platform variations.""" 90 | assert not wheel.check_abi_platform(abi, platform) 91 | 92 | 93 | def test_fix_wheel_unmatch(tmp_path: Path) -> None: 94 | """Test removing an existing wheel that are not match requirements.""" 95 | p = tmp_path / "google_cloud_pubsub-2.9.0-py2.py3-none-any.whl" 96 | p.touch() 97 | p = tmp_path / "grpcio-1.31.0-cp39-cp39-musllinux_1_1_x86_64.whl" 98 | p.touch() 99 | assert {p.name for p in tmp_path.glob("*.whl")} == { 100 | "grpcio-1.31.0-cp39-cp39-musllinux_1_1_x86_64.whl", 101 | "google_cloud_pubsub-2.9.0-py2.py3-none-any.whl", 102 | } 103 | 104 | assert wheel.fix_wheels_unmatch_requirements(tmp_path) == {"grpcio": "1.31.0"} 105 | 106 | # grpc is removed 107 | assert {p.name for p in tmp_path.glob("*.whl")} == { 108 | "google_cloud_pubsub-2.9.0-py2.py3-none-any.whl", 109 | } 110 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | env: 12 | PYTHON_VERSION: "3.13" 13 | 14 | jobs: 15 | tests: 16 | name: Tests 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: Check out code from GitHub 20 | uses: actions/checkout@v6.0.1 21 | 22 | 
- name: Set up Python ${{ env.PYTHON_VERSION }} 23 | uses: actions/setup-python@v6.1.0 24 | with: 25 | python-version: ${{ env.PYTHON_VERSION }} 26 | 27 | - name: Install requirements 28 | run: python3 -m pip install -r requirements.txt -r requirements_tests.txt 29 | 30 | - name: Run pytest 31 | run: pytest 32 | 33 | prek: 34 | name: Prek 35 | runs-on: ubuntu-latest 36 | steps: 37 | - name: Check out code from GitHub 38 | uses: actions/checkout@v6.0.1 39 | 40 | - name: Set up Python ${{ env.PYTHON_VERSION }} 41 | uses: actions/setup-python@v6.1.0 42 | with: 43 | python-version: ${{ env.PYTHON_VERSION }} 44 | 45 | - name: Install requirements 46 | run: python3 -m pip install -r requirements.txt -r requirements_tests.txt 47 | 48 | - name: Run prek 49 | uses: j178/prek-action@91fd7d7cf70ae1dee9f4f44e7dfa5d1073fe6623 # v1.0.11 50 | env: 51 | PREK_SKIP: no-commit-to-branch 52 | 53 | hadolint: 54 | name: Hadolint 55 | runs-on: ubuntu-latest 56 | steps: 57 | - name: Check out code from GitHub 58 | uses: actions/checkout@v6.0.1 59 | 60 | - name: Lint Dockerfile 61 | uses: hadolint/hadolint-action@v3.3.0 62 | with: 63 | dockerfile: "Dockerfile" 64 | failure-threshold: warning 65 | 66 | test: 67 | name: Test wheels ${{ matrix.arch }}-${{ matrix.abi }}-${{ matrix.tag }} 68 | runs-on: ${{ matrix.runs-on }} 69 | strategy: 70 | fail-fast: false 71 | matrix: 72 | abi: ["cp313", "cp314"] 73 | tag: ["musllinux_1_2"] 74 | arch: ["aarch64", "amd64"] 75 | include: 76 | - runs-on: ubuntu-latest 77 | - arch: aarch64 78 | runs-on: ubuntu-24.04-arm 79 | 80 | steps: 81 | - name: Check out code from GitHub 82 | uses: actions/checkout@v6.0.1 83 | 84 | - name: Write env-file 85 | shell: bash 86 | run: | 87 | ( 88 | echo "CARGO_NET_GIT_FETCH_WITH_CLI=true" 89 | ) > .env_file 90 | 91 | - name: Build wheels 92 | uses: ./ 93 | with: 94 | abi: ${{ matrix.abi }} 95 | tag: ${{ matrix.tag }} 96 | arch: ${{ matrix.arch }} 97 | apk: "mariadb-dev;postgresql-dev;libffi-dev;openssl-dev" 98 | skip-binary: 
"orjson" 99 | env-file: True 100 | test: True 101 | requirements: "requirements_wheels_test.txt" 102 | 103 | test_local_repo: 104 | name: Test publishing to a local repository 105 | runs-on: ubuntu-latest 106 | 107 | steps: 108 | - name: Check out code from GitHub 109 | uses: actions/checkout@v6.0.1 110 | 111 | - name: Write env-file 112 | shell: bash 113 | run: | 114 | ( 115 | echo "CARGO_NET_GIT_FETCH_WITH_CLI=true" 116 | ) > .env_file 117 | 118 | - name: Build wheels 119 | uses: ./ 120 | with: 121 | abi: cp314 122 | tag: musllinux_1_2 123 | arch: amd64 124 | apk: "mariadb-dev;postgresql-dev;libffi-dev;openssl-dev" 125 | skip-binary: "orjson" 126 | env-file: True 127 | requirements: "requirements_wheels_test.txt" 128 | local-wheels-repo-path: "/tmp/local_wheels" 129 | wheels-user: "" 130 | wheels-host: "" 131 | 132 | - name: Check for local wheels 133 | shell: bash 134 | run: | 135 | missing=0 136 | while IFS= read -r line || [[ -n "$line" ]]; do 137 | # Skip empty lines 138 | [[ -z "$line" ]] && continue 139 | # Extract package name (before ==) 140 | pkg_name="${line%%==*}" 141 | # Normalize: replace - with _, convert to lowercase 142 | normalized=$(echo "$pkg_name" | tr '[:upper:]-' '[:lower:]_') 143 | # Check if wheel exists (case-insensitive glob) 144 | if ! 
compgen -G "/tmp/local_wheels/musllinux/${normalized}-"*.whl > /dev/null; then 145 | echo "::error::Missing wheel for package: $pkg_name" 146 | missing=1 147 | else 148 | echo "Found wheel for package: $pkg_name" 149 | fi 150 | done < requirements_wheels_test.txt 151 | exit $missing 152 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | name: "Home Assistant wheels builder" 2 | description: "Builds and publishes python wheels" 3 | inputs: 4 | tag: 5 | description: "The musllinux tag for the builder that should be used" 6 | required: true 7 | abi: 8 | description: "CPython ABI that is used" 9 | required: true 10 | arch: 11 | description: "Build architecture" 12 | required: true 13 | apk: 14 | description: "apk packages that should be installed" 15 | default: "" 16 | pip: 17 | description: "pip packages that should be installed" 18 | default: "" 19 | path: 20 | description: "The path to be used for the builder" 21 | default: "" 22 | prebuild-dir: 23 | description: "The directory for prebuild" 24 | default: "" 25 | env-file: 26 | description: "Set to true if the builder should use a env file" 27 | default: false 28 | requirements: 29 | description: "The requirements file" 30 | default: "" 31 | requirements-diff: 32 | description: "The requirements diff file" 33 | default: "" 34 | constraints: 35 | description: "The constraints file" 36 | default: "" 37 | local: 38 | description: "Set to true if local" 39 | default: false 40 | test: 41 | description: "Set to true if not uploading wheels" 42 | default: false 43 | single: 44 | description: "Set to true if should build each wheel as a single prosess" 45 | default: false 46 | name: 47 | description: "Job name" 48 | default: "Wheels" 49 | skip-binary: 50 | description: "Skip binaries" 51 | default: "" 52 | wheels-key: 53 | description: "SSH keys for the wheels host" 54 | required: false 55 | 
  wheels-host:
    description: "wheels host URL"
    default: "wheels.hass.io"
  wheels-user:
    description: "User for wheels host"
    default: "wheels"
  wheels-index:
    description: "The wheels index URL"
    default: "https://wheels.home-assistant.io"
  local-wheels-repo-path:
    description: "Path to a local folder to copy the repository to (wheels-host and wheels-user must be empty)"
    default: ""

runs:
  using: "composite"
  steps:
    # Derive the builder image tag from the last segment of the action path
    # (the checked-out ref); "master" or an empty segment maps to "dev".
    - shell: bash
      id: version
      run: |
        input="${GITHUB_ACTION_PATH##*/}"
        if [[ "${input}" == "master" ]] || [[ -z "${input}" ]]; then
          input="dev"
        fi
        echo "version=${input}" >> $GITHUB_OUTPUT

    # Prepare SSH credentials only for a real upload run: skipped in test
    # mode and when publishing to a local repository path.
    - shell: bash
      run: |
        if [[ "${{ inputs.test }}" =~ false|False ]] && [ -z "${{ inputs.local-wheels-repo-path }}" ]; then
          # Write Key
          mkdir -p .ssh
          echo -e "-----BEGIN RSA PRIVATE KEY-----\n${{ inputs.wheels-key }}\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
          chmod 600 .ssh/id_rsa

          # Validate & update known_hosts
          ssh-keygen -y -e -f .ssh/id_rsa
          ssh-keyscan -H ${{ inputs.wheels-host }} >> .ssh/known_hosts
          chmod 600 .ssh/*
        fi

    # Pull the prebuilt builder image matching arch/tag/abi and expose its
    # full name to later steps.
    - shell: bash
      id: pull
      run: |
        name="ghcr.io/home-assistant/wheels/${{ inputs.arch }}/${{ inputs.tag }}/${{ inputs.abi }}:${{ steps.version.outputs.version }}"
        docker pull "$name"
        echo "name=$name" >> $GITHUB_OUTPUT

    # Translate action inputs into builder CLI arguments ("build" array) and
    # docker run options ("docker" array); both are emitted as step outputs.
    - shell: bash
      id: options
      run: |
        declare -a build
        declare -a docker

        # Data Path
        if [ -n "${{ inputs.path }}" ]; then
          data_path="${{ github.workspace }}/${{ inputs.path }}"
        else
          data_path="${{ github.workspace }}"
        fi

        # Environment
        if [[ "${{ inputs.env-file }}" =~ true|True ]] && [ -f .env_file ]; then
          docker+=("--env-file .env_file")
        fi
        if [ -f "${{ inputs.requirements }}" ]; then
          build+=("--requirement ${{ inputs.requirements }}")
        fi
        if [ -f "${{ inputs.requirements-diff }}" ]; then
          build+=("--requirement-diff ${{ inputs.requirements-diff }}")
        fi
        if [ -f "${{ inputs.constraints }}" ]; then
          build+=("--constraint ${{ inputs.constraints }}")
        fi
        if [ -d "${{ inputs.prebuild-dir }}" ]; then
          build+=("--prebuild-dir ${{ github.workspace }}/${{ inputs.prebuild-dir }}")
        fi
        if [[ "${{ inputs.single }}" =~ true|True ]]; then
          build+=("--single")
        fi
        if [[ "${{ inputs.local }}" =~ true|True ]]; then
          build+=("--local")
        fi
        if [[ "${{ inputs.test }}" =~ true|True ]]; then
          build+=("--test")
        fi
        if [ -n "${{ inputs.skip-binary }}" ]; then
          build+=("--skip-binary \"${{ inputs.skip-binary }}\"")
        fi
        if [ -n "${{ inputs.apk }}" ]; then
          build+=("--apk \"${{ inputs.apk }}\"")
        fi
        if [ -n "${{ inputs.pip }}" ]; then
          build+=("--pip \"${{ inputs.pip }}\"")
        fi
        # Local publishing is mutually exclusive with a remote wheels host;
        # the non-empty defaults of wheels-user/wheels-host must be cleared.
        if [ -n "${{ inputs.local-wheels-repo-path }}" ]; then
          if [ -n "${{ inputs.wheels-user }}" ] || [ -n "${{ inputs.wheels-host }}" ]; then
            echo "::error::Inputs wheels-user and wheels-host must be empty when local-wheels-repo-path is used"
            exit 1
          fi
          build+=("--remote \"/data/output\"")
        else
          build+=("--remote \"${{ inputs.wheels-user }}@${{ inputs.wheels-host }}:/opt/wheels\"")
        fi


        echo "build=${build[@]}" >> $GITHUB_OUTPUT
        echo "docker=${docker[@]}" >> $GITHUB_OUTPUT
        echo "path=$data_path" >> $GITHUB_OUTPUT

    - shell: bash
      id: host-info
      run: |
        echo "host-machine=$(uname -m)" >> $GITHUB_OUTPUT

    # Create (but do not start) the builder container with the assembled
    # docker options and builder arguments.
    - shell: bash
      run: |
        echo "Create container"
        docker create --name "${{ inputs.name }}" -t \
          --workdir /data \
          ${{ steps.options.outputs.docker }} \
          ${{ steps.pull.outputs.name }} \
          --index "${{ inputs.wheels-index }}" \
          --upload rsync \
          ${{ steps.options.outputs.build }}

    # Copy the workspace (and, for upload runs, the SSH material) into the
    # container before starting it.
    - shell: bash
      run: |
        echo "Copy repository and SSH files to the container"
        docker cp "${{ steps.options.outputs.path }}/." "${{ inputs.name }}:/data"
        if [[ "${{ inputs.test }}" =~ false|False ]] && [ -z "${{ inputs.local-wheels-repo-path }}" ]; then
          docker cp -a .ssh/ "${{ inputs.name }}:/root/.ssh"
        fi

    # Run the builder, retrying up to three times on generic failures.
    # Exit codes 0, 109, and 80 break the loop immediately (109/80 are
    # treated as non-retryable builder results); the last code is exported.
    - shell: bash
      id: build
      run: |
        set +e
        for i in {1..3}; do
          echo "$i attempt on starting the container"
          docker start -a "${{ inputs.name }}"
          return_val=$?
          if [ ${return_val} -ne 0 ] && [ ${return_val} -ne 109 ] && [ ${return_val} -ne 80 ]; then
            continue
          fi
          break
        done
        echo "return_val=$return_val" >> $GITHUB_OUTPUT

    # When publishing locally, copy the produced repository out of the
    # container into the requested host path.
    - shell: bash
      if: ${{ inputs.local-wheels-repo-path }}
      run: |
        docker cp "${{ inputs.name }}:/data/output/." "${{ inputs.local-wheels-repo-path }}"

    # Always remove the container, then propagate the builder's exit code.
    - shell: bash
      run: |
        docker rm -f "${{ inputs.name }}"
        exit ${{ steps.build.outputs.return_val }}
-------------------------------------------------------------------------------- /tests/test_infra.py: --------------------------------------------------------------------------------
"""Tests for infra module."""

from pathlib import Path

from packaging.utils import canonicalize_name

from builder import infra


def test_extract_packages_from_index() -> None:
    """Test index package extraction."""
    # NOTE(review): the index content for https://example.com is presumably
    # faked by a fixture in conftest.py — confirm against conftest.
    package_index = infra.extract_packages_from_index("https://example.com")
    assert list(package_index.keys()) == [
        "aiohttp",
        "google-cloud-pubsub",
        "grpcio",
        "aioconsole",
    ]

    # All indexed versions for a package, in index order.
    assert [
        str(package.version) for package in package_index[canonicalize_name("aiohttp")]
    ] == [
        "3.6.1",
        "3.7.3",
        "3.7.4",
    ]


def test_check_available_binary_none() ->
None:
    """No-op when no binaries specified to skip."""
    package_index = infra.extract_packages_from_index("https://example.com")
    # ":none:" (pip's sentinel for "skip nothing") passes through unchanged.
    assert (
        infra.check_available_binary(
            package_index,
            ":none:",
            packages=[
                "aiohttp==3.7.4",
                "google_cloud_pubsub==2.1.0",
            ],
            constraints=[],
        )
        == ":none:"
    )


def test_check_available_binary_all() -> None:
    """Verify that the tool does not allow skipping all binaries."""
    package_index = infra.extract_packages_from_index("https://example.com")
    assert (
        infra.check_available_binary(
            package_index,
            ":all:",
            packages=[
                "aiohttp==3.7.4",
                "google_cloud_pubsub==2.1.0",
            ],
            constraints=[],
        )
        == ":none:"
    )


def test_check_available_binary_version_present() -> None:
    """Test to skip a binary where the package version is already in the index."""
    package_index = infra.extract_packages_from_index("https://example.com")
    # aiohttp 3.7.4 exists in the index, so nothing is left to skip.
    assert (
        infra.check_available_binary(
            package_index,
            "aiohttp",
            packages=[
                "aiohttp==3.7.4",
                "google_cloud_pubsub==2.1.0",
            ],
            constraints=[],
        )
        == ":none:"
    )


def test_check_available_binary_version_missing() -> None:
    """Test to skip a binary where the package version is not in the index."""
    package_index = infra.extract_packages_from_index("https://example.com")
    # The requested version is absent from the index, so the skip is kept.
    assert (
        infra.check_available_binary(
            package_index,
            "aiohttp",
            packages=[
                "aiohttp==3.7.5",  # Not in the index
                "google_cloud_pubsub==2.1.0",
            ],
            constraints=[],
        )
        == "aiohttp"
    )


def test_check_available_binary_implicit_dep_skipped() -> None:
    """Test case where skip binary lists an implicit dep which is ignored."""
    package_index = infra.extract_packages_from_index("https://example.com")
    # grpcio is not an explicit package nor a constraint, so it is ignored.
    assert (
        infra.check_available_binary(
            package_index,
            "aiohttp;grpcio",
            packages=[
                "aiohttp==3.7.4",
                "google_cloud_pubsub==2.1.0",
            ],
            constraints=[],
        )
        == ":none:"
    )


def test_check_available_binary_skip_constraint() -> None:
    """Test case where skip binary is for constraint in the index."""
    package_index = infra.extract_packages_from_index("https://example.com")
    assert (
        infra.check_available_binary(
            package_index,
            "aiohttp;grpcio",
            packages=[
                "aiohttp==3.7.4",
                "google_cloud_pubsub==2.1.0",
            ],
            constraints=[
                "grpcio==1.31.0",  # Already exists in index
            ],
        )
        == ":none:"
    )


def test_check_available_binary_for_missing_constraint() -> None:
    """Test case where skip binary is for constraint not in the index."""
    package_index = infra.extract_packages_from_index("https://example.com")
    assert (
        infra.check_available_binary(
            package_index,
            "aiohttp;grpcio",
            packages=[
                "aiohttp==3.7.4",
                "google_cloud_pubsub==2.1.0",
            ],
            constraints=[
                "grpcio==1.43.0",  # Not in index
            ],
        )
        == "grpcio"
    )


def test_check_available_binary_normalized_package_names() -> None:
    """Test package names are normalized before checking package index."""
    package_index = infra.extract_packages_from_index("https://example.com")
    assert list(package_index.keys()) == [  # normalized spelling from index
        "aiohttp",
        "google-cloud-pubsub",
        "grpcio",
        "aioconsole",
    ]
    # Mixed-case / mixed-separator spellings must match the normalized index.
    assert (
        infra.check_available_binary(
            package_index,
            "AIOhttp;GRPcio",
            packages=[
                "aioHTTP==3.7.4",
                "google_cloud-PUBSUB==2.1.0",
            ],
            constraints=[
                "grpcIO==1.31.0",  # Already exists in index
            ],
        )
        == ":none:"
    )


def test_remove_local_wheel(tmp_path: Path) -> None:
    """Test removing an existing wheel."""
    package_index = infra.extract_packages_from_index("https://example.com")

    p = tmp_path / "google_cloud_pubsub-2.9.0-py2.py3-none-any.whl"
    p.touch()
    p = tmp_path / "grpcio-1.31.0-cp310-cp310-musllinux_1_2_x86_64.whl"
    p.touch()
    p = tmp_path / "grpcio-1.31.0-py3-none-any.whl"  # different platform tag
    p.touch()
    p = tmp_path / "some_other_file.txt"  # other files are ignored
    p.touch()
    assert {p.name for p in tmp_path.glob("*.whl")} == {
        "grpcio-1.31.0-cp310-cp310-musllinux_1_2_x86_64.whl",
        "grpcio-1.31.0-py3-none-any.whl",
        "google_cloud_pubsub-2.9.0-py2.py3-none-any.whl",
    }

    infra.remove_local_wheels(
        package_index,
        skip_exists="grpcio",
        packages=[
            "google_cloud_pubsub==2.9.0",
            "grpcio==1.31.0",  # Exists in index
        ],
        wheels_dir=tmp_path,
    )

    # both grpcio wheels are removed
    assert {p.name for p in tmp_path.glob("*.whl")} == {
        "google_cloud_pubsub-2.9.0-py2.py3-none-any.whl",
    }


def test_remove_local_wheel_preserves_newer(tmp_path: Path) -> None:
    """Test that the wheel is preserved when newer than in the index."""
    package_index = infra.extract_packages_from_index("https://example.com")

    p = tmp_path / "google_cloud_pubsub-2.9.0-py2.py3-none-any.whl"
    p.touch()
    p = tmp_path / "grpcio-1.43.0-cp310-cp310-musllinux_1_2_x86_64.whl"
    p.touch()
    assert {p.name for p in tmp_path.glob("*.whl")} == {
        "grpcio-1.43.0-cp310-cp310-musllinux_1_2_x86_64.whl",
        "google_cloud_pubsub-2.9.0-py2.py3-none-any.whl",
    }

    infra.remove_local_wheels(
        package_index,
        skip_exists="grpcio",
        packages=[
            "google_cloud_pubsub==2.9.0",
            "grpcio==1.43.0",  # Newer than index
        ],
        wheels_dir=tmp_path,
    )

    # grpcio is not removed
    assert {p.name for p in tmp_path.glob("*.whl")} == {
        "grpcio-1.43.0-cp310-cp310-musllinux_1_2_x86_64.whl",
        "google_cloud_pubsub-2.9.0-py2.py3-none-any.whl",
    }


def test_remove_local_wheel_normalized_package_names(tmp_path: Path) -> None:
    """Test package names are normalized before removing existing wheels."""
    package_index = infra.extract_packages_from_index("https://example.com")

    p = tmp_path / "google_cloud_pubsub-2.1.0-py2.py3-none-any.whl"
    p.touch()
    p = tmp_path / "grpcio-1.31.0-cp310-cp310-musllinux_1_2_x86_64.whl"
    p.touch()
    assert {p.name for p in tmp_path.glob("*.whl")} == {
        "grpcio-1.31.0-cp310-cp310-musllinux_1_2_x86_64.whl",
        "google_cloud_pubsub-2.1.0-py2.py3-none-any.whl",
    }

    infra.remove_local_wheels(
        package_index,
        skip_exists="GRPcio;GOOGLE-cloud_pubsub",
        packages=[
            "google_cloud-PUBSUB==2.1.0",  # Exists in index
            "grpcIO==1.31.0",  # Exists in index
        ],
        wheels_dir=tmp_path,
    )

    # grpcio and google-cloud-pubsub are removed
    assert {p.name for p in tmp_path.glob("*.whl")} == set()


def test_remove_local_wheel_no_build_wheels(tmp_path: Path) -> None:
    """Test remove_local_wheels does not fail with skip_exists and no build wheels."""
    package_index = infra.extract_packages_from_index("https://example.com")
    assert {p.name for p in tmp_path.glob("*.whl")} == set()

    # Must be a no-op (and not raise) when the wheels dir is empty.
    infra.remove_local_wheels(
        package_index,
        skip_exists="grpcio",
        packages=[
            "grpcio==1.31.0",  # Exists in index
        ],
        wheels_dir=tmp_path,
    )
-------------------------------------------------------------------------------- /LICENSE: --------------------------------------------------------------------------------
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION,
AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | --------------------------------------------------------------------------------