├── .coveragerc ├── .git-blame-ignore-revs ├── .github ├── dependabot.yml └── workflows │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .travis.yml ├── CHANGELOG.md ├── LICENSE ├── MANIFEST.in ├── README.rst ├── conftest.py ├── noxfile.py ├── pyproject.toml ├── scripts ├── calculate_symbol_versions.py └── create-arch-wheels.sh ├── src └── auditwheel │ ├── __init__.py │ ├── __main__.py │ ├── _vendor │ ├── __init__.py │ └── wheel │ │ ├── LICENSE.txt │ │ ├── __init__.py │ │ └── pkginfo.py │ ├── architecture.py │ ├── condatools.py │ ├── elfutils.py │ ├── error.py │ ├── genericpkgctx.py │ ├── hashfile.py │ ├── json.py │ ├── lddtree.py │ ├── libc.py │ ├── main.py │ ├── main_lddtree.py │ ├── main_repair.py │ ├── main_show.py │ ├── patcher.py │ ├── policy │ ├── __init__.py │ ├── manylinux-policy.json │ └── musllinux-policy.json │ ├── repair.py │ ├── tmpdirs.py │ ├── tools.py │ ├── wheel_abi.py │ └── wheeltools.py └── tests ├── integration ├── __init__.py ├── arch-wheels │ ├── glibc │ │ ├── testsimple-0.0.1-cp313-cp313-linux_aarch64.whl │ │ ├── testsimple-0.0.1-cp313-cp313-linux_armv5l.whl │ │ ├── testsimple-0.0.1-cp313-cp313-linux_armv7l.whl │ │ ├── testsimple-0.0.1-cp313-cp313-linux_i686.whl │ │ ├── testsimple-0.0.1-cp313-cp313-linux_mips64.whl │ │ ├── testsimple-0.0.1-cp313-cp313-linux_ppc64le.whl │ │ ├── testsimple-0.0.1-cp313-cp313-linux_riscv64.whl │ │ ├── testsimple-0.0.1-cp313-cp313-linux_s390x.whl │ │ └── testsimple-0.0.1-cp313-cp313-linux_x86_64.whl │ └── musllinux_1_2 │ │ ├── testsimple-0.0.1-cp312-cp312-linux_aarch64.whl │ │ ├── testsimple-0.0.1-cp312-cp312-linux_armv6l.whl │ │ ├── testsimple-0.0.1-cp312-cp312-linux_armv7l.whl │ │ ├── testsimple-0.0.1-cp312-cp312-linux_i686.whl │ │ ├── testsimple-0.0.1-cp312-cp312-linux_ppc64le.whl │ │ ├── testsimple-0.0.1-cp312-cp312-linux_riscv64.whl │ │ ├── testsimple-0.0.1-cp312-cp312-linux_s390x.whl │ │ └── testsimple-0.0.1-cp312-cp312-linux_x86_64.whl ├── cffi-1.5.0-cp27-none-linux_x86_64.whl ├── conftest.py 
├── foo.f90 ├── fpewheel-0.0.0-cp35-cp35m-linux_x86_64.whl ├── internal_rpath │ ├── MANIFEST.in │ ├── Makefile │ ├── internal_rpath │ │ └── __init__.py │ ├── lib-src │ │ ├── a │ │ │ ├── a.c │ │ │ └── a.h │ │ └── b │ │ │ ├── b.c │ │ │ └── b.h │ ├── pyproject.toml │ ├── setup.cfg │ ├── setup.py │ └── src │ │ ├── example_a.pyx │ │ └── example_b.pyx ├── libffi.so.5 ├── multiple_top_level │ ├── MANIFEST.in │ ├── Makefile │ ├── lib-src │ │ ├── a │ │ │ ├── a.c │ │ │ └── a.h │ │ └── b │ │ │ ├── b.c │ │ │ └── b.h │ ├── pyproject.toml │ ├── setup.cfg │ ├── setup.py │ └── src │ │ ├── example_a.pyx │ │ └── example_b.pyx ├── nonpy_rpath │ ├── README.md │ ├── extensions │ │ ├── testcrypt.cpp │ │ └── testcrypt.h │ ├── nonpy_rpath.cpp │ ├── nonpy_rpath │ │ └── __init__.py │ ├── pyproject.toml │ └── setup.py ├── plumbum-1.6.8-py2.py3-none-any.whl ├── python_mscl-67.0.1.0-cp313-cp313-manylinux2014_aarch64.whl ├── python_snappy-0.5.2-pp260-pypy_41-linux_x86_64.whl ├── quick_check_numpy.py ├── sample_extension │ ├── pyproject.toml │ ├── setup.py │ └── src │ │ └── sample_extension.pyx ├── test_bundled_wheels.py ├── test_glibcxx_3_4_25 │ ├── pyproject.toml │ ├── setup.py │ └── testentropy.cpp ├── test_manylinux.py ├── test_nonplatform_wheel.py ├── testdependencies │ ├── dependency.c │ ├── dependency.h │ ├── pyproject.toml │ ├── setup.py │ └── testdependencies.c ├── testpackage │ ├── __init__.py │ ├── pyproject.toml │ ├── setup.py │ └── testpackage │ │ ├── __init__.py │ │ ├── testprogram.c │ │ └── testprogram_nodeps.c ├── testrpath │ ├── MANIFEST.in │ ├── a │ │ ├── a.c │ │ └── a.h │ ├── b │ │ ├── b.c │ │ └── b.h │ ├── pyproject.toml │ ├── setup.py │ └── src │ │ └── testrpath │ │ ├── __init__.py │ │ └── testrpath.c ├── testsimple │ ├── pyproject.toml │ ├── setup.py │ └── testsimple.c └── testzlib │ ├── pyproject.toml │ ├── setup.py │ └── testzlib.c └── unit ├── test-permissions.zip.xz ├── test_architecture.py ├── test_condatools.py ├── test_elfpatcher.py ├── test_elfutils.py ├── 
test_hashfile.py ├── test_json.py ├── test_lddtree.py ├── test_libc.py ├── test_main.py ├── test_policy.py ├── test_repair.py ├── test_tmpdirs.py ├── test_tools.py ├── test_wheel_abi.py └── test_wheeltools.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit= 3 | */_vendor/* 4 | 5 | [paths] 6 | source = 7 | src/auditwheel/ 8 | /auditwheel_src/src/auditwheel 9 | 10 | [report] 11 | exclude_lines = 12 | raise NotImplementedError 13 | -------------------------------------------------------------------------------- /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Conversion to black code style 2 | aa52a60c59cd0e703f836d7fc88eae992645e760 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Maintain dependencies for GitHub Actions 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "weekly" 8 | groups: 9 | actions: 10 | patterns: 11 | - "*" 12 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | branches-ignore: 6 | - 'dependabot/**' 7 | pull_request: 8 | 9 | concurrency: 10 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} 11 | cancel-in-progress: true 12 | 13 | jobs: 14 | pre-commit: 15 | name: Pre-commit checks (Black, Flake8, MyPy, ...) 
16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v4 19 | - uses: actions/setup-python@v5 20 | with: 21 | python-version: "3.9" 22 | - uses: pre-commit/action@v3.0.1 23 | 24 | test-dist: 25 | name: Test SDist & wheel 26 | needs: pre-commit 27 | runs-on: ubuntu-latest 28 | steps: 29 | - name: Checkout 30 | uses: actions/checkout@v4 31 | - name: Install CPython 3.9 32 | uses: actions/setup-python@v5 33 | with: 34 | python-version: 3.9 35 | architecture: x64 36 | - name: Run tests 37 | run: pipx run nox -s test-sdist-3.9 test-wheel-3.9 38 | 39 | test: 40 | name: CPython ${{ matrix.python }} ${{ matrix.platform[0] }} on ${{ matrix.platform[1] }} 41 | needs: test-dist 42 | runs-on: ${{ matrix.platform[1] }} 43 | strategy: 44 | fail-fast: false 45 | matrix: 46 | platform: 47 | - [ 'x86_64', 'ubuntu-24.04' ] 48 | python: [ '3.9', '3.10', '3.11', '3.12', '3.13' ] 49 | include: 50 | - platform: [ 'aarch64', 'ubuntu-24.04-arm' ] 51 | python: '3.12' 52 | - platform: [ 'i686', 'ubuntu-24.04' ] 53 | python: '3.12' 54 | - platform: [ 'armv7l', 'ubuntu-24.04-arm' ] 55 | python: '3.12' 56 | #- platform: [ 'ppc64le', 'ubuntu-24.04' ] 57 | # python: '3.12' 58 | # qemu: true 59 | #- platform: [ 's390x', 'ubuntu-24.04' ] 60 | # python: '3.12' 61 | # qemu: true 62 | steps: 63 | - name: Checkout 64 | uses: actions/checkout@v4 65 | - name: Setup cache 66 | uses: actions/cache@v4 67 | with: 68 | path: ~/.cache/auditwheel_tests 69 | key: python${{ matrix.python }}-${{ matrix.platform[0] }}-${{ hashFiles('**/test_manylinux.py') }} 70 | restore-keys: python${{ matrix.python }}-${{ matrix.platform[0] }}- 71 | - name: Install CPython ${{ matrix.python }} 72 | uses: actions/setup-python@v5 73 | with: 74 | python-version: "${{ matrix.python }}" 75 | allow-prereleases: true 76 | - name: Set up QEMU 77 | if: matrix.qemu 78 | uses: docker/setup-qemu-action@v3 79 | - name: Run tests 80 | run: pipx run nox -s tests-${{ matrix.python }} 81 | env: 82 | AUDITWHEEL_ARCH: ${{ 
matrix.platform[0] }} 83 | AUDITWHEEL_QEMU: ${{ matrix.qemu }} 84 | - name: Upload coverage to codecov 85 | uses: codecov/codecov-action@v5 86 | with: 87 | token: ${{ secrets.CODECOV_TOKEN }} 88 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | 55 | # Sphinx documentation 56 | docs/_build/ 57 | 58 | # PyBuilder 59 | target/ 60 | 61 | 62 | # Generated by test script 63 | *.zip 64 | wheelhoust-* 65 | tests/integration/testpackage/testpackage/testprogram 66 | tests/integration/testpackage/testpackage/testprogram_nodeps 67 | tests/integration/sample_extension/src/sample_extension.c 68 | 69 | # Downloaded by test script 70 | tests/integration/patchelf-0.17.2.1-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.musllinux_1_1_x86_64.whl 71 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See 
https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | 4 | default_language_version: 5 | python: python3.9 6 | 7 | exclude: ^src/auditwheel/_vendor/ 8 | 9 | repos: 10 | - repo: https://github.com/pre-commit/pre-commit-hooks 11 | rev: v5.0.0 12 | hooks: 13 | - id: check-builtin-literals 14 | - id: check-added-large-files 15 | - id: check-case-conflict 16 | - id: check-json 17 | - id: check-toml 18 | - id: check-yaml 19 | - id: debug-statements 20 | - id: end-of-file-fixer 21 | exclude: ^cache/ 22 | - id: mixed-line-ending 23 | - id: forbid-new-submodules 24 | - id: trailing-whitespace 25 | 26 | - repo: https://github.com/astral-sh/ruff-pre-commit 27 | rev: v0.11.11 28 | hooks: 29 | - id: ruff 30 | args: ["--fix", "--show-fixes"] 31 | - id: ruff-format 32 | 33 | - repo: https://github.com/pre-commit/mirrors-mypy 34 | rev: v1.15.0 35 | hooks: 36 | - id: mypy 37 | exclude: ^tests/integration/.*/.*$|^scripts/calculate_symbol_versions.py$ 38 | additional_dependencies: 39 | - types-requests 40 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | os: linux 2 | dist: jammy 3 | language: python 4 | python: "3.12" 5 | 6 | branches: 7 | except: 8 | - /^dependabot.*$/ 9 | 10 | jobs: 11 | include: 12 | - arch: ppc64le 13 | - arch: s390x 14 | allow_failures: 15 | - arch: ppc64le 16 | 17 | addons: 18 | apt: 19 | packages: 20 | - qemu-user-static 21 | 22 | services: 23 | - docker 24 | 25 | notifications: 26 | email: false 27 | 28 | install: 29 | - curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --keyring trustedkeys.gpg --import 30 | - curl -fsSLo ${HOME}/codecov.SHA256SUM https://uploader.codecov.io/latest/linux/codecov.SHA256SUM 31 | - curl -fsSLo ${HOME}/codecov.SHA256SUM.sig https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig 32 | - curl -fsSLo 
${HOME}/codecov https://uploader.codecov.io/latest/linux/codecov 33 | - gpgv ${HOME}/codecov.SHA256SUM.sig ${HOME}/codecov.SHA256SUM 34 | - pushd ${HOME} && shasum -a 256 -c codecov.SHA256SUM && popd 35 | - chmod +x ${HOME}/codecov 36 | - qemu-x86_64-static ${HOME}/codecov --version 37 | - pip install nox 38 | 39 | script: 40 | - nox -s tests-3.12 41 | 42 | after_success: 43 | - qemu-x86_64-static ${HOME}/codecov 44 | 45 | cache: 46 | directories: 47 | - $HOME/.cache/auditwheel_tests 48 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The auditwheel package is covered by the MIT license. 2 | 3 | :: 4 | 5 | The MIT License 6 | 7 | Copyright (c) 2016 Robert T. McGibbon 8 | 9 | Permission is hereby granted, free of charge, to any person obtaining a copy 10 | of this software and associated documentation files (the "Software"), to deal 11 | in the Software without restriction, including without limitation the rights 12 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 13 | copies of the Software, and to permit persons to whom the Software is 14 | furnished to do so, subject to the following conditions: 15 | 16 | The above copyright notice and this permission notice shall be included in 17 | all copies or substantial portions of the Software. 18 | 19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 20 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 21 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 22 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 23 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 24 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 25 | THE SOFTWARE. 
26 | 27 | 28 | 3rd party code and data 29 | ======================= 30 | 31 | Some code distributed within the auditwheel sources was developed by other 32 | projects. This code is distributed under its respective licenses that are 33 | listed below. 34 | 35 | delocate 36 | -------- 37 | The files tools.py and wheeltools.py were copied from delocate 38 | (https://github.com/matthew-brett/delocate) with minor modifications. 39 | 40 | Copyright (c) 2014-2015, Matthew Brett 41 | All rights reserved. 42 | 43 | Redistribution and use in source and binary forms, with or without 44 | modification, are permitted provided that the following conditions are met: 45 | 46 | 1. Redistributions of source code must retain the above copyright notice, this 47 | list of conditions and the following disclaimer. 48 | 49 | 2. Redistributions in binary form must reproduce the above copyright notice, 50 | this list of conditions and the following disclaimer in the documentation 51 | and/or other materials provided with the distribution. 52 | 53 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 54 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 55 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 56 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 57 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 58 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 59 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 60 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 61 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 62 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 63 | 64 | nibabel 65 | ------- 66 | The file tmpdirs.py was copied from nibabel (https://github.com/nipy/nibabel). 
67 | 68 | Copyright (c) 2009-2014 Matthew Brett 69 | Copyright (c) 2010-2013 Stephan Gerhard 70 | Copyright (c) 2006-2014 Michael Hanke 71 | Copyright (c) 2011 Christian Haselgrove 72 | Copyright (c) 2010-2011 Jarrod Millman 73 | Copyright (c) 2011-2014 Yaroslav Halchenko 74 | 75 | Permission is hereby granted, free of charge, to any person obtaining a copy 76 | of this software and associated documentation files (the "Software"), to deal 77 | in the Software without restriction, including without limitation the rights 78 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 79 | copies of the Software, and to permit persons to whom the Software is 80 | furnished to do so, subject to the following conditions: 81 | 82 | The above copyright notice and this permission notice shall be included in 83 | all copies or substantial portions of the Software. 84 | 85 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 86 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 87 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 88 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 89 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 90 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 91 | THE SOFTWARE. 
92 | 93 | pax-utils 94 | --------- 95 | Some of the ELF-handling code was copied from gentoo's pax-utils/lddtree.py, 96 | available at https://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-projects/pax-utils/lddtree.py 97 | 98 | Copyright 2012-2014 Gentoo Foundation 99 | Copyright 2012-2014 Mike Frysinger 100 | Copyright 2012-2014 The Chromium OS Authors 101 | Use of this source code is governed by a BSD-style license (BSD-3) 102 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst 2 | include LICENSE 3 | include CHANGELOG.md 4 | include src/auditwheel/policy/*.json 5 | include src/auditwheel/_vendor/wheel/LICENSE.txt 6 | 7 | graft tests 8 | 9 | exclude .coveragerc 10 | exclude .gitignore 11 | exclude .git-blame-ignore-revs 12 | exclude .pre-commit-config.yaml 13 | exclude .travis.yml 14 | exclude noxfile.py 15 | 16 | prune .github 17 | prune scripts 18 | prune tests/**/__pycache__ 19 | prune tests/**/*.egg-info 20 | prune tests/**/build 21 | 22 | global-exclude *.so .DS_Store 23 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | auditwheel 2 | ========== 3 | 4 | .. image:: https://travis-ci.org/pypa/auditwheel.svg?branch=main 5 | :target: https://travis-ci.org/pypa/auditwheel 6 | .. image:: https://badge.fury.io/py/auditwheel.svg 7 | :target: https://pypi.org/project/auditwheel 8 | .. image:: https://pepy.tech/badge/auditwheel/month 9 | :target: https://pepy.tech/project/auditwheel/month 10 | 11 | Auditing and relabeling of `PEP 600 manylinux_x_y 12 | `_, `PEP 513 manylinux1 13 | `_, `PEP 571 manylinux2010 14 | `_ and `PEP 599 manylinux2014 15 | `_ Linux wheels. 
16 | 17 | Overview 18 | -------- 19 | 20 | ``auditwheel`` is a command line tool to facilitate the creation of Python 21 | `wheel packages `_ for Linux (containing pre-compiled 22 | binary extensions) that are compatible with a wide variety of Linux distributions, 23 | consistent with the `PEP 600 manylinux_x_y 24 | `_, `PEP 513 manylinux1 25 | `_, `PEP 571 manylinux2010 26 | `_ and `PEP 599 manylinux2014 27 | `_ platform tags. 28 | 29 | ``auditwheel show``: shows external shared libraries that the wheel depends on 30 | (beyond the libraries included in the ``manylinux`` policies), and 31 | checks the extension modules for the use of versioned symbols that exceed 32 | the ``manylinux`` ABI. 33 | 34 | ``auditwheel repair``: copies these external shared libraries into the wheel itself, 35 | and automatically modifies the appropriate ``RPATH`` entries such that these libraries 36 | will be picked up at runtime. This accomplishes a similar result as if the libraries had 37 | been statically linked without requiring changes to the build system. Packagers are 38 | advised that bundling, like static linking, may implicate copyright concerns. 39 | 40 | Requirements 41 | ------------ 42 | - OS: Linux 43 | - Python: 3.9+ 44 | - `patchelf `_: 0.14+ 45 | 46 | Only systems that use `ELF 47 | `_-based linkage 48 | are supported (this should be essentially every Linux). 49 | 50 | In general, building ``manylinux1`` wheels requires running on a CentOS5 51 | machine, building ``manylinux2010`` wheels requires running on a CentOS6 52 | machine, and building ``manylinux2014`` wheels requires running on a CentOS7 53 | machine, so we recommend using the pre-built manylinux `Docker images 54 | `_, e.g. :: 55 | 56 | $ docker run -i -t -v `pwd`:/io quay.io/pypa/manylinux1_x86_64 /bin/bash 57 | 58 | Installation 59 | ------------ 60 | 61 | ``auditwheel`` can be installed using pip: 62 | 63 | .. 
code:: bash 64 | 65 | $ pip3 install auditwheel 66 | 67 | Examples 68 | -------- 69 | 70 | Inspecting a wheel: :: 71 | 72 | $ auditwheel show cffi-1.5.0-cp35-cp35m-linux_x86_64.whl 73 | 74 | cffi-1.5.0-cp35-cp35m-linux_x86_64.whl is consistent with the 75 | following platform tag: "linux_x86_64". 76 | 77 | The wheel references the following external versioned symbols in 78 | system-provided shared libraries: GLIBC_2.3. 79 | 80 | The following external shared libraries are required by the wheel: 81 | { 82 | "libc.so.6": "/lib64/libc-2.5.so", 83 | "libffi.so.5": "/usr/lib64/libffi.so.5.0.6", 84 | "libpthread.so.0": "/lib64/libpthread-2.5.so" 85 | } 86 | 87 | In order to achieve the tag platform tag "manylinux1_x86_64" the 88 | following shared library dependencies will need to be eliminated: 89 | 90 | libffi.so.5 91 | 92 | Repairing a wheel. :: 93 | 94 | $ auditwheel repair cffi-1.5.2-cp35-cp35m-linux_x86_64.whl 95 | Repairing cffi-1.5.2-cp35-cp35m-linux_x86_64.whl 96 | Grafting: /usr/lib64/libffi.so.5.0.6 97 | Setting RPATH: _cffi_backend.cpython-35m-x86_64-linux-gnu.so to "$ORIGIN/.libs_cffi_backend" 98 | Previous filename tags: linux_x86_64 99 | New filename tags: manylinux1_x86_64 100 | Previous WHEEL info tags: cp35-cp35m-linux_x86_64 101 | New WHEEL info tags: cp35-cp35m-manylinux1_x86_64 102 | 103 | Fixed-up wheel written to /wheelhouse/cffi-1.5.2-cp35-cp35m-manylinux1_x86_64.whl 104 | 105 | 106 | Limitations 107 | ----------- 108 | 109 | 1. ``auditwheel`` uses the `DT_NEEDED `_ 110 | information (like ``ldd``) from the Python extension modules to determine 111 | which system libraries they depend on. Code that dynamically 112 | loads libraries at runtime using ``ctypes`` / ``cffi`` (from Python) or 113 | ``dlopen`` (from C/C++) doesn't contain this information in a way that can 114 | be statically determined, so dependencies that are loaded via those 115 | mechanisms will be missed. 116 | 2. 
There's nothing we can do about "fixing" binaries if they were compiled and 117 | linked against a too-recent version of ``libc`` or ``libstdc++``. These 118 | libraries (and some others) use symbol versioning for backward 119 | compatibility. In general, this means that code that was compiled against an 120 | old version of ``glibc`` will run fine on systems with a newer version of 121 | ``glibc``, but code what was compiled on a new system won't / might not run 122 | on older system. 123 | 124 | So, to compile widely-compatible binaries, you're best off doing the build 125 | on an old Linux distribution, such as a manylinux Docker image. 126 | 127 | Testing 128 | ------- 129 | 130 | The tests can be run with ``nox``, which will automatically install 131 | test dependencies. 132 | 133 | Some of the integration tests also require a running and accessible Docker 134 | daemon. These tests will pull a number of docker images if they are not already 135 | available on your system, but it won't update existing images. 136 | To update these images manually, run:: 137 | 138 | docker pull python:3.9-slim-bookworm 139 | docker pull quay.io/pypa/manylinux1_x86_64 140 | docker pull quay.io/pypa/manylinux2010_x86_64 141 | docker pull quay.io/pypa/manylinux2014_x86_64 142 | docker pull quay.io/pypa/manylinux_2_28_x86_64 143 | docker pull quay.io/pypa/manylinux_2_34_x86_64 144 | docker pull quay.io/pypa/musllinux_1_2_x86_64 145 | 146 | You may also remove these images using ``docker rmi``. 147 | 148 | Code of Conduct 149 | --------------- 150 | 151 | Everyone interacting in the ``auditwheel`` project's codebases, issue trackers, 152 | chat rooms, and mailing lists is expected to follow the 153 | `PSF Code of Conduct`_. 154 | 155 | .. 
_PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md 156 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | 5 | 6 | @pytest.fixture(autouse=True, scope="session") 7 | def clean_env(): 8 | variables = ("AUDITWHEEL_PLAT", "AUDITWHEEL_ZIP_COMPRESSION_LEVEL") 9 | for var in variables: 10 | os.environ.pop(var, None) 11 | -------------------------------------------------------------------------------- /noxfile.py: -------------------------------------------------------------------------------- 1 | # /// script 2 | # dependencies = ["nox>=2025.2.9"] 3 | # /// 4 | 5 | from __future__ import annotations 6 | 7 | import os 8 | import sys 9 | from pathlib import Path 10 | 11 | import nox 12 | 13 | nox.needs_version = ">=2025.2.9" 14 | 15 | PYTHON_ALL_VERSIONS = ["3.9", "3.10", "3.11", "3.12", "3.13"] 16 | RUNNING_CI = "TRAVIS" in os.environ or "GITHUB_ACTIONS" in os.environ 17 | 18 | wheel = "" 19 | sdist = "" 20 | 21 | 22 | @nox.session(python=["3.9"], reuse_venv=True) 23 | def lint(session: nox.Session) -> None: 24 | """ 25 | Run linters on the codebase. 26 | """ 27 | session.install("pre-commit") 28 | session.run("pre-commit", "run", "--all-files") 29 | 30 | 31 | @nox.session(default=False) 32 | def coverage(session: nox.Session) -> None: 33 | """ 34 | Run coverage using unit tests. 
35 | """ 36 | pyproject = nox.project.load_toml("pyproject.toml") 37 | deps = nox.project.dependency_groups(pyproject, "coverage") 38 | session.install("-e", ".", *deps) 39 | session.run( 40 | "python", 41 | "-m", 42 | "pytest", 43 | "tests/unit", 44 | "--cov=auditwheel", 45 | "--cov-report=term-missing", 46 | ) 47 | 48 | 49 | def _docker_images(session: nox.Session) -> list[str]: 50 | tmp_dir = Path(session.create_tmp()) 51 | script = tmp_dir / "list_images.py" 52 | images_file = tmp_dir / "images.lst" 53 | script.write_text( 54 | rf""" 55 | import sys 56 | from pathlib import Path 57 | sys.path.append("./tests/integration") 58 | from test_manylinux import MANYLINUX_IMAGES 59 | images = "\n".join(MANYLINUX_IMAGES.values()) 60 | Path(r"{images_file}").write_text(images) 61 | """ 62 | ) 63 | session.run("python", str(script), silent=True) 64 | return images_file.read_text().splitlines() 65 | 66 | 67 | @nox.session(python=PYTHON_ALL_VERSIONS, default=False) 68 | def tests(session: nox.Session) -> None: 69 | """ 70 | Run tests. 71 | """ 72 | posargs = session.posargs 73 | dep_group = "coverage" if RUNNING_CI else "test" 74 | pyproject = nox.project.load_toml("pyproject.toml") 75 | deps = nox.project.dependency_groups(pyproject, dep_group) 76 | session.install("-U", "pip") 77 | session.install("-e", ".", *deps) 78 | # for tests/integration/test_bundled_wheels.py::test_analyze_wheel_abi_static_exe 79 | session.run( 80 | "pip", 81 | "download", 82 | "--only-binary", 83 | ":all:", 84 | "--no-deps", 85 | "--platform", 86 | "manylinux1_x86_64", 87 | "-d", 88 | "./tests/integration/", 89 | "patchelf==0.17.2.1", 90 | ) 91 | if RUNNING_CI: 92 | posargs.extend(["--cov", "auditwheel", "--cov-branch"]) 93 | # pull manylinux images that will be used. 94 | # this helps passing tests which would otherwise timeout. 
95 | for image in _docker_images(session): 96 | session.run("docker", "pull", image, external=True) 97 | 98 | session.run("pytest", "-s", *posargs) 99 | if RUNNING_CI: 100 | session.run("auditwheel", "lddtree", sys.executable) 101 | session.run("coverage", "xml", "-ocoverage.xml") 102 | 103 | 104 | @nox.session(python=["3.9"], default=False) 105 | def build(session: nox.Session) -> None: 106 | session.install("build") 107 | tmp_dir = Path(session.create_tmp()) / "build-output" 108 | session.run("python", "-m", "build", "--outdir", str(tmp_dir)) 109 | (wheel_path,) = tmp_dir.glob("*.whl") 110 | (sdist_path,) = tmp_dir.glob("*.tar.gz") 111 | Path("dist").mkdir(exist_ok=True) 112 | wheel_path.rename(f"dist/{wheel_path.name}") 113 | sdist_path.rename(f"dist/{sdist_path.name}") 114 | 115 | global sdist # noqa: PLW0603 116 | sdist = f"dist/{sdist_path.name}" 117 | global wheel # noqa: PLW0603 118 | wheel = f"dist/{wheel_path.name}" 119 | 120 | 121 | def _test_dist(session: nox.Session, path: str) -> None: 122 | pyproject = nox.project.load_toml("pyproject.toml") 123 | deps = nox.project.dependency_groups(pyproject, "test") 124 | session.install(path, *deps) 125 | session.run("pytest", "tests/unit") 126 | 127 | 128 | @nox.session(name="test-sdist", python=PYTHON_ALL_VERSIONS, requires=["build"]) 129 | def test_sdist(session: nox.Session) -> None: 130 | """ 131 | Do not run explicitly. 132 | """ 133 | _test_dist(session, sdist) 134 | 135 | 136 | @nox.session(name="test-wheel", python=PYTHON_ALL_VERSIONS, requires=["build"]) 137 | def test_wheel(session: nox.Session) -> None: 138 | """ 139 | Do not run explicitly. 
140 | """ 141 | _test_dist(session, wheel) 142 | 143 | 144 | @nox.session(python=PYTHON_ALL_VERSIONS, reuse_venv=True, default=False) 145 | def develop(session: nox.Session) -> None: 146 | session.run("python", "-m", "pip", "install", "--upgrade", "pip") 147 | pyproject = nox.project.load_toml("pyproject.toml") 148 | deps = nox.project.dependency_groups(pyproject, "dev") 149 | session.install("-e", ".", *deps) 150 | 151 | 152 | if __name__ == "__main__": 153 | nox.main() 154 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61", "setuptools_scm>=8"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "auditwheel" 7 | dynamic = ["version"] 8 | description = "Cross-distribution Linux wheels" 9 | readme = "README.rst" 10 | license = {text = "MIT" } 11 | requires-python = ">=3.9" 12 | authors = [ 13 | { name = "Robert T. 
McGibbon", email = "rmcgibbo@gmail.com" }, 14 | ] 15 | classifiers = [ 16 | "Development Status :: 4 - Beta", 17 | "Environment :: Console", 18 | "Intended Audience :: Developers", 19 | "License :: OSI Approved :: MIT License", 20 | "Operating System :: POSIX :: Linux", 21 | "Programming Language :: Python :: 3", 22 | "Programming Language :: Python :: 3.9", 23 | "Programming Language :: Python :: 3.10", 24 | "Programming Language :: Python :: 3.11", 25 | "Programming Language :: Python :: 3.12", 26 | "Programming Language :: Python :: 3.13", 27 | "Programming Language :: Python :: 3 :: Only", 28 | "Topic :: Software Development", 29 | "Topic :: Software Development :: Build Tools", 30 | "Topic :: Software Development :: Libraries :: Python Modules", 31 | ] 32 | dependencies = [ 33 | "packaging>=20.9", 34 | "pyelftools>=0.24", 35 | ] 36 | 37 | [project.scripts] 38 | auditwheel = "auditwheel.main:main" 39 | 40 | [project.urls] 41 | Homepage = "https://github.com/pypa/auditwheel" 42 | 43 | [dependency-groups] 44 | test = ["pytest>=3.4", "jsonschema", "patchelf", "pretend", "docker"] 45 | coverage = ["pytest-cov", {include-group = "test"}] 46 | dev = [{include-group = "test"}, {include-group = "coverage"}] 47 | 48 | [tool.setuptools] 49 | include-package-data = true 50 | zip-safe = false 51 | 52 | [tool.setuptools.packages.find] 53 | where = ["src"] 54 | namespaces = false 55 | 56 | [tool.setuptools.package-data] 57 | auditwheel = ["*.json"] 58 | 59 | [tool.setuptools_scm] 60 | # enable version inference 61 | 62 | [tool.mypy] 63 | check_untyped_defs = true 64 | disallow_any_generics = true 65 | disallow_incomplete_defs = true 66 | disallow_subclassing_any = true 67 | disallow_untyped_defs = false 68 | enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] 69 | extra_checks = true 70 | strict = false 71 | strict_equality = true 72 | warn_redundant_casts = true 73 | warn_unreachable = false 74 | warn_unused_configs = true 75 | warn_unused_ignores = 
true 76 | 77 | [[tool.mypy.overrides]] 78 | module = "auditwheel.*" 79 | disallow_untyped_calls = true 80 | disallow_untyped_decorators = true 81 | disallow_untyped_defs = true 82 | warn_return_any = true 83 | 84 | [[tool.mypy.overrides]] 85 | module = "auditwheel._vendor.*" 86 | follow_imports = "skip" 87 | ignore_errors = true 88 | 89 | [tool.pytest.ini_options] 90 | log_cli = true 91 | log_cli_level = 20 92 | 93 | [tool.ruff] 94 | target-version = "py39" 95 | exclude = ["src/auditwheel/_vendor"] 96 | 97 | [tool.ruff.lint] 98 | extend-select = [ 99 | "B", # flake8-bugbear 100 | "I", # isort 101 | "ARG", # flake8-unused-arguments 102 | "C4", # flake8-comprehensions 103 | "EM", # flake8-errmsg 104 | "ICN", # flake8-import-conventions 105 | "ISC", # flake8-implicit-str-concat 106 | "G", # flake8-logging-format 107 | "PGH", # pygrep-hooks 108 | "PIE", # flake8-pie 109 | "PL", # pylint 110 | "PT", # flake8-pytest-style 111 | "RET", # flake8-return 112 | "RUF", # Ruff-specific 113 | "SIM", # flake8-simplify 114 | "TID251", # flake8-tidy-imports.banned-api 115 | "UP", # pyupgrade 116 | "YTT", # flake8-2020 117 | "EXE", # flake8-executable 118 | "PYI", # flake8-pyi 119 | ] 120 | ignore = [ 121 | "ISC001", # Conflicts with formatter 122 | "PLR", # Design related pylint codes 123 | ] 124 | -------------------------------------------------------------------------------- /scripts/calculate_symbol_versions.py: -------------------------------------------------------------------------------- 1 | """ 2 | Calculate symbol_versions for a policy in policy.json by collection 3 | defined version (.gnu.version_d) from libraries in lib_whitelist. 4 | This should be run inside a manylinux Docker container. 
5 | """ 6 | 7 | from __future__ import annotations 8 | 9 | import argparse 10 | import contextlib 11 | import json 12 | import os 13 | import platform 14 | 15 | from elftools.elf.elffile import ELFFile 16 | 17 | if platform.architecture()[0] == "64bit": 18 | LIBRARY_PATHS = ["/lib64", "/usr/lib64"] 19 | else: 20 | LIBRARY_PATHS = ["/lib", "/usr/lib"] 21 | 22 | parser = argparse.ArgumentParser(description=__doc__) 23 | parser.add_argument("policy", help="The policy name") 24 | parser.add_argument("policyjson", help="The policy.json file.") 25 | 26 | 27 | def load_policies(path): 28 | with open(path) as f: 29 | return json.load(f) 30 | 31 | 32 | def choose_policy(name, policies): 33 | try: 34 | return next(policy for policy in policies if policy["name"] == name) 35 | except StopIteration: 36 | msg = f"Unknown policy {name}" 37 | raise RuntimeError(msg) from None 38 | 39 | 40 | def find_library(library): 41 | for p in LIBRARY_PATHS: 42 | path = os.path.join(p, library) 43 | if os.path.exists(path): 44 | return path 45 | msg = f"Unknown library {library}" 46 | raise RuntimeError(msg) 47 | 48 | 49 | def versionify(version_string): 50 | try: 51 | result = [int(n) for n in version_string.split(".")] 52 | assert len(result) <= 3 53 | except ValueError: 54 | result = [999999, 999999, 999999, version_string] 55 | return result 56 | 57 | 58 | def calculate_symbol_versions(libraries, symbol_versions, arch): 59 | calculated_symbol_versions = {k: set() for k in symbol_versions} 60 | prefixes = ["/lib", "/usr/lib"] 61 | if arch == "64bit": 62 | prefixes = [p + "64" for p in prefixes] 63 | 64 | for library in libraries: 65 | library_path = find_library(library) 66 | with open(library_path, "rb") as f: 67 | e = ELFFile(f) 68 | section = e.get_section_by_name(".gnu.version_d") 69 | if section: 70 | for _, verdef_iter in section.iter_versions(): 71 | for vernaux in verdef_iter: 72 | with contextlib.suppress(ValueError): 73 | name, version = vernaux.name.split("_", 1) 74 | if ( 75 | 
name in calculated_symbol_versions 76 | and version != "PRIVATE" 77 | ): 78 | calculated_symbol_versions[name].add(version) 79 | return {k: sorted(v, key=versionify) for k, v in calculated_symbol_versions.items()} 80 | 81 | 82 | def main(): 83 | args = parser.parse_args() 84 | policies = load_policies(args.policyjson) 85 | policy = choose_policy(args.policy, policies) 86 | arch, _ = platform.architecture() 87 | print( 88 | json.dumps( 89 | calculate_symbol_versions( 90 | policy["lib_whitelist"], 91 | policy["symbol_versions"], 92 | arch, 93 | ) 94 | ) 95 | ) 96 | 97 | 98 | main() 99 | -------------------------------------------------------------------------------- /scripts/create-arch-wheels.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # This script is used to create wheels for unsupported architectures 4 | # in order to extend coverage and check errors with those. 5 | 6 | set -eux 7 | 8 | SCRIPT_DIR="$(CDPATH='' cd -- "$(dirname -- "$0")" && pwd -P)" 9 | INTEGRATION_TEST_DIR="${SCRIPT_DIR}/../tests/integration" 10 | mkdir -p "${INTEGRATION_TEST_DIR}/arch-wheels/glibc" 11 | mkdir -p "${INTEGRATION_TEST_DIR}/arch-wheels/musllinux_1_2" 12 | 13 | # "mips64le" built with buildpack-deps:bookworm and renamed cp313-cp313 14 | for ARCH in "386" "amd64" "arm/v5" "arm/v7" "arm64/v8" "ppc64le" "riscv64" "s390x"; do 15 | docker run --platform linux/${ARCH} -i --rm -v "${INTEGRATION_TEST_DIR}:/tests" debian:trixie-20250203 << "EOF" 16 | # for, "arm/v5" QEMU will report armv7l, running on aarch64 will report aarch64, force armv5l/armv7l 17 | case "$(dpkg --print-architecture)" in 18 | armel) export _PYTHON_HOST_PLATFORM="linux-armv5l";; 19 | armhf) export _PYTHON_HOST_PLATFORM="linux-armv7l";; 20 | *) ;; 21 | esac 22 | DEBIAN_FRONTEND=noninteractive apt-get update 23 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends gcc python3-pip python3-dev 24 | python3 -m pip wheel --no-deps -w 
/tests/arch-wheels/glibc /tests/testsimple 25 | EOF 26 | done 27 | 28 | for ARCH in "386" "amd64" "arm/v6" "arm/v7" "arm64/v8" "ppc64le" "riscv64" "s390x"; do 29 | docker run --platform linux/${ARCH} -i --rm -v "${INTEGRATION_TEST_DIR}:/tests" alpine:3.21 << "EOF" 30 | # for, "arm/v5" QEMU will report armv7l, running on aarch64 will report aarch64, force armv5l/armv7l 31 | case "$(cat /etc/apk/arch)" in 32 | armhf) export _PYTHON_HOST_PLATFORM="linux-armv6l";; 33 | armv7) export _PYTHON_HOST_PLATFORM="linux-armv7l";; 34 | *) ;; 35 | esac 36 | apk add gcc binutils musl-dev python3-dev py3-pip 37 | python3 -m pip wheel --no-deps -w /tests/arch-wheels/musllinux_1_2 /tests/testsimple 38 | EOF 39 | done 40 | -------------------------------------------------------------------------------- /src/auditwheel/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/src/auditwheel/__init__.py -------------------------------------------------------------------------------- /src/auditwheel/__main__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | 5 | from .main import main 6 | 7 | if __name__ == "__main__": 8 | sys.exit(main()) 9 | -------------------------------------------------------------------------------- /src/auditwheel/_vendor/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/src/auditwheel/_vendor/__init__.py -------------------------------------------------------------------------------- /src/auditwheel/_vendor/wheel/LICENSE.txt: -------------------------------------------------------------------------------- 1 | "wheel" copyright (c) 2012-2014 Daniel Holth and 2 | contributors. 
3 | 4 | The MIT License 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a 7 | copy of this software and associated documentation files (the "Software"), 8 | to deal in the Software without restriction, including without limitation 9 | the rights to use, copy, modify, merge, publish, distribute, sublicense, 10 | and/or sell copies of the Software, and to permit persons to whom the 11 | Software is furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included 14 | in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 19 | THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR 20 | OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 21 | ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 
23 | -------------------------------------------------------------------------------- /src/auditwheel/_vendor/wheel/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.36.2' 2 | -------------------------------------------------------------------------------- /src/auditwheel/_vendor/wheel/pkginfo.py: -------------------------------------------------------------------------------- 1 | """Tools for reading and writing PKG-INFO / METADATA without caring 2 | about the encoding.""" 3 | 4 | from email.parser import Parser 5 | 6 | try: 7 | unicode 8 | _PY3 = False 9 | except NameError: 10 | _PY3 = True 11 | 12 | if not _PY3: 13 | from email.generator import Generator 14 | 15 | def read_pkg_info_bytes(bytestr): 16 | return Parser().parsestr(bytestr) 17 | 18 | def read_pkg_info(path): 19 | with open(path, "r") as headers: 20 | message = Parser().parse(headers) 21 | return message 22 | 23 | def write_pkg_info(path, message): 24 | with open(path, 'w') as metadata: 25 | Generator(metadata, mangle_from_=False, maxheaderlen=0).flatten(message) 26 | else: 27 | from email.generator import BytesGenerator 28 | 29 | def read_pkg_info_bytes(bytestr): 30 | headers = bytestr.decode(encoding="ascii", errors="surrogateescape") 31 | message = Parser().parsestr(headers) 32 | return message 33 | 34 | def read_pkg_info(path): 35 | with open(path, "r", 36 | encoding="ascii", 37 | errors="surrogateescape") as headers: 38 | message = Parser().parse(headers) 39 | return message 40 | 41 | def write_pkg_info(path, message): 42 | with open(path, "wb") as out: 43 | BytesGenerator(out, mangle_from_=False, maxheaderlen=0).flatten(message) 44 | -------------------------------------------------------------------------------- /src/auditwheel/architecture.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import functools 4 | import platform 5 | import struct 6 | 
import sys 7 | from enum import Enum 8 | 9 | 10 | class Architecture(Enum): 11 | value: str 12 | 13 | aarch64 = "aarch64" 14 | armv7l = "armv7l" 15 | i686 = "i686" 16 | loongarch64 = "loongarch64" 17 | ppc64 = "ppc64" 18 | ppc64le = "ppc64le" 19 | riscv64 = "riscv64" 20 | s390x = "s390x" 21 | x86_64 = "x86_64" 22 | x86_64_v2 = "x86_64_v2" 23 | x86_64_v3 = "x86_64_v3" 24 | x86_64_v4 = "x86_64_v4" 25 | 26 | def __str__(self) -> str: 27 | return self.value 28 | 29 | @property 30 | def baseline(self) -> Architecture: 31 | if self.value.startswith("x86_64"): 32 | return Architecture.x86_64 33 | return self 34 | 35 | @classmethod 36 | @functools.lru_cache(None) 37 | def _member_list(cls) -> list[Architecture]: 38 | return list(cls) 39 | 40 | def is_subset(self, other: Architecture) -> bool: 41 | if self.baseline != other.baseline: 42 | return False 43 | member_list = Architecture._member_list() 44 | return member_list.index(self) <= member_list.index(other) 45 | 46 | def is_superset(self, other: Architecture) -> bool: 47 | if self.baseline != other.baseline: 48 | return False 49 | return other.is_subset(self) 50 | 51 | @staticmethod 52 | def detect(*, bits: int | None = None) -> Architecture: 53 | machine = platform.machine() 54 | if sys.platform.startswith("win"): 55 | machine = {"AMD64": "x86_64", "ARM64": "aarch64", "x86": "i686"}.get( 56 | machine, machine 57 | ) 58 | elif sys.platform.startswith("darwin"): 59 | machine = {"arm64": "aarch64"}.get(machine, machine) 60 | 61 | if bits is None: 62 | # c.f. 
https://github.com/pypa/packaging/pull/711 63 | bits = 8 * struct.calcsize("P") 64 | 65 | if machine in {"x86_64", "i686"}: 66 | machine = {64: "x86_64", 32: "i686"}[bits] 67 | elif machine in {"aarch64", "armv8l"}: 68 | # use armv7l policy for 64-bit arm kernel in 32-bit mode (armv8l) 69 | machine = {64: "aarch64", 32: "armv7l"}[bits] 70 | 71 | return Architecture(machine) 72 | -------------------------------------------------------------------------------- /src/auditwheel/condatools.py: -------------------------------------------------------------------------------- 1 | """Context managers like those in wheeltools.py for unpacking 2 | conda packages. 3 | """ 4 | 5 | from __future__ import annotations 6 | 7 | from pathlib import Path 8 | 9 | from .tmpdirs import InTemporaryDirectory 10 | from .tools import tarbz2todir 11 | 12 | 13 | class InCondaPkg(InTemporaryDirectory): 14 | def __init__(self, in_conda_pkg: Path) -> None: 15 | """Initialize in-conda-package context manager""" 16 | self.in_conda_pkg = in_conda_pkg.absolute() 17 | super().__init__() 18 | 19 | def __enter__(self) -> Path: 20 | tarbz2todir(self.in_conda_pkg, self.name) 21 | return super().__enter__() 22 | 23 | 24 | class InCondaPkgCtx(InCondaPkg): 25 | def __init__(self, in_conda_pkg: Path) -> None: 26 | super().__init__(in_conda_pkg) 27 | self.path: Path | None = None 28 | 29 | def __enter__(self): # type: ignore[no-untyped-def] 30 | self.path = super().__enter__() 31 | return self 32 | 33 | def iter_files(self) -> list[str]: 34 | if self.path is None: 35 | msg = "This function should be called from context manager" 36 | raise ValueError(msg) 37 | files = self.path / "info" / "files" 38 | with open(files) as f: 39 | return [line.strip() for line in f.readlines()] 40 | -------------------------------------------------------------------------------- /src/auditwheel/elfutils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 
3 | from collections.abc import Iterable, Iterator 4 | from pathlib import Path 5 | 6 | from elftools.common.exceptions import ELFError 7 | from elftools.elf.elffile import ELFFile 8 | 9 | from .lddtree import parse_ld_paths 10 | 11 | 12 | def elf_read_dt_needed(fn: Path) -> list[str]: 13 | needed = [] 14 | with open(fn, "rb") as f: 15 | elf = ELFFile(f) 16 | section = elf.get_section_by_name(".dynamic") 17 | if section is None: 18 | msg = f"Could not find soname in {fn}" 19 | raise ValueError(msg) 20 | 21 | for t in section.iter_tags(): 22 | if t.entry.d_tag == "DT_NEEDED": 23 | needed.append(t.needed) 24 | 25 | return needed 26 | 27 | 28 | def elf_file_filter(paths: Iterable[Path]) -> Iterator[tuple[Path, ELFFile]]: 29 | """Filter through an iterator of filenames and load up only ELF 30 | files 31 | """ 32 | 33 | for path in paths: 34 | if path.name.endswith(".py"): 35 | continue 36 | else: 37 | try: 38 | with open(path, "rb") as f: 39 | candidate = ELFFile(f) 40 | yield path, candidate 41 | except ELFError: 42 | # not an elf file 43 | continue 44 | 45 | 46 | def elf_find_versioned_symbols(elf: ELFFile) -> Iterator[tuple[str, str]]: 47 | section = elf.get_section_by_name(".gnu.version_r") 48 | 49 | if section is not None: 50 | for verneed, verneed_iter in section.iter_versions(): 51 | if verneed.name.startswith("ld-linux") or verneed.name in [ 52 | "ld64.so.2", 53 | "ld64.so.1", 54 | ]: 55 | continue 56 | for vernaux in verneed_iter: 57 | yield (verneed.name, vernaux.name) 58 | 59 | 60 | def elf_find_ucs2_symbols(elf: ELFFile) -> Iterator[str]: 61 | section = elf.get_section_by_name(".dynsym") 62 | if section is not None: 63 | # look for UCS2 symbols that are externally referenced 64 | for sym in section.iter_symbols(): 65 | if ( 66 | "PyUnicodeUCS2_" in sym.name 67 | and sym["st_shndx"] == "SHN_UNDEF" 68 | and sym["st_info"]["type"] == "STT_FUNC" 69 | ): 70 | yield sym.name 71 | 72 | 73 | def elf_references_PyFPE_jbuf(elf: ELFFile) -> bool: 74 | 
offending_symbol_names = ("PyFPE_jbuf", "PyFPE_dummy", "PyFPE_counter") 75 | section = elf.get_section_by_name(".dynsym") 76 | if section is not None: 77 | # look for symbols that are externally referenced 78 | for sym in section.iter_symbols(): 79 | if ( 80 | sym.name in offending_symbol_names 81 | and sym["st_shndx"] == "SHN_UNDEF" 82 | and sym["st_info"]["type"] in ("STT_FUNC", "STT_NOTYPE") 83 | ): 84 | return True 85 | return False 86 | 87 | 88 | def elf_is_python_extension(fn: Path, elf: ELFFile) -> tuple[bool, int | None]: 89 | modname = fn.name.split(".", 1)[0] 90 | module_init_f = { 91 | "init" + modname: 2, 92 | "PyInit_" + modname: 3, 93 | "_cffi_pypyinit_" + modname: 2, 94 | } 95 | 96 | sect = elf.get_section_by_name(".dynsym") 97 | if sect is None: 98 | return False, None 99 | 100 | for sym in sect.iter_symbols(): 101 | if ( 102 | sym.name in module_init_f 103 | and sym["st_shndx"] != "SHN_UNDEF" 104 | and sym["st_info"]["type"] == "STT_FUNC" 105 | ): 106 | return True, module_init_f[sym.name] 107 | 108 | return False, None 109 | 110 | 111 | def elf_read_rpaths(fn: Path) -> dict[str, list[str]]: 112 | result: dict[str, list[str]] = {"rpaths": [], "runpaths": []} 113 | 114 | with open(fn, "rb") as f: 115 | elf = ELFFile(f) 116 | section = elf.get_section_by_name(".dynamic") 117 | if section is None: 118 | return result 119 | 120 | for t in section.iter_tags(): 121 | if t.entry.d_tag == "DT_RPATH": 122 | result["rpaths"] = parse_ld_paths(t.rpath, root="/", path=str(fn)) 123 | elif t.entry.d_tag == "DT_RUNPATH": 124 | result["runpaths"] = parse_ld_paths(t.runpath, root="/", path=str(fn)) 125 | 126 | return result 127 | 128 | 129 | def get_undefined_symbols(path: Path) -> set[str]: 130 | undef_symbols = set() 131 | with open(path, "rb") as f: 132 | elf = ELFFile(f) 133 | section = elf.get_section_by_name(".dynsym") 134 | if section is not None: 135 | # look for all undef symbols 136 | for sym in section.iter_symbols(): 137 | if sym["st_shndx"] == 
"SHN_UNDEF": 138 | undef_symbols.add(sym.name) 139 | return undef_symbols 140 | 141 | 142 | def filter_undefined_symbols( 143 | path: Path, symbols: dict[str, frozenset[str]] 144 | ) -> dict[str, list[str]]: 145 | if not symbols: 146 | return {} 147 | undef_symbols = set("*") | get_undefined_symbols(path) 148 | result = {} 149 | for lib, sym_list in symbols.items(): 150 | intersection = sym_list & undef_symbols 151 | if intersection: 152 | result[lib] = sorted(intersection) 153 | return result 154 | -------------------------------------------------------------------------------- /src/auditwheel/error.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | 4 | class AuditwheelException(Exception): 5 | def __init__(self, msg: str): 6 | super().__init__(msg) 7 | 8 | @property 9 | def message(self) -> str: 10 | assert isinstance(self.args[0], str) 11 | return self.args[0] 12 | 13 | 14 | class InvalidLibc(AuditwheelException): 15 | pass 16 | 17 | 18 | class WheelToolsError(AuditwheelException): 19 | pass 20 | 21 | 22 | class NonPlatformWheel(AuditwheelException): 23 | """No ELF binaries in the wheel""" 24 | 25 | def __init__(self, architecture: str | None, libraries: list[str] | None) -> None: 26 | if architecture is None or not libraries: 27 | msg = ( 28 | "This does not look like a platform wheel, no ELF executable " 29 | "or shared library file (including compiled Python C extension) " 30 | "found in the wheel archive" 31 | ) 32 | else: 33 | libraries_str = "\n\t".join(libraries) 34 | msg = ( 35 | "Invalid binary wheel: no ELF executable or shared library file " 36 | "(including compiled Python C extension) with a " 37 | f"{architecture!r} architecure found. 
The following " 38 | f"ELF files were found:\n\t{libraries_str}\n" 39 | ) 40 | super().__init__(msg) 41 | -------------------------------------------------------------------------------- /src/auditwheel/genericpkgctx.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | 5 | from .condatools import InCondaPkgCtx 6 | from .wheeltools import InWheelCtx 7 | 8 | 9 | def InGenericPkgCtx( 10 | in_path: Path, out_path: Path | None = None 11 | ) -> InWheelCtx | InCondaPkgCtx: 12 | """Factory that returns a InWheelCtx or InCondaPkgCtx 13 | context manager depending on the file extension 14 | """ 15 | if in_path.name.endswith(".whl"): 16 | return InWheelCtx(in_path, out_path) 17 | if in_path.name.endswith(".tar.bz2"): 18 | if out_path is not None: 19 | raise NotImplementedError() 20 | return InCondaPkgCtx(in_path) 21 | msg = f"Invalid package: {in_path}. File formats supported: .whl, .tar.bz2" 22 | raise ValueError(msg) 23 | -------------------------------------------------------------------------------- /src/auditwheel/hashfile.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import hashlib 4 | from typing import BinaryIO 5 | 6 | 7 | def hashfile(afile: BinaryIO, blocksize: int = 65536) -> str: 8 | """Hash the contents of an open file handle with SHA256""" 9 | hasher = hashlib.sha256() 10 | buf = afile.read(blocksize) 11 | while len(buf) > 0: 12 | hasher.update(buf) 13 | buf = afile.read(blocksize) 14 | return hasher.hexdigest() 15 | -------------------------------------------------------------------------------- /src/auditwheel/json.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import dataclasses 4 | import json 5 | from enum import Enum 6 | from pathlib import PurePath 7 | from typing import Any 8 | 9 
| 10 | class _CustomEncoder(json.JSONEncoder): 11 | def default(self, value: Any) -> Any: 12 | if dataclasses.is_dataclass(value) and not isinstance(value, type): 13 | as_dict = dataclasses.asdict(value) 14 | as_dict.pop("policy", None) # don't dump full policy in logs 15 | return as_dict 16 | if isinstance(value, frozenset): 17 | return sorted(value) 18 | if isinstance(value, Enum): 19 | return repr(value) 20 | if isinstance(value, PurePath): 21 | return str(value) 22 | return super().default(value) 23 | 24 | def encode(self, o: Any) -> str: 25 | if isinstance(o, dict): 26 | o = {str(k): v for k, v in o.items()} 27 | return super().encode(o) 28 | 29 | 30 | def dumps(obj: Any) -> str: 31 | return json.dumps(obj, indent=4, cls=_CustomEncoder) 32 | -------------------------------------------------------------------------------- /src/auditwheel/libc.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | import os 5 | import re 6 | import subprocess 7 | from dataclasses import dataclass 8 | from enum import Enum 9 | from pathlib import Path 10 | 11 | from .error import InvalidLibc 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | 16 | @dataclass(frozen=True, order=True) 17 | class LibcVersion: 18 | major: int 19 | minor: int 20 | 21 | 22 | class Libc(Enum): 23 | value: str 24 | 25 | GLIBC = "glibc" 26 | MUSL = "musl" 27 | 28 | def __str__(self) -> str: 29 | return self.value 30 | 31 | def get_current_version(self) -> LibcVersion: 32 | if self == Libc.MUSL: 33 | return _get_musl_version(_find_musl_libc()) 34 | return _get_glibc_version() 35 | 36 | @staticmethod 37 | def detect() -> Libc: 38 | # check musl first, default to GLIBC 39 | try: 40 | _find_musl_libc() 41 | logger.debug("Detected musl libc") 42 | return Libc.MUSL 43 | except InvalidLibc: 44 | logger.debug("Falling back to GNU libc") 45 | return Libc.GLIBC 46 | 47 | 48 | def _find_musl_libc() -> Path: 49 | try: 
50 | (dl_path,) = list(Path("/lib").glob("libc.musl-*.so.1")) 51 | except ValueError: 52 | msg = "musl libc not detected" 53 | logger.debug("%s", msg) 54 | raise InvalidLibc(msg) from None 55 | 56 | return dl_path 57 | 58 | 59 | def _get_musl_version(ld_path: Path) -> LibcVersion: 60 | try: 61 | ld = subprocess.run( 62 | [ld_path], check=False, errors="strict", stderr=subprocess.PIPE 63 | ).stderr 64 | except FileNotFoundError as err: 65 | msg = "failed to determine musl version" 66 | logger.exception("%s", msg) 67 | raise InvalidLibc(msg) from err 68 | 69 | match = re.search(r"Version (?P\d+).(?P\d+).(?P\d+)", ld) 70 | if not match: 71 | msg = f"failed to parse musl version from string {ld!r}" 72 | raise InvalidLibc(msg) from None 73 | 74 | return LibcVersion(int(match.group("major")), int(match.group("minor"))) 75 | 76 | 77 | def _get_glibc_version() -> LibcVersion: 78 | # CS_GNU_LIBC_VERSION is only for glibc and shall return e.g. "glibc 2.3.4" 79 | try: 80 | version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION") 81 | assert version_string is not None 82 | _, version = version_string.rsplit() 83 | except (AssertionError, AttributeError, OSError, ValueError) as err: 84 | # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... 
85 | msg = "failed to determine glibc version" 86 | raise InvalidLibc(msg) from err 87 | 88 | m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version) 89 | if not m: 90 | msg = f"failed to parse glibc version from string {version!r}" 91 | raise InvalidLibc(msg) 92 | 93 | return LibcVersion(int(m.group("major")), int(m.group("minor"))) 94 | -------------------------------------------------------------------------------- /src/auditwheel/main.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import argparse 4 | import logging 5 | import os 6 | import pathlib 7 | import sys 8 | from importlib import metadata 9 | 10 | import auditwheel 11 | 12 | from . import main_lddtree, main_repair, main_show 13 | 14 | 15 | def main() -> int | None: 16 | if sys.platform != "linux": 17 | print("Error: This tool only supports Linux") 18 | return 1 19 | 20 | location = pathlib.Path(auditwheel.__file__).parent.resolve() 21 | version = "auditwheel {} installed at {} (python {}.{})".format( 22 | metadata.version("auditwheel"), location, *sys.version_info 23 | ) 24 | 25 | p = argparse.ArgumentParser(description="Cross-distro Python wheels.") 26 | p.set_defaults(prog=os.path.basename(sys.argv[0])) 27 | p.add_argument("-V", "--version", action="version", version=version) 28 | p.add_argument( 29 | "-v", 30 | "--verbose", 31 | action="count", 32 | dest="verbose", 33 | default=0, 34 | help="Give more output. 
Option is additive", 35 | ) 36 | sub_parsers = p.add_subparsers(metavar="command", dest="cmd") 37 | 38 | main_show.configure_parser(sub_parsers) 39 | main_repair.configure_parser(sub_parsers) 40 | main_lddtree.configure_subparser(sub_parsers) 41 | 42 | args = p.parse_args() 43 | 44 | logging.disable(logging.NOTSET) 45 | if args.verbose >= 1: 46 | logging.basicConfig(level=logging.DEBUG) 47 | else: 48 | logging.basicConfig(level=logging.INFO) 49 | 50 | if not hasattr(args, "func"): 51 | p.print_help() 52 | return None 53 | result: int | None = args.func(args, p) 54 | return result 55 | -------------------------------------------------------------------------------- /src/auditwheel/main_lddtree.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import argparse 4 | import logging 5 | 6 | logger = logging.getLogger(__name__) 7 | 8 | 9 | def configure_subparser(sub_parsers) -> None: # type: ignore[no-untyped-def] 10 | help = "Analyze a single ELF file (similar to ``ldd``)." 11 | p = sub_parsers.add_parser("lddtree", help=help, description=help) 12 | p.add_argument("file", help="Path to .so file") 13 | p.set_defaults(func=execute) 14 | 15 | 16 | def execute(args: argparse.Namespace, p: argparse.ArgumentParser) -> int: # noqa: ARG001 17 | from . 
import json 18 | from .lddtree import ldd 19 | 20 | logger.info(json.dumps(ldd(args.file))) 21 | return 0 22 | -------------------------------------------------------------------------------- /src/auditwheel/main_repair.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import argparse 4 | import logging 5 | import zlib 6 | from pathlib import Path 7 | 8 | from auditwheel.architecture import Architecture 9 | from auditwheel.error import NonPlatformWheel, WheelToolsError 10 | from auditwheel.libc import Libc 11 | from auditwheel.patcher import Patchelf 12 | from auditwheel.wheeltools import get_wheel_architecture, get_wheel_libc 13 | 14 | from .policy import WheelPolicies 15 | from .tools import EnvironmentDefault 16 | 17 | logger = logging.getLogger(__name__) 18 | 19 | 20 | def configure_parser(sub_parsers) -> None: # type: ignore[no-untyped-def] 21 | policies = WheelPolicies(libc=Libc.detect(), arch=Architecture.detect()) 22 | policy_names = [p.name for p in policies if p != policies.linux] 23 | policy_names += [alias for p in policies for alias in p.aliases] 24 | policy_names += ["auto"] 25 | epilog = """PLATFORMS: 26 | These are the possible target platform tags, as specified by PEP 600. 27 | Note that old, pre-PEP 600 tags are still usable and are listed as aliases 28 | below. 29 | """ 30 | for p in policies: 31 | epilog += f"- {p.name}" 32 | if len(p.aliases) > 0: 33 | epilog += f" (aliased by {', '.join(p.aliases)})" 34 | epilog += "\n" 35 | help = """Vendor in external shared library dependencies of a wheel. 36 | If multiple wheels are specified, an error processing one 37 | wheel will abort processing of subsequent wheels. 
38 | """ 39 | parser = sub_parsers.add_parser( 40 | "repair", 41 | help=help, 42 | description=help, 43 | epilog=epilog, 44 | formatter_class=argparse.RawDescriptionHelpFormatter, 45 | ) 46 | parser.add_argument("WHEEL_FILE", type=Path, help="Path to wheel file.", nargs="+") 47 | parser.add_argument( 48 | "-z", 49 | "--zip-compression-level", 50 | action=EnvironmentDefault, 51 | metavar="ZIP_COMPRESSION_LEVEL", 52 | env="AUDITWHEEL_ZIP_COMPRESSION_LEVEL", 53 | dest="ZIP_COMPRESSION_LEVEL", 54 | type=int, 55 | help="Compress level to be used to create zip file.", 56 | choices=list(range(zlib.Z_NO_COMPRESSION, zlib.Z_BEST_COMPRESSION + 1)), 57 | default=zlib.Z_DEFAULT_COMPRESSION, 58 | ) 59 | parser.add_argument( 60 | "--plat", 61 | action=EnvironmentDefault, 62 | metavar="PLATFORM", 63 | env="AUDITWHEEL_PLAT", 64 | dest="PLAT", 65 | help="Desired target platform. See the available platforms under the " 66 | 'PLATFORMS section below. (default: "auto")', 67 | choices=policy_names, 68 | default="auto", 69 | ) 70 | parser.add_argument( 71 | "-L", 72 | "--lib-sdir", 73 | dest="LIB_SDIR", 74 | help=('Subdirectory in packages to store copied libraries. (default: ".libs")'), 75 | default=".libs", 76 | ) 77 | parser.add_argument( 78 | "-w", 79 | "--wheel-dir", 80 | dest="WHEEL_DIR", 81 | type=Path, 82 | help=('Directory to store delocated wheels (default: "wheelhouse/")'), 83 | default="wheelhouse/", 84 | ) 85 | parser.add_argument( 86 | "--no-update-tags", 87 | dest="UPDATE_TAGS", 88 | action="store_false", 89 | help=( 90 | "Do not update the wheel filename tags and WHEEL info" 91 | " to match the repaired platform tag." 
92 | ), 93 | default=True, 94 | ) 95 | parser.add_argument( 96 | "--strip", 97 | dest="STRIP", 98 | action="store_true", 99 | help="Strip symbols in the resulting wheel", 100 | default=False, 101 | ) 102 | parser.add_argument( 103 | "--exclude", 104 | dest="EXCLUDE", 105 | help="Exclude SONAME from grafting into the resulting wheel " 106 | "Please make sure wheel metadata reflects your dependencies. " 107 | "See https://github.com/pypa/auditwheel/pull/411#issuecomment-1500826281 " 108 | "(can be specified multiple times) " 109 | "(can contain wildcards, for example libfoo.so.*)", 110 | action="append", 111 | default=[], 112 | ) 113 | parser.add_argument( 114 | "--only-plat", 115 | dest="ONLY_PLAT", 116 | action="store_true", 117 | help="Do not check for higher policy compatibility", 118 | default=False, 119 | ) 120 | parser.add_argument( 121 | "--disable-isa-ext-check", 122 | dest="DISABLE_ISA_EXT_CHECK", 123 | action="store_true", 124 | help="Do not check for extended ISA compatibility (e.g. x86_64_v2)", 125 | default=False, 126 | ) 127 | parser.set_defaults(func=execute) 128 | 129 | 130 | def execute(args: argparse.Namespace, parser: argparse.ArgumentParser) -> int: 131 | from .repair import repair_wheel 132 | from .wheel_abi import analyze_wheel_abi 133 | 134 | exclude: frozenset[str] = frozenset(args.EXCLUDE) 135 | wheel_dir: Path = args.WHEEL_DIR.absolute() 136 | wheel_files: list[Path] = args.WHEEL_FILE 137 | 138 | requested_architecture: Architecture | None = None 139 | 140 | plat_base: str = args.PLAT 141 | for a in Architecture: 142 | suffix = f"_{a.value}" 143 | if plat_base.endswith(suffix): 144 | plat_base = plat_base[: -len(suffix)] 145 | requested_architecture = a 146 | break 147 | 148 | for wheel_file in wheel_files: 149 | if not wheel_file.is_file(): 150 | parser.error(f"cannot access {wheel_file}. 
from __future__ import annotations

import argparse
import logging
from pathlib import Path

logger = logging.getLogger(__name__)


def configure_parser(sub_parsers) -> None:  # type: ignore[no-untyped-def]
    """Register the ``show`` sub-command."""
    help = "Audit a wheel for external shared library dependencies."
    p = sub_parsers.add_parser("show", help=help, description=help)
    p.add_argument("WHEEL_FILE", type=Path, help="Path to wheel file.")
    p.add_argument(
        "--disable-isa-ext-check",
        dest="DISABLE_ISA_EXT_CHECK",
        action="store_true",
        help="Do not check for extended ISA compatibility (e.g. x86_64_v2)",
        default=False,
    )
    p.set_defaults(func=execute)


def printp(text: str) -> None:
    # Print a "paragraph": a leading blank line, then the text re-wrapped
    # without splitting long words or hyphenated names.
    from textwrap import wrap

    print()
    print("\n".join(wrap(text, break_long_words=False, break_on_hyphens=False)))


def execute(args: argparse.Namespace, parser: argparse.ArgumentParser) -> int:
    """Implement ``auditwheel show``: analyze a wheel and report its ABI.

    Returns 0 on success, 1 when the wheel is not a platform wheel.
    Without -v, reporting stops after the first failed compatibility check.
    """
    from . import json
    from .error import NonPlatformWheel, WheelToolsError
    from .wheel_abi import analyze_wheel_abi
    from .wheeltools import get_wheel_architecture, get_wheel_libc

    wheel_file: Path = args.WHEEL_FILE
    fn = wheel_file.name

    if not wheel_file.is_file():
        parser.error(f"cannot access {wheel_file}. No such file")

    # NOTE(review): redundant re-assignment; `fn` was already set above.
    fn = wheel_file.name
    try:
        arch = get_wheel_architecture(fn)
    except (WheelToolsError, NonPlatformWheel):
        logger.warning("The architecture could not be deduced from the wheel filename")
        arch = None

    try:
        libc = get_wheel_libc(fn)
    except WheelToolsError:
        logger.debug("The libc could not be deduced from the wheel filename")
        libc = None

    try:
        winfo = analyze_wheel_abi(
            libc, arch, wheel_file, frozenset(), args.DISABLE_ISA_EXT_CHECK, False
        )
    except NonPlatformWheel as e:
        logger.info("%s", e.message)
        return 1

    policies = winfo.policies

    libs_with_versions = [
        f"{k} with versions {v}" for k, v in winfo.versioned_symbols.items()
    ]

    printp(
        f'{fn} is consistent with the following platform tag: "{winfo.overall_policy.name}".'
    )

    # Each condition below means the wheel degraded to the generic "linux"
    # policy for that particular check.
    if winfo.pyfpe_policy == policies.linux:
        printp(
            "This wheel uses the PyFPE_jbuf function, which is not compatible with the"
            " manylinux/musllinux tags. (see https://www.python.org/dev/peps/pep-0513/"
            "#fpectl-builds-vs-no-fpectl-builds)"
        )
        if args.verbose < 1:
            return 0

    if winfo.ucs_policy == policies.linux:
        printp(
            "This wheel is compiled against a narrow unicode (UCS2) "
            "version of Python, which is not compatible with the "
            "manylinux/musllinux tags."
        )
        if args.verbose < 1:
            return 0

    if winfo.machine_policy == policies.linux:
        printp("This wheel depends on unsupported ISA extensions.")
        if args.verbose < 1:
            return 0

    if len(libs_with_versions) == 0:
        printp(
            "The wheel references no external versioned symbols from "
            "system-provided shared libraries."
        )
    else:
        printp(
            "The wheel references external versioned symbols in these "
            f"system-provided shared libraries: {', '.join(libs_with_versions)}"
        )

    if winfo.sym_policy < policies.highest:
        printp(
            f'This constrains the platform tag to "{winfo.sym_policy.name}". '
            "In order to achieve a more compatible tag, you would "
            "need to recompile a new wheel from source on a system "
            "with earlier versions of these libraries, such as "
            "a recent manylinux image."
        )
    if args.verbose < 1:
        return 0

    libs = winfo.external_refs[policies.lowest.name].libs
    if len(libs) == 0:
        printp("The wheel requires no external shared libraries! :)")
    else:
        printp("The following external shared libraries are required by the wheel:")
        print(json.dumps(dict(sorted(libs.items()))))

    # For every stricter policy, list what would have to be removed to
    # qualify for it.
    for p in policies:
        if p > winfo.overall_policy:
            libs = winfo.external_refs[p.name].libs
            if len(libs):
                printp(
                    f"In order to achieve the tag platform tag {p.name!r} "
                    "the following shared library dependencies "
                    "will need to be eliminated:"
                )
                printp(", ".join(sorted(libs.keys())))
            blacklist = winfo.external_refs[p.name].blacklist
            if len(blacklist):
                printp(
                    f"In order to achieve the tag platform tag {p.name!r} "
                    "the following black-listed symbol dependencies "
                    "will need to be eliminated:"
                )
                for key in sorted(blacklist.keys()):
                    printp(f"From {key}: " + ", ".join(sorted(blacklist[key])))
    return 0
import json 33 | from .error import NonPlatformWheel, WheelToolsError 34 | from .wheel_abi import analyze_wheel_abi 35 | from .wheeltools import get_wheel_architecture, get_wheel_libc 36 | 37 | wheel_file: Path = args.WHEEL_FILE 38 | fn = wheel_file.name 39 | 40 | if not wheel_file.is_file(): 41 | parser.error(f"cannot access {wheel_file}. No such file") 42 | 43 | fn = wheel_file.name 44 | try: 45 | arch = get_wheel_architecture(fn) 46 | except (WheelToolsError, NonPlatformWheel): 47 | logger.warning("The architecture could not be deduced from the wheel filename") 48 | arch = None 49 | 50 | try: 51 | libc = get_wheel_libc(fn) 52 | except WheelToolsError: 53 | logger.debug("The libc could not be deduced from the wheel filename") 54 | libc = None 55 | 56 | try: 57 | winfo = analyze_wheel_abi( 58 | libc, arch, wheel_file, frozenset(), args.DISABLE_ISA_EXT_CHECK, False 59 | ) 60 | except NonPlatformWheel as e: 61 | logger.info("%s", e.message) 62 | return 1 63 | 64 | policies = winfo.policies 65 | 66 | libs_with_versions = [ 67 | f"{k} with versions {v}" for k, v in winfo.versioned_symbols.items() 68 | ] 69 | 70 | printp( 71 | f'{fn} is consistent with the following platform tag: "{winfo.overall_policy.name}".' 72 | ) 73 | 74 | if winfo.pyfpe_policy == policies.linux: 75 | printp( 76 | "This wheel uses the PyFPE_jbuf function, which is not compatible with the" 77 | " manylinux/musllinux tags. (see https://www.python.org/dev/peps/pep-0513/" 78 | "#fpectl-builds-vs-no-fpectl-builds)" 79 | ) 80 | if args.verbose < 1: 81 | return 0 82 | 83 | if winfo.ucs_policy == policies.linux: 84 | printp( 85 | "This wheel is compiled against a narrow unicode (UCS2) " 86 | "version of Python, which is not compatible with the " 87 | "manylinux/musllinux tags." 
88 | ) 89 | if args.verbose < 1: 90 | return 0 91 | 92 | if winfo.machine_policy == policies.linux: 93 | printp("This wheel depends on unsupported ISA extensions.") 94 | if args.verbose < 1: 95 | return 0 96 | 97 | if len(libs_with_versions) == 0: 98 | printp( 99 | "The wheel references no external versioned symbols from " 100 | "system-provided shared libraries." 101 | ) 102 | else: 103 | printp( 104 | "The wheel references external versioned symbols in these " 105 | f"system-provided shared libraries: {', '.join(libs_with_versions)}" 106 | ) 107 | 108 | if winfo.sym_policy < policies.highest: 109 | printp( 110 | f'This constrains the platform tag to "{winfo.sym_policy.name}". ' 111 | "In order to achieve a more compatible tag, you would " 112 | "need to recompile a new wheel from source on a system " 113 | "with earlier versions of these libraries, such as " 114 | "a recent manylinux image." 115 | ) 116 | if args.verbose < 1: 117 | return 0 118 | 119 | libs = winfo.external_refs[policies.lowest.name].libs 120 | if len(libs) == 0: 121 | printp("The wheel requires no external shared libraries! 
:)") 122 | else: 123 | printp("The following external shared libraries are required by the wheel:") 124 | print(json.dumps(dict(sorted(libs.items())))) 125 | 126 | for p in policies: 127 | if p > winfo.overall_policy: 128 | libs = winfo.external_refs[p.name].libs 129 | if len(libs): 130 | printp( 131 | f"In order to achieve the tag platform tag {p.name!r} " 132 | "the following shared library dependencies " 133 | "will need to be eliminated:" 134 | ) 135 | printp(", ".join(sorted(libs.keys()))) 136 | blacklist = winfo.external_refs[p.name].blacklist 137 | if len(blacklist): 138 | printp( 139 | f"In order to achieve the tag platform tag {p.name!r} " 140 | "the following black-listed symbol dependencies " 141 | "will need to be eliminated:" 142 | ) 143 | for key in sorted(blacklist.keys()): 144 | printp(f"From {key}: " + ", ".join(sorted(blacklist[key]))) 145 | return 0 146 | -------------------------------------------------------------------------------- /src/auditwheel/patcher.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import re 4 | from itertools import chain 5 | from pathlib import Path 6 | from shutil import which 7 | from subprocess import CalledProcessError, check_call, check_output 8 | 9 | 10 | class ElfPatcher: 11 | def replace_needed(self, file_name: Path, *old_new_pairs: tuple[str, str]) -> None: 12 | raise NotImplementedError() 13 | 14 | def remove_needed(self, file_name: Path, *sonames: str) -> None: 15 | raise NotImplementedError() 16 | 17 | def set_soname(self, file_name: Path, new_so_name: str) -> None: 18 | raise NotImplementedError() 19 | 20 | def set_rpath(self, file_name: Path, rpath: str) -> None: 21 | raise NotImplementedError() 22 | 23 | def get_rpath(self, file_name: Path) -> str: 24 | raise NotImplementedError() 25 | 26 | 27 | def _verify_patchelf() -> None: 28 | """This function looks for the ``patchelf`` external binary in the PATH, 29 | checks for the 
required version, and throws an exception if a proper 30 | version can't be found. Otherwise, silence is golden 31 | """ 32 | if not which("patchelf"): 33 | msg = "Cannot find required utility `patchelf` in PATH" 34 | raise ValueError(msg) 35 | try: 36 | version = check_output(["patchelf", "--version"]).decode("utf-8") 37 | except CalledProcessError: 38 | msg = "Could not call `patchelf` binary" 39 | raise ValueError(msg) from None 40 | 41 | m = re.match(r"patchelf\s+(\d+(.\d+)?)", version) 42 | if m and tuple(int(x) for x in m.group(1).split(".")) >= (0, 14): 43 | return 44 | msg = f"patchelf {version} found. auditwheel repair requires patchelf >= 0.14." 45 | raise ValueError(msg) 46 | 47 | 48 | class Patchelf(ElfPatcher): 49 | def __init__(self) -> None: 50 | _verify_patchelf() 51 | 52 | def replace_needed(self, file_name: Path, *old_new_pairs: tuple[str, str]) -> None: 53 | check_call( 54 | [ 55 | "patchelf", 56 | *chain.from_iterable( 57 | ("--replace-needed", *pair) for pair in old_new_pairs 58 | ), 59 | file_name, 60 | ] 61 | ) 62 | 63 | def remove_needed(self, file_name: Path, *sonames: str) -> None: 64 | check_call( 65 | [ 66 | "patchelf", 67 | *chain.from_iterable(("--remove-needed", soname) for soname in sonames), 68 | file_name, 69 | ] 70 | ) 71 | 72 | def set_soname(self, file_name: Path, new_so_name: str) -> None: 73 | check_call(["patchelf", "--set-soname", new_so_name, file_name]) 74 | 75 | def set_rpath(self, file_name: Path, rpath: str) -> None: 76 | check_call(["patchelf", "--remove-rpath", file_name]) 77 | check_call(["patchelf", "--force-rpath", "--set-rpath", rpath, file_name]) 78 | 79 | def get_rpath(self, file_name: Path) -> str: 80 | return ( 81 | check_output(["patchelf", "--print-rpath", file_name]) 82 | .decode("utf-8") 83 | .strip() 84 | ) 85 | -------------------------------------------------------------------------------- /src/auditwheel/policy/musllinux-policy.json: 
-------------------------------------------------------------------------------- 1 | [ 2 | {"name": "linux", 3 | "aliases": [], 4 | "priority": 0, 5 | "symbol_versions": {}, 6 | "lib_whitelist": [], 7 | "blacklist": {} 8 | }, 9 | {"name": "musllinux_1_1", 10 | "aliases": [], 11 | "priority": 100, 12 | "symbol_versions": { 13 | "i686": { 14 | }, 15 | "x86_64": { 16 | }, 17 | "aarch64": { 18 | }, 19 | "ppc64le": { 20 | }, 21 | "s390x": { 22 | }, 23 | "armv7l": { 24 | }, 25 | "riscv64": { 26 | } 27 | }, 28 | "lib_whitelist": ["libc.so", "libz.so.1"], 29 | "blacklist": { 30 | "libz.so.1": ["_dist_code", "_length_code", "_tr_align", "_tr_flush_block", "_tr_init", "_tr_stored_block", "_tr_tally", "bi_windup", "crc32_vpmsum", "crc_fold_512to32", "crc_fold_copy", "crc_fold_init", "deflate_copyright", "deflate_medium", "fill_window", "flush_pending", "gzflags", "inflate_copyright", "inflate_fast", "inflate_table", "longest_match", "slide_hash_sse", "static_ltree", "uncompress2", "x86_check_features", "x86_cpu_has_pclmul", "x86_cpu_has_sse2", "x86_cpu_has_sse42", "z_errmsg", "zcalloc", "zcfree"] 31 | }}, 32 | {"name": "musllinux_1_2", 33 | "aliases": [], 34 | "priority": 90, 35 | "symbol_versions": { 36 | "i686": { 37 | }, 38 | "x86_64": { 39 | }, 40 | "aarch64": { 41 | }, 42 | "ppc64le": { 43 | }, 44 | "s390x": { 45 | }, 46 | "armv7l": { 47 | }, 48 | "riscv64": { 49 | }, 50 | "loongarch64": { 51 | } 52 | }, 53 | "lib_whitelist": ["libc.so", "libz.so.1"], 54 | "blacklist": { 55 | "libz.so.1": ["_dist_code", "_length_code", "_tr_align", "_tr_flush_block", "_tr_init", "_tr_stored_block", "_tr_tally", "bi_windup", "crc32_vpmsum", "crc_fold_512to32", "crc_fold_copy", "crc_fold_init", "deflate_copyright", "deflate_medium", "fill_window", "flush_pending", "gzflags", "inflate_copyright", "inflate_fast", "inflate_table", "longest_match", "slide_hash_sse", "static_ltree", "uncompress2", "x86_check_features", "x86_cpu_has_pclmul", "x86_cpu_has_sse2", "x86_cpu_has_sse42", "z_errmsg", 
"zcalloc", "zcfree"] 56 | }} 57 | ] 58 | -------------------------------------------------------------------------------- /src/auditwheel/repair.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import itertools 4 | import logging 5 | import os 6 | import platform 7 | import re 8 | import shutil 9 | import stat 10 | from collections.abc import Iterable 11 | from os.path import isabs 12 | from pathlib import Path 13 | from subprocess import check_call 14 | 15 | from auditwheel.patcher import ElfPatcher 16 | 17 | from .elfutils import elf_read_dt_needed, elf_read_rpaths 18 | from .hashfile import hashfile 19 | from .lddtree import LIBPYTHON_RE 20 | from .policy import get_replace_platforms 21 | from .tools import is_subdir, unique_by_index 22 | from .wheel_abi import WheelAbIInfo 23 | from .wheeltools import InWheelCtx, add_platforms 24 | 25 | logger = logging.getLogger(__name__) 26 | 27 | # Copied from wheel 0.31.1 28 | WHEEL_INFO_RE = re.compile( 29 | r"""^(?P(?P.+?)-(?P\d.*?))(-(?P\d.*?))? 30 | -(?P[a-z].+?)-(?P.+?)-(?P.+?)(\.whl|\.dist-info)$""", 31 | re.VERBOSE, 32 | ).match 33 | 34 | 35 | def repair_wheel( 36 | wheel_abi: WheelAbIInfo, 37 | wheel_path: Path, 38 | abis: list[str], 39 | lib_sdir: str, 40 | out_dir: Path, 41 | update_tags: bool, 42 | patcher: ElfPatcher, 43 | strip: bool, 44 | zip_compression_level: int, 45 | ) -> Path | None: 46 | external_refs_by_fn = wheel_abi.full_external_refs 47 | # Do not repair a pure wheel, i.e. 
def strip_symbols(libraries: Iterable[Path]) -> None:
    """Run ``strip -s`` on every library in *libraries*."""
    for library in libraries:
        logger.info("Stripping symbols from %s", library)
        check_call(["strip", "-s", library])


def copylib(src_path: Path, dest_dir: Path, patcher: ElfPatcher) -> tuple[str, Path]:
    """Graft a shared library from the system into the wheel and update the
    relevant links.

    1) Copy the file from src_path to dest_dir/
    2) Rename the shared object from soname to soname.<unique>
    3) If the library has a RUNPATH/RPATH, clear it and set RPATH to point to
    its new location.

    Returns the new soname and the path of the copy inside the wheel.
    """
    # A short content hash makes the vendored name unique per library build.
    with open(src_path, "rb") as f:
        shorthash = hashfile(f)[:8]

    src_name = src_path.name
    base, ext = src_name.split(".", 1)
    new_soname = (
        src_name if base.endswith(f"-{shorthash}") else f"{base}-{shorthash}.{ext}"
    )

    dest_path = dest_dir / new_soname
    if dest_path.exists():
        # Same content hash already grafted -- nothing more to do.
        return new_soname, dest_path

    logger.debug("Grafting: %s -> %s", src_path, dest_path)
    # Read the original RPATH/RUNPATH entries before touching the file.
    rpaths = elf_read_rpaths(src_path)
    shutil.copy2(src_path, dest_path)

    # Ensure the copy is writable so patchelf can modify it.
    statinfo = dest_path.stat()
    if not statinfo.st_mode & stat.S_IWRITE:
        os.chmod(dest_path, statinfo.st_mode | stat.S_IWRITE)

    patcher.set_soname(dest_path, new_soname)

    # If the library carried any RPATH/RUNPATH, re-point it at its new home.
    if any(itertools.chain(rpaths["rpaths"], rpaths["runpaths"])):
        patcher.set_rpath(dest_path, "$ORIGIN")

    return new_soname, dest_path
149 | 150 | with open(src_path, "rb") as f: 151 | shorthash = hashfile(f)[:8] 152 | 153 | src_name = src_path.name 154 | base, ext = src_name.split(".", 1) 155 | if not base.endswith(f"-{shorthash}"): 156 | new_soname = f"{base}-{shorthash}.{ext}" 157 | else: 158 | new_soname = src_name 159 | 160 | dest_path = dest_dir / new_soname 161 | if dest_path.exists(): 162 | return new_soname, dest_path 163 | 164 | logger.debug("Grafting: %s -> %s", src_path, dest_path) 165 | rpaths = elf_read_rpaths(src_path) 166 | shutil.copy2(src_path, dest_path) 167 | statinfo = dest_path.stat() 168 | if not statinfo.st_mode & stat.S_IWRITE: 169 | os.chmod(dest_path, statinfo.st_mode | stat.S_IWRITE) 170 | 171 | patcher.set_soname(dest_path, new_soname) 172 | 173 | if any(itertools.chain(rpaths["rpaths"], rpaths["runpaths"])): 174 | patcher.set_rpath(dest_path, "$ORIGIN") 175 | 176 | return new_soname, dest_path 177 | 178 | 179 | def append_rpath_within_wheel( 180 | lib_name: Path, rpath: str, wheel_base_dir: Path, patcher: ElfPatcher 181 | ) -> None: 182 | """Add a new rpath entry to a file while preserving as many existing 183 | rpath entries as possible. 184 | 185 | In order to preserve an rpath entry it must: 186 | 187 | 1) Point to a location within wheel_base_dir. 188 | 2) Not be a duplicate of an already-existing rpath entry. 
189 | """ 190 | if not lib_name.is_absolute(): 191 | lib_name = lib_name.absolute() 192 | lib_dir = lib_name.parent 193 | if not wheel_base_dir.is_absolute(): 194 | wheel_base_dir = wheel_base_dir.absolute() 195 | 196 | def is_valid_rpath(rpath: str) -> bool: 197 | return _is_valid_rpath(rpath, lib_dir, wheel_base_dir) 198 | 199 | old_rpaths = patcher.get_rpath(lib_name) 200 | rpaths = list(filter(is_valid_rpath, old_rpaths.split(":"))) 201 | rpaths = unique_by_index([*rpaths, rpath]) 202 | patcher.set_rpath(lib_name, ":".join(rpaths)) 203 | 204 | 205 | def _is_valid_rpath(rpath: str, lib_dir: Path, wheel_base_dir: Path) -> bool: 206 | full_rpath_entry = _resolve_rpath_tokens(rpath, lib_dir) 207 | if not isabs(full_rpath_entry): 208 | logger.debug( 209 | "rpath entry %s could not be resolved to an absolute path -- discarding it.", 210 | rpath, 211 | ) 212 | return False 213 | if not is_subdir(full_rpath_entry, wheel_base_dir): 214 | logger.debug("rpath entry %s points outside the wheel -- discarding it.", rpath) 215 | return False 216 | logger.debug("Preserved rpath entry %s", rpath) 217 | return True 218 | 219 | 220 | def _resolve_rpath_tokens(rpath: str, lib_base_dir: Path) -> str: 221 | # See https://www.man7.org/linux/man-pages/man8/ld.so.8.html#DESCRIPTION 222 | system_lib_dir = "lib64" if platform.architecture()[0] == "64bit" else "lib" 223 | system_processor_type = platform.machine() 224 | token_replacements = { 225 | "ORIGIN": str(lib_base_dir), 226 | "LIB": system_lib_dir, 227 | "PLATFORM": system_processor_type, 228 | } 229 | for token, target in token_replacements.items(): 230 | rpath = rpath.replace(f"${token}", target) # $TOKEN 231 | rpath = rpath.replace(f"${{{token}}}", target) # ${TOKEN} 232 | return rpath 233 | 234 | 235 | def _path_is_script(path: Path) -> bool: 236 | # Looks something like "uWSGI-2.0.21.data/scripts/uwsgi" 237 | components = path.parts 238 | return ( 239 | len(components) == 3 240 | and components[0].endswith(".data") 241 | and 
components[1] == "scripts" 242 | ) 243 | 244 | 245 | def _replace_elf_script_with_shim(package_name: str, orig_path: Path) -> Path: 246 | """Move an ELF script and replace it with a shim. 247 | 248 | We can't directly rewrite the RPATH of ELF executables in the "scripts" 249 | directory since scripts aren't installed to a consistent relative path to 250 | platlib files. 251 | 252 | Instead, we move the executable into a special directory in platlib and put 253 | a shim script in its place which execs the real executable. 254 | 255 | More context: https://github.com/pypa/auditwheel/issues/340 256 | 257 | Returns the new path of the moved executable. 258 | """ 259 | scripts_dir = Path(f"{package_name}.scripts") 260 | scripts_dir.mkdir(exist_ok=True) 261 | 262 | new_path = scripts_dir / orig_path.name 263 | os.rename(orig_path, new_path) 264 | 265 | with open(orig_path, "w", newline="\n") as f: 266 | f.write(_script_shim(new_path)) 267 | os.chmod(orig_path, os.stat(new_path).st_mode) 268 | 269 | return new_path 270 | 271 | 272 | def _script_shim(binary_path: Path) -> str: 273 | return f"""\ 274 | #!python 275 | import os 276 | import sys 277 | import sysconfig 278 | 279 | 280 | if __name__ == "__main__": 281 | os.execv( 282 | os.path.join(sysconfig.get_path("platlib"), {binary_path.as_posix()!r}), 283 | sys.argv, 284 | ) 285 | """ 286 | -------------------------------------------------------------------------------- /src/auditwheel/tmpdirs.py: -------------------------------------------------------------------------------- 1 | """Contexts for *with* statement providing temporary directories""" 2 | 3 | from __future__ import annotations 4 | 5 | import os 6 | from pathlib import Path 7 | from tempfile import TemporaryDirectory 8 | from types import TracebackType 9 | 10 | 11 | class InTemporaryDirectory: 12 | """Create, return, and change directory to a temporary directory 13 | 14 | Examples 15 | -------- 16 | >>> from pathlib import Path 17 | >>> my_cwd = Path.cwd() 18 
| >>> with InTemporaryDirectory() as tmpdir: 19 | ... _ = open('test.txt', 'wt').write('some text') 20 | ... assert os.path.isfile('test.txt') 21 | ... assert tmpdir.joinpath('test.txt').is_file() 22 | >>> tmpdir.exists() 23 | False 24 | >>> Path.cwd() == my_cwd 25 | True 26 | """ 27 | 28 | def __init__(self) -> None: 29 | self._tmpdir = TemporaryDirectory() 30 | self._name = Path(self._tmpdir.name).resolve(strict=True) 31 | 32 | @property 33 | def name(self) -> Path: 34 | return self._name 35 | 36 | def __enter__(self) -> Path: 37 | self._pwd = Path.cwd() 38 | os.chdir(self._name) 39 | self._tmpdir.__enter__() 40 | return self._name 41 | 42 | def __exit__( 43 | self, 44 | exc: type[BaseException] | None, 45 | value: BaseException | None, 46 | tb: TracebackType | None, 47 | ) -> None: 48 | os.chdir(self._pwd) 49 | self._tmpdir.__exit__(exc, value, tb) 50 | 51 | 52 | class InGivenDirectory: 53 | """Change directory to given directory for duration of ``with`` block 54 | 55 | Useful when you want to use `InTemporaryDirectory` for the final test, but 56 | you are still debugging. For example, you may want to do this in the end: 57 | 58 | >>> with InTemporaryDirectory() as tmpdir: 59 | ... # do something complicated which might break 60 | ... pass 61 | 62 | But indeed the complicated thing does break, and meanwhile the 63 | ``InTemporaryDirectory`` context manager wiped out the directory with the 64 | temporary files that you wanted for debugging. So, while debugging, you 65 | replace with something like: 66 | 67 | >>> with InGivenDirectory() as tmpdir: # Use working directory by default 68 | ... # do something complicated which might break 69 | ... pass 70 | 71 | You can then look at the temporary file outputs to debug what is happening, 72 | fix, and finally replace ``InGivenDirectory`` with ``InTemporaryDirectory`` 73 | again. 
74 | """ 75 | 76 | def __init__(self, path: Path | None = None) -> None: 77 | """Initialize directory context manager 78 | 79 | Parameters 80 | ---------- 81 | path : None or Path, optional 82 | path to change directory to, for duration of ``with`` block. 83 | Defaults to ``Path.cwd()`` if None 84 | """ 85 | if path is None: 86 | path = Path.cwd() 87 | self.name = path.absolute() 88 | 89 | def __enter__(self) -> Path: 90 | self._pwd = Path.cwd() 91 | if not self.name.is_dir(): 92 | self.name.mkdir() 93 | os.chdir(self.name) 94 | return self.name 95 | 96 | def __exit__( 97 | self, 98 | exc: type[BaseException] | None, 99 | value: BaseException | None, 100 | tb: TracebackType | None, 101 | ) -> None: 102 | os.chdir(self._pwd) 103 | -------------------------------------------------------------------------------- /src/auditwheel/tools.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import argparse 4 | import logging 5 | import os 6 | import subprocess 7 | import zipfile 8 | from collections.abc import Generator, Iterable 9 | from datetime import datetime, timezone 10 | from pathlib import Path 11 | from typing import Any, TypeVar 12 | 13 | _T = TypeVar("_T") 14 | 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | def unique_by_index(sequence: Iterable[_T]) -> list[_T]: 19 | """unique elements in `sequence` in the order in which they occur 20 | 21 | Parameters 22 | ---------- 23 | sequence : iterable 24 | 25 | Returns 26 | ------- 27 | uniques : list 28 | unique elements of sequence, ordered by the order in which the element 29 | occurs in `sequence` 30 | """ 31 | uniques = [] 32 | for element in sequence: 33 | if element not in uniques: 34 | uniques.append(element) 35 | return uniques 36 | 37 | 38 | def walk(topdir: Path) -> Generator[tuple[Path, list[str], list[str]]]: 39 | """Wrapper for `os.walk` with outputs in reproducible order 40 | 41 | Parameters 42 | ---------- 43 | 
def zip2dir(zip_fname: Path, out_dir: Path) -> None:
    """Extract `zip_fname` into output directory `out_dir`

    Restores Unix permission bits recorded in the archive, since
    ``ZipFile.extract`` does not do so itself.

    Parameters
    ----------
    zip_fname : Path
        Filename of the zip archive to extract
    out_dir : Path
        Directory to extract the archive's contents into
    """
    start = datetime.now()
    with zipfile.ZipFile(zip_fname, "r") as z:
        for name in z.namelist():
            member = z.getinfo(name)
            extracted_path = z.extract(member, out_dir)
            # The upper 16 bits of external_attr carry the Unix st_mode
            # bits when the archive was created on a Unix-like system.
            attr = member.external_attr >> 16
            if member.is_dir():
                # this is always rebuilt as 755 by dir2zip
                os.chmod(extracted_path, 0o755)
            elif attr != 0:
                attr &= 0o777  # only keep permission bits
                attr |= 0o600  # at least read/write for current user
                os.chmod(extracted_path, attr)
    logger.debug(
        "zip2dir from %s to %s takes %s", zip_fname, out_dir, datetime.now() - start
    )
class EnvironmentDefault(argparse.Action):
    """argparse action that lets an environment variable supply the default.

    If the environment variable `env` is set, its value becomes the
    argument's default (after optional `type` conversion and `choices`
    validation), and the argument is no longer required.  The parameter
    names `type` and `choices` shadow builtins deliberately: they must
    match the keyword names argparse passes through ``add_argument``.
    """

    def __init__(
        self,
        env: str,
        required: bool = True,
        default: str | None = None,
        choices: Iterable[str] | None = None,
        type: type | None = None,
        **kwargs: Any,
    ) -> None:
        # Raw (string) value from the environment, or None if unset.
        self.env_default = os.environ.get(env)
        self.env = env
        if self.env_default:
            if type:
                try:
                    # Convert the env string with the same callable argparse
                    # would apply to a command-line value.
                    self.env_default = type(self.env_default)
                except Exception:
                    # ArgumentError needs option_strings set before the
                    # base __init__ has run, so set it by hand here.
                    self.option_strings = kwargs["option_strings"]
                    args = {
                        "value": self.env_default,
                        "type": type,
                        "env": self.env,
                    }
                    msg = (
                        "invalid type: %(value)r from environment variable "
                        "%(env)r cannot be converted to %(type)r"
                    )
                    raise argparse.ArgumentError(self, msg % args) from None
            # Environment value takes precedence over any passed default.
            default = self.env_default
        # Validate the (possibly converted) env value against choices, just
        # as argparse would validate a command-line value.
        if (
            self.env_default is not None
            and choices is not None
            and self.env_default not in choices
        ):
            self.option_strings = kwargs["option_strings"]
            args = {
                "value": self.env_default,
                "choices": ", ".join(map(repr, choices)),
                "env": self.env,
            }
            msg = (
                "invalid choice: %(value)r from environment variable "
                "%(env)r (choose from %(choices)s)"
            )
            raise argparse.ArgumentError(self, msg % args)

        # A usable default (from either source) makes the option optional.
        if default is not None:
            required = False

        super().__init__(
            default=default, required=required, choices=choices, type=type, **kwargs
        )

    def __call__(
        self,
        parser: argparse.ArgumentParser,  # noqa: ARG002
        namespace: argparse.Namespace,
        values: Any,
        option_string: str | None = None,  # noqa: ARG002
    ) -> None:
        # Plain store action: command-line values override the env default.
        setattr(namespace, self.dest, values)
def rewrite_record(bdist_dir: Path) -> None:
    """Regenerate the RECORD file for the unpacked wheel in `bdist_dir`

    Mirrors :meth:`wheel.bdist_wheel.bdist_wheel.write_record`: every file
    in the tree gets a sha256 hash and size entry, except RECORD itself,
    whose hash and size columns are left blank.

    Any detached signature (``RECORD.jws``) is deleted, because rewriting
    the record invalidates it.

    Parameters
    ----------
    bdist_dir : Path
        Path of unpacked wheel file
    """
    info_dir = _dist_info_dir(bdist_dir)
    record_path = info_dir / "RECORD"
    record_relpath = record_path.relative_to(bdist_dir)
    # Unsign the wheel - we are about to invalidate the record hash.
    signature_path = info_dir / "RECORD.jws"
    if signature_path.exists():
        signature_path.unlink()

    with open(record_path, "w+", newline="", encoding="utf-8") as record_file:
        writer = csv.writer(record_file)
        # `walk` yields entries in reproducible order, RECORD last.
        for dirpath, _, filenames in walk(bdist_dir):
            for filename in filenames:
                full_path = dirpath / filename
                rel_path = full_path.relative_to(bdist_dir)
                if rel_path == record_relpath:
                    # RECORD never hashes itself.
                    hash_field = ""
                    size_field = ""
                else:
                    contents = full_path.read_bytes()
                    raw_digest = hashlib.sha256(contents).digest()
                    b64 = urlsafe_b64encode(raw_digest).rstrip(b"=").decode("ascii")
                    hash_field = f"sha256={b64}"
                    size_field = f"{len(contents)}"
                writer.writerow((rel_path.as_posix(), hash_field, size_field))
    def __exit__(
        self,
        exc: type[BaseException] | None,
        value: BaseException | None,
        tb: TracebackType | None,
    ) -> None:
        """Repack the wheel to ``out_wheel`` (if one was requested) and leave
        the temporary directory.
        """
        if self.out_wheel is not None:
            # File contents may have changed, so the RECORD hashes must be
            # regenerated before packing.
            rewrite_record(self.name)
            date_time = None
            # Honor the SOURCE_DATE_EPOCH reproducible-builds convention:
            # when set, all archive members get this fixed timestamp.
            timestamp = os.environ.get("SOURCE_DATE_EPOCH")
            if timestamp:
                date_time = datetime.fromtimestamp(int(timestamp), tz=timezone.utc)
            dir2zip(self.name, self.out_wheel, self.zip_compression_level, date_time)
        # Restores the previous working directory and removes the tmpdir.
        return super().__exit__(exc, value, tb)
def add_platforms(
    wheel_ctx: InWheelCtx, platforms: list[str], remove_platforms: Iterable[str] = ()
) -> Path:
    """Add platform tags `platforms` to a wheel

    Add any platform tags in `platforms` that are missing
    to wheel_ctx's filename and ``WHEEL`` file.

    Parameters
    ----------
    wheel_ctx : InWheelCtx
        An open wheel context
    platforms : list
        platform tags to add to wheel filename and WHEEL tags - e.g.
        ``('macosx_10_9_intel', 'macosx_10_9_x86_64')``
    remove_platforms : iterable
        platform tags to remove from the wheel filename and WHEEL tags, e.g.
        ``('linux_x86_64',)`` when ``('manylinux_x86_64')`` is added

    Returns
    -------
    Path
        Path of the output wheel reflecting the updated platform tags
        (in the output wheel's directory if one was set, else the cwd).
    """
    if wheel_ctx.path is None:
        msg = "This function should be called from wheel_ctx context manager"
        raise ValueError(msg)

    to_remove = list(remove_platforms)  # we might want to modify this, make a copy

    definitely_not_purelib = False

    info_fname = _dist_info_dir(wheel_ctx.path) / "WHEEL"
    info = read_pkg_info(info_fname)
    # Check what tags we have
    if wheel_ctx.out_wheel is not None:
        out_dir = wheel_ctx.out_wheel.parent
        wheel_fname = wheel_ctx.out_wheel.name
    else:
        out_dir = Path.cwd()
        wheel_fname = wheel_ctx.in_wheel.name

    # --- filename tags -----------------------------------------------------
    _, _, _, in_tags = parse_wheel_filename(wheel_fname)
    original_fname_tags = sorted({tag.platform for tag in in_tags})
    logger.info("Previous filename tags: %s", ", ".join(original_fname_tags))
    fname_tags = [tag for tag in original_fname_tags if tag not in to_remove]
    fname_tags = unique_by_index(fname_tags + platforms)

    # Can't be 'any' and another platform
    if "any" in fname_tags and len(fname_tags) > 1:
        fname_tags.remove("any")
        to_remove.append("any")
        definitely_not_purelib = True

    if fname_tags != original_fname_tags:
        logger.info("New filename tags: %s", ", ".join(fname_tags))
    else:
        logger.info("No filename tags change needed.")

    # rsplit on the last '-' peels the platform component off the filename;
    # the rest (name-version-python-abi) is kept verbatim as the prefix.
    fparts = {
        "prefix": wheel_fname.rsplit("-", maxsplit=1)[0],
        "plat": ".".join(sorted(fname_tags)),
        "ext": splitext(wheel_fname)[1],
    }
    out_wheel_fname = "{prefix}-{plat}{ext}".format(**fparts)
    out_wheel = out_dir / out_wheel_fname

    # --- WHEEL metadata tags ----------------------------------------------
    in_info_tags = [tag for name, tag in info.items() if name == "Tag"]
    logger.info("Previous WHEEL info tags: %s", ", ".join(in_info_tags))
    # Python version, C-API version combinations
    pyc_apis = ["-".join(tag.split("-")[:2]) for tag in in_info_tags]
    # unique Python version, C-API version combinations
    pyc_apis = unique_by_index(pyc_apis)
    # Add new platform tags for each Python version, C-API combination
    wanted_tags = ["-".join(tup) for tup in product(pyc_apis, platforms)]
    new_tags = [tag for tag in wanted_tags if tag not in in_info_tags]
    unwanted_tags = ["-".join(tup) for tup in product(pyc_apis, to_remove)]
    updated_tags = [tag for tag in in_info_tags if tag not in unwanted_tags]
    updated_tags += new_tags
    if updated_tags != in_info_tags:
        # Replace the whole Tag header list in one go.
        del info["Tag"]
        for tag in updated_tags:
            info.add_header("Tag", tag)

        if definitely_not_purelib:
            info["Root-Is-Purelib"] = "False"
            logger.info("Changed wheel type to Platlib")

        logger.info("New WHEEL info tags: %s", ", ".join(info.get_all("Tag")))
        write_pkg_info(info_fname, info)
    else:
        logger.info("No WHEEL info change needed.")
    return out_wheel
def get_wheel_libc(filename: str) -> Libc:
    """Infer the libc family a wheel targets from its platform tags.

    Parameters
    ----------
    filename : str
        Wheel filename whose platform tags are inspected; ``musllinux_``
        maps to musl, ``manylinux`` to glibc.

    Raises
    ------
    WheelToolsError
        If no tag identifies a libc, or tags identify more than one.
    """
    _, _, _, in_tags = parse_wheel_filename(filename)
    result: set[Libc] = set()
    for tag in in_tags:
        platform_tag = tag.platform
        if "musllinux_" in platform_tag:
            result.add(Libc.MUSL)
        if "manylinux" in platform_tag:
            result.add(Libc.GLIBC)
    if not result:
        msg = "unknown libc used"
        raise WheelToolsError(msg)
    if len(result) > 1:
        msg = f"wheels with multiple libc are not supported, got {result}"
        raise WheelToolsError(msg)
    return result.pop()
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_armv7l.whl -------------------------------------------------------------------------------- /tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_i686.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_i686.whl -------------------------------------------------------------------------------- /tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_mips64.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_mips64.whl -------------------------------------------------------------------------------- /tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_ppc64le.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_ppc64le.whl -------------------------------------------------------------------------------- /tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_riscv64.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_riscv64.whl 
-------------------------------------------------------------------------------- /tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_s390x.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_s390x.whl -------------------------------------------------------------------------------- /tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_x86_64.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_x86_64.whl -------------------------------------------------------------------------------- /tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_aarch64.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_aarch64.whl -------------------------------------------------------------------------------- /tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_armv6l.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_armv6l.whl -------------------------------------------------------------------------------- /tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_armv7l.whl: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_armv7l.whl -------------------------------------------------------------------------------- /tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_i686.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_i686.whl -------------------------------------------------------------------------------- /tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_ppc64le.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_ppc64le.whl -------------------------------------------------------------------------------- /tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_riscv64.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_riscv64.whl -------------------------------------------------------------------------------- /tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_s390x.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_s390x.whl -------------------------------------------------------------------------------- 
/tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_x86_64.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/arch-wheels/musllinux_1_2/testsimple-0.0.1-cp312-cp312-linux_x86_64.whl -------------------------------------------------------------------------------- /tests/integration/cffi-1.5.0-cp27-none-linux_x86_64.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/cffi-1.5.0-cp27-none-linux_x86_64.whl -------------------------------------------------------------------------------- /tests/integration/conftest.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | collect_ignore = ["quick_check_numpy.py"] 4 | -------------------------------------------------------------------------------- /tests/integration/foo.f90: -------------------------------------------------------------------------------- 1 | 2 | 3 | subroutine selectedrealkind(p, r, res) 4 | implicit none 5 | 6 | integer, intent(in) :: p, r 7 | !f2py integer :: r=0 8 | integer, intent(out) :: res 9 | res = selected_real_kind(p, r) 10 | 11 | end subroutine 12 | 13 | subroutine selectedintkind(p, res) 14 | implicit none 15 | 16 | integer, intent(in) :: p 17 | integer, intent(out) :: res 18 | res = selected_int_kind(p) 19 | 20 | end subroutine 21 | -------------------------------------------------------------------------------- /tests/integration/fpewheel-0.0.0-cp35-cp35m-linux_x86_64.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/fpewheel-0.0.0-cp35-cp35m-linux_x86_64.whl 
-------------------------------------------------------------------------------- /tests/integration/internal_rpath/MANIFEST.in: -------------------------------------------------------------------------------- 1 | include pyproject.toml 2 | graft lib 3 | -------------------------------------------------------------------------------- /tests/integration/internal_rpath/Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: all clean 2 | 3 | all: lib-src/b/libb.so lib-src/a/liba.so 4 | 5 | clean: 6 | -rm -f lib-src/b/libb.so lib-src/a/liba.so 7 | 8 | lib-src/b/libb.so: lib-src/b/b.c 9 | gcc -fPIC -shared -o lib-src/b/libb.so lib-src/b/b.c 10 | 11 | 12 | lib-src/a/liba.so: lib-src/a/a.c 13 | gcc -fPIC -shared -o lib-src/a/liba.so lib-src/a/a.c 14 | -------------------------------------------------------------------------------- /tests/integration/internal_rpath/internal_rpath/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/internal_rpath/internal_rpath/__init__.py -------------------------------------------------------------------------------- /tests/integration/internal_rpath/lib-src/a/a.c: -------------------------------------------------------------------------------- 1 | int fa(void) { 2 | return 11; 3 | } 4 | -------------------------------------------------------------------------------- /tests/integration/internal_rpath/lib-src/a/a.h: -------------------------------------------------------------------------------- 1 | int fa(void); 2 | -------------------------------------------------------------------------------- /tests/integration/internal_rpath/lib-src/b/b.c: -------------------------------------------------------------------------------- 1 | int fb(void) { 2 | return 10; 3 | } 4 | 
-------------------------------------------------------------------------------- /tests/integration/internal_rpath/lib-src/b/b.h: -------------------------------------------------------------------------------- 1 | int fb(void); 2 | -------------------------------------------------------------------------------- /tests/integration/internal_rpath/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["cython", "setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /tests/integration/internal_rpath/setup.cfg: -------------------------------------------------------------------------------- 1 | [build_ext] 2 | cython_c_in_temp = 1 3 | -------------------------------------------------------------------------------- /tests/integration/internal_rpath/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import Extension, find_packages, setup 4 | 5 | package_name = "internal_rpath" 6 | setup( 7 | name=package_name, 8 | version="0.0.1", 9 | description="Auditwheel multiple top-level extensions example", 10 | package_data={package_name: ["liba.so"]}, 11 | packages=find_packages(), 12 | ext_modules=[ 13 | Extension( 14 | f"{package_name}.example_a", 15 | ["src/example_a.pyx"], 16 | include_dirs=["lib-src/a"], 17 | library_dirs=[package_name], 18 | libraries=["a"], 19 | extra_link_args=["-Wl,-rpath,$ORIGIN"], 20 | ), 21 | Extension( 22 | f"{package_name}.example_b", 23 | ["src/example_b.pyx"], 24 | include_dirs=["lib-src/b"], 25 | library_dirs=["lib-src/b"], 26 | libraries=["b"], 27 | ), 28 | ], 29 | ) 30 | -------------------------------------------------------------------------------- /tests/integration/internal_rpath/src/example_a.pyx: 
-------------------------------------------------------------------------------- 1 | cdef extern from "a.h": 2 | int fa(); 3 | 4 | 5 | cpdef int example_a(): 6 | return fa() 7 | -------------------------------------------------------------------------------- /tests/integration/internal_rpath/src/example_b.pyx: -------------------------------------------------------------------------------- 1 | cdef extern from "b.h": 2 | int fb(); 3 | 4 | 5 | cpdef int example_b(): 6 | return fb() 7 | -------------------------------------------------------------------------------- /tests/integration/libffi.so.5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/libffi.so.5 -------------------------------------------------------------------------------- /tests/integration/multiple_top_level/MANIFEST.in: -------------------------------------------------------------------------------- 1 | include pyproject.toml 2 | graft lib 3 | -------------------------------------------------------------------------------- /tests/integration/multiple_top_level/Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: all clean 2 | 3 | all: lib-src/b/libb.so lib-src/a/liba.so 4 | 5 | clean: 6 | -rm -f lib-src/b/libb.so lib-src/a/liba.so 7 | 8 | lib-src/b/libb.so: lib-src/b/b.c 9 | gcc -fPIC -shared -o lib-src/b/libb.so lib-src/b/b.c 10 | 11 | 12 | lib-src/a/liba.so: lib-src/b/libb.so lib-src/a/a.c 13 | gcc -fPIC -shared -o lib-src/a/liba.so -Ilib-src/b lib-src/a/a.c -Llib-src/b -lb 14 | -------------------------------------------------------------------------------- /tests/integration/multiple_top_level/lib-src/a/a.c: -------------------------------------------------------------------------------- 1 | #include "b.h" 2 | 3 | 4 | int fa(void) { 5 | return 1 + fb(); 6 | } 7 | 
-------------------------------------------------------------------------------- /tests/integration/multiple_top_level/lib-src/a/a.h: -------------------------------------------------------------------------------- 1 | int fa(void); 2 | -------------------------------------------------------------------------------- /tests/integration/multiple_top_level/lib-src/b/b.c: -------------------------------------------------------------------------------- 1 | int fb(void) { 2 | return 10; 3 | } 4 | -------------------------------------------------------------------------------- /tests/integration/multiple_top_level/lib-src/b/b.h: -------------------------------------------------------------------------------- 1 | int fb(void); 2 | -------------------------------------------------------------------------------- /tests/integration/multiple_top_level/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["cython", "setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /tests/integration/multiple_top_level/setup.cfg: -------------------------------------------------------------------------------- 1 | [build_ext] 2 | cython_c_in_temp = 1 3 | -------------------------------------------------------------------------------- /tests/integration/multiple_top_level/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import Extension, find_packages, setup 4 | 5 | setup( 6 | name="multiple_top_level", 7 | version="0.0.1", 8 | description="Auditwheel multiple top-level extensions example", 9 | packages=find_packages(where="src"), 10 | ext_modules=[ 11 | Extension( 12 | "example_a", 13 | ["src/example_a.pyx"], 14 | include_dirs=["lib-src/a"], 15 | library_dirs=["lib-src/a", "lib-src/b"], 16 | libraries=["a"], 17 | ), 18 | 
Extension( 19 | "example_b", 20 | ["src/example_b.pyx"], 21 | include_dirs=["lib-src/a"], 22 | library_dirs=["lib-src/a", "lib-src/b"], 23 | libraries=["a"], 24 | ), 25 | ], 26 | ) 27 | -------------------------------------------------------------------------------- /tests/integration/multiple_top_level/src/example_a.pyx: -------------------------------------------------------------------------------- 1 | cdef extern from "a.h": 2 | int fa(); 3 | 4 | 5 | cpdef int example_a(): 6 | return fa() 7 | -------------------------------------------------------------------------------- /tests/integration/multiple_top_level/src/example_b.pyx: -------------------------------------------------------------------------------- 1 | cdef extern from "a.h": 2 | int fa(); 3 | 4 | 5 | cpdef int example_b(): 6 | return fa() * 10 7 | -------------------------------------------------------------------------------- /tests/integration/nonpy_rpath/README.md: -------------------------------------------------------------------------------- 1 | # Python 3 extension with non-Python library dependency 2 | 3 | This example was inspired from https://gist.github.com/physacco/2e1b52415f3a964ad2a542a99bebed8f 4 | 5 | This test extension builds two libraries: `_nonpy_rpath.*.so` and `lib_cryptexample.*.so`, where the `*` is a string composed of Python ABI versions and platform tags. 6 | 7 | The extension `lib_cryptexample.*.so` should be repaired by auditwheel because it is a needed library, even though it is not a Python extension. 8 | 9 | [Issue #136](https://github.com/pypa/auditwheel/issues/136) documents the underlying problem that this test case is designed to solve. 
10 | -------------------------------------------------------------------------------- /tests/integration/nonpy_rpath/extensions/testcrypt.cpp: -------------------------------------------------------------------------------- 1 | #include "testcrypt.h" 2 | #include 3 | 4 | std::string crypt_something() { 5 | char const* result = crypt("will error out", "\0"); 6 | if (result == NULL) { 7 | return std::string("*"); 8 | } 9 | return std::string(result); 10 | } 11 | -------------------------------------------------------------------------------- /tests/integration/nonpy_rpath/extensions/testcrypt.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include 3 | 4 | std::string crypt_something(void); 5 | -------------------------------------------------------------------------------- /tests/integration/nonpy_rpath/nonpy_rpath.cpp: -------------------------------------------------------------------------------- 1 | #define PY_SSIZE_T_CLEAN 2 | #include 3 | #include "extensions/testcrypt.h" 4 | 5 | // Module method definitions 6 | static PyObject* crypt_something(PyObject *self, PyObject *args) { 7 | return PyUnicode_FromString(crypt_something().c_str()); 8 | } 9 | 10 | /* Module initialization */ 11 | PyMODINIT_FUNC PyInit__nonpy_rpath(void) 12 | { 13 | static PyMethodDef module_methods[] = { 14 | {"crypt_something", (PyCFunction)crypt_something, METH_NOARGS, "crypt_something."}, 15 | {NULL} /* Sentinel */ 16 | }; 17 | static struct PyModuleDef moduledef = { 18 | PyModuleDef_HEAD_INIT, 19 | "_nonpy_rpath", 20 | "_nonpy_rpath module", 21 | -1, 22 | module_methods, 23 | }; 24 | return PyModule_Create(&moduledef); 25 | } 26 | -------------------------------------------------------------------------------- /tests/integration/nonpy_rpath/nonpy_rpath/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from ._nonpy_rpath import 
crypt_something 4 | 5 | __all__ = ["crypt_something"] 6 | -------------------------------------------------------------------------------- /tests/integration/nonpy_rpath/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /tests/integration/nonpy_rpath/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | 5 | import setuptools.command.build_ext 6 | from setuptools import Distribution, find_packages, setup 7 | from setuptools.extension import Extension, Library 8 | 9 | # despite its name, setuptools.command.build_ext.link_shared_object won't 10 | # link a shared object on Linux, but a static library and patches distutils 11 | # for this ... We're patching this back now. 12 | 13 | 14 | def always_link_shared_object( 15 | self, 16 | objects, 17 | output_libname, 18 | output_dir=None, 19 | libraries=None, 20 | library_dirs=None, 21 | runtime_library_dirs=None, 22 | export_symbols=None, 23 | debug=0, 24 | extra_preargs=None, 25 | extra_postargs=None, 26 | build_temp=None, 27 | target_lang=None, 28 | ): 29 | self.link( 30 | self.SHARED_LIBRARY, 31 | objects, 32 | output_libname, 33 | output_dir, 34 | libraries, 35 | library_dirs, 36 | runtime_library_dirs, 37 | export_symbols, 38 | debug, 39 | extra_preargs, 40 | extra_postargs, 41 | build_temp, 42 | target_lang, 43 | ) 44 | 45 | 46 | setuptools.command.build_ext.libtype = "shared" 47 | setuptools.command.build_ext.link_shared_object = always_link_shared_object 48 | 49 | libtype = setuptools.command.build_ext.libtype 50 | build_ext_cmd = Distribution().get_command_obj("build_ext") 51 | build_ext_cmd.initialize_options() 52 | build_ext_cmd.setup_shlib_compiler() 53 | 54 | 55 | def libname(name): 56 | 
def link_args(soname=None):
    """Return the extra linker flags for a bundled shared object.

    Always adds an ``$ORIGIN`` rpath so sibling libraries resolve at
    runtime; when *soname* is given, also pins the library's soname.
    """
    flags = []
    if soname:
        flags.append(f"-Wl,-soname,{soname}")
    flags.append("-Wl,-rpath,$ORIGIN")
    return flags
# Sample numpy program that requires some BLAS and LAPACK routines to work
# properly: forms a Gram matrix and checks its SVD is positive-definite.
from __future__ import annotations

import numpy as np

rng = np.random.RandomState(0)
X = rng.randn(500, 200)
# Gram matrix X^T X is symmetric positive (semi-)definite by construction.
XTX = X.T @ X
U, S, VT = np.linalg.svd(XTX)
if np.all(S > 0):
    print("ok")
else:
    print("[ERROR] invalid singular values:", S)
-------------------------------------------------------------------------------- /tests/integration/sample_extension/src/sample_extension.pyx: -------------------------------------------------------------------------------- 1 | def test_func(x): 2 | return _test_func(x) 3 | 4 | cdef _test_func(x): 5 | return x + 1 6 | -------------------------------------------------------------------------------- /tests/integration/test_bundled_wheels.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import importlib 4 | import os 5 | import platform 6 | import sys 7 | import zipfile 8 | from argparse import Namespace 9 | from datetime import datetime, timezone 10 | from os.path import isabs 11 | from pathlib import Path 12 | from unittest.mock import Mock 13 | 14 | import pytest 15 | 16 | from auditwheel import lddtree, main_repair 17 | from auditwheel.architecture import Architecture 18 | from auditwheel.libc import Libc 19 | from auditwheel.main import main 20 | from auditwheel.wheel_abi import NonPlatformWheel, analyze_wheel_abi 21 | 22 | HERE = Path(__file__).parent.resolve() 23 | 24 | 25 | @pytest.mark.parametrize( 26 | ("file", "external_libs", "exclude"), 27 | [ 28 | ( 29 | "cffi-1.5.0-cp27-none-linux_x86_64.whl", 30 | {"libffi.so.5", "libpython2.7.so.1.0"}, 31 | frozenset(), 32 | ), 33 | ( 34 | "cffi-1.5.0-cp27-none-linux_x86_64.whl", 35 | set(), 36 | frozenset(["libffi.so.5", "libpython2.7.so.1.0"]), 37 | ), 38 | ( 39 | "cffi-1.5.0-cp27-none-linux_x86_64.whl", 40 | {"libffi.so.5", "libpython2.7.so.1.0"}, 41 | frozenset(["libffi.so.noexist", "libnoexist.so.*"]), 42 | ), 43 | ( 44 | "cffi-1.5.0-cp27-none-linux_x86_64.whl", 45 | {"libpython2.7.so.1.0"}, 46 | frozenset(["libffi.so.[4,5]"]), 47 | ), 48 | ( 49 | "cffi-1.5.0-cp27-none-linux_x86_64.whl", 50 | {"libffi.so.5", "libpython2.7.so.1.0"}, 51 | frozenset(["libffi.so.[6,7]"]), 52 | ), 53 | ( 54 | "cffi-1.5.0-cp27-none-linux_x86_64.whl", 55 
def test_analyze_wheel_abi_pyfpe():
    """A wheel referencing PyFPE_jbuf is pinned to the plain linux policy."""
    wheel = HERE / "fpewheel-0.0.0-cp35-cp35m-linux_x86_64.whl"
    winfo = analyze_wheel_abi(
        Libc.GLIBC, Architecture.x86_64, wheel, frozenset(), False, True
    )
    # Judged by external symbols alone, the wheel would still qualify
    # for manylinux1.
    assert winfo.sym_policy.name == "manylinux_2_5_x86_64"
    # But the PyFPE_jbuf reference drags both the pyfpe policy and the
    # overall verdict down to plain linux.
    assert winfo.pyfpe_policy.name == "linux_x86_64"
    assert winfo.overall_policy.name == "linux_x86_64"
def test_analyze_wheel_abi_static_exe(caplog):
    """Libc and architecture are auto-detected when both are passed as None."""
    wheel_name = "patchelf-0.17.2.1-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.musllinux_1_1_x86_64.whl"
    winfo = analyze_wheel_abi(None, None, HERE / wheel_name, frozenset(), False, False)
    # Both fallback detections must be logged.
    assert "setting architecture to x86_64" in caplog.text
    assert "couldn't detect wheel libc, defaulting to" in caplog.text
    assert winfo.policies.architecture == Architecture.x86_64
    # The detected libc decides which policy family the wheel lands in.
    if Libc.detect() == Libc.MUSL:
        assert winfo.policies.libc == Libc.MUSL
        assert winfo.overall_policy.name.startswith("musllinux_1_")
    else:
        assert winfo.policies.libc == Libc.GLIBC
        assert winfo.overall_policy.name == "manylinux_2_5_x86_64"
UPDATE_TAGS=True, 172 | WHEEL_DIR=wheel_output_path, 173 | WHEEL_FILE=[wheel_path], 174 | EXCLUDE=[], 175 | DISABLE_ISA_EXT_CHECK=False, 176 | ZIP_COMPRESSION_LEVEL=6, 177 | cmd="repair", 178 | func=Mock(), 179 | prog="auditwheel", 180 | verbose=1, 181 | ) 182 | 183 | monkeypatch.setenv("SOURCE_DATE_EPOCH", str(timestamp[0])) 184 | main_repair.execute(args, Mock()) 185 | output_wheel, *_ = list(wheel_output_path.glob("*.whl")) 186 | with zipfile.ZipFile(output_wheel) as wheel_file: 187 | for file in wheel_file.infolist(): 188 | file_date_time = datetime(*file.date_time, tzinfo=timezone.utc) 189 | assert file_date_time.timestamp() == timestamp[1] 190 | 191 | 192 | def test_libpython(tmp_path, caplog): 193 | wheel = HERE / "python_mscl-67.0.1.0-cp313-cp313-manylinux2014_aarch64.whl" 194 | args = Namespace( 195 | LIB_SDIR=".libs", 196 | ONLY_PLAT=False, 197 | PLAT="auto", 198 | STRIP=False, 199 | UPDATE_TAGS=True, 200 | WHEEL_DIR=tmp_path, 201 | WHEEL_FILE=[wheel], 202 | EXCLUDE=[], 203 | DISABLE_ISA_EXT_CHECK=False, 204 | ZIP_COMPRESSION_LEVEL=6, 205 | cmd="repair", 206 | func=Mock(), 207 | prog="auditwheel", 208 | verbose=0, 209 | ) 210 | main_repair.execute(args, Mock()) 211 | assert ( 212 | "Removing libpython3.13.so.1.0 dependency from python_mscl/_mscl.so" 213 | in caplog.text 214 | ) 215 | assert tuple(path.name for path in tmp_path.glob("*.whl")) == ( 216 | "python_mscl-67.0.1.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_31_aarch64.whl", 217 | ) 218 | -------------------------------------------------------------------------------- /tests/integration/test_glibcxx_3_4_25/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /tests/integration/test_glibcxx_3_4_25/setup.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import Extension, setup 4 | 5 | setup( 6 | name="testentropy", 7 | version="0.0.1", 8 | ext_modules=[ 9 | Extension( 10 | "testentropy", 11 | language="c++", 12 | sources=["testentropy.cpp"], 13 | extra_compile_args=["-std=c++11"], 14 | ), 15 | ], 16 | ) 17 | -------------------------------------------------------------------------------- /tests/integration/test_glibcxx_3_4_25/testentropy.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | static PyObject * 5 | run(PyObject *self, PyObject *args) 6 | { 7 | (void)self; 8 | (void)args; 9 | std::random_device rd; 10 | return PyLong_FromLong(rd.entropy() >= 0.0 ? 0 : -1); 11 | } 12 | 13 | /* Module initialization */ 14 | PyMODINIT_FUNC PyInit_testentropy(void) 15 | { 16 | static PyMethodDef module_methods[] = { 17 | {"run", (PyCFunction)run, METH_NOARGS, "run."}, 18 | {NULL} /* Sentinel */ 19 | }; 20 | static struct PyModuleDef moduledef = { 21 | PyModuleDef_HEAD_INIT, 22 | "testentropy", 23 | "testentropy module", 24 | -1, 25 | module_methods, 26 | }; 27 | return PyModule_Create(&moduledef); 28 | } 29 | -------------------------------------------------------------------------------- /tests/integration/test_nonplatform_wheel.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pathlib 4 | import subprocess 5 | 6 | import pytest 7 | 8 | from auditwheel.architecture import Architecture 9 | 10 | HERE = pathlib.Path(__file__).parent.resolve() 11 | 12 | 13 | @pytest.mark.parametrize("mode", ["repair", "show"]) 14 | def test_non_platform_wheel_pure(mode): 15 | wheel = HERE / "plumbum-1.6.8-py2.py3-none-any.whl" 16 | proc = subprocess.run( 17 | ["auditwheel", mode, str(wheel)], 18 | stderr=subprocess.PIPE, 19 | text=True, 20 | 
@pytest.mark.parametrize("mode", ["repair", "show"])
@pytest.mark.parametrize(
    "arch", ["aarch64", "armv7l", "i686", "x86_64", "ppc64le", "s390x"]
)
def test_non_platform_wheel_bad_arch(mode, arch, tmp_path):
    """A wheel tagged for the host arch but containing foreign-arch ELF fails."""
    host_arch = Architecture.detect().value
    if host_arch == arch:
        pytest.skip("host architecture")
    # Symlink the foreign-arch wheel under a filename that claims the host
    # architecture, so only the ELF contents betray the mismatch.
    src = (
        HERE / "arch-wheels" / "glibc" / f"testsimple-0.0.1-cp313-cp313-linux_{arch}.whl"
    )
    fake = tmp_path / f"{src.stem}_{host_arch}.whl"
    fake.symlink_to(src)
    result = subprocess.run(
        ["auditwheel", mode, str(fake)],
        stderr=subprocess.PIPE,
        text=True,
        check=False,
    )
    assert result.returncode == 1
    assert "Invalid binary wheel: no ELF executable or" in result.stderr
    assert f"{arch} architecture" in result.stderr
    assert "AttributeError" not in result.stderr
/tests/integration/testdependencies/dependency.c: -------------------------------------------------------------------------------- 1 | #include "dependency.h" 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #if defined(__GLIBC_PREREQ) 11 | #if __GLIBC_PREREQ(2, 28) 12 | #include 13 | #endif 14 | #endif 15 | 16 | int dep_run() 17 | { 18 | #if defined(__GLIBC_PREREQ) 19 | #if __GLIBC_PREREQ(2, 34) 20 | // pthread_mutexattr_init was moved to libc.so.6 in manylinux_2_34+ 21 | pthread_mutexattr_t attr; 22 | int sts = pthread_mutexattr_init(&attr); 23 | if (sts == 0) { 24 | pthread_mutexattr_destroy(&attr); 25 | } 26 | return sts; 27 | #elif __GLIBC_PREREQ(2, 30) 28 | return gettid() == getpid() ? 0 : 1; 29 | #elif __GLIBC_PREREQ(2, 28) 30 | return thrd_equal(thrd_current(), thrd_current()) ? 0 : 1; 31 | #elif __GLIBC_PREREQ(2, 24) 32 | return (int)nextupf(0.0F); 33 | #elif __GLIBC_PREREQ(2, 17) 34 | return (int)(intptr_t)secure_getenv("NON_EXISTING_ENV_VARIABLE"); 35 | #elif __GLIBC_PREREQ(2, 10) 36 | return malloc_info(0, stdout); 37 | #else 38 | return 0; 39 | #endif 40 | #else // !defined(__GLIBC_PREREQ) 41 | return 0; 42 | #endif 43 | } 44 | -------------------------------------------------------------------------------- /tests/integration/testdependencies/dependency.h: -------------------------------------------------------------------------------- 1 | int dep_run(void); 2 | -------------------------------------------------------------------------------- /tests/integration/testdependencies/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /tests/integration/testdependencies/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ 
class BuildExt(build_ext):
    """build_ext variant that first compiles ``libdependency.so`` with gcc."""

    def run(self) -> None:
        # Produce the shared library the extension may link against before
        # handing over to the regular build_ext machinery.
        argv = ["gcc", "-shared", "-fPIC", "-D_GNU_SOURCE"]
        argv.extend(extra_compile_args)
        argv.extend(["dependency.c", "-o", "libdependency.so", "-lm", "-lc"])
        subprocess.check_call(argv)
        super().run()
(void)args; 29 | 30 | #ifdef WITH_DEPENDENCY 31 | res = dep_run(); 32 | #elif defined(__GLIBC_PREREQ) 33 | #if __GLIBC_PREREQ(2, 34) 34 | // pthread_mutexattr_init was moved to libc.so.6 in manylinux_2_34+ 35 | pthread_mutexattr_t attr; 36 | res = pthread_mutexattr_init(&attr); 37 | if (res == 0) { 38 | pthread_mutexattr_destroy(&attr); 39 | } 40 | #elif __GLIBC_PREREQ(2, 30) 41 | res = gettid() == getpid() ? 0 : 1; 42 | #elif __GLIBC_PREREQ(2, 28) 43 | res = thrd_equal(thrd_current(), thrd_current()) ? 0 : 1; 44 | #elif __GLIBC_PREREQ(2, 24) 45 | res = (int)nextupf(0.0F); 46 | #elif __GLIBC_PREREQ(2, 17) 47 | res = (int)(intptr_t)secure_getenv("NON_EXISTING_ENV_VARIABLE"); 48 | #elif __GLIBC_PREREQ(2, 10) 49 | res = malloc_info(0, stdout); 50 | #else 51 | res = 0; 52 | #endif 53 | #else // !defined(__GLIBC_PREREQ) 54 | res = 0; 55 | #endif 56 | return PyLong_FromLong(res + tres); 57 | } 58 | 59 | static PyObject * 60 | set_tres(PyObject *self, PyObject *args) 61 | { 62 | (void)self; 63 | (void)args; 64 | tres = 1; 65 | return PyLong_FromLong(tres); 66 | } 67 | 68 | /* Module initialization */ 69 | PyMODINIT_FUNC PyInit_testdependencies(void) 70 | { 71 | static PyMethodDef module_methods[] = { 72 | {"run", (PyCFunction)run, METH_NOARGS, "run."}, 73 | {"set_tres", (PyCFunction)set_tres, METH_NOARGS, "set_tres."}, 74 | {NULL} /* Sentinel */ 75 | }; 76 | static struct PyModuleDef moduledef = { 77 | PyModuleDef_HEAD_INIT, 78 | "testdependencies", 79 | "testdependencies module", 80 | -1, 81 | module_methods, 82 | }; 83 | return PyModule_Create(&moduledef); 84 | } 85 | -------------------------------------------------------------------------------- /tests/integration/testpackage/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/testpackage/__init__.py 
-------------------------------------------------------------------------------- /tests/integration/testpackage/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /tests/integration/testpackage/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import subprocess 4 | 5 | from setuptools import setup 6 | 7 | subprocess.check_call( 8 | ( 9 | "gcc", 10 | "testpackage/testprogram.c", 11 | "-lgsl", 12 | "-lgslcblas", 13 | "-o", 14 | "testpackage/testprogram", 15 | ) 16 | ) 17 | subprocess.check_call( 18 | ("gcc", "testpackage/testprogram_nodeps.c", "-o", "testpackage/testprogram_nodeps") 19 | ) 20 | 21 | setup( 22 | name="testpackage", 23 | version="0.0.1", 24 | packages=["testpackage"], 25 | package_data={"testpackage": ["testprogram", "testprogram_nodeps"]}, 26 | # This places these files at a path like 27 | # "testpackage-0.0.1.data/scripts/testprogram", which is needed to test 28 | # rewriting ELF binaries installed into the scripts directory. 29 | # 30 | # Note that using scripts=[] doesn't work here since setuptools expects the 31 | # scripts to be text and tries to decode them using UTF-8. 
def runit(x):
    """Run the installed ``testprogram`` script on *x*.

    Locates the two data-file scripts shipped with the ``testpackage``
    distribution, picks ``testprogram``, and returns its stdout as a float.
    """
    dist = distribution("testpackage")
    # Every directory any install scheme may place scripts into.
    script_dirs = {
        os.path.abspath(sysconfig.get_path("scripts", scheme))
        for scheme in sysconfig.get_scheme_names()
    }
    scripts = [
        f.locate().resolve(strict=True)
        for f in dist.files
        if os.path.abspath(str(f.locate().parent)) in script_dirs
    ]
    assert len(scripts) == 2, scripts
    program = next(s for s in scripts if s.stem == "testprogram")
    return float(subprocess.check_output([program, str(x)]))
*/ 2 | 3 | #include 4 | #include 5 | #include 6 | 7 | int main(int argc, char **argv) 8 | { 9 | double x; 10 | char *startptr, *endptr; 11 | 12 | if (argc != 2) 13 | { 14 | fputs("Expected exactly one command line argument\n", stderr); 15 | return EXIT_FAILURE; 16 | } 17 | 18 | startptr = argv[1]; 19 | endptr = NULL; 20 | x = strtod(startptr, &endptr); 21 | 22 | if (startptr == endptr) 23 | { 24 | fputs("Expected command line argument to be a float\n", stderr); 25 | return EXIT_FAILURE; 26 | } 27 | 28 | x = gsl_pow_2(x); 29 | printf("%g\n", x); 30 | return EXIT_SUCCESS; 31 | } 32 | -------------------------------------------------------------------------------- /tests/integration/testpackage/testpackage/testprogram_nodeps.c: -------------------------------------------------------------------------------- 1 | /* A simple example program to square a number using no shared libraries. */ 2 | 3 | #include 4 | #include 5 | 6 | int main(int argc, char **argv) 7 | { 8 | int x; 9 | 10 | if (argc != 2) 11 | { 12 | fputs("Expected exactly one command line argument\n", stderr); 13 | return EXIT_FAILURE; 14 | } 15 | 16 | x = atoi(argv[1]); 17 | printf("%d\n", x*x); 18 | return EXIT_SUCCESS; 19 | } 20 | -------------------------------------------------------------------------------- /tests/integration/testrpath/MANIFEST.in: -------------------------------------------------------------------------------- 1 | graft a 2 | graft b 3 | -------------------------------------------------------------------------------- /tests/integration/testrpath/a/a.c: -------------------------------------------------------------------------------- 1 | #include "b.h" 2 | 3 | 4 | int fa(void) { 5 | return 1 + fb(); 6 | } 7 | -------------------------------------------------------------------------------- /tests/integration/testrpath/a/a.h: -------------------------------------------------------------------------------- 1 | int fa(void); 2 | 
-------------------------------------------------------------------------------- /tests/integration/testrpath/b/b.c: -------------------------------------------------------------------------------- 1 | int fb(void) { 2 | return 10; 3 | } 4 | -------------------------------------------------------------------------------- /tests/integration/testrpath/b/b.h: -------------------------------------------------------------------------------- 1 | int fb(void); 2 | -------------------------------------------------------------------------------- /tests/integration/testrpath/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /tests/integration/testrpath/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import subprocess 5 | 6 | from setuptools import Extension, setup 7 | from setuptools.command.build_ext import build_ext 8 | 9 | 10 | class BuildExt(build_ext): 11 | def run(self) -> None: 12 | cmd = "gcc -fPIC -shared -o b/libb.so b/b.c" 13 | subprocess.check_call(cmd.split()) 14 | cmd = ( 15 | "gcc -fPIC -shared -o a/liba.so " 16 | "-Wl,{dtags_flag} -Wl,-rpath=$ORIGIN/../b " 17 | "-Ib a/a.c -Lb -lb" 18 | ).format( 19 | dtags_flag=( 20 | "--enable-new-dtags" 21 | if os.getenv("DTAG") == "runpath" 22 | else "--disable-new-dtags" 23 | ) 24 | ) 25 | subprocess.check_call(cmd.split()) 26 | super().run() 27 | 28 | 29 | setup( 30 | name="testrpath", 31 | version="0.0.1", 32 | packages=["testrpath"], 33 | package_dir={"": "src"}, 34 | cmdclass={"build_ext": BuildExt}, 35 | ext_modules=[ 36 | Extension( 37 | "testrpath/testrpath", 38 | sources=["src/testrpath/testrpath.c"], 39 | include_dirs=["a"], 40 | libraries=["a"], 41 | library_dirs=["a"], 42 | ) 43 | ], 44 | 
) 45 | -------------------------------------------------------------------------------- /tests/integration/testrpath/src/testrpath/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/integration/testrpath/src/testrpath/__init__.py -------------------------------------------------------------------------------- /tests/integration/testrpath/src/testrpath/testrpath.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include "a.h" 3 | 4 | static PyObject * 5 | func(PyObject *self, PyObject *args) 6 | { 7 | int res; 8 | 9 | (void)self; 10 | (void)args; 11 | 12 | res = fa(); 13 | return PyLong_FromLong(res); 14 | } 15 | 16 | /* Module initialization */ 17 | PyMODINIT_FUNC PyInit_testrpath(void) 18 | { 19 | static PyMethodDef module_methods[] = { 20 | {"func", (PyCFunction)func, METH_NOARGS, "func."}, 21 | {NULL} /* Sentinel */ 22 | }; 23 | static struct PyModuleDef moduledef = { 24 | PyModuleDef_HEAD_INIT, 25 | "testrpath", 26 | "testrpath module", 27 | -1, 28 | module_methods, 29 | }; 30 | return PyModule_Create(&moduledef); 31 | } 32 | -------------------------------------------------------------------------------- /tests/integration/testsimple/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /tests/integration/testsimple/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import Extension, setup 4 | 5 | setup( 6 | name="testsimple", 7 | version="0.0.1", 8 | ext_modules=[Extension("testsimple", sources=["testsimple.c"])], 9 | ) 10 | 
-------------------------------------------------------------------------------- /tests/integration/testsimple/testsimple.c: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | static PyObject * 4 | run(PyObject *self, PyObject *args) 5 | { 6 | (void)self; 7 | (void)args; 8 | return PyLong_FromLong(0); 9 | } 10 | 11 | /* Module initialization */ 12 | PyMODINIT_FUNC PyInit_testsimple(void) 13 | { 14 | static PyMethodDef module_methods[] = { 15 | {"run", (PyCFunction)run, METH_NOARGS, "run."}, 16 | {NULL} /* Sentinel */ 17 | }; 18 | static struct PyModuleDef moduledef = { 19 | PyModuleDef_HEAD_INIT, 20 | "testsimple", 21 | "testsimple module", 22 | -1, 23 | module_methods, 24 | }; 25 | return PyModule_Create(&moduledef); 26 | } 27 | -------------------------------------------------------------------------------- /tests/integration/testzlib/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /tests/integration/testzlib/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import Extension, setup 4 | 5 | define_macros = [("_GNU_SOURCE", None)] 6 | libraries = ["z", "c"] 7 | 8 | setup( 9 | name="testzlib", 10 | version="0.0.1", 11 | ext_modules=[ 12 | Extension( 13 | "testzlib", 14 | sources=["testzlib.c"], 15 | define_macros=define_macros, 16 | libraries=libraries, 17 | ) 18 | ], 19 | ) 20 | -------------------------------------------------------------------------------- /tests/integration/testzlib/testzlib.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | 5 | 6 | static PyObject * 7 | run(PyObject *self, PyObject *args) 8 | { 
9 | int res; 10 | 11 | (void)self; 12 | (void)args; 13 | 14 | #if defined(__GLIBC_PREREQ) && __GLIBC_PREREQ(2, 24) 15 | res = gzflags() != 0; 16 | #elif defined(__GLIBC_PREREQ) && __GLIBC_PREREQ(2, 17) 17 | /* blacklist ineffective on manylinux 2014 */ 18 | res = 0; 19 | #elif defined(__GLIBC_PREREQ) 20 | { 21 | void* memory = zcalloc(NULL, 1U, 1U); 22 | res = (memory != NULL); 23 | zcfree(NULL, memory); 24 | } 25 | #else 26 | res = 0; 27 | #endif 28 | return PyLong_FromLong(res); 29 | } 30 | 31 | /* Module initialization */ 32 | PyMODINIT_FUNC PyInit_testzlib(void) 33 | { 34 | static PyMethodDef module_methods[] = { 35 | {"run", (PyCFunction)run, METH_NOARGS, "run."}, 36 | {NULL} /* Sentinel */ 37 | }; 38 | static struct PyModuleDef moduledef = { 39 | PyModuleDef_HEAD_INIT, 40 | "testzlib", 41 | "testzlib module", 42 | -1, 43 | module_methods, 44 | }; 45 | return PyModule_Create(&moduledef); 46 | } 47 | -------------------------------------------------------------------------------- /tests/unit/test-permissions.zip.xz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pypa/auditwheel/8b411219cbf4a7c8a081b8a323810b78f9a3b2e2/tests/unit/test-permissions.zip.xz -------------------------------------------------------------------------------- /tests/unit/test_architecture.py: -------------------------------------------------------------------------------- 1 | import platform 2 | import struct 3 | import sys 4 | 5 | import pytest 6 | 7 | from auditwheel.architecture import Architecture 8 | 9 | 10 | @pytest.mark.parametrize( 11 | ("sys_platform", "reported_arch", "expected_arch"), 12 | [ 13 | ("linux", "armv7l", Architecture.armv7l), 14 | ("linux", "armv8l", Architecture.armv7l), 15 | ("linux", "aarch64", Architecture.armv7l), 16 | ("linux", "i686", Architecture.i686), 17 | ("linux", "x86_64", Architecture.i686), 18 | ("win32", "x86", Architecture.i686), 19 | ("win32", "AMD64", Architecture.i686), 20 | ], 
21 | ) 22 | def test_32bits_arch_name(sys_platform, reported_arch, expected_arch, monkeypatch): 23 | monkeypatch.setattr(sys, "platform", sys_platform) 24 | monkeypatch.setattr(platform, "machine", lambda: reported_arch) 25 | machine = Architecture.detect(bits=32) 26 | assert machine == expected_arch 27 | 28 | 29 | @pytest.mark.parametrize( 30 | ("sys_platform", "reported_arch", "expected_arch"), 31 | [ 32 | ("linux", "armv8l", Architecture.aarch64), 33 | ("linux", "aarch64", Architecture.aarch64), 34 | ("linux", "ppc64le", Architecture.ppc64le), 35 | ("linux", "i686", Architecture.x86_64), 36 | ("linux", "x86_64", Architecture.x86_64), 37 | ("darwin", "arm64", Architecture.aarch64), 38 | ("darwin", "x86_64", Architecture.x86_64), 39 | ("win32", "ARM64", Architecture.aarch64), 40 | ("win32", "AMD64", Architecture.x86_64), 41 | ], 42 | ) 43 | def test_64bits_arch_name(sys_platform, reported_arch, expected_arch, monkeypatch): 44 | monkeypatch.setattr(sys, "platform", sys_platform) 45 | monkeypatch.setattr(platform, "machine", lambda: reported_arch) 46 | machine = Architecture.detect(bits=64) 47 | assert machine == expected_arch 48 | 49 | 50 | @pytest.mark.parametrize( 51 | ("maxsize", "sizeof_voidp", "expected"), 52 | [ 53 | # 64-bit 54 | (9223372036854775807, 8, Architecture.x86_64), 55 | # 32-bit 56 | (2147483647, 4, Architecture.i686), 57 | # 64-bit w/ 32-bit sys.maxsize: GraalPy, IronPython, Jython 58 | (2147483647, 8, Architecture.x86_64), 59 | ], 60 | ) 61 | def test_arch_name_bits(maxsize, sizeof_voidp, expected, monkeypatch): 62 | def _calcsize(fmt): 63 | assert fmt == "P" 64 | return sizeof_voidp 65 | 66 | monkeypatch.setattr(platform, "machine", lambda: "x86_64") 67 | monkeypatch.setattr(sys, "maxsize", maxsize) 68 | monkeypatch.setattr(struct, "calcsize", _calcsize) 69 | machine = Architecture.detect() 70 | assert machine == expected 71 | 72 | 73 | @pytest.mark.parametrize( 74 | ("smaller", "larger"), 75 | [ 76 | (Architecture.x86_64, 
Architecture.x86_64_v4), 77 | (Architecture.x86_64, Architecture.x86_64), 78 | (Architecture.x86_64, Architecture.x86_64_v2), 79 | (Architecture.x86_64_v2, Architecture.x86_64_v3), 80 | (Architecture.x86_64_v3, Architecture.x86_64_v4), 81 | ], 82 | ) 83 | def test_order_valid(smaller, larger): 84 | assert smaller.is_subset(larger) 85 | assert larger.is_superset(smaller) 86 | 87 | 88 | @pytest.mark.parametrize( 89 | ("smaller", "larger"), 90 | [ 91 | (Architecture.x86_64, Architecture.x86_64_v4), 92 | (Architecture.x86_64, Architecture.x86_64_v2), 93 | (Architecture.x86_64_v2, Architecture.x86_64_v3), 94 | (Architecture.x86_64_v3, Architecture.x86_64_v4), 95 | (Architecture.aarch64, Architecture.x86_64), 96 | (Architecture.x86_64, Architecture.aarch64), 97 | ], 98 | ) 99 | def test_order_invalid(smaller, larger): 100 | assert not smaller.is_superset(larger) 101 | assert not larger.is_subset(smaller) 102 | -------------------------------------------------------------------------------- /tests/unit/test_condatools.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | from unittest.mock import Mock, patch 5 | 6 | from auditwheel.condatools import InCondaPkg, InCondaPkgCtx 7 | 8 | 9 | @patch("auditwheel.condatools.tarbz2todir") 10 | def test_in_condapkg(_): # noqa: PT019 11 | with InCondaPkg(Path("/fakepath")): 12 | assert True 13 | 14 | 15 | @patch("auditwheel.condatools.tarbz2todir") 16 | @patch("auditwheel.condatools.open") 17 | def test_in_condapkg_context(open_mock, _): # noqa: PT019 18 | with InCondaPkgCtx(Path("/fakepath")) as conda_pkg: 19 | file_mock = Mock() 20 | file_mock.readlines.return_value = ["file1\n", "file2\n", "\n"] 21 | open_mock.return_value.__enter__.return_value = file_mock 22 | # This returns empty lines so we have count with those as well. This 23 | # might be a subtle bug in the implementation. 
24 | files = conda_pkg.iter_files() 25 | assert len(files) == 3 26 | assert "file1" in files 27 | assert "file2" in files 28 | -------------------------------------------------------------------------------- /tests/unit/test_elfpatcher.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | from subprocess import CalledProcessError 5 | from unittest.mock import call, patch 6 | 7 | import pytest 8 | 9 | from auditwheel.patcher import Patchelf 10 | 11 | 12 | @patch("auditwheel.patcher.which") 13 | def test_patchelf_unavailable(which): 14 | which.return_value = False 15 | with pytest.raises(ValueError, match="Cannot find required utility"): 16 | Patchelf() 17 | 18 | 19 | @patch("auditwheel.patcher.which") 20 | @patch("auditwheel.patcher.check_output") 21 | def test_patchelf_check_output_fail(check_output, which): 22 | which.return_value = True 23 | check_output.side_effect = CalledProcessError(1, "patchelf --version") 24 | with pytest.raises(ValueError, match="Could not call"): 25 | Patchelf() 26 | 27 | 28 | @patch("auditwheel.patcher.which") 29 | @patch("auditwheel.patcher.check_output") 30 | @pytest.mark.parametrize("version", ["0.14", "0.14.1", "0.15"]) 31 | def test_patchelf_version_check(check_output, which, version): 32 | which.return_value = True 33 | check_output.return_value.decode.return_value = f"patchelf {version}" 34 | Patchelf() 35 | 36 | 37 | @patch("auditwheel.patcher.which") 38 | @patch("auditwheel.patcher.check_output") 39 | @pytest.mark.parametrize("version", ["0.13.99", "0.13", "0.9", "0.1"]) 40 | def test_patchelf_version_check_fail(check_output, which, version): 41 | which.return_value = True 42 | check_output.return_value.decode.return_value = f"patchelf {version}" 43 | with pytest.raises(ValueError, match=f"patchelf {version} found"): 44 | Patchelf() 45 | 46 | 47 | @patch("auditwheel.patcher._verify_patchelf") 48 | 
@patch("auditwheel.patcher.check_output") 49 | @patch("auditwheel.patcher.check_call") 50 | class TestPatchElf: 51 | """ "Validate that patchelf is invoked with the correct arguments.""" 52 | 53 | def test_replace_needed_one(self, check_call, _0, _1): # noqa: PT019 54 | patcher = Patchelf() 55 | filename = Path("test.so") 56 | soname_old = "TEST_OLD" 57 | soname_new = "TEST_NEW" 58 | patcher.replace_needed(filename, (soname_old, soname_new)) 59 | check_call.assert_called_once_with( 60 | ["patchelf", "--replace-needed", soname_old, soname_new, filename] 61 | ) 62 | 63 | def test_replace_needed_multple(self, check_call, _0, _1): # noqa: PT019 64 | patcher = Patchelf() 65 | filename = Path("test.so") 66 | replacements = [ 67 | ("TEST_OLD1", "TEST_NEW1"), 68 | ("TEST_OLD2", "TEST_NEW2"), 69 | ] 70 | patcher.replace_needed(filename, *replacements) 71 | check_call.assert_called_once_with( 72 | [ 73 | "patchelf", 74 | "--replace-needed", 75 | *replacements[0], 76 | "--replace-needed", 77 | *replacements[1], 78 | filename, 79 | ] 80 | ) 81 | 82 | def test_set_soname(self, check_call, _0, _1): # noqa: PT019 83 | patcher = Patchelf() 84 | filename = Path("test.so") 85 | soname_new = "TEST_NEW" 86 | patcher.set_soname(filename, soname_new) 87 | check_call.assert_called_once_with( 88 | ["patchelf", "--set-soname", soname_new, filename] 89 | ) 90 | 91 | def test_set_rpath(self, check_call, _0, _1): # noqa: PT019 92 | patcher = Patchelf() 93 | filename = Path("test.so") 94 | patcher.set_rpath(filename, "$ORIGIN/.lib") 95 | check_call_expected_args = [ 96 | call(["patchelf", "--remove-rpath", filename]), 97 | call( 98 | ["patchelf", "--force-rpath", "--set-rpath", "$ORIGIN/.lib", filename] 99 | ), 100 | ] 101 | 102 | assert check_call.call_args_list == check_call_expected_args 103 | 104 | def test_get_rpath(self, _0, check_output, _1): # noqa: PT019 105 | patcher = Patchelf() 106 | filename = Path("test.so") 107 | check_output.return_value = b"existing_rpath" 108 | result = 
patcher.get_rpath(filename) 109 | check_output_expected_args = [call(["patchelf", "--print-rpath", filename])] 110 | 111 | assert result == check_output.return_value.decode() 112 | assert check_output.call_args_list == check_output_expected_args 113 | 114 | def test_remove_needed(self, check_call, _0, _1): # noqa: PT019 115 | patcher = Patchelf() 116 | filename = Path("test.so") 117 | soname_1 = "TEST_REM_1" 118 | soname_2 = "TEST_REM_2" 119 | patcher.remove_needed(filename, soname_1, soname_2) 120 | check_call.assert_called_once_with( 121 | [ 122 | "patchelf", 123 | "--remove-needed", 124 | soname_1, 125 | "--remove-needed", 126 | soname_2, 127 | filename, 128 | ] 129 | ) 130 | -------------------------------------------------------------------------------- /tests/unit/test_elfutils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | from typing import Any 5 | from unittest.mock import Mock, patch 6 | 7 | import pytest 8 | from elftools.common.exceptions import ELFError 9 | 10 | from auditwheel.elfutils import ( 11 | elf_file_filter, 12 | elf_find_ucs2_symbols, 13 | elf_find_versioned_symbols, 14 | elf_read_dt_needed, 15 | elf_references_PyFPE_jbuf, 16 | ) 17 | 18 | 19 | class MockSymbol(dict[str, Any]): 20 | """Mock representing a Symbol in ELFTools.""" 21 | 22 | def __init__(self, name: str, **kwargs): # type: ignore[no-untyped-def] 23 | super().__init__(**kwargs) 24 | self._name = name 25 | 26 | @property 27 | def name(self): 28 | return self._name 29 | 30 | 31 | @patch("auditwheel.elfutils.open") 32 | @patch("auditwheel.elfutils.ELFFile") 33 | class TestElfReadDt: 34 | def test_missing_section(self, elffile_mock, open_mock): 35 | # GIVEN 36 | open_mock.return_value.__enter__.return_value = Mock() 37 | elffile_mock.return_value.get_section_by_name.return_value = None 38 | 39 | # THEN 40 | with pytest.raises(ValueError, match=r"^Could not find soname.*"): 
41 | # WHEN 42 | elf_read_dt_needed(Path("/fake.so")) 43 | 44 | def test_needed_libs(self, elffile_mock, open_mock): 45 | # GIVEN 46 | open_mock.return_value.__enter__.return_value = Mock() 47 | section_mock = Mock() 48 | tag1 = Mock(needed="libz.so") 49 | tag1.entry.d_tag = "DT_NEEDED" 50 | tag2 = Mock(needed="libfoo.so") 51 | tag2.entry.d_tag = "DT_NEEDED" 52 | section_mock.iter_tags.return_value = [tag1, tag2] 53 | elffile_mock.return_value.get_section_by_name.return_value = section_mock 54 | 55 | # WHEN 56 | needed = elf_read_dt_needed(Path("/fake.so")) 57 | 58 | # THEN 59 | assert len(needed) == 2 60 | assert "libz.so" in needed 61 | assert "libfoo.so" in needed 62 | 63 | 64 | @patch("auditwheel.elfutils.open") 65 | @patch("auditwheel.elfutils.ELFFile") 66 | class TestElfFileFilter: 67 | def test_filter(self, elffile_mock, open_mock): # noqa: ARG002 68 | result = elf_file_filter([Path("file1.so"), Path("file2.so")]) 69 | assert len(list(result)) == 2 70 | 71 | def test_some_py_files(self, elffile_mock, open_mock): # noqa: ARG002 72 | result = elf_file_filter([Path("file1.py"), Path("file2.so"), Path("file3.py")]) 73 | assert len(list(result)) == 1 74 | 75 | def test_not_elf(self, elffile_mock, open_mock): # noqa: ARG002 76 | # GIVEN 77 | elffile_mock.side_effect = ELFError 78 | 79 | # WHEN 80 | result = elf_file_filter([Path("file1.notelf"), Path("file2.notelf")]) 81 | 82 | # THEN 83 | assert len(list(result)) == 0 84 | 85 | 86 | class TestElfFindVersionedSymbols: 87 | def test_find_symbols(self): 88 | # GIVEN 89 | elf = Mock() 90 | verneed = Mock() 91 | verneed.configure_mock(name="foo-lib") 92 | veraux = Mock() 93 | veraux.configure_mock(name="foo-lib") 94 | elf.get_section_by_name.return_value.iter_versions.return_value = ( 95 | (verneed, [veraux]), 96 | ) 97 | 98 | # WHEN 99 | symbols = list(elf_find_versioned_symbols(elf)) 100 | 101 | # THEN 102 | assert symbols == [("foo-lib", "foo-lib")] 103 | 104 | @pytest.mark.parametrize("ld_name", ["ld-linux", 
"ld64.so.2", "ld64.so.1"]) 105 | def test_only_ld_linux(self, ld_name): 106 | # GIVEN 107 | elf = Mock() 108 | verneed = Mock() 109 | verneed.configure_mock(name=ld_name) 110 | veraux = Mock() 111 | veraux.configure_mock(name="foo-lib") 112 | elf.get_section_by_name.return_value.iter_versions.return_value = ( 113 | (verneed, [veraux]), 114 | ) 115 | 116 | # WHEN 117 | symbols = list(elf_find_versioned_symbols(elf)) 118 | 119 | # THEN 120 | assert len(symbols) == 0 121 | 122 | def test_empty_section(self): 123 | # GIVEN 124 | elf = Mock() 125 | elf.get_section_by_name.return_value = None 126 | 127 | # WHEN 128 | symbols = list(elf_find_versioned_symbols(elf)) 129 | 130 | # THEN 131 | assert len(symbols) == 0 132 | 133 | 134 | class TestFindUcs2Symbols: 135 | def test_elf_find_ucs2_symbols(self): 136 | # GIVEN 137 | elf = Mock() 138 | 139 | asunicode = MockSymbol( 140 | "PyUnicodeUCS2_AsUnicode", 141 | st_shndx="SHN_UNDEF", 142 | st_info={"type": "STT_FUNC"}, 143 | ) 144 | elf_symbols = (asunicode, Mock()) 145 | elf_symbols[1].name = "foobar" 146 | elf.get_section_by_name.return_value.iter_symbols.return_value = elf_symbols 147 | 148 | # WHEN 149 | symbols = list(elf_find_ucs2_symbols(elf)) 150 | 151 | # THEN 152 | assert len(symbols) == 1 153 | assert symbols[0] == "PyUnicodeUCS2_AsUnicode" 154 | 155 | def test_elf_find_ucs2_symbols_no_symbol(self): 156 | # GIVEN 157 | elf = Mock() 158 | 159 | elf_symbols = (MockSymbol("FooSymbol"),) 160 | elf.get_section_by_name.return_value.iter_symbols.return_value = elf_symbols 161 | 162 | # WHEN/THEN 163 | symbols = list(elf_find_ucs2_symbols(elf)) 164 | assert len(symbols) == 0 165 | 166 | 167 | class TestElfReferencesPyPFE: 168 | def test_elf_references_pyfpe_jbuf(self): 169 | # GIVEN 170 | elf = Mock() 171 | symbols = ( 172 | MockSymbol( 173 | "PyFPE_jbuf", st_shndx="SHN_UNDEF", st_info={"type": "STT_FUNC"} 174 | ), 175 | ) 176 | 177 | elf.get_section_by_name.return_value.iter_symbols.return_value = symbols 178 | 179 | # 
WHEN/THEN 180 | assert elf_references_PyFPE_jbuf(elf) is True 181 | 182 | def test_elf_references_pyfpe_jbuf_false(self): 183 | # GIVEN 184 | elf = Mock() 185 | symbols = ( 186 | MockSymbol( 187 | "SomeSymbol", st_shndx="SHN_UNDEF", st_info={"type": "STT_FUNC"} 188 | ), 189 | ) 190 | 191 | elf.get_section_by_name.return_value.iter_symbols.return_value = symbols 192 | 193 | # WHEN/THEN 194 | assert elf_references_PyFPE_jbuf(elf) is False 195 | 196 | def test_elf_references_pyfpe_jbuf_no_section(self): 197 | # GIVEN 198 | elf = Mock() 199 | 200 | # WHEN 201 | elf.get_section_by_name.return_value = None 202 | 203 | # WHEN/THEN 204 | assert elf_references_PyFPE_jbuf(elf) is False 205 | -------------------------------------------------------------------------------- /tests/unit/test_hashfile.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from io import BytesIO 4 | 5 | from auditwheel.hashfile import hashfile 6 | 7 | 8 | def test_hash(): 9 | # GIVEN 10 | mock_file = BytesIO(b"this is a test file") 11 | 12 | # WHEN 13 | result = hashfile(mock_file) 14 | 15 | # THEN 16 | assert result == "5881707e54b0112f901bc83a1ffbacac8fab74ea46a6f706a3efc5f7d4c1c625" 17 | -------------------------------------------------------------------------------- /tests/unit/test_json.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from enum import Enum 3 | from json import loads 4 | 5 | import pytest 6 | 7 | from auditwheel.json import dumps 8 | 9 | 10 | def test_dataclass(): 11 | @dataclass(frozen=True) 12 | class Dummy: 13 | first: str = "val0" 14 | second: int = 2 15 | 16 | assert loads(dumps(Dummy())) == {"first": "val0", "second": 2} 17 | 18 | 19 | def test_enum(): 20 | class Dummy(Enum): 21 | value: str 22 | 23 | TEST = "dummy" 24 | 25 | def __repr__(self): 26 | return self.value 27 | 28 | assert Dummy.TEST.value == 
loads(dumps(Dummy.TEST)) 29 | 30 | 31 | def test_frozenset(): 32 | obj = frozenset((3, 9, 6, 5, 21)) 33 | data = loads(dumps(obj)) 34 | assert data == sorted(obj) 35 | 36 | 37 | def test_invalid_type(): 38 | class Dummy: 39 | pass 40 | 41 | with pytest.raises(TypeError): 42 | dumps(Dummy()) 43 | -------------------------------------------------------------------------------- /tests/unit/test_lddtree.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | 5 | from auditwheel.architecture import Architecture 6 | from auditwheel.lddtree import LIBPYTHON_RE, ldd 7 | from auditwheel.libc import Libc 8 | from auditwheel.tools import zip2dir 9 | 10 | HERE = Path(__file__).parent.resolve(strict=True) 11 | 12 | 13 | @pytest.mark.parametrize( 14 | "soname", 15 | [ 16 | "libpython3.7m.so.1.0", 17 | "libpython3.9.so.1.0", 18 | "libpython3.10.so.1.0", 19 | "libpython999.999.so.1.0", 20 | ], 21 | ) 22 | def test_libpython_re_match(soname: str) -> None: 23 | assert LIBPYTHON_RE.match(soname) 24 | 25 | 26 | @pytest.mark.parametrize( 27 | "soname", 28 | [ 29 | "libpython3.7m.soa1.0", 30 | "libpython3.9.so.1a0", 31 | ], 32 | ) 33 | def test_libpython_re_nomatch(soname: str) -> None: 34 | assert LIBPYTHON_RE.match(soname) is None 35 | 36 | 37 | def test_libpython(tmp_path: Path, caplog: pytest.CaptureFixture) -> None: 38 | wheel = ( 39 | HERE 40 | / ".." 
41 | / "integration" 42 | / "python_mscl-67.0.1.0-cp313-cp313-manylinux2014_aarch64.whl" 43 | ) 44 | so = tmp_path / "python_mscl" / "_mscl.so" 45 | zip2dir(wheel, tmp_path) 46 | result = ldd(so) 47 | assert "Skip libpython3.13.so.1.0 resolution" in caplog.text 48 | assert result.interpreter is None 49 | assert result.libc == Libc.GLIBC 50 | assert result.platform.baseline_architecture == Architecture.aarch64 51 | assert result.platform.extended_architecture is None 52 | assert result.path is not None 53 | assert result.realpath.samefile(so) 54 | assert result.needed == ( 55 | "libpython3.13.so.1.0", 56 | "libstdc++.so.6", 57 | "libm.so.6", 58 | "libgcc_s.so.1", 59 | "libc.so.6", 60 | "ld-linux-aarch64.so.1", 61 | ) 62 | # libpython must be present in dependencies without path 63 | libpython = result.libraries["libpython3.13.so.1.0"] 64 | assert libpython.soname == "libpython3.13.so.1.0" 65 | assert libpython.path is None 66 | assert libpython.platform is None 67 | assert libpython.realpath is None 68 | assert libpython.needed == () 69 | -------------------------------------------------------------------------------- /tests/unit/test_libc.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import subprocess 5 | from pathlib import Path 6 | from unittest.mock import patch 7 | 8 | import pytest 9 | 10 | from auditwheel.error import InvalidLibc 11 | from auditwheel.libc import Libc, LibcVersion, _find_musl_libc, _get_musl_version 12 | 13 | 14 | @patch("auditwheel.libc.Path") 15 | def test_find_musllinux_not_found(path_mock): 16 | path_mock.return_value.glob.return_value = [] 17 | with pytest.raises(InvalidLibc): 18 | _find_musl_libc() 19 | assert Libc.detect() != Libc.MUSL 20 | 21 | 22 | @patch("auditwheel.libc.Path") 23 | def test_find_musllinux_found(path_mock): 24 | path_mock.return_value.glob.return_value = ["/lib/ld-musl-dummy.so.1"] 25 | musl = _find_musl_libc() 26 | 
assert str(musl) == "/lib/ld-musl-dummy.so.1" 27 | assert Libc.detect() == Libc.MUSL 28 | 29 | 30 | def test_get_musl_version_invalid_path(): 31 | with pytest.raises(InvalidLibc): 32 | _get_musl_version(Path("/tmp/no/executable/here")) 33 | 34 | 35 | @patch("auditwheel.libc.subprocess.run") 36 | def test_get_musl_version_invalid_version(run_mock): 37 | run_mock.return_value = subprocess.CompletedProcess([], 1, None, "Version 1.1") 38 | with pytest.raises(InvalidLibc): 39 | _get_musl_version(Path("anything")) 40 | 41 | 42 | @patch("auditwheel.libc.subprocess.run") 43 | def test_get_musl_version_valid_version(run_mock): 44 | run_mock.return_value = subprocess.CompletedProcess([], 1, None, "Version 5.6.7") 45 | version = _get_musl_version(Path("anything")) 46 | assert version.major == 5 47 | assert version.minor == 6 48 | 49 | 50 | @patch("auditwheel.libc.Path") 51 | def test_detect_glibc(path_mock): 52 | path_mock.return_value.glob.return_value = [] 53 | assert Libc.detect() == Libc.GLIBC 54 | 55 | 56 | @pytest.mark.parametrize( 57 | "confstr", 58 | [ 59 | "glibc 42.42", 60 | "glibc 42.42-test", 61 | "glibc 42.42.0", 62 | "glibc 42.42~0", 63 | ], 64 | ) 65 | def test_glibc_version(monkeypatch, confstr): 66 | monkeypatch.setattr(os, "confstr", lambda _: confstr) 67 | assert Libc.GLIBC.get_current_version() == LibcVersion(42, 42) 68 | 69 | 70 | @pytest.mark.parametrize( 71 | "confstr", 72 | [ 73 | "glibc", 74 | "glibc 42.42 test", 75 | "glibc 42", 76 | "glibc 42.test", 77 | "glibc test.42", 78 | ], 79 | ) 80 | def test_bad_glibc_version(monkeypatch, confstr): 81 | monkeypatch.setattr(os, "confstr", lambda _: confstr) 82 | with pytest.raises(InvalidLibc): 83 | Libc.GLIBC.get_current_version() 84 | -------------------------------------------------------------------------------- /tests/unit/test_main.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import platform 4 | import sys 5 | 6 | 
import pytest 7 | 8 | from auditwheel.libc import Libc, LibcVersion 9 | from auditwheel.main import main 10 | 11 | on_supported_platform = pytest.mark.skipif( 12 | sys.platform != "linux", reason="requires Linux system" 13 | ) 14 | 15 | 16 | def test_unsupported_platform(monkeypatch): 17 | # GIVEN 18 | monkeypatch.setattr(sys, "platform", "unsupported_platform") 19 | 20 | # WHEN 21 | retval = main() 22 | 23 | # THEN 24 | assert retval == 1 25 | 26 | 27 | @on_supported_platform 28 | def test_help(monkeypatch, capsys): 29 | # GIVEN 30 | monkeypatch.setattr(sys, "argv", ["auditwheel"]) 31 | 32 | # WHEN 33 | retval = main() 34 | 35 | # THEN 36 | assert retval is None 37 | captured = capsys.readouterr() 38 | assert "usage: auditwheel [-h] [-V] [-v] command ..." in captured.out 39 | 40 | 41 | @pytest.mark.parametrize("function", ["show", "repair"]) 42 | def test_unexisting_wheel(monkeypatch, capsys, tmp_path, function): 43 | monkeypatch.setattr(sys, "platform", "linux") 44 | monkeypatch.setattr(platform, "machine", lambda: "x86_64") 45 | wheel = str(tmp_path / "not-a-file.whl") 46 | monkeypatch.setattr(sys, "argv", ["auditwheel", function, wheel]) 47 | 48 | with pytest.raises(SystemExit): 49 | main() 50 | 51 | captured = capsys.readouterr() 52 | assert "No such file" in captured.err 53 | 54 | 55 | @pytest.mark.parametrize( 56 | ("libc", "filename", "plat", "message"), 57 | [ 58 | ( 59 | Libc.GLIBC, 60 | "foo-1.0-py3-none-manylinux1_aarch64.whl", 61 | "manylinux_2_28_x86_64", 62 | "can't repair wheel foo-1.0-py3-none-manylinux1_aarch64.whl with aarch64 architecture to a wheel targeting x86_64", 63 | ), 64 | ( 65 | Libc.GLIBC, 66 | "foo-1.0-py3-none-musllinux_1_1_x86_64.whl", 67 | "manylinux_2_28_x86_64", 68 | "can't repair wheel foo-1.0-py3-none-musllinux_1_1_x86_64.whl with MUSL libc to a wheel targeting GLIBC", 69 | ), 70 | ( 71 | Libc.MUSL, 72 | "foo-1.0-py3-none-manylinux1_x86_64.whl", 73 | "musllinux_1_1_x86_64", 74 | "can't repair wheel 
foo-1.0-py3-none-manylinux1_x86_64.whl with GLIBC libc to a wheel targeting MUSL", 75 | ), 76 | ], 77 | ) 78 | def test_repair_wheel_mismatch( 79 | monkeypatch, capsys, tmp_path, libc, filename, plat, message 80 | ): 81 | monkeypatch.setattr(sys, "platform", "linux") 82 | monkeypatch.setattr(platform, "machine", lambda: "x86_64") 83 | monkeypatch.setattr(Libc, "detect", lambda: libc) 84 | monkeypatch.setattr(Libc, "get_current_version", lambda _: LibcVersion(1, 1)) 85 | wheel = tmp_path / filename 86 | wheel.write_text("") 87 | monkeypatch.setattr( 88 | sys, "argv", ["auditwheel", "repair", "--plat", plat, str(wheel)] 89 | ) 90 | 91 | with pytest.raises(SystemExit): 92 | main() 93 | 94 | captured = capsys.readouterr() 95 | assert message in captured.err 96 | -------------------------------------------------------------------------------- /tests/unit/test_policy.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import re 4 | from contextlib import nullcontext as does_not_raise 5 | from pathlib import Path 6 | 7 | import pytest 8 | 9 | from auditwheel.architecture import Architecture 10 | from auditwheel.lddtree import DynamicExecutable, DynamicLibrary, Platform 11 | from auditwheel.libc import Libc 12 | from auditwheel.policy import ( 13 | Policy, 14 | WheelPolicies, 15 | _validate_pep600_compliance, 16 | get_replace_platforms, 17 | ) 18 | 19 | 20 | def ids(x): 21 | if isinstance(x, Libc): 22 | return x.name 23 | if isinstance(x, does_not_raise): 24 | return "NoError" 25 | if hasattr(x, "expected_exception"): 26 | return x.expected_exception 27 | return None 28 | 29 | 30 | def raises(exception, match=None, escape=True): 31 | if escape and match is not None: 32 | match = re.escape(match) 33 | return pytest.raises(exception, match=match) 34 | 35 | 36 | @pytest.mark.parametrize( 37 | ("name", "expected"), 38 | [ 39 | ("linux_aarch64", []), 40 | ("manylinux1_ppc64le", ["linux_ppc64le"]), 
41 | ("manylinux2014_x86_64", ["linux_x86_64"]), 42 | ("manylinux_2_24_x86_64", ["linux_x86_64"]), 43 | ], 44 | ) 45 | def test_replacement_platform(name, expected): 46 | assert get_replace_platforms(name) == expected 47 | 48 | 49 | def test_pep600_compliance(): 50 | _validate_pep600_compliance( 51 | [ 52 | { 53 | "name": "manylinux1", 54 | "priority": 100, 55 | "symbol_versions": { 56 | "i686": {"CXXABI": ["1.3"]}, 57 | }, 58 | "lib_whitelist": ["libgcc_s.so.1"], 59 | }, 60 | { 61 | "name": "manylinux2010", 62 | "priority": 90, 63 | "symbol_versions": { 64 | "i686": {"CXXABI": ["1.3", "1.3.1"]}, 65 | }, 66 | "lib_whitelist": ["libgcc_s.so.1", "libstdc++.so.6"], 67 | }, 68 | ] 69 | ) 70 | 71 | _validate_pep600_compliance( 72 | [ 73 | { 74 | "name": "manylinux1", 75 | "priority": 100, 76 | "symbol_versions": { 77 | "i686": {"CXXABI": ["1.3"]}, 78 | "x86_64": {"CXXABI": ["1.3"]}, 79 | }, 80 | "lib_whitelist": ["libgcc_s.so.1"], 81 | }, 82 | { 83 | "name": "manylinux2010", 84 | "priority": 90, 85 | "symbol_versions": { 86 | "i686": {"CXXABI": ["1.3", "1.3.1"]}, 87 | }, 88 | "lib_whitelist": ["libgcc_s.so.1", "libstdc++.so.6"], 89 | }, 90 | ] 91 | ) 92 | 93 | with pytest.raises(ValueError, match=r"manylinux2010_i686.*CXXABI.*1.3.2"): 94 | _validate_pep600_compliance( 95 | [ 96 | { 97 | "name": "manylinux1", 98 | "priority": 100, 99 | "symbol_versions": { 100 | "i686": {"CXXABI": ["1.3", "1.3.2"]}, 101 | }, 102 | "lib_whitelist": ["libgcc_s.so.1"], 103 | }, 104 | { 105 | "name": "manylinux2010", 106 | "priority": 90, 107 | "symbol_versions": { 108 | "i686": {"CXXABI": ["1.3", "1.3.1"]}, 109 | }, 110 | "lib_whitelist": ["libgcc_s.so.1", "libstdc++.so.6"], 111 | }, 112 | ] 113 | ) 114 | 115 | with pytest.raises(ValueError, match=r"manylinux2010.*libstdc\+\+\.so\.6"): 116 | _validate_pep600_compliance( 117 | [ 118 | { 119 | "name": "manylinux1", 120 | "priority": 100, 121 | "symbol_versions": { 122 | "i686": {"CXXABI": ["1.3"]}, 123 | }, 124 | "lib_whitelist": 
["libgcc_s.so.1", "libstdc++.so.6"], 125 | }, 126 | { 127 | "name": "manylinux2010", 128 | "priority": 90, 129 | "symbol_versions": { 130 | "i686": {"CXXABI": ["1.3", "1.3.1"]}, 131 | }, 132 | "lib_whitelist": ["libgcc_s.so.1"], 133 | }, 134 | ] 135 | ) 136 | 137 | 138 | class TestPolicyAccess: 139 | def test_get_by_name(self): 140 | arch = Architecture.detect() 141 | policies = WheelPolicies(libc=Libc.GLIBC, arch=arch) 142 | assert policies.get_policy_by_name(f"manylinux_2_27_{arch}").priority == 65 143 | assert policies.get_policy_by_name(f"manylinux_2_24_{arch}").priority == 70 144 | assert policies.get_policy_by_name(f"manylinux2014_{arch}").priority == 80 145 | assert policies.get_policy_by_name(f"manylinux_2_17_{arch}").priority == 80 146 | if arch not in {Architecture.x86_64, Architecture.i686}: 147 | return 148 | assert policies.get_policy_by_name(f"manylinux2010_{arch}").priority == 90 149 | assert policies.get_policy_by_name(f"manylinux_2_12_{arch}").priority == 90 150 | assert policies.get_policy_by_name(f"manylinux1_{arch}").priority == 100 151 | assert policies.get_policy_by_name(f"manylinux_2_5_{arch}").priority == 100 152 | 153 | def test_get_by_name_missing(self): 154 | policies = WheelPolicies(libc=Libc.GLIBC, arch=Architecture.x86_64) 155 | with pytest.raises(LookupError): 156 | policies.get_policy_by_name("nosuchpolicy") 157 | 158 | def test_get_by_name_duplicate(self): 159 | policies = WheelPolicies(libc=Libc.GLIBC, arch=Architecture.x86_64) 160 | policy = Policy("duplicate", (), 0, {}, frozenset(), {}) 161 | policies._policies = [policy, policy] 162 | with pytest.raises(RuntimeError): 163 | policies.get_policy_by_name("duplicate") 164 | 165 | 166 | class TestLddTreeExternalReferences: 167 | """Tests for lddtree_external_references.""" 168 | 169 | def test_filter_libs(self): 170 | """Test the nested filter_libs function.""" 171 | filtered_libs = [ 172 | "ld-linux-x86_64.so.1", 173 | "ld64.so.1", 174 | "ld64.so.2", 175 | ] 176 | unfiltered_libs = 
["libfoo.so.1.0", "libbar.so.999.999.999"] 177 | libs = filtered_libs + unfiltered_libs 178 | lddtree = DynamicExecutable( 179 | interpreter=None, 180 | libc=Libc.GLIBC, 181 | path="/path/to/lib", 182 | realpath=Path("/path/to/lib"), 183 | platform=Platform( 184 | "", 64, True, "EM_X86_64", Architecture.x86_64, None, None 185 | ), 186 | needed=tuple(libs), 187 | libraries={ 188 | lib: DynamicLibrary(lib, f"/path/to/{lib}", Path(f"/path/to/{lib}")) 189 | for lib in libs 190 | }, 191 | rpath=(), 192 | runpath=(), 193 | ) 194 | policies = WheelPolicies(libc=Libc.GLIBC, arch=Architecture.x86_64) 195 | full_external_refs = policies.lddtree_external_references( 196 | lddtree, Path("/path/to/wheel") 197 | ) 198 | 199 | # Assert that each policy only has the unfiltered libs. 200 | for policy in full_external_refs: 201 | if policy.startswith("linux_"): 202 | assert set(full_external_refs[policy].libs) == set() 203 | else: 204 | assert set(full_external_refs[policy].libs) == set(unfiltered_libs) 205 | 206 | 207 | @pytest.mark.parametrize( 208 | ("libc", "musl_policy", "arch", "exception"), 209 | [ 210 | # valid 211 | (Libc.detect(), None, Architecture.detect(), does_not_raise()), 212 | (Libc.GLIBC, None, Architecture.x86_64, does_not_raise()), 213 | (Libc.MUSL, "musllinux_1_1", Architecture.x86_64, does_not_raise()), 214 | (Libc.GLIBC, None, Architecture.aarch64, does_not_raise()), 215 | # invalid 216 | ( 217 | Libc.GLIBC, 218 | "musllinux_1_1", 219 | Architecture.x86_64, 220 | raises(ValueError, "'musl_policy' shall be None"), 221 | ), 222 | ( 223 | Libc.MUSL, 224 | "manylinux_1_1", 225 | Architecture.x86_64, 226 | raises(ValueError, "Invalid 'musl_policy'"), 227 | ), 228 | (Libc.MUSL, "musllinux_5_1", Architecture.x86_64, raises(AssertionError)), 229 | (Libc.MUSL, None, Architecture.x86_64, does_not_raise()), 230 | ], 231 | ids=ids, 232 | ) 233 | def test_wheel_policies_args(libc, musl_policy, arch, exception): 234 | with exception: 235 | policies = 
WheelPolicies(libc=libc, musl_policy=musl_policy, arch=arch) 236 | assert policies.libc == libc 237 | assert policies.architecture == arch 238 | if musl_policy is not None: 239 | assert policies._musl_policy == musl_policy 240 | elif libc == Libc.MUSL: 241 | assert policies._musl_policy == "musllinux_1_2" 242 | 243 | 244 | def test_policy_checks_glibc(): 245 | policies = WheelPolicies(libc=Libc.GLIBC, arch=Architecture.x86_64) 246 | 247 | policy = policies.versioned_symbols_policy({"some_library.so": {"GLIBC_2.17"}}) 248 | assert policy > policies.linux 249 | policy = policies.versioned_symbols_policy({"some_library.so": {"GLIBC_999"}}) 250 | assert policy == policies.linux 251 | policy = policies.versioned_symbols_policy({"some_library.so": {"OPENSSL_1_1_0"}}) 252 | assert policy == policies.highest 253 | policy = policies.versioned_symbols_policy({"some_library.so": {"IAMALIBRARY"}}) 254 | assert policy == policies.highest 255 | assert policies.linux < policies.lowest < policies.highest 256 | -------------------------------------------------------------------------------- /tests/unit/test_repair.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | from unittest.mock import call, patch 5 | 6 | from auditwheel.patcher import Patchelf 7 | from auditwheel.repair import append_rpath_within_wheel 8 | 9 | 10 | @patch("auditwheel.patcher._verify_patchelf") 11 | @patch("auditwheel.patcher.check_output") 12 | @patch("auditwheel.patcher.check_call") 13 | class TestRepair: 14 | def test_append_rpath(self, check_call, check_output, _): # noqa: PT019 15 | patcher = Patchelf() 16 | # When a library has an existing RPATH entry within wheel_dir 17 | existing_rpath = b"$ORIGIN/.existinglibdir" 18 | check_output.return_value = existing_rpath 19 | wheel_dir = Path.cwd() 20 | lib_name = Path("test.so") 21 | full_lib_name = lib_name.absolute() 22 | 
append_rpath_within_wheel(lib_name, "$ORIGIN/.lib", wheel_dir, patcher) 23 | check_output_expected_args = [ 24 | call(["patchelf", "--print-rpath", full_lib_name]) 25 | ] 26 | # Then that entry is preserved when updating the RPATH 27 | check_call_expected_args = [ 28 | call(["patchelf", "--remove-rpath", full_lib_name]), 29 | call( 30 | [ 31 | "patchelf", 32 | "--force-rpath", 33 | "--set-rpath", 34 | f"{existing_rpath.decode()}:$ORIGIN/.lib", 35 | full_lib_name, 36 | ] 37 | ), 38 | ] 39 | 40 | assert check_output.call_args_list == check_output_expected_args 41 | assert check_call.call_args_list == check_call_expected_args 42 | 43 | def test_append_rpath_reject_outside_wheel(self, check_call, check_output, _): # noqa: PT019 44 | patcher = Patchelf() 45 | # When a library has an existing RPATH entry outside wheel_dir 46 | existing_rpath = b"/outside/wheel/dir" 47 | check_output.return_value = existing_rpath 48 | wheel_dir = Path("/not/outside") 49 | lib_name = Path("test.so") 50 | full_lib_name = lib_name.absolute() 51 | append_rpath_within_wheel(lib_name, "$ORIGIN/.lib", wheel_dir, patcher) 52 | check_output_expected_args = [ 53 | call(["patchelf", "--print-rpath", full_lib_name]) 54 | ] 55 | # Then that entry is eliminated when updating the RPATH 56 | check_call_expected_args = [ 57 | call(["patchelf", "--remove-rpath", full_lib_name]), 58 | call( 59 | [ 60 | "patchelf", 61 | "--force-rpath", 62 | "--set-rpath", 63 | "$ORIGIN/.lib", 64 | full_lib_name, 65 | ] 66 | ), 67 | ] 68 | 69 | assert check_output.call_args_list == check_output_expected_args 70 | assert check_call.call_args_list == check_call_expected_args 71 | 72 | def test_append_rpath_ignore_duplicates(self, check_call, check_output, _): # noqa: PT019 73 | patcher = Patchelf() 74 | # When a library has an existing RPATH entry and we try and append it again 75 | existing_rpath = b"$ORIGIN" 76 | check_output.return_value = existing_rpath 77 | wheel_dir = Path.cwd() 78 | lib_name = Path("test.so") 79 | 
full_lib_name = lib_name.absolute() 80 | append_rpath_within_wheel(lib_name, "$ORIGIN", wheel_dir, patcher) 81 | check_output_expected_args = [ 82 | call(["patchelf", "--print-rpath", full_lib_name]) 83 | ] 84 | # Then that entry is ignored when updating the RPATH 85 | check_call_expected_args = [ 86 | call(["patchelf", "--remove-rpath", full_lib_name]), 87 | call( 88 | ["patchelf", "--force-rpath", "--set-rpath", "$ORIGIN", full_lib_name] 89 | ), 90 | ] 91 | 92 | assert check_output.call_args_list == check_output_expected_args 93 | assert check_call.call_args_list == check_call_expected_args 94 | 95 | def test_append_rpath_ignore_relative(self, check_call, check_output, _): # noqa: PT019 96 | patcher = Patchelf() 97 | # When a library has an existing RPATH entry but it cannot be resolved 98 | # to an absolute path, it is eliminated 99 | existing_rpath = b"not/absolute" 100 | check_output.return_value = existing_rpath 101 | wheel_dir = Path.cwd() 102 | lib_name = Path("test.so") 103 | full_lib_name = lib_name.absolute() 104 | append_rpath_within_wheel(lib_name, "$ORIGIN", wheel_dir, patcher) 105 | check_output_expected_args = [ 106 | call(["patchelf", "--print-rpath", full_lib_name]) 107 | ] 108 | # Then that entry is ignored when updating the RPATH 109 | check_call_expected_args = [ 110 | call(["patchelf", "--remove-rpath", full_lib_name]), 111 | call( 112 | ["patchelf", "--force-rpath", "--set-rpath", "$ORIGIN", full_lib_name] 113 | ), 114 | ] 115 | 116 | assert check_output.call_args_list == check_output_expected_args 117 | assert check_call.call_args_list == check_call_expected_args 118 | -------------------------------------------------------------------------------- /tests/unit/test_tmpdirs.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | 5 | import pytest 6 | 7 | from auditwheel.tmpdirs import InGivenDirectory, InTemporaryDirectory 8 | 9 | 10 | def 
test_intemporarydirectory() -> None: 11 | cwd = Path.cwd() 12 | with InTemporaryDirectory() as path: 13 | assert path.is_dir() 14 | assert path.samefile(Path.cwd()) 15 | assert not path.samefile(cwd) 16 | assert not path.exists() 17 | assert cwd.samefile(Path.cwd()) 18 | 19 | 20 | def test_intemporarydirectory_name() -> None: 21 | tmp_dir = InTemporaryDirectory() 22 | with tmp_dir as path: 23 | assert tmp_dir.name == path 24 | 25 | 26 | def test_ingivendirectory(tmp_path: Path) -> None: 27 | cwd = Path.cwd() 28 | expected_path = tmp_path / "foo" 29 | with InGivenDirectory(expected_path) as path: 30 | assert path.is_dir() 31 | assert path.samefile(Path.cwd()) 32 | assert path.samefile(expected_path) 33 | assert path.exists() 34 | assert cwd.samefile(Path.cwd()) 35 | 36 | 37 | def test_ingivendirectory_cwd(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None: 38 | monkeypatch.chdir(tmp_path) 39 | with InGivenDirectory() as path: 40 | assert path.is_dir() 41 | assert path.samefile(Path.cwd()) 42 | assert path.samefile(tmp_path) 43 | assert path.exists() 44 | 45 | 46 | def test_ingivendirectory_name(): 47 | given_dir = InGivenDirectory() 48 | with given_dir as path: 49 | assert given_dir.name == path 50 | -------------------------------------------------------------------------------- /tests/unit/test_tools.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import argparse 4 | import lzma 5 | import zipfile 6 | import zlib 7 | from pathlib import Path 8 | 9 | import pytest 10 | 11 | from auditwheel.tools import EnvironmentDefault, dir2zip, is_subdir, zip2dir 12 | 13 | 14 | @pytest.mark.parametrize( 15 | ("environ", "passed", "expected"), 16 | [ 17 | (None, None, "manylinux1"), 18 | (None, "manylinux2010", "manylinux2010"), 19 | ("manylinux2010", None, "manylinux2010"), 20 | ("manylinux2010", "linux", "linux"), 21 | ], 22 | ) 23 | def test_plat_environment_action( 24 | monkeypatch: 
pytest.MonkeyPatch, 25 | environ: str | None, 26 | passed: str | None, 27 | expected: str, 28 | ) -> None: 29 | choices = ["linux", "manylinux1", "manylinux2010"] 30 | argv = [] 31 | if passed: 32 | argv = ["--plat", passed] 33 | if environ: 34 | monkeypatch.setenv("AUDITWHEEL_PLAT", environ) 35 | p = argparse.ArgumentParser() 36 | p.add_argument( 37 | "--plat", 38 | action=EnvironmentDefault, 39 | env="AUDITWHEEL_PLAT", 40 | dest="PLAT", 41 | choices=choices, 42 | default="manylinux1", 43 | ) 44 | args = p.parse_args(argv) 45 | assert expected == args.PLAT 46 | 47 | 48 | _all_zip_level: list[int] = list( 49 | range(zlib.Z_NO_COMPRESSION, zlib.Z_BEST_COMPRESSION + 1) 50 | ) 51 | 52 | 53 | @pytest.mark.parametrize( 54 | ("environ", "passed", "expected"), 55 | [ 56 | (None, None, -1), 57 | (0, None, 0), 58 | (0, 1, 1), 59 | (6, 1, 1), 60 | ], 61 | ) 62 | def test_zip_environment_action( 63 | monkeypatch: pytest.MonkeyPatch, 64 | environ: int | None, 65 | passed: int | None, 66 | expected: int, 67 | ) -> None: 68 | choices = _all_zip_level 69 | argv = [] 70 | if passed is not None: 71 | argv = ["--zip-compression-level", str(passed)] 72 | if environ is not None: 73 | monkeypatch.setenv("AUDITWHEEL_ZIP_COMPRESSION_LEVEL", str(environ)) 74 | p = argparse.ArgumentParser() 75 | p.add_argument( 76 | "-z", 77 | "--zip-compression-level", 78 | action=EnvironmentDefault, 79 | metavar="zip", 80 | env="AUDITWHEEL_ZIP_COMPRESSION_LEVEL", 81 | dest="zip", 82 | type=int, 83 | help="Compress level to be used to create zip file.", 84 | choices=choices, 85 | default=zlib.Z_DEFAULT_COMPRESSION, 86 | ) 87 | args = p.parse_args(argv) 88 | assert expected == args.zip 89 | 90 | 91 | def test_environment_action_invalid_plat_env(monkeypatch: pytest.MonkeyPatch) -> None: 92 | choices = ["linux", "manylinux1", "manylinux2010"] 93 | monkeypatch.setenv("AUDITWHEEL_PLAT", "foo") 94 | p = argparse.ArgumentParser() 95 | with pytest.raises(argparse.ArgumentError): 96 | p.add_argument( 97 | 
"--plat", 98 | action=EnvironmentDefault, 99 | env="AUDITWHEEL_PLAT", 100 | dest="PLAT", 101 | choices=choices, 102 | default="manylinux1", 103 | ) 104 | 105 | 106 | def test_environment_action_invalid_zip_env(monkeypatch: pytest.MonkeyPatch) -> None: 107 | choices = _all_zip_level 108 | monkeypatch.setenv("AUDITWHEEL_ZIP_COMPRESSION_LEVEL", "foo") 109 | p = argparse.ArgumentParser() 110 | with pytest.raises(argparse.ArgumentError): 111 | p.add_argument( 112 | "-z", 113 | "--zip-compression-level", 114 | action=EnvironmentDefault, 115 | metavar="zip", 116 | env="AUDITWHEEL_ZIP_COMPRESSION_LEVEL", 117 | dest="zip", 118 | type=int, 119 | help="Compress level to be used to create zip file.", 120 | choices=choices, 121 | default=zlib.Z_DEFAULT_COMPRESSION, 122 | ) 123 | monkeypatch.setenv("AUDITWHEEL_ZIP_COMPRESSION_LEVEL", "10") 124 | with pytest.raises(argparse.ArgumentError): 125 | p.add_argument( 126 | "-z", 127 | "--zip-compression-level", 128 | action=EnvironmentDefault, 129 | metavar="zip", 130 | env="AUDITWHEEL_ZIP_COMPRESSION_LEVEL", 131 | dest="zip", 132 | type=int, 133 | help="Compress level to be used to create zip file.", 134 | choices=choices, 135 | default=zlib.Z_DEFAULT_COMPRESSION, 136 | ) 137 | 138 | 139 | def _write_test_permissions_zip(path: Path) -> None: 140 | source_zip_xz = Path(__file__).parent / "test-permissions.zip.xz" 141 | with lzma.open(source_zip_xz) as f: 142 | path.write_bytes(f.read()) 143 | 144 | 145 | def _check_permissions(path: Path) -> None: 146 | for i in range(8): 147 | for j in range(8): 148 | for k in range(8): 149 | mode = (path / f"{i}{j}{k}.f").stat().st_mode 150 | assert ((mode >> 6) & 7) == (i | 6) # always read/write 151 | assert ((mode >> 3) & 7) == j 152 | assert ((mode >> 0) & 7) == k 153 | mode = (path / f"{i}{j}{k}.d").stat().st_mode 154 | assert ((mode >> 6) & 7) == 7 # always read/write/execute 155 | assert ((mode >> 3) & 7) == 5 # always read/execute 156 | assert ((mode >> 0) & 7) == 5 # always read/execute 157 
| 158 | 159 | def test_zip2dir_permissions(tmp_path: Path) -> None: 160 | source_zip = tmp_path / "test-permissions.zip" 161 | _write_test_permissions_zip(source_zip) 162 | extract_path = tmp_path / "unzip" 163 | zip2dir(source_zip, extract_path) 164 | _check_permissions(extract_path) 165 | 166 | 167 | def test_zip2dir_round_trip_permissions(tmp_path: Path) -> None: 168 | source_zip = tmp_path / "test-permissions.zip" 169 | _write_test_permissions_zip(source_zip) 170 | extract_path = tmp_path / "unzip2" 171 | zip2dir(source_zip, tmp_path / "unzip1") 172 | dir2zip(tmp_path / "unzip1", tmp_path / "tmp.zip", zlib.Z_DEFAULT_COMPRESSION, None) 173 | zip2dir(tmp_path / "tmp.zip", extract_path) 174 | _check_permissions(extract_path) 175 | 176 | 177 | def test_dir2zip_deflate(tmp_path: Path) -> None: 178 | buffer = b"\0" * 1024 * 1024 179 | input_dir = tmp_path / "input_dir" 180 | input_dir.mkdir() 181 | input_file = input_dir / "zeros.bin" 182 | input_file.write_bytes(buffer) 183 | output_file = tmp_path / "ouput.zip" 184 | dir2zip(input_dir, output_file, zlib.Z_DEFAULT_COMPRESSION, None) 185 | assert output_file.stat().st_size < len(buffer) / 4 186 | 187 | 188 | def test_dir2zip_folders(tmp_path: Path) -> None: 189 | input_dir = tmp_path / "input_dir" 190 | input_dir.mkdir() 191 | dist_info_folder = input_dir / "dummy-1.0.dist-info" 192 | dist_info_folder.mkdir() 193 | dist_info_folder.joinpath("METADATA").write_text("") 194 | empty_folder = input_dir / "dummy" / "empty" 195 | empty_folder.mkdir(parents=True) 196 | output_file = tmp_path / "output.zip" 197 | dir2zip(input_dir, output_file, zlib.Z_DEFAULT_COMPRESSION, None) 198 | expected_dirs = {"dummy/", "dummy/empty/", "dummy-1.0.dist-info/"} 199 | with zipfile.ZipFile(output_file, "r") as z: 200 | assert len(z.filelist) == 4 201 | for info in z.filelist: 202 | if info.is_dir(): 203 | assert info.filename in expected_dirs 204 | expected_dirs.remove(info.filename) 205 | else: 206 | assert info.filename == 
"dummy-1.0.dist-info/METADATA" 207 | assert len(expected_dirs) == 0 208 | 209 | 210 | def test_is_subdir(tmp_path: Path) -> None: 211 | root = tmp_path / "root" 212 | subdir = root / "subdir" 213 | subdir.mkdir(parents=True) 214 | assert is_subdir(subdir, root) 215 | assert is_subdir(root, root) 216 | assert not is_subdir(None, root) 217 | assert not is_subdir(tmp_path, root) 218 | -------------------------------------------------------------------------------- /tests/unit/test_wheel_abi.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | 5 | import pretend 6 | import pytest 7 | 8 | from auditwheel import wheel_abi 9 | from auditwheel.architecture import Architecture 10 | from auditwheel.libc import Libc 11 | 12 | 13 | class TestGetWheelElfdata: 14 | @pytest.mark.parametrize( 15 | ("filenames", "message"), 16 | [ 17 | ( 18 | # A single invalid file 19 | [Path("purelib") / "foo"], 20 | "Invalid binary wheel, found the following shared library/libraries in" 21 | " purelib folder:\n\tfoo\nThe wheel has to be platlib compliant in " 22 | "order to be repaired by auditwheel.", 23 | ), 24 | ( 25 | # Multiple invalid files 26 | [Path("purelib") / "foo", Path("purelib") / "bar"], 27 | "Invalid binary wheel, found the following shared library/libraries in" 28 | " purelib folder:\n\tfoo\n\tbar\nThe wheel has to be platlib compliant" 29 | " in order to be repaired by auditwheel.", 30 | ), 31 | ], 32 | ) 33 | def test_finds_shared_library_in_purelib( 34 | self, filenames: list[Path], message: str, monkeypatch: pytest.MonkeyPatch 35 | ) -> None: 36 | entered_context = pretend.stub(iter_files=lambda: filenames) 37 | context = pretend.stub( 38 | __enter__=lambda: entered_context, __exit__=lambda *_: None 39 | ) 40 | InGenericPkgCtx = pretend.stub(__call__=lambda _: context) 41 | 42 | monkeypatch.setattr(wheel_abi, "InGenericPkgCtx", InGenericPkgCtx) 43 | 
monkeypatch.setattr( 44 | wheel_abi, "elf_is_python_extension", lambda fn, elf: (fn, elf) 45 | ) 46 | monkeypatch.setattr( 47 | wheel_abi, 48 | "elf_file_filter", 49 | lambda fns: [(fn, pretend.stub()) for fn in fns], 50 | ) 51 | 52 | with pytest.raises(RuntimeError) as exec_info: 53 | wheel_abi.get_wheel_elfdata( 54 | Libc.GLIBC, Architecture.x86_64, Path("/fakepath"), frozenset() 55 | ) 56 | 57 | assert exec_info.value.args == (message,) 58 | -------------------------------------------------------------------------------- /tests/unit/test_wheeltools.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import re 4 | import tempfile 5 | from pathlib import Path 6 | 7 | import pytest 8 | 9 | from auditwheel.architecture import Architecture 10 | from auditwheel.error import NonPlatformWheel 11 | from auditwheel.libc import Libc 12 | from auditwheel.wheeltools import ( 13 | InWheelCtx, 14 | WheelToolsError, 15 | get_wheel_architecture, 16 | get_wheel_libc, 17 | ) 18 | 19 | HERE = Path(__file__).parent.resolve() 20 | 21 | 22 | @pytest.mark.parametrize( 23 | ("filename", "expected"), 24 | [(f"foo-1.0-py3-none-linux_{arch}.whl", arch) for arch in Architecture] 25 | + [("foo-1.0-py3-none-linux_x86_64.manylinux1_x86_64.whl", Architecture.x86_64)], 26 | ) 27 | def test_get_wheel_architecture(filename: str, expected: Architecture) -> None: 28 | arch = get_wheel_architecture(filename) 29 | assert arch == expected.baseline 30 | 31 | 32 | def test_get_wheel_architecture_unknown() -> None: 33 | with pytest.raises(WheelToolsError, match=re.escape("unknown architecture")): 34 | get_wheel_architecture("foo-1.0-py3-none-linux_mipsel.whl") 35 | 36 | 37 | def test_get_wheel_architecture_pure() -> None: 38 | with pytest.raises(NonPlatformWheel): 39 | get_wheel_architecture("foo-1.0-py3-none-any.whl") 40 | 41 | 42 | @pytest.mark.parametrize( 43 | "filename", 44 | [ 45 | 
"foo-1.0-py3-none-linux_x86_64.linux_aarch64.whl", 46 | "foo-1.0-py3-none-linux_x86_64.linux_mipsel.whl", 47 | "foo-1.0-py3-none-linux_x86_64.any.whl", 48 | ], 49 | ) 50 | def test_get_wheel_architecture_multiple(filename: str) -> None: 51 | match = re.escape("multiple architectures are not supported") 52 | with pytest.raises(WheelToolsError, match=match): 53 | get_wheel_architecture(filename) 54 | 55 | 56 | @pytest.mark.parametrize( 57 | ("filename", "expected"), 58 | [ 59 | ("foo-1.0-py3-none-manylinux1_x86_64.whl", Libc.GLIBC), 60 | ("foo-1.0-py3-none-manylinux1_x86_64.manylinux2010_x86_64.whl", Libc.GLIBC), 61 | ("foo-1.0-py3-none-musllinux_1_1_x86_64.whl", Libc.MUSL), 62 | ], 63 | ) 64 | def test_get_wheel_libc(filename: str, expected: Libc) -> None: 65 | libc = get_wheel_libc(filename) 66 | assert libc == expected 67 | 68 | 69 | @pytest.mark.parametrize( 70 | "filename", ["foo-1.0-py3-none-any.whl", "foo-1.0-py3-none-something.whl"] 71 | ) 72 | def test_get_wheel_libc_unknown(filename: str) -> None: 73 | with pytest.raises(WheelToolsError, match=re.escape("unknown libc used")): 74 | get_wheel_libc(filename) 75 | 76 | 77 | @pytest.mark.parametrize( 78 | "filename", ["foo-1.0-py3-none-manylinux1_x86_64.musllinux_1_1_x86_64.whl"] 79 | ) 80 | def test_get_wheel_libc_multiple(filename: str) -> None: 81 | match = re.escape("multiple libc are not supported") 82 | with pytest.raises(WheelToolsError, match=match): 83 | get_wheel_libc(filename) 84 | 85 | 86 | def test_inwheel_tmpdir(tmp_path, monkeypatch): 87 | wheel_path = ( 88 | HERE 89 | / "../integration/arch-wheels/glibc/testsimple-0.0.1-cp313-cp313-linux_x86_64.whl" 90 | ) 91 | tmp_path = tmp_path.resolve(strict=True) 92 | tmpdir = tmp_path / "tmpdir" 93 | tmpdir.mkdir() 94 | tmpdir_symlink = tmp_path / "symlink" 95 | tmpdir_symlink.symlink_to(str(tmpdir), target_is_directory=True) 96 | monkeypatch.setattr(tempfile, "gettempdir", lambda: str(tmpdir_symlink)) 97 | with InWheelCtx(wheel_path, tmp_path / 
wheel_path.name) as context: 98 | Path(context._tmpdir.name).relative_to(tmpdir_symlink) 99 | context.name.relative_to(tmpdir) 100 | --------------------------------------------------------------------------------