├── piptools
├── py.typed
├── scripts
│   ├── __init__.py
│   ├── _deprecations.py
│   ├── sync.py
│   └── options.py
├── repositories
│   ├── __init__.py
│   ├── base.py
│   └── local.py
├── _compat
│   ├── __init__.py
│   ├── path_compat.py
│   └── pip_compat.py
├── __main__.py
├── locations.py
├── __init__.py
├── subprocess_utils.py
├── logging.py
├── exceptions.py
├── cache.py
├── sync.py
├── writer.py
└── build.py
├── tests
├── __init__.py
├── test_top_level_editable.py
├── test_data
│   ├── minimal_wheels
│   │   ├── small-fake-multi-arch-0.1.tar.gz
│   │   ├── small_fake_a-0.1-py2.py3-none-any.whl
│   │   ├── small_fake_a-0.2-py2.py3-none-any.whl
│   │   ├── small_fake_b-0.1-py2.py3-none-any.whl
│   │   ├── small_fake_b-0.2-py2.py3-none-any.whl
│   │   ├── small_fake_b-0.3-py2.py3-none-any.whl
│   │   ├── small_fake_a-0.3b1-py2.py3-none-any.whl
│   │   ├── small_fake_with_deps-0.1-py2.py3-none-any.whl
│   │   ├── small_fake_multi_arch-0.1-py2.py3-none-win32.whl
│   │   ├── small_fake_with_unpinned_deps-0.1-py2.py3-none-any.whl
│   │   ├── small_fake_multi_arch-0.1-py2.py3-none-manylinux1_i686.whl
│   │   ├── small_fake_with_deps_and_sub_deps-0.1-py2.py3-none-any.whl
│   │   └── small_fake_multi_arch-0.1-py2.py3-none-manylinux1_x86_64.whl
│   ├── packages
│   │   ├── fake_with_deps
│   │   │   ├── fake_with_deps
│   │   │   │   └── __init__.py
│   │   │   └── pyproject.toml
│   │   ├── small_fake_a
│   │   │   └── setup.py
│   │   ├── small_fake_with_subdir
│   │   │   └── subdir
│   │   │   │   └── setup.py
│   │   ├── small_fake_with_build_deps
│   │   │   ├── pyproject.toml
│   │   │   ├── setup.py
│   │   │   └── backend
│   │   │   │   └── backend.py
│   │   ├── small_fake_with_deps
│   │   │   └── setup.py
│   │   ├── small_fake_with_pyproject
│   │   │   └── pyproject.toml
│   │   ├── small_fake_with_unpinned_deps
│   │   │   └── setup.py
│   │   └── small_fake_with_deps_and_sub_deps
│   │   │   └── setup.py
│   ├── fake-editables.json
│   └── fake-index.json
├── utils.py
├── test_subprocess_utils.py
├── constants.py
├── test_logging.py
├── test_pip_compat.py
├── test_minimal_upgrade.py
├── test_repository_local.py
├── test_fake_index.py
├── test_circular_imports.py
├── test_cache.py
└── test_build.py
├── docs
├── pkg
│   └── .gitignore
├── .gitignore
├── cli
│   ├── pip-sync.md
│   ├── pip-compile.md
│   └── index.md
├── contributing.md
├── requirements.in
├── index.md
├── changelog.md
├── requirements.txt
└── conf.py
├── examples
├── hypothesis.in
├── flask.in
├── sentry.in
├── django.in
├── protection.in
└── readme
│   ├── pyproject.toml
│   └── constraints.txt
├── .bandit
├── changelog.d
├── +9da8d07a.breaking.md
├── .draft_changelog_partial.md
├── 2150.bugfix.md
├── 2281.contrib.md
├── +380ef8d4.contrib.md
├── 2273.contrib.md
├── 2291.contrib.md
├── 2289.contrib.md
├── .gitignore
├── .towncrier_template.md.jinja
└── README.md
├── .github
├── CODEOWNERS
├── workflows
│   ├── cron.yml
│   ├── reusable-qa.yml
│   ├── release.yml
│   └── ci.yml
├── chronographer.yml
├── ISSUE_TEMPLATE
│   ├── bug-report.md
│   └── feature-request.md
└── PULL_REQUEST_TEMPLATE.md
├── .gitattributes
├── .git_archival.txt
├── .coveragerc
├── MANIFEST.in
├── .pre-commit-hooks.yaml
├── .codecov.yml
├── .gitignore
├── .flake8
├── .readthedocs.yaml
├── LICENSE
├── towncrier.toml
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── .pre-commit-config.yaml
├── pyproject.toml
├── tox.ini
└── img
    └── pip-tools-overview.svg
/piptools/py.typed:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | 
-------------------------------------------------------------------------------- /piptools/scripts/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_top_level_editable.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/pkg/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /examples/hypothesis.in: -------------------------------------------------------------------------------- 1 | hypothesis[django] 2 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | !requirements.in 2 | !requirements.txt 3 | -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small-fake-multi-arch-0.1.tar.gz: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/test_data/packages/fake_with_deps/fake_with_deps/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.bandit: -------------------------------------------------------------------------------- 1 | [bandit] 2 | exclude: tests,.tox,.eggs,.venv,.git 3 | skips: B101 4 | -------------------------------------------------------------------------------- /examples/flask.in: -------------------------------------------------------------------------------- 1 | # Flask has 2nd and 3rd level dependencies 2 | Flask 3 | -------------------------------------------------------------------------------- /examples/sentry.in: -------------------------------------------------------------------------------- 1 | # Sentry has a very large dependency tree 2 | sentry 3 | -------------------------------------------------------------------------------- /changelog.d/+9da8d07a.breaking.md: -------------------------------------------------------------------------------- 1 | Removed support for Python 3.8 -- by {user}`sirosen`. 
2 | -------------------------------------------------------------------------------- /docs/cli/pip-sync.md: -------------------------------------------------------------------------------- 1 | # pip-sync 2 | 3 | ```{program-output} pip-sync --help 4 | 5 | ``` 6 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | /.github/ @jazzband/pip-tools-leads 2 | /tox.ini @jazzband/pip-tools-leads 3 | -------------------------------------------------------------------------------- /changelog.d/.draft_changelog_partial.md: -------------------------------------------------------------------------------- 1 | ```{towncrier-draft-entries} 2 | DRAFT_VERSION 3 | ``` 4 | -------------------------------------------------------------------------------- /docs/cli/pip-compile.md: -------------------------------------------------------------------------------- 1 | # pip-compile 2 | 3 | ```{program-output} pip-compile --help 4 | 5 | ``` 6 | -------------------------------------------------------------------------------- /changelog.d/2150.bugfix.md: -------------------------------------------------------------------------------- 1 | The option `--unsafe-package` is now normalized 2 | -- by {user}`shifqu`. 3 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # fill in placeholders when `git archive` is used, setuptools-scm support 2 | .git_archival.txt export-subst 3 | -------------------------------------------------------------------------------- /changelog.d/2281.contrib.md: -------------------------------------------------------------------------------- 1 | Fix `actionlint` hook usage to always include `shellcheck` integration -- by {user}`sirosen`. 2 | -------------------------------------------------------------------------------- /.git_archival.txt: -------------------------------------------------------------------------------- 1 | node: fa47496c63e56de71f86d728b882898d0da67e80 2 | node-date: 2025-12-10T15:25:19Z 3 | describe-name: v7.5.2-41-gfa47496c6 4 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | plugins = covdefaults 3 | omit = 4 | piptools/_compat/* 5 | 6 | [report] 7 | include = piptools/*, tests/* 8 | fail_under = 99 9 | -------------------------------------------------------------------------------- /changelog.d/+380ef8d4.contrib.md: -------------------------------------------------------------------------------- 1 | The `check-jsonschema` ReadTheDocs hook has been enabled, and the config has been tweaked to pass -- by {user}`sirosen`. 
2 | -------------------------------------------------------------------------------- /examples/django.in: -------------------------------------------------------------------------------- 1 | # This file includes the Django project, and the debug toolbar 2 | Django<2.2.1 # suppose some version requirement 3 | django-debug-toolbar 4 | -------------------------------------------------------------------------------- /docs/contributing.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ```{include} ../CONTRIBUTING.md 4 | 5 | ``` 6 | 7 | 8 | ```{include} ../changelog.d/README.md 9 | 10 | ``` 11 | -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_a/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import setup 4 | 5 | setup(name="small_fake_a", version=0.1) 6 | -------------------------------------------------------------------------------- /changelog.d/2273.contrib.md: -------------------------------------------------------------------------------- 1 | The linting is now set up to perform structured GitHub Actions 2 | workflows and actions checks against json schemas 3 | -- by {user}`webknjaz`. 4 | -------------------------------------------------------------------------------- /changelog.d/2291.contrib.md: -------------------------------------------------------------------------------- 1 | The {file}`tox.ini` and {file}`.github/` parts of the repository now 2 | have project leads assigned as GitHub code owners -- by {user}`webknjaz`. 3 | -------------------------------------------------------------------------------- /docs/requirements.in: -------------------------------------------------------------------------------- 1 | furo 2 | myst-parser 3 | setuptools-scm 4 | sphinx 5 | sphinxcontrib-apidoc 6 | sphinxcontrib-programoutput 7 | sphinxcontrib-towncrier 8 | sphinx-issues 9 | -------------------------------------------------------------------------------- /tests/test_data/fake-editables.json: -------------------------------------------------------------------------------- 1 | { 2 | "git+git://example.org/django.git#egg=django": [], 3 | "git+https://github.com/celery/billiard#egg=billiard==3.5.9999": [] 4 | } 5 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | # do not include the `.git_archival.txt` file in sdist builds from repo source 2 | # this avoids setuptools-scm warnings on development builds 3 | exclude .git_archival.txt 4 | -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_a-0.1-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/main/tests/test_data/minimal_wheels/small_fake_a-0.1-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_a-0.2-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/main/tests/test_data/minimal_wheels/small_fake_a-0.2-py2.py3-none-any.whl -------------------------------------------------------------------------------- 
/tests/test_data/minimal_wheels/small_fake_b-0.1-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/main/tests/test_data/minimal_wheels/small_fake_b-0.1-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_b-0.2-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/main/tests/test_data/minimal_wheels/small_fake_b-0.2-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_b-0.3-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/main/tests/test_data/minimal_wheels/small_fake_b-0.3-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_with_subdir/subdir/setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from setuptools import setup 4 | 5 | setup(name="small_fake_a", version=0.1) 6 | -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_a-0.3b1-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/main/tests/test_data/minimal_wheels/small_fake_a-0.3b1-py2.py3-none-any.whl -------------------------------------------------------------------------------- /examples/protection.in: -------------------------------------------------------------------------------- 1 | # This package depends on setuptools, which should not end up in the compiled 2 | # requirements, because it may cause conflicts with pip itself 3 | python-levenshtein>=0.12.0 4 | -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_with_deps-0.1-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/main/tests/test_data/minimal_wheels/small_fake_with_deps-0.1-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_multi_arch-0.1-py2.py3-none-win32.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/main/tests/test_data/minimal_wheels/small_fake_multi_arch-0.1-py2.py3-none-win32.whl -------------------------------------------------------------------------------- /docs/cli/index.md: -------------------------------------------------------------------------------- 1 | # Command Line Reference 2 | 3 | This page provides a reference for the `pip-tools` command-line interface (CLI): 4 | 5 | ```{toctree} 6 | :maxdepth: 1 7 | 8 | pip-compile 9 | pip-sync 10 | ``` 11 | -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_with_unpinned_deps-0.1-py2.py3-none-any.whl: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/jazzband/pip-tools/main/tests/test_data/minimal_wheels/small_fake_with_unpinned_deps-0.1-py2.py3-none-any.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_multi_arch-0.1-py2.py3-none-manylinux1_i686.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/main/tests/test_data/minimal_wheels/small_fake_multi_arch-0.1-py2.py3-none-manylinux1_i686.whl -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_with_deps_and_sub_deps-0.1-py2.py3-none-any.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/main/tests/test_data/minimal_wheels/small_fake_with_deps_and_sub_deps-0.1-py2.py3-none-any.whl -------------------------------------------------------------------------------- /.pre-commit-hooks.yaml: -------------------------------------------------------------------------------- 1 | - id: pip-compile 2 | name: pip-compile 3 | description: Automatically compile requirements. 4 | entry: pip-compile 5 | language: python 6 | files: ^requirements\.(in|txt)$ 7 | pass_filenames: false 8 | -------------------------------------------------------------------------------- /piptools/repositories/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from .local import LocalRequirementsRepository 4 | from .pypi import PyPIRepository 5 | 6 | __all__ = ["LocalRequirementsRepository", "PyPIRepository"] 7 | -------------------------------------------------------------------------------- /tests/test_data/minimal_wheels/small_fake_multi_arch-0.1-py2.py3-none-manylinux1_x86_64.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jazzband/pip-tools/main/tests/test_data/minimal_wheels/small_fake_multi_arch-0.1-py2.py3-none-manylinux1_x86_64.whl -------------------------------------------------------------------------------- /.codecov.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | codecov: 4 | notify: 5 | manual_trigger: true # prevent notifications until we notify Codecov 6 | 7 | require_ci_to_pass: false 8 | 9 | comment: false # avoid spamming reviews 10 | 11 | ... 12 | -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_with_build_deps/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools==68.1.2", 4 | "wheel==0.41.1", 5 | "fake_static_build_dep" 6 | ] 7 | build-backend = "backend" 8 | backend-path = ["backend"] 9 | -------------------------------------------------------------------------------- /changelog.d/2289.contrib.md: -------------------------------------------------------------------------------- 1 | The linting is now set up to demand that {py:mod}`typing` is always 2 | imported as a module under the name of `_t` -- by {user}`webknjaz`. 3 | 4 | This is enforced by {user}`sirosen`'s {pypi}`flake8-typing-as-t` 5 | plugin for {pypi}`flake8`. 
6 | 
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | import os
4 | 
5 | # NOTE: keep in sync with "passenv" in tox.ini
6 | CI_VARIABLES = {"CI", "GITHUB_ACTIONS"}
7 | 
8 | 
9 | def looks_like_ci():
10 |     return bool(set(os.environ.keys()) & CI_VARIABLES)
11 | 
--------------------------------------------------------------------------------
/examples/readme/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["hatchling"]
3 | build-backend = "hatchling.build"
4 | 
5 | [project]
6 | name = "my-cool-django-app"
7 | version = "42"
8 | dependencies = ["django"]
9 | 
10 | [project.optional-dependencies]
11 | dev = ["pytest"]
12 | 
--------------------------------------------------------------------------------
/tests/test_data/packages/small_fake_with_deps/setup.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | from setuptools import setup
4 | 
5 | setup(
6 |     name="small_fake_with_deps",
7 |     version=0.1,
8 |     install_requires=["small-fake-a==0.1", "small-fake-b==0.1"],
9 | )
10 | 
--------------------------------------------------------------------------------
/tests/test_data/packages/small_fake_with_pyproject/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name="small_fake_with_pyproject"
3 | version="0.1"
4 | dependencies=[
5 |     "fake_direct_runtime_dep",
6 | ]
7 | [project.optional-dependencies]
8 | x = ["fake_direct_extra_runtime_dep[with_its_own_extra]"]
9 | 
--------------------------------------------------------------------------------
/tests/test_data/packages/small_fake_with_unpinned_deps/setup.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | from setuptools import setup
4 | 
5 | setup(
6 |     name="small_fake_with_unpinned_deps",
7 |     version=0.1,
8 |     install_requires=["small-fake-a", "small-fake-b"],
9 | )
10 | 
--------------------------------------------------------------------------------
/tests/test_data/packages/small_fake_with_deps_and_sub_deps/setup.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | from setuptools import setup
4 | 
5 | setup(
6 |     name="small_fake_with_deps_and_sub_deps",
7 |     version=0.1,
8 |     install_requires=["small-fake-with-unpinned-deps"],
9 | )
10 | 
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Welcome to pip-tools' documentation!
2 | 
3 | ```{include} ../README.md
4 | 
5 | ```
6 | 
7 | ```{toctree}
8 | :hidden:
9 | :maxdepth: 2
10 | :caption: Contents
11 | 
12 | cli/index
13 | contributing
14 | changelog
15 | ```
16 | 
17 | ```{toctree}
18 | :hidden:
19 | :caption: Private API reference
20 | 
21 | pkg/modules
22 | ```
23 | 
--------------------------------------------------------------------------------
/piptools/_compat/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | from .pip_compat import (
4 |     Distribution,
5 |     create_wheel_cache,
6 |     get_dev_pkgs,
7 |     parse_requirements,
8 | )
9 | 
10 | __all__ = [
11 |     "Distribution",
12 |     "parse_requirements",
13 |     "create_wheel_cache",
14 |     "get_dev_pkgs",
15 | ]
16 | 
--------------------------------------------------------------------------------
/tests/test_subprocess_utils.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | import sys
4 | 
5 | from piptools.subprocess_utils import run_python_snippet
6 | 
7 | 
8 | def test_run_python_snippet_returns_multiline():
9 |     result = run_python_snippet(sys.executable, r'print("MULTILINE\nOUTPUT", end="")')
10 |     assert result == "MULTILINE\nOUTPUT"
11 | 
--------------------------------------------------------------------------------
/tests/test_data/packages/small_fake_with_build_deps/setup.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | from setuptools import setup
4 | 
5 | setup(
6 |     name="small_fake_with_build_deps",
7 |     version=0.1,
8 |     install_requires=[
9 |         "fake_direct_runtime_dep",
10 |     ],
11 |     extras_require={
12 |         "x": ["fake_direct_extra_runtime_dep"],
13 |     },
14 | )
15 | 
--------------------------------------------------------------------------------
/piptools/__main__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | import click
4 | 
5 | from piptools.scripts import compile, sync
6 | 
7 | 
8 | @click.group()
9 | def cli() -> None:
10 |     pass
11 | 
12 | 
13 | cli.add_command(compile.cli, "compile")
14 | cli.add_command(sync.cli, "sync")
15 | 
16 | 
17 | # Enable ``python -m piptools ...``.
18 | if __name__ == "__main__":
19 |     cli()
--------------------------------------------------------------------------------
/piptools/locations.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | from pip._internal.utils.appdirs import user_cache_dir
4 | 
5 | # The user_cache_dir helper comes straight from pip itself
6 | CACHE_DIR = user_cache_dir("pip-tools")
7 | 
8 | # The project defaults specific to pip-tools should be written to these filenames
9 | DEFAULT_CONFIG_FILE_NAMES = (".pip-tools.toml", "pyproject.toml")
10 | 
--------------------------------------------------------------------------------
/.github/workflows/cron.yml:
--------------------------------------------------------------------------------
1 | name: Cron
2 | 
3 | on:
4 |   schedule:
5 |     # Run every day at 03:53 UTC
6 |     - cron: 53 3 * * *
7 | 
8 | jobs:
9 |   main:
10 |     name: CI
11 |     uses: ./.github/workflows/ci.yml
12 |     with:
13 |       cpython-versions: >-
14 |         ["3.10", "3.11", "3.12", "3.13"]
15 |       cpython-pip-version: >-
16 |         ["main", "latest", "supported", "lowest"]
17 | 
--------------------------------------------------------------------------------
/tests/constants.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | import os
4 | 
5 | TEST_DATA_PATH = os.path.join(os.path.dirname(__file__), "test_data")
6 | MINIMAL_WHEELS_PATH = os.path.join(TEST_DATA_PATH, "minimal_wheels")
7 | PACKAGES_PATH = os.path.join(TEST_DATA_PATH, "packages")
8 | PACKAGES_RELATIVE_PATH = os.path.relpath(
9 |     PACKAGES_PATH, os.path.commonpath([os.getcwd(), PACKAGES_PATH])
10 | )
11 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Ignore cram test output
2 | *.t.err
3 | 
4 | # Python cruft
5 | *.pyc
6 | 
7 | # Virtualenvs
8 | .envrc
9 | .direnv
10 | .venv
11 | venv/
12 | 
13 | # Testing
14 | .pytest_cache/
15 | .tox
16 | htmlcov
17 | 
18 | # Build output
19 | build
20 | dist
21 | *.egg-info
22 | .coverage
23 | .coverage.*
24 | coverage.xml
25 | .cache
26 | 
27 | # IDE
28 | .idea
29 | 
30 | # Test files
31 | requirements.in
32 | requirements.txt
33 | .eggs/
34 | 
--------------------------------------------------------------------------------
/piptools/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | import locale
4 | 
5 | from click import secho
6 | 
7 | # Needed for locale.getpreferredencoding(False) to work
8 | # in pip._internal.utils.encoding.auto_decode
9 | try:
10 |     locale.setlocale(locale.LC_ALL, "")
11 | except locale.Error as e:  # pragma: no cover
12 |     # setlocale can apparently crash if locales are uninitialized
13 |     secho(f"Ignoring error when setting locale: {e}", fg="red")
14 | 
--------------------------------------------------------------------------------
/.github/chronographer.yml:
--------------------------------------------------------------------------------
1 | branch-protection-check-name: Change log entry
2 | action-hints:
3 |   check-title-prefix: "Chronographer: "
4 |   external-docs-url: https://pip-tools.rtfd.io/en/latest/contributing/#adding-change-notes-with-prs
5 |   inline-markdown: |
6 |     See [the changelog contribution docs] for news fragment authoring gotchas.
7 | 8 | [the changelog contribution docs]: https://pip-tools.rtfd.io/en/latest/contributing/#adding-change-notes-with-prs 9 | enforce-name: 10 | suffix: .md 11 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 100 3 | # E203 conflicts with PEP8; see https://github.com/psf/black#slices 4 | extend-ignore = E203 5 | 6 | # flake8-pytest-style 7 | # PT001: 8 | pytest-fixture-no-parentheses = true 9 | # PT006: 10 | pytest-parametrize-names-type = tuple 11 | # PT007: 12 | pytest-parametrize-values-type = tuple 13 | pytest-parametrize-values-row-type = tuple 14 | # PT023: 15 | pytest-mark-no-parentheses = true 16 | 17 | # flake8-typing-as-t 18 | # TYT02: 19 | typing-as-t-imported-name = _t 20 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug-report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | --- 5 | 6 | 7 | 8 | #### Environment Versions 9 | 10 | 1. OS Type 11 | 1. Python version: `$ python -V` 12 | 1. pip version: `$ pip --version` 13 | 1. pip-tools version: `$ pip-compile --version` 14 | 15 | #### Steps to replicate 16 | 17 | 1. ... 18 | 2. ... 19 | 3. ... 20 | 21 | #### Expected result 22 | 23 | ... 24 | 25 | #### Actual result 26 | 27 | ... 28 | -------------------------------------------------------------------------------- /changelog.d/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | !.towncrier_template.md.jinja 4 | !.draft_changelog_partial.md 5 | !README.md 6 | !*.bugfix 7 | !*.bugfix.md 8 | !*.bugfix.*.md 9 | !*.breaking 10 | !*.breaking.md 11 | !*.breaking.*.md 12 | !*.contrib 13 | !*.contrib.md 14 | !*.contrib.*.md 15 | !*.deprecation 16 | !*.deprecation.md 17 | !*.deprecation.*.md 18 | !*.doc 19 | !*.doc.md 20 | !*.doc.*.md 21 | !*.feature 22 | !*.feature.md 23 | !*.feature.*.md 24 | !*.misc 25 | !*.misc.md 26 | !*.misc.*.md 27 | !*.packaging 28 | !*.packaging.md 29 | !*.packaging.*.md 30 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ##### Contributor checklist 4 | 5 | - [ ] Included tests for the changes. 6 | - [ ] A change note is created in `changelog.d/` (see [`changelog.d/README.md`](https://github.com/jazzband/pip-tools/blob/main/changelog.d/#readme) for instructions) or the PR text says "no changelog needed". 7 | 8 | ##### Maintainer checklist 9 | 10 | - [ ] If no changelog is needed, apply the `bot:chronographer:skip` label. 11 | - [ ] Assign the PR to an existing or new milestone for the target version (following [Semantic Versioning](https://blog.versioneye.com/2014/01/16/semantic-versioning/)). 12 | -------------------------------------------------------------------------------- /piptools/subprocess_utils.py: -------------------------------------------------------------------------------- 1 | # WARNING! BE CAREFUL UPDATING THIS FILE 2 | # Consider possible security implications associated with subprocess module. 
3 | from __future__ import annotations 4 | 5 | import subprocess # nosec 6 | 7 | 8 | def run_python_snippet(python_executable: str, code_to_run: str) -> str: 9 | """ 10 | Execute Python code by calling ``python_executable`` with '-c' option. 11 | """ 12 | py_exec_cmd = python_executable, "-c", code_to_run 13 | 14 | # subprocess module should never be used with untrusted input 15 | return subprocess.check_output( # nosec 16 | py_exec_cmd, 17 | shell=False, 18 | text=True, 19 | ) 20 | -------------------------------------------------------------------------------- /docs/changelog.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ```{eval-rst} 4 | 5 | .. MyST doesn't support the "only" directive correctly. It always evaluates to 6 | .. true. 7 | .. 8 | .. But if we drop into sphinx eval-rst, it works fine. 9 | .. We then need to include our draft changelog content as markdown. 10 | .. 11 | .. We're making a "MyST sandwich", with RST for the `{only}` directive in the 12 | .. middle. 13 | .. 14 | .. Using `include` with a `parser` is documented here: 15 | .. https://myst-parser.readthedocs.io/en/latest/faq/index.html#include-rst-files-into-a-markdown-file 16 | 17 | .. only:: not is_release 18 | 19 | .. include:: ../changelog.d/.draft_changelog_partial.md 20 | :parser: myst_parser.sphinx_ 21 | 22 | ``` 23 | 24 | ```{include} ../CHANGELOG.md 25 | 26 | ``` 27 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | --- 5 | 6 | #### What's the problem this feature will solve? 7 | 8 | 9 | 10 | #### Describe the solution you'd like 11 | 12 | 13 | 14 | 15 | 16 | #### Alternative Solutions 17 | 18 | 19 | 20 | #### Additional context 21 | 22 | 23 | -------------------------------------------------------------------------------- /tests/test_data/packages/fake_with_deps/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "fake_with_deps" 7 | description = "Fake package with dependencies" 8 | authors = [{name = "jazzband"}] 9 | # license = "0BSD" 10 | # Deprecated license table for Python 3.8's latest setuptools compatibility: 11 | license = {text = "0BSD"} 12 | version = "0.1" 13 | dependencies = [ 14 | "python-dateutil>=2.4.2,<2.5", 15 | "colorama<0.4.0,>=0.3.7", 16 | "cornice<1.1,>=1.0.0", 17 | "enum34<1.1.7,>=1.0.4", 18 | "six>1.5,<=1.8", 19 | "ipaddress<1.1,>=1.0.16", 20 | "jsonschema<3.0,>=2.4.0", 21 | "pyramid<1.6,>=1.5.7", 22 | "pyzmq<26.3.0,>=26.2.0", 23 | "simplejson>=3.5,!=3.8,>3.9", 24 | "SQLAlchemy!=0.9.5,<2.0.0,>=0.7.8,>=1.0.0", 25 | "python-memcached>=1.57,<2.0", 26 | "xmltodict<=0.11,>=0.4.6" 27 | ] 28 | -------------------------------------------------------------------------------- /tests/test_logging.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from piptools.logging import LogContext 4 | 5 | 6 | def test_indentation(runner): 7 | """ 8 | Test LogContext.indentation() context manager increases indentation. 
9 | """ 10 | log = LogContext(indent_width=2) 11 | 12 | with runner.isolation() as streams: 13 | log.log("Test message 1") 14 | with log.indentation(): 15 | log.log("Test message 2") 16 | with log.indentation(): 17 | log.log("Test message 3") 18 | log.log("Test message 4") 19 | log.log("Test message 5") 20 | 21 | stderr_bytes = streams[1].getvalue() 22 | 23 | assert stderr_bytes.decode().splitlines() == [ 24 | "Test message 1", 25 | " Test message 2", 26 | " Test message 3", 27 | " Test message 4", 28 | "Test message 5", 29 | ] 30 | -------------------------------------------------------------------------------- /tests/test_pip_compat.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path, PurePosixPath 4 | 5 | from piptools._compat.pip_compat import parse_requirements 6 | 7 | from .constants import PACKAGES_RELATIVE_PATH 8 | 9 | 10 | def test_parse_requirements_preserve_editable_relative_path(tmp_path, repository): 11 | test_package_path = str( 12 | PurePosixPath(Path(PACKAGES_RELATIVE_PATH)) / "small_fake_a" 13 | ) 14 | requirements_in_path = str(tmp_path / "requirements.in") 15 | 16 | with open(requirements_in_path, "w") as requirements_in_file: 17 | requirements_in_file.write(f"-e {test_package_path}") 18 | 19 | [install_requirement] = parse_requirements( 20 | requirements_in_path, session=repository.session 21 | ) 22 | 23 | assert install_requirement.link.url == test_package_path 24 | assert install_requirement.link.file_path == test_package_path 25 | -------------------------------------------------------------------------------- /piptools/_compat/path_compat.py: -------------------------------------------------------------------------------- 1 | """ 2 | Compatibility helpers for working with paths and :mod:`pathlib` across platforms 3 | and Python versions. 4 | """ 5 | 6 | from __future__ import annotations 7 | 8 | import os.path 9 | import pathlib 10 | import sys 11 | 12 | __all__ = ("relative_to_walk_up",) 13 | 14 | 15 | def relative_to_walk_up(path: pathlib.Path, start: pathlib.Path) -> pathlib.Path: 16 | """ 17 | Compute a relative path allowing for the input to not be a subpath of the start. 18 | 19 | This is a compatibility helper for ``pathlib.Path.relative_to(..., walk_up=True)`` 20 | on all Python versions. 
(``walk_up: bool`` is Python 3.12+) 21 | """ 22 | # prefer `pathlib.Path.relative_to` where available 23 | if sys.version_info >= (3, 12): 24 | return path.relative_to(start, walk_up=True) 25 | 26 | str_result = os.path.relpath(path, start=start) 27 | return pathlib.Path(str_result) 28 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # https://docs.readthedocs.io/en/stable/config-file/v2.html 2 | 3 | version: 2 4 | 5 | build: 6 | os: ubuntu-24.04 7 | 8 | # in order to have RTD use our tox configuration for the build, we take full 9 | # control over 'build.commands' 10 | # see also: https://github.com/astral-sh/uv/issues/10074#issuecomment-3128225815 11 | commands: 12 | # install/setup uv 13 | - asdf plugin add uv 14 | - asdf install uv latest 15 | - asdf global uv latest 16 | # use uv to get 17 | # - Python 3.13 18 | # - tox (with tox-uv) 19 | - uv tool install tox --with tox-uv --python "3.13" --managed-python 20 | # create the tox environment (`--notest` skips commands) 21 | - uv tool run tox run -e build-docs --notest -vvvvv 22 | # do the actual build step, to the RTD documented output directory 23 | - uv tool run tox run -e build-docs --skip-pkg-install -q -- "${READTHEDOCS_OUTPUT}"/html -b dirhtml 24 | -------------------------------------------------------------------------------- /examples/readme/constraints.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.11 3 | # by the following command: 4 | # 5 | # pip-compile --all-build-deps --all-extras --output-file=constraints.txt --strip-extras pyproject.toml 6 | # 7 | asgiref==3.5.2 8 | # via django 9 | attrs==22.1.0 10 | # via pytest 11 | django==4.1 12 | # via my-cool-django-app (pyproject.toml) 13 | editables==0.3 14 | # via hatchling 15 | hatchling==1.11.1 16 | # via my-cool-django-app (pyproject.toml::build-system.requires) 17 | iniconfig==1.1.1 18 | # via pytest 19 | packaging==21.3 20 | # via 21 | # hatchling 22 | # pytest 23 | pathspec==0.10.2 24 | # via hatchling 25 | pluggy==1.0.0 26 | # via 27 | # hatchling 28 | # pytest 29 | py==1.11.0 30 | # via pytest 31 | pyparsing==3.0.9 32 | # via packaging 33 | pytest==7.1.2 34 | # via my-cool-django-app (pyproject.toml) 35 | sqlparse==0.4.2 36 | # via django 37 | tomli==2.0.1 38 | # via 39 | # hatchling 40 | # pytest 41 | -------------------------------------------------------------------------------- /tests/test_data/packages/small_fake_with_build_deps/backend/backend.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | # A dependency of the build backend that is not installed is equivalent to a build 4 | # backend that is not installed so we don't have to test both cases. 
5 | import fake_static_build_dep # noqa: F401 6 | import setuptools.build_meta 7 | 8 | # Re-export all names in case more hooks are added in the future 9 | from setuptools.build_meta import * # noqa: F401, F403 10 | 11 | build_wheel = setuptools.build_meta.build_wheel 12 | build_sdist = setuptools.build_meta.build_sdist 13 | 14 | 15 | def get_requires_for_build_sdist(config_settings=None): 16 | result = setuptools.build_meta.get_requires_for_build_sdist(config_settings) 17 | assert result == [] 18 | result.append("fake_dynamic_build_dep_for_all") 19 | result.append("fake_dynamic_build_dep_for_sdist") 20 | return result 21 | 22 | 23 | def get_requires_for_build_wheel(config_settings=None): 24 | result = setuptools.build_meta.get_requires_for_build_wheel(config_settings) 25 | assert result == ["wheel"] 26 | result.append("fake_dynamic_build_dep_for_all") 27 | result.append("fake_dynamic_build_dep_for_wheel") 28 | return result 29 | 30 | 31 | def get_requires_for_build_editable(config_settings=None): 32 | return ["fake_dynamic_build_dep_for_all", "fake_dynamic_build_dep_for_editable"] 33 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c). All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without modification, 6 | are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, 9 | this list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | 15 | 3. Neither the name of pip-tools nor the names of its contributors may be 16 | used to endorse or promote products derived from this software without 17 | specific prior written permission. 18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 20 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 21 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 22 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 23 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 24 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 25 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 26 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 27 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 28 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 29 | -------------------------------------------------------------------------------- /piptools/scripts/_deprecations.py: -------------------------------------------------------------------------------- 1 | """Module to deprecate script arguments.""" 2 | 3 | from __future__ import annotations 4 | 5 | from ..logging import log 6 | from ..utils import PIP_VERSION 7 | 8 | 9 | def filter_deprecated_pip_args(args: list[str]) -> list[str]: 10 | """ 11 | Warn about and drop pip args that are no longer supported by pip. 
12 | 13 | Currently drops: 14 | 15 | - ``--use-pep517`` 16 | - ``--no-use-pep517`` 17 | - ``--global-option`` 18 | - ``--build-option`` 19 | """ 20 | if PIP_VERSION < (25, 3): # pragma: <3.9 cover 21 | return args 22 | 23 | deprecation_mapping = { 24 | "--use-pep517": "Pip always uses PEP 517 for building projects now.", 25 | "--no-use-pep517": "Pip always uses PEP 517 for building projects now.", 26 | "--global-option": ( 27 | "--config-setting is now the only way to pass options to the build backend." 28 | ), 29 | "--build-option": ( 30 | "--config-setting is now the only way to pass options to the build backend." 31 | ), 32 | } 33 | supported_args = [] 34 | for arg in args: 35 | opt_key = arg.split("=")[0] 36 | try: 37 | warn_msg = deprecation_mapping[opt_key] 38 | except KeyError: 39 | supported_args.append(arg) 40 | else: 41 | log.warning( 42 | "WARNING: " 43 | f"{arg} is no longer supported by pip and is deprecated in pip-tools. " 44 | "This option is ignored and will result in errors in a future release. " 45 | f"{warn_msg}" 46 | ) 47 | 48 | return supported_args 49 | -------------------------------------------------------------------------------- /tests/test_minimal_upgrade.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | from piptools.repositories import LocalRequirementsRepository 6 | from piptools.utils import key_from_ireq 7 | 8 | 9 | @pytest.mark.parametrize( 10 | ("input", "pins", "expected"), 11 | ( 12 | (tup) 13 | for tup in [ 14 | # Add Flask to an existing requirements.in, using --no-upgrade 15 | ( 16 | ["flask", "jinja2", "werkzeug"], 17 | [ 18 | # The requirements.txt from a previous round 19 | "jinja2==2.7.3", 20 | "markupsafe==0.23", 21 | "werkzeug==0.6", 22 | ], 23 | [ 24 | # Add flask and upgrade werkzeug from incompatible 0.6 25 | "flask==0.10.1", 26 | "itsdangerous==0.24 (from flask==0.10.1)", 27 | "werkzeug==0.10.4", 28 | # Other requirements are unchanged from 29 | # the original requirements.txt 30 | "jinja2==2.7.3", 31 | "markupsafe==0.23 (from jinja2==2.7.3)", 32 | ], 33 | ) 34 | ] 35 | ), 36 | ) 37 | def test_no_upgrades(base_resolver, repository, from_line, input, pins, expected): 38 | input = [from_line(line) for line in input] 39 | existing_pins = {} 40 | for line in pins: 41 | ireq = from_line(line) 42 | existing_pins[key_from_ireq(ireq)] = ireq 43 | local_repository = LocalRequirementsRepository(existing_pins, repository) 44 | output = base_resolver( 45 | input, prereleases=False, repository=local_repository 46 | ).resolve() 47 | output = {str(line) for line in output} 48 | assert output == {str(line) for line in expected} 49 | -------------------------------------------------------------------------------- /changelog.d/.towncrier_template.md.jinja: -------------------------------------------------------------------------------- 1 | {%- set is_draft_preview = versiondata["version"] == "DRAFT_VERSION" -%} 2 | 3 | {%- if is_draft_preview -%} 4 | ## Unreleased Changes 5 | {% else -%} 6 | ## {{ versiondata["version"] }} 7 | {% endif %} 8 | 9 | *{%- if is_draft_preview -%}Generated for preview on {% endif -%}{{ versiondata["date"] }}* 10 | 11 | {% for section, _ in sections.items() %} 12 | {% if sections[section] %} 13 | {% for category, val in definitions.items() if category in sections[section] -%} 14 | 15 | ### {{ definitions[category]['name'] }} 16 | 17 | {% for text, change_note_refs in sections[section][category].items() %} 18 | 19 | {%- 20 | set 
pr_issue_numbers = change_note_refs 21 | | map('lower') 22 | | map('int', default=None) 23 | | select('integer') 24 | | map('string') 25 | | list 26 | -%} 27 | 28 | {%- set arbitrary_refs = [] -%} 29 | {%- set commit_refs = [] -%} 30 | {%- with -%} 31 | {%- set commit_ref_candidates = change_note_refs | reject('in', pr_issue_numbers) -%} 32 | {%- for cf in commit_ref_candidates -%} 33 | {%- if cf | length in (7, 8, 40) and cf | int(default=None, base=16) is not none -%} 34 | {%- set _ = commit_refs.append(cf) -%} 35 | {%- else -%} 36 | {%- set _ = arbitrary_refs.append(cf) -%} 37 | {%- endif -%} 38 | {%- endfor -%} 39 | {%- endwith -%} 40 | 41 | - {{ text }} 42 | 43 | {{- '\n\n' -}} 44 | 45 | {%- if pr_issue_numbers %} 46 | *PRs and issues:* {issue}`{{ pr_issue_numbers | join('`, {issue}`') }}` 47 | {{- '\n' -}} 48 | {%- endif -%} 49 | 50 | {%- if commit_refs %} 51 | *Related commits:* {commit}`{{ commit_refs | join('`, {commit}`') }}` 52 | {{- '\n' -}} 53 | {%- endif -%} 54 | 55 | {%- if arbitrary_refs %} 56 | *Unlinked references:* {{ arbitrary_refs | join(', ') }} 57 | {{- '\n' -}} 58 | {%- endif -%} 59 | 60 | {{- '\n' -}} 61 | 62 | {% endfor -%} 63 | 64 | {% endfor %} 65 | {% else %} 66 | No significant changes. 67 | 68 | {% endif %} 69 | 70 | {% endfor %} 71 | -------------------------------------------------------------------------------- /.github/workflows/reusable-qa.yml: -------------------------------------------------------------------------------- 1 | name: QA 2 | 3 | on: 4 | workflow_call: 5 | 6 | jobs: 7 | qa: 8 | name: ${{ matrix.toxenv }} 9 | runs-on: ubuntu-latest 10 | timeout-minutes: 2 # network is slow sometimes 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | toxenv: 15 | - readme 16 | - build-docs 17 | - linkcheck-docs 18 | - changelog-draft 19 | python-version: 20 | - "3.13" 21 | env: 22 | PY_COLORS: 1 23 | TOXENV: ${{ matrix.toxenv }} 24 | TOX_PARALLEL_NO_SPINNER: 1 25 | steps: 26 | - uses: actions/checkout@v5 27 | - name: Set up Python ${{ matrix.python-version }} 28 | uses: actions/setup-python@v6 29 | with: 30 | python-version: ${{ matrix.python-version }} 31 | - name: Get pip cache dir 32 | id: pip-cache 33 | run: | 34 | echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" 35 | - name: Pip cache 36 | uses: actions/cache@v4 37 | with: 38 | path: ${{ steps.pip-cache.outputs.dir }} 39 | key: >- 40 | ${{ runner.os }}-pip-${{ hashFiles('setup.cfg') }}-${{ 41 | hashFiles('pyproject.toml') }}-${{ hashFiles('tox.ini') }}-${{ 42 | hashFiles('.pre-commit-config.yaml') }} 43 | restore-keys: | 44 | ${{ runner.os }}-pip- 45 | ${{ runner.os }}- 46 | - name: Prepare cache key 47 | id: cache-key 48 | run: echo "sha-256=$(python -VV | sha256sum | cut -d' ' -f1)" >> "${GITHUB_OUTPUT}" 49 | - uses: actions/cache@v4 50 | with: 51 | path: ~/.cache/pre-commit 52 | key: pre-commit|${{ steps.cache-key.outputs.sha-256 }}|${{ hashFiles('.pre-commit-config.yaml') }} 53 | - name: Install tox 54 | run: pip install tox 55 | - name: Prepare test environment 56 | run: tox -vv --notest -p auto --parallel-live 57 | - name: Test ${{ matrix.toxenv }} 58 | run: tox --skip-pkg-install 59 | -------------------------------------------------------------------------------- /piptools/logging.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | import logging 5 | import sys 6 | import typing as _t 7 | from collections.abc import Iterator 8 | 9 | import click 10 | 11 | # Initialise the builtin logging 
module for other components using it.
12 | # Ex: pip
13 | logging.basicConfig()
14 | 
15 | 
16 | class LogContext:
17 |     stream = sys.stderr
18 | 
19 |     def __init__(self, verbosity: int = 0, indent_width: int = 2):
20 |         self.verbosity = self._initial_verbosity = verbosity
21 |         self.current_indent = self._initial_indent = 0
22 |         self._indent_width = self._initial_indent_width = indent_width
23 | 
24 |     def log(self, message: str, *args: _t.Any, **kwargs: _t.Any) -> None:
25 |         kwargs.setdefault("err", True)
26 |         prefix = " " * self.current_indent
27 |         click.secho(prefix + message, *args, **kwargs)
28 | 
29 |     def debug(self, message: str, *args: _t.Any, **kwargs: _t.Any) -> None:
30 |         if self.verbosity >= 1:
31 |             self.log(message, *args, **kwargs)
32 | 
33 |     def info(self, message: str, *args: _t.Any, **kwargs: _t.Any) -> None:
34 |         if self.verbosity >= 0:
35 |             self.log(message, *args, **kwargs)
36 | 
37 |     def warning(self, message: str, *args: _t.Any, **kwargs: _t.Any) -> None:
38 |         kwargs.setdefault("fg", "yellow")
39 |         self.log(message, *args, **kwargs)
40 | 
41 |     def error(self, message: str, *args: _t.Any, **kwargs: _t.Any) -> None:
42 |         kwargs.setdefault("fg", "red")
43 |         self.log(message, *args, **kwargs)
44 | 
45 |     def _indent(self) -> None:
46 |         self.current_indent += self._indent_width
47 | 
48 |     def _dedent(self) -> None:
49 |         self.current_indent -= self._indent_width
50 | 
51 |     @contextlib.contextmanager
52 |     def indentation(self) -> Iterator[None]:
53 |         """
54 |         Increase indentation.
55 |         """
56 |         self._indent()
57 |         try:
58 |             yield
59 |         finally:
60 |             self._dedent()
61 | 
62 |     def reset(self) -> None:
63 |         """Reset logger to initial state."""
64 |         self.verbosity = self._initial_verbosity
65 |         self.current_indent = self._initial_indent
66 |         self._indent_width = self._initial_indent_width
67 | 
68 | 
69 | log = LogContext()
70 | 
--------------------------------------------------------------------------------
/tests/test_repository_local.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | import pytest
4 | 
5 | from piptools.repositories.local import LocalRequirementsRepository
6 | from piptools.utils import key_from_ireq
7 | 
8 | EXPECTED = {"sha256:5e6071ee6e4c59e0d0408d366fe9b66781d2cf01be9a6e19a2433bb3c5336330"}
9 | 
10 | 
11 | def test_get_hashes_local_repository_cache_miss(
12 |     capsys, pip_conf, from_line, pypi_repository
13 | ):
14 |     existing_pins = {}
15 |     local_repository = LocalRequirementsRepository(existing_pins, pypi_repository)
16 |     with local_repository.allow_all_wheels():
17 |         hashes = local_repository.get_hashes(from_line("small-fake-a==0.1"))
18 |     assert hashes == EXPECTED
19 |     captured = capsys.readouterr()
20 |     assert captured.out == ""
21 |     assert captured.err == ""
22 | 
23 | 
24 | def test_get_hashes_local_repository_cache_hit(from_line, repository):
25 |     # Create an install requirement with the hashes included in its options
26 |     hash_options = {"sha256": [entry.split(":")[1] for entry in EXPECTED]}
27 |     req = from_line("small-fake-a==0.1", hash_options=hash_options)
28 |     existing_pins = {key_from_ireq(req): req}
29 | 
30 |     # Use fake repository so that we know the hashes are coming from cache
31 |     local_repository = LocalRequirementsRepository(existing_pins, repository)
32 |     with local_repository.allow_all_wheels():
33 |         hashes = local_repository.get_hashes(from_line("small-fake-a==0.1"))
34 |     assert hashes == EXPECTED
35 | 
36 | 
37 | NONSENSE = {"sha256:NONSENSE"}
38 | 
39 | 
40 | 
@pytest.mark.parametrize( 41 | ("reuse_hashes", "expected"), ((True, NONSENSE), (False, EXPECTED)) 42 | ) 43 | def test_toggle_reuse_hashes_local_repository( 44 | capsys, pip_conf, from_line, pypi_repository, reuse_hashes, expected 45 | ): 46 | # Create an install requirement with the hashes included in its options 47 | hash_options = {"sha256": [entry.split(":")[1] for entry in NONSENSE]} 48 | req = from_line("small-fake-a==0.1", hash_options=hash_options) 49 | existing_pins = {key_from_ireq(req): req} 50 | 51 | local_repository = LocalRequirementsRepository( 52 | existing_pins, pypi_repository, reuse_hashes=reuse_hashes 53 | ) 54 | with local_repository.allow_all_wheels(): 55 | assert local_repository.get_hashes(from_line("small-fake-a==0.1")) == expected 56 | captured = capsys.readouterr() 57 | assert captured.out == "" 58 | assert captured.err == "" 59 | -------------------------------------------------------------------------------- /towncrier.toml: -------------------------------------------------------------------------------- 1 | [tool.towncrier] 2 | package = "piptools" 3 | filename = "CHANGELOG.md" 4 | start_string = "\n" 5 | directory = "changelog.d/" 6 | title_format = "" 7 | template = "changelog.d/.towncrier_template.md.jinja" 8 | # the issue format is bare here, but then handled in the template 9 | # see changelog.d/.towncrier_template.md.jinja for details 10 | issue_format = "{issue}" 11 | underlines = ["", "", ""] 12 | 13 | [[tool.towncrier.section]] 14 | path = "" 15 | 16 | # Fully redeclare towncrier types to control names and set 'showcontent' 17 | 18 | [[tool.towncrier.type]] 19 | # Improper/undesired behavior that got corrected. 20 | directory = "bugfix" 21 | name = "Bug fixes" 22 | showcontent = true 23 | 24 | [[tool.towncrier.type]] 25 | # New behaviors, CLI flags, etc. 26 | directory = "feature" 27 | name = "Features" 28 | showcontent = true 29 | 30 | [[tool.towncrier.type]] 31 | # Declarations of future removals and breaking changes in behavior. 32 | directory = "deprecation" 33 | name = "Deprecations (removal in next major release)" 34 | showcontent = true 35 | 36 | [[tool.towncrier.type]] 37 | # A change in the behavior of a command, such that users may observe the 38 | # change purely via their usage and be negatively impacted. 39 | directory = "breaking" 40 | name = "Removals and backward incompatible breaking changes" 41 | showcontent = true 42 | 43 | [[tool.towncrier.type]] 44 | # Notable updates to the documentation structure or build process. 45 | directory = "doc" 46 | name = "Improved documentation" 47 | showcontent = true 48 | 49 | [[tool.towncrier.type]] 50 | # Notes for downstreams about unobvious side effects and tooling. Changes 51 | # in the test invocation considerations and runtime assumptions. 52 | directory = "packaging" 53 | name = "Packaging updates and notes for downstreams" 54 | showcontent = true 55 | 56 | [[tool.towncrier.type]] 57 | # Stuff that affects the contributor experience. e.g. Running tests, 58 | # building the docs, setting up the development environment. 59 | directory = "contrib" 60 | name = "Contributor-facing changes" 61 | showcontent = true 62 | 63 | [[tool.towncrier.type]] 64 | # Changes that are hard to assign to any of the above categories. 
65 | directory = "misc" 66 | name = "Miscellaneous internal changes" 67 | showcontent = true 68 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | As contributors and maintainers of the Jazzband projects, and in the interest of 4 | fostering an open and welcoming community, we pledge to respect all people who 5 | contribute through reporting issues, posting feature requests, updating documentation, 6 | submitting pull requests or patches, and other activities. 7 | 8 | We are committed to making participation in the Jazzband a harassment-free experience 9 | for everyone, regardless of the level of experience, gender, gender identity and 10 | expression, sexual orientation, disability, personal appearance, body size, race, 11 | ethnicity, age, religion, or nationality. 12 | 13 | Examples of unacceptable behavior by participants include: 14 | 15 | - The use of sexualized language or imagery 16 | - Personal attacks 17 | - Trolling or insulting/derogatory comments 18 | - Public or private harassment 19 | - Publishing others' private information, such as physical or electronic addresses, 20 | without explicit permission 21 | - Other unethical or unprofessional conduct 22 | 23 | The Jazzband roadies have the right and responsibility to remove, edit, or reject 24 | comments, commits, code, wiki edits, issues, and other contributions that are not 25 | aligned to this Code of Conduct, or to ban temporarily or permanently any contributor 26 | for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 27 | 28 | By adopting this Code of Conduct, the roadies commit themselves to fairly and 29 | consistently applying these principles to every aspect of managing the Jazzband 30 | projects. Roadies who do not follow or enforce the Code of Conduct may be permanently 31 | removed from the Jazzband roadies. 32 | 33 | This code of conduct applies both within project spaces and in public spaces when an 34 | individual is representing the project or its community. 35 | 36 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by 37 | contacting the roadies at `roadies@jazzband.co`. All complaints will be reviewed and 38 | investigated and will result in a response that is deemed necessary and appropriate to 39 | the circumstances. Roadies are obligated to maintain confidentiality with regard to the 40 | reporter of an incident. 
41 | 42 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 43 | 1.3.0, available at [https://contributor-covenant.org/version/1/3/0/][version] 44 | 45 | [homepage]: https://contributor-covenant.org 46 | [version]: https://contributor-covenant.org/version/1/3/0/ 47 | -------------------------------------------------------------------------------- /piptools/repositories/base.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import optparse 4 | from abc import ABCMeta, abstractmethod 5 | from collections.abc import Iterator 6 | from contextlib import contextmanager 7 | 8 | from pip._internal.commands.install import InstallCommand 9 | from pip._internal.index.package_finder import PackageFinder 10 | from pip._internal.models.index import PyPI 11 | from pip._internal.network.session import PipSession 12 | from pip._internal.req import InstallRequirement 13 | 14 | 15 | class BaseRepository(metaclass=ABCMeta): 16 | DEFAULT_INDEX_URL = PyPI.simple_url 17 | 18 | def clear_caches(self) -> None: 19 | """Should clear any caches used by the implementation.""" 20 | 21 | @abstractmethod 22 | def find_best_match( 23 | self, ireq: InstallRequirement, prereleases: bool | None 24 | ) -> InstallRequirement: 25 | """ 26 | Returns a pinned InstallRequirement object that indicates the best match 27 | for the given InstallRequirement according to the external repository. 28 | """ 29 | 30 | @abstractmethod 31 | def get_dependencies(self, ireq: InstallRequirement) -> set[InstallRequirement]: 32 | """ 33 | Given a pinned, URL, or editable InstallRequirement, returns a set of 34 | dependencies (also InstallRequirements, but not necessarily pinned). 35 | They indicate the secondary dependencies for the given requirement. 36 | """ 37 | 38 | @abstractmethod 39 | def get_hashes(self, ireq: InstallRequirement) -> set[str]: 40 | """ 41 | Given a pinned InstallRequirement, returns a set of hashes that represent 42 | all of the files for a given requirement. It is not acceptable for an 43 | editable or unpinned requirement to be passed to this function. 44 | """ 45 | 46 | @abstractmethod 47 | @contextmanager 48 | def allow_all_wheels(self) -> Iterator[None]: 49 | """ 50 | Monkey patches pip.Wheel to allow wheels from all platforms and Python versions. 
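
        Typical use, mirroring the test suite (a sketch; the concrete
        ``repository`` and ``ireq`` objects come from project fixtures)::

            with repository.allow_all_wheels():
                hashes = repository.get_hashes(ireq)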
51 | """ 52 | 53 | @property 54 | @abstractmethod 55 | def options(self) -> optparse.Values: 56 | """Returns parsed pip options""" 57 | 58 | @property 59 | @abstractmethod 60 | def session(self) -> PipSession: 61 | """Returns a session to make requests""" 62 | 63 | @property 64 | @abstractmethod 65 | def finder(self) -> PackageFinder: 66 | """Returns a package finder to interact with simple repository API (PEP 503)""" 67 | 68 | @property 69 | @abstractmethod 70 | def command(self) -> InstallCommand: 71 | """Return an install command.""" 72 | -------------------------------------------------------------------------------- /piptools/exceptions.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import operator 4 | from collections.abc import Iterable 5 | 6 | from pip._internal.index.package_finder import PackageFinder 7 | from pip._internal.models.candidate import InstallationCandidate 8 | from pip._internal.req import InstallRequirement 9 | from pip._internal.utils.misc import redact_auth_from_url 10 | 11 | 12 | class PipToolsError(Exception): 13 | pass 14 | 15 | 16 | class NoCandidateFound(PipToolsError): 17 | def __init__( 18 | self, 19 | ireq: InstallRequirement, 20 | candidates_tried: Iterable[InstallationCandidate], 21 | finder: PackageFinder, 22 | ) -> None: 23 | self.ireq = ireq 24 | self.candidates_tried = candidates_tried 25 | self.finder = finder 26 | 27 | def __str__(self) -> str: 28 | versions = [] 29 | pre_versions = [] 30 | 31 | for candidate in sorted( 32 | self.candidates_tried, key=operator.attrgetter("version") 33 | ): 34 | version = str(candidate.version) 35 | if candidate.version.is_prerelease: 36 | pre_versions.append(version) 37 | else: 38 | versions.append(version) 39 | 40 | lines = [f"Could not find a version that matches {self.ireq}"] 41 | 42 | if versions: 43 | lines.append(f"Tried: {', '.join(versions)}") 44 | 45 | if pre_versions: 46 | if self.finder.allow_all_prereleases: 47 | line = "Tried" 48 | else: 49 | line = "Skipped" 50 | 51 | line += f" pre-versions: {', '.join(pre_versions)}" 52 | lines.append(line) 53 | 54 | if versions or pre_versions: 55 | lines.append( 56 | "There are incompatible versions in the resolved dependencies:" 57 | ) 58 | source_ireqs = getattr(self.ireq, "_source_ireqs", []) 59 | lines.extend(f" {ireq}" for ireq in source_ireqs) 60 | else: 61 | redacted_urls = tuple( 62 | redact_auth_from_url(url) for url in self.finder.index_urls 63 | ) 64 | lines.append("No versions found") 65 | lines.append( 66 | "{} {} reachable?".format( 67 | "Were" if len(redacted_urls) > 1 else "Was", 68 | " or ".join(redacted_urls), 69 | ) 70 | ) 71 | return "\n".join(lines) 72 | 73 | 74 | class IncompatibleRequirements(PipToolsError): 75 | def __init__(self, ireq_a: InstallRequirement, ireq_b: InstallRequirement) -> None: 76 | self.ireq_a = ireq_a 77 | self.ireq_b = ireq_b 78 | 79 | def __str__(self) -> str: 80 | message = "Incompatible requirements found: {} and {}" 81 | return message.format(self.ireq_a, self.ireq_b) 82 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | [![Jazzband](https://jazzband.co/static/img/jazzband.svg)](https://jazzband.co/) 2 | 3 | This is a [Jazzband](https://jazzband.co/) project. 
By contributing you agree 4 | to abide by the [Contributor Code of Conduct](https://jazzband.co/about/conduct) 5 | and follow the [guidelines](https://jazzband.co/about/guidelines). 6 | 7 | ## Project Contribution Guidelines 8 | 9 | Here are a few additional or emphasized guidelines to follow when contributing to `pip-tools`: 10 | 11 | - If you need a virtualenv outside of `tox`, you can reuse its configuration to provision one with [tox devenv](). 12 | - Always provide tests for your changes and run `tox -p all` to make sure they pass the checks locally. 13 | - Give a clear one-line description in the PR (that the maintainers can add to [CHANGELOG] afterwards). 14 | - Wait for the review of at least one other contributor before merging (even if you're a Jazzband member). 15 | - Before merging, assign the PR to a milestone for a version to help with the release process. 16 | 17 | The only exception to those guidelines is for trivial changes, such as 18 | documentation corrections or contributions that do not change pip-tools itself. 19 | 20 | Contributions following these guidelines are always welcome, encouraged, and appreciated. 21 | 22 | ## Project Release Process 23 | 24 | Releases require approval by a member of the [`pip-tools-leads` team]. 25 | 26 | Commands given below may assume that your fork is named `origin` in git remotes and the main repo is named `upstream`. 27 | 28 | This is the current release process: 29 | 30 | - Create a branch for the release, _e.g., `release/v3.4.0`_. 31 | - Use `towncrier` to update the [CHANGELOG], _e.g., `towncrier build --version v3.4.0`_. 32 | - Push the branch to your fork, _e.g., `git push -u origin release/v3.4.0`_, 33 | and create a pull request. 34 | - Merge the pull request after the changes are approved. 35 | - Make sure that the tests/CI still pass. 36 | - Fetch the latest changes to `main` locally. 37 | - Create an unsigned tag with the release version number prefixed with a `v`, 38 | _e.g., `git tag -a v3.4.0 -m v3.4.0`_, and push it to `upstream`. 39 | - Create a GitHub Release, populated with a copy of the changelog and set to 40 | "Create a discussion for this release" in the `Announcements` category. 41 | Some of the markdown will need to be reformatted into GFM. 42 | The release title and tag should be the newly created tag. 43 | - The [GitHub Release Workflow] is triggered by the release and publishes to PyPI. 44 | A member of the [`pip-tools-leads` team] must approve the publication step. 45 | - Once the release to PyPI is confirmed, close the milestone. 46 | - Publish any release notifications, 47 | _e.g., pip-tools matrix channel, discuss.python.org, bluesky, mastodon, pypa Discord_. 
48 | 49 | [changelog]: ./CHANGELOG.md 50 | [GitHub Release Workflow]: https://github.com/jazzband/pip-tools/actions/workflows/release.yml 51 | [`pip-tools-leads` team]: https://github.com/orgs/jazzband/teams/pip-tools-leads 52 | -------------------------------------------------------------------------------- /tests/test_fake_index.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pytest 4 | 5 | 6 | def test_find_best_match(from_line, repository): 7 | ireq = from_line("django>1.5") 8 | assert str(repository.find_best_match(ireq)) == "django==1.8" 9 | 10 | ireq = from_line("django<1.8,~=1.6") 11 | assert str(repository.find_best_match(ireq)) == "django==1.7.7" 12 | 13 | # Extras available, but no extras specified 14 | ireq = from_line("ipython") 15 | assert str(repository.find_best_match(ireq)) == "ipython==2.1.0" 16 | 17 | # Make sure we include extras. They should be sorted in the output. 18 | ireq = from_line("ipython[notebook,nbconvert]") 19 | assert str(repository.find_best_match(ireq)) == "ipython[nbconvert,notebook]==2.1.0" 20 | 21 | 22 | def test_find_best_match_incl_prereleases(from_line, repository): 23 | ireq = from_line("SQLAlchemy") 24 | assert ( 25 | str(repository.find_best_match(ireq, prereleases=False)) == "sqlalchemy==0.9.9" 26 | ) 27 | assert ( 28 | str(repository.find_best_match(ireq, prereleases=True)) == "sqlalchemy==1.0.0b5" 29 | ) 30 | 31 | 32 | def test_find_best_match_for_editable(from_editable, repository): 33 | ireq = from_editable("git+git://whatev.org/blah.git#egg=flask") 34 | assert repository.find_best_match(ireq) == ireq 35 | 36 | 37 | def test_get_dependencies(from_line, repository): 38 | ireq = from_line("django==1.6.11") 39 | assert repository.get_dependencies(ireq) == [] 40 | 41 | ireq = from_line("Flask==0.10.1") 42 | dependencies = repository.get_dependencies(ireq) 43 | assert {str(req) for req in dependencies} == { 44 | "Werkzeug>=0.7", 45 | "Jinja2>=2.4", 46 | "itsdangerous>=0.21", 47 | } 48 | 49 | ireq = from_line("ipython==2.1.0") 50 | dependencies = repository.get_dependencies(ireq) 51 | assert {str(req) for req in dependencies} == {"gnureadline"} 52 | 53 | ireq = from_line("ipython[notebook]==2.1.0") 54 | dependencies = repository.get_dependencies(ireq) 55 | assert {str(req) for req in dependencies} == { 56 | "gnureadline", 57 | "pyzmq>=2.1.11", 58 | "tornado>=3.1", 59 | "jinja2", 60 | } 61 | 62 | ireq = from_line("ipython[notebook,nbconvert]==2.1.0") 63 | dependencies = repository.get_dependencies(ireq) 64 | assert {str(req) for req in dependencies} == { 65 | "gnureadline", 66 | "pyzmq>=2.1.11", 67 | "tornado>=3.1", 68 | "jinja2", 69 | "pygments", 70 | "Sphinx>=0.3", 71 | } 72 | 73 | 74 | def test_get_dependencies_for_editable(from_editable, repository): 75 | ireq = from_editable("git+git://example.org/django.git#egg=django") 76 | assert repository.get_dependencies(ireq) == [] 77 | 78 | 79 | def test_get_dependencies_rejects_non_pinned_requirements(from_line, repository): 80 | not_a_pinned_req = from_line("django>1.6") 81 | with pytest.raises(TypeError): 82 | repository.get_dependencies(not_a_pinned_req) 83 | 84 | 85 | def test_get_hashes(from_line, repository): 86 | ireq = from_line("django==1.8") 87 | expected = { 88 | "test:123", 89 | "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", 90 | } 91 | assert repository.get_hashes(ireq) == expected 92 | -------------------------------------------------------------------------------- 
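The fixture data that follows backs the tests above: each top-level key is a
package name mapping versions to dependency lists keyed by extra name, with the
empty string holding the base dependencies. As a minimal sketch (not project
code; the real fixture repository is wired up in the test suite's conftest,
which is not shown here), the merge that `test_get_dependencies` expects for
`ipython[notebook]==2.1.0` can be reproduced like this:

```python
from __future__ import annotations

import json


def fake_dependencies(
    index: dict, name: str, version: str, extras: frozenset[str]
) -> set[str]:
    """Merge the base dependencies ("") with those of each requested extra."""
    per_extra = index[name][version]
    return {dep for extra in {""} | extras for dep in per_extra.get(extra, [])}


# Path is relative to the repository root (an assumption of this sketch).
with open("tests/test_data/fake-index.json") as fp:
    index = json.load(fp)

assert fake_dependencies(index, "ipython", "2.1.0", frozenset({"notebook"})) == {
    "gnureadline",
    "tornado>=3.1",
    "pyzmq>=2.1.11",
    "jinja2",
}
```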
/tests/test_data/fake-index.json: -------------------------------------------------------------------------------- 1 | { 2 | "aiohttp": { 3 | "3.6.2": { "": ["yarl"] } 4 | }, 5 | "anyjson": { 6 | "0.3.3": { "": [] } 7 | }, 8 | "amqp": { 9 | "1.4.9": { "": [] }, 10 | "2.0.2": { "": ["vine>=1.1.1"] }, 11 | "2.1.4": { "": ["vine>=1.1.3"] } 12 | }, 13 | "appdirs": { 14 | "1.4.9": { "": [] } 15 | }, 16 | "arrow": { 17 | "0.5.0": { "": ["python-dateutil"] }, 18 | "0.5.4": { "": ["python-dateutil"] } 19 | }, 20 | "billiard": { 21 | "3.3.0.23": { "": [] }, 22 | "3.5.0.2": { "": [] } 23 | }, 24 | "celery": { 25 | "3.1.18": { 26 | "": ["kombu<3.1,>=3.0.25", "pytz>0.dev.0", "billiard<3.4,>=3.3.0.20"] 27 | }, 28 | "3.1.23": { "": ["kombu>=3.0.34,<4", "pytz>0.dev.0", "billiard>=3.3.0.23"] }, 29 | "4.0.2": { 30 | "": ["kombu<5.0,>=4.0.2", "pytz>0.dev.0", "billiard<3.6.0,>=3.5.0.2"] 31 | } 32 | }, 33 | "click": { 34 | "3.3": { "": [] }, 35 | "4.0": { "": [] } 36 | }, 37 | "django": { 38 | "1.6.11": { "": [] }, 39 | "1.7.7": { "": [] }, 40 | "1.8": { "": [] } 41 | }, 42 | "fake-piptools-test-with-pinned-deps": { 43 | "0.1": { "": ["celery==3.1.18"] } 44 | }, 45 | "fake-piptools-test-with-unsafe-deps": { 46 | "0.1": { "": ["setuptools==34.0.0"] } 47 | }, 48 | "flask": { 49 | "0.10.1": { "": ["Jinja2>=2.4", "Werkzeug>=0.7", "itsdangerous>=0.21"] } 50 | }, 51 | "flask-cors": { 52 | "1.10.2": { "": ["Flask>=0.9", "Six"] }, 53 | "2.0.0": { "": ["Flask>=0.9", "Six"] } 54 | }, 55 | "gnureadline": { 56 | "6.3.3": { "": [] } 57 | }, 58 | "html5lib": { 59 | "0.999999999": { "": ["setuptools>=18.5"] } 60 | }, 61 | "idna": { 62 | "2.8": { "": [] } 63 | }, 64 | "ipython": { 65 | "2.1.0": { 66 | "": ["gnureadline"], 67 | "nbconvert": ["pygments", "jinja2", "Sphinx>=0.3"], 68 | "notebook": ["tornado>=3.1", "pyzmq>=2.1.11", "jinja2"] 69 | } 70 | }, 71 | "itsdangerous": { 72 | "0.24": { "": [] } 73 | }, 74 | "jinja2": { 75 | "2.7.3": { "": ["markupsafe"] } 76 | }, 77 | "kombu": { 78 | "3.0.35": { "": ["anyjson>=0.3.3", "amqp>=1.4.9,<2.0"] }, 79 | "4.0.2": { "": ["amqp<3.0,>=2.1.4"] } 80 | }, 81 | "librabbitmq": { 82 | "1.6.1": { "": ["amqp>=1.4.6"] } 83 | }, 84 | "markupsafe": { 85 | "0.23": { "": [] } 86 | }, 87 | "packaging": { 88 | "16.8": { "": [] } 89 | }, 90 | "psycopg2": { 91 | "2.5.4": { "": [] }, 92 | "2.6": { "": [] } 93 | }, 94 | "pygments": { 95 | "1.5": { "": [] } 96 | }, 97 | "pyzmq": { 98 | "2.1.12": { "": [] } 99 | }, 100 | "pytz": { 101 | "2016.4": { "": [] } 102 | }, 103 | "setuptools": { 104 | "34.0.0": { "": ["packaging>=16.8", "appdirs>=1.4.0"] }, 105 | "35.0.0": { "": [] } 106 | }, 107 | "six": { 108 | "1.6.1": { "": [] }, 109 | "1.9.0": { "": [] } 110 | }, 111 | "sphinx": { 112 | "0.3": { "": [] } 113 | }, 114 | "sqlalchemy": { 115 | "0.9.8": { "": [] }, 116 | "0.9.9": { "": [] }, 117 | "1.0.0b5": { "": [] } 118 | }, 119 | "tornado": { 120 | "3.2.2": { "": [] } 121 | }, 122 | "vine": { 123 | "1.1.1": { "": [] }, 124 | "1.1.3": { "": [] } 125 | }, 126 | "werkzeug": { 127 | "0.6": { "": [] }, 128 | "0.10": { "": [] }, 129 | "0.10.4": { "": [] } 130 | }, 131 | "yarl": { 132 | "1.4.2": { "": ["idna"] } 133 | } 134 | } 135 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.13 3 | # by the following command: 4 | # 5 | # pip-compile --allow-unsafe --output-file=docs/requirements.txt --strip-extras 
./pyproject.toml docs/requirements.in 6 | # 7 | alabaster==0.7.13 8 | # via sphinx 9 | babel==2.12.1 10 | # via sphinx 11 | beautifulsoup4==4.12.2 12 | # via furo 13 | build==1.3.0 14 | # via pip-tools (pyproject.toml) 15 | certifi==2024.7.4 16 | # via requests 17 | charset-normalizer==3.2.0 18 | # via requests 19 | click==8.2.1 20 | # via 21 | # pip-tools (pyproject.toml) 22 | # towncrier 23 | docutils==0.20.1 24 | # via 25 | # myst-parser 26 | # sphinx 27 | furo==2023.8.17 28 | # via -r docs/requirements.in 29 | idna==3.7 30 | # via requests 31 | imagesize==1.4.1 32 | # via sphinx 33 | jinja2==3.1.6 34 | # via 35 | # myst-parser 36 | # sphinx 37 | # towncrier 38 | markdown-it-py==3.0.0 39 | # via 40 | # mdit-py-plugins 41 | # myst-parser 42 | markupsafe==2.1.3 43 | # via jinja2 44 | mdit-py-plugins==0.4.0 45 | # via myst-parser 46 | mdurl==0.1.2 47 | # via markdown-it-py 48 | myst-parser==2.0.0 49 | # via -r docs/requirements.in 50 | packaging==23.1 51 | # via 52 | # build 53 | # setuptools-scm 54 | # sphinx 55 | pbr==6.0.0 56 | # via sphinxcontrib-apidoc 57 | pygments==2.16.1 58 | # via 59 | # furo 60 | # sphinx 61 | pyproject-hooks==1.2.0 62 | # via 63 | # build 64 | # pip-tools (pyproject.toml) 65 | pyyaml==6.0.1 66 | # via myst-parser 67 | requests==2.32.4 68 | # via sphinx 69 | setuptools-scm==7.1.0 70 | # via -r docs/requirements.in 71 | snowballstemmer==2.2.0 72 | # via sphinx 73 | soupsieve==2.4.1 74 | # via beautifulsoup4 75 | sphinx==7.2.6 76 | # via 77 | # -r docs/requirements.in 78 | # furo 79 | # myst-parser 80 | # sphinx-basic-ng 81 | # sphinx-issues 82 | # sphinxcontrib-apidoc 83 | # sphinxcontrib-applehelp 84 | # sphinxcontrib-devhelp 85 | # sphinxcontrib-htmlhelp 86 | # sphinxcontrib-programoutput 87 | # sphinxcontrib-qthelp 88 | # sphinxcontrib-serializinghtml 89 | # sphinxcontrib-towncrier 90 | sphinx-basic-ng==1.0.0b2 91 | # via furo 92 | sphinx-issues==5.0.1 93 | # via -r docs/requirements.in 94 | sphinxcontrib-apidoc==0.5.0 95 | # via -r docs/requirements.in 96 | sphinxcontrib-applehelp==1.0.7 97 | # via sphinx 98 | sphinxcontrib-devhelp==1.0.5 99 | # via sphinx 100 | sphinxcontrib-htmlhelp==2.0.4 101 | # via sphinx 102 | sphinxcontrib-jsmath==1.0.1 103 | # via sphinx 104 | sphinxcontrib-programoutput==0.17 105 | # via -r docs/requirements.in 106 | sphinxcontrib-qthelp==1.0.6 107 | # via sphinx 108 | sphinxcontrib-serializinghtml==1.1.10 109 | # via sphinx 110 | sphinxcontrib-towncrier==0.5.0a0 111 | # via -r docs/requirements.in 112 | towncrier==24.8.0 113 | # via sphinxcontrib-towncrier 114 | typing-extensions==4.7.1 115 | # via setuptools-scm 116 | urllib3==2.5.0 117 | # via requests 118 | wheel==0.45.1 119 | # via pip-tools (pyproject.toml) 120 | 121 | # The following packages are considered to be unsafe in a requirements file: 122 | pip==25.1 123 | # via pip-tools (pyproject.toml) 124 | setuptools==78.1.1 125 | # via 126 | # pip-tools (pyproject.toml) 127 | # setuptools-scm 128 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/psf/black-pre-commit-mirror 3 | rev: 25.12.0 4 | hooks: 5 | - id: black 6 | args: [--target-version=py39] 7 | - repo: https://github.com/PyCQA/isort 8 | rev: 7.0.0 9 | hooks: 10 | - id: isort 11 | - repo: https://github.com/asottile/pyupgrade 12 | rev: v3.21.2 13 | hooks: 14 | - id: pyupgrade 15 | args: [--py39-plus] 16 | 17 | - repo: 
https://github.com/python-jsonschema/check-jsonschema.git 18 | rev: 0.35.0 19 | hooks: 20 | - id: check-github-actions 21 | - id: check-github-workflows 22 | - id: check-jsonschema 23 | alias: enforce-gha-timeouts 24 | name: Check GitHub Workflows set timeout-minutes 25 | args: 26 | - --builtin-schema 27 | - github-workflows-require-timeout 28 | files: ^\.github/workflows/[^/]+$ 29 | types: 30 | - yaml 31 | - id: check-readthedocs 32 | 33 | - repo: https://github.com/PyCQA/flake8 34 | rev: 7.3.0 35 | hooks: 36 | - id: flake8 37 | additional_dependencies: 38 | - flake8-pytest-style == 2.2.0 39 | - flake8-typing-as-t == 1.1.0 40 | - repo: https://github.com/pre-commit/mirrors-mypy 41 | rev: v1.19.0 42 | hooks: 43 | - id: mypy 44 | # Avoid error: Duplicate module named 'setup' 45 | # https://github.com/python/mypy/issues/4008 46 | # Keep exclude in sync with mypy own excludes 47 | exclude: ^tests/test_data/ 48 | additional_dependencies: 49 | - click==8.0.1 50 | - pep517==0.10.0 51 | - toml==0.10.2 52 | - pip==20.3.4 53 | - build==1.0.0 54 | - pyproject_hooks==1.0.0 55 | - pytest==7.4.2 56 | - repo: https://github.com/PyCQA/bandit 57 | rev: 1.9.2 58 | hooks: 59 | - id: bandit 60 | args: [--ini, .bandit] 61 | exclude: ^tests/ 62 | 63 | - repo: local 64 | hooks: 65 | - id: changelogs-md 66 | name: changelog filenames 67 | language: fail 68 | entry: >- 69 | Changelog files must be named 70 | ####.( 71 | bugfix 72 | | feature 73 | | deprecation 74 | | breaking 75 | | doc 76 | | packaging 77 | | contrib 78 | | misc 79 | )(.#)?(.md)? 80 | exclude: >- 81 | (?x) 82 | ^ 83 | changelog.d/( 84 | \.gitignore 85 | |\.towncrier_template\.md\.jinja 86 | |\.draft_changelog_partial\.md 87 | |README\.md 88 | |(\d+|[0-9a-f]{8}|[0-9a-f]{7}|[0-9a-f]{40}|\+[^.]+)\.( 89 | bugfix 90 | |feature 91 | |deprecation 92 | |breaking 93 | |doc 94 | |packaging 95 | |contrib 96 | |misc 97 | )(\.\d+)?(\.md)? 98 | ) 99 | $ 100 | files: ^changelog\.d/ 101 | types: [] 102 | types_or: 103 | - file 104 | - symlink 105 | 106 | - repo: https://github.com/rhysd/actionlint.git 107 | rev: v1.7.9 108 | hooks: 109 | - id: actionlint 110 | additional_dependencies: 111 | # actionlint has a shellcheck integration which extracts shell scripts in `run:` steps from GitHub Actions 112 | # and checks these with shellcheck. 113 | # The integration only works if shellcheck is installed. 114 | - "github.com/wasilibs/go-shellcheck/cmd/shellcheck@v0.11.1" 115 | 116 | ... 117 | -------------------------------------------------------------------------------- /tests/test_circular_imports.py: -------------------------------------------------------------------------------- 1 | """Tests for circular imports in all local packages and modules. 2 | 3 | This ensures all internal packages can be imported right away without 4 | any need to import some other module before doing so. 
5 | 6 | This module is based on an idea that pytest uses for self-testing: 7 | * https://github.com/aio-libs/aiohttp/blob/91108c9/tests/test_circular_imports.py 8 | * https://github.com/sanitizers/octomachinery/blob/be18b54/tests/circular_imports_test.py 9 | * https://github.com/pytest-dev/pytest/blob/d18c75b/testing/test_meta.py 10 | * https://twitter.com/codewithanthony/status/1229445110510735361 11 | """ 12 | 13 | from __future__ import annotations 14 | 15 | import os 16 | import pkgutil 17 | import subprocess 18 | import sys 19 | from collections.abc import Iterator 20 | from itertools import chain 21 | from pathlib import Path 22 | from types import ModuleType 23 | 24 | import pytest 25 | 26 | import piptools 27 | from piptools.utils import PIP_VERSION 28 | 29 | 30 | def _find_all_importables(pkg: ModuleType) -> list[str]: 31 | """Find all importables in the project. 32 | 33 | Return them in order. 34 | """ 35 | return sorted( 36 | set( 37 | chain.from_iterable( 38 | _discover_path_importables(Path(p), pkg.__name__) for p in pkg.__path__ 39 | ), 40 | ), 41 | ) 42 | 43 | 44 | def _discover_path_importables(pkg_pth: Path, pkg_name: str) -> Iterator[str]: 45 | """Yield all importables under a given path and package.""" 46 | for dir_path, _d, file_names in os.walk(pkg_pth): 47 | pkg_dir_path = Path(dir_path) 48 | 49 | if pkg_dir_path.parts[-1] == "__pycache__": 50 | continue 51 | 52 | if all(Path(_).suffix != ".py" for _ in file_names): # pragma: no cover 53 | continue 54 | 55 | rel_pt = pkg_dir_path.relative_to(pkg_pth) 56 | pkg_pref = ".".join((pkg_name,) + rel_pt.parts) 57 | yield from ( 58 | pkg_path 59 | for _, pkg_path, _ in pkgutil.walk_packages( 60 | (str(pkg_dir_path),), 61 | prefix=f"{pkg_pref}.", 62 | ) 63 | ) 64 | 65 | 66 | def _allowed_deprecation_warning_filters() -> list[str]: 67 | """ 68 | Return filters which allow for deprecation warnings based on the current 69 | test environment. 70 | """ 71 | # note that we can't use regex syntax in filters as of yet, only literals 72 | # https://github.com/python/cpython/pull/138149 allows regex usage, but is not 73 | # yet available on all Python versions we support 74 | flags: list[str] = [] 75 | if PIP_VERSION[:2] < (25, 3): 76 | flags.extend( 77 | ("-W", "ignore:pkg_resources is deprecated as an API.:DeprecationWarning:") 78 | ) 79 | if PIP_VERSION[:2] <= (22, 2): 80 | flags.extend( 81 | ( 82 | "-W", 83 | ( 84 | "ignore:path is deprecated. Use files() instead." 85 | ":DeprecationWarning:" 86 | ), 87 | "-W", 88 | ( 89 | "ignore:Creating a LegacyVersion has been deprecated " 90 | "and will be removed in the next major release" 91 | ":DeprecationWarning:" 92 | ), 93 | ) 94 | ) 95 | return flags 96 | 97 | 98 | @pytest.mark.parametrize("import_path", _find_all_importables(piptools)) 99 | def test_no_warnings(import_path: str) -> None: 100 | """Verify that each importable name can be independently imported. 101 | 102 | This checks for any import errors, including ones caused 103 | by circular imports. 104 | """ 105 | import_statement = f"import {import_path!s}" 106 | # On lower pip versions, we need to allow certain deprecation warnings. 
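# "-W error" promotes every warning raised while importing to an error, so
# the subprocess check below fails loudly on anything unexpected; the extra
# filters then selectively re-allow warnings outside our control.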
107 | flags = ("-W", "error", *_allowed_deprecation_warning_filters()) 108 | command = (sys.executable, *flags, "-c", import_statement) 109 | 110 | subprocess.check_call(command) 111 | -------------------------------------------------------------------------------- /piptools/repositories/local.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import optparse 4 | import typing as _t 5 | from collections.abc import Iterator, Mapping 6 | from contextlib import contextmanager 7 | 8 | from pip._internal.commands.install import InstallCommand 9 | from pip._internal.index.package_finder import PackageFinder 10 | from pip._internal.models.candidate import InstallationCandidate 11 | from pip._internal.network.session import PipSession 12 | from pip._internal.req import InstallRequirement 13 | from pip._internal.utils.hashes import FAVORITE_HASH 14 | 15 | from piptools.utils import as_tuple, key_from_ireq, make_install_requirement 16 | 17 | from .base import BaseRepository 18 | from .pypi import PyPIRepository 19 | 20 | 21 | def ireq_satisfied_by_existing_pin( 22 | ireq: InstallRequirement, existing_pin: InstallationCandidate 23 | ) -> bool: 24 | """ 25 | Return :py:data:`True` if the given ``InstallRequirement`` is satisfied by the 26 | previously encountered version pin. 27 | """ 28 | version = next(iter(existing_pin.req.specifier)).version 29 | result = ireq.req.specifier.contains( 30 | version, prereleases=existing_pin.req.specifier.prereleases 31 | ) 32 | return _t.cast(bool, result) 33 | 34 | 35 | class LocalRequirementsRepository(BaseRepository): 36 | """ 37 | The LocalRequirementsRepository proxies the _real_ repository by first 38 | checking if a requirement can be satisfied by existing pins (i.e. the 39 | result of a previous compile step). 40 | 41 | In effect, if a requirement can be satisfied with a version pinned in the 42 | requirements file, we prefer that version over the best match found on 43 | PyPI. This keeps updates to the requirements.txt down to a minimum. 
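
    Rough shape of its use (names are illustrative; the real wiring lives in
    the pip-compile code path)::

        pins = {key_from_ireq(r): r for r in previously_pinned}
        repo = LocalRequirementsRepository(pins, pypi_repository)
        candidate = repo.find_best_match(ireq)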
44 | """ 45 | 46 | def __init__( 47 | self, 48 | existing_pins: Mapping[str, InstallationCandidate], 49 | proxied_repository: PyPIRepository, 50 | reuse_hashes: bool = True, 51 | ): 52 | self._reuse_hashes = reuse_hashes 53 | self.repository = proxied_repository 54 | self.existing_pins = existing_pins 55 | 56 | @property 57 | def options(self) -> optparse.Values: 58 | return self.repository.options 59 | 60 | @property 61 | def finder(self) -> PackageFinder: 62 | return self.repository.finder 63 | 64 | @property 65 | def session(self) -> PipSession: 66 | return self.repository.session 67 | 68 | @property 69 | def command(self) -> InstallCommand: 70 | """Return an install command instance.""" 71 | return self.repository.command 72 | 73 | def clear_caches(self) -> None: 74 | self.repository.clear_caches() 75 | 76 | def find_best_match( 77 | self, ireq: InstallRequirement, prereleases: bool | None = None 78 | ) -> InstallationCandidate: 79 | key = key_from_ireq(ireq) 80 | existing_pin = self.existing_pins.get(key) 81 | if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin): 82 | project, version, _ = as_tuple(existing_pin) 83 | return make_install_requirement(project, version, ireq) 84 | else: 85 | return self.repository.find_best_match(ireq, prereleases) 86 | 87 | def get_dependencies(self, ireq: InstallRequirement) -> set[InstallRequirement]: 88 | return self.repository.get_dependencies(ireq) 89 | 90 | def get_hashes(self, ireq: InstallRequirement) -> set[str]: 91 | existing_pin = self._reuse_hashes and self.existing_pins.get( 92 | key_from_ireq(ireq) 93 | ) 94 | if existing_pin and ireq_satisfied_by_existing_pin(ireq, existing_pin): 95 | hashes = existing_pin.hash_options 96 | hexdigests = hashes.get(FAVORITE_HASH) 97 | if hexdigests: 98 | return { 99 | ":".join([FAVORITE_HASH, hexdigest]) for hexdigest in hexdigests 100 | } 101 | return self.repository.get_hashes(ireq) 102 | 103 | @contextmanager 104 | def allow_all_wheels(self) -> Iterator[None]: 105 | with self.repository.allow_all_wheels(): 106 | yield 107 | -------------------------------------------------------------------------------- /tests/test_cache.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import sys 5 | from contextlib import contextmanager 6 | from shutil import rmtree 7 | from tempfile import NamedTemporaryFile 8 | 9 | import pytest 10 | 11 | from piptools.cache import CorruptCacheError, DependencyCache, read_cache_file 12 | 13 | 14 | @contextmanager 15 | def _read_cache_file_helper(to_write): 16 | """ 17 | On enter, create the file with the given string, and then yield its path. 18 | On exit, delete that file. 19 | 20 | :param str to_write: the content to write to the file 21 | :yield: the path to the temporary file 22 | """ 23 | # Create the file and write to it 24 | cache_file = NamedTemporaryFile(mode="w", delete=False) 25 | try: 26 | cache_file.write(to_write) 27 | cache_file.close() 28 | 29 | # Yield the path to the file 30 | yield cache_file.name 31 | 32 | finally: 33 | # Delete the file on exit 34 | os.remove(cache_file.name) 35 | 36 | 37 | def test_read_cache_file_not_json(): 38 | """ 39 | A cache file that's not JSON should throw a corrupt cache error. 
40 | """ 41 | with _read_cache_file_helper("not json") as cache_file_name: 42 | with pytest.raises( 43 | CorruptCacheError, 44 | match="The dependency cache seems to have been corrupted.", 45 | ): 46 | read_cache_file(cache_file_name) 47 | 48 | 49 | def test_read_cache_file_wrong_format(): 50 | """ 51 | A cache file with a wrong "__format__" value should raise a ValueError. 52 | """ 53 | with _read_cache_file_helper('{"__format__": 2}') as cache_file_name: 54 | with pytest.raises(ValueError, match=r"^Unknown cache file format$"): 55 | read_cache_file(cache_file_name) 56 | 57 | 58 | def test_read_cache_file_successful(): 59 | """ 60 | A good cache file. 61 | """ 62 | with _read_cache_file_helper( 63 | '{"__format__": 1, "dependencies": "success"}' 64 | ) as cache_file_name: 65 | assert "success" == read_cache_file(cache_file_name) 66 | 67 | 68 | def test_read_cache_does_not_exist(tmpdir): 69 | cache = DependencyCache(cache_dir=tmpdir) 70 | assert cache.cache == {} 71 | 72 | 73 | @pytest.mark.skipif( 74 | sys.platform == "win32", reason="os.fchmod() not available on Windows" 75 | ) 76 | def test_read_cache_permission_error(tmpdir): 77 | cache = DependencyCache(cache_dir=tmpdir) 78 | with open(cache._cache_file, "w") as fp: 79 | os.fchmod(fp.fileno(), 0o000) 80 | with pytest.raises(IOError, match="Permission denied"): 81 | cache.cache 82 | 83 | 84 | def test_reverse_dependencies(from_line, tmpdir): 85 | # Create a cache object. The keys are packages, and the values are lists 86 | # of packages on which the keys depend. 87 | cache = DependencyCache(cache_dir=tmpdir) 88 | cache[from_line("top==1.2")] = ["middle>=0.3", "bottom>=5.1.2"] 89 | cache[from_line("top[xtra]==1.2")] = ["middle>=0.3", "bottom>=5.1.2", "bonus==0.4"] 90 | cache[from_line("middle==0.4")] = ["bottom<6"] 91 | cache[from_line("bottom==5.3.5")] = [] 92 | cache[from_line("bonus==0.4")] = [] 93 | 94 | # In this case, we're using top 1.2 without an extra, so the "bonus" package 95 | # is not depended upon. 96 | reversed_no_extra = cache.reverse_dependencies( 97 | [ 98 | from_line("top==1.2"), 99 | from_line("middle==0.4"), 100 | from_line("bottom==5.3.5"), 101 | from_line("bonus==0.4"), 102 | ] 103 | ) 104 | assert reversed_no_extra == {"middle": {"top"}, "bottom": {"middle", "top"}} 105 | 106 | # Now we're using top 1.2 with the "xtra" extra, so it depends 107 | # on the "bonus" package. 108 | reversed_extra = cache.reverse_dependencies( 109 | [ 110 | from_line("top[xtra]==1.2"), 111 | from_line("middle==0.4"), 112 | from_line("bottom==5.3.5"), 113 | from_line("bonus==0.4"), 114 | ] 115 | ) 116 | assert reversed_extra == { 117 | "middle": {"top"}, 118 | "bottom": {"middle", "top"}, 119 | "bonus": {"top"}, 120 | } 121 | 122 | # Clean up our temp directory 123 | rmtree(tmpdir) 124 | -------------------------------------------------------------------------------- /changelog.d/README.md: -------------------------------------------------------------------------------- 1 | ## Adding Change Notes with PRs 2 | 3 | It is important to maintain a changelog to explain to users what changed 4 | between versions. 5 | 6 | To avoid merge conflicts, we use 7 | [Towncrier](https://towncrier.readthedocs.io/en/stable/) to maintain our 8 | changelog. 9 | 10 | Towncrier uses separate files, "news fragments", for each pull request. 11 | On release, those fragments are compiled into the changelog. 12 | 13 | You don't need to install Towncrier to contribute; you just have to follow some 14 | simple rules! 
15 | 16 | - In your pull request, add a new file into `changelog.d/` with a filename 17 | formatted as `$NUMBER.$CATEGORY.md`. 18 | 19 | - The number is the PR number or issue number which your PR addresses. 20 | 21 | - The category is `bugfix`, `feature`, `deprecation`, `breaking`, `doc`, 22 | `packaging`, `contrib`, or `misc`. 23 | 24 | - For example, if your PR fixes bug #404, the change notes should be named 25 | `changelog.d/404.bugfix.md`. 26 | 27 | - If multiple issues are addressed, create a symlink to the change notes with 28 | another issue number in the name. 29 | Towncrier will automatically merge files into one entry with multiple links. 30 | 31 | - Prefer the simple past or constructions with "now". 32 | 33 | - Include a byline, `` -- by {user}`github-username` `` 34 | 35 | You can preview the changelog by running `tox run -e build-docs` and viewing 36 | the changelog in the docs. 37 | 38 | ### Categories 39 | 40 | The categories for change notes are defined as follows. 41 | 42 | - `bugfix`: A fix for something we deemed improper or undesired behavior. 43 | 44 | - `feature`: A new behavior, such as a new flag or environment variable. 45 | 46 | - `deprecation`: A declaration of future removals and breaking changes in behavior. 47 | 48 | - `breaking`: A change in behavior which changes or violates established user expectations 49 | (e.g., removing a flag or changing output formatting). 50 | 51 | - `doc`: Notable updates to the documentation structure or build process. 52 | 53 | - `packaging`: Changes in how `pip-tools` itself is packaged and tested which may impact downstreams and redistributors. 54 | 55 | - `contrib`: Changes to the contributor experience 56 | (e.g., running tests, building the docs, or setting up a development environment). 57 | 58 | - `misc`: Changes that don't fit any of the other categories. 59 | 60 | Sometimes it's not clear which category to use for a change. 61 | Do your best and a maintainer can discuss this with you during review. 62 | 63 | ### Examples 64 | 65 | Example bugfix, [`2223.bugfix.md`](https://github.com/jazzband/pip-tools/pull/2224): 66 | 67 | ```md 68 | Fixed a bug which removed slashes from URLs in `-r` and `-c` in the output 69 | of `pip-compile` -- by {user}`sirosen`. 70 | ``` 71 | 72 | Example contributor update, [`2214.contrib.md`](https://github.com/jazzband/pip-tools/pull/2214): 73 | 74 | ```md 75 | `pip-tools` now tests on and officially supports `pip` version 25.2 -- by {user}`sirosen`. 76 | ``` 77 | 78 | ### Rationale 79 | 80 | When making a change to `pip-tools`, it is important to communicate the differences that end-users will experience in a manner that they can understand. 81 | 82 | Details of the change that are primarily of interest only to `pip-tools` developers may be irrelevant to most users, and if so, then those details can be omitted from the change notes. 83 | Then, when the maintainers publish a new release, they'll automatically use these records to compose a change log for the respective version. 84 | 85 | We write change notes in the past tense because this suits the users who will be reading these notes. 86 | Combined with others, the notes will be a part of the "news digest" telling the readers what **changed** in a specific version of `pip-tools` since the previous version. 
87 | 88 | This methodology has several benefits, including those covered by the 89 | [Towncrier Philosophy](https://towncrier.readthedocs.io/en/stable/#philosophy): 90 | 91 | - Change notes separate the user-facing description of changes from the implementation details. 92 | Details go into the git history, but users aren't expected to care about them. 93 | 94 | - The release engineer may not have been involved in each issue and pull request. 95 | Writing the notes early in the process involves the developers in the best position to write good notes. 96 | 97 | - Describing a change can help during code review. 98 | The reviewer can better identify which effects of a change were intentional and which were not. 99 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=63", "setuptools_scm[toml]>=7"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | # https://peps.python.org/pep-0621/#readme 7 | requires-python = ">= 3.9" 8 | dynamic = ["version"] 9 | name = "pip-tools" 10 | description = "pip-tools keeps your pinned dependencies fresh." 11 | readme = "README.md" 12 | authors = [{ "name" = "Vincent Driessen", "email" = "me@nvie.com" }] 13 | license = { text = "BSD" } 14 | classifiers = [ 15 | "Development Status :: 5 - Production/Stable", 16 | "Environment :: Console", 17 | "Intended Audience :: Developers", 18 | "Intended Audience :: System Administrators", 19 | "License :: OSI Approved :: BSD License", 20 | "Operating System :: OS Independent", 21 | "Programming Language :: Python :: 3 :: Only", 22 | "Programming Language :: Python :: 3", 23 | "Programming Language :: Python :: 3.9", 24 | "Programming Language :: Python :: 3.10", 25 | "Programming Language :: Python :: 3.11", 26 | "Programming Language :: Python :: 3.12", 27 | "Programming Language :: Python :: 3.13", 28 | "Programming Language :: Python :: Implementation :: CPython", 29 | "Programming Language :: Python :: Implementation :: PyPy", 30 | "Programming Language :: Python", 31 | "Topic :: Software Development :: Quality Assurance", 32 | "Topic :: Software Development :: Testing", 33 | "Topic :: System :: Systems Administration", 34 | "Topic :: Utilities", 35 | "Typing :: Typed", 36 | ] 37 | keywords = ["pip", "requirements", "packaging"] 38 | dependencies = [ 39 | # direct dependencies 40 | "build >= 1.0.0", 41 | "click >= 8", 42 | "pip >= 22.2", 43 | "pyproject_hooks", 44 | "tomli; python_version < '3.11'", 45 | # indirect dependencies 46 | "setuptools", # typically needed when pip-tools invokes setup.py 47 | "wheel", # pip plugin needed by pip-tools 48 | 49 | ] 50 | 51 | [project.urls] 52 | homepage = "https://github.com/jazzband/pip-tools/" 53 | documentation = "https://pip-tools.readthedocs.io/en/latest/" 54 | repository = "https://github.com/jazzband/pip-tools" 55 | changelog = "https://github.com/jazzband/pip-tools/releases" 56 | 57 | [project.optional-dependencies] 58 | testing = [ 59 | "pytest >= 7.2.0", 60 | "pytest-rerunfailures", 61 | "pytest-xdist", 62 | "tomli-w", 63 | # build deps for tests 64 | "flit_core >=2,<4", 65 | "poetry_core>=1.0.0", 66 | ] 67 | coverage = ["covdefaults", "pytest-cov"] 68 | 69 | [project.scripts] 70 | pip-compile = "piptools.scripts.compile:cli" 71 | pip-sync = "piptools.scripts.sync:cli" 72 | 73 | [tool.isort] 74 | profile = "black" 75 | # explicitly mark 'build' as a third-party package 76 | # 
otherwise, in some executions, `isort` can mistake `piptools.build` for 77 | # `build` and treat it as a first-party module name 78 | known_third_party = ["build"] 79 | add_imports = "from __future__ import annotations" 80 | 81 | [tool.mypy] 82 | disallow_untyped_defs = true 83 | disallow_any_generics = true 84 | disallow_incomplete_defs = true 85 | disallow_subclassing_any = true 86 | disallow_untyped_calls = true 87 | disallow_untyped_decorators = true 88 | ignore_missing_imports = true 89 | no_implicit_optional = true 90 | no_implicit_reexport = true 91 | strict_equality = true 92 | warn_redundant_casts = true 93 | warn_return_any = true 94 | warn_unused_configs = true 95 | warn_unused_ignores = true 96 | # Avoid error: Duplicate module named 'setup' 97 | # https://github.com/python/mypy/issues/4008 98 | exclude = "^tests/test_data/" 99 | 100 | [[tool.mypy.overrides]] 101 | module = ["tests.*"] 102 | disallow_untyped_defs = false 103 | disallow_incomplete_defs = false 104 | 105 | [tool.pytest.ini_options] 106 | addopts = [ 107 | # `pytest-xdist`: 108 | "--numprocesses=auto", 109 | 110 | # The `worksteal` distribution method is useful if the run times of different tests vary greatly, 111 | # as it ensures more efficient resource usage, improving the performance of testing. 112 | "--dist=worksteal", 113 | 114 | # Show 20 slowest invocations: 115 | "--durations=20", 116 | ] 117 | norecursedirs = ".* build dist venv test_data piptools/_compat/*" 118 | testpaths = "tests piptools" 119 | filterwarnings = ["always"] 120 | markers = ["network: mark tests that require internet access"] 121 | 122 | [tool.setuptools.packages.find] 123 | # needed only because we did not adopt src layout yet 124 | include = ["piptools*"] 125 | 126 | [tool.setuptools_scm] 127 | local_scheme = "dirty-tag" 128 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: 📦 Packaging 3 | 4 | on: 5 | pull_request: 6 | push: 7 | branches: 8 | - main 9 | release: 10 | types: 11 | - published 12 | 13 | env: 14 | FORCE_COLOR: 1 # Request colored output from CLI tools supporting it 15 | MYPY_FORCE_COLOR: 1 # MyPy's color enforcement 16 | PIP_DISABLE_PIP_VERSION_CHECK: 1 # Hide "there's a newer pip" message 17 | PIP_NO_PYTHON_VERSION_WARNING: 1 # Hide "this Python is deprecated" message 18 | PIP_NO_WARN_SCRIPT_LOCATION: 1 # Hide "script dir is not in $PATH" message 19 | PRE_COMMIT_COLOR: always 20 | PROJECT_NAME: pip-tools 21 | PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest` 22 | PYTHONIOENCODING: utf-8 23 | PYTHONUTF8: 1 24 | TOX_PARALLEL_NO_SPINNER: 1 # Disable tox's parallel run spinner animation 25 | TOX_TESTENV_PASSENV: >- # Make tox-wrapped tools see color requests 26 | FORCE_COLOR 27 | MYPY_FORCE_COLOR 28 | NO_COLOR 29 | PIP_DISABLE_PIP_VERSION_CHECK 30 | PIP_NO_PYTHON_VERSION_WARNING 31 | PIP_NO_WARN_SCRIPT_LOCATION 32 | PRE_COMMIT_COLOR 33 | PY_COLORS 34 | PYTEST_THEME 35 | PYTEST_THEME_MODE 36 | PYTHONIOENCODING 37 | PYTHONLEGACYWINDOWSSTDIO 38 | PYTHONUTF8 39 | UPSTREAM_REPOSITORY_ID: >- 40 | 5746963 41 | 42 | run-name: >- 43 | ${{ 44 | github.event.action == 'published' 45 | && format('📦 Releasing v{0}...', github.ref_name) 46 | || format('🌱 Smoke-testing packaging for commit {0}', github.sha) 47 | }} 48 | triggered by: ${{ github.event_name }} of ${{ 49 | github.ref 50 | }} ${{ 51 | github.ref_type 52 | }} 53 | (workflow run ID: ${{ 54 | 
github.run_id 55 | }}; number: ${{ 56 | github.run_number 57 | }}; attempt: ${{ 58 | github.run_attempt 59 | }}) 60 | 61 | jobs: 62 | build: 63 | name: >- 64 | 📦 v${{ github.ref_name }} 65 | [mode: ${{ 66 | github.event.action == 'published' 67 | && 'release' || 'nightly' 68 | }}] 69 | 70 | runs-on: ubuntu-latest 71 | 72 | timeout-minutes: 2 73 | 74 | outputs: 75 | # NOTE: These aren't env vars because the `${{ env }}` context is 76 | # NOTE: inaccessible when passing inputs to reusable workflows. 77 | upstream-repository-id: ${{ env.UPSTREAM_REPOSITORY_ID }} 78 | 79 | steps: 80 | - uses: actions/checkout@v4 81 | with: 82 | fetch-depth: 0 83 | 84 | - name: Set up Python 85 | uses: actions/setup-python@v5 86 | with: 87 | python-version: 3.12 88 | 89 | - name: Install dependencies 90 | run: | 91 | python -Im pip install -U twine build 92 | 93 | - name: Build package 94 | run: | 95 | python -Im build 96 | twine check --strict dist/* 97 | 98 | - name: Store the distribution packages 99 | uses: actions/upload-artifact@v4 100 | with: 101 | name: python-package-distributions 102 | # NOTE: Exact expected file names are specified here 103 | # NOTE: as a safety measure — if anything weird ends 104 | # NOTE: up being in this dir or not all dists will be 105 | # NOTE: produced, this will fail the workflow. 106 | path: | 107 | dist/*.tar.gz 108 | dist/*.whl 109 | retention-days: >- 110 | ${{ 111 | github.event.action == 'published' 112 | && 90 || 30 113 | }} 114 | 115 | publish-pypi: 116 | name: >- 117 | 📦 118 | Publish v${{ github.ref_name }} to PyPI 119 | needs: 120 | - build 121 | if: >- 122 | github.event.action == 'published' 123 | && needs.build.outputs.upstream-repository-id == github.repository_id 124 | 125 | runs-on: ubuntu-latest 126 | 127 | timeout-minutes: 2 # docker+network are slow sometimes 128 | 129 | environment: 130 | name: pypi 131 | url: >- 132 | https://pypi.org/project/${{ env.PROJECT_NAME }}/${{ github.ref_name }} 133 | 134 | permissions: 135 | id-token: write # PyPI Trusted Publishing (OIDC) 136 | 137 | steps: 138 | - name: Download all the dists 139 | uses: actions/download-artifact@v4 140 | with: 141 | name: python-package-distributions 142 | path: dist/ 143 | - name: >- 144 | 📦 145 | Publish v${{ github.ref_name }} to PyPI 146 | 🔏 147 | uses: pypa/gh-action-pypi-publish@release/v1 148 | - name: Clean up the publish attestation leftovers 149 | run: rm -fv dist/*.publish.attestation 150 | - name: Upload packages to Jazzband 151 | uses: pypa/gh-action-pypi-publish@release/v1 152 | with: 153 | user: jazzband 154 | password: ${{ secrets.JAZZBAND_RELEASE_KEY }} 155 | repository-url: https://jazzband.co/projects/${{ env.PROJECT_NAME }}/upload 156 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 2 | """Configuration file for the Sphinx documentation builder.""" 3 | 4 | from __future__ import annotations 5 | 6 | import os 7 | from importlib.metadata import version as get_version 8 | from pathlib import Path 9 | 10 | from sphinx.application import Sphinx 11 | from sphinx.util import logging 12 | from sphinx.util.console import bold 13 | 14 | logger = logging.getLogger(__name__) 15 | 16 | PROJECT_ROOT_DIR = Path(__file__).parents[1].resolve() 17 | IS_RELEASE_ON_RTD = ( 18 | os.getenv("READTHEDOCS", "False") == "True" 19 | and os.environ["READTHEDOCS_VERSION_TYPE"] == "tag" 20 | ) 21 | 22 | 23 
| # -- Project information ----------------------------------------------------- 24 | 25 | project = "pip-tools" 26 | author = f"{project} Contributors" 27 | copyright = f"The {author}" 28 | 29 | # The full version, including alpha/beta/rc tags 30 | release = get_version(project) 31 | 32 | # The short X.Y version 33 | version = ".".join(release.split(".")[:3]) 34 | 35 | logger.info(bold("%s version: %s"), project, version) 36 | logger.info(bold("%s release: %s"), project, release) 37 | 38 | # -- General configuration --------------------------------------------------- 39 | 40 | # Add any Sphinx extension module names here, as strings. They can be 41 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 42 | # ones. 43 | extensions = [ 44 | # Stdlib extensions: 45 | "sphinx.ext.intersphinx", 46 | # Third-party extensions: 47 | "myst_parser", 48 | "sphinxcontrib.apidoc", 49 | "sphinxcontrib.programoutput", 50 | "sphinxcontrib.towncrier.ext", # provides `.. towncrier-draft-entries::` 51 | "sphinx_issues", 52 | ] 53 | 54 | # -- Options for HTML output ------------------------------------------------- 55 | 56 | # The theme to use for HTML and HTML Help pages. See the documentation for 57 | # a list of builtin themes. 58 | # 59 | html_theme = "furo" 60 | html_title = f"{project} documentation v{release}" 61 | 62 | 63 | # -- Options for intersphinx ---------------------------------------------------------- 64 | # https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#configuration 65 | 66 | intersphinx_mapping = { 67 | "python": ("https://docs.python.org/3", None), 68 | } 69 | 70 | issues_github_path = "jazzband/pip-tools" 71 | 72 | towncrier_draft_autoversion_mode = "draft" 73 | towncrier_draft_include_empty = True 74 | towncrier_draft_working_directory = PROJECT_ROOT_DIR 75 | towncrier_draft_config_path = "towncrier.toml" # relative to cwd 76 | 77 | # ------------------------------------------------------------------------- 78 | default_role = "any" 79 | nitpicky = True 80 | 81 | linkcheck_ignore = [ 82 | r"^https://matrix\.to/#", 83 | r"^https://img.shields.io/matrix", 84 | r"^https://results\.pre-commit\.ci/latest/github/jazzband/pip-tools/", 85 | # checking sphinx-issues links to GitHub results in rate limiting errors 86 | # skip any username validation and pip-tools link checking 87 | # (this also means we won't get spurious errors when users delete their GitHub accounts) 88 | r"^https://github\.com/jazzband/pip-tools/(issues|pull|commit)/", 89 | r"^https://github\.com/sponsors/", 90 | ] 91 | 92 | nitpick_ignore_regex = [ 93 | ("py:class", "pip.*"), 94 | ("py:class", "pathlib.*"), 95 | ("py:class", "click.*"), 96 | ("py:class", "build.*"), 97 | ("py:class", "optparse.*"), 98 | ("py:class", "_ImportLibDist"), 99 | ("py:class", "PackageMetadata"), 100 | ("py:class", "importlib.*"), 101 | ("py:class", "IndexContent"), 102 | ("py:exc", "click.*"), 103 | ] 104 | 105 | suppress_warnings = [ 106 | "myst.xref_missing", 107 | # MyST erroneously flags the draft changelog as having improper header levels 108 | # because it starts at H2 instead of H1. 109 | # However, it is written only for inclusion in a broader doc, so the heading 110 | # levels are actually correct. 
111 | "myst.header", 112 | ] 113 | 114 | # -- Apidoc options ------------------------------------------------------- 115 | 116 | apidoc_excluded_paths: list[str] = [] 117 | apidoc_extra_args = [ 118 | "--implicit-namespaces", 119 | "--private", # include “_private” modules 120 | ] 121 | apidoc_module_first = False 122 | apidoc_module_dir = "../piptools" 123 | apidoc_output_dir = "pkg" 124 | apidoc_separate_modules = True 125 | apidoc_toc_file = None 126 | 127 | 128 | # -- Sphinx extension-API `setup()` hook 129 | 130 | 131 | def setup(app: Sphinx) -> dict[str, bool | str]: 132 | """Register project-local Sphinx extension-API customizations. 133 | 134 | :param app: Initialized Sphinx app instance. 135 | :returns: Extension metadata. 136 | """ 137 | if IS_RELEASE_ON_RTD: 138 | app.tags.add("is_release") 139 | 140 | return { 141 | "parallel_read_safe": True, 142 | "parallel_write_safe": True, 143 | "version": release, 144 | } 145 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | # NOTE: keep this in sync with the env list in .github/workflows/ci.yml. 4 | py{39,310,311,312,313,py3}-pip{supported,lowest,latest,main}-coverage 5 | pip{supported,lowest,latest,main}-coverage 6 | pip{supported,lowest,latest,main} 7 | checkqa 8 | readme 9 | skip_missing_interpreters = True 10 | 11 | [testenv] 12 | description = run the tests with pytest 13 | extras = 14 | testing 15 | coverage: coverage 16 | deps = 17 | pipsupported: pip == 25.3 18 | pipsupported: setuptools <= 80.0 19 | 20 | piplowest: pip == 22.2.* ; python_version < "3.12" 21 | piplowest: pip == 23.2.* ; python_version >= "3.12" 22 | 23 | piplatest: pip 24 | pipmain: https://github.com/pypa/pip/archive/main.zip 25 | setenv = 26 | coverage: PYTEST_ADDOPTS=--strict-markers --doctest-modules --cov --cov-report=term-missing --cov-report=xml {env:PYTEST_ADDOPTS:} 27 | commands_pre = 28 | piplatest: python -m pip install -U pip 29 | pip --version 30 | commands = pytest {posargs} 31 | passenv = 32 | CI 33 | FORCE_COLOR 34 | GITHUB_ACTIONS 35 | MYPY_FORCE_COLOR 36 | PRE_COMMIT_COLOR 37 | PY_COLORS 38 | pip_pre=True 39 | 40 | [testenv:checkqa] 41 | description = format the code base and check its quality 42 | skip_install = True 43 | deps = pre-commit 44 | commands_pre = 45 | commands = pre-commit run --all-files --show-diff-on-failure 46 | 47 | [testenv:readme] 48 | description = check whether the long description will render correctly on PyPI 49 | deps = 50 | build 51 | twine 52 | commands_pre = 53 | commands = 54 | python -m build --outdir {envtmpdir} --sdist {toxinidir} 55 | twine check --strict {envtmpdir}{/}* 56 | skip_install = true 57 | 58 | [testenv:pip-compile-docs] 59 | description = compile requirements for the documentation 60 | commands_pre = 61 | # compile requirements.in + pyproject.toml to get pip-tools install requirements in 62 | # addition to doc tooling requirements *but without* putting `pip-tools` itself 63 | # into the output 64 | commands = 65 | python -m piptools compile \ 66 | --strip-extras \ 67 | --allow-unsafe \ 68 | --quiet \ 69 | docs/requirements.in \ 70 | './pyproject.toml' \ 71 | -o docs/requirements.txt \ 72 | {posargs} 73 | 74 | [testenv:build-docs] 75 | description = build the documentation 76 | deps = 77 | -r{toxinidir}/docs/requirements.txt 78 | # FIXME: re-enable the "-r" + "-c" paradigm once the pip bug is fixed. 
79 | # Ref: https://github.com/pypa/pip/issues/9243 80 | # -r{toxinidir}/docs/requirements.in 81 | # -c{toxinidir}/docs/requirements.txt 82 | commands_pre = 83 | # Retrieve possibly missing commits: 84 | -git fetch --unshallow 85 | -git fetch --tags 86 | commands = 87 | # Build the html docs with Sphinx: 88 | {envpython} -m sphinx \ 89 | -j auto \ 90 | -b html \ 91 | {tty:--color} \ 92 | -a \ 93 | -n -W --keep-going \ 94 | -d "{temp_dir}/.doctrees" \ 95 | . \ 96 | {posargs:{envdir}/docs_out} 97 | # Print out the output docs dir and a way to serve html after the build 98 | commands_post = 99 | {envpython} -c \ 100 | 'import pathlib;\ 101 | docs_dir = pathlib.Path(r"{envdir}") / "docs_out";\ 102 | index_file = docs_dir / "index.html";\ 103 | print("\n" + "=" * 120 +\ 104 | f"\n\nDocumentation available under:\n\n\ 105 | \tfile://\{index_file\}\n\nTo serve docs, use\n\n\ 106 | \t$ python3 -m http.server --directory \ 107 | \N\{QUOTATION MARK\}\{docs_dir\}\N\{QUOTATION MARK\} 0\n\n" +\ 108 | "=" * 120)' 109 | changedir = {toxinidir}/docs 110 | isolated_build = true 111 | passenv = 112 | READTHEDOCS 113 | READTHEDOCS_VERSION_TYPE 114 | SSH_AUTH_SOCK 115 | skip_install = false 116 | allowlist_externals = 117 | git 118 | 119 | 120 | [testenv:preview-docs] 121 | description = preview the docs 122 | deps = 123 | sphinx-autobuild 124 | {[testenv:build-docs]deps} 125 | commands_pre = 126 | commands = 127 | # Retrieve possibly missing commits: 128 | -git fetch --unshallow 129 | -git fetch --tags 130 | 131 | # Build the html docs with sphinx-autobuild: 132 | {envpython} -m sphinx_autobuild \ 133 | -j auto \ 134 | -b html \ 135 | -n \ 136 | -W \ 137 | -d "{temp_dir}/.doctrees" \ 138 | . \ 139 | --watch ../README.md \ 140 | --watch ../CHANGELOG.md \ 141 | "{envdir}/docs_out" 142 | 143 | changedir = {[testenv:build-docs]changedir} 144 | isolated_build = {[testenv:build-docs]isolated_build} 145 | passenv = {[testenv:build-docs]passenv} 146 | skip_install = {[testenv:build-docs]skip_install} 147 | allowlist_externals = {[testenv:build-docs]allowlist_externals} 148 | 149 | 150 | [testenv:linkcheck-docs] 151 | description = check links in the documentation 152 | deps = 153 | -r{toxinidir}/docs/requirements.txt 154 | # FIXME: re-enable the "-r" + "-c" paradigm once the pip bug is fixed. 155 | # Ref: https://github.com/pypa/pip/issues/9243 156 | # -r{toxinidir}/docs/requirements.in 157 | # -c{toxinidir}/docs/requirements.txt 158 | commands_pre = 159 | commands = 160 | # Retrieve possibly missing commits: 161 | -git fetch --unshallow 162 | -git fetch --tags 163 | 164 | # Build the html docs with Sphinx: 165 | {envpython} -m sphinx \ 166 | -j auto \ 167 | -b linkcheck \ 168 | {tty:--color} \ 169 | -a \ 170 | -n -W --keep-going \ 171 | -d "{temp_dir}/.doctrees" \ 172 | . 
\ 173 | "{envdir}/docs_out" 174 | changedir = {toxinidir}/docs 175 | isolated_build = true 176 | passenv = 177 | SSH_AUTH_SOCK 178 | skip_install = false 179 | allowlist_externals = 180 | git 181 | 182 | [testenv:changelog-draft] 183 | deps = 184 | towncrier 185 | -c{toxinidir}/docs/requirements.txt 186 | commands_pre = 187 | towncrier --version 188 | # wrap the invocation of `towncrier build --version main --draft` to discard stderr 189 | # unfortunately, stderr gets interleaved with stdout, often mixing towncrier's draft build output 190 | # with user messaging about the invocation 191 | commands = 192 | {envpython} -bb -I -Werror \ 193 | -c 'import sys, subprocess; subprocess.run([sys.executable, "-bb", "-I", "-Werror", "-m", "towncrier", "build", "--version", "DRAFT_VERSION", "--draft", *sys.argv[1:]], stderr=subprocess.DEVNULL)' {posargs} 194 | -------------------------------------------------------------------------------- /piptools/cache.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | import os 5 | import platform 6 | import sys 7 | import typing as _t 8 | from collections.abc import Iterable 9 | 10 | from pip._internal.req import InstallRequirement 11 | from pip._vendor.packaging.requirements import Requirement 12 | 13 | from .exceptions import PipToolsError 14 | from .utils import as_tuple, key_from_req, lookup_table_from_tuples 15 | 16 | CacheKey = tuple[str, str] 17 | CacheLookup = dict[str, list[str]] 18 | CacheDict = dict[str, CacheLookup] 19 | 20 | _PEP425_PY_TAGS = {"cpython": "cp", "pypy": "pp", "ironpython": "ip", "jython": "jy"} 21 | 22 | 23 | def _implementation_name() -> str: 24 | """ 25 | Get Python implementation and version. 26 | 27 | Similar to PEP 425, however the minor version is separated from the major to 28 | differentiate "3.10" and "31.0". 29 | """ 30 | implementation_name = platform.python_implementation().lower() 31 | implementation = _PEP425_PY_TAGS.get(implementation_name, "??") 32 | return "{}{}.{}".format(implementation, *sys.version_info) 33 | 34 | 35 | class CorruptCacheError(PipToolsError): 36 | def __init__(self, path: str): 37 | self.path = path 38 | 39 | def __str__(self) -> str: 40 | lines = [ 41 | "The dependency cache seems to have been corrupted.", 42 | "Inspect, or delete, the following file:", 43 | f" {self.path}", 44 | ] 45 | return os.linesep.join(lines) 46 | 47 | 48 | def read_cache_file(cache_file_path: str) -> CacheDict: 49 | with open(cache_file_path, encoding="utf-8") as cache_file: 50 | try: 51 | doc = json.load(cache_file) 52 | except (json.JSONDecodeError, UnicodeDecodeError): 53 | raise CorruptCacheError(cache_file_path) 54 | 55 | # Check version and load the contents 56 | if doc["__format__"] != 1: 57 | raise ValueError("Unknown cache file format") 58 | return _t.cast(CacheDict, doc["dependencies"]) 59 | 60 | 61 | class DependencyCache: 62 | """ 63 | Create new persistent dependency cache for the current Python version. 64 | 65 | The cache file is written to the appropriate user cache dir for the 66 | current platform, i.e. 67 | 68 | ~/.cache/pip-tools/depcache-pyX.Y.json 69 | 70 | Where py indicates the Python implementation. 71 | Where X.Y indicates the Python version. 
72 | """ 73 | 74 | def __init__(self, cache_dir: str): 75 | os.makedirs(cache_dir, exist_ok=True) 76 | cache_filename = f"depcache-{_implementation_name()}.json" 77 | 78 | self._cache_file = os.path.join(cache_dir, cache_filename) 79 | self._cache: CacheDict | None = None 80 | 81 | @property 82 | def cache(self) -> CacheDict: 83 | """ 84 | The dictionary that is the actual in-memory cache. This property 85 | lazily loads the cache from disk. 86 | """ 87 | if self._cache is None: 88 | try: 89 | self._cache = read_cache_file(self._cache_file) 90 | except FileNotFoundError: 91 | self._cache = {} 92 | return self._cache 93 | 94 | def as_cache_key(self, ireq: InstallRequirement) -> CacheKey: 95 | """ 96 | Given a requirement, return its cache key. 97 | 98 | This behavior is a little weird 99 | in order to allow backwards compatibility with cache files. For a requirement 100 | without extras, this will return, for example: 101 | 102 | ("ipython", "2.1.0") 103 | 104 | For a requirement with extras, the extras will be comma-separated and appended 105 | to the version, inside brackets, like so: 106 | 107 | ("ipython", "2.1.0[nbconvert,notebook]") 108 | """ 109 | name, version, extras = as_tuple(ireq) 110 | if not extras: 111 | extras_string = "" 112 | else: 113 | extras_string = f"[{','.join(extras)}]" 114 | return name, f"{version}{extras_string}" 115 | 116 | def write_cache(self) -> None: 117 | """Write the cache to disk as JSON.""" 118 | doc = {"__format__": 1, "dependencies": self._cache} 119 | with open(self._cache_file, "w", encoding="utf-8") as f: 120 | json.dump(doc, f, sort_keys=True) 121 | 122 | def clear(self) -> None: 123 | self._cache = {} 124 | self.write_cache() 125 | 126 | def __contains__(self, ireq: InstallRequirement) -> bool: 127 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 128 | return pkgversion_and_extras in self.cache.get(pkgname, {}) 129 | 130 | def __getitem__(self, ireq: InstallRequirement) -> list[str]: 131 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 132 | return self.cache[pkgname][pkgversion_and_extras] 133 | 134 | def __setitem__(self, ireq: InstallRequirement, values: list[str]) -> None: 135 | pkgname, pkgversion_and_extras = self.as_cache_key(ireq) 136 | self.cache.setdefault(pkgname, {}) 137 | self.cache[pkgname][pkgversion_and_extras] = values 138 | self.write_cache() 139 | 140 | def reverse_dependencies( 141 | self, ireqs: Iterable[InstallRequirement] 142 | ) -> dict[str, set[str]]: 143 | """ 144 | Return a lookup table of reverse dependencies for all the given ireqs. 145 | 146 | Since this is all static, it only works if the dependency cache 147 | contains the complete data, otherwise you end up with a partial view. 148 | This is typically no problem if you use this function after the entire 149 | dependency tree is resolved. 150 | """ 151 | ireqs_as_cache_values = [self.as_cache_key(ireq) for ireq in ireqs] 152 | return self._reverse_dependencies(ireqs_as_cache_values) 153 | 154 | def _reverse_dependencies( 155 | self, cache_keys: Iterable[tuple[str, str]] 156 | ) -> dict[str, set[str]]: 157 | """ 158 | Return a lookup table of reverse dependencies for all the given cache keys. 
159 | 160 | Example input: 161 | 162 | [('pep8', '1.5.7'), 163 | ('flake8', '2.4.0'), 164 | ('mccabe', '0.3'), 165 | ('pyflakes', '0.8.1')] 166 | 167 | Example output: 168 | 169 | {'pep8': {'flake8'}, 170 | 'mccabe': {'flake8'}, 171 | 'pyflakes': {'flake8'}} 172 | (the result maps each package to the set of packages that depend on it; flake8 has no reverse dependencies here, so it does not appear as a key) 173 | 174 | """ 175 | # First, collect all the dependencies into a sequence of (parent, child) 176 | # tuples, like [('flake8', 'pep8'), ('flake8', 'mccabe'), ...] 177 | return lookup_table_from_tuples( 178 | (key_from_req(Requirement(dep_name)), name) 179 | for name, version_and_extras in cache_keys 180 | for dep_name in self.cache[name][version_and_extras] 181 | ) 182 | -------------------------------------------------------------------------------- /tests/test_build.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import pathlib 4 | import shutil 5 | import textwrap 6 | 7 | import pytest 8 | from build import BuildBackendException 9 | 10 | from piptools.build import ( 11 | ProjectMetadata, 12 | StaticProjectMetadata, 13 | build_project_metadata, 14 | maybe_statically_parse_project_metadata, 15 | ) 16 | from tests.constants import PACKAGES_PATH 17 | 18 | 19 | @pytest.mark.network 20 | def test_build_project_metadata_resolved_correct_build_dependencies( 21 | fake_dists_with_build_deps, tmp_path, monkeypatch 22 | ): 23 | """Test that the resolved build dependencies are correct. 24 | 25 | Because this is a slow process we test it only for one build target and rely 26 | on ``test_all_extras_and_all_build_deps`` to test that it works with multiple build 27 | targets. 28 | """ 29 | # When used as argument to the runner it is not passed to pip 30 | monkeypatch.setenv("PIP_FIND_LINKS", fake_dists_with_build_deps) 31 | src_pkg_path = pathlib.Path(PACKAGES_PATH) / "small_fake_with_build_deps" 32 | shutil.copytree(src_pkg_path, tmp_path, dirs_exist_ok=True) 33 | src_file = tmp_path / "setup.py" 34 | metadata = build_project_metadata( 35 | src_file, ("editable",), attempt_static_parse=False, isolated=True, quiet=False 36 | ) 37 | assert isinstance(metadata, ProjectMetadata) 38 | build_requirements = sorted(r.name for r in metadata.build_requirements) 39 | assert build_requirements == [ 40 | "fake_dynamic_build_dep_for_all", 41 | "fake_dynamic_build_dep_for_editable", 42 | "fake_static_build_dep", 43 | "setuptools", 44 | "wheel", 45 | ] 46 | 47 | 48 | def test_build_project_metadata_static(tmp_path): 49 | """Test static parsing branch of build_project_metadata""" 50 | src_pkg_path = pathlib.Path(PACKAGES_PATH) / "small_fake_with_pyproject" 51 | shutil.copytree(src_pkg_path, tmp_path, dirs_exist_ok=True) 52 | src_file = tmp_path / "pyproject.toml" 53 | metadata = build_project_metadata( 54 | src_file, (), attempt_static_parse=True, isolated=True, quiet=False 55 | ) 56 | assert isinstance(metadata, StaticProjectMetadata) 57 | requirements = [(r.name, r.extras, str(r.markers)) for r in metadata.requirements] 58 | requirements.sort(key=lambda x: x[0]) 59 | assert requirements == [ 60 | ("fake_direct_extra_runtime_dep", {"with_its_own_extra"}, 'extra == "x"'), 61 | ("fake_direct_runtime_dep", set(), "None"), 62 | ] 63 | assert metadata.extras == ("x",) 64 | 65 | 66 | def test_build_project_metadata_raises_error(tmp_path): 67 | src_pkg_path = pathlib.Path(PACKAGES_PATH) / "small_fake_with_build_deps" 68 | shutil.copytree(src_pkg_path, tmp_path, dirs_exist_ok=True) 69 | src_file = tmp_path / "setup.py" 70 | with pytest.raises( 71 | ValueError,
match="Cannot execute the PEP 517 optional.* hooks statically" 72 | ): 73 | build_project_metadata( 74 | src_file, 75 | ("editable",), 76 | attempt_static_parse=True, 77 | isolated=True, 78 | quiet=False, 79 | ) 80 | 81 | 82 | def test_build_project_metadata_upgrading_raises_error(tmp_path): 83 | """Test build_project_metadata doesn't swallow error.""" 84 | src_file = tmp_path / "pyproject.toml" 85 | src_file.write_text( 86 | textwrap.dedent( 87 | """ 88 | [project] 89 | # missing name 90 | version = "0.1" 91 | dependencies=["test_dep"] 92 | """ 93 | ), 94 | ) 95 | with pytest.raises( 96 | BuildBackendException, 97 | match=( 98 | "Backend subprocess exited when trying to invoke " 99 | "get_requires_for_build_wheel" 100 | ), 101 | ): 102 | build_project_metadata( 103 | src_file, 104 | (), 105 | attempt_static_parse=False, 106 | isolated=True, 107 | quiet=False, 108 | upgrade_packages=["test_dep"], 109 | ) 110 | 111 | 112 | def test_static_parse_valid(tmp_path): 113 | src_file = tmp_path / "pyproject.toml" 114 | 115 | valid = """ 116 | [project] 117 | name = "foo" 118 | version = "0.1.0" 119 | dependencies = ["bar>=1"] 120 | [project.optional-dependencies] 121 | baz = ["qux[extra]"] 122 | """ 123 | src_file.write_text(valid) 124 | metadata = maybe_statically_parse_project_metadata(src_file) 125 | assert isinstance(metadata, StaticProjectMetadata) 126 | assert [str(r.req) for r in metadata.requirements] == ["bar>=1", "qux[extra]"] 127 | assert metadata.extras == ("baz",) 128 | 129 | 130 | @pytest.mark.parametrize( 131 | "input_path_is_absolute", 132 | (True, False), 133 | ids=("absolute-input", "relative-input"), 134 | ) 135 | def test_static_parse_of_self_referential_extra( 136 | tmp_path, monkeypatch, input_path_is_absolute 137 | ): 138 | monkeypatch.chdir(tmp_path) 139 | 140 | src_file = tmp_path / "pyproject.toml" 141 | src_file.write_text( 142 | textwrap.dedent( 143 | """ 144 | [project] 145 | name = "foo" 146 | version = "0.1.0" 147 | [project.optional-dependencies] 148 | ext1 = ["bar"] 149 | ext2 = ["foo[ext1]"] 150 | """ 151 | ) 152 | ) 153 | 154 | if input_path_is_absolute: 155 | parse_path = src_file 156 | else: 157 | parse_path = src_file.relative_to(tmp_path) 158 | 159 | metadata = maybe_statically_parse_project_metadata(parse_path) 160 | assert isinstance(metadata, StaticProjectMetadata) 161 | assert metadata.extras == ("ext1", "ext2") 162 | assert len(metadata.requirements) == 2 163 | 164 | assert [r.name for r in metadata.requirements] == ["bar", "foo"] 165 | assert [r.comes_from for r in metadata.requirements] == [ 166 | f"foo ({parse_path.as_posix()})" 167 | ] * 2 168 | 169 | foo_req = metadata.requirements[1] 170 | assert foo_req.extras == {"ext1"} 171 | assert foo_req.link.url == tmp_path.as_uri() 172 | 173 | 174 | def test_static_parse_invalid(tmp_path): 175 | src_file = tmp_path / "pyproject.toml" 176 | 177 | invalid_toml = """this is not valid toml""" 178 | src_file.write_text(invalid_toml) 179 | assert maybe_statically_parse_project_metadata(src_file) is None 180 | 181 | no_pep621 = """ 182 | [build-system] 183 | requires = ["setuptools"] 184 | """ 185 | src_file.write_text(no_pep621) 186 | assert maybe_statically_parse_project_metadata(src_file) is None 187 | 188 | invalid_pep621 = """ 189 | [project] 190 | # no name 191 | version = "0.1.0" 192 | """ 193 | src_file.write_text(invalid_pep621) 194 | assert maybe_statically_parse_project_metadata(src_file) is None 195 | 196 | dynamic_deps = """ 197 | [project] 198 | name = "foo" 199 | dynamic = ["dependencies"] 200 | """ 
201 | src_file.write_text(dynamic_deps) 202 | assert maybe_statically_parse_project_metadata(src_file) is None 203 | 204 | dynamic_optional_deps = """ 205 | [project] 206 | name = "foo" 207 | dynamic = ["optional-dependencies"] 208 | """ 209 | src_file.write_text(dynamic_optional_deps) 210 | assert maybe_statically_parse_project_metadata(src_file) is None 211 | 212 | src_file = tmp_path / "setup.py" 213 | src_file.write_text("print('hello')") 214 | assert maybe_statically_parse_project_metadata(src_file) is None 215 | 216 | 217 | @pytest.mark.network 218 | def test_build_metadata_from_dynamic_dependencies(tmp_path): 219 | pyproject_file = tmp_path / "pyproject.toml" 220 | setuppy_file = tmp_path / "setup.py" 221 | 222 | pyproject_file.write_text( 223 | textwrap.dedent( 224 | """ 225 | [project] 226 | name = "foo" 227 | version = "0.1.0" 228 | dynamic = ["dependencies"] 229 | """ 230 | ) 231 | ) 232 | setuppy_file.write_text( 233 | textwrap.dedent( 234 | """\ 235 | from setuptools import setup 236 | setup(install_requires=["bar > 2"]) 237 | """ 238 | ) 239 | ) 240 | 241 | metadata = build_project_metadata( 242 | pyproject_file, (), attempt_static_parse=True, isolated=True, quiet=False 243 | ) 244 | assert isinstance(metadata, ProjectMetadata) 245 | assert [str(r.req) for r in metadata.requirements] == ["bar>2"] 246 | assert [r.comes_from for r in metadata.requirements] == [ 247 | f"foo ({pyproject_file.as_posix()})" 248 | ] 249 | -------------------------------------------------------------------------------- /piptools/_compat/pip_compat.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import optparse 4 | import pathlib 5 | import typing as _t 6 | import urllib.parse 7 | from collections.abc import Iterable, Iterator 8 | from dataclasses import dataclass 9 | 10 | from pip._internal.cache import WheelCache 11 | from pip._internal.index.package_finder import PackageFinder 12 | from pip._internal.metadata import BaseDistribution 13 | from pip._internal.metadata.pkg_resources import Distribution as _PkgResourcesDist 14 | from pip._internal.models.direct_url import DirectUrl 15 | from pip._internal.models.link import Link 16 | from pip._internal.network.session import PipSession 17 | from pip._internal.req import InstallRequirement 18 | from pip._internal.req import parse_requirements as _parse_requirements 19 | from pip._internal.req.constructors import install_req_from_parsed_requirement 20 | from pip._vendor.pkg_resources import Requirement 21 | 22 | from .path_compat import relative_to_walk_up 23 | 24 | # The Distribution interface has changed between pkg_resources and 25 | # importlib.metadata, so this compat layer allows for a consistent access 26 | # pattern. In pip 22.1, importlib.metadata became the default on Python 3.11 27 | # (and later), but is overridable. `select_backend` returns what's being used. 28 | if _t.TYPE_CHECKING: 29 | from pip._internal.metadata.importlib import Distribution as _ImportLibDist 30 | 31 | from ..utils import PIP_VERSION, copy_install_requirement 32 | 33 | 34 | @dataclass(frozen=True) 35 | class Distribution: 36 | key: str 37 | version: str 38 | requires: Iterable[Requirement] 39 | direct_url: DirectUrl | None 40 | 41 | @classmethod 42 | def from_pip_distribution(cls, dist: BaseDistribution) -> Distribution: 43 | # TODO: Use only the BaseDistribution protocol properties and methods 44 | # instead of specializing by type. 
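# The pkg_resources-backed distribution exposes parsed requirements via ``requires()``, while the importlib-backed one exposes raw requirement strings, hence the dedicated constructor for each below.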
45 | if isinstance(dist, _PkgResourcesDist): 46 | return cls._from_pkg_resources(dist) 47 | else: 48 | return cls._from_importlib(dist) 49 | 50 | @classmethod 51 | def _from_pkg_resources(cls, dist: _PkgResourcesDist) -> Distribution: 52 | return cls( 53 | dist._dist.key, dist._dist.version, dist._dist.requires(), dist.direct_url 54 | ) 55 | 56 | @classmethod 57 | def _from_importlib(cls, dist: _ImportLibDist) -> Distribution: 58 | """Mimic pkg_resources.Distribution.requires for the case of no 59 | extras. 60 | 61 | This doesn't fulfill that API's ``extras`` parameter but 62 | satisfies the needs of pip-tools. 63 | """ 64 | reqs = (Requirement.parse(req) for req in (dist._dist.requires or ())) 65 | requires = [ 66 | req 67 | for req in reqs 68 | if not req.marker or req.marker.evaluate({"extra": None}) 69 | ] 70 | return cls(dist._dist.name, dist._dist.version, requires, dist.direct_url) 71 | 72 | 73 | class FileLink(Link): # type: ignore[misc] 74 | """Wrapper for ``pip``'s ``Link`` class.""" 75 | 76 | _url: str 77 | 78 | @property 79 | def file_path(self) -> str: 80 | # overriding the actual property to bypass some validation 81 | return self._url 82 | 83 | 84 | def parse_requirements( 85 | filename: str, 86 | session: PipSession, 87 | finder: PackageFinder | None = None, 88 | options: optparse.Values | None = None, 89 | constraint: bool = False, 90 | isolated: bool = False, 91 | comes_from_stdin: bool = False, 92 | ) -> Iterator[InstallRequirement]: 93 | # the `comes_from` data will be rewritten in different ways in different conditions 94 | # each rewrite rule is expressible as a str->str function 95 | rewrite_comes_from: _t.Callable[[str], str] 96 | 97 | if comes_from_stdin: 98 | # if data is coming from stdin, then `comes_from="-r -"` 99 | rewrite_comes_from = _rewrite_comes_from_to_hardcoded_stdin_value 100 | elif pathlib.Path(filename).is_absolute(): 101 | # if the input path is absolute, just normalize paths to posix-style 102 | rewrite_comes_from = _normalize_comes_from_location 103 | else: 104 | # if the input was a relative path, set the rewrite rule to rewrite 105 | # absolute paths to be relative 106 | rewrite_comes_from = _relativize_comes_from_location 107 | 108 | for parsed_req in _parse_requirements( 109 | filename, session, finder=finder, options=options, constraint=constraint 110 | ): 111 | install_req = install_req_from_parsed_requirement(parsed_req, isolated=isolated) 112 | if install_req.editable and not parsed_req.requirement.startswith("file://"): 113 | # ``Link.url`` is what is saved to the output file 114 | # we set the url directly to undo the transformation in pip's Link class 115 | file_link = FileLink(install_req.link.url) 116 | file_link._url = parsed_req.requirement 117 | install_req.link = file_link 118 | install_req = copy_install_requirement(install_req) 119 | 120 | install_req.comes_from = rewrite_comes_from(install_req.comes_from) 121 | 122 | yield install_req 123 | 124 | 125 | def _rewrite_comes_from_to_hardcoded_stdin_value(_: str, /) -> str: 126 | """Produce the hardcoded ``comes_from`` value for stdin.""" 127 | return "-r -" 128 | 129 | 130 | def _relativize_comes_from_location(original_comes_from: str, /) -> str: 131 | """ 132 | Convert a ``comes_from`` path to a relative posix path. 133 | 134 | This is the rewrite rule used when ``-r`` or ``-c`` appears in 135 | ``comes_from`` data with an absolute path. 
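For example (illustrative paths): with a CWD of /home/user/project, ``-r /home/user/project/requirements/base.in`` becomes ``-r requirements/base.in``.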
136 | 137 | The ``-r`` or ``-c`` qualifier is retained, the path is relativized 138 | with respect to the CWD, and the path is converted to posix style. 139 | """ 140 | # require `-r` or `-c` as the source 141 | if not original_comes_from.startswith(("-r ", "-c ")): 142 | return original_comes_from 143 | 144 | # split on the space 145 | prefix, space_sep, suffix = original_comes_from.partition(" ") 146 | 147 | # if the value part is a remote URI for pip, return the original 148 | if _is_remote_pip_uri(suffix): 149 | return original_comes_from 150 | 151 | file_path = pathlib.Path(suffix) 152 | 153 | # if the path was not absolute, normalize to posix-style and finish processing 154 | if not file_path.is_absolute(): 155 | return f"{prefix} {file_path.as_posix()}" 156 | 157 | # make it relative to the current working dir 158 | suffix = relative_to_walk_up(file_path, pathlib.Path.cwd()).as_posix() 159 | return f"{prefix}{space_sep}{suffix}" 160 | 161 | 162 | def _normalize_comes_from_location(original_comes_from: str, /) -> str: 163 | """ 164 | Convert a ``comes_from`` path to a posix-style path. 165 | 166 | This is the rewrite rule when ``-r`` or ``-c`` appears in ``comes_from`` 167 | data and the input path was absolute, meaning we should not relativize the 168 | locations. 169 | 170 | The ``-r`` or ``-c`` qualifier is retained, and the path is converted to 171 | posix style. 172 | """ 173 | # require `-r` or `-c` as the source 174 | if not original_comes_from.startswith(("-r ", "-c ")): 175 | return original_comes_from 176 | 177 | # split on the space 178 | prefix, space_sep, suffix = original_comes_from.partition(" ") 179 | 180 | # if the value part is a remote URI for pip, return the original 181 | if _is_remote_pip_uri(suffix): 182 | return original_comes_from 183 | 184 | # convert to a posix-style path 185 | suffix = pathlib.Path(suffix).as_posix() 186 | return f"{prefix}{space_sep}{suffix}" 187 | 188 | 189 | def _is_remote_pip_uri(value: str) -> bool: 190 | """ 191 | Test a string to see if it is a URI treated as a remote file in ``pip``. 192 | Specifically this means that it's a 'file', 'http', or 'https' URI. 193 | 194 | The test is performed by trying a URL parse and reading the scheme. 
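For example, ``https://example.com/requirements.txt`` and ``file:///tmp/requirements.txt`` are remote in this sense, while a bare path such as ``requirements/base.in`` is not (its scheme is empty).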
195 | """ 196 | scheme = urllib.parse.urlsplit(value).scheme 197 | return scheme in {"file", "http", "https"} 198 | 199 | 200 | def create_wheel_cache(cache_dir: str, format_control: str | None = None) -> WheelCache: 201 | kwargs: dict[str, str | None] = {"cache_dir": cache_dir} 202 | if PIP_VERSION[:2] <= (23, 0): 203 | kwargs["format_control"] = format_control 204 | return WheelCache(**kwargs) 205 | 206 | 207 | def get_dev_pkgs() -> set[str]: 208 | if PIP_VERSION[:2] <= (23, 1): 209 | from pip._internal.commands.freeze import DEV_PKGS 210 | 211 | return _t.cast(set[str], DEV_PKGS) 212 | 213 | from pip._internal.commands.freeze import _dev_pkgs 214 | 215 | return _t.cast(set[str], _dev_pkgs()) 216 | -------------------------------------------------------------------------------- /piptools/scripts/sync.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import itertools 4 | import os 5 | import shlex 6 | import shutil 7 | import sys 8 | import typing as _t 9 | from pathlib import Path 10 | 11 | import click 12 | from pip._internal.commands import create_command 13 | from pip._internal.commands.install import InstallCommand 14 | from pip._internal.index.package_finder import PackageFinder 15 | from pip._internal.metadata import get_environment 16 | 17 | from .. import sync 18 | from .._compat import Distribution, parse_requirements 19 | from ..exceptions import PipToolsError 20 | from ..logging import log 21 | from ..repositories import PyPIRepository 22 | from ..utils import ( 23 | flat_map, 24 | get_pip_version_for_python_executable, 25 | get_required_pip_specification, 26 | get_sys_path_for_python_executable, 27 | ) 28 | from . import options 29 | from ._deprecations import filter_deprecated_pip_args 30 | 31 | DEFAULT_REQUIREMENTS_FILE = "requirements.txt" 32 | 33 | 34 | @click.command( 35 | name="pip-sync", context_settings={"help_option_names": options.help_option_names} 36 | ) 37 | @options.version 38 | @options.ask 39 | @options.dry_run 40 | @options.force 41 | @options.find_links 42 | @options.index_url 43 | @options.extra_index_url 44 | @options.trusted_host 45 | @options.no_index 46 | @options.python_executable 47 | @options.verbose 48 | @options.quiet 49 | @options.user 50 | @options.cert 51 | @options.client_cert 52 | @options.src_files 53 | @options.pip_args 54 | @options.config 55 | @options.no_config 56 | def cli( 57 | ask: bool, 58 | dry_run: bool, 59 | force: bool, 60 | find_links: tuple[str, ...], 61 | index_url: str | None, 62 | extra_index_url: tuple[str, ...], 63 | trusted_host: tuple[str, ...], 64 | no_index: bool, 65 | python_executable: str | None, 66 | verbose: int, 67 | quiet: int, 68 | user_only: bool, 69 | cert: str | None, 70 | client_cert: str | None, 71 | src_files: tuple[str, ...], 72 | pip_args_str: str | None, 73 | config: Path | None, 74 | no_config: bool, 75 | ) -> None: 76 | """Synchronize virtual environment with requirements.txt.""" 77 | log.verbosity = verbose - quiet 78 | 79 | if not src_files: 80 | if os.path.exists(DEFAULT_REQUIREMENTS_FILE): 81 | src_files = (DEFAULT_REQUIREMENTS_FILE,) 82 | else: 83 | msg = "No requirement files given and no {} found in the current directory" 84 | log.error(msg.format(DEFAULT_REQUIREMENTS_FILE)) 85 | sys.exit(2) 86 | 87 | if any(src_file.endswith(".in") for src_file in src_files): 88 | msg = ( 89 | "Some input files have the .in extension, which is most likely an error " 90 | "and can cause weird behaviour. 
You probably meant to use " 91 | "the corresponding *.txt file?" 92 | ) 93 | if force: 94 | log.warning("WARNING: " + msg) 95 | else: 96 | log.error("ERROR: " + msg) 97 | sys.exit(2) 98 | 99 | if config: 100 | log.debug(f"Using pip-tools configuration defaults found in '{config !s}'.") 101 | 102 | if python_executable: 103 | _validate_python_executable(python_executable) 104 | 105 | install_command = _t.cast(InstallCommand, create_command("install")) 106 | options, _ = install_command.parse_args([]) 107 | session = install_command._build_session(options) 108 | finder = install_command._build_package_finder(options=options, session=session) 109 | 110 | # Parse requirements file. Note, all options inside requirements file 111 | # will be collected by the finder. 112 | requirements = flat_map( 113 | lambda src: parse_requirements(src, finder=finder, session=session), src_files 114 | ) 115 | 116 | try: 117 | merged_requirements = sync.merge(requirements, ignore_conflicts=force) 118 | except PipToolsError as e: 119 | log.error(str(e)) 120 | sys.exit(2) 121 | 122 | paths = ( 123 | None 124 | if python_executable is None 125 | else get_sys_path_for_python_executable(python_executable) 126 | ) 127 | installed_dists = _get_installed_distributions( 128 | user_only=user_only, 129 | local_only=python_executable is None, 130 | paths=paths, 131 | ) 132 | to_install, to_uninstall = sync.diff(merged_requirements, installed_dists) 133 | 134 | install_flags = _compose_install_flags( 135 | finder, 136 | no_index=no_index, 137 | index_url=index_url, 138 | extra_index_url=extra_index_url, 139 | trusted_host=trusted_host, 140 | find_links=find_links, 141 | user_only=user_only, 142 | cert=cert, 143 | client_cert=client_cert, 144 | ) + shlex.split(pip_args_str or "") 145 | install_flags = filter_deprecated_pip_args(install_flags) 146 | 147 | sys.exit( 148 | sync.sync( 149 | to_install, 150 | to_uninstall, 151 | dry_run=dry_run, 152 | install_flags=install_flags, 153 | ask=ask, 154 | python_executable=python_executable, 155 | ) 156 | ) 157 | 158 | 159 | def _validate_python_executable(python_executable: str) -> None: 160 | """ 161 | Validates incoming python_executable argument passed to CLI. 162 | """ 163 | resolved_python_executable = shutil.which(python_executable) 164 | if resolved_python_executable is None: 165 | msg = "Could not resolve '{}' as valid executable path or alias." 166 | log.error(msg.format(python_executable)) 167 | sys.exit(2) 168 | 169 | # Ensure that target python executable has the right version of pip installed 170 | pip_version = get_pip_version_for_python_executable(python_executable) 171 | required_pip_specification = get_required_pip_specification() 172 | if not required_pip_specification.contains(pip_version, prereleases=True): 173 | msg = ( 174 | "Target python executable '{}' has pip version {} installed. " 175 | "Version {} is expected." 176 | ) 177 | log.error( 178 | msg.format(python_executable, pip_version, required_pip_specification) 179 | ) 180 | sys.exit(2) 181 | 182 | 183 | def _compose_install_flags( 184 | finder: PackageFinder, 185 | no_index: bool, 186 | index_url: str | None, 187 | extra_index_url: tuple[str, ...], 188 | trusted_host: tuple[str, ...], 189 | find_links: tuple[str, ...], 190 | user_only: bool, 191 | cert: str | None, 192 | client_cert: str | None, 193 | ) -> list[str]: 194 | """ 195 | Compose install flags with the given finder and CLI options. 
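For example (illustrative values): a finder configured with the single custom index ``https://pypi.example.org/simple``, with that host also marked trusted, yields ``["--index-url", "https://pypi.example.org/simple", "--trusted-host", "pypi.example.org"]``.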
196 | """ 197 | result = [] 198 | 199 | # Build --index-url/--extra-index-url/--no-index 200 | if no_index: 201 | result.append("--no-index") 202 | elif index_url is not None: 203 | result.extend(["--index-url", index_url]) 204 | elif finder.index_urls: 205 | finder_index_url = finder.index_urls[0] 206 | if finder_index_url != PyPIRepository.DEFAULT_INDEX_URL: 207 | result.extend(["--index-url", finder_index_url]) 208 | for extra_index in finder.index_urls[1:]: 209 | result.extend(["--extra-index-url", extra_index]) 210 | else: 211 | result.append("--no-index") 212 | 213 | for extra_index in extra_index_url: 214 | result.extend(["--extra-index-url", extra_index]) 215 | 216 | # Build --trusted-hosts 217 | for host in itertools.chain(trusted_host, finder.trusted_hosts): 218 | result.extend(["--trusted-host", host]) 219 | 220 | # Build --find-links 221 | for link in itertools.chain(find_links, finder.find_links): 222 | result.extend(["--find-links", link]) 223 | 224 | # Build format controls --no-binary/--only-binary 225 | for format_control in ("no_binary", "only_binary"): 226 | formats = getattr(finder.format_control, format_control) 227 | if not formats: 228 | continue 229 | result.extend( 230 | ["--" + format_control.replace("_", "-"), ",".join(sorted(formats))] 231 | ) 232 | 233 | if user_only: 234 | result.append("--user") 235 | 236 | if cert is not None: 237 | result.extend(["--cert", cert]) 238 | 239 | if client_cert is not None: 240 | result.extend(["--client-cert", client_cert]) 241 | 242 | return result 243 | 244 | 245 | def _get_installed_distributions( 246 | local_only: bool = True, 247 | user_only: bool = False, 248 | paths: list[str] | None = None, 249 | ) -> list[Distribution]: 250 | """Return a list of installed Distribution objects.""" 251 | 252 | env = get_environment(paths) 253 | dists = env.iter_installed_distributions( 254 | local_only=local_only, 255 | user_only=user_only, 256 | skip=[], 257 | ) 258 | return [Distribution.from_pip_distribution(dist) for dist in dists] 259 | -------------------------------------------------------------------------------- /img/pip-tools-overview.svg: -------------------------------------------------------------------------------- 1 |
[pip-tools overview diagram: "Source Spec files" (requirements.in, dev-requirements.in, ...) are fed to pip-compile, which resolves against PyPI and writes "Compiled Spec files" (requirements.txt, dev-requirements.txt, ...); pip-sync then applies the compiled files to "Your (virtual) environment".]
-------------------------------------------------------------------------------- /piptools/sync.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import collections 4 | import os 5 | import sys 6 | import tempfile 7 | from collections.abc import Iterable, Mapping, ValuesView 8 | from subprocess import run # nosec 9 | 10 | import click 11 | from pip._internal.models.direct_url import ArchiveInfo 12 | from pip._internal.req import InstallRequirement 13 | from pip._internal.utils.compat import stdlib_pkgs 14 | from pip._internal.utils.direct_url_helpers import ( 15 | direct_url_as_pep440_direct_reference, 16 | direct_url_from_link, 17 | ) 18 | from pip._vendor.packaging.utils import canonicalize_name 19 | 20 | from ._compat import Distribution, get_dev_pkgs 21 | from .exceptions import IncompatibleRequirements 22 | from .logging import log 23 | from .utils import ( 24 | flat_map, 25 | format_requirement, 26 | get_hashes_from_ireq, 27 | is_url_requirement, 28 | key_from_ireq, 29 | key_from_req, 30 | ) 31 | 32 | PACKAGES_TO_IGNORE = [ 33 | "-markerlib", 34 | "pip", 35 | "pip-tools", 36 | "pip-review", 37 | "pkg-resources", 38 | *stdlib_pkgs, 39 | *get_dev_pkgs(), 40 | ] 41 | 42 | 43 | def dependency_tree( 44 | installed_keys: Mapping[str, Distribution], root_key: str 45 | ) -> set[str]: 46 | """Calculate the dependency tree for a package. 47 | 48 | Return a collection of all of the package's dependencies. 49 | Uses a BFS traversal algorithm (the queue is consumed FIFO). 50 | 51 | ``installed_keys`` should be a {key: requirement} mapping, e.g. 52 | {'django': from_line('django==1.8')} 53 | :param root_key: the key to return the dependency tree for 54 | :type root_key: str 55 | """ 56 | dependencies = set() 57 | queue: collections.deque[Distribution] = collections.deque() 58 | 59 | if root_key in installed_keys: 60 | dep = installed_keys[root_key] 61 | queue.append(dep) 62 | 63 | while queue: 64 | v = queue.popleft() 65 | key = str(canonicalize_name(v.key)) 66 | if key in dependencies: 67 | continue 68 | 69 | dependencies.add(key) 70 | 71 | for dep_specifier in v.requires: 72 | dep_name = key_from_req(dep_specifier) 73 | if dep_name in installed_keys: 74 | dep = installed_keys[dep_name] 75 | 76 | if dep_specifier.specifier.contains(dep.version): 77 | queue.append(dep) 78 | 79 | return dependencies 80 | 81 | 82 | def get_dists_to_ignore(installed: Iterable[Distribution]) -> list[str]: 83 | """Return a collection of package names to ignore by ``pip-sync``. 84 | 85 | Based on the currently installed environment. For example, when pip-tools 86 | is installed in the local environment, it should be ignored, including all 87 | of its dependencies (e.g. click). When pip-tools is not installed 88 | locally, click should also be installed/uninstalled depending on the given 89 | requirements.
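For instance, when pip-tools is installed in the environment being synced, the returned list contains ``pip-tools`` itself plus its installed dependency tree (``click`` and so on), so ``pip-sync`` never tries to uninstall them.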
90 | """ 91 | installed_keys = {str(canonicalize_name(r.key)): r for r in installed} 92 | return list( 93 | flat_map(lambda req: dependency_tree(installed_keys, req), PACKAGES_TO_IGNORE) 94 | ) 95 | 96 | 97 | def merge( 98 | requirements: Iterable[InstallRequirement], ignore_conflicts: bool 99 | ) -> ValuesView[InstallRequirement]: 100 | by_key: dict[str, InstallRequirement] = {} 101 | 102 | for ireq in requirements: 103 | # Limitation: URL requirements are merged by precise string match, so 104 | # "file:///example.zip#egg=example", "file:///example.zip", and 105 | # "example==1.0" will not merge with each other 106 | if ireq.match_markers(): 107 | key = key_from_ireq(ireq) 108 | 109 | if not ignore_conflicts: 110 | existing_ireq = by_key.get(key) 111 | if existing_ireq: 112 | # NOTE: We check equality here since we can assume that the 113 | # requirements are all pinned 114 | if ( 115 | ireq.req 116 | and existing_ireq.req 117 | and ireq.specifier != existing_ireq.specifier 118 | ): 119 | raise IncompatibleRequirements(ireq, existing_ireq) 120 | 121 | # TODO: Always pick the largest specifier in case of a conflict 122 | by_key[key] = ireq 123 | return by_key.values() 124 | 125 | 126 | def diff_key_from_ireq(ireq: InstallRequirement) -> str: 127 | """Calculate key for comparing a compiled requirement with installed modules. 128 | 129 | For URL requirements, only provide a useful key if the url includes 130 | a hash, e.g. #sha1=..., in any of the supported hash algorithms. 131 | Otherwise return ``ireq.link`` so the key will not match and the package will 132 | reinstall. Reinstall is necessary to ensure that packages will reinstall 133 | if the contents at the URL have changed but the version has not. 134 | """ 135 | if is_url_requirement(ireq): 136 | if getattr(ireq.req, "name", None) and ireq.link.has_hash: 137 | return str( 138 | direct_url_as_pep440_direct_reference( 139 | direct_url_from_link(ireq.link), ireq.req.name 140 | ) 141 | ) 142 | # TODO: Also support VCS and editable installs. 143 | return str(ireq.link) 144 | return key_from_ireq(ireq) 145 | 146 | 147 | def diff_key_from_req(req: Distribution) -> str: 148 | """Get a unique key for the requirement.""" 149 | key = str(canonicalize_name(req.key)) 150 | if ( 151 | req.direct_url 152 | and isinstance(req.direct_url.info, ArchiveInfo) 153 | and req.direct_url.info.hash 154 | ): 155 | key = direct_url_as_pep440_direct_reference(req.direct_url, key) 156 | # TODO: Also support VCS and editable installs. 157 | return key 158 | 159 | 160 | def diff( 161 | compiled_requirements: Iterable[InstallRequirement], 162 | installed_dists: Iterable[Distribution], 163 | ) -> tuple[set[InstallRequirement], set[str]]: 164 | """Calculate which packages should be installed or uninstalled. 165 | 166 | Compared are the compiled requirements and a list of currently 167 | installed modules. 
168 | """ 169 | requirements_lut = {diff_key_from_ireq(r): r for r in compiled_requirements} 170 | 171 | satisfied = set() # holds keys 172 | to_install = set() # holds InstallRequirement objects 173 | to_uninstall = set() # holds keys 174 | 175 | pkgs_to_ignore = get_dists_to_ignore(installed_dists) 176 | for dist in installed_dists: 177 | key = diff_key_from_req(dist) 178 | if key not in requirements_lut or not requirements_lut[key].match_markers(): 179 | to_uninstall.add(key) 180 | elif requirements_lut[key].specifier.contains(dist.version): 181 | satisfied.add(key) 182 | 183 | for key, requirement in requirements_lut.items(): 184 | if key not in satisfied and requirement.match_markers(): 185 | to_install.add(requirement) 186 | 187 | # Make sure to not uninstall any packages that should be ignored 188 | to_uninstall -= set(pkgs_to_ignore) 189 | 190 | return (to_install, to_uninstall) 191 | 192 | 193 | def sync( 194 | to_install: Iterable[InstallRequirement], 195 | to_uninstall: Iterable[InstallRequirement], 196 | dry_run: bool = False, 197 | install_flags: list[str] | None = None, 198 | ask: bool = False, 199 | python_executable: str | None = None, 200 | ) -> int: 201 | """Install and uninstall the given sets of modules.""" 202 | exit_code = 0 203 | 204 | python_executable = python_executable or sys.executable 205 | 206 | if not to_uninstall and not to_install: 207 | log.info("Everything up-to-date", err=False) 208 | return exit_code 209 | 210 | pip_flags = [] 211 | if log.verbosity < 0: 212 | pip_flags += ["-q"] 213 | 214 | if ask: 215 | dry_run = True 216 | 217 | if dry_run: 218 | if to_uninstall: 219 | click.echo("Would uninstall:") 220 | for pkg in sorted(to_uninstall): 221 | click.echo(f" {pkg}") 222 | 223 | if to_install: 224 | click.echo("Would install:") 225 | for ireq in sorted(to_install, key=key_from_ireq): 226 | click.echo(f" {format_requirement(ireq)}") 227 | 228 | exit_code = 1 229 | 230 | if ask and click.confirm("Would you like to proceed with these changes?"): 231 | dry_run = False 232 | exit_code = 0 233 | 234 | if not dry_run: 235 | if to_uninstall: 236 | run( # nosec 237 | [ 238 | python_executable, 239 | "-m", 240 | "pip", 241 | "uninstall", 242 | "-y", 243 | *pip_flags, 244 | *sorted(to_uninstall), 245 | ], 246 | check=True, 247 | ) 248 | 249 | if to_install: 250 | if install_flags is None: 251 | install_flags = [] 252 | # prepare requirement lines 253 | req_lines = [] 254 | for ireq in sorted(to_install, key=key_from_ireq): 255 | ireq_hashes = get_hashes_from_ireq(ireq) 256 | req_lines.append(format_requirement(ireq, hashes=ireq_hashes)) 257 | 258 | # save requirement lines to a temporary file 259 | tmp_req_file = tempfile.NamedTemporaryFile(mode="wt", delete=False) 260 | tmp_req_file.write("\n".join(req_lines)) 261 | tmp_req_file.close() 262 | 263 | try: 264 | run( # nosec 265 | [ 266 | python_executable, 267 | "-m", 268 | "pip", 269 | "install", 270 | "-r", 271 | tmp_req_file.name, 272 | *pip_flags, 273 | *install_flags, 274 | ], 275 | check=True, 276 | ) 277 | finally: 278 | os.unlink(tmp_req_file.name) 279 | 280 | return exit_code 281 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | merge_group: 5 | pull_request: 6 | push: 7 | branches: 8 | - main 9 | tags: 10 | - v* 11 | workflow_call: 12 | inputs: 13 | cpython-pip-version: 14 | description: >- 15 | A JSON string with pip versions 16 | to 
test against under CPython. 17 | required: true 18 | type: string 19 | cpython-versions: 20 | description: >- 21 | A JSON string with CPython versions 22 | to test against. 23 | required: true 24 | type: string 25 | 26 | concurrency: 27 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} 28 | cancel-in-progress: true 29 | 30 | env: 31 | FORCE_COLOR: 1 # Request colored output from CLI tools supporting it 32 | MYPY_FORCE_COLOR: 1 # MyPy's color enforcement 33 | PIP_DISABLE_PIP_VERSION_CHECK: 1 34 | PIP_NO_PYTHON_VERSION_WARNING: 1 35 | PIP_NO_WARN_SCRIPT_LOCATION: 1 36 | PRE_COMMIT_COLOR: 1 37 | PY_COLORS: 1 # Recognized by the `py` package, dependency of `pytest` 38 | TOX_PARALLEL_NO_SPINNER: 1 39 | TOX_TESTENV_PASSENV: >- 40 | FORCE_COLOR 41 | MYPY_FORCE_COLOR 42 | NO_COLOR 43 | PY_COLORS 44 | PYTEST_THEME 45 | PYTEST_THEME_MODE 46 | PRE_COMMIT_COLOR 47 | 48 | jobs: 49 | linters: 50 | name: Linters 51 | uses: ./.github/workflows/reusable-qa.yml 52 | 53 | test: 54 | name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }} 55 | runs-on: ${{ matrix.os }}-latest 56 | timeout-minutes: 9 57 | strategy: 58 | fail-fast: false 59 | matrix: 60 | os: 61 | - Ubuntu 62 | - Windows 63 | - macOS 64 | python-version: >- 65 | ${{ 66 | fromJSON( 67 | inputs.cpython-versions 68 | && inputs.cpython-versions 69 | || '["3.9", "3.10", "3.11", "3.12", "3.13"]' 70 | ) 71 | }} 72 | pip-version: >- 73 | ${{ 74 | fromJSON( 75 | inputs.cpython-pip-version 76 | && inputs.cpython-pip-version 77 | || '["supported", "lowest"]' 78 | ) 79 | }} 80 | env: 81 | TOXENV: >- 82 | pip${{ matrix.pip-version }}${{ 83 | !inputs.cpython-pip-version 84 | && '-coverage' 85 | || '' 86 | }} 87 | steps: 88 | - uses: actions/checkout@v5 89 | - name: Set up Python ${{ matrix.python-version }} from GitHub 90 | id: python-install 91 | if: "!endsWith(matrix.python-version, '-dev')" 92 | uses: actions/setup-python@v6 93 | with: 94 | python-version: ${{ matrix.python-version }} 95 | - name: Set up Python ${{ matrix.python-version }} from deadsnakes 96 | if: endsWith(matrix.python-version, '-dev') 97 | uses: deadsnakes/action@v2.1.1 98 | with: 99 | python-version: ${{ matrix.python-version }} 100 | - name: Log python version info (${{ matrix.python-version }}) 101 | run: python --version --version 102 | - name: Get pip cache dir 103 | id: pip-cache 104 | shell: bash 105 | run: | 106 | echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" 107 | - name: Pip cache 108 | uses: actions/cache@v4 109 | with: 110 | path: ${{ steps.pip-cache.outputs.dir }} 111 | key: >- 112 | ${{ runner.os }}-pip-${{ hashFiles('setup.cfg') }}-${{ 113 | hashFiles('pyproject.toml') }}-${{ hashFiles('tox.ini') }}-${{ 114 | hashFiles('.pre-commit-config.yaml') }} 115 | restore-keys: | 116 | ${{ runner.os }}-pip- 117 | ${{ runner.os }}- 118 | - name: Install test dependencies 119 | run: python -m pip install -U tox virtualenv 120 | - name: Prepare test environment 121 | # NOTE: `--parallel-live` is a workaround for the regression in 122 | # NOTE: the upstream tox project that made the 123 | # NOTE: `TOX_PARALLEL_NO_SPINNER=1` env var auto-enable parallelism 124 | # NOTE: and disable output from the tox environments. 
125 | # 126 | # Ref: https://github.com/tox-dev/tox/issues/3193 127 | run: tox -vv --notest -p auto --parallel-live 128 | - name: Test pip ${{ matrix.pip-version }} 129 | # NOTE: `--parallel-live` is a workaround for the regression in 130 | # NOTE: the upstream tox project that made the 131 | # NOTE: `TOX_PARALLEL_NO_SPINNER=1` env var auto-enable parallelism 132 | # NOTE: and disable output from the tox environments. 133 | # 134 | # Ref: https://github.com/tox-dev/tox/issues/3193 135 | run: tox --skip-pkg-install --parallel-live 136 | - name: Re-run the failing tests with maximum verbosity 137 | if: >- 138 | !cancelled() 139 | && failure() 140 | run: >- # `exit 1` makes sure that the job remains red with flaky runs 141 | python -Xutf8 -Im 142 | tox 143 | --parallel=auto 144 | --parallel-live 145 | --skip-missing-interpreters=false 146 | --skip-pkg-install 147 | -vvvvv 148 | -- 149 | --continue-on-collection-errors 150 | --full-trace 151 | --last-failed 152 | ${{ !inputs.cpython-pip-version && '--no-cov' || '' }} 153 | --numprocesses=0 154 | --showlocals 155 | --trace-config 156 | -rA 157 | -vvvvv 158 | && exit 1 159 | shell: bash 160 | - name: Upload coverage to Codecov 161 | if: >- 162 | !cancelled() 163 | && !inputs.cpython-pip-version 164 | uses: codecov/codecov-action@v5 165 | with: 166 | files: ./coverage.xml 167 | flags: >- 168 | CI-GHA, 169 | OS-${{ runner.os }}, 170 | VM-${{ matrix.os }}, 171 | Py-${{ steps.python-install.outputs.python-version }}, 172 | Pip-${{ matrix.pip-version }} 173 | name: >- 174 | OS-${{ runner.os }}, 175 | VM-${{ matrix.os }}, 176 | Py-${{ steps.python-install.outputs.python-version }}, 177 | Pip-${{ matrix.pip-version }} 178 | 179 | pypy: 180 | name: ${{ matrix.os }} / ${{ matrix.python-version }} / ${{ matrix.pip-version }} 181 | runs-on: ${{ matrix.os }}-latest 182 | timeout-minutes: 9 183 | strategy: 184 | fail-fast: false 185 | matrix: 186 | os: 187 | - Ubuntu 188 | - MacOS 189 | - Windows 190 | python-version: 191 | - pypy-3.10 192 | pip-version: 193 | - supported 194 | env: 195 | TOXENV: pip${{ matrix.pip-version }} 196 | steps: 197 | - uses: actions/checkout@v5 198 | - name: Set up Python ${{ matrix.python-version }} 199 | uses: actions/setup-python@v6 200 | with: 201 | python-version: ${{ matrix.python-version }} 202 | - name: Get pip cache dir 203 | id: pip-cache 204 | shell: bash 205 | run: | 206 | echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}" 207 | - name: Pip cache 208 | uses: actions/cache@v4 209 | with: 210 | path: ${{ steps.pip-cache.outputs.dir }} 211 | key: >- 212 | ${{ runner.os }}-pip-${{ hashFiles('setup.cfg') }}-${{ 213 | hashFiles('pyproject.toml') }}-${{ hashFiles('tox.ini') }}-${{ 214 | hashFiles('.pre-commit-config.yaml') }} 215 | restore-keys: | 216 | ${{ runner.os }}-pip- 217 | ${{ runner.os }}- 218 | - name: Install tox 219 | run: pip install tox 220 | - name: Prepare test environment 221 | # NOTE: `--parallel-live` is a workaround for the regression in 222 | # NOTE: the upstream tox project that made the 223 | # NOTE: `TOX_PARALLEL_NO_SPINNER=1` env var auto-enable parallelism 224 | # NOTE: and disable output from the tox environments. 
225 | # 226 | # Ref: https://github.com/tox-dev/tox/issues/3193 227 | run: tox --notest -p auto --parallel-live 228 | - name: Test pip ${{ matrix.pip-version }} 229 | # NOTE: `--parallel-live` is a workaround for the regression in 230 | # NOTE: the upstream tox project that made the 231 | # NOTE: `TOX_PARALLEL_NO_SPINNER=1` env var auto-enable parallelism 232 | # NOTE: and disable output from the tox environments. 233 | # 234 | # Ref: https://github.com/tox-dev/tox/issues/3193 235 | run: tox --skip-pkg-install --parallel-live 236 | - name: Re-run the failing tests with maximum verbosity 237 | if: >- 238 | !cancelled() 239 | && failure() 240 | run: >- # `exit 1` makes sure that the job remains red with flaky runs 241 | python -Xutf8 -Im 242 | tox 243 | --parallel=auto 244 | --parallel-live 245 | --skip-missing-interpreters=false 246 | --skip-pkg-install 247 | -vvvvv 248 | -- 249 | --continue-on-collection-errors 250 | --full-trace 251 | --last-failed 252 | --numprocesses=0 253 | --showlocals 254 | --trace-config 255 | -rA 256 | -vvvvv 257 | && exit 1 258 | shell: bash 259 | 260 | coverage-summary: 261 | name: Coverage processing 262 | if: >- 263 | !cancelled() 264 | runs-on: ubuntu-latest 265 | timeout-minutes: 1 266 | needs: 267 | - test 268 | steps: 269 | - name: Notify Codecov that all coverage reports have been uploaded 270 | if: >- 271 | !cancelled() 272 | uses: codecov/codecov-action@v5 273 | with: 274 | fail_ci_if_error: true 275 | run_command: send-notifications 276 | 277 | check: # This job does nothing and is only used for the branch protection 278 | if: always() 279 | 280 | needs: 281 | - linters 282 | - pypy 283 | - test 284 | 285 | runs-on: ubuntu-latest 286 | 287 | timeout-minutes: 1 288 | 289 | steps: 290 | - name: Decide whether the needed jobs succeeded or failed 291 | uses: re-actors/alls-green@afee1c1eac2a506084c274e9c02c8e0687b48d9e 292 | with: 293 | jobs: ${{ toJSON(needs) }} 294 | -------------------------------------------------------------------------------- /piptools/scripts/options.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import typing as _t 4 | 5 | import click 6 | from pip._internal.commands import create_command 7 | from pip._internal.utils.misc import redact_auth_from_url 8 | 9 | from piptools.locations import CACHE_DIR, DEFAULT_CONFIG_FILE_NAMES 10 | from piptools.utils import UNSAFE_PACKAGES, override_defaults_from_config_file 11 | 12 | BuildTargetT = _t.Literal["sdist", "wheel", "editable"] 13 | ALL_BUILD_TARGETS: tuple[BuildTargetT, ...] = ( 14 | "editable", 15 | "sdist", 16 | "wheel", 17 | ) 18 | 19 | 20 | def _get_default_option(option_name: str) -> _t.Any: 21 | """ 22 | Get default value of the pip's option (including option from pip.conf) 23 | by a given option name. 24 | """ 25 | install_command = create_command("install") 26 | default_values = install_command.parser.get_default_values() 27 | return getattr(default_values, option_name) 28 | 29 | 30 | help_option_names = ("-h", "--help") 31 | 32 | # The options used by pip-compile and pip-sync are presented in no specific order. 
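# Each name below is a reusable click decorator; the command modules stack them onto their ``cli`` callbacks (e.g. ``@options.verbose``) instead of redeclaring each option inline.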
33 | 34 | version = click.version_option(package_name="pip-tools") 35 | 36 | color = click.option( 37 | "--color/--no-color", 38 | default=None, 39 | help="Force output to be colorized or not, instead of auto-detecting color support", 40 | ) 41 | 42 | verbose = click.option( 43 | "-v", 44 | "--verbose", 45 | count=True, 46 | help="Show more output", 47 | ) 48 | quiet = click.option( 49 | "-q", 50 | "--quiet", 51 | count=True, 52 | help="Give less output", 53 | ) 54 | 55 | dry_run = click.option( 56 | "-n", 57 | "--dry-run", 58 | is_flag=True, 59 | help="Only show what would happen, don't change anything", 60 | ) 61 | 62 | pre = click.option( 63 | "-p", 64 | "--pre", 65 | is_flag=True, 66 | default=None, 67 | help="Allow resolving to prereleases (default is not)", 68 | ) 69 | 70 | rebuild = click.option( 71 | "-r", 72 | "--rebuild", 73 | is_flag=True, 74 | help="Clear any caches upfront, rebuild from scratch", 75 | ) 76 | 77 | extra = click.option( 78 | "--extra", 79 | "extras", 80 | multiple=True, 81 | help="Name of an extras_require group to install; may be used more than once", 82 | ) 83 | 84 | all_extras = click.option( 85 | "--all-extras", 86 | is_flag=True, 87 | default=False, 88 | help="Install all extras_require groups", 89 | ) 90 | 91 | find_links = click.option( 92 | "-f", 93 | "--find-links", 94 | multiple=True, 95 | help="Look for archives in this directory or on this HTML page; may be used more than once", 96 | ) 97 | 98 | index_url = click.option( 99 | "-i", 100 | "--index-url", 101 | help="Change index URL (defaults to {index_url})".format( 102 | index_url=redact_auth_from_url(_get_default_option("index_url")) 103 | ), 104 | ) 105 | 106 | no_index = click.option( 107 | "--no-index", 108 | is_flag=True, 109 | help="Ignore package index (only looking at --find-links URLs instead).", 110 | ) 111 | 112 | extra_index_url = click.option( 113 | "--extra-index-url", 114 | multiple=True, 115 | help="Add another index URL to search; may be used more than once", 116 | ) 117 | 118 | cert = click.option("--cert", help="Path to alternate CA bundle.") 119 | 120 | client_cert = click.option( 121 | "--client-cert", 122 | help=( 123 | "Path to SSL client certificate, a single file containing " 124 | "the private key and the certificate in PEM format." 
125 | ), 126 | ) 127 | 128 | trusted_host = click.option( 129 | "--trusted-host", 130 | multiple=True, 131 | help=( 132 | "Mark this host as trusted, even though it does not have " 133 | "valid or any HTTPS; may be used more than once" 134 | ), 135 | ) 136 | 137 | header = click.option( 138 | "--header/--no-header", 139 | is_flag=True, 140 | default=True, 141 | help="Add header to generated file", 142 | ) 143 | 144 | emit_trusted_host = click.option( 145 | "--emit-trusted-host/--no-emit-trusted-host", 146 | is_flag=True, 147 | default=True, 148 | help="Add trusted host option to generated file", 149 | ) 150 | 151 | annotate = click.option( 152 | "--annotate/--no-annotate", 153 | is_flag=True, 154 | default=True, 155 | help="Annotate results, indicating where dependencies come from", 156 | ) 157 | 158 | annotation_style = click.option( 159 | "--annotation-style", 160 | type=click.Choice(("line", "split")), 161 | default="split", 162 | help="Choose the format of annotation comments", 163 | ) 164 | 165 | upgrade = click.option( 166 | "-U", 167 | "--upgrade/--no-upgrade", 168 | is_flag=True, 169 | default=False, 170 | help="Try to upgrade all dependencies to their latest versions", 171 | ) 172 | 173 | upgrade_package = click.option( 174 | "-P", 175 | "--upgrade-package", 176 | "upgrade_packages", 177 | nargs=1, 178 | multiple=True, 179 | help="Specify a particular package to upgrade; may be used more than once", 180 | ) 181 | 182 | output_file = click.option( 183 | "-o", 184 | "--output-file", 185 | nargs=1, 186 | default=None, 187 | type=click.File("w+b", atomic=True, lazy=True), 188 | help=( 189 | "Output file name. Required if more than one input file is given. " 190 | "Will be derived from input file otherwise." 191 | ), 192 | ) 193 | 194 | newline = click.option( 195 | "--newline", 196 | type=click.Choice(("LF", "CRLF", "native", "preserve"), case_sensitive=False), 197 | default="preserve", 198 | help="Override the newline control characters used", 199 | ) 200 | 201 | allow_unsafe = click.option( 202 | "--allow-unsafe/--no-allow-unsafe", 203 | is_flag=True, 204 | default=False, 205 | help=( 206 | "Pin packages considered unsafe: {}.\n\n" 207 | "WARNING: Future versions of pip-tools will enable this behavior by default. " 208 | "Use --no-allow-unsafe to keep the old behavior. It is recommended to pass the " 209 | "--allow-unsafe now to adapt to the upcoming change.".format( 210 | ", ".join(sorted(UNSAFE_PACKAGES)) 211 | ) 212 | ), 213 | ) 214 | 215 | strip_extras = click.option( 216 | "--strip-extras/--no-strip-extras", 217 | is_flag=True, 218 | default=None, 219 | help="Assure output file is constraints compatible, avoiding use of extras.", 220 | ) 221 | 222 | generate_hashes = click.option( 223 | "--generate-hashes", 224 | is_flag=True, 225 | default=False, 226 | help="Generate pip 8 style hashes in the resulting requirements file.", 227 | ) 228 | 229 | reuse_hashes = click.option( 230 | "--reuse-hashes/--no-reuse-hashes", 231 | is_flag=True, 232 | default=True, 233 | help=( 234 | "Improve the speed of --generate-hashes by reusing the hashes from an " 235 | "existing output file." 
236 | ), 237 | ) 238 | 239 | max_rounds = click.option( 240 | "--max-rounds", 241 | default=10, 242 | help="Maximum number of resolution rounds before aborting.", 243 | ) 244 | 245 | src_files = click.argument( 246 | "src_files", 247 | nargs=-1, 248 | type=click.Path(exists=True, allow_dash=True), 249 | ) 250 | 251 | build_isolation = click.option( 252 | "--build-isolation/--no-build-isolation", 253 | is_flag=True, 254 | default=True, 255 | help=( 256 | "Enable isolation when building a modern source distribution. " 257 | "Build dependencies specified by PEP 518 must be already installed " 258 | "if build isolation is disabled." 259 | ), 260 | ) 261 | 262 | emit_find_links = click.option( 263 | "--emit-find-links/--no-emit-find-links", 264 | is_flag=True, 265 | default=True, 266 | help="Add the find-links option to generated file", 267 | ) 268 | 269 | cache_dir = click.option( 270 | "--cache-dir", 271 | help="Store the cache data in DIRECTORY.", 272 | default=CACHE_DIR, 273 | envvar="PIP_TOOLS_CACHE_DIR", 274 | show_default=True, 275 | show_envvar=True, 276 | type=click.Path(file_okay=False, writable=True), 277 | ) 278 | 279 | pip_args = click.option( 280 | "--pip-args", 281 | "pip_args_str", 282 | help="Arguments to pass directly to the pip command.", 283 | ) 284 | 285 | resolver = click.option( 286 | "--resolver", 287 | "resolver_name", 288 | type=click.Choice(("legacy", "backtracking")), 289 | default="backtracking", 290 | envvar="PIP_TOOLS_RESOLVER", 291 | help="Choose the dependency resolver.", 292 | ) 293 | 294 | emit_index_url = click.option( 295 | "--emit-index-url/--no-emit-index-url", 296 | is_flag=True, 297 | default=True, 298 | help="Add index URL to generated file", 299 | ) 300 | 301 | emit_options = click.option( 302 | "--emit-options/--no-emit-options", 303 | is_flag=True, 304 | default=True, 305 | help="Add options to generated file", 306 | ) 307 | 308 | unsafe_package = click.option( 309 | "--unsafe-package", 310 | multiple=True, 311 | help=( 312 | "Specify a package to consider unsafe; may be used more than once. " 313 | f"Replaces default unsafe packages: {', '.join(sorted(UNSAFE_PACKAGES))}" 314 | ), 315 | ) 316 | 317 | config = click.option( 318 | "--config", 319 | type=click.Path( 320 | exists=True, 321 | file_okay=True, 322 | dir_okay=False, 323 | readable=True, 324 | allow_dash=False, 325 | path_type=str, 326 | ), 327 | help=( 328 | f"Read configuration from TOML file. By default, looks for the following " 329 | f"files in the given order: {', '.join(DEFAULT_CONFIG_FILE_NAMES)}."
330 | ), 331 | is_eager=True, 332 | callback=override_defaults_from_config_file, 333 | ) 334 | 335 | no_config = click.option( 336 | "--no-config", 337 | is_flag=True, 338 | default=False, 339 | help="Do not read any config file.", 340 | is_eager=True, 341 | ) 342 | 343 | constraint = click.option( 344 | "-c", 345 | "--constraint", 346 | multiple=True, 347 | help="Constrain versions using the given constraints file; may be used more than once.", 348 | ) 349 | 350 | ask = click.option( 351 | "-a", 352 | "--ask", 353 | is_flag=True, 354 | help="Show what would happen, then ask whether to continue", 355 | ) 356 | 357 | force = click.option( 358 | "--force", is_flag=True, help="Proceed even if conflicts are found" 359 | ) 360 | 361 | python_executable = click.option( 362 | "--python-executable", 363 | help="Custom python executable path if targeting an environment other than current.", 364 | ) 365 | 366 | user = click.option( 367 | "--user", 368 | "user_only", 369 | is_flag=True, 370 | help="Restrict attention to user directory", 371 | ) 372 | 373 | build_deps_for = click.option( 374 | "--build-deps-for", 375 | "build_deps_targets", 376 | multiple=True, 377 | type=click.Choice(ALL_BUILD_TARGETS), 378 | help="Name of a build target to extract dependencies for. " 379 | "Static dependencies declared in 'pyproject.toml::build-system.requires' will be included as " 380 | "well; may be used more than once.", 381 | ) 382 | 383 | all_build_deps = click.option( 384 | "--all-build-deps", 385 | is_flag=True, 386 | default=False, 387 | help="Extract dependencies for all build targets. " 388 | "Static dependencies declared in 'pyproject.toml::build-system.requires' will be included as " 389 | "well.", 390 | ) 391 | 392 | only_build_deps = click.option( 393 | "--only-build-deps", 394 | is_flag=True, 395 | default=False, 396 | help="Extract a package only if it is a build dependency.", 397 | ) 398 | -------------------------------------------------------------------------------- /piptools/writer.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import io 4 | import os 5 | import re 6 | import sys 7 | import typing as _t 8 | from collections.abc import Iterable, Iterator 9 | from itertools import chain 10 | 11 | from click import unstyle 12 | from click.core import Context 13 | from pip._internal.models.format_control import FormatControl 14 | from pip._internal.req.req_install import InstallRequirement 15 | from pip._vendor.packaging.markers import Marker 16 | from pip._vendor.packaging.utils import canonicalize_name 17 | 18 | from .logging import log 19 | from .utils import ( 20 | comment, 21 | dedup, 22 | format_requirement, 23 | get_compile_command, 24 | key_from_ireq, 25 | strip_extras, 26 | ) 27 | 28 | MESSAGE_UNHASHED_PACKAGE = comment( 29 | "# WARNING: pip install will require the following package to be hashed." 30 | "\n# Consider using a hashable URL like " 31 | "https://github.com/jazzband/pip-tools/archive/SOMECOMMIT.zip" 32 | ) 33 | 34 | MESSAGE_UNSAFE_PACKAGES_UNPINNED = comment( 35 | "# WARNING: The following packages were not pinned, but pip requires them to be" 36 | "\n# pinned when the requirements file includes hashes and the requirement is not" 37 | "\n# satisfied by a package already installed. " 38 | "Consider using the --allow-unsafe flag." 
39 | ) 40 | 41 | MESSAGE_UNSAFE_PACKAGES = comment( 42 | "# The following packages are considered to be unsafe in a requirements file:" 43 | ) 44 | 45 | MESSAGE_UNINSTALLABLE = ( 46 | "The generated requirements file may be rejected by pip install. " 47 | "See # WARNING lines for details." 48 | ) 49 | 50 | 51 | strip_comes_from_line_re = re.compile(r" \(line \d+\)$") 52 | 53 | 54 | def _comes_from_as_string(comes_from: str | InstallRequirement) -> str: 55 | if isinstance(comes_from, str): 56 | return strip_comes_from_line_re.sub("", comes_from) 57 | return _t.cast(str, canonicalize_name(key_from_ireq(comes_from))) 58 | 59 | 60 | def annotation_style_split(required_by: set[str]) -> str: 61 | sorted_required_by = sorted(required_by) 62 | if len(sorted_required_by) == 1: 63 | source = sorted_required_by[0] 64 | annotation = "# via " + source 65 | else: 66 | annotation_lines = ["# via"] 67 | for source in sorted_required_by: 68 | annotation_lines.append(" # " + source) 69 | annotation = "\n".join(annotation_lines) 70 | return annotation 71 | 72 | 73 | def annotation_style_line(required_by: set[str]) -> str: 74 | return f"# via {', '.join(sorted(required_by))}" 75 | 76 | 77 | class OutputWriter: 78 | def __init__( 79 | self, 80 | dst_file: _t.BinaryIO, 81 | click_ctx: Context, 82 | dry_run: bool, 83 | emit_header: bool, 84 | emit_index_url: bool, 85 | emit_trusted_host: bool, 86 | annotate: bool, 87 | annotation_style: str, 88 | strip_extras: bool, 89 | generate_hashes: bool, 90 | default_index_url: str, 91 | index_urls: Iterable[str], 92 | trusted_hosts: Iterable[str], 93 | format_control: FormatControl, 94 | linesep: str, 95 | allow_unsafe: bool, 96 | find_links: list[str], 97 | emit_find_links: bool, 98 | emit_options: bool, 99 | ) -> None: 100 | self.dst_file = dst_file 101 | self.click_ctx = click_ctx 102 | self.dry_run = dry_run 103 | self.emit_header = emit_header 104 | self.emit_index_url = emit_index_url 105 | self.emit_trusted_host = emit_trusted_host 106 | self.annotate = annotate 107 | self.annotation_style = annotation_style 108 | self.strip_extras = strip_extras 109 | self.generate_hashes = generate_hashes 110 | self.default_index_url = default_index_url 111 | self.index_urls = index_urls 112 | self.trusted_hosts = trusted_hosts 113 | self.format_control = format_control 114 | self.linesep = linesep 115 | self.allow_unsafe = allow_unsafe 116 | self.find_links = find_links 117 | self.emit_find_links = emit_find_links 118 | self.emit_options = emit_options 119 | 120 | def _sort_key(self, ireq: InstallRequirement) -> tuple[bool, str]: 121 | return (not ireq.editable, key_from_ireq(ireq)) 122 | 123 | def write_header(self) -> Iterator[str]: 124 | if self.emit_header: 125 | yield comment("#") 126 | yield comment( 127 | "# This file is autogenerated by pip-compile with Python " 128 | f"{sys.version_info.major}.{sys.version_info.minor}" 129 | ) 130 | yield comment("# by the following command:") 131 | yield comment("#") 132 | compile_command = os.environ.get( 133 | "CUSTOM_COMPILE_COMMAND" 134 | ) or get_compile_command(self.click_ctx) 135 | yield comment(f"# {compile_command}") 136 | yield comment("#") 137 | 138 | def write_index_options(self) -> Iterator[str]: 139 | if self.emit_index_url: 140 | for index, index_url in enumerate(dedup(self.index_urls)): 141 | if index == 0 and index_url.rstrip("/") == self.default_index_url: 142 | continue 143 | flag = "--index-url" if index == 0 else "--extra-index-url" 144 | yield f"{flag} {index_url}" 145 | 146 | def write_trusted_hosts(self) -> 
Iterator[str]: 147 | if self.emit_trusted_host: 148 | for trusted_host in dedup(self.trusted_hosts): 149 | yield f"--trusted-host {trusted_host}" 150 | 151 | def write_format_controls(self) -> Iterator[str]: 152 | # The ordering of output needs to preserve the behavior of pip's 153 | # FormatControl.get_allowed_formats(). The behavior is the following: 154 | # 155 | # * Parsing of CLI options happens first to last. 156 | # * --only-binary takes precedence over --no-binary 157 | # * Package names take precedence over :all: 158 | # * We'll never see :all: in both due to mutual exclusion. 159 | # 160 | # So in summary, we want to emit :all: first and then package names later. 161 | no_binary = self.format_control.no_binary.copy() 162 | only_binary = self.format_control.only_binary.copy() 163 | 164 | if ":all:" in no_binary: 165 | yield "--no-binary :all:" 166 | no_binary.remove(":all:") 167 | if ":all:" in only_binary: 168 | yield "--only-binary :all:" 169 | only_binary.remove(":all:") 170 | for nb in dedup(sorted(no_binary)): 171 | yield f"--no-binary {nb}" 172 | for ob in dedup(sorted(only_binary)): 173 | yield f"--only-binary {ob}" 174 | 175 | def write_find_links(self) -> Iterator[str]: 176 | if self.emit_find_links: 177 | for find_link in dedup(self.find_links): 178 | yield f"--find-links {find_link}" 179 | 180 | def write_flags(self) -> Iterator[str]: 181 | if not self.emit_options: 182 | return 183 | emitted = False 184 | for line in chain( 185 | self.write_index_options(), 186 | self.write_find_links(), 187 | self.write_trusted_hosts(), 188 | self.write_format_controls(), 189 | ): 190 | emitted = True 191 | yield line 192 | if emitted: 193 | yield "" 194 | 195 | def _iter_lines( 196 | self, 197 | results: set[InstallRequirement], 198 | unsafe_requirements: set[InstallRequirement], 199 | unsafe_packages: set[str], 200 | markers: dict[str, Marker], 201 | hashes: dict[InstallRequirement, set[str]] | None = None, 202 | ) -> Iterator[str]: 203 | # default values 204 | unsafe_packages = unsafe_packages if self.allow_unsafe else set() 205 | hashes = hashes or {} 206 | 207 | # Check for unhashed or unpinned packages if at least one package does have 208 | # hashes, which will trigger pip install's --require-hashes mode. 
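# (Illustrative sketch, not part of the upstream source: pip switches on its
# --require-hashes mode as soon as any line in a requirements file carries a
# --hash option, so a mixed output such as
#     small-fake-a==0.1 --hash=sha256:<digest>
#     small-fake-b==0.2
# would be rejected by `pip install -r` because small-fake-b is unhashed;
# the MESSAGE_UNHASHED_PACKAGE warning yielded below exists to flag exactly
# that case.)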
209 | warn_uninstallable = False 210 | has_hashes = hashes and any(hash for hash in hashes.values()) 211 | 212 | yielded = False 213 | 214 | for line in self.write_header(): 215 | yield line 216 | yielded = True 217 | for line in self.write_flags(): 218 | yield line 219 | yielded = True 220 | 221 | unsafe_requirements = unsafe_requirements or { 222 | r for r in results if r.name in unsafe_packages 223 | } 224 | packages = {r for r in results if r.name not in unsafe_packages} 225 | 226 | if packages: 227 | for ireq in sorted(packages, key=self._sort_key): 228 | if has_hashes and not hashes.get(ireq): 229 | yield MESSAGE_UNHASHED_PACKAGE 230 | warn_uninstallable = True 231 | line = self._format_requirement( 232 | ireq, markers.get(key_from_ireq(ireq)), hashes=hashes 233 | ) 234 | yield line 235 | yielded = True 236 | 237 | if unsafe_requirements: 238 | yield "" 239 | yielded = True 240 | if has_hashes and not self.allow_unsafe: 241 | yield MESSAGE_UNSAFE_PACKAGES_UNPINNED 242 | warn_uninstallable = True 243 | else: 244 | yield MESSAGE_UNSAFE_PACKAGES 245 | 246 | for ireq in sorted(unsafe_requirements, key=self._sort_key): 247 | ireq_key = key_from_ireq(ireq) 248 | if not self.allow_unsafe: 249 | yield comment(f"# {ireq_key}") 250 | else: 251 | line = self._format_requirement( 252 | ireq, marker=markers.get(ireq_key), hashes=hashes 253 | ) 254 | yield line 255 | 256 | # Yield even when there's no real content, so that blank files are written 257 | if not yielded: 258 | yield "" 259 | 260 | if warn_uninstallable: 261 | log.warning(MESSAGE_UNINSTALLABLE) 262 | 263 | def write( 264 | self, 265 | results: set[InstallRequirement], 266 | unsafe_requirements: set[InstallRequirement], 267 | unsafe_packages: set[str], 268 | markers: dict[str, Marker], 269 | hashes: dict[InstallRequirement, set[str]] | None, 270 | ) -> None: 271 | if not self.dry_run: 272 | dst_file = io.TextIOWrapper( 273 | self.dst_file, 274 | encoding="utf8", 275 | newline=self.linesep, 276 | line_buffering=True, 277 | ) 278 | try: 279 | for line in self._iter_lines( 280 | results, unsafe_requirements, unsafe_packages, markers, hashes 281 | ): 282 | if self.dry_run: 283 | # Bypass the log level to always print this during a dry run 284 | log.log(line) 285 | else: 286 | log.info(line) 287 | dst_file.write(unstyle(line)) 288 | dst_file.write("\n") 289 | finally: 290 | if not self.dry_run: 291 | dst_file.detach() 292 | 293 | def _format_requirement( 294 | self, 295 | ireq: InstallRequirement, 296 | marker: Marker | None = None, 297 | hashes: dict[InstallRequirement, set[str]] | None = None, 298 | ) -> str: 299 | ireq_hashes = (hashes if hashes is not None else {}).get(ireq) 300 | 301 | line = format_requirement(ireq, marker=marker, hashes=ireq_hashes) 302 | if self.strip_extras: 303 | line = strip_extras(line) 304 | 305 | if not self.annotate: 306 | return line 307 | 308 | # Annotate what packages or reqs-ins this package is required by 309 | required_by = set() 310 | if hasattr(ireq, "_source_ireqs"): 311 | required_by |= { 312 | _comes_from_as_string(src_ireq.comes_from) 313 | for src_ireq in ireq._source_ireqs 314 | if src_ireq.comes_from 315 | } 316 | 317 | # Filter out the origin install requirements for extras. 
318 | # See https://github.com/jazzband/pip-tools/issues/2003 319 | if ireq.comes_from and ( 320 | isinstance(ireq.comes_from, str) or ireq.comes_from.name != ireq.name 321 | ): 322 | required_by.add(_comes_from_as_string(ireq.comes_from)) 323 | 324 | required_by |= set(getattr(ireq, "_required_by", set())) 325 | 326 | if required_by: 327 | if self.annotation_style == "split": 328 | annotation = annotation_style_split(required_by) 329 | sep = "\n " 330 | elif self.annotation_style == "line": 331 | annotation = annotation_style_line(required_by) 332 | sep = "\n " if ireq_hashes else " " 333 | else: # pragma: no cover 334 | raise ValueError("Invalid value for annotation style") 335 | if self.strip_extras: 336 | annotation = strip_extras(annotation) 337 | # 24 is one reasonable column size to use here, that we've used in the past 338 | lines = f"{line:24}{sep}{comment(annotation)}".splitlines() 339 | line = "\n".join(ln.rstrip() for ln in lines) 340 | 341 | return line 342 | -------------------------------------------------------------------------------- /piptools/build.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import collections 4 | import contextlib 5 | import os 6 | import pathlib 7 | import sys 8 | import tempfile 9 | import typing as _t 10 | from collections.abc import Iterator 11 | from dataclasses import dataclass 12 | from importlib import metadata as importlib_metadata 13 | 14 | import build 15 | import build.env 16 | import pyproject_hooks 17 | from pip._internal.req import InstallRequirement 18 | from pip._internal.req.constructors import parse_req_from_line 19 | from pip._vendor.packaging.markers import Marker 20 | from pip._vendor.packaging.requirements import Requirement 21 | 22 | from .utils import copy_install_requirement, install_req_from_line 23 | 24 | if sys.version_info >= (3, 11): 25 | import tomllib 26 | else: 27 | import tomli as tomllib 28 | 29 | PYPROJECT_TOML = "pyproject.toml" 30 | 31 | _T = _t.TypeVar("_T") 32 | 33 | 34 | if sys.version_info >= (3, 10): 35 | from importlib.metadata import PackageMetadata 36 | else: 37 | 38 | class PackageMetadata(_t.Protocol): 39 | @_t.overload 40 | def get_all(self, name: str, failobj: None = None) -> list[_t.Any] | None: ... 41 | 42 | @_t.overload 43 | def get_all(self, name: str, failobj: _T) -> list[_t.Any] | _T: ... 44 | 45 | 46 | @dataclass 47 | class StaticProjectMetadata: 48 | extras: tuple[str, ...] 49 | requirements: tuple[InstallRequirement, ...] 50 | 51 | 52 | @dataclass 53 | class ProjectMetadata: 54 | extras: tuple[str, ...] 55 | requirements: tuple[InstallRequirement, ...] 56 | build_requirements: tuple[InstallRequirement, ...] 57 | 58 | 59 | def maybe_statically_parse_project_metadata( 60 | src_file: pathlib.Path, 61 | ) -> StaticProjectMetadata | None: 62 | """ 63 | Return the metadata for a project, if it can be statically parsed from ``pyproject.toml``. 64 | 65 | This function is typically significantly faster than invoking a build backend. 66 | Returns None if the project metadata cannot be statically parsed. 
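    (Illustrative note, not upstream text.) For example, a ``pyproject.toml``
    containing only

        [project]
        name = "small-fake-a"
        dependencies = ["requests>=2"]

    can be handled statically, whereas listing ``"dependencies"`` under
    ``project.dynamic`` forces a full build-backend invocation.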
67 | """ 68 | if src_file.name != PYPROJECT_TOML: 69 | return None 70 | 71 | try: 72 | with open(src_file, "rb") as f: 73 | pyproject_contents = tomllib.load(f) 74 | except tomllib.TOMLDecodeError: 75 | return None 76 | 77 | # Not valid PEP 621 metadata 78 | if ( 79 | "project" not in pyproject_contents 80 | or "name" not in pyproject_contents["project"] 81 | ): 82 | return None 83 | 84 | project_table = pyproject_contents["project"] 85 | 86 | # Dynamic dependencies require build backend invocation 87 | dynamic = project_table.get("dynamic", []) 88 | if "dependencies" in dynamic or "optional-dependencies" in dynamic: 89 | return None 90 | 91 | package_name = project_table["name"] 92 | comes_from = f"{package_name} ({src_file.as_posix()})" 93 | 94 | extras = project_table.get("optional-dependencies", {}).keys() 95 | install_requirements = [ 96 | InstallRequirement(Requirement(req), comes_from) 97 | for req in project_table.get("dependencies", []) 98 | ] 99 | for extra, reqs in ( 100 | pyproject_contents.get("project", {}).get("optional-dependencies", {}).items() 101 | ): 102 | for req in reqs: 103 | requirement = Requirement(req) 104 | if requirement.name == package_name: 105 | # Similar to logic for handling self-referential requirements 106 | # from _prepare_requirements 107 | requirement.url = src_file.parent.absolute().as_uri() 108 | 109 | # Note we don't need to modify `requirement` to include this extra 110 | marker = Marker(f"extra == '{extra}'") 111 | install_requirements.append( 112 | InstallRequirement(requirement, comes_from, markers=marker) 113 | ) 114 | 115 | return StaticProjectMetadata( 116 | extras=tuple(extras), 117 | requirements=tuple(install_requirements), 118 | ) 119 | 120 | 121 | def build_project_metadata( 122 | src_file: pathlib.Path, 123 | build_targets: tuple[str, ...], 124 | *, 125 | upgrade_packages: tuple[str, ...] | None = None, 126 | attempt_static_parse: bool, 127 | isolated: bool, 128 | quiet: bool, 129 | ) -> ProjectMetadata | StaticProjectMetadata: 130 | """ 131 | Return the metadata for a project. 132 | 133 | First, optionally attempt to determine the metadata statically from the 134 | ``pyproject.toml`` file. This will not work if build_targets are specified, 135 | since we cannot determine build requirements statically. 136 | 137 | Uses the ``prepare_metadata_for_build_wheel`` hook for the wheel metadata 138 | if available, otherwise ``build_wheel``. 139 | 140 | Uses the ``prepare_metadata_for_build_{target}`` hook for each of the 141 | requested ``build_targets``, if available. 142 | 143 | :param src_file: Project source file 144 | :param build_targets: A tuple of build targets to get the dependencies 145 | of (``sdist`` or ``wheel`` or ``editable``). 146 | :param attempt_static_parse: Whether to attempt to statically parse the 147 | project metadata from ``pyproject.toml``. 148 | Cannot be used with ``build_targets``. 149 | :param isolated: Whether to invoke the backend in the current 150 | environment or to create an isolated one and invoke it 151 | there. 152 | :param quiet: Whether to suppress the output of subprocesses.
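    (Illustrative note, not upstream text.) A call that prefers the cheap
    static path might look like:

        build_project_metadata(
            pathlib.Path("pyproject.toml"),
            build_targets=(),
            attempt_static_parse=True,
            isolated=True,
            quiet=True,
        )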
153 | """ 154 | 155 | if attempt_static_parse: 156 | if build_targets: 157 | raise ValueError( 158 | "Cannot execute the PEP 517 optional get_requires_for_build* " 159 | "hooks statically, as build requirements are requested" 160 | ) 161 | project_metadata = maybe_statically_parse_project_metadata(src_file) 162 | if project_metadata is not None: 163 | return project_metadata 164 | 165 | src_dir = src_file.parent 166 | with _create_project_builder( 167 | src_dir, 168 | upgrade_packages=upgrade_packages, 169 | isolated=isolated, 170 | quiet=quiet, 171 | ) as builder: 172 | metadata = _build_project_wheel_metadata(builder) 173 | extras = tuple(metadata.get_all("Provides-Extra") or ()) 174 | requirements = tuple( 175 | _prepare_requirements(metadata=metadata, src_file=src_file) 176 | ) 177 | build_requirements = tuple( 178 | _prepare_build_requirements( 179 | builder=builder, 180 | src_file=src_file, 181 | build_targets=build_targets, 182 | package_name=_get_name(metadata), 183 | ) 184 | ) 185 | return ProjectMetadata( 186 | extras=extras, 187 | requirements=requirements, 188 | build_requirements=build_requirements, 189 | ) 190 | 191 | 192 | @contextlib.contextmanager 193 | def _env_var( 194 | env_var_name: str, 195 | env_var_value: str, 196 | /, 197 | ) -> Iterator[None]: 198 | sentinel = object() 199 | original_pip_constraint = os.getenv(env_var_name, sentinel) 200 | pip_constraint_was_unset = original_pip_constraint is sentinel 201 | 202 | os.environ[env_var_name] = env_var_value 203 | try: 204 | yield 205 | finally: 206 | if pip_constraint_was_unset: 207 | del os.environ[env_var_name] 208 | else: 209 | # Assert here is necessary because MyPy can't infer type 210 | # narrowing in the complex case. 211 | assert isinstance(original_pip_constraint, str) 212 | os.environ[env_var_name] = original_pip_constraint 213 | 214 | 215 | @contextlib.contextmanager 216 | def _temporary_constraints_file_set_for_pip( 217 | upgrade_packages: tuple[str, ...], 218 | ) -> Iterator[None]: 219 | with tempfile.NamedTemporaryFile( 220 | mode="w+t", 221 | delete=False, # FIXME: switch to `delete_on_close` in Python 3.12+ 222 | ) as tmpfile: 223 | # NOTE: `delete_on_close=False` here (or rather `delete=False`, 224 | # NOTE: temporarily) is important for cross-platform execution. It is 225 | # NOTE: required on Windows so that the underlying `pip install` 226 | # NOTE: invocation by pypa/build will be able to access the constraint 227 | # NOTE: file via a subprocess and not fail installing it due to a 228 | # NOTE: permission error related to this file handle still open in our 229 | # NOTE: parent process. To achieve this, we `.close()` the file 230 | # NOTE: descriptor before we hand off the control to the build frontend 231 | # NOTE: and with `delete_on_close=False`, the 232 | # NOTE: `tempfile.NamedTemporaryFile()` context manager does not remove 233 | # NOTE: it from disk right away. 234 | # NOTE: Due to support of versions below Python 3.12, we are forced to 235 | # NOTE: temporarily resort to using `delete=False`, meaning that the CM 236 | # NOTE: never attempts removing the file from disk, not even on exit. 237 | # NOTE: So we do this manually until we can migrate to using the more 238 | # NOTE: ergonomic argument `delete_on_close=False`. 
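# (Illustrative sketch, not part of the upstream source: given
# upgrade_packages=("setuptools>=68",), the temporary file ends up holding
# that single line and PIP_CONSTRAINT points at its path, so every
# `pip install` run while provisioning the isolated build environment
# honours the pin -- pip reads the PIP_CONSTRAINT environment variable
# exactly as if --constraint had been passed on the command line.)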
239 | 240 | # Write packages to upgrade to a temporary file to set as 241 | # constraints for the installation to the builder environment, 242 | # in case build requirements are among them 243 | tmpfile.write("\n".join(upgrade_packages)) 244 | 245 | # FIXME: replace `delete` with `delete_on_close` in Python 3.12+ 246 | # FIXME: and replace `.close()` with `.flush()` 247 | tmpfile.close() 248 | 249 | try: 250 | with _env_var("PIP_CONSTRAINT", tmpfile.name): 251 | yield 252 | finally: 253 | # FIXME: replace `delete` with `delete_on_close` in Python 3.12+ 254 | # FIXME: and drop this manual deletion 255 | os.unlink(tmpfile.name) 256 | 257 | 258 | @contextlib.contextmanager 259 | def _create_project_builder( 260 | src_dir: pathlib.Path, 261 | *, 262 | upgrade_packages: tuple[str, ...] | None = None, 263 | isolated: bool, 264 | quiet: bool, 265 | ) -> Iterator[build.ProjectBuilder]: 266 | if quiet: 267 | runner = pyproject_hooks.quiet_subprocess_runner 268 | else: 269 | runner = pyproject_hooks.default_subprocess_runner 270 | 271 | if not isolated: 272 | yield build.ProjectBuilder(src_dir, runner=runner) 273 | return 274 | 275 | maybe_pip_constrained_context = ( 276 | contextlib.nullcontext() 277 | if upgrade_packages is None 278 | else _temporary_constraints_file_set_for_pip(upgrade_packages) 279 | ) 280 | 281 | with maybe_pip_constrained_context, build.env.DefaultIsolatedEnv() as env: 282 | builder = build.ProjectBuilder.from_isolated_env(env, src_dir, runner) 283 | env.install(builder.build_system_requires) 284 | env.install(builder.get_requires_for_build("wheel")) 285 | yield builder 286 | 287 | 288 | def _build_project_wheel_metadata( 289 | builder: build.ProjectBuilder, 290 | ) -> PackageMetadata: 291 | with tempfile.TemporaryDirectory() as tmpdir: 292 | path = pathlib.Path(builder.metadata_path(tmpdir)) 293 | return importlib_metadata.PathDistribution(path).metadata 294 | 295 | 296 | def _get_name(metadata: PackageMetadata) -> str: 297 | retval = metadata.get_all("Name")[0] # type: ignore[index] 298 | assert isinstance(retval, str) 299 | return retval 300 | 301 | 302 | def _prepare_requirements( 303 | metadata: PackageMetadata, src_file: pathlib.Path 304 | ) -> Iterator[InstallRequirement]: 305 | package_name = _get_name(metadata) 306 | comes_from = f"{package_name} ({src_file.as_posix()})" 307 | package_dir = src_file.parent 308 | 309 | for req in metadata.get_all("Requires-Dist") or []: 310 | parts = parse_req_from_line(req, comes_from) 311 | if parts.requirement.name == package_name: 312 | # Replace package name with package directory in the requirement 313 | # string so that pip can find the package as self-referential. 314 | # Note the string can contain extras, so we need to replace only 315 | # the package name, not the whole string. 
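# (Illustrative sketch, not part of the upstream source: for a distribution
# named "small-fake-a" whose sources live in /src/small_fake_a, a
# self-referential Requires-Dist entry such as "small-fake-a[tests]" is
# rewritten to "/src/small_fake_a[tests]" before being re-parsed below, so
# pip resolves it against the local project directory.)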
316 | replaced_package_name = req.replace(package_name, str(package_dir), 1) 317 | parts = parse_req_from_line(replaced_package_name, comes_from) 318 | 319 | yield copy_install_requirement( 320 | InstallRequirement( 321 | parts.requirement, 322 | comes_from, 323 | link=parts.link, 324 | markers=parts.markers, 325 | extras=parts.extras, 326 | ) 327 | ) 328 | 329 | 330 | def _prepare_build_requirements( 331 | builder: build.ProjectBuilder, 332 | src_file: pathlib.Path, 333 | build_targets: tuple[str, ...], 334 | package_name: str, 335 | ) -> Iterator[InstallRequirement]: 336 | result = collections.defaultdict(set) 337 | 338 | # Build requirements will only be present if a pyproject.toml file exists, 339 | # but if there is also a setup.py file then only that will be explicitly 340 | # processed due to the order of `DEFAULT_REQUIREMENTS_FILES`. 341 | src_file = src_file.parent / PYPROJECT_TOML 342 | 343 | for req in builder.build_system_requires: 344 | result[req].add(f"{package_name} ({src_file}::build-system.requires)") 345 | for build_target in build_targets: 346 | for req in builder.get_requires_for_build(build_target): 347 | result[req].add( 348 | f"{package_name} ({src_file}::build-system.backend::{build_target})" 349 | ) 350 | 351 | for req, comes_from_sources in result.items(): 352 | for comes_from in comes_from_sources: 353 | yield install_req_from_line(req, comes_from=comes_from) 354 | --------------------------------------------------------------------------------
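A minimal illustrative sketch (not a file from this repository) of the two
annotation helpers defined in piptools/writer.py above; the import path and
function names are real, while the package names are invented:

    from piptools.writer import annotation_style_line, annotation_style_split

    required_by = {"small-fake-a", "small-fake-b"}

    print(annotation_style_line(required_by))
    # -> "# via small-fake-a, small-fake-b"

    print(annotation_style_split(required_by))
    # -> "# via"
    #    "    # small-fake-a"
    #    "    # small-fake-b"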